Dataset schema: "Unnamed: 0" (int64, values 0 to 10k), "function" (string, 79 to 138k chars), "label" (string, 20 classes), "info" (string, 42 to 261 chars).
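Each record below pairs a Python function in which one exception class has been masked with the placeholder __HOLE__, the masked exception name (label), and the path of the source file it was mined from (info). As a minimal sketch of how such records could be consumed, assuming the dump is exported as a CSV with the column names above (the filename here is hypothetical):

    import pandas as pd

    # Hypothetical filename; the dump above does not name the underlying file.
    df = pd.read_csv("masked_exceptions.csv")

    # Each row: a function body with __HOLE__ in place of an exception class,
    # the masked class name ("label"), and the source path ("info").
    for _, row in df.head(3).iterrows():
        print(row["label"], "<-", row["info"])
        print(row["function"][:80] + "...")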
8,900
def to_internal_value(self, data):
    try:
        return int(data)
    except __HOLE__:
        self.fail('not_a_number')
ValueError
dataset/ETHPy150Open uppsaladatavetare/foobar-api/src/foobar/rest/fields.py/IntEnumField.to_internal_value
8,901
def get_max_age(response):
    """
    Returns the max-age from the response Cache-Control header as an integer
    (or ``None`` if it wasn't found or wasn't an integer).
    """
    if not response.has_header('Cache-Control'):
        return
    cc = dict([_to_tuple(el) for el in
               cc_delim_re.split(response['Cache-Control'])])
    if 'max-age' in cc:
        try:
            return int(cc['max-age'])
        except (ValueError, __HOLE__):
            pass
TypeError
dataset/ETHPy150Open adieu/django-nonrel/django/utils/cache.py/get_max_age
8,902
def configure(self):
    """
    Combines (in order) the declared/inherited inner Meta, any view options,
    and finally any valid AJAX GET parameters from client modifications to
    the data they see.
    """
    self.resolve_virtual_columns(*tuple(self.missing_columns))
    self.config = self.normalize_config(self._meta.__dict__, self.query_config)

    self.config['column_searches'] = {}
    for i, name in enumerate(self.columns.keys()):
        column_search = self.query_config.get(OPTION_NAME_MAP['search_column'] % i, None)
        if column_search:
            self.config['column_searches'][name] = column_search

    column_order = list(self.columns.keys())
    if self.config['ordering']:
        for i, name in enumerate(self.config['ordering']):
            column_name = name.lstrip('-+')
            try:
                index = column_order.index(column_name)
            except __HOLE__:
                # It is important to ignore a bad ordering name, since the model.Meta may
                # specify a field name that is not present on the datatable columns list.
                continue
            self.columns[column_name].sort_priority = i
            self.columns[column_name].sort_direction = 'desc' if name[0] == '-' else 'asc'
            self.columns[column_name].index = index

    # Client request configuration mergers
ValueError
dataset/ETHPy150Open pivotal-energy-solutions/django-datatable-view/datatableview/datatables.py/Datatable.configure
8,903
def normalize_config_start_offset(self, config, query_config):
    try:
        start_offset = query_config.get(OPTION_NAME_MAP['start_offset'], 0)
        start_offset = int(start_offset)
    except __HOLE__:
        start_offset = 0
    else:
        if start_offset < 0:
            start_offset = 0
    return start_offset
ValueError
dataset/ETHPy150Open pivotal-energy-solutions/django-datatable-view/datatableview/datatables.py/Datatable.normalize_config_start_offset
8,904
def normalize_config_page_length(self, config, query_config):
    try:
        page_length = query_config.get(OPTION_NAME_MAP['page_length'], config['page_length'])
        page_length = int(page_length)
    except __HOLE__:
        page_length = config['page_length']
    else:
        if page_length == -1:  # dataTables' way of asking for all items, no pagination
            pass
        elif page_length < MINIMUM_PAGE_LENGTH:
            page_length = MINIMUM_PAGE_LENGTH
    return page_length
ValueError
dataset/ETHPy150Open pivotal-energy-solutions/django-datatable-view/datatableview/datatables.py/Datatable.normalize_config_page_length
8,905
def normalize_config_ordering(self, config, query_config):
    # For "n" columns (iSortingCols), the queried values iSortCol_0..iSortCol_n are used as
    # column indices to check the values of sSortDir_X and bSortable_X
    default_ordering = config['ordering']
    ordering = []
    columns_list = list(self.columns.values())

    try:
        num_sorting_columns = int(query_config.get(OPTION_NAME_MAP['num_sorting_columns'], 0))
    except ValueError:
        num_sorting_columns = 0

    # Default sorting from view or model definition
    if num_sorting_columns == 0:
        return default_ordering

    for sort_queue_i in range(num_sorting_columns):
        try:
            column_index = int(query_config.get(OPTION_NAME_MAP['sort_column'] % sort_queue_i, ''))
        except __HOLE__:
            continue

        # Reject out-of-range sort requests
        if column_index >= len(columns_list):
            continue

        column = columns_list[column_index]

        # Reject requests for unsortable columns
        if column.name in config['unsortable_columns']:
            continue

        sort_direction = query_config.get(OPTION_NAME_MAP['sort_column_direction'] % sort_queue_i, None)
        sort_modifier = None
        if sort_direction == 'asc':
            sort_modifier = ''
        elif sort_direction == 'desc':
            sort_modifier = '-'
        else:
            # Aggressively skip invalid specification
            continue

        ordering.append('%s%s' % (sort_modifier, column.name))

    if not ordering and config['model']:
        return config['model']._meta.ordering
    return ordering
ValueError
dataset/ETHPy150Open pivotal-energy-solutions/django-datatable-view/datatableview/datatables.py/Datatable.normalize_config_ordering
8,906
def run(self):
    while not self.done:
        line = self.fd.readline()
        if line:
            data = None
            try:
                data = json.loads(line.strip())
            except __HOLE__:
                if not isinstance(data, dict):
                    logger.exception(
                        "Non JSON data from daemon: {0}".format(line)
                    )
            else:
                self.call_callback(data)
ValueError
dataset/ETHPy150Open srusskih/SublimeJEDI/sublime_jedi/utils.py/ThreadReader.run
8,907
def _start_process(self, settings):
    options = {
        'stdin': subprocess.PIPE,
        'stdout': subprocess.PIPE,
        'stderr': subprocess.PIPE,
        'universal_newlines': True,
        'cwd': CUR_DIR,
        'bufsize': -1,
    }

    # hide "cmd" window in Windows
    if sys.platform == "win32":
        startupinfo = subprocess.STARTUPINFO()
        startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
        options['startupinfo'] = startupinfo

    command = [
        settings['python_interpreter'],
        '-B', 'daemon.py',
        '-p', settings['project_name']
    ]
    for folder in settings['extra_packages']:
        command.extend(['-e', folder])
    command.extend(['-f', settings['complete_funcargs']])

    logger.debug(
        'Daemon process starting with parameters: {0} {1}'
        .format(command, options)
    )
    try:
        return subprocess.Popen(command, **options)
    except __HOLE__:
        logger.error(
            'Daemon process failed with next parameters: {0} {1}'
            .format(command, options)
        )
        raise
OSError
dataset/ETHPy150Open srusskih/SublimeJEDI/sublime_jedi/utils.py/Daemon._start_process
8,908
def expand_path(view, path):
    """
    Expand ST build system and OS environment variables to normalized path
    that allows collapsing up-level references for basic path manipulation
    through combination of variables and/or separators, i.e.:

        "python_interpreter": "$project_path/../../virtual/bin/python",
        "python_package_paths": ["$home/.buildout/eggs"]

    :type view: sublime.View
    :type path: str
    :rtype: str
    """
    subl_vars = {}
    try:
        subl_vars['$file'] = view.file_name()
        subl_vars['$packages'] = sublime.packages_path()
        try:
            subl_vars['$project'] = view.window().project_file_name()
        except __HOLE__:
            subl_vars['$project'] = get_project_file_name(view.window())
        subl_vars = split_path(subl_vars, ['$file', '$project'])
        if '$' in path or '%' in path:
            exp_path = path
            for k in sorted(subl_vars, key=len, reverse=True):
                if subl_vars[k]:
                    exp_path = exp_path.replace(k, subl_vars[k])
            exp_path = os.path.normpath(os.path.expandvars(exp_path))
            if os.path.exists(exp_path):
                path = exp_path
    except Exception:
        logger.exception('Exception while expanding "{0}"'.format(path))
    return path
AttributeError
dataset/ETHPy150Open srusskih/SublimeJEDI/sublime_jedi/utils.py/expand_path
8,909
def getXLevelStats(self, sh_order):
    """ Checks and return the stats about the levels for different X-rules in the current span.

        This is used only for Shallow-n Hiero, particularly in X productions.
        Returns a list of boolean flags for each level of X-rules in the current span
        upto the top_X_level. Each flag can be True (if at least one terminal 'X' entry
        is found) or false (otherwise).
    """
    left_side = "X"
    for key in self.table.iterkeys():
        if key[0] != left_side:
            continue
        found_levels = 0
        curr_depth = self.top_X_level + 1 if self.top_X_level < sh_order else sh_order
        XLevels = [False for i in xrange(curr_depth)]
        for entry in self.table[key]:
            entry_depth = entry.depth_hier
            if entry_depth < curr_depth:
                try:
                    if not XLevels[entry_depth]:
                        XLevels[entry_depth] = True
                        found_levels += 1
                except __HOLE__:
                    sys.stderr.write("ERROR: Index Out of Range error in cell.py : 291\n")
                    sys.stderr.write(" Index %d exceeds the size of array XLevels (%d). Exiting!!\n" % (entry_depth, curr_depth))
                    sys.exit(1)
            if (found_levels == curr_depth):
                break
    return XLevels
IndexError
dataset/ETHPy150Open sfu-natlang/Kriya/src/Kriya-Decoder/cell.py/Cell.getXLevelStats
8,910
def absent(name):
    '''
    Ensures that the host group does not exist, eventually delete host group.

    .. versionadded:: 2016.3.0

    :param name: name of the host group
    :param _connection_user: Optional - zabbix user (can also be set in opts or pillar, see module's docstring)
    :param _connection_password: Optional - zabbix password (can also be set in opts or pillar, see module's docstring)
    :param _connection_url: Optional - url of zabbix frontend (can also be set in opts, pillar, see module's docstring)

    .. code-block:: yaml

        delete_testing_host_group:
            zabbix_hostgroup.absent:
                - name: 'My hostgroup name'
    '''
    ret = {'name': name, 'changes': {}, 'result': False, 'comment': ''}

    # Comment and change messages
    comment_hostgroup_deleted = 'Host group {0} deleted.'.format(name)
    comment_hostgroup_notdeleted = 'Unable to delete host group: {0}. '.format(name)
    comment_hostgroup_notexists = 'Host group {0} does not exist.'.format(name)
    changes_hostgroup_deleted = {name: {'old': 'Host group {0} exists.'.format(name),
                                        'new': 'Host group {0} deleted.'.format(name),
                                        }
                                 }

    hostgroup_exists = __salt__['zabbix.hostgroup_exists'](name)

    # Dry run, test=true mode
    if __opts__['test']:
        if not hostgroup_exists:
            ret['result'] = True
            ret['comment'] = comment_hostgroup_notexists
        else:
            ret['result'] = None
            ret['comment'] = comment_hostgroup_deleted
            ret['changes'] = changes_hostgroup_deleted
        return ret

    hostgroup_get = __salt__['zabbix.hostgroup_get'](name)

    if not hostgroup_get:
        ret['result'] = True
        ret['comment'] = comment_hostgroup_notexists
    else:
        try:
            groupid = hostgroup_get[0]['groupid']
            hostgroup_delete = __salt__['zabbix.hostgroup_delete'](groupid)
        except __HOLE__:
            hostgroup_delete = False

        if hostgroup_delete and 'error' not in hostgroup_delete:
            ret['result'] = True
            ret['comment'] = comment_hostgroup_deleted
            ret['changes'] = changes_hostgroup_deleted
        else:
            ret['result'] = False
            ret['comment'] = comment_hostgroup_notdeleted + str(hostgroup_delete['error'])

    return ret
KeyError
dataset/ETHPy150Open saltstack/salt/salt/states/zabbix_hostgroup.py/absent
8,911
def respond_webhook(self, environ):
    """
    Passes the request onto a bot with a webhook if the
    webhook path is requested.
    """
    request = FieldStorage(fp=environ["wsgi.input"], environ=environ)
    url = environ["PATH_INFO"]
    params = dict([(k, request[k].value) for k in request])
    try:
        if self.bot is None:
            raise NotImplementedError
        response = self.bot.handle_webhook_event(environ, url, params)
    except __HOLE__:
        return 404
    except:
        self.logger.debug(format_exc())
        return 500
    return response or 200
NotImplementedError
dataset/ETHPy150Open stephenmcd/gnotty/gnotty/server.py/IRCApplication.respond_webhook
8,912
def respond_static(self, environ):
    """
    Serves a static file when Django isn't being used.
    """
    path = os.path.normpath(environ["PATH_INFO"])
    if path == "/":
        content = self.index()
        content_type = "text/html"
    else:
        path = os.path.join(os.path.dirname(__file__), path.lstrip("/"))
        try:
            with open(path, "r") as f:
                content = f.read()
        except __HOLE__:
            return 404
        content_type = guess_type(path)[0]
    return (200, [("Content-Type", content_type)], content)
IOError
dataset/ETHPy150Open stephenmcd/gnotty/gnotty/server.py/IRCApplication.respond_static
8,913
def authorized(self, environ):
    """
    If we're running Django and ``GNOTTY_LOGIN_REQUIRED`` is set to
    ``True``, pull the session cookie from the environment and
    validate that the user is authenticated.
    """
    if self.django and settings.LOGIN_REQUIRED:
        try:
            from django.conf import settings as django_settings
            from django.contrib.auth import SESSION_KEY
            from django.contrib.auth.models import User
            from django.contrib.sessions.models import Session
            from django.core.exceptions import ObjectDoesNotExist
            cookie = SimpleCookie(environ["HTTP_COOKIE"])
            cookie_name = django_settings.SESSION_COOKIE_NAME
            session_key = cookie[cookie_name].value
            session = Session.objects.get(session_key=session_key)
            user_id = session.get_decoded().get(SESSION_KEY)
            user = User.objects.get(id=user_id)
        except (__HOLE__, KeyError, ObjectDoesNotExist):
            return False
    return True
ImportError
dataset/ETHPy150Open stephenmcd/gnotty/gnotty/server.py/IRCApplication.authorized
8,914
def kill(pid_file):
    """
    Attempts to shut down a previously started daemon.
    """
    try:
        with open(pid_file) as f:
            os.kill(int(f.read()), 9)
        os.remove(pid_file)
    except (IOError, __HOLE__):
        return False
    return True
OSError
dataset/ETHPy150Open stephenmcd/gnotty/gnotty/server.py/kill
8,915
def parallelingest(self, zslab, resolution):

    # extract parameters for iteration
    numxtiles = self.proj.datasetcfg.imagesz[resolution][0]/self.tilesz
    numytiles = self.proj.datasetcfg.imagesz[resolution][1]/self.tilesz
    zstart = self.proj.datasetcfg.slicerange[0]
    zend = self.proj.datasetcfg.slicerange[1]
    # slices per ingest group
    zslices = self.proj.datasetcfg.cubedim[resolution][2]

    print zslab, zslab*zslices, resolution, multiprocessing.current_process()

    # Ingest in database aligned slabs in the z dimension for each zslab
    # over all tiles in that slice
    for ytile in range(numytiles):
        for xtile in range(numxtiles):

            # RBTODO need to generalize to other project types
            cuboid = np.zeros ( [zslices,self.tilesz,self.tilesz], dtype=np.uint8 )

            # over each slice
            for zslice in range(zslices):

                # if we are at the end of the space, quit
                if zslab*zslices+zstart+zslice > zend:
                    break

                filename = '{}/{}/{}/{}/{}.jpg'.format(self.prefix,resolution,zslab*zslices+zslice+zstart,ytile,xtile)

                try:
                    # add tile to stack
                    tileimage = Image.open ( filename, 'r' )
                    cuboid [zslice,:,:] = np.asarray ( tileimage )
                except __HOLE__, e:
                    print "Failed to open file %s" % (e)
                    # Writing the missing image-files and not raising an error.
                    # Look at missing_files in the same directory
                    f = open('missing_files','a')
                    f.write(filename+'\n')
                    f.close()
                    #raise

            # here we have continuous cuboid, let's upload it to the database
            #print "Starting commit",zslab*zslices,multiprocessing.current_process()
            #continue
            corner = [ xtile*self.tilesz, ytile*self.tilesz, zslab*zslices ]
            self.db.writeImageCuboid ( corner, resolution, cuboid)
            if(zslice%64==0):
                self.db.commit()
IOError
dataset/ETHPy150Open neurodata/ndstore/ingest/catmaid/catmaid_parallel_kunal.py/CatmaidIngester.parallelingest
8,916
def iterate_date_values(d, start_date=None, stop_date=None, default=0):
    """
    Convert (date, value) sorted lists into contiguous value-per-day data sets.
    Great for sparklines.

    Example::

        [(datetime.date(2011, 1, 1), 1), (datetime.date(2011, 1, 4), 2)] -> [1, 0, 0, 2]
    """
    dataiter = iter(d)
    cur_day, cur_val = next(dataiter)

    start_date = start_date or cur_day

    while cur_day < start_date:
        cur_day, cur_val = next(dataiter)

    for d in iterate_date(start_date, stop_date):
        if d != cur_day:
            yield default
            continue

        yield cur_val
        try:
            cur_day, cur_val = next(dataiter)
        except __HOLE__:
            if not stop_date:
                raise
StopIteration
dataset/ETHPy150Open shazow/unstdlib.py/unstdlib/standard/datetime_.py/iterate_date_values
8,917
def install(app_name):
    """
    Installs the application and gives a QuepyApp object
    """
    module_paths = {
        u"settings": u"{0}.settings",
        u"parsing": u"{0}",
    }
    modules = {}

    for module_name, module_path in module_paths.iteritems():
        try:
            modules[module_name] = import_module(module_path.format(app_name))
        except __HOLE__, error:
            message = u"Error importing {0!r}: {1}"
            raise ImportError(message.format(module_name, error))

    return QuepyApp(**modules)
ImportError
dataset/ETHPy150Open machinalis/quepy/quepy/quepyapp.py/install
8,918
def __init__(self, parsing, settings):
    """
    Creates the application based on `parsing`, `settings` modules.
    """
    assert isinstance(parsing, ModuleType)
    assert isinstance(settings, ModuleType)

    self._parsing_module = parsing
    self._settings_module = settings

    # Save the settings right after loading settings module
    self._save_settings_values()

    self.tagger = get_tagger()
    self.language = getattr(self._settings_module, "LANGUAGE", None)
    if not self.language:
        raise ValueError("Missing configuration for language")

    self.rules = []
    for element in dir(self._parsing_module):
        element = getattr(self._parsing_module, element)
        try:
            if issubclass(element, QuestionTemplate) and \
                    element is not QuestionTemplate:
                self.rules.append(element())
        except __HOLE__:
            continue

    self.rules.sort(key=lambda x: x.weight, reverse=True)
TypeError
dataset/ETHPy150Open machinalis/quepy/quepy/quepyapp.py/QuepyApp.__init__
8,919
def parse_database(database_filename):
    try:
        from lxml import etree
    except __HOLE__:
        LOG.warning("Install the 'lxml' Python package to speed up CAN database parsing")
        try:
            # Python 2.5
            import xml.etree.cElementTree as etree
        except ImportError:
            try:
                # Python 2.5
                import xml.etree.ElementTree as etree
            except ImportError:
                try:
                    # normal cElementTree install
                    import cElementTree as etree
                except ImportError:
                    try:
                        # normal ElementTree install
                        import elementtree.ElementTree as etree
                    except ImportError:
                        fatal_error("Failed to import ElementTree from any known place")
    return etree.parse(database_filename)
ImportError
dataset/ETHPy150Open openxc/openxc-python/openxc/generator/xml_to_json.py/parse_database
8,920
def __init__(self, ip, name, weight, region=None):
    """
    args:
        ip: string, IP address
        name: string, name of the server
        weight: int, weight of the server, if set to 0 the server is disabled
        region: string, id of the region, used in topology-based distribution
    """
    ### ip
    try:
        _ip = ipaddress.ip_address(ip)
    except __HOLE__:
        log_msg = ('"{}" does not appear to be a valid IP address'
                   .format(ip))
        LOG.error(log_msg)
        raise Error(log_msg)

    if _ip.version != 4:
        log_msg = 'only v4 IP addresses are currently supported'
        LOG.error(log_msg)
        raise Error(log_msg)

    self.ip = ip

    ### name
    if (not isinstance(name, str)
            or len(name) > MAX_POOL_MEMBER_NAME_LEN):
        log_msg = ('"{}" name must be a str, {} chars max'.
                   format(name, MAX_POOL_MEMBER_NAME_LEN))
        LOG.error(log_msg)
        raise Error(log_msg)
    else:
        self.name = name

    ### weight
    if (not isinstance(weight, int) or weight < 0
            or weight > MAX_POOL_MEMBER_WEIGHT):
        log_msg = ('"{}" weight "{}" must be an int between 0 and {}'.
                   format(name, weight, MAX_POOL_MEMBER_WEIGHT))
        raise Error(log_msg)
    else:
        self.weight = weight

    ### region
    if (not region is None
            and (not isinstance(region, (str)) or len(region) > MAX_REGION_LEN)):
        log_msg = ('"{}" region "{}" must be a str, {} chars max'.
                   format(name, region, MAX_POOL_MEMBER_NAME_LEN))
        LOG.error(log_msg)
        raise Error(log_msg)
    else:
        self.region = region

    # curent status of the server
    # None = new, True = up, False = down
    self.status = None

    # reason why this status has been set
    self.status_reason = None

    # timestamp when the probe was issued last time
    # used to determine when to send a new probe
    self.last_probe_issued_time = None

    # this is used by tracker to determine how many more
    # probing requests to attempt before declaring the member down
    # set to the parent's pool monitor retries value initially
    self.retries_left = None
ValueError
dataset/ETHPy150Open polaris-gslb/polaris-gslb/polaris_health/state/pool.py/PoolMember.__init__
8,921
def get_current_user():
    token = os.getenv('HTTP_AUTHORIZATION')
    if token:
        try:
            token = token.split(' ')[1]
        except __HOLE__:
            pass

    user, _ = User.get_by_bearer_token(token)
    return user
IndexError
dataset/ETHPy150Open loudnate/appengine-endpoints-auth-example/auth/endpoints.py/get_current_user
8,922
def build_page(rex, kwargs):
    """Build page key from format pattern and request data.

    :param str: Format string (e.g. `'{node}:{file}'`)
    :param dict kwargs: Data used to render format string
    """
    target_node = kwargs.get('node') or kwargs.get('project')
    target_id = target_node._id
    data = {
        'target_id': target_id,
    }
    data.update(kwargs)
    data.update(request.args.to_dict())
    try:
        return rex.format(**data)
    except __HOLE__:
        return None
KeyError
dataset/ETHPy150Open CenterForOpenScience/osf.io/framework/analytics/__init__.py/build_page
8,923
def _get_default_engine(path, allow_remote=False):
    if allow_remote and is_remote_uri(path):  # pragma: no cover
        try:
            import netCDF4
            engine = 'netcdf4'
        except ImportError:
            try:
                import pydap
                engine = 'pydap'
            except ImportError:
                raise ValueError('netCDF4 or pydap is required for accessing '
                                 'remote datasets via OPeNDAP')
    else:
        try:
            import netCDF4
            engine = 'netcdf4'
        except ImportError:  # pragma: no cover
            try:
                import scipy.io.netcdf
                engine = 'scipy'
            except __HOLE__:
                raise ValueError('cannot read or write netCDF files without '
                                 'netCDF4-python or scipy installed')
    return engine
ImportError
dataset/ETHPy150Open pydata/xarray/xarray/backends/api.py/_get_default_engine
8,924
def open_dataset(filename_or_obj, group=None, decode_cf=True,
                 mask_and_scale=True, decode_times=True,
                 concat_characters=True, decode_coords=True, engine=None,
                 chunks=None, lock=None, drop_variables=None):
    """Load and decode a dataset from a file or file-like object.

    Parameters
    ----------
    filename_or_obj : str, file or xarray.backends.*DataStore
        Strings are interpreted as a path to a netCDF file or an OpenDAP URL
        and opened with python-netCDF4, unless the filename ends with .gz, in
        which case the file is gunzipped and opened with scipy.io.netcdf (only
        netCDF3 supported). File-like objects are opened with scipy.io.netcdf
        (only netCDF3 supported).
    group : str, optional
        Path to the netCDF4 group in the given file to open (only works for
        netCDF4 files).
    decode_cf : bool, optional
        Whether to decode these variables, assuming they were saved according
        to CF conventions.
    mask_and_scale : bool, optional
        If True, replace array values equal to `_FillValue` with NA and scale
        values according to the formula `original_values * scale_factor +
        add_offset`, where `_FillValue`, `scale_factor` and `add_offset` are
        taken from variable attributes (if they exist). If the `_FillValue` or
        `missing_value` attribute contains multiple values a warning will be
        issued and all array values matching one of the multiple values will
        be replaced by NA.
    decode_times : bool, optional
        If True, decode times encoded in the standard NetCDF datetime format
        into datetime objects. Otherwise, leave them encoded as numbers.
    concat_characters : bool, optional
        If True, concatenate along the last dimension of character arrays to
        form string arrays. Dimensions will only be concatenated over (and
        removed) if they have no corresponding variable and if they are only
        used as the last dimension of character arrays.
    decode_coords : bool, optional
        If True, decode the 'coordinates' attribute to identify coordinates in
        the resulting dataset.
    engine : {'netcdf4', 'scipy', 'pydap', 'h5netcdf', 'pynio'}, optional
        Engine to use when reading files. If not provided, the default engine
        is chosen based on available dependencies, with a preference for
        'netcdf4'.
    chunks : int or dict, optional
        If chunks is provided, it used to load the new dataset into dask
        arrays. This is an experimental feature; see the documentation for
        more details.
    lock : False, True or threading.Lock, optional
        If chunks is provided, this argument is passed on to
        :py:func:`dask.array.from_array`. By default, a per-variable lock is
        used when reading data from netCDF files with the netcdf4 and h5netcdf
        engines to avoid issues with concurrent access when using dask's
        multithreaded backend.
    drop_variables: string or iterable, optional
        A variable or list of variables to exclude from being parsed from the
        dataset. This may be useful to drop variables with problems or
        inconsistent values.

    Returns
    -------
    dataset : Dataset
        The newly created dataset.

    See Also
    --------
    open_mfdataset
    """
    if not decode_cf:
        mask_and_scale = False
        decode_times = False
        concat_characters = False
        decode_coords = False

    def maybe_decode_store(store, lock=False):
        ds = conventions.decode_cf(
            store, mask_and_scale=mask_and_scale, decode_times=decode_times,
            concat_characters=concat_characters, decode_coords=decode_coords,
            drop_variables=drop_variables)

        if chunks is not None:
            try:
                from dask.base import tokenize
            except ImportError:
                import dask  # raise the usual error if dask is entirely missing
                if dask.__version__ < '0.6':
                    raise ImportError('xarray requires dask version 0.6 or newer')
                else:
                    raise

            if (isinstance(filename_or_obj, basestring) and
                    not is_remote_uri(filename_or_obj)):
                file_arg = os.path.getmtime(filename_or_obj)
            else:
                file_arg = filename_or_obj
            token = tokenize(file_arg, group, decode_cf, mask_and_scale,
                             decode_times, concat_characters, decode_coords,
                             engine, chunks, drop_variables)
            name_prefix = '%s:%s/' % (filename_or_obj, group or '')
            ds2 = ds.chunk(chunks, name_prefix=name_prefix, token=token,
                           lock=lock)
            ds2._file_obj = ds._file_obj
        else:
            ds2 = ds

        return ds2

    if isinstance(filename_or_obj, backends.AbstractDataStore):
        store = filename_or_obj
    elif isinstance(filename_or_obj, basestring):
        if filename_or_obj.endswith('.gz'):
            if engine is not None and engine != 'scipy':
                raise ValueError('can only read gzipped netCDF files with '
                                 "default engine or engine='scipy'")
            # if the string ends with .gz, then gunzip and open as netcdf file
            if sys.version_info[:2] < (2, 7):
                raise ValueError('reading a gzipped netCDF not '
                                 'supported on Python 2.6')
            try:
                store = backends.ScipyDataStore(gzip.open(filename_or_obj))
            except __HOLE__ as e:
                # TODO: gzipped loading only works with NetCDF3 files.
                if 'is not a valid NetCDF 3 file' in e.message:
                    raise ValueError('gzipped file loading only supports '
                                     'NetCDF 3 files.')
                else:
                    raise
        else:
            if engine is None:
                engine = _get_default_engine(filename_or_obj,
                                             allow_remote=True)
            if engine == 'netcdf4':
                store = backends.NetCDF4DataStore(filename_or_obj, group=group)
            elif engine == 'scipy':
                store = backends.ScipyDataStore(filename_or_obj)
            elif engine == 'pydap':
                store = backends.PydapDataStore(filename_or_obj)
            elif engine == 'h5netcdf':
                store = backends.H5NetCDFStore(filename_or_obj, group=group)
            elif engine == 'pynio':
                store = backends.NioDataStore(filename_or_obj)
            else:
                raise ValueError('unrecognized engine for open_dataset: %r'
                                 % engine)
        if lock is None:
            lock = _default_lock(filename_or_obj, engine)
        with close_on_error(store):
            return maybe_decode_store(store, lock)
    else:
        if engine is not None and engine != 'scipy':
            raise ValueError('can only read file-like objects with '
                             "default engine or engine='scipy'")
        # assume filename_or_obj is a file-like object
        store = backends.ScipyDataStore(filename_or_obj)

    return maybe_decode_store(store)
TypeError
dataset/ETHPy150Open pydata/xarray/xarray/backends/api.py/open_dataset
8,925
def to_netcdf(dataset, path=None, mode='w', format=None, group=None,
              engine=None, writer=None, encoding=None):
    """This function creates an appropriate datastore for writing a dataset to
    disk as a netCDF file

    See `Dataset.to_netcdf` for full API docs.

    The ``writer`` argument is only for the private use of save_mfdataset.
    """
    if encoding is None:
        encoding = {}
    if path is None:
        path = BytesIO()
        if engine is None:
            engine = 'scipy'
        elif engine is not None:
            raise ValueError('invalid engine for creating bytes with '
                             'to_netcdf: %r. Only the default engine '
                             "or engine='scipy' is supported" % engine)
    elif engine is None:
        engine = _get_default_engine(path)

    # validate Dataset keys and DataArray names
    _validate_dataset_names(dataset)

    try:
        store_cls = WRITEABLE_STORES[engine]
    except __HOLE__:
        raise ValueError('unrecognized engine for to_netcdf: %r' % engine)

    if format is not None:
        format = format.upper()

    # if a writer is provided, store asynchronously
    sync = writer is None

    store = store_cls(path, mode, format, group, writer)
    try:
        dataset.dump_to_store(store, sync=sync, encoding=encoding)
        if isinstance(path, BytesIO):
            return path.getvalue()
    finally:
        if sync:
            store.close()

    if not sync:
        return store
KeyError
dataset/ETHPy150Open pydata/xarray/xarray/backends/api.py/to_netcdf
8,926
def get_cache_key():
    """
    Fetch a request key from either a Django or Flask request. Fall back
    on a process-global dummy object if we are not in either type of request
    """
    # TODO: This is ugly use of exceptions; is there a better way to
    # track whether in a given type of request?
    try:
        return request._get_current_object()
    except __HOLE__:
        # Not in a flask request context
        if getattr(api_globals, 'request', None) is not None:
            return api_globals.request
        else:
            # Not in a Django request
            return dummy_request
RuntimeError
dataset/ETHPy150Open CenterForOpenScience/osf.io/framework/mongo/__init__.py/get_cache_key
8,927
def not_flagvar(self, node):
    name = node.value
    try:
        value = self._vars[name]
    except __HOLE__:
        raise ValueError("Unknown flag variable %s" % name)
    else:
        return not _LIT_BOOL[value]
KeyError
dataset/ETHPy150Open cournape/Bento/bento/parser/visitor.py/Dispatcher.not_flagvar
8,928
def flagvar(self, node):
    name = node.value
    try:
        value = self._vars[name]
    except __HOLE__:
        raise ValueError("Unknown flag variable %s" % name)
    else:
        return _LIT_BOOL[value]
KeyError
dataset/ETHPy150Open cournape/Bento/bento/parser/visitor.py/Dispatcher.flagvar
8,929
def LJSONExporter(landmark_group, file_handle, **kwargs):
    r"""
    Given a file handle to write in to (which should act like a Python `file`
    object), write out the landmark data. No value is returned.

    Writes out the LJSON format which is a verbose format that closely
    resembles the landmark group format. It describes semantic labels and
    connectivity between labels. The first axis of the format represents
    the image y-axis and is consistent with ordering within Menpo.

    Parameters
    ----------
    landmark_group : map:`LandmarkGroup`
        The landmark group to write out.
    file_handle : `file`-like object
        The file to write in to
    """
    lg_json = landmark_group.tojson()
    # Add version string
    lg_json['version'] = 2
    # Convert nan values to None so that json correctly maps them to 'null'
    points = lg_json['landmarks']['points']
    # Flatten list
    try:
        ndim = len(points[0])
    except __HOLE__:
        ndim = 0
    filtered_points = [None if np.isnan(x) else x
                       for x in itertools.chain(*points)]
    # Recreate tuples
    if ndim == 2:
        lg_json['landmarks']['points'] = list(zip(filtered_points[::2],
                                                  filtered_points[1::2]))
    elif ndim == 3:
        lg_json['landmarks']['points'] = list(zip(filtered_points[::3],
                                                  filtered_points[1::3],
                                                  filtered_points[2::3]))
    else:
        lg_json['landmarks']['points'] = []
    return json.dump(lg_json, file_handle, indent=4, separators=(',', ': '),
                     sort_keys=True, allow_nan=False)
IndexError
dataset/ETHPy150Open menpo/menpo/menpo/io/output/landmark.py/LJSONExporter
8,930
@signalcommand
def handle(self, *args, **options):
    self.style = color_style()

    self.options = options
    if options["requirements"]:
        req_files = options["requirements"]
    elif os.path.exists("requirements.txt"):
        req_files = ["requirements.txt"]
    elif os.path.exists("requirements"):
        req_files = ["requirements/{0}".format(f) for f in os.listdir("requirements")
                     if os.path.isfile(os.path.join("requirements", f)) and
                     f.lower().endswith(".txt")]
    else:
        raise CommandError("Requirements not found")

    try:
        from pip.download import PipSession
    except __HOLE__:
        raise CommandError("Pip version 6 or higher is required")

    self.reqs = {}
    with PipSession() as session:
        for filename in req_files:
            for req in parse_requirements(filename, session=session):
                # url attribute changed to link in pip version 6.1.0 and above
                if LooseVersion(pip.__version__) > LooseVersion('6.0.8'):
                    self.reqs[req.name] = {
                        "pip_req": req,
                        "url": req.link,
                    }
                else:
                    self.reqs[req.name] = {
                        "pip_req": req,
                        "url": req.url,
                    }

    if options["github_api_token"]:
        self.github_api_token = options["github_api_token"]
    elif os.environ.get("GITHUB_API_TOKEN"):
        self.github_api_token = os.environ.get("GITHUB_API_TOKEN")
    else:
        self.github_api_token = None  # only 50 requests per hour

    self.check_pypi()
    if HAS_REQUESTS:
        self.check_github()
    else:
        print(self.style.ERROR("Cannot check github urls. The requests library is not installed. ( pip install requests )"))
    self.check_other()
ImportError
dataset/ETHPy150Open django-extensions/django-extensions/django_extensions/management/commands/pipchecker.py/Command.handle
8,931
def check_pypi(self):
    """
    If the requirement is frozen to pypi, check for a new version.
    """
    for dist in pip.get_installed_distributions():
        name = dist.project_name
        if name in self.reqs.keys():
            self.reqs[name]["dist"] = dist

    pypi = ServerProxy("https://pypi.python.org/pypi")
    for name, req in list(self.reqs.items()):
        if req["url"]:
            continue  # skipping github packages.
        elif "dist" in req:
            dist = req["dist"]
            dist_version = LooseVersion(dist.version)
            available = pypi.package_releases(req["pip_req"].name)
            try:
                available_version = LooseVersion(available[0])
            except __HOLE__:
                available_version = None

            if not available_version:
                msg = self.style.WARN("release is not on pypi (check capitalization and/or --extra-index-url)")
            elif self.options['show_newer'] and dist_version > available_version:
                msg = self.style.INFO("{0} available (newer installed)".format(available_version))
            elif available_version > dist_version:
                msg = self.style.INFO("{0} available".format(available_version))
            else:
                msg = "up to date"
                del self.reqs[name]
                continue
            pkg_info = self.style.BOLD("{dist.project_name} {dist.version}".format(dist=dist))
        else:
            msg = "not installed"
            pkg_info = name
        print("{pkg_info:40} {msg}".format(pkg_info=pkg_info, msg=msg))
        del self.reqs[name]
IndexError
dataset/ETHPy150Open django-extensions/django-extensions/django_extensions/management/commands/pipchecker.py/Command.check_pypi
8,932
def check_github(self):
    """
    If the requirement is frozen to a github url, check for new commits.

    API Tokens
    ----------
    For more than 50 github api calls per hour, pipchecker requires
    authentication with the github api by setting the environment
    variable ``GITHUB_API_TOKEN`` or setting the command flag
    ``--github-api-token='mytoken'``.

    To create a github api token for use at the command line::

        curl -u 'rizumu' -d '{"scopes":["repo"], "note":"pipchecker"}' https://api.github.com/authorizations

    For more info on github api tokens:
        https://help.github.com/articles/creating-an-oauth-token-for-command-line-use
        http://developer.github.com/v3/oauth/#oauth-authorizations-api

    Requirement Format
    ------------------
    Pipchecker gets the sha of frozen repo and checks if it is found
    at the head of any branches. If it is not found then the requirement
    is considered to be out of date.

    Therefore, freezing at the commit hash will provide the expected
    results, but if freezing at a branch or tag name, pipchecker will
    not be able to determine with certainty if the repo is out of date.

    Freeze at the commit hash (sha)::

        git+git://github.com/django/django.git@393c268e725f5b229ecb554f3fac02cfc250d2df#egg=Django

    Freeze with a branch name::

        git+git://github.com/django/django.git@master#egg=Django

    Freeze with a tag::

        git+git://github.com/django/django.git@1.5b2#egg=Django

    Do not freeze::

        git+git://github.com/django/django.git#egg=Django
    """
    for name, req in list(self.reqs.items()):
        req_url = req["url"]
        if not req_url:
            continue
        req_url = str(req_url)
        if req_url.startswith("git") and "github.com/" not in req_url:
            continue
        if req_url.endswith(".tar.gz") or req_url.endswith(".tar.bz2") or req_url.endswith(".zip"):
            continue

        headers = {
            "content-type": "application/json",
        }
        if self.github_api_token:
            headers["Authorization"] = "token {0}".format(self.github_api_token)
        try:
            user, repo = urlparse(req_url).path.split("#")[0].strip("/").rstrip("/").split("/")
        except (__HOLE__, IndexError) as e:
            print(self.style.ERROR("\nFailed to parse %r: %s\n" % (req_url, e)))
            continue

        try:
            test_auth = requests.get("https://api.github.com/django/", headers=headers).json()
        except HTTPError as e:
            print("\n%s\n" % str(e))
            return

        if "message" in test_auth and test_auth["message"] == "Bad credentials":
            print(self.style.ERROR("\nGithub API: Bad credentials. Aborting!\n"))
            return
        elif "message" in test_auth and test_auth["message"].startswith("API Rate Limit Exceeded"):
            print(self.style.ERROR("\nGithub API: Rate Limit Exceeded. Aborting!\n"))
            return

        frozen_commit_sha = None
        if ".git" in repo:
            repo_name, frozen_commit_full = repo.split(".git")
            if frozen_commit_full.startswith("@"):
                frozen_commit_sha = frozen_commit_full[1:]
        elif "@" in repo:
            repo_name, frozen_commit_sha = repo.split("@")

        if frozen_commit_sha is None:
            msg = self.style.ERROR("repo is not frozen")

        if frozen_commit_sha:
            branch_url = "https://api.github.com/repos/{0}/{1}/branches".format(user, repo_name)
            branch_data = requests.get(branch_url, headers=headers).json()

            frozen_commit_url = "https://api.github.com/repos/{0}/{1}/commits/{2}".format(
                user, repo_name, frozen_commit_sha
            )
            frozen_commit_data = requests.get(frozen_commit_url, headers=headers).json()

            if "message" in frozen_commit_data and frozen_commit_data["message"] == "Not Found":
                msg = self.style.ERROR("{0} not found in {1}. Repo may be private.".format(frozen_commit_sha[:10], name))
            elif frozen_commit_sha in [branch["commit"]["sha"] for branch in branch_data]:
                msg = self.style.BOLD("up to date")
            else:
                msg = self.style.INFO("{0} is not the head of any branch".format(frozen_commit_data["sha"][:10]))

        if "dist" in req:
            pkg_info = "{dist.project_name} {dist.version}".format(dist=req["dist"])
        elif frozen_commit_sha is None:
            pkg_info = name
        else:
            pkg_info = "{0} {1}".format(name, frozen_commit_sha[:10])
        print("{pkg_info:40} {msg}".format(pkg_info=pkg_info, msg=msg))
        del self.reqs[name]
ValueError
dataset/ETHPy150Open django-extensions/django-extensions/django_extensions/management/commands/pipchecker.py/Command.check_github
8,933
def _Open(self, path_spec=None, mode='rb'):
    """Opens the file-like object defined by path specification.

    Args:
        path_spec: optional path specification (instance of PathSpec).
        mode: optional file access mode. The default is 'rb' read-only binary.

    Raises:
        AccessError: if the access to open the file was denied.
        IOError: if the file-like object could not be opened.
        PathSpecError: if the path specification is incorrect.
        ValueError: if the path specification is invalid.
    """
    if not path_spec:
        raise ValueError(u'Missing path specification.')

    if path_spec.HasParent():
        raise errors.PathSpecError(u'Unsupported path specification with parent.')

    location = getattr(path_spec, u'location', None)

    if location is None:
        raise errors.PathSpecError(u'Path specification missing location.')

    # Windows does not support running os.stat on device files so we use
    # libsmdev to do an initial check.
    try:
        is_device = pysmdev.check_device(location)
    except IOError as exception:
        # Since os.stat() will not recognize Windows device file names and
        # will return '[Error 87] The parameter is incorrect' we check here
        # if pysmdev exception message contains ' access denied ' and raise
        # AccessError instead.
        # Note that exception.message no longer works in Python 3.
        exception_string = str(exception)
        if not isinstance(exception_string, py2to3.UNICODE_TYPE):
            exception_string = py2to3.UNICODE_TYPE(
                exception_string, errors=u'replace')

        if u' access denied ' in exception_string:
            raise errors.AccessError(
                u'Access denied to file: {0:s} with error: {1:s}'.format(
                    location, exception_string))
        is_device = False

    if not is_device:
        try:
            stat_info = os.stat(location)
        except __HOLE__ as exception:
            raise IOError(u'Unable to open file with error: {0:s}.'.format(
                exception))

        # In case the libsmdev check is not able to detect the device also use
        # the stat information.
        if stat.S_ISCHR(stat_info.st_mode) or stat.S_ISBLK(stat_info.st_mode):
            is_device = True

    if is_device:
        self._file_object = pysmdev.handle()
        self._file_object.open(location, mode=mode)
        self._size = self._file_object.media_size
    else:
        self._file_object = open(location, mode=mode)
        self._size = stat_info.st_size

# Note: that the following functions do not follow the style guide
# because they are part of the file-like object interface.
OSError
dataset/ETHPy150Open log2timeline/dfvfs/dfvfs/file_io/os_file_io.py/OSFile._Open
8,934
def checkAuth(ip, port, title, version):
    """
    """

    if version in ["5.1", "6.0", "6.1"] and title is JINTERFACES.WM:
        for (usr, pswd) in default_credentials:
            url = "http://%s:%s/admin-console/login.seam" % (ip, port)
            data = OrderedDict([
                ("login_form", "login_form"),
                ("login_form:name", usr),
                ("login_form:password", pswd),
                ("login_form:submit", "Login"),
                ("javax.faces.ViewState", utility.fetch_viewState(url)),
            ])

            response = utility.requests_post(url, data=data)
            if response.status_code == 200:
                utility.Msg("Successfully authenticated with %s:%s" % (usr, pswd), LOG.DEBUG)
                if version in ["5.1"]:
                    return (dict_from_cookiejar(response.history[0].cookies), None)
                return (dict_from_cookiejar(response.cookies), None)
    else:
        if title is JINTERFACES.JMX:
            url = "http://%s:%s/jmx-console/" % (ip, port)
        elif title is JINTERFACES.MM:
            url = "http://%s:%s/management" % (ip, port)
        elif title is JINTERFACES.WC:
            url = "http://%s:%s/web-console" % (ip, port)
        else:
            utility.Msg("Unsupported auth interface: %s" % title, LOG.DEBUG)
            return

        # check with given auth
        if state.usr_auth:
            (usr, pswd) = state.usr_auth.split(':')
            return _auth(usr, pswd, url, version)

        # else try default credentials
        for (usr, pswd) in default_credentials:
            cook = _auth(usr, pswd, url, version)
            if cook:
                return cook

        # if we're still here, check if they supplied a wordlist
        if state.bf_wordlist and not state.hasbf:
            state.hasbf = True
            wordlist = []
            with open(state.bf_wordlist, 'r') as f:
                # ensure everything is ascii or requests will explode
                wordlist = [x.decode("ascii", "ignore").rstrip() for x in f.readlines()]

            utility.Msg("Brute forcing %s account with %d passwords..." %
                        (state.bf_user, len(wordlist)), LOG.DEBUG)

            try:
                for (idx, word) in enumerate(wordlist):
                    stdout.flush()
                    stdout.write("\r\033[32m [%s] Brute forcing password for %s [%d/%d]\033[0m"
                                 % (utility.timestamp(), state.bf_user, idx+1, len(wordlist)))

                    cook = _auth(state.bf_user, word, url, version)
                    if cook:
                        print ''  # newline

                        # lets insert these credentials to the default list so we
                        # don't need to bruteforce it each time
                        if not (state.bf_user, word) in default_credentials:
                            default_credentials.insert(0, (state.bf_user, word))

                        utility.Msg("Successful login %s:%s" % (state.bf_user, word), LOG.SUCCESS)
                        return cook
                print ''
            except __HOLE__:
                pass
KeyboardInterrupt
dataset/ETHPy150Open hatRiot/clusterd/src/platform/jboss/authenticate.py/checkAuth
8,935
def seen(self, host_name=None, location_name=None, gps=None,
         gps_accuracy=0, battery=None):
    """Mark the device as seen."""
    self.last_seen = dt_util.utcnow()
    self.host_name = host_name
    self.location_name = location_name
    self.gps_accuracy = gps_accuracy or 0
    self.battery = battery
    if gps is None:
        self.gps = None
    else:
        try:
            self.gps = tuple(float(val) for val in gps)
        except __HOLE__:
            _LOGGER.warning('Could not parse gps value for %s: %s',
                            self.dev_id, gps)
            self.gps = None
    self.update()
ValueError
dataset/ETHPy150Open home-assistant/home-assistant/homeassistant/components/device_tracker/__init__.py/Device.seen
8,936
def test_as_context_mgr_w_error(self):
    _PROJECT = 'PROJECT'
    _PROPERTIES = {'foo': 'bar'}
    connection = _Connection()
    entity = _Entity(_PROPERTIES)
    key = entity.key = _Key(_PROJECT)

    client = _Client(_PROJECT, connection)
    self.assertEqual(list(client._batches), [])

    try:
        with self._makeOne(client) as batch:
            self.assertEqual(list(client._batches), [batch])
            batch.put(entity)
            raise ValueError("testing")
    except __HOLE__:
        pass

    self.assertEqual(list(client._batches), [])

    mutated_entity = _mutated_pb(self, batch.mutations, 'upsert')
    self.assertEqual(mutated_entity.key, key._key)
    self.assertEqual(connection._committed, [])
ValueError
dataset/ETHPy150Open GoogleCloudPlatform/gcloud-python/gcloud/datastore/test_batch.py/TestBatch.test_as_context_mgr_w_error
8,937
@permission_checker.require_any('add', 'change', 'delete')
@vary_on_headers('X-Requested-With')
def index(request):
    Document = get_document_model()

    # Get documents (filtered by user permission)
    documents = permission_policy.instances_user_has_any_permission_for(
        request.user, ['change', 'delete']
    )

    # Ordering
    if 'ordering' in request.GET and request.GET['ordering'] in ['title', '-created_at']:
        ordering = request.GET['ordering']
    else:
        ordering = '-created_at'
    documents = documents.order_by(ordering)

    # Filter by collection
    current_collection = None
    collection_id = request.GET.get('collection_id')
    if collection_id:
        try:
            current_collection = Collection.objects.get(id=collection_id)
            documents = documents.filter(collection=current_collection)
        except (__HOLE__, Collection.DoesNotExist):
            pass

    # Search
    query_string = None
    if 'q' in request.GET:
        form = SearchForm(request.GET, placeholder=_("Search documents"))
        if form.is_valid():
            query_string = form.cleaned_data['q']
            documents = documents.search(query_string)
    else:
        form = SearchForm(placeholder=_("Search documents"))

    # Pagination
    paginator, documents = paginate(request, documents)

    collections = permission_policy.collections_user_has_any_permission_for(
        request.user, ['add', 'change']
    )
    if len(collections) < 2:
        collections = None

    # Create response
    if request.is_ajax():
        return render(request, 'wagtaildocs/documents/results.html', {
            'ordering': ordering,
            'documents': documents,
            'query_string': query_string,
            'is_searching': bool(query_string),
        })
    else:
        return render(request, 'wagtaildocs/documents/index.html', {
            'ordering': ordering,
            'documents': documents,
            'query_string': query_string,
            'is_searching': bool(query_string),
            'search_form': form,
            'popular_tags': Document.popular_tags(),
            'user_can_add': permission_policy.user_has_permission(request.user, 'add'),
            'collections': collections,
            'current_collection': current_collection,
        })
ValueError
dataset/ETHPy150Open torchbox/wagtail/wagtail/wagtaildocs/views/documents.py/index
8,938
@permission_checker.require('change')
def edit(request, document_id):
    Document = get_document_model()
    DocumentForm = get_document_form(Document)

    doc = get_object_or_404(Document, id=document_id)

    if not permission_policy.user_has_permission_for_instance(request.user, 'change', doc):
        return permission_denied(request)

    if request.method == 'POST':
        original_file = doc.file
        form = DocumentForm(request.POST, request.FILES, instance=doc, user=request.user)
        if form.is_valid():
            if 'file' in form.changed_data:
                # if providing a new document file, delete the old one.
                # NB Doing this via original_file.delete() clears the file field,
                # which definitely isn't what we want...
                original_file.storage.delete(original_file.name)
            doc = form.save()

            # Reindex the document to make sure all tags are indexed
            for backend in get_search_backends():
                backend.add(doc)

            messages.success(request, _("Document '{0}' updated").format(doc.title), buttons=[
                messages.button(reverse('wagtaildocs:edit', args=(doc.id,)), _('Edit'))
            ])
            return redirect('wagtaildocs:index')
        else:
            messages.error(request, _("The document could not be saved due to errors."))
    else:
        form = DocumentForm(instance=doc, user=request.user)

    filesize = None

    # Get file size when there is a file associated with the Document object
    if doc.file:
        try:
            filesize = doc.file.size
        except __HOLE__:
            # File doesn't exist
            pass

    if not filesize:
        messages.error(
            request,
            _("The file could not be found. Please change the source or delete the document"),
            buttons=[messages.button(reverse('wagtaildocs:delete', args=(doc.id,)), _('Delete'))]
        )

    return render(request, "wagtaildocs/documents/edit.html", {
        'document': doc,
        'filesize': filesize,
        'form': form,
        'user_can_delete': permission_policy.user_has_permission_for_instance(
            request.user, 'delete', doc
        ),
    })
OSError
dataset/ETHPy150Open torchbox/wagtail/wagtail/wagtaildocs/views/documents.py/edit
8,939
def lxml_available():
    try:
        from lxml.etree import LXML_VERSION
        LXML = LXML_VERSION >= (3, 3, 1, 0)
        if not LXML:
            import warnings
            warnings.warn("The installed version of lxml is too old to be used with openpyxl")
            return False  # we have it, but too old
        else:
            return True  # we have it, and recent enough
    except __HOLE__:
        return False  # we don't even have it
ImportError
dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/openpyxl-2.3.0-b2/openpyxl/xml/__init__.py/lxml_available
8,940
def _get_parameter_from_permission(self, session, permission, parameter_name, default_value = DEFAULT_VALUE):
    try:
        param = [ p for p in permission.parameters if p.get_name() == parameter_name ][0]
    except __HOLE__:
        if default_value == DEFAULT_VALUE:
            raise DbErrors.DbIllegalStatusError(
                permission.get_permission_type() + " permission without " + parameter_name
            )
        else:
            return default_value
    return param.value
IndexError
dataset/ETHPy150Open weblabdeusto/weblabdeusto/server/src/weblab/core/db.py/DatabaseGateway._get_parameter_from_permission
8,941
def _get_float_parameter_from_permission(self, session, permission, parameter_name, default_value = DEFAULT_VALUE):
    value = self._get_parameter_from_permission(session, permission, parameter_name, default_value)
    try:
        return float(value)
    except __HOLE__:
        raise DbErrors.InvalidPermissionParameterFormatError(
            "Expected float as parameter '%s' of '%s', found: '%s'" % (
                parameter_name,
                permission.get_permission_type(),
                value
            )
        )
ValueError
dataset/ETHPy150Open weblabdeusto/weblabdeusto/server/src/weblab/core/db.py/DatabaseGateway._get_float_parameter_from_permission
8,942
def _get_int_parameter_from_permission(self, session, permission, parameter_name, default_value = DEFAULT_VALUE):
    value = self._get_parameter_from_permission(session, permission, parameter_name, default_value)
    try:
        return int(value)
    except __HOLE__:
        raise DbErrors.InvalidPermissionParameterFormatError(
            "Expected int as parameter '%s' of '%s', found: '%s'" % (
                parameter_name,
                permission.get_permission_type(),
                value
            )
        )
ValueError
dataset/ETHPy150Open weblabdeusto/weblabdeusto/server/src/weblab/core/db.py/DatabaseGateway._get_int_parameter_from_permission
8,943
@logged()
def grant_external_credentials(self, username, external_id, system):
    """ Given a system and an external_id, grant access with those credentials for user user_id.
        Before calling this method, the system has checked that this user is the owner of
        external_id and of user_id"""
    session = self.Session()
    try:
        try:
            auth_type = session.query(model.DbAuthType).filter_by(name=system).one()
            auth = auth_type.auths[0]
        except (NoResultFound, __HOLE__):
            raise DbErrors.DbUserNotFoundError("System '%s' not found in database" % system)

        try:
            user = session.query(model.DbUser).filter_by(login=username).one()
        except NoResultFound:
            raise DbErrors.DbUserNotFoundError("User '%s' not found in database" % user)

        for user_auth in user.auths:
            if user_auth.auth == auth:
                raise DbErrors.DbUserNotFoundError("User '%s' already has credentials in system %s" % (username, system))

        user_auth = model.DbUserAuth(user = user, auth = auth, configuration=str(external_id))
        session.add(user_auth)
        session.commit()
    finally:
        session.close()

#####################################################################
##################   create_external_user   #########################
#####################################################################
KeyError
dataset/ETHPy150Open weblabdeusto/weblabdeusto/server/src/weblab/core/db.py/DatabaseGateway.grant_external_credentials
8,944
@logged()
def create_external_user(self, external_user, external_id, system, group_names):
    session = self.Session()
    try:
        try:
            auth_type = session.query(model.DbAuthType).filter_by(name=system).one()
            auth = auth_type.auths[0]
        except (NoResultFound, __HOLE__):
            raise DbErrors.DbUserNotFoundError("System '%s' not found in database" % system)

        groups = []
        for group_name in group_names:
            try:
                group = session.query(model.DbGroup).filter_by(name=group_name).one()
            except NoResultFound:
                raise DbErrors.DbUserNotFoundError("Group '%s' not found in database" % group_name)
            groups.append(group)

        try:
            role = session.query(model.DbRole).filter_by(name=external_user.role.name).one()
            user = model.DbUser(external_user.login, external_user.full_name, external_user.email, role = role)
            user_auth = model.DbUserAuth(user, auth, configuration = external_id)
            for group in groups:
                group.users.append(user)
            session.add(user)
            session.add(user_auth)
            session.commit()
        except Exception as e:
            log.log( DatabaseGateway, log.level.Warning, "Couldn't create user: %s" % e)
            log.log_exc(DatabaseGateway, log.level.Info)
            raise DbErrors.DatabaseError("Couldn't create user! Contact administrator")
    finally:
        session.close()

# Location updater
KeyError
dataset/ETHPy150Open weblabdeusto/weblabdeusto/server/src/weblab/core/db.py/DatabaseGateway.create_external_user
8,945
def _check(self, name, size=None, *extra):
    func = getattr(imageop, name)
    for height in VALUES:
        for width in VALUES:
            strlen = abs(width * height)
            if size:
                strlen *= size
            if strlen < MAX_LEN:
                data = "A" * strlen
            else:
                data = AAAAA
            if size:
                arguments = (data, size, width, height) + extra
            else:
                arguments = (data, width, height) + extra
            try:
                func(*arguments)
            except (__HOLE__, imageop.error):
                pass
ValueError
dataset/ETHPy150Open azoft-dev-team/imagrium/env/Lib/test/test_imageop.py/InputValidationTests._check
8,946
def test_main():

    run_unittest(InputValidationTests)

    try:
        import imgfile
    except __HOLE__:
        return

    # Create binary test files
    uu.decode(get_qualified_path('testrgb'+os.extsep+'uue'), 'test'+os.extsep+'rgb')

    image, width, height = getimage('test'+os.extsep+'rgb')

    # Return the selected part of image, which should by width by height
    # in size and consist of pixels of psize bytes.
    if verbose:
        print 'crop'
    newimage = imageop.crop (image, 4, width, height, 0, 0, 1, 1)

    # Return image scaled to size newwidth by newheight. No interpolation
    # is done, scaling is done by simple-minded pixel duplication or removal.
    # Therefore, computer-generated images or dithered images will
    # not look nice after scaling.
    if verbose:
        print 'scale'
    scaleimage = imageop.scale(image, 4, width, height, 1, 1)

    # Run a vertical low-pass filter over an image. It does so by computing
    # each destination pixel as the average of two vertically-aligned source
    # pixels. The main use of this routine is to forestall excessive flicker
    # if the image two vertically-aligned source pixels, hence the name.
    if verbose:
        print 'tovideo'
    videoimage = imageop.tovideo (image, 4, width, height)

    # Convert an rgb image to an 8 bit rgb
    if verbose:
        print 'rgb2rgb8'
    greyimage = imageop.rgb2rgb8(image, width, height)

    # Convert an 8 bit rgb image to a 24 bit rgb image
    if verbose:
        print 'rgb82rgb'
    image = imageop.rgb82rgb(greyimage, width, height)

    # Convert an rgb image to an 8 bit greyscale image
    if verbose:
        print 'rgb2grey'
    greyimage = imageop.rgb2grey(image, width, height)

    # Convert an 8 bit greyscale image to a 24 bit rgb image
    if verbose:
        print 'grey2rgb'
    image = imageop.grey2rgb(greyimage, width, height)

    # Convert a 8-bit deep greyscale image to a 1-bit deep image by
    # thresholding all the pixels. The resulting image is tightly packed
    # and is probably only useful as an argument to mono2grey.
    if verbose:
        print 'grey2mono'
    monoimage = imageop.grey2mono (greyimage, width, height, 0)

    # monoimage, width, height = getimage('monotest.rgb')
    # Convert a 1-bit monochrome image to an 8 bit greyscale or color image.
    # All pixels that are zero-valued on input get value p0 on output and
    # all one-value input pixels get value p1 on output. To convert a
    # monochrome black-and-white image to greyscale pass the values 0 and
    # 255 respectively.
    if verbose:
        print 'mono2grey'
    greyimage = imageop.mono2grey (monoimage, width, height, 0, 255)

    # Convert an 8-bit greyscale image to a 1-bit monochrome image using a
    # (simple-minded) dithering algorithm.
    if verbose:
        print 'dither2mono'
    monoimage = imageop.dither2mono (greyimage, width, height)

    # Convert an 8-bit greyscale image to a 4-bit greyscale image without
    # dithering.
    if verbose:
        print 'grey2grey4'
    grey4image = imageop.grey2grey4 (greyimage, width, height)

    # Convert an 8-bit greyscale image to a 2-bit greyscale image without
    # dithering.
    if verbose:
        print 'grey2grey2'
    grey2image = imageop.grey2grey2 (greyimage, width, height)

    # Convert an 8-bit greyscale image to a 2-bit greyscale image with
    # dithering. As for dither2mono, the dithering algorithm is currently
    # very simple.
    if verbose:
        print 'dither2grey2'
    grey2image = imageop.dither2grey2 (greyimage, width, height)

    # Convert a 4-bit greyscale image to an 8-bit greyscale image.
    if verbose:
        print 'grey42grey'
    greyimage = imageop.grey42grey (grey4image, width, height)

    # Convert a 2-bit greyscale image to an 8-bit greyscale image.
    if verbose:
        print 'grey22grey'
    image = imageop.grey22grey (grey2image, width, height)

    # Cleanup
    unlink('test'+os.extsep+'rgb')
ImportError
dataset/ETHPy150Open azoft-dev-team/imagrium/env/Lib/test/test_imageop.py/test_main
8,947
def get_qualified_path(name):
    """ return a more qualified path to name"""
    import sys
    import os
    path = sys.path
    try:
        path = [os.path.dirname(__file__)] + path
    except __HOLE__:
        pass
    for dir in path:
        fullname = os.path.join(dir, name)
        if os.path.exists(fullname):
            return fullname
    return name
NameError
dataset/ETHPy150Open azoft-dev-team/imagrium/env/Lib/test/test_imageop.py/get_qualified_path
8,948
def set_center_freq(self, freq_mhz):
    """Set tuner center frequency to provided megahertz value."""
    try:
        self.sdr.set_center_freq(freq_mhz*1000000.0)
        self._clear_intensity()
    except __HOLE__:
        # Error setting value, ignore it for now but in the future consider
        # adding an error message dialog.
        pass
IOError
dataset/ETHPy150Open adafruit/FreqShow/model.py/FreqShowModel.set_center_freq
8,949
def set_sample_rate(self, sample_rate_mhz): """Set tuner sample rate to provided frequency in megahertz.""" try: self.sdr.set_sample_rate(sample_rate_mhz*1000000.0) except __HOLE__: # Error setting value, ignore it for now but in the future consider # adding an error message dialog. pass
IOError
dataset/ETHPy150Open adafruit/FreqShow/model.py/FreqShowModel.set_sample_rate
8,950
def set_gain(self, gain_db): """Set gain of tuner. Can be the string 'AUTO' for automatic gain or a numeric value in decibels for fixed gain. """ if gain_db == 'AUTO': self.sdr.set_manual_gain_enabled(False) self.auto_gain = True self._clear_intensity() else: try: self.sdr.set_gain(float(gain_db)) self.auto_gain = False self._clear_intensity() except __HOLE__: # Error setting value, ignore it for now but in the future consider # adding an error message dialog. pass
IOError
dataset/ETHPy150Open adafruit/FreqShow/model.py/FreqShowModel.set_gain
8,951
def callInfoFromWSDL(port, name): """Return a SOAPCallInfo given a WSDL port and operation name.""" wsdl = port.getService().getWSDL() binding = port.getBinding() portType = binding.getPortType() operation = portType.operations[name] opbinding = binding.operations[name] messages = wsdl.messages callinfo = SOAPCallInfo(name) addrbinding = port.getAddressBinding() if not isinstance(addrbinding, SoapAddressBinding): raise ValueError, 'Unsupported binding type.' callinfo.location = addrbinding.location soapbinding = binding.findBinding(SoapBinding) if soapbinding is None: raise ValueError, 'Missing soap:binding element.' callinfo.transport = soapbinding.transport callinfo.style = soapbinding.style or 'document' soap_op_binding = opbinding.findBinding(SoapOperationBinding) if soap_op_binding is not None: callinfo.soapAction = soap_op_binding.soapAction callinfo.style = soap_op_binding.style or callinfo.style parameterOrder = operation.parameterOrder if operation.input is not None: message = messages[operation.input.message] msgrole = opbinding.input mime = msgrole.findBinding(MimeMultipartRelatedBinding) if mime is not None: raise ValueError, 'Mime bindings are not supported.' else: for item in msgrole.findBindings(SoapHeaderBinding): part = messages[item.message].parts[item.part] header = callinfo.addInHeaderInfo( part.name, part.element or part.type, item.namespace, element_type = part.element and 1 or 0 ) header.encodingStyle = item.encodingStyle body = msgrole.findBinding(SoapBodyBinding) if body is None: raise ValueError, 'Missing soap:body binding.' callinfo.encodingStyle = body.encodingStyle callinfo.namespace = body.namespace callinfo.use = body.use if body.parts is not None: parts = [] for name in body.parts: parts.append(message.parts[name]) else: parts = message.parts.values() for part in parts: callinfo.addInParameter( part.name, part.element or part.type, element_type = part.element and 1 or 0 ) if operation.output is not None: try: message = messages[operation.output.message] except __HOLE__: if self.strict: raise RuntimeError( "Received message not defined in the WSDL schema: %s" % operation.output.message) else: message = wsdl.addMessage(operation.output.message) print "Warning:", \ "Received message not defined in the WSDL schema.", \ "Adding it." print "Message:", operation.output.message msgrole = opbinding.output mime = msgrole.findBinding(MimeMultipartRelatedBinding) if mime is not None: raise ValueError, 'Mime bindings are not supported.' else: for item in msgrole.findBindings(SoapHeaderBinding): part = messages[item.message].parts[item.part] header = callinfo.addOutHeaderInfo( part.name, part.element or part.type, item.namespace, element_type = part.element and 1 or 0 ) header.encodingStyle = item.encodingStyle body = msgrole.findBinding(SoapBodyBinding) if body is None: raise ValueError, 'Missing soap:body binding.' callinfo.encodingStyle = body.encodingStyle callinfo.namespace = body.namespace callinfo.use = body.use if body.parts is not None: parts = [] for name in body.parts: parts.append(message.parts[name]) else: parts = message.parts.values() if parts: for part in parts: callinfo.addOutParameter( part.name, part.element or part.type, element_type = part.element and 1 or 0 ) return callinfo
KeyError
dataset/ETHPy150Open AppScale/appscale/AppServer/google/appengine/api/SOAPpy/wstools/WSDLTools.py/callInfoFromWSDL
8,952
def detect_language (self, sources): """Detect the language of a given file, or list of files. Uses language_map, and language_order to do the job. """ if type(sources) is not ListType: sources = [sources] lang = None index = len(self.language_order) for source in sources: base, ext = os.path.splitext(source) extlang = self.language_map.get(ext) try: extindex = self.language_order.index(extlang) if extindex < index: lang = extlang index = extindex except __HOLE__: pass return lang # detect_language () # -- Worker methods ------------------------------------------------ # (must be implemented by subclasses)
ValueError
dataset/ETHPy150Open babble/babble/include/jython/Lib/distutils/ccompiler.py/CCompiler.detect_language
8,953
def compile(self, sources, output_dir=None, macros=None, include_dirs=None, debug=0, extra_preargs=None, extra_postargs=None, depends=None): """Compile one or more source files. 'sources' must be a list of filenames, most likely C/C++ files, but in reality anything that can be handled by a particular compiler and compiler class (eg. MSVCCompiler can handle resource files in 'sources'). Return a list of object filenames, one per source filename in 'sources'. Depending on the implementation, not all source files will necessarily be compiled, but all corresponding object filenames will be returned. If 'output_dir' is given, object files will be put under it, while retaining their original path component. That is, "foo/bar.c" normally compiles to "foo/bar.o" (for a Unix implementation); if 'output_dir' is "build", then it would compile to "build/foo/bar.o". 'macros', if given, must be a list of macro definitions. A macro definition is either a (name, value) 2-tuple or a (name,) 1-tuple. The former defines a macro; if the value is None, the macro is defined without an explicit value. The 1-tuple case undefines a macro. Later definitions/redefinitions/undefinitions take precedence. 'include_dirs', if given, must be a list of strings, the directories to add to the default include file search path for this compilation only. 'debug' is a boolean; if true, the compiler will be instructed to output debug symbols in (or alongside) the object file(s). 'extra_preargs' and 'extra_postargs' are implementation-dependent. On platforms that have the notion of a command-line (e.g. Unix, DOS/Windows), they are most likely lists of strings: extra command-line arguments to prepend/append to the compiler command line. On other platforms, consult the implementation class documentation. In any event, they are intended as an escape hatch for those occasions when the abstract compiler framework doesn't cut the mustard. 'depends', if given, is a list of filenames that all targets depend on. If a source file is older than any file in depends, then the source file will be recompiled. This supports dependency tracking, but only at a coarse granularity. Raises CompileError on failure. """ # A concrete compiler class can either override this method # entirely or implement _compile(). macros, objects, extra_postargs, pp_opts, build = \ self._setup_compile(output_dir, macros, include_dirs, sources, depends, extra_postargs) cc_args = self._get_cc_args(pp_opts, debug, extra_preargs) for obj in objects: try: src, ext = build[obj] except __HOLE__: continue self._compile(obj, src, ext, cc_args, extra_postargs, pp_opts) # Return *all* object filenames, not just the ones we just built. return objects
KeyError
dataset/ETHPy150Open babble/babble/include/jython/Lib/distutils/ccompiler.py/CCompiler.compile
8,954
def has_function(self, funcname, includes=None, include_dirs=None, libraries=None, library_dirs=None): """Return a boolean indicating whether funcname is supported on the current platform. The optional arguments can be used to augment the compilation environment. """ # this can't be included at module scope because it tries to # import math which might not be available at that point - maybe # the necessary logic should just be inlined? import tempfile if includes is None: includes = [] if include_dirs is None: include_dirs = [] if libraries is None: libraries = [] if library_dirs is None: library_dirs = [] fd, fname = tempfile.mkstemp(".c", funcname, text=True) f = os.fdopen(fd, "w") for incl in includes: f.write("""#include "%s"\n""" % incl) f.write("""\ main (int argc, char **argv) { %s(); } """ % funcname) f.close() try: objects = self.compile([fname], include_dirs=include_dirs) except CompileError: return False try: self.link_executable(objects, "a.out", libraries=libraries, library_dirs=library_dirs) except (LinkError, __HOLE__): return False return True
TypeError
dataset/ETHPy150Open babble/babble/include/jython/Lib/distutils/ccompiler.py/CCompiler.has_function
8,955
def new_compiler (plat=None, compiler=None, verbose=0, dry_run=0, force=0): """Generate an instance of some CCompiler subclass for the supplied platform/compiler combination. 'plat' defaults to 'os.name' (eg. 'posix', 'nt'), and 'compiler' defaults to the default compiler for that platform. Currently only 'posix' and 'nt' are supported, and the default compilers are "traditional Unix interface" (UnixCCompiler class) and Visual C++ (MSVCCompiler class). Note that it's perfectly possible to ask for a Unix compiler object under Windows, and a Microsoft compiler object under Unix -- if you supply a value for 'compiler', 'plat' is ignored. """ if plat is None: plat = os.name try: if compiler is None: compiler = get_default_compiler(plat) (module_name, class_name, long_description) = compiler_class[compiler] except __HOLE__: msg = "don't know how to compile C/C++ code on platform '%s'" % plat if compiler is not None: msg = msg + " with '%s' compiler" % compiler raise DistutilsPlatformError, msg try: module_name = "distutils." + module_name __import__ (module_name) module = sys.modules[module_name] klass = vars(module)[class_name] except ImportError: raise DistutilsModuleError, \ "can't compile C/C++ code: unable to load module '%s'" % \ module_name except KeyError: raise DistutilsModuleError, \ ("can't compile C/C++ code: unable to find class '%s' " + "in module '%s'") % (class_name, module_name) # XXX The None is necessary to preserve backwards compatibility # with classes that expect verbose to be the first positional # argument. return klass (None, dry_run, force)
KeyError
dataset/ETHPy150Open babble/babble/include/jython/Lib/distutils/ccompiler.py/new_compiler
8,956
@classmethod def _get_storage_path(cls): try: return cls._storage_path except __HOLE__: storage_path = getattr(settings, "SESSION_FILE_PATH", None) if not storage_path: storage_path = tempfile.gettempdir() # Make sure the storage path is valid. if not os.path.isdir(storage_path): raise ImproperlyConfigured( "The session storage path %r doesn't exist. Please set your" " SESSION_FILE_PATH setting to an existing directory in which" " Django can store session data." % storage_path) cls._storage_path = storage_path return storage_path
AttributeError
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/django-1.5/django/contrib/sessions/backends/file.py/SessionStore._get_storage_path
8,957
def load(self): session_data = {} try: with open(self._key_to_file(), "rb") as session_file: file_data = session_file.read() # Don't fail if there is no data in the session file. # We may have opened the empty placeholder file. if file_data: try: session_data = self.decode(file_data) except (EOFError, SuspiciousOperation): self.create() # Remove expired sessions. expiry_age = self.get_expiry_age( modification=self._last_modification(), expiry=session_data.get('_session_expiry')) if expiry_age < 0: session_data = {} self.delete() self.create() except __HOLE__: self.create() return session_data
IOError
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/django-1.5/django/contrib/sessions/backends/file.py/SessionStore.load
8,958
def save(self, must_create=False): # Get the session data now, before we start messing # with the file it is stored within. session_data = self._get_session(no_load=must_create) session_file_name = self._key_to_file() try: # Make sure the file exists. If it does not already exist, an # empty placeholder file is created. flags = os.O_WRONLY | os.O_CREAT | getattr(os, 'O_BINARY', 0) if must_create: flags |= os.O_EXCL fd = os.open(session_file_name, flags) os.close(fd) except __HOLE__ as e: if must_create and e.errno == errno.EEXIST: raise CreateError raise # Write the session file without interfering with other threads # or processes. By writing to an atomically generated temporary # file and then using the atomic os.rename() to make the complete # file visible, we avoid having to lock the session file, while # still maintaining its integrity. # # Note: Locking the session file was explored, but rejected in part # because in order to be atomic and cross-platform, it required a # long-lived lock file for each session, doubling the number of # files in the session storage directory at any given time. This # rename solution is cleaner and avoids any additional overhead # when reading the session data, which is the more common case # unless SESSION_SAVE_EVERY_REQUEST = True. # # See ticket #8616. dir, prefix = os.path.split(session_file_name) try: output_file_fd, output_file_name = tempfile.mkstemp(dir=dir, prefix=prefix + '_out_') renamed = False try: try: os.write(output_file_fd, self.encode(session_data).encode()) finally: os.close(output_file_fd) os.rename(output_file_name, session_file_name) renamed = True finally: if not renamed: os.unlink(output_file_name) except (OSError, IOError, EOFError): pass
OSError
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/django-1.5/django/contrib/sessions/backends/file.py/SessionStore.save
8,959
def delete(self, session_key=None): if session_key is None: if self.session_key is None: return session_key = self.session_key try: os.unlink(self._key_to_file(session_key)) except __HOLE__: pass
OSError
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/django-1.5/django/contrib/sessions/backends/file.py/SessionStore.delete
8,960
@staticmethod def insert(col, doc_or_docs): try: doc_or_docs.fromkeys doc_or_docs = [doc_or_docs] except __HOLE__: pass doc_data = "".join(BSON.encode(doc) for doc in doc_or_docs) colname = _make_c_string(col) return "%s%s%s" % (_ZERO, colname, doc_data)
AttributeError
dataset/ETHPy150Open dieseldev/diesel/diesel/protocols/mongodb.py/Ops.insert
8,961
def next(self): try: return self.cache.popleft() except __HOLE__: more = self.cursor.more() if not more: raise StopIteration() else: self.cache.extend(more) return self.next()
IndexError
dataset/ETHPy150Open dieseldev/diesel/diesel/protocols/mongodb.py/MongoIter.next
8,962
@mod.route("/<path>/edit/<content_type>/") def edit_content(path, content_type="Content"): path = "/%s/" # Get the content type otherwise 404 try: Document = utils.content_type(current_app, content_type) except __HOLE__: abort(404) content = Document.objects(path=path).first() if content is None: content = Document() Form = content.admin_form() if Form is None: abort(404) # We can't edit this content item form = Form(request.forms, instance=content) if request.method == "post" and form.validate(): doc = form.save() redirect(doc.path) return render_template("content.html", content=content, admin_form=form)
KeyError
dataset/ETHPy150Open ericmoritz/flaskcma/flaskcma/content/views.py/edit_content
8,963
def prepare_api_params(params): """Construct ordered dict of params for API call. This method returns an alphabetized OrderedDict in order to maximize cache hits on the API. """ try: fmt = params.pop('format') except __HOLE__: fmt = 'json' try: limit = params.pop('limit') except KeyError: limit = '0' new_params = [] for key, val in params.items(): new_params.append((key, val)) new_params.sort() new_params.extend([('format', fmt), ('limit', limit)]) ordered = OrderedDict(new_params) return ordered
KeyError
dataset/ETHPy150Open openelections/openelections-core/openelex/api/base.py/prepare_api_params
8,964
def perform_request(self, method, url, params=None, body=None): """ Perform the actual request. Retrieve a connection from the connection pool, pass all the information to its perform_request method and return the data. If an exception was raised, mark the connection as failed and retry (up to `max_retries` times). If the operation was successful and the connection used was previously marked as dead, mark it as live, resetting its failure count. :arg method: HTTP method to use :arg url: absolute url (without host) to target :arg params: dictionary of query parameters, will be handed over to the underlying :class:`~elasticsearch.Connection` class for serialization :arg body: body of the request, will be serialized using the serializer and passed to the connection """ if body is not None: body = self.serializer.dumps(body) # some clients or environments don't support sending GET with body if method in ('HEAD', 'GET') and self.send_get_body_as != 'GET': # send it as post instead if self.send_get_body_as == 'POST': method = 'POST' # or as source parameter elif self.send_get_body_as == 'source': if params is None: params = {} params['source'] = body body = None if body is not None: try: body = body.encode('utf-8') except (__HOLE__, AttributeError): # bytes/str - no need to re-encode pass ignore = () timeout = None if params: timeout = params.pop('request_timeout', None) ignore = params.pop('ignore', ()) if isinstance(ignore, int): ignore = (ignore, ) for attempt in range(self.max_retries + 1): connection = self.get_connection() try: status, headers, data = connection.perform_request(method, url, params, body, ignore=ignore, timeout=timeout) except TransportError as e: retry = False if isinstance(e, ConnectionTimeout): retry = self.retry_on_timeout elif isinstance(e, ConnectionError): retry = True elif e.status_code in self.retry_on_status: retry = True if retry: # only mark as dead if we are retrying self.mark_dead(connection) # raise exception on last retry if attempt == self.max_retries: raise else: raise else: # connection didn't fail, confirm it's live status self.connection_pool.mark_live(connection) if data: data = self.deserializer.loads(data, headers.get('content-type')) return status, data
UnicodeDecodeError
dataset/ETHPy150Open KunihikoKido/sublime-elasticsearch-client/lib/elasticsearch/transport.py/Transport.perform_request
8,965
def _test_custom_workflow(self, workflow, error_expected=False): deployment = deploy(resource("dsl/basic_task_not_exist.yaml")) try: execute_workflow(workflow, deployment.id) if error_expected: self.fail('RuntimeError expected') except __HOLE__ as e: if not error_expected: self.fail('Success expected. error message: {0}'.format(e)) self.assertIn(self.AGENT_ALIVE_FAIL, str(e))
RuntimeError
dataset/ETHPy150Open cloudify-cosmo/cloudify-manager/tests/workflow_tests/test_agent_alive_verification.py/TestAgentAliveVerification._test_custom_workflow
8,966
def _set_power_state(task, target_state): """Turns the server power on/off or does a reboot. :param task: a TaskManager instance containing the node to act on. :param target_state: target state of the node. :raises: InvalidParameterValue if an invalid power state was specified. :raises: MissingParameterValue if some mandatory information is missing on the node :raises: IRMCOperationError on an error from SCCI """ node = task.node irmc_client = irmc_common.get_irmc_client(node) if target_state in (states.POWER_ON, states.REBOOT): irmc_boot.attach_boot_iso_if_needed(task) try: irmc_client(STATES_MAP[target_state]) except __HOLE__: msg = _("_set_power_state called with invalid power state " "'%s'") % target_state raise exception.InvalidParameterValue(msg) except scci.SCCIClientError as irmc_exception: LOG.error(_LE("iRMC set_power_state failed to set state to %(tstate)s " "for node %(node_id)s with error: %(error)s"), {'tstate': target_state, 'node_id': node.uuid, 'error': irmc_exception}) operation = _('iRMC set_power_state') raise exception.IRMCOperationError(operation=operation, error=irmc_exception)
KeyError
dataset/ETHPy150Open openstack/ironic/ironic/drivers/modules/irmc/power.py/_set_power_state
8,967
def _SelectCatenate(globalConfig): try: return os.environ['GIT_CATENATE'] except __HOLE__: pass pager = globalConfig.GetString('core.catenate') if pager: return pager try: return os.environ['CATENATE'] except KeyError: pass return 'cat'
KeyError
dataset/ETHPy150Open esrlabs/git-repo/portable.py/_SelectCatenate
8,968
def run(options): try: if options.get('simple_db_migrate_version'): msg = 'simple-db-migrate v%s' % SIMPLE_DB_MIGRATE_VERSION CLI.info_and_exit(msg) if options.get('show_colors'): CLI.show_colors() # Create config if options.get('config_file') or os.path.exists('simple-db-migrate.conf'): config = FileConfig(options.get('config_file') or 'simple-db-migrate.conf', options.get('environment')) else: config = Config() config.update('schema_version', options.get('schema_version')) config.update('show_sql', options.get('show_sql')) config.update('show_sql_only', options.get('show_sql_only')) config.update('new_migration', options.get('new_migration')) config.update('drop_db_first', options.get('drop_db_first')) config.update('paused_mode', options.get('paused_mode')) config.update('log_dir', options.get('log_dir')) config.update('label_version', options.get('label_version')) config.update('force_use_files_on_down', options.get('force_use_files_on_down')) config.update('force_execute_old_migrations_versions', options.get('force_execute_old_migrations_versions')) config.update('utc_timestamp', options.get('utc_timestamp')) config.update('database_user', options.get('database_user')) config.update('database_password', options.get('database_password')) config.update('database_host', options.get('database_host')) config.update('database_port', options.get('database_port')) config.update('database_name', options.get('database_name')) if config.get('database_port', None): config.update('database_port', int(config.get('database_port'))) if options.get('database_migrations_dir'): config.update("database_migrations_dir", Config._parse_migrations_dir(options.get('database_migrations_dir'))) config.update('database_engine', options.get('database_engine')) if not config.get('database_engine', None): config.update('database_engine', "mysql") config.update('database_version_table', options.get('database_version_table')) if not config.get('database_version_table', None): config.update('database_version_table', "__db_version__") # paused mode forces log_level to 2 log_level = int(options.get('log_level')) if options.get('paused_mode'): log_level = 2 config.update('log_level', log_level) # Ask the password for user if configured if config.get('database_password') == '<<ask_me>>': if options.get('password'): passwd = options.get('password') else: CLI.msg('\nPlease inform password to connect to database "%s@%s:%s"' % (config.get('database_user'), config.get('database_host'), config.get('database_name'))) passwd = getpass() config.update('database_password', passwd) if options.get('info_database'): if options.get('info_database').lower() == 'last_label': CLI.info_and_exit(Main(config).last_label() or "NONE") elif options.get('info_database').lower() == 'labels': labels = Main(config).labels() CLI.info_and_exit(labels and "\n".join(labels) or "NONE") else: CLI.error_and_exit("The '%s' is a wrong parameter for info" % options.get('info_database').lower()) # If CLI was correctly parsed, execute db-migrate. Main(config).execute() except __HOLE__: CLI.info_and_exit("\nExecution interrupted by user...") except Exception as e: CLI.error_and_exit(str(e))
KeyboardInterrupt
dataset/ETHPy150Open guilhermechapiewski/simple-db-migrate/simple_db_migrate/__init__.py/run
8,969
def _check_if_pyc(fname): """Return True if the extension is .pyc, False if .py and None otherwise""" from imp import find_module from os.path import realpath, dirname, basename, splitext # Normalize the file-path for the find_module() filepath = realpath(fname) dirpath = dirname(filepath) module_name = splitext(basename(filepath))[0] # Validate and fetch try: fileobj, fullpath, (_, _, pytype) = find_module(module_name, [dirpath]) except __HOLE__: raise IOError("Cannot find config file. " "Path may be incorrect! : {0}".format(filepath)) return pytype, fileobj, fullpath
ImportError
dataset/ETHPy150Open chalasr/Flask-P2P/venv/lib/python2.7/site-packages/gunicorn/_compat.py/_check_if_pyc
8,970
def wrap_error(func, *args, **kw): """ Wrap socket.error, IOError, OSError, select.error to raise new specialized exceptions of Python 3.3 like InterruptedError (PEP 3151). """ try: return func(*args, **kw) except (socket.error, __HOLE__, OSError) as exc: if hasattr(exc, 'winerror'): _wrap_error(exc, _MAP_ERRNO, exc.winerror) # _MAP_ERRNO does not contain all Windows errors. # For some errors like "file not found", exc.errno should # be used (ex: ENOENT). _wrap_error(exc, _MAP_ERRNO, exc.errno) raise except select.error as exc: if exc.args: _wrap_error(exc, _MAP_ERRNO, exc.args[0]) raise
IOError
dataset/ETHPy150Open chalasr/Flask-P2P/venv/lib/python2.7/site-packages/gunicorn/_compat.py/wrap_error
8,971
@classmethod def fix_up(cls, command): """ Verifies :code:`command` class structure and configures the :code:`command.map` method. Verifies that :code:`command` derives from :code:`ReportingCommand` and overrides :code:`ReportingCommand.reduce`. It then configures :code:`command.reduce`, if an overriding implementation of :code:`ReportingCommand.reduce` has been provided. :param command: :code:`ReportingCommand` class Exceptions: :code:`TypeError` :code:`command` class is not derived from :code:`ReportingCommand` :code:`AttributeError` No :code:`ReportingCommand.reduce` override """ if not issubclass(command, ReportingCommand): raise TypeError('%s is not a ReportingCommand' % command) if command.reduce == ReportingCommand.reduce: raise AttributeError('No ReportingCommand.reduce override') f = vars(command)['map'] # Function backing the map method # There is no way to add custom attributes to methods. See # [Why does setattr fail on a method](http://goo.gl/aiOsqh) # for an explanation. if f == vars(ReportingCommand)['map']: cls._requires_preop = False return try: settings = f._settings except __HOLE__: f.ConfigurationSettings = StreamingCommand.ConfigurationSettings return # Create new `StreamingCommand.ConfigurationSettings` class module = '.'.join([command.__module__, command.__name__, 'map']) name = 'ConfigurationSettings' bases = (StreamingCommand.ConfigurationSettings,) f.ConfigurationSettings = ConfigurationSettingsType( module, name, bases, settings) del f._settings return #endregion #endregion
AttributeError
dataset/ETHPy150Open splunk/splunk-ref-pas-code/spikes/googledrive_addon/bin/splunklib/searchcommands/reporting_command.py/ReportingCommand.ConfigurationSettings.fix_up
8,972
def load_checkers(ft): if ft not in checker_manager: try: importlib.import_module("lints.{}".format(ft)) except __HOLE__: return {} return checker_manager[ft]
ImportError
dataset/ETHPy150Open maralla/vim-linter/pythonx/linter/checker.py/load_checkers
8,973
def _read_log_conf(proc_name, log_dir): """ _read_log_conf(proc_name, log_dir) -> StringIO or None This method also replaces the %LOG_DIR% and %PROC_NAME% occurrences. """ def _repl(match): if match.group(0) == '%LOG_DIR%': return log_dir elif match.group(0) == '%PROC_NAME%': return proc_name log_conf = get_desktop_root('conf', 'log.conf') if not os.path.isfile(log_conf): return None try: raw = file(log_conf).read() sio = StringIO(CONF_RE.sub(_repl, raw)) return sio except __HOLE__, ex: print >> sys.stderr, "ERROR: Failed to open %s: %s" % (log_conf, ex) return None
IOError
dataset/ETHPy150Open cloudera/hue/desktop/core/src/desktop/log/__init__.py/_read_log_conf
8,974
def chown_log_dir(uid, gid): """ chown all files in the log dir to this user and group. Should only be called after logging has been set up. Return success """ if _log_dir is None: return False try: os.chown(_log_dir, uid, gid) for entry in os.listdir(_log_dir): os.chown(os.path.join(_log_dir, entry), uid, gid) return True except __HOLE__, ex: print >> sys.stderr, 'Failed to chown log directory %s: %s' % (_log_dir, ex) return False
OSError
dataset/ETHPy150Open cloudera/hue/desktop/core/src/desktop/log/__init__.py/chown_log_dir
8,975
def basic_logging(proc_name, log_dir=None): """ Configure logging for the program ``proc_name``: - Apply log.conf in the config directory. - If DESKTOP_LOGLEVEL environment variable is specified, the root console handler (stdout/stderr) is set to that level. If there is no console handler, a new one is created. - Defining the environment variable DESKTOP_DEBUG is the same as setting DESKTOP_LOGLEVEL=DEBUG. The ``log_dir`` will replace the %LOG_DIR% in log.conf. If not specified, we look for the DESKTOP_LOG_DIR environment variable, and then default to the DEFAULT_LOG_DIR. This removes all previously installed logging handlers. """ # Setup log_dir if not log_dir: log_dir = os.getenv("DESKTOP_LOG_DIR", DEFAULT_LOG_DIR) if not os.path.exists(log_dir): try: os.makedirs(log_dir) except __HOLE__, err: print >> sys.stderr, 'Failed to create log directory "%s": %s' % (log_dir, err) raise err # Remember where our log directory is global _log_dir _log_dir = log_dir log_conf = _read_log_conf(proc_name, log_dir) if log_conf is not None: logging.config.fileConfig(log_conf) root_logger = logging.getLogger() else: # Get rid of any preinstalled/default handlers root_logger = logging.getLogger() for h in root_logger.handlers: root_logger.removeHandler(h) # always keep DEBUG at the root, since we'll filter in the # handlers themselves - this allows the /logs endpoint # to always have all logs. root_logger.setLevel(logging.DEBUG) # Handle env variables env_loglevel = os.getenv("DESKTOP_LOGLEVEL") env_debug = os.getenv('DESKTOP_DEBUG') if env_debug: env_loglevel = 'DEBUG' if env_loglevel: try: lvl = getattr(logging, env_loglevel.upper()) except AttributeError: raise Exception("Invalid log level in DESKTOP_LOGLEVEL: %s" % (env_loglevel,)) # Set the StreamHandler to the level (create one if necessary) handler = _find_console_stream_handler(root_logger) if not handler: handler = logging.StreamHandler() handler.setFormatter(logging.Formatter(LOG_FORMAT, DATE_FORMAT)) root_logger.addHandler(handler) handler.setLevel(lvl) # Set all loggers but error.log to the same logging level error_handler = logging.getLogger('handler_errorlog') for h in root_logger.handlers: if isinstance(h, (FileHandler, RotatingFileHandler)) and h != error_handler: h.setLevel(lvl)
OSError
dataset/ETHPy150Open cloudera/hue/desktop/core/src/desktop/log/__init__.py/basic_logging
8,976
def VerifyIpolicy(owner, ipolicy, iscluster, callback): """Checks if an ipolicy has correct form. @type owner: str @param owner: name of the object containing the attribute @type ipolicy: dict @param ipolicy: actual value of the ipolicy parameters @type iscluster: bool @param iscluster: True iff the owner is the cluster @type callback: callable @param callback: will be called if there is an error """ try: objects.InstancePolicy.CheckParameterSyntax(ipolicy, iscluster) except errors.ConfigurationError, err: callback("%s has invalid instance policy: %s" % (owner, err)) for key, value in ipolicy.items(): if key == constants.ISPECS_MINMAX: for k in range(len(value)): VerifyIspecs(owner, "ipolicy/%s[%s]" % (key, k), value[k], callback) elif key == constants.ISPECS_STD: VerifyType(owner, "ipolicy/" + key, value, constants.ISPECS_PARAMETER_TYPES, callback) else: # FIXME: assuming list type if key in constants.IPOLICY_PARAMETERS: exp_type = float # if the value is int, it can be converted into float convertible_types = [int] else: exp_type = list convertible_types = [] # Try to convert from allowed types, if necessary. if any(isinstance(value, ct) for ct in convertible_types): try: value = exp_type(value) ipolicy[key] = value except __HOLE__: pass if not isinstance(value, exp_type): callback("%s has invalid instance policy: for %s," " expecting %s, got %s" % (owner, key, exp_type.__name__, type(value)))
ValueError
dataset/ETHPy150Open ganeti/ganeti/lib/config/verify.py/VerifyIpolicy
8,977
@patch("nailgun.statistics.oswl.collector.time.sleep", side_effect=StopIteration) @patch.object(sys, "argv", new=["_", consts.OSWL_RESOURCE_TYPES.vm]) def test_oswl_is_not_collected_when_stats_collecting_disabled(self, *_): collect_func_path = ("nailgun.statistics.oswl.collector.collect") must_send_stats_path = ("nailgun.statistics.oswl.collector" ".MasterNodeSettings.must_send_stats") with patch(must_send_stats_path, return_value=False): with patch(collect_func_path) as collect_mock: try: run_collecting() except __HOLE__: pass self.assertFalse(collect_mock.called) with patch(must_send_stats_path, return_value=True): with patch(collect_func_path) as collect_mock: try: run_collecting() except StopIteration: pass self.assertTrue(collect_mock.called)
StopIteration
dataset/ETHPy150Open openstack/fuel-web/nailgun/nailgun/test/unit/fuel_statistics_tests/test_oswl_collector.py/TestOSWLCollector.test_oswl_is_not_collected_when_stats_collecting_disabled
8,978
def __str__(self): try: return self.get() except __HOLE__: return self.join.join(self.value)
ValueError
dataset/ETHPy150Open thasso/pyjip/jip/options.py/Option.__str__
8,979
def get_client_state(self, cli): """ Return the ClientState instance for this CommandLineInterface. """ try: return self._client_states[cli] except __HOLE__: s = ClientState() self._client_states[cli] = s return s
KeyError
dataset/ETHPy150Open jonathanslenders/pymux/pymux/main.py/Pymux.get_client_state
8,980
def _create_pane(self, window=None, command=None, start_directory=None): """ Create a new :class:`pymux.arrangement.Pane` instance. (Don't put it in a window yet.) :param window: If a window is given, take the CWD of the current process of that window as the start path for this pane. :param command: If given, run this command instead of `self.default_shell`. :param start_directory: If given, use this as the CWD. """ assert window is None or isinstance(window, Window) assert command is None or isinstance(command, six.text_type) assert start_directory is None or isinstance(start_directory, six.text_type) def done_callback(): " When the process finishes. " if not self.remain_on_exit: # Remove pane from layout. self.arrangement.remove_pane(pane) # No panes left? -> Quit. if not self.arrangement.has_panes: self.eventloop.stop() self.invalidate() def bell(): " Sound bell on all clients. " if self.enable_bell: for c in self.clis.values(): c.output.bell() # Start directory. if start_directory: path = start_directory elif window and window.active_process: # When the path of the active process is known, # start the new process at the same location. path = window.active_process.get_cwd() else: path = None def before_exec(): " Called in the process fork (in the child process). " # Go to this directory. try: os.chdir(path or self.original_cwd) except __HOLE__: pass # No such file or directory. # Set terminal variable. (We emulate xterm.) os.environ['TERM'] = self.default_terminal # Make sure to set the PYMUX environment variable. if self.socket_name: os.environ['PYMUX'] = '%s,%i' % ( self.socket_name, pane.pane_id) if command: command = command.split() else: command = [self.default_shell] # Create process and pane. def has_priority(): return self.arrangement.pane_has_priority(pane) process = Process.from_command( self.eventloop, self.invalidate, command, done_callback, bell_func=bell, before_exec_func=before_exec, has_priority=has_priority) pane = Pane(process) # Keep track of panes. This is a WeakKeyDictionary, we only add, but # don't remove. self.panes_by_id[pane.pane_id] = pane logger.info('Created process %r.', command) process.start() return pane
OSError
dataset/ETHPy150Open jonathanslenders/pymux/pymux/main.py/Pymux._create_pane
8,981
def __getitem__(self, name): " Override __getitem__ to make lookup of pane- buffers dynamic. " if name.startswith('pane-'): try: id = int(name[len('pane-'):]) return self.pymux.panes_by_id[id].scroll_buffer except (__HOLE__, KeyError): raise KeyError elif name.startswith('search-'): try: id = int(name[len('search-'):]) return self.pymux.panes_by_id[id].search_buffer except (ValueError, KeyError): raise KeyError else: return super(_BufferMapping, self).__getitem__(name)
ValueError
dataset/ETHPy150Open jonathanslenders/pymux/pymux/main.py/_BufferMapping.__getitem__
8,982
def _load_file(self, name): try: path = os.path.join(BACKLIGHT_DIR, self.backlight_name, name) with open(path, 'r') as f: return f.read().strip() except __HOLE__: return False except Exception: logger.exception("Failed to get %s" % name)
IOError
dataset/ETHPy150Open qtile/qtile/libqtile/widget/backlight.py/Backlight._load_file
8,983
def _get_info(self): try: info = { 'brightness': float(self._load_file(self.brightness_file)), 'max': float(self._load_file(self.max_brightness_file)), } except __HOLE__: return False return info
TypeError
dataset/ETHPy150Open qtile/qtile/libqtile/widget/backlight.py/Backlight._get_info
8,984
def stop(self): for sig, callback in self._old.items(): try: signal.signal(sig, callback) except __HOLE__: pass
ValueError
dataset/ETHPy150Open circus-tent/circus/circus/sighandler.py/SysHandler.stop
8,985
def signal(self, sig, frame=None): signame = self.SIG_NAMES.get(sig) # Only log and dispatch signals we know by name; signame may be None # here, and calling .upper() on None would raise AttributeError. if signame is not None: logger.info('Got signal SIG_%s' % signame.upper()) try: handler = getattr(self, "handle_%s" % signame) handler() except __HOLE__: pass except Exception as e: tb = traceback.format_exc() logger.error("error: %s [%s]" % (e, tb)) sys.exit(1)
AttributeError
dataset/ETHPy150Open circus-tent/circus/circus/sighandler.py/SysHandler.signal
8,986
def setEnv(self, name, value=None): """ Set an environment variable for the worker process before it is launched. The worker process will typically inherit the environment of the machine it is running on but this method makes it possible to override specific variables in that inherited environment before the worker is launched. Note that this mechanism is different to the one used by the worker internally to set up the environment of a job. A call to this method affects all jobs issued after this method returns. Note to implementors: This means that you would typically need to copy the variables before enqueuing a job. If no value is provided it will be looked up from the current environment. NB: Only the Mesos and single-machine batch systems support passing environment variables. On other batch systems, this method has no effect. See https://github.com/BD2KGenomics/toil/issues/547. :param str name: the environment variable to be set on the worker. :param str value: if given, the environment variable given by name will be set to this value. If None, the variable's current value will be used as the value on the worker :raise RuntimeError: if value is None and the name cannot be found in the environment """ if value is None: try: value = os.environ[name] except __HOLE__: raise RuntimeError("%s does not exist in current environment" % name) self.environment[name] = value
KeyError
dataset/ETHPy150Open BD2KGenomics/toil/src/toil/batchSystems/abstractBatchSystem.py/BatchSystemSupport.setEnv
8,987
def is_pip_installed(): try: subprocess.Popen(['pip'], stdout=subprocess.PIPE, stderr=subprocess.PIPE).communicate() return True except __HOLE__: return False
OSError
dataset/ETHPy150Open ros-infrastructure/rosdep/src/rosdep2/platforms/pip.py/is_pip_installed
8,988
def __getitem__(self, index): if not isinstance(index, (int, long)): raise TypeError('list widget indices must be integers') subwidget = self._subwidgets.get(index) if subwidget is None: try: value = self._value[index] except __HOLE__: # return a widget without a value if we try # to access a field not in the list value = None subwidget = _make_widget(self._field.field, make_name(self.name, index), value, self._all_errors) self._subwidgets[index] = subwidget return subwidget
IndexError
dataset/ETHPy150Open mitsuhiko/fungiform/fungiform/widgets.py/ListWidget.__getitem__
8,989
def _create_dynamic_streams(self, stream_type, parser, video): try: streams = parser(self.session, video[1]) return streams.items() except __HOLE__ as err: self.logger.error("Failed to extract {0} streams: {1}", stream_type, err)
IOError
dataset/ETHPy150Open chrippa/livestreamer/src/livestreamer/plugins/viasat.py/Viasat._create_dynamic_streams
8,990
def getName(self, i): try: return self.map.keys()[i] except __HOLE__,e: return None
IndexError
dataset/ETHPy150Open azoft-dev-team/imagrium/env/Lib/xml/sax/saxutils.py/AttributeMap.getName
8,991
def getValue(self, i): try: if type(i)==types.IntType: return self.map[self.getName(i)] else: return self.map[i] except __HOLE__,e: return None
KeyError
dataset/ETHPy150Open azoft-dev-team/imagrium/env/Lib/xml/sax/saxutils.py/AttributeMap.getValue
8,992
def extract(query, choices, processor=None, scorer=None, limit=5): """Select the best match in a list or dictionary of choices. Find best matches in a list or dictionary of choices, return a list of tuples containing the match and its score. If a dictionary is used, also returns the key for each match. Arguments: query: An object representing the thing we want to find. choices: An iterable or dictionary-like object containing choices to be matched against the query. Dictionary arguments of {key: value} pairs will attempt to match the query against each value. processor: Optional function of the form f(a) -> b, where a is an individual choice and b is the choice to be used in matching. This can be used to match against, say, the first element of a list: lambda x: x[0] Defaults to fuzzywuzzy.utils.full_process(). scorer: Optional function for scoring matches between the query and an individual processed choice. This should be a function of the form f(query, choice) -> int. By default, fuzz.WRatio() is used and expects both query and choice to be strings. limit: Optional maximum for the number of elements returned. Defaults to 5. Returns: List of tuples containing the match and its score. If a list is used for choices, then the result will be 2-tuples. If a dictionary is used, then the result will be 3-tuples containing the key for each match. For example, searching for 'bird' in the dictionary {'bard': 'train', 'dog': 'man'} may return [('train', 22, 'bard'), ('man', 0, 'dog')] """ if choices is None: return [] # Catch generators without lengths try: if len(choices) == 0: return [] except TypeError: pass # default, turn whatever the choice is into a workable string if not processor: processor = utils.full_process # default: wratio if not scorer: scorer = fuzz.WRatio sl = [] try: # See if choices is a dictionary-like object. for key, choice in choices.items(): processed = processor(choice) score = scorer(query, processed) sl.append((choice, score, key)) except __HOLE__: # It's a list; just iterate over it. for choice in choices: processed = processor(choice) score = scorer(query, processed) sl.append((choice, score)) sl.sort(key=lambda i: i[1], reverse=True) return sl[:limit]
AttributeError
dataset/ETHPy150Open seatgeek/fuzzywuzzy/fuzzywuzzy/process.py/extract
8,993
def get_aws_metadata(headers, provider=None): if not provider: provider = boto.provider.get_default() metadata_prefix = provider.metadata_prefix metadata = {} for hkey in headers.keys(): if hkey.lower().startswith(metadata_prefix): val = urllib.unquote_plus(headers[hkey]) try: metadata[hkey[len(metadata_prefix):]] = unicode(val, 'utf-8') except __HOLE__: metadata[hkey[len(metadata_prefix):]] = val del headers[hkey] return metadata
UnicodeDecodeError
dataset/ETHPy150Open radlab/sparrow/deploy/third_party/boto-2.1.1/boto/utils.py/get_aws_metadata
8,994
def parse_ts(ts): try: dt = datetime.datetime.strptime(ts, ISO8601) return dt except __HOLE__: dt = datetime.datetime.strptime(ts, ISO8601_MS) return dt
ValueError
dataset/ETHPy150Open radlab/sparrow/deploy/third_party/boto-2.1.1/boto/utils.py/parse_ts
8,995
def emit(self, record): """ Emit a record. Format the record and send it to the specified addressees. It would be really nice if I could add authorization to this class without having to resort to cut and paste inheritance but, no. """ try: port = self.mailport if not port: port = smtplib.SMTP_PORT smtp = smtplib.SMTP(self.mailhost, port) smtp.login(self.username, self.password) msg = self.format(record) msg = "From: %s\r\nTo: %s\r\nSubject: %s\r\nDate: %s\r\n\r\n%s" % ( self.fromaddr, ','.join(self.toaddrs), self.getSubject(record), formatdate(), msg) smtp.sendmail(self.fromaddr, self.toaddrs, msg) smtp.quit() except (__HOLE__, SystemExit): raise except: self.handleError(record)
KeyboardInterrupt
dataset/ETHPy150Open radlab/sparrow/deploy/third_party/boto-2.1.1/boto/utils.py/AuthSMTPHandler.emit
8,996
def as_variable(obj, key=None, strict=True, copy=False): """Convert an object into a Variable - If the object is already a `Variable`, return it. - If the object is a `DataArray`, return it if `strict=False` or return its variable if `strict=True`. - Otherwise, if the object has 'dims' and 'data' attributes, convert it into a new `Variable`. - If all else fails, attempt to convert the object into a `Variable` by unpacking it into the arguments for `Variable.__init__`. """ # TODO: consider extending this method to automatically handle Iris and # pandas objects. if strict and hasattr(obj, 'variable'): # extract the primary Variable from DataArrays obj = obj.variable if not isinstance(obj, (Variable, xr.DataArray)): if hasattr(obj, 'dims') and (hasattr(obj, 'data') or hasattr(obj, 'values')): obj = Variable(obj.dims, getattr(obj, 'data', obj.values), getattr(obj, 'attrs', None), getattr(obj, 'encoding', None)) elif isinstance(obj, tuple): try: obj = Variable(*obj) except __HOLE__: raise TypeError('cannot convert argument into a Variable') elif utils.is_scalar(obj): obj = Variable([], obj) elif getattr(obj, 'name', None) is not None: obj = Variable(obj.name, obj) elif key is not None: obj = Variable(key, obj) else: raise TypeError('cannot infer Variable dimensions') else: if copy: obj = obj.copy(deep=False) return obj
TypeError
dataset/ETHPy150Open pydata/xarray/xarray/core/variable.py/as_variable
8,997
@encoding.setter def encoding(self, value): try: self._encoding = dict(value) except __HOLE__: raise ValueError('encoding must be castable to a dictionary')
ValueError
dataset/ETHPy150Open pydata/xarray/xarray/core/variable.py/Variable.encoding
8,998
def equals(self, other): """True if two Variables have the same dimensions and values; otherwise False. Variables can still be equal (like pandas objects) if they have NaN values in the same locations. This method is necessary because `v1 == v2` for Variables does element-wise comparisons (like numpy.ndarrays). """ other = getattr(other, 'variable', other) try: return (self.dims == other.dims and self._data_equals(other)) except (TypeError, __HOLE__): return False
AttributeError
dataset/ETHPy150Open pydata/xarray/xarray/core/variable.py/Variable.equals
8,999
def broadcast_equals(self, other): """True if two Variables have the same values after being broadcast against each other; otherwise False. Variables can still be equal (like pandas objects) if they have NaN values in the same locations. """ try: self, other = broadcast_variables(self, other) except (__HOLE__, AttributeError): return False return self.equals(other)
ValueError
dataset/ETHPy150Open pydata/xarray/xarray/core/variable.py/Variable.broadcast_equals