_id
stringlengths
2
7
title
stringlengths
1
88
partition
stringclasses
3 values
text
stringlengths
75
19.8k
language
stringclasses
1 value
meta_information
dict
q264000
get_author_and_version
validation
def get_author_and_version(package):
    """Return package author and version as listed in ``__init__.py``.

    Arguments:
        package: Path to the package directory containing ``__init__.py``.

    Returns:
        tuple: ``(author, version)`` strings.

    Raises:
        AttributeError: If either dunder is missing (``re.search``
            returns ``None`` and ``.group`` fails).
    """
    # Use a context manager so the file handle is closed promptly
    # (the original left the handle open until garbage collection).
    with open(os.path.join(package, '__init__.py')) as init_file:
        init_py = init_file.read()
    author = re.search("__author__ = ['\"]([^'\"]+)['\"]", init_py).group(1)
    version = re.search("__version__ = ['\"]([^'\"]+)['\"]", init_py).group(1)
    return author, version
python
{ "resource": "" }
q264001
api_subclass_factory
validation
def api_subclass_factory(name, docstring, remove_methods, base=SlackApi):
    """Create an API subclass with fewer methods than its base class.

    Arguments:
        name (:py:class:`str`): The name of the new class.
        docstring (:py:class:`str`): The docstring for the new class.
        remove_methods (:py:class:`dict`): Maps a root method name (e.g.
            ``'auth'`` for ``'auth.test'``) to either a tuple of child
            method names to drop or the sentinel :py:const:`ALL` to drop
            the whole root.
        base (:py:class:`type`, optional): The base class (defaults to
            :py:class:`SlackApi`).

    Returns:
        :py:class:`type`: The new subclass.

    Raises:
        :py:class:`KeyError`: If the method wasn't in the superclass.
    """
    trimmed = deepcopy(base.API_METHODS)
    for root, children in remove_methods.items():
        if children is ALL:
            # Drop the whole method family at once.
            del trimmed[root]
            continue
        for child in children:
            del trimmed[root][child]
    return type(name, (base,), {'API_METHODS': trimmed, '__doc__': docstring})
python
{ "resource": "" }
q264002
SlackApi.execute_method
validation
async def execute_method(self, method, **params):
    """Execute a specified Slack Web API method.

    Arguments:
        method (:py:class:`str`): The name of the method.
        **params (:py:class:`dict`): Any additional parameters required.

    Returns:
        :py:class:`dict`: The JSON data from the response.

    Raises:
        :py:class:`aiohttp.web_exceptions.HTTPException`: If the HTTP
            request returns a code other than 200 (OK).
        SlackApiError: If the Slack API is reached but the response
            contains an error message.
    """
    url = self.url_builder(method, url_params=params)
    logger.info('Executing method %r', method)
    # NOTE(review): module-level `aiohttp.get` is a legacy API removed in
    # newer aiohttp releases -- presumably fine for the pinned version;
    # confirm before upgrading.
    response = await aiohttp.get(url)
    logger.info('Status: %r', response.status)
    if response.status == 200:
        json = await response.json()
        logger.debug('...with JSON %r', json)
        # Slack signals API-level failure via the payload's 'ok' flag
        # even when the HTTP status is 200.
        if json.get('ok'):
            return json
        raise SlackApiError(json['error'])
    else:
        # Map non-200 statuses onto aiohttp's HTTP exception classes.
        raise_for_status(response)
python
{ "resource": "" }
q264003
SlackApi.method_exists
validation
def method_exists(cls, method):
    """Whether a given method exists in the known API.

    Arguments:
        method (:py:class:`str`): The name of the method.

    Returns:
        :py:class:`bool`: Whether the method is in the known API.
    """
    node = cls.API_METHODS
    # Walk the dotted path down the nested API_METHODS mapping.
    for part in method.split('.'):
        node = node.get(part)
        if node is None:
            return False
    # Leaves of the mapping are description strings; anything else
    # means the path stopped at an intermediate (non-method) node.
    if not isinstance(node, str):
        return False
    logger.debug('%r: %r', method, node)
    return True
python
{ "resource": "" }
q264004
XPathSelectorHandler._add_parsley_ns
validation
def _add_parsley_ns(cls, namespace_dict): """ Extend XPath evaluation with Parsley extensions' namespace """ namespace_dict.update({ 'parslepy' : cls.LOCAL_NAMESPACE, 'parsley' : cls.LOCAL_NAMESPACE, }) return namespace_dict
python
{ "resource": "" }
q264005
XPathSelectorHandler.extract
validation
def extract(self, document, selector, debug_offset=''):
    """Try to convert matching elements to unicode strings.

    If that is not applicable, the selector evaluation probably already
    produced string(s), a boolean or an int/float, which is returned
    as-is.  Returns ``None`` when the selector matched nothing (or an
    empty sequence).
    """
    matched = self.select(document, selector)
    if matched is None:
        # selector did not match anything
        if self.DEBUG:
            print(debug_offset, "selector did not match anything; return None")
        return None
    if not isinstance(matched, (list, tuple)):
        return self._extract_single(matched)
    # FIXME: return None or return empty list?
    if not len(matched):
        return
    return [self._extract_single(item) for item in matched]
python
{ "resource": "" }
q264006
SlackBot.join_rtm
validation
async def join_rtm(self, filters=None):
    """Join the real-time messaging service.

    Arguments:
        filters (:py:class:`dict`, optional): Dictionary mapping
            message filters to the functions they should dispatch to.
            Use a :py:class:`collections.OrderedDict` if precedence is
            important; only one filter, the first match, will be
            applied to each message.
    """
    if filters is None:
        # NOTE(review): despite the docstring, the default builds a *list*
        # of filter instances from MESSAGE_FILTERS -- confirm intended type.
        filters = [cls(self) for cls in self.MESSAGE_FILTERS]
    url = await self._get_socket_url()
    logger.debug('Connecting to %r', url)
    async with ws_connect(url) as socket:
        first_msg = await socket.receive()
        # The server's first frame must be the expected RTM handshake.
        self._validate_first_message(first_msg)
        self.socket = socket
        async for message in socket:
            if message.tp == MsgType.text:
                await self.handle_message(message, filters)
            elif message.tp in (MsgType.closed, MsgType.error):
                # Connection ended: close it if still open and stop consuming.
                if not socket.closed:
                    await socket.close()
                self.socket = None
                break
    logger.info('Left real-time messaging.')
python
{ "resource": "" }
q264007
SlackBot.handle_message
validation
async def handle_message(self, message, filters):
    """Handle an incoming message appropriately.

    Arguments:
        message (:py:class:`aiohttp.websocket.Message`): The incoming
            message to handle.
        filters (:py:class:`list`): The filters to apply to incoming
            messages.
    """
    data = self._unpack_message(message)
    logger.debug(data)
    if data.get('type') == 'error':
        raise SlackApiError(
            data.get('error', {}).get('msg', str(data))
        )
    elif self.message_is_to_me(data):
        # Strip the "@bot: " prefix before checking built-in commands.
        text = data['text'][len(self.address_as):].strip()
        if text == 'help':
            return self._respond(
                channel=data['channel'],
                text=self._instruction_list(filters),
            )
        elif text == 'version':
            return self._respond(
                channel=data['channel'],
                text=self.VERSION,
            )
    # Every matching filter gets dispatched (no break), and each of its
    # async-generated responses is sent back to the originating channel.
    for _filter in filters:
        if _filter.matches(data):
            logger.debug('Response triggered')
            async for response in _filter:
                self._respond(channel=data['channel'], text=response)
python
{ "resource": "" }
q264008
SlackBot.message_is_to_me
validation
def message_is_to_me(self, data):
    """Return whether ``data`` is a chat message addressed to this bot."""
    if data.get('type') != 'message':
        return False
    return data.get('text', '').startswith(self.address_as)
python
{ "resource": "" }
q264009
SlackBot.from_api_token
validation
async def from_api_token(cls, token=None, api_cls=SlackBotApi):
    """Create a new instance from the API token.

    Arguments:
        token (:py:class:`str`, optional): The bot's API token
            (defaults to ``None``, which means looking in the
            environment).
        api_cls (:py:class:`type`, optional): The class to create as the
            ``api`` argument for API access (defaults to
            :py:class:`aslack.slack_api.SlackBotApi`).

    Returns:
        :py:class:`SlackBot`: The new instance.
    """
    if token is None:
        api = api_cls.from_env()
    else:
        api = api_cls(api_token=token)
    auth = await api.execute_method(cls.API_AUTH_ENDPOINT)
    return cls(auth['user_id'], auth['user'], api)
python
{ "resource": "" }
q264010
SlackBot._format_message
validation
def _format_message(self, channel, text): """Format an outgoing message for transmission. Note: Adds the message type (``'message'``) and incremental ID. Arguments: channel (:py:class:`str`): The channel to send to. text (:py:class:`str`): The message text to send. Returns: :py:class:`str`: The JSON string of the message. """ payload = {'type': 'message', 'id': next(self._msg_ids)} payload.update(channel=channel, text=text) return json.dumps(payload)
python
{ "resource": "" }
q264011
SlackBot._get_socket_url
validation
async def _get_socket_url(self): """Get the WebSocket URL for the RTM session. Warning: The URL expires if the session is not joined within 30 seconds of the API call to the start endpoint. Returns: :py:class:`str`: The socket URL. """ data = await self.api.execute_method( self.RTM_START_ENDPOINT, simple_latest=True, no_unreads=True, ) return data['url']
python
{ "resource": "" }
q264012
SlackBot._instruction_list
validation
def _instruction_list(self, filters): """Generates the instructions for a bot and its filters. Note: The guidance for each filter is generated by combining the docstrings of the predicate filter and resulting dispatch function with a single space between. The class's :py:attr:`INSTRUCTIONS` and the default help command are added. Arguments: filters (:py:class:`list`): The filters to apply to incoming messages. Returns: :py:class:`str`: The bot's instructions. """ return '\n\n'.join([ self.INSTRUCTIONS.strip(), '*Supported methods:*', 'If you send "@{}: help" to me I reply with these ' 'instructions.'.format(self.user), 'If you send "@{}: version" to me I reply with my current ' 'version.'.format(self.user), ] + [filter.description() for filter in filters])
python
{ "resource": "" }
q264013
SlackBot._respond
validation
def _respond(self, channel, text):
    """Send a message to a channel over the current socket.

    Args:
      channel (:py:class:`str`): The channel to send to.
      text (:py:class:`str`): The message text to send.
    """
    payload = self._format_message(channel, text)
    if payload is None:
        return
    # Log a shortened preview so long messages don't flood the log.
    logger.info(
        'Sending message: %r',
        truncate(payload, max_len=50),
    )
    self.socket.send_str(payload)
python
{ "resource": "" }
q264014
SlackBot._validate_first_message
validation
def _validate_first_message(cls, msg):
    """Check that the first socket message is the expected handshake.

    Note:
      The handshake is provided as :py:attr:`RTM_HANDSHAKE`.

    Arguments:
        msg (:py:class:`aiohttp.Message`): The message to validate.

    Raises:
        :py:class:`SlackApiError`: If the data doesn't match the
            expected handshake.
    """
    payload = cls._unpack_message(msg)
    logger.debug(payload)
    if payload != cls.RTM_HANDSHAKE:
        raise SlackApiError('Unexpected response: {!r}'.format(payload))
    logger.info('Joined real-time messaging.')
python
{ "resource": "" }
q264015
get_app_locations
validation
def get_app_locations():
    """Return the filesystem paths of all tested apps (``PROJECT_APPS``)."""
    paths = []
    for app_name in PROJECT_APPS:
        module_file = import_module(app_name).__file__
        paths.append(os.path.dirname(os.path.normpath(module_file)))
    return paths
python
{ "resource": "" }
q264016
get_tasks
validation
def get_tasks():
    """Import and return the task classes named in ``TASKS``.

    Raises:
        ImproperlyConfigured: If a path is not dotted, the module cannot
            be imported, or the class is missing from the module.
    """
    task_classes = []
    for task_path in TASKS:
        try:
            module, classname = task_path.rsplit('.', 1)
        except ValueError:
            raise ImproperlyConfigured('%s isn\'t a task module' % task_path)
        try:
            mod = import_module(module)
        except ImportError as e:
            raise ImproperlyConfigured('Error importing task %s: "%s"'
                                       % (module, e))
        try:
            task_classes.append(getattr(mod, classname))
        except AttributeError:
            raise ImproperlyConfigured('Task module "%s" does not define a '
                                       '"%s" class' % (module, classname))
    return task_classes
python
{ "resource": "" }
q264017
get_task_options
validation
def get_task_options():
    """Collect the ``option_list`` entries of every configured task."""
    options = ()
    for task_class in get_tasks():
        options += task_class.option_list
    return options
python
{ "resource": "" }
q264018
Database.to_cldf
validation
def to_cldf(self, dest, mdname='cldf-metadata.json'):
    """
    Write the data from the db to a CLDF dataset according to the metadata
    in `self.dataset`.

    :param dest: Destination directory (created if missing).
    :param mdname: Filename for the dataset metadata.
    :return: path of the metadata file
    """
    dest = Path(dest)
    if not dest.exists():
        dest.mkdir()
    data = self.read()
    # Write the bibliography first, if any sources are present.
    if data[self.source_table_name]:
        sources = Sources()
        for src in data[self.source_table_name]:
            sources.add(Source(
                src['genre'],
                src['id'],
                **{k: v for k, v in src.items() if k not in ['id', 'genre']}))
        sources.write(dest / self.dataset.properties.get('dc:source', 'sources.bib'))
    for table_type, items in data.items():
        try:
            table = self.dataset[table_type]
            # Record the number of rows written as the table's extent.
            table.common_props['dc:extent'] = table.write(
                [self.retranslate(table, item) for item in items],
                base=dest)
        except KeyError:
            # Only the sources table is expected to lack a CLDF table.
            assert table_type == self.source_table_name, table_type
    return self.dataset.write_metadata(dest / mdname)
python
{ "resource": "" }
q264019
MessageHandler.description
validation
def description(self):
    """A user-friendly description of the handler.

    Built lazily from the handler's docstring: the first line is
    skipped, then consecutive non-blank lines are joined into single
    paragraphs, blank lines separating paragraphs.

    Returns:
        :py:class:`str`: The handler's description.
    """
    if self._description is None:
        raw = '\n'.join(self.__doc__.splitlines()[1:]).strip()
        paragraphs = []
        for line in map(str.strip, raw.splitlines()):
            if not line:
                paragraphs.append('')
            elif paragraphs:
                paragraphs[-1] = ' '.join((paragraphs[-1], line))
            else:
                paragraphs.append(line)
        self._description = '\n'.join(paragraphs)
    return self._description
python
{ "resource": "" }
q264020
Parselet.from_jsonfile
validation
def from_jsonfile(cls, fp, selector_handler=None, strict=False, debug=False):
    """
    Create a Parselet instance from an open file containing the
    Parsley script as a JSON object.

    :param file fp: an open file-like pointer containing the Parsley script
    :rtype: :class:`.Parselet`

    Other arguments: same as for :class:`.Parselet` contructor
    """
    return cls._from_jsonlines(
        fp,
        selector_handler=selector_handler,
        strict=strict,
        debug=debug,
    )
python
{ "resource": "" }
q264021
Parselet.from_yamlfile
validation
def from_yamlfile(cls, fp, selector_handler=None, strict=False, debug=False):
    """
    Create a Parselet instance from an open file containing the
    Parsley script as a YAML object.

    :param file fp: an open file-like pointer containing the Parsley script
    :rtype: :class:`.Parselet`

    Other arguments: same as for :class:`.Parselet` contructor
    """
    script = fp.read()
    return cls.from_yamlstring(
        script,
        selector_handler=selector_handler,
        strict=strict,
        debug=debug,
    )
python
{ "resource": "" }
q264022
Parselet._from_jsonlines
validation
def _from_jsonlines(cls, lines, selector_handler=None, strict=False, debug=False): """ Interpret input lines as a JSON Parsley script. Python-style comment lines are skipped. """ return cls(json.loads( "\n".join([l for l in lines if not cls.REGEX_COMMENT_LINE.match(l)]) ), selector_handler=selector_handler, strict=strict, debug=debug)
python
{ "resource": "" }
q264023
Parselet._compile
validation
def _compile(self, parselet_node, level=0):
    """
    Build part of the abstract Parsley extraction tree

    Arguments:
    parselet_node (dict) -- part of the Parsley tree to compile
    (can be the root dict/node)
    level (int) -- current recursion depth (used for debug)
    """
    if self.DEBUG:
        debug_offset = "".join([" " for x in range(level)])
    if self.DEBUG:
        print(debug_offset, "%s::compile(%s)" % (
            self.__class__.__name__, parselet_node))
    # Dict nodes recurse; string leaves become selectors (below).
    if isinstance(parselet_node, dict):
        parselet_tree = ParsleyNode()
        for k, v in list(parselet_node.items()):
            # we parse the key raw elements but without much
            # interpretation (which is done by the SelectorHandler)
            try:
                m = self.REGEX_PARSELET_KEY.match(k)
                if not m:
                    if self.DEBUG:
                        print(debug_offset, "could not parse key", k)
                    raise InvalidKeySyntax(k)
            except:
                # NOTE(review): bare except also swallows the
                # InvalidKeySyntax raised above and re-wraps it.
                raise InvalidKeySyntax("Key %s is not valid" % k)
            key = m.group('key')
            # by default, fields are required
            key_required = True
            operator = m.group('operator')
            if operator == '?':
                key_required = False
            # FIXME: "!" operator not supported (complete array)
            scope = m.group('scope')
            # example: get list of H3 tags
            # { "titles": ["h3"] }
            # FIXME: should we support multiple selectors in list?
            #        e.g. { "titles": ["h1", "h2", "h3", "h4"] }
            if isinstance(v, (list, tuple)):
                # A one-element list value means "iterate over matches".
                v = v[0]
                iterate = True
            else:
                iterate = False
            # keys in the abstract Parsley trees are of type `ParsleyContext`
            try:
                parsley_context = ParsleyContext(
                    key,
                    operator=operator,
                    required=key_required,
                    scope=self.selector_handler.make(scope) if scope else None,
                    iterate=iterate)
            except SyntaxError:
                if self.DEBUG:
                    print("Invalid scope:", k, scope)
                raise
            if self.DEBUG:
                print(debug_offset, "current context:", parsley_context)
            # go deeper in the Parsley tree...
            try:
                child_tree = self._compile(v, level=level+1)
            except SyntaxError:
                if self.DEBUG:
                    print("Invalid value: ", v)
                raise
            except:
                raise
            if self.DEBUG:
                print(debug_offset, "child tree:", child_tree)
            parselet_tree[parsley_context] = child_tree
        return parselet_tree
    # a string leaf should match some kind of selector,
    # let the selector handler deal with it
    elif isstr(parselet_node):
        return self.selector_handler.make(parselet_node)
    else:
        raise ValueError(
            "Unsupported type(%s) for Parselet node <%s>" % (
                type(parselet_node), parselet_node))
python
{ "resource": "" }
q264024
Dataset.auto_constraints
validation
def auto_constraints(self, component=None):
    """
    Use CLDF reference properties to implicitely create foreign key
    constraints.

    :param component: A Table object or `None`.
    """
    if not component:
        # No specific table given: process every table, then stop.
        for table in self.tables:
            self.auto_constraints(table)
        return
    if not component.tableSchema.primaryKey:
        # Promote the CLDF `id` column (if present) to primary key.
        idcol = component.get_column(term_uri('id'))
        if idcol:
            component.tableSchema.primaryKey = [idcol.name]
    self._auto_foreign_keys(component)
    try:
        table_type = self.get_tabletype(component)
    except ValueError:
        # New component is not a known CLDF term, so cannot add components
        # automatically. TODO: We might me able to infer some based on
        # `xxxReference` column properties?
        return
    # auto-add foreign keys targetting the new component:
    for table in self.tables:
        self._auto_foreign_keys(table, component=component, table_type=table_type)
python
{ "resource": "" }
q264025
Service.url_builder
validation
def url_builder(self, endpoint, *, root=None, params=None, url_params=None):
    """Create a URL for the specified endpoint.

    Arguments:
        endpoint (:py:class:`str`): The API endpoint to access.
        root (:py:class:`str`, optional): The root URL for the service
            API (defaults to ``self.ROOT``).
        params (:py:class:`dict`, optional): Values to format into the
            created URL (defaults to ``None``).
        url_params (:py:class:`dict`, optional): Parameters to append
            as the query string (defaults to ``None``).

    Returns:
        :py:class:`str`: The resulting URL.
    """
    root = self.ROOT if root is None else root
    scheme, netloc, path, _, _ = urlsplit(root)
    template = urlunsplit((
        scheme,
        netloc,
        urljoin(path, endpoint),
        urlencode(url_params or {}),
        '',
    ))
    # Placeholders like {name} in the endpoint are filled last.
    return template.format(**params or {})
python
{ "resource": "" }
q264026
raise_for_status
validation
def raise_for_status(response):
    """Raise the aiohttp error matching a response's status code.

    Arguments:
        response (:py:class:`aiohttp.ClientResponse`): The API response.

    Raises:
        :py:class:`aiohttp.web_exceptions.HTTPException`: The
            appropriate error for the response's status.
    """
    for name in web_exceptions.__all__:
        exc_class = getattr(web_exceptions, name)
        if exc_class.status_code != response.status:
            continue
        kwargs = dict(headers=response.headers, reason=response.reason)
        if issubclass(exc_class, web_exceptions._HTTPMove):  # pylint: disable=protected-access
            # Redirect-type errors require the Location as first argument.
            raise exc_class(response.headers['Location'], **kwargs)
        raise exc_class(**kwargs)
python
{ "resource": "" }
q264027
truncate
validation
def truncate(text, max_len=350, end='...'):
    """Truncate the supplied text for display.

    Arguments:
        text (:py:class:`str`): The text to truncate.
        max_len (:py:class:`int`, optional): The maximum length of the
            text before truncation (defaults to 350 characters).
        end (:py:class:`str`, optional): The ending to use to show that
            the text was truncated (defaults to ``'...'``).

    Returns:
        :py:class:`str`: The truncated text.
    """
    if len(text) <= max_len:
        return text
    # Cut at the last space inside the limit so words stay whole.
    head = text[:max_len].rsplit(' ', maxsplit=1)[0]
    return head + end
python
{ "resource": "" }
q264028
Sources.add
validation
def add(self, *entries):
    """
    Add sources, each given either as a glottolog reference id or as a
    BibTeX record string.
    """
    for entry in entries:
        if not isinstance(entry, string_types):
            self._add_entries(entry)
        else:
            # Raw BibTeX text must be parsed first.
            self._add_entries(database.parse_string(entry, bib_format='bibtex'))
python
{ "resource": "" }
q264029
get_cache_key
validation
def get_cache_key(user_or_username, size, prefix):
    """
    Returns a cache key consisting of a username and image size.
    """
    username = user_or_username
    if isinstance(username, get_user_model()):
        # A user object was passed; reduce it to its username.
        username = username.username
    return '%s_%s_%s' % (prefix, username, size)
python
{ "resource": "" }
q264030
cache_result
validation
def cache_result(func):
    """
    Decorator to cache the result of functions that take a ``user`` and
    a ``size`` value.

    The cache key combines the wrapped function's name, the user and
    the size; entries are stored with ``AVATAR_CACHE_TIMEOUT``.
    """
    # Sentinel distinguishing "not cached" from cached falsy values;
    # the original `cache.get(key) or ...` recomputed falsy results on
    # every call, defeating the cache for them.
    missing = object()

    def cache_set(key, value):
        cache.set(key, value, AVATAR_CACHE_TIMEOUT)
        return value

    def cached_func(user, size):
        prefix = func.__name__
        cached_funcs.add(prefix)
        key = get_cache_key(user, size, prefix=prefix)
        result = cache.get(key, missing)
        if result is missing:
            result = cache_set(key, func(user, size))
        return result
    return cached_func
python
{ "resource": "" }
q264031
invalidate_cache
validation
def invalidate_cache(user, size=None):
    """
    Function to be called when saving or changing an user's avatars:
    clears all cached entries for the auto-generated sizes (plus
    ``size`` if given) across every cached function.
    """
    sizes = set(AUTO_GENERATE_AVATAR_SIZES)
    if size is not None:
        sizes.add(size)
    for prefix in cached_funcs:
        for s in sizes:
            cache.delete(get_cache_key(user, s, prefix))
python
{ "resource": "" }
q264032
get_field_for_proxy
validation
def get_field_for_proxy(pref_proxy):
    """Returns a field object instance for a given PrefProxy object,
    chosen by the Python type of the proxy's default value.

    :param PrefProxy pref_proxy:
    :rtype: models.Field
    """
    type_map = {
        bool: models.BooleanField,
        int: models.IntegerField,
        float: models.FloatField,
        datetime: models.DateTimeField,
    }
    # Fall back to TextField for anything unrecognised.
    field_cls = type_map.get(type(pref_proxy.default), models.TextField)
    field = field_cls()
    update_field_from_proxy(field, pref_proxy)
    return field
python
{ "resource": "" }
q264033
update_field_from_proxy
validation
def update_field_from_proxy(field_obj, pref_proxy):
    """Copies presentation attributes from a PrefProxy onto a model field.

    :param models.Field field_obj:
    :param PrefProxy pref_proxy:
    """
    for attr in ('verbose_name', 'help_text', 'default'):
        setattr(field_obj, attr, getattr(pref_proxy, attr))
python
{ "resource": "" }
q264034
get_pref_model_class
validation
def get_pref_model_class(app, prefs, get_prefs_func):
    """Returns preferences model class dynamically crated for a given app
    or None on conflict."""
    module = '%s.%s' % (app, PREFS_MODULE_NAME)
    model_dict = {
        '_prefs_app': app,
        '_get_prefs': staticmethod(get_prefs_func),
        '__module__': module,
        # `managed=False` keeps Django from creating a DB table.
        'Meta': type('Meta', (models.options.Options,), {
            'verbose_name': _('Preference'),
            'verbose_name_plural': _('Preferences'),
            'app_label': app,
            'managed': False,
        })
    }
    # One model field per preference proxy.
    for field_name, val_proxy in prefs.items():
        model_dict[field_name] = val_proxy.field
    model = type('Preferences', (models.Model,), model_dict)

    def fake_save_base(self, *args, **kwargs):
        # Instead of hitting the DB, push the edited values back onto
        # the PrefProxy objects and emit the prefs_save signal.
        updated_prefs = {
            f.name: getattr(self, f.name) for f in self._meta.fields
            if not isinstance(f, models.fields.AutoField)
        }
        app_prefs = self._get_prefs(self._prefs_app)
        for pref in app_prefs.keys():
            if pref in updated_prefs:
                app_prefs[pref].db_value = updated_prefs[pref]
        self.pk = self._prefs_app  # Make Django 1.7 happy.
        prefs_save.send(sender=self, app=self._prefs_app, updated_prefs=updated_prefs)
        return True

    model.save_base = fake_save_base
    return model
python
{ "resource": "" }
q264035
get_frame_locals
validation
def get_frame_locals(stepback=0):
    """Returns locals dictionary from a given frame.

    :param int stepback:
    :rtype: dict
    """
    with Frame(stepback=stepback) as frame:
        return frame.f_locals
python
{ "resource": "" }
q264036
traverse_local_prefs
validation
def traverse_local_prefs(stepback=0):
    """Generator yielding ``(name, locals_dict)`` pairs for variables
    considered preferences (upper-case names without a leading
    underscore) in the locals of a caller's frame.

    :param int stepback:
    :rtype: tuple
    """
    locals_dict = get_frame_locals(stepback + 1)
    for name in locals_dict:
        looks_like_pref = name.upper() == name and not name.startswith('_')
        if looks_like_pref:
            yield name, locals_dict
python
{ "resource": "" }
q264037
print_file_info
validation
def print_file_info():
    """Prints file details in the current directory"""
    table = TableLogger(columns='file,created,modified,size')
    for entry in os.listdir('.'):
        stat_info = os.stat(entry)
        created = datetime.fromtimestamp(os.path.getctime(entry))
        modified = datetime.fromtimestamp(os.path.getmtime(entry))
        table(entry, created, modified, stat_info.st_size)
python
{ "resource": "" }
q264038
DispatchGroup._bind_args
validation
def _bind_args(sig, param_matchers, args, kwargs): ''' Attempt to bind the args to the type signature. First try to just bind to the signature, then ensure that all arguments match the parameter types. ''' #Bind to signature. May throw its own TypeError bound = sig.bind(*args, **kwargs) if not all(param_matcher(bound.arguments[param_name]) for param_name, param_matcher in param_matchers): raise TypeError return bound
python
{ "resource": "" }
q264039
DispatchGroup._make_all_matchers
validation
def _make_all_matchers(cls, parameters): ''' For every parameter, create a matcher if the parameter has an annotation. ''' for name, param in parameters: annotation = param.annotation if annotation is not Parameter.empty: yield name, cls._make_param_matcher(annotation, param.kind)
python
{ "resource": "" }
q264040
DispatchGroup._make_wrapper
validation
def _make_wrapper(self, func): ''' Makes a wrapper function that executes a dispatch call for func. The wrapper has the dispatch and dispatch_first attributes, so that additional overloads can be added to the group. ''' #TODO: consider using a class to make attribute forwarding easier. #TODO: consider using simply another DispatchGroup, with self.callees # assigned by reference to the original callees. @wraps(func) def executor(*args, **kwargs): return self.execute(args, kwargs) executor.dispatch = self.dispatch executor.dispatch_first = self.dispatch_first executor.func = func executor.lookup = self.lookup return executor
python
{ "resource": "" }
q264041
DispatchGroup.dispatch
validation
def dispatch(self, func):
    '''
    Adds the decorated function to this dispatch.
    '''
    entry = self._make_dispatch(func)
    self.callees.append(entry)
    return self._make_wrapper(func)
python
{ "resource": "" }
q264042
DispatchGroup.dispatch_first
validation
def dispatch_first(self, func):
    '''
    Adds the decorated function to this dispatch, at the FRONT of the
    order. Useful for allowing third parties to add overloaded
    functionality to be executed before default functionality.
    '''
    entry = self._make_dispatch(func)
    self.callees.appendleft(entry)
    return self._make_wrapper(func)
python
{ "resource": "" }
q264043
DispatchGroup.execute
validation
def execute(self, args, kwargs):
    '''
    Dispatch a call: invoke the first function whose type signature
    matches the arguments.
    '''
    target = self.lookup_explicit(args, kwargs)
    return target(*args, **kwargs)
python
{ "resource": "" }
q264044
convertShpToExtend
validation
def convertShpToExtend(pathToShp):
    """
    Reproject a shapefile's first feature to WGS84 and return its extent.

    Returns ``[maxLat, minLon, minLat, maxLon]`` computed from the
    geometry envelope, or exits the process if the shapefile cannot be
    opened.
    """
    driver = ogr.GetDriverByName('ESRI Shapefile')
    dataset = driver.Open(pathToShp)
    if dataset is not None:
        # from Layer
        layer = dataset.GetLayer()
        spatialRef = layer.GetSpatialRef()
        # from Geometry
        feature = layer.GetNextFeature()
        geom = feature.GetGeometryRef()
        # The geometry's own SRS overrides the layer's.
        spatialRef = geom.GetSpatialReference()
        # WGS84
        outSpatialRef = osr.SpatialReference()
        outSpatialRef.ImportFromEPSG(4326)
        coordTrans = osr.CoordinateTransformation(spatialRef, outSpatialRef)
        # Envelope is (minX, maxX, minY, maxY); build corner points and
        # transform them into WGS84.
        env = geom.GetEnvelope()
        pointMAX = ogr.Geometry(ogr.wkbPoint)
        pointMAX.AddPoint(env[1], env[3])
        pointMAX.Transform(coordTrans)
        pointMIN = ogr.Geometry(ogr.wkbPoint)
        pointMIN.AddPoint(env[0], env[2])
        pointMIN.Transform(coordTrans)
        # [maxY, minX, minY, maxX] after transformation.
        return [pointMAX.GetPoint()[1], pointMIN.GetPoint()[0], pointMIN.GetPoint()[1], pointMAX.GetPoint()[0]]
    else:
        exit(" shapefile not found. Please verify your path to the shapefile")
python
{ "resource": "" }
q264045
convertGribToTiff
validation
def convertGribToTiff(listeFile, listParam, listLevel, liststep, grid, startDate, endDate, outFolder):
    """Convert GRIB files to one GeoTIFF per variable/level.

    Arguments:
        listeFile (list): Paths of the GRIB files to read (deleted at the end).
        listParam (list): Parameter names expected in each file.
        listLevel (list): Levels expected in each file.
        liststep (list): Time steps per day (used to pad missing files).
        grid: Pixel size for the output geotransform.
        startDate, endDate (datetime.date): Covered period.
        outFolder (str): Output directory for the ``.tif`` files.
    """
    dicoValues = {}
    shape = None
    geoparam = None
    for l in listeFile:
        grbs = pygrib.open(l)
        grbs.seek(0)
        index = 1
        for j in range(len(listLevel), 0, -1):
            for i in range(len(listParam) - 1, -1, -1):
                grb = grbs[index]
                p = grb.name.replace(' ', '_')
                if grb.level != 0:
                    lvl = str(grb.level) + '_' + grb.typeOfLevel
                else:
                    lvl = grb.typeOfLevel
                key = p + '_' + lvl
                dicoValues.setdefault(key, []).append(grb.values)
                shape = grb.values.shape
                lat, lon = grb.latlons()
                geoparam = (lon.min(), lat.max(), grid, grid)
                index += 1
    nbJour = (endDate - startDate).days + 1
    # Pad with NaN arrays when some files are missing.
    for s in range(0, (len(liststep) * nbJour - len(listeFile))):
        for k in dicoValues.keys():
            dicoValues[k].append(np.full(shape, np.nan))
    # Write one file per variable.  BUG FIX: `dict.keys()[i]` is not
    # subscriptable on Python 3; iterate a materialized key list instead.
    # (A discarded `sorted(dictParam.items(), ...)` no-op was removed.)
    for key in list(dicoValues):
        # Index -> array mapping, in append order.
        dictParam = dict(enumerate(dicoValues[key]))
        # BUG FIX: the original '%Y%M%d' formatted minutes (%M) where the
        # month (%m) was clearly intended.
        outputImg = (outFolder + '/' + key + '_' + startDate.strftime('%Y%m%d')
                     + '_' + endDate.strftime('%Y%m%d') + '.tif')
        writeTiffFromDicoArray(dictParam, outputImg, shape, geoparam)
    for f in listeFile:
        os.remove(f)
python
{ "resource": "" }
q264046
on_pref_update
validation
def on_pref_update(*args, **kwargs):
    """Triggered on dynamic preferences model save.

    Issues DB save and reread.
    """
    Preference.update_prefs(*args, **kwargs)
    Preference.read_prefs(get_prefs())
python
{ "resource": "" }
q264047
bind_proxy
validation
def bind_proxy(values, category=None, field=None, verbose_name=None, help_text='', static=True, readonly=False):
    """Binds PrefProxy objects to module variables used by apps as preferences.

    :param list|tuple values: Preference values.
    :param str|unicode category: Category name the preference belongs to.
    :param Field field: Django model field to represent this preference.
    :param str|unicode verbose_name: Field verbose name.
    :param str|unicode help_text: Field help text.
    :param bool static: Leave this preference static (do not store in DB).
    :param bool readonly: Make this field read only.
    :rtype: list
    """
    addrs = OrderedDict()
    depth = 3  # Frames to step back to reach the calling settings module.
    for local_name, locals_dict in traverse_local_prefs(depth):
        # Map object identity -> variable name so each value can be
        # located in the caller's namespace below.
        addrs[id(locals_dict[local_name])] = local_name
    proxies = []
    locals_dict = get_frame_locals(depth)
    for value in values:  # Try to preserve fields order.
        id_val = id(value)
        if id_val in addrs:
            local_name = addrs[id_val]
            local_val = locals_dict[local_name]
            # Only wrap values still patched (and not yet proxied).
            if isinstance(local_val, PatchedLocal) and not isinstance(local_val, PrefProxy):
                proxy = PrefProxy(
                    local_name, value.val,
                    category=category,
                    field=field,
                    verbose_name=verbose_name,
                    help_text=help_text,
                    static=static,
                    readonly=readonly,
                )
                app_name = locals_dict['__name__'].split('.')[-2]  # x.y.settings -> y
                prefs = get_prefs()
                if app_name not in prefs:
                    prefs[app_name] = OrderedDict()
                prefs[app_name][local_name.lower()] = proxy
                # Replace original pref variable with a proxy.
                locals_dict[local_name] = proxy
                proxies.append(proxy)
    return proxies
python
{ "resource": "" }
q264048
register_admin_models
validation
def register_admin_models(admin_site):
    """Registers dynamically created preferences models for Admin interface.

    :param admin.AdminSite admin_site: AdminSite object.
    """
    global __MODELS_REGISTRY
    for app_label, prefs_items in get_prefs().items():
        model_class = get_pref_model_class(app_label, prefs_items, get_app_prefs)
        if model_class is None:
            continue
        __MODELS_REGISTRY[app_label] = model_class
        admin_site.register(model_class, get_pref_model_admin_class(prefs_items))
python
{ "resource": "" }
q264049
autodiscover_siteprefs
validation
def autodiscover_siteprefs(admin_site=None):
    """Automatically discovers and registers all preferences available in all apps.

    :param admin.AdminSite admin_site: Custom AdminSite object.
    """
    if admin_site is None:
        admin_site = admin.site
    # Do not discover anything if called from manage.py (e.g. executing
    # commands from cli), unless the command is known to be safe.
    invoked_by_manage = 'manage' in sys.argv[0]
    safe_command = len(sys.argv) > 1 and sys.argv[1] in MANAGE_SAFE_COMMANDS
    if not invoked_by_manage or safe_command:
        import_prefs()
        Preference.read_prefs(get_prefs())
        register_admin_models(admin_site)
python
{ "resource": "" }
q264050
unpatch_locals
validation
def unpatch_locals(depth=3):
    """Restores the original values of module variables considered
    preferences if they are still PatchedLocal and not PrefProxy,
    then removes the patching sentinel from the frame's locals.
    """
    for name, locals_dict in traverse_local_prefs(depth):
        candidate = locals_dict[name]
        if isinstance(candidate, PatchedLocal):
            locals_dict[name] = candidate.val
    del get_frame_locals(depth)[__PATCHED_LOCALS_SENTINEL]
python
{ "resource": "" }
q264051
proxy_settings_module
validation
def proxy_settings_module(depth=3):
    """Replaces a settings module with a Module proxy to intercept
    an access to settings.

    :param int depth: Frame count to go backward.
    """
    proxies = []

    modules = sys.modules
    module_name = get_frame_locals(depth)['__name__']

    module_real = modules[module_name]

    # Collect the names of all module-level preference proxies.
    for name, locals_dict in traverse_local_prefs(depth):
        value = locals_dict[name]

        if isinstance(value, PrefProxy):
            proxies.append(name)

    # Build a dynamic module type mixing in ModuleProxy and swap it into
    # sys.modules in place of the real settings module.
    new_module = type(module_name, (ModuleType, ModuleProxy), {})(module_name)  # ModuleProxy
    new_module.bind(module_real, proxies)

    modules[module_name] = new_module
python
{ "resource": "" }
q264052
register_prefs
validation
def register_prefs(*args, **kwargs):
    """Registers preferences that should be handled by siteprefs.

    Expects preferences as *args.

    Use keyword arguments to batch apply params supported by ``PrefProxy``
    to all preferences not constructed by ``pref`` and ``pref_group``.

    Batch kwargs:

        :param str|unicode help_text: Field help text.

        :param bool static: Leave this preference static (do not store in DB).

        :param bool readonly: Make this field read only.

    :param bool swap_settings_module: Whether to automatically replace settings module
        with a special ``ProxyModule`` object to access dynamic values of settings
        transparently (so not to bother with calling ``.value`` of ``PrefProxy`` object).

    :raises SitePrefsException: If ``patch_locals()`` was not called beforehand.
    """
    # Fix: pop (not get) so this meta-kwarg is not forwarded to bind_proxy(),
    # whose signature has no such parameter and would raise TypeError.
    swap_settings_module = bool(kwargs.pop('swap_settings_module', True))

    if __PATCHED_LOCALS_SENTINEL not in get_frame_locals(2):
        raise SitePrefsException('Please call `patch_locals()` right before the `register_prefs()`.')

    bind_proxy(args, **kwargs)

    unpatch_locals()

    # Optionally swap the caller's settings module for a proxy exposing live values.
    swap_settings_module and proxy_settings_module()
python
{ "resource": "" }
q264053
pref_group
validation
def pref_group(title, prefs, help_text='', static=True, readonly=False):
    """Marks preferences group.

    :param str|unicode title: Group title

    :param list|tuple prefs: Preferences to group.

    :param str|unicode help_text: Field help text.

    :param bool static: Leave this preference static (do not store in DB).

    :param bool readonly: Make this field read only.
    """
    bind_proxy(prefs, title, help_text=help_text, static=static, readonly=readonly)

    # Preferences already wrapped by pref() need their category set by hand.
    for item in (entry for entry in prefs if isinstance(entry, PrefProxy)):
        item.category = title
python
{ "resource": "" }
q264054
pref
validation
def pref(preference, field=None, verbose_name=None, help_text='', static=True, readonly=False):
    """Marks a preference.

    :param preference: Preference variable.

    :param Field field: Django model field to represent this preference.

    :param str|unicode verbose_name: Field verbose name.

    :param str|unicode help_text: Field help text.

    :param bool static: Leave this preference static (do not store in DB).

    :param bool readonly: Make this field read only.

    :rtype: PrefProxy|None
    """
    bound = bind_proxy(
        (preference,),
        field=field,
        verbose_name=verbose_name,
        help_text=help_text,
        static=static,
        readonly=readonly,
    )

    # bind_proxy returns an empty list when the value could not be matched to
    # a module-level variable; mirror the previous IndexError fallback.
    return bound[0] if bound else None
python
{ "resource": "" }
q264055
generate_versionwarning_data_json
validation
def generate_versionwarning_data_json(app, config=None, **kwargs): """ Generate the ``versionwarning-data.json`` file. This file is included in the output and read by the AJAX request when accessing to the documentation and used to compare the live versions with the curent one. Besides, this file contains meta data about the project, the API to use and the banner itself. """ # In Sphinx >= 1.8 we use ``config-initied`` signal which comes with the # ``config`` object and in Sphinx < 1.8 we use ``builder-initied`` signal # that doesn't have the ``config`` object and we take it from the ``app`` config = config or kwargs.pop('config', None) if config is None: config = app.config if config.versionwarning_project_version in config.versionwarning_messages: custom = True message = config.versionwarning_messages.get(config.versionwarning_project_version) else: custom = False message = config.versionwarning_default_message banner_html = config.versionwarning_banner_html.format( id_div=config.versionwarning_banner_id_div, banner_title=config.versionwarning_banner_title, message=message.format( **{config.versionwarning_message_placeholder: '<a href="#"></a>'}, ), admonition_type=config.versionwarning_admonition_type, ) data = json.dumps({ 'meta': { 'api_url': config.versionwarning_api_url, }, 'banner': { 'html': banner_html, 'id_div': config.versionwarning_banner_id_div, 'body_selector': config.versionwarning_body_selector, 'custom': custom, }, 'project': { 'slug': config.versionwarning_project_slug, }, 'version': { 'slug': config.versionwarning_project_version, }, }, indent=4) data_path = os.path.join(STATIC_PATH, 'data') if not os.path.exists(data_path): os.mkdir(data_path) with open(os.path.join(data_path, JSON_DATA_FILENAME), 'w') as f: f.write(data) # Add the path where ``versionwarning-data.json`` file and # ``versionwarning.js`` are saved config.html_static_path.append(STATIC_PATH)
python
{ "resource": "" }
q264056
objective
validation
def objective(param_scales=(1, 1), xstar=None, seed=None):
    """Gives objective functions a number of dimensions and parameter range

    Parameters
    ----------
    param_scales : (int, int)
        Scale (std. dev.) for choosing each parameter

    xstar : array_like
        Optimal parameters

    seed : int, optional
        Seed used by ``param_init`` so initial parameters are reproducible
    """
    ndim = len(param_scales)

    def attach(func):

        def param_init():
            # Reseed on every call so initializations are deterministic.
            np.random.seed(seed)
            return np.random.randn(ndim,) * np.array(param_scales)

        @wraps(func)
        def inner(theta):
            return func(theta)

        # Expose metadata on the wrapped objective.
        inner.ndim = ndim
        inner.param_init = param_init
        inner.xstar = xstar
        return inner

    return attach
python
{ "resource": "" }
q264057
doublewell
validation
def doublewell(theta):
    """Pointwise minimum of two quadratic bowls"""
    k_shallow, k_deep, offset = 0.01, 100, 0.5

    wide = 0.5 * k_shallow * theta ** 2 + offset
    narrow = 0.5 * k_deep * theta ** 2

    # Objective follows whichever bowl is lower; gradient follows the same bowl.
    obj = float(np.minimum(wide, narrow))
    grad = np.where(narrow < wide, k_deep * theta, k_shallow * theta)

    return obj, grad
python
{ "resource": "" }
q264058
rosenbrock
validation
def rosenbrock(theta):
    """Objective and gradient for the rosenbrock function"""
    x, y = theta
    residual = y - x ** 2

    obj = (1 - x) ** 2 + 100 * residual ** 2

    # Analytic partial derivatives of the objective above.
    grad = np.array([
        2 * x - 400 * (x * y - x ** 3) - 2,
        200 * residual,
    ])

    return obj, grad
python
{ "resource": "" }
q264059
beale
validation
def beale(theta):
    """Beale's function"""
    x, y = theta

    # The three residual terms of Beale's function.
    A = 1.5 - x + x * y
    B = 2.25 - x + x * y ** 2
    C = 2.625 - x + x * y ** 3

    obj = sum(term ** 2 for term in (A, B, C))

    grad = np.array([
        2 * A * (y - 1) + 2 * B * (y ** 2 - 1) + 2 * C * (y ** 3 - 1),
        2 * A * x + 4 * B * x * y + 6 * C * x * y ** 2
    ])

    return obj, grad
python
{ "resource": "" }
q264060
booth
validation
def booth(theta):
    """Booth's function"""
    x, y = theta

    first = x + 2 * y - 7
    second = 2 * x + y - 5

    obj = first ** 2 + second ** 2
    grad = np.array([2 * first + 4 * second, 4 * first + 2 * second])

    return obj, grad
python
{ "resource": "" }
q264061
camel
validation
def camel(theta):
    """Three-hump camel function"""
    x, y = theta

    obj = 2 * x ** 2 - 1.05 * x ** 4 + x ** 6 / 6 + x * y + y ** 2

    # Partial derivatives with respect to x and y.
    dx = 4 * x - 4.2 * x ** 3 + x ** 5 + y
    dy = x + 2 * y

    return obj, np.array([dx, dy])
python
{ "resource": "" }
q264062
bohachevsky1
validation
def bohachevsky1(theta):
    """One of the Bohachevsky functions"""
    x, y = theta

    obj = x ** 2 + 2 * y ** 2 - 0.3 * np.cos(3 * np.pi * x) - 0.4 * np.cos(4 * np.pi * y) + 0.7

    # Chain rule on the cosine terms yields the sine contributions below.
    dx = 2 * x + 0.3 * np.sin(3 * np.pi * x) * 3 * np.pi
    dy = 4 * y + 0.4 * np.sin(4 * np.pi * y) * 4 * np.pi

    return obj, np.array([dx, dy])
python
{ "resource": "" }
q264063
dixon_price
validation
def dixon_price(theta):
    """Dixon-Price function"""
    x, y = theta
    inner = 2 * y ** 2 - x

    obj = (x - 1) ** 2 + 2 * inner ** 2

    grad = np.array([
        2 * x - 2 - 4 * inner,
        16 * inner * y,
    ])

    return obj, grad
python
{ "resource": "" }
q264064
styblinski_tang
validation
def styblinski_tang(theta):
    """Styblinski-Tang function"""
    x, y = theta

    obj = 0.5 * (x ** 4 - 16 * x ** 2 + 5 * x + y ** 4 - 16 * y ** 2 + 5 * y)

    # d/du of 0.5 * (u^4 - 16u^2 + 5u) is 2u^3 - 16u + 2.5, for u in {x, y}.
    grad = np.array([
        2 * x ** 3 - 16 * x + 2.5,
        2 * y ** 3 - 16 * y + 2.5,
    ])

    return obj, grad
python
{ "resource": "" }
q264065
S3Connection.get_all_buckets
validation
def get_all_buckets(self, *args, **kwargs):
    """Return a list of buckets in MimicDB.

    :param boolean force: If true, API call is forced to S3
    """
    if kwargs.pop('force', None):
        # Hit S3 directly and mirror every bucket name into the MimicDB set.
        buckets = super(S3Connection, self).get_all_buckets(*args, **kwargs)

        for bucket in buckets:
            mimicdb.backend.sadd(tpl.connection, bucket.name)

        return buckets

    # Default path: serve bucket names from the MimicDB cache, no S3 call.
    return [Bucket(self, bucket) for bucket in mimicdb.backend.smembers(tpl.connection)]
python
{ "resource": "" }
q264066
S3Connection.get_bucket
validation
def get_bucket(self, bucket_name, validate=True, headers=None, force=None):
    """Return a bucket from MimicDB if it exists. Return a
    S3ResponseError if the bucket does not exist and validate is passed.

    :param boolean force: If true, API call is forced to S3
    """
    if force:
        # Go to S3 and remember the bucket in the MimicDB set on success.
        bucket = super(S3Connection, self).get_bucket(bucket_name, validate, headers)
        mimicdb.backend.sadd(tpl.connection, bucket.name)
        return bucket

    if mimicdb.backend.sismember(tpl.connection, bucket_name):
        return Bucket(self, bucket_name)
    else:
        if validate:
            # Bucket unknown to the cache: mimic boto's missing-bucket error.
            raise S3ResponseError(404, 'NoSuchBucket')
        else:
            return Bucket(self, bucket_name)
python
{ "resource": "" }
q264067
S3Connection.create_bucket
validation
def create_bucket(self, *args, **kwargs):
    """Add the bucket to MimicDB after successful creation.
    """
    new_bucket = super(S3Connection, self).create_bucket(*args, **kwargs)

    # Only record the bucket in the cache when S3 actually created it.
    if new_bucket:
        mimicdb.backend.sadd(tpl.connection, new_bucket.name)

    return new_bucket
python
{ "resource": "" }
q264068
S3Connection.sync
validation
def sync(self, *buckets):
    """Sync either a list of buckets or the entire connection.

    Force all API calls to S3 and populate the database with the current state of S3.

    :param \*string \*buckets: Buckets to sync
    """
    if buckets:
        for _bucket in buckets:
            # Drop all cached key metadata and the bucket's key set.
            for key in mimicdb.backend.smembers(tpl.bucket % _bucket):
                mimicdb.backend.delete(tpl.key % (_bucket, key))

            mimicdb.backend.delete(tpl.bucket % _bucket)

            # Re-list the bucket from S3 and rebuild the cache entries.
            bucket = self.get_bucket(_bucket, force=True)

            for key in bucket.list(force=True):
                mimicdb.backend.sadd(tpl.bucket % bucket.name, key.name)
                mimicdb.backend.hmset(tpl.key % (bucket.name, key.name), dict(size=key.size, md5=key.etag.strip('"')))
    else:
        # No buckets given: wipe the cache for every known bucket first...
        for bucket in mimicdb.backend.smembers(tpl.connection):
            for key in mimicdb.backend.smembers(tpl.bucket % bucket):
                mimicdb.backend.delete(tpl.key % (bucket, key))

            mimicdb.backend.delete(tpl.bucket % bucket)

        # ...then rebuild it from a full S3 listing.
        for bucket in self.get_all_buckets(force=True):
            for key in bucket.list(force=True):
                mimicdb.backend.sadd(tpl.bucket % bucket.name, key.name)
                mimicdb.backend.hmset(tpl.key % (bucket.name, key.name), dict(size=key.size, md5=key.etag.strip('"')))
python
{ "resource": "" }
q264069
Bucket.get_key
validation
def get_key(self, *args, **kwargs):
    """Return the key from MimicDB.

    :param boolean force: If true, API call is forced to S3
    """
    force = kwargs.pop('force', None)

    if force:
        # Propagate the force flag to _get_key_internal via a request header.
        kwargs.setdefault('headers', {})
        kwargs['headers']['force'] = True

    return super(Bucket, self).get_key(*args, **kwargs)
python
{ "resource": "" }
q264070
Bucket._get_key_internal
validation
def _get_key_internal(self, *args, **kwargs):
    """Return None if key is not in the bucket set.

    Pass 'force' in the headers to check S3 for the key, and after fetching
    the key from S3, save the metadata and key to the bucket set.
    """
    # args[1] is the headers dict passed down from get_key().
    if args[1] is not None and 'force' in args[1]:
        key, res = super(Bucket, self)._get_key_internal(*args, **kwargs)

        if key:
            # Cache the key name and its metadata for future lookups.
            mimicdb.backend.sadd(tpl.bucket % self.name, key.name)
            mimicdb.backend.hmset(tpl.key % (self.name, key.name), dict(size=key.size, md5=key.etag.strip('"')))

        return key, res

    # No force flag: answer purely from the cached key-name set.
    key = None

    if mimicdb.backend.sismember(tpl.bucket % self.name, args[0]):
        key = Key(self)
        key.name = args[0]

    return key, None
python
{ "resource": "" }
q264071
Bucket.get_all_keys
validation
def get_all_keys(self, *args, **kwargs):
    """Return a list of keys from MimicDB.

    :param boolean force: If true, API call is forced to S3
    """
    if kwargs.pop('force', None):
        # Headers may arrive as a kwarg or as the first positional argument;
        # fall back to a fresh dict if neither is set.
        headers = kwargs.get('headers', args[0] if len(args) else None) or dict()
        headers['force'] = True
        kwargs['headers'] = headers

    return super(Bucket, self).get_all_keys(*args, **kwargs)
python
{ "resource": "" }
q264072
Bucket.delete_keys
validation
def delete_keys(self, *args, **kwargs):
    """Remove each key or key name in an iterable from the bucket set.

    Accepts boto's ``Bucket.delete_keys`` signature: the iterable may be
    passed positionally or as the ``keys`` keyword argument, and items may
    be plain key names or key objects.
    """
    # A plain `for` drives the iterator portably; the previous manual
    # `ikeys.next()` loop was Python-2-only and otherwise equivalent.
    for key in kwargs.get('keys', args[0] if args else []):
        if isinstance(key, basestring):
            name = key
        elif isinstance(key, (BotoKey, Key)):
            name = key.name
        else:
            # Unknown item type: leave it for the superclass to handle.
            continue

        mimicdb.backend.srem(tpl.bucket % self.name, name)
        mimicdb.backend.delete(tpl.key % (self.name, name))

    return super(Bucket, self).delete_keys(*args, **kwargs)
python
{ "resource": "" }
q264073
Bucket._delete_key_internal
validation
def _delete_key_internal(self, *args, **kwargs):
    """Remove key name from bucket set.
    """
    # args[0] is the key name being deleted.
    key_name = args[0]
    mimicdb.backend.srem(tpl.bucket % self.name, key_name)
    mimicdb.backend.delete(tpl.key % (self.name, key_name))

    return super(Bucket, self)._delete_key_internal(*args, **kwargs)
python
{ "resource": "" }
q264074
Bucket.list
validation
def list(self, *args, **kwargs):
    """Return an iterable of keys from MimicDB.

    :param boolean force: If true, API call is forced to S3
    """
    if kwargs.pop('force', None):
        # Headers may arrive as a kwarg or as the fifth positional argument
        # (boto's list signature); fall back to a fresh dict.
        headers = kwargs.get('headers', args[4] if len(args) > 4 else None) or dict()
        headers['force'] = True
        kwargs['headers'] = headers

        for key in super(Bucket, self).list(*args, **kwargs):
            yield key

    else:
        # Cached path: filter the cached key-name set by prefix.
        prefix = kwargs.get('prefix', args[0] if args else '')

        for key in mimicdb.backend.smembers(tpl.bucket % self.name):
            if key.startswith(prefix):
                k = Key(self, key)

                meta = mimicdb.backend.hgetall(tpl.key % (self.name, key))

                if meta:
                    # Populate size/md5 from the cached metadata hash.
                    k._load_meta(meta['size'], meta['md5'])

                yield k
python
{ "resource": "" }
q264075
Bucket.sync
validation
def sync(self):
    """Sync a bucket.

    Force all API calls to S3 and populate the database with the current state of S3.
    """
    # Drop every cached metadata hash and the key-name set for this bucket.
    for key in mimicdb.backend.smembers(tpl.bucket % self.name):
        mimicdb.backend.delete(tpl.key % (self.name, key))

    mimicdb.backend.delete(tpl.bucket % self.name)

    # Make sure the bucket itself is registered on the connection set.
    mimicdb.backend.sadd(tpl.connection, self.name)

    # Re-list from S3 and rebuild the cache.
    for key in self.list(force=True):
        mimicdb.backend.sadd(tpl.bucket % self.name, key.name)
        mimicdb.backend.hmset(tpl.key % (self.name, key.name), dict(size=key.size, md5=key.etag.strip('"')))
python
{ "resource": "" }
q264076
lbfgs
validation
def lbfgs(x, rho, f_df, maxiter=20):
    """
    Minimize the proximal operator of a given objective using L-BFGS

    Parameters
    ----------
    f_df : function
        Returns the objective and gradient of the function to minimize

    maxiter : int
        Maximum number of L-BFGS iterations
    """
    def augmented(theta):
        # Original objective/gradient plus the proximal quadratic penalty.
        obj, grad = f_df(theta)
        diff = theta - x
        return (obj + 0.5 * rho * np.linalg.norm(diff) ** 2,
                grad + rho * diff)

    result = scipy_minimize(augmented, x, jac=True, method='L-BFGS-B',
                            options={'maxiter': maxiter, 'disp': False})
    return result.x
python
{ "resource": "" }
q264077
smooth
validation
def smooth(x, rho, penalty, axis=0, newshape=None):
    """
    Applies a smoothing operator along one dimension

    currently only accepts a matrix as input

    Parameters
    ----------
    penalty : float

    axis : int, optional
        Axis along which to apply the smoothing (Default: 0)

    newshape : tuple, optional
        Desired shape of the parameters to apply the nuclear norm to. The given
        parameters are reshaped to an array with this shape, or not reshaped if
        the value of newshape is None. (Default: None)
    """
    orig_shape = x.shape

    if newshape is not None:
        x = x.reshape(newshape)

    # Apply Laplacian smoothing (l2 norm on the parameters multiplied by
    # the laplacian)
    n = x.shape[axis]
    # Tridiagonal (2 + rho/penalty, -1, -1) operator; sparse CSC for spsolve.
    lap_op = spdiags([(2 + rho / penalty) * np.ones(n), -1 * np.ones(n), -1 * np.ones(n)], [0, -1, 1], n, n, format='csc')

    A = penalty * lap_op
    # Bring the smoothing axis to the front so the solve runs along it.
    b = rho * np.rollaxis(x, axis, 0)

    # Solve A @ z = b, move the axis back, and restore the original shape.
    return np.rollaxis(spsolve(A, b), axis, 0).reshape(orig_shape)
python
{ "resource": "" }
q264078
sdcone
validation
def sdcone(x, rho):
    """Projection onto the semidefinite cone"""
    # Clip negative eigenvalues to zero and rebuild the matrix.
    eigvals, eigvecs = np.linalg.eigh(x)
    clipped = np.maximum(eigvals, 0)
    return (eigvecs * clipped).dot(eigvecs.T)
python
{ "resource": "" }
q264079
simplex
validation
def simplex(x, rho):
    """
    Projection onto the probability simplex

    http://arxiv.org/pdf/1309.1541v1.pdf
    """
    # Sort the elements in descending order.
    descending = np.sort(x.ravel())[::-1]

    # Candidate shifts for each prefix length.
    offsets = (1 - np.cumsum(descending)) / np.arange(1.0, descending.size + 1)

    # Largest index whose shifted value stays positive determines the shift.
    ix = np.where(descending + offsets > 0)[0].max()

    return np.maximum(x + offsets[ix], 0)
python
{ "resource": "" }
q264080
columns
validation
def columns(x, rho, proxop):
    """Applies a proximal operator to the columns of a matrix"""
    out = np.zeros_like(x)

    # x.T iterates over columns of x; write each transformed column back.
    for j, col in enumerate(x.T):
        out[:, j] = proxop(col, rho)

    return out
python
{ "resource": "" }
q264081
gradient_optimizer
validation
def gradient_optimizer(coro):
    """Turns a coroutine into a gradient based optimizer."""

    class GradientOptimizer(Optimizer):

        @wraps(coro)
        def __init__(self, *args, **kwargs):
            # Prime the coroutine so it is ready to receive gradients.
            self.algorithm = coro(*args, **kwargs)
            self.algorithm.send(None)
            self.operators = []

        def set_transform(self, func):
            # Wrap a user transform so it operates on flattened parameters.
            self.transform = compose(destruct, func, self.restruct)

        def minimize(self, f_df, x0, display=sys.stdout, maxiter=1e3):

            self.display = display
            self.theta = x0

            # setup: seed the coroutine with the flattened initial parameters
            xk = self.algorithm.send(destruct(x0).copy())
            store = defaultdict(list)
            runtimes = []

            if len(self.operators) == 0:
                # Default to the identity proximal operator.
                self.operators = [proxops.identity()]

            # setup: memoized objective/gradient and the proximal pipeline
            obj, grad = wrap(f_df, x0)
            transform = compose(destruct, *reversed(self.operators), self.restruct)

            self.optional_print(tp.header(['Iteration', 'Objective', '||Grad||', 'Runtime']))
            try:
                for k in count():

                    # one iteration: evaluate, step via the coroutine, project
                    tstart = perf_counter()
                    f = obj(xk)
                    df = grad(xk)
                    xk = transform(self.algorithm.send(df))
                    runtimes.append(perf_counter() - tstart)
                    store['f'].append(f)

                    # Update display
                    self.optional_print(tp.row([k, f, np.linalg.norm(destruct(df)), tp.humantime(runtimes[-1])]))

                    if k >= maxiter:
                        break

            except KeyboardInterrupt:
                # Allow the user to stop early; results so far are returned.
                pass

            self.optional_print(tp.bottom(4))

            # cleanup
            self.optional_print(u'\u279b Final objective: {}'.format(store['f'][-1]))
            self.optional_print(u'\u279b Total runtime: {}'.format(tp.humantime(sum(runtimes))))
            self.optional_print(u'\u279b Per iteration runtime: {} +/- {}'.format(
                tp.humantime(np.mean(runtimes)),
                tp.humantime(np.std(runtimes)),
            ))

            # result
            return OptimizeResult({
                'x': self.restruct(xk),
                'f': f,
                'df': self.restruct(df),
                'k': k,
                'obj': np.array(store['f']),
            })

    return GradientOptimizer
python
{ "resource": "" }
q264082
Optimizer.add
validation
def add(self, operator, *args): """Adds a proximal operator to the list of operators""" if isinstance(operator, str): op = getattr(proxops, operator)(*args) elif isinstance(operator, proxops.ProximalOperatorBaseClass): op = operator else: raise ValueError("operator must be a string or a subclass of ProximalOperator") self.operators.append(op) return self
python
{ "resource": "" }
q264083
Key._load_meta
validation
def _load_meta(self, size, md5): """Set key attributes to retrived metadata. Might be extended in the future to support more attributes. """ if not hasattr(self, 'local_hashes'): self.local_hashes = {} self.size = int(size) if (re.match('^[a-fA-F0-9]{32}$', md5)): self.md5 = md5
python
{ "resource": "" }
q264084
Key._send_file_internal
validation
def _send_file_internal(self, *args, **kwargs):
    """Called internally for any type of upload.

    After upload finishes, make sure the key is in the bucket set and
    save the metadata.
    """
    super(Key, self)._send_file_internal(*args, **kwargs)

    meta = dict(size=self.size, md5=self.md5)
    mimicdb.backend.sadd(tpl.bucket % self.bucket.name, self.name)
    mimicdb.backend.hmset(tpl.key % (self.bucket.name, self.name), meta)
python
{ "resource": "" }
q264085
wrap
validation
def wrap(f_df, xref, size=1):
    """
    Memoizes an objective + gradient function, and splits it into
    two functions that return just the objective and gradient, respectively.

    Parameters
    ----------
    f_df : function
        Must be unary (takes a single argument)

    xref : list, dict, or array_like
        The form of the parameters

    size : int, optional
        Size of the cache (Default=1)
    """
    def call(x):
        # Rebuild structured parameters from the flat vector before calling.
        return f_df(restruct(x, xref))

    cached = lrucache(call, size)

    # First element is the objective; second (flattened) is the gradient.
    return compose(first, cached), compose(destruct, second, cached)
python
{ "resource": "" }
q264086
docstring
validation
def docstring(docstr):
    """
    Decorator factory: attaches the given docstring to the decorated function

    Parameters
    ----------
    docstr : string
    """
    def apply(func):

        @wraps(func)
        def inner(*args, **kwargs):
            return func(*args, **kwargs)

        # wraps() copied func's docstring; override it with the given one.
        inner.__doc__ = docstr
        return inner

    return apply
python
{ "resource": "" }
q264087
check_grad
validation
def check_grad(f_df, xref, stepsize=1e-6, tol=1e-6, width=15, style='round', out=sys.stdout):
    """
    Compares the numerical gradient to the analytic gradient

    Parameters
    ----------
    f_df : function
        The analytic objective and gradient function to check

    x0 : array_like
        Parameter values to check the gradient at

    stepsize : float, optional
        Stepsize for the numerical gradient. Too big and this will poorly estimate the gradient.
        Too small and you will run into precision issues (default: 1e-6)

    tol : float, optional
        Tolerance to use when coloring correct/incorrect gradients (default: 1e-5)

    width : int, optional
        Width of the table columns (default: 15)

    style : string, optional
        Style of the printed table, see tableprint for a list of styles (default: 'round')
    """
    # ANSI-colored check mark / ballot X markers for the per-dimension verdict.
    CORRECT = u'\x1b[32m\N{CHECK MARK}\x1b[0m'
    INCORRECT = u'\x1b[31m\N{BALLOT X}\x1b[0m'

    obj, grad = wrap(f_df, xref, size=0)
    x0 = destruct(xref)
    df = grad(x0)

    # header
    out.write(tp.header(["Numerical", "Analytic", "Error"], width=width, style=style) + "\n")
    out.flush()

    # helper function to parse a number
    # NOTE(review): this closes over the loop variable `error`; its `number`
    # argument is never used.
    def parse_error(number):

        # colors
        failure = "\033[91m"
        passing = "\033[92m"
        warning = "\033[93m"
        end = "\033[0m"
        base = "{}{:0.3e}{}"

        # correct
        if error < 0.1 * tol:
            return base.format(passing, error, end)

        # warning
        elif error < tol:
            return base.format(warning, error, end)

        # failure
        else:
            return base.format(failure, error, end)

    # check each dimension
    num_errors = 0
    for j in range(x0.size):

        # take a small step in one dimension
        dx = np.zeros(x0.size)
        dx[j] = stepsize

        # compute the centered difference formula
        df_approx = (obj(x0 + dx) - obj(x0 - dx)) / (2 * stepsize)
        df_analytic = df[j]

        # absolute error
        abs_error = np.linalg.norm(df_approx - df_analytic)

        # relative error (falls back to absolute error when both are ~zero)
        error = abs_error if np.allclose(abs_error, 0) else abs_error / \
            (np.linalg.norm(df_analytic) + np.linalg.norm(df_approx))

        num_errors += error >= tol
        errstr = CORRECT if error < tol else INCORRECT
        out.write(tp.row([df_approx, df_analytic, parse_error(error) + ' ' + errstr], width=width, style=style) + "\n")
        out.flush()

    out.write(tp.bottom(3, width=width, style=style) + "\n")
    return num_errors
python
{ "resource": "" }
q264088
RegressionQualityValidator.evaluate
validation
def evaluate(self, repo, spec, args):
    """
    Evaluate the files identified for checksum.

    Scans each target file for an "R-squared: <value>" line and checks it
    against the 'min-r2' thresholds given by the rules.
    """
    status = []

    # Do we have to any thing at all?
    if len(spec['files']) == 0:
        return status

    with cd(repo.rootdir):

        rules = None
        if 'rules-files' in spec and len(spec['rules-files']) > 0:
            # Merge every external JSON rules file into one dict.
            rulesfiles = spec['rules-files']
            rules = {}
            for f in rulesfiles:
                d = json.loads(open(f).read())
                rules.update(d)
        elif 'rules' in spec:
            rules = {
                'inline': spec['rules']
            }

        if rules is None or len(rules) == 0:
            print("Regression quality validation has been enabled but no rules file has been specified")
            print("Example: { 'min-r2': 0.25 }. Put this either in file or in dgit.json")
            raise InvalidParameters("Regression quality checking rules missing")

        files = dict([(f, open(f).read()) for f in spec['files']])

        for r in rules:
            if 'min-r2' not in rules[r]:
                continue
            minr2 = float(rules[r]['min-r2'])
            for f in files:
                # Pull the reported R-squared value out of the model output.
                match = re.search(r"R-squared:\s+(\d.\d+)", files[f])
                if match is None:
                    status.append({
                        'target': f,
                        'validator': self.name,
                        'description': self.description,
                        'rules': r,
                        'status': "ERROR",
                        'message': "Invalid model output"
                    })
                else:
                    r2 = match.group(1)
                    r2 = float(r2)
                    if r2 > minr2:
                        status.append({
                            'target': f,
                            'validator': self.name,
                            'description': self.description,
                            'rules': r,
                            'status': "OK",
                            'message': "Acceptable R2"
                        })
                    else:
                        status.append({
                            'target': f,
                            'validator': self.name,
                            'description': self.description,
                            'rules': r,
                            'status': "ERROR",
                            'message': "R2 is too low"
                        })

    return status
python
{ "resource": "" }
q264089
MetadataValidator.evaluate
validation
def evaluate(self, repo, spec, args):
    """
    Check the integrity of the datapackage.json

    Cross-checks files listed in datapackage.json against files on disk and
    verifies sha256 checksums for files present in both.
    """
    status = []

    with cd(repo.rootdir):

        files = spec.get('files', ['*'])
        resource_files = repo.find_matching_files(files)

        # All regular files on disk, excluding the manifest itself.
        files = glob2.glob("**/*")
        disk_files = [f for f in files if os.path.isfile(f) and f != "datapackage.json"]

        allfiles = list(set(resource_files + disk_files))
        allfiles.sort()

        for f in allfiles:
            if f in resource_files and f in disk_files:
                # Present in both: verify the recorded checksum.
                r = repo.get_resource(f)
                coded_sha256 = r['sha256']
                computed_sha256 = compute_sha256(f)
                if computed_sha256 != coded_sha256:
                    status.append({
                        'target': f,
                        'rules': "",
                        'validator': self.name,
                        'description': self.description,
                        'status': 'ERROR',
                        'message': "Mismatch in checksum on disk and in datapackage.json"
                    })
                else:
                    status.append({
                        'target': f,
                        'rules': "",
                        'validator': self.name,
                        'description': self.description,
                        'status': 'OK',
                        'message': ""
                    })
            elif f in resource_files:
                status.append({
                    'target': f,
                    'rules': "",
                    'validator': self.name,
                    'description': self.description,
                    'status': 'ERROR',
                    'message': "In datapackage.json but not in repo"
                })
            else:
                status.append({
                    'target': f,
                    'rules': "",
                    'validator': self.name,
                    'description': self.description,
                    'status': 'ERROR',
                    'message': "In repo but not in datapackage.json"
                })

    return status
python
{ "resource": "" }
q264090
TableRepresentation.read_file
validation
def read_file(self, filename):
    """
    Guess the filetype and read the file into row sets

    Returns a messytables table set, or None if the file cannot be
    opened or parsed.
    """
    try:
        # The handle is deliberately left open: messytables may read from
        # it lazily while the returned table set is consumed.
        fh = open(filename, 'rb')
        table_set = any_tableset(fh)  # guess the type...
    except Exception:
        # Could not open or recognise the file. (Previously a bare
        # `except:`, which also swallowed KeyboardInterrupt/SystemExit.)
        table_set = None

    return table_set
python
{ "resource": "" }
q264091
TableRepresentation.get_schema
validation
def get_schema(self, filename):
    """
    Guess schema using messytables

    Returns a list of [header, type, sample_value] triples, or an empty
    list if the file could not be read.
    """
    table_set = self.read_file(filename)

    # Have I been able to read the filename
    if table_set is None:
        return []

    # Get the first table as rowset
    row_set = table_set.tables[0]

    # Detect the header row, then skip past it for data processing.
    offset, headers = headers_guess(row_set.sample)
    row_set.register_processor(headers_processor(headers))
    row_set.register_processor(offset_processor(offset + 1))
    types = type_guess(row_set.sample, strict=True)

    # Get a sample as well..
    sample = next(row_set.sample)

    clean = lambda v: str(v) if not isinstance(v, str) else v
    schema = []
    for i, h in enumerate(headers):
        schema.append([h,
                       str(types[i]),
                       clean(sample[i].value)])

    return schema
python
{ "resource": "" }
q264092
int2fin_reference
validation
def int2fin_reference(n):
    """Calculates a checksum for a Finnish national reference number"""
    # Weights 7, 3, 1 repeat from the rightmost digit leftwards.
    weights = it.cycle((7, 3, 1))
    total = sum(int(digit) * weight
                for digit, weight in zip(reversed(str(n)), weights))

    # (10 - total % 10) % 10 maps the special value 10 down to 0.
    checksum = (10 - total % 10) % 10

    return "%s%s" % (n, checksum)
python
{ "resource": "" }
q264093
iso_reference_valid_char
validation
def iso_reference_valid_char(c, raise_error=True):
    """Helper to make sure the given character is valid for a reference number"""
    valid = c in ISO_REFERENCE_VALID

    if not valid and raise_error:
        raise ValueError("'%s' is not in '%s'" % (c, ISO_REFERENCE_VALID))

    return valid
python
{ "resource": "" }
q264094
iso_reference_str2int
validation
def iso_reference_str2int(n):
    """Creates the huge number from ISO alphanumeric ISO reference"""
    digits = []

    for ch in n.upper():
        # Raises ValueError if the character is not allowed at all.
        iso_reference_valid_char(ch)

        if ch in ISO_REFERENCE_VALID_NUMERIC:
            digits.append(ch)
        else:
            # Letters are replaced by their numeric code.
            digits.append(str(iso_reference_char2int(ch)))

    return int(''.join(digits))
python
{ "resource": "" }
q264095
iso_reference_isvalid
validation
def iso_reference_isvalid(ref):
    """Validates ISO reference number"""
    ref = str(ref)

    # Move the leading four characters to the end before the mod-97 check.
    rearranged = ref[4:] + ref[:4]

    return iso_reference_str2int(rearranged) % 97 == 1
python
{ "resource": "" }
q264096
barcode
validation
def barcode(iban, reference, amount, due=None):
    """Calculates virtual barcode for IBAN account number and ISO reference

    Arguments:
        iban {string} -- IBAN formed account number
        reference {string} -- ISO 11649 creditor reference
        amount {decimal.Decimal} -- Amount in euros, 0.01 - 999999.99
        due {datetime.date} -- due date
    """
    iban = iban.replace(' ', '')
    reference = reference.replace(' ', '')

    # Barcode version 5 is used for 'RF' (ISO 11649) references, version 4
    # for plain national references.
    if reference.startswith('RF'):
        version = 5
    else:
        version = 4

    if version == 5:
        reference = reference[2:]  # test RF and add 00 where needed
        if len(reference) < 23:
            # Zero-pad after the two check digits up to 23 characters.
            reference = reference[:2] + ("0" * (23 - len(reference))) + reference[2:]
    elif version == 4:
        reference = reference.zfill(20)

    if not iban.startswith('FI'):
        raise BarcodeException('Barcodes can be printed only for IBANs starting with FI')

    iban = iban[2:]
    # Amount as cents, zero-padded to 8 digits.
    amount = "%08d" % (amount.quantize(Decimal('.01')).shift(2).to_integral_value())
    if len(amount) != 8:
        raise BarcodeException("Barcode payment amount must be less than 1000000.00")

    if due:
        due = due.strftime("%y%m%d")
    else:
        # No due date is encoded as six zeros.
        due = "000000"

    if version == 4:
        barcode = "%s%s%s000%s%s" % (version, iban, amount, reference, due)
    elif version == 5:
        barcode = "%s%s%s%s%s" % (version, iban, amount, reference, due)

    return barcode
python
{ "resource": "" }
q264097
add_file_normal
validation
def add_file_normal(f, targetdir, generator,script, source):
    """
    Add a normal file including its source

    Builds the resource record (type, paths, source) for a single file and
    returns (basename, annotated_record).
    """
    basename = os.path.basename(f)
    if targetdir != ".":
        relativepath = os.path.join(targetdir, basename)
    else:
        relativepath = basename

    relpath = os.path.relpath(f, os.getcwd())
    # Classify the file; 'generator' takes precedence over 'script'.
    filetype = 'data'
    if script:
        filetype = 'script'
    if generator:
        filetype = 'generator'

    update = OrderedDict([
        ('type', filetype),
        ('generator', generator),
        ('relativepath', relativepath),
        ('content', ""),
        ('source', source),
        ('localfullpath', f),
        ('localrelativepath', relpath)
    ])

    update = annotate_record(update)

    return (basename, update)
python
{ "resource": "" }
q264098
run_executable
validation
def run_executable(repo, args, includes): """ Run the executable and capture the input and output... """ # Get platform information mgr = plugins_get_mgr() repomgr = mgr.get(what='instrumentation', name='platform') platform_metadata = repomgr.get_metadata() print("Obtaining Commit Information") (executable, commiturl) = \ find_executable_commitpath(repo, args) # Create a local directory tmpdir = tempfile.mkdtemp() # Construct the strace command print("Running the command") strace_filename = os.path.join(tmpdir,'strace.out.txt') cmd = ["strace.py", "-f", "-o", strace_filename, "-s", "1024", "-q", "--"] + args # Run the command p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) out, err = p.communicate() # Capture the stdout/stderr stdout = os.path.join(tmpdir, 'stdout.log.txt') with open(stdout, 'w') as fd: fd.write(out.decode('utf-8')) stderr = os.path.join(tmpdir, 'stderr.log.txt') with open(stderr, 'w') as fd: fd.write(err.decode('utf-8')) # Check the strace output files = extract_files(strace_filename, includes) # Now insert the execution metadata execution_metadata = { 'likelyexecutable': executable, 'commitpath': commiturl, 'args': args, } execution_metadata.update(platform_metadata) for i in range(len(files)): files[i]['execution_metadata'] = execution_metadata return files
python
{ "resource": "" }
q264099
add
validation
def add(repo, args, targetdir, execute=False, generator=False, includes=[], script=False, source=None): """ Add files to the repository by explicitly specifying them or by specifying a pattern over files accessed during execution of an executable. Parameters ---------- repo: Repository args: files or command line (a) If simply adding files, then the list of files that must be added (including any additional arguments to be passed to git (b) If files to be added are an output of a command line, then args is the command lined targetdir: Target directory to store the files execute: Args are not files to be added but scripts that must be run. includes: patterns used to select files to script: Is this a script? generator: Is this a generator source: Link to the original source of the data """ # Gather the files... if not execute: files = add_files(args=args, targetdir=targetdir, source=source, script=script, generator=generator) else: files = run_executable(repo, args, includes) if files is None or len(files) == 0: return repo # Update the repo package but with only those that have changed. filtered_files = [] package = repo.package for h in files: found = False for i, r in enumerate(package['resources']): if h['relativepath'] == r['relativepath']: found = True if h['sha256'] == r['sha256']: change = False for attr in ['source']: if h[attr] != r[attr]: r[attr] = h[attr] change = True if change: filtered_files.append(h) continue else: filtered_files.append(h) package['resources'][i] = h break if not found: filtered_files.append(h) package['resources'].append(h) if len(filtered_files) == 0: return 0 # Copy the files repo.manager.add_files(repo, filtered_files) # Write to disk... rootdir = repo.rootdir with cd(rootdir): datapath = "datapackage.json" with open(datapath, 'w') as fd: fd.write(json.dumps(package, indent=4)) return len(filtered_files)
python
{ "resource": "" }