_id
stringlengths
2
7
title
stringlengths
1
88
partition
stringclasses
3 values
text
stringlengths
75
19.8k
language
stringclasses
1 value
meta_information
dict
q264200
ManagedResource.put
validation
def put(self, request, response): """Processes a `PUT` request.""" if self.slug is None: # Mass-PUT is not implemented. raise http.exceptions.NotImplemented() # Check if the resource exists. target = self.read() # Deserialize and clean the incoming object. data = self._clean(target, self.request.read(deserialize=True)) if target is not None: # Ensure we're allowed to update the resource. self.assert_operations('update') try: # Delegate to `update` to create the item. self.update(target, data) except AttributeError: # No read method defined. raise http.exceptions.NotImplemented() # Build the response object. self.make_response(target) else: # Ensure we're allowed to create the resource. self.assert_operations('create') # Delegate to `create` to create the item. target = self.create(data) # Build the response object. self.response.status = http.client.CREATED self.make_response(target)
python
{ "resource": "" }
q264201
ManagedResource.delete
validation
def delete(self, request, response): """Processes a `DELETE` request.""" if self.slug is None: # Mass-DELETE is not implemented. raise http.exceptions.NotImplemented() # Ensure we're allowed to destroy a resource. self.assert_operations('destroy') # Delegate to `destroy` to destroy the item. self.destroy() # Build the response object. self.response.status = http.client.NO_CONTENT self.make_response()
python
{ "resource": "" }
q264202
ManagedResource.link
validation
def link(self, request, response): """Processes a `LINK` request. A `LINK` request is asking to create a relation from the currently represented URI to all of the `Link` request headers. """ from armet.resources.managed.request import read if self.slug is None: # Mass-LINK is not implemented. raise http.exceptions.NotImplemented() # Get the current target. target = self.read() # Collect all the passed link headers. links = self._parse_link_headers(request['Link']) # Pull targets for each represented link. for link in links: # Delegate to a connector. self.relate(target, read(self, link['uri'])) # Build the response object. self.response.status = http.client.NO_CONTENT self.make_response()
python
{ "resource": "" }
q264203
DjangoCreator.create_project
validation
def create_project(self): ''' Creates a base Django project ''' if os.path.exists(self._py): prj_dir = os.path.join(self._app_dir, self._project_name) if os.path.exists(prj_dir): if self._force: logging.warn('Removing existing project') shutil.rmtree(prj_dir) else: logging.warn('Found existing project; not creating (use --force to overwrite)') return logging.info('Creating project') p = subprocess.Popen('cd {0} ; {1} startproject {2} > /dev/null'.format(self._app_dir, self._ve_dir + os.sep + self._project_name + \ os.sep + 'bin' + os.sep + 'django-admin.py', self._project_name), \ shell=True) os.waitpid(p.pid, 0) else: logging.error('Unable to find Python interpreter in virtualenv') return
python
{ "resource": "" }
q264204
ilike_helper
validation
def ilike_helper(default): """Helper function that performs an `ilike` query if a string value is passed, otherwise the normal default operation.""" @functools.wraps(default) def wrapped(x, y): # String values should use ILIKE queries. if isinstance(y, six.string_types) and not isinstance(x.type, sa.Enum): return x.ilike("%" + y + "%") else: return default(x, y) return wrapped
python
{ "resource": "" }
q264205
parse
validation
def parse(text, encoding='utf8'): """Parse the querystring into a normalized form.""" # Decode the text if we got bytes. if isinstance(text, six.binary_type): text = text.decode(encoding) return Query(text, split_segments(text))
python
{ "resource": "" }
q264206
split_segments
validation
def split_segments(text, closing_paren=False): """Return objects representing segments.""" buf = StringIO() # The segments we're building, and the combinators used to combine them. # Note that after this is complete, this should be true: # len(segments) == len(combinators) + 1 # Thus we can understand the relationship between segments and combinators # like so: # s1 (c1) s2 (c2) s3 (c3) where sN are segments and cN are combination # functions. # TODO: Figure out exactly where the querystring died and post cool # error messages about it. segments = [] combinators = [] # A flag dictating if the last character we processed was a group. # This is used to determine if the next character (being a combinator) # is allowed to last_group = False # The recursive nature of this function relies on keeping track of the # state of iteration. This iterator will be passed down to recursed calls. iterator = iter(text) # Detection for exclamation points. only matters for this situation: # foo=bar&!(bar=baz) last_negation = False for character in iterator: if character in COMBINATORS: if last_negation: buf.write(constants.OPERATOR_NEGATION) # The string representation of our segment. val = buf.getvalue() reset_stringio(buf) if not last_group and not len(val): raise ValueError('Unexpected %s.' % character) # When a group happens, the previous value is empty. if len(val): segments.append(parse_segment(val)) combinators.append(COMBINATORS[character]) elif character == constants.GROUP_BEGIN: # Recursively go into the next group. if buf.tell(): raise ValueError('Unexpected %s' % character) seg = split_segments(iterator, True) if last_negation: seg = UnarySegmentCombinator(seg) segments.append(seg) # Flag that the last entry was a grouping, so that we don't panic # when the next character is a logical combinator last_group = True continue elif character == constants.GROUP_END: # Build the segment for anything remaining, and then combine # all the segments. 
val = buf.getvalue() # Check for unbalanced parens or an empty thing: foo=bar&();bar=baz if not buf.tell() or not closing_paren: raise ValueError('Unexpected %s' % character) segments.append(parse_segment(val)) return combine(segments, combinators) elif character == constants.OPERATOR_NEGATION and not buf.tell(): last_negation = True continue else: if last_negation: buf.write(constants.OPERATOR_NEGATION) if last_group: raise ValueError('Unexpected %s' % character) buf.write(character) last_negation = False last_group = False else: # Check and see if the iterator exited early (unbalanced parens) if closing_paren: raise ValueError('Expected %s.' % constants.GROUP_END) if not last_group: # Add the final segment. segments.append(parse_segment(buf.getvalue())) # Everything completed normally, combine all the segments into one # and return them. return combine(segments, combinators)
python
{ "resource": "" }
q264207
parse_segment
validation
def parse_segment(text): "we expect foo=bar" if not len(text): return NoopQuerySegment() q = QuerySegment() # First we need to split the segment into key/value pairs. This is done # by attempting to split the sequence for each equality comparison. Then # discard any that did not split properly. Then chose the smallest key # (greedily chose the first comparator we encounter in the string) # followed by the smallest value (greedily chose the largest comparator # possible.) # translate into [('=', 'foo=bar')] equalities = zip(constants.OPERATOR_EQUALITIES, itertools.repeat(text)) # Translate into [('=', ['foo', 'bar'])] equalities = map(lambda x: (x[0], x[1].split(x[0], 1)), equalities) # Remove unsplit entries and translate into [('=': ['foo', 'bar'])] # Note that the result from this stage is iterated over twice. equalities = list(filter(lambda x: len(x[1]) > 1, equalities)) # Get the smallest key and use the length of that to remove other items key_len = len(min((x[1][0] for x in equalities), key=len)) equalities = filter(lambda x: len(x[1][0]) == key_len, equalities) # Get the smallest value length. thus we have the earliest key and the # smallest value. op, (key, value) = min(equalities, key=lambda x: len(x[1][1])) key, directive = parse_directive(key) if directive: op = constants.OPERATOR_EQUALITY_FALLBACK q.directive = directive # Process negation. This comes in both foo.not= and foo!= forms. path = key.split(constants.SEP_PATH) last = path[-1] # Check for != if last.endswith(constants.OPERATOR_NEGATION): last = last[:-1] q.negated = not q.negated # Check for foo.not= if last == constants.PATH_NEGATION: path.pop(-1) q.negated = not q.negated q.values = value.split(constants.SEP_VALUE) # Check for suffixed operators (foo.gte=bar). Prioritize suffixed # entries over actual equality checks. if path[-1] in constants.OPERATOR_SUFFIXES: # The case where foo.gte<=bar, which obviously makes no sense. 
if op not in constants.OPERATOR_FALLBACK: raise ValueError( 'Both path-style operator and equality style operator ' 'provided. Please provide only a single style operator.') q.operator = constants.OPERATOR_SUFFIX_MAP[path[-1]] path.pop(-1) else: q.operator = constants.OPERATOR_EQUALITY_MAP[op] if not len(path): raise ValueError('No attribute navigation path provided.') q.path = path return q
python
{ "resource": "" }
q264208
Attribute.set
validation
def set(self, target, value): """Set the value of this attribute for the passed object. """ if not self._set: return if self.path is None: # There is no path defined on this resource. # We can do no magic to set the value. self.set = lambda *a: None return None if self._segments[target.__class__]: # Attempt to resolve access to this attribute. self.get(target) if self._segments[target.__class__]: # Attribute is not fully resolved; an interim segment is null. return # Resolve access to the parent object. # For a single-segment path this will effectively be a no-op. parent_getter = compose(*self._getters[target.__class__][:-1]) target = parent_getter(target) # Make the setter. func = self._make_setter(self.path.split('.')[-1], target.__class__) # Apply the setter now. func(target, value) # Replace this function with the constructed setter. def setter(target, value): func(parent_getter(target), value) self.set = setter
python
{ "resource": "" }
q264209
parse
validation
def parse(specifiers): """ Consumes set specifiers as text and forms a generator to retrieve the requested ranges. @param[in] specifiers Expected syntax is from the byte-range-specifier ABNF found in the [RFC 2616]; eg. 15-17,151,-16,26-278,15 @returns Consecutive tuples that describe the requested range; eg. (1, 72) or (1, 1) [read as 1 to 72 or 1 to 1]. """ specifiers = "".join(specifiers.split()) for specifier in specifiers.split(','): if len(specifier) == 0: raise ValueError("Range: Invalid syntax; missing specifier.") count = specifier.count('-') if (count and specifier[0] == '-') or not count: # Single specifier; return as a tuple to itself. yield int(specifier), int(specifier) continue specifier = list(map(int, specifier.split('-'))) if len(specifier) == 2: # Range specifier; return as a tuple. if specifier[0] < 0 or specifier[1] < 0: # Negative indexing is not supported in range specifiers # as stated in the HTTP/1.1 Range header specification. raise ValueError( "Range: Invalid syntax; negative indexing " "not supported in a range specifier.") if specifier[1] < specifier[0]: # Range must be for at least one item. raise ValueError( "Range: Invalid syntax; stop is less than start.") # Return them as a immutable tuple. yield tuple(specifier) continue # Something weird happened. raise ValueError("Range: Invalid syntax.")
python
{ "resource": "" }
q264210
paginate
validation
def paginate(request, response, items): """Paginate an iterable during a request. Magically splicling an iterable in our supported ORMs allows LIMIT and OFFSET queries. We should probably delegate this to the ORM or something in the future. """ # TODO: support dynamic rangewords and page lengths # TODO: support multi-part range requests # Get the header header = request.headers.get('Range') if not header: # No range header; move along. return items # do some validation prefix = RANGE_SPECIFIER + '=' if not header.find(prefix) == 0: # This is not using a range specifier that we understand raise exceptions.RequestedRangeNotSatisfiable() else: # Chop the prefix off the header and parse it ranges = parse(header[len(prefix):]) ranges = list(ranges) if len(ranges) > 1: raise exceptions.RequestedRangeNotSatisfiable( 'Multiple ranges in a single request is not yet supported.') start, end = ranges[0] # Make sure the length is not higher than the total number allowed. max_length = request.resource.count(items) end = min(end, max_length) response.status = client.PARTIAL_CONTENT response.headers['Content-Range'] = '%d-%d/%d' % (start, end, max_length) response.headers['Accept-Ranges'] = RANGE_SPECIFIER # Splice and return the items. items = items[start:end + 1] return items
python
{ "resource": "" }
q264211
indexesOptional
validation
def indexesOptional(f): """Decorate test methods with this if you don't require strict index checking""" stack = inspect.stack() _NO_INDEX_CHECK_NEEDED.add('%s.%s.%s' % (f.__module__, stack[1][3], f.__name__)) del stack return f
python
{ "resource": "" }
q264212
Request.read
validation
def read(self, deserialize=False, format=None): """Read and return the request data. @param[in] deserialize True to deserialize the resultant text using a determiend format or the passed format. @param[in] format A specific format to deserialize in; if provided, no detection is done. If not provided, the content-type header is looked at to determine an appropriate deserializer. """ if deserialize: data, _ = self.deserialize(format=format) return data content = self._read() if not content: return '' if type(content) is six.binary_type: content = content.decode(self.encoding) return content
python
{ "resource": "" }
q264213
use
validation
def use(**kwargs): """ Updates the active resource configuration to the passed keyword arguments. Invoking this method without passing arguments will just return the active resource configuration. @returns The previous configuration. """ config = dict(use.config) use.config.update(kwargs) return config
python
{ "resource": "" }
q264214
try_delegation
validation
def try_delegation(method): '''This decorator wraps descriptor methods with a new method that tries to delegate to a function of the same name defined on the owner instance for convenience for dispatcher clients. ''' @functools.wraps(method) def delegator(self, *args, **kwargs): if self.try_delegation: # Try to dispatch to the instance's implementation. inst = getattr(self, 'inst', None) if inst is not None: method_name = (self.delegator_prefix or '') + method.__name__ func = getattr(inst, method_name, None) if func is not None: return func(*args, **kwargs) # Otherwise run the decorated func. return method(self, *args, **kwargs) return delegator
python
{ "resource": "" }
q264215
Dispatcher.register
validation
def register(self, method, args, kwargs): '''Given a single decorated handler function, prepare, append desired data to self.registry. ''' invoc = self.dump_invoc(*args, **kwargs) self.registry.append((invoc, method.__name__))
python
{ "resource": "" }
q264216
Dispatcher.get_method
validation
def get_method(self, *args, **kwargs): '''Find the first method this input dispatches to. ''' for method in self.gen_methods(*args, **kwargs): return method msg = 'No method was found for %r on %r.' raise self.DispatchError(msg % ((args, kwargs), self.inst))
python
{ "resource": "" }
q264217
TypeDispatcher.gen_method_keys
validation
def gen_method_keys(self, *args, **kwargs): '''Given a node, return the string to use in computing the matching visitor methodname. Can also be a generator of strings. ''' token = args[0] for mro_type in type(token).__mro__[:-1]: name = mro_type.__name__ yield name
python
{ "resource": "" }
q264218
TypeDispatcher.gen_methods
validation
def gen_methods(self, *args, **kwargs): '''Find all method names this input dispatches to. ''' token = args[0] inst = self.inst prefix = self._method_prefix for method_key in self.gen_method_keys(*args, **kwargs): method = getattr(inst, prefix + method_key, None) if method is not None: yield method # Fall back to built-in types, then types, then collections. typename = type(token).__name__ yield from self.check_basetype( token, typename, self.builtins.get(typename)) for basetype_name in self.interp_types: yield from self.check_basetype( token, basetype_name, getattr(self.types, basetype_name, None)) for basetype_name in self.abc_types: yield from self.check_basetype( token, basetype_name, getattr(self.collections, basetype_name, None)) # Try the generic handler. yield from self.gen_generic()
python
{ "resource": "" }
q264219
BumpRequirement.parse
validation
def parse(cls, s, required=False): """ Parse string to create an instance :param str s: String with requirement to parse :param bool required: Is this requirement required to be fulfilled? If not, then it is a filter. """ req = pkg_resources.Requirement.parse(s) return cls(req, required=required)
python
{ "resource": "" }
q264220
RequirementsManager.add
validation
def add(self, requirements, required=None): """ Add requirements to be managed :param list/Requirement requirements: List of :class:`BumpRequirement` or :class:`pkg_resources.Requirement` :param bool required: Set required flag for each requirement if provided. """ if isinstance(requirements, RequirementsManager): requirements = list(requirements) elif not isinstance(requirements, list): requirements = [requirements] for req in requirements: name = req.project_name if not isinstance(req, BumpRequirement): req = BumpRequirement(req, required=required) elif required is not None: req.required = required add = True if name in self.requirements: for existing_req in self.requirements[name]: if req == existing_req: add = False break # Need to replace existing as the new req will be used to bump next, and req.required could be # updated. replace = False # Two pins: Use highest pinned version if (req.specs and req.specs[0][0] == '==' and existing_req.specs and existing_req.specs[0][0] == '=='): if pkg_resources.parse_version(req.specs[0][1]) < pkg_resources.parse_version( existing_req.specs[0][1]): req.requirement = existing_req.requirement replace = True # Replace Any if not (req.specs and existing_req.specs): if existing_req.specs: req.requirement = existing_req.requirement replace = True if replace: req.required |= existing_req.required if existing_req.required_by and not req.required_by: req.required_by = existing_req.required_by self.requirements[name].remove(existing_req) break if add: self.requirements[name].append(req)
python
{ "resource": "" }
q264221
RequirementsManager.satisfied_by_checked
validation
def satisfied_by_checked(self, req): """ Check if requirement is already satisfied by what was previously checked :param Requirement req: Requirement to check """ req_man = RequirementsManager([req]) return any(req_man.check(*checked) for checked in self.checked)
python
{ "resource": "" }
q264222
Bump.require
validation
def require(self, req): """ Add new requirements that must be fulfilled for this bump to occur """ reqs = req if isinstance(req, list) else [req] for req in reqs: if not isinstance(req, BumpRequirement): req = BumpRequirement(req) req.required = True req.required_by = self self.requirements.append(req)
python
{ "resource": "" }
q264223
AbstractBumper.requirements_for_changes
validation
def requirements_for_changes(self, changes): """ Parse changes for requirements :param list changes: """ requirements = [] reqs_set = set() if isinstance(changes, str): changes = changes.split('\n') if not changes or changes[0].startswith('-'): return requirements for line in changes: line = line.strip(' -+*') if not line: continue match = IS_REQUIREMENTS_RE2.search(line) # or IS_REQUIREMENTS_RE.match(line) if match: for match in REQUIREMENTS_RE.findall(match.group(1)): if match[1]: version = '==' + match[2] if match[1].startswith(' to ') else match[1] req_str = match[0] + version else: req_str = match[0] if req_str not in reqs_set: reqs_set.add(req_str) try: requirements.append(pkg_resources.Requirement.parse(req_str)) except Exception as e: log.warn('Could not parse requirement "%s" from changes: %s', req_str, e) return requirements
python
{ "resource": "" }
q264224
AbstractBumper.bump
validation
def bump(self, bump_reqs=None, **kwargs): """ Bump dependencies using given requirements. :param RequirementsManager bump_reqs: Bump requirements manager :param dict kwargs: Additional args from argparse. Some bumpers accept user options, and some not. :return: List of :class:`Bump` changes made. """ bumps = {} for existing_req in sorted(self.requirements(), key=lambda r: r.project_name): if bump_reqs and existing_req.project_name not in bump_reqs: continue bump_reqs.check(existing_req) try: bump = self._bump(existing_req, bump_reqs.get(existing_req.project_name)) if bump: bumps[bump.name] = bump bump_reqs.check(bump) except Exception as e: if bump_reqs and bump_reqs.get(existing_req.project_name) and all( r.required_by is None for r in bump_reqs.get(existing_req.project_name)): raise else: log.warn(e) for reqs in bump_reqs.required_requirements().values(): name = reqs[0].project_name if name not in bumps and self.should_add(name): try: bump = self._bump(None, reqs) if bump: bumps[bump.name] = bump bump_reqs.check(bump) except Exception as e: if all(r.required_by is None for r in reqs): raise else: log.warn(e) self.bumps.update(bumps.values()) return bumps.values()
python
{ "resource": "" }
q264225
AbstractBumper.reverse
validation
def reverse(self): """ Restore content in target file to be before any changes """ if self._original_target_content: with open(self.target, 'w') as fp: fp.write(self._original_target_content)
python
{ "resource": "" }
q264226
Serializer.serialize
validation
def serialize(self, data=None): """ Transforms the object into an acceptable format for transmission. @throws ValueError To indicate this serializer does not support the encoding of the specified object. """ if data is not None and self.response is not None: # Set the content type. self.response['Content-Type'] = self.media_types[0] # Write the encoded and prepared data to the response. self.response.write(data) # Return the serialized data. # This has normally been transformed by a base class. return data
python
{ "resource": "" }
q264227
cons
validation
def cons(collection, value): """Extends a collection with a value.""" if isinstance(value, collections.Mapping): if collection is None: collection = {} collection.update(**value) elif isinstance(value, six.string_types): if collection is None: collection = [] collection.append(value) elif isinstance(value, collections.Iterable): if collection is None: collection = [] collection.extend(value) else: if collection is None: collection = [] collection.append(value) return collection
python
{ "resource": "" }
q264228
_merge
validation
def _merge(options, name, bases, default=None): """Merges a named option collection.""" result = None for base in bases: if base is None: continue value = getattr(base, name, None) if value is None: continue result = utils.cons(result, value) value = options.get(name) if value is not None: result = utils.cons(result, value) return result or default
python
{ "resource": "" }
q264229
PyPI.package_info
validation
def package_info(cls, package): """ All package info for given package """ if package not in cls.package_info_cache: package_json_url = 'https://pypi.python.org/pypi/%s/json' % package try: logging.getLogger('requests').setLevel(logging.WARN) response = requests.get(package_json_url) response.raise_for_status() cls.package_info_cache[package] = simplejson.loads(response.text) except Exception as e: log.debug('Could not get package info from %s: %s', package_json_url, e) cls.package_info_cache[package] = None return cls.package_info_cache[package]
python
{ "resource": "" }
q264230
PyPI.all_package_versions
validation
def all_package_versions(package): """ All versions for package """ info = PyPI.package_info(package) return info and sorted(info['releases'].keys(), key=lambda x: x.split(), reverse=True) or []
python
{ "resource": "" }
q264231
Response.close
validation
def close(self): """Flush and close the stream. This is called automatically by the base resource on resources unless the resource is operating asynchronously; in that case, this method MUST be called in order to signal the end of the request. If not the request will simply hang as it is waiting for some thread to tell it to return to the client. """ # Ensure we're not closed. self.require_not_closed() if not self.streaming or self.asynchronous: # We're not streaming, auto-write content-length if not # already set. if 'Content-Length' not in self.headers: self.headers['Content-Length'] = self.tell() # Flush out the current buffer. self.flush() # We're done with the response; inform the HTTP connector # to close the response stream. self._closed = True
python
{ "resource": "" }
q264232
Response.write
validation
def write(self, chunk, serialize=False, format=None): """Writes the given chunk to the output buffer. @param[in] chunk Either a byte array, a unicode string, or a generator. If `chunk` is a generator then calling `self.write(<generator>)` is equivalent to: @code for x in <generator>: self.write(x) self.flush() @endcode @param[in] serialize True to serialize the lines in a determined serializer. @param[in] format A specific format to serialize in; if provided, no detection is done. If not provided, the accept header (as well as the URL extension) is looked at to determine an appropriate serializer. """ # Ensure we're not closed. self.require_not_closed() if chunk is None: # There is nothing here. return if serialize or format is not None: # Forward to the serializer to serialize the chunk # before it gets written to the response. self.serialize(chunk, format=format) return # `serialize` invokes write(...) if type(chunk) is six.binary_type: # Update the stream length. self._length += len(chunk) # If passed a byte string, we hope the user encoded it properly. self._stream.write(chunk) elif isinstance(chunk, six.string_types): encoding = self.encoding if encoding is not None: # If passed a string, we can encode it for the user. chunk = chunk.encode(encoding) else: # Bail; we don't have an encoding. raise exceptions.InvalidOperation( 'Attempting to write textual data without an encoding.') # Update the stream length. self._length += len(chunk) # Write the encoded data into the byte stream. self._stream.write(chunk) elif isinstance(chunk, collections.Iterable): # If passed some kind of iterator, attempt to recurse into # oblivion. for section in chunk: self.write(section) else: # Bail; we have no idea what to do with this. raise exceptions.InvalidOperation( 'Attempting to write something not recognized.')
python
{ "resource": "" }
q264233
Response.serialize
validation
def serialize(self, data, format=None): """Serializes the data into this response using a serializer. @param[in] data The data to be serialized. @param[in] format A specific format to serialize in; if provided, no detection is done. If not provided, the accept header (as well as the URL extension) is looked at to determine an appropriate serializer. @returns A tuple of the serialized text and an instance of the serializer used. """ return self._resource.serialize(data, response=self, format=format)
python
{ "resource": "" }
q264234
Response.flush
validation
def flush(self): """Flush the write buffers of the stream. This results in writing the current contents of the write buffer to the transport layer, initiating the HTTP/1.1 response. This initiates a streaming response. If the `Content-Length` header is not given then the chunked `Transfer-Encoding` is applied. """ # Ensure we're not closed. self.require_not_closed() # Pull out the accumulated chunk. chunk = self._stream.getvalue() self._stream.truncate(0) self._stream.seek(0) # Append the chunk to the body. self.body = chunk if (self._body is None) else (self._body + chunk) if self.asynchronous: # We are now streaming because we're asynchronous. self.streaming = True
python
{ "resource": "" }
q264235
Response.send
validation
def send(self, *args, **kwargs): """Writes the passed chunk and flushes it to the client.""" self.write(*args, **kwargs) self.flush()
python
{ "resource": "" }
q264236
Response.end
validation
def end(self, *args, **kwargs): """ Writes the passed chunk, flushes it to the client, and terminates the connection. """ self.send(*args, **kwargs) self.close()
python
{ "resource": "" }
q264237
replaced_directory
validation
def replaced_directory(dirname): """This ``Context Manager`` is used to move the contents of a directory elsewhere temporarily and put them back upon exit. This allows testing code to use the same file directories as normal code without fear of damage. The name of the temporary directory which contains your files is yielded. :param dirname: Path name of the directory to be replaced. Example: .. code-block:: python with replaced_directory('/foo/bar/') as rd: # "/foo/bar/" has been moved & renamed with open('/foo/bar/thing.txt', 'w') as f: f.write('stuff') f.close() # got here? => "/foo/bar/ is now restored and temp has been wiped, # "thing.txt" is gone """ if dirname[-1] == '/': dirname = dirname[:-1] full_path = os.path.abspath(dirname) if not os.path.isdir(full_path): raise AttributeError('dir_name must be a directory') base, name = os.path.split(full_path) # create a temporary directory, move provided dir into it and recreate the # directory for the user tempdir = tempfile.mkdtemp() shutil.move(full_path, tempdir) os.mkdir(full_path) try: yield tempdir finally: # done context, undo everything shutil.rmtree(full_path) moved = os.path.join(tempdir, name) shutil.move(moved, base) shutil.rmtree(tempdir)
python
{ "resource": "" }
q264238
capture_stdout
validation
def capture_stdout(): """This ``Context Manager`` redirects STDOUT to a ``StringIO`` objects which is returned from the ``Context``. On exit STDOUT is restored. Example: .. code-block:: python with capture_stdout() as capture: print('foo') # got here? => capture.getvalue() will now have "foo\\n" """ stdout = sys.stdout try: capture_out = StringIO() sys.stdout = capture_out yield capture_out finally: sys.stdout = stdout
python
{ "resource": "" }
q264239
capture_stderr
validation
def capture_stderr(): """This ``Context Manager`` redirects STDERR to a ``StringIO`` objects which is returned from the ``Context``. On exit STDERR is restored. Example: .. code-block:: python with capture_stderr() as capture: print('foo') # got here? => capture.getvalue() will now have "foo\\n" """ stderr = sys.stderr try: capture_out = StringIO() sys.stderr = capture_out yield capture_out finally: sys.stderr = stderr
python
{ "resource": "" }
q264240
Resource.urls
validation
def urls(cls): """Builds the URL configuration for this resource.""" return urls.patterns('', urls.url( r'^{}(?:$|(?P<path>[/:(.].*))'.format(cls.meta.name), cls.view, name='armet-api-{}'.format(cls.meta.name), kwargs={'resource': cls.meta.name}))
python
{ "resource": "" }
q264241
dump
validation
def dump(obj, fp, startindex=1, separator=DEFAULT, index_separator=DEFAULT):
    '''Dump an object in req format to the fp given.

    :param Mapping obj: The object to serialize.  Must have a keys method.
    :param fp: A writable that can accept all the types given.
    :param separator: The separator between key and value.  Defaults to u'|'
        or b'|', depending on the types.
    :param index_separator: The separator between key and index.  Defaults to
        u'_' or b'_', depending on the types.
    '''
    if startindex < 0:
        raise ValueError('startindex must be non-negative, but was {}'.format(startindex))

    # Peek at one key to decide whether we are emitting text or bytes.
    try:
        firstkey = next(iter(obj.keys()))
    except StopIteration:
        # Nothing to write for an empty mapping.
        return

    converter = six.u if isinstance(firstkey, six.text_type) else six.b

    if separator is DEFAULT:
        separator = converter('|')
    if index_separator is DEFAULT:
        index_separator = converter('_')
    newline = converter('\n')

    for key, value in six.iteritems(obj):
        if isinstance(value, (list, tuple, set)):
            # Sequences expand to one "key<index_sep>N<sep>item" line each.
            for position, element in enumerate(value, start=startindex):
                fp.write(key)
                fp.write(index_separator)
                fp.write(converter(str(position)))
                fp.write(separator)
                fp.write(element)
                fp.write(newline)
        else:
            fp.write(key)
            fp.write(separator)
            fp.write(value)
            fp.write(newline)
python
{ "resource": "" }
q264242
dumps
validation
def dumps(obj, startindex=1, separator=DEFAULT, index_separator=DEFAULT):
    '''Dump an object in req format to a string.

    :param Mapping obj: The object to serialize.  Must have a keys method.
    :param separator: The separator between key and value.  Defaults to u'|'
        or b'|', depending on the types.
    :param index_separator: The separator between key and index.  Defaults to
        u'_' or b'_', depending on the types.
    '''
    # The first key decides whether to buffer text or bytes.
    try:
        firstkey = next(iter(obj.keys()))
    except StopIteration:
        return str()

    buffer = StringIO() if isinstance(firstkey, six.text_type) else BytesIO()
    dump(
        obj=obj,
        fp=buffer,
        startindex=startindex,
        separator=separator,
        index_separator=index_separator,
    )
    return buffer.getvalue()
python
{ "resource": "" }
q264243
load
validation
def load(fp, separator=DEFAULT, index_separator=DEFAULT, cls=dict, list_cls=list):
    '''Load an object from the file pointer.

    :param fp: A readable filehandle.
    :param separator: The separator between key and value. Defaults to u'|' or
        b'|', depending on the types.
    :param index_separator: The separator between key and index. Defaults to
        u'_' or b'_', depending on the types.
    :param cls: A callable that returns a Mapping that is filled with pairs.
        The most common alternate option would be OrderedDict.
    :param list_cls: A callable that takes an iterable and returns a sequence.
    '''
    # The text/bytes converter is chosen lazily from the first line read.
    converter = None
    output = cls()
    # Keys that were seen with an integer suffix and therefore collect into
    # sequences at the end.
    arraykeys = set()

    for line in fp:
        if converter is None:
            # First line decides whether we are parsing text or bytes.
            if isinstance(line, six.text_type):
                converter = six.u
            else:
                converter = six.b

            default_separator = converter('|')
            default_index_separator = converter('_')
            newline = converter('\n')  # kept for symmetry; strip() removes newlines

            if separator is DEFAULT:
                separator = default_separator
            if index_separator is DEFAULT:
                index_separator = default_index_separator

        key, value = line.strip().split(separator, 1)
        keyparts = key.split(index_separator)

        # A key whose last segment parses as an int is an "array" entry.
        try:
            index = int(keyparts[-1])
            endwithint = True
        except ValueError:
            endwithint = False

        # We do everything in-place to ensure that we maintain order when using
        # an OrderedDict.
        if len(keyparts) > 1 and endwithint:
            # If this is an array key
            basekey = key.rsplit(index_separator, 1)[0]
            if basekey not in arraykeys:
                arraykeys.add(basekey)

            if basekey in output:
                # If key already exists as non-array, fix it by moving the
                # scalar to index -1 so it sorts before real indices.
                if not isinstance(output[basekey], dict):
                    output[basekey] = {-1: output[basekey]}
            else:
                output[basekey] = {}

            output[basekey][index] = value
        else:
            if key in output and isinstance(output[key], dict):
                # Plain value for a key that already collected indexed
                # entries: store it at -1 so it sorts first.
                output[key][-1] = value
            else:
                output[key] = value

    # Convert array keys: sort by index and keep only the values.
    for key in arraykeys:
        output[key] = list_cls(pair[1] for pair in sorted(six.iteritems(output[key])))

    return output
python
{ "resource": "" }
q264244
loads
validation
def loads(s, separator=DEFAULT, index_separator=DEFAULT, cls=dict, list_cls=list):
    '''Loads an object from a string.

    :param s: An object to parse
    :type s: bytes or str
    :param separator: The separator between key and value.  Defaults to u'|'
        or b'|', depending on the types.
    :param index_separator: The separator between key and index.  Defaults to
        u'_' or b'_', depending on the types.
    :param cls: A callable that returns a Mapping that is filled with pairs.
        The most common alternate option would be OrderedDict.
    :param list_cls: A callable that takes an iterable and returns a sequence.
    '''
    # Wrap the string in the matching in-memory stream and defer to load().
    buffer = StringIO(s) if isinstance(s, six.text_type) else BytesIO(s)
    return load(
        fp=buffer,
        separator=separator,
        index_separator=index_separator,
        cls=cls,
        list_cls=list_cls,
    )
python
{ "resource": "" }
q264245
BumperDriver.reverse
validation
def reverse(self):
    """Reverse all bumpers.

    Does nothing in test-drive mode or when no bumps were made.
    """
    if not self.test_drive and self.bumps:
        # Use an explicit loop: ``map`` is lazy on Python 3, so the original
        # ``map(lambda b: b.reverse(), ...)`` would never call reverse() there.
        for bumper in self.bumpers:
            bumper.reverse()
python
{ "resource": "" }
q264246
BumperDriver._expand_targets
validation
def _expand_targets(self, targets, base_dir=None): """ Expand targets by looking for '-r' in targets. """ all_targets = [] for target in targets: target_dirs = [p for p in [base_dir, os.path.dirname(target)] if p] target_dir = target_dirs and os.path.join(*target_dirs) or '' target = os.path.basename(target) target_path = os.path.join(target_dir, target) if os.path.exists(target_path): all_targets.append(target_path) with open(target_path) as fp: for line in fp: if line.startswith('-r '): _, new_target = line.split(' ', 1) all_targets.extend(self._expand_targets([new_target.strip()], base_dir=target_dir)) return all_targets
python
{ "resource": "" }
q264247
ProjectCreator.get_nginx_config
validation
def get_nginx_config(self):
    """Return the contents of the project's Nginx config file.

    Returns None when the config file does not exist.  Uses a context
    manager so the file handle is closed promptly instead of leaking
    until garbage collection (the original left the handle open).
    """
    if os.path.exists(self._nginx_config):
        with open(self._nginx_config, 'r') as config_file:
            return config_file.read()
    return None
python
{ "resource": "" }
q264248
ProjectCreator.check_directories
validation
def check_directories(self):
    """Create the project's base directories and copy Nginx support files.

    Ensures the virtualenv, app, conf, var, log and script directories all
    exist, then copies ``uwsgi_params`` and ``mime.types`` from the system
    Nginx install into the conf directory when available.  Sets
    ``self._include_mimetypes`` to True only when mime.types was copied.
    """
    self.log.debug('Checking directories')
    # Create every base directory that does not exist yet (the original
    # repeated the same if/makedirs block six times).
    for directory in (self._ve_dir, self._app_dir, self._conf_dir,
                      self._var_dir, self._log_dir, self._script_dir):
        if not os.path.exists(directory):
            os.makedirs(directory)
    # copy uwsgi_params for nginx
    uwsgi_params = '/etc/nginx/uwsgi_params'
    if os.path.exists(uwsgi_params):
        shutil.copy(uwsgi_params, self._conf_dir)
    else:
        logging.warning('Unable to find Nginx uwsgi_params. You must manually copy this to {0}.'.format(self._conf_dir))
    # copy mime.types for nginx
    mime_types = '/etc/nginx/mime.types'
    if os.path.exists(mime_types):
        shutil.copy(mime_types, self._conf_dir)
        self._include_mimetypes = True
    else:
        # logging.warn is a deprecated alias; use logging.warning.
        logging.warning('Unable to find mime.types for Nginx. You must manually copy this to {0}.'.format(self._conf_dir))
python
{ "resource": "" }
q264249
ProjectCreator.create_virtualenv
validation
def create_virtualenv(self):
    """Create the project virtualenv and install the configured modules.

    Does nothing when the ``virtualenv`` command is unavailable.  An
    existing virtualenv is left alone unless ``--force`` was given, in
    which case it is removed and recreated.
    """
    if check_command('virtualenv'):
        ve_dir = os.path.join(self._ve_dir, self._project_name)
        if os.path.exists(ve_dir):
            if self._force:
                logging.warn('Removing existing virtualenv')
                shutil.rmtree(ve_dir)
            else:
                logging.warn('Found existing virtualenv; not creating (use --force to overwrite)')
                return
        logging.info('Creating virtualenv')
        # shell=True with output discarded; waitpid blocks until the
        # subprocess finishes.
        p = subprocess.Popen('virtualenv --no-site-packages {0} > /dev/null'.format(ve_dir), shell=True)
        os.waitpid(p.pid, 0)
        # install modules
        for m in self._modules:
            self.log.info('Installing module {0}'.format(m))
            # Run the virtualenv's own pip so packages land inside the env.
            # NOTE(review): module names are interpolated into a shell
            # command; confirm they come from trusted configuration.
            p = subprocess.Popen('{0} install {1} > /dev/null'.format(os.path.join(self._ve_dir, self._project_name) + os.sep + 'bin' + os.sep + 'pip', m), shell=True)
            os.waitpid(p.pid, 0)
python
{ "resource": "" }
q264250
ProjectCreator.create_nginx_config
validation
def create_nginx_config(self):
    """Generate the Nginx configuration for the project and write it to
    ``self._nginx_config``.

    In shared-hosting mode only the ``server`` block is produced (meant to
    be included in an existing ``http`` section); otherwise a complete
    standalone configuration (user, workers, events, http wrapper) is
    written.
    """
    cfg = '# nginx config for {0}\n'.format(self._project_name)
    if not self._shared_hosting:
        # user
        if self._user:
            cfg += 'user {0};\n'.format(self._user)
        # misc nginx config
        # NOTE(review): this pid path must agree with the stop script, which
        # looks for '<var_dir>/<project>_nginx.pid' -- confirm formatting.
        cfg += 'worker_processes 1;\nerror_log {0}-errors.log;\npid {1}_nginx.pid;\n\n'.format(os.path.join(self._log_dir, self._project_name), os.path.join(self._var_dir, self._project_name))
        cfg += 'events {\n\tworker_connections 32;\n}\n\n'
        # http section
        cfg += 'http {\n'
    if self._include_mimetypes:
        cfg += '\tinclude mime.types;\n'
    cfg += '\tdefault_type application/octet-stream;\n'
    cfg += '\tclient_max_body_size 1G;\n'
    cfg += '\tproxy_max_temp_file_size 0;\n'
    cfg += '\tproxy_buffering off;\n'
    cfg += '\taccess_log {0}-access.log;\n'.format(os.path.join(self._log_dir, self._project_name))
    cfg += '\tsendfile on;\n'
    cfg += '\tkeepalive_timeout 65;\n'
    # server section
    cfg += '\tserver {\n'
    cfg += '\t\tlisten 0.0.0.0:{0};\n'.format(self._port)
    if self._server_name:
        cfg += '\t\tserver_name {0};\n'.format(self._server_name)
    # location section: everything proxies to the uWSGI unix socket
    cfg += '\t\tlocation / {\n'
    cfg += '\t\t\tuwsgi_pass unix:///{0}.sock;\n'.format(os.path.join(self._var_dir, self._project_name))
    cfg += '\t\t\tinclude uwsgi_params;\n'
    cfg += '\t\t}\n\n'
    # end location
    # error page templates
    cfg += '\t\terror_page 500 502 503 504 /50x.html;\n'
    cfg += '\t\tlocation = /50x.html {\n'
    cfg += '\t\t\troot html;\n'
    # end error page section
    cfg += '\t\t}\n'
    # end server section
    cfg += '\t}\n'
    if not self._shared_hosting:
        # end http section
        cfg += '}\n'
    # create conf
    f = open(self._nginx_config, 'w')
    f.write(cfg)
    f.close()
python
{ "resource": "" }
q264251
ProjectCreator.create_manage_scripts
validation
def create_manage_scripts(self):
    """Write start/stop shell scripts for the application into the script
    directory and mark them executable (rwxr-xr--).

    The start script launches uWSGI then Nginx; the stop script shuts both
    down via their pid files.
    """
    # create start script
    start = '# start script for {0}\n\n'.format(self._project_name)
    # start uwsgi
    start += 'echo \'Starting uWSGI...\'\n'
    start += 'sh {0}.uwsgi\n'.format(os.path.join(self._conf_dir, self._project_name))
    start += 'sleep 1\n'
    # start nginx
    start += 'echo \'Starting Nginx...\'\n'
    start += 'nginx -c {0}_nginx.conf\n'.format(os.path.join(self._conf_dir, self._project_name))
    start += 'sleep 1\n'
    start += 'echo \'{0} started\'\n\n'.format(self._project_name)
    # stop script
    stop = '# stop script for {0}\n\n'.format(self._project_name)
    # stop nginx
    stop += 'if [ -e {0}_nginx.pid ]; then nginx -c {1}_nginx.conf -s stop ; fi\n'.format(os.path.join(self._var_dir, self._project_name), os.path.join(self._conf_dir, self._project_name))
    # stop uwsgi
    stop += 'if [ -e {0}_uwsgi.pid ]; then kill -9 `cat {0}_uwsgi.pid` ; rm {0}_uwsgi.pid 2>&1 > /dev/null ; fi\n'.format(os.path.join(self._var_dir, self._project_name))
    stop += 'echo \'{0} stopped\'\n'.format(self._project_name)
    # write scripts
    start_file = '{0}_start.sh'.format(os.path.join(self._script_dir, self._project_name))
    stop_file = '{0}_stop.sh'.format(os.path.join(self._script_dir, self._project_name))
    # Context managers flush and close the handles before chmod.
    with open(start_file, 'w') as f:
        f.write(start)
    with open(stop_file, 'w') as f:
        f.write(stop)
    # make executable; 0o754 is the Py2/Py3-compatible spelling of the old
    # 0754 literal, which is a SyntaxError on Python 3.
    os.chmod(start_file, 0o754)
    os.chmod(stop_file, 0o754)
python
{ "resource": "" }
q264252
ProjectCreator.create
validation
def create(self):
    """Run the full project-creation pipeline.

    Creates the virtualenv, the project skeleton, the uWSGI launcher
    script, the Nginx config, and the start/stop management scripts,
    in that order.
    """
    steps = (
        self.create_virtualenv,      # virtual environment
        self.create_project,         # project skeleton
        self.create_uwsgi_script,    # uWSGI launcher
        self.create_nginx_config,    # web server config
        self.create_manage_scripts,  # start/stop scripts
    )
    for step in steps:
        step()
    logging.info('** Make sure to set proper permissions for the webserver user account on the var and log directories in the project root')
python
{ "resource": "" }
q264253
dasherize
validation
def dasherize(value):
    """Convert *value* to a lower-case, dash-separated form.

    'FooBar' -> 'foo-bar', 'hello_world' -> 'hello-world'.
    """
    text = value.strip()
    # Insert a dash before every capital letter.
    text = re.sub(r'([A-Z])', r'-\1', text)
    # Collapse runs of dashes, underscores and whitespace into one dash.
    text = re.sub(r'[-_\s]+', r'-', text)
    # Drop the dash the first step may have placed at the front.
    text = re.sub(r'^-', r'', text)
    return text.lower()
python
{ "resource": "" }
q264254
Resource.redirect
validation
def redirect(cls, request, response):
    """Redirect to the canonical URI for this resource.

    Legacy mode uses 301 for safe methods and 307 otherwise; modern mode
    uses 308 Permanent Redirect for everything.
    """
    if cls.meta.legacy_redirect:
        # Safe (read-only) requests may use a permanent redirect.
        safe = request.method in ('GET', 'HEAD')
        if safe:
            response.status = http.client.MOVED_PERMANENTLY
        else:
            # All other requests must use a 307.
            response.status = http.client.TEMPORARY_REDIRECT
    else:
        # Modern redirects: 308 keeps the method on replay.
        # The RFC explicitly discourages UserAgent sniffing.
        response.status = http.client.PERMANENT_REDIRECT

    # Terminate the connection.
    response.close()
python
{ "resource": "" }
q264255
Resource.parse
validation
def parse(cls, path):
    """Match *path* against the resource's configured URL patterns.

    Returns a ``(resource, params, remainder)`` triple on the first
    successful match, ``None`` when no patterns are configured at all,
    and ``False`` when patterns exist but none matched.
    """
    for resource, pattern in cls.meta.patterns:
        matched = re.match(pattern, path)
        if matched is not None:
            # Hand back the captured parameters and the unmatched tail.
            remainder = matched.string[matched.end():]
            return resource, matched.groupdict(), remainder

    # Distinguish "nothing to try" (None) from "tried and failed" (False).
    return False if cls.meta.patterns else None
python
{ "resource": "" }
q264256
Resource.traverse
validation
def traverse(cls, request, params=None): """Traverses down the path and determines the accessed resource. This makes use of the patterns array to implement simple traversal. This defaults to a no-op if there are no defined patterns. """ # Attempt to parse the path using a pattern. result = cls.parse(request.path) if result is None: # No parsing was requested; no-op. return cls, {} elif not result: # Parsing failed; raise 404. raise http.exceptions.NotFound() # Partition out the result. resource, data, rest = result if params: # Append params to data. data.update(params) if resource is None: # No traversal; return parameters. return cls, data # Modify the path appropriately. if data.get('path') is not None: request.path = data.pop('path') elif rest is not None: request.path = rest # Send us through traversal again. result = resource.traverse(request, params=data) return result
python
{ "resource": "" }
q264257
Resource.stream
validation
def stream(cls, response, sequence): """ Helper method used in conjunction with the view handler to stream responses to the client. """ # Construct the iterator and run the sequence once in order # to capture any headers and status codes set. iterator = iter(sequence) data = {'chunk': next(iterator)} response.streaming = True def streamer(): # Iterate through the iterator and yield its content while True: if response.asynchronous: # Yield our current chunk. yield data['chunk'] else: # Write the chunk to the response response.send(data['chunk']) # Yield its body yield response.body # Unset the body. response.body = None try: # Get the next chunk. data['chunk'] = next(iterator) except StopIteration: # Get out of the loop. break if not response.asynchronous: # Close the response. response.close() # Return the streaming function. return streamer()
python
{ "resource": "" }
q264258
Resource.deserialize
validation
def deserialize(self, request=None, text=None, format=None):
    """Deserializes the text using a determined deserializer.

    @param[in] request
        The request object to pull information from; normally used to
        determine the deserialization format (when `format` is not
        provided). Defaults to the instance's request when omitted.

    @param[in] text
        The text to be deserialized. Can be left blank and the
        request will be read.

    @param[in] format
        A specific format to deserialize in; if provided, no detection is
        done. If not provided, the content-type header is looked at
        to determine an appropriate deserializer.

    @returns
        A tuple of the deserialized data and an instance of the
        deserializer used.

    @raises http.exceptions.UnsupportedMediaType
        When no deserializer could be determined or deserialization
        failed.
    """
    if isinstance(self, Resource):
        if not request:
            # Ensure we have a request object.
            request = self._request

    Deserializer = None
    if format:
        # An explicit format was given; do not attempt to auto-detect
        # a deserializer.
        Deserializer = self.meta.deserializers[format]

    if not Deserializer:
        # Determine an appropriate deserializer to use by
        # introspecting the request object and looking at
        # the `Content-Type` header.
        media_ranges = request.get('Content-Type')
        if media_ranges:
            # Parse the media ranges and determine the deserializer
            # that is the closest match.
            media_types = six.iterkeys(self._deserializer_map)
            media_type = mimeparse.best_match(media_types, media_ranges)
            if media_type:
                format = self._deserializer_map[media_type]
                Deserializer = self.meta.deserializers[format]

        else:
            # Client didn't provide a content-type; we're supposed
            # to auto-detect.
            # TODO: Implement this.
            pass

    if Deserializer:
        try:
            # Attempt to deserialize the data using the determined
            # deserializer.
            deserializer = Deserializer()
            data = deserializer.deserialize(request=request, text=text)
            return data, deserializer

        except ValueError:
            # Failed to deserialize the data.
            pass

    # Failed to determine a deserializer; or failed to deserialize.
    raise http.exceptions.UnsupportedMediaType()
python
{ "resource": "" }
q264259
Resource.serialize
validation
def serialize(self, data, response=None, request=None, format=None):
    """Serializes the data using a determined serializer.

    @param[in] data
        The data to be serialized.

    @param[in] response
        The response object to serialize the data to.
        If this method is invoked as an instance method, the response
        object can be omitted and it will be taken from the instance.

    @param[in] request
        The request object to pull information from; normally used to
        determine the serialization format (when `format` is not provided).
        May be used by some serializers as well to pull additional headers.
        If this method is invoked as an instance method, the request
        object can be omitted and it will be taken from the instance.

    @param[in] format
        A specific format to serialize in; if provided, no detection is
        done. If not provided, the accept header (as well as the URL
        extension) is looked at to determine an appropriate serializer.

    @returns
        A tuple of the serialized text and an instance of the
        serializer used.

    @raises http.exceptions.NotAcceptable
        When no serializer could be determined, or serialization failed;
        carries a mapping of formats able to serialize this data.
    """
    if isinstance(self, Resource):
        if not request:
            # Ensure we have a request object.
            request = self._request

    Serializer = None
    if format:
        # An explicit format was given; do not attempt to auto-detect
        # a serializer.
        Serializer = self.meta.serializers[format]

    if not Serializer:
        # Determine an appropriate serializer to use by
        # introspecting the request object and looking at the `Accept`
        # header.
        media_ranges = (request.get('Accept') or '*/*').strip()
        if not media_ranges:
            # Default the media ranges to */*
            media_ranges = '*/*'

        if media_ranges != '*/*':
            # Parse the media ranges and determine the serializer
            # that is the closest match.
            media_types = six.iterkeys(self._serializer_map)
            media_type = mimeparse.best_match(media_types, media_ranges)
            if media_type:
                format = self._serializer_map[media_type]
                Serializer = self.meta.serializers[format]

        else:
            # Client indicated no preference; use the default.
            default = self.meta.default_serializer
            Serializer = self.meta.serializers[default]

    if Serializer:
        try:
            # Attempt to serialize the data using the determined
            # serializer.
            serializer = Serializer(request, response)
            return serializer.serialize(data), serializer

        except ValueError:
            # Failed to serialize the data.
            pass

    # Either failed to determine a serializer or failed to serialize
    # the data; construct a list of available and valid encoders.
    available = {}
    for name in self.meta.allowed_serializers:
        Serializer = self.meta.serializers[name]
        instance = Serializer(request, None)
        if instance.can_serialize(data):
            available[name] = Serializer.media_types[0]

    # Raise a Not Acceptable exception.
    raise http.exceptions.NotAcceptable(available)
python
{ "resource": "" }
q264260
Resource.dispatch
validation
def dispatch(self, request, response):
    """Entry-point of the dispatch cycle for this resource.

    Performs the work common to every request -- authentication,
    authorization and CORS headers -- before delegating to the
    HTTP-method router.
    """
    # Authenticate the caller; populates request.user.
    self.require_authentication(request)

    # Verify the caller may perform this method on this resource.
    self.require_accessibility(request.user, request.method)

    # Apply CORS headers; this must happen on every request.
    # TODO: Provide cross_domain configuration that turns this off.
    self._process_cross_domain_request(request, response)

    # Hand full control to the per-method handler.
    return self.route(request, response)
python
{ "resource": "" }
q264261
Resource.require_authentication
validation
def require_authentication(self, request):
    """Ensure we are authenticated.

    Tries each configured authentication protocol in order, storing the
    resulting user on ``request.user``.  Protocols signal "could not
    authenticate, try the next one" with False and "anonymous" with
    None; ``unauthenticated()`` is expected to raise.
    OPTIONS requests are exempt (CORS preflights carry no credentials).
    """
    request.user = user = None

    if request.method == 'OPTIONS':
        # Authentication should not be checked on an OPTIONS request.
        return

    for auth in self.meta.authentication:
        user = auth.authenticate(request)
        if user is False:
            # Authentication protocol failed to authenticate;
            # pass the baton.
            continue

        if user is None and not auth.allow_anonymous:
            # Authentication protocol determined the user is
            # unauthenticated.
            auth.unauthenticated()

        # Authentication protocol determined the user is indeed
        # authenticated (or not); Store the user for later reference.
        request.user = user
        return

    # NOTE(review): `auth` here is the last protocol tried; this assumes
    # at least one protocol is configured -- confirm.
    if not user and not auth.allow_anonymous:
        # No authenticated user found and protocol doesn't allow
        # anonymous users.
        auth.unauthenticated()
python
{ "resource": "" }
q264262
Resource.require_accessibility
validation
def require_accessibility(self, user, method):
    """Ensure *user* is allowed to access this resource via *method*.

    Delegates the decision to the configured authorization protocol;
    its ``unaccessible()`` hook is expected to raise on denial.
    """
    if method == 'OPTIONS':
        # OPTIONS (CORS preflight) is never subject to authorization.
        return

    authorization = self.meta.authorization
    if not authorization.is_accessible(user, method, self):
        # Denied; let the protocol raise its message.
        authorization.unaccessible()
python
{ "resource": "" }
q264263
Resource.require_http_allowed_method
validation
def require_http_allowed_method(cls, request):
    """Ensure the request's HTTP method is permitted for this resource.

    Raises 405 Method Not Allowed (RFC 2616 § 10.4.6), carrying the set
    of allowed methods, when it is not.
    """
    permitted = cls.meta.http_allowed_methods
    if request.method not in permitted:
        # The specified method is not allowed for the resource
        # identified by the request URI.
        raise http.exceptions.MethodNotAllowed(permitted)
python
{ "resource": "" }
q264264
Resource.route
validation
def route(self, request, response):
    """Dispatch the request to the handler named after its HTTP method.

    Raises 501 Not Implemented when the method is allowed by
    configuration but the resource defines no handler for it.
    """
    # Reject methods disallowed by configuration up front.
    self.require_http_allowed_method(request)

    # Look up e.g. self.get / self.post by lower-cased method name.
    handler = getattr(self, request.method.lower(), None)
    if handler is None:
        # Allowed but not implemented by this resource.
        raise http.exceptions.NotImplemented()

    return handler(request, response)
python
{ "resource": "" }
q264265
Resource.options
validation
def options(self, request, response): """Process an `OPTIONS` request. Used to initiate a cross-origin request. All handling specific to CORS requests is done on every request however this method also returns a list of available methods. """ # Gather a list available HTTP/1.1 methods for this URI. response['Allowed'] = ', '.join(self.meta.http_allowed_methods) # All CORS handling is done for every HTTP/1.1 method. # No more handling is neccesary; set the response to 200 and return. response.status = http.client.OK
python
{ "resource": "" }
q264266
resource
validation
def resource(**kwargs):
    """Wraps the decorated function in a lightweight resource.

    The decorated function becomes a handler on a dynamically built
    armet resource; repeated decoration under the same name appends
    additional handlers (optionally restricted to specific methods via
    the ``methods`` keyword).
    """
    def inner(function):
        # Derive the resource name from the function name unless given.
        name = kwargs.pop('name', None)
        if name is None:
            name = utils.dasherize(function.__name__)

        methods = kwargs.pop('methods', None)
        if isinstance(methods, six.string_types):
            # Tuple-ify the method if we got just a string.
            methods = methods,

        # Construct a handler.
        handler = (function, methods)

        if name not in _resources:
            # Initiate the handlers list.
            _handlers[name] = []

            # Construct a light-weight resource using the passed kwargs
            # as the arguments for the meta.
            from armet import resources
            kwargs['name'] = name

            class LightweightResource(resources.Resource):
                Meta = type(str('Meta'), (), kwargs)

                def route(self, request, response):
                    for handler, methods in _handlers[name]:
                        if methods is None or request.method in methods:
                            return handler(request, response)
                    # No handler matched; fall back to the base routing so
                    # the proper HTTP error is raised. The original call
                    # dropped the request/response arguments (TypeError)
                    # and discarded the result.
                    return resources.Resource.route(self, request, response)

            # Construct and add this resource.
            _resources[name] = LightweightResource

        # Add this to the handlers.
        _handlers[name].append(handler)

        # Return the resource.
        return _resources[name]

    # Return the inner method.
    return inner
python
{ "resource": "" }
q264267
CookieDict.render_to_string
validation
def render_to_string(self):
    """Render the stored cookies to a single ``key=value;`` string."""
    # join() avoids quadratic string concatenation in a loop.
    return ''.join('{}={};'.format(key, value) for key, value in self.items())
python
{ "resource": "" }
q264268
CookieDict.from_cookie_string
validation
def from_cookie_string(self, cookie_string):
    """Update self with the pairs parsed from *cookie_string*.

    ``key=value`` pairs are split on ';'; cookie attribute names
    (path, domain, expires, ... per COOKIE_ATTRIBUTE_NAMES) are skipped.
    """
    for key_value in cookie_string.split(';'):
        if '=' in key_value:
            key, value = key_value.split('=', 1)
        else:
            # A bare token carries no value. The original reused the
            # value left over from the previous pair here (and raised
            # NameError on the first pair); store an empty value instead.
            key, value = key_value, ''
        strip_key = key.strip()
        if strip_key and strip_key.lower() not in COOKIE_ATTRIBUTE_NAMES:
            self[strip_key] = value.strip()
python
{ "resource": "" }
q264269
AuthPolicy._add_method
validation
def _add_method(self, effect, verb, resource, conditions): """ Adds a method to the internal lists of allowed or denied methods. Each object in the internal list contains a resource ARN and a condition statement. The condition statement can be null. """ if verb != '*' and not hasattr(HttpVerb, verb): raise NameError('Invalid HTTP verb ' + verb + '. Allowed verbs in HttpVerb class') resource_pattern = re.compile(self.path_regex) if not resource_pattern.match(resource): raise NameError('Invalid resource path: ' + resource + '. Path should match ' + self.path_regex) if resource[:1] == '/': resource = resource[1:] resource_arn = ('arn:aws:execute-api:' + self.region + ':' + self.aws_account_id + ':' + self.rest_api_id + '/' + self.stage + '/' + verb + '/' + resource) if effect.lower() == 'allow': self.allowMethods.append({ 'resource_arn': resource_arn, 'conditions': conditions }) elif effect.lower() == 'deny': self.denyMethods.append({ 'resource_arn': resource_arn, 'conditions': conditions })
python
{ "resource": "" }
q264270
AuthPolicy._get_effect_statement
validation
def _get_effect_statement(self, effect, methods): """ This function loops over an array of objects containing a resourceArn and conditions statement and generates the array of statements for the policy. """ statements = [] if len(methods) > 0: statement = self._get_empty_statement(effect) for method in methods: if (method['conditions'] is None or len(method['conditions']) == 0): statement['Resource'].append(method['resource_arn']) else: cond_statement = self._get_empty_statement(effect) cond_statement['Resource'].append(method['resource_arn']) cond_statement['Condition'] = method['conditions'] statements.append(cond_statement) statements.append(statement) return statements
python
{ "resource": "" }
q264271
Deployment.deref
validation
def deref(self, data):
    """AWS doesn't quite have Swagger 2.0 validation right and will fail
    on some refs.  So, we need to convert to deref before upload.

    A JSON version of the dereferenced template is also written out for
    later use.
    """
    resolved = jsonref.JsonRef.replace_refs(data)
    # Deep-copy to obtain a plain JSON-compatible object; `json.dumps`
    # chokes on jsonref.JsonRef proxy objects otherwise.
    materialized = copy.deepcopy(resolved)
    # Write out JSON version because we might want this.
    self.write_template(materialized, filename='swagger.json')
    return materialized
python
{ "resource": "" }
q264272
check_pre_requirements
validation
def check_pre_requirements(pre_requirements):
    """Check all necessary system requirements to exist.

    :param pre_requirements:
        Sequence of pre-requirements to check by running
        ``where <pre_requirement>`` on Windows and ``which ...`` elsewhere.
        'virtualenv' is always checked in addition.
    """
    required = set(pre_requirements or [])
    required.add('virtualenv')

    for tool in required:
        if not which(tool):
            # Report the first missing tool and bail out.
            print_error('Requirement {0!r} is not found in system'.format(tool))
            return False

    return True
python
{ "resource": "" }
q264273
config_to_args
validation
def config_to_args(config):
    """Convert config dict to arguments list.

    False values are dropped, True values become bare flags, sequences
    repeat the flag once per item, and everything else becomes a
    flag/value pair.

    :param config: Configuration dict.
    """
    result = []

    for key, value in iteritems(config):
        if value is False:
            # Disabled options are omitted entirely.
            continue

        flag = '--{0}'.format(key.replace('_', '-'))

        if isinstance(value, (list, set, tuple)):
            for item in value:
                result.extend((flag, smart_str(item)))
        elif value is True:
            result.append(flag)
        else:
            result.extend((flag, smart_str(value)))

    return tuple(result)
python
{ "resource": "" }
q264274
create_env
validation
def create_env(env, args, recreate=False, ignore_activated=False,
               quiet=False):
    """Create virtual environment.

    :param env: Virtual environment name.
    :param args: Pass given arguments to ``virtualenv`` script.
    :param recerate: Recreate virtual environment? By default: False
    :param ignore_activated:
        Ignore already activated virtual environment and create new one. By
        default: False
    :param quiet: Do not output messages into terminal. By default: False
    """
    cmd = None
    result = True

    # Detect whether we are already running inside a virtualenv.
    inside_env = hasattr(sys, 'real_prefix') or os.environ.get('VIRTUAL_ENV')
    env_exists = os.path.isdir(env)

    if not quiet:
        print_message('== Step 1. Create virtual environment ==')

    # Build the command only when creation is actually required:
    # forced recreation, or no env anywhere, or an activated env is to
    # be ignored and none exists on disk yet.
    if (
        recreate or (not inside_env and not env_exists)
    ) or (
        ignore_activated and not env_exists
    ):
        cmd = ('virtualenv', ) + args + (env, )

    if not cmd and not quiet:
        if inside_env:
            message = 'Working inside of virtual environment, done...'
        else:
            message = 'Virtual environment {0!r} already created, done...'
        print_message(message.format(env))

    if cmd:
        with disable_error_handler():
            # run_cmd returns a failure code; invert it into a boolean.
            result = not run_cmd(cmd, echo=not quiet)

    if not quiet:
        print_message()

    return result
python
{ "resource": "" }
q264275
error_handler
validation
def error_handler(func):
    """Decorator that converts unhandled exceptions into a saved traceback.

    Behaviour on exception:

    * under the test runner (``BOOTSTRAPPER_TEST_KEY`` in the environment)
      the exception propagates unchanged;
    * when error handling is disabled the failure is silently swallowed
      and True is returned;
    * otherwise the traceback is written to the log file.
    """
    @wraps(func)
    def wrapped(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except BaseException as err:
            if BOOTSTRAPPER_TEST_KEY in os.environ:
                # Tests need the real exception.
                raise
            if ERROR_HANDLER_DISABLED:
                # Fail silently when the handler is switched off.
                return True
            return save_traceback(err)
    return wrapped
python
{ "resource": "" }
q264276
install
validation
def install(env, requirements, args, ignore_activated=False,
            install_dev_requirements=False, quiet=False):
    """Install library or project into virtual environment.

    :param env: Use given virtual environment name.
    :param requirements: Use given requirements file for pip.
    :param args: Pass given arguments to pip script.
    :param ignore_activated:
        Do not run pip inside already activated virtual environment. By
        default: False
    :param install_dev_requirements:
        When enabled install prefixed or suffixed dev requirements after
        original installation process completed. By default: False
    :param quiet: Do not output message to terminal. By default: False
    """
    if os.path.isfile(requirements):
        args += ('-r', requirements)
        label = 'project'
    else:
        # No requirements file: install the current directory as a library.
        args += ('-U', '-e', '.')
        label = 'library'

    if install_dev_requirements:
        dirname = os.path.dirname(requirements)
        basename, ext = os.path.splitext(os.path.basename(requirements))

        # Candidate dev requirements files, in probe order:
        #
        # * <requirements>-dev.<ext>  /  dev-<requirements>.<ext>
        # * <requirements>_dev.<ext>  /  dev_<requirements>.<ext>
        # * <requirements>dev.<ext>   /  dev<requirements>.<ext>
        #
        # where <requirements> is the basename of the given requirements
        # file and <ext> its extension.
        candidates = []
        for delimiter in ('-', '_', ''):
            candidates.append(''.join((basename, delimiter, 'dev', ext)))
            candidates.append(''.join(('dev', delimiter, basename, ext)))

        # Install the first dev requirements file found, if any.
        for candidate in candidates:
            filename = os.path.join(dirname, candidate)
            if os.path.isfile(filename):
                args += ('-r', filename)
                break

    if not quiet:
        print_message('== Step 2. Install {0} =='.format(label))

    result = not pip_cmd(env, ('install', ) + args, ignore_activated,
                         echo=not quiet)

    if not quiet:
        print_message()

    return result
python
{ "resource": "" }
q264277
iteritems
validation
def iteritems(data, **kwargs):
    """Iterate over dict items, Python 2/3 compatible."""
    if IS_PY3:
        return iter(data.items(**kwargs))
    return data.iteritems(**kwargs)
python
{ "resource": "" }
q264278
iterkeys
validation
def iterkeys(data, **kwargs):
    """Iterate over dict keys, Python 2/3 compatible."""
    if IS_PY3:
        return iter(data.keys(**kwargs))
    return data.iterkeys(**kwargs)
python
{ "resource": "" }
q264279
main
validation
def main(*args):
    r"""Bootstrap Python projects and libraries with virtualenv and pip.

    Also check system requirements before bootstrap and run post bootstrap
    hook if any.

    :param \*args: Command line arguments list.
    """
    # Parse arguments from direct input or the command line.
    with disable_error_handler():
        parsed = parse_args(args or sys.argv[1:])

    # Merge config file values with command line arguments.
    config = read_config(parsed.config, parsed)
    if config is None:
        return True
    bootstrap = config[__script__]

    # Check pre-requirements before doing any work.
    if not check_pre_requirements(bootstrap['pre_requirements']):
        return True

    # Step 1: create virtual environment.
    env_args = prepare_args(config['virtualenv'], bootstrap)
    created = create_env(
        bootstrap['env'],
        env_args,
        bootstrap['recreate'],
        bootstrap['ignore_activated'],
        bootstrap['quiet']
    )
    if not created:
        # Could not create virtual environment.
        return True

    # Step 2: install library or project into the environment.
    pip_args = prepare_args(config['pip'], bootstrap)
    installed = install(
        bootstrap['env'],
        bootstrap['requirements'],
        pip_args,
        bootstrap['ignore_activated'],
        bootstrap['install_dev_requirements'],
        bootstrap['quiet']
    )
    if not installed:
        # Could not install requirements into the environment.
        return True

    # Step 3: run post-bootstrap hook, if any.
    run_hook(bootstrap['hook'], bootstrap, bootstrap['quiet'])

    if not bootstrap['quiet']:
        print_message('All OK!')

    # ``False`` means everything went alright, exit code: 0.
    return False
python
{ "resource": "" }
q264280
parse_args
validation
def parse_args(args):
    """
    Parse args from command line by creating argument parser instance and
    process it.

    :param args: Command line arguments list.
    """
    # Imported locally rather than at module level; keeps argparse out of
    # scope until parsing is actually requested.
    from argparse import ArgumentParser

    description = ('Bootstrap Python projects and libraries with virtualenv '
                   'and pip.')

    parser = ArgumentParser(description=description)
    parser.add_argument('--version', action='version', version=__version__)
    parser.add_argument(
        '-c', '--config', default=DEFAULT_CONFIG,
        help='Path to config file. By default: {0}'.format(DEFAULT_CONFIG)
    )
    parser.add_argument(
        '-p', '--pre-requirements', default=[], nargs='+',
        help='List of pre-requirements to check, separated by space.'
    )
    parser.add_argument(
        '-e', '--env',
        help='Virtual environment name. By default: {0}'.
             format(CONFIG[__script__]['env'])
    )
    parser.add_argument(
        '-r', '--requirements',
        help='Path to requirements file. By default: {0}'.
             format(CONFIG[__script__]['requirements'])
    )
    # NOTE: boolean flags below use default=None (not False) so that
    # read_config can distinguish "flag not passed" from an explicit value —
    # it only overrides config-file values when the CLI value is not None.
    parser.add_argument(
        '-d', '--install-dev-requirements', action='store_true', default=None,
        help='Install prefixed or suffixed "dev" requirements after '
             'installation of original requirements file or library completed '
             'without errors.'
    )
    parser.add_argument('-C', '--hook', help='Execute this hook after '
                                             'bootstrap process.')
    parser.add_argument(
        '--ignore-activated', action='store_true', default=None,
        help='Ignore pre-activated virtualenv, like on Travis CI.'
    )
    parser.add_argument(
        '--recreate', action='store_true', default=None,
        help='Recreate virtualenv on every run.'
    )
    parser.add_argument(
        '-q', '--quiet', action='store_true', default=None,
        help='Minimize output, show only error messages.'
    )

    return parser.parse_args(args)
python
{ "resource": "" }
q264281
pip_cmd
validation
def pip_cmd(env, cmd, ignore_activated=False, **kwargs):
    r"""Run pip command in given or activated virtual environment.

    :param env: Virtual environment name.
    :param cmd: Pip subcommand to run (tuple/iterable of args, or empty).
    :param ignore_activated:
        Ignore activated virtual environment and use given venv instead. By
        default: False
    :param \*\*kwargs:
        Additional keyword arguments to be passed to :func:`~run_cmd`.
        The special ``return_path=True`` keyword short-circuits and returns
        the resolved path to the pip executable instead of running it.
    """
    cmd = tuple(cmd)
    dirname = safe_path(env)

    if not ignore_activated:
        activated_env = os.environ.get('VIRTUAL_ENV')

        if hasattr(sys, 'real_prefix'):
            # Interpreter itself is running inside a virtualenv.
            dirname = sys.prefix
        elif activated_env:
            dirname = activated_env

    pip_path = os.path.join(dirname, 'Scripts' if IS_WINDOWS else 'bin', 'pip')

    if kwargs.pop('return_path', False):
        return pip_path

    if not os.path.isfile(pip_path):
        raise OSError('No pip found at {0!r}'.format(pip_path))

    # Disable pip version check in tests. Guard against an empty command
    # tuple before peeking at its first item: previously ``cmd[0]`` raised
    # IndexError when ``cmd`` was empty and the test key was set.
    if BOOTSTRAPPER_TEST_KEY in os.environ and cmd and cmd[0] == 'install':
        cmd = list(cmd)
        cmd.insert(1, '--disable-pip-version-check')
        cmd = tuple(cmd)

    with disable_error_handler():
        return run_cmd((pip_path, ) + cmd, **kwargs)
python
{ "resource": "" }
q264282
prepare_args
validation
def prepare_args(config, bootstrap):
    """Convert config dict to command line args line.

    :param config: Configuration dict.
    :param bootstrap: Bootstrapper configuration dict.
    """
    config = copy.deepcopy(config)
    environ = dict(copy.deepcopy(os.environ))

    # Expose bootstrap values as template placeholders alongside the
    # environment variables.
    environ.update({
        'env': bootstrap['env'],
        'pip': pip_cmd(bootstrap['env'], '', return_path=True),
        'requirements': bootstrap['requirements'],
    })

    # A plain string config is treated as a single template.
    if isinstance(config, string_types):
        return config.format(**environ)

    # Otherwise format every string value in place, then convert the dict
    # to a flat arguments tuple.
    for key, value in iteritems(config):
        if isinstance(value, string_types):
            config[key] = value.format(**environ)

    return config_to_args(config)
python
{ "resource": "" }
q264283
print_error
validation
def print_error(message, wrap=True):
    """Print error message to stderr, using ANSI-colors.

    :param message: Message to print
    :param wrap:
        Wrap message into ``ERROR: <message>. Exit...`` template. By default:
        True
    """
    if wrap:
        message = 'ERROR: {0}. Exit...'.format(message.rstrip('.'))

    # Colorize only when colorama is available.
    if colorama:
        message = _color_wrap(colorama.Fore.RED)(message)

    return print(message, file=sys.stderr)
python
{ "resource": "" }
q264284
print_message
validation
def print_message(message=None):
    """Print message to STDOUT and flush immediately.

    Flushing after each message keeps output consistent and avoids
    situations where print messages are actually shown after messages
    from inner threads.

    The previous implementation shelled out via
    ``subprocess.call('echo "{0}"'.format(message), shell=True)``, which
    broke (and was shell-injectable) for any message containing double
    quotes, backticks or ``$``.

    :param message: Text message to print.
    :return: 0, mirroring the successful exit status the old
        ``subprocess.call``-based implementation returned.
    """
    print(message or '', file=sys.stdout)
    sys.stdout.flush()
    return 0
python
{ "resource": "" }
q264285
read_config
validation
def read_config(filename, args):
    """
    Read and parse configuration file. By default, ``filename`` is relative
    path to current work directory.

    If no config file found, default ``CONFIG`` would be used.

    :param filename: Read config from given filename.
    :param args: Parsed command line arguments.
    :return: dict of section -> {key: value}, or ``None`` on read/parse
        errors.
    """
    # Initial vars
    config = defaultdict(dict)
    splitter = operator.methodcaller('split', ' ')

    # Per-section converters applied after the generic int/bool
    # auto-conversion below.
    converters = {
        __script__: {
            'env': safe_path,
            'pre_requirements': splitter,
        },
        'pip': {
            'allow_external': splitter,
            'allow_unverified': splitter,
        }
    }

    default = copy.deepcopy(CONFIG)
    sections = set(iterkeys(default))

    # Append download-cache for old pip versions
    if int(getattr(pip, '__version__', '1.x').split('.')[0]) < 6:
        default['pip']['download_cache'] = safe_path(os.path.expanduser(
            os.path.join('~', '.{0}'.format(__script__), 'pip-cache')
        ))

    # Expand user and environ vars in config filename
    is_default = filename == DEFAULT_CONFIG
    filename = os.path.expandvars(os.path.expanduser(filename))

    # Read config if it exists on disk; a missing *explicit* config file is
    # an error, a missing default one silently falls back to CONFIG.
    if not is_default and not os.path.isfile(filename):
        print_error('Config file does not exist at {0!r}'.format(filename))
        return None

    parser = ConfigParser()

    try:
        parser.read(filename)
    except ConfigParserError:
        print_error('Cannot parse config file at {0!r}'.format(filename))
        return None

    # Apply config for each possible section
    for section in sections:
        if not parser.has_section(section):
            continue

        items = parser.items(section)

        # Make auto convert here for integers and boolean values
        for key, value in items:
            try:
                value = int(value)
            except (TypeError, ValueError):
                try:
                    value = bool(strtobool(value))
                except ValueError:
                    pass

            if section in converters and key in converters[section]:
                value = converters[section][key](value)

            config[section][key] = value

    # Update config with default values if necessary
    for section, data in iteritems(default):
        if section not in config:
            config[section] = data
        else:
            for key, value in iteritems(data):
                config[section].setdefault(key, value)

    # Update bootstrap config from parsed args
    keys = set((
        'env', 'hook', 'install_dev_requirements', 'ignore_activated',
        'pre_requirements', 'quiet', 'recreate', 'requirements'
    ))

    for key in keys:
        value = getattr(args, key)
        # Seed the key so it always exists, even when the CLI value is None.
        config[__script__].setdefault(key, value)

        # An empty pre_requirements list from the CLI does not override a
        # value read from the config file.
        if key == 'pre_requirements' and not value:
            continue

        # Only non-None CLI values override config-file values (argparse
        # flags default to None, see parse_args).
        if value is not None:
            config[__script__][key] = value

    return config
python
{ "resource": "" }
q264286
run_cmd
validation
def run_cmd(cmd, echo=False, fail_silently=False, **kwargs):
    r"""Call given command with ``subprocess.call`` function.

    :param cmd: Command to run.
    :type cmd: tuple or str
    :param echo:
        If enabled show command to call and its output in STDOUT, otherwise
        hide all output. By default: False
    :param fail_silently: Do not raise exception on error. By default: False
    :param \*\*kwargs:
        Additional keyword arguments to be passed to ``subprocess.call``
        function. STDOUT and STDERR streams would be setup inside of function
        to ensure hiding command output in case of disabling ``echo``.
    """
    out, err = None, None

    if echo:
        cmd_str = cmd if isinstance(cmd, string_types) else ' '.join(cmd)
        kwargs['stdout'], kwargs['stderr'] = sys.stdout, sys.stderr
        print_message('$ {0}'.format(cmd_str))
    else:
        out, err = get_temp_streams()
        kwargs['stdout'], kwargs['stderr'] = out, err

    try:
        retcode = subprocess.call(cmd, **kwargs)
    # Fixes vs the previous version: the exception is bound to ``exc``
    # (the old ``as err`` shadowed the temp stream ``err``, breaking the
    # ``finally`` clause on Python 3), and the non-silent failure path now
    # returns instead of falling through to the unbound ``retcode`` check.
    except subprocess.CalledProcessError as exc:
        if fail_silently:
            return False
        print_error(str(exc) if IS_PY3 else unicode(exc))  # noqa
        # Report failure with a truthy non-zero code (callers negate it).
        return 1
    finally:
        if out:
            out.close()
        if err:
            err.close()

    if retcode and echo and not fail_silently:
        print_error('Command {0!r} returned non-zero exit status {1}'.
                    format(cmd_str, retcode))

    return retcode
python
{ "resource": "" }
q264287
run_hook
validation
def run_hook(hook, config, quiet=False):
    """Run post-bootstrap hook if any.

    :param hook: Hook to run.
    :param config: Configuration dict.
    :param quiet: Do not output messages to STDOUT/STDERR. By default: False
    """
    # Nothing to do when no hook was configured.
    if not hook:
        return True

    if not quiet:
        print_message('== Step 3. Run post-bootstrap hook ==')

    exit_code = run_cmd(prepare_args(hook, config),
                        echo=not quiet,
                        fail_silently=True,
                        shell=True)

    if not quiet:
        print_message()

    # Zero exit code means success.
    return not exit_code
python
{ "resource": "" }
q264288
save_traceback
validation
def save_traceback(err):
    """Save error traceback to bootstrapper log file.

    :param err: Catched exception.
    """
    # Logs live in the ~/.bootstrapper directory; create it when missing.
    dirname = safe_path(os.path.expanduser(
        os.path.join('~', '.{0}'.format(__script__))
    ))
    if not os.path.isdir(dirname):
        os.mkdir(dirname)

    # Append the current traceback to the log file.
    filename = os.path.join(dirname, '{0}.log'.format(__script__))
    with open(filename, 'a+') as handler:
        traceback.print_exc(file=handler)

    # Show a colorized summary to the user.
    if isinstance(err, KeyboardInterrupt):
        message = 'User aborted workflow'
    else:
        message = 'Unexpected error catched'
    print_error(message)
    print_error('Full log stored to {0}'.format(filename), False)

    return True
python
{ "resource": "" }
q264289
smart_str
validation
def smart_str(value, encoding='utf-8', errors='strict'):
    """Convert Python object to string.

    :param value: Python object to convert.
    :param encoding: Encoding to use if in Python 2 given object is unicode.
    :param errors: Errors mode to use if in Python 2 given object is unicode.
    """
    if IS_PY3:
        return str(value)
    # Python 2 only: encode unicode explicitly, everything else via str().
    if isinstance(value, unicode):  # noqa
        return value.encode(encoding, errors)
    return str(value)
python
{ "resource": "" }
q264290
copy_w_plus
validation
def copy_w_plus(src, dst):
    """Copy file from `src` path to `dst` path.

    If `dst` already exists, will add '+' characters to the end of
    the basename without extension.

    Parameters
    ----------
    src: str

    dst: str

    Returns
    -------
    dstpath: str
    """
    ext = get_extension(dst)
    base = remove_ext(dst)

    # Keep appending '+' until the candidate destination is free.
    while op.exists(base + ext):
        base += '+'

    dstpath = base + ext
    shutil.copy(src, dstpath)
    return dstpath
python
{ "resource": "" }
q264291
get_abspath
validation
def get_abspath(folderpath):
    """Return the absolute path of `folderpath`.

    Raises
    ------
    FolderNotFound
        If `folderpath` does not exist.
    """
    if op.exists(folderpath):
        return op.abspath(folderpath)

    raise FolderNotFound(folderpath)
python
{ "resource": "" }
q264292
get_extension
validation
def get_extension(filepath, check_if_exists=False, allowed_exts=ALLOWED_EXTS):
    """Return the extension of fpath.

    Parameters
    ----------
    fpath: string
        File name or path

    check_if_exists: bool

    allowed_exts: dict
        Dictionary of strings, where the key if the last part of a complex
        ('.' separated) extension and the value is the previous part.
        For example: for the '.nii.gz' extension I would have a dict as
        {'.gz': ['.nii',]}

    Returns
    -------
    str
        The extension of the file name or path
    """
    if check_if_exists and not op.exists(filepath):
        raise IOError('File not found: ' + filepath)

    rest, ext = op.splitext(filepath)

    # Handle compound extensions such as '.nii.gz': when the last part is
    # registered, peek at the previous extension and join if allowed.
    if ext in allowed_exts:
        _, prev = op.splitext(rest)
        if prev in allowed_exts[ext]:
            ext = prev + ext

    return ext
python
{ "resource": "" }
q264293
add_extension_if_needed
validation
def add_extension_if_needed(filepath, ext, check_if_exists=False):
    """Add the extension `ext` to `filepath` if it doesn't have it.

    Parameters
    ----------
    filepath: str
        File name or path

    ext: str
        File extension

    check_if_exists: bool

    Returns
    -------
    File name or path with extension added, if needed.
    """
    result = filepath if filepath.endswith(ext) else filepath + ext

    if check_if_exists and not op.exists(result):
        raise IOError('File not found: ' + result)

    return result
python
{ "resource": "" }
q264294
join_path_to_filelist
validation
def join_path_to_filelist(path, filelist):
    """Join `path` to each item in `filelist`.

    Parameters
    ----------
    path: str

    filelist: list of str

    Returns
    -------
    list of filepaths
    """
    joined = []
    for item in filelist:
        joined.append(op.join(path, str(item)))
    return joined
python
{ "resource": "" }
q264295
remove_all
validation
def remove_all(filelist, folder=''):
    """Delete all files in `filelist`.

    Parameters
    ----------
    filelist: list of str
        List of the file paths to be removed

    folder: str
        Path to be used as common directory for all file paths in filelist
    """
    if folder:
        # Resolve every path relative to the common folder.
        for fpath in filelist:
            os.remove(op.join(folder, fpath))
    else:
        for fpath in filelist:
            os.remove(fpath)
python
{ "resource": "" }
q264296
ux_file_len
validation
def ux_file_len(filepath):
    """Return the number of lines in the file using the 'wc' GNU command.

    Parameters
    ----------
    filepath: str

    Returns
    -------
    float
    """
    proc = subprocess.Popen(['wc', '-l', filepath],
                            stdout=subprocess.PIPE,
                            stderr=subprocess.PIPE)
    out, err = proc.communicate()

    if proc.returncode != 0:
        raise IOError(err)

    # 'wc -l' output looks like '  3 filename'; take the leading count.
    return int(out.strip().split()[0])
python
{ "resource": "" }
q264297
merge
validation
def merge(dict_1, dict_2):
    """Merge two dictionaries.

    Values that evaluate to true take priority over falsy values.
    `dict_1` takes priority over `dict_2`.
    """
    merged = {}
    for key in set(dict_1) | set(dict_2):
        # Falsy values from dict_1 fall through to dict_2.
        merged[str(key)] = dict_1.get(key) or dict_2.get(key)
    return merged
python
{ "resource": "" }
q264298
get_sys_path
validation
def get_sys_path(rcpath, app_name, section_name=None):
    """Return a folder path if it exists.

    First will check if it is an existing system path, if it is, will return
    it expanded and absoluted.

    If this fails will look for the rcpath variable in the app_name rcfiles
    or exclusively within the given section_name, if given.

    Parameters
    ----------
    rcpath: str
        Existing folder path or variable name in app_name rcfile with an
        existing one.

    section_name: str
        Name of a section in the app_name rcfile to look exclusively there
        for variable names.

    app_name: str
        Name of the application to look for rcfile configuration files.

    Returns
    -------
    sys_path: str
        A expanded absolute file or folder path if the path exists.

    Raises
    ------
    IOError if the proposed sys_path does not exist.
    """
    # If rcpath is already an existing filesystem path, use it directly.
    if op.exists(rcpath):
        return op.realpath(op.expanduser(rcpath))

    # Otherwise treat rcpath as a variable name in the app's rcfile.
    # (The previous ``try: ... except: raise`` around this call was a no-op
    # with a bare except and has been removed.)
    settings = rcfile(app_name, section_name)

    # Look for the variable within the rcfile configurations.
    try:
        sys_path = op.expanduser(settings[rcpath])
    except KeyError:
        raise IOError('Could not find an existing variable with name {0} in'
                      ' section {1} of {2}rc config setup. Maybe it is a '
                      ' folder that could not be found.'.format(rcpath,
                                                                section_name,
                                                                app_name))

    # Found the variable; its value must point to an existing path.
    if not op.exists(sys_path):
        raise IOError('Could not find the path {3} indicated by the '
                      'variable {0} in section {1} of {2}rc config '
                      'setup.'.format(rcpath, section_name, app_name,
                                      sys_path))

    # Expand the path and return.
    return op.realpath(op.expanduser(sys_path))
python
{ "resource": "" }
q264299
rcfile
validation
def rcfile(appname, section=None, args={}, strip_dashes=True):
    """Read environment variables and config files and return them merged with
    predefined list of arguments.

    Parameters
    ----------
    appname: str
        Application name, used for config files and environment variable
        names.

    section: str
        Name of the section to be read. If this is not set: appname.

    args:
        arguments from command line (optparse, docopt, etc).

    strip_dashes: bool
        Strip dashes prefixing key names from args dict.

    Returns
    --------
    dict containing the merged variables of environment variables, config
    files and args.

    Raises
    ------
    IOError
        In case the return value is empty.

    Notes
    -----
    Environment variables are read if they start with appname in uppercase
    with underscore, for example: TEST_VAR=1

    Config files compatible with ConfigParser are read and the section name
    appname is read, example:

    [appname]
    var=1

    We can also have host-dependent configuration values, which have
    priority over the default appname values.

    [appname]
    var=1

    [appname:mylinux]
    var=3

    For boolean flags do not try to use: 'True' or 'False', 'on' or 'off',
    '1' or '0'. Unless you are willing to parse this values by yourself.
    We recommend commenting the variables out with '#' if you want to set a
    flag to False and check if it is in the rcfile cfg dict, i.e.:

    flag_value = 'flag_variable' in cfg

    Files are read from: /etc/appname/config, /etc/appfilerc,
    ~/.config/appname/config, ~/.config/appname, ~/.appname/config,
    ~/.appnamerc, appnamerc, .appnamerc, appnamerc file found in 'path'
    folder variable in args, .appnamerc file found in 'path' folder variable
    in args, file provided by 'config' variable in args.

    Example
    -------
    args = rcfile(__name__, docopt(__doc__, version=__version__))
    """
    if strip_dashes:
        # Iterate over a snapshot of the keys: the loop body pops and
        # re-inserts entries, and mutating a dict while iterating its live
        # key view raises RuntimeError on Python 3.
        for k in list(args.keys()):
            args[k.lstrip('-')] = args.pop(k)

    environ = get_environment(appname)

    if section is None:
        section = appname

    config = get_config(appname, section,
                        args.get('config', ''), args.get('path', ''))

    # Merge priority: args over config-file values, both over environment.
    config = merge(merge(args, config), environ)

    if not config:
        raise IOError('Could not find any rcfile for application '
                      '{}.'.format(appname))

    return config
python
{ "resource": "" }