_id stringlengths 2 7 | title stringlengths 1 88 | partition stringclasses 3
values | text stringlengths 75 19.8k | language stringclasses 1
value | meta_information dict |
|---|---|---|---|---|---|
q265400 | dmap | validation | def dmap(fn, record):
"""map for a directory"""
values = (fn(v) for k, v in record.items())
return dict(itertools.izip(record, values)) | python | {
"resource": ""
} |
q265401 | apply_types | validation | def apply_types(use_types, guess_type, line):
"""Apply the types on the elements of the line"""
new_line = {}
for k, v in line.items():
if use_types.has_key(k):
new_line[k] = force_type(use_types[k], v)
elif guess_type:
new_line[k] = determine_type(v)
else:
new_line[k] = v
return new_line | python | {
"resource": ""
} |
q265402 | format_to_csv | validation | def format_to_csv(filename, skiprows=0, delimiter=""):
"""Convert a file to a .csv file"""
if not delimiter:
delimiter = "\t"
input_file = open(filename, "r")
if skiprows:
[input_file.readline() for _ in range(skiprows)]
new_filename = os.path.splitext(filename)[0] + ".csv"
output_file = open(new_filename, "w")
header = input_file.readline().split()
reader = csv.DictReader(input_file, fieldnames=header, delimiter=delimiter)
writer = csv.DictWriter(output_file, fieldnames=header, delimiter=",")
# Write header
writer.writerow(dict((x, x) for x in header))
# Write rows
for line in reader:
if None in line: del line[None]
writer.writerow(line)
input_file.close()
output_file.close()
print "Saved %s." % new_filename | python | {
"resource": ""
} |
q265403 | admin_obj_link | validation | def admin_obj_link(obj, display=''):
"""Returns a link to the django admin change list with a filter set to
only the object given.
:param obj:
Object to create the admin change list display link for
:param display:
Text to display in the link. Defaults to string call of the object
:returns:
Text containing HTML for a link
"""
# get the url for the change list for this object
url = reverse('admin:%s_%s_changelist' % (obj._meta.app_label,
obj._meta.model_name))
url += '?id__exact=%s' % obj.id
text = str(obj)
if display:
text = display
return format_html('<a href="{}">{}</a>', url, text) | python | {
"resource": ""
} |
q265404 | _obj_display | validation | def _obj_display(obj, display=''):
"""Returns string representation of an object, either the default or based
on the display template passed in.
"""
result = ''
if not display:
result = str(obj)
else:
template = Template(display)
context = Context({'obj':obj})
result = template.render(context)
return result | python | {
"resource": ""
} |
q265405 | FancyModelAdmin.add_link | validation | def add_link(cls, attr, title='', display=''):
"""Adds a ``list_display`` attribute that appears as a link to the
django admin change page for the type of object being shown. Supports
double underscore attribute name dereferencing.
:param attr:
Name of the attribute to dereference from the corresponding
        object, i.e. what will be linked to. This name supports double
underscore object link referencing for ``models.ForeignKey``
members.
:param title:
Title for the column of the django admin table. If not given it
defaults to a capitalized version of ``attr``
:param display:
What to display as the text for the link being shown. If not
given it defaults to the string representation of the object for
the row: ``str(obj)`` . This parameter supports django
templating, the context for which contains a dictionary key named
"obj" with the value being the object for the row.
Example usage:
.. code-block:: python
# ---- admin.py file ----
base = fancy_modeladmin('id')
base.add_link('author', 'Our Authors',
'{{obj.name}} (id={{obj.id}})')
@admin.register(Book)
class BookAdmin(base):
pass
The django admin change page for the Book class would have a column
for "id" and another titled "Our Authors". The "Our Authors" column
would have a link for each Author object referenced by "book.author".
The link would go to the Author django admin change listing. The
display of the link would be the name of the author with the id in
    brackets, e.g. "Douglas Adams (id=42)"
"""
global klass_count
klass_count += 1
fn_name = 'dyn_fn_%d' % klass_count
cls.list_display.append(fn_name)
if not title:
title = attr.capitalize()
# python scoping is a bit weird with default values, if it isn't
# referenced the inner function won't see it, so assign it for use
_display = display
def _link(self, obj):
field_obj = admin_obj_attr(obj, attr)
if not field_obj:
return ''
text = _obj_display(field_obj, _display)
return admin_obj_link(field_obj, text)
_link.short_description = title
_link.allow_tags = True
_link.admin_order_field = attr
setattr(cls, fn_name, _link) | python | {
"resource": ""
} |
q265406 | FancyModelAdmin.add_object | validation | def add_object(cls, attr, title='', display=''):
"""Adds a ``list_display`` attribute showing an object. Supports
double underscore attribute name dereferencing.
:param attr:
Name of the attribute to dereference from the corresponding
        object, i.e. what will be linked to. This name supports double
underscore object link referencing for ``models.ForeignKey``
members.
:param title:
Title for the column of the django admin table. If not given it
defaults to a capitalized version of ``attr``
:param display:
What to display as the text for the link being shown. If not
given it defaults to the string representation of the object for
the row: ``str(obj)``. This parameter supports django templating,
the context for which contains a dictionary key named "obj" with
the value being the object for the row.
"""
global klass_count
klass_count += 1
fn_name = 'dyn_fn_%d' % klass_count
cls.list_display.append(fn_name)
if not title:
title = attr.capitalize()
# python scoping is a bit weird with default values, if it isn't
# referenced the inner function won't see it, so assign it for use
_display = display
def _ref(self, obj):
field_obj = admin_obj_attr(obj, attr)
if not field_obj:
return ''
return _obj_display(field_obj, _display)
_ref.short_description = title
_ref.allow_tags = True
_ref.admin_order_field = attr
setattr(cls, fn_name, _ref) | python | {
"resource": ""
} |
q265407 | FancyModelAdmin.add_formatted_field | validation | def add_formatted_field(cls, field, format_string, title=''):
"""Adds a ``list_display`` attribute showing a field in the object
using a python %formatted string.
:param field:
Name of the field in the object.
:param format_string:
        An old-style (to remain python 2.x compatible) % string formatter
with a single variable reference. The named ``field`` attribute
will be passed to the formatter using the "%" operator.
:param title:
Title for the column of the django admin table. If not given it
defaults to a capitalized version of ``field``
"""
global klass_count
klass_count += 1
fn_name = 'dyn_fn_%d' % klass_count
cls.list_display.append(fn_name)
if not title:
title = field.capitalize()
# python scoping is a bit weird with default values, if it isn't
# referenced the inner function won't see it, so assign it for use
_format_string = format_string
def _ref(self, obj):
return _format_string % getattr(obj, field)
_ref.short_description = title
_ref.allow_tags = True
_ref.admin_order_field = field
setattr(cls, fn_name, _ref) | python | {
"resource": ""
} |
q265408 | post_required | validation | def post_required(method_or_options=[]):
"""View decorator that enforces that the method was called using POST.
This decorator can be called with or without parameters. As it is
expected to wrap a view, the first argument of the method being wrapped is
expected to be a ``request`` object.
.. code-block:: python
@post_required
def some_view(request):
pass
@post_required(['firstname', 'lastname'])
def some_view(request):
pass
The optional parameter contains a single list which specifies the names of
the expected fields in the POST dictionary. The list is not exclusive,
you can pass in fields that are not checked by the decorator.
:param options:
List of the names of expected POST keys.
"""
def decorator(method):
# handle wrapping or wrapping with arguments; if no arguments (and no
# calling parenthesis) then method_or_options will be a list,
# otherwise it will be the wrapped function
expected_fields = []
if not callable(method_or_options):
# not callable means wrapping with arguments
expected_fields = method_or_options
@wraps(method)
def wrapper(*args, **kwargs):
request = args[0]
if request.method != 'POST':
logger.error('POST required for this url')
raise Http404('only POST allowed for this url')
missing = []
for field in expected_fields:
if field not in request.POST:
missing.append(field)
if missing:
s = 'Expected fields missing in POST: %s' % missing
logger.error(s)
raise Http404(s)
# everything verified, run the view
return method(*args, **kwargs)
return wrapper
if callable(method_or_options):
# callable means decorated method without options, call our decorator
return decorator(method_or_options)
return decorator | python | {
"resource": ""
} |
q265409 | json_post_required | validation | def json_post_required(*decorator_args):
"""View decorator that enforces that the method was called using POST and
contains a field containing a JSON dictionary. This method should
only be used to wrap views and assumes the first argument of the method
being wrapped is a ``request`` object.
.. code-block:: python
@json_post_required('data', 'json_data')
def some_view(request):
username = request.json_data['username']
:param field:
The name of the POST field that contains a JSON dictionary
:param request_name:
[optional] Name of the parameter on the request to put the
deserialized JSON data. If not given the field name is used
"""
def decorator(method):
@wraps(method)
def wrapper(*args, **kwargs):
field = decorator_args[0]
if len(decorator_args) == 2:
request_name = decorator_args[1]
else:
request_name = field
request = args[0]
if request.method != 'POST':
logger.error('POST required for this url')
raise Http404('only POST allowed for this url')
if field not in request.POST:
s = 'Expected field named %s in POST' % field
logger.error(s)
raise Http404(s)
# deserialize the JSON and put it in the request
setattr(request, request_name, json.loads(request.POST[field]))
# everything verified, run the view
return method(*args, **kwargs)
return wrapper
return decorator | python | {
"resource": ""
} |
q265410 | Match.sigma_prime | validation | def sigma_prime(self):
"""
Divergence of matched beam
"""
return _np.sqrt(self.emit/self.beta(self.E)) | python | {
"resource": ""
} |
q265411 | MatchPlasma.n_p | validation | def n_p(self):
"""
The plasma density in SI units.
"""
return 2*_sltr.GeV2joule(self.E)*_spc.epsilon_0 / (self.beta*_spc.elementary_charge)**2 | python | {
"resource": ""
} |
q265412 | main | validation | def main(target, label):
"""
Semver tag triggered deployment helper
"""
check_environment(target, label)
click.secho('Fetching tags from the upstream ...')
handler = TagHandler(git.list_tags())
print_information(handler, label)
tag = handler.yield_tag(target, label)
confirm(tag) | python | {
"resource": ""
} |
q265413 | check_environment | validation | def check_environment(target, label):
"""
Performs some environment checks prior to the program's execution
"""
if not git.exists():
click.secho('You must have git installed to use yld.', fg='red')
sys.exit(1)
if not os.path.isdir('.git'):
click.secho('You must cd into a git repository to use yld.', fg='red')
sys.exit(1)
if not git.is_committed():
click.secho('You must commit or stash your work before proceeding.',
fg='red')
sys.exit(1)
if target is None and label is None:
click.secho('You must specify either a target or a label.', fg='red')
sys.exit(1) | python | {
"resource": ""
} |
q265414 | print_information | validation | def print_information(handler, label):
"""
Prints latest tag's information
"""
click.echo('=> Latest stable: {tag}'.format(
tag=click.style(str(handler.latest_stable or 'N/A'), fg='yellow' if
handler.latest_stable else 'magenta')
))
if label is not None:
latest_revision = handler.latest_revision(label)
click.echo('=> Latest relative revision ({label}): {tag}'.format(
label=click.style(label, fg='blue'),
tag=click.style(str(latest_revision or 'N/A'),
fg='yellow' if latest_revision else 'magenta')
)) | python | {
"resource": ""
} |
q265415 | confirm | validation | def confirm(tag):
"""
Prompts user before proceeding
"""
click.echo()
if click.confirm('Do you want to create the tag {tag}?'.format(
tag=click.style(str(tag), fg='yellow')),
default=True, abort=True):
git.create_tag(tag)
if click.confirm(
'Do you want to push the tag {tag} into the upstream?'.format(
tag=click.style(str(tag), fg='yellow')),
default=True):
git.push_tag(tag)
click.echo('Done!')
else:
git.delete_tag(tag)
click.echo('Aborted!') | python | {
"resource": ""
} |
q265416 | get | validation | def get(f, key, default=None):
"""
Gets an array from datasets.
.. versionadded:: 1.4
"""
if key in f.keys():
val = f[key].value
if default is None:
return val
else:
if _np.shape(val) == _np.shape(default):
return val
return default | python | {
"resource": ""
} |
q265417 | FolderDiff.get_state | validation | def get_state(self):
"""Get the current directory state"""
return [os.path.join(dp, f)
for dp, _, fn in os.walk(self.dir)
for f in fn] | python | {
"resource": ""
} |
q265418 | ProgressBar.tick | validation | def tick(self):
"""Add one tick to progress bar"""
self.current += 1
if self.current == self.factor:
sys.stdout.write('+')
sys.stdout.flush()
self.current = 0 | python | {
"resource": ""
} |
q265419 | DLL.push | validation | def push(self, k):
"""Push k to the top of the list
>>> l = DLL()
>>> l.push(1)
>>> l
[1]
>>> l.push(2)
>>> l
[2, 1]
>>> l.push(3)
>>> l
[3, 2, 1]
"""
if not self._first:
# first item
self._first = self._last = node = DLL.Node(k)
elif self._first.value == k:
# it's already at the top
return
else:
try:
self.delete(k) # in case we have it already
except KeyError:
pass
self._first = node = self._first.insert_before(k)
self._index[k] = node
self._size += 1 | python | {
"resource": ""
} |
q265420 | Counter.increment | validation | def increment(cls, name):
"""Call this method to increment the named counter. This is atomic on
the database.
:param name:
Name for a previously created ``Counter`` object
"""
with transaction.atomic():
counter = Counter.objects.select_for_update().get(name=name)
counter.value += 1
counter.save()
return counter.value | python | {
"resource": ""
} |
q265421 | Component.print_loading | validation | def print_loading(self, wait, message):
"""
print loading message on screen
.. note::
            loading message is only written to `sys.stdout`
:param int wait: seconds to wait
:param str message: message to print
:return: None
"""
tags = ['\\', '|', '/', '-']
for i in range(wait):
time.sleep(0.25)
sys.stdout.write("%(message)s... %(tag)s\r" % {
'message': message,
'tag': tags[i % 4]
})
sys.stdout.flush()
pass
sys.stdout.write("%s... Done...\n" % message)
sys.stdout.flush()
pass | python | {
"resource": ""
} |
q265422 | Component.warn_message | validation | def warn_message(self, message, fh=None, prefix="[warn]:", suffix="..."):
"""
print warn type message,
if file handle is `sys.stdout`, print color message
:param str message: message to print
:param file fh: file handle,default is `sys.stdout`
:param str prefix: message prefix,default is `[warn]`
:param str suffix: message suffix ,default is `...`
:return: None
"""
msg = prefix + message + suffix
fh = fh or sys.stdout
if fh is sys.stdout:
termcolor.cprint(msg, color="yellow")
else:
fh.write(msg)
pass | python | {
"resource": ""
} |
q265423 | Component.error_message | validation | def error_message(self, message, fh=None, prefix="[error]:",
suffix="..."):
"""
print error type message
if file handle is `sys.stderr`, print color message
:param str message: message to print
    :param file fh: file handle, default is `sys.stderr`
:param str prefix: message prefix,default is `[error]`
:param str suffix: message suffix ,default is '...'
:return: None
"""
msg = prefix + message + suffix
fh = fh or sys.stderr
if fh is sys.stderr:
termcolor.cprint(msg, color="red")
else:
fh.write(msg)
pass | python | {
"resource": ""
} |
q265424 | Component.system | validation | def system(self, cmd, fake_code=False):
"""
        a built-in wrapper that makes dry-run easier.
        you should use this instead of `os.system`
.. note::
to use it,you need add '--dry-run' option in
your argparser options
:param str cmd: command to execute
:param bool fake_code: only display command
when is True,default is False
:return:
"""
try:
if self.options.dry_run:
def fake_system(cmd):
self.print_message(cmd)
return fake_code
return fake_system(cmd)
except AttributeError:
self.logger.warnning("fake mode enabled,"
"but you don't set '--dry-run' option "
"in your argparser options")
pass
return os.system(cmd) | python | {
"resource": ""
} |
q265425 | Firebase_sync.url_correct | validation | def url_correct(self, point, auth=None, export=None):
'''
Returns a Corrected URL to be used for a Request
as per the REST API.
'''
newUrl = self.__url + point + '.json'
if auth or export:
newUrl += "?"
if auth:
newUrl += ("auth=" + auth)
if export:
if not newUrl.endswith('?'):
newUrl += "&"
newUrl += "format=export"
return newUrl | python | {
"resource": ""
} |
q265426 | main | validation | def main():
"""
Main method.
This method holds what you want to execute when
the script is run on command line.
"""
args = get_arguments()
setup_logging(args)
version_path = os.path.abspath(os.path.join(
os.path.dirname(__file__),
'..',
'..',
'.VERSION'
))
try:
version_text = open(version_path).read().strip()
except Exception:
print('Could not open or read the .VERSION file')
sys.exit(1)
try:
semver.parse(version_text)
except ValueError:
print(('The .VERSION file contains an invalid '
'version: "{}"').format(version_text))
sys.exit(1)
new_version = version_text
if args.version:
try:
if semver.parse(args.version):
new_version = args.version
except Exception:
print('Could not parse "{}" as a version'.format(args.version))
sys.exit(1)
elif args.bump_major:
new_version = semver.bump_major(version_text)
elif args.bump_minor:
new_version = semver.bump_minor(version_text)
elif args.bump_patch:
new_version = semver.bump_patch(version_text)
try:
with open(version_path, 'w') as version_file:
version_file.write(new_version)
except Exception:
print('Could not write the .VERSION file')
sys.exit(1)
print(new_version) | python | {
"resource": ""
} |
q265427 | pickle | validation | def pickle(obj, filepath):
"""Pickle and compress."""
arr = pkl.dumps(obj, -1)
with open(filepath, 'wb') as f:
s = 0
while s < len(arr):
e = min(s + blosc.MAX_BUFFERSIZE, len(arr))
carr = blosc.compress(arr[s:e], typesize=8)
f.write(carr)
s = e | python | {
"resource": ""
} |
q265428 | unpickle | validation | def unpickle(filepath):
"""Decompress and unpickle."""
arr = []
with open(filepath, 'rb') as f:
carr = f.read(blosc.MAX_BUFFERSIZE)
while len(carr) > 0:
arr.append(blosc.decompress(carr))
carr = f.read(blosc.MAX_BUFFERSIZE)
return pkl.loads(b"".join(arr)) | python | {
"resource": ""
} |
q265429 | contact | validation | def contact(request):
"""Displays the contact form and sends the email"""
form = ContactForm(request.POST or None)
if form.is_valid():
subject = form.cleaned_data['subject']
message = form.cleaned_data['message']
sender = form.cleaned_data['sender']
cc_myself = form.cleaned_data['cc_myself']
recipients = settings.CONTACTFORM_RECIPIENTS
if cc_myself:
recipients.append(sender)
send_mail(getattr(settings, "CONTACTFORM_SUBJECT_PREFIX", '') + subject, message, sender, recipients)
return render(request, 'contactform/thanks.html')
return render( request, 'contactform/contact.html', {'form': form}) | python | {
"resource": ""
} |
q265430 | InitComponent.load_gitconfig | validation | def load_gitconfig(self):
"""
try use gitconfig info.
author,email etc.
"""
gitconfig_path = os.path.expanduser('~/.gitconfig')
if os.path.exists(gitconfig_path):
parser = Parser()
parser.read(gitconfig_path)
parser.sections()
return parser
pass | python | {
"resource": ""
} |
q265431 | InitComponent.add_arguments | validation | def add_arguments(cls):
"""
Init project.
"""
return [
(('--yes',), dict(action='store_true', help='clean .git repo')),
(('--variable', '-s'),
dict(nargs='+', help='set extra variable,format is name:value')),
(('--skip-builtin',),
dict(action='store_true', help='skip replace builtin variable')),
] | python | {
"resource": ""
} |
q265432 | SlotComponent.run | validation | def run(self, options):
"""
In general, you don't need to overwrite this method.
:param options:
:return:
"""
self.set_signal()
self.check_exclusive_mode()
slot = self.Handle(self)
# start thread
i = 0
while i < options.threads:
t = threading.Thread(target=self.worker, args=[slot])
# only set daemon when once is False
if options.once is True or options.no_daemon is True:
t.daemon = False
else:
t.daemon = True
t.start()
i += 1
# waiting thread
if options.once is False:
while True:
if threading.active_count() > 1:
sleep(1)
else:
if threading.current_thread().name == "MainThread":
sys.exit(0)
pass | python | {
"resource": ""
} |
q265433 | combine_filenames | validation | def combine_filenames(filenames, max_length=40):
"""Return a new filename to use as the combined file name for a
    bunch of files, based on a hash of their contents.
A precondition is that they all have the same file extension
Given that the list of files can have different paths, we aim to use the
most common path.
Example:
/somewhere/else/foo.js
/somewhere/bar.js
/somewhere/different/too/foobar.js
The result will be
/somewhere/148713695b4a4b9083e506086f061f9c.js
Another thing to note, if the filenames have timestamps in them, combine
them all and use the highest timestamp.
"""
    # Get the MD5 digest for each file, then hash all the digests together.
path = None
names = []
extension = None
timestamps = []
shas = []
filenames.sort()
concat_names = "_".join(filenames)
if concat_names in COMBINED_FILENAMES_GENERATED:
return COMBINED_FILENAMES_GENERATED[concat_names]
for filename in filenames:
name = os.path.basename(filename)
if not extension:
extension = os.path.splitext(name)[1]
elif os.path.splitext(name)[1] != extension:
raise ValueError("Can't combine multiple file extensions")
for base in MEDIA_ROOTS:
try:
shas.append(md5(os.path.join(base, filename)))
break
except IOError:
pass
if path is None:
path = os.path.dirname(filename)
else:
if len(os.path.dirname(filename)) < len(path):
path = os.path.dirname(filename)
m = hashlib.md5()
m.update(",".join(shas))
new_filename = "%s-inkmd" % m.hexdigest()
new_filename = new_filename[:max_length]
new_filename += extension
COMBINED_FILENAMES_GENERATED[concat_names] = new_filename
return os.path.join(path, new_filename) | python | {
"resource": ""
} |
q265434 | apply_orientation | validation | def apply_orientation(im):
"""
Extract the oritentation EXIF tag from the image, which should be a PIL Image instance,
and if there is an orientation tag that would rotate the image, apply that rotation to
the Image instance given to do an in-place rotation.
:param Image im: Image instance to inspect
:return: A possibly transposed image instance
"""
try:
kOrientationEXIFTag = 0x0112
if hasattr(im, '_getexif'): # only present in JPEGs
e = im._getexif() # returns None if no EXIF data
if e is not None:
#log.info('EXIF data found: %r', e)
orientation = e[kOrientationEXIFTag]
f = orientation_funcs[orientation]
return f(im)
except:
# We'd be here with an invalid orientation value or some random error?
pass # log.exception("Error applying EXIF Orientation tag")
return im | python | {
"resource": ""
} |
q265435 | write | validation | def write():
"""Start a new piece"""
click.echo("Fantastic. Let's get started. ")
title = click.prompt("What's the title?")
# Make sure that title doesn't exist.
url = slugify(title)
url = click.prompt("What's the URL?", default=url)
# Make sure that title doesn't exist.
click.echo("Got it. Creating %s..." % url)
scaffold_piece(title, url) | python | {
"resource": ""
} |
q265436 | scaffold | validation | def scaffold():
"""Start a new site."""
click.echo("A whole new site? Awesome.")
title = click.prompt("What's the title?")
url = click.prompt("Great. What's url? http://")
# Make sure that title doesn't exist.
click.echo("Got it. Creating %s..." % url) | python | {
"resource": ""
} |
q265437 | publish | validation | def publish():
"""Publish the site"""
try:
build_site(dev_mode=False, clean=True)
click.echo('Deploying the site...')
# call("firebase deploy", shell=True)
call("rsync -avz -e ssh --progress %s/ %s" % (BUILD_DIR, CONFIG["scp_target"],), shell=True)
if "cloudflare" in CONFIG and "purge" in CONFIG["cloudflare"] and CONFIG["cloudflare"]["purge"]:
do_purge()
except (KeyboardInterrupt, SystemExit):
raise
sys.exit(1) | python | {
"resource": ""
} |
q265438 | Git.get_branches | validation | def get_branches(self):
"""Returns a list of the branches"""
return [self._sanitize(branch)
for branch in self._git.branch(color="never").splitlines()] | python | {
"resource": ""
} |
q265439 | Git.get_current_branch | validation | def get_current_branch(self):
"""Returns the currently active branch"""
return next((self._sanitize(branch)
for branch in self._git.branch(color="never").splitlines()
if branch.startswith('*')),
None) | python | {
"resource": ""
} |
q265440 | Git.create_patch | validation | def create_patch(self, from_tag, to_tag):
"""Create a patch between tags"""
return str(self._git.diff('{}..{}'.format(from_tag, to_tag), _tty_out=False)) | python | {
"resource": ""
} |
q265441 | one | validation | def one(func, n=0):
"""
Create a callable that applies ``func`` to a value in a sequence.
If the value is not a sequence or is an empty sequence then ``None`` is
returned.
:type func: `callable`
    :param func: Callable to be applied to the value at index ``n``.
:type n: `int`
:param n: Index of the value to apply ``func`` to.
"""
def _one(result):
if _isSequenceTypeNotText(result) and len(result) > n:
return func(result[n])
return None
return maybe(_one) | python | {
"resource": ""
} |
q265442 | many | validation | def many(func):
"""
Create a callable that applies ``func`` to every value in a sequence.
If the value is not a sequence then an empty list is returned.
:type func: `callable`
    :param func: Callable to be applied to each value in the sequence.
"""
def _many(result):
if _isSequenceTypeNotText(result):
return map(func, result)
return []
return maybe(_many, default=[]) | python | {
"resource": ""
} |
q265443 | Text | validation | def Text(value, encoding=None):
"""
Parse a value as text.
:type value: `unicode` or `bytes`
:param value: Text value to parse
:type encoding: `bytes`
:param encoding: Encoding to treat ``bytes`` values as, defaults to
``utf-8``.
:rtype: `unicode`
:return: Parsed text or ``None`` if ``value`` is neither `bytes` nor
`unicode`.
"""
if encoding is None:
encoding = 'utf-8'
if isinstance(value, bytes):
return value.decode(encoding)
elif isinstance(value, unicode):
return value
return None | python | {
"resource": ""
} |
q265444 | Integer | validation | def Integer(value, base=10, encoding=None):
"""
Parse a value as an integer.
:type value: `unicode` or `bytes`
:param value: Text value to parse
:type base: `unicode` or `bytes`
:param base: Base to assume ``value`` is specified in.
:type encoding: `bytes`
:param encoding: Encoding to treat ``bytes`` values as, defaults to
``utf-8``.
:rtype: `int`
:return: Parsed integer or ``None`` if ``value`` could not be parsed as an
integer.
"""
try:
return int(Text(value, encoding), base)
except (TypeError, ValueError):
return None | python | {
"resource": ""
} |
q265445 | Boolean | validation | def Boolean(value, true=(u'yes', u'1', u'true'), false=(u'no', u'0', u'false'),
encoding=None):
"""
Parse a value as a boolean.
:type value: `unicode` or `bytes`
:param value: Text value to parse.
:type true: `tuple` of `unicode`
:param true: Values to compare, ignoring case, for ``True`` values.
:type false: `tuple` of `unicode`
:param false: Values to compare, ignoring case, for ``False`` values.
:type encoding: `bytes`
:param encoding: Encoding to treat `bytes` values as, defaults to
``utf-8``.
:rtype: `bool`
:return: Parsed boolean or ``None`` if ``value`` did not match ``true`` or
``false`` values.
"""
value = Text(value, encoding)
if value is not None:
value = value.lower().strip()
if value in true:
return True
elif value in false:
return False
return None | python | {
"resource": ""
} |
q265446 | Delimited | validation | def Delimited(value, parser=Text, delimiter=u',', encoding=None):
"""
Parse a value as a delimited list.
:type value: `unicode` or `bytes`
:param value: Text value to parse.
:type parser: `callable` taking a `unicode` parameter
:param parser: Callable to map over the delimited text values.
:type delimiter: `unicode`
:param delimiter: Delimiter text.
:type encoding: `bytes`
:param encoding: Encoding to treat `bytes` values as, defaults to
``utf-8``.
:rtype: `list`
:return: List of parsed values.
"""
value = Text(value, encoding)
if value is None or value == u'':
return []
return map(parser, value.split(delimiter)) | python | {
"resource": ""
} |
q265447 | Timestamp | validation | def Timestamp(value, _divisor=1., tz=UTC, encoding=None):
"""
Parse a value as a POSIX timestamp in seconds.
:type value: `unicode` or `bytes`
:param value: Text value to parse, which should be the number of seconds
since the epoch.
:type _divisor: `float`
:param _divisor: Number to divide the value by.
:type tz: `tzinfo`
:param tz: Timezone, defaults to UTC.
:type encoding: `bytes`
:param encoding: Encoding to treat `bytes` values as, defaults to
``utf-8``.
:rtype: `datetime.datetime`
:return: Parsed datetime or ``None`` if ``value`` could not be parsed.
"""
value = Float(value, encoding)
if value is not None:
value = value / _divisor
return datetime.fromtimestamp(value, tz)
return None | python | {
"resource": ""
} |
q265448 | parse | validation | def parse(expected, query):
"""
Parse query parameters.
:type expected: `dict` mapping `bytes` to `callable`
:param expected: Mapping of query argument names to argument parsing
callables.
:type query: `dict` mapping `bytes` to `list` of `bytes`
:param query: Mapping of query argument names to lists of argument values,
this is the form that Twisted Web's `IRequest.args
<twisted:twisted.web.iweb.IRequest.args>` value takes.
:rtype: `dict` mapping `bytes` to `object`
:return: Mapping of query argument names to parsed argument values.
"""
return dict(
(key, parser(query.get(key, [])))
for key, parser in expected.items()) | python | {
"resource": ""
} |
q265449 | CloudWatch.put | validation | def put(self, metrics):
"""
        Put metrics to cloudwatch. Metric should be an instance or a list of
instances of CloudWatchMetric
"""
if type(metrics) == list:
for metric in metrics:
self.c.put_metric_data(**metric)
else:
self.c.put_metric_data(**metrics) | python | {
"resource": ""
} |
q265450 | _renderResource | validation | def _renderResource(resource, request):
"""
Render a given resource.
See `IResource.render <twisted:twisted.web.resource.IResource.render>`.
"""
meth = getattr(resource, 'render_' + nativeString(request.method), None)
if meth is None:
try:
allowedMethods = resource.allowedMethods
except AttributeError:
allowedMethods = _computeAllowedMethods(resource)
raise UnsupportedMethod(allowedMethods)
return meth(request) | python | {
"resource": ""
} |
q265451 | SpinneretResource._adaptToResource | validation | def _adaptToResource(self, result):
"""
Adapt a result to `IResource`.
Several adaptions are tried they are, in order: ``None``,
`IRenderable <twisted:twisted.web.iweb.IRenderable>`, `IResource
<twisted:twisted.web.resource.IResource>`, and `URLPath
<twisted:twisted.python.urlpath.URLPath>`. Anything else is returned as
is.
A `URLPath <twisted:twisted.python.urlpath.URLPath>` is treated as
a redirect.
"""
if result is None:
return NotFound()
spinneretResource = ISpinneretResource(result, None)
if spinneretResource is not None:
return SpinneretResource(spinneretResource)
renderable = IRenderable(result, None)
if renderable is not None:
return _RenderableResource(renderable)
resource = IResource(result, None)
if resource is not None:
return resource
if isinstance(result, URLPath):
return Redirect(str(result))
return result | python | {
"resource": ""
} |
q265452 | SpinneretResource._handleRenderResult | validation | def _handleRenderResult(self, request, result):
"""
Handle the result from `IResource.render`.
If the result is a `Deferred` then return `NOT_DONE_YET` and add
a callback to write the result to the request when it arrives.
"""
# Fires when the request finishes first (e.g. the client disconnects):
# cancel the still-pending render result.
def _requestFinished(result, cancel):
cancel()
return result
# Normalise synchronous results so there is a single, deferred code path.
if not isinstance(result, Deferred):
result = succeed(result)
# Runs after _adaptToResource (added below), so ``result`` is normally
# an IResource; fall back to writing the raw value if it has no render().
def _whenDone(result):
render = getattr(result, 'render', lambda request: result)
renderResult = render(request)
if renderResult != NOT_DONE_YET:
request.write(renderResult)
request.finish()
return result
request.notifyFinish().addBoth(_requestFinished, result.cancel)
result.addCallback(self._adaptToResource)
result.addCallback(_whenDone)
result.addErrback(request.processingFailed)
return NOT_DONE_YET | python | {
"resource": ""
} |
def _negotiateHandler(self, request):
    """
    Pick a handler based on the content types acceptable to the client.

    :rtype: 2-`tuple` of `twisted.web.iweb.IResource` and `bytes`
    :return: Pair of a resource and the content type.
    """
    accept = _parseAccept(request.requestHeaders.getRawHeaders('Accept'))
    # ``accept`` is ordered by quality, so the first hit wins.
    for contentType in accept:
        handler = self._acceptHandlers.get(contentType.lower())
        if handler is not None:
            return handler, handler.contentType
    if self._fallback:
        fallback = self._handlers[0]
        return fallback, fallback.contentType
    return NotAcceptable(), None
def _parseAccept(headers):
    """
    Parse and sort an ``Accept`` header.
    The entries are ordered by their ``q`` parameter, highest first.
    @rtype: `OrderedDict` mapping `bytes` to `dict`
    @return: Mapping of media types to header parameters.
    """
    def _quality(entry):
        # Sort key: the ``q`` parameter, defaulting to 1.
        _, params = entry
        return float(params.get('q', 1))
    ordered = sorted(_splitHeaders(headers), key=_quality, reverse=True)
    return OrderedDict(ordered)
q265455 | _splitHeaders | validation | def _splitHeaders(headers):
"""
Split an HTTP header whose components are separated with commas.
Each component is then split on semicolons and the component arguments
converted into a `dict`.
@return: `list` of 2-`tuple` of `bytes`, `dict`
@return: List of header arguments and mapping of component argument names
to values.
"""
return [cgi.parse_header(value)
for value in chain.from_iterable(
s.split(',') for s in headers
if s)] | python | {
"resource": ""
} |
def contentEncoding(requestHeaders, encoding=None):
    """
    Extract an encoding from a ``Content-Type`` header.
    @type requestHeaders: `twisted.web.http_headers.Headers`
    @param requestHeaders: Request headers.
    @type encoding: `bytes`
    @param encoding: Default encoding to assume if the ``Content-Type``
        header is lacking one. Defaults to ``UTF-8``.
    @rtype: `bytes`
    @return: Content encoding.
    """
    default = b'utf-8' if encoding is None else encoding
    parsed = _splitHeaders(
        requestHeaders.getRawHeaders(b'Content-Type', []))
    if not parsed:
        return default
    _, params = parsed[0]
    return params.get(b'charset', default)
def maybe(f, default=None):
    """
    Wrap ``f`` so that it short-circuits on ``None`` input.

    The wrapper returns ``default`` immediately when its first positional
    argument is ``None``; otherwise it delegates to ``f`` unchanged.
    """
    @wraps(f)
    def _wrapper(value, *args, **kwargs):
        return default if value is None else f(value, *args, **kwargs)
    return _wrapper
def settings(path=None, with_path=None):
    """
    Return the process-wide `Settings` instance.

    When *path* is given, the settings are first (re)bound from that
    python module before being returned.

    :param str path: dotted module path to load settings from
    :param str with_path: extra directory to add to the search path
    :return: the shared `Settings` instance
    """
    if not path:
        return Settings._wrapped
    Settings.bind(path, with_path=with_path)
    return Settings._wrapped
q265459 | Settings.bind | validation | def bind(mod_path, with_path=None):
"""
Bind the variables of a python settings module to `Settings._wrapped`.
.. note::
You don't need to call this method yourself;
`cliez.parser.parse` calls it for you.
.. exception::
If ``mod_path`` is not importable an `ImportError` is raised.
:param str mod_path: module path, *dotted style, e.g. 'mod.mod1'*
:param str with_path: path to prepend to `sys.path`;
if it is a file path, its parent directory is used.
:return: An instance of `Settings`
"""
if with_path:
if os.path.isdir(with_path):
sys.path.insert(0, with_path)
else:
# NOTE(review): assumes a '/'-separated path and strips the last two
# components to get the package root; not Windows-safe -- confirm.
sys.path.insert(0, with_path.rsplit('/', 2)[0])
pass
# raise `ImportError` mod_path if not exist
mod = importlib.import_module(mod_path)
settings = Settings()
# Copy every public, non-module attribute of the module onto the
# Settings instance.
for v in dir(mod):
if v[0] == '_' or type(getattr(mod, v)).__name__ == 'module':
continue
setattr(settings, v, getattr(mod, v))
pass
# Remember where the settings came from and publish the instance.
Settings._path = mod_path
Settings._wrapped = settings
return settings | python | {
"resource": ""
} |
def get_version():
    """
    Get the version from the version module without importing more than
    necessary.

    :return: the ``__version__`` string declared in
        ``txspinneret/_version.py``.
    """
    version_module_path = os.path.join(
        os.path.dirname(__file__), "txspinneret", "_version.py")
    # Execute the module in an explicit namespace rather than relying on
    # exec() writing into locals(): mutating a function's locals() is
    # undefined behaviour and no longer works under PEP 667 (Python 3.13+).
    namespace = {}
    with open(version_module_path) as version_module:
        exec(version_module.read(), namespace)
    return namespace["__version__"]
q265461 | TX.send | validation | def send(self, use_open_peers=True, queue=True, **kw):
"""
Send a transaction immediately; failed transactions are picked up by
the TxBroadcaster.
:param ip: specific peer IP to send the tx to (only used when
``use_open_peers`` is False)
:param port: port of the specific peer
:param use_open_peers: use Arky's broadcast method
:param queue: when True, flag the tx for the broadcaster queue
"""
if not use_open_peers:
ip = kw.get('ip')
port = kw.get('port')
peer = 'http://{}:{}'.format(ip, port)
res = arky.rest.POST.peer.transactions(peer=peer, transactions=[self.tx.tx])
else:
res = arky.core.sendPayload(self.tx.tx)
# NOTE(review): ``self.tx.success`` is compared against the string
# '0.0%' -- presumably a broadcast success ratio; confirm against
# arky's sendPayload return format.
if self.tx.success != '0.0%':
self.tx.error = None
self.tx.success = True
else:
self.tx.error = res['messages']
self.tx.success = False
self.tx.tries += 1
self.tx.res = res
if queue:
self.tx.send = True
# Persist the updated transaction state.
self.__save()
return res | python | {
"resource": ""
} |
def check_confirmations_or_resend(self, use_open_peers=False, **kw):
    """
    Re-broadcast the transaction unless it already has confirmations.

    :param use_open_peers: select random peers from the api/peers endpoint
    """
    confirmed = self.confirmations() != 0
    if not confirmed:
        self.send(use_open_peers, **kw)
def command_list():
    """
    Discover the available sub-commands.

    Scans ``<COMPONENT_ROOT>/components`` for python modules; every
    module other than ``__init__.py`` is a sub-command.

    .. note::
        Errors here are code/configuration errors rather than runtime
        errors, so they are written to stderr and abort the process.

    :return: `list` of matched sub-command names
    """
    from cliez.conf import COMPONENT_ROOT
    root = COMPONENT_ROOT
    if root is None:
        sys.stderr.write("cliez.conf.COMPONENT_ROOT not set.\n")
        sys.exit(2)
    if not os.path.exists(root):
        sys.stderr.write(
            "please set a valid path for `cliez.conf.COMPONENT_ROOT`\n")
        sys.exit(2)
    try:
        entries = os.listdir(os.path.join(root, 'components'))
    except FileNotFoundError:
        # No components directory at all: nothing to offer.
        return []
    return [name[:-3] for name in entries
            if name.endswith('.py') and name != '__init__.py']
q265464 | append_arguments | validation | def append_arguments(klass, sub_parsers, default_epilog, general_arguments):
"""
Add a component class's options to the argparse sub-parsers.
:param cliez.component.Component klass: subclass of Component
:param Namespace sub_parsers: result of ``parser.add_subparsers()``
:param str default_epilog: epilog text used when none is configured
:param list general_arguments: global options, defined by the user
:return: Namespace subparser for this component
"""
# Component class name maps to the sub-command name, e.g.
# ``BuildComponent`` -> ``build``.
entry_name = hump_to_underscore(klass.__name__).replace(
'_component',
'')
# set sub command document
epilog = default_epilog if default_epilog \
else 'This tool generate by `cliez` ' \
'https://www.github.com/wangwenpei/cliez'
sub_parser = sub_parsers.add_parser(entry_name, help=klass.__doc__,
epilog=epilog)
sub_parser.description = klass.add_arguments.__doc__
# add slot arguments
if hasattr(klass, 'add_slot_args'):
slot_args = klass.add_slot_args() or []
for v in slot_args:
sub_parser.add_argument(*v[0], **v[1])
# Slot docstring wins over add_arguments' docstring when present.
sub_parser.description = klass.add_slot_args.__doc__
pass
# Each argument spec is a pair of (positional args, keyword args) for
# ``add_argument``.
user_arguments = klass.add_arguments() or []
for v in user_arguments:
sub_parser.add_argument(*v[0], **v[1])
if not klass.exclude_global_option:
for v in general_arguments:
sub_parser.add_argument(*v[0], **v[1])
return sub_parser | python | {
"resource": ""
} |
q265465 | parse | validation | def parse(parser, argv=None, settings_key='settings', no_args_func=None):
"""
Parse and dispatch a cliez app's command line.
:param argparse.ArgumentParser parser: an instance
of argparse.ArgumentParser
:param argv: argument list, default is `sys.argv`
:type argv: list or tuple
:param str settings_key: name of the option holding the settings
module path, default is ``settings``.
:param object no_args_func: a callable object. if no sub-parser matched,
parser will call it.
:return: an instance of `cliez.component.Component` or its subclass
"""
argv = argv or sys.argv
commands = command_list()
if type(argv) not in [list, tuple]:
raise TypeError("argv only can be list or tuple")
# match sub-parser
if len(argv) >= 2 and argv[1] in commands:
sub_parsers = parser.add_subparsers()
# Sub-command ``foo`` maps to class ``FooComponent``.
class_name = argv[1].capitalize() + 'Component'
from cliez.conf import (COMPONENT_ROOT,
LOGGING_CONFIG,
EPILOG,
GENERAL_ARGUMENTS)
sys.path.insert(0, os.path.dirname(COMPONENT_ROOT))
mod = importlib.import_module(
'{}.components.{}'.format(os.path.basename(COMPONENT_ROOT),
argv[1]))
# dynamic load component
klass = getattr(mod, class_name)
sub_parser = append_arguments(klass, sub_parsers, EPILOG,
GENERAL_ARGUMENTS)
options = parser.parse_args(argv[1:])
# Bind settings from the option named by ``settings_key``, if present.
settings = Settings.bind(
getattr(options, settings_key)
) if settings_key and hasattr(options, settings_key) else None
obj = klass(parser, sub_parser, options, settings)
# init logger: verbosity raises the level from CRITICAL towards DEBUG
logger_level = logging.CRITICAL
if hasattr(options, 'verbose'):
if options.verbose == 1:
logger_level = logging.ERROR
elif options.verbose == 2:
logger_level = logging.WARNING
elif options.verbose == 3:
logger_level = logging.INFO
obj.logger.setLevel(logging.INFO)
pass
if hasattr(options, 'debug') and options.debug:
logger_level = logging.DEBUG
# http lib use a strange way to logging
try:
import http.client as http_client
http_client.HTTPConnection.debuglevel = 1
except Exception:
# do nothing
pass
pass
loggers = LOGGING_CONFIG['loggers']
for k, v in loggers.items():
v.setdefault('level', logger_level)
if logger_level in [logging.INFO, logging.DEBUG]:
v['handlers'] = ['stdout']
pass
logging_config.dictConfig(LOGGING_CONFIG)
# this may not necessary
# obj.logger.setLevel(logger_level)
obj.run(options)
# return object to make unit test easy
return obj
# print all sub commands when user set.
if not parser.description and len(commands):
sub_parsers = parser.add_subparsers()
[sub_parsers.add_parser(v) for v in commands]
pass
pass
options = parser.parse_args(argv[1:])
if no_args_func and callable(no_args_func):
return no_args_func(options)
else:
parser._print_message("nothing to do...\n")
pass | python | {
"resource": ""
} |
def hump_to_underscore(name):
    """
    Convert CamelCase ("hump") style to underscore style.

    The first character is lowercased as-is; every subsequent ASCII
    uppercase letter is replaced by ``_`` plus its lowercase form.

    :param name: CamelCase identifier
    :return: str
    """
    pieces = []
    for index, char in enumerate(name):
        if index == 0:
            pieces.append(char.lower())
        elif 'A' <= char <= 'Z':
            pieces.append('_' + char.lower())
        else:
            pieces.append(char)
    return ''.join(pieces)
def get_fuel_prices(self) -> GetFuelPricesResponse:
    """Fetch the current fuel prices for every station."""
    response = requests.get(
        '{}/prices'.format(API_URL_BASE),
        headers=self._get_headers(),
        timeout=self._timeout,
    )
    if not response.ok:
        raise FuelCheckError.create(response)
    payload = response.json()
    return GetFuelPricesResponse.deserialize(payload)
def get_fuel_prices_for_station(
        self,
        station: int
) -> List[Price]:
    """
    Get the fuel prices for a specific fuel station.

    :param station: the station code to query
    :return: one `Price` per fuel type sold at the station
    :raises FuelCheckError: if the API responds with an error status
    """
    response = requests.get(
        '{}/prices/station/{}'.format(API_URL_BASE, station),
        headers=self._get_headers(),
        timeout=self._timeout,
    )
    if not response.ok:
        raise FuelCheckError.create(response)
    data = response.json()
    # Use a distinct loop variable: the original comprehension reused
    # ``data``, shadowing the response payload mid-iteration.
    return [Price.deserialize(price) for price in data['prices']]
def get_fuel_prices_within_radius(
        self, latitude: float, longitude: float, radius: int,
        fuel_type: str, brands: Optional[List[str]] = None
) -> List[StationPrice]:
    """Get all the fuel prices within ``radius`` of the coordinate."""
    response = requests.post(
        '{}/prices/nearby'.format(API_URL_BASE),
        json={
            'fueltype': fuel_type,
            'latitude': latitude,
            'longitude': longitude,
            'radius': radius,
            'brand': brands if brands is not None else [],
        },
        headers=self._get_headers(),
        timeout=self._timeout,
    )
    if not response.ok:
        raise FuelCheckError.create(response)
    data = response.json()
    # Index stations by their raw code so each price can be joined back
    # to its station.
    stations = {}
    for raw_station in data['stations']:
        stations[raw_station['code']] = Station.deserialize(raw_station)
    station_prices = []  # type: List[StationPrice]
    for raw_price in data['prices']:
        price = Price.deserialize(raw_price)
        station_prices.append(StationPrice(
            price=price,
            station=stations[price.station_code]
        ))
    return station_prices
def get_fuel_price_trends(self, latitude: float, longitude: float,
                          fuel_types: List[str]) -> PriceTrends:
    """
    Get the fuel price trends for the given location and fuel types.

    :param latitude: latitude of the location
    :param longitude: longitude of the location
    :param fuel_types: fuel type codes to request trends for
    :raises FuelCheckError: if the API responds with an error status
    """
    response = requests.post(
        '{}/prices/trends/'.format(API_URL_BASE),
        json={
            'location': {
                'latitude': latitude,
                'longitude': longitude,
            },
            # Loop variable renamed: the original shadowed the ``type``
            # builtin.
            'fueltypes': [{'code': fuel_type} for fuel_type in fuel_types],
        },
        headers=self._get_headers(),
        timeout=self._timeout,
    )
    if not response.ok:
        raise FuelCheckError.create(response)
    data = response.json()
    return PriceTrends(
        variances=[
            Variance.deserialize(variance)
            for variance in data['Variances']
        ],
        average_prices=[
            AveragePrice.deserialize(avg_price)
            for avg_price in data['AveragePrices']
        ]
    )
def get_reference_data(
        self,
        modified_since: Optional[datetime.datetime] = None
) -> GetReferenceDataResponse:
    """
    Fetch the API reference data.

    :param modified_since: The response will be empty if no changes have
        been made to the reference data since this timestamp; otherwise
        all reference data is returned.  Defaults to 2010-01-01 so that
        everything is returned.
    """
    if modified_since is None:
        modified_since = datetime.datetime(year=2010, month=1, day=1)
    # Client headers take precedence over the conditional header, same as
    # the original ``{'if-modified-since': ..., **self._get_headers()}``.
    request_headers = {'if-modified-since': self._format_dt(modified_since)}
    request_headers.update(self._get_headers())
    response = requests.get(
        '{}/lovs'.format(API_URL_BASE),
        headers=request_headers,
        timeout=self._timeout,
    )
    if not response.ok:
        raise FuelCheckError.create(response)
    return GetReferenceDataResponse.deserialize(response.json())
def pre(self, command, output_dir, vars):
    """
    Hook called before the template is applied.

    Injects the license name and the current year into ``vars`` so
    templates can reference them.
    """
    vars['license_name'] = 'Apache'
    vars['year'] = time.strftime('%Y', time.localtime())
def Text(name, encoding=None):
    """
    Match a route parameter.
    `Any` is a synonym for `Text`.
    :type name: `bytes`
    :param name: Route parameter name.
    :type encoding: `bytes`
    :param encoding: Default encoding to assume if the ``Content-Type``
        header is lacking one.
    :return: ``callable`` suitable for use with `route` or `subroute`.
    """
    def _match(request, value):
        charset = contentEncoding(request.requestHeaders, encoding)
        return name, query.Text(value, encoding=charset)
    return _match
def Integer(name, base=10, encoding=None):
    """
    Match an integer route parameter.
    :type name: `bytes`
    :param name: Route parameter name.
    :type base: `int`
    :param base: Base to interpret the value in.
    :type encoding: `bytes`
    :param encoding: Default encoding to assume if the ``Content-Type``
        header is lacking one.
    :return: ``callable`` suitable for use with `route` or `subroute`.
    """
    def _match(request, value):
        charset = contentEncoding(request.requestHeaders, encoding)
        return name, query.Integer(value, base=base, encoding=charset)
    return _match
q265475 | _matchRoute | validation | def _matchRoute(components, request, segments, partialMatching):
"""
Match a request path against our path components.
The path components are always matched relative to their parent is in the
resource hierarchy, in other words it is only possible to match URIs nested
more deeply than the parent resource.
:type components: ``iterable`` of `bytes` or `callable`
:param components: Iterable of path components, to match against the
request, either static strings or dynamic parameters. As a convenience,
a single `bytes` component containing ``/`` may be given instead of
manually separating the components. If no components are given the null
route is matched, this is the case where ``segments`` is empty.
:type segments: ``sequence`` of `bytes`
:param segments: Sequence of path segments, from the request, to match
against.
:type partialMatching: `bool`
:param partialMatching: Allow partial matching against the request path?
:rtype: 2-`tuple` of `dict` keyed on `bytes` and `list` of `bytes`
:return: Pair of parameter results, mapping parameter names to processed
values, and a list of the remaining request path segments. If there is
no route match the result will be ``None`` and the original request path
segments.
"""
# Convenience form: a single "a/b/c" string is split into components.
if len(components) == 1 and isinstance(components[0], bytes):
components = components[0]
if components[:1] == '/':
components = components[1:]
components = components.split('/')
results = OrderedDict()
NO_MATCH = None, segments
remaining = list(segments)
# Handle the null route.
if len(segments) == len(components) == 0:
return results, remaining
# Walk components and segments in lockstep; izip_longest pads the
# shorter side with None so both exhaustion cases can be detected.
for us, them in izip_longest(components, segments):
if us is None:
if partialMatching:
# We've matched all of our components, there might be more
# segments for something else to process.
break
else:
return NO_MATCH
elif them is None:
# We've run out of path segments to match, so this route can't be
# the matching one.
return NO_MATCH
if callable(us):
# Dynamic parameter: the matcher returns (name, processed value),
# with a None value meaning "no match".
name, match = us(request, them)
if match is None:
return NO_MATCH
results[name] = match
elif us != them:
return NO_MATCH
# This segment was consumed by the current component.
remaining.pop(0)
return results, remaining | python | {
"resource": ""
} |
def routedResource(f, routerAttribute='router'):
    """
    Decorate a router-producing callable to instead produce a resource.

    The new callable invokes the original one and calls ``resource`` on
    its ``routerAttribute``.  If the producer exposes several routers,
    ``routerAttribute`` selects which one to use, for example::

        PublicResource = routedResource(_ComplexRouter)
        PrivateResource = routedResource(_ComplexRouter, 'privateRouter')

    :type f: ``callable``
    :param f: Callable producing an object with a `Router` attribute, for
        example, a type.
    :type routerAttribute: `str`
    :param routerAttribute: Name of the `Router` attribute on the result
        of calling ``f``.
    :rtype: `callable`
    :return: Callable producing an `IResource`.
    """
    @wraps(f)
    def _resourceFactory(*a, **kw):
        producer = f(*a, **kw)
        return getattr(producer, routerAttribute).resource()
    return _resourceFactory
q265477 | Router._forObject | validation | def _forObject(self, obj):
"""
Create a new `Router` instance, with it's own set of routes, for
``obj``.
"""
router = type(self)()
router._routes = list(self._routes)
router._self = obj
return router | python | {
"resource": ""
} |
q265478 | Router._addRoute | validation | def _addRoute(self, f, matcher):
"""
Add a route handler and matcher to the collection of possible routes.
"""
self._routes.append((f.func_name, f, matcher)) | python | {
"resource": ""
} |
def route(self, *components):
    """
    See `txspinneret.route.route`.
    This decorator can be stacked with itself to register several routes
    against a single handler.
    """
    def _register(f):
        self._addRoute(f, route(*components))
        return f
    return _register
def subroute(self, *components):
    """
    See `txspinneret.route.subroute`.
    This decorator can be stacked with itself to register several routes
    against a single handler.
    """
    def _register(f):
        self._addRoute(f, subroute(*components))
        return f
    return _register
q265481 | _tempfile | validation | def _tempfile(filename):
"""
Create a NamedTemporaryFile instance to be passed to atomic_writer
"""
return tempfile.NamedTemporaryFile(mode='w',
dir=os.path.dirname(filename),
prefix=os.path.basename(filename),
suffix=os.fsencode('.tmp'),
delete=False) | python | {
"resource": ""
} |
def atomic_write(filename):
    """
    Yield a temporary file handle whose contents replace ``filename``
    when the context exits.

    NOTE(review): the docstring of the original says "context manager";
    this generator presumably gets wrapped by
    ``contextlib.contextmanager`` where it is decorated -- confirm.
    """
    handle = _tempfile(os.fsencode(filename))
    try:
        yield handle
    finally:
        handle.close()
        # Replace the original file with the temp file (atomic on success).
        os.replace(handle.name, filename)
def get_item(filename, uuid):
    """
    Read the entries matching ``uuid`` from a JSON list file.

    :return: list of matching entries, or None when there is no match
    """
    with open(os.fsencode(str(filename)), "r") as handle:
        entries = json.load(handle)
    matches = [entry for entry in entries if entry["uuid"] == str(uuid)]
    return matches if matches else None
def set_item(filename, item):
    """
    Append ``item`` to the JSON list in ``filename`` unless an entry
    with the same uuid already exists.

    :return: True when the item was added, None when the uuid already
        exists (in which case the file is left untouched)
    """
    path = os.fsencode(str(filename))
    with open(path) as products_file:
        products_data = json.load(products_file)
    # Check for duplicates BEFORE opening the temp file: atomic_write
    # replaces the target on exit, so entering it on the duplicate path
    # (as the original code did) clobbered the data file with an empty
    # temp file.
    duplicate = any(
        entry["uuid"] == str(item["uuid"]) for entry in products_data)
    if duplicate:
        return None
    products_data.append(item)
    with atomic_write(path) as temp_file:
        json.dump(products_data, temp_file)
    return True
q265485 | update_item | validation | def update_item(filename, item, uuid):
"""
Update the entry matching ``uuid`` in the JSON file, in place and
atomically (via ``atomic_write``).
:return: True once the rewritten data has been dumped to the temp file
"""
with atomic_write(os.fsencode(str(filename))) as temp_file:
with open(os.fsencode(str(filename))) as products_file:
# load the JSON data into memory
products_data = json.load(products_file)
# apply modifications to the JSON data wrt UUID
# TODO: handle this in a neat way
# NOTE(review): the last record is used to sniff the file type --
# assumes homogeneous records; confirm against the writers.
if 'products' in products_data[-1]:
# handle orders object: presumably only the first nested product
# is ever updated -- confirm intent.
[products_data[i]["products"][0].update(item) for (
i, j) in enumerate(products_data) if j["uuid"] == str(uuid)]
else:
# handle products object
[products_data[i].update(item) for (i, j) in enumerate(
products_data) if j["uuid"] == str(uuid)]
# save the modified JSON data into the temp file
json.dump(products_data, temp_file)
return True | python | {
"resource": ""
} |
q265486 | Command.command_handle | validation | def command_handle(self):
"""Run the configured shell command, which must print a single number,
and compare it against the warning/critical thresholds (Nagios plugin
flow: each self.unknown/ok/warning/critical call reports and exits)."""
self.__results = self.execute(self.args.command)
self.close()
self.logger.debug("results: {}".format(self.__results))
if not self.__results:
self.unknown("{} return nothing.".format(self.args.command))
# Exactly one line of output is expected.
if len(self.__results) != 1:
self.unknown(
"{} return more than one number.".format(
self.args.command))
self.__result = int(self.__results[0])
self.logger.debug("result: {}".format(self.__result))
# NOTE(review): ``long`` exists only on Python 2; and int() above has
# already raised for non-numeric output, so this is belt-and-braces.
if not isinstance(self.__result, (int, long)):
self.unknown(
"{} didn't return single number.".format(
self.args.command))
status = self.ok
# Compare the value against the warning/critical thresholds;
# critical is checked last so it wins when both are exceeded.
if self.__result > self.args.warning:
status = self.warning
if self.__result > self.args.critical:
status = self.critical
# Output
self.shortoutput = "{0} return {1}.".format(
self.args.command, self.__result)
[self.longoutput.append(line)
for line in self.__results if self.__results]
# Nagios perfdata format: 'label'=value;warn;crit;min
self.perfdata.append("{command}={result};{warn};{crit};0;".format(
crit=self.args.critical,
warn=self.args.warning,
result=self.__result,
command=self.args.command))
# Return status with message to Nagios.
status(self.output(long_output_limit=None))
self.logger.debug("Return status and exit to Nagios.") | python | {
"resource": ""
} |
def execute(self, command, timeout=None):
    """
    Execute a shell command over the established SSH transport.

    :param command: the shell command line to run
    :param timeout: per-channel socket timeout in seconds; defaults to
        ``self.args.timeout``
    :return: list of stripped stdout lines on success; every failure is
        reported through ``self.unknown`` (Nagios UNKNOWN state)
    """
    try:
        self.channel = self.ssh.get_transport().open_session()
    except paramiko.SSHException as e:
        self.unknown("Create channel error: %s" % e)
    try:
        self.channel.settimeout(self.args.timeout if not timeout else timeout)
    except socket.timeout as e:
        self.unknown("Settimeout for channel error: %s" % e)
    try:
        self.logger.debug("command: {}".format(command))
        self.channel.exec_command(command)
    except paramiko.SSHException as e:
        self.unknown("Execute command error: %s" % e)
    try:
        # NOTE: ``map(string.strip, ...)`` is Python 2 style (the
        # ``string`` module's function form).
        self.stdin = self.channel.makefile('wb', -1)
        self.stderr = map(string.strip,
                          self.channel.makefile_stderr('rb', -1).readlines())
        self.stdout = map(string.strip,
                          self.channel.makefile('rb', -1).readlines())
    except Exception as e:
        self.unknown("Get result error: %s" % e)
    try:
        self.status = self.channel.recv_exit_status()
    except paramiko.SSHException as e:
        self.unknown("Get return code error: %s" % e)
    else:
        if self.status != 0:
            # BUG FIX: this previously read ``self.errors``, an attribute
            # that is never assigned anywhere in this method; the stderr
            # lines are stored in ``self.stderr``.
            self.unknown("Return code: %d , stderr: %s"
                         % (self.status, self.stderr))
        else:
            return self.stdout
    finally:
        self.logger.debug("Execute command finish.")
q265488 | slinky | validation | def slinky(filename, seconds_available, bucket_name, aws_key, aws_secret):
"""Simple program that creates an temp S3 link."""
if not os.environ.get('AWS_ACCESS_KEY_ID') and os.environ.get('AWS_SECRET_ACCESS_KEY'):
print 'Need to set environment variables for AWS access and create a slinky bucket.'
exit()
print create_temp_s3_link(filename, seconds_available, bucket_name) | python | {
"resource": ""
} |
def check_readable(self, timeout):
    """
    Poll ``self._stdout`` and return True if it is readable.

    :param float timeout: seconds to wait for I/O
    :return: True if readable, else False
    :rtype: boolean
    """
    readable, _, _ = select.select([self._stdout], [], [], timeout)
    return len(readable) > 0
def get_indices_list(s: Any) -> List[str]:
    """ Return the characters and escape codes of ``s`` as a list, with
    each escape code occupying a single element.  The element positions
    do not line up with the indexes in the original string.
    """
    index_map = get_indices(s)
    return [index_map[key] for key in sorted(index_map, key=int)]
def strip_codes(s: Any) -> str:
    """ Strip all color codes from a string.
    Returns an empty string for "falsey" inputs (except the number 0).
    """
    if s or (s == 0):
        return codepat.sub('', str(s))
    return ''
def init_build(self, asset, builder):
    """
    Hook invoked when the builder group collects files.

    Resolves the bundle's absolute path from the builder's input
    directory when only a relative path was configured.

    :type asset: static_bundle.builders.Asset
    :type builder: static_bundle.builders.StandardBuilder
    """
    if not self.abs_path:
        relative = utils.prepare_path(self.rel_bundle_path)
        self.abs_bundle_path = utils.prepare_path(
            [builder.config.input_dir, relative])
        self.abs_path = True
    self.input_dir = builder.config.input_dir
def add_file(self, *args):
    """
    Append one or more files to the bundle.

    :type: file_path: str|unicode
    """
    self.files.extend(FilePath(path, self) for path in args)
def add_directory(self, *args, **kwargs):
    """
    Append one or more directories to the bundle.

    :param exclusions: List of excluded paths
    :type path: str|unicode
    :type exclusions: list
    """
    exclusions = kwargs.get('exclusions', None)
    self.files.extend(
        DirectoryPath(path, self, exclusions=exclusions) for path in args)
def add_path_object(self, *args):
    """
    Append pre-built path objects to the bundle, binding each to it.

    :type: path_object: static_bundle.paths.AbstractPath
    """
    for path_object in args:
        path_object.bundle = self
        self.files.append(path_object)
def add_prepare_handler(self, prepare_handlers):
    """
    Register one prepare handler, or a sequence of them, on the bundle.

    :type: prepare_handler: static_bundle.handlers.AbstractPrepareHandler
    """
    if not isinstance(prepare_handlers, static_bundle.BUNDLE_ITERABLE_TYPES):
        prepare_handlers = [prepare_handlers]
    if self.prepare_handlers_chain is None:
        self.prepare_handlers_chain = []
    self.prepare_handlers_chain.extend(prepare_handlers)
def prepare(self):
    """
    Collect the bundle's files and run them through the prepare chain.

    Called when the builder collects files in the builder group.

    :rtype: list[static_bundle.files.StaticFileResult]
    """
    results = self.collect_files()
    handlers = self.prepare_handlers_chain
    if handlers is None:
        # No explicit chain configured: fall back to the default handlers.
        handlers = [LessCompilerPrepareHandler()]
    for handler in handlers:
        results = handler.prepare(results, self)
    return results
q265498 | FileNumber.filenumber_handle | validation | def filenumber_handle(self):
"""Count the files in the FTP folder and compare against the
warning/critical thresholds (Nagios plugin flow)."""
self.__results = []
self.__dirs = []
self.__files = []
self.__ftp = self.connect()
# Collect the raw LIST lines for the target path.
self.__ftp.dir(self.args.path, self.__results.append)
self.logger.debug("dir results: {}".format(self.__results))
self.quit()
status = self.ok
# NOTE(review): the "<DIR>" marker and the split()[3]/split()[2] name
# columns assume a Windows/IIS-style FTP listing -- confirm the server.
for data in self.__results:
if "<DIR>" in data:
self.__dirs.append(str(data.split()[3]))
else:
self.__files.append(str(data.split()[2]))
self.__result = len(self.__files)
self.logger.debug("result: {}".format(self.__result))
# Compare the value; critical is checked last so it wins when both
# thresholds are exceeded.
if self.__result > self.args.warning:
status = self.warning
if self.__result > self.args.critical:
status = self.critical
# Output
self.shortoutput = "Found {0} files in {1}.".format(self.__result,
self.args.path)
[self.longoutput.append(line)
for line in self.__results if self.__results]
# Nagios perfdata format: 'label'=value;warn;crit;min
self.perfdata.append("{path}={result};{warn};{crit};0;".format(
crit=self.args.critical,
warn=self.args.warning,
result=self.__result,
path=self.args.path))
self.logger.debug("Return status and output.")
status(self.output()) | python | {
"resource": ""
} |
def register_json(self, data):
    """
    Register the contents of ``data`` (a JSON array of sensor readings)
    into ``self.data``, keyed by sensor id.

    :param data: JSON string; each element must contain the id and value
        keys, and may contain unit/threshold/other configured keys.
    """
    j = json.loads(data)
    self.last_data_timestamp = \
        datetime.datetime.utcnow().replace(microsecond=0).isoformat()
    try:
        for v in j:
            # prepare the sensor entry container
            self.data[v[self.id_key]] = {}
            # add the mandatory entries
            self.data[v[self.id_key]][self.id_key] = \
                v[self.id_key]
            self.data[v[self.id_key]][self.value_key] = \
                v[self.value_key]
            # add the optional well known entries if provided
            if self.unit_key in v:
                self.data[v[self.id_key]][self.unit_key] = \
                    v[self.unit_key]
            if self.threshold_key in v:
                self.data[v[self.id_key]][self.threshold_key] = \
                    v[self.threshold_key]
            # add any further entries found
            for k in self.other_keys:
                if k in v:
                    self.data[v[self.id_key]][k] = v[k]
            # add the custom sensor time
            if self.sensor_time_key in v:
                # BUG FIX: this previously indexed self.data by the
                # timestamp VALUE (v[self.sensor_time_key]) instead of
                # the sensor id, raising KeyError and aborting the loop.
                self.data[v[self.id_key]][self.sensor_time_key] = \
                    v[self.sensor_time_key]
            # last: add the time the data was received (overwriting any
            # not properly defined timestamp that was already there)
            self.data[v[self.id_key]][self.time_key] = \
                self.last_data_timestamp
    except KeyError as e:
        print("The main key was not found on the serial input line: " +
              str(e))
    except ValueError as e:
        print("No valid JSON string received. Waiting for the next turn.")
        print("The error was: " + str(e))
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.