_id stringlengths 2 7 | title stringlengths 1 88 | partition stringclasses 3
values | text stringlengths 75 19.8k | language stringclasses 1
value | meta_information dict |
|---|---|---|---|---|---|
q265500 | DataStore.get_translated_data | validation | def get_translated_data(self):
"""
Translate the data with the translation table
"""
j = {}
for k in self.data:
d = {}
for l in self.data[k]:
d[self.translation_keys[l]] = self.data[k][l]
j[k] = d
return j | python | {
"resource": ""
} |
q265501 | DataStore.get_json | validation | def get_json(self, prettyprint=False, translate=True):
"""
Get the data in JSON form
"""
j = []
if translate:
d = self.get_translated_data()
else:
d = self.data
for k in d:
j.append(d[k])
if prettyprint:
j = json.dumps(j, indent=2, separators=(',',': '))
else:
j = json.dumps(j)
return j | python | {
"resource": ""
} |
q265502 | DataStore.get_json_tuples | validation | def get_json_tuples(self, prettyprint=False, translate=True):
"""
Get the data as JSON tuples
"""
j = self.get_json(prettyprint, translate)
if len(j) > 2:
if prettyprint:
j = j[1:-2] + ",\n"
else:
j = j[1:-1] + ","
else:
j = ""
return j | python | {
"resource": ""
} |
q265503 | ShirtsIORequest.get | validation | def get(self, url, params={}):
"""
Issues a GET request against the API, properly formatting the params
:param url: a string, the url you are requesting
:param params: a dict, the key-value of all the paramaters needed
in the request
:returns: a dict parsed of the JSON response
"""
params.update({'api_key': self.api_key})
try:
response = requests.get(self.host + url, params=params)
except RequestException as e:
response = e.args
return self.json_parse(response.content) | python | {
"resource": ""
} |
q265504 | ShirtsIORequest.post | validation | def post(self, url, params={}, files=None):
"""
Issues a POST request against the API, allows for multipart data uploads
:param url: a string, the url you are requesting
:param params: a dict, the key-value of all the parameters needed
in the request
:param files: a list, the list of tuples of files
:returns: a dict parsed of the JSON response
"""
params.update({'api_key': self.api_key})
try:
response = requests.post(self.host + url, data=params, files=files)
return self.json_parse(response.content)
except RequestException as e:
return self.json_parse(e.args) | python | {
"resource": ""
} |
q265505 | ConfigStore.load_values | validation | def load_values(self):
"""
Go through the env var map, transferring the values to this object
as attributes.
:raises: RuntimeError if a required env var isn't defined.
"""
for config_name, evar in self.evar_defs.items():
if evar.is_required and evar.name not in os.environ:
raise RuntimeError((
"Missing required environment variable: {evar_name}\n"
"{help_txt}"
).format(evar_name=evar.name, help_txt=evar.help_txt))
# Env var is present. Transfer its value over.
if evar.name in os.environ:
self[config_name] = os.environ.get(evar.name)
else:
self[config_name] = evar.default_val
# Perform any validations or transformations.
for filter in evar.filters:
current_val = self.get(config_name)
new_val = filter(current_val, evar)
self[config_name] = new_val
# This is the top-level filter that is often useful for checking
# the values of related env vars (instead of individual validation).
self._filter_all() | python | {
"resource": ""
} |
q265506 | embed_data | validation | def embed_data(request):
"""
Create a temporary directory with input data for the test.
The directory contents is copied from a directory with the same name as the module located in the same directory of
the test module.
"""
result = _EmbedDataFixture(request)
result.delete_data_dir()
result.create_data_dir()
yield result
result.delete_data_dir() | python | {
"resource": ""
} |
q265507 | _EmbedDataFixture.assert_equal_files | validation | def assert_equal_files(self, obtained_fn, expected_fn, fix_callback=lambda x:x, binary=False, encoding=None):
'''
Compare two files contents. If the files differ, show the diff and write a nice HTML
diff file into the data directory.
Searches for the filenames both inside and outside the data directory (in that order).
:param unicode obtained_fn: basename to obtained file into the data directory, or full path.
:param unicode expected_fn: basename to expected file into the data directory, or full path.
:param bool binary:
Thread both files as binary files.
:param unicode encoding:
File's encoding. If not None, contents obtained from file will be decoded using this
`encoding`.
:param callable fix_callback:
A callback to "fix" the contents of the obtained (first) file.
This callback receives a list of strings (lines) and must also return a list of lines,
changed as needed.
The resulting lines will be used to compare with the contents of expected_fn.
:param bool binary:
.. seealso:: zerotk.easyfs.GetFileContents
'''
import os
from zerotk.easyfs import GetFileContents, GetFileLines
__tracebackhide__ = True
import io
def FindFile(filename):
# See if this path exists in the data dir
data_filename = self.get_filename(filename)
if os.path.isfile(data_filename):
return data_filename
# If not, we might have already received a full path
if os.path.isfile(filename):
return filename
# If we didn't find anything, raise an error
from ._exceptions import MultipleFilesNotFound
raise MultipleFilesNotFound([filename, data_filename])
obtained_fn = FindFile(obtained_fn)
expected_fn = FindFile(expected_fn)
if binary:
obtained_lines = GetFileContents(obtained_fn, binary=True)
expected_lines = GetFileContents(expected_fn, binary=True)
assert obtained_lines == expected_lines
else:
obtained_lines = fix_callback(GetFileLines(obtained_fn, encoding=encoding))
expected_lines = GetFileLines(expected_fn, encoding=encoding)
if obtained_lines != expected_lines:
html_fn = os.path.splitext(obtained_fn)[0] + '.diff.html'
html_diff = self._generate_html_diff(
expected_fn, expected_lines, obtained_fn, obtained_lines)
with io.open(html_fn, 'w') as f:
f.write(html_diff)
import difflib
diff = ['FILES DIFFER:', obtained_fn, expected_fn]
diff += ['HTML DIFF: %s' % html_fn]
diff += difflib.context_diff(obtained_lines, expected_lines)
raise AssertionError('\n'.join(diff) + '\n') | python | {
"resource": ""
} |
q265508 | _EmbedDataFixture._generate_html_diff | validation | def _generate_html_diff(self, expected_fn, expected_lines, obtained_fn, obtained_lines):
"""
Returns a nice side-by-side diff of the given files, as a string.
"""
import difflib
differ = difflib.HtmlDiff()
return differ.make_file(
fromlines=expected_lines,
fromdesc=expected_fn,
tolines=obtained_lines,
todesc=obtained_fn,
) | python | {
"resource": ""
} |
q265509 | Network.add_peer | validation | def add_peer(self, peer):
"""
Add a peer or multiple peers to the PEERS variable, takes a single string or a list.
:param peer(list or string)
"""
if type(peer) == list:
for i in peer:
check_url(i)
self.PEERS.extend(peer)
elif type(peer) == str:
check_url(peer)
self.PEERS.append(peer) | python | {
"resource": ""
} |
q265510 | Network.remove_peer | validation | def remove_peer(self, peer):
"""
remove one or multiple peers from PEERS variable
:param peer(list or string):
"""
if type(peer) == list:
for x in peer:
check_url(x)
for i in self.PEERS:
if x in i:
self.PEERS.remove(i)
elif type(peer) == str:
check_url(peer)
for i in self.PEERS:
if peer == i:
self.PEERS.remove(i)
else:
raise ValueError('peer paramater did not pass url validation') | python | {
"resource": ""
} |
q265511 | Network.status | validation | def status(self):
"""
check the status of the network and the peers
:return: network_height, peer_status
"""
peer = random.choice(self.PEERS)
formatted_peer = 'http://{}:4001'.format(peer)
peerdata = requests.get(url=formatted_peer + '/api/peers/').json()['peers']
peers_status = {}
networkheight = max([x['height'] for x in peerdata])
for i in peerdata:
if 'http://{}:4001'.format(i['ip']) in self.PEERS:
peers_status.update({i['ip']: {
'height': i['height'],
'status': i['status'],
'version': i['version'],
'delay': i['delay'],
}})
return {
'network_height': networkheight,
'peer_status': peers_status
} | python | {
"resource": ""
} |
q265512 | Network.broadcast_tx | validation | def broadcast_tx(self, address, amount, secret, secondsecret=None, vendorfield=''):
"""broadcasts a transaction to the peerslist using ark-js library"""
peer = random.choice(self.PEERS)
park = Park(
peer,
4001,
constants.ARK_NETHASH,
'1.1.1'
)
return park.transactions().create(address, str(amount), vendorfield, secret, secondsecret) | python | {
"resource": ""
} |
q265513 | OrbApiFactory.register | validation | def register(self, service, name=''):
"""
Exposes a given service to this API.
"""
try:
is_model = issubclass(service, orb.Model)
except StandardError:
is_model = False
# expose an ORB table dynamically as a service
if is_model:
self.services[service.schema().dbname()] = (ModelService, service)
else:
super(OrbApiFactory, self).register(service, name=name) | python | {
"resource": ""
} |
q265514 | main | validation | def main():
""" Main entry point, expects doctopt arg dict as argd. """
global DEBUG
argd = docopt(USAGESTR, version=VERSIONSTR, script=SCRIPT)
DEBUG = argd['--debug']
width = parse_int(argd['--width'] or DEFAULT_WIDTH) or 1
indent = parse_int(argd['--indent'] or (argd['--INDENT'] or 0))
prepend = ' ' * (indent * 4)
if prepend and argd['--indent']:
# Smart indent, change max width based on indention.
width -= len(prepend)
userprepend = argd['--prepend'] or (argd['--PREPEND'] or '')
prepend = ''.join((prepend, userprepend))
if argd['--prepend']:
# Smart indent, change max width based on prepended text.
width -= len(userprepend)
userappend = argd['--append'] or (argd['--APPEND'] or '')
if argd['--append']:
width -= len(userappend)
if argd['WORDS']:
# Try each argument as a file name.
argd['WORDS'] = (
(try_read_file(w) if len(w) < 256 else w)
for w in argd['WORDS']
)
words = ' '.join((w for w in argd['WORDS'] if w))
else:
# No text/filenames provided, use stdin for input.
words = read_stdin()
block = FormatBlock(words).iter_format_block(
chars=argd['--chars'],
fill=argd['--fill'],
prepend=prepend,
strip_first=argd['--stripfirst'],
append=userappend,
strip_last=argd['--striplast'],
width=width,
newlines=argd['--newlines'],
lstrip=argd['--lstrip'],
)
for i, line in enumerate(block):
if argd['--enumerate']:
# Current line number format supports up to 999 lines before
# messing up. Who would format 1000 lines like this anyway?
print('{: >3}: {}'.format(i + 1, line))
else:
print(line)
return 0 | python | {
"resource": ""
} |
q265515 | debug | validation | def debug(*args, **kwargs):
""" Print a message only if DEBUG is truthy. """
if not (DEBUG and args):
return None
# Include parent class name when given.
parent = kwargs.get('parent', None)
with suppress(KeyError):
kwargs.pop('parent')
# Go back more than once when given.
backlevel = kwargs.get('back', 1)
with suppress(KeyError):
kwargs.pop('back')
frame = inspect.currentframe()
# Go back a number of frames (usually 1).
while backlevel > 0:
frame = frame.f_back
backlevel -= 1
fname = os.path.split(frame.f_code.co_filename)[-1]
lineno = frame.f_lineno
if parent:
func = '{}.{}'.format(parent.__class__.__name__, frame.f_code.co_name)
else:
func = frame.f_code.co_name
lineinfo = '{}:{} {}: '.format(
C(fname, 'yellow'),
C(str(lineno).ljust(4), 'blue'),
C().join(C(func, 'magenta'), '()').ljust(20)
)
# Patch args to stay compatible with print().
pargs = list(C(a, 'green').str() for a in args)
pargs[0] = ''.join((lineinfo, pargs[0]))
print_err(*pargs, **kwargs) | python | {
"resource": ""
} |
q265516 | parse_int | validation | def parse_int(s):
""" Parse a string as an integer.
Exit with a message on failure.
"""
try:
val = int(s)
except ValueError:
print_err('\nInvalid integer: {}'.format(s))
sys.exit(1)
return val | python | {
"resource": ""
} |
q265517 | try_read_file | validation | def try_read_file(s):
""" If `s` is a file name, read the file and return it's content.
Otherwise, return the original string.
Returns None if the file was opened, but errored during reading.
"""
try:
with open(s, 'r') as f:
data = f.read()
except FileNotFoundError:
# Not a file name.
return s
except EnvironmentError as ex:
print_err('\nFailed to read file: {}\n {}'.format(s, ex))
return None
return data | python | {
"resource": ""
} |
q265518 | Vim.wait | validation | def wait(self, timeout=None):
"""
Wait for response until timeout.
If timeout is specified to None, ``self.timeout`` is used.
:param float timeout: seconds to wait I/O
"""
if timeout is None:
timeout = self._timeout
while self._process.check_readable(timeout):
self._flush() | python | {
"resource": ""
} |
q265519 | make_seekable | validation | def make_seekable(fileobj):
"""
If the file-object is not seekable, return ArchiveTemp of the fileobject,
otherwise return the file-object itself
"""
if sys.version_info < (3, 0) and isinstance(fileobj, file):
filename = fileobj.name
fileobj = io.FileIO(fileobj.fileno(), closefd=False)
fileobj.name = filename
assert isinstance(fileobj, io.IOBase), \
"fileobj must be an instance of io.IOBase or a file, got %s" \
% type(fileobj)
return fileobj if fileobj.seekable() \
else ArchiveTemp(fileobj) | python | {
"resource": ""
} |
q265520 | Tracy.init_app | validation | def init_app(self, app):
"""Setup before_request, after_request handlers for tracing.
"""
app.config.setdefault("TRACY_REQUIRE_CLIENT", False)
if not hasattr(app, 'extensions'):
app.extensions = {}
app.extensions['restpoints'] = self
app.before_request(self._before)
app.after_request(self._after) | python | {
"resource": ""
} |
q265521 | Tracy._before | validation | def _before(self):
"""Records the starting time of this reqeust.
"""
# Don't trace excluded routes.
if request.path in self.excluded_routes:
request._tracy_exclude = True
return
request._tracy_start_time = monotonic()
client = request.headers.get(trace_header_client, None)
require_client = current_app.config.get("TRACY_REQUIRE_CLIENT", False)
if client is None and require_client:
abort(400, "Missing %s header" % trace_header_client)
request._tracy_client = client
request._tracy_id = request.headers.get(trace_header_id, new_id()) | python | {
"resource": ""
} |
q265522 | Tracy._after | validation | def _after(self, response):
"""Calculates the request duration, and adds a transaction
ID to the header.
"""
# Ignore excluded routes.
if getattr(request, '_tracy_exclude', False):
return response
duration = None
if getattr(request, '_tracy_start_time', None):
duration = monotonic() - request._tracy_start_time
# Add Trace_ID header.
trace_id = None
if getattr(request, '_tracy_id', None):
trace_id = request._tracy_id
response.headers[trace_header_id] = trace_id
# Get the invoking client.
trace_client = None
if getattr(request, '_tracy_client', None):
trace_client = request._tracy_client
# Extra log kwargs.
d = {'status_code': response.status_code,
'url': request.base_url,
'client_ip': request.remote_addr,
'trace_name': trace_client,
'trace_id': trace_id,
'trace_duration': duration}
logger.info(None, extra=d)
return response | python | {
"resource": ""
} |
q265523 | FormatBlock.expand_words | validation | def expand_words(self, line, width=60):
""" Insert spaces between words until it is wide enough for `width`.
"""
if not line.strip():
return line
# Word index, which word to insert on (cycles between 1->len(words))
wordi = 1
while len(strip_codes(line)) < width:
wordendi = self.find_word_end(line, wordi)
if wordendi < 0:
# Reached the end?, try starting at the front again.
wordi = 1
wordendi = self.find_word_end(line, wordi)
if wordendi < 0:
# There are no spaces to expand, just prepend one.
line = ''.join((' ', line))
else:
line = ' '.join((line[:wordendi], line[wordendi:]))
wordi += 1
# Don't push a single word all the way to the right.
if ' ' not in strip_codes(line).strip():
return line.replace(' ', '')
return line | python | {
"resource": ""
} |
q265524 | FormatBlock.iter_add_text | validation | def iter_add_text(self, lines, prepend=None, append=None):
""" Prepend or append text to lines. Yields each line. """
if (prepend is None) and (append is None):
yield from lines
else:
# Build up a format string, with optional {prepend}/{append}
fmtpcs = ['{prepend}'] if prepend else []
fmtpcs.append('{line}')
if append:
fmtpcs.append('{append}')
fmtstr = ''.join(fmtpcs)
yield from (
fmtstr.format(prepend=prepend, line=line, append=append)
for line in lines
) | python | {
"resource": ""
} |
q265525 | FormatBlock.iter_char_block | validation | def iter_char_block(self, text=None, width=60, fmtfunc=str):
""" Format block by splitting on individual characters. """
if width < 1:
width = 1
text = (self.text if text is None else text) or ''
text = ' '.join(text.split('\n'))
escapecodes = get_codes(text)
if not escapecodes:
# No escape codes, use simple method.
yield from (
fmtfunc(text[i:i + width])
for i in range(0, len(text), width)
)
else:
# Ignore escape codes when counting.
blockwidth = 0
block = []
for i, s in enumerate(get_indices_list(text)):
block.append(s)
if len(s) == 1:
# Normal char.
blockwidth += 1
if blockwidth == width:
yield ''.join(block)
block = []
blockwidth = 0
if block:
yield ''.join(block) | python | {
"resource": ""
} |
q265526 | FormatBlock.iter_space_block | validation | def iter_space_block(self, text=None, width=60, fmtfunc=str):
""" Format block by wrapping on spaces. """
if width < 1:
width = 1
curline = ''
text = (self.text if text is None else text) or ''
for word in text.split():
possibleline = ' '.join((curline, word)) if curline else word
# Ignore escape codes.
codelen = sum(len(s) for s in get_codes(possibleline))
reallen = len(possibleline) - codelen
if reallen > width:
# This word would exceed the limit, start a new line with
# it.
yield fmtfunc(curline)
curline = word
else:
curline = possibleline
# yield the last line.
if curline:
yield fmtfunc(curline) | python | {
"resource": ""
} |
q265527 | FormatBlock.squeeze_words | validation | def squeeze_words(line, width=60):
""" Remove spaces in between words until it is small enough for
`width`.
This will always leave at least one space between words,
so it may not be able to get below `width` characters.
"""
# Start removing spaces to "squeeze" the text, leaving at least one.
while (' ' in line) and (len(line) > width):
# Remove two spaces from the end, replace with one.
head, _, tail = line.rpartition(' ')
line = ' '.join((head, tail))
return line | python | {
"resource": ""
} |
q265528 | CachedHTTPBL.check_ip | validation | def check_ip(self, ip):
"""
Check IP trough the httpBL API
:param ip: ipv4 ip address
:return: httpBL results or None if any error is occurred
"""
self._last_result = None
if is_valid_ipv4(ip):
key = None
if self._use_cache:
key = self._make_cache_key(ip)
self._last_result = self._cache.get(key, version=self._cache_version)
if self._last_result is None:
# request httpBL API
error, age, threat, type = self._request_httpbl(ip)
if error == 127 or error == 0:
self._last_result = {
'error': error,
'age': age,
'threat': threat,
'type': type
}
if self._use_cache:
self._cache.set(key, self._last_result, timeout=self._api_timeout, version=self._cache_version)
if self._last_result is not None and settings.CACHED_HTTPBL_USE_LOGGING:
logger.info(
'httpBL check ip: {0}; '
'httpBL result: error: {1}, age: {2}, threat: {3}, type: {4}'.format(ip,
self._last_result['error'],
self._last_result['age'],
self._last_result['threat'],
self._last_result['type']
)
)
return self._last_result | python | {
"resource": ""
} |
q265529 | CachedHTTPBL.is_threat | validation | def is_threat(self, result=None, harmless_age=None, threat_score=None, threat_type=None):
"""
Check if IP is a threat
:param result: httpBL results; if None, then results from last check_ip() used (optional)
:param harmless_age: harmless age for check if httpBL age is older (optional)
:param threat_score: threat score for check if httpBL threat is lower (optional)
:param threat_type: threat type, if not equal httpBL score type, then return False (optional)
:return: True or False
"""
harmless_age = harmless_age if harmless_age is not None else settings.CACHED_HTTPBL_HARMLESS_AGE
threat_score = threat_score if threat_score is not None else settings.CACHED_HTTPBL_THREAT_SCORE
threat_type = threat_type if threat_type is not None else -1
result = result if result is not None else self._last_result
threat = False
if result is not None:
if result['age'] < harmless_age and result['threat'] > threat_score:
threat = True
if threat_type > -1:
if result['type'] & threat_type:
threat = True
else:
threat = False
return threat | python | {
"resource": ""
} |
q265530 | CachedHTTPBL.is_suspicious | validation | def is_suspicious(self, result=None):
"""
Check if IP is suspicious
:param result: httpBL results; if None, then results from last check_ip() used (optional)
:return: True or False
"""
result = result if result is not None else self._last_result
suspicious = False
if result is not None:
suspicious = True if result['type'] > 0 else False
return suspicious | python | {
"resource": ""
} |
q265531 | CachedHTTPBL.invalidate_ip | validation | def invalidate_ip(self, ip):
"""
Invalidate httpBL cache for IP address
:param ip: ipv4 IP address
"""
if self._use_cache:
key = self._make_cache_key(ip)
self._cache.delete(key, version=self._cache_version) | python | {
"resource": ""
} |
q265532 | CachedHTTPBL.invalidate_cache | validation | def invalidate_cache(self):
"""
Invalidate httpBL cache
"""
if self._use_cache:
self._cache_version += 1
self._cache.increment('cached_httpbl_{0}_version'.format(self._api_key)) | python | {
"resource": ""
} |
q265533 | Consumer.run | validation | def run(self):
"""Runs the consumer."""
self.log.debug('consumer is running...')
self.running = True
while self.running:
self.upload()
self.log.debug('consumer exited.') | python | {
"resource": ""
} |
q265534 | Consumer.upload | validation | def upload(self):
"""Upload the next batch of items, return whether successful."""
success = False
batch = self.next()
if len(batch) == 0:
return False
try:
self.request(batch)
success = True
except Exception as e:
self.log.error('error uploading: %s', e)
success = False
if self.on_error:
self.on_error(e, batch)
finally:
# cleanup
for item in batch:
self.queue.task_done()
return success | python | {
"resource": ""
} |
q265535 | Consumer.next | validation | def next(self):
"""Return the next batch of items to upload."""
queue = self.queue
items = []
item = self.next_item()
if item is None:
return items
items.append(item)
while len(items) < self.upload_size and not queue.empty():
item = self.next_item()
if item:
items.append(item)
return items | python | {
"resource": ""
} |
q265536 | Consumer.next_item | validation | def next_item(self):
"""Get a single item from the queue."""
queue = self.queue
try:
item = queue.get(block=True, timeout=5)
return item
except Exception:
return None | python | {
"resource": ""
} |
q265537 | Consumer.request | validation | def request(self, batch, attempt=0):
"""Attempt to upload the batch and retry before raising an error """
try:
q = self.api.new_queue()
for msg in batch:
q.add(msg['event'], msg['value'], source=msg['source'])
q.submit()
except:
if attempt > self.retries:
raise
self.request(batch, attempt+1) | python | {
"resource": ""
} |
q265538 | _camelcase_to_underscore | validation | def _camelcase_to_underscore(url):
"""
Translate camelCase into underscore format.
>>> _camelcase_to_underscore('minutesBetweenSummaries')
'minutes_between_summaries'
"""
def upper2underscore(text):
for char in text:
if char.islower():
yield char
else:
yield '_'
if char.isalpha():
yield char.lower()
return ''.join(upper2underscore(url)) | python | {
"resource": ""
} |
q265539 | create_tree | validation | def create_tree(endpoints):
"""
Creates the Trello endpoint tree.
>>> r = {'1': { \
'actions': {'METHODS': {'GET'}}, \
'boards': { \
'members': {'METHODS': {'DELETE'}}}} \
}
>>> r == create_tree([ \
'GET /1/actions/[idAction]', \
'DELETE /1/boards/[board_id]/members/[idMember]'])
True
"""
tree = {}
for method, url, doc in endpoints:
path = [p for p in url.strip('/').split('/')]
here = tree
# First element (API Version).
version = path[0]
here.setdefault(version, {})
here = here[version]
# The rest of elements of the URL.
for p in path[1:]:
part = _camelcase_to_underscore(p)
here.setdefault(part, {})
here = here[part]
# Allowed HTTP methods.
if not 'METHODS' in here:
here['METHODS'] = [[method, doc]]
else:
if not method in here['METHODS']:
here['METHODS'].append([method, doc])
return tree | python | {
"resource": ""
} |
q265540 | main | validation | def main():
"""
Prints the complete YAML.
"""
ep = requests.get(TRELLO_API_DOC).content
root = html.fromstring(ep)
links = root.xpath('//a[contains(@class, "reference internal")]/@href')
pages = [requests.get(TRELLO_API_DOC + u)
for u in links if u.endswith('index.html')]
endpoints = []
for page in pages:
root = html.fromstring(page.content)
sections = root.xpath('//div[@class="section"]/h2/..')
for sec in sections:
ep_html = etree.tostring(sec).decode('utf-8')
ep_text = html2text(ep_html).splitlines()
match = EP_DESC_REGEX.match(ep_text[0])
if not match:
continue
ep_method, ep_url = match.groups()
ep_text[0] = ' '.join([ep_method, ep_url])
ep_doc = b64encode(gzip.compress('\n'.join(ep_text).encode('utf-8')))
endpoints.append((ep_method, ep_url, ep_doc))
print(yaml.dump(create_tree(endpoints))) | python | {
"resource": ""
} |
q265541 | Wmi.query | validation | def query(self, wql):
"""Connect by wmi and run wql."""
try:
self.__wql = ['wmic', '-U',
self.args.domain + '\\' + self.args.user + '%' + self.args.password,
'//' + self.args.host,
'--namespace', self.args.namespace,
'--delimiter', self.args.delimiter,
wql]
self.logger.debug("wql: {}".format(self.__wql))
self.__output = subprocess.check_output(self.__wql)
self.logger.debug("output: {}".format(self.__output))
self.logger.debug("wmi connect succeed.")
self.__wmi_output = self.__output.splitlines()[1:]
self.logger.debug("wmi_output: {}".format(self.__wmi_output))
self.__csv_header = csv.DictReader(self.__wmi_output, delimiter='|')
self.logger.debug("csv_header: {}".format(self.__csv_header))
return list(self.__csv_header)
except subprocess.CalledProcessError as e:
self.unknown("Connect by wmi and run wql error: %s" % e) | python | {
"resource": ""
} |
q265542 | DataReporter.log | validation | def log(self, url=None, credentials=None, do_verify_certificate=True):
"""
Wrapper for the other log methods, decide which one based on the
URL parameter.
"""
if url is None:
url = self.url
if re.match("file://", url):
self.log_file(url)
elif re.match("https://", url) or re.match("http://", url):
self.log_post(url, credentials, do_verify_certificate)
else:
self.log_stdout() | python | {
"resource": ""
} |
q265543 | DataReporter.log_file | validation | def log_file(self, url=None):
"""
Write to a local log file
"""
if url is None:
url = self.url
f = re.sub("file://", "", url)
try:
with open(f, "a") as of:
of.write(str(self.store.get_json_tuples(True)))
except IOError as e:
print(e)
print("Could not write the content to the file..") | python | {
"resource": ""
} |
q265544 | DataReporter.log_post | validation | def log_post(self, url=None, credentials=None, do_verify_certificate=True):
"""
Write to a remote host via HTTP POST
"""
if url is None:
url = self.url
if credentials is None:
credentials = self.credentials
if do_verify_certificate is None:
do_verify_certificate = self.do_verify_certificate
if credentials and "base64" in credentials:
headers = {"Content-Type": "application/json", \
'Authorization': 'Basic %s' % credentials["base64"]}
else:
headers = {"Content-Type": "application/json"}
try:
request = requests.post(url, headers=headers, \
data=self.store.get_json(), verify=do_verify_certificate)
except httplib.IncompleteRead as e:
request = e.partial | python | {
"resource": ""
} |
q265545 | DataReporter.register_credentials | validation | def register_credentials(self, credentials=None, user=None, user_file=None, password=None, password_file=None):
"""
Helper method to store username and password
"""
# lets store all kind of credential data into this dict
if credentials is not None:
self.credentials = credentials
else:
self.credentials = {}
# set the user from CLI or file
if user:
self.credentials["user"] = user
elif user_file:
with open(user_file, "r") as of:
# what would the file entry look like?
pattern = re.compile("^user: ")
for l in of:
if re.match(pattern, l):
# strip away the newline
l = l[0:-1]
self.credentials["user"] = re.sub(pattern, "", l)
# remove any surrounding quotes
if self.credentials["user"][0:1] == '"' and \
self.credentials["user"][-1:] == '"':
self.credentials["user"] = self.credentials["user"][1:-1]
# set the password from CLI or file
if password:
self.credentials["password"] = password
elif password_file:
with open(password_file, "r") as of:
# what would the file entry look like?
pattern = re.compile("^password: ")
for l in of:
if re.match(pattern, l):
# strip away the newline
l = l[0:-1]
self.credentials["password"] = \
re.sub(pattern, "", l)
# remove any surrounding quotes
if self.credentials["password"][0:1] == '"' and \
self.credentials["password"][-1:] == '"':
self.credentials["password"] = \
self.credentials["password"][1:-1]
# if both user and password is set,
# 1. encode to base 64 for basic auth
if "user" in self.credentials and "password" in self.credentials:
c = self.credentials["user"] + ":" + self.credentials["password"]
self.credentials["base64"] = b64encode(c.encode()).decode("ascii") | python | {
"resource": ""
} |
q265546 | set_connection | validation | def set_connection(host=None, database=None, user=None, password=None):
"""Set connection parameters. Call set_connection with no arguments to clear."""
c.CONNECTION['HOST'] = host
c.CONNECTION['DATABASE'] = database
c.CONNECTION['USER'] = user
c.CONNECTION['PASSWORD'] = password | python | {
"resource": ""
} |
q265547 | set_delegate | validation | def set_delegate(address=None, pubkey=None, secret=None):
"""Set delegate parameters. Call set_delegate with no arguments to clear."""
c.DELEGATE['ADDRESS'] = address
c.DELEGATE['PUBKEY'] = pubkey
c.DELEGATE['PASSPHRASE'] = secret | python | {
"resource": ""
} |
q265548 | Address.balance | validation | def balance(address):
"""
Takes a single address and returns the current balance.
"""
txhistory = Address.transactions(address)
balance = 0
for i in txhistory:
if i.recipientId == address:
balance += i.amount
if i.senderId == address:
balance -= (i.amount + i.fee)
delegates = Delegate.delegates()
for i in delegates:
if address == i.address:
forged_blocks = Delegate.blocks(i.pubkey)
for block in forged_blocks:
balance += (block.reward + block.totalFee)
if balance < 0:
height = Node.height()
logger.fatal('Negative balance for address {0}, Nodeheight: {1)'.format(address, height))
raise NegativeBalanceError('Negative balance for address {0}, Nodeheight: {1)'.format(address, height))
return balance | python | {
"resource": ""
} |
q265549 | Address.balance_over_time | validation | def balance_over_time(address):
"""returns a list of named tuples, x.timestamp, x.amount including block rewards"""
forged_blocks = None
txhistory = Address.transactions(address)
delegates = Delegate.delegates()
for i in delegates:
if address == i.address:
forged_blocks = Delegate.blocks(i.pubkey)
balance_over_time = []
balance = 0
block = 0
Balance = namedtuple(
'balance',
'timestamp amount')
for tx in txhistory:
if forged_blocks:
while forged_blocks[block].timestamp <= tx.timestamp:
balance += (forged_blocks[block].reward + forged_blocks[block].totalFee)
balance_over_time.append(Balance(timestamp=forged_blocks[block].timestamp, amount=balance))
block += 1
if tx.senderId == address:
balance -= (tx.amount + tx.fee)
res = Balance(timestamp=tx.timestamp, amount=balance)
balance_over_time.append(res)
if tx.recipientId == address:
balance += tx.amount
res = Balance(timestamp=tx.timestamp, amount=balance)
balance_over_time.append(res)
if forged_blocks and block <= len(forged_blocks) - 1:
if forged_blocks[block].timestamp > txhistory[-1].timestamp:
for i in forged_blocks[block:]:
balance += (i.reward + i.totalFee)
res = Balance(timestamp=i.timestamp, amount=balance)
balance_over_time.append(res)
return balance_over_time | python | {
"resource": ""
} |
q265550 | value_to_bool | validation | def value_to_bool(config_val, evar):
"""
Massages the 'true' and 'false' strings to bool equivalents.
:param str config_val: The env var value.
:param EnvironmentVariable evar: The EVar object we are validating
a value for.
:rtype: bool
:return: True or False, depending on the value.
"""
if not config_val:
return False
if config_val.strip().lower() == 'true':
return True
else:
return False | python | {
"resource": ""
} |
q265551 | validate_is_not_none | validation | def validate_is_not_none(config_val, evar):
"""
If the value is ``None``, fail validation.
:param str config_val: The env var value.
:param EnvironmentVariable evar: The EVar object we are validating
a value for.
:raises: ValueError if the config value is None.
"""
if config_val is None:
raise ValueError(
"Value for environment variable '{evar_name}' can't "
"be empty.".format(evar_name=evar.name))
return config_val | python | {
"resource": ""
} |
q265552 | validate_is_boolean_true | validation | def validate_is_boolean_true(config_val, evar):
"""
Make sure the value evaluates to boolean True.
:param str config_val: The env var value.
:param EnvironmentVariable evar: The EVar object we are validating
a value for.
:raises: ValueError if the config value evaluates to boolean False.
"""
if config_val is None:
raise ValueError(
"Value for environment variable '{evar_name}' can't "
"be empty.".format(evar_name=evar.name))
return config_val | python | {
"resource": ""
} |
q265553 | value_to_python_log_level | validation | def value_to_python_log_level(config_val, evar):
"""
Convert an evar value into a Python logging level constant.
:param str config_val: The env var value.
:param EnvironmentVariable evar: The EVar object we are validating
a value for.
:return: A validated string.
:raises: ValueError if the log level is invalid.
"""
if not config_val:
config_val = evar.default_val
config_val = config_val.upper()
# noinspection PyProtectedMember
return logging._checkLevel(config_val) | python | {
"resource": ""
} |
q265554 | register_range_type | validation | def register_range_type(pgrange, pyrange, conn):
"""
Register a new range type as a PostgreSQL range.
>>> register_range_type("int4range", intrange, conn)
The above will make sure intrange is regarded as an int4range for queries
and that int4ranges will be cast into intrange when fetching rows.
pgrange should be the full name including schema for the custom range type.
Note that adaption is global, meaning if a range type is passed to a regular
psycopg2 connection it will adapt it to its proper range type. Parsing of
rows from the database however is not global and just set on a per connection
basis.
"""
register_adapter(pyrange, partial(adapt_range, pgrange))
register_range_caster(
pgrange, pyrange, *query_range_oids(pgrange, conn), scope=conn) | python | {
"resource": ""
} |
q265555 | get_api_error | validation | def get_api_error(response):
"""Acquires the correct error for a given response.
:param requests.Response response: HTTP error response
:returns: the appropriate error for a given response
:rtype: APIError
"""
error_class = _status_code_to_class.get(response.status_code, APIError)
return error_class(response) | python | {
"resource": ""
} |
q265556 | get_param_values | validation | def get_param_values(request, model=None):
"""
Converts the request parameters to Python.
:param request: <pyramid.request.Request> || <dict>
:return: <dict>
"""
if type(request) == dict:
return request
params = get_payload(request)
# support in-place editing formatted request
try:
del params['pk']
params[params.pop('name')] = params.pop('value')
except KeyError:
pass
return {
k.rstrip('[]'): safe_eval(v) if not type(v) == list else [safe_eval(sv) for sv in v]
for k, v in params.items()
} | python | {
"resource": ""
} |
q265557 | get_context | validation | def get_context(request, model=None):
"""
Extracts ORB context information from the request.
:param request: <pyramid.request.Request>
:param model: <orb.Model> || None
:return: {<str> key: <variant> value} values, <orb.Context>
"""
# convert request parameters to python
param_values = get_param_values(request, model=model)
# extract the full orb context if provided
context = param_values.pop('orb_context', {})
if isinstance(context, (unicode, str)):
context = projex.rest.unjsonify(context)
# otherwise, extract the limit information
has_limit = 'limit' in context or 'limit' in param_values
# create the new orb context
orb_context = orb.Context(**context)
# build up context information from the request params
used = set()
query_context = {}
for key in orb.Context.Defaults:
if key in param_values:
used.add(key)
query_context[key] = param_values.get(key)
# generate a simple query object
schema_values = {}
if model:
# extract match dict items
for key, value in request.matchdict.items():
if model.schema().column(key, raise_=False):
schema_values[key] = value
# extract payload items
for key, value in param_values.items():
root_key = key.split('.')[0]
schema_object = model.schema().column(root_key, raise_=False) or model.schema().collector(root_key)
if schema_object:
value = param_values.pop(key)
if isinstance(schema_object, orb.Collector) and type(value) not in (tuple, list):
value = [value]
schema_values[key] = value
# generate the base context information
query_context['scope'] = {
'request': request
}
# include any request specific scoping or information from the request
# first, look for default ORB context for all requests
try:
default_context = request.orb_default_context
# then, look for scope specific information for all requests
except AttributeError:
try:
query_context['scope'].update(request.orb_scope)
except AttributeError:
pass
# if request specific context defaults exist, then
# merge them with the rest of the query context
else:
if 'scope' in default_context:
query_context['scope'].update(default_context.pop('scope'))
# setup defaults based on the request
for k, v in default_context.items():
query_context.setdefault(k, v)
orb_context.update(query_context)
return schema_values, orb_context | python | {
"resource": ""
} |
q265558 | OrderBook._real_time_thread | validation | def _real_time_thread(self):
"""Handles real-time updates to the order book."""
while self.ws_client.connected():
if self.die:
break
if self.pause:
sleep(5)
continue
message = self.ws_client.receive()
if message is None:
break
message_type = message['type']
if message_type == 'error':
continue
if message['sequence'] <= self.sequence:
continue
if message_type == 'open':
self._handle_open(message)
elif message_type == 'match':
self._handle_match(message)
elif message_type == 'done':
self._handle_done(message)
elif message_type == 'change':
self._handle_change(message)
else:
continue
self.ws_client.disconnect() | python | {
"resource": ""
} |
q265559 | WSClient._keep_alive_thread | validation | def _keep_alive_thread(self):
"""Used exclusively as a thread which keeps the WebSocket alive."""
while True:
with self._lock:
if self.connected():
self._ws.ping()
else:
self.disconnect()
self._thread = None
return
sleep(30) | python | {
"resource": ""
} |
q265560 | WSClient.connect | validation | def connect(self):
"""Connects and subscribes to the WebSocket Feed."""
if not self.connected():
self._ws = create_connection(self.WS_URI)
message = {
'type':self.WS_TYPE,
'product_id':self.WS_PRODUCT_ID
}
self._ws.send(dumps(message))
# There will be only one keep alive thread per client instance
with self._lock:
if not self._thread:
thread = Thread(target=self._keep_alive_thread, args=[])
thread.start() | python | {
"resource": ""
} |
q265561 | cached_httpbl_exempt | validation | def cached_httpbl_exempt(view_func):
"""
Marks a view function as being exempt from the cached httpbl view protection.
"""
# We could just do view_func.cached_httpbl_exempt = True, but decorators
# are nicer if they don't have side-effects, so we return a new
# function.
def wrapped_view(*args, **kwargs):
return view_func(*args, **kwargs)
wrapped_view.cached_httpbl_exempt = True
return wraps(view_func, assigned=available_attrs(view_func))(wrapped_view) | python | {
"resource": ""
} |
q265562 | CounterPool.get_conn | validation | def get_conn(self, aws_access_key=None, aws_secret_key=None):
'''
Hook point for overriding how the CounterPool gets its connection to
AWS.
'''
return boto.connect_dynamodb(
aws_access_key_id=aws_access_key,
aws_secret_access_key=aws_secret_key,
) | python | {
"resource": ""
} |
q265563 | CounterPool.get_schema | validation | def get_schema(self):
'''
Hook point for overriding how the CounterPool determines the schema
to be used when creating a missing table.
'''
if not self.schema:
raise NotImplementedError(
'You must provide a schema value or override the get_schema method'
)
return self.conn.create_schema(**self.schema) | python | {
"resource": ""
} |
q265564 | CounterPool.create_table | validation | def create_table(self):
'''
Hook point for overriding how the CounterPool creates a new table
in DynamooDB
'''
table = self.conn.create_table(
name=self.get_table_name(),
schema=self.get_schema(),
read_units=self.get_read_units(),
write_units=self.get_write_units(),
)
if table.status != 'ACTIVE':
table.refresh(wait_for_active=True, retry_seconds=1)
return table | python | {
"resource": ""
} |
q265565 | CounterPool.get_table | validation | def get_table(self):
'''
Hook point for overriding how the CounterPool transforms table_name
into a boto DynamoDB Table object.
'''
if hasattr(self, '_table'):
table = self._table
else:
try:
table = self.conn.get_table(self.get_table_name())
except boto.exception.DynamoDBResponseError:
if self.auto_create_table:
table = self.create_table()
else:
raise
self._table = table
return table | python | {
"resource": ""
} |
q265566 | CounterPool.create_item | validation | def create_item(self, hash_key, start=0, extra_attrs=None):
'''
Hook point for overriding how the CouterPool creates a DynamoDB item
for a given counter when an existing item can't be found.
'''
table = self.get_table()
now = datetime.utcnow().replace(microsecond=0).isoformat()
attrs = {
'created_on': now,
'modified_on': now,
'count': start,
}
if extra_attrs:
attrs.update(extra_attrs)
item = table.new_item(
hash_key=hash_key,
attrs=attrs,
)
return item | python | {
"resource": ""
} |
q265567 | CounterPool.get_item | validation | def get_item(self, hash_key, start=0, extra_attrs=None):
'''
Hook point for overriding how the CouterPool fetches a DynamoDB item
for a given counter.
'''
table = self.get_table()
try:
item = table.get_item(hash_key=hash_key)
except DynamoDBKeyNotFoundError:
item = None
if item is None:
item = self.create_item(
hash_key=hash_key,
start=start,
extra_attrs=extra_attrs,
)
return item | python | {
"resource": ""
} |
q265568 | CounterPool.get_counter | validation | def get_counter(self, name, start=0):
'''
Gets the DynamoDB item behind a counter and ties it to a Counter
instace.
'''
item = self.get_item(hash_key=name, start=start)
counter = Counter(dynamo_item=item, pool=self)
return counter | python | {
"resource": ""
} |
q265569 | many_to_one | validation | def many_to_one(clsname, **kw):
"""Use an event to build a many-to-one relationship on a class.
This makes use of the :meth:`.References._reference_table` method
to generate a full foreign key relationship to the remote table.
"""
@declared_attr
def m2o(cls):
cls._references((cls.__name__, clsname))
return relationship(clsname, **kw)
return m2o | python | {
"resource": ""
} |
q265570 | one_to_many | validation | def one_to_many(clsname, **kw):
"""Use an event to build a one-to-many relationship on a class.
This makes use of the :meth:`.References._reference_table` method
to generate a full foreign key relationship from the remote table.
"""
@declared_attr
def o2m(cls):
cls._references((clsname, cls.__name__))
return relationship(clsname, **kw)
return o2m | python | {
"resource": ""
} |
q265571 | DjeffParser.handle_data | validation | def handle_data(self, data):
"""
Djeffify data between tags
"""
if data.strip():
data = djeffify_string(data)
self.djhtml += data | python | {
"resource": ""
} |
q265572 | References._reference_table | validation | def _reference_table(cls, ref_table):
"""Create a foreign key reference from the local class to the given remote
table.
Adds column references to the declarative class and adds a
ForeignKeyConstraint.
"""
# create pairs of (Foreign key column, primary key column)
cols = [(sa.Column(), refcol) for refcol in ref_table.primary_key]
# set "tablename_colname = Foreign key Column" on the local class
for col, refcol in cols:
setattr(cls, "%s_%s" % (ref_table.name, refcol.name), col)
# add a ForeignKeyConstraint([local columns], [remote columns])
cls.__table__.append_constraint(sa.ForeignKeyConstraint(*zip(*cols))) | python | {
"resource": ""
} |
q265573 | prepare_path | validation | def prepare_path(path):
"""
Path join helper method
Join paths if list passed
:type path: str|unicode|list
:rtype: str|unicode
"""
if type(path) == list:
return os.path.join(*path)
return path | python | {
"resource": ""
} |
q265574 | read_from_file | validation | def read_from_file(file_path, encoding="utf-8"):
"""
Read helper method
:type file_path: str|unicode
:type encoding: str|unicode
:rtype: str|unicode
"""
with codecs.open(file_path, "r", encoding) as f:
return f.read() | python | {
"resource": ""
} |
q265575 | write_to_file | validation | def write_to_file(file_path, contents, encoding="utf-8"):
"""
Write helper method
:type file_path: str|unicode
:type contents: str|unicode
:type encoding: str|unicode
"""
with codecs.open(file_path, "w", encoding) as f:
f.write(contents) | python | {
"resource": ""
} |
q265576 | copy_file | validation | def copy_file(src, dest):
"""
Copy file helper method
:type src: str|unicode
:type dest: str|unicode
"""
dir_path = os.path.dirname(dest)
if not os.path.exists(dir_path):
os.makedirs(dir_path)
shutil.copy2(src, dest) | python | {
"resource": ""
} |
q265577 | get_path_extension | validation | def get_path_extension(path):
"""
Split file name and extension
:type path: str|unicode
:rtype: one str|unicode
"""
file_path, file_ext = os.path.splitext(path)
return file_ext.lstrip('.') | python | {
"resource": ""
} |
q265578 | split_path | validation | def split_path(path):
"""
Helper method for absolute and relative paths resolution
Split passed path and return each directory parts
example: "/usr/share/dir"
return: ["usr", "share", "dir"]
@type path: one of (unicode, str)
@rtype: list
"""
result_parts = []
#todo: check loops
while path != "/":
parts = os.path.split(path)
if parts[1] == path:
result_parts.insert(0, parts[1])
break
elif parts[0] == path:
result_parts.insert(0, parts[0])
break
else:
path = parts[0]
result_parts.insert(0, parts[1])
return result_parts | python | {
"resource": ""
} |
q265579 | RESTClient._create_api_uri | validation | def _create_api_uri(self, *parts):
"""Creates fully qualified endpoint URIs.
:param parts: the string parts that form the request URI
"""
return urljoin(self.API_URI, '/'.join(map(quote, parts))) | python | {
"resource": ""
} |
q265580 | RESTClient._format_iso_time | validation | def _format_iso_time(self, time):
"""Makes sure we have proper ISO 8601 time.
:param time: either already ISO 8601 a string or datetime.datetime
:returns: ISO 8601 time
:rtype: str
"""
if isinstance(time, str):
return time
elif isinstance(time, datetime):
return time.strftime('%Y-%m-%dT%H:%M:%S.%fZ')
else:
return None | python | {
"resource": ""
} |
q265581 | RESTClient._handle_response | validation | def _handle_response(self, response):
"""Returns the given response or raises an APIError for non-2xx responses.
:param requests.Response response: HTTP response
:returns: requested data
:rtype: requests.Response
:raises APIError: for non-2xx responses
"""
if not str(response.status_code).startswith('2'):
raise get_api_error(response)
return response | python | {
"resource": ""
} |
q265582 | PaginationClient._check_next | validation | def _check_next(self):
"""Checks if a next message is possible.
:returns: True if a next message is possible, otherwise False
:rtype: bool
"""
if self.is_initial:
return True
if self.before:
if self.before_cursor:
return True
else:
return False
else:
if self.after_cursor:
return True
else:
return False | python | {
"resource": ""
} |
q265583 | Colors._wrap_color | validation | def _wrap_color(self, code, text, format=None, style=None):
""" Colors text with code and given format """
color = None
if code[:3] == self.bg.PREFIX:
color = self.bg.COLORS.get(code, None)
if not color:
color = self.fg.COLORS.get(code, None)
if not color:
raise Exception('Color code not found')
if format and format not in self.formats:
raise Exception('Color format not found')
fmt = "0;"
if format == 'bold':
fmt = "1;"
elif format == 'underline':
fmt = "4;"
# Manage the format
parts = color.split('[')
color = '{0}[{1}{2}'.format(parts[0], fmt, parts[1])
if self.has_colors and self.colors_enabled:
# Set brightness
st = ''
if style:
st = self.st.COLORS.get(style, '')
return "{0}{1}{2}{3}".format(st, color, text, self.st.COLORS['reset_all'])
else:
return text | python | {
"resource": ""
} |
q265584 | SymbolDatabase.RegisterMessage | validation | def RegisterMessage(self, message):
"""Registers the given message type in the local database.
Args:
message: a message.Message, to be registered.
Returns:
The provided message.
"""
desc = message.DESCRIPTOR
self._symbols[desc.full_name] = message
if desc.file.name not in self._symbols_by_file:
self._symbols_by_file[desc.file.name] = {}
self._symbols_by_file[desc.file.name][desc.full_name] = message
self.pool.AddDescriptor(desc)
return message | python | {
"resource": ""
} |
q265585 | RuntimePath.insert | validation | def insert(self, index, value):
"""
Insert object before index.
:param int index: index to insert in
:param string value: path to insert
"""
self._list.insert(index, value)
self._sync() | python | {
"resource": ""
} |
q265586 | RuntimePath.parse | validation | def parse(self, string):
"""
Parse runtime path representation to list.
:param string string: runtime path string
:return: list of runtime paths
:rtype: list of string
"""
var, eq, values = string.strip().partition('=')
assert var == 'runtimepath'
assert eq == '='
return values.split(',') | python | {
"resource": ""
} |
q265587 | Asset.add_bundle | validation | def add_bundle(self, *args):
"""
Add some bundle to build group
:type bundle: static_bundle.bundles.AbstractBundle
@rtype: BuildGroup
"""
for bundle in args:
if not self.multitype and self.has_bundles():
first_bundle = self.get_first_bundle()
if first_bundle.get_type() != bundle.get_type():
raise Exception(
'Different bundle types for one Asset: %s[%s -> %s]'
'check types or set multitype parameter to True'
% (self.name, first_bundle.get_type(), bundle.get_type())
)
self.bundles.append(bundle)
return self | python | {
"resource": ""
} |
q265588 | Asset.collect_files | validation | def collect_files(self):
"""
Return collected files links
:rtype: list[static_bundle.files.StaticFileResult]
"""
self.files = []
for bundle in self.bundles:
bundle.init_build(self, self.builder)
bundle_files = bundle.prepare()
self.files.extend(bundle_files)
return self | python | {
"resource": ""
} |
q265589 | Asset.get_minifier | validation | def get_minifier(self):
"""
Asset minifier
Uses default minifier in bundle if it's not defined
:rtype: static_bundle.minifiers.DefaultMinifier|None
"""
if self.minifier is None:
if not self.has_bundles():
raise Exception("Unable to get default minifier, no bundles in build group")
minifier = self.get_first_bundle().get_default_minifier()
else:
minifier = self.minifier
if minifier:
minifier.init_asset(self)
return minifier | python | {
"resource": ""
} |
q265590 | StandardBuilder.render_asset | validation | def render_asset(self, name):
"""
Render all includes in asset by names
:type name: str|unicode
:rtype: str|unicode
"""
result = ""
if self.has_asset(name):
asset = self.get_asset(name)
if asset.files:
for f in asset.files:
result += f.render_include() + "\r\n"
return result | python | {
"resource": ""
} |
q265591 | StandardBuilder.collect_links | validation | def collect_links(self, env=None):
"""
Return links without build files
"""
for asset in self.assets.values():
if asset.has_bundles():
asset.collect_files()
if env is None:
env = self.config.env
if env == static_bundle.ENV_PRODUCTION:
self._minify(emulate=True)
self._add_url_prefix() | python | {
"resource": ""
} |
q265592 | _default_json_default | validation | def _default_json_default(obj):
""" Coerce everything to strings.
All objects representing time get output according to default_date_fmt.
"""
if isinstance(obj, (datetime.datetime, datetime.date, datetime.time)):
return obj.strftime(default_date_fmt)
else:
return str(obj) | python | {
"resource": ""
} |
q265593 | init_logs | validation | def init_logs(path=None,
target=None,
logger_name='root',
level=logging.DEBUG,
maxBytes=1*1024*1024,
backupCount=5,
application_name='default',
server_hostname=None,
fields=None):
"""Initialize the zlogger.
Sets up a rotating file handler to the specified path and file with
the given size and backup count limits, sets the default
application_name, server_hostname, and default/whitelist fields.
:param path: path to write the log file
:param target: name of the log file
:param logger_name: name of the logger (defaults to root)
:param level: log level for this logger (defaults to logging.DEBUG)
:param maxBytes: size of the file before rotation (default 1MB)
:param application_name: app name to add to each log entry
:param server_hostname: hostname to add to each log entry
:param fields: default/whitelist fields.
:type path: string
:type target: string
:type logger_name: string
:type level: int
:type maxBytes: int
:type backupCount: int
:type application_name: string
:type server_hostname: string
:type fields: dict
"""
log_file = os.path.abspath(
os.path.join(path, target))
logger = logging.getLogger(logger_name)
logger.setLevel(level)
handler = logging.handlers.RotatingFileHandler(
log_file, maxBytes=maxBytes, backupCount=backupCount)
handler.setLevel(level)
handler.setFormatter(
JsonFormatter(
application_name=application_name,
server_hostname=server_hostname,
fields=fields))
logger.addHandler(handler) | python | {
"resource": ""
} |
q265594 | JsonFormatter.format | validation | def format(self, record):
"""formats a logging.Record into a standard json log entry
:param record: record to be formatted
:type record: logging.Record
:return: the formatted json string
:rtype: string
"""
record_fields = record.__dict__.copy()
self._set_exc_info(record_fields)
event_name = 'default'
if record_fields.get('event_name'):
event_name = record_fields.pop('event_name')
log_level = 'INFO'
if record_fields.get('log_level'):
log_level = record_fields.pop('log_level')
[record_fields.pop(k) for k in record_fields.keys()
if k not in self.fields]
defaults = self.defaults.copy()
fields = self.fields.copy()
fields.update(record_fields)
filtered_fields = {}
for k, v in fields.iteritems():
if v is not None:
filtered_fields[k] = v
defaults.update({
'event_timestamp': self._get_now(),
'event_name': event_name,
'log_level': log_level,
'fields': filtered_fields})
return json.dumps(defaults, default=self.json_default) | python | {
"resource": ""
} |
q265595 | includeme | validation | def includeme(config):
"""
Initialize the model for a Pyramid app.
Activate this setup using ``config.include('baka_model')``.
"""
settings = config.get_settings()
should_create = asbool(settings.get('baka_model.should_create_all', False))
should_drop = asbool(settings.get('baka_model.should_drop_all', False))
# Configure the transaction manager to support retrying retryable
# exceptions. We also register the session factory with the thread-local
# transaction manager, so that all sessions it creates are registered.
# "tm.attempts": 3,
config.add_settings({
"retry.attempts": 3,
"tm.activate_hook": tm_activate_hook,
"tm.annotate_user": False,
})
# use pyramid_retry couse pyramid_tm disabled it
config.include('pyramid_retry')
# use pyramid_tm to hook the transaction lifecycle to the request
config.include('pyramid_tm')
engine = get_engine(settings)
session_factory = get_session_factory(engine)
config.registry['db_session_factory'] = session_factory
# make request.db available for use in Pyramid
config.add_request_method(
# r.tm is the transaction manager used by pyramid_tm
lambda r: get_tm_session(session_factory, r.tm),
'db',
reify=True
)
# service model factory
config.include('.service')
# Register a deferred action to bind the engine when the configuration is
# committed. Deferring the action means that this module can be included
# before model modules without ill effect.
config.action(None, bind_engine, (engine,), {
'should_create': should_create,
'should_drop': should_drop
}, order=10) | python | {
"resource": ""
} |
q265596 | AbstractPath.get_abs_and_rel_paths | validation | def get_abs_and_rel_paths(self, root_path, file_name, input_dir):
"""
Return absolute and relative path for file
:type root_path: str|unicode
:type file_name: str|unicode
:type input_dir: str|unicode
:rtype: tuple
"""
# todo: change relative path resolving [bug on duplicate dir names in path]
relative_dir = root_path.replace(input_dir, '')
return os.path.join(root_path, file_name), relative_dir + '/' + file_name | python | {
"resource": ""
} |
q265597 | DescriptorPool.AddEnumDescriptor | validation | def AddEnumDescriptor(self, enum_desc):
"""Adds an EnumDescriptor to the pool.
This method also registers the FileDescriptor associated with the message.
Args:
enum_desc: An EnumDescriptor.
"""
if not isinstance(enum_desc, descriptor.EnumDescriptor):
raise TypeError('Expected instance of descriptor.EnumDescriptor.')
self._enum_descriptors[enum_desc.full_name] = enum_desc
self.AddFileDescriptor(enum_desc.file) | python | {
"resource": ""
} |
q265598 | DescriptorPool.FindFileContainingSymbol | validation | def FindFileContainingSymbol(self, symbol):
"""Gets the FileDescriptor for the file containing the specified symbol.
Args:
symbol: The name of the symbol to search for.
Returns:
A FileDescriptor that contains the specified symbol.
Raises:
KeyError: if the file can not be found in the pool.
"""
symbol = _NormalizeFullyQualifiedName(symbol)
try:
return self._descriptors[symbol].file
except KeyError:
pass
try:
return self._enum_descriptors[symbol].file
except KeyError:
pass
try:
file_proto = self._internal_db.FindFileContainingSymbol(symbol)
except KeyError as error:
if self._descriptor_db:
file_proto = self._descriptor_db.FindFileContainingSymbol(symbol)
else:
raise error
if not file_proto:
raise KeyError('Cannot find a file containing %s' % symbol)
return self._ConvertFileProtoToFileDescriptor(file_proto) | python | {
"resource": ""
} |
q265599 | DescriptorPool.FindMessageTypeByName | validation | def FindMessageTypeByName(self, full_name):
"""Loads the named descriptor from the pool.
Args:
full_name: The full name of the descriptor to load.
Returns:
The descriptor for the named type.
"""
full_name = _NormalizeFullyQualifiedName(full_name)
if full_name not in self._descriptors:
self.FindFileContainingSymbol(full_name)
return self._descriptors[full_name] | python | {
"resource": ""
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.