repo stringlengths 7 55 | path stringlengths 4 127 | func_name stringlengths 1 88 | original_string stringlengths 75 19.8k | language stringclasses 1
value | code stringlengths 75 19.8k | code_tokens listlengths 20 707 | docstring stringlengths 3 17.3k | docstring_tokens listlengths 3 222 | sha stringlengths 40 40 | url stringlengths 87 242 | partition stringclasses 1
value | idx int64 0 252k |
|---|---|---|---|---|---|---|---|---|---|---|---|---|
toumorokoshi/sprinter | sprinter/lib/request.py | authenticated_get | def authenticated_get(username, password, url, verify=True):
"""
Perform an authorized query to the url, and return the result
"""
try:
response = requests.get(url, auth=(username, password), verify=verify)
if response.status_code == 401:
raise BadCredentialsException(
"Unable to authenticate user %s to %s with password provided!"
% (username, url))
except requests.exceptions.SSLError:
raise CertificateException("Unable to verify certificate at %s!" % url)
return response.content | python | def authenticated_get(username, password, url, verify=True):
"""
Perform an authorized query to the url, and return the result
"""
try:
response = requests.get(url, auth=(username, password), verify=verify)
if response.status_code == 401:
raise BadCredentialsException(
"Unable to authenticate user %s to %s with password provided!"
% (username, url))
except requests.exceptions.SSLError:
raise CertificateException("Unable to verify certificate at %s!" % url)
return response.content | [
"def",
"authenticated_get",
"(",
"username",
",",
"password",
",",
"url",
",",
"verify",
"=",
"True",
")",
":",
"try",
":",
"response",
"=",
"requests",
".",
"get",
"(",
"url",
",",
"auth",
"=",
"(",
"username",
",",
"password",
")",
",",
"verify",
"... | Perform an authorized query to the url, and return the result | [
"Perform",
"an",
"authorized",
"query",
"to",
"the",
"url",
"and",
"return",
"the",
"result"
] | 846697a7a087e69c61d075232e754d6975a64152 | https://github.com/toumorokoshi/sprinter/blob/846697a7a087e69c61d075232e754d6975a64152/sprinter/lib/request.py#L19-L31 | train | 54,800 |
toumorokoshi/sprinter | sprinter/lib/request.py | cleaned_request | def cleaned_request(request_type, *args, **kwargs):
""" Perform a cleaned requests request """
s = requests.Session()
# this removes netrc checking
s.trust_env = False
return s.request(request_type, *args, **kwargs) | python | def cleaned_request(request_type, *args, **kwargs):
""" Perform a cleaned requests request """
s = requests.Session()
# this removes netrc checking
s.trust_env = False
return s.request(request_type, *args, **kwargs) | [
"def",
"cleaned_request",
"(",
"request_type",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"s",
"=",
"requests",
".",
"Session",
"(",
")",
"# this removes netrc checking",
"s",
".",
"trust_env",
"=",
"False",
"return",
"s",
".",
"request",
"(",
... | Perform a cleaned requests request | [
"Perform",
"a",
"cleaned",
"requests",
"request"
] | 846697a7a087e69c61d075232e754d6975a64152 | https://github.com/toumorokoshi/sprinter/blob/846697a7a087e69c61d075232e754d6975a64152/sprinter/lib/request.py#L34-L39 | train | 54,801 |
toumorokoshi/sprinter | sprinter/lib/request.py | download_to_bytesio | def download_to_bytesio(url):
""" Return a bytesio object with a download bar """
logger.info("Downloading url: {0}".format(url))
r = cleaned_request('get', url, stream=True)
stream = io.BytesIO()
total_length = int(r.headers.get('content-length'))
for chunk in progress.bar(r.iter_content(chunk_size=1024), expected_size=(total_length/1024) + 1):
if chunk:
stream.write(chunk)
stream.seek(0)
return stream | python | def download_to_bytesio(url):
""" Return a bytesio object with a download bar """
logger.info("Downloading url: {0}".format(url))
r = cleaned_request('get', url, stream=True)
stream = io.BytesIO()
total_length = int(r.headers.get('content-length'))
for chunk in progress.bar(r.iter_content(chunk_size=1024), expected_size=(total_length/1024) + 1):
if chunk:
stream.write(chunk)
stream.seek(0)
return stream | [
"def",
"download_to_bytesio",
"(",
"url",
")",
":",
"logger",
".",
"info",
"(",
"\"Downloading url: {0}\"",
".",
"format",
"(",
"url",
")",
")",
"r",
"=",
"cleaned_request",
"(",
"'get'",
",",
"url",
",",
"stream",
"=",
"True",
")",
"stream",
"=",
"io",
... | Return a bytesio object with a download bar | [
"Return",
"a",
"bytesio",
"object",
"with",
"a",
"download",
"bar"
] | 846697a7a087e69c61d075232e754d6975a64152 | https://github.com/toumorokoshi/sprinter/blob/846697a7a087e69c61d075232e754d6975a64152/sprinter/lib/request.py#L42-L52 | train | 54,802 |
ponty/entrypoint2 | entrypoint2/examples/hello.py | add | def add(one, two=4, three=False):
''' This function adds two number.
:param one: first number to add
:param two: second number to add
:rtype: int
'''
s = str(int(one) + int(two))
logging.debug('logging sum from hello.py:' + s)
print 'printing sum from hello.py:', s
return s | python | def add(one, two=4, three=False):
''' This function adds two number.
:param one: first number to add
:param two: second number to add
:rtype: int
'''
s = str(int(one) + int(two))
logging.debug('logging sum from hello.py:' + s)
print 'printing sum from hello.py:', s
return s | [
"def",
"add",
"(",
"one",
",",
"two",
"=",
"4",
",",
"three",
"=",
"False",
")",
":",
"s",
"=",
"str",
"(",
"int",
"(",
"one",
")",
"+",
"int",
"(",
"two",
")",
")",
"logging",
".",
"debug",
"(",
"'logging sum from hello.py:'",
"+",
"s",
")",
"... | This function adds two number.
:param one: first number to add
:param two: second number to add
:rtype: int | [
"This",
"function",
"adds",
"two",
"number",
"."
] | d355dd1a6e0cabdd6751fc2f6016aee20755d332 | https://github.com/ponty/entrypoint2/blob/d355dd1a6e0cabdd6751fc2f6016aee20755d332/entrypoint2/examples/hello.py#L8-L20 | train | 54,803 |
inveniosoftware/invenio-queues | invenio_queues/queue.py | Queue.queue | def queue(self):
"""Message queue queue."""
with self.connection_pool.acquire(block=True) as conn:
return Q(
self.routing_key,
exchange=self.exchange,
routing_key=self.routing_key
)(conn) | python | def queue(self):
"""Message queue queue."""
with self.connection_pool.acquire(block=True) as conn:
return Q(
self.routing_key,
exchange=self.exchange,
routing_key=self.routing_key
)(conn) | [
"def",
"queue",
"(",
"self",
")",
":",
"with",
"self",
".",
"connection_pool",
".",
"acquire",
"(",
"block",
"=",
"True",
")",
"as",
"conn",
":",
"return",
"Q",
"(",
"self",
".",
"routing_key",
",",
"exchange",
"=",
"self",
".",
"exchange",
",",
"rou... | Message queue queue. | [
"Message",
"queue",
"queue",
"."
] | 1dd9112d7c5fe72a428c86f21f6d02cdb0595921 | https://github.com/inveniosoftware/invenio-queues/blob/1dd9112d7c5fe72a428c86f21f6d02cdb0595921/invenio_queues/queue.py#L55-L62 | train | 54,804 |
inveniosoftware/invenio-queues | invenio_queues/queue.py | Queue.exists | def exists(self):
"""Test if this queue exists in the AMQP store.
Note: This doesn't work with redis as declaring queues has not effect
except creating the exchange.
:returns: True if the queue exists, else False.
:rtype: bool
"""
try:
queue = self.queue
queue.queue_declare(passive=True)
except NotFound:
return False
except ChannelError as e:
if e.reply_code == '404':
return False
raise e
return True | python | def exists(self):
"""Test if this queue exists in the AMQP store.
Note: This doesn't work with redis as declaring queues has not effect
except creating the exchange.
:returns: True if the queue exists, else False.
:rtype: bool
"""
try:
queue = self.queue
queue.queue_declare(passive=True)
except NotFound:
return False
except ChannelError as e:
if e.reply_code == '404':
return False
raise e
return True | [
"def",
"exists",
"(",
"self",
")",
":",
"try",
":",
"queue",
"=",
"self",
".",
"queue",
"queue",
".",
"queue_declare",
"(",
"passive",
"=",
"True",
")",
"except",
"NotFound",
":",
"return",
"False",
"except",
"ChannelError",
"as",
"e",
":",
"if",
"e",
... | Test if this queue exists in the AMQP store.
Note: This doesn't work with redis as declaring queues has not effect
except creating the exchange.
:returns: True if the queue exists, else False.
:rtype: bool | [
"Test",
"if",
"this",
"queue",
"exists",
"in",
"the",
"AMQP",
"store",
"."
] | 1dd9112d7c5fe72a428c86f21f6d02cdb0595921 | https://github.com/inveniosoftware/invenio-queues/blob/1dd9112d7c5fe72a428c86f21f6d02cdb0595921/invenio_queues/queue.py#L65-L83 | train | 54,805 |
inveniosoftware/invenio-queues | invenio_queues/queue.py | Queue.create_consumer | def create_consumer(self):
"""Context manager that yields an instance of ``Consumer``."""
with self.connection_pool.acquire(block=True) as conn:
yield self.consumer(conn) | python | def create_consumer(self):
"""Context manager that yields an instance of ``Consumer``."""
with self.connection_pool.acquire(block=True) as conn:
yield self.consumer(conn) | [
"def",
"create_consumer",
"(",
"self",
")",
":",
"with",
"self",
".",
"connection_pool",
".",
"acquire",
"(",
"block",
"=",
"True",
")",
"as",
"conn",
":",
"yield",
"self",
".",
"consumer",
"(",
"conn",
")"
] | Context manager that yields an instance of ``Consumer``. | [
"Context",
"manager",
"that",
"yields",
"an",
"instance",
"of",
"Consumer",
"."
] | 1dd9112d7c5fe72a428c86f21f6d02cdb0595921 | https://github.com/inveniosoftware/invenio-queues/blob/1dd9112d7c5fe72a428c86f21f6d02cdb0595921/invenio_queues/queue.py#L114-L117 | train | 54,806 |
inveniosoftware/invenio-queues | invenio_queues/queue.py | Queue.publish | def publish(self, events):
"""Publish events."""
assert len(events) > 0
with self.create_producer() as producer:
for event in events:
producer.publish(event) | python | def publish(self, events):
"""Publish events."""
assert len(events) > 0
with self.create_producer() as producer:
for event in events:
producer.publish(event) | [
"def",
"publish",
"(",
"self",
",",
"events",
")",
":",
"assert",
"len",
"(",
"events",
")",
">",
"0",
"with",
"self",
".",
"create_producer",
"(",
")",
"as",
"producer",
":",
"for",
"event",
"in",
"events",
":",
"producer",
".",
"publish",
"(",
"eve... | Publish events. | [
"Publish",
"events",
"."
] | 1dd9112d7c5fe72a428c86f21f6d02cdb0595921 | https://github.com/inveniosoftware/invenio-queues/blob/1dd9112d7c5fe72a428c86f21f6d02cdb0595921/invenio_queues/queue.py#L119-L124 | train | 54,807 |
inveniosoftware/invenio-queues | invenio_queues/queue.py | Queue.consume | def consume(self, payload=True):
"""Consume events."""
with self.create_consumer() as consumer:
for msg in consumer.iterqueue():
yield msg.payload if payload else msg | python | def consume(self, payload=True):
"""Consume events."""
with self.create_consumer() as consumer:
for msg in consumer.iterqueue():
yield msg.payload if payload else msg | [
"def",
"consume",
"(",
"self",
",",
"payload",
"=",
"True",
")",
":",
"with",
"self",
".",
"create_consumer",
"(",
")",
"as",
"consumer",
":",
"for",
"msg",
"in",
"consumer",
".",
"iterqueue",
"(",
")",
":",
"yield",
"msg",
".",
"payload",
"if",
"pay... | Consume events. | [
"Consume",
"events",
"."
] | 1dd9112d7c5fe72a428c86f21f6d02cdb0595921 | https://github.com/inveniosoftware/invenio-queues/blob/1dd9112d7c5fe72a428c86f21f6d02cdb0595921/invenio_queues/queue.py#L126-L130 | train | 54,808 |
praekelt/django-profile | profile/forms.py | ProfileForm.get_initial | def get_initial(self, *args, **kwargs):
"""
Gathers initial form values from user and profile objects
suitable for using as form's initial data.
"""
initial = {}
for field in self.fields:
value = None
if hasattr(self.user, field):
value = getattr(self.user, field)
if hasattr(self.profile, field):
value = getattr(self.profile, field)
if value:
initial.update({
field: value
})
if hasattr(self.profile, 'dob'):
dob = self.profile.dob
if dob:
if 'dob_day' in self.fields:
initial.update({
'dob_day': dob.day
})
if 'dob_month' in self.fields:
initial.update({
'dob_month': dob.month
})
if 'dob_year' in self.fields:
initial.update({
'dob_year': dob.year
})
return initial | python | def get_initial(self, *args, **kwargs):
"""
Gathers initial form values from user and profile objects
suitable for using as form's initial data.
"""
initial = {}
for field in self.fields:
value = None
if hasattr(self.user, field):
value = getattr(self.user, field)
if hasattr(self.profile, field):
value = getattr(self.profile, field)
if value:
initial.update({
field: value
})
if hasattr(self.profile, 'dob'):
dob = self.profile.dob
if dob:
if 'dob_day' in self.fields:
initial.update({
'dob_day': dob.day
})
if 'dob_month' in self.fields:
initial.update({
'dob_month': dob.month
})
if 'dob_year' in self.fields:
initial.update({
'dob_year': dob.year
})
return initial | [
"def",
"get_initial",
"(",
"self",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"initial",
"=",
"{",
"}",
"for",
"field",
"in",
"self",
".",
"fields",
":",
"value",
"=",
"None",
"if",
"hasattr",
"(",
"self",
".",
"user",
",",
"field",
")"... | Gathers initial form values from user and profile objects
suitable for using as form's initial data. | [
"Gathers",
"initial",
"form",
"values",
"from",
"user",
"and",
"profile",
"objects",
"suitable",
"for",
"using",
"as",
"form",
"s",
"initial",
"data",
"."
] | 52a3d3f7e776742c5333f8fab67b5af3cdbc878b | https://github.com/praekelt/django-profile/blob/52a3d3f7e776742c5333f8fab67b5af3cdbc878b/profile/forms.py#L126-L159 | train | 54,809 |
praekelt/django-profile | profile/forms.py | ProfileForm.save | def save(self, *args, **kwargs):
"""
This method should be called when is_valid is true to save
relevant fields to user and profile models.
"""
for key, value in self.cleaned_data.items():
if value != None:
if hasattr(self.user, key):
setattr(self.user, key, value)
if hasattr(self.profile, key):
setattr(self.profile, key, value)
# set password
if 'password1' in self.cleaned_data:
if self.cleaned_data['password1']:
self.user.set_password(self.cleaned_data['password1'])
# set dob
if 'dob_day' in self.cleaned_data and 'dob_month' in self.\
cleaned_data and 'dob_year' in self.cleaned_data:
self.profile.dob = self._gen_dob()
self.user.save()
self.profile.save() | python | def save(self, *args, **kwargs):
"""
This method should be called when is_valid is true to save
relevant fields to user and profile models.
"""
for key, value in self.cleaned_data.items():
if value != None:
if hasattr(self.user, key):
setattr(self.user, key, value)
if hasattr(self.profile, key):
setattr(self.profile, key, value)
# set password
if 'password1' in self.cleaned_data:
if self.cleaned_data['password1']:
self.user.set_password(self.cleaned_data['password1'])
# set dob
if 'dob_day' in self.cleaned_data and 'dob_month' in self.\
cleaned_data and 'dob_year' in self.cleaned_data:
self.profile.dob = self._gen_dob()
self.user.save()
self.profile.save() | [
"def",
"save",
"(",
"self",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"for",
"key",
",",
"value",
"in",
"self",
".",
"cleaned_data",
".",
"items",
"(",
")",
":",
"if",
"value",
"!=",
"None",
":",
"if",
"hasattr",
"(",
"self",
".",
"u... | This method should be called when is_valid is true to save
relevant fields to user and profile models. | [
"This",
"method",
"should",
"be",
"called",
"when",
"is_valid",
"is",
"true",
"to",
"save",
"relevant",
"fields",
"to",
"user",
"and",
"profile",
"models",
"."
] | 52a3d3f7e776742c5333f8fab67b5af3cdbc878b | https://github.com/praekelt/django-profile/blob/52a3d3f7e776742c5333f8fab67b5af3cdbc878b/profile/forms.py#L161-L184 | train | 54,810 |
praekelt/django-profile | profile/forms.py | ProfileForm.clean_username | def clean_username(self):
"""
Validate that the username is alphanumeric and is not already
in use. Don't fail if users username is provided.
"""
user = None
try:
user = User.objects.get(username__iexact=self.\
cleaned_data['username'])
except User.DoesNotExist:
return self.cleaned_data['username']
if user:
if user.username == self.user.username:
return self.cleaned_data['username']
raise forms.ValidationError(_(\
"A user with that username already exists.")) | python | def clean_username(self):
"""
Validate that the username is alphanumeric and is not already
in use. Don't fail if users username is provided.
"""
user = None
try:
user = User.objects.get(username__iexact=self.\
cleaned_data['username'])
except User.DoesNotExist:
return self.cleaned_data['username']
if user:
if user.username == self.user.username:
return self.cleaned_data['username']
raise forms.ValidationError(_(\
"A user with that username already exists.")) | [
"def",
"clean_username",
"(",
"self",
")",
":",
"user",
"=",
"None",
"try",
":",
"user",
"=",
"User",
".",
"objects",
".",
"get",
"(",
"username__iexact",
"=",
"self",
".",
"cleaned_data",
"[",
"'username'",
"]",
")",
"except",
"User",
".",
"DoesNotExist... | Validate that the username is alphanumeric and is not already
in use. Don't fail if users username is provided. | [
"Validate",
"that",
"the",
"username",
"is",
"alphanumeric",
"and",
"is",
"not",
"already",
"in",
"use",
".",
"Don",
"t",
"fail",
"if",
"users",
"username",
"is",
"provided",
"."
] | 52a3d3f7e776742c5333f8fab67b5af3cdbc878b | https://github.com/praekelt/django-profile/blob/52a3d3f7e776742c5333f8fab67b5af3cdbc878b/profile/forms.py#L186-L203 | train | 54,811 |
studionow/pybrightcove | pybrightcove/http_core.py | HttpRequest.add_body_part | def add_body_part(self, key, data, mime_type, size=None):
"""Adds data to the HTTP request body.
If more than one part is added, this is assumed to be a mime-multipart
request. This method is designed to create MIME 1.0 requests as specified
in RFC 1341.
Args:
data: str or a file-like object containing a part of the request body.
mime_type: str The MIME type describing the data
size: int Required if the data is a file like object. If the data is a
string, the size is calculated so this parameter is ignored.
"""
if isinstance(data, str):
size = len(data)
if hasattr(data, "fileno"):
size = os.fstat(data.fileno())[stat.ST_SIZE]
if size is None:
# TODO: support chunked transfer if some of the body is of unknown size.
raise UnknownSize('Each part of the body must have a known size.')
if 'Content-Length' in self.headers:
content_length = int(self.headers['Content-Length'])
else:
content_length = 0
# If this is the first part added to the body, then this is not a multipart
# request.
boundary_string = '\r\n--%s\r\n' % (MIME_BOUNDARY,)
self._body_parts.append(boundary_string)
content_length += len(boundary_string) + size
# Include the mime type of this part.
cd = 'Content-Disposition: form-data; name="%s"' % key
mt = mime_type
if hasattr(data, "fileno"):
cd += '; filename="%s"' % data.name.split('/')[-1]
mt = mimetypes.guess_type(data.name)[0] or 'application/octet-stream'
cd += '\r\n'
type_string = 'Content-Type: %s\r\n\r\n' % (mt)
self._body_parts.append(cd)
self._body_parts.append(type_string)
content_length += len(type_string) + len(cd)
self._body_parts.append(data)
self.headers['Content-Length'] = str(content_length) | python | def add_body_part(self, key, data, mime_type, size=None):
"""Adds data to the HTTP request body.
If more than one part is added, this is assumed to be a mime-multipart
request. This method is designed to create MIME 1.0 requests as specified
in RFC 1341.
Args:
data: str or a file-like object containing a part of the request body.
mime_type: str The MIME type describing the data
size: int Required if the data is a file like object. If the data is a
string, the size is calculated so this parameter is ignored.
"""
if isinstance(data, str):
size = len(data)
if hasattr(data, "fileno"):
size = os.fstat(data.fileno())[stat.ST_SIZE]
if size is None:
# TODO: support chunked transfer if some of the body is of unknown size.
raise UnknownSize('Each part of the body must have a known size.')
if 'Content-Length' in self.headers:
content_length = int(self.headers['Content-Length'])
else:
content_length = 0
# If this is the first part added to the body, then this is not a multipart
# request.
boundary_string = '\r\n--%s\r\n' % (MIME_BOUNDARY,)
self._body_parts.append(boundary_string)
content_length += len(boundary_string) + size
# Include the mime type of this part.
cd = 'Content-Disposition: form-data; name="%s"' % key
mt = mime_type
if hasattr(data, "fileno"):
cd += '; filename="%s"' % data.name.split('/')[-1]
mt = mimetypes.guess_type(data.name)[0] or 'application/octet-stream'
cd += '\r\n'
type_string = 'Content-Type: %s\r\n\r\n' % (mt)
self._body_parts.append(cd)
self._body_parts.append(type_string)
content_length += len(type_string) + len(cd)
self._body_parts.append(data)
self.headers['Content-Length'] = str(content_length) | [
"def",
"add_body_part",
"(",
"self",
",",
"key",
",",
"data",
",",
"mime_type",
",",
"size",
"=",
"None",
")",
":",
"if",
"isinstance",
"(",
"data",
",",
"str",
")",
":",
"size",
"=",
"len",
"(",
"data",
")",
"if",
"hasattr",
"(",
"data",
",",
"\... | Adds data to the HTTP request body.
If more than one part is added, this is assumed to be a mime-multipart
request. This method is designed to create MIME 1.0 requests as specified
in RFC 1341.
Args:
data: str or a file-like object containing a part of the request body.
mime_type: str The MIME type describing the data
size: int Required if the data is a file like object. If the data is a
string, the size is calculated so this parameter is ignored. | [
"Adds",
"data",
"to",
"the",
"HTTP",
"request",
"body",
".",
"If",
"more",
"than",
"one",
"part",
"is",
"added",
"this",
"is",
"assumed",
"to",
"be",
"a",
"mime",
"-",
"multipart",
"request",
".",
"This",
"method",
"is",
"designed",
"to",
"create",
"MI... | 19c946b689a80156e070fe9bc35589c4b768e614 | https://github.com/studionow/pybrightcove/blob/19c946b689a80156e070fe9bc35589c4b768e614/pybrightcove/http_core.py#L89-L130 | train | 54,812 |
studionow/pybrightcove | pybrightcove/http_core.py | HttpRequest._copy | def _copy(self):
"""Creates a deep copy of this request."""
copied_uri = Uri(self.uri.scheme, self.uri.host, self.uri.port,
self.uri.path, self.uri.query.copy())
new_request = HttpRequest(uri=copied_uri, method=self.method,
headers=self.headers.copy())
new_request._body_parts = self._body_parts[:]
return new_request | python | def _copy(self):
"""Creates a deep copy of this request."""
copied_uri = Uri(self.uri.scheme, self.uri.host, self.uri.port,
self.uri.path, self.uri.query.copy())
new_request = HttpRequest(uri=copied_uri, method=self.method,
headers=self.headers.copy())
new_request._body_parts = self._body_parts[:]
return new_request | [
"def",
"_copy",
"(",
"self",
")",
":",
"copied_uri",
"=",
"Uri",
"(",
"self",
".",
"uri",
".",
"scheme",
",",
"self",
".",
"uri",
".",
"host",
",",
"self",
".",
"uri",
".",
"port",
",",
"self",
".",
"uri",
".",
"path",
",",
"self",
".",
"uri",
... | Creates a deep copy of this request. | [
"Creates",
"a",
"deep",
"copy",
"of",
"this",
"request",
"."
] | 19c946b689a80156e070fe9bc35589c4b768e614 | https://github.com/studionow/pybrightcove/blob/19c946b689a80156e070fe9bc35589c4b768e614/pybrightcove/http_core.py#L138-L145 | train | 54,813 |
studionow/pybrightcove | pybrightcove/http_core.py | Uri._get_relative_path | def _get_relative_path(self):
"""Returns the path with the query parameters escaped and appended."""
param_string = self._get_query_string()
if self.path is None:
path = '/'
else:
path = self.path
if param_string:
return '?'.join([path, param_string])
else:
return path | python | def _get_relative_path(self):
"""Returns the path with the query parameters escaped and appended."""
param_string = self._get_query_string()
if self.path is None:
path = '/'
else:
path = self.path
if param_string:
return '?'.join([path, param_string])
else:
return path | [
"def",
"_get_relative_path",
"(",
"self",
")",
":",
"param_string",
"=",
"self",
".",
"_get_query_string",
"(",
")",
"if",
"self",
".",
"path",
"is",
"None",
":",
"path",
"=",
"'/'",
"else",
":",
"path",
"=",
"self",
".",
"path",
"if",
"param_string",
... | Returns the path with the query parameters escaped and appended. | [
"Returns",
"the",
"path",
"with",
"the",
"query",
"parameters",
"escaped",
"and",
"appended",
"."
] | 19c946b689a80156e070fe9bc35589c4b768e614 | https://github.com/studionow/pybrightcove/blob/19c946b689a80156e070fe9bc35589c4b768e614/pybrightcove/http_core.py#L194-L204 | train | 54,814 |
studionow/pybrightcove | pybrightcove/http_core.py | Uri.modify_request | def modify_request(self, http_request=None):
"""Sets HTTP request components based on the URI."""
if http_request is None:
http_request = HttpRequest()
if http_request.uri is None:
http_request.uri = Uri()
# Determine the correct scheme.
if self.scheme:
http_request.uri.scheme = self.scheme
if self.port:
http_request.uri.port = self.port
if self.host:
http_request.uri.host = self.host
# Set the relative uri path
if self.path:
http_request.uri.path = self.path
if self.query:
http_request.uri.query = self.query.copy()
return http_request | python | def modify_request(self, http_request=None):
"""Sets HTTP request components based on the URI."""
if http_request is None:
http_request = HttpRequest()
if http_request.uri is None:
http_request.uri = Uri()
# Determine the correct scheme.
if self.scheme:
http_request.uri.scheme = self.scheme
if self.port:
http_request.uri.port = self.port
if self.host:
http_request.uri.host = self.host
# Set the relative uri path
if self.path:
http_request.uri.path = self.path
if self.query:
http_request.uri.query = self.query.copy()
return http_request | [
"def",
"modify_request",
"(",
"self",
",",
"http_request",
"=",
"None",
")",
":",
"if",
"http_request",
"is",
"None",
":",
"http_request",
"=",
"HttpRequest",
"(",
")",
"if",
"http_request",
".",
"uri",
"is",
"None",
":",
"http_request",
".",
"uri",
"=",
... | Sets HTTP request components based on the URI. | [
"Sets",
"HTTP",
"request",
"components",
"based",
"on",
"the",
"URI",
"."
] | 19c946b689a80156e070fe9bc35589c4b768e614 | https://github.com/studionow/pybrightcove/blob/19c946b689a80156e070fe9bc35589c4b768e614/pybrightcove/http_core.py#L226-L244 | train | 54,815 |
studionow/pybrightcove | pybrightcove/http_core.py | Uri.parse_uri | def parse_uri(uri_string):
"""Creates a Uri object which corresponds to the URI string.
This method can accept partial URIs, but it will leave missing
members of the Uri unset.
"""
parts = urlparse.urlparse(uri_string)
uri = Uri()
if parts[0]:
uri.scheme = parts[0]
if parts[1]:
host_parts = parts[1].split(':')
if host_parts[0]:
uri.host = host_parts[0]
if len(host_parts) > 1:
uri.port = int(host_parts[1])
if parts[2]:
uri.path = parts[2]
if parts[4]:
param_pairs = parts[4].split('&')
for pair in param_pairs:
pair_parts = pair.split('=')
if len(pair_parts) > 1:
uri.query[urllib.unquote_plus(pair_parts[0])] = (
urllib.unquote_plus(pair_parts[1]))
elif len(pair_parts) == 1:
uri.query[urllib.unquote_plus(pair_parts[0])] = None
return uri | python | def parse_uri(uri_string):
"""Creates a Uri object which corresponds to the URI string.
This method can accept partial URIs, but it will leave missing
members of the Uri unset.
"""
parts = urlparse.urlparse(uri_string)
uri = Uri()
if parts[0]:
uri.scheme = parts[0]
if parts[1]:
host_parts = parts[1].split(':')
if host_parts[0]:
uri.host = host_parts[0]
if len(host_parts) > 1:
uri.port = int(host_parts[1])
if parts[2]:
uri.path = parts[2]
if parts[4]:
param_pairs = parts[4].split('&')
for pair in param_pairs:
pair_parts = pair.split('=')
if len(pair_parts) > 1:
uri.query[urllib.unquote_plus(pair_parts[0])] = (
urllib.unquote_plus(pair_parts[1]))
elif len(pair_parts) == 1:
uri.query[urllib.unquote_plus(pair_parts[0])] = None
return uri | [
"def",
"parse_uri",
"(",
"uri_string",
")",
":",
"parts",
"=",
"urlparse",
".",
"urlparse",
"(",
"uri_string",
")",
"uri",
"=",
"Uri",
"(",
")",
"if",
"parts",
"[",
"0",
"]",
":",
"uri",
".",
"scheme",
"=",
"parts",
"[",
"0",
"]",
"if",
"parts",
... | Creates a Uri object which corresponds to the URI string.
This method can accept partial URIs, but it will leave missing
members of the Uri unset. | [
"Creates",
"a",
"Uri",
"object",
"which",
"corresponds",
"to",
"the",
"URI",
"string",
".",
"This",
"method",
"can",
"accept",
"partial",
"URIs",
"but",
"it",
"will",
"leave",
"missing",
"members",
"of",
"the",
"Uri",
"unset",
"."
] | 19c946b689a80156e070fe9bc35589c4b768e614 | https://github.com/studionow/pybrightcove/blob/19c946b689a80156e070fe9bc35589c4b768e614/pybrightcove/http_core.py#L248-L275 | train | 54,816 |
NiklasRosenstein-Python/nr-deprecated | nr/strex.py | Scanner.next | def next(self):
" Move on to the next character in the text. "
char = self.char
if char == '\n':
self.lineno += 1
self.colno = 0
else:
self.colno += 1
self.index += 1
return self.char | python | def next(self):
" Move on to the next character in the text. "
char = self.char
if char == '\n':
self.lineno += 1
self.colno = 0
else:
self.colno += 1
self.index += 1
return self.char | [
"def",
"next",
"(",
"self",
")",
":",
"char",
"=",
"self",
".",
"char",
"if",
"char",
"==",
"'\\n'",
":",
"self",
".",
"lineno",
"+=",
"1",
"self",
".",
"colno",
"=",
"0",
"else",
":",
"self",
".",
"colno",
"+=",
"1",
"self",
".",
"index",
"+="... | Move on to the next character in the text. | [
"Move",
"on",
"to",
"the",
"next",
"character",
"in",
"the",
"text",
"."
] | f9f8b89ea1b084841a8ab65784eaf68852686b2a | https://github.com/NiklasRosenstein-Python/nr-deprecated/blob/f9f8b89ea1b084841a8ab65784eaf68852686b2a/nr/strex.py#L188-L198 | train | 54,817 |
NiklasRosenstein-Python/nr-deprecated | nr/strex.py | Scanner.readline | def readline(self):
" Reads a full line from the scanner and returns it. "
start = end = self.index
while end < len(self.text):
if self.text[end] == '\n':
end += 1
break
end += 1
result = self.text[start:end]
self.index = end
if result.endswith('\n'):
self.colno = 0
self.lineno += 1
else:
self.colno += end - start
return result | python | def readline(self):
" Reads a full line from the scanner and returns it. "
start = end = self.index
while end < len(self.text):
if self.text[end] == '\n':
end += 1
break
end += 1
result = self.text[start:end]
self.index = end
if result.endswith('\n'):
self.colno = 0
self.lineno += 1
else:
self.colno += end - start
return result | [
"def",
"readline",
"(",
"self",
")",
":",
"start",
"=",
"end",
"=",
"self",
".",
"index",
"while",
"end",
"<",
"len",
"(",
"self",
".",
"text",
")",
":",
"if",
"self",
".",
"text",
"[",
"end",
"]",
"==",
"'\\n'",
":",
"end",
"+=",
"1",
"break",... | Reads a full line from the scanner and returns it. | [
"Reads",
"a",
"full",
"line",
"from",
"the",
"scanner",
"and",
"returns",
"it",
"."
] | f9f8b89ea1b084841a8ab65784eaf68852686b2a | https://github.com/NiklasRosenstein-Python/nr-deprecated/blob/f9f8b89ea1b084841a8ab65784eaf68852686b2a/nr/strex.py#L200-L216 | train | 54,818 |
NiklasRosenstein-Python/nr-deprecated | nr/strex.py | Lexer.accept | def accept(self, *names, **kwargs):
"""
Extracts a token of one of the specified rule names and doesn't error if
unsuccessful. Skippable tokens might still be skipped by this method.
# Arguments
names (str): One or more token names that are accepted.
kwargs: Additional keyword arguments for #next().
# Raises
ValueError: if a rule with the specified name doesn't exist.
"""
return self.next(*names, as_accept=True, **kwargs) | python | def accept(self, *names, **kwargs):
"""
Extracts a token of one of the specified rule names and doesn't error if
unsuccessful. Skippable tokens might still be skipped by this method.
# Arguments
names (str): One or more token names that are accepted.
kwargs: Additional keyword arguments for #next().
# Raises
ValueError: if a rule with the specified name doesn't exist.
"""
return self.next(*names, as_accept=True, **kwargs) | [
"def",
"accept",
"(",
"self",
",",
"*",
"names",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"self",
".",
"next",
"(",
"*",
"names",
",",
"as_accept",
"=",
"True",
",",
"*",
"*",
"kwargs",
")"
] | Extracts a token of one of the specified rule names and doesn't error if
unsuccessful. Skippable tokens might still be skipped by this method.
# Arguments
names (str): One or more token names that are accepted.
kwargs: Additional keyword arguments for #next().
# Raises
ValueError: if a rule with the specified name doesn't exist. | [
"Extracts",
"a",
"token",
"of",
"one",
"of",
"the",
"specified",
"rule",
"names",
"and",
"doesn",
"t",
"error",
"if",
"unsuccessful",
".",
"Skippable",
"tokens",
"might",
"still",
"be",
"skipped",
"by",
"this",
"method",
"."
] | f9f8b89ea1b084841a8ab65784eaf68852686b2a | https://github.com/NiklasRosenstein-Python/nr-deprecated/blob/f9f8b89ea1b084841a8ab65784eaf68852686b2a/nr/strex.py#L352-L365 | train | 54,819 |
satori-ng/hooker | hooker/event_list.py | EventList.append | def append(self, event, help=""):
"""Creates a new event. `event` may be iterable or string
Args:
event (str): Name of event to declare
Kwrgs:
help (str): Help string for the event
Raises:
TypeError
**Please** describe the event and its calling arguments in the help
string.
"""
if isinstance(event, str):
self._events[event] = HookList(is_waterfall=self.is_waterfall)
self._help[event] = (help, getframeinfo(stack()[1][0]))
if not help:
logger.warning("Great, don't say anything about your hooks and \
wait for plugin creators to figure it out.")
elif isinstance(event, Iterable):
# Depricated. It does not give the ability to give help string
# TODO: Remove this
for name in event:
self.append(name)
else:
raise TypeError("Invalid event name!") | python | def append(self, event, help=""):
"""Creates a new event. `event` may be iterable or string
Args:
event (str): Name of event to declare
Kwrgs:
help (str): Help string for the event
Raises:
TypeError
**Please** describe the event and its calling arguments in the help
string.
"""
if isinstance(event, str):
self._events[event] = HookList(is_waterfall=self.is_waterfall)
self._help[event] = (help, getframeinfo(stack()[1][0]))
if not help:
logger.warning("Great, don't say anything about your hooks and \
wait for plugin creators to figure it out.")
elif isinstance(event, Iterable):
# Depricated. It does not give the ability to give help string
# TODO: Remove this
for name in event:
self.append(name)
else:
raise TypeError("Invalid event name!") | [
"def",
"append",
"(",
"self",
",",
"event",
",",
"help",
"=",
"\"\"",
")",
":",
"if",
"isinstance",
"(",
"event",
",",
"str",
")",
":",
"self",
".",
"_events",
"[",
"event",
"]",
"=",
"HookList",
"(",
"is_waterfall",
"=",
"self",
".",
"is_waterfall",... | Creates a new event. `event` may be iterable or string
Args:
event (str): Name of event to declare
Kwrgs:
help (str): Help string for the event
Raises:
TypeError
**Please** describe the event and its calling arguments in the help
string. | [
"Creates",
"a",
"new",
"event",
".",
"event",
"may",
"be",
"iterable",
"or",
"string"
] | 8ef1fffe1537f06313799d1e5e6f7acc4ab405b4 | https://github.com/satori-ng/hooker/blob/8ef1fffe1537f06313799d1e5e6f7acc4ab405b4/hooker/event_list.py#L16-L44 | train | 54,820 |
satori-ng/hooker | hooker/event_list.py | EventList.hook | def hook(self, function, event, dependencies):
"""Tries to load the hook to the event
Args:
function (func): Function that will be called when the event is called
Kwargs:
dependencies (str): String or Iterable with modules whose hooks should be called before this one
Raises:
NameError
Note that the dependencies are module-wide, that means that if
`parent.foo` and `parent.bar` are both subscribed to `example` event
and `child` enumerates `parent` as dependcy, **both** `foo` and `bar`
must be called in order for the dependcy to get resolved.
"""
# Hooks all events (recursively)
if event is None:
for e in self._events.keys():
self.hook(function, e, dependencies)
return
# Hook multiple, but specific events (recursively)
if not isinstance(event, str) and isinstance(event, Iterable):
for e in event:
self.hook(function, e, dependencies)
return
# Hook a simple event
event_list = self._events.get(event, None)
if event_list is None:
raise NameError(
"Invalid key provided '%s'. Valid options: %s"
% (event, ", ".join(self._events.keys()))
)
return
return event_list.hook(function, dependencies) | python | def hook(self, function, event, dependencies):
"""Tries to load the hook to the event
Args:
function (func): Function that will be called when the event is called
Kwargs:
dependencies (str): String or Iterable with modules whose hooks should be called before this one
Raises:
NameError
Note that the dependencies are module-wide, that means that if
`parent.foo` and `parent.bar` are both subscribed to `example` event
and `child` enumerates `parent` as dependcy, **both** `foo` and `bar`
must be called in order for the dependcy to get resolved.
"""
# Hooks all events (recursively)
if event is None:
for e in self._events.keys():
self.hook(function, e, dependencies)
return
# Hook multiple, but specific events (recursively)
if not isinstance(event, str) and isinstance(event, Iterable):
for e in event:
self.hook(function, e, dependencies)
return
# Hook a simple event
event_list = self._events.get(event, None)
if event_list is None:
raise NameError(
"Invalid key provided '%s'. Valid options: %s"
% (event, ", ".join(self._events.keys()))
)
return
return event_list.hook(function, dependencies) | [
"def",
"hook",
"(",
"self",
",",
"function",
",",
"event",
",",
"dependencies",
")",
":",
"# Hooks all events (recursively)",
"if",
"event",
"is",
"None",
":",
"for",
"e",
"in",
"self",
".",
"_events",
".",
"keys",
"(",
")",
":",
"self",
".",
"hook",
"... | Tries to load the hook to the event
Args:
function (func): Function that will be called when the event is called
Kwargs:
dependencies (str): String or Iterable with modules whose hooks should be called before this one
Raises:
NameError
Note that the dependencies are module-wide, that means that if
`parent.foo` and `parent.bar` are both subscribed to `example` event
and `child` enumerates `parent` as dependcy, **both** `foo` and `bar`
must be called in order for the dependcy to get resolved. | [
"Tries",
"to",
"load",
"the",
"hook",
"to",
"the",
"event"
] | 8ef1fffe1537f06313799d1e5e6f7acc4ab405b4 | https://github.com/satori-ng/hooker/blob/8ef1fffe1537f06313799d1e5e6f7acc4ab405b4/hooker/event_list.py#L46-L84 | train | 54,821 |
jessamynsmith/paragres | paragres/command.py | Command.print_message | def print_message(self, message, verbosity_needed=1):
""" Prints the message, if verbosity is high enough. """
if self.args.verbosity >= verbosity_needed:
print(message) | python | def print_message(self, message, verbosity_needed=1):
""" Prints the message, if verbosity is high enough. """
if self.args.verbosity >= verbosity_needed:
print(message) | [
"def",
"print_message",
"(",
"self",
",",
"message",
",",
"verbosity_needed",
"=",
"1",
")",
":",
"if",
"self",
".",
"args",
".",
"verbosity",
">=",
"verbosity_needed",
":",
"print",
"(",
"message",
")"
] | Prints the message, if verbosity is high enough. | [
"Prints",
"the",
"message",
"if",
"verbosity",
"is",
"high",
"enough",
"."
] | 4e068cbfcafbe8f1b010741d38fb65d40de2c6aa | https://github.com/jessamynsmith/paragres/blob/4e068cbfcafbe8f1b010741d38fb65d40de2c6aa/paragres/command.py#L52-L55 | train | 54,822 |
jessamynsmith/paragres | paragres/command.py | Command.error | def error(self, message, code=1):
""" Prints the error, and exits with the given code. """
sys.stderr.write(message)
sys.exit(code) | python | def error(self, message, code=1):
""" Prints the error, and exits with the given code. """
sys.stderr.write(message)
sys.exit(code) | [
"def",
"error",
"(",
"self",
",",
"message",
",",
"code",
"=",
"1",
")",
":",
"sys",
".",
"stderr",
".",
"write",
"(",
"message",
")",
"sys",
".",
"exit",
"(",
"code",
")"
] | Prints the error, and exits with the given code. | [
"Prints",
"the",
"error",
"and",
"exits",
"with",
"the",
"given",
"code",
"."
] | 4e068cbfcafbe8f1b010741d38fb65d40de2c6aa | https://github.com/jessamynsmith/paragres/blob/4e068cbfcafbe8f1b010741d38fb65d40de2c6aa/paragres/command.py#L57-L60 | train | 54,823 |
jessamynsmith/paragres | paragres/command.py | Command.parse_db_settings | def parse_db_settings(self, settings):
""" Parse out database settings from filename or DJANGO_SETTINGS_MODULE. """
if settings == 'DJANGO_SETTINGS_MODULE':
django_settings = os.environ.get('DJANGO_SETTINGS_MODULE')
self.print_message("Getting settings file from DJANGO_SETTINGS_MODULE=%s"
% django_settings)
path_pieces = django_settings.split('.')
path_pieces[-1] = '%s.py' % path_pieces[-1]
settings = os.path.join(*path_pieces)
self.print_message("Parsing settings from settings file '%s'" % settings)
parser = DatabaseSettingsParser()
with open(settings) as settings_file:
settings_ast = ast.parse(settings_file.read())
parser.visit(settings_ast)
try:
return parser.database_settings['default']
except KeyError as e:
self.error("Missing key or value for: %s\nSettings must be of the form: %s"
% (e, self.settings_format)) | python | def parse_db_settings(self, settings):
""" Parse out database settings from filename or DJANGO_SETTINGS_MODULE. """
if settings == 'DJANGO_SETTINGS_MODULE':
django_settings = os.environ.get('DJANGO_SETTINGS_MODULE')
self.print_message("Getting settings file from DJANGO_SETTINGS_MODULE=%s"
% django_settings)
path_pieces = django_settings.split('.')
path_pieces[-1] = '%s.py' % path_pieces[-1]
settings = os.path.join(*path_pieces)
self.print_message("Parsing settings from settings file '%s'" % settings)
parser = DatabaseSettingsParser()
with open(settings) as settings_file:
settings_ast = ast.parse(settings_file.read())
parser.visit(settings_ast)
try:
return parser.database_settings['default']
except KeyError as e:
self.error("Missing key or value for: %s\nSettings must be of the form: %s"
% (e, self.settings_format)) | [
"def",
"parse_db_settings",
"(",
"self",
",",
"settings",
")",
":",
"if",
"settings",
"==",
"'DJANGO_SETTINGS_MODULE'",
":",
"django_settings",
"=",
"os",
".",
"environ",
".",
"get",
"(",
"'DJANGO_SETTINGS_MODULE'",
")",
"self",
".",
"print_message",
"(",
"\"Get... | Parse out database settings from filename or DJANGO_SETTINGS_MODULE. | [
"Parse",
"out",
"database",
"settings",
"from",
"filename",
"or",
"DJANGO_SETTINGS_MODULE",
"."
] | 4e068cbfcafbe8f1b010741d38fb65d40de2c6aa | https://github.com/jessamynsmith/paragres/blob/4e068cbfcafbe8f1b010741d38fb65d40de2c6aa/paragres/command.py#L62-L82 | train | 54,824 |
jessamynsmith/paragres | paragres/command.py | Command.initialize_db_args | def initialize_db_args(self, settings, db_key):
""" Initialize connection arguments for postgres commands. """
self.print_message("Initializing database settings for %s" % db_key, verbosity_needed=2)
db_member = self.databases[db_key]
db_name = settings.get('NAME')
if db_name and not db_member['name']:
db_member['name'] = db_name
db_member['password'] = settings.get('PASSWORD')
args = []
for key in ['USER', 'HOST', 'PORT']:
value = settings.get(key)
if value:
self.print_message("Adding parameter %s" % key.lower, verbosity_needed=2)
args.append('--%s=%s' % (key.lower(), value))
db_member['args'] = args | python | def initialize_db_args(self, settings, db_key):
""" Initialize connection arguments for postgres commands. """
self.print_message("Initializing database settings for %s" % db_key, verbosity_needed=2)
db_member = self.databases[db_key]
db_name = settings.get('NAME')
if db_name and not db_member['name']:
db_member['name'] = db_name
db_member['password'] = settings.get('PASSWORD')
args = []
for key in ['USER', 'HOST', 'PORT']:
value = settings.get(key)
if value:
self.print_message("Adding parameter %s" % key.lower, verbosity_needed=2)
args.append('--%s=%s' % (key.lower(), value))
db_member['args'] = args | [
"def",
"initialize_db_args",
"(",
"self",
",",
"settings",
",",
"db_key",
")",
":",
"self",
".",
"print_message",
"(",
"\"Initializing database settings for %s\"",
"%",
"db_key",
",",
"verbosity_needed",
"=",
"2",
")",
"db_member",
"=",
"self",
".",
"databases",
... | Initialize connection arguments for postgres commands. | [
"Initialize",
"connection",
"arguments",
"for",
"postgres",
"commands",
"."
] | 4e068cbfcafbe8f1b010741d38fb65d40de2c6aa | https://github.com/jessamynsmith/paragres/blob/4e068cbfcafbe8f1b010741d38fb65d40de2c6aa/paragres/command.py#L84-L103 | train | 54,825 |
jessamynsmith/paragres | paragres/command.py | Command.download_file | def download_file(self, url, filename):
""" Download file from url to filename. """
self.print_message("Downloading to file '%s' from URL '%s'" % (filename, url))
try:
db_file = urllib2.urlopen(url)
with open(filename, 'wb') as output:
output.write(db_file.read())
db_file.close()
except Exception as e:
self.error(str(e))
self.print_message("File downloaded") | python | def download_file(self, url, filename):
""" Download file from url to filename. """
self.print_message("Downloading to file '%s' from URL '%s'" % (filename, url))
try:
db_file = urllib2.urlopen(url)
with open(filename, 'wb') as output:
output.write(db_file.read())
db_file.close()
except Exception as e:
self.error(str(e))
self.print_message("File downloaded") | [
"def",
"download_file",
"(",
"self",
",",
"url",
",",
"filename",
")",
":",
"self",
".",
"print_message",
"(",
"\"Downloading to file '%s' from URL '%s'\"",
"%",
"(",
"filename",
",",
"url",
")",
")",
"try",
":",
"db_file",
"=",
"urllib2",
".",
"urlopen",
"(... | Download file from url to filename. | [
"Download",
"file",
"from",
"url",
"to",
"filename",
"."
] | 4e068cbfcafbe8f1b010741d38fb65d40de2c6aa | https://github.com/jessamynsmith/paragres/blob/4e068cbfcafbe8f1b010741d38fb65d40de2c6aa/paragres/command.py#L115-L125 | train | 54,826 |
jessamynsmith/paragres | paragres/command.py | Command.unzip_file_if_necessary | def unzip_file_if_necessary(self, source_file):
""" Unzip file if zipped. """
if source_file.endswith(".gz"):
self.print_message("Decompressing '%s'" % source_file)
subprocess.check_call(["gunzip", "--force", source_file])
source_file = source_file[:-len(".gz")]
return source_file | python | def unzip_file_if_necessary(self, source_file):
""" Unzip file if zipped. """
if source_file.endswith(".gz"):
self.print_message("Decompressing '%s'" % source_file)
subprocess.check_call(["gunzip", "--force", source_file])
source_file = source_file[:-len(".gz")]
return source_file | [
"def",
"unzip_file_if_necessary",
"(",
"self",
",",
"source_file",
")",
":",
"if",
"source_file",
".",
"endswith",
"(",
"\".gz\"",
")",
":",
"self",
".",
"print_message",
"(",
"\"Decompressing '%s'\"",
"%",
"source_file",
")",
"subprocess",
".",
"check_call",
"(... | Unzip file if zipped. | [
"Unzip",
"file",
"if",
"zipped",
"."
] | 4e068cbfcafbe8f1b010741d38fb65d40de2c6aa | https://github.com/jessamynsmith/paragres/blob/4e068cbfcafbe8f1b010741d38fb65d40de2c6aa/paragres/command.py#L127-L133 | train | 54,827 |
jessamynsmith/paragres | paragres/command.py | Command.download_file_from_url | def download_file_from_url(self, source_app, url):
""" Download file from source app or url, and return local filename. """
if source_app:
source_name = source_app
else:
source_name = urlparse.urlparse(url).netloc.replace('.', '_')
filename = self.create_file_name(source_name)
self.download_file(url, filename)
return filename | python | def download_file_from_url(self, source_app, url):
""" Download file from source app or url, and return local filename. """
if source_app:
source_name = source_app
else:
source_name = urlparse.urlparse(url).netloc.replace('.', '_')
filename = self.create_file_name(source_name)
self.download_file(url, filename)
return filename | [
"def",
"download_file_from_url",
"(",
"self",
",",
"source_app",
",",
"url",
")",
":",
"if",
"source_app",
":",
"source_name",
"=",
"source_app",
"else",
":",
"source_name",
"=",
"urlparse",
".",
"urlparse",
"(",
"url",
")",
".",
"netloc",
".",
"replace",
... | Download file from source app or url, and return local filename. | [
"Download",
"file",
"from",
"source",
"app",
"or",
"url",
"and",
"return",
"local",
"filename",
"."
] | 4e068cbfcafbe8f1b010741d38fb65d40de2c6aa | https://github.com/jessamynsmith/paragres/blob/4e068cbfcafbe8f1b010741d38fb65d40de2c6aa/paragres/command.py#L135-L144 | train | 54,828 |
jessamynsmith/paragres | paragres/command.py | Command.dump_database | def dump_database(self):
""" Create dumpfile from postgres database, and return filename. """
db_file = self.create_file_name(self.databases['source']['name'])
self.print_message("Dumping postgres database '%s' to file '%s'"
% (self.databases['source']['name'], db_file))
self.export_pgpassword('source')
args = [
"pg_dump",
"-Fc",
"--no-acl",
"--no-owner",
"--dbname=%s" % self.databases['source']['name'],
"--file=%s" % db_file,
]
args.extend(self.databases['source']['args'])
subprocess.check_call(args)
return db_file | python | def dump_database(self):
""" Create dumpfile from postgres database, and return filename. """
db_file = self.create_file_name(self.databases['source']['name'])
self.print_message("Dumping postgres database '%s' to file '%s'"
% (self.databases['source']['name'], db_file))
self.export_pgpassword('source')
args = [
"pg_dump",
"-Fc",
"--no-acl",
"--no-owner",
"--dbname=%s" % self.databases['source']['name'],
"--file=%s" % db_file,
]
args.extend(self.databases['source']['args'])
subprocess.check_call(args)
return db_file | [
"def",
"dump_database",
"(",
"self",
")",
":",
"db_file",
"=",
"self",
".",
"create_file_name",
"(",
"self",
".",
"databases",
"[",
"'source'",
"]",
"[",
"'name'",
"]",
")",
"self",
".",
"print_message",
"(",
"\"Dumping postgres database '%s' to file '%s'\"",
"%... | Create dumpfile from postgres database, and return filename. | [
"Create",
"dumpfile",
"from",
"postgres",
"database",
"and",
"return",
"filename",
"."
] | 4e068cbfcafbe8f1b010741d38fb65d40de2c6aa | https://github.com/jessamynsmith/paragres/blob/4e068cbfcafbe8f1b010741d38fb65d40de2c6aa/paragres/command.py#L146-L162 | train | 54,829 |
jessamynsmith/paragres | paragres/command.py | Command.drop_database | def drop_database(self):
""" Drop postgres database. """
self.print_message("Dropping database '%s'" % self.databases['destination']['name'])
self.export_pgpassword('destination')
args = [
"dropdb",
"--if-exists",
self.databases['destination']['name'],
]
args.extend(self.databases['destination']['args'])
subprocess.check_call(args) | python | def drop_database(self):
""" Drop postgres database. """
self.print_message("Dropping database '%s'" % self.databases['destination']['name'])
self.export_pgpassword('destination')
args = [
"dropdb",
"--if-exists",
self.databases['destination']['name'],
]
args.extend(self.databases['destination']['args'])
subprocess.check_call(args) | [
"def",
"drop_database",
"(",
"self",
")",
":",
"self",
".",
"print_message",
"(",
"\"Dropping database '%s'\"",
"%",
"self",
".",
"databases",
"[",
"'destination'",
"]",
"[",
"'name'",
"]",
")",
"self",
".",
"export_pgpassword",
"(",
"'destination'",
")",
"arg... | Drop postgres database. | [
"Drop",
"postgres",
"database",
"."
] | 4e068cbfcafbe8f1b010741d38fb65d40de2c6aa | https://github.com/jessamynsmith/paragres/blob/4e068cbfcafbe8f1b010741d38fb65d40de2c6aa/paragres/command.py#L164-L174 | train | 54,830 |
jessamynsmith/paragres | paragres/command.py | Command.create_database | def create_database(self):
""" Create postgres database. """
self.print_message("Creating database '%s'" % self.databases['destination']['name'])
self.export_pgpassword('destination')
args = [
"createdb",
self.databases['destination']['name'],
]
args.extend(self.databases['destination']['args'])
for arg in self.databases['destination']['args']:
if arg[:7] == '--user=':
args.append('--owner=%s' % arg[7:])
subprocess.check_call(args) | python | def create_database(self):
""" Create postgres database. """
self.print_message("Creating database '%s'" % self.databases['destination']['name'])
self.export_pgpassword('destination')
args = [
"createdb",
self.databases['destination']['name'],
]
args.extend(self.databases['destination']['args'])
for arg in self.databases['destination']['args']:
if arg[:7] == '--user=':
args.append('--owner=%s' % arg[7:])
subprocess.check_call(args) | [
"def",
"create_database",
"(",
"self",
")",
":",
"self",
".",
"print_message",
"(",
"\"Creating database '%s'\"",
"%",
"self",
".",
"databases",
"[",
"'destination'",
"]",
"[",
"'name'",
"]",
")",
"self",
".",
"export_pgpassword",
"(",
"'destination'",
")",
"a... | Create postgres database. | [
"Create",
"postgres",
"database",
"."
] | 4e068cbfcafbe8f1b010741d38fb65d40de2c6aa | https://github.com/jessamynsmith/paragres/blob/4e068cbfcafbe8f1b010741d38fb65d40de2c6aa/paragres/command.py#L176-L188 | train | 54,831 |
jessamynsmith/paragres | paragres/command.py | Command.replace_postgres_db | def replace_postgres_db(self, file_url):
""" Replace postgres database with database from specified source. """
self.print_message("Replacing postgres database")
if file_url:
self.print_message("Sourcing data from online backup file '%s'" % file_url)
source_file = self.download_file_from_url(self.args.source_app, file_url)
elif self.databases['source']['name']:
self.print_message("Sourcing data from database '%s'"
% self.databases['source']['name'])
source_file = self.dump_database()
else:
self.print_message("Sourcing data from local backup file %s" % self.args.file)
source_file = self.args.file
self.drop_database()
self.create_database()
source_file = self.unzip_file_if_necessary(source_file)
self.print_message("Importing '%s' into database '%s'"
% (source_file, self.databases['destination']['name']))
args = [
"pg_restore",
"--no-acl",
"--no-owner",
"--dbname=%s" % self.databases['destination']['name'],
source_file,
]
args.extend(self.databases['destination']['args'])
subprocess.check_call(args) | python | def replace_postgres_db(self, file_url):
""" Replace postgres database with database from specified source. """
self.print_message("Replacing postgres database")
if file_url:
self.print_message("Sourcing data from online backup file '%s'" % file_url)
source_file = self.download_file_from_url(self.args.source_app, file_url)
elif self.databases['source']['name']:
self.print_message("Sourcing data from database '%s'"
% self.databases['source']['name'])
source_file = self.dump_database()
else:
self.print_message("Sourcing data from local backup file %s" % self.args.file)
source_file = self.args.file
self.drop_database()
self.create_database()
source_file = self.unzip_file_if_necessary(source_file)
self.print_message("Importing '%s' into database '%s'"
% (source_file, self.databases['destination']['name']))
args = [
"pg_restore",
"--no-acl",
"--no-owner",
"--dbname=%s" % self.databases['destination']['name'],
source_file,
]
args.extend(self.databases['destination']['args'])
subprocess.check_call(args) | [
"def",
"replace_postgres_db",
"(",
"self",
",",
"file_url",
")",
":",
"self",
".",
"print_message",
"(",
"\"Replacing postgres database\"",
")",
"if",
"file_url",
":",
"self",
".",
"print_message",
"(",
"\"Sourcing data from online backup file '%s'\"",
"%",
"file_url",
... | Replace postgres database with database from specified source. | [
"Replace",
"postgres",
"database",
"with",
"database",
"from",
"specified",
"source",
"."
] | 4e068cbfcafbe8f1b010741d38fb65d40de2c6aa | https://github.com/jessamynsmith/paragres/blob/4e068cbfcafbe8f1b010741d38fb65d40de2c6aa/paragres/command.py#L190-L220 | train | 54,832 |
jessamynsmith/paragres | paragres/command.py | Command.capture_heroku_database | def capture_heroku_database(self):
""" Capture Heroku database backup. """
self.print_message("Capturing database backup for app '%s'" % self.args.source_app)
args = [
"heroku",
"pg:backups:capture",
"--app=%s" % self.args.source_app,
]
if self.args.use_pgbackups:
args = [
"heroku",
"pgbackups:capture",
"--app=%s" % self.args.source_app,
"--expire",
]
subprocess.check_call(args) | python | def capture_heroku_database(self):
""" Capture Heroku database backup. """
self.print_message("Capturing database backup for app '%s'" % self.args.source_app)
args = [
"heroku",
"pg:backups:capture",
"--app=%s" % self.args.source_app,
]
if self.args.use_pgbackups:
args = [
"heroku",
"pgbackups:capture",
"--app=%s" % self.args.source_app,
"--expire",
]
subprocess.check_call(args) | [
"def",
"capture_heroku_database",
"(",
"self",
")",
":",
"self",
".",
"print_message",
"(",
"\"Capturing database backup for app '%s'\"",
"%",
"self",
".",
"args",
".",
"source_app",
")",
"args",
"=",
"[",
"\"heroku\"",
",",
"\"pg:backups:capture\"",
",",
"\"--app=%... | Capture Heroku database backup. | [
"Capture",
"Heroku",
"database",
"backup",
"."
] | 4e068cbfcafbe8f1b010741d38fb65d40de2c6aa | https://github.com/jessamynsmith/paragres/blob/4e068cbfcafbe8f1b010741d38fb65d40de2c6aa/paragres/command.py#L238-L253 | train | 54,833 |
jessamynsmith/paragres | paragres/command.py | Command.reset_heroku_database | def reset_heroku_database(self):
""" Reset Heroku database. """
self.print_message("Resetting database for app '%s'" % self.args.destination_app)
args = [
"heroku",
"pg:reset",
"--app=%s" % self.args.destination_app,
"DATABASE_URL",
]
subprocess.check_call(args) | python | def reset_heroku_database(self):
""" Reset Heroku database. """
self.print_message("Resetting database for app '%s'" % self.args.destination_app)
args = [
"heroku",
"pg:reset",
"--app=%s" % self.args.destination_app,
"DATABASE_URL",
]
subprocess.check_call(args) | [
"def",
"reset_heroku_database",
"(",
"self",
")",
":",
"self",
".",
"print_message",
"(",
"\"Resetting database for app '%s'\"",
"%",
"self",
".",
"args",
".",
"destination_app",
")",
"args",
"=",
"[",
"\"heroku\"",
",",
"\"pg:reset\"",
",",
"\"--app=%s\"",
"%",
... | Reset Heroku database. | [
"Reset",
"Heroku",
"database",
"."
] | 4e068cbfcafbe8f1b010741d38fb65d40de2c6aa | https://github.com/jessamynsmith/paragres/blob/4e068cbfcafbe8f1b010741d38fb65d40de2c6aa/paragres/command.py#L255-L264 | train | 54,834 |
jessamynsmith/paragres | paragres/command.py | Command.replace_heroku_db | def replace_heroku_db(self, file_url):
""" Replace Heroku database with database from specified source. """
self.print_message("Replacing database for Heroku app '%s'" % self.args.destination_app)
self.reset_heroku_database()
if file_url:
self.print_message("Restoring from URL '%s'" % file_url)
args = [
"heroku",
"pg:backups:restore",
file_url,
"--app=%s" % self.args.destination_app,
"DATABASE",
"--confirm",
self.args.destination_app,
]
if self.args.use_pgbackups:
args = [
"heroku",
"pgbackups:restore",
"--app=%s" % self.args.destination_app,
"DATABASE_URL",
"--confirm",
self.args.destination_app,
file_url,
]
subprocess.check_call(args)
else:
# TODO perhaps add support for file -> heroku by piping to pg:psql
self.print_message("Pushing data from database '%s'" % self.databases['source']['name'])
self.print_message("NOTE: Any postgres authentication settings you passed to paragres "
"will be ignored.\nIf desired, you can export PG* variables.\n"
"You will be prompted for your psql password.")
args = [
"heroku",
"pg:push",
self.databases['source']['name'],
"DATABASE_URL",
"--app=%s" % self.args.destination_app,
]
subprocess.check_call(args) | python | def replace_heroku_db(self, file_url):
""" Replace Heroku database with database from specified source. """
self.print_message("Replacing database for Heroku app '%s'" % self.args.destination_app)
self.reset_heroku_database()
if file_url:
self.print_message("Restoring from URL '%s'" % file_url)
args = [
"heroku",
"pg:backups:restore",
file_url,
"--app=%s" % self.args.destination_app,
"DATABASE",
"--confirm",
self.args.destination_app,
]
if self.args.use_pgbackups:
args = [
"heroku",
"pgbackups:restore",
"--app=%s" % self.args.destination_app,
"DATABASE_URL",
"--confirm",
self.args.destination_app,
file_url,
]
subprocess.check_call(args)
else:
# TODO perhaps add support for file -> heroku by piping to pg:psql
self.print_message("Pushing data from database '%s'" % self.databases['source']['name'])
self.print_message("NOTE: Any postgres authentication settings you passed to paragres "
"will be ignored.\nIf desired, you can export PG* variables.\n"
"You will be prompted for your psql password.")
args = [
"heroku",
"pg:push",
self.databases['source']['name'],
"DATABASE_URL",
"--app=%s" % self.args.destination_app,
]
subprocess.check_call(args) | [
"def",
"replace_heroku_db",
"(",
"self",
",",
"file_url",
")",
":",
"self",
".",
"print_message",
"(",
"\"Replacing database for Heroku app '%s'\"",
"%",
"self",
".",
"args",
".",
"destination_app",
")",
"self",
".",
"reset_heroku_database",
"(",
")",
"if",
"file_... | Replace Heroku database with database from specified source. | [
"Replace",
"Heroku",
"database",
"with",
"database",
"from",
"specified",
"source",
"."
] | 4e068cbfcafbe8f1b010741d38fb65d40de2c6aa | https://github.com/jessamynsmith/paragres/blob/4e068cbfcafbe8f1b010741d38fb65d40de2c6aa/paragres/command.py#L266-L307 | train | 54,835 |
jessamynsmith/paragres | paragres/command.py | Command.run | def run(self):
""" Replace a database with the data from the specified source. """
self.print_message("\nBeginning database replacement process.\n")
if self.args.source_settings:
settings = self.parse_db_settings(self.args.source_settings)
self.initialize_db_args(settings, 'source')
if self.args.settings:
settings = self.parse_db_settings(self.args.settings)
self.initialize_db_args(settings, 'destination')
if self.args.capture:
self.capture_heroku_database()
file_url = self.args.url
if self.args.source_app:
self.print_message("Sourcing data from backup for Heroku app '%s'"
% self.args.source_app)
file_url = self.get_file_url_for_heroku_app(self.args.source_app)
if self.args.destination_app:
self.replace_heroku_db(file_url)
elif self.databases['destination']['name']:
self.replace_postgres_db(file_url)
self.print_message("\nDone.\n\nDon't forget to update the Django Site entry if necessary!") | python | def run(self):
""" Replace a database with the data from the specified source. """
self.print_message("\nBeginning database replacement process.\n")
if self.args.source_settings:
settings = self.parse_db_settings(self.args.source_settings)
self.initialize_db_args(settings, 'source')
if self.args.settings:
settings = self.parse_db_settings(self.args.settings)
self.initialize_db_args(settings, 'destination')
if self.args.capture:
self.capture_heroku_database()
file_url = self.args.url
if self.args.source_app:
self.print_message("Sourcing data from backup for Heroku app '%s'"
% self.args.source_app)
file_url = self.get_file_url_for_heroku_app(self.args.source_app)
if self.args.destination_app:
self.replace_heroku_db(file_url)
elif self.databases['destination']['name']:
self.replace_postgres_db(file_url)
self.print_message("\nDone.\n\nDon't forget to update the Django Site entry if necessary!") | [
"def",
"run",
"(",
"self",
")",
":",
"self",
".",
"print_message",
"(",
"\"\\nBeginning database replacement process.\\n\"",
")",
"if",
"self",
".",
"args",
".",
"source_settings",
":",
"settings",
"=",
"self",
".",
"parse_db_settings",
"(",
"self",
".",
"args",... | Replace a database with the data from the specified source. | [
"Replace",
"a",
"database",
"with",
"the",
"data",
"from",
"the",
"specified",
"source",
"."
] | 4e068cbfcafbe8f1b010741d38fb65d40de2c6aa | https://github.com/jessamynsmith/paragres/blob/4e068cbfcafbe8f1b010741d38fb65d40de2c6aa/paragres/command.py#L309-L335 | train | 54,836 |
VikParuchuri/percept | percept/utils/registry.py | import_task_modules | def import_task_modules():
"""
Import all installed apps and add modules to registry
"""
top_level_modules = settings.INSTALLED_APPS
module_names = []
for module in top_level_modules:
#Import package
mod = import_module(module)
#Find all modules in package path
for loader, module_name, is_pkg in pkgutil.walk_packages(mod.__path__):
if not module_name.startswith("__"):
#If the module is not __init__, add it to the registry
submod_name = "{0}.{1}".format(module,module_name)
module_names.append(submod_name)
#Once everything is imported, the metaclass will register them automatically
modules = map(import_module, module_names)
return modules | python | def import_task_modules():
"""
Import all installed apps and add modules to registry
"""
top_level_modules = settings.INSTALLED_APPS
module_names = []
for module in top_level_modules:
#Import package
mod = import_module(module)
#Find all modules in package path
for loader, module_name, is_pkg in pkgutil.walk_packages(mod.__path__):
if not module_name.startswith("__"):
#If the module is not __init__, add it to the registry
submod_name = "{0}.{1}".format(module,module_name)
module_names.append(submod_name)
#Once everything is imported, the metaclass will register them automatically
modules = map(import_module, module_names)
return modules | [
"def",
"import_task_modules",
"(",
")",
":",
"top_level_modules",
"=",
"settings",
".",
"INSTALLED_APPS",
"module_names",
"=",
"[",
"]",
"for",
"module",
"in",
"top_level_modules",
":",
"#Import package",
"mod",
"=",
"import_module",
"(",
"module",
")",
"#Find all... | Import all installed apps and add modules to registry | [
"Import",
"all",
"installed",
"apps",
"and",
"add",
"modules",
"to",
"registry"
] | 90304ba82053e2a9ad2bacaab3479403d3923bcf | https://github.com/VikParuchuri/percept/blob/90304ba82053e2a9ad2bacaab3479403d3923bcf/percept/utils/registry.py#L12-L29 | train | 54,837 |
def list(self, service_rec=None, host_rec=None, hostfilter=None):
    """
    Retrieve one specific service or every known service.

    :param service_rec: t_services.id
    :param host_rec: t_hosts.id
    :param hostfilter: Valid hostfilter or None
    :return: [(svc.t_services.id, svc.t_services.f_hosts_id, svc.t_hosts.f_ipaddr,
               svc.t_hosts.f_hostname, svc.t_services.f_proto,
               svc.t_services.f_number, svc.t_services.f_status, svc.t_services.f_name,
               svc.t_services.f_banner), ...]
    """
    # All the work happens server-side; forward the call over the transport.
    transport = self.send
    return transport.service_list(service_rec, host_rec, hostfilter)
"""
List a specific service or all services
:param service_rec: t_services.id
:param host_rec: t_hosts.id
:param hostfilter: Valid hostfilter or None
:return: [(svc.t_services.id, svc.t_services.f_hosts_id, svc.t_hosts.f_ipaddr,
svc.t_hosts.f_hostname, svc.t_services.f_proto,
svc.t_services.f_number, svc.t_services.f_status, svc.t_services.f_name,
svc.t_services.f_banner), ...]
"""
return self.send.service_list(service_rec, host_rec, hostfilter) | [
"def",
"list",
"(",
"self",
",",
"service_rec",
"=",
"None",
",",
"host_rec",
"=",
"None",
",",
"hostfilter",
"=",
"None",
")",
":",
"return",
"self",
".",
"send",
".",
"service_list",
"(",
"service_rec",
",",
"host_rec",
",",
"hostfilter",
")"
] | List a specific service or all services
:param service_rec: t_services.id
:param host_rec: t_hosts.id
:param hostfilter: Valid hostfilter or None
:return: [(svc.t_services.id, svc.t_services.f_hosts_id, svc.t_hosts.f_ipaddr,
svc.t_hosts.f_hostname, svc.t_services.f_proto,
svc.t_services.f_number, svc.t_services.f_status, svc.t_services.f_name,
svc.t_services.f_banner), ...] | [
"List",
"a",
"specific",
"service",
"or",
"all",
"services"
] | ec8c5818bd5913f3afd150f25eaec6e7cc732f4c | https://github.com/KvasirSecurity/kvasirapi-python/blob/ec8c5818bd5913f3afd150f25eaec6e7cc732f4c/KvasirAPI/jsonrpc/services.py#L20-L32 | train | 54,838 |
def info(self, svc_rec=None, ipaddr=None, proto=None, port=None):
    """
    Fetch the details of a single service.

    :param svc_rec: t_services.id
    :param ipaddr: IP Address
    :param proto: Protocol (tcp, udp, info)
    :param port: Port (0-65535)
    :return: [ service_id, host_id, ipv4, ipv6, hostname, proto, number, status, name, banner ]
    """
    # Thin delegate: the remote endpoint resolves the record.
    transport = self.send
    return transport.service_info(svc_rec, ipaddr, proto, port)
"""
Information about a service.
:param svc_rec: t_services.id
:param ipaddr: IP Address
:param proto: Protocol (tcp, udp, info)
:param port: Port (0-65535)
:return: [ service_id, host_id, ipv4, ipv6, hostname, proto, number, status, name, banner ]
"""
return self.send.service_info(svc_rec, ipaddr, proto, port) | [
"def",
"info",
"(",
"self",
",",
"svc_rec",
"=",
"None",
",",
"ipaddr",
"=",
"None",
",",
"proto",
"=",
"None",
",",
"port",
"=",
"None",
")",
":",
"return",
"self",
".",
"send",
".",
"service_info",
"(",
"svc_rec",
",",
"ipaddr",
",",
"proto",
","... | Information about a service.
:param svc_rec: t_services.id
:param ipaddr: IP Address
:param proto: Protocol (tcp, udp, info)
:param port: Port (0-65535)
:return: [ service_id, host_id, ipv4, ipv6, hostname, proto, number, status, name, banner ] | [
"Information",
"about",
"a",
"service",
"."
] | ec8c5818bd5913f3afd150f25eaec6e7cc732f4c | https://github.com/KvasirSecurity/kvasirapi-python/blob/ec8c5818bd5913f3afd150f25eaec6e7cc732f4c/KvasirAPI/jsonrpc/services.py#L44-L54 | train | 54,839 |
def add(self, ipaddr=None, proto=None, port=None, fields=None):
    """
    Create a new service record.

    :param ipaddr: IP Address
    :param proto: Protocol (tcp, udp, info)
    :param port: Port (0-65535)
    :param fields: Extra fields
    :return: (True/False, t_services.id or response message)
    """
    # Creation is handled remotely; just relay the arguments.
    transport = self.send
    return transport.service_add(ipaddr, proto, port, fields)
"""
Add a service record
:param ipaddr: IP Address
:param proto: Protocol (tcp, udp, info)
:param port: Port (0-65535)
:param fields: Extra fields
:return: (True/False, t_services.id or response message)
"""
return self.send.service_add(ipaddr, proto, port, fields) | [
"def",
"add",
"(",
"self",
",",
"ipaddr",
"=",
"None",
",",
"proto",
"=",
"None",
",",
"port",
"=",
"None",
",",
"fields",
"=",
"None",
")",
":",
"return",
"self",
".",
"send",
".",
"service_add",
"(",
"ipaddr",
",",
"proto",
",",
"port",
",",
"f... | Add a service record
:param ipaddr: IP Address
:param proto: Protocol (tcp, udp, info)
:param port: Port (0-65535)
:param fields: Extra fields
:return: (True/False, t_services.id or response message) | [
"Add",
"a",
"service",
"record"
] | ec8c5818bd5913f3afd150f25eaec6e7cc732f4c | https://github.com/KvasirSecurity/kvasirapi-python/blob/ec8c5818bd5913f3afd150f25eaec6e7cc732f4c/KvasirAPI/jsonrpc/services.py#L56-L66 | train | 54,840 |
def delete(self, svc_rec=None, ipaddr=None, proto=None, port=None):
    """
    Remove a t_services record.

    :param svc_rec: t_services.id
    :param ipaddr: IP Address or t_hosts.id
    :param proto: Protocol (tcp, udp, info)
    :param port: Port (0-65535)
    :return: [True, Response Message]
    """
    # Deletion is resolved server-side from whichever identifiers are given.
    transport = self.send
    return transport.service_del(svc_rec, ipaddr, proto, port)
"""
Delete a t_services record
:param svc_rec: t_services.id
:param ipaddr: IP Address or t_hosts.id
:param proto: Protocol (tcp, udp, info)
:param port: Port (0-65535)
:return: [True, Response Message]
"""
return self.send.service_del(svc_rec, ipaddr, proto, port) | [
"def",
"delete",
"(",
"self",
",",
"svc_rec",
"=",
"None",
",",
"ipaddr",
"=",
"None",
",",
"proto",
"=",
"None",
",",
"port",
"=",
"None",
")",
":",
"return",
"self",
".",
"send",
".",
"service_del",
"(",
"svc_rec",
",",
"ipaddr",
",",
"proto",
",... | Delete a t_services record
:param svc_rec: t_services.id
:param ipaddr: IP Address or t_hosts.id
:param proto: Protocol (tcp, udp, info)
:param port: Port (0-65535)
:return: [True, Response Message] | [
"Delete",
"a",
"t_services",
"record"
] | ec8c5818bd5913f3afd150f25eaec6e7cc732f4c | https://github.com/KvasirSecurity/kvasirapi-python/blob/ec8c5818bd5913f3afd150f25eaec6e7cc732f4c/KvasirAPI/jsonrpc/services.py#L68-L78 | train | 54,841 |
def vulns_list(self, service_id=None, service_port=None, hostfilter=None):
    """
    List the vulnerabilities recorded against a service.

    :param service_id: t_services.id
    :param service_port: tcp/#, udp/# or info/#
    :param hostfilter: Valid hostfilter or None
    :return: t_services.rows.as_list()
    """
    # The vulnerability join is performed by the remote endpoint.
    transport = self.send
    return transport.service_vulns_list(service_id, service_port, hostfilter)
"""
List of vulnerabilities for a service
:param service_id: t_services.id
:param service_port: tcp/#, udp/# or info/#
:param hostfilter: Valid hostfilter or None
:return: t_services.rows.as_list()
"""
return self.send.service_vulns_list(service_id, service_port, hostfilter) | [
"def",
"vulns_list",
"(",
"self",
",",
"service_id",
"=",
"None",
",",
"service_port",
"=",
"None",
",",
"hostfilter",
"=",
"None",
")",
":",
"return",
"self",
".",
"send",
".",
"service_vulns_list",
"(",
"service_id",
",",
"service_port",
",",
"hostfilter",... | List of vulnerabilities for a service
:param service_id: t_services.id
:param service_port: tcp/#, udp/# or info/#
:param hostfilter: Valid hostfilter or None
:return: t_services.rows.as_list() | [
"List",
"of",
"vulnerabilities",
"for",
"a",
"service"
] | ec8c5818bd5913f3afd150f25eaec6e7cc732f4c | https://github.com/KvasirSecurity/kvasirapi-python/blob/ec8c5818bd5913f3afd150f25eaec6e7cc732f4c/KvasirAPI/jsonrpc/services.py#L101-L110 | train | 54,842 |
def connect(nodes):
    '''
    Wire up a pipeline of nodes.
    After connecting, each node's ``output`` attribute points at the next
    node in the list, and the final node's ``output`` is a :class:`Queue`
    for easy plumbing.
    '''
    # NOTE(review): an empty or single-element list is left untouched here
    # (no output queue is attached) -- confirm callers never pass one.
    for upstream, downstream in zip(nodes[:-1], nodes[1:]):
        upstream.output = downstream
        # Give the current last node a queue; this is overwritten on the
        # next pass whenever another node follows it.
        downstream.output = queues.Queue()
'''
Connect a list of nodes.
Connected nodes have an ``output`` member which is the following node in
the line. The last node's ``output`` is a :class:`Queue` for
easy plumbing.
'''
for a, b in zip(nodes[:-1], nodes[1:]):
a.output = b
b.output = queues.Queue() | [
"def",
"connect",
"(",
"nodes",
")",
":",
"for",
"a",
",",
"b",
"in",
"zip",
"(",
"nodes",
"[",
":",
"-",
"1",
"]",
",",
"nodes",
"[",
"1",
":",
"]",
")",
":",
"a",
".",
"output",
"=",
"b",
"b",
".",
"output",
"=",
"queues",
".",
"Queue",
... | Connect a list of nodes.
Connected nodes have an ``output`` member which is the following node in
the line. The last node's ``output`` is a :class:`Queue` for
easy plumbing. | [
"Connect",
"a",
"list",
"of",
"nodes",
"."
] | c4d238aa9711abfe8c7e94bb0dd4e170d0f48601 | https://github.com/Nagasaki45/fluteline/blob/c4d238aa9711abfe8c7e94bb0dd4e170d0f48601/fluteline/utils.py#L5-L15 | train | 54,843 |
def render_layout(layout_name, content, **context):
    """Render *content* wrapped inside the named layout template.
    A small template is generated that extends the layout and injects the
    content into one of its blocks. The target block defaults to "content"
    and can be overridden by appending ``:blockname`` to *layout_name*.
    """
    block = "content"
    if ":" in layout_name:
        layout_name, block = layout_name.split(":")
    # Double-percent escapes survive the %-formatting pass and become
    # literal Jinja tags in the generated template.
    wrapper = '{%% extends "%s" %%}{%% block %s %%}%s{%% endblock %%}' % (layout_name, block, content)
    return render_template_string(wrapper, **context)
"""Uses a jinja template to wrap the content inside a layout.
Wraps the content inside a block and adds the extend statement before rendering it
with jinja. The block name can be specified in the layout_name after the filename separated
by a colon. The default block name is "content".
"""
layout_block = "content"
if ":" in layout_name:
layout_name, layout_block = layout_name.split(":")
tpl = '{%% extends "%s" %%}{%% block %s %%}%s{%% endblock %%}' % (layout_name, layout_block, content)
return render_template_string(tpl, **context) | [
"def",
"render_layout",
"(",
"layout_name",
",",
"content",
",",
"*",
"*",
"context",
")",
":",
"layout_block",
"=",
"\"content\"",
"if",
"\":\"",
"in",
"layout_name",
":",
"layout_name",
",",
"layout_block",
"=",
"layout_name",
".",
"split",
"(",
"\":\"",
"... | Uses a jinja template to wrap the content inside a layout.
Wraps the content inside a block and adds the extend statement before rendering it
with jinja. The block name can be specified in the layout_name after the filename separated
by a colon. The default block name is "content". | [
"Uses",
"a",
"jinja",
"template",
"to",
"wrap",
"the",
"content",
"inside",
"a",
"layout",
".",
"Wraps",
"the",
"content",
"inside",
"a",
"block",
"and",
"adds",
"the",
"extend",
"statement",
"before",
"rendering",
"it",
"with",
"jinja",
".",
"The",
"block... | ea519d69dd5ca6deaf3650175692ee4a1a02518f | https://github.com/frascoweb/frasco/blob/ea519d69dd5ca6deaf3650175692ee4a1a02518f/frasco/templating/__init__.py#L174-L184 | train | 54,844 |
def parse_template(app, filename):
    """Parse *filename* with the app's Jinja environment and return its AST.
    Parsed ASTs are memoized on the function itself in
    ``parse_template.cache``, keyed by filename, so each template is read
    and parsed at most once per process.
    """
    if not hasattr(parse_template, "cache"):
        parse_template.cache = {}
    cache = parse_template.cache
    if filename not in cache:
        source = get_template_source(app, filename)
        cache[filename] = app.jinja_env.parse(source, filename=filename)
    return cache[filename]
"""Parses the given template using the jinja environment of the given app
and returns the AST. ASTs are cached in parse_template.cache
"""
if not hasattr(parse_template, "cache"):
parse_template.cache = {}
if filename not in parse_template.cache:
source = get_template_source(app, filename)
parse_template.cache[filename] = app.jinja_env.parse(source, filename=filename)
return parse_template.cache[filename] | [
"def",
"parse_template",
"(",
"app",
",",
"filename",
")",
":",
"if",
"not",
"hasattr",
"(",
"parse_template",
",",
"\"cache\"",
")",
":",
"parse_template",
".",
"cache",
"=",
"{",
"}",
"if",
"filename",
"not",
"in",
"parse_template",
".",
"cache",
":",
... | Parses the given template using the jinja environment of the given app
and returns the AST. ASTs are cached in parse_template.cache | [
"Parses",
"the",
"given",
"template",
"using",
"the",
"jinja",
"environment",
"of",
"the",
"given",
"app",
"and",
"returns",
"the",
"AST",
".",
"ASTs",
"are",
"cached",
"in",
"parse_template",
".",
"cache"
] | ea519d69dd5ca6deaf3650175692ee4a1a02518f | https://github.com/frascoweb/frasco/blob/ea519d69dd5ca6deaf3650175692ee4a1a02518f/frasco/templating/__init__.py#L192-L201 | train | 54,845 |
def jinja_node_to_python(node):
    """Translate a Jinja2 AST node into its Python equivalent.
    Handles constants, negation, bare names, list/tuple and dict literals,
    plus translation calls (``_`` / ``translate`` / ``gettext``). Any other
    call raises FormDefinitionError; any other node type raises Exception.
    """
    if isinstance(node, nodes.Const):
        return node.value
    if isinstance(node, nodes.Neg):
        return -jinja_node_to_python(node.node)
    if isinstance(node, nodes.Name):
        return node.name
    if isinstance(node, (nodes.List, nodes.Tuple)):
        # Both literal kinds come back as a Python list.
        return [jinja_node_to_python(item) for item in node.items]
    if isinstance(node, nodes.Dict):
        return {pair.key.value: jinja_node_to_python(pair.value)
                for pair in node.items}
    if isinstance(node, nodes.Call):
        is_translation = (isinstance(node.node, nodes.Name)
                          and node.node.name in ("_", "translate", "gettext"))
        if not is_translation:
            raise FormDefinitionError("Cannot convert function calls from jinja to python other than translation calls")
        return lazy_translate(jinja_node_to_python(node.args[0]))
    raise Exception("Cannot convert jinja nodes to python")
"""Converts a Jinja2 node to its python equivalent
"""
if isinstance(node, nodes.Const):
return node.value
if isinstance(node, nodes.Neg):
return -jinja_node_to_python(node.node)
if isinstance(node, nodes.Name):
return node.name
if isinstance(node, (nodes.List, nodes.Tuple)):
value = []
for i in node.items:
value.append(jinja_node_to_python(i))
return value
if isinstance(node, nodes.Dict):
value = {}
for pair in node.items:
value[pair.key.value] = jinja_node_to_python(pair.value)
return value
if isinstance(node, nodes.Call):
if not isinstance(node.node, nodes.Name) or node.node.name not in ("_", "translate", "gettext"):
raise FormDefinitionError("Cannot convert function calls from jinja to python other than translation calls")
return lazy_translate(jinja_node_to_python(node.args[0]))
raise Exception("Cannot convert jinja nodes to python") | [
"def",
"jinja_node_to_python",
"(",
"node",
")",
":",
"if",
"isinstance",
"(",
"node",
",",
"nodes",
".",
"Const",
")",
":",
"return",
"node",
".",
"value",
"if",
"isinstance",
"(",
"node",
",",
"nodes",
".",
"Neg",
")",
":",
"return",
"-",
"jinja_node... | Converts a Jinja2 node to its python equivalent | [
"Converts",
"a",
"Jinja2",
"node",
"to",
"its",
"python",
"equivalent"
] | ea519d69dd5ca6deaf3650175692ee4a1a02518f | https://github.com/frascoweb/frasco/blob/ea519d69dd5ca6deaf3650175692ee4a1a02518f/frasco/templating/__init__.py#L204-L227 | train | 54,846 |
def in_group(self, group, dn=False):
    """Check whether this member belongs to the given group.
    Arguments:
    group -- the CSHGroup object (or distinguished name) of the group to
             check membership for
    dn -- if True, treat *group* as a distinguished name and test it
          against this member's group list instead
    """
    if not dn:
        # Let the group object decide membership.
        return group.check_member(self)
    return group in self.groups()
"""Get whether or not the bound CSH LDAP member object is part of a
group.
Arguments:
group -- the CSHGroup object (or distinguished name) of the group to
check membership for
"""
if dn:
return group in self.groups()
return group.check_member(self) | [
"def",
"in_group",
"(",
"self",
",",
"group",
",",
"dn",
"=",
"False",
")",
":",
"if",
"dn",
":",
"return",
"group",
"in",
"self",
".",
"groups",
"(",
")",
"return",
"group",
".",
"check_member",
"(",
"self",
")"
] | Get whether or not the bound CSH LDAP member object is part of a
group.
Arguments:
group -- the CSHGroup object (or distinguished name) of the group to
check membership for | [
"Get",
"whether",
"or",
"not",
"the",
"bound",
"CSH",
"LDAP",
"member",
"object",
"is",
"part",
"of",
"a",
"group",
"."
] | 90bd334a20e13c03af07bce4f104ad96baf620e4 | https://github.com/liam-middlebrook/csh_ldap/blob/90bd334a20e13c03af07bce4f104ad96baf620e4/csh_ldap/member.py#L59-L69 | train | 54,847 |
def savgol_filter(x, window_length, polyorder, deriv=0, delta=1.0, axis=-1, mode='interp', cval=0.0):
    '''
    NaN-tolerant wrapper for the scipy.signal.savgol_filter function.
    See: https://github.com/wheeler-microfluidics/dmf-control-board-firmware/issues/3
    Returns
    -------
    y : ndarray, same shape as `x`
        The filtered data.
    '''
    # Fill interior NaNs by linear interpolation; leading NaNs survive this.
    filled = np.ma.masked_invalid(pd.Series(x).interpolate())
    try:
        # Filter only from the first finite sample onwards, since leading
        # gaps cannot be addressed by the interpolation above.
        start = np.isfinite(filled).nonzero()[0][0]
        filled[start:] = signal.savgol_filter(filled[start:], window_length,
                                              polyorder, deriv, delta, axis,
                                              mode, cval)
    except IndexError:
        # No finite samples at all -- return the fully-masked data as-is.
        pass
    return np.ma.masked_invalid(filled)
'''
Wrapper for the scipy.signal.savgol_filter function that handles Nan values.
See: https://github.com/wheeler-microfluidics/dmf-control-board-firmware/issues/3
Returns
-------
y : ndarray, same shape as `x`
The filtered data.
'''
# linearly interpolate missing values before filtering
x = np.ma.masked_invalid(pd.Series(x).interpolate())
try:
# start filtering from the first non-zero value since these won't be addressed by
# the interpolation above
ind = np.isfinite(x).nonzero()[0][0]
x[ind:] = signal.savgol_filter(x[ind:], window_length, polyorder, deriv,
delta, axis, mode, cval)
except IndexError:
pass
return np.ma.masked_invalid(x) | [
"def",
"savgol_filter",
"(",
"x",
",",
"window_length",
",",
"polyorder",
",",
"deriv",
"=",
"0",
",",
"delta",
"=",
"1.0",
",",
"axis",
"=",
"-",
"1",
",",
"mode",
"=",
"'interp'",
",",
"cval",
"=",
"0.0",
")",
":",
"# linearly interpolate missing value... | Wrapper for the scipy.signal.savgol_filter function that handles Nan values.
See: https://github.com/wheeler-microfluidics/dmf-control-board-firmware/issues/3
Returns
-------
y : ndarray, same shape as `x`
The filtered data. | [
"Wrapper",
"for",
"the",
"scipy",
".",
"signal",
".",
"savgol_filter",
"function",
"that",
"handles",
"Nan",
"values",
"."
] | 1cd8cc9a148d530f9a11f634f2dbfe73f08aa27c | https://github.com/wheeler-microfluidics/dmf-control-board-firmware/blob/1cd8cc9a148d530f9a11f634f2dbfe73f08aa27c/dmf_control_board_firmware/__init__.py#L50-L72 | train | 54,848 |
def feedback_results_to_measurements_frame(feedback_result):
    '''
    Flatten the raw measurements of a `FeedbackResults` instance into a
    `pandas.DataFrame`.
    '''
    # Time is recorded in milliseconds; index the frame in seconds.
    seconds = pd.Index(feedback_result.time * 1e-3, name='seconds')
    # column_stack keeps the original homogeneous float layout.
    measured = np.column_stack([feedback_result.V_fb,
                                feedback_result.V_hv,
                                feedback_result.fb_resistor,
                                feedback_result.hv_resistor])
    frame = pd.DataFrame(measured,
                         columns=['V_fb', 'V_hv', 'fb_resistor',
                                  'hv_resistor'],
                         index=seconds)
    frame.insert(0, 'frequency', feedback_result.frequency)
    return frame
'''
Extract measured data from `FeedbackResults` instance into
`pandas.DataFrame`.
'''
index = pd.Index(feedback_result.time * 1e-3, name='seconds')
df_feedback = pd.DataFrame(np.column_stack([feedback_result.V_fb,
feedback_result.V_hv,
feedback_result.fb_resistor,
feedback_result.hv_resistor]),
columns=['V_fb', 'V_hv', 'fb_resistor',
'hv_resistor'],
index=index)
df_feedback.insert(0, 'frequency', feedback_result.frequency)
return df_feedback | [
"def",
"feedback_results_to_measurements_frame",
"(",
"feedback_result",
")",
":",
"index",
"=",
"pd",
".",
"Index",
"(",
"feedback_result",
".",
"time",
"*",
"1e-3",
",",
"name",
"=",
"'seconds'",
")",
"df_feedback",
"=",
"pd",
".",
"DataFrame",
"(",
"np",
... | Extract measured data from `FeedbackResults` instance into
`pandas.DataFrame`. | [
"Extract",
"measured",
"data",
"from",
"FeedbackResults",
"instance",
"into",
"pandas",
".",
"DataFrame",
"."
] | 1cd8cc9a148d530f9a11f634f2dbfe73f08aa27c | https://github.com/wheeler-microfluidics/dmf-control-board-firmware/blob/1cd8cc9a148d530f9a11f634f2dbfe73f08aa27c/dmf_control_board_firmware/__init__.py#L158-L172 | train | 54,849 |
def feedback_results_to_impedance_frame(feedback_result):
    '''
    Extract computed impedance data from a `FeedbackResults` instance into
    a `pandas.DataFrame` indexed by time in seconds.
    '''
    index = pd.Index(feedback_result.time * 1e-3, name='seconds')
    # The derived quantities are masked arrays; expose masked gaps as NaN.
    # Use ``np.nan`` (not the ``np.NaN`` alias, which was removed in
    # NumPy 2.0 -- same object on NumPy 1.x, so behavior is unchanged).
    df_feedback = pd.DataFrame(np.column_stack([feedback_result.V_actuation()
                                                .filled(np.nan),
                                                feedback_result.capacitance()
                                                .filled(np.nan),
                                                feedback_result.Z_device()
                                                .filled(np.nan)]),
                               columns=['V_actuation', 'capacitance',
                                        'impedance'],
                               index=index)
    df_feedback.insert(0, 'frequency', feedback_result.frequency)
    df_feedback.insert(1, 'voltage', feedback_result.voltage)
    return df_feedback
'''
Extract computed impedance data from `FeedbackResults` instance into
`pandas.DataFrame`.
'''
index = pd.Index(feedback_result.time * 1e-3, name='seconds')
df_feedback = pd.DataFrame(np.column_stack([feedback_result.V_actuation()
.filled(np.NaN),
feedback_result.capacitance()
.filled(np.NaN),
feedback_result.Z_device()
.filled(np.NaN)]),
columns=['V_actuation', 'capacitance',
'impedance'],
index=index)
df_feedback.insert(0, 'frequency', feedback_result.frequency)
df_feedback.insert(1, 'voltage', feedback_result.voltage)
return df_feedback | [
"def",
"feedback_results_to_impedance_frame",
"(",
"feedback_result",
")",
":",
"index",
"=",
"pd",
".",
"Index",
"(",
"feedback_result",
".",
"time",
"*",
"1e-3",
",",
"name",
"=",
"'seconds'",
")",
"df_feedback",
"=",
"pd",
".",
"DataFrame",
"(",
"np",
"."... | Extract computed impedance data from `FeedbackResults` instance into
`pandas.DataFrame`. | [
"Extract",
"computed",
"impedance",
"data",
"from",
"FeedbackResults",
"instance",
"into",
"pandas",
".",
"DataFrame",
"."
] | 1cd8cc9a148d530f9a11f634f2dbfe73f08aa27c | https://github.com/wheeler-microfluidics/dmf-control-board-firmware/blob/1cd8cc9a148d530f9a11f634f2dbfe73f08aa27c/dmf_control_board_firmware/__init__.py#L175-L192 | train | 54,850 |
def get_firmwares():
    '''
    Return `dmf_control_board` compiled Arduino hex file paths.
    Scans each board directory under the package's ``firmware`` directory
    for ``*.hex`` images available for flashing to
    [Arduino Mega2560][1] boards.
    [1]: http://arduino.cc/en/Main/arduinoBoardMega2560
    '''
    firmware_root = package_path().joinpath('firmware')
    hex_paths = OrderedDict()
    for board_dir in firmware_root.dirs():
        # One entry per board, listing every hex image found beneath it.
        hex_paths[board_dir.name] = [hex_file.abspath()
                                     for hex_file in
                                     board_dir.walkfiles('*.hex')]
    return hex_paths
'''
Return `dmf_control_board` compiled Arduino hex file paths.
This function may be used to locate firmware binaries that are available
for flashing to [Arduino Mega2560][1] boards.
[1]: http://arduino.cc/en/Main/arduinoBoardMega2560
'''
return OrderedDict([(board_dir.name, [f.abspath() for f in
board_dir.walkfiles('*.hex')])
for board_dir in
package_path().joinpath('firmware').dirs()]) | [
"def",
"get_firmwares",
"(",
")",
":",
"return",
"OrderedDict",
"(",
"[",
"(",
"board_dir",
".",
"name",
",",
"[",
"f",
".",
"abspath",
"(",
")",
"for",
"f",
"in",
"board_dir",
".",
"walkfiles",
"(",
"'*.hex'",
")",
"]",
")",
"for",
"board_dir",
"in"... | Return `dmf_control_board` compiled Arduino hex file paths.
This function may be used to locate firmware binaries that are available
for flashing to [Arduino Mega2560][1] boards.
[1]: http://arduino.cc/en/Main/arduinoBoardMega2560 | [
"Return",
"dmf_control_board",
"compiled",
"Arduino",
"hex",
"file",
"paths",
"."
] | 1cd8cc9a148d530f9a11f634f2dbfe73f08aa27c | https://github.com/wheeler-microfluidics/dmf-control-board-firmware/blob/1cd8cc9a148d530f9a11f634f2dbfe73f08aa27c/dmf_control_board_firmware/__init__.py#L227-L239 | train | 54,851 |
def remote_command(function, self, *args, **kwargs):
    '''
    Catch `RuntimeError` exceptions raised by remote control board firmware
    commands and re-raise as more specific `FirmwareError` exception type,
    which includes command code and return code.
    '''
    try:
        return function(self, *args, **kwargs)
    # ``except X, e`` is Python 2-only syntax and a SyntaxError on
    # Python 3; ``as`` has been valid since Python 2.6, so this is a pure
    # compatibility fix with identical behavior.
    except RuntimeError as exception:
        error_message = str(exception)
        match = CRE_REMOTE_ERROR.match(error_message)
        if match:
            # Exception message matches format of remote firmware error.
            command_code = int(match.group('command_int'))
            return_code = int(match.group('return_code_int'))
            raise FirmwareError(command_code, return_code)
        match = CRE_REMOTE_COMMAND_ERROR.match(error_message)
        if match:
            # Exception message matches format of remote firmware error;
            # substitute the human-readable command name into the message.
            command_code = int(match.group('command_int'))
            command_name = NAMES_BY_COMMAND_CODE[command_code]
            raise RuntimeError(CRE_REMOTE_COMMAND_ERROR.sub(command_name,
                                                            error_message))
        # Not a remote firmware error, so raise original exception.
        raise
'''
Catch `RuntimeError` exceptions raised by remote control board firmware
commands and re-raise as more specific `FirmwareError` exception type,
which includes command code and return code.
'''
try:
return function(self, *args, **kwargs)
except RuntimeError, exception:
error_message = str(exception)
match = CRE_REMOTE_ERROR.match(error_message)
if match:
# Exception message matches format of remote firmware error.
command_code = int(match.group('command_int'))
return_code = int(match.group('return_code_int'))
raise FirmwareError(command_code, return_code)
match = CRE_REMOTE_COMMAND_ERROR.match(error_message)
if match:
# Exception message matches format of remote firmware error.
command_code = int(match.group('command_int'))
command_name = NAMES_BY_COMMAND_CODE[command_code]
raise RuntimeError(CRE_REMOTE_COMMAND_ERROR.sub(command_name,
error_message))
# Not a remote firmware error, so raise original exception.
raise | [
"def",
"remote_command",
"(",
"function",
",",
"self",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"try",
":",
"return",
"function",
"(",
"self",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"except",
"RuntimeError",
",",
"exception",
":"... | Catch `RuntimeError` exceptions raised by remote control board firmware
commands and re-raise as more specific `FirmwareError` exception type,
which includes command code and return code. | [
"Catch",
"RuntimeError",
"exceptions",
"raised",
"by",
"remote",
"control",
"board",
"firmware",
"commands",
"and",
"re",
"-",
"raise",
"as",
"more",
"specific",
"FirmwareError",
"exception",
"type",
"which",
"includes",
"command",
"code",
"and",
"return",
"code",... | 1cd8cc9a148d530f9a11f634f2dbfe73f08aa27c | https://github.com/wheeler-microfluidics/dmf-control-board-firmware/blob/1cd8cc9a148d530f9a11f634f2dbfe73f08aa27c/dmf_control_board_firmware/__init__.py#L1117-L1143 | train | 54,852 |
wheeler-microfluidics/dmf-control-board-firmware | dmf_control_board_firmware/__init__.py | FeedbackResults.to_frame | def to_frame(self, filter_order=3):
        """
        Convert data to a `pandas.DataFrame`.
        Parameters
        ----------
        filter_order : int
            Filter order to use when filtering Z_device, capacitance, x_position, and dxdt.
            Data is filtered using a Savitzky-Golay filter with a window size that is adjusted
            based on the mean velocity of the drop (see _get_window_size).
        Returns
        -------
        pandas.DataFrame
            This DataFrame is indexed by the step time in seconds (index name
            'step_time') and contains the following columns:
            frequency: actuation frequency (Hz)
            target_voltage: target voltage (V)
            voltage: measured voltage (V)
            force: actuation force (uN/mm)
            area: actuated area (mm^2)
            Z_device_filtered: filtered device impedance for actuated area (Ohms)
            capacitance_filtered: filtered device capacitance for actuated area (F)
            x_position_filtered: filtered x-position of the drop (mm)
            dxdt_filtered: filtered instantaneous velocity of the drop (mm/s)
            Z_device: device impedance for actuated area (Ohms)
            capacitance: device capacitance for actuated area (F)
            x_position: x-position of the drop (mm)
            dxdt: instantaneous velocity of the drop (mm/s)
            dx: difference in the drop's x-position over the course of the step (mm)
            dt: time the drop is considered to have been "moving" (s)
            mean_velocity: mean drop velocity (mm/s)
            peak_velocity: peak drop velocity calculated from filtered instantaneous
                velocity (mm/s)
            window_size: window size used for Savitzky-Golay filter (# bins)
            filter_order: order used for Savitzky-Golay filter (integer)
        """
        window_size = self._get_window_size()
        # Characteristic length (mm): square root of the actuated area,
        # which is documented above as mm^2.
        L = np.sqrt(self.area)
        velocity_results = self.mean_velocity(Lx=L)
        # Defaults used when no drop movement was detected for this step.
        mean_velocity = None
        peak_velocity = None
        dx = 0
        dt = 0
        dxdt = np.zeros(len(self.time))
        dxdt_filtered = np.zeros(len(self.time))
        # if the window size is too small for filtering, set filter_order to None
        if filter_order and window_size and window_size < filter_order + 2:
            filter_order = None
        if velocity_results and velocity_results['dx']:
            mean_velocity = velocity_results['p'][0] * 1e3
            dx = velocity_results['dx']
            dt = velocity_results['dt'] * 1e-3 # convert to seconds
            t, dxdt = self.dxdt(Lx=L)
            # interpolate dxdt to use the same time points as the impedance values.
            dxdt = np.interp(self.time,
                             t, dxdt) * 1e3 # multiply by 1000 to convert to mm/s
            dxdt = np.ma.masked_invalid(dxdt)
            t, dxdt_filtered = self.dxdt(filter_order=filter_order, Lx=L)
            # interpolate dxdt_filtered to use the same time points as the impedance values.
            dxdt_filtered = np.interp(self.time,
                                      t, dxdt_filtered) * 1e3 # multiply by 1000 to convert to mm/s
            dxdt_filtered = np.ma.masked_invalid(dxdt_filtered)
            # calculate peak velocity from filtered data
            peak_velocity = np.max(dxdt_filtered)
        # self.time is recorded in ms; the frame is indexed in seconds.
        index = pd.Index(self.time * 1e-3, name='step_time')
        df = pd.DataFrame({'target_voltage': self.voltage, # V
                           'voltage': self.V_actuation(), # V
                           'force': self.force(Ly=1.0) * 1e6, # uN/mm
                           'Z_device_filtered': self.Z_device(filter_order=filter_order), # Ohms
                           'capacitance_filtered': self.capacitance(filter_order=filter_order), # F
                           'x_position_filtered': self.x_position(filter_order=filter_order), # mm
                           'dxdt_filtered': dxdt_filtered, # mm/s
                           'Z_device': self.Z_device(), # Ohms
                           'capacitance': self.capacitance(), # F
                           'x_position': self.x_position(), # mm
                           'dxdt': dxdt, # mm/s
                           }, index=index)
        # Scalar per-step values are broadcast across every row.
        df['frequency'] = self.frequency
        df['area'] = self.area # mm^2
        df['dx'] = dx # mm
        df['dt'] = dt # s
        df['mean_velocity'] = mean_velocity # mm/s
        df['peak_velocity'] = peak_velocity # mm/s
        df['window_size'] = window_size
        df['filter_order'] = filter_order
        # re-order columns
        return df[[u'frequency', u'target_voltage', u'voltage', u'force', u'area',
                   u'Z_device_filtered', u'capacitance_filtered', u'x_position_filtered',
                   u'dxdt_filtered', u'Z_device', u'capacitance', u'x_position', u'dxdt',
                   u'dx', u'dt', u'mean_velocity', u'peak_velocity',
                   u'window_size', u'filter_order']] | python | def to_frame(self, filter_order=3):
"""
Convert data to a `pandas.DataFrame`.
Parameters
----------
filter_order : int
Filter order to use when filtering Z_device, capacitance, x_position, and dxdt.
Data is filtered using a Savitzky-Golay filter with a window size that is adjusted
based on the mean velocity of the drop (see _get_window_size).
Returns
-------
pandas.DataFrame
This DataFrame is indexed by a utc_timestamp and contains the following columns:
frequency: actuation frequency (Hz)
target_voltage: target voltage (V)
voltage: measured voltage (V)
force: actuation force (uN/mm)
area: actuated area (mm^2)
Z_device_filtered: filtered device impedance for actuated area (Ohms)
capacitance_filtered: filtered device capacitance for actuated area (F)
x_position_filtered: filtered x-position of the drop (mm)
dxdt_filtered: filtered instantaneous velocity of the drop (mm/s)
Z_device: device impedance for actuated area (Ohms)
capacitance: device capacitance for actuated area (F)
x_position: x-position of the drop (mm)
dxdt: instantaneous velocity of the drop (mm/s)
dx: difference in the drop's x-position over the course of the step (mm)
dt: time the drop is considered to have been "moving" (s)
mean_velocity: mean drop velocity (mm/s)
peak_velocity: peak drop velocity calculated from filtered instantaneous
velocity (mm/s)
window_size: windows size used for Savitzky-Golay filter (# bins)
filter_order: order used for Savitzky-Golay filter (integer)
"""
window_size = self._get_window_size()
L = np.sqrt(self.area)
velocity_results = self.mean_velocity(Lx=L)
mean_velocity = None
peak_velocity = None
dx = 0
dt = 0
dxdt = np.zeros(len(self.time))
dxdt_filtered = np.zeros(len(self.time))
# if the window size is too small for filtering, set filter_order to None
if filter_order and window_size and window_size < filter_order + 2:
filter_order = None
if velocity_results and velocity_results['dx']:
mean_velocity = velocity_results['p'][0] * 1e3
dx = velocity_results['dx']
dt = velocity_results['dt'] * 1e-3 # convert to seconds
t, dxdt = self.dxdt(Lx=L)
# interpolate dxdt to use the same time points as the impedance values.
dxdt = np.interp(self.time,
t, dxdt) * 1e3 # multiply by 1000 to convert to mm/s
dxdt = np.ma.masked_invalid(dxdt)
t, dxdt_filtered = self.dxdt(filter_order=filter_order, Lx=L)
# interpolate dxdt_filtered to use the same time points as the impedance values.
dxdt_filtered = np.interp(self.time,
t, dxdt_filtered) * 1e3 # multiply by 1000 to convert to mm/s
dxdt_filtered = np.ma.masked_invalid(dxdt_filtered)
# calculate peak velocity from filtered data
peak_velocity = np.max(dxdt_filtered)
index = pd.Index(self.time * 1e-3, name='step_time')
df = pd.DataFrame({'target_voltage': self.voltage, # V
'voltage': self.V_actuation(), # V
'force': self.force(Ly=1.0) * 1e6, # uN/mm
'Z_device_filtered': self.Z_device(filter_order=filter_order), # Ohms
'capacitance_filtered': self.capacitance(filter_order=filter_order), # F
'x_position_filtered': self.x_position(filter_order=filter_order), # mm
'dxdt_filtered': dxdt_filtered, # mm/s
'Z_device': self.Z_device(), # Ohms
'capacitance': self.capacitance(), # F
'x_position': self.x_position(), # mm
'dxdt': dxdt, # mm/s
}, index=index)
df['frequency'] = self.frequency
df['area'] = self.area # mm^2
df['dx'] = dx # mm
df['dt'] = dt # s
df['mean_velocity'] = mean_velocity # mm/s
df['peak_velocity'] = peak_velocity # mm/s
df['window_size'] = window_size
df['filter_order'] = filter_order
# re-order columns
return df[[u'frequency', u'target_voltage', u'voltage', u'force', u'area',
u'Z_device_filtered', u'capacitance_filtered', u'x_position_filtered',
u'dxdt_filtered', u'Z_device', u'capacitance', u'x_position', u'dxdt',
u'dx', u'dt', u'mean_velocity', u'peak_velocity',
u'window_size', u'filter_order']] | [
"def",
"to_frame",
"(",
"self",
",",
"filter_order",
"=",
"3",
")",
":",
"window_size",
"=",
"self",
".",
"_get_window_size",
"(",
")",
"L",
"=",
"np",
".",
"sqrt",
"(",
"self",
".",
"area",
")",
"velocity_results",
"=",
"self",
".",
"mean_velocity",
"... | Convert data to a `pandas.DataFrame`.
Parameters
----------
filter_order : int
Filter order to use when filtering Z_device, capacitance, x_position, and dxdt.
Data is filtered using a Savitzky-Golay filter with a window size that is adjusted
based on the mean velocity of the drop (see _get_window_size).
Returns
-------
pandas.DataFrame
This DataFrame is indexed by a utc_timestamp and contains the following columns:
frequency: actuation frequency (Hz)
target_voltage: target voltage (V)
voltage: measured voltage (V)
force: actuation force (uN/mm)
area: actuated area (mm^2)
Z_device_filtered: filtered device impedance for actuated area (Ohms)
capacitance_filtered: filtered device capacitance for actuated area (F)
x_position_filtered: filtered x-position of the drop (mm)
dxdt_filtered: filtered instantaneous velocity of the drop (mm/s)
Z_device: device impedance for actuated area (Ohms)
capacitance: device capacitance for actuated area (F)
x_position: x-position of the drop (mm)
dxdt: instantaneous velocity of the drop (mm/s)
dx: difference in the drop's x-position over the course of the step (mm)
dt: time the drop is considered to have been "moving" (s)
mean_velocity: mean drop velocity (mm/s)
peak_velocity: peak drop velocity calculated from filtered instantaneous
velocity (mm/s)
window_size: windows size used for Savitzky-Golay filter (# bins)
filter_order: order used for Savitzky-Golay filter (integer) | [
"Convert",
"data",
"to",
"a",
"pandas",
".",
"DataFrame",
"."
] | 1cd8cc9a148d530f9a11f634f2dbfe73f08aa27c | https://github.com/wheeler-microfluidics/dmf-control-board-firmware/blob/1cd8cc9a148d530f9a11f634f2dbfe73f08aa27c/dmf_control_board_firmware/__init__.py#L721-L821 | train | 54,853 |
wheeler-microfluidics/dmf-control-board-firmware | dmf_control_board_firmware/__init__.py | DMFControlBoard.set_series_capacitance | def set_series_capacitance(self, channel, value, resistor_index=None):
'''
Set the current series capacitance value for the specified channel.
Parameters
----------
channel : int
Analog channel index.
value : float
Series capacitance value.
resistor_index : int, optional
Series resistor channel index.
If :data:`resistor_index` is not specified, the resistor-index from
the current context _(i.e., the result of
:attr:`series_resistor_index`)_ is used.
Otherwise, the series-resistor is temporarily set to the value of
:data:`resistor_index` to read the resistance before restoring
back to the original value.
Returns
-------
int
Return code from embedded call.
'''
if resistor_index is None:
resistor_index = self.series_resistor_index(channel)
try:
if channel == 0:
self.calibration.C_hv[resistor_index] = value
else:
self.calibration.C_fb[resistor_index] = value
except:
pass
return self._set_series_capacitance(channel, value) | python | def set_series_capacitance(self, channel, value, resistor_index=None):
'''
Set the current series capacitance value for the specified channel.
Parameters
----------
channel : int
Analog channel index.
value : float
Series capacitance value.
resistor_index : int, optional
Series resistor channel index.
If :data:`resistor_index` is not specified, the resistor-index from
the current context _(i.e., the result of
:attr:`series_resistor_index`)_ is used.
Otherwise, the series-resistor is temporarily set to the value of
:data:`resistor_index` to read the resistance before restoring
back to the original value.
Returns
-------
int
Return code from embedded call.
'''
if resistor_index is None:
resistor_index = self.series_resistor_index(channel)
try:
if channel == 0:
self.calibration.C_hv[resistor_index] = value
else:
self.calibration.C_fb[resistor_index] = value
except:
pass
return self._set_series_capacitance(channel, value) | [
"def",
"set_series_capacitance",
"(",
"self",
",",
"channel",
",",
"value",
",",
"resistor_index",
"=",
"None",
")",
":",
"if",
"resistor_index",
"is",
"None",
":",
"resistor_index",
"=",
"self",
".",
"series_resistor_index",
"(",
"channel",
")",
"try",
":",
... | Set the current series capacitance value for the specified channel.
Parameters
----------
channel : int
Analog channel index.
value : float
Series capacitance value.
resistor_index : int, optional
Series resistor channel index.
If :data:`resistor_index` is not specified, the resistor-index from
the current context _(i.e., the result of
:attr:`series_resistor_index`)_ is used.
Otherwise, the series-resistor is temporarily set to the value of
:data:`resistor_index` to read the resistance before restoring
back to the original value.
Returns
-------
int
Return code from embedded call. | [
"Set",
"the",
"current",
"series",
"capacitance",
"value",
"for",
"the",
"specified",
"channel",
"."
] | 1cd8cc9a148d530f9a11f634f2dbfe73f08aa27c | https://github.com/wheeler-microfluidics/dmf-control-board-firmware/blob/1cd8cc9a148d530f9a11f634f2dbfe73f08aa27c/dmf_control_board_firmware/__init__.py#L1261-L1296 | train | 54,854 |
wheeler-microfluidics/dmf-control-board-firmware | dmf_control_board_firmware/__init__.py | DMFControlBoard.set_series_resistance | def set_series_resistance(self, channel, value, resistor_index=None):
'''
Set the current series resistance value for the specified channel.
Parameters
----------
channel : int
Analog channel index.
value : float
Series resistance value.
resistor_index : int, optional
Series resistor channel index.
If :data:`resistor_index` is not specified, the resistor-index from
the current context _(i.e., the result of
:attr:`series_resistor_index`)_ is used.
Otherwise, the series-resistor is temporarily set to the value of
:data:`resistor_index` to set the resistance before restoring back
to the original value.
See definition of :meth:`safe_series_resistor_index_read`
decorator.
Returns
-------
int
Return code from embedded call.
'''
if resistor_index is None:
resistor_index = self.series_resistor_index(channel)
try:
if channel == 0:
self.calibration.R_hv[resistor_index] = value
else:
self.calibration.R_fb[resistor_index] = value
except:
pass
return self._set_series_resistance(channel, value) | python | def set_series_resistance(self, channel, value, resistor_index=None):
'''
Set the current series resistance value for the specified channel.
Parameters
----------
channel : int
Analog channel index.
value : float
Series resistance value.
resistor_index : int, optional
Series resistor channel index.
If :data:`resistor_index` is not specified, the resistor-index from
the current context _(i.e., the result of
:attr:`series_resistor_index`)_ is used.
Otherwise, the series-resistor is temporarily set to the value of
:data:`resistor_index` to set the resistance before restoring back
to the original value.
See definition of :meth:`safe_series_resistor_index_read`
decorator.
Returns
-------
int
Return code from embedded call.
'''
if resistor_index is None:
resistor_index = self.series_resistor_index(channel)
try:
if channel == 0:
self.calibration.R_hv[resistor_index] = value
else:
self.calibration.R_fb[resistor_index] = value
except:
pass
return self._set_series_resistance(channel, value) | [
"def",
"set_series_resistance",
"(",
"self",
",",
"channel",
",",
"value",
",",
"resistor_index",
"=",
"None",
")",
":",
"if",
"resistor_index",
"is",
"None",
":",
"resistor_index",
"=",
"self",
".",
"series_resistor_index",
"(",
"channel",
")",
"try",
":",
... | Set the current series resistance value for the specified channel.
Parameters
----------
channel : int
Analog channel index.
value : float
Series resistance value.
resistor_index : int, optional
Series resistor channel index.
If :data:`resistor_index` is not specified, the resistor-index from
the current context _(i.e., the result of
:attr:`series_resistor_index`)_ is used.
Otherwise, the series-resistor is temporarily set to the value of
:data:`resistor_index` to set the resistance before restoring back
to the original value.
See definition of :meth:`safe_series_resistor_index_read`
decorator.
Returns
-------
int
Return code from embedded call. | [
"Set",
"the",
"current",
"series",
"resistance",
"value",
"for",
"the",
"specified",
"channel",
"."
] | 1cd8cc9a148d530f9a11f634f2dbfe73f08aa27c | https://github.com/wheeler-microfluidics/dmf-control-board-firmware/blob/1cd8cc9a148d530f9a11f634f2dbfe73f08aa27c/dmf_control_board_firmware/__init__.py#L1299-L1337 | train | 54,855 |
wheeler-microfluidics/dmf-control-board-firmware | dmf_control_board_firmware/__init__.py | DMFControlBoard.persistent_write | def persistent_write(self, address, byte, refresh_config=False):
'''
Write a single byte to an address in persistent memory.
Parameters
----------
address : int
Address in persistent memory (e.g., EEPROM).
byte : int
Value to write to address.
refresh_config : bool, optional
Is ``True``, :meth:`load_config()` is called afterward to refresh
the configuration settings.
'''
self._persistent_write(address, byte)
if refresh_config:
self.load_config(False) | python | def persistent_write(self, address, byte, refresh_config=False):
'''
Write a single byte to an address in persistent memory.
Parameters
----------
address : int
Address in persistent memory (e.g., EEPROM).
byte : int
Value to write to address.
refresh_config : bool, optional
Is ``True``, :meth:`load_config()` is called afterward to refresh
the configuration settings.
'''
self._persistent_write(address, byte)
if refresh_config:
self.load_config(False) | [
"def",
"persistent_write",
"(",
"self",
",",
"address",
",",
"byte",
",",
"refresh_config",
"=",
"False",
")",
":",
"self",
".",
"_persistent_write",
"(",
"address",
",",
"byte",
")",
"if",
"refresh_config",
":",
"self",
".",
"load_config",
"(",
"False",
"... | Write a single byte to an address in persistent memory.
Parameters
----------
address : int
Address in persistent memory (e.g., EEPROM).
byte : int
Value to write to address.
refresh_config : bool, optional
Is ``True``, :meth:`load_config()` is called afterward to refresh
the configuration settings. | [
"Write",
"a",
"single",
"byte",
"to",
"an",
"address",
"in",
"persistent",
"memory",
"."
] | 1cd8cc9a148d530f9a11f634f2dbfe73f08aa27c | https://github.com/wheeler-microfluidics/dmf-control-board-firmware/blob/1cd8cc9a148d530f9a11f634f2dbfe73f08aa27c/dmf_control_board_firmware/__init__.py#L1504-L1520 | train | 54,856 |
wheeler-microfluidics/dmf-control-board-firmware | dmf_control_board_firmware/__init__.py | DMFControlBoard.persistent_read_multibyte | def persistent_read_multibyte(self, address, count=None, dtype=np.uint8):
'''
Read a chunk of data from persistent memory.
Parameters
----------
address : int
Address in persistent memory (e.g., EEPROM).
count : int, optional
Number of values to read.
If not set, read a single value of the specified :data:`dtype`.
dtype : numpy.dtype, optional
The type of the value(s) to read.
Returns
-------
dtype or numpy.array(dtype=dtype)
If :data:`count` is ``None``, return single value.
Otherwise, return array of values.
'''
nbytes = np.dtype(dtype).itemsize
if count is not None:
nbytes *= count
# Read enough bytes starting at specified address to match the
# requested number of the specified data type.
data_bytes = np.array([self.persistent_read(address + i)
for i in xrange(nbytes)], dtype=np.uint8)
# Cast byte array as array of specified data type.
result = data_bytes.view(dtype)
# If no count was specified, we return a scalar value rather than the
# resultant array.
if count is None:
return result[0]
return result | python | def persistent_read_multibyte(self, address, count=None, dtype=np.uint8):
'''
Read a chunk of data from persistent memory.
Parameters
----------
address : int
Address in persistent memory (e.g., EEPROM).
count : int, optional
Number of values to read.
If not set, read a single value of the specified :data:`dtype`.
dtype : numpy.dtype, optional
The type of the value(s) to read.
Returns
-------
dtype or numpy.array(dtype=dtype)
If :data:`count` is ``None``, return single value.
Otherwise, return array of values.
'''
nbytes = np.dtype(dtype).itemsize
if count is not None:
nbytes *= count
# Read enough bytes starting at specified address to match the
# requested number of the specified data type.
data_bytes = np.array([self.persistent_read(address + i)
for i in xrange(nbytes)], dtype=np.uint8)
# Cast byte array as array of specified data type.
result = data_bytes.view(dtype)
# If no count was specified, we return a scalar value rather than the
# resultant array.
if count is None:
return result[0]
return result | [
"def",
"persistent_read_multibyte",
"(",
"self",
",",
"address",
",",
"count",
"=",
"None",
",",
"dtype",
"=",
"np",
".",
"uint8",
")",
":",
"nbytes",
"=",
"np",
".",
"dtype",
"(",
"dtype",
")",
".",
"itemsize",
"if",
"count",
"is",
"not",
"None",
":... | Read a chunk of data from persistent memory.
Parameters
----------
address : int
Address in persistent memory (e.g., EEPROM).
count : int, optional
Number of values to read.
If not set, read a single value of the specified :data:`dtype`.
dtype : numpy.dtype, optional
The type of the value(s) to read.
Returns
-------
dtype or numpy.array(dtype=dtype)
If :data:`count` is ``None``, return single value.
Otherwise, return array of values. | [
"Read",
"a",
"chunk",
"of",
"data",
"from",
"persistent",
"memory",
"."
] | 1cd8cc9a148d530f9a11f634f2dbfe73f08aa27c | https://github.com/wheeler-microfluidics/dmf-control-board-firmware/blob/1cd8cc9a148d530f9a11f634f2dbfe73f08aa27c/dmf_control_board_firmware/__init__.py#L1522-L1560 | train | 54,857 |
wheeler-microfluidics/dmf-control-board-firmware | dmf_control_board_firmware/__init__.py | DMFControlBoard.persistent_write_multibyte | def persistent_write_multibyte(self, address, data, refresh_config=False):
'''
Write multiple bytes to an address in persistent memory.
Parameters
----------
address : int
Address in persistent memory (e.g., EEPROM).
data : numpy.array
Data to write.
refresh_config : bool, optional
Is ``True``, :meth:`load_config()` is called afterward to refresh
the configuration settings.
'''
for i, byte in enumerate(data.view(np.uint8)):
self.persistent_write(address + i, int(byte))
if refresh_config:
self.load_config(False) | python | def persistent_write_multibyte(self, address, data, refresh_config=False):
'''
Write multiple bytes to an address in persistent memory.
Parameters
----------
address : int
Address in persistent memory (e.g., EEPROM).
data : numpy.array
Data to write.
refresh_config : bool, optional
Is ``True``, :meth:`load_config()` is called afterward to refresh
the configuration settings.
'''
for i, byte in enumerate(data.view(np.uint8)):
self.persistent_write(address + i, int(byte))
if refresh_config:
self.load_config(False) | [
"def",
"persistent_write_multibyte",
"(",
"self",
",",
"address",
",",
"data",
",",
"refresh_config",
"=",
"False",
")",
":",
"for",
"i",
",",
"byte",
"in",
"enumerate",
"(",
"data",
".",
"view",
"(",
"np",
".",
"uint8",
")",
")",
":",
"self",
".",
"... | Write multiple bytes to an address in persistent memory.
Parameters
----------
address : int
Address in persistent memory (e.g., EEPROM).
data : numpy.array
Data to write.
refresh_config : bool, optional
Is ``True``, :meth:`load_config()` is called afterward to refresh
the configuration settings. | [
"Write",
"multiple",
"bytes",
"to",
"an",
"address",
"in",
"persistent",
"memory",
"."
] | 1cd8cc9a148d530f9a11f634f2dbfe73f08aa27c | https://github.com/wheeler-microfluidics/dmf-control-board-firmware/blob/1cd8cc9a148d530f9a11f634f2dbfe73f08aa27c/dmf_control_board_firmware/__init__.py#L1562-L1579 | train | 54,858 |
inveniosoftware/kwalitee | kwalitee/hooks.py | _get_files_modified | def _get_files_modified():
"""Get the list of modified files that are Python or Jinja2."""
cmd = "git diff-index --cached --name-only --diff-filter=ACMRTUXB HEAD"
_, files_modified, _ = run(cmd)
extensions = [re.escape(ext) for ext in list(SUPPORTED_FILES) + [".rst"]]
test = "(?:{0})$".format("|".join(extensions))
return list(filter(lambda f: re.search(test, f), files_modified)) | python | def _get_files_modified():
"""Get the list of modified files that are Python or Jinja2."""
cmd = "git diff-index --cached --name-only --diff-filter=ACMRTUXB HEAD"
_, files_modified, _ = run(cmd)
extensions = [re.escape(ext) for ext in list(SUPPORTED_FILES) + [".rst"]]
test = "(?:{0})$".format("|".join(extensions))
return list(filter(lambda f: re.search(test, f), files_modified)) | [
"def",
"_get_files_modified",
"(",
")",
":",
"cmd",
"=",
"\"git diff-index --cached --name-only --diff-filter=ACMRTUXB HEAD\"",
"_",
",",
"files_modified",
",",
"_",
"=",
"run",
"(",
"cmd",
")",
"extensions",
"=",
"[",
"re",
".",
"escape",
"(",
"ext",
")",
"for"... | Get the list of modified files that are Python or Jinja2. | [
"Get",
"the",
"list",
"of",
"modified",
"files",
"that",
"are",
"Python",
"or",
"Jinja2",
"."
] | 9124f8f55b15547fef08c6c43cabced314e70674 | https://github.com/inveniosoftware/kwalitee/blob/9124f8f55b15547fef08c6c43cabced314e70674/kwalitee/hooks.py#L42-L49 | train | 54,859 |
inveniosoftware/kwalitee | kwalitee/hooks.py | _get_git_author | def _get_git_author():
"""Return the git author from the git variables."""
_, stdout, _ = run("git var GIT_AUTHOR_IDENT")
git_author = stdout[0]
return git_author[:git_author.find(">") + 1] | python | def _get_git_author():
"""Return the git author from the git variables."""
_, stdout, _ = run("git var GIT_AUTHOR_IDENT")
git_author = stdout[0]
return git_author[:git_author.find(">") + 1] | [
"def",
"_get_git_author",
"(",
")",
":",
"_",
",",
"stdout",
",",
"_",
"=",
"run",
"(",
"\"git var GIT_AUTHOR_IDENT\"",
")",
"git_author",
"=",
"stdout",
"[",
"0",
"]",
"return",
"git_author",
"[",
":",
"git_author",
".",
"find",
"(",
"\">\"",
")",
"+",
... | Return the git author from the git variables. | [
"Return",
"the",
"git",
"author",
"from",
"the",
"git",
"variables",
"."
] | 9124f8f55b15547fef08c6c43cabced314e70674 | https://github.com/inveniosoftware/kwalitee/blob/9124f8f55b15547fef08c6c43cabced314e70674/kwalitee/hooks.py#L52-L57 | train | 54,860 |
inveniosoftware/kwalitee | kwalitee/hooks.py | _get_component | def _get_component(filename, default="global"):
"""Get component name from filename."""
if hasattr(filename, "decode"):
filename = filename.decode()
parts = filename.split(os.path.sep)
if len(parts) >= 3:
if parts[1] in "modules legacy ext".split():
return parts[2]
if len(parts) >= 2:
if parts[1] in "base celery utils".split():
return parts[1]
if len(parts) >= 1:
if parts[0] in "grunt docs".split():
return parts[0]
return default | python | def _get_component(filename, default="global"):
"""Get component name from filename."""
if hasattr(filename, "decode"):
filename = filename.decode()
parts = filename.split(os.path.sep)
if len(parts) >= 3:
if parts[1] in "modules legacy ext".split():
return parts[2]
if len(parts) >= 2:
if parts[1] in "base celery utils".split():
return parts[1]
if len(parts) >= 1:
if parts[0] in "grunt docs".split():
return parts[0]
return default | [
"def",
"_get_component",
"(",
"filename",
",",
"default",
"=",
"\"global\"",
")",
":",
"if",
"hasattr",
"(",
"filename",
",",
"\"decode\"",
")",
":",
"filename",
"=",
"filename",
".",
"decode",
"(",
")",
"parts",
"=",
"filename",
".",
"split",
"(",
"os",... | Get component name from filename. | [
"Get",
"component",
"name",
"from",
"filename",
"."
] | 9124f8f55b15547fef08c6c43cabced314e70674 | https://github.com/inveniosoftware/kwalitee/blob/9124f8f55b15547fef08c6c43cabced314e70674/kwalitee/hooks.py#L60-L75 | train | 54,861 |
inveniosoftware/kwalitee | kwalitee/hooks.py | _prepare_commit_msg | def _prepare_commit_msg(tmp_file, author, files_modified=None, template=None):
"""Prepare the commit message in tmp_file.
It will build the commit message prefilling the component line, as well
as the signature using the git author and the modified files.
The file remains untouched if it is not empty.
"""
files_modified = files_modified or []
template = template or "{component}:\n\nSigned-off-by: {author}\n{extra}"
if hasattr(template, "decode"):
template = template.decode()
with open(tmp_file, "r", "utf-8") as fh:
contents = fh.readlines()
msg = filter(lambda x: not (x.startswith("#") or x.isspace()),
contents)
if len(list(msg)):
return
component = "unknown"
components = _get_components(files_modified)
if len(components) == 1:
component = components[0]
elif len(components) > 1:
component = "/".join(components)
contents.append(
"# WARNING: Multiple components detected - consider splitting "
"commit.\r\n"
)
with open(tmp_file, "w", "utf-8") as fh:
fh.write(template.format(component=component,
author=author,
extra="".join(contents))) | python | def _prepare_commit_msg(tmp_file, author, files_modified=None, template=None):
"""Prepare the commit message in tmp_file.
It will build the commit message prefilling the component line, as well
as the signature using the git author and the modified files.
The file remains untouched if it is not empty.
"""
files_modified = files_modified or []
template = template or "{component}:\n\nSigned-off-by: {author}\n{extra}"
if hasattr(template, "decode"):
template = template.decode()
with open(tmp_file, "r", "utf-8") as fh:
contents = fh.readlines()
msg = filter(lambda x: not (x.startswith("#") or x.isspace()),
contents)
if len(list(msg)):
return
component = "unknown"
components = _get_components(files_modified)
if len(components) == 1:
component = components[0]
elif len(components) > 1:
component = "/".join(components)
contents.append(
"# WARNING: Multiple components detected - consider splitting "
"commit.\r\n"
)
with open(tmp_file, "w", "utf-8") as fh:
fh.write(template.format(component=component,
author=author,
extra="".join(contents))) | [
"def",
"_prepare_commit_msg",
"(",
"tmp_file",
",",
"author",
",",
"files_modified",
"=",
"None",
",",
"template",
"=",
"None",
")",
":",
"files_modified",
"=",
"files_modified",
"or",
"[",
"]",
"template",
"=",
"template",
"or",
"\"{component}:\\n\\nSigned-off-by... | Prepare the commit message in tmp_file.
It will build the commit message prefilling the component line, as well
as the signature using the git author and the modified files.
The file remains untouched if it is not empty. | [
"Prepare",
"the",
"commit",
"message",
"in",
"tmp_file",
"."
] | 9124f8f55b15547fef08c6c43cabced314e70674 | https://github.com/inveniosoftware/kwalitee/blob/9124f8f55b15547fef08c6c43cabced314e70674/kwalitee/hooks.py#L83-L118 | train | 54,862 |
inveniosoftware/kwalitee | kwalitee/hooks.py | _check_message | def _check_message(message, options):
"""Checking the message and printing the errors."""
options = options or dict()
options.update(get_options())
options.update(_read_local_kwalitee_configuration())
errors = check_message(message, **options)
if errors:
for error in errors:
print(error, file=sys.stderr)
return False
return True | python | def _check_message(message, options):
"""Checking the message and printing the errors."""
options = options or dict()
options.update(get_options())
options.update(_read_local_kwalitee_configuration())
errors = check_message(message, **options)
if errors:
for error in errors:
print(error, file=sys.stderr)
return False
return True | [
"def",
"_check_message",
"(",
"message",
",",
"options",
")",
":",
"options",
"=",
"options",
"or",
"dict",
"(",
")",
"options",
".",
"update",
"(",
"get_options",
"(",
")",
")",
"options",
".",
"update",
"(",
"_read_local_kwalitee_configuration",
"(",
")",
... | Checking the message and printing the errors. | [
"Checking",
"the",
"message",
"and",
"printing",
"the",
"errors",
"."
] | 9124f8f55b15547fef08c6c43cabced314e70674 | https://github.com/inveniosoftware/kwalitee/blob/9124f8f55b15547fef08c6c43cabced314e70674/kwalitee/hooks.py#L121-L134 | train | 54,863 |
inveniosoftware/kwalitee | kwalitee/hooks.py | _read_local_kwalitee_configuration | def _read_local_kwalitee_configuration(directory="."):
"""Check if the repo has a ``.kwalitee.yaml`` file."""
filepath = os.path.abspath(os.path.join(directory, '.kwalitee.yml'))
data = {}
if os.path.exists(filepath):
with open(filepath, 'r') as file_read:
data = yaml.load(file_read.read())
return data | python | def _read_local_kwalitee_configuration(directory="."):
"""Check if the repo has a ``.kwalitee.yaml`` file."""
filepath = os.path.abspath(os.path.join(directory, '.kwalitee.yml'))
data = {}
if os.path.exists(filepath):
with open(filepath, 'r') as file_read:
data = yaml.load(file_read.read())
return data | [
"def",
"_read_local_kwalitee_configuration",
"(",
"directory",
"=",
"\".\"",
")",
":",
"filepath",
"=",
"os",
".",
"path",
".",
"abspath",
"(",
"os",
".",
"path",
".",
"join",
"(",
"directory",
",",
"'.kwalitee.yml'",
")",
")",
"data",
"=",
"{",
"}",
"if... | Check if the repo has a ``.kwalitee.yaml`` file. | [
"Check",
"if",
"the",
"repo",
"has",
"a",
".",
"kwalitee",
".",
"yaml",
"file",
"."
] | 9124f8f55b15547fef08c6c43cabced314e70674 | https://github.com/inveniosoftware/kwalitee/blob/9124f8f55b15547fef08c6c43cabced314e70674/kwalitee/hooks.py#L185-L192 | train | 54,864 |
inveniosoftware/kwalitee | kwalitee/hooks.py | _pre_commit | def _pre_commit(files, options):
"""Run the check on files of the added version.
They might be different than the one on disk. Equivalent than doing a git
stash, check, and git stash pop.
"""
errors = []
tmpdir = mkdtemp()
files_to_check = []
try:
for (file_, content) in files:
# write staged version of file to temporary directory
dirname, filename = os.path.split(os.path.abspath(file_))
prefix = os.path.commonprefix([dirname, tmpdir])
dirname = os.path.relpath(dirname, start=prefix)
dirname = os.path.join(tmpdir, dirname)
if not os.path.isdir(dirname):
os.makedirs(dirname)
filename = os.path.join(dirname, filename)
with open(filename, "wb") as fh:
fh.write(content)
files_to_check.append((file_, filename))
for (file_, filename) in files_to_check:
errors += list(map(lambda x: "{0}: {1}".format(file_, x),
check_file(filename, **options) or []))
finally:
shutil.rmtree(tmpdir, ignore_errors=True)
return errors | python | def _pre_commit(files, options):
"""Run the check on files of the added version.
They might be different than the one on disk. Equivalent than doing a git
stash, check, and git stash pop.
"""
errors = []
tmpdir = mkdtemp()
files_to_check = []
try:
for (file_, content) in files:
# write staged version of file to temporary directory
dirname, filename = os.path.split(os.path.abspath(file_))
prefix = os.path.commonprefix([dirname, tmpdir])
dirname = os.path.relpath(dirname, start=prefix)
dirname = os.path.join(tmpdir, dirname)
if not os.path.isdir(dirname):
os.makedirs(dirname)
filename = os.path.join(dirname, filename)
with open(filename, "wb") as fh:
fh.write(content)
files_to_check.append((file_, filename))
for (file_, filename) in files_to_check:
errors += list(map(lambda x: "{0}: {1}".format(file_, x),
check_file(filename, **options) or []))
finally:
shutil.rmtree(tmpdir, ignore_errors=True)
return errors | [
"def",
"_pre_commit",
"(",
"files",
",",
"options",
")",
":",
"errors",
"=",
"[",
"]",
"tmpdir",
"=",
"mkdtemp",
"(",
")",
"files_to_check",
"=",
"[",
"]",
"try",
":",
"for",
"(",
"file_",
",",
"content",
")",
"in",
"files",
":",
"# write staged versio... | Run the check on files of the added version.
They might be different than the one on disk. Equivalent than doing a git
stash, check, and git stash pop. | [
"Run",
"the",
"check",
"on",
"files",
"of",
"the",
"added",
"version",
"."
] | 9124f8f55b15547fef08c6c43cabced314e70674 | https://github.com/inveniosoftware/kwalitee/blob/9124f8f55b15547fef08c6c43cabced314e70674/kwalitee/hooks.py#L221-L250 | train | 54,865 |
inveniosoftware/kwalitee | kwalitee/hooks.py | run | def run(command, raw_output=False):
"""Run a command using subprocess.
:param command: command line to be run
:type command: str
:param raw_output: does not attempt to convert the output as unicode
:type raw_output: bool
:return: error code, output (``stdout``) and error (``stderr``)
:rtype: tuple
"""
p = Popen(command.split(), stdout=PIPE, stderr=PIPE)
(stdout, stderr) = p.communicate()
# On python 3, subprocess.Popen returns bytes objects.
if not raw_output:
return (
p.returncode,
[line.rstrip() for line in stdout.decode("utf-8").splitlines()],
[line.rstrip() for line in stderr.decode("utf-8").splitlines()]
)
else:
return (p.returncode, stdout, stderr) | python | def run(command, raw_output=False):
"""Run a command using subprocess.
:param command: command line to be run
:type command: str
:param raw_output: does not attempt to convert the output as unicode
:type raw_output: bool
:return: error code, output (``stdout``) and error (``stderr``)
:rtype: tuple
"""
p = Popen(command.split(), stdout=PIPE, stderr=PIPE)
(stdout, stderr) = p.communicate()
# On python 3, subprocess.Popen returns bytes objects.
if not raw_output:
return (
p.returncode,
[line.rstrip() for line in stdout.decode("utf-8").splitlines()],
[line.rstrip() for line in stderr.decode("utf-8").splitlines()]
)
else:
return (p.returncode, stdout, stderr) | [
"def",
"run",
"(",
"command",
",",
"raw_output",
"=",
"False",
")",
":",
"p",
"=",
"Popen",
"(",
"command",
".",
"split",
"(",
")",
",",
"stdout",
"=",
"PIPE",
",",
"stderr",
"=",
"PIPE",
")",
"(",
"stdout",
",",
"stderr",
")",
"=",
"p",
".",
"... | Run a command using subprocess.
:param command: command line to be run
:type command: str
:param raw_output: does not attempt to convert the output as unicode
:type raw_output: bool
:return: error code, output (``stdout``) and error (``stderr``)
:rtype: tuple | [
"Run",
"a",
"command",
"using",
"subprocess",
"."
] | 9124f8f55b15547fef08c6c43cabced314e70674 | https://github.com/inveniosoftware/kwalitee/blob/9124f8f55b15547fef08c6c43cabced314e70674/kwalitee/hooks.py#L285-L306 | train | 54,866 |
Chilipp/psy-simple | psy_simple/widgets/texts.py | mpl_weight2qt | def mpl_weight2qt(weight):
"""Convert a weight from matplotlib definition to a Qt weight
Parameters
----------
weight: int or string
Either an integer between 1 and 1000 or a string out of
:attr:`weights_mpl2qt`
Returns
-------
int
One type of the PyQt5.QtGui.QFont.Weight"""
try:
weight = weights_mpl2qt[weight]
except KeyError:
try:
weight = float(weight) / 10
except (ValueError, TypeError):
weight = QtGui.QFont.Normal
else:
try:
weight = min(filter(lambda w: w >= weight, weights_qt2mpl),
key=lambda w: abs(w - weight))
except ValueError:
weight = QtGui.QFont.Normal
return weight | python | def mpl_weight2qt(weight):
"""Convert a weight from matplotlib definition to a Qt weight
Parameters
----------
weight: int or string
Either an integer between 1 and 1000 or a string out of
:attr:`weights_mpl2qt`
Returns
-------
int
One type of the PyQt5.QtGui.QFont.Weight"""
try:
weight = weights_mpl2qt[weight]
except KeyError:
try:
weight = float(weight) / 10
except (ValueError, TypeError):
weight = QtGui.QFont.Normal
else:
try:
weight = min(filter(lambda w: w >= weight, weights_qt2mpl),
key=lambda w: abs(w - weight))
except ValueError:
weight = QtGui.QFont.Normal
return weight | [
"def",
"mpl_weight2qt",
"(",
"weight",
")",
":",
"try",
":",
"weight",
"=",
"weights_mpl2qt",
"[",
"weight",
"]",
"except",
"KeyError",
":",
"try",
":",
"weight",
"=",
"float",
"(",
"weight",
")",
"/",
"10",
"except",
"(",
"ValueError",
",",
"TypeError",... | Convert a weight from matplotlib definition to a Qt weight
Parameters
----------
weight: int or string
Either an integer between 1 and 1000 or a string out of
:attr:`weights_mpl2qt`
Returns
-------
int
One type of the PyQt5.QtGui.QFont.Weight | [
"Convert",
"a",
"weight",
"from",
"matplotlib",
"definition",
"to",
"a",
"Qt",
"weight"
] | 7d916406a6d3c3c27c0b7102f98fef07a4da0a61 | https://github.com/Chilipp/psy-simple/blob/7d916406a6d3c3c27c0b7102f98fef07a4da0a61/psy_simple/widgets/texts.py#L66-L92 | train | 54,867 |
Chilipp/psy-simple | psy_simple/widgets/texts.py | FontPropertiesWidget.choose_font | def choose_font(self, font=None):
"""Choose a font for the label through a dialog"""
fmt_widget = self.parent()
if font is None:
if self.current_font:
font, ok = QFontDialog.getFont(
self.current_font, fmt_widget,
'Select %s font' % self.fmto_name,
QFontDialog.DontUseNativeDialog)
else:
font, ok = QFontDialog.getFont(fmt_widget)
if not ok:
return
self.current_font = font
properties = self.load_properties()
properties.update(self.qfont_to_artist_props(font))
fmt_widget.set_obj(properties)
self.refresh() | python | def choose_font(self, font=None):
"""Choose a font for the label through a dialog"""
fmt_widget = self.parent()
if font is None:
if self.current_font:
font, ok = QFontDialog.getFont(
self.current_font, fmt_widget,
'Select %s font' % self.fmto_name,
QFontDialog.DontUseNativeDialog)
else:
font, ok = QFontDialog.getFont(fmt_widget)
if not ok:
return
self.current_font = font
properties = self.load_properties()
properties.update(self.qfont_to_artist_props(font))
fmt_widget.set_obj(properties)
self.refresh() | [
"def",
"choose_font",
"(",
"self",
",",
"font",
"=",
"None",
")",
":",
"fmt_widget",
"=",
"self",
".",
"parent",
"(",
")",
"if",
"font",
"is",
"None",
":",
"if",
"self",
".",
"current_font",
":",
"font",
",",
"ok",
"=",
"QFontDialog",
".",
"getFont",... | Choose a font for the label through a dialog | [
"Choose",
"a",
"font",
"for",
"the",
"label",
"through",
"a",
"dialog"
] | 7d916406a6d3c3c27c0b7102f98fef07a4da0a61 | https://github.com/Chilipp/psy-simple/blob/7d916406a6d3c3c27c0b7102f98fef07a4da0a61/psy_simple/widgets/texts.py#L401-L418 | train | 54,868 |
Chilipp/psy-simple | psy_simple/widgets/texts.py | FontPropertiesWidget.refresh | def refresh(self):
"""Refresh the widgets from the current font"""
font = self.current_font
# refresh btn_bold
self.btn_bold.blockSignals(True)
self.btn_bold.setChecked(font.weight() > 50)
self.btn_bold.blockSignals(False)
# refresh btn_italic
self.btn_italic.blockSignals(True)
self.btn_italic.setChecked(font.italic())
self.btn_italic.blockSignals(False)
# refresh font size
self.spin_box.blockSignals(True)
self.spin_box.setValue(font.pointSize())
self.spin_box.blockSignals(False) | python | def refresh(self):
"""Refresh the widgets from the current font"""
font = self.current_font
# refresh btn_bold
self.btn_bold.blockSignals(True)
self.btn_bold.setChecked(font.weight() > 50)
self.btn_bold.blockSignals(False)
# refresh btn_italic
self.btn_italic.blockSignals(True)
self.btn_italic.setChecked(font.italic())
self.btn_italic.blockSignals(False)
# refresh font size
self.spin_box.blockSignals(True)
self.spin_box.setValue(font.pointSize())
self.spin_box.blockSignals(False) | [
"def",
"refresh",
"(",
"self",
")",
":",
"font",
"=",
"self",
".",
"current_font",
"# refresh btn_bold",
"self",
".",
"btn_bold",
".",
"blockSignals",
"(",
"True",
")",
"self",
".",
"btn_bold",
".",
"setChecked",
"(",
"font",
".",
"weight",
"(",
")",
">"... | Refresh the widgets from the current font | [
"Refresh",
"the",
"widgets",
"from",
"the",
"current",
"font"
] | 7d916406a6d3c3c27c0b7102f98fef07a4da0a61 | https://github.com/Chilipp/psy-simple/blob/7d916406a6d3c3c27c0b7102f98fef07a4da0a61/psy_simple/widgets/texts.py#L420-L437 | train | 54,869 |
rclement/flask-pretty | flask_pretty.py | Prettify._prettify_response | def _prettify_response(self, response):
"""
Prettify the HTML response.
:param response: A Flask Response object.
"""
if response.content_type == 'text/html; charset=utf-8':
ugly = response.get_data(as_text=True)
soup = BeautifulSoup(ugly, 'html.parser')
pretty = soup.prettify(formatter='html')
response.direct_passthrough = False
response.set_data(pretty)
return response | python | def _prettify_response(self, response):
"""
Prettify the HTML response.
:param response: A Flask Response object.
"""
if response.content_type == 'text/html; charset=utf-8':
ugly = response.get_data(as_text=True)
soup = BeautifulSoup(ugly, 'html.parser')
pretty = soup.prettify(formatter='html')
response.direct_passthrough = False
response.set_data(pretty)
return response | [
"def",
"_prettify_response",
"(",
"self",
",",
"response",
")",
":",
"if",
"response",
".",
"content_type",
"==",
"'text/html; charset=utf-8'",
":",
"ugly",
"=",
"response",
".",
"get_data",
"(",
"as_text",
"=",
"True",
")",
"soup",
"=",
"BeautifulSoup",
"(",
... | Prettify the HTML response.
:param response: A Flask Response object. | [
"Prettify",
"the",
"HTML",
"response",
"."
] | 74f3c2d9f344d5cd8611a4c25a2a30e85f4ef1d4 | https://github.com/rclement/flask-pretty/blob/74f3c2d9f344d5cd8611a4c25a2a30e85f4ef1d4/flask_pretty.py#L52-L65 | train | 54,870 |
jeradM/pysabnzbd | pysabnzbd/__init__.py | SabnzbdApi._call | async def _call(self, params):
"""Call the SABnzbd API"""
if self._session.closed:
raise SabnzbdApiException('Session already closed')
p = {**self._default_params, **params}
try:
async with timeout(self._timeout, loop=self._session.loop):
async with self._session.get(self._api_url, params=p) as resp:
data = await resp.json()
if data.get('status', True) is False:
self._handle_error(data, params)
else:
return data
except aiohttp.ClientError:
raise SabnzbdApiException('Unable to communicate with Sabnzbd API')
except asyncio.TimeoutError:
raise SabnzbdApiException('SABnzbd API request timed out') | python | async def _call(self, params):
"""Call the SABnzbd API"""
if self._session.closed:
raise SabnzbdApiException('Session already closed')
p = {**self._default_params, **params}
try:
async with timeout(self._timeout, loop=self._session.loop):
async with self._session.get(self._api_url, params=p) as resp:
data = await resp.json()
if data.get('status', True) is False:
self._handle_error(data, params)
else:
return data
except aiohttp.ClientError:
raise SabnzbdApiException('Unable to communicate with Sabnzbd API')
except asyncio.TimeoutError:
raise SabnzbdApiException('SABnzbd API request timed out') | [
"async",
"def",
"_call",
"(",
"self",
",",
"params",
")",
":",
"if",
"self",
".",
"_session",
".",
"closed",
":",
"raise",
"SabnzbdApiException",
"(",
"'Session already closed'",
")",
"p",
"=",
"{",
"*",
"*",
"self",
".",
"_default_params",
",",
"*",
"*"... | Call the SABnzbd API | [
"Call",
"the",
"SABnzbd",
"API"
] | 2b365a1f7d8fef437151570a430f8493d6d04795 | https://github.com/jeradM/pysabnzbd/blob/2b365a1f7d8fef437151570a430f8493d6d04795/pysabnzbd/__init__.py#L34-L51 | train | 54,871 |
jeradM/pysabnzbd | pysabnzbd/__init__.py | SabnzbdApi.refresh_data | async def refresh_data(self):
"""Refresh the cached SABnzbd queue data"""
queue = await self.get_queue()
history = await self.get_history()
totals = {}
for k in history:
if k[-4:] == 'size':
totals[k] = self._convert_size(history.get(k))
self.queue = {**totals, **queue} | python | async def refresh_data(self):
"""Refresh the cached SABnzbd queue data"""
queue = await self.get_queue()
history = await self.get_history()
totals = {}
for k in history:
if k[-4:] == 'size':
totals[k] = self._convert_size(history.get(k))
self.queue = {**totals, **queue} | [
"async",
"def",
"refresh_data",
"(",
"self",
")",
":",
"queue",
"=",
"await",
"self",
".",
"get_queue",
"(",
")",
"history",
"=",
"await",
"self",
".",
"get_history",
"(",
")",
"totals",
"=",
"{",
"}",
"for",
"k",
"in",
"history",
":",
"if",
"k",
"... | Refresh the cached SABnzbd queue data | [
"Refresh",
"the",
"cached",
"SABnzbd",
"queue",
"data"
] | 2b365a1f7d8fef437151570a430f8493d6d04795 | https://github.com/jeradM/pysabnzbd/blob/2b365a1f7d8fef437151570a430f8493d6d04795/pysabnzbd/__init__.py#L53-L61 | train | 54,872 |
jeradM/pysabnzbd | pysabnzbd/__init__.py | SabnzbdApi._convert_size | def _convert_size(self, size_str):
"""Convert units to GB"""
suffix = size_str[-1]
if suffix == 'K':
multiplier = 1.0 / (1024.0 * 1024.0)
elif suffix == 'M':
multiplier = 1.0 / 1024.0
elif suffix == 'T':
multiplier = 1024.0
else:
multiplier = 1
try:
val = float(size_str.split(' ')[0])
return val * multiplier
except ValueError:
return 0.0 | python | def _convert_size(self, size_str):
"""Convert units to GB"""
suffix = size_str[-1]
if suffix == 'K':
multiplier = 1.0 / (1024.0 * 1024.0)
elif suffix == 'M':
multiplier = 1.0 / 1024.0
elif suffix == 'T':
multiplier = 1024.0
else:
multiplier = 1
try:
val = float(size_str.split(' ')[0])
return val * multiplier
except ValueError:
return 0.0 | [
"def",
"_convert_size",
"(",
"self",
",",
"size_str",
")",
":",
"suffix",
"=",
"size_str",
"[",
"-",
"1",
"]",
"if",
"suffix",
"==",
"'K'",
":",
"multiplier",
"=",
"1.0",
"/",
"(",
"1024.0",
"*",
"1024.0",
")",
"elif",
"suffix",
"==",
"'M'",
":",
"... | Convert units to GB | [
"Convert",
"units",
"to",
"GB"
] | 2b365a1f7d8fef437151570a430f8493d6d04795 | https://github.com/jeradM/pysabnzbd/blob/2b365a1f7d8fef437151570a430f8493d6d04795/pysabnzbd/__init__.py#L96-L112 | train | 54,873 |
jeradM/pysabnzbd | pysabnzbd/__init__.py | SabnzbdApi._handle_error | def _handle_error(self, data, params):
"""Handle an error response from the SABnzbd API"""
error = data.get('error', 'API call failed')
mode = params.get('mode')
raise SabnzbdApiException(error, mode=mode) | python | def _handle_error(self, data, params):
"""Handle an error response from the SABnzbd API"""
error = data.get('error', 'API call failed')
mode = params.get('mode')
raise SabnzbdApiException(error, mode=mode) | [
"def",
"_handle_error",
"(",
"self",
",",
"data",
",",
"params",
")",
":",
"error",
"=",
"data",
".",
"get",
"(",
"'error'",
",",
"'API call failed'",
")",
"mode",
"=",
"params",
".",
"get",
"(",
"'mode'",
")",
"raise",
"SabnzbdApiException",
"(",
"error... | Handle an error response from the SABnzbd API | [
"Handle",
"an",
"error",
"response",
"from",
"the",
"SABnzbd",
"API"
] | 2b365a1f7d8fef437151570a430f8493d6d04795 | https://github.com/jeradM/pysabnzbd/blob/2b365a1f7d8fef437151570a430f8493d6d04795/pysabnzbd/__init__.py#L114-L118 | train | 54,874 |
toumorokoshi/sprinter | sprinter/formula/ssh.py | SSHFormula.__generate_key | def __generate_key(self, config):
"""
Generate the ssh key, and return the ssh config location
"""
cwd = config.get('ssh_path', self._install_directory())
if config.is_affirmative('create', default="yes"):
if not os.path.exists(cwd):
os.makedirs(cwd)
if not os.path.exists(os.path.join(cwd, config.get('keyname'))):
command = "ssh-keygen -t %(type)s -f %(keyname)s -N " % config.to_dict()
lib.call(command, cwd=cwd, output_log_level=logging.DEBUG)
if not config.has('ssh_path'):
config.set('ssh_path', cwd)
config.set('ssh_key_path', os.path.join(config.get('ssh_path'), config.get('keyname'))) | python | def __generate_key(self, config):
"""
Generate the ssh key, and return the ssh config location
"""
cwd = config.get('ssh_path', self._install_directory())
if config.is_affirmative('create', default="yes"):
if not os.path.exists(cwd):
os.makedirs(cwd)
if not os.path.exists(os.path.join(cwd, config.get('keyname'))):
command = "ssh-keygen -t %(type)s -f %(keyname)s -N " % config.to_dict()
lib.call(command, cwd=cwd, output_log_level=logging.DEBUG)
if not config.has('ssh_path'):
config.set('ssh_path', cwd)
config.set('ssh_key_path', os.path.join(config.get('ssh_path'), config.get('keyname'))) | [
"def",
"__generate_key",
"(",
"self",
",",
"config",
")",
":",
"cwd",
"=",
"config",
".",
"get",
"(",
"'ssh_path'",
",",
"self",
".",
"_install_directory",
"(",
")",
")",
"if",
"config",
".",
"is_affirmative",
"(",
"'create'",
",",
"default",
"=",
"\"yes... | Generate the ssh key, and return the ssh config location | [
"Generate",
"the",
"ssh",
"key",
"and",
"return",
"the",
"ssh",
"config",
"location"
] | 846697a7a087e69c61d075232e754d6975a64152 | https://github.com/toumorokoshi/sprinter/blob/846697a7a087e69c61d075232e754d6975a64152/sprinter/formula/ssh.py#L68-L81 | train | 54,875 |
toumorokoshi/sprinter | sprinter/formula/ssh.py | SSHFormula.__install_ssh_config | def __install_ssh_config(self, config):
"""
Install the ssh configuration
"""
if not config.is_affirmative('use_global_ssh', default="no"):
ssh_config_injection = self._build_ssh_config(config)
if not os.path.exists(ssh_config_path):
if self.injections.in_noninjected_file(ssh_config_path, "Host %s" % config.get('host')):
if config.is_affirmative('override'):
self.injections.inject(ssh_config_path, ssh_config_injection)
else:
self.injections.inject(ssh_config_path, ssh_config_injection)
else:
self.injections.inject(ssh_config_path, ssh_config_injection)
self.injections.commit() | python | def __install_ssh_config(self, config):
"""
Install the ssh configuration
"""
if not config.is_affirmative('use_global_ssh', default="no"):
ssh_config_injection = self._build_ssh_config(config)
if not os.path.exists(ssh_config_path):
if self.injections.in_noninjected_file(ssh_config_path, "Host %s" % config.get('host')):
if config.is_affirmative('override'):
self.injections.inject(ssh_config_path, ssh_config_injection)
else:
self.injections.inject(ssh_config_path, ssh_config_injection)
else:
self.injections.inject(ssh_config_path, ssh_config_injection)
self.injections.commit() | [
"def",
"__install_ssh_config",
"(",
"self",
",",
"config",
")",
":",
"if",
"not",
"config",
".",
"is_affirmative",
"(",
"'use_global_ssh'",
",",
"default",
"=",
"\"no\"",
")",
":",
"ssh_config_injection",
"=",
"self",
".",
"_build_ssh_config",
"(",
"config",
"... | Install the ssh configuration | [
"Install",
"the",
"ssh",
"configuration"
] | 846697a7a087e69c61d075232e754d6975a64152 | https://github.com/toumorokoshi/sprinter/blob/846697a7a087e69c61d075232e754d6975a64152/sprinter/formula/ssh.py#L83-L101 | train | 54,876 |
toumorokoshi/sprinter | sprinter/formula/ssh.py | SSHFormula._build_ssh_config | def _build_ssh_config(self, config):
""" build the ssh injection configuration """
ssh_config_injection = ssh_config_template % {
'host': config.get('host'),
'hostname': config.get('hostname'),
'ssh_key_path': config.get('ssh_key_path'),
'user': config.get('user')
}
if config.has('port'):
ssh_config_injection += " Port {0}\n".format(config.get('port'))
return ssh_config_injection | python | def _build_ssh_config(self, config):
""" build the ssh injection configuration """
ssh_config_injection = ssh_config_template % {
'host': config.get('host'),
'hostname': config.get('hostname'),
'ssh_key_path': config.get('ssh_key_path'),
'user': config.get('user')
}
if config.has('port'):
ssh_config_injection += " Port {0}\n".format(config.get('port'))
return ssh_config_injection | [
"def",
"_build_ssh_config",
"(",
"self",
",",
"config",
")",
":",
"ssh_config_injection",
"=",
"ssh_config_template",
"%",
"{",
"'host'",
":",
"config",
".",
"get",
"(",
"'host'",
")",
",",
"'hostname'",
":",
"config",
".",
"get",
"(",
"'hostname'",
")",
"... | build the ssh injection configuration | [
"build",
"the",
"ssh",
"injection",
"configuration"
] | 846697a7a087e69c61d075232e754d6975a64152 | https://github.com/toumorokoshi/sprinter/blob/846697a7a087e69c61d075232e754d6975a64152/sprinter/formula/ssh.py#L112-L122 | train | 54,877 |
Cadasta/cadasta-workertoolbox | cadasta/workertoolbox/utils.py | extract_followups | def extract_followups(task):
"""
Retrieve callbacks and errbacks from provided task instance, disables
tasks callbacks.
"""
callbacks = task.request.callbacks
errbacks = task.request.errbacks
task.request.callbacks = None
return {'link': callbacks, 'link_error': errbacks} | python | def extract_followups(task):
"""
Retrieve callbacks and errbacks from provided task instance, disables
tasks callbacks.
"""
callbacks = task.request.callbacks
errbacks = task.request.errbacks
task.request.callbacks = None
return {'link': callbacks, 'link_error': errbacks} | [
"def",
"extract_followups",
"(",
"task",
")",
":",
"callbacks",
"=",
"task",
".",
"request",
".",
"callbacks",
"errbacks",
"=",
"task",
".",
"request",
".",
"errbacks",
"task",
".",
"request",
".",
"callbacks",
"=",
"None",
"return",
"{",
"'link'",
":",
... | Retrieve callbacks and errbacks from provided task instance, disables
tasks callbacks. | [
"Retrieve",
"callbacks",
"and",
"errbacks",
"from",
"provided",
"task",
"instance",
"disables",
"tasks",
"callbacks",
"."
] | e17cf376538cee0b32c7a21afd5319e3549b954f | https://github.com/Cadasta/cadasta-workertoolbox/blob/e17cf376538cee0b32c7a21afd5319e3549b954f/cadasta/workertoolbox/utils.py#L4-L12 | train | 54,878 |
frascoweb/frasco | frasco/cli/scaffold.py | gen_procfile | def gen_procfile(ctx, wsgi, dev):
"""Generates Procfiles which can be used with honcho or foreman.
"""
if wsgi is None:
if os.path.exists("wsgi.py"):
wsgi = "wsgi.py"
elif os.path.exists("app.py"):
wsgi = "app.py"
else:
wsgi = "app.py"
ctx.invoke(gen_apppy)
def write_procfile(filename, server_process, debug):
processes = [server_process] + current_app.processes
procfile = []
for name, cmd in procfile_processes(processes, debug).iteritems():
procfile.append("%s: %s" % (name, cmd))
with open(filename, "w") as f:
f.write("\n".join(procfile))
write_procfile("Procfile", ("web", ["gunicorn", wsgi]), False)
if dev:
write_procfile("Procfile.dev", ("web", ["frasco", "serve"]), True) | python | def gen_procfile(ctx, wsgi, dev):
"""Generates Procfiles which can be used with honcho or foreman.
"""
if wsgi is None:
if os.path.exists("wsgi.py"):
wsgi = "wsgi.py"
elif os.path.exists("app.py"):
wsgi = "app.py"
else:
wsgi = "app.py"
ctx.invoke(gen_apppy)
def write_procfile(filename, server_process, debug):
processes = [server_process] + current_app.processes
procfile = []
for name, cmd in procfile_processes(processes, debug).iteritems():
procfile.append("%s: %s" % (name, cmd))
with open(filename, "w") as f:
f.write("\n".join(procfile))
write_procfile("Procfile", ("web", ["gunicorn", wsgi]), False)
if dev:
write_procfile("Procfile.dev", ("web", ["frasco", "serve"]), True) | [
"def",
"gen_procfile",
"(",
"ctx",
",",
"wsgi",
",",
"dev",
")",
":",
"if",
"wsgi",
"is",
"None",
":",
"if",
"os",
".",
"path",
".",
"exists",
"(",
"\"wsgi.py\"",
")",
":",
"wsgi",
"=",
"\"wsgi.py\"",
"elif",
"os",
".",
"path",
".",
"exists",
"(",
... | Generates Procfiles which can be used with honcho or foreman. | [
"Generates",
"Procfiles",
"which",
"can",
"be",
"used",
"with",
"honcho",
"or",
"foreman",
"."
] | ea519d69dd5ca6deaf3650175692ee4a1a02518f | https://github.com/frascoweb/frasco/blob/ea519d69dd5ca6deaf3650175692ee4a1a02518f/frasco/cli/scaffold.py#L55-L77 | train | 54,879 |
nikcub/floyd | floyd/util/dateformat.py | TimeFormat.g | def g(self):
"Hour, 12-hour format without leading zeros; i.e. '1' to '12'"
if self.data.hour == 0:
return 12
if self.data.hour > 12:
return self.data.hour - 12
return self.data.hour | python | def g(self):
"Hour, 12-hour format without leading zeros; i.e. '1' to '12'"
if self.data.hour == 0:
return 12
if self.data.hour > 12:
return self.data.hour - 12
return self.data.hour | [
"def",
"g",
"(",
"self",
")",
":",
"if",
"self",
".",
"data",
".",
"hour",
"==",
"0",
":",
"return",
"12",
"if",
"self",
".",
"data",
".",
"hour",
">",
"12",
":",
"return",
"self",
".",
"data",
".",
"hour",
"-",
"12",
"return",
"self",
".",
"... | Hour, 12-hour format without leading zeros; i.e. '1' to '12 | [
"Hour",
"12",
"-",
"hour",
"format",
"without",
"leading",
"zeros",
";",
"i",
".",
"e",
".",
"1",
"to",
"12"
] | 5772d0047efb11c9ce5f7d234a9da4576ce24edc | https://github.com/nikcub/floyd/blob/5772d0047efb11c9ce5f7d234a9da4576ce24edc/floyd/util/dateformat.py#L122-L128 | train | 54,880 |
nikcub/floyd | floyd/util/dateformat.py | DateFormat.I | def I(self):
"'1' if Daylight Savings Time, '0' otherwise."
if self.timezone and self.timezone.dst(self.data):
return u'1'
else:
return u'0' | python | def I(self):
"'1' if Daylight Savings Time, '0' otherwise."
if self.timezone and self.timezone.dst(self.data):
return u'1'
else:
return u'0' | [
"def",
"I",
"(",
"self",
")",
":",
"if",
"self",
".",
"timezone",
"and",
"self",
".",
"timezone",
".",
"dst",
"(",
"self",
".",
"data",
")",
":",
"return",
"u'1'",
"else",
":",
"return",
"u'0'"
] | 1' if Daylight Savings Time, '0' otherwise. | [
"1",
"if",
"Daylight",
"Savings",
"Time",
"0",
"otherwise",
"."
] | 5772d0047efb11c9ce5f7d234a9da4576ce24edc | https://github.com/nikcub/floyd/blob/5772d0047efb11c9ce5f7d234a9da4576ce24edc/floyd/util/dateformat.py#L257-L262 | train | 54,881 |
nikcub/floyd | floyd/util/dateformat.py | DateFormat.S | def S(self):
"English ordinal suffix for the day of the month, 2 characters; i.e. 'st', 'nd', 'rd' or 'th'"
if self.data.day in (11, 12, 13): # Special case
return u'th'
last = self.data.day % 10
if last == 1:
return u'st'
if last == 2:
return u'nd'
if last == 3:
return u'rd'
return u'th' | python | def S(self):
"English ordinal suffix for the day of the month, 2 characters; i.e. 'st', 'nd', 'rd' or 'th'"
if self.data.day in (11, 12, 13): # Special case
return u'th'
last = self.data.day % 10
if last == 1:
return u'st'
if last == 2:
return u'nd'
if last == 3:
return u'rd'
return u'th' | [
"def",
"S",
"(",
"self",
")",
":",
"if",
"self",
".",
"data",
".",
"day",
"in",
"(",
"11",
",",
"12",
",",
"13",
")",
":",
"# Special case",
"return",
"u'th'",
"last",
"=",
"self",
".",
"data",
".",
"day",
"%",
"10",
"if",
"last",
"==",
"1",
... | English ordinal suffix for the day of the month, 2 characters; i.e. 'st', 'nd', 'rd' or 'th | [
"English",
"ordinal",
"suffix",
"for",
"the",
"day",
"of",
"the",
"month",
"2",
"characters",
";",
"i",
".",
"e",
".",
"st",
"nd",
"rd",
"or",
"th"
] | 5772d0047efb11c9ce5f7d234a9da4576ce24edc | https://github.com/nikcub/floyd/blob/5772d0047efb11c9ce5f7d234a9da4576ce24edc/floyd/util/dateformat.py#L301-L312 | train | 54,882 |
nikcub/floyd | floyd/util/dateformat.py | DateFormat.t | def t(self):
"Number of days in the given month; i.e. '28' to '31'"
return u'%02d' % calendar.monthrange(self.data.year, self.data.month)[1] | python | def t(self):
"Number of days in the given month; i.e. '28' to '31'"
return u'%02d' % calendar.monthrange(self.data.year, self.data.month)[1] | [
"def",
"t",
"(",
"self",
")",
":",
"return",
"u'%02d'",
"%",
"calendar",
".",
"monthrange",
"(",
"self",
".",
"data",
".",
"year",
",",
"self",
".",
"data",
".",
"month",
")",
"[",
"1",
"]"
] | Number of days in the given month; i.e. '28' to '31 | [
"Number",
"of",
"days",
"in",
"the",
"given",
"month",
";",
"i",
".",
"e",
".",
"28",
"to",
"31"
] | 5772d0047efb11c9ce5f7d234a9da4576ce24edc | https://github.com/nikcub/floyd/blob/5772d0047efb11c9ce5f7d234a9da4576ce24edc/floyd/util/dateformat.py#L314-L316 | train | 54,883 |
nikcub/floyd | floyd/util/dateformat.py | DateFormat.W | def W(self):
"ISO-8601 week number of year, weeks starting on Monday"
# Algorithm from http://www.personal.ecu.edu/mccartyr/ISOwdALG.txt
week_number = None
jan1_weekday = self.data.replace(month=1, day=1).weekday() + 1
weekday = self.data.weekday() + 1
day_of_year = self.z()
if day_of_year <= (8 - jan1_weekday) and jan1_weekday > 4:
if jan1_weekday == 5 or (jan1_weekday == 6 and calendar.isleap(self.data.year-1)):
week_number = 53
else:
week_number = 52
else:
if calendar.isleap(self.data.year):
i = 366
else:
i = 365
if (i - day_of_year) < (4 - weekday):
week_number = 1
else:
j = day_of_year + (7 - weekday) + (jan1_weekday - 1)
week_number = j // 7
if jan1_weekday > 4:
week_number -= 1
return week_number | python | def W(self):
"ISO-8601 week number of year, weeks starting on Monday"
# Algorithm from http://www.personal.ecu.edu/mccartyr/ISOwdALG.txt
week_number = None
jan1_weekday = self.data.replace(month=1, day=1).weekday() + 1
weekday = self.data.weekday() + 1
day_of_year = self.z()
if day_of_year <= (8 - jan1_weekday) and jan1_weekday > 4:
if jan1_weekday == 5 or (jan1_weekday == 6 and calendar.isleap(self.data.year-1)):
week_number = 53
else:
week_number = 52
else:
if calendar.isleap(self.data.year):
i = 366
else:
i = 365
if (i - day_of_year) < (4 - weekday):
week_number = 1
else:
j = day_of_year + (7 - weekday) + (jan1_weekday - 1)
week_number = j // 7
if jan1_weekday > 4:
week_number -= 1
return week_number | [
"def",
"W",
"(",
"self",
")",
":",
"# Algorithm from http://www.personal.ecu.edu/mccartyr/ISOwdALG.txt",
"week_number",
"=",
"None",
"jan1_weekday",
"=",
"self",
".",
"data",
".",
"replace",
"(",
"month",
"=",
"1",
",",
"day",
"=",
"1",
")",
".",
"weekday",
"(... | ISO-8601 week number of year, weeks starting on Monday | [
"ISO",
"-",
"8601",
"week",
"number",
"of",
"year",
"weeks",
"starting",
"on",
"Monday"
] | 5772d0047efb11c9ce5f7d234a9da4576ce24edc | https://github.com/nikcub/floyd/blob/5772d0047efb11c9ce5f7d234a9da4576ce24edc/floyd/util/dateformat.py#L336-L360 | train | 54,884 |
nikcub/floyd | floyd/util/dateformat.py | DateFormat.z | def z(self):
"Day of the year; i.e. '0' to '365'"
doy = self.year_days[self.data.month] + self.data.day
if self.L() and self.data.month > 2:
doy += 1
return doy | python | def z(self):
"Day of the year; i.e. '0' to '365'"
doy = self.year_days[self.data.month] + self.data.day
if self.L() and self.data.month > 2:
doy += 1
return doy | [
"def",
"z",
"(",
"self",
")",
":",
"doy",
"=",
"self",
".",
"year_days",
"[",
"self",
".",
"data",
".",
"month",
"]",
"+",
"self",
".",
"data",
".",
"day",
"if",
"self",
".",
"L",
"(",
")",
"and",
"self",
".",
"data",
".",
"month",
">",
"2",
... | Day of the year; i.e. '0' to '365 | [
"Day",
"of",
"the",
"year",
";",
"i",
".",
"e",
".",
"0",
"to",
"365"
] | 5772d0047efb11c9ce5f7d234a9da4576ce24edc | https://github.com/nikcub/floyd/blob/5772d0047efb11c9ce5f7d234a9da4576ce24edc/floyd/util/dateformat.py#L370-L375 | train | 54,885 |
wearpants/instrument | instrument/output/__init__.py | print_metric | def print_metric(name, count, elapsed):
"""A metric function that prints to standard output
:arg str name: name of the metric
:arg int count: number of items
:arg float elapsed: time in seconds
"""
_do_print(name, count, elapsed, file=sys.stdout) | python | def print_metric(name, count, elapsed):
"""A metric function that prints to standard output
:arg str name: name of the metric
:arg int count: number of items
:arg float elapsed: time in seconds
"""
_do_print(name, count, elapsed, file=sys.stdout) | [
"def",
"print_metric",
"(",
"name",
",",
"count",
",",
"elapsed",
")",
":",
"_do_print",
"(",
"name",
",",
"count",
",",
"elapsed",
",",
"file",
"=",
"sys",
".",
"stdout",
")"
] | A metric function that prints to standard output
:arg str name: name of the metric
:arg int count: number of items
:arg float elapsed: time in seconds | [
"A",
"metric",
"function",
"that",
"prints",
"to",
"standard",
"output"
] | a0f6103574ab58a82361a951e5e56b69aedfe294 | https://github.com/wearpants/instrument/blob/a0f6103574ab58a82361a951e5e56b69aedfe294/instrument/output/__init__.py#L9-L16 | train | 54,886 |
wearpants/instrument | instrument/output/__init__.py | stderr_metric | def stderr_metric(name, count, elapsed):
"""A metric function that prints to standard error
:arg str name: name of the metric
:arg int count: number of items
:arg float elapsed: time in seconds
"""
_do_print(name, count, elapsed, file=sys.stderr) | python | def stderr_metric(name, count, elapsed):
"""A metric function that prints to standard error
:arg str name: name of the metric
:arg int count: number of items
:arg float elapsed: time in seconds
"""
_do_print(name, count, elapsed, file=sys.stderr) | [
"def",
"stderr_metric",
"(",
"name",
",",
"count",
",",
"elapsed",
")",
":",
"_do_print",
"(",
"name",
",",
"count",
",",
"elapsed",
",",
"file",
"=",
"sys",
".",
"stderr",
")"
] | A metric function that prints to standard error
:arg str name: name of the metric
:arg int count: number of items
:arg float elapsed: time in seconds | [
"A",
"metric",
"function",
"that",
"prints",
"to",
"standard",
"error"
] | a0f6103574ab58a82361a951e5e56b69aedfe294 | https://github.com/wearpants/instrument/blob/a0f6103574ab58a82361a951e5e56b69aedfe294/instrument/output/__init__.py#L18-L25 | train | 54,887 |
wearpants/instrument | instrument/output/__init__.py | make_multi_metric | def make_multi_metric(*metrics):
"""Make a new metric function that calls the supplied metrics
:arg functions metrics: metric functions
:rtype: function
"""
def multi_metric(name, count, elapsed):
"""Calls multiple metrics (closure)"""
for m in metrics:
m(name, count, elapsed)
return multi_metric | python | def make_multi_metric(*metrics):
"""Make a new metric function that calls the supplied metrics
:arg functions metrics: metric functions
:rtype: function
"""
def multi_metric(name, count, elapsed):
"""Calls multiple metrics (closure)"""
for m in metrics:
m(name, count, elapsed)
return multi_metric | [
"def",
"make_multi_metric",
"(",
"*",
"metrics",
")",
":",
"def",
"multi_metric",
"(",
"name",
",",
"count",
",",
"elapsed",
")",
":",
"\"\"\"Calls multiple metrics (closure)\"\"\"",
"for",
"m",
"in",
"metrics",
":",
"m",
"(",
"name",
",",
"count",
",",
"ela... | Make a new metric function that calls the supplied metrics
:arg functions metrics: metric functions
:rtype: function | [
"Make",
"a",
"new",
"metric",
"function",
"that",
"calls",
"the",
"supplied",
"metrics"
] | a0f6103574ab58a82361a951e5e56b69aedfe294 | https://github.com/wearpants/instrument/blob/a0f6103574ab58a82361a951e5e56b69aedfe294/instrument/output/__init__.py#L27-L37 | train | 54,888 |
mdickinson/refcycle | refcycle/__init__.py | _is_orphan | def _is_orphan(scc, graph):
"""
Return False iff the given scc is reachable from elsewhere.
"""
return all(p in scc for v in scc for p in graph.parents(v)) | python | def _is_orphan(scc, graph):
"""
Return False iff the given scc is reachable from elsewhere.
"""
return all(p in scc for v in scc for p in graph.parents(v)) | [
"def",
"_is_orphan",
"(",
"scc",
",",
"graph",
")",
":",
"return",
"all",
"(",
"p",
"in",
"scc",
"for",
"v",
"in",
"scc",
"for",
"p",
"in",
"graph",
".",
"parents",
"(",
"v",
")",
")"
] | Return False iff the given scc is reachable from elsewhere. | [
"Return",
"False",
"iff",
"the",
"given",
"scc",
"is",
"reachable",
"from",
"elsewhere",
"."
] | 627fad74c74efc601209c96405f8118cd99b2241 | https://github.com/mdickinson/refcycle/blob/627fad74c74efc601209c96405f8118cd99b2241/refcycle/__init__.py#L34-L39 | train | 54,889 |
mdickinson/refcycle | refcycle/__init__.py | key_cycles | def key_cycles():
"""
Collect cyclic garbage, and return the strongly connected
components that were keeping the garbage alive.
"""
graph = garbage()
sccs = graph.strongly_connected_components()
return [scc for scc in sccs if _is_orphan(scc, graph)] | python | def key_cycles():
"""
Collect cyclic garbage, and return the strongly connected
components that were keeping the garbage alive.
"""
graph = garbage()
sccs = graph.strongly_connected_components()
return [scc for scc in sccs if _is_orphan(scc, graph)] | [
"def",
"key_cycles",
"(",
")",
":",
"graph",
"=",
"garbage",
"(",
")",
"sccs",
"=",
"graph",
".",
"strongly_connected_components",
"(",
")",
"return",
"[",
"scc",
"for",
"scc",
"in",
"sccs",
"if",
"_is_orphan",
"(",
"scc",
",",
"graph",
")",
"]"
] | Collect cyclic garbage, and return the strongly connected
components that were keeping the garbage alive. | [
"Collect",
"cyclic",
"garbage",
"and",
"return",
"the",
"strongly",
"connected",
"components",
"that",
"were",
"keeping",
"the",
"garbage",
"alive",
"."
] | 627fad74c74efc601209c96405f8118cd99b2241 | https://github.com/mdickinson/refcycle/blob/627fad74c74efc601209c96405f8118cd99b2241/refcycle/__init__.py#L42-L50 | train | 54,890 |
StorjOld/plowshare-wrapper | plowshare/plowshare.py | Plowshare._run_command | def _run_command(self, command, **kwargs):
"""Wrapper to pass command to plowshare.
:param command: The command to pass to plowshare.
:type command: str
:param **kwargs: Additional keywords passed into
:type **kwargs: dict
:returns: Object containing either output of plowshare command or an
error message.
:rtype: dict
:raises: Exception
"""
try:
return {'output': subprocess.check_output(command, **kwargs)}
except Exception as e:
return {'error': str(e)} | python | def _run_command(self, command, **kwargs):
"""Wrapper to pass command to plowshare.
:param command: The command to pass to plowshare.
:type command: str
:param **kwargs: Additional keywords passed into
:type **kwargs: dict
:returns: Object containing either output of plowshare command or an
error message.
:rtype: dict
:raises: Exception
"""
try:
return {'output': subprocess.check_output(command, **kwargs)}
except Exception as e:
return {'error': str(e)} | [
"def",
"_run_command",
"(",
"self",
",",
"command",
",",
"*",
"*",
"kwargs",
")",
":",
"try",
":",
"return",
"{",
"'output'",
":",
"subprocess",
".",
"check_output",
"(",
"command",
",",
"*",
"*",
"kwargs",
")",
"}",
"except",
"Exception",
"as",
"e",
... | Wrapper to pass command to plowshare.
:param command: The command to pass to plowshare.
:type command: str
:param **kwargs: Additional keywords passed into
:type **kwargs: dict
:returns: Object containing either output of plowshare command or an
error message.
:rtype: dict
:raises: Exception | [
"Wrapper",
"to",
"pass",
"command",
"to",
"plowshare",
"."
] | edb38d01fd1decabf92cc4f536d7404dca6a977c | https://github.com/StorjOld/plowshare-wrapper/blob/edb38d01fd1decabf92cc4f536d7404dca6a977c/plowshare/plowshare.py#L53-L68 | train | 54,891 |
StorjOld/plowshare-wrapper | plowshare/plowshare.py | Plowshare._filter_sources | def _filter_sources(self, sources):
"""Remove sources with errors and return ordered by host success.
:param sources: List of potential sources to connect to.
:type sources: list
:returns: Sorted list of potential sources without errors.
:rtype: list
"""
filtered, hosts = [], []
for source in sources:
if 'error' in source:
continue
filtered.append(source)
hosts.append(source['host_name'])
return sorted(filtered, key=lambda s:
self._hosts_by_success(hosts).index(s['host_name'])) | python | def _filter_sources(self, sources):
"""Remove sources with errors and return ordered by host success.
:param sources: List of potential sources to connect to.
:type sources: list
:returns: Sorted list of potential sources without errors.
:rtype: list
"""
filtered, hosts = [], []
for source in sources:
if 'error' in source:
continue
filtered.append(source)
hosts.append(source['host_name'])
return sorted(filtered, key=lambda s:
self._hosts_by_success(hosts).index(s['host_name'])) | [
"def",
"_filter_sources",
"(",
"self",
",",
"sources",
")",
":",
"filtered",
",",
"hosts",
"=",
"[",
"]",
",",
"[",
"]",
"for",
"source",
"in",
"sources",
":",
"if",
"'error'",
"in",
"source",
":",
"continue",
"filtered",
".",
"append",
"(",
"source",
... | Remove sources with errors and return ordered by host success.
:param sources: List of potential sources to connect to.
:type sources: list
:returns: Sorted list of potential sources without errors.
:rtype: list | [
"Remove",
"sources",
"with",
"errors",
"and",
"return",
"ordered",
"by",
"host",
"success",
"."
] | edb38d01fd1decabf92cc4f536d7404dca6a977c | https://github.com/StorjOld/plowshare-wrapper/blob/edb38d01fd1decabf92cc4f536d7404dca6a977c/plowshare/plowshare.py#L81-L97 | train | 54,892 |
StorjOld/plowshare-wrapper | plowshare/plowshare.py | Plowshare.upload | def upload(self, filename, number_of_hosts):
"""Upload the given file to the specified number of hosts.
:param filename: The filename of the file to upload.
:type filename: str
:param number_of_hosts: The number of hosts to connect to.
:type number_of_hosts: int
:returns: A list of dicts with 'host_name' and 'url' keys for all
successful uploads or an empty list if all uploads failed.
:rtype: list
"""
return self.multiupload(filename, self.random_hosts(number_of_hosts)) | python | def upload(self, filename, number_of_hosts):
"""Upload the given file to the specified number of hosts.
:param filename: The filename of the file to upload.
:type filename: str
:param number_of_hosts: The number of hosts to connect to.
:type number_of_hosts: int
:returns: A list of dicts with 'host_name' and 'url' keys for all
successful uploads or an empty list if all uploads failed.
:rtype: list
"""
return self.multiupload(filename, self.random_hosts(number_of_hosts)) | [
"def",
"upload",
"(",
"self",
",",
"filename",
",",
"number_of_hosts",
")",
":",
"return",
"self",
".",
"multiupload",
"(",
"filename",
",",
"self",
".",
"random_hosts",
"(",
"number_of_hosts",
")",
")"
] | Upload the given file to the specified number of hosts.
:param filename: The filename of the file to upload.
:type filename: str
:param number_of_hosts: The number of hosts to connect to.
:type number_of_hosts: int
:returns: A list of dicts with 'host_name' and 'url' keys for all
successful uploads or an empty list if all uploads failed.
:rtype: list | [
"Upload",
"the",
"given",
"file",
"to",
"the",
"specified",
"number",
"of",
"hosts",
"."
] | edb38d01fd1decabf92cc4f536d7404dca6a977c | https://github.com/StorjOld/plowshare-wrapper/blob/edb38d01fd1decabf92cc4f536d7404dca6a977c/plowshare/plowshare.py#L114-L125 | train | 54,893 |
StorjOld/plowshare-wrapper | plowshare/plowshare.py | Plowshare.download | def download(self, sources, output_directory, filename):
"""Download a file from one of the provided sources
The sources will be ordered by least amount of errors, so most
successful hosts will be tried first. In case of failure, the next
source will be attempted, until the first successful download is
completed or all sources have been depleted.
:param sources: A list of dicts with 'host_name' and 'url' keys.
:type sources: list
:param output_directory: Directory to save the downloaded file in.
:type output_directory: str
:param filename: Filename assigned to the downloaded file.
:type filename: str
:returns: A dict with 'host_name' and 'filename' keys if the download
is successful, or an empty dict otherwise.
:rtype: dict
"""
valid_sources = self._filter_sources(sources)
if not valid_sources:
return {'error': 'no valid sources'}
manager = Manager()
successful_downloads = manager.list([])
def f(source):
if not successful_downloads:
result = self.download_from_host(
source, output_directory, filename)
if 'error' in result:
self._host_errors[source['host_name']] += 1
else:
successful_downloads.append(result)
multiprocessing.dummy.Pool(len(valid_sources)).map(f, valid_sources)
return successful_downloads[0] if successful_downloads else {} | python | def download(self, sources, output_directory, filename):
"""Download a file from one of the provided sources
The sources will be ordered by least amount of errors, so most
successful hosts will be tried first. In case of failure, the next
source will be attempted, until the first successful download is
completed or all sources have been depleted.
:param sources: A list of dicts with 'host_name' and 'url' keys.
:type sources: list
:param output_directory: Directory to save the downloaded file in.
:type output_directory: str
:param filename: Filename assigned to the downloaded file.
:type filename: str
:returns: A dict with 'host_name' and 'filename' keys if the download
is successful, or an empty dict otherwise.
:rtype: dict
"""
valid_sources = self._filter_sources(sources)
if not valid_sources:
return {'error': 'no valid sources'}
manager = Manager()
successful_downloads = manager.list([])
def f(source):
if not successful_downloads:
result = self.download_from_host(
source, output_directory, filename)
if 'error' in result:
self._host_errors[source['host_name']] += 1
else:
successful_downloads.append(result)
multiprocessing.dummy.Pool(len(valid_sources)).map(f, valid_sources)
return successful_downloads[0] if successful_downloads else {} | [
"def",
"download",
"(",
"self",
",",
"sources",
",",
"output_directory",
",",
"filename",
")",
":",
"valid_sources",
"=",
"self",
".",
"_filter_sources",
"(",
"sources",
")",
"if",
"not",
"valid_sources",
":",
"return",
"{",
"'error'",
":",
"'no valid sources'... | Download a file from one of the provided sources
The sources will be ordered by least amount of errors, so most
successful hosts will be tried first. In case of failure, the next
source will be attempted, until the first successful download is
completed or all sources have been depleted.
:param sources: A list of dicts with 'host_name' and 'url' keys.
:type sources: list
:param output_directory: Directory to save the downloaded file in.
:type output_directory: str
:param filename: Filename assigned to the downloaded file.
:type filename: str
:returns: A dict with 'host_name' and 'filename' keys if the download
is successful, or an empty dict otherwise.
:rtype: dict | [
"Download",
"a",
"file",
"from",
"one",
"of",
"the",
"provided",
"sources"
] | edb38d01fd1decabf92cc4f536d7404dca6a977c | https://github.com/StorjOld/plowshare-wrapper/blob/edb38d01fd1decabf92cc4f536d7404dca6a977c/plowshare/plowshare.py#L127-L163 | train | 54,894 |
StorjOld/plowshare-wrapper | plowshare/plowshare.py | Plowshare.download_from_host | def download_from_host(self, source, output_directory, filename):
"""Download a file from a given host.
This method renames the file to the given string.
:param source: Dictionary containing information about host.
:type source: dict
:param output_directory: Directory to place output in.
:type output_directory: str
:param filename: The filename to rename to.
:type filename: str
:returns: Dictionary with information about downloaded file.
:rtype: dict
"""
result = self._run_command(
["plowdown", source["url"], "-o",
output_directory, "--temp-rename"],
stderr=open("/dev/null", "w")
)
result['host_name'] = source['host_name']
if 'error' in result:
return result
temporary_filename = self.parse_output(
result['host_name'], result['output'])
result['filename'] = os.path.join(output_directory, filename)
result.pop('output')
os.rename(temporary_filename, result['filename'])
return result | python | def download_from_host(self, source, output_directory, filename):
"""Download a file from a given host.
This method renames the file to the given string.
:param source: Dictionary containing information about host.
:type source: dict
:param output_directory: Directory to place output in.
:type output_directory: str
:param filename: The filename to rename to.
:type filename: str
:returns: Dictionary with information about downloaded file.
:rtype: dict
"""
result = self._run_command(
["plowdown", source["url"], "-o",
output_directory, "--temp-rename"],
stderr=open("/dev/null", "w")
)
result['host_name'] = source['host_name']
if 'error' in result:
return result
temporary_filename = self.parse_output(
result['host_name'], result['output'])
result['filename'] = os.path.join(output_directory, filename)
result.pop('output')
os.rename(temporary_filename, result['filename'])
return result | [
"def",
"download_from_host",
"(",
"self",
",",
"source",
",",
"output_directory",
",",
"filename",
")",
":",
"result",
"=",
"self",
".",
"_run_command",
"(",
"[",
"\"plowdown\"",
",",
"source",
"[",
"\"url\"",
"]",
",",
"\"-o\"",
",",
"output_directory",
","... | Download a file from a given host.
This method renames the file to the given string.
:param source: Dictionary containing information about host.
:type source: dict
:param output_directory: Directory to place output in.
:type output_directory: str
:param filename: The filename to rename to.
:type filename: str
:returns: Dictionary with information about downloaded file.
:rtype: dict | [
"Download",
"a",
"file",
"from",
"a",
"given",
"host",
"."
] | edb38d01fd1decabf92cc4f536d7404dca6a977c | https://github.com/StorjOld/plowshare-wrapper/blob/edb38d01fd1decabf92cc4f536d7404dca6a977c/plowshare/plowshare.py#L165-L197 | train | 54,895 |
StorjOld/plowshare-wrapper | plowshare/plowshare.py | Plowshare.multiupload | def multiupload(self, filename, hosts):
"""Upload file to multiple hosts simultaneously
The upload will be attempted for each host until the optimal file
redundancy is achieved (a percentage of successful uploads) or the host
list is depleted.
:param filename: The filename of the file to upload.
:type filename: str
:param hosts: A list of hosts as defined in the master host list.
:type hosts: list
:returns: A list of dicts with 'host_name' and 'url' keys for all
successful uploads or an empty list if all uploads failed.
:rtype: list
"""
manager = Manager()
successful_uploads = manager.list([])
def f(host):
if len(successful_uploads) / float(len(hosts)) < \
settings.MIN_FILE_REDUNDANCY:
# Optimal redundancy not achieved, keep going
result = self.upload_to_host(filename, host)
if 'error' in result:
self._host_errors[host] += 1
else:
successful_uploads.append(result)
multiprocessing.dummy.Pool(len(hosts)).map(
f, self._hosts_by_success(hosts))
return list(successful_uploads) | python | def multiupload(self, filename, hosts):
"""Upload file to multiple hosts simultaneously
The upload will be attempted for each host until the optimal file
redundancy is achieved (a percentage of successful uploads) or the host
list is depleted.
:param filename: The filename of the file to upload.
:type filename: str
:param hosts: A list of hosts as defined in the master host list.
:type hosts: list
:returns: A list of dicts with 'host_name' and 'url' keys for all
successful uploads or an empty list if all uploads failed.
:rtype: list
"""
manager = Manager()
successful_uploads = manager.list([])
def f(host):
if len(successful_uploads) / float(len(hosts)) < \
settings.MIN_FILE_REDUNDANCY:
# Optimal redundancy not achieved, keep going
result = self.upload_to_host(filename, host)
if 'error' in result:
self._host_errors[host] += 1
else:
successful_uploads.append(result)
multiprocessing.dummy.Pool(len(hosts)).map(
f, self._hosts_by_success(hosts))
return list(successful_uploads) | [
"def",
"multiupload",
"(",
"self",
",",
"filename",
",",
"hosts",
")",
":",
"manager",
"=",
"Manager",
"(",
")",
"successful_uploads",
"=",
"manager",
".",
"list",
"(",
"[",
"]",
")",
"def",
"f",
"(",
"host",
")",
":",
"if",
"len",
"(",
"successful_u... | Upload file to multiple hosts simultaneously
The upload will be attempted for each host until the optimal file
redundancy is achieved (a percentage of successful uploads) or the host
list is depleted.
:param filename: The filename of the file to upload.
:type filename: str
:param hosts: A list of hosts as defined in the master host list.
:type hosts: list
:returns: A list of dicts with 'host_name' and 'url' keys for all
successful uploads or an empty list if all uploads failed.
:rtype: list | [
"Upload",
"file",
"to",
"multiple",
"hosts",
"simultaneously"
] | edb38d01fd1decabf92cc4f536d7404dca6a977c | https://github.com/StorjOld/plowshare-wrapper/blob/edb38d01fd1decabf92cc4f536d7404dca6a977c/plowshare/plowshare.py#L199-L230 | train | 54,896 |
StorjOld/plowshare-wrapper | plowshare/plowshare.py | Plowshare.upload_to_host | def upload_to_host(self, filename, hostname):
"""Upload a file to the given host.
This method relies on 'plowup' being installed on the system.
If it succeeds, this method returns a dictionary with the host name,
and the final URL. Otherwise, it returns a dictionary with the
host name and an error flag.
:param filename: The filename of the file to upload.
:type filename: str
:param hostname: The host you are uploading the file to.
:type hostname: str
:returns: Dictionary containing information about upload to host.
:rtype: dict
"""
result = self._run_command(
["plowup", hostname, filename],
stderr=open("/dev/null", "w")
)
result['host_name'] = hostname
if 'error' not in result:
result['url'] = self.parse_output(hostname, result.pop('output'))
return result | python | def upload_to_host(self, filename, hostname):
"""Upload a file to the given host.
This method relies on 'plowup' being installed on the system.
If it succeeds, this method returns a dictionary with the host name,
and the final URL. Otherwise, it returns a dictionary with the
host name and an error flag.
:param filename: The filename of the file to upload.
:type filename: str
:param hostname: The host you are uploading the file to.
:type hostname: str
:returns: Dictionary containing information about upload to host.
:rtype: dict
"""
result = self._run_command(
["plowup", hostname, filename],
stderr=open("/dev/null", "w")
)
result['host_name'] = hostname
if 'error' not in result:
result['url'] = self.parse_output(hostname, result.pop('output'))
return result | [
"def",
"upload_to_host",
"(",
"self",
",",
"filename",
",",
"hostname",
")",
":",
"result",
"=",
"self",
".",
"_run_command",
"(",
"[",
"\"plowup\"",
",",
"hostname",
",",
"filename",
"]",
",",
"stderr",
"=",
"open",
"(",
"\"/dev/null\"",
",",
"\"w\"",
"... | Upload a file to the given host.
This method relies on 'plowup' being installed on the system.
If it succeeds, this method returns a dictionary with the host name,
and the final URL. Otherwise, it returns a dictionary with the
host name and an error flag.
:param filename: The filename of the file to upload.
:type filename: str
:param hostname: The host you are uploading the file to.
:type hostname: str
:returns: Dictionary containing information about upload to host.
:rtype: dict | [
"Upload",
"a",
"file",
"to",
"the",
"given",
"host",
"."
] | edb38d01fd1decabf92cc4f536d7404dca6a977c | https://github.com/StorjOld/plowshare-wrapper/blob/edb38d01fd1decabf92cc4f536d7404dca6a977c/plowshare/plowshare.py#L232-L256 | train | 54,897 |
StorjOld/plowshare-wrapper | plowshare/plowshare.py | Plowshare.parse_output | def parse_output(self, hostname, output):
"""Parse plowup's output.
For now, we just return the last line.
:param hostname: Name of host you are working with.
:type hostname: str
:param output: Dictionary containing information about a plowshare
action.
:type output: dict
:returns: Parsed and decoded output list.
:rtype: list
"""
if isinstance(output, bytes):
output = output.decode('utf-8')
return output.split()[-1] | python | def parse_output(self, hostname, output):
"""Parse plowup's output.
For now, we just return the last line.
:param hostname: Name of host you are working with.
:type hostname: str
:param output: Dictionary containing information about a plowshare
action.
:type output: dict
:returns: Parsed and decoded output list.
:rtype: list
"""
if isinstance(output, bytes):
output = output.decode('utf-8')
return output.split()[-1] | [
"def",
"parse_output",
"(",
"self",
",",
"hostname",
",",
"output",
")",
":",
"if",
"isinstance",
"(",
"output",
",",
"bytes",
")",
":",
"output",
"=",
"output",
".",
"decode",
"(",
"'utf-8'",
")",
"return",
"output",
".",
"split",
"(",
")",
"[",
"-"... | Parse plowup's output.
For now, we just return the last line.
:param hostname: Name of host you are working with.
:type hostname: str
:param output: Dictionary containing information about a plowshare
action.
:type output: dict
:returns: Parsed and decoded output list.
:rtype: list | [
"Parse",
"plowup",
"s",
"output",
"."
] | edb38d01fd1decabf92cc4f536d7404dca6a977c | https://github.com/StorjOld/plowshare-wrapper/blob/edb38d01fd1decabf92cc4f536d7404dca6a977c/plowshare/plowshare.py#L258-L273 | train | 54,898 |
Cadasta/cadasta-workertoolbox | cadasta/workertoolbox/conf.py | Config._generate_queues | def _generate_queues(queues, exchange, platform_queue):
""" Queues known by this worker """
return set([
Queue('celery', exchange, routing_key='celery'),
Queue(platform_queue, exchange, routing_key='#'),
] + [
Queue(q_name, exchange, routing_key=q_name)
for q_name in queues
]) | python | def _generate_queues(queues, exchange, platform_queue):
""" Queues known by this worker """
return set([
Queue('celery', exchange, routing_key='celery'),
Queue(platform_queue, exchange, routing_key='#'),
] + [
Queue(q_name, exchange, routing_key=q_name)
for q_name in queues
]) | [
"def",
"_generate_queues",
"(",
"queues",
",",
"exchange",
",",
"platform_queue",
")",
":",
"return",
"set",
"(",
"[",
"Queue",
"(",
"'celery'",
",",
"exchange",
",",
"routing_key",
"=",
"'celery'",
")",
",",
"Queue",
"(",
"platform_queue",
",",
"exchange",
... | Queues known by this worker | [
"Queues",
"known",
"by",
"this",
"worker"
] | e17cf376538cee0b32c7a21afd5319e3549b954f | https://github.com/Cadasta/cadasta-workertoolbox/blob/e17cf376538cee0b32c7a21afd5319e3549b954f/cadasta/workertoolbox/conf.py#L202-L210 | train | 54,899 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.