body_hash stringlengths 64 64 | body stringlengths 23 109k | docstring stringlengths 1 57k | path stringlengths 4 198 | name stringlengths 1 115 | repository_name stringlengths 7 111 | repository_stars float64 0 191k | lang stringclasses 1 value | body_without_docstring stringlengths 14 108k | unified stringlengths 45 133k |
|---|---|---|---|---|---|---|---|---|---|
28e17428ba0196f64623aa4218fc5e115b90477e22544c969772f17e40289541 | @is_check_password.setter
def is_check_password(self, is_check_password):
'Sets the is_check_password of this ResetServerPasswordOption.\n\n 是否检查密码的复杂度。\n\n :param is_check_password: The is_check_password of this ResetServerPasswordOption.\n :type: bool\n '
self._is_check_password = is_check_password | Sets the is_check_password of this ResetServerPasswordOption.
是否检查密码的复杂度。
:param is_check_password: The is_check_password of this ResetServerPasswordOption.
:type: bool | huaweicloud-sdk-ecs/huaweicloudsdkecs/v2/model/reset_server_password_option.py | is_check_password | huaweicloud/huaweicloud-sdk-python-v3 | 64 | python | @is_check_password.setter
def is_check_password(self, is_check_password):
'Sets the is_check_password of this ResetServerPasswordOption.\n\n 是否检查密码的复杂度。\n\n :param is_check_password: The is_check_password of this ResetServerPasswordOption.\n :type: bool\n '
self._is_check_password = is_check_password | @is_check_password.setter
def is_check_password(self, is_check_password):
'Sets the is_check_password of this ResetServerPasswordOption.\n\n 是否检查密码的复杂度。\n\n :param is_check_password: The is_check_password of this ResetServerPasswordOption.\n :type: bool\n '
self._is_check_password = is_check_password<|docstring|>Sets the is_check_password of this ResetServerPasswordOption.
是否检查密码的复杂度。
:param is_check_password: The is_check_password of this ResetServerPasswordOption.
:type: bool<|endoftext|> |
23795442a46e2cd10dec98fded44ed9172a29971e98983a30ad89baa6c9c0a03 | def to_dict(self):
'Returns the model properties as a dict'
result = {}
for (attr, _) in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map((lambda x: (x.to_dict() if hasattr(x, 'to_dict') else x)), value))
elif hasattr(value, 'to_dict'):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map((lambda item: ((item[0], item[1].to_dict()) if hasattr(item[1], 'to_dict') else item)), value.items()))
elif (attr in self.sensitive_list):
result[attr] = '****'
else:
result[attr] = value
return result | Returns the model properties as a dict | huaweicloud-sdk-ecs/huaweicloudsdkecs/v2/model/reset_server_password_option.py | to_dict | huaweicloud/huaweicloud-sdk-python-v3 | 64 | python | def to_dict(self):
result = {}
for (attr, _) in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map((lambda x: (x.to_dict() if hasattr(x, 'to_dict') else x)), value))
elif hasattr(value, 'to_dict'):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map((lambda item: ((item[0], item[1].to_dict()) if hasattr(item[1], 'to_dict') else item)), value.items()))
elif (attr in self.sensitive_list):
result[attr] = '****'
else:
result[attr] = value
return result | def to_dict(self):
result = {}
for (attr, _) in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map((lambda x: (x.to_dict() if hasattr(x, 'to_dict') else x)), value))
elif hasattr(value, 'to_dict'):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map((lambda item: ((item[0], item[1].to_dict()) if hasattr(item[1], 'to_dict') else item)), value.items()))
elif (attr in self.sensitive_list):
result[attr] = '****'
else:
result[attr] = value
return result<|docstring|>Returns the model properties as a dict<|endoftext|> |
a85eb2dd57daf3998acb705f217af08ef0b14fd68fee87605500331b1a5f2987 | def to_str(self):
'Returns the string representation of the model'
import simplejson as json
if six.PY2:
import sys
reload(sys)
sys.setdefaultencoding('utf-8')
return json.dumps(sanitize_for_serialization(self), ensure_ascii=False) | Returns the string representation of the model | huaweicloud-sdk-ecs/huaweicloudsdkecs/v2/model/reset_server_password_option.py | to_str | huaweicloud/huaweicloud-sdk-python-v3 | 64 | python | def to_str(self):
import simplejson as json
if six.PY2:
import sys
reload(sys)
sys.setdefaultencoding('utf-8')
return json.dumps(sanitize_for_serialization(self), ensure_ascii=False) | def to_str(self):
import simplejson as json
if six.PY2:
import sys
reload(sys)
sys.setdefaultencoding('utf-8')
return json.dumps(sanitize_for_serialization(self), ensure_ascii=False)<|docstring|>Returns the string representation of the model<|endoftext|> |
122cefd5382ee9078015a8ccdeba1aa42a0625442bf0dcfc7748dc07a3e45d3f | def __repr__(self):
'For `print`'
return self.to_str() | For `print` | huaweicloud-sdk-ecs/huaweicloudsdkecs/v2/model/reset_server_password_option.py | __repr__ | huaweicloud/huaweicloud-sdk-python-v3 | 64 | python | def __repr__(self):
return self.to_str() | def __repr__(self):
return self.to_str()<|docstring|>For `print`<|endoftext|> |
c299fb3ef7d311b60d623157d1e3d71074e6e58011520253e153bf0f7d690339 | def __eq__(self, other):
'Returns true if both objects are equal'
if (not isinstance(other, ResetServerPasswordOption)):
return False
return (self.__dict__ == other.__dict__) | Returns true if both objects are equal | huaweicloud-sdk-ecs/huaweicloudsdkecs/v2/model/reset_server_password_option.py | __eq__ | huaweicloud/huaweicloud-sdk-python-v3 | 64 | python | def __eq__(self, other):
if (not isinstance(other, ResetServerPasswordOption)):
return False
return (self.__dict__ == other.__dict__) | def __eq__(self, other):
if (not isinstance(other, ResetServerPasswordOption)):
return False
return (self.__dict__ == other.__dict__)<|docstring|>Returns true if both objects are equal<|endoftext|> |
43dc6740163eb9fc1161d09cb2208a64c7ad0cc8d9c8637ac3264522d3ec7e42 | def __ne__(self, other):
'Returns true if both objects are not equal'
return (not (self == other)) | Returns true if both objects are not equal | huaweicloud-sdk-ecs/huaweicloudsdkecs/v2/model/reset_server_password_option.py | __ne__ | huaweicloud/huaweicloud-sdk-python-v3 | 64 | python | def __ne__(self, other):
return (not (self == other)) | def __ne__(self, other):
return (not (self == other))<|docstring|>Returns true if both objects are not equal<|endoftext|> |
ef32925b06c57436698a093763e1c9b1fc4ffeaf3180a10613cc8d4a0e6b8609 | def __init__(self, ip, port):
'Initializes our server and binds to a socket.'
self.host = ip
self.port = port
self.sock = socket.socket()
self.sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
self.sock.bind((self.host, self.port))
print('Bounded on {}:{}'.format(self.host, self.port)) | Initializes our server and binds to a socket. | router.py | __init__ | dsande30/COSC560-PA1 | 0 | python | def __init__(self, ip, port):
self.host = ip
self.port = port
self.sock = socket.socket()
self.sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
self.sock.bind((self.host, self.port))
print('Bounded on {}:{}'.format(self.host, self.port)) | def __init__(self, ip, port):
self.host = ip
self.port = port
self.sock = socket.socket()
self.sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
self.sock.bind((self.host, self.port))
print('Bounded on {}:{}'.format(self.host, self.port))<|docstring|>Initializes our server and binds to a socket.<|endoftext|> |
fc62217f4c2592d49e6f128ae263ca3c52efdfcb5992d6e6423b080ee7f87241 | def listen(self):
'Listens for incoming connections and threads them off.'
self.sock.listen(5)
while True:
(client, address) = self.sock.accept()
client.settimeout(30)
threading.Thread(target=self.serveClient, args=(client, address)).start() | Listens for incoming connections and threads them off. | router.py | listen | dsande30/COSC560-PA1 | 0 | python | def listen(self):
self.sock.listen(5)
while True:
(client, address) = self.sock.accept()
client.settimeout(30)
threading.Thread(target=self.serveClient, args=(client, address)).start() | def listen(self):
self.sock.listen(5)
while True:
(client, address) = self.sock.accept()
client.settimeout(30)
threading.Thread(target=self.serveClient, args=(client, address)).start()<|docstring|>Listens for incoming connections and threads them off.<|endoftext|> |
16c9c24a5c184d4945a24d9f55418f22ce59b23dd74e258bdebcdeb5f2ff3b6d | def recvall(self, client):
'A helper function to receive ALL client data before proceeding.'
BUFF_SIZE = 512
data = b''
while True:
part = client.recv(BUFF_SIZE)
data += part
if (len(part) < BUFF_SIZE):
break
return data | A helper function to receive ALL client data before proceeding. | router.py | recvall | dsande30/COSC560-PA1 | 0 | python | def recvall(self, client):
BUFF_SIZE = 512
data = b
while True:
part = client.recv(BUFF_SIZE)
data += part
if (len(part) < BUFF_SIZE):
break
return data | def recvall(self, client):
BUFF_SIZE = 512
data = b
while True:
part = client.recv(BUFF_SIZE)
data += part
if (len(part) < BUFF_SIZE):
break
return data<|docstring|>A helper function to receive ALL client data before proceeding.<|endoftext|> |
de3512b027dd01190fca6d37155af97a0cf250cd2487cc24f68195a1b8873993 | def serveClient(self, client, address):
'Receives request, parses it, responds, and closes connection.'
name = '{}:{}'.format(address[0], address[1])
print('Connected to', name)
while True:
try:
data = self.recvall(client)
if data:
request = RequestParser(data)
request.parseRequest()
response = Responder(request, client, name)
if (request.error_code != 200):
response.sendError(request.error_code)
elif (request.action == 'GET'):
response.sendGET()
elif (request.action == 'POST'):
response.sendPOST()
print('Served {}, disconnecting.'.format(name))
client.close()
return False
else:
raise Exception('Client {} disconnected'.format(name))
except Exception as e:
print(e)
client.close()
return False | Receives request, parses it, responds, and closes connection. | router.py | serveClient | dsande30/COSC560-PA1 | 0 | python | def serveClient(self, client, address):
name = '{}:{}'.format(address[0], address[1])
print('Connected to', name)
while True:
try:
data = self.recvall(client)
if data:
request = RequestParser(data)
request.parseRequest()
response = Responder(request, client, name)
if (request.error_code != 200):
response.sendError(request.error_code)
elif (request.action == 'GET'):
response.sendGET()
elif (request.action == 'POST'):
response.sendPOST()
print('Served {}, disconnecting.'.format(name))
client.close()
return False
else:
raise Exception('Client {} disconnected'.format(name))
except Exception as e:
print(e)
client.close()
return False | def serveClient(self, client, address):
name = '{}:{}'.format(address[0], address[1])
print('Connected to', name)
while True:
try:
data = self.recvall(client)
if data:
request = RequestParser(data)
request.parseRequest()
response = Responder(request, client, name)
if (request.error_code != 200):
response.sendError(request.error_code)
elif (request.action == 'GET'):
response.sendGET()
elif (request.action == 'POST'):
response.sendPOST()
print('Served {}, disconnecting.'.format(name))
client.close()
return False
else:
raise Exception('Client {} disconnected'.format(name))
except Exception as e:
print(e)
client.close()
return False<|docstring|>Receives request, parses it, responds, and closes connection.<|endoftext|> |
cf4caa36fed93ebe063218f323796932755d407c6418da85503415ec43f38b65 | def create_line():
'Function to create a new line.'
lst = [(0, 0)]
lst.append((8, lfo1.get()))
lst.append((128, lfo2.get()))
lst.append((255, 0))
table.replace(lst) | Function to create a new line. | venv/Lib/site-packages/pyo/examples/10-tables/07-moving-points.py | create_line | mintzer/pupillometry-rf-back | 0 | python | def create_line():
lst = [(0, 0)]
lst.append((8, lfo1.get()))
lst.append((128, lfo2.get()))
lst.append((255, 0))
table.replace(lst) | def create_line():
lst = [(0, 0)]
lst.append((8, lfo1.get()))
lst.append((128, lfo2.get()))
lst.append((255, 0))
table.replace(lst)<|docstring|>Function to create a new line.<|endoftext|> |
64225256b29a3afb54253445aa81e5be5bdb0cd747b5942c401d9a5dd1d959aa | @property
def service_account_credentials(self):
'Gets the service_account_credentials of this GcsServiceAccountInput.\n\n GCS projectId (required)\n\n :return: The service_account_credentials of this GcsServiceAccountInput.\n :rtype: string_types\n '
return self._service_account_credentials | Gets the service_account_credentials of this GcsServiceAccountInput.
GCS projectId (required)
:return: The service_account_credentials of this GcsServiceAccountInput.
:rtype: string_types | bitmovin_api_sdk/models/gcs_service_account_input.py | service_account_credentials | hofmannben/bitmovin-api-sdk-python | 0 | python | @property
def service_account_credentials(self):
'Gets the service_account_credentials of this GcsServiceAccountInput.\n\n GCS projectId (required)\n\n :return: The service_account_credentials of this GcsServiceAccountInput.\n :rtype: string_types\n '
return self._service_account_credentials | @property
def service_account_credentials(self):
'Gets the service_account_credentials of this GcsServiceAccountInput.\n\n GCS projectId (required)\n\n :return: The service_account_credentials of this GcsServiceAccountInput.\n :rtype: string_types\n '
return self._service_account_credentials<|docstring|>Gets the service_account_credentials of this GcsServiceAccountInput.
GCS projectId (required)
:return: The service_account_credentials of this GcsServiceAccountInput.
:rtype: string_types<|endoftext|> |
28c26692115bd689d6dc0e414bee2019e95024ee94b2e1f278d5c484f2697d33 | @service_account_credentials.setter
def service_account_credentials(self, service_account_credentials):
'Sets the service_account_credentials of this GcsServiceAccountInput.\n\n GCS projectId (required)\n\n :param service_account_credentials: The service_account_credentials of this GcsServiceAccountInput.\n :type: string_types\n '
if (service_account_credentials is not None):
if (not isinstance(service_account_credentials, string_types)):
raise TypeError('Invalid type for `service_account_credentials`, type has to be `string_types`')
self._service_account_credentials = service_account_credentials | Sets the service_account_credentials of this GcsServiceAccountInput.
GCS projectId (required)
:param service_account_credentials: The service_account_credentials of this GcsServiceAccountInput.
:type: string_types | bitmovin_api_sdk/models/gcs_service_account_input.py | service_account_credentials | hofmannben/bitmovin-api-sdk-python | 0 | python | @service_account_credentials.setter
def service_account_credentials(self, service_account_credentials):
'Sets the service_account_credentials of this GcsServiceAccountInput.\n\n GCS projectId (required)\n\n :param service_account_credentials: The service_account_credentials of this GcsServiceAccountInput.\n :type: string_types\n '
if (service_account_credentials is not None):
if (not isinstance(service_account_credentials, string_types)):
raise TypeError('Invalid type for `service_account_credentials`, type has to be `string_types`')
self._service_account_credentials = service_account_credentials | @service_account_credentials.setter
def service_account_credentials(self, service_account_credentials):
'Sets the service_account_credentials of this GcsServiceAccountInput.\n\n GCS projectId (required)\n\n :param service_account_credentials: The service_account_credentials of this GcsServiceAccountInput.\n :type: string_types\n '
if (service_account_credentials is not None):
if (not isinstance(service_account_credentials, string_types)):
raise TypeError('Invalid type for `service_account_credentials`, type has to be `string_types`')
self._service_account_credentials = service_account_credentials<|docstring|>Sets the service_account_credentials of this GcsServiceAccountInput.
GCS projectId (required)
:param service_account_credentials: The service_account_credentials of this GcsServiceAccountInput.
:type: string_types<|endoftext|> |
c4295fe81578a9c37bf52f5be5fe81dd605ac4722ac71ac7966029d0aa630cf4 | @property
def bucket_name(self):
'Gets the bucket_name of this GcsServiceAccountInput.\n\n Name of the bucket (required)\n\n :return: The bucket_name of this GcsServiceAccountInput.\n :rtype: string_types\n '
return self._bucket_name | Gets the bucket_name of this GcsServiceAccountInput.
Name of the bucket (required)
:return: The bucket_name of this GcsServiceAccountInput.
:rtype: string_types | bitmovin_api_sdk/models/gcs_service_account_input.py | bucket_name | hofmannben/bitmovin-api-sdk-python | 0 | python | @property
def bucket_name(self):
'Gets the bucket_name of this GcsServiceAccountInput.\n\n Name of the bucket (required)\n\n :return: The bucket_name of this GcsServiceAccountInput.\n :rtype: string_types\n '
return self._bucket_name | @property
def bucket_name(self):
'Gets the bucket_name of this GcsServiceAccountInput.\n\n Name of the bucket (required)\n\n :return: The bucket_name of this GcsServiceAccountInput.\n :rtype: string_types\n '
return self._bucket_name<|docstring|>Gets the bucket_name of this GcsServiceAccountInput.
Name of the bucket (required)
:return: The bucket_name of this GcsServiceAccountInput.
:rtype: string_types<|endoftext|> |
8027e5e2653d3545548ee31729fab852f8a10f10ec4ef50b2843d3308dff7961 | @bucket_name.setter
def bucket_name(self, bucket_name):
'Sets the bucket_name of this GcsServiceAccountInput.\n\n Name of the bucket (required)\n\n :param bucket_name: The bucket_name of this GcsServiceAccountInput.\n :type: string_types\n '
if (bucket_name is not None):
if (not isinstance(bucket_name, string_types)):
raise TypeError('Invalid type for `bucket_name`, type has to be `string_types`')
self._bucket_name = bucket_name | Sets the bucket_name of this GcsServiceAccountInput.
Name of the bucket (required)
:param bucket_name: The bucket_name of this GcsServiceAccountInput.
:type: string_types | bitmovin_api_sdk/models/gcs_service_account_input.py | bucket_name | hofmannben/bitmovin-api-sdk-python | 0 | python | @bucket_name.setter
def bucket_name(self, bucket_name):
'Sets the bucket_name of this GcsServiceAccountInput.\n\n Name of the bucket (required)\n\n :param bucket_name: The bucket_name of this GcsServiceAccountInput.\n :type: string_types\n '
if (bucket_name is not None):
if (not isinstance(bucket_name, string_types)):
raise TypeError('Invalid type for `bucket_name`, type has to be `string_types`')
self._bucket_name = bucket_name | @bucket_name.setter
def bucket_name(self, bucket_name):
'Sets the bucket_name of this GcsServiceAccountInput.\n\n Name of the bucket (required)\n\n :param bucket_name: The bucket_name of this GcsServiceAccountInput.\n :type: string_types\n '
if (bucket_name is not None):
if (not isinstance(bucket_name, string_types)):
raise TypeError('Invalid type for `bucket_name`, type has to be `string_types`')
self._bucket_name = bucket_name<|docstring|>Sets the bucket_name of this GcsServiceAccountInput.
Name of the bucket (required)
:param bucket_name: The bucket_name of this GcsServiceAccountInput.
:type: string_types<|endoftext|> |
4e56e3c5ad60a767f1b10488938382689fea380c72fd9cfe7e37858c09ac0771 | @property
def cloud_region(self):
'Gets the cloud_region of this GcsServiceAccountInput.\n\n\n :return: The cloud_region of this GcsServiceAccountInput.\n :rtype: GoogleCloudRegion\n '
return self._cloud_region | Gets the cloud_region of this GcsServiceAccountInput.
:return: The cloud_region of this GcsServiceAccountInput.
:rtype: GoogleCloudRegion | bitmovin_api_sdk/models/gcs_service_account_input.py | cloud_region | hofmannben/bitmovin-api-sdk-python | 0 | python | @property
def cloud_region(self):
'Gets the cloud_region of this GcsServiceAccountInput.\n\n\n :return: The cloud_region of this GcsServiceAccountInput.\n :rtype: GoogleCloudRegion\n '
return self._cloud_region | @property
def cloud_region(self):
'Gets the cloud_region of this GcsServiceAccountInput.\n\n\n :return: The cloud_region of this GcsServiceAccountInput.\n :rtype: GoogleCloudRegion\n '
return self._cloud_region<|docstring|>Gets the cloud_region of this GcsServiceAccountInput.
:return: The cloud_region of this GcsServiceAccountInput.
:rtype: GoogleCloudRegion<|endoftext|> |
969890afc71c9348eb9597efb30d82f373d442cf0ebf886c4894773a73d2d69e | @cloud_region.setter
def cloud_region(self, cloud_region):
'Sets the cloud_region of this GcsServiceAccountInput.\n\n\n :param cloud_region: The cloud_region of this GcsServiceAccountInput.\n :type: GoogleCloudRegion\n '
if (cloud_region is not None):
if (not isinstance(cloud_region, GoogleCloudRegion)):
raise TypeError('Invalid type for `cloud_region`, type has to be `GoogleCloudRegion`')
self._cloud_region = cloud_region | Sets the cloud_region of this GcsServiceAccountInput.
:param cloud_region: The cloud_region of this GcsServiceAccountInput.
:type: GoogleCloudRegion | bitmovin_api_sdk/models/gcs_service_account_input.py | cloud_region | hofmannben/bitmovin-api-sdk-python | 0 | python | @cloud_region.setter
def cloud_region(self, cloud_region):
'Sets the cloud_region of this GcsServiceAccountInput.\n\n\n :param cloud_region: The cloud_region of this GcsServiceAccountInput.\n :type: GoogleCloudRegion\n '
if (cloud_region is not None):
if (not isinstance(cloud_region, GoogleCloudRegion)):
raise TypeError('Invalid type for `cloud_region`, type has to be `GoogleCloudRegion`')
self._cloud_region = cloud_region | @cloud_region.setter
def cloud_region(self, cloud_region):
'Sets the cloud_region of this GcsServiceAccountInput.\n\n\n :param cloud_region: The cloud_region of this GcsServiceAccountInput.\n :type: GoogleCloudRegion\n '
if (cloud_region is not None):
if (not isinstance(cloud_region, GoogleCloudRegion)):
raise TypeError('Invalid type for `cloud_region`, type has to be `GoogleCloudRegion`')
self._cloud_region = cloud_region<|docstring|>Sets the cloud_region of this GcsServiceAccountInput.
:param cloud_region: The cloud_region of this GcsServiceAccountInput.
:type: GoogleCloudRegion<|endoftext|> |
3abc7a570bc2e31f3247858c97dd4466d64a002319b022f550c269539745bb90 | def to_dict(self):
'Returns the model properties as a dict'
result = {}
if hasattr(super(GcsServiceAccountInput, self), 'to_dict'):
result = super(GcsServiceAccountInput, self).to_dict()
for (attr, _) in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if (value is None):
continue
if isinstance(value, list):
if (len(value) == 0):
continue
result[self.attribute_map.get(attr)] = [(y.value if isinstance(y, Enum) else y) for y in [(x.to_dict() if hasattr(x, 'to_dict') else x) for x in value]]
elif hasattr(value, 'to_dict'):
result[self.attribute_map.get(attr)] = value.to_dict()
elif isinstance(value, Enum):
result[self.attribute_map.get(attr)] = value.value
elif isinstance(value, dict):
result[self.attribute_map.get(attr)] = {k: (v.to_dict() if hasattr(v, 'to_dict') else v) for (k, v) in value.items()}
else:
result[self.attribute_map.get(attr)] = value
return result | Returns the model properties as a dict | bitmovin_api_sdk/models/gcs_service_account_input.py | to_dict | hofmannben/bitmovin-api-sdk-python | 0 | python | def to_dict(self):
result = {}
if hasattr(super(GcsServiceAccountInput, self), 'to_dict'):
result = super(GcsServiceAccountInput, self).to_dict()
for (attr, _) in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if (value is None):
continue
if isinstance(value, list):
if (len(value) == 0):
continue
result[self.attribute_map.get(attr)] = [(y.value if isinstance(y, Enum) else y) for y in [(x.to_dict() if hasattr(x, 'to_dict') else x) for x in value]]
elif hasattr(value, 'to_dict'):
result[self.attribute_map.get(attr)] = value.to_dict()
elif isinstance(value, Enum):
result[self.attribute_map.get(attr)] = value.value
elif isinstance(value, dict):
result[self.attribute_map.get(attr)] = {k: (v.to_dict() if hasattr(v, 'to_dict') else v) for (k, v) in value.items()}
else:
result[self.attribute_map.get(attr)] = value
return result | def to_dict(self):
result = {}
if hasattr(super(GcsServiceAccountInput, self), 'to_dict'):
result = super(GcsServiceAccountInput, self).to_dict()
for (attr, _) in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if (value is None):
continue
if isinstance(value, list):
if (len(value) == 0):
continue
result[self.attribute_map.get(attr)] = [(y.value if isinstance(y, Enum) else y) for y in [(x.to_dict() if hasattr(x, 'to_dict') else x) for x in value]]
elif hasattr(value, 'to_dict'):
result[self.attribute_map.get(attr)] = value.to_dict()
elif isinstance(value, Enum):
result[self.attribute_map.get(attr)] = value.value
elif isinstance(value, dict):
result[self.attribute_map.get(attr)] = {k: (v.to_dict() if hasattr(v, 'to_dict') else v) for (k, v) in value.items()}
else:
result[self.attribute_map.get(attr)] = value
return result<|docstring|>Returns the model properties as a dict<|endoftext|> |
cbb19eaa2fc8a113d9e32f924ef280a7e97563f8915f94f65dab438997af2e99 | def to_str(self):
'Returns the string representation of the model'
return pprint.pformat(self.to_dict()) | Returns the string representation of the model | bitmovin_api_sdk/models/gcs_service_account_input.py | to_str | hofmannben/bitmovin-api-sdk-python | 0 | python | def to_str(self):
return pprint.pformat(self.to_dict()) | def to_str(self):
return pprint.pformat(self.to_dict())<|docstring|>Returns the string representation of the model<|endoftext|> |
772243a2c2b3261a9b954d07aaf295e3c1242a579a495e2d6a5679c677861703 | def __repr__(self):
'For `print` and `pprint`'
return self.to_str() | For `print` and `pprint` | bitmovin_api_sdk/models/gcs_service_account_input.py | __repr__ | hofmannben/bitmovin-api-sdk-python | 0 | python | def __repr__(self):
return self.to_str() | def __repr__(self):
return self.to_str()<|docstring|>For `print` and `pprint`<|endoftext|> |
a326d0b343d9e66e2a4a8be9ec5d8614d0d6f31a90f1a95f91df8d90bd96e1d8 | def __eq__(self, other):
'Returns true if both objects are equal'
if (not isinstance(other, GcsServiceAccountInput)):
return False
return (self.__dict__ == other.__dict__) | Returns true if both objects are equal | bitmovin_api_sdk/models/gcs_service_account_input.py | __eq__ | hofmannben/bitmovin-api-sdk-python | 0 | python | def __eq__(self, other):
if (not isinstance(other, GcsServiceAccountInput)):
return False
return (self.__dict__ == other.__dict__) | def __eq__(self, other):
if (not isinstance(other, GcsServiceAccountInput)):
return False
return (self.__dict__ == other.__dict__)<|docstring|>Returns true if both objects are equal<|endoftext|> |
43dc6740163eb9fc1161d09cb2208a64c7ad0cc8d9c8637ac3264522d3ec7e42 | def __ne__(self, other):
'Returns true if both objects are not equal'
return (not (self == other)) | Returns true if both objects are not equal | bitmovin_api_sdk/models/gcs_service_account_input.py | __ne__ | hofmannben/bitmovin-api-sdk-python | 0 | python | def __ne__(self, other):
return (not (self == other)) | def __ne__(self, other):
return (not (self == other))<|docstring|>Returns true if both objects are not equal<|endoftext|> |
770334572a345559fd769443c0fa55832a59035d9d75e08eafa481603a4b49b1 | def softmax_accuracy(preds, labels):
'\n Accuracy for multiclass model.\n :param preds: predictions\n :param labels: ground truth labelt\n :return: average accuracy\n '
correct_prediction = tf.equal(tf.argmax(preds, 1), tf.to_int64(labels))
accuracy_all = tf.cast(correct_prediction, tf.float32)
return tf.reduce_mean(accuracy_all) | Accuracy for multiclass model.
:param preds: predictions
:param labels: ground truth labelt
:return: average accuracy | model/metrics.py | softmax_accuracy | gayalkuruppu/visual-compatibility | 89 | python | def softmax_accuracy(preds, labels):
'\n Accuracy for multiclass model.\n :param preds: predictions\n :param labels: ground truth labelt\n :return: average accuracy\n '
correct_prediction = tf.equal(tf.argmax(preds, 1), tf.to_int64(labels))
accuracy_all = tf.cast(correct_prediction, tf.float32)
return tf.reduce_mean(accuracy_all) | def softmax_accuracy(preds, labels):
'\n Accuracy for multiclass model.\n :param preds: predictions\n :param labels: ground truth labelt\n :return: average accuracy\n '
correct_prediction = tf.equal(tf.argmax(preds, 1), tf.to_int64(labels))
accuracy_all = tf.cast(correct_prediction, tf.float32)
return tf.reduce_mean(accuracy_all)<|docstring|>Accuracy for multiclass model.
:param preds: predictions
:param labels: ground truth labelt
:return: average accuracy<|endoftext|> |
e4420253ede27aa764febcb96d9d9ccc08fed75999e0f48ebea0146666f8597f | def sigmoid_accuracy(preds, labels):
'\n Accuracy for binary class model.\n :param preds: predictions\n :param labels: ground truth label\n :return: average accuracy\n '
correct_prediction = tf.equal(tf.cast((preds >= 0.0), tf.int64), tf.to_int64(labels))
accuracy_all = tf.cast(correct_prediction, tf.float32)
return tf.reduce_mean(accuracy_all) | Accuracy for binary class model.
:param preds: predictions
:param labels: ground truth label
:return: average accuracy | model/metrics.py | sigmoid_accuracy | gayalkuruppu/visual-compatibility | 89 | python | def sigmoid_accuracy(preds, labels):
'\n Accuracy for binary class model.\n :param preds: predictions\n :param labels: ground truth label\n :return: average accuracy\n '
correct_prediction = tf.equal(tf.cast((preds >= 0.0), tf.int64), tf.to_int64(labels))
accuracy_all = tf.cast(correct_prediction, tf.float32)
return tf.reduce_mean(accuracy_all) | def sigmoid_accuracy(preds, labels):
'\n Accuracy for binary class model.\n :param preds: predictions\n :param labels: ground truth label\n :return: average accuracy\n '
correct_prediction = tf.equal(tf.cast((preds >= 0.0), tf.int64), tf.to_int64(labels))
accuracy_all = tf.cast(correct_prediction, tf.float32)
return tf.reduce_mean(accuracy_all)<|docstring|>Accuracy for binary class model.
:param preds: predictions
:param labels: ground truth label
:return: average accuracy<|endoftext|> |
048c1c0be2220f0b9923f6c8589cf79779261b523ad690437eaf4335841a89c7 | def binary_accuracy(preds, labels):
'\n Accuracy for binary class model.\n :param preds: predictions\n :param labels: ground truth label\n :return: average accuracy\n '
correct_prediction = tf.equal(tf.cast((preds >= 0.5), tf.int64), tf.to_int64(labels))
accuracy_all = tf.cast(correct_prediction, tf.float32)
return tf.reduce_mean(accuracy_all) | Accuracy for binary class model.
:param preds: predictions
:param labels: ground truth label
:return: average accuracy | model/metrics.py | binary_accuracy | gayalkuruppu/visual-compatibility | 89 | python | def binary_accuracy(preds, labels):
'\n Accuracy for binary class model.\n :param preds: predictions\n :param labels: ground truth label\n :return: average accuracy\n '
correct_prediction = tf.equal(tf.cast((preds >= 0.5), tf.int64), tf.to_int64(labels))
accuracy_all = tf.cast(correct_prediction, tf.float32)
return tf.reduce_mean(accuracy_all) | def binary_accuracy(preds, labels):
'\n Accuracy for binary class model.\n :param preds: predictions\n :param labels: ground truth label\n :return: average accuracy\n '
correct_prediction = tf.equal(tf.cast((preds >= 0.5), tf.int64), tf.to_int64(labels))
accuracy_all = tf.cast(correct_prediction, tf.float32)
return tf.reduce_mean(accuracy_all)<|docstring|>Accuracy for binary class model.
:param preds: predictions
:param labels: ground truth label
:return: average accuracy<|endoftext|> |
ae30e30aa069a1e84436443d760c40a817f4059c5fc5108a14bb1c891c868aed | def softmax_confusion_matrix(preds, labels):
'\n Computes the confusion matrix. The rows are real labels, and columns the\n predictions.\n '
int_preds = (preds >= 0.0)
int_preds = tf.cast(int_preds, tf.int32)
return tf.confusion_matrix(labels, int_preds) | Computes the confusion matrix. The rows are real labels, and columns the
predictions. | model/metrics.py | softmax_confusion_matrix | gayalkuruppu/visual-compatibility | 89 | python | def softmax_confusion_matrix(preds, labels):
'\n Computes the confusion matrix. The rows are real labels, and columns the\n predictions.\n '
int_preds = (preds >= 0.0)
int_preds = tf.cast(int_preds, tf.int32)
return tf.confusion_matrix(labels, int_preds) | def softmax_confusion_matrix(preds, labels):
'\n Computes the confusion matrix. The rows are real labels, and columns the\n predictions.\n '
int_preds = (preds >= 0.0)
int_preds = tf.cast(int_preds, tf.int32)
return tf.confusion_matrix(labels, int_preds)<|docstring|>Computes the confusion matrix. The rows are real labels, and columns the
predictions.<|endoftext|> |
47474430da1ea35e3772a5c9641a40ac57b9957e4ac694f0bb5551816c744388 | def softmax_cross_entropy(outputs, labels):
' computes average softmax cross entropy '
loss = tf.nn.sparse_softmax_cross_entropy_with_logits(logits=outputs, labels=labels)
return tf.reduce_mean(loss) | computes average softmax cross entropy | model/metrics.py | softmax_cross_entropy | gayalkuruppu/visual-compatibility | 89 | python | def softmax_cross_entropy(outputs, labels):
' '
loss = tf.nn.sparse_softmax_cross_entropy_with_logits(logits=outputs, labels=labels)
return tf.reduce_mean(loss) | def softmax_cross_entropy(outputs, labels):
' '
loss = tf.nn.sparse_softmax_cross_entropy_with_logits(logits=outputs, labels=labels)
return tf.reduce_mean(loss)<|docstring|>computes average softmax cross entropy<|endoftext|> |
50c4032d3470bbe25f18dbf1cf49ee5d41f1da134031fb3496449f2f2b413e8e | def sigmoid_cross_entropy(outputs, labels):
' computes average binary cross entropy '
loss = tf.nn.sigmoid_cross_entropy_with_logits(logits=outputs, labels=labels)
return tf.reduce_mean(loss) | computes average binary cross entropy | model/metrics.py | sigmoid_cross_entropy | gayalkuruppu/visual-compatibility | 89 | python | def sigmoid_cross_entropy(outputs, labels):
' '
loss = tf.nn.sigmoid_cross_entropy_with_logits(logits=outputs, labels=labels)
return tf.reduce_mean(loss) | def sigmoid_cross_entropy(outputs, labels):
' '
loss = tf.nn.sigmoid_cross_entropy_with_logits(logits=outputs, labels=labels)
return tf.reduce_mean(loss)<|docstring|>computes average binary cross entropy<|endoftext|> |
dbc6a82cbdbfe88a0345a26e314dfc14cc591822baaa967cf80a94d390b311bf | def resume(self):
'This method resumes the inertia that was previously suppressed.\n '
pass | This method resumes the inertia that was previously suppressed. | src/abaqus/EngineeringFeature/Inertia.py | resume | Haiiliin/PyAbaqusBase | 7 | python | def resume(self):
'\n '
pass | def resume(self):
'\n '
pass<|docstring|>This method resumes the inertia that was previously suppressed.<|endoftext|> |
28ac0d5eb19809bfaaa0c50b9a215ada64c8a35c9887320824bc1c9203b8f7a4 | def suppress(self):
'This method suppresses the inertia.\n '
pass | This method suppresses the inertia. | src/abaqus/EngineeringFeature/Inertia.py | suppress | Haiiliin/PyAbaqusBase | 7 | python | def suppress(self):
'\n '
pass | def suppress(self):
'\n '
pass<|docstring|>This method suppresses the inertia.<|endoftext|> |
8afb1f56f40a61a4bc8ea066bcb4e7ba2f3db9c10bcfa2eaeffa50fe1c4fc419 | def compile_slot_mapping(typ: TypeInfo) -> List[Type]:
'Return types that represent values of type variable slots of a type.\n\n The returned types are in terms of type variables of the type.\n \n For example, assume these definitions:\n \n class D(Generic[S]): ...\n class C(D[E[S]], Generic[T, S]): ...\n \n Now slot mappings for C is [E[S], T] (S and T refer to type variables of\n C).\n '
exprs = []
for slot in range(num_slots(typ)):
(origin, tv) = find_slot_origin(typ, slot)
selftype = self_type(typ)
selftype = map_instance_to_supertype(selftype, origin)
tvar = selftype.args[(tv - 1)]
exprs.append(tvar)
return exprs | Return types that represent values of type variable slots of a type.
The returned types are in terms of type variables of the type.
For example, assume these definitions:
class D(Generic[S]): ...
class C(D[E[S]], Generic[T, S]): ...
Now slot mappings for C is [E[S], T] (S and T refer to type variables of
C). | mypy/compileslotmap.py | compile_slot_mapping | TimSimpsonR/mypy | 1 | python | def compile_slot_mapping(typ: TypeInfo) -> List[Type]:
'Return types that represent values of type variable slots of a type.\n\n The returned types are in terms of type variables of the type.\n \n For example, assume these definitions:\n \n class D(Generic[S]): ...\n class C(D[E[S]], Generic[T, S]): ...\n \n Now slot mappings for C is [E[S], T] (S and T refer to type variables of\n C).\n '
exprs = []
for slot in range(num_slots(typ)):
(origin, tv) = find_slot_origin(typ, slot)
selftype = self_type(typ)
selftype = map_instance_to_supertype(selftype, origin)
tvar = selftype.args[(tv - 1)]
exprs.append(tvar)
return exprs | def compile_slot_mapping(typ: TypeInfo) -> List[Type]:
'Return types that represent values of type variable slots of a type.\n\n The returned types are in terms of type variables of the type.\n \n For example, assume these definitions:\n \n class D(Generic[S]): ...\n class C(D[E[S]], Generic[T, S]): ...\n \n Now slot mappings for C is [E[S], T] (S and T refer to type variables of\n C).\n '
exprs = []
for slot in range(num_slots(typ)):
(origin, tv) = find_slot_origin(typ, slot)
selftype = self_type(typ)
selftype = map_instance_to_supertype(selftype, origin)
tvar = selftype.args[(tv - 1)]
exprs.append(tvar)
return exprs<|docstring|>Return types that represent values of type variable slots of a type.
The returned types are in terms of type variables of the type.
For example, assume these definitions:
class D(Generic[S]): ...
class C(D[E[S]], Generic[T, S]): ...
Now slot mappings for C is [E[S], T] (S and T refer to type variables of
C).<|endoftext|> |
198acae43e7e5f50e1884b7cd553c024e95c2e28449f6111b36f2ccf903ed611 | def find_slot_origin(info: TypeInfo, slot: int) -> Tuple[(TypeInfo, int)]:
"Determine class and type variable index that directly maps to the slot.\n\n The result defines which class in inheritance hierarchy of info introduced\n the slot. All subclasses inherit this slot. The result TypeInfo always\n refers to one of the base classes of info (or info itself).\n\n Examples:\n - In 'class C(Generic[T]): ...', the slot 0 in C is mapped to\n type var 1 (T) in C.\n - In 'class D(C[U], Generic[S, U]): ...', the slot 0 in D is mapped\n to type var 1 (T) in C; the slot 1 of D is mapped to type variable 1\n of D.\n "
base = info.bases[0].type
super_slots = num_slots(base)
if (slot < super_slots):
return find_slot_origin(base, slot)
else:
for tv in range(1, (len(info.type_vars) + 1)):
if ((get_tvar_access_path(info, tv)[0] - 1) == slot):
return (info, tv)
raise RuntimeError('Could not map slot') | Determine class and type variable index that directly maps to the slot.
The result defines which class in inheritance hierarchy of info introduced
the slot. All subclasses inherit this slot. The result TypeInfo always
refers to one of the base classes of info (or info itself).
Examples:
- In 'class C(Generic[T]): ...', the slot 0 in C is mapped to
type var 1 (T) in C.
- In 'class D(C[U], Generic[S, U]): ...', the slot 0 in D is mapped
to type var 1 (T) in C; the slot 1 of D is mapped to type variable 1
of D. | mypy/compileslotmap.py | find_slot_origin | TimSimpsonR/mypy | 1 | python | def find_slot_origin(info: TypeInfo, slot: int) -> Tuple[(TypeInfo, int)]:
"Determine class and type variable index that directly maps to the slot.\n\n The result defines which class in inheritance hierarchy of info introduced\n the slot. All subclasses inherit this slot. The result TypeInfo always\n refers to one of the base classes of info (or info itself).\n\n Examples:\n - In 'class C(Generic[T]): ...', the slot 0 in C is mapped to\n type var 1 (T) in C.\n - In 'class D(C[U], Generic[S, U]): ...', the slot 0 in D is mapped\n to type var 1 (T) in C; the slot 1 of D is mapped to type variable 1\n of D.\n "
base = info.bases[0].type
super_slots = num_slots(base)
if (slot < super_slots):
return find_slot_origin(base, slot)
else:
for tv in range(1, (len(info.type_vars) + 1)):
if ((get_tvar_access_path(info, tv)[0] - 1) == slot):
return (info, tv)
raise RuntimeError('Could not map slot') | def find_slot_origin(info: TypeInfo, slot: int) -> Tuple[(TypeInfo, int)]:
"Determine class and type variable index that directly maps to the slot.\n\n The result defines which class in inheritance hierarchy of info introduced\n the slot. All subclasses inherit this slot. The result TypeInfo always\n refers to one of the base classes of info (or info itself).\n\n Examples:\n - In 'class C(Generic[T]): ...', the slot 0 in C is mapped to\n type var 1 (T) in C.\n - In 'class D(C[U], Generic[S, U]): ...', the slot 0 in D is mapped\n to type var 1 (T) in C; the slot 1 of D is mapped to type variable 1\n of D.\n "
base = info.bases[0].type
super_slots = num_slots(base)
if (slot < super_slots):
return find_slot_origin(base, slot)
else:
for tv in range(1, (len(info.type_vars) + 1)):
if ((get_tvar_access_path(info, tv)[0] - 1) == slot):
return (info, tv)
raise RuntimeError('Could not map slot')<|docstring|>Determine class and type variable index that directly maps to the slot.
The result defines which class in inheritance hierarchy of info introduced
the slot. All subclasses inherit this slot. The result TypeInfo always
refers to one of the base classes of info (or info itself).
Examples:
- In 'class C(Generic[T]): ...', the slot 0 in C is mapped to
type var 1 (T) in C.
- In 'class D(C[U], Generic[S, U]): ...', the slot 0 in D is mapped
to type var 1 (T) in C; the slot 1 of D is mapped to type variable 1
of D.<|endoftext|> |
218ff3fecbd47ad45d7081625b35c8827df5a313a757350363da14a5a76b1a5c | def get_first_result(self, request_response_list):
' Gets the result field from the first response. '
return request_response_list[0][1][RESULT] | Gets the result field from the first response. | sovtoken/sovtoken/test/helpers/helper_sdk.py | get_first_result | anikitinDSR/token-plugin | 0 | python | def get_first_result(self, request_response_list):
' '
return request_response_list[0][1][RESULT] | def get_first_result(self, request_response_list):
' '
return request_response_list[0][1][RESULT]<|docstring|>Gets the result field from the first response.<|endoftext|> |
93c31e6f20c5f583436c851a0784d13569822168c6986a3085070b8fcd3c1eed | def prepare_request_objects(self, request_objects, wallet=None, sign=False):
' Prepares the request to be sent by transforming it into json and sign. '
if (sign and all(((not (req.signature or req.signatures)) for req in request_objects))):
requests = self.sdk_sign_request_objects(request_objects, wallet)
else:
requests = [json.dumps(request.as_dict) for request in request_objects]
return requests | Prepares the request to be sent by transforming it into json and sign. | sovtoken/sovtoken/test/helpers/helper_sdk.py | prepare_request_objects | anikitinDSR/token-plugin | 0 | python | def prepare_request_objects(self, request_objects, wallet=None, sign=False):
' '
if (sign and all(((not (req.signature or req.signatures)) for req in request_objects))):
requests = self.sdk_sign_request_objects(request_objects, wallet)
else:
requests = [json.dumps(request.as_dict) for request in request_objects]
return requests | def prepare_request_objects(self, request_objects, wallet=None, sign=False):
' '
if (sign and all(((not (req.signature or req.signatures)) for req in request_objects))):
requests = self.sdk_sign_request_objects(request_objects, wallet)
else:
requests = [json.dumps(request.as_dict) for request in request_objects]
return requests<|docstring|>Prepares the request to be sent by transforming it into json and sign.<|endoftext|> |
297fb7f2401e6e9b4654da1e166b28611acfedcfb7018cbdcdf05546cfd34ee4 | def send_and_check_request_objects(self, request_objects, wallet=None, sign=True):
'\n Sends the request objects and checks the replies are valid.\n\n Returns a list of request_response tuples.\n '
requests = self.prepare_request_objects(request_objects, wallet, sign)
return self.sdk_send_and_check(requests) | Sends the request objects and checks the replies are valid.
Returns a list of request_response tuples. | sovtoken/sovtoken/test/helpers/helper_sdk.py | send_and_check_request_objects | anikitinDSR/token-plugin | 0 | python | def send_and_check_request_objects(self, request_objects, wallet=None, sign=True):
'\n Sends the request objects and checks the replies are valid.\n\n Returns a list of request_response tuples.\n '
requests = self.prepare_request_objects(request_objects, wallet, sign)
return self.sdk_send_and_check(requests) | def send_and_check_request_objects(self, request_objects, wallet=None, sign=True):
'\n Sends the request objects and checks the replies are valid.\n\n Returns a list of request_response tuples.\n '
requests = self.prepare_request_objects(request_objects, wallet, sign)
return self.sdk_send_and_check(requests)<|docstring|>Sends the request objects and checks the replies are valid.
Returns a list of request_response tuples.<|endoftext|> |
4eacc94c9a4adac372cde3a4d48d837ea07e7db99fcc78e045fc417c2b328665 | def send_request_objects(self, request_objects, wallet=None, sign=True):
' Sends the request objects '
requests = self.prepare_request_objects(request_objects, wallet, sign)
return self.sdk_send_signed_requests(requests) | Sends the request objects | sovtoken/sovtoken/test/helpers/helper_sdk.py | send_request_objects | anikitinDSR/token-plugin | 0 | python | def send_request_objects(self, request_objects, wallet=None, sign=True):
' '
requests = self.prepare_request_objects(request_objects, wallet, sign)
return self.sdk_send_signed_requests(requests) | def send_request_objects(self, request_objects, wallet=None, sign=True):
' '
requests = self.prepare_request_objects(request_objects, wallet, sign)
return self.sdk_send_signed_requests(requests)<|docstring|>Sends the request objects<|endoftext|> |
57629fd6d9aa618204bb4eb24ab669baa9893a4d0b40c79950bc774eda2c0bb5 | def __virtual__():
'\n Only load if buildout libs are present\n '
return __virtualname__ | Only load if buildout libs are present | salt/modules/zcbuildout.py | __virtual__ | Flowdalic/salt | 9,425 | python | def __virtual__():
'\n \n '
return __virtualname__ | def __virtual__():
'\n \n '
return __virtualname__<|docstring|>Only load if buildout libs are present<|endoftext|> |
1bb9f829d13c4b8e3cfd26ac399b40964409be8aa1f709bf8d21b2c54977beaf | def _set_status(m, comment=INVALID_RESPONSE, status=False, out=None):
'\n Assign status data to a dict.\n '
m['out'] = out
m['status'] = status
m['logs'] = LOG.messages[:]
m['logs_by_level'] = LOG.by_level.copy()
(outlog, outlog_by_level) = ('', '')
m['comment'] = comment
if (out and isinstance(out, str)):
outlog += HR
outlog += 'OUTPUT:\n'
outlog += '{}\n'.format(salt.utils.stringutils.to_unicode(out))
outlog += HR
if m['logs']:
outlog += HR
outlog += 'Log summary:\n'
outlog += HR
outlog_by_level += HR
outlog_by_level += 'Log summary by level:\n'
outlog_by_level += HR
for (level, msg) in m['logs']:
outlog += '\n{}: {}\n'.format(level.upper(), salt.utils.stringutils.to_unicode(msg))
for logger in ('error', 'warn', 'info', 'debug'):
logs = m['logs_by_level'].get(logger, [])
if logs:
outlog_by_level += '\n{}:\n'.format(logger.upper())
for (idx, log) in enumerate(logs[:]):
logs[idx] = salt.utils.stringutils.to_unicode(log)
outlog_by_level += '\n'.join(logs)
outlog_by_level += '\n'
outlog += HR
m['outlog'] = outlog
m['outlog_by_level'] = outlog_by_level
return _encode_status(m) | Assign status data to a dict. | salt/modules/zcbuildout.py | _set_status | Flowdalic/salt | 9,425 | python | def _set_status(m, comment=INVALID_RESPONSE, status=False, out=None):
'\n \n '
m['out'] = out
m['status'] = status
m['logs'] = LOG.messages[:]
m['logs_by_level'] = LOG.by_level.copy()
(outlog, outlog_by_level) = (, )
m['comment'] = comment
if (out and isinstance(out, str)):
outlog += HR
outlog += 'OUTPUT:\n'
outlog += '{}\n'.format(salt.utils.stringutils.to_unicode(out))
outlog += HR
if m['logs']:
outlog += HR
outlog += 'Log summary:\n'
outlog += HR
outlog_by_level += HR
outlog_by_level += 'Log summary by level:\n'
outlog_by_level += HR
for (level, msg) in m['logs']:
outlog += '\n{}: {}\n'.format(level.upper(), salt.utils.stringutils.to_unicode(msg))
for logger in ('error', 'warn', 'info', 'debug'):
logs = m['logs_by_level'].get(logger, [])
if logs:
outlog_by_level += '\n{}:\n'.format(logger.upper())
for (idx, log) in enumerate(logs[:]):
logs[idx] = salt.utils.stringutils.to_unicode(log)
outlog_by_level += '\n'.join(logs)
outlog_by_level += '\n'
outlog += HR
m['outlog'] = outlog
m['outlog_by_level'] = outlog_by_level
return _encode_status(m) | def _set_status(m, comment=INVALID_RESPONSE, status=False, out=None):
'\n \n '
m['out'] = out
m['status'] = status
m['logs'] = LOG.messages[:]
m['logs_by_level'] = LOG.by_level.copy()
(outlog, outlog_by_level) = (, )
m['comment'] = comment
if (out and isinstance(out, str)):
outlog += HR
outlog += 'OUTPUT:\n'
outlog += '{}\n'.format(salt.utils.stringutils.to_unicode(out))
outlog += HR
if m['logs']:
outlog += HR
outlog += 'Log summary:\n'
outlog += HR
outlog_by_level += HR
outlog_by_level += 'Log summary by level:\n'
outlog_by_level += HR
for (level, msg) in m['logs']:
outlog += '\n{}: {}\n'.format(level.upper(), salt.utils.stringutils.to_unicode(msg))
for logger in ('error', 'warn', 'info', 'debug'):
logs = m['logs_by_level'].get(logger, [])
if logs:
outlog_by_level += '\n{}:\n'.format(logger.upper())
for (idx, log) in enumerate(logs[:]):
logs[idx] = salt.utils.stringutils.to_unicode(log)
outlog_by_level += '\n'.join(logs)
outlog_by_level += '\n'
outlog += HR
m['outlog'] = outlog
m['outlog_by_level'] = outlog_by_level
return _encode_status(m)<|docstring|>Assign status data to a dict.<|endoftext|> |
fd32898d989cb7912329aee9ca1e8039dce7f072c463ce18e09f05bc0dd0903e | def _invalid(m, comment=INVALID_RESPONSE, out=None):
'\n Return invalid status.\n '
return _set_status(m, status=False, comment=comment, out=out) | Return invalid status. | salt/modules/zcbuildout.py | _invalid | Flowdalic/salt | 9,425 | python | def _invalid(m, comment=INVALID_RESPONSE, out=None):
'\n \n '
return _set_status(m, status=False, comment=comment, out=out) | def _invalid(m, comment=INVALID_RESPONSE, out=None):
'\n \n '
return _set_status(m, status=False, comment=comment, out=out)<|docstring|>Return invalid status.<|endoftext|> |
f5e033fd1a03cf0dd654100b87c5b4f58bc94ce6617df87525d63cca6f0d474c | def _valid(m, comment=VALID_RESPONSE, out=None):
'\n Return valid status.\n '
return _set_status(m, status=True, comment=comment, out=out) | Return valid status. | salt/modules/zcbuildout.py | _valid | Flowdalic/salt | 9,425 | python | def _valid(m, comment=VALID_RESPONSE, out=None):
'\n \n '
return _set_status(m, status=True, comment=comment, out=out) | def _valid(m, comment=VALID_RESPONSE, out=None):
'\n \n '
return _set_status(m, status=True, comment=comment, out=out)<|docstring|>Return valid status.<|endoftext|> |
220768a05a87c3d64b610d0b93be75b23d76d1c863edf9dac4d8fde4e28bb20b | def _Popen(command, output=False, directory='.', runas=None, env=(), exitcode=0, use_vt=False, loglevel=None):
'\n Run a command.\n\n output\n return output if true\n\n directory\n directory to execute in\n\n runas\n user used to run buildout as\n\n env\n environment variables to set when running\n\n exitcode\n fails if cmd does not return this exit code\n (set to None to disable check)\n\n use_vt\n Use the new salt VT to stream output [experimental]\n\n '
ret = None
directory = os.path.abspath(directory)
if isinstance(command, list):
command = ' '.join(command)
LOG.debug('Running {}'.format(command))
if (not loglevel):
loglevel = 'debug'
ret = __salt__['cmd.run_all'](command, cwd=directory, output_loglevel=loglevel, runas=runas, env=env, use_vt=use_vt, python_shell=False)
out = ((ret['stdout'] + '\n\n') + ret['stderr'])
if ((exitcode is not None) and (ret['retcode'] != exitcode)):
raise _BuildoutError(out)
ret['output'] = out
if output:
ret = out
return ret | Run a command.
output
return output if true
directory
directory to execute in
runas
user used to run buildout as
env
environment variables to set when running
exitcode
fails if cmd does not return this exit code
(set to None to disable check)
use_vt
Use the new salt VT to stream output [experimental] | salt/modules/zcbuildout.py | _Popen | Flowdalic/salt | 9,425 | python | def _Popen(command, output=False, directory='.', runas=None, env=(), exitcode=0, use_vt=False, loglevel=None):
'\n Run a command.\n\n output\n return output if true\n\n directory\n directory to execute in\n\n runas\n user used to run buildout as\n\n env\n environment variables to set when running\n\n exitcode\n fails if cmd does not return this exit code\n (set to None to disable check)\n\n use_vt\n Use the new salt VT to stream output [experimental]\n\n '
ret = None
directory = os.path.abspath(directory)
if isinstance(command, list):
command = ' '.join(command)
LOG.debug('Running {}'.format(command))
if (not loglevel):
loglevel = 'debug'
ret = __salt__['cmd.run_all'](command, cwd=directory, output_loglevel=loglevel, runas=runas, env=env, use_vt=use_vt, python_shell=False)
out = ((ret['stdout'] + '\n\n') + ret['stderr'])
if ((exitcode is not None) and (ret['retcode'] != exitcode)):
raise _BuildoutError(out)
ret['output'] = out
if output:
ret = out
return ret | def _Popen(command, output=False, directory='.', runas=None, env=(), exitcode=0, use_vt=False, loglevel=None):
'\n Run a command.\n\n output\n return output if true\n\n directory\n directory to execute in\n\n runas\n user used to run buildout as\n\n env\n environment variables to set when running\n\n exitcode\n fails if cmd does not return this exit code\n (set to None to disable check)\n\n use_vt\n Use the new salt VT to stream output [experimental]\n\n '
ret = None
directory = os.path.abspath(directory)
if isinstance(command, list):
command = ' '.join(command)
LOG.debug('Running {}'.format(command))
if (not loglevel):
loglevel = 'debug'
ret = __salt__['cmd.run_all'](command, cwd=directory, output_loglevel=loglevel, runas=runas, env=env, use_vt=use_vt, python_shell=False)
out = ((ret['stdout'] + '\n\n') + ret['stderr'])
if ((exitcode is not None) and (ret['retcode'] != exitcode)):
raise _BuildoutError(out)
ret['output'] = out
if output:
ret = out
return ret<|docstring|>Run a command.
output
return output if true
directory
directory to execute in
runas
user used to run buildout as
env
environment variables to set when running
exitcode
fails if cmd does not return this exit code
(set to None to disable check)
use_vt
Use the new salt VT to stream output [experimental]<|endoftext|> |
986509faf81e51b1d99546bd51c78eff7917fb7ebb775b473ea14a1e3eba5761 | def _find_cfgs(path, cfgs=None):
'\n Find all buildout configs in a subdirectory.\n only buildout.cfg and etc/buildout.cfg are valid in::\n\n path\n directory where to start to search\n\n cfg\n a optional list to append to\n\n .\n ├── buildout.cfg\n ├── etc\n │\xa0\xa0 └── buildout.cfg\n ├── foo\n │\xa0\xa0 └── buildout.cfg\n └── var\n └── buildout.cfg\n '
ignored = ['var', 'parts']
dirs = []
if (not cfgs):
cfgs = []
for i in os.listdir(path):
fi = os.path.join(path, i)
if (fi.endswith('.cfg') and os.path.isfile(fi)):
cfgs.append(fi)
if (os.path.isdir(fi) and (i not in ignored)):
dirs.append(fi)
for fpath in dirs:
for (p, ids, ifs) in salt.utils.path.os_walk(fpath):
for i in ifs:
if i.endswith('.cfg'):
cfgs.append(os.path.join(p, i))
return cfgs | Find all buildout configs in a subdirectory.
only buildout.cfg and etc/buildout.cfg are valid in::
path
directory where to start to search
cfg
a optional list to append to
.
├── buildout.cfg
├── etc
│ └── buildout.cfg
├── foo
│ └── buildout.cfg
└── var
└── buildout.cfg | salt/modules/zcbuildout.py | _find_cfgs | Flowdalic/salt | 9,425 | python | def _find_cfgs(path, cfgs=None):
'\n Find all buildout configs in a subdirectory.\n only buildout.cfg and etc/buildout.cfg are valid in::\n\n path\n directory where to start to search\n\n cfg\n a optional list to append to\n\n .\n ├── buildout.cfg\n ├── etc\n │\xa0\xa0 └── buildout.cfg\n ├── foo\n │\xa0\xa0 └── buildout.cfg\n └── var\n └── buildout.cfg\n '
ignored = ['var', 'parts']
dirs = []
if (not cfgs):
cfgs = []
for i in os.listdir(path):
fi = os.path.join(path, i)
if (fi.endswith('.cfg') and os.path.isfile(fi)):
cfgs.append(fi)
if (os.path.isdir(fi) and (i not in ignored)):
dirs.append(fi)
for fpath in dirs:
for (p, ids, ifs) in salt.utils.path.os_walk(fpath):
for i in ifs:
if i.endswith('.cfg'):
cfgs.append(os.path.join(p, i))
return cfgs | def _find_cfgs(path, cfgs=None):
'\n Find all buildout configs in a subdirectory.\n only buildout.cfg and etc/buildout.cfg are valid in::\n\n path\n directory where to start to search\n\n cfg\n a optional list to append to\n\n .\n ├── buildout.cfg\n ├── etc\n │\xa0\xa0 └── buildout.cfg\n ├── foo\n │\xa0\xa0 └── buildout.cfg\n └── var\n └── buildout.cfg\n '
ignored = ['var', 'parts']
dirs = []
if (not cfgs):
cfgs = []
for i in os.listdir(path):
fi = os.path.join(path, i)
if (fi.endswith('.cfg') and os.path.isfile(fi)):
cfgs.append(fi)
if (os.path.isdir(fi) and (i not in ignored)):
dirs.append(fi)
for fpath in dirs:
for (p, ids, ifs) in salt.utils.path.os_walk(fpath):
for i in ifs:
if i.endswith('.cfg'):
cfgs.append(os.path.join(p, i))
return cfgs<|docstring|>Find all buildout configs in a subdirectory.
only buildout.cfg and etc/buildout.cfg are valid in::
path
directory where to start to search
cfg
a optional list to append to
.
├── buildout.cfg
├── etc
│ └── buildout.cfg
├── foo
│ └── buildout.cfg
└── var
└── buildout.cfg<|endoftext|> |
5c7197c61f042a046e93a3b2e293d78e4ac95d752ea470d7c01b99ee0ed1fb8c | def _get_bootstrap_content(directory='.'):
'\n Get the current bootstrap.py script content\n '
try:
with salt.utils.files.fopen(os.path.join(os.path.abspath(directory), 'bootstrap.py')) as fic:
oldcontent = salt.utils.stringutils.to_unicode(fic.read())
except OSError:
oldcontent = ''
return oldcontent | Get the current bootstrap.py script content | salt/modules/zcbuildout.py | _get_bootstrap_content | Flowdalic/salt | 9,425 | python | def _get_bootstrap_content(directory='.'):
'\n \n '
try:
with salt.utils.files.fopen(os.path.join(os.path.abspath(directory), 'bootstrap.py')) as fic:
oldcontent = salt.utils.stringutils.to_unicode(fic.read())
except OSError:
oldcontent =
return oldcontent | def _get_bootstrap_content(directory='.'):
'\n \n '
try:
with salt.utils.files.fopen(os.path.join(os.path.abspath(directory), 'bootstrap.py')) as fic:
oldcontent = salt.utils.stringutils.to_unicode(fic.read())
except OSError:
oldcontent =
return oldcontent<|docstring|>Get the current bootstrap.py script content<|endoftext|> |
d18f826580cc6849d5ce231de38cb2c546e57e4e9e4969f5ac4ef435b1dfcec4 | def _get_buildout_ver(directory='.'):
'Check for buildout versions.\n\n In any cases, check for a version pinning\n Also check for buildout.dumppickedversions which is buildout1 specific\n Also check for the version targeted by the local bootstrap file\n Take as default buildout2\n\n directory\n directory to execute in\n '
directory = os.path.abspath(directory)
buildoutver = 2
try:
files = _find_cfgs(directory)
for f in files:
with salt.utils.files.fopen(f) as fic:
buildout1re = re.compile('^zc\\.buildout\\s*=\\s*1', RE_F)
dfic = salt.utils.stringutils.to_unicode(fic.read())
if (('buildout.dumppick' in dfic) or buildout1re.search(dfic)):
buildoutver = 1
bcontent = _get_bootstrap_content(directory)
if (('--download-base' in bcontent) or ('--setup-source' in bcontent) or ('--distribute' in bcontent)):
buildoutver = 1
except OSError:
pass
return buildoutver | Check for buildout versions.
In any cases, check for a version pinning
Also check for buildout.dumppickedversions which is buildout1 specific
Also check for the version targeted by the local bootstrap file
Take as default buildout2
directory
directory to execute in | salt/modules/zcbuildout.py | _get_buildout_ver | Flowdalic/salt | 9,425 | python | def _get_buildout_ver(directory='.'):
'Check for buildout versions.\n\n In any cases, check for a version pinning\n Also check for buildout.dumppickedversions which is buildout1 specific\n Also check for the version targeted by the local bootstrap file\n Take as default buildout2\n\n directory\n directory to execute in\n '
directory = os.path.abspath(directory)
buildoutver = 2
try:
files = _find_cfgs(directory)
for f in files:
with salt.utils.files.fopen(f) as fic:
buildout1re = re.compile('^zc\\.buildout\\s*=\\s*1', RE_F)
dfic = salt.utils.stringutils.to_unicode(fic.read())
if (('buildout.dumppick' in dfic) or buildout1re.search(dfic)):
buildoutver = 1
bcontent = _get_bootstrap_content(directory)
if (('--download-base' in bcontent) or ('--setup-source' in bcontent) or ('--distribute' in bcontent)):
buildoutver = 1
except OSError:
pass
return buildoutver | def _get_buildout_ver(directory='.'):
'Check for buildout versions.\n\n In any cases, check for a version pinning\n Also check for buildout.dumppickedversions which is buildout1 specific\n Also check for the version targeted by the local bootstrap file\n Take as default buildout2\n\n directory\n directory to execute in\n '
directory = os.path.abspath(directory)
buildoutver = 2
try:
files = _find_cfgs(directory)
for f in files:
with salt.utils.files.fopen(f) as fic:
buildout1re = re.compile('^zc\\.buildout\\s*=\\s*1', RE_F)
dfic = salt.utils.stringutils.to_unicode(fic.read())
if (('buildout.dumppick' in dfic) or buildout1re.search(dfic)):
buildoutver = 1
bcontent = _get_bootstrap_content(directory)
if (('--download-base' in bcontent) or ('--setup-source' in bcontent) or ('--distribute' in bcontent)):
buildoutver = 1
except OSError:
pass
return buildoutver<|docstring|>Check for buildout versions.
In any cases, check for a version pinning
Also check for buildout.dumppickedversions which is buildout1 specific
Also check for the version targeted by the local bootstrap file
Take as default buildout2
directory
directory to execute in<|endoftext|> |
cc0ddac441185690916fb332e492c868cbf39f58fad7131ffac7f1f82e02d3fc | def _get_bootstrap_url(directory):
'\n Get the most appropriate download URL for the bootstrap script.\n\n directory\n directory to execute in\n\n '
v = _get_buildout_ver(directory)
return _URL_VERSIONS.get(v, _URL_VERSIONS[DEFAULT_VER]) | Get the most appropriate download URL for the bootstrap script.
directory
directory to execute in | salt/modules/zcbuildout.py | _get_bootstrap_url | Flowdalic/salt | 9,425 | python | def _get_bootstrap_url(directory):
'\n Get the most appropriate download URL for the bootstrap script.\n\n directory\n directory to execute in\n\n '
v = _get_buildout_ver(directory)
return _URL_VERSIONS.get(v, _URL_VERSIONS[DEFAULT_VER]) | def _get_bootstrap_url(directory):
'\n Get the most appropriate download URL for the bootstrap script.\n\n directory\n directory to execute in\n\n '
v = _get_buildout_ver(directory)
return _URL_VERSIONS.get(v, _URL_VERSIONS[DEFAULT_VER])<|docstring|>Get the most appropriate download URL for the bootstrap script.
directory
directory to execute in<|endoftext|> |
49145cb4cc329e75e66e5f2d3a05f208b77ee50a9bf8e906a320fb04eea77dd4 | def _dot_buildout(directory):
'\n Get the local marker directory.\n\n directory\n directory to execute in\n '
return os.path.join(os.path.abspath(directory), '.buildout') | Get the local marker directory.
directory
directory to execute in | salt/modules/zcbuildout.py | _dot_buildout | Flowdalic/salt | 9,425 | python | def _dot_buildout(directory):
'\n Get the local marker directory.\n\n directory\n directory to execute in\n '
return os.path.join(os.path.abspath(directory), '.buildout') | def _dot_buildout(directory):
'\n Get the local marker directory.\n\n directory\n directory to execute in\n '
return os.path.join(os.path.abspath(directory), '.buildout')<|docstring|>Get the local marker directory.
directory
directory to execute in<|endoftext|> |
@_salt_callback
def upgrade_bootstrap(directory='.', onlyif=None, unless=None, runas=None, env=(), offline=False, buildout_ver=None):
    """
    Upgrade current bootstrap.py with the last released one.

    Indeed, when we first run a buildout, a common source of problem
    is to have a locally stale bootstrap, we just try to grab a new copy

    directory
        directory to execute in

    offline
        are we executing buildout in offline mode

    buildout_ver
        forcing to use a specific buildout version (1 | 2)

    onlyif
        Only execute cmd if statement on the host return 0

    unless
        Do not execute cmd if statement on the host return 0

    CLI Example:

    .. code-block:: bash

        salt '*' buildout.upgrade_bootstrap /srv/mybuildout
    """
    if buildout_ver:
        booturl = _URL_VERSIONS[buildout_ver]
    else:
        buildout_ver = _get_buildout_ver(directory)
        booturl = _get_bootstrap_url(directory)
    LOG.debug('Using {}'.format(booturl))
    # Work on absolute paths so a relative ``directory`` is safe.
    directory = os.path.abspath(directory)
    b_py = os.path.join(directory, 'bootstrap.py')
    comment = ''
    try:
        oldcontent = _get_bootstrap_content(directory)
        dbuild = _dot_buildout(directory)
        data = oldcontent
        updated = False
        dled = False
        if not offline:
            try:
                if not os.path.isdir(dbuild):
                    os.makedirs(dbuild)
                # The marker file records that this buildout version's
                # bootstrap was already refreshed; when it is missing the
                # open() raises OSError and we download a fresh copy.
                with salt.utils.files.fopen(
                    os.path.join(
                        dbuild, '{}.updated_bootstrap'.format(buildout_ver)
                    )
                ):
                    pass
            except OSError:
                LOG.info('Bootstrap updated from repository')
                # BUGFIX: urlopen().read() returns bytes on Python 3;
                # decode so the substring test and '\n'.join() below
                # operate on text like the rest of this function.
                data = salt.utils.stringutils.to_unicode(
                    urllib.request.urlopen(booturl).read()
                )
                updated = True
                dled = True
        if 'socket.setdefaulttimeout' not in data:
            updated = True
            ldata = data.splitlines()
            # Keep bootstrap from hanging forever on unreachable indexes.
            ldata.insert(1, 'import socket;socket.setdefaulttimeout(2)')
            data = '\n'.join(ldata)
        if updated:
            comment = 'Bootstrap updated'
            with salt.utils.files.fopen(b_py, 'w') as fic:
                fic.write(salt.utils.stringutils.to_str(data))
        if dled:
            # Drop the marker so the next run skips the download.
            with salt.utils.files.fopen(
                os.path.join(
                    dbuild, '{}.updated_bootstrap'.format(buildout_ver)
                ),
                'w',
            ) as afic:
                afic.write('foo')
    except OSError:
        # Best effort: on any filesystem/network error restore the
        # previous bootstrap.py so the buildout stays usable.
        if oldcontent:
            with salt.utils.files.fopen(b_py, 'w') as fic:
                fic.write(salt.utils.stringutils.to_str(oldcontent))
    return {'comment': comment}
Indeed, when we first run a buildout, a common source of problem
is to have a locally stale bootstrap, we just try to grab a new copy
directory
directory to execute in
offline
are we executing buildout in offline mode
buildout_ver
forcing to use a specific buildout version (1 | 2)
onlyif
Only execute cmd if statement on the host return 0
unless
Do not execute cmd if statement on the host return 0
CLI Example:
.. code-block:: bash
salt '*' buildout.upgrade_bootstrap /srv/mybuildout | salt/modules/zcbuildout.py | upgrade_bootstrap | Flowdalic/salt | 9,425 | python | @_salt_callback
def upgrade_bootstrap(directory='.', onlyif=None, unless=None, runas=None, env=(), offline=False, buildout_ver=None):
"\n Upgrade current bootstrap.py with the last released one.\n\n Indeed, when we first run a buildout, a common source of problem\n is to have a locally stale bootstrap, we just try to grab a new copy\n\n directory\n directory to execute in\n\n offline\n are we executing buildout in offline mode\n\n buildout_ver\n forcing to use a specific buildout version (1 | 2)\n\n onlyif\n Only execute cmd if statement on the host return 0\n\n unless\n Do not execute cmd if statement on the host return 0\n\n CLI Example:\n\n .. code-block:: bash\n\n salt '*' buildout.upgrade_bootstrap /srv/mybuildout\n "
if buildout_ver:
booturl = _URL_VERSIONS[buildout_ver]
else:
buildout_ver = _get_buildout_ver(directory)
booturl = _get_bootstrap_url(directory)
LOG.debug('Using {}'.format(booturl))
directory = os.path.abspath(directory)
b_py = os.path.join(directory, 'bootstrap.py')
comment =
try:
oldcontent = _get_bootstrap_content(directory)
dbuild = _dot_buildout(directory)
data = oldcontent
updated = False
dled = False
if (not offline):
try:
if (not os.path.isdir(dbuild)):
os.makedirs(dbuild)
with salt.utils.files.fopen(os.path.join(dbuild, '{}.updated_bootstrap'.format(buildout_ver))):
pass
except OSError:
LOG.info('Bootstrap updated from repository')
data = urllib.request.urlopen(booturl).read()
updated = True
dled = True
if ('socket.setdefaulttimeout' not in data):
updated = True
ldata = data.splitlines()
ldata.insert(1, 'import socket;socket.setdefaulttimeout(2)')
data = '\n'.join(ldata)
if updated:
comment = 'Bootstrap updated'
with salt.utils.files.fopen(b_py, 'w') as fic:
fic.write(salt.utils.stringutils.to_str(data))
if dled:
with salt.utils.files.fopen(os.path.join(dbuild, '{}.updated_bootstrap'.format(buildout_ver)), 'w') as afic:
afic.write('foo')
except OSError:
if oldcontent:
with salt.utils.files.fopen(b_py, 'w') as fic:
fic.write(salt.utils.stringutils.to_str(oldcontent))
return {'comment': comment} | @_salt_callback
def upgrade_bootstrap(directory='.', onlyif=None, unless=None, runas=None, env=(), offline=False, buildout_ver=None):
"\n Upgrade current bootstrap.py with the last released one.\n\n Indeed, when we first run a buildout, a common source of problem\n is to have a locally stale bootstrap, we just try to grab a new copy\n\n directory\n directory to execute in\n\n offline\n are we executing buildout in offline mode\n\n buildout_ver\n forcing to use a specific buildout version (1 | 2)\n\n onlyif\n Only execute cmd if statement on the host return 0\n\n unless\n Do not execute cmd if statement on the host return 0\n\n CLI Example:\n\n .. code-block:: bash\n\n salt '*' buildout.upgrade_bootstrap /srv/mybuildout\n "
if buildout_ver:
booturl = _URL_VERSIONS[buildout_ver]
else:
buildout_ver = _get_buildout_ver(directory)
booturl = _get_bootstrap_url(directory)
LOG.debug('Using {}'.format(booturl))
directory = os.path.abspath(directory)
b_py = os.path.join(directory, 'bootstrap.py')
comment =
try:
oldcontent = _get_bootstrap_content(directory)
dbuild = _dot_buildout(directory)
data = oldcontent
updated = False
dled = False
if (not offline):
try:
if (not os.path.isdir(dbuild)):
os.makedirs(dbuild)
with salt.utils.files.fopen(os.path.join(dbuild, '{}.updated_bootstrap'.format(buildout_ver))):
pass
except OSError:
LOG.info('Bootstrap updated from repository')
data = urllib.request.urlopen(booturl).read()
updated = True
dled = True
if ('socket.setdefaulttimeout' not in data):
updated = True
ldata = data.splitlines()
ldata.insert(1, 'import socket;socket.setdefaulttimeout(2)')
data = '\n'.join(ldata)
if updated:
comment = 'Bootstrap updated'
with salt.utils.files.fopen(b_py, 'w') as fic:
fic.write(salt.utils.stringutils.to_str(data))
if dled:
with salt.utils.files.fopen(os.path.join(dbuild, '{}.updated_bootstrap'.format(buildout_ver)), 'w') as afic:
afic.write('foo')
except OSError:
if oldcontent:
with salt.utils.files.fopen(b_py, 'w') as fic:
fic.write(salt.utils.stringutils.to_str(oldcontent))
return {'comment': comment}<|docstring|>Upgrade current bootstrap.py with the last released one.
Indeed, when we first run a buildout, a common source of problem
is to have a locally stale bootstrap, we just try to grab a new copy
directory
directory to execute in
offline
are we executing buildout in offline mode
buildout_ver
forcing to use a specific buildout version (1 | 2)
onlyif
Only execute cmd if statement on the host return 0
unless
Do not execute cmd if statement on the host return 0
CLI Example:
.. code-block:: bash
salt '*' buildout.upgrade_bootstrap /srv/mybuildout<|endoftext|> |
@_salt_callback
def bootstrap(directory='.', config='buildout.cfg', python=sys.executable, onlyif=None, unless=None, runas=None, env=(), distribute=None, buildout_ver=None, test_release=False, offline=False, new_st=None, use_vt=False, loglevel=None):
    """
    Run the buildout bootstrap dance (python bootstrap.py).

    directory
        directory to execute in

    config
        alternative buildout configuration file to use

    runas
        User used to run buildout as

    env
        environment variables to set when running

    buildout_ver
        force a specific buildout version (1 | 2)

    test_release
        buildout accept test release

    offline
        are we executing buildout in offline mode

    distribute
        Forcing use of distribute

    new_st
        Forcing use of setuptools >= 0.7

    python
        path to a python executable to use in place of default (salt one)

    onlyif
        Only execute cmd if statement on the host return 0

    unless
        Do not execute cmd if statement on the host return 0

    use_vt
        Use the new salt VT to stream output [experimental]

    CLI Example:

    .. code-block:: bash

        salt '*' buildout.bootstrap /srv/mybuildout
    """
    directory = os.path.abspath(directory)
    dbuild = _dot_buildout(directory)
    bootstrap_args = ''
    has_distribute = _has_old_distribute(python=python, runas=runas, env=env)
    has_new_st = _has_setuptools7(python=python, runas=runas, env=env)
    # This condition collapses the original 16-branch (partly duplicated)
    # truth table over (has_distribute, has_new_st, distribute, new_st).
    # Net effect of that chain, verified case by case:
    #   - distribute is honored ONLY when setuptools >= 0.7 is absent,
    #     new_st was not requested, and either distribute was requested
    #     or an old distribute install is present;
    #   - every other combination resolves to setuptools (new_st).
    if new_st or has_new_st or not (distribute or has_distribute):
        new_st = True
        distribute = False
        LOG.warning('Forcing to use setuptools as we have setuptools >= 0.7')
    else:
        new_st = False
        distribute = True
        # Only buildout 1 understands the --distribute bootstrap flag.
        if buildout_ver == 1:
            LOG.warning('Using distribute !')
            bootstrap_args += ' --distribute'
    if not os.path.isdir(dbuild):
        os.makedirs(dbuild)
    # Refresh bootstrap.py before running it (see upgrade_bootstrap).
    upgrade_bootstrap(directory, offline=offline, buildout_ver=buildout_ver)
    b_py = os.path.join(directory, 'bootstrap.py')
    with salt.utils.files.fopen(b_py) as fic:
        content = salt.utils.stringutils.to_unicode(fic.read())
    # Only pass flags the downloaded bootstrap actually supports.
    if test_release is not False and ' --accept-buildout-test-releases' in content:
        bootstrap_args += ' --accept-buildout-test-releases'
    if config and '"-c"' in content:
        bootstrap_args += ' -c {}'.format(config)
    try:
        if runas:
            uid = __salt__['user.info'](runas)['uid']
            gid = __salt__['user.info'](runas)['gid']
            os.chown('bootstrap.py', uid, gid)
    except OSError as exc:
        # Non-fatal: bootstrap may still succeed without the chown.
        _logger.error('BUILDOUT bootstrap permissions error: %s', exc, exc_info=_logger.isEnabledFor(logging.DEBUG))
    cmd = '{} bootstrap.py {}'.format(python, bootstrap_args)
    ret = _Popen(cmd, directory=directory, runas=runas, loglevel=loglevel, env=env, use_vt=use_vt)
    output = ret['output']
    return {'comment': cmd, 'out': output}
directory
directory to execute in
config
alternative buildout configuration file to use
runas
User used to run buildout as
env
environment variables to set when running
buildout_ver
force a specific buildout version (1 | 2)
test_release
buildout accept test release
offline
are we executing buildout in offline mode
distribute
Forcing use of distribute
new_st
Forcing use of setuptools >= 0.7
python
path to a python executable to use in place of default (salt one)
onlyif
Only execute cmd if statement on the host return 0
unless
Do not execute cmd if statement on the host return 0
use_vt
Use the new salt VT to stream output [experimental]
CLI Example:
.. code-block:: bash
salt '*' buildout.bootstrap /srv/mybuildout | salt/modules/zcbuildout.py | bootstrap | Flowdalic/salt | 9,425 | python | @_salt_callback
def bootstrap(directory='.', config='buildout.cfg', python=sys.executable, onlyif=None, unless=None, runas=None, env=(), distribute=None, buildout_ver=None, test_release=False, offline=False, new_st=None, use_vt=False, loglevel=None):
"\n Run the buildout bootstrap dance (python bootstrap.py).\n\n directory\n directory to execute in\n\n config\n alternative buildout configuration file to use\n\n runas\n User used to run buildout as\n\n env\n environment variables to set when running\n\n buildout_ver\n force a specific buildout version (1 | 2)\n\n test_release\n buildout accept test release\n\n offline\n are we executing buildout in offline mode\n\n distribute\n Forcing use of distribute\n\n new_st\n Forcing use of setuptools >= 0.7\n\n python\n path to a python executable to use in place of default (salt one)\n\n onlyif\n Only execute cmd if statement on the host return 0\n\n unless\n Do not execute cmd if statement on the host return 0\n\n use_vt\n Use the new salt VT to stream output [experimental]\n\n CLI Example:\n\n .. code-block:: bash\n\n salt '*' buildout.bootstrap /srv/mybuildout\n "
directory = os.path.abspath(directory)
dbuild = _dot_buildout(directory)
bootstrap_args =
has_distribute = _has_old_distribute(python=python, runas=runas, env=env)
has_new_st = _has_setuptools7(python=python, runas=runas, env=env)
if (has_distribute and has_new_st and (not distribute) and new_st):
new_st = True
distribute = False
if (has_distribute and has_new_st and (not distribute) and new_st):
new_st = True
distribute = False
if (has_distribute and has_new_st and distribute and (not new_st)):
new_st = True
distribute = False
if (has_distribute and has_new_st and (not distribute) and (not new_st)):
new_st = True
distribute = False
if ((not has_distribute) and has_new_st and (not distribute) and new_st):
new_st = True
distribute = False
if ((not has_distribute) and has_new_st and (not distribute) and new_st):
new_st = True
distribute = False
if ((not has_distribute) and has_new_st and distribute and (not new_st)):
new_st = True
distribute = False
if ((not has_distribute) and has_new_st and (not distribute) and (not new_st)):
new_st = True
distribute = False
if (has_distribute and (not has_new_st) and (not distribute) and new_st):
new_st = True
distribute = False
if (has_distribute and (not has_new_st) and (not distribute) and new_st):
new_st = True
distribute = False
if (has_distribute and (not has_new_st) and distribute and (not new_st)):
new_st = False
distribute = True
if (has_distribute and (not has_new_st) and (not distribute) and (not new_st)):
new_st = False
distribute = True
if ((not has_distribute) and (not has_new_st) and (not distribute) and new_st):
new_st = True
distribute = False
if ((not has_distribute) and (not has_new_st) and (not distribute) and new_st):
new_st = True
distribute = False
if ((not has_distribute) and (not has_new_st) and distribute and (not new_st)):
new_st = False
distribute = True
if ((not has_distribute) and (not has_new_st) and (not distribute) and (not new_st)):
new_st = True
distribute = False
if (new_st and distribute):
distribute = False
if new_st:
distribute = False
LOG.warning('Forcing to use setuptools as we have setuptools >= 0.7')
if distribute:
new_st = False
if (buildout_ver == 1):
LOG.warning('Using distribute !')
bootstrap_args += ' --distribute'
if (not os.path.isdir(dbuild)):
os.makedirs(dbuild)
upgrade_bootstrap(directory, offline=offline, buildout_ver=buildout_ver)
b_py = os.path.join(directory, 'bootstrap.py')
with salt.utils.files.fopen(b_py) as fic:
content = salt.utils.stringutils.to_unicode(fic.read())
if ((test_release is not False) and (' --accept-buildout-test-releases' in content)):
bootstrap_args += ' --accept-buildout-test-releases'
if (config and ('"-c"' in content)):
bootstrap_args += ' -c {}'.format(config)
try:
if runas:
uid = __salt__['user.info'](runas)['uid']
gid = __salt__['user.info'](runas)['gid']
os.chown('bootstrap.py', uid, gid)
except OSError as exc:
_logger.error('BUILDOUT bootstrap permissions error: %s', exc, exc_info=_logger.isEnabledFor(logging.DEBUG))
cmd = '{} bootstrap.py {}'.format(python, bootstrap_args)
ret = _Popen(cmd, directory=directory, runas=runas, loglevel=loglevel, env=env, use_vt=use_vt)
output = ret['output']
return {'comment': cmd, 'out': output} | @_salt_callback
def bootstrap(directory='.', config='buildout.cfg', python=sys.executable, onlyif=None, unless=None, runas=None, env=(), distribute=None, buildout_ver=None, test_release=False, offline=False, new_st=None, use_vt=False, loglevel=None):
"\n Run the buildout bootstrap dance (python bootstrap.py).\n\n directory\n directory to execute in\n\n config\n alternative buildout configuration file to use\n\n runas\n User used to run buildout as\n\n env\n environment variables to set when running\n\n buildout_ver\n force a specific buildout version (1 | 2)\n\n test_release\n buildout accept test release\n\n offline\n are we executing buildout in offline mode\n\n distribute\n Forcing use of distribute\n\n new_st\n Forcing use of setuptools >= 0.7\n\n python\n path to a python executable to use in place of default (salt one)\n\n onlyif\n Only execute cmd if statement on the host return 0\n\n unless\n Do not execute cmd if statement on the host return 0\n\n use_vt\n Use the new salt VT to stream output [experimental]\n\n CLI Example:\n\n .. code-block:: bash\n\n salt '*' buildout.bootstrap /srv/mybuildout\n "
directory = os.path.abspath(directory)
dbuild = _dot_buildout(directory)
bootstrap_args =
has_distribute = _has_old_distribute(python=python, runas=runas, env=env)
has_new_st = _has_setuptools7(python=python, runas=runas, env=env)
if (has_distribute and has_new_st and (not distribute) and new_st):
new_st = True
distribute = False
if (has_distribute and has_new_st and (not distribute) and new_st):
new_st = True
distribute = False
if (has_distribute and has_new_st and distribute and (not new_st)):
new_st = True
distribute = False
if (has_distribute and has_new_st and (not distribute) and (not new_st)):
new_st = True
distribute = False
if ((not has_distribute) and has_new_st and (not distribute) and new_st):
new_st = True
distribute = False
if ((not has_distribute) and has_new_st and (not distribute) and new_st):
new_st = True
distribute = False
if ((not has_distribute) and has_new_st and distribute and (not new_st)):
new_st = True
distribute = False
if ((not has_distribute) and has_new_st and (not distribute) and (not new_st)):
new_st = True
distribute = False
if (has_distribute and (not has_new_st) and (not distribute) and new_st):
new_st = True
distribute = False
if (has_distribute and (not has_new_st) and (not distribute) and new_st):
new_st = True
distribute = False
if (has_distribute and (not has_new_st) and distribute and (not new_st)):
new_st = False
distribute = True
if (has_distribute and (not has_new_st) and (not distribute) and (not new_st)):
new_st = False
distribute = True
if ((not has_distribute) and (not has_new_st) and (not distribute) and new_st):
new_st = True
distribute = False
if ((not has_distribute) and (not has_new_st) and (not distribute) and new_st):
new_st = True
distribute = False
if ((not has_distribute) and (not has_new_st) and distribute and (not new_st)):
new_st = False
distribute = True
if ((not has_distribute) and (not has_new_st) and (not distribute) and (not new_st)):
new_st = True
distribute = False
if (new_st and distribute):
distribute = False
if new_st:
distribute = False
LOG.warning('Forcing to use setuptools as we have setuptools >= 0.7')
if distribute:
new_st = False
if (buildout_ver == 1):
LOG.warning('Using distribute !')
bootstrap_args += ' --distribute'
if (not os.path.isdir(dbuild)):
os.makedirs(dbuild)
upgrade_bootstrap(directory, offline=offline, buildout_ver=buildout_ver)
b_py = os.path.join(directory, 'bootstrap.py')
with salt.utils.files.fopen(b_py) as fic:
content = salt.utils.stringutils.to_unicode(fic.read())
if ((test_release is not False) and (' --accept-buildout-test-releases' in content)):
bootstrap_args += ' --accept-buildout-test-releases'
if (config and ('"-c"' in content)):
bootstrap_args += ' -c {}'.format(config)
try:
if runas:
uid = __salt__['user.info'](runas)['uid']
gid = __salt__['user.info'](runas)['gid']
os.chown('bootstrap.py', uid, gid)
except OSError as exc:
_logger.error('BUILDOUT bootstrap permissions error: %s', exc, exc_info=_logger.isEnabledFor(logging.DEBUG))
cmd = '{} bootstrap.py {}'.format(python, bootstrap_args)
ret = _Popen(cmd, directory=directory, runas=runas, loglevel=loglevel, env=env, use_vt=use_vt)
output = ret['output']
return {'comment': cmd, 'out': output}<|docstring|>Run the buildout bootstrap dance (python bootstrap.py).
directory
directory to execute in
config
alternative buildout configuration file to use
runas
User used to run buildout as
env
environment variables to set when running
buildout_ver
force a specific buildout version (1 | 2)
test_release
buildout accept test release
offline
are we executing buildout in offline mode
distribute
Forcing use of distribute
new_st
Forcing use of setuptools >= 0.7
python
path to a python executable to use in place of default (salt one)
onlyif
Only execute cmd if statement on the host return 0
unless
Do not execute cmd if statement on the host return 0
use_vt
Use the new salt VT to stream output [experimental]
CLI Example:
.. code-block:: bash
salt '*' buildout.bootstrap /srv/mybuildout<|endoftext|> |
@_salt_callback
def run_buildout(directory='.', config='buildout.cfg', parts=None, onlyif=None, unless=None, offline=False, newest=True, runas=None, env=(), verbose=False, debug=False, use_vt=False, loglevel=None):
    """
    Run a buildout in a directory.

    directory
        directory to execute in

    config
        alternative buildout configuration file to use

    offline
        are we executing buildout in offline mode

    runas
        user used to run buildout as

    env
        environment variables to set when running

    onlyif
        Only execute cmd if statement on the host return 0

    unless
        Do not execute cmd if statement on the host return 0

    newest
        run buildout in newest mode

    force
        run buildout unconditionally

    verbose
        run buildout in verbose mode (-vvvvv)

    use_vt
        Use the new salt VT to stream output [experimental]

    CLI Example:

    .. code-block:: bash

        salt '*' buildout.run_buildout /srv/mybuildout
    """
    directory = os.path.abspath(directory)
    bcmd = os.path.join(directory, 'bin', 'buildout')
    installed_cfg = os.path.join(directory, '.installed.cfg')
    # Translate keyword options into buildout command-line flags.
    flag_specs = (
        (verbose, '-vvvvvvv', 'Buildout is running in verbose mode!'),
        ((not newest) and os.path.exists(installed_cfg), '-N',
         'Buildout is running in non newest mode!'),
        (newest, '-n', 'Buildout is running in newest mode!'),
        (offline, '-o', 'Buildout is running in offline mode!'),
        (debug, '-D', 'Buildout is running in debug mode!'),
    )
    argv = []
    for enabled, flag, message in flag_specs:
        if enabled:
            LOG.debug(message)
            argv.append(flag)
    cmds = []
    outputs = []
    if parts:
        # Install each requested part individually.
        for part in parts:
            LOG.info('Installing single part: {}'.format(part))
            cmd = '{} -c {} {} install {}'.format(
                bcmd, config, ' '.join(argv), part
            )
            cmds.append(cmd)
            outputs.append(
                _Popen(cmd, directory=directory, runas=runas, env=env,
                       output=True, loglevel=loglevel, use_vt=use_vt)
            )
    else:
        # No explicit parts: run the whole buildout.
        LOG.info('Installing all buildout parts')
        cmd = '{} -c {} {}'.format(bcmd, config, ' '.join(argv))
        cmds.append(cmd)
        outputs.append(
            _Popen(cmd, directory=directory, runas=runas, loglevel=loglevel,
                   env=env, output=True, use_vt=use_vt)
        )
    return {'comment': '\n'.join(cmds), 'out': '\n'.join(outputs)}
directory
directory to execute in
config
alternative buildout configuration file to use
offline
are we executing buildout in offline mode
runas
user used to run buildout as
env
environment variables to set when running
onlyif
Only execute cmd if statement on the host return 0
unless
Do not execute cmd if statement on the host return 0
newest
run buildout in newest mode
force
run buildout unconditionally
verbose
run buildout in verbose mode (-vvvvv)
use_vt
Use the new salt VT to stream output [experimental]
CLI Example:
.. code-block:: bash
salt '*' buildout.run_buildout /srv/mybuildout | salt/modules/zcbuildout.py | run_buildout | Flowdalic/salt | 9,425 | python | @_salt_callback
def run_buildout(directory='.', config='buildout.cfg', parts=None, onlyif=None, unless=None, offline=False, newest=True, runas=None, env=(), verbose=False, debug=False, use_vt=False, loglevel=None):
"\n Run a buildout in a directory.\n\n directory\n directory to execute in\n\n config\n alternative buildout configuration file to use\n\n offline\n are we executing buildout in offline mode\n\n runas\n user used to run buildout as\n\n env\n environment variables to set when running\n\n onlyif\n Only execute cmd if statement on the host return 0\n\n unless\n Do not execute cmd if statement on the host return 0\n\n newest\n run buildout in newest mode\n\n force\n run buildout unconditionally\n\n verbose\n run buildout in verbose mode (-vvvvv)\n\n use_vt\n Use the new salt VT to stream output [experimental]\n\n CLI Example:\n\n .. code-block:: bash\n\n salt '*' buildout.run_buildout /srv/mybuildout\n "
directory = os.path.abspath(directory)
bcmd = os.path.join(directory, 'bin', 'buildout')
installed_cfg = os.path.join(directory, '.installed.cfg')
argv = []
if verbose:
LOG.debug('Buildout is running in verbose mode!')
argv.append('-vvvvvvv')
if ((not newest) and os.path.exists(installed_cfg)):
LOG.debug('Buildout is running in non newest mode!')
argv.append('-N')
if newest:
LOG.debug('Buildout is running in newest mode!')
argv.append('-n')
if offline:
LOG.debug('Buildout is running in offline mode!')
argv.append('-o')
if debug:
LOG.debug('Buildout is running in debug mode!')
argv.append('-D')
(cmds, outputs) = ([], [])
if parts:
for part in parts:
LOG.info('Installing single part: {}'.format(part))
cmd = '{} -c {} {} install {}'.format(bcmd, config, ' '.join(argv), part)
cmds.append(cmd)
outputs.append(_Popen(cmd, directory=directory, runas=runas, env=env, output=True, loglevel=loglevel, use_vt=use_vt))
else:
LOG.info('Installing all buildout parts')
cmd = '{} -c {} {}'.format(bcmd, config, ' '.join(argv))
cmds.append(cmd)
outputs.append(_Popen(cmd, directory=directory, runas=runas, loglevel=loglevel, env=env, output=True, use_vt=use_vt))
return {'comment': '\n'.join(cmds), 'out': '\n'.join(outputs)} | @_salt_callback
def run_buildout(directory='.', config='buildout.cfg', parts=None, onlyif=None, unless=None, offline=False, newest=True, runas=None, env=(), verbose=False, debug=False, use_vt=False, loglevel=None):
"\n Run a buildout in a directory.\n\n directory\n directory to execute in\n\n config\n alternative buildout configuration file to use\n\n offline\n are we executing buildout in offline mode\n\n runas\n user used to run buildout as\n\n env\n environment variables to set when running\n\n onlyif\n Only execute cmd if statement on the host return 0\n\n unless\n Do not execute cmd if statement on the host return 0\n\n newest\n run buildout in newest mode\n\n force\n run buildout unconditionally\n\n verbose\n run buildout in verbose mode (-vvvvv)\n\n use_vt\n Use the new salt VT to stream output [experimental]\n\n CLI Example:\n\n .. code-block:: bash\n\n salt '*' buildout.run_buildout /srv/mybuildout\n "
directory = os.path.abspath(directory)
bcmd = os.path.join(directory, 'bin', 'buildout')
installed_cfg = os.path.join(directory, '.installed.cfg')
argv = []
if verbose:
LOG.debug('Buildout is running in verbose mode!')
argv.append('-vvvvvvv')
if ((not newest) and os.path.exists(installed_cfg)):
LOG.debug('Buildout is running in non newest mode!')
argv.append('-N')
if newest:
LOG.debug('Buildout is running in newest mode!')
argv.append('-n')
if offline:
LOG.debug('Buildout is running in offline mode!')
argv.append('-o')
if debug:
LOG.debug('Buildout is running in debug mode!')
argv.append('-D')
(cmds, outputs) = ([], [])
if parts:
for part in parts:
LOG.info('Installing single part: {}'.format(part))
cmd = '{} -c {} {} install {}'.format(bcmd, config, ' '.join(argv), part)
cmds.append(cmd)
outputs.append(_Popen(cmd, directory=directory, runas=runas, env=env, output=True, loglevel=loglevel, use_vt=use_vt))
else:
LOG.info('Installing all buildout parts')
cmd = '{} -c {} {}'.format(bcmd, config, ' '.join(argv))
cmds.append(cmd)
outputs.append(_Popen(cmd, directory=directory, runas=runas, loglevel=loglevel, env=env, output=True, use_vt=use_vt))
return {'comment': '\n'.join(cmds), 'out': '\n'.join(outputs)}<|docstring|>Run a buildout in a directory.
directory
directory to execute in
config
alternative buildout configuration file to use
offline
are we executing buildout in offline mode
runas
user used to run buildout as
env
environment variables to set when running
onlyif
Only execute cmd if statement on the host return 0
unless
Do not execute cmd if statement on the host return 0
newest
run buildout in newest mode
force
run buildout unconditionally
verbose
run buildout in verbose mode (-vvvvv)
use_vt
Use the new salt VT to stream output [experimental]
CLI Example:
.. code-block:: bash
salt '*' buildout.run_buildout /srv/mybuildout<|endoftext|> |
@_salt_callback
def buildout(directory='.', config='buildout.cfg', parts=None, runas=None, env=(), buildout_ver=None, test_release=False, distribute=None, new_st=None, offline=False, newest=False, python=sys.executable, debug=False, verbose=False, onlyif=None, unless=None, use_vt=False, loglevel=None):
    """
    Run buildout in a directory.

    directory
        directory to execute in

    config
        buildout config to use

    parts
        specific buildout parts to run

    runas
        user used to run buildout as

    env
        environment variables to set when running

    buildout_ver
        force a specific buildout version (1 | 2)

    test_release
        buildout accept test release

    new_st
        Forcing use of setuptools >= 0.7

    distribute
        use distribute over setuptools if possible

    offline
        does buildout run offline

    python
        python to use

    debug
        run buildout with -D debug flag

    onlyif
        Only execute cmd if statement on the host return 0

    unless
        Do not execute cmd if statement on the host return 0
    newest
        run buildout in newest mode

    verbose
        run buildout in verbose mode (-vvvvv)

    use_vt
        Use the new salt VT to stream output [experimental]

    CLI Example:

    .. code-block:: bash

        salt '*' buildout.buildout /srv/mybuildout
    """
    LOG.info('Running buildout in {} ({})'.format(directory, config))
    # First make sure bootstrap.py has been run for this buildout ...
    bootstrap_status = bootstrap(
        directory,
        config=config,
        buildout_ver=buildout_ver,
        test_release=test_release,
        offline=offline,
        new_st=new_st,
        env=env,
        runas=runas,
        distribute=distribute,
        python=python,
        use_vt=use_vt,
        loglevel=loglevel,
    )
    # ... then run buildout itself and fold both results together.
    run_status = run_buildout(
        directory=directory,
        config=config,
        parts=parts,
        offline=offline,
        newest=newest,
        runas=runas,
        env=env,
        verbose=verbose,
        debug=debug,
        use_vt=use_vt,
        loglevel=loglevel,
    )
    return _merge_statuses([bootstrap_status, run_status])
directory
directory to execute in
config
buildout config to use
parts
specific buildout parts to run
runas
user used to run buildout as
env
environment variables to set when running
buildout_ver
force a specific buildout version (1 | 2)
test_release
buildout accept test release
new_st
Forcing use of setuptools >= 0.7
distribute
use distribute over setuptools if possible
offline
does buildout run offline
python
python to use
debug
run buildout with -D debug flag
onlyif
Only execute cmd if statement on the host return 0
unless
Do not execute cmd if statement on the host return 0
newest
run buildout in newest mode
verbose
run buildout in verbose mode (-vvvvv)
use_vt
Use the new salt VT to stream output [experimental]
CLI Example:
.. code-block:: bash
salt '*' buildout.buildout /srv/mybuildout | salt/modules/zcbuildout.py | buildout | Flowdalic/salt | 9,425 | python | @_salt_callback
def buildout(directory='.', config='buildout.cfg', parts=None, runas=None, env=(), buildout_ver=None, test_release=False, distribute=None, new_st=None, offline=False, newest=False, python=sys.executable, debug=False, verbose=False, onlyif=None, unless=None, use_vt=False, loglevel=None):
"\n Run buildout in a directory.\n\n directory\n directory to execute in\n\n config\n buildout config to use\n\n parts\n specific buildout parts to run\n\n runas\n user used to run buildout as\n\n env\n environment variables to set when running\n\n buildout_ver\n force a specific buildout version (1 | 2)\n\n test_release\n buildout accept test release\n\n new_st\n Forcing use of setuptools >= 0.7\n\n distribute\n use distribute over setuptools if possible\n\n offline\n does buildout run offline\n\n python\n python to use\n\n debug\n run buildout with -D debug flag\n\n onlyif\n Only execute cmd if statement on the host return 0\n\n unless\n Do not execute cmd if statement on the host return 0\n newest\n run buildout in newest mode\n\n verbose\n run buildout in verbose mode (-vvvvv)\n\n use_vt\n Use the new salt VT to stream output [experimental]\n\n CLI Example:\n\n .. code-block:: bash\n\n salt '*' buildout.buildout /srv/mybuildout\n "
LOG.info('Running buildout in {} ({})'.format(directory, config))
boot_ret = bootstrap(directory, config=config, buildout_ver=buildout_ver, test_release=test_release, offline=offline, new_st=new_st, env=env, runas=runas, distribute=distribute, python=python, use_vt=use_vt, loglevel=loglevel)
buildout_ret = run_buildout(directory=directory, config=config, parts=parts, offline=offline, newest=newest, runas=runas, env=env, verbose=verbose, debug=debug, use_vt=use_vt, loglevel=loglevel)
return _merge_statuses([boot_ret, buildout_ret]) | @_salt_callback
def buildout(directory='.', config='buildout.cfg', parts=None, runas=None, env=(), buildout_ver=None, test_release=False, distribute=None, new_st=None, offline=False, newest=False, python=sys.executable, debug=False, verbose=False, onlyif=None, unless=None, use_vt=False, loglevel=None):
"\n Run buildout in a directory.\n\n directory\n directory to execute in\n\n config\n buildout config to use\n\n parts\n specific buildout parts to run\n\n runas\n user used to run buildout as\n\n env\n environment variables to set when running\n\n buildout_ver\n force a specific buildout version (1 | 2)\n\n test_release\n buildout accept test release\n\n new_st\n Forcing use of setuptools >= 0.7\n\n distribute\n use distribute over setuptools if possible\n\n offline\n does buildout run offline\n\n python\n python to use\n\n debug\n run buildout with -D debug flag\n\n onlyif\n Only execute cmd if statement on the host return 0\n\n unless\n Do not execute cmd if statement on the host return 0\n newest\n run buildout in newest mode\n\n verbose\n run buildout in verbose mode (-vvvvv)\n\n use_vt\n Use the new salt VT to stream output [experimental]\n\n CLI Example:\n\n .. code-block:: bash\n\n salt '*' buildout.buildout /srv/mybuildout\n "
LOG.info('Running buildout in {} ({})'.format(directory, config))
boot_ret = bootstrap(directory, config=config, buildout_ver=buildout_ver, test_release=test_release, offline=offline, new_st=new_st, env=env, runas=runas, distribute=distribute, python=python, use_vt=use_vt, loglevel=loglevel)
buildout_ret = run_buildout(directory=directory, config=config, parts=parts, offline=offline, newest=newest, runas=runas, env=env, verbose=verbose, debug=debug, use_vt=use_vt, loglevel=loglevel)
return _merge_statuses([boot_ret, buildout_ret])<|docstring|>Run buildout in a directory.
directory
directory to execute in
config
buildout config to use
parts
specific buildout parts to run
runas
user used to run buildout as
env
environment variables to set when running
buildout_ver
force a specific buildout version (1 | 2)
test_release
buildout accept test release
new_st
Forcing use of setuptools >= 0.7
distribute
use distribute over setuptools if possible
offline
does buildout run offline
python
python to use
debug
run buildout with -D debug flag
onlyif
Only execute cmd if statement on the host return 0
unless
Do not execute cmd if statement on the host return 0
newest
run buildout in newest mode
verbose
run buildout in verbose mode (-vvvvv)
use_vt
Use the new salt VT to stream output [experimental]
CLI Example:
.. code-block:: bash
salt '*' buildout.buildout /srv/mybuildout<|endoftext|> |
ef575ca522123a99684741b17a216fb207f7d962b6a4f97f97318a0cc3a92585 | def spectral_radius(m):
'\n Compute spectral radius of a square 2-D tensor\n :param m: squared 2D tensor\n :return:\n '
return torch.max(torch.abs(torch.eig(m)[0])) | Compute spectral radius of a square 2-D tensor
:param m: squared 2D tensor
:return: | ESN/EchoTorch-master/echotorch/utils/utility_functions.py | spectral_radius | RogerFu18/drunken-monkey | 0 | python | def spectral_radius(m):
'\n Compute spectral radius of a square 2-D tensor\n :param m: squared 2D tensor\n :return:\n '
return torch.max(torch.abs(torch.eig(m)[0])) | def spectral_radius(m):
'\n Compute spectral radius of a square 2-D tensor\n :param m: squared 2D tensor\n :return:\n '
return torch.max(torch.abs(torch.eig(m)[0]))<|docstring|>Compute spectral radius of a square 2-D tensor
:param m: squared 2D tensor
:return:<|endoftext|> |
8255576c341ed34fef4f552ca160cf4b2e854f50058526831c6522840f36fcd8 | def deep_spectral_radius(m, leaky_rate):
"\n Compute spectral radius of a square 2-D tensor for stacked-ESN\n :param m: squared 2D tensor\n :param leaky_rate: Layer's leaky rate\n :return:\n "
return spectral_radius((((1.0 - leaky_rate) * torch.eye(m.size(0), m.size(0))) + (leaky_rate * m))) | Compute spectral radius of a square 2-D tensor for stacked-ESN
:param m: squared 2D tensor
:param leaky_rate: Layer's leaky rate
:return: | ESN/EchoTorch-master/echotorch/utils/utility_functions.py | deep_spectral_radius | RogerFu18/drunken-monkey | 0 | python | def deep_spectral_radius(m, leaky_rate):
"\n Compute spectral radius of a square 2-D tensor for stacked-ESN\n :param m: squared 2D tensor\n :param leaky_rate: Layer's leaky rate\n :return:\n "
return spectral_radius((((1.0 - leaky_rate) * torch.eye(m.size(0), m.size(0))) + (leaky_rate * m))) | def deep_spectral_radius(m, leaky_rate):
"\n Compute spectral radius of a square 2-D tensor for stacked-ESN\n :param m: squared 2D tensor\n :param leaky_rate: Layer's leaky rate\n :return:\n "
return spectral_radius((((1.0 - leaky_rate) * torch.eye(m.size(0), m.size(0))) + (leaky_rate * m)))<|docstring|>Compute spectral radius of a square 2-D tensor for stacked-ESN
:param m: squared 2D tensor
:param leaky_rate: Layer's leaky rate
:return:<|endoftext|> |
4b281c1de8fd49c30425b841beb1554bf49fda6f8e7838808ebc59a10ebbcc0b | def normalize(tensor, dim=1):
'\n Normalize a tensor on a single dimension\n :param t:\n :return:\n '
pass | Normalize a tensor on a single dimension
:param t:
:return: | ESN/EchoTorch-master/echotorch/utils/utility_functions.py | normalize | RogerFu18/drunken-monkey | 0 | python | def normalize(tensor, dim=1):
'\n Normalize a tensor on a single dimension\n :param t:\n :return:\n '
pass | def normalize(tensor, dim=1):
'\n Normalize a tensor on a single dimension\n :param t:\n :return:\n '
pass<|docstring|>Normalize a tensor on a single dimension
:param t:
:return:<|endoftext|> |
726dd955d9092fa29fad63c1cdb82ed112b5e36e1995d567adb0b591c3ac04d5 | def average_prob(tensor, dim=0):
'\n Average probabilities through time\n :param tensor:\n :param dim:\n :return:\n '
return torch.mean(tensor, dim=dim) | Average probabilities through time
:param tensor:
:param dim:
:return: | ESN/EchoTorch-master/echotorch/utils/utility_functions.py | average_prob | RogerFu18/drunken-monkey | 0 | python | def average_prob(tensor, dim=0):
'\n Average probabilities through time\n :param tensor:\n :param dim:\n :return:\n '
return torch.mean(tensor, dim=dim) | def average_prob(tensor, dim=0):
'\n Average probabilities through time\n :param tensor:\n :param dim:\n :return:\n '
return torch.mean(tensor, dim=dim)<|docstring|>Average probabilities through time
:param tensor:
:param dim:
:return:<|endoftext|> |
99478f56ebbb77b54f6c43975a79c245adcc510f874fda0577197e0910583b77 | def max_average_through_time(tensor, dim=0):
'\n Max average through time\n :param tensor:\n :param dim: Time dimension\n :return:\n '
average = torch.mean(tensor, dim=dim)
return torch.max(average, dim=dim)[1] | Max average through time
:param tensor:
:param dim: Time dimension
:return: | ESN/EchoTorch-master/echotorch/utils/utility_functions.py | max_average_through_time | RogerFu18/drunken-monkey | 0 | python | def max_average_through_time(tensor, dim=0):
'\n Max average through time\n :param tensor:\n :param dim: Time dimension\n :return:\n '
average = torch.mean(tensor, dim=dim)
return torch.max(average, dim=dim)[1] | def max_average_through_time(tensor, dim=0):
'\n Max average through time\n :param tensor:\n :param dim: Time dimension\n :return:\n '
average = torch.mean(tensor, dim=dim)
return torch.max(average, dim=dim)[1]<|docstring|>Max average through time
:param tensor:
:param dim: Time dimension
:return:<|endoftext|> |
11c2e2a93242b696a423690b72f0a516e7eb9732a0e300559550f8c37f7bee9d | def tree(self, sentence):
"\n $ python -m sagas.bots.hanlp_procs tree '苹果电脑可以运行开源阿尔法狗代码吗'\n :param sentence:\n :return:\n "
hanlp.set_nature('tech', ['苹果电脑', '阿尔法狗'])
(result, conll) = hanlp.parse_tree(sentence)
print(result)
hanlp.print_deps(conll) | $ python -m sagas.bots.hanlp_procs tree '苹果电脑可以运行开源阿尔法狗代码吗'
:param sentence:
:return: | sagas/bots/hanlp_procs.py | tree | samlet/stack | 3 | python | def tree(self, sentence):
"\n $ python -m sagas.bots.hanlp_procs tree '苹果电脑可以运行开源阿尔法狗代码吗'\n :param sentence:\n :return:\n "
hanlp.set_nature('tech', ['苹果电脑', '阿尔法狗'])
(result, conll) = hanlp.parse_tree(sentence)
print(result)
hanlp.print_deps(conll) | def tree(self, sentence):
"\n $ python -m sagas.bots.hanlp_procs tree '苹果电脑可以运行开源阿尔法狗代码吗'\n :param sentence:\n :return:\n "
hanlp.set_nature('tech', ['苹果电脑', '阿尔法狗'])
(result, conll) = hanlp.parse_tree(sentence)
print(result)
hanlp.print_deps(conll)<|docstring|>$ python -m sagas.bots.hanlp_procs tree '苹果电脑可以运行开源阿尔法狗代码吗'
:param sentence:
:return:<|endoftext|> |
a5160086adb108eb2902100ae4080ba26f254863a079118e468e1b05cbf35dcc | def backtrace(self, raw, index=0):
"\n $ python -m sagas.bots.hanlp_procs backtrace '苹果电脑可以运行开源阿尔法狗代码吗'\n :param raw:\n :param index:\n :return:\n "
sentence = hanlp.j.HanLP.parseDependency(raw)
wordArray = sentence.getWordArray()
head = wordArray[index]
while (head is not None):
if (head == hanlp.j.CoNLLWord.ROOT):
print(head.LEMMA)
else:
print(('%s --(%s)--> ' % (head.LEMMA, head.DEPREL)))
head = head.HEAD | $ python -m sagas.bots.hanlp_procs backtrace '苹果电脑可以运行开源阿尔法狗代码吗'
:param raw:
:param index:
:return: | sagas/bots/hanlp_procs.py | backtrace | samlet/stack | 3 | python | def backtrace(self, raw, index=0):
"\n $ python -m sagas.bots.hanlp_procs backtrace '苹果电脑可以运行开源阿尔法狗代码吗'\n :param raw:\n :param index:\n :return:\n "
sentence = hanlp.j.HanLP.parseDependency(raw)
wordArray = sentence.getWordArray()
head = wordArray[index]
while (head is not None):
if (head == hanlp.j.CoNLLWord.ROOT):
print(head.LEMMA)
else:
print(('%s --(%s)--> ' % (head.LEMMA, head.DEPREL)))
head = head.HEAD | def backtrace(self, raw, index=0):
"\n $ python -m sagas.bots.hanlp_procs backtrace '苹果电脑可以运行开源阿尔法狗代码吗'\n :param raw:\n :param index:\n :return:\n "
sentence = hanlp.j.HanLP.parseDependency(raw)
wordArray = sentence.getWordArray()
head = wordArray[index]
while (head is not None):
if (head == hanlp.j.CoNLLWord.ROOT):
print(head.LEMMA)
else:
print(('%s --(%s)--> ' % (head.LEMMA, head.DEPREL)))
head = head.HEAD<|docstring|>$ python -m sagas.bots.hanlp_procs backtrace '苹果电脑可以运行开源阿尔法狗代码吗'
:param raw:
:param index:
:return:<|endoftext|> |
44a6f20eb60313202722b0d5eff522ae4b55c8ad2de27d0e14823ed40ee8171e | def deps(self, raw):
"\n $ python -m sagas.bots.hanlp_procs deps '苹果电脑可以运行开源阿尔法狗代码吗'\n :param raw:\n :return:\n "
sentence = hanlp.j.HanLP.parseDependency(raw)
wordArray = sentence.getWordArray()
for word in wordArray:
print(('%s --(%s)--> %s' % (word.LEMMA, word.DEPREL, word.HEAD.LEMMA))) | $ python -m sagas.bots.hanlp_procs deps '苹果电脑可以运行开源阿尔法狗代码吗'
:param raw:
:return: | sagas/bots/hanlp_procs.py | deps | samlet/stack | 3 | python | def deps(self, raw):
"\n $ python -m sagas.bots.hanlp_procs deps '苹果电脑可以运行开源阿尔法狗代码吗'\n :param raw:\n :return:\n "
sentence = hanlp.j.HanLP.parseDependency(raw)
wordArray = sentence.getWordArray()
for word in wordArray:
print(('%s --(%s)--> %s' % (word.LEMMA, word.DEPREL, word.HEAD.LEMMA))) | def deps(self, raw):
"\n $ python -m sagas.bots.hanlp_procs deps '苹果电脑可以运行开源阿尔法狗代码吗'\n :param raw:\n :return:\n "
sentence = hanlp.j.HanLP.parseDependency(raw)
wordArray = sentence.getWordArray()
for word in wordArray:
print(('%s --(%s)--> %s' % (word.LEMMA, word.DEPREL, word.HEAD.LEMMA)))<|docstring|>$ python -m sagas.bots.hanlp_procs deps '苹果电脑可以运行开源阿尔法狗代码吗'
:param raw:
:return:<|endoftext|> |
e2b7c376cc5585a6edbd4bc68dd18a036251373e2060fefac7530a270ef28c4b | def read_data(filename):
'Attempts to strip out SMART recovery data from PDF, using tabula-py\n Returns a tuple of:\n - a list of entries, which should be "first,last,email,address,state,country"\n - a set of emails which were extracted from the PDF, but for which an entry\n was NOT extracted. The data for this entry must be manually extracted.\n '
temp = mkstemp(text=True)
convert_into(filename, temp[1], output_format='csv', lattice=True, pages='all')
os.close(temp[0])
data = []
emails = set()
good_emails = set()
with open(temp[1], 'rb') as F:
for line in F:
entry = [item.strip() for item in line.decode('UTF-8').split(',')]
for email in [item for item in entry if ('@' in item)]:
emails.add(email)
if ((len(entry) < 5) or (entry[0] == '""')):
continue
joined = ','.join(entry[:6])
good_emails.add(entry[2])
data.append(','.join(entry[:6]))
os.remove(temp[1])
return (data, emails.difference(good_emails)) | Attempts to strip out SMART recovery data from PDF, using tabula-py
Returns a tuple of:
- a list of entries, which should be "first,last,email,address,state,country"
- a set of emails which were extracted from the PDF, but for which an entry
was NOT extracted. The data for this entry must be manually extracted. | smart_extractor.py | read_data | saites/smart_extractor | 0 | python | def read_data(filename):
'Attempts to strip out SMART recovery data from PDF, using tabula-py\n Returns a tuple of:\n - a list of entries, which should be "first,last,email,address,state,country"\n - a set of emails which were extracted from the PDF, but for which an entry\n was NOT extracted. The data for this entry must be manually extracted.\n '
temp = mkstemp(text=True)
convert_into(filename, temp[1], output_format='csv', lattice=True, pages='all')
os.close(temp[0])
data = []
emails = set()
good_emails = set()
with open(temp[1], 'rb') as F:
for line in F:
entry = [item.strip() for item in line.decode('UTF-8').split(',')]
for email in [item for item in entry if ('@' in item)]:
emails.add(email)
if ((len(entry) < 5) or (entry[0] == '')):
continue
joined = ','.join(entry[:6])
good_emails.add(entry[2])
data.append(','.join(entry[:6]))
os.remove(temp[1])
return (data, emails.difference(good_emails)) | def read_data(filename):
'Attempts to strip out SMART recovery data from PDF, using tabula-py\n Returns a tuple of:\n - a list of entries, which should be "first,last,email,address,state,country"\n - a set of emails which were extracted from the PDF, but for which an entry\n was NOT extracted. The data for this entry must be manually extracted.\n '
temp = mkstemp(text=True)
convert_into(filename, temp[1], output_format='csv', lattice=True, pages='all')
os.close(temp[0])
data = []
emails = set()
good_emails = set()
with open(temp[1], 'rb') as F:
for line in F:
entry = [item.strip() for item in line.decode('UTF-8').split(',')]
for email in [item for item in entry if ('@' in item)]:
emails.add(email)
if ((len(entry) < 5) or (entry[0] == '')):
continue
joined = ','.join(entry[:6])
good_emails.add(entry[2])
data.append(','.join(entry[:6]))
os.remove(temp[1])
return (data, emails.difference(good_emails))<|docstring|>Attempts to strip out SMART recovery data from PDF, using tabula-py
Returns a tuple of:
- a list of entries, which should be "first,last,email,address,state,country"
- a set of emails which were extracted from the PDF, but for which an entry
was NOT extracted. The data for this entry must be manually extracted.<|endoftext|> |
5a99214f84e97248dde9f280351a9de89d882f22d85a165f3c63465b4da3a33b | def check_output_files(args):
"Make sure the output files don't exist, or that --force is used"
if ((not args.force) and Path(args.output).exists()):
sys.stderr.write('{} exists; use -f to force overwrite{}'.format(args.output, os.linesep))
exit(1)
if ((not args.force) and Path(args.missed).exists()):
sys.stderr.write('{} exists; use -f to force overwrite{}'.format(args.missed, os.linesep))
exit(1) | Make sure the output files don't exist, or that --force is used | smart_extractor.py | check_output_files | saites/smart_extractor | 0 | python | def check_output_files(args):
if ((not args.force) and Path(args.output).exists()):
sys.stderr.write('{} exists; use -f to force overwrite{}'.format(args.output, os.linesep))
exit(1)
if ((not args.force) and Path(args.missed).exists()):
sys.stderr.write('{} exists; use -f to force overwrite{}'.format(args.missed, os.linesep))
exit(1) | def check_output_files(args):
if ((not args.force) and Path(args.output).exists()):
sys.stderr.write('{} exists; use -f to force overwrite{}'.format(args.output, os.linesep))
exit(1)
if ((not args.force) and Path(args.missed).exists()):
sys.stderr.write('{} exists; use -f to force overwrite{}'.format(args.missed, os.linesep))
exit(1)<|docstring|>Make sure the output files don't exist, or that --force is used<|endoftext|> |
0a5716d95707000d313753081a7d08d34fae2368b451e8d812a6a6db479785b8 | def collect_filenames(args):
'Convert args to a list of input argument paths\n File paths are taken as-is. Directory paths are globbed for pdfs.\n Others are ignored.\n '
raw = [Path(p) for p in args.input]
files = [p for p in raw if p.is_file()]
for d in [p for p in raw if p.is_dir()]:
files += d.glob('*.pdf')
if (len(files) == 0):
sys.stderr.write('No pdf files found in provided directories{}'.format(os.linesep))
exit(1)
return files | Convert args to a list of input argument paths
File paths are taken as-is. Directory paths are globbed for pdfs.
Others are ignored. | smart_extractor.py | collect_filenames | saites/smart_extractor | 0 | python | def collect_filenames(args):
'Convert args to a list of input argument paths\n File paths are taken as-is. Directory paths are globbed for pdfs.\n Others are ignored.\n '
raw = [Path(p) for p in args.input]
files = [p for p in raw if p.is_file()]
for d in [p for p in raw if p.is_dir()]:
files += d.glob('*.pdf')
if (len(files) == 0):
sys.stderr.write('No pdf files found in provided directories{}'.format(os.linesep))
exit(1)
return files | def collect_filenames(args):
'Convert args to a list of input argument paths\n File paths are taken as-is. Directory paths are globbed for pdfs.\n Others are ignored.\n '
raw = [Path(p) for p in args.input]
files = [p for p in raw if p.is_file()]
for d in [p for p in raw if p.is_dir()]:
files += d.glob('*.pdf')
if (len(files) == 0):
sys.stderr.write('No pdf files found in provided directories{}'.format(os.linesep))
exit(1)
return files<|docstring|>Convert args to a list of input argument paths
File paths are taken as-is. Directory paths are globbed for pdfs.
Others are ignored.<|endoftext|> |
21323f0c59485a2cf7d0b67f5aee64d7c615aee14e39a333969e1df06ad8b6ec | def process_from_cmd(args):
'Processes multiple files, using cmdline args'
check_output_files(args)
files = collect_filenames(args)
if ((not args.quiet) and (len(files) > 1)):
print('Processing {} files...{}'.format(len(files), os.linesep))
entries = 0
missed_count = 0
with open(args.output, 'w') as output, open(args.missed, 'w') as missed:
for pdf in files:
if (not args.quiet):
print('Starting to process {}...'.format(pdf.name))
try:
(results, emails) = read_data(str(pdf.absolute()))
except Exception as e:
sys.stderr.write("Couldn't process {}: {}{}{}".format(pdf.name, e, os.linesep, os.linesep))
continue
for line in results:
output.write(line)
output.write('\n')
for e in emails:
missed.write(e)
missed.write('\n')
entries += len(results)
missed_count += len(emails)
if (not args.quiet):
print(' ...grabbed {} entries; missed {} emails{}'.format(len(results), len(emails), os.linesep))
if ((not args.quiet) and (len(files) > 1)):
print('Finished processing: collected {} entries but missed {} emails'.format(entries, missed_count)) | Processes multiple files, using cmdline args | smart_extractor.py | process_from_cmd | saites/smart_extractor | 0 | python | def process_from_cmd(args):
check_output_files(args)
files = collect_filenames(args)
if ((not args.quiet) and (len(files) > 1)):
print('Processing {} files...{}'.format(len(files), os.linesep))
entries = 0
missed_count = 0
with open(args.output, 'w') as output, open(args.missed, 'w') as missed:
for pdf in files:
if (not args.quiet):
print('Starting to process {}...'.format(pdf.name))
try:
(results, emails) = read_data(str(pdf.absolute()))
except Exception as e:
sys.stderr.write("Couldn't process {}: {}{}{}".format(pdf.name, e, os.linesep, os.linesep))
continue
for line in results:
output.write(line)
output.write('\n')
for e in emails:
missed.write(e)
missed.write('\n')
entries += len(results)
missed_count += len(emails)
if (not args.quiet):
print(' ...grabbed {} entries; missed {} emails{}'.format(len(results), len(emails), os.linesep))
if ((not args.quiet) and (len(files) > 1)):
print('Finished processing: collected {} entries but missed {} emails'.format(entries, missed_count)) | def process_from_cmd(args):
check_output_files(args)
files = collect_filenames(args)
if ((not args.quiet) and (len(files) > 1)):
print('Processing {} files...{}'.format(len(files), os.linesep))
entries = 0
missed_count = 0
with open(args.output, 'w') as output, open(args.missed, 'w') as missed:
for pdf in files:
if (not args.quiet):
print('Starting to process {}...'.format(pdf.name))
try:
(results, emails) = read_data(str(pdf.absolute()))
except Exception as e:
sys.stderr.write("Couldn't process {}: {}{}{}".format(pdf.name, e, os.linesep, os.linesep))
continue
for line in results:
output.write(line)
output.write('\n')
for e in emails:
missed.write(e)
missed.write('\n')
entries += len(results)
missed_count += len(emails)
if (not args.quiet):
print(' ...grabbed {} entries; missed {} emails{}'.format(len(results), len(emails), os.linesep))
if ((not args.quiet) and (len(files) > 1)):
print('Finished processing: collected {} entries but missed {} emails'.format(entries, missed_count))<|docstring|>Processes multiple files, using cmdline args<|endoftext|> |
4199a0630e6e662475dcd32834f70e4b1f52e8042dbfe907c7818fcb90fd4a26 | def parser_from(self, tokens):
'\n Return a Parser created from `tokens`.\n '
main_name = None
rules = {}
for token in tokens:
result = self.run(token)
if (result is not None):
(name, creator) = result
if (main_name is None):
main_name = name
rules[name] = creator
if (main_name is None):
raise ValueError('No main rule found')
return Parser(main_name, rules) | Return a Parser created from `tokens`. | parv/bnf.py | parser_from | GeeTransit/parv | 0 | python | def parser_from(self, tokens):
'\n \n '
main_name = None
rules = {}
for token in tokens:
result = self.run(token)
if (result is not None):
(name, creator) = result
if (main_name is None):
main_name = name
rules[name] = creator
if (main_name is None):
raise ValueError('No main rule found')
return Parser(main_name, rules) | def parser_from(self, tokens):
'\n \n '
main_name = None
rules = {}
for token in tokens:
result = self.run(token)
if (result is not None):
(name, creator) = result
if (main_name is None):
main_name = name
rules[name] = creator
if (main_name is None):
raise ValueError('No main rule found')
return Parser(main_name, rules)<|docstring|>Return a Parser created from `tokens`.<|endoftext|> |
891286c6da52ac85f58757a8d89311a6b5c2951fad197acf439684b12a2d6073 | def to_proto(self, experiment: BasicTraining) -> Any:
'Convert an `Experiment` to its protobuf representation.'
version = self._version_to_proto()
dataset = self._dataset_to_proto(experiment.dataset, experiment.batch_size)
network = self._model_to_proto(experiment.model)
training = self._training_to_proto(experiment.epochs, experiment.learning_rate, experiment.loss_function)
training_input = TrainingInput(version=version, dataset=dataset, network=network, training=training)
return training_input | Convert an `Experiment` to its protobuf representation. | src/aihwkit/cloud/converter/v1/training.py | to_proto | diego-plan9/aihwkit | 133 | python | def to_proto(self, experiment: BasicTraining) -> Any:
version = self._version_to_proto()
dataset = self._dataset_to_proto(experiment.dataset, experiment.batch_size)
network = self._model_to_proto(experiment.model)
training = self._training_to_proto(experiment.epochs, experiment.learning_rate, experiment.loss_function)
training_input = TrainingInput(version=version, dataset=dataset, network=network, training=training)
return training_input | def to_proto(self, experiment: BasicTraining) -> Any:
version = self._version_to_proto()
dataset = self._dataset_to_proto(experiment.dataset, experiment.batch_size)
network = self._model_to_proto(experiment.model)
training = self._training_to_proto(experiment.epochs, experiment.learning_rate, experiment.loss_function)
training_input = TrainingInput(version=version, dataset=dataset, network=network, training=training)
return training_input<|docstring|>Convert an `Experiment` to its protobuf representation.<|endoftext|> |
97da3a3d5bbb5c432f7b99ab2e78e2e36b478455da309b28235ecf10a676a7b6 | def from_proto(self, training_proto: Any) -> Any:
'Convert a protobuf representation to an `Experiment`.'
dataset = InverseMappings.datasets[training_proto.dataset.dataset_id]
model = self._model_from_proto(training_proto.network)
batch_size = training_proto.dataset.batch_size
loss_function = InverseMappings.loss_functions[training_proto.training.loss_function.id]
epochs = training_proto.training.epochs
learning_rate = training_proto.training.optimizer.arguments[0].f
return BasicTraining(dataset=dataset, model=model, batch_size=batch_size, loss_function=loss_function, epochs=epochs, learning_rate=learning_rate) | Convert a protobuf representation to an `Experiment`. | src/aihwkit/cloud/converter/v1/training.py | from_proto | diego-plan9/aihwkit | 133 | python | def from_proto(self, training_proto: Any) -> Any:
dataset = InverseMappings.datasets[training_proto.dataset.dataset_id]
model = self._model_from_proto(training_proto.network)
batch_size = training_proto.dataset.batch_size
loss_function = InverseMappings.loss_functions[training_proto.training.loss_function.id]
epochs = training_proto.training.epochs
learning_rate = training_proto.training.optimizer.arguments[0].f
return BasicTraining(dataset=dataset, model=model, batch_size=batch_size, loss_function=loss_function, epochs=epochs, learning_rate=learning_rate) | def from_proto(self, training_proto: Any) -> Any:
dataset = InverseMappings.datasets[training_proto.dataset.dataset_id]
model = self._model_from_proto(training_proto.network)
batch_size = training_proto.dataset.batch_size
loss_function = InverseMappings.loss_functions[training_proto.training.loss_function.id]
epochs = training_proto.training.epochs
learning_rate = training_proto.training.optimizer.arguments[0].f
return BasicTraining(dataset=dataset, model=model, batch_size=batch_size, loss_function=loss_function, epochs=epochs, learning_rate=learning_rate)<|docstring|>Convert a protobuf representation to an `Experiment`.<|endoftext|> |
4e63a04004ab49701dfaba1979b36621de69a1dfafafbef04c65beeb1fe2749d | def from_proto(self, results: Any) -> Any:
'Convert a result to its json representation.'
return {'version': {'schema': 1, 'opset': 1}, 'epochs': self._epochs_from_proto(results)} | Convert a result to its json representation. | src/aihwkit/cloud/converter/v1/training.py | from_proto | diego-plan9/aihwkit | 133 | python | def from_proto(self, results: Any) -> Any:
return {'version': {'schema': 1, 'opset': 1}, 'epochs': self._epochs_from_proto(results)} | def from_proto(self, results: Any) -> Any:
return {'version': {'schema': 1, 'opset': 1}, 'epochs': self._epochs_from_proto(results)}<|docstring|>Convert a result to its json representation.<|endoftext|> |
2b7d8d73972cfb91fd02f3ff8c4e84550aae5584e9ce31473092f2e76e853ba8 | @blueprint.route('/my-outlay')
@login_required
def my_outlay():
'Render page with statistics with my outlay.'
title = 'Мои расходы'
if (platform.system() == 'Windows'):
locale.setlocale(locale.LC_ALL, 'russian')
else:
locale.setlocale(locale.LC_TIME, 'ru_RU.UTF-8')
form = DateForm()
start_date = date.today().replace(day=1)
end_date = date.today()
text_date = date.today().strftime('%B %Y')
query_sum = query_sum_purchase(current_user, start_date, end_date)
query_purchase = query_purchase_category(current_user, start_date, end_date)
query_receipt = query_receipt_subcategory(current_user, start_date, end_date)
return render_template('statistic/my_outlay.html', form=form, page_title=title, query_purchase=query_sum, query_category=query_purchase, query_subcategory=query_receipt, text_date=text_date) | Render page with statistics with my outlay. | webapp/statistic/views.py | my_outlay | sanchos2/nautilus | 0 | python | @blueprint.route('/my-outlay')
@login_required
def my_outlay():
title = 'Мои расходы'
if (platform.system() == 'Windows'):
locale.setlocale(locale.LC_ALL, 'russian')
else:
locale.setlocale(locale.LC_TIME, 'ru_RU.UTF-8')
form = DateForm()
start_date = date.today().replace(day=1)
end_date = date.today()
text_date = date.today().strftime('%B %Y')
query_sum = query_sum_purchase(current_user, start_date, end_date)
query_purchase = query_purchase_category(current_user, start_date, end_date)
query_receipt = query_receipt_subcategory(current_user, start_date, end_date)
return render_template('statistic/my_outlay.html', form=form, page_title=title, query_purchase=query_sum, query_category=query_purchase, query_subcategory=query_receipt, text_date=text_date) | @blueprint.route('/my-outlay')
@login_required
def my_outlay():
title = 'Мои расходы'
if (platform.system() == 'Windows'):
locale.setlocale(locale.LC_ALL, 'russian')
else:
locale.setlocale(locale.LC_TIME, 'ru_RU.UTF-8')
form = DateForm()
start_date = date.today().replace(day=1)
end_date = date.today()
text_date = date.today().strftime('%B %Y')
query_sum = query_sum_purchase(current_user, start_date, end_date)
query_purchase = query_purchase_category(current_user, start_date, end_date)
query_receipt = query_receipt_subcategory(current_user, start_date, end_date)
return render_template('statistic/my_outlay.html', form=form, page_title=title, query_purchase=query_sum, query_category=query_purchase, query_subcategory=query_receipt, text_date=text_date)<|docstring|>Render page with statistics with my outlay.<|endoftext|> |
692a36e2b21cc9f5586564c1c31410cf35bef488f25eaa60a305d34e47df8843 | @blueprint.route('/process-outlay', methods=['POST'])
def process_outlay():
'Date selection process.'
title = 'Мои расходы'
form = DateForm()
if form.validate_on_submit():
start_date = form.start_date.data
end_date = form.end_date.data
if (start_date > end_date):
flash('Дата начала не может быть больше даты конца периода')
return redirect(url_for('statistic.my_outlay'))
query_sum = query_sum_purchase(current_user, start_date, end_date)
query_purchase = query_purchase_category(current_user, start_date, end_date)
query_receipt = query_receipt_subcategory(current_user, start_date, end_date)
flash('Данные обновлены')
return render_template('statistic/my_outlay.html', form=form, page_title=title, query_purchase=query_sum, query_category=query_purchase, query_subcategory=query_receipt, text_date='выбранный период')
for (field, errors) in form.errors.items():
for error in errors:
flash(f'Ошибка в поле "{getattr(form, field).label.text}": - {error}')
return redirect(url_for('statistic.my_outlay')) | Date selection process. | webapp/statistic/views.py | process_outlay | sanchos2/nautilus | 0 | python | @blueprint.route('/process-outlay', methods=['POST'])
def process_outlay():
title = 'Мои расходы'
form = DateForm()
if form.validate_on_submit():
start_date = form.start_date.data
end_date = form.end_date.data
if (start_date > end_date):
flash('Дата начала не может быть больше даты конца периода')
return redirect(url_for('statistic.my_outlay'))
query_sum = query_sum_purchase(current_user, start_date, end_date)
query_purchase = query_purchase_category(current_user, start_date, end_date)
query_receipt = query_receipt_subcategory(current_user, start_date, end_date)
flash('Данные обновлены')
return render_template('statistic/my_outlay.html', form=form, page_title=title, query_purchase=query_sum, query_category=query_purchase, query_subcategory=query_receipt, text_date='выбранный период')
for (field, errors) in form.errors.items():
for error in errors:
flash(f'Ошибка в поле "{getattr(form, field).label.text}": - {error}')
return redirect(url_for('statistic.my_outlay')) | @blueprint.route('/process-outlay', methods=['POST'])
def process_outlay():
title = 'Мои расходы'
form = DateForm()
if form.validate_on_submit():
start_date = form.start_date.data
end_date = form.end_date.data
if (start_date > end_date):
flash('Дата начала не может быть больше даты конца периода')
return redirect(url_for('statistic.my_outlay'))
query_sum = query_sum_purchase(current_user, start_date, end_date)
query_purchase = query_purchase_category(current_user, start_date, end_date)
query_receipt = query_receipt_subcategory(current_user, start_date, end_date)
flash('Данные обновлены')
return render_template('statistic/my_outlay.html', form=form, page_title=title, query_purchase=query_sum, query_category=query_purchase, query_subcategory=query_receipt, text_date='выбранный период')
for (field, errors) in form.errors.items():
for error in errors:
flash(f'Ошибка в поле "{getattr(form, field).label.text}": - {error}')
return redirect(url_for('statistic.my_outlay'))<|docstring|>Date selection process.<|endoftext|> |
c201982cf929f22a7139b53d8214ca7e7a8ab786b6f35a9c3fd1940bd2a6a20f | def solid_density(self, locs):
'Compute solid_density field at locations.\n '
(npts, dim) = locs.shape
solid_density = (rho_s * numpy.ones((1, npts, 1), dtype=numpy.float64))
return solid_density | Compute solid_density field at locations. | tests/fullscale/poroelasticity/terzaghi/terzaghi_soln.py | solid_density | reykoki/pylith | 93 | python | def solid_density(self, locs):
'\n '
(npts, dim) = locs.shape
solid_density = (rho_s * numpy.ones((1, npts, 1), dtype=numpy.float64))
return solid_density | def solid_density(self, locs):
'\n '
(npts, dim) = locs.shape
solid_density = (rho_s * numpy.ones((1, npts, 1), dtype=numpy.float64))
return solid_density<|docstring|>Compute solid_density field at locations.<|endoftext|> |
bfc5fdaf2a0a5ec68c2ced16acb11106b90830e4b2fc91a12cae056514cae9a3 | def fluid_density(self, locs):
'Compute fluid density field at locations.\n '
(npts, dim) = locs.shape
fluid_density = (rho_f * numpy.ones((1, npts, 1), dtype=numpy.float64))
return fluid_density | Compute fluid density field at locations. | tests/fullscale/poroelasticity/terzaghi/terzaghi_soln.py | fluid_density | reykoki/pylith | 93 | python | def fluid_density(self, locs):
'\n '
(npts, dim) = locs.shape
fluid_density = (rho_f * numpy.ones((1, npts, 1), dtype=numpy.float64))
return fluid_density | def fluid_density(self, locs):
'\n '
(npts, dim) = locs.shape
fluid_density = (rho_f * numpy.ones((1, npts, 1), dtype=numpy.float64))
return fluid_density<|docstring|>Compute fluid density field at locations.<|endoftext|> |
54b134a2e8fd2b48f94c4a7a57ed0be4198f0089851d0882d2545b4974ad592a | def shear_modulus(self, locs):
'Compute shear modulus field at locations.\n '
(npts, dim) = locs.shape
shear_modulus = (G * numpy.ones((1, npts, 1), dtype=numpy.float64))
return shear_modulus | Compute shear modulus field at locations. | tests/fullscale/poroelasticity/terzaghi/terzaghi_soln.py | shear_modulus | reykoki/pylith | 93 | python | def shear_modulus(self, locs):
'\n '
(npts, dim) = locs.shape
shear_modulus = (G * numpy.ones((1, npts, 1), dtype=numpy.float64))
return shear_modulus | def shear_modulus(self, locs):
'\n '
(npts, dim) = locs.shape
shear_modulus = (G * numpy.ones((1, npts, 1), dtype=numpy.float64))
return shear_modulus<|docstring|>Compute shear modulus field at locations.<|endoftext|> |
4eb1c9c952ca5a8a5dd1c2e89f40065dac26c160674c67c98c424765799be1db | def porosity(self, locs):
'Compute porosity field at locations.\n '
(npts, dim) = locs.shape
porosity = (phi * numpy.ones((1, npts, 1), dtype=numpy.float64))
return porosity | Compute porosity field at locations. | tests/fullscale/poroelasticity/terzaghi/terzaghi_soln.py | porosity | reykoki/pylith | 93 | python | def porosity(self, locs):
'\n '
(npts, dim) = locs.shape
porosity = (phi * numpy.ones((1, npts, 1), dtype=numpy.float64))
return porosity | def porosity(self, locs):
'\n '
(npts, dim) = locs.shape
porosity = (phi * numpy.ones((1, npts, 1), dtype=numpy.float64))
return porosity<|docstring|>Compute porosity field at locations.<|endoftext|> |
901db7a6d2a90db73f54bebfc40a9455c25ff09e90b3bdbe5d82aee0ba10a944 | def fluid_viscosity(self, locs):
'Compute fluid_viscosity field at locations.\n '
(npts, dim) = locs.shape
fluid_viscosity = (mu_f * numpy.ones((1, npts, 1), dtype=numpy.float64))
return fluid_viscosity | Compute fluid_viscosity field at locations. | tests/fullscale/poroelasticity/terzaghi/terzaghi_soln.py | fluid_viscosity | reykoki/pylith | 93 | python | def fluid_viscosity(self, locs):
'\n '
(npts, dim) = locs.shape
fluid_viscosity = (mu_f * numpy.ones((1, npts, 1), dtype=numpy.float64))
return fluid_viscosity | def fluid_viscosity(self, locs):
'\n '
(npts, dim) = locs.shape
fluid_viscosity = (mu_f * numpy.ones((1, npts, 1), dtype=numpy.float64))
return fluid_viscosity<|docstring|>Compute fluid_viscosity field at locations.<|endoftext|> |
962135b1306ed2bec534201fc9ca331cc4a7b2420c20e4d7da4701a025689d86 | def drained_bulk_modulus(self, locs):
'Compute undrained bulk modulus field at locations.\n '
(npts, dim) = locs.shape
undrained_bulk_modulus = (K_d * numpy.ones((1, npts, 1), dtype=numpy.float64))
return undrained_bulk_modulus | Compute undrained bulk modulus field at locations. | tests/fullscale/poroelasticity/terzaghi/terzaghi_soln.py | drained_bulk_modulus | reykoki/pylith | 93 | python | def drained_bulk_modulus(self, locs):
'\n '
(npts, dim) = locs.shape
undrained_bulk_modulus = (K_d * numpy.ones((1, npts, 1), dtype=numpy.float64))
return undrained_bulk_modulus | def drained_bulk_modulus(self, locs):
'\n '
(npts, dim) = locs.shape
undrained_bulk_modulus = (K_d * numpy.ones((1, npts, 1), dtype=numpy.float64))
return undrained_bulk_modulus<|docstring|>Compute undrained bulk modulus field at locations.<|endoftext|> |
f1e967edab9d386abd3ccac67650def1477abbd6fca7d1a3c44135772432a7ec | def biot_coefficient(self, locs):
'Compute biot coefficient field at locations.\n '
(npts, dim) = locs.shape
biot_coefficient = (alpha * numpy.ones((1, npts, 1), dtype=numpy.float64))
return biot_coefficient | Compute biot coefficient field at locations. | tests/fullscale/poroelasticity/terzaghi/terzaghi_soln.py | biot_coefficient | reykoki/pylith | 93 | python | def biot_coefficient(self, locs):
'\n '
(npts, dim) = locs.shape
biot_coefficient = (alpha * numpy.ones((1, npts, 1), dtype=numpy.float64))
return biot_coefficient | def biot_coefficient(self, locs):
'\n '
(npts, dim) = locs.shape
biot_coefficient = (alpha * numpy.ones((1, npts, 1), dtype=numpy.float64))
return biot_coefficient<|docstring|>Compute biot coefficient field at locations.<|endoftext|> |
7960c741edd9ef5d84d55c8dc04d94672670884d451cd6299d8d184cb79a7d35 | def biot_modulus(self, locs):
'Compute biot modulus field at locations.\n '
(npts, dim) = locs.shape
biot_modulus = (M * numpy.ones((1, npts, 1), dtype=numpy.float64))
return biot_modulus | Compute biot modulus field at locations. | tests/fullscale/poroelasticity/terzaghi/terzaghi_soln.py | biot_modulus | reykoki/pylith | 93 | python | def biot_modulus(self, locs):
'\n '
(npts, dim) = locs.shape
biot_modulus = (M * numpy.ones((1, npts, 1), dtype=numpy.float64))
return biot_modulus | def biot_modulus(self, locs):
'\n '
(npts, dim) = locs.shape
biot_modulus = (M * numpy.ones((1, npts, 1), dtype=numpy.float64))
return biot_modulus<|docstring|>Compute biot modulus field at locations.<|endoftext|> |
6a42d8e94903a9e8acedfcebc61d142b9ee731dbdb91a88cd00864d62e7e96d4 | def isotropic_permeability(self, locs):
'Compute isotropic permeability field at locations.\n '
(npts, dim) = locs.shape
isotropic_permeability = (k * numpy.ones((1, npts, 1), dtype=numpy.float64))
return isotropic_permeability | Compute isotropic permeability field at locations. | tests/fullscale/poroelasticity/terzaghi/terzaghi_soln.py | isotropic_permeability | reykoki/pylith | 93 | python | def isotropic_permeability(self, locs):
'\n '
(npts, dim) = locs.shape
isotropic_permeability = (k * numpy.ones((1, npts, 1), dtype=numpy.float64))
return isotropic_permeability | def isotropic_permeability(self, locs):
'\n '
(npts, dim) = locs.shape
isotropic_permeability = (k * numpy.ones((1, npts, 1), dtype=numpy.float64))
return isotropic_permeability<|docstring|>Compute isotropic permeability field at locations.<|endoftext|> |
fc31c05c56f2db8c9cbcf597f0dec75ed41dd2f4e1d79e6712942896de209b87 | def displacement(self, locs):
'Compute displacement field at locations.\n '
(npts, dim) = locs.shape
ntpts = tsteps.shape[0]
displacement = numpy.zeros((ntpts, npts, dim), dtype=numpy.float64)
z = locs[(:, 1)]
t_track = 0
z_star = (1 - (z / L))
for t in tsteps:
if (t < 0.0):
displacement[(0, :, 1)] = ((((P_0 * L) * (1.0 - (2.0 * nu_u))) / ((2.0 * G) * (1.0 - nu_u))) * (1.0 - z_star))
else:
t_star = ((c * t) / ((2 * L) ** 2))
displacement[(t_track, :, 1)] = (((((P_0 * L) * (1.0 - (2.0 * nu_u))) / ((2.0 * G) * (1.0 - nu_u))) * (1.0 - z_star)) + ((((P_0 * L) * (nu_u - nu)) / (((2.0 * G) * (1.0 - nu_u)) * (1.0 - nu))) * self.F2(z_star, t_star)))
t_track += 1
return displacement | Compute displacement field at locations. | tests/fullscale/poroelasticity/terzaghi/terzaghi_soln.py | displacement | reykoki/pylith | 93 | python | def displacement(self, locs):
'\n '
(npts, dim) = locs.shape
ntpts = tsteps.shape[0]
displacement = numpy.zeros((ntpts, npts, dim), dtype=numpy.float64)
z = locs[(:, 1)]
t_track = 0
z_star = (1 - (z / L))
for t in tsteps:
if (t < 0.0):
displacement[(0, :, 1)] = ((((P_0 * L) * (1.0 - (2.0 * nu_u))) / ((2.0 * G) * (1.0 - nu_u))) * (1.0 - z_star))
else:
t_star = ((c * t) / ((2 * L) ** 2))
displacement[(t_track, :, 1)] = (((((P_0 * L) * (1.0 - (2.0 * nu_u))) / ((2.0 * G) * (1.0 - nu_u))) * (1.0 - z_star)) + ((((P_0 * L) * (nu_u - nu)) / (((2.0 * G) * (1.0 - nu_u)) * (1.0 - nu))) * self.F2(z_star, t_star)))
t_track += 1
return displacement | def displacement(self, locs):
'\n '
(npts, dim) = locs.shape
ntpts = tsteps.shape[0]
displacement = numpy.zeros((ntpts, npts, dim), dtype=numpy.float64)
z = locs[(:, 1)]
t_track = 0
z_star = (1 - (z / L))
for t in tsteps:
if (t < 0.0):
displacement[(0, :, 1)] = ((((P_0 * L) * (1.0 - (2.0 * nu_u))) / ((2.0 * G) * (1.0 - nu_u))) * (1.0 - z_star))
else:
t_star = ((c * t) / ((2 * L) ** 2))
displacement[(t_track, :, 1)] = (((((P_0 * L) * (1.0 - (2.0 * nu_u))) / ((2.0 * G) * (1.0 - nu_u))) * (1.0 - z_star)) + ((((P_0 * L) * (nu_u - nu)) / (((2.0 * G) * (1.0 - nu_u)) * (1.0 - nu))) * self.F2(z_star, t_star)))
t_track += 1
return displacement<|docstring|>Compute displacement field at locations.<|endoftext|> |
c64a5aecfcc401f00ba938f7ce1701e4f493672fd2f7ec3ee75388a7c26013de | def pressure(self, locs):
'Compute pressure field at locations.\n '
(npts, dim) = locs.shape
ntpts = tsteps.shape[0]
pressure = numpy.zeros((ntpts, npts, 1), dtype=numpy.float64)
z = locs[(:, 1)]
t_track = 0
for t in tsteps:
z_star = (1 - (z / L))
t_star = ((c * t) / (4.0 * (L ** 2)))
pressure[(t_track, :, 0)] = ((- ((P_0 * eta) / (G * S))) * self.F1(z_star, t_star))
t_track += 1
return pressure | Compute pressure field at locations. | tests/fullscale/poroelasticity/terzaghi/terzaghi_soln.py | pressure | reykoki/pylith | 93 | python | def pressure(self, locs):
'\n '
(npts, dim) = locs.shape
ntpts = tsteps.shape[0]
pressure = numpy.zeros((ntpts, npts, 1), dtype=numpy.float64)
z = locs[(:, 1)]
t_track = 0
for t in tsteps:
z_star = (1 - (z / L))
t_star = ((c * t) / (4.0 * (L ** 2)))
pressure[(t_track, :, 0)] = ((- ((P_0 * eta) / (G * S))) * self.F1(z_star, t_star))
t_track += 1
return pressure | def pressure(self, locs):
'\n '
(npts, dim) = locs.shape
ntpts = tsteps.shape[0]
pressure = numpy.zeros((ntpts, npts, 1), dtype=numpy.float64)
z = locs[(:, 1)]
t_track = 0
for t in tsteps:
z_star = (1 - (z / L))
t_star = ((c * t) / (4.0 * (L ** 2)))
pressure[(t_track, :, 0)] = ((- ((P_0 * eta) / (G * S))) * self.F1(z_star, t_star))
t_track += 1
return pressure<|docstring|>Compute pressure field at locations.<|endoftext|> |
670338b4e6d1d915280fcada816f20dbf46dfe32fc40e35d14d480d95342e246 | def trace_strain(self, locs):
'Compute trace strain field at locations.\n '
(npts, dim) = locs.shape
ntpts = tsteps.shape[0]
trace_strain = numpy.zeros((ntpts, npts, 1), dtype=numpy.float64)
z = locs[(:, 1)]
t_track = 0
for t in tsteps:
z_star = (z / L)
t_star = ((c * t) / (4 * (L ** 2)))
trace_strain[(t_track, :, 0)] = ((- (((P_0 * L) * (1.0 - (2.0 * nu_u))) / (((2.0 * G) * (1.0 - nu_u)) * L))) + ((((P_0 * L) * (nu_u - nu)) / (((2.0 * G) * (1.0 - nu_u)) * (1.0 - nu))) * self.F3(z_star, t_star)))
t_track += 1
return trace_strain | Compute trace strain field at locations. | tests/fullscale/poroelasticity/terzaghi/terzaghi_soln.py | trace_strain | reykoki/pylith | 93 | python | def trace_strain(self, locs):
'\n '
(npts, dim) = locs.shape
ntpts = tsteps.shape[0]
trace_strain = numpy.zeros((ntpts, npts, 1), dtype=numpy.float64)
z = locs[(:, 1)]
t_track = 0
for t in tsteps:
z_star = (z / L)
t_star = ((c * t) / (4 * (L ** 2)))
trace_strain[(t_track, :, 0)] = ((- (((P_0 * L) * (1.0 - (2.0 * nu_u))) / (((2.0 * G) * (1.0 - nu_u)) * L))) + ((((P_0 * L) * (nu_u - nu)) / (((2.0 * G) * (1.0 - nu_u)) * (1.0 - nu))) * self.F3(z_star, t_star)))
t_track += 1
return trace_strain | def trace_strain(self, locs):
'\n '
(npts, dim) = locs.shape
ntpts = tsteps.shape[0]
trace_strain = numpy.zeros((ntpts, npts, 1), dtype=numpy.float64)
z = locs[(:, 1)]
t_track = 0
for t in tsteps:
z_star = (z / L)
t_star = ((c * t) / (4 * (L ** 2)))
trace_strain[(t_track, :, 0)] = ((- (((P_0 * L) * (1.0 - (2.0 * nu_u))) / (((2.0 * G) * (1.0 - nu_u)) * L))) + ((((P_0 * L) * (nu_u - nu)) / (((2.0 * G) * (1.0 - nu_u)) * (1.0 - nu))) * self.F3(z_star, t_star)))
t_track += 1
return trace_strain<|docstring|>Compute trace strain field at locations.<|endoftext|> |
413e03aaa930b2b369eb90d2e267e7d7d85b467292016177ccb7147d1b92f3be | def strain(self, locs):
'Compute strain field at locations.\n '
(npts, dim) = locs.shape
ntpts = tsteps.shape[0]
e_xx = 0.0
e_yy = self.trace_strain(locs)
e_zz = 0.0
e_xy = 0.0
strain = numpy.zeros((ntpts, npts, self.TENSOR_SIZE), dtype=numpy.float64)
strain[(:, :, 0)] = exx
strain[(:, :, 1)] = eyy
strain[(:, :, 2)] = ezz
strain[(:, :, 3)] = exy
return strain | Compute strain field at locations. | tests/fullscale/poroelasticity/terzaghi/terzaghi_soln.py | strain | reykoki/pylith | 93 | python | def strain(self, locs):
'\n '
(npts, dim) = locs.shape
ntpts = tsteps.shape[0]
e_xx = 0.0
e_yy = self.trace_strain(locs)
e_zz = 0.0
e_xy = 0.0
strain = numpy.zeros((ntpts, npts, self.TENSOR_SIZE), dtype=numpy.float64)
strain[(:, :, 0)] = exx
strain[(:, :, 1)] = eyy
strain[(:, :, 2)] = ezz
strain[(:, :, 3)] = exy
return strain | def strain(self, locs):
'\n '
(npts, dim) = locs.shape
ntpts = tsteps.shape[0]
e_xx = 0.0
e_yy = self.trace_strain(locs)
e_zz = 0.0
e_xy = 0.0
strain = numpy.zeros((ntpts, npts, self.TENSOR_SIZE), dtype=numpy.float64)
strain[(:, :, 0)] = exx
strain[(:, :, 1)] = eyy
strain[(:, :, 2)] = ezz
strain[(:, :, 3)] = exy
return strain<|docstring|>Compute strain field at locations.<|endoftext|> |
22000287209c6ec366834cfc25bcebbbc4aa69ceea972f456aa2bc97f639a8b0 | def stress(self, locs):
'Compute stress field at locations.\n '
(npts, dim) = locs.shape
ntpts = tsteps.shape[0]
poisson_ratio = (((3 * K_d) - (2 * G)) / (2 * ((3 * K_d) + G)))
trace_strain = self.trace_strain(locs)
pressure = self.pressure(locs)
e_xx = 0.0
e_yy = self.trace_strain(locs)
e_xy = 0.0
stress = numpy.zeros((ntpts, npts, self.TENSOR_SIZE), dtype=numpy.float64)
stress[(:, :, 0)] = ((((((2 * G) * poisson_ratio) / (1 - (2 * poisson_ratio))) * trace_strain) + ((2 * G) * e_xx)) - (alpha * pressure))
stress[(:, :, 1)] = ((((((2 * G) * poisson_ratio) / (1 - (2 * poisson_ratio))) * trace_strain) + ((2 * G) * e_yy)) - (alpha * pressure))
stress[(:, :, 2)] = (((((2 * G) * poisson_ratio) / (1 - (2 * poisson_ratio))) * trace_strain) - (alpha * pressure))
stress[(:, :, 3)] = ((2 * G) * e_xy)
return stress | Compute stress field at locations. | tests/fullscale/poroelasticity/terzaghi/terzaghi_soln.py | stress | reykoki/pylith | 93 | python | def stress(self, locs):
'\n '
(npts, dim) = locs.shape
ntpts = tsteps.shape[0]
poisson_ratio = (((3 * K_d) - (2 * G)) / (2 * ((3 * K_d) + G)))
trace_strain = self.trace_strain(locs)
pressure = self.pressure(locs)
e_xx = 0.0
e_yy = self.trace_strain(locs)
e_xy = 0.0
stress = numpy.zeros((ntpts, npts, self.TENSOR_SIZE), dtype=numpy.float64)
stress[(:, :, 0)] = ((((((2 * G) * poisson_ratio) / (1 - (2 * poisson_ratio))) * trace_strain) + ((2 * G) * e_xx)) - (alpha * pressure))
stress[(:, :, 1)] = ((((((2 * G) * poisson_ratio) / (1 - (2 * poisson_ratio))) * trace_strain) + ((2 * G) * e_yy)) - (alpha * pressure))
stress[(:, :, 2)] = (((((2 * G) * poisson_ratio) / (1 - (2 * poisson_ratio))) * trace_strain) - (alpha * pressure))
stress[(:, :, 3)] = ((2 * G) * e_xy)
return stress | def stress(self, locs):
'\n '
(npts, dim) = locs.shape
ntpts = tsteps.shape[0]
poisson_ratio = (((3 * K_d) - (2 * G)) / (2 * ((3 * K_d) + G)))
trace_strain = self.trace_strain(locs)
pressure = self.pressure(locs)
e_xx = 0.0
e_yy = self.trace_strain(locs)
e_xy = 0.0
stress = numpy.zeros((ntpts, npts, self.TENSOR_SIZE), dtype=numpy.float64)
stress[(:, :, 0)] = ((((((2 * G) * poisson_ratio) / (1 - (2 * poisson_ratio))) * trace_strain) + ((2 * G) * e_xx)) - (alpha * pressure))
stress[(:, :, 1)] = ((((((2 * G) * poisson_ratio) / (1 - (2 * poisson_ratio))) * trace_strain) + ((2 * G) * e_yy)) - (alpha * pressure))
stress[(:, :, 2)] = (((((2 * G) * poisson_ratio) / (1 - (2 * poisson_ratio))) * trace_strain) - (alpha * pressure))
stress[(:, :, 3)] = ((2 * G) * e_xy)
return stress<|docstring|>Compute stress field at locations.<|endoftext|> |
171f5b8e72bde5807aff0d988bea19cbb360edb08db16324a45f284197d9caf3 | def y_pos_neu(self, locs):
'Compute initial traction at locations.\n '
(npts, dim) = locs.shape
traction = numpy.zeros((1, npts, self.SPACE_DIM), dtype=numpy.float64)
traction[(:, :, 0)] = 0.0
traction[(:, :, 1)] = P_0
return traction | Compute initial traction at locations. | tests/fullscale/poroelasticity/terzaghi/terzaghi_soln.py | y_pos_neu | reykoki/pylith | 93 | python | def y_pos_neu(self, locs):
'\n '
(npts, dim) = locs.shape
traction = numpy.zeros((1, npts, self.SPACE_DIM), dtype=numpy.float64)
traction[(:, :, 0)] = 0.0
traction[(:, :, 1)] = P_0
return traction | def y_pos_neu(self, locs):
'\n '
(npts, dim) = locs.shape
traction = numpy.zeros((1, npts, self.SPACE_DIM), dtype=numpy.float64)
traction[(:, :, 0)] = 0.0
traction[(:, :, 1)] = P_0
return traction<|docstring|>Compute initial traction at locations.<|endoftext|> |
f82b8f0c35e6b2c58c12adccbaa7c2344d2d3e1a257a86e6b5c35abb947da145 | def initial_displacement(self, locs):
'Compute initial displacement at locations\n '
(npts, dim) = locs.shape
displacement = numpy.zeros((1, npts, dim), dtype=numpy.float64)
z = locs[(:, 1)]
z_star = (1 - (z / L))
displacement[(0, :, 1)] = ((((P_0 * L) * (1.0 - (2.0 * nu_u))) / ((2.0 * G) * (1.0 - nu_u))) * (1.0 - z_star))
return displacement | Compute initial displacement at locations | tests/fullscale/poroelasticity/terzaghi/terzaghi_soln.py | initial_displacement | reykoki/pylith | 93 | python | def initial_displacement(self, locs):
'\n '
(npts, dim) = locs.shape
displacement = numpy.zeros((1, npts, dim), dtype=numpy.float64)
z = locs[(:, 1)]
z_star = (1 - (z / L))
displacement[(0, :, 1)] = ((((P_0 * L) * (1.0 - (2.0 * nu_u))) / ((2.0 * G) * (1.0 - nu_u))) * (1.0 - z_star))
return displacement | def initial_displacement(self, locs):
'\n '
(npts, dim) = locs.shape
displacement = numpy.zeros((1, npts, dim), dtype=numpy.float64)
z = locs[(:, 1)]
z_star = (1 - (z / L))
displacement[(0, :, 1)] = ((((P_0 * L) * (1.0 - (2.0 * nu_u))) / ((2.0 * G) * (1.0 - nu_u))) * (1.0 - z_star))
return displacement<|docstring|>Compute initial displacement at locations<|endoftext|> |
5a1d11c0f9ff247d35e67a8b84358d191178ac31bc2c0866c59bfe5d97574c7c | def initial_pressure(self, locs):
'Compute initial pressure at locations\n '
(npts, dim) = locs.shape
pressure = numpy.zeros((1, npts), dtype=numpy.float64)
z = locs[(:, 1)]
pressure[(0, :)] = (((- P_0) * eta) / (G * S))
return pressure | Compute initial pressure at locations | tests/fullscale/poroelasticity/terzaghi/terzaghi_soln.py | initial_pressure | reykoki/pylith | 93 | python | def initial_pressure(self, locs):
'\n '
(npts, dim) = locs.shape
pressure = numpy.zeros((1, npts), dtype=numpy.float64)
z = locs[(:, 1)]
pressure[(0, :)] = (((- P_0) * eta) / (G * S))
return pressure | def initial_pressure(self, locs):
'\n '
(npts, dim) = locs.shape
pressure = numpy.zeros((1, npts), dtype=numpy.float64)
z = locs[(:, 1)]
pressure[(0, :)] = (((- P_0) * eta) / (G * S))
return pressure<|docstring|>Compute initial pressure at locations<|endoftext|> |
30e8a785e6c333ae4e10127c734210390dfce8e1d413d87a4de83da95fed53e0 | def initial_trace_strain(self, locs):
'Compute initial trace strain field at locations.\n '
(npts, dim) = locs.shape
trace_strain = numpy.zeros((1, npts), dtype=numpy.float64)
z = locs[(:, 1)]
z_star = (z / L)
trace_strain[(0, :)] = ((- (P_0 * (1.0 - (2.0 * nu_u)))) / ((2.0 * G) * (1.0 - nu_u)))
return trace_strain | Compute initial trace strain field at locations. | tests/fullscale/poroelasticity/terzaghi/terzaghi_soln.py | initial_trace_strain | reykoki/pylith | 93 | python | def initial_trace_strain(self, locs):
'\n '
(npts, dim) = locs.shape
trace_strain = numpy.zeros((1, npts), dtype=numpy.float64)
z = locs[(:, 1)]
z_star = (z / L)
trace_strain[(0, :)] = ((- (P_0 * (1.0 - (2.0 * nu_u)))) / ((2.0 * G) * (1.0 - nu_u)))
return trace_strain | def initial_trace_strain(self, locs):
'\n '
(npts, dim) = locs.shape
trace_strain = numpy.zeros((1, npts), dtype=numpy.float64)
z = locs[(:, 1)]
z_star = (z / L)
trace_strain[(0, :)] = ((- (P_0 * (1.0 - (2.0 * nu_u)))) / ((2.0 * G) * (1.0 - nu_u)))
return trace_strain<|docstring|>Compute initial trace strain field at locations.<|endoftext|> |
678d3f38a50c5701e8a8ca8e2b4c0eaa8c46d587deb3c63773757ae3382e2308 | def main():
'公開鍵と秘密鍵を生成'
(p, q) = rsa.calc_p_q(30, 3000)
(public_key, private_key) = rsa.generate_keys(p, q)
plain_text = 'この文字列を暗号化/復号するよ♪'
print(('p = %d, q = %d' % (p, q)))
print(('秘密鍵:(E = %d, N = %d)' % (public_key[0], public_key[1])))
print(('公開鍵:(D = %d, N = %d)' % (private_key[0], private_key[1])))
print(('平文:%s' % plain_text))
'暗号文を生成する'
encrypted_text = rsa.encrypt_from_text(plain_text, public_key)
print(('暗号文:%s' % rsa.sanitize(encrypted_text)))
'暗号文から平文を復元する'
decrypted_text = rsa.decrypt_to_text(encrypted_text, private_key)
print(('平文:%s' % decrypted_text))
if (plain_text == decrypted_text):
print('Success.')
else:
print('Failed.') | 公開鍵と秘密鍵を生成 | rsa_test.py | main | NobuyukiInoue/Example_RSA | 0 | python | def main():
(p, q) = rsa.calc_p_q(30, 3000)
(public_key, private_key) = rsa.generate_keys(p, q)
plain_text = 'この文字列を暗号化/復号するよ♪'
print(('p = %d, q = %d' % (p, q)))
print(('秘密鍵:(E = %d, N = %d)' % (public_key[0], public_key[1])))
print(('公開鍵:(D = %d, N = %d)' % (private_key[0], private_key[1])))
print(('平文:%s' % plain_text))
'暗号文を生成する'
encrypted_text = rsa.encrypt_from_text(plain_text, public_key)
print(('暗号文:%s' % rsa.sanitize(encrypted_text)))
'暗号文から平文を復元する'
decrypted_text = rsa.decrypt_to_text(encrypted_text, private_key)
print(('平文:%s' % decrypted_text))
if (plain_text == decrypted_text):
print('Success.')
else:
print('Failed.') | def main():
(p, q) = rsa.calc_p_q(30, 3000)
(public_key, private_key) = rsa.generate_keys(p, q)
plain_text = 'この文字列を暗号化/復号するよ♪'
print(('p = %d, q = %d' % (p, q)))
print(('秘密鍵:(E = %d, N = %d)' % (public_key[0], public_key[1])))
print(('公開鍵:(D = %d, N = %d)' % (private_key[0], private_key[1])))
print(('平文:%s' % plain_text))
'暗号文を生成する'
encrypted_text = rsa.encrypt_from_text(plain_text, public_key)
print(('暗号文:%s' % rsa.sanitize(encrypted_text)))
'暗号文から平文を復元する'
decrypted_text = rsa.decrypt_to_text(encrypted_text, private_key)
print(('平文:%s' % decrypted_text))
if (plain_text == decrypted_text):
print('Success.')
else:
print('Failed.')<|docstring|>公開鍵と秘密鍵を生成<|endoftext|> |
a15699404e2f953ec9b1a3943fd020c0812b0046c04e9282c36e7cbc267eadb0 | def std_plus(x):
'\n Calculates the standard deviation of the values in a numeric vector. \n It is capable of computing standard deviation when the vector contains missing values \n and inifinite values by automatically removing them.\n\n parameters:\n -----------\n x (array_like) a numeric vector\n\n Return:\n ------\n sd_value (float): the value of standard deviation of the input data\n '
if isinstance(x, (list, tuple, np.ndarray)):
x = np.array(x)
length = len(x)
if (length == 1):
return 0.0
elif (length == 0):
raise TypeError('The input cannot be empty.')
x = x[(~ np.isinf(x))]
x = x[(~ np.isnan(x))]
if (x.size == 0):
return np.nan
sd_value = np.sqrt(np.mean((abs((x - x.mean())) ** 2)))
return sd_value | Calculates the standard deviation of the values in a numeric vector.
It is capable of computing standard deviation when the vector contains missing values
and inifinite values by automatically removing them.
parameters:
-----------
x (array_like) a numeric vector
Return:
------
sd_value (float): the value of standard deviation of the input data | CorrPy/std_plus.py | std_plus | K3ra-y/DSCI524_lab03_group15 | 0 | python | def std_plus(x):
'\n Calculates the standard deviation of the values in a numeric vector. \n It is capable of computing standard deviation when the vector contains missing values \n and inifinite values by automatically removing them.\n\n parameters:\n -----------\n x (array_like) a numeric vector\n\n Return:\n ------\n sd_value (float): the value of standard deviation of the input data\n '
if isinstance(x, (list, tuple, np.ndarray)):
x = np.array(x)
length = len(x)
if (length == 1):
return 0.0
elif (length == 0):
raise TypeError('The input cannot be empty.')
x = x[(~ np.isinf(x))]
x = x[(~ np.isnan(x))]
if (x.size == 0):
return np.nan
sd_value = np.sqrt(np.mean((abs((x - x.mean())) ** 2)))
return sd_value | def std_plus(x):
'\n Calculates the standard deviation of the values in a numeric vector. \n It is capable of computing standard deviation when the vector contains missing values \n and inifinite values by automatically removing them.\n\n parameters:\n -----------\n x (array_like) a numeric vector\n\n Return:\n ------\n sd_value (float): the value of standard deviation of the input data\n '
if isinstance(x, (list, tuple, np.ndarray)):
x = np.array(x)
length = len(x)
if (length == 1):
return 0.0
elif (length == 0):
raise TypeError('The input cannot be empty.')
x = x[(~ np.isinf(x))]
x = x[(~ np.isnan(x))]
if (x.size == 0):
return np.nan
sd_value = np.sqrt(np.mean((abs((x - x.mean())) ** 2)))
return sd_value<|docstring|>Calculates the standard deviation of the values in a numeric vector.
It is capable of computing standard deviation when the vector contains missing values
and inifinite values by automatically removing them.
parameters:
-----------
x (array_like) a numeric vector
Return:
------
sd_value (float): the value of standard deviation of the input data<|endoftext|> |
3e8983bad9641660280d7f4c076bd3c341444ede875cdb03e85fc1116b2fbdd7 | def compute_feedback_gradients(self, h_previous_corrupted, h_current_reconstructed, h_previous, sigma):
'\n Compute the gradient of the feedback weights and bias, based on the\n difference reconstruction loss (p16 in theoretical framework). The\n gradients are saved in the .grad attribute of the feedback weights and\n feedback bias.\n Args:\n h_previous_corrupted (torch.Tensor): the initial corrupted sample\n of the previous layer that was propagated forward to the output.\n h_current_reconstructed (torch.Tensor): The reconstruction of the\n corrupted sample (by propagating it backward again in a DTP-like\n fashion to this layer)\n h_previous (torch.Tensor): the initial non-corrupted sample of the\n previous layer\n '
self.set_feedback_requires_grad(True)
h_previous_reconstructed = self.backward(h_current_reconstructed, h_previous, self.activations)
if (sigma <= 0):
raise ValueError('Sigma should be greater than zero when using thedifference reconstruction loss. Given sigma = {}'.format(sigma))
scale = (1 / (sigma ** 2))
reconstruction_loss = (scale * F.mse_loss(h_previous_corrupted, h_previous_reconstructed))
self.save_feedback_gradients(reconstruction_loss)
self.set_feedback_requires_grad(False) | Compute the gradient of the feedback weights and bias, based on the
difference reconstruction loss (p16 in theoretical framework). The
gradients are saved in the .grad attribute of the feedback weights and
feedback bias.
Args:
h_previous_corrupted (torch.Tensor): the initial corrupted sample
of the previous layer that was propagated forward to the output.
h_current_reconstructed (torch.Tensor): The reconstruction of the
corrupted sample (by propagating it backward again in a DTP-like
fashion to this layer)
h_previous (torch.Tensor): the initial non-corrupted sample of the
previous layer | lib/dtpdrl_layers.py | compute_feedback_gradients | scspinney/theoretical_framework_for_target_propagation | 15 | python | def compute_feedback_gradients(self, h_previous_corrupted, h_current_reconstructed, h_previous, sigma):
'\n Compute the gradient of the feedback weights and bias, based on the\n difference reconstruction loss (p16 in theoretical framework). The\n gradients are saved in the .grad attribute of the feedback weights and\n feedback bias.\n Args:\n h_previous_corrupted (torch.Tensor): the initial corrupted sample\n of the previous layer that was propagated forward to the output.\n h_current_reconstructed (torch.Tensor): The reconstruction of the\n corrupted sample (by propagating it backward again in a DTP-like\n fashion to this layer)\n h_previous (torch.Tensor): the initial non-corrupted sample of the\n previous layer\n '
self.set_feedback_requires_grad(True)
h_previous_reconstructed = self.backward(h_current_reconstructed, h_previous, self.activations)
if (sigma <= 0):
raise ValueError('Sigma should be greater than zero when using thedifference reconstruction loss. Given sigma = {}'.format(sigma))
scale = (1 / (sigma ** 2))
reconstruction_loss = (scale * F.mse_loss(h_previous_corrupted, h_previous_reconstructed))
self.save_feedback_gradients(reconstruction_loss)
self.set_feedback_requires_grad(False) | def compute_feedback_gradients(self, h_previous_corrupted, h_current_reconstructed, h_previous, sigma):
'\n Compute the gradient of the feedback weights and bias, based on the\n difference reconstruction loss (p16 in theoretical framework). The\n gradients are saved in the .grad attribute of the feedback weights and\n feedback bias.\n Args:\n h_previous_corrupted (torch.Tensor): the initial corrupted sample\n of the previous layer that was propagated forward to the output.\n h_current_reconstructed (torch.Tensor): The reconstruction of the\n corrupted sample (by propagating it backward again in a DTP-like\n fashion to this layer)\n h_previous (torch.Tensor): the initial non-corrupted sample of the\n previous layer\n '
self.set_feedback_requires_grad(True)
h_previous_reconstructed = self.backward(h_current_reconstructed, h_previous, self.activations)
if (sigma <= 0):
raise ValueError('Sigma should be greater than zero when using thedifference reconstruction loss. Given sigma = {}'.format(sigma))
scale = (1 / (sigma ** 2))
reconstruction_loss = (scale * F.mse_loss(h_previous_corrupted, h_previous_reconstructed))
self.save_feedback_gradients(reconstruction_loss)
self.set_feedback_requires_grad(False)<|docstring|>Compute the gradient of the feedback weights and bias, based on the
difference reconstruction loss (p16 in theoretical framework). The
gradients are saved in the .grad attribute of the feedback weights and
feedback bias.
Args:
h_previous_corrupted (torch.Tensor): the initial corrupted sample
of the previous layer that was propagated forward to the output.
h_current_reconstructed (torch.Tensor): The reconstruction of the
corrupted sample (by propagating it backward again in a DTP-like
fashion to this layer)
h_previous (torch.Tensor): the initial non-corrupted sample of the
previous layer<|endoftext|> |
7504182cdbeba6fada96a787f407b210acc1d873a83de660087e06f418e056e2 | def detect_red_light(I):
'\n This function takes a numpy array <I> and returns a list <bounding_boxes>.\n The list <bounding_boxes> should have one element for each red light in the\n image. Each element of <bounding_boxes> should itself be a list, containing\n four integers that specify a bounding box: the row and column index of the\n top left corner and the row and column index of the bottom right corner (in\n that order). See the code below for an example.\n\n Note that PIL loads images in RGB order, so:\n I[:,:,0] is the red channel\n I[:,:,1] is the green channel\n I[:,:,2] is the blue channel\n '
bounding_boxes = []
k = Image.open('redlight.jpg')
k = np.asarray(k)
k = (k / np.linalg.norm(k))
(box_height, box_width, _) = k.shape
(height, width, _) = I.shape
for i in range(((height - box_height) + 1)):
for j in range(((width - box_width) + 1)):
tmp = I[(i:(i + box_height), j:(j + box_width), :)]
tmp = (tmp / np.linalg.norm(tmp))
val = np.sum((tmp * k))
if (val > 0.9):
tl_row = i
tl_col = j
br_row = (i + box_height)
br_col = (j + box_width)
bounding_boxes.append([tl_row, tl_col, br_row, br_col])
for i in range(len(bounding_boxes)):
assert (len(bounding_boxes[i]) == 4)
return bounding_boxes | This function takes a numpy array <I> and returns a list <bounding_boxes>.
The list <bounding_boxes> should have one element for each red light in the
image. Each element of <bounding_boxes> should itself be a list, containing
four integers that specify a bounding box: the row and column index of the
top left corner and the row and column index of the bottom right corner (in
that order). See the code below for an example.
Note that PIL loads images in RGB order, so:
I[:,:,0] is the red channel
I[:,:,1] is the green channel
I[:,:,2] is the blue channel | run_predictions.py | detect_red_light | Fishmoon5/caltech-ee148-spring2020-hw01 | 0 | python | def detect_red_light(I):
'\n This function takes a numpy array <I> and returns a list <bounding_boxes>.\n The list <bounding_boxes> should have one element for each red light in the\n image. Each element of <bounding_boxes> should itself be a list, containing\n four integers that specify a bounding box: the row and column index of the\n top left corner and the row and column index of the bottom right corner (in\n that order). See the code below for an example.\n\n Note that PIL loads images in RGB order, so:\n I[:,:,0] is the red channel\n I[:,:,1] is the green channel\n I[:,:,2] is the blue channel\n '
bounding_boxes = []
k = Image.open('redlight.jpg')
k = np.asarray(k)
k = (k / np.linalg.norm(k))
(box_height, box_width, _) = k.shape
(height, width, _) = I.shape
for i in range(((height - box_height) + 1)):
for j in range(((width - box_width) + 1)):
tmp = I[(i:(i + box_height), j:(j + box_width), :)]
tmp = (tmp / np.linalg.norm(tmp))
val = np.sum((tmp * k))
if (val > 0.9):
tl_row = i
tl_col = j
br_row = (i + box_height)
br_col = (j + box_width)
bounding_boxes.append([tl_row, tl_col, br_row, br_col])
for i in range(len(bounding_boxes)):
assert (len(bounding_boxes[i]) == 4)
return bounding_boxes | def detect_red_light(I):
'\n This function takes a numpy array <I> and returns a list <bounding_boxes>.\n The list <bounding_boxes> should have one element for each red light in the\n image. Each element of <bounding_boxes> should itself be a list, containing\n four integers that specify a bounding box: the row and column index of the\n top left corner and the row and column index of the bottom right corner (in\n that order). See the code below for an example.\n\n Note that PIL loads images in RGB order, so:\n I[:,:,0] is the red channel\n I[:,:,1] is the green channel\n I[:,:,2] is the blue channel\n '
bounding_boxes = []
k = Image.open('redlight.jpg')
k = np.asarray(k)
k = (k / np.linalg.norm(k))
(box_height, box_width, _) = k.shape
(height, width, _) = I.shape
for i in range(((height - box_height) + 1)):
for j in range(((width - box_width) + 1)):
tmp = I[(i:(i + box_height), j:(j + box_width), :)]
tmp = (tmp / np.linalg.norm(tmp))
val = np.sum((tmp * k))
if (val > 0.9):
tl_row = i
tl_col = j
br_row = (i + box_height)
br_col = (j + box_width)
bounding_boxes.append([tl_row, tl_col, br_row, br_col])
for i in range(len(bounding_boxes)):
assert (len(bounding_boxes[i]) == 4)
return bounding_boxes<|docstring|>This function takes a numpy array <I> and returns a list <bounding_boxes>.
The list <bounding_boxes> should have one element for each red light in the
image. Each element of <bounding_boxes> should itself be a list, containing
four integers that specify a bounding box: the row and column index of the
top left corner and the row and column index of the bottom right corner (in
that order). See the code below for an example.
Note that PIL loads images in RGB order, so:
I[:,:,0] is the red channel
I[:,:,1] is the green channel
I[:,:,2] is the blue channel<|endoftext|> |
79f0330888d116125d930df08a5edbb7074753e35ec68b5201cd0575641841ca | def load_tsv(tokenizer_name, data_file, max_seq_len, s1_idx=0, s2_idx=1, targ_idx=2, idx_idx=None, targ_map=None, targ_fn=None, skip_rows=0, delimiter='\t', filter_idx=None, filter_value=None):
'Load a tsv\n\n To load only rows that have a certain value for a certain column, like genre in MNLI, set filter_idx and filter_value.'
(sent1s, sent2s, targs, idxs) = ([], [], [], [])
with codecs.open(data_file, 'r', 'utf-8', errors='ignore') as data_fh:
for _ in range(skip_rows):
data_fh.readline()
for (row_idx, row) in enumerate(data_fh):
row = row.strip().split(delimiter)
if (filter_idx and (row[filter_idx] != filter_value)):
continue
sent1 = process_sentence(tokenizer_name, row[s1_idx], max_seq_len)
if (((targ_idx is not None) and (not row[targ_idx])) or (not len(sent1))):
continue
if (targ_idx is not None):
if (targ_map is not None):
targ = targ_map[row[targ_idx]]
elif (targ_fn is not None):
targ = targ_fn(row[targ_idx])
else:
targ = int(row[targ_idx])
else:
targ = 0
if (s2_idx is not None):
sent2 = process_sentence(tokenizer_name, row[s2_idx], max_seq_len)
if (not len(sent2)):
continue
sent2s.append(sent2)
if (idx_idx is not None):
idx = int(row[idx_idx])
idxs.append(idx)
sent1s.append(sent1)
targs.append(targ)
if (idx_idx is not None):
return (sent1s, sent2s, targs, idxs)
else:
return (sent1s, sent2s, targs) | Load a tsv
To load only rows that have a certain value for a certain column, like genre in MNLI, set filter_idx and filter_value. | src/utils/data_loaders.py | load_tsv | cjmay/jiant | 0 | python | def load_tsv(tokenizer_name, data_file, max_seq_len, s1_idx=0, s2_idx=1, targ_idx=2, idx_idx=None, targ_map=None, targ_fn=None, skip_rows=0, delimiter='\t', filter_idx=None, filter_value=None):
'Load a tsv\n\n To load only rows that have a certain value for a certain column, like genre in MNLI, set filter_idx and filter_value.'
(sent1s, sent2s, targs, idxs) = ([], [], [], [])
with codecs.open(data_file, 'r', 'utf-8', errors='ignore') as data_fh:
for _ in range(skip_rows):
data_fh.readline()
for (row_idx, row) in enumerate(data_fh):
row = row.strip().split(delimiter)
if (filter_idx and (row[filter_idx] != filter_value)):
continue
sent1 = process_sentence(tokenizer_name, row[s1_idx], max_seq_len)
if (((targ_idx is not None) and (not row[targ_idx])) or (not len(sent1))):
continue
if (targ_idx is not None):
if (targ_map is not None):
targ = targ_map[row[targ_idx]]
elif (targ_fn is not None):
targ = targ_fn(row[targ_idx])
else:
targ = int(row[targ_idx])
else:
targ = 0
if (s2_idx is not None):
sent2 = process_sentence(tokenizer_name, row[s2_idx], max_seq_len)
if (not len(sent2)):
continue
sent2s.append(sent2)
if (idx_idx is not None):
idx = int(row[idx_idx])
idxs.append(idx)
sent1s.append(sent1)
targs.append(targ)
if (idx_idx is not None):
return (sent1s, sent2s, targs, idxs)
else:
return (sent1s, sent2s, targs) | def load_tsv(tokenizer_name, data_file, max_seq_len, s1_idx=0, s2_idx=1, targ_idx=2, idx_idx=None, targ_map=None, targ_fn=None, skip_rows=0, delimiter='\t', filter_idx=None, filter_value=None):
'Load a tsv\n\n To load only rows that have a certain value for a certain column, like genre in MNLI, set filter_idx and filter_value.'
(sent1s, sent2s, targs, idxs) = ([], [], [], [])
with codecs.open(data_file, 'r', 'utf-8', errors='ignore') as data_fh:
for _ in range(skip_rows):
data_fh.readline()
for (row_idx, row) in enumerate(data_fh):
row = row.strip().split(delimiter)
if (filter_idx and (row[filter_idx] != filter_value)):
continue
sent1 = process_sentence(tokenizer_name, row[s1_idx], max_seq_len)
if (((targ_idx is not None) and (not row[targ_idx])) or (not len(sent1))):
continue
if (targ_idx is not None):
if (targ_map is not None):
targ = targ_map[row[targ_idx]]
elif (targ_fn is not None):
targ = targ_fn(row[targ_idx])
else:
targ = int(row[targ_idx])
else:
targ = 0
if (s2_idx is not None):
sent2 = process_sentence(tokenizer_name, row[s2_idx], max_seq_len)
if (not len(sent2)):
continue
sent2s.append(sent2)
if (idx_idx is not None):
idx = int(row[idx_idx])
idxs.append(idx)
sent1s.append(sent1)
targs.append(targ)
if (idx_idx is not None):
return (sent1s, sent2s, targs, idxs)
else:
return (sent1s, sent2s, targs)<|docstring|>Load a tsv
To load only rows that have a certain value for a certain column, like genre in MNLI, set filter_idx and filter_value.<|endoftext|> |
85d93b7dc4dd01d81c88d7d90a9e632d057c275223532c1a51fad1fde67935a5 | def load_diagnostic_tsv(tokenizer_name, data_file, max_seq_len, s1_idx=0, s2_idx=1, targ_idx=2, idx_idx=None, targ_map=None, targ_fn=None, skip_rows=0, delimiter='\t', filter_idx=None, filter_value=None):
"Load a tsv\n\n It loads the data with all it's attributes from diagnostic dataset for MNLI"
(sent1s, sent2s, targs, idxs, lex_sem, pr_ar_str, logic, knowledge) = ([], [], [], [], [], [], [], [])
ix_to_lex_sem_dic = {}
ix_to_pr_ar_str_dic = {}
ix_to_logic_dic = {}
ix_to_knowledge_dic = {}
lex_sem_to_ix_dic = {}
pr_ar_str_to_ix_dic = {}
logic_to_ix_dic = {}
knowledge_to_ix_dic = {}
def tags_to_ixs(tags, tag_to_ix_dict, ix_to_tag_dic):
splitted_tags = tags.split(';')
indexes = []
for t in splitted_tags:
if (t == ''):
continue
if (t in tag_to_ix_dict):
indexes.append(tag_to_ix_dict[t])
else:
highest_ix = len(tag_to_ix_dict)
new_index = (highest_ix + 1)
tag_to_ix_dict[t] = new_index
ix_to_tag_dic[new_index] = t
indexes.append(new_index)
return indexes
with codecs.open(data_file, 'r', 'utf-8', errors='ignore') as data_fh:
for _ in range(skip_rows):
data_fh.readline()
for (row_idx, row) in enumerate(data_fh):
row = row.rstrip().split(delimiter)
sent1 = process_sentence(tokenizer_name, row[s1_idx], max_seq_len)
if (targ_map is not None):
targ = targ_map[row[targ_idx]]
elif (targ_fn is not None):
targ = targ_fn(row[targ_idx])
else:
targ = int(row[targ_idx])
sent2 = process_sentence(tokenizer_name, row[s2_idx], max_seq_len)
sent2s.append(sent2)
sent1s.append(sent1)
targs.append(targ)
lex_sem_sample = tags_to_ixs(row[0], lex_sem_to_ix_dic, ix_to_lex_sem_dic)
pr_ar_str_sample = tags_to_ixs(row[1], pr_ar_str_to_ix_dic, ix_to_pr_ar_str_dic)
logic_sample = tags_to_ixs(row[2], logic_to_ix_dic, ix_to_logic_dic)
knowledge_sample = tags_to_ixs(row[3], knowledge_to_ix_dic, ix_to_knowledge_dic)
idxs.append(row_idx)
lex_sem.append(lex_sem_sample)
pr_ar_str.append(pr_ar_str_sample)
logic.append(logic_sample)
knowledge.append(knowledge_sample)
ix_to_lex_sem_dic[0] = 'missing'
ix_to_pr_ar_str_dic[0] = 'missing'
ix_to_logic_dic[0] = 'missing'
ix_to_knowledge_dic[0] = 'missing'
lex_sem_to_ix_dic['missing'] = 0
pr_ar_str_to_ix_dic['missing'] = 0
logic_to_ix_dic['missing'] = 0
knowledge_to_ix_dic['missing'] = 0
return {'sents1': sent1s, 'sents2': sent2s, 'targs': targs, 'idxs': idxs, 'lex_sem': lex_sem, 'pr_ar_str': pr_ar_str, 'logic': logic, 'knowledge': knowledge, 'ix_to_lex_sem_dic': ix_to_lex_sem_dic, 'ix_to_pr_ar_str_dic': ix_to_pr_ar_str_dic, 'ix_to_logic_dic': ix_to_logic_dic, 'ix_to_knowledge_dic': ix_to_knowledge_dic} | Load a tsv
It loads the data with all it's attributes from diagnostic dataset for MNLI | src/utils/data_loaders.py | load_diagnostic_tsv | cjmay/jiant | 0 | python | def load_diagnostic_tsv(tokenizer_name, data_file, max_seq_len, s1_idx=0, s2_idx=1, targ_idx=2, idx_idx=None, targ_map=None, targ_fn=None, skip_rows=0, delimiter='\t', filter_idx=None, filter_value=None):
"Load a tsv\n\n It loads the data with all it's attributes from diagnostic dataset for MNLI"
(sent1s, sent2s, targs, idxs, lex_sem, pr_ar_str, logic, knowledge) = ([], [], [], [], [], [], [], [])
ix_to_lex_sem_dic = {}
ix_to_pr_ar_str_dic = {}
ix_to_logic_dic = {}
ix_to_knowledge_dic = {}
lex_sem_to_ix_dic = {}
pr_ar_str_to_ix_dic = {}
logic_to_ix_dic = {}
knowledge_to_ix_dic = {}
def tags_to_ixs(tags, tag_to_ix_dict, ix_to_tag_dic):
splitted_tags = tags.split(';')
indexes = []
for t in splitted_tags:
if (t == ):
continue
if (t in tag_to_ix_dict):
indexes.append(tag_to_ix_dict[t])
else:
highest_ix = len(tag_to_ix_dict)
new_index = (highest_ix + 1)
tag_to_ix_dict[t] = new_index
ix_to_tag_dic[new_index] = t
indexes.append(new_index)
return indexes
with codecs.open(data_file, 'r', 'utf-8', errors='ignore') as data_fh:
for _ in range(skip_rows):
data_fh.readline()
for (row_idx, row) in enumerate(data_fh):
row = row.rstrip().split(delimiter)
sent1 = process_sentence(tokenizer_name, row[s1_idx], max_seq_len)
if (targ_map is not None):
targ = targ_map[row[targ_idx]]
elif (targ_fn is not None):
targ = targ_fn(row[targ_idx])
else:
targ = int(row[targ_idx])
sent2 = process_sentence(tokenizer_name, row[s2_idx], max_seq_len)
sent2s.append(sent2)
sent1s.append(sent1)
targs.append(targ)
lex_sem_sample = tags_to_ixs(row[0], lex_sem_to_ix_dic, ix_to_lex_sem_dic)
pr_ar_str_sample = tags_to_ixs(row[1], pr_ar_str_to_ix_dic, ix_to_pr_ar_str_dic)
logic_sample = tags_to_ixs(row[2], logic_to_ix_dic, ix_to_logic_dic)
knowledge_sample = tags_to_ixs(row[3], knowledge_to_ix_dic, ix_to_knowledge_dic)
idxs.append(row_idx)
lex_sem.append(lex_sem_sample)
pr_ar_str.append(pr_ar_str_sample)
logic.append(logic_sample)
knowledge.append(knowledge_sample)
ix_to_lex_sem_dic[0] = 'missing'
ix_to_pr_ar_str_dic[0] = 'missing'
ix_to_logic_dic[0] = 'missing'
ix_to_knowledge_dic[0] = 'missing'
lex_sem_to_ix_dic['missing'] = 0
pr_ar_str_to_ix_dic['missing'] = 0
logic_to_ix_dic['missing'] = 0
knowledge_to_ix_dic['missing'] = 0
return {'sents1': sent1s, 'sents2': sent2s, 'targs': targs, 'idxs': idxs, 'lex_sem': lex_sem, 'pr_ar_str': pr_ar_str, 'logic': logic, 'knowledge': knowledge, 'ix_to_lex_sem_dic': ix_to_lex_sem_dic, 'ix_to_pr_ar_str_dic': ix_to_pr_ar_str_dic, 'ix_to_logic_dic': ix_to_logic_dic, 'ix_to_knowledge_dic': ix_to_knowledge_dic} | def load_diagnostic_tsv(tokenizer_name, data_file, max_seq_len, s1_idx=0, s2_idx=1, targ_idx=2, idx_idx=None, targ_map=None, targ_fn=None, skip_rows=0, delimiter='\t', filter_idx=None, filter_value=None):
"Load a tsv\n\n It loads the data with all it's attributes from diagnostic dataset for MNLI"
(sent1s, sent2s, targs, idxs, lex_sem, pr_ar_str, logic, knowledge) = ([], [], [], [], [], [], [], [])
ix_to_lex_sem_dic = {}
ix_to_pr_ar_str_dic = {}
ix_to_logic_dic = {}
ix_to_knowledge_dic = {}
lex_sem_to_ix_dic = {}
pr_ar_str_to_ix_dic = {}
logic_to_ix_dic = {}
knowledge_to_ix_dic = {}
def tags_to_ixs(tags, tag_to_ix_dict, ix_to_tag_dic):
splitted_tags = tags.split(';')
indexes = []
for t in splitted_tags:
if (t == ):
continue
if (t in tag_to_ix_dict):
indexes.append(tag_to_ix_dict[t])
else:
highest_ix = len(tag_to_ix_dict)
new_index = (highest_ix + 1)
tag_to_ix_dict[t] = new_index
ix_to_tag_dic[new_index] = t
indexes.append(new_index)
return indexes
with codecs.open(data_file, 'r', 'utf-8', errors='ignore') as data_fh:
for _ in range(skip_rows):
data_fh.readline()
for (row_idx, row) in enumerate(data_fh):
row = row.rstrip().split(delimiter)
sent1 = process_sentence(tokenizer_name, row[s1_idx], max_seq_len)
if (targ_map is not None):
targ = targ_map[row[targ_idx]]
elif (targ_fn is not None):
targ = targ_fn(row[targ_idx])
else:
targ = int(row[targ_idx])
sent2 = process_sentence(tokenizer_name, row[s2_idx], max_seq_len)
sent2s.append(sent2)
sent1s.append(sent1)
targs.append(targ)
lex_sem_sample = tags_to_ixs(row[0], lex_sem_to_ix_dic, ix_to_lex_sem_dic)
pr_ar_str_sample = tags_to_ixs(row[1], pr_ar_str_to_ix_dic, ix_to_pr_ar_str_dic)
logic_sample = tags_to_ixs(row[2], logic_to_ix_dic, ix_to_logic_dic)
knowledge_sample = tags_to_ixs(row[3], knowledge_to_ix_dic, ix_to_knowledge_dic)
idxs.append(row_idx)
lex_sem.append(lex_sem_sample)
pr_ar_str.append(pr_ar_str_sample)
logic.append(logic_sample)
knowledge.append(knowledge_sample)
ix_to_lex_sem_dic[0] = 'missing'
ix_to_pr_ar_str_dic[0] = 'missing'
ix_to_logic_dic[0] = 'missing'
ix_to_knowledge_dic[0] = 'missing'
lex_sem_to_ix_dic['missing'] = 0
pr_ar_str_to_ix_dic['missing'] = 0
logic_to_ix_dic['missing'] = 0
knowledge_to_ix_dic['missing'] = 0
return {'sents1': sent1s, 'sents2': sent2s, 'targs': targs, 'idxs': idxs, 'lex_sem': lex_sem, 'pr_ar_str': pr_ar_str, 'logic': logic, 'knowledge': knowledge, 'ix_to_lex_sem_dic': ix_to_lex_sem_dic, 'ix_to_pr_ar_str_dic': ix_to_pr_ar_str_dic, 'ix_to_logic_dic': ix_to_logic_dic, 'ix_to_knowledge_dic': ix_to_knowledge_dic}<|docstring|>Load a tsv
It loads the data with all it's attributes from diagnostic dataset for MNLI<|endoftext|> |
238406f497b576036c4f0296c26232521e66c2709c617f7371d358f65228fdd4 | def process_sentence(tokenizer_name, sent, max_seq_len):
'process a sentence '
max_seq_len -= 2
assert (max_seq_len > 0), 'Max sequence length should be at least 2!'
tokenizer = get_tokenizer(tokenizer_name)
if tokenizer_name.startswith('bert-'):
(sos_tok, eos_tok) = (BERT_SEP_TOK, BERT_CLS_TOK)
else:
(sos_tok, eos_tok) = (SOS_TOK, EOS_TOK)
if isinstance(sent, str):
return (([sos_tok] + tokenizer.tokenize(sent)[:max_seq_len]) + [eos_tok])
elif isinstance(sent, list):
assert isinstance(sent[0], str), 'Invalid sentence found!'
return (([sos_tok] + sent[:max_seq_len]) + [eos_tok]) | process a sentence | src/utils/data_loaders.py | process_sentence | cjmay/jiant | 0 | python | def process_sentence(tokenizer_name, sent, max_seq_len):
' '
max_seq_len -= 2
assert (max_seq_len > 0), 'Max sequence length should be at least 2!'
tokenizer = get_tokenizer(tokenizer_name)
if tokenizer_name.startswith('bert-'):
(sos_tok, eos_tok) = (BERT_SEP_TOK, BERT_CLS_TOK)
else:
(sos_tok, eos_tok) = (SOS_TOK, EOS_TOK)
if isinstance(sent, str):
return (([sos_tok] + tokenizer.tokenize(sent)[:max_seq_len]) + [eos_tok])
elif isinstance(sent, list):
assert isinstance(sent[0], str), 'Invalid sentence found!'
return (([sos_tok] + sent[:max_seq_len]) + [eos_tok]) | def process_sentence(tokenizer_name, sent, max_seq_len):
' '
max_seq_len -= 2
assert (max_seq_len > 0), 'Max sequence length should be at least 2!'
tokenizer = get_tokenizer(tokenizer_name)
if tokenizer_name.startswith('bert-'):
(sos_tok, eos_tok) = (BERT_SEP_TOK, BERT_CLS_TOK)
else:
(sos_tok, eos_tok) = (SOS_TOK, EOS_TOK)
if isinstance(sent, str):
return (([sos_tok] + tokenizer.tokenize(sent)[:max_seq_len]) + [eos_tok])
elif isinstance(sent, list):
assert isinstance(sent[0], str), 'Invalid sentence found!'
return (([sos_tok] + sent[:max_seq_len]) + [eos_tok])<|docstring|>process a sentence<|endoftext|> |
5d40a3c9df5d42ca7d04c75d2f44baa185052da25083faa70de25a00c190e823 | def _handle_retry(exc, no_of_retries):
'Handle errors which qualify for retry'
retry = False
no_of_retries += 1
sleep_time = _get_sleep_time_seconds(no_of_retries)
msg = f'Got error: {exc} Retrying in {sleep_time} secs, attempt {no_of_retries}'
if isinstance(exc, httpx.HTTPStatusError):
if (exc.response.status_code in RETRY_HTTP_CODES):
retry = True
elif (exc.response.status_code == httpx.codes.UNAUTHORIZED):
refresh_tokens((no_of_retries > 1))
retry = True
sleep_time = 0.1
msg = 'Invalid access token, re-authenticating.'
elif isinstance(exc, RETRY_ERRORS):
retry = True
if retry:
logger.warning(msg)
return (retry, no_of_retries, sleep_time) | Handle errors which qualify for retry | src/pytailor/common/request_handler.py | _handle_retry | entailor/pytailor | 9 | python | def _handle_retry(exc, no_of_retries):
retry = False
no_of_retries += 1
sleep_time = _get_sleep_time_seconds(no_of_retries)
msg = f'Got error: {exc} Retrying in {sleep_time} secs, attempt {no_of_retries}'
if isinstance(exc, httpx.HTTPStatusError):
if (exc.response.status_code in RETRY_HTTP_CODES):
retry = True
elif (exc.response.status_code == httpx.codes.UNAUTHORIZED):
refresh_tokens((no_of_retries > 1))
retry = True
sleep_time = 0.1
msg = 'Invalid access token, re-authenticating.'
elif isinstance(exc, RETRY_ERRORS):
retry = True
if retry:
logger.warning(msg)
return (retry, no_of_retries, sleep_time) | def _handle_retry(exc, no_of_retries):
retry = False
no_of_retries += 1
sleep_time = _get_sleep_time_seconds(no_of_retries)
msg = f'Got error: {exc} Retrying in {sleep_time} secs, attempt {no_of_retries}'
if isinstance(exc, httpx.HTTPStatusError):
if (exc.response.status_code in RETRY_HTTP_CODES):
retry = True
elif (exc.response.status_code == httpx.codes.UNAUTHORIZED):
refresh_tokens((no_of_retries > 1))
retry = True
sleep_time = 0.1
msg = 'Invalid access token, re-authenticating.'
elif isinstance(exc, RETRY_ERRORS):
retry = True
if retry:
logger.warning(msg)
return (retry, no_of_retries, sleep_time)<|docstring|>Handle errors which qualify for retry<|endoftext|> |
185e8690fe89ca68af7db02277326c757366ca00ff48fe935135f117618c6eb2 | def _handle_exception(exc, return_none_on, error_msg):
'Handle errors which do not qualify for retry'
if isinstance(exc, httpx.HTTPStatusError):
if (exc.response.status_code in return_none_on):
return
error_msg += f' The response from the backend was: {exc}.'
try:
error_msg += f" Details: {exc.response.json()['detail']}"
except:
pass
raise BackendResponseError(error_msg)
elif isinstance(exc, httpx.RequestError):
error_msg += f' {exc}'
raise BackendResponseError(error_msg)
else:
raise | Handle errors which do not qualify for retry | src/pytailor/common/request_handler.py | _handle_exception | entailor/pytailor | 9 | python | def _handle_exception(exc, return_none_on, error_msg):
if isinstance(exc, httpx.HTTPStatusError):
if (exc.response.status_code in return_none_on):
return
error_msg += f' The response from the backend was: {exc}.'
try:
error_msg += f" Details: {exc.response.json()['detail']}"
except:
pass
raise BackendResponseError(error_msg)
elif isinstance(exc, httpx.RequestError):
error_msg += f' {exc}'
raise BackendResponseError(error_msg)
else:
raise | def _handle_exception(exc, return_none_on, error_msg):
if isinstance(exc, httpx.HTTPStatusError):
if (exc.response.status_code in return_none_on):
return
error_msg += f' The response from the backend was: {exc}.'
try:
error_msg += f" Details: {exc.response.json()['detail']}"
except:
pass
raise BackendResponseError(error_msg)
elif isinstance(exc, httpx.RequestError):
error_msg += f' {exc}'
raise BackendResponseError(error_msg)
else:
raise<|docstring|>Handle errors which do not qualify for retry<|endoftext|> |
f3f6c8b6b1f0df11c741b0f9ee9d9075cbae7619e8abe2bd01daf0c173333ee2 | def get_moment_map(self, i=0, iaz=0, iTrans=0, moment=0, beam=None, conv_method=None):
'\n This returns the moment maps in physical units, ie:\n - M1 is the average velocity [km/s]\n - M2 is the velocity dispersion [km/s]\n '
if self.is_casa:
cube = np.copy(self.lines[(:, :, :)])
else:
cube = np.copy(self.lines[(i, iaz, iTrans, :, :, :)])
dv = (self.velocity[1] - self.velocity[0])
if (beam is None):
M0 = (np.sum(cube, axis=0) * dv)
elif (moment == 0):
M0 = (np.sum(cube, axis=0) * dv)
M0 = conv_method(M0, beam)
else:
print('Convolving individual channel maps, this may take a bit of time ....')
try:
bar = progressbar.ProgressBar(maxval=self.nv, widgets=[progressbar.Bar('=', '[', ']'), ' ', progressbar.Percentage()])
bar.start()
except:
pass
for iv in range(self.nv):
try:
bar.update((iv + 1))
except:
pass
channel = np.copy(cube[(iv, :, :)])
cube[(iv, :, :)] = conv_method(channel, beam)
M0 = (np.sum(cube, axis=0) * dv)
try:
bar.finish()
except:
pass
if (moment >= 1):
M1 = ((np.sum((cube[(:, :, :)] * self.velocity[(:, np.newaxis, np.newaxis)]), axis=0) * dv) / M0)
if (moment == 2):
M2 = np.sqrt(((np.sum((cube[(:, :, :)] * ((self.velocity[(:, np.newaxis, np.newaxis)] - M1[(np.newaxis, :, :)]) ** 2)), axis=0) * dv) / M0))
if (moment == 0):
return M0
elif (moment == 1):
return M1
elif (moment == 2):
return M2 | This returns the moment maps in physical units, ie:
- M1 is the average velocity [km/s]
- M2 is the velocity dispersion [km/s] | pymcfost/line.py | get_moment_map | YohannFaure/pymcfost | 0 | python | def get_moment_map(self, i=0, iaz=0, iTrans=0, moment=0, beam=None, conv_method=None):
'\n This returns the moment maps in physical units, ie:\n - M1 is the average velocity [km/s]\n - M2 is the velocity dispersion [km/s]\n '
if self.is_casa:
cube = np.copy(self.lines[(:, :, :)])
else:
cube = np.copy(self.lines[(i, iaz, iTrans, :, :, :)])
dv = (self.velocity[1] - self.velocity[0])
if (beam is None):
M0 = (np.sum(cube, axis=0) * dv)
elif (moment == 0):
M0 = (np.sum(cube, axis=0) * dv)
M0 = conv_method(M0, beam)
else:
print('Convolving individual channel maps, this may take a bit of time ....')
try:
bar = progressbar.ProgressBar(maxval=self.nv, widgets=[progressbar.Bar('=', '[', ']'), ' ', progressbar.Percentage()])
bar.start()
except:
pass
for iv in range(self.nv):
try:
bar.update((iv + 1))
except:
pass
channel = np.copy(cube[(iv, :, :)])
cube[(iv, :, :)] = conv_method(channel, beam)
M0 = (np.sum(cube, axis=0) * dv)
try:
bar.finish()
except:
pass
if (moment >= 1):
M1 = ((np.sum((cube[(:, :, :)] * self.velocity[(:, np.newaxis, np.newaxis)]), axis=0) * dv) / M0)
if (moment == 2):
M2 = np.sqrt(((np.sum((cube[(:, :, :)] * ((self.velocity[(:, np.newaxis, np.newaxis)] - M1[(np.newaxis, :, :)]) ** 2)), axis=0) * dv) / M0))
if (moment == 0):
return M0
elif (moment == 1):
return M1
elif (moment == 2):
return M2 | def get_moment_map(self, i=0, iaz=0, iTrans=0, moment=0, beam=None, conv_method=None):
'\n This returns the moment maps in physical units, ie:\n - M1 is the average velocity [km/s]\n - M2 is the velocity dispersion [km/s]\n '
if self.is_casa:
cube = np.copy(self.lines[(:, :, :)])
else:
cube = np.copy(self.lines[(i, iaz, iTrans, :, :, :)])
dv = (self.velocity[1] - self.velocity[0])
if (beam is None):
M0 = (np.sum(cube, axis=0) * dv)
elif (moment == 0):
M0 = (np.sum(cube, axis=0) * dv)
M0 = conv_method(M0, beam)
else:
print('Convolving individual channel maps, this may take a bit of time ....')
try:
bar = progressbar.ProgressBar(maxval=self.nv, widgets=[progressbar.Bar('=', '[', ']'), ' ', progressbar.Percentage()])
bar.start()
except:
pass
for iv in range(self.nv):
try:
bar.update((iv + 1))
except:
pass
channel = np.copy(cube[(iv, :, :)])
cube[(iv, :, :)] = conv_method(channel, beam)
M0 = (np.sum(cube, axis=0) * dv)
try:
bar.finish()
except:
pass
if (moment >= 1):
M1 = ((np.sum((cube[(:, :, :)] * self.velocity[(:, np.newaxis, np.newaxis)]), axis=0) * dv) / M0)
if (moment == 2):
M2 = np.sqrt(((np.sum((cube[(:, :, :)] * ((self.velocity[(:, np.newaxis, np.newaxis)] - M1[(np.newaxis, :, :)]) ** 2)), axis=0) * dv) / M0))
if (moment == 0):
return M0
elif (moment == 1):
return M1
elif (moment == 2):
return M2<|docstring|>This returns the moment maps in physical units, ie:
- M1 is the average velocity [km/s]
- M2 is the velocity dispersion [km/s]<|endoftext|> |
37fd43ef62806f3dee0478b36254894b7ab1a3025db89a3131809bf00a0edd0d | def poggendorff_psychopy(window, parameters=None, **kwargs):
'Create a PsychoPy stimulus of the Poggendorff illusion.\n \n \n The Poggendorff illusion is an optical illusion that involves the misperception\n of the position of one segment of a transverse line that has been interrupted\n by the contour of an intervening structure.\n\n Parameters\n ----------\n window : object\n The window object in which the stimulus will be rendered.\n parameters : dict\n Parameters of the Poggendorff illusion generated by `poggendorff_parameters()`.\n **kwargs\n Additional arguments passed into `poggendorff_parameters()`.\n \n Returns\n -------\n In-place modification of the PsychoPy window (No explicit return).\n\n Examples\n --------- \n >>> import pyllusion as ill\n >>> from psychopy import visual, event\n \n >>> # Create parameters\n >>> parameters = ill.poggendorff_parameters(illusion_strength=-50)\n \n >>> # Initiate Window\n >>> window = visual.Window(size=[800, 600], winType=\'pygame\', color="white")\n \n >>> # Display illusion\n >>> ill.poggendorff_psychopy(window=window, parameters=parameters)\n\n >>> # Refresh and close window \n >>> window.flip()\n >>> event.waitKeys() # Press any key to close\n >>> window.close()\n '
if (parameters is None):
parameters = poggendorff_parameters(**kwargs)
for pos in ['Left_', 'Right_']:
psychopy_line(window, x1=parameters[(pos + 'x1')], y1=parameters[(pos + 'y1')], x2=parameters[(pos + 'x2')], y2=parameters[(pos + 'y2')], adjust_height=True, color='red', size=5)
psychopy_rectangle(window, x=0, y=parameters['Rectangle_y'], size_width=parameters['Rectangle_Width'], size_height=parameters['Rectangle_Height'], color='grey', outline_color='grey') | Create a PsychoPy stimulus of the Poggendorff illusion.
The Poggendorff illusion is an optical illusion that involves the misperception
of the position of one segment of a transverse line that has been interrupted
by the contour of an intervening structure.
Parameters
----------
window : object
The window object in which the stimulus will be rendered.
parameters : dict
Parameters of the Poggendorff illusion generated by `poggendorff_parameters()`.
**kwargs
Additional arguments passed into `poggendorff_parameters()`.
Returns
-------
In-place modification of the PsychoPy window (No explicit return).
Examples
---------
>>> import pyllusion as ill
>>> from psychopy import visual, event
>>> # Create parameters
>>> parameters = ill.poggendorff_parameters(illusion_strength=-50)
>>> # Initiate Window
>>> window = visual.Window(size=[800, 600], winType='pygame', color="white")
>>> # Display illusion
>>> ill.poggendorff_psychopy(window=window, parameters=parameters)
>>> # Refresh and close window
>>> window.flip()
>>> event.waitKeys() # Press any key to close
>>> window.close() | pyllusion/illusion/poggendorff.py | poggendorff_psychopy | RebeccaHirst/Pyllusion | 0 | python | def poggendorff_psychopy(window, parameters=None, **kwargs):
'Create a PsychoPy stimulus of the Poggendorff illusion.\n \n \n The Poggendorff illusion is an optical illusion that involves the misperception\n of the position of one segment of a transverse line that has been interrupted\n by the contour of an intervening structure.\n\n Parameters\n ----------\n window : object\n The window object in which the stimulus will be rendered.\n parameters : dict\n Parameters of the Poggendorff illusion generated by `poggendorff_parameters()`.\n **kwargs\n Additional arguments passed into `poggendorff_parameters()`.\n \n Returns\n -------\n In-place modification of the PsychoPy window (No explicit return).\n\n Examples\n --------- \n >>> import pyllusion as ill\n >>> from psychopy import visual, event\n \n >>> # Create parameters\n >>> parameters = ill.poggendorff_parameters(illusion_strength=-50)\n \n >>> # Initiate Window\n >>> window = visual.Window(size=[800, 600], winType=\'pygame\', color="white")\n \n >>> # Display illusion\n >>> ill.poggendorff_psychopy(window=window, parameters=parameters)\n\n >>> # Refresh and close window \n >>> window.flip()\n >>> event.waitKeys() # Press any key to close\n >>> window.close()\n '
if (parameters is None):
parameters = poggendorff_parameters(**kwargs)
for pos in ['Left_', 'Right_']:
psychopy_line(window, x1=parameters[(pos + 'x1')], y1=parameters[(pos + 'y1')], x2=parameters[(pos + 'x2')], y2=parameters[(pos + 'y2')], adjust_height=True, color='red', size=5)
psychopy_rectangle(window, x=0, y=parameters['Rectangle_y'], size_width=parameters['Rectangle_Width'], size_height=parameters['Rectangle_Height'], color='grey', outline_color='grey') | def poggendorff_psychopy(window, parameters=None, **kwargs):
'Create a PsychoPy stimulus of the Poggendorff illusion.\n \n \n The Poggendorff illusion is an optical illusion that involves the misperception\n of the position of one segment of a transverse line that has been interrupted\n by the contour of an intervening structure.\n\n Parameters\n ----------\n window : object\n The window object in which the stimulus will be rendered.\n parameters : dict\n Parameters of the Poggendorff illusion generated by `poggendorff_parameters()`.\n **kwargs\n Additional arguments passed into `poggendorff_parameters()`.\n \n Returns\n -------\n In-place modification of the PsychoPy window (No explicit return).\n\n Examples\n --------- \n >>> import pyllusion as ill\n >>> from psychopy import visual, event\n \n >>> # Create parameters\n >>> parameters = ill.poggendorff_parameters(illusion_strength=-50)\n \n >>> # Initiate Window\n >>> window = visual.Window(size=[800, 600], winType=\'pygame\', color="white")\n \n >>> # Display illusion\n >>> ill.poggendorff_psychopy(window=window, parameters=parameters)\n\n >>> # Refresh and close window \n >>> window.flip()\n >>> event.waitKeys() # Press any key to close\n >>> window.close()\n '
if (parameters is None):
parameters = poggendorff_parameters(**kwargs)
for pos in ['Left_', 'Right_']:
psychopy_line(window, x1=parameters[(pos + 'x1')], y1=parameters[(pos + 'y1')], x2=parameters[(pos + 'x2')], y2=parameters[(pos + 'y2')], adjust_height=True, color='red', size=5)
psychopy_rectangle(window, x=0, y=parameters['Rectangle_y'], size_width=parameters['Rectangle_Width'], size_height=parameters['Rectangle_Height'], color='grey', outline_color='grey')<|docstring|>Create a PsychoPy stimulus of the Poggendorff illusion.
The Poggendorff illusion is an optical illusion that involves the misperception
of the position of one segment of a transverse line that has been interrupted
by the contour of an intervening structure.
Parameters
----------
window : object
The window object in which the stimulus will be rendered.
parameters : dict
Parameters of the Poggendorff illusion generated by `poggendorff_parameters()`.
**kwargs
Additional arguments passed into `poggendorff_parameters()`.
Returns
-------
In-place modification of the PsychoPy window (No explicit return).
Examples
---------
>>> import pyllusion as ill
>>> from psychopy import visual, event
>>> # Create parameters
>>> parameters = ill.poggendorff_parameters(illusion_strength=-50)
>>> # Initiate Window
>>> window = visual.Window(size=[800, 600], winType='pygame', color="white")
>>> # Display illusion
>>> ill.poggendorff_psychopy(window=window, parameters=parameters)
>>> # Refresh and close window
>>> window.flip()
>>> event.waitKeys() # Press any key to close
>>> window.close()<|endoftext|> |
3c3c08394950bf244832d1aa24355e8c6f3a90e2f973b8a3ba0595a9c454e722 | def poggendorff_image(parameters=None, width=800, height=600, background='white', **kwargs):
'Create a PIL image of the Poggendorff illusion.\n \n \n The Poggendorff illusion is an optical illusion that involves the misperception\n of the position of one segment of a transverse line that has been interrupted\n by the contour of an intervening structure.\n\n Parameters\n ----------\n parameters : dict\n Parameters of the Poggendorff illusion generated by `poggendorff_parameters()`.\n width : int\n Width of the returned image.\n height : int\n Height of the returned image.\n background : str\n Color of the background.\n **kwargs\n Additional arguments passed into `poggendorff_parameters()`.\n\n Returns\n -------\n Image\n Image of the Poggendorff illusion.\n\n Examples\n ---------\n >>> import pyllusion as ill\n >>>\n >>> parameters = ill.poggendorff_parameters(illusion_strength=-55)\n >>> ill.poggendorff_image(parameters) \n\n '
if (parameters is None):
parameters = poggendorff_parameters(**kwargs)
image = PIL.Image.new('RGB', (width, height), color=background)
for pos in ['Left_', 'Right_']:
image = image_line(image=image, x1=parameters[(pos + 'x1')], y1=parameters[(pos + 'y1')], x2=parameters[(pos + 'x2')], y2=parameters[(pos + 'y2')], color='red', adjust_height=True, size=20)
image = image_rectangle(image=image, y=parameters['Rectangle_y'], size_width=parameters['Rectangle_Width'], size_height=parameters['Rectangle_Height'], color='grey', adjust_height=False)
return image | Create a PIL image of the Poggendorff illusion.
The Poggendorff illusion is an optical illusion that involves the misperception
of the position of one segment of a transverse line that has been interrupted
by the contour of an intervening structure.
Parameters
----------
parameters : dict
Parameters of the Poggendorff illusion generated by `poggendorff_parameters()`.
width : int
Width of the returned image.
height : int
Height of the returned image.
background : str
Color of the background.
**kwargs
Additional arguments passed into `poggendorff_parameters()`.
Returns
-------
Image
Image of the Poggendorff illusion.
Examples
---------
>>> import pyllusion as ill
>>>
>>> parameters = ill.poggendorff_parameters(illusion_strength=-55)
>>> ill.poggendorff_image(parameters) | pyllusion/illusion/poggendorff.py | poggendorff_image | RebeccaHirst/Pyllusion | 0 | python | def poggendorff_image(parameters=None, width=800, height=600, background='white', **kwargs):
'Create a PIL image of the Poggendorff illusion.\n \n \n The Poggendorff illusion is an optical illusion that involves the misperception\n of the position of one segment of a transverse line that has been interrupted\n by the contour of an intervening structure.\n\n Parameters\n ----------\n parameters : dict\n Parameters of the Poggendorff illusion generated by `poggendorff_parameters()`.\n width : int\n Width of the returned image.\n height : int\n Height of the returned image.\n background : str\n Color of the background.\n **kwargs\n Additional arguments passed into `poggendorff_parameters()`.\n\n Returns\n -------\n Image\n Image of the Poggendorff illusion.\n\n Examples\n ---------\n >>> import pyllusion as ill\n >>>\n >>> parameters = ill.poggendorff_parameters(illusion_strength=-55)\n >>> ill.poggendorff_image(parameters) \n\n '
if (parameters is None):
parameters = poggendorff_parameters(**kwargs)
image = PIL.Image.new('RGB', (width, height), color=background)
for pos in ['Left_', 'Right_']:
image = image_line(image=image, x1=parameters[(pos + 'x1')], y1=parameters[(pos + 'y1')], x2=parameters[(pos + 'x2')], y2=parameters[(pos + 'y2')], color='red', adjust_height=True, size=20)
image = image_rectangle(image=image, y=parameters['Rectangle_y'], size_width=parameters['Rectangle_Width'], size_height=parameters['Rectangle_Height'], color='grey', adjust_height=False)
return image | def poggendorff_image(parameters=None, width=800, height=600, background='white', **kwargs):
'Create a PIL image of the Poggendorff illusion.\n \n \n The Poggendorff illusion is an optical illusion that involves the misperception\n of the position of one segment of a transverse line that has been interrupted\n by the contour of an intervening structure.\n\n Parameters\n ----------\n parameters : dict\n Parameters of the Poggendorff illusion generated by `poggendorff_parameters()`.\n width : int\n Width of the returned image.\n height : int\n Height of the returned image.\n background : str\n Color of the background.\n **kwargs\n Additional arguments passed into `poggendorff_parameters()`.\n\n Returns\n -------\n Image\n Image of the Poggendorff illusion.\n\n Examples\n ---------\n >>> import pyllusion as ill\n >>>\n >>> parameters = ill.poggendorff_parameters(illusion_strength=-55)\n >>> ill.poggendorff_image(parameters) \n\n '
if (parameters is None):
parameters = poggendorff_parameters(**kwargs)
image = PIL.Image.new('RGB', (width, height), color=background)
for pos in ['Left_', 'Right_']:
image = image_line(image=image, x1=parameters[(pos + 'x1')], y1=parameters[(pos + 'y1')], x2=parameters[(pos + 'x2')], y2=parameters[(pos + 'y2')], color='red', adjust_height=True, size=20)
image = image_rectangle(image=image, y=parameters['Rectangle_y'], size_width=parameters['Rectangle_Width'], size_height=parameters['Rectangle_Height'], color='grey', adjust_height=False)
return image<|docstring|>Create a PIL image of the Poggendorff illusion.
The Poggendorff illusion is an optical illusion that involves the misperception
of the position of one segment of a transverse line that has been interrupted
by the contour of an intervening structure.
Parameters
----------
parameters : dict
Parameters of the Poggendorff illusion generated by `poggendorff_parameters()`.
width : int
Width of the returned image.
height : int
Height of the returned image.
background : str
Color of the background.
**kwargs
Additional arguments passed into `poggendorff_parameters()`.
Returns
-------
Image
Image of the Poggendorff illusion.
Examples
---------
>>> import pyllusion as ill
>>>
>>> parameters = ill.poggendorff_parameters(illusion_strength=-55)
>>> ill.poggendorff_image(parameters)<|endoftext|> |
38683fff2e624dd378abc8484cb1da20d5ee9ebffc68b809a8b8fe61e469e75f | def poggendorff_parameters(illusion_strength=0, difference=0):
'Compute Parameters for Poggendorff Illusion.\n\n Parameters\n ----------\n illusion_strength : float\n The strength of the line tilt in biasing the perception of an uncontinuous single line.\n Specifically, the orientation of the lines in degrees, 0 being vertical and\n larger values (in magnitude; no change with positive or negative sign) rotating clockwise.\n difference : float\n The objective magnitude of the lines discontinuity.\n Specifically, the amount of displacement of the right line relative to the left line. A positive sign\n represents the right line displaced higher up, and a negative sign represents it displaced lower down.\n\n Returns\n -------\n dict\n Dictionary of parameters of the Poggendorff illusion.\n\n '
y_offset = difference
angle = (90 - illusion_strength)
angle = (angle if (illusion_strength >= 0) else (- angle))
(coord, _, _) = _coord_line(x1=0, y1=0, angle=(- angle), length=0.75)
(left_x1, left_y1, left_x2, left_y2) = coord
(coord, _, _) = _coord_line(x1=0, y1=y_offset, angle=(180 - angle), length=0.75)
(right_x1, right_y1, right_x2, right_y2) = coord
parameters = {'Illusion': 'Poggendorff', 'Illusion_Strength': illusion_strength, 'Difference': difference, 'Illusion_Type': ('Congruent' if (illusion_strength > 0) else 'Incongruent'), 'Left_x1': left_x1, 'Left_y1': left_y1, 'Left_x2': left_x2, 'Left_y2': left_y2, 'Right_x1': right_x1, 'Right_y1': right_y1, 'Right_x2': right_x2, 'Right_y2': right_y2, 'Angle': angle, 'Rectangle_Height': 1.75, 'Rectangle_Width': 0.5, 'Rectangle_y': 0}
return parameters | Compute Parameters for Poggendorff Illusion.
Parameters
----------
illusion_strength : float
The strength of the line tilt in biasing the perception of an uncontinuous single line.
Specifically, the orientation of the lines in degrees, 0 being vertical and
larger values (in magnitude; no change with positive or negative sign) rotating clockwise.
difference : float
The objective magnitude of the lines discontinuity.
Specifically, the amount of displacement of the right line relative to the left line. A positive sign
represents the right line displaced higher up, and a negative sign represents it displaced lower down.
Returns
-------
dict
Dictionary of parameters of the Poggendorff illusion. | pyllusion/illusion/poggendorff.py | poggendorff_parameters | RebeccaHirst/Pyllusion | 0 | python | def poggendorff_parameters(illusion_strength=0, difference=0):
'Compute Parameters for Poggendorff Illusion.\n\n Parameters\n ----------\n illusion_strength : float\n The strength of the line tilt in biasing the perception of an uncontinuous single line.\n Specifically, the orientation of the lines in degrees, 0 being vertical and\n larger values (in magnitude; no change with positive or negative sign) rotating clockwise.\n difference : float\n The objective magnitude of the lines discontinuity.\n Specifically, the amount of displacement of the right line relative to the left line. A positive sign\n represents the right line displaced higher up, and a negative sign represents it displaced lower down.\n\n Returns\n -------\n dict\n Dictionary of parameters of the Poggendorff illusion.\n\n '
y_offset = difference
angle = (90 - illusion_strength)
angle = (angle if (illusion_strength >= 0) else (- angle))
(coord, _, _) = _coord_line(x1=0, y1=0, angle=(- angle), length=0.75)
(left_x1, left_y1, left_x2, left_y2) = coord
(coord, _, _) = _coord_line(x1=0, y1=y_offset, angle=(180 - angle), length=0.75)
(right_x1, right_y1, right_x2, right_y2) = coord
parameters = {'Illusion': 'Poggendorff', 'Illusion_Strength': illusion_strength, 'Difference': difference, 'Illusion_Type': ('Congruent' if (illusion_strength > 0) else 'Incongruent'), 'Left_x1': left_x1, 'Left_y1': left_y1, 'Left_x2': left_x2, 'Left_y2': left_y2, 'Right_x1': right_x1, 'Right_y1': right_y1, 'Right_x2': right_x2, 'Right_y2': right_y2, 'Angle': angle, 'Rectangle_Height': 1.75, 'Rectangle_Width': 0.5, 'Rectangle_y': 0}
return parameters | def poggendorff_parameters(illusion_strength=0, difference=0):
'Compute Parameters for Poggendorff Illusion.\n\n Parameters\n ----------\n illusion_strength : float\n The strength of the line tilt in biasing the perception of an uncontinuous single line.\n Specifically, the orientation of the lines in degrees, 0 being vertical and\n larger values (in magnitude; no change with positive or negative sign) rotating clockwise.\n difference : float\n The objective magnitude of the lines discontinuity.\n Specifically, the amount of displacement of the right line relative to the left line. A positive sign\n represents the right line displaced higher up, and a negative sign represents it displaced lower down.\n\n Returns\n -------\n dict\n Dictionary of parameters of the Poggendorff illusion.\n\n '
y_offset = difference
angle = (90 - illusion_strength)
angle = (angle if (illusion_strength >= 0) else (- angle))
(coord, _, _) = _coord_line(x1=0, y1=0, angle=(- angle), length=0.75)
(left_x1, left_y1, left_x2, left_y2) = coord
(coord, _, _) = _coord_line(x1=0, y1=y_offset, angle=(180 - angle), length=0.75)
(right_x1, right_y1, right_x2, right_y2) = coord
parameters = {'Illusion': 'Poggendorff', 'Illusion_Strength': illusion_strength, 'Difference': difference, 'Illusion_Type': ('Congruent' if (illusion_strength > 0) else 'Incongruent'), 'Left_x1': left_x1, 'Left_y1': left_y1, 'Left_x2': left_x2, 'Left_y2': left_y2, 'Right_x1': right_x1, 'Right_y1': right_y1, 'Right_x2': right_x2, 'Right_y2': right_y2, 'Angle': angle, 'Rectangle_Height': 1.75, 'Rectangle_Width': 0.5, 'Rectangle_y': 0}
return parameters<|docstring|>Compute Parameters for Poggendorff Illusion.
Parameters
----------
illusion_strength : float
The strength of the line tilt in biasing the perception of an uncontinuous single line.
Specifically, the orientation of the lines in degrees, 0 being vertical and
larger values (in magnitude; no change with positive or negative sign) rotating clockwise.
difference : float
The objective magnitude of the lines discontinuity.
Specifically, the amount of displacement of the right line relative to the left line. A positive sign
represents the right line displaced higher up, and a negative sign represents it displaced lower down.
Returns
-------
dict
Dictionary of parameters of the Poggendorff illusion.<|endoftext|> |
81a758e052895e3d3545c10b45001b34ecd9e0c851351cc8c2c6966cefc7a9ee | def initialize(self):
'\n Get a spark session\n Create the model instance\n Set the appropriate parameters as set up in configuration\n :return:\n '
self.spark = get_or_create_spark_session(self.spark_conf)
self.model = instantiate_from_str(self.training_conf.model)
params = self.engine_conf.training.model_parameters
model_features = {}
for feature in params['features']:
features_class = self.engine_conf.all_features[feature]
model_features[feature] = {'categorical': features_class.is_categorical(), 'string': (features_class.spark_type() == StringType())}
params['features'] = model_features
self.model.set_params(**params)
self.model.set_logger(self.logger)
conf = self.db_conf
conf.maintenance = None
self.db_tools = BaskervilleDBTools(conf)
self.db_tools.connect_to_db() | Get a spark session
Create the model instance
Set the appropriate parameters as set up in configuration
:return: | src/baskerville/models/pipeline_training.py | initialize | deflect-ca/baskerville | 2 | python | def initialize(self):
'\n Get a spark session\n Create the model instance\n Set the appropriate parameters as set up in configuration\n :return:\n '
self.spark = get_or_create_spark_session(self.spark_conf)
self.model = instantiate_from_str(self.training_conf.model)
params = self.engine_conf.training.model_parameters
model_features = {}
for feature in params['features']:
features_class = self.engine_conf.all_features[feature]
model_features[feature] = {'categorical': features_class.is_categorical(), 'string': (features_class.spark_type() == StringType())}
params['features'] = model_features
self.model.set_params(**params)
self.model.set_logger(self.logger)
conf = self.db_conf
conf.maintenance = None
self.db_tools = BaskervilleDBTools(conf)
self.db_tools.connect_to_db() | def initialize(self):
'\n Get a spark session\n Create the model instance\n Set the appropriate parameters as set up in configuration\n :return:\n '
self.spark = get_or_create_spark_session(self.spark_conf)
self.model = instantiate_from_str(self.training_conf.model)
params = self.engine_conf.training.model_parameters
model_features = {}
for feature in params['features']:
features_class = self.engine_conf.all_features[feature]
model_features[feature] = {'categorical': features_class.is_categorical(), 'string': (features_class.spark_type() == StringType())}
params['features'] = model_features
self.model.set_params(**params)
self.model.set_logger(self.logger)
conf = self.db_conf
conf.maintenance = None
self.db_tools = BaskervilleDBTools(conf)
self.db_tools.connect_to_db()<|docstring|>Get a spark session
Create the model instance
Set the appropriate parameters as set up in configuration
:return:<|endoftext|> |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.