repo
stringlengths 7
55
| path
stringlengths 4
223
| func_name
stringlengths 1
134
| original_string
stringlengths 75
104k
| language
stringclasses 1
value | code
stringlengths 75
104k
| code_tokens
listlengths 19
28.4k
| docstring
stringlengths 1
46.9k
| docstring_tokens
listlengths 1
1.97k
| sha
stringlengths 40
40
| url
stringlengths 87
315
| partition
stringclasses 1
value |
|---|---|---|---|---|---|---|---|---|---|---|---|
UpCloudLtd/upcloud-python-api
|
upcloud_api/server.py
|
Server.add_tags
|
def add_tags(self, tags):
"""
Add tags to a server. Accepts tags as strings or Tag objects.
"""
if self.cloud_manager.assign_tags(self.uuid, tags):
tags = self.tags + [str(tag) for tag in tags]
object.__setattr__(self, 'tags', tags)
|
python
|
def add_tags(self, tags):
    """Attach the given tags (strings or Tag objects) to this server."""
    assigned = self.cloud_manager.assign_tags(self.uuid, tags)
    if not assigned:
        return
    # Refresh the local tag cache only after the API accepted the change.
    # Bypass __setattr__ because tag updates are driven by the API, not
    # by plain attribute assignment.
    updated = list(self.tags)
    for tag in tags:
        updated.append(str(tag))
    object.__setattr__(self, 'tags', updated)
|
[
"def",
"add_tags",
"(",
"self",
",",
"tags",
")",
":",
"if",
"self",
".",
"cloud_manager",
".",
"assign_tags",
"(",
"self",
".",
"uuid",
",",
"tags",
")",
":",
"tags",
"=",
"self",
".",
"tags",
"+",
"[",
"str",
"(",
"tag",
")",
"for",
"tag",
"in",
"tags",
"]",
"object",
".",
"__setattr__",
"(",
"self",
",",
"'tags'",
",",
"tags",
")"
] |
Add tags to a server. Accepts tags as strings or Tag objects.
|
[
"Add",
"tags",
"to",
"a",
"server",
".",
"Accepts",
"tags",
"as",
"strings",
"or",
"Tag",
"objects",
"."
] |
954b0ad7c4b932b2be31a95d88975f6b0eeac8ed
|
https://github.com/UpCloudLtd/upcloud-python-api/blob/954b0ad7c4b932b2be31a95d88975f6b0eeac8ed/upcloud_api/server.py#L264-L270
|
train
|
UpCloudLtd/upcloud-python-api
|
upcloud_api/server.py
|
Server.remove_tags
|
def remove_tags(self, tags):
"""
Add tags to a server. Accepts tags as strings or Tag objects.
"""
if self.cloud_manager.remove_tags(self, tags):
new_tags = [tag for tag in self.tags if tag not in tags]
object.__setattr__(self, 'tags', new_tags)
|
python
|
def remove_tags(self, tags):
    """
    Remove tags from a server. Accepts tags as strings or Tag objects.

    The locally cached tag list is only updated if the API call succeeds.
    """
    # NOTE: docstring previously said "Add tags" — copy-paste error from add_tags.
    if self.cloud_manager.remove_tags(self, tags):
        # keep every cached tag that was not in the removal set;
        # bypass __setattr__ because tag updates are API-driven
        new_tags = [tag for tag in self.tags if tag not in tags]
        object.__setattr__(self, 'tags', new_tags)
|
[
"def",
"remove_tags",
"(",
"self",
",",
"tags",
")",
":",
"if",
"self",
".",
"cloud_manager",
".",
"remove_tags",
"(",
"self",
",",
"tags",
")",
":",
"new_tags",
"=",
"[",
"tag",
"for",
"tag",
"in",
"self",
".",
"tags",
"if",
"tag",
"not",
"in",
"tags",
"]",
"object",
".",
"__setattr__",
"(",
"self",
",",
"'tags'",
",",
"new_tags",
")"
] |
Add tags to a server. Accepts tags as strings or Tag objects.
|
[
"Add",
"tags",
"to",
"a",
"server",
".",
"Accepts",
"tags",
"as",
"strings",
"or",
"Tag",
"objects",
"."
] |
954b0ad7c4b932b2be31a95d88975f6b0eeac8ed
|
https://github.com/UpCloudLtd/upcloud-python-api/blob/954b0ad7c4b932b2be31a95d88975f6b0eeac8ed/upcloud_api/server.py#L272-L278
|
train
|
UpCloudLtd/upcloud-python-api
|
upcloud_api/server.py
|
Server.configure_firewall
|
def configure_firewall(self, FirewallRules):
"""
Helper function for automatically adding several FirewallRules in series.
"""
firewall_rule_bodies = [
FirewallRule.to_dict()
for FirewallRule in FirewallRules
]
return self.cloud_manager.configure_firewall(self, firewall_rule_bodies)
|
python
|
def configure_firewall(self, FirewallRules):
    """
    Helper for adding several FirewallRules in series.

    Serializes each rule and hands the whole batch to the cloud manager.
    """
    # use a lowercase loop variable so the parameter is not shadowed
    rule_bodies = []
    for firewall_rule in FirewallRules:
        rule_bodies.append(firewall_rule.to_dict())
    return self.cloud_manager.configure_firewall(self, rule_bodies)
|
[
"def",
"configure_firewall",
"(",
"self",
",",
"FirewallRules",
")",
":",
"firewall_rule_bodies",
"=",
"[",
"FirewallRule",
".",
"to_dict",
"(",
")",
"for",
"FirewallRule",
"in",
"FirewallRules",
"]",
"return",
"self",
".",
"cloud_manager",
".",
"configure_firewall",
"(",
"self",
",",
"firewall_rule_bodies",
")"
] |
Helper function for automatically adding several FirewallRules in series.
|
[
"Helper",
"function",
"for",
"automatically",
"adding",
"several",
"FirewallRules",
"in",
"series",
"."
] |
954b0ad7c4b932b2be31a95d88975f6b0eeac8ed
|
https://github.com/UpCloudLtd/upcloud-python-api/blob/954b0ad7c4b932b2be31a95d88975f6b0eeac8ed/upcloud_api/server.py#L285-L293
|
train
|
UpCloudLtd/upcloud-python-api
|
upcloud_api/server.py
|
Server.prepare_post_body
|
def prepare_post_body(self):
"""
Prepare a JSON serializable dict from a Server instance with nested.
Storage instances.
"""
body = dict()
# mandatory
body['server'] = {
'hostname': self.hostname,
'zone': self.zone,
'title': self.title,
'storage_devices': {}
}
# optional fields
for optional_field in self.optional_fields:
if hasattr(self, optional_field):
body['server'][optional_field] = getattr(self, optional_field)
# set password_delivery default as 'none' to prevent API from sending
# emails (with credentials) about each created server
if not hasattr(self, 'password_delivery'):
body['server']['password_delivery'] = 'none'
# collect storage devices and create a unique title (see: Storage.title in API doc)
# for each of them
body['server']['storage_devices'] = {
'storage_device': []
}
storage_title_id = 0 # running number for unique storage titles
for storage in self.storage_devices:
if not hasattr(storage, 'os') or storage.os is None:
storage_title_id += 1
storage_body = storage.to_dict()
# setup default titles for storages unless the user has specified
# them at storage.title
if not hasattr(storage, 'title') or not storage.title:
if hasattr(storage, 'os') and storage.os:
storage_body['title'] = self.hostname + ' OS disk'
else:
storage_body['title'] = self.hostname + ' storage disk ' + str(storage_title_id)
# figure out the storage `action` parameter
# public template
if hasattr(storage, 'os') and storage.os:
storage_body['action'] = 'clone'
storage_body['storage'] = OperatingSystems.get_OS_UUID(storage.os)
# private template
elif hasattr(storage, 'uuid'):
storage_body['action'] = 'clone'
storage_body['storage'] = storage.uuid
# create a new storage
else:
storage_body['action'] = 'create'
body['server']['storage_devices']['storage_device'].append(storage_body)
if hasattr(self, 'ip_addresses') and self.ip_addresses:
body['server']['ip_addresses'] = {
'ip_address': [
ip.to_dict() for ip in self.ip_addresses
]
}
return body
|
python
|
def prepare_post_body(self):
    """
    Prepare a JSON serializable dict from a Server instance with nested
    Storage instances, suitable as the body of a server-creation POST.

    Returns:
        dict: {'server': {...}} containing hostname/zone/title, any set
        optional fields, the storage devices and (if set) IP addresses.
    """
    body = dict()

    # mandatory
    body['server'] = {
        'hostname': self.hostname,
        'zone': self.zone,
        'title': self.title,
        'storage_devices': {}
    }

    # optional fields: copy over only the attributes that are actually set
    for optional_field in self.optional_fields:
        if hasattr(self, optional_field):
            body['server'][optional_field] = getattr(self, optional_field)

    # set password_delivery default as 'none' to prevent API from sending
    # emails (with credentials) about each created server
    if not hasattr(self, 'password_delivery'):
        body['server']['password_delivery'] = 'none'

    # collect storage devices and create a unique title (see: Storage.title in API doc)
    # for each of them
    body['server']['storage_devices'] = {
        'storage_device': []
    }

    storage_title_id = 0  # running number for unique storage titles
    for storage in self.storage_devices:
        # only non-OS disks consume a running number (the OS disk gets a
        # fixed title below)
        if not hasattr(storage, 'os') or storage.os is None:
            storage_title_id += 1
        storage_body = storage.to_dict()

        # setup default titles for storages unless the user has specified
        # them at storage.title
        if not hasattr(storage, 'title') or not storage.title:
            if hasattr(storage, 'os') and storage.os:
                storage_body['title'] = self.hostname + ' OS disk'
            else:
                storage_body['title'] = self.hostname + ' storage disk ' + str(storage_title_id)

        # figure out the storage `action` parameter

        # public template: clone from a well-known OS template UUID
        if hasattr(storage, 'os') and storage.os:
            storage_body['action'] = 'clone'
            storage_body['storage'] = OperatingSystems.get_OS_UUID(storage.os)

        # private template: clone from the user's own storage UUID
        elif hasattr(storage, 'uuid'):
            storage_body['action'] = 'clone'
            storage_body['storage'] = storage.uuid

        # create a new (empty) storage
        else:
            storage_body['action'] = 'create'

        body['server']['storage_devices']['storage_device'].append(storage_body)

    if hasattr(self, 'ip_addresses') and self.ip_addresses:
        body['server']['ip_addresses'] = {
            'ip_address': [
                ip.to_dict() for ip in self.ip_addresses
            ]
        }

    return body
|
[
"def",
"prepare_post_body",
"(",
"self",
")",
":",
"body",
"=",
"dict",
"(",
")",
"# mandatory",
"body",
"[",
"'server'",
"]",
"=",
"{",
"'hostname'",
":",
"self",
".",
"hostname",
",",
"'zone'",
":",
"self",
".",
"zone",
",",
"'title'",
":",
"self",
".",
"title",
",",
"'storage_devices'",
":",
"{",
"}",
"}",
"# optional fields",
"for",
"optional_field",
"in",
"self",
".",
"optional_fields",
":",
"if",
"hasattr",
"(",
"self",
",",
"optional_field",
")",
":",
"body",
"[",
"'server'",
"]",
"[",
"optional_field",
"]",
"=",
"getattr",
"(",
"self",
",",
"optional_field",
")",
"# set password_delivery default as 'none' to prevent API from sending",
"# emails (with credentials) about each created server",
"if",
"not",
"hasattr",
"(",
"self",
",",
"'password_delivery'",
")",
":",
"body",
"[",
"'server'",
"]",
"[",
"'password_delivery'",
"]",
"=",
"'none'",
"# collect storage devices and create a unique title (see: Storage.title in API doc)",
"# for each of them",
"body",
"[",
"'server'",
"]",
"[",
"'storage_devices'",
"]",
"=",
"{",
"'storage_device'",
":",
"[",
"]",
"}",
"storage_title_id",
"=",
"0",
"# running number for unique storage titles",
"for",
"storage",
"in",
"self",
".",
"storage_devices",
":",
"if",
"not",
"hasattr",
"(",
"storage",
",",
"'os'",
")",
"or",
"storage",
".",
"os",
"is",
"None",
":",
"storage_title_id",
"+=",
"1",
"storage_body",
"=",
"storage",
".",
"to_dict",
"(",
")",
"# setup default titles for storages unless the user has specified",
"# them at storage.title",
"if",
"not",
"hasattr",
"(",
"storage",
",",
"'title'",
")",
"or",
"not",
"storage",
".",
"title",
":",
"if",
"hasattr",
"(",
"storage",
",",
"'os'",
")",
"and",
"storage",
".",
"os",
":",
"storage_body",
"[",
"'title'",
"]",
"=",
"self",
".",
"hostname",
"+",
"' OS disk'",
"else",
":",
"storage_body",
"[",
"'title'",
"]",
"=",
"self",
".",
"hostname",
"+",
"' storage disk '",
"+",
"str",
"(",
"storage_title_id",
")",
"# figure out the storage `action` parameter",
"# public template",
"if",
"hasattr",
"(",
"storage",
",",
"'os'",
")",
"and",
"storage",
".",
"os",
":",
"storage_body",
"[",
"'action'",
"]",
"=",
"'clone'",
"storage_body",
"[",
"'storage'",
"]",
"=",
"OperatingSystems",
".",
"get_OS_UUID",
"(",
"storage",
".",
"os",
")",
"# private template",
"elif",
"hasattr",
"(",
"storage",
",",
"'uuid'",
")",
":",
"storage_body",
"[",
"'action'",
"]",
"=",
"'clone'",
"storage_body",
"[",
"'storage'",
"]",
"=",
"storage",
".",
"uuid",
"# create a new storage",
"else",
":",
"storage_body",
"[",
"'action'",
"]",
"=",
"'create'",
"body",
"[",
"'server'",
"]",
"[",
"'storage_devices'",
"]",
"[",
"'storage_device'",
"]",
".",
"append",
"(",
"storage_body",
")",
"if",
"hasattr",
"(",
"self",
",",
"'ip_addresses'",
")",
"and",
"self",
".",
"ip_addresses",
":",
"body",
"[",
"'server'",
"]",
"[",
"'ip_addresses'",
"]",
"=",
"{",
"'ip_address'",
":",
"[",
"ip",
".",
"to_dict",
"(",
")",
"for",
"ip",
"in",
"self",
".",
"ip_addresses",
"]",
"}",
"return",
"body"
] |
Prepare a JSON serializable dict from a Server instance with nested.
Storage instances.
|
[
"Prepare",
"a",
"JSON",
"serializable",
"dict",
"from",
"a",
"Server",
"instance",
"with",
"nested",
"."
] |
954b0ad7c4b932b2be31a95d88975f6b0eeac8ed
|
https://github.com/UpCloudLtd/upcloud-python-api/blob/954b0ad7c4b932b2be31a95d88975f6b0eeac8ed/upcloud_api/server.py#L295-L368
|
train
|
UpCloudLtd/upcloud-python-api
|
upcloud_api/server.py
|
Server.to_dict
|
def to_dict(self):
"""
Prepare a JSON serializable dict for read-only purposes.
Includes storages and IP-addresses.
Use prepare_post_body for POST and .save() for PUT.
"""
fields = dict(vars(self).items())
if self.populated:
fields['ip_addresses'] = []
fields['storage_devices'] = []
for ip in self.ip_addresses:
fields['ip_addresses'].append({
'address': ip.address,
'access': ip.access,
'family': ip.family
})
for storage in self.storage_devices:
fields['storage_devices'].append({
'address': storage.address,
'storage': storage.uuid,
'storage_size': storage.size,
'storage_title': storage.title,
'type': storage.type,
})
del fields['populated']
del fields['cloud_manager']
return fields
|
python
|
def to_dict(self):
    """
    Prepare a JSON serializable dict for read-only purposes.

    Includes storages and IP-addresses.
    Use prepare_post_body for POST and .save() for PUT.
    """
    fields = dict(vars(self))
    if self.populated:
        # replace the object lists with plain-dict representations
        fields['ip_addresses'] = [
            {
                'address': ip.address,
                'access': ip.access,
                'family': ip.family
            }
            for ip in self.ip_addresses
        ]
        fields['storage_devices'] = [
            {
                'address': storage.address,
                'storage': storage.uuid,
                'storage_size': storage.size,
                'storage_title': storage.title,
                'type': storage.type,
            }
            for storage in self.storage_devices
        ]
    # internal bookkeeping attributes are not part of the representation
    for internal in ('populated', 'cloud_manager'):
        del fields[internal]
    return fields
|
[
"def",
"to_dict",
"(",
"self",
")",
":",
"fields",
"=",
"dict",
"(",
"vars",
"(",
"self",
")",
".",
"items",
"(",
")",
")",
"if",
"self",
".",
"populated",
":",
"fields",
"[",
"'ip_addresses'",
"]",
"=",
"[",
"]",
"fields",
"[",
"'storage_devices'",
"]",
"=",
"[",
"]",
"for",
"ip",
"in",
"self",
".",
"ip_addresses",
":",
"fields",
"[",
"'ip_addresses'",
"]",
".",
"append",
"(",
"{",
"'address'",
":",
"ip",
".",
"address",
",",
"'access'",
":",
"ip",
".",
"access",
",",
"'family'",
":",
"ip",
".",
"family",
"}",
")",
"for",
"storage",
"in",
"self",
".",
"storage_devices",
":",
"fields",
"[",
"'storage_devices'",
"]",
".",
"append",
"(",
"{",
"'address'",
":",
"storage",
".",
"address",
",",
"'storage'",
":",
"storage",
".",
"uuid",
",",
"'storage_size'",
":",
"storage",
".",
"size",
",",
"'storage_title'",
":",
"storage",
".",
"title",
",",
"'type'",
":",
"storage",
".",
"type",
",",
"}",
")",
"del",
"fields",
"[",
"'populated'",
"]",
"del",
"fields",
"[",
"'cloud_manager'",
"]",
"return",
"fields"
] |
Prepare a JSON serializable dict for read-only purposes.
Includes storages and IP-addresses.
Use prepare_post_body for POST and .save() for PUT.
|
[
"Prepare",
"a",
"JSON",
"serializable",
"dict",
"for",
"read",
"-",
"only",
"purposes",
"."
] |
954b0ad7c4b932b2be31a95d88975f6b0eeac8ed
|
https://github.com/UpCloudLtd/upcloud-python-api/blob/954b0ad7c4b932b2be31a95d88975f6b0eeac8ed/upcloud_api/server.py#L370-L400
|
train
|
UpCloudLtd/upcloud-python-api
|
upcloud_api/server.py
|
Server.get_ip
|
def get_ip(self, access='public', addr_family=None, strict=None):
"""
Return the server's IP address.
Params:
- addr_family: IPv4, IPv6 or None. None prefers IPv4 but will
return IPv6 if IPv4 addr was not available.
- access: 'public' or 'private'
"""
if addr_family not in ['IPv4', 'IPv6', None]:
raise Exception("`addr_family` must be 'IPv4', 'IPv6' or None")
if access not in ['private', 'public']:
raise Exception("`access` must be 'public' or 'private'")
if not hasattr(self, 'ip_addresses'):
self.populate()
# server can have several public or private IPs
ip_addrs = [
ip_addr for ip_addr in self.ip_addresses
if ip_addr.access == access
]
# prefer addr_family (or IPv4 if none given)
preferred_family = addr_family if addr_family else 'IPv4'
for ip_addr in ip_addrs:
if ip_addr.family == preferred_family:
return ip_addr.address
# any IP (of the right access) will do if available and addr_family is None
return ip_addrs[0].address if ip_addrs and not addr_family else None
|
python
|
def get_ip(self, access='public', addr_family=None, strict=None):
    """
    Return one of the server's IP addresses, or None if nothing matches.

    Params:
        - addr_family: 'IPv4', 'IPv6' or None. None prefers IPv4 but
          falls back to IPv6 when no IPv4 address is available.
        - access: 'public' or 'private'
        - strict: accepted for backward compatibility; not used here.
    """
    if addr_family not in ['IPv4', 'IPv6', None]:
        raise Exception("`addr_family` must be 'IPv4', 'IPv6' or None")
    if access not in ['private', 'public']:
        raise Exception("`access` must be 'public' or 'private'")

    # lazily fetch IP information from the API when it is not cached yet
    if not hasattr(self, 'ip_addresses'):
        self.populate()

    # a server can expose several addresses per access level
    candidates = [ip for ip in self.ip_addresses if ip.access == access]

    # prefer the requested family, defaulting to IPv4
    wanted_family = addr_family or 'IPv4'
    for candidate in candidates:
        if candidate.family == wanted_family:
            return candidate.address

    # with no explicit family requested, any address of this access works
    if candidates and addr_family is None:
        return candidates[0].address
    return None
|
[
"def",
"get_ip",
"(",
"self",
",",
"access",
"=",
"'public'",
",",
"addr_family",
"=",
"None",
",",
"strict",
"=",
"None",
")",
":",
"if",
"addr_family",
"not",
"in",
"[",
"'IPv4'",
",",
"'IPv6'",
",",
"None",
"]",
":",
"raise",
"Exception",
"(",
"\"`addr_family` must be 'IPv4', 'IPv6' or None\"",
")",
"if",
"access",
"not",
"in",
"[",
"'private'",
",",
"'public'",
"]",
":",
"raise",
"Exception",
"(",
"\"`access` must be 'public' or 'private'\"",
")",
"if",
"not",
"hasattr",
"(",
"self",
",",
"'ip_addresses'",
")",
":",
"self",
".",
"populate",
"(",
")",
"# server can have several public or private IPs",
"ip_addrs",
"=",
"[",
"ip_addr",
"for",
"ip_addr",
"in",
"self",
".",
"ip_addresses",
"if",
"ip_addr",
".",
"access",
"==",
"access",
"]",
"# prefer addr_family (or IPv4 if none given)",
"preferred_family",
"=",
"addr_family",
"if",
"addr_family",
"else",
"'IPv4'",
"for",
"ip_addr",
"in",
"ip_addrs",
":",
"if",
"ip_addr",
".",
"family",
"==",
"preferred_family",
":",
"return",
"ip_addr",
".",
"address",
"# any IP (of the right access) will do if available and addr_family is None",
"return",
"ip_addrs",
"[",
"0",
"]",
".",
"address",
"if",
"ip_addrs",
"and",
"not",
"addr_family",
"else",
"None"
] |
Return the server's IP address.
Params:
- addr_family: IPv4, IPv6 or None. None prefers IPv4 but will
return IPv6 if IPv4 addr was not available.
- access: 'public' or 'private'
|
[
"Return",
"the",
"server",
"s",
"IP",
"address",
"."
] |
954b0ad7c4b932b2be31a95d88975f6b0eeac8ed
|
https://github.com/UpCloudLtd/upcloud-python-api/blob/954b0ad7c4b932b2be31a95d88975f6b0eeac8ed/upcloud_api/server.py#L402-L433
|
train
|
UpCloudLtd/upcloud-python-api
|
upcloud_api/server.py
|
Server.get_public_ip
|
def get_public_ip(self, addr_family=None, *args, **kwargs):
"""Alias for get_ip('public')"""
return self.get_ip('public', addr_family, *args, **kwargs)
|
python
|
def get_public_ip(self, addr_family=None, *args, **kwargs):
    """Convenience wrapper: equivalent to ``get_ip('public', ...)``."""
    return self.get_ip('public', addr_family, *args, **kwargs)
|
[
"def",
"get_public_ip",
"(",
"self",
",",
"addr_family",
"=",
"None",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"self",
".",
"get_ip",
"(",
"'public'",
",",
"addr_family",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] |
Alias for get_ip('public')
|
[
"Alias",
"for",
"get_ip",
"(",
"public",
")"
] |
954b0ad7c4b932b2be31a95d88975f6b0eeac8ed
|
https://github.com/UpCloudLtd/upcloud-python-api/blob/954b0ad7c4b932b2be31a95d88975f6b0eeac8ed/upcloud_api/server.py#L435-L437
|
train
|
UpCloudLtd/upcloud-python-api
|
upcloud_api/server.py
|
Server.get_private_ip
|
def get_private_ip(self, addr_family=None, *args, **kwargs):
"""Alias for get_ip('private')"""
return self.get_ip('private', addr_family, *args, **kwargs)
|
python
|
def get_private_ip(self, addr_family=None, *args, **kwargs):
    """Convenience wrapper: equivalent to ``get_ip('private', ...)``."""
    return self.get_ip('private', addr_family, *args, **kwargs)
|
[
"def",
"get_private_ip",
"(",
"self",
",",
"addr_family",
"=",
"None",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"self",
".",
"get_ip",
"(",
"'private'",
",",
"addr_family",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] |
Alias for get_ip('private')
|
[
"Alias",
"for",
"get_ip",
"(",
"private",
")"
] |
954b0ad7c4b932b2be31a95d88975f6b0eeac8ed
|
https://github.com/UpCloudLtd/upcloud-python-api/blob/954b0ad7c4b932b2be31a95d88975f6b0eeac8ed/upcloud_api/server.py#L439-L441
|
train
|
UpCloudLtd/upcloud-python-api
|
upcloud_api/server.py
|
Server._wait_for_state_change
|
def _wait_for_state_change(self, target_states, update_interval=10):
"""
Blocking wait until target_state reached. update_interval is in seconds.
Warning: state change must begin before calling this method.
"""
while self.state not in target_states:
if self.state == 'error':
raise Exception('server is in error state')
# update server state every 10s
sleep(update_interval)
self.populate()
|
python
|
def _wait_for_state_change(self, target_states, update_interval=10):
"""
Blocking wait until target_state reached. update_interval is in seconds.
Warning: state change must begin before calling this method.
"""
while self.state not in target_states:
if self.state == 'error':
raise Exception('server is in error state')
# update server state every 10s
sleep(update_interval)
self.populate()
|
[
"def",
"_wait_for_state_change",
"(",
"self",
",",
"target_states",
",",
"update_interval",
"=",
"10",
")",
":",
"while",
"self",
".",
"state",
"not",
"in",
"target_states",
":",
"if",
"self",
".",
"state",
"==",
"'error'",
":",
"raise",
"Exception",
"(",
"'server is in error state'",
")",
"# update server state every 10s",
"sleep",
"(",
"update_interval",
")",
"self",
".",
"populate",
"(",
")"
] |
Blocking wait until target_state reached. update_interval is in seconds.
Warning: state change must begin before calling this method.
|
[
"Blocking",
"wait",
"until",
"target_state",
"reached",
".",
"update_interval",
"is",
"in",
"seconds",
"."
] |
954b0ad7c4b932b2be31a95d88975f6b0eeac8ed
|
https://github.com/UpCloudLtd/upcloud-python-api/blob/954b0ad7c4b932b2be31a95d88975f6b0eeac8ed/upcloud_api/server.py#L443-L455
|
train
|
UpCloudLtd/upcloud-python-api
|
upcloud_api/server.py
|
Server.ensure_started
|
def ensure_started(self):
"""
Start a server and waits (blocking wait) until it is fully started.
"""
# server is either starting or stopping (or error)
if self.state in ['maintenance', 'error']:
self._wait_for_state_change(['stopped', 'started'])
if self.state == 'stopped':
self.start()
self._wait_for_state_change(['started'])
if self.state == 'started':
return True
else:
# something went wrong, fail explicitly
raise Exception('unknown server state: ' + self.state)
|
python
|
def ensure_started(self):
    """
    Start the server (if needed) and block until it is fully started.

    Returns True once the server reports 'started'; raises if the server
    ends up in any other state.
    """
    # 'maintenance'/'error' means a transition is underway; wait for it
    # to settle into a stable state before deciding what to do
    if self.state in ['maintenance', 'error']:
        self._wait_for_state_change(['stopped', 'started'])

    if self.state == 'stopped':
        self.start()
        self._wait_for_state_change(['started'])

    if self.state != 'started':
        # something went wrong, fail explicitly
        raise Exception('unknown server state: ' + self.state)
    return True
|
[
"def",
"ensure_started",
"(",
"self",
")",
":",
"# server is either starting or stopping (or error)",
"if",
"self",
".",
"state",
"in",
"[",
"'maintenance'",
",",
"'error'",
"]",
":",
"self",
".",
"_wait_for_state_change",
"(",
"[",
"'stopped'",
",",
"'started'",
"]",
")",
"if",
"self",
".",
"state",
"==",
"'stopped'",
":",
"self",
".",
"start",
"(",
")",
"self",
".",
"_wait_for_state_change",
"(",
"[",
"'started'",
"]",
")",
"if",
"self",
".",
"state",
"==",
"'started'",
":",
"return",
"True",
"else",
":",
"# something went wrong, fail explicitly",
"raise",
"Exception",
"(",
"'unknown server state: '",
"+",
"self",
".",
"state",
")"
] |
Start a server and waits (blocking wait) until it is fully started.
|
[
"Start",
"a",
"server",
"and",
"waits",
"(",
"blocking",
"wait",
")",
"until",
"it",
"is",
"fully",
"started",
"."
] |
954b0ad7c4b932b2be31a95d88975f6b0eeac8ed
|
https://github.com/UpCloudLtd/upcloud-python-api/blob/954b0ad7c4b932b2be31a95d88975f6b0eeac8ed/upcloud_api/server.py#L457-L473
|
train
|
UpCloudLtd/upcloud-python-api
|
upcloud_api/server.py
|
Server.stop_and_destroy
|
def stop_and_destroy(self, sync=True):
"""
Destroy a server and its storages. Stops the server before destroying.
Syncs the server state from the API, use sync=False to disable.
"""
def _self_destruct():
"""destroy the server and all storages attached to it."""
# try_it_n_times util is used as a convenience because
# Servers and Storages can fluctuate between "maintenance" and their
# original state due to several different reasons especially when
# destroying infrastructure.
# first destroy server
try_it_n_times(operation=self.destroy,
expected_error_codes=['SERVER_STATE_ILLEGAL'],
custom_error='destroying server failed')
# storages may be deleted instantly after server DELETE
for storage in self.storage_devices:
try_it_n_times(operation=storage.destroy,
expected_error_codes=['STORAGE_STATE_ILLEGAL'],
custom_error='destroying storage failed')
if sync:
self.populate()
# server is either starting or stopping (or error)
if self.state in ['maintenance', 'error']:
self._wait_for_state_change(['stopped', 'started'])
if self.state == 'started':
try_it_n_times(operation=self.stop,
expected_error_codes=['SERVER_STATE_ILLEGAL'],
custom_error='stopping server failed')
self._wait_for_state_change(['stopped'])
if self.state == 'stopped':
_self_destruct()
else:
raise Exception('unknown server state: ' + self.state)
|
python
|
def stop_and_destroy(self, sync=True):
    """
    Destroy a server and its storages. Stops the server before destroying.

    Syncs the server state from the API, use sync=False to disable.

    :param sync: when True, re-fetch the server state before acting.
    :raises Exception: if the server settles into an unrecognized state.
    """
    def _self_destruct():
        """destroy the server and all storages attached to it."""
        # try_it_n_times util is used as a convenience because
        # Servers and Storages can fluctuate between "maintenance" and their
        # original state due to several different reasons especially when
        # destroying infrastructure.

        # first destroy server
        try_it_n_times(operation=self.destroy,
                       expected_error_codes=['SERVER_STATE_ILLEGAL'],
                       custom_error='destroying server failed')

        # storages may be deleted instantly after server DELETE
        for storage in self.storage_devices:
            try_it_n_times(operation=storage.destroy,
                           expected_error_codes=['STORAGE_STATE_ILLEGAL'],
                           custom_error='destroying storage failed')

    if sync:
        self.populate()

    # server is either starting or stopping (or error)
    if self.state in ['maintenance', 'error']:
        self._wait_for_state_change(['stopped', 'started'])

    # a running server must be stopped before it can be destroyed
    if self.state == 'started':
        try_it_n_times(operation=self.stop,
                       expected_error_codes=['SERVER_STATE_ILLEGAL'],
                       custom_error='stopping server failed')
        self._wait_for_state_change(['stopped'])

    if self.state == 'stopped':
        _self_destruct()
    else:
        raise Exception('unknown server state: ' + self.state)
|
[
"def",
"stop_and_destroy",
"(",
"self",
",",
"sync",
"=",
"True",
")",
":",
"def",
"_self_destruct",
"(",
")",
":",
"\"\"\"destroy the server and all storages attached to it.\"\"\"",
"# try_it_n_times util is used as a convenience because",
"# Servers and Storages can fluctuate between \"maintenance\" and their",
"# original state due to several different reasons especially when",
"# destroying infrastructure.",
"# first destroy server",
"try_it_n_times",
"(",
"operation",
"=",
"self",
".",
"destroy",
",",
"expected_error_codes",
"=",
"[",
"'SERVER_STATE_ILLEGAL'",
"]",
",",
"custom_error",
"=",
"'destroying server failed'",
")",
"# storages may be deleted instantly after server DELETE",
"for",
"storage",
"in",
"self",
".",
"storage_devices",
":",
"try_it_n_times",
"(",
"operation",
"=",
"storage",
".",
"destroy",
",",
"expected_error_codes",
"=",
"[",
"'STORAGE_STATE_ILLEGAL'",
"]",
",",
"custom_error",
"=",
"'destroying storage failed'",
")",
"if",
"sync",
":",
"self",
".",
"populate",
"(",
")",
"# server is either starting or stopping (or error)",
"if",
"self",
".",
"state",
"in",
"[",
"'maintenance'",
",",
"'error'",
"]",
":",
"self",
".",
"_wait_for_state_change",
"(",
"[",
"'stopped'",
",",
"'started'",
"]",
")",
"if",
"self",
".",
"state",
"==",
"'started'",
":",
"try_it_n_times",
"(",
"operation",
"=",
"self",
".",
"stop",
",",
"expected_error_codes",
"=",
"[",
"'SERVER_STATE_ILLEGAL'",
"]",
",",
"custom_error",
"=",
"'stopping server failed'",
")",
"self",
".",
"_wait_for_state_change",
"(",
"[",
"'stopped'",
"]",
")",
"if",
"self",
".",
"state",
"==",
"'stopped'",
":",
"_self_destruct",
"(",
")",
"else",
":",
"raise",
"Exception",
"(",
"'unknown server state: '",
"+",
"self",
".",
"state",
")"
] |
Destroy a server and its storages. Stops the server before destroying.
Syncs the server state from the API, use sync=False to disable.
|
[
"Destroy",
"a",
"server",
"and",
"its",
"storages",
".",
"Stops",
"the",
"server",
"before",
"destroying",
"."
] |
954b0ad7c4b932b2be31a95d88975f6b0eeac8ed
|
https://github.com/UpCloudLtd/upcloud-python-api/blob/954b0ad7c4b932b2be31a95d88975f6b0eeac8ed/upcloud_api/server.py#L475-L517
|
train
|
okfn/ofs
|
ofs/local/storedjson.py
|
PersistentState.revert
|
def revert(self):
"""Revert the state to the version stored on disc."""
if self.filepath:
if path.isfile(self.filepath):
serialised_file = open(self.filepath, "r")
try:
self.state = json.load(serialised_file)
except ValueError:
print("No JSON information could be read from the persistence file - could be empty: %s" % self.filepath)
self.state = {}
finally:
serialised_file.close()
else:
print("The persistence file has not yet been created or does not exist, so the state cannot be read from it yet.")
else:
print("Filepath to the persistence file is not set. State cannot be read.")
return False
|
python
|
def revert(self):
    """
    Revert the in-memory state to the version stored on disc.

    Returns False when no persistence filepath is configured; otherwise
    returns None (including when the file is missing or unreadable, in
    which case a message is printed).
    """
    # guard clauses replace the original nested if/else pyramid
    if not self.filepath:
        print("Filepath to the persistence file is not set. State cannot be read.")
        return False
    if not path.isfile(self.filepath):
        print("The persistence file has not yet been created or does not exist, so the state cannot be read from it yet.")
        return
    # 'with' guarantees the handle is closed even if json.load raises
    # (the original used a manual try/finally/close)
    with open(self.filepath, "r") as serialised_file:
        try:
            self.state = json.load(serialised_file)
        except ValueError:
            # an empty or corrupt file yields an empty state, not an error
            print("No JSON information could be read from the persistence file - could be empty: %s" % self.filepath)
            self.state = {}
|
[
"def",
"revert",
"(",
"self",
")",
":",
"if",
"self",
".",
"filepath",
":",
"if",
"path",
".",
"isfile",
"(",
"self",
".",
"filepath",
")",
":",
"serialised_file",
"=",
"open",
"(",
"self",
".",
"filepath",
",",
"\"r\"",
")",
"try",
":",
"self",
".",
"state",
"=",
"json",
".",
"load",
"(",
"serialised_file",
")",
"except",
"ValueError",
":",
"print",
"(",
"\"No JSON information could be read from the persistence file - could be empty: %s\"",
"%",
"self",
".",
"filepath",
")",
"self",
".",
"state",
"=",
"{",
"}",
"finally",
":",
"serialised_file",
".",
"close",
"(",
")",
"else",
":",
"print",
"(",
"\"The persistence file has not yet been created or does not exist, so the state cannot be read from it yet.\"",
")",
"else",
":",
"print",
"(",
"\"Filepath to the persistence file is not set. State cannot be read.\"",
")",
"return",
"False"
] |
Revert the state to the version stored on disc.
|
[
"Revert",
"the",
"state",
"to",
"the",
"version",
"stored",
"on",
"disc",
"."
] |
c110cbecd7d0ae7e877963914a1a5af030cd6d45
|
https://github.com/okfn/ofs/blob/c110cbecd7d0ae7e877963914a1a5af030cd6d45/ofs/local/storedjson.py#L33-L49
|
train
|
okfn/ofs
|
ofs/local/storedjson.py
|
PersistentState.sync
|
def sync(self):
"""Synchronise and update the stored state to the in-memory state."""
if self.filepath:
serialised_file = open(self.filepath, "w")
json.dump(self.state, serialised_file)
serialised_file.close()
else:
print("Filepath to the persistence file is not set. State cannot be synced to disc.")
|
python
|
def sync(self):
"""Synchronise and update the stored state to the in-memory state."""
if self.filepath:
serialised_file = open(self.filepath, "w")
json.dump(self.state, serialised_file)
serialised_file.close()
else:
print("Filepath to the persistence file is not set. State cannot be synced to disc.")
|
[
"def",
"sync",
"(",
"self",
")",
":",
"if",
"self",
".",
"filepath",
":",
"serialised_file",
"=",
"open",
"(",
"self",
".",
"filepath",
",",
"\"w\"",
")",
"json",
".",
"dump",
"(",
"self",
".",
"state",
",",
"serialised_file",
")",
"serialised_file",
".",
"close",
"(",
")",
"else",
":",
"print",
"(",
"\"Filepath to the persistence file is not set. State cannot be synced to disc.\"",
")"
] |
Synchronise and update the stored state to the in-memory state.
|
[
"Synchronise",
"and",
"update",
"the",
"stored",
"state",
"to",
"the",
"in",
"-",
"memory",
"state",
"."
] |
c110cbecd7d0ae7e877963914a1a5af030cd6d45
|
https://github.com/okfn/ofs/blob/c110cbecd7d0ae7e877963914a1a5af030cd6d45/ofs/local/storedjson.py#L51-L58
|
train
|
UpCloudLtd/upcloud-python-api
|
upcloud_api/upcloud_resource.py
|
UpCloudResource._reset
|
def _reset(self, **kwargs):
"""
Reset after repopulating from API (or when initializing).
"""
# set object attributes from params
for key in kwargs:
setattr(self, key, kwargs[key])
# set defaults (if need be) where the default is not None
for attr in self.ATTRIBUTES:
if not hasattr(self, attr) and self.ATTRIBUTES[attr] is not None:
setattr(self, attr, self.ATTRIBUTES[attr])
|
python
|
def _reset(self, **kwargs):
"""
Reset after repopulating from API (or when initializing).
"""
# set object attributes from params
for key in kwargs:
setattr(self, key, kwargs[key])
# set defaults (if need be) where the default is not None
for attr in self.ATTRIBUTES:
if not hasattr(self, attr) and self.ATTRIBUTES[attr] is not None:
setattr(self, attr, self.ATTRIBUTES[attr])
|
[
"def",
"_reset",
"(",
"self",
",",
"*",
"*",
"kwargs",
")",
":",
"# set object attributes from params",
"for",
"key",
"in",
"kwargs",
":",
"setattr",
"(",
"self",
",",
"key",
",",
"kwargs",
"[",
"key",
"]",
")",
"# set defaults (if need be) where the default is not None",
"for",
"attr",
"in",
"self",
".",
"ATTRIBUTES",
":",
"if",
"not",
"hasattr",
"(",
"self",
",",
"attr",
")",
"and",
"self",
".",
"ATTRIBUTES",
"[",
"attr",
"]",
"is",
"not",
"None",
":",
"setattr",
"(",
"self",
",",
"attr",
",",
"self",
".",
"ATTRIBUTES",
"[",
"attr",
"]",
")"
] |
Reset after repopulating from API (or when initializing).
|
[
"Reset",
"after",
"repopulating",
"from",
"API",
"(",
"or",
"when",
"initializing",
")",
"."
] |
954b0ad7c4b932b2be31a95d88975f6b0eeac8ed
|
https://github.com/UpCloudLtd/upcloud-python-api/blob/954b0ad7c4b932b2be31a95d88975f6b0eeac8ed/upcloud_api/upcloud_resource.py#L27-L38
|
train
|
UpCloudLtd/upcloud-python-api
|
upcloud_api/upcloud_resource.py
|
UpCloudResource.to_dict
|
def to_dict(self):
"""
Return a dict that can be serialised to JSON and sent to UpCloud's API.
"""
return dict(
(attr, getattr(self, attr))
for attr in self.ATTRIBUTES
if hasattr(self, attr)
)
|
python
|
def to_dict(self):
"""
Return a dict that can be serialised to JSON and sent to UpCloud's API.
"""
return dict(
(attr, getattr(self, attr))
for attr in self.ATTRIBUTES
if hasattr(self, attr)
)
|
[
"def",
"to_dict",
"(",
"self",
")",
":",
"return",
"dict",
"(",
"(",
"attr",
",",
"getattr",
"(",
"self",
",",
"attr",
")",
")",
"for",
"attr",
"in",
"self",
".",
"ATTRIBUTES",
"if",
"hasattr",
"(",
"self",
",",
"attr",
")",
")"
] |
Return a dict that can be serialised to JSON and sent to UpCloud's API.
|
[
"Return",
"a",
"dict",
"that",
"can",
"be",
"serialised",
"to",
"JSON",
"and",
"sent",
"to",
"UpCloud",
"s",
"API",
"."
] |
954b0ad7c4b932b2be31a95d88975f6b0eeac8ed
|
https://github.com/UpCloudLtd/upcloud-python-api/blob/954b0ad7c4b932b2be31a95d88975f6b0eeac8ed/upcloud_api/upcloud_resource.py#L47-L55
|
train
|
okfn/ofs
|
ofs/remote/botostore.py
|
BotoOFS._require_bucket
|
def _require_bucket(self, bucket_name):
""" Also try to create the bucket. """
if not self.exists(bucket_name) and not self.claim_bucket(bucket_name):
raise OFSException("Invalid bucket: %s" % bucket_name)
return self._get_bucket(bucket_name)
|
python
|
def _require_bucket(self, bucket_name):
""" Also try to create the bucket. """
if not self.exists(bucket_name) and not self.claim_bucket(bucket_name):
raise OFSException("Invalid bucket: %s" % bucket_name)
return self._get_bucket(bucket_name)
|
[
"def",
"_require_bucket",
"(",
"self",
",",
"bucket_name",
")",
":",
"if",
"not",
"self",
".",
"exists",
"(",
"bucket_name",
")",
"and",
"not",
"self",
".",
"claim_bucket",
"(",
"bucket_name",
")",
":",
"raise",
"OFSException",
"(",
"\"Invalid bucket: %s\"",
"%",
"bucket_name",
")",
"return",
"self",
".",
"_get_bucket",
"(",
"bucket_name",
")"
] |
Also try to create the bucket.
|
[
"Also",
"try",
"to",
"create",
"the",
"bucket",
"."
] |
c110cbecd7d0ae7e877963914a1a5af030cd6d45
|
https://github.com/okfn/ofs/blob/c110cbecd7d0ae7e877963914a1a5af030cd6d45/ofs/remote/botostore.py#L42-L46
|
train
|
okfn/ofs
|
ofs/remote/botostore.py
|
BotoOFS.del_stream
|
def del_stream(self, bucket, label):
""" Will fail if the bucket or label don't exist """
bucket = self._require_bucket(bucket)
key = self._require_key(bucket, label)
key.delete()
|
python
|
def del_stream(self, bucket, label):
""" Will fail if the bucket or label don't exist """
bucket = self._require_bucket(bucket)
key = self._require_key(bucket, label)
key.delete()
|
[
"def",
"del_stream",
"(",
"self",
",",
"bucket",
",",
"label",
")",
":",
"bucket",
"=",
"self",
".",
"_require_bucket",
"(",
"bucket",
")",
"key",
"=",
"self",
".",
"_require_key",
"(",
"bucket",
",",
"label",
")",
"key",
".",
"delete",
"(",
")"
] |
Will fail if the bucket or label don't exist
|
[
"Will",
"fail",
"if",
"the",
"bucket",
"or",
"label",
"don",
"t",
"exist"
] |
c110cbecd7d0ae7e877963914a1a5af030cd6d45
|
https://github.com/okfn/ofs/blob/c110cbecd7d0ae7e877963914a1a5af030cd6d45/ofs/remote/botostore.py#L119-L123
|
train
|
okfn/ofs
|
ofs/remote/botostore.py
|
BotoOFS.authenticate_request
|
def authenticate_request(self, method, bucket='', key='', headers=None):
'''Authenticate a HTTP request by filling in Authorization field header.
:param method: HTTP method (e.g. GET, PUT, POST)
:param bucket: name of the bucket.
:param key: name of key within bucket.
:param headers: dictionary of additional HTTP headers.
:return: boto.connection.HTTPRequest object with Authorization header
filled (NB: will also have a Date field if none before and a User-Agent
field will be set to Boto).
'''
# following is extracted from S3Connection.make_request and the method
# it calls: AWSAuthConnection.make_request
path = self.conn.calling_format.build_path_base(bucket, key)
auth_path = self.conn.calling_format.build_auth_path(bucket, key)
http_request = boto.connection.AWSAuthConnection.build_base_http_request(
self.conn,
method,
path,
auth_path,
{},
headers
)
http_request.authorize(connection=self.conn)
return http_request
|
python
|
def authenticate_request(self, method, bucket='', key='', headers=None):
'''Authenticate a HTTP request by filling in Authorization field header.
:param method: HTTP method (e.g. GET, PUT, POST)
:param bucket: name of the bucket.
:param key: name of key within bucket.
:param headers: dictionary of additional HTTP headers.
:return: boto.connection.HTTPRequest object with Authorization header
filled (NB: will also have a Date field if none before and a User-Agent
field will be set to Boto).
'''
# following is extracted from S3Connection.make_request and the method
# it calls: AWSAuthConnection.make_request
path = self.conn.calling_format.build_path_base(bucket, key)
auth_path = self.conn.calling_format.build_auth_path(bucket, key)
http_request = boto.connection.AWSAuthConnection.build_base_http_request(
self.conn,
method,
path,
auth_path,
{},
headers
)
http_request.authorize(connection=self.conn)
return http_request
|
[
"def",
"authenticate_request",
"(",
"self",
",",
"method",
",",
"bucket",
"=",
"''",
",",
"key",
"=",
"''",
",",
"headers",
"=",
"None",
")",
":",
"# following is extracted from S3Connection.make_request and the method",
"# it calls: AWSAuthConnection.make_request",
"path",
"=",
"self",
".",
"conn",
".",
"calling_format",
".",
"build_path_base",
"(",
"bucket",
",",
"key",
")",
"auth_path",
"=",
"self",
".",
"conn",
".",
"calling_format",
".",
"build_auth_path",
"(",
"bucket",
",",
"key",
")",
"http_request",
"=",
"boto",
".",
"connection",
".",
"AWSAuthConnection",
".",
"build_base_http_request",
"(",
"self",
".",
"conn",
",",
"method",
",",
"path",
",",
"auth_path",
",",
"{",
"}",
",",
"headers",
")",
"http_request",
".",
"authorize",
"(",
"connection",
"=",
"self",
".",
"conn",
")",
"return",
"http_request"
] |
Authenticate a HTTP request by filling in Authorization field header.
:param method: HTTP method (e.g. GET, PUT, POST)
:param bucket: name of the bucket.
:param key: name of key within bucket.
:param headers: dictionary of additional HTTP headers.
:return: boto.connection.HTTPRequest object with Authorization header
filled (NB: will also have a Date field if none before and a User-Agent
field will be set to Boto).
|
[
"Authenticate",
"a",
"HTTP",
"request",
"by",
"filling",
"in",
"Authorization",
"field",
"header",
"."
] |
c110cbecd7d0ae7e877963914a1a5af030cd6d45
|
https://github.com/okfn/ofs/blob/c110cbecd7d0ae7e877963914a1a5af030cd6d45/ofs/remote/botostore.py#L172-L197
|
train
|
ckan/deadoralive
|
deadoralive/deadoralive.py
|
get_resources_to_check
|
def get_resources_to_check(client_site_url, apikey):
"""Return a list of resource IDs to check for broken links.
Calls the client site's API to get a list of resource IDs.
:raises CouldNotGetResourceIDsError: if getting the resource IDs fails
for any reason
"""
url = client_site_url + u"deadoralive/get_resources_to_check"
response = requests.get(url, headers=dict(Authorization=apikey))
if not response.ok:
raise CouldNotGetResourceIDsError(
u"Couldn't get resource IDs to check: {code} {reason}".format(
code=response.status_code, reason=response.reason))
return response.json()
|
python
|
def get_resources_to_check(client_site_url, apikey):
"""Return a list of resource IDs to check for broken links.
Calls the client site's API to get a list of resource IDs.
:raises CouldNotGetResourceIDsError: if getting the resource IDs fails
for any reason
"""
url = client_site_url + u"deadoralive/get_resources_to_check"
response = requests.get(url, headers=dict(Authorization=apikey))
if not response.ok:
raise CouldNotGetResourceIDsError(
u"Couldn't get resource IDs to check: {code} {reason}".format(
code=response.status_code, reason=response.reason))
return response.json()
|
[
"def",
"get_resources_to_check",
"(",
"client_site_url",
",",
"apikey",
")",
":",
"url",
"=",
"client_site_url",
"+",
"u\"deadoralive/get_resources_to_check\"",
"response",
"=",
"requests",
".",
"get",
"(",
"url",
",",
"headers",
"=",
"dict",
"(",
"Authorization",
"=",
"apikey",
")",
")",
"if",
"not",
"response",
".",
"ok",
":",
"raise",
"CouldNotGetResourceIDsError",
"(",
"u\"Couldn't get resource IDs to check: {code} {reason}\"",
".",
"format",
"(",
"code",
"=",
"response",
".",
"status_code",
",",
"reason",
"=",
"response",
".",
"reason",
")",
")",
"return",
"response",
".",
"json",
"(",
")"
] |
Return a list of resource IDs to check for broken links.
Calls the client site's API to get a list of resource IDs.
:raises CouldNotGetResourceIDsError: if getting the resource IDs fails
for any reason
|
[
"Return",
"a",
"list",
"of",
"resource",
"IDs",
"to",
"check",
"for",
"broken",
"links",
"."
] |
82eed6c73e17b9884476311a7a8fae9d2b379600
|
https://github.com/ckan/deadoralive/blob/82eed6c73e17b9884476311a7a8fae9d2b379600/deadoralive/deadoralive.py#L26-L41
|
train
|
ckan/deadoralive
|
deadoralive/deadoralive.py
|
get_url_for_id
|
def get_url_for_id(client_site_url, apikey, resource_id):
"""Return the URL for the given resource ID.
Contacts the client site's API to get the URL for the ID and returns it.
:raises CouldNotGetURLError: if getting the URL fails for any reason
"""
# TODO: Handle invalid responses from the client site.
url = client_site_url + u"deadoralive/get_url_for_resource_id"
params = {"resource_id": resource_id}
response = requests.get(url, headers=dict(Authorization=apikey),
params=params)
if not response.ok:
raise CouldNotGetURLError(
u"Couldn't get URL for resource {id}: {code} {reason}".format(
id=resource_id, code=response.status_code,
reason=response.reason))
return response.json()
|
python
|
def get_url_for_id(client_site_url, apikey, resource_id):
"""Return the URL for the given resource ID.
Contacts the client site's API to get the URL for the ID and returns it.
:raises CouldNotGetURLError: if getting the URL fails for any reason
"""
# TODO: Handle invalid responses from the client site.
url = client_site_url + u"deadoralive/get_url_for_resource_id"
params = {"resource_id": resource_id}
response = requests.get(url, headers=dict(Authorization=apikey),
params=params)
if not response.ok:
raise CouldNotGetURLError(
u"Couldn't get URL for resource {id}: {code} {reason}".format(
id=resource_id, code=response.status_code,
reason=response.reason))
return response.json()
|
[
"def",
"get_url_for_id",
"(",
"client_site_url",
",",
"apikey",
",",
"resource_id",
")",
":",
"# TODO: Handle invalid responses from the client site.",
"url",
"=",
"client_site_url",
"+",
"u\"deadoralive/get_url_for_resource_id\"",
"params",
"=",
"{",
"\"resource_id\"",
":",
"resource_id",
"}",
"response",
"=",
"requests",
".",
"get",
"(",
"url",
",",
"headers",
"=",
"dict",
"(",
"Authorization",
"=",
"apikey",
")",
",",
"params",
"=",
"params",
")",
"if",
"not",
"response",
".",
"ok",
":",
"raise",
"CouldNotGetURLError",
"(",
"u\"Couldn't get URL for resource {id}: {code} {reason}\"",
".",
"format",
"(",
"id",
"=",
"resource_id",
",",
"code",
"=",
"response",
".",
"status_code",
",",
"reason",
"=",
"response",
".",
"reason",
")",
")",
"return",
"response",
".",
"json",
"(",
")"
] |
Return the URL for the given resource ID.
Contacts the client site's API to get the URL for the ID and returns it.
:raises CouldNotGetURLError: if getting the URL fails for any reason
|
[
"Return",
"the",
"URL",
"for",
"the",
"given",
"resource",
"ID",
"."
] |
82eed6c73e17b9884476311a7a8fae9d2b379600
|
https://github.com/ckan/deadoralive/blob/82eed6c73e17b9884476311a7a8fae9d2b379600/deadoralive/deadoralive.py#L49-L68
|
train
|
ckan/deadoralive
|
deadoralive/deadoralive.py
|
check_url
|
def check_url(url):
"""Check whether the given URL is dead or alive.
Returns a dict with four keys:
"url": The URL that was checked (string)
"alive": Whether the URL was working, True or False
"status": The HTTP status code of the response from the URL,
e.g. 200, 401, 500 (int)
"reason": The reason for the success or failure of the check,
e.g. "OK", "Unauthorized", "Internal Server Error" (string)
The "status" may be None if we did not get a valid HTTP response,
e.g. in the event of a timeout, DNS failure or invalid HTTP response.
The "reason" will always be a string, but may be a requests library
exception string rather than an HTTP reason string if we did not get a valid
HTTP response.
"""
result = {"url": url}
try:
response = requests.get(url)
result["status"] = response.status_code
result["reason"] = response.reason
response.raise_for_status() # Raise if status_code is not OK.
result["alive"] = True
except AttributeError as err:
if err.message == "'NoneType' object has no attribute 'encode'":
# requests seems to throw these for some invalid URLs.
result["alive"] = False
result["reason"] = "Invalid URL"
result["status"] = None
else:
raise
except requests.exceptions.RequestException as err:
result["alive"] = False
if "reason" not in result:
result["reason"] = str(err)
if "status" not in result:
# This can happen if the response is invalid HTTP, if we get a DNS
# failure, or a timeout, etc.
result["status"] = None
# We should always have these four fields in the result.
assert "url" in result
assert result.get("alive") in (True, False)
assert "status" in result
assert "reason" in result
return result
|
python
|
def check_url(url):
"""Check whether the given URL is dead or alive.
Returns a dict with four keys:
"url": The URL that was checked (string)
"alive": Whether the URL was working, True or False
"status": The HTTP status code of the response from the URL,
e.g. 200, 401, 500 (int)
"reason": The reason for the success or failure of the check,
e.g. "OK", "Unauthorized", "Internal Server Error" (string)
The "status" may be None if we did not get a valid HTTP response,
e.g. in the event of a timeout, DNS failure or invalid HTTP response.
The "reason" will always be a string, but may be a requests library
exception string rather than an HTTP reason string if we did not get a valid
HTTP response.
"""
result = {"url": url}
try:
response = requests.get(url)
result["status"] = response.status_code
result["reason"] = response.reason
response.raise_for_status() # Raise if status_code is not OK.
result["alive"] = True
except AttributeError as err:
if err.message == "'NoneType' object has no attribute 'encode'":
# requests seems to throw these for some invalid URLs.
result["alive"] = False
result["reason"] = "Invalid URL"
result["status"] = None
else:
raise
except requests.exceptions.RequestException as err:
result["alive"] = False
if "reason" not in result:
result["reason"] = str(err)
if "status" not in result:
# This can happen if the response is invalid HTTP, if we get a DNS
# failure, or a timeout, etc.
result["status"] = None
# We should always have these four fields in the result.
assert "url" in result
assert result.get("alive") in (True, False)
assert "status" in result
assert "reason" in result
return result
|
[
"def",
"check_url",
"(",
"url",
")",
":",
"result",
"=",
"{",
"\"url\"",
":",
"url",
"}",
"try",
":",
"response",
"=",
"requests",
".",
"get",
"(",
"url",
")",
"result",
"[",
"\"status\"",
"]",
"=",
"response",
".",
"status_code",
"result",
"[",
"\"reason\"",
"]",
"=",
"response",
".",
"reason",
"response",
".",
"raise_for_status",
"(",
")",
"# Raise if status_code is not OK.",
"result",
"[",
"\"alive\"",
"]",
"=",
"True",
"except",
"AttributeError",
"as",
"err",
":",
"if",
"err",
".",
"message",
"==",
"\"'NoneType' object has no attribute 'encode'\"",
":",
"# requests seems to throw these for some invalid URLs.",
"result",
"[",
"\"alive\"",
"]",
"=",
"False",
"result",
"[",
"\"reason\"",
"]",
"=",
"\"Invalid URL\"",
"result",
"[",
"\"status\"",
"]",
"=",
"None",
"else",
":",
"raise",
"except",
"requests",
".",
"exceptions",
".",
"RequestException",
"as",
"err",
":",
"result",
"[",
"\"alive\"",
"]",
"=",
"False",
"if",
"\"reason\"",
"not",
"in",
"result",
":",
"result",
"[",
"\"reason\"",
"]",
"=",
"str",
"(",
"err",
")",
"if",
"\"status\"",
"not",
"in",
"result",
":",
"# This can happen if the response is invalid HTTP, if we get a DNS",
"# failure, or a timeout, etc.",
"result",
"[",
"\"status\"",
"]",
"=",
"None",
"# We should always have these four fields in the result.",
"assert",
"\"url\"",
"in",
"result",
"assert",
"result",
".",
"get",
"(",
"\"alive\"",
")",
"in",
"(",
"True",
",",
"False",
")",
"assert",
"\"status\"",
"in",
"result",
"assert",
"\"reason\"",
"in",
"result",
"return",
"result"
] |
Check whether the given URL is dead or alive.
Returns a dict with four keys:
"url": The URL that was checked (string)
"alive": Whether the URL was working, True or False
"status": The HTTP status code of the response from the URL,
e.g. 200, 401, 500 (int)
"reason": The reason for the success or failure of the check,
e.g. "OK", "Unauthorized", "Internal Server Error" (string)
The "status" may be None if we did not get a valid HTTP response,
e.g. in the event of a timeout, DNS failure or invalid HTTP response.
The "reason" will always be a string, but may be a requests library
exception string rather than an HTTP reason string if we did not get a valid
HTTP response.
|
[
"Check",
"whether",
"the",
"given",
"URL",
"is",
"dead",
"or",
"alive",
"."
] |
82eed6c73e17b9884476311a7a8fae9d2b379600
|
https://github.com/ckan/deadoralive/blob/82eed6c73e17b9884476311a7a8fae9d2b379600/deadoralive/deadoralive.py#L71-L121
|
train
|
ckan/deadoralive
|
deadoralive/deadoralive.py
|
upsert_result
|
def upsert_result(client_site_url, apikey, resource_id, result):
"""Post the given link check result to the client site."""
# TODO: Handle exceptions and unexpected results.
url = client_site_url + u"deadoralive/upsert"
params = result.copy()
params["resource_id"] = resource_id
requests.post(url, headers=dict(Authorization=apikey), params=params)
|
python
|
def upsert_result(client_site_url, apikey, resource_id, result):
"""Post the given link check result to the client site."""
# TODO: Handle exceptions and unexpected results.
url = client_site_url + u"deadoralive/upsert"
params = result.copy()
params["resource_id"] = resource_id
requests.post(url, headers=dict(Authorization=apikey), params=params)
|
[
"def",
"upsert_result",
"(",
"client_site_url",
",",
"apikey",
",",
"resource_id",
",",
"result",
")",
":",
"# TODO: Handle exceptions and unexpected results.",
"url",
"=",
"client_site_url",
"+",
"u\"deadoralive/upsert\"",
"params",
"=",
"result",
".",
"copy",
"(",
")",
"params",
"[",
"\"resource_id\"",
"]",
"=",
"resource_id",
"requests",
".",
"post",
"(",
"url",
",",
"headers",
"=",
"dict",
"(",
"Authorization",
"=",
"apikey",
")",
",",
"params",
"=",
"params",
")"
] |
Post the given link check result to the client site.
|
[
"Post",
"the",
"given",
"link",
"check",
"result",
"to",
"the",
"client",
"site",
"."
] |
82eed6c73e17b9884476311a7a8fae9d2b379600
|
https://github.com/ckan/deadoralive/blob/82eed6c73e17b9884476311a7a8fae9d2b379600/deadoralive/deadoralive.py#L124-L131
|
train
|
ckan/deadoralive
|
deadoralive/deadoralive.py
|
get_check_and_report
|
def get_check_and_report(client_site_url, apikey, get_resource_ids_to_check,
get_url_for_id, check_url, upsert_result):
"""Get links from the client site, check them, and post the results back.
Get resource IDs from the client site, get the URL for each resource ID from
the client site, check each URL, and post the results back to the client
site.
This function can be called repeatedly to keep on getting more links from
the client site and checking them.
The functions that this function calls to carry out the various tasks are
taken as parameters to this function for testing purposes - it makes it
easy for tests to pass in mock functions. It also decouples the code nicely.
:param client_site_url: the base URL of the client site
:type client_site_url: string
:param apikey: the API key to use when making requests to the client site
:type apikey: string or None
:param get_resource_ids_to_check: The function to call to get the list of
resource IDs to be checked from the client site. See
get_resource_ids_to_check() above for the interface that this function
should implement.
:type get_resource_ids_to_check: callable
:param get_url_for_id: The function to call to get the URL for a given
resource ID from the client site. See get_url_for_id() above for the
interface that this function should implement.
:type get_url_for_id: callable
:param check_url: The function to call to check whether a URL is dead or
alive. See check_url() above for the interface that this function
should implement.
:type check_url: callable
:param upsert_result: The function to call to post a link check result to
the client site. See upsert_result() above for the interface that this
function should implement.
:type upsert_result: callable
"""
logger = _get_logger()
resource_ids = get_resource_ids_to_check(client_site_url, apikey)
for resource_id in resource_ids:
try:
url = get_url_for_id(client_site_url, apikey, resource_id)
except CouldNotGetURLError:
logger.info(u"This link checker was not authorized to access "
"resource {0}, skipping.".format(resource_id))
continue
result = check_url(url)
status = result["status"]
reason = result["reason"]
if result["alive"]:
logger.info(u"Checking URL {0} of resource {1} succeeded with "
"status {2}:".format(url, resource_id, status))
else:
logger.info(u"Checking URL {0} of resource {1} failed with error "
"{2}:".format(url, resource_id, reason))
upsert_result(client_site_url, apikey, resource_id=resource_id,
result=result)
|
python
|
def get_check_and_report(client_site_url, apikey, get_resource_ids_to_check,
get_url_for_id, check_url, upsert_result):
"""Get links from the client site, check them, and post the results back.
Get resource IDs from the client site, get the URL for each resource ID from
the client site, check each URL, and post the results back to the client
site.
This function can be called repeatedly to keep on getting more links from
the client site and checking them.
The functions that this function calls to carry out the various tasks are
taken as parameters to this function for testing purposes - it makes it
easy for tests to pass in mock functions. It also decouples the code nicely.
:param client_site_url: the base URL of the client site
:type client_site_url: string
:param apikey: the API key to use when making requests to the client site
:type apikey: string or None
:param get_resource_ids_to_check: The function to call to get the list of
resource IDs to be checked from the client site. See
get_resource_ids_to_check() above for the interface that this function
should implement.
:type get_resource_ids_to_check: callable
:param get_url_for_id: The function to call to get the URL for a given
resource ID from the client site. See get_url_for_id() above for the
interface that this function should implement.
:type get_url_for_id: callable
:param check_url: The function to call to check whether a URL is dead or
alive. See check_url() above for the interface that this function
should implement.
:type check_url: callable
:param upsert_result: The function to call to post a link check result to
the client site. See upsert_result() above for the interface that this
function should implement.
:type upsert_result: callable
"""
logger = _get_logger()
resource_ids = get_resource_ids_to_check(client_site_url, apikey)
for resource_id in resource_ids:
try:
url = get_url_for_id(client_site_url, apikey, resource_id)
except CouldNotGetURLError:
logger.info(u"This link checker was not authorized to access "
"resource {0}, skipping.".format(resource_id))
continue
result = check_url(url)
status = result["status"]
reason = result["reason"]
if result["alive"]:
logger.info(u"Checking URL {0} of resource {1} succeeded with "
"status {2}:".format(url, resource_id, status))
else:
logger.info(u"Checking URL {0} of resource {1} failed with error "
"{2}:".format(url, resource_id, reason))
upsert_result(client_site_url, apikey, resource_id=resource_id,
result=result)
|
[
"def",
"get_check_and_report",
"(",
"client_site_url",
",",
"apikey",
",",
"get_resource_ids_to_check",
",",
"get_url_for_id",
",",
"check_url",
",",
"upsert_result",
")",
":",
"logger",
"=",
"_get_logger",
"(",
")",
"resource_ids",
"=",
"get_resource_ids_to_check",
"(",
"client_site_url",
",",
"apikey",
")",
"for",
"resource_id",
"in",
"resource_ids",
":",
"try",
":",
"url",
"=",
"get_url_for_id",
"(",
"client_site_url",
",",
"apikey",
",",
"resource_id",
")",
"except",
"CouldNotGetURLError",
":",
"logger",
".",
"info",
"(",
"u\"This link checker was not authorized to access \"",
"\"resource {0}, skipping.\"",
".",
"format",
"(",
"resource_id",
")",
")",
"continue",
"result",
"=",
"check_url",
"(",
"url",
")",
"status",
"=",
"result",
"[",
"\"status\"",
"]",
"reason",
"=",
"result",
"[",
"\"reason\"",
"]",
"if",
"result",
"[",
"\"alive\"",
"]",
":",
"logger",
".",
"info",
"(",
"u\"Checking URL {0} of resource {1} succeeded with \"",
"\"status {2}:\"",
".",
"format",
"(",
"url",
",",
"resource_id",
",",
"status",
")",
")",
"else",
":",
"logger",
".",
"info",
"(",
"u\"Checking URL {0} of resource {1} failed with error \"",
"\"{2}:\"",
".",
"format",
"(",
"url",
",",
"resource_id",
",",
"reason",
")",
")",
"upsert_result",
"(",
"client_site_url",
",",
"apikey",
",",
"resource_id",
"=",
"resource_id",
",",
"result",
"=",
"result",
")"
] |
Get links from the client site, check them, and post the results back.
Get resource IDs from the client site, get the URL for each resource ID from
the client site, check each URL, and post the results back to the client
site.
This function can be called repeatedly to keep on getting more links from
the client site and checking them.
The functions that this function calls to carry out the various tasks are
taken as parameters to this function for testing purposes - it makes it
easy for tests to pass in mock functions. It also decouples the code nicely.
:param client_site_url: the base URL of the client site
:type client_site_url: string
:param apikey: the API key to use when making requests to the client site
:type apikey: string or None
:param get_resource_ids_to_check: The function to call to get the list of
resource IDs to be checked from the client site. See
get_resource_ids_to_check() above for the interface that this function
should implement.
:type get_resource_ids_to_check: callable
:param get_url_for_id: The function to call to get the URL for a given
resource ID from the client site. See get_url_for_id() above for the
interface that this function should implement.
:type get_url_for_id: callable
:param check_url: The function to call to check whether a URL is dead or
alive. See check_url() above for the interface that this function
should implement.
:type check_url: callable
:param upsert_result: The function to call to post a link check result to
the client site. See upsert_result() above for the interface that this
function should implement.
:type upsert_result: callable
|
[
"Get",
"links",
"from",
"the",
"client",
"site",
"check",
"them",
"and",
"post",
"the",
"results",
"back",
"."
] |
82eed6c73e17b9884476311a7a8fae9d2b379600
|
https://github.com/ckan/deadoralive/blob/82eed6c73e17b9884476311a7a8fae9d2b379600/deadoralive/deadoralive.py#L148-L210
|
train
|
okfn/ofs
|
ofs/local/zipfile.py
|
ZipExtFile.peek
|
def peek(self, n=1):
"""Returns buffered bytes without advancing the position."""
if n > len(self._readbuffer) - self._offset:
chunk = self.read(n)
self._offset -= len(chunk)
# Return up to 512 bytes to reduce allocation overhead for tight loops.
return self._readbuffer[self._offset: self._offset + 512]
|
python
|
def peek(self, n=1):
"""Returns buffered bytes without advancing the position."""
if n > len(self._readbuffer) - self._offset:
chunk = self.read(n)
self._offset -= len(chunk)
# Return up to 512 bytes to reduce allocation overhead for tight loops.
return self._readbuffer[self._offset: self._offset + 512]
|
[
"def",
"peek",
"(",
"self",
",",
"n",
"=",
"1",
")",
":",
"if",
"n",
">",
"len",
"(",
"self",
".",
"_readbuffer",
")",
"-",
"self",
".",
"_offset",
":",
"chunk",
"=",
"self",
".",
"read",
"(",
"n",
")",
"self",
".",
"_offset",
"-=",
"len",
"(",
"chunk",
")",
"# Return up to 512 bytes to reduce allocation overhead for tight loops.",
"return",
"self",
".",
"_readbuffer",
"[",
"self",
".",
"_offset",
":",
"self",
".",
"_offset",
"+",
"512",
"]"
] |
Returns buffered bytes without advancing the position.
|
[
"Returns",
"buffered",
"bytes",
"without",
"advancing",
"the",
"position",
"."
] |
c110cbecd7d0ae7e877963914a1a5af030cd6d45
|
https://github.com/okfn/ofs/blob/c110cbecd7d0ae7e877963914a1a5af030cd6d45/ofs/local/zipfile.py#L551-L558
|
train
|
okfn/ofs
|
ofs/local/zipfile.py
|
ZipExtFile.read
|
def read(self, n=-1):
"""Read and return up to n bytes.
If the argument is omitted, None, or negative, data is read and returned until EOF is reached..
"""
buf = b''
while n < 0 or n is None or n > len(buf):
data = self.read1(n)
if len(data) == 0:
return buf
buf += data
return buf
|
python
|
def read(self, n=-1):
"""Read and return up to n bytes.
If the argument is omitted, None, or negative, data is read and returned until EOF is reached..
"""
buf = b''
while n < 0 or n is None or n > len(buf):
data = self.read1(n)
if len(data) == 0:
return buf
buf += data
return buf
|
[
"def",
"read",
"(",
"self",
",",
"n",
"=",
"-",
"1",
")",
":",
"buf",
"=",
"b''",
"while",
"n",
"<",
"0",
"or",
"n",
"is",
"None",
"or",
"n",
">",
"len",
"(",
"buf",
")",
":",
"data",
"=",
"self",
".",
"read1",
"(",
"n",
")",
"if",
"len",
"(",
"data",
")",
"==",
"0",
":",
"return",
"buf",
"buf",
"+=",
"data",
"return",
"buf"
] |
Read and return up to n bytes.
If the argument is omitted, None, or negative, data is read and returned until EOF is reached..
|
[
"Read",
"and",
"return",
"up",
"to",
"n",
"bytes",
".",
"If",
"the",
"argument",
"is",
"omitted",
"None",
"or",
"negative",
"data",
"is",
"read",
"and",
"returned",
"until",
"EOF",
"is",
"reached",
".."
] |
c110cbecd7d0ae7e877963914a1a5af030cd6d45
|
https://github.com/okfn/ofs/blob/c110cbecd7d0ae7e877963914a1a5af030cd6d45/ofs/local/zipfile.py#L563-L576
|
train
|
okfn/ofs
|
ofs/local/zipfile.py
|
ZipFile._RealGetContents
|
def _RealGetContents(self):
"""Read in the table of contents for the ZIP file."""
fp = self.fp
endrec = _EndRecData(fp)
if not endrec:
raise BadZipfile("File is not a zip file")
if self.debug > 1:
print(endrec)
size_cd = endrec[_ECD_SIZE] # bytes in central directory
offset_cd = endrec[_ECD_OFFSET] # offset of central directory
self.comment = endrec[_ECD_COMMENT] # archive comment
# "concat" is zero, unless zip was concatenated to another file
concat = endrec[_ECD_LOCATION] - size_cd - offset_cd
if endrec[_ECD_SIGNATURE] == stringEndArchive64:
# If Zip64 extension structures are present, account for them
concat -= (sizeEndCentDir64 + sizeEndCentDir64Locator)
if self.debug > 2:
inferred = concat + offset_cd
print("given, inferred, offset", offset_cd, inferred, concat)
# self.start_dir: Position of start of central directory
self.start_dir = offset_cd + concat
fp.seek(self.start_dir, 0)
data = fp.read(size_cd)
fp = cStringIO.StringIO(data)
total = 0
while total < size_cd:
centdir = fp.read(sizeCentralDir)
if centdir[0:4] != stringCentralDir:
raise BadZipfile("Bad magic number for central directory")
centdir = struct.unpack(structCentralDir, centdir)
if self.debug > 2:
print(centdir)
filename = fp.read(centdir[_CD_FILENAME_LENGTH])
# Create ZipInfo instance to store file information
x = ZipInfo(filename)
x.extra = fp.read(centdir[_CD_EXTRA_FIELD_LENGTH])
x.comment = fp.read(centdir[_CD_COMMENT_LENGTH])
x.header_offset = centdir[_CD_LOCAL_HEADER_OFFSET]
(x.create_version, x.create_system, x.extract_version, x.reserved,
x.flag_bits, x.compress_type, t, d,
x.CRC, x.compress_size, x.file_size) = centdir[1:12]
x.volume, x.internal_attr, x.external_attr = centdir[15:18]
# Convert date/time code to (year, month, day, hour, min, sec)
x._raw_time = t
x.date_time = ( (d>>9)+1980, (d>>5)&0xF, d&0x1F,
t>>11, (t>>5)&0x3F, (t&0x1F) * 2 )
x._decodeExtra()
x.header_offset = x.header_offset + concat
x.filename = x._decodeFilename()
self.filelist.append(x)
self.NameToInfo[x.filename] = x
# update total bytes read from central directory
total = (total + sizeCentralDir + centdir[_CD_FILENAME_LENGTH]
+ centdir[_CD_EXTRA_FIELD_LENGTH]
+ centdir[_CD_COMMENT_LENGTH])
if self.debug > 2:
print("total", total)
|
python
|
def _RealGetContents(self):
"""Read in the table of contents for the ZIP file."""
fp = self.fp
endrec = _EndRecData(fp)
if not endrec:
raise BadZipfile("File is not a zip file")
if self.debug > 1:
print(endrec)
size_cd = endrec[_ECD_SIZE] # bytes in central directory
offset_cd = endrec[_ECD_OFFSET] # offset of central directory
self.comment = endrec[_ECD_COMMENT] # archive comment
# "concat" is zero, unless zip was concatenated to another file
concat = endrec[_ECD_LOCATION] - size_cd - offset_cd
if endrec[_ECD_SIGNATURE] == stringEndArchive64:
# If Zip64 extension structures are present, account for them
concat -= (sizeEndCentDir64 + sizeEndCentDir64Locator)
if self.debug > 2:
inferred = concat + offset_cd
print("given, inferred, offset", offset_cd, inferred, concat)
# self.start_dir: Position of start of central directory
self.start_dir = offset_cd + concat
fp.seek(self.start_dir, 0)
data = fp.read(size_cd)
fp = cStringIO.StringIO(data)
total = 0
while total < size_cd:
centdir = fp.read(sizeCentralDir)
if centdir[0:4] != stringCentralDir:
raise BadZipfile("Bad magic number for central directory")
centdir = struct.unpack(structCentralDir, centdir)
if self.debug > 2:
print(centdir)
filename = fp.read(centdir[_CD_FILENAME_LENGTH])
# Create ZipInfo instance to store file information
x = ZipInfo(filename)
x.extra = fp.read(centdir[_CD_EXTRA_FIELD_LENGTH])
x.comment = fp.read(centdir[_CD_COMMENT_LENGTH])
x.header_offset = centdir[_CD_LOCAL_HEADER_OFFSET]
(x.create_version, x.create_system, x.extract_version, x.reserved,
x.flag_bits, x.compress_type, t, d,
x.CRC, x.compress_size, x.file_size) = centdir[1:12]
x.volume, x.internal_attr, x.external_attr = centdir[15:18]
# Convert date/time code to (year, month, day, hour, min, sec)
x._raw_time = t
x.date_time = ( (d>>9)+1980, (d>>5)&0xF, d&0x1F,
t>>11, (t>>5)&0x3F, (t&0x1F) * 2 )
x._decodeExtra()
x.header_offset = x.header_offset + concat
x.filename = x._decodeFilename()
self.filelist.append(x)
self.NameToInfo[x.filename] = x
# update total bytes read from central directory
total = (total + sizeCentralDir + centdir[_CD_FILENAME_LENGTH]
+ centdir[_CD_EXTRA_FIELD_LENGTH]
+ centdir[_CD_COMMENT_LENGTH])
if self.debug > 2:
print("total", total)
|
[
"def",
"_RealGetContents",
"(",
"self",
")",
":",
"fp",
"=",
"self",
".",
"fp",
"endrec",
"=",
"_EndRecData",
"(",
"fp",
")",
"if",
"not",
"endrec",
":",
"raise",
"BadZipfile",
"(",
"\"File is not a zip file\"",
")",
"if",
"self",
".",
"debug",
">",
"1",
":",
"print",
"(",
"endrec",
")",
"size_cd",
"=",
"endrec",
"[",
"_ECD_SIZE",
"]",
"# bytes in central directory",
"offset_cd",
"=",
"endrec",
"[",
"_ECD_OFFSET",
"]",
"# offset of central directory",
"self",
".",
"comment",
"=",
"endrec",
"[",
"_ECD_COMMENT",
"]",
"# archive comment",
"# \"concat\" is zero, unless zip was concatenated to another file",
"concat",
"=",
"endrec",
"[",
"_ECD_LOCATION",
"]",
"-",
"size_cd",
"-",
"offset_cd",
"if",
"endrec",
"[",
"_ECD_SIGNATURE",
"]",
"==",
"stringEndArchive64",
":",
"# If Zip64 extension structures are present, account for them",
"concat",
"-=",
"(",
"sizeEndCentDir64",
"+",
"sizeEndCentDir64Locator",
")",
"if",
"self",
".",
"debug",
">",
"2",
":",
"inferred",
"=",
"concat",
"+",
"offset_cd",
"print",
"(",
"\"given, inferred, offset\"",
",",
"offset_cd",
",",
"inferred",
",",
"concat",
")",
"# self.start_dir: Position of start of central directory",
"self",
".",
"start_dir",
"=",
"offset_cd",
"+",
"concat",
"fp",
".",
"seek",
"(",
"self",
".",
"start_dir",
",",
"0",
")",
"data",
"=",
"fp",
".",
"read",
"(",
"size_cd",
")",
"fp",
"=",
"cStringIO",
".",
"StringIO",
"(",
"data",
")",
"total",
"=",
"0",
"while",
"total",
"<",
"size_cd",
":",
"centdir",
"=",
"fp",
".",
"read",
"(",
"sizeCentralDir",
")",
"if",
"centdir",
"[",
"0",
":",
"4",
"]",
"!=",
"stringCentralDir",
":",
"raise",
"BadZipfile",
"(",
"\"Bad magic number for central directory\"",
")",
"centdir",
"=",
"struct",
".",
"unpack",
"(",
"structCentralDir",
",",
"centdir",
")",
"if",
"self",
".",
"debug",
">",
"2",
":",
"print",
"(",
"centdir",
")",
"filename",
"=",
"fp",
".",
"read",
"(",
"centdir",
"[",
"_CD_FILENAME_LENGTH",
"]",
")",
"# Create ZipInfo instance to store file information",
"x",
"=",
"ZipInfo",
"(",
"filename",
")",
"x",
".",
"extra",
"=",
"fp",
".",
"read",
"(",
"centdir",
"[",
"_CD_EXTRA_FIELD_LENGTH",
"]",
")",
"x",
".",
"comment",
"=",
"fp",
".",
"read",
"(",
"centdir",
"[",
"_CD_COMMENT_LENGTH",
"]",
")",
"x",
".",
"header_offset",
"=",
"centdir",
"[",
"_CD_LOCAL_HEADER_OFFSET",
"]",
"(",
"x",
".",
"create_version",
",",
"x",
".",
"create_system",
",",
"x",
".",
"extract_version",
",",
"x",
".",
"reserved",
",",
"x",
".",
"flag_bits",
",",
"x",
".",
"compress_type",
",",
"t",
",",
"d",
",",
"x",
".",
"CRC",
",",
"x",
".",
"compress_size",
",",
"x",
".",
"file_size",
")",
"=",
"centdir",
"[",
"1",
":",
"12",
"]",
"x",
".",
"volume",
",",
"x",
".",
"internal_attr",
",",
"x",
".",
"external_attr",
"=",
"centdir",
"[",
"15",
":",
"18",
"]",
"# Convert date/time code to (year, month, day, hour, min, sec)",
"x",
".",
"_raw_time",
"=",
"t",
"x",
".",
"date_time",
"=",
"(",
"(",
"d",
">>",
"9",
")",
"+",
"1980",
",",
"(",
"d",
">>",
"5",
")",
"&",
"0xF",
",",
"d",
"&",
"0x1F",
",",
"t",
">>",
"11",
",",
"(",
"t",
">>",
"5",
")",
"&",
"0x3F",
",",
"(",
"t",
"&",
"0x1F",
")",
"*",
"2",
")",
"x",
".",
"_decodeExtra",
"(",
")",
"x",
".",
"header_offset",
"=",
"x",
".",
"header_offset",
"+",
"concat",
"x",
".",
"filename",
"=",
"x",
".",
"_decodeFilename",
"(",
")",
"self",
".",
"filelist",
".",
"append",
"(",
"x",
")",
"self",
".",
"NameToInfo",
"[",
"x",
".",
"filename",
"]",
"=",
"x",
"# update total bytes read from central directory",
"total",
"=",
"(",
"total",
"+",
"sizeCentralDir",
"+",
"centdir",
"[",
"_CD_FILENAME_LENGTH",
"]",
"+",
"centdir",
"[",
"_CD_EXTRA_FIELD_LENGTH",
"]",
"+",
"centdir",
"[",
"_CD_COMMENT_LENGTH",
"]",
")",
"if",
"self",
".",
"debug",
">",
"2",
":",
"print",
"(",
"\"total\"",
",",
"total",
")"
] |
Read in the table of contents for the ZIP file.
|
[
"Read",
"in",
"the",
"table",
"of",
"contents",
"for",
"the",
"ZIP",
"file",
"."
] |
c110cbecd7d0ae7e877963914a1a5af030cd6d45
|
https://github.com/okfn/ofs/blob/c110cbecd7d0ae7e877963914a1a5af030cd6d45/ofs/local/zipfile.py#L724-L785
|
train
|
okfn/ofs
|
ofs/local/zipfile.py
|
ZipFile.open
|
def open(self, name, mode="r", pwd=None):
"""Return file-like object for 'name'."""
if mode not in ("r", "U", "rU"):
raise RuntimeError('open() requires mode "r", "U", or "rU"')
if not self.fp:
raise RuntimeError(
"Attempt to read ZIP archive that was already closed"
)
# Only open a new file for instances where we were not
# given a file object in the constructor
if self._filePassed:
zef_file = self.fp
else:
zef_file = open(self.filename, 'rb')
# Make sure we have an info object
if isinstance(name, ZipInfo):
# 'name' is already an info object
zinfo = name
else:
# Get info object for name
zinfo = self.getinfo(name)
zef_file.seek(zinfo.header_offset, 0)
# Skip the file header:
fheader = zef_file.read(sizeFileHeader)
if fheader[0:4] != stringFileHeader:
raise BadZipfile("Bad magic number for file header")
fheader = struct.unpack(structFileHeader, fheader)
fname = zef_file.read(fheader[_FH_FILENAME_LENGTH])
if fheader[_FH_EXTRA_FIELD_LENGTH]:
zef_file.read(fheader[_FH_EXTRA_FIELD_LENGTH])
if fname != zinfo.orig_filename.encode('utf-8'):
raise BadZipfile(
'File name in directory "%s" and header "%s" differ.' % (
zinfo.orig_filename, fname)
)
# check for encrypted flag & handle password
is_encrypted = zinfo.flag_bits & 0x1
zd = None
if is_encrypted:
if not pwd:
pwd = self.pwd
if not pwd:
raise RuntimeError("File %s is encrypted, " \
"password required for extraction" % name)
zd = _ZipDecrypter(pwd)
# The first 12 bytes in the cypher stream is an encryption header
# used to strengthen the algorithm. The first 11 bytes are
# completely random, while the 12th contains the MSB of the CRC,
# or the MSB of the file time depending on the header type
# and is used to check the correctness of the password.
bytes = zef_file.read(12)
h = map(zd, bytes[0:12])
if zinfo.flag_bits & 0x8:
# compare against the file type from extended local headers
check_byte = (zinfo._raw_time >> 8) & 0xff
else:
# compare against the CRC otherwise
check_byte = (zinfo.CRC >> 24) & 0xff
if ord(h[11]) != check_byte:
raise RuntimeError("Bad password for file", name)
return ZipExtFile(zef_file, mode, zinfo, zd)
|
python
|
def open(self, name, mode="r", pwd=None):
"""Return file-like object for 'name'."""
if mode not in ("r", "U", "rU"):
raise RuntimeError('open() requires mode "r", "U", or "rU"')
if not self.fp:
raise RuntimeError(
"Attempt to read ZIP archive that was already closed"
)
# Only open a new file for instances where we were not
# given a file object in the constructor
if self._filePassed:
zef_file = self.fp
else:
zef_file = open(self.filename, 'rb')
# Make sure we have an info object
if isinstance(name, ZipInfo):
# 'name' is already an info object
zinfo = name
else:
# Get info object for name
zinfo = self.getinfo(name)
zef_file.seek(zinfo.header_offset, 0)
# Skip the file header:
fheader = zef_file.read(sizeFileHeader)
if fheader[0:4] != stringFileHeader:
raise BadZipfile("Bad magic number for file header")
fheader = struct.unpack(structFileHeader, fheader)
fname = zef_file.read(fheader[_FH_FILENAME_LENGTH])
if fheader[_FH_EXTRA_FIELD_LENGTH]:
zef_file.read(fheader[_FH_EXTRA_FIELD_LENGTH])
if fname != zinfo.orig_filename.encode('utf-8'):
raise BadZipfile(
'File name in directory "%s" and header "%s" differ.' % (
zinfo.orig_filename, fname)
)
# check for encrypted flag & handle password
is_encrypted = zinfo.flag_bits & 0x1
zd = None
if is_encrypted:
if not pwd:
pwd = self.pwd
if not pwd:
raise RuntimeError("File %s is encrypted, " \
"password required for extraction" % name)
zd = _ZipDecrypter(pwd)
# The first 12 bytes in the cypher stream is an encryption header
# used to strengthen the algorithm. The first 11 bytes are
# completely random, while the 12th contains the MSB of the CRC,
# or the MSB of the file time depending on the header type
# and is used to check the correctness of the password.
bytes = zef_file.read(12)
h = map(zd, bytes[0:12])
if zinfo.flag_bits & 0x8:
# compare against the file type from extended local headers
check_byte = (zinfo._raw_time >> 8) & 0xff
else:
# compare against the CRC otherwise
check_byte = (zinfo.CRC >> 24) & 0xff
if ord(h[11]) != check_byte:
raise RuntimeError("Bad password for file", name)
return ZipExtFile(zef_file, mode, zinfo, zd)
|
[
"def",
"open",
"(",
"self",
",",
"name",
",",
"mode",
"=",
"\"r\"",
",",
"pwd",
"=",
"None",
")",
":",
"if",
"mode",
"not",
"in",
"(",
"\"r\"",
",",
"\"U\"",
",",
"\"rU\"",
")",
":",
"raise",
"RuntimeError",
"(",
"'open() requires mode \"r\", \"U\", or \"rU\"'",
")",
"if",
"not",
"self",
".",
"fp",
":",
"raise",
"RuntimeError",
"(",
"\"Attempt to read ZIP archive that was already closed\"",
")",
"# Only open a new file for instances where we were not",
"# given a file object in the constructor",
"if",
"self",
".",
"_filePassed",
":",
"zef_file",
"=",
"self",
".",
"fp",
"else",
":",
"zef_file",
"=",
"open",
"(",
"self",
".",
"filename",
",",
"'rb'",
")",
"# Make sure we have an info object",
"if",
"isinstance",
"(",
"name",
",",
"ZipInfo",
")",
":",
"# 'name' is already an info object",
"zinfo",
"=",
"name",
"else",
":",
"# Get info object for name",
"zinfo",
"=",
"self",
".",
"getinfo",
"(",
"name",
")",
"zef_file",
".",
"seek",
"(",
"zinfo",
".",
"header_offset",
",",
"0",
")",
"# Skip the file header:",
"fheader",
"=",
"zef_file",
".",
"read",
"(",
"sizeFileHeader",
")",
"if",
"fheader",
"[",
"0",
":",
"4",
"]",
"!=",
"stringFileHeader",
":",
"raise",
"BadZipfile",
"(",
"\"Bad magic number for file header\"",
")",
"fheader",
"=",
"struct",
".",
"unpack",
"(",
"structFileHeader",
",",
"fheader",
")",
"fname",
"=",
"zef_file",
".",
"read",
"(",
"fheader",
"[",
"_FH_FILENAME_LENGTH",
"]",
")",
"if",
"fheader",
"[",
"_FH_EXTRA_FIELD_LENGTH",
"]",
":",
"zef_file",
".",
"read",
"(",
"fheader",
"[",
"_FH_EXTRA_FIELD_LENGTH",
"]",
")",
"if",
"fname",
"!=",
"zinfo",
".",
"orig_filename",
".",
"encode",
"(",
"'utf-8'",
")",
":",
"raise",
"BadZipfile",
"(",
"'File name in directory \"%s\" and header \"%s\" differ.'",
"%",
"(",
"zinfo",
".",
"orig_filename",
",",
"fname",
")",
")",
"# check for encrypted flag & handle password",
"is_encrypted",
"=",
"zinfo",
".",
"flag_bits",
"&",
"0x1",
"zd",
"=",
"None",
"if",
"is_encrypted",
":",
"if",
"not",
"pwd",
":",
"pwd",
"=",
"self",
".",
"pwd",
"if",
"not",
"pwd",
":",
"raise",
"RuntimeError",
"(",
"\"File %s is encrypted, \"",
"\"password required for extraction\"",
"%",
"name",
")",
"zd",
"=",
"_ZipDecrypter",
"(",
"pwd",
")",
"# The first 12 bytes in the cypher stream is an encryption header",
"# used to strengthen the algorithm. The first 11 bytes are",
"# completely random, while the 12th contains the MSB of the CRC,",
"# or the MSB of the file time depending on the header type",
"# and is used to check the correctness of the password.",
"bytes",
"=",
"zef_file",
".",
"read",
"(",
"12",
")",
"h",
"=",
"map",
"(",
"zd",
",",
"bytes",
"[",
"0",
":",
"12",
"]",
")",
"if",
"zinfo",
".",
"flag_bits",
"&",
"0x8",
":",
"# compare against the file type from extended local headers",
"check_byte",
"=",
"(",
"zinfo",
".",
"_raw_time",
">>",
"8",
")",
"&",
"0xff",
"else",
":",
"# compare against the CRC otherwise",
"check_byte",
"=",
"(",
"zinfo",
".",
"CRC",
">>",
"24",
")",
"&",
"0xff",
"if",
"ord",
"(",
"h",
"[",
"11",
"]",
")",
"!=",
"check_byte",
":",
"raise",
"RuntimeError",
"(",
"\"Bad password for file\"",
",",
"name",
")",
"return",
"ZipExtFile",
"(",
"zef_file",
",",
"mode",
",",
"zinfo",
",",
"zd",
")"
] |
Return file-like object for 'name'.
|
[
"Return",
"file",
"-",
"like",
"object",
"for",
"name",
"."
] |
c110cbecd7d0ae7e877963914a1a5af030cd6d45
|
https://github.com/okfn/ofs/blob/c110cbecd7d0ae7e877963914a1a5af030cd6d45/ofs/local/zipfile.py#L837-L906
|
train
|
okfn/ofs
|
ofs/local/zipfile.py
|
ZipFile.remove
|
def remove(self, member):
"""Remove a member from the archive."""
# Make sure we have an info object
if isinstance(member, ZipInfo):
# 'member' is already an info object
zinfo = member
else:
# Get info object for name
zinfo = self.getinfo(member)
# compute the location of the file data in the local file header,
# by adding the lengths of the records before it
zlen = len(zinfo.FileHeader()) + zinfo.compress_size
fileidx = self.filelist.index(zinfo)
fileofs = sum(
[len(self.filelist[f].FileHeader()) + self.filelist[f].compress_size
for f in xrange(0, fileidx)]
)
self.fp.seek(fileofs + zlen)
after = self.fp.read()
self.fp.seek(fileofs)
self.fp.write(after)
self.fp.seek(-zlen, 2)
self.fp.truncate()
self._didModify = True
self.filelist.remove(zinfo)
del self.NameToInfo[member]
|
python
|
def remove(self, member):
"""Remove a member from the archive."""
# Make sure we have an info object
if isinstance(member, ZipInfo):
# 'member' is already an info object
zinfo = member
else:
# Get info object for name
zinfo = self.getinfo(member)
# compute the location of the file data in the local file header,
# by adding the lengths of the records before it
zlen = len(zinfo.FileHeader()) + zinfo.compress_size
fileidx = self.filelist.index(zinfo)
fileofs = sum(
[len(self.filelist[f].FileHeader()) + self.filelist[f].compress_size
for f in xrange(0, fileidx)]
)
self.fp.seek(fileofs + zlen)
after = self.fp.read()
self.fp.seek(fileofs)
self.fp.write(after)
self.fp.seek(-zlen, 2)
self.fp.truncate()
self._didModify = True
self.filelist.remove(zinfo)
del self.NameToInfo[member]
|
[
"def",
"remove",
"(",
"self",
",",
"member",
")",
":",
"# Make sure we have an info object",
"if",
"isinstance",
"(",
"member",
",",
"ZipInfo",
")",
":",
"# 'member' is already an info object",
"zinfo",
"=",
"member",
"else",
":",
"# Get info object for name",
"zinfo",
"=",
"self",
".",
"getinfo",
"(",
"member",
")",
"# compute the location of the file data in the local file header,",
"# by adding the lengths of the records before it",
"zlen",
"=",
"len",
"(",
"zinfo",
".",
"FileHeader",
"(",
")",
")",
"+",
"zinfo",
".",
"compress_size",
"fileidx",
"=",
"self",
".",
"filelist",
".",
"index",
"(",
"zinfo",
")",
"fileofs",
"=",
"sum",
"(",
"[",
"len",
"(",
"self",
".",
"filelist",
"[",
"f",
"]",
".",
"FileHeader",
"(",
")",
")",
"+",
"self",
".",
"filelist",
"[",
"f",
"]",
".",
"compress_size",
"for",
"f",
"in",
"xrange",
"(",
"0",
",",
"fileidx",
")",
"]",
")",
"self",
".",
"fp",
".",
"seek",
"(",
"fileofs",
"+",
"zlen",
")",
"after",
"=",
"self",
".",
"fp",
".",
"read",
"(",
")",
"self",
".",
"fp",
".",
"seek",
"(",
"fileofs",
")",
"self",
".",
"fp",
".",
"write",
"(",
"after",
")",
"self",
".",
"fp",
".",
"seek",
"(",
"-",
"zlen",
",",
"2",
")",
"self",
".",
"fp",
".",
"truncate",
"(",
")",
"self",
".",
"_didModify",
"=",
"True",
"self",
".",
"filelist",
".",
"remove",
"(",
"zinfo",
")",
"del",
"self",
".",
"NameToInfo",
"[",
"member",
"]"
] |
Remove a member from the archive.
|
[
"Remove",
"a",
"member",
"from",
"the",
"archive",
"."
] |
c110cbecd7d0ae7e877963914a1a5af030cd6d45
|
https://github.com/okfn/ofs/blob/c110cbecd7d0ae7e877963914a1a5af030cd6d45/ofs/local/zipfile.py#L1118-L1146
|
train
|
okfn/ofs
|
ofs/local/zipfile.py
|
PyZipFile._get_codename
|
def _get_codename(self, pathname, basename):
"""Return (filename, archivename) for the path.
Given a module name path, return the correct file path and
archive name, compiling if necessary. For example, given
/python/lib/string, return (/python/lib/string.pyc, string).
"""
file_py = pathname + ".py"
file_pyc = pathname + ".pyc"
file_pyo = pathname + ".pyo"
if os.path.isfile(file_pyo) and \
os.stat(file_pyo).st_mtime >= os.stat(file_py).st_mtime:
fname = file_pyo # Use .pyo file
elif not os.path.isfile(file_pyc) or \
os.stat(file_pyc).st_mtime < os.stat(file_py).st_mtime:
import py_compile
if self.debug:
print("Compiling", file_py)
try:
py_compile.compile(file_py, file_pyc, None, True)
except py_compile.PyCompileError as err:
print(err.msg)
fname = file_pyc
else:
fname = file_pyc
archivename = os.path.split(fname)[1]
if basename:
archivename = "%s/%s" % (basename, archivename)
return (fname, archivename)
|
python
|
def _get_codename(self, pathname, basename):
"""Return (filename, archivename) for the path.
Given a module name path, return the correct file path and
archive name, compiling if necessary. For example, given
/python/lib/string, return (/python/lib/string.pyc, string).
"""
file_py = pathname + ".py"
file_pyc = pathname + ".pyc"
file_pyo = pathname + ".pyo"
if os.path.isfile(file_pyo) and \
os.stat(file_pyo).st_mtime >= os.stat(file_py).st_mtime:
fname = file_pyo # Use .pyo file
elif not os.path.isfile(file_pyc) or \
os.stat(file_pyc).st_mtime < os.stat(file_py).st_mtime:
import py_compile
if self.debug:
print("Compiling", file_py)
try:
py_compile.compile(file_py, file_pyc, None, True)
except py_compile.PyCompileError as err:
print(err.msg)
fname = file_pyc
else:
fname = file_pyc
archivename = os.path.split(fname)[1]
if basename:
archivename = "%s/%s" % (basename, archivename)
return (fname, archivename)
|
[
"def",
"_get_codename",
"(",
"self",
",",
"pathname",
",",
"basename",
")",
":",
"file_py",
"=",
"pathname",
"+",
"\".py\"",
"file_pyc",
"=",
"pathname",
"+",
"\".pyc\"",
"file_pyo",
"=",
"pathname",
"+",
"\".pyo\"",
"if",
"os",
".",
"path",
".",
"isfile",
"(",
"file_pyo",
")",
"and",
"os",
".",
"stat",
"(",
"file_pyo",
")",
".",
"st_mtime",
">=",
"os",
".",
"stat",
"(",
"file_py",
")",
".",
"st_mtime",
":",
"fname",
"=",
"file_pyo",
"# Use .pyo file",
"elif",
"not",
"os",
".",
"path",
".",
"isfile",
"(",
"file_pyc",
")",
"or",
"os",
".",
"stat",
"(",
"file_pyc",
")",
".",
"st_mtime",
"<",
"os",
".",
"stat",
"(",
"file_py",
")",
".",
"st_mtime",
":",
"import",
"py_compile",
"if",
"self",
".",
"debug",
":",
"print",
"(",
"\"Compiling\"",
",",
"file_py",
")",
"try",
":",
"py_compile",
".",
"compile",
"(",
"file_py",
",",
"file_pyc",
",",
"None",
",",
"True",
")",
"except",
"py_compile",
".",
"PyCompileError",
"as",
"err",
":",
"print",
"(",
"err",
".",
"msg",
")",
"fname",
"=",
"file_pyc",
"else",
":",
"fname",
"=",
"file_pyc",
"archivename",
"=",
"os",
".",
"path",
".",
"split",
"(",
"fname",
")",
"[",
"1",
"]",
"if",
"basename",
":",
"archivename",
"=",
"\"%s/%s\"",
"%",
"(",
"basename",
",",
"archivename",
")",
"return",
"(",
"fname",
",",
"archivename",
")"
] |
Return (filename, archivename) for the path.
Given a module name path, return the correct file path and
archive name, compiling if necessary. For example, given
/python/lib/string, return (/python/lib/string.pyc, string).
|
[
"Return",
"(",
"filename",
"archivename",
")",
"for",
"the",
"path",
"."
] |
c110cbecd7d0ae7e877963914a1a5af030cd6d45
|
https://github.com/okfn/ofs/blob/c110cbecd7d0ae7e877963914a1a5af030cd6d45/ofs/local/zipfile.py#L1334-L1362
|
train
|
tanwanirahul/django-batch-requests
|
batch_requests/settings.py
|
import_class
|
def import_class(class_path):
'''
Imports the class for the given class name.
'''
module_name, class_name = class_path.rsplit(".", 1)
module = import_module(module_name)
claz = getattr(module, class_name)
return claz
|
python
|
def import_class(class_path):
'''
Imports the class for the given class name.
'''
module_name, class_name = class_path.rsplit(".", 1)
module = import_module(module_name)
claz = getattr(module, class_name)
return claz
|
[
"def",
"import_class",
"(",
"class_path",
")",
":",
"module_name",
",",
"class_name",
"=",
"class_path",
".",
"rsplit",
"(",
"\".\"",
",",
"1",
")",
"module",
"=",
"import_module",
"(",
"module_name",
")",
"claz",
"=",
"getattr",
"(",
"module",
",",
"class_name",
")",
"return",
"claz"
] |
Imports the class for the given class name.
|
[
"Imports",
"the",
"class",
"for",
"the",
"given",
"class",
"name",
"."
] |
9c5afc42f7542f466247f4ffed9c44e1c49fa20d
|
https://github.com/tanwanirahul/django-batch-requests/blob/9c5afc42f7542f466247f4ffed9c44e1c49fa20d/batch_requests/settings.py#L27-L34
|
train
|
tanwanirahul/django-batch-requests
|
batch_requests/settings.py
|
BatchRequestSettings._executor
|
def _executor(self):
'''
Creating an ExecutorPool is a costly operation. Executor needs to be instantiated only once.
'''
if self.EXECUTE_PARALLEL is False:
executor_path = "batch_requests.concurrent.executor.SequentialExecutor"
executor_class = import_class(executor_path)
return executor_class()
else:
executor_path = self.CONCURRENT_EXECUTOR
executor_class = import_class(executor_path)
return executor_class(self.NUM_WORKERS)
|
python
|
def _executor(self):
'''
Creating an ExecutorPool is a costly operation. Executor needs to be instantiated only once.
'''
if self.EXECUTE_PARALLEL is False:
executor_path = "batch_requests.concurrent.executor.SequentialExecutor"
executor_class = import_class(executor_path)
return executor_class()
else:
executor_path = self.CONCURRENT_EXECUTOR
executor_class = import_class(executor_path)
return executor_class(self.NUM_WORKERS)
|
[
"def",
"_executor",
"(",
"self",
")",
":",
"if",
"self",
".",
"EXECUTE_PARALLEL",
"is",
"False",
":",
"executor_path",
"=",
"\"batch_requests.concurrent.executor.SequentialExecutor\"",
"executor_class",
"=",
"import_class",
"(",
"executor_path",
")",
"return",
"executor_class",
"(",
")",
"else",
":",
"executor_path",
"=",
"self",
".",
"CONCURRENT_EXECUTOR",
"executor_class",
"=",
"import_class",
"(",
"executor_path",
")",
"return",
"executor_class",
"(",
"self",
".",
"NUM_WORKERS",
")"
] |
Creating an ExecutorPool is a costly operation. Executor needs to be instantiated only once.
|
[
"Creating",
"an",
"ExecutorPool",
"is",
"a",
"costly",
"operation",
".",
"Executor",
"needs",
"to",
"be",
"instantiated",
"only",
"once",
"."
] |
9c5afc42f7542f466247f4ffed9c44e1c49fa20d
|
https://github.com/tanwanirahul/django-batch-requests/blob/9c5afc42f7542f466247f4ffed9c44e1c49fa20d/batch_requests/settings.py#L48-L59
|
train
|
okfn/ofs
|
ofs/command.py
|
OFS.make_label
|
def make_label(self, path):
"""
this borrows too much from the internals of ofs
maybe expose different parts of the api?
"""
from datetime import datetime
from StringIO import StringIO
path = path.lstrip("/")
bucket, label = path.split("/", 1)
bucket = self.ofs._require_bucket(bucket)
key = self.ofs._get_key(bucket, label)
if key is None:
key = bucket.new_key(label)
self.ofs._update_key_metadata(key, { '_creation_time': str(datetime.utcnow()) })
key.set_contents_from_file(StringIO(''))
key.close()
|
python
|
def make_label(self, path):
"""
this borrows too much from the internals of ofs
maybe expose different parts of the api?
"""
from datetime import datetime
from StringIO import StringIO
path = path.lstrip("/")
bucket, label = path.split("/", 1)
bucket = self.ofs._require_bucket(bucket)
key = self.ofs._get_key(bucket, label)
if key is None:
key = bucket.new_key(label)
self.ofs._update_key_metadata(key, { '_creation_time': str(datetime.utcnow()) })
key.set_contents_from_file(StringIO(''))
key.close()
|
[
"def",
"make_label",
"(",
"self",
",",
"path",
")",
":",
"from",
"datetime",
"import",
"datetime",
"from",
"StringIO",
"import",
"StringIO",
"path",
"=",
"path",
".",
"lstrip",
"(",
"\"/\"",
")",
"bucket",
",",
"label",
"=",
"path",
".",
"split",
"(",
"\"/\"",
",",
"1",
")",
"bucket",
"=",
"self",
".",
"ofs",
".",
"_require_bucket",
"(",
"bucket",
")",
"key",
"=",
"self",
".",
"ofs",
".",
"_get_key",
"(",
"bucket",
",",
"label",
")",
"if",
"key",
"is",
"None",
":",
"key",
"=",
"bucket",
".",
"new_key",
"(",
"label",
")",
"self",
".",
"ofs",
".",
"_update_key_metadata",
"(",
"key",
",",
"{",
"'_creation_time'",
":",
"str",
"(",
"datetime",
".",
"utcnow",
"(",
")",
")",
"}",
")",
"key",
".",
"set_contents_from_file",
"(",
"StringIO",
"(",
"''",
")",
")",
"key",
".",
"close",
"(",
")"
] |
this borrows too much from the internals of ofs
maybe expose different parts of the api?
|
[
"this",
"borrows",
"too",
"much",
"from",
"the",
"internals",
"of",
"ofs",
"maybe",
"expose",
"different",
"parts",
"of",
"the",
"api?"
] |
c110cbecd7d0ae7e877963914a1a5af030cd6d45
|
https://github.com/okfn/ofs/blob/c110cbecd7d0ae7e877963914a1a5af030cd6d45/ofs/command.py#L57-L73
|
train
|
okfn/ofs
|
ofs/command.py
|
OFS.get_proxy_config
|
def get_proxy_config(self, headers, path):
"""
stub. this really needs to be a call to the remote
restful interface to get the appropriate host and
headers to use for this upload
"""
self.ofs.conn.add_aws_auth_header(headers, 'PUT', path)
from pprint import pprint
pprint(headers)
host = self.ofs.conn.server_name()
return host, headers
|
python
|
def get_proxy_config(self, headers, path):
"""
stub. this really needs to be a call to the remote
restful interface to get the appropriate host and
headers to use for this upload
"""
self.ofs.conn.add_aws_auth_header(headers, 'PUT', path)
from pprint import pprint
pprint(headers)
host = self.ofs.conn.server_name()
return host, headers
|
[
"def",
"get_proxy_config",
"(",
"self",
",",
"headers",
",",
"path",
")",
":",
"self",
".",
"ofs",
".",
"conn",
".",
"add_aws_auth_header",
"(",
"headers",
",",
"'PUT'",
",",
"path",
")",
"from",
"pprint",
"import",
"pprint",
"pprint",
"(",
"headers",
")",
"host",
"=",
"self",
".",
"ofs",
".",
"conn",
".",
"server_name",
"(",
")",
"return",
"host",
",",
"headers"
] |
stub. this really needs to be a call to the remote
restful interface to get the appropriate host and
headers to use for this upload
|
[
"stub",
".",
"this",
"really",
"needs",
"to",
"be",
"a",
"call",
"to",
"the",
"remote",
"restful",
"interface",
"to",
"get",
"the",
"appropriate",
"host",
"and",
"headers",
"to",
"use",
"for",
"this",
"upload"
] |
c110cbecd7d0ae7e877963914a1a5af030cd6d45
|
https://github.com/okfn/ofs/blob/c110cbecd7d0ae7e877963914a1a5af030cd6d45/ofs/command.py#L75-L85
|
train
|
okfn/ofs
|
ofs/command.py
|
OFS.proxy_upload
|
def proxy_upload(self, path, filename, content_type=None, content_encoding=None,
cb=None, num_cb=None):
"""
This is the main function that uploads. We assume the bucket
and key (== path) exists. What we do here is simple. Calculate
the headers we will need, (e.g. md5, content-type, etc). Then
we ask the self.get_proxy_config method to fill in the authentication
information and tell us which remote host we should talk to
for the upload. From there, the rest is ripped from
boto.key.Key.send_file
"""
from boto.connection import AWSAuthConnection
import mimetypes
from hashlib import md5
import base64
BufferSize = 65536 ## set to something very small to make sure
## chunking is working properly
fp = open(filename)
headers = { 'Content-Type': content_type }
if content_type is None:
content_type = mimetypes.guess_type(filename)[0] or "text/plain"
headers['Content-Type'] = content_type
if content_encoding is not None:
headers['Content-Encoding'] = content_encoding
m = md5()
fp.seek(0)
s = fp.read(BufferSize)
while s:
m.update(s)
s = fp.read(BufferSize)
self.size = fp.tell()
fp.seek(0)
self.md5 = m.hexdigest()
headers['Content-MD5'] = base64.encodestring(m.digest()).rstrip('\n')
headers['Content-Length'] = str(self.size)
headers['Expect'] = '100-Continue'
host, headers = self.get_proxy_config(headers, path)
### how to do this same thing with curl instead...
print("curl -i --trace-ascii foo.log -T %s -H %s https://%s%s" % (
filename,
" -H ".join("'%s: %s'" % (k,v) for k,v in headers.items()),
host, path
))
def sender(http_conn, method, path, data, headers):
http_conn.putrequest(method, path)
for key in headers:
http_conn.putheader(key, headers[key])
http_conn.endheaders()
fp.seek(0)
http_conn.set_debuglevel(0) ### XXX set to e.g. 4 to see what going on
if cb:
if num_cb > 2:
cb_count = self.size / BufferSize / (num_cb-2)
elif num_cb < 0:
cb_count = -1
else:
cb_count = 0
i = total_bytes = 0
cb(total_bytes, self.size)
l = fp.read(BufferSize)
while len(l) > 0:
http_conn.send(l)
if cb:
total_bytes += len(l)
i += 1
if i == cb_count or cb_count == -1:
cb(total_bytes, self.size)
i = 0
l = fp.read(BufferSize)
if cb:
cb(total_bytes, self.size)
response = http_conn.getresponse()
body = response.read()
fp.seek(0)
if response.status == 500 or response.status == 503 or \
response.getheader('location'):
# we'll try again
return response
elif response.status >= 200 and response.status <= 299:
self.etag = response.getheader('etag')
if self.etag != '"%s"' % self.md5:
raise Exception('ETag from S3 did not match computed MD5')
return response
else:
#raise provider.storage_response_error(
# response.status, response.reason, body)
raise Exception(response.status, response.reason, body)
awsc = AWSAuthConnection(host,
aws_access_key_id="key_id",
aws_secret_access_key="secret")
awsc._mexe('PUT', path, None, headers, sender=sender)
|
python
|
def proxy_upload(self, path, filename, content_type=None, content_encoding=None,
cb=None, num_cb=None):
"""
This is the main function that uploads. We assume the bucket
and key (== path) exists. What we do here is simple. Calculate
the headers we will need, (e.g. md5, content-type, etc). Then
we ask the self.get_proxy_config method to fill in the authentication
information and tell us which remote host we should talk to
for the upload. From there, the rest is ripped from
boto.key.Key.send_file
"""
from boto.connection import AWSAuthConnection
import mimetypes
from hashlib import md5
import base64
BufferSize = 65536 ## set to something very small to make sure
## chunking is working properly
fp = open(filename)
headers = { 'Content-Type': content_type }
if content_type is None:
content_type = mimetypes.guess_type(filename)[0] or "text/plain"
headers['Content-Type'] = content_type
if content_encoding is not None:
headers['Content-Encoding'] = content_encoding
m = md5()
fp.seek(0)
s = fp.read(BufferSize)
while s:
m.update(s)
s = fp.read(BufferSize)
self.size = fp.tell()
fp.seek(0)
self.md5 = m.hexdigest()
headers['Content-MD5'] = base64.encodestring(m.digest()).rstrip('\n')
headers['Content-Length'] = str(self.size)
headers['Expect'] = '100-Continue'
host, headers = self.get_proxy_config(headers, path)
### how to do this same thing with curl instead...
print("curl -i --trace-ascii foo.log -T %s -H %s https://%s%s" % (
filename,
" -H ".join("'%s: %s'" % (k,v) for k,v in headers.items()),
host, path
))
def sender(http_conn, method, path, data, headers):
http_conn.putrequest(method, path)
for key in headers:
http_conn.putheader(key, headers[key])
http_conn.endheaders()
fp.seek(0)
http_conn.set_debuglevel(0) ### XXX set to e.g. 4 to see what going on
if cb:
if num_cb > 2:
cb_count = self.size / BufferSize / (num_cb-2)
elif num_cb < 0:
cb_count = -1
else:
cb_count = 0
i = total_bytes = 0
cb(total_bytes, self.size)
l = fp.read(BufferSize)
while len(l) > 0:
http_conn.send(l)
if cb:
total_bytes += len(l)
i += 1
if i == cb_count or cb_count == -1:
cb(total_bytes, self.size)
i = 0
l = fp.read(BufferSize)
if cb:
cb(total_bytes, self.size)
response = http_conn.getresponse()
body = response.read()
fp.seek(0)
if response.status == 500 or response.status == 503 or \
response.getheader('location'):
# we'll try again
return response
elif response.status >= 200 and response.status <= 299:
self.etag = response.getheader('etag')
if self.etag != '"%s"' % self.md5:
raise Exception('ETag from S3 did not match computed MD5')
return response
else:
#raise provider.storage_response_error(
# response.status, response.reason, body)
raise Exception(response.status, response.reason, body)
awsc = AWSAuthConnection(host,
aws_access_key_id="key_id",
aws_secret_access_key="secret")
awsc._mexe('PUT', path, None, headers, sender=sender)
|
[
"def",
"proxy_upload",
"(",
"self",
",",
"path",
",",
"filename",
",",
"content_type",
"=",
"None",
",",
"content_encoding",
"=",
"None",
",",
"cb",
"=",
"None",
",",
"num_cb",
"=",
"None",
")",
":",
"from",
"boto",
".",
"connection",
"import",
"AWSAuthConnection",
"import",
"mimetypes",
"from",
"hashlib",
"import",
"md5",
"import",
"base64",
"BufferSize",
"=",
"65536",
"## set to something very small to make sure",
"## chunking is working properly",
"fp",
"=",
"open",
"(",
"filename",
")",
"headers",
"=",
"{",
"'Content-Type'",
":",
"content_type",
"}",
"if",
"content_type",
"is",
"None",
":",
"content_type",
"=",
"mimetypes",
".",
"guess_type",
"(",
"filename",
")",
"[",
"0",
"]",
"or",
"\"text/plain\"",
"headers",
"[",
"'Content-Type'",
"]",
"=",
"content_type",
"if",
"content_encoding",
"is",
"not",
"None",
":",
"headers",
"[",
"'Content-Encoding'",
"]",
"=",
"content_encoding",
"m",
"=",
"md5",
"(",
")",
"fp",
".",
"seek",
"(",
"0",
")",
"s",
"=",
"fp",
".",
"read",
"(",
"BufferSize",
")",
"while",
"s",
":",
"m",
".",
"update",
"(",
"s",
")",
"s",
"=",
"fp",
".",
"read",
"(",
"BufferSize",
")",
"self",
".",
"size",
"=",
"fp",
".",
"tell",
"(",
")",
"fp",
".",
"seek",
"(",
"0",
")",
"self",
".",
"md5",
"=",
"m",
".",
"hexdigest",
"(",
")",
"headers",
"[",
"'Content-MD5'",
"]",
"=",
"base64",
".",
"encodestring",
"(",
"m",
".",
"digest",
"(",
")",
")",
".",
"rstrip",
"(",
"'\\n'",
")",
"headers",
"[",
"'Content-Length'",
"]",
"=",
"str",
"(",
"self",
".",
"size",
")",
"headers",
"[",
"'Expect'",
"]",
"=",
"'100-Continue'",
"host",
",",
"headers",
"=",
"self",
".",
"get_proxy_config",
"(",
"headers",
",",
"path",
")",
"### how to do this same thing with curl instead...",
"print",
"(",
"\"curl -i --trace-ascii foo.log -T %s -H %s https://%s%s\"",
"%",
"(",
"filename",
",",
"\" -H \"",
".",
"join",
"(",
"\"'%s: %s'\"",
"%",
"(",
"k",
",",
"v",
")",
"for",
"k",
",",
"v",
"in",
"headers",
".",
"items",
"(",
")",
")",
",",
"host",
",",
"path",
")",
")",
"def",
"sender",
"(",
"http_conn",
",",
"method",
",",
"path",
",",
"data",
",",
"headers",
")",
":",
"http_conn",
".",
"putrequest",
"(",
"method",
",",
"path",
")",
"for",
"key",
"in",
"headers",
":",
"http_conn",
".",
"putheader",
"(",
"key",
",",
"headers",
"[",
"key",
"]",
")",
"http_conn",
".",
"endheaders",
"(",
")",
"fp",
".",
"seek",
"(",
"0",
")",
"http_conn",
".",
"set_debuglevel",
"(",
"0",
")",
"### XXX set to e.g. 4 to see what going on",
"if",
"cb",
":",
"if",
"num_cb",
">",
"2",
":",
"cb_count",
"=",
"self",
".",
"size",
"/",
"BufferSize",
"/",
"(",
"num_cb",
"-",
"2",
")",
"elif",
"num_cb",
"<",
"0",
":",
"cb_count",
"=",
"-",
"1",
"else",
":",
"cb_count",
"=",
"0",
"i",
"=",
"total_bytes",
"=",
"0",
"cb",
"(",
"total_bytes",
",",
"self",
".",
"size",
")",
"l",
"=",
"fp",
".",
"read",
"(",
"BufferSize",
")",
"while",
"len",
"(",
"l",
")",
">",
"0",
":",
"http_conn",
".",
"send",
"(",
"l",
")",
"if",
"cb",
":",
"total_bytes",
"+=",
"len",
"(",
"l",
")",
"i",
"+=",
"1",
"if",
"i",
"==",
"cb_count",
"or",
"cb_count",
"==",
"-",
"1",
":",
"cb",
"(",
"total_bytes",
",",
"self",
".",
"size",
")",
"i",
"=",
"0",
"l",
"=",
"fp",
".",
"read",
"(",
"BufferSize",
")",
"if",
"cb",
":",
"cb",
"(",
"total_bytes",
",",
"self",
".",
"size",
")",
"response",
"=",
"http_conn",
".",
"getresponse",
"(",
")",
"body",
"=",
"response",
".",
"read",
"(",
")",
"fp",
".",
"seek",
"(",
"0",
")",
"if",
"response",
".",
"status",
"==",
"500",
"or",
"response",
".",
"status",
"==",
"503",
"or",
"response",
".",
"getheader",
"(",
"'location'",
")",
":",
"# we'll try again",
"return",
"response",
"elif",
"response",
".",
"status",
">=",
"200",
"and",
"response",
".",
"status",
"<=",
"299",
":",
"self",
".",
"etag",
"=",
"response",
".",
"getheader",
"(",
"'etag'",
")",
"if",
"self",
".",
"etag",
"!=",
"'\"%s\"'",
"%",
"self",
".",
"md5",
":",
"raise",
"Exception",
"(",
"'ETag from S3 did not match computed MD5'",
")",
"return",
"response",
"else",
":",
"#raise provider.storage_response_error(",
"# response.status, response.reason, body)",
"raise",
"Exception",
"(",
"response",
".",
"status",
",",
"response",
".",
"reason",
",",
"body",
")",
"awsc",
"=",
"AWSAuthConnection",
"(",
"host",
",",
"aws_access_key_id",
"=",
"\"key_id\"",
",",
"aws_secret_access_key",
"=",
"\"secret\"",
")",
"awsc",
".",
"_mexe",
"(",
"'PUT'",
",",
"path",
",",
"None",
",",
"headers",
",",
"sender",
"=",
"sender",
")"
] |
This is the main function that uploads. We assume the bucket
and key (== path) exists. What we do here is simple. Calculate
the headers we will need, (e.g. md5, content-type, etc). Then
we ask the self.get_proxy_config method to fill in the authentication
information and tell us which remote host we should talk to
for the upload. From there, the rest is ripped from
boto.key.Key.send_file
|
[
"This",
"is",
"the",
"main",
"function",
"that",
"uploads",
".",
"We",
"assume",
"the",
"bucket",
"and",
"key",
"(",
"==",
"path",
")",
"exists",
".",
"What",
"we",
"do",
"here",
"is",
"simple",
".",
"Calculate",
"the",
"headers",
"we",
"will",
"need",
"(",
"e",
".",
"g",
".",
"md5",
"content",
"-",
"type",
"etc",
")",
".",
"Then",
"we",
"ask",
"the",
"self",
".",
"get_proxy_config",
"method",
"to",
"fill",
"in",
"the",
"authentication",
"information",
"and",
"tell",
"us",
"which",
"remote",
"host",
"we",
"should",
"talk",
"to",
"for",
"the",
"upload",
".",
"From",
"there",
"the",
"rest",
"is",
"ripped",
"from",
"boto",
".",
"key",
".",
"Key",
".",
"send_file"
] |
c110cbecd7d0ae7e877963914a1a5af030cd6d45
|
https://github.com/okfn/ofs/blob/c110cbecd7d0ae7e877963914a1a5af030cd6d45/ofs/command.py#L87-L187
|
train
|
KKBOX/OpenAPI-Python
|
kkbox_developer_sdk/mood_station_fetcher.py
|
KKBOXMoodStationFetcher.fetch_all_mood_stations
|
def fetch_all_mood_stations(self, terr=KKBOXTerritory.TAIWAN):
'''
Fetches all mood stations.
:param terr: the current territory.
:return: API response.
:rtype: dict
See `https://docs-en.kkbox.codes/v1.1/reference#moodstations`.
'''
url = 'https://api.kkbox.com/v1.1/mood-stations'
url += '?' + url_parse.urlencode({'territory': terr})
return self.http._post_data(url, None, self.http._headers_with_access_token())
|
python
|
def fetch_all_mood_stations(self, terr=KKBOXTerritory.TAIWAN):
'''
Fetches all mood stations.
:param terr: the current territory.
:return: API response.
:rtype: dict
See `https://docs-en.kkbox.codes/v1.1/reference#moodstations`.
'''
url = 'https://api.kkbox.com/v1.1/mood-stations'
url += '?' + url_parse.urlencode({'territory': terr})
return self.http._post_data(url, None, self.http._headers_with_access_token())
|
[
"def",
"fetch_all_mood_stations",
"(",
"self",
",",
"terr",
"=",
"KKBOXTerritory",
".",
"TAIWAN",
")",
":",
"url",
"=",
"'https://api.kkbox.com/v1.1/mood-stations'",
"url",
"+=",
"'?'",
"+",
"url_parse",
".",
"urlencode",
"(",
"{",
"'territory'",
":",
"terr",
"}",
")",
"return",
"self",
".",
"http",
".",
"_post_data",
"(",
"url",
",",
"None",
",",
"self",
".",
"http",
".",
"_headers_with_access_token",
"(",
")",
")"
] |
Fetches all mood stations.
:param terr: the current territory.
:return: API response.
:rtype: dict
See `https://docs-en.kkbox.codes/v1.1/reference#moodstations`.
|
[
"Fetches",
"all",
"mood",
"stations",
"."
] |
77aa22fd300ed987d5507a5b66b149edcd28047d
|
https://github.com/KKBOX/OpenAPI-Python/blob/77aa22fd300ed987d5507a5b66b149edcd28047d/kkbox_developer_sdk/mood_station_fetcher.py#L13-L25
|
train
|
KKBOX/OpenAPI-Python
|
kkbox_developer_sdk/mood_station_fetcher.py
|
KKBOXMoodStationFetcher.fetch_mood_station
|
def fetch_mood_station(self, station_id, terr=KKBOXTerritory.TAIWAN):
'''
Fetches a mood station by given ID.
:param station_id: the station ID
:param terr: the current territory.
:return: API response.
:rtype: dict
See `https://docs-en.kkbox.codes/v1.1/reference#moodstations-station_id`.
'''
url = 'https://api.kkbox.com/v1.1/mood-stations/%s' % station_id
url += '?' + url_parse.urlencode({'territory': terr})
return self.http._post_data(url, None, self.http._headers_with_access_token())
|
python
|
def fetch_mood_station(self, station_id, terr=KKBOXTerritory.TAIWAN):
'''
Fetches a mood station by given ID.
:param station_id: the station ID
:param terr: the current territory.
:return: API response.
:rtype: dict
See `https://docs-en.kkbox.codes/v1.1/reference#moodstations-station_id`.
'''
url = 'https://api.kkbox.com/v1.1/mood-stations/%s' % station_id
url += '?' + url_parse.urlencode({'territory': terr})
return self.http._post_data(url, None, self.http._headers_with_access_token())
|
[
"def",
"fetch_mood_station",
"(",
"self",
",",
"station_id",
",",
"terr",
"=",
"KKBOXTerritory",
".",
"TAIWAN",
")",
":",
"url",
"=",
"'https://api.kkbox.com/v1.1/mood-stations/%s'",
"%",
"station_id",
"url",
"+=",
"'?'",
"+",
"url_parse",
".",
"urlencode",
"(",
"{",
"'territory'",
":",
"terr",
"}",
")",
"return",
"self",
".",
"http",
".",
"_post_data",
"(",
"url",
",",
"None",
",",
"self",
".",
"http",
".",
"_headers_with_access_token",
"(",
")",
")"
] |
Fetches a mood station by given ID.
:param station_id: the station ID
:param terr: the current territory.
:return: API response.
:rtype: dict
See `https://docs-en.kkbox.codes/v1.1/reference#moodstations-station_id`.
|
[
"Fetches",
"a",
"mood",
"station",
"by",
"given",
"ID",
"."
] |
77aa22fd300ed987d5507a5b66b149edcd28047d
|
https://github.com/KKBOX/OpenAPI-Python/blob/77aa22fd300ed987d5507a5b66b149edcd28047d/kkbox_developer_sdk/mood_station_fetcher.py#L28-L41
|
train
|
KKBOX/OpenAPI-Python
|
kkbox_developer_sdk/fetcher.py
|
Fetcher.fetch_next_page
|
def fetch_next_page(self, data):
'''
Fetches next page based on previously fetched data.
Will get the next page url from data['paging']['next'].
:param data: previously fetched API response.
:type data: dict
:return: API response.
:rtype: dict
'''
next_url = data['paging']['next']
if next_url != None:
next_data = self.http._post_data(next_url, None, self.http._headers_with_access_token())
return next_data
else:
return None
|
python
|
def fetch_next_page(self, data):
'''
Fetches next page based on previously fetched data.
Will get the next page url from data['paging']['next'].
:param data: previously fetched API response.
:type data: dict
:return: API response.
:rtype: dict
'''
next_url = data['paging']['next']
if next_url != None:
next_data = self.http._post_data(next_url, None, self.http._headers_with_access_token())
return next_data
else:
return None
|
[
"def",
"fetch_next_page",
"(",
"self",
",",
"data",
")",
":",
"next_url",
"=",
"data",
"[",
"'paging'",
"]",
"[",
"'next'",
"]",
"if",
"next_url",
"!=",
"None",
":",
"next_data",
"=",
"self",
".",
"http",
".",
"_post_data",
"(",
"next_url",
",",
"None",
",",
"self",
".",
"http",
".",
"_headers_with_access_token",
"(",
")",
")",
"return",
"next_data",
"else",
":",
"return",
"None"
] |
Fetches next page based on previously fetched data.
Will get the next page url from data['paging']['next'].
:param data: previously fetched API response.
:type data: dict
:return: API response.
:rtype: dict
|
[
"Fetches",
"next",
"page",
"based",
"on",
"previously",
"fetched",
"data",
".",
"Will",
"get",
"the",
"next",
"page",
"url",
"from",
"data",
"[",
"paging",
"]",
"[",
"next",
"]",
"."
] |
77aa22fd300ed987d5507a5b66b149edcd28047d
|
https://github.com/KKBOX/OpenAPI-Python/blob/77aa22fd300ed987d5507a5b66b149edcd28047d/kkbox_developer_sdk/fetcher.py#L29-L44
|
train
|
KKBOX/OpenAPI-Python
|
kkbox_developer_sdk/fetcher.py
|
Fetcher.fetch_data
|
def fetch_data(self, url):
'''
Fetches data from specific url.
:return: The response.
:rtype: dict
'''
return self.http._post_data(url, None, self.http._headers_with_access_token())
|
python
|
def fetch_data(self, url):
'''
Fetches data from specific url.
:return: The response.
:rtype: dict
'''
return self.http._post_data(url, None, self.http._headers_with_access_token())
|
[
"def",
"fetch_data",
"(",
"self",
",",
"url",
")",
":",
"return",
"self",
".",
"http",
".",
"_post_data",
"(",
"url",
",",
"None",
",",
"self",
".",
"http",
".",
"_headers_with_access_token",
"(",
")",
")"
] |
Fetches data from specific url.
:return: The response.
:rtype: dict
|
[
"Fetches",
"data",
"from",
"specific",
"url",
"."
] |
77aa22fd300ed987d5507a5b66b149edcd28047d
|
https://github.com/KKBOX/OpenAPI-Python/blob/77aa22fd300ed987d5507a5b66b149edcd28047d/kkbox_developer_sdk/fetcher.py#L46-L53
|
train
|
KKBOX/OpenAPI-Python
|
kkbox_developer_sdk/shared_playlist_fetcher.py
|
KKBOXSharedPlaylistFetcher.fetch_shared_playlist
|
def fetch_shared_playlist(self, playlist_id, terr=KKBOXTerritory.TAIWAN):
'''
Fetches a shared playlist by given ID.
:param playlist_id: the playlist ID.
:type playlist_id: str
:param terr: the current territory.
:return: API response.
:rtype: dictcd
See `https://docs-en.kkbox.codes/v1.1/reference#sharedplaylists-playlist_id`.
'''
url = 'https://api.kkbox.com/v1.1/shared-playlists/%s' % playlist_id
url += '?' + url_parse.urlencode({'territory': terr})
return self.http._post_data(url, None, self.http._headers_with_access_token())
|
python
|
def fetch_shared_playlist(self, playlist_id, terr=KKBOXTerritory.TAIWAN):
'''
Fetches a shared playlist by given ID.
:param playlist_id: the playlist ID.
:type playlist_id: str
:param terr: the current territory.
:return: API response.
:rtype: dictcd
See `https://docs-en.kkbox.codes/v1.1/reference#sharedplaylists-playlist_id`.
'''
url = 'https://api.kkbox.com/v1.1/shared-playlists/%s' % playlist_id
url += '?' + url_parse.urlencode({'territory': terr})
return self.http._post_data(url, None, self.http._headers_with_access_token())
|
[
"def",
"fetch_shared_playlist",
"(",
"self",
",",
"playlist_id",
",",
"terr",
"=",
"KKBOXTerritory",
".",
"TAIWAN",
")",
":",
"url",
"=",
"'https://api.kkbox.com/v1.1/shared-playlists/%s'",
"%",
"playlist_id",
"url",
"+=",
"'?'",
"+",
"url_parse",
".",
"urlencode",
"(",
"{",
"'territory'",
":",
"terr",
"}",
")",
"return",
"self",
".",
"http",
".",
"_post_data",
"(",
"url",
",",
"None",
",",
"self",
".",
"http",
".",
"_headers_with_access_token",
"(",
")",
")"
] |
Fetches a shared playlist by given ID.
:param playlist_id: the playlist ID.
:type playlist_id: str
:param terr: the current territory.
:return: API response.
:rtype: dictcd
See `https://docs-en.kkbox.codes/v1.1/reference#sharedplaylists-playlist_id`.
|
[
"Fetches",
"a",
"shared",
"playlist",
"by",
"given",
"ID",
"."
] |
77aa22fd300ed987d5507a5b66b149edcd28047d
|
https://github.com/KKBOX/OpenAPI-Python/blob/77aa22fd300ed987d5507a5b66b149edcd28047d/kkbox_developer_sdk/shared_playlist_fetcher.py#L13-L27
|
train
|
UpCloudLtd/upcloud-python-api
|
upcloud_api/cloud_manager/firewall_mixin.py
|
FirewallManager.get_firewall_rule
|
def get_firewall_rule(self, server_uuid, firewall_rule_position, server_instance=None):
"""
Return a FirewallRule object based on server uuid and rule position.
"""
url = '/server/{0}/firewall_rule/{1}'.format(server_uuid, firewall_rule_position)
res = self.get_request(url)
return FirewallRule(**res['firewall_rule'])
|
python
|
def get_firewall_rule(self, server_uuid, firewall_rule_position, server_instance=None):
"""
Return a FirewallRule object based on server uuid and rule position.
"""
url = '/server/{0}/firewall_rule/{1}'.format(server_uuid, firewall_rule_position)
res = self.get_request(url)
return FirewallRule(**res['firewall_rule'])
|
[
"def",
"get_firewall_rule",
"(",
"self",
",",
"server_uuid",
",",
"firewall_rule_position",
",",
"server_instance",
"=",
"None",
")",
":",
"url",
"=",
"'/server/{0}/firewall_rule/{1}'",
".",
"format",
"(",
"server_uuid",
",",
"firewall_rule_position",
")",
"res",
"=",
"self",
".",
"get_request",
"(",
"url",
")",
"return",
"FirewallRule",
"(",
"*",
"*",
"res",
"[",
"'firewall_rule'",
"]",
")"
] |
Return a FirewallRule object based on server uuid and rule position.
|
[
"Return",
"a",
"FirewallRule",
"object",
"based",
"on",
"server",
"uuid",
"and",
"rule",
"position",
"."
] |
954b0ad7c4b932b2be31a95d88975f6b0eeac8ed
|
https://github.com/UpCloudLtd/upcloud-python-api/blob/954b0ad7c4b932b2be31a95d88975f6b0eeac8ed/upcloud_api/cloud_manager/firewall_mixin.py#L20-L26
|
train
|
UpCloudLtd/upcloud-python-api
|
upcloud_api/cloud_manager/firewall_mixin.py
|
FirewallManager.get_firewall_rules
|
def get_firewall_rules(self, server):
"""
Return all FirewallRule objects based on a server instance or uuid.
"""
server_uuid, server_instance = uuid_and_instance(server)
url = '/server/{0}/firewall_rule'.format(server_uuid)
res = self.get_request(url)
return [
FirewallRule(server=server_instance, **firewall_rule)
for firewall_rule in res['firewall_rules']['firewall_rule']
]
|
python
|
def get_firewall_rules(self, server):
"""
Return all FirewallRule objects based on a server instance or uuid.
"""
server_uuid, server_instance = uuid_and_instance(server)
url = '/server/{0}/firewall_rule'.format(server_uuid)
res = self.get_request(url)
return [
FirewallRule(server=server_instance, **firewall_rule)
for firewall_rule in res['firewall_rules']['firewall_rule']
]
|
[
"def",
"get_firewall_rules",
"(",
"self",
",",
"server",
")",
":",
"server_uuid",
",",
"server_instance",
"=",
"uuid_and_instance",
"(",
"server",
")",
"url",
"=",
"'/server/{0}/firewall_rule'",
".",
"format",
"(",
"server_uuid",
")",
"res",
"=",
"self",
".",
"get_request",
"(",
"url",
")",
"return",
"[",
"FirewallRule",
"(",
"server",
"=",
"server_instance",
",",
"*",
"*",
"firewall_rule",
")",
"for",
"firewall_rule",
"in",
"res",
"[",
"'firewall_rules'",
"]",
"[",
"'firewall_rule'",
"]",
"]"
] |
Return all FirewallRule objects based on a server instance or uuid.
|
[
"Return",
"all",
"FirewallRule",
"objects",
"based",
"on",
"a",
"server",
"instance",
"or",
"uuid",
"."
] |
954b0ad7c4b932b2be31a95d88975f6b0eeac8ed
|
https://github.com/UpCloudLtd/upcloud-python-api/blob/954b0ad7c4b932b2be31a95d88975f6b0eeac8ed/upcloud_api/cloud_manager/firewall_mixin.py#L28-L40
|
train
|
UpCloudLtd/upcloud-python-api
|
upcloud_api/cloud_manager/firewall_mixin.py
|
FirewallManager.create_firewall_rule
|
def create_firewall_rule(self, server, firewall_rule_body):
"""
Create a new firewall rule for a given server uuid.
The rule can begiven as a dict or with FirewallRule.prepare_post_body().
Returns a FirewallRule object.
"""
server_uuid, server_instance = uuid_and_instance(server)
url = '/server/{0}/firewall_rule'.format(server_uuid)
body = {'firewall_rule': firewall_rule_body}
res = self.post_request(url, body)
return FirewallRule(server=server_instance, **res['firewall_rule'])
|
python
|
def create_firewall_rule(self, server, firewall_rule_body):
"""
Create a new firewall rule for a given server uuid.
The rule can begiven as a dict or with FirewallRule.prepare_post_body().
Returns a FirewallRule object.
"""
server_uuid, server_instance = uuid_and_instance(server)
url = '/server/{0}/firewall_rule'.format(server_uuid)
body = {'firewall_rule': firewall_rule_body}
res = self.post_request(url, body)
return FirewallRule(server=server_instance, **res['firewall_rule'])
|
[
"def",
"create_firewall_rule",
"(",
"self",
",",
"server",
",",
"firewall_rule_body",
")",
":",
"server_uuid",
",",
"server_instance",
"=",
"uuid_and_instance",
"(",
"server",
")",
"url",
"=",
"'/server/{0}/firewall_rule'",
".",
"format",
"(",
"server_uuid",
")",
"body",
"=",
"{",
"'firewall_rule'",
":",
"firewall_rule_body",
"}",
"res",
"=",
"self",
".",
"post_request",
"(",
"url",
",",
"body",
")",
"return",
"FirewallRule",
"(",
"server",
"=",
"server_instance",
",",
"*",
"*",
"res",
"[",
"'firewall_rule'",
"]",
")"
] |
Create a new firewall rule for a given server uuid.
The rule can begiven as a dict or with FirewallRule.prepare_post_body().
Returns a FirewallRule object.
|
[
"Create",
"a",
"new",
"firewall",
"rule",
"for",
"a",
"given",
"server",
"uuid",
"."
] |
954b0ad7c4b932b2be31a95d88975f6b0eeac8ed
|
https://github.com/UpCloudLtd/upcloud-python-api/blob/954b0ad7c4b932b2be31a95d88975f6b0eeac8ed/upcloud_api/cloud_manager/firewall_mixin.py#L42-L55
|
train
|
UpCloudLtd/upcloud-python-api
|
upcloud_api/cloud_manager/firewall_mixin.py
|
FirewallManager.delete_firewall_rule
|
def delete_firewall_rule(self, server_uuid, firewall_rule_position):
"""
Delete a firewall rule based on a server uuid and rule position.
"""
url = '/server/{0}/firewall_rule/{1}'.format(server_uuid, firewall_rule_position)
return self.request('DELETE', url)
|
python
|
def delete_firewall_rule(self, server_uuid, firewall_rule_position):
"""
Delete a firewall rule based on a server uuid and rule position.
"""
url = '/server/{0}/firewall_rule/{1}'.format(server_uuid, firewall_rule_position)
return self.request('DELETE', url)
|
[
"def",
"delete_firewall_rule",
"(",
"self",
",",
"server_uuid",
",",
"firewall_rule_position",
")",
":",
"url",
"=",
"'/server/{0}/firewall_rule/{1}'",
".",
"format",
"(",
"server_uuid",
",",
"firewall_rule_position",
")",
"return",
"self",
".",
"request",
"(",
"'DELETE'",
",",
"url",
")"
] |
Delete a firewall rule based on a server uuid and rule position.
|
[
"Delete",
"a",
"firewall",
"rule",
"based",
"on",
"a",
"server",
"uuid",
"and",
"rule",
"position",
"."
] |
954b0ad7c4b932b2be31a95d88975f6b0eeac8ed
|
https://github.com/UpCloudLtd/upcloud-python-api/blob/954b0ad7c4b932b2be31a95d88975f6b0eeac8ed/upcloud_api/cloud_manager/firewall_mixin.py#L57-L62
|
train
|
UpCloudLtd/upcloud-python-api
|
upcloud_api/cloud_manager/firewall_mixin.py
|
FirewallManager.configure_firewall
|
def configure_firewall(self, server, firewall_rule_bodies):
"""
Helper for calling create_firewall_rule in series for a list of firewall_rule_bodies.
"""
server_uuid, server_instance = uuid_and_instance(server)
return [
self.create_firewall_rule(server_uuid, rule)
for rule in firewall_rule_bodies
]
|
python
|
def configure_firewall(self, server, firewall_rule_bodies):
"""
Helper for calling create_firewall_rule in series for a list of firewall_rule_bodies.
"""
server_uuid, server_instance = uuid_and_instance(server)
return [
self.create_firewall_rule(server_uuid, rule)
for rule in firewall_rule_bodies
]
|
[
"def",
"configure_firewall",
"(",
"self",
",",
"server",
",",
"firewall_rule_bodies",
")",
":",
"server_uuid",
",",
"server_instance",
"=",
"uuid_and_instance",
"(",
"server",
")",
"return",
"[",
"self",
".",
"create_firewall_rule",
"(",
"server_uuid",
",",
"rule",
")",
"for",
"rule",
"in",
"firewall_rule_bodies",
"]"
] |
Helper for calling create_firewall_rule in series for a list of firewall_rule_bodies.
|
[
"Helper",
"for",
"calling",
"create_firewall_rule",
"in",
"series",
"for",
"a",
"list",
"of",
"firewall_rule_bodies",
"."
] |
954b0ad7c4b932b2be31a95d88975f6b0eeac8ed
|
https://github.com/UpCloudLtd/upcloud-python-api/blob/954b0ad7c4b932b2be31a95d88975f6b0eeac8ed/upcloud_api/cloud_manager/firewall_mixin.py#L64-L73
|
train
|
csirtgadgets/csirtgsdk-py
|
csirtgsdk/sinkhole.py
|
Sinkhole.post
|
def post(self, data):
"""
POSTs a raw SMTP message to the Sinkhole API
:param data: raw content to be submitted [STRING]
:return: { list of predictions }
"""
uri = '{}/sinkhole'.format(self.client.remote)
self.logger.debug(uri)
if PYVERSION == 2:
try:
data = data.decode('utf-8')
except Exception:
data = data.decode('latin-1')
data = {
'message': data
}
body = self.client.post(uri, data)
return body
|
python
|
def post(self, data):
"""
POSTs a raw SMTP message to the Sinkhole API
:param data: raw content to be submitted [STRING]
:return: { list of predictions }
"""
uri = '{}/sinkhole'.format(self.client.remote)
self.logger.debug(uri)
if PYVERSION == 2:
try:
data = data.decode('utf-8')
except Exception:
data = data.decode('latin-1')
data = {
'message': data
}
body = self.client.post(uri, data)
return body
|
[
"def",
"post",
"(",
"self",
",",
"data",
")",
":",
"uri",
"=",
"'{}/sinkhole'",
".",
"format",
"(",
"self",
".",
"client",
".",
"remote",
")",
"self",
".",
"logger",
".",
"debug",
"(",
"uri",
")",
"if",
"PYVERSION",
"==",
"2",
":",
"try",
":",
"data",
"=",
"data",
".",
"decode",
"(",
"'utf-8'",
")",
"except",
"Exception",
":",
"data",
"=",
"data",
".",
"decode",
"(",
"'latin-1'",
")",
"data",
"=",
"{",
"'message'",
":",
"data",
"}",
"body",
"=",
"self",
".",
"client",
".",
"post",
"(",
"uri",
",",
"data",
")",
"return",
"body"
] |
POSTs a raw SMTP message to the Sinkhole API
:param data: raw content to be submitted [STRING]
:return: { list of predictions }
|
[
"POSTs",
"a",
"raw",
"SMTP",
"message",
"to",
"the",
"Sinkhole",
"API"
] |
5a7ed9c5e6fa27170366ecbdef710dc80d537dc2
|
https://github.com/csirtgadgets/csirtgsdk-py/blob/5a7ed9c5e6fa27170366ecbdef710dc80d537dc2/csirtgsdk/sinkhole.py#L20-L41
|
train
|
tanwanirahul/django-batch-requests
|
batch_requests/utils.py
|
pre_process_method_headers
|
def pre_process_method_headers(method, headers):
'''
Returns the lowered method.
Capitalize headers, prepend HTTP_ and change - to _.
'''
method = method.lower()
# Standard WSGI supported headers
_wsgi_headers = ["content_length", "content_type", "query_string",
"remote_addr", "remote_host", "remote_user",
"request_method", "server_name", "server_port"]
_transformed_headers = {}
# For every header, replace - to _, prepend http_ if necessary and convert
# to upper case.
for header, value in headers.items():
header = header.replace("-", "_")
header = "http_{header}".format(
header=header) if header.lower() not in _wsgi_headers else header
_transformed_headers.update({header.upper(): value})
return method, _transformed_headers
|
python
|
def pre_process_method_headers(method, headers):
'''
Returns the lowered method.
Capitalize headers, prepend HTTP_ and change - to _.
'''
method = method.lower()
# Standard WSGI supported headers
_wsgi_headers = ["content_length", "content_type", "query_string",
"remote_addr", "remote_host", "remote_user",
"request_method", "server_name", "server_port"]
_transformed_headers = {}
# For every header, replace - to _, prepend http_ if necessary and convert
# to upper case.
for header, value in headers.items():
header = header.replace("-", "_")
header = "http_{header}".format(
header=header) if header.lower() not in _wsgi_headers else header
_transformed_headers.update({header.upper(): value})
return method, _transformed_headers
|
[
"def",
"pre_process_method_headers",
"(",
"method",
",",
"headers",
")",
":",
"method",
"=",
"method",
".",
"lower",
"(",
")",
"# Standard WSGI supported headers",
"_wsgi_headers",
"=",
"[",
"\"content_length\"",
",",
"\"content_type\"",
",",
"\"query_string\"",
",",
"\"remote_addr\"",
",",
"\"remote_host\"",
",",
"\"remote_user\"",
",",
"\"request_method\"",
",",
"\"server_name\"",
",",
"\"server_port\"",
"]",
"_transformed_headers",
"=",
"{",
"}",
"# For every header, replace - to _, prepend http_ if necessary and convert",
"# to upper case.",
"for",
"header",
",",
"value",
"in",
"headers",
".",
"items",
"(",
")",
":",
"header",
"=",
"header",
".",
"replace",
"(",
"\"-\"",
",",
"\"_\"",
")",
"header",
"=",
"\"http_{header}\"",
".",
"format",
"(",
"header",
"=",
"header",
")",
"if",
"header",
".",
"lower",
"(",
")",
"not",
"in",
"_wsgi_headers",
"else",
"header",
"_transformed_headers",
".",
"update",
"(",
"{",
"header",
".",
"upper",
"(",
")",
":",
"value",
"}",
")",
"return",
"method",
",",
"_transformed_headers"
] |
Returns the lowered method.
Capitalize headers, prepend HTTP_ and change - to _.
|
[
"Returns",
"the",
"lowered",
"method",
".",
"Capitalize",
"headers",
"prepend",
"HTTP_",
"and",
"change",
"-",
"to",
"_",
"."
] |
9c5afc42f7542f466247f4ffed9c44e1c49fa20d
|
https://github.com/tanwanirahul/django-batch-requests/blob/9c5afc42f7542f466247f4ffed9c44e1c49fa20d/batch_requests/utils.py#L47-L70
|
train
|
tanwanirahul/django-batch-requests
|
batch_requests/utils.py
|
headers_to_include_from_request
|
def headers_to_include_from_request(curr_request):
'''
Define headers that needs to be included from the current request.
'''
return {
h: v for h, v in curr_request.META.items() if h in _settings.HEADERS_TO_INCLUDE}
|
python
|
def headers_to_include_from_request(curr_request):
'''
Define headers that needs to be included from the current request.
'''
return {
h: v for h, v in curr_request.META.items() if h in _settings.HEADERS_TO_INCLUDE}
|
[
"def",
"headers_to_include_from_request",
"(",
"curr_request",
")",
":",
"return",
"{",
"h",
":",
"v",
"for",
"h",
",",
"v",
"in",
"curr_request",
".",
"META",
".",
"items",
"(",
")",
"if",
"h",
"in",
"_settings",
".",
"HEADERS_TO_INCLUDE",
"}"
] |
Define headers that needs to be included from the current request.
|
[
"Define",
"headers",
"that",
"needs",
"to",
"be",
"included",
"from",
"the",
"current",
"request",
"."
] |
9c5afc42f7542f466247f4ffed9c44e1c49fa20d
|
https://github.com/tanwanirahul/django-batch-requests/blob/9c5afc42f7542f466247f4ffed9c44e1c49fa20d/batch_requests/utils.py#L73-L78
|
train
|
tanwanirahul/django-batch-requests
|
batch_requests/utils.py
|
get_wsgi_request_object
|
def get_wsgi_request_object(curr_request, method, url, headers, body):
'''
Based on the given request parameters, constructs and returns the WSGI request object.
'''
x_headers = headers_to_include_from_request(curr_request)
method, t_headers = pre_process_method_headers(method, headers)
# Add default content type.
if "CONTENT_TYPE" not in t_headers:
t_headers.update({"CONTENT_TYPE": _settings.DEFAULT_CONTENT_TYPE})
# Override existing batch requests headers with the new headers passed for this request.
x_headers.update(t_headers)
content_type = x_headers.get("CONTENT_TYPE", _settings.DEFAULT_CONTENT_TYPE)
# Get hold of request factory to construct the request.
_request_factory = BatchRequestFactory()
_request_provider = getattr(_request_factory, method)
secure = _settings.USE_HTTPS
request = _request_provider(url, data=body, secure=secure,
content_type=content_type, **x_headers)
return request
|
python
|
def get_wsgi_request_object(curr_request, method, url, headers, body):
'''
Based on the given request parameters, constructs and returns the WSGI request object.
'''
x_headers = headers_to_include_from_request(curr_request)
method, t_headers = pre_process_method_headers(method, headers)
# Add default content type.
if "CONTENT_TYPE" not in t_headers:
t_headers.update({"CONTENT_TYPE": _settings.DEFAULT_CONTENT_TYPE})
# Override existing batch requests headers with the new headers passed for this request.
x_headers.update(t_headers)
content_type = x_headers.get("CONTENT_TYPE", _settings.DEFAULT_CONTENT_TYPE)
# Get hold of request factory to construct the request.
_request_factory = BatchRequestFactory()
_request_provider = getattr(_request_factory, method)
secure = _settings.USE_HTTPS
request = _request_provider(url, data=body, secure=secure,
content_type=content_type, **x_headers)
return request
|
[
"def",
"get_wsgi_request_object",
"(",
"curr_request",
",",
"method",
",",
"url",
",",
"headers",
",",
"body",
")",
":",
"x_headers",
"=",
"headers_to_include_from_request",
"(",
"curr_request",
")",
"method",
",",
"t_headers",
"=",
"pre_process_method_headers",
"(",
"method",
",",
"headers",
")",
"# Add default content type.",
"if",
"\"CONTENT_TYPE\"",
"not",
"in",
"t_headers",
":",
"t_headers",
".",
"update",
"(",
"{",
"\"CONTENT_TYPE\"",
":",
"_settings",
".",
"DEFAULT_CONTENT_TYPE",
"}",
")",
"# Override existing batch requests headers with the new headers passed for this request.",
"x_headers",
".",
"update",
"(",
"t_headers",
")",
"content_type",
"=",
"x_headers",
".",
"get",
"(",
"\"CONTENT_TYPE\"",
",",
"_settings",
".",
"DEFAULT_CONTENT_TYPE",
")",
"# Get hold of request factory to construct the request.",
"_request_factory",
"=",
"BatchRequestFactory",
"(",
")",
"_request_provider",
"=",
"getattr",
"(",
"_request_factory",
",",
"method",
")",
"secure",
"=",
"_settings",
".",
"USE_HTTPS",
"request",
"=",
"_request_provider",
"(",
"url",
",",
"data",
"=",
"body",
",",
"secure",
"=",
"secure",
",",
"content_type",
"=",
"content_type",
",",
"*",
"*",
"x_headers",
")",
"return",
"request"
] |
Based on the given request parameters, constructs and returns the WSGI request object.
|
[
"Based",
"on",
"the",
"given",
"request",
"parameters",
"constructs",
"and",
"returns",
"the",
"WSGI",
"request",
"object",
"."
] |
9c5afc42f7542f466247f4ffed9c44e1c49fa20d
|
https://github.com/tanwanirahul/django-batch-requests/blob/9c5afc42f7542f466247f4ffed9c44e1c49fa20d/batch_requests/utils.py#L81-L106
|
train
|
tanwanirahul/django-batch-requests
|
batch_requests/utils.py
|
BatchRequestFactory._base_environ
|
def _base_environ(self, **request):
'''
Override the default values for the wsgi environment variables.
'''
# This is a minimal valid WSGI environ dictionary, plus:
# - HTTP_COOKIE: for cookie support,
# - REMOTE_ADDR: often useful, see #8551.
# See http://www.python.org/dev/peps/pep-3333/#environ-variables
environ = {
'HTTP_COOKIE': self.cookies.output(header='', sep='; '),
'PATH_INFO': str('/'),
'REMOTE_ADDR': str('127.0.0.1'),
'REQUEST_METHOD': str('GET'),
'SCRIPT_NAME': str(''),
'SERVER_NAME': str('localhost'),
'SERVER_PORT': str('8000'),
'SERVER_PROTOCOL': str('HTTP/1.1'),
'wsgi.version': (1, 0),
'wsgi.url_scheme': str('http'),
'wsgi.input': FakePayload(b''),
'wsgi.errors': self.errors,
'wsgi.multiprocess': True,
'wsgi.multithread': True,
'wsgi.run_once': False,
}
environ.update(self.defaults)
environ.update(request)
return environ
|
python
|
def _base_environ(self, **request):
'''
Override the default values for the wsgi environment variables.
'''
# This is a minimal valid WSGI environ dictionary, plus:
# - HTTP_COOKIE: for cookie support,
# - REMOTE_ADDR: often useful, see #8551.
# See http://www.python.org/dev/peps/pep-3333/#environ-variables
environ = {
'HTTP_COOKIE': self.cookies.output(header='', sep='; '),
'PATH_INFO': str('/'),
'REMOTE_ADDR': str('127.0.0.1'),
'REQUEST_METHOD': str('GET'),
'SCRIPT_NAME': str(''),
'SERVER_NAME': str('localhost'),
'SERVER_PORT': str('8000'),
'SERVER_PROTOCOL': str('HTTP/1.1'),
'wsgi.version': (1, 0),
'wsgi.url_scheme': str('http'),
'wsgi.input': FakePayload(b''),
'wsgi.errors': self.errors,
'wsgi.multiprocess': True,
'wsgi.multithread': True,
'wsgi.run_once': False,
}
environ.update(self.defaults)
environ.update(request)
return environ
|
[
"def",
"_base_environ",
"(",
"self",
",",
"*",
"*",
"request",
")",
":",
"# This is a minimal valid WSGI environ dictionary, plus:",
"# - HTTP_COOKIE: for cookie support,",
"# - REMOTE_ADDR: often useful, see #8551.",
"# See http://www.python.org/dev/peps/pep-3333/#environ-variables",
"environ",
"=",
"{",
"'HTTP_COOKIE'",
":",
"self",
".",
"cookies",
".",
"output",
"(",
"header",
"=",
"''",
",",
"sep",
"=",
"'; '",
")",
",",
"'PATH_INFO'",
":",
"str",
"(",
"'/'",
")",
",",
"'REMOTE_ADDR'",
":",
"str",
"(",
"'127.0.0.1'",
")",
",",
"'REQUEST_METHOD'",
":",
"str",
"(",
"'GET'",
")",
",",
"'SCRIPT_NAME'",
":",
"str",
"(",
"''",
")",
",",
"'SERVER_NAME'",
":",
"str",
"(",
"'localhost'",
")",
",",
"'SERVER_PORT'",
":",
"str",
"(",
"'8000'",
")",
",",
"'SERVER_PROTOCOL'",
":",
"str",
"(",
"'HTTP/1.1'",
")",
",",
"'wsgi.version'",
":",
"(",
"1",
",",
"0",
")",
",",
"'wsgi.url_scheme'",
":",
"str",
"(",
"'http'",
")",
",",
"'wsgi.input'",
":",
"FakePayload",
"(",
"b''",
")",
",",
"'wsgi.errors'",
":",
"self",
".",
"errors",
",",
"'wsgi.multiprocess'",
":",
"True",
",",
"'wsgi.multithread'",
":",
"True",
",",
"'wsgi.run_once'",
":",
"False",
",",
"}",
"environ",
".",
"update",
"(",
"self",
".",
"defaults",
")",
"environ",
".",
"update",
"(",
"request",
")",
"return",
"environ"
] |
Override the default values for the wsgi environment variables.
|
[
"Override",
"the",
"default",
"values",
"for",
"the",
"wsgi",
"environment",
"variables",
"."
] |
9c5afc42f7542f466247f4ffed9c44e1c49fa20d
|
https://github.com/tanwanirahul/django-batch-requests/blob/9c5afc42f7542f466247f4ffed9c44e1c49fa20d/batch_requests/utils.py#L16-L44
|
train
|
UpCloudLtd/upcloud-python-api
|
upcloud_api/cloud_manager/base.py
|
BaseAPI.request
|
def request(self, method, endpoint, body=None, timeout=-1):
"""
Perform a request with a given body to a given endpoint in UpCloud's API.
Handles errors with __error_middleware.
"""
if method not in set(['GET', 'POST', 'PUT', 'DELETE']):
raise Exception('Invalid/Forbidden HTTP method')
url = '/' + self.api_v + endpoint
headers = {
'Authorization': self.token,
'Content-Type': 'application/json'
}
if body:
json_body_or_None = json.dumps(body)
else:
json_body_or_None = None
call_timeout = timeout if timeout != -1 else self.timeout
APIcall = getattr(requests, method.lower())
res = APIcall('https://api.upcloud.com' + url,
data=json_body_or_None,
headers=headers,
timeout=call_timeout)
if res.text:
res_json = res.json()
else:
res_json = {}
return self.__error_middleware(res, res_json)
|
python
|
def request(self, method, endpoint, body=None, timeout=-1):
"""
Perform a request with a given body to a given endpoint in UpCloud's API.
Handles errors with __error_middleware.
"""
if method not in set(['GET', 'POST', 'PUT', 'DELETE']):
raise Exception('Invalid/Forbidden HTTP method')
url = '/' + self.api_v + endpoint
headers = {
'Authorization': self.token,
'Content-Type': 'application/json'
}
if body:
json_body_or_None = json.dumps(body)
else:
json_body_or_None = None
call_timeout = timeout if timeout != -1 else self.timeout
APIcall = getattr(requests, method.lower())
res = APIcall('https://api.upcloud.com' + url,
data=json_body_or_None,
headers=headers,
timeout=call_timeout)
if res.text:
res_json = res.json()
else:
res_json = {}
return self.__error_middleware(res, res_json)
|
[
"def",
"request",
"(",
"self",
",",
"method",
",",
"endpoint",
",",
"body",
"=",
"None",
",",
"timeout",
"=",
"-",
"1",
")",
":",
"if",
"method",
"not",
"in",
"set",
"(",
"[",
"'GET'",
",",
"'POST'",
",",
"'PUT'",
",",
"'DELETE'",
"]",
")",
":",
"raise",
"Exception",
"(",
"'Invalid/Forbidden HTTP method'",
")",
"url",
"=",
"'/'",
"+",
"self",
".",
"api_v",
"+",
"endpoint",
"headers",
"=",
"{",
"'Authorization'",
":",
"self",
".",
"token",
",",
"'Content-Type'",
":",
"'application/json'",
"}",
"if",
"body",
":",
"json_body_or_None",
"=",
"json",
".",
"dumps",
"(",
"body",
")",
"else",
":",
"json_body_or_None",
"=",
"None",
"call_timeout",
"=",
"timeout",
"if",
"timeout",
"!=",
"-",
"1",
"else",
"self",
".",
"timeout",
"APIcall",
"=",
"getattr",
"(",
"requests",
",",
"method",
".",
"lower",
"(",
")",
")",
"res",
"=",
"APIcall",
"(",
"'https://api.upcloud.com'",
"+",
"url",
",",
"data",
"=",
"json_body_or_None",
",",
"headers",
"=",
"headers",
",",
"timeout",
"=",
"call_timeout",
")",
"if",
"res",
".",
"text",
":",
"res_json",
"=",
"res",
".",
"json",
"(",
")",
"else",
":",
"res_json",
"=",
"{",
"}",
"return",
"self",
".",
"__error_middleware",
"(",
"res",
",",
"res_json",
")"
] |
Perform a request with a given body to a given endpoint in UpCloud's API.
Handles errors with __error_middleware.
|
[
"Perform",
"a",
"request",
"with",
"a",
"given",
"body",
"to",
"a",
"given",
"endpoint",
"in",
"UpCloud",
"s",
"API",
"."
] |
954b0ad7c4b932b2be31a95d88975f6b0eeac8ed
|
https://github.com/UpCloudLtd/upcloud-python-api/blob/954b0ad7c4b932b2be31a95d88975f6b0eeac8ed/upcloud_api/cloud_manager/base.py#L21-L54
|
train
|
UpCloudLtd/upcloud-python-api
|
upcloud_api/cloud_manager/base.py
|
BaseAPI.post_request
|
def post_request(self, endpoint, body=None, timeout=-1):
"""
Perform a POST request to a given endpoint in UpCloud's API.
"""
return self.request('POST', endpoint, body, timeout)
|
python
|
def post_request(self, endpoint, body=None, timeout=-1):
"""
Perform a POST request to a given endpoint in UpCloud's API.
"""
return self.request('POST', endpoint, body, timeout)
|
[
"def",
"post_request",
"(",
"self",
",",
"endpoint",
",",
"body",
"=",
"None",
",",
"timeout",
"=",
"-",
"1",
")",
":",
"return",
"self",
".",
"request",
"(",
"'POST'",
",",
"endpoint",
",",
"body",
",",
"timeout",
")"
] |
Perform a POST request to a given endpoint in UpCloud's API.
|
[
"Perform",
"a",
"POST",
"request",
"to",
"a",
"given",
"endpoint",
"in",
"UpCloud",
"s",
"API",
"."
] |
954b0ad7c4b932b2be31a95d88975f6b0eeac8ed
|
https://github.com/UpCloudLtd/upcloud-python-api/blob/954b0ad7c4b932b2be31a95d88975f6b0eeac8ed/upcloud_api/cloud_manager/base.py#L62-L66
|
train
|
UpCloudLtd/upcloud-python-api
|
upcloud_api/cloud_manager/base.py
|
BaseAPI.__error_middleware
|
def __error_middleware(self, res, res_json):
"""
Middleware that raises an exception when HTTP statuscode is an error code.
"""
if(res.status_code in [400, 401, 402, 403, 404, 405, 406, 409]):
err_dict = res_json.get('error', {})
raise UpCloudAPIError(error_code=err_dict.get('error_code'),
error_message=err_dict.get('error_message'))
return res_json
|
python
|
def __error_middleware(self, res, res_json):
"""
Middleware that raises an exception when HTTP statuscode is an error code.
"""
if(res.status_code in [400, 401, 402, 403, 404, 405, 406, 409]):
err_dict = res_json.get('error', {})
raise UpCloudAPIError(error_code=err_dict.get('error_code'),
error_message=err_dict.get('error_message'))
return res_json
|
[
"def",
"__error_middleware",
"(",
"self",
",",
"res",
",",
"res_json",
")",
":",
"if",
"(",
"res",
".",
"status_code",
"in",
"[",
"400",
",",
"401",
",",
"402",
",",
"403",
",",
"404",
",",
"405",
",",
"406",
",",
"409",
"]",
")",
":",
"err_dict",
"=",
"res_json",
".",
"get",
"(",
"'error'",
",",
"{",
"}",
")",
"raise",
"UpCloudAPIError",
"(",
"error_code",
"=",
"err_dict",
".",
"get",
"(",
"'error_code'",
")",
",",
"error_message",
"=",
"err_dict",
".",
"get",
"(",
"'error_message'",
")",
")",
"return",
"res_json"
] |
Middleware that raises an exception when HTTP statuscode is an error code.
|
[
"Middleware",
"that",
"raises",
"an",
"exception",
"when",
"HTTP",
"statuscode",
"is",
"an",
"error",
"code",
"."
] |
954b0ad7c4b932b2be31a95d88975f6b0eeac8ed
|
https://github.com/UpCloudLtd/upcloud-python-api/blob/954b0ad7c4b932b2be31a95d88975f6b0eeac8ed/upcloud_api/cloud_manager/base.py#L68-L77
|
train
|
okfn/ofs
|
ofs/remote/swiftstore.py
|
SwiftOFS.put_stream
|
def put_stream(self, bucket, label, stream_object, params={}):
''' Create a new file to swift object storage. '''
self.claim_bucket(bucket)
self.connection.put_object(bucket, label, stream_object,
headers=self._convert_to_meta(params))
|
python
|
def put_stream(self, bucket, label, stream_object, params={}):
''' Create a new file to swift object storage. '''
self.claim_bucket(bucket)
self.connection.put_object(bucket, label, stream_object,
headers=self._convert_to_meta(params))
|
[
"def",
"put_stream",
"(",
"self",
",",
"bucket",
",",
"label",
",",
"stream_object",
",",
"params",
"=",
"{",
"}",
")",
":",
"self",
".",
"claim_bucket",
"(",
"bucket",
")",
"self",
".",
"connection",
".",
"put_object",
"(",
"bucket",
",",
"label",
",",
"stream_object",
",",
"headers",
"=",
"self",
".",
"_convert_to_meta",
"(",
"params",
")",
")"
] |
Create a new file to swift object storage.
|
[
"Create",
"a",
"new",
"file",
"to",
"swift",
"object",
"storage",
"."
] |
c110cbecd7d0ae7e877963914a1a5af030cd6d45
|
https://github.com/okfn/ofs/blob/c110cbecd7d0ae7e877963914a1a5af030cd6d45/ofs/remote/swiftstore.py#L116-L120
|
train
|
ralphhaygood/sklearn-gbmi
|
sklearn_gbmi/sklearn_gbmi.py
|
h
|
def h(gbm, array_or_frame, indices_or_columns = 'all'):
"""
PURPOSE
Compute Friedman and Popescu's H statistic, in order to look for an interaction in the passed gradient-boosting
model among the variables represented by the elements of the passed array or frame and specified by the passed
indices or columns.
See Jerome H. Friedman and Bogdan E. Popescu, 2008, "Predictive learning via rule ensembles", Ann. Appl. Stat.
2:916-954, http://projecteuclid.org/download/pdfview_1/euclid.aoas/1223908046, s. 8.1.
ARGUMENTS
gbm should be a scikit-learn gradient-boosting model (instance of sklearn.ensemble.GradientBoostingClassifier or
sklearn.ensemble.GradientBoostingRegressor) that has been fitted to array_or_frame (and a target, not used here).
array_or_frame should be a two-dimensional NumPy array or a pandas data frame (instance of numpy.ndarray or pandas
.DataFrame).
indices_or_columns is optional, with default value 'all'. It should be 'all' or a list of indices of columns of
array_or_frame if array_or_frame is a NumPy array or a list of columns of array_or_frame if array_or_frame is a
pandas data frame. If it is 'all', then all columns of array_or_frame are used.
RETURNS
The H statistic of the variables or NaN if the computation is spoiled by weak main effects and rounding errors.
H varies from 0 to 1. The larger H, the stronger the evidence for an interaction among the variables.
EXAMPLES
Friedman and Popescu's (2008) formulas (44) and (46) correspond to
h(F, x, [j, k])
and
h(F, x, [j, k, l])
respectively.
NOTES
1. Per Friedman and Popescu, only variables with strong main effects should be examined for interactions. Strengths
of main effects are available as gbm.feature_importances_ once gbm has been fitted.
2. Per Friedman and Popescu, collinearity among variables can lead to interactions in gbm that are not present in
the target function. To forestall such spurious interactions, check for strong correlations among variables before
fitting gbm.
"""
if indices_or_columns == 'all':
if gbm.max_depth < array_or_frame.shape[1]:
raise \
Exception(
"gbm.max_depth == {} < array_or_frame.shape[1] == {}, so indices_or_columns must not be 'all'."
.format(gbm.max_depth, array_or_frame.shape[1])
)
else:
if gbm.max_depth < len(indices_or_columns):
raise \
Exception(
"gbm.max_depth == {}, so indices_or_columns must contain at most {} {}."
.format(gbm.max_depth, gbm.max_depth, "element" if gbm.max_depth == 1 else "elements")
)
check_args_contd(array_or_frame, indices_or_columns)
arr, model_inds = get_arr_and_model_inds(array_or_frame, indices_or_columns)
width = arr.shape[1]
f_vals = {}
for n in range(width, 0, -1):
for inds in itertools.combinations(range(width), n):
f_vals[inds] = compute_f_vals(gbm, model_inds, arr, inds)
return compute_h_val(f_vals, arr, tuple(range(width)))
|
python
|
def h(gbm, array_or_frame, indices_or_columns = 'all'):
"""
PURPOSE
Compute Friedman and Popescu's H statistic, in order to look for an interaction in the passed gradient-boosting
model among the variables represented by the elements of the passed array or frame and specified by the passed
indices or columns.
See Jerome H. Friedman and Bogdan E. Popescu, 2008, "Predictive learning via rule ensembles", Ann. Appl. Stat.
2:916-954, http://projecteuclid.org/download/pdfview_1/euclid.aoas/1223908046, s. 8.1.
ARGUMENTS
gbm should be a scikit-learn gradient-boosting model (instance of sklearn.ensemble.GradientBoostingClassifier or
sklearn.ensemble.GradientBoostingRegressor) that has been fitted to array_or_frame (and a target, not used here).
array_or_frame should be a two-dimensional NumPy array or a pandas data frame (instance of numpy.ndarray or pandas
.DataFrame).
indices_or_columns is optional, with default value 'all'. It should be 'all' or a list of indices of columns of
array_or_frame if array_or_frame is a NumPy array or a list of columns of array_or_frame if array_or_frame is a
pandas data frame. If it is 'all', then all columns of array_or_frame are used.
RETURNS
The H statistic of the variables or NaN if the computation is spoiled by weak main effects and rounding errors.
H varies from 0 to 1. The larger H, the stronger the evidence for an interaction among the variables.
EXAMPLES
Friedman and Popescu's (2008) formulas (44) and (46) correspond to
h(F, x, [j, k])
and
h(F, x, [j, k, l])
respectively.
NOTES
1. Per Friedman and Popescu, only variables with strong main effects should be examined for interactions. Strengths
of main effects are available as gbm.feature_importances_ once gbm has been fitted.
2. Per Friedman and Popescu, collinearity among variables can lead to interactions in gbm that are not present in
the target function. To forestall such spurious interactions, check for strong correlations among variables before
fitting gbm.
"""
if indices_or_columns == 'all':
if gbm.max_depth < array_or_frame.shape[1]:
raise \
Exception(
"gbm.max_depth == {} < array_or_frame.shape[1] == {}, so indices_or_columns must not be 'all'."
.format(gbm.max_depth, array_or_frame.shape[1])
)
else:
if gbm.max_depth < len(indices_or_columns):
raise \
Exception(
"gbm.max_depth == {}, so indices_or_columns must contain at most {} {}."
.format(gbm.max_depth, gbm.max_depth, "element" if gbm.max_depth == 1 else "elements")
)
check_args_contd(array_or_frame, indices_or_columns)
arr, model_inds = get_arr_and_model_inds(array_or_frame, indices_or_columns)
width = arr.shape[1]
f_vals = {}
for n in range(width, 0, -1):
for inds in itertools.combinations(range(width), n):
f_vals[inds] = compute_f_vals(gbm, model_inds, arr, inds)
return compute_h_val(f_vals, arr, tuple(range(width)))
|
[
"def",
"h",
"(",
"gbm",
",",
"array_or_frame",
",",
"indices_or_columns",
"=",
"'all'",
")",
":",
"if",
"indices_or_columns",
"==",
"'all'",
":",
"if",
"gbm",
".",
"max_depth",
"<",
"array_or_frame",
".",
"shape",
"[",
"1",
"]",
":",
"raise",
"Exception",
"(",
"\"gbm.max_depth == {} < array_or_frame.shape[1] == {}, so indices_or_columns must not be 'all'.\"",
".",
"format",
"(",
"gbm",
".",
"max_depth",
",",
"array_or_frame",
".",
"shape",
"[",
"1",
"]",
")",
")",
"else",
":",
"if",
"gbm",
".",
"max_depth",
"<",
"len",
"(",
"indices_or_columns",
")",
":",
"raise",
"Exception",
"(",
"\"gbm.max_depth == {}, so indices_or_columns must contain at most {} {}.\"",
".",
"format",
"(",
"gbm",
".",
"max_depth",
",",
"gbm",
".",
"max_depth",
",",
"\"element\"",
"if",
"gbm",
".",
"max_depth",
"==",
"1",
"else",
"\"elements\"",
")",
")",
"check_args_contd",
"(",
"array_or_frame",
",",
"indices_or_columns",
")",
"arr",
",",
"model_inds",
"=",
"get_arr_and_model_inds",
"(",
"array_or_frame",
",",
"indices_or_columns",
")",
"width",
"=",
"arr",
".",
"shape",
"[",
"1",
"]",
"f_vals",
"=",
"{",
"}",
"for",
"n",
"in",
"range",
"(",
"width",
",",
"0",
",",
"-",
"1",
")",
":",
"for",
"inds",
"in",
"itertools",
".",
"combinations",
"(",
"range",
"(",
"width",
")",
",",
"n",
")",
":",
"f_vals",
"[",
"inds",
"]",
"=",
"compute_f_vals",
"(",
"gbm",
",",
"model_inds",
",",
"arr",
",",
"inds",
")",
"return",
"compute_h_val",
"(",
"f_vals",
",",
"arr",
",",
"tuple",
"(",
"range",
"(",
"width",
")",
")",
")"
] |
PURPOSE
Compute Friedman and Popescu's H statistic, in order to look for an interaction in the passed gradient-boosting
model among the variables represented by the elements of the passed array or frame and specified by the passed
indices or columns.
See Jerome H. Friedman and Bogdan E. Popescu, 2008, "Predictive learning via rule ensembles", Ann. Appl. Stat.
2:916-954, http://projecteuclid.org/download/pdfview_1/euclid.aoas/1223908046, s. 8.1.
ARGUMENTS
gbm should be a scikit-learn gradient-boosting model (instance of sklearn.ensemble.GradientBoostingClassifier or
sklearn.ensemble.GradientBoostingRegressor) that has been fitted to array_or_frame (and a target, not used here).
array_or_frame should be a two-dimensional NumPy array or a pandas data frame (instance of numpy.ndarray or pandas
.DataFrame).
indices_or_columns is optional, with default value 'all'. It should be 'all' or a list of indices of columns of
array_or_frame if array_or_frame is a NumPy array or a list of columns of array_or_frame if array_or_frame is a
pandas data frame. If it is 'all', then all columns of array_or_frame are used.
RETURNS
The H statistic of the variables or NaN if the computation is spoiled by weak main effects and rounding errors.
H varies from 0 to 1. The larger H, the stronger the evidence for an interaction among the variables.
EXAMPLES
Friedman and Popescu's (2008) formulas (44) and (46) correspond to
h(F, x, [j, k])
and
h(F, x, [j, k, l])
respectively.
NOTES
1. Per Friedman and Popescu, only variables with strong main effects should be examined for interactions. Strengths
of main effects are available as gbm.feature_importances_ once gbm has been fitted.
2. Per Friedman and Popescu, collinearity among variables can lead to interactions in gbm that are not present in
the target function. To forestall such spurious interactions, check for strong correlations among variables before
fitting gbm.
|
[
"PURPOSE"
] |
23a1e7fd50e53d6261379f22a337d8fa4ee6aabe
|
https://github.com/ralphhaygood/sklearn-gbmi/blob/23a1e7fd50e53d6261379f22a337d8fa4ee6aabe/sklearn_gbmi/sklearn_gbmi.py#L16-L95
|
train
|
ralphhaygood/sklearn-gbmi
|
sklearn_gbmi/sklearn_gbmi.py
|
h_all_pairs
|
def h_all_pairs(gbm, array_or_frame, indices_or_columns = 'all'):
"""
PURPOSE
Compute Friedman and Popescu's two-variable H statistic, in order to look for an interaction in the passed gradient-
boosting model between each pair of variables represented by the elements of the passed array or frame and specified
by the passed indices or columns.
See Jerome H. Friedman and Bogdan E. Popescu, 2008, "Predictive learning via rule ensembles", Ann. Appl. Stat.
2:916-954, http://projecteuclid.org/download/pdfview_1/euclid.aoas/1223908046, s. 8.1.
ARGUMENTS
gbm should be a scikit-learn gradient-boosting model (instance of sklearn.ensemble.GradientBoostingClassifier or
sklearn.ensemble.GradientBoostingRegressor) that has been fitted to array_or_frame (and a target, not used here).
array_or_frame should be a two-dimensional NumPy array or a pandas data frame (instance of numpy.ndarray or pandas
.DataFrame).
indices_or_columns is optional, with default value 'all'. It should be 'all' or a list of indices of columns of
array_or_frame if array_or_frame is a NumPy array or a list of columns of array_or_frame if array_or_frame is a
pandas data frame. If it is 'all', then all columns of array_or_frame are used.
RETURNS
A dict whose keys are pairs (2-tuples) of indices or columns and whose values are the H statistic of the pairs of
variables or NaN if a computation is spoiled by weak main effects and rounding errors.
H varies from 0 to 1. The larger H, the stronger the evidence for an interaction between a pair of variables.
EXAMPLE
Friedman and Popescu's (2008) formula (44) for every j and k corresponds to
h_all_pairs(F, x)
NOTES
1. Per Friedman and Popescu, only variables with strong main effects should be examined for interactions. Strengths
of main effects are available as gbm.feature_importances_ once gbm has been fitted.
2. Per Friedman and Popescu, collinearity among variables can lead to interactions in gbm that are not present in
the target function. To forestall such spurious interactions, check for strong correlations among variables before
fitting gbm.
"""
if gbm.max_depth < 2:
raise Exception("gbm.max_depth must be at least 2.")
check_args_contd(array_or_frame, indices_or_columns)
arr, model_inds = get_arr_and_model_inds(array_or_frame, indices_or_columns)
width = arr.shape[1]
f_vals = {}
for n in [2, 1]:
for inds in itertools.combinations(range(width), n):
f_vals[inds] = compute_f_vals(gbm, model_inds, arr, inds)
h_vals = {}
for inds in itertools.combinations(range(width), 2):
h_vals[inds] = compute_h_val(f_vals, arr, inds)
if indices_or_columns != 'all':
h_vals = {tuple(model_inds[(inds,)]): h_vals[inds] for inds in h_vals.keys()}
if not isinstance(array_or_frame, np.ndarray):
all_cols = array_or_frame.columns.values
h_vals = {tuple(all_cols[(inds,)]): h_vals[inds] for inds in h_vals.keys()}
return h_vals
|
python
|
def h_all_pairs(gbm, array_or_frame, indices_or_columns = 'all'):
"""
PURPOSE
Compute Friedman and Popescu's two-variable H statistic, in order to look for an interaction in the passed gradient-
boosting model between each pair of variables represented by the elements of the passed array or frame and specified
by the passed indices or columns.
See Jerome H. Friedman and Bogdan E. Popescu, 2008, "Predictive learning via rule ensembles", Ann. Appl. Stat.
2:916-954, http://projecteuclid.org/download/pdfview_1/euclid.aoas/1223908046, s. 8.1.
ARGUMENTS
gbm should be a scikit-learn gradient-boosting model (instance of sklearn.ensemble.GradientBoostingClassifier or
sklearn.ensemble.GradientBoostingRegressor) that has been fitted to array_or_frame (and a target, not used here).
array_or_frame should be a two-dimensional NumPy array or a pandas data frame (instance of numpy.ndarray or pandas
.DataFrame).
indices_or_columns is optional, with default value 'all'. It should be 'all' or a list of indices of columns of
array_or_frame if array_or_frame is a NumPy array or a list of columns of array_or_frame if array_or_frame is a
pandas data frame. If it is 'all', then all columns of array_or_frame are used.
RETURNS
A dict whose keys are pairs (2-tuples) of indices or columns and whose values are the H statistic of the pairs of
variables or NaN if a computation is spoiled by weak main effects and rounding errors.
H varies from 0 to 1. The larger H, the stronger the evidence for an interaction between a pair of variables.
EXAMPLE
Friedman and Popescu's (2008) formula (44) for every j and k corresponds to
h_all_pairs(F, x)
NOTES
1. Per Friedman and Popescu, only variables with strong main effects should be examined for interactions. Strengths
of main effects are available as gbm.feature_importances_ once gbm has been fitted.
2. Per Friedman and Popescu, collinearity among variables can lead to interactions in gbm that are not present in
the target function. To forestall such spurious interactions, check for strong correlations among variables before
fitting gbm.
"""
if gbm.max_depth < 2:
raise Exception("gbm.max_depth must be at least 2.")
check_args_contd(array_or_frame, indices_or_columns)
arr, model_inds = get_arr_and_model_inds(array_or_frame, indices_or_columns)
width = arr.shape[1]
f_vals = {}
for n in [2, 1]:
for inds in itertools.combinations(range(width), n):
f_vals[inds] = compute_f_vals(gbm, model_inds, arr, inds)
h_vals = {}
for inds in itertools.combinations(range(width), 2):
h_vals[inds] = compute_h_val(f_vals, arr, inds)
if indices_or_columns != 'all':
h_vals = {tuple(model_inds[(inds,)]): h_vals[inds] for inds in h_vals.keys()}
if not isinstance(array_or_frame, np.ndarray):
all_cols = array_or_frame.columns.values
h_vals = {tuple(all_cols[(inds,)]): h_vals[inds] for inds in h_vals.keys()}
return h_vals
|
[
"def",
"h_all_pairs",
"(",
"gbm",
",",
"array_or_frame",
",",
"indices_or_columns",
"=",
"'all'",
")",
":",
"if",
"gbm",
".",
"max_depth",
"<",
"2",
":",
"raise",
"Exception",
"(",
"\"gbm.max_depth must be at least 2.\"",
")",
"check_args_contd",
"(",
"array_or_frame",
",",
"indices_or_columns",
")",
"arr",
",",
"model_inds",
"=",
"get_arr_and_model_inds",
"(",
"array_or_frame",
",",
"indices_or_columns",
")",
"width",
"=",
"arr",
".",
"shape",
"[",
"1",
"]",
"f_vals",
"=",
"{",
"}",
"for",
"n",
"in",
"[",
"2",
",",
"1",
"]",
":",
"for",
"inds",
"in",
"itertools",
".",
"combinations",
"(",
"range",
"(",
"width",
")",
",",
"n",
")",
":",
"f_vals",
"[",
"inds",
"]",
"=",
"compute_f_vals",
"(",
"gbm",
",",
"model_inds",
",",
"arr",
",",
"inds",
")",
"h_vals",
"=",
"{",
"}",
"for",
"inds",
"in",
"itertools",
".",
"combinations",
"(",
"range",
"(",
"width",
")",
",",
"2",
")",
":",
"h_vals",
"[",
"inds",
"]",
"=",
"compute_h_val",
"(",
"f_vals",
",",
"arr",
",",
"inds",
")",
"if",
"indices_or_columns",
"!=",
"'all'",
":",
"h_vals",
"=",
"{",
"tuple",
"(",
"model_inds",
"[",
"(",
"inds",
",",
")",
"]",
")",
":",
"h_vals",
"[",
"inds",
"]",
"for",
"inds",
"in",
"h_vals",
".",
"keys",
"(",
")",
"}",
"if",
"not",
"isinstance",
"(",
"array_or_frame",
",",
"np",
".",
"ndarray",
")",
":",
"all_cols",
"=",
"array_or_frame",
".",
"columns",
".",
"values",
"h_vals",
"=",
"{",
"tuple",
"(",
"all_cols",
"[",
"(",
"inds",
",",
")",
"]",
")",
":",
"h_vals",
"[",
"inds",
"]",
"for",
"inds",
"in",
"h_vals",
".",
"keys",
"(",
")",
"}",
"return",
"h_vals"
] |
PURPOSE
Compute Friedman and Popescu's two-variable H statistic, in order to look for an interaction in the passed gradient-
boosting model between each pair of variables represented by the elements of the passed array or frame and specified
by the passed indices or columns.
See Jerome H. Friedman and Bogdan E. Popescu, 2008, "Predictive learning via rule ensembles", Ann. Appl. Stat.
2:916-954, http://projecteuclid.org/download/pdfview_1/euclid.aoas/1223908046, s. 8.1.
ARGUMENTS
gbm should be a scikit-learn gradient-boosting model (instance of sklearn.ensemble.GradientBoostingClassifier or
sklearn.ensemble.GradientBoostingRegressor) that has been fitted to array_or_frame (and a target, not used here).
array_or_frame should be a two-dimensional NumPy array or a pandas data frame (instance of numpy.ndarray or pandas
.DataFrame).
indices_or_columns is optional, with default value 'all'. It should be 'all' or a list of indices of columns of
array_or_frame if array_or_frame is a NumPy array or a list of columns of array_or_frame if array_or_frame is a
pandas data frame. If it is 'all', then all columns of array_or_frame are used.
RETURNS
A dict whose keys are pairs (2-tuples) of indices or columns and whose values are the H statistic of the pairs of
variables or NaN if a computation is spoiled by weak main effects and rounding errors.
H varies from 0 to 1. The larger H, the stronger the evidence for an interaction between a pair of variables.
EXAMPLE
Friedman and Popescu's (2008) formula (44) for every j and k corresponds to
h_all_pairs(F, x)
NOTES
1. Per Friedman and Popescu, only variables with strong main effects should be examined for interactions. Strengths
of main effects are available as gbm.feature_importances_ once gbm has been fitted.
2. Per Friedman and Popescu, collinearity among variables can lead to interactions in gbm that are not present in
the target function. To forestall such spurious interactions, check for strong correlations among variables before
fitting gbm.
|
[
"PURPOSE"
] |
23a1e7fd50e53d6261379f22a337d8fa4ee6aabe
|
https://github.com/ralphhaygood/sklearn-gbmi/blob/23a1e7fd50e53d6261379f22a337d8fa4ee6aabe/sklearn_gbmi/sklearn_gbmi.py#L98-L169
|
train
|
csirtgadgets/csirtgsdk-py
|
csirtgsdk/predict.py
|
Predict.get
|
def get(self, q, limit=None):
"""
Performs a search against the predict endpoint
:param q: query to be searched for [STRING]
:return: { score: [0|1] }
"""
uri = '{}/predict?q={}'.format(self.client.remote, q)
self.logger.debug(uri)
body = self.client.get(uri)
return body['score']
|
python
|
def get(self, q, limit=None):
"""
Performs a search against the predict endpoint
:param q: query to be searched for [STRING]
:return: { score: [0|1] }
"""
uri = '{}/predict?q={}'.format(self.client.remote, q)
self.logger.debug(uri)
body = self.client.get(uri)
return body['score']
|
[
"def",
"get",
"(",
"self",
",",
"q",
",",
"limit",
"=",
"None",
")",
":",
"uri",
"=",
"'{}/predict?q={}'",
".",
"format",
"(",
"self",
".",
"client",
".",
"remote",
",",
"q",
")",
"self",
".",
"logger",
".",
"debug",
"(",
"uri",
")",
"body",
"=",
"self",
".",
"client",
".",
"get",
"(",
"uri",
")",
"return",
"body",
"[",
"'score'",
"]"
] |
Performs a search against the predict endpoint
:param q: query to be searched for [STRING]
:return: { score: [0|1] }
|
[
"Performs",
"a",
"search",
"against",
"the",
"predict",
"endpoint"
] |
5a7ed9c5e6fa27170366ecbdef710dc80d537dc2
|
https://github.com/csirtgadgets/csirtgsdk-py/blob/5a7ed9c5e6fa27170366ecbdef710dc80d537dc2/csirtgsdk/predict.py#L18-L29
|
train
|
okfn/ofs
|
ofs/local/zipstore.py
|
ZOFS.exists
|
def exists(self, bucket, label):
'''Whether a given bucket:label object already exists.'''
fn = self._zf(bucket, label)
try:
self.z.getinfo(fn)
return True
except KeyError:
return False
|
python
|
def exists(self, bucket, label):
'''Whether a given bucket:label object already exists.'''
fn = self._zf(bucket, label)
try:
self.z.getinfo(fn)
return True
except KeyError:
return False
|
[
"def",
"exists",
"(",
"self",
",",
"bucket",
",",
"label",
")",
":",
"fn",
"=",
"self",
".",
"_zf",
"(",
"bucket",
",",
"label",
")",
"try",
":",
"self",
".",
"z",
".",
"getinfo",
"(",
"fn",
")",
"return",
"True",
"except",
"KeyError",
":",
"return",
"False"
] |
Whether a given bucket:label object already exists.
|
[
"Whether",
"a",
"given",
"bucket",
":",
"label",
"object",
"already",
"exists",
"."
] |
c110cbecd7d0ae7e877963914a1a5af030cd6d45
|
https://github.com/okfn/ofs/blob/c110cbecd7d0ae7e877963914a1a5af030cd6d45/ofs/local/zipstore.py#L112-L119
|
train
|
okfn/ofs
|
ofs/local/zipstore.py
|
ZOFS.list_labels
|
def list_labels(self, bucket):
'''List labels for the given bucket. Due to zipfiles inherent arbitrary ordering,
this is an expensive operation, as it walks the entire archive searching for individual
'buckets'
:param bucket: bucket to list labels for.
:return: iterator for the labels in the specified bucket.
'''
for name in self.z.namelist():
container, label = self._nf(name.encode("utf-8"))
if container == bucket and label != MD_FILE:
yield label
|
python
|
def list_labels(self, bucket):
'''List labels for the given bucket. Due to zipfiles inherent arbitrary ordering,
this is an expensive operation, as it walks the entire archive searching for individual
'buckets'
:param bucket: bucket to list labels for.
:return: iterator for the labels in the specified bucket.
'''
for name in self.z.namelist():
container, label = self._nf(name.encode("utf-8"))
if container == bucket and label != MD_FILE:
yield label
|
[
"def",
"list_labels",
"(",
"self",
",",
"bucket",
")",
":",
"for",
"name",
"in",
"self",
".",
"z",
".",
"namelist",
"(",
")",
":",
"container",
",",
"label",
"=",
"self",
".",
"_nf",
"(",
"name",
".",
"encode",
"(",
"\"utf-8\"",
")",
")",
"if",
"container",
"==",
"bucket",
"and",
"label",
"!=",
"MD_FILE",
":",
"yield",
"label"
] |
List labels for the given bucket. Due to zipfiles inherent arbitrary ordering,
this is an expensive operation, as it walks the entire archive searching for individual
'buckets'
:param bucket: bucket to list labels for.
:return: iterator for the labels in the specified bucket.
|
[
"List",
"labels",
"for",
"the",
"given",
"bucket",
".",
"Due",
"to",
"zipfiles",
"inherent",
"arbitrary",
"ordering",
"this",
"is",
"an",
"expensive",
"operation",
"as",
"it",
"walks",
"the",
"entire",
"archive",
"searching",
"for",
"individual",
"buckets"
] |
c110cbecd7d0ae7e877963914a1a5af030cd6d45
|
https://github.com/okfn/ofs/blob/c110cbecd7d0ae7e877963914a1a5af030cd6d45/ofs/local/zipstore.py#L131-L142
|
train
|
okfn/ofs
|
ofs/local/zipstore.py
|
ZOFS.list_buckets
|
def list_buckets(self):
'''List all buckets managed by this OFS instance. Like list_labels, this also
walks the entire archive, yielding the bucketnames. A local set is retained so that
duplicates aren't returned so this will temporarily pull the entire list into memory
even though this is a generator and will slow as more buckets are added to the set.
:return: iterator for the buckets.
'''
buckets = set()
for name in self.z.namelist():
bucket, _ = self._nf(name)
if bucket not in buckets:
buckets.add(bucket)
yield bucket
|
python
|
def list_buckets(self):
'''List all buckets managed by this OFS instance. Like list_labels, this also
walks the entire archive, yielding the bucketnames. A local set is retained so that
duplicates aren't returned so this will temporarily pull the entire list into memory
even though this is a generator and will slow as more buckets are added to the set.
:return: iterator for the buckets.
'''
buckets = set()
for name in self.z.namelist():
bucket, _ = self._nf(name)
if bucket not in buckets:
buckets.add(bucket)
yield bucket
|
[
"def",
"list_buckets",
"(",
"self",
")",
":",
"buckets",
"=",
"set",
"(",
")",
"for",
"name",
"in",
"self",
".",
"z",
".",
"namelist",
"(",
")",
":",
"bucket",
",",
"_",
"=",
"self",
".",
"_nf",
"(",
"name",
")",
"if",
"bucket",
"not",
"in",
"buckets",
":",
"buckets",
".",
"add",
"(",
"bucket",
")",
"yield",
"bucket"
] |
List all buckets managed by this OFS instance. Like list_labels, this also
walks the entire archive, yielding the bucketnames. A local set is retained so that
duplicates aren't returned so this will temporarily pull the entire list into memory
even though this is a generator and will slow as more buckets are added to the set.
:return: iterator for the buckets.
|
[
"List",
"all",
"buckets",
"managed",
"by",
"this",
"OFS",
"instance",
".",
"Like",
"list_labels",
"this",
"also",
"walks",
"the",
"entire",
"archive",
"yielding",
"the",
"bucketnames",
".",
"A",
"local",
"set",
"is",
"retained",
"so",
"that",
"duplicates",
"aren",
"t",
"returned",
"so",
"this",
"will",
"temporarily",
"pull",
"the",
"entire",
"list",
"into",
"memory",
"even",
"though",
"this",
"is",
"a",
"generator",
"and",
"will",
"slow",
"as",
"more",
"buckets",
"are",
"added",
"to",
"the",
"set",
"."
] |
c110cbecd7d0ae7e877963914a1a5af030cd6d45
|
https://github.com/okfn/ofs/blob/c110cbecd7d0ae7e877963914a1a5af030cd6d45/ofs/local/zipstore.py#L144-L157
|
train
|
okfn/ofs
|
ofs/local/zipstore.py
|
ZOFS.get_stream
|
def get_stream(self, bucket, label, as_stream=True):
'''Get a bitstream for the given bucket:label combination.
:param bucket: the bucket to use.
:return: bitstream as a file-like object
'''
if self.mode == "w":
raise OFSException("Cannot read from archive in 'w' mode")
elif self.exists(bucket, label):
fn = self._zf(bucket, label)
if as_stream:
return self.z.open(fn)
else:
return self.z.read(fn)
else:
raise OFSFileNotFound
|
python
|
def get_stream(self, bucket, label, as_stream=True):
'''Get a bitstream for the given bucket:label combination.
:param bucket: the bucket to use.
:return: bitstream as a file-like object
'''
if self.mode == "w":
raise OFSException("Cannot read from archive in 'w' mode")
elif self.exists(bucket, label):
fn = self._zf(bucket, label)
if as_stream:
return self.z.open(fn)
else:
return self.z.read(fn)
else:
raise OFSFileNotFound
|
[
"def",
"get_stream",
"(",
"self",
",",
"bucket",
",",
"label",
",",
"as_stream",
"=",
"True",
")",
":",
"if",
"self",
".",
"mode",
"==",
"\"w\"",
":",
"raise",
"OFSException",
"(",
"\"Cannot read from archive in 'w' mode\"",
")",
"elif",
"self",
".",
"exists",
"(",
"bucket",
",",
"label",
")",
":",
"fn",
"=",
"self",
".",
"_zf",
"(",
"bucket",
",",
"label",
")",
"if",
"as_stream",
":",
"return",
"self",
".",
"z",
".",
"open",
"(",
"fn",
")",
"else",
":",
"return",
"self",
".",
"z",
".",
"read",
"(",
"fn",
")",
"else",
":",
"raise",
"OFSFileNotFound"
] |
Get a bitstream for the given bucket:label combination.
:param bucket: the bucket to use.
:return: bitstream as a file-like object
|
[
"Get",
"a",
"bitstream",
"for",
"the",
"given",
"bucket",
":",
"label",
"combination",
"."
] |
c110cbecd7d0ae7e877963914a1a5af030cd6d45
|
https://github.com/okfn/ofs/blob/c110cbecd7d0ae7e877963914a1a5af030cd6d45/ofs/local/zipstore.py#L159-L174
|
train
|
okfn/ofs
|
ofs/local/zipstore.py
|
ZOFS.get_url
|
def get_url(self, bucket, label):
'''Get a URL that should point at the bucket:labelled resource. Aimed to aid web apps by allowing them to redirect to an open resource, rather than proxy the bitstream.
:param bucket: the bucket to use.
:param label: the label of the resource to get
:return: a string URI - eg 'zip:file:///home/.../foo.zip!/bucket/label'
'''
if self.exists(bucket, label):
root = "zip:file//%s" % os.path.abspath(self.zipfile)
fn = self._zf(bucket, label)
return "!/".join(root, fn)
else:
raise OFSFileNotFound
|
python
|
def get_url(self, bucket, label):
'''Get a URL that should point at the bucket:labelled resource. Aimed to aid web apps by allowing them to redirect to an open resource, rather than proxy the bitstream.
:param bucket: the bucket to use.
:param label: the label of the resource to get
:return: a string URI - eg 'zip:file:///home/.../foo.zip!/bucket/label'
'''
if self.exists(bucket, label):
root = "zip:file//%s" % os.path.abspath(self.zipfile)
fn = self._zf(bucket, label)
return "!/".join(root, fn)
else:
raise OFSFileNotFound
|
[
"def",
"get_url",
"(",
"self",
",",
"bucket",
",",
"label",
")",
":",
"if",
"self",
".",
"exists",
"(",
"bucket",
",",
"label",
")",
":",
"root",
"=",
"\"zip:file//%s\"",
"%",
"os",
".",
"path",
".",
"abspath",
"(",
"self",
".",
"zipfile",
")",
"fn",
"=",
"self",
".",
"_zf",
"(",
"bucket",
",",
"label",
")",
"return",
"\"!/\"",
".",
"join",
"(",
"root",
",",
"fn",
")",
"else",
":",
"raise",
"OFSFileNotFound"
] |
Get a URL that should point at the bucket:labelled resource. Aimed to aid web apps by allowing them to redirect to an open resource, rather than proxy the bitstream.
:param bucket: the bucket to use.
:param label: the label of the resource to get
:return: a string URI - eg 'zip:file:///home/.../foo.zip!/bucket/label'
|
[
"Get",
"a",
"URL",
"that",
"should",
"point",
"at",
"the",
"bucket",
":",
"labelled",
"resource",
".",
"Aimed",
"to",
"aid",
"web",
"apps",
"by",
"allowing",
"them",
"to",
"redirect",
"to",
"an",
"open",
"resource",
"rather",
"than",
"proxy",
"the",
"bitstream",
"."
] |
c110cbecd7d0ae7e877963914a1a5af030cd6d45
|
https://github.com/okfn/ofs/blob/c110cbecd7d0ae7e877963914a1a5af030cd6d45/ofs/local/zipstore.py#L176-L188
|
train
|
okfn/ofs
|
ofs/local/zipstore.py
|
ZOFS.put_stream
|
def put_stream(self, bucket, label, stream_object, params=None, replace=True, add_md=True):
'''Put a bitstream (stream_object) for the specified bucket:label identifier.
:param bucket: as standard
:param label: as standard
:param stream_object: file-like object to read from or bytestring.
:param params: update metadata with these params (see `update_metadata`)
'''
if self.mode == "r":
raise OFSException("Cannot write into archive in 'r' mode")
else:
params = params or {}
fn = self._zf(bucket, label)
params['_creation_date'] = datetime.now().isoformat().split(".")[0] ## '2010-07-08T19:56:47'
params['_label'] = label
if self.exists(bucket, label) and replace==True:
# Add then Replace? Let's see if that works...
#z = ZipFile(self.zipfile, self.mode, self.compression, self.allowZip64)
zinfo = self.z.getinfo(fn)
size, chksum = self._write(self.z, bucket, label, stream_object)
self._del_stream(zinfo)
#z.close()
params['_content_length'] = size
if chksum:
params['_checksum'] = chksum
else:
#z = ZipFile(self.zipfile, self.mode, self.compression, self.allowZip64)
size, chksum = self._write(self.z, bucket, label, stream_object)
#z.close()
params['_content_length'] = size
if chksum:
params['_checksum'] = chksum
if add_md:
params = self.update_metadata(bucket, label, params)
return params
|
python
|
def put_stream(self, bucket, label, stream_object, params=None, replace=True, add_md=True):
'''Put a bitstream (stream_object) for the specified bucket:label identifier.
:param bucket: as standard
:param label: as standard
:param stream_object: file-like object to read from or bytestring.
:param params: update metadata with these params (see `update_metadata`)
'''
if self.mode == "r":
raise OFSException("Cannot write into archive in 'r' mode")
else:
params = params or {}
fn = self._zf(bucket, label)
params['_creation_date'] = datetime.now().isoformat().split(".")[0] ## '2010-07-08T19:56:47'
params['_label'] = label
if self.exists(bucket, label) and replace==True:
# Add then Replace? Let's see if that works...
#z = ZipFile(self.zipfile, self.mode, self.compression, self.allowZip64)
zinfo = self.z.getinfo(fn)
size, chksum = self._write(self.z, bucket, label, stream_object)
self._del_stream(zinfo)
#z.close()
params['_content_length'] = size
if chksum:
params['_checksum'] = chksum
else:
#z = ZipFile(self.zipfile, self.mode, self.compression, self.allowZip64)
size, chksum = self._write(self.z, bucket, label, stream_object)
#z.close()
params['_content_length'] = size
if chksum:
params['_checksum'] = chksum
if add_md:
params = self.update_metadata(bucket, label, params)
return params
|
[
"def",
"put_stream",
"(",
"self",
",",
"bucket",
",",
"label",
",",
"stream_object",
",",
"params",
"=",
"None",
",",
"replace",
"=",
"True",
",",
"add_md",
"=",
"True",
")",
":",
"if",
"self",
".",
"mode",
"==",
"\"r\"",
":",
"raise",
"OFSException",
"(",
"\"Cannot write into archive in 'r' mode\"",
")",
"else",
":",
"params",
"=",
"params",
"or",
"{",
"}",
"fn",
"=",
"self",
".",
"_zf",
"(",
"bucket",
",",
"label",
")",
"params",
"[",
"'_creation_date'",
"]",
"=",
"datetime",
".",
"now",
"(",
")",
".",
"isoformat",
"(",
")",
".",
"split",
"(",
"\".\"",
")",
"[",
"0",
"]",
"## '2010-07-08T19:56:47'",
"params",
"[",
"'_label'",
"]",
"=",
"label",
"if",
"self",
".",
"exists",
"(",
"bucket",
",",
"label",
")",
"and",
"replace",
"==",
"True",
":",
"# Add then Replace? Let's see if that works...",
"#z = ZipFile(self.zipfile, self.mode, self.compression, self.allowZip64)",
"zinfo",
"=",
"self",
".",
"z",
".",
"getinfo",
"(",
"fn",
")",
"size",
",",
"chksum",
"=",
"self",
".",
"_write",
"(",
"self",
".",
"z",
",",
"bucket",
",",
"label",
",",
"stream_object",
")",
"self",
".",
"_del_stream",
"(",
"zinfo",
")",
"#z.close()",
"params",
"[",
"'_content_length'",
"]",
"=",
"size",
"if",
"chksum",
":",
"params",
"[",
"'_checksum'",
"]",
"=",
"chksum",
"else",
":",
"#z = ZipFile(self.zipfile, self.mode, self.compression, self.allowZip64)",
"size",
",",
"chksum",
"=",
"self",
".",
"_write",
"(",
"self",
".",
"z",
",",
"bucket",
",",
"label",
",",
"stream_object",
")",
"#z.close()",
"params",
"[",
"'_content_length'",
"]",
"=",
"size",
"if",
"chksum",
":",
"params",
"[",
"'_checksum'",
"]",
"=",
"chksum",
"if",
"add_md",
":",
"params",
"=",
"self",
".",
"update_metadata",
"(",
"bucket",
",",
"label",
",",
"params",
")",
"return",
"params"
] |
Put a bitstream (stream_object) for the specified bucket:label identifier.
:param bucket: as standard
:param label: as standard
:param stream_object: file-like object to read from or bytestring.
:param params: update metadata with these params (see `update_metadata`)
|
[
"Put",
"a",
"bitstream",
"(",
"stream_object",
")",
"for",
"the",
"specified",
"bucket",
":",
"label",
"identifier",
"."
] |
c110cbecd7d0ae7e877963914a1a5af030cd6d45
|
https://github.com/okfn/ofs/blob/c110cbecd7d0ae7e877963914a1a5af030cd6d45/ofs/local/zipstore.py#L190-L224
|
train
|
okfn/ofs
|
ofs/local/zipstore.py
|
ZOFS.del_stream
|
def del_stream(self, bucket, label):
'''Delete a bitstream. This needs more testing - file deletion in a zipfile
is problematic. Alternate method is to create second zipfile without the files
in question, which is not a nice method for large zip archives.
'''
if self.exists(bucket, label):
name = self._zf(bucket, label)
#z = ZipFile(self.zipfile, self.mode, self.compression, self.allowZip64)
self._del_stream(name)
|
python
|
def del_stream(self, bucket, label):
'''Delete a bitstream. This needs more testing - file deletion in a zipfile
is problematic. Alternate method is to create second zipfile without the files
in question, which is not a nice method for large zip archives.
'''
if self.exists(bucket, label):
name = self._zf(bucket, label)
#z = ZipFile(self.zipfile, self.mode, self.compression, self.allowZip64)
self._del_stream(name)
|
[
"def",
"del_stream",
"(",
"self",
",",
"bucket",
",",
"label",
")",
":",
"if",
"self",
".",
"exists",
"(",
"bucket",
",",
"label",
")",
":",
"name",
"=",
"self",
".",
"_zf",
"(",
"bucket",
",",
"label",
")",
"#z = ZipFile(self.zipfile, self.mode, self.compression, self.allowZip64)",
"self",
".",
"_del_stream",
"(",
"name",
")"
] |
Delete a bitstream. This needs more testing - file deletion in a zipfile
is problematic. Alternate method is to create second zipfile without the files
in question, which is not a nice method for large zip archives.
|
[
"Delete",
"a",
"bitstream",
".",
"This",
"needs",
"more",
"testing",
"-",
"file",
"deletion",
"in",
"a",
"zipfile",
"is",
"problematic",
".",
"Alternate",
"method",
"is",
"to",
"create",
"second",
"zipfile",
"without",
"the",
"files",
"in",
"question",
"which",
"is",
"not",
"a",
"nice",
"method",
"for",
"large",
"zip",
"archives",
"."
] |
c110cbecd7d0ae7e877963914a1a5af030cd6d45
|
https://github.com/okfn/ofs/blob/c110cbecd7d0ae7e877963914a1a5af030cd6d45/ofs/local/zipstore.py#L238-L246
|
train
|
okfn/ofs
|
ofs/local/zipstore.py
|
ZOFS.get_metadata
|
def get_metadata(self, bucket, label):
'''Get the metadata for this bucket:label identifier.
'''
if self.mode !="w":
try:
jsn = self._get_bucket_md(bucket)
except OFSFileNotFound:
# No MD found...
return {}
except OFSException as e:
raise OFSException(e)
if label in jsn:
return jsn[label]
else:
return {}
else:
raise OFSException("Cannot read md from archive in 'w' mode")
|
python
|
def get_metadata(self, bucket, label):
'''Get the metadata for this bucket:label identifier.
'''
if self.mode !="w":
try:
jsn = self._get_bucket_md(bucket)
except OFSFileNotFound:
# No MD found...
return {}
except OFSException as e:
raise OFSException(e)
if label in jsn:
return jsn[label]
else:
return {}
else:
raise OFSException("Cannot read md from archive in 'w' mode")
|
[
"def",
"get_metadata",
"(",
"self",
",",
"bucket",
",",
"label",
")",
":",
"if",
"self",
".",
"mode",
"!=",
"\"w\"",
":",
"try",
":",
"jsn",
"=",
"self",
".",
"_get_bucket_md",
"(",
"bucket",
")",
"except",
"OFSFileNotFound",
":",
"# No MD found...",
"return",
"{",
"}",
"except",
"OFSException",
"as",
"e",
":",
"raise",
"OFSException",
"(",
"e",
")",
"if",
"label",
"in",
"jsn",
":",
"return",
"jsn",
"[",
"label",
"]",
"else",
":",
"return",
"{",
"}",
"else",
":",
"raise",
"OFSException",
"(",
"\"Cannot read md from archive in 'w' mode\"",
")"
] |
Get the metadata for this bucket:label identifier.
|
[
"Get",
"the",
"metadata",
"for",
"this",
"bucket",
":",
"label",
"identifier",
"."
] |
c110cbecd7d0ae7e877963914a1a5af030cd6d45
|
https://github.com/okfn/ofs/blob/c110cbecd7d0ae7e877963914a1a5af030cd6d45/ofs/local/zipstore.py#L265-L281
|
train
|
okfn/ofs
|
ofs/local/zipstore.py
|
ZOFS.update_metadata
|
def update_metadata(self, bucket, label, params):
'''Update the metadata with the provided dictionary of params.
:param parmams: dictionary of key values (json serializable).
'''
if self.mode !="r":
try:
payload = self._get_bucket_md(bucket)
except OFSFileNotFound:
# No MD found... create it
payload = {}
for l in self.list_labels(bucket):
payload[l] = {}
payload[l]['_label'] = l
if not self.quiet:
print("Had to create md file for %s" % bucket)
except OFSException as e:
raise OFSException(e)
if not label in payload:
payload[label] = {}
payload[label].update(params)
self.put_stream(bucket, MD_FILE, json.dumps(payload).encode('utf-8'), params={}, replace=True, add_md=False)
return payload[label]
else:
raise OFSException("Cannot update MD in archive in 'r' mode")
|
python
|
def update_metadata(self, bucket, label, params):
'''Update the metadata with the provided dictionary of params.
:param parmams: dictionary of key values (json serializable).
'''
if self.mode !="r":
try:
payload = self._get_bucket_md(bucket)
except OFSFileNotFound:
# No MD found... create it
payload = {}
for l in self.list_labels(bucket):
payload[l] = {}
payload[l]['_label'] = l
if not self.quiet:
print("Had to create md file for %s" % bucket)
except OFSException as e:
raise OFSException(e)
if not label in payload:
payload[label] = {}
payload[label].update(params)
self.put_stream(bucket, MD_FILE, json.dumps(payload).encode('utf-8'), params={}, replace=True, add_md=False)
return payload[label]
else:
raise OFSException("Cannot update MD in archive in 'r' mode")
|
[
"def",
"update_metadata",
"(",
"self",
",",
"bucket",
",",
"label",
",",
"params",
")",
":",
"if",
"self",
".",
"mode",
"!=",
"\"r\"",
":",
"try",
":",
"payload",
"=",
"self",
".",
"_get_bucket_md",
"(",
"bucket",
")",
"except",
"OFSFileNotFound",
":",
"# No MD found... create it",
"payload",
"=",
"{",
"}",
"for",
"l",
"in",
"self",
".",
"list_labels",
"(",
"bucket",
")",
":",
"payload",
"[",
"l",
"]",
"=",
"{",
"}",
"payload",
"[",
"l",
"]",
"[",
"'_label'",
"]",
"=",
"l",
"if",
"not",
"self",
".",
"quiet",
":",
"print",
"(",
"\"Had to create md file for %s\"",
"%",
"bucket",
")",
"except",
"OFSException",
"as",
"e",
":",
"raise",
"OFSException",
"(",
"e",
")",
"if",
"not",
"label",
"in",
"payload",
":",
"payload",
"[",
"label",
"]",
"=",
"{",
"}",
"payload",
"[",
"label",
"]",
".",
"update",
"(",
"params",
")",
"self",
".",
"put_stream",
"(",
"bucket",
",",
"MD_FILE",
",",
"json",
".",
"dumps",
"(",
"payload",
")",
".",
"encode",
"(",
"'utf-8'",
")",
",",
"params",
"=",
"{",
"}",
",",
"replace",
"=",
"True",
",",
"add_md",
"=",
"False",
")",
"return",
"payload",
"[",
"label",
"]",
"else",
":",
"raise",
"OFSException",
"(",
"\"Cannot update MD in archive in 'r' mode\"",
")"
] |
Update the metadata with the provided dictionary of params.
:param parmams: dictionary of key values (json serializable).
|
[
"Update",
"the",
"metadata",
"with",
"the",
"provided",
"dictionary",
"of",
"params",
"."
] |
c110cbecd7d0ae7e877963914a1a5af030cd6d45
|
https://github.com/okfn/ofs/blob/c110cbecd7d0ae7e877963914a1a5af030cd6d45/ofs/local/zipstore.py#L283-L307
|
train
|
okfn/ofs
|
ofs/local/zipstore.py
|
ZOFS.del_metadata_keys
|
def del_metadata_keys(self, bucket, label, keys):
'''Delete the metadata corresponding to the specified keys.
'''
if self.mode !="r":
try:
payload = self._get_bucket_md(bucket)
except OFSFileNotFound:
# No MD found...
raise OFSFileNotFound("Couldn't find a md file for %s bucket" % bucket)
except OFSException as e:
raise OFSException(e)
if payload.has_key(label):
for key in [x for x in keys if payload[label].has_key(x)]:
del payload[label][key]
self.put_stream(bucket, MD_FILE, json.dumps(payload), params={}, replace=True, add_md=False)
else:
raise OFSException("Cannot update MD in archive in 'r' mode")
|
python
|
def del_metadata_keys(self, bucket, label, keys):
'''Delete the metadata corresponding to the specified keys.
'''
if self.mode !="r":
try:
payload = self._get_bucket_md(bucket)
except OFSFileNotFound:
# No MD found...
raise OFSFileNotFound("Couldn't find a md file for %s bucket" % bucket)
except OFSException as e:
raise OFSException(e)
if payload.has_key(label):
for key in [x for x in keys if payload[label].has_key(x)]:
del payload[label][key]
self.put_stream(bucket, MD_FILE, json.dumps(payload), params={}, replace=True, add_md=False)
else:
raise OFSException("Cannot update MD in archive in 'r' mode")
|
[
"def",
"del_metadata_keys",
"(",
"self",
",",
"bucket",
",",
"label",
",",
"keys",
")",
":",
"if",
"self",
".",
"mode",
"!=",
"\"r\"",
":",
"try",
":",
"payload",
"=",
"self",
".",
"_get_bucket_md",
"(",
"bucket",
")",
"except",
"OFSFileNotFound",
":",
"# No MD found...",
"raise",
"OFSFileNotFound",
"(",
"\"Couldn't find a md file for %s bucket\"",
"%",
"bucket",
")",
"except",
"OFSException",
"as",
"e",
":",
"raise",
"OFSException",
"(",
"e",
")",
"if",
"payload",
".",
"has_key",
"(",
"label",
")",
":",
"for",
"key",
"in",
"[",
"x",
"for",
"x",
"in",
"keys",
"if",
"payload",
"[",
"label",
"]",
".",
"has_key",
"(",
"x",
")",
"]",
":",
"del",
"payload",
"[",
"label",
"]",
"[",
"key",
"]",
"self",
".",
"put_stream",
"(",
"bucket",
",",
"MD_FILE",
",",
"json",
".",
"dumps",
"(",
"payload",
")",
",",
"params",
"=",
"{",
"}",
",",
"replace",
"=",
"True",
",",
"add_md",
"=",
"False",
")",
"else",
":",
"raise",
"OFSException",
"(",
"\"Cannot update MD in archive in 'r' mode\"",
")"
] |
Delete the metadata corresponding to the specified keys.
|
[
"Delete",
"the",
"metadata",
"corresponding",
"to",
"the",
"specified",
"keys",
"."
] |
c110cbecd7d0ae7e877963914a1a5af030cd6d45
|
https://github.com/okfn/ofs/blob/c110cbecd7d0ae7e877963914a1a5af030cd6d45/ofs/local/zipstore.py#L309-L325
|
train
|
tanwanirahul/django-batch-requests
|
batch_requests/views.py
|
get_response
|
def get_response(wsgi_request):
'''
Given a WSGI request, makes a call to a corresponding view
function and returns the response.
'''
service_start_time = datetime.now()
# Get the view / handler for this request
view, args, kwargs = resolve(wsgi_request.path_info)
kwargs.update({"request": wsgi_request})
# Let the view do his task.
try:
resp = view(*args, **kwargs)
except Exception as exc:
resp = HttpResponseServerError(content=exc.message)
headers = dict(resp._headers.values())
# Convert HTTP response into simple dict type.
d_resp = {"status_code": resp.status_code, "reason_phrase": resp.reason_phrase,
"headers": headers}
try:
d_resp.update({"body": resp.content})
except ContentNotRenderedError:
resp.render()
d_resp.update({"body": resp.content})
# Check if we need to send across the duration header.
if _settings.ADD_DURATION_HEADER:
d_resp['headers'].update({_settings.DURATION_HEADER_NAME: (datetime.now() - service_start_time).seconds})
return d_resp
|
python
|
def get_response(wsgi_request):
'''
Given a WSGI request, makes a call to a corresponding view
function and returns the response.
'''
service_start_time = datetime.now()
# Get the view / handler for this request
view, args, kwargs = resolve(wsgi_request.path_info)
kwargs.update({"request": wsgi_request})
# Let the view do his task.
try:
resp = view(*args, **kwargs)
except Exception as exc:
resp = HttpResponseServerError(content=exc.message)
headers = dict(resp._headers.values())
# Convert HTTP response into simple dict type.
d_resp = {"status_code": resp.status_code, "reason_phrase": resp.reason_phrase,
"headers": headers}
try:
d_resp.update({"body": resp.content})
except ContentNotRenderedError:
resp.render()
d_resp.update({"body": resp.content})
# Check if we need to send across the duration header.
if _settings.ADD_DURATION_HEADER:
d_resp['headers'].update({_settings.DURATION_HEADER_NAME: (datetime.now() - service_start_time).seconds})
return d_resp
|
[
"def",
"get_response",
"(",
"wsgi_request",
")",
":",
"service_start_time",
"=",
"datetime",
".",
"now",
"(",
")",
"# Get the view / handler for this request",
"view",
",",
"args",
",",
"kwargs",
"=",
"resolve",
"(",
"wsgi_request",
".",
"path_info",
")",
"kwargs",
".",
"update",
"(",
"{",
"\"request\"",
":",
"wsgi_request",
"}",
")",
"# Let the view do his task.",
"try",
":",
"resp",
"=",
"view",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"except",
"Exception",
"as",
"exc",
":",
"resp",
"=",
"HttpResponseServerError",
"(",
"content",
"=",
"exc",
".",
"message",
")",
"headers",
"=",
"dict",
"(",
"resp",
".",
"_headers",
".",
"values",
"(",
")",
")",
"# Convert HTTP response into simple dict type.",
"d_resp",
"=",
"{",
"\"status_code\"",
":",
"resp",
".",
"status_code",
",",
"\"reason_phrase\"",
":",
"resp",
".",
"reason_phrase",
",",
"\"headers\"",
":",
"headers",
"}",
"try",
":",
"d_resp",
".",
"update",
"(",
"{",
"\"body\"",
":",
"resp",
".",
"content",
"}",
")",
"except",
"ContentNotRenderedError",
":",
"resp",
".",
"render",
"(",
")",
"d_resp",
".",
"update",
"(",
"{",
"\"body\"",
":",
"resp",
".",
"content",
"}",
")",
"# Check if we need to send across the duration header.",
"if",
"_settings",
".",
"ADD_DURATION_HEADER",
":",
"d_resp",
"[",
"'headers'",
"]",
".",
"update",
"(",
"{",
"_settings",
".",
"DURATION_HEADER_NAME",
":",
"(",
"datetime",
".",
"now",
"(",
")",
"-",
"service_start_time",
")",
".",
"seconds",
"}",
")",
"return",
"d_resp"
] |
Given a WSGI request, makes a call to a corresponding view
function and returns the response.
|
[
"Given",
"a",
"WSGI",
"request",
"makes",
"a",
"call",
"to",
"a",
"corresponding",
"view",
"function",
"and",
"returns",
"the",
"response",
"."
] |
9c5afc42f7542f466247f4ffed9c44e1c49fa20d
|
https://github.com/tanwanirahul/django-batch-requests/blob/9c5afc42f7542f466247f4ffed9c44e1c49fa20d/batch_requests/views.py#L22-L53
|
train
|
tanwanirahul/django-batch-requests
|
batch_requests/views.py
|
get_wsgi_requests
|
def get_wsgi_requests(request):
'''
For the given batch request, extract the individual requests and create
WSGIRequest object for each.
'''
valid_http_methods = ["get", "post", "put", "patch", "delete", "head", "options", "connect", "trace"]
requests = json.loads(request.body)
if type(requests) not in (list, tuple):
raise BadBatchRequest("The body of batch request should always be list!")
# Max limit check.
no_requests = len(requests)
if no_requests > _settings.MAX_LIMIT:
raise BadBatchRequest("You can batch maximum of %d requests." % (_settings.MAX_LIMIT))
# We could mutate the current request with the respective parameters, but mutation is ghost in the dark,
# so lets avoid. Construct the new WSGI request object for each request.
def construct_wsgi_from_data(data):
'''
Given the data in the format of url, method, body and headers, construct a new
WSGIRequest object.
'''
url = data.get("url", None)
method = data.get("method", None)
if url is None or method is None:
raise BadBatchRequest("Request definition should have url, method defined.")
if method.lower() not in valid_http_methods:
raise BadBatchRequest("Invalid request method.")
body = data.get("body", "")
headers = data.get("headers", {})
return get_wsgi_request_object(request, method, url, headers, body)
return [construct_wsgi_from_data(data) for data in requests]
|
python
|
def get_wsgi_requests(request):
'''
For the given batch request, extract the individual requests and create
WSGIRequest object for each.
'''
valid_http_methods = ["get", "post", "put", "patch", "delete", "head", "options", "connect", "trace"]
requests = json.loads(request.body)
if type(requests) not in (list, tuple):
raise BadBatchRequest("The body of batch request should always be list!")
# Max limit check.
no_requests = len(requests)
if no_requests > _settings.MAX_LIMIT:
raise BadBatchRequest("You can batch maximum of %d requests." % (_settings.MAX_LIMIT))
# We could mutate the current request with the respective parameters, but mutation is ghost in the dark,
# so lets avoid. Construct the new WSGI request object for each request.
def construct_wsgi_from_data(data):
'''
Given the data in the format of url, method, body and headers, construct a new
WSGIRequest object.
'''
url = data.get("url", None)
method = data.get("method", None)
if url is None or method is None:
raise BadBatchRequest("Request definition should have url, method defined.")
if method.lower() not in valid_http_methods:
raise BadBatchRequest("Invalid request method.")
body = data.get("body", "")
headers = data.get("headers", {})
return get_wsgi_request_object(request, method, url, headers, body)
return [construct_wsgi_from_data(data) for data in requests]
|
[
"def",
"get_wsgi_requests",
"(",
"request",
")",
":",
"valid_http_methods",
"=",
"[",
"\"get\"",
",",
"\"post\"",
",",
"\"put\"",
",",
"\"patch\"",
",",
"\"delete\"",
",",
"\"head\"",
",",
"\"options\"",
",",
"\"connect\"",
",",
"\"trace\"",
"]",
"requests",
"=",
"json",
".",
"loads",
"(",
"request",
".",
"body",
")",
"if",
"type",
"(",
"requests",
")",
"not",
"in",
"(",
"list",
",",
"tuple",
")",
":",
"raise",
"BadBatchRequest",
"(",
"\"The body of batch request should always be list!\"",
")",
"# Max limit check.",
"no_requests",
"=",
"len",
"(",
"requests",
")",
"if",
"no_requests",
">",
"_settings",
".",
"MAX_LIMIT",
":",
"raise",
"BadBatchRequest",
"(",
"\"You can batch maximum of %d requests.\"",
"%",
"(",
"_settings",
".",
"MAX_LIMIT",
")",
")",
"# We could mutate the current request with the respective parameters, but mutation is ghost in the dark,",
"# so lets avoid. Construct the new WSGI request object for each request.",
"def",
"construct_wsgi_from_data",
"(",
"data",
")",
":",
"'''\n Given the data in the format of url, method, body and headers, construct a new\n WSGIRequest object.\n '''",
"url",
"=",
"data",
".",
"get",
"(",
"\"url\"",
",",
"None",
")",
"method",
"=",
"data",
".",
"get",
"(",
"\"method\"",
",",
"None",
")",
"if",
"url",
"is",
"None",
"or",
"method",
"is",
"None",
":",
"raise",
"BadBatchRequest",
"(",
"\"Request definition should have url, method defined.\"",
")",
"if",
"method",
".",
"lower",
"(",
")",
"not",
"in",
"valid_http_methods",
":",
"raise",
"BadBatchRequest",
"(",
"\"Invalid request method.\"",
")",
"body",
"=",
"data",
".",
"get",
"(",
"\"body\"",
",",
"\"\"",
")",
"headers",
"=",
"data",
".",
"get",
"(",
"\"headers\"",
",",
"{",
"}",
")",
"return",
"get_wsgi_request_object",
"(",
"request",
",",
"method",
",",
"url",
",",
"headers",
",",
"body",
")",
"return",
"[",
"construct_wsgi_from_data",
"(",
"data",
")",
"for",
"data",
"in",
"requests",
"]"
] |
For the given batch request, extract the individual requests and create
WSGIRequest object for each.
|
[
"For",
"the",
"given",
"batch",
"request",
"extract",
"the",
"individual",
"requests",
"and",
"create",
"WSGIRequest",
"object",
"for",
"each",
"."
] |
9c5afc42f7542f466247f4ffed9c44e1c49fa20d
|
https://github.com/tanwanirahul/django-batch-requests/blob/9c5afc42f7542f466247f4ffed9c44e1c49fa20d/batch_requests/views.py#L56-L94
|
train
|
tanwanirahul/django-batch-requests
|
batch_requests/views.py
|
handle_batch_requests
|
def handle_batch_requests(request, *args, **kwargs):
'''
A view function to handle the overall processing of batch requests.
'''
batch_start_time = datetime.now()
try:
# Get the Individual WSGI requests.
wsgi_requests = get_wsgi_requests(request)
except BadBatchRequest as brx:
return HttpResponseBadRequest(content=brx.message)
# Fire these WSGI requests, and collect the response for the same.
response = execute_requests(wsgi_requests)
# Evrything's done, return the response.
resp = HttpResponse(
content=json.dumps(response), content_type="application/json")
if _settings.ADD_DURATION_HEADER:
resp.__setitem__(_settings.DURATION_HEADER_NAME, str((datetime.now() - batch_start_time).seconds))
return resp
|
python
|
def handle_batch_requests(request, *args, **kwargs):
'''
A view function to handle the overall processing of batch requests.
'''
batch_start_time = datetime.now()
try:
# Get the Individual WSGI requests.
wsgi_requests = get_wsgi_requests(request)
except BadBatchRequest as brx:
return HttpResponseBadRequest(content=brx.message)
# Fire these WSGI requests, and collect the response for the same.
response = execute_requests(wsgi_requests)
# Evrything's done, return the response.
resp = HttpResponse(
content=json.dumps(response), content_type="application/json")
if _settings.ADD_DURATION_HEADER:
resp.__setitem__(_settings.DURATION_HEADER_NAME, str((datetime.now() - batch_start_time).seconds))
return resp
|
[
"def",
"handle_batch_requests",
"(",
"request",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"batch_start_time",
"=",
"datetime",
".",
"now",
"(",
")",
"try",
":",
"# Get the Individual WSGI requests.",
"wsgi_requests",
"=",
"get_wsgi_requests",
"(",
"request",
")",
"except",
"BadBatchRequest",
"as",
"brx",
":",
"return",
"HttpResponseBadRequest",
"(",
"content",
"=",
"brx",
".",
"message",
")",
"# Fire these WSGI requests, and collect the response for the same.",
"response",
"=",
"execute_requests",
"(",
"wsgi_requests",
")",
"# Evrything's done, return the response.",
"resp",
"=",
"HttpResponse",
"(",
"content",
"=",
"json",
".",
"dumps",
"(",
"response",
")",
",",
"content_type",
"=",
"\"application/json\"",
")",
"if",
"_settings",
".",
"ADD_DURATION_HEADER",
":",
"resp",
".",
"__setitem__",
"(",
"_settings",
".",
"DURATION_HEADER_NAME",
",",
"str",
"(",
"(",
"datetime",
".",
"now",
"(",
")",
"-",
"batch_start_time",
")",
".",
"seconds",
")",
")",
"return",
"resp"
] |
A view function to handle the overall processing of batch requests.
|
[
"A",
"view",
"function",
"to",
"handle",
"the",
"overall",
"processing",
"of",
"batch",
"requests",
"."
] |
9c5afc42f7542f466247f4ffed9c44e1c49fa20d
|
https://github.com/tanwanirahul/django-batch-requests/blob/9c5afc42f7542f466247f4ffed9c44e1c49fa20d/batch_requests/views.py#L108-L128
|
train
|
KKBOX/OpenAPI-Python
|
kkbox_developer_sdk/search_fetcher.py
|
KKBOXSearchFetcher.search
|
def search(self, keyword, types=[], terr=KKBOXTerritory.TAIWAN):
'''
Searches within KKBOX's database.
:param keyword: the keyword.
:type keyword: str
:param types: the search types.
:return: list
:param terr: the current territory.
:return: API response.
:rtype: dict
See `https://docs-en.kkbox.codes/v1.1/reference#search_1`.
'''
url = 'https://api.kkbox.com/v1.1/search'
url += '?' + url_parse.urlencode({'q': keyword, 'territory': terr})
if len(types) > 0:
url += '&type=' + ','.join(types)
return self.http._post_data(url, None, self.http._headers_with_access_token())
|
python
|
def search(self, keyword, types=[], terr=KKBOXTerritory.TAIWAN):
'''
Searches within KKBOX's database.
:param keyword: the keyword.
:type keyword: str
:param types: the search types.
:return: list
:param terr: the current territory.
:return: API response.
:rtype: dict
See `https://docs-en.kkbox.codes/v1.1/reference#search_1`.
'''
url = 'https://api.kkbox.com/v1.1/search'
url += '?' + url_parse.urlencode({'q': keyword, 'territory': terr})
if len(types) > 0:
url += '&type=' + ','.join(types)
return self.http._post_data(url, None, self.http._headers_with_access_token())
|
[
"def",
"search",
"(",
"self",
",",
"keyword",
",",
"types",
"=",
"[",
"]",
",",
"terr",
"=",
"KKBOXTerritory",
".",
"TAIWAN",
")",
":",
"url",
"=",
"'https://api.kkbox.com/v1.1/search'",
"url",
"+=",
"'?'",
"+",
"url_parse",
".",
"urlencode",
"(",
"{",
"'q'",
":",
"keyword",
",",
"'territory'",
":",
"terr",
"}",
")",
"if",
"len",
"(",
"types",
")",
">",
"0",
":",
"url",
"+=",
"'&type='",
"+",
"','",
".",
"join",
"(",
"types",
")",
"return",
"self",
".",
"http",
".",
"_post_data",
"(",
"url",
",",
"None",
",",
"self",
".",
"http",
".",
"_headers_with_access_token",
"(",
")",
")"
] |
Searches within KKBOX's database.
:param keyword: the keyword.
:type keyword: str
:param types: the search types.
:return: list
:param terr: the current territory.
:return: API response.
:rtype: dict
See `https://docs-en.kkbox.codes/v1.1/reference#search_1`.
|
[
"Searches",
"within",
"KKBOX",
"s",
"database",
"."
] |
77aa22fd300ed987d5507a5b66b149edcd28047d
|
https://github.com/KKBOX/OpenAPI-Python/blob/77aa22fd300ed987d5507a5b66b149edcd28047d/kkbox_developer_sdk/search_fetcher.py#L24-L42
|
train
|
UpCloudLtd/upcloud-python-api
|
upcloud_api/ip_address.py
|
IPAddress.save
|
def save(self):
"""
IPAddress can only change its PTR record. Saves the current state, PUT /ip_address/uuid.
"""
body = {'ip_address': {'ptr_record': self.ptr_record}}
data = self.cloud_manager.request('PUT', '/ip_address/' + self.address, body)
self._reset(**data['ip_address'])
|
python
|
def save(self):
"""
IPAddress can only change its PTR record. Saves the current state, PUT /ip_address/uuid.
"""
body = {'ip_address': {'ptr_record': self.ptr_record}}
data = self.cloud_manager.request('PUT', '/ip_address/' + self.address, body)
self._reset(**data['ip_address'])
|
[
"def",
"save",
"(",
"self",
")",
":",
"body",
"=",
"{",
"'ip_address'",
":",
"{",
"'ptr_record'",
":",
"self",
".",
"ptr_record",
"}",
"}",
"data",
"=",
"self",
".",
"cloud_manager",
".",
"request",
"(",
"'PUT'",
",",
"'/ip_address/'",
"+",
"self",
".",
"address",
",",
"body",
")",
"self",
".",
"_reset",
"(",
"*",
"*",
"data",
"[",
"'ip_address'",
"]",
")"
] |
IPAddress can only change its PTR record. Saves the current state, PUT /ip_address/uuid.
|
[
"IPAddress",
"can",
"only",
"change",
"its",
"PTR",
"record",
".",
"Saves",
"the",
"current",
"state",
"PUT",
"/",
"ip_address",
"/",
"uuid",
"."
] |
954b0ad7c4b932b2be31a95d88975f6b0eeac8ed
|
https://github.com/UpCloudLtd/upcloud-python-api/blob/954b0ad7c4b932b2be31a95d88975f6b0eeac8ed/upcloud_api/ip_address.py#L32-L38
|
train
|
UpCloudLtd/upcloud-python-api
|
upcloud_api/ip_address.py
|
IPAddress._create_ip_address_objs
|
def _create_ip_address_objs(ip_addresses, cloud_manager):
"""
Create IPAddress objects from API response data.
Also associates CloudManager with the objects.
"""
# ip-addresses might be provided as a flat array or as a following dict:
# {'ip_addresses': {'ip_address': [...]}} || {'ip_address': [...]}
if 'ip_addresses' in ip_addresses:
ip_addresses = ip_addresses['ip_addresses']
if 'ip_address' in ip_addresses:
ip_addresses = ip_addresses['ip_address']
return [
IPAddress(cloud_manager=cloud_manager, **ip_addr)
for ip_addr in ip_addresses
]
|
python
|
def _create_ip_address_objs(ip_addresses, cloud_manager):
"""
Create IPAddress objects from API response data.
Also associates CloudManager with the objects.
"""
# ip-addresses might be provided as a flat array or as a following dict:
# {'ip_addresses': {'ip_address': [...]}} || {'ip_address': [...]}
if 'ip_addresses' in ip_addresses:
ip_addresses = ip_addresses['ip_addresses']
if 'ip_address' in ip_addresses:
ip_addresses = ip_addresses['ip_address']
return [
IPAddress(cloud_manager=cloud_manager, **ip_addr)
for ip_addr in ip_addresses
]
|
[
"def",
"_create_ip_address_objs",
"(",
"ip_addresses",
",",
"cloud_manager",
")",
":",
"# ip-addresses might be provided as a flat array or as a following dict:",
"# {'ip_addresses': {'ip_address': [...]}} || {'ip_address': [...]}",
"if",
"'ip_addresses'",
"in",
"ip_addresses",
":",
"ip_addresses",
"=",
"ip_addresses",
"[",
"'ip_addresses'",
"]",
"if",
"'ip_address'",
"in",
"ip_addresses",
":",
"ip_addresses",
"=",
"ip_addresses",
"[",
"'ip_address'",
"]",
"return",
"[",
"IPAddress",
"(",
"cloud_manager",
"=",
"cloud_manager",
",",
"*",
"*",
"ip_addr",
")",
"for",
"ip_addr",
"in",
"ip_addresses",
"]"
] |
Create IPAddress objects from API response data.
Also associates CloudManager with the objects.
|
[
"Create",
"IPAddress",
"objects",
"from",
"API",
"response",
"data",
".",
"Also",
"associates",
"CloudManager",
"with",
"the",
"objects",
"."
] |
954b0ad7c4b932b2be31a95d88975f6b0eeac8ed
|
https://github.com/UpCloudLtd/upcloud-python-api/blob/954b0ad7c4b932b2be31a95d88975f6b0eeac8ed/upcloud_api/ip_address.py#L54-L71
|
train
|
UpCloudLtd/upcloud-python-api
|
upcloud_api/tag.py
|
Tag._reset
|
def _reset(self, **kwargs):
"""
Reset the objects attributes.
Accepts servers as either unflattened or flattened UUID strings or Server objects.
"""
super(Tag, self)._reset(**kwargs)
# backup name for changing it (look: Tag.save)
self._api_name = self.name
# flatten { servers: { server: [] } }
if 'server' in self.servers:
self.servers = kwargs['servers']['server']
# convert UUIDs into server objects
if self.servers and isinstance(self.servers[0], six.string_types):
self.servers = [Server(uuid=server, populated=False) for server in self.servers]
|
python
|
def _reset(self, **kwargs):
"""
Reset the objects attributes.
Accepts servers as either unflattened or flattened UUID strings or Server objects.
"""
super(Tag, self)._reset(**kwargs)
# backup name for changing it (look: Tag.save)
self._api_name = self.name
# flatten { servers: { server: [] } }
if 'server' in self.servers:
self.servers = kwargs['servers']['server']
# convert UUIDs into server objects
if self.servers and isinstance(self.servers[0], six.string_types):
self.servers = [Server(uuid=server, populated=False) for server in self.servers]
|
[
"def",
"_reset",
"(",
"self",
",",
"*",
"*",
"kwargs",
")",
":",
"super",
"(",
"Tag",
",",
"self",
")",
".",
"_reset",
"(",
"*",
"*",
"kwargs",
")",
"# backup name for changing it (look: Tag.save)",
"self",
".",
"_api_name",
"=",
"self",
".",
"name",
"# flatten { servers: { server: [] } }",
"if",
"'server'",
"in",
"self",
".",
"servers",
":",
"self",
".",
"servers",
"=",
"kwargs",
"[",
"'servers'",
"]",
"[",
"'server'",
"]",
"# convert UUIDs into server objects",
"if",
"self",
".",
"servers",
"and",
"isinstance",
"(",
"self",
".",
"servers",
"[",
"0",
"]",
",",
"six",
".",
"string_types",
")",
":",
"self",
".",
"servers",
"=",
"[",
"Server",
"(",
"uuid",
"=",
"server",
",",
"populated",
"=",
"False",
")",
"for",
"server",
"in",
"self",
".",
"servers",
"]"
] |
Reset the objects attributes.
Accepts servers as either unflattened or flattened UUID strings or Server objects.
|
[
"Reset",
"the",
"objects",
"attributes",
"."
] |
954b0ad7c4b932b2be31a95d88975f6b0eeac8ed
|
https://github.com/UpCloudLtd/upcloud-python-api/blob/954b0ad7c4b932b2be31a95d88975f6b0eeac8ed/upcloud_api/tag.py#L30-L47
|
train
|
csirtgadgets/csirtgsdk-py
|
csirtgsdk/client/http.py
|
HTTP._get
|
def _get(self, uri, params={}):
"""
HTTP GET function
:param uri: REST endpoint
:param params: optional HTTP params to pass to the endpoint
:return: list of results (usually a list of dicts)
Example:
ret = cli.get('/search', params={ 'q': 'example.org' })
"""
if not uri.startswith(self.remote):
uri = '{}{}'.format(self.remote, uri)
return self._make_request(uri, params)
|
python
|
def _get(self, uri, params={}):
"""
HTTP GET function
:param uri: REST endpoint
:param params: optional HTTP params to pass to the endpoint
:return: list of results (usually a list of dicts)
Example:
ret = cli.get('/search', params={ 'q': 'example.org' })
"""
if not uri.startswith(self.remote):
uri = '{}{}'.format(self.remote, uri)
return self._make_request(uri, params)
|
[
"def",
"_get",
"(",
"self",
",",
"uri",
",",
"params",
"=",
"{",
"}",
")",
":",
"if",
"not",
"uri",
".",
"startswith",
"(",
"self",
".",
"remote",
")",
":",
"uri",
"=",
"'{}{}'",
".",
"format",
"(",
"self",
".",
"remote",
",",
"uri",
")",
"return",
"self",
".",
"_make_request",
"(",
"uri",
",",
"params",
")"
] |
HTTP GET function
:param uri: REST endpoint
:param params: optional HTTP params to pass to the endpoint
:return: list of results (usually a list of dicts)
Example:
ret = cli.get('/search', params={ 'q': 'example.org' })
|
[
"HTTP",
"GET",
"function"
] |
5a7ed9c5e6fa27170366ecbdef710dc80d537dc2
|
https://github.com/csirtgadgets/csirtgsdk-py/blob/5a7ed9c5e6fa27170366ecbdef710dc80d537dc2/csirtgsdk/client/http.py#L102-L117
|
train
|
csirtgadgets/csirtgsdk-py
|
csirtgsdk/client/http.py
|
HTTP._post
|
def _post(self, uri, data):
"""
HTTP POST function
:param uri: REST endpoint to POST to
:param data: list of dicts to be passed to the endpoint
:return: list of dicts, usually will be a list of objects or id's
Example:
ret = cli.post('/indicators', { 'indicator': 'example.com' })
"""
if not uri.startswith(self.remote):
uri = '{}/{}'.format(self.remote, uri)
self.logger.debug(uri)
return self._make_request(uri, data=data)
|
python
|
def _post(self, uri, data):
"""
HTTP POST function
:param uri: REST endpoint to POST to
:param data: list of dicts to be passed to the endpoint
:return: list of dicts, usually will be a list of objects or id's
Example:
ret = cli.post('/indicators', { 'indicator': 'example.com' })
"""
if not uri.startswith(self.remote):
uri = '{}/{}'.format(self.remote, uri)
self.logger.debug(uri)
return self._make_request(uri, data=data)
|
[
"def",
"_post",
"(",
"self",
",",
"uri",
",",
"data",
")",
":",
"if",
"not",
"uri",
".",
"startswith",
"(",
"self",
".",
"remote",
")",
":",
"uri",
"=",
"'{}/{}'",
".",
"format",
"(",
"self",
".",
"remote",
",",
"uri",
")",
"self",
".",
"logger",
".",
"debug",
"(",
"uri",
")",
"return",
"self",
".",
"_make_request",
"(",
"uri",
",",
"data",
"=",
"data",
")"
] |
HTTP POST function
:param uri: REST endpoint to POST to
:param data: list of dicts to be passed to the endpoint
:return: list of dicts, usually will be a list of objects or id's
Example:
ret = cli.post('/indicators', { 'indicator': 'example.com' })
|
[
"HTTP",
"POST",
"function"
] |
5a7ed9c5e6fa27170366ecbdef710dc80d537dc2
|
https://github.com/csirtgadgets/csirtgsdk-py/blob/5a7ed9c5e6fa27170366ecbdef710dc80d537dc2/csirtgsdk/client/http.py#L119-L135
|
train
|
UpCloudLtd/upcloud-python-api
|
upcloud_api/cloud_manager/server_mixin.py
|
ServerManager.get_servers
|
def get_servers(self, populate=False, tags_has_one=None, tags_has_all=None):
"""
Return a list of (populated or unpopulated) Server instances.
- populate = False (default) => 1 API request, returns unpopulated Server instances.
- populate = True => Does 1 + n API requests (n = # of servers),
returns populated Server instances.
New in 0.3.0: the list can be filtered with tags:
- tags_has_one: list of Tag objects or strings
returns servers that have at least one of the given tags
- tags_has_all: list of Tag objects or strings
returns servers that have all of the tags
"""
if tags_has_all and tags_has_one:
raise Exception('only one of (tags_has_all, tags_has_one) is allowed.')
request = '/server'
if tags_has_all:
tags_has_all = [str(tag) for tag in tags_has_all]
taglist = ':'.join(tags_has_all)
request = '/server/tag/{0}'.format(taglist)
if tags_has_one:
tags_has_one = [str(tag) for tag in tags_has_one]
taglist = ','.join(tags_has_one)
request = '/server/tag/{0}'.format(taglist)
servers = self.get_request(request)['servers']['server']
server_list = list()
for server in servers:
server_list.append(Server(server, cloud_manager=self))
if populate:
for server_instance in server_list:
server_instance.populate()
return server_list
|
python
|
def get_servers(self, populate=False, tags_has_one=None, tags_has_all=None):
"""
Return a list of (populated or unpopulated) Server instances.
- populate = False (default) => 1 API request, returns unpopulated Server instances.
- populate = True => Does 1 + n API requests (n = # of servers),
returns populated Server instances.
New in 0.3.0: the list can be filtered with tags:
- tags_has_one: list of Tag objects or strings
returns servers that have at least one of the given tags
- tags_has_all: list of Tag objects or strings
returns servers that have all of the tags
"""
if tags_has_all and tags_has_one:
raise Exception('only one of (tags_has_all, tags_has_one) is allowed.')
request = '/server'
if tags_has_all:
tags_has_all = [str(tag) for tag in tags_has_all]
taglist = ':'.join(tags_has_all)
request = '/server/tag/{0}'.format(taglist)
if tags_has_one:
tags_has_one = [str(tag) for tag in tags_has_one]
taglist = ','.join(tags_has_one)
request = '/server/tag/{0}'.format(taglist)
servers = self.get_request(request)['servers']['server']
server_list = list()
for server in servers:
server_list.append(Server(server, cloud_manager=self))
if populate:
for server_instance in server_list:
server_instance.populate()
return server_list
|
[
"def",
"get_servers",
"(",
"self",
",",
"populate",
"=",
"False",
",",
"tags_has_one",
"=",
"None",
",",
"tags_has_all",
"=",
"None",
")",
":",
"if",
"tags_has_all",
"and",
"tags_has_one",
":",
"raise",
"Exception",
"(",
"'only one of (tags_has_all, tags_has_one) is allowed.'",
")",
"request",
"=",
"'/server'",
"if",
"tags_has_all",
":",
"tags_has_all",
"=",
"[",
"str",
"(",
"tag",
")",
"for",
"tag",
"in",
"tags_has_all",
"]",
"taglist",
"=",
"':'",
".",
"join",
"(",
"tags_has_all",
")",
"request",
"=",
"'/server/tag/{0}'",
".",
"format",
"(",
"taglist",
")",
"if",
"tags_has_one",
":",
"tags_has_one",
"=",
"[",
"str",
"(",
"tag",
")",
"for",
"tag",
"in",
"tags_has_one",
"]",
"taglist",
"=",
"','",
".",
"join",
"(",
"tags_has_one",
")",
"request",
"=",
"'/server/tag/{0}'",
".",
"format",
"(",
"taglist",
")",
"servers",
"=",
"self",
".",
"get_request",
"(",
"request",
")",
"[",
"'servers'",
"]",
"[",
"'server'",
"]",
"server_list",
"=",
"list",
"(",
")",
"for",
"server",
"in",
"servers",
":",
"server_list",
".",
"append",
"(",
"Server",
"(",
"server",
",",
"cloud_manager",
"=",
"self",
")",
")",
"if",
"populate",
":",
"for",
"server_instance",
"in",
"server_list",
":",
"server_instance",
".",
"populate",
"(",
")",
"return",
"server_list"
] |
Return a list of (populated or unpopulated) Server instances.
- populate = False (default) => 1 API request, returns unpopulated Server instances.
- populate = True => Does 1 + n API requests (n = # of servers),
returns populated Server instances.
New in 0.3.0: the list can be filtered with tags:
- tags_has_one: list of Tag objects or strings
returns servers that have at least one of the given tags
- tags_has_all: list of Tag objects or strings
returns servers that have all of the tags
|
[
"Return",
"a",
"list",
"of",
"(",
"populated",
"or",
"unpopulated",
")",
"Server",
"instances",
"."
] |
954b0ad7c4b932b2be31a95d88975f6b0eeac8ed
|
https://github.com/UpCloudLtd/upcloud-python-api/blob/954b0ad7c4b932b2be31a95d88975f6b0eeac8ed/upcloud_api/cloud_manager/server_mixin.py#L15-L54
|
train
|
UpCloudLtd/upcloud-python-api
|
upcloud_api/cloud_manager/server_mixin.py
|
ServerManager.get_server
|
def get_server(self, UUID):
"""
Return a (populated) Server instance.
"""
server, IPAddresses, storages = self.get_server_data(UUID)
return Server(
server,
ip_addresses=IPAddresses,
storage_devices=storages,
populated=True,
cloud_manager=self
)
|
python
|
def get_server(self, UUID):
"""
Return a (populated) Server instance.
"""
server, IPAddresses, storages = self.get_server_data(UUID)
return Server(
server,
ip_addresses=IPAddresses,
storage_devices=storages,
populated=True,
cloud_manager=self
)
|
[
"def",
"get_server",
"(",
"self",
",",
"UUID",
")",
":",
"server",
",",
"IPAddresses",
",",
"storages",
"=",
"self",
".",
"get_server_data",
"(",
"UUID",
")",
"return",
"Server",
"(",
"server",
",",
"ip_addresses",
"=",
"IPAddresses",
",",
"storage_devices",
"=",
"storages",
",",
"populated",
"=",
"True",
",",
"cloud_manager",
"=",
"self",
")"
] |
Return a (populated) Server instance.
|
[
"Return",
"a",
"(",
"populated",
")",
"Server",
"instance",
"."
] |
954b0ad7c4b932b2be31a95d88975f6b0eeac8ed
|
https://github.com/UpCloudLtd/upcloud-python-api/blob/954b0ad7c4b932b2be31a95d88975f6b0eeac8ed/upcloud_api/cloud_manager/server_mixin.py#L56-L68
|
train
|
UpCloudLtd/upcloud-python-api
|
upcloud_api/cloud_manager/server_mixin.py
|
ServerManager.get_server_by_ip
|
def get_server_by_ip(self, ip_address):
"""
Return a (populated) Server instance by its IP.
Uses GET '/ip_address/x.x.x.x' to retrieve machine UUID using IP-address.
"""
data = self.get_request('/ip_address/{0}'.format(ip_address))
UUID = data['ip_address']['server']
return self.get_server(UUID)
|
python
|
def get_server_by_ip(self, ip_address):
"""
Return a (populated) Server instance by its IP.
Uses GET '/ip_address/x.x.x.x' to retrieve machine UUID using IP-address.
"""
data = self.get_request('/ip_address/{0}'.format(ip_address))
UUID = data['ip_address']['server']
return self.get_server(UUID)
|
[
"def",
"get_server_by_ip",
"(",
"self",
",",
"ip_address",
")",
":",
"data",
"=",
"self",
".",
"get_request",
"(",
"'/ip_address/{0}'",
".",
"format",
"(",
"ip_address",
")",
")",
"UUID",
"=",
"data",
"[",
"'ip_address'",
"]",
"[",
"'server'",
"]",
"return",
"self",
".",
"get_server",
"(",
"UUID",
")"
] |
Return a (populated) Server instance by its IP.
Uses GET '/ip_address/x.x.x.x' to retrieve machine UUID using IP-address.
|
[
"Return",
"a",
"(",
"populated",
")",
"Server",
"instance",
"by",
"its",
"IP",
"."
] |
954b0ad7c4b932b2be31a95d88975f6b0eeac8ed
|
https://github.com/UpCloudLtd/upcloud-python-api/blob/954b0ad7c4b932b2be31a95d88975f6b0eeac8ed/upcloud_api/cloud_manager/server_mixin.py#L70-L78
|
train
|
UpCloudLtd/upcloud-python-api
|
upcloud_api/cloud_manager/server_mixin.py
|
ServerManager.create_server
|
def create_server(self, server):
"""
Create a server and its storages based on a (locally created) Server object.
Populates the given Server instance with the API response.
0.3.0: also supports giving the entire POST body as a dict that is directly
serialised into JSON. Refer to the REST API documentation for correct format.
Example:
server1 = Server( core_number = 1,
memory_amount = 1024,
hostname = "my.example.1",
zone = ZONE.London,
storage_devices = [
Storage(os = "Ubuntu 14.04", size=10, tier=maxiops, title='The OS drive'),
Storage(size=10),
Storage()
title = "My Example Server"
])
manager.create_server(server1)
One storage should contain an OS. Otherwise storage fields are optional.
- size defaults to 10,
- title defaults to hostname + " OS disk" and hostname + " storage disk id"
(id is a running starting from 1)
- tier defaults to maxiops
- valid operating systems are:
"CentOS 6.5", "CentOS 7.0"
"Debian 7.8"
"Ubuntu 12.04", "Ubuntu 14.04"
"Windows 2003","Windows 2008" ,"Windows 2012"
"""
if isinstance(server, Server):
body = server.prepare_post_body()
else:
server = Server._create_server_obj(server, cloud_manager=self)
body = server.prepare_post_body()
res = self.post_request('/server', body)
server_to_return = server
server_to_return._reset(
res['server'],
cloud_manager=self,
populated=True
)
return server_to_return
|
python
|
def create_server(self, server):
"""
Create a server and its storages based on a (locally created) Server object.
Populates the given Server instance with the API response.
0.3.0: also supports giving the entire POST body as a dict that is directly
serialised into JSON. Refer to the REST API documentation for correct format.
Example:
server1 = Server( core_number = 1,
memory_amount = 1024,
hostname = "my.example.1",
zone = ZONE.London,
storage_devices = [
Storage(os = "Ubuntu 14.04", size=10, tier=maxiops, title='The OS drive'),
Storage(size=10),
Storage()
title = "My Example Server"
])
manager.create_server(server1)
One storage should contain an OS. Otherwise storage fields are optional.
- size defaults to 10,
- title defaults to hostname + " OS disk" and hostname + " storage disk id"
(id is a running starting from 1)
- tier defaults to maxiops
- valid operating systems are:
"CentOS 6.5", "CentOS 7.0"
"Debian 7.8"
"Ubuntu 12.04", "Ubuntu 14.04"
"Windows 2003","Windows 2008" ,"Windows 2012"
"""
if isinstance(server, Server):
body = server.prepare_post_body()
else:
server = Server._create_server_obj(server, cloud_manager=self)
body = server.prepare_post_body()
res = self.post_request('/server', body)
server_to_return = server
server_to_return._reset(
res['server'],
cloud_manager=self,
populated=True
)
return server_to_return
|
[
"def",
"create_server",
"(",
"self",
",",
"server",
")",
":",
"if",
"isinstance",
"(",
"server",
",",
"Server",
")",
":",
"body",
"=",
"server",
".",
"prepare_post_body",
"(",
")",
"else",
":",
"server",
"=",
"Server",
".",
"_create_server_obj",
"(",
"server",
",",
"cloud_manager",
"=",
"self",
")",
"body",
"=",
"server",
".",
"prepare_post_body",
"(",
")",
"res",
"=",
"self",
".",
"post_request",
"(",
"'/server'",
",",
"body",
")",
"server_to_return",
"=",
"server",
"server_to_return",
".",
"_reset",
"(",
"res",
"[",
"'server'",
"]",
",",
"cloud_manager",
"=",
"self",
",",
"populated",
"=",
"True",
")",
"return",
"server_to_return"
] |
Create a server and its storages based on a (locally created) Server object.
Populates the given Server instance with the API response.
0.3.0: also supports giving the entire POST body as a dict that is directly
serialised into JSON. Refer to the REST API documentation for correct format.
Example:
server1 = Server( core_number = 1,
memory_amount = 1024,
hostname = "my.example.1",
zone = ZONE.London,
storage_devices = [
Storage(os = "Ubuntu 14.04", size=10, tier=maxiops, title='The OS drive'),
Storage(size=10),
Storage()
title = "My Example Server"
])
manager.create_server(server1)
One storage should contain an OS. Otherwise storage fields are optional.
- size defaults to 10,
- title defaults to hostname + " OS disk" and hostname + " storage disk id"
(id is a running starting from 1)
- tier defaults to maxiops
- valid operating systems are:
"CentOS 6.5", "CentOS 7.0"
"Debian 7.8"
"Ubuntu 12.04", "Ubuntu 14.04"
"Windows 2003","Windows 2008" ,"Windows 2012"
|
[
"Create",
"a",
"server",
"and",
"its",
"storages",
"based",
"on",
"a",
"(",
"locally",
"created",
")",
"Server",
"object",
"."
] |
954b0ad7c4b932b2be31a95d88975f6b0eeac8ed
|
https://github.com/UpCloudLtd/upcloud-python-api/blob/954b0ad7c4b932b2be31a95d88975f6b0eeac8ed/upcloud_api/cloud_manager/server_mixin.py#L80-L127
|
train
|
UpCloudLtd/upcloud-python-api
|
upcloud_api/cloud_manager/server_mixin.py
|
ServerManager.modify_server
|
def modify_server(self, UUID, **kwargs):
"""
modify_server allows updating the server's updateable_fields.
Note: Server's IP-addresses and Storages are managed by their own add/remove methods.
"""
body = dict()
body['server'] = {}
for arg in kwargs:
if arg not in Server.updateable_fields:
Exception('{0} is not an updateable field'.format(arg))
body['server'][arg] = kwargs[arg]
res = self.request('PUT', '/server/{0}'.format(UUID), body)
server = res['server']
# Populate subobjects
IPAddresses = IPAddress._create_ip_address_objs(server.pop('ip_addresses'),
cloud_manager=self)
storages = Storage._create_storage_objs(server.pop('storage_devices'),
cloud_manager=self)
return Server(
server,
ip_addresses=IPAddresses,
storage_devices=storages,
populated=True,
cloud_manager=self
)
|
python
|
def modify_server(self, UUID, **kwargs):
"""
modify_server allows updating the server's updateable_fields.
Note: Server's IP-addresses and Storages are managed by their own add/remove methods.
"""
body = dict()
body['server'] = {}
for arg in kwargs:
if arg not in Server.updateable_fields:
Exception('{0} is not an updateable field'.format(arg))
body['server'][arg] = kwargs[arg]
res = self.request('PUT', '/server/{0}'.format(UUID), body)
server = res['server']
# Populate subobjects
IPAddresses = IPAddress._create_ip_address_objs(server.pop('ip_addresses'),
cloud_manager=self)
storages = Storage._create_storage_objs(server.pop('storage_devices'),
cloud_manager=self)
return Server(
server,
ip_addresses=IPAddresses,
storage_devices=storages,
populated=True,
cloud_manager=self
)
|
[
"def",
"modify_server",
"(",
"self",
",",
"UUID",
",",
"*",
"*",
"kwargs",
")",
":",
"body",
"=",
"dict",
"(",
")",
"body",
"[",
"'server'",
"]",
"=",
"{",
"}",
"for",
"arg",
"in",
"kwargs",
":",
"if",
"arg",
"not",
"in",
"Server",
".",
"updateable_fields",
":",
"Exception",
"(",
"'{0} is not an updateable field'",
".",
"format",
"(",
"arg",
")",
")",
"body",
"[",
"'server'",
"]",
"[",
"arg",
"]",
"=",
"kwargs",
"[",
"arg",
"]",
"res",
"=",
"self",
".",
"request",
"(",
"'PUT'",
",",
"'/server/{0}'",
".",
"format",
"(",
"UUID",
")",
",",
"body",
")",
"server",
"=",
"res",
"[",
"'server'",
"]",
"# Populate subobjects",
"IPAddresses",
"=",
"IPAddress",
".",
"_create_ip_address_objs",
"(",
"server",
".",
"pop",
"(",
"'ip_addresses'",
")",
",",
"cloud_manager",
"=",
"self",
")",
"storages",
"=",
"Storage",
".",
"_create_storage_objs",
"(",
"server",
".",
"pop",
"(",
"'storage_devices'",
")",
",",
"cloud_manager",
"=",
"self",
")",
"return",
"Server",
"(",
"server",
",",
"ip_addresses",
"=",
"IPAddresses",
",",
"storage_devices",
"=",
"storages",
",",
"populated",
"=",
"True",
",",
"cloud_manager",
"=",
"self",
")"
] |
modify_server allows updating the server's updateable_fields.
Note: Server's IP-addresses and Storages are managed by their own add/remove methods.
|
[
"modify_server",
"allows",
"updating",
"the",
"server",
"s",
"updateable_fields",
"."
] |
954b0ad7c4b932b2be31a95d88975f6b0eeac8ed
|
https://github.com/UpCloudLtd/upcloud-python-api/blob/954b0ad7c4b932b2be31a95d88975f6b0eeac8ed/upcloud_api/cloud_manager/server_mixin.py#L129-L158
|
train
|
UpCloudLtd/upcloud-python-api
|
upcloud_api/cloud_manager/server_mixin.py
|
ServerManager.get_server_data
|
def get_server_data(self, UUID):
"""
Return '/server/uuid' data in Python dict.
Creates object representations of any IP-address and Storage.
"""
data = self.get_request('/server/{0}'.format(UUID))
server = data['server']
# Populate subobjects
IPAddresses = IPAddress._create_ip_address_objs(server.pop('ip_addresses'),
cloud_manager=self)
storages = Storage._create_storage_objs(server.pop('storage_devices'),
cloud_manager=self)
return server, IPAddresses, storages
|
python
|
def get_server_data(self, UUID):
"""
Return '/server/uuid' data in Python dict.
Creates object representations of any IP-address and Storage.
"""
data = self.get_request('/server/{0}'.format(UUID))
server = data['server']
# Populate subobjects
IPAddresses = IPAddress._create_ip_address_objs(server.pop('ip_addresses'),
cloud_manager=self)
storages = Storage._create_storage_objs(server.pop('storage_devices'),
cloud_manager=self)
return server, IPAddresses, storages
|
[
"def",
"get_server_data",
"(",
"self",
",",
"UUID",
")",
":",
"data",
"=",
"self",
".",
"get_request",
"(",
"'/server/{0}'",
".",
"format",
"(",
"UUID",
")",
")",
"server",
"=",
"data",
"[",
"'server'",
"]",
"# Populate subobjects",
"IPAddresses",
"=",
"IPAddress",
".",
"_create_ip_address_objs",
"(",
"server",
".",
"pop",
"(",
"'ip_addresses'",
")",
",",
"cloud_manager",
"=",
"self",
")",
"storages",
"=",
"Storage",
".",
"_create_storage_objs",
"(",
"server",
".",
"pop",
"(",
"'storage_devices'",
")",
",",
"cloud_manager",
"=",
"self",
")",
"return",
"server",
",",
"IPAddresses",
",",
"storages"
] |
Return '/server/uuid' data in Python dict.
Creates object representations of any IP-address and Storage.
|
[
"Return",
"/",
"server",
"/",
"uuid",
"data",
"in",
"Python",
"dict",
"."
] |
954b0ad7c4b932b2be31a95d88975f6b0eeac8ed
|
https://github.com/UpCloudLtd/upcloud-python-api/blob/954b0ad7c4b932b2be31a95d88975f6b0eeac8ed/upcloud_api/cloud_manager/server_mixin.py#L170-L186
|
train
|
csirtgadgets/csirtgsdk-py
|
csirtgsdk/__init__.py
|
feed
|
def feed(f, limit=25):
"""
Pull a feed
:param f: feed name (eg: csirtgadgetes/correlated)
:param limit: return value limit (default 25)
:return: Feed dict
"""
if '/' not in f:
raise ValueError('feed name must be formatted like: '
'csirtgadgets/scanners')
user, f = f.split('/')
return Feed().show(user, f, limit=limit)
|
python
|
def feed(f, limit=25):
"""
Pull a feed
:param f: feed name (eg: csirtgadgetes/correlated)
:param limit: return value limit (default 25)
:return: Feed dict
"""
if '/' not in f:
raise ValueError('feed name must be formatted like: '
'csirtgadgets/scanners')
user, f = f.split('/')
return Feed().show(user, f, limit=limit)
|
[
"def",
"feed",
"(",
"f",
",",
"limit",
"=",
"25",
")",
":",
"if",
"'/'",
"not",
"in",
"f",
":",
"raise",
"ValueError",
"(",
"'feed name must be formatted like: '",
"'csirtgadgets/scanners'",
")",
"user",
",",
"f",
"=",
"f",
".",
"split",
"(",
"'/'",
")",
"return",
"Feed",
"(",
")",
".",
"show",
"(",
"user",
",",
"f",
",",
"limit",
"=",
"limit",
")"
] |
Pull a feed
:param f: feed name (eg: csirtgadgetes/correlated)
:param limit: return value limit (default 25)
:return: Feed dict
|
[
"Pull",
"a",
"feed",
":",
"param",
"f",
":",
"feed",
"name",
"(",
"eg",
":",
"csirtgadgetes",
"/",
"correlated",
")",
":",
"param",
"limit",
":",
"return",
"value",
"limit",
"(",
"default",
"25",
")",
":",
"return",
":",
"Feed",
"dict"
] |
5a7ed9c5e6fa27170366ecbdef710dc80d537dc2
|
https://github.com/csirtgadgets/csirtgsdk-py/blob/5a7ed9c5e6fa27170366ecbdef710dc80d537dc2/csirtgsdk/__init__.py#L11-L24
|
train
|
csirtgadgets/csirtgsdk-py
|
csirtgsdk/__init__.py
|
indicator_create
|
def indicator_create(f, i):
"""
Create an indicator in a feed
:param f: feed name (eg: wes/test)
:param i: indicator dict (eg: {'indicator': 'example.com', 'tags': ['ssh'],
'description': 'this is a test'})
:return: dict of indicator
"""
if '/' not in f:
raise ValueError('feed name must be formatted like: '
'csirtgadgets/scanners')
if not i:
raise ValueError('missing indicator dict')
u, f = f.split('/')
i['user'] = u
i['feed'] = f
ret = Indicator(i).submit()
return ret
|
python
|
def indicator_create(f, i):
"""
Create an indicator in a feed
:param f: feed name (eg: wes/test)
:param i: indicator dict (eg: {'indicator': 'example.com', 'tags': ['ssh'],
'description': 'this is a test'})
:return: dict of indicator
"""
if '/' not in f:
raise ValueError('feed name must be formatted like: '
'csirtgadgets/scanners')
if not i:
raise ValueError('missing indicator dict')
u, f = f.split('/')
i['user'] = u
i['feed'] = f
ret = Indicator(i).submit()
return ret
|
[
"def",
"indicator_create",
"(",
"f",
",",
"i",
")",
":",
"if",
"'/'",
"not",
"in",
"f",
":",
"raise",
"ValueError",
"(",
"'feed name must be formatted like: '",
"'csirtgadgets/scanners'",
")",
"if",
"not",
"i",
":",
"raise",
"ValueError",
"(",
"'missing indicator dict'",
")",
"u",
",",
"f",
"=",
"f",
".",
"split",
"(",
"'/'",
")",
"i",
"[",
"'user'",
"]",
"=",
"u",
"i",
"[",
"'feed'",
"]",
"=",
"f",
"ret",
"=",
"Indicator",
"(",
"i",
")",
".",
"submit",
"(",
")",
"return",
"ret"
] |
Create an indicator in a feed
:param f: feed name (eg: wes/test)
:param i: indicator dict (eg: {'indicator': 'example.com', 'tags': ['ssh'],
'description': 'this is a test'})
:return: dict of indicator
|
[
"Create",
"an",
"indicator",
"in",
"a",
"feed",
":",
"param",
"f",
":",
"feed",
"name",
"(",
"eg",
":",
"wes",
"/",
"test",
")",
":",
"param",
"i",
":",
"indicator",
"dict",
"(",
"eg",
":",
"{",
"indicator",
":",
"example",
".",
"com",
"tags",
":",
"[",
"ssh",
"]",
"description",
":",
"this",
"is",
"a",
"test",
"}",
")",
":",
"return",
":",
"dict",
"of",
"indicator"
] |
5a7ed9c5e6fa27170366ecbdef710dc80d537dc2
|
https://github.com/csirtgadgets/csirtgsdk-py/blob/5a7ed9c5e6fa27170366ecbdef710dc80d537dc2/csirtgsdk/__init__.py#L39-L61
|
train
|
ivanyu/idx2numpy
|
idx2numpy/converters.py
|
convert_from_file
|
def convert_from_file(file):
"""
Reads the content of file in IDX format, converts it into numpy.ndarray and
returns it.
file is a file-like object (with read() method) or a file name.
"""
if isinstance(file, six_string_types):
with open(file, 'rb') as f:
return _internal_convert(f)
else:
return _internal_convert(file)
|
python
|
def convert_from_file(file):
"""
Reads the content of file in IDX format, converts it into numpy.ndarray and
returns it.
file is a file-like object (with read() method) or a file name.
"""
if isinstance(file, six_string_types):
with open(file, 'rb') as f:
return _internal_convert(f)
else:
return _internal_convert(file)
|
[
"def",
"convert_from_file",
"(",
"file",
")",
":",
"if",
"isinstance",
"(",
"file",
",",
"six_string_types",
")",
":",
"with",
"open",
"(",
"file",
",",
"'rb'",
")",
"as",
"f",
":",
"return",
"_internal_convert",
"(",
"f",
")",
"else",
":",
"return",
"_internal_convert",
"(",
"file",
")"
] |
Reads the content of file in IDX format, converts it into numpy.ndarray and
returns it.
file is a file-like object (with read() method) or a file name.
|
[
"Reads",
"the",
"content",
"of",
"file",
"in",
"IDX",
"format",
"converts",
"it",
"into",
"numpy",
".",
"ndarray",
"and",
"returns",
"it",
".",
"file",
"is",
"a",
"file",
"-",
"like",
"object",
"(",
"with",
"read",
"()",
"method",
")",
"or",
"a",
"file",
"name",
"."
] |
9b88698314973226212181d1747dfad6c6974e51
|
https://github.com/ivanyu/idx2numpy/blob/9b88698314973226212181d1747dfad6c6974e51/idx2numpy/converters.py#L49-L59
|
train
|
ivanyu/idx2numpy
|
idx2numpy/converters.py
|
_internal_convert
|
def _internal_convert(inp):
"""
Converts file in IDX format provided by file-like input into numpy.ndarray
and returns it.
"""
'''
Converts file in IDX format provided by file-like input into numpy.ndarray
and returns it.
'''
# Read the "magic number" - 4 bytes.
try:
mn = struct.unpack('>BBBB', inp.read(4))
except struct.error:
raise FormatError(struct.error)
# First two bytes are always zero, check it.
if mn[0] != 0 or mn[1] != 0:
msg = ("Incorrect first two bytes of the magic number: " +
"0x{0:02X} 0x{1:02X}".format(mn[0], mn[1]))
raise FormatError(msg)
# 3rd byte is the data type code.
dtype_code = mn[2]
if dtype_code not in _DATA_TYPES_IDX:
msg = "Incorrect data type code: 0x{0:02X}".format(dtype_code)
raise FormatError(msg)
# 4th byte is the number of dimensions.
dims = int(mn[3])
# See possible data types description.
dtype, dtype_s, el_size = _DATA_TYPES_IDX[dtype_code]
# 4-byte integer for length of each dimension.
try:
dims_sizes = struct.unpack('>' + 'I' * dims, inp.read(4 * dims))
except struct.error as e:
raise FormatError('Dims sizes: {0}'.format(e))
# Full length of data.
full_length = reduce(operator.mul, dims_sizes, 1)
# Create a numpy array from the data
try:
result_array = numpy.frombuffer(
inp.read(full_length * el_size),
dtype=numpy.dtype(dtype)
).reshape(dims_sizes)
except ValueError as e:
raise FormatError('Error creating numpy array: {0}'.format(e))
# Check for superfluous data.
if len(inp.read(1)) > 0:
raise FormatError('Superfluous data detected.')
return result_array
|
python
|
def _internal_convert(inp):
"""
Converts file in IDX format provided by file-like input into numpy.ndarray
and returns it.
"""
'''
Converts file in IDX format provided by file-like input into numpy.ndarray
and returns it.
'''
# Read the "magic number" - 4 bytes.
try:
mn = struct.unpack('>BBBB', inp.read(4))
except struct.error:
raise FormatError(struct.error)
# First two bytes are always zero, check it.
if mn[0] != 0 or mn[1] != 0:
msg = ("Incorrect first two bytes of the magic number: " +
"0x{0:02X} 0x{1:02X}".format(mn[0], mn[1]))
raise FormatError(msg)
# 3rd byte is the data type code.
dtype_code = mn[2]
if dtype_code not in _DATA_TYPES_IDX:
msg = "Incorrect data type code: 0x{0:02X}".format(dtype_code)
raise FormatError(msg)
# 4th byte is the number of dimensions.
dims = int(mn[3])
# See possible data types description.
dtype, dtype_s, el_size = _DATA_TYPES_IDX[dtype_code]
# 4-byte integer for length of each dimension.
try:
dims_sizes = struct.unpack('>' + 'I' * dims, inp.read(4 * dims))
except struct.error as e:
raise FormatError('Dims sizes: {0}'.format(e))
# Full length of data.
full_length = reduce(operator.mul, dims_sizes, 1)
# Create a numpy array from the data
try:
result_array = numpy.frombuffer(
inp.read(full_length * el_size),
dtype=numpy.dtype(dtype)
).reshape(dims_sizes)
except ValueError as e:
raise FormatError('Error creating numpy array: {0}'.format(e))
# Check for superfluous data.
if len(inp.read(1)) > 0:
raise FormatError('Superfluous data detected.')
return result_array
|
[
"def",
"_internal_convert",
"(",
"inp",
")",
":",
"'''\n Converts file in IDX format provided by file-like input into numpy.ndarray\n and returns it.\n '''",
"# Read the \"magic number\" - 4 bytes.",
"try",
":",
"mn",
"=",
"struct",
".",
"unpack",
"(",
"'>BBBB'",
",",
"inp",
".",
"read",
"(",
"4",
")",
")",
"except",
"struct",
".",
"error",
":",
"raise",
"FormatError",
"(",
"struct",
".",
"error",
")",
"# First two bytes are always zero, check it.",
"if",
"mn",
"[",
"0",
"]",
"!=",
"0",
"or",
"mn",
"[",
"1",
"]",
"!=",
"0",
":",
"msg",
"=",
"(",
"\"Incorrect first two bytes of the magic number: \"",
"+",
"\"0x{0:02X} 0x{1:02X}\"",
".",
"format",
"(",
"mn",
"[",
"0",
"]",
",",
"mn",
"[",
"1",
"]",
")",
")",
"raise",
"FormatError",
"(",
"msg",
")",
"# 3rd byte is the data type code.",
"dtype_code",
"=",
"mn",
"[",
"2",
"]",
"if",
"dtype_code",
"not",
"in",
"_DATA_TYPES_IDX",
":",
"msg",
"=",
"\"Incorrect data type code: 0x{0:02X}\"",
".",
"format",
"(",
"dtype_code",
")",
"raise",
"FormatError",
"(",
"msg",
")",
"# 4th byte is the number of dimensions.",
"dims",
"=",
"int",
"(",
"mn",
"[",
"3",
"]",
")",
"# See possible data types description.",
"dtype",
",",
"dtype_s",
",",
"el_size",
"=",
"_DATA_TYPES_IDX",
"[",
"dtype_code",
"]",
"# 4-byte integer for length of each dimension.",
"try",
":",
"dims_sizes",
"=",
"struct",
".",
"unpack",
"(",
"'>'",
"+",
"'I'",
"*",
"dims",
",",
"inp",
".",
"read",
"(",
"4",
"*",
"dims",
")",
")",
"except",
"struct",
".",
"error",
"as",
"e",
":",
"raise",
"FormatError",
"(",
"'Dims sizes: {0}'",
".",
"format",
"(",
"e",
")",
")",
"# Full length of data.",
"full_length",
"=",
"reduce",
"(",
"operator",
".",
"mul",
",",
"dims_sizes",
",",
"1",
")",
"# Create a numpy array from the data",
"try",
":",
"result_array",
"=",
"numpy",
".",
"frombuffer",
"(",
"inp",
".",
"read",
"(",
"full_length",
"*",
"el_size",
")",
",",
"dtype",
"=",
"numpy",
".",
"dtype",
"(",
"dtype",
")",
")",
".",
"reshape",
"(",
"dims_sizes",
")",
"except",
"ValueError",
"as",
"e",
":",
"raise",
"FormatError",
"(",
"'Error creating numpy array: {0}'",
".",
"format",
"(",
"e",
")",
")",
"# Check for superfluous data.",
"if",
"len",
"(",
"inp",
".",
"read",
"(",
"1",
")",
")",
">",
"0",
":",
"raise",
"FormatError",
"(",
"'Superfluous data detected.'",
")",
"return",
"result_array"
] |
Converts file in IDX format provided by file-like input into numpy.ndarray
and returns it.
|
[
"Converts",
"file",
"in",
"IDX",
"format",
"provided",
"by",
"file",
"-",
"like",
"input",
"into",
"numpy",
".",
"ndarray",
"and",
"returns",
"it",
"."
] |
9b88698314973226212181d1747dfad6c6974e51
|
https://github.com/ivanyu/idx2numpy/blob/9b88698314973226212181d1747dfad6c6974e51/idx2numpy/converters.py#L71-L127
|
train
|
ivanyu/idx2numpy
|
idx2numpy/converters.py
|
convert_to_file
|
def convert_to_file(file, ndarr):
"""
Writes the contents of the numpy.ndarray ndarr to file in IDX format.
file is a file-like object (with write() method) or a file name.
"""
if isinstance(file, six_string_types):
with open(file, 'wb') as fp:
_internal_write(fp, ndarr)
else:
_internal_write(file, ndarr)
|
python
|
def convert_to_file(file, ndarr):
"""
Writes the contents of the numpy.ndarray ndarr to file in IDX format.
file is a file-like object (with write() method) or a file name.
"""
if isinstance(file, six_string_types):
with open(file, 'wb') as fp:
_internal_write(fp, ndarr)
else:
_internal_write(file, ndarr)
|
[
"def",
"convert_to_file",
"(",
"file",
",",
"ndarr",
")",
":",
"if",
"isinstance",
"(",
"file",
",",
"six_string_types",
")",
":",
"with",
"open",
"(",
"file",
",",
"'wb'",
")",
"as",
"fp",
":",
"_internal_write",
"(",
"fp",
",",
"ndarr",
")",
"else",
":",
"_internal_write",
"(",
"file",
",",
"ndarr",
")"
] |
Writes the contents of the numpy.ndarray ndarr to file in IDX format.
file is a file-like object (with write() method) or a file name.
|
[
"Writes",
"the",
"contents",
"of",
"the",
"numpy",
".",
"ndarray",
"ndarr",
"to",
"file",
"in",
"IDX",
"format",
".",
"file",
"is",
"a",
"file",
"-",
"like",
"object",
"(",
"with",
"write",
"()",
"method",
")",
"or",
"a",
"file",
"name",
"."
] |
9b88698314973226212181d1747dfad6c6974e51
|
https://github.com/ivanyu/idx2numpy/blob/9b88698314973226212181d1747dfad6c6974e51/idx2numpy/converters.py#L130-L139
|
train
|
ivanyu/idx2numpy
|
idx2numpy/converters.py
|
convert_to_string
|
def convert_to_string(ndarr):
"""
Writes the contents of the numpy.ndarray ndarr to bytes in IDX format and
returns it.
"""
with contextlib.closing(BytesIO()) as bytesio:
_internal_write(bytesio, ndarr)
return bytesio.getvalue()
|
python
|
def convert_to_string(ndarr):
"""
Writes the contents of the numpy.ndarray ndarr to bytes in IDX format and
returns it.
"""
with contextlib.closing(BytesIO()) as bytesio:
_internal_write(bytesio, ndarr)
return bytesio.getvalue()
|
[
"def",
"convert_to_string",
"(",
"ndarr",
")",
":",
"with",
"contextlib",
".",
"closing",
"(",
"BytesIO",
"(",
")",
")",
"as",
"bytesio",
":",
"_internal_write",
"(",
"bytesio",
",",
"ndarr",
")",
"return",
"bytesio",
".",
"getvalue",
"(",
")"
] |
Writes the contents of the numpy.ndarray ndarr to bytes in IDX format and
returns it.
|
[
"Writes",
"the",
"contents",
"of",
"the",
"numpy",
".",
"ndarray",
"ndarr",
"to",
"bytes",
"in",
"IDX",
"format",
"and",
"returns",
"it",
"."
] |
9b88698314973226212181d1747dfad6c6974e51
|
https://github.com/ivanyu/idx2numpy/blob/9b88698314973226212181d1747dfad6c6974e51/idx2numpy/converters.py#L142-L149
|
train
|
ivanyu/idx2numpy
|
idx2numpy/converters.py
|
_internal_write
|
def _internal_write(out_stream, arr):
"""
Writes numpy.ndarray arr to a file-like object (with write() method) in
IDX format.
"""
if arr.size == 0:
raise FormatError('Cannot encode empty array.')
try:
type_byte, struct_lib_type = _DATA_TYPES_NUMPY[str(arr.dtype)]
except KeyError:
raise FormatError('numpy ndarray type not supported by IDX format.')
if arr.ndim > _MAX_IDX_DIMENSIONS:
raise FormatError(
'IDX format cannot encode array with dimensions > 255')
if max(arr.shape) > _MAX_AXIS_LENGTH:
raise FormatError('IDX format cannot encode array with more than ' +
str(_MAX_AXIS_LENGTH) + ' elements along any axis')
# Write magic number
out_stream.write(struct.pack('BBBB', 0, 0, type_byte, arr.ndim))
# Write array dimensions
out_stream.write(struct.pack('>' + 'I' * arr.ndim, *arr.shape))
# Horrible hack to deal with horrible bug when using struct.pack to encode
# unsigned ints in 2.7 and lower, see http://bugs.python.org/issue2263
if sys.version_info < (2, 7) and str(arr.dtype) == 'uint8':
arr_as_list = [int(i) for i in arr.reshape(-1)]
out_stream.write(struct.pack('>' + struct_lib_type * arr.size,
*arr_as_list))
else:
# Write array contents - note that the limit to number of arguments
# doesn't apply to unrolled arguments
out_stream.write(struct.pack('>' + struct_lib_type * arr.size,
*arr.reshape(-1)))
|
python
|
def _internal_write(out_stream, arr):
"""
Writes numpy.ndarray arr to a file-like object (with write() method) in
IDX format.
"""
if arr.size == 0:
raise FormatError('Cannot encode empty array.')
try:
type_byte, struct_lib_type = _DATA_TYPES_NUMPY[str(arr.dtype)]
except KeyError:
raise FormatError('numpy ndarray type not supported by IDX format.')
if arr.ndim > _MAX_IDX_DIMENSIONS:
raise FormatError(
'IDX format cannot encode array with dimensions > 255')
if max(arr.shape) > _MAX_AXIS_LENGTH:
raise FormatError('IDX format cannot encode array with more than ' +
str(_MAX_AXIS_LENGTH) + ' elements along any axis')
# Write magic number
out_stream.write(struct.pack('BBBB', 0, 0, type_byte, arr.ndim))
# Write array dimensions
out_stream.write(struct.pack('>' + 'I' * arr.ndim, *arr.shape))
# Horrible hack to deal with horrible bug when using struct.pack to encode
# unsigned ints in 2.7 and lower, see http://bugs.python.org/issue2263
if sys.version_info < (2, 7) and str(arr.dtype) == 'uint8':
arr_as_list = [int(i) for i in arr.reshape(-1)]
out_stream.write(struct.pack('>' + struct_lib_type * arr.size,
*arr_as_list))
else:
# Write array contents - note that the limit to number of arguments
# doesn't apply to unrolled arguments
out_stream.write(struct.pack('>' + struct_lib_type * arr.size,
*arr.reshape(-1)))
|
[
"def",
"_internal_write",
"(",
"out_stream",
",",
"arr",
")",
":",
"if",
"arr",
".",
"size",
"==",
"0",
":",
"raise",
"FormatError",
"(",
"'Cannot encode empty array.'",
")",
"try",
":",
"type_byte",
",",
"struct_lib_type",
"=",
"_DATA_TYPES_NUMPY",
"[",
"str",
"(",
"arr",
".",
"dtype",
")",
"]",
"except",
"KeyError",
":",
"raise",
"FormatError",
"(",
"'numpy ndarray type not supported by IDX format.'",
")",
"if",
"arr",
".",
"ndim",
">",
"_MAX_IDX_DIMENSIONS",
":",
"raise",
"FormatError",
"(",
"'IDX format cannot encode array with dimensions > 255'",
")",
"if",
"max",
"(",
"arr",
".",
"shape",
")",
">",
"_MAX_AXIS_LENGTH",
":",
"raise",
"FormatError",
"(",
"'IDX format cannot encode array with more than '",
"+",
"str",
"(",
"_MAX_AXIS_LENGTH",
")",
"+",
"' elements along any axis'",
")",
"# Write magic number",
"out_stream",
".",
"write",
"(",
"struct",
".",
"pack",
"(",
"'BBBB'",
",",
"0",
",",
"0",
",",
"type_byte",
",",
"arr",
".",
"ndim",
")",
")",
"# Write array dimensions",
"out_stream",
".",
"write",
"(",
"struct",
".",
"pack",
"(",
"'>'",
"+",
"'I'",
"*",
"arr",
".",
"ndim",
",",
"*",
"arr",
".",
"shape",
")",
")",
"# Horrible hack to deal with horrible bug when using struct.pack to encode",
"# unsigned ints in 2.7 and lower, see http://bugs.python.org/issue2263",
"if",
"sys",
".",
"version_info",
"<",
"(",
"2",
",",
"7",
")",
"and",
"str",
"(",
"arr",
".",
"dtype",
")",
"==",
"'uint8'",
":",
"arr_as_list",
"=",
"[",
"int",
"(",
"i",
")",
"for",
"i",
"in",
"arr",
".",
"reshape",
"(",
"-",
"1",
")",
"]",
"out_stream",
".",
"write",
"(",
"struct",
".",
"pack",
"(",
"'>'",
"+",
"struct_lib_type",
"*",
"arr",
".",
"size",
",",
"*",
"arr_as_list",
")",
")",
"else",
":",
"# Write array contents - note that the limit to number of arguments",
"# doesn't apply to unrolled arguments",
"out_stream",
".",
"write",
"(",
"struct",
".",
"pack",
"(",
"'>'",
"+",
"struct_lib_type",
"*",
"arr",
".",
"size",
",",
"*",
"arr",
".",
"reshape",
"(",
"-",
"1",
")",
")",
")"
] |
Writes numpy.ndarray arr to a file-like object (with write() method) in
IDX format.
|
[
"Writes",
"numpy",
".",
"ndarray",
"arr",
"to",
"a",
"file",
"-",
"like",
"object",
"(",
"with",
"write",
"()",
"method",
")",
"in",
"IDX",
"format",
"."
] |
9b88698314973226212181d1747dfad6c6974e51
|
https://github.com/ivanyu/idx2numpy/blob/9b88698314973226212181d1747dfad6c6974e51/idx2numpy/converters.py#L152-L190
|
train
|
KKBOX/OpenAPI-Python
|
kkbox_developer_sdk/auth_flow.py
|
KKBOXOAuth.fetch_access_token_by_client_credentials
|
def fetch_access_token_by_client_credentials(self):
'''
There are three ways to let you start using KKBOX's Open/Partner
API. The first way among them is to generate a client
credential to fetch an access token to let KKBOX identify
you. It allows you to access public data from KKBOX such as
public albums, playlists and so on.
However, you cannot use client credentials to access private
data of a user. You have to let users to log-in into KKBOX and
grant permissions for you to do so. You cannot use client
credentials to do media playback either, since it requires a
Premium Membership.
:return: an access token
:rtype: :class:`kkbox_sdk.KKBOXAccessToken`
See `https://docs-en.kkbox.codes/docs/appendix-a`.
'''
client_credential_base = '%s:%s' % (self.client_id, self.client_secret)
try:
client_credentials = base64.b64encode(
bytes(client_credential_base, 'utf-8'))
except:
client_credentials = base64.b64encode(client_credential_base)
client_credentials = client_credentials.decode('utf-8')
headers = {'Authorization': 'Basic ' + client_credentials,
'Content-type': 'application/x-www-form-urlencoded'}
post_parameters = {'grant_type': 'client_credentials',
'scope': 'user_profile user_territory'}
json_object = self.http._post_data(KKBOXOAuth.OAUTH_TOKEN_URL, post_parameters,
headers)
self.access_token = KKBOXAccessToken(**json_object)
return self.access_token
|
python
|
def fetch_access_token_by_client_credentials(self):
'''
There are three ways to let you start using KKBOX's Open/Partner
API. The first way among them is to generate a client
credential to fetch an access token to let KKBOX identify
you. It allows you to access public data from KKBOX such as
public albums, playlists and so on.
However, you cannot use client credentials to access private
data of a user. You have to let users to log-in into KKBOX and
grant permissions for you to do so. You cannot use client
credentials to do media playback either, since it requires a
Premium Membership.
:return: an access token
:rtype: :class:`kkbox_sdk.KKBOXAccessToken`
See `https://docs-en.kkbox.codes/docs/appendix-a`.
'''
client_credential_base = '%s:%s' % (self.client_id, self.client_secret)
try:
client_credentials = base64.b64encode(
bytes(client_credential_base, 'utf-8'))
except:
client_credentials = base64.b64encode(client_credential_base)
client_credentials = client_credentials.decode('utf-8')
headers = {'Authorization': 'Basic ' + client_credentials,
'Content-type': 'application/x-www-form-urlencoded'}
post_parameters = {'grant_type': 'client_credentials',
'scope': 'user_profile user_territory'}
json_object = self.http._post_data(KKBOXOAuth.OAUTH_TOKEN_URL, post_parameters,
headers)
self.access_token = KKBOXAccessToken(**json_object)
return self.access_token
|
[
"def",
"fetch_access_token_by_client_credentials",
"(",
"self",
")",
":",
"client_credential_base",
"=",
"'%s:%s'",
"%",
"(",
"self",
".",
"client_id",
",",
"self",
".",
"client_secret",
")",
"try",
":",
"client_credentials",
"=",
"base64",
".",
"b64encode",
"(",
"bytes",
"(",
"client_credential_base",
",",
"'utf-8'",
")",
")",
"except",
":",
"client_credentials",
"=",
"base64",
".",
"b64encode",
"(",
"client_credential_base",
")",
"client_credentials",
"=",
"client_credentials",
".",
"decode",
"(",
"'utf-8'",
")",
"headers",
"=",
"{",
"'Authorization'",
":",
"'Basic '",
"+",
"client_credentials",
",",
"'Content-type'",
":",
"'application/x-www-form-urlencoded'",
"}",
"post_parameters",
"=",
"{",
"'grant_type'",
":",
"'client_credentials'",
",",
"'scope'",
":",
"'user_profile user_territory'",
"}",
"json_object",
"=",
"self",
".",
"http",
".",
"_post_data",
"(",
"KKBOXOAuth",
".",
"OAUTH_TOKEN_URL",
",",
"post_parameters",
",",
"headers",
")",
"self",
".",
"access_token",
"=",
"KKBOXAccessToken",
"(",
"*",
"*",
"json_object",
")",
"return",
"self",
".",
"access_token"
] |
There are three ways to let you start using KKBOX's Open/Partner
API. The first way among them is to generate a client
credential to fetch an access token to let KKBOX identify
you. It allows you to access public data from KKBOX such as
public albums, playlists and so on.
However, you cannot use client credentials to access private
data of a user. You have to let users to log-in into KKBOX and
grant permissions for you to do so. You cannot use client
credentials to do media playback either, since it requires a
Premium Membership.
:return: an access token
:rtype: :class:`kkbox_sdk.KKBOXAccessToken`
See `https://docs-en.kkbox.codes/docs/appendix-a`.
|
[
"There",
"are",
"three",
"ways",
"to",
"let",
"you",
"start",
"using",
"KKBOX",
"s",
"Open",
"/",
"Partner",
"API",
".",
"The",
"first",
"way",
"among",
"them",
"is",
"to",
"generate",
"a",
"client",
"credential",
"to",
"fetch",
"an",
"access",
"token",
"to",
"let",
"KKBOX",
"identify",
"you",
".",
"It",
"allows",
"you",
"to",
"access",
"public",
"data",
"from",
"KKBOX",
"such",
"as",
"public",
"albums",
"playlists",
"and",
"so",
"on",
"."
] |
77aa22fd300ed987d5507a5b66b149edcd28047d
|
https://github.com/KKBOX/OpenAPI-Python/blob/77aa22fd300ed987d5507a5b66b149edcd28047d/kkbox_developer_sdk/auth_flow.py#L27-L60
|
train
|
UpCloudLtd/upcloud-python-api
|
upcloud_api/constants.py
|
OperatingSystems.get_OS_UUID
|
def get_OS_UUID(cls, os):
"""
Validate Storage OS and its UUID.
If the OS is a custom OS UUID, don't validate against templates.
"""
if os in cls.templates:
return cls.templates[os]
uuid_regexp = '^[0-9a-z]{8}-[0-9a-z]{4}-[0-9a-z]{4}-[0-9a-z]{4}-[0-9a-z]{12}$'
if re.search(uuid_regexp, os):
return os
raise Exception((
"Invalid OS -- valid options are: 'CentOS 6.5', 'CentOS 7.0', "
"'Debian 7.8', 'Debian 8.0' ,'Ubuntu 12.04', 'Ubuntu 14.04', 'Ubuntu 16.04', "
"'Windows 2008', 'Windows 2012'"
))
|
python
|
def get_OS_UUID(cls, os):
"""
Validate Storage OS and its UUID.
If the OS is a custom OS UUID, don't validate against templates.
"""
if os in cls.templates:
return cls.templates[os]
uuid_regexp = '^[0-9a-z]{8}-[0-9a-z]{4}-[0-9a-z]{4}-[0-9a-z]{4}-[0-9a-z]{12}$'
if re.search(uuid_regexp, os):
return os
raise Exception((
"Invalid OS -- valid options are: 'CentOS 6.5', 'CentOS 7.0', "
"'Debian 7.8', 'Debian 8.0' ,'Ubuntu 12.04', 'Ubuntu 14.04', 'Ubuntu 16.04', "
"'Windows 2008', 'Windows 2012'"
))
|
[
"def",
"get_OS_UUID",
"(",
"cls",
",",
"os",
")",
":",
"if",
"os",
"in",
"cls",
".",
"templates",
":",
"return",
"cls",
".",
"templates",
"[",
"os",
"]",
"uuid_regexp",
"=",
"'^[0-9a-z]{8}-[0-9a-z]{4}-[0-9a-z]{4}-[0-9a-z]{4}-[0-9a-z]{12}$'",
"if",
"re",
".",
"search",
"(",
"uuid_regexp",
",",
"os",
")",
":",
"return",
"os",
"raise",
"Exception",
"(",
"(",
"\"Invalid OS -- valid options are: 'CentOS 6.5', 'CentOS 7.0', \"",
"\"'Debian 7.8', 'Debian 8.0' ,'Ubuntu 12.04', 'Ubuntu 14.04', 'Ubuntu 16.04', \"",
"\"'Windows 2008', 'Windows 2012'\"",
")",
")"
] |
Validate Storage OS and its UUID.
If the OS is a custom OS UUID, don't validate against templates.
|
[
"Validate",
"Storage",
"OS",
"and",
"its",
"UUID",
"."
] |
954b0ad7c4b932b2be31a95d88975f6b0eeac8ed
|
https://github.com/UpCloudLtd/upcloud-python-api/blob/954b0ad7c4b932b2be31a95d88975f6b0eeac8ed/upcloud_api/constants.py#L43-L60
|
train
|
tanwanirahul/django-batch-requests
|
batch_requests/concurrent/executor.py
|
Executor.execute
|
def execute(self, requests, resp_generator, *args, **kwargs):
'''
Calls the resp_generator for all the requests in parallel in an asynchronous way.
'''
result_futures = [self.executor_pool.submit(resp_generator, req, *args, **kwargs) for req in requests]
resp = [res_future.result() for res_future in result_futures]
return resp
|
python
|
def execute(self, requests, resp_generator, *args, **kwargs):
'''
Calls the resp_generator for all the requests in parallel in an asynchronous way.
'''
result_futures = [self.executor_pool.submit(resp_generator, req, *args, **kwargs) for req in requests]
resp = [res_future.result() for res_future in result_futures]
return resp
|
[
"def",
"execute",
"(",
"self",
",",
"requests",
",",
"resp_generator",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"result_futures",
"=",
"[",
"self",
".",
"executor_pool",
".",
"submit",
"(",
"resp_generator",
",",
"req",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"for",
"req",
"in",
"requests",
"]",
"resp",
"=",
"[",
"res_future",
".",
"result",
"(",
")",
"for",
"res_future",
"in",
"result_futures",
"]",
"return",
"resp"
] |
Calls the resp_generator for all the requests in parallel in an asynchronous way.
|
[
"Calls",
"the",
"resp_generator",
"for",
"all",
"the",
"requests",
"in",
"parallel",
"in",
"an",
"asynchronous",
"way",
"."
] |
9c5afc42f7542f466247f4ffed9c44e1c49fa20d
|
https://github.com/tanwanirahul/django-batch-requests/blob/9c5afc42f7542f466247f4ffed9c44e1c49fa20d/batch_requests/concurrent/executor.py#L17-L23
|
train
|
tanwanirahul/django-batch-requests
|
batch_requests/concurrent/executor.py
|
SequentialExecutor.execute
|
def execute(self, requests, resp_generator, *args, **kwargs):
'''
Calls the resp_generator for all the requests in sequential order.
'''
return [resp_generator(request) for request in requests]
|
python
|
def execute(self, requests, resp_generator, *args, **kwargs):
'''
Calls the resp_generator for all the requests in sequential order.
'''
return [resp_generator(request) for request in requests]
|
[
"def",
"execute",
"(",
"self",
",",
"requests",
",",
"resp_generator",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"[",
"resp_generator",
"(",
"request",
")",
"for",
"request",
"in",
"requests",
"]"
] |
Calls the resp_generator for all the requests in sequential order.
|
[
"Calls",
"the",
"resp_generator",
"for",
"all",
"the",
"requests",
"in",
"sequential",
"order",
"."
] |
9c5afc42f7542f466247f4ffed9c44e1c49fa20d
|
https://github.com/tanwanirahul/django-batch-requests/blob/9c5afc42f7542f466247f4ffed9c44e1c49fa20d/batch_requests/concurrent/executor.py#L31-L35
|
train
|
csirtgadgets/csirtgsdk-py
|
csirtgsdk/client/cli.py
|
setup_logging
|
def setup_logging(args):
"""
Sets up basic logging
:param args: ArgParse arguments
:return: nothing. sets logger up globally
"""
loglevel = logging.WARNING
if args.verbose:
loglevel = logging.INFO
if args.debug:
loglevel = logging.DEBUG
console = logging.StreamHandler()
logging.getLogger('').setLevel(loglevel)
console.setFormatter(logging.Formatter(LOG_FORMAT))
logging.getLogger('').addHandler(console)
|
python
|
def setup_logging(args):
"""
Sets up basic logging
:param args: ArgParse arguments
:return: nothing. sets logger up globally
"""
loglevel = logging.WARNING
if args.verbose:
loglevel = logging.INFO
if args.debug:
loglevel = logging.DEBUG
console = logging.StreamHandler()
logging.getLogger('').setLevel(loglevel)
console.setFormatter(logging.Formatter(LOG_FORMAT))
logging.getLogger('').addHandler(console)
|
[
"def",
"setup_logging",
"(",
"args",
")",
":",
"loglevel",
"=",
"logging",
".",
"WARNING",
"if",
"args",
".",
"verbose",
":",
"loglevel",
"=",
"logging",
".",
"INFO",
"if",
"args",
".",
"debug",
":",
"loglevel",
"=",
"logging",
".",
"DEBUG",
"console",
"=",
"logging",
".",
"StreamHandler",
"(",
")",
"logging",
".",
"getLogger",
"(",
"''",
")",
".",
"setLevel",
"(",
"loglevel",
")",
"console",
".",
"setFormatter",
"(",
"logging",
".",
"Formatter",
"(",
"LOG_FORMAT",
")",
")",
"logging",
".",
"getLogger",
"(",
"''",
")",
".",
"addHandler",
"(",
"console",
")"
] |
Sets up basic logging
:param args: ArgParse arguments
:return: nothing. sets logger up globally
|
[
"Sets",
"up",
"basic",
"logging"
] |
5a7ed9c5e6fa27170366ecbdef710dc80d537dc2
|
https://github.com/csirtgadgets/csirtgsdk-py/blob/5a7ed9c5e6fa27170366ecbdef710dc80d537dc2/csirtgsdk/client/cli.py#L18-L34
|
train
|
UpCloudLtd/upcloud-python-api
|
upcloud_api/cloud_manager/ip_address_mixin.py
|
IPManager.get_ip
|
def get_ip(self, address):
"""
Get an IPAddress object with the IP address (string) from the API.
e.g manager.get_ip('80.69.175.210')
"""
res = self.get_request('/ip_address/' + address)
return IPAddress(cloud_manager=self, **res['ip_address'])
|
python
|
def get_ip(self, address):
"""
Get an IPAddress object with the IP address (string) from the API.
e.g manager.get_ip('80.69.175.210')
"""
res = self.get_request('/ip_address/' + address)
return IPAddress(cloud_manager=self, **res['ip_address'])
|
[
"def",
"get_ip",
"(",
"self",
",",
"address",
")",
":",
"res",
"=",
"self",
".",
"get_request",
"(",
"'/ip_address/'",
"+",
"address",
")",
"return",
"IPAddress",
"(",
"cloud_manager",
"=",
"self",
",",
"*",
"*",
"res",
"[",
"'ip_address'",
"]",
")"
] |
Get an IPAddress object with the IP address (string) from the API.
e.g manager.get_ip('80.69.175.210')
|
[
"Get",
"an",
"IPAddress",
"object",
"with",
"the",
"IP",
"address",
"(",
"string",
")",
"from",
"the",
"API",
"."
] |
954b0ad7c4b932b2be31a95d88975f6b0eeac8ed
|
https://github.com/UpCloudLtd/upcloud-python-api/blob/954b0ad7c4b932b2be31a95d88975f6b0eeac8ed/upcloud_api/cloud_manager/ip_address_mixin.py#L16-L23
|
train
|
UpCloudLtd/upcloud-python-api
|
upcloud_api/cloud_manager/ip_address_mixin.py
|
IPManager.get_ips
|
def get_ips(self):
"""
Get all IPAddress objects from the API.
"""
res = self.get_request('/ip_address')
IPs = IPAddress._create_ip_address_objs(res['ip_addresses'], cloud_manager=self)
return IPs
|
python
|
def get_ips(self):
"""
Get all IPAddress objects from the API.
"""
res = self.get_request('/ip_address')
IPs = IPAddress._create_ip_address_objs(res['ip_addresses'], cloud_manager=self)
return IPs
|
[
"def",
"get_ips",
"(",
"self",
")",
":",
"res",
"=",
"self",
".",
"get_request",
"(",
"'/ip_address'",
")",
"IPs",
"=",
"IPAddress",
".",
"_create_ip_address_objs",
"(",
"res",
"[",
"'ip_addresses'",
"]",
",",
"cloud_manager",
"=",
"self",
")",
"return",
"IPs"
] |
Get all IPAddress objects from the API.
|
[
"Get",
"all",
"IPAddress",
"objects",
"from",
"the",
"API",
"."
] |
954b0ad7c4b932b2be31a95d88975f6b0eeac8ed
|
https://github.com/UpCloudLtd/upcloud-python-api/blob/954b0ad7c4b932b2be31a95d88975f6b0eeac8ed/upcloud_api/cloud_manager/ip_address_mixin.py#L25-L31
|
train
|
UpCloudLtd/upcloud-python-api
|
upcloud_api/cloud_manager/ip_address_mixin.py
|
IPManager.attach_ip
|
def attach_ip(self, server, family='IPv4'):
"""
Attach a new (random) IPAddress to the given server (object or UUID).
"""
body = {
'ip_address': {
'server': str(server),
'family': family
}
}
res = self.request('POST', '/ip_address', body)
return IPAddress(cloud_manager=self, **res['ip_address'])
|
python
|
def attach_ip(self, server, family='IPv4'):
"""
Attach a new (random) IPAddress to the given server (object or UUID).
"""
body = {
'ip_address': {
'server': str(server),
'family': family
}
}
res = self.request('POST', '/ip_address', body)
return IPAddress(cloud_manager=self, **res['ip_address'])
|
[
"def",
"attach_ip",
"(",
"self",
",",
"server",
",",
"family",
"=",
"'IPv4'",
")",
":",
"body",
"=",
"{",
"'ip_address'",
":",
"{",
"'server'",
":",
"str",
"(",
"server",
")",
",",
"'family'",
":",
"family",
"}",
"}",
"res",
"=",
"self",
".",
"request",
"(",
"'POST'",
",",
"'/ip_address'",
",",
"body",
")",
"return",
"IPAddress",
"(",
"cloud_manager",
"=",
"self",
",",
"*",
"*",
"res",
"[",
"'ip_address'",
"]",
")"
] |
Attach a new (random) IPAddress to the given server (object or UUID).
|
[
"Attach",
"a",
"new",
"(",
"random",
")",
"IPAddress",
"to",
"the",
"given",
"server",
"(",
"object",
"or",
"UUID",
")",
"."
] |
954b0ad7c4b932b2be31a95d88975f6b0eeac8ed
|
https://github.com/UpCloudLtd/upcloud-python-api/blob/954b0ad7c4b932b2be31a95d88975f6b0eeac8ed/upcloud_api/cloud_manager/ip_address_mixin.py#L33-L45
|
train
|
UpCloudLtd/upcloud-python-api
|
upcloud_api/cloud_manager/ip_address_mixin.py
|
IPManager.modify_ip
|
def modify_ip(self, ip_addr, ptr_record):
"""
Modify an IP address' ptr-record (Reverse DNS).
Accepts an IPAddress instance (object) or its address (string).
"""
body = {
'ip_address': {
'ptr_record': ptr_record
}
}
res = self.request('PUT', '/ip_address/' + str(ip_addr), body)
return IPAddress(cloud_manager=self, **res['ip_address'])
|
python
|
def modify_ip(self, ip_addr, ptr_record):
"""
Modify an IP address' ptr-record (Reverse DNS).
Accepts an IPAddress instance (object) or its address (string).
"""
body = {
'ip_address': {
'ptr_record': ptr_record
}
}
res = self.request('PUT', '/ip_address/' + str(ip_addr), body)
return IPAddress(cloud_manager=self, **res['ip_address'])
|
[
"def",
"modify_ip",
"(",
"self",
",",
"ip_addr",
",",
"ptr_record",
")",
":",
"body",
"=",
"{",
"'ip_address'",
":",
"{",
"'ptr_record'",
":",
"ptr_record",
"}",
"}",
"res",
"=",
"self",
".",
"request",
"(",
"'PUT'",
",",
"'/ip_address/'",
"+",
"str",
"(",
"ip_addr",
")",
",",
"body",
")",
"return",
"IPAddress",
"(",
"cloud_manager",
"=",
"self",
",",
"*",
"*",
"res",
"[",
"'ip_address'",
"]",
")"
] |
Modify an IP address' ptr-record (Reverse DNS).
Accepts an IPAddress instance (object) or its address (string).
|
[
"Modify",
"an",
"IP",
"address",
"ptr",
"-",
"record",
"(",
"Reverse",
"DNS",
")",
"."
] |
954b0ad7c4b932b2be31a95d88975f6b0eeac8ed
|
https://github.com/UpCloudLtd/upcloud-python-api/blob/954b0ad7c4b932b2be31a95d88975f6b0eeac8ed/upcloud_api/cloud_manager/ip_address_mixin.py#L47-L60
|
train
|
csirtgadgets/csirtgsdk-py
|
csirtgsdk/feed.py
|
Feed.new
|
def new(self, user, name, description=None):
"""
Creates a new Feed object
:param user: feed username
:param name: feed name
:param description: feed description
:return: dict
"""
uri = self.client.remote + '/users/{0}/feeds'.format(user)
data = {
'feed': {
'name': name,
'description': description
}
}
resp = self.client.post(uri, data)
return resp
|
python
|
def new(self, user, name, description=None):
"""
Creates a new Feed object
:param user: feed username
:param name: feed name
:param description: feed description
:return: dict
"""
uri = self.client.remote + '/users/{0}/feeds'.format(user)
data = {
'feed': {
'name': name,
'description': description
}
}
resp = self.client.post(uri, data)
return resp
|
[
"def",
"new",
"(",
"self",
",",
"user",
",",
"name",
",",
"description",
"=",
"None",
")",
":",
"uri",
"=",
"self",
".",
"client",
".",
"remote",
"+",
"'/users/{0}/feeds'",
".",
"format",
"(",
"user",
")",
"data",
"=",
"{",
"'feed'",
":",
"{",
"'name'",
":",
"name",
",",
"'description'",
":",
"description",
"}",
"}",
"resp",
"=",
"self",
".",
"client",
".",
"post",
"(",
"uri",
",",
"data",
")",
"return",
"resp"
] |
Creates a new Feed object
:param user: feed username
:param name: feed name
:param description: feed description
:return: dict
|
[
"Creates",
"a",
"new",
"Feed",
"object"
] |
5a7ed9c5e6fa27170366ecbdef710dc80d537dc2
|
https://github.com/csirtgadgets/csirtgsdk-py/blob/5a7ed9c5e6fa27170366ecbdef710dc80d537dc2/csirtgsdk/feed.py#L32-L51
|
train
|
csirtgadgets/csirtgsdk-py
|
csirtgsdk/feed.py
|
Feed.delete
|
def delete(self, user, name):
"""
Removes a feed
:param user: feed username
:param name: feed name
:return: true/false
"""
uri = self.client.remote + '/users/{}/feeds/{}'.format(user, name)
resp = self.client.session.delete(uri)
return resp.status_code
|
python
|
def delete(self, user, name):
"""
Removes a feed
:param user: feed username
:param name: feed name
:return: true/false
"""
uri = self.client.remote + '/users/{}/feeds/{}'.format(user, name)
resp = self.client.session.delete(uri)
return resp.status_code
|
[
"def",
"delete",
"(",
"self",
",",
"user",
",",
"name",
")",
":",
"uri",
"=",
"self",
".",
"client",
".",
"remote",
"+",
"'/users/{}/feeds/{}'",
".",
"format",
"(",
"user",
",",
"name",
")",
"resp",
"=",
"self",
".",
"client",
".",
"session",
".",
"delete",
"(",
"uri",
")",
"return",
"resp",
".",
"status_code"
] |
Removes a feed
:param user: feed username
:param name: feed name
:return: true/false
|
[
"Removes",
"a",
"feed"
] |
5a7ed9c5e6fa27170366ecbdef710dc80d537dc2
|
https://github.com/csirtgadgets/csirtgsdk-py/blob/5a7ed9c5e6fa27170366ecbdef710dc80d537dc2/csirtgsdk/feed.py#L53-L65
|
train
|
csirtgadgets/csirtgsdk-py
|
csirtgsdk/feed.py
|
Feed.index
|
def index(self, user):
"""
Returns a list of Feeds from the API
:param user: feed username
:return: list
Example:
ret = feed.index('csirtgadgets')
"""
uri = self.client.remote + '/users/{0}/feeds'.format(user)
return self.client.get(uri)
|
python
|
def index(self, user):
"""
Returns a list of Feeds from the API
:param user: feed username
:return: list
Example:
ret = feed.index('csirtgadgets')
"""
uri = self.client.remote + '/users/{0}/feeds'.format(user)
return self.client.get(uri)
|
[
"def",
"index",
"(",
"self",
",",
"user",
")",
":",
"uri",
"=",
"self",
".",
"client",
".",
"remote",
"+",
"'/users/{0}/feeds'",
".",
"format",
"(",
"user",
")",
"return",
"self",
".",
"client",
".",
"get",
"(",
"uri",
")"
] |
Returns a list of Feeds from the API
:param user: feed username
:return: list
Example:
ret = feed.index('csirtgadgets')
|
[
"Returns",
"a",
"list",
"of",
"Feeds",
"from",
"the",
"API"
] |
5a7ed9c5e6fa27170366ecbdef710dc80d537dc2
|
https://github.com/csirtgadgets/csirtgsdk-py/blob/5a7ed9c5e6fa27170366ecbdef710dc80d537dc2/csirtgsdk/feed.py#L69-L80
|
train
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.