repo
stringlengths 7
55
| path
stringlengths 4
223
| func_name
stringlengths 1
134
| original_string
stringlengths 75
104k
| language
stringclasses 1
value | code
stringlengths 75
104k
| code_tokens
listlengths 19
28.4k
| docstring
stringlengths 1
46.9k
| docstring_tokens
listlengths 1
1.97k
| sha
stringlengths 40
40
| url
stringlengths 87
315
| partition
stringclasses 1
value |
|---|---|---|---|---|---|---|---|---|---|---|---|
twisted/txacme
|
src/txacme/challenges/_libcloud.py
|
_get_existing
|
def _get_existing(driver, zone_name, server_name, validation):
"""
Get existing validation records.
"""
if zone_name is None:
zones = sorted(
(z for z
in driver.list_zones()
if server_name.rstrip(u'.')
.endswith(u'.' + z.domain.rstrip(u'.'))),
key=lambda z: len(z.domain),
reverse=True)
if len(zones) == 0:
raise NotInZone(server_name=server_name, zone_name=None)
else:
zones = [
z for z
in driver.list_zones()
if z.domain == zone_name]
if len(zones) == 0:
raise ZoneNotFound(zone_name=zone_name)
zone = zones[0]
subdomain = _split_zone(server_name, zone.domain)
existing = [
record for record
in zone.list_records()
if record.name == subdomain and
record.type == 'TXT' and
record.data == validation]
return zone, existing, subdomain
|
python
|
def _get_existing(driver, zone_name, server_name, validation):
"""
Get existing validation records.
"""
if zone_name is None:
zones = sorted(
(z for z
in driver.list_zones()
if server_name.rstrip(u'.')
.endswith(u'.' + z.domain.rstrip(u'.'))),
key=lambda z: len(z.domain),
reverse=True)
if len(zones) == 0:
raise NotInZone(server_name=server_name, zone_name=None)
else:
zones = [
z for z
in driver.list_zones()
if z.domain == zone_name]
if len(zones) == 0:
raise ZoneNotFound(zone_name=zone_name)
zone = zones[0]
subdomain = _split_zone(server_name, zone.domain)
existing = [
record for record
in zone.list_records()
if record.name == subdomain and
record.type == 'TXT' and
record.data == validation]
return zone, existing, subdomain
|
[
"def",
"_get_existing",
"(",
"driver",
",",
"zone_name",
",",
"server_name",
",",
"validation",
")",
":",
"if",
"zone_name",
"is",
"None",
":",
"zones",
"=",
"sorted",
"(",
"(",
"z",
"for",
"z",
"in",
"driver",
".",
"list_zones",
"(",
")",
"if",
"server_name",
".",
"rstrip",
"(",
"u'.'",
")",
".",
"endswith",
"(",
"u'.'",
"+",
"z",
".",
"domain",
".",
"rstrip",
"(",
"u'.'",
")",
")",
")",
",",
"key",
"=",
"lambda",
"z",
":",
"len",
"(",
"z",
".",
"domain",
")",
",",
"reverse",
"=",
"True",
")",
"if",
"len",
"(",
"zones",
")",
"==",
"0",
":",
"raise",
"NotInZone",
"(",
"server_name",
"=",
"server_name",
",",
"zone_name",
"=",
"None",
")",
"else",
":",
"zones",
"=",
"[",
"z",
"for",
"z",
"in",
"driver",
".",
"list_zones",
"(",
")",
"if",
"z",
".",
"domain",
"==",
"zone_name",
"]",
"if",
"len",
"(",
"zones",
")",
"==",
"0",
":",
"raise",
"ZoneNotFound",
"(",
"zone_name",
"=",
"zone_name",
")",
"zone",
"=",
"zones",
"[",
"0",
"]",
"subdomain",
"=",
"_split_zone",
"(",
"server_name",
",",
"zone",
".",
"domain",
")",
"existing",
"=",
"[",
"record",
"for",
"record",
"in",
"zone",
".",
"list_records",
"(",
")",
"if",
"record",
".",
"name",
"==",
"subdomain",
"and",
"record",
".",
"type",
"==",
"'TXT'",
"and",
"record",
".",
"data",
"==",
"validation",
"]",
"return",
"zone",
",",
"existing",
",",
"subdomain"
] |
Get existing validation records.
|
[
"Get",
"existing",
"validation",
"records",
"."
] |
9478381cc63c6d53d14bf8db8407c923f472989a
|
https://github.com/twisted/txacme/blob/9478381cc63c6d53d14bf8db8407c923f472989a/src/txacme/challenges/_libcloud.py#L61-L90
|
train
|
twisted/txacme
|
src/txacme/challenges/_libcloud.py
|
_validation
|
def _validation(response):
"""
Get the validation value for a challenge response.
"""
h = hashlib.sha256(response.key_authorization.encode("utf-8"))
return b64encode(h.digest()).decode()
|
python
|
def _validation(response):
"""
Get the validation value for a challenge response.
"""
h = hashlib.sha256(response.key_authorization.encode("utf-8"))
return b64encode(h.digest()).decode()
|
[
"def",
"_validation",
"(",
"response",
")",
":",
"h",
"=",
"hashlib",
".",
"sha256",
"(",
"response",
".",
"key_authorization",
".",
"encode",
"(",
"\"utf-8\"",
")",
")",
"return",
"b64encode",
"(",
"h",
".",
"digest",
"(",
")",
")",
".",
"decode",
"(",
")"
] |
Get the validation value for a challenge response.
|
[
"Get",
"the",
"validation",
"value",
"for",
"a",
"challenge",
"response",
"."
] |
9478381cc63c6d53d14bf8db8407c923f472989a
|
https://github.com/twisted/txacme/blob/9478381cc63c6d53d14bf8db8407c923f472989a/src/txacme/challenges/_libcloud.py#L93-L98
|
train
|
twisted/txacme
|
src/txacme/endpoint.py
|
load_or_create_client_key
|
def load_or_create_client_key(pem_path):
"""
Load the client key from a directory, creating it if it does not exist.
.. note:: The client key that will be created will be a 2048-bit RSA key.
:type pem_path: ``twisted.python.filepath.FilePath``
:param pem_path: The certificate directory
to use, as with the endpoint.
"""
acme_key_file = pem_path.asTextMode().child(u'client.key')
if acme_key_file.exists():
key = serialization.load_pem_private_key(
acme_key_file.getContent(),
password=None,
backend=default_backend())
else:
key = generate_private_key(u'rsa')
acme_key_file.setContent(
key.private_bytes(
encoding=serialization.Encoding.PEM,
format=serialization.PrivateFormat.TraditionalOpenSSL,
encryption_algorithm=serialization.NoEncryption()))
return JWKRSA(key=key)
|
python
|
def load_or_create_client_key(pem_path):
"""
Load the client key from a directory, creating it if it does not exist.
.. note:: The client key that will be created will be a 2048-bit RSA key.
:type pem_path: ``twisted.python.filepath.FilePath``
:param pem_path: The certificate directory
to use, as with the endpoint.
"""
acme_key_file = pem_path.asTextMode().child(u'client.key')
if acme_key_file.exists():
key = serialization.load_pem_private_key(
acme_key_file.getContent(),
password=None,
backend=default_backend())
else:
key = generate_private_key(u'rsa')
acme_key_file.setContent(
key.private_bytes(
encoding=serialization.Encoding.PEM,
format=serialization.PrivateFormat.TraditionalOpenSSL,
encryption_algorithm=serialization.NoEncryption()))
return JWKRSA(key=key)
|
[
"def",
"load_or_create_client_key",
"(",
"pem_path",
")",
":",
"acme_key_file",
"=",
"pem_path",
".",
"asTextMode",
"(",
")",
".",
"child",
"(",
"u'client.key'",
")",
"if",
"acme_key_file",
".",
"exists",
"(",
")",
":",
"key",
"=",
"serialization",
".",
"load_pem_private_key",
"(",
"acme_key_file",
".",
"getContent",
"(",
")",
",",
"password",
"=",
"None",
",",
"backend",
"=",
"default_backend",
"(",
")",
")",
"else",
":",
"key",
"=",
"generate_private_key",
"(",
"u'rsa'",
")",
"acme_key_file",
".",
"setContent",
"(",
"key",
".",
"private_bytes",
"(",
"encoding",
"=",
"serialization",
".",
"Encoding",
".",
"PEM",
",",
"format",
"=",
"serialization",
".",
"PrivateFormat",
".",
"TraditionalOpenSSL",
",",
"encryption_algorithm",
"=",
"serialization",
".",
"NoEncryption",
"(",
")",
")",
")",
"return",
"JWKRSA",
"(",
"key",
"=",
"key",
")"
] |
Load the client key from a directory, creating it if it does not exist.
.. note:: The client key that will be created will be a 2048-bit RSA key.
:type pem_path: ``twisted.python.filepath.FilePath``
:param pem_path: The certificate directory
to use, as with the endpoint.
|
[
"Load",
"the",
"client",
"key",
"from",
"a",
"directory",
"creating",
"it",
"if",
"it",
"does",
"not",
"exist",
"."
] |
9478381cc63c6d53d14bf8db8407c923f472989a
|
https://github.com/twisted/txacme/blob/9478381cc63c6d53d14bf8db8407c923f472989a/src/txacme/endpoint.py#L131-L154
|
train
|
twisted/txacme
|
src/txacme/endpoint.py
|
_parse
|
def _parse(reactor, directory, pemdir, *args, **kwargs):
"""
Parse a txacme endpoint description.
:param reactor: The Twisted reactor.
:param directory: ``twisted.python.url.URL`` for the ACME directory to use
for issuing certs.
:param str pemdir: The path to the certificate directory to use.
"""
def colon_join(items):
return ':'.join([item.replace(':', '\\:') for item in items])
sub = colon_join(list(args) + ['='.join(item) for item in kwargs.items()])
pem_path = FilePath(pemdir).asTextMode()
acme_key = load_or_create_client_key(pem_path)
return AutoTLSEndpoint(
reactor=reactor,
directory=directory,
client_creator=partial(Client.from_url, key=acme_key, alg=RS256),
cert_store=DirectoryStore(pem_path),
cert_mapping=HostDirectoryMap(pem_path),
sub_endpoint=serverFromString(reactor, sub))
|
python
|
def _parse(reactor, directory, pemdir, *args, **kwargs):
"""
Parse a txacme endpoint description.
:param reactor: The Twisted reactor.
:param directory: ``twisted.python.url.URL`` for the ACME directory to use
for issuing certs.
:param str pemdir: The path to the certificate directory to use.
"""
def colon_join(items):
return ':'.join([item.replace(':', '\\:') for item in items])
sub = colon_join(list(args) + ['='.join(item) for item in kwargs.items()])
pem_path = FilePath(pemdir).asTextMode()
acme_key = load_or_create_client_key(pem_path)
return AutoTLSEndpoint(
reactor=reactor,
directory=directory,
client_creator=partial(Client.from_url, key=acme_key, alg=RS256),
cert_store=DirectoryStore(pem_path),
cert_mapping=HostDirectoryMap(pem_path),
sub_endpoint=serverFromString(reactor, sub))
|
[
"def",
"_parse",
"(",
"reactor",
",",
"directory",
",",
"pemdir",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"def",
"colon_join",
"(",
"items",
")",
":",
"return",
"':'",
".",
"join",
"(",
"[",
"item",
".",
"replace",
"(",
"':'",
",",
"'\\\\:'",
")",
"for",
"item",
"in",
"items",
"]",
")",
"sub",
"=",
"colon_join",
"(",
"list",
"(",
"args",
")",
"+",
"[",
"'='",
".",
"join",
"(",
"item",
")",
"for",
"item",
"in",
"kwargs",
".",
"items",
"(",
")",
"]",
")",
"pem_path",
"=",
"FilePath",
"(",
"pemdir",
")",
".",
"asTextMode",
"(",
")",
"acme_key",
"=",
"load_or_create_client_key",
"(",
"pem_path",
")",
"return",
"AutoTLSEndpoint",
"(",
"reactor",
"=",
"reactor",
",",
"directory",
"=",
"directory",
",",
"client_creator",
"=",
"partial",
"(",
"Client",
".",
"from_url",
",",
"key",
"=",
"acme_key",
",",
"alg",
"=",
"RS256",
")",
",",
"cert_store",
"=",
"DirectoryStore",
"(",
"pem_path",
")",
",",
"cert_mapping",
"=",
"HostDirectoryMap",
"(",
"pem_path",
")",
",",
"sub_endpoint",
"=",
"serverFromString",
"(",
"reactor",
",",
"sub",
")",
")"
] |
Parse a txacme endpoint description.
:param reactor: The Twisted reactor.
:param directory: ``twisted.python.url.URL`` for the ACME directory to use
for issuing certs.
:param str pemdir: The path to the certificate directory to use.
|
[
"Parse",
"a",
"txacme",
"endpoint",
"description",
"."
] |
9478381cc63c6d53d14bf8db8407c923f472989a
|
https://github.com/twisted/txacme/blob/9478381cc63c6d53d14bf8db8407c923f472989a/src/txacme/endpoint.py#L157-L177
|
train
|
alexwlchan/lazyreader
|
lazyreader.py
|
lazyread
|
def lazyread(f, delimiter):
"""
Generator which continually reads ``f`` to the next instance
of ``delimiter``.
This allows you to do batch processing on the contents of ``f`` without
loading the entire file into memory.
:param f: Any file-like object which has a ``.read()`` method.
:param delimiter: Delimiter on which to split up the file.
"""
# Get an empty string to start with. We need to make sure that if the
# file is opened in binary mode, we're using byte strings, and similar
# for Unicode. Otherwise trying to update the running string will
# hit a TypeError.
try:
running = f.read(0)
except Exception as e:
# The boto3 APIs don't let you read zero bytes from an S3 object, but
# they always return bytestrings, so in this case we know what to
# start with.
if e.__class__.__name__ == 'IncompleteReadError':
running = b''
else:
raise
while True:
new_data = f.read(1024)
# When a call to read() returns nothing, we're at the end of the file.
if not new_data:
yield running
return
# Otherwise, update the running stream and look for instances of
# the delimiter. Remember we might have read more than one delimiter
# since the last time we checked
running += new_data
while delimiter in running:
curr, running = running.split(delimiter, 1)
yield curr + delimiter
|
python
|
def lazyread(f, delimiter):
"""
Generator which continually reads ``f`` to the next instance
of ``delimiter``.
This allows you to do batch processing on the contents of ``f`` without
loading the entire file into memory.
:param f: Any file-like object which has a ``.read()`` method.
:param delimiter: Delimiter on which to split up the file.
"""
# Get an empty string to start with. We need to make sure that if the
# file is opened in binary mode, we're using byte strings, and similar
# for Unicode. Otherwise trying to update the running string will
# hit a TypeError.
try:
running = f.read(0)
except Exception as e:
# The boto3 APIs don't let you read zero bytes from an S3 object, but
# they always return bytestrings, so in this case we know what to
# start with.
if e.__class__.__name__ == 'IncompleteReadError':
running = b''
else:
raise
while True:
new_data = f.read(1024)
# When a call to read() returns nothing, we're at the end of the file.
if not new_data:
yield running
return
# Otherwise, update the running stream and look for instances of
# the delimiter. Remember we might have read more than one delimiter
# since the last time we checked
running += new_data
while delimiter in running:
curr, running = running.split(delimiter, 1)
yield curr + delimiter
|
[
"def",
"lazyread",
"(",
"f",
",",
"delimiter",
")",
":",
"# Get an empty string to start with. We need to make sure that if the",
"# file is opened in binary mode, we're using byte strings, and similar",
"# for Unicode. Otherwise trying to update the running string will",
"# hit a TypeError.",
"try",
":",
"running",
"=",
"f",
".",
"read",
"(",
"0",
")",
"except",
"Exception",
"as",
"e",
":",
"# The boto3 APIs don't let you read zero bytes from an S3 object, but",
"# they always return bytestrings, so in this case we know what to",
"# start with.",
"if",
"e",
".",
"__class__",
".",
"__name__",
"==",
"'IncompleteReadError'",
":",
"running",
"=",
"b''",
"else",
":",
"raise",
"while",
"True",
":",
"new_data",
"=",
"f",
".",
"read",
"(",
"1024",
")",
"# When a call to read() returns nothing, we're at the end of the file.",
"if",
"not",
"new_data",
":",
"yield",
"running",
"return",
"# Otherwise, update the running stream and look for instances of",
"# the delimiter. Remember we might have read more than one delimiter",
"# since the last time we checked",
"running",
"+=",
"new_data",
"while",
"delimiter",
"in",
"running",
":",
"curr",
",",
"running",
"=",
"running",
".",
"split",
"(",
"delimiter",
",",
"1",
")",
"yield",
"curr",
"+",
"delimiter"
] |
Generator which continually reads ``f`` to the next instance
of ``delimiter``.
This allows you to do batch processing on the contents of ``f`` without
loading the entire file into memory.
:param f: Any file-like object which has a ``.read()`` method.
:param delimiter: Delimiter on which to split up the file.
|
[
"Generator",
"which",
"continually",
"reads",
"f",
"to",
"the",
"next",
"instance",
"of",
"delimiter",
"."
] |
918c408efba015efc1d67b05d1e4b373ac9d1192
|
https://github.com/alexwlchan/lazyreader/blob/918c408efba015efc1d67b05d1e4b373ac9d1192/lazyreader.py#L3-L44
|
train
|
twisted/txacme
|
src/txacme/util.py
|
generate_private_key
|
def generate_private_key(key_type):
"""
Generate a random private key using sensible parameters.
:param str key_type: The type of key to generate. One of: ``rsa``.
"""
if key_type == u'rsa':
return rsa.generate_private_key(
public_exponent=65537, key_size=2048, backend=default_backend())
raise ValueError(key_type)
|
python
|
def generate_private_key(key_type):
"""
Generate a random private key using sensible parameters.
:param str key_type: The type of key to generate. One of: ``rsa``.
"""
if key_type == u'rsa':
return rsa.generate_private_key(
public_exponent=65537, key_size=2048, backend=default_backend())
raise ValueError(key_type)
|
[
"def",
"generate_private_key",
"(",
"key_type",
")",
":",
"if",
"key_type",
"==",
"u'rsa'",
":",
"return",
"rsa",
".",
"generate_private_key",
"(",
"public_exponent",
"=",
"65537",
",",
"key_size",
"=",
"2048",
",",
"backend",
"=",
"default_backend",
"(",
")",
")",
"raise",
"ValueError",
"(",
"key_type",
")"
] |
Generate a random private key using sensible parameters.
:param str key_type: The type of key to generate. One of: ``rsa``.
|
[
"Generate",
"a",
"random",
"private",
"key",
"using",
"sensible",
"parameters",
"."
] |
9478381cc63c6d53d14bf8db8407c923f472989a
|
https://github.com/twisted/txacme/blob/9478381cc63c6d53d14bf8db8407c923f472989a/src/txacme/util.py#L20-L29
|
train
|
twisted/txacme
|
src/txacme/util.py
|
generate_tls_sni_01_cert
|
def generate_tls_sni_01_cert(server_name, key_type=u'rsa',
_generate_private_key=None):
"""
Generate a certificate/key pair for responding to a tls-sni-01 challenge.
:param str server_name: The SAN the certificate should have.
:param str key_type: The type of key to generate; usually not necessary.
:rtype: ``Tuple[`~cryptography.x509.Certificate`, PrivateKey]``
:return: A tuple of the certificate and private key.
"""
key = (_generate_private_key or generate_private_key)(key_type)
name = x509.Name([
x509.NameAttribute(NameOID.COMMON_NAME, u'acme.invalid')])
cert = (
x509.CertificateBuilder()
.subject_name(name)
.issuer_name(name)
.not_valid_before(datetime.now() - timedelta(seconds=3600))
.not_valid_after(datetime.now() + timedelta(seconds=3600))
.serial_number(int(uuid.uuid4()))
.public_key(key.public_key())
.add_extension(
x509.SubjectAlternativeName([x509.DNSName(server_name)]),
critical=False)
.sign(
private_key=key,
algorithm=hashes.SHA256(),
backend=default_backend())
)
return (cert, key)
|
python
|
def generate_tls_sni_01_cert(server_name, key_type=u'rsa',
_generate_private_key=None):
"""
Generate a certificate/key pair for responding to a tls-sni-01 challenge.
:param str server_name: The SAN the certificate should have.
:param str key_type: The type of key to generate; usually not necessary.
:rtype: ``Tuple[`~cryptography.x509.Certificate`, PrivateKey]``
:return: A tuple of the certificate and private key.
"""
key = (_generate_private_key or generate_private_key)(key_type)
name = x509.Name([
x509.NameAttribute(NameOID.COMMON_NAME, u'acme.invalid')])
cert = (
x509.CertificateBuilder()
.subject_name(name)
.issuer_name(name)
.not_valid_before(datetime.now() - timedelta(seconds=3600))
.not_valid_after(datetime.now() + timedelta(seconds=3600))
.serial_number(int(uuid.uuid4()))
.public_key(key.public_key())
.add_extension(
x509.SubjectAlternativeName([x509.DNSName(server_name)]),
critical=False)
.sign(
private_key=key,
algorithm=hashes.SHA256(),
backend=default_backend())
)
return (cert, key)
|
[
"def",
"generate_tls_sni_01_cert",
"(",
"server_name",
",",
"key_type",
"=",
"u'rsa'",
",",
"_generate_private_key",
"=",
"None",
")",
":",
"key",
"=",
"(",
"_generate_private_key",
"or",
"generate_private_key",
")",
"(",
"key_type",
")",
"name",
"=",
"x509",
".",
"Name",
"(",
"[",
"x509",
".",
"NameAttribute",
"(",
"NameOID",
".",
"COMMON_NAME",
",",
"u'acme.invalid'",
")",
"]",
")",
"cert",
"=",
"(",
"x509",
".",
"CertificateBuilder",
"(",
")",
".",
"subject_name",
"(",
"name",
")",
".",
"issuer_name",
"(",
"name",
")",
".",
"not_valid_before",
"(",
"datetime",
".",
"now",
"(",
")",
"-",
"timedelta",
"(",
"seconds",
"=",
"3600",
")",
")",
".",
"not_valid_after",
"(",
"datetime",
".",
"now",
"(",
")",
"+",
"timedelta",
"(",
"seconds",
"=",
"3600",
")",
")",
".",
"serial_number",
"(",
"int",
"(",
"uuid",
".",
"uuid4",
"(",
")",
")",
")",
".",
"public_key",
"(",
"key",
".",
"public_key",
"(",
")",
")",
".",
"add_extension",
"(",
"x509",
".",
"SubjectAlternativeName",
"(",
"[",
"x509",
".",
"DNSName",
"(",
"server_name",
")",
"]",
")",
",",
"critical",
"=",
"False",
")",
".",
"sign",
"(",
"private_key",
"=",
"key",
",",
"algorithm",
"=",
"hashes",
".",
"SHA256",
"(",
")",
",",
"backend",
"=",
"default_backend",
"(",
")",
")",
")",
"return",
"(",
"cert",
",",
"key",
")"
] |
Generate a certificate/key pair for responding to a tls-sni-01 challenge.
:param str server_name: The SAN the certificate should have.
:param str key_type: The type of key to generate; usually not necessary.
:rtype: ``Tuple[`~cryptography.x509.Certificate`, PrivateKey]``
:return: A tuple of the certificate and private key.
|
[
"Generate",
"a",
"certificate",
"/",
"key",
"pair",
"for",
"responding",
"to",
"a",
"tls",
"-",
"sni",
"-",
"01",
"challenge",
"."
] |
9478381cc63c6d53d14bf8db8407c923f472989a
|
https://github.com/twisted/txacme/blob/9478381cc63c6d53d14bf8db8407c923f472989a/src/txacme/util.py#L32-L62
|
train
|
twisted/txacme
|
src/txacme/util.py
|
tap
|
def tap(f):
"""
"Tap" a Deferred callback chain with a function whose return value is
ignored.
"""
@wraps(f)
def _cb(res, *a, **kw):
d = maybeDeferred(f, res, *a, **kw)
d.addCallback(lambda ignored: res)
return d
return _cb
|
python
|
def tap(f):
"""
"Tap" a Deferred callback chain with a function whose return value is
ignored.
"""
@wraps(f)
def _cb(res, *a, **kw):
d = maybeDeferred(f, res, *a, **kw)
d.addCallback(lambda ignored: res)
return d
return _cb
|
[
"def",
"tap",
"(",
"f",
")",
":",
"@",
"wraps",
"(",
"f",
")",
"def",
"_cb",
"(",
"res",
",",
"*",
"a",
",",
"*",
"*",
"kw",
")",
":",
"d",
"=",
"maybeDeferred",
"(",
"f",
",",
"res",
",",
"*",
"a",
",",
"*",
"*",
"kw",
")",
"d",
".",
"addCallback",
"(",
"lambda",
"ignored",
":",
"res",
")",
"return",
"d",
"return",
"_cb"
] |
"Tap" a Deferred callback chain with a function whose return value is
ignored.
|
[
"Tap",
"a",
"Deferred",
"callback",
"chain",
"with",
"a",
"function",
"whose",
"return",
"value",
"is",
"ignored",
"."
] |
9478381cc63c6d53d14bf8db8407c923f472989a
|
https://github.com/twisted/txacme/blob/9478381cc63c6d53d14bf8db8407c923f472989a/src/txacme/util.py#L65-L75
|
train
|
twisted/txacme
|
src/txacme/util.py
|
decode_csr
|
def decode_csr(b64der):
"""
Decode JOSE Base-64 DER-encoded CSR.
:param str b64der: The encoded CSR.
:rtype: `cryptography.x509.CertificateSigningRequest`
:return: The decoded CSR.
"""
try:
return x509.load_der_x509_csr(
decode_b64jose(b64der), default_backend())
except ValueError as error:
raise DeserializationError(error)
|
python
|
def decode_csr(b64der):
"""
Decode JOSE Base-64 DER-encoded CSR.
:param str b64der: The encoded CSR.
:rtype: `cryptography.x509.CertificateSigningRequest`
:return: The decoded CSR.
"""
try:
return x509.load_der_x509_csr(
decode_b64jose(b64der), default_backend())
except ValueError as error:
raise DeserializationError(error)
|
[
"def",
"decode_csr",
"(",
"b64der",
")",
":",
"try",
":",
"return",
"x509",
".",
"load_der_x509_csr",
"(",
"decode_b64jose",
"(",
"b64der",
")",
",",
"default_backend",
"(",
")",
")",
"except",
"ValueError",
"as",
"error",
":",
"raise",
"DeserializationError",
"(",
"error",
")"
] |
Decode JOSE Base-64 DER-encoded CSR.
:param str b64der: The encoded CSR.
:rtype: `cryptography.x509.CertificateSigningRequest`
:return: The decoded CSR.
|
[
"Decode",
"JOSE",
"Base",
"-",
"64",
"DER",
"-",
"encoded",
"CSR",
"."
] |
9478381cc63c6d53d14bf8db8407c923f472989a
|
https://github.com/twisted/txacme/blob/9478381cc63c6d53d14bf8db8407c923f472989a/src/txacme/util.py#L89-L102
|
train
|
twisted/txacme
|
src/txacme/util.py
|
csr_for_names
|
def csr_for_names(names, key):
"""
Generate a certificate signing request for the given names and private key.
.. seealso:: `acme.client.Client.request_issuance`
.. seealso:: `generate_private_key`
:param ``List[str]``: One or more names (subjectAltName) for which to
request a certificate.
:param key: A Cryptography private key object.
:rtype: `cryptography.x509.CertificateSigningRequest`
:return: The certificate request message.
"""
if len(names) == 0:
raise ValueError('Must have at least one name')
if len(names[0]) > 64:
common_name = u'san.too.long.invalid'
else:
common_name = names[0]
return (
x509.CertificateSigningRequestBuilder()
.subject_name(x509.Name([
x509.NameAttribute(NameOID.COMMON_NAME, common_name)]))
.add_extension(
x509.SubjectAlternativeName(list(map(x509.DNSName, names))),
critical=False)
.sign(key, hashes.SHA256(), default_backend()))
|
python
|
def csr_for_names(names, key):
"""
Generate a certificate signing request for the given names and private key.
.. seealso:: `acme.client.Client.request_issuance`
.. seealso:: `generate_private_key`
:param ``List[str]``: One or more names (subjectAltName) for which to
request a certificate.
:param key: A Cryptography private key object.
:rtype: `cryptography.x509.CertificateSigningRequest`
:return: The certificate request message.
"""
if len(names) == 0:
raise ValueError('Must have at least one name')
if len(names[0]) > 64:
common_name = u'san.too.long.invalid'
else:
common_name = names[0]
return (
x509.CertificateSigningRequestBuilder()
.subject_name(x509.Name([
x509.NameAttribute(NameOID.COMMON_NAME, common_name)]))
.add_extension(
x509.SubjectAlternativeName(list(map(x509.DNSName, names))),
critical=False)
.sign(key, hashes.SHA256(), default_backend()))
|
[
"def",
"csr_for_names",
"(",
"names",
",",
"key",
")",
":",
"if",
"len",
"(",
"names",
")",
"==",
"0",
":",
"raise",
"ValueError",
"(",
"'Must have at least one name'",
")",
"if",
"len",
"(",
"names",
"[",
"0",
"]",
")",
">",
"64",
":",
"common_name",
"=",
"u'san.too.long.invalid'",
"else",
":",
"common_name",
"=",
"names",
"[",
"0",
"]",
"return",
"(",
"x509",
".",
"CertificateSigningRequestBuilder",
"(",
")",
".",
"subject_name",
"(",
"x509",
".",
"Name",
"(",
"[",
"x509",
".",
"NameAttribute",
"(",
"NameOID",
".",
"COMMON_NAME",
",",
"common_name",
")",
"]",
")",
")",
".",
"add_extension",
"(",
"x509",
".",
"SubjectAlternativeName",
"(",
"list",
"(",
"map",
"(",
"x509",
".",
"DNSName",
",",
"names",
")",
")",
")",
",",
"critical",
"=",
"False",
")",
".",
"sign",
"(",
"key",
",",
"hashes",
".",
"SHA256",
"(",
")",
",",
"default_backend",
"(",
")",
")",
")"
] |
Generate a certificate signing request for the given names and private key.
.. seealso:: `acme.client.Client.request_issuance`
.. seealso:: `generate_private_key`
:param ``List[str]``: One or more names (subjectAltName) for which to
request a certificate.
:param key: A Cryptography private key object.
:rtype: `cryptography.x509.CertificateSigningRequest`
:return: The certificate request message.
|
[
"Generate",
"a",
"certificate",
"signing",
"request",
"for",
"the",
"given",
"names",
"and",
"private",
"key",
"."
] |
9478381cc63c6d53d14bf8db8407c923f472989a
|
https://github.com/twisted/txacme/blob/9478381cc63c6d53d14bf8db8407c923f472989a/src/txacme/util.py#L105-L133
|
train
|
samuelcolvin/python-devtools
|
devtools/debug.py
|
Debug._wrap_parse
|
def _wrap_parse(code, filename):
"""
async wrapper is required to avoid await calls raising a SyntaxError
"""
code = 'async def wrapper():\n' + indent(code, ' ')
return ast.parse(code, filename=filename).body[0].body[0].value
|
python
|
def _wrap_parse(code, filename):
"""
async wrapper is required to avoid await calls raising a SyntaxError
"""
code = 'async def wrapper():\n' + indent(code, ' ')
return ast.parse(code, filename=filename).body[0].body[0].value
|
[
"def",
"_wrap_parse",
"(",
"code",
",",
"filename",
")",
":",
"code",
"=",
"'async def wrapper():\\n'",
"+",
"indent",
"(",
"code",
",",
"' '",
")",
"return",
"ast",
".",
"parse",
"(",
"code",
",",
"filename",
"=",
"filename",
")",
".",
"body",
"[",
"0",
"]",
".",
"body",
"[",
"0",
"]",
".",
"value"
] |
async wrapper is required to avoid await calls raising a SyntaxError
|
[
"async",
"wrapper",
"is",
"required",
"to",
"avoid",
"await",
"calls",
"raising",
"a",
"SyntaxError"
] |
fb0021b3e6815348a28c1d2bf11b50b8f0bd511a
|
https://github.com/samuelcolvin/python-devtools/blob/fb0021b3e6815348a28c1d2bf11b50b8f0bd511a/devtools/debug.py#L274-L279
|
train
|
twisted/txacme
|
docs/conf.py
|
linkcode_resolve
|
def linkcode_resolve(domain, info):
"""
Determine the URL corresponding to Python object
"""
if domain != 'py':
return None
modname = info['module']
fullname = info['fullname']
submod = sys.modules.get(modname)
if submod is None:
return None
obj = submod
for part in fullname.split('.'):
try:
obj = getattr(obj, part)
except:
return None
try:
fn = inspect.getsourcefile(obj)
except:
fn = None
if not fn:
return None
try:
source, lineno = inspect.findsource(obj)
except:
lineno = None
if lineno:
linespec = "#L%d" % (lineno + 1)
else:
linespec = ""
fn = relpath(fn, start='..')
return "https://github.com/mithrandi/txacme/blob/%s/%s%s" % (
txacme_version_info['full-revisionid'], fn, linespec)
|
python
|
def linkcode_resolve(domain, info):
"""
Determine the URL corresponding to Python object
"""
if domain != 'py':
return None
modname = info['module']
fullname = info['fullname']
submod = sys.modules.get(modname)
if submod is None:
return None
obj = submod
for part in fullname.split('.'):
try:
obj = getattr(obj, part)
except:
return None
try:
fn = inspect.getsourcefile(obj)
except:
fn = None
if not fn:
return None
try:
source, lineno = inspect.findsource(obj)
except:
lineno = None
if lineno:
linespec = "#L%d" % (lineno + 1)
else:
linespec = ""
fn = relpath(fn, start='..')
return "https://github.com/mithrandi/txacme/blob/%s/%s%s" % (
txacme_version_info['full-revisionid'], fn, linespec)
|
[
"def",
"linkcode_resolve",
"(",
"domain",
",",
"info",
")",
":",
"if",
"domain",
"!=",
"'py'",
":",
"return",
"None",
"modname",
"=",
"info",
"[",
"'module'",
"]",
"fullname",
"=",
"info",
"[",
"'fullname'",
"]",
"submod",
"=",
"sys",
".",
"modules",
".",
"get",
"(",
"modname",
")",
"if",
"submod",
"is",
"None",
":",
"return",
"None",
"obj",
"=",
"submod",
"for",
"part",
"in",
"fullname",
".",
"split",
"(",
"'.'",
")",
":",
"try",
":",
"obj",
"=",
"getattr",
"(",
"obj",
",",
"part",
")",
"except",
":",
"return",
"None",
"try",
":",
"fn",
"=",
"inspect",
".",
"getsourcefile",
"(",
"obj",
")",
"except",
":",
"fn",
"=",
"None",
"if",
"not",
"fn",
":",
"return",
"None",
"try",
":",
"source",
",",
"lineno",
"=",
"inspect",
".",
"findsource",
"(",
"obj",
")",
"except",
":",
"lineno",
"=",
"None",
"if",
"lineno",
":",
"linespec",
"=",
"\"#L%d\"",
"%",
"(",
"lineno",
"+",
"1",
")",
"else",
":",
"linespec",
"=",
"\"\"",
"fn",
"=",
"relpath",
"(",
"fn",
",",
"start",
"=",
"'..'",
")",
"return",
"\"https://github.com/mithrandi/txacme/blob/%s/%s%s\"",
"%",
"(",
"txacme_version_info",
"[",
"'full-revisionid'",
"]",
",",
"fn",
",",
"linespec",
")"
] |
Determine the URL corresponding to Python object
|
[
"Determine",
"the",
"URL",
"corresponding",
"to",
"Python",
"object"
] |
9478381cc63c6d53d14bf8db8407c923f472989a
|
https://github.com/twisted/txacme/blob/9478381cc63c6d53d14bf8db8407c923f472989a/docs/conf.py#L326-L359
|
train
|
cga-harvard/Hypermap-Registry
|
hypermap/aggregator/solr.py
|
SolrHypermap.layers_to_solr
|
def layers_to_solr(self, layers):
"""
Sync n layers in Solr.
"""
layers_dict_list = []
layers_success_ids = []
layers_errors_ids = []
for layer in layers:
layer_dict, message = layer2dict(layer)
if not layer_dict:
layers_errors_ids.append([layer.id, message])
LOGGER.error(message)
else:
layers_dict_list.append(layer_dict)
layers_success_ids.append(layer.id)
layers_json = json.dumps(layers_dict_list)
try:
url_solr_update = '%s/solr/hypermap/update/json/docs' % SEARCH_URL
headers = {"content-type": "application/json"}
params = {"commitWithin": 1500}
requests.post(url_solr_update, data=layers_json, params=params, headers=headers)
LOGGER.info('Solr synced for the given layers')
except Exception:
message = "Error saving solr records: %s" % sys.exc_info()[1]
layers_errors_ids.append([-1, message])
LOGGER.error(message)
return False, layers_errors_ids
return True, layers_errors_ids
|
python
|
def layers_to_solr(self, layers):
"""
Sync n layers in Solr.
"""
layers_dict_list = []
layers_success_ids = []
layers_errors_ids = []
for layer in layers:
layer_dict, message = layer2dict(layer)
if not layer_dict:
layers_errors_ids.append([layer.id, message])
LOGGER.error(message)
else:
layers_dict_list.append(layer_dict)
layers_success_ids.append(layer.id)
layers_json = json.dumps(layers_dict_list)
try:
url_solr_update = '%s/solr/hypermap/update/json/docs' % SEARCH_URL
headers = {"content-type": "application/json"}
params = {"commitWithin": 1500}
requests.post(url_solr_update, data=layers_json, params=params, headers=headers)
LOGGER.info('Solr synced for the given layers')
except Exception:
message = "Error saving solr records: %s" % sys.exc_info()[1]
layers_errors_ids.append([-1, message])
LOGGER.error(message)
return False, layers_errors_ids
return True, layers_errors_ids
|
[
"def",
"layers_to_solr",
"(",
"self",
",",
"layers",
")",
":",
"layers_dict_list",
"=",
"[",
"]",
"layers_success_ids",
"=",
"[",
"]",
"layers_errors_ids",
"=",
"[",
"]",
"for",
"layer",
"in",
"layers",
":",
"layer_dict",
",",
"message",
"=",
"layer2dict",
"(",
"layer",
")",
"if",
"not",
"layer_dict",
":",
"layers_errors_ids",
".",
"append",
"(",
"[",
"layer",
".",
"id",
",",
"message",
"]",
")",
"LOGGER",
".",
"error",
"(",
"message",
")",
"else",
":",
"layers_dict_list",
".",
"append",
"(",
"layer_dict",
")",
"layers_success_ids",
".",
"append",
"(",
"layer",
".",
"id",
")",
"layers_json",
"=",
"json",
".",
"dumps",
"(",
"layers_dict_list",
")",
"try",
":",
"url_solr_update",
"=",
"'%s/solr/hypermap/update/json/docs'",
"%",
"SEARCH_URL",
"headers",
"=",
"{",
"\"content-type\"",
":",
"\"application/json\"",
"}",
"params",
"=",
"{",
"\"commitWithin\"",
":",
"1500",
"}",
"requests",
".",
"post",
"(",
"url_solr_update",
",",
"data",
"=",
"layers_json",
",",
"params",
"=",
"params",
",",
"headers",
"=",
"headers",
")",
"LOGGER",
".",
"info",
"(",
"'Solr synced for the given layers'",
")",
"except",
"Exception",
":",
"message",
"=",
"\"Error saving solr records: %s\"",
"%",
"sys",
".",
"exc_info",
"(",
")",
"[",
"1",
"]",
"layers_errors_ids",
".",
"append",
"(",
"[",
"-",
"1",
",",
"message",
"]",
")",
"LOGGER",
".",
"error",
"(",
"message",
")",
"return",
"False",
",",
"layers_errors_ids",
"return",
"True",
",",
"layers_errors_ids"
] |
Sync n layers in Solr.
|
[
"Sync",
"n",
"layers",
"in",
"Solr",
"."
] |
899a5385b15af7fba190ab4fae1d41e47d155a1b
|
https://github.com/cga-harvard/Hypermap-Registry/blob/899a5385b15af7fba190ab4fae1d41e47d155a1b/hypermap/aggregator/solr.py#L19-L50
|
train
|
cga-harvard/Hypermap-Registry
|
hypermap/aggregator/solr.py
|
SolrHypermap.layer_to_solr
|
def layer_to_solr(self, layer):
"""
Sync a layer in Solr.
"""
success = True
message = 'Synced layer id %s to Solr' % layer.id
layer_dict, message = layer2dict(layer)
if not layer_dict:
success = False
else:
layer_json = json.dumps(layer_dict)
try:
url_solr_update = '%s/solr/hypermap/update/json/docs' % SEARCH_URL
headers = {"content-type": "application/json"}
params = {"commitWithin": 1500}
res = requests.post(url_solr_update, data=layer_json, params=params, headers=headers)
res = res.json()
if 'error' in res:
success = False
message = "Error syncing layer id %s to Solr: %s" % (layer.id, res["error"].get("msg"))
except Exception, e:
success = False
message = "Error syncing layer id %s to Solr: %s" % (layer.id, sys.exc_info()[1])
LOGGER.error(e, exc_info=True)
if success:
LOGGER.info(message)
else:
LOGGER.error(message)
return success, message
|
python
|
def layer_to_solr(self, layer):
"""
Sync a layer in Solr.
"""
success = True
message = 'Synced layer id %s to Solr' % layer.id
layer_dict, message = layer2dict(layer)
if not layer_dict:
success = False
else:
layer_json = json.dumps(layer_dict)
try:
url_solr_update = '%s/solr/hypermap/update/json/docs' % SEARCH_URL
headers = {"content-type": "application/json"}
params = {"commitWithin": 1500}
res = requests.post(url_solr_update, data=layer_json, params=params, headers=headers)
res = res.json()
if 'error' in res:
success = False
message = "Error syncing layer id %s to Solr: %s" % (layer.id, res["error"].get("msg"))
except Exception, e:
success = False
message = "Error syncing layer id %s to Solr: %s" % (layer.id, sys.exc_info()[1])
LOGGER.error(e, exc_info=True)
if success:
LOGGER.info(message)
else:
LOGGER.error(message)
return success, message
|
[
"def",
"layer_to_solr",
"(",
"self",
",",
"layer",
")",
":",
"success",
"=",
"True",
"message",
"=",
"'Synced layer id %s to Solr'",
"%",
"layer",
".",
"id",
"layer_dict",
",",
"message",
"=",
"layer2dict",
"(",
"layer",
")",
"if",
"not",
"layer_dict",
":",
"success",
"=",
"False",
"else",
":",
"layer_json",
"=",
"json",
".",
"dumps",
"(",
"layer_dict",
")",
"try",
":",
"url_solr_update",
"=",
"'%s/solr/hypermap/update/json/docs'",
"%",
"SEARCH_URL",
"headers",
"=",
"{",
"\"content-type\"",
":",
"\"application/json\"",
"}",
"params",
"=",
"{",
"\"commitWithin\"",
":",
"1500",
"}",
"res",
"=",
"requests",
".",
"post",
"(",
"url_solr_update",
",",
"data",
"=",
"layer_json",
",",
"params",
"=",
"params",
",",
"headers",
"=",
"headers",
")",
"res",
"=",
"res",
".",
"json",
"(",
")",
"if",
"'error'",
"in",
"res",
":",
"success",
"=",
"False",
"message",
"=",
"\"Error syncing layer id %s to Solr: %s\"",
"%",
"(",
"layer",
".",
"id",
",",
"res",
"[",
"\"error\"",
"]",
".",
"get",
"(",
"\"msg\"",
")",
")",
"except",
"Exception",
",",
"e",
":",
"success",
"=",
"False",
"message",
"=",
"\"Error syncing layer id %s to Solr: %s\"",
"%",
"(",
"layer",
".",
"id",
",",
"sys",
".",
"exc_info",
"(",
")",
"[",
"1",
"]",
")",
"LOGGER",
".",
"error",
"(",
"e",
",",
"exc_info",
"=",
"True",
")",
"if",
"success",
":",
"LOGGER",
".",
"info",
"(",
"message",
")",
"else",
":",
"LOGGER",
".",
"error",
"(",
"message",
")",
"return",
"success",
",",
"message"
] |
Sync a layer in Solr.
|
[
"Sync",
"a",
"layer",
"in",
"Solr",
"."
] |
899a5385b15af7fba190ab4fae1d41e47d155a1b
|
https://github.com/cga-harvard/Hypermap-Registry/blob/899a5385b15af7fba190ab4fae1d41e47d155a1b/hypermap/aggregator/solr.py#L52-L81
|
train
|
cga-harvard/Hypermap-Registry
|
hypermap/aggregator/solr.py
|
SolrHypermap.clear_solr
|
def clear_solr(self, catalog="hypermap"):
"""Clear all indexes in the solr core"""
solr_url = "{0}/solr/{1}".format(SEARCH_URL, catalog)
solr = pysolr.Solr(solr_url, timeout=60)
solr.delete(q='*:*')
LOGGER.debug('Solr core cleared')
|
python
|
def clear_solr(self, catalog="hypermap"):
"""Clear all indexes in the solr core"""
solr_url = "{0}/solr/{1}".format(SEARCH_URL, catalog)
solr = pysolr.Solr(solr_url, timeout=60)
solr.delete(q='*:*')
LOGGER.debug('Solr core cleared')
|
[
"def",
"clear_solr",
"(",
"self",
",",
"catalog",
"=",
"\"hypermap\"",
")",
":",
"solr_url",
"=",
"\"{0}/solr/{1}\"",
".",
"format",
"(",
"SEARCH_URL",
",",
"catalog",
")",
"solr",
"=",
"pysolr",
".",
"Solr",
"(",
"solr_url",
",",
"timeout",
"=",
"60",
")",
"solr",
".",
"delete",
"(",
"q",
"=",
"'*:*'",
")",
"LOGGER",
".",
"debug",
"(",
"'Solr core cleared'",
")"
] |
Clear all indexes in the solr core
|
[
"Clear",
"all",
"indexes",
"in",
"the",
"solr",
"core"
] |
899a5385b15af7fba190ab4fae1d41e47d155a1b
|
https://github.com/cga-harvard/Hypermap-Registry/blob/899a5385b15af7fba190ab4fae1d41e47d155a1b/hypermap/aggregator/solr.py#L83-L88
|
train
|
cga-harvard/Hypermap-Registry
|
hypermap/aggregator/solr.py
|
SolrHypermap.update_schema
|
def update_schema(self, catalog="hypermap"):
"""
set the mapping in solr.
:param catalog: core
:return:
"""
schema_url = "{0}/solr/{1}/schema".format(SEARCH_URL, catalog)
print schema_url
# create a special type to draw better heatmaps.
location_rpt_quad_5m_payload = {
"add-field-type": {
"name": "location_rpt_quad_5m",
"class": "solr.SpatialRecursivePrefixTreeFieldType",
"geo": False,
"worldBounds": "ENVELOPE(-180, 180, 180, -180)",
"prefixTree": "packedQuad",
"distErrPct": "0.025",
"maxDistErr": "0.001",
"distanceUnits": "degrees"
}
}
requests.post(schema_url, json=location_rpt_quad_5m_payload)
# create a special type to implement ngrm text for search.
text_ngrm_payload = {
"add-field-type": {
"name": "text_ngrm",
"class": "solr.TextField",
"positionIncrementGap": "100",
"indexAnalyzer": {
"tokenizer": {
"class": "solr.WhitespaceTokenizerFactory"
},
"filters": [
{
"class": "solr.NGramFilterFactory",
"minGramSize": "1",
"maxGramSize": "50"
}, {
"class": "solr.LowerCaseFilterFactory"
}
]
},
"queryAnalyzer": {
"tokenizer": {
"class": "solr.WhitespaceTokenizerFactory"
},
"filters": [
{
"class": "solr.LowerCaseFilterFactory",
}
]
}
}
}
requests.post(schema_url, json=text_ngrm_payload)
# now the other fields
fields = [
{"name": "abstract", "type": "string"},
{"name": "abstract_txt", "type": "text_ngrm"},
{"name": "area", "type": "pdouble"},
{"name": "availability", "type": "string"},
{"name": "bbox", "type": "location_rpt_quad_5m"},
{"name": "domain_name", "type": "string"},
{"name": "is_public", "type": "boolean"},
{"name": "is_valid", "type": "boolean"},
{"name": "keywords", "type": "string", "multiValued": True},
{"name": "last_status", "type": "boolean"},
{"name": "layer_category", "type": "string"},
{"name": "layer_date", "type": "pdate", "docValues": True},
{"name": "layer_datetype", "type": "string"},
{"name": "layer_id", "type": "plong"},
{"name": "layer_originator", "type": "string"},
{"name": "layer_originator_txt", "type": "text_ngrm"},
{"name": "layer_username", "type": "string"},
{"name": "layer_username_txt", "type": "text_ngrm"},
{"name": "location", "type": "string"},
{"name": "max_x", "type": "pdouble"},
{"name": "max_y", "type": "pdouble"},
{"name": "min_x", "type": "pdouble"},
{"name": "min_y", "type": "pdouble"},
{"name": "name", "type": "string"},
{"name": "recent_reliability", "type": "pdouble"},
{"name": "reliability", "type": "pdouble"},
{"name": "service_id", "type": "plong"},
{"name": "service_type", "type": "string"},
{"name": "srs", "type": "string", "multiValued": True},
{"name": "tile_url", "type": "string"},
{"name": "title", "type": "string"},
{"name": "title_txt", "type": "text_ngrm"},
{"name": "type", "type": "string"},
{"name": "url", "type": "string"},
{"name": "uuid", "type": "string", "required": True},
{"name": "centroid_y", "type": "pdouble"},
{"name": "centroid_x", "type": "pdouble"},
]
copy_fields = [
{"source": "*", "dest": "_text_"},
{"source": "title", "dest": "title_txt"},
{"source": "abstract", "dest": "abstract_txt"},
{"source": "layer_originator", "dest": "layer_originator_txt"},
{"source": "layer_username", "dest": "layer_username_txt"},
]
headers = {
"Content-type": "application/json"
}
for field in fields:
data = {
"add-field": field
}
requests.post(schema_url, json=data, headers=headers)
for field in copy_fields:
data = {
"add-copy-field": field
}
print data
requests.post(schema_url, json=data, headers=headers)
|
python
|
def update_schema(self, catalog="hypermap"):
"""
set the mapping in solr.
:param catalog: core
:return:
"""
schema_url = "{0}/solr/{1}/schema".format(SEARCH_URL, catalog)
print schema_url
# create a special type to draw better heatmaps.
location_rpt_quad_5m_payload = {
"add-field-type": {
"name": "location_rpt_quad_5m",
"class": "solr.SpatialRecursivePrefixTreeFieldType",
"geo": False,
"worldBounds": "ENVELOPE(-180, 180, 180, -180)",
"prefixTree": "packedQuad",
"distErrPct": "0.025",
"maxDistErr": "0.001",
"distanceUnits": "degrees"
}
}
requests.post(schema_url, json=location_rpt_quad_5m_payload)
# create a special type to implement ngrm text for search.
text_ngrm_payload = {
"add-field-type": {
"name": "text_ngrm",
"class": "solr.TextField",
"positionIncrementGap": "100",
"indexAnalyzer": {
"tokenizer": {
"class": "solr.WhitespaceTokenizerFactory"
},
"filters": [
{
"class": "solr.NGramFilterFactory",
"minGramSize": "1",
"maxGramSize": "50"
}, {
"class": "solr.LowerCaseFilterFactory"
}
]
},
"queryAnalyzer": {
"tokenizer": {
"class": "solr.WhitespaceTokenizerFactory"
},
"filters": [
{
"class": "solr.LowerCaseFilterFactory",
}
]
}
}
}
requests.post(schema_url, json=text_ngrm_payload)
# now the other fields
fields = [
{"name": "abstract", "type": "string"},
{"name": "abstract_txt", "type": "text_ngrm"},
{"name": "area", "type": "pdouble"},
{"name": "availability", "type": "string"},
{"name": "bbox", "type": "location_rpt_quad_5m"},
{"name": "domain_name", "type": "string"},
{"name": "is_public", "type": "boolean"},
{"name": "is_valid", "type": "boolean"},
{"name": "keywords", "type": "string", "multiValued": True},
{"name": "last_status", "type": "boolean"},
{"name": "layer_category", "type": "string"},
{"name": "layer_date", "type": "pdate", "docValues": True},
{"name": "layer_datetype", "type": "string"},
{"name": "layer_id", "type": "plong"},
{"name": "layer_originator", "type": "string"},
{"name": "layer_originator_txt", "type": "text_ngrm"},
{"name": "layer_username", "type": "string"},
{"name": "layer_username_txt", "type": "text_ngrm"},
{"name": "location", "type": "string"},
{"name": "max_x", "type": "pdouble"},
{"name": "max_y", "type": "pdouble"},
{"name": "min_x", "type": "pdouble"},
{"name": "min_y", "type": "pdouble"},
{"name": "name", "type": "string"},
{"name": "recent_reliability", "type": "pdouble"},
{"name": "reliability", "type": "pdouble"},
{"name": "service_id", "type": "plong"},
{"name": "service_type", "type": "string"},
{"name": "srs", "type": "string", "multiValued": True},
{"name": "tile_url", "type": "string"},
{"name": "title", "type": "string"},
{"name": "title_txt", "type": "text_ngrm"},
{"name": "type", "type": "string"},
{"name": "url", "type": "string"},
{"name": "uuid", "type": "string", "required": True},
{"name": "centroid_y", "type": "pdouble"},
{"name": "centroid_x", "type": "pdouble"},
]
copy_fields = [
{"source": "*", "dest": "_text_"},
{"source": "title", "dest": "title_txt"},
{"source": "abstract", "dest": "abstract_txt"},
{"source": "layer_originator", "dest": "layer_originator_txt"},
{"source": "layer_username", "dest": "layer_username_txt"},
]
headers = {
"Content-type": "application/json"
}
for field in fields:
data = {
"add-field": field
}
requests.post(schema_url, json=data, headers=headers)
for field in copy_fields:
data = {
"add-copy-field": field
}
print data
requests.post(schema_url, json=data, headers=headers)
|
[
"def",
"update_schema",
"(",
"self",
",",
"catalog",
"=",
"\"hypermap\"",
")",
":",
"schema_url",
"=",
"\"{0}/solr/{1}/schema\"",
".",
"format",
"(",
"SEARCH_URL",
",",
"catalog",
")",
"print",
"schema_url",
"# create a special type to draw better heatmaps.",
"location_rpt_quad_5m_payload",
"=",
"{",
"\"add-field-type\"",
":",
"{",
"\"name\"",
":",
"\"location_rpt_quad_5m\"",
",",
"\"class\"",
":",
"\"solr.SpatialRecursivePrefixTreeFieldType\"",
",",
"\"geo\"",
":",
"False",
",",
"\"worldBounds\"",
":",
"\"ENVELOPE(-180, 180, 180, -180)\"",
",",
"\"prefixTree\"",
":",
"\"packedQuad\"",
",",
"\"distErrPct\"",
":",
"\"0.025\"",
",",
"\"maxDistErr\"",
":",
"\"0.001\"",
",",
"\"distanceUnits\"",
":",
"\"degrees\"",
"}",
"}",
"requests",
".",
"post",
"(",
"schema_url",
",",
"json",
"=",
"location_rpt_quad_5m_payload",
")",
"# create a special type to implement ngrm text for search.",
"text_ngrm_payload",
"=",
"{",
"\"add-field-type\"",
":",
"{",
"\"name\"",
":",
"\"text_ngrm\"",
",",
"\"class\"",
":",
"\"solr.TextField\"",
",",
"\"positionIncrementGap\"",
":",
"\"100\"",
",",
"\"indexAnalyzer\"",
":",
"{",
"\"tokenizer\"",
":",
"{",
"\"class\"",
":",
"\"solr.WhitespaceTokenizerFactory\"",
"}",
",",
"\"filters\"",
":",
"[",
"{",
"\"class\"",
":",
"\"solr.NGramFilterFactory\"",
",",
"\"minGramSize\"",
":",
"\"1\"",
",",
"\"maxGramSize\"",
":",
"\"50\"",
"}",
",",
"{",
"\"class\"",
":",
"\"solr.LowerCaseFilterFactory\"",
"}",
"]",
"}",
",",
"\"queryAnalyzer\"",
":",
"{",
"\"tokenizer\"",
":",
"{",
"\"class\"",
":",
"\"solr.WhitespaceTokenizerFactory\"",
"}",
",",
"\"filters\"",
":",
"[",
"{",
"\"class\"",
":",
"\"solr.LowerCaseFilterFactory\"",
",",
"}",
"]",
"}",
"}",
"}",
"requests",
".",
"post",
"(",
"schema_url",
",",
"json",
"=",
"text_ngrm_payload",
")",
"# now the other fields",
"fields",
"=",
"[",
"{",
"\"name\"",
":",
"\"abstract\"",
",",
"\"type\"",
":",
"\"string\"",
"}",
",",
"{",
"\"name\"",
":",
"\"abstract_txt\"",
",",
"\"type\"",
":",
"\"text_ngrm\"",
"}",
",",
"{",
"\"name\"",
":",
"\"area\"",
",",
"\"type\"",
":",
"\"pdouble\"",
"}",
",",
"{",
"\"name\"",
":",
"\"availability\"",
",",
"\"type\"",
":",
"\"string\"",
"}",
",",
"{",
"\"name\"",
":",
"\"bbox\"",
",",
"\"type\"",
":",
"\"location_rpt_quad_5m\"",
"}",
",",
"{",
"\"name\"",
":",
"\"domain_name\"",
",",
"\"type\"",
":",
"\"string\"",
"}",
",",
"{",
"\"name\"",
":",
"\"is_public\"",
",",
"\"type\"",
":",
"\"boolean\"",
"}",
",",
"{",
"\"name\"",
":",
"\"is_valid\"",
",",
"\"type\"",
":",
"\"boolean\"",
"}",
",",
"{",
"\"name\"",
":",
"\"keywords\"",
",",
"\"type\"",
":",
"\"string\"",
",",
"\"multiValued\"",
":",
"True",
"}",
",",
"{",
"\"name\"",
":",
"\"last_status\"",
",",
"\"type\"",
":",
"\"boolean\"",
"}",
",",
"{",
"\"name\"",
":",
"\"layer_category\"",
",",
"\"type\"",
":",
"\"string\"",
"}",
",",
"{",
"\"name\"",
":",
"\"layer_date\"",
",",
"\"type\"",
":",
"\"pdate\"",
",",
"\"docValues\"",
":",
"True",
"}",
",",
"{",
"\"name\"",
":",
"\"layer_datetype\"",
",",
"\"type\"",
":",
"\"string\"",
"}",
",",
"{",
"\"name\"",
":",
"\"layer_id\"",
",",
"\"type\"",
":",
"\"plong\"",
"}",
",",
"{",
"\"name\"",
":",
"\"layer_originator\"",
",",
"\"type\"",
":",
"\"string\"",
"}",
",",
"{",
"\"name\"",
":",
"\"layer_originator_txt\"",
",",
"\"type\"",
":",
"\"text_ngrm\"",
"}",
",",
"{",
"\"name\"",
":",
"\"layer_username\"",
",",
"\"type\"",
":",
"\"string\"",
"}",
",",
"{",
"\"name\"",
":",
"\"layer_username_txt\"",
",",
"\"type\"",
":",
"\"text_ngrm\"",
"}",
",",
"{",
"\"name\"",
":",
"\"location\"",
",",
"\"type\"",
":",
"\"string\"",
"}",
",",
"{",
"\"name\"",
":",
"\"max_x\"",
",",
"\"type\"",
":",
"\"pdouble\"",
"}",
",",
"{",
"\"name\"",
":",
"\"max_y\"",
",",
"\"type\"",
":",
"\"pdouble\"",
"}",
",",
"{",
"\"name\"",
":",
"\"min_x\"",
",",
"\"type\"",
":",
"\"pdouble\"",
"}",
",",
"{",
"\"name\"",
":",
"\"min_y\"",
",",
"\"type\"",
":",
"\"pdouble\"",
"}",
",",
"{",
"\"name\"",
":",
"\"name\"",
",",
"\"type\"",
":",
"\"string\"",
"}",
",",
"{",
"\"name\"",
":",
"\"recent_reliability\"",
",",
"\"type\"",
":",
"\"pdouble\"",
"}",
",",
"{",
"\"name\"",
":",
"\"reliability\"",
",",
"\"type\"",
":",
"\"pdouble\"",
"}",
",",
"{",
"\"name\"",
":",
"\"service_id\"",
",",
"\"type\"",
":",
"\"plong\"",
"}",
",",
"{",
"\"name\"",
":",
"\"service_type\"",
",",
"\"type\"",
":",
"\"string\"",
"}",
",",
"{",
"\"name\"",
":",
"\"srs\"",
",",
"\"type\"",
":",
"\"string\"",
",",
"\"multiValued\"",
":",
"True",
"}",
",",
"{",
"\"name\"",
":",
"\"tile_url\"",
",",
"\"type\"",
":",
"\"string\"",
"}",
",",
"{",
"\"name\"",
":",
"\"title\"",
",",
"\"type\"",
":",
"\"string\"",
"}",
",",
"{",
"\"name\"",
":",
"\"title_txt\"",
",",
"\"type\"",
":",
"\"text_ngrm\"",
"}",
",",
"{",
"\"name\"",
":",
"\"type\"",
",",
"\"type\"",
":",
"\"string\"",
"}",
",",
"{",
"\"name\"",
":",
"\"url\"",
",",
"\"type\"",
":",
"\"string\"",
"}",
",",
"{",
"\"name\"",
":",
"\"uuid\"",
",",
"\"type\"",
":",
"\"string\"",
",",
"\"required\"",
":",
"True",
"}",
",",
"{",
"\"name\"",
":",
"\"centroid_y\"",
",",
"\"type\"",
":",
"\"pdouble\"",
"}",
",",
"{",
"\"name\"",
":",
"\"centroid_x\"",
",",
"\"type\"",
":",
"\"pdouble\"",
"}",
",",
"]",
"copy_fields",
"=",
"[",
"{",
"\"source\"",
":",
"\"*\"",
",",
"\"dest\"",
":",
"\"_text_\"",
"}",
",",
"{",
"\"source\"",
":",
"\"title\"",
",",
"\"dest\"",
":",
"\"title_txt\"",
"}",
",",
"{",
"\"source\"",
":",
"\"abstract\"",
",",
"\"dest\"",
":",
"\"abstract_txt\"",
"}",
",",
"{",
"\"source\"",
":",
"\"layer_originator\"",
",",
"\"dest\"",
":",
"\"layer_originator_txt\"",
"}",
",",
"{",
"\"source\"",
":",
"\"layer_username\"",
",",
"\"dest\"",
":",
"\"layer_username_txt\"",
"}",
",",
"]",
"headers",
"=",
"{",
"\"Content-type\"",
":",
"\"application/json\"",
"}",
"for",
"field",
"in",
"fields",
":",
"data",
"=",
"{",
"\"add-field\"",
":",
"field",
"}",
"requests",
".",
"post",
"(",
"schema_url",
",",
"json",
"=",
"data",
",",
"headers",
"=",
"headers",
")",
"for",
"field",
"in",
"copy_fields",
":",
"data",
"=",
"{",
"\"add-copy-field\"",
":",
"field",
"}",
"print",
"data",
"requests",
".",
"post",
"(",
"schema_url",
",",
"json",
"=",
"data",
",",
"headers",
"=",
"headers",
")"
] |
set the mapping in solr.
:param catalog: core
:return:
|
[
"set",
"the",
"mapping",
"in",
"solr",
".",
":",
"param",
"catalog",
":",
"core",
":",
"return",
":"
] |
899a5385b15af7fba190ab4fae1d41e47d155a1b
|
https://github.com/cga-harvard/Hypermap-Registry/blob/899a5385b15af7fba190ab4fae1d41e47d155a1b/hypermap/aggregator/solr.py#L99-L221
|
train
|
cga-harvard/Hypermap-Registry
|
hypermap/aggregator/utils.py
|
create_layer_from_metadata_xml
|
def create_layer_from_metadata_xml(resourcetype, xml, monitor=False, service=None, catalog=None):
"""
Create a layer / keyword list from a metadata record if it does not already exist.
"""
from models import gen_anytext, Layer
if resourcetype == 'http://www.opengis.net/cat/csw/2.0.2': # Dublin core
md = CswRecord(etree.fromstring(xml))
layer = Layer(
is_monitored=monitor,
name=md.title,
title=md.title,
abstract=md.abstract,
xml=xml,
service=service,
catalog=catalog,
anytext=gen_anytext(md.title, md.abstract, md.subjects)
)
if hasattr(md, 'alternative'):
layer.name = md.alternative
if md.bbox is not None:
layer.bbox_x0 = format_float(md.bbox.minx)
layer.bbox_y0 = format_float(md.bbox.miny)
layer.bbox_x1 = format_float(md.bbox.maxx)
layer.bbox_y1 = format_float(md.bbox.maxy)
layer.wkt_geometry = bbox2wktpolygon([md.bbox.minx, md.bbox.miny, md.bbox.maxx, md.bbox.maxy])
return layer, md.subjects
|
python
|
def create_layer_from_metadata_xml(resourcetype, xml, monitor=False, service=None, catalog=None):
"""
Create a layer / keyword list from a metadata record if it does not already exist.
"""
from models import gen_anytext, Layer
if resourcetype == 'http://www.opengis.net/cat/csw/2.0.2': # Dublin core
md = CswRecord(etree.fromstring(xml))
layer = Layer(
is_monitored=monitor,
name=md.title,
title=md.title,
abstract=md.abstract,
xml=xml,
service=service,
catalog=catalog,
anytext=gen_anytext(md.title, md.abstract, md.subjects)
)
if hasattr(md, 'alternative'):
layer.name = md.alternative
if md.bbox is not None:
layer.bbox_x0 = format_float(md.bbox.minx)
layer.bbox_y0 = format_float(md.bbox.miny)
layer.bbox_x1 = format_float(md.bbox.maxx)
layer.bbox_y1 = format_float(md.bbox.maxy)
layer.wkt_geometry = bbox2wktpolygon([md.bbox.minx, md.bbox.miny, md.bbox.maxx, md.bbox.maxy])
return layer, md.subjects
|
[
"def",
"create_layer_from_metadata_xml",
"(",
"resourcetype",
",",
"xml",
",",
"monitor",
"=",
"False",
",",
"service",
"=",
"None",
",",
"catalog",
"=",
"None",
")",
":",
"from",
"models",
"import",
"gen_anytext",
",",
"Layer",
"if",
"resourcetype",
"==",
"'http://www.opengis.net/cat/csw/2.0.2'",
":",
"# Dublin core",
"md",
"=",
"CswRecord",
"(",
"etree",
".",
"fromstring",
"(",
"xml",
")",
")",
"layer",
"=",
"Layer",
"(",
"is_monitored",
"=",
"monitor",
",",
"name",
"=",
"md",
".",
"title",
",",
"title",
"=",
"md",
".",
"title",
",",
"abstract",
"=",
"md",
".",
"abstract",
",",
"xml",
"=",
"xml",
",",
"service",
"=",
"service",
",",
"catalog",
"=",
"catalog",
",",
"anytext",
"=",
"gen_anytext",
"(",
"md",
".",
"title",
",",
"md",
".",
"abstract",
",",
"md",
".",
"subjects",
")",
")",
"if",
"hasattr",
"(",
"md",
",",
"'alternative'",
")",
":",
"layer",
".",
"name",
"=",
"md",
".",
"alternative",
"if",
"md",
".",
"bbox",
"is",
"not",
"None",
":",
"layer",
".",
"bbox_x0",
"=",
"format_float",
"(",
"md",
".",
"bbox",
".",
"minx",
")",
"layer",
".",
"bbox_y0",
"=",
"format_float",
"(",
"md",
".",
"bbox",
".",
"miny",
")",
"layer",
".",
"bbox_x1",
"=",
"format_float",
"(",
"md",
".",
"bbox",
".",
"maxx",
")",
"layer",
".",
"bbox_y1",
"=",
"format_float",
"(",
"md",
".",
"bbox",
".",
"maxy",
")",
"layer",
".",
"wkt_geometry",
"=",
"bbox2wktpolygon",
"(",
"[",
"md",
".",
"bbox",
".",
"minx",
",",
"md",
".",
"bbox",
".",
"miny",
",",
"md",
".",
"bbox",
".",
"maxx",
",",
"md",
".",
"bbox",
".",
"maxy",
"]",
")",
"return",
"layer",
",",
"md",
".",
"subjects"
] |
Create a layer / keyword list from a metadata record if it does not already exist.
|
[
"Create",
"a",
"layer",
"/",
"keyword",
"list",
"from",
"a",
"metadata",
"record",
"if",
"it",
"does",
"not",
"already",
"exist",
"."
] |
899a5385b15af7fba190ab4fae1d41e47d155a1b
|
https://github.com/cga-harvard/Hypermap-Registry/blob/899a5385b15af7fba190ab4fae1d41e47d155a1b/hypermap/aggregator/utils.py#L28-L59
|
train
|
cga-harvard/Hypermap-Registry
|
hypermap/aggregator/utils.py
|
create_service_from_endpoint
|
def create_service_from_endpoint(endpoint, service_type, title=None, abstract=None, catalog=None):
"""
Create a service from an endpoint if it does not already exists.
"""
from models import Service
if Service.objects.filter(url=endpoint, catalog=catalog).count() == 0:
# check if endpoint is valid
request = requests.get(endpoint)
if request.status_code == 200:
LOGGER.debug('Creating a %s service for endpoint=%s catalog=%s' % (service_type, endpoint, catalog))
service = Service(
type=service_type, url=endpoint, title=title, abstract=abstract,
csw_type='service', catalog=catalog
)
service.save()
return service
else:
LOGGER.warning('This endpoint is invalid, status code is %s' % request.status_code)
else:
LOGGER.warning('A service for this endpoint %s in catalog %s already exists' % (endpoint, catalog))
return None
|
python
|
def create_service_from_endpoint(endpoint, service_type, title=None, abstract=None, catalog=None):
"""
Create a service from an endpoint if it does not already exists.
"""
from models import Service
if Service.objects.filter(url=endpoint, catalog=catalog).count() == 0:
# check if endpoint is valid
request = requests.get(endpoint)
if request.status_code == 200:
LOGGER.debug('Creating a %s service for endpoint=%s catalog=%s' % (service_type, endpoint, catalog))
service = Service(
type=service_type, url=endpoint, title=title, abstract=abstract,
csw_type='service', catalog=catalog
)
service.save()
return service
else:
LOGGER.warning('This endpoint is invalid, status code is %s' % request.status_code)
else:
LOGGER.warning('A service for this endpoint %s in catalog %s already exists' % (endpoint, catalog))
return None
|
[
"def",
"create_service_from_endpoint",
"(",
"endpoint",
",",
"service_type",
",",
"title",
"=",
"None",
",",
"abstract",
"=",
"None",
",",
"catalog",
"=",
"None",
")",
":",
"from",
"models",
"import",
"Service",
"if",
"Service",
".",
"objects",
".",
"filter",
"(",
"url",
"=",
"endpoint",
",",
"catalog",
"=",
"catalog",
")",
".",
"count",
"(",
")",
"==",
"0",
":",
"# check if endpoint is valid",
"request",
"=",
"requests",
".",
"get",
"(",
"endpoint",
")",
"if",
"request",
".",
"status_code",
"==",
"200",
":",
"LOGGER",
".",
"debug",
"(",
"'Creating a %s service for endpoint=%s catalog=%s'",
"%",
"(",
"service_type",
",",
"endpoint",
",",
"catalog",
")",
")",
"service",
"=",
"Service",
"(",
"type",
"=",
"service_type",
",",
"url",
"=",
"endpoint",
",",
"title",
"=",
"title",
",",
"abstract",
"=",
"abstract",
",",
"csw_type",
"=",
"'service'",
",",
"catalog",
"=",
"catalog",
")",
"service",
".",
"save",
"(",
")",
"return",
"service",
"else",
":",
"LOGGER",
".",
"warning",
"(",
"'This endpoint is invalid, status code is %s'",
"%",
"request",
".",
"status_code",
")",
"else",
":",
"LOGGER",
".",
"warning",
"(",
"'A service for this endpoint %s in catalog %s already exists'",
"%",
"(",
"endpoint",
",",
"catalog",
")",
")",
"return",
"None"
] |
Create a service from an endpoint if it does not already exists.
|
[
"Create",
"a",
"service",
"from",
"an",
"endpoint",
"if",
"it",
"does",
"not",
"already",
"exists",
"."
] |
899a5385b15af7fba190ab4fae1d41e47d155a1b
|
https://github.com/cga-harvard/Hypermap-Registry/blob/899a5385b15af7fba190ab4fae1d41e47d155a1b/hypermap/aggregator/utils.py#L62-L82
|
train
|
cga-harvard/Hypermap-Registry
|
hypermap/aggregator/utils.py
|
create_services_from_endpoint
|
def create_services_from_endpoint(url, catalog, greedy_opt=True):
"""
Generate service/services from an endpoint.
WMS, WMTS, TMS endpoints correspond to a single service.
ESRI, CSW endpoints corrispond to many services.
:return: imported, message
"""
# this variable will collect any exception message during the routine.
# will be used in the last step to send a message if "detected" var is False.
messages = []
num_created = 0
endpoint = get_sanitized_endpoint(url)
try:
urllib2.urlopen(endpoint, timeout=10)
except Exception as e:
message = traceback.format_exception(*sys.exc_info())
LOGGER.error('Cannot open this endpoint: %s' % endpoint)
LOGGER.error('ERROR MESSAGE: %s' % message)
LOGGER.error(e, exc_info=True)
return False, message
detected = False
# handle specific service types for some domains (WorldMap, Wrapper...)
parsed_uri = urlparse(endpoint)
domain = '{uri.scheme}://{uri.netloc}/'.format(uri=parsed_uri)
if domain == 'http://worldmap.harvard.edu/':
service_type = 'Hypermap:WorldMap'
title = 'Harvard WorldMap'
abstract = 'Harvard WorldMap'
endpoint = domain
detected = True
if domain in [
'http://maps.nypl.org/',
'http://mapwarper.net/',
'http://warp.worldmap.harvard.edu/',
]:
service_type = 'Hypermap:WARPER'
title = 'Warper at %s' % domain
abstract = 'Warper at %s' % domain
detected = True
# test if it is CSW, WMS, TMS, WMTS or Esri
# CSW
try:
csw = CatalogueServiceWeb(endpoint)
service_type = 'OGC:CSW'
service_links = {}
detected = True
typenames = 'csw:Record'
outputschema = 'http://www.opengis.net/cat/csw/2.0.2'
if 'csw_harvest_pagesize' in settings.REGISTRY_PYCSW['manager']:
pagesize = int(settings.REGISTRY_PYCSW['manager']['csw_harvest_pagesize'])
else:
pagesize = 10
LOGGER.debug('Harvesting CSW %s' % endpoint)
# now get all records
# get total number of records to loop against
try:
csw.getrecords2(typenames=typenames, resulttype='hits',
outputschema=outputschema)
matches = csw.results['matches']
except: # this is a CSW, but server rejects query
raise RuntimeError(csw.response)
if pagesize > matches:
pagesize = matches
LOGGER.info('Harvesting %d CSW records' % matches)
# loop over all catalogue records incrementally
for r in range(1, matches+1, pagesize):
LOGGER.info('Parsing %s from %s' % (r, matches))
try:
csw.getrecords2(typenames=typenames, startposition=r,
maxrecords=pagesize, outputschema=outputschema, esn='full')
except Exception as err: # this is a CSW, but server rejects query
raise RuntimeError(csw.response)
for k, v in csw.records.items():
# try to parse metadata
try:
LOGGER.info('Looking for service links')
LOGGER.debug('Looking for service links via dct:references')
if v.references:
for ref in v.references:
scheme = None
if ref['scheme'] in [st[0] for st in SERVICE_TYPES]:
if ref['url'] not in service_links:
scheme = ref['scheme']
service_links[ref['url']] = scheme
else: # loose detection
scheme = detect_metadata_url_scheme(ref['url'])
if scheme is not None:
if ref['url'] not in service_links:
service_links[ref['url']] = scheme
if scheme is None:
continue
try:
service = create_service_from_endpoint(ref['url'], scheme, catalog=catalog)
if service is not None:
num_created = num_created + 1
LOGGER.info('Found %s services on endpoint' % num_created)
except Exception, e:
LOGGER.error('Could not create service for %s : %s' % (scheme, ref['url']))
LOGGER.error(e, exc_info=True)
LOGGER.debug('Looking for service links via the GeoNetwork-ish dc:URI')
if v.uris:
for u in v.uris: # loose detection
scheme = detect_metadata_url_scheme(u['url'])
if scheme is not None:
if u['url'] not in service_links:
service_links[u['url']] = scheme
else:
continue
try:
service = create_service_from_endpoint(u['url'], scheme, catalog=catalog)
if service is not None:
num_created = num_created + 1
LOGGER.info('Found %s services on endpoint' % num_created)
except Exception, e:
LOGGER.error('Could not create service for %s : %s' % (scheme, u['url']))
LOGGER.error(e, exc_info=True)
except Exception as err: # parsing failed for some reason
LOGGER.warning('Metadata parsing failed %s', err)
LOGGER.error(err, exc_info=True)
except XMLSyntaxError as e:
# This is not XML, so likely not a CSW. Moving on.
pass
except Exception as e:
LOGGER.error(e, exc_info=True)
messages.append(str(e))
# WMS
if not detected:
try:
service = get_wms_version_negotiate(endpoint, timeout=10)
service_type = 'OGC:WMS'
title = service.identification.title,
abstract = service.identification.abstract
detected = True
except XMLSyntaxError as e:
# This is not XML, so likely not a WMS. Moving on.
pass
except Exception as e:
LOGGER.error(e, exc_info=True)
messages.append(str(e))
# TMS
if not detected:
try:
service = TileMapService(endpoint, timeout=10)
service_type = 'OSGeo:TMS'
title = service.identification.title,
abstract = service.identification.abstract
detected = True
except XMLSyntaxError as e:
# This is not XML, so likely not a TsMS. Moving on.
pass
except Exception as e:
LOGGER.error(e, exc_info=True)
messages.append(str(e))
# WMTS
if not detected:
try:
# @tomkralidis timeout is not implemented for WebMapTileService?
service = WebMapTileService(endpoint)
service_type = 'OGC:WMTS'
title = service.identification.title,
abstract = service.identification.abstract
detected = True
except XMLSyntaxError as e:
# This is not XML, so likely not a WMTS. Moving on.
pass
except Exception as e:
LOGGER.error(e, exc_info=True)
messages.append(str(e))
# if detected, let's create the service
if detected and service_type != 'OGC:CSW':
try:
service = create_service_from_endpoint(
endpoint,
service_type,
title,
abstract=abstract,
catalog=catalog
)
if service is not None:
num_created = num_created + 1
except XMLSyntaxError as e:
# This is not XML, so likely not a OGC:CSW. Moving on.
pass
except Exception as e:
LOGGER.error(e, exc_info=True)
messages.append(str(e))
# Esri
# a good sample is here: https://gis.ngdc.noaa.gov/arcgis/rest/services
# we can safely assume the following condition (at least it is true for 1170 services)
# we need to test this as arcrest.Folder can freeze with not esri url such as this one:
# http://hh.worldmap.harvard.edu/admin/aggregator/service/?q=%2Frest%2Fservices
if '/rest/services' in endpoint:
if not detected:
try:
esri = arcrest.Folder(endpoint)
service_type = 'ESRI'
detected = True
service_to_process, folder_to_process = esri.services, esri.folders
if not greedy_opt:
folder_to_process = []
sections = service_url_parse(url)
service_to_process = get_single_service(esri, sections)
processed_services = process_esri_services(service_to_process, catalog)
num_created = num_created + len(processed_services)
for folder in folder_to_process:
folder_services = process_esri_services(folder.services, catalog)
num_created = num_created + len(folder_services)
except Exception as e:
LOGGER.error(e, exc_info=True)
messages.append(str(e))
if detected:
return True, '%s service/s created' % num_created
else:
m = '|'.join(messages)
return False, 'ERROR! Could not detect service type for ' \
'endpoint %s or already existing. messages=(%s)' % (endpoint, m)
|
python
|
def create_services_from_endpoint(url, catalog, greedy_opt=True):
"""
Generate service/services from an endpoint.
WMS, WMTS, TMS endpoints correspond to a single service.
ESRI, CSW endpoints corrispond to many services.
:return: imported, message
"""
# this variable will collect any exception message during the routine.
# will be used in the last step to send a message if "detected" var is False.
messages = []
num_created = 0
endpoint = get_sanitized_endpoint(url)
try:
urllib2.urlopen(endpoint, timeout=10)
except Exception as e:
message = traceback.format_exception(*sys.exc_info())
LOGGER.error('Cannot open this endpoint: %s' % endpoint)
LOGGER.error('ERROR MESSAGE: %s' % message)
LOGGER.error(e, exc_info=True)
return False, message
detected = False
# handle specific service types for some domains (WorldMap, Wrapper...)
parsed_uri = urlparse(endpoint)
domain = '{uri.scheme}://{uri.netloc}/'.format(uri=parsed_uri)
if domain == 'http://worldmap.harvard.edu/':
service_type = 'Hypermap:WorldMap'
title = 'Harvard WorldMap'
abstract = 'Harvard WorldMap'
endpoint = domain
detected = True
if domain in [
'http://maps.nypl.org/',
'http://mapwarper.net/',
'http://warp.worldmap.harvard.edu/',
]:
service_type = 'Hypermap:WARPER'
title = 'Warper at %s' % domain
abstract = 'Warper at %s' % domain
detected = True
# test if it is CSW, WMS, TMS, WMTS or Esri
# CSW
try:
csw = CatalogueServiceWeb(endpoint)
service_type = 'OGC:CSW'
service_links = {}
detected = True
typenames = 'csw:Record'
outputschema = 'http://www.opengis.net/cat/csw/2.0.2'
if 'csw_harvest_pagesize' in settings.REGISTRY_PYCSW['manager']:
pagesize = int(settings.REGISTRY_PYCSW['manager']['csw_harvest_pagesize'])
else:
pagesize = 10
LOGGER.debug('Harvesting CSW %s' % endpoint)
# now get all records
# get total number of records to loop against
try:
csw.getrecords2(typenames=typenames, resulttype='hits',
outputschema=outputschema)
matches = csw.results['matches']
except: # this is a CSW, but server rejects query
raise RuntimeError(csw.response)
if pagesize > matches:
pagesize = matches
LOGGER.info('Harvesting %d CSW records' % matches)
# loop over all catalogue records incrementally
for r in range(1, matches+1, pagesize):
LOGGER.info('Parsing %s from %s' % (r, matches))
try:
csw.getrecords2(typenames=typenames, startposition=r,
maxrecords=pagesize, outputschema=outputschema, esn='full')
except Exception as err: # this is a CSW, but server rejects query
raise RuntimeError(csw.response)
for k, v in csw.records.items():
# try to parse metadata
try:
LOGGER.info('Looking for service links')
LOGGER.debug('Looking for service links via dct:references')
if v.references:
for ref in v.references:
scheme = None
if ref['scheme'] in [st[0] for st in SERVICE_TYPES]:
if ref['url'] not in service_links:
scheme = ref['scheme']
service_links[ref['url']] = scheme
else: # loose detection
scheme = detect_metadata_url_scheme(ref['url'])
if scheme is not None:
if ref['url'] not in service_links:
service_links[ref['url']] = scheme
if scheme is None:
continue
try:
service = create_service_from_endpoint(ref['url'], scheme, catalog=catalog)
if service is not None:
num_created = num_created + 1
LOGGER.info('Found %s services on endpoint' % num_created)
except Exception, e:
LOGGER.error('Could not create service for %s : %s' % (scheme, ref['url']))
LOGGER.error(e, exc_info=True)
LOGGER.debug('Looking for service links via the GeoNetwork-ish dc:URI')
if v.uris:
for u in v.uris: # loose detection
scheme = detect_metadata_url_scheme(u['url'])
if scheme is not None:
if u['url'] not in service_links:
service_links[u['url']] = scheme
else:
continue
try:
service = create_service_from_endpoint(u['url'], scheme, catalog=catalog)
if service is not None:
num_created = num_created + 1
LOGGER.info('Found %s services on endpoint' % num_created)
except Exception, e:
LOGGER.error('Could not create service for %s : %s' % (scheme, u['url']))
LOGGER.error(e, exc_info=True)
except Exception as err: # parsing failed for some reason
LOGGER.warning('Metadata parsing failed %s', err)
LOGGER.error(err, exc_info=True)
except XMLSyntaxError as e:
# This is not XML, so likely not a CSW. Moving on.
pass
except Exception as e:
LOGGER.error(e, exc_info=True)
messages.append(str(e))
# WMS
if not detected:
try:
service = get_wms_version_negotiate(endpoint, timeout=10)
service_type = 'OGC:WMS'
title = service.identification.title,
abstract = service.identification.abstract
detected = True
except XMLSyntaxError as e:
# This is not XML, so likely not a WMS. Moving on.
pass
except Exception as e:
LOGGER.error(e, exc_info=True)
messages.append(str(e))
# TMS
if not detected:
try:
service = TileMapService(endpoint, timeout=10)
service_type = 'OSGeo:TMS'
title = service.identification.title,
abstract = service.identification.abstract
detected = True
except XMLSyntaxError as e:
# This is not XML, so likely not a TsMS. Moving on.
pass
except Exception as e:
LOGGER.error(e, exc_info=True)
messages.append(str(e))
# WMTS
if not detected:
try:
# @tomkralidis timeout is not implemented for WebMapTileService?
service = WebMapTileService(endpoint)
service_type = 'OGC:WMTS'
title = service.identification.title,
abstract = service.identification.abstract
detected = True
except XMLSyntaxError as e:
# This is not XML, so likely not a WMTS. Moving on.
pass
except Exception as e:
LOGGER.error(e, exc_info=True)
messages.append(str(e))
# if detected, let's create the service
if detected and service_type != 'OGC:CSW':
try:
service = create_service_from_endpoint(
endpoint,
service_type,
title,
abstract=abstract,
catalog=catalog
)
if service is not None:
num_created = num_created + 1
except XMLSyntaxError as e:
# This is not XML, so likely not a OGC:CSW. Moving on.
pass
except Exception as e:
LOGGER.error(e, exc_info=True)
messages.append(str(e))
# Esri
# a good sample is here: https://gis.ngdc.noaa.gov/arcgis/rest/services
# we can safely assume the following condition (at least it is true for 1170 services)
# we need to test this as arcrest.Folder can freeze with not esri url such as this one:
# http://hh.worldmap.harvard.edu/admin/aggregator/service/?q=%2Frest%2Fservices
if '/rest/services' in endpoint:
if not detected:
try:
esri = arcrest.Folder(endpoint)
service_type = 'ESRI'
detected = True
service_to_process, folder_to_process = esri.services, esri.folders
if not greedy_opt:
folder_to_process = []
sections = service_url_parse(url)
service_to_process = get_single_service(esri, sections)
processed_services = process_esri_services(service_to_process, catalog)
num_created = num_created + len(processed_services)
for folder in folder_to_process:
folder_services = process_esri_services(folder.services, catalog)
num_created = num_created + len(folder_services)
except Exception as e:
LOGGER.error(e, exc_info=True)
messages.append(str(e))
if detected:
return True, '%s service/s created' % num_created
else:
m = '|'.join(messages)
return False, 'ERROR! Could not detect service type for ' \
'endpoint %s or already existing. messages=(%s)' % (endpoint, m)
|
[
"def",
"create_services_from_endpoint",
"(",
"url",
",",
"catalog",
",",
"greedy_opt",
"=",
"True",
")",
":",
"# this variable will collect any exception message during the routine.",
"# will be used in the last step to send a message if \"detected\" var is False.",
"messages",
"=",
"[",
"]",
"num_created",
"=",
"0",
"endpoint",
"=",
"get_sanitized_endpoint",
"(",
"url",
")",
"try",
":",
"urllib2",
".",
"urlopen",
"(",
"endpoint",
",",
"timeout",
"=",
"10",
")",
"except",
"Exception",
"as",
"e",
":",
"message",
"=",
"traceback",
".",
"format_exception",
"(",
"*",
"sys",
".",
"exc_info",
"(",
")",
")",
"LOGGER",
".",
"error",
"(",
"'Cannot open this endpoint: %s'",
"%",
"endpoint",
")",
"LOGGER",
".",
"error",
"(",
"'ERROR MESSAGE: %s'",
"%",
"message",
")",
"LOGGER",
".",
"error",
"(",
"e",
",",
"exc_info",
"=",
"True",
")",
"return",
"False",
",",
"message",
"detected",
"=",
"False",
"# handle specific service types for some domains (WorldMap, Wrapper...)",
"parsed_uri",
"=",
"urlparse",
"(",
"endpoint",
")",
"domain",
"=",
"'{uri.scheme}://{uri.netloc}/'",
".",
"format",
"(",
"uri",
"=",
"parsed_uri",
")",
"if",
"domain",
"==",
"'http://worldmap.harvard.edu/'",
":",
"service_type",
"=",
"'Hypermap:WorldMap'",
"title",
"=",
"'Harvard WorldMap'",
"abstract",
"=",
"'Harvard WorldMap'",
"endpoint",
"=",
"domain",
"detected",
"=",
"True",
"if",
"domain",
"in",
"[",
"'http://maps.nypl.org/'",
",",
"'http://mapwarper.net/'",
",",
"'http://warp.worldmap.harvard.edu/'",
",",
"]",
":",
"service_type",
"=",
"'Hypermap:WARPER'",
"title",
"=",
"'Warper at %s'",
"%",
"domain",
"abstract",
"=",
"'Warper at %s'",
"%",
"domain",
"detected",
"=",
"True",
"# test if it is CSW, WMS, TMS, WMTS or Esri",
"# CSW",
"try",
":",
"csw",
"=",
"CatalogueServiceWeb",
"(",
"endpoint",
")",
"service_type",
"=",
"'OGC:CSW'",
"service_links",
"=",
"{",
"}",
"detected",
"=",
"True",
"typenames",
"=",
"'csw:Record'",
"outputschema",
"=",
"'http://www.opengis.net/cat/csw/2.0.2'",
"if",
"'csw_harvest_pagesize'",
"in",
"settings",
".",
"REGISTRY_PYCSW",
"[",
"'manager'",
"]",
":",
"pagesize",
"=",
"int",
"(",
"settings",
".",
"REGISTRY_PYCSW",
"[",
"'manager'",
"]",
"[",
"'csw_harvest_pagesize'",
"]",
")",
"else",
":",
"pagesize",
"=",
"10",
"LOGGER",
".",
"debug",
"(",
"'Harvesting CSW %s'",
"%",
"endpoint",
")",
"# now get all records",
"# get total number of records to loop against",
"try",
":",
"csw",
".",
"getrecords2",
"(",
"typenames",
"=",
"typenames",
",",
"resulttype",
"=",
"'hits'",
",",
"outputschema",
"=",
"outputschema",
")",
"matches",
"=",
"csw",
".",
"results",
"[",
"'matches'",
"]",
"except",
":",
"# this is a CSW, but server rejects query",
"raise",
"RuntimeError",
"(",
"csw",
".",
"response",
")",
"if",
"pagesize",
">",
"matches",
":",
"pagesize",
"=",
"matches",
"LOGGER",
".",
"info",
"(",
"'Harvesting %d CSW records'",
"%",
"matches",
")",
"# loop over all catalogue records incrementally",
"for",
"r",
"in",
"range",
"(",
"1",
",",
"matches",
"+",
"1",
",",
"pagesize",
")",
":",
"LOGGER",
".",
"info",
"(",
"'Parsing %s from %s'",
"%",
"(",
"r",
",",
"matches",
")",
")",
"try",
":",
"csw",
".",
"getrecords2",
"(",
"typenames",
"=",
"typenames",
",",
"startposition",
"=",
"r",
",",
"maxrecords",
"=",
"pagesize",
",",
"outputschema",
"=",
"outputschema",
",",
"esn",
"=",
"'full'",
")",
"except",
"Exception",
"as",
"err",
":",
"# this is a CSW, but server rejects query",
"raise",
"RuntimeError",
"(",
"csw",
".",
"response",
")",
"for",
"k",
",",
"v",
"in",
"csw",
".",
"records",
".",
"items",
"(",
")",
":",
"# try to parse metadata",
"try",
":",
"LOGGER",
".",
"info",
"(",
"'Looking for service links'",
")",
"LOGGER",
".",
"debug",
"(",
"'Looking for service links via dct:references'",
")",
"if",
"v",
".",
"references",
":",
"for",
"ref",
"in",
"v",
".",
"references",
":",
"scheme",
"=",
"None",
"if",
"ref",
"[",
"'scheme'",
"]",
"in",
"[",
"st",
"[",
"0",
"]",
"for",
"st",
"in",
"SERVICE_TYPES",
"]",
":",
"if",
"ref",
"[",
"'url'",
"]",
"not",
"in",
"service_links",
":",
"scheme",
"=",
"ref",
"[",
"'scheme'",
"]",
"service_links",
"[",
"ref",
"[",
"'url'",
"]",
"]",
"=",
"scheme",
"else",
":",
"# loose detection",
"scheme",
"=",
"detect_metadata_url_scheme",
"(",
"ref",
"[",
"'url'",
"]",
")",
"if",
"scheme",
"is",
"not",
"None",
":",
"if",
"ref",
"[",
"'url'",
"]",
"not",
"in",
"service_links",
":",
"service_links",
"[",
"ref",
"[",
"'url'",
"]",
"]",
"=",
"scheme",
"if",
"scheme",
"is",
"None",
":",
"continue",
"try",
":",
"service",
"=",
"create_service_from_endpoint",
"(",
"ref",
"[",
"'url'",
"]",
",",
"scheme",
",",
"catalog",
"=",
"catalog",
")",
"if",
"service",
"is",
"not",
"None",
":",
"num_created",
"=",
"num_created",
"+",
"1",
"LOGGER",
".",
"info",
"(",
"'Found %s services on endpoint'",
"%",
"num_created",
")",
"except",
"Exception",
",",
"e",
":",
"LOGGER",
".",
"error",
"(",
"'Could not create service for %s : %s'",
"%",
"(",
"scheme",
",",
"ref",
"[",
"'url'",
"]",
")",
")",
"LOGGER",
".",
"error",
"(",
"e",
",",
"exc_info",
"=",
"True",
")",
"LOGGER",
".",
"debug",
"(",
"'Looking for service links via the GeoNetwork-ish dc:URI'",
")",
"if",
"v",
".",
"uris",
":",
"for",
"u",
"in",
"v",
".",
"uris",
":",
"# loose detection",
"scheme",
"=",
"detect_metadata_url_scheme",
"(",
"u",
"[",
"'url'",
"]",
")",
"if",
"scheme",
"is",
"not",
"None",
":",
"if",
"u",
"[",
"'url'",
"]",
"not",
"in",
"service_links",
":",
"service_links",
"[",
"u",
"[",
"'url'",
"]",
"]",
"=",
"scheme",
"else",
":",
"continue",
"try",
":",
"service",
"=",
"create_service_from_endpoint",
"(",
"u",
"[",
"'url'",
"]",
",",
"scheme",
",",
"catalog",
"=",
"catalog",
")",
"if",
"service",
"is",
"not",
"None",
":",
"num_created",
"=",
"num_created",
"+",
"1",
"LOGGER",
".",
"info",
"(",
"'Found %s services on endpoint'",
"%",
"num_created",
")",
"except",
"Exception",
",",
"e",
":",
"LOGGER",
".",
"error",
"(",
"'Could not create service for %s : %s'",
"%",
"(",
"scheme",
",",
"u",
"[",
"'url'",
"]",
")",
")",
"LOGGER",
".",
"error",
"(",
"e",
",",
"exc_info",
"=",
"True",
")",
"except",
"Exception",
"as",
"err",
":",
"# parsing failed for some reason",
"LOGGER",
".",
"warning",
"(",
"'Metadata parsing failed %s'",
",",
"err",
")",
"LOGGER",
".",
"error",
"(",
"err",
",",
"exc_info",
"=",
"True",
")",
"except",
"XMLSyntaxError",
"as",
"e",
":",
"# This is not XML, so likely not a CSW. Moving on.",
"pass",
"except",
"Exception",
"as",
"e",
":",
"LOGGER",
".",
"error",
"(",
"e",
",",
"exc_info",
"=",
"True",
")",
"messages",
".",
"append",
"(",
"str",
"(",
"e",
")",
")",
"# WMS",
"if",
"not",
"detected",
":",
"try",
":",
"service",
"=",
"get_wms_version_negotiate",
"(",
"endpoint",
",",
"timeout",
"=",
"10",
")",
"service_type",
"=",
"'OGC:WMS'",
"title",
"=",
"service",
".",
"identification",
".",
"title",
",",
"abstract",
"=",
"service",
".",
"identification",
".",
"abstract",
"detected",
"=",
"True",
"except",
"XMLSyntaxError",
"as",
"e",
":",
"# This is not XML, so likely not a WMS. Moving on.",
"pass",
"except",
"Exception",
"as",
"e",
":",
"LOGGER",
".",
"error",
"(",
"e",
",",
"exc_info",
"=",
"True",
")",
"messages",
".",
"append",
"(",
"str",
"(",
"e",
")",
")",
"# TMS",
"if",
"not",
"detected",
":",
"try",
":",
"service",
"=",
"TileMapService",
"(",
"endpoint",
",",
"timeout",
"=",
"10",
")",
"service_type",
"=",
"'OSGeo:TMS'",
"title",
"=",
"service",
".",
"identification",
".",
"title",
",",
"abstract",
"=",
"service",
".",
"identification",
".",
"abstract",
"detected",
"=",
"True",
"except",
"XMLSyntaxError",
"as",
"e",
":",
"# This is not XML, so likely not a TsMS. Moving on.",
"pass",
"except",
"Exception",
"as",
"e",
":",
"LOGGER",
".",
"error",
"(",
"e",
",",
"exc_info",
"=",
"True",
")",
"messages",
".",
"append",
"(",
"str",
"(",
"e",
")",
")",
"# WMTS",
"if",
"not",
"detected",
":",
"try",
":",
"# @tomkralidis timeout is not implemented for WebMapTileService?",
"service",
"=",
"WebMapTileService",
"(",
"endpoint",
")",
"service_type",
"=",
"'OGC:WMTS'",
"title",
"=",
"service",
".",
"identification",
".",
"title",
",",
"abstract",
"=",
"service",
".",
"identification",
".",
"abstract",
"detected",
"=",
"True",
"except",
"XMLSyntaxError",
"as",
"e",
":",
"# This is not XML, so likely not a WMTS. Moving on.",
"pass",
"except",
"Exception",
"as",
"e",
":",
"LOGGER",
".",
"error",
"(",
"e",
",",
"exc_info",
"=",
"True",
")",
"messages",
".",
"append",
"(",
"str",
"(",
"e",
")",
")",
"# if detected, let's create the service",
"if",
"detected",
"and",
"service_type",
"!=",
"'OGC:CSW'",
":",
"try",
":",
"service",
"=",
"create_service_from_endpoint",
"(",
"endpoint",
",",
"service_type",
",",
"title",
",",
"abstract",
"=",
"abstract",
",",
"catalog",
"=",
"catalog",
")",
"if",
"service",
"is",
"not",
"None",
":",
"num_created",
"=",
"num_created",
"+",
"1",
"except",
"XMLSyntaxError",
"as",
"e",
":",
"# This is not XML, so likely not a OGC:CSW. Moving on.",
"pass",
"except",
"Exception",
"as",
"e",
":",
"LOGGER",
".",
"error",
"(",
"e",
",",
"exc_info",
"=",
"True",
")",
"messages",
".",
"append",
"(",
"str",
"(",
"e",
")",
")",
"# Esri",
"# a good sample is here: https://gis.ngdc.noaa.gov/arcgis/rest/services",
"# we can safely assume the following condition (at least it is true for 1170 services)",
"# we need to test this as arcrest.Folder can freeze with not esri url such as this one:",
"# http://hh.worldmap.harvard.edu/admin/aggregator/service/?q=%2Frest%2Fservices",
"if",
"'/rest/services'",
"in",
"endpoint",
":",
"if",
"not",
"detected",
":",
"try",
":",
"esri",
"=",
"arcrest",
".",
"Folder",
"(",
"endpoint",
")",
"service_type",
"=",
"'ESRI'",
"detected",
"=",
"True",
"service_to_process",
",",
"folder_to_process",
"=",
"esri",
".",
"services",
",",
"esri",
".",
"folders",
"if",
"not",
"greedy_opt",
":",
"folder_to_process",
"=",
"[",
"]",
"sections",
"=",
"service_url_parse",
"(",
"url",
")",
"service_to_process",
"=",
"get_single_service",
"(",
"esri",
",",
"sections",
")",
"processed_services",
"=",
"process_esri_services",
"(",
"service_to_process",
",",
"catalog",
")",
"num_created",
"=",
"num_created",
"+",
"len",
"(",
"processed_services",
")",
"for",
"folder",
"in",
"folder_to_process",
":",
"folder_services",
"=",
"process_esri_services",
"(",
"folder",
".",
"services",
",",
"catalog",
")",
"num_created",
"=",
"num_created",
"+",
"len",
"(",
"folder_services",
")",
"except",
"Exception",
"as",
"e",
":",
"LOGGER",
".",
"error",
"(",
"e",
",",
"exc_info",
"=",
"True",
")",
"messages",
".",
"append",
"(",
"str",
"(",
"e",
")",
")",
"if",
"detected",
":",
"return",
"True",
",",
"'%s service/s created'",
"%",
"num_created",
"else",
":",
"m",
"=",
"'|'",
".",
"join",
"(",
"messages",
")",
"return",
"False",
",",
"'ERROR! Could not detect service type for '",
"'endpoint %s or already existing. messages=(%s)'",
"%",
"(",
"endpoint",
",",
"m",
")"
] |
Generate service/services from an endpoint.
WMS, WMTS, TMS endpoints correspond to a single service.
ESRI, CSW endpoints corrispond to many services.
:return: imported, message
|
[
"Generate",
"service",
"/",
"services",
"from",
"an",
"endpoint",
".",
"WMS",
"WMTS",
"TMS",
"endpoints",
"correspond",
"to",
"a",
"single",
"service",
".",
"ESRI",
"CSW",
"endpoints",
"corrispond",
"to",
"many",
"services",
".",
":",
"return",
":",
"imported",
"message"
] |
899a5385b15af7fba190ab4fae1d41e47d155a1b
|
https://github.com/cga-harvard/Hypermap-Registry/blob/899a5385b15af7fba190ab4fae1d41e47d155a1b/hypermap/aggregator/utils.py#L85-L332
|
train
|
cga-harvard/Hypermap-Registry
|
hypermap/aggregator/utils.py
|
service_url_parse
|
def service_url_parse(url):
"""
Function that parses from url the service and folder of services.
"""
endpoint = get_sanitized_endpoint(url)
url_split_list = url.split(endpoint + '/')
if len(url_split_list) != 0:
url_split_list = url_split_list[1].split('/')
else:
raise Exception('Wrong url parsed')
# Remove unnecessary items from list of the split url.
parsed_url = [s for s in url_split_list if '?' not in s if 'Server' not in s]
return parsed_url
|
python
|
def service_url_parse(url):
"""
Function that parses from url the service and folder of services.
"""
endpoint = get_sanitized_endpoint(url)
url_split_list = url.split(endpoint + '/')
if len(url_split_list) != 0:
url_split_list = url_split_list[1].split('/')
else:
raise Exception('Wrong url parsed')
# Remove unnecessary items from list of the split url.
parsed_url = [s for s in url_split_list if '?' not in s if 'Server' not in s]
return parsed_url
|
[
"def",
"service_url_parse",
"(",
"url",
")",
":",
"endpoint",
"=",
"get_sanitized_endpoint",
"(",
"url",
")",
"url_split_list",
"=",
"url",
".",
"split",
"(",
"endpoint",
"+",
"'/'",
")",
"if",
"len",
"(",
"url_split_list",
")",
"!=",
"0",
":",
"url_split_list",
"=",
"url_split_list",
"[",
"1",
"]",
".",
"split",
"(",
"'/'",
")",
"else",
":",
"raise",
"Exception",
"(",
"'Wrong url parsed'",
")",
"# Remove unnecessary items from list of the split url.",
"parsed_url",
"=",
"[",
"s",
"for",
"s",
"in",
"url_split_list",
"if",
"'?'",
"not",
"in",
"s",
"if",
"'Server'",
"not",
"in",
"s",
"]",
"return",
"parsed_url"
] |
Function that parses from url the service and folder of services.
|
[
"Function",
"that",
"parses",
"from",
"url",
"the",
"service",
"and",
"folder",
"of",
"services",
"."
] |
899a5385b15af7fba190ab4fae1d41e47d155a1b
|
https://github.com/cga-harvard/Hypermap-Registry/blob/899a5385b15af7fba190ab4fae1d41e47d155a1b/hypermap/aggregator/utils.py#L335-L349
|
train
|
cga-harvard/Hypermap-Registry
|
hypermap/aggregator/utils.py
|
inverse_mercator
|
def inverse_mercator(xy):
"""
Given coordinates in spherical mercator, return a lon,lat tuple.
"""
lon = (xy[0] / 20037508.34) * 180
lat = (xy[1] / 20037508.34) * 180
lat = 180 / math.pi * \
(2 * math.atan(math.exp(lat * math.pi / 180)) - math.pi / 2)
return (lon, lat)
|
python
|
def inverse_mercator(xy):
"""
Given coordinates in spherical mercator, return a lon,lat tuple.
"""
lon = (xy[0] / 20037508.34) * 180
lat = (xy[1] / 20037508.34) * 180
lat = 180 / math.pi * \
(2 * math.atan(math.exp(lat * math.pi / 180)) - math.pi / 2)
return (lon, lat)
|
[
"def",
"inverse_mercator",
"(",
"xy",
")",
":",
"lon",
"=",
"(",
"xy",
"[",
"0",
"]",
"/",
"20037508.34",
")",
"*",
"180",
"lat",
"=",
"(",
"xy",
"[",
"1",
"]",
"/",
"20037508.34",
")",
"*",
"180",
"lat",
"=",
"180",
"/",
"math",
".",
"pi",
"*",
"(",
"2",
"*",
"math",
".",
"atan",
"(",
"math",
".",
"exp",
"(",
"lat",
"*",
"math",
".",
"pi",
"/",
"180",
")",
")",
"-",
"math",
".",
"pi",
"/",
"2",
")",
"return",
"(",
"lon",
",",
"lat",
")"
] |
Given coordinates in spherical mercator, return a lon,lat tuple.
|
[
"Given",
"coordinates",
"in",
"spherical",
"mercator",
"return",
"a",
"lon",
"lat",
"tuple",
"."
] |
899a5385b15af7fba190ab4fae1d41e47d155a1b
|
https://github.com/cga-harvard/Hypermap-Registry/blob/899a5385b15af7fba190ab4fae1d41e47d155a1b/hypermap/aggregator/utils.py#L404-L412
|
train
|
cga-harvard/Hypermap-Registry
|
hypermap/aggregator/utils.py
|
get_wms_version_negotiate
|
def get_wms_version_negotiate(url, timeout=10):
"""
OWSLib wrapper function to perform version negotiation against owslib.wms.WebMapService
"""
try:
LOGGER.debug('Trying a WMS 1.3.0 GetCapabilities request')
return WebMapService(url, version='1.3.0', timeout=timeout)
except Exception as err:
LOGGER.warning('WMS 1.3.0 support not found: %s', err)
LOGGER.debug('Trying a WMS 1.1.1 GetCapabilities request instead')
return WebMapService(url, version='1.1.1', timeout=timeout)
|
python
|
def get_wms_version_negotiate(url, timeout=10):
"""
OWSLib wrapper function to perform version negotiation against owslib.wms.WebMapService
"""
try:
LOGGER.debug('Trying a WMS 1.3.0 GetCapabilities request')
return WebMapService(url, version='1.3.0', timeout=timeout)
except Exception as err:
LOGGER.warning('WMS 1.3.0 support not found: %s', err)
LOGGER.debug('Trying a WMS 1.1.1 GetCapabilities request instead')
return WebMapService(url, version='1.1.1', timeout=timeout)
|
[
"def",
"get_wms_version_negotiate",
"(",
"url",
",",
"timeout",
"=",
"10",
")",
":",
"try",
":",
"LOGGER",
".",
"debug",
"(",
"'Trying a WMS 1.3.0 GetCapabilities request'",
")",
"return",
"WebMapService",
"(",
"url",
",",
"version",
"=",
"'1.3.0'",
",",
"timeout",
"=",
"timeout",
")",
"except",
"Exception",
"as",
"err",
":",
"LOGGER",
".",
"warning",
"(",
"'WMS 1.3.0 support not found: %s'",
",",
"err",
")",
"LOGGER",
".",
"debug",
"(",
"'Trying a WMS 1.1.1 GetCapabilities request instead'",
")",
"return",
"WebMapService",
"(",
"url",
",",
"version",
"=",
"'1.1.1'",
",",
"timeout",
"=",
"timeout",
")"
] |
OWSLib wrapper function to perform version negotiation against owslib.wms.WebMapService
|
[
"OWSLib",
"wrapper",
"function",
"to",
"perform",
"version",
"negotiation",
"against",
"owslib",
".",
"wms",
".",
"WebMapService"
] |
899a5385b15af7fba190ab4fae1d41e47d155a1b
|
https://github.com/cga-harvard/Hypermap-Registry/blob/899a5385b15af7fba190ab4fae1d41e47d155a1b/hypermap/aggregator/utils.py#L415-L426
|
train
|
cga-harvard/Hypermap-Registry
|
hypermap/aggregator/utils.py
|
get_sanitized_endpoint
|
def get_sanitized_endpoint(url):
"""
Sanitize an endpoint, as removing unneeded parameters
"""
# sanitize esri
sanitized_url = url.rstrip()
esri_string = '/rest/services'
if esri_string in url:
match = re.search(esri_string, sanitized_url)
sanitized_url = url[0:(match.start(0)+len(esri_string))]
return sanitized_url
|
python
|
def get_sanitized_endpoint(url):
"""
Sanitize an endpoint, as removing unneeded parameters
"""
# sanitize esri
sanitized_url = url.rstrip()
esri_string = '/rest/services'
if esri_string in url:
match = re.search(esri_string, sanitized_url)
sanitized_url = url[0:(match.start(0)+len(esri_string))]
return sanitized_url
|
[
"def",
"get_sanitized_endpoint",
"(",
"url",
")",
":",
"# sanitize esri",
"sanitized_url",
"=",
"url",
".",
"rstrip",
"(",
")",
"esri_string",
"=",
"'/rest/services'",
"if",
"esri_string",
"in",
"url",
":",
"match",
"=",
"re",
".",
"search",
"(",
"esri_string",
",",
"sanitized_url",
")",
"sanitized_url",
"=",
"url",
"[",
"0",
":",
"(",
"match",
".",
"start",
"(",
"0",
")",
"+",
"len",
"(",
"esri_string",
")",
")",
"]",
"return",
"sanitized_url"
] |
Sanitize an endpoint, as removing unneeded parameters
|
[
"Sanitize",
"an",
"endpoint",
"as",
"removing",
"unneeded",
"parameters"
] |
899a5385b15af7fba190ab4fae1d41e47d155a1b
|
https://github.com/cga-harvard/Hypermap-Registry/blob/899a5385b15af7fba190ab4fae1d41e47d155a1b/hypermap/aggregator/utils.py#L435-L445
|
train
|
cga-harvard/Hypermap-Registry
|
hypermap/aggregator/utils.py
|
get_esri_service_name
|
def get_esri_service_name(url):
"""
A method to get a service name from an esri endpoint.
For example: http://example.com/arcgis/rest/services/myservice/mylayer/MapServer/?f=json
Will return: myservice/mylayer
"""
result = re.search('rest/services/(.*)/[MapServer|ImageServer]', url)
if result is None:
return url
else:
return result.group(1)
|
python
|
def get_esri_service_name(url):
"""
A method to get a service name from an esri endpoint.
For example: http://example.com/arcgis/rest/services/myservice/mylayer/MapServer/?f=json
Will return: myservice/mylayer
"""
result = re.search('rest/services/(.*)/[MapServer|ImageServer]', url)
if result is None:
return url
else:
return result.group(1)
|
[
"def",
"get_esri_service_name",
"(",
"url",
")",
":",
"result",
"=",
"re",
".",
"search",
"(",
"'rest/services/(.*)/[MapServer|ImageServer]'",
",",
"url",
")",
"if",
"result",
"is",
"None",
":",
"return",
"url",
"else",
":",
"return",
"result",
".",
"group",
"(",
"1",
")"
] |
A method to get a service name from an esri endpoint.
For example: http://example.com/arcgis/rest/services/myservice/mylayer/MapServer/?f=json
Will return: myservice/mylayer
|
[
"A",
"method",
"to",
"get",
"a",
"service",
"name",
"from",
"an",
"esri",
"endpoint",
".",
"For",
"example",
":",
"http",
":",
"//",
"example",
".",
"com",
"/",
"arcgis",
"/",
"rest",
"/",
"services",
"/",
"myservice",
"/",
"mylayer",
"/",
"MapServer",
"/",
"?f",
"=",
"json",
"Will",
"return",
":",
"myservice",
"/",
"mylayer"
] |
899a5385b15af7fba190ab4fae1d41e47d155a1b
|
https://github.com/cga-harvard/Hypermap-Registry/blob/899a5385b15af7fba190ab4fae1d41e47d155a1b/hypermap/aggregator/utils.py#L448-L458
|
train
|
cga-harvard/Hypermap-Registry
|
hypermap/aggregator/utils.py
|
get_esri_extent
|
def get_esri_extent(esriobj):
"""
Get the extent of an ESRI resource
"""
extent = None
srs = None
if 'fullExtent' in esriobj._json_struct:
extent = esriobj._json_struct['fullExtent']
if 'extent' in esriobj._json_struct:
extent = esriobj._json_struct['extent']
try:
srs = extent['spatialReference']['wkid']
except KeyError, err:
LOGGER.error(err, exc_info=True)
return [extent, srs]
|
python
|
def get_esri_extent(esriobj):
"""
Get the extent of an ESRI resource
"""
extent = None
srs = None
if 'fullExtent' in esriobj._json_struct:
extent = esriobj._json_struct['fullExtent']
if 'extent' in esriobj._json_struct:
extent = esriobj._json_struct['extent']
try:
srs = extent['spatialReference']['wkid']
except KeyError, err:
LOGGER.error(err, exc_info=True)
return [extent, srs]
|
[
"def",
"get_esri_extent",
"(",
"esriobj",
")",
":",
"extent",
"=",
"None",
"srs",
"=",
"None",
"if",
"'fullExtent'",
"in",
"esriobj",
".",
"_json_struct",
":",
"extent",
"=",
"esriobj",
".",
"_json_struct",
"[",
"'fullExtent'",
"]",
"if",
"'extent'",
"in",
"esriobj",
".",
"_json_struct",
":",
"extent",
"=",
"esriobj",
".",
"_json_struct",
"[",
"'extent'",
"]",
"try",
":",
"srs",
"=",
"extent",
"[",
"'spatialReference'",
"]",
"[",
"'wkid'",
"]",
"except",
"KeyError",
",",
"err",
":",
"LOGGER",
".",
"error",
"(",
"err",
",",
"exc_info",
"=",
"True",
")",
"return",
"[",
"extent",
",",
"srs",
"]"
] |
Get the extent of an ESRI resource
|
[
"Get",
"the",
"extent",
"of",
"an",
"ESRI",
"resource"
] |
899a5385b15af7fba190ab4fae1d41e47d155a1b
|
https://github.com/cga-harvard/Hypermap-Registry/blob/899a5385b15af7fba190ab4fae1d41e47d155a1b/hypermap/aggregator/utils.py#L461-L479
|
train
|
cga-harvard/Hypermap-Registry
|
hypermap/aggregator/utils.py
|
bbox2wktpolygon
|
def bbox2wktpolygon(bbox):
"""
Return OGC WKT Polygon of a simple bbox list of strings
"""
minx = float(bbox[0])
miny = float(bbox[1])
maxx = float(bbox[2])
maxy = float(bbox[3])
return 'POLYGON((%.2f %.2f, %.2f %.2f, %.2f %.2f, %.2f %.2f, %.2f %.2f))' \
% (minx, miny, minx, maxy, maxx, maxy, maxx, miny, minx, miny)
|
python
|
def bbox2wktpolygon(bbox):
"""
Return OGC WKT Polygon of a simple bbox list of strings
"""
minx = float(bbox[0])
miny = float(bbox[1])
maxx = float(bbox[2])
maxy = float(bbox[3])
return 'POLYGON((%.2f %.2f, %.2f %.2f, %.2f %.2f, %.2f %.2f, %.2f %.2f))' \
% (minx, miny, minx, maxy, maxx, maxy, maxx, miny, minx, miny)
|
[
"def",
"bbox2wktpolygon",
"(",
"bbox",
")",
":",
"minx",
"=",
"float",
"(",
"bbox",
"[",
"0",
"]",
")",
"miny",
"=",
"float",
"(",
"bbox",
"[",
"1",
"]",
")",
"maxx",
"=",
"float",
"(",
"bbox",
"[",
"2",
"]",
")",
"maxy",
"=",
"float",
"(",
"bbox",
"[",
"3",
"]",
")",
"return",
"'POLYGON((%.2f %.2f, %.2f %.2f, %.2f %.2f, %.2f %.2f, %.2f %.2f))'",
"%",
"(",
"minx",
",",
"miny",
",",
"minx",
",",
"maxy",
",",
"maxx",
",",
"maxy",
",",
"maxx",
",",
"miny",
",",
"minx",
",",
"miny",
")"
] |
Return OGC WKT Polygon of a simple bbox list of strings
|
[
"Return",
"OGC",
"WKT",
"Polygon",
"of",
"a",
"simple",
"bbox",
"list",
"of",
"strings"
] |
899a5385b15af7fba190ab4fae1d41e47d155a1b
|
https://github.com/cga-harvard/Hypermap-Registry/blob/899a5385b15af7fba190ab4fae1d41e47d155a1b/hypermap/aggregator/utils.py#L503-L513
|
train
|
cga-harvard/Hypermap-Registry
|
hypermap/aggregator/utils.py
|
get_solr_date
|
def get_solr_date(pydate, is_negative):
"""
Returns a date in a valid Solr format from a string.
"""
# check if date is valid and then set it to solr format YYYY-MM-DDThh:mm:ssZ
try:
if isinstance(pydate, datetime.datetime):
solr_date = '%sZ' % pydate.isoformat()[0:19]
if is_negative:
LOGGER.debug('%s This layer has a negative date' % solr_date)
solr_date = '-%s' % solr_date
return solr_date
else:
return None
except Exception, e:
LOGGER.error(e, exc_info=True)
return None
|
python
|
def get_solr_date(pydate, is_negative):
"""
Returns a date in a valid Solr format from a string.
"""
# check if date is valid and then set it to solr format YYYY-MM-DDThh:mm:ssZ
try:
if isinstance(pydate, datetime.datetime):
solr_date = '%sZ' % pydate.isoformat()[0:19]
if is_negative:
LOGGER.debug('%s This layer has a negative date' % solr_date)
solr_date = '-%s' % solr_date
return solr_date
else:
return None
except Exception, e:
LOGGER.error(e, exc_info=True)
return None
|
[
"def",
"get_solr_date",
"(",
"pydate",
",",
"is_negative",
")",
":",
"# check if date is valid and then set it to solr format YYYY-MM-DDThh:mm:ssZ",
"try",
":",
"if",
"isinstance",
"(",
"pydate",
",",
"datetime",
".",
"datetime",
")",
":",
"solr_date",
"=",
"'%sZ'",
"%",
"pydate",
".",
"isoformat",
"(",
")",
"[",
"0",
":",
"19",
"]",
"if",
"is_negative",
":",
"LOGGER",
".",
"debug",
"(",
"'%s This layer has a negative date'",
"%",
"solr_date",
")",
"solr_date",
"=",
"'-%s'",
"%",
"solr_date",
"return",
"solr_date",
"else",
":",
"return",
"None",
"except",
"Exception",
",",
"e",
":",
"LOGGER",
".",
"error",
"(",
"e",
",",
"exc_info",
"=",
"True",
")",
"return",
"None"
] |
Returns a date in a valid Solr format from a string.
|
[
"Returns",
"a",
"date",
"in",
"a",
"valid",
"Solr",
"format",
"from",
"a",
"string",
"."
] |
899a5385b15af7fba190ab4fae1d41e47d155a1b
|
https://github.com/cga-harvard/Hypermap-Registry/blob/899a5385b15af7fba190ab4fae1d41e47d155a1b/hypermap/aggregator/utils.py#L516-L532
|
train
|
cga-harvard/Hypermap-Registry
|
hypermap/aggregator/utils.py
|
get_date
|
def get_date(layer):
"""
Returns a custom date representation. A date can be detected or from metadata.
It can be a range or a simple date in isoformat.
"""
date = None
sign = '+'
date_type = 1
layer_dates = layer.get_layer_dates()
# we index the first date!
if layer_dates:
sign = layer_dates[0][0]
date = layer_dates[0][1]
date_type = layer_dates[0][2]
if date is None:
date = layer.created
# layer date > 2300 is invalid for sure
# TODO put this logic in date miner
if date.year > 2300:
date = None
if date_type == 0:
date_type = "Detected"
if date_type == 1:
date_type = "From Metadata"
return get_solr_date(date, (sign == '-')), date_type
|
python
|
def get_date(layer):
"""
Returns a custom date representation. A date can be detected or from metadata.
It can be a range or a simple date in isoformat.
"""
date = None
sign = '+'
date_type = 1
layer_dates = layer.get_layer_dates()
# we index the first date!
if layer_dates:
sign = layer_dates[0][0]
date = layer_dates[0][1]
date_type = layer_dates[0][2]
if date is None:
date = layer.created
# layer date > 2300 is invalid for sure
# TODO put this logic in date miner
if date.year > 2300:
date = None
if date_type == 0:
date_type = "Detected"
if date_type == 1:
date_type = "From Metadata"
return get_solr_date(date, (sign == '-')), date_type
|
[
"def",
"get_date",
"(",
"layer",
")",
":",
"date",
"=",
"None",
"sign",
"=",
"'+'",
"date_type",
"=",
"1",
"layer_dates",
"=",
"layer",
".",
"get_layer_dates",
"(",
")",
"# we index the first date!",
"if",
"layer_dates",
":",
"sign",
"=",
"layer_dates",
"[",
"0",
"]",
"[",
"0",
"]",
"date",
"=",
"layer_dates",
"[",
"0",
"]",
"[",
"1",
"]",
"date_type",
"=",
"layer_dates",
"[",
"0",
"]",
"[",
"2",
"]",
"if",
"date",
"is",
"None",
":",
"date",
"=",
"layer",
".",
"created",
"# layer date > 2300 is invalid for sure",
"# TODO put this logic in date miner",
"if",
"date",
".",
"year",
">",
"2300",
":",
"date",
"=",
"None",
"if",
"date_type",
"==",
"0",
":",
"date_type",
"=",
"\"Detected\"",
"if",
"date_type",
"==",
"1",
":",
"date_type",
"=",
"\"From Metadata\"",
"return",
"get_solr_date",
"(",
"date",
",",
"(",
"sign",
"==",
"'-'",
")",
")",
",",
"date_type"
] |
Returns a custom date representation. A date can be detected or from metadata.
It can be a range or a simple date in isoformat.
|
[
"Returns",
"a",
"custom",
"date",
"representation",
".",
"A",
"date",
"can",
"be",
"detected",
"or",
"from",
"metadata",
".",
"It",
"can",
"be",
"a",
"range",
"or",
"a",
"simple",
"date",
"in",
"isoformat",
"."
] |
899a5385b15af7fba190ab4fae1d41e47d155a1b
|
https://github.com/cga-harvard/Hypermap-Registry/blob/899a5385b15af7fba190ab4fae1d41e47d155a1b/hypermap/aggregator/utils.py#L535-L559
|
train
|
cga-harvard/Hypermap-Registry
|
hypermap/aggregator/utils.py
|
layer2dict
|
def layer2dict(layer):
"""
Return a json representation for a layer.
"""
category = None
username = None
# bbox must be valid before proceeding
if not layer.has_valid_bbox():
message = 'Layer id: %s has a not valid bbox' % layer.id
return None, message
# we can proceed safely
bbox = [float(layer.bbox_x0), float(layer.bbox_y0), float(layer.bbox_x1), float(layer.bbox_y1)]
minX = bbox[0]
minY = bbox[1]
maxX = bbox[2]
maxY = bbox[3]
# coords hack needed by solr
if (minX < -180):
minX = -180
if (maxX > 180):
maxX = 180
if (minY < -90):
minY = -90
if (maxY > 90):
maxY = 90
rectangle = box(minX, minY, maxX, maxY)
wkt = "ENVELOPE({:f},{:f},{:f},{:f})".format(minX, maxX, maxY, minY)
halfWidth = (maxX - minX) / 2.0
halfHeight = (maxY - minY) / 2.0
area = (halfWidth * 2) * (halfHeight * 2)
domain = get_domain(layer.service.url)
if hasattr(layer, 'layerwm'):
category = layer.layerwm.category
username = layer.layerwm.username
abstract = layer.abstract
if abstract:
abstract = strip_tags(layer.abstract)
else:
abstract = ''
if layer.type == "WM":
originator = username
else:
originator = domain
layer_dict = {
'id': layer.id,
'uuid': str(layer.uuid),
'type': 'Layer',
'layer_id': layer.id,
'name': layer.name,
'title': layer.title,
'layer_originator': originator,
'service_id': layer.service.id,
'service_type': layer.service.type,
'layer_category': category,
'layer_username': username,
'url': layer.url,
'keywords': [kw.name for kw in layer.keywords.all()],
'reliability': layer.reliability,
'recent_reliability': layer.recent_reliability,
'last_status': layer.last_status,
'is_public': layer.is_public,
'is_valid': layer.is_valid,
'availability': 'Online',
'location': '{"layerInfoPage": "' + layer.get_absolute_url + '"}',
'abstract': abstract,
'domain_name': layer.service.get_domain
}
solr_date, date_type = get_date(layer)
if solr_date is not None:
layer_dict['layer_date'] = solr_date
layer_dict['layer_datetype'] = date_type
if bbox is not None:
layer_dict['min_x'] = minX
layer_dict['min_y'] = minY
layer_dict['max_x'] = maxX
layer_dict['max_y'] = maxY
layer_dict['area'] = area
layer_dict['bbox'] = wkt
layer_dict['centroid_x'] = rectangle.centroid.x
layer_dict['centroid_y'] = rectangle.centroid.y
srs_list = [srs.encode('utf-8') for srs in layer.service.srs.values_list('code', flat=True)]
layer_dict['srs'] = srs_list
if layer.get_tile_url():
layer_dict['tile_url'] = layer.get_tile_url()
message = 'Layer %s successfully converted to json' % layer.id
return layer_dict, message
|
python
|
def layer2dict(layer):
"""
Return a json representation for a layer.
"""
category = None
username = None
# bbox must be valid before proceeding
if not layer.has_valid_bbox():
message = 'Layer id: %s has a not valid bbox' % layer.id
return None, message
# we can proceed safely
bbox = [float(layer.bbox_x0), float(layer.bbox_y0), float(layer.bbox_x1), float(layer.bbox_y1)]
minX = bbox[0]
minY = bbox[1]
maxX = bbox[2]
maxY = bbox[3]
# coords hack needed by solr
if (minX < -180):
minX = -180
if (maxX > 180):
maxX = 180
if (minY < -90):
minY = -90
if (maxY > 90):
maxY = 90
rectangle = box(minX, minY, maxX, maxY)
wkt = "ENVELOPE({:f},{:f},{:f},{:f})".format(minX, maxX, maxY, minY)
halfWidth = (maxX - minX) / 2.0
halfHeight = (maxY - minY) / 2.0
area = (halfWidth * 2) * (halfHeight * 2)
domain = get_domain(layer.service.url)
if hasattr(layer, 'layerwm'):
category = layer.layerwm.category
username = layer.layerwm.username
abstract = layer.abstract
if abstract:
abstract = strip_tags(layer.abstract)
else:
abstract = ''
if layer.type == "WM":
originator = username
else:
originator = domain
layer_dict = {
'id': layer.id,
'uuid': str(layer.uuid),
'type': 'Layer',
'layer_id': layer.id,
'name': layer.name,
'title': layer.title,
'layer_originator': originator,
'service_id': layer.service.id,
'service_type': layer.service.type,
'layer_category': category,
'layer_username': username,
'url': layer.url,
'keywords': [kw.name for kw in layer.keywords.all()],
'reliability': layer.reliability,
'recent_reliability': layer.recent_reliability,
'last_status': layer.last_status,
'is_public': layer.is_public,
'is_valid': layer.is_valid,
'availability': 'Online',
'location': '{"layerInfoPage": "' + layer.get_absolute_url + '"}',
'abstract': abstract,
'domain_name': layer.service.get_domain
}
solr_date, date_type = get_date(layer)
if solr_date is not None:
layer_dict['layer_date'] = solr_date
layer_dict['layer_datetype'] = date_type
if bbox is not None:
layer_dict['min_x'] = minX
layer_dict['min_y'] = minY
layer_dict['max_x'] = maxX
layer_dict['max_y'] = maxY
layer_dict['area'] = area
layer_dict['bbox'] = wkt
layer_dict['centroid_x'] = rectangle.centroid.x
layer_dict['centroid_y'] = rectangle.centroid.y
srs_list = [srs.encode('utf-8') for srs in layer.service.srs.values_list('code', flat=True)]
layer_dict['srs'] = srs_list
if layer.get_tile_url():
layer_dict['tile_url'] = layer.get_tile_url()
message = 'Layer %s successfully converted to json' % layer.id
return layer_dict, message
|
[
"def",
"layer2dict",
"(",
"layer",
")",
":",
"category",
"=",
"None",
"username",
"=",
"None",
"# bbox must be valid before proceeding",
"if",
"not",
"layer",
".",
"has_valid_bbox",
"(",
")",
":",
"message",
"=",
"'Layer id: %s has a not valid bbox'",
"%",
"layer",
".",
"id",
"return",
"None",
",",
"message",
"# we can proceed safely",
"bbox",
"=",
"[",
"float",
"(",
"layer",
".",
"bbox_x0",
")",
",",
"float",
"(",
"layer",
".",
"bbox_y0",
")",
",",
"float",
"(",
"layer",
".",
"bbox_x1",
")",
",",
"float",
"(",
"layer",
".",
"bbox_y1",
")",
"]",
"minX",
"=",
"bbox",
"[",
"0",
"]",
"minY",
"=",
"bbox",
"[",
"1",
"]",
"maxX",
"=",
"bbox",
"[",
"2",
"]",
"maxY",
"=",
"bbox",
"[",
"3",
"]",
"# coords hack needed by solr",
"if",
"(",
"minX",
"<",
"-",
"180",
")",
":",
"minX",
"=",
"-",
"180",
"if",
"(",
"maxX",
">",
"180",
")",
":",
"maxX",
"=",
"180",
"if",
"(",
"minY",
"<",
"-",
"90",
")",
":",
"minY",
"=",
"-",
"90",
"if",
"(",
"maxY",
">",
"90",
")",
":",
"maxY",
"=",
"90",
"rectangle",
"=",
"box",
"(",
"minX",
",",
"minY",
",",
"maxX",
",",
"maxY",
")",
"wkt",
"=",
"\"ENVELOPE({:f},{:f},{:f},{:f})\"",
".",
"format",
"(",
"minX",
",",
"maxX",
",",
"maxY",
",",
"minY",
")",
"halfWidth",
"=",
"(",
"maxX",
"-",
"minX",
")",
"/",
"2.0",
"halfHeight",
"=",
"(",
"maxY",
"-",
"minY",
")",
"/",
"2.0",
"area",
"=",
"(",
"halfWidth",
"*",
"2",
")",
"*",
"(",
"halfHeight",
"*",
"2",
")",
"domain",
"=",
"get_domain",
"(",
"layer",
".",
"service",
".",
"url",
")",
"if",
"hasattr",
"(",
"layer",
",",
"'layerwm'",
")",
":",
"category",
"=",
"layer",
".",
"layerwm",
".",
"category",
"username",
"=",
"layer",
".",
"layerwm",
".",
"username",
"abstract",
"=",
"layer",
".",
"abstract",
"if",
"abstract",
":",
"abstract",
"=",
"strip_tags",
"(",
"layer",
".",
"abstract",
")",
"else",
":",
"abstract",
"=",
"''",
"if",
"layer",
".",
"type",
"==",
"\"WM\"",
":",
"originator",
"=",
"username",
"else",
":",
"originator",
"=",
"domain",
"layer_dict",
"=",
"{",
"'id'",
":",
"layer",
".",
"id",
",",
"'uuid'",
":",
"str",
"(",
"layer",
".",
"uuid",
")",
",",
"'type'",
":",
"'Layer'",
",",
"'layer_id'",
":",
"layer",
".",
"id",
",",
"'name'",
":",
"layer",
".",
"name",
",",
"'title'",
":",
"layer",
".",
"title",
",",
"'layer_originator'",
":",
"originator",
",",
"'service_id'",
":",
"layer",
".",
"service",
".",
"id",
",",
"'service_type'",
":",
"layer",
".",
"service",
".",
"type",
",",
"'layer_category'",
":",
"category",
",",
"'layer_username'",
":",
"username",
",",
"'url'",
":",
"layer",
".",
"url",
",",
"'keywords'",
":",
"[",
"kw",
".",
"name",
"for",
"kw",
"in",
"layer",
".",
"keywords",
".",
"all",
"(",
")",
"]",
",",
"'reliability'",
":",
"layer",
".",
"reliability",
",",
"'recent_reliability'",
":",
"layer",
".",
"recent_reliability",
",",
"'last_status'",
":",
"layer",
".",
"last_status",
",",
"'is_public'",
":",
"layer",
".",
"is_public",
",",
"'is_valid'",
":",
"layer",
".",
"is_valid",
",",
"'availability'",
":",
"'Online'",
",",
"'location'",
":",
"'{\"layerInfoPage\": \"'",
"+",
"layer",
".",
"get_absolute_url",
"+",
"'\"}'",
",",
"'abstract'",
":",
"abstract",
",",
"'domain_name'",
":",
"layer",
".",
"service",
".",
"get_domain",
"}",
"solr_date",
",",
"date_type",
"=",
"get_date",
"(",
"layer",
")",
"if",
"solr_date",
"is",
"not",
"None",
":",
"layer_dict",
"[",
"'layer_date'",
"]",
"=",
"solr_date",
"layer_dict",
"[",
"'layer_datetype'",
"]",
"=",
"date_type",
"if",
"bbox",
"is",
"not",
"None",
":",
"layer_dict",
"[",
"'min_x'",
"]",
"=",
"minX",
"layer_dict",
"[",
"'min_y'",
"]",
"=",
"minY",
"layer_dict",
"[",
"'max_x'",
"]",
"=",
"maxX",
"layer_dict",
"[",
"'max_y'",
"]",
"=",
"maxY",
"layer_dict",
"[",
"'area'",
"]",
"=",
"area",
"layer_dict",
"[",
"'bbox'",
"]",
"=",
"wkt",
"layer_dict",
"[",
"'centroid_x'",
"]",
"=",
"rectangle",
".",
"centroid",
".",
"x",
"layer_dict",
"[",
"'centroid_y'",
"]",
"=",
"rectangle",
".",
"centroid",
".",
"y",
"srs_list",
"=",
"[",
"srs",
".",
"encode",
"(",
"'utf-8'",
")",
"for",
"srs",
"in",
"layer",
".",
"service",
".",
"srs",
".",
"values_list",
"(",
"'code'",
",",
"flat",
"=",
"True",
")",
"]",
"layer_dict",
"[",
"'srs'",
"]",
"=",
"srs_list",
"if",
"layer",
".",
"get_tile_url",
"(",
")",
":",
"layer_dict",
"[",
"'tile_url'",
"]",
"=",
"layer",
".",
"get_tile_url",
"(",
")",
"message",
"=",
"'Layer %s successfully converted to json'",
"%",
"layer",
".",
"id",
"return",
"layer_dict",
",",
"message"
] |
Return a json representation for a layer.
|
[
"Return",
"a",
"json",
"representation",
"for",
"a",
"layer",
"."
] |
899a5385b15af7fba190ab4fae1d41e47d155a1b
|
https://github.com/cga-harvard/Hypermap-Registry/blob/899a5385b15af7fba190ab4fae1d41e47d155a1b/hypermap/aggregator/utils.py#L570-L661
|
train
|
cga-harvard/Hypermap-Registry
|
hypermap/aggregator/utils.py
|
detect_metadata_url_scheme
|
def detect_metadata_url_scheme(url):
"""detect whether a url is a Service type that HHypermap supports"""
scheme = None
url_lower = url.lower()
if any(x in url_lower for x in ['wms', 'service=wms']):
scheme = 'OGC:WMS'
if any(x in url_lower for x in ['wmts', 'service=wmts']):
scheme = 'OGC:WMTS'
elif all(x in url for x in ['/MapServer', 'f=json']):
scheme = 'ESRI:ArcGIS:MapServer'
elif all(x in url for x in ['/ImageServer', 'f=json']):
scheme = 'ESRI:ArcGIS:ImageServer'
return scheme
|
python
|
def detect_metadata_url_scheme(url):
"""detect whether a url is a Service type that HHypermap supports"""
scheme = None
url_lower = url.lower()
if any(x in url_lower for x in ['wms', 'service=wms']):
scheme = 'OGC:WMS'
if any(x in url_lower for x in ['wmts', 'service=wmts']):
scheme = 'OGC:WMTS'
elif all(x in url for x in ['/MapServer', 'f=json']):
scheme = 'ESRI:ArcGIS:MapServer'
elif all(x in url for x in ['/ImageServer', 'f=json']):
scheme = 'ESRI:ArcGIS:ImageServer'
return scheme
|
[
"def",
"detect_metadata_url_scheme",
"(",
"url",
")",
":",
"scheme",
"=",
"None",
"url_lower",
"=",
"url",
".",
"lower",
"(",
")",
"if",
"any",
"(",
"x",
"in",
"url_lower",
"for",
"x",
"in",
"[",
"'wms'",
",",
"'service=wms'",
"]",
")",
":",
"scheme",
"=",
"'OGC:WMS'",
"if",
"any",
"(",
"x",
"in",
"url_lower",
"for",
"x",
"in",
"[",
"'wmts'",
",",
"'service=wmts'",
"]",
")",
":",
"scheme",
"=",
"'OGC:WMTS'",
"elif",
"all",
"(",
"x",
"in",
"url",
"for",
"x",
"in",
"[",
"'/MapServer'",
",",
"'f=json'",
"]",
")",
":",
"scheme",
"=",
"'ESRI:ArcGIS:MapServer'",
"elif",
"all",
"(",
"x",
"in",
"url",
"for",
"x",
"in",
"[",
"'/ImageServer'",
",",
"'f=json'",
"]",
")",
":",
"scheme",
"=",
"'ESRI:ArcGIS:ImageServer'",
"return",
"scheme"
] |
detect whether a url is a Service type that HHypermap supports
|
[
"detect",
"whether",
"a",
"url",
"is",
"a",
"Service",
"type",
"that",
"HHypermap",
"supports"
] |
899a5385b15af7fba190ab4fae1d41e47d155a1b
|
https://github.com/cga-harvard/Hypermap-Registry/blob/899a5385b15af7fba190ab4fae1d41e47d155a1b/hypermap/aggregator/utils.py#L664-L679
|
train
|
cga-harvard/Hypermap-Registry
|
hypermap/aggregator/views.py
|
serialize_checks
|
def serialize_checks(check_set):
"""
Serialize a check_set for raphael
"""
check_set_list = []
for check in check_set.all()[:25]:
check_set_list.append(
{
'datetime': check.checked_datetime.isoformat(),
'value': check.response_time,
'success': 1 if check.success else 0
}
)
return check_set_list
|
python
|
def serialize_checks(check_set):
"""
Serialize a check_set for raphael
"""
check_set_list = []
for check in check_set.all()[:25]:
check_set_list.append(
{
'datetime': check.checked_datetime.isoformat(),
'value': check.response_time,
'success': 1 if check.success else 0
}
)
return check_set_list
|
[
"def",
"serialize_checks",
"(",
"check_set",
")",
":",
"check_set_list",
"=",
"[",
"]",
"for",
"check",
"in",
"check_set",
".",
"all",
"(",
")",
"[",
":",
"25",
"]",
":",
"check_set_list",
".",
"append",
"(",
"{",
"'datetime'",
":",
"check",
".",
"checked_datetime",
".",
"isoformat",
"(",
")",
",",
"'value'",
":",
"check",
".",
"response_time",
",",
"'success'",
":",
"1",
"if",
"check",
".",
"success",
"else",
"0",
"}",
")",
"return",
"check_set_list"
] |
Serialize a check_set for raphael
|
[
"Serialize",
"a",
"check_set",
"for",
"raphael"
] |
899a5385b15af7fba190ab4fae1d41e47d155a1b
|
https://github.com/cga-harvard/Hypermap-Registry/blob/899a5385b15af7fba190ab4fae1d41e47d155a1b/hypermap/aggregator/views.py#L45-L58
|
train
|
cga-harvard/Hypermap-Registry
|
hypermap/aggregator/views.py
|
domains
|
def domains(request):
"""
A page with number of services and layers faceted on domains.
"""
url = ''
query = '*:*&facet=true&facet.limit=-1&facet.pivot=domain_name,service_id&wt=json&indent=true&rows=0'
if settings.SEARCH_TYPE == 'elasticsearch':
url = '%s/select?q=%s' % (settings.SEARCH_URL, query)
if settings.SEARCH_TYPE == 'solr':
url = '%s/solr/hypermap/select?q=%s' % (settings.SEARCH_URL, query)
LOGGER.debug(url)
response = urllib2.urlopen(url)
data = response.read().replace('\n', '')
# stats
layers_count = Layer.objects.all().count()
services_count = Service.objects.all().count()
template = loader.get_template('aggregator/index.html')
context = RequestContext(request, {
'data': data,
'layers_count': layers_count,
'services_count': services_count,
})
return HttpResponse(template.render(context))
|
python
|
def domains(request):
"""
A page with number of services and layers faceted on domains.
"""
url = ''
query = '*:*&facet=true&facet.limit=-1&facet.pivot=domain_name,service_id&wt=json&indent=true&rows=0'
if settings.SEARCH_TYPE == 'elasticsearch':
url = '%s/select?q=%s' % (settings.SEARCH_URL, query)
if settings.SEARCH_TYPE == 'solr':
url = '%s/solr/hypermap/select?q=%s' % (settings.SEARCH_URL, query)
LOGGER.debug(url)
response = urllib2.urlopen(url)
data = response.read().replace('\n', '')
# stats
layers_count = Layer.objects.all().count()
services_count = Service.objects.all().count()
template = loader.get_template('aggregator/index.html')
context = RequestContext(request, {
'data': data,
'layers_count': layers_count,
'services_count': services_count,
})
return HttpResponse(template.render(context))
|
[
"def",
"domains",
"(",
"request",
")",
":",
"url",
"=",
"''",
"query",
"=",
"'*:*&facet=true&facet.limit=-1&facet.pivot=domain_name,service_id&wt=json&indent=true&rows=0'",
"if",
"settings",
".",
"SEARCH_TYPE",
"==",
"'elasticsearch'",
":",
"url",
"=",
"'%s/select?q=%s'",
"%",
"(",
"settings",
".",
"SEARCH_URL",
",",
"query",
")",
"if",
"settings",
".",
"SEARCH_TYPE",
"==",
"'solr'",
":",
"url",
"=",
"'%s/solr/hypermap/select?q=%s'",
"%",
"(",
"settings",
".",
"SEARCH_URL",
",",
"query",
")",
"LOGGER",
".",
"debug",
"(",
"url",
")",
"response",
"=",
"urllib2",
".",
"urlopen",
"(",
"url",
")",
"data",
"=",
"response",
".",
"read",
"(",
")",
".",
"replace",
"(",
"'\\n'",
",",
"''",
")",
"# stats",
"layers_count",
"=",
"Layer",
".",
"objects",
".",
"all",
"(",
")",
".",
"count",
"(",
")",
"services_count",
"=",
"Service",
".",
"objects",
".",
"all",
"(",
")",
".",
"count",
"(",
")",
"template",
"=",
"loader",
".",
"get_template",
"(",
"'aggregator/index.html'",
")",
"context",
"=",
"RequestContext",
"(",
"request",
",",
"{",
"'data'",
":",
"data",
",",
"'layers_count'",
":",
"layers_count",
",",
"'services_count'",
":",
"services_count",
",",
"}",
")",
"return",
"HttpResponse",
"(",
"template",
".",
"render",
"(",
"context",
")",
")"
] |
A page with number of services and layers faceted on domains.
|
[
"A",
"page",
"with",
"number",
"of",
"services",
"and",
"layers",
"faceted",
"on",
"domains",
"."
] |
899a5385b15af7fba190ab4fae1d41e47d155a1b
|
https://github.com/cga-harvard/Hypermap-Registry/blob/899a5385b15af7fba190ab4fae1d41e47d155a1b/hypermap/aggregator/views.py#L62-L84
|
train
|
cga-harvard/Hypermap-Registry
|
hypermap/aggregator/views.py
|
tasks_runner
|
def tasks_runner(request):
"""
A page that let the admin to run global tasks.
"""
# server info
cached_layers_number = 0
cached_layers = cache.get('layers')
if cached_layers:
cached_layers_number = len(cached_layers)
cached_deleted_layers_number = 0
cached_deleted_layers = cache.get('deleted_layers')
if cached_deleted_layers:
cached_deleted_layers_number = len(cached_deleted_layers)
# task actions
if request.method == 'POST':
if 'check_all' in request.POST:
if settings.REGISTRY_SKIP_CELERY:
check_all_services()
else:
check_all_services.delay()
if 'index_all' in request.POST:
if settings.REGISTRY_SKIP_CELERY:
index_all_layers()
else:
index_all_layers.delay()
if 'index_cached' in request.POST:
if settings.REGISTRY_SKIP_CELERY:
index_cached_layers()
else:
index_cached_layers.delay()
if 'drop_cached' in request.POST:
cache.set('layers', None)
cache.set('deleted_layers', None)
if 'clear_index' in request.POST:
if settings.REGISTRY_SKIP_CELERY:
clear_index()
else:
clear_index.delay()
if 'remove_index' in request.POST:
if settings.REGISTRY_SKIP_CELERY:
unindex_layers_with_issues()
else:
unindex_layers_with_issues.delay()
return render(
request,
'aggregator/tasks_runner.html', {
'cached_layers_number': cached_layers_number,
'cached_deleted_layers_number': cached_deleted_layers_number,
}
)
|
python
|
def tasks_runner(request):
"""
A page that let the admin to run global tasks.
"""
# server info
cached_layers_number = 0
cached_layers = cache.get('layers')
if cached_layers:
cached_layers_number = len(cached_layers)
cached_deleted_layers_number = 0
cached_deleted_layers = cache.get('deleted_layers')
if cached_deleted_layers:
cached_deleted_layers_number = len(cached_deleted_layers)
# task actions
if request.method == 'POST':
if 'check_all' in request.POST:
if settings.REGISTRY_SKIP_CELERY:
check_all_services()
else:
check_all_services.delay()
if 'index_all' in request.POST:
if settings.REGISTRY_SKIP_CELERY:
index_all_layers()
else:
index_all_layers.delay()
if 'index_cached' in request.POST:
if settings.REGISTRY_SKIP_CELERY:
index_cached_layers()
else:
index_cached_layers.delay()
if 'drop_cached' in request.POST:
cache.set('layers', None)
cache.set('deleted_layers', None)
if 'clear_index' in request.POST:
if settings.REGISTRY_SKIP_CELERY:
clear_index()
else:
clear_index.delay()
if 'remove_index' in request.POST:
if settings.REGISTRY_SKIP_CELERY:
unindex_layers_with_issues()
else:
unindex_layers_with_issues.delay()
return render(
request,
'aggregator/tasks_runner.html', {
'cached_layers_number': cached_layers_number,
'cached_deleted_layers_number': cached_deleted_layers_number,
}
)
|
[
"def",
"tasks_runner",
"(",
"request",
")",
":",
"# server info",
"cached_layers_number",
"=",
"0",
"cached_layers",
"=",
"cache",
".",
"get",
"(",
"'layers'",
")",
"if",
"cached_layers",
":",
"cached_layers_number",
"=",
"len",
"(",
"cached_layers",
")",
"cached_deleted_layers_number",
"=",
"0",
"cached_deleted_layers",
"=",
"cache",
".",
"get",
"(",
"'deleted_layers'",
")",
"if",
"cached_deleted_layers",
":",
"cached_deleted_layers_number",
"=",
"len",
"(",
"cached_deleted_layers",
")",
"# task actions",
"if",
"request",
".",
"method",
"==",
"'POST'",
":",
"if",
"'check_all'",
"in",
"request",
".",
"POST",
":",
"if",
"settings",
".",
"REGISTRY_SKIP_CELERY",
":",
"check_all_services",
"(",
")",
"else",
":",
"check_all_services",
".",
"delay",
"(",
")",
"if",
"'index_all'",
"in",
"request",
".",
"POST",
":",
"if",
"settings",
".",
"REGISTRY_SKIP_CELERY",
":",
"index_all_layers",
"(",
")",
"else",
":",
"index_all_layers",
".",
"delay",
"(",
")",
"if",
"'index_cached'",
"in",
"request",
".",
"POST",
":",
"if",
"settings",
".",
"REGISTRY_SKIP_CELERY",
":",
"index_cached_layers",
"(",
")",
"else",
":",
"index_cached_layers",
".",
"delay",
"(",
")",
"if",
"'drop_cached'",
"in",
"request",
".",
"POST",
":",
"cache",
".",
"set",
"(",
"'layers'",
",",
"None",
")",
"cache",
".",
"set",
"(",
"'deleted_layers'",
",",
"None",
")",
"if",
"'clear_index'",
"in",
"request",
".",
"POST",
":",
"if",
"settings",
".",
"REGISTRY_SKIP_CELERY",
":",
"clear_index",
"(",
")",
"else",
":",
"clear_index",
".",
"delay",
"(",
")",
"if",
"'remove_index'",
"in",
"request",
".",
"POST",
":",
"if",
"settings",
".",
"REGISTRY_SKIP_CELERY",
":",
"unindex_layers_with_issues",
"(",
")",
"else",
":",
"unindex_layers_with_issues",
".",
"delay",
"(",
")",
"return",
"render",
"(",
"request",
",",
"'aggregator/tasks_runner.html'",
",",
"{",
"'cached_layers_number'",
":",
"cached_layers_number",
",",
"'cached_deleted_layers_number'",
":",
"cached_deleted_layers_number",
",",
"}",
")"
] |
A page that let the admin to run global tasks.
|
[
"A",
"page",
"that",
"let",
"the",
"admin",
"to",
"run",
"global",
"tasks",
"."
] |
899a5385b15af7fba190ab4fae1d41e47d155a1b
|
https://github.com/cga-harvard/Hypermap-Registry/blob/899a5385b15af7fba190ab4fae1d41e47d155a1b/hypermap/aggregator/views.py#L254-L307
|
train
|
cga-harvard/Hypermap-Registry
|
hypermap/aggregator/views.py
|
layer_mapproxy
|
def layer_mapproxy(request, catalog_slug, layer_uuid, path_info):
"""
Get Layer with matching catalog and uuid
"""
layer = get_object_or_404(Layer,
uuid=layer_uuid,
catalog__slug=catalog_slug)
# for WorldMap layers we need to use the url of the layer
if layer.service.type == 'Hypermap:WorldMap':
layer.service.url = layer.url
# Set up a mapproxy app for this particular layer
mp, yaml_config = get_mapproxy(layer)
query = request.META['QUERY_STRING']
if len(query) > 0:
path_info = path_info + '?' + query
params = {}
headers = {
'X-Script-Name': '/registry/{0}/layer/{1}/map/'.format(catalog_slug, layer.id),
'X-Forwarded-Host': request.META['HTTP_HOST'],
'HTTP_HOST': request.META['HTTP_HOST'],
'SERVER_NAME': request.META['SERVER_NAME'],
}
if path_info == '/config':
response = HttpResponse(yaml_config, content_type='text/plain')
return response
# Get a response from MapProxy as if it was running standalone.
mp_response = mp.get(path_info, params, headers)
# Create a Django response from the MapProxy WSGI response.
response = HttpResponse(mp_response.body, status=mp_response.status_int)
for header, value in mp_response.headers.iteritems():
response[header] = value
return response
|
python
|
def layer_mapproxy(request, catalog_slug, layer_uuid, path_info):
"""
Get Layer with matching catalog and uuid
"""
layer = get_object_or_404(Layer,
uuid=layer_uuid,
catalog__slug=catalog_slug)
# for WorldMap layers we need to use the url of the layer
if layer.service.type == 'Hypermap:WorldMap':
layer.service.url = layer.url
# Set up a mapproxy app for this particular layer
mp, yaml_config = get_mapproxy(layer)
query = request.META['QUERY_STRING']
if len(query) > 0:
path_info = path_info + '?' + query
params = {}
headers = {
'X-Script-Name': '/registry/{0}/layer/{1}/map/'.format(catalog_slug, layer.id),
'X-Forwarded-Host': request.META['HTTP_HOST'],
'HTTP_HOST': request.META['HTTP_HOST'],
'SERVER_NAME': request.META['SERVER_NAME'],
}
if path_info == '/config':
response = HttpResponse(yaml_config, content_type='text/plain')
return response
# Get a response from MapProxy as if it was running standalone.
mp_response = mp.get(path_info, params, headers)
# Create a Django response from the MapProxy WSGI response.
response = HttpResponse(mp_response.body, status=mp_response.status_int)
for header, value in mp_response.headers.iteritems():
response[header] = value
return response
|
[
"def",
"layer_mapproxy",
"(",
"request",
",",
"catalog_slug",
",",
"layer_uuid",
",",
"path_info",
")",
":",
"layer",
"=",
"get_object_or_404",
"(",
"Layer",
",",
"uuid",
"=",
"layer_uuid",
",",
"catalog__slug",
"=",
"catalog_slug",
")",
"# for WorldMap layers we need to use the url of the layer",
"if",
"layer",
".",
"service",
".",
"type",
"==",
"'Hypermap:WorldMap'",
":",
"layer",
".",
"service",
".",
"url",
"=",
"layer",
".",
"url",
"# Set up a mapproxy app for this particular layer",
"mp",
",",
"yaml_config",
"=",
"get_mapproxy",
"(",
"layer",
")",
"query",
"=",
"request",
".",
"META",
"[",
"'QUERY_STRING'",
"]",
"if",
"len",
"(",
"query",
")",
">",
"0",
":",
"path_info",
"=",
"path_info",
"+",
"'?'",
"+",
"query",
"params",
"=",
"{",
"}",
"headers",
"=",
"{",
"'X-Script-Name'",
":",
"'/registry/{0}/layer/{1}/map/'",
".",
"format",
"(",
"catalog_slug",
",",
"layer",
".",
"id",
")",
",",
"'X-Forwarded-Host'",
":",
"request",
".",
"META",
"[",
"'HTTP_HOST'",
"]",
",",
"'HTTP_HOST'",
":",
"request",
".",
"META",
"[",
"'HTTP_HOST'",
"]",
",",
"'SERVER_NAME'",
":",
"request",
".",
"META",
"[",
"'SERVER_NAME'",
"]",
",",
"}",
"if",
"path_info",
"==",
"'/config'",
":",
"response",
"=",
"HttpResponse",
"(",
"yaml_config",
",",
"content_type",
"=",
"'text/plain'",
")",
"return",
"response",
"# Get a response from MapProxy as if it was running standalone.",
"mp_response",
"=",
"mp",
".",
"get",
"(",
"path_info",
",",
"params",
",",
"headers",
")",
"# Create a Django response from the MapProxy WSGI response.",
"response",
"=",
"HttpResponse",
"(",
"mp_response",
".",
"body",
",",
"status",
"=",
"mp_response",
".",
"status_int",
")",
"for",
"header",
",",
"value",
"in",
"mp_response",
".",
"headers",
".",
"iteritems",
"(",
")",
":",
"response",
"[",
"header",
"]",
"=",
"value",
"return",
"response"
] |
Get Layer with matching catalog and uuid
|
[
"Get",
"Layer",
"with",
"matching",
"catalog",
"and",
"uuid"
] |
899a5385b15af7fba190ab4fae1d41e47d155a1b
|
https://github.com/cga-harvard/Hypermap-Registry/blob/899a5385b15af7fba190ab4fae1d41e47d155a1b/hypermap/aggregator/views.py#L310-L350
|
train
|
cga-harvard/Hypermap-Registry
|
hypermap/search_api/utils.py
|
parse_datetime
|
def parse_datetime(date_str):
"""
Parses a date string to date object.
for BCE dates, only supports the year part.
"""
is_common_era = True
date_str_parts = date_str.split("-")
if date_str_parts and date_str_parts[0] == '':
is_common_era = False
# for now, only support BCE years
# assume the datetime comes complete, but
# when it comes only the year, add the missing datetime info:
if len(date_str_parts) == 2:
date_str = date_str + "-01-01T00:00:00Z"
parsed_datetime = {
'is_common_era': is_common_era,
'parsed_datetime': None
}
if is_common_era:
if date_str == '*':
return parsed_datetime # open ended.
default = datetime.datetime.now().replace(
hour=0, minute=0, second=0, microsecond=0,
day=1, month=1
)
parsed_datetime['parsed_datetime'] = parse(date_str, default=default)
return parsed_datetime
parsed_datetime['parsed_datetime'] = date_str
return parsed_datetime
|
python
|
def parse_datetime(date_str):
"""
Parses a date string to date object.
for BCE dates, only supports the year part.
"""
is_common_era = True
date_str_parts = date_str.split("-")
if date_str_parts and date_str_parts[0] == '':
is_common_era = False
# for now, only support BCE years
# assume the datetime comes complete, but
# when it comes only the year, add the missing datetime info:
if len(date_str_parts) == 2:
date_str = date_str + "-01-01T00:00:00Z"
parsed_datetime = {
'is_common_era': is_common_era,
'parsed_datetime': None
}
if is_common_era:
if date_str == '*':
return parsed_datetime # open ended.
default = datetime.datetime.now().replace(
hour=0, minute=0, second=0, microsecond=0,
day=1, month=1
)
parsed_datetime['parsed_datetime'] = parse(date_str, default=default)
return parsed_datetime
parsed_datetime['parsed_datetime'] = date_str
return parsed_datetime
|
[
"def",
"parse_datetime",
"(",
"date_str",
")",
":",
"is_common_era",
"=",
"True",
"date_str_parts",
"=",
"date_str",
".",
"split",
"(",
"\"-\"",
")",
"if",
"date_str_parts",
"and",
"date_str_parts",
"[",
"0",
"]",
"==",
"''",
":",
"is_common_era",
"=",
"False",
"# for now, only support BCE years",
"# assume the datetime comes complete, but",
"# when it comes only the year, add the missing datetime info:",
"if",
"len",
"(",
"date_str_parts",
")",
"==",
"2",
":",
"date_str",
"=",
"date_str",
"+",
"\"-01-01T00:00:00Z\"",
"parsed_datetime",
"=",
"{",
"'is_common_era'",
":",
"is_common_era",
",",
"'parsed_datetime'",
":",
"None",
"}",
"if",
"is_common_era",
":",
"if",
"date_str",
"==",
"'*'",
":",
"return",
"parsed_datetime",
"# open ended.",
"default",
"=",
"datetime",
".",
"datetime",
".",
"now",
"(",
")",
".",
"replace",
"(",
"hour",
"=",
"0",
",",
"minute",
"=",
"0",
",",
"second",
"=",
"0",
",",
"microsecond",
"=",
"0",
",",
"day",
"=",
"1",
",",
"month",
"=",
"1",
")",
"parsed_datetime",
"[",
"'parsed_datetime'",
"]",
"=",
"parse",
"(",
"date_str",
",",
"default",
"=",
"default",
")",
"return",
"parsed_datetime",
"parsed_datetime",
"[",
"'parsed_datetime'",
"]",
"=",
"date_str",
"return",
"parsed_datetime"
] |
Parses a date string to date object.
for BCE dates, only supports the year part.
|
[
"Parses",
"a",
"date",
"string",
"to",
"date",
"object",
".",
"for",
"BCE",
"dates",
"only",
"supports",
"the",
"year",
"part",
"."
] |
899a5385b15af7fba190ab4fae1d41e47d155a1b
|
https://github.com/cga-harvard/Hypermap-Registry/blob/899a5385b15af7fba190ab4fae1d41e47d155a1b/hypermap/search_api/utils.py#L24-L57
|
train
|
cga-harvard/Hypermap-Registry
|
hypermap/search_api/utils.py
|
parse_solr_time_range_as_pair
|
def parse_solr_time_range_as_pair(time_filter):
"""
:param time_filter: [2013-03-01 TO 2013-05-01T00:00:00]
:return: (2013-03-01, 2013-05-01T00:00:00)
"""
pattern = "\\[(.*) TO (.*)\\]"
matcher = re.search(pattern, time_filter)
if matcher:
return matcher.group(1), matcher.group(2)
else:
raise Exception("Regex {0} couldn't parse {1}".format(pattern, time_filter))
|
python
|
def parse_solr_time_range_as_pair(time_filter):
"""
:param time_filter: [2013-03-01 TO 2013-05-01T00:00:00]
:return: (2013-03-01, 2013-05-01T00:00:00)
"""
pattern = "\\[(.*) TO (.*)\\]"
matcher = re.search(pattern, time_filter)
if matcher:
return matcher.group(1), matcher.group(2)
else:
raise Exception("Regex {0} couldn't parse {1}".format(pattern, time_filter))
|
[
"def",
"parse_solr_time_range_as_pair",
"(",
"time_filter",
")",
":",
"pattern",
"=",
"\"\\\\[(.*) TO (.*)\\\\]\"",
"matcher",
"=",
"re",
".",
"search",
"(",
"pattern",
",",
"time_filter",
")",
"if",
"matcher",
":",
"return",
"matcher",
".",
"group",
"(",
"1",
")",
",",
"matcher",
".",
"group",
"(",
"2",
")",
"else",
":",
"raise",
"Exception",
"(",
"\"Regex {0} couldn't parse {1}\"",
".",
"format",
"(",
"pattern",
",",
"time_filter",
")",
")"
] |
:param time_filter: [2013-03-01 TO 2013-05-01T00:00:00]
:return: (2013-03-01, 2013-05-01T00:00:00)
|
[
":",
"param",
"time_filter",
":",
"[",
"2013",
"-",
"03",
"-",
"01",
"TO",
"2013",
"-",
"05",
"-",
"01T00",
":",
"00",
":",
"00",
"]",
":",
"return",
":",
"(",
"2013",
"-",
"03",
"-",
"01",
"2013",
"-",
"05",
"-",
"01T00",
":",
"00",
":",
"00",
")"
] |
899a5385b15af7fba190ab4fae1d41e47d155a1b
|
https://github.com/cga-harvard/Hypermap-Registry/blob/899a5385b15af7fba190ab4fae1d41e47d155a1b/hypermap/search_api/utils.py#L60-L70
|
train
|
cga-harvard/Hypermap-Registry
|
hypermap/search_api/utils.py
|
parse_datetime_range
|
def parse_datetime_range(time_filter):
"""
Parse the url param to python objects.
From what time range to divide by a.time.gap into intervals.
Defaults to q.time and otherwise 90 days.
Validate in API: re.search("\\[(.*) TO (.*)\\]", value)
:param time_filter: [2013-03-01 TO 2013-05-01T00:00:00]
:return: datetime.datetime(2013, 3, 1, 0, 0), datetime.datetime(2013, 5, 1, 0, 0)
"""
if not time_filter:
time_filter = "[* TO *]"
start, end = parse_solr_time_range_as_pair(time_filter)
start, end = parse_datetime(start), parse_datetime(end)
return start, end
|
python
|
def parse_datetime_range(time_filter):
"""
Parse the url param to python objects.
From what time range to divide by a.time.gap into intervals.
Defaults to q.time and otherwise 90 days.
Validate in API: re.search("\\[(.*) TO (.*)\\]", value)
:param time_filter: [2013-03-01 TO 2013-05-01T00:00:00]
:return: datetime.datetime(2013, 3, 1, 0, 0), datetime.datetime(2013, 5, 1, 0, 0)
"""
if not time_filter:
time_filter = "[* TO *]"
start, end = parse_solr_time_range_as_pair(time_filter)
start, end = parse_datetime(start), parse_datetime(end)
return start, end
|
[
"def",
"parse_datetime_range",
"(",
"time_filter",
")",
":",
"if",
"not",
"time_filter",
":",
"time_filter",
"=",
"\"[* TO *]\"",
"start",
",",
"end",
"=",
"parse_solr_time_range_as_pair",
"(",
"time_filter",
")",
"start",
",",
"end",
"=",
"parse_datetime",
"(",
"start",
")",
",",
"parse_datetime",
"(",
"end",
")",
"return",
"start",
",",
"end"
] |
Parse the url param to python objects.
From what time range to divide by a.time.gap into intervals.
Defaults to q.time and otherwise 90 days.
Validate in API: re.search("\\[(.*) TO (.*)\\]", value)
:param time_filter: [2013-03-01 TO 2013-05-01T00:00:00]
:return: datetime.datetime(2013, 3, 1, 0, 0), datetime.datetime(2013, 5, 1, 0, 0)
|
[
"Parse",
"the",
"url",
"param",
"to",
"python",
"objects",
".",
"From",
"what",
"time",
"range",
"to",
"divide",
"by",
"a",
".",
"time",
".",
"gap",
"into",
"intervals",
".",
"Defaults",
"to",
"q",
".",
"time",
"and",
"otherwise",
"90",
"days",
".",
"Validate",
"in",
"API",
":",
"re",
".",
"search",
"(",
"\\\\",
"[",
"(",
".",
"*",
")",
"TO",
"(",
".",
"*",
")",
"\\\\",
"]",
"value",
")",
":",
"param",
"time_filter",
":",
"[",
"2013",
"-",
"03",
"-",
"01",
"TO",
"2013",
"-",
"05",
"-",
"01T00",
":",
"00",
":",
"00",
"]",
":",
"return",
":",
"datetime",
".",
"datetime",
"(",
"2013",
"3",
"1",
"0",
"0",
")",
"datetime",
".",
"datetime",
"(",
"2013",
"5",
"1",
"0",
"0",
")"
] |
899a5385b15af7fba190ab4fae1d41e47d155a1b
|
https://github.com/cga-harvard/Hypermap-Registry/blob/899a5385b15af7fba190ab4fae1d41e47d155a1b/hypermap/search_api/utils.py#L73-L88
|
train
|
cga-harvard/Hypermap-Registry
|
hypermap/search_api/utils.py
|
parse_ISO8601
|
def parse_ISO8601(time_gap):
"""
P1D to (1, ("DAYS", isodate.Duration(days=1)).
P1Y to (1, ("YEARS", isodate.Duration(years=1)).
:param time_gap: ISO8601 string.
:return: tuple with quantity and unit of time.
"""
matcher = None
if time_gap.count("T"):
units = {
"H": ("HOURS", isodate.Duration(hours=1)),
"M": ("MINUTES", isodate.Duration(minutes=1)),
"S": ("SECONDS", isodate.Duration(seconds=1))
}
matcher = re.search("PT(\d+)([HMS])", time_gap)
if matcher:
quantity = int(matcher.group(1))
unit = matcher.group(2)
return quantity, units.get(unit)
else:
raise Exception("Does not match the pattern: {}".format(time_gap))
else:
units = {
"Y": ("YEARS", isodate.Duration(years=1)),
"M": ("MONTHS", isodate.Duration(months=1)),
"W": ("WEEKS", isodate.Duration(weeks=1)),
"D": ("DAYS", isodate.Duration(days=1))
}
matcher = re.search("P(\d+)([YMWD])", time_gap)
if matcher:
quantity = int(matcher.group(1))
unit = matcher.group(2)
else:
raise Exception("Does not match the pattern: {}".format(time_gap))
return quantity, units.get(unit)
|
python
|
def parse_ISO8601(time_gap):
"""
P1D to (1, ("DAYS", isodate.Duration(days=1)).
P1Y to (1, ("YEARS", isodate.Duration(years=1)).
:param time_gap: ISO8601 string.
:return: tuple with quantity and unit of time.
"""
matcher = None
if time_gap.count("T"):
units = {
"H": ("HOURS", isodate.Duration(hours=1)),
"M": ("MINUTES", isodate.Duration(minutes=1)),
"S": ("SECONDS", isodate.Duration(seconds=1))
}
matcher = re.search("PT(\d+)([HMS])", time_gap)
if matcher:
quantity = int(matcher.group(1))
unit = matcher.group(2)
return quantity, units.get(unit)
else:
raise Exception("Does not match the pattern: {}".format(time_gap))
else:
units = {
"Y": ("YEARS", isodate.Duration(years=1)),
"M": ("MONTHS", isodate.Duration(months=1)),
"W": ("WEEKS", isodate.Duration(weeks=1)),
"D": ("DAYS", isodate.Duration(days=1))
}
matcher = re.search("P(\d+)([YMWD])", time_gap)
if matcher:
quantity = int(matcher.group(1))
unit = matcher.group(2)
else:
raise Exception("Does not match the pattern: {}".format(time_gap))
return quantity, units.get(unit)
|
[
"def",
"parse_ISO8601",
"(",
"time_gap",
")",
":",
"matcher",
"=",
"None",
"if",
"time_gap",
".",
"count",
"(",
"\"T\"",
")",
":",
"units",
"=",
"{",
"\"H\"",
":",
"(",
"\"HOURS\"",
",",
"isodate",
".",
"Duration",
"(",
"hours",
"=",
"1",
")",
")",
",",
"\"M\"",
":",
"(",
"\"MINUTES\"",
",",
"isodate",
".",
"Duration",
"(",
"minutes",
"=",
"1",
")",
")",
",",
"\"S\"",
":",
"(",
"\"SECONDS\"",
",",
"isodate",
".",
"Duration",
"(",
"seconds",
"=",
"1",
")",
")",
"}",
"matcher",
"=",
"re",
".",
"search",
"(",
"\"PT(\\d+)([HMS])\"",
",",
"time_gap",
")",
"if",
"matcher",
":",
"quantity",
"=",
"int",
"(",
"matcher",
".",
"group",
"(",
"1",
")",
")",
"unit",
"=",
"matcher",
".",
"group",
"(",
"2",
")",
"return",
"quantity",
",",
"units",
".",
"get",
"(",
"unit",
")",
"else",
":",
"raise",
"Exception",
"(",
"\"Does not match the pattern: {}\"",
".",
"format",
"(",
"time_gap",
")",
")",
"else",
":",
"units",
"=",
"{",
"\"Y\"",
":",
"(",
"\"YEARS\"",
",",
"isodate",
".",
"Duration",
"(",
"years",
"=",
"1",
")",
")",
",",
"\"M\"",
":",
"(",
"\"MONTHS\"",
",",
"isodate",
".",
"Duration",
"(",
"months",
"=",
"1",
")",
")",
",",
"\"W\"",
":",
"(",
"\"WEEKS\"",
",",
"isodate",
".",
"Duration",
"(",
"weeks",
"=",
"1",
")",
")",
",",
"\"D\"",
":",
"(",
"\"DAYS\"",
",",
"isodate",
".",
"Duration",
"(",
"days",
"=",
"1",
")",
")",
"}",
"matcher",
"=",
"re",
".",
"search",
"(",
"\"P(\\d+)([YMWD])\"",
",",
"time_gap",
")",
"if",
"matcher",
":",
"quantity",
"=",
"int",
"(",
"matcher",
".",
"group",
"(",
"1",
")",
")",
"unit",
"=",
"matcher",
".",
"group",
"(",
"2",
")",
"else",
":",
"raise",
"Exception",
"(",
"\"Does not match the pattern: {}\"",
".",
"format",
"(",
"time_gap",
")",
")",
"return",
"quantity",
",",
"units",
".",
"get",
"(",
"unit",
")"
] |
P1D to (1, ("DAYS", isodate.Duration(days=1)).
P1Y to (1, ("YEARS", isodate.Duration(years=1)).
:param time_gap: ISO8601 string.
:return: tuple with quantity and unit of time.
|
[
"P1D",
"to",
"(",
"1",
"(",
"DAYS",
"isodate",
".",
"Duration",
"(",
"days",
"=",
"1",
"))",
".",
"P1Y",
"to",
"(",
"1",
"(",
"YEARS",
"isodate",
".",
"Duration",
"(",
"years",
"=",
"1",
"))",
".",
":",
"param",
"time_gap",
":",
"ISO8601",
"string",
".",
":",
"return",
":",
"tuple",
"with",
"quantity",
"and",
"unit",
"of",
"time",
"."
] |
899a5385b15af7fba190ab4fae1d41e47d155a1b
|
https://github.com/cga-harvard/Hypermap-Registry/blob/899a5385b15af7fba190ab4fae1d41e47d155a1b/hypermap/search_api/utils.py#L109-L145
|
train
|
cga-harvard/Hypermap-Registry
|
hypermap/search_api/utils.py
|
compute_gap
|
def compute_gap(start, end, time_limit):
"""
Compute a gap that seems reasonable, considering natural time units and limit.
# TODO: make it to be reasonable.
# TODO: make it to be small unit of time sensitive.
:param start: datetime
:param end: datetime
:param time_limit: gaps count
:return: solr's format duration.
"""
if is_range_common_era(start, end):
duration = end.get("parsed_datetime") - start.get("parsed_datetime")
unit = int(math.ceil(duration.days / float(time_limit)))
return "+{0}DAYS".format(unit)
else:
# at the moment can not do maths with BCE dates.
# those dates are relatively big, so 100 years are reasonable in those cases.
# TODO: calculate duration on those cases.
return "+100YEARS"
|
python
|
def compute_gap(start, end, time_limit):
"""
Compute a gap that seems reasonable, considering natural time units and limit.
# TODO: make it to be reasonable.
# TODO: make it to be small unit of time sensitive.
:param start: datetime
:param end: datetime
:param time_limit: gaps count
:return: solr's format duration.
"""
if is_range_common_era(start, end):
duration = end.get("parsed_datetime") - start.get("parsed_datetime")
unit = int(math.ceil(duration.days / float(time_limit)))
return "+{0}DAYS".format(unit)
else:
# at the moment can not do maths with BCE dates.
# those dates are relatively big, so 100 years are reasonable in those cases.
# TODO: calculate duration on those cases.
return "+100YEARS"
|
[
"def",
"compute_gap",
"(",
"start",
",",
"end",
",",
"time_limit",
")",
":",
"if",
"is_range_common_era",
"(",
"start",
",",
"end",
")",
":",
"duration",
"=",
"end",
".",
"get",
"(",
"\"parsed_datetime\"",
")",
"-",
"start",
".",
"get",
"(",
"\"parsed_datetime\"",
")",
"unit",
"=",
"int",
"(",
"math",
".",
"ceil",
"(",
"duration",
".",
"days",
"/",
"float",
"(",
"time_limit",
")",
")",
")",
"return",
"\"+{0}DAYS\"",
".",
"format",
"(",
"unit",
")",
"else",
":",
"# at the moment can not do maths with BCE dates.",
"# those dates are relatively big, so 100 years are reasonable in those cases.",
"# TODO: calculate duration on those cases.",
"return",
"\"+100YEARS\""
] |
Compute a gap that seems reasonable, considering natural time units and limit.
# TODO: make it to be reasonable.
# TODO: make it to be small unit of time sensitive.
:param start: datetime
:param end: datetime
:param time_limit: gaps count
:return: solr's format duration.
|
[
"Compute",
"a",
"gap",
"that",
"seems",
"reasonable",
"considering",
"natural",
"time",
"units",
"and",
"limit",
".",
"#",
"TODO",
":",
"make",
"it",
"to",
"be",
"reasonable",
".",
"#",
"TODO",
":",
"make",
"it",
"to",
"be",
"small",
"unit",
"of",
"time",
"sensitive",
".",
":",
"param",
"start",
":",
"datetime",
":",
"param",
"end",
":",
"datetime",
":",
"param",
"time_limit",
":",
"gaps",
"count",
":",
"return",
":",
"solr",
"s",
"format",
"duration",
"."
] |
899a5385b15af7fba190ab4fae1d41e47d155a1b
|
https://github.com/cga-harvard/Hypermap-Registry/blob/899a5385b15af7fba190ab4fae1d41e47d155a1b/hypermap/search_api/utils.py#L148-L166
|
train
|
cga-harvard/Hypermap-Registry
|
hypermap/search_api/utils.py
|
gap_to_sorl
|
def gap_to_sorl(time_gap):
"""
P1D to +1DAY
:param time_gap:
:return: solr's format duration.
"""
quantity, unit = parse_ISO8601(time_gap)
if unit[0] == "WEEKS":
return "+{0}DAYS".format(quantity * 7)
else:
return "+{0}{1}".format(quantity, unit[0])
|
python
|
def gap_to_sorl(time_gap):
"""
P1D to +1DAY
:param time_gap:
:return: solr's format duration.
"""
quantity, unit = parse_ISO8601(time_gap)
if unit[0] == "WEEKS":
return "+{0}DAYS".format(quantity * 7)
else:
return "+{0}{1}".format(quantity, unit[0])
|
[
"def",
"gap_to_sorl",
"(",
"time_gap",
")",
":",
"quantity",
",",
"unit",
"=",
"parse_ISO8601",
"(",
"time_gap",
")",
"if",
"unit",
"[",
"0",
"]",
"==",
"\"WEEKS\"",
":",
"return",
"\"+{0}DAYS\"",
".",
"format",
"(",
"quantity",
"*",
"7",
")",
"else",
":",
"return",
"\"+{0}{1}\"",
".",
"format",
"(",
"quantity",
",",
"unit",
"[",
"0",
"]",
")"
] |
P1D to +1DAY
:param time_gap:
:return: solr's format duration.
|
[
"P1D",
"to",
"+",
"1DAY",
":",
"param",
"time_gap",
":",
":",
"return",
":",
"solr",
"s",
"format",
"duration",
"."
] |
899a5385b15af7fba190ab4fae1d41e47d155a1b
|
https://github.com/cga-harvard/Hypermap-Registry/blob/899a5385b15af7fba190ab4fae1d41e47d155a1b/hypermap/search_api/utils.py#L185-L195
|
train
|
cga-harvard/Hypermap-Registry
|
hypermap/search_api/utils.py
|
request_time_facet
|
def request_time_facet(field, time_filter, time_gap, time_limit=100):
"""
time facet query builder
:param field: map the query to this field.
:param time_limit: Non-0 triggers time/date range faceting. This value is the maximum number of time ranges to
return when a.time.gap is unspecified. This is a soft maximum; less will usually be returned.
A suggested value is 100.
Note that a.time.gap effectively ignores this value.
See Solr docs for more details on the query/response format.
:param time_filter: From what time range to divide by a.time.gap into intervals.
Defaults to q.time and otherwise 90 days.
:param time_gap: The consecutive time interval/gap for each time range. Ignores a.time.limit.
The format is based on a subset of the ISO-8601 duration format
:return: facet.range=manufacturedate_dt&f.manufacturedate_dt.facet.range.start=2006-02-11T15:26:37Z&f.
manufacturedate_dt.facet.range.end=2006-02-14T15:26:37Z&f.manufacturedate_dt.facet.range.gap=+1DAY
"""
start, end = parse_datetime_range(time_filter)
key_range_start = "f.{0}.facet.range.start".format(field)
key_range_end = "f.{0}.facet.range.end".format(field)
key_range_gap = "f.{0}.facet.range.gap".format(field)
key_range_mincount = "f.{0}.facet.mincount".format(field)
if time_gap:
gap = gap_to_sorl(time_gap)
else:
gap = compute_gap(start, end, time_limit)
value_range_start = start.get("parsed_datetime")
if start.get("is_common_era"):
value_range_start = start.get("parsed_datetime").isoformat().replace("+00:00", "") + "Z"
value_range_end = start.get("parsed_datetime")
if end.get("is_common_era"):
value_range_end = end.get("parsed_datetime").isoformat().replace("+00:00", "") + "Z"
value_range_gap = gap
params = {
'facet.range': field,
key_range_start: value_range_start,
key_range_end: value_range_end,
key_range_gap: value_range_gap,
key_range_mincount: 1
}
return params
|
python
|
def request_time_facet(field, time_filter, time_gap, time_limit=100):
"""
time facet query builder
:param field: map the query to this field.
:param time_limit: Non-0 triggers time/date range faceting. This value is the maximum number of time ranges to
return when a.time.gap is unspecified. This is a soft maximum; less will usually be returned.
A suggested value is 100.
Note that a.time.gap effectively ignores this value.
See Solr docs for more details on the query/response format.
:param time_filter: From what time range to divide by a.time.gap into intervals.
Defaults to q.time and otherwise 90 days.
:param time_gap: The consecutive time interval/gap for each time range. Ignores a.time.limit.
The format is based on a subset of the ISO-8601 duration format
:return: facet.range=manufacturedate_dt&f.manufacturedate_dt.facet.range.start=2006-02-11T15:26:37Z&f.
manufacturedate_dt.facet.range.end=2006-02-14T15:26:37Z&f.manufacturedate_dt.facet.range.gap=+1DAY
"""
start, end = parse_datetime_range(time_filter)
key_range_start = "f.{0}.facet.range.start".format(field)
key_range_end = "f.{0}.facet.range.end".format(field)
key_range_gap = "f.{0}.facet.range.gap".format(field)
key_range_mincount = "f.{0}.facet.mincount".format(field)
if time_gap:
gap = gap_to_sorl(time_gap)
else:
gap = compute_gap(start, end, time_limit)
value_range_start = start.get("parsed_datetime")
if start.get("is_common_era"):
value_range_start = start.get("parsed_datetime").isoformat().replace("+00:00", "") + "Z"
value_range_end = start.get("parsed_datetime")
if end.get("is_common_era"):
value_range_end = end.get("parsed_datetime").isoformat().replace("+00:00", "") + "Z"
value_range_gap = gap
params = {
'facet.range': field,
key_range_start: value_range_start,
key_range_end: value_range_end,
key_range_gap: value_range_gap,
key_range_mincount: 1
}
return params
|
[
"def",
"request_time_facet",
"(",
"field",
",",
"time_filter",
",",
"time_gap",
",",
"time_limit",
"=",
"100",
")",
":",
"start",
",",
"end",
"=",
"parse_datetime_range",
"(",
"time_filter",
")",
"key_range_start",
"=",
"\"f.{0}.facet.range.start\"",
".",
"format",
"(",
"field",
")",
"key_range_end",
"=",
"\"f.{0}.facet.range.end\"",
".",
"format",
"(",
"field",
")",
"key_range_gap",
"=",
"\"f.{0}.facet.range.gap\"",
".",
"format",
"(",
"field",
")",
"key_range_mincount",
"=",
"\"f.{0}.facet.mincount\"",
".",
"format",
"(",
"field",
")",
"if",
"time_gap",
":",
"gap",
"=",
"gap_to_sorl",
"(",
"time_gap",
")",
"else",
":",
"gap",
"=",
"compute_gap",
"(",
"start",
",",
"end",
",",
"time_limit",
")",
"value_range_start",
"=",
"start",
".",
"get",
"(",
"\"parsed_datetime\"",
")",
"if",
"start",
".",
"get",
"(",
"\"is_common_era\"",
")",
":",
"value_range_start",
"=",
"start",
".",
"get",
"(",
"\"parsed_datetime\"",
")",
".",
"isoformat",
"(",
")",
".",
"replace",
"(",
"\"+00:00\"",
",",
"\"\"",
")",
"+",
"\"Z\"",
"value_range_end",
"=",
"start",
".",
"get",
"(",
"\"parsed_datetime\"",
")",
"if",
"end",
".",
"get",
"(",
"\"is_common_era\"",
")",
":",
"value_range_end",
"=",
"end",
".",
"get",
"(",
"\"parsed_datetime\"",
")",
".",
"isoformat",
"(",
")",
".",
"replace",
"(",
"\"+00:00\"",
",",
"\"\"",
")",
"+",
"\"Z\"",
"value_range_gap",
"=",
"gap",
"params",
"=",
"{",
"'facet.range'",
":",
"field",
",",
"key_range_start",
":",
"value_range_start",
",",
"key_range_end",
":",
"value_range_end",
",",
"key_range_gap",
":",
"value_range_gap",
",",
"key_range_mincount",
":",
"1",
"}",
"return",
"params"
] |
time facet query builder
:param field: map the query to this field.
:param time_limit: Non-0 triggers time/date range faceting. This value is the maximum number of time ranges to
return when a.time.gap is unspecified. This is a soft maximum; less will usually be returned.
A suggested value is 100.
Note that a.time.gap effectively ignores this value.
See Solr docs for more details on the query/response format.
:param time_filter: From what time range to divide by a.time.gap into intervals.
Defaults to q.time and otherwise 90 days.
:param time_gap: The consecutive time interval/gap for each time range. Ignores a.time.limit.
The format is based on a subset of the ISO-8601 duration format
:return: facet.range=manufacturedate_dt&f.manufacturedate_dt.facet.range.start=2006-02-11T15:26:37Z&f.
manufacturedate_dt.facet.range.end=2006-02-14T15:26:37Z&f.manufacturedate_dt.facet.range.gap=+1DAY
|
[
"time",
"facet",
"query",
"builder",
":",
"param",
"field",
":",
"map",
"the",
"query",
"to",
"this",
"field",
".",
":",
"param",
"time_limit",
":",
"Non",
"-",
"0",
"triggers",
"time",
"/",
"date",
"range",
"faceting",
".",
"This",
"value",
"is",
"the",
"maximum",
"number",
"of",
"time",
"ranges",
"to",
"return",
"when",
"a",
".",
"time",
".",
"gap",
"is",
"unspecified",
".",
"This",
"is",
"a",
"soft",
"maximum",
";",
"less",
"will",
"usually",
"be",
"returned",
".",
"A",
"suggested",
"value",
"is",
"100",
".",
"Note",
"that",
"a",
".",
"time",
".",
"gap",
"effectively",
"ignores",
"this",
"value",
".",
"See",
"Solr",
"docs",
"for",
"more",
"details",
"on",
"the",
"query",
"/",
"response",
"format",
".",
":",
"param",
"time_filter",
":",
"From",
"what",
"time",
"range",
"to",
"divide",
"by",
"a",
".",
"time",
".",
"gap",
"into",
"intervals",
".",
"Defaults",
"to",
"q",
".",
"time",
"and",
"otherwise",
"90",
"days",
".",
":",
"param",
"time_gap",
":",
"The",
"consecutive",
"time",
"interval",
"/",
"gap",
"for",
"each",
"time",
"range",
".",
"Ignores",
"a",
".",
"time",
".",
"limit",
".",
"The",
"format",
"is",
"based",
"on",
"a",
"subset",
"of",
"the",
"ISO",
"-",
"8601",
"duration",
"format",
":",
"return",
":",
"facet",
".",
"range",
"=",
"manufacturedate_dt&f",
".",
"manufacturedate_dt",
".",
"facet",
".",
"range",
".",
"start",
"=",
"2006",
"-",
"02",
"-",
"11T15",
":",
"26",
":",
"37Z&f",
".",
"manufacturedate_dt",
".",
"facet",
".",
"range",
".",
"end",
"=",
"2006",
"-",
"02",
"-",
"14T15",
":",
"26",
":",
"37Z&f",
".",
"manufacturedate_dt",
".",
"facet",
".",
"range",
".",
"gap",
"=",
"+",
"1DAY"
] |
899a5385b15af7fba190ab4fae1d41e47d155a1b
|
https://github.com/cga-harvard/Hypermap-Registry/blob/899a5385b15af7fba190ab4fae1d41e47d155a1b/hypermap/search_api/utils.py#L198-L244
|
train
|
cga-harvard/Hypermap-Registry
|
hypermap/search_api/utils.py
|
parse_solr_geo_range_as_pair
|
def parse_solr_geo_range_as_pair(geo_box_str):
"""
:param geo_box_str: [-90,-180 TO 90,180]
:return: ("-90,-180", "90,180")
"""
pattern = "\\[(.*) TO (.*)\\]"
matcher = re.search(pattern, geo_box_str)
if matcher:
return matcher.group(1), matcher.group(2)
else:
raise Exception("Regex {0} could not parse {1}".format(pattern, geo_box_str))
|
python
|
def parse_solr_geo_range_as_pair(geo_box_str):
"""
:param geo_box_str: [-90,-180 TO 90,180]
:return: ("-90,-180", "90,180")
"""
pattern = "\\[(.*) TO (.*)\\]"
matcher = re.search(pattern, geo_box_str)
if matcher:
return matcher.group(1), matcher.group(2)
else:
raise Exception("Regex {0} could not parse {1}".format(pattern, geo_box_str))
|
[
"def",
"parse_solr_geo_range_as_pair",
"(",
"geo_box_str",
")",
":",
"pattern",
"=",
"\"\\\\[(.*) TO (.*)\\\\]\"",
"matcher",
"=",
"re",
".",
"search",
"(",
"pattern",
",",
"geo_box_str",
")",
"if",
"matcher",
":",
"return",
"matcher",
".",
"group",
"(",
"1",
")",
",",
"matcher",
".",
"group",
"(",
"2",
")",
"else",
":",
"raise",
"Exception",
"(",
"\"Regex {0} could not parse {1}\"",
".",
"format",
"(",
"pattern",
",",
"geo_box_str",
")",
")"
] |
:param geo_box_str: [-90,-180 TO 90,180]
:return: ("-90,-180", "90,180")
|
[
":",
"param",
"geo_box_str",
":",
"[",
"-",
"90",
"-",
"180",
"TO",
"90",
"180",
"]",
":",
"return",
":",
"(",
"-",
"90",
"-",
"180",
"90",
"180",
")"
] |
899a5385b15af7fba190ab4fae1d41e47d155a1b
|
https://github.com/cga-harvard/Hypermap-Registry/blob/899a5385b15af7fba190ab4fae1d41e47d155a1b/hypermap/search_api/utils.py#L247-L257
|
train
|
cga-harvard/Hypermap-Registry
|
hypermap/search_api/utils.py
|
parse_geo_box
|
def parse_geo_box(geo_box_str):
"""
parses [-90,-180 TO 90,180] to a shapely.geometry.box
:param geo_box_str:
:return:
"""
from_point_str, to_point_str = parse_solr_geo_range_as_pair(geo_box_str)
from_point = parse_lat_lon(from_point_str)
to_point = parse_lat_lon(to_point_str)
rectangle = box(from_point[0], from_point[1], to_point[0], to_point[1])
return rectangle
|
python
|
def parse_geo_box(geo_box_str):
"""
parses [-90,-180 TO 90,180] to a shapely.geometry.box
:param geo_box_str:
:return:
"""
from_point_str, to_point_str = parse_solr_geo_range_as_pair(geo_box_str)
from_point = parse_lat_lon(from_point_str)
to_point = parse_lat_lon(to_point_str)
rectangle = box(from_point[0], from_point[1], to_point[0], to_point[1])
return rectangle
|
[
"def",
"parse_geo_box",
"(",
"geo_box_str",
")",
":",
"from_point_str",
",",
"to_point_str",
"=",
"parse_solr_geo_range_as_pair",
"(",
"geo_box_str",
")",
"from_point",
"=",
"parse_lat_lon",
"(",
"from_point_str",
")",
"to_point",
"=",
"parse_lat_lon",
"(",
"to_point_str",
")",
"rectangle",
"=",
"box",
"(",
"from_point",
"[",
"0",
"]",
",",
"from_point",
"[",
"1",
"]",
",",
"to_point",
"[",
"0",
"]",
",",
"to_point",
"[",
"1",
"]",
")",
"return",
"rectangle"
] |
parses [-90,-180 TO 90,180] to a shapely.geometry.box
:param geo_box_str:
:return:
|
[
"parses",
"[",
"-",
"90",
"-",
"180",
"TO",
"90",
"180",
"]",
"to",
"a",
"shapely",
".",
"geometry",
".",
"box",
":",
"param",
"geo_box_str",
":",
":",
"return",
":"
] |
899a5385b15af7fba190ab4fae1d41e47d155a1b
|
https://github.com/cga-harvard/Hypermap-Registry/blob/899a5385b15af7fba190ab4fae1d41e47d155a1b/hypermap/search_api/utils.py#L265-L276
|
train
|
cga-harvard/Hypermap-Registry
|
hypermap/search_api/utils.py
|
request_heatmap_facet
|
def request_heatmap_facet(field, hm_filter, hm_grid_level, hm_limit):
"""
heatmap facet query builder
:param field: map the query to this field.
:param hm_filter: From what region to plot the heatmap. Defaults to q.geo or otherwise the world.
:param hm_grid_level: To explicitly specify the grid level, e.g. to let a user ask for greater or courser
resolution than the most recent request. Ignores a.hm.limit.
:param hm_limit: Non-0 triggers heatmap/grid faceting. This number is a soft maximum on thenumber of
cells it should have. There may be as few as 1/4th this number in return. Note that a.hm.gridLevel can effectively
ignore this value. The response heatmap contains a counts grid that can be null or contain null rows when all its
values would be 0. See Solr docs for more details on the response format.
:return:
"""
if not hm_filter:
hm_filter = '[-90,-180 TO 90,180]'
params = {
'facet': 'on',
'facet.heatmap': field,
'facet.heatmap.geom': hm_filter
}
if hm_grid_level:
# note: aHmLimit is ignored in this case
params['facet.heatmap.gridLevel'] = hm_grid_level
else:
# Calculate distErr that will approximate aHmLimit many cells as an upper bound
rectangle = parse_geo_box(hm_filter)
degrees_side_length = rectangle.length / 2
cell_side_length = math.sqrt(float(hm_limit))
cell_side_length_degrees = degrees_side_length / cell_side_length * 2
params['facet.heatmap.distErr'] = str(float(cell_side_length_degrees))
# TODO: not sure about if returning correct param values.
# get_params = urllib.urlencode(params)
return params
|
python
|
def request_heatmap_facet(field, hm_filter, hm_grid_level, hm_limit):
"""
heatmap facet query builder
:param field: map the query to this field.
:param hm_filter: From what region to plot the heatmap. Defaults to q.geo or otherwise the world.
:param hm_grid_level: To explicitly specify the grid level, e.g. to let a user ask for greater or courser
resolution than the most recent request. Ignores a.hm.limit.
:param hm_limit: Non-0 triggers heatmap/grid faceting. This number is a soft maximum on thenumber of
cells it should have. There may be as few as 1/4th this number in return. Note that a.hm.gridLevel can effectively
ignore this value. The response heatmap contains a counts grid that can be null or contain null rows when all its
values would be 0. See Solr docs for more details on the response format.
:return:
"""
if not hm_filter:
hm_filter = '[-90,-180 TO 90,180]'
params = {
'facet': 'on',
'facet.heatmap': field,
'facet.heatmap.geom': hm_filter
}
if hm_grid_level:
# note: aHmLimit is ignored in this case
params['facet.heatmap.gridLevel'] = hm_grid_level
else:
# Calculate distErr that will approximate aHmLimit many cells as an upper bound
rectangle = parse_geo_box(hm_filter)
degrees_side_length = rectangle.length / 2
cell_side_length = math.sqrt(float(hm_limit))
cell_side_length_degrees = degrees_side_length / cell_side_length * 2
params['facet.heatmap.distErr'] = str(float(cell_side_length_degrees))
# TODO: not sure about if returning correct param values.
# get_params = urllib.urlencode(params)
return params
|
[
"def",
"request_heatmap_facet",
"(",
"field",
",",
"hm_filter",
",",
"hm_grid_level",
",",
"hm_limit",
")",
":",
"if",
"not",
"hm_filter",
":",
"hm_filter",
"=",
"'[-90,-180 TO 90,180]'",
"params",
"=",
"{",
"'facet'",
":",
"'on'",
",",
"'facet.heatmap'",
":",
"field",
",",
"'facet.heatmap.geom'",
":",
"hm_filter",
"}",
"if",
"hm_grid_level",
":",
"# note: aHmLimit is ignored in this case",
"params",
"[",
"'facet.heatmap.gridLevel'",
"]",
"=",
"hm_grid_level",
"else",
":",
"# Calculate distErr that will approximate aHmLimit many cells as an upper bound",
"rectangle",
"=",
"parse_geo_box",
"(",
"hm_filter",
")",
"degrees_side_length",
"=",
"rectangle",
".",
"length",
"/",
"2",
"cell_side_length",
"=",
"math",
".",
"sqrt",
"(",
"float",
"(",
"hm_limit",
")",
")",
"cell_side_length_degrees",
"=",
"degrees_side_length",
"/",
"cell_side_length",
"*",
"2",
"params",
"[",
"'facet.heatmap.distErr'",
"]",
"=",
"str",
"(",
"float",
"(",
"cell_side_length_degrees",
")",
")",
"# TODO: not sure about if returning correct param values.",
"# get_params = urllib.urlencode(params)",
"return",
"params"
] |
heatmap facet query builder
:param field: map the query to this field.
:param hm_filter: From what region to plot the heatmap. Defaults to q.geo or otherwise the world.
:param hm_grid_level: To explicitly specify the grid level, e.g. to let a user ask for greater or courser
resolution than the most recent request. Ignores a.hm.limit.
:param hm_limit: Non-0 triggers heatmap/grid faceting. This number is a soft maximum on thenumber of
cells it should have. There may be as few as 1/4th this number in return. Note that a.hm.gridLevel can effectively
ignore this value. The response heatmap contains a counts grid that can be null or contain null rows when all its
values would be 0. See Solr docs for more details on the response format.
:return:
|
[
"heatmap",
"facet",
"query",
"builder",
":",
"param",
"field",
":",
"map",
"the",
"query",
"to",
"this",
"field",
".",
":",
"param",
"hm_filter",
":",
"From",
"what",
"region",
"to",
"plot",
"the",
"heatmap",
".",
"Defaults",
"to",
"q",
".",
"geo",
"or",
"otherwise",
"the",
"world",
".",
":",
"param",
"hm_grid_level",
":",
"To",
"explicitly",
"specify",
"the",
"grid",
"level",
"e",
".",
"g",
".",
"to",
"let",
"a",
"user",
"ask",
"for",
"greater",
"or",
"courser",
"resolution",
"than",
"the",
"most",
"recent",
"request",
".",
"Ignores",
"a",
".",
"hm",
".",
"limit",
".",
":",
"param",
"hm_limit",
":",
"Non",
"-",
"0",
"triggers",
"heatmap",
"/",
"grid",
"faceting",
".",
"This",
"number",
"is",
"a",
"soft",
"maximum",
"on",
"thenumber",
"of",
"cells",
"it",
"should",
"have",
".",
"There",
"may",
"be",
"as",
"few",
"as",
"1",
"/",
"4th",
"this",
"number",
"in",
"return",
".",
"Note",
"that",
"a",
".",
"hm",
".",
"gridLevel",
"can",
"effectively",
"ignore",
"this",
"value",
".",
"The",
"response",
"heatmap",
"contains",
"a",
"counts",
"grid",
"that",
"can",
"be",
"null",
"or",
"contain",
"null",
"rows",
"when",
"all",
"its",
"values",
"would",
"be",
"0",
".",
"See",
"Solr",
"docs",
"for",
"more",
"details",
"on",
"the",
"response",
"format",
".",
":",
"return",
":"
] |
899a5385b15af7fba190ab4fae1d41e47d155a1b
|
https://github.com/cga-harvard/Hypermap-Registry/blob/899a5385b15af7fba190ab4fae1d41e47d155a1b/hypermap/search_api/utils.py#L279-L315
|
train
|
cga-harvard/Hypermap-Registry
|
hypermap/search_api/utils.py
|
asterisk_to_min_max
|
def asterisk_to_min_max(field, time_filter, search_engine_endpoint, actual_params=None):
"""
traduce [* TO *] to something like [MIN-INDEXED-DATE TO MAX-INDEXED-DATE]
:param field: map the stats to this field.
:param time_filter: this is the value to be translated. think in "[* TO 2000]"
:param search_engine_endpoint: solr core
:param actual_params: (not implemented) to merge with other params.
:return: translated time filter
"""
if actual_params:
raise NotImplemented("actual_params")
start, end = parse_solr_time_range_as_pair(time_filter)
if start == '*' or end == '*':
params_stats = {
"q": "*:*",
"rows": 0,
"stats.field": field,
"stats": "true",
"wt": "json"
}
res_stats = requests.get(search_engine_endpoint, params=params_stats)
if res_stats.ok:
stats_date_field = res_stats.json()["stats"]["stats_fields"][field]
date_min = stats_date_field["min"]
date_max = stats_date_field["max"]
if start != '*':
date_min = start
if end != '*':
date_max = end
time_filter = "[{0} TO {1}]".format(date_min, date_max)
return time_filter
|
python
|
def asterisk_to_min_max(field, time_filter, search_engine_endpoint, actual_params=None):
"""
traduce [* TO *] to something like [MIN-INDEXED-DATE TO MAX-INDEXED-DATE]
:param field: map the stats to this field.
:param time_filter: this is the value to be translated. think in "[* TO 2000]"
:param search_engine_endpoint: solr core
:param actual_params: (not implemented) to merge with other params.
:return: translated time filter
"""
if actual_params:
raise NotImplemented("actual_params")
start, end = parse_solr_time_range_as_pair(time_filter)
if start == '*' or end == '*':
params_stats = {
"q": "*:*",
"rows": 0,
"stats.field": field,
"stats": "true",
"wt": "json"
}
res_stats = requests.get(search_engine_endpoint, params=params_stats)
if res_stats.ok:
stats_date_field = res_stats.json()["stats"]["stats_fields"][field]
date_min = stats_date_field["min"]
date_max = stats_date_field["max"]
if start != '*':
date_min = start
if end != '*':
date_max = end
time_filter = "[{0} TO {1}]".format(date_min, date_max)
return time_filter
|
[
"def",
"asterisk_to_min_max",
"(",
"field",
",",
"time_filter",
",",
"search_engine_endpoint",
",",
"actual_params",
"=",
"None",
")",
":",
"if",
"actual_params",
":",
"raise",
"NotImplemented",
"(",
"\"actual_params\"",
")",
"start",
",",
"end",
"=",
"parse_solr_time_range_as_pair",
"(",
"time_filter",
")",
"if",
"start",
"==",
"'*'",
"or",
"end",
"==",
"'*'",
":",
"params_stats",
"=",
"{",
"\"q\"",
":",
"\"*:*\"",
",",
"\"rows\"",
":",
"0",
",",
"\"stats.field\"",
":",
"field",
",",
"\"stats\"",
":",
"\"true\"",
",",
"\"wt\"",
":",
"\"json\"",
"}",
"res_stats",
"=",
"requests",
".",
"get",
"(",
"search_engine_endpoint",
",",
"params",
"=",
"params_stats",
")",
"if",
"res_stats",
".",
"ok",
":",
"stats_date_field",
"=",
"res_stats",
".",
"json",
"(",
")",
"[",
"\"stats\"",
"]",
"[",
"\"stats_fields\"",
"]",
"[",
"field",
"]",
"date_min",
"=",
"stats_date_field",
"[",
"\"min\"",
"]",
"date_max",
"=",
"stats_date_field",
"[",
"\"max\"",
"]",
"if",
"start",
"!=",
"'*'",
":",
"date_min",
"=",
"start",
"if",
"end",
"!=",
"'*'",
":",
"date_max",
"=",
"end",
"time_filter",
"=",
"\"[{0} TO {1}]\"",
".",
"format",
"(",
"date_min",
",",
"date_max",
")",
"return",
"time_filter"
] |
traduce [* TO *] to something like [MIN-INDEXED-DATE TO MAX-INDEXED-DATE]
:param field: map the stats to this field.
:param time_filter: this is the value to be translated. think in "[* TO 2000]"
:param search_engine_endpoint: solr core
:param actual_params: (not implemented) to merge with other params.
:return: translated time filter
|
[
"traduce",
"[",
"*",
"TO",
"*",
"]",
"to",
"something",
"like",
"[",
"MIN",
"-",
"INDEXED",
"-",
"DATE",
"TO",
"MAX",
"-",
"INDEXED",
"-",
"DATE",
"]",
":",
"param",
"field",
":",
"map",
"the",
"stats",
"to",
"this",
"field",
".",
":",
"param",
"time_filter",
":",
"this",
"is",
"the",
"value",
"to",
"be",
"translated",
".",
"think",
"in",
"[",
"*",
"TO",
"2000",
"]",
":",
"param",
"search_engine_endpoint",
":",
"solr",
"core",
":",
"param",
"actual_params",
":",
"(",
"not",
"implemented",
")",
"to",
"merge",
"with",
"other",
"params",
".",
":",
"return",
":",
"translated",
"time",
"filter"
] |
899a5385b15af7fba190ab4fae1d41e47d155a1b
|
https://github.com/cga-harvard/Hypermap-Registry/blob/899a5385b15af7fba190ab4fae1d41e47d155a1b/hypermap/search_api/utils.py#L322-L359
|
train
|
cga-harvard/Hypermap-Registry
|
hypermap/search/pycsw_plugin.py
|
get_service
|
def get_service(raw_xml):
"""
Set a service object based on the XML metadata
<dct:references scheme="OGC:WMS">http://ngamaps.geointapps.org/arcgis
/services/RIO/Rio_Foundation_Transportation/MapServer/WMSServer
</dct:references>
:param instance:
:return: Layer
"""
from pycsw.core.etree import etree
parsed = etree.fromstring(raw_xml, etree.XMLParser(resolve_entities=False))
# <dc:format>OGC:WMS</dc:format>
source_tag = parsed.find("{http://purl.org/dc/elements/1.1/}source")
# <dc:source>
# http://ngamaps.geointapps.org/arcgis/services/RIO/Rio_Foundation_Transportation/MapServer/WMSServer
# </dc:source>
format_tag = parsed.find("{http://purl.org/dc/elements/1.1/}format")
service_url = None
service_type = None
if hasattr(source_tag, 'text'):
service_url = source_tag.text
if hasattr(format_tag, 'text'):
service_type = format_tag.text
if hasattr(format_tag, 'text'):
service_type = format_tag.text
service, created = Service.objects.get_or_create(url=service_url,
is_monitored=False,
type=service_type)
# TODO: dont hardcode SRS, get them from the parsed XML.
srs, created = SpatialReferenceSystem.objects.get_or_create(code="EPSG:4326")
service.srs.add(srs)
return service
|
python
|
def get_service(raw_xml):
"""
Set a service object based on the XML metadata
<dct:references scheme="OGC:WMS">http://ngamaps.geointapps.org/arcgis
/services/RIO/Rio_Foundation_Transportation/MapServer/WMSServer
</dct:references>
:param instance:
:return: Layer
"""
from pycsw.core.etree import etree
parsed = etree.fromstring(raw_xml, etree.XMLParser(resolve_entities=False))
# <dc:format>OGC:WMS</dc:format>
source_tag = parsed.find("{http://purl.org/dc/elements/1.1/}source")
# <dc:source>
# http://ngamaps.geointapps.org/arcgis/services/RIO/Rio_Foundation_Transportation/MapServer/WMSServer
# </dc:source>
format_tag = parsed.find("{http://purl.org/dc/elements/1.1/}format")
service_url = None
service_type = None
if hasattr(source_tag, 'text'):
service_url = source_tag.text
if hasattr(format_tag, 'text'):
service_type = format_tag.text
if hasattr(format_tag, 'text'):
service_type = format_tag.text
service, created = Service.objects.get_or_create(url=service_url,
is_monitored=False,
type=service_type)
# TODO: dont hardcode SRS, get them from the parsed XML.
srs, created = SpatialReferenceSystem.objects.get_or_create(code="EPSG:4326")
service.srs.add(srs)
return service
|
[
"def",
"get_service",
"(",
"raw_xml",
")",
":",
"from",
"pycsw",
".",
"core",
".",
"etree",
"import",
"etree",
"parsed",
"=",
"etree",
".",
"fromstring",
"(",
"raw_xml",
",",
"etree",
".",
"XMLParser",
"(",
"resolve_entities",
"=",
"False",
")",
")",
"# <dc:format>OGC:WMS</dc:format>",
"source_tag",
"=",
"parsed",
".",
"find",
"(",
"\"{http://purl.org/dc/elements/1.1/}source\"",
")",
"# <dc:source>",
"# http://ngamaps.geointapps.org/arcgis/services/RIO/Rio_Foundation_Transportation/MapServer/WMSServer",
"# </dc:source>",
"format_tag",
"=",
"parsed",
".",
"find",
"(",
"\"{http://purl.org/dc/elements/1.1/}format\"",
")",
"service_url",
"=",
"None",
"service_type",
"=",
"None",
"if",
"hasattr",
"(",
"source_tag",
",",
"'text'",
")",
":",
"service_url",
"=",
"source_tag",
".",
"text",
"if",
"hasattr",
"(",
"format_tag",
",",
"'text'",
")",
":",
"service_type",
"=",
"format_tag",
".",
"text",
"if",
"hasattr",
"(",
"format_tag",
",",
"'text'",
")",
":",
"service_type",
"=",
"format_tag",
".",
"text",
"service",
",",
"created",
"=",
"Service",
".",
"objects",
".",
"get_or_create",
"(",
"url",
"=",
"service_url",
",",
"is_monitored",
"=",
"False",
",",
"type",
"=",
"service_type",
")",
"# TODO: dont hardcode SRS, get them from the parsed XML.",
"srs",
",",
"created",
"=",
"SpatialReferenceSystem",
".",
"objects",
".",
"get_or_create",
"(",
"code",
"=",
"\"EPSG:4326\"",
")",
"service",
".",
"srs",
".",
"add",
"(",
"srs",
")",
"return",
"service"
] |
Set a service object based on the XML metadata
<dct:references scheme="OGC:WMS">http://ngamaps.geointapps.org/arcgis
/services/RIO/Rio_Foundation_Transportation/MapServer/WMSServer
</dct:references>
:param instance:
:return: Layer
|
[
"Set",
"a",
"service",
"object",
"based",
"on",
"the",
"XML",
"metadata",
"<dct",
":",
"references",
"scheme",
"=",
"OGC",
":",
"WMS",
">",
"http",
":",
"//",
"ngamaps",
".",
"geointapps",
".",
"org",
"/",
"arcgis",
"/",
"services",
"/",
"RIO",
"/",
"Rio_Foundation_Transportation",
"/",
"MapServer",
"/",
"WMSServer",
"<",
"/",
"dct",
":",
"references",
">",
":",
"param",
"instance",
":",
":",
"return",
":",
"Layer"
] |
899a5385b15af7fba190ab4fae1d41e47d155a1b
|
https://github.com/cga-harvard/Hypermap-Registry/blob/899a5385b15af7fba190ab4fae1d41e47d155a1b/hypermap/search/pycsw_plugin.py#L56-L95
|
train
|
cga-harvard/Hypermap-Registry
|
hypermap/search/pycsw_plugin.py
|
HHypermapRepository.query_ids
|
def query_ids(self, ids):
"""
Query by list of identifiers
"""
results = self._get_repo_filter(Layer.objects).filter(uuid__in=ids).all()
if len(results) == 0: # try services
results = self._get_repo_filter(Service.objects).filter(uuid__in=ids).all()
return results
|
python
|
def query_ids(self, ids):
"""
Query by list of identifiers
"""
results = self._get_repo_filter(Layer.objects).filter(uuid__in=ids).all()
if len(results) == 0: # try services
results = self._get_repo_filter(Service.objects).filter(uuid__in=ids).all()
return results
|
[
"def",
"query_ids",
"(",
"self",
",",
"ids",
")",
":",
"results",
"=",
"self",
".",
"_get_repo_filter",
"(",
"Layer",
".",
"objects",
")",
".",
"filter",
"(",
"uuid__in",
"=",
"ids",
")",
".",
"all",
"(",
")",
"if",
"len",
"(",
"results",
")",
"==",
"0",
":",
"# try services",
"results",
"=",
"self",
".",
"_get_repo_filter",
"(",
"Service",
".",
"objects",
")",
".",
"filter",
"(",
"uuid__in",
"=",
"ids",
")",
".",
"all",
"(",
")",
"return",
"results"
] |
Query by list of identifiers
|
[
"Query",
"by",
"list",
"of",
"identifiers"
] |
899a5385b15af7fba190ab4fae1d41e47d155a1b
|
https://github.com/cga-harvard/Hypermap-Registry/blob/899a5385b15af7fba190ab4fae1d41e47d155a1b/hypermap/search/pycsw_plugin.py#L152-L162
|
train
|
cga-harvard/Hypermap-Registry
|
hypermap/search/pycsw_plugin.py
|
HHypermapRepository.query_domain
|
def query_domain(self, domain, typenames, domainquerytype='list', count=False):
"""
Query by property domain values
"""
objects = self._get_repo_filter(Layer.objects)
if domainquerytype == 'range':
return [tuple(objects.aggregate(Min(domain), Max(domain)).values())]
else:
if count:
return [(d[domain], d['%s__count' % domain])
for d in objects.values(domain).annotate(Count(domain))]
else:
return objects.values_list(domain).distinct()
|
python
|
def query_domain(self, domain, typenames, domainquerytype='list', count=False):
"""
Query by property domain values
"""
objects = self._get_repo_filter(Layer.objects)
if domainquerytype == 'range':
return [tuple(objects.aggregate(Min(domain), Max(domain)).values())]
else:
if count:
return [(d[domain], d['%s__count' % domain])
for d in objects.values(domain).annotate(Count(domain))]
else:
return objects.values_list(domain).distinct()
|
[
"def",
"query_domain",
"(",
"self",
",",
"domain",
",",
"typenames",
",",
"domainquerytype",
"=",
"'list'",
",",
"count",
"=",
"False",
")",
":",
"objects",
"=",
"self",
".",
"_get_repo_filter",
"(",
"Layer",
".",
"objects",
")",
"if",
"domainquerytype",
"==",
"'range'",
":",
"return",
"[",
"tuple",
"(",
"objects",
".",
"aggregate",
"(",
"Min",
"(",
"domain",
")",
",",
"Max",
"(",
"domain",
")",
")",
".",
"values",
"(",
")",
")",
"]",
"else",
":",
"if",
"count",
":",
"return",
"[",
"(",
"d",
"[",
"domain",
"]",
",",
"d",
"[",
"'%s__count'",
"%",
"domain",
"]",
")",
"for",
"d",
"in",
"objects",
".",
"values",
"(",
"domain",
")",
".",
"annotate",
"(",
"Count",
"(",
"domain",
")",
")",
"]",
"else",
":",
"return",
"objects",
".",
"values_list",
"(",
"domain",
")",
".",
"distinct",
"(",
")"
] |
Query by property domain values
|
[
"Query",
"by",
"property",
"domain",
"values"
] |
899a5385b15af7fba190ab4fae1d41e47d155a1b
|
https://github.com/cga-harvard/Hypermap-Registry/blob/899a5385b15af7fba190ab4fae1d41e47d155a1b/hypermap/search/pycsw_plugin.py#L164-L178
|
train
|
cga-harvard/Hypermap-Registry
|
hypermap/search/pycsw_plugin.py
|
HHypermapRepository.query_insert
|
def query_insert(self, direction='max'):
"""
Query to get latest (default) or earliest update to repository
"""
if direction == 'min':
return Layer.objects.aggregate(
Min('last_updated'))['last_updated__min'].strftime('%Y-%m-%dT%H:%M:%SZ')
return self._get_repo_filter(Layer.objects).aggregate(
Max('last_updated'))['last_updated__max'].strftime('%Y-%m-%dT%H:%M:%SZ')
|
python
|
def query_insert(self, direction='max'):
"""
Query to get latest (default) or earliest update to repository
"""
if direction == 'min':
return Layer.objects.aggregate(
Min('last_updated'))['last_updated__min'].strftime('%Y-%m-%dT%H:%M:%SZ')
return self._get_repo_filter(Layer.objects).aggregate(
Max('last_updated'))['last_updated__max'].strftime('%Y-%m-%dT%H:%M:%SZ')
|
[
"def",
"query_insert",
"(",
"self",
",",
"direction",
"=",
"'max'",
")",
":",
"if",
"direction",
"==",
"'min'",
":",
"return",
"Layer",
".",
"objects",
".",
"aggregate",
"(",
"Min",
"(",
"'last_updated'",
")",
")",
"[",
"'last_updated__min'",
"]",
".",
"strftime",
"(",
"'%Y-%m-%dT%H:%M:%SZ'",
")",
"return",
"self",
".",
"_get_repo_filter",
"(",
"Layer",
".",
"objects",
")",
".",
"aggregate",
"(",
"Max",
"(",
"'last_updated'",
")",
")",
"[",
"'last_updated__max'",
"]",
".",
"strftime",
"(",
"'%Y-%m-%dT%H:%M:%SZ'",
")"
] |
Query to get latest (default) or earliest update to repository
|
[
"Query",
"to",
"get",
"latest",
"(",
"default",
")",
"or",
"earliest",
"update",
"to",
"repository"
] |
899a5385b15af7fba190ab4fae1d41e47d155a1b
|
https://github.com/cga-harvard/Hypermap-Registry/blob/899a5385b15af7fba190ab4fae1d41e47d155a1b/hypermap/search/pycsw_plugin.py#L180-L188
|
train
|
cga-harvard/Hypermap-Registry
|
hypermap/search/pycsw_plugin.py
|
HHypermapRepository.query_source
|
def query_source(self, source):
"""
Query by source
"""
return self._get_repo_filter(Layer.objects).filter(url=source)
|
python
|
def query_source(self, source):
"""
Query by source
"""
return self._get_repo_filter(Layer.objects).filter(url=source)
|
[
"def",
"query_source",
"(",
"self",
",",
"source",
")",
":",
"return",
"self",
".",
"_get_repo_filter",
"(",
"Layer",
".",
"objects",
")",
".",
"filter",
"(",
"url",
"=",
"source",
")"
] |
Query by source
|
[
"Query",
"by",
"source"
] |
899a5385b15af7fba190ab4fae1d41e47d155a1b
|
https://github.com/cga-harvard/Hypermap-Registry/blob/899a5385b15af7fba190ab4fae1d41e47d155a1b/hypermap/search/pycsw_plugin.py#L190-L194
|
train
|
cga-harvard/Hypermap-Registry
|
hypermap/search/pycsw_plugin.py
|
HHypermapRepository.query
|
def query(self, constraint, sortby=None, typenames=None, maxrecords=10, startposition=0):
"""
Query records from underlying repository
"""
# run the raw query and get total
# we want to exclude layers which are not valid, as it is done in the search engine
if 'where' in constraint: # GetRecords with constraint
query = self._get_repo_filter(Layer.objects).filter(
is_valid=True).extra(where=[constraint['where']], params=constraint['values'])
else: # GetRecords sans constraint
query = self._get_repo_filter(Layer.objects).filter(is_valid=True)
total = query.count()
# apply sorting, limit and offset
if sortby is not None:
if 'spatial' in sortby and sortby['spatial']: # spatial sort
desc = False
if sortby['order'] == 'DESC':
desc = True
query = query.all()
return [str(total),
sorted(query,
key=lambda x: float(util.get_geometry_area(getattr(x, sortby['propertyname']))),
reverse=desc,
)[startposition:startposition+int(maxrecords)]]
else:
if sortby['order'] == 'DESC':
pname = '-%s' % sortby['propertyname']
else:
pname = sortby['propertyname']
return [str(total),
query.order_by(pname)[startposition:startposition+int(maxrecords)]]
else: # no sort
return [str(total), query.all()[startposition:startposition+int(maxrecords)]]
|
python
|
def query(self, constraint, sortby=None, typenames=None, maxrecords=10, startposition=0):
"""
Query records from underlying repository
"""
# run the raw query and get total
# we want to exclude layers which are not valid, as it is done in the search engine
if 'where' in constraint: # GetRecords with constraint
query = self._get_repo_filter(Layer.objects).filter(
is_valid=True).extra(where=[constraint['where']], params=constraint['values'])
else: # GetRecords sans constraint
query = self._get_repo_filter(Layer.objects).filter(is_valid=True)
total = query.count()
# apply sorting, limit and offset
if sortby is not None:
if 'spatial' in sortby and sortby['spatial']: # spatial sort
desc = False
if sortby['order'] == 'DESC':
desc = True
query = query.all()
return [str(total),
sorted(query,
key=lambda x: float(util.get_geometry_area(getattr(x, sortby['propertyname']))),
reverse=desc,
)[startposition:startposition+int(maxrecords)]]
else:
if sortby['order'] == 'DESC':
pname = '-%s' % sortby['propertyname']
else:
pname = sortby['propertyname']
return [str(total),
query.order_by(pname)[startposition:startposition+int(maxrecords)]]
else: # no sort
return [str(total), query.all()[startposition:startposition+int(maxrecords)]]
|
[
"def",
"query",
"(",
"self",
",",
"constraint",
",",
"sortby",
"=",
"None",
",",
"typenames",
"=",
"None",
",",
"maxrecords",
"=",
"10",
",",
"startposition",
"=",
"0",
")",
":",
"# run the raw query and get total",
"# we want to exclude layers which are not valid, as it is done in the search engine",
"if",
"'where'",
"in",
"constraint",
":",
"# GetRecords with constraint",
"query",
"=",
"self",
".",
"_get_repo_filter",
"(",
"Layer",
".",
"objects",
")",
".",
"filter",
"(",
"is_valid",
"=",
"True",
")",
".",
"extra",
"(",
"where",
"=",
"[",
"constraint",
"[",
"'where'",
"]",
"]",
",",
"params",
"=",
"constraint",
"[",
"'values'",
"]",
")",
"else",
":",
"# GetRecords sans constraint",
"query",
"=",
"self",
".",
"_get_repo_filter",
"(",
"Layer",
".",
"objects",
")",
".",
"filter",
"(",
"is_valid",
"=",
"True",
")",
"total",
"=",
"query",
".",
"count",
"(",
")",
"# apply sorting, limit and offset",
"if",
"sortby",
"is",
"not",
"None",
":",
"if",
"'spatial'",
"in",
"sortby",
"and",
"sortby",
"[",
"'spatial'",
"]",
":",
"# spatial sort",
"desc",
"=",
"False",
"if",
"sortby",
"[",
"'order'",
"]",
"==",
"'DESC'",
":",
"desc",
"=",
"True",
"query",
"=",
"query",
".",
"all",
"(",
")",
"return",
"[",
"str",
"(",
"total",
")",
",",
"sorted",
"(",
"query",
",",
"key",
"=",
"lambda",
"x",
":",
"float",
"(",
"util",
".",
"get_geometry_area",
"(",
"getattr",
"(",
"x",
",",
"sortby",
"[",
"'propertyname'",
"]",
")",
")",
")",
",",
"reverse",
"=",
"desc",
",",
")",
"[",
"startposition",
":",
"startposition",
"+",
"int",
"(",
"maxrecords",
")",
"]",
"]",
"else",
":",
"if",
"sortby",
"[",
"'order'",
"]",
"==",
"'DESC'",
":",
"pname",
"=",
"'-%s'",
"%",
"sortby",
"[",
"'propertyname'",
"]",
"else",
":",
"pname",
"=",
"sortby",
"[",
"'propertyname'",
"]",
"return",
"[",
"str",
"(",
"total",
")",
",",
"query",
".",
"order_by",
"(",
"pname",
")",
"[",
"startposition",
":",
"startposition",
"+",
"int",
"(",
"maxrecords",
")",
"]",
"]",
"else",
":",
"# no sort",
"return",
"[",
"str",
"(",
"total",
")",
",",
"query",
".",
"all",
"(",
")",
"[",
"startposition",
":",
"startposition",
"+",
"int",
"(",
"maxrecords",
")",
"]",
"]"
] |
Query records from underlying repository
|
[
"Query",
"records",
"from",
"underlying",
"repository"
] |
899a5385b15af7fba190ab4fae1d41e47d155a1b
|
https://github.com/cga-harvard/Hypermap-Registry/blob/899a5385b15af7fba190ab4fae1d41e47d155a1b/hypermap/search/pycsw_plugin.py#L196-L231
|
train
|
cga-harvard/Hypermap-Registry
|
hypermap/search/pycsw_plugin.py
|
HHypermapRepository.insert
|
def insert(self, resourcetype, source, insert_date=None):
"""
Insert a record into the repository
"""
caller = inspect.stack()[1][3]
if caller == 'transaction': # insert of Layer
hhclass = 'Layer'
source = resourcetype
resourcetype = resourcetype.csw_schema
else: # insert of service
hhclass = 'Service'
if resourcetype not in HYPERMAP_SERVICE_TYPES.keys():
raise RuntimeError('Unsupported Service Type')
return self._insert_or_update(resourcetype, source, mode='insert', hhclass=hhclass)
|
python
|
def insert(self, resourcetype, source, insert_date=None):
"""
Insert a record into the repository
"""
caller = inspect.stack()[1][3]
if caller == 'transaction': # insert of Layer
hhclass = 'Layer'
source = resourcetype
resourcetype = resourcetype.csw_schema
else: # insert of service
hhclass = 'Service'
if resourcetype not in HYPERMAP_SERVICE_TYPES.keys():
raise RuntimeError('Unsupported Service Type')
return self._insert_or_update(resourcetype, source, mode='insert', hhclass=hhclass)
|
[
"def",
"insert",
"(",
"self",
",",
"resourcetype",
",",
"source",
",",
"insert_date",
"=",
"None",
")",
":",
"caller",
"=",
"inspect",
".",
"stack",
"(",
")",
"[",
"1",
"]",
"[",
"3",
"]",
"if",
"caller",
"==",
"'transaction'",
":",
"# insert of Layer",
"hhclass",
"=",
"'Layer'",
"source",
"=",
"resourcetype",
"resourcetype",
"=",
"resourcetype",
".",
"csw_schema",
"else",
":",
"# insert of service",
"hhclass",
"=",
"'Service'",
"if",
"resourcetype",
"not",
"in",
"HYPERMAP_SERVICE_TYPES",
".",
"keys",
"(",
")",
":",
"raise",
"RuntimeError",
"(",
"'Unsupported Service Type'",
")",
"return",
"self",
".",
"_insert_or_update",
"(",
"resourcetype",
",",
"source",
",",
"mode",
"=",
"'insert'",
",",
"hhclass",
"=",
"hhclass",
")"
] |
Insert a record into the repository
|
[
"Insert",
"a",
"record",
"into",
"the",
"repository"
] |
899a5385b15af7fba190ab4fae1d41e47d155a1b
|
https://github.com/cga-harvard/Hypermap-Registry/blob/899a5385b15af7fba190ab4fae1d41e47d155a1b/hypermap/search/pycsw_plugin.py#L233-L249
|
train
|
cga-harvard/Hypermap-Registry
|
hypermap/search/pycsw_plugin.py
|
HHypermapRepository._insert_or_update
|
def _insert_or_update(self, resourcetype, source, mode='insert', hhclass='Service'):
"""
Insert or update a record in the repository
"""
keywords = []
if self.filter is not None:
catalog = Catalog.objects.get(id=int(self.filter.split()[-1]))
try:
if hhclass == 'Layer':
# TODO: better way of figuring out duplicates
match = Layer.objects.filter(name=source.name,
title=source.title,
abstract=source.abstract,
is_monitored=False)
matches = match.all()
if matches:
if mode == 'insert':
raise RuntimeError('HHypermap error: Layer %d \'%s\' already exists' % (
matches[0].id, source.title))
elif mode == 'update':
match.update(
name=source.name,
title=source.title,
abstract=source.abstract,
is_monitored=False,
xml=source.xml,
wkt_geometry=source.wkt_geometry,
anytext=util.get_anytext([source.title, source.abstract, source.keywords_csv])
)
service = get_service(source.xml)
res, keywords = create_layer_from_metadata_xml(resourcetype, source.xml,
monitor=False, service=service,
catalog=catalog)
res.save()
LOGGER.debug('Indexing layer with id %s on search engine' % res.uuid)
index_layer(res.id, use_cache=True)
else:
if resourcetype == 'http://www.opengis.net/cat/csw/2.0.2':
res = Endpoint(url=source, catalog=catalog)
else:
res = Service(type=HYPERMAP_SERVICE_TYPES[resourcetype], url=source, catalog=catalog)
res.save()
if keywords:
for kw in keywords:
res.keywords.add(kw)
except Exception as err:
raise RuntimeError('HHypermap error: %s' % err)
# return a list of ids that were inserted or updated
ids = []
if hhclass == 'Layer':
ids.append({'identifier': res.uuid, 'title': res.title})
else:
if resourcetype == 'http://www.opengis.net/cat/csw/2.0.2':
for res in Endpoint.objects.filter(url=source).all():
ids.append({'identifier': res.uuid, 'title': res.url})
else:
for res in Service.objects.filter(url=source).all():
ids.append({'identifier': res.uuid, 'title': res.title})
return ids
|
python
|
def _insert_or_update(self, resourcetype, source, mode='insert', hhclass='Service'):
"""
Insert or update a record in the repository
"""
keywords = []
if self.filter is not None:
catalog = Catalog.objects.get(id=int(self.filter.split()[-1]))
try:
if hhclass == 'Layer':
# TODO: better way of figuring out duplicates
match = Layer.objects.filter(name=source.name,
title=source.title,
abstract=source.abstract,
is_monitored=False)
matches = match.all()
if matches:
if mode == 'insert':
raise RuntimeError('HHypermap error: Layer %d \'%s\' already exists' % (
matches[0].id, source.title))
elif mode == 'update':
match.update(
name=source.name,
title=source.title,
abstract=source.abstract,
is_monitored=False,
xml=source.xml,
wkt_geometry=source.wkt_geometry,
anytext=util.get_anytext([source.title, source.abstract, source.keywords_csv])
)
service = get_service(source.xml)
res, keywords = create_layer_from_metadata_xml(resourcetype, source.xml,
monitor=False, service=service,
catalog=catalog)
res.save()
LOGGER.debug('Indexing layer with id %s on search engine' % res.uuid)
index_layer(res.id, use_cache=True)
else:
if resourcetype == 'http://www.opengis.net/cat/csw/2.0.2':
res = Endpoint(url=source, catalog=catalog)
else:
res = Service(type=HYPERMAP_SERVICE_TYPES[resourcetype], url=source, catalog=catalog)
res.save()
if keywords:
for kw in keywords:
res.keywords.add(kw)
except Exception as err:
raise RuntimeError('HHypermap error: %s' % err)
# return a list of ids that were inserted or updated
ids = []
if hhclass == 'Layer':
ids.append({'identifier': res.uuid, 'title': res.title})
else:
if resourcetype == 'http://www.opengis.net/cat/csw/2.0.2':
for res in Endpoint.objects.filter(url=source).all():
ids.append({'identifier': res.uuid, 'title': res.url})
else:
for res in Service.objects.filter(url=source).all():
ids.append({'identifier': res.uuid, 'title': res.title})
return ids
|
[
"def",
"_insert_or_update",
"(",
"self",
",",
"resourcetype",
",",
"source",
",",
"mode",
"=",
"'insert'",
",",
"hhclass",
"=",
"'Service'",
")",
":",
"keywords",
"=",
"[",
"]",
"if",
"self",
".",
"filter",
"is",
"not",
"None",
":",
"catalog",
"=",
"Catalog",
".",
"objects",
".",
"get",
"(",
"id",
"=",
"int",
"(",
"self",
".",
"filter",
".",
"split",
"(",
")",
"[",
"-",
"1",
"]",
")",
")",
"try",
":",
"if",
"hhclass",
"==",
"'Layer'",
":",
"# TODO: better way of figuring out duplicates",
"match",
"=",
"Layer",
".",
"objects",
".",
"filter",
"(",
"name",
"=",
"source",
".",
"name",
",",
"title",
"=",
"source",
".",
"title",
",",
"abstract",
"=",
"source",
".",
"abstract",
",",
"is_monitored",
"=",
"False",
")",
"matches",
"=",
"match",
".",
"all",
"(",
")",
"if",
"matches",
":",
"if",
"mode",
"==",
"'insert'",
":",
"raise",
"RuntimeError",
"(",
"'HHypermap error: Layer %d \\'%s\\' already exists'",
"%",
"(",
"matches",
"[",
"0",
"]",
".",
"id",
",",
"source",
".",
"title",
")",
")",
"elif",
"mode",
"==",
"'update'",
":",
"match",
".",
"update",
"(",
"name",
"=",
"source",
".",
"name",
",",
"title",
"=",
"source",
".",
"title",
",",
"abstract",
"=",
"source",
".",
"abstract",
",",
"is_monitored",
"=",
"False",
",",
"xml",
"=",
"source",
".",
"xml",
",",
"wkt_geometry",
"=",
"source",
".",
"wkt_geometry",
",",
"anytext",
"=",
"util",
".",
"get_anytext",
"(",
"[",
"source",
".",
"title",
",",
"source",
".",
"abstract",
",",
"source",
".",
"keywords_csv",
"]",
")",
")",
"service",
"=",
"get_service",
"(",
"source",
".",
"xml",
")",
"res",
",",
"keywords",
"=",
"create_layer_from_metadata_xml",
"(",
"resourcetype",
",",
"source",
".",
"xml",
",",
"monitor",
"=",
"False",
",",
"service",
"=",
"service",
",",
"catalog",
"=",
"catalog",
")",
"res",
".",
"save",
"(",
")",
"LOGGER",
".",
"debug",
"(",
"'Indexing layer with id %s on search engine'",
"%",
"res",
".",
"uuid",
")",
"index_layer",
"(",
"res",
".",
"id",
",",
"use_cache",
"=",
"True",
")",
"else",
":",
"if",
"resourcetype",
"==",
"'http://www.opengis.net/cat/csw/2.0.2'",
":",
"res",
"=",
"Endpoint",
"(",
"url",
"=",
"source",
",",
"catalog",
"=",
"catalog",
")",
"else",
":",
"res",
"=",
"Service",
"(",
"type",
"=",
"HYPERMAP_SERVICE_TYPES",
"[",
"resourcetype",
"]",
",",
"url",
"=",
"source",
",",
"catalog",
"=",
"catalog",
")",
"res",
".",
"save",
"(",
")",
"if",
"keywords",
":",
"for",
"kw",
"in",
"keywords",
":",
"res",
".",
"keywords",
".",
"add",
"(",
"kw",
")",
"except",
"Exception",
"as",
"err",
":",
"raise",
"RuntimeError",
"(",
"'HHypermap error: %s'",
"%",
"err",
")",
"# return a list of ids that were inserted or updated",
"ids",
"=",
"[",
"]",
"if",
"hhclass",
"==",
"'Layer'",
":",
"ids",
".",
"append",
"(",
"{",
"'identifier'",
":",
"res",
".",
"uuid",
",",
"'title'",
":",
"res",
".",
"title",
"}",
")",
"else",
":",
"if",
"resourcetype",
"==",
"'http://www.opengis.net/cat/csw/2.0.2'",
":",
"for",
"res",
"in",
"Endpoint",
".",
"objects",
".",
"filter",
"(",
"url",
"=",
"source",
")",
".",
"all",
"(",
")",
":",
"ids",
".",
"append",
"(",
"{",
"'identifier'",
":",
"res",
".",
"uuid",
",",
"'title'",
":",
"res",
".",
"url",
"}",
")",
"else",
":",
"for",
"res",
"in",
"Service",
".",
"objects",
".",
"filter",
"(",
"url",
"=",
"source",
")",
".",
"all",
"(",
")",
":",
"ids",
".",
"append",
"(",
"{",
"'identifier'",
":",
"res",
".",
"uuid",
",",
"'title'",
":",
"res",
".",
"title",
"}",
")",
"return",
"ids"
] |
Insert or update a record in the repository
|
[
"Insert",
"or",
"update",
"a",
"record",
"in",
"the",
"repository"
] |
899a5385b15af7fba190ab4fae1d41e47d155a1b
|
https://github.com/cga-harvard/Hypermap-Registry/blob/899a5385b15af7fba190ab4fae1d41e47d155a1b/hypermap/search/pycsw_plugin.py#L251-L321
|
train
|
cga-harvard/Hypermap-Registry
|
hypermap/search/pycsw_plugin.py
|
HHypermapRepository.delete
|
def delete(self, constraint):
"""
Delete a record from the repository
"""
results = self._get_repo_filter(Service.objects).extra(where=[constraint['where']],
params=constraint['values']).all()
deleted = len(results)
results.delete()
return deleted
|
python
|
def delete(self, constraint):
"""
Delete a record from the repository
"""
results = self._get_repo_filter(Service.objects).extra(where=[constraint['where']],
params=constraint['values']).all()
deleted = len(results)
results.delete()
return deleted
|
[
"def",
"delete",
"(",
"self",
",",
"constraint",
")",
":",
"results",
"=",
"self",
".",
"_get_repo_filter",
"(",
"Service",
".",
"objects",
")",
".",
"extra",
"(",
"where",
"=",
"[",
"constraint",
"[",
"'where'",
"]",
"]",
",",
"params",
"=",
"constraint",
"[",
"'values'",
"]",
")",
".",
"all",
"(",
")",
"deleted",
"=",
"len",
"(",
"results",
")",
"results",
".",
"delete",
"(",
")",
"return",
"deleted"
] |
Delete a record from the repository
|
[
"Delete",
"a",
"record",
"from",
"the",
"repository"
] |
899a5385b15af7fba190ab4fae1d41e47d155a1b
|
https://github.com/cga-harvard/Hypermap-Registry/blob/899a5385b15af7fba190ab4fae1d41e47d155a1b/hypermap/search/pycsw_plugin.py#L323-L332
|
train
|
cga-harvard/Hypermap-Registry
|
hypermap/search/pycsw_plugin.py
|
HHypermapRepository._get_repo_filter
|
def _get_repo_filter(self, query):
"""
Apply repository wide side filter / mask query
"""
if self.filter is not None:
return query.extra(where=[self.filter])
return query
|
python
|
def _get_repo_filter(self, query):
"""
Apply repository wide side filter / mask query
"""
if self.filter is not None:
return query.extra(where=[self.filter])
return query
|
[
"def",
"_get_repo_filter",
"(",
"self",
",",
"query",
")",
":",
"if",
"self",
".",
"filter",
"is",
"not",
"None",
":",
"return",
"query",
".",
"extra",
"(",
"where",
"=",
"[",
"self",
".",
"filter",
"]",
")",
"return",
"query"
] |
Apply repository wide side filter / mask query
|
[
"Apply",
"repository",
"wide",
"side",
"filter",
"/",
"mask",
"query"
] |
899a5385b15af7fba190ab4fae1d41e47d155a1b
|
https://github.com/cga-harvard/Hypermap-Registry/blob/899a5385b15af7fba190ab4fae1d41e47d155a1b/hypermap/search/pycsw_plugin.py#L334-L340
|
train
|
hni14/jismesh
|
jismesh/utils.py
|
to_meshcode
|
def to_meshcode(lat, lon, level):
"""緯度経度から指定次の地域メッシュコードを算出する。
Args:
lat: 世界測地系の緯度(度単位)
lon: 世界測地系の経度(度単位)
level: 地域メッシュコードの次数
1次(80km四方):1
40倍(40km四方):40000
20倍(20km四方):20000
16倍(16km四方):16000
2次(10km四方):2
8倍(8km四方):8000
5倍(5km四方):5000
4倍(4km四方):4000
2.5倍(2.5km四方):2500
2倍(2km四方):2000
3次(1km四方):3
4次(500m四方):4
5次(250m四方):5
6次(125m四方):6
Return:
指定次の地域メッシュコード
"""
if not 0 <= lat < 66.66:
raise ValueError('the latitude is out of bound.')
if not 100 <= lon < 180:
raise ValueError('the longitude is out of bound.')
# reminder of latitude and longitude by its unit in degree of mesh level.
rem_lat_lv0 = lambda lat: lat
rem_lon_lv0 = lambda lon: lon % 100
rem_lat_lv1 = lambda lat: rem_lat_lv0(lat) % _unit_lat_lv1()
rem_lon_lv1 = lambda lon: rem_lon_lv0(lon) % _unit_lon_lv1()
rem_lat_40000 = lambda lat: rem_lat_lv1(lat) % _unit_lat_40000()
rem_lon_40000 = lambda lon: rem_lon_lv1(lon) % _unit_lon_40000()
rem_lat_20000 = lambda lat: rem_lat_40000(lat) % _unit_lat_20000()
rem_lon_20000 = lambda lon: rem_lon_40000(lon) % _unit_lon_20000()
rem_lat_16000 = lambda lat: rem_lat_lv1(lat) % _unit_lat_16000()
rem_lon_16000 = lambda lon: rem_lon_lv1(lon) % _unit_lon_16000()
rem_lat_lv2 = lambda lat: rem_lat_lv1(lat) % _unit_lat_lv2()
rem_lon_lv2 = lambda lon: rem_lon_lv1(lon) % _unit_lon_lv2()
rem_lat_8000 = lambda lat: rem_lat_lv1(lat) % _unit_lat_8000()
rem_lon_8000 = lambda lon: rem_lon_lv1(lon) % _unit_lon_8000()
rem_lat_5000 = lambda lat: rem_lat_lv2(lat) % _unit_lat_5000()
rem_lon_5000 = lambda lon: rem_lon_lv2(lon) % _unit_lon_5000()
rem_lat_4000 = lambda lat: rem_lat_8000(lat) % _unit_lat_4000()
rem_lon_4000 = lambda lon: rem_lon_8000(lon) % _unit_lon_4000()
rem_lat_2500 = lambda lat: rem_lat_5000(lat) % _unit_lat_2500()
rem_lon_2500 = lambda lon: rem_lon_5000(lon) % _unit_lon_2500()
rem_lat_2000 = lambda lat: rem_lat_lv2(lat) % _unit_lat_2000()
rem_lon_2000 = lambda lon: rem_lon_lv2(lon) % _unit_lon_2000()
rem_lat_lv3 = lambda lat: rem_lat_lv2(lat) % _unit_lat_lv3()
rem_lon_lv3 = lambda lon: rem_lon_lv2(lon) % _unit_lon_lv3()
rem_lat_lv4 = lambda lat: rem_lat_lv3(lat) % _unit_lat_lv4()
rem_lon_lv4 = lambda lon: rem_lon_lv3(lon) % _unit_lon_lv4()
rem_lat_lv5 = lambda lat: rem_lat_lv4(lat) % _unit_lat_lv5()
rem_lon_lv5 = lambda lon: rem_lon_lv4(lon) % _unit_lon_lv5()
rem_lat_lv6 = lambda lat: rem_lat_lv5(lat) % _unit_lat_lv6()
rem_lon_lv6 = lambda lon: rem_lon_lv5(lon) % _unit_lon_lv6()
def meshcode_lv1(lat, lon):
ab = int(rem_lat_lv0(lat) / _unit_lat_lv1())
cd = int(rem_lon_lv0(lon) / _unit_lon_lv1())
return str(ab) + str(cd)
def meshcode_40000(lat, lon):
e = int(rem_lat_lv1(lat) / _unit_lat_40000())*2 + int(rem_lon_lv1(lon) / _unit_lon_40000()) + 1
return meshcode_lv1(lat, lon) + str(e)
def meshcode_20000(lat, lon):
f = int(rem_lat_40000(lat) / _unit_lat_20000())*2 + int(rem_lon_40000(lon) / _unit_lon_20000()) + 1
g = 5
return meshcode_40000(lat, lon) + str(f) + str(g)
def meshcode_16000(lat, lon):
e = int(rem_lat_lv1(lat) / _unit_lat_16000())*2
f = int(rem_lon_lv1(lon) / _unit_lon_16000())*2
g = 7
return meshcode_lv1(lat, lon) + str(e) + str(f) + str(g)
def meshcode_lv2(lat, lon):
e = int(rem_lat_lv1(lat) / _unit_lat_lv2())
f = int(rem_lon_lv1(lon) / _unit_lon_lv2())
return meshcode_lv1(lat, lon) + str(e) + str(f)
def meshcode_8000(lat, lon):
e = int(rem_lat_lv1(lat) / _unit_lat_8000())
f = int(rem_lon_lv1(lon) / _unit_lon_8000())
g = 6
return meshcode_lv1(lat, lon) + str(e) + str(f) + str(g)
def meshcode_5000(lat, lon):
g = int(rem_lat_lv2(lat) / _unit_lat_5000())*2 + int(rem_lon_lv2(lon) / _unit_lon_5000()) + 1
return meshcode_lv2(lat, lon) + str(g)
def meshcode_4000(lat, lon):
h = int(rem_lat_8000(lat) / _unit_lat_4000())*2 + int(rem_lon_8000(lon) / _unit_lon_4000()) + 1
i = 7
return meshcode_8000(lat, lon) + str(h) + str(i)
def meshcode_2500(lat, lon):
h = int(rem_lat_5000(lat) / _unit_lat_2500())*2 + int(rem_lon_5000(lon) / _unit_lon_2500()) + 1
i = 6
return meshcode_5000(lat, lon) + str(h) + str(i)
def meshcode_2000(lat, lon):
g = int(rem_lat_lv2(lat) / _unit_lat_2000())*2
h = int(rem_lon_lv2(lon) / _unit_lon_2000())*2
i = 5
return meshcode_lv2(lat, lon) + str(g) + str(h) + str(i)
def meshcode_lv3(lat, lon):
g = int(rem_lat_lv2(lat) / _unit_lat_lv3())
h = int(rem_lon_lv2(lon) / _unit_lon_lv3())
return meshcode_lv2(lat, lon) + str(g) + str(h)
def meshcode_lv4(lat, lon):
i = int(rem_lat_lv3(lat) / _unit_lat_lv4())*2 + int(rem_lon_lv3(lon) / _unit_lon_lv4()) + 1
return meshcode_lv3(lat, lon) + str(i)
def meshcode_lv5(lat, lon):
j = int(rem_lat_lv4(lat) / _unit_lat_lv5())*2 + int(rem_lon_lv4(lon) / _unit_lon_lv5()) + 1
return meshcode_lv4(lat, lon) + str(j)
def meshcode_lv6(lat, lon):
k = int(rem_lat_lv5(lat) / _unit_lat_lv6())*2 + int(rem_lon_lv5(lon) / _unit_lon_lv6()) + 1
return meshcode_lv5(lat, lon) + str(k)
if level == 1:
return meshcode_lv1(lat, lon)
if level == 40000:
return meshcode_40000(lat, lon)
if level == 20000:
return meshcode_20000(lat, lon)
if level == 16000:
return meshcode_16000(lat, lon)
if level == 2:
return meshcode_lv2(lat, lon)
if level == 8000:
return meshcode_8000(lat, lon)
if level == 5000:
return meshcode_5000(lat, lon)
if level == 4000:
return meshcode_4000(lat, lon)
if level == 2500:
return meshcode_2500(lat, lon)
if level == 2000:
return meshcode_2000(lat, lon)
if level == 3:
return meshcode_lv3(lat, lon)
if level == 4:
return meshcode_lv4(lat, lon)
if level == 5:
return meshcode_lv5(lat, lon)
if level == 6:
return meshcode_lv6(lat, lon)
raise ValueError("the level is unsupported.")
|
python
|
def to_meshcode(lat, lon, level):
"""緯度経度から指定次の地域メッシュコードを算出する。
Args:
lat: 世界測地系の緯度(度単位)
lon: 世界測地系の経度(度単位)
level: 地域メッシュコードの次数
1次(80km四方):1
40倍(40km四方):40000
20倍(20km四方):20000
16倍(16km四方):16000
2次(10km四方):2
8倍(8km四方):8000
5倍(5km四方):5000
4倍(4km四方):4000
2.5倍(2.5km四方):2500
2倍(2km四方):2000
3次(1km四方):3
4次(500m四方):4
5次(250m四方):5
6次(125m四方):6
Return:
指定次の地域メッシュコード
"""
if not 0 <= lat < 66.66:
raise ValueError('the latitude is out of bound.')
if not 100 <= lon < 180:
raise ValueError('the longitude is out of bound.')
# reminder of latitude and longitude by its unit in degree of mesh level.
rem_lat_lv0 = lambda lat: lat
rem_lon_lv0 = lambda lon: lon % 100
rem_lat_lv1 = lambda lat: rem_lat_lv0(lat) % _unit_lat_lv1()
rem_lon_lv1 = lambda lon: rem_lon_lv0(lon) % _unit_lon_lv1()
rem_lat_40000 = lambda lat: rem_lat_lv1(lat) % _unit_lat_40000()
rem_lon_40000 = lambda lon: rem_lon_lv1(lon) % _unit_lon_40000()
rem_lat_20000 = lambda lat: rem_lat_40000(lat) % _unit_lat_20000()
rem_lon_20000 = lambda lon: rem_lon_40000(lon) % _unit_lon_20000()
rem_lat_16000 = lambda lat: rem_lat_lv1(lat) % _unit_lat_16000()
rem_lon_16000 = lambda lon: rem_lon_lv1(lon) % _unit_lon_16000()
rem_lat_lv2 = lambda lat: rem_lat_lv1(lat) % _unit_lat_lv2()
rem_lon_lv2 = lambda lon: rem_lon_lv1(lon) % _unit_lon_lv2()
rem_lat_8000 = lambda lat: rem_lat_lv1(lat) % _unit_lat_8000()
rem_lon_8000 = lambda lon: rem_lon_lv1(lon) % _unit_lon_8000()
rem_lat_5000 = lambda lat: rem_lat_lv2(lat) % _unit_lat_5000()
rem_lon_5000 = lambda lon: rem_lon_lv2(lon) % _unit_lon_5000()
rem_lat_4000 = lambda lat: rem_lat_8000(lat) % _unit_lat_4000()
rem_lon_4000 = lambda lon: rem_lon_8000(lon) % _unit_lon_4000()
rem_lat_2500 = lambda lat: rem_lat_5000(lat) % _unit_lat_2500()
rem_lon_2500 = lambda lon: rem_lon_5000(lon) % _unit_lon_2500()
rem_lat_2000 = lambda lat: rem_lat_lv2(lat) % _unit_lat_2000()
rem_lon_2000 = lambda lon: rem_lon_lv2(lon) % _unit_lon_2000()
rem_lat_lv3 = lambda lat: rem_lat_lv2(lat) % _unit_lat_lv3()
rem_lon_lv3 = lambda lon: rem_lon_lv2(lon) % _unit_lon_lv3()
rem_lat_lv4 = lambda lat: rem_lat_lv3(lat) % _unit_lat_lv4()
rem_lon_lv4 = lambda lon: rem_lon_lv3(lon) % _unit_lon_lv4()
rem_lat_lv5 = lambda lat: rem_lat_lv4(lat) % _unit_lat_lv5()
rem_lon_lv5 = lambda lon: rem_lon_lv4(lon) % _unit_lon_lv5()
rem_lat_lv6 = lambda lat: rem_lat_lv5(lat) % _unit_lat_lv6()
rem_lon_lv6 = lambda lon: rem_lon_lv5(lon) % _unit_lon_lv6()
def meshcode_lv1(lat, lon):
ab = int(rem_lat_lv0(lat) / _unit_lat_lv1())
cd = int(rem_lon_lv0(lon) / _unit_lon_lv1())
return str(ab) + str(cd)
def meshcode_40000(lat, lon):
e = int(rem_lat_lv1(lat) / _unit_lat_40000())*2 + int(rem_lon_lv1(lon) / _unit_lon_40000()) + 1
return meshcode_lv1(lat, lon) + str(e)
def meshcode_20000(lat, lon):
f = int(rem_lat_40000(lat) / _unit_lat_20000())*2 + int(rem_lon_40000(lon) / _unit_lon_20000()) + 1
g = 5
return meshcode_40000(lat, lon) + str(f) + str(g)
def meshcode_16000(lat, lon):
e = int(rem_lat_lv1(lat) / _unit_lat_16000())*2
f = int(rem_lon_lv1(lon) / _unit_lon_16000())*2
g = 7
return meshcode_lv1(lat, lon) + str(e) + str(f) + str(g)
def meshcode_lv2(lat, lon):
e = int(rem_lat_lv1(lat) / _unit_lat_lv2())
f = int(rem_lon_lv1(lon) / _unit_lon_lv2())
return meshcode_lv1(lat, lon) + str(e) + str(f)
def meshcode_8000(lat, lon):
e = int(rem_lat_lv1(lat) / _unit_lat_8000())
f = int(rem_lon_lv1(lon) / _unit_lon_8000())
g = 6
return meshcode_lv1(lat, lon) + str(e) + str(f) + str(g)
def meshcode_5000(lat, lon):
g = int(rem_lat_lv2(lat) / _unit_lat_5000())*2 + int(rem_lon_lv2(lon) / _unit_lon_5000()) + 1
return meshcode_lv2(lat, lon) + str(g)
def meshcode_4000(lat, lon):
h = int(rem_lat_8000(lat) / _unit_lat_4000())*2 + int(rem_lon_8000(lon) / _unit_lon_4000()) + 1
i = 7
return meshcode_8000(lat, lon) + str(h) + str(i)
def meshcode_2500(lat, lon):
h = int(rem_lat_5000(lat) / _unit_lat_2500())*2 + int(rem_lon_5000(lon) / _unit_lon_2500()) + 1
i = 6
return meshcode_5000(lat, lon) + str(h) + str(i)
def meshcode_2000(lat, lon):
g = int(rem_lat_lv2(lat) / _unit_lat_2000())*2
h = int(rem_lon_lv2(lon) / _unit_lon_2000())*2
i = 5
return meshcode_lv2(lat, lon) + str(g) + str(h) + str(i)
def meshcode_lv3(lat, lon):
g = int(rem_lat_lv2(lat) / _unit_lat_lv3())
h = int(rem_lon_lv2(lon) / _unit_lon_lv3())
return meshcode_lv2(lat, lon) + str(g) + str(h)
def meshcode_lv4(lat, lon):
i = int(rem_lat_lv3(lat) / _unit_lat_lv4())*2 + int(rem_lon_lv3(lon) / _unit_lon_lv4()) + 1
return meshcode_lv3(lat, lon) + str(i)
def meshcode_lv5(lat, lon):
j = int(rem_lat_lv4(lat) / _unit_lat_lv5())*2 + int(rem_lon_lv4(lon) / _unit_lon_lv5()) + 1
return meshcode_lv4(lat, lon) + str(j)
def meshcode_lv6(lat, lon):
k = int(rem_lat_lv5(lat) / _unit_lat_lv6())*2 + int(rem_lon_lv5(lon) / _unit_lon_lv6()) + 1
return meshcode_lv5(lat, lon) + str(k)
if level == 1:
return meshcode_lv1(lat, lon)
if level == 40000:
return meshcode_40000(lat, lon)
if level == 20000:
return meshcode_20000(lat, lon)
if level == 16000:
return meshcode_16000(lat, lon)
if level == 2:
return meshcode_lv2(lat, lon)
if level == 8000:
return meshcode_8000(lat, lon)
if level == 5000:
return meshcode_5000(lat, lon)
if level == 4000:
return meshcode_4000(lat, lon)
if level == 2500:
return meshcode_2500(lat, lon)
if level == 2000:
return meshcode_2000(lat, lon)
if level == 3:
return meshcode_lv3(lat, lon)
if level == 4:
return meshcode_lv4(lat, lon)
if level == 5:
return meshcode_lv5(lat, lon)
if level == 6:
return meshcode_lv6(lat, lon)
raise ValueError("the level is unsupported.")
|
[
"def",
"to_meshcode",
"(",
"lat",
",",
"lon",
",",
"level",
")",
":",
"if",
"not",
"0",
"<=",
"lat",
"<",
"66.66",
":",
"raise",
"ValueError",
"(",
"'the latitude is out of bound.'",
")",
"if",
"not",
"100",
"<=",
"lon",
"<",
"180",
":",
"raise",
"ValueError",
"(",
"'the longitude is out of bound.'",
")",
"# reminder of latitude and longitude by its unit in degree of mesh level.",
"rem_lat_lv0",
"=",
"lambda",
"lat",
":",
"lat",
"rem_lon_lv0",
"=",
"lambda",
"lon",
":",
"lon",
"%",
"100",
"rem_lat_lv1",
"=",
"lambda",
"lat",
":",
"rem_lat_lv0",
"(",
"lat",
")",
"%",
"_unit_lat_lv1",
"(",
")",
"rem_lon_lv1",
"=",
"lambda",
"lon",
":",
"rem_lon_lv0",
"(",
"lon",
")",
"%",
"_unit_lon_lv1",
"(",
")",
"rem_lat_40000",
"=",
"lambda",
"lat",
":",
"rem_lat_lv1",
"(",
"lat",
")",
"%",
"_unit_lat_40000",
"(",
")",
"rem_lon_40000",
"=",
"lambda",
"lon",
":",
"rem_lon_lv1",
"(",
"lon",
")",
"%",
"_unit_lon_40000",
"(",
")",
"rem_lat_20000",
"=",
"lambda",
"lat",
":",
"rem_lat_40000",
"(",
"lat",
")",
"%",
"_unit_lat_20000",
"(",
")",
"rem_lon_20000",
"=",
"lambda",
"lon",
":",
"rem_lon_40000",
"(",
"lon",
")",
"%",
"_unit_lon_20000",
"(",
")",
"rem_lat_16000",
"=",
"lambda",
"lat",
":",
"rem_lat_lv1",
"(",
"lat",
")",
"%",
"_unit_lat_16000",
"(",
")",
"rem_lon_16000",
"=",
"lambda",
"lon",
":",
"rem_lon_lv1",
"(",
"lon",
")",
"%",
"_unit_lon_16000",
"(",
")",
"rem_lat_lv2",
"=",
"lambda",
"lat",
":",
"rem_lat_lv1",
"(",
"lat",
")",
"%",
"_unit_lat_lv2",
"(",
")",
"rem_lon_lv2",
"=",
"lambda",
"lon",
":",
"rem_lon_lv1",
"(",
"lon",
")",
"%",
"_unit_lon_lv2",
"(",
")",
"rem_lat_8000",
"=",
"lambda",
"lat",
":",
"rem_lat_lv1",
"(",
"lat",
")",
"%",
"_unit_lat_8000",
"(",
")",
"rem_lon_8000",
"=",
"lambda",
"lon",
":",
"rem_lon_lv1",
"(",
"lon",
")",
"%",
"_unit_lon_8000",
"(",
")",
"rem_lat_5000",
"=",
"lambda",
"lat",
":",
"rem_lat_lv2",
"(",
"lat",
")",
"%",
"_unit_lat_5000",
"(",
")",
"rem_lon_5000",
"=",
"lambda",
"lon",
":",
"rem_lon_lv2",
"(",
"lon",
")",
"%",
"_unit_lon_5000",
"(",
")",
"rem_lat_4000",
"=",
"lambda",
"lat",
":",
"rem_lat_8000",
"(",
"lat",
")",
"%",
"_unit_lat_4000",
"(",
")",
"rem_lon_4000",
"=",
"lambda",
"lon",
":",
"rem_lon_8000",
"(",
"lon",
")",
"%",
"_unit_lon_4000",
"(",
")",
"rem_lat_2500",
"=",
"lambda",
"lat",
":",
"rem_lat_5000",
"(",
"lat",
")",
"%",
"_unit_lat_2500",
"(",
")",
"rem_lon_2500",
"=",
"lambda",
"lon",
":",
"rem_lon_5000",
"(",
"lon",
")",
"%",
"_unit_lon_2500",
"(",
")",
"rem_lat_2000",
"=",
"lambda",
"lat",
":",
"rem_lat_lv2",
"(",
"lat",
")",
"%",
"_unit_lat_2000",
"(",
")",
"rem_lon_2000",
"=",
"lambda",
"lon",
":",
"rem_lon_lv2",
"(",
"lon",
")",
"%",
"_unit_lon_2000",
"(",
")",
"rem_lat_lv3",
"=",
"lambda",
"lat",
":",
"rem_lat_lv2",
"(",
"lat",
")",
"%",
"_unit_lat_lv3",
"(",
")",
"rem_lon_lv3",
"=",
"lambda",
"lon",
":",
"rem_lon_lv2",
"(",
"lon",
")",
"%",
"_unit_lon_lv3",
"(",
")",
"rem_lat_lv4",
"=",
"lambda",
"lat",
":",
"rem_lat_lv3",
"(",
"lat",
")",
"%",
"_unit_lat_lv4",
"(",
")",
"rem_lon_lv4",
"=",
"lambda",
"lon",
":",
"rem_lon_lv3",
"(",
"lon",
")",
"%",
"_unit_lon_lv4",
"(",
")",
"rem_lat_lv5",
"=",
"lambda",
"lat",
":",
"rem_lat_lv4",
"(",
"lat",
")",
"%",
"_unit_lat_lv5",
"(",
")",
"rem_lon_lv5",
"=",
"lambda",
"lon",
":",
"rem_lon_lv4",
"(",
"lon",
")",
"%",
"_unit_lon_lv5",
"(",
")",
"rem_lat_lv6",
"=",
"lambda",
"lat",
":",
"rem_lat_lv5",
"(",
"lat",
")",
"%",
"_unit_lat_lv6",
"(",
")",
"rem_lon_lv6",
"=",
"lambda",
"lon",
":",
"rem_lon_lv5",
"(",
"lon",
")",
"%",
"_unit_lon_lv6",
"(",
")",
"def",
"meshcode_lv1",
"(",
"lat",
",",
"lon",
")",
":",
"ab",
"=",
"int",
"(",
"rem_lat_lv0",
"(",
"lat",
")",
"/",
"_unit_lat_lv1",
"(",
")",
")",
"cd",
"=",
"int",
"(",
"rem_lon_lv0",
"(",
"lon",
")",
"/",
"_unit_lon_lv1",
"(",
")",
")",
"return",
"str",
"(",
"ab",
")",
"+",
"str",
"(",
"cd",
")",
"def",
"meshcode_40000",
"(",
"lat",
",",
"lon",
")",
":",
"e",
"=",
"int",
"(",
"rem_lat_lv1",
"(",
"lat",
")",
"/",
"_unit_lat_40000",
"(",
")",
")",
"*",
"2",
"+",
"int",
"(",
"rem_lon_lv1",
"(",
"lon",
")",
"/",
"_unit_lon_40000",
"(",
")",
")",
"+",
"1",
"return",
"meshcode_lv1",
"(",
"lat",
",",
"lon",
")",
"+",
"str",
"(",
"e",
")",
"def",
"meshcode_20000",
"(",
"lat",
",",
"lon",
")",
":",
"f",
"=",
"int",
"(",
"rem_lat_40000",
"(",
"lat",
")",
"/",
"_unit_lat_20000",
"(",
")",
")",
"*",
"2",
"+",
"int",
"(",
"rem_lon_40000",
"(",
"lon",
")",
"/",
"_unit_lon_20000",
"(",
")",
")",
"+",
"1",
"g",
"=",
"5",
"return",
"meshcode_40000",
"(",
"lat",
",",
"lon",
")",
"+",
"str",
"(",
"f",
")",
"+",
"str",
"(",
"g",
")",
"def",
"meshcode_16000",
"(",
"lat",
",",
"lon",
")",
":",
"e",
"=",
"int",
"(",
"rem_lat_lv1",
"(",
"lat",
")",
"/",
"_unit_lat_16000",
"(",
")",
")",
"*",
"2",
"f",
"=",
"int",
"(",
"rem_lon_lv1",
"(",
"lon",
")",
"/",
"_unit_lon_16000",
"(",
")",
")",
"*",
"2",
"g",
"=",
"7",
"return",
"meshcode_lv1",
"(",
"lat",
",",
"lon",
")",
"+",
"str",
"(",
"e",
")",
"+",
"str",
"(",
"f",
")",
"+",
"str",
"(",
"g",
")",
"def",
"meshcode_lv2",
"(",
"lat",
",",
"lon",
")",
":",
"e",
"=",
"int",
"(",
"rem_lat_lv1",
"(",
"lat",
")",
"/",
"_unit_lat_lv2",
"(",
")",
")",
"f",
"=",
"int",
"(",
"rem_lon_lv1",
"(",
"lon",
")",
"/",
"_unit_lon_lv2",
"(",
")",
")",
"return",
"meshcode_lv1",
"(",
"lat",
",",
"lon",
")",
"+",
"str",
"(",
"e",
")",
"+",
"str",
"(",
"f",
")",
"def",
"meshcode_8000",
"(",
"lat",
",",
"lon",
")",
":",
"e",
"=",
"int",
"(",
"rem_lat_lv1",
"(",
"lat",
")",
"/",
"_unit_lat_8000",
"(",
")",
")",
"f",
"=",
"int",
"(",
"rem_lon_lv1",
"(",
"lon",
")",
"/",
"_unit_lon_8000",
"(",
")",
")",
"g",
"=",
"6",
"return",
"meshcode_lv1",
"(",
"lat",
",",
"lon",
")",
"+",
"str",
"(",
"e",
")",
"+",
"str",
"(",
"f",
")",
"+",
"str",
"(",
"g",
")",
"def",
"meshcode_5000",
"(",
"lat",
",",
"lon",
")",
":",
"g",
"=",
"int",
"(",
"rem_lat_lv2",
"(",
"lat",
")",
"/",
"_unit_lat_5000",
"(",
")",
")",
"*",
"2",
"+",
"int",
"(",
"rem_lon_lv2",
"(",
"lon",
")",
"/",
"_unit_lon_5000",
"(",
")",
")",
"+",
"1",
"return",
"meshcode_lv2",
"(",
"lat",
",",
"lon",
")",
"+",
"str",
"(",
"g",
")",
"def",
"meshcode_4000",
"(",
"lat",
",",
"lon",
")",
":",
"h",
"=",
"int",
"(",
"rem_lat_8000",
"(",
"lat",
")",
"/",
"_unit_lat_4000",
"(",
")",
")",
"*",
"2",
"+",
"int",
"(",
"rem_lon_8000",
"(",
"lon",
")",
"/",
"_unit_lon_4000",
"(",
")",
")",
"+",
"1",
"i",
"=",
"7",
"return",
"meshcode_8000",
"(",
"lat",
",",
"lon",
")",
"+",
"str",
"(",
"h",
")",
"+",
"str",
"(",
"i",
")",
"def",
"meshcode_2500",
"(",
"lat",
",",
"lon",
")",
":",
"h",
"=",
"int",
"(",
"rem_lat_5000",
"(",
"lat",
")",
"/",
"_unit_lat_2500",
"(",
")",
")",
"*",
"2",
"+",
"int",
"(",
"rem_lon_5000",
"(",
"lon",
")",
"/",
"_unit_lon_2500",
"(",
")",
")",
"+",
"1",
"i",
"=",
"6",
"return",
"meshcode_5000",
"(",
"lat",
",",
"lon",
")",
"+",
"str",
"(",
"h",
")",
"+",
"str",
"(",
"i",
")",
"def",
"meshcode_2000",
"(",
"lat",
",",
"lon",
")",
":",
"g",
"=",
"int",
"(",
"rem_lat_lv2",
"(",
"lat",
")",
"/",
"_unit_lat_2000",
"(",
")",
")",
"*",
"2",
"h",
"=",
"int",
"(",
"rem_lon_lv2",
"(",
"lon",
")",
"/",
"_unit_lon_2000",
"(",
")",
")",
"*",
"2",
"i",
"=",
"5",
"return",
"meshcode_lv2",
"(",
"lat",
",",
"lon",
")",
"+",
"str",
"(",
"g",
")",
"+",
"str",
"(",
"h",
")",
"+",
"str",
"(",
"i",
")",
"def",
"meshcode_lv3",
"(",
"lat",
",",
"lon",
")",
":",
"g",
"=",
"int",
"(",
"rem_lat_lv2",
"(",
"lat",
")",
"/",
"_unit_lat_lv3",
"(",
")",
")",
"h",
"=",
"int",
"(",
"rem_lon_lv2",
"(",
"lon",
")",
"/",
"_unit_lon_lv3",
"(",
")",
")",
"return",
"meshcode_lv2",
"(",
"lat",
",",
"lon",
")",
"+",
"str",
"(",
"g",
")",
"+",
"str",
"(",
"h",
")",
"def",
"meshcode_lv4",
"(",
"lat",
",",
"lon",
")",
":",
"i",
"=",
"int",
"(",
"rem_lat_lv3",
"(",
"lat",
")",
"/",
"_unit_lat_lv4",
"(",
")",
")",
"*",
"2",
"+",
"int",
"(",
"rem_lon_lv3",
"(",
"lon",
")",
"/",
"_unit_lon_lv4",
"(",
")",
")",
"+",
"1",
"return",
"meshcode_lv3",
"(",
"lat",
",",
"lon",
")",
"+",
"str",
"(",
"i",
")",
"def",
"meshcode_lv5",
"(",
"lat",
",",
"lon",
")",
":",
"j",
"=",
"int",
"(",
"rem_lat_lv4",
"(",
"lat",
")",
"/",
"_unit_lat_lv5",
"(",
")",
")",
"*",
"2",
"+",
"int",
"(",
"rem_lon_lv4",
"(",
"lon",
")",
"/",
"_unit_lon_lv5",
"(",
")",
")",
"+",
"1",
"return",
"meshcode_lv4",
"(",
"lat",
",",
"lon",
")",
"+",
"str",
"(",
"j",
")",
"def",
"meshcode_lv6",
"(",
"lat",
",",
"lon",
")",
":",
"k",
"=",
"int",
"(",
"rem_lat_lv5",
"(",
"lat",
")",
"/",
"_unit_lat_lv6",
"(",
")",
")",
"*",
"2",
"+",
"int",
"(",
"rem_lon_lv5",
"(",
"lon",
")",
"/",
"_unit_lon_lv6",
"(",
")",
")",
"+",
"1",
"return",
"meshcode_lv5",
"(",
"lat",
",",
"lon",
")",
"+",
"str",
"(",
"k",
")",
"if",
"level",
"==",
"1",
":",
"return",
"meshcode_lv1",
"(",
"lat",
",",
"lon",
")",
"if",
"level",
"==",
"40000",
":",
"return",
"meshcode_40000",
"(",
"lat",
",",
"lon",
")",
"if",
"level",
"==",
"20000",
":",
"return",
"meshcode_20000",
"(",
"lat",
",",
"lon",
")",
"if",
"level",
"==",
"16000",
":",
"return",
"meshcode_16000",
"(",
"lat",
",",
"lon",
")",
"if",
"level",
"==",
"2",
":",
"return",
"meshcode_lv2",
"(",
"lat",
",",
"lon",
")",
"if",
"level",
"==",
"8000",
":",
"return",
"meshcode_8000",
"(",
"lat",
",",
"lon",
")",
"if",
"level",
"==",
"5000",
":",
"return",
"meshcode_5000",
"(",
"lat",
",",
"lon",
")",
"if",
"level",
"==",
"4000",
":",
"return",
"meshcode_4000",
"(",
"lat",
",",
"lon",
")",
"if",
"level",
"==",
"2500",
":",
"return",
"meshcode_2500",
"(",
"lat",
",",
"lon",
")",
"if",
"level",
"==",
"2000",
":",
"return",
"meshcode_2000",
"(",
"lat",
",",
"lon",
")",
"if",
"level",
"==",
"3",
":",
"return",
"meshcode_lv3",
"(",
"lat",
",",
"lon",
")",
"if",
"level",
"==",
"4",
":",
"return",
"meshcode_lv4",
"(",
"lat",
",",
"lon",
")",
"if",
"level",
"==",
"5",
":",
"return",
"meshcode_lv5",
"(",
"lat",
",",
"lon",
")",
"if",
"level",
"==",
"6",
":",
"return",
"meshcode_lv6",
"(",
"lat",
",",
"lon",
")",
"raise",
"ValueError",
"(",
"\"the level is unsupported.\"",
")"
] |
緯度経度から指定次の地域メッシュコードを算出する。
Args:
lat: 世界測地系の緯度(度単位)
lon: 世界測地系の経度(度単位)
level: 地域メッシュコードの次数
1次(80km四方):1
40倍(40km四方):40000
20倍(20km四方):20000
16倍(16km四方):16000
2次(10km四方):2
8倍(8km四方):8000
5倍(5km四方):5000
4倍(4km四方):4000
2.5倍(2.5km四方):2500
2倍(2km四方):2000
3次(1km四方):3
4次(500m四方):4
5次(250m四方):5
6次(125m四方):6
Return:
指定次の地域メッシュコード
|
[
"緯度経度から指定次の地域メッシュコードを算出する。"
] |
bda486ac7828d0adaea2a128154d0a554be7ef37
|
https://github.com/hni14/jismesh/blob/bda486ac7828d0adaea2a128154d0a554be7ef37/jismesh/utils.py#L64-L238
|
train
|
hni14/jismesh
|
jismesh/utils.py
|
to_meshlevel
|
def to_meshlevel(meshcode):
"""メッシュコードから次数を算出する。
Args:
meshcode: メッシュコード
Return:
地域メッシュコードの次数
1次(80km四方):1
40倍(40km四方):40000
20倍(20km四方):20000
16倍(16km四方):16000
2次(10km四方):2
8倍(8km四方):8000
5倍(5km四方):5000
4倍(4km四方):4000
2.5倍(2.5km四方):2500
2倍(2km四方):2000
3次(1km四方):3
4次(500m四方):4
5次(250m四方):5
6次(125m四方):6
"""
length = len(str(meshcode))
if length == 4:
return 1
if length == 5:
return 40000
if length == 6:
return 2
if length == 7:
if meshcode[6:7] in ['1','2','3','4']:
return 5000
if meshcode[6:7] == '6':
return 8000
if meshcode[6:7] == '5':
return 20000
if meshcode[6:7] == '7':
return 16000
if length == 8:
return 3
if length == 9:
if meshcode[8:9] in ['1','2','3','4']:
return 4
if meshcode[8:9] == '5':
return 2000
if meshcode[8:9] == '6':
return 2500
if meshcode[8:9] == '7':
return 4000
if length == 10:
if meshcode[9:10] in ['1','2','3','4']:
return 5
if length == 11:
if meshcode[10:11] in ['1','2','3','4']:
return 6
raise ValueError('the meshcode is unsupported.')
|
python
|
def to_meshlevel(meshcode):
"""メッシュコードから次数を算出する。
Args:
meshcode: メッシュコード
Return:
地域メッシュコードの次数
1次(80km四方):1
40倍(40km四方):40000
20倍(20km四方):20000
16倍(16km四方):16000
2次(10km四方):2
8倍(8km四方):8000
5倍(5km四方):5000
4倍(4km四方):4000
2.5倍(2.5km四方):2500
2倍(2km四方):2000
3次(1km四方):3
4次(500m四方):4
5次(250m四方):5
6次(125m四方):6
"""
length = len(str(meshcode))
if length == 4:
return 1
if length == 5:
return 40000
if length == 6:
return 2
if length == 7:
if meshcode[6:7] in ['1','2','3','4']:
return 5000
if meshcode[6:7] == '6':
return 8000
if meshcode[6:7] == '5':
return 20000
if meshcode[6:7] == '7':
return 16000
if length == 8:
return 3
if length == 9:
if meshcode[8:9] in ['1','2','3','4']:
return 4
if meshcode[8:9] == '5':
return 2000
if meshcode[8:9] == '6':
return 2500
if meshcode[8:9] == '7':
return 4000
if length == 10:
if meshcode[9:10] in ['1','2','3','4']:
return 5
if length == 11:
if meshcode[10:11] in ['1','2','3','4']:
return 6
raise ValueError('the meshcode is unsupported.')
|
[
"def",
"to_meshlevel",
"(",
"meshcode",
")",
":",
"length",
"=",
"len",
"(",
"str",
"(",
"meshcode",
")",
")",
"if",
"length",
"==",
"4",
":",
"return",
"1",
"if",
"length",
"==",
"5",
":",
"return",
"40000",
"if",
"length",
"==",
"6",
":",
"return",
"2",
"if",
"length",
"==",
"7",
":",
"if",
"meshcode",
"[",
"6",
":",
"7",
"]",
"in",
"[",
"'1'",
",",
"'2'",
",",
"'3'",
",",
"'4'",
"]",
":",
"return",
"5000",
"if",
"meshcode",
"[",
"6",
":",
"7",
"]",
"==",
"'6'",
":",
"return",
"8000",
"if",
"meshcode",
"[",
"6",
":",
"7",
"]",
"==",
"'5'",
":",
"return",
"20000",
"if",
"meshcode",
"[",
"6",
":",
"7",
"]",
"==",
"'7'",
":",
"return",
"16000",
"if",
"length",
"==",
"8",
":",
"return",
"3",
"if",
"length",
"==",
"9",
":",
"if",
"meshcode",
"[",
"8",
":",
"9",
"]",
"in",
"[",
"'1'",
",",
"'2'",
",",
"'3'",
",",
"'4'",
"]",
":",
"return",
"4",
"if",
"meshcode",
"[",
"8",
":",
"9",
"]",
"==",
"'5'",
":",
"return",
"2000",
"if",
"meshcode",
"[",
"8",
":",
"9",
"]",
"==",
"'6'",
":",
"return",
"2500",
"if",
"meshcode",
"[",
"8",
":",
"9",
"]",
"==",
"'7'",
":",
"return",
"4000",
"if",
"length",
"==",
"10",
":",
"if",
"meshcode",
"[",
"9",
":",
"10",
"]",
"in",
"[",
"'1'",
",",
"'2'",
",",
"'3'",
",",
"'4'",
"]",
":",
"return",
"5",
"if",
"length",
"==",
"11",
":",
"if",
"meshcode",
"[",
"10",
":",
"11",
"]",
"in",
"[",
"'1'",
",",
"'2'",
",",
"'3'",
",",
"'4'",
"]",
":",
"return",
"6",
"raise",
"ValueError",
"(",
"'the meshcode is unsupported.'",
")"
] |
メッシュコードから次数を算出する。
Args:
meshcode: メッシュコード
Return:
地域メッシュコードの次数
1次(80km四方):1
40倍(40km四方):40000
20倍(20km四方):20000
16倍(16km四方):16000
2次(10km四方):2
8倍(8km四方):8000
5倍(5km四方):5000
4倍(4km四方):4000
2.5倍(2.5km四方):2500
2倍(2km四方):2000
3次(1km四方):3
4次(500m四方):4
5次(250m四方):5
6次(125m四方):6
|
[
"メッシュコードから次数を算出する。"
] |
bda486ac7828d0adaea2a128154d0a554be7ef37
|
https://github.com/hni14/jismesh/blob/bda486ac7828d0adaea2a128154d0a554be7ef37/jismesh/utils.py#L240-L310
|
train
|
hni14/jismesh
|
jismesh/utils.py
|
to_meshpoint
|
def to_meshpoint(meshcode, lat_multiplier, lon_multiplier):
"""地域メッシュコードから緯度経度を算出する。
下記のメッシュに対応している。
1次(80km四方):1
40倍(40km四方):40000
20倍(20km四方):20000
16倍(16km四方):16000
2次(10km四方):2
8倍(8km四方):8000
5倍(5km四方):5000
4倍(4km四方):4000
2.5倍(2.5km四方):2500
2倍(2km四方):2000
3次(1km四方):3
4次(500m四方):4
5次(250m四方):5
6次(125m四方):6
Args:
meshcode: 指定次の地域メッシュコード
lat_multiplier: 当該メッシュの基準点(南西端)から、緯度座標上の点の位置を当該メッシュの単位緯度の倍数で指定
lon_multiplier: 当該メッシュの基準点(南西端)から、経度座標上の点の位置を当該メッシュの単位経度の倍数で指定
Return:
lat: 世界測地系の緯度(度単位)
lon: 世界測地系の経度(度単位)
"""
def mesh_cord(func_higher_cord, func_unit_cord, func_multiplier):
return func_higher_cord() + func_unit_cord() * func_multiplier()
lat_multiplier_lv = lambda: lat_multiplier
lon_multiplier_lv = lambda: lon_multiplier
lat_multiplier_lv1 = _functools.partial(
lambda meshcode: int(meshcode[0:2]), meshcode=meshcode)
lon_multiplier_lv1 = _functools.partial(
lambda meshcode: int(meshcode[2:4]), meshcode=meshcode)
lat_multiplier_40000 = _functools.partial(
lambda meshcode: int(bin(int(meshcode[4:5])-1)[2:].zfill(2)[0:1]), meshcode=meshcode)
lon_multiplier_40000 = _functools.partial(
lambda meshcode: int(bin(int(meshcode[4:5])-1)[2:].zfill(2)[1:2]), meshcode=meshcode)
lat_multiplier_20000 = _functools.partial(
lambda meshcode: int(bin(int(meshcode[5:6])-1)[2:].zfill(2)[0:1]), meshcode=meshcode)
lon_multiplier_20000 = _functools.partial(
lambda meshcode: int(bin(int(meshcode[5:6])-1)[2:].zfill(2)[1:2]), meshcode=meshcode)
lat_multiplier_16000 = _functools.partial(
lambda meshcode: int(meshcode[4:5])/2, meshcode=meshcode)
lon_multiplier_16000 = _functools.partial(
lambda meshcode: int(meshcode[5:6])/2, meshcode=meshcode)
lat_multiplier_lv2 = _functools.partial(
lambda meshcode: int(meshcode[4:5]), meshcode=meshcode)
lon_multiplier_lv2 = _functools.partial(
lambda meshcode: int(meshcode[5:6]), meshcode=meshcode)
lat_multiplier_8000 = _functools.partial(
lambda meshcode: int(meshcode[4:5]), meshcode=meshcode)
lon_multiplier_8000 = _functools.partial(
lambda meshcode: int(meshcode[5:6]), meshcode=meshcode)
lat_multiplier_5000 = _functools.partial(
lambda meshcode: int(bin(int(meshcode[6:7])-1)[2:].zfill(2)[0:1]), meshcode=meshcode)
lon_multiplier_5000 = _functools.partial(
lambda meshcode: int(bin(int(meshcode[6:7])-1)[2:].zfill(2)[1:2]), meshcode=meshcode)
lat_multiplier_4000 = _functools.partial(
lambda meshcode: int(bin(int(meshcode[7:8])-1)[2:].zfill(2)[0:1]), meshcode=meshcode)
lon_multiplier_4000 = _functools.partial(
lambda meshcode: int(bin(int(meshcode[7:8])-1)[2:].zfill(2)[1:2]), meshcode=meshcode)
lat_multiplier_2500 = _functools.partial(
lambda meshcode: int(bin(int(meshcode[7:8])-1)[2:].zfill(2)[0:1]), meshcode=meshcode)
lon_multiplier_2500 = _functools.partial(
lambda meshcode: int(bin(int(meshcode[7:8])-1)[2:].zfill(2)[1:2]), meshcode=meshcode)
lat_multiplier_2000 = _functools.partial(
lambda meshcode: int(meshcode[6:7])/2, meshcode=meshcode)
lon_multiplier_2000 = _functools.partial(
lambda meshcode: int(meshcode[7:8])/2, meshcode=meshcode)
lat_multiplier_lv3 = _functools.partial(
lambda meshcode: int(meshcode[6:7]), meshcode=meshcode)
lon_multiplier_lv3 = _functools.partial(
lambda meshcode: int(meshcode[7:8]), meshcode=meshcode)
lat_multiplier_lv4 = _functools.partial(
lambda meshcode: int(bin(int(meshcode[8:9])-1)[2:].zfill(2)[0:1]), meshcode=meshcode)
lon_multiplier_lv4 = _functools.partial(
lambda meshcode: int(bin(int(meshcode[8:9])-1)[2:].zfill(2)[1:2]), meshcode=meshcode)
lat_multiplier_lv5 = _functools.partial(
lambda meshcode: int(bin(int(meshcode[9:10])-1)[2:].zfill(2)[0:1]), meshcode=meshcode)
lon_multiplier_lv5 = _functools.partial(
lambda meshcode: int(bin(int(meshcode[9:10])-1)[2:].zfill(2)[1:2]), meshcode=meshcode)
lat_multiplier_lv6 = _functools.partial(
lambda meshcode: int(bin(int(meshcode[10:11])-1)[2:].zfill(2)[0:1]), meshcode=meshcode)
lon_multiplier_lv6 = _functools.partial(
lambda meshcode: int(bin(int(meshcode[10:11])-1)[2:].zfill(2)[1:2]), meshcode=meshcode)
mesh_lv1_default_lat = _functools.partial(
mesh_cord,
func_higher_cord=lambda: 0,
func_unit_cord=_unit_lat_lv1,
func_multiplier=lat_multiplier_lv1)
mesh_lv1_default_lon = _functools.partial(
mesh_cord,
func_higher_cord=lambda: 100,
func_unit_cord=_unit_lon_lv1,
func_multiplier=lon_multiplier_lv1)
mesh_40000_default_lat = _functools.partial(
mesh_cord,
func_higher_cord=mesh_lv1_default_lat,
func_unit_cord=_unit_lat_40000,
func_multiplier=lat_multiplier_40000)
mesh_40000_default_lon = _functools.partial(
mesh_cord,
func_higher_cord=mesh_lv1_default_lon,
func_unit_cord=_unit_lon_40000,
func_multiplier=lon_multiplier_40000)
mesh_20000_default_lat = _functools.partial(
mesh_cord,
func_higher_cord=mesh_40000_default_lat,
func_unit_cord=_unit_lat_20000,
func_multiplier=lat_multiplier_20000)
mesh_20000_default_lon = _functools.partial(
mesh_cord,
func_higher_cord=mesh_40000_default_lon,
func_unit_cord=_unit_lon_20000,
func_multiplier=lon_multiplier_20000)
mesh_16000_default_lat = _functools.partial(
mesh_cord,
func_higher_cord=mesh_lv1_default_lat,
func_unit_cord=_unit_lat_16000,
func_multiplier=lat_multiplier_16000)
mesh_16000_default_lon = _functools.partial(
mesh_cord,
func_higher_cord=mesh_lv1_default_lon,
func_unit_cord=_unit_lon_16000,
func_multiplier=lon_multiplier_16000)
mesh_lv2_default_lat = _functools.partial(
mesh_cord,
func_higher_cord=mesh_lv1_default_lat,
func_unit_cord=_unit_lat_lv2,
func_multiplier=lat_multiplier_lv2)
mesh_lv2_default_lon = _functools.partial(
mesh_cord,
func_higher_cord=mesh_lv1_default_lon,
func_unit_cord=_unit_lon_lv2,
func_multiplier=lon_multiplier_lv2)
mesh_8000_default_lat = _functools.partial(
mesh_cord,
func_higher_cord=mesh_lv1_default_lat,
func_unit_cord=_unit_lat_8000,
func_multiplier=lat_multiplier_8000)
mesh_8000_default_lon = _functools.partial(
mesh_cord,
func_higher_cord=mesh_lv1_default_lon,
func_unit_cord=_unit_lon_8000,
func_multiplier=lon_multiplier_8000)
mesh_5000_default_lat = _functools.partial(
mesh_cord,
func_higher_cord=mesh_lv2_default_lat,
func_unit_cord=_unit_lat_5000,
func_multiplier=lat_multiplier_5000)
mesh_5000_default_lon = _functools.partial(
mesh_cord,
func_higher_cord=mesh_lv2_default_lon,
func_unit_cord=_unit_lon_5000,
func_multiplier=lon_multiplier_5000)
mesh_4000_default_lat = _functools.partial(
mesh_cord,
func_higher_cord=mesh_8000_default_lat,
func_unit_cord=_unit_lat_4000,
func_multiplier=lat_multiplier_4000)
mesh_4000_default_lon = _functools.partial(
mesh_cord,
func_higher_cord=mesh_8000_default_lon,
func_unit_cord=_unit_lon_4000,
func_multiplier=lon_multiplier_4000)
mesh_2500_default_lat = _functools.partial(
mesh_cord,
func_higher_cord=mesh_5000_default_lat,
func_unit_cord=_unit_lat_2500,
func_multiplier=lat_multiplier_2500)
mesh_2500_default_lon = _functools.partial(
mesh_cord,
func_higher_cord=mesh_5000_default_lon,
func_unit_cord=_unit_lon_2500,
func_multiplier=lon_multiplier_2500)
mesh_2000_default_lat = _functools.partial(
mesh_cord,
func_higher_cord=mesh_lv2_default_lat,
func_unit_cord=_unit_lat_2000,
func_multiplier=lat_multiplier_2000)
mesh_2000_default_lon = _functools.partial(
mesh_cord,
func_higher_cord=mesh_lv2_default_lon,
func_unit_cord=_unit_lon_2000,
func_multiplier=lon_multiplier_2000)
mesh_lv3_default_lat = _functools.partial(
mesh_cord,
func_higher_cord=mesh_lv2_default_lat,
func_unit_cord=_unit_lat_lv3,
func_multiplier=lat_multiplier_lv3)
mesh_lv3_default_lon = _functools.partial(
mesh_cord,
func_higher_cord=mesh_lv2_default_lon,
func_unit_cord=_unit_lon_lv3,
func_multiplier=lon_multiplier_lv3)
mesh_lv4_default_lat = _functools.partial(
mesh_cord,
func_higher_cord=mesh_lv3_default_lat,
func_unit_cord=_unit_lat_lv4,
func_multiplier=lat_multiplier_lv4)
mesh_lv4_default_lon = _functools.partial(
mesh_cord,
func_higher_cord=mesh_lv3_default_lon,
func_unit_cord=_unit_lon_lv4,
func_multiplier=lon_multiplier_lv4)
mesh_lv5_default_lat = _functools.partial(
mesh_cord,
func_higher_cord=mesh_lv4_default_lat,
func_unit_cord=_unit_lat_lv5,
func_multiplier=lat_multiplier_lv5)
mesh_lv5_default_lon = _functools.partial(
mesh_cord,
func_higher_cord=mesh_lv4_default_lon,
func_unit_cord=_unit_lon_lv5,
func_multiplier=lon_multiplier_lv5)
mesh_lv6_default_lat = _functools.partial(
mesh_cord,
func_higher_cord=mesh_lv5_default_lat,
func_unit_cord=_unit_lat_lv6,
func_multiplier=lat_multiplier_lv6)
mesh_lv6_default_lon = _functools.partial(
mesh_cord,
func_higher_cord=mesh_lv5_default_lon,
func_unit_cord=_unit_lon_lv6,
func_multiplier=lon_multiplier_lv6)
mesh_lv1_lat = _functools.partial(
mesh_cord,
func_higher_cord=mesh_lv1_default_lat,
func_unit_cord=_unit_lat_lv1,
func_multiplier=lat_multiplier_lv)
mesh_lv1_lon = _functools.partial(
mesh_cord,
func_higher_cord=mesh_lv1_default_lon,
func_unit_cord=_unit_lon_lv1,
func_multiplier=lon_multiplier_lv)
mesh_40000_lat = _functools.partial(
mesh_cord,
func_higher_cord=mesh_40000_default_lat,
func_unit_cord=_unit_lat_40000,
func_multiplier=lat_multiplier_lv)
mesh_40000_lon = _functools.partial(
mesh_cord,
func_higher_cord=mesh_40000_default_lon,
func_unit_cord=_unit_lon_40000,
func_multiplier=lon_multiplier_lv)
mesh_20000_lat = _functools.partial(
mesh_cord,
func_higher_cord=mesh_20000_default_lat,
func_unit_cord=_unit_lat_20000,
func_multiplier=lat_multiplier_lv)
mesh_20000_lon = _functools.partial(
mesh_cord,
func_higher_cord=mesh_20000_default_lon,
func_unit_cord=_unit_lon_20000,
func_multiplier=lon_multiplier_lv)
mesh_16000_lat = _functools.partial(
mesh_cord,
func_higher_cord=mesh_16000_default_lat,
func_unit_cord=_unit_lat_16000,
func_multiplier=lat_multiplier_lv)
mesh_16000_lon = _functools.partial(
mesh_cord,
func_higher_cord=mesh_16000_default_lon,
func_unit_cord=_unit_lon_16000,
func_multiplier=lon_multiplier_lv)
mesh_lv2_lat = _functools.partial(
mesh_cord,
func_higher_cord=mesh_lv2_default_lat,
func_unit_cord=_unit_lat_lv2,
func_multiplier=lat_multiplier_lv)
mesh_lv2_lon = _functools.partial(
mesh_cord,
func_higher_cord=mesh_lv2_default_lon,
func_unit_cord=_unit_lon_lv2,
func_multiplier=lon_multiplier_lv)
mesh_8000_lat = _functools.partial(
mesh_cord,
func_higher_cord=mesh_8000_default_lat,
func_unit_cord=_unit_lat_8000,
func_multiplier=lat_multiplier_lv)
mesh_8000_lon = _functools.partial(
mesh_cord,
func_higher_cord=mesh_8000_default_lon,
func_unit_cord=_unit_lon_8000,
func_multiplier=lon_multiplier_lv)
mesh_5000_lat = _functools.partial(
mesh_cord,
func_higher_cord=mesh_5000_default_lat,
func_unit_cord=_unit_lat_5000,
func_multiplier=lat_multiplier_lv)
mesh_5000_lon = _functools.partial(
mesh_cord,
func_higher_cord=mesh_5000_default_lon,
func_unit_cord=_unit_lon_5000,
func_multiplier=lon_multiplier_lv)
mesh_4000_lat = _functools.partial(
mesh_cord,
func_higher_cord=mesh_4000_default_lat,
func_unit_cord=_unit_lat_4000,
func_multiplier=lat_multiplier_lv)
mesh_4000_lon = _functools.partial(
mesh_cord,
func_higher_cord=mesh_4000_default_lon,
func_unit_cord=_unit_lon_4000,
func_multiplier=lon_multiplier_lv)
mesh_2500_lat = _functools.partial(
mesh_cord,
func_higher_cord=mesh_2500_default_lat,
func_unit_cord=_unit_lat_2500,
func_multiplier=lat_multiplier_lv)
mesh_2500_lon = _functools.partial(
mesh_cord,
func_higher_cord=mesh_2500_default_lon,
func_unit_cord=_unit_lon_2500,
func_multiplier=lon_multiplier_lv)
mesh_2000_lat = _functools.partial(
mesh_cord,
func_higher_cord=mesh_2000_default_lat,
func_unit_cord=_unit_lat_2000,
func_multiplier=lat_multiplier_lv)
mesh_2000_lon = _functools.partial(
mesh_cord,
func_higher_cord=mesh_2000_default_lon,
func_unit_cord=_unit_lon_2000,
func_multiplier=lon_multiplier_lv)
mesh_lv3_lat = _functools.partial(
mesh_cord,
func_higher_cord=mesh_lv3_default_lat,
func_unit_cord=_unit_lat_lv3,
func_multiplier=lat_multiplier_lv)
mesh_lv3_lon = _functools.partial(
mesh_cord,
func_higher_cord=mesh_lv3_default_lon,
func_unit_cord=_unit_lon_lv3,
func_multiplier=lon_multiplier_lv)
mesh_lv4_lat = _functools.partial(
mesh_cord,
func_higher_cord=mesh_lv4_default_lat,
func_unit_cord=_unit_lat_lv4,
func_multiplier=lat_multiplier_lv)
mesh_lv4_lon = _functools.partial(
mesh_cord,
func_higher_cord=mesh_lv4_default_lon,
func_unit_cord=_unit_lon_lv4,
func_multiplier=lon_multiplier_lv)
mesh_lv5_lat = _functools.partial(
mesh_cord,
func_higher_cord=mesh_lv5_default_lat,
func_unit_cord=_unit_lat_lv5,
func_multiplier=lat_multiplier_lv)
mesh_lv5_lon = _functools.partial(
mesh_cord,
func_higher_cord=mesh_lv5_default_lon,
func_unit_cord=_unit_lon_lv5,
func_multiplier=lon_multiplier_lv)
mesh_lv6_lat = _functools.partial(
mesh_cord,
func_higher_cord=mesh_lv6_default_lat,
func_unit_cord=_unit_lat_lv6,
func_multiplier=lat_multiplier_lv)
mesh_lv6_lon = _functools.partial(
mesh_cord,
func_higher_cord=mesh_lv6_default_lon,
func_unit_cord=_unit_lon_lv6,
func_multiplier=lon_multiplier_lv)
level = to_meshlevel(meshcode)
if level == 1:
return mesh_lv1_lat(), mesh_lv1_lon()
if level == 40000:
return mesh_40000_lat(), mesh_40000_lon()
if level == 20000:
return mesh_20000_lat(), mesh_20000_lon()
if level == 16000:
return mesh_16000_lat(), mesh_16000_lon()
if level == 2:
return mesh_lv2_lat(), mesh_lv2_lon()
if level == 8000:
return mesh_8000_lat(), mesh_8000_lon()
if level == 5000:
return mesh_5000_lat(), mesh_5000_lon()
if level == 4000:
return mesh_4000_lat(), mesh_4000_lon()
if level == 2500:
return mesh_2500_lat(), mesh_2500_lon()
if level == 2000:
return mesh_2000_lat(), mesh_2000_lon()
if level == 3:
return mesh_lv3_lat(), mesh_lv3_lon()
if level == 4:
return mesh_lv4_lat(), mesh_lv4_lon()
if level == 5:
return mesh_lv5_lat(), mesh_lv5_lon()
if level == 6:
return mesh_lv6_lat(), mesh_lv6_lon()
raise ValueError("the level is unsupported.")
|
python
|
def to_meshpoint(meshcode, lat_multiplier, lon_multiplier):
"""地域メッシュコードから緯度経度を算出する。
下記のメッシュに対応している。
1次(80km四方):1
40倍(40km四方):40000
20倍(20km四方):20000
16倍(16km四方):16000
2次(10km四方):2
8倍(8km四方):8000
5倍(5km四方):5000
4倍(4km四方):4000
2.5倍(2.5km四方):2500
2倍(2km四方):2000
3次(1km四方):3
4次(500m四方):4
5次(250m四方):5
6次(125m四方):6
Args:
meshcode: 指定次の地域メッシュコード
lat_multiplier: 当該メッシュの基準点(南西端)から、緯度座標上の点の位置を当該メッシュの単位緯度の倍数で指定
lon_multiplier: 当該メッシュの基準点(南西端)から、経度座標上の点の位置を当該メッシュの単位経度の倍数で指定
Return:
lat: 世界測地系の緯度(度単位)
lon: 世界測地系の経度(度単位)
"""
def mesh_cord(func_higher_cord, func_unit_cord, func_multiplier):
return func_higher_cord() + func_unit_cord() * func_multiplier()
lat_multiplier_lv = lambda: lat_multiplier
lon_multiplier_lv = lambda: lon_multiplier
lat_multiplier_lv1 = _functools.partial(
lambda meshcode: int(meshcode[0:2]), meshcode=meshcode)
lon_multiplier_lv1 = _functools.partial(
lambda meshcode: int(meshcode[2:4]), meshcode=meshcode)
lat_multiplier_40000 = _functools.partial(
lambda meshcode: int(bin(int(meshcode[4:5])-1)[2:].zfill(2)[0:1]), meshcode=meshcode)
lon_multiplier_40000 = _functools.partial(
lambda meshcode: int(bin(int(meshcode[4:5])-1)[2:].zfill(2)[1:2]), meshcode=meshcode)
lat_multiplier_20000 = _functools.partial(
lambda meshcode: int(bin(int(meshcode[5:6])-1)[2:].zfill(2)[0:1]), meshcode=meshcode)
lon_multiplier_20000 = _functools.partial(
lambda meshcode: int(bin(int(meshcode[5:6])-1)[2:].zfill(2)[1:2]), meshcode=meshcode)
lat_multiplier_16000 = _functools.partial(
lambda meshcode: int(meshcode[4:5])/2, meshcode=meshcode)
lon_multiplier_16000 = _functools.partial(
lambda meshcode: int(meshcode[5:6])/2, meshcode=meshcode)
lat_multiplier_lv2 = _functools.partial(
lambda meshcode: int(meshcode[4:5]), meshcode=meshcode)
lon_multiplier_lv2 = _functools.partial(
lambda meshcode: int(meshcode[5:6]), meshcode=meshcode)
lat_multiplier_8000 = _functools.partial(
lambda meshcode: int(meshcode[4:5]), meshcode=meshcode)
lon_multiplier_8000 = _functools.partial(
lambda meshcode: int(meshcode[5:6]), meshcode=meshcode)
lat_multiplier_5000 = _functools.partial(
lambda meshcode: int(bin(int(meshcode[6:7])-1)[2:].zfill(2)[0:1]), meshcode=meshcode)
lon_multiplier_5000 = _functools.partial(
lambda meshcode: int(bin(int(meshcode[6:7])-1)[2:].zfill(2)[1:2]), meshcode=meshcode)
lat_multiplier_4000 = _functools.partial(
lambda meshcode: int(bin(int(meshcode[7:8])-1)[2:].zfill(2)[0:1]), meshcode=meshcode)
lon_multiplier_4000 = _functools.partial(
lambda meshcode: int(bin(int(meshcode[7:8])-1)[2:].zfill(2)[1:2]), meshcode=meshcode)
lat_multiplier_2500 = _functools.partial(
lambda meshcode: int(bin(int(meshcode[7:8])-1)[2:].zfill(2)[0:1]), meshcode=meshcode)
lon_multiplier_2500 = _functools.partial(
lambda meshcode: int(bin(int(meshcode[7:8])-1)[2:].zfill(2)[1:2]), meshcode=meshcode)
lat_multiplier_2000 = _functools.partial(
lambda meshcode: int(meshcode[6:7])/2, meshcode=meshcode)
lon_multiplier_2000 = _functools.partial(
lambda meshcode: int(meshcode[7:8])/2, meshcode=meshcode)
lat_multiplier_lv3 = _functools.partial(
lambda meshcode: int(meshcode[6:7]), meshcode=meshcode)
lon_multiplier_lv3 = _functools.partial(
lambda meshcode: int(meshcode[7:8]), meshcode=meshcode)
lat_multiplier_lv4 = _functools.partial(
lambda meshcode: int(bin(int(meshcode[8:9])-1)[2:].zfill(2)[0:1]), meshcode=meshcode)
lon_multiplier_lv4 = _functools.partial(
lambda meshcode: int(bin(int(meshcode[8:9])-1)[2:].zfill(2)[1:2]), meshcode=meshcode)
lat_multiplier_lv5 = _functools.partial(
lambda meshcode: int(bin(int(meshcode[9:10])-1)[2:].zfill(2)[0:1]), meshcode=meshcode)
lon_multiplier_lv5 = _functools.partial(
lambda meshcode: int(bin(int(meshcode[9:10])-1)[2:].zfill(2)[1:2]), meshcode=meshcode)
lat_multiplier_lv6 = _functools.partial(
lambda meshcode: int(bin(int(meshcode[10:11])-1)[2:].zfill(2)[0:1]), meshcode=meshcode)
lon_multiplier_lv6 = _functools.partial(
lambda meshcode: int(bin(int(meshcode[10:11])-1)[2:].zfill(2)[1:2]), meshcode=meshcode)
mesh_lv1_default_lat = _functools.partial(
mesh_cord,
func_higher_cord=lambda: 0,
func_unit_cord=_unit_lat_lv1,
func_multiplier=lat_multiplier_lv1)
mesh_lv1_default_lon = _functools.partial(
mesh_cord,
func_higher_cord=lambda: 100,
func_unit_cord=_unit_lon_lv1,
func_multiplier=lon_multiplier_lv1)
mesh_40000_default_lat = _functools.partial(
mesh_cord,
func_higher_cord=mesh_lv1_default_lat,
func_unit_cord=_unit_lat_40000,
func_multiplier=lat_multiplier_40000)
mesh_40000_default_lon = _functools.partial(
mesh_cord,
func_higher_cord=mesh_lv1_default_lon,
func_unit_cord=_unit_lon_40000,
func_multiplier=lon_multiplier_40000)
mesh_20000_default_lat = _functools.partial(
mesh_cord,
func_higher_cord=mesh_40000_default_lat,
func_unit_cord=_unit_lat_20000,
func_multiplier=lat_multiplier_20000)
mesh_20000_default_lon = _functools.partial(
mesh_cord,
func_higher_cord=mesh_40000_default_lon,
func_unit_cord=_unit_lon_20000,
func_multiplier=lon_multiplier_20000)
mesh_16000_default_lat = _functools.partial(
mesh_cord,
func_higher_cord=mesh_lv1_default_lat,
func_unit_cord=_unit_lat_16000,
func_multiplier=lat_multiplier_16000)
mesh_16000_default_lon = _functools.partial(
mesh_cord,
func_higher_cord=mesh_lv1_default_lon,
func_unit_cord=_unit_lon_16000,
func_multiplier=lon_multiplier_16000)
mesh_lv2_default_lat = _functools.partial(
mesh_cord,
func_higher_cord=mesh_lv1_default_lat,
func_unit_cord=_unit_lat_lv2,
func_multiplier=lat_multiplier_lv2)
mesh_lv2_default_lon = _functools.partial(
mesh_cord,
func_higher_cord=mesh_lv1_default_lon,
func_unit_cord=_unit_lon_lv2,
func_multiplier=lon_multiplier_lv2)
mesh_8000_default_lat = _functools.partial(
mesh_cord,
func_higher_cord=mesh_lv1_default_lat,
func_unit_cord=_unit_lat_8000,
func_multiplier=lat_multiplier_8000)
mesh_8000_default_lon = _functools.partial(
mesh_cord,
func_higher_cord=mesh_lv1_default_lon,
func_unit_cord=_unit_lon_8000,
func_multiplier=lon_multiplier_8000)
mesh_5000_default_lat = _functools.partial(
mesh_cord,
func_higher_cord=mesh_lv2_default_lat,
func_unit_cord=_unit_lat_5000,
func_multiplier=lat_multiplier_5000)
mesh_5000_default_lon = _functools.partial(
mesh_cord,
func_higher_cord=mesh_lv2_default_lon,
func_unit_cord=_unit_lon_5000,
func_multiplier=lon_multiplier_5000)
mesh_4000_default_lat = _functools.partial(
mesh_cord,
func_higher_cord=mesh_8000_default_lat,
func_unit_cord=_unit_lat_4000,
func_multiplier=lat_multiplier_4000)
mesh_4000_default_lon = _functools.partial(
mesh_cord,
func_higher_cord=mesh_8000_default_lon,
func_unit_cord=_unit_lon_4000,
func_multiplier=lon_multiplier_4000)
mesh_2500_default_lat = _functools.partial(
mesh_cord,
func_higher_cord=mesh_5000_default_lat,
func_unit_cord=_unit_lat_2500,
func_multiplier=lat_multiplier_2500)
mesh_2500_default_lon = _functools.partial(
mesh_cord,
func_higher_cord=mesh_5000_default_lon,
func_unit_cord=_unit_lon_2500,
func_multiplier=lon_multiplier_2500)
mesh_2000_default_lat = _functools.partial(
mesh_cord,
func_higher_cord=mesh_lv2_default_lat,
func_unit_cord=_unit_lat_2000,
func_multiplier=lat_multiplier_2000)
mesh_2000_default_lon = _functools.partial(
mesh_cord,
func_higher_cord=mesh_lv2_default_lon,
func_unit_cord=_unit_lon_2000,
func_multiplier=lon_multiplier_2000)
mesh_lv3_default_lat = _functools.partial(
mesh_cord,
func_higher_cord=mesh_lv2_default_lat,
func_unit_cord=_unit_lat_lv3,
func_multiplier=lat_multiplier_lv3)
mesh_lv3_default_lon = _functools.partial(
mesh_cord,
func_higher_cord=mesh_lv2_default_lon,
func_unit_cord=_unit_lon_lv3,
func_multiplier=lon_multiplier_lv3)
mesh_lv4_default_lat = _functools.partial(
mesh_cord,
func_higher_cord=mesh_lv3_default_lat,
func_unit_cord=_unit_lat_lv4,
func_multiplier=lat_multiplier_lv4)
mesh_lv4_default_lon = _functools.partial(
mesh_cord,
func_higher_cord=mesh_lv3_default_lon,
func_unit_cord=_unit_lon_lv4,
func_multiplier=lon_multiplier_lv4)
mesh_lv5_default_lat = _functools.partial(
mesh_cord,
func_higher_cord=mesh_lv4_default_lat,
func_unit_cord=_unit_lat_lv5,
func_multiplier=lat_multiplier_lv5)
mesh_lv5_default_lon = _functools.partial(
mesh_cord,
func_higher_cord=mesh_lv4_default_lon,
func_unit_cord=_unit_lon_lv5,
func_multiplier=lon_multiplier_lv5)
mesh_lv6_default_lat = _functools.partial(
mesh_cord,
func_higher_cord=mesh_lv5_default_lat,
func_unit_cord=_unit_lat_lv6,
func_multiplier=lat_multiplier_lv6)
mesh_lv6_default_lon = _functools.partial(
mesh_cord,
func_higher_cord=mesh_lv5_default_lon,
func_unit_cord=_unit_lon_lv6,
func_multiplier=lon_multiplier_lv6)
mesh_lv1_lat = _functools.partial(
mesh_cord,
func_higher_cord=mesh_lv1_default_lat,
func_unit_cord=_unit_lat_lv1,
func_multiplier=lat_multiplier_lv)
mesh_lv1_lon = _functools.partial(
mesh_cord,
func_higher_cord=mesh_lv1_default_lon,
func_unit_cord=_unit_lon_lv1,
func_multiplier=lon_multiplier_lv)
mesh_40000_lat = _functools.partial(
mesh_cord,
func_higher_cord=mesh_40000_default_lat,
func_unit_cord=_unit_lat_40000,
func_multiplier=lat_multiplier_lv)
mesh_40000_lon = _functools.partial(
mesh_cord,
func_higher_cord=mesh_40000_default_lon,
func_unit_cord=_unit_lon_40000,
func_multiplier=lon_multiplier_lv)
mesh_20000_lat = _functools.partial(
mesh_cord,
func_higher_cord=mesh_20000_default_lat,
func_unit_cord=_unit_lat_20000,
func_multiplier=lat_multiplier_lv)
mesh_20000_lon = _functools.partial(
mesh_cord,
func_higher_cord=mesh_20000_default_lon,
func_unit_cord=_unit_lon_20000,
func_multiplier=lon_multiplier_lv)
mesh_16000_lat = _functools.partial(
mesh_cord,
func_higher_cord=mesh_16000_default_lat,
func_unit_cord=_unit_lat_16000,
func_multiplier=lat_multiplier_lv)
mesh_16000_lon = _functools.partial(
mesh_cord,
func_higher_cord=mesh_16000_default_lon,
func_unit_cord=_unit_lon_16000,
func_multiplier=lon_multiplier_lv)
mesh_lv2_lat = _functools.partial(
mesh_cord,
func_higher_cord=mesh_lv2_default_lat,
func_unit_cord=_unit_lat_lv2,
func_multiplier=lat_multiplier_lv)
mesh_lv2_lon = _functools.partial(
mesh_cord,
func_higher_cord=mesh_lv2_default_lon,
func_unit_cord=_unit_lon_lv2,
func_multiplier=lon_multiplier_lv)
mesh_8000_lat = _functools.partial(
mesh_cord,
func_higher_cord=mesh_8000_default_lat,
func_unit_cord=_unit_lat_8000,
func_multiplier=lat_multiplier_lv)
mesh_8000_lon = _functools.partial(
mesh_cord,
func_higher_cord=mesh_8000_default_lon,
func_unit_cord=_unit_lon_8000,
func_multiplier=lon_multiplier_lv)
mesh_5000_lat = _functools.partial(
mesh_cord,
func_higher_cord=mesh_5000_default_lat,
func_unit_cord=_unit_lat_5000,
func_multiplier=lat_multiplier_lv)
mesh_5000_lon = _functools.partial(
mesh_cord,
func_higher_cord=mesh_5000_default_lon,
func_unit_cord=_unit_lon_5000,
func_multiplier=lon_multiplier_lv)
mesh_4000_lat = _functools.partial(
mesh_cord,
func_higher_cord=mesh_4000_default_lat,
func_unit_cord=_unit_lat_4000,
func_multiplier=lat_multiplier_lv)
mesh_4000_lon = _functools.partial(
mesh_cord,
func_higher_cord=mesh_4000_default_lon,
func_unit_cord=_unit_lon_4000,
func_multiplier=lon_multiplier_lv)
mesh_2500_lat = _functools.partial(
mesh_cord,
func_higher_cord=mesh_2500_default_lat,
func_unit_cord=_unit_lat_2500,
func_multiplier=lat_multiplier_lv)
mesh_2500_lon = _functools.partial(
mesh_cord,
func_higher_cord=mesh_2500_default_lon,
func_unit_cord=_unit_lon_2500,
func_multiplier=lon_multiplier_lv)
mesh_2000_lat = _functools.partial(
mesh_cord,
func_higher_cord=mesh_2000_default_lat,
func_unit_cord=_unit_lat_2000,
func_multiplier=lat_multiplier_lv)
mesh_2000_lon = _functools.partial(
mesh_cord,
func_higher_cord=mesh_2000_default_lon,
func_unit_cord=_unit_lon_2000,
func_multiplier=lon_multiplier_lv)
mesh_lv3_lat = _functools.partial(
mesh_cord,
func_higher_cord=mesh_lv3_default_lat,
func_unit_cord=_unit_lat_lv3,
func_multiplier=lat_multiplier_lv)
mesh_lv3_lon = _functools.partial(
mesh_cord,
func_higher_cord=mesh_lv3_default_lon,
func_unit_cord=_unit_lon_lv3,
func_multiplier=lon_multiplier_lv)
mesh_lv4_lat = _functools.partial(
mesh_cord,
func_higher_cord=mesh_lv4_default_lat,
func_unit_cord=_unit_lat_lv4,
func_multiplier=lat_multiplier_lv)
mesh_lv4_lon = _functools.partial(
mesh_cord,
func_higher_cord=mesh_lv4_default_lon,
func_unit_cord=_unit_lon_lv4,
func_multiplier=lon_multiplier_lv)
mesh_lv5_lat = _functools.partial(
mesh_cord,
func_higher_cord=mesh_lv5_default_lat,
func_unit_cord=_unit_lat_lv5,
func_multiplier=lat_multiplier_lv)
mesh_lv5_lon = _functools.partial(
mesh_cord,
func_higher_cord=mesh_lv5_default_lon,
func_unit_cord=_unit_lon_lv5,
func_multiplier=lon_multiplier_lv)
mesh_lv6_lat = _functools.partial(
mesh_cord,
func_higher_cord=mesh_lv6_default_lat,
func_unit_cord=_unit_lat_lv6,
func_multiplier=lat_multiplier_lv)
mesh_lv6_lon = _functools.partial(
mesh_cord,
func_higher_cord=mesh_lv6_default_lon,
func_unit_cord=_unit_lon_lv6,
func_multiplier=lon_multiplier_lv)
level = to_meshlevel(meshcode)
if level == 1:
return mesh_lv1_lat(), mesh_lv1_lon()
if level == 40000:
return mesh_40000_lat(), mesh_40000_lon()
if level == 20000:
return mesh_20000_lat(), mesh_20000_lon()
if level == 16000:
return mesh_16000_lat(), mesh_16000_lon()
if level == 2:
return mesh_lv2_lat(), mesh_lv2_lon()
if level == 8000:
return mesh_8000_lat(), mesh_8000_lon()
if level == 5000:
return mesh_5000_lat(), mesh_5000_lon()
if level == 4000:
return mesh_4000_lat(), mesh_4000_lon()
if level == 2500:
return mesh_2500_lat(), mesh_2500_lon()
if level == 2000:
return mesh_2000_lat(), mesh_2000_lon()
if level == 3:
return mesh_lv3_lat(), mesh_lv3_lon()
if level == 4:
return mesh_lv4_lat(), mesh_lv4_lon()
if level == 5:
return mesh_lv5_lat(), mesh_lv5_lon()
if level == 6:
return mesh_lv6_lat(), mesh_lv6_lon()
raise ValueError("the level is unsupported.")
|
[
"def",
"to_meshpoint",
"(",
"meshcode",
",",
"lat_multiplier",
",",
"lon_multiplier",
")",
":",
"def",
"mesh_cord",
"(",
"func_higher_cord",
",",
"func_unit_cord",
",",
"func_multiplier",
")",
":",
"return",
"func_higher_cord",
"(",
")",
"+",
"func_unit_cord",
"(",
")",
"*",
"func_multiplier",
"(",
")",
"lat_multiplier_lv",
"=",
"lambda",
":",
"lat_multiplier",
"lon_multiplier_lv",
"=",
"lambda",
":",
"lon_multiplier",
"lat_multiplier_lv1",
"=",
"_functools",
".",
"partial",
"(",
"lambda",
"meshcode",
":",
"int",
"(",
"meshcode",
"[",
"0",
":",
"2",
"]",
")",
",",
"meshcode",
"=",
"meshcode",
")",
"lon_multiplier_lv1",
"=",
"_functools",
".",
"partial",
"(",
"lambda",
"meshcode",
":",
"int",
"(",
"meshcode",
"[",
"2",
":",
"4",
"]",
")",
",",
"meshcode",
"=",
"meshcode",
")",
"lat_multiplier_40000",
"=",
"_functools",
".",
"partial",
"(",
"lambda",
"meshcode",
":",
"int",
"(",
"bin",
"(",
"int",
"(",
"meshcode",
"[",
"4",
":",
"5",
"]",
")",
"-",
"1",
")",
"[",
"2",
":",
"]",
".",
"zfill",
"(",
"2",
")",
"[",
"0",
":",
"1",
"]",
")",
",",
"meshcode",
"=",
"meshcode",
")",
"lon_multiplier_40000",
"=",
"_functools",
".",
"partial",
"(",
"lambda",
"meshcode",
":",
"int",
"(",
"bin",
"(",
"int",
"(",
"meshcode",
"[",
"4",
":",
"5",
"]",
")",
"-",
"1",
")",
"[",
"2",
":",
"]",
".",
"zfill",
"(",
"2",
")",
"[",
"1",
":",
"2",
"]",
")",
",",
"meshcode",
"=",
"meshcode",
")",
"lat_multiplier_20000",
"=",
"_functools",
".",
"partial",
"(",
"lambda",
"meshcode",
":",
"int",
"(",
"bin",
"(",
"int",
"(",
"meshcode",
"[",
"5",
":",
"6",
"]",
")",
"-",
"1",
")",
"[",
"2",
":",
"]",
".",
"zfill",
"(",
"2",
")",
"[",
"0",
":",
"1",
"]",
")",
",",
"meshcode",
"=",
"meshcode",
")",
"lon_multiplier_20000",
"=",
"_functools",
".",
"partial",
"(",
"lambda",
"meshcode",
":",
"int",
"(",
"bin",
"(",
"int",
"(",
"meshcode",
"[",
"5",
":",
"6",
"]",
")",
"-",
"1",
")",
"[",
"2",
":",
"]",
".",
"zfill",
"(",
"2",
")",
"[",
"1",
":",
"2",
"]",
")",
",",
"meshcode",
"=",
"meshcode",
")",
"lat_multiplier_16000",
"=",
"_functools",
".",
"partial",
"(",
"lambda",
"meshcode",
":",
"int",
"(",
"meshcode",
"[",
"4",
":",
"5",
"]",
")",
"/",
"2",
",",
"meshcode",
"=",
"meshcode",
")",
"lon_multiplier_16000",
"=",
"_functools",
".",
"partial",
"(",
"lambda",
"meshcode",
":",
"int",
"(",
"meshcode",
"[",
"5",
":",
"6",
"]",
")",
"/",
"2",
",",
"meshcode",
"=",
"meshcode",
")",
"lat_multiplier_lv2",
"=",
"_functools",
".",
"partial",
"(",
"lambda",
"meshcode",
":",
"int",
"(",
"meshcode",
"[",
"4",
":",
"5",
"]",
")",
",",
"meshcode",
"=",
"meshcode",
")",
"lon_multiplier_lv2",
"=",
"_functools",
".",
"partial",
"(",
"lambda",
"meshcode",
":",
"int",
"(",
"meshcode",
"[",
"5",
":",
"6",
"]",
")",
",",
"meshcode",
"=",
"meshcode",
")",
"lat_multiplier_8000",
"=",
"_functools",
".",
"partial",
"(",
"lambda",
"meshcode",
":",
"int",
"(",
"meshcode",
"[",
"4",
":",
"5",
"]",
")",
",",
"meshcode",
"=",
"meshcode",
")",
"lon_multiplier_8000",
"=",
"_functools",
".",
"partial",
"(",
"lambda",
"meshcode",
":",
"int",
"(",
"meshcode",
"[",
"5",
":",
"6",
"]",
")",
",",
"meshcode",
"=",
"meshcode",
")",
"lat_multiplier_5000",
"=",
"_functools",
".",
"partial",
"(",
"lambda",
"meshcode",
":",
"int",
"(",
"bin",
"(",
"int",
"(",
"meshcode",
"[",
"6",
":",
"7",
"]",
")",
"-",
"1",
")",
"[",
"2",
":",
"]",
".",
"zfill",
"(",
"2",
")",
"[",
"0",
":",
"1",
"]",
")",
",",
"meshcode",
"=",
"meshcode",
")",
"lon_multiplier_5000",
"=",
"_functools",
".",
"partial",
"(",
"lambda",
"meshcode",
":",
"int",
"(",
"bin",
"(",
"int",
"(",
"meshcode",
"[",
"6",
":",
"7",
"]",
")",
"-",
"1",
")",
"[",
"2",
":",
"]",
".",
"zfill",
"(",
"2",
")",
"[",
"1",
":",
"2",
"]",
")",
",",
"meshcode",
"=",
"meshcode",
")",
"lat_multiplier_4000",
"=",
"_functools",
".",
"partial",
"(",
"lambda",
"meshcode",
":",
"int",
"(",
"bin",
"(",
"int",
"(",
"meshcode",
"[",
"7",
":",
"8",
"]",
")",
"-",
"1",
")",
"[",
"2",
":",
"]",
".",
"zfill",
"(",
"2",
")",
"[",
"0",
":",
"1",
"]",
")",
",",
"meshcode",
"=",
"meshcode",
")",
"lon_multiplier_4000",
"=",
"_functools",
".",
"partial",
"(",
"lambda",
"meshcode",
":",
"int",
"(",
"bin",
"(",
"int",
"(",
"meshcode",
"[",
"7",
":",
"8",
"]",
")",
"-",
"1",
")",
"[",
"2",
":",
"]",
".",
"zfill",
"(",
"2",
")",
"[",
"1",
":",
"2",
"]",
")",
",",
"meshcode",
"=",
"meshcode",
")",
"lat_multiplier_2500",
"=",
"_functools",
".",
"partial",
"(",
"lambda",
"meshcode",
":",
"int",
"(",
"bin",
"(",
"int",
"(",
"meshcode",
"[",
"7",
":",
"8",
"]",
")",
"-",
"1",
")",
"[",
"2",
":",
"]",
".",
"zfill",
"(",
"2",
")",
"[",
"0",
":",
"1",
"]",
")",
",",
"meshcode",
"=",
"meshcode",
")",
"lon_multiplier_2500",
"=",
"_functools",
".",
"partial",
"(",
"lambda",
"meshcode",
":",
"int",
"(",
"bin",
"(",
"int",
"(",
"meshcode",
"[",
"7",
":",
"8",
"]",
")",
"-",
"1",
")",
"[",
"2",
":",
"]",
".",
"zfill",
"(",
"2",
")",
"[",
"1",
":",
"2",
"]",
")",
",",
"meshcode",
"=",
"meshcode",
")",
"lat_multiplier_2000",
"=",
"_functools",
".",
"partial",
"(",
"lambda",
"meshcode",
":",
"int",
"(",
"meshcode",
"[",
"6",
":",
"7",
"]",
")",
"/",
"2",
",",
"meshcode",
"=",
"meshcode",
")",
"lon_multiplier_2000",
"=",
"_functools",
".",
"partial",
"(",
"lambda",
"meshcode",
":",
"int",
"(",
"meshcode",
"[",
"7",
":",
"8",
"]",
")",
"/",
"2",
",",
"meshcode",
"=",
"meshcode",
")",
"lat_multiplier_lv3",
"=",
"_functools",
".",
"partial",
"(",
"lambda",
"meshcode",
":",
"int",
"(",
"meshcode",
"[",
"6",
":",
"7",
"]",
")",
",",
"meshcode",
"=",
"meshcode",
")",
"lon_multiplier_lv3",
"=",
"_functools",
".",
"partial",
"(",
"lambda",
"meshcode",
":",
"int",
"(",
"meshcode",
"[",
"7",
":",
"8",
"]",
")",
",",
"meshcode",
"=",
"meshcode",
")",
"lat_multiplier_lv4",
"=",
"_functools",
".",
"partial",
"(",
"lambda",
"meshcode",
":",
"int",
"(",
"bin",
"(",
"int",
"(",
"meshcode",
"[",
"8",
":",
"9",
"]",
")",
"-",
"1",
")",
"[",
"2",
":",
"]",
".",
"zfill",
"(",
"2",
")",
"[",
"0",
":",
"1",
"]",
")",
",",
"meshcode",
"=",
"meshcode",
")",
"lon_multiplier_lv4",
"=",
"_functools",
".",
"partial",
"(",
"lambda",
"meshcode",
":",
"int",
"(",
"bin",
"(",
"int",
"(",
"meshcode",
"[",
"8",
":",
"9",
"]",
")",
"-",
"1",
")",
"[",
"2",
":",
"]",
".",
"zfill",
"(",
"2",
")",
"[",
"1",
":",
"2",
"]",
")",
",",
"meshcode",
"=",
"meshcode",
")",
"lat_multiplier_lv5",
"=",
"_functools",
".",
"partial",
"(",
"lambda",
"meshcode",
":",
"int",
"(",
"bin",
"(",
"int",
"(",
"meshcode",
"[",
"9",
":",
"10",
"]",
")",
"-",
"1",
")",
"[",
"2",
":",
"]",
".",
"zfill",
"(",
"2",
")",
"[",
"0",
":",
"1",
"]",
")",
",",
"meshcode",
"=",
"meshcode",
")",
"lon_multiplier_lv5",
"=",
"_functools",
".",
"partial",
"(",
"lambda",
"meshcode",
":",
"int",
"(",
"bin",
"(",
"int",
"(",
"meshcode",
"[",
"9",
":",
"10",
"]",
")",
"-",
"1",
")",
"[",
"2",
":",
"]",
".",
"zfill",
"(",
"2",
")",
"[",
"1",
":",
"2",
"]",
")",
",",
"meshcode",
"=",
"meshcode",
")",
"lat_multiplier_lv6",
"=",
"_functools",
".",
"partial",
"(",
"lambda",
"meshcode",
":",
"int",
"(",
"bin",
"(",
"int",
"(",
"meshcode",
"[",
"10",
":",
"11",
"]",
")",
"-",
"1",
")",
"[",
"2",
":",
"]",
".",
"zfill",
"(",
"2",
")",
"[",
"0",
":",
"1",
"]",
")",
",",
"meshcode",
"=",
"meshcode",
")",
"lon_multiplier_lv6",
"=",
"_functools",
".",
"partial",
"(",
"lambda",
"meshcode",
":",
"int",
"(",
"bin",
"(",
"int",
"(",
"meshcode",
"[",
"10",
":",
"11",
"]",
")",
"-",
"1",
")",
"[",
"2",
":",
"]",
".",
"zfill",
"(",
"2",
")",
"[",
"1",
":",
"2",
"]",
")",
",",
"meshcode",
"=",
"meshcode",
")",
"mesh_lv1_default_lat",
"=",
"_functools",
".",
"partial",
"(",
"mesh_cord",
",",
"func_higher_cord",
"=",
"lambda",
":",
"0",
",",
"func_unit_cord",
"=",
"_unit_lat_lv1",
",",
"func_multiplier",
"=",
"lat_multiplier_lv1",
")",
"mesh_lv1_default_lon",
"=",
"_functools",
".",
"partial",
"(",
"mesh_cord",
",",
"func_higher_cord",
"=",
"lambda",
":",
"100",
",",
"func_unit_cord",
"=",
"_unit_lon_lv1",
",",
"func_multiplier",
"=",
"lon_multiplier_lv1",
")",
"mesh_40000_default_lat",
"=",
"_functools",
".",
"partial",
"(",
"mesh_cord",
",",
"func_higher_cord",
"=",
"mesh_lv1_default_lat",
",",
"func_unit_cord",
"=",
"_unit_lat_40000",
",",
"func_multiplier",
"=",
"lat_multiplier_40000",
")",
"mesh_40000_default_lon",
"=",
"_functools",
".",
"partial",
"(",
"mesh_cord",
",",
"func_higher_cord",
"=",
"mesh_lv1_default_lon",
",",
"func_unit_cord",
"=",
"_unit_lon_40000",
",",
"func_multiplier",
"=",
"lon_multiplier_40000",
")",
"mesh_20000_default_lat",
"=",
"_functools",
".",
"partial",
"(",
"mesh_cord",
",",
"func_higher_cord",
"=",
"mesh_40000_default_lat",
",",
"func_unit_cord",
"=",
"_unit_lat_20000",
",",
"func_multiplier",
"=",
"lat_multiplier_20000",
")",
"mesh_20000_default_lon",
"=",
"_functools",
".",
"partial",
"(",
"mesh_cord",
",",
"func_higher_cord",
"=",
"mesh_40000_default_lon",
",",
"func_unit_cord",
"=",
"_unit_lon_20000",
",",
"func_multiplier",
"=",
"lon_multiplier_20000",
")",
"mesh_16000_default_lat",
"=",
"_functools",
".",
"partial",
"(",
"mesh_cord",
",",
"func_higher_cord",
"=",
"mesh_lv1_default_lat",
",",
"func_unit_cord",
"=",
"_unit_lat_16000",
",",
"func_multiplier",
"=",
"lat_multiplier_16000",
")",
"mesh_16000_default_lon",
"=",
"_functools",
".",
"partial",
"(",
"mesh_cord",
",",
"func_higher_cord",
"=",
"mesh_lv1_default_lon",
",",
"func_unit_cord",
"=",
"_unit_lon_16000",
",",
"func_multiplier",
"=",
"lon_multiplier_16000",
")",
"mesh_lv2_default_lat",
"=",
"_functools",
".",
"partial",
"(",
"mesh_cord",
",",
"func_higher_cord",
"=",
"mesh_lv1_default_lat",
",",
"func_unit_cord",
"=",
"_unit_lat_lv2",
",",
"func_multiplier",
"=",
"lat_multiplier_lv2",
")",
"mesh_lv2_default_lon",
"=",
"_functools",
".",
"partial",
"(",
"mesh_cord",
",",
"func_higher_cord",
"=",
"mesh_lv1_default_lon",
",",
"func_unit_cord",
"=",
"_unit_lon_lv2",
",",
"func_multiplier",
"=",
"lon_multiplier_lv2",
")",
"mesh_8000_default_lat",
"=",
"_functools",
".",
"partial",
"(",
"mesh_cord",
",",
"func_higher_cord",
"=",
"mesh_lv1_default_lat",
",",
"func_unit_cord",
"=",
"_unit_lat_8000",
",",
"func_multiplier",
"=",
"lat_multiplier_8000",
")",
"mesh_8000_default_lon",
"=",
"_functools",
".",
"partial",
"(",
"mesh_cord",
",",
"func_higher_cord",
"=",
"mesh_lv1_default_lon",
",",
"func_unit_cord",
"=",
"_unit_lon_8000",
",",
"func_multiplier",
"=",
"lon_multiplier_8000",
")",
"mesh_5000_default_lat",
"=",
"_functools",
".",
"partial",
"(",
"mesh_cord",
",",
"func_higher_cord",
"=",
"mesh_lv2_default_lat",
",",
"func_unit_cord",
"=",
"_unit_lat_5000",
",",
"func_multiplier",
"=",
"lat_multiplier_5000",
")",
"mesh_5000_default_lon",
"=",
"_functools",
".",
"partial",
"(",
"mesh_cord",
",",
"func_higher_cord",
"=",
"mesh_lv2_default_lon",
",",
"func_unit_cord",
"=",
"_unit_lon_5000",
",",
"func_multiplier",
"=",
"lon_multiplier_5000",
")",
"mesh_4000_default_lat",
"=",
"_functools",
".",
"partial",
"(",
"mesh_cord",
",",
"func_higher_cord",
"=",
"mesh_8000_default_lat",
",",
"func_unit_cord",
"=",
"_unit_lat_4000",
",",
"func_multiplier",
"=",
"lat_multiplier_4000",
")",
"mesh_4000_default_lon",
"=",
"_functools",
".",
"partial",
"(",
"mesh_cord",
",",
"func_higher_cord",
"=",
"mesh_8000_default_lon",
",",
"func_unit_cord",
"=",
"_unit_lon_4000",
",",
"func_multiplier",
"=",
"lon_multiplier_4000",
")",
"mesh_2500_default_lat",
"=",
"_functools",
".",
"partial",
"(",
"mesh_cord",
",",
"func_higher_cord",
"=",
"mesh_5000_default_lat",
",",
"func_unit_cord",
"=",
"_unit_lat_2500",
",",
"func_multiplier",
"=",
"lat_multiplier_2500",
")",
"mesh_2500_default_lon",
"=",
"_functools",
".",
"partial",
"(",
"mesh_cord",
",",
"func_higher_cord",
"=",
"mesh_5000_default_lon",
",",
"func_unit_cord",
"=",
"_unit_lon_2500",
",",
"func_multiplier",
"=",
"lon_multiplier_2500",
")",
"mesh_2000_default_lat",
"=",
"_functools",
".",
"partial",
"(",
"mesh_cord",
",",
"func_higher_cord",
"=",
"mesh_lv2_default_lat",
",",
"func_unit_cord",
"=",
"_unit_lat_2000",
",",
"func_multiplier",
"=",
"lat_multiplier_2000",
")",
"mesh_2000_default_lon",
"=",
"_functools",
".",
"partial",
"(",
"mesh_cord",
",",
"func_higher_cord",
"=",
"mesh_lv2_default_lon",
",",
"func_unit_cord",
"=",
"_unit_lon_2000",
",",
"func_multiplier",
"=",
"lon_multiplier_2000",
")",
"mesh_lv3_default_lat",
"=",
"_functools",
".",
"partial",
"(",
"mesh_cord",
",",
"func_higher_cord",
"=",
"mesh_lv2_default_lat",
",",
"func_unit_cord",
"=",
"_unit_lat_lv3",
",",
"func_multiplier",
"=",
"lat_multiplier_lv3",
")",
"mesh_lv3_default_lon",
"=",
"_functools",
".",
"partial",
"(",
"mesh_cord",
",",
"func_higher_cord",
"=",
"mesh_lv2_default_lon",
",",
"func_unit_cord",
"=",
"_unit_lon_lv3",
",",
"func_multiplier",
"=",
"lon_multiplier_lv3",
")",
"mesh_lv4_default_lat",
"=",
"_functools",
".",
"partial",
"(",
"mesh_cord",
",",
"func_higher_cord",
"=",
"mesh_lv3_default_lat",
",",
"func_unit_cord",
"=",
"_unit_lat_lv4",
",",
"func_multiplier",
"=",
"lat_multiplier_lv4",
")",
"mesh_lv4_default_lon",
"=",
"_functools",
".",
"partial",
"(",
"mesh_cord",
",",
"func_higher_cord",
"=",
"mesh_lv3_default_lon",
",",
"func_unit_cord",
"=",
"_unit_lon_lv4",
",",
"func_multiplier",
"=",
"lon_multiplier_lv4",
")",
"mesh_lv5_default_lat",
"=",
"_functools",
".",
"partial",
"(",
"mesh_cord",
",",
"func_higher_cord",
"=",
"mesh_lv4_default_lat",
",",
"func_unit_cord",
"=",
"_unit_lat_lv5",
",",
"func_multiplier",
"=",
"lat_multiplier_lv5",
")",
"mesh_lv5_default_lon",
"=",
"_functools",
".",
"partial",
"(",
"mesh_cord",
",",
"func_higher_cord",
"=",
"mesh_lv4_default_lon",
",",
"func_unit_cord",
"=",
"_unit_lon_lv5",
",",
"func_multiplier",
"=",
"lon_multiplier_lv5",
")",
"mesh_lv6_default_lat",
"=",
"_functools",
".",
"partial",
"(",
"mesh_cord",
",",
"func_higher_cord",
"=",
"mesh_lv5_default_lat",
",",
"func_unit_cord",
"=",
"_unit_lat_lv6",
",",
"func_multiplier",
"=",
"lat_multiplier_lv6",
")",
"mesh_lv6_default_lon",
"=",
"_functools",
".",
"partial",
"(",
"mesh_cord",
",",
"func_higher_cord",
"=",
"mesh_lv5_default_lon",
",",
"func_unit_cord",
"=",
"_unit_lon_lv6",
",",
"func_multiplier",
"=",
"lon_multiplier_lv6",
")",
"mesh_lv1_lat",
"=",
"_functools",
".",
"partial",
"(",
"mesh_cord",
",",
"func_higher_cord",
"=",
"mesh_lv1_default_lat",
",",
"func_unit_cord",
"=",
"_unit_lat_lv1",
",",
"func_multiplier",
"=",
"lat_multiplier_lv",
")",
"mesh_lv1_lon",
"=",
"_functools",
".",
"partial",
"(",
"mesh_cord",
",",
"func_higher_cord",
"=",
"mesh_lv1_default_lon",
",",
"func_unit_cord",
"=",
"_unit_lon_lv1",
",",
"func_multiplier",
"=",
"lon_multiplier_lv",
")",
"mesh_40000_lat",
"=",
"_functools",
".",
"partial",
"(",
"mesh_cord",
",",
"func_higher_cord",
"=",
"mesh_40000_default_lat",
",",
"func_unit_cord",
"=",
"_unit_lat_40000",
",",
"func_multiplier",
"=",
"lat_multiplier_lv",
")",
"mesh_40000_lon",
"=",
"_functools",
".",
"partial",
"(",
"mesh_cord",
",",
"func_higher_cord",
"=",
"mesh_40000_default_lon",
",",
"func_unit_cord",
"=",
"_unit_lon_40000",
",",
"func_multiplier",
"=",
"lon_multiplier_lv",
")",
"mesh_20000_lat",
"=",
"_functools",
".",
"partial",
"(",
"mesh_cord",
",",
"func_higher_cord",
"=",
"mesh_20000_default_lat",
",",
"func_unit_cord",
"=",
"_unit_lat_20000",
",",
"func_multiplier",
"=",
"lat_multiplier_lv",
")",
"mesh_20000_lon",
"=",
"_functools",
".",
"partial",
"(",
"mesh_cord",
",",
"func_higher_cord",
"=",
"mesh_20000_default_lon",
",",
"func_unit_cord",
"=",
"_unit_lon_20000",
",",
"func_multiplier",
"=",
"lon_multiplier_lv",
")",
"mesh_16000_lat",
"=",
"_functools",
".",
"partial",
"(",
"mesh_cord",
",",
"func_higher_cord",
"=",
"mesh_16000_default_lat",
",",
"func_unit_cord",
"=",
"_unit_lat_16000",
",",
"func_multiplier",
"=",
"lat_multiplier_lv",
")",
"mesh_16000_lon",
"=",
"_functools",
".",
"partial",
"(",
"mesh_cord",
",",
"func_higher_cord",
"=",
"mesh_16000_default_lon",
",",
"func_unit_cord",
"=",
"_unit_lon_16000",
",",
"func_multiplier",
"=",
"lon_multiplier_lv",
")",
"mesh_lv2_lat",
"=",
"_functools",
".",
"partial",
"(",
"mesh_cord",
",",
"func_higher_cord",
"=",
"mesh_lv2_default_lat",
",",
"func_unit_cord",
"=",
"_unit_lat_lv2",
",",
"func_multiplier",
"=",
"lat_multiplier_lv",
")",
"mesh_lv2_lon",
"=",
"_functools",
".",
"partial",
"(",
"mesh_cord",
",",
"func_higher_cord",
"=",
"mesh_lv2_default_lon",
",",
"func_unit_cord",
"=",
"_unit_lon_lv2",
",",
"func_multiplier",
"=",
"lon_multiplier_lv",
")",
"mesh_8000_lat",
"=",
"_functools",
".",
"partial",
"(",
"mesh_cord",
",",
"func_higher_cord",
"=",
"mesh_8000_default_lat",
",",
"func_unit_cord",
"=",
"_unit_lat_8000",
",",
"func_multiplier",
"=",
"lat_multiplier_lv",
")",
"mesh_8000_lon",
"=",
"_functools",
".",
"partial",
"(",
"mesh_cord",
",",
"func_higher_cord",
"=",
"mesh_8000_default_lon",
",",
"func_unit_cord",
"=",
"_unit_lon_8000",
",",
"func_multiplier",
"=",
"lon_multiplier_lv",
")",
"mesh_5000_lat",
"=",
"_functools",
".",
"partial",
"(",
"mesh_cord",
",",
"func_higher_cord",
"=",
"mesh_5000_default_lat",
",",
"func_unit_cord",
"=",
"_unit_lat_5000",
",",
"func_multiplier",
"=",
"lat_multiplier_lv",
")",
"mesh_5000_lon",
"=",
"_functools",
".",
"partial",
"(",
"mesh_cord",
",",
"func_higher_cord",
"=",
"mesh_5000_default_lon",
",",
"func_unit_cord",
"=",
"_unit_lon_5000",
",",
"func_multiplier",
"=",
"lon_multiplier_lv",
")",
"mesh_4000_lat",
"=",
"_functools",
".",
"partial",
"(",
"mesh_cord",
",",
"func_higher_cord",
"=",
"mesh_4000_default_lat",
",",
"func_unit_cord",
"=",
"_unit_lat_4000",
",",
"func_multiplier",
"=",
"lat_multiplier_lv",
")",
"mesh_4000_lon",
"=",
"_functools",
".",
"partial",
"(",
"mesh_cord",
",",
"func_higher_cord",
"=",
"mesh_4000_default_lon",
",",
"func_unit_cord",
"=",
"_unit_lon_4000",
",",
"func_multiplier",
"=",
"lon_multiplier_lv",
")",
"mesh_2500_lat",
"=",
"_functools",
".",
"partial",
"(",
"mesh_cord",
",",
"func_higher_cord",
"=",
"mesh_2500_default_lat",
",",
"func_unit_cord",
"=",
"_unit_lat_2500",
",",
"func_multiplier",
"=",
"lat_multiplier_lv",
")",
"mesh_2500_lon",
"=",
"_functools",
".",
"partial",
"(",
"mesh_cord",
",",
"func_higher_cord",
"=",
"mesh_2500_default_lon",
",",
"func_unit_cord",
"=",
"_unit_lon_2500",
",",
"func_multiplier",
"=",
"lon_multiplier_lv",
")",
"mesh_2000_lat",
"=",
"_functools",
".",
"partial",
"(",
"mesh_cord",
",",
"func_higher_cord",
"=",
"mesh_2000_default_lat",
",",
"func_unit_cord",
"=",
"_unit_lat_2000",
",",
"func_multiplier",
"=",
"lat_multiplier_lv",
")",
"mesh_2000_lon",
"=",
"_functools",
".",
"partial",
"(",
"mesh_cord",
",",
"func_higher_cord",
"=",
"mesh_2000_default_lon",
",",
"func_unit_cord",
"=",
"_unit_lon_2000",
",",
"func_multiplier",
"=",
"lon_multiplier_lv",
")",
"mesh_lv3_lat",
"=",
"_functools",
".",
"partial",
"(",
"mesh_cord",
",",
"func_higher_cord",
"=",
"mesh_lv3_default_lat",
",",
"func_unit_cord",
"=",
"_unit_lat_lv3",
",",
"func_multiplier",
"=",
"lat_multiplier_lv",
")",
"mesh_lv3_lon",
"=",
"_functools",
".",
"partial",
"(",
"mesh_cord",
",",
"func_higher_cord",
"=",
"mesh_lv3_default_lon",
",",
"func_unit_cord",
"=",
"_unit_lon_lv3",
",",
"func_multiplier",
"=",
"lon_multiplier_lv",
")",
"mesh_lv4_lat",
"=",
"_functools",
".",
"partial",
"(",
"mesh_cord",
",",
"func_higher_cord",
"=",
"mesh_lv4_default_lat",
",",
"func_unit_cord",
"=",
"_unit_lat_lv4",
",",
"func_multiplier",
"=",
"lat_multiplier_lv",
")",
"mesh_lv4_lon",
"=",
"_functools",
".",
"partial",
"(",
"mesh_cord",
",",
"func_higher_cord",
"=",
"mesh_lv4_default_lon",
",",
"func_unit_cord",
"=",
"_unit_lon_lv4",
",",
"func_multiplier",
"=",
"lon_multiplier_lv",
")",
"mesh_lv5_lat",
"=",
"_functools",
".",
"partial",
"(",
"mesh_cord",
",",
"func_higher_cord",
"=",
"mesh_lv5_default_lat",
",",
"func_unit_cord",
"=",
"_unit_lat_lv5",
",",
"func_multiplier",
"=",
"lat_multiplier_lv",
")",
"mesh_lv5_lon",
"=",
"_functools",
".",
"partial",
"(",
"mesh_cord",
",",
"func_higher_cord",
"=",
"mesh_lv5_default_lon",
",",
"func_unit_cord",
"=",
"_unit_lon_lv5",
",",
"func_multiplier",
"=",
"lon_multiplier_lv",
")",
"mesh_lv6_lat",
"=",
"_functools",
".",
"partial",
"(",
"mesh_cord",
",",
"func_higher_cord",
"=",
"mesh_lv6_default_lat",
",",
"func_unit_cord",
"=",
"_unit_lat_lv6",
",",
"func_multiplier",
"=",
"lat_multiplier_lv",
")",
"mesh_lv6_lon",
"=",
"_functools",
".",
"partial",
"(",
"mesh_cord",
",",
"func_higher_cord",
"=",
"mesh_lv6_default_lon",
",",
"func_unit_cord",
"=",
"_unit_lon_lv6",
",",
"func_multiplier",
"=",
"lon_multiplier_lv",
")",
"level",
"=",
"to_meshlevel",
"(",
"meshcode",
")",
"if",
"level",
"==",
"1",
":",
"return",
"mesh_lv1_lat",
"(",
")",
",",
"mesh_lv1_lon",
"(",
")",
"if",
"level",
"==",
"40000",
":",
"return",
"mesh_40000_lat",
"(",
")",
",",
"mesh_40000_lon",
"(",
")",
"if",
"level",
"==",
"20000",
":",
"return",
"mesh_20000_lat",
"(",
")",
",",
"mesh_20000_lon",
"(",
")",
"if",
"level",
"==",
"16000",
":",
"return",
"mesh_16000_lat",
"(",
")",
",",
"mesh_16000_lon",
"(",
")",
"if",
"level",
"==",
"2",
":",
"return",
"mesh_lv2_lat",
"(",
")",
",",
"mesh_lv2_lon",
"(",
")",
"if",
"level",
"==",
"8000",
":",
"return",
"mesh_8000_lat",
"(",
")",
",",
"mesh_8000_lon",
"(",
")",
"if",
"level",
"==",
"5000",
":",
"return",
"mesh_5000_lat",
"(",
")",
",",
"mesh_5000_lon",
"(",
")",
"if",
"level",
"==",
"4000",
":",
"return",
"mesh_4000_lat",
"(",
")",
",",
"mesh_4000_lon",
"(",
")",
"if",
"level",
"==",
"2500",
":",
"return",
"mesh_2500_lat",
"(",
")",
",",
"mesh_2500_lon",
"(",
")",
"if",
"level",
"==",
"2000",
":",
"return",
"mesh_2000_lat",
"(",
")",
",",
"mesh_2000_lon",
"(",
")",
"if",
"level",
"==",
"3",
":",
"return",
"mesh_lv3_lat",
"(",
")",
",",
"mesh_lv3_lon",
"(",
")",
"if",
"level",
"==",
"4",
":",
"return",
"mesh_lv4_lat",
"(",
")",
",",
"mesh_lv4_lon",
"(",
")",
"if",
"level",
"==",
"5",
":",
"return",
"mesh_lv5_lat",
"(",
")",
",",
"mesh_lv5_lon",
"(",
")",
"if",
"level",
"==",
"6",
":",
"return",
"mesh_lv6_lat",
"(",
")",
",",
"mesh_lv6_lon",
"(",
")",
"raise",
"ValueError",
"(",
"\"the level is unsupported.\"",
")"
] |
地域メッシュコードから緯度経度を算出する。
下記のメッシュに対応している。
1次(80km四方):1
40倍(40km四方):40000
20倍(20km四方):20000
16倍(16km四方):16000
2次(10km四方):2
8倍(8km四方):8000
5倍(5km四方):5000
4倍(4km四方):4000
2.5倍(2.5km四方):2500
2倍(2km四方):2000
3次(1km四方):3
4次(500m四方):4
5次(250m四方):5
6次(125m四方):6
Args:
meshcode: 指定次の地域メッシュコード
lat_multiplier: 当該メッシュの基準点(南西端)から、緯度座標上の点の位置を当該メッシュの単位緯度の倍数で指定
lon_multiplier: 当該メッシュの基準点(南西端)から、経度座標上の点の位置を当該メッシュの単位経度の倍数で指定
Return:
lat: 世界測地系の緯度(度単位)
lon: 世界測地系の経度(度単位)
|
[
"地域メッシュコードから緯度経度を算出する。",
"下記のメッシュに対応している。",
"1次",
"(",
"80km四方",
")",
":",
"1",
"40倍",
"(",
"40km四方",
")",
":",
"40000",
"20倍",
"(",
"20km四方",
")",
":",
"20000",
"16倍",
"(",
"16km四方",
")",
":",
"16000",
"2次",
"(",
"10km四方",
")",
":",
"2",
"8倍",
"(",
"8km四方",
")",
":",
"8000",
"5倍",
"(",
"5km四方",
")",
":",
"5000",
"4倍",
"(",
"4km四方",
")",
":",
"4000",
"2",
".",
"5倍",
"(",
"2",
".",
"5km四方",
")",
":",
"2500",
"2倍",
"(",
"2km四方",
")",
":",
"2000",
"3次",
"(",
"1km四方",
")",
":",
"3",
"4次",
"(",
"500m四方",
")",
":",
"4",
"5次",
"(",
"250m四方",
")",
":",
"5",
"6次",
"(",
"125m四方",
")",
":",
"6"
] |
bda486ac7828d0adaea2a128154d0a554be7ef37
|
https://github.com/hni14/jismesh/blob/bda486ac7828d0adaea2a128154d0a554be7ef37/jismesh/utils.py#L312-L811
|
train
|
caffeinehit/django-follow
|
follow/views.py
|
check
|
def check(func):
"""
Check the permissions, http method and login state.
"""
def iCheck(request, *args, **kwargs):
if not request.method == "POST":
return HttpResponseBadRequest("Must be POST request.")
follow = func(request, *args, **kwargs)
if request.is_ajax():
return HttpResponse('ok')
try:
if 'next' in request.GET:
return HttpResponseRedirect(request.GET.get('next'))
if 'next' in request.POST:
return HttpResponseRedirect(request.POST.get('next'))
return HttpResponseRedirect(follow.target.get_absolute_url())
except (AttributeError, TypeError):
if 'HTTP_REFERER' in request.META:
return HttpResponseRedirect(request.META.get('HTTP_REFERER', '/'))
if follow:
return HttpResponseServerError('"%s" object of type ``%s`` has no method ``get_absolute_url()``.' % (
unicode(follow.target), follow.target.__class__))
return HttpResponseServerError('No follow object and `next` parameter found.')
return iCheck
|
python
|
def check(func):
"""
Check the permissions, http method and login state.
"""
def iCheck(request, *args, **kwargs):
if not request.method == "POST":
return HttpResponseBadRequest("Must be POST request.")
follow = func(request, *args, **kwargs)
if request.is_ajax():
return HttpResponse('ok')
try:
if 'next' in request.GET:
return HttpResponseRedirect(request.GET.get('next'))
if 'next' in request.POST:
return HttpResponseRedirect(request.POST.get('next'))
return HttpResponseRedirect(follow.target.get_absolute_url())
except (AttributeError, TypeError):
if 'HTTP_REFERER' in request.META:
return HttpResponseRedirect(request.META.get('HTTP_REFERER', '/'))
if follow:
return HttpResponseServerError('"%s" object of type ``%s`` has no method ``get_absolute_url()``.' % (
unicode(follow.target), follow.target.__class__))
return HttpResponseServerError('No follow object and `next` parameter found.')
return iCheck
|
[
"def",
"check",
"(",
"func",
")",
":",
"def",
"iCheck",
"(",
"request",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"not",
"request",
".",
"method",
"==",
"\"POST\"",
":",
"return",
"HttpResponseBadRequest",
"(",
"\"Must be POST request.\"",
")",
"follow",
"=",
"func",
"(",
"request",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"if",
"request",
".",
"is_ajax",
"(",
")",
":",
"return",
"HttpResponse",
"(",
"'ok'",
")",
"try",
":",
"if",
"'next'",
"in",
"request",
".",
"GET",
":",
"return",
"HttpResponseRedirect",
"(",
"request",
".",
"GET",
".",
"get",
"(",
"'next'",
")",
")",
"if",
"'next'",
"in",
"request",
".",
"POST",
":",
"return",
"HttpResponseRedirect",
"(",
"request",
".",
"POST",
".",
"get",
"(",
"'next'",
")",
")",
"return",
"HttpResponseRedirect",
"(",
"follow",
".",
"target",
".",
"get_absolute_url",
"(",
")",
")",
"except",
"(",
"AttributeError",
",",
"TypeError",
")",
":",
"if",
"'HTTP_REFERER'",
"in",
"request",
".",
"META",
":",
"return",
"HttpResponseRedirect",
"(",
"request",
".",
"META",
".",
"get",
"(",
"'HTTP_REFERER'",
",",
"'/'",
")",
")",
"if",
"follow",
":",
"return",
"HttpResponseServerError",
"(",
"'\"%s\" object of type ``%s`` has no method ``get_absolute_url()``.'",
"%",
"(",
"unicode",
"(",
"follow",
".",
"target",
")",
",",
"follow",
".",
"target",
".",
"__class__",
")",
")",
"return",
"HttpResponseServerError",
"(",
"'No follow object and `next` parameter found.'",
")",
"return",
"iCheck"
] |
Check the permissions, http method and login state.
|
[
"Check",
"the",
"permissions",
"http",
"method",
"and",
"login",
"state",
"."
] |
765a4795e58f57fbf96efdb7838d0c7222db2e56
|
https://github.com/caffeinehit/django-follow/blob/765a4795e58f57fbf96efdb7838d0c7222db2e56/follow/views.py#L7-L30
|
train
|
caffeinehit/django-follow
|
follow/utils.py
|
register
|
def register(model, field_name=None, related_name=None, lookup_method_name='get_follows'):
"""
This registers any model class to be follow-able.
"""
if model in registry:
return
registry.append(model)
if not field_name:
field_name = 'target_%s' % model._meta.module_name
if not related_name:
related_name = 'follow_%s' % model._meta.module_name
field = ForeignKey(model, related_name=related_name, null=True,
blank=True, db_index=True)
field.contribute_to_class(Follow, field_name)
setattr(model, lookup_method_name, get_followers_for_object)
model_map[model] = [related_name, field_name]
|
python
|
def register(model, field_name=None, related_name=None, lookup_method_name='get_follows'):
"""
This registers any model class to be follow-able.
"""
if model in registry:
return
registry.append(model)
if not field_name:
field_name = 'target_%s' % model._meta.module_name
if not related_name:
related_name = 'follow_%s' % model._meta.module_name
field = ForeignKey(model, related_name=related_name, null=True,
blank=True, db_index=True)
field.contribute_to_class(Follow, field_name)
setattr(model, lookup_method_name, get_followers_for_object)
model_map[model] = [related_name, field_name]
|
[
"def",
"register",
"(",
"model",
",",
"field_name",
"=",
"None",
",",
"related_name",
"=",
"None",
",",
"lookup_method_name",
"=",
"'get_follows'",
")",
":",
"if",
"model",
"in",
"registry",
":",
"return",
"registry",
".",
"append",
"(",
"model",
")",
"if",
"not",
"field_name",
":",
"field_name",
"=",
"'target_%s'",
"%",
"model",
".",
"_meta",
".",
"module_name",
"if",
"not",
"related_name",
":",
"related_name",
"=",
"'follow_%s'",
"%",
"model",
".",
"_meta",
".",
"module_name",
"field",
"=",
"ForeignKey",
"(",
"model",
",",
"related_name",
"=",
"related_name",
",",
"null",
"=",
"True",
",",
"blank",
"=",
"True",
",",
"db_index",
"=",
"True",
")",
"field",
".",
"contribute_to_class",
"(",
"Follow",
",",
"field_name",
")",
"setattr",
"(",
"model",
",",
"lookup_method_name",
",",
"get_followers_for_object",
")",
"model_map",
"[",
"model",
"]",
"=",
"[",
"related_name",
",",
"field_name",
"]"
] |
This registers any model class to be follow-able.
|
[
"This",
"registers",
"any",
"model",
"class",
"to",
"be",
"follow",
"-",
"able",
"."
] |
765a4795e58f57fbf96efdb7838d0c7222db2e56
|
https://github.com/caffeinehit/django-follow/blob/765a4795e58f57fbf96efdb7838d0c7222db2e56/follow/utils.py#L9-L30
|
train
|
caffeinehit/django-follow
|
follow/utils.py
|
follow
|
def follow(user, obj):
""" Make a user follow an object """
follow, created = Follow.objects.get_or_create(user, obj)
return follow
|
python
|
def follow(user, obj):
""" Make a user follow an object """
follow, created = Follow.objects.get_or_create(user, obj)
return follow
|
[
"def",
"follow",
"(",
"user",
",",
"obj",
")",
":",
"follow",
",",
"created",
"=",
"Follow",
".",
"objects",
".",
"get_or_create",
"(",
"user",
",",
"obj",
")",
"return",
"follow"
] |
Make a user follow an object
|
[
"Make",
"a",
"user",
"follow",
"an",
"object"
] |
765a4795e58f57fbf96efdb7838d0c7222db2e56
|
https://github.com/caffeinehit/django-follow/blob/765a4795e58f57fbf96efdb7838d0c7222db2e56/follow/utils.py#L32-L35
|
train
|
caffeinehit/django-follow
|
follow/utils.py
|
unfollow
|
def unfollow(user, obj):
""" Make a user unfollow an object """
try:
follow = Follow.objects.get_follows(obj).get(user=user)
follow.delete()
return follow
except Follow.DoesNotExist:
pass
|
python
|
def unfollow(user, obj):
""" Make a user unfollow an object """
try:
follow = Follow.objects.get_follows(obj).get(user=user)
follow.delete()
return follow
except Follow.DoesNotExist:
pass
|
[
"def",
"unfollow",
"(",
"user",
",",
"obj",
")",
":",
"try",
":",
"follow",
"=",
"Follow",
".",
"objects",
".",
"get_follows",
"(",
"obj",
")",
".",
"get",
"(",
"user",
"=",
"user",
")",
"follow",
".",
"delete",
"(",
")",
"return",
"follow",
"except",
"Follow",
".",
"DoesNotExist",
":",
"pass"
] |
Make a user unfollow an object
|
[
"Make",
"a",
"user",
"unfollow",
"an",
"object"
] |
765a4795e58f57fbf96efdb7838d0c7222db2e56
|
https://github.com/caffeinehit/django-follow/blob/765a4795e58f57fbf96efdb7838d0c7222db2e56/follow/utils.py#L37-L44
|
train
|
caffeinehit/django-follow
|
follow/utils.py
|
toggle
|
def toggle(user, obj):
""" Toggles a follow status. Useful function if you don't want to perform follow
checks but just toggle it on / off. """
if Follow.objects.is_following(user, obj):
return unfollow(user, obj)
return follow(user, obj)
|
python
|
def toggle(user, obj):
""" Toggles a follow status. Useful function if you don't want to perform follow
checks but just toggle it on / off. """
if Follow.objects.is_following(user, obj):
return unfollow(user, obj)
return follow(user, obj)
|
[
"def",
"toggle",
"(",
"user",
",",
"obj",
")",
":",
"if",
"Follow",
".",
"objects",
".",
"is_following",
"(",
"user",
",",
"obj",
")",
":",
"return",
"unfollow",
"(",
"user",
",",
"obj",
")",
"return",
"follow",
"(",
"user",
",",
"obj",
")"
] |
Toggles a follow status. Useful function if you don't want to perform follow
checks but just toggle it on / off.
|
[
"Toggles",
"a",
"follow",
"status",
".",
"Useful",
"function",
"if",
"you",
"don",
"t",
"want",
"to",
"perform",
"follow",
"checks",
"but",
"just",
"toggle",
"it",
"on",
"/",
"off",
"."
] |
765a4795e58f57fbf96efdb7838d0c7222db2e56
|
https://github.com/caffeinehit/django-follow/blob/765a4795e58f57fbf96efdb7838d0c7222db2e56/follow/utils.py#L46-L51
|
train
|
cga-harvard/Hypermap-Registry
|
hypermap/search_api/serializers.py
|
SearchSerializer.validate_q_time
|
def validate_q_time(self, value):
"""
Would be for example: [2013-03-01 TO 2013-04-01T00:00:00] and/or [* TO *]
Returns a valid sorl value. [2013-03-01T00:00:00Z TO 2013-04-01T00:00:00Z] and/or [* TO *]
"""
if value:
try:
range = utils.parse_datetime_range_to_solr(value)
return range
except Exception as e:
raise serializers.ValidationError(e.message)
return value
|
python
|
def validate_q_time(self, value):
"""
Would be for example: [2013-03-01 TO 2013-04-01T00:00:00] and/or [* TO *]
Returns a valid sorl value. [2013-03-01T00:00:00Z TO 2013-04-01T00:00:00Z] and/or [* TO *]
"""
if value:
try:
range = utils.parse_datetime_range_to_solr(value)
return range
except Exception as e:
raise serializers.ValidationError(e.message)
return value
|
[
"def",
"validate_q_time",
"(",
"self",
",",
"value",
")",
":",
"if",
"value",
":",
"try",
":",
"range",
"=",
"utils",
".",
"parse_datetime_range_to_solr",
"(",
"value",
")",
"return",
"range",
"except",
"Exception",
"as",
"e",
":",
"raise",
"serializers",
".",
"ValidationError",
"(",
"e",
".",
"message",
")",
"return",
"value"
] |
Would be for example: [2013-03-01 TO 2013-04-01T00:00:00] and/or [* TO *]
Returns a valid sorl value. [2013-03-01T00:00:00Z TO 2013-04-01T00:00:00Z] and/or [* TO *]
|
[
"Would",
"be",
"for",
"example",
":",
"[",
"2013",
"-",
"03",
"-",
"01",
"TO",
"2013",
"-",
"04",
"-",
"01T00",
":",
"00",
":",
"00",
"]",
"and",
"/",
"or",
"[",
"*",
"TO",
"*",
"]",
"Returns",
"a",
"valid",
"sorl",
"value",
".",
"[",
"2013",
"-",
"03",
"-",
"01T00",
":",
"00",
":",
"00Z",
"TO",
"2013",
"-",
"04",
"-",
"01T00",
":",
"00",
":",
"00Z",
"]",
"and",
"/",
"or",
"[",
"*",
"TO",
"*",
"]"
] |
899a5385b15af7fba190ab4fae1d41e47d155a1b
|
https://github.com/cga-harvard/Hypermap-Registry/blob/899a5385b15af7fba190ab4fae1d41e47d155a1b/hypermap/search_api/serializers.py#L114-L126
|
train
|
cga-harvard/Hypermap-Registry
|
hypermap/search_api/serializers.py
|
SearchSerializer.validate_q_geo
|
def validate_q_geo(self, value):
    """
    Validate a bounding-box query value, for example: [-90,-180 TO 90,180]

    Returns the box re-emitted from the parsed bounds (normalized), or
    raises ValidationError on unparseable input.
    """
    if value:
        try:
            rectangle = utils.parse_geo_box(value)
            # Rebuild the box string from the parsed bounds so output is normalized.
            return "[{0},{1} TO {2},{3}]".format(
                rectangle.bounds[0],
                rectangle.bounds[1],
                rectangle.bounds[2],
                rectangle.bounds[3],
            )
        except Exception as e:
            # str(e) instead of e.message: Exception.message was removed in Python 3.
            raise serializers.ValidationError(str(e))
    return value
|
python
|
def validate_q_geo(self, value):
"""
Would be for example: [-90,-180 TO 90,180]
"""
if value:
try:
rectangle = utils.parse_geo_box(value)
return "[{0},{1} TO {2},{3}]".format(
rectangle.bounds[0],
rectangle.bounds[1],
rectangle.bounds[2],
rectangle.bounds[3],
)
except Exception as e:
raise serializers.ValidationError(e.message)
return value
|
[
"def",
"validate_q_geo",
"(",
"self",
",",
"value",
")",
":",
"if",
"value",
":",
"try",
":",
"rectangle",
"=",
"utils",
".",
"parse_geo_box",
"(",
"value",
")",
"return",
"\"[{0},{1} TO {2},{3}]\"",
".",
"format",
"(",
"rectangle",
".",
"bounds",
"[",
"0",
"]",
",",
"rectangle",
".",
"bounds",
"[",
"1",
"]",
",",
"rectangle",
".",
"bounds",
"[",
"2",
"]",
",",
"rectangle",
".",
"bounds",
"[",
"3",
"]",
",",
")",
"except",
"Exception",
"as",
"e",
":",
"raise",
"serializers",
".",
"ValidationError",
"(",
"e",
".",
"message",
")",
"return",
"value"
] |
Would be for example: [-90,-180 TO 90,180]
|
[
"Would",
"be",
"for",
"example",
":",
"[",
"-",
"90",
"-",
"180",
"TO",
"90",
"180",
"]"
] |
899a5385b15af7fba190ab4fae1d41e47d155a1b
|
https://github.com/cga-harvard/Hypermap-Registry/blob/899a5385b15af7fba190ab4fae1d41e47d155a1b/hypermap/search_api/serializers.py#L128-L144
|
train
|
cga-harvard/Hypermap-Registry
|
hypermap/search_api/serializers.py
|
SearchSerializer.validate_a_time_filter
|
def validate_a_time_filter(self, value):
    """
    Validate a datetime-range filter value.

    Would be for example: [2013-03-01 TO 2013-04-01:00:00:00] and/or [* TO *]
    The value is returned unchanged; parsing is done only to validate it.
    """
    if value:
        try:
            utils.parse_datetime_range(value)
        except Exception as e:
            # str(e) instead of e.message: Exception.message was removed in Python 3.
            raise serializers.ValidationError(str(e))
    return value
|
python
|
def validate_a_time_filter(self, value):
"""
Would be for example: [2013-03-01 TO 2013-04-01:00:00:00] and/or [* TO *]
"""
if value:
try:
utils.parse_datetime_range(value)
except Exception as e:
raise serializers.ValidationError(e.message)
return value
|
[
"def",
"validate_a_time_filter",
"(",
"self",
",",
"value",
")",
":",
"if",
"value",
":",
"try",
":",
"utils",
".",
"parse_datetime_range",
"(",
"value",
")",
"except",
"Exception",
"as",
"e",
":",
"raise",
"serializers",
".",
"ValidationError",
"(",
"e",
".",
"message",
")",
"return",
"value"
] |
Would be for example: [2013-03-01 TO 2013-04-01:00:00:00] and/or [* TO *]
|
[
"Would",
"be",
"for",
"example",
":",
"[",
"2013",
"-",
"03",
"-",
"01",
"TO",
"2013",
"-",
"04",
"-",
"01",
":",
"00",
":",
"00",
":",
"00",
"]",
"and",
"/",
"or",
"[",
"*",
"TO",
"*",
"]"
] |
899a5385b15af7fba190ab4fae1d41e47d155a1b
|
https://github.com/cga-harvard/Hypermap-Registry/blob/899a5385b15af7fba190ab4fae1d41e47d155a1b/hypermap/search_api/serializers.py#L146-L156
|
train
|
caffeinehit/django-follow
|
follow/models.py
|
FollowManager.fname
|
def fname(self, model_or_obj_or_qs):
    """
    Return the field name on the :class:`Follow` model for ``model_or_obj_or_qs``.
    """
    # Resolve the model class first, then look it up once in model_map.
    if isinstance(model_or_obj_or_qs, QuerySet):
        model_cls = model_or_obj_or_qs.model
    elif inspect.isclass(model_or_obj_or_qs):
        model_cls = model_or_obj_or_qs
    else:
        model_cls = model_or_obj_or_qs.__class__
    _, field_name = model_map[model_cls]
    return field_name
|
python
|
def fname(self, model_or_obj_or_qs):
"""
Return the field name on the :class:`Follow` model for ``model_or_obj_or_qs``.
"""
if isinstance(model_or_obj_or_qs, QuerySet):
_, fname = model_map[model_or_obj_or_qs.model]
else:
cls = model_or_obj_or_qs if inspect.isclass(model_or_obj_or_qs) else model_or_obj_or_qs.__class__
_, fname = model_map[cls]
return fname
|
[
"def",
"fname",
"(",
"self",
",",
"model_or_obj_or_qs",
")",
":",
"if",
"isinstance",
"(",
"model_or_obj_or_qs",
",",
"QuerySet",
")",
":",
"_",
",",
"fname",
"=",
"model_map",
"[",
"model_or_obj_or_qs",
".",
"model",
"]",
"else",
":",
"cls",
"=",
"model_or_obj_or_qs",
"if",
"inspect",
".",
"isclass",
"(",
"model_or_obj_or_qs",
")",
"else",
"model_or_obj_or_qs",
".",
"__class__",
"_",
",",
"fname",
"=",
"model_map",
"[",
"cls",
"]",
"return",
"fname"
] |
Return the field name on the :class:`Follow` model for ``model_or_obj_or_qs``.
|
[
"Return",
"the",
"field",
"name",
"on",
"the",
":",
"class",
":",
"Follow",
"model",
"for",
"model_or_obj_or_qs",
"."
] |
765a4795e58f57fbf96efdb7838d0c7222db2e56
|
https://github.com/caffeinehit/django-follow/blob/765a4795e58f57fbf96efdb7838d0c7222db2e56/follow/models.py#L10-L19
|
train
|
caffeinehit/django-follow
|
follow/models.py
|
FollowManager.create
|
def create(self, user, obj, **kwargs):
    """
    Create a new follow link between a user and an object
    of a registered model type.
    """
    # Build, target and persist the follow relation in three explicit steps.
    new_follow = Follow(user=user)
    new_follow.target = obj
    new_follow.save()
    return new_follow
|
python
|
def create(self, user, obj, **kwargs):
"""
Create a new follow link between a user and an object
of a registered model type.
"""
follow = Follow(user=user)
follow.target = obj
follow.save()
return follow
|
[
"def",
"create",
"(",
"self",
",",
"user",
",",
"obj",
",",
"*",
"*",
"kwargs",
")",
":",
"follow",
"=",
"Follow",
"(",
"user",
"=",
"user",
")",
"follow",
".",
"target",
"=",
"obj",
"follow",
".",
"save",
"(",
")",
"return",
"follow"
] |
Create a new follow link between a user and an object
of a registered model type.
|
[
"Create",
"a",
"new",
"follow",
"link",
"between",
"a",
"user",
"and",
"an",
"object",
"of",
"a",
"registered",
"model",
"type",
"."
] |
765a4795e58f57fbf96efdb7838d0c7222db2e56
|
https://github.com/caffeinehit/django-follow/blob/765a4795e58f57fbf96efdb7838d0c7222db2e56/follow/models.py#L21-L30
|
train
|
caffeinehit/django-follow
|
follow/models.py
|
FollowManager.get_or_create
|
def get_or_create(self, user, obj, **kwargs):
    """
    Almost the same as `FollowManager.objects.create` - behaves the same
    as the normal `get_or_create` methods in django though.
    Returns a tuple with the `Follow` and either `True` or `False`
    """
    # Reuse the existing link when present, otherwise create a fresh one.
    if self.is_following(user, obj):
        return self.get_follows(obj).get(user=user), False
    return self.create(user, obj, **kwargs), True
|
python
|
def get_or_create(self, user, obj, **kwargs):
"""
Almost the same as `FollowManager.objects.create` - behaves the same
as the normal `get_or_create` methods in django though.
Returns a tuple with the `Follow` and either `True` or `False`
"""
if not self.is_following(user, obj):
return self.create(user, obj, **kwargs), True
return self.get_follows(obj).get(user=user), False
|
[
"def",
"get_or_create",
"(",
"self",
",",
"user",
",",
"obj",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"not",
"self",
".",
"is_following",
"(",
"user",
",",
"obj",
")",
":",
"return",
"self",
".",
"create",
"(",
"user",
",",
"obj",
",",
"*",
"*",
"kwargs",
")",
",",
"True",
"return",
"self",
".",
"get_follows",
"(",
"obj",
")",
".",
"get",
"(",
"user",
"=",
"user",
")",
",",
"False"
] |
Almost the same as `FollowManager.objects.create` - behaves the same
as the normal `get_or_create` methods in django though.
Returns a tuple with the `Follow` and either `True` or `False`
|
[
"Almost",
"the",
"same",
"as",
"FollowManager",
".",
"objects",
".",
"create",
"-",
"behaves",
"the",
"same",
"as",
"the",
"normal",
"get_or_create",
"methods",
"in",
"django",
"though",
"."
] |
765a4795e58f57fbf96efdb7838d0c7222db2e56
|
https://github.com/caffeinehit/django-follow/blob/765a4795e58f57fbf96efdb7838d0c7222db2e56/follow/models.py#L32-L42
|
train
|
caffeinehit/django-follow
|
follow/models.py
|
FollowManager.is_following
|
def is_following(self, user, obj):
    """ Returns `True` or `False` """
    # Anonymous visitors never follow anything.
    if isinstance(user, AnonymousUser):
        return False
    # Any matching row means the user follows the object.
    return self.get_follows(obj).filter(user=user).count() > 0
|
python
|
def is_following(self, user, obj):
""" Returns `True` or `False` """
if isinstance(user, AnonymousUser):
return False
return 0 < self.get_follows(obj).filter(user=user).count()
|
[
"def",
"is_following",
"(",
"self",
",",
"user",
",",
"obj",
")",
":",
"if",
"isinstance",
"(",
"user",
",",
"AnonymousUser",
")",
":",
"return",
"False",
"return",
"0",
"<",
"self",
".",
"get_follows",
"(",
"obj",
")",
".",
"filter",
"(",
"user",
"=",
"user",
")",
".",
"count",
"(",
")"
] |
Returns `True` or `False`
|
[
"Returns",
"True",
"or",
"False"
] |
765a4795e58f57fbf96efdb7838d0c7222db2e56
|
https://github.com/caffeinehit/django-follow/blob/765a4795e58f57fbf96efdb7838d0c7222db2e56/follow/models.py#L44-L48
|
train
|
caffeinehit/django-follow
|
follow/models.py
|
FollowManager.get_follows
|
def get_follows(self, model_or_obj_or_qs):
    """
    Returns all the followers of a model, an object or a queryset.
    """
    field = self.fname(model_or_obj_or_qs)
    if isinstance(model_or_obj_or_qs, QuerySet):
        # Followers of any object contained in the queryset.
        return self.filter(**{'%s__in' % field: model_or_obj_or_qs})
    if inspect.isclass(model_or_obj_or_qs):
        # Followers of any instance of the model class.
        return self.exclude(**{field: None})
    # Followers of this specific object.
    return self.filter(**{field: model_or_obj_or_qs})
|
python
|
def get_follows(self, model_or_obj_or_qs):
"""
Returns all the followers of a model, an object or a queryset.
"""
fname = self.fname(model_or_obj_or_qs)
if isinstance(model_or_obj_or_qs, QuerySet):
return self.filter(**{'%s__in' % fname: model_or_obj_or_qs})
if inspect.isclass(model_or_obj_or_qs):
return self.exclude(**{fname:None})
return self.filter(**{fname:model_or_obj_or_qs})
|
[
"def",
"get_follows",
"(",
"self",
",",
"model_or_obj_or_qs",
")",
":",
"fname",
"=",
"self",
".",
"fname",
"(",
"model_or_obj_or_qs",
")",
"if",
"isinstance",
"(",
"model_or_obj_or_qs",
",",
"QuerySet",
")",
":",
"return",
"self",
".",
"filter",
"(",
"*",
"*",
"{",
"'%s__in'",
"%",
"fname",
":",
"model_or_obj_or_qs",
"}",
")",
"if",
"inspect",
".",
"isclass",
"(",
"model_or_obj_or_qs",
")",
":",
"return",
"self",
".",
"exclude",
"(",
"*",
"*",
"{",
"fname",
":",
"None",
"}",
")",
"return",
"self",
".",
"filter",
"(",
"*",
"*",
"{",
"fname",
":",
"model_or_obj_or_qs",
"}",
")"
] |
Returns all the followers of a model, an object or a queryset.
|
[
"Returns",
"all",
"the",
"followers",
"of",
"a",
"model",
"an",
"object",
"or",
"a",
"queryset",
"."
] |
765a4795e58f57fbf96efdb7838d0c7222db2e56
|
https://github.com/caffeinehit/django-follow/blob/765a4795e58f57fbf96efdb7838d0c7222db2e56/follow/models.py#L50-L62
|
train
|
tknapen/FIRDeconvolution
|
src/FIRDeconvolution.py
|
FIRDeconvolution.create_event_regressors
|
def create_event_regressors(self, event_times_indices, covariates = None, durations = None):
    """create_event_regressors creates the part of the design matrix corresponding to one event type.
    :param event_times_indices: indices in the resampled data, on which the events occurred.
    :type event_times_indices: numpy array, (nr_events)
    :param covariates: covariates belonging to this event type. If None, covariates with a value of 1 for all events are created and used internally.
    :type covariates: numpy array, (nr_events)
    :param durations: durations belonging to this event type. If None, durations with a value of 1 sample for all events are created and used internally.
    :type durations: numpy array, (nr_events)
    :returns: This event type's part of the design matrix.
    """
    event_times_indices = np.asarray(event_times_indices)
    # check covariates.
    # BUGFIX: default sizes must come from the event_times_indices *argument*;
    # the original read self.event_times_indices, which is a dict of per-event-type
    # arrays (see create_design_matrix) and has no .shape attribute.
    if covariates is None:
        covariates = np.ones(event_times_indices.shape)
    # check/create durations, convert from seconds to samples time, and compute mean duration for this event type.
    if durations is None:
        durations = np.ones(event_times_indices.shape)
    else:
        durations = np.round(durations * self.deconvolution_frequency).astype(int)
    mean_duration = np.mean(durations)
    # set up output array
    regressors_for_event = np.zeros((self.deconvolution_interval_size, self.resampled_signal_size))
    # fill up output array by looping over events.
    for cov, eti, dur in zip(covariates, event_times_indices, durations):
        valid = True
        if eti < 0:
            self.logger.debug('deconv samples are starting before the data starts.')
            valid = False
        if eti + self.deconvolution_interval_size > self.resampled_signal_size:
            self.logger.debug('deconv samples are continuing after the data stops.')
            valid = False
        if eti > self.resampled_signal_size:
            self.logger.debug('event falls outside of the scope of the data.')
            valid = False
        if valid:  # only incorporate sensible events.
            # calculate the design matrix that belongs to this event:
            # a diagonal stick regressor scaled by the covariate value.
            this_event_design_matrix = (np.diag(np.ones(self.deconvolution_interval_size)) * cov)
            over_durations_dm = np.copy(this_event_design_matrix)
            if dur > 1:  # if this event has a non-unity duration, duplicate the stick regressors in the time direction
                for d in np.arange(1, dur):
                    over_durations_dm[d:] += this_event_design_matrix[:-d]
                # and correct for differences in durations between different regressor types.
                over_durations_dm /= mean_duration
            # add the designmatrix for this event to the full design matrix for this type of event.
            regressors_for_event[:, eti:int(eti + self.deconvolution_interval_size)] += over_durations_dm
    return regressors_for_event
|
python
|
def create_event_regressors(self, event_times_indices, covariates = None, durations = None):
"""create_event_regressors creates the part of the design matrix corresponding to one event type.
:param event_times_indices: indices in the resampled data, on which the events occurred.
:type event_times_indices: numpy array, (nr_events)
:param covariates: covariates belonging to this event type. If None, covariates with a value of 1 for all events are created and used internally.
:type covariates: numpy array, (nr_events)
:param durations: durations belonging to this event type. If None, durations with a value of 1 sample for all events are created and used internally.
:type durations: numpy array, (nr_events)
:returns: This event type's part of the design matrix.
"""
# check covariates
if covariates is None:
covariates = np.ones(self.event_times_indices.shape)
# check/create durations, convert from seconds to samples time, and compute mean duration for this event type.
if durations is None:
durations = np.ones(self.event_times_indices.shape)
else:
durations = np.round(durations*self.deconvolution_frequency).astype(int)
mean_duration = np.mean(durations)
# set up output array
regressors_for_event = np.zeros((self.deconvolution_interval_size, self.resampled_signal_size))
# fill up output array by looping over events.
for cov, eti, dur in zip(covariates, event_times_indices, durations):
valid = True
if eti < 0:
self.logger.debug('deconv samples are starting before the data starts.')
valid = False
if eti+self.deconvolution_interval_size > self.resampled_signal_size:
self.logger.debug('deconv samples are continuing after the data stops.')
valid = False
if eti > self.resampled_signal_size:
self.logger.debug('event falls outside of the scope of the data.')
valid = False
if valid: # only incorporate sensible events.
# calculate the design matrix that belongs to this event.
this_event_design_matrix = (np.diag(np.ones(self.deconvolution_interval_size)) * cov)
over_durations_dm = np.copy(this_event_design_matrix)
if dur > 1: # if this event has a non-unity duration, duplicate the stick regressors in the time direction
for d in np.arange(1,dur):
over_durations_dm[d:] += this_event_design_matrix[:-d]
# and correct for differences in durations between different regressor types.
over_durations_dm /= mean_duration
# add the designmatrix for this event to the full design matrix for this type of event.
regressors_for_event[:,eti:int(eti+self.deconvolution_interval_size)] += over_durations_dm
return regressors_for_event
|
[
"def",
"create_event_regressors",
"(",
"self",
",",
"event_times_indices",
",",
"covariates",
"=",
"None",
",",
"durations",
"=",
"None",
")",
":",
"# check covariates",
"if",
"covariates",
"is",
"None",
":",
"covariates",
"=",
"np",
".",
"ones",
"(",
"self",
".",
"event_times_indices",
".",
"shape",
")",
"# check/create durations, convert from seconds to samples time, and compute mean duration for this event type.",
"if",
"durations",
"is",
"None",
":",
"durations",
"=",
"np",
".",
"ones",
"(",
"self",
".",
"event_times_indices",
".",
"shape",
")",
"else",
":",
"durations",
"=",
"np",
".",
"round",
"(",
"durations",
"*",
"self",
".",
"deconvolution_frequency",
")",
".",
"astype",
"(",
"int",
")",
"mean_duration",
"=",
"np",
".",
"mean",
"(",
"durations",
")",
"# set up output array",
"regressors_for_event",
"=",
"np",
".",
"zeros",
"(",
"(",
"self",
".",
"deconvolution_interval_size",
",",
"self",
".",
"resampled_signal_size",
")",
")",
"# fill up output array by looping over events.",
"for",
"cov",
",",
"eti",
",",
"dur",
"in",
"zip",
"(",
"covariates",
",",
"event_times_indices",
",",
"durations",
")",
":",
"valid",
"=",
"True",
"if",
"eti",
"<",
"0",
":",
"self",
".",
"logger",
".",
"debug",
"(",
"'deconv samples are starting before the data starts.'",
")",
"valid",
"=",
"False",
"if",
"eti",
"+",
"self",
".",
"deconvolution_interval_size",
">",
"self",
".",
"resampled_signal_size",
":",
"self",
".",
"logger",
".",
"debug",
"(",
"'deconv samples are continuing after the data stops.'",
")",
"valid",
"=",
"False",
"if",
"eti",
">",
"self",
".",
"resampled_signal_size",
":",
"self",
".",
"logger",
".",
"debug",
"(",
"'event falls outside of the scope of the data.'",
")",
"valid",
"=",
"False",
"if",
"valid",
":",
"# only incorporate sensible events.",
"# calculate the design matrix that belongs to this event.",
"this_event_design_matrix",
"=",
"(",
"np",
".",
"diag",
"(",
"np",
".",
"ones",
"(",
"self",
".",
"deconvolution_interval_size",
")",
")",
"*",
"cov",
")",
"over_durations_dm",
"=",
"np",
".",
"copy",
"(",
"this_event_design_matrix",
")",
"if",
"dur",
">",
"1",
":",
"# if this event has a non-unity duration, duplicate the stick regressors in the time direction",
"for",
"d",
"in",
"np",
".",
"arange",
"(",
"1",
",",
"dur",
")",
":",
"over_durations_dm",
"[",
"d",
":",
"]",
"+=",
"this_event_design_matrix",
"[",
":",
"-",
"d",
"]",
"# and correct for differences in durations between different regressor types.",
"over_durations_dm",
"/=",
"mean_duration",
"# add the designmatrix for this event to the full design matrix for this type of event.",
"regressors_for_event",
"[",
":",
",",
"eti",
":",
"int",
"(",
"eti",
"+",
"self",
".",
"deconvolution_interval_size",
")",
"]",
"+=",
"over_durations_dm",
"return",
"regressors_for_event"
] |
create_event_regressors creates the part of the design matrix corresponding to one event type.
:param event_times_indices: indices in the resampled data, on which the events occurred.
:type event_times_indices: numpy array, (nr_events)
:param covariates: covariates belonging to this event type. If None, covariates with a value of 1 for all events are created and used internally.
:type covariates: numpy array, (nr_events)
:param durations: durations belonging to this event type. If None, durations with a value of 1 sample for all events are created and used internally.
:type durations: numpy array, (nr_events)
:returns: This event type's part of the design matrix.
|
[
"create_event_regressors",
"creates",
"the",
"part",
"of",
"the",
"design",
"matrix",
"corresponding",
"to",
"one",
"event",
"type",
"."
] |
6263496a356c449062fe4c216fef56541f6dc151
|
https://github.com/tknapen/FIRDeconvolution/blob/6263496a356c449062fe4c216fef56541f6dc151/src/FIRDeconvolution.py#L121-L172
|
train
|
tknapen/FIRDeconvolution
|
src/FIRDeconvolution.py
|
FIRDeconvolution.create_design_matrix
|
def create_design_matrix(self, demean = False, intercept = True):
    """create_design_matrix calls create_event_regressors for each of the covariates in the self.covariates dict. self.designmatrix is created and is shaped (nr_regressors, self.resampled_signal.shape[-1])

    :param demean: if True, subtract each regressor row's mean from it.
    :param intercept: if True, append an all-ones intercept regressor as the last row.
    """
    self.design_matrix = np.zeros((int(self.number_of_event_types * self.deconvolution_interval_size), self.resampled_signal_size))
    for i, covariate in enumerate(self.covariates.keys()):
        # document the creation of the designmatrix step by step
        self.logger.debug('creating regressor for ' + covariate)
        indices = np.arange(i * self.deconvolution_interval_size, (i + 1) * self.deconvolution_interval_size, dtype = int)
        # dot-separated encoding of events and covariates: the part before the
        # first '.' names the event type.
        # BUGFIX: the original guard `len(covariate.split('.')) > 0` was always
        # true (str.split never returns an empty list), so its else-branch was
        # dead code; taking split('.')[0] unconditionally is equivalent.
        which_event_time_indices = covariate.split('.')[0]
        self.design_matrix[indices] = self.create_event_regressors(
            self.event_times_indices[which_event_time_indices],
            self.covariates[covariate],
            self.durations[which_event_time_indices])
    if demean:
        # we expect the data to be demeaned.
        # it's an option whether the regressors should be, too
        self.design_matrix = (self.design_matrix.T - self.design_matrix.mean(axis = -1)).T
    if intercept:
        # similarly, intercept is a choice.
        self.design_matrix = np.vstack((self.design_matrix, np.ones((1, self.design_matrix.shape[-1]))))
    self.logger.debug('created %s design_matrix' % (str(self.design_matrix.shape)))
|
python
|
def create_design_matrix(self, demean = False, intercept = True):
"""create_design_matrix calls create_event_regressors for each of the covariates in the self.covariates dict. self.designmatrix is created and is shaped (nr_regressors, self.resampled_signal.shape[-1])
"""
self.design_matrix = np.zeros((int(self.number_of_event_types*self.deconvolution_interval_size), self.resampled_signal_size))
for i, covariate in enumerate(self.covariates.keys()):
# document the creation of the designmatrix step by step
self.logger.debug('creating regressor for ' + covariate)
indices = np.arange(i*self.deconvolution_interval_size,(i+1)*self.deconvolution_interval_size, dtype = int)
# here, we implement the dot-separated encoding of events and covariates
if len(covariate.split('.')) > 0:
which_event_time_indices = covariate.split('.')[0]
else:
which_event_time_indices = covariate
self.design_matrix[indices] = self.create_event_regressors( self.event_times_indices[which_event_time_indices],
self.covariates[covariate],
self.durations[which_event_time_indices])
if demean:
# we expect the data to be demeaned.
# it's an option whether the regressors should be, too
self.design_matrix = (self.design_matrix.T - self.design_matrix.mean(axis = -1)).T
if intercept:
# similarly, intercept is a choice.
self.design_matrix = np.vstack((self.design_matrix, np.ones((1,self.design_matrix.shape[-1]))))
self.logger.debug('created %s design_matrix' % (str(self.design_matrix.shape)))
|
[
"def",
"create_design_matrix",
"(",
"self",
",",
"demean",
"=",
"False",
",",
"intercept",
"=",
"True",
")",
":",
"self",
".",
"design_matrix",
"=",
"np",
".",
"zeros",
"(",
"(",
"int",
"(",
"self",
".",
"number_of_event_types",
"*",
"self",
".",
"deconvolution_interval_size",
")",
",",
"self",
".",
"resampled_signal_size",
")",
")",
"for",
"i",
",",
"covariate",
"in",
"enumerate",
"(",
"self",
".",
"covariates",
".",
"keys",
"(",
")",
")",
":",
"# document the creation of the designmatrix step by step",
"self",
".",
"logger",
".",
"debug",
"(",
"'creating regressor for '",
"+",
"covariate",
")",
"indices",
"=",
"np",
".",
"arange",
"(",
"i",
"*",
"self",
".",
"deconvolution_interval_size",
",",
"(",
"i",
"+",
"1",
")",
"*",
"self",
".",
"deconvolution_interval_size",
",",
"dtype",
"=",
"int",
")",
"# here, we implement the dot-separated encoding of events and covariates",
"if",
"len",
"(",
"covariate",
".",
"split",
"(",
"'.'",
")",
")",
">",
"0",
":",
"which_event_time_indices",
"=",
"covariate",
".",
"split",
"(",
"'.'",
")",
"[",
"0",
"]",
"else",
":",
"which_event_time_indices",
"=",
"covariate",
"self",
".",
"design_matrix",
"[",
"indices",
"]",
"=",
"self",
".",
"create_event_regressors",
"(",
"self",
".",
"event_times_indices",
"[",
"which_event_time_indices",
"]",
",",
"self",
".",
"covariates",
"[",
"covariate",
"]",
",",
"self",
".",
"durations",
"[",
"which_event_time_indices",
"]",
")",
"if",
"demean",
":",
"# we expect the data to be demeaned. ",
"# it's an option whether the regressors should be, too",
"self",
".",
"design_matrix",
"=",
"(",
"self",
".",
"design_matrix",
".",
"T",
"-",
"self",
".",
"design_matrix",
".",
"mean",
"(",
"axis",
"=",
"-",
"1",
")",
")",
".",
"T",
"if",
"intercept",
":",
"# similarly, intercept is a choice.",
"self",
".",
"design_matrix",
"=",
"np",
".",
"vstack",
"(",
"(",
"self",
".",
"design_matrix",
",",
"np",
".",
"ones",
"(",
"(",
"1",
",",
"self",
".",
"design_matrix",
".",
"shape",
"[",
"-",
"1",
"]",
")",
")",
")",
")",
"self",
".",
"logger",
".",
"debug",
"(",
"'created %s design_matrix'",
"%",
"(",
"str",
"(",
"self",
".",
"design_matrix",
".",
"shape",
")",
")",
")"
] |
create_design_matrix calls create_event_regressors for each of the covariates in the self.covariates dict. self.designmatrix is created and is shaped (nr_regressors, self.resampled_signal.shape[-1])
|
[
"create_design_matrix",
"calls",
"create_event_regressors",
"for",
"each",
"of",
"the",
"covariates",
"in",
"the",
"self",
".",
"covariates",
"dict",
".",
"self",
".",
"designmatrix",
"is",
"created",
"and",
"is",
"shaped",
"(",
"nr_regressors",
"self",
".",
"resampled_signal",
".",
"shape",
"[",
"-",
"1",
"]",
")"
] |
6263496a356c449062fe4c216fef56541f6dc151
|
https://github.com/tknapen/FIRDeconvolution/blob/6263496a356c449062fe4c216fef56541f6dc151/src/FIRDeconvolution.py#L174-L200
|
train
|
tknapen/FIRDeconvolution
|
src/FIRDeconvolution.py
|
FIRDeconvolution.add_continuous_regressors_to_design_matrix
|
def add_continuous_regressors_to_design_matrix(self, regressors):
    """add_continuous_regressors_to_design_matrix appends continuously sampled regressors to the existing design matrix. One uses this addition to the design matrix when one expects the data to contain nuisance factors that aren't tied to the moments of specific events. For instance, in fMRI analysis this allows us to add cardiac / respiratory regressors, as well as tissue and head motion timecourses to the designmatrix.
    :param regressors: the signal to be appended to the design matrix.
    :type regressors: numpy array, with shape equal to (nr_regressors, self.resampled_signal.shape[-1])
    """
    old_shape = self.design_matrix.shape
    # a single 1D regressor is promoted to a one-row 2D array
    if len(regressors.shape) == 1:
        regressors = regressors[np.newaxis, :]
    # warn (but do not fail) when the regressor length doesn't match the signal
    if regressors.shape[1] != self.resampled_signal.shape[1]:
        self.logger.warning('additional regressor shape %s does not conform to designmatrix shape %s' % (regressors.shape, self.resampled_signal.shape))
    # stack the new regressors underneath the existing design matrix
    self.design_matrix = np.vstack((self.design_matrix, regressors))
    self.logger.debug('added %s continuous regressors to %s design_matrix, shape now %s' % (str(regressors.shape), str(old_shape), str(self.design_matrix.shape)))
|
python
|
def add_continuous_regressors_to_design_matrix(self, regressors):
"""add_continuous_regressors_to_design_matrix appends continuously sampled regressors to the existing design matrix. One uses this addition to the design matrix when one expects the data to contain nuisance factors that aren't tied to the moments of specific events. For instance, in fMRI analysis this allows us to add cardiac / respiratory regressors, as well as tissue and head motion timecourses to the designmatrix.
:param regressors: the signal to be appended to the design matrix.
:type regressors: numpy array, with shape equal to (nr_regressors, self.resampled_signal.shape[-1])
"""
previous_design_matrix_shape = self.design_matrix.shape
if len(regressors.shape) == 1:
regressors = regressors[np.newaxis, :]
if regressors.shape[1] != self.resampled_signal.shape[1]:
self.logger.warning('additional regressor shape %s does not conform to designmatrix shape %s' % (regressors.shape, self.resampled_signal.shape))
# and, an vstack append
self.design_matrix = np.vstack((self.design_matrix, regressors))
self.logger.debug('added %s continuous regressors to %s design_matrix, shape now %s' % (str(regressors.shape), str(previous_design_matrix_shape), str(self.design_matrix.shape)))
|
[
"def",
"add_continuous_regressors_to_design_matrix",
"(",
"self",
",",
"regressors",
")",
":",
"previous_design_matrix_shape",
"=",
"self",
".",
"design_matrix",
".",
"shape",
"if",
"len",
"(",
"regressors",
".",
"shape",
")",
"==",
"1",
":",
"regressors",
"=",
"regressors",
"[",
"np",
".",
"newaxis",
",",
":",
"]",
"if",
"regressors",
".",
"shape",
"[",
"1",
"]",
"!=",
"self",
".",
"resampled_signal",
".",
"shape",
"[",
"1",
"]",
":",
"self",
".",
"logger",
".",
"warning",
"(",
"'additional regressor shape %s does not conform to designmatrix shape %s'",
"%",
"(",
"regressors",
".",
"shape",
",",
"self",
".",
"resampled_signal",
".",
"shape",
")",
")",
"# and, an vstack append",
"self",
".",
"design_matrix",
"=",
"np",
".",
"vstack",
"(",
"(",
"self",
".",
"design_matrix",
",",
"regressors",
")",
")",
"self",
".",
"logger",
".",
"debug",
"(",
"'added %s continuous regressors to %s design_matrix, shape now %s'",
"%",
"(",
"str",
"(",
"regressors",
".",
"shape",
")",
",",
"str",
"(",
"previous_design_matrix_shape",
")",
",",
"str",
"(",
"self",
".",
"design_matrix",
".",
"shape",
")",
")",
")"
] |
add_continuous_regressors_to_design_matrix appends continuously sampled regressors to the existing design matrix. One uses this addition to the design matrix when one expects the data to contain nuisance factors that aren't tied to the moments of specific events. For instance, in fMRI analysis this allows us to add cardiac / respiratory regressors, as well as tissue and head motion timecourses to the designmatrix.
:param regressors: the signal to be appended to the design matrix.
:type regressors: numpy array, with shape equal to (nr_regressors, self.resampled_signal.shape[-1])
|
[
"add_continuous_regressors_to_design_matrix",
"appends",
"continuously",
"sampled",
"regressors",
"to",
"the",
"existing",
"design",
"matrix",
".",
"One",
"uses",
"this",
"addition",
"to",
"the",
"design",
"matrix",
"when",
"one",
"expects",
"the",
"data",
"to",
"contain",
"nuisance",
"factors",
"that",
"aren",
"t",
"tied",
"to",
"the",
"moments",
"of",
"specific",
"events",
".",
"For",
"instance",
"in",
"fMRI",
"analysis",
"this",
"allows",
"us",
"to",
"add",
"cardiac",
"/",
"respiratory",
"regressors",
"as",
"well",
"as",
"tissue",
"and",
"head",
"motion",
"timecourses",
"to",
"the",
"designmatrix",
".",
":",
"param",
"regressors",
":",
"the",
"signal",
"to",
"be",
"appended",
"to",
"the",
"design",
"matrix",
".",
":",
"type",
"regressors",
":",
"numpy",
"array",
"with",
"shape",
"equal",
"to",
"(",
"nr_regressors",
"self",
".",
"resampled_signal",
".",
"shape",
"[",
"-",
"1",
"]",
")"
] |
6263496a356c449062fe4c216fef56541f6dc151
|
https://github.com/tknapen/FIRDeconvolution/blob/6263496a356c449062fe4c216fef56541f6dc151/src/FIRDeconvolution.py#L202-L215
|
train
|
tknapen/FIRDeconvolution
|
src/FIRDeconvolution.py
|
FIRDeconvolution.regress
|
def regress(self, method = 'lstsq'):
    """regress performs linear least squares regression of the designmatrix on the data.
    :param method: method, or backend to be used for the regression analysis.
    :type method: string, one of ['lstsq', 'sm_ols']
    :returns: instance variables 'betas' (nr_betas x nr_signals) and 'residuals' (nr_signals x nr_samples) are created.
    """
    # BUGFIX: compare strings with '==' instead of 'is'. Identity comparison of
    # string literals relies on CPython interning (implementation-defined) and
    # raises a SyntaxWarning on modern Python.
    if method == 'lstsq':
        self.betas, residuals_sum, rank, s = LA.lstsq(self.design_matrix.T, self.resampled_signal.T)
        self.residuals = self.resampled_signal - self.predict_from_design_matrix(self.design_matrix)
    elif method == 'sm_ols':
        # statsmodels is imported lazily so the dependency stays optional.
        import statsmodels.api as sm
        assert self.resampled_signal.shape[0] == 1, \
            'signal input into statsmodels OLS cannot contain multiple signals at once, present shape %s' % str(self.resampled_signal.shape)
        model = sm.OLS(np.squeeze(self.resampled_signal), self.design_matrix.T)
        results = model.fit()
        # make betas and residuals that are compatible with the LA.lstsq type.
        self.betas = np.array(results.params).reshape((self.design_matrix.shape[0], self.resampled_signal.shape[0]))
        self.residuals = np.array(results.resid).reshape(self.resampled_signal.shape)
    self.logger.debug('performed %s regression on %s design_matrix and %s signal' % (method, str(self.design_matrix.shape), str(self.resampled_signal.shape)))
|
python
|
def regress(self, method = 'lstsq'):
"""regress performs linear least squares regression of the designmatrix on the data.
:param method: method, or backend to be used for the regression analysis.
:type method: string, one of ['lstsq', 'sm_ols']
:returns: instance variables 'betas' (nr_betas x nr_signals) and 'residuals' (nr_signals x nr_samples) are created.
"""
if method is 'lstsq':
self.betas, residuals_sum, rank, s = LA.lstsq(self.design_matrix.T, self.resampled_signal.T)
self.residuals = self.resampled_signal - self.predict_from_design_matrix(self.design_matrix)
elif method is 'sm_ols':
import statsmodels.api as sm
assert self.resampled_signal.shape[0] == 1, \
'signal input into statsmodels OLS cannot contain multiple signals at once, present shape %s' % str(self.resampled_signal.shape)
model = sm.OLS(np.squeeze(self.resampled_signal),self.design_matrix.T)
results = model.fit()
# make betas and residuals that are compatible with the LA.lstsq type.
self.betas = np.array(results.params).reshape((self.design_matrix.shape[0], self.resampled_signal.shape[0]))
self.residuals = np.array(results.resid).reshape(self.resampled_signal.shape)
self.logger.debug('performed %s regression on %s design_matrix and %s signal' % (method, str(self.design_matrix.shape), str(self.resampled_signal.shape)))
|
[
"def",
"regress",
"(",
"self",
",",
"method",
"=",
"'lstsq'",
")",
":",
"if",
"method",
"is",
"'lstsq'",
":",
"self",
".",
"betas",
",",
"residuals_sum",
",",
"rank",
",",
"s",
"=",
"LA",
".",
"lstsq",
"(",
"self",
".",
"design_matrix",
".",
"T",
",",
"self",
".",
"resampled_signal",
".",
"T",
")",
"self",
".",
"residuals",
"=",
"self",
".",
"resampled_signal",
"-",
"self",
".",
"predict_from_design_matrix",
"(",
"self",
".",
"design_matrix",
")",
"elif",
"method",
"is",
"'sm_ols'",
":",
"import",
"statsmodels",
".",
"api",
"as",
"sm",
"assert",
"self",
".",
"resampled_signal",
".",
"shape",
"[",
"0",
"]",
"==",
"1",
",",
"'signal input into statsmodels OLS cannot contain multiple signals at once, present shape %s'",
"%",
"str",
"(",
"self",
".",
"resampled_signal",
".",
"shape",
")",
"model",
"=",
"sm",
".",
"OLS",
"(",
"np",
".",
"squeeze",
"(",
"self",
".",
"resampled_signal",
")",
",",
"self",
".",
"design_matrix",
".",
"T",
")",
"results",
"=",
"model",
".",
"fit",
"(",
")",
"# make betas and residuals that are compatible with the LA.lstsq type.",
"self",
".",
"betas",
"=",
"np",
".",
"array",
"(",
"results",
".",
"params",
")",
".",
"reshape",
"(",
"(",
"self",
".",
"design_matrix",
".",
"shape",
"[",
"0",
"]",
",",
"self",
".",
"resampled_signal",
".",
"shape",
"[",
"0",
"]",
")",
")",
"self",
".",
"residuals",
"=",
"np",
".",
"array",
"(",
"results",
".",
"resid",
")",
".",
"reshape",
"(",
"self",
".",
"resampled_signal",
".",
"shape",
")",
"self",
".",
"logger",
".",
"debug",
"(",
"'performed %s regression on %s design_matrix and %s signal'",
"%",
"(",
"method",
",",
"str",
"(",
"self",
".",
"design_matrix",
".",
"shape",
")",
",",
"str",
"(",
"self",
".",
"resampled_signal",
".",
"shape",
")",
")",
")"
] |
regress performs linear least squares regression of the designmatrix on the data.
:param method: method, or backend to be used for the regression analysis.
:type method: string, one of ['lstsq', 'sm_ols']
:returns: instance variables 'betas' (nr_betas x nr_signals) and 'residuals' (nr_signals x nr_samples) are created.
|
[
"regress",
"performs",
"linear",
"least",
"squares",
"regression",
"of",
"the",
"designmatrix",
"on",
"the",
"data",
"."
] |
6263496a356c449062fe4c216fef56541f6dc151
|
https://github.com/tknapen/FIRDeconvolution/blob/6263496a356c449062fe4c216fef56541f6dc151/src/FIRDeconvolution.py#L217-L239
|
train
|
tknapen/FIRDeconvolution
|
src/FIRDeconvolution.py
|
FIRDeconvolution.ridge_regress
|
def ridge_regress(self, cv = 20, alphas = None ):
"""perform k-folds cross-validated ridge regression on the design_matrix. To be used when the design matrix contains very collinear regressors. For cross-validation and ridge fitting, we use sklearn's RidgeCV functionality. Note: intercept is not fit, and data are not prenormalized.
:param cv: cross-validated folds, inherits RidgeCV cv argument's functionality.
:type cv: int, standard = 20
:param alphas: values of penalization parameter to be traversed by the procedure, inherits RidgeCV cv argument's functionality. Standard value, when parameter is None, is np.logspace(7, 0, 20)
:type alphas: numpy array, from >0 to 1.
:returns: instance variables 'betas' (nr_betas x nr_signals) and 'residuals' (nr_signals x nr_samples) are created.
"""
if alphas is None:
alphas = np.logspace(7, 0, 20)
self.rcv = linear_model.RidgeCV(alphas=alphas,
fit_intercept=False,
cv=cv)
self.rcv.fit(self.design_matrix.T, self.resampled_signal.T)
self.betas = self.rcv.coef_.T
self.residuals = self.resampled_signal - self.rcv.predict(self.design_matrix.T)
self.logger.debug('performed ridge regression on %s design_matrix and %s signal, resulting alpha value is %f' % (str(self.design_matrix.shape), str(self.resampled_signal.shape), self.rcv.alpha_))
|
python
|
def ridge_regress(self, cv = 20, alphas = None ):
"""perform k-folds cross-validated ridge regression on the design_matrix. To be used when the design matrix contains very collinear regressors. For cross-validation and ridge fitting, we use sklearn's RidgeCV functionality. Note: intercept is not fit, and data are not prenormalized.
:param cv: cross-validated folds, inherits RidgeCV cv argument's functionality.
:type cv: int, standard = 20
:param alphas: values of penalization parameter to be traversed by the procedure, inherits RidgeCV cv argument's functionality. Standard value, when parameter is None, is np.logspace(7, 0, 20)
:type alphas: numpy array, from >0 to 1.
:returns: instance variables 'betas' (nr_betas x nr_signals) and 'residuals' (nr_signals x nr_samples) are created.
"""
if alphas is None:
alphas = np.logspace(7, 0, 20)
self.rcv = linear_model.RidgeCV(alphas=alphas,
fit_intercept=False,
cv=cv)
self.rcv.fit(self.design_matrix.T, self.resampled_signal.T)
self.betas = self.rcv.coef_.T
self.residuals = self.resampled_signal - self.rcv.predict(self.design_matrix.T)
self.logger.debug('performed ridge regression on %s design_matrix and %s signal, resulting alpha value is %f' % (str(self.design_matrix.shape), str(self.resampled_signal.shape), self.rcv.alpha_))
|
[
"def",
"ridge_regress",
"(",
"self",
",",
"cv",
"=",
"20",
",",
"alphas",
"=",
"None",
")",
":",
"if",
"alphas",
"is",
"None",
":",
"alphas",
"=",
"np",
".",
"logspace",
"(",
"7",
",",
"0",
",",
"20",
")",
"self",
".",
"rcv",
"=",
"linear_model",
".",
"RidgeCV",
"(",
"alphas",
"=",
"alphas",
",",
"fit_intercept",
"=",
"False",
",",
"cv",
"=",
"cv",
")",
"self",
".",
"rcv",
".",
"fit",
"(",
"self",
".",
"design_matrix",
".",
"T",
",",
"self",
".",
"resampled_signal",
".",
"T",
")",
"self",
".",
"betas",
"=",
"self",
".",
"rcv",
".",
"coef_",
".",
"T",
"self",
".",
"residuals",
"=",
"self",
".",
"resampled_signal",
"-",
"self",
".",
"rcv",
".",
"predict",
"(",
"self",
".",
"design_matrix",
".",
"T",
")",
"self",
".",
"logger",
".",
"debug",
"(",
"'performed ridge regression on %s design_matrix and %s signal, resulting alpha value is %f'",
"%",
"(",
"str",
"(",
"self",
".",
"design_matrix",
".",
"shape",
")",
",",
"str",
"(",
"self",
".",
"resampled_signal",
".",
"shape",
")",
",",
"self",
".",
"rcv",
".",
"alpha_",
")",
")"
] |
perform k-folds cross-validated ridge regression on the design_matrix. To be used when the design matrix contains very collinear regressors. For cross-validation and ridge fitting, we use sklearn's RidgeCV functionality. Note: intercept is not fit, and data are not prenormalized.
:param cv: cross-validated folds, inherits RidgeCV cv argument's functionality.
:type cv: int, standard = 20
:param alphas: values of penalization parameter to be traversed by the procedure, inherits RidgeCV cv argument's functionality. Standard value, when parameter is None, is np.logspace(7, 0, 20)
:type alphas: numpy array, from >0 to 1.
:returns: instance variables 'betas' (nr_betas x nr_signals) and 'residuals' (nr_signals x nr_samples) are created.
|
[
"perform",
"k",
"-",
"folds",
"cross",
"-",
"validated",
"ridge",
"regression",
"on",
"the",
"design_matrix",
".",
"To",
"be",
"used",
"when",
"the",
"design",
"matrix",
"contains",
"very",
"collinear",
"regressors",
".",
"For",
"cross",
"-",
"validation",
"and",
"ridge",
"fitting",
"we",
"use",
"sklearn",
"s",
"RidgeCV",
"functionality",
".",
"Note",
":",
"intercept",
"is",
"not",
"fit",
"and",
"data",
"are",
"not",
"prenormalized",
"."
] |
6263496a356c449062fe4c216fef56541f6dc151
|
https://github.com/tknapen/FIRDeconvolution/blob/6263496a356c449062fe4c216fef56541f6dc151/src/FIRDeconvolution.py#L241-L260
|
train
|
tknapen/FIRDeconvolution
|
src/FIRDeconvolution.py
|
FIRDeconvolution.betas_for_cov
|
def betas_for_cov(self, covariate = '0'):
"""betas_for_cov returns the beta values (i.e. IRF) associated with a specific covariate.
:param covariate: name of covariate.
:type covariate: string
"""
# find the index in the designmatrix of the current covariate
this_covariate_index = list(self.covariates.keys()).index(covariate)
return self.betas[int(this_covariate_index*self.deconvolution_interval_size):int((this_covariate_index+1)*self.deconvolution_interval_size)]
|
python
|
def betas_for_cov(self, covariate = '0'):
"""betas_for_cov returns the beta values (i.e. IRF) associated with a specific covariate.
:param covariate: name of covariate.
:type covariate: string
"""
# find the index in the designmatrix of the current covariate
this_covariate_index = list(self.covariates.keys()).index(covariate)
return self.betas[int(this_covariate_index*self.deconvolution_interval_size):int((this_covariate_index+1)*self.deconvolution_interval_size)]
|
[
"def",
"betas_for_cov",
"(",
"self",
",",
"covariate",
"=",
"'0'",
")",
":",
"# find the index in the designmatrix of the current covariate",
"this_covariate_index",
"=",
"list",
"(",
"self",
".",
"covariates",
".",
"keys",
"(",
")",
")",
".",
"index",
"(",
"covariate",
")",
"return",
"self",
".",
"betas",
"[",
"int",
"(",
"this_covariate_index",
"*",
"self",
".",
"deconvolution_interval_size",
")",
":",
"int",
"(",
"(",
"this_covariate_index",
"+",
"1",
")",
"*",
"self",
".",
"deconvolution_interval_size",
")",
"]"
] |
betas_for_cov returns the beta values (i.e. IRF) associated with a specific covariate.
:param covariate: name of covariate.
:type covariate: string
|
[
"betas_for_cov",
"returns",
"the",
"beta",
"values",
"(",
"i",
".",
"e",
".",
"IRF",
")",
"associated",
"with",
"a",
"specific",
"covariate",
"."
] |
6263496a356c449062fe4c216fef56541f6dc151
|
https://github.com/tknapen/FIRDeconvolution/blob/6263496a356c449062fe4c216fef56541f6dc151/src/FIRDeconvolution.py#L262-L270
|
train
|
tknapen/FIRDeconvolution
|
src/FIRDeconvolution.py
|
FIRDeconvolution.betas_for_events
|
def betas_for_events(self):
"""betas_for_events creates an internal self.betas_per_event_type array, of (nr_covariates x self.devonvolution_interval_size),
which holds the outcome betas per event type,in the order generated by self.covariates.keys()
"""
self.betas_per_event_type = np.zeros((len(self.covariates), self.deconvolution_interval_size, self.resampled_signal.shape[0]))
for i, covariate in enumerate(self.covariates.keys()):
self.betas_per_event_type[i] = self.betas_for_cov(covariate)
|
python
|
def betas_for_events(self):
"""betas_for_events creates an internal self.betas_per_event_type array, of (nr_covariates x self.devonvolution_interval_size),
which holds the outcome betas per event type,in the order generated by self.covariates.keys()
"""
self.betas_per_event_type = np.zeros((len(self.covariates), self.deconvolution_interval_size, self.resampled_signal.shape[0]))
for i, covariate in enumerate(self.covariates.keys()):
self.betas_per_event_type[i] = self.betas_for_cov(covariate)
|
[
"def",
"betas_for_events",
"(",
"self",
")",
":",
"self",
".",
"betas_per_event_type",
"=",
"np",
".",
"zeros",
"(",
"(",
"len",
"(",
"self",
".",
"covariates",
")",
",",
"self",
".",
"deconvolution_interval_size",
",",
"self",
".",
"resampled_signal",
".",
"shape",
"[",
"0",
"]",
")",
")",
"for",
"i",
",",
"covariate",
"in",
"enumerate",
"(",
"self",
".",
"covariates",
".",
"keys",
"(",
")",
")",
":",
"self",
".",
"betas_per_event_type",
"[",
"i",
"]",
"=",
"self",
".",
"betas_for_cov",
"(",
"covariate",
")"
] |
betas_for_events creates an internal self.betas_per_event_type array, of (nr_covariates x self.devonvolution_interval_size),
which holds the outcome betas per event type,in the order generated by self.covariates.keys()
|
[
"betas_for_events",
"creates",
"an",
"internal",
"self",
".",
"betas_per_event_type",
"array",
"of",
"(",
"nr_covariates",
"x",
"self",
".",
"devonvolution_interval_size",
")",
"which",
"holds",
"the",
"outcome",
"betas",
"per",
"event",
"type",
"in",
"the",
"order",
"generated",
"by",
"self",
".",
"covariates",
".",
"keys",
"()"
] |
6263496a356c449062fe4c216fef56541f6dc151
|
https://github.com/tknapen/FIRDeconvolution/blob/6263496a356c449062fe4c216fef56541f6dc151/src/FIRDeconvolution.py#L272-L278
|
train
|
tknapen/FIRDeconvolution
|
src/FIRDeconvolution.py
|
FIRDeconvolution.predict_from_design_matrix
|
def predict_from_design_matrix(self, design_matrix):
"""predict_from_design_matrix predicts signals given a design matrix.
:param design_matrix: design matrix from which to predict a signal.
:type design_matrix: numpy array, (nr_samples x betas.shape)
:returns: predicted signal(s)
:rtype: numpy array (nr_signals x nr_samples)
"""
# check if we have already run the regression - which is necessary
assert hasattr(self, 'betas'), 'no betas found, please run regression before prediction'
assert design_matrix.shape[0] == self.betas.shape[0], \
'designmatrix needs to have the same number of regressors as the betas already calculated'
# betas = np.copy(self.betas.T, order="F", dtype = np.float32)
# f_design_matrix = np.copy(design_matrix, order = "F", dtype = np.float32)
prediction = np.dot(self.betas.astype(np.float32).T, design_matrix.astype(np.float32))
return prediction
|
python
|
def predict_from_design_matrix(self, design_matrix):
"""predict_from_design_matrix predicts signals given a design matrix.
:param design_matrix: design matrix from which to predict a signal.
:type design_matrix: numpy array, (nr_samples x betas.shape)
:returns: predicted signal(s)
:rtype: numpy array (nr_signals x nr_samples)
"""
# check if we have already run the regression - which is necessary
assert hasattr(self, 'betas'), 'no betas found, please run regression before prediction'
assert design_matrix.shape[0] == self.betas.shape[0], \
'designmatrix needs to have the same number of regressors as the betas already calculated'
# betas = np.copy(self.betas.T, order="F", dtype = np.float32)
# f_design_matrix = np.copy(design_matrix, order = "F", dtype = np.float32)
prediction = np.dot(self.betas.astype(np.float32).T, design_matrix.astype(np.float32))
return prediction
|
[
"def",
"predict_from_design_matrix",
"(",
"self",
",",
"design_matrix",
")",
":",
"# check if we have already run the regression - which is necessary",
"assert",
"hasattr",
"(",
"self",
",",
"'betas'",
")",
",",
"'no betas found, please run regression before prediction'",
"assert",
"design_matrix",
".",
"shape",
"[",
"0",
"]",
"==",
"self",
".",
"betas",
".",
"shape",
"[",
"0",
"]",
",",
"'designmatrix needs to have the same number of regressors as the betas already calculated'",
"# betas = np.copy(self.betas.T, order=\"F\", dtype = np.float32)",
"# f_design_matrix = np.copy(design_matrix, order = \"F\", dtype = np.float32)",
"prediction",
"=",
"np",
".",
"dot",
"(",
"self",
".",
"betas",
".",
"astype",
"(",
"np",
".",
"float32",
")",
".",
"T",
",",
"design_matrix",
".",
"astype",
"(",
"np",
".",
"float32",
")",
")",
"return",
"prediction"
] |
predict_from_design_matrix predicts signals given a design matrix.
:param design_matrix: design matrix from which to predict a signal.
:type design_matrix: numpy array, (nr_samples x betas.shape)
:returns: predicted signal(s)
:rtype: numpy array (nr_signals x nr_samples)
|
[
"predict_from_design_matrix",
"predicts",
"signals",
"given",
"a",
"design",
"matrix",
"."
] |
6263496a356c449062fe4c216fef56541f6dc151
|
https://github.com/tknapen/FIRDeconvolution/blob/6263496a356c449062fe4c216fef56541f6dc151/src/FIRDeconvolution.py#L280-L298
|
train
|
tknapen/FIRDeconvolution
|
src/FIRDeconvolution.py
|
FIRDeconvolution.calculate_rsq
|
def calculate_rsq(self):
"""calculate_rsq calculates coefficient of determination, or r-squared, defined here as 1.0 - SS_res / SS_tot. rsq is only calculated for those timepoints in the data for which the design matrix is non-zero.
"""
assert hasattr(self, 'betas'), 'no betas found, please run regression before rsq'
explained_times = self.design_matrix.sum(axis = 0) != 0
explained_signal = self.predict_from_design_matrix(self.design_matrix)
self.rsq = 1.0 - np.sum((explained_signal[:,explained_times] - self.resampled_signal[:,explained_times])**2, axis = -1) / np.sum(self.resampled_signal[:,explained_times].squeeze()**2, axis = -1)
self.ssr = np.sum((explained_signal[:,explained_times] - self.resampled_signal[:,explained_times])**2, axis = -1)
return np.squeeze(self.rsq)
|
python
|
def calculate_rsq(self):
"""calculate_rsq calculates coefficient of determination, or r-squared, defined here as 1.0 - SS_res / SS_tot. rsq is only calculated for those timepoints in the data for which the design matrix is non-zero.
"""
assert hasattr(self, 'betas'), 'no betas found, please run regression before rsq'
explained_times = self.design_matrix.sum(axis = 0) != 0
explained_signal = self.predict_from_design_matrix(self.design_matrix)
self.rsq = 1.0 - np.sum((explained_signal[:,explained_times] - self.resampled_signal[:,explained_times])**2, axis = -1) / np.sum(self.resampled_signal[:,explained_times].squeeze()**2, axis = -1)
self.ssr = np.sum((explained_signal[:,explained_times] - self.resampled_signal[:,explained_times])**2, axis = -1)
return np.squeeze(self.rsq)
|
[
"def",
"calculate_rsq",
"(",
"self",
")",
":",
"assert",
"hasattr",
"(",
"self",
",",
"'betas'",
")",
",",
"'no betas found, please run regression before rsq'",
"explained_times",
"=",
"self",
".",
"design_matrix",
".",
"sum",
"(",
"axis",
"=",
"0",
")",
"!=",
"0",
"explained_signal",
"=",
"self",
".",
"predict_from_design_matrix",
"(",
"self",
".",
"design_matrix",
")",
"self",
".",
"rsq",
"=",
"1.0",
"-",
"np",
".",
"sum",
"(",
"(",
"explained_signal",
"[",
":",
",",
"explained_times",
"]",
"-",
"self",
".",
"resampled_signal",
"[",
":",
",",
"explained_times",
"]",
")",
"**",
"2",
",",
"axis",
"=",
"-",
"1",
")",
"/",
"np",
".",
"sum",
"(",
"self",
".",
"resampled_signal",
"[",
":",
",",
"explained_times",
"]",
".",
"squeeze",
"(",
")",
"**",
"2",
",",
"axis",
"=",
"-",
"1",
")",
"self",
".",
"ssr",
"=",
"np",
".",
"sum",
"(",
"(",
"explained_signal",
"[",
":",
",",
"explained_times",
"]",
"-",
"self",
".",
"resampled_signal",
"[",
":",
",",
"explained_times",
"]",
")",
"**",
"2",
",",
"axis",
"=",
"-",
"1",
")",
"return",
"np",
".",
"squeeze",
"(",
"self",
".",
"rsq",
")"
] |
calculate_rsq calculates coefficient of determination, or r-squared, defined here as 1.0 - SS_res / SS_tot. rsq is only calculated for those timepoints in the data for which the design matrix is non-zero.
|
[
"calculate_rsq",
"calculates",
"coefficient",
"of",
"determination",
"or",
"r",
"-",
"squared",
"defined",
"here",
"as",
"1",
".",
"0",
"-",
"SS_res",
"/",
"SS_tot",
".",
"rsq",
"is",
"only",
"calculated",
"for",
"those",
"timepoints",
"in",
"the",
"data",
"for",
"which",
"the",
"design",
"matrix",
"is",
"non",
"-",
"zero",
"."
] |
6263496a356c449062fe4c216fef56541f6dc151
|
https://github.com/tknapen/FIRDeconvolution/blob/6263496a356c449062fe4c216fef56541f6dc151/src/FIRDeconvolution.py#L300-L310
|
train
|
tknapen/FIRDeconvolution
|
src/FIRDeconvolution.py
|
FIRDeconvolution.bootstrap_on_residuals
|
def bootstrap_on_residuals(self, nr_repetitions = 1000):
"""bootstrap_on_residuals bootstraps, by shuffling the residuals. bootstrap_on_residuals should only be used on single-channel data, as otherwise the memory load might increase too much. This uses the lstsq backend regression for a single-pass fit across repetitions. Please note that shuffling the residuals may change the autocorrelation of the bootstrap samples relative to that of the original data and that may reduce its validity. Reference: https://en.wikipedia.org/wiki/Bootstrapping_(statistics)#Resampling_residuals
:param nr_repetitions: number of repetitions for the bootstrap.
:type nr_repetitions: int
"""
assert self.resampled_signal.shape[0] == 1, \
'signal input into bootstrap_on_residuals cannot contain signals from multiple channels at once, present shape %s' % str(self.resampled_signal.shape)
assert hasattr(self, 'betas'), 'no betas found, please run regression before bootstrapping'
# create bootstrap data by taking the residuals
bootstrap_data = np.zeros((self.resampled_signal_size, nr_repetitions))
explained_signal = self.predict_from_design_matrix(self.design_matrix).T
for x in range(bootstrap_data.shape[-1]): # loop over bootstrapsamples
bootstrap_data[:,x] = (self.residuals.T[np.random.permutation(self.resampled_signal_size)] + explained_signal).squeeze()
self.bootstrap_betas, bs_residuals, rank, s = LA.lstsq(self.design_matrix.T, bootstrap_data)
self.bootstrap_betas_per_event_type = np.zeros((len(self.covariates), self.deconvolution_interval_size, nr_repetitions))
for i, covariate in enumerate(list(self.covariates.keys())):
# find the index in the designmatrix of the current covariate
this_covariate_index = list(self.covariates.keys()).index(covariate)
self.bootstrap_betas_per_event_type[i] = self.bootstrap_betas[this_covariate_index*self.deconvolution_interval_size:(this_covariate_index+1)*self.deconvolution_interval_size]
|
python
|
def bootstrap_on_residuals(self, nr_repetitions = 1000):
"""bootstrap_on_residuals bootstraps, by shuffling the residuals. bootstrap_on_residuals should only be used on single-channel data, as otherwise the memory load might increase too much. This uses the lstsq backend regression for a single-pass fit across repetitions. Please note that shuffling the residuals may change the autocorrelation of the bootstrap samples relative to that of the original data and that may reduce its validity. Reference: https://en.wikipedia.org/wiki/Bootstrapping_(statistics)#Resampling_residuals
:param nr_repetitions: number of repetitions for the bootstrap.
:type nr_repetitions: int
"""
assert self.resampled_signal.shape[0] == 1, \
'signal input into bootstrap_on_residuals cannot contain signals from multiple channels at once, present shape %s' % str(self.resampled_signal.shape)
assert hasattr(self, 'betas'), 'no betas found, please run regression before bootstrapping'
# create bootstrap data by taking the residuals
bootstrap_data = np.zeros((self.resampled_signal_size, nr_repetitions))
explained_signal = self.predict_from_design_matrix(self.design_matrix).T
for x in range(bootstrap_data.shape[-1]): # loop over bootstrapsamples
bootstrap_data[:,x] = (self.residuals.T[np.random.permutation(self.resampled_signal_size)] + explained_signal).squeeze()
self.bootstrap_betas, bs_residuals, rank, s = LA.lstsq(self.design_matrix.T, bootstrap_data)
self.bootstrap_betas_per_event_type = np.zeros((len(self.covariates), self.deconvolution_interval_size, nr_repetitions))
for i, covariate in enumerate(list(self.covariates.keys())):
# find the index in the designmatrix of the current covariate
this_covariate_index = list(self.covariates.keys()).index(covariate)
self.bootstrap_betas_per_event_type[i] = self.bootstrap_betas[this_covariate_index*self.deconvolution_interval_size:(this_covariate_index+1)*self.deconvolution_interval_size]
|
[
"def",
"bootstrap_on_residuals",
"(",
"self",
",",
"nr_repetitions",
"=",
"1000",
")",
":",
"assert",
"self",
".",
"resampled_signal",
".",
"shape",
"[",
"0",
"]",
"==",
"1",
",",
"'signal input into bootstrap_on_residuals cannot contain signals from multiple channels at once, present shape %s'",
"%",
"str",
"(",
"self",
".",
"resampled_signal",
".",
"shape",
")",
"assert",
"hasattr",
"(",
"self",
",",
"'betas'",
")",
",",
"'no betas found, please run regression before bootstrapping'",
"# create bootstrap data by taking the residuals",
"bootstrap_data",
"=",
"np",
".",
"zeros",
"(",
"(",
"self",
".",
"resampled_signal_size",
",",
"nr_repetitions",
")",
")",
"explained_signal",
"=",
"self",
".",
"predict_from_design_matrix",
"(",
"self",
".",
"design_matrix",
")",
".",
"T",
"for",
"x",
"in",
"range",
"(",
"bootstrap_data",
".",
"shape",
"[",
"-",
"1",
"]",
")",
":",
"# loop over bootstrapsamples",
"bootstrap_data",
"[",
":",
",",
"x",
"]",
"=",
"(",
"self",
".",
"residuals",
".",
"T",
"[",
"np",
".",
"random",
".",
"permutation",
"(",
"self",
".",
"resampled_signal_size",
")",
"]",
"+",
"explained_signal",
")",
".",
"squeeze",
"(",
")",
"self",
".",
"bootstrap_betas",
",",
"bs_residuals",
",",
"rank",
",",
"s",
"=",
"LA",
".",
"lstsq",
"(",
"self",
".",
"design_matrix",
".",
"T",
",",
"bootstrap_data",
")",
"self",
".",
"bootstrap_betas_per_event_type",
"=",
"np",
".",
"zeros",
"(",
"(",
"len",
"(",
"self",
".",
"covariates",
")",
",",
"self",
".",
"deconvolution_interval_size",
",",
"nr_repetitions",
")",
")",
"for",
"i",
",",
"covariate",
"in",
"enumerate",
"(",
"list",
"(",
"self",
".",
"covariates",
".",
"keys",
"(",
")",
")",
")",
":",
"# find the index in the designmatrix of the current covariate",
"this_covariate_index",
"=",
"list",
"(",
"self",
".",
"covariates",
".",
"keys",
"(",
")",
")",
".",
"index",
"(",
"covariate",
")",
"self",
".",
"bootstrap_betas_per_event_type",
"[",
"i",
"]",
"=",
"self",
".",
"bootstrap_betas",
"[",
"this_covariate_index",
"*",
"self",
".",
"deconvolution_interval_size",
":",
"(",
"this_covariate_index",
"+",
"1",
")",
"*",
"self",
".",
"deconvolution_interval_size",
"]"
] |
bootstrap_on_residuals bootstraps, by shuffling the residuals. bootstrap_on_residuals should only be used on single-channel data, as otherwise the memory load might increase too much. This uses the lstsq backend regression for a single-pass fit across repetitions. Please note that shuffling the residuals may change the autocorrelation of the bootstrap samples relative to that of the original data and that may reduce its validity. Reference: https://en.wikipedia.org/wiki/Bootstrapping_(statistics)#Resampling_residuals
:param nr_repetitions: number of repetitions for the bootstrap.
:type nr_repetitions: int
|
[
"bootstrap_on_residuals",
"bootstraps",
"by",
"shuffling",
"the",
"residuals",
".",
"bootstrap_on_residuals",
"should",
"only",
"be",
"used",
"on",
"single",
"-",
"channel",
"data",
"as",
"otherwise",
"the",
"memory",
"load",
"might",
"increase",
"too",
"much",
".",
"This",
"uses",
"the",
"lstsq",
"backend",
"regression",
"for",
"a",
"single",
"-",
"pass",
"fit",
"across",
"repetitions",
".",
"Please",
"note",
"that",
"shuffling",
"the",
"residuals",
"may",
"change",
"the",
"autocorrelation",
"of",
"the",
"bootstrap",
"samples",
"relative",
"to",
"that",
"of",
"the",
"original",
"data",
"and",
"that",
"may",
"reduce",
"its",
"validity",
".",
"Reference",
":",
"https",
":",
"//",
"en",
".",
"wikipedia",
".",
"org",
"/",
"wiki",
"/",
"Bootstrapping_",
"(",
"statistics",
")",
"#Resampling_residuals"
] |
6263496a356c449062fe4c216fef56541f6dc151
|
https://github.com/tknapen/FIRDeconvolution/blob/6263496a356c449062fe4c216fef56541f6dc151/src/FIRDeconvolution.py#L312-L337
|
train
|
cga-harvard/Hypermap-Registry
|
hypermap/context_processors.py
|
resource_urls
|
def resource_urls(request):
"""Global values to pass to templates"""
url_parsed = urlparse(settings.SEARCH_URL)
defaults = dict(
APP_NAME=__description__,
APP_VERSION=__version__,
SITE_URL=settings.SITE_URL.rstrip('/'),
SEARCH_TYPE=settings.SEARCH_TYPE,
SEARCH_URL=settings.SEARCH_URL,
SEARCH_IP='%s://%s:%s' % (url_parsed.scheme, url_parsed.hostname, url_parsed.port)
)
return defaults
|
python
|
def resource_urls(request):
"""Global values to pass to templates"""
url_parsed = urlparse(settings.SEARCH_URL)
defaults = dict(
APP_NAME=__description__,
APP_VERSION=__version__,
SITE_URL=settings.SITE_URL.rstrip('/'),
SEARCH_TYPE=settings.SEARCH_TYPE,
SEARCH_URL=settings.SEARCH_URL,
SEARCH_IP='%s://%s:%s' % (url_parsed.scheme, url_parsed.hostname, url_parsed.port)
)
return defaults
|
[
"def",
"resource_urls",
"(",
"request",
")",
":",
"url_parsed",
"=",
"urlparse",
"(",
"settings",
".",
"SEARCH_URL",
")",
"defaults",
"=",
"dict",
"(",
"APP_NAME",
"=",
"__description__",
",",
"APP_VERSION",
"=",
"__version__",
",",
"SITE_URL",
"=",
"settings",
".",
"SITE_URL",
".",
"rstrip",
"(",
"'/'",
")",
",",
"SEARCH_TYPE",
"=",
"settings",
".",
"SEARCH_TYPE",
",",
"SEARCH_URL",
"=",
"settings",
".",
"SEARCH_URL",
",",
"SEARCH_IP",
"=",
"'%s://%s:%s'",
"%",
"(",
"url_parsed",
".",
"scheme",
",",
"url_parsed",
".",
"hostname",
",",
"url_parsed",
".",
"port",
")",
")",
"return",
"defaults"
] |
Global values to pass to templates
|
[
"Global",
"values",
"to",
"pass",
"to",
"templates"
] |
899a5385b15af7fba190ab4fae1d41e47d155a1b
|
https://github.com/cga-harvard/Hypermap-Registry/blob/899a5385b15af7fba190ab4fae1d41e47d155a1b/hypermap/context_processors.py#L7-L19
|
train
|
cga-harvard/Hypermap-Registry
|
hypermap/aggregator/tasks.py
|
index_cached_layers
|
def index_cached_layers(self):
"""
Index and unindex all layers in the Django cache (Index all layers who have been checked).
"""
from hypermap.aggregator.models import Layer
if SEARCH_TYPE == 'solr':
from hypermap.aggregator.solr import SolrHypermap
solrobject = SolrHypermap()
else:
from hypermap.aggregator.elasticsearch_client import ESHypermap
from elasticsearch import helpers
es_client = ESHypermap()
layers_cache = cache.get('layers')
deleted_layers_cache = cache.get('deleted_layers')
# 1. added layers cache
if layers_cache:
layers_list = list(layers_cache)
LOGGER.debug('There are %s layers in cache: %s' % (len(layers_list), layers_list))
batch_size = settings.REGISTRY_SEARCH_BATCH_SIZE
batch_lists = [layers_list[i:i+batch_size] for i in range(0, len(layers_list), batch_size)]
for batch_list_ids in batch_lists:
layers = Layer.objects.filter(id__in=batch_list_ids)
if batch_size > len(layers):
batch_size = len(layers)
LOGGER.debug('Syncing %s/%s layers to %s: %s' % (batch_size, len(layers_cache), layers, SEARCH_TYPE))
try:
# SOLR
if SEARCH_TYPE == 'solr':
success, layers_errors_ids = solrobject.layers_to_solr(layers)
if success:
# remove layers from cache here
layers_cache = layers_cache.difference(set(batch_list_ids))
LOGGER.debug('Removing layers with id %s from cache' % batch_list_ids)
cache.set('layers', layers_cache)
# ES
elif SEARCH_TYPE == 'elasticsearch':
with_bulk, success = True, False
layers_to_index = [es_client.layer_to_es(layer, with_bulk) for layer in layers]
message = helpers.bulk(es_client.es, layers_to_index)
# Check that all layers where indexed...if not, don't clear cache.
# TODO: Check why es does not index all layers at first.
len_indexed_layers = message[0]
if len_indexed_layers == len(layers):
LOGGER.debug('%d layers indexed successfully' % (len_indexed_layers))
success = True
if success:
# remove layers from cache here
layers_cache = layers_cache.difference(set(batch_list_ids))
cache.set('layers', layers_cache)
else:
raise Exception("Incorrect SEARCH_TYPE=%s" % SEARCH_TYPE)
except Exception as e:
LOGGER.error('Layers were NOT indexed correctly')
LOGGER.error(e, exc_info=True)
else:
LOGGER.debug('No cached layers to add in search engine.')
# 2. deleted layers cache
if deleted_layers_cache:
layers_list = list(deleted_layers_cache)
LOGGER.debug('There are %s layers in cache for deleting: %s' % (len(layers_list), layers_list))
# TODO implement me: batch layer index deletion
for layer_id in layers_list:
# SOLR
if SEARCH_TYPE == 'solr':
if Layer.objects.filter(pk=layer_id).exists():
layer = Layer.objects.get(id=layer_id)
unindex_layer(layer.id, use_cache=False)
deleted_layers_cache = deleted_layers_cache.difference(set([layer_id]))
cache.set('deleted_layers', deleted_layers_cache)
else:
# TODO implement me
raise NotImplementedError
else:
LOGGER.debug('No cached layers to remove in search engine.')
|
python
|
def index_cached_layers(self):
"""
Index and unindex all layers in the Django cache (Index all layers who have been checked).
"""
from hypermap.aggregator.models import Layer
if SEARCH_TYPE == 'solr':
from hypermap.aggregator.solr import SolrHypermap
solrobject = SolrHypermap()
else:
from hypermap.aggregator.elasticsearch_client import ESHypermap
from elasticsearch import helpers
es_client = ESHypermap()
layers_cache = cache.get('layers')
deleted_layers_cache = cache.get('deleted_layers')
# 1. added layers cache
if layers_cache:
layers_list = list(layers_cache)
LOGGER.debug('There are %s layers in cache: %s' % (len(layers_list), layers_list))
batch_size = settings.REGISTRY_SEARCH_BATCH_SIZE
batch_lists = [layers_list[i:i+batch_size] for i in range(0, len(layers_list), batch_size)]
for batch_list_ids in batch_lists:
layers = Layer.objects.filter(id__in=batch_list_ids)
if batch_size > len(layers):
batch_size = len(layers)
LOGGER.debug('Syncing %s/%s layers to %s: %s' % (batch_size, len(layers_cache), layers, SEARCH_TYPE))
try:
# SOLR
if SEARCH_TYPE == 'solr':
success, layers_errors_ids = solrobject.layers_to_solr(layers)
if success:
# remove layers from cache here
layers_cache = layers_cache.difference(set(batch_list_ids))
LOGGER.debug('Removing layers with id %s from cache' % batch_list_ids)
cache.set('layers', layers_cache)
# ES
elif SEARCH_TYPE == 'elasticsearch':
with_bulk, success = True, False
layers_to_index = [es_client.layer_to_es(layer, with_bulk) for layer in layers]
message = helpers.bulk(es_client.es, layers_to_index)
# Check that all layers where indexed...if not, don't clear cache.
# TODO: Check why es does not index all layers at first.
len_indexed_layers = message[0]
if len_indexed_layers == len(layers):
LOGGER.debug('%d layers indexed successfully' % (len_indexed_layers))
success = True
if success:
# remove layers from cache here
layers_cache = layers_cache.difference(set(batch_list_ids))
cache.set('layers', layers_cache)
else:
raise Exception("Incorrect SEARCH_TYPE=%s" % SEARCH_TYPE)
except Exception as e:
LOGGER.error('Layers were NOT indexed correctly')
LOGGER.error(e, exc_info=True)
else:
LOGGER.debug('No cached layers to add in search engine.')
# 2. deleted layers cache
if deleted_layers_cache:
layers_list = list(deleted_layers_cache)
LOGGER.debug('There are %s layers in cache for deleting: %s' % (len(layers_list), layers_list))
# TODO implement me: batch layer index deletion
for layer_id in layers_list:
# SOLR
if SEARCH_TYPE == 'solr':
if Layer.objects.filter(pk=layer_id).exists():
layer = Layer.objects.get(id=layer_id)
unindex_layer(layer.id, use_cache=False)
deleted_layers_cache = deleted_layers_cache.difference(set([layer_id]))
cache.set('deleted_layers', deleted_layers_cache)
else:
# TODO implement me
raise NotImplementedError
else:
LOGGER.debug('No cached layers to remove in search engine.')
|
[
"def",
"index_cached_layers",
"(",
"self",
")",
":",
"from",
"hypermap",
".",
"aggregator",
".",
"models",
"import",
"Layer",
"if",
"SEARCH_TYPE",
"==",
"'solr'",
":",
"from",
"hypermap",
".",
"aggregator",
".",
"solr",
"import",
"SolrHypermap",
"solrobject",
"=",
"SolrHypermap",
"(",
")",
"else",
":",
"from",
"hypermap",
".",
"aggregator",
".",
"elasticsearch_client",
"import",
"ESHypermap",
"from",
"elasticsearch",
"import",
"helpers",
"es_client",
"=",
"ESHypermap",
"(",
")",
"layers_cache",
"=",
"cache",
".",
"get",
"(",
"'layers'",
")",
"deleted_layers_cache",
"=",
"cache",
".",
"get",
"(",
"'deleted_layers'",
")",
"# 1. added layers cache",
"if",
"layers_cache",
":",
"layers_list",
"=",
"list",
"(",
"layers_cache",
")",
"LOGGER",
".",
"debug",
"(",
"'There are %s layers in cache: %s'",
"%",
"(",
"len",
"(",
"layers_list",
")",
",",
"layers_list",
")",
")",
"batch_size",
"=",
"settings",
".",
"REGISTRY_SEARCH_BATCH_SIZE",
"batch_lists",
"=",
"[",
"layers_list",
"[",
"i",
":",
"i",
"+",
"batch_size",
"]",
"for",
"i",
"in",
"range",
"(",
"0",
",",
"len",
"(",
"layers_list",
")",
",",
"batch_size",
")",
"]",
"for",
"batch_list_ids",
"in",
"batch_lists",
":",
"layers",
"=",
"Layer",
".",
"objects",
".",
"filter",
"(",
"id__in",
"=",
"batch_list_ids",
")",
"if",
"batch_size",
">",
"len",
"(",
"layers",
")",
":",
"batch_size",
"=",
"len",
"(",
"layers",
")",
"LOGGER",
".",
"debug",
"(",
"'Syncing %s/%s layers to %s: %s'",
"%",
"(",
"batch_size",
",",
"len",
"(",
"layers_cache",
")",
",",
"layers",
",",
"SEARCH_TYPE",
")",
")",
"try",
":",
"# SOLR",
"if",
"SEARCH_TYPE",
"==",
"'solr'",
":",
"success",
",",
"layers_errors_ids",
"=",
"solrobject",
".",
"layers_to_solr",
"(",
"layers",
")",
"if",
"success",
":",
"# remove layers from cache here",
"layers_cache",
"=",
"layers_cache",
".",
"difference",
"(",
"set",
"(",
"batch_list_ids",
")",
")",
"LOGGER",
".",
"debug",
"(",
"'Removing layers with id %s from cache'",
"%",
"batch_list_ids",
")",
"cache",
".",
"set",
"(",
"'layers'",
",",
"layers_cache",
")",
"# ES",
"elif",
"SEARCH_TYPE",
"==",
"'elasticsearch'",
":",
"with_bulk",
",",
"success",
"=",
"True",
",",
"False",
"layers_to_index",
"=",
"[",
"es_client",
".",
"layer_to_es",
"(",
"layer",
",",
"with_bulk",
")",
"for",
"layer",
"in",
"layers",
"]",
"message",
"=",
"helpers",
".",
"bulk",
"(",
"es_client",
".",
"es",
",",
"layers_to_index",
")",
"# Check that all layers where indexed...if not, don't clear cache.",
"# TODO: Check why es does not index all layers at first.",
"len_indexed_layers",
"=",
"message",
"[",
"0",
"]",
"if",
"len_indexed_layers",
"==",
"len",
"(",
"layers",
")",
":",
"LOGGER",
".",
"debug",
"(",
"'%d layers indexed successfully'",
"%",
"(",
"len_indexed_layers",
")",
")",
"success",
"=",
"True",
"if",
"success",
":",
"# remove layers from cache here",
"layers_cache",
"=",
"layers_cache",
".",
"difference",
"(",
"set",
"(",
"batch_list_ids",
")",
")",
"cache",
".",
"set",
"(",
"'layers'",
",",
"layers_cache",
")",
"else",
":",
"raise",
"Exception",
"(",
"\"Incorrect SEARCH_TYPE=%s\"",
"%",
"SEARCH_TYPE",
")",
"except",
"Exception",
"as",
"e",
":",
"LOGGER",
".",
"error",
"(",
"'Layers were NOT indexed correctly'",
")",
"LOGGER",
".",
"error",
"(",
"e",
",",
"exc_info",
"=",
"True",
")",
"else",
":",
"LOGGER",
".",
"debug",
"(",
"'No cached layers to add in search engine.'",
")",
"# 2. deleted layers cache",
"if",
"deleted_layers_cache",
":",
"layers_list",
"=",
"list",
"(",
"deleted_layers_cache",
")",
"LOGGER",
".",
"debug",
"(",
"'There are %s layers in cache for deleting: %s'",
"%",
"(",
"len",
"(",
"layers_list",
")",
",",
"layers_list",
")",
")",
"# TODO implement me: batch layer index deletion",
"for",
"layer_id",
"in",
"layers_list",
":",
"# SOLR",
"if",
"SEARCH_TYPE",
"==",
"'solr'",
":",
"if",
"Layer",
".",
"objects",
".",
"filter",
"(",
"pk",
"=",
"layer_id",
")",
".",
"exists",
"(",
")",
":",
"layer",
"=",
"Layer",
".",
"objects",
".",
"get",
"(",
"id",
"=",
"layer_id",
")",
"unindex_layer",
"(",
"layer",
".",
"id",
",",
"use_cache",
"=",
"False",
")",
"deleted_layers_cache",
"=",
"deleted_layers_cache",
".",
"difference",
"(",
"set",
"(",
"[",
"layer_id",
"]",
")",
")",
"cache",
".",
"set",
"(",
"'deleted_layers'",
",",
"deleted_layers_cache",
")",
"else",
":",
"# TODO implement me",
"raise",
"NotImplementedError",
"else",
":",
"LOGGER",
".",
"debug",
"(",
"'No cached layers to remove in search engine.'",
")"
] |
Index and unindex all layers in the Django cache (Index all layers who have been checked).
|
[
"Index",
"and",
"unindex",
"all",
"layers",
"in",
"the",
"Django",
"cache",
"(",
"Index",
"all",
"layers",
"who",
"have",
"been",
"checked",
")",
"."
] |
899a5385b15af7fba190ab4fae1d41e47d155a1b
|
https://github.com/cga-harvard/Hypermap-Registry/blob/899a5385b15af7fba190ab4fae1d41e47d155a1b/hypermap/aggregator/tasks.py#L94-L177
|
train
|
cga-harvard/Hypermap-Registry
|
hypermap/aggregator/tasks.py
|
remove_service_checks
|
def remove_service_checks(self, service_id):
"""
Remove all checks from a service.
"""
from hypermap.aggregator.models import Service
service = Service.objects.get(id=service_id)
service.check_set.all().delete()
layer_to_process = service.layer_set.all()
for layer in layer_to_process:
layer.check_set.all().delete()
|
python
|
def remove_service_checks(self, service_id):
"""
Remove all checks from a service.
"""
from hypermap.aggregator.models import Service
service = Service.objects.get(id=service_id)
service.check_set.all().delete()
layer_to_process = service.layer_set.all()
for layer in layer_to_process:
layer.check_set.all().delete()
|
[
"def",
"remove_service_checks",
"(",
"self",
",",
"service_id",
")",
":",
"from",
"hypermap",
".",
"aggregator",
".",
"models",
"import",
"Service",
"service",
"=",
"Service",
".",
"objects",
".",
"get",
"(",
"id",
"=",
"service_id",
")",
"service",
".",
"check_set",
".",
"all",
"(",
")",
".",
"delete",
"(",
")",
"layer_to_process",
"=",
"service",
".",
"layer_set",
".",
"all",
"(",
")",
"for",
"layer",
"in",
"layer_to_process",
":",
"layer",
".",
"check_set",
".",
"all",
"(",
")",
".",
"delete",
"(",
")"
] |
Remove all checks from a service.
|
[
"Remove",
"all",
"checks",
"from",
"a",
"service",
"."
] |
899a5385b15af7fba190ab4fae1d41e47d155a1b
|
https://github.com/cga-harvard/Hypermap-Registry/blob/899a5385b15af7fba190ab4fae1d41e47d155a1b/hypermap/aggregator/tasks.py#L195-L205
|
train
|
cga-harvard/Hypermap-Registry
|
hypermap/aggregator/tasks.py
|
index_service
|
def index_service(self, service_id):
"""
Index a service in search engine.
"""
from hypermap.aggregator.models import Service
service = Service.objects.get(id=service_id)
if not service.is_valid:
LOGGER.debug('Not indexing service with id %s in search engine as it is not valid' % service.id)
return
LOGGER.debug('Indexing service %s' % service.id)
layer_to_process = service.layer_set.all()
for layer in layer_to_process:
if not settings.REGISTRY_SKIP_CELERY:
index_layer(layer.id, use_cache=True)
else:
index_layer(layer.id)
|
python
|
def index_service(self, service_id):
"""
Index a service in search engine.
"""
from hypermap.aggregator.models import Service
service = Service.objects.get(id=service_id)
if not service.is_valid:
LOGGER.debug('Not indexing service with id %s in search engine as it is not valid' % service.id)
return
LOGGER.debug('Indexing service %s' % service.id)
layer_to_process = service.layer_set.all()
for layer in layer_to_process:
if not settings.REGISTRY_SKIP_CELERY:
index_layer(layer.id, use_cache=True)
else:
index_layer(layer.id)
|
[
"def",
"index_service",
"(",
"self",
",",
"service_id",
")",
":",
"from",
"hypermap",
".",
"aggregator",
".",
"models",
"import",
"Service",
"service",
"=",
"Service",
".",
"objects",
".",
"get",
"(",
"id",
"=",
"service_id",
")",
"if",
"not",
"service",
".",
"is_valid",
":",
"LOGGER",
".",
"debug",
"(",
"'Not indexing service with id %s in search engine as it is not valid'",
"%",
"service",
".",
"id",
")",
"return",
"LOGGER",
".",
"debug",
"(",
"'Indexing service %s'",
"%",
"service",
".",
"id",
")",
"layer_to_process",
"=",
"service",
".",
"layer_set",
".",
"all",
"(",
")",
"for",
"layer",
"in",
"layer_to_process",
":",
"if",
"not",
"settings",
".",
"REGISTRY_SKIP_CELERY",
":",
"index_layer",
"(",
"layer",
".",
"id",
",",
"use_cache",
"=",
"True",
")",
"else",
":",
"index_layer",
"(",
"layer",
".",
"id",
")"
] |
Index a service in search engine.
|
[
"Index",
"a",
"service",
"in",
"search",
"engine",
"."
] |
899a5385b15af7fba190ab4fae1d41e47d155a1b
|
https://github.com/cga-harvard/Hypermap-Registry/blob/899a5385b15af7fba190ab4fae1d41e47d155a1b/hypermap/aggregator/tasks.py#L209-L228
|
train
|
cga-harvard/Hypermap-Registry
|
hypermap/aggregator/tasks.py
|
index_layer
|
def index_layer(self, layer_id, use_cache=False):
"""
Index a layer in the search backend.
If cache is set, append it to the list, if it isn't send the transaction right away.
cache needs memcached to be available.
"""
from hypermap.aggregator.models import Layer
layer = Layer.objects.get(id=layer_id)
if not layer.is_valid:
LOGGER.debug('Not indexing or removing layer with id %s in search engine as it is not valid' % layer.id)
unindex_layer(layer.id, use_cache)
return
if layer.was_deleted:
LOGGER.debug('Not indexing or removing layer with id %s in search engine as was_deleted is true' % layer.id)
unindex_layer(layer.id, use_cache)
return
# 1. if we use cache
if use_cache:
LOGGER.debug('Caching layer with id %s for syncing with search engine' % layer.id)
layers = cache.get('layers')
if layers is None:
layers = set([layer.id])
else:
layers.add(layer.id)
cache.set('layers', layers)
return
# 2. if we don't use cache
# TODO: Make this function more DRY
# by abstracting the common bits.
if SEARCH_TYPE == 'solr':
from hypermap.aggregator.solr import SolrHypermap
LOGGER.debug('Syncing layer %s to solr' % layer.name)
solrobject = SolrHypermap()
success, message = solrobject.layer_to_solr(layer)
# update the error message if using celery
if not settings.REGISTRY_SKIP_CELERY:
if not success:
self.update_state(
state=states.FAILURE,
meta=message
)
raise Ignore()
elif SEARCH_TYPE == 'elasticsearch':
from hypermap.aggregator.elasticsearch_client import ESHypermap
LOGGER.debug('Syncing layer %s to es' % layer.name)
esobject = ESHypermap()
success, message = esobject.layer_to_es(layer)
# update the error message if using celery
if not settings.REGISTRY_SKIP_CELERY:
if not success:
self.update_state(
state=states.FAILURE,
meta=message
)
raise Ignore()
|
python
|
def index_layer(self, layer_id, use_cache=False):
"""
Index a layer in the search backend.
If cache is set, append it to the list, if it isn't send the transaction right away.
cache needs memcached to be available.
"""
from hypermap.aggregator.models import Layer
layer = Layer.objects.get(id=layer_id)
if not layer.is_valid:
LOGGER.debug('Not indexing or removing layer with id %s in search engine as it is not valid' % layer.id)
unindex_layer(layer.id, use_cache)
return
if layer.was_deleted:
LOGGER.debug('Not indexing or removing layer with id %s in search engine as was_deleted is true' % layer.id)
unindex_layer(layer.id, use_cache)
return
# 1. if we use cache
if use_cache:
LOGGER.debug('Caching layer with id %s for syncing with search engine' % layer.id)
layers = cache.get('layers')
if layers is None:
layers = set([layer.id])
else:
layers.add(layer.id)
cache.set('layers', layers)
return
# 2. if we don't use cache
# TODO: Make this function more DRY
# by abstracting the common bits.
if SEARCH_TYPE == 'solr':
from hypermap.aggregator.solr import SolrHypermap
LOGGER.debug('Syncing layer %s to solr' % layer.name)
solrobject = SolrHypermap()
success, message = solrobject.layer_to_solr(layer)
# update the error message if using celery
if not settings.REGISTRY_SKIP_CELERY:
if not success:
self.update_state(
state=states.FAILURE,
meta=message
)
raise Ignore()
elif SEARCH_TYPE == 'elasticsearch':
from hypermap.aggregator.elasticsearch_client import ESHypermap
LOGGER.debug('Syncing layer %s to es' % layer.name)
esobject = ESHypermap()
success, message = esobject.layer_to_es(layer)
# update the error message if using celery
if not settings.REGISTRY_SKIP_CELERY:
if not success:
self.update_state(
state=states.FAILURE,
meta=message
)
raise Ignore()
|
[
"def",
"index_layer",
"(",
"self",
",",
"layer_id",
",",
"use_cache",
"=",
"False",
")",
":",
"from",
"hypermap",
".",
"aggregator",
".",
"models",
"import",
"Layer",
"layer",
"=",
"Layer",
".",
"objects",
".",
"get",
"(",
"id",
"=",
"layer_id",
")",
"if",
"not",
"layer",
".",
"is_valid",
":",
"LOGGER",
".",
"debug",
"(",
"'Not indexing or removing layer with id %s in search engine as it is not valid'",
"%",
"layer",
".",
"id",
")",
"unindex_layer",
"(",
"layer",
".",
"id",
",",
"use_cache",
")",
"return",
"if",
"layer",
".",
"was_deleted",
":",
"LOGGER",
".",
"debug",
"(",
"'Not indexing or removing layer with id %s in search engine as was_deleted is true'",
"%",
"layer",
".",
"id",
")",
"unindex_layer",
"(",
"layer",
".",
"id",
",",
"use_cache",
")",
"return",
"# 1. if we use cache",
"if",
"use_cache",
":",
"LOGGER",
".",
"debug",
"(",
"'Caching layer with id %s for syncing with search engine'",
"%",
"layer",
".",
"id",
")",
"layers",
"=",
"cache",
".",
"get",
"(",
"'layers'",
")",
"if",
"layers",
"is",
"None",
":",
"layers",
"=",
"set",
"(",
"[",
"layer",
".",
"id",
"]",
")",
"else",
":",
"layers",
".",
"add",
"(",
"layer",
".",
"id",
")",
"cache",
".",
"set",
"(",
"'layers'",
",",
"layers",
")",
"return",
"# 2. if we don't use cache",
"# TODO: Make this function more DRY",
"# by abstracting the common bits.",
"if",
"SEARCH_TYPE",
"==",
"'solr'",
":",
"from",
"hypermap",
".",
"aggregator",
".",
"solr",
"import",
"SolrHypermap",
"LOGGER",
".",
"debug",
"(",
"'Syncing layer %s to solr'",
"%",
"layer",
".",
"name",
")",
"solrobject",
"=",
"SolrHypermap",
"(",
")",
"success",
",",
"message",
"=",
"solrobject",
".",
"layer_to_solr",
"(",
"layer",
")",
"# update the error message if using celery",
"if",
"not",
"settings",
".",
"REGISTRY_SKIP_CELERY",
":",
"if",
"not",
"success",
":",
"self",
".",
"update_state",
"(",
"state",
"=",
"states",
".",
"FAILURE",
",",
"meta",
"=",
"message",
")",
"raise",
"Ignore",
"(",
")",
"elif",
"SEARCH_TYPE",
"==",
"'elasticsearch'",
":",
"from",
"hypermap",
".",
"aggregator",
".",
"elasticsearch_client",
"import",
"ESHypermap",
"LOGGER",
".",
"debug",
"(",
"'Syncing layer %s to es'",
"%",
"layer",
".",
"name",
")",
"esobject",
"=",
"ESHypermap",
"(",
")",
"success",
",",
"message",
"=",
"esobject",
".",
"layer_to_es",
"(",
"layer",
")",
"# update the error message if using celery",
"if",
"not",
"settings",
".",
"REGISTRY_SKIP_CELERY",
":",
"if",
"not",
"success",
":",
"self",
".",
"update_state",
"(",
"state",
"=",
"states",
".",
"FAILURE",
",",
"meta",
"=",
"message",
")",
"raise",
"Ignore",
"(",
")"
] |
Index a layer in the search backend.
If cache is set, append it to the list, if it isn't send the transaction right away.
cache needs memcached to be available.
|
[
"Index",
"a",
"layer",
"in",
"the",
"search",
"backend",
".",
"If",
"cache",
"is",
"set",
"append",
"it",
"to",
"the",
"list",
"if",
"it",
"isn",
"t",
"send",
"the",
"transaction",
"right",
"away",
".",
"cache",
"needs",
"memcached",
"to",
"be",
"available",
"."
] |
899a5385b15af7fba190ab4fae1d41e47d155a1b
|
https://github.com/cga-harvard/Hypermap-Registry/blob/899a5385b15af7fba190ab4fae1d41e47d155a1b/hypermap/aggregator/tasks.py#L232-L291
|
train
|
cga-harvard/Hypermap-Registry
|
hypermap/aggregator/tasks.py
|
unindex_layers_with_issues
|
def unindex_layers_with_issues(self, use_cache=False):
"""
Remove the index for layers in search backend, which are linked to an issue.
"""
from hypermap.aggregator.models import Issue, Layer, Service
from django.contrib.contenttypes.models import ContentType
layer_type = ContentType.objects.get_for_model(Layer)
service_type = ContentType.objects.get_for_model(Service)
for issue in Issue.objects.filter(content_type__pk=layer_type.id):
unindex_layer(issue.content_object.id, use_cache)
for issue in Issue.objects.filter(content_type__pk=service_type.id):
for layer in issue.content_object.layer_set.all():
unindex_layer(layer.id, use_cache)
|
python
|
def unindex_layers_with_issues(self, use_cache=False):
"""
Remove the index for layers in search backend, which are linked to an issue.
"""
from hypermap.aggregator.models import Issue, Layer, Service
from django.contrib.contenttypes.models import ContentType
layer_type = ContentType.objects.get_for_model(Layer)
service_type = ContentType.objects.get_for_model(Service)
for issue in Issue.objects.filter(content_type__pk=layer_type.id):
unindex_layer(issue.content_object.id, use_cache)
for issue in Issue.objects.filter(content_type__pk=service_type.id):
for layer in issue.content_object.layer_set.all():
unindex_layer(layer.id, use_cache)
|
[
"def",
"unindex_layers_with_issues",
"(",
"self",
",",
"use_cache",
"=",
"False",
")",
":",
"from",
"hypermap",
".",
"aggregator",
".",
"models",
"import",
"Issue",
",",
"Layer",
",",
"Service",
"from",
"django",
".",
"contrib",
".",
"contenttypes",
".",
"models",
"import",
"ContentType",
"layer_type",
"=",
"ContentType",
".",
"objects",
".",
"get_for_model",
"(",
"Layer",
")",
"service_type",
"=",
"ContentType",
".",
"objects",
".",
"get_for_model",
"(",
"Service",
")",
"for",
"issue",
"in",
"Issue",
".",
"objects",
".",
"filter",
"(",
"content_type__pk",
"=",
"layer_type",
".",
"id",
")",
":",
"unindex_layer",
"(",
"issue",
".",
"content_object",
".",
"id",
",",
"use_cache",
")",
"for",
"issue",
"in",
"Issue",
".",
"objects",
".",
"filter",
"(",
"content_type__pk",
"=",
"service_type",
".",
"id",
")",
":",
"for",
"layer",
"in",
"issue",
".",
"content_object",
".",
"layer_set",
".",
"all",
"(",
")",
":",
"unindex_layer",
"(",
"layer",
".",
"id",
",",
"use_cache",
")"
] |
Remove the index for layers in search backend, which are linked to an issue.
|
[
"Remove",
"the",
"index",
"for",
"layers",
"in",
"search",
"backend",
"which",
"are",
"linked",
"to",
"an",
"issue",
"."
] |
899a5385b15af7fba190ab4fae1d41e47d155a1b
|
https://github.com/cga-harvard/Hypermap-Registry/blob/899a5385b15af7fba190ab4fae1d41e47d155a1b/hypermap/aggregator/tasks.py#L295-L310
|
train
|
cga-harvard/Hypermap-Registry
|
hypermap/aggregator/tasks.py
|
unindex_layer
|
def unindex_layer(self, layer_id, use_cache=False):
"""
Remove the index for a layer in the search backend.
If cache is set, append it to the list of removed layers, if it isn't send the transaction right away.
"""
from hypermap.aggregator.models import Layer
layer = Layer.objects.get(id=layer_id)
if use_cache:
LOGGER.debug('Caching layer with id %s for being removed from search engine' % layer.id)
deleted_layers = cache.get('deleted_layers')
if deleted_layers is None:
deleted_layers = set([layer.id])
else:
deleted_layers.add(layer.id)
cache.set('deleted_layers', deleted_layers)
return
if SEARCH_TYPE == 'solr':
from hypermap.aggregator.solr import SolrHypermap
LOGGER.debug('Removing layer %s from solr' % layer.id)
try:
solrobject = SolrHypermap()
solrobject.remove_layer(layer.uuid)
except Exception:
LOGGER.error('Layer NOT correctly removed from Solr')
elif SEARCH_TYPE == 'elasticsearch':
# TODO implement me
pass
|
python
|
def unindex_layer(self, layer_id, use_cache=False):
"""
Remove the index for a layer in the search backend.
If cache is set, append it to the list of removed layers, if it isn't send the transaction right away.
"""
from hypermap.aggregator.models import Layer
layer = Layer.objects.get(id=layer_id)
if use_cache:
LOGGER.debug('Caching layer with id %s for being removed from search engine' % layer.id)
deleted_layers = cache.get('deleted_layers')
if deleted_layers is None:
deleted_layers = set([layer.id])
else:
deleted_layers.add(layer.id)
cache.set('deleted_layers', deleted_layers)
return
if SEARCH_TYPE == 'solr':
from hypermap.aggregator.solr import SolrHypermap
LOGGER.debug('Removing layer %s from solr' % layer.id)
try:
solrobject = SolrHypermap()
solrobject.remove_layer(layer.uuid)
except Exception:
LOGGER.error('Layer NOT correctly removed from Solr')
elif SEARCH_TYPE == 'elasticsearch':
# TODO implement me
pass
|
[
"def",
"unindex_layer",
"(",
"self",
",",
"layer_id",
",",
"use_cache",
"=",
"False",
")",
":",
"from",
"hypermap",
".",
"aggregator",
".",
"models",
"import",
"Layer",
"layer",
"=",
"Layer",
".",
"objects",
".",
"get",
"(",
"id",
"=",
"layer_id",
")",
"if",
"use_cache",
":",
"LOGGER",
".",
"debug",
"(",
"'Caching layer with id %s for being removed from search engine'",
"%",
"layer",
".",
"id",
")",
"deleted_layers",
"=",
"cache",
".",
"get",
"(",
"'deleted_layers'",
")",
"if",
"deleted_layers",
"is",
"None",
":",
"deleted_layers",
"=",
"set",
"(",
"[",
"layer",
".",
"id",
"]",
")",
"else",
":",
"deleted_layers",
".",
"add",
"(",
"layer",
".",
"id",
")",
"cache",
".",
"set",
"(",
"'deleted_layers'",
",",
"deleted_layers",
")",
"return",
"if",
"SEARCH_TYPE",
"==",
"'solr'",
":",
"from",
"hypermap",
".",
"aggregator",
".",
"solr",
"import",
"SolrHypermap",
"LOGGER",
".",
"debug",
"(",
"'Removing layer %s from solr'",
"%",
"layer",
".",
"id",
")",
"try",
":",
"solrobject",
"=",
"SolrHypermap",
"(",
")",
"solrobject",
".",
"remove_layer",
"(",
"layer",
".",
"uuid",
")",
"except",
"Exception",
":",
"LOGGER",
".",
"error",
"(",
"'Layer NOT correctly removed from Solr'",
")",
"elif",
"SEARCH_TYPE",
"==",
"'elasticsearch'",
":",
"# TODO implement me",
"pass"
] |
Remove the index for a layer in the search backend.
If cache is set, append it to the list of removed layers, if it isn't send the transaction right away.
|
[
"Remove",
"the",
"index",
"for",
"a",
"layer",
"in",
"the",
"search",
"backend",
".",
"If",
"cache",
"is",
"set",
"append",
"it",
"to",
"the",
"list",
"of",
"removed",
"layers",
"if",
"it",
"isn",
"t",
"send",
"the",
"transaction",
"right",
"away",
"."
] |
899a5385b15af7fba190ab4fae1d41e47d155a1b
|
https://github.com/cga-harvard/Hypermap-Registry/blob/899a5385b15af7fba190ab4fae1d41e47d155a1b/hypermap/aggregator/tasks.py#L314-L343
|
train
|
cga-harvard/Hypermap-Registry
|
hypermap/aggregator/tasks.py
|
index_all_layers
|
def index_all_layers(self):
"""
Index all layers in search engine.
"""
from hypermap.aggregator.models import Layer
if not settings.REGISTRY_SKIP_CELERY:
layers_cache = set(Layer.objects.filter(is_valid=True).values_list('id', flat=True))
deleted_layers_cache = set(Layer.objects.filter(is_valid=False).values_list('id', flat=True))
cache.set('layers', layers_cache)
cache.set('deleted_layers', deleted_layers_cache)
else:
for layer in Layer.objects.all():
index_layer(layer.id)
|
python
|
def index_all_layers(self):
"""
Index all layers in search engine.
"""
from hypermap.aggregator.models import Layer
if not settings.REGISTRY_SKIP_CELERY:
layers_cache = set(Layer.objects.filter(is_valid=True).values_list('id', flat=True))
deleted_layers_cache = set(Layer.objects.filter(is_valid=False).values_list('id', flat=True))
cache.set('layers', layers_cache)
cache.set('deleted_layers', deleted_layers_cache)
else:
for layer in Layer.objects.all():
index_layer(layer.id)
|
[
"def",
"index_all_layers",
"(",
"self",
")",
":",
"from",
"hypermap",
".",
"aggregator",
".",
"models",
"import",
"Layer",
"if",
"not",
"settings",
".",
"REGISTRY_SKIP_CELERY",
":",
"layers_cache",
"=",
"set",
"(",
"Layer",
".",
"objects",
".",
"filter",
"(",
"is_valid",
"=",
"True",
")",
".",
"values_list",
"(",
"'id'",
",",
"flat",
"=",
"True",
")",
")",
"deleted_layers_cache",
"=",
"set",
"(",
"Layer",
".",
"objects",
".",
"filter",
"(",
"is_valid",
"=",
"False",
")",
".",
"values_list",
"(",
"'id'",
",",
"flat",
"=",
"True",
")",
")",
"cache",
".",
"set",
"(",
"'layers'",
",",
"layers_cache",
")",
"cache",
".",
"set",
"(",
"'deleted_layers'",
",",
"deleted_layers_cache",
")",
"else",
":",
"for",
"layer",
"in",
"Layer",
".",
"objects",
".",
"all",
"(",
")",
":",
"index_layer",
"(",
"layer",
".",
"id",
")"
] |
Index all layers in search engine.
|
[
"Index",
"all",
"layers",
"in",
"search",
"engine",
"."
] |
899a5385b15af7fba190ab4fae1d41e47d155a1b
|
https://github.com/cga-harvard/Hypermap-Registry/blob/899a5385b15af7fba190ab4fae1d41e47d155a1b/hypermap/aggregator/tasks.py#L347-L360
|
train
|
cga-harvard/Hypermap-Registry
|
hypermap/aggregator/tasks.py
|
update_last_wm_layers
|
def update_last_wm_layers(self, service_id, num_layers=10):
"""
Update and index the last added and deleted layers (num_layers) in WorldMap service.
"""
from hypermap.aggregator.models import Service
LOGGER.debug(
'Updating the index the last %s added and %s deleted layers in WorldMap service'
% (num_layers, num_layers)
)
service = Service.objects.get(id=service_id)
# TODO raise error if service type is not WM type
if service.type == 'Hypermap:WorldMapLegacy':
from hypermap.aggregator.models import update_layers_wm_legacy as update_layers_wm
if service.type == 'Hypermap:WorldMap':
from hypermap.aggregator.models import update_layers_geonode_wm as update_layers_wm
update_layers_wm(service, num_layers)
# Remove in search engine last num_layers that were deleted
LOGGER.debug('Removing the index for the last %s deleted layers' % num_layers)
layer_to_unindex = service.layer_set.filter(was_deleted=True).order_by('-last_updated')[0:num_layers]
for layer in layer_to_unindex:
if not settings.REGISTRY_SKIP_CELERY:
unindex_layer(layer.id, use_cache=True)
else:
unindex_layer(layer.id)
# Add/Update in search engine last num_layers that were added
LOGGER.debug('Adding/Updating the index for the last %s added layers' % num_layers)
layer_to_index = service.layer_set.filter(was_deleted=False).order_by('-last_updated')[0:num_layers]
for layer in layer_to_index:
if not settings.REGISTRY_SKIP_CELERY:
index_layer(layer.id, use_cache=True)
else:
index_layer(layer.id)
|
python
|
def update_last_wm_layers(self, service_id, num_layers=10):
"""
Update and index the last added and deleted layers (num_layers) in WorldMap service.
"""
from hypermap.aggregator.models import Service
LOGGER.debug(
'Updating the index the last %s added and %s deleted layers in WorldMap service'
% (num_layers, num_layers)
)
service = Service.objects.get(id=service_id)
# TODO raise error if service type is not WM type
if service.type == 'Hypermap:WorldMapLegacy':
from hypermap.aggregator.models import update_layers_wm_legacy as update_layers_wm
if service.type == 'Hypermap:WorldMap':
from hypermap.aggregator.models import update_layers_geonode_wm as update_layers_wm
update_layers_wm(service, num_layers)
# Remove in search engine last num_layers that were deleted
LOGGER.debug('Removing the index for the last %s deleted layers' % num_layers)
layer_to_unindex = service.layer_set.filter(was_deleted=True).order_by('-last_updated')[0:num_layers]
for layer in layer_to_unindex:
if not settings.REGISTRY_SKIP_CELERY:
unindex_layer(layer.id, use_cache=True)
else:
unindex_layer(layer.id)
# Add/Update in search engine last num_layers that were added
LOGGER.debug('Adding/Updating the index for the last %s added layers' % num_layers)
layer_to_index = service.layer_set.filter(was_deleted=False).order_by('-last_updated')[0:num_layers]
for layer in layer_to_index:
if not settings.REGISTRY_SKIP_CELERY:
index_layer(layer.id, use_cache=True)
else:
index_layer(layer.id)
|
[
"def",
"update_last_wm_layers",
"(",
"self",
",",
"service_id",
",",
"num_layers",
"=",
"10",
")",
":",
"from",
"hypermap",
".",
"aggregator",
".",
"models",
"import",
"Service",
"LOGGER",
".",
"debug",
"(",
"'Updating the index the last %s added and %s deleted layers in WorldMap service'",
"%",
"(",
"num_layers",
",",
"num_layers",
")",
")",
"service",
"=",
"Service",
".",
"objects",
".",
"get",
"(",
"id",
"=",
"service_id",
")",
"# TODO raise error if service type is not WM type",
"if",
"service",
".",
"type",
"==",
"'Hypermap:WorldMapLegacy'",
":",
"from",
"hypermap",
".",
"aggregator",
".",
"models",
"import",
"update_layers_wm_legacy",
"as",
"update_layers_wm",
"if",
"service",
".",
"type",
"==",
"'Hypermap:WorldMap'",
":",
"from",
"hypermap",
".",
"aggregator",
".",
"models",
"import",
"update_layers_geonode_wm",
"as",
"update_layers_wm",
"update_layers_wm",
"(",
"service",
",",
"num_layers",
")",
"# Remove in search engine last num_layers that were deleted",
"LOGGER",
".",
"debug",
"(",
"'Removing the index for the last %s deleted layers'",
"%",
"num_layers",
")",
"layer_to_unindex",
"=",
"service",
".",
"layer_set",
".",
"filter",
"(",
"was_deleted",
"=",
"True",
")",
".",
"order_by",
"(",
"'-last_updated'",
")",
"[",
"0",
":",
"num_layers",
"]",
"for",
"layer",
"in",
"layer_to_unindex",
":",
"if",
"not",
"settings",
".",
"REGISTRY_SKIP_CELERY",
":",
"unindex_layer",
"(",
"layer",
".",
"id",
",",
"use_cache",
"=",
"True",
")",
"else",
":",
"unindex_layer",
"(",
"layer",
".",
"id",
")",
"# Add/Update in search engine last num_layers that were added",
"LOGGER",
".",
"debug",
"(",
"'Adding/Updating the index for the last %s added layers'",
"%",
"num_layers",
")",
"layer_to_index",
"=",
"service",
".",
"layer_set",
".",
"filter",
"(",
"was_deleted",
"=",
"False",
")",
".",
"order_by",
"(",
"'-last_updated'",
")",
"[",
"0",
":",
"num_layers",
"]",
"for",
"layer",
"in",
"layer_to_index",
":",
"if",
"not",
"settings",
".",
"REGISTRY_SKIP_CELERY",
":",
"index_layer",
"(",
"layer",
".",
"id",
",",
"use_cache",
"=",
"True",
")",
"else",
":",
"index_layer",
"(",
"layer",
".",
"id",
")"
] |
Update and index the last added and deleted layers (num_layers) in WorldMap service.
|
[
"Update",
"and",
"index",
"the",
"last",
"added",
"and",
"deleted",
"layers",
"(",
"num_layers",
")",
"in",
"WorldMap",
"service",
"."
] |
899a5385b15af7fba190ab4fae1d41e47d155a1b
|
https://github.com/cga-harvard/Hypermap-Registry/blob/899a5385b15af7fba190ab4fae1d41e47d155a1b/hypermap/aggregator/tasks.py#L364-L399
|
train
|
cga-harvard/Hypermap-Registry
|
hypermap/aggregator/models.py
|
bbox2wktpolygon
|
def bbox2wktpolygon(bbox):
"""
Return OGC WKT Polygon of a simple bbox list
"""
try:
minx = float(bbox[0])
miny = float(bbox[1])
maxx = float(bbox[2])
maxy = float(bbox[3])
except:
LOGGER.debug("Invalid bbox, setting it to a zero POLYGON")
minx = 0
miny = 0
maxx = 0
maxy = 0
return 'POLYGON((%.2f %.2f, %.2f %.2f, %.2f %.2f, %.2f %.2f, %.2f %.2f))' \
% (minx, miny, minx, maxy, maxx, maxy, maxx, miny, minx, miny)
|
python
|
def bbox2wktpolygon(bbox):
"""
Return OGC WKT Polygon of a simple bbox list
"""
try:
minx = float(bbox[0])
miny = float(bbox[1])
maxx = float(bbox[2])
maxy = float(bbox[3])
except:
LOGGER.debug("Invalid bbox, setting it to a zero POLYGON")
minx = 0
miny = 0
maxx = 0
maxy = 0
return 'POLYGON((%.2f %.2f, %.2f %.2f, %.2f %.2f, %.2f %.2f, %.2f %.2f))' \
% (minx, miny, minx, maxy, maxx, maxy, maxx, miny, minx, miny)
|
[
"def",
"bbox2wktpolygon",
"(",
"bbox",
")",
":",
"try",
":",
"minx",
"=",
"float",
"(",
"bbox",
"[",
"0",
"]",
")",
"miny",
"=",
"float",
"(",
"bbox",
"[",
"1",
"]",
")",
"maxx",
"=",
"float",
"(",
"bbox",
"[",
"2",
"]",
")",
"maxy",
"=",
"float",
"(",
"bbox",
"[",
"3",
"]",
")",
"except",
":",
"LOGGER",
".",
"debug",
"(",
"\"Invalid bbox, setting it to a zero POLYGON\"",
")",
"minx",
"=",
"0",
"miny",
"=",
"0",
"maxx",
"=",
"0",
"maxy",
"=",
"0",
"return",
"'POLYGON((%.2f %.2f, %.2f %.2f, %.2f %.2f, %.2f %.2f, %.2f %.2f))'",
"%",
"(",
"minx",
",",
"miny",
",",
"minx",
",",
"maxy",
",",
"maxx",
",",
"maxy",
",",
"maxx",
",",
"miny",
",",
"minx",
",",
"miny",
")"
] |
Return OGC WKT Polygon of a simple bbox list
|
[
"Return",
"OGC",
"WKT",
"Polygon",
"of",
"a",
"simple",
"bbox",
"list"
] |
899a5385b15af7fba190ab4fae1d41e47d155a1b
|
https://github.com/cga-harvard/Hypermap-Registry/blob/899a5385b15af7fba190ab4fae1d41e47d155a1b/hypermap/aggregator/models.py#L975-L994
|
train
|
cga-harvard/Hypermap-Registry
|
hypermap/aggregator/models.py
|
create_metadata_record
|
def create_metadata_record(**kwargs):
"""
Create a csw:Record XML document from harvested metadata
"""
if 'srs' in kwargs:
srs = kwargs['srs']
else:
srs = '4326'
modified = '%sZ' % datetime.datetime.utcnow().isoformat().split('.')[0]
nsmap = Namespaces().get_namespaces(['csw', 'dc', 'dct', 'ows'])
e = etree.Element(nspath_eval('csw:Record', nsmap), nsmap=nsmap)
etree.SubElement(e, nspath_eval('dc:identifier', nsmap)).text = kwargs['identifier']
etree.SubElement(e, nspath_eval('dc:title', nsmap)).text = kwargs['title']
if 'alternative' in kwargs:
etree.SubElement(e, nspath_eval('dct:alternative', nsmap)).text = kwargs['alternative']
etree.SubElement(e, nspath_eval('dct:modified', nsmap)).text = modified
etree.SubElement(e, nspath_eval('dct:abstract', nsmap)).text = kwargs['abstract']
etree.SubElement(e, nspath_eval('dc:type', nsmap)).text = kwargs['type']
etree.SubElement(e, nspath_eval('dc:format', nsmap)).text = kwargs['format']
etree.SubElement(e, nspath_eval('dc:source', nsmap)).text = kwargs['source']
if 'relation' in kwargs:
etree.SubElement(e, nspath_eval('dc:relation', nsmap)).text = kwargs['relation']
if 'keywords' in kwargs:
if kwargs['keywords'] is not None:
for keyword in kwargs['keywords']:
etree.SubElement(e, nspath_eval('dc:subject', nsmap)).text = keyword
for link in kwargs['links']:
etree.SubElement(e, nspath_eval('dct:references', nsmap), scheme=link[0]).text = link[1]
bbox2 = loads(kwargs['wkt_geometry']).bounds
bbox = etree.SubElement(e, nspath_eval('ows:BoundingBox', nsmap),
crs='http://www.opengis.net/def/crs/EPSG/0/%s' % srs,
dimensions='2')
etree.SubElement(bbox, nspath_eval('ows:LowerCorner', nsmap)).text = '%s %s' % (bbox2[1], bbox2[0])
etree.SubElement(bbox, nspath_eval('ows:UpperCorner', nsmap)).text = '%s %s' % (bbox2[3], bbox2[2])
return etree.tostring(e, pretty_print=True)
|
python
|
def create_metadata_record(**kwargs):
"""
Create a csw:Record XML document from harvested metadata
"""
if 'srs' in kwargs:
srs = kwargs['srs']
else:
srs = '4326'
modified = '%sZ' % datetime.datetime.utcnow().isoformat().split('.')[0]
nsmap = Namespaces().get_namespaces(['csw', 'dc', 'dct', 'ows'])
e = etree.Element(nspath_eval('csw:Record', nsmap), nsmap=nsmap)
etree.SubElement(e, nspath_eval('dc:identifier', nsmap)).text = kwargs['identifier']
etree.SubElement(e, nspath_eval('dc:title', nsmap)).text = kwargs['title']
if 'alternative' in kwargs:
etree.SubElement(e, nspath_eval('dct:alternative', nsmap)).text = kwargs['alternative']
etree.SubElement(e, nspath_eval('dct:modified', nsmap)).text = modified
etree.SubElement(e, nspath_eval('dct:abstract', nsmap)).text = kwargs['abstract']
etree.SubElement(e, nspath_eval('dc:type', nsmap)).text = kwargs['type']
etree.SubElement(e, nspath_eval('dc:format', nsmap)).text = kwargs['format']
etree.SubElement(e, nspath_eval('dc:source', nsmap)).text = kwargs['source']
if 'relation' in kwargs:
etree.SubElement(e, nspath_eval('dc:relation', nsmap)).text = kwargs['relation']
if 'keywords' in kwargs:
if kwargs['keywords'] is not None:
for keyword in kwargs['keywords']:
etree.SubElement(e, nspath_eval('dc:subject', nsmap)).text = keyword
for link in kwargs['links']:
etree.SubElement(e, nspath_eval('dct:references', nsmap), scheme=link[0]).text = link[1]
bbox2 = loads(kwargs['wkt_geometry']).bounds
bbox = etree.SubElement(e, nspath_eval('ows:BoundingBox', nsmap),
crs='http://www.opengis.net/def/crs/EPSG/0/%s' % srs,
dimensions='2')
etree.SubElement(bbox, nspath_eval('ows:LowerCorner', nsmap)).text = '%s %s' % (bbox2[1], bbox2[0])
etree.SubElement(bbox, nspath_eval('ows:UpperCorner', nsmap)).text = '%s %s' % (bbox2[3], bbox2[2])
return etree.tostring(e, pretty_print=True)
|
[
"def",
"create_metadata_record",
"(",
"*",
"*",
"kwargs",
")",
":",
"if",
"'srs'",
"in",
"kwargs",
":",
"srs",
"=",
"kwargs",
"[",
"'srs'",
"]",
"else",
":",
"srs",
"=",
"'4326'",
"modified",
"=",
"'%sZ'",
"%",
"datetime",
".",
"datetime",
".",
"utcnow",
"(",
")",
".",
"isoformat",
"(",
")",
".",
"split",
"(",
"'.'",
")",
"[",
"0",
"]",
"nsmap",
"=",
"Namespaces",
"(",
")",
".",
"get_namespaces",
"(",
"[",
"'csw'",
",",
"'dc'",
",",
"'dct'",
",",
"'ows'",
"]",
")",
"e",
"=",
"etree",
".",
"Element",
"(",
"nspath_eval",
"(",
"'csw:Record'",
",",
"nsmap",
")",
",",
"nsmap",
"=",
"nsmap",
")",
"etree",
".",
"SubElement",
"(",
"e",
",",
"nspath_eval",
"(",
"'dc:identifier'",
",",
"nsmap",
")",
")",
".",
"text",
"=",
"kwargs",
"[",
"'identifier'",
"]",
"etree",
".",
"SubElement",
"(",
"e",
",",
"nspath_eval",
"(",
"'dc:title'",
",",
"nsmap",
")",
")",
".",
"text",
"=",
"kwargs",
"[",
"'title'",
"]",
"if",
"'alternative'",
"in",
"kwargs",
":",
"etree",
".",
"SubElement",
"(",
"e",
",",
"nspath_eval",
"(",
"'dct:alternative'",
",",
"nsmap",
")",
")",
".",
"text",
"=",
"kwargs",
"[",
"'alternative'",
"]",
"etree",
".",
"SubElement",
"(",
"e",
",",
"nspath_eval",
"(",
"'dct:modified'",
",",
"nsmap",
")",
")",
".",
"text",
"=",
"modified",
"etree",
".",
"SubElement",
"(",
"e",
",",
"nspath_eval",
"(",
"'dct:abstract'",
",",
"nsmap",
")",
")",
".",
"text",
"=",
"kwargs",
"[",
"'abstract'",
"]",
"etree",
".",
"SubElement",
"(",
"e",
",",
"nspath_eval",
"(",
"'dc:type'",
",",
"nsmap",
")",
")",
".",
"text",
"=",
"kwargs",
"[",
"'type'",
"]",
"etree",
".",
"SubElement",
"(",
"e",
",",
"nspath_eval",
"(",
"'dc:format'",
",",
"nsmap",
")",
")",
".",
"text",
"=",
"kwargs",
"[",
"'format'",
"]",
"etree",
".",
"SubElement",
"(",
"e",
",",
"nspath_eval",
"(",
"'dc:source'",
",",
"nsmap",
")",
")",
".",
"text",
"=",
"kwargs",
"[",
"'source'",
"]",
"if",
"'relation'",
"in",
"kwargs",
":",
"etree",
".",
"SubElement",
"(",
"e",
",",
"nspath_eval",
"(",
"'dc:relation'",
",",
"nsmap",
")",
")",
".",
"text",
"=",
"kwargs",
"[",
"'relation'",
"]",
"if",
"'keywords'",
"in",
"kwargs",
":",
"if",
"kwargs",
"[",
"'keywords'",
"]",
"is",
"not",
"None",
":",
"for",
"keyword",
"in",
"kwargs",
"[",
"'keywords'",
"]",
":",
"etree",
".",
"SubElement",
"(",
"e",
",",
"nspath_eval",
"(",
"'dc:subject'",
",",
"nsmap",
")",
")",
".",
"text",
"=",
"keyword",
"for",
"link",
"in",
"kwargs",
"[",
"'links'",
"]",
":",
"etree",
".",
"SubElement",
"(",
"e",
",",
"nspath_eval",
"(",
"'dct:references'",
",",
"nsmap",
")",
",",
"scheme",
"=",
"link",
"[",
"0",
"]",
")",
".",
"text",
"=",
"link",
"[",
"1",
"]",
"bbox2",
"=",
"loads",
"(",
"kwargs",
"[",
"'wkt_geometry'",
"]",
")",
".",
"bounds",
"bbox",
"=",
"etree",
".",
"SubElement",
"(",
"e",
",",
"nspath_eval",
"(",
"'ows:BoundingBox'",
",",
"nsmap",
")",
",",
"crs",
"=",
"'http://www.opengis.net/def/crs/EPSG/0/%s'",
"%",
"srs",
",",
"dimensions",
"=",
"'2'",
")",
"etree",
".",
"SubElement",
"(",
"bbox",
",",
"nspath_eval",
"(",
"'ows:LowerCorner'",
",",
"nsmap",
")",
")",
".",
"text",
"=",
"'%s %s'",
"%",
"(",
"bbox2",
"[",
"1",
"]",
",",
"bbox2",
"[",
"0",
"]",
")",
"etree",
".",
"SubElement",
"(",
"bbox",
",",
"nspath_eval",
"(",
"'ows:UpperCorner'",
",",
"nsmap",
")",
")",
".",
"text",
"=",
"'%s %s'",
"%",
"(",
"bbox2",
"[",
"3",
"]",
",",
"bbox2",
"[",
"2",
"]",
")",
"return",
"etree",
".",
"tostring",
"(",
"e",
",",
"pretty_print",
"=",
"True",
")"
] |
Create a csw:Record XML document from harvested metadata
|
[
"Create",
"a",
"csw",
":",
"Record",
"XML",
"document",
"from",
"harvested",
"metadata"
] |
899a5385b15af7fba190ab4fae1d41e47d155a1b
|
https://github.com/cga-harvard/Hypermap-Registry/blob/899a5385b15af7fba190ab4fae1d41e47d155a1b/hypermap/aggregator/models.py#L997-L1042
|
train
|
cga-harvard/Hypermap-Registry
|
hypermap/aggregator/models.py
|
gen_anytext
|
def gen_anytext(*args):
"""
Convenience function to create bag of words for anytext property
"""
bag = []
for term in args:
if term is not None:
if isinstance(term, list):
for term2 in term:
if term2 is not None:
bag.append(term2)
else:
bag.append(term)
return ' '.join(bag)
|
python
|
def gen_anytext(*args):
"""
Convenience function to create bag of words for anytext property
"""
bag = []
for term in args:
if term is not None:
if isinstance(term, list):
for term2 in term:
if term2 is not None:
bag.append(term2)
else:
bag.append(term)
return ' '.join(bag)
|
[
"def",
"gen_anytext",
"(",
"*",
"args",
")",
":",
"bag",
"=",
"[",
"]",
"for",
"term",
"in",
"args",
":",
"if",
"term",
"is",
"not",
"None",
":",
"if",
"isinstance",
"(",
"term",
",",
"list",
")",
":",
"for",
"term2",
"in",
"term",
":",
"if",
"term2",
"is",
"not",
"None",
":",
"bag",
".",
"append",
"(",
"term2",
")",
"else",
":",
"bag",
".",
"append",
"(",
"term",
")",
"return",
"' '",
".",
"join",
"(",
"bag",
")"
] |
Convenience function to create bag of words for anytext property
|
[
"Convenience",
"function",
"to",
"create",
"bag",
"of",
"words",
"for",
"anytext",
"property"
] |
899a5385b15af7fba190ab4fae1d41e47d155a1b
|
https://github.com/cga-harvard/Hypermap-Registry/blob/899a5385b15af7fba190ab4fae1d41e47d155a1b/hypermap/aggregator/models.py#L1045-L1060
|
train
|
cga-harvard/Hypermap-Registry
|
hypermap/aggregator/models.py
|
update_layers_wmts
|
def update_layers_wmts(service):
"""
Update layers for an OGC:WMTS service.
Sample endpoint: http://map1.vis.earthdata.nasa.gov/wmts-geo/1.0.0/WMTSCapabilities.xml
"""
try:
wmts = WebMapTileService(service.url)
# set srs
# WMTS is always in 4326
srs, created = SpatialReferenceSystem.objects.get_or_create(code='EPSG:4326')
service.srs.add(srs)
service.update_validity()
layer_names = list(wmts.contents)
layer_n = 0
total = len(layer_names)
for layer_name in layer_names:
ows_layer = wmts.contents[layer_name]
LOGGER.debug('Updating layer %s' % ows_layer.name)
layer, created = Layer.objects.get_or_create(name=ows_layer.name, service=service, catalog=service.catalog)
if layer.active:
links = [['OGC:WMTS', service.url],
['OGC:WMTS', settings.SITE_URL.rstrip('/') + '/' + layer.get_url_endpoint()]]
layer.type = 'OGC:WMTS'
layer.title = ows_layer.title
layer.abstract = ows_layer.abstract
# keywords
# @tomkralidis wmts does not seem to support this attribute
keywords = None
if hasattr(ows_layer, 'keywords'):
keywords = ows_layer.keywords
for keyword in keywords:
layer.keywords.add(keyword)
layer.url = service.url
layer.page_url = layer.get_absolute_url
links.append([
'WWW:LINK',
settings.SITE_URL.rstrip('/') + layer.page_url
])
bbox = list(ows_layer.boundingBoxWGS84 or (-179.0, -89.0, 179.0, 89.0))
layer.bbox_x0 = bbox[0]
layer.bbox_y0 = bbox[1]
layer.bbox_x1 = bbox[2]
layer.bbox_y1 = bbox[3]
layer.wkt_geometry = bbox2wktpolygon(bbox)
layer.xml = create_metadata_record(
identifier=str(layer.uuid),
source=service.url,
links=links,
format='OGC:WMS',
type=layer.csw_type,
relation=service.id_string,
title=ows_layer.title,
alternative=ows_layer.name,
abstract=layer.abstract,
keywords=keywords,
wkt_geometry=layer.wkt_geometry
)
layer.anytext = gen_anytext(layer.title, layer.abstract, keywords)
layer.save()
# dates
add_mined_dates(layer)
layer_n = layer_n + 1
# exits if DEBUG_SERVICES
LOGGER.debug("Updating layer n. %s/%s" % (layer_n, total))
if DEBUG_SERVICES and layer_n == DEBUG_LAYER_NUMBER:
return
except Exception as err:
message = "update_layers_wmts: {0}".format(
err
)
check = Check(
content_object=service,
success=False,
response_time=0,
message=message
)
check.save()
|
python
|
def update_layers_wmts(service):
"""
Update layers for an OGC:WMTS service.
Sample endpoint: http://map1.vis.earthdata.nasa.gov/wmts-geo/1.0.0/WMTSCapabilities.xml
"""
try:
wmts = WebMapTileService(service.url)
# set srs
# WMTS is always in 4326
srs, created = SpatialReferenceSystem.objects.get_or_create(code='EPSG:4326')
service.srs.add(srs)
service.update_validity()
layer_names = list(wmts.contents)
layer_n = 0
total = len(layer_names)
for layer_name in layer_names:
ows_layer = wmts.contents[layer_name]
LOGGER.debug('Updating layer %s' % ows_layer.name)
layer, created = Layer.objects.get_or_create(name=ows_layer.name, service=service, catalog=service.catalog)
if layer.active:
links = [['OGC:WMTS', service.url],
['OGC:WMTS', settings.SITE_URL.rstrip('/') + '/' + layer.get_url_endpoint()]]
layer.type = 'OGC:WMTS'
layer.title = ows_layer.title
layer.abstract = ows_layer.abstract
# keywords
# @tomkralidis wmts does not seem to support this attribute
keywords = None
if hasattr(ows_layer, 'keywords'):
keywords = ows_layer.keywords
for keyword in keywords:
layer.keywords.add(keyword)
layer.url = service.url
layer.page_url = layer.get_absolute_url
links.append([
'WWW:LINK',
settings.SITE_URL.rstrip('/') + layer.page_url
])
bbox = list(ows_layer.boundingBoxWGS84 or (-179.0, -89.0, 179.0, 89.0))
layer.bbox_x0 = bbox[0]
layer.bbox_y0 = bbox[1]
layer.bbox_x1 = bbox[2]
layer.bbox_y1 = bbox[3]
layer.wkt_geometry = bbox2wktpolygon(bbox)
layer.xml = create_metadata_record(
identifier=str(layer.uuid),
source=service.url,
links=links,
format='OGC:WMS',
type=layer.csw_type,
relation=service.id_string,
title=ows_layer.title,
alternative=ows_layer.name,
abstract=layer.abstract,
keywords=keywords,
wkt_geometry=layer.wkt_geometry
)
layer.anytext = gen_anytext(layer.title, layer.abstract, keywords)
layer.save()
# dates
add_mined_dates(layer)
layer_n = layer_n + 1
# exits if DEBUG_SERVICES
LOGGER.debug("Updating layer n. %s/%s" % (layer_n, total))
if DEBUG_SERVICES and layer_n == DEBUG_LAYER_NUMBER:
return
except Exception as err:
message = "update_layers_wmts: {0}".format(
err
)
check = Check(
content_object=service,
success=False,
response_time=0,
message=message
)
check.save()
|
[
"def",
"update_layers_wmts",
"(",
"service",
")",
":",
"try",
":",
"wmts",
"=",
"WebMapTileService",
"(",
"service",
".",
"url",
")",
"# set srs",
"# WMTS is always in 4326",
"srs",
",",
"created",
"=",
"SpatialReferenceSystem",
".",
"objects",
".",
"get_or_create",
"(",
"code",
"=",
"'EPSG:4326'",
")",
"service",
".",
"srs",
".",
"add",
"(",
"srs",
")",
"service",
".",
"update_validity",
"(",
")",
"layer_names",
"=",
"list",
"(",
"wmts",
".",
"contents",
")",
"layer_n",
"=",
"0",
"total",
"=",
"len",
"(",
"layer_names",
")",
"for",
"layer_name",
"in",
"layer_names",
":",
"ows_layer",
"=",
"wmts",
".",
"contents",
"[",
"layer_name",
"]",
"LOGGER",
".",
"debug",
"(",
"'Updating layer %s'",
"%",
"ows_layer",
".",
"name",
")",
"layer",
",",
"created",
"=",
"Layer",
".",
"objects",
".",
"get_or_create",
"(",
"name",
"=",
"ows_layer",
".",
"name",
",",
"service",
"=",
"service",
",",
"catalog",
"=",
"service",
".",
"catalog",
")",
"if",
"layer",
".",
"active",
":",
"links",
"=",
"[",
"[",
"'OGC:WMTS'",
",",
"service",
".",
"url",
"]",
",",
"[",
"'OGC:WMTS'",
",",
"settings",
".",
"SITE_URL",
".",
"rstrip",
"(",
"'/'",
")",
"+",
"'/'",
"+",
"layer",
".",
"get_url_endpoint",
"(",
")",
"]",
"]",
"layer",
".",
"type",
"=",
"'OGC:WMTS'",
"layer",
".",
"title",
"=",
"ows_layer",
".",
"title",
"layer",
".",
"abstract",
"=",
"ows_layer",
".",
"abstract",
"# keywords",
"# @tomkralidis wmts does not seem to support this attribute",
"keywords",
"=",
"None",
"if",
"hasattr",
"(",
"ows_layer",
",",
"'keywords'",
")",
":",
"keywords",
"=",
"ows_layer",
".",
"keywords",
"for",
"keyword",
"in",
"keywords",
":",
"layer",
".",
"keywords",
".",
"add",
"(",
"keyword",
")",
"layer",
".",
"url",
"=",
"service",
".",
"url",
"layer",
".",
"page_url",
"=",
"layer",
".",
"get_absolute_url",
"links",
".",
"append",
"(",
"[",
"'WWW:LINK'",
",",
"settings",
".",
"SITE_URL",
".",
"rstrip",
"(",
"'/'",
")",
"+",
"layer",
".",
"page_url",
"]",
")",
"bbox",
"=",
"list",
"(",
"ows_layer",
".",
"boundingBoxWGS84",
"or",
"(",
"-",
"179.0",
",",
"-",
"89.0",
",",
"179.0",
",",
"89.0",
")",
")",
"layer",
".",
"bbox_x0",
"=",
"bbox",
"[",
"0",
"]",
"layer",
".",
"bbox_y0",
"=",
"bbox",
"[",
"1",
"]",
"layer",
".",
"bbox_x1",
"=",
"bbox",
"[",
"2",
"]",
"layer",
".",
"bbox_y1",
"=",
"bbox",
"[",
"3",
"]",
"layer",
".",
"wkt_geometry",
"=",
"bbox2wktpolygon",
"(",
"bbox",
")",
"layer",
".",
"xml",
"=",
"create_metadata_record",
"(",
"identifier",
"=",
"str",
"(",
"layer",
".",
"uuid",
")",
",",
"source",
"=",
"service",
".",
"url",
",",
"links",
"=",
"links",
",",
"format",
"=",
"'OGC:WMS'",
",",
"type",
"=",
"layer",
".",
"csw_type",
",",
"relation",
"=",
"service",
".",
"id_string",
",",
"title",
"=",
"ows_layer",
".",
"title",
",",
"alternative",
"=",
"ows_layer",
".",
"name",
",",
"abstract",
"=",
"layer",
".",
"abstract",
",",
"keywords",
"=",
"keywords",
",",
"wkt_geometry",
"=",
"layer",
".",
"wkt_geometry",
")",
"layer",
".",
"anytext",
"=",
"gen_anytext",
"(",
"layer",
".",
"title",
",",
"layer",
".",
"abstract",
",",
"keywords",
")",
"layer",
".",
"save",
"(",
")",
"# dates",
"add_mined_dates",
"(",
"layer",
")",
"layer_n",
"=",
"layer_n",
"+",
"1",
"# exits if DEBUG_SERVICES",
"LOGGER",
".",
"debug",
"(",
"\"Updating layer n. %s/%s\"",
"%",
"(",
"layer_n",
",",
"total",
")",
")",
"if",
"DEBUG_SERVICES",
"and",
"layer_n",
"==",
"DEBUG_LAYER_NUMBER",
":",
"return",
"except",
"Exception",
"as",
"err",
":",
"message",
"=",
"\"update_layers_wmts: {0}\"",
".",
"format",
"(",
"err",
")",
"check",
"=",
"Check",
"(",
"content_object",
"=",
"service",
",",
"success",
"=",
"False",
",",
"response_time",
"=",
"0",
",",
"message",
"=",
"message",
")",
"check",
".",
"save",
"(",
")"
] |
Update layers for an OGC:WMTS service.
Sample endpoint: http://map1.vis.earthdata.nasa.gov/wmts-geo/1.0.0/WMTSCapabilities.xml
|
[
"Update",
"layers",
"for",
"an",
"OGC",
":",
"WMTS",
"service",
".",
"Sample",
"endpoint",
":",
"http",
":",
"//",
"map1",
".",
"vis",
".",
"earthdata",
".",
"nasa",
".",
"gov",
"/",
"wmts",
"-",
"geo",
"/",
"1",
".",
"0",
".",
"0",
"/",
"WMTSCapabilities",
".",
"xml"
] |
899a5385b15af7fba190ab4fae1d41e47d155a1b
|
https://github.com/cga-harvard/Hypermap-Registry/blob/899a5385b15af7fba190ab4fae1d41e47d155a1b/hypermap/aggregator/models.py#L1156-L1235
|
train
|
cga-harvard/Hypermap-Registry
|
hypermap/aggregator/models.py
|
update_layers_geonode_wm
|
def update_layers_geonode_wm(service, num_layers=None):
"""
Update layers for a WorldMap instance.
Sample endpoint: http://localhost:8000/
"""
wm_api_url = urlparse.urljoin(service.url, 'worldmap/api/2.8/layer/?format=json')
if num_layers:
total = num_layers
else:
response = requests.get(wm_api_url)
data = json.loads(response.content)
total = data['meta']['total_count']
# set srs
# WorldMap supports only 4326, 900913, 3857
for crs_code in ['EPSG:4326', 'EPSG:900913', 'EPSG:3857']:
srs, created = SpatialReferenceSystem.objects.get_or_create(code=crs_code)
service.srs.add(srs)
service.update_validity()
layer_n = 0
limit = 10
for i in range(0, total, limit):
try:
url = (
'%s&order_by=-date&offset=%s&limit=%s' % (wm_api_url, i, limit)
)
LOGGER.debug('Fetching %s' % url)
response = requests.get(url)
data = json.loads(response.content)
for row in data['objects']:
typename = row['typename']
# name = typename.split(':')[1]
name = typename
uuid = row['uuid']
LOGGER.debug('Updating layer %s' % name)
title = row['title']
abstract = row['abstract']
bbox = row['bbox']
page_url = urlparse.urljoin(service.url, 'data/%s' % name)
category = ''
if 'topic_category' in row:
category = row['topic_category']
username = ''
if 'owner_username' in row:
username = row['owner_username']
temporal_extent_start = ''
if 'temporal_extent_start' in row:
temporal_extent_start = row['temporal_extent_start']
temporal_extent_end = ''
if 'temporal_extent_end' in row:
temporal_extent_end = row['temporal_extent_end']
# we use the geoserver virtual layer getcapabilities for wm endpoint
# TODO we should port make geoserver port configurable some way...
# endpoint = urlparse.urljoin(service.url, 'geoserver/geonode/%s/wms?' % name)
endpoint = urlparse.urljoin(service.url, 'geoserver/wms?')
endpoint = endpoint.replace('8000', '8080')
print endpoint
if 'is_public' in row:
is_public = row['is_public']
layer, created = Layer.objects.get_or_create(
service=service, catalog=service.catalog, name=name, uuid=uuid)
if created:
LOGGER.debug('Added a new layer in registry: %s, %s' % (name, uuid))
if layer.active:
links = [['Hypermap:WorldMap', endpoint]]
# update fields
layer.type = 'Hypermap:WorldMap'
layer.title = title
layer.abstract = abstract
layer.is_public = is_public
layer.url = endpoint
layer.page_url = page_url
# category and owner username
layer_wm, created = LayerWM.objects.get_or_create(layer=layer)
layer_wm.category = category
layer_wm.username = username
layer_wm.temporal_extent_start = temporal_extent_start
layer_wm.temporal_extent_end = temporal_extent_end
layer_wm.save()
# bbox [x0, y0, x1, y1]
# check if it is a valid bbox (TODO improve this check)
# bbox = bbox.replace('-inf', 'None')
# bbox = bbox.replace('inf', 'None')
# if bbox.count(',') == 3:
# bbox_list = bbox[1:-1].split(',')
# else:
# bbox_list = [None, None, None, None]
x0 = format_float(bbox[0])
x1 = format_float(bbox[1])
y0 = format_float(bbox[2])
y1 = format_float(bbox[3])
# In many cases for some reason to be fixed GeoServer has x coordinates flipped in WM.
x0, x1 = flip_coordinates(x0, x1)
y0, y1 = flip_coordinates(y0, y1)
layer.bbox_x0 = x0
layer.bbox_y0 = y0
layer.bbox_x1 = x1
layer.bbox_y1 = y1
# keywords
keywords = []
for keyword in row['keywords']:
keywords.append(keyword['name'])
layer.keywords.all().delete()
for keyword in keywords:
layer.keywords.add(keyword)
layer.wkt_geometry = bbox2wktpolygon([x0, y0, x1, y1])
layer.xml = create_metadata_record(
identifier=str(layer.uuid),
source=endpoint,
links=links,
format='Hypermap:WorldMap',
type=layer.csw_type,
relation=service.id_string,
title=layer.title,
alternative=name,
abstract=layer.abstract,
keywords=keywords,
wkt_geometry=layer.wkt_geometry
)
layer.anytext = gen_anytext(layer.title, layer.abstract, keywords)
layer.save()
# dates
add_mined_dates(layer)
add_metadata_dates_to_layer([layer_wm.temporal_extent_start, layer_wm.temporal_extent_end], layer)
layer_n = layer_n + 1
# exits if DEBUG_SERVICES
LOGGER.debug("Updated layer n. %s/%s" % (layer_n, total))
if DEBUG_SERVICES and layer_n == DEBUG_LAYER_NUMBER:
return
except Exception as err:
LOGGER.error('Error! %s' % err)
# update deleted layers. For now we check the whole set of deleted layers
# we should optimize it if the list will grow
# TODO implement the actions application
url = urlparse.urljoin(service.url, 'worldmap/api/2.8/actionlayerdelete/?format=json')
LOGGER.debug('Fetching %s for detecting deleted layers' % url)
try:
response = requests.get(url)
data = json.loads(response.content)
for deleted_layer in data['objects']:
if Layer.objects.filter(uuid=deleted_layer['args']).count() > 0:
layer = Layer.objects.get(uuid=deleted_layer['args'])
layer.was_deleted = True
layer.save()
LOGGER.debug('Layer %s marked as deleted' % layer.uuid)
except Exception as err:
LOGGER.error('Error! %s' % err)
|
python
|
def update_layers_geonode_wm(service, num_layers=None):
"""
Update layers for a WorldMap instance.
Sample endpoint: http://localhost:8000/
"""
wm_api_url = urlparse.urljoin(service.url, 'worldmap/api/2.8/layer/?format=json')
if num_layers:
total = num_layers
else:
response = requests.get(wm_api_url)
data = json.loads(response.content)
total = data['meta']['total_count']
# set srs
# WorldMap supports only 4326, 900913, 3857
for crs_code in ['EPSG:4326', 'EPSG:900913', 'EPSG:3857']:
srs, created = SpatialReferenceSystem.objects.get_or_create(code=crs_code)
service.srs.add(srs)
service.update_validity()
layer_n = 0
limit = 10
for i in range(0, total, limit):
try:
url = (
'%s&order_by=-date&offset=%s&limit=%s' % (wm_api_url, i, limit)
)
LOGGER.debug('Fetching %s' % url)
response = requests.get(url)
data = json.loads(response.content)
for row in data['objects']:
typename = row['typename']
# name = typename.split(':')[1]
name = typename
uuid = row['uuid']
LOGGER.debug('Updating layer %s' % name)
title = row['title']
abstract = row['abstract']
bbox = row['bbox']
page_url = urlparse.urljoin(service.url, 'data/%s' % name)
category = ''
if 'topic_category' in row:
category = row['topic_category']
username = ''
if 'owner_username' in row:
username = row['owner_username']
temporal_extent_start = ''
if 'temporal_extent_start' in row:
temporal_extent_start = row['temporal_extent_start']
temporal_extent_end = ''
if 'temporal_extent_end' in row:
temporal_extent_end = row['temporal_extent_end']
# we use the geoserver virtual layer getcapabilities for wm endpoint
# TODO we should port make geoserver port configurable some way...
# endpoint = urlparse.urljoin(service.url, 'geoserver/geonode/%s/wms?' % name)
endpoint = urlparse.urljoin(service.url, 'geoserver/wms?')
endpoint = endpoint.replace('8000', '8080')
print endpoint
if 'is_public' in row:
is_public = row['is_public']
layer, created = Layer.objects.get_or_create(
service=service, catalog=service.catalog, name=name, uuid=uuid)
if created:
LOGGER.debug('Added a new layer in registry: %s, %s' % (name, uuid))
if layer.active:
links = [['Hypermap:WorldMap', endpoint]]
# update fields
layer.type = 'Hypermap:WorldMap'
layer.title = title
layer.abstract = abstract
layer.is_public = is_public
layer.url = endpoint
layer.page_url = page_url
# category and owner username
layer_wm, created = LayerWM.objects.get_or_create(layer=layer)
layer_wm.category = category
layer_wm.username = username
layer_wm.temporal_extent_start = temporal_extent_start
layer_wm.temporal_extent_end = temporal_extent_end
layer_wm.save()
# bbox [x0, y0, x1, y1]
# check if it is a valid bbox (TODO improve this check)
# bbox = bbox.replace('-inf', 'None')
# bbox = bbox.replace('inf', 'None')
# if bbox.count(',') == 3:
# bbox_list = bbox[1:-1].split(',')
# else:
# bbox_list = [None, None, None, None]
x0 = format_float(bbox[0])
x1 = format_float(bbox[1])
y0 = format_float(bbox[2])
y1 = format_float(bbox[3])
# In many cases for some reason to be fixed GeoServer has x coordinates flipped in WM.
x0, x1 = flip_coordinates(x0, x1)
y0, y1 = flip_coordinates(y0, y1)
layer.bbox_x0 = x0
layer.bbox_y0 = y0
layer.bbox_x1 = x1
layer.bbox_y1 = y1
# keywords
keywords = []
for keyword in row['keywords']:
keywords.append(keyword['name'])
layer.keywords.all().delete()
for keyword in keywords:
layer.keywords.add(keyword)
layer.wkt_geometry = bbox2wktpolygon([x0, y0, x1, y1])
layer.xml = create_metadata_record(
identifier=str(layer.uuid),
source=endpoint,
links=links,
format='Hypermap:WorldMap',
type=layer.csw_type,
relation=service.id_string,
title=layer.title,
alternative=name,
abstract=layer.abstract,
keywords=keywords,
wkt_geometry=layer.wkt_geometry
)
layer.anytext = gen_anytext(layer.title, layer.abstract, keywords)
layer.save()
# dates
add_mined_dates(layer)
add_metadata_dates_to_layer([layer_wm.temporal_extent_start, layer_wm.temporal_extent_end], layer)
layer_n = layer_n + 1
# exits if DEBUG_SERVICES
LOGGER.debug("Updated layer n. %s/%s" % (layer_n, total))
if DEBUG_SERVICES and layer_n == DEBUG_LAYER_NUMBER:
return
except Exception as err:
LOGGER.error('Error! %s' % err)
# update deleted layers. For now we check the whole set of deleted layers
# we should optimize it if the list will grow
# TODO implement the actions application
url = urlparse.urljoin(service.url, 'worldmap/api/2.8/actionlayerdelete/?format=json')
LOGGER.debug('Fetching %s for detecting deleted layers' % url)
try:
response = requests.get(url)
data = json.loads(response.content)
for deleted_layer in data['objects']:
if Layer.objects.filter(uuid=deleted_layer['args']).count() > 0:
layer = Layer.objects.get(uuid=deleted_layer['args'])
layer.was_deleted = True
layer.save()
LOGGER.debug('Layer %s marked as deleted' % layer.uuid)
except Exception as err:
LOGGER.error('Error! %s' % err)
|
[
"def",
"update_layers_geonode_wm",
"(",
"service",
",",
"num_layers",
"=",
"None",
")",
":",
"wm_api_url",
"=",
"urlparse",
".",
"urljoin",
"(",
"service",
".",
"url",
",",
"'worldmap/api/2.8/layer/?format=json'",
")",
"if",
"num_layers",
":",
"total",
"=",
"num_layers",
"else",
":",
"response",
"=",
"requests",
".",
"get",
"(",
"wm_api_url",
")",
"data",
"=",
"json",
".",
"loads",
"(",
"response",
".",
"content",
")",
"total",
"=",
"data",
"[",
"'meta'",
"]",
"[",
"'total_count'",
"]",
"# set srs",
"# WorldMap supports only 4326, 900913, 3857",
"for",
"crs_code",
"in",
"[",
"'EPSG:4326'",
",",
"'EPSG:900913'",
",",
"'EPSG:3857'",
"]",
":",
"srs",
",",
"created",
"=",
"SpatialReferenceSystem",
".",
"objects",
".",
"get_or_create",
"(",
"code",
"=",
"crs_code",
")",
"service",
".",
"srs",
".",
"add",
"(",
"srs",
")",
"service",
".",
"update_validity",
"(",
")",
"layer_n",
"=",
"0",
"limit",
"=",
"10",
"for",
"i",
"in",
"range",
"(",
"0",
",",
"total",
",",
"limit",
")",
":",
"try",
":",
"url",
"=",
"(",
"'%s&order_by=-date&offset=%s&limit=%s'",
"%",
"(",
"wm_api_url",
",",
"i",
",",
"limit",
")",
")",
"LOGGER",
".",
"debug",
"(",
"'Fetching %s'",
"%",
"url",
")",
"response",
"=",
"requests",
".",
"get",
"(",
"url",
")",
"data",
"=",
"json",
".",
"loads",
"(",
"response",
".",
"content",
")",
"for",
"row",
"in",
"data",
"[",
"'objects'",
"]",
":",
"typename",
"=",
"row",
"[",
"'typename'",
"]",
"# name = typename.split(':')[1]",
"name",
"=",
"typename",
"uuid",
"=",
"row",
"[",
"'uuid'",
"]",
"LOGGER",
".",
"debug",
"(",
"'Updating layer %s'",
"%",
"name",
")",
"title",
"=",
"row",
"[",
"'title'",
"]",
"abstract",
"=",
"row",
"[",
"'abstract'",
"]",
"bbox",
"=",
"row",
"[",
"'bbox'",
"]",
"page_url",
"=",
"urlparse",
".",
"urljoin",
"(",
"service",
".",
"url",
",",
"'data/%s'",
"%",
"name",
")",
"category",
"=",
"''",
"if",
"'topic_category'",
"in",
"row",
":",
"category",
"=",
"row",
"[",
"'topic_category'",
"]",
"username",
"=",
"''",
"if",
"'owner_username'",
"in",
"row",
":",
"username",
"=",
"row",
"[",
"'owner_username'",
"]",
"temporal_extent_start",
"=",
"''",
"if",
"'temporal_extent_start'",
"in",
"row",
":",
"temporal_extent_start",
"=",
"row",
"[",
"'temporal_extent_start'",
"]",
"temporal_extent_end",
"=",
"''",
"if",
"'temporal_extent_end'",
"in",
"row",
":",
"temporal_extent_end",
"=",
"row",
"[",
"'temporal_extent_end'",
"]",
"# we use the geoserver virtual layer getcapabilities for wm endpoint",
"# TODO we should port make geoserver port configurable some way...",
"# endpoint = urlparse.urljoin(service.url, 'geoserver/geonode/%s/wms?' % name)",
"endpoint",
"=",
"urlparse",
".",
"urljoin",
"(",
"service",
".",
"url",
",",
"'geoserver/wms?'",
")",
"endpoint",
"=",
"endpoint",
".",
"replace",
"(",
"'8000'",
",",
"'8080'",
")",
"print",
"endpoint",
"if",
"'is_public'",
"in",
"row",
":",
"is_public",
"=",
"row",
"[",
"'is_public'",
"]",
"layer",
",",
"created",
"=",
"Layer",
".",
"objects",
".",
"get_or_create",
"(",
"service",
"=",
"service",
",",
"catalog",
"=",
"service",
".",
"catalog",
",",
"name",
"=",
"name",
",",
"uuid",
"=",
"uuid",
")",
"if",
"created",
":",
"LOGGER",
".",
"debug",
"(",
"'Added a new layer in registry: %s, %s'",
"%",
"(",
"name",
",",
"uuid",
")",
")",
"if",
"layer",
".",
"active",
":",
"links",
"=",
"[",
"[",
"'Hypermap:WorldMap'",
",",
"endpoint",
"]",
"]",
"# update fields",
"layer",
".",
"type",
"=",
"'Hypermap:WorldMap'",
"layer",
".",
"title",
"=",
"title",
"layer",
".",
"abstract",
"=",
"abstract",
"layer",
".",
"is_public",
"=",
"is_public",
"layer",
".",
"url",
"=",
"endpoint",
"layer",
".",
"page_url",
"=",
"page_url",
"# category and owner username",
"layer_wm",
",",
"created",
"=",
"LayerWM",
".",
"objects",
".",
"get_or_create",
"(",
"layer",
"=",
"layer",
")",
"layer_wm",
".",
"category",
"=",
"category",
"layer_wm",
".",
"username",
"=",
"username",
"layer_wm",
".",
"temporal_extent_start",
"=",
"temporal_extent_start",
"layer_wm",
".",
"temporal_extent_end",
"=",
"temporal_extent_end",
"layer_wm",
".",
"save",
"(",
")",
"# bbox [x0, y0, x1, y1]",
"# check if it is a valid bbox (TODO improve this check)",
"# bbox = bbox.replace('-inf', 'None')",
"# bbox = bbox.replace('inf', 'None')",
"# if bbox.count(',') == 3:",
"# bbox_list = bbox[1:-1].split(',')",
"# else:",
"# bbox_list = [None, None, None, None]",
"x0",
"=",
"format_float",
"(",
"bbox",
"[",
"0",
"]",
")",
"x1",
"=",
"format_float",
"(",
"bbox",
"[",
"1",
"]",
")",
"y0",
"=",
"format_float",
"(",
"bbox",
"[",
"2",
"]",
")",
"y1",
"=",
"format_float",
"(",
"bbox",
"[",
"3",
"]",
")",
"# In many cases for some reason to be fixed GeoServer has x coordinates flipped in WM.",
"x0",
",",
"x1",
"=",
"flip_coordinates",
"(",
"x0",
",",
"x1",
")",
"y0",
",",
"y1",
"=",
"flip_coordinates",
"(",
"y0",
",",
"y1",
")",
"layer",
".",
"bbox_x0",
"=",
"x0",
"layer",
".",
"bbox_y0",
"=",
"y0",
"layer",
".",
"bbox_x1",
"=",
"x1",
"layer",
".",
"bbox_y1",
"=",
"y1",
"# keywords",
"keywords",
"=",
"[",
"]",
"for",
"keyword",
"in",
"row",
"[",
"'keywords'",
"]",
":",
"keywords",
".",
"append",
"(",
"keyword",
"[",
"'name'",
"]",
")",
"layer",
".",
"keywords",
".",
"all",
"(",
")",
".",
"delete",
"(",
")",
"for",
"keyword",
"in",
"keywords",
":",
"layer",
".",
"keywords",
".",
"add",
"(",
"keyword",
")",
"layer",
".",
"wkt_geometry",
"=",
"bbox2wktpolygon",
"(",
"[",
"x0",
",",
"y0",
",",
"x1",
",",
"y1",
"]",
")",
"layer",
".",
"xml",
"=",
"create_metadata_record",
"(",
"identifier",
"=",
"str",
"(",
"layer",
".",
"uuid",
")",
",",
"source",
"=",
"endpoint",
",",
"links",
"=",
"links",
",",
"format",
"=",
"'Hypermap:WorldMap'",
",",
"type",
"=",
"layer",
".",
"csw_type",
",",
"relation",
"=",
"service",
".",
"id_string",
",",
"title",
"=",
"layer",
".",
"title",
",",
"alternative",
"=",
"name",
",",
"abstract",
"=",
"layer",
".",
"abstract",
",",
"keywords",
"=",
"keywords",
",",
"wkt_geometry",
"=",
"layer",
".",
"wkt_geometry",
")",
"layer",
".",
"anytext",
"=",
"gen_anytext",
"(",
"layer",
".",
"title",
",",
"layer",
".",
"abstract",
",",
"keywords",
")",
"layer",
".",
"save",
"(",
")",
"# dates",
"add_mined_dates",
"(",
"layer",
")",
"add_metadata_dates_to_layer",
"(",
"[",
"layer_wm",
".",
"temporal_extent_start",
",",
"layer_wm",
".",
"temporal_extent_end",
"]",
",",
"layer",
")",
"layer_n",
"=",
"layer_n",
"+",
"1",
"# exits if DEBUG_SERVICES",
"LOGGER",
".",
"debug",
"(",
"\"Updated layer n. %s/%s\"",
"%",
"(",
"layer_n",
",",
"total",
")",
")",
"if",
"DEBUG_SERVICES",
"and",
"layer_n",
"==",
"DEBUG_LAYER_NUMBER",
":",
"return",
"except",
"Exception",
"as",
"err",
":",
"LOGGER",
".",
"error",
"(",
"'Error! %s'",
"%",
"err",
")",
"# update deleted layers. For now we check the whole set of deleted layers",
"# we should optimize it if the list will grow",
"# TODO implement the actions application",
"url",
"=",
"urlparse",
".",
"urljoin",
"(",
"service",
".",
"url",
",",
"'worldmap/api/2.8/actionlayerdelete/?format=json'",
")",
"LOGGER",
".",
"debug",
"(",
"'Fetching %s for detecting deleted layers'",
"%",
"url",
")",
"try",
":",
"response",
"=",
"requests",
".",
"get",
"(",
"url",
")",
"data",
"=",
"json",
".",
"loads",
"(",
"response",
".",
"content",
")",
"for",
"deleted_layer",
"in",
"data",
"[",
"'objects'",
"]",
":",
"if",
"Layer",
".",
"objects",
".",
"filter",
"(",
"uuid",
"=",
"deleted_layer",
"[",
"'args'",
"]",
")",
".",
"count",
"(",
")",
">",
"0",
":",
"layer",
"=",
"Layer",
".",
"objects",
".",
"get",
"(",
"uuid",
"=",
"deleted_layer",
"[",
"'args'",
"]",
")",
"layer",
".",
"was_deleted",
"=",
"True",
"layer",
".",
"save",
"(",
")",
"LOGGER",
".",
"debug",
"(",
"'Layer %s marked as deleted'",
"%",
"layer",
".",
"uuid",
")",
"except",
"Exception",
"as",
"err",
":",
"LOGGER",
".",
"error",
"(",
"'Error! %s'",
"%",
"err",
")"
] |
Update layers for a WorldMap instance.
Sample endpoint: http://localhost:8000/
|
[
"Update",
"layers",
"for",
"a",
"WorldMap",
"instance",
".",
"Sample",
"endpoint",
":",
"http",
":",
"//",
"localhost",
":",
"8000",
"/"
] |
899a5385b15af7fba190ab4fae1d41e47d155a1b
|
https://github.com/cga-harvard/Hypermap-Registry/blob/899a5385b15af7fba190ab4fae1d41e47d155a1b/hypermap/aggregator/models.py#L1238-L1390
|
train
|
cga-harvard/Hypermap-Registry
|
hypermap/aggregator/models.py
|
update_layers_warper
|
def update_layers_warper(service):
"""
Update layers for a Warper service.
Sample endpoint: http://warp.worldmap.harvard.edu/maps
"""
params = {'field': 'title', 'query': '', 'show_warped': '1', 'format': 'json'}
headers = {'Content-Type': 'application/json', 'Accept': 'application/json'}
request = requests.get(service.url, headers=headers, params=params)
try:
records = json.loads(request.content)
total_pages = int(records['total_pages'])
# set srs
# Warper supports only 4326, 900913, 3857
for crs_code in ['EPSG:4326', 'EPSG:900913', 'EPSG:3857']:
srs, created = SpatialReferenceSystem.objects.get_or_create(code=crs_code)
service.srs.add(srs)
service.update_validity()
for i in range(1, total_pages + 1):
params = {'field': 'title', 'query': '', 'show_warped': '1', 'format': 'json', 'page': i}
request = requests.get(service.url, headers=headers, params=params)
records = json.loads(request.content)
LOGGER.debug('Fetched %s' % request.url)
layers = records['items']
layer_n = 0
total = len(layers)
for layer in layers:
name = layer['id']
title = layer['title']
abstract = layer['description']
bbox = layer['bbox']
# dates
dates = []
if 'published_date' in layer:
dates.append(layer['published_date'])
if 'date_depicted' in layer:
dates.append(layer['date_depicted'])
if 'depicts_year' in layer:
dates.append(layer['depicts_year'])
if 'issue_year' in layer:
dates.append(layer['issue_year'])
layer, created = Layer.objects.get_or_create(name=name, service=service, catalog=service.catalog)
if layer.active:
# update fields
# links = [['OGC:WMTS', settings.SITE_URL.rstrip('/') + '/' + layer.get_url_endpoint()]]
layer.type = 'Hypermap:WARPER'
layer.title = title
layer.abstract = abstract
layer.is_public = True
layer.url = '%s/wms/%s?' % (service.url, name)
layer.page_url = '%s/%s' % (service.url, name)
# bbox
x0 = None
y0 = None
x1 = None
y1 = None
if bbox:
bbox_list = bbox.split(',')
x0 = format_float(bbox_list[0])
y0 = format_float(bbox_list[1])
x1 = format_float(bbox_list[2])
y1 = format_float(bbox_list[3])
layer.bbox_x0 = x0
layer.bbox_y0 = y0
layer.bbox_x1 = x1
layer.bbox_y1 = y1
layer.save()
# dates
add_mined_dates(layer)
add_metadata_dates_to_layer(dates, layer)
layer_n = layer_n + 1
# exits if DEBUG_SERVICES
LOGGER.debug("Updating layer n. %s/%s" % (layer_n, total))
if DEBUG_SERVICES and layer_n == DEBUG_LAYER_NUMBER:
return
except Exception as err:
message = "update_layers_warper: {0}. request={1} response={2}".format(
err,
service.url,
request.text
)
check = Check(
content_object=service,
success=False,
response_time=0,
message=message
)
check.save()
|
python
|
def update_layers_warper(service):
"""
Update layers for a Warper service.
Sample endpoint: http://warp.worldmap.harvard.edu/maps
"""
params = {'field': 'title', 'query': '', 'show_warped': '1', 'format': 'json'}
headers = {'Content-Type': 'application/json', 'Accept': 'application/json'}
request = requests.get(service.url, headers=headers, params=params)
try:
records = json.loads(request.content)
total_pages = int(records['total_pages'])
# set srs
# Warper supports only 4326, 900913, 3857
for crs_code in ['EPSG:4326', 'EPSG:900913', 'EPSG:3857']:
srs, created = SpatialReferenceSystem.objects.get_or_create(code=crs_code)
service.srs.add(srs)
service.update_validity()
for i in range(1, total_pages + 1):
params = {'field': 'title', 'query': '', 'show_warped': '1', 'format': 'json', 'page': i}
request = requests.get(service.url, headers=headers, params=params)
records = json.loads(request.content)
LOGGER.debug('Fetched %s' % request.url)
layers = records['items']
layer_n = 0
total = len(layers)
for layer in layers:
name = layer['id']
title = layer['title']
abstract = layer['description']
bbox = layer['bbox']
# dates
dates = []
if 'published_date' in layer:
dates.append(layer['published_date'])
if 'date_depicted' in layer:
dates.append(layer['date_depicted'])
if 'depicts_year' in layer:
dates.append(layer['depicts_year'])
if 'issue_year' in layer:
dates.append(layer['issue_year'])
layer, created = Layer.objects.get_or_create(name=name, service=service, catalog=service.catalog)
if layer.active:
# update fields
# links = [['OGC:WMTS', settings.SITE_URL.rstrip('/') + '/' + layer.get_url_endpoint()]]
layer.type = 'Hypermap:WARPER'
layer.title = title
layer.abstract = abstract
layer.is_public = True
layer.url = '%s/wms/%s?' % (service.url, name)
layer.page_url = '%s/%s' % (service.url, name)
# bbox
x0 = None
y0 = None
x1 = None
y1 = None
if bbox:
bbox_list = bbox.split(',')
x0 = format_float(bbox_list[0])
y0 = format_float(bbox_list[1])
x1 = format_float(bbox_list[2])
y1 = format_float(bbox_list[3])
layer.bbox_x0 = x0
layer.bbox_y0 = y0
layer.bbox_x1 = x1
layer.bbox_y1 = y1
layer.save()
# dates
add_mined_dates(layer)
add_metadata_dates_to_layer(dates, layer)
layer_n = layer_n + 1
# exits if DEBUG_SERVICES
LOGGER.debug("Updating layer n. %s/%s" % (layer_n, total))
if DEBUG_SERVICES and layer_n == DEBUG_LAYER_NUMBER:
return
except Exception as err:
message = "update_layers_warper: {0}. request={1} response={2}".format(
err,
service.url,
request.text
)
check = Check(
content_object=service,
success=False,
response_time=0,
message=message
)
check.save()
|
[
"def",
"update_layers_warper",
"(",
"service",
")",
":",
"params",
"=",
"{",
"'field'",
":",
"'title'",
",",
"'query'",
":",
"''",
",",
"'show_warped'",
":",
"'1'",
",",
"'format'",
":",
"'json'",
"}",
"headers",
"=",
"{",
"'Content-Type'",
":",
"'application/json'",
",",
"'Accept'",
":",
"'application/json'",
"}",
"request",
"=",
"requests",
".",
"get",
"(",
"service",
".",
"url",
",",
"headers",
"=",
"headers",
",",
"params",
"=",
"params",
")",
"try",
":",
"records",
"=",
"json",
".",
"loads",
"(",
"request",
".",
"content",
")",
"total_pages",
"=",
"int",
"(",
"records",
"[",
"'total_pages'",
"]",
")",
"# set srs",
"# Warper supports only 4326, 900913, 3857",
"for",
"crs_code",
"in",
"[",
"'EPSG:4326'",
",",
"'EPSG:900913'",
",",
"'EPSG:3857'",
"]",
":",
"srs",
",",
"created",
"=",
"SpatialReferenceSystem",
".",
"objects",
".",
"get_or_create",
"(",
"code",
"=",
"crs_code",
")",
"service",
".",
"srs",
".",
"add",
"(",
"srs",
")",
"service",
".",
"update_validity",
"(",
")",
"for",
"i",
"in",
"range",
"(",
"1",
",",
"total_pages",
"+",
"1",
")",
":",
"params",
"=",
"{",
"'field'",
":",
"'title'",
",",
"'query'",
":",
"''",
",",
"'show_warped'",
":",
"'1'",
",",
"'format'",
":",
"'json'",
",",
"'page'",
":",
"i",
"}",
"request",
"=",
"requests",
".",
"get",
"(",
"service",
".",
"url",
",",
"headers",
"=",
"headers",
",",
"params",
"=",
"params",
")",
"records",
"=",
"json",
".",
"loads",
"(",
"request",
".",
"content",
")",
"LOGGER",
".",
"debug",
"(",
"'Fetched %s'",
"%",
"request",
".",
"url",
")",
"layers",
"=",
"records",
"[",
"'items'",
"]",
"layer_n",
"=",
"0",
"total",
"=",
"len",
"(",
"layers",
")",
"for",
"layer",
"in",
"layers",
":",
"name",
"=",
"layer",
"[",
"'id'",
"]",
"title",
"=",
"layer",
"[",
"'title'",
"]",
"abstract",
"=",
"layer",
"[",
"'description'",
"]",
"bbox",
"=",
"layer",
"[",
"'bbox'",
"]",
"# dates",
"dates",
"=",
"[",
"]",
"if",
"'published_date'",
"in",
"layer",
":",
"dates",
".",
"append",
"(",
"layer",
"[",
"'published_date'",
"]",
")",
"if",
"'date_depicted'",
"in",
"layer",
":",
"dates",
".",
"append",
"(",
"layer",
"[",
"'date_depicted'",
"]",
")",
"if",
"'depicts_year'",
"in",
"layer",
":",
"dates",
".",
"append",
"(",
"layer",
"[",
"'depicts_year'",
"]",
")",
"if",
"'issue_year'",
"in",
"layer",
":",
"dates",
".",
"append",
"(",
"layer",
"[",
"'issue_year'",
"]",
")",
"layer",
",",
"created",
"=",
"Layer",
".",
"objects",
".",
"get_or_create",
"(",
"name",
"=",
"name",
",",
"service",
"=",
"service",
",",
"catalog",
"=",
"service",
".",
"catalog",
")",
"if",
"layer",
".",
"active",
":",
"# update fields",
"# links = [['OGC:WMTS', settings.SITE_URL.rstrip('/') + '/' + layer.get_url_endpoint()]]",
"layer",
".",
"type",
"=",
"'Hypermap:WARPER'",
"layer",
".",
"title",
"=",
"title",
"layer",
".",
"abstract",
"=",
"abstract",
"layer",
".",
"is_public",
"=",
"True",
"layer",
".",
"url",
"=",
"'%s/wms/%s?'",
"%",
"(",
"service",
".",
"url",
",",
"name",
")",
"layer",
".",
"page_url",
"=",
"'%s/%s'",
"%",
"(",
"service",
".",
"url",
",",
"name",
")",
"# bbox",
"x0",
"=",
"None",
"y0",
"=",
"None",
"x1",
"=",
"None",
"y1",
"=",
"None",
"if",
"bbox",
":",
"bbox_list",
"=",
"bbox",
".",
"split",
"(",
"','",
")",
"x0",
"=",
"format_float",
"(",
"bbox_list",
"[",
"0",
"]",
")",
"y0",
"=",
"format_float",
"(",
"bbox_list",
"[",
"1",
"]",
")",
"x1",
"=",
"format_float",
"(",
"bbox_list",
"[",
"2",
"]",
")",
"y1",
"=",
"format_float",
"(",
"bbox_list",
"[",
"3",
"]",
")",
"layer",
".",
"bbox_x0",
"=",
"x0",
"layer",
".",
"bbox_y0",
"=",
"y0",
"layer",
".",
"bbox_x1",
"=",
"x1",
"layer",
".",
"bbox_y1",
"=",
"y1",
"layer",
".",
"save",
"(",
")",
"# dates",
"add_mined_dates",
"(",
"layer",
")",
"add_metadata_dates_to_layer",
"(",
"dates",
",",
"layer",
")",
"layer_n",
"=",
"layer_n",
"+",
"1",
"# exits if DEBUG_SERVICES",
"LOGGER",
".",
"debug",
"(",
"\"Updating layer n. %s/%s\"",
"%",
"(",
"layer_n",
",",
"total",
")",
")",
"if",
"DEBUG_SERVICES",
"and",
"layer_n",
"==",
"DEBUG_LAYER_NUMBER",
":",
"return",
"except",
"Exception",
"as",
"err",
":",
"message",
"=",
"\"update_layers_warper: {0}. request={1} response={2}\"",
".",
"format",
"(",
"err",
",",
"service",
".",
"url",
",",
"request",
".",
"text",
")",
"check",
"=",
"Check",
"(",
"content_object",
"=",
"service",
",",
"success",
"=",
"False",
",",
"response_time",
"=",
"0",
",",
"message",
"=",
"message",
")",
"check",
".",
"save",
"(",
")"
] |
Update layers for a Warper service.
Sample endpoint: http://warp.worldmap.harvard.edu/maps
|
[
"Update",
"layers",
"for",
"a",
"Warper",
"service",
".",
"Sample",
"endpoint",
":",
"http",
":",
"//",
"warp",
".",
"worldmap",
".",
"harvard",
".",
"edu",
"/",
"maps"
] |
899a5385b15af7fba190ab4fae1d41e47d155a1b
|
https://github.com/cga-harvard/Hypermap-Registry/blob/899a5385b15af7fba190ab4fae1d41e47d155a1b/hypermap/aggregator/models.py#L1541-L1632
|
train
|
cga-harvard/Hypermap-Registry
|
hypermap/aggregator/models.py
|
update_layers_esri_mapserver
|
def update_layers_esri_mapserver(service, greedy_opt=False):
"""
Update layers for an ESRI REST MapServer.
Sample endpoint: https://gis.ngdc.noaa.gov/arcgis/rest/services/SampleWorldCities/MapServer/?f=json
"""
try:
esri_service = ArcMapService(service.url)
# set srs
# both mapserver and imageserver exposes just one srs at the service level
# not sure if other ones are supported, for now we just store this one
# not sure why this is needed, for now commenting out
# if wkt_text:
# params = {'exact': 'True', 'error': 'True', 'mode': 'wkt', 'terms': wkt_text}
# req = requests.get('http://prj2epsg.org/search.json', params=params)
# object = json.loads(req.content)
# srs = int(object['codes'][0]['code'])
srs_code = esri_service.spatialReference.wkid
srs, created = SpatialReferenceSystem.objects.get_or_create(code=srs_code)
service.srs.add(srs)
service.update_validity()
# check if it has a WMS interface
if 'supportedExtensions' in esri_service._json_struct and greedy_opt:
if 'WMSServer' in esri_service._json_struct['supportedExtensions']:
# we need to change the url
# http://cga1.cga.harvard.edu/arcgis/rest/services/ecuador/ecuadordata/MapServer?f=pjson
# http://cga1.cga.harvard.edu/arcgis/services/ecuador/
# ecuadordata/MapServer/WMSServer?request=GetCapabilities&service=WMS
wms_url = service.url.replace('/rest/services/', '/services/')
if '?f=pjson' in wms_url:
wms_url = wms_url.replace('?f=pjson', 'WMSServer?')
if '?f=json' in wms_url:
wms_url = wms_url.replace('?f=json', 'WMSServer?')
LOGGER.debug('This ESRI REST endpoint has an WMS interface to process: %s' % wms_url)
# import here as otherwise is circular (TODO refactor)
from utils import create_service_from_endpoint
create_service_from_endpoint(wms_url, 'OGC:WMS', catalog=service.catalog)
# now process the REST interface
layer_n = 0
total = len(esri_service.layers)
for esri_layer in esri_service.layers:
# in some case the json is invalid
# esri_layer._json_struct
# {u'currentVersion': 10.01,
# u'error':
# {u'message': u'An unexpected error occurred processing the request.', u'code': 500, u'details': []}}
if 'error' not in esri_layer._json_struct:
LOGGER.debug('Updating layer %s' % esri_layer.name)
layer, created = Layer.objects.get_or_create(
name=esri_layer.id,
service=service,
catalog=service.catalog
)
if layer.active:
layer.type = 'ESRI:ArcGIS:MapServer'
links = [[layer.type, service.url],
['OGC:WMTS', settings.SITE_URL.rstrip('/') + '/' + layer.get_url_endpoint()]]
layer.title = esri_layer.name
layer.abstract = esri_service.serviceDescription
layer.url = service.url
layer.page_url = layer.get_absolute_url
links.append([
'WWW:LINK',
settings.SITE_URL.rstrip('/') + layer.page_url
])
try:
layer.bbox_x0 = esri_layer.extent.xmin
layer.bbox_y0 = esri_layer.extent.ymin
layer.bbox_x1 = esri_layer.extent.xmax
layer.bbox_y1 = esri_layer.extent.ymax
except KeyError:
pass
try:
layer.bbox_x0 = esri_layer._json_struct['extent']['xmin']
layer.bbox_y0 = esri_layer._json_struct['extent']['ymin']
layer.bbox_x1 = esri_layer._json_struct['extent']['xmax']
layer.bbox_y1 = esri_layer._json_struct['extent']['ymax']
except Exception:
pass
layer.wkt_geometry = bbox2wktpolygon([layer.bbox_x0, layer.bbox_y0, layer.bbox_x1, layer.bbox_y1])
layer.xml = create_metadata_record(
identifier=str(layer.uuid),
source=service.url,
links=links,
format='ESRI:ArcGIS:MapServer',
type=layer.csw_type,
relation=service.id_string,
title=layer.title,
alternative=layer.title,
abstract=layer.abstract,
wkt_geometry=layer.wkt_geometry
)
layer.anytext = gen_anytext(layer.title, layer.abstract)
layer.save()
# dates
add_mined_dates(layer)
layer_n = layer_n + 1
# exits if DEBUG_SERVICES
LOGGER.debug("Updating layer n. %s/%s" % (layer_n, total))
if DEBUG_SERVICES and layer_n == DEBUG_LAYER_NUMBER:
return
except Exception as err:
message = "update_layers_esri_mapserver: {0}".format(
err
)
check = Check(
content_object=service,
success=False,
response_time=0,
message=message
)
check.save()
|
python
|
def update_layers_esri_mapserver(service, greedy_opt=False):
"""
Update layers for an ESRI REST MapServer.
Sample endpoint: https://gis.ngdc.noaa.gov/arcgis/rest/services/SampleWorldCities/MapServer/?f=json
"""
try:
esri_service = ArcMapService(service.url)
# set srs
# both mapserver and imageserver exposes just one srs at the service level
# not sure if other ones are supported, for now we just store this one
# not sure why this is needed, for now commenting out
# if wkt_text:
# params = {'exact': 'True', 'error': 'True', 'mode': 'wkt', 'terms': wkt_text}
# req = requests.get('http://prj2epsg.org/search.json', params=params)
# object = json.loads(req.content)
# srs = int(object['codes'][0]['code'])
srs_code = esri_service.spatialReference.wkid
srs, created = SpatialReferenceSystem.objects.get_or_create(code=srs_code)
service.srs.add(srs)
service.update_validity()
# check if it has a WMS interface
if 'supportedExtensions' in esri_service._json_struct and greedy_opt:
if 'WMSServer' in esri_service._json_struct['supportedExtensions']:
# we need to change the url
# http://cga1.cga.harvard.edu/arcgis/rest/services/ecuador/ecuadordata/MapServer?f=pjson
# http://cga1.cga.harvard.edu/arcgis/services/ecuador/
# ecuadordata/MapServer/WMSServer?request=GetCapabilities&service=WMS
wms_url = service.url.replace('/rest/services/', '/services/')
if '?f=pjson' in wms_url:
wms_url = wms_url.replace('?f=pjson', 'WMSServer?')
if '?f=json' in wms_url:
wms_url = wms_url.replace('?f=json', 'WMSServer?')
LOGGER.debug('This ESRI REST endpoint has an WMS interface to process: %s' % wms_url)
# import here as otherwise is circular (TODO refactor)
from utils import create_service_from_endpoint
create_service_from_endpoint(wms_url, 'OGC:WMS', catalog=service.catalog)
# now process the REST interface
layer_n = 0
total = len(esri_service.layers)
for esri_layer in esri_service.layers:
# in some case the json is invalid
# esri_layer._json_struct
# {u'currentVersion': 10.01,
# u'error':
# {u'message': u'An unexpected error occurred processing the request.', u'code': 500, u'details': []}}
if 'error' not in esri_layer._json_struct:
LOGGER.debug('Updating layer %s' % esri_layer.name)
layer, created = Layer.objects.get_or_create(
name=esri_layer.id,
service=service,
catalog=service.catalog
)
if layer.active:
layer.type = 'ESRI:ArcGIS:MapServer'
links = [[layer.type, service.url],
['OGC:WMTS', settings.SITE_URL.rstrip('/') + '/' + layer.get_url_endpoint()]]
layer.title = esri_layer.name
layer.abstract = esri_service.serviceDescription
layer.url = service.url
layer.page_url = layer.get_absolute_url
links.append([
'WWW:LINK',
settings.SITE_URL.rstrip('/') + layer.page_url
])
try:
layer.bbox_x0 = esri_layer.extent.xmin
layer.bbox_y0 = esri_layer.extent.ymin
layer.bbox_x1 = esri_layer.extent.xmax
layer.bbox_y1 = esri_layer.extent.ymax
except KeyError:
pass
try:
layer.bbox_x0 = esri_layer._json_struct['extent']['xmin']
layer.bbox_y0 = esri_layer._json_struct['extent']['ymin']
layer.bbox_x1 = esri_layer._json_struct['extent']['xmax']
layer.bbox_y1 = esri_layer._json_struct['extent']['ymax']
except Exception:
pass
layer.wkt_geometry = bbox2wktpolygon([layer.bbox_x0, layer.bbox_y0, layer.bbox_x1, layer.bbox_y1])
layer.xml = create_metadata_record(
identifier=str(layer.uuid),
source=service.url,
links=links,
format='ESRI:ArcGIS:MapServer',
type=layer.csw_type,
relation=service.id_string,
title=layer.title,
alternative=layer.title,
abstract=layer.abstract,
wkt_geometry=layer.wkt_geometry
)
layer.anytext = gen_anytext(layer.title, layer.abstract)
layer.save()
# dates
add_mined_dates(layer)
layer_n = layer_n + 1
# exits if DEBUG_SERVICES
LOGGER.debug("Updating layer n. %s/%s" % (layer_n, total))
if DEBUG_SERVICES and layer_n == DEBUG_LAYER_NUMBER:
return
except Exception as err:
message = "update_layers_esri_mapserver: {0}".format(
err
)
check = Check(
content_object=service,
success=False,
response_time=0,
message=message
)
check.save()
|
[
"def",
"update_layers_esri_mapserver",
"(",
"service",
",",
"greedy_opt",
"=",
"False",
")",
":",
"try",
":",
"esri_service",
"=",
"ArcMapService",
"(",
"service",
".",
"url",
")",
"# set srs",
"# both mapserver and imageserver exposes just one srs at the service level",
"# not sure if other ones are supported, for now we just store this one",
"# not sure why this is needed, for now commenting out",
"# if wkt_text:",
"# params = {'exact': 'True', 'error': 'True', 'mode': 'wkt', 'terms': wkt_text}",
"# req = requests.get('http://prj2epsg.org/search.json', params=params)",
"# object = json.loads(req.content)",
"# srs = int(object['codes'][0]['code'])",
"srs_code",
"=",
"esri_service",
".",
"spatialReference",
".",
"wkid",
"srs",
",",
"created",
"=",
"SpatialReferenceSystem",
".",
"objects",
".",
"get_or_create",
"(",
"code",
"=",
"srs_code",
")",
"service",
".",
"srs",
".",
"add",
"(",
"srs",
")",
"service",
".",
"update_validity",
"(",
")",
"# check if it has a WMS interface",
"if",
"'supportedExtensions'",
"in",
"esri_service",
".",
"_json_struct",
"and",
"greedy_opt",
":",
"if",
"'WMSServer'",
"in",
"esri_service",
".",
"_json_struct",
"[",
"'supportedExtensions'",
"]",
":",
"# we need to change the url",
"# http://cga1.cga.harvard.edu/arcgis/rest/services/ecuador/ecuadordata/MapServer?f=pjson",
"# http://cga1.cga.harvard.edu/arcgis/services/ecuador/",
"# ecuadordata/MapServer/WMSServer?request=GetCapabilities&service=WMS",
"wms_url",
"=",
"service",
".",
"url",
".",
"replace",
"(",
"'/rest/services/'",
",",
"'/services/'",
")",
"if",
"'?f=pjson'",
"in",
"wms_url",
":",
"wms_url",
"=",
"wms_url",
".",
"replace",
"(",
"'?f=pjson'",
",",
"'WMSServer?'",
")",
"if",
"'?f=json'",
"in",
"wms_url",
":",
"wms_url",
"=",
"wms_url",
".",
"replace",
"(",
"'?f=json'",
",",
"'WMSServer?'",
")",
"LOGGER",
".",
"debug",
"(",
"'This ESRI REST endpoint has an WMS interface to process: %s'",
"%",
"wms_url",
")",
"# import here as otherwise is circular (TODO refactor)",
"from",
"utils",
"import",
"create_service_from_endpoint",
"create_service_from_endpoint",
"(",
"wms_url",
",",
"'OGC:WMS'",
",",
"catalog",
"=",
"service",
".",
"catalog",
")",
"# now process the REST interface",
"layer_n",
"=",
"0",
"total",
"=",
"len",
"(",
"esri_service",
".",
"layers",
")",
"for",
"esri_layer",
"in",
"esri_service",
".",
"layers",
":",
"# in some case the json is invalid",
"# esri_layer._json_struct",
"# {u'currentVersion': 10.01,",
"# u'error':",
"# {u'message': u'An unexpected error occurred processing the request.', u'code': 500, u'details': []}}",
"if",
"'error'",
"not",
"in",
"esri_layer",
".",
"_json_struct",
":",
"LOGGER",
".",
"debug",
"(",
"'Updating layer %s'",
"%",
"esri_layer",
".",
"name",
")",
"layer",
",",
"created",
"=",
"Layer",
".",
"objects",
".",
"get_or_create",
"(",
"name",
"=",
"esri_layer",
".",
"id",
",",
"service",
"=",
"service",
",",
"catalog",
"=",
"service",
".",
"catalog",
")",
"if",
"layer",
".",
"active",
":",
"layer",
".",
"type",
"=",
"'ESRI:ArcGIS:MapServer'",
"links",
"=",
"[",
"[",
"layer",
".",
"type",
",",
"service",
".",
"url",
"]",
",",
"[",
"'OGC:WMTS'",
",",
"settings",
".",
"SITE_URL",
".",
"rstrip",
"(",
"'/'",
")",
"+",
"'/'",
"+",
"layer",
".",
"get_url_endpoint",
"(",
")",
"]",
"]",
"layer",
".",
"title",
"=",
"esri_layer",
".",
"name",
"layer",
".",
"abstract",
"=",
"esri_service",
".",
"serviceDescription",
"layer",
".",
"url",
"=",
"service",
".",
"url",
"layer",
".",
"page_url",
"=",
"layer",
".",
"get_absolute_url",
"links",
".",
"append",
"(",
"[",
"'WWW:LINK'",
",",
"settings",
".",
"SITE_URL",
".",
"rstrip",
"(",
"'/'",
")",
"+",
"layer",
".",
"page_url",
"]",
")",
"try",
":",
"layer",
".",
"bbox_x0",
"=",
"esri_layer",
".",
"extent",
".",
"xmin",
"layer",
".",
"bbox_y0",
"=",
"esri_layer",
".",
"extent",
".",
"ymin",
"layer",
".",
"bbox_x1",
"=",
"esri_layer",
".",
"extent",
".",
"xmax",
"layer",
".",
"bbox_y1",
"=",
"esri_layer",
".",
"extent",
".",
"ymax",
"except",
"KeyError",
":",
"pass",
"try",
":",
"layer",
".",
"bbox_x0",
"=",
"esri_layer",
".",
"_json_struct",
"[",
"'extent'",
"]",
"[",
"'xmin'",
"]",
"layer",
".",
"bbox_y0",
"=",
"esri_layer",
".",
"_json_struct",
"[",
"'extent'",
"]",
"[",
"'ymin'",
"]",
"layer",
".",
"bbox_x1",
"=",
"esri_layer",
".",
"_json_struct",
"[",
"'extent'",
"]",
"[",
"'xmax'",
"]",
"layer",
".",
"bbox_y1",
"=",
"esri_layer",
".",
"_json_struct",
"[",
"'extent'",
"]",
"[",
"'ymax'",
"]",
"except",
"Exception",
":",
"pass",
"layer",
".",
"wkt_geometry",
"=",
"bbox2wktpolygon",
"(",
"[",
"layer",
".",
"bbox_x0",
",",
"layer",
".",
"bbox_y0",
",",
"layer",
".",
"bbox_x1",
",",
"layer",
".",
"bbox_y1",
"]",
")",
"layer",
".",
"xml",
"=",
"create_metadata_record",
"(",
"identifier",
"=",
"str",
"(",
"layer",
".",
"uuid",
")",
",",
"source",
"=",
"service",
".",
"url",
",",
"links",
"=",
"links",
",",
"format",
"=",
"'ESRI:ArcGIS:MapServer'",
",",
"type",
"=",
"layer",
".",
"csw_type",
",",
"relation",
"=",
"service",
".",
"id_string",
",",
"title",
"=",
"layer",
".",
"title",
",",
"alternative",
"=",
"layer",
".",
"title",
",",
"abstract",
"=",
"layer",
".",
"abstract",
",",
"wkt_geometry",
"=",
"layer",
".",
"wkt_geometry",
")",
"layer",
".",
"anytext",
"=",
"gen_anytext",
"(",
"layer",
".",
"title",
",",
"layer",
".",
"abstract",
")",
"layer",
".",
"save",
"(",
")",
"# dates",
"add_mined_dates",
"(",
"layer",
")",
"layer_n",
"=",
"layer_n",
"+",
"1",
"# exits if DEBUG_SERVICES",
"LOGGER",
".",
"debug",
"(",
"\"Updating layer n. %s/%s\"",
"%",
"(",
"layer_n",
",",
"total",
")",
")",
"if",
"DEBUG_SERVICES",
"and",
"layer_n",
"==",
"DEBUG_LAYER_NUMBER",
":",
"return",
"except",
"Exception",
"as",
"err",
":",
"message",
"=",
"\"update_layers_esri_mapserver: {0}\"",
".",
"format",
"(",
"err",
")",
"check",
"=",
"Check",
"(",
"content_object",
"=",
"service",
",",
"success",
"=",
"False",
",",
"response_time",
"=",
"0",
",",
"message",
"=",
"message",
")",
"check",
".",
"save",
"(",
")"
] |
Update layers for an ESRI REST MapServer.
Sample endpoint: https://gis.ngdc.noaa.gov/arcgis/rest/services/SampleWorldCities/MapServer/?f=json
|
[
"Update",
"layers",
"for",
"an",
"ESRI",
"REST",
"MapServer",
".",
"Sample",
"endpoint",
":",
"https",
":",
"//",
"gis",
".",
"ngdc",
".",
"noaa",
".",
"gov",
"/",
"arcgis",
"/",
"rest",
"/",
"services",
"/",
"SampleWorldCities",
"/",
"MapServer",
"/",
"?f",
"=",
"json"
] |
899a5385b15af7fba190ab4fae1d41e47d155a1b
|
https://github.com/cga-harvard/Hypermap-Registry/blob/899a5385b15af7fba190ab4fae1d41e47d155a1b/hypermap/aggregator/models.py#L1635-L1749
|
train
|
cga-harvard/Hypermap-Registry
|
hypermap/aggregator/models.py
|
update_layers_esri_imageserver
|
def update_layers_esri_imageserver(service):
"""
Update layers for an ESRI REST ImageServer.
Sample endpoint: https://gis.ngdc.noaa.gov/arcgis/rest/services/bag_bathymetry/ImageServer/?f=json
"""
try:
esri_service = ArcImageService(service.url)
# set srs
# both mapserver and imageserver exposes just one srs at the service level
# not sure if other ones are supported, for now we just store this one
obj = json.loads(esri_service._contents)
srs_code = obj['spatialReference']['wkid']
srs, created = SpatialReferenceSystem.objects.get_or_create(code=srs_code)
service.srs.add(srs)
service.update_validity()
layer, created = Layer.objects.get_or_create(name=obj['name'], service=service, catalog=service.catalog)
if layer.active:
layer.type = 'ESRI:ArcGIS:ImageServer'
links = [[layer.type, service.url],
['OGC:WMTS', settings.SITE_URL.rstrip('/') + '/' + layer.get_url_endpoint()]]
layer.title = obj['name']
layer.abstract = esri_service.serviceDescription
layer.url = service.url
layer.bbox_x0 = str(obj['extent']['xmin'])
layer.bbox_y0 = str(obj['extent']['ymin'])
layer.bbox_x1 = str(obj['extent']['xmax'])
layer.bbox_y1 = str(obj['extent']['ymax'])
layer.page_url = layer.get_absolute_url
links.append([
'WWW:LINK',
settings.SITE_URL.rstrip('/') + layer.page_url
])
layer.wkt_geometry = bbox2wktpolygon([layer.bbox_x0, layer.bbox_y0, layer.bbox_x1, layer.bbox_y1])
layer.xml = create_metadata_record(
identifier=str(layer.uuid),
source=service.url,
links=links,
format='ESRI:ArcGIS:ImageServer',
type=layer.csw_type,
relation=service.id_string,
title=layer.title,
alternative=layer.title,
abstract=layer.abstract,
wkt_geometry=layer.wkt_geometry
)
layer.anytext = gen_anytext(layer.title, layer.abstract)
layer.save()
# dates
add_mined_dates(layer)
except Exception as err:
message = "update_layers_esri_imageserver: {0}".format(
err
)
check = Check(
content_object=service,
success=False,
response_time=0,
message=message
)
check.save()
|
python
|
def update_layers_esri_imageserver(service):
"""
Update layers for an ESRI REST ImageServer.
Sample endpoint: https://gis.ngdc.noaa.gov/arcgis/rest/services/bag_bathymetry/ImageServer/?f=json
"""
try:
esri_service = ArcImageService(service.url)
# set srs
# both mapserver and imageserver exposes just one srs at the service level
# not sure if other ones are supported, for now we just store this one
obj = json.loads(esri_service._contents)
srs_code = obj['spatialReference']['wkid']
srs, created = SpatialReferenceSystem.objects.get_or_create(code=srs_code)
service.srs.add(srs)
service.update_validity()
layer, created = Layer.objects.get_or_create(name=obj['name'], service=service, catalog=service.catalog)
if layer.active:
layer.type = 'ESRI:ArcGIS:ImageServer'
links = [[layer.type, service.url],
['OGC:WMTS', settings.SITE_URL.rstrip('/') + '/' + layer.get_url_endpoint()]]
layer.title = obj['name']
layer.abstract = esri_service.serviceDescription
layer.url = service.url
layer.bbox_x0 = str(obj['extent']['xmin'])
layer.bbox_y0 = str(obj['extent']['ymin'])
layer.bbox_x1 = str(obj['extent']['xmax'])
layer.bbox_y1 = str(obj['extent']['ymax'])
layer.page_url = layer.get_absolute_url
links.append([
'WWW:LINK',
settings.SITE_URL.rstrip('/') + layer.page_url
])
layer.wkt_geometry = bbox2wktpolygon([layer.bbox_x0, layer.bbox_y0, layer.bbox_x1, layer.bbox_y1])
layer.xml = create_metadata_record(
identifier=str(layer.uuid),
source=service.url,
links=links,
format='ESRI:ArcGIS:ImageServer',
type=layer.csw_type,
relation=service.id_string,
title=layer.title,
alternative=layer.title,
abstract=layer.abstract,
wkt_geometry=layer.wkt_geometry
)
layer.anytext = gen_anytext(layer.title, layer.abstract)
layer.save()
# dates
add_mined_dates(layer)
except Exception as err:
message = "update_layers_esri_imageserver: {0}".format(
err
)
check = Check(
content_object=service,
success=False,
response_time=0,
message=message
)
check.save()
|
[
"def",
"update_layers_esri_imageserver",
"(",
"service",
")",
":",
"try",
":",
"esri_service",
"=",
"ArcImageService",
"(",
"service",
".",
"url",
")",
"# set srs",
"# both mapserver and imageserver exposes just one srs at the service level",
"# not sure if other ones are supported, for now we just store this one",
"obj",
"=",
"json",
".",
"loads",
"(",
"esri_service",
".",
"_contents",
")",
"srs_code",
"=",
"obj",
"[",
"'spatialReference'",
"]",
"[",
"'wkid'",
"]",
"srs",
",",
"created",
"=",
"SpatialReferenceSystem",
".",
"objects",
".",
"get_or_create",
"(",
"code",
"=",
"srs_code",
")",
"service",
".",
"srs",
".",
"add",
"(",
"srs",
")",
"service",
".",
"update_validity",
"(",
")",
"layer",
",",
"created",
"=",
"Layer",
".",
"objects",
".",
"get_or_create",
"(",
"name",
"=",
"obj",
"[",
"'name'",
"]",
",",
"service",
"=",
"service",
",",
"catalog",
"=",
"service",
".",
"catalog",
")",
"if",
"layer",
".",
"active",
":",
"layer",
".",
"type",
"=",
"'ESRI:ArcGIS:ImageServer'",
"links",
"=",
"[",
"[",
"layer",
".",
"type",
",",
"service",
".",
"url",
"]",
",",
"[",
"'OGC:WMTS'",
",",
"settings",
".",
"SITE_URL",
".",
"rstrip",
"(",
"'/'",
")",
"+",
"'/'",
"+",
"layer",
".",
"get_url_endpoint",
"(",
")",
"]",
"]",
"layer",
".",
"title",
"=",
"obj",
"[",
"'name'",
"]",
"layer",
".",
"abstract",
"=",
"esri_service",
".",
"serviceDescription",
"layer",
".",
"url",
"=",
"service",
".",
"url",
"layer",
".",
"bbox_x0",
"=",
"str",
"(",
"obj",
"[",
"'extent'",
"]",
"[",
"'xmin'",
"]",
")",
"layer",
".",
"bbox_y0",
"=",
"str",
"(",
"obj",
"[",
"'extent'",
"]",
"[",
"'ymin'",
"]",
")",
"layer",
".",
"bbox_x1",
"=",
"str",
"(",
"obj",
"[",
"'extent'",
"]",
"[",
"'xmax'",
"]",
")",
"layer",
".",
"bbox_y1",
"=",
"str",
"(",
"obj",
"[",
"'extent'",
"]",
"[",
"'ymax'",
"]",
")",
"layer",
".",
"page_url",
"=",
"layer",
".",
"get_absolute_url",
"links",
".",
"append",
"(",
"[",
"'WWW:LINK'",
",",
"settings",
".",
"SITE_URL",
".",
"rstrip",
"(",
"'/'",
")",
"+",
"layer",
".",
"page_url",
"]",
")",
"layer",
".",
"wkt_geometry",
"=",
"bbox2wktpolygon",
"(",
"[",
"layer",
".",
"bbox_x0",
",",
"layer",
".",
"bbox_y0",
",",
"layer",
".",
"bbox_x1",
",",
"layer",
".",
"bbox_y1",
"]",
")",
"layer",
".",
"xml",
"=",
"create_metadata_record",
"(",
"identifier",
"=",
"str",
"(",
"layer",
".",
"uuid",
")",
",",
"source",
"=",
"service",
".",
"url",
",",
"links",
"=",
"links",
",",
"format",
"=",
"'ESRI:ArcGIS:ImageServer'",
",",
"type",
"=",
"layer",
".",
"csw_type",
",",
"relation",
"=",
"service",
".",
"id_string",
",",
"title",
"=",
"layer",
".",
"title",
",",
"alternative",
"=",
"layer",
".",
"title",
",",
"abstract",
"=",
"layer",
".",
"abstract",
",",
"wkt_geometry",
"=",
"layer",
".",
"wkt_geometry",
")",
"layer",
".",
"anytext",
"=",
"gen_anytext",
"(",
"layer",
".",
"title",
",",
"layer",
".",
"abstract",
")",
"layer",
".",
"save",
"(",
")",
"# dates",
"add_mined_dates",
"(",
"layer",
")",
"except",
"Exception",
"as",
"err",
":",
"message",
"=",
"\"update_layers_esri_imageserver: {0}\"",
".",
"format",
"(",
"err",
")",
"check",
"=",
"Check",
"(",
"content_object",
"=",
"service",
",",
"success",
"=",
"False",
",",
"response_time",
"=",
"0",
",",
"message",
"=",
"message",
")",
"check",
".",
"save",
"(",
")"
] |
Update layers for an ESRI REST ImageServer.
Sample endpoint: https://gis.ngdc.noaa.gov/arcgis/rest/services/bag_bathymetry/ImageServer/?f=json
|
[
"Update",
"layers",
"for",
"an",
"ESRI",
"REST",
"ImageServer",
".",
"Sample",
"endpoint",
":",
"https",
":",
"//",
"gis",
".",
"ngdc",
".",
"noaa",
".",
"gov",
"/",
"arcgis",
"/",
"rest",
"/",
"services",
"/",
"bag_bathymetry",
"/",
"ImageServer",
"/",
"?f",
"=",
"json"
] |
899a5385b15af7fba190ab4fae1d41e47d155a1b
|
https://github.com/cga-harvard/Hypermap-Registry/blob/899a5385b15af7fba190ab4fae1d41e47d155a1b/hypermap/aggregator/models.py#L1752-L1813
|
train
|
cga-harvard/Hypermap-Registry
|
hypermap/aggregator/models.py
|
endpointlist_post_save
|
def endpointlist_post_save(instance, *args, **kwargs):
"""
Used to process the lines of the endpoint list.
"""
with open(instance.upload.file.name, mode='rb') as f:
lines = f.readlines()
for url in lines:
if len(url) > 255:
LOGGER.debug('Skipping this endpoint, as it is more than 255 characters: %s' % url)
else:
if Endpoint.objects.filter(url=url, catalog=instance.catalog).count() == 0:
endpoint = Endpoint(url=url, endpoint_list=instance)
endpoint.catalog = instance.catalog
endpoint.save()
if not settings.REGISTRY_SKIP_CELERY:
update_endpoints.delay(instance.id)
else:
update_endpoints(instance.id)
|
python
|
def endpointlist_post_save(instance, *args, **kwargs):
"""
Used to process the lines of the endpoint list.
"""
with open(instance.upload.file.name, mode='rb') as f:
lines = f.readlines()
for url in lines:
if len(url) > 255:
LOGGER.debug('Skipping this endpoint, as it is more than 255 characters: %s' % url)
else:
if Endpoint.objects.filter(url=url, catalog=instance.catalog).count() == 0:
endpoint = Endpoint(url=url, endpoint_list=instance)
endpoint.catalog = instance.catalog
endpoint.save()
if not settings.REGISTRY_SKIP_CELERY:
update_endpoints.delay(instance.id)
else:
update_endpoints(instance.id)
|
[
"def",
"endpointlist_post_save",
"(",
"instance",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"with",
"open",
"(",
"instance",
".",
"upload",
".",
"file",
".",
"name",
",",
"mode",
"=",
"'rb'",
")",
"as",
"f",
":",
"lines",
"=",
"f",
".",
"readlines",
"(",
")",
"for",
"url",
"in",
"lines",
":",
"if",
"len",
"(",
"url",
")",
">",
"255",
":",
"LOGGER",
".",
"debug",
"(",
"'Skipping this endpoint, as it is more than 255 characters: %s'",
"%",
"url",
")",
"else",
":",
"if",
"Endpoint",
".",
"objects",
".",
"filter",
"(",
"url",
"=",
"url",
",",
"catalog",
"=",
"instance",
".",
"catalog",
")",
".",
"count",
"(",
")",
"==",
"0",
":",
"endpoint",
"=",
"Endpoint",
"(",
"url",
"=",
"url",
",",
"endpoint_list",
"=",
"instance",
")",
"endpoint",
".",
"catalog",
"=",
"instance",
".",
"catalog",
"endpoint",
".",
"save",
"(",
")",
"if",
"not",
"settings",
".",
"REGISTRY_SKIP_CELERY",
":",
"update_endpoints",
".",
"delay",
"(",
"instance",
".",
"id",
")",
"else",
":",
"update_endpoints",
"(",
"instance",
".",
"id",
")"
] |
Used to process the lines of the endpoint list.
|
[
"Used",
"to",
"process",
"the",
"lines",
"of",
"the",
"endpoint",
"list",
"."
] |
899a5385b15af7fba190ab4fae1d41e47d155a1b
|
https://github.com/cga-harvard/Hypermap-Registry/blob/899a5385b15af7fba190ab4fae1d41e47d155a1b/hypermap/aggregator/models.py#L1818-L1835
|
train
|
cga-harvard/Hypermap-Registry
|
hypermap/aggregator/models.py
|
service_pre_save
|
def service_pre_save(instance, *args, **kwargs):
"""
Used to do a service full check when saving it.
"""
# check if service is unique
# we cannot use unique_together as it relies on a combination of fields
# from different models (service, resource)
exists = Service.objects.filter(url=instance.url,
type=instance.type,
catalog=instance.catalog).count() > 0
# TODO: When saving from the django admin, this should not be triggered.
# Reference: http://stackoverflow.com/questions/11561722/django-what-is-the-role-of-modelstate
if instance._state.adding and exists:
raise Exception("There is already such a service. url={0} catalog={1}".format(
instance.url, instance.catalog
))
|
python
|
def service_pre_save(instance, *args, **kwargs):
"""
Used to do a service full check when saving it.
"""
# check if service is unique
# we cannot use unique_together as it relies on a combination of fields
# from different models (service, resource)
exists = Service.objects.filter(url=instance.url,
type=instance.type,
catalog=instance.catalog).count() > 0
# TODO: When saving from the django admin, this should not be triggered.
# Reference: http://stackoverflow.com/questions/11561722/django-what-is-the-role-of-modelstate
if instance._state.adding and exists:
raise Exception("There is already such a service. url={0} catalog={1}".format(
instance.url, instance.catalog
))
|
[
"def",
"service_pre_save",
"(",
"instance",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"# check if service is unique",
"# we cannot use unique_together as it relies on a combination of fields",
"# from different models (service, resource)",
"exists",
"=",
"Service",
".",
"objects",
".",
"filter",
"(",
"url",
"=",
"instance",
".",
"url",
",",
"type",
"=",
"instance",
".",
"type",
",",
"catalog",
"=",
"instance",
".",
"catalog",
")",
".",
"count",
"(",
")",
">",
"0",
"# TODO: When saving from the django admin, this should not be triggered.",
"# Reference: http://stackoverflow.com/questions/11561722/django-what-is-the-role-of-modelstate",
"if",
"instance",
".",
"_state",
".",
"adding",
"and",
"exists",
":",
"raise",
"Exception",
"(",
"\"There is already such a service. url={0} catalog={1}\"",
".",
"format",
"(",
"instance",
".",
"url",
",",
"instance",
".",
"catalog",
")",
")"
] |
Used to do a service full check when saving it.
|
[
"Used",
"to",
"do",
"a",
"service",
"full",
"check",
"when",
"saving",
"it",
"."
] |
899a5385b15af7fba190ab4fae1d41e47d155a1b
|
https://github.com/cga-harvard/Hypermap-Registry/blob/899a5385b15af7fba190ab4fae1d41e47d155a1b/hypermap/aggregator/models.py#L1851-L1868
|
train
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.