index int64 0 1,000k | blob_id stringlengths 40 40 | code stringlengths 7 10.4M |
|---|---|---|
# a is a dynamic reference to an object in memory.
# When a new value is assigned to it the reference changes to a separate
# address with a new type (Python names are rebound, not "retyped").
a = 'hello'
# This is the type of the object that `a` is currently referencing.
# Each object will therefore show a different memory id and type.
print(type(a))
print(hex(id(a)))
# Rebind to an int: a new object, hence a new type and (usually) new id.
a = 10
print(type(a))
print(hex(id(a)))
# Rebind to a function object; it is callable like any other value.
a = lambda x: x**2
print(a(2))
print(type(a))
print(hex(id(a)))
# Rebind to a complex number.
a = 3 + 4j
print(type(a))
print(hex(id(a)))
|
992,901 | 2d522dc71b4a5f3057ef12d5fc5f236ea3be0955 | import logging
import socket
import threading
import time
from .proto.packet import Packet
from .proto.opttypes import OptionType
from .proto.dhcpmsg import MessageType
def sync_worker(address, on_success, on_fail, oneshot=False, macaddr=None, relay_ip=None):
    """Send DHCP REQUEST packets to `address` and wait for replies.

    Calls `on_success()` for every reply received and `on_fail()` on a
    1-second receive timeout.  With oneshot=True a single request is sent
    and the decoded reply printed; otherwise the loop runs forever
    (load-test mode).  When `relay_ip` is given, the packet is marked as
    relayed (hops/giaddr) and an Agent-Information option is attached.
    """
    sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    sock.bind(('0.0.0.0', 0))  # any free local port
    sock.settimeout(1)  # 1s receive timeout drives the fail path below
    pkt = Packet(message_type=MessageType.REQUEST)
    pkt.op = Packet.Op.REQUEST
    pkt.chaddr = macaddr or 'de:12:44:4c:bb:48'  # default test MAC
    if relay_ip:
        pkt.hops = 1
        pkt.giaddr = relay_ip
        pkt.add_option(OptionType.AgentInformation, b'\x01\x04test')
    # NOTE(review): the socket is never closed; tolerable for daemon worker
    # threads but it leaks in oneshot mode -- consider sock.close() on exit.
    while True:
        data = pkt.pack()
        sent = sock.sendto(data, address)
        try:
            data, address = sock.recvfrom(4096)
            on_success()
            if oneshot:
                reply = Packet.unpack_from(data)
                print(reply)
        except socket.timeout:
            on_fail()
        if oneshot:
            break
def start_threaded(address, threads=1, macaddr=None, relay_ip=None):
    """Spawn `threads` daemon workers hammering `address` ('host:port'),
    then report and reset the success/fail counters once per second, forever.
    """
    hostname, portstr = address.split(':')
    target_addr = (hostname, int(portstr))
    counters = {'ok': 0, 'bad': 0}

    def _on_ok():
        counters['ok'] += 1

    def _on_bad():
        counters['bad'] += 1

    for _ in range(threads):
        worker = threading.Thread(
            target=sync_worker,
            args=(target_addr, _on_ok, _on_bad, False, macaddr, relay_ip),
            daemon=True)
        worker.start()
    # Main thread: periodic reporting loop (never returns).
    while True:
        time.sleep(1.0)
        print('requests success: %s fail: %s' % (counters['ok'], counters['bad']))
        counters['ok'] = 0
        counters['bad'] = 0
def oneshot(address, macaddr, relay_ip):
    """Send a single DHCP request to `address` ('host:port') and print the reply."""
    hostname, portstr = address.split(':')
    sync_worker(
        (hostname, int(portstr)),
        lambda: None,
        lambda: None,
        oneshot=True,
        macaddr=macaddr,
        relay_ip=relay_ip,
    )
|
992,902 | da944694ff047f624d64de52ac13e63009ed9254 | version https://git-lfs.github.com/spec/v1
oid sha256:9f1e36400c7a0181064505a629459283e8643f0cc3d2624aab902416bb2637c2
size 6912
|
# Program that tests whether an entered year is a leap year or not.
import os

annee = int(input("Saisissez une année : "))  # ValueError if the input is not a number
# Leap year: divisible by 4 but not by 100, or divisible by 400.
est_bissextile = (annee % 4 == 0 and annee % 100 != 0) or annee % 400 == 0
if est_bissextile:
    print("L'année saisie est bissextile.")
else:
    print("L'année saisie n'est pas bissextile.")
os.system("pause")
992,904 | 5ce5e39927d0c6366a76a8627c2b4b142c86418a | #
#
# Source: "Брутим архивы ZIP/RAR используя python."
# https://codeby.net/threads/brutim-arxivy-zip-rar-ispolzuja-python.65986/
# It was written on python 2.x
# time python3 brutilka.py -f evil.zip -d dictionary
# time python3 brutilka.py -f evil.rar -d dictionary
#
import zipfile
import rarfile
import argparse
def cutMagicNumbers(archive):
    """Read the archive's two-byte magic number and resolve the matching
    brute-force preparer via launcher().

    :param archive: path to the archive file
    :return: the preparer function selected by launcher()
    """
    with open(archive, 'rb') as file:
        # latin-1 maps every byte 1:1, so arbitrary binary leading bytes can
        # never raise UnicodeDecodeError the way the default utf-8 could.
        currentType = file.read(2).decode('latin-1')
    return launcher(currentType)
def launcher(extension):
    """Map a two-byte magic number to the matching brute-force preparer.

    :param extension: first two bytes of the file, decoded to str
    :return: prepareBruteRar or prepareBruteZip
    :raises ValueError: for unsupported archive types.  The old behaviour
        returned the string 'Not Found', which the caller then tried to
        *call*, crashing with a cryptic TypeError.
    """
    handlers = {'Ra': prepareBruteRar,   # 'Rar!' signature
                'PK': prepareBruteZip,   # 'PK\x03\x04' signature
                }
    try:
        return handlers[extension]
    except KeyError:
        raise ValueError('Unsupported archive type: {!r}'.format(extension))
def prepareBruteZip(archive, dictionary):
    '''
    Try every word of `dictionary` as the password of the ZIP `archive`.

    Limitations: the zip must be format 2.0; passwords must be bytes.
    '''
    zArchive = zipfile.ZipFile(archive)
    with open(dictionary, 'r') as wordlist:
        for candidate in wordlist:
            brute(zArchive, candidate.strip('\n').encode('ascii'))
def brute(archive, password):
    """Attempt to extract `archive` with `password`.

    :param archive: an open zipfile.ZipFile or rarfile.RarFile
    :param password: bytes for zip archives, str for rar archives
    :return: True (and print the password) if extraction succeeded,
             False otherwise.  The old code returned nothing.
    """
    try:
        archive.extractall(pwd=password)
        print('[+] Password is {}'.format(password))
        return True
    except Exception:
        # Wrong password (or corrupt member).  Narrowed from a bare
        # `except:` so KeyboardInterrupt/SystemExit still abort the run.
        return False
def prepareBruteRar(archive, dictionary):
    '''
    Try every word of `dictionary` as the password of the RAR `archive`.

    Passwords are plain str; requires the external `unrar` tool installed.
    '''
    rArchive = rarfile.RarFile(archive)
    with open(dictionary, 'r') as wordlist:
        for candidate in wordlist:
            brute(rArchive, candidate.strip('\n'))
if __name__ == "__main__":
    # CLI entry point: resolve the preparer from the archive's magic number,
    # then run it over the supplied word list.
    cli = argparse.ArgumentParser(
        '--file <archive>' + '--dict <dictionary>')
    cli.add_argument('-f', '--file', dest='archive', required=True,
                     type=str, help='Archive file')
    cli.add_argument('-d', '--dict', dest='dictionary', required=True,
                     type=str, help="Dictionary file")
    options = cli.parse_args()
    cutMagicNumbers(options.archive)(options.archive, options.dictionary)
992,905 | 19aece2bf4daa748038e75a9b3ef09fca403d617 |
add_library('controlp5')
cp5 = None
slider1 = None
def setup():
    """Processing (python mode) setup: build the ControlP5 UI with one slider."""
    global cp5, slider1
    size(500,500)
    # `this` is the sketch's PApplet, injected by Processing python mode.
    cp5 = ControlP5(this)
    slider1 = (
        cp5
        .addSlider("slider")
        .setSize(200,20)
        .setPosition(20,20)
        .setRange(0,255))
    # Caption shown next to the slider widget.
    slider1.label = "Background"
def draw():
    """Repaint: background grey level follows the slider's current value."""
    # ControlP5 renders its widgets itself, so no explicit cp5.draw() call.
    #cp5.draw()
    background(slider1.getValue())
|
992,906 | 882c128d09f03967ba2e42515a10a13cf5cd9ef1 | #!/usr/bin/env python2.7
# Copyright 2013, ARM Limited
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of ARM Limited nor the names of its contributors may be
# used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import os
import sys
import argparse
import re
import subprocess
import threading
import time
import util
def BuildOptions():
    """Parse and return the command line options of the unit test tool."""
    parser = argparse.ArgumentParser(description='Unit test tool')
    parser.add_argument('name_filters', metavar='name filters', nargs='*',
                        help='Tests matching any of the regexp filters will be run.')
    parser.add_argument('--mode', action='store', choices=['release', 'debug', 'coverage'],
                        default='release', help='Build mode')
    parser.add_argument('--simulator', action='store', choices=['on', 'off'],
                        default='on', help='Use the builtin a64 simulator')
    parser.add_argument('--timeout', action='store', type=int, default=5,
                        help='Timeout (in seconds) for each cctest (5sec default).')
    parser.add_argument('--nobuild', action='store_true',
                        help='Do not (re)build the tests')
    parser.add_argument('--jobs', '-j', metavar='N', type=int, default=1,
                        help='Allow N jobs at once.')
    return parser.parse_args()
def BuildRequiredObjects(arguments):
    """Build the cctest target with scons; abort the tool on build failure."""
    command = ('scons mode=%s simulator=%s target=cctest --jobs=%d'
               % (arguments.mode, arguments.simulator, arguments.jobs))
    status, output = util.getstatusoutput(command)
    if status != 0:
        print(output)
        util.abort('Failed bulding cctest')
# Display the run progress:
# [time| progress|+ passed|- failed]
def UpdateProgress(start_time, passed, failed, card):
    """Redraw the single-line progress indicator on stdout (no newline)."""
    elapsed = time.time() - start_time
    minutes, seconds = divmod(elapsed, 60)
    progress = float(passed + failed) / card * 100
    # Colour the pass/fail counters only once they become non-zero.
    green = '\x1b[32m' if passed != 0 else ''
    red = '\x1b[31m' if failed != 0 else ''
    template = '\r[%02d:%02d| %3d%%|' + green + '+ %d\x1b[0m|' + red + '- %d\x1b[0m]'
    sys.stdout.write(template % (minutes, seconds, progress, passed, failed))
def PrintError(s):
    """Emit *s* on a fresh line (below the progress indicator) and flush."""
    sys.stdout.write('\n%s\n' % (s,))
    sys.stdout.flush()
# List all tests matching any of the provided filters.
def ListTests(cctest, filters):
    """Return the test names reported by the cctest binary.

    :param cctest: path to the cctest executable
    :param filters: optional regexp strings; keep tests matching any of them
    :return: a *list* of test names.  The old code returned the lazy
        filter()/map() objects on Python 3, which breaks the later
        len(tests) in RunTests().
    """
    status, output = util.getstatusoutput(cctest + ' --list')
    if status != 0: util.abort('Failed to list all tests')
    available_tests = output.split()
    if not filters:
        return available_tests
    # Compile each filter once; a test is kept as soon as one matches.
    compiled = [re.compile(f) for f in filters]
    def is_selected(test_name):
        return any(e.search(test_name) for e in compiled)
    return [t for t in available_tests if is_selected(t)]
# A class representing a cctest.
class CCtest:
    # Path to the cctest binary; shared by every instance, set by RunTests().
    cctest = None
    def __init__(self, name, options = None):
        self.name = name          # test name passed to the binary
        self.options = options    # extra command line options, or None
        self.process = None       # subprocess.Popen, set by the worker thread
        self.stdout = None        # captured output after communicate()
        self.stderr = None
    def Command(self):
        """Return the full command line as one string (for error display)."""
        command = '%s %s' % (CCtest.cctest, self.name)
        if self.options is not None:
            command = '%s %s' % (command, ' '.join(self.options))
        return command
    # Run the test.
    # Use a thread to be able to control the test.
    def Run(self, arguments):
        """Run the test with `arguments.timeout`; return 0 on success.

        The subprocess is driven from a worker thread so the main thread
        can enforce the timeout via Thread.join(timeout).
        """
        command = [CCtest.cctest, self.name]
        if self.options is not None:
            command += self.options
        def execute():
            self.process = subprocess.Popen(command,
                                            stdout=subprocess.PIPE,
                                            stderr=subprocess.PIPE)
            self.stdout, self.stderr = self.process.communicate()
        thread = threading.Thread(target=execute)
        retcode = -1
        # Append spaces to hide the previous test name if longer.
        sys.stdout.write(' ' + self.name + ' ' * 20)
        sys.stdout.flush()
        # Start the test with a timeout.
        thread.start()
        thread.join(arguments.timeout)
        if thread.is_alive():
            # Too slow! Terminate.
            PrintError('### TIMEOUT %s\nCOMMAND:\n%s' % (self.name, self.Command()))
            # If timeout was too small the thread may not have run and
            # self.process is still None. Therefore check.
            if (self.process):
                self.process.terminate()
                # Allow 1 second to terminate. Else, exterminate!
                thread.join(1)
                if thread.is_alive():
                    # BUG FIX: threading.Thread has no kill() method, so the
                    # old `thread.kill()` raised AttributeError here.  Kill
                    # the child *process* instead; communicate() then returns
                    # and the worker thread exits on its own.
                    self.process.kill()
                    thread.join()
            # retcode is already set for failure.
        else:
            # Check the return status of the test.
            retcode = self.process.poll()
            if retcode != 0:
                PrintError('### FAILED %s\nSTDERR:\n%s\nSTDOUT:\n%s\nCOMMAND:\n%s'
                           % (self.name, self.stderr.decode(), self.stdout.decode(),
                              self.Command()))
        return retcode
# Run all tests in the list 'tests'.
def RunTests(cctest, tests, arguments):
    """Run every test in `tests` (twice when the simulator is on);
    return the number of failures."""
    CCtest.cctest = cctest
    card = len(tests)
    passed = 0
    failed = 0
    if card == 0:
        print('No test to run')
        return 0
    # When the simulator is on the tests are run twice: with and without the
    # debugger.  BUG FIX: `arguments.simulator` is the string 'on' or 'off',
    # which is *always* truthy, so the old `if arguments.simulator:` doubled
    # the run even with the simulator off.  Compare against 'on' explicitly
    # (matching the check in the __main__ driver).
    simulator_on = arguments.simulator == 'on'
    if simulator_on:
        card *= 2
    print('Running %d tests... (timeout = %ds)' % (card, arguments.timeout))
    start_time = time.time()
    # Initialize the progress indicator.
    UpdateProgress(start_time, 0, 0, card)
    for e in tests:
        variants = [CCtest(e)]
        if simulator_on: variants.append(CCtest(e, ['--debugger']))
        for v in variants:
            retcode = v.Run(arguments)
            # Update the counters and progress indicator.
            if retcode == 0:
                passed += 1
            else:
                failed += 1
            UpdateProgress(start_time, passed, failed, card)
    return failed
if __name__ == '__main__':
    original_dir = os.path.abspath('.')
    # $ROOT/tools/test.py
    root_dir = os.path.dirname(os.path.dirname(os.path.abspath(sys.argv[0])))
    os.chdir(root_dir)
    # Parse the arguments and build the executable.
    args = BuildOptions()
    if not args.nobuild:
        BuildRequiredObjects(args)
    # The test binary.  Suffixes encode the build configuration.
    cctest = os.path.join(root_dir, 'cctest')
    if args.simulator == 'on':
        cctest += '_sim'
    if args.mode == 'debug':
        cctest += '_g'
    elif args.mode == 'coverage':
        cctest += '_gcov'
    # List available tests.
    tests = ListTests(cctest, args.name_filters)
    # Delete coverage data files.
    # NOTE(review): '\;' is an invalid string escape (DeprecationWarning on
    # modern Pythons) -- a raw string would be cleaner; kept as is here.
    if args.mode == 'coverage':
        status, output = util.getstatusoutput('find obj/coverage -name "*.gcda" -exec rm {} \;')
    # Run the tests.  `status` becomes the failure count, used as exit code.
    status = RunTests(cctest, tests, args)
    sys.stdout.write('\n')
    # Print coverage information.
    if args.mode == 'coverage':
        cmd = 'tggcov -R summary_all,untested_functions_per_file obj/coverage/src/aarch64'
        p = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE,
                             stderr=subprocess.PIPE)
        stdout, stderr = p.communicate()
        # NOTE(review): under Python 3 this prints the bytes repr; the file
        # targets python2.7 (see shebang) where it prints plain text.
        print(stdout)
    # Restore original directory.
    os.chdir(original_dir)
    sys.exit(status)
|
992,907 | ff620ee4066c10e3ada0c46e82eb6e8871add3d2 | from common import constants
from ..task import celery_app
from ..util.SpiderWorker import SpiderWorker
@celery_app.task(ignore_result=True)
def job_task():
    """Fan out crawl work: queue one get_job task per listing page (1-30)
    of every career discovered by the career spider."""
    for career in SpiderWorker().career_spider():
        urls = [constants.HOME_URL + career + '?page={}&ka=page-{}'.format(page, page) for page in range(1, 31)]
        for url in urls:
            # Route onto the dedicated list-crawling queue.
            celery_app.send_task('app.task.job.get_job', args=(url,), queue='get_job_list', routing_key='get_job_list')
@celery_app.task(ignore_result=True)
def job_task_from_city():
    """Queue one get_job task per listing page (1-30) of every city.

    Cities come from city_spider(update_db=False), i.e. without refreshing
    the city table first.
    """
    for city in SpiderWorker().city_spider(update_db=False):
        urls = [constants.HOME_URL + '/c' + city['city_id'] + '/h_100010000/?page={}&ka=page-{}'.format(page, page) for page in range(1, 31)]
        for url in urls:
            celery_app.send_task('app.task.job.get_job', args=(url,), queue='get_job_list', routing_key='get_job_list')
@celery_app.task(ignore_result=True)
def get_job(url):
    """Scrape a single job-listing page at `url`."""
    SpiderWorker(url).job_spider()
@celery_app.task(ignore_result=True)
def job_des_task(times=10):
    """Queue `times` get_job_des tasks onto the `other_queue` queue."""
    for i in range(times):
        celery_app.send_task('app.task.job.get_job_des', queue='other_queue', routing_key='other_queue')
@celery_app.task(ignore_result=True)
def get_job_des():
    """Scrape one pending job description."""
    SpiderWorker().job_des_spider()
if __name__ == '__main__':
    # Manual entry point for local debugging (runs the task body directly,
    # bypassing the celery worker).
    job_task()
    # job_des_task()
|
992,908 | 83d582c0da4bbad29d94e7fcfa7d28d31bb6336b | # coding:utf-8
import pytest
@pytest.fixture(scope='class', autouse=True)
@pytest.mark.run(order=1)
def open_browser():
    """Class-scoped, auto-used fixture simulating opening the browser once.

    NOTE(review): pytest.mark.run has no effect on fixture ordering (it is a
    test-ordering mark) -- confirm the decorator is intentional.
    """
    print('\n打开浏览器...')
@pytest.fixture()
@pytest.mark.run(order=2)
def test_login(request):
    """Indirectly-parametrized login fixture.

    Expects request.param to be a dict with 'user' and 'pwd' keys; returns
    a success message for the known-good credentials, otherwise fails with
    assert False.
    """
    user = request.param['user']
    pwd = request.param['pwd']
    print('登录中...')
    if user == 'a1' and pwd == '123456':
        print('登录页面成功...')
        assert True
        return '登录页面成功...'
    else:
        print('用户名或密码错误,请重新检查...')
        assert False
|
992,909 | 651ecca843803efaddb8d9fe0bab1f071806acaf | # -*- coding: utf-8 -*-
from django.test import TestCase
from catalogapp import api
from catalogapp import models
class SpecFieldsTest(TestCase):
    """API-level tests for creating and changing spec fields."""

    def _create_bool_field(self):
        """Create a section plus one BooleanField spec; return (id, query).

        Shared fixture logic that was previously duplicated verbatim in
        test_create and test_change.
        """
        api.sections.create("Name", "slug")
        section = api.sections.get(1)
        test_query = {
            "field_type": "BooleanField",
            "name": "Bool Field",
            "slug": "bool_slug",
            "section": section,
            "default_value": True,
            "description": "Some desc",
        }
        result_id = api.specfields.create(**test_query)
        return result_id, test_query

    def test_create(self):
        """A created field must round-trip its attributes and concrete type."""
        result_id, test_query = self._create_bool_field()
        result = api.specfields.get(result_id)
        self.assertEqual(type(result), models.BooleanField)
        del test_query['field_type']
        # The default value is stored as its string representation.
        test_query['default_value'] = str(test_query['default_value'])
        for attr in test_query:
            self.assertEqual(getattr(result, attr), test_query[attr])

    def test_change(self):
        """change() must update name, slug, default value and description."""
        result_id, _ = self._create_bool_field()
        new_name = "new name"
        new_slug = "new_slug"
        new_default = "False"
        new_desc = "new desc"
        result_id = api.specfields.change(result_id, new_name, new_slug,
                                          new_default, new_desc)
        result = api.specfields.get(result_id)
        self.assertEqual(result.name, new_name)
        self.assertEqual(result.slug, new_slug)
        self.assertEqual(result.default_value, new_default)
        self.assertEqual(result.description, new_desc)
992,910 | d157fae0001ac7198e368285311d474e37cdf1d4 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
"""
# Libraries
import gi
gi.require_version("Gtk", "3.0")
from gi.repository import Gio, Gtk, GdkPixbuf
import os
import sys
import subprocess
import gettext
import locale
import controls
import socket
el = gettext.translation('base', 'locale', fallback=True)
el.install()
_ = el.gettext
CURRDIR = os.path.dirname(os.path.abspath(__file__))
MAINDIR = "/usr/share/hvl/yusufreis/"
ICONDomain = os.path.join(MAINDIR+"images/", 'Domain.png')
ICONLocal = os.path.join(MAINDIR+"images/", 'Local.png')
ICONComputer = os.path.join(MAINDIR+"images/", 'Computer.png')
def getDomain():
    # AD realm via `net ads info`; empty string when not domain-joined
    # (stderr is suppressed so a non-joined host fails quietly).
    return controls.execute("net ads info 2> /dev/null | grep Realm | cut -d':' -f2 | tr -d ' ' | tr -d '\n'")
def getWorkgroup():
    # NT workgroup name via `net ads workgroup`, trailing newline stripped.
    return controls.execute("net ads workgroup | cut -d':' -f2 | tr -d ' ' | tr -d '\n'")
def getHostname():
    # Machine hostname without the trailing newline.
    return controls.execute("hostname | tr -d '\n'")
def getCPU():
    """Return 'model name - core count' parsed from lscpu output."""
    cpumodel = controls.execute("lscpu | grep 'Model name:' | cut -d':' -f2 | sed -e 's/^[[:space:]]*//'| tr -d '\n'")
    cpucore = controls.execute("lscpu | grep '^CPU(s):' | cut -d':' -f2 | sed -e 's/^[[:space:]]*//'| tr -d '\n'")
    return(cpumodel + " - " + cpucore)
def getRAM():
    """Return total physical memory as a human-readable string, e.g. '15.55 GB'."""
    memory = controls.execute("awk '/MemTotal/ {print $2}' /proc/meminfo")
    # MemTotal is reported in kB.  BUG FIX: the old code divided by
    # 1024*1000, mixing binary and decimal units; kB -> GiB is /1024/1024.
    memory = round(int(memory)/1024/1024, 2)
    return(str(memory)+" GB")
def getDist():
    # Distribution id + release from lsb_release, newlines turned to spaces.
    return controls.execute("lsb_release -ir | cut -d':' -f2| sed -e 's/^[[:space:]]*//'| tr '\n' ' '")
def create_label_and_attach(grid, label_text, label_a_text, attach_next_to):
    """Add a 'caption: value' label pair to `grid`, below `attach_next_to`.

    `label_a_text` is the caption (right-aligned) and `label_text` the value
    (left-aligned).  Returns the caption label so the next row can be
    attached below it.
    """
    label = Gtk.Label(label_text)
    label.set_halign(Gtk.Align.START)
    label.set_direction(Gtk.TextDirection.LTR)
    label_a = Gtk.Label(label_a_text)
    label_a.set_halign(Gtk.Align.END)
    label_a.set_direction(Gtk.TextDirection.LTR)
    grid.attach_next_to(label_a, attach_next_to, Gtk.PositionType.BOTTOM, 1, 2)
    grid.attach_next_to(label, label_a, Gtk.PositionType.RIGHT, 3, 2)
    return label_a
class Summary(object):
    """Popup window summarising host, hardware and domain information."""
    def __init__(self):
        self.is_window_open = False  # prevents building the UI twice
        self.window = Gtk.Window(type=Gtk.WindowType.TOPLEVEL)
        self.window.set_title(_("Summary"))
        self.window.set_position(Gtk.WindowPosition.CENTER_ALWAYS)
        self.window.set_border_width(32)
        self.window.set_icon_from_file(ICONDomain)
        self.window.set_default_size(400, 400)
        self.window.set_resizable(False)
        self.grid = Gtk.Grid()
        self.grid.set_row_spacing(5)
        self.grid.set_column_spacing(5)
        self.grid.set_halign(Gtk.Align.CENTER)
        self.grid.set_direction(Gtk.TextDirection.LTR)
    def show_window(self, tray):
        """Build (on first call) and display the summary window."""
        if self.is_window_open == True:
            return
        label1 = Gtk.Label(label=("<b>"+getHostname()+"</b>"), use_markup=True)
        label1.set_halign(Gtk.Align.CENTER)
        self.grid.attach(label1, 0, 0, 4, 1)
        # Domain-joined machines get the domain icon, others the local one.
        if (getDomain() != ""):
            pixbuf = GdkPixbuf.Pixbuf.new_from_file_at_scale(
                filename=ICONDomain,
                width=96,
                height=96,
                preserve_aspect_ratio=True)
        else:
            pixbuf = GdkPixbuf.Pixbuf.new_from_file_at_scale(
                filename=ICONLocal,
                width=96,
                height=96,
                preserve_aspect_ratio=True)
        image1 = Gtk.Image.new_from_pixbuf(pixbuf)
        self.grid.attach_next_to(image1, label1, Gtk.PositionType.BOTTOM, 4, 2)
        separator1 = Gtk.Separator()
        self.grid.attach_next_to(separator1, image1, Gtk.PositionType.BOTTOM, 4, 2)
        # OS / hardware rows.
        label_a = create_label_and_attach(self.grid, getDist(), _("OS:"), separator1)
        label_a = create_label_and_attach(self.grid, getCPU(), _("CPU:"), label_a)
        label_a = create_label_and_attach(self.grid, getRAM(), _("RAM:"), label_a)
        separator = Gtk.Separator()
        self.grid.attach_next_to(separator, label_a, Gtk.PositionType.BOTTOM, 4, 2)
        # Domain / workgroup rows (with placeholder text when unavailable).
        domain = getDomain()
        if(domain == ""):
            domain = _("Domain could not found")
            workgroup = ""
        else:
            workgroup = getWorkgroup()
        label_a = create_label_and_attach(self.grid, domain, _("Domain:"), separator)
        if(workgroup == ""):
            workgroup = _("Workgroup could not found")
        label_a = create_label_and_attach(self.grid, workgroup, _("Workgroup:"), label_a)
        # NOTE(review): this re-attaches the *same* separator widget created
        # above; GTK normally warns when a widget is attached twice --
        # confirm a fresh Gtk.Separator() was not intended here.
        self.grid.attach_next_to(separator, label_a, Gtk.PositionType.BOTTOM, 4, 2)
        quitBtn = Gtk.Button(label=_("Settings"))
        quitBtn.set_size_request(80, 30)
        quitBtn.connect("clicked", self.on_settings_clicked, tray)
        separator = Gtk.Separator()
        self.grid.attach_next_to(separator, label_a, Gtk.PositionType.BOTTOM, 4, 2)
        self.grid.attach_next_to(quitBtn, separator, Gtk.PositionType.BOTTOM, 4, 2)
        self.window.set_icon_from_file(ICONComputer)
        self.window.connect('delete-event', self.on_delete_event)
        self.is_window_open = True
        self.window.add(self.grid)
        self.window.show_all()
    def on_delete_event(self, control, button):
        # Window closed by the user; allow it to be rebuilt next time.
        self.is_window_open = False
    def on_settings_clicked(self, widget, tray):
        """Open the settings window owned by the tray object."""
        tray.show_settings_window()
    def on_degisim_ornekozellik(self, settings, key, check_button):
        # Keep the check button in sync with the GSettings boolean.
        check_button.set_active(settings.get_boolean("ornekozellik"))
    def on_kontrol_ornekozellik(self, button, settings):
        # Persist the check button state back into GSettings.
        settings.set_boolean("ornekozellik", button.get_active())
|
992,911 | b40f39b6dd2767b2bb978ef176b9a51d72b97414 | """
Leetcode Problem 001: Two Sum
Author: Richard Coucoules
Solved: 2019-11-04
"""
class Solution:
    """LeetCode 001 -- Two Sum, solved with a value -> index lookup table."""

    def twoSum(self, nums, target):
        """Return [i, j] such that nums[i] + nums[j] == target.

        Two passes: first record the last position of every value, then
        look for a complement sitting at a different position.
        """
        last_index = {value: position for position, value in enumerate(nums)}
        for position, value in enumerate(nums):
            complement = target - value
            if complement in last_index and last_index[complement] != position:
                return [position, last_index[complement]]
# Failure 2: Brute force solution fails run time limit
# class Solution:
# def twoSum(self, nums, target):
# numsMemory = [num for num in nums]
# for idx, val in enumerate(nums):
# if val > target:
# continue
# for i in range(len(nums)):
# if idx == i or nums[i] > target:
# continue
# if val + nums[i] == target:
# out = [idx, i]
# break
# else:
# continue
# break
# return out
# Failure 1: Assumed positive integers in array and positive target
# class Solution:
# def twoSum(self, nums, target):
# summands = []
# numsSorted = [num for num in nums]
# numsSorted.sort()
# numsSorted.reverse()
# varTarget = target
# for num in numsSorted:
# if num > varTarget:
# continue
# elif num <= varTarget:
# varTarget -= num
# summands.append(num)
# out = []
# numsMemory = [num for num in nums]
# for a in summands:
# out.append(numsMemory.index(a))
# numsMemory[numsMemory.index(a)] = None
# out.sort()
# return out
# Ad-hoc smoke check of the accepted solution; expected output: [1, 2].
print(Solution().twoSum([3, 2, 4], 6))
992,912 | 3fcf9a2f49c1833aa5d1b146fcea69884c16ad58 | from abc import abstractmethod
from typing import Iterable, Dict, Any
from datasets import Dataset
from cheese.pipeline import Pipeline
from cheese.utils import safe_mkdir
import pandas as pd
class DatasetPipeline(Pipeline):
    """
    Base class for any pipeline whose data destination is a datasets.Dataset
    (arrow format) or a pandas.DataFrame (csv format).

    :param format: Format to save result dataset to. Defaults to csv
        (DOC FIX: the old docstring claimed arrow, contradicting the code).
        Can be arrow or csv.
    :type format: str

    :param save_every: Save dataset whenever this number of rows is added.
    :type save_every: int
    """
    def __init__(self, format : str = "csv", save_every : int = 1):
        super().__init__()
        self.write_path : str = None
        self.res_dataset : Dataset = None
        self.format = format
        self.save_every = save_every
        self.save_accum = 0  # rows added since the last save

    def load_dataset(self) -> bool:
        """
        Loads the results dataset from a given path. Returns false if load fails. Assumes write_path has been set already.

        :return: Whether load was successful
        :rtype: bool
        """
        if self.write_path is None:
            raise Exception("Error: Attempted to load results dataset without ever specifiying a path to write it to")
        try:
            if self.format == "arrow":
                self.res_dataset = Dataset.load_from_disk(self.write_path)
            elif self.format == "csv":
                self.res_dataset = pd.read_csv(self.write_path)
            return True
        except Exception:
            # Narrowed from a bare `except:` so KeyboardInterrupt and
            # SystemExit propagate; any I/O or parse error means "no dataset".
            return False

    def save_dataset(self):
        """
        Saves the result dataset to the write path (assuming it has been specified by subclass).
        Does nothing if there is no data to save yet.
        """
        if self.res_dataset is None:
            return
        if self.write_path is None:
            raise Exception("Error: Attempted to save result dataset without ever specifiying a path to write to")
        if self.format == "arrow":
            self.res_dataset.save_to_disk(self.write_path)
        elif self.format == "csv":
            self.res_dataset.to_csv(self.write_path, index = False)

    def add_row_to_dataset(self, row : Dict[str, Any]):
        """
        Add single row to result dataset, saving every `save_every` rows.

        :param row: The row, as a dictionary, to add to the result dataset
        :type row: Dict[str, Any]
        """
        # Wrap each value in a list so the row can seed a one-row table.
        row = {key : [row[key]] for key in row}
        if self.res_dataset is None:
            self.res_dataset = Dataset.from_dict(row) if self.format == "arrow" else pd.DataFrame(row)
        else:
            if self.format == "arrow":
                # NOTE(review): datasets.Dataset has no `append` method in
                # current releases (add_item exists) -- confirm the pinned
                # datasets version before relying on the arrow path.
                self.res_dataset = self.res_dataset.append(row)
            else:
                new_df = pd.DataFrame(row)
                self.res_dataset = pd.concat([self.res_dataset, new_df], ignore_index = True)
        self.save_accum += 1
        if self.save_accum >= self.save_every:
            self.save_dataset()
            self.save_accum = 0
|
992,913 | fc7e504b7814edbb128015084fc4e23b4aa948a6 | from __future__ import division
from __future__ import print_function
import cv2
import numpy as np
import copy
from core.detection_input import DetectionAugmentation, AnchorTarget2D
class Resize2DImageBboxMask(DetectionAugmentation):
    """
    Resize image, boxes and polygons so the short/long sides fit the target.

    input: image, ndarray(h, w, rgb)
           gt_bbox, ndarry(n, 5)
           gt_poly, [[(p, 2)]]
    output: image, ndarray(h', w', rgb)
            im_info, tuple(h', w', scale)
            gt_bbox, ndarray(n, 5)
            gt_poly, [[ndarray, ndarray, ...]]
    """
    def __init__(self, pResize):
        super().__init__()
        self.p = pResize  # type: ResizeParam
    def apply(self, input_record):
        p = self.p
        image = input_record["image"]
        gt_bbox = input_record["gt_bbox"]
        gt_poly = input_record["gt_poly"]
        # Single scale so neither the short side exceeds p.short nor the
        # long side exceeds p.long.
        short = min(image.shape[:2])
        long = max(image.shape[:2])
        scale = min(p.short / short, p.long / long)
        input_record["image"] = cv2.resize(image, None, None, scale, scale,
                                           interpolation=cv2.INTER_LINEAR)
        # make sure gt boxes do not overflow
        gt_bbox[:, :4] = gt_bbox[:, :4] * scale
        # NOTE(review): clipping uses the configured p.short/p.long bounds,
        # not the actual resized dimensions -- confirm this is intended.
        if image.shape[0] < image.shape[1]:
            gt_bbox[:, [0, 2]] = np.clip(gt_bbox[:, [0, 2]], 0, p.long)
            gt_bbox[:, [1, 3]] = np.clip(gt_bbox[:, [1, 3]], 0, p.short)
        else:
            gt_bbox[:, [0, 2]] = np.clip(gt_bbox[:, [0, 2]], 0, p.short)
            gt_bbox[:, [1, 3]] = np.clip(gt_bbox[:, [1, 3]], 0, p.long)
        input_record["gt_bbox"] = gt_bbox
        # exactly as opencv
        h, w = image.shape[:2]
        input_record["im_info"] = np.array([round(h * scale), round(w * scale), scale], dtype=np.float32)
        # resize poly: polygon coordinates scale uniformly with the image
        for i, segms in enumerate(gt_poly):
            input_record["gt_poly"][i] = [segm_j * scale for segm_j in segms]
class Flip2DImageBboxMask(DetectionAugmentation):
    """
    Horizontally flip image, boxes and polygons when record["flipped"] is set.

    input: image, ndarray(h, w, rgb)
           gt_bbox, ndarry(n, 5)
           gt_poly, [[ndarray, ndarray, ...]]
    output: image, ndarray(h, w, rgb)
            gt_bbox, ndarray(n, 5)
            gt_poly, [[ndarray, ndarray, ...]]
    """
    def __init__(self):
        super().__init__()
    def apply(self, input_record):
        def _flip_poly(poly, width):
            # x coordinates sit at even offsets of the flat [x0,y0,x1,y1,...]
            flipped_poly = poly.copy()
            flipped_poly[0::2] = width - poly[0::2] - 1
            return flipped_poly
        if input_record["flipped"]:
            image = input_record["image"]
            gt_bbox = input_record["gt_bbox"]
            gt_poly = input_record["gt_poly"]
            input_record["image"] = image[:, ::-1]
            flipped_bbox = gt_bbox.copy()
            h, w = image.shape[:2]
            # Mirror x1/x2 and swap them so x1 <= x2 still holds.
            flipped_bbox[:, 0] = (w - 1) - gt_bbox[:, 2]
            flipped_bbox[:, 2] = (w - 1) - gt_bbox[:, 0]
            input_record["gt_bbox"] = flipped_bbox
            # flip poly
            for i, segms in enumerate(gt_poly):
                input_record["gt_poly"][i] = [_flip_poly(segm_j, w) for segm_j in segms]
class Pad2DImageBboxMask(DetectionAugmentation):
    """
    Pad the image to the fixed network input size and pad gt arrays to
    fixed row counts (unused slots are filled with -1).

    input: image, ndarray(h, w, rgb)
           gt_bbox, ndarry(n, 5)
           gt_poly, [[ndarray, ndarray, ...]]
    output: image, ndarray(h, w, rgb)
            gt_bbox, ndarray(max_num_gt, 5)
            gt_poly, [[ndarray, ndarray, ...]]
    """
    def __init__(self, pPad):
        super().__init__()
        self.p = pPad  # type: PadParam
    def apply(self, input_record):
        p = self.p
        image = input_record["image"]
        gt_bbox = input_record["gt_bbox"]
        gt_poly = input_record["gt_poly"]
        h, w = image.shape[:2]
        # Portrait images pad to (long, short); landscape to (short, long).
        shape = (p.long, p.short, 3) if h >= w \
            else (p.short, p.long, 3)
        padded_image = np.zeros(shape, dtype=np.float32)
        padded_image[:h, :w] = image
        # -1 also serves as the "invalid instance" class marker downstream.
        padded_gt_bbox = np.full(shape=(p.max_num_gt, 5), fill_value=-1, dtype=np.float32)
        padded_gt_bbox[:len(gt_bbox)] = gt_bbox
        # NOTE(review): assumes gt_poly was already flattened to an
        # (n, max_len_gt_poly) array by EncodeGtPoly earlier in the
        # transform list -- confirm the transform ordering.
        padded_gt_poly = np.full(shape=(p.max_num_gt, p.max_len_gt_poly), fill_value=-1, dtype=np.float32)
        padded_gt_poly[:len(gt_bbox)] = gt_poly
        input_record["image"] = padded_image
        input_record["gt_bbox"] = padded_gt_bbox
        input_record["gt_poly"] = padded_gt_poly
class PreprocessGtPoly(DetectionAugmentation):
    # TODO: remove this function and set gt_poly in cache to ndarray
    """
    Convert the nested gt_poly lists to float32 ndarrays, in place.

    input: gt_poly
    output: gt_poly
    """
    def __init__(self):
        super().__init__()

    def apply(self, input_record):
        converted = [
            [np.array(segm, dtype=np.float32) for segm in instance]
            for instance in input_record["gt_poly"]
        ]
        input_record["gt_poly"] = converted
class EncodeGtPoly(DetectionAugmentation):
    """
    Flatten per-instance polygons into fixed-width float32 rows.

    input: gt_class, gt_poly
    output: gt_poly, ndarray(num_instance, max_len_gt_poly) padded with -1
    """
    def __init__(self, pPad):
        super().__init__()
        self.p = pPad
    def apply(self, input_record):
        gt_class = input_record["gt_class"]
        gt_poly = input_record["gt_poly"]  # [[ndarray, ndarray, ...]]
        num_instance = len(gt_class)
        encoded_gt_poly = np.full((num_instance, self.p.max_len_gt_poly), -1, dtype=np.float32)
        for i, (class_id, segms) in enumerate(zip(gt_class, gt_poly)):
            # encoded_gt_poly_i: [class_id, num_segms, len_segm1, len_segm2, segm1, segm2]
            encoded_gt_poly[i][0] = class_id
            num_segms = len(segms)
            encoded_gt_poly[i][1] = num_segms
            # Segment lengths first, then all segments concatenated flat.
            segms_len = [len(segm_j) for segm_j in segms]
            encoded = np.hstack([np.array(segms_len), np.hstack(segms)])
            encoded_gt_poly[i][2:2+len(encoded)] = encoded
        input_record["gt_poly"] = encoded_gt_poly
if __name__ == "__main__":
    # Visual smoke test for the mask-rcnn input pipeline: build transformed
    # batches from a cached COCO roidb and display boxes + decoded masks.
    import pickle as pkl
    import time
    import pycocotools.mask as mask_util
    from core.detection_input import ReadRoiRecord, \
        ConvertImageFromHwcToChw, Flip2DImageBbox, Pad2DImageBbox, \
        RenameRecord, AnchorTarget2D, AnchorLoader
    from models.maskrcnn.input import PreprocessGtPoly, EncodeGtPoly, \
        Resize2DImageBboxMask, Flip2DImageBboxMask, Pad2DImageBboxMask
    def vis_mask(img, mask, col, alpha=0.4):
        """Visualizes a single binary mask by alpha-blending `col` into img."""
        img = img.astype(np.float32)
        idx = np.nonzero(mask)
        img[idx[0], idx[1], :] *= 1.0 - alpha
        img[idx[0], idx[1], :] += alpha * col
        return img.astype(np.uint8)
    # Transform hyper-parameters (mirror the training configuration).
    class ResizeParam:
        short = 800
        long = 1200
    class PadParam:
        short = 800
        long = 1200
        max_num_gt = 100
        max_len_gt_poly = 2500
    class AnchorTarget2DParam:
        class generate:
            short = 800 // 16
            long = 1200 // 16
            stride = 16
            scales = (2, 4, 8, 16, 32)
            aspects = (0.5, 1.0, 2.0)
        class assign:
            allowed_border = 0
            pos_thr = 0.7
            neg_thr = 0.3
            min_pos_thr = 0.0
        class sample:
            image_anchor = 256
            pos_fraction = 0.5
    class RenameParam:
        mapping = dict(image="data")
    transform = [
        ReadRoiRecord(None),
        PreprocessGtPoly(),
        Resize2DImageBboxMask(ResizeParam),
        Flip2DImageBboxMask(),
        EncodeGtPoly(PadParam),
        Pad2DImageBboxMask(PadParam),
        ConvertImageFromHwcToChw(),
        AnchorTarget2D(AnchorTarget2DParam),
        RenameRecord(RenameParam.mapping)
    ]
    DEBUG = True
    # Keep only records with at least one gt box, then sample 20 at random.
    with open("data/cache/coco_valminusminival2014.roidb", "rb") as fin:
        roidb = pkl.load(fin)
    roidb = [rec for rec in roidb if rec["gt_bbox"].shape[0] > 0]
    roidb = [roidb[i] for i in np.random.choice(len(roidb), 20, replace=False)]
    print(roidb[0])
    # Duplicate every record with the flipped flag set to exercise the flip.
    flipped_roidb = []
    for rec in roidb:
        new_rec = rec.copy()
        new_rec["flipped"] = True
        flipped_roidb.append(new_rec)
    roidb = roidb + flipped_roidb
    loader = AnchorLoader(roidb=roidb,
                          transform=transform,
                          data_name=["data", "im_info", "gt_bbox", "gt_poly"],
                          label_name=["rpn_cls_label", "rpn_reg_target", "rpn_reg_weight"],
                          batch_size=2,
                          shuffle=False,
                          kv=None)
    tic = time.time()
    while True:
        try:
            data_batch = loader.next()
            if DEBUG:
                print(data_batch.provide_data)
                print(data_batch.provide_label)
                print(data_batch.data[0].shape)
                print(data_batch.label[1].shape)
                print(data_batch.label[2].shape)
            data = data_batch.data[0]
            gt_bbox = data_batch.data[2]
            gt_poly = data_batch.data[3]
            for i, (im, bbox, poly) in enumerate(zip(data, gt_bbox, gt_poly)):
                # CHW float -> HWC uint8 BGR for OpenCV display.
                im = im.transpose((1, 2, 0))[:, :, ::-1].asnumpy()
                im = np.uint8(im)
                # Padded rows carry class -1; keep only real instances.
                valid_instance = np.where(bbox[:, -1] != -1)[0]
                bbox = bbox[valid_instance].asnumpy()
                poly = poly[valid_instance].asnumpy()
                for j, (bbox_j, poly_j) in enumerate(zip(bbox, poly)):
                    x1, y1, x2, y2 = bbox_j[:4].astype(int)
                    cv2.rectangle(im, (x1, y1), (x2, y2), (255, 0, 0), 2)
                    # Decode the EncodeGtPoly layout:
                    # [class_id, num_segms, len_1..len_n, flat segments...]
                    class_index = poly_j[0]
                    assert int(class_index) == int(bbox_j[-1])
                    num_segms = poly_j[1]
                    len_segms = poly_j[2:2+int(num_segms)]
                    cur_start = 2 + int(num_segms)
                    segms = []
                    for len_segm in len_segms:
                        segm = poly_j[cur_start:cur_start+int(len_segm)]
                        segm = segm.tolist()
                        segms.append(segm)
                        cur_start = cur_start + int(len_segm)
                    # Rasterise the polygons to one binary mask and blend in.
                    rle = mask_util.frPyObjects(segms, im.shape[0], im.shape[1])
                    mask = mask_util.decode(rle)
                    mask = np.sum(mask, axis=2)
                    mask = np.array(mask > 0, dtype=np.float32)
                    im = vis_mask(im, mask, np.array([18, 127, 15]), alpha=0.4)
                cv2.imshow("im", im)
                cv2.waitKey(0)
        except StopIteration:
            toc = time.time()
            print("{} samples/s".format(len(roidb) / (toc - tic)))
            break
|
# Demonstrates that a slice copy of a list is independent of the original.
my_pizzas = ['pepperoni', 'beef', 'bacon', 'durian']
friend_pizzas = my_pizzas[:]
my_pizzas.append('fruit')
friend_pizzas.append('vegetables')
for heading, pizzas in (("My favorite pizzas are:", my_pizzas),
                        ("\nMy friend's favorite pizzas are:", friend_pizzas)):
    print(heading)
    for pizza in pizzas:
        print(pizza)
|
992,915 | 5afc1c07aea46980ba80dd60f365ef0291008a59 | import unittest
import tempfile
from pathlib import Path
import geowombat as gw
from geowombat.data import l8_224077_20200518_B2
from geowombat.data import l8_224077_20200518_B4
import numpy as np
import xarray as xr
def shift(data: xr.DataArray, x: int, y: int) -> xr.DataArray:
    """Shift *data* by (x, y) pixels and return the result as uint16.

    The shift is done in float64 so the raster's fill value can be
    inserted at the vacated edge; leftover NaNs are zeroed before the
    cast back to uint16.
    """
    moved = data.astype('float64').shift(
        shifts={'y': y, 'x': x}, fill_value=data._FillValue
    )
    return moved.fillna(0).astype('uint16')
class TestCOREG(unittest.TestCase):
    """Integration checks of gw.coregister on the bundled Landsat bands."""

    def test_coreg_data(self):
        """Co-registration output keeps the reference raster's shape."""
        with gw.open(l8_224077_20200518_B2) as target, gw.open(
            l8_224077_20200518_B4
        ) as reference:
            data = gw.coregister(
                target=target,
                reference=reference,
                ws=(256, 256),
                r_b4match=1,
                s_b4match=1,
                max_shift=5,
                resamp_alg_deshift='nearest',
                resamp_alg_calc='cubic',
                out_gsd=[target.gw.celly, reference.gw.celly],
                q=True,
                nodata=(0, 0),
                CPUs=1,
            )
            self.assertTrue(reference.shape == data.shape)

    def test_coreg_transform_data(self):
        """Same shape check, but with a CRS reprojection configured."""
        with gw.config.update(ref_crs='epsg:8858'):
            with gw.open(l8_224077_20200518_B2, chunks=512) as target, gw.open(
                l8_224077_20200518_B4, chunks=512
            ) as reference:
                data = gw.coregister(
                    target=target,
                    reference=reference,
                    wkt_version='WKT2_2019',
                    ws=(256, 256),
                    r_b4match=1,
                    s_b4match=1,
                    max_shift=5,
                    resamp_alg_deshift='nearest',
                    resamp_alg_calc='cubic',
                    out_gsd=[target.gw.celly, reference.gw.celly],
                    q=True,
                    nodata=(0, 0),
                    CPUs=1,
                )
                self.assertTrue(reference.shape == data.shape)

    def test_coreg_shift(self):
        """Tests a 1-pixel shift."""
        with gw.open(l8_224077_20200518_B2) as target, gw.open(
            l8_224077_20200518_B4
        ) as reference:
            with tempfile.TemporaryDirectory() as tmp:
                # Shift by 1 pixel in each direction
                target_shifted = shift(target, x=1, y=1)
                tmp_file = Path(tmp) / '_tmp_shift.tif'
                target_shifted.gw.save(tmp_file, overwrite=True)
                with gw.open(tmp_file) as target_shifted:
                    # Co-register the shifted data
                    shifted = gw.coregister(
                        target=target_shifted,
                        reference=reference,
                        ws=(256, 256),
                        r_b4match=1,
                        s_b4match=1,
                        max_shift=5,
                        resamp_alg_deshift='nearest',
                        resamp_alg_calc='cubic',
                        out_gsd=[target_shifted.gw.celly, reference.gw.celly],
                        q=True,
                        nodata=(0, 0),
                        CPUs=1,
                    )
                    # Sanity check: the shifted raster really differs from the
                    # original (edge rows/cols trimmed to skip fill pixels).
                    self.assertFalse(
                        np.allclose(
                            target.values[:, :-1, :-1],
                            target_shifted.values[:, :-1, :-1],
                        )
                    )
                    # Check if the corrected data match the original (unshifted) target
                    self.assertTrue(
                        np.allclose(
                            target.values[:, :-1, :-1],
                            shifted.values[:, :-1, :-1],
                        )
                    )
# Allow running this test module directly with `python <file>`.
if __name__ == '__main__':
    unittest.main()
|
992,916 | 574cf12432b15e3b3bae07827a038918684d5f60 | # why am i outputting the wrong string at the same time as the right
# NOTE(review): `char` and `text` are defined elsewhere. `char in text`
# prints the WHOLE of `text` whenever `char` occurs anywhere in it; if the
# intent was to show only the match, print `char` (or the matching slice).
if char in text:
    print(text)
|
992,917 | d506b0a7caf23f861ad15490e16bf77397916bab | from Dataset import CustomImageDataset
from Sampler import ImbalancedDatasetSampler
from Model import AIST_model
from Loss import FocalLoss
from Run_model import model_generator
import torch
import torch.optim as optim
from torch.utils.data import DataLoader, random_split
import torchvision.transforms as transforms
import os
import matplotlib.pyplot as plt
import pandas as pd
# --- Paths and training hyper-parameters -----------------------------------
# Layout: <parent>/data holds train_master.tsv plus the train_1..train_3
# image folders; outputs (weights, csv) are written back into data/.
code_path = os.getcwd()
parent_path = os.path.abspath(os.path.join(code_path, os.pardir))
data_path = os.path.join(parent_path, 'data')
labels_path = os.path.join(data_path, 'train_master.tsv')
img_folders = ['train_1', 'train_2', 'train_3']
saving_weights_path = os.path.join(data_path, 'model_weights.pth')
saving_csv_path = os.path.join(data_path, 'results.csv')
ratio = 0.1 # positive class/total
BATCH_SIZE = 256
EPOCH = 10
# create transformation function
def transform(percent):
    """Build the training transform: tensor conversion plus random flips.

    Both flips are wrapped in one RandomApply; each flip fires with
    probability *percent* when the wrapper is applied.
    """
    flips = transforms.RandomApply([
        transforms.RandomHorizontalFlip(p=percent),
        transforms.RandomVerticalFlip(p=percent),
    ])
    return transforms.Compose([transforms.ToTensor(), flips])
# --- Dataset and loaders ----------------------------------------------------
# NOTE(review): `transform` (the factory function) is passed uncalled --
# confirm CustomImageDataset expects a callable factory rather than a
# composed transform instance.
dataset = CustomImageDataset(labels_path, data_path, img_folders, transform, None)
# Split into train/valid with a fixed seed so the split is reproducible.
torch.manual_seed(0)
train, valid = random_split(dataset, [236800, 59382]) # 296182
# Train loader uses a custom sampler to rebalance classes toward `ratio`.
train_dataloader = DataLoader(train, batch_size=BATCH_SIZE, num_workers=1,
                              sampler=ImbalancedDatasetSampler(train.dataset, train.indices, BATCH_SIZE, ratio))
valid_dataloader = DataLoader(valid, batch_size=BATCH_SIZE, shuffle=True)
# Loss and optimizer. The commented lines keep the weighted-CE alternative
# that FocalLoss replaced.
# class_weights = torch.tensor([ratio, 1-ratio]).float().cuda()
# loss_fn = nn.CrossEntropyLoss(class_weights)
loss_fn = FocalLoss(alpha=ratio, gamma=2)
optimizer = optim.Adam(AIST_model.parameters(), lr=1e-3, weight_decay=1e-5)
# Train, checkpointing weights to saving_weights_path.
model = model_generator(train_dataloader, valid_dataloader, AIST_model, loss_fn, optimizer, num_epoch=EPOCH,
                        save_path=saving_weights_path)
train_loss, valid_loss, train_IoU, valid_IoU = model.run()
# Persist per-epoch metrics to CSV.
content = {'epoch': [(i + 1) for i in range(EPOCH)], 'training_loss': train_loss, 'testing_loss': valid_loss,
           'training_IoU': train_IoU, 'testing_IoU': valid_IoU}
df = pd.DataFrame(content)
df.to_csv(saving_csv_path)
# Plot train vs validation loss curves.
plt.title('Training and Validation Loss')
plt.plot(train_loss, label="Training Loss")
plt.plot(valid_loss, label="Validation Loss")
plt.xlabel('Epoch')
plt.ylabel('Loss')
plt.legend()
plt.show()
|
992,918 | 0618011bf007b92251d4a019b37c071558d31180 | from django.conf.urls import patterns, include, url
from django.conf import settings
from rest_framework.urlpatterns import format_suffix_patterns
import views
# REST API routes. Uses the old `patterns()` helper (removed in later
# Django versions), so this module targets a legacy Django + DRF stack.
urlpatterns = patterns('',
    # Students and their nested sub-resources.
    url(r'^students/$',
        views.StudentList.as_view(), name='student-list'),
    url(r'^students/(?P<pk>[0-9]+)/$',
        views.StudentDetail.as_view(), name='student-detail'),
    url(r'^students/(?P<pk>[0-9]+)/charts/$',
        views.StudentChartList.as_view(), name='student_chart-list'),
    url(r'^students/(?P<pk>[0-9]+)/behaviortypes/$',
        views.StudentBehaviorIncidentTypeList.as_view(), name='student_behaviortype-list'),
    url(r'^students/(?P<pk>[0-9]+)/behaviorincidents/$',
        views.StudentBehaviorIncidentList.as_view(), name='student_behaviorincident-list'),
    # Behavior incidents and their types.
    url(r'^behaviorincidents/$',
        views.BehaviorIncidentList.as_view(), name='behaviorincident-list'),
    url(r'^behaviorincidents/(?P<pk>[0-9]+)/$',
        views.BehaviorIncidentDetail.as_view(), name='behaviorincident-detail'),
    url(r'^behaviortypes/$',
        views.BehaviorIncidentTypeList.as_view(), name='behaviorincidenttype-list'),
    url(r'^behaviortypes/(?P<pk>[0-9]+)/$',
        views.BehaviorIncidentTypeDetail.as_view(), name='behaviorincidenttype-detail'),
    # Topics and subtopics.
    url(r'^topics/$',
        views.TopicList.as_view(), name='topic-list'),
    url(r'^topics/(?P<pk>[0-9]+)/$',
        views.TopicDetail.as_view(), name='topic-detail'),
    url(r'^subtopics/(?P<pk>[0-9]+)/$',
        views.SubtopicDetail.as_view(), name='subtopic-detail'),
    # Input/output channels.
    url(r'^input_channels/$',
        views.InputChannelList.as_view(), name='inputchannel-list'),
    url(r'^input_channels/(?P<pk>[0-9]+)/$',
        views.InputChannelDetail.as_view(), name='inputchannel-detail'),
    url(r'^output_channels/$',
        views.OutputChannelList.as_view(), name='outputchannel-list'),
    url(r'^output_channels/(?P<pk>[0-9]+)/$',
        views.OutputChannelDetail.as_view(), name='outputchannel-detail'),
    # Charts and their nested metrics / phase lines.
    url(r'^charts/$',
        views.ChartList.as_view(), name='chart-list'),
    url(r'^charts/(?P<pk>[0-9]+)/$',
        views.ChartDetail.as_view(), name='chart-detail'),
    url(r'^charts/(?P<pk>[0-9]+)/daymetrics/$',
        views.ChartDayMetricList.as_view(), name='chart_daymetric-list'),
    url(r'^charts/(?P<pk>[0-9]+)/phaselines/$',
        views.ChartPhaseLineList.as_view(), name='chart_phaseline-list'),
    url(r'^daymetrics/$',
        views.DayMetricList.as_view(), name='daymetric-list'),
    url(r'^daymetrics/(?P<pk>[0-9]+)/$',
        views.DayMetricDetail.as_view(), name='daymetric-detail'),
    url(r'^phaselines/$',
        views.PhaseLineList.as_view(), name='phaseline-list'),
    url(r'^phaselines/(?P<pk>[0-9]+)/$',
        views.PhaseLineDetail.as_view(), name='phaseline-detail'),
)
# Allow format suffixes (e.g. .json, .api) on every route above.
urlpatterns = format_suffix_patterns(urlpatterns)
# in development mode, serve the index page with Django's dev server
if settings.DEBUG:
    urlpatterns += patterns('',
        url('^$', views.index, name='index'),
    )
|
992,919 | 754153ad893ba982919acfbe265841668fd5ab42 | z = max(arr) |
992,920 | 8e9f9d5bf166d601e74423ce4130759a4c94f8b1 | import random, sys
# Hangman: pick a random word from a comma-separated word list, then bounce
# between a "guessing well" loop and a "guessing badly" loop until the word
# is complete or tries run out.
file_word = open('C:/Users/Dmoho/Desktop/list_word.txt', 'r+')
# NOTE(review): the file is never closed and the 'r+' write access is never
# used -- a `with open(..., 'r')` block would be safer.
str_word = file_word.read()
list_word = str_word.split(',')
rand_word = random.choice(list_word)
len_rand_word = int(len(rand_word))
# `guess` starts as the bool *class* (a placeholder); it is overwritten
# with a real boolean below before the game loop reads it.
count, bottom_line, str_line, guess = 0, "_", "", bool
# Build the masked display: one "_ " per letter of the word.
for i in range(len_rand_word):
    str_line += bottom_line
    str_line += " "
list_guess_word = str_line.split()
user_letter = str(input(f'Guess the letter {" ".join(list_guess_word)}: '))
# NOTE(review): str.find reveals only the FIRST occurrence of a letter, so
# repeated letters in the word can never all be uncovered.
if user_letter in rand_word:
    index_user_letter = int(rand_word.find(user_letter))
    list_guess_word[index_user_letter] = user_letter
    word_guess_str = (''.join(list_guess_word))
    guess = True
else:
    word_guess_str = (''.join(list_guess_word))
    guess = False
while True:
    # Streak of correct guesses; the try counter does not advance here.
    while guess:
        if count > 8:
            sys.exit("Trials are over")
        index_user_letter = int(rand_word.find(user_letter))
        list_guess_word[index_user_letter] = user_letter
        word_guess_str = (''.join(list_guess_word))
        if rand_word == word_guess_str:
            print("Congratulations!!")
            sys.exit()
        user_letter = str(input(f'Well done, Go one there are {7-count} chances {word_guess_str}: '))
        if user_letter not in rand_word:
            guess = False
            break
        index_user_letter = int(rand_word.find(user_letter))
        word_guess_str = (''.join(list_guess_word))
        if rand_word == word_guess_str:
            print("Congratulations!!")
            sys.exit()
    # Streak of wrong guesses; only here does `count` advance.
    # NOTE(review): the prompts advertise 7 chances while the exit check is
    # `count > 8` -- the two numbers disagree.
    while not guess:
        if count > 8:
            sys.exit("Trials are over")
        count += 1
        user_letter = str(input(f'Ouch is painfull!!!Left {7 - count} chances {word_guess_str}: '))
        if user_letter in rand_word:
            guess = True
            index_user_letter = int(rand_word.find(user_letter))
            list_guess_word[index_user_letter] = user_letter
            word_guess_str = (''.join(list_guess_word))
            break
        else:
            continue
|
992,921 | d84791cc1f130e94981e55457a146efac22d7dad | # dp[i][j]: the longest palindromic subsequence's length of substring(i, j)
# State transition:
# dp[i][j] = dp[i+1][j-1] + 2 if s.charAt(i) == s.charAt(j)
# otherwise, dp[i][j] = Math.max(dp[i+1][j], dp[i][j-1])
# Initialization: dp[i][i] = 1
class Solution(object):
    def longestPalindromeSubseq(self, s):
        """Return the length of the longest palindromic subsequence of s.

        dp[i][j] holds the answer for the substring s[i..j]; rows are
        filled bottom-up from the last index so dp[i+1][*] is ready when
        row i is computed. O(n^2) time and space.
        """
        # Fast path: a string equal to its reverse is itself a palindrome.
        # (Also handles the empty string, returning 0.)
        if s == s[::-1]:
            return len(s)
        n = len(s)
        # range() instead of the Python-2-only xrange() so this runs on
        # Python 3 as well; behavior is identical on Python 2.
        dp = [[0 for j in range(n)] for i in range(n)]
        for i in range(n - 1, -1, -1):
            dp[i][i] = 1  # a single character is a palindrome of length 1
            for j in range(i + 1, n):
                if s[i] == s[j]:
                    # Matching ends extend the inner palindrome by 2.
                    dp[i][j] = 2 + dp[i + 1][j - 1]
                else:
                    dp[i][j] = max(dp[i + 1][j], dp[i][j - 1])
        return dp[0][n - 1]
|
992,922 | e4373c111a3ebd8b8b756b39ac44fd09fb0ed44c | '''
Filename: inputMain.py
Function:
This script handles inputs and parses the input and calls the appropriate function
Possible flags:
1. -i
2. -f <input filename> | diff -u <comparison output filename> -
3. nothing
4. else
'''
import sys
import utils as utils
from main import intMode, fileMode
if __name__ == '__main__':
    # NOTE(review): sys.argv[1] raises IndexError when no flag is given --
    # consider printing usage text instead of a traceback.
    flag = sys.argv[1]
    if flag == '-i':
        # Interactive mode.
        intMode()
    elif flag == '-f':
        # NOTE(review): the module docstring promises `-f <input filename>`,
        # but the filename argument is never read -- fileMode always
        # receives None. Verify against fileMode's signature.
        fileMode(None)
    else:
        print('Please enter a correct flag!')
|
992,923 | 41410cb571c0fe027bd200a2b3a75ce70f66ab47 | #!/home/bespontoff/PycharmProjects/checkio/venv/bin/checkio --domain=py run magic-with-5-cards
# code { background: transparent; white-space: nowrap; } code.r { color: red; } code.b { color: #163e69; } table { border-collapse: collapse; } th, td { border: 1px solid #163e69; padding: 5px; } th, td:first-child { text-align: center; font-weight: bold; } th:first-child { border: 0px; } p, li { text-align: justify; }
# END_DESC
# The 13 ranks and 4 suits of a standard 52-card deck; a card is the
# string "RANK SUIT", e.g. "10 ♦".
RANKS = tuple('A 2 3 4 5 6 7 8 9 10 J Q K'.split())
SUITS = tuple('♣♦♥♠')
def bot(*cards, n=1):
    """Pick the four cards the bot announces to the magician.

    Placeholder strategy: simply hand over the first four cards given;
    a real solution must encode the fifth card in the choice and order.
    """
    first_four = cards[:4]
    return first_four
def magician(*cards, n=1):
    """Determine the fifth card with only four cards."""
    # Placeholder: pick a uniformly random card from the deck after
    # removing the four visible ones -- not a real decoding strategy,
    # so the self-checks below will fail except by luck.
    from random import choice
    deck = [f'{r} {s}' for r in RANKS for s in SUITS]
    for card in cards:
        deck.remove(card)
    return choice(deck)
if __name__ == '__main__':
    # Kata self-checks: they encode the expected bot/magician protocol and
    # only pass once real strategies replace the placeholders above.
    assert list(bot('A ♥', '3 ♦', 'K ♠', 'Q ♣', 'J ♦')) == ['J ♦', 'A ♥', 'Q ♣', 'K ♠']
    assert magician('J ♦', 'A ♥', 'Q ♣', 'K ♠') == '3 ♦'
    assert list(bot('10 ♦', 'J ♣', 'Q ♠', 'K ♥', '7 ♦', n=2)) == ['Q ♠', '7 ♦', 'J ♣', 'K ♥']
    assert magician('Q ♠', '7 ♦', 'J ♣', 'K ♥', n=2) == '10 ♦'
992,924 | 4229d2bf79edb8f1502b88608d3e5881a53d0c88 | import os
# Assemble the fit_spectrum command line from its parts and run it.
# (Python 2 file: note the `print cmd` statement below.)
exe = '~/arch/scattering/master/bin/fit_spectrum '
input_file=' minimise_chipt_su3_op4.ini.xml '
mass_file=' ../masses.ini.xml '
"""to run Dave's spec list"""
spec_list=' ../djwilson_spec_list/spec_final_elastic.list'
"""to run my spec list"""
# NOTE(review): this assignment overrides the one above; comment out one of
# the two spec_list lines to switch lists.
spec_list=' ../spec_final_elastic.list'
rel_dir=' /'
cmd=exe+input_file+mass_file+spec_list+rel_dir
print cmd
os.system(cmd)
|
992,925 | 8766d14752b44dbb3ea085156d28ebfb74d9e988 | """Tests for github_webhook.webhook"""
from __future__ import print_function
import pytest
import werkzeug
import json
try:
from unittest import mock
except ImportError:
import mock
from github_webhook.webhook import Webhook
# --- Fixtures ---------------------------------------------------------------
@pytest.fixture
def mock_request():
    # Patch the flask `request` proxy that Webhook reads; every request
    # carries an (empty) X-Github-Delivery header.
    with mock.patch("github_webhook.webhook.request") as req:
        req.headers = {"X-Github-Delivery": ""}
        yield req
@pytest.fixture
def push_request(mock_request):
    # A GitHub "push" event delivered as JSON.
    mock_request.headers["X-Github-Event"] = "push"
    mock_request.headers["content-type"] = "application/json"
    yield mock_request
@pytest.fixture
def push_request_encoded(mock_request):
    # A "push" event delivered form-urlencoded (GitHub's legacy content type).
    mock_request.headers["X-Github-Event"] = "push"
    mock_request.headers["content-type"] = "application/x-www-form-urlencoded"
    yield mock_request
@pytest.fixture
def app():
    # Stand-in Flask app; only add_url_rule is exercised.
    yield mock.Mock()
@pytest.fixture
def webhook(app):
    # Webhook under test, bound to the mock app.
    yield Webhook(app)
@pytest.fixture
def handler(webhook):
    # A hook registered for the default "push" event.
    handler = mock.Mock()
    webhook.hook()(handler)
    yield handler
def test_constructor():
    """Webhook(app) registers the /postreceive POST view on construction."""
    # GIVEN
    app = mock.Mock()
    # WHEN
    webhook = Webhook(app)
    # THEN
    app.add_url_rule.assert_called_once_with(
        endpoint="/postreceive", rule="/postreceive", view_func=webhook._postreceive, methods=["POST"]
    )
def test_init_app_flow():
    """The deferred init_app() flow registers the same route."""
    # GIVEN
    app = mock.Mock()
    # WHEN
    webhook = Webhook()
    webhook.init_app(app)
    # THEN
    app.add_url_rule.assert_called_once_with(
        endpoint="/postreceive", rule="/postreceive", view_func=webhook._postreceive, methods=["POST"]
    )
def test_init_app_flow_should_not_accidentally_override_secrets():
    """init_app() without a secret keeps the constructor-supplied one."""
    # GIVEN
    app = mock.Mock()
    # WHEN
    webhook = Webhook(secret="hello-world-of-secrecy")
    webhook.init_app(app)
    # THEN
    assert webhook.secret is not None
def test_init_app_flow_should_override_secrets():
    """An explicit secret passed to init_app() replaces the old one."""
    # GIVEN
    app = mock.Mock()
    # WHEN
    webhook = Webhook(secret="hello-world-of-secrecy")
    webhook.init_app(app, secret="a-new-world-of-secrecy")
    # THEN
    assert webhook.secret == "a-new-world-of-secrecy".encode("utf-8")
def test_run_push_hook(webhook, handler, push_request):
    """A registered push hook receives the parsed JSON payload."""
    # WHEN
    webhook._postreceive()
    # THEN
    handler.assert_called_once_with(push_request.get_json.return_value)
def test_run_push_hook_urlencoded(webhook, handler, push_request_encoded):
    """For urlencoded deliveries the JSON lives in the `payload` form field."""
    github_mock_payload = {"payload": '{"key": "value"}'}
    push_request_encoded.form.to_dict.return_value = github_mock_payload
    payload = json.loads(github_mock_payload["payload"])
    # WHEN
    webhook._postreceive()
    # THEN
    handler.assert_called_once_with(payload)
def test_do_not_run_push_hook_on_ping(webhook, handler, mock_request):
    """A GitHub ping event must not trigger push handlers."""
    # GIVEN
    mock_request.headers["X-Github-Event"] = "ping"
    mock_request.headers["content-type"] = "application/json"
    # WHEN
    webhook._postreceive()
    # THEN
    handler.assert_not_called()
def test_do_not_run_push_hook_on_ping_urlencoded(webhook, handler, mock_request):
    """Same ping check for the urlencoded content type."""
    # GIVEN
    mock_request.headers["X-Github-Event"] = "ping"
    mock_request.headers["content-type"] = "application/x-www-form-urlencoded"
    mock_request.form.to_dict.return_value = {"payload": '{"key": "value"}'}
    # WHEN
    webhook._postreceive()
    # THEN
    handler.assert_not_called()
def test_can_handle_zero_events(webhook, push_request):
    """A delivery with no registered hooks is a harmless no-op."""
    # WHEN, THEN
    webhook._postreceive() # noop
@pytest.mark.parametrize("secret", [u"secret", b"secret"])
@mock.patch("github_webhook.webhook.hmac")
def test_calls_if_signature_is_correct(mock_hmac, app, push_request, secret):
    """str and bytes secrets both work when the HMAC signature matches."""
    # GIVEN
    webhook = Webhook(app, secret=secret)
    push_request.headers["X-Hub-Signature"] = "sha1=hash_of_something"
    push_request.data = b"something"
    handler = mock.Mock()
    mock_hmac.compare_digest.return_value = True
    # WHEN
    webhook.hook()(handler)
    webhook._postreceive()
    # THEN
    handler.assert_called_once_with(push_request.get_json.return_value)
@mock.patch("github_webhook.webhook.hmac")
def test_does_not_call_if_signature_is_incorrect(mock_hmac, app, push_request):
    """A bad HMAC signature yields 400 and never invokes the handler."""
    # GIVEN
    webhook = Webhook(app, secret="super_secret")
    push_request.headers["X-Hub-Signature"] = "sha1=hash_of_something"
    push_request.data = b"something"
    handler = mock.Mock()
    mock_hmac.compare_digest.return_value = False
    # WHEN, THEN
    webhook.hook()(handler)
    with pytest.raises(werkzeug.exceptions.BadRequest):
        webhook._postreceive()
def test_request_has_no_data(webhook, handler, push_request):
    """A JSON delivery with an empty body is rejected with 400."""
    # GIVEN
    push_request.get_json.return_value = None
    # WHEN, THEN
    with pytest.raises(werkzeug.exceptions.BadRequest):
        webhook._postreceive()
def test_request_had_headers(webhook, handler, mock_request):
    """A request missing the X-Github-Event header is rejected with 400."""
    # WHEN, THEN
    with pytest.raises(werkzeug.exceptions.BadRequest):
        webhook._postreceive()
# -----------------------------------------------------------------------------
# Copyright 2015 Bloomberg Finance L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ----------------------------- END-OF-FILE -----------------------------------
|
992,926 | 59a85b2a2b9944e5ad6035cae7bab1abc62f9ea9 | from django.db import models
from django.contrib.auth.models import User
from django.core.validators import RegexValidator,MaxValueValidator, MinValueValidator
from datetime import datetime
import ast
#-------------------------------------------------------------------------------------------
class MacAddress(models.Model):
    # A device MAC address plus a free-form location hierarchy
    # (route -> place -> territory -> provincial -> country).
    address = models.CharField(max_length = 20,null=True,blank=True)
    name = models.CharField(max_length = 50,null=True,blank=True,default='home')
    route = models.CharField(max_length = 50,null=True,blank=True,default='home')
    place = models.CharField(max_length = 50,null=True,blank=True,default='home')
    territory = models.CharField(max_length = 50,null=True,blank=True,default='home')
    provincial = models.CharField(max_length = 50,null=True,blank=True,default='home')
    country = models.CharField(max_length = 50,null=True,blank=True,default='home')
    # NOTE(review): max_length is not a valid IntegerField option (Django
    # ignores it) -- drop it, or use CharField if fixed width matters.
    PIN = models.IntegerField(max_length = 10,null=True,blank=True)
    def __unicode__(self):
        return self.address
#-------------------------------------------------------------------------------------------
class UserProfile(models.Model):
    """Extra per-user data attached one-to-one to Django's User."""
    user = models.OneToOneField(User,primary_key=True)
    GENDER_CHOICES = (('Male', 'M'),('Female', 'F'))
    picture = models.ImageField(upload_to='profile_images', blank=True,default='profile_images/new_user.png')
    # Pass the callable, not its result: `default=datetime.now()` was
    # evaluated once at import time, freezing the same timestamp into
    # every row created afterwards. With the callable, Django evaluates
    # it per instance.
    timeout = models.DateTimeField(blank=True, null=True,default=datetime.now)
    OTP=models.CharField(max_length = 100,blank=True,null=True)
    gender = models.CharField(max_length=8, choices=GENDER_CHOICES)
    # Same import-time-default fix as `timeout` above.
    dob = models.DateField(default=datetime.now)
    folder=models.CharField(max_length = 100,blank=True)
    phone_number = models.CharField(max_length=15, blank=True)
    MAC=models.ManyToManyField(MacAddress,related_name='macAddress')
    driveSize=models.FloatField(validators = [MinValueValidator(0.0), MaxValueValidator(100)],default=0)
    def __unicode__(self):
        return self.user.username
# Convenience accessor: `user.profile` lazily creates the UserProfile row.
User.profile = property(lambda u: UserProfile.objects.get_or_create(user=u)[0])
#-------------------------------------------------------------------------------------------
class KeyValues(models.Model):
    # One candidate character for a key position.
    # NOTE(review): max_length on IntegerField is ignored by Django.
    value=models.CharField(max_length=1,blank=True)
    RealPosition=models.IntegerField(max_length=1,blank=True)
    def __unicode__(self):
        return str(self.value)
#-------------------------------------------------------------------------------------------
class Key(models.Model):
    # A key position together with its set of candidate values.
    position=models.IntegerField(max_length=1,blank=True)
    keyValues=models.ManyToManyField(KeyValues)
    def __unicode__(self):
        return str(self.position)
#-------------------------------------------------------------------------------------------
class FileDetails(models.Model):
    # Metadata plus the per-position keys for a stored file.
    fileId = models.CharField(max_length = 100,primary_key=True,db_index=True)
    fileName=models.CharField(max_length = 100)
    fileSize=models.IntegerField(max_length = 1000,blank=True,default=0)
    key=models.ManyToManyField(Key)
    def __unicode__(self):
        return self.fileId
#-------------------------------------------------------------------------------------------
class TempStorage(models.Model):
    # Scratch upload area: one file per user.
    user = models.OneToOneField(User,primary_key=True)
    FILE=models.FileField(upload_to='Temp')
    def __unicode__(self):
        return self.FILE
#-------------------------------------------------------------------------------------------
class FileAccess(models.Model):
    # Which server/IP an owner's files are reachable from.
    owner = models.ForeignKey(UserProfile)
    # NOTE(review): IPAddressField was deprecated in favour of
    # GenericIPAddressField -- confirm the Django version before upgrading.
    ip=models.IPAddressField()
    serverUid=models.CharField(max_length = 100)
    def __unicode__(self):
        return '%s ' % (self.serverUid)
#-------------------------------------------------------------------------------------------
class Permission(models.Model):
    # Per-user access flag; `public` defaults to 0.
    user = models.ForeignKey(UserProfile)
    public=models.IntegerField(max_length=1,default=0)
    def __unicode__(self):
        return str(self.user)
#-------------------------------------------------------------------------------------------
class FileShare(models.Model):
    # A file shared by its owner under a set of per-user permissions.
    owner = models.ForeignKey(UserProfile)
    file_requested=models.ForeignKey(FileDetails)
    permission=models.ManyToManyField(Permission)
    def __unicode__(self):
        return str(self.owner)
#-------------------------------------------------------------------------------------------
|
992,927 | 294d727ebbd36f1ffea205e028534cb34271934a | import numpy as np
import pickle
import matplotlib
from matplotlib import rc
rc('text', usetex=True)
matplotlib.rcParams['pdf.fonttype'] = 42
matplotlib.rcParams['ps.fonttype'] = 42
import matplotlib.pyplot as plt
plt.style.use('ggplot')
plt.rcParams['lines.linewidth']=1.5
plt.rcParams['axes.facecolor']='w'
colors = plt.rcParams['axes.prop_cycle'].by_key()['color']
import model_config
from run import make_approximate
def num2str(num):
    """Render *num* as a filename-safe string ('.' becomes '_')."""
    text = str(num)
    return text.replace('.', '_')
import argparse
parser = argparse.ArgumentParser(description='Plot the data, posterior mean and variance of the synthetic moon classification dataset.')
parser.add_argument('--exper', help='in {moon, moon_random, moon_rm_30, moon_rm_40, moon_rm_50}',
                    required=False,
                    type=str,
                    default='moon')
args = parser.parse_args()
# Approximate-posterior configuration (flow depth / hidden width).
nbijector = 15
nhidden = 5
experiment = args.exper
approximate = "gauss_fullcov"
approximate_dist, approximate_config = make_approximate(approximate, nbijector, nhidden)
# Experiment definition: model, full dataset, and its remain/removed split.
experiment_data = model_config.get_experiment(experiment)
dim = experiment_data['dim']
nparam = experiment_data['nparam']
model = experiment_data['model']
data = experiment_data['data']
remain_data = experiment_data['remain_data']
removed_data = experiment_data['removed_data']
ndata = data.shape[0]
prefix = "result/{}/{}".format(experiment, approximate)
# Unlearning regularization strengths (lambda) to visualize.
selected_percentages = [1e-5, 1e-9, 0.0]
# Evaluation grid: n x n points over the square plotting window.
n = 50
plot_xmin = -1.5
plot_xmax = 2.5
x1d = np.linspace(plot_xmin, plot_xmax, n)
x1,x2 = np.meshgrid(x1d, x1d)
x = np.stack([x1.flatten(), x2.flatten()]).T
# Posterior trained on the full dataset.
full_params = pickle.load(open("{}/full_data_post.p".format(prefix), "rb"))
full_meanf, full_varf = model.predict_f(x,
                                        full_params['loc'],
                                        full_params['sqrt_cov'].dot(full_params['sqrt_cov'].T))
# Posterior retrained from scratch on the remaining data (the ground
# truth an unlearning method should match).
remain_params = pickle.load(open("{}/remain_data_retrain_post.p".format(prefix), "rb"))
remain_meanf, remain_varf = model.predict_f(x,
                                            remain_params['loc'],
                                            remain_params['sqrt_cov'].dot(remain_params['sqrt_cov'].T))
# Unlearned posteriors per lambda: rKL (ELBO) and EUBO variants.
elbo_meanf = {}
elbo_varf = {}
eubo_meanf = {}
eubo_varf = {}
for percentage in selected_percentages:
    elbo_params = pickle.load(open("{}/data_remain_data_by_unlearn_elbo_{}.p".format(prefix, percentage), "rb"))
    elbo_meanf[percentage], elbo_varf[percentage] = model.predict_f(x,
                                                                    elbo_params['loc'],
                                                                    elbo_params['sqrt_cov'].dot(elbo_params['sqrt_cov'].T))
    eubo_params = pickle.load(open("{}/data_remain_data_by_unlearn_eubo_{}.p".format(prefix, percentage), "rb"))
    eubo_meanf[percentage], eubo_varf[percentage] = model.predict_f(x,
                                                                    eubo_params['loc'],
                                                                    eubo_params['sqrt_cov'].dot(eubo_params['sqrt_cov'].T))
# --- Figure: 3 x 6 grid of panels -------------------------------------------
# Row 0: data panel, full-data posterior (mean/var), retrained posterior
# (mean/var); remaining panels cycle rKL/EUBO mean-var pairs per lambda.
figsize = (2.2*6, 2.*3)
fig, axs = plt.subplots(3,6, figsize=figsize, tight_layout=True)
# Panel (0,0): remaining points as dots, removed points as pink-edged
# crosses; color distinguishes the two classes (label in the last column).
axs[0,0].scatter(remain_data[np.where(remain_data[:,-1] == 0),0],
                 remain_data[np.where(remain_data[:,-1] == 0),1],
                 marker='o', c=colors[1], s=20)
axs[0,0].scatter(remain_data[np.where(remain_data[:,-1] == 1),0],
                 remain_data[np.where(remain_data[:,-1] == 1),1],
                 marker='o', c=colors[4], s=20)
sc = axs[0,0].scatter(removed_data[np.where(removed_data[:,-1] == 0),0],
                      removed_data[np.where(removed_data[:,-1] == 0),1],
                      marker='X', c=colors[1], s=30)
sc.set_edgecolor('#C7006E')
sc = axs[0,0].scatter(removed_data[np.where(removed_data[:,-1] == 1),0],
                      removed_data[np.where(removed_data[:,-1] == 1),1],
                      marker='X', c=colors[4], s=30)
sc.set_edgecolor('#C7006E')
axs[0,0].set_xlim(plot_xmin, plot_xmax)
axs[0,0].set_ylim(plot_xmin, plot_xmax)
axs[0,0].set_xlabel(r'$x_0$')
axs[0,0].set_ylabel(r'$x_1$')
axs[0,0].set_title("Data")
# Panels (0,1)-(0,2): full-data posterior mean and variance contours.
contour = axs[0,1].contour(x1, x2, full_meanf.reshape(n,n), origin='lower', colors='black')
axs[0,1].clabel(contour, inline=True, fontsize=8)
axs[0,1].set_xlabel(r'$x_0$')
axs[0,1].set_ylabel(r'$x_1$')
axs[0,1].grid(False)
axs[0,1].set_title(r"full data: $\mu_x$")
contour = axs[0,2].contour(x1, x2, full_varf.reshape(n,n), origin='lower', colors='black')
axs[0,2].clabel(contour, inline=True, fontsize=8)
axs[0,2].set_xlabel(r'$x_0$')
axs[0,2].set_ylabel(r'$x_1$')
axs[0,2].grid(False)
axs[0,2].set_title(r"full data: $\sigma^2_x$")
# Panels (0,3)-(0,4): retrained-from-scratch posterior (ground truth).
contour = axs[0,3].contour(x1, x2, remain_meanf.reshape(n,n), origin='lower', colors='black')
axs[0,3].clabel(contour, inline=True, fontsize=8)
axs[0,3].set_xlabel(r'$x_0$')
axs[0,3].set_ylabel(r'$x_1$')
axs[0,3].grid(False)
axs[0,3].set_title(r"retrain: $\mu_x$")
contour = axs[0,4].contour(x1, x2, remain_varf.reshape(n,n), origin='lower', colors='black')
axs[0,4].clabel(contour, inline=True, fontsize=8)
axs[0,4].set_xlabel(r'$x_0$')
axs[0,4].set_ylabel(r'$x_1$')
axs[0,4].grid(False)
axs[0,4].set_title(r"retrain: $\sigma^2_x$")
# Remaining panels: for each lambda, four panels in order (rKL mean,
# rKL var, EUBO mean, EUBO var), filled left-to-right, top-to-bottom;
# plot_idx // 6 is the row and plot_idx % 6 the column.
plot_idx = 5
for percentage in selected_percentages:
    contour = axs[int(plot_idx/6),plot_idx%6].contour(x1, x2, elbo_meanf[percentage].reshape(n,n), origin='lower', colors='black')
    axs[int(plot_idx/6),plot_idx%6].clabel(contour, inline=True, fontsize=8)
    axs[int(plot_idx/6),plot_idx%6].set_xlabel(r'$x_0$')
    axs[int(plot_idx/6),plot_idx%6].set_ylabel(r'$x_1$')
    axs[int(plot_idx/6),plot_idx%6].grid(False)
    axs[int(plot_idx/6),plot_idx%6].set_title(r"rKL: $\mu_x$, $\lambda={}$".format(percentage))
    plot_idx += 1
    contour = axs[int(plot_idx/6),plot_idx%6].contour(x1, x2, elbo_varf[percentage].reshape(n,n), origin='lower', colors='black')
    axs[int(plot_idx/6),plot_idx%6].clabel(contour, inline=True, fontsize=8)
    axs[int(plot_idx/6),plot_idx%6].set_xlabel(r'$x_0$')
    axs[int(plot_idx/6),plot_idx%6].set_ylabel(r'$x_1$')
    axs[int(plot_idx/6),plot_idx%6].grid(False)
    axs[int(plot_idx/6),plot_idx%6].set_title(r"rKL: $\sigma^2_x$, $\lambda={}$".format(percentage))
    plot_idx += 1
    contour = axs[int(plot_idx/6),plot_idx%6].contour(x1, x2, eubo_meanf[percentage].reshape(n,n), origin='lower', colors='black')
    axs[int(plot_idx/6),plot_idx%6].clabel(contour, inline=True, fontsize=8)
    axs[int(plot_idx/6),plot_idx%6].set_xlabel(r'$x_0$')
    axs[int(plot_idx/6),plot_idx%6].set_ylabel(r'$x_1$')
    axs[int(plot_idx/6),plot_idx%6].grid(False)
    axs[int(plot_idx/6),plot_idx%6].set_title(r"EUBO: $\mu_x$, $\lambda={}$".format(percentage))
    plot_idx += 1
    contour = axs[int(plot_idx/6),plot_idx%6].contour(x1, x2, eubo_varf[percentage].reshape(n,n), origin='lower', colors='black')
    axs[int(plot_idx/6),plot_idx%6].clabel(contour, inline=True, fontsize=8)
    axs[int(plot_idx/6),plot_idx%6].set_xlabel(r'$x_0$')
    axs[int(plot_idx/6),plot_idx%6].set_ylabel(r'$x_1$')
    axs[int(plot_idx/6),plot_idx%6].grid(False)
    axs[int(plot_idx/6),plot_idx%6].set_title(r"EUBO: $\sigma^2_x$, $\lambda={}$".format(percentage))
    plot_idx += 1
plt.show()
|
992,928 | 5fb84372f582c5ae87a1b79178eaa1d6403251aa | #! /usr/bin/env python
import numpy as np
import argparse
import pyfits
from scipy.ndimage.interpolation import shift as sc_shift
#from shift import shift as fft_shift
import os
from pyds9 import pydisplay
def shift(filename, xs, ys, refFile,noShift=False):
    # Shift the FITS image in *filename* by (xs, ys) pixels, record the
    # shift and reference file in the header, and write the result next to
    # the input as "<name>_s.<ext>". Returns the new filename.
    # (Python 2 module: note the `print` statement below.)
    f = pyfits.open(filename)
    header = f[0].header
    header['REF_FILE'] = (os.path.basename(refFile),'Reference file')
    header['PRE_FILE'] = (os.path.basename(filename),'Filename before shift')
    header['XSHIFT'] = (xs,'X shift from ref_file')
    header['YSHIFT'] = (ys,'Y shift from ref_file')
    newName = os.path.splitext(filename)
    newName = ''.join([newName[0],'_s',newName[1]])
    #return newName
    if noShift:
        newDat = f[0].data
    else:
        #newDat = fft_shift(f[0].data,xs,ys)
        # scipy's shift takes offsets in array (row, col) order, hence [ys, xs].
        newDat = sc_shift(f[0].data,[ys,xs])
    print 'Writing to %s' % newName
    pyfits.writeto(newName,newDat,header=header,clobber=True)
    return newName
def main():
    # Read "filename x y" rows; the first row is the reference frame and
    # every later frame is shifted so its (x, y) point lands on the
    # reference's.
    parser = argparse.ArgumentParser(description='Shift images to align objects in input file')
    parser.add_argument('file',help='Input file with coordinates')
    args = parser.parse_args()
    data = np.genfromtxt(args.file,names=['fname','x','y'],dtype=['a100','f8','f8'],autostrip=True)
    # Copy reference file
    checkMe = []
    ref = data[0]
    # A (0, 0) shift: effectively a copy that still stamps the header.
    checkMe.append(shift(ref['fname'],0,0,ref['fname'],noShift=False))
    for dat in data[1:]:
        xs = ref['x'] - dat['x']
        ys = ref['y'] - dat['y']
        checkMe.append(shift(dat['fname'],xs,ys,ref['fname']))
    #pydisplay(checkMe)
if __name__ == '__main__':
    main()
|
992,929 | d1af718078e2fee3f47391397e2c5d0eb3451f1b | # -*- coding: utf-8 -*-
"""
Created on Fri Oct 25 12:29:15 2019
@author: yanxi
"""
import numpy as np
# -------- part 1 conversion --------
def cart2pol(data):
    """Convert (..., 2) Cartesian (x, y) coordinates to polar (rho, phi).

    The trailing axis must have length 2; any leading shape is preserved.
    Output dtype follows the input (via zeros_like).
    """
    shape = data.shape
    assert shape[-1] == 2
    xy = data.reshape(-1, 2)
    px, py = xy[:, 0], xy[:, 1]
    out = np.zeros_like(xy)
    out[:, 0] = np.sqrt(px ** 2 + py ** 2)  # rho
    out[:, 1] = np.arctan2(py, px)          # phi
    return out.reshape(shape)
def pol2cart(data):
    """Convert (..., 2) polar (rho, phi) coordinates to Cartesian (x, y).

    The trailing axis must have length 2; any leading shape is preserved.
    Output dtype follows the input (via zeros_like).
    """
    shape = data.shape
    assert shape[-1] == 2
    rp = data.reshape(-1, 2)
    rho, phi = rp[:, 0], rp[:, 1]
    out = np.zeros_like(rp)
    out[:, 0] = rho * np.cos(phi)  # x
    out[:, 1] = rho * np.sin(phi)  # y
    return out.reshape(shape)
def cart2speed(data):
    """Reduce (..., 2) velocity components to their (...,) magnitudes."""
    shape = data.shape
    assert shape[-1] == 2
    vec = data.reshape(-1, 2)
    speed = np.sqrt(vec[:, 0] ** 2 + vec[:, 1] ** 2)
    return speed.reshape(shape[:-1])
# -------- part 2 feature extraction --------
def featureAbsSpeed(kpm, fps, unit, method='mean', alpha=0.8, weight=None):
    '''
    Window-merged per-key-point displacement between consecutive frames.

    Input:
        <kpm> key point matrix (4d: conf-frame-kp-xyv)
        <fps> the FPS of the kpm data
        <unit> generate one feature using <unit> frames
        <method> how to merge the features of different frames
        <alpha> decay factor for method='ema'
        <weight> (unit,) array of window weights for method='weight'
    Output: array of shape (nconf, nwindow, nkp, 2).

    NOTE(review): despite the name, values are raw per-frame pixel deltas;
    <fps> is never applied (see the commented-out utime), so this is
    displacement per frame, not per second -- confirm intent.
    '''
    assert kpm.ndim == 4
    assert kpm.shape[-2:] == (17,3)
    assert method in ['max', 'min', 'mean', 'ema', 'weight']
    nconf, nfrm, nkp = kpm.shape[:3]
    # Trim so the number of frame-to-frame diffs (nfrm - 1) is a multiple
    # of <unit>; when already divisible, drop one frame first.
    if nfrm % unit == 0:
        nfrm-=1
    nfrm = nfrm - (nfrm % unit) + 1
    # Diff the (x, y) channels along the frame axis.
    diff = np.diff(kpm[:,:nfrm,:,[0,1]],n=1,axis=1)
    #utime = 1.0/fps
    # Group the diffs into windows: (nconf, nwindow, unit, nkp, 2).
    diff = diff.reshape([nconf, -1, unit, nkp, 2])
    if method == 'mean':
        m = diff.mean(2)
    elif method == 'min':
        m = diff.min(2)
    elif method == 'max':
        m = diff.max(2)
    elif method == 'ema':
        # Exponential weights: older frames in the window decay by alpha.
        weight = np.ones(unit)
        for i in range(unit-1):
            weight[:unit-i-1] *= alpha
        m = np.average(diff, axis=2, weights=weight)
    elif method == 'weight':
        # BUG FIX: the original asserted `np.shape == (unit,)`, comparing
        # the *function* numpy.shape to a tuple -- always False, so the
        # 'weight' method could never be used. Check the array's shape.
        assert isinstance(weight, np.ndarray) and weight.shape == (unit,)
        m = np.average(diff, axis=2, weights=weight)
    return m
def featureRelSpeed(kpm, fps, pairs, unit, method='mean', alpha=0.8, weight=None):
    '''
    Relative (x, y) offset between keypoint pairs, merged over windows of
    <unit> frames.

    Input:
        <kpm> key point matrix (4d: conf-frame-kp-xyv)
        <fps> the FPS of the kpm data (unused; kept for API compatibility)
        <pairs> a 2-column integer array; each row is a pair of keypoint IDs
        <unit>/<method>/<alpha>/<weight> as in featureAbsSpeed
    Returns:
        array of shape (nconf, n_window, n_pair, 2)
    '''
    assert kpm.ndim == 4
    assert kpm.shape[-2:] == (17,3)
    assert pairs.ndim ==2 and pairs.shape[1] == 2
    assert method in ['max', 'min', 'mean', 'ema', 'weight']
    nconf, nfrm, nkp = kpm.shape[:3]
    npair = pairs.shape[0]
    # bug fix: the original computed an adjusted nfrm (with the diff-style
    # "+1" formula) but never used it when slicing kpm, so the reshape
    # below crashed whenever nfrm was not a multiple of <unit>.  No diff is
    # taken here, so simply truncate to the largest usable multiple; when
    # nfrm already is a multiple of <unit> this matches the old behavior.
    nfrm = nfrm - (nfrm % unit)
    diffs = [kpm[:, :nfrm, a, [0, 1]] - kpm[:, :nfrm, b, [0, 1]]
             for a, b in pairs]
    ref = np.stack(diffs, axis=2)
    ref = ref.reshape([nconf, -1, unit, npair, 2])
    if method == 'mean':
        m = ref.mean(2)
    elif method == 'min':
        m = ref.min(2)
    elif method == 'max':
        m = ref.max(2)
    elif method == 'ema':
        # exponential decay, newest frame weighted highest (alpha**0)
        f = alpha ** np.arange(unit - 1, -1, -1, dtype=float)
        m = np.average(ref, axis=2, weights=f)
    elif method == 'weight':
        # bug fix: original compared the np.shape *function* to a tuple
        # (`np.shape == (unit,)`), which always failed; check the array.
        assert isinstance(weight, np.ndarray) and weight.shape == (unit,)
        m = np.average(ref, axis=2, weights=weight)
    return m
def featureAbsSpeedRange(kpm, fps, unit, method='mean', alpha=0.8, weight=None):
    '''
    Input:
        <kpm> key point matrix (4d: conf-frame-kp-xyv)
        <fps> the FPS of the kpm data
        <unit> generate one feature using <unit> frames
        <method> how to merge the features of different frames

    WARNING: unfinished.  The windowed frame-to-frame diffs are computed
    (same preamble as featureAbsSpeed) but no range reduction is performed
    and nothing is returned, so the function currently yields None.
    '''
    assert kpm.ndim == 4
    assert kpm.shape[-2:] == (17,3)
    assert method in ['max', 'min', 'mean', 'ema', 'weight']
    nconf, nfrm, nkp = kpm.shape[:3]
    # keep nfrm such that the number of diffs (nfrm-1) divides by <unit>
    if nfrm % unit == 0:
        nfrm-=1
    nfrm = nfrm - (nfrm % unit) + 1
    diff = np.diff(kpm[:,:nfrm,:,[0,1]],n=1,axis=1)
    #utime = 1.0/fps
    diff = diff.reshape([nconf, -1, unit, nkp, 2])
    # NOTE(review): body ends here -- the per-window range (e.g. max-min)
    # was presumably intended next but was never written.
def kp2feature(kpm):
    # TODO: unimplemented placeholder -- presumably meant to turn a keypoint
    # matrix into feature vectors; currently returns None.
    pass
def kp2featureConf(kpmList, conf):
    # TODO: unimplemented placeholder -- presumably a confidence-aware
    # variant of kp2feature; currently returns None.
    pass
|
992,930 | 433309a96166d4fb14e4d8984a59f635be2f91dd | #!/usr/bin/env python
# go to the tree file and pick out all the paths that have hits greater
# than the cutoff and convert the entries to edge definitions.
import sys
import re
import Parameters
import operator
import string
def runme2(fid,infile,outfile):
    # For every line of <infile> whose tail contains <fid> as a token, write
    # the line (truncated just past the match) to <outfile>.
    # Python 2 code: file.xreadlines() and the `print >>` statement do not
    # exist in Python 3.
    fin = open(infile,'r')
    fout = open(outfile,'w')
    # NOTE(review): inside a character class, '$' is a literal dollar sign,
    # not an end-of-line anchor, so this matches ' fid ' or ' fid$' -- confirm
    # whether end-of-line matching was intended.
    sub = ' ' + fid + '[ $]'
    r = re.compile(sub)
    for line in fin.xreadlines():
        # search only past the first space (skips the leading field)
        i = line.index(' ')+1
        s = r.search(line[i:])
        if s != None:
            # emit everything up to, but not including, the character that
            # terminated the match (the trailing space/'$')
            print >>fout,line[0:i+s.end()-1]
def runme(fid,infile,outfile):
    # For each line of <infile> (format: two leading fields followed by path
    # tokens), locate <fid> among the path tokens and write the two fields
    # plus the sub-path spanning up to <levels_up> ancestors and
    # <levels_down> descendants of the match.
    # Python 2 code (`print` statement, xreadlines, string.join).
    fin = open(infile,'r')
    fout = open(outfile,'w')
    tot_up = Parameters.parameters['view']['levels_up']
    tot_down = Parameters.parameters['view']['levels_down']
    print "up=",tot_up," down=",tot_down
    # trigger too difficult for now
    # NOTE(review): <trigger> is read but never used below.
    trigger = Parameters.parameters['find']['immediate_children_threshold']
    for line in fin.xreadlines():
        a=line.split()
        b=a[2:]
        # find fid among the path tokens; indexOf raises ValueError if absent
        try:
            i=operator.indexOf(b,fid)
            # clamp the number of ancestor levels to what is available
            if i < tot_up: c = i
            else: c = tot_up
            print >>fout,"%s %s %s"%(a[0],a[1],string.join(b[i-c:i+1+tot_down]))
        except ValueError:
            # fid not on this path -- skip the line
            pass
if __name__ == "__main__":
    # CLI entry point: <function_id> <input_file>; the output file name is
    # the function id plus the configured suffix.  (Python 2 prints.)
    if len(sys.argv) < 3:
        print "usage: ", sys.argv[0], " function_id input_file_prefix"
        sys.exit(1)
    fid = sys.argv[1]
    infile = sys.argv[2]
    outfile = fid + Parameters.parameters['view']['out_file_suffix']
    runme(fid, infile, outfile)
|
992,931 | 22d880148a076c2b715a994bf3bbb44a288b4276 | """ Test script to visualize some of the random Curve data.
@author Graham Taylor
"""
from PIL import Image
import numpy as np
import matplotlib.pyplot as plt
import sys
sys.path.append('code')
sys.path.append('code/util')
from util import get_data_path, dispims, tile_raster_images
from util.serialization import deserialize_object
import os.path
data_path = get_data_path()
# will not reload if data is already in workspace: referencing the bare name
# raises NameError only on the first run of this (interactive) script
try:
    datasets
except NameError:
    print 'loading data'
    datasets = deserialize_object(os.path.join(data_path, 'results/params_tracer_data_multi_full_33_17_17_nC1_11000_switch1.00_2718_noise_nh1a_300_nh1b_50_nh2_500_nout_9_preTrainFalse_LRi_0.01000_reg_10.00_dropoutFalse_2.647312.pkl'))
# weight matrix of interest -- presumably (units x input-dims); transposed so
# rows are cases.  TODO confirm against the pickle's layout.
w0 = datasets[2].T
print w0.shape
n_cases, n_dims = w0.shape
im_w = int(np.sqrt(n_dims)) # assume square
case_w = int(np.sqrt(n_cases))+1
out = tile_raster_images(w0, (im_w, im_w), (case_w, case_w), tile_spacing=(3,3))
plt.imshow(out, cmap='gray')
plt.show()
quit()
# NOTE(review): everything below quit() is dead code, and it references
# show_batchsize, which is never defined anywhere in this script.
#map_w = np.sqrt(n_dims_out) # assume square
print im_w
n_train_batches = int(np.ceil(float(n_cases) / show_batchsize))
for b in xrange(n_train_batches):
    plt.figure(b)
    plt.subplot(1, 2, 1)
    batch_start = b * show_batchsize
    batch_end = min((b + 1) * show_batchsize, n_cases)
    this_view = w0[batch_start:batch_end]
    # must send the matrix with examples in the second dimension (i.e. .T)
    dispims(this_view.T, im_w, im_w, border=2, bordercolor=this_view.max())
    '''
    plt.subplot(1, 2, 2)
    this_view = train_y[batch_start:batch_end]
    dispims(this_view.T, map_w, map_w, border=2, bordercolor=this_view.max())
    '''
|
992,932 | a1fd8a0836bcdb570bd6cc3ade920867c951b72c | import random
# number guessing game: keep asking until the player matches the computer's
# random pick, then report how many rounds it took
attempts = 0
name = input('NAME PLEASE: ')
while True:
    # computer picks its secret number (1..5 inclusive)
    secret = random.randint(1,5)
    # read the player's guess and convert it to an integer
    guess = int(input("please enter a number: "))
    # reveal both numbers so the player sees the outcome
    print(secret)
    print(guess)
    attempts += 1
    if guess == secret:
        print(f"{name} won after {attempts} times")
        break
    # no match -- taunt and loop for another round
    print("you lose")
|
992,933 | 4fdf4ab3620229006335c49d53a73793b665bc8e | test_article = { u'entrezajax': { u'error': False},
u'result': [ { u'MedlineCitation':
{ u'Article': { u'Abstract': { u'AbstractText': [ u"We have identified multiple distinct splicing enhancer elements within protein-coding sequences of the constitutively spliced human beta-globin pre-mRNA. Each of these highly conserved sequences is sufficient to activate the splicing of a heterologous enhancer-dependent pre-mRNA. One of these enhancers is activated by and binds to the SR protein SC35, whereas at least two others are activated by the SR protein SF2/ASF. A single base mutation within another enhancer element inactivates the enhancer but does not change the encoded amino acid. Thus, overlapping protein coding and RNA recognition elements may be coselected during evolution. These studies provide the first direct evidence that SR protein-specific splicing enhancers are located within the coding regions of constitutively spliced pre-mRNAs. We propose that these enhancers function as multisite splicing enhancers to specify 3' splice-site selection."]},
u'Affiliation': u'Department of Molecular and Cellular Biology, Harvard University, Cambridge, Massachusetts 02138, USA.',
u'ArticleDate': [ ],
u'ArticleTitle': u'Multiple distinct splicing enhancers in the protein-coding sequences of a constitutively spliced pre-mRNA.',
u'AuthorList': [ { u'ForeName': u'T D',
u'Identifier': [ ],
u'Initials': u'TD',
u'LastName': u'Schaal'},
{ u'ForeName': u'T',
u'Identifier': [ ],
u'Initials': u'T',
u'LastName': u'Maniatis'}],
u'ELocationID': [ ],
u'GrantList': [ { u'Acronym': u'GM',
u'Agency': u'NIGMS NIH HHS',
u'Country': u'United States',
u'GrantID': u'GM42231'}],
u'Journal': { u'ISOAbbreviation': u'Mol. Cell. Biol.',
u'ISSN': u'0270-7306',
u'JournalIssue': { u'Issue': u'1',
u'PubDate': { u'Month': u'Jan',
u'Year': u'1999'},
u'Volume': u'19'},
u'Title': u'Molecular and cellular biology'},
u'Language': [ u'eng'],
u'Pagination': { u'MedlinePgn': u'261-73'},
u'PublicationTypeList': [ u'Journal Article',
u"Research Support, U.S. Gov't, P.H.S."]},
u'ChemicalList': [ { u'NameOfSubstance': u'Cross-Linking Reagents',
u'RegistryNumber': u'0'},
{ u'NameOfSubstance': u'Nuclear Proteins',
u'RegistryNumber': u'0'},
{ u'NameOfSubstance': u'RNA Precursors',
u'RegistryNumber': u'0'},
{ u'NameOfSubstance': u'RNA-Binding Proteins',
u'RegistryNumber': u'0'},
{ u'NameOfSubstance': u'Ribonucleoproteins',
u'RegistryNumber': u'0'},
{ u'NameOfSubstance': u'SRSF2 protein, human',
u'RegistryNumber': u'147153-65-9'},
{ u'NameOfSubstance': u'serine-arginine-rich splicing proteins',
u'RegistryNumber': u'170974-22-8'},
{ u'NameOfSubstance': u'Globins',
u'RegistryNumber': u'9004-22-2'}],
u'CitationSubset': [u'IM'],
u'CommentsCorrectionsList': [ { u'PMID': u'1589782',
u'RefSource': u'Science. 1992 May 15;256(5059):992-7'},
{ u'PMID': u'9536098',
u'RefSource': u'Hum Mol Genet. 1998 May;7(5):919-32'},
{ u'PMID': u'1825520',
u'RefSource': u'Science. 1991 Mar 1;251(4997):1045-50'},
{ u'PMID': u'1834932',
u'RefSource': u'Mol Cell Biol. 1991 Dec;11(12):5919-28'},
{ u'PMID': u'1944277',
u'RefSource': u'Mol Cell Biol. 1991 Dec;11(12):6075-83'},
{ u'PMID': u'2136768',
u'RefSource': u'Mol Cell Biol. 1990 Jan;10(1):84-94'},
{ u'PMID': u'2247057',
u'RefSource': u'Mol Cell Biol. 1990 Dec;10(12):6299-305'},
{ u'PMID': u'2427200',
u'RefSource': u'Cell. 1986 Aug 29;46(5):681-90'},
{ u'PMID': u'2837425',
u'RefSource': u'Genes Dev. 1988 Mar;2(3):319-29'},
{ u'PMID': u'2968159',
u'RefSource': u'Cell. 1988 Jun 17;53(6):949-61'},
{ u'PMID': u'2994004',
u'RefSource': u'Nucleic Acids Res. 1985 Aug 12;13(15):5591-609'},
{ u'PMID': u'3352607',
u'RefSource': u'Mol Cell Biol. 1988 Feb;8(2):860-6'},
{ u'PMID': u'3649340',
u'RefSource': u'J Biol Chem. 1987 Aug 15;262(23):11284-91'},
{ u'PMID': u'6828386',
u'RefSource': u'Nucleic Acids Res. 1983 Mar 11;11(5):1475-89'},
{ u'PMID': u'7489484',
u'RefSource': u'RNA. 1995 Mar;1(1):21-35'},
{ u'PMID': u'7493322',
u'RefSource': u'RNA. 1995 Oct;1(8):763-71'},
{ u'PMID': u'7540101',
u'RefSource': u'Curr Biol. 1995 Mar 1;5(3):249-51'},
{ u'PMID': u'7585252',
u'RefSource': u'RNA. 1995 Sep;1(7):663-80'},
{ u'PMID': u'7761834',
u'RefSource': u'Science. 1995 May 26;268(5214):1173-6'},
{ u'PMID': u'7852296',
u'RefSource': u'J Biol Chem. 1995 Feb 10;270(6):2411-4'},
{ u'PMID': u'7867927',
u'RefSource': u'Genes Dev. 1995 Feb 1;9(3):284-93'},
{ u'PMID': u'7935481',
u'RefSource': u'Mol Cell Biol. 1994 Nov;14(11):7670-82'},
{ u'PMID': u'7958850',
u'RefSource': u'Genes Dev. 1994 Jul 15;8(14):1703-12'},
{ u'PMID': u'7962048',
u'RefSource': u'J Cell Biol. 1994 Nov;127(3):593-607'},
{ u'PMID': u'8089127',
u'RefSource': u'J Biol Chem. 1994 Sep 23;269(38):23590-6'},
{ u'PMID': u'8124712',
u'RefSource': u'Cell. 1994 Feb 25;76(4):735-46'},
{ u'PMID': u'8139654',
u'RefSource': u'Nature. 1994 Mar 10;368(6467):119-24'},
{ u'PMID': u'8223480',
u'RefSource': u'EMBO J. 1993 Dec;12(12):4715-26'},
{ u'PMID': u'8223481',
u'RefSource': u'EMBO J. 1993 Dec;12(12):4727-37'},
{ u'PMID': u'8253386',
u'RefSource': u'Genes Dev. 1993 Dec;7(12A):2405-17'},
{ u'PMID': u'8261509',
u'RefSource': u'Cell. 1993 Dec 17;75(6):1061-70'},
{ u'PMID': u'8276242',
u'RefSource': u'Genes Dev. 1993 Dec;7(12B):2598-608'},
{ u'PMID': u'8289812',
u'RefSource': u'Mol Cell Biol. 1994 Feb;14(2):1347-54'},
{ u'PMID': u'8334698',
u'RefSource': u'Cell. 1993 Jul 16;74(1):105-14'},
{ u'PMID': u'8449402',
u'RefSource': u'Genes Dev. 1993 Mar;7(3):407-18'},
{ u'PMID': u'9649504',
u'RefSource': u'Genes Dev. 1998 Jul 1;12(13):1998-2012'},
{ u'PMID': u'1285125',
u'RefSource': u'Genes Dev. 1992 Dec;6(12B):2554-68'},
{ u'PMID': u'1566072',
u'RefSource': u'Science. 1992 Apr 10;256(5054):237-40'},
{ u'PMID': u'1577277',
u'RefSource': u'Genes Dev. 1992 May;6(5):837-47'},
{ u'PMID': u'8524796',
u'RefSource': u'Proc Natl Acad Sci U S A. 1995 Dec 5;92(25):11524-8'},
{ u'PMID': u'8647433',
u'RefSource': u'Genes Dev. 1996 Jun 1;10(11):1356-68'},
{ u'PMID': u'8668147',
u'RefSource': u'Mol Cell Biol. 1996 Jul;16(7):3317-26'},
{ u'PMID': u'8682289',
u'RefSource': u'Genes Dev. 1996 Jul 1;10(13):1569-79'},
{ u'PMID': u'8722179',
u'RefSource': u'Curr Opin Genet Dev. 1996 Apr;6(2):215-20'},
{ u'PMID': u'8743883',
u'RefSource': u'Curr Opin Cell Biol. 1996 Jun;8(3):331-9'},
{ u'PMID': u'8769651',
u'RefSource': u'Genes Dev. 1996 Aug 15;10(16):2089-101'},
{ u'PMID': u'9030686',
u'RefSource': u'Genes Dev. 1997 Feb 1;11(3):334-44'},
{ u'PMID': u'9108022',
u'RefSource': u'Proc Natl Acad Sci U S A. 1997 Apr 15;94(8):3596-601'},
{ u'PMID': u'9115432',
u'RefSource': u'Curr Opin Genet Dev. 1997 Apr;7(2):205-11'},
{ u'PMID': u'9159075',
u'RefSource': u'Curr Opin Cell Biol. 1997 Jun;9(3):350-7'},
{ u'PMID': u'9418892',
u'RefSource': u'Mol Cell Biol. 1998 Jan;18(1):450-8'},
{ u'PMID': u'1620124',
u'RefSource': u'Mol Cell Biol. 1992 Jul;12(7):3165-75'}],
u'DateCompleted': { u'Day': u'10',
u'Month': u'02',
u'Year': u'1999'},
u'DateCreated': { u'Day': u'10',
u'Month': u'02',
u'Year': u'1999'},
u'DateRevised': { u'Day': u'17',
u'Month': u'11',
u'Year': u'2011'},
u'GeneralNote': [],
u'KeywordList': [],
u'MedlineJournalInfo': { u'Country': u'UNITED STATES',
u'ISSNLinking': u'0270-7306',
u'MedlineTA': u'Mol Cell Biol',
u'NlmUniqueID': u'8109087'},
u'MeshHeadingList': [ { u'DescriptorName': u'Cross-Linking Reagents',
u'QualifierName': [ ]},
{ u'DescriptorName': u'Enhancer Elements, Genetic',
u'QualifierName': [ ]},
{ u'DescriptorName': u'Exons',
u'QualifierName': [ ]},
{ u'DescriptorName': u'Globins',
u'QualifierName': [ u'genetics']},
{ u'DescriptorName': u'Humans',
u'QualifierName': [ ]},
{ u'DescriptorName': u'Nuclear Proteins',
u'QualifierName': [ u'genetics',
u'metabolism']},
{ u'DescriptorName': u'RNA Precursors',
u'QualifierName': [ ]},
{ u'DescriptorName': u'RNA Splicing',
u'QualifierName': [ ]},
{ u'DescriptorName': u'RNA-Binding Proteins',
u'QualifierName': [ ]},
{ u'DescriptorName': u'Ribonucleoproteins',
u'QualifierName': [ ]}],
u'OtherAbstract': [],
u'OtherID': [u'PMC83884'],
u'PMID': u'9858550',
u'SpaceFlightMission': []},
u'PubmedData': { u'ArticleIdList': [ u'9858550',
u'PMC83884'],
u'History': [ { u'Day': u'22',
u'Month': u'12',
u'Year': u'1998'},
{ u'Day': u'22',
u'Hour': u'0',
u'Minute': u'1',
u'Month': u'12',
u'Year': u'1998'},
{ u'Day': u'22',
u'Hour': u'0',
u'Minute': u'0',
u'Month': u'12',
u'Year': u'1998'}],
u'PublicationStatus': u'ppublish'}}]}
|
992,934 | 325f67c2d96ef9d0884417e17e1371514ca12238 |
# TODO: move
def read_stat(self,layer,stat):
    """Load a per-layer statistic vector from a CSV file.

    Reads the file named ``<layer.name>_<stat>`` under the configured stats
    directory and returns its contents as float32, reshaped to the layer's
    batch-free output shape.

    NOTE(review): currently dead code -- the leading ``assert False`` fires
    before anything runs.  Relies on module-level ``config_glb``, ``os``,
    ``csv`` and ``np`` from the original file.  ``self`` may be ``None``
    (module-level configuration is used as a fallback in that case).
    """
    assert False, 'not yet modified'
    if self is None:
        # no instance: fall back to global configuration
        path_model_load = config_glb.path_model_load
        path_stat_r = config_glb.path_stat
    else:
        path_model_load = self.path_model_load
        path_stat_r = self.conf.path_stat
    #path_stat = os.path.join(self.path_model_load,self.conf.path_stat)
    path_stat = os.path.join(path_model_load,path_stat_r)
    #path_stat = os.path.join(config_glb.path_model,config_glb.path_stat)
    # file name is "<layer name>_<stat name>", e.g. "conv1_max"
    key = layer.name + '_' + stat
    # f_name_stat = f_name_stat_pre+'_'+key
    f_name_stat = key
    f_name = os.path.join(path_stat, f_name_stat)
    # NOTE(review): the file handle is never closed; consider `with`.
    f_stat = open(f_name, 'r')
    r_stat = csv.reader(f_stat)
    # NOTE(review): only the last CSV row survives this loop (the file
    # presumably holds a single row); an empty file would raise
    # UnboundLocalError at the return below -- confirm the file format.
    for row in r_stat:
        # self.dict_stat_r[l]=np.asarray(row,dtype=np.float32).reshape(self.list_shape[l][1:])
        stat_r = np.asarray(row, dtype=np.float32).reshape(layer.output_shape_fixed_batch[1:])
    return stat_r
|
992,935 | d7f5a390aef095c80ac06f34fc78c5acabbc4eb6 | #!/usr/bin/env python3
# Count passwords valid under the "exactly one of two positions" policy:
# each line looks like "<lo>-<hi> <letter>: <password>", and the letter must
# appear at position lo XOR position hi (1-based).
total = 0
for entry in open('input').read().splitlines():
    fields = entry.split()
    positions = fields[0].split("-")
    letter = fields[1][0]
    password = fields[2]
    first_hit = password[int(positions[0]) - 1] == letter
    second_hit = password[int(positions[1]) - 1] == letter
    # != on two booleans is exclusive-or
    if first_hit != second_hit:
        total += 1
print(total)
|
992,936 | 5d52c16fd32bfc765ef1f08e82f19f034da9f645 | replace_with = {'ṭ': 't',
'ᵈ': 'd',
'ḍ': 'ɖ',
'ɡ': 'g',
'ᶢ': 'g',
'ḿ': 'm',
'ᵐ': 'm',
'ᶬ': 'ɱ',
'ṅ': 'n',
'ǹ': 'n',
'ⁿ': 'n',
'ṉ': 'n',
'ṇ': 'ɳ',
'ñ': 'ɲ',
'ᵑ': 'ŋ',
'ŕ': 'r',
'ṛ': 'r',
'ṟ': 'r',
'ḥ': 'h',
'ĺ': 'l',
'ḹ': 'l',
'ḻ': 'ɻ',
'ṣ': 'ʂ',
'ī': 'i',
'î': 'i',
'í': 'i',
'ì': 'i',
'ḭ': 'i',
'ǐ': 'i',
'ý': 'y',
'ẏ': 'y',
'ù': 'u',
'ū': 'u',
'ú': 'u',
'û': 'u',
'ǔ': 'u',
'ṵ': 'u',
'ṳ': 'u',
'é': 'e',
'è': 'e',
'ê': 'e',
'ē': 'e',
'ĕ': 'e',
'ě': 'e',
'ḛ': 'e',
'ó': 'o',
'ò': 'o',
'ô': 'o',
'ō': 'o',
'ǣ': 'æ',
'á': 'a',
'â': 'a',
'ā': 'a',
'à': 'a',
'ǎ': 'a',
'ạ': 'a',
'ʴ': '˞',
'ṃ': u'\u0303'
}
letters = {'p': {'LB', 'PL'},
'b': {'LB', 'PL', 'VO'},
'p' + u'\u032a': {'LD', 'PL'},
'b' + u'\u032a': {'LD', 'PL', 'VO'},
't': {'AL', 'PL'},
'd': {'AL', 'PL', 'VO'},
'ʈ': {'RE', 'PL'},
'ɖ': {'RE', 'PL', 'VO'},
'c': {'PA', 'PL'},
'ɟ': {'PA', 'PL', 'VO'},
'k': {'VE', 'PL'},
'g': {'VE', 'PL', 'VO'},
'q': {'UV', 'PL'},
'ɢ': {'UV', 'PL', 'VO'},
'ʡ': {'PH', 'PL'},
'ʔ': {'GL', 'PL'},
'm': {'LB', 'NA', 'VO'},
'ɱ': {'LD', 'NA', 'VO'},
'n': {'AL', 'NA', 'VO'},
'ɳ': {'RE', 'NA', 'VO'},
'ɲ': {'PA', 'NA', 'VO'},
'ŋ': {'VE', 'NA', 'VO'},
'ɴ': {'UV', 'NA', 'VO'},
'ʙ': {'LB', 'VI', 'VO'},
'r': {'AL', 'VI', 'VO'},
'ɽr': {'RE', 'VI', 'VO'},
'ʀ': {'UV', 'VI', 'VO'},
'ʜ': {'PH', 'VI'},
'ʢ': {'PH', 'VI', 'VO'},
'ⱱ' + u'\u031f': {'LB', 'TA', 'VO'},
'ⱱ': {'LD', 'TA', 'VO'},
'ɾ': {'AL', 'TA', 'VO'},
'ɽ': {'RE', 'TA', 'VO'},
'ʀ̆': {'UV', 'TA', 'VO'},
'ʢ̆': {'PH', 'TA', 'VO'},
'ɸ': {'LB', 'NS'},
'β': {'LB', 'NS', 'VO'},
'f': {'LD', 'NS'},
'v': {'LD', 'NS', 'VO'},
'θ': {'DE', 'NS'},
'ð': {'DE', 'NS', 'VO'},
's': {'AL', 'SS'},
'z': {'AL', 'SS', 'VO'},
'ʃ': {'PO', 'SS'},
'ʒ': {'PO', 'SS', 'VO'},
'ʂ': {'RE', 'SS'},
'ʐ': {'RE', 'SS', 'VO'},
'ç': {'PA', 'NS'},
'ʝ': {'PA', 'NS', 'VO'},
'x': {'VE', 'NS'},
'ɣ': {'VE', 'NS', 'VO'},
'χ': {'UV', 'NS'},
'ʁ': {'UV', 'NS', 'VO'},
'ħ': {'PH', 'NS'},
'ʕ': {'PH', 'NS', 'VO'},
'h': {'GL', 'NS'},
'ɦ': {'GL', 'NS', 'VO'},
'ɬ': {'AL', 'NS', 'LA'},
'ɮ': {'AL', 'NS', 'LA', 'VO'},
'ʋ': {'LD', 'SV', 'VO'},
'ɹ': {'AL', 'SV', 'VO'},
'ɹ' + u'\u0320': {'PO', 'SV', 'VO'},
'ɻ': {'RE', 'SV', 'VO'},
'j': {'PA', 'SV', 'VO'},
'ɰ': {'VE', 'SV', 'VO'},
'l': {'AL', 'SV', 'LA', 'VO'},
'ɬ': {'AL', 'NS', 'LA'},
'ɮ': {'AL', 'NS', 'LA', 'VO'},
'ɭ': {'RE', 'SV', 'LA', 'VO'},
'ɭ' + u'\u031d': {'RE', 'NS', 'LA', 'VO'},
'ʎ': {'PA', 'SV', 'LA', 'VO'},
'ʎ' + u'\u031d': {'PA', 'NS', 'LA', 'VO'},
'ʟ': {'VE', 'SV', 'LA', 'VO'},
'ʟ' + u'\u031d': {'VE', 'NS', 'LA', 'VO'},
'ʘ': {'LB', 'PL', 'EJ', 'IN'},
'ǀ': {'DE', 'PL', 'EJ', 'IN'},
'ǃ': {'AL', 'PL', 'EJ', 'IN'},
'‼': {'RE', 'PL', 'EJ', 'IN'},
'ǂ': {'PA', 'PL', 'EJ', 'IN'},
'ʞ': {'VE', 'PL', 'EJ', 'IN'},
'ǁ': {'AL', 'PL', 'LA', 'EJ', 'IN'},
'ɓ': {'LB', 'PL', 'VO', 'IN'},
'ɗ': {'AL', 'PL', 'VO', 'IN'},
'ᶑ': {'RE', 'PL', 'VO', 'IN'},
'ʄ': {'PA', 'PL', 'VO', 'IN'},
'ɠ': {'VE', 'PL', 'VO', 'IN'},
'ʛ': {'UV', 'PL', 'VO', 'IN'},
'ʍ': {'LB', 'VZ', 'SV'},
'w': {'LB', 'VZ', 'SV', 'VO'},
'ɥ': {'LB', 'PZ', 'SV', 'VO'},
'ɫ': {'AL', 'VZ', 'SV', 'LA', 'VO'},
'ɺ': {'AL', 'TA', 'LA', 'VO'},
'ɕ': {'AL', 'PA', 'SS'},
'ʑ': {'AL', 'PA', 'SS', 'VO'},
'ȵ': {'AL', 'PA', 'VO', 'NA'},
'ɧ': {'PO', 'VE', 'SS'},
'i': {'PA', 'CL', 'VO'},
'ĩ': {'PA', 'CL', 'VO', 'NZ'},
'y': {'LZ', 'PA', 'CL', 'VO'},
'ỹ': {'LZ', 'PA', 'CL', 'VO', 'NZ'},
'ɨ': {'NE', 'CL', 'VO'},
'ʉ': {'LZ', 'NE', 'CL', 'VO'},
'ɯ': {'VE', 'CL', 'VO'},
'u': {'LZ', 'VE', 'CL', 'VO'},
'ũ': {'LZ', 'VE', 'CL', 'VO', 'NZ'},
'ɪ': {'PZ', 'NE', 'NC', 'VO'},
'ʏ': {'LZ', 'PZ', 'NE', 'NC', 'VO'},
'ʊ': {'LZ', 'VZ', 'NE', 'NC', 'VO'},
'e': {'PA', 'MC', 'VO'},
'ẽ': {'PA', 'MC', 'VO', 'NZ'},
'ø': {'LZ', 'PA', 'MC', 'VO'},
'ɘ': {'NE', 'MC', 'VO'},
'ɵ': {'LZ', 'NE', 'MC', 'VO'},
'ɤ': {'VE', 'MC', 'VO'},
'o': {'LZ', 'VE', 'MC', 'VO'},
'õ': {'LZ', 'VE', 'MC', 'VO', 'NZ'},
'ə': {'NE', 'MI', 'VO'},
'ɛ': {'PA', 'MO', 'VO'},
'œ': {'LZ', 'PA', 'MO', 'VO'},
'ɜ': {'NE', 'MO', 'VO'},
'ɞ': {'LZ', 'NE', 'MO', 'VO'},
'ʌ': {'VE', 'MO', 'VO'},
'ɔ': {'LZ', 'VE', 'MO', 'VO'},
'æ': {'PA', 'NO', 'VO'},
'ɐ': {'NE', 'NO', 'VO'},
'a': {'PA', 'OP', 'VO'},
'ã': {'PA', 'OP', 'VO', 'NZ'},
'ɶ': {'LZ', 'PA', 'OP', 'VO'},
'ä': {'NE', 'OP', 'VO'},
'ɑ': {'VE', 'OP', 'VO'},
'ɒ': {'LZ', 'VE', 'OP', 'VO'},
'ɚ': {'RZ', 'NE', 'MI', 'VO'},
'ɝ': {'RZ', 'NE', 'MO', 'VO'}
}
ignore_set = {'.', 'ˈ', 'ˌ', 'ː', ':', 'ˑ', '*', '´', '′', '˘', '˧', '˩', '˨', '˦', '˥', 'ꜜ', '1',
'2', '²', '³', '⁽', '⁾', '/', '(', ')', '⟨', '⟩', 'ˣ', '‿', ' ',
u'\u0300', u'\u0301', u'\u0302', u'\u0304', u'\u030c', u'\u031a', u'\u0320',
u'\u0324', u'\u0329', u'\u032f', u'\u0330', u'\u033a', u'\u033b', u'\u035C',
u'\u0361', u'\u1dc4', u'\u1dc5', u'\u1dc8'}
modifiers = {'ᵝ': {'action': 'add', 'args': ['LZ']},
'˞': {'action': 'add', 'args': ['RZ']},
'ʲ': {'action': 'add', 'args': ['PZ']},
'ˠ': {'action': 'add', 'args': ['VZ']},
'ʷ': {'action': 'add', 'args': ['LZ', 'VZ']},
'ˁ': {'action': 'add', 'args': ['HZ']},
'ˀ': {'action': 'add', 'args': ['GZ']},
'ˡ': {'action': 'add', 'args': ['LA']},
'ʰ': {'action': 'add', 'args': ['AS']},
'ʱ': {'action': 'add', 'args': ['AS', 'VO']},
'ʼ': {'action': 'add', 'args': ['EJ']},
u'\u0303': {'action': 'add', 'args': ['NZ']},
u'\u030a': {'action': 'remove', 'args': ['VO']},
u'\u0322': {'action': 'make', 'args': ['places', 'RE']},
u'\u0325': {'action': 'remove', 'args': ['VO']},
u'\u032c': {'action': 'add', 'args': ['VO']},
u'\u031f': {'action': 'advance', 'args': []},
u'\u031e': {'action': 'lower', 'args': []},
u'\u031d': {'action': 'upper', 'args': []},
u'\u032a': {'action': 'dentalize', 'args': []},
u'\u0339': {'action': 'add_and_remove', 'args': [['LH'], ['LZ']]},
u'\u02be': {'action': 'add_and_remove', 'args': [['LH'], ['LZ']]},
u'\u0348': {'action': 'add', 'args': ['IT']}
}
# ---- groupings of the two-letter articulatory feature codes used above ----
# places of articulation that can also serve as a vowel's place
vowelable_places = {'PA', 'NE', 'VE'}
# places where trills/vibrants occur
vibrantable_places = {'LB', 'LL', 'DE', 'AL', 'PO', 'RE', 'UV', 'PH'}
# lip-involving places
labials = {'LB', 'LD', 'LL'}
# tongue-involving places
linguals = {'LL', 'DE', 'AL', 'PO', 'RE', 'PA', 'NE', 'VE', 'UV'}
coronals = {'DE', 'AL', 'PO', 'RE'}
# vowel height codes (these live in the "manner" slot for vowels)
vowels = {'CL', 'NC', 'MC', 'MI', 'MO', 'NO', 'OP'}
# the full inventories, one per feature category
places = {'LB', 'LD', 'LL', 'DE', 'AL', 'PO', 'RE', 'PA', 'NE', 'VE', 'UV', 'PH', 'GL'}
secondary_places = {'LZ', 'LH', 'RZ', 'PZ', 'VZ', 'HZ', 'GZ'}
manners = {'NA', 'PL', 'NF', 'SF', 'NS', 'SS', 'VI', 'TA', 'SV', 'CL', 'NC', 'MC', 'MI', 'MO', 'NO', 'OP'}
secondary_manners = {'NZ', 'LA'}
airflows = {'AS', 'EJ', 'IT', 'IN', 'VO'}
# category name -> inventory, for iterating over all feature categories
features = {'places': places,
            'secondary_places': secondary_places,
            'manners': manners,
            'secondary_manners': secondary_manners,
            'airflows': airflows}
feature_names = {'LB': 'Labial', 'LD': 'Labiodental', 'LL': 'Labiolingual', 'DE': 'Dental',
'AL': 'Alveolar', 'PO': 'Postalveolar', 'RE': 'Retroflex', 'PA': 'Palatal',
'NE': 'Neutral', 'VE': 'Velar', 'UV': 'Uvular', 'PH': 'Pharyngeal', 'GL': 'Glottal',
'LZ': 'Labialized', 'LH': 'Half-labialized', 'RZ': 'Rhoticized', 'PZ': 'Palatalized',
'VZ': 'Velarized', 'HZ': 'Pharyngealized', 'GZ': 'Glottalize',
'NZ': 'Nasalized', 'NA': 'Nasal', 'PL': 'Plosive', 'NF': 'Nonsibilant affricate', 'SF': 'Sibilant affricate',
'NS': 'Nonsibilant fricative', 'SS': 'Sibilant fricative', 'VI': 'Vibrant', 'TA': 'Tap',
'SV': 'Approximant', 'CL': 'Closed', 'NC': 'Near-closed', 'MC': 'Mid-closed', 'MI': 'Mid',
'MO': 'Mid-open', 'NO': 'Near-open', 'OP': 'Opened', 'LA': 'Lateral',
'VO': 'Voiced', 'AS': 'Aspirated', 'EJ': 'Ejective', 'IT': 'Intensive', 'IN': 'Ingressive'}
feature_distance_map = {('LB', 'LB'): 0, ('LD', 'LB'): 1, ('LL', 'LB'): 2, ('DE', 'LB'): 2, ('AL', 'LB'): 3, ('PO', 'LB'): 4, ('RE', 'LB'): 5, ('PA', 'LB'): 5, ('NE', 'LB'): 4, ('VE', 'LB'): 3, ('UV', 'LB'): 5, ('PH', 'LB'): 6, ('GL', 'LB'): 3, ('LZ', 'LB'): 1, ('LH', 'LB'): 2, ('RZ', 'LB'): 7, ('PZ', 'LB'): 7, ('VZ', 'LB'): 7, ('HZ', 'LB'): 7, ('GZ', 'LB'): 7, ('LA', 'LB'): '', ('NZ', 'LB'): '', ('NA', 'LB'): '', ('PL', 'LB'): '', ('NF', 'LB'): '', ('SF', 'LB'): '', ('NS', 'LB'): '', ('SS', 'LB'): '', ('VI', 'LB'): '', ('TA', 'LB'): '', ('SV', 'LB'): '', ('CL', 'LB'): '', ('NC', 'LB'): '', ('MC', 'LB'): '', ('MI', 'LB'): '', ('MO', 'LB'): '', ('NO', 'LB'): '', ('OP', 'LB'): '', ('VO', 'LB'): '', ('AS', 'LB'): '', ('EJ', 'LB'): '', ('IT', 'LB'): '', ('IN', 'LB'): '', ('X', 'LB'): 4, ('LB', 'LD'): 1, ('LD', 'LD'): 0, ('LL', 'LD'): 3, ('DE', 'LD'): 1, ('AL', 'LD'): 2, ('PO', 'LD'): 3, ('RE', 'LD'): 5, ('PA', 'LD'): 5, ('NE', 'LD'): 5, ('VE', 'LD'): 4, ('UV', 'LD'): 5, ('PH', 'LD'): 6, ('GL', 'LD'): 3, ('LZ', 'LD'): 2, ('LH', 'LD'): 3, ('RZ', 'LD'): 7, ('PZ', 'LD'): 7, ('VZ', 'LD'): 7, ('HZ', 'LD'): 7, ('GZ', 'LD'): 7, ('LA', 'LD'): '', ('NZ', 'LD'): '', ('NA', 'LD'): '', ('PL', 'LD'): '', ('NF', 'LD'): '', ('SF', 'LD'): '', ('NS', 'LD'): '', ('SS', 'LD'): '', ('VI', 'LD'): '', ('TA', 'LD'): '', ('SV', 'LD'): '', ('CL', 'LD'): '', ('NC', 'LD'): '', ('MC', 'LD'): '', ('MI', 'LD'): '', ('MO', 'LD'): '', ('NO', 'LD'): '', ('OP', 'LD'): '', ('VO', 'LD'): '', ('AS', 'LD'): '', ('EJ', 'LD'): '', ('IT', 'LD'): '', ('IN', 'LD'): '', ('X', 'LD'): 4, ('LB', 'LL'): 2, ('LD', 'LL'): 3, ('LL', 'LL'): 0, ('DE', 'LL'): 2, ('AL', 'LL'): 2, ('PO', 'LL'): 3, ('RE', 'LL'): 5, ('PA', 'LL'): 5, ('NE', 'LL'): 5, ('VE', 'LL'): 5, ('UV', 'LL'): 5, ('PH', 'LL'): 6, ('GL', 'LL'): 3, ('LZ', 'LL'): 3, ('LH', 'LL'): 4, ('RZ', 'LL'): 7, ('PZ', 'LL'): 7, ('VZ', 'LL'): 7, ('HZ', 'LL'): 7, ('GZ', 'LL'): 7, ('LA', 'LL'): '', ('NZ', 'LL'): '', ('NA', 'LL'): '', ('PL', 'LL'): '', ('NF', 'LL'): '', 
('SF', 'LL'): '', ('NS', 'LL'): '', ('SS', 'LL'): '', ('VI', 'LL'): '', ('TA', 'LL'): '', ('SV', 'LL'): '', ('CL', 'LL'): '', ('NC', 'LL'): '', ('MC', 'LL'): '', ('MI', 'LL'): '', ('MO', 'LL'): '', ('NO', 'LL'): '', ('OP', 'LL'): '', ('VO', 'LL'): '', ('AS', 'LL'): '', ('EJ', 'LL'): '', ('IT', 'LL'): '', ('IN', 'LL'): '', ('X', 'LL'): 4, ('LB', 'DE'): 2, ('LD', 'DE'): 1, ('LL', 'DE'): 2, ('DE', 'DE'): 0, ('AL', 'DE'): 1, ('PO', 'DE'): 3, ('RE', 'DE'): 5, ('PA', 'DE'): 5, ('NE', 'DE'): 5, ('VE', 'DE'): 4, ('UV', 'DE'): 5, ('PH', 'DE'): 6, ('GL', 'DE'): 3, ('LZ', 'DE'): 7, ('LH', 'DE'): 6, ('RZ', 'DE'): 3, ('PZ', 'DE'): 7, ('VZ', 'DE'): 7, ('HZ', 'DE'): 7, ('GZ', 'DE'): 7, ('LA', 'DE'): '', ('NZ', 'DE'): '', ('NA', 'DE'): '', ('PL', 'DE'): '', ('NF', 'DE'): '', ('SF', 'DE'): '', ('NS', 'DE'): '', ('SS', 'DE'): '', ('VI', 'DE'): '', ('TA', 'DE'): '', ('SV', 'DE'): '', ('CL', 'DE'): '', ('NC', 'DE'): '', ('MC', 'DE'): '', ('MI', 'DE'): '', ('MO', 'DE'): '', ('NO', 'DE'): '', ('OP', 'DE'): '', ('VO', 'DE'): '', ('AS', 'DE'): '', ('EJ', 'DE'): '', ('IT', 'DE'): '', ('IN', 'DE'): '', ('X', 'DE'): 4, ('LB', 'AL'): 3, ('LD', 'AL'): 2, ('LL', 'AL'): 2, ('DE', 'AL'): 1, ('AL', 'AL'): 0, ('PO', 'AL'): 2, ('RE', 'AL'): 4, ('PA', 'AL'): 4, ('NE', 'AL'): 4, ('VE', 'AL'): 3, ('UV', 'AL'): 5, ('PH', 'AL'): 6, ('GL', 'AL'): 3, ('LZ', 'AL'): 7, ('LH', 'AL'): 6, ('RZ', 'AL'): 2, ('PZ', 'AL'): 7, ('VZ', 'AL'): 7, ('HZ', 'AL'): 7, ('GZ', 'AL'): 7, ('LA', 'AL'): '', ('NZ', 'AL'): '', ('NA', 'AL'): '', ('PL', 'AL'): '', ('NF', 'AL'): '', ('SF', 'AL'): '', ('NS', 'AL'): '', ('SS', 'AL'): '', ('VI', 'AL'): '', ('TA', 'AL'): '', ('SV', 'AL'): '', ('CL', 'AL'): '', ('NC', 'AL'): '', ('MC', 'AL'): '', ('MI', 'AL'): '', ('MO', 'AL'): '', ('NO', 'AL'): '', ('OP', 'AL'): '', ('VO', 'AL'): '', ('AS', 'AL'): '', ('EJ', 'AL'): '', ('IT', 'AL'): '', ('IN', 'AL'): '', ('X', 'AL'): 4, ('LB', 'PO'): 4, ('LD', 'PO'): 3, ('LL', 'PO'): 3, ('DE', 'PO'): 3, ('AL', 'PO'): 2, ('PO', 'PO'): 0, ('RE', 'PO'): 2, 
('PA', 'PO'): 2, ('NE', 'PO'): 3, ('VE', 'PO'): 3, ('UV', 'PO'): 5, ('PH', 'PO'): 6, ('GL', 'PO'): 3, ('LZ', 'PO'): 7, ('LH', 'PO'): 6, ('RZ', 'PO'): 2, ('PZ', 'PO'): 7, ('VZ', 'PO'): 7, ('HZ', 'PO'): 7, ('GZ', 'PO'): 7, ('LA', 'PO'): '', ('NZ', 'PO'): '', ('NA', 'PO'): '', ('PL', 'PO'): '', ('NF', 'PO'): '', ('SF', 'PO'): '', ('NS', 'PO'): '', ('SS', 'PO'): '', ('VI', 'PO'): '', ('TA', 'PO'): '', ('SV', 'PO'): '', ('CL', 'PO'): '', ('NC', 'PO'): '', ('MC', 'PO'): '', ('MI', 'PO'): '', ('MO', 'PO'): '', ('NO', 'PO'): '', ('OP', 'PO'): '', ('VO', 'PO'): '', ('AS', 'PO'): '', ('EJ', 'PO'): '', ('IT', 'PO'): '', ('IN', 'PO'): '', ('X', 'PO'): 4, ('LB', 'RE'): 5, ('LD', 'RE'): 5, ('LL', 'RE'): 5, ('DE', 'RE'): 5, ('AL', 'RE'): 4, ('PO', 'RE'): 2, ('RE', 'RE'): 0, ('PA', 'RE'): 4, ('NE', 'RE'): 4, ('VE', 'RE'): 3, ('UV', 'RE'): 5, ('PH', 'RE'): 6, ('GL', 'RE'): 3, ('LZ', 'RE'): 7, ('LH', 'RE'): 6, ('RZ', 'RE'): 2, ('PZ', 'RE'): 7, ('VZ', 'RE'): 7, ('HZ', 'RE'): 7, ('GZ', 'RE'): 7, ('LA', 'RE'): '', ('NZ', 'RE'): '', ('NA', 'RE'): '', ('PL', 'RE'): '', ('NF', 'RE'): '', ('SF', 'RE'): '', ('NS', 'RE'): '', ('SS', 'RE'): '', ('VI', 'RE'): '', ('TA', 'RE'): '', ('SV', 'RE'): '', ('CL', 'RE'): '', ('NC', 'RE'): '', ('MC', 'RE'): '', ('MI', 'RE'): '', ('MO', 'RE'): '', ('NO', 'RE'): '', ('OP', 'RE'): '', ('VO', 'RE'): '', ('AS', 'RE'): '', ('EJ', 'RE'): '', ('IT', 'RE'): '', ('IN', 'RE'): '', ('X', 'RE'): 4, ('LB', 'PA'): 5, ('LD', 'PA'): 5, ('LL', 'PA'): 5, ('DE', 'PA'): 5, ('AL', 'PA'): 4, ('PO', 'PA'): 2, ('RE', 'PA'): 4, ('PA', 'PA'): 0, ('NE', 'PA'): 1, ('VE', 'PA'): 2, ('UV', 'PA'): 4, ('PH', 'PA'): 6, ('GL', 'PA'): 2, ('LZ', 'PA'): 7, ('LH', 'PA'): 6, ('RZ', 'PA'): 7, ('PZ', 'PA'): 1, ('VZ', 'PA'): 7, ('HZ', 'PA'): 7, ('GZ', 'PA'): 7, ('LA', 'PA'): '', ('NZ', 'PA'): '', ('NA', 'PA'): '', ('PL', 'PA'): '', ('NF', 'PA'): '', ('SF', 'PA'): '', ('NS', 'PA'): '', ('SS', 'PA'): '', ('VI', 'PA'): '', ('TA', 'PA'): '', ('SV', 'PA'): '', ('CL', 'PA'): '', ('NC', 'PA'): '', 
('MC', 'PA'): '', ('MI', 'PA'): '', ('MO', 'PA'): '', ('NO', 'PA'): '', ('OP', 'PA'): '', ('VO', 'PA'): '', ('AS', 'PA'): '', ('EJ', 'PA'): '', ('IT', 'PA'): '', ('IN', 'PA'): '', ('X', 'PA'): 4, ('LB', 'NE'): 4, ('LD', 'NE'): 5, ('LL', 'NE'): 5, ('DE', 'NE'): 5, ('AL', 'NE'): 4, ('PO', 'NE'): 3, ('RE', 'NE'): 4, ('PA', 'NE'): 1, ('NE', 'NE'): 0, ('VE', 'NE'): 1, ('UV', 'NE'): 3, ('PH', 'NE'): 5, ('GL', 'NE'): 2, ('LZ', 'NE'): 7, ('LH', 'NE'): 6, ('RZ', 'NE'): 7, ('PZ', 'NE'): 2, ('VZ', 'NE'): 7, ('HZ', 'NE'): 7, ('GZ', 'NE'): 7, ('LA', 'NE'): '', ('NZ', 'NE'): '', ('NA', 'NE'): '', ('PL', 'NE'): '', ('NF', 'NE'): '', ('SF', 'NE'): '', ('NS', 'NE'): '', ('SS', 'NE'): '', ('VI', 'NE'): '', ('TA', 'NE'): '', ('SV', 'NE'): '', ('CL', 'NE'): '', ('NC', 'NE'): '', ('MC', 'NE'): '', ('MI', 'NE'): '', ('MO', 'NE'): '', ('NO', 'NE'): '', ('OP', 'NE'): '', ('VO', 'NE'): '', ('AS', 'NE'): '', ('EJ', 'NE'): '', ('IT', 'NE'): '', ('IN', 'NE'): '', ('X', 'NE'): 4, ('LB', 'VE'): 3, ('LD', 'VE'): 4, ('LL', 'VE'): 5, ('DE', 'VE'): 4, ('AL', 'VE'): 3, ('PO', 'VE'): 3, ('RE', 'VE'): 3, ('PA', 'VE'): 2, ('NE', 'VE'): 1, ('VE', 'VE'): 0, ('UV', 'VE'): 2, ('PH', 'VE'): 4, ('GL', 'VE'): 2, ('LZ', 'VE'): 7, ('LH', 'VE'): 6, ('RZ', 'VE'): 7, ('PZ', 'VE'): 7, ('VZ', 'VE'): 1, ('HZ', 'VE'): 7, ('GZ', 'VE'): 7, ('LA', 'VE'): '', ('NZ', 'VE'): '', ('NA', 'VE'): '', ('PL', 'VE'): '', ('NF', 'VE'): '', ('SF', 'VE'): '', ('NS', 'VE'): '', ('SS', 'VE'): '', ('VI', 'VE'): '', ('TA', 'VE'): '', ('SV', 'VE'): '', ('CL', 'VE'): '', ('NC', 'VE'): '', ('MC', 'VE'): '', ('MI', 'VE'): '', ('MO', 'VE'): '', ('NO', 'VE'): '', ('OP', 'VE'): '', ('VO', 'VE'): '', ('AS', 'VE'): '', ('EJ', 'VE'): '', ('IT', 'VE'): '', ('IN', 'VE'): '', ('X', 'VE'): 4, ('LB', 'UV'): 5, ('LD', 'UV'): 5, ('LL', 'UV'): 5, ('DE', 'UV'): 5, ('AL', 'UV'): 5, ('PO', 'UV'): 5, ('RE', 'UV'): 5, ('PA', 'UV'): 4, ('NE', 'UV'): 3, ('VE', 'UV'): 2, ('UV', 'UV'): 0, ('PH', 'UV'): 3, ('GL', 'UV'): 3, ('LZ', 'UV'): 7, ('LH', 'UV'): 6, ('RZ', 
'UV'): 7, ('PZ', 'UV'): 7, ('VZ', 'UV'): 3, ('HZ', 'UV'): 7, ('GZ', 'UV'): 7, ('LA', 'UV'): '', ('NZ', 'UV'): '', ('NA', 'UV'): '', ('PL', 'UV'): '', ('NF', 'UV'): '', ('SF', 'UV'): '', ('NS', 'UV'): '', ('SS', 'UV'): '', ('VI', 'UV'): '', ('TA', 'UV'): '', ('SV', 'UV'): '', ('CL', 'UV'): '', ('NC', 'UV'): '', ('MC', 'UV'): '', ('MI', 'UV'): '', ('MO', 'UV'): '', ('NO', 'UV'): '', ('OP', 'UV'): '', ('VO', 'UV'): '', ('AS', 'UV'): '', ('EJ', 'UV'): '', ('IT', 'UV'): '', ('IN', 'UV'): '', ('X', 'UV'): 4, ('LB', 'PH'): 6, ('LD', 'PH'): 6, ('LL', 'PH'): 6, ('DE', 'PH'): 6, ('AL', 'PH'): 6, ('PO', 'PH'): 6, ('RE', 'PH'): 6, ('PA', 'PH'): 6, ('NE', 'PH'): 5, ('VE', 'PH'): 4, ('UV', 'PH'): 3, ('PH', 'PH'): 0, ('GL', 'PH'): 3, ('LZ', 'PH'): 7, ('LH', 'PH'): 6, ('RZ', 'PH'): 7, ('PZ', 'PH'): 7, ('VZ', 'PH'): 7, ('HZ', 'PH'): 1, ('GZ', 'PH'): 7, ('LA', 'PH'): '', ('NZ', 'PH'): '', ('NA', 'PH'): '', ('PL', 'PH'): '', ('NF', 'PH'): '', ('SF', 'PH'): '', ('NS', 'PH'): '', ('SS', 'PH'): '', ('VI', 'PH'): '', ('TA', 'PH'): '', ('SV', 'PH'): '', ('CL', 'PH'): '', ('NC', 'PH'): '', ('MC', 'PH'): '', ('MI', 'PH'): '', ('MO', 'PH'): '', ('NO', 'PH'): '', ('OP', 'PH'): '', ('VO', 'PH'): '', ('AS', 'PH'): '', ('EJ', 'PH'): '', ('IT', 'PH'): '', ('IN', 'PH'): '', ('X', 'PH'): 4, ('LB', 'GL'): 3, ('LD', 'GL'): 3, ('LL', 'GL'): 3, ('DE', 'GL'): 3, ('AL', 'GL'): 3, ('PO', 'GL'): 3, ('RE', 'GL'): 3, ('PA', 'GL'): 2, ('NE', 'GL'): 2, ('VE', 'GL'): 2, ('UV', 'GL'): 3, ('PH', 'GL'): 3, ('GL', 'GL'): 0, ('LZ', 'GL'): 5, ('LH', 'GL'): 4, ('RZ', 'GL'): 5, ('PZ', 'GL'): 5, ('VZ', 'GL'): 5, ('HZ', 'GL'): 5, ('GZ', 'GL'): 1, ('LA', 'GL'): '', ('NZ', 'GL'): '', ('NA', 'GL'): '', ('PL', 'GL'): '', ('NF', 'GL'): '', ('SF', 'GL'): '', ('NS', 'GL'): '', ('SS', 'GL'): '', ('VI', 'GL'): '', ('TA', 'GL'): '', ('SV', 'GL'): '', ('CL', 'GL'): '', ('NC', 'GL'): '', ('MC', 'GL'): '', ('MI', 'GL'): '', ('MO', 'GL'): '', ('NO', 'GL'): '', ('OP', 'GL'): '', ('VO', 'GL'): '', ('AS', 'GL'): '', ('EJ', 'GL'): 5, 
('IT', 'GL'): 8, ('IN', 'GL'): '', ('X', 'GL'): 2, ('LB', 'LZ'): 1, ('LD', 'LZ'): 2, ('LL', 'LZ'): 3, ('DE', 'LZ'): 7, ('AL', 'LZ'): 7, ('PO', 'LZ'): 7, ('RE', 'LZ'): 7, ('PA', 'LZ'): 7, ('NE', 'LZ'): 7, ('VE', 'LZ'): 7, ('UV', 'LZ'): 7, ('PH', 'LZ'): 7, ('GL', 'LZ'): 5, ('LZ', 'LZ'): 0, ('LH', 'LZ'): 1, ('RZ', 'LZ'): 5, ('PZ', 'LZ'): 5, ('VZ', 'LZ'): 5, ('HZ', 'LZ'): 5, ('GZ', 'LZ'): 5, ('LA', 'LZ'): '', ('NZ', 'LZ'): '', ('NA', 'LZ'): '', ('PL', 'LZ'): '', ('NF', 'LZ'): '', ('SF', 'LZ'): '', ('NS', 'LZ'): '', ('SS', 'LZ'): '', ('VI', 'LZ'): '', ('TA', 'LZ'): '', ('SV', 'LZ'): '', ('CL', 'LZ'): '', ('NC', 'LZ'): '', ('MC', 'LZ'): '', ('MI', 'LZ'): '', ('MO', 'LZ'): '', ('NO', 'LZ'): '', ('OP', 'LZ'): '', ('VO', 'LZ'): '', ('AS', 'LZ'): '', ('EJ', 'LZ'): '', ('IT', 'LZ'): '', ('IN', 'LZ'): '', ('X', 'LZ'): 2, ('LB', 'LH'): 2, ('LD', 'LH'): 3, ('LL', 'LH'): 4, ('DE', 'LH'): 6, ('AL', 'LH'): 6, ('PO', 'LH'): 6, ('RE', 'LH'): 6, ('PA', 'LH'): 6, ('NE', 'LH'): 6, ('VE', 'LH'): 6, ('UV', 'LH'): 6, ('PH', 'LH'): 6, ('GL', 'LH'): 4, ('LZ', 'LH'): 1, ('LH', 'LH'): 0, ('RZ', 'LH'): 4, ('PZ', 'LH'): 4, ('VZ', 'LH'): 4, ('HZ', 'LH'): 4, ('GZ', 'LH'): 4, ('LA', 'LH'): '', ('NZ', 'LH'): '', ('NA', 'LH'): '', ('PL', 'LH'): '', ('NF', 'LH'): '', ('SF', 'LH'): '', ('NS', 'LH'): '', ('SS', 'LH'): '', ('VI', 'LH'): '', ('TA', 'LH'): '', ('SV', 'LH'): '', ('CL', 'LH'): '', ('NC', 'LH'): '', ('MC', 'LH'): '', ('MI', 'LH'): '', ('MO', 'LH'): '', ('NO', 'LH'): '', ('OP', 'LH'): '', ('VO', 'LH'): '', ('AS', 'LH'): '', ('EJ', 'LH'): '', ('IT', 'LH'): '', ('IN', 'LH'): '', ('X', 'LH'): 1, ('LB', 'RZ'): 7, ('LD', 'RZ'): 7, ('LL', 'RZ'): 7, ('DE', 'RZ'): 3, ('AL', 'RZ'): 2, ('PO', 'RZ'): 2, ('RE', 'RZ'): 2, ('PA', 'RZ'): 7, ('NE', 'RZ'): 7, ('VE', 'RZ'): 7, ('UV', 'RZ'): 7, ('PH', 'RZ'): 7, ('GL', 'RZ'): 5, ('LZ', 'RZ'): 5, ('LH', 'RZ'): 4, ('RZ', 'RZ'): 0, ('PZ', 'RZ'): 5, ('VZ', 'RZ'): 5, ('HZ', 'RZ'): 5, ('GZ', 'RZ'): 5, ('LA', 'RZ'): '', ('NZ', 'RZ'): '', ('NA', 'RZ'): '', ('PL', 'RZ'): 
'', ('NF', 'RZ'): '', ('SF', 'RZ'): '', ('NS', 'RZ'): '', ('SS', 'RZ'): '', ('VI', 'RZ'): '', ('TA', 'RZ'): '', ('SV', 'RZ'): '', ('CL', 'RZ'): '', ('NC', 'RZ'): '', ('MC', 'RZ'): '', ('MI', 'RZ'): '', ('MO', 'RZ'): '', ('NO', 'RZ'): '', ('OP', 'RZ'): '', ('VO', 'RZ'): '', ('AS', 'RZ'): '', ('EJ', 'RZ'): '', ('IT', 'RZ'): '', ('IN', 'RZ'): '', ('X', 'RZ'): 2, ('LB', 'PZ'): 7, ('LD', 'PZ'): 7, ('LL', 'PZ'): 7, ('DE', 'PZ'): 7, ('AL', 'PZ'): 7, ('PO', 'PZ'): 7, ('RE', 'PZ'): 7, ('PA', 'PZ'): 1, ('NE', 'PZ'): 2, ('VE', 'PZ'): 7, ('UV', 'PZ'): 7, ('PH', 'PZ'): 7, ('GL', 'PZ'): 5, ('LZ', 'PZ'): 5, ('LH', 'PZ'): 4, ('RZ', 'PZ'): 5, ('PZ', 'PZ'): 0, ('VZ', 'PZ'): 5, ('HZ', 'PZ'): 5, ('GZ', 'PZ'): 5, ('LA', 'PZ'): '', ('NZ', 'PZ'): '', ('NA', 'PZ'): '', ('PL', 'PZ'): '', ('NF', 'PZ'): '', ('SF', 'PZ'): '', ('NS', 'PZ'): '', ('SS', 'PZ'): '', ('VI', 'PZ'): '', ('TA', 'PZ'): '', ('SV', 'PZ'): '', ('CL', 'PZ'): '', ('NC', 'PZ'): '', ('MC', 'PZ'): '', ('MI', 'PZ'): '', ('MO', 'PZ'): '', ('NO', 'PZ'): '', ('OP', 'PZ'): '', ('VO', 'PZ'): '', ('AS', 'PZ'): '', ('EJ', 'PZ'): '', ('IT', 'PZ'): '', ('IN', 'PZ'): '', ('X', 'PZ'): 2, ('LB', 'VZ'): 7, ('LD', 'VZ'): 7, ('LL', 'VZ'): 7, ('DE', 'VZ'): 7, ('AL', 'VZ'): 7, ('PO', 'VZ'): 7, ('RE', 'VZ'): 7, ('PA', 'VZ'): 7, ('NE', 'VZ'): 7, ('VE', 'VZ'): 1, ('UV', 'VZ'): 3, ('PH', 'VZ'): 7, ('GL', 'VZ'): 5, ('LZ', 'VZ'): 5, ('LH', 'VZ'): 4, ('RZ', 'VZ'): 5, ('PZ', 'VZ'): 5, ('VZ', 'VZ'): 0, ('HZ', 'VZ'): 2, ('GZ', 'VZ'): 5, ('LA', 'VZ'): '', ('NZ', 'VZ'): '', ('NA', 'VZ'): '', ('PL', 'VZ'): '', ('NF', 'VZ'): '', ('SF', 'VZ'): '', ('NS', 'VZ'): '', ('SS', 'VZ'): '', ('VI', 'VZ'): '', ('TA', 'VZ'): '', ('SV', 'VZ'): '', ('CL', 'VZ'): '', ('NC', 'VZ'): '', ('MC', 'VZ'): '', ('MI', 'VZ'): '', ('MO', 'VZ'): '', ('NO', 'VZ'): '', ('OP', 'VZ'): '', ('VO', 'VZ'): '', ('AS', 'VZ'): '', ('EJ', 'VZ'): '', ('IT', 'VZ'): '', ('IN', 'VZ'): '', ('X', 'VZ'): 2, ('LB', 'HZ'): 7, ('LD', 'HZ'): 7, ('LL', 'HZ'): 7, ('DE', 'HZ'): 7, ('AL', 'HZ'): 7, ('PO', 
'HZ'): 7, ('RE', 'HZ'): 7, ('PA', 'HZ'): 7, ('NE', 'HZ'): 7, ('VE', 'HZ'): 7, ('UV', 'HZ'): 7, ('PH', 'HZ'): 1, ('GL', 'HZ'): 5, ('LZ', 'HZ'): 5, ('LH', 'HZ'): 4, ('RZ', 'HZ'): 5, ('PZ', 'HZ'): 5, ('VZ', 'HZ'): 2, ('HZ', 'HZ'): 0, ('GZ', 'HZ'): 5, ('LA', 'HZ'): '', ('NZ', 'HZ'): '', ('NA', 'HZ'): '', ('PL', 'HZ'): '', ('NF', 'HZ'): '', ('SF', 'HZ'): '', ('NS', 'HZ'): '', ('SS', 'HZ'): '', ('VI', 'HZ'): '', ('TA', 'HZ'): '', ('SV', 'HZ'): '', ('CL', 'HZ'): '', ('NC', 'HZ'): '', ('MC', 'HZ'): '', ('MI', 'HZ'): '', ('MO', 'HZ'): '', ('NO', 'HZ'): '', ('OP', 'HZ'): '', ('VO', 'HZ'): '', ('AS', 'HZ'): '', ('EJ', 'HZ'): '', ('IT', 'HZ'): '', ('IN', 'HZ'): '', ('X', 'HZ'): 2, ('LB', 'GZ'): 7, ('LD', 'GZ'): 7, ('LL', 'GZ'): 7, ('DE', 'GZ'): 7, ('AL', 'GZ'): 7, ('PO', 'GZ'): 7, ('RE', 'GZ'): 7, ('PA', 'GZ'): 7, ('NE', 'GZ'): 7, ('VE', 'GZ'): 7, ('UV', 'GZ'): 7, ('PH', 'GZ'): 7, ('GL', 'GZ'): 1, ('LZ', 'GZ'): 5, ('LH', 'GZ'): 4, ('RZ', 'GZ'): 5, ('PZ', 'GZ'): 5, ('VZ', 'GZ'): 5, ('HZ', 'GZ'): 5, ('GZ', 'GZ'): 0, ('LA', 'GZ'): '', ('NZ', 'GZ'): '', ('NA', 'GZ'): '', ('PL', 'GZ'): '', ('NF', 'GZ'): '', ('SF', 'GZ'): '', ('NS', 'GZ'): '', ('SS', 'GZ'): '', ('VI', 'GZ'): '', ('TA', 'GZ'): '', ('SV', 'GZ'): '', ('CL', 'GZ'): '', ('NC', 'GZ'): '', ('MC', 'GZ'): '', ('MI', 'GZ'): '', ('MO', 'GZ'): '', ('NO', 'GZ'): '', ('OP', 'GZ'): '', ('VO', 'GZ'): '', ('AS', 'GZ'): '', ('EJ', 'GZ'): 4, ('IT', 'GZ'): 7, ('IN', 'GZ'): '', ('X', 'GZ'): 2, ('LB', 'LA'): '', ('LD', 'LA'): '', ('LL', 'LA'): '', ('DE', 'LA'): '', ('AL', 'LA'): '', ('PO', 'LA'): '', ('RE', 'LA'): '', ('PA', 'LA'): '', ('NE', 'LA'): '', ('VE', 'LA'): '', ('UV', 'LA'): '', ('PH', 'LA'): '', ('GL', 'LA'): '', ('LZ', 'LA'): '', ('LH', 'LA'): '', ('RZ', 'LA'): '', ('PZ', 'LA'): '', ('VZ', 'LA'): '', ('HZ', 'LA'): '', ('GZ', 'LA'): '', ('LA', 'LA'): 0, ('NZ', 'LA'): 11, ('NA', 'LA'): 9, ('PL', 'LA'): 11, ('NF', 'LA'): 11, ('SF', 'LA'): 11, ('NS', 'LA'): 9, ('SS', 'LA'): 9, ('VI', 'LA'): 9, ('TA', 'LA'): 9, ('SV', 'LA'): 8, 
('CL', 'LA'): 11, ('NC', 'LA'): 11, ('MC', 'LA'): 10, ('MI', 'LA'): 10, ('MO', 'LA'): 10, ('NO', 'LA'): 11, ('OP', 'LA'): 11, ('VO', 'LA'): '', ('AS', 'LA'): '', ('EJ', 'LA'): '', ('IT', 'LA'): '', ('IN', 'LA'): '', ('X', 'LA'): 5, ('LB', 'NZ'): '', ('LD', 'NZ'): '', ('LL', 'NZ'): '', ('DE', 'NZ'): '', ('AL', 'NZ'): '', ('PO', 'NZ'): '', ('RE', 'NZ'): '', ('PA', 'NZ'): '', ('NE', 'NZ'): '', ('VE', 'NZ'): '', ('UV', 'NZ'): '', ('PH', 'NZ'): '', ('GL', 'NZ'): '', ('LZ', 'NZ'): '', ('LH', 'NZ'): '', ('RZ', 'NZ'): '', ('PZ', 'NZ'): '', ('VZ', 'NZ'): '', ('HZ', 'NZ'): '', ('GZ', 'NZ'): '', ('LA', 'NZ'): 11, ('NZ', 'NZ'): 0, ('NA', 'NZ'): 2, ('PL', 'NZ'): 5, ('NF', 'NZ'): 7, ('SF', 'NZ'): 7, ('NS', 'NZ'): 6, ('SS', 'NZ'): 7, ('VI', 'NZ'): 6, ('TA', 'NZ'): 5, ('SV', 'NZ'): 4, ('CL', 'NZ'): 6, ('NC', 'NZ'): 7, ('MC', 'NZ'): 8, ('MI', 'NZ'): 9, ('MO', 'NZ'): 10, ('NO', 'NZ'): 11, ('OP', 'NZ'): 12, ('VO', 'NZ'): '', ('AS', 'NZ'): '', ('EJ', 'NZ'): '', ('IT', 'NZ'): '', ('IN', 'NZ'): '', ('X', 'NZ'): 3, ('LB', 'NA'): '', ('LD', 'NA'): '', ('LL', 'NA'): '', ('DE', 'NA'): '', ('AL', 'NA'): '', ('PO', 'NA'): '', ('RE', 'NA'): '', ('PA', 'NA'): '', ('NE', 'NA'): '', ('VE', 'NA'): '', ('UV', 'NA'): '', ('PH', 'NA'): '', ('GL', 'NA'): '', ('LZ', 'NA'): '', ('LH', 'NA'): '', ('RZ', 'NA'): '', ('PZ', 'NA'): '', ('VZ', 'NA'): '', ('HZ', 'NA'): '', ('GZ', 'NA'): '', ('LA', 'NA'): 9, ('NZ', 'NA'): 2, ('NA', 'NA'): 0, ('PL', 'NA'): 3, ('NF', 'NA'): 5, ('SF', 'NA'): 5, ('NS', 'NA'): 4, ('SS', 'NA'): 5, ('VI', 'NA'): 4, ('TA', 'NA'): 3, ('SV', 'NA'): 2, ('CL', 'NA'): 4, ('NC', 'NA'): 5, ('MC', 'NA'): 6, ('MI', 'NA'): 7, ('MO', 'NA'): 8, ('NO', 'NA'): 9, ('OP', 'NA'): 10, ('VO', 'NA'): '', ('AS', 'NA'): '', ('EJ', 'NA'): '', ('IT', 'NA'): '', ('IN', 'NA'): '', ('X', 'NA'): 3, ('LB', 'PL'): '', ('LD', 'PL'): '', ('LL', 'PL'): '', ('DE', 'PL'): '', ('AL', 'PL'): '', ('PO', 'PL'): '', ('RE', 'PL'): '', ('PA', 'PL'): '', ('NE', 'PL'): '', ('VE', 'PL'): '', ('UV', 'PL'): '', ('PH', 'PL'): '', 
('GL', 'PL'): '', ('LZ', 'PL'): '', ('LH', 'PL'): '', ('RZ', 'PL'): '', ('PZ', 'PL'): '', ('VZ', 'PL'): '', ('HZ', 'PL'): '', ('GZ', 'PL'): '', ('LA', 'PL'): 11, ('NZ', 'PL'): 5, ('NA', 'PL'): 3, ('PL', 'PL'): 0, ('NF', 'PL'): 2, ('SF', 'PL'): 2, ('NS', 'PL'): 4, ('SS', 'PL'): 4, ('VI', 'PL'): 5, ('TA', 'PL'): 4, ('SV', 'PL'): 6, ('CL', 'PL'): 8, ('NC', 'PL'): 9, ('MC', 'PL'): 10, ('MI', 'PL'): 11, ('MO', 'PL'): 12, ('NO', 'PL'): 13, ('OP', 'PL'): 14, ('VO', 'PL'): '', ('AS', 'PL'): '', ('EJ', 'PL'): '', ('IT', 'PL'): '', ('IN', 'PL'): '', ('X', 'PL'): 5, ('LB', 'NF'): '', ('LD', 'NF'): '', ('LL', 'NF'): '', ('DE', 'NF'): '', ('AL', 'NF'): '', ('PO', 'NF'): '', ('RE', 'NF'): '', ('PA', 'NF'): '', ('NE', 'NF'): '', ('VE', 'NF'): '', ('UV', 'NF'): '', ('PH', 'NF'): '', ('GL', 'NF'): '', ('LZ', 'NF'): '', ('LH', 'NF'): '', ('RZ', 'NF'): '', ('PZ', 'NF'): '', ('VZ', 'NF'): '', ('HZ', 'NF'): '', ('GZ', 'NF'): '', ('LA', 'NF'): 11, ('NZ', 'NF'): 7, ('NA', 'NF'): 5, ('PL', 'NF'): 2, ('NF', 'NF'): 0, ('SF', 'NF'): 1, ('NS', 'NF'): 2, ('SS', 'NF'): 3, ('VI', 'NF'): 4, ('TA', 'NF'): 3, ('SV', 'NF'): 4, ('CL', 'NF'): 6, ('NC', 'NF'): 7, ('MC', 'NF'): 8, ('MI', 'NF'): 9, ('MO', 'NF'): 10, ('NO', 'NF'): 11, ('OP', 'NF'): 12, ('VO', 'NF'): '', ('AS', 'NF'): '', ('EJ', 'NF'): '', ('IT', 'NF'): '', ('IN', 'NF'): '', ('X', 'NF'): 5, ('LB', 'SF'): '', ('LD', 'SF'): '', ('LL', 'SF'): '', ('DE', 'SF'): '', ('AL', 'SF'): '', ('PO', 'SF'): '', ('RE', 'SF'): '', ('PA', 'SF'): '', ('NE', 'SF'): '', ('VE', 'SF'): '', ('UV', 'SF'): '', ('PH', 'SF'): '', ('GL', 'SF'): '', ('LZ', 'SF'): '', ('LH', 'SF'): '', ('RZ', 'SF'): '', ('PZ', 'SF'): '', ('VZ', 'SF'): '', ('HZ', 'SF'): '', ('GZ', 'SF'): '', ('LA', 'SF'): 11, ('NZ', 'SF'): 7, ('NA', 'SF'): 5, ('PL', 'SF'): 2, ('NF', 'SF'): 1, ('SF', 'SF'): 0, ('NS', 'SF'): 3, ('SS', 'SF'): 2, ('VI', 'SF'): 5, ('TA', 'SF'): 4, ('SV', 'SF'): 5, ('CL', 'SF'): 7, ('NC', 'SF'): 8, ('MC', 'SF'): 9, ('MI', 'SF'): 10, ('MO', 'SF'): 11, ('NO', 'SF'): 12, ('OP', 
'SF'): 13, ('VO', 'SF'): '', ('AS', 'SF'): '', ('EJ', 'SF'): '', ('IT', 'SF'): '', ('IN', 'SF'): '', ('X', 'SF'): 5, ('LB', 'NS'): '', ('LD', 'NS'): '', ('LL', 'NS'): '', ('DE', 'NS'): '', ('AL', 'NS'): '', ('PO', 'NS'): '', ('RE', 'NS'): '', ('PA', 'NS'): '', ('NE', 'NS'): '', ('VE', 'NS'): '', ('UV', 'NS'): '', ('PH', 'NS'): '', ('GL', 'NS'): '', ('LZ', 'NS'): '', ('LH', 'NS'): '', ('RZ', 'NS'): '', ('PZ', 'NS'): '', ('VZ', 'NS'): '', ('HZ', 'NS'): '', ('GZ', 'NS'): '', ('LA', 'NS'): 9, ('NZ', 'NS'): 6, ('NA', 'NS'): 4, ('PL', 'NS'): 4, ('NF', 'NS'): 2, ('SF', 'NS'): 3, ('NS', 'NS'): 0, ('SS', 'NS'): 1, ('VI', 'NS'): 2, ('TA', 'NS'): 1, ('SV', 'NS'): 2, ('CL', 'NS'): 4, ('NC', 'NS'): 5, ('MC', 'NS'): 6, ('MI', 'NS'): 7, ('MO', 'NS'): 8, ('NO', 'NS'): 9, ('OP', 'NS'): 10, ('VO', 'NS'): '', ('AS', 'NS'): '', ('EJ', 'NS'): '', ('IT', 'NS'): '', ('IN', 'NS'): '', ('X', 'NS'): 3, ('LB', 'SS'): '', ('LD', 'SS'): '', ('LL', 'SS'): '', ('DE', 'SS'): '', ('AL', 'SS'): '', ('PO', 'SS'): '', ('RE', 'SS'): '', ('PA', 'SS'): '', ('NE', 'SS'): '', ('VE', 'SS'): '', ('UV', 'SS'): '', ('PH', 'SS'): '', ('GL', 'SS'): '', ('LZ', 'SS'): '', ('LH', 'SS'): '', ('RZ', 'SS'): '', ('PZ', 'SS'): '', ('VZ', 'SS'): '', ('HZ', 'SS'): '', ('GZ', 'SS'): '', ('LA', 'SS'): 9, ('NZ', 'SS'): 7, ('NA', 'SS'): 5, ('PL', 'SS'): 4, ('NF', 'SS'): 3, ('SF', 'SS'): 2, ('NS', 'SS'): 1, ('SS', 'SS'): 0, ('VI', 'SS'): 3, ('TA', 'SS'): 2, ('SV', 'SS'): 3, ('CL', 'SS'): 5, ('NC', 'SS'): 6, ('MC', 'SS'): 7, ('MI', 'SS'): 8, ('MO', 'SS'): 9, ('NO', 'SS'): 10, ('OP', 'SS'): 11, ('VO', 'SS'): '', ('AS', 'SS'): '', ('EJ', 'SS'): '', ('IT', 'SS'): '', ('IN', 'SS'): '', ('X', 'SS'): 3, ('LB', 'VI'): '', ('LD', 'VI'): '', ('LL', 'VI'): '', ('DE', 'VI'): '', ('AL', 'VI'): '', ('PO', 'VI'): '', ('RE', 'VI'): '', ('PA', 'VI'): '', ('NE', 'VI'): '', ('VE', 'VI'): '', ('UV', 'VI'): '', ('PH', 'VI'): '', ('GL', 'VI'): '', ('LZ', 'VI'): '', ('LH', 'VI'): '', ('RZ', 'VI'): '', ('PZ', 'VI'): '', ('VZ', 'VI'): '', ('HZ', 
'VI'): '', ('GZ', 'VI'): '', ('LA', 'VI'): 9, ('NZ', 'VI'): 6, ('NA', 'VI'): 4, ('PL', 'VI'): 5, ('NF', 'VI'): 4, ('SF', 'VI'): 5, ('NS', 'VI'): 2, ('SS', 'VI'): 3, ('VI', 'VI'): 0, ('TA', 'VI'): 1, ('SV', 'VI'): 3, ('CL', 'VI'): 5, ('NC', 'VI'): 6, ('MC', 'VI'): 7, ('MI', 'VI'): 8, ('MO', 'VI'): 9, ('NO', 'VI'): 10, ('OP', 'VI'): 11, ('VO', 'VI'): '', ('AS', 'VI'): '', ('EJ', 'VI'): '', ('IT', 'VI'): '', ('IN', 'VI'): '', ('X', 'VI'): 3, ('LB', 'TA'): '', ('LD', 'TA'): '', ('LL', 'TA'): '', ('DE', 'TA'): '', ('AL', 'TA'): '', ('PO', 'TA'): '', ('RE', 'TA'): '', ('PA', 'TA'): '', ('NE', 'TA'): '', ('VE', 'TA'): '', ('UV', 'TA'): '', ('PH', 'TA'): '', ('GL', 'TA'): '', ('LZ', 'TA'): '', ('LH', 'TA'): '', ('RZ', 'TA'): '', ('PZ', 'TA'): '', ('VZ', 'TA'): '', ('HZ', 'TA'): '', ('GZ', 'TA'): '', ('LA', 'TA'): 9, ('NZ', 'TA'): 5, ('NA', 'TA'): 3, ('PL', 'TA'): 4, ('NF', 'TA'): 3, ('SF', 'TA'): 4, ('NS', 'TA'): 1, ('SS', 'TA'): 2, ('VI', 'TA'): 1, ('TA', 'TA'): 0, ('SV', 'TA'): 3, ('CL', 'TA'): 5, ('NC', 'TA'): 6, ('MC', 'TA'): 7, ('MI', 'TA'): 8, ('MO', 'TA'): 9, ('NO', 'TA'): 10, ('OP', 'TA'): 11, ('VO', 'TA'): '', ('AS', 'TA'): '', ('EJ', 'TA'): '', ('IT', 'TA'): '', ('IN', 'TA'): '', ('X', 'TA'): 3, ('LB', 'SV'): '', ('LD', 'SV'): '', ('LL', 'SV'): '', ('DE', 'SV'): '', ('AL', 'SV'): '', ('PO', 'SV'): '', ('RE', 'SV'): '', ('PA', 'SV'): '', ('NE', 'SV'): '', ('VE', 'SV'): '', ('UV', 'SV'): '', ('PH', 'SV'): '', ('GL', 'SV'): '', ('LZ', 'SV'): '', ('LH', 'SV'): '', ('RZ', 'SV'): '', ('PZ', 'SV'): '', ('VZ', 'SV'): '', ('HZ', 'SV'): '', ('GZ', 'SV'): '', ('LA', 'SV'): 8, ('NZ', 'SV'): 4, ('NA', 'SV'): 2, ('PL', 'SV'): 6, ('NF', 'SV'): 4, ('SF', 'SV'): 5, ('NS', 'SV'): 2, ('SS', 'SV'): 3, ('VI', 'SV'): 3, ('TA', 'SV'): 3, ('SV', 'SV'): 0, ('CL', 'SV'): 2, ('NC', 'SV'): 3, ('MC', 'SV'): 4, ('MI', 'SV'): 5, ('MO', 'SV'): 6, ('NO', 'SV'): 7, ('OP', 'SV'): 8, ('VO', 'SV'): '', ('AS', 'SV'): '', ('EJ', 'SV'): '', ('IT', 'SV'): '', ('IN', 'SV'): '', ('X', 'SV'): 2, ('LB', 
'CL'): '', ('LD', 'CL'): '', ('LL', 'CL'): '', ('DE', 'CL'): '', ('AL', 'CL'): '', ('PO', 'CL'): '', ('RE', 'CL'): '', ('PA', 'CL'): '', ('NE', 'CL'): '', ('VE', 'CL'): '', ('UV', 'CL'): '', ('PH', 'CL'): '', ('GL', 'CL'): '', ('LZ', 'CL'): '', ('LH', 'CL'): '', ('RZ', 'CL'): '', ('PZ', 'CL'): '', ('VZ', 'CL'): '', ('HZ', 'CL'): '', ('GZ', 'CL'): '', ('LA', 'CL'): 11, ('NZ', 'CL'): 6, ('NA', 'CL'): 4, ('PL', 'CL'): 8, ('NF', 'CL'): 6, ('SF', 'CL'): 7, ('NS', 'CL'): 4, ('SS', 'CL'): 5, ('VI', 'CL'): 5, ('TA', 'CL'): 5, ('SV', 'CL'): 2, ('CL', 'CL'): 0, ('NC', 'CL'): 1, ('MC', 'CL'): 2, ('MI', 'CL'): 3, ('MO', 'CL'): 4, ('NO', 'CL'): 5, ('OP', 'CL'): 6, ('VO', 'CL'): '', ('AS', 'CL'): '', ('EJ', 'CL'): '', ('IT', 'CL'): '', ('IN', 'CL'): '', ('X', 'CL'): 5, ('LB', 'NC'): '', ('LD', 'NC'): '', ('LL', 'NC'): '', ('DE', 'NC'): '', ('AL', 'NC'): '', ('PO', 'NC'): '', ('RE', 'NC'): '', ('PA', 'NC'): '', ('NE', 'NC'): '', ('VE', 'NC'): '', ('UV', 'NC'): '', ('PH', 'NC'): '', ('GL', 'NC'): '', ('LZ', 'NC'): '', ('LH', 'NC'): '', ('RZ', 'NC'): '', ('PZ', 'NC'): '', ('VZ', 'NC'): '', ('HZ', 'NC'): '', ('GZ', 'NC'): '', ('LA', 'NC'): 11, ('NZ', 'NC'): 7, ('NA', 'NC'): 5, ('PL', 'NC'): 9, ('NF', 'NC'): 7, ('SF', 'NC'): 8, ('NS', 'NC'): 5, ('SS', 'NC'): 6, ('VI', 'NC'): 6, ('TA', 'NC'): 6, ('SV', 'NC'): 3, ('CL', 'NC'): 1, ('NC', 'NC'): 0, ('MC', 'NC'): 1, ('MI', 'NC'): 2, ('MO', 'NC'): 3, ('NO', 'NC'): 4, ('OP', 'NC'): 5, ('VO', 'NC'): '', ('AS', 'NC'): '', ('EJ', 'NC'): '', ('IT', 'NC'): '', ('IN', 'NC'): '', ('X', 'NC'): 5, ('LB', 'MC'): '', ('LD', 'MC'): '', ('LL', 'MC'): '', ('DE', 'MC'): '', ('AL', 'MC'): '', ('PO', 'MC'): '', ('RE', 'MC'): '', ('PA', 'MC'): '', ('NE', 'MC'): '', ('VE', 'MC'): '', ('UV', 'MC'): '', ('PH', 'MC'): '', ('GL', 'MC'): '', ('LZ', 'MC'): '', ('LH', 'MC'): '', ('RZ', 'MC'): '', ('PZ', 'MC'): '', ('VZ', 'MC'): '', ('HZ', 'MC'): '', ('GZ', 'MC'): '', ('LA', 'MC'): 10, ('NZ', 'MC'): 8, ('NA', 'MC'): 6, ('PL', 'MC'): 10, ('NF', 'MC'): 8, ('SF', 'MC'): 
9, ('NS', 'MC'): 6, ('SS', 'MC'): 7, ('VI', 'MC'): 7, ('TA', 'MC'): 7, ('SV', 'MC'): 4, ('CL', 'MC'): 2, ('NC', 'MC'): 1, ('MC', 'MC'): 0, ('MI', 'MC'): 1, ('MO', 'MC'): 2, ('NO', 'MC'): 3, ('OP', 'MC'): 4, ('VO', 'MC'): '', ('AS', 'MC'): '', ('EJ', 'MC'): '', ('IT', 'MC'): '', ('IN', 'MC'): '', ('X', 'MC'): 4, ('LB', 'MI'): '', ('LD', 'MI'): '', ('LL', 'MI'): '', ('DE', 'MI'): '', ('AL', 'MI'): '', ('PO', 'MI'): '', ('RE', 'MI'): '', ('PA', 'MI'): '', ('NE', 'MI'): '', ('VE', 'MI'): '', ('UV', 'MI'): '', ('PH', 'MI'): '', ('GL', 'MI'): '', ('LZ', 'MI'): '', ('LH', 'MI'): '', ('RZ', 'MI'): '', ('PZ', 'MI'): '', ('VZ', 'MI'): '', ('HZ', 'MI'): '', ('GZ', 'MI'): '', ('LA', 'MI'): 10, ('NZ', 'MI'): 9, ('NA', 'MI'): 7, ('PL', 'MI'): 11, ('NF', 'MI'): 9, ('SF', 'MI'): 10, ('NS', 'MI'): 7, ('SS', 'MI'): 8, ('VI', 'MI'): 8, ('TA', 'MI'): 8, ('SV', 'MI'): 5, ('CL', 'MI'): 3, ('NC', 'MI'): 2, ('MC', 'MI'): 1, ('MI', 'MI'): 0, ('MO', 'MI'): 1, ('NO', 'MI'): 2, ('OP', 'MI'): 3, ('VO', 'MI'): '', ('AS', 'MI'): '', ('EJ', 'MI'): '', ('IT', 'MI'): '', ('IN', 'MI'): '', ('X', 'MI'): 4, ('LB', 'MO'): '', ('LD', 'MO'): '', ('LL', 'MO'): '', ('DE', 'MO'): '', ('AL', 'MO'): '', ('PO', 'MO'): '', ('RE', 'MO'): '', ('PA', 'MO'): '', ('NE', 'MO'): '', ('VE', 'MO'): '', ('UV', 'MO'): '', ('PH', 'MO'): '', ('GL', 'MO'): '', ('LZ', 'MO'): '', ('LH', 'MO'): '', ('RZ', 'MO'): '', ('PZ', 'MO'): '', ('VZ', 'MO'): '', ('HZ', 'MO'): '', ('GZ', 'MO'): '', ('LA', 'MO'): 10, ('NZ', 'MO'): 10, ('NA', 'MO'): 8, ('PL', 'MO'): 12, ('NF', 'MO'): 10, ('SF', 'MO'): 11, ('NS', 'MO'): 8, ('SS', 'MO'): 9, ('VI', 'MO'): 9, ('TA', 'MO'): 9, ('SV', 'MO'): 6, ('CL', 'MO'): 4, ('NC', 'MO'): 3, ('MC', 'MO'): 2, ('MI', 'MO'): 1, ('MO', 'MO'): 0, ('NO', 'MO'): 1, ('OP', 'MO'): 2, ('VO', 'MO'): '', ('AS', 'MO'): '', ('EJ', 'MO'): '', ('IT', 'MO'): '', ('IN', 'MO'): '', ('X', 'MO'): 4, ('LB', 'NO'): '', ('LD', 'NO'): '', ('LL', 'NO'): '', ('DE', 'NO'): '', ('AL', 'NO'): '', ('PO', 'NO'): '', ('RE', 'NO'): '', ('PA', 
'NO'): '', ('NE', 'NO'): '', ('VE', 'NO'): '', ('UV', 'NO'): '', ('PH', 'NO'): '', ('GL', 'NO'): '', ('LZ', 'NO'): '', ('LH', 'NO'): '', ('RZ', 'NO'): '', ('PZ', 'NO'): '', ('VZ', 'NO'): '', ('HZ', 'NO'): '', ('GZ', 'NO'): '', ('LA', 'NO'): 11, ('NZ', 'NO'): 11, ('NA', 'NO'): 9, ('PL', 'NO'): 13, ('NF', 'NO'): 11, ('SF', 'NO'): 12, ('NS', 'NO'): 9, ('SS', 'NO'): 10, ('VI', 'NO'): 10, ('TA', 'NO'): 10, ('SV', 'NO'): 7, ('CL', 'NO'): 5, ('NC', 'NO'): 4, ('MC', 'NO'): 3, ('MI', 'NO'): 2, ('MO', 'NO'): 1, ('NO', 'NO'): 0, ('OP', 'NO'): 1, ('VO', 'NO'): '', ('AS', 'NO'): '', ('EJ', 'NO'): '', ('IT', 'NO'): '', ('IN', 'NO'): '', ('X', 'NO'): 5, ('LB', 'OP'): '', ('LD', 'OP'): '', ('LL', 'OP'): '', ('DE', 'OP'): '', ('AL', 'OP'): '', ('PO', 'OP'): '', ('RE', 'OP'): '', ('PA', 'OP'): '', ('NE', 'OP'): '', ('VE', 'OP'): '', ('UV', 'OP'): '', ('PH', 'OP'): '', ('GL', 'OP'): '', ('LZ', 'OP'): '', ('LH', 'OP'): '', ('RZ', 'OP'): '', ('PZ', 'OP'): '', ('VZ', 'OP'): '', ('HZ', 'OP'): '', ('GZ', 'OP'): '', ('LA', 'OP'): 11, ('NZ', 'OP'): 12, ('NA', 'OP'): 10, ('PL', 'OP'): 14, ('NF', 'OP'): 12, ('SF', 'OP'): 13, ('NS', 'OP'): 10, ('SS', 'OP'): 11, ('VI', 'OP'): 11, ('TA', 'OP'): 11, ('SV', 'OP'): 8, ('CL', 'OP'): 6, ('NC', 'OP'): 5, ('MC', 'OP'): 4, ('MI', 'OP'): 3, ('MO', 'OP'): 2, ('NO', 'OP'): 1, ('OP', 'OP'): 0, ('VO', 'OP'): '', ('AS', 'OP'): '', ('EJ', 'OP'): '', ('IT', 'OP'): '', ('IN', 'OP'): '', ('X', 'OP'): 5, ('LB', 'VO'): '', ('LD', 'VO'): '', ('LL', 'VO'): '', ('DE', 'VO'): '', ('AL', 'VO'): '', ('PO', 'VO'): '', ('RE', 'VO'): '', ('PA', 'VO'): '', ('NE', 'VO'): '', ('VE', 'VO'): '', ('UV', 'VO'): '', ('PH', 'VO'): '', ('GL', 'VO'): '', ('LZ', 'VO'): '', ('LH', 'VO'): '', ('RZ', 'VO'): '', ('PZ', 'VO'): '', ('VZ', 'VO'): '', ('HZ', 'VO'): '', ('GZ', 'VO'): '', ('LA', 'VO'): '', ('NZ', 'VO'): '', ('NA', 'VO'): '', ('PL', 'VO'): '', ('NF', 'VO'): '', ('SF', 'VO'): '', ('NS', 'VO'): '', ('SS', 'VO'): '', ('VI', 'VO'): '', ('TA', 'VO'): '', ('SV', 'VO'): '', ('CL', 
'VO'): '', ('NC', 'VO'): '', ('MC', 'VO'): '', ('MI', 'VO'): '', ('MO', 'VO'): '', ('NO', 'VO'): '', ('OP', 'VO'): '', ('VO', 'VO'): 0, ('AS', 'VO'): 8, ('EJ', 'VO'): 10, ('IT', 'VO'): 7, ('IN', 'VO'): 10, ('X', 'VO'): 3, ('LB', 'AS'): '', ('LD', 'AS'): '', ('LL', 'AS'): '', ('DE', 'AS'): '', ('AL', 'AS'): '', ('PO', 'AS'): '', ('RE', 'AS'): '', ('PA', 'AS'): '', ('NE', 'AS'): '', ('VE', 'AS'): '', ('UV', 'AS'): '', ('PH', 'AS'): '', ('GL', 'AS'): '', ('LZ', 'AS'): '', ('LH', 'AS'): '', ('RZ', 'AS'): '', ('PZ', 'AS'): '', ('VZ', 'AS'): '', ('HZ', 'AS'): '', ('GZ', 'AS'): '', ('LA', 'AS'): '', ('NZ', 'AS'): '', ('NA', 'AS'): '', ('PL', 'AS'): '', ('NF', 'AS'): '', ('SF', 'AS'): '', ('NS', 'AS'): '', ('SS', 'AS'): '', ('VI', 'AS'): '', ('TA', 'AS'): '', ('SV', 'AS'): '', ('CL', 'AS'): '', ('NC', 'AS'): '', ('MC', 'AS'): '', ('MI', 'AS'): '', ('MO', 'AS'): '', ('NO', 'AS'): '', ('OP', 'AS'): '', ('VO', 'AS'): 8, ('AS', 'AS'): 0, ('EJ', 'AS'): 11, ('IT', 'AS'): 8, ('IN', 'AS'): 11, ('X', 'AS'): 4, ('LB', 'EJ'): '', ('LD', 'EJ'): '', ('LL', 'EJ'): '', ('DE', 'EJ'): '', ('AL', 'EJ'): '', ('PO', 'EJ'): '', ('RE', 'EJ'): '', ('PA', 'EJ'): '', ('NE', 'EJ'): '', ('VE', 'EJ'): '', ('UV', 'EJ'): '', ('PH', 'EJ'): '', ('GL', 'EJ'): 5, ('LZ', 'EJ'): '', ('LH', 'EJ'): '', ('RZ', 'EJ'): '', ('PZ', 'EJ'): '', ('VZ', 'EJ'): '', ('HZ', 'EJ'): '', ('GZ', 'EJ'): 4, ('LA', 'EJ'): '', ('NZ', 'EJ'): '', ('NA', 'EJ'): '', ('PL', 'EJ'): '', ('NF', 'EJ'): '', ('SF', 'EJ'): '', ('NS', 'EJ'): '', ('SS', 'EJ'): '', ('VI', 'EJ'): '', ('TA', 'EJ'): '', ('SV', 'EJ'): '', ('CL', 'EJ'): '', ('NC', 'EJ'): '', ('MC', 'EJ'): '', ('MI', 'EJ'): '', ('MO', 'EJ'): '', ('NO', 'EJ'): '', ('OP', 'EJ'): '', ('VO', 'EJ'): 10, ('AS', 'EJ'): 11, ('EJ', 'EJ'): 0, ('IT', 'EJ'): 3, ('IN', 'EJ'): 13, ('X', 'EJ'): 6, ('LB', 'IT'): '', ('LD', 'IT'): '', ('LL', 'IT'): '', ('DE', 'IT'): '', ('AL', 'IT'): '', ('PO', 'IT'): '', ('RE', 'IT'): '', ('PA', 'IT'): '', ('NE', 'IT'): '', ('VE', 'IT'): '', ('UV', 'IT'): '', ('PH', 
'IT'): '', ('GL', 'IT'): 8, ('LZ', 'IT'): '', ('LH', 'IT'): '', ('RZ', 'IT'): '', ('PZ', 'IT'): '', ('VZ', 'IT'): '', ('HZ', 'IT'): '', ('GZ', 'IT'): 7, ('LA', 'IT'): '', ('NZ', 'IT'): '', ('NA', 'IT'): '', ('PL', 'IT'): '', ('NF', 'IT'): '', ('SF', 'IT'): '', ('NS', 'IT'): '', ('SS', 'IT'): '', ('VI', 'IT'): '', ('TA', 'IT'): '', ('SV', 'IT'): '', ('CL', 'IT'): '', ('NC', 'IT'): '', ('MC', 'IT'): '', ('MI', 'IT'): '', ('MO', 'IT'): '', ('NO', 'IT'): '', ('OP', 'IT'): '', ('VO', 'IT'): 7, ('AS', 'IT'): 8, ('EJ', 'IT'): 3, ('IT', 'IT'): 0, ('IN', 'IT'): 10, ('X', 'IT'): 3, ('LB', 'IN'): '', ('LD', 'IN'): '', ('LL', 'IN'): '', ('DE', 'IN'): '', ('AL', 'IN'): '', ('PO', 'IN'): '', ('RE', 'IN'): '', ('PA', 'IN'): '', ('NE', 'IN'): '', ('VE', 'IN'): '', ('UV', 'IN'): '', ('PH', 'IN'): '', ('GL', 'IN'): '', ('LZ', 'IN'): '', ('LH', 'IN'): '', ('RZ', 'IN'): '', ('PZ', 'IN'): '', ('VZ', 'IN'): '', ('HZ', 'IN'): '', ('GZ', 'IN'): '', ('LA', 'IN'): '', ('NZ', 'IN'): '', ('NA', 'IN'): '', ('PL', 'IN'): '', ('NF', 'IN'): '', ('SF', 'IN'): '', ('NS', 'IN'): '', ('SS', 'IN'): '', ('VI', 'IN'): '', ('TA', 'IN'): '', ('SV', 'IN'): '', ('CL', 'IN'): '', ('NC', 'IN'): '', ('MC', 'IN'): '', ('MI', 'IN'): '', ('MO', 'IN'): '', ('NO', 'IN'): '', ('OP', 'IN'): '', ('VO', 'IN'): 10, ('AS', 'IN'): 11, ('EJ', 'IN'): 13, ('IT', 'IN'): 10, ('IN', 'IN'): 0, ('X', 'IN'): 6, ('LB', 'X'): 4, ('LD', 'X'): 4, ('LL', 'X'): 4, ('DE', 'X'): 4, ('AL', 'X'): 4, ('PO', 'X'): 4, ('RE', 'X'): 4, ('PA', 'X'): 4, ('NE', 'X'): 4, ('VE', 'X'): 4, ('UV', 'X'): 4, ('PH', 'X'): 4, ('GL', 'X'): 2, ('LZ', 'X'): 2, ('LH', 'X'): 1, ('RZ', 'X'): 2, ('PZ', 'X'): 2, ('VZ', 'X'): 2, ('HZ', 'X'): 2, ('GZ', 'X'): 2, ('LA', 'X'): 5, ('NZ', 'X'): 3, ('NA', 'X'): 3, ('PL', 'X'): 5, ('NF', 'X'): 5, ('SF', 'X'): 5, ('NS', 'X'): 3, ('SS', 'X'): 3, ('VI', 'X'): 3, ('TA', 'X'): 3, ('SV', 'X'): 2, ('CL', 'X'): 5, ('NC', 'X'): 5, ('MC', 'X'): 4, ('MI', 'X'): 4, ('MO', 'X'): 4, ('NO', 'X'): 5, ('OP', 'X'): 5, ('VO', 'X'): 3, ('AS', 
'X'): 4, ('EJ', 'X'): 6, ('IT', 'X'): 3, ('IN', 'X'): 6, ('X', 'X'): 0}
identity_feature_distance_map = {('LB', 'LB'): 1, ('LD', 'LB'): 1, ('LL', 'LB'): 1, ('DE', 'LB'): 1, ('AL', 'LB'): 1, ('PO', 'LB'): 1, ('RE', 'LB'): 1, ('PA', 'LB'): 1, ('NE', 'LB'): 1, ('VE', 'LB'): 1, ('UV', 'LB'): 1, ('PH', 'LB'): 1, ('GL', 'LB'): 1, ('LZ', 'LB'): 1, ('LH', 'LB'): 1, ('RZ', 'LB'): 1, ('PZ', 'LB'): 1, ('VZ', 'LB'): 1, ('HZ', 'LB'): 1, ('GZ', 'LB'): 1, ('LA', 'LB'): '', ('NZ', 'LB'): '', ('NA', 'LB'): '', ('PL', 'LB'): '', ('NF', 'LB'): '', ('SF', 'LB'): '', ('NS', 'LB'): '', ('SS', 'LB'): '', ('VI', 'LB'): '', ('TA', 'LB'): '', ('SV', 'LB'): '', ('CL', 'LB'): '', ('NC', 'LB'): '', ('MC', 'LB'): '', ('MI', 'LB'): '', ('MO', 'LB'): '', ('NO', 'LB'): '', ('OP', 'LB'): '', ('VO', 'LB'): '', ('AS', 'LB'): '', ('EJ', 'LB'): '', ('IT', 'LB'): '', ('IN', 'LB'): '', ('X', 'LB'): 1, ('LB', 'LD'): 1, ('LD', 'LD'): 1, ('LL', 'LD'): 1, ('DE', 'LD'): 1, ('AL', 'LD'): 1, ('PO', 'LD'): 1, ('RE', 'LD'): 1, ('PA', 'LD'): 1, ('NE', 'LD'): 1, ('VE', 'LD'): 1, ('UV', 'LD'): 1, ('PH', 'LD'): 1, ('GL', 'LD'): 1, ('LZ', 'LD'): 1, ('LH', 'LD'): 1, ('RZ', 'LD'): 1, ('PZ', 'LD'): 1, ('VZ', 'LD'): 1, ('HZ', 'LD'): 1, ('GZ', 'LD'): 1, ('LA', 'LD'): '', ('NZ', 'LD'): '', ('NA', 'LD'): '', ('PL', 'LD'): '', ('NF', 'LD'): '', ('SF', 'LD'): '', ('NS', 'LD'): '', ('SS', 'LD'): '', ('VI', 'LD'): '', ('TA', 'LD'): '', ('SV', 'LD'): '', ('CL', 'LD'): '', ('NC', 'LD'): '', ('MC', 'LD'): '', ('MI', 'LD'): '', ('MO', 'LD'): '', ('NO', 'LD'): '', ('OP', 'LD'): '', ('VO', 'LD'): '', ('AS', 'LD'): '', ('EJ', 'LD'): '', ('IT', 'LD'): '', ('IN', 'LD'): '', ('X', 'LD'): 1, ('LB', 'LL'): 1, ('LD', 'LL'): 1, ('LL', 'LL'): 1, ('DE', 'LL'): 1, ('AL', 'LL'): 1, ('PO', 'LL'): 1, ('RE', 'LL'): 1, ('PA', 'LL'): 1, ('NE', 'LL'): 1, ('VE', 'LL'): 1, ('UV', 'LL'): 1, ('PH', 'LL'): 1, ('GL', 'LL'): 1, ('LZ', 'LL'): 1, ('LH', 'LL'): 1, ('RZ', 'LL'): 1, ('PZ', 'LL'): 1, ('VZ', 'LL'): 1, ('HZ', 'LL'): 1, ('GZ', 'LL'): 1, ('LA', 'LL'): '', ('NZ', 'LL'): '', ('NA', 'LL'): '', ('PL', 'LL'): '', ('NF', 'LL'): 
'', ('SF', 'LL'): '', ('NS', 'LL'): '', ('SS', 'LL'): '', ('VI', 'LL'): '', ('TA', 'LL'): '', ('SV', 'LL'): '', ('CL', 'LL'): '', ('NC', 'LL'): '', ('MC', 'LL'): '', ('MI', 'LL'): '', ('MO', 'LL'): '', ('NO', 'LL'): '', ('OP', 'LL'): '', ('VO', 'LL'): '', ('AS', 'LL'): '', ('EJ', 'LL'): '', ('IT', 'LL'): '', ('IN', 'LL'): '', ('X', 'LL'): 1, ('LB', 'DE'): 1, ('LD', 'DE'): 1, ('LL', 'DE'): 1, ('DE', 'DE'): 1, ('AL', 'DE'): 1, ('PO', 'DE'): 1, ('RE', 'DE'): 1, ('PA', 'DE'): 1, ('NE', 'DE'): 1, ('VE', 'DE'): 1, ('UV', 'DE'): 1, ('PH', 'DE'): 1, ('GL', 'DE'): 1, ('LZ', 'DE'): 1, ('LH', 'DE'): 1, ('RZ', 'DE'): 1, ('PZ', 'DE'): 1, ('VZ', 'DE'): 1, ('HZ', 'DE'): 1, ('GZ', 'DE'): 1, ('LA', 'DE'): '', ('NZ', 'DE'): '', ('NA', 'DE'): '', ('PL', 'DE'): '', ('NF', 'DE'): '', ('SF', 'DE'): '', ('NS', 'DE'): '', ('SS', 'DE'): '', ('VI', 'DE'): '', ('TA', 'DE'): '', ('SV', 'DE'): '', ('CL', 'DE'): '', ('NC', 'DE'): '', ('MC', 'DE'): '', ('MI', 'DE'): '', ('MO', 'DE'): '', ('NO', 'DE'): '', ('OP', 'DE'): '', ('VO', 'DE'): '', ('AS', 'DE'): '', ('EJ', 'DE'): '', ('IT', 'DE'): '', ('IN', 'DE'): '', ('X', 'DE'): 1, ('LB', 'AL'): 1, ('LD', 'AL'): 1, ('LL', 'AL'): 1, ('DE', 'AL'): 1, ('AL', 'AL'): 1, ('PO', 'AL'): 1, ('RE', 'AL'): 1, ('PA', 'AL'): 1, ('NE', 'AL'): 1, ('VE', 'AL'): 1, ('UV', 'AL'): 1, ('PH', 'AL'): 1, ('GL', 'AL'): 1, ('LZ', 'AL'): 1, ('LH', 'AL'): 1, ('RZ', 'AL'): 1, ('PZ', 'AL'): 1, ('VZ', 'AL'): 1, ('HZ', 'AL'): 1, ('GZ', 'AL'): 1, ('LA', 'AL'): '', ('NZ', 'AL'): '', ('NA', 'AL'): '', ('PL', 'AL'): '', ('NF', 'AL'): '', ('SF', 'AL'): '', ('NS', 'AL'): '', ('SS', 'AL'): '', ('VI', 'AL'): '', ('TA', 'AL'): '', ('SV', 'AL'): '', ('CL', 'AL'): '', ('NC', 'AL'): '', ('MC', 'AL'): '', ('MI', 'AL'): '', ('MO', 'AL'): '', ('NO', 'AL'): '', ('OP', 'AL'): '', ('VO', 'AL'): '', ('AS', 'AL'): '', ('EJ', 'AL'): '', ('IT', 'AL'): '', ('IN', 'AL'): '', ('X', 'AL'): 1, ('LB', 'PO'): 1, ('LD', 'PO'): 1, ('LL', 'PO'): 1, ('DE', 'PO'): 1, ('AL', 'PO'): 1, ('PO', 'PO'): 1, ('RE', 'PO'): 
1, ('PA', 'PO'): 1, ('NE', 'PO'): 1, ('VE', 'PO'): 1, ('UV', 'PO'): 1, ('PH', 'PO'): 1, ('GL', 'PO'): 1, ('LZ', 'PO'): 1, ('LH', 'PO'): 1, ('RZ', 'PO'): 1, ('PZ', 'PO'): 1, ('VZ', 'PO'): 1, ('HZ', 'PO'): 1, ('GZ', 'PO'): 1, ('LA', 'PO'): '', ('NZ', 'PO'): '', ('NA', 'PO'): '', ('PL', 'PO'): '', ('NF', 'PO'): '', ('SF', 'PO'): '', ('NS', 'PO'): '', ('SS', 'PO'): '', ('VI', 'PO'): '', ('TA', 'PO'): '', ('SV', 'PO'): '', ('CL', 'PO'): '', ('NC', 'PO'): '', ('MC', 'PO'): '', ('MI', 'PO'): '', ('MO', 'PO'): '', ('NO', 'PO'): '', ('OP', 'PO'): '', ('VO', 'PO'): '', ('AS', 'PO'): '', ('EJ', 'PO'): '', ('IT', 'PO'): '', ('IN', 'PO'): '', ('X', 'PO'): 1, ('LB', 'RE'): 1, ('LD', 'RE'): 1, ('LL', 'RE'): 1, ('DE', 'RE'): 1, ('AL', 'RE'): 1, ('PO', 'RE'): 1, ('RE', 'RE'): 1, ('PA', 'RE'): 1, ('NE', 'RE'): 1, ('VE', 'RE'): 1, ('UV', 'RE'): 1, ('PH', 'RE'): 1, ('GL', 'RE'): 1, ('LZ', 'RE'): 1, ('LH', 'RE'): 1, ('RZ', 'RE'): 1, ('PZ', 'RE'): 1, ('VZ', 'RE'): 1, ('HZ', 'RE'): 1, ('GZ', 'RE'): 1, ('LA', 'RE'): '', ('NZ', 'RE'): '', ('NA', 'RE'): '', ('PL', 'RE'): '', ('NF', 'RE'): '', ('SF', 'RE'): '', ('NS', 'RE'): '', ('SS', 'RE'): '', ('VI', 'RE'): '', ('TA', 'RE'): '', ('SV', 'RE'): '', ('CL', 'RE'): '', ('NC', 'RE'): '', ('MC', 'RE'): '', ('MI', 'RE'): '', ('MO', 'RE'): '', ('NO', 'RE'): '', ('OP', 'RE'): '', ('VO', 'RE'): '', ('AS', 'RE'): '', ('EJ', 'RE'): '', ('IT', 'RE'): '', ('IN', 'RE'): '', ('X', 'RE'): 1, ('LB', 'PA'): 1, ('LD', 'PA'): 1, ('LL', 'PA'): 1, ('DE', 'PA'): 1, ('AL', 'PA'): 1, ('PO', 'PA'): 1, ('RE', 'PA'): 1, ('PA', 'PA'): 1, ('NE', 'PA'): 1, ('VE', 'PA'): 1, ('UV', 'PA'): 1, ('PH', 'PA'): 1, ('GL', 'PA'): 1, ('LZ', 'PA'): 1, ('LH', 'PA'): 1, ('RZ', 'PA'): 1, ('PZ', 'PA'): 1, ('VZ', 'PA'): 1, ('HZ', 'PA'): 1, ('GZ', 'PA'): 1, ('LA', 'PA'): '', ('NZ', 'PA'): '', ('NA', 'PA'): '', ('PL', 'PA'): '', ('NF', 'PA'): '', ('SF', 'PA'): '', ('NS', 'PA'): '', ('SS', 'PA'): '', ('VI', 'PA'): '', ('TA', 'PA'): '', ('SV', 'PA'): '', ('CL', 'PA'): '', ('NC', 'PA'): '', 
('MC', 'PA'): '', ('MI', 'PA'): '', ('MO', 'PA'): '', ('NO', 'PA'): '', ('OP', 'PA'): '', ('VO', 'PA'): '', ('AS', 'PA'): '', ('EJ', 'PA'): '', ('IT', 'PA'): '', ('IN', 'PA'): '', ('X', 'PA'): 1, ('LB', 'NE'): 1, ('LD', 'NE'): 1, ('LL', 'NE'): 1, ('DE', 'NE'): 1, ('AL', 'NE'): 1, ('PO', 'NE'): 1, ('RE', 'NE'): 1, ('PA', 'NE'): 1, ('NE', 'NE'): 1, ('VE', 'NE'): 1, ('UV', 'NE'): 1, ('PH', 'NE'): 1, ('GL', 'NE'): 1, ('LZ', 'NE'): 1, ('LH', 'NE'): 1, ('RZ', 'NE'): 1, ('PZ', 'NE'): 1, ('VZ', 'NE'): 1, ('HZ', 'NE'): 1, ('GZ', 'NE'): 1, ('LA', 'NE'): '', ('NZ', 'NE'): '', ('NA', 'NE'): '', ('PL', 'NE'): '', ('NF', 'NE'): '', ('SF', 'NE'): '', ('NS', 'NE'): '', ('SS', 'NE'): '', ('VI', 'NE'): '', ('TA', 'NE'): '', ('SV', 'NE'): '', ('CL', 'NE'): '', ('NC', 'NE'): '', ('MC', 'NE'): '', ('MI', 'NE'): '', ('MO', 'NE'): '', ('NO', 'NE'): '', ('OP', 'NE'): '', ('VO', 'NE'): '', ('AS', 'NE'): '', ('EJ', 'NE'): '', ('IT', 'NE'): '', ('IN', 'NE'): '', ('X', 'NE'): 1, ('LB', 'VE'): 1, ('LD', 'VE'): 1, ('LL', 'VE'): 1, ('DE', 'VE'): 1, ('AL', 'VE'): 1, ('PO', 'VE'): 1, ('RE', 'VE'): 1, ('PA', 'VE'): 1, ('NE', 'VE'): 1, ('VE', 'VE'): 1, ('UV', 'VE'): 1, ('PH', 'VE'): 1, ('GL', 'VE'): 1, ('LZ', 'VE'): 1, ('LH', 'VE'): 1, ('RZ', 'VE'): 1, ('PZ', 'VE'): 1, ('VZ', 'VE'): 1, ('HZ', 'VE'): 1, ('GZ', 'VE'): 1, ('LA', 'VE'): '', ('NZ', 'VE'): '', ('NA', 'VE'): '', ('PL', 'VE'): '', ('NF', 'VE'): '', ('SF', 'VE'): '', ('NS', 'VE'): '', ('SS', 'VE'): '', ('VI', 'VE'): '', ('TA', 'VE'): '', ('SV', 'VE'): '', ('CL', 'VE'): '', ('NC', 'VE'): '', ('MC', 'VE'): '', ('MI', 'VE'): '', ('MO', 'VE'): '', ('NO', 'VE'): '', ('OP', 'VE'): '', ('VO', 'VE'): '', ('AS', 'VE'): '', ('EJ', 'VE'): '', ('IT', 'VE'): '', ('IN', 'VE'): '', ('X', 'VE'): 1, ('LB', 'UV'): 1, ('LD', 'UV'): 1, ('LL', 'UV'): 1, ('DE', 'UV'): 1, ('AL', 'UV'): 1, ('PO', 'UV'): 1, ('RE', 'UV'): 1, ('PA', 'UV'): 1, ('NE', 'UV'): 1, ('VE', 'UV'): 1, ('UV', 'UV'): 1, ('PH', 'UV'): 1, ('GL', 'UV'): 1, ('LZ', 'UV'): 1, ('LH', 'UV'): 1, ('RZ', 
'UV'): 1, ('PZ', 'UV'): 1, ('VZ', 'UV'): 1, ('HZ', 'UV'): 1, ('GZ', 'UV'): 1, ('LA', 'UV'): '', ('NZ', 'UV'): '', ('NA', 'UV'): '', ('PL', 'UV'): '', ('NF', 'UV'): '', ('SF', 'UV'): '', ('NS', 'UV'): '', ('SS', 'UV'): '', ('VI', 'UV'): '', ('TA', 'UV'): '', ('SV', 'UV'): '', ('CL', 'UV'): '', ('NC', 'UV'): '', ('MC', 'UV'): '', ('MI', 'UV'): '', ('MO', 'UV'): '', ('NO', 'UV'): '', ('OP', 'UV'): '', ('VO', 'UV'): '', ('AS', 'UV'): '', ('EJ', 'UV'): '', ('IT', 'UV'): '', ('IN', 'UV'): '', ('X', 'UV'): 1, ('LB', 'PH'): 1, ('LD', 'PH'): 1, ('LL', 'PH'): 1, ('DE', 'PH'): 1, ('AL', 'PH'): 1, ('PO', 'PH'): 1, ('RE', 'PH'): 1, ('PA', 'PH'): 1, ('NE', 'PH'): 1, ('VE', 'PH'): 1, ('UV', 'PH'): 1, ('PH', 'PH'): 1, ('GL', 'PH'): 1, ('LZ', 'PH'): 1, ('LH', 'PH'): 1, ('RZ', 'PH'): 1, ('PZ', 'PH'): 1, ('VZ', 'PH'): 1, ('HZ', 'PH'): 1, ('GZ', 'PH'): 1, ('LA', 'PH'): '', ('NZ', 'PH'): '', ('NA', 'PH'): '', ('PL', 'PH'): '', ('NF', 'PH'): '', ('SF', 'PH'): '', ('NS', 'PH'): '', ('SS', 'PH'): '', ('VI', 'PH'): '', ('TA', 'PH'): '', ('SV', 'PH'): '', ('CL', 'PH'): '', ('NC', 'PH'): '', ('MC', 'PH'): '', ('MI', 'PH'): '', ('MO', 'PH'): '', ('NO', 'PH'): '', ('OP', 'PH'): '', ('VO', 'PH'): '', ('AS', 'PH'): '', ('EJ', 'PH'): '', ('IT', 'PH'): '', ('IN', 'PH'): '', ('X', 'PH'): 1, ('LB', 'GL'): 1, ('LD', 'GL'): 1, ('LL', 'GL'): 1, ('DE', 'GL'): 1, ('AL', 'GL'): 1, ('PO', 'GL'): 1, ('RE', 'GL'): 1, ('PA', 'GL'): 1, ('NE', 'GL'): 1, ('VE', 'GL'): 1, ('UV', 'GL'): 1, ('PH', 'GL'): 1, ('GL', 'GL'): 1, ('LZ', 'GL'): 1, ('LH', 'GL'): 1, ('RZ', 'GL'): 1, ('PZ', 'GL'): 1, ('VZ', 'GL'): 1, ('HZ', 'GL'): 1, ('GZ', 'GL'): 1, ('LA', 'GL'): '', ('NZ', 'GL'): '', ('NA', 'GL'): '', ('PL', 'GL'): '', ('NF', 'GL'): '', ('SF', 'GL'): '', ('NS', 'GL'): '', ('SS', 'GL'): '', ('VI', 'GL'): '', ('TA', 'GL'): '', ('SV', 'GL'): '', ('CL', 'GL'): '', ('NC', 'GL'): '', ('MC', 'GL'): '', ('MI', 'GL'): '', ('MO', 'GL'): '', ('NO', 'GL'): '', ('OP', 'GL'): '', ('VO', 'GL'): '', ('AS', 'GL'): '', ('EJ', 'GL'): 1, 
('IT', 'GL'): 1, ('IN', 'GL'): '', ('X', 'GL'): 1, ('LB', 'LZ'): 1, ('LD', 'LZ'): 1, ('LL', 'LZ'): 1, ('DE', 'LZ'): 1, ('AL', 'LZ'): 1, ('PO', 'LZ'): 1, ('RE', 'LZ'): 1, ('PA', 'LZ'): 1, ('NE', 'LZ'): 1, ('VE', 'LZ'): 1, ('UV', 'LZ'): 1, ('PH', 'LZ'): 1, ('GL', 'LZ'): 1, ('LZ', 'LZ'): 1, ('LH', 'LZ'): 1, ('RZ', 'LZ'): 1, ('PZ', 'LZ'): 1, ('VZ', 'LZ'): 1, ('HZ', 'LZ'): 1, ('GZ', 'LZ'): 1, ('LA', 'LZ'): '', ('NZ', 'LZ'): '', ('NA', 'LZ'): '', ('PL', 'LZ'): '', ('NF', 'LZ'): '', ('SF', 'LZ'): '', ('NS', 'LZ'): '', ('SS', 'LZ'): '', ('VI', 'LZ'): '', ('TA', 'LZ'): '', ('SV', 'LZ'): '', ('CL', 'LZ'): '', ('NC', 'LZ'): '', ('MC', 'LZ'): '', ('MI', 'LZ'): '', ('MO', 'LZ'): '', ('NO', 'LZ'): '', ('OP', 'LZ'): '', ('VO', 'LZ'): '', ('AS', 'LZ'): '', ('EJ', 'LZ'): '', ('IT', 'LZ'): '', ('IN', 'LZ'): '', ('X', 'LZ'): 1, ('LB', 'LH'): 1, ('LD', 'LH'): 1, ('LL', 'LH'): 1, ('DE', 'LH'): 1, ('AL', 'LH'): 1, ('PO', 'LH'): 1, ('RE', 'LH'): 1, ('PA', 'LH'): 1, ('NE', 'LH'): 1, ('VE', 'LH'): 1, ('UV', 'LH'): 1, ('PH', 'LH'): 1, ('GL', 'LH'): 1, ('LZ', 'LH'): 1, ('LH', 'LH'): 1, ('RZ', 'LH'): 1, ('PZ', 'LH'): 1, ('VZ', 'LH'): 1, ('HZ', 'LH'): 1, ('GZ', 'LH'): 1, ('LA', 'LH'): '', ('NZ', 'LH'): '', ('NA', 'LH'): '', ('PL', 'LH'): '', ('NF', 'LH'): '', ('SF', 'LH'): '', ('NS', 'LH'): '', ('SS', 'LH'): '', ('VI', 'LH'): '', ('TA', 'LH'): '', ('SV', 'LH'): '', ('CL', 'LH'): '', ('NC', 'LH'): '', ('MC', 'LH'): '', ('MI', 'LH'): '', ('MO', 'LH'): '', ('NO', 'LH'): '', ('OP', 'LH'): '', ('VO', 'LH'): '', ('AS', 'LH'): '', ('EJ', 'LH'): '', ('IT', 'LH'): '', ('IN', 'LH'): '', ('X', 'LH'): 1, ('LB', 'RZ'): 1, ('LD', 'RZ'): 1, ('LL', 'RZ'): 1, ('DE', 'RZ'): 1, ('AL', 'RZ'): 1, ('PO', 'RZ'): 1, ('RE', 'RZ'): 1, ('PA', 'RZ'): 1, ('NE', 'RZ'): 1, ('VE', 'RZ'): 1, ('UV', 'RZ'): 1, ('PH', 'RZ'): 1, ('GL', 'RZ'): 1, ('LZ', 'RZ'): 1, ('LH', 'RZ'): 1, ('RZ', 'RZ'): 1, ('PZ', 'RZ'): 1, ('VZ', 'RZ'): 1, ('HZ', 'RZ'): 1, ('GZ', 'RZ'): 1, ('LA', 'RZ'): '', ('NZ', 'RZ'): '', ('NA', 'RZ'): '', ('PL', 'RZ'): 
'', ('NF', 'RZ'): '', ('SF', 'RZ'): '', ('NS', 'RZ'): '', ('SS', 'RZ'): '', ('VI', 'RZ'): '', ('TA', 'RZ'): '', ('SV', 'RZ'): '', ('CL', 'RZ'): '', ('NC', 'RZ'): '', ('MC', 'RZ'): '', ('MI', 'RZ'): '', ('MO', 'RZ'): '', ('NO', 'RZ'): '', ('OP', 'RZ'): '', ('VO', 'RZ'): '', ('AS', 'RZ'): '', ('EJ', 'RZ'): '', ('IT', 'RZ'): '', ('IN', 'RZ'): '', ('X', 'RZ'): 1, ('LB', 'PZ'): 1, ('LD', 'PZ'): 1, ('LL', 'PZ'): 1, ('DE', 'PZ'): 1, ('AL', 'PZ'): 1, ('PO', 'PZ'): 1, ('RE', 'PZ'): 1, ('PA', 'PZ'): 1, ('NE', 'PZ'): 1, ('VE', 'PZ'): 1, ('UV', 'PZ'): 1, ('PH', 'PZ'): 1, ('GL', 'PZ'): 1, ('LZ', 'PZ'): 1, ('LH', 'PZ'): 1, ('RZ', 'PZ'): 1, ('PZ', 'PZ'): 1, ('VZ', 'PZ'): 1, ('HZ', 'PZ'): 1, ('GZ', 'PZ'): 1, ('LA', 'PZ'): '', ('NZ', 'PZ'): '', ('NA', 'PZ'): '', ('PL', 'PZ'): '', ('NF', 'PZ'): '', ('SF', 'PZ'): '', ('NS', 'PZ'): '', ('SS', 'PZ'): '', ('VI', 'PZ'): '', ('TA', 'PZ'): '', ('SV', 'PZ'): '', ('CL', 'PZ'): '', ('NC', 'PZ'): '', ('MC', 'PZ'): '', ('MI', 'PZ'): '', ('MO', 'PZ'): '', ('NO', 'PZ'): '', ('OP', 'PZ'): '', ('VO', 'PZ'): '', ('AS', 'PZ'): '', ('EJ', 'PZ'): '', ('IT', 'PZ'): '', ('IN', 'PZ'): '', ('X', 'PZ'): 1, ('LB', 'VZ'): 1, ('LD', 'VZ'): 1, ('LL', 'VZ'): 1, ('DE', 'VZ'): 1, ('AL', 'VZ'): 1, ('PO', 'VZ'): 1, ('RE', 'VZ'): 1, ('PA', 'VZ'): 1, ('NE', 'VZ'): 1, ('VE', 'VZ'): 1, ('UV', 'VZ'): 1, ('PH', 'VZ'): 1, ('GL', 'VZ'): 1, ('LZ', 'VZ'): 1, ('LH', 'VZ'): 1, ('RZ', 'VZ'): 1, ('PZ', 'VZ'): 1, ('VZ', 'VZ'): 1, ('HZ', 'VZ'): 1, ('GZ', 'VZ'): 1, ('LA', 'VZ'): '', ('NZ', 'VZ'): '', ('NA', 'VZ'): '', ('PL', 'VZ'): '', ('NF', 'VZ'): '', ('SF', 'VZ'): '', ('NS', 'VZ'): '', ('SS', 'VZ'): '', ('VI', 'VZ'): '', ('TA', 'VZ'): '', ('SV', 'VZ'): '', ('CL', 'VZ'): '', ('NC', 'VZ'): '', ('MC', 'VZ'): '', ('MI', 'VZ'): '', ('MO', 'VZ'): '', ('NO', 'VZ'): '', ('OP', 'VZ'): '', ('VO', 'VZ'): '', ('AS', 'VZ'): '', ('EJ', 'VZ'): '', ('IT', 'VZ'): '', ('IN', 'VZ'): '', ('X', 'VZ'): 1, ('LB', 'HZ'): 1, ('LD', 'HZ'): 1, ('LL', 'HZ'): 1, ('DE', 'HZ'): 1, ('AL', 'HZ'): 1, ('PO', 
'HZ'): 1, ('RE', 'HZ'): 1, ('PA', 'HZ'): 1, ('NE', 'HZ'): 1, ('VE', 'HZ'): 1, ('UV', 'HZ'): 1, ('PH', 'HZ'): 1, ('GL', 'HZ'): 1, ('LZ', 'HZ'): 1, ('LH', 'HZ'): 1, ('RZ', 'HZ'): 1, ('PZ', 'HZ'): 1, ('VZ', 'HZ'): 1, ('HZ', 'HZ'): 1, ('GZ', 'HZ'): 1, ('LA', 'HZ'): '', ('NZ', 'HZ'): '', ('NA', 'HZ'): '', ('PL', 'HZ'): '', ('NF', 'HZ'): '', ('SF', 'HZ'): '', ('NS', 'HZ'): '', ('SS', 'HZ'): '', ('VI', 'HZ'): '', ('TA', 'HZ'): '', ('SV', 'HZ'): '', ('CL', 'HZ'): '', ('NC', 'HZ'): '', ('MC', 'HZ'): '', ('MI', 'HZ'): '', ('MO', 'HZ'): '', ('NO', 'HZ'): '', ('OP', 'HZ'): '', ('VO', 'HZ'): '', ('AS', 'HZ'): '', ('EJ', 'HZ'): '', ('IT', 'HZ'): '', ('IN', 'HZ'): '', ('X', 'HZ'): 1, ('LB', 'GZ'): 1, ('LD', 'GZ'): 1, ('LL', 'GZ'): 1, ('DE', 'GZ'): 1, ('AL', 'GZ'): 1, ('PO', 'GZ'): 1, ('RE', 'GZ'): 1, ('PA', 'GZ'): 1, ('NE', 'GZ'): 1, ('VE', 'GZ'): 1, ('UV', 'GZ'): 1, ('PH', 'GZ'): 1, ('GL', 'GZ'): 1, ('LZ', 'GZ'): 1, ('LH', 'GZ'): 1, ('RZ', 'GZ'): 1, ('PZ', 'GZ'): 1, ('VZ', 'GZ'): 1, ('HZ', 'GZ'): 1, ('GZ', 'GZ'): 1, ('LA', 'GZ'): '', ('NZ', 'GZ'): '', ('NA', 'GZ'): '', ('PL', 'GZ'): '', ('NF', 'GZ'): '', ('SF', 'GZ'): '', ('NS', 'GZ'): '', ('SS', 'GZ'): '', ('VI', 'GZ'): '', ('TA', 'GZ'): '', ('SV', 'GZ'): '', ('CL', 'GZ'): '', ('NC', 'GZ'): '', ('MC', 'GZ'): '', ('MI', 'GZ'): '', ('MO', 'GZ'): '', ('NO', 'GZ'): '', ('OP', 'GZ'): '', ('VO', 'GZ'): '', ('AS', 'GZ'): '', ('EJ', 'GZ'): 1, ('IT', 'GZ'): 1, ('IN', 'GZ'): '', ('X', 'GZ'): 1, ('LB', 'LA'): '', ('LD', 'LA'): '', ('LL', 'LA'): '', ('DE', 'LA'): '', ('AL', 'LA'): '', ('PO', 'LA'): '', ('RE', 'LA'): '', ('PA', 'LA'): '', ('NE', 'LA'): '', ('VE', 'LA'): '', ('UV', 'LA'): '', ('PH', 'LA'): '', ('GL', 'LA'): '', ('LZ', 'LA'): '', ('LH', 'LA'): '', ('RZ', 'LA'): '', ('PZ', 'LA'): '', ('VZ', 'LA'): '', ('HZ', 'LA'): '', ('GZ', 'LA'): '', ('LA', 'LA'): 1, ('NZ', 'LA'): 1, ('NA', 'LA'): 1, ('PL', 'LA'): 1, ('NF', 'LA'): 1, ('SF', 'LA'): 1, ('NS', 'LA'): 1, ('SS', 'LA'): 1, ('VI', 'LA'): 1, ('TA', 'LA'): 1, ('SV', 'LA'): 1, ('CL', 
'LA'): 1, ('NC', 'LA'): 1, ('MC', 'LA'): 1, ('MI', 'LA'): 1, ('MO', 'LA'): 1, ('NO', 'LA'): 1, ('OP', 'LA'): 1, ('VO', 'LA'): '', ('AS', 'LA'): '', ('EJ', 'LA'): '', ('IT', 'LA'): '', ('IN', 'LA'): '', ('X', 'LA'): 1, ('LB', 'NZ'): '', ('LD', 'NZ'): '', ('LL', 'NZ'): '', ('DE', 'NZ'): '', ('AL', 'NZ'): '', ('PO', 'NZ'): '', ('RE', 'NZ'): '', ('PA', 'NZ'): '', ('NE', 'NZ'): '', ('VE', 'NZ'): '', ('UV', 'NZ'): '', ('PH', 'NZ'): '', ('GL', 'NZ'): '', ('LZ', 'NZ'): '', ('LH', 'NZ'): '', ('RZ', 'NZ'): '', ('PZ', 'NZ'): '', ('VZ', 'NZ'): '', ('HZ', 'NZ'): '', ('GZ', 'NZ'): '', ('LA', 'NZ'): 1, ('NZ', 'NZ'): 1, ('NA', 'NZ'): 1, ('PL', 'NZ'): 1, ('NF', 'NZ'): 1, ('SF', 'NZ'): 1, ('NS', 'NZ'): 1, ('SS', 'NZ'): 1, ('VI', 'NZ'): 1, ('TA', 'NZ'): 1, ('SV', 'NZ'): 1, ('CL', 'NZ'): 1, ('NC', 'NZ'): 1, ('MC', 'NZ'): 1, ('MI', 'NZ'): 1, ('MO', 'NZ'): 1, ('NO', 'NZ'): 1, ('OP', 'NZ'): 1, ('VO', 'NZ'): '', ('AS', 'NZ'): '', ('EJ', 'NZ'): '', ('IT', 'NZ'): '', ('IN', 'NZ'): '', ('X', 'NZ'): 1, ('LB', 'NA'): '', ('LD', 'NA'): '', ('LL', 'NA'): '', ('DE', 'NA'): '', ('AL', 'NA'): '', ('PO', 'NA'): '', ('RE', 'NA'): '', ('PA', 'NA'): '', ('NE', 'NA'): '', ('VE', 'NA'): '', ('UV', 'NA'): '', ('PH', 'NA'): '', ('GL', 'NA'): '', ('LZ', 'NA'): '', ('LH', 'NA'): '', ('RZ', 'NA'): '', ('PZ', 'NA'): '', ('VZ', 'NA'): '', ('HZ', 'NA'): '', ('GZ', 'NA'): '', ('LA', 'NA'): 1, ('NZ', 'NA'): 1, ('NA', 'NA'): 1, ('PL', 'NA'): 1, ('NF', 'NA'): 1, ('SF', 'NA'): 1, ('NS', 'NA'): 1, ('SS', 'NA'): 1, ('VI', 'NA'): 1, ('TA', 'NA'): 1, ('SV', 'NA'): 1, ('CL', 'NA'): 1, ('NC', 'NA'): 1, ('MC', 'NA'): 1, ('MI', 'NA'): 1, ('MO', 'NA'): 1, ('NO', 'NA'): 1, ('OP', 'NA'): 1, ('VO', 'NA'): '', ('AS', 'NA'): '', ('EJ', 'NA'): '', ('IT', 'NA'): '', ('IN', 'NA'): '', ('X', 'NA'): 1, ('LB', 'PL'): '', ('LD', 'PL'): '', ('LL', 'PL'): '', ('DE', 'PL'): '', ('AL', 'PL'): '', ('PO', 'PL'): '', ('RE', 'PL'): '', ('PA', 'PL'): '', ('NE', 'PL'): '', ('VE', 'PL'): '', ('UV', 'PL'): '', ('PH', 'PL'): '', ('GL', 'PL'): '', 
('LZ', 'PL'): '', ('LH', 'PL'): '', ('RZ', 'PL'): '', ('PZ', 'PL'): '', ('VZ', 'PL'): '', ('HZ', 'PL'): '', ('GZ', 'PL'): '', ('LA', 'PL'): 1, ('NZ', 'PL'): 1, ('NA', 'PL'): 1, ('PL', 'PL'): 1, ('NF', 'PL'): 1, ('SF', 'PL'): 1, ('NS', 'PL'): 1, ('SS', 'PL'): 1, ('VI', 'PL'): 1, ('TA', 'PL'): 1, ('SV', 'PL'): 1, ('CL', 'PL'): 1, ('NC', 'PL'): 1, ('MC', 'PL'): 1, ('MI', 'PL'): 1, ('MO', 'PL'): 1, ('NO', 'PL'): 1, ('OP', 'PL'): 1, ('VO', 'PL'): '', ('AS', 'PL'): '', ('EJ', 'PL'): '', ('IT', 'PL'): '', ('IN', 'PL'): '', ('X', 'PL'): 1, ('LB', 'NF'): '', ('LD', 'NF'): '', ('LL', 'NF'): '', ('DE', 'NF'): '', ('AL', 'NF'): '', ('PO', 'NF'): '', ('RE', 'NF'): '', ('PA', 'NF'): '', ('NE', 'NF'): '', ('VE', 'NF'): '', ('UV', 'NF'): '', ('PH', 'NF'): '', ('GL', 'NF'): '', ('LZ', 'NF'): '', ('LH', 'NF'): '', ('RZ', 'NF'): '', ('PZ', 'NF'): '', ('VZ', 'NF'): '', ('HZ', 'NF'): '', ('GZ', 'NF'): '', ('LA', 'NF'): 1, ('NZ', 'NF'): 1, ('NA', 'NF'): 1, ('PL', 'NF'): 1, ('NF', 'NF'): 1, ('SF', 'NF'): 1, ('NS', 'NF'): 1, ('SS', 'NF'): 1, ('VI', 'NF'): 1, ('TA', 'NF'): 1, ('SV', 'NF'): 1, ('CL', 'NF'): 1, ('NC', 'NF'): 1, ('MC', 'NF'): 1, ('MI', 'NF'): 1, ('MO', 'NF'): 1, ('NO', 'NF'): 1, ('OP', 'NF'): 1, ('VO', 'NF'): '', ('AS', 'NF'): '', ('EJ', 'NF'): '', ('IT', 'NF'): '', ('IN', 'NF'): '', ('X', 'NF'): 1, ('LB', 'SF'): '', ('LD', 'SF'): '', ('LL', 'SF'): '', ('DE', 'SF'): '', ('AL', 'SF'): '', ('PO', 'SF'): '', ('RE', 'SF'): '', ('PA', 'SF'): '', ('NE', 'SF'): '', ('VE', 'SF'): '', ('UV', 'SF'): '', ('PH', 'SF'): '', ('GL', 'SF'): '', ('LZ', 'SF'): '', ('LH', 'SF'): '', ('RZ', 'SF'): '', ('PZ', 'SF'): '', ('VZ', 'SF'): '', ('HZ', 'SF'): '', ('GZ', 'SF'): '', ('LA', 'SF'): 1, ('NZ', 'SF'): 1, ('NA', 'SF'): 1, ('PL', 'SF'): 1, ('NF', 'SF'): 1, ('SF', 'SF'): 1, ('NS', 'SF'): 1, ('SS', 'SF'): 1, ('VI', 'SF'): 1, ('TA', 'SF'): 1, ('SV', 'SF'): 1, ('CL', 'SF'): 1, ('NC', 'SF'): 1, ('MC', 'SF'): 1, ('MI', 'SF'): 1, ('MO', 'SF'): 1, ('NO', 'SF'): 1, ('OP', 'SF'): 1, ('VO', 'SF'): '', 
('AS', 'SF'): '', ('EJ', 'SF'): '', ('IT', 'SF'): '', ('IN', 'SF'): '', ('X', 'SF'): 1, ('LB', 'NS'): '', ('LD', 'NS'): '', ('LL', 'NS'): '', ('DE', 'NS'): '', ('AL', 'NS'): '', ('PO', 'NS'): '', ('RE', 'NS'): '', ('PA', 'NS'): '', ('NE', 'NS'): '', ('VE', 'NS'): '', ('UV', 'NS'): '', ('PH', 'NS'): '', ('GL', 'NS'): '', ('LZ', 'NS'): '', ('LH', 'NS'): '', ('RZ', 'NS'): '', ('PZ', 'NS'): '', ('VZ', 'NS'): '', ('HZ', 'NS'): '', ('GZ', 'NS'): '', ('LA', 'NS'): 1, ('NZ', 'NS'): 1, ('NA', 'NS'): 1, ('PL', 'NS'): 1, ('NF', 'NS'): 1, ('SF', 'NS'): 1, ('NS', 'NS'): 1, ('SS', 'NS'): 1, ('VI', 'NS'): 1, ('TA', 'NS'): 1, ('SV', 'NS'): 1, ('CL', 'NS'): 1, ('NC', 'NS'): 1, ('MC', 'NS'): 1, ('MI', 'NS'): 1, ('MO', 'NS'): 1, ('NO', 'NS'): 1, ('OP', 'NS'): 1, ('VO', 'NS'): '', ('AS', 'NS'): '', ('EJ', 'NS'): '', ('IT', 'NS'): '', ('IN', 'NS'): '', ('X', 'NS'): 1, ('LB', 'SS'): '', ('LD', 'SS'): '', ('LL', 'SS'): '', ('DE', 'SS'): '', ('AL', 'SS'): '', ('PO', 'SS'): '', ('RE', 'SS'): '', ('PA', 'SS'): '', ('NE', 'SS'): '', ('VE', 'SS'): '', ('UV', 'SS'): '', ('PH', 'SS'): '', ('GL', 'SS'): '', ('LZ', 'SS'): '', ('LH', 'SS'): '', ('RZ', 'SS'): '', ('PZ', 'SS'): '', ('VZ', 'SS'): '', ('HZ', 'SS'): '', ('GZ', 'SS'): '', ('LA', 'SS'): 1, ('NZ', 'SS'): 1, ('NA', 'SS'): 1, ('PL', 'SS'): 1, ('NF', 'SS'): 1, ('SF', 'SS'): 1, ('NS', 'SS'): 1, ('SS', 'SS'): 1, ('VI', 'SS'): 1, ('TA', 'SS'): 1, ('SV', 'SS'): 1, ('CL', 'SS'): 1, ('NC', 'SS'): 1, ('MC', 'SS'): 1, ('MI', 'SS'): 1, ('MO', 'SS'): 1, ('NO', 'SS'): 1, ('OP', 'SS'): 1, ('VO', 'SS'): '', ('AS', 'SS'): '', ('EJ', 'SS'): '', ('IT', 'SS'): '', ('IN', 'SS'): '', ('X', 'SS'): 1, ('LB', 'VI'): '', ('LD', 'VI'): '', ('LL', 'VI'): '', ('DE', 'VI'): '', ('AL', 'VI'): '', ('PO', 'VI'): '', ('RE', 'VI'): '', ('PA', 'VI'): '', ('NE', 'VI'): '', ('VE', 'VI'): '', ('UV', 'VI'): '', ('PH', 'VI'): '', ('GL', 'VI'): '', ('LZ', 'VI'): '', ('LH', 'VI'): '', ('RZ', 'VI'): '', ('PZ', 'VI'): '', ('VZ', 'VI'): '', ('HZ', 'VI'): '', ('GZ', 'VI'): '', ('LA', 
'VI'): 1, ('NZ', 'VI'): 1, ('NA', 'VI'): 1, ('PL', 'VI'): 1, ('NF', 'VI'): 1, ('SF', 'VI'): 1, ('NS', 'VI'): 1, ('SS', 'VI'): 1, ('VI', 'VI'): 1, ('TA', 'VI'): 1, ('SV', 'VI'): 1, ('CL', 'VI'): 1, ('NC', 'VI'): 1, ('MC', 'VI'): 1, ('MI', 'VI'): 1, ('MO', 'VI'): 1, ('NO', 'VI'): 1, ('OP', 'VI'): 1, ('VO', 'VI'): '', ('AS', 'VI'): '', ('EJ', 'VI'): '', ('IT', 'VI'): '', ('IN', 'VI'): '', ('X', 'VI'): 1, ('LB', 'TA'): '', ('LD', 'TA'): '', ('LL', 'TA'): '', ('DE', 'TA'): '', ('AL', 'TA'): '', ('PO', 'TA'): '', ('RE', 'TA'): '', ('PA', 'TA'): '', ('NE', 'TA'): '', ('VE', 'TA'): '', ('UV', 'TA'): '', ('PH', 'TA'): '', ('GL', 'TA'): '', ('LZ', 'TA'): '', ('LH', 'TA'): '', ('RZ', 'TA'): '', ('PZ', 'TA'): '', ('VZ', 'TA'): '', ('HZ', 'TA'): '', ('GZ', 'TA'): '', ('LA', 'TA'): 1, ('NZ', 'TA'): 1, ('NA', 'TA'): 1, ('PL', 'TA'): 1, ('NF', 'TA'): 1, ('SF', 'TA'): 1, ('NS', 'TA'): 1, ('SS', 'TA'): 1, ('VI', 'TA'): 1, ('TA', 'TA'): 1, ('SV', 'TA'): 1, ('CL', 'TA'): 1, ('NC', 'TA'): 1, ('MC', 'TA'): 1, ('MI', 'TA'): 1, ('MO', 'TA'): 1, ('NO', 'TA'): 1, ('OP', 'TA'): 1, ('VO', 'TA'): '', ('AS', 'TA'): '', ('EJ', 'TA'): '', ('IT', 'TA'): '', ('IN', 'TA'): '', ('X', 'TA'): 1, ('LB', 'SV'): '', ('LD', 'SV'): '', ('LL', 'SV'): '', ('DE', 'SV'): '', ('AL', 'SV'): '', ('PO', 'SV'): '', ('RE', 'SV'): '', ('PA', 'SV'): '', ('NE', 'SV'): '', ('VE', 'SV'): '', ('UV', 'SV'): '', ('PH', 'SV'): '', ('GL', 'SV'): '', ('LZ', 'SV'): '', ('LH', 'SV'): '', ('RZ', 'SV'): '', ('PZ', 'SV'): '', ('VZ', 'SV'): '', ('HZ', 'SV'): '', ('GZ', 'SV'): '', ('LA', 'SV'): 1, ('NZ', 'SV'): 1, ('NA', 'SV'): 1, ('PL', 'SV'): 1, ('NF', 'SV'): 1, ('SF', 'SV'): 1, ('NS', 'SV'): 1, ('SS', 'SV'): 1, ('VI', 'SV'): 1, ('TA', 'SV'): 1, ('SV', 'SV'): 1, ('CL', 'SV'): 1, ('NC', 'SV'): 1, ('MC', 'SV'): 1, ('MI', 'SV'): 1, ('MO', 'SV'): 1, ('NO', 'SV'): 1, ('OP', 'SV'): 1, ('VO', 'SV'): '', ('AS', 'SV'): '', ('EJ', 'SV'): '', ('IT', 'SV'): '', ('IN', 'SV'): '', ('X', 'SV'): 1, ('LB', 'CL'): '', ('LD', 'CL'): '', ('LL', 'CL'): 
'', ('DE', 'CL'): '', ('AL', 'CL'): '', ('PO', 'CL'): '', ('RE', 'CL'): '', ('PA', 'CL'): '', ('NE', 'CL'): '', ('VE', 'CL'): '', ('UV', 'CL'): '', ('PH', 'CL'): '', ('GL', 'CL'): '', ('LZ', 'CL'): '', ('LH', 'CL'): '', ('RZ', 'CL'): '', ('PZ', 'CL'): '', ('VZ', 'CL'): '', ('HZ', 'CL'): '', ('GZ', 'CL'): '', ('LA', 'CL'): 1, ('NZ', 'CL'): 1, ('NA', 'CL'): 1, ('PL', 'CL'): 1, ('NF', 'CL'): 1, ('SF', 'CL'): 1, ('NS', 'CL'): 1, ('SS', 'CL'): 1, ('VI', 'CL'): 1, ('TA', 'CL'): 1, ('SV', 'CL'): 1, ('CL', 'CL'): 1, ('NC', 'CL'): 1, ('MC', 'CL'): 1, ('MI', 'CL'): 1, ('MO', 'CL'): 1, ('NO', 'CL'): 1, ('OP', 'CL'): 1, ('VO', 'CL'): '', ('AS', 'CL'): '', ('EJ', 'CL'): '', ('IT', 'CL'): '', ('IN', 'CL'): '', ('X', 'CL'): 1, ('LB', 'NC'): '', ('LD', 'NC'): '', ('LL', 'NC'): '', ('DE', 'NC'): '', ('AL', 'NC'): '', ('PO', 'NC'): '', ('RE', 'NC'): '', ('PA', 'NC'): '', ('NE', 'NC'): '', ('VE', 'NC'): '', ('UV', 'NC'): '', ('PH', 'NC'): '', ('GL', 'NC'): '', ('LZ', 'NC'): '', ('LH', 'NC'): '', ('RZ', 'NC'): '', ('PZ', 'NC'): '', ('VZ', 'NC'): '', ('HZ', 'NC'): '', ('GZ', 'NC'): '', ('LA', 'NC'): 1, ('NZ', 'NC'): 1, ('NA', 'NC'): 1, ('PL', 'NC'): 1, ('NF', 'NC'): 1, ('SF', 'NC'): 1, ('NS', 'NC'): 1, ('SS', 'NC'): 1, ('VI', 'NC'): 1, ('TA', 'NC'): 1, ('SV', 'NC'): 1, ('CL', 'NC'): 1, ('NC', 'NC'): 1, ('MC', 'NC'): 1, ('MI', 'NC'): 1, ('MO', 'NC'): 1, ('NO', 'NC'): 1, ('OP', 'NC'): 1, ('VO', 'NC'): '', ('AS', 'NC'): '', ('EJ', 'NC'): '', ('IT', 'NC'): '', ('IN', 'NC'): '', ('X', 'NC'): 1, ('LB', 'MC'): '', ('LD', 'MC'): '', ('LL', 'MC'): '', ('DE', 'MC'): '', ('AL', 'MC'): '', ('PO', 'MC'): '', ('RE', 'MC'): '', ('PA', 'MC'): '', ('NE', 'MC'): '', ('VE', 'MC'): '', ('UV', 'MC'): '', ('PH', 'MC'): '', ('GL', 'MC'): '', ('LZ', 'MC'): '', ('LH', 'MC'): '', ('RZ', 'MC'): '', ('PZ', 'MC'): '', ('VZ', 'MC'): '', ('HZ', 'MC'): '', ('GZ', 'MC'): '', ('LA', 'MC'): 1, ('NZ', 'MC'): 1, ('NA', 'MC'): 1, ('PL', 'MC'): 1, ('NF', 'MC'): 1, ('SF', 'MC'): 1, ('NS', 'MC'): 1, ('SS', 'MC'): 1, ('VI', 
'MC'): 1, ('TA', 'MC'): 1, ('SV', 'MC'): 1, ('CL', 'MC'): 1, ('NC', 'MC'): 1, ('MC', 'MC'): 1, ('MI', 'MC'): 1, ('MO', 'MC'): 1, ('NO', 'MC'): 1, ('OP', 'MC'): 1, ('VO', 'MC'): '', ('AS', 'MC'): '', ('EJ', 'MC'): '', ('IT', 'MC'): '', ('IN', 'MC'): '', ('X', 'MC'): 1, ('LB', 'MI'): '', ('LD', 'MI'): '', ('LL', 'MI'): '', ('DE', 'MI'): '', ('AL', 'MI'): '', ('PO', 'MI'): '', ('RE', 'MI'): '', ('PA', 'MI'): '', ('NE', 'MI'): '', ('VE', 'MI'): '', ('UV', 'MI'): '', ('PH', 'MI'): '', ('GL', 'MI'): '', ('LZ', 'MI'): '', ('LH', 'MI'): '', ('RZ', 'MI'): '', ('PZ', 'MI'): '', ('VZ', 'MI'): '', ('HZ', 'MI'): '', ('GZ', 'MI'): '', ('LA', 'MI'): 1, ('NZ', 'MI'): 1, ('NA', 'MI'): 1, ('PL', 'MI'): 1, ('NF', 'MI'): 1, ('SF', 'MI'): 1, ('NS', 'MI'): 1, ('SS', 'MI'): 1, ('VI', 'MI'): 1, ('TA', 'MI'): 1, ('SV', 'MI'): 1, ('CL', 'MI'): 1, ('NC', 'MI'): 1, ('MC', 'MI'): 1, ('MI', 'MI'): 1, ('MO', 'MI'): 1, ('NO', 'MI'): 1, ('OP', 'MI'): 1, ('VO', 'MI'): '', ('AS', 'MI'): '', ('EJ', 'MI'): '', ('IT', 'MI'): '', ('IN', 'MI'): '', ('X', 'MI'): 1, ('LB', 'MO'): '', ('LD', 'MO'): '', ('LL', 'MO'): '', ('DE', 'MO'): '', ('AL', 'MO'): '', ('PO', 'MO'): '', ('RE', 'MO'): '', ('PA', 'MO'): '', ('NE', 'MO'): '', ('VE', 'MO'): '', ('UV', 'MO'): '', ('PH', 'MO'): '', ('GL', 'MO'): '', ('LZ', 'MO'): '', ('LH', 'MO'): '', ('RZ', 'MO'): '', ('PZ', 'MO'): '', ('VZ', 'MO'): '', ('HZ', 'MO'): '', ('GZ', 'MO'): '', ('LA', 'MO'): 1, ('NZ', 'MO'): 1, ('NA', 'MO'): 1, ('PL', 'MO'): 1, ('NF', 'MO'): 1, ('SF', 'MO'): 1, ('NS', 'MO'): 1, ('SS', 'MO'): 1, ('VI', 'MO'): 1, ('TA', 'MO'): 1, ('SV', 'MO'): 1, ('CL', 'MO'): 1, ('NC', 'MO'): 1, ('MC', 'MO'): 1, ('MI', 'MO'): 1, ('MO', 'MO'): 1, ('NO', 'MO'): 1, ('OP', 'MO'): 1, ('VO', 'MO'): '', ('AS', 'MO'): '', ('EJ', 'MO'): '', ('IT', 'MO'): '', ('IN', 'MO'): '', ('X', 'MO'): 1, ('LB', 'NO'): '', ('LD', 'NO'): '', ('LL', 'NO'): '', ('DE', 'NO'): '', ('AL', 'NO'): '', ('PO', 'NO'): '', ('RE', 'NO'): '', ('PA', 'NO'): '', ('NE', 'NO'): '', ('VE', 'NO'): '', ('UV', 
'NO'): '', ('PH', 'NO'): '', ('GL', 'NO'): '', ('LZ', 'NO'): '', ('LH', 'NO'): '', ('RZ', 'NO'): '', ('PZ', 'NO'): '', ('VZ', 'NO'): '', ('HZ', 'NO'): '', ('GZ', 'NO'): '', ('LA', 'NO'): 1, ('NZ', 'NO'): 1, ('NA', 'NO'): 1, ('PL', 'NO'): 1, ('NF', 'NO'): 1, ('SF', 'NO'): 1, ('NS', 'NO'): 1, ('SS', 'NO'): 1, ('VI', 'NO'): 1, ('TA', 'NO'): 1, ('SV', 'NO'): 1, ('CL', 'NO'): 1, ('NC', 'NO'): 1, ('MC', 'NO'): 1, ('MI', 'NO'): 1, ('MO', 'NO'): 1, ('NO', 'NO'): 1, ('OP', 'NO'): 1, ('VO', 'NO'): '', ('AS', 'NO'): '', ('EJ', 'NO'): '', ('IT', 'NO'): '', ('IN', 'NO'): '', ('X', 'NO'): 1, ('LB', 'OP'): '', ('LD', 'OP'): '', ('LL', 'OP'): '', ('DE', 'OP'): '', ('AL', 'OP'): '', ('PO', 'OP'): '', ('RE', 'OP'): '', ('PA', 'OP'): '', ('NE', 'OP'): '', ('VE', 'OP'): '', ('UV', 'OP'): '', ('PH', 'OP'): '', ('GL', 'OP'): '', ('LZ', 'OP'): '', ('LH', 'OP'): '', ('RZ', 'OP'): '', ('PZ', 'OP'): '', ('VZ', 'OP'): '', ('HZ', 'OP'): '', ('GZ', 'OP'): '', ('LA', 'OP'): 1, ('NZ', 'OP'): 1, ('NA', 'OP'): 1, ('PL', 'OP'): 1, ('NF', 'OP'): 1, ('SF', 'OP'): 1, ('NS', 'OP'): 1, ('SS', 'OP'): 1, ('VI', 'OP'): 1, ('TA', 'OP'): 1, ('SV', 'OP'): 1, ('CL', 'OP'): 1, ('NC', 'OP'): 1, ('MC', 'OP'): 1, ('MI', 'OP'): 1, ('MO', 'OP'): 1, ('NO', 'OP'): 1, ('OP', 'OP'): 1, ('VO', 'OP'): '', ('AS', 'OP'): '', ('EJ', 'OP'): '', ('IT', 'OP'): '', ('IN', 'OP'): '', ('X', 'OP'): 1, ('LB', 'VO'): '', ('LD', 'VO'): '', ('LL', 'VO'): '', ('DE', 'VO'): '', ('AL', 'VO'): '', ('PO', 'VO'): '', ('RE', 'VO'): '', ('PA', 'VO'): '', ('NE', 'VO'): '', ('VE', 'VO'): '', ('UV', 'VO'): '', ('PH', 'VO'): '', ('GL', 'VO'): '', ('LZ', 'VO'): '', ('LH', 'VO'): '', ('RZ', 'VO'): '', ('PZ', 'VO'): '', ('VZ', 'VO'): '', ('HZ', 'VO'): '', ('GZ', 'VO'): '', ('LA', 'VO'): '', ('NZ', 'VO'): '', ('NA', 'VO'): '', ('PL', 'VO'): '', ('NF', 'VO'): '', ('SF', 'VO'): '', ('NS', 'VO'): '', ('SS', 'VO'): '', ('VI', 'VO'): '', ('TA', 'VO'): '', ('SV', 'VO'): '', ('CL', 'VO'): '', ('NC', 'VO'): '', ('MC', 'VO'): '', ('MI', 'VO'): '', ('MO', 
'VO'): '', ('NO', 'VO'): '', ('OP', 'VO'): '', ('VO', 'VO'): 1, ('AS', 'VO'): 1, ('EJ', 'VO'): 1, ('IT', 'VO'): 1, ('IN', 'VO'): 1, ('X', 'VO'): 1, ('LB', 'AS'): '', ('LD', 'AS'): '', ('LL', 'AS'): '', ('DE', 'AS'): '', ('AL', 'AS'): '', ('PO', 'AS'): '', ('RE', 'AS'): '', ('PA', 'AS'): '', ('NE', 'AS'): '', ('VE', 'AS'): '', ('UV', 'AS'): '', ('PH', 'AS'): '', ('GL', 'AS'): '', ('LZ', 'AS'): '', ('LH', 'AS'): '', ('RZ', 'AS'): '', ('PZ', 'AS'): '', ('VZ', 'AS'): '', ('HZ', 'AS'): '', ('GZ', 'AS'): '', ('LA', 'AS'): '', ('NZ', 'AS'): '', ('NA', 'AS'): '', ('PL', 'AS'): '', ('NF', 'AS'): '', ('SF', 'AS'): '', ('NS', 'AS'): '', ('SS', 'AS'): '', ('VI', 'AS'): '', ('TA', 'AS'): '', ('SV', 'AS'): '', ('CL', 'AS'): '', ('NC', 'AS'): '', ('MC', 'AS'): '', ('MI', 'AS'): '', ('MO', 'AS'): '', ('NO', 'AS'): '', ('OP', 'AS'): '', ('VO', 'AS'): 1, ('AS', 'AS'): 1, ('EJ', 'AS'): 1, ('IT', 'AS'): 1, ('IN', 'AS'): 1, ('X', 'AS'): 1, ('LB', 'EJ'): '', ('LD', 'EJ'): '', ('LL', 'EJ'): '', ('DE', 'EJ'): '', ('AL', 'EJ'): '', ('PO', 'EJ'): '', ('RE', 'EJ'): '', ('PA', 'EJ'): '', ('NE', 'EJ'): '', ('VE', 'EJ'): '', ('UV', 'EJ'): '', ('PH', 'EJ'): '', ('GL', 'EJ'): 1, ('LZ', 'EJ'): '', ('LH', 'EJ'): '', ('RZ', 'EJ'): '', ('PZ', 'EJ'): '', ('VZ', 'EJ'): '', ('HZ', 'EJ'): '', ('GZ', 'EJ'): 1, ('LA', 'EJ'): '', ('NZ', 'EJ'): '', ('NA', 'EJ'): '', ('PL', 'EJ'): '', ('NF', 'EJ'): '', ('SF', 'EJ'): '', ('NS', 'EJ'): '', ('SS', 'EJ'): '', ('VI', 'EJ'): '', ('TA', 'EJ'): '', ('SV', 'EJ'): '', ('CL', 'EJ'): '', ('NC', 'EJ'): '', ('MC', 'EJ'): '', ('MI', 'EJ'): '', ('MO', 'EJ'): '', ('NO', 'EJ'): '', ('OP', 'EJ'): '', ('VO', 'EJ'): 1, ('AS', 'EJ'): 1, ('EJ', 'EJ'): 1, ('IT', 'EJ'): 1, ('IN', 'EJ'): 1, ('X', 'EJ'): 1, ('LB', 'IT'): '', ('LD', 'IT'): '', ('LL', 'IT'): '', ('DE', 'IT'): '', ('AL', 'IT'): '', ('PO', 'IT'): '', ('RE', 'IT'): '', ('PA', 'IT'): '', ('NE', 'IT'): '', ('VE', 'IT'): '', ('UV', 'IT'): '', ('PH', 'IT'): '', ('GL', 'IT'): 1, ('LZ', 'IT'): '', ('LH', 'IT'): '', ('RZ', 'IT'): 
'', ('PZ', 'IT'): '', ('VZ', 'IT'): '', ('HZ', 'IT'): '', ('GZ', 'IT'): 1, ('LA', 'IT'): '', ('NZ', 'IT'): '', ('NA', 'IT'): '', ('PL', 'IT'): '', ('NF', 'IT'): '', ('SF', 'IT'): '', ('NS', 'IT'): '', ('SS', 'IT'): '', ('VI', 'IT'): '', ('TA', 'IT'): '', ('SV', 'IT'): '', ('CL', 'IT'): '', ('NC', 'IT'): '', ('MC', 'IT'): '', ('MI', 'IT'): '', ('MO', 'IT'): '', ('NO', 'IT'): '', ('OP', 'IT'): '', ('VO', 'IT'): 1, ('AS', 'IT'): 1, ('EJ', 'IT'): 1, ('IT', 'IT'): 1, ('IN', 'IT'): 1, ('X', 'IT'): 1, ('LB', 'IN'): '', ('LD', 'IN'): '', ('LL', 'IN'): '', ('DE', 'IN'): '', ('AL', 'IN'): '', ('PO', 'IN'): '', ('RE', 'IN'): '', ('PA', 'IN'): '', ('NE', 'IN'): '', ('VE', 'IN'): '', ('UV', 'IN'): '', ('PH', 'IN'): '', ('GL', 'IN'): '', ('LZ', 'IN'): '', ('LH', 'IN'): '', ('RZ', 'IN'): '', ('PZ', 'IN'): '', ('VZ', 'IN'): '', ('HZ', 'IN'): '', ('GZ', 'IN'): '', ('LA', 'IN'): '', ('NZ', 'IN'): '', ('NA', 'IN'): '', ('PL', 'IN'): '', ('NF', 'IN'): '', ('SF', 'IN'): '', ('NS', 'IN'): '', ('SS', 'IN'): '', ('VI', 'IN'): '', ('TA', 'IN'): '', ('SV', 'IN'): '', ('CL', 'IN'): '', ('NC', 'IN'): '', ('MC', 'IN'): '', ('MI', 'IN'): '', ('MO', 'IN'): '', ('NO', 'IN'): '', ('OP', 'IN'): '', ('VO', 'IN'): 1, ('AS', 'IN'): 1, ('EJ', 'IN'): 1, ('IT', 'IN'): 1, ('IN', 'IN'): 1, ('X', 'IN'): 1, ('LB', 'X'): 1, ('LD', 'X'): 1, ('LL', 'X'): 1, ('DE', 'X'): 1, ('AL', 'X'): 1, ('PO', 'X'): 1, ('RE', 'X'): 1, ('PA', 'X'): 1, ('NE', 'X'): 1, ('VE', 'X'): 1, ('UV', 'X'): 1, ('PH', 'X'): 1, ('GL', 'X'): 1, ('LZ', 'X'): 1, ('LH', 'X'): 1, ('RZ', 'X'): 1, ('PZ', 'X'): 1, ('VZ', 'X'): 1, ('HZ', 'X'): 1, ('GZ', 'X'): 1, ('LA', 'X'): 1, ('NZ', 'X'): 1, ('NA', 'X'): 1, ('PL', 'X'): 1, ('NF', 'X'): 1, ('SF', 'X'): 1, ('NS', 'X'): 1, ('SS', 'X'): 1, ('VI', 'X'): 1, ('TA', 'X'): 1, ('SV', 'X'): 1, ('CL', 'X'): 1, ('NC', 'X'): 1, ('MC', 'X'): 1, ('MI', 'X'): 1, ('MO', 'X'): 1, ('NO', 'X'): 1, ('OP', 'X'): 1, ('VO', 'X'): 1, ('AS', 'X'): 1, ('EJ', 'X'): 1, ('IT', 'X'): 1, ('IN', 'X'): 1, ('X', 'X'): 1}
asymmetric_feature_distance_map = {('LB', 'LB'): 0.0, ('LB', 'LD'): 1.0, ('LB', 'LL'): 2.0, ('LB', 'DE'): 2.0, ('LB', 'AL'): 3.0, ('LB', 'PO'): 4.0, ('LB', 'RE'): 5.0, ('LB', 'PA'): 5.0, ('LB', 'NE'): 4.0, ('LB', 'VE'): 3.0, ('LB', 'UV'): 5.0, ('LB', 'PH'): 6.0, ('LB', 'GL'): 3.0, ('LB', 'LZ'): 1.0, ('LB', 'LH'): 2.0, ('LB', 'RZ'): 7.0, ('LB', 'PZ'): 7.0, ('LB', 'VZ'): 7.0, ('LB', 'HZ'): 7.0, ('LB', 'GZ'): 7.0, ('LB', 'X'): 4.0, ('LD', 'LB'): 1.0, ('LD', 'LD'): 0.0, ('LD', 'LL'): 3.0, ('LD', 'DE'): 1.0, ('LD', 'AL'): 2.0, ('LD', 'PO'): 3.0, ('LD', 'RE'): 5.0, ('LD', 'PA'): 5.0, ('LD', 'NE'): 5.0, ('LD', 'VE'): 4.0, ('LD', 'UV'): 5.0, ('LD', 'PH'): 6.0, ('LD', 'GL'): 3.0, ('LD', 'LZ'): 2.0, ('LD', 'LH'): 3.0, ('LD', 'RZ'): 7.0, ('LD', 'PZ'): 7.0, ('LD', 'VZ'): 7.0, ('LD', 'HZ'): 7.0, ('LD', 'GZ'): 7.0, ('LD', 'X'): 4.0, ('LL', 'LB'): 2.0, ('LL', 'LD'): 3.0, ('LL', 'LL'): 0.0, ('LL', 'DE'): 2.0, ('LL', 'AL'): 2.0, ('LL', 'PO'): 3.0, ('LL', 'RE'): 5.0, ('LL', 'PA'): 5.0, ('LL', 'NE'): 5.0, ('LL', 'VE'): 5.0, ('LL', 'UV'): 5.0, ('LL', 'PH'): 6.0, ('LL', 'GL'): 3.0, ('LL', 'LZ'): 3.0, ('LL', 'LH'): 4.0, ('LL', 'RZ'): 7.0, ('LL', 'PZ'): 7.0, ('LL', 'VZ'): 7.0, ('LL', 'HZ'): 7.0, ('LL', 'GZ'): 7.0, ('LL', 'X'): 4.0, ('DE', 'LB'): 2.0, ('DE', 'LD'): 1.0, ('DE', 'LL'): 2.0, ('DE', 'DE'): 0.0, ('DE', 'AL'): 1.0, ('DE', 'PO'): 3.0, ('DE', 'RE'): 5.0, ('DE', 'PA'): 5.0, ('DE', 'NE'): 5.0, ('DE', 'VE'): 4.0, ('DE', 'UV'): 5.0, ('DE', 'PH'): 6.0, ('DE', 'GL'): 3.0, ('DE', 'LZ'): 7.0, ('DE', 'LH'): 6.0, ('DE', 'RZ'): 3.0, ('DE', 'PZ'): 7.0, ('DE', 'VZ'): 7.0, ('DE', 'HZ'): 7.0, ('DE', 'GZ'): 7.0, ('DE', 'X'): 4.0, ('AL', 'LB'): 3.0, ('AL', 'LD'): 2.0, ('AL', 'LL'): 2.0, ('AL', 'DE'): 1.0, ('AL', 'AL'): 0.0, ('AL', 'PO'): 2.0, ('AL', 'RE'): 4.0, ('AL', 'PA'): 4.0, ('AL', 'NE'): 4.0, ('AL', 'VE'): 3.0, ('AL', 'UV'): 5.0, ('AL', 'PH'): 6.0, ('AL', 'GL'): 3.0, ('AL', 'LZ'): 7.0, ('AL', 'LH'): 6.0, ('AL', 'RZ'): 2.0, ('AL', 'PZ'): 7.0, ('AL', 'VZ'): 7.0, ('AL', 'HZ'): 7.0, ('AL', 
'GZ'): 7.0, ('AL', 'X'): 4.0, ('PO', 'LB'): 4.0, ('PO', 'LD'): 3.0, ('PO', 'LL'): 3.0, ('PO', 'DE'): 3.0, ('PO', 'AL'): 2.0, ('PO', 'PO'): 0.0, ('PO', 'RE'): 2.0, ('PO', 'PA'): 2.0, ('PO', 'NE'): 3.0, ('PO', 'VE'): 3.0, ('PO', 'UV'): 5.0, ('PO', 'PH'): 6.0, ('PO', 'GL'): 3.0, ('PO', 'LZ'): 7.0, ('PO', 'LH'): 6.0, ('PO', 'RZ'): 2.0, ('PO', 'PZ'): 7.0, ('PO', 'VZ'): 7.0, ('PO', 'HZ'): 7.0, ('PO', 'GZ'): 7.0, ('PO', 'X'): 4.0, ('RE', 'LB'): 5.0, ('RE', 'LD'): 5.0, ('RE', 'LL'): 5.0, ('RE', 'DE'): 5.0, ('RE', 'AL'): 4.0, ('RE', 'PO'): 2.0, ('RE', 'RE'): 0.0, ('RE', 'PA'): 4.0, ('RE', 'NE'): 4.0, ('RE', 'VE'): 3.0, ('RE', 'UV'): 5.0, ('RE', 'PH'): 6.0, ('RE', 'GL'): 3.0, ('RE', 'LZ'): 7.0, ('RE', 'LH'): 6.0, ('RE', 'RZ'): 2.0, ('RE', 'PZ'): 7.0, ('RE', 'VZ'): 7.0, ('RE', 'HZ'): 7.0, ('RE', 'GZ'): 7.0, ('RE', 'X'): 4.0, ('PA', 'LB'): 5.0, ('PA', 'LD'): 5.0, ('PA', 'LL'): 5.0, ('PA', 'DE'): 5.0, ('PA', 'AL'): 4.0, ('PA', 'PO'): 2.0, ('PA', 'RE'): 4.0, ('PA', 'PA'): 0.0, ('PA', 'NE'): 1.0, ('PA', 'VE'): 2.0, ('PA', 'UV'): 4.0, ('PA', 'PH'): 6.0, ('PA', 'GL'): 2.0, ('PA', 'LZ'): 7.0, ('PA', 'LH'): 6.0, ('PA', 'RZ'): 7.0, ('PA', 'PZ'): 1.0, ('PA', 'VZ'): 7.0, ('PA', 'HZ'): 7.0, ('PA', 'GZ'): 7.0, ('PA', 'X'): 4.0, ('NE', 'LB'): 4.0, ('NE', 'LD'): 5.0, ('NE', 'LL'): 5.0, ('NE', 'DE'): 5.0, ('NE', 'AL'): 4.0, ('NE', 'PO'): 3.0, ('NE', 'RE'): 4.0, ('NE', 'PA'): 1.0, ('NE', 'NE'): 0.0, ('NE', 'VE'): 1.0, ('NE', 'UV'): 3.0, ('NE', 'PH'): 5.0, ('NE', 'GL'): 2.0, ('NE', 'LZ'): 7.0, ('NE', 'LH'): 6.0, ('NE', 'RZ'): 7.0, ('NE', 'PZ'): 2.0, ('NE', 'VZ'): 7.0, ('NE', 'HZ'): 7.0, ('NE', 'GZ'): 7.0, ('NE', 'X'): 4.0, ('VE', 'LB'): 3.0, ('VE', 'LD'): 4.0, ('VE', 'LL'): 5.0, ('VE', 'DE'): 4.0, ('VE', 'AL'): 3.0, ('VE', 'PO'): 3.0, ('VE', 'RE'): 3.0, ('VE', 'PA'): 2.0, ('VE', 'NE'): 1.0, ('VE', 'VE'): 0.0, ('VE', 'UV'): 2.0, ('VE', 'PH'): 4.0, ('VE', 'GL'): 2.0, ('VE', 'LZ'): 7.0, ('VE', 'LH'): 6.0, ('VE', 'RZ'): 7.0, ('VE', 'PZ'): 7.0, ('VE', 'VZ'): 1.0, ('VE', 'HZ'): 7.0, ('VE', 'GZ'): 
7.0, ('VE', 'X'): 4.0, ('UV', 'LB'): 5.0, ('UV', 'LD'): 5.0, ('UV', 'LL'): 5.0, ('UV', 'DE'): 5.0, ('UV', 'AL'): 5.0, ('UV', 'PO'): 5.0, ('UV', 'RE'): 5.0, ('UV', 'PA'): 4.0, ('UV', 'NE'): 3.0, ('UV', 'VE'): 2.0, ('UV', 'UV'): 0.0, ('UV', 'PH'): 3.0, ('UV', 'GL'): 3.0, ('UV', 'LZ'): 7.0, ('UV', 'LH'): 6.0, ('UV', 'RZ'): 7.0, ('UV', 'PZ'): 7.0, ('UV', 'VZ'): 3.0, ('UV', 'HZ'): 7.0, ('UV', 'GZ'): 7.0, ('UV', 'X'): 4.0, ('PH', 'LB'): 6.0, ('PH', 'LD'): 6.0, ('PH', 'LL'): 6.0, ('PH', 'DE'): 6.0, ('PH', 'AL'): 6.0, ('PH', 'PO'): 6.0, ('PH', 'RE'): 6.0, ('PH', 'PA'): 6.0, ('PH', 'NE'): 5.0, ('PH', 'VE'): 4.0, ('PH', 'UV'): 3.0, ('PH', 'PH'): 0.0, ('PH', 'GL'): 3.0, ('PH', 'LZ'): 7.0, ('PH', 'LH'): 6.0, ('PH', 'RZ'): 7.0, ('PH', 'PZ'): 7.0, ('PH', 'VZ'): 7.0, ('PH', 'HZ'): 1.0, ('PH', 'GZ'): 7.0, ('PH', 'X'): 4.0, ('GL', 'LB'): 3.0, ('GL', 'LD'): 3.0, ('GL', 'LL'): 3.0, ('GL', 'DE'): 3.0, ('GL', 'AL'): 3.0, ('GL', 'PO'): 3.0, ('GL', 'RE'): 3.0, ('GL', 'PA'): 2.0, ('GL', 'NE'): 2.0, ('GL', 'VE'): 2.0, ('GL', 'UV'): 3.0, ('GL', 'PH'): 3.0, ('GL', 'GL'): 0.0, ('GL', 'LZ'): 5.0, ('GL', 'LH'): 4.0, ('GL', 'RZ'): 5.0, ('GL', 'PZ'): 5.0, ('GL', 'VZ'): 5.0, ('GL', 'HZ'): 5.0, ('GL', 'GZ'): 1.0, ('GL', 'EJ'): 5.0, ('GL', 'IT'): 8.0, ('GL', 'X'): 2.0, ('LZ', 'LB'): 1.0, ('LZ', 'LD'): 2.0, ('LZ', 'LL'): 3.0, ('LZ', 'DE'): 7.0, ('LZ', 'AL'): 7.0, ('LZ', 'PO'): 7.0, ('LZ', 'RE'): 7.0, ('LZ', 'PA'): 7.0, ('LZ', 'NE'): 7.0, ('LZ', 'VE'): 7.0, ('LZ', 'UV'): 7.0, ('LZ', 'PH'): 7.0, ('LZ', 'GL'): 5.0, ('LZ', 'LZ'): 0.0, ('LZ', 'LH'): 1.0, ('LZ', 'RZ'): 5.0, ('LZ', 'PZ'): 5.0, ('LZ', 'VZ'): 5.0, ('LZ', 'HZ'): 5.0, ('LZ', 'GZ'): 5.0, ('LZ', 'X'): 2.0, ('LH', 'LB'): 2.0, ('LH', 'LD'): 3.0, ('LH', 'LL'): 4.0, ('LH', 'DE'): 6.0, ('LH', 'AL'): 6.0, ('LH', 'PO'): 6.0, ('LH', 'RE'): 6.0, ('LH', 'PA'): 6.0, ('LH', 'NE'): 6.0, ('LH', 'VE'): 6.0, ('LH', 'UV'): 6.0, ('LH', 'PH'): 6.0, ('LH', 'GL'): 4.0, ('LH', 'LZ'): 1.0, ('LH', 'LH'): 0.0, ('LH', 'RZ'): 4.0, ('LH', 'PZ'): 4.0, ('LH', 'VZ'): 4.0, 
('LH', 'HZ'): 4.0, ('LH', 'GZ'): 4.0, ('LH', 'X'): 1.0, ('RZ', 'LB'): 7.0, ('RZ', 'LD'): 7.0, ('RZ', 'LL'): 7.0, ('RZ', 'DE'): 3.0, ('RZ', 'AL'): 2.0, ('RZ', 'PO'): 2.0, ('RZ', 'RE'): 2.0, ('RZ', 'PA'): 7.0, ('RZ', 'NE'): 7.0, ('RZ', 'VE'): 7.0, ('RZ', 'UV'): 7.0, ('RZ', 'PH'): 7.0, ('RZ', 'GL'): 5.0, ('RZ', 'LZ'): 5.0, ('RZ', 'LH'): 4.0, ('RZ', 'RZ'): 0.0, ('RZ', 'PZ'): 5.0, ('RZ', 'VZ'): 5.0, ('RZ', 'HZ'): 5.0, ('RZ', 'GZ'): 5.0, ('RZ', 'X'): 2.0, ('PZ', 'LB'): 7.0, ('PZ', 'LD'): 7.0, ('PZ', 'LL'): 7.0, ('PZ', 'DE'): 7.0, ('PZ', 'AL'): 7.0, ('PZ', 'PO'): 7.0, ('PZ', 'RE'): 7.0, ('PZ', 'PA'): 1.0, ('PZ', 'NE'): 2.0, ('PZ', 'VE'): 7.0, ('PZ', 'UV'): 7.0, ('PZ', 'PH'): 7.0, ('PZ', 'GL'): 5.0, ('PZ', 'LZ'): 5.0, ('PZ', 'LH'): 4.0, ('PZ', 'RZ'): 5.0, ('PZ', 'PZ'): 0.0, ('PZ', 'VZ'): 5.0, ('PZ', 'HZ'): 5.0, ('PZ', 'GZ'): 5.0, ('PZ', 'X'): 2.0, ('VZ', 'LB'): 7.0, ('VZ', 'LD'): 7.0, ('VZ', 'LL'): 7.0, ('VZ', 'DE'): 7.0, ('VZ', 'AL'): 7.0, ('VZ', 'PO'): 7.0, ('VZ', 'RE'): 7.0, ('VZ', 'PA'): 7.0, ('VZ', 'NE'): 7.0, ('VZ', 'VE'): 1.0, ('VZ', 'UV'): 3.0, ('VZ', 'PH'): 7.0, ('VZ', 'GL'): 5.0, ('VZ', 'LZ'): 5.0, ('VZ', 'LH'): 4.0, ('VZ', 'RZ'): 5.0, ('VZ', 'PZ'): 5.0, ('VZ', 'VZ'): 0.0, ('VZ', 'HZ'): 2.0, ('VZ', 'GZ'): 5.0, ('VZ', 'X'): 2.0, ('HZ', 'LB'): 7.0, ('HZ', 'LD'): 7.0, ('HZ', 'LL'): 7.0, ('HZ', 'DE'): 7.0, ('HZ', 'AL'): 7.0, ('HZ', 'PO'): 7.0, ('HZ', 'RE'): 7.0, ('HZ', 'PA'): 7.0, ('HZ', 'NE'): 7.0, ('HZ', 'VE'): 7.0, ('HZ', 'UV'): 7.0, ('HZ', 'PH'): 1.0, ('HZ', 'GL'): 5.0, ('HZ', 'LZ'): 5.0, ('HZ', 'LH'): 4.0, ('HZ', 'RZ'): 5.0, ('HZ', 'PZ'): 5.0, ('HZ', 'VZ'): 2.0, ('HZ', 'HZ'): 0.0, ('HZ', 'GZ'): 5.0, ('HZ', 'X'): 2.0, ('GZ', 'LB'): 7.0, ('GZ', 'LD'): 7.0, ('GZ', 'LL'): 7.0, ('GZ', 'DE'): 7.0, ('GZ', 'AL'): 7.0, ('GZ', 'PO'): 7.0, ('GZ', 'RE'): 7.0, ('GZ', 'PA'): 7.0, ('GZ', 'NE'): 7.0, ('GZ', 'VE'): 7.0, ('GZ', 'UV'): 7.0, ('GZ', 'PH'): 7.0, ('GZ', 'GL'): 1.0, ('GZ', 'LZ'): 5.0, ('GZ', 'LH'): 4.0, ('GZ', 'RZ'): 5.0, ('GZ', 'PZ'): 5.0, ('GZ', 'VZ'): 5.0, ('GZ', 
'HZ'): 5.0, ('GZ', 'GZ'): 0.0, ('GZ', 'EJ'): 4.0, ('GZ', 'IT'): 7.0, ('GZ', 'X'): 2.0, ('LA', 'LA'): 0.0, ('LA', 'NZ'): 11.0, ('LA', 'NA'): 9.0, ('LA', 'PL'): 11.0, ('LA', 'NF'): 11.0, ('LA', 'SF'): 11.0, ('LA', 'NS'): 9.0, ('LA', 'SS'): 9.0, ('LA', 'VI'): 9.0, ('LA', 'TA'): 9.0, ('LA', 'SV'): 8.0, ('LA', 'CL'): 11.0, ('LA', 'NC'): 11.0, ('LA', 'MC'): 10.0, ('LA', 'MI'): 10.0, ('LA', 'MO'): 10.0, ('LA', 'NO'): 11.0, ('LA', 'OP'): 11.0, ('LA', 'X'): 5.0, ('NZ', 'LA'): 11.0, ('NZ', 'NZ'): 0.0, ('NZ', 'NA'): 2.0, ('NZ', 'PL'): 5.0, ('NZ', 'NF'): 7.0, ('NZ', 'SF'): 7.0, ('NZ', 'NS'): 6.0, ('NZ', 'SS'): 7.0, ('NZ', 'VI'): 6.0, ('NZ', 'TA'): 5.0, ('NZ', 'SV'): 4.0, ('NZ', 'CL'): 6.0, ('NZ', 'NC'): 7.0, ('NZ', 'MC'): 8.0, ('NZ', 'MI'): 9.0, ('NZ', 'MO'): 10.0, ('NZ', 'NO'): 11.0, ('NZ', 'OP'): 12.0, ('NZ', 'X'): 3.0, ('NA', 'LA'): 9.0, ('NA', 'NZ'): 2.0, ('NA', 'NA'): 0.0, ('NA', 'PL'): 3.0, ('NA', 'NF'): 5.0, ('NA', 'SF'): 5.0, ('NA', 'NS'): 4.0, ('NA', 'SS'): 5.0, ('NA', 'VI'): 4.0, ('NA', 'TA'): 3.0, ('NA', 'SV'): 2.0, ('NA', 'CL'): 4.0, ('NA', 'NC'): 5.0, ('NA', 'MC'): 6.0, ('NA', 'MI'): 7.0, ('NA', 'MO'): 8.0, ('NA', 'NO'): 9.0, ('NA', 'OP'): 10.0, ('NA', 'X'): 3.0, ('PL', 'LA'): 11.0, ('PL', 'NZ'): 5.0, ('PL', 'NA'): 3.0, ('PL', 'PL'): 0.0, ('PL', 'NF'): 2.0, ('PL', 'SF'): 2.0, ('PL', 'NS'): 4.0, ('PL', 'SS'): 4.0, ('PL', 'VI'): 5.0, ('PL', 'TA'): 4.0, ('PL', 'SV'): 6.0, ('PL', 'CL'): 8.0, ('PL', 'NC'): 9.0, ('PL', 'MC'): 10.0, ('PL', 'MI'): 11.0, ('PL', 'MO'): 12.0, ('PL', 'NO'): 13.0, ('PL', 'OP'): 14.0, ('PL', 'X'): 5.0, ('NF', 'LA'): 11.0, ('NF', 'NZ'): 7.0, ('NF', 'NA'): 5.0, ('NF', 'PL'): 2.0, ('NF', 'NF'): 0.0, ('NF', 'SF'): 1.0, ('NF', 'NS'): 2.0, ('NF', 'SS'): 3.0, ('NF', 'VI'): 4.0, ('NF', 'TA'): 3.0, ('NF', 'SV'): 4.0, ('NF', 'CL'): 6.0, ('NF', 'NC'): 7.0, ('NF', 'MC'): 8.0, ('NF', 'MI'): 9.0, ('NF', 'MO'): 10.0, ('NF', 'NO'): 11.0, ('NF', 'OP'): 12.0, ('NF', 'X'): 5.0, ('SF', 'LA'): 11.0, ('SF', 'NZ'): 7.0, ('SF', 'NA'): 5.0, ('SF', 'PL'): 2.0, ('SF', 
'NF'): 1.0, ('SF', 'SF'): 0.0, ('SF', 'NS'): 3.0, ('SF', 'SS'): 2.0, ('SF', 'VI'): 5.0, ('SF', 'TA'): 4.0, ('SF', 'SV'): 5.0, ('SF', 'CL'): 7.0, ('SF', 'NC'): 8.0, ('SF', 'MC'): 9.0, ('SF', 'MI'): 10.0, ('SF', 'MO'): 11.0, ('SF', 'NO'): 12.0, ('SF', 'OP'): 13.0, ('SF', 'X'): 5.0, ('NS', 'LA'): 9.0, ('NS', 'NZ'): 6.0, ('NS', 'NA'): 4.0, ('NS', 'PL'): 4.0, ('NS', 'NF'): 2.0, ('NS', 'SF'): 3.0, ('NS', 'NS'): 0.0, ('NS', 'SS'): 1.0, ('NS', 'VI'): 2.0, ('NS', 'TA'): 1.0, ('NS', 'SV'): 2.0, ('NS', 'CL'): 4.0, ('NS', 'NC'): 5.0, ('NS', 'MC'): 6.0, ('NS', 'MI'): 7.0, ('NS', 'MO'): 8.0, ('NS', 'NO'): 9.0, ('NS', 'OP'): 10.0, ('NS', 'X'): 3.0, ('SS', 'LA'): 9.0, ('SS', 'NZ'): 7.0, ('SS', 'NA'): 5.0, ('SS', 'PL'): 4.0, ('SS', 'NF'): 3.0, ('SS', 'SF'): 2.0, ('SS', 'NS'): 1.0, ('SS', 'SS'): 0.0, ('SS', 'VI'): 3.0, ('SS', 'TA'): 2.0, ('SS', 'SV'): 3.0, ('SS', 'CL'): 5.0, ('SS', 'NC'): 6.0, ('SS', 'MC'): 7.0, ('SS', 'MI'): 8.0, ('SS', 'MO'): 9.0, ('SS', 'NO'): 10.0, ('SS', 'OP'): 11.0, ('SS', 'X'): 3.0, ('VI', 'LA'): 9.0, ('VI', 'NZ'): 6.0, ('VI', 'NA'): 4.0, ('VI', 'PL'): 5.0, ('VI', 'NF'): 4.0, ('VI', 'SF'): 5.0, ('VI', 'NS'): 2.0, ('VI', 'SS'): 3.0, ('VI', 'VI'): 0.0, ('VI', 'TA'): 1.0, ('VI', 'SV'): 3.0, ('VI', 'CL'): 5.0, ('VI', 'NC'): 6.0, ('VI', 'MC'): 7.0, ('VI', 'MI'): 8.0, ('VI', 'MO'): 9.0, ('VI', 'NO'): 10.0, ('VI', 'OP'): 11.0, ('VI', 'X'): 3.0, ('TA', 'LA'): 9.0, ('TA', 'NZ'): 5.0, ('TA', 'NA'): 3.0, ('TA', 'PL'): 4.0, ('TA', 'NF'): 3.0, ('TA', 'SF'): 4.0, ('TA', 'NS'): 1.0, ('TA', 'SS'): 2.0, ('TA', 'VI'): 1.0, ('TA', 'TA'): 0.0, ('TA', 'SV'): 3.0, ('TA', 'CL'): 5.0, ('TA', 'NC'): 6.0, ('TA', 'MC'): 7.0, ('TA', 'MI'): 8.0, ('TA', 'MO'): 9.0, ('TA', 'NO'): 10.0, ('TA', 'OP'): 11.0, ('TA', 'X'): 3.0, ('SV', 'LA'): 8.0, ('SV', 'NZ'): 4.0, ('SV', 'NA'): 2.0, ('SV', 'PL'): 6.0, ('SV', 'NF'): 4.0, ('SV', 'SF'): 5.0, ('SV', 'NS'): 2.0, ('SV', 'SS'): 3.0, ('SV', 'VI'): 3.0, ('SV', 'TA'): 3.0, ('SV', 'SV'): 0.0, ('SV', 'CL'): 2.0, ('SV', 'NC'): 3.0, ('SV', 'MC'): 4.0, 
('SV', 'MI'): 5.0, ('SV', 'MO'): 6.0, ('SV', 'NO'): 7.0, ('SV', 'OP'): 8.0, ('SV', 'X'): 2.0, ('CL', 'LA'): 11.0, ('CL', 'NZ'): 6.0, ('CL', 'NA'): 4.0, ('CL', 'PL'): 8.0, ('CL', 'NF'): 6.0, ('CL', 'SF'): 7.0, ('CL', 'NS'): 4.0, ('CL', 'SS'): 5.0, ('CL', 'VI'): 5.0, ('CL', 'TA'): 5.0, ('CL', 'SV'): 2.0, ('CL', 'CL'): 0.0, ('CL', 'NC'): 1.0, ('CL', 'MC'): 2.0, ('CL', 'MI'): 3.0, ('CL', 'MO'): 4.0, ('CL', 'NO'): 5.0, ('CL', 'OP'): 6.0, ('CL', 'X'): 5.0, ('NC', 'LA'): 11.0, ('NC', 'NZ'): 7.0, ('NC', 'NA'): 5.0, ('NC', 'PL'): 9.0, ('NC', 'NF'): 7.0, ('NC', 'SF'): 8.0, ('NC', 'NS'): 5.0, ('NC', 'SS'): 6.0, ('NC', 'VI'): 6.0, ('NC', 'TA'): 6.0, ('NC', 'SV'): 3.0, ('NC', 'CL'): 1.0, ('NC', 'NC'): 0.0, ('NC', 'MC'): 1.0, ('NC', 'MI'): 2.0, ('NC', 'MO'): 3.0, ('NC', 'NO'): 4.0, ('NC', 'OP'): 5.0, ('NC', 'X'): 5.0, ('MC', 'LA'): 10.0, ('MC', 'NZ'): 8.0, ('MC', 'NA'): 6.0, ('MC', 'PL'): 10.0, ('MC', 'NF'): 8.0, ('MC', 'SF'): 9.0, ('MC', 'NS'): 6.0, ('MC', 'SS'): 7.0, ('MC', 'VI'): 7.0, ('MC', 'TA'): 7.0, ('MC', 'SV'): 4.0, ('MC', 'CL'): 2.0, ('MC', 'NC'): 1.0, ('MC', 'MC'): 0.0, ('MC', 'MI'): 1.0, ('MC', 'MO'): 2.0, ('MC', 'NO'): 3.0, ('MC', 'OP'): 4.0, ('MC', 'X'): 4.0, ('MI', 'LA'): 10.0, ('MI', 'NZ'): 9.0, ('MI', 'NA'): 7.0, ('MI', 'PL'): 11.0, ('MI', 'NF'): 9.0, ('MI', 'SF'): 10.0, ('MI', 'NS'): 7.0, ('MI', 'SS'): 8.0, ('MI', 'VI'): 8.0, ('MI', 'TA'): 8.0, ('MI', 'SV'): 5.0, ('MI', 'CL'): 3.0, ('MI', 'NC'): 2.0, ('MI', 'MC'): 1.0, ('MI', 'MI'): 0.0, ('MI', 'MO'): 1.0, ('MI', 'NO'): 2.0, ('MI', 'OP'): 3.0, ('MI', 'X'): 4.0, ('MO', 'LA'): 10.0, ('MO', 'NZ'): 10.0, ('MO', 'NA'): 8.0, ('MO', 'PL'): 12.0, ('MO', 'NF'): 10.0, ('MO', 'SF'): 11.0, ('MO', 'NS'): 8.0, ('MO', 'SS'): 9.0, ('MO', 'VI'): 9.0, ('MO', 'TA'): 9.0, ('MO', 'SV'): 6.0, ('MO', 'CL'): 4.0, ('MO', 'NC'): 3.0, ('MO', 'MC'): 2.0, ('MO', 'MI'): 1.0, ('MO', 'MO'): 0.0, ('MO', 'NO'): 1.0, ('MO', 'OP'): 2.0, ('MO', 'X'): 4.0, ('NO', 'LA'): 11.0, ('NO', 'NZ'): 11.0, ('NO', 'NA'): 9.0, ('NO', 'PL'): 13.0, ('NO', 'NF'): 
11.0, ('NO', 'SF'): 12.0, ('NO', 'NS'): 9.0, ('NO', 'SS'): 10.0, ('NO', 'VI'): 10.0, ('NO', 'TA'): 10.0, ('NO', 'SV'): 7.0, ('NO', 'CL'): 5.0, ('NO', 'NC'): 4.0, ('NO', 'MC'): 3.0, ('NO', 'MI'): 2.0, ('NO', 'MO'): 1.0, ('NO', 'NO'): 0.0, ('NO', 'OP'): 1.0, ('NO', 'X'): 5.0, ('OP', 'LA'): 11.0, ('OP', 'NZ'): 12.0, ('OP', 'NA'): 10.0, ('OP', 'PL'): 14.0, ('OP', 'NF'): 12.0, ('OP', 'SF'): 13.0, ('OP', 'NS'): 10.0, ('OP', 'SS'): 11.0, ('OP', 'VI'): 11.0, ('OP', 'TA'): 11.0, ('OP', 'SV'): 8.0, ('OP', 'CL'): 6.0, ('OP', 'NC'): 5.0, ('OP', 'MC'): 4.0, ('OP', 'MI'): 3.0, ('OP', 'MO'): 2.0, ('OP', 'NO'): 1.0, ('OP', 'OP'): 0.0, ('OP', 'X'): 5.0, ('VO', 'VO'): 0.0, ('VO', 'AS'): 8.0, ('VO', 'EJ'): 10.0, ('VO', 'IT'): 7.0, ('VO', 'IN'): 10.0, ('VO', 'X'): 3.0, ('AS', 'VO'): 8.0, ('AS', 'AS'): 0.0, ('AS', 'EJ'): 11.0, ('AS', 'IT'): 8.0, ('AS', 'IN'): 11.0, ('AS', 'X'): 4.0, ('EJ', 'GL'): 5.0, ('EJ', 'GZ'): 4.0, ('EJ', 'VO'): 10.0, ('EJ', 'AS'): 11.0, ('EJ', 'EJ'): 0.0, ('EJ', 'IT'): 3.0, ('EJ', 'IN'): 13.0, ('EJ', 'X'): 6.0, ('IT', 'GL'): 8.0, ('IT', 'GZ'): 7.0, ('IT', 'VO'): 7.0, ('IT', 'AS'): 8.0, ('IT', 'EJ'): 3.0, ('IT', 'IT'): 0.0, ('IT', 'IN'): 10.0, ('IT', 'X'): 3.0, ('IN', 'VO'): 10.0, ('IN', 'AS'): 11.0, ('IN', 'EJ'): 13.0, ('IN', 'IT'): 10.0, ('IN', 'IN'): 0.0, ('IN', 'X'): 6.0, ('X', 'LB'): 4.0, ('X', 'LD'): 4.0, ('X', 'LL'): 4.0, ('X', 'DE'): 4.0, ('X', 'AL'): 4.0, ('X', 'PO'): 4.0, ('X', 'RE'): 4.0, ('X', 'PA'): 4.0, ('X', 'NE'): 4.0, ('X', 'VE'): 4.0, ('X', 'UV'): 4.0, ('X', 'PH'): 4.0, ('X', 'GL'): 2.0, ('X', 'LZ'): 2.0, ('X', 'LH'): 1.0, ('X', 'RZ'): 2.0, ('X', 'PZ'): 2.0, ('X', 'VZ'): 2.0, ('X', 'HZ'): 2.0, ('X', 'GZ'): 2.0, ('X', 'LA'): 5.0, ('X', 'NZ'): 3.0, ('X', 'NA'): 3.0, ('X', 'PL'): 5.0, ('X', 'NF'): 5.0, ('X', 'SF'): 5.0, ('X', 'NS'): 3.0, ('X', 'SS'): 3.0, ('X', 'VI'): 3.0, ('X', 'TA'): 3.0, ('X', 'SV'): 2.0, ('X', 'CL'): 5.0, ('X', 'NC'): 5.0, ('X', 'MC'): 4.0, ('X', 'MI'): 4.0, ('X', 'MO'): 4.0, ('X', 'NO'): 5.0, ('X', 'OP'): 5.0, ('X', 'VO'): 3.0, 
('X', 'AS'): 4.0, ('X', 'EJ'): 6.0, ('X', 'IT'): 3.0, ('X', 'IN'): 6.0, ('X', 'X'): 0.0}
asymmetric_identity_feature_distance_map = {('LB', 'LB'): 1, ('LB', 'LD'): 1, ('LB', 'LL'): 1, ('LB', 'DE'): 1, ('LB', 'AL'): 1, ('LB', 'PO'): 1, ('LB', 'RE'): 1, ('LB', 'PA'): 1, ('LB', 'NE'): 1, ('LB', 'VE'): 1, ('LB', 'UV'): 1, ('LB', 'PH'): 1, ('LB', 'GL'): 1, ('LB', 'LZ'): 1, ('LB', 'LH'): 1, ('LB', 'RZ'): 1, ('LB', 'PZ'): 1, ('LB', 'VZ'): 1, ('LB', 'HZ'): 1, ('LB', 'GZ'): 1, ('LB', 'X'): 1, ('LD', 'LB'): 1, ('LD', 'LD'): 1, ('LD', 'LL'): 1, ('LD', 'DE'): 1, ('LD', 'AL'): 1, ('LD', 'PO'): 1, ('LD', 'RE'): 1, ('LD', 'PA'): 1, ('LD', 'NE'): 1, ('LD', 'VE'): 1, ('LD', 'UV'): 1, ('LD', 'PH'): 1, ('LD', 'GL'): 1, ('LD', 'LZ'): 1, ('LD', 'LH'): 1, ('LD', 'RZ'): 1, ('LD', 'PZ'): 1, ('LD', 'VZ'): 1, ('LD', 'HZ'): 1, ('LD', 'GZ'): 1, ('LD', 'X'): 1, ('LL', 'LB'): 1, ('LL', 'LD'): 1, ('LL', 'LL'): 1, ('LL', 'DE'): 1, ('LL', 'AL'): 1, ('LL', 'PO'): 1, ('LL', 'RE'): 1, ('LL', 'PA'): 1, ('LL', 'NE'): 1, ('LL', 'VE'): 1, ('LL', 'UV'): 1, ('LL', 'PH'): 1, ('LL', 'GL'): 1, ('LL', 'LZ'): 1, ('LL', 'LH'): 1, ('LL', 'RZ'): 1, ('LL', 'PZ'): 1, ('LL', 'VZ'): 1, ('LL', 'HZ'): 1, ('LL', 'GZ'): 1, ('LL', 'X'): 1, ('DE', 'LB'): 1, ('DE', 'LD'): 1, ('DE', 'LL'): 1, ('DE', 'DE'): 1, ('DE', 'AL'): 1, ('DE', 'PO'): 1, ('DE', 'RE'): 1, ('DE', 'PA'): 1, ('DE', 'NE'): 1, ('DE', 'VE'): 1, ('DE', 'UV'): 1, ('DE', 'PH'): 1, ('DE', 'GL'): 1, ('DE', 'LZ'): 1, ('DE', 'LH'): 1, ('DE', 'RZ'): 1, ('DE', 'PZ'): 1, ('DE', 'VZ'): 1, ('DE', 'HZ'): 1, ('DE', 'GZ'): 1, ('DE', 'X'): 1, ('AL', 'LB'): 1, ('AL', 'LD'): 1, ('AL', 'LL'): 1, ('AL', 'DE'): 1, ('AL', 'AL'): 1, ('AL', 'PO'): 1, ('AL', 'RE'): 1, ('AL', 'PA'): 1, ('AL', 'NE'): 1, ('AL', 'VE'): 1, ('AL', 'UV'): 1, ('AL', 'PH'): 1, ('AL', 'GL'): 1, ('AL', 'LZ'): 1, ('AL', 'LH'): 1, ('AL', 'RZ'): 1, ('AL', 'PZ'): 1, ('AL', 'VZ'): 1, ('AL', 'HZ'): 1, ('AL', 'GZ'): 1, ('AL', 'X'): 1, ('PO', 'LB'): 1, ('PO', 'LD'): 1, ('PO', 'LL'): 1, ('PO', 'DE'): 1, ('PO', 'AL'): 1, ('PO', 'PO'): 1, ('PO', 'RE'): 1, ('PO', 'PA'): 1, ('PO', 'NE'): 1, ('PO', 'VE'): 1, 
('PO', 'UV'): 1, ('PO', 'PH'): 1, ('PO', 'GL'): 1, ('PO', 'LZ'): 1, ('PO', 'LH'): 1, ('PO', 'RZ'): 1, ('PO', 'PZ'): 1, ('PO', 'VZ'): 1, ('PO', 'HZ'): 1, ('PO', 'GZ'): 1, ('PO', 'X'): 1, ('RE', 'LB'): 1, ('RE', 'LD'): 1, ('RE', 'LL'): 1, ('RE', 'DE'): 1, ('RE', 'AL'): 1, ('RE', 'PO'): 1, ('RE', 'RE'): 1, ('RE', 'PA'): 1, ('RE', 'NE'): 1, ('RE', 'VE'): 1, ('RE', 'UV'): 1, ('RE', 'PH'): 1, ('RE', 'GL'): 1, ('RE', 'LZ'): 1, ('RE', 'LH'): 1, ('RE', 'RZ'): 1, ('RE', 'PZ'): 1, ('RE', 'VZ'): 1, ('RE', 'HZ'): 1, ('RE', 'GZ'): 1, ('RE', 'X'): 1, ('PA', 'LB'): 1, ('PA', 'LD'): 1, ('PA', 'LL'): 1, ('PA', 'DE'): 1, ('PA', 'AL'): 1, ('PA', 'PO'): 1, ('PA', 'RE'): 1, ('PA', 'PA'): 1, ('PA', 'NE'): 1, ('PA', 'VE'): 1, ('PA', 'UV'): 1, ('PA', 'PH'): 1, ('PA', 'GL'): 1, ('PA', 'LZ'): 1, ('PA', 'LH'): 1, ('PA', 'RZ'): 1, ('PA', 'PZ'): 1, ('PA', 'VZ'): 1, ('PA', 'HZ'): 1, ('PA', 'GZ'): 1, ('PA', 'X'): 1, ('NE', 'LB'): 1, ('NE', 'LD'): 1, ('NE', 'LL'): 1, ('NE', 'DE'): 1, ('NE', 'AL'): 1, ('NE', 'PO'): 1, ('NE', 'RE'): 1, ('NE', 'PA'): 1, ('NE', 'NE'): 1, ('NE', 'VE'): 1, ('NE', 'UV'): 1, ('NE', 'PH'): 1, ('NE', 'GL'): 1, ('NE', 'LZ'): 1, ('NE', 'LH'): 1, ('NE', 'RZ'): 1, ('NE', 'PZ'): 1, ('NE', 'VZ'): 1, ('NE', 'HZ'): 1, ('NE', 'GZ'): 1, ('NE', 'X'): 1, ('VE', 'LB'): 1, ('VE', 'LD'): 1, ('VE', 'LL'): 1, ('VE', 'DE'): 1, ('VE', 'AL'): 1, ('VE', 'PO'): 1, ('VE', 'RE'): 1, ('VE', 'PA'): 1, ('VE', 'NE'): 1, ('VE', 'VE'): 1, ('VE', 'UV'): 1, ('VE', 'PH'): 1, ('VE', 'GL'): 1, ('VE', 'LZ'): 1, ('VE', 'LH'): 1, ('VE', 'RZ'): 1, ('VE', 'PZ'): 1, ('VE', 'VZ'): 1, ('VE', 'HZ'): 1, ('VE', 'GZ'): 1, ('VE', 'X'): 1, ('UV', 'LB'): 1, ('UV', 'LD'): 1, ('UV', 'LL'): 1, ('UV', 'DE'): 1, ('UV', 'AL'): 1, ('UV', 'PO'): 1, ('UV', 'RE'): 1, ('UV', 'PA'): 1, ('UV', 'NE'): 1, ('UV', 'VE'): 1, ('UV', 'UV'): 1, ('UV', 'PH'): 1, ('UV', 'GL'): 1, ('UV', 'LZ'): 1, ('UV', 'LH'): 1, ('UV', 'RZ'): 1, ('UV', 'PZ'): 1, ('UV', 'VZ'): 1, ('UV', 'HZ'): 1, ('UV', 'GZ'): 1, ('UV', 'X'): 1, ('PH', 'LB'): 1, ('PH', 'LD'): 1, 
('PH', 'LL'): 1, ('PH', 'DE'): 1, ('PH', 'AL'): 1, ('PH', 'PO'): 1, ('PH', 'RE'): 1, ('PH', 'PA'): 1, ('PH', 'NE'): 1, ('PH', 'VE'): 1, ('PH', 'UV'): 1, ('PH', 'PH'): 1, ('PH', 'GL'): 1, ('PH', 'LZ'): 1, ('PH', 'LH'): 1, ('PH', 'RZ'): 1, ('PH', 'PZ'): 1, ('PH', 'VZ'): 1, ('PH', 'HZ'): 1, ('PH', 'GZ'): 1, ('PH', 'X'): 1, ('GL', 'LB'): 1, ('GL', 'LD'): 1, ('GL', 'LL'): 1, ('GL', 'DE'): 1, ('GL', 'AL'): 1, ('GL', 'PO'): 1, ('GL', 'RE'): 1, ('GL', 'PA'): 1, ('GL', 'NE'): 1, ('GL', 'VE'): 1, ('GL', 'UV'): 1, ('GL', 'PH'): 1, ('GL', 'GL'): 1, ('GL', 'LZ'): 1, ('GL', 'LH'): 1, ('GL', 'RZ'): 1, ('GL', 'PZ'): 1, ('GL', 'VZ'): 1, ('GL', 'HZ'): 1, ('GL', 'GZ'): 1, ('GL', 'EJ'): 1, ('GL', 'IT'): 1, ('GL', 'X'): 1, ('LZ', 'LB'): 1, ('LZ', 'LD'): 1, ('LZ', 'LL'): 1, ('LZ', 'DE'): 1, ('LZ', 'AL'): 1, ('LZ', 'PO'): 1, ('LZ', 'RE'): 1, ('LZ', 'PA'): 1, ('LZ', 'NE'): 1, ('LZ', 'VE'): 1, ('LZ', 'UV'): 1, ('LZ', 'PH'): 1, ('LZ', 'GL'): 1, ('LZ', 'LZ'): 1, ('LZ', 'LH'): 1, ('LZ', 'RZ'): 1, ('LZ', 'PZ'): 1, ('LZ', 'VZ'): 1, ('LZ', 'HZ'): 1, ('LZ', 'GZ'): 1, ('LZ', 'X'): 1, ('LH', 'LB'): 1, ('LH', 'LD'): 1, ('LH', 'LL'): 1, ('LH', 'DE'): 1, ('LH', 'AL'): 1, ('LH', 'PO'): 1, ('LH', 'RE'): 1, ('LH', 'PA'): 1, ('LH', 'NE'): 1, ('LH', 'VE'): 1, ('LH', 'UV'): 1, ('LH', 'PH'): 1, ('LH', 'GL'): 1, ('LH', 'LZ'): 1, ('LH', 'LH'): 1, ('LH', 'RZ'): 1, ('LH', 'PZ'): 1, ('LH', 'VZ'): 1, ('LH', 'HZ'): 1, ('LH', 'GZ'): 1, ('LH', 'X'): 1, ('RZ', 'LB'): 1, ('RZ', 'LD'): 1, ('RZ', 'LL'): 1, ('RZ', 'DE'): 1, ('RZ', 'AL'): 1, ('RZ', 'PO'): 1, ('RZ', 'RE'): 1, ('RZ', 'PA'): 1, ('RZ', 'NE'): 1, ('RZ', 'VE'): 1, ('RZ', 'UV'): 1, ('RZ', 'PH'): 1, ('RZ', 'GL'): 1, ('RZ', 'LZ'): 1, ('RZ', 'LH'): 1, ('RZ', 'RZ'): 1, ('RZ', 'PZ'): 1, ('RZ', 'VZ'): 1, ('RZ', 'HZ'): 1, ('RZ', 'GZ'): 1, ('RZ', 'X'): 1, ('PZ', 'LB'): 1, ('PZ', 'LD'): 1, ('PZ', 'LL'): 1, ('PZ', 'DE'): 1, ('PZ', 'AL'): 1, ('PZ', 'PO'): 1, ('PZ', 'RE'): 1, ('PZ', 'PA'): 1, ('PZ', 'NE'): 1, ('PZ', 'VE'): 1, ('PZ', 'UV'): 1, ('PZ', 'PH'): 1, ('PZ', 'GL'): 
1, ('PZ', 'LZ'): 1, ('PZ', 'LH'): 1, ('PZ', 'RZ'): 1, ('PZ', 'PZ'): 1, ('PZ', 'VZ'): 1, ('PZ', 'HZ'): 1, ('PZ', 'GZ'): 1, ('PZ', 'X'): 1, ('VZ', 'LB'): 1, ('VZ', 'LD'): 1, ('VZ', 'LL'): 1, ('VZ', 'DE'): 1, ('VZ', 'AL'): 1, ('VZ', 'PO'): 1, ('VZ', 'RE'): 1, ('VZ', 'PA'): 1, ('VZ', 'NE'): 1, ('VZ', 'VE'): 1, ('VZ', 'UV'): 1, ('VZ', 'PH'): 1, ('VZ', 'GL'): 1, ('VZ', 'LZ'): 1, ('VZ', 'LH'): 1, ('VZ', 'RZ'): 1, ('VZ', 'PZ'): 1, ('VZ', 'VZ'): 1, ('VZ', 'HZ'): 1, ('VZ', 'GZ'): 1, ('VZ', 'X'): 1, ('HZ', 'LB'): 1, ('HZ', 'LD'): 1, ('HZ', 'LL'): 1, ('HZ', 'DE'): 1, ('HZ', 'AL'): 1, ('HZ', 'PO'): 1, ('HZ', 'RE'): 1, ('HZ', 'PA'): 1, ('HZ', 'NE'): 1, ('HZ', 'VE'): 1, ('HZ', 'UV'): 1, ('HZ', 'PH'): 1, ('HZ', 'GL'): 1, ('HZ', 'LZ'): 1, ('HZ', 'LH'): 1, ('HZ', 'RZ'): 1, ('HZ', 'PZ'): 1, ('HZ', 'VZ'): 1, ('HZ', 'HZ'): 1, ('HZ', 'GZ'): 1, ('HZ', 'X'): 1, ('GZ', 'LB'): 1, ('GZ', 'LD'): 1, ('GZ', 'LL'): 1, ('GZ', 'DE'): 1, ('GZ', 'AL'): 1, ('GZ', 'PO'): 1, ('GZ', 'RE'): 1, ('GZ', 'PA'): 1, ('GZ', 'NE'): 1, ('GZ', 'VE'): 1, ('GZ', 'UV'): 1, ('GZ', 'PH'): 1, ('GZ', 'GL'): 1, ('GZ', 'LZ'): 1, ('GZ', 'LH'): 1, ('GZ', 'RZ'): 1, ('GZ', 'PZ'): 1, ('GZ', 'VZ'): 1, ('GZ', 'HZ'): 1, ('GZ', 'GZ'): 1, ('GZ', 'EJ'): 1, ('GZ', 'IT'): 1, ('GZ', 'X'): 1, ('LA', 'LA'): 1, ('LA', 'NZ'): 1, ('LA', 'NA'): 1, ('LA', 'PL'): 1, ('LA', 'NF'): 1, ('LA', 'SF'): 1, ('LA', 'NS'): 1, ('LA', 'SS'): 1, ('LA', 'VI'): 1, ('LA', 'TA'): 1, ('LA', 'SV'): 1, ('LA', 'CL'): 1, ('LA', 'NC'): 1, ('LA', 'MC'): 1, ('LA', 'MI'): 1, ('LA', 'MO'): 1, ('LA', 'NO'): 1, ('LA', 'OP'): 1, ('LA', 'X'): 1, ('NZ', 'LA'): 1, ('NZ', 'NZ'): 1, ('NZ', 'NA'): 1, ('NZ', 'PL'): 1, ('NZ', 'NF'): 1, ('NZ', 'SF'): 1, ('NZ', 'NS'): 1, ('NZ', 'SS'): 1, ('NZ', 'VI'): 1, ('NZ', 'TA'): 1, ('NZ', 'SV'): 1, ('NZ', 'CL'): 1, ('NZ', 'NC'): 1, ('NZ', 'MC'): 1, ('NZ', 'MI'): 1, ('NZ', 'MO'): 1, ('NZ', 'NO'): 1, ('NZ', 'OP'): 1, ('NZ', 'X'): 1, ('NA', 'LA'): 1, ('NA', 'NZ'): 1, ('NA', 'NA'): 1, ('NA', 'PL'): 1, ('NA', 'NF'): 1, ('NA', 'SF'): 1, ('NA', 'NS'): 
1, ('NA', 'SS'): 1, ('NA', 'VI'): 1, ('NA', 'TA'): 1, ('NA', 'SV'): 1, ('NA', 'CL'): 1, ('NA', 'NC'): 1, ('NA', 'MC'): 1, ('NA', 'MI'): 1, ('NA', 'MO'): 1, ('NA', 'NO'): 1, ('NA', 'OP'): 1, ('NA', 'X'): 1, ('PL', 'LA'): 1, ('PL', 'NZ'): 1, ('PL', 'NA'): 1, ('PL', 'PL'): 1, ('PL', 'NF'): 1, ('PL', 'SF'): 1, ('PL', 'NS'): 1, ('PL', 'SS'): 1, ('PL', 'VI'): 1, ('PL', 'TA'): 1, ('PL', 'SV'): 1, ('PL', 'CL'): 1, ('PL', 'NC'): 1, ('PL', 'MC'): 1, ('PL', 'MI'): 1, ('PL', 'MO'): 1, ('PL', 'NO'): 1, ('PL', 'OP'): 1, ('PL', 'X'): 1, ('NF', 'LA'): 1, ('NF', 'NZ'): 1, ('NF', 'NA'): 1, ('NF', 'PL'): 1, ('NF', 'NF'): 1, ('NF', 'SF'): 1, ('NF', 'NS'): 1, ('NF', 'SS'): 1, ('NF', 'VI'): 1, ('NF', 'TA'): 1, ('NF', 'SV'): 1, ('NF', 'CL'): 1, ('NF', 'NC'): 1, ('NF', 'MC'): 1, ('NF', 'MI'): 1, ('NF', 'MO'): 1, ('NF', 'NO'): 1, ('NF', 'OP'): 1, ('NF', 'X'): 1, ('SF', 'LA'): 1, ('SF', 'NZ'): 1, ('SF', 'NA'): 1, ('SF', 'PL'): 1, ('SF', 'NF'): 1, ('SF', 'SF'): 1, ('SF', 'NS'): 1, ('SF', 'SS'): 1, ('SF', 'VI'): 1, ('SF', 'TA'): 1, ('SF', 'SV'): 1, ('SF', 'CL'): 1, ('SF', 'NC'): 1, ('SF', 'MC'): 1, ('SF', 'MI'): 1, ('SF', 'MO'): 1, ('SF', 'NO'): 1, ('SF', 'OP'): 1, ('SF', 'X'): 1, ('NS', 'LA'): 1, ('NS', 'NZ'): 1, ('NS', 'NA'): 1, ('NS', 'PL'): 1, ('NS', 'NF'): 1, ('NS', 'SF'): 1, ('NS', 'NS'): 1, ('NS', 'SS'): 1, ('NS', 'VI'): 1, ('NS', 'TA'): 1, ('NS', 'SV'): 1, ('NS', 'CL'): 1, ('NS', 'NC'): 1, ('NS', 'MC'): 1, ('NS', 'MI'): 1, ('NS', 'MO'): 1, ('NS', 'NO'): 1, ('NS', 'OP'): 1, ('NS', 'X'): 1, ('SS', 'LA'): 1, ('SS', 'NZ'): 1, ('SS', 'NA'): 1, ('SS', 'PL'): 1, ('SS', 'NF'): 1, ('SS', 'SF'): 1, ('SS', 'NS'): 1, ('SS', 'SS'): 1, ('SS', 'VI'): 1, ('SS', 'TA'): 1, ('SS', 'SV'): 1, ('SS', 'CL'): 1, ('SS', 'NC'): 1, ('SS', 'MC'): 1, ('SS', 'MI'): 1, ('SS', 'MO'): 1, ('SS', 'NO'): 1, ('SS', 'OP'): 1, ('SS', 'X'): 1, ('VI', 'LA'): 1, ('VI', 'NZ'): 1, ('VI', 'NA'): 1, ('VI', 'PL'): 1, ('VI', 'NF'): 1, ('VI', 'SF'): 1, ('VI', 'NS'): 1, ('VI', 'SS'): 1, ('VI', 'VI'): 1, ('VI', 'TA'): 1, ('VI', 'SV'): 
1, ('VI', 'CL'): 1, ('VI', 'NC'): 1, ('VI', 'MC'): 1, ('VI', 'MI'): 1, ('VI', 'MO'): 1, ('VI', 'NO'): 1, ('VI', 'OP'): 1, ('VI', 'X'): 1, ('TA', 'LA'): 1, ('TA', 'NZ'): 1, ('TA', 'NA'): 1, ('TA', 'PL'): 1, ('TA', 'NF'): 1, ('TA', 'SF'): 1, ('TA', 'NS'): 1, ('TA', 'SS'): 1, ('TA', 'VI'): 1, ('TA', 'TA'): 1, ('TA', 'SV'): 1, ('TA', 'CL'): 1, ('TA', 'NC'): 1, ('TA', 'MC'): 1, ('TA', 'MI'): 1, ('TA', 'MO'): 1, ('TA', 'NO'): 1, ('TA', 'OP'): 1, ('TA', 'X'): 1, ('SV', 'LA'): 1, ('SV', 'NZ'): 1, ('SV', 'NA'): 1, ('SV', 'PL'): 1, ('SV', 'NF'): 1, ('SV', 'SF'): 1, ('SV', 'NS'): 1, ('SV', 'SS'): 1, ('SV', 'VI'): 1, ('SV', 'TA'): 1, ('SV', 'SV'): 1, ('SV', 'CL'): 1, ('SV', 'NC'): 1, ('SV', 'MC'): 1, ('SV', 'MI'): 1, ('SV', 'MO'): 1, ('SV', 'NO'): 1, ('SV', 'OP'): 1, ('SV', 'X'): 1, ('CL', 'LA'): 1, ('CL', 'NZ'): 1, ('CL', 'NA'): 1, ('CL', 'PL'): 1, ('CL', 'NF'): 1, ('CL', 'SF'): 1, ('CL', 'NS'): 1, ('CL', 'SS'): 1, ('CL', 'VI'): 1, ('CL', 'TA'): 1, ('CL', 'SV'): 1, ('CL', 'CL'): 1, ('CL', 'NC'): 1, ('CL', 'MC'): 1, ('CL', 'MI'): 1, ('CL', 'MO'): 1, ('CL', 'NO'): 1, ('CL', 'OP'): 1, ('CL', 'X'): 1, ('NC', 'LA'): 1, ('NC', 'NZ'): 1, ('NC', 'NA'): 1, ('NC', 'PL'): 1, ('NC', 'NF'): 1, ('NC', 'SF'): 1, ('NC', 'NS'): 1, ('NC', 'SS'): 1, ('NC', 'VI'): 1, ('NC', 'TA'): 1, ('NC', 'SV'): 1, ('NC', 'CL'): 1, ('NC', 'NC'): 1, ('NC', 'MC'): 1, ('NC', 'MI'): 1, ('NC', 'MO'): 1, ('NC', 'NO'): 1, ('NC', 'OP'): 1, ('NC', 'X'): 1, ('MC', 'LA'): 1, ('MC', 'NZ'): 1, ('MC', 'NA'): 1, ('MC', 'PL'): 1, ('MC', 'NF'): 1, ('MC', 'SF'): 1, ('MC', 'NS'): 1, ('MC', 'SS'): 1, ('MC', 'VI'): 1, ('MC', 'TA'): 1, ('MC', 'SV'): 1, ('MC', 'CL'): 1, ('MC', 'NC'): 1, ('MC', 'MC'): 1, ('MC', 'MI'): 1, ('MC', 'MO'): 1, ('MC', 'NO'): 1, ('MC', 'OP'): 1, ('MC', 'X'): 1, ('MI', 'LA'): 1, ('MI', 'NZ'): 1, ('MI', 'NA'): 1, ('MI', 'PL'): 1, ('MI', 'NF'): 1, ('MI', 'SF'): 1, ('MI', 'NS'): 1, ('MI', 'SS'): 1, ('MI', 'VI'): 1, ('MI', 'TA'): 1, ('MI', 'SV'): 1, ('MI', 'CL'): 1, ('MI', 'NC'): 1, ('MI', 'MC'): 1, ('MI', 'MI'): 
1, ('MI', 'MO'): 1, ('MI', 'NO'): 1, ('MI', 'OP'): 1, ('MI', 'X'): 1, ('MO', 'LA'): 1, ('MO', 'NZ'): 1, ('MO', 'NA'): 1, ('MO', 'PL'): 1, ('MO', 'NF'): 1, ('MO', 'SF'): 1, ('MO', 'NS'): 1, ('MO', 'SS'): 1, ('MO', 'VI'): 1, ('MO', 'TA'): 1, ('MO', 'SV'): 1, ('MO', 'CL'): 1, ('MO', 'NC'): 1, ('MO', 'MC'): 1, ('MO', 'MI'): 1, ('MO', 'MO'): 1, ('MO', 'NO'): 1, ('MO', 'OP'): 1, ('MO', 'X'): 1, ('NO', 'LA'): 1, ('NO', 'NZ'): 1, ('NO', 'NA'): 1, ('NO', 'PL'): 1, ('NO', 'NF'): 1, ('NO', 'SF'): 1, ('NO', 'NS'): 1, ('NO', 'SS'): 1, ('NO', 'VI'): 1, ('NO', 'TA'): 1, ('NO', 'SV'): 1, ('NO', 'CL'): 1, ('NO', 'NC'): 1, ('NO', 'MC'): 1, ('NO', 'MI'): 1, ('NO', 'MO'): 1, ('NO', 'NO'): 1, ('NO', 'OP'): 1, ('NO', 'X'): 1, ('OP', 'LA'): 1, ('OP', 'NZ'): 1, ('OP', 'NA'): 1, ('OP', 'PL'): 1, ('OP', 'NF'): 1, ('OP', 'SF'): 1, ('OP', 'NS'): 1, ('OP', 'SS'): 1, ('OP', 'VI'): 1, ('OP', 'TA'): 1, ('OP', 'SV'): 1, ('OP', 'CL'): 1, ('OP', 'NC'): 1, ('OP', 'MC'): 1, ('OP', 'MI'): 1, ('OP', 'MO'): 1, ('OP', 'NO'): 1, ('OP', 'OP'): 1, ('OP', 'X'): 1, ('VO', 'VO'): 1, ('VO', 'AS'): 1, ('VO', 'EJ'): 1, ('VO', 'IT'): 1, ('VO', 'IN'): 1, ('VO', 'X'): 1, ('AS', 'VO'): 1, ('AS', 'AS'): 1, ('AS', 'EJ'): 1, ('AS', 'IT'): 1, ('AS', 'IN'): 1, ('AS', 'X'): 1, ('EJ', 'GL'): 1, ('EJ', 'GZ'): 1, ('EJ', 'VO'): 1, ('EJ', 'AS'): 1, ('EJ', 'EJ'): 1, ('EJ', 'IT'): 1, ('EJ', 'IN'): 1, ('EJ', 'X'): 1, ('IT', 'GL'): 1, ('IT', 'GZ'): 1, ('IT', 'VO'): 1, ('IT', 'AS'): 1, ('IT', 'EJ'): 1, ('IT', 'IT'): 1, ('IT', 'IN'): 1, ('IT', 'X'): 1, ('IN', 'VO'): 1, ('IN', 'AS'): 1, ('IN', 'EJ'): 1, ('IN', 'IT'): 1, ('IN', 'IN'): 1, ('IN', 'X'): 1, ('X', 'LB'): 1, ('X', 'LD'): 1, ('X', 'LL'): 1, ('X', 'DE'): 1, ('X', 'AL'): 1, ('X', 'PO'): 1, ('X', 'RE'): 1, ('X', 'PA'): 1, ('X', 'NE'): 1, ('X', 'VE'): 1, ('X', 'UV'): 1, ('X', 'PH'): 1, ('X', 'GL'): 1, ('X', 'LZ'): 1, ('X', 'LH'): 1, ('X', 'RZ'): 1, ('X', 'PZ'): 1, ('X', 'VZ'): 1, ('X', 'HZ'): 1, ('X', 'GZ'): 1, ('X', 'LA'): 1, ('X', 'NZ'): 1, ('X', 'NA'): 1, ('X', 'PL'): 1, ('X', 
'NF'): 1, ('X', 'SF'): 1, ('X', 'NS'): 1, ('X', 'SS'): 1, ('X', 'VI'): 1, ('X', 'TA'): 1, ('X', 'SV'): 1, ('X', 'CL'): 1, ('X', 'NC'): 1, ('X', 'MC'): 1, ('X', 'MI'): 1, ('X', 'MO'): 1, ('X', 'NO'): 1, ('X', 'OP'): 1, ('X', 'VO'): 1, ('X', 'AS'): 1, ('X', 'EJ'): 1, ('X', 'IT'): 1, ('X', 'IN'): 1, ('X', 'X'): 1}
# Invert the `letters` mapping (defined earlier in this module): each value's
# letters are canonicalised into a sorted tuple and mapped back to the original
# key, so a set of letters can be looked up regardless of their order.
# NOTE(review): assumes every `letters[key]` is an iterable of letters and that
# sorted tuples are unique across entries — duplicate tuples would silently
# overwrite earlier keys; confirm upstream.
reversed_letters = {tuple(sorted(list(letters[key]))): key for key in letters}
|
992,937 | 33c6f66875422c9c5a25513e08f1a771b737c01d | from django.shortcuts import render
# from summary.models import SumItems
from confirm.models import PSumItems
from twilio.rest import Client
from django.contrib.auth.models import User
from datetime import date
def myorders(request):
    """Render the order-history page (``myorders.html``).

    Superusers see every order; a superuser POST additionally updates each
    order's ``status`` from the submitted ``status`` list (one entry per
    order, in queryset order).  Regular users see only their own orders.

    Returns an ``HttpResponse`` with parallel lists ``order_date``, ``id``,
    ``username`` and ``status`` in the template context.
    """
    if request.user.is_superuser:
        if request.method == 'POST':
            statuses = request.POST.getlist('status')
            # Update each order's status in place instead of re-creating the
            # whole row field-by-field; zip() also tolerates a mismatched
            # status list instead of raising IndexError mid-update.
            for order, new_status in zip(PSumItems.objects.all(), statuses):
                order.status = new_status
                order.save()
        orders = PSumItems.objects.all()
    else:
        # orders = SumItems.objects.filter(user_name=request.user)
        orders = PSumItems.objects.filter(user_name=request.user)
    # Evaluate the queryset once and build the parallel lists the template
    # expects (dates and ids are stringified for display).
    order_date, idnum, username, status1 = [], [], [], []
    for order in orders:
        order_date.append(str(order.date_created))
        idnum.append(str(order.id))
        username.append(order.user_name)
        status1.append(order.status)
    return render(request, 'myorders.html',
                  {'order_date': order_date, 'id': idnum,
                   'username': username, 'status': status1})
|
992,938 | 160b75886b7daff70df1a53c50340d73d1ebefff |
# C preprocessor macros from the original C++ source, expressed as plain
# module-level constants (the original `def USE_PPHT` lines were invalid
# Python left over from a mechanical translation).
USE_PPHT = True        # use the probabilistic Hough transform (HoughLinesP)
MAX_NUM_LINES = 200    # raise the Hough threshold until at most this many segments remain
# include "opencv2/core/core.hpp"
# include "opencv2/highgui/highgui.hpp"
# include "opencv2/imgproc/imgproc.hpp"
import cv2
import MSAC
def help():
    """Print usage information for the vanishing-point detection demo.

    The original C++ program printed this banner on startup; the stub left
    the text as comments, so the usage screen was never shown.  NOTE: the
    name shadows the ``help`` builtin, kept for parity with the C++ source.
    """
    print(
        "/*\n"
        " **************************************************************************************************\n"
        " * Vanishing point detection using Hough and MSAC \n"
        " * ----------------------------------------------------\n"
        " * \n"
        " * Author:Marcos Nieto\n"
        " * www.marcosnieto.net\n"
        " * marcos.nieto.doncel@gmail.com\n"
        " * \n"
        " * Date:01/12/2011\n"
        " **************************************************************************************************\n"
        " * \n"
        " * Usage: \n"
        " * -numVps	# Number of vanishing points to detect (at maximum) \n"
        " * -mode	# Estimation mode (default is NIETO): LS (Least Squares), NIETO\n"
        " * -video	# Specifies video file as input (if not specified, camera is used) \n"
        " * -image	# Specifies image file as input (if not specified, camera is used) \n"
        " * -verbose	# Actives verbose: ON, OFF (default)\n"
        " * -play	# ON: the video runs until the end; OFF: frame by frame (key press event)\n"
        " * -resizedWidth	# Specifies the desired width of the image (the height is computed to keep aspect ratio)\n"
        " * Example:\n"
        " * vanishingPoint.exe -numVps 2 -video myVideo.avi -verbose ON\n"
        " * vanishingPoint.exe -numVps 2 -image myImage.jpg\n"
        " * vanishingPoint.exe -numVps 1 -play OFF -resizedWidth 640\n"
        " * \n"
        " * Keys:\n"
        " * Esc: Quit\n"
        " */"
    )
def processImage(msac, numVps, imgGRAY, outputImg):
    """Detect straight line segments in *imgGRAY*, estimate up to *numVps*
    vanishing points with *msac*, and draw segments and clustered consensus
    sets onto *outputImg* (modified in place).

    This replaces the raw C++ left over from a mechanical translation with
    the equivalent Python/OpenCV code (the USE_PPHT / HoughLinesP path of
    the original source).
    """
    import math

    # Canny edge map (thresholds copied from the original C++ demo).
    imgCanny = cv2.Canny(imgGRAY, 180, 120, apertureSize=3)

    lineSegments = []  # list of [pt1, pt2] pairs handed to MSAC

    # Probabilistic Hough transform; smaller images get a higher threshold.
    h, w = imgGRAY.shape[:2]
    houghThreshold = 100 if w * h < 400 * 400 else 70
    lines = cv2.HoughLinesP(imgCanny, 1, math.pi / 180, houghThreshold,
                            minLineLength=10, maxLineGap=10)
    # Too many segments slow MSAC down: raise the threshold until the count
    # drops to MAX_NUM_LINES or fewer.
    while lines is not None and len(lines) > MAX_NUM_LINES:
        houghThreshold += 10
        lines = cv2.HoughLinesP(imgCanny, 1, math.pi / 180, houghThreshold,
                                minLineLength=10, maxLineGap=10)

    if lines is not None:
        for x1, y1, x2, y2 in lines[:, 0]:
            pt1 = (int(x1), int(y1))
            pt2 = (int(x2), int(y2))
            cv2.line(outputImg, pt1, pt2, (0, 0, 0), 2)
            lineSegments.append([pt1, pt2])

    # Multiple vanishing point estimation.  The output containers are filled
    # in place by MSAC, mirroring the reference-parameter API of the C++
    # original: vps[vpNum] for each detected vp, lineSegmentsClusters the
    # segments grouped per vp, numInliers the consensus-set sizes.
    vps = []
    numInliers = []
    lineSegmentsClusters = []
    msac.multipleVPEstimation(lineSegments, lineSegmentsClusters,
                              numInliers, vps, numVps)

    for v, vp in enumerate(vps):
        # NOTE(review): assumes each vp is a 3-element homogeneous vector
        # (numpy 3x1 or flat sequence) — confirm against the MSAC port.
        x, y, z = float(vp[0]), float(vp[1]), float(vp[2])
        print('VP %d (%.3f, %.3f, %.3f)' % (v, x, y, z), end='')
        vpNorm = math.sqrt(x * x + y * y + z * z)
        if abs(vpNorm - 1) < 0.001:
            # A unit-norm homogeneous vp means the point lies at infinity.
            print('(INFINITE)', end='')
        print()

    # Draw the line segments coloured according to their cluster.
    msac.drawCS(outputImg, lineSegmentsClusters, vps)
def main(argv=None):
    """Entry point: parse command-line options, open the input (camera,
    video file or still image) and run vanishing-point detection on every
    frame.

    Returns 0 on success, -1 on bad usage or unopenable input — mirroring
    the exit codes of the original C++ program this file was translated
    from (the body below replaces the raw C++ that was left in place).
    """
    import sys
    if argv is None:
        argv = sys.argv

    videoFileName = None
    imageFileName = None
    useCamera = True
    # NOTE(review): MODE_NIETO / MODE_LS are expected to come from the MSAC
    # module, as in the C++ original — confirm and qualify as MSAC.MODE_*
    # if the Python port namespaces them.
    mode = MODE_NIETO
    numVps = 1
    playMode = True        # True: free-running video; False: step per key press
    stillImage = False
    verbose = False
    procWidth = -1         # -1 means "keep the input width"

    # Start showing help.
    help()

    # Parse arguments (same flags as the C++ original).
    if len(argv) < 2:
        return -1
    i = 1
    while i < len(argv):
        s = argv[i]
        if s == '-video':
            # Input video is a video file.
            i += 1
            videoFileName = argv[i]
            useCamera = False
        elif s == '-image':
            # Input is an image file.
            i += 1
            imageFileName = argv[i]
            stillImage = True
            useCamera = False
        elif s == '-resizedWidth':
            i += 1
            procWidth = int(argv[i])
        elif s == '-verbose':
            i += 1
            verbose = argv[i].lower() in ('on', 'true', 'yes')
        elif s == '-play':
            i += 1
            playMode = argv[i].lower() not in ('off', 'false', 'no', 'step')
        elif s == '-mode':
            i += 1
            ss = argv[i]
            if ss == 'LS':
                mode = MODE_LS
            elif ss == 'NIETO':
                mode = MODE_NIETO
            else:
                print('ERROR: Only LS or NIETO modes are supported')
        elif s == '-numVps':
            i += 1
            numVps = int(argv[i])
        i += 1

    # Open video input.
    video = None
    inputImg = None
    if useCamera:
        video = cv2.VideoCapture(0)
    elif not stillImage:
        video = cv2.VideoCapture(videoFileName)

    # Check video input and show its properties.
    width = height = 0
    if not stillImage:
        if not video.isOpened():
            print('ERROR: can not open camera or video file')
            return -1
        width = int(video.get(cv2.CAP_PROP_FRAME_WIDTH))
        height = int(video.get(cv2.CAP_PROP_FRAME_HEIGHT))
        fps = int(video.get(cv2.CAP_PROP_FPS))
        fourcc = int(video.get(cv2.CAP_PROP_FOURCC))
        if not useCamera:
            print('Input video: (%d x %d) at %d fps, fourcc = %d'
                  % (width, height, fps, fourcc))
        else:
            print('Input camera: (%d x %d) at %d fps' % (width, height, fps))
    else:
        inputImg = cv2.imread(imageFileName)
        if inputImg is None or inputImg.size == 0:
            return -1
        height, width = inputImg.shape[:2]
        print('Input image: (%d x %d)' % (width, height))
        playMode = False   # a still image is always processed step-wise

    # Processing size: keep the aspect ratio when a resize width was given.
    if procWidth != -1:
        procHeight = int(height * (float(procWidth) / width))
        procSize = (procWidth, procHeight)
        print('Resize to: (%d x %d)' % (procWidth, procHeight))
    else:
        procSize = (width, height)

    # Create and init MSAC.
    msac = MSAC.MSAC()
    msac.init(mode, procSize, verbose)

    frameNum = 0
    while True:
        if not stillImage:
            print('\n-------------------------\nFRAME #%6d' % frameNum)
            frameNum += 1
            # Get the current image.
            ok, inputImg = video.read()
            if not ok:
                break
        if inputImg is None or inputImg.size == 0:
            break

        # Resize to processing size.
        inputImg = cv2.resize(inputImg, procSize)

        # Colour conversion: detection runs on grayscale, drawing on BGR.
        if inputImg.ndim == 3:
            imgGRAY = cv2.cvtColor(inputImg, cv2.COLOR_BGR2GRAY)
            outputImg = inputImg.copy()
        else:
            imgGRAY = inputImg.copy()
            outputImg = cv2.cvtColor(inputImg, cv2.COLOR_GRAY2BGR)

        # Process the frame.
        processImage(msac, numVps, imgGRAY, outputImg)

        # View; in play mode wait 1 ms, otherwise block until a key press.
        cv2.imshow('Output', outputImg)
        key = cv2.waitKey(1 if playMode else 0)
        if key & 0xFF == 27:   # Esc quits
            print('\nStopped by user request')
            break
        if stillImage:
            break

    if not stillImage:
        video.release()
    return 0
|
992,939 | 0d61bf0ecc9171394e6014ff496f7647f0cbfdc9 | # cascadingMenus.py
# imports ====================================
from tkinter import *
from tkinter import ttk
# menus are not part of ttk
# tkinter ====================================
root = Tk()
root.title('My New App')
root.geometry('240x140')
# tell the Tk object that each menu in the interface should not be
# of the tearoff type (Tkinter defaults to tearoff menus as a
# legacy feature, but tearoff menus are not part of modern GUI design)
root.option_add('*tearOff', False)
# create a menu bar object and assign it to the root window
menubar = Menu(root)
# configure the root window to use the menu bar object as the menu
root.config(menu = menubar)
# rem now each menu item that you create will be a new menu object
# which is a child of the menu bar object
# create menu items
file_ = Menu(menubar)
edit = Menu(menubar)
help_ = Menu(menubar)
about = Menu(menubar)
# add menu items to the menu bar
menubar.add_cascade(menu = file_, label = 'File')
menubar.add_cascade(menu = edit, label = 'Edit')
menubar.add_cascade(menu = help_, label = 'Help')
menubar.add_cascade(menu = about, label = 'About')
# put an info label on root window
menuInfo = ttk.Label(root, text ='menuInfo')
menuInfo.pack()
# add some commands to the file menu
# File commands
file_.add_command(label = 'New', command = lambda: menuInfo.configure(text='New File'))
file_.add_command(label = 'Save', command = lambda: menuInfo.configure(text='Save File'))
file_.add_command(label = 'Delete', command = lambda: menuInfo.configure(text='Delete File'))
# Edit commands
edit.add_command(label = 'Erase', command = lambda: menuInfo.configure(text='Erase File'))
edit.add_command(label = 'Append', command = lambda: menuInfo.configure(text='Append to File'))
# add separator line between menu elements
file_.add_separator()
file_.add_command(label = 'Open...', command = lambda: menuInfo.configure(text='Open File'))
file_.add_command(label = 'Close', command = lambda: menuInfo.configure(text='Close File'))
# add shortcut properties using the accelerator property of the entry config method
# rem the accelerator property does not actually create the shortcut but only
# formats the shortcut key to the right of the menu item
# (event binding can be used to actually create the shortcuts)
file_.entryconfig('New', accelerator = 'Ctrl + N')
file_.entryconfig('Save', accelerator = 'Ctrl + S')
# you can use PhotoImage and .entryconfig to add images to your menu
logo = PhotoImage(file = 'python_logo.gif').subsample(15,15)
file_.entryconfig('Open...', image = logo, compound = 'left')
# you can disable menu items using state
file_.entryconfig('Delete', state = 'disabled')
# in addition to adding commands to a menu, you can also add other menus to create submenus
# here, we can create a save submenu
file_.delete('Save') # delete original save menu item
save = Menu(file_) # create save menu item as child of the file_ menu item
file_.add_cascade(menu = save, label = 'Save')
save.add_command(label = 'Save As...', command = lambda: menuInfo.configure(text = 'Saving As...'))
save.add_command(label = 'Save All...', command = lambda: menuInfo.configure(text = 'Saving All...'))
save.entryconfig('Save As...', accelerator = 'Ctrl + S') # format the shortcut key hint
# you can also add radio buttons and check buttons to menus
choice = IntVar()
edit.add_separator()
choose = Menu(edit)
edit.add_cascade(menu = choose, label = 'Select Level ')
choose.add_radiobutton(label = 'One', variable = choice, value = 1,
command = lambda: menuInfo.configure(text = 'Select Level One'))
choose.add_radiobutton(label = 'Two', variable = choice, value = 2,
command = lambda: menuInfo.configure(text = 'Select Level Two'))
choose.add_radiobutton(label = 'Three', variable = choice, value = 3,
command = lambda: menuInfo.configure(text = 'Select Level Three'))
# you can create popup menus at specific locations on the screen with post() method
# this method takes (x,y) coordinates of the location for the popup menu
# based on the entire screen (not just the Tk window ), starting from top left hand corner
file_.post(400,300)
# tkinter loop
root.mainloop()
# main() ====================================
def main():
    """Confirmation hook; the GUI above already ran at import time via mainloop()."""
    print('Done.')
if __name__ == '__main__': main() |
992,940 | 4b53af99516979102da3973b70f276a815d34f82 | while True:
login = input("Enter your login")
if login == "First":
print("Greetings, First")
else:
print("Error, wrong login")
again = input("Try again?")
if again in ["Yes", "yes", "Y", "y"]:
continue
else:
break |
992,941 | ef7f5d95b522f3f8b8504407c4527c3407f02a61 | use_names = []
# Greet each known user, or prompt for users when the list is empty.
if use_names:
    for use_name in use_names:
        if use_name == 'admin':
            # special greeting for the administrator account
            print("Hello admin,would you like to see a status report?")
        else:
            # NOTE(review): greeting hard-codes "Eric" instead of use_name -- confirm intent
            print("hello Eric,thank you for logging in again.")
else:
    # use_names is bound to [] immediately above, so this branch runs
    print("We need to find some users!")
|
992,942 | baa0314491823a10f9e859e2c4249f0dd7364414 | #!/usr/bin/py
data_gathered = {}
data_gathered['info'] = [
# 'Name',
# 'Version',
# 'Release_date',
# 'Nbproc',
# 'Process_num',
# 'Pid',
# 'Uptime',
# 'Uptime_sec',
# 'Memmax_MB',
# 'Ulimit-n',
# 'Maxsock',
# 'Maxconn',
# 'Hard_maxconn',
# 'CurrConns',
# 'CumConns',
# 'CumReq',
# 'MaxSslConns',
# 'CurrSslConns',
# 'CumSslConns',
# 'Maxpipes',
# 'PipesUsed',
# 'PipesFree',
# 'ConnRate',
# 'ConnRateLimit',
# 'MaxConnRate',
# 'SessRate',
# 'SessRateLimit',
# 'MaxSessRate',
# 'SslRate',
# 'SslRateLimit',
# 'MaxSslRate',
# 'SslFrontendKeyRate',
# 'SslFrontendMaxKeyRate',
# 'SslFrontendSessionReuse_pct',
# 'SslBackendKeyRate',
# 'SslBackendMaxKeyRate',
# 'SslCacheLookups',
# 'SslCacheMisses',
# 'CompressBpsIn',
# 'CompressBpsOut',
# 'CompressBpsRateLim',
# 'ZlibMemUsage',
# 'MaxZlibMemUsage',
# 'Tasks',
# 'Run_queue',
# 'Idle_pct',
# 'node',
# 'description'
]
data_gathered['stat'] = [
# 'pxname',
# 'svname',
'queue_current',
'sess_current',
'sess_total',
'bytes_in',
'bytes_out',
'request_errors'
]
data_stat_index = {
'pxname': 0,
'svname': 1,
'queue_current': 2,
'qmax': 3,
'sess_current': 4,
'smax': 5,
'slim': 6,
'sess_total': 7,
'bytes_in': 8,
'bytes_out': 9,
'dreq': 10,
'dresp': 11,
'request_errors': 12,
'wretr': 13,
'wredis': 14,
'status': 15,
'weight': 16,
'act': 17,
'bckchkfail': 18,
'chkdown': 19,
'lastchg': 20,
'downtime': 21,
'qlimit': 22,
'pid': 23,
'iid': 24,
'sid': 25,
'throttle': 26,
'lbtot': 27,
'tracked': 28,
'type': 29,
'rate': 30,
'rate_lim': 31,
'rate_max': 32,
'check_status': 33,
'check_code': 34,
'check_duration': 35,
'hrsp_1xx': 36,
'hrsp_2xx': 37,
'hrsp_3xx': 38,
'hrsp_4xx': 39,
'hrsp_5xx': 40,
'hrsp_other': 41,
'hanafail': 42,
'req_rate': 43,
'req_rate_max': 44,
'req_tot': 45,
'cli_abrt': 46,
'srv_abrt': 47,
'comp_in': 48,
'comp_out': 49,
'comp_byp': 50,
'comp_rsp': 51,
'lastsess': 52,
'last_chk': 53,
'last_agt': 54,
'qtime': 55,
'ctime': 56,
'rtime': 57,
'ttime': 58,
'agent_status': 59,
'agent_code': 60,
'agent_duration': 61,
'check_desc': 62,
'agent_desc': 63,
'check_rise': 64,
'check_fall': 65,
'check_health': 66,
'agent_rise': 67,
'agent_fall': 68,
'agent_health': 69,
'addr': 70,
'cookie': 71,
'mode': 72,
'algo': 73,
'conn_rate': 74,
'conn_rate_max': 75,
'conn_tot': 76,
'intercepted': 77,
'dcon': 78,
'dses': 79
}
|
992,943 | 1f06e24d72d9d2e6c135be20fa0190795f695b64 | import json
import subprocess
# Locations of the Stanford NLP tools; '~' is expanded by the shell because
# the subprocess calls below use shell=True.
PARSER_DIR = "~/Programming/tools/stanford-parser-2012-11-12"
TAGGER_DIR = "~/Programming/tools/stanford-postagger-2012-11-11/"
PHRASES_FILE = "../data/phrases"
gold = []
# gold_labels.json holds groups of (phrase, label) pairs
with open('../data/gold_labels.json') as f:
    gold = json.loads(f.read())
# dump every phrase, one per line, for the newline-delimited tagger/parser input
with open(PHRASES_FILE, 'w') as f:
    for phrase_group in gold:
        for phrase, label in phrase_group:
            f.write(phrase + '\n')
# POS-tag the phrases with the Stanford MaxentTagger
subprocess.call("java -mx300m -cp " + TAGGER_DIR + "stanford-postagger-3.1.4.jar" + \
    ": edu.stanford.nlp.tagger.maxent.MaxentTagger -sentenceDelimiter newline " + \
    "-model " + TAGGER_DIR + "models/english-left3words-distsim.tagger -textFile " + \
    PHRASES_FILE + " -outputFile ../data/tagged_phrases ",
    shell=True)
# Parse the phrases with the Stanford LexicalizedParser (PCFG model),
# redirecting the Penn-treebank output to ../data/parse_trees
subprocess.call("java -mx1024m -cp " + PARSER_DIR + \
    "/*: edu.stanford.nlp.parser.lexparser.LexicalizedParser " + \
    "-outputFormat penn -sentences newline " + \
    "edu/stanford/nlp/models/lexparser/englishPCFG.ser.gz " + \
    "> ../data/parse_trees " + \
    PHRASES_FILE, shell=True)
|
992,944 | c054e9c21ef555e6f9fe7d877ff3ed9449454a21 |
# coding: utf-8
# In[1]:
# globals
Alphabet = {}
Tests = []
def init():
    """Populate the module-level Alphabet mapping and the Tests fixtures.

    Reads "alphabet.txt" as whitespace-separated (letter, code) pairs, then
    loads tests/test1.txt .. tests/test4.txt where the first line is the
    encoded string and the remaining lines form the dictionary.
    """
    with open("alphabet.txt") as src:
        tokens = " ".join(src.readlines()).split()
    # consecutive tokens form key/value pairs
    for pos in range(0, len(tokens), 2):
        Alphabet[tokens[pos]] = tokens[pos + 1]
    for num in range(1, 5):
        with open("tests/test" + str(num) + ".txt") as handle:
            raw = handle.readlines()
            Tests.append((raw[0].strip(), [entry.strip() for entry in raw[1:]]))
init()
# print(Alphabet)
# print(Tests[0:3])
# In[1]:
def tt_translate(words):
    """Concatenate the Alphabet code of every character of every word."""
    pieces = []
    for word in words:
        for ch in word:
            pieces.append(Alphabet[ch])
    return "".join(pieces)
def tt_check(code, dic, sols, a):
    """Validate a solver's output.

    Checks that exactly `a` solutions were produced and that every solution
    translates back (via tt_translate) to the original `code`.  `dic` is
    accepted for interface symmetry but is not consulted here.
    Returns True when everything checks out, False otherwise.
    """
    error = False
    if a != len(sols):
        print("number of solutions is incorrect. Should be", a)
        error = True
    for i, s in enumerate(sols):
        so = tt_translate(s)
        if so != code:
            # str(i): the original concatenated an int onto a str, which
            # raised TypeError whenever a bad solution was actually reported
            print("solution #" + str(i), "translates to code different from input: [", so, "]")
            error = True
    return not error
def tt_test(solver, code, dic, a):
    """Run `solver` on one test case, print its output, and validate it.

    solver -- callable taking (Alphabet, code, dic) and returning a list of
              solutions (each solution a list of words).
    code   -- the encoded input string.
    dic    -- the dictionary of allowed words.
    a      -- the expected number of solutions.
    Returns True when tt_check accepts the solver's output.
    """
    # (a dead `debug = False` branch was removed here: the local was never read)
    print("======================================= input ( length =", len(code), "):")
    # show at most the first 100 characters of very long inputs
    print(code[:100] + (" (...)" if len(code) > 100 else ""))
    solutions = solver(Alphabet, code, dic)
    print("======================================= solution:")
    # small result sets print on one line; larger ones get one line each
    sep = " " if len(solutions) < 5 else "\n"
    solutions.sort()
    print(len(solutions), "solution(s):" + sep, ("," + sep).join([" ".join(sol) for sol in solutions]))
    print("=======================================")
    res = tt_check(code, dic, solutions, a)
    print("OK" if res else "FAILED")
    return res
def tt_TA(solver, ACTIVATEBONUS = False):
    """Run the solver over the standard test suite.

    The final test is a bonus case and only runs when ACTIVATEBONUS is True.
    """
    expected_counts = [1, 1, 2, 72, 5]
    selected = Tests if ACTIVATEBONUS else Tests[:-1]
    for idx, (tcode, tdic) in enumerate(selected):
        tt_test(solver, tcode, tdic, expected_counts[idx])
|
992,945 | 0ecf7e40defcaa5c299c83cf1af8fe7e118c8b38 | """
data-upload.py
Author: Jonathan Whitaker
Email: jon.b.whitaker@gmail.com
Date: April 21, 2016
data-upload.py is an integral part of the AirU toolchain, serving as the script
which uploads the collected data from the AirU station. This script is designed
to run using a Cron.
"""
def fetch_data(excludeNonPollutants):
    """
    Queries local database for all metrics which have not been uploaded to AirU server.

    excludeNonPollutants -- when True, filter out measurement types without a
    computable AQI (Temperature, Altitude, Pressure, Humidity, PM1.0).
    At most 500 rows are returned per call so uploads happen in batches.

    :return: An array of all data metrics which have not been uploaded.
    """
    if excludeNonPollutants:
        return AirMeasurement().select().where(~AirMeasurement.uploaded,
            AirMeasurement.type != 'Temperature',
            AirMeasurement.type != 'Altitude',
            AirMeasurement.type != 'Pressure',
            AirMeasurement.type != 'Humidity',
            AirMeasurement.type != 'PM1.0').limit(500)
    # no filtering: every not-yet-uploaded measurement qualifies
    return AirMeasurement().select().where(~(AirMeasurement.uploaded)).limit(500)
def encode_data(metrics):
    """
    Encodes an array of data metrics into a dictionary which can be used as JSON data.
    Below is the model expected by the server API.
    [{
        "Time": "12-31-2015",
        "Station": {
            "Id": "T1000"
        },
        "Parameter": {
            "Name": "PM2.5",
            "Unit": "UG/M3"
        },
        "Location": {
            "lat": 40.687033,
            "lng": -111.824517
        },
        "Value": 30
    }]
    """
    # one JSON-ready record per measurement row
    return [
        {
            "Time": str(record.date_added),
            "Station": { "Id": get_mac('eth0').replace(':', '') },
            "Parameter": { "Name": record.type, "Unit": record.unit },
            "Location": { "lat": record.latitude, "lng": record.longitude },
            "Value": record.value
        }
        for record in metrics
    ]
def upload(message, metrics):
    """
    Uploads JSON messages to AirU central server API.

    message -- list of JSON-ready dicts (see encode_data).
    metrics -- the matching database rows; each is flagged uploaded=True and
               saved once the server answers 200.
    NOTE(review): indentation in the archived copy is ambiguous; as reproduced
    here the function returns the status code only on 200 and None otherwise --
    confirm against the caller in __main__, which compares the result to 200.
    """
    url = 'http://dev.air.eng.utah.edu/api/stations/data'
    headers = {'Content-Type': 'application/json'}
    print json.dumps(message)
    r = requests.post(url, data=json.dumps(message), headers=headers)
    print r.status_code # TODO: just printing for sanity check
    if r.status_code == 200:
        print 'OK! SUCCESS'
        # only mark rows uploaded after the server acknowledged them
        for m in metrics:
            m.uploaded = True
            m.save()
        return r.status_code
if __name__ == '__main__':
    """
    Entry point for uploaded Beaglebone data to server API.
    """
    # prepare_db() and get_mac() are defined elsewhere in the project
    # (their imports are not visible in this chunk)
    prepare_db()
    # server api has an unresolved bug which causes uploads to fail if the
    # uploaded datapoint is a type without a computable AQI
    # sending non pollutants first, is a workaround until the bug is resolved
    pollutants = fetch_data(True)
    if len(pollutants) > 0:
        status_code = upload(encode_data(pollutants), pollutants)
        # non-pollutants are only sent once the pollutant batch succeeded
        if status_code == 200:
            non_pollutants = fetch_data(False)
            upload(encode_data(non_pollutants), non_pollutants)
|
992,946 | afd3a72b6186ac4cb0272f8b082566e7e0524393 | from keras.layers import *
from keras.models import *
import keras.backend as K
class DeepFM():
    """Keras implementation of DeepFM: 1st-order FM + 2nd-order FM + MLP.

    NOTE(review): FM_1st_order / FM_2nd_order append to instance lists on
    every call, and DeepFM_modeling calls FM_2nd_order both directly and via
    MLP -- repeated calls therefore accumulate duplicate embedding layers.
    Confirm whether that is intended before reusing one instance.
    """
    def __init__(self):
        # hyper-parameters are hard-coded for a fixed 5-feature example
        self.cat_vars = ["Cat1", "Cat2", "Cat3", "Cat4", "Cat5"] # List of All Categorical Variables Names
        self.cat_levels = [2, 5, 6, 3, 50] # List of All Categorical Variables Levels
        self.embd_dim = 8 # Size for Embedding Output
        self.hidden_layers = [500,100,20] # List of MLP hidden layers
        # one scalar input tensor per categorical variable
        self.inputs = [Input(shape=(1,), name='Input_'+c) for c in self.cat_vars]
        self.embeddings_1st = []  # filled by FM_1st_order
        self.embeddings_2nd = []  # filled by FM_2nd_order
    """
    FM - 1st order
    """
    def FM_1st_order(self):
        """Return (summed 1st-order terms, list of 1-dim embeddings)."""
        # `input` shadows the builtin of the same name (kept as-is here)
        for input, c, i in zip(self.inputs, self.cat_vars, self.cat_levels):
            embedding = Embedding(i, 1)(input)
            self.embeddings_1st.append(embedding)
        res1 = Add()([Reshape((1,))(x) for x in self.embeddings_1st])
        return res1, self.embeddings_1st
    """
    FM - 2nd order
    """
    def FM_2nd_order(self):
        """Return (pairwise interaction term, list of embd_dim embeddings).

        Uses the FM identity: the pairwise interactions equal
        0.5 * ((sum of vectors)^2 - sum of squared vectors).
        """
        for input, c, i in zip(self.inputs, self.cat_vars, self.cat_levels):
            embedding = Embedding(i, self.embd_dim)(input)
            self.embeddings_2nd.append(embedding)
        concat = Concatenate(axis=1)(self.embeddings_2nd)
        # square of the sum minus sum of the squares, halved
        square_sum = Lambda(lambda x: K.square(K.sum(x, axis=1)))(concat)
        sum_square = Lambda(lambda x: K.sum(x**2, axis=1))(concat)
        diff = Subtract()([square_sum, sum_square])
        res2 = Lambda(lambda x: 0.5 * K.sum(x, axis=1, keepdims=True))(diff)
        return res2, self.embeddings_2nd
    """
    MLP layer
    """
    def MLP(self):
        """Deep component: ReLU tower over the concatenated 2nd-order embeddings."""
        _, embeddings2 = self.FM_2nd_order()
        embeddings2 = Flatten()(Concatenate()(embeddings2))
        dense = Dense(self.hidden_layers[0], activation='relu')(embeddings2)
        for k in self.hidden_layers[1:]:
            dense = Dense(k, activation='relu')(dense)
        res3 = Dense(1, activation='relu')(dense)
        return res3
    """
    Final Output
    """
    def DeepFM_modeling(self):
        """Assemble and return the full Keras Model (uncompiled)."""
        res1, _ = self.FM_1st_order()
        res2, _ = self.FM_2nd_order()
        res3 = self.MLP()
        # concatenate the three component outputs and squash to a probability
        y = Concatenate()([res1, res2, res3])
        y = Dense(1, activation='sigmoid')(y)
        model_DeepFM = Model(inputs=self.inputs, outputs=y)
        model_DeepFM.summary()
        return model_DeepFM
|
992,947 | fba66d4e3c6b933b3337e67d1dcf98b4a4b3eaf4 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Author: lisnb
# @Date: 2015-05-07 22:59:04
# @Last Modified by: lisnb
# @Last Modified time: 2015-05-10 01:37:05
class Solution:
    """
    @param A: A positive integer which has N digits, A is a string.
    @param k: Remove k digits.
    @return: A string
    """
    def DeleteDigits(self, A, k):
        """Greedily delete k digits from A so the remaining number is minimal.

        Maintains a non-decreasing run of kept digits: whenever the next
        digit is smaller than the last kept one (and deletions remain), the
        larger kept digit is discarded.  If fewer than k deletions happened,
        the tail is trimmed.  Leading zeros are stripped, collapsing to '0'
        when nothing else remains.
        """
        kept = []
        removed = 0
        for ch in A:
            # drop larger preceding digits while deletions are still allowed
            while kept and removed < k and ch < kept[-1]:
                kept.pop()
                removed += 1
            kept.append(ch)
        # digits were non-decreasing at the end: trim the largest ones
        if len(kept) > len(A) - k:
            kept = kept[:len(A) - k]
        trimmed = ''.join(kept).lstrip('0')
        return trimmed if trimmed else '0'
if __name__ == '__main__':
    # quick manual check (note: Python 2 print-statement syntax)
    s = Solution()
    A = '8000076543'
    print s.DeleteDigits(A, 7)
|
992,948 | 8c198e6b2fe13c9c3ac72da1996e1130e4b31ca8 | # 最大公約数
def gcd(a, b):
    """Greatest common divisor via the Euclidean algorithm (recursive form)."""
    return a if b == 0 else gcd(b, a % b)
# Least common multiple
def lcm(a, b):
    """Return the least common multiple of a and b."""
    product = a * b
    return product // gcd(a, b)
# Read N and the N integers from stdin
N = int(input())
A = list(map(int, input().split()))
mod = 10 ** 9 + 7
# LCM of all elements, computed on exact ints; only the final answer is
# reduced modulo 1e9+7
lcm_val = A[0]
for a in A[1:]:
    lcm_val = lcm(lcm_val, a)
# each term lcm_val // a is exact because a divides lcm_val
ans = 0
for a in A:
    ans += lcm_val // a
print(ans % mod)
992,949 | bd11ae6d6f70210db1a75f6fd450baf62e549a90 | from kivymd.app import MDApp
from kivymd.uix.textfield import MDTextFieldRect
from kivymd.uix.boxlayout import BoxLayout
from kivymd.uix.button import MDRaisedButton
from kivymd.uix.screen import MDScreen
from kivy.core.window import Window
from kivymd.uix.gridlayout import GridLayout
from kivymd.uix.boxlayout import BoxLayout
class SimpleCalculator(MDApp):
    """Four-function calculator built with KivyMD widgets.

    State: self.operand1 holds the left operand and self.oprator (sic, kept
    as in the original) the pending operator; equals() reads the text field
    as the right operand.
    NOTE(review): pressing '=' before any operator raises AttributeError
    because self.oprator is not yet set -- confirm whether to guard it.
    """
    def build(self):
        """Construct the widget tree: a text field above a 4x4 button grid."""
        Window.size=(350,180)
        screen = MDScreen()  # NOTE(review): created but never used
        layout = BoxLayout(orientation="vertical")
        # digit buttons all share one handler that appends their label
        self.b_9=MDRaisedButton(text="9",on_press=self.insert_text)
        self.b_8=MDRaisedButton(text="8",on_press=self.insert_text)
        self.b_7=MDRaisedButton(text="7",on_press=self.insert_text)
        self.b_6=MDRaisedButton(text="6",on_press=self.insert_text)
        self.b_5=MDRaisedButton(text="5",on_press=self.insert_text)
        self.b_4=MDRaisedButton(text="4",on_press=self.insert_text)
        self.b_3=MDRaisedButton(text="3",on_press=self.insert_text)
        self.b_2=MDRaisedButton(text="2",on_press=self.insert_text)
        self.b_1=MDRaisedButton(text="1",on_press=self.insert_text)
        self.b_zero=MDRaisedButton(text="0",on_press=self.insert_text)
        self.b_equals=MDRaisedButton(text="=",on_press=self.equals)
        self.b_CLR=MDRaisedButton(text="CLR",on_press=self.clear_text)
        self.b_plus=MDRaisedButton(text="+",on_press=self.addition)
        self.b_minus=MDRaisedButton(text="-",on_press=self.sub)
        self.b_mul=MDRaisedButton(text="*",on_press=self.mul)
        self.b_Div=MDRaisedButton(text="/",on_press=self.div)
        # NOTE(review): 'centr_x' looks like a typo for 'center_x' in pos_hint
        self.text_field = MDTextFieldRect(text="",size_hint=(1, None),height="30dp",pos_hint={'centr_x':0,'center_y':0.96})
        gl = GridLayout(cols=4)
        layout.add_widget(self.text_field)
        # buttons laid out row-major into the 4-column grid
        lis=[self.b_9,self.b_8,self.b_7,self.b_plus,self.b_6,self.b_5,self.b_4,self.b_minus,self.b_3,self.b_2,self.b_1,self.b_mul,self.b_CLR,self.b_zero,self.b_equals,self.b_Div]
        for k in lis:
            gl.add_widget(k)
        layout.add_widget(gl)
        return layout
    def insert_text(self,obj):
        """Append the pressed button's label to the display."""
        self.text_field.text=self.text_field.text + obj.text
    def clear_text(self,obj):
        """CLR: wipe the display."""
        self.text_field.text=""
    def addition(self,obj):
        """Record the left operand and a pending '+'."""
        self.operand1 = float(self.text_field.text)
        self.text_field.text=""
        self.oprator='+'
    def div(self,obj):
        """Record the left operand and a pending '/'."""
        self.operand1 = float(self.text_field.text)
        self.text_field.text=""
        self.oprator='/'
    def sub(self,obj):
        """Record a pending '-'; an empty display starts a negative number instead."""
        if self.text_field.text!="":
            self.operand1 = float(self.text_field.text)
            self.text_field.text=""
            self.oprator='-'
        else:
            self.text_field.text="-"
    def mul(self,obj):
        """Record the left operand and a pending '*'."""
        self.operand1 = float(self.text_field.text)
        self.text_field.text=""
        self.oprator='*'
    def equals(self,obj):
        """Apply the pending operator to operand1 and the displayed value."""
        self.operand2 = float(self.text_field.text)
        if self.oprator=='+':
            self.text_field.text=str(self.operand1+self.operand2)
        elif self.oprator=='-':
            self.text_field.text=str(self.operand1-self.operand2)
        elif self.oprator=='*':
            self.text_field.text=str(self.operand1*self.operand2)
        else:
            # any other recorded operator is '/'
            if self.operand2!=0:
                self.text_field.text=str(self.operand1/self.operand2)
            else:
                # message text kept as-is ("Zeor..." typo is in the original string)
                self.text_field.text="ZeorDivisionError(CLR to continue)"
SimpleCalculator().run()
|
992,950 | e7f79cf4aac685f72983457c52adb8517eba18a7 | from telegram.ext import Updater, CommandHandler, MessageHandler, Filters
from telegram import ReplyKeyboardMarkup
import requests
import json
from telegram.replykeyboardremove import ReplyKeyboardRemove
updater = Updater('449150567:AAFwxZjQO_BrE1RVe4qH467vIiDlRD7Hu24')
states = {}
id_global_1 = -1
def get_genre_list():
    """Fetch the full genre list from the local REST backend."""
    response = requests.get('http://127.0.0.1:8000/genreList/')
    return json.loads(response.text)
def get_dialog_list(id_post):
    """Return the dialogs belonging to the genre with id `id_post`."""
    reply = requests.post('http://127.0.0.1:8000/dialogListByGenreId/', json={"genre_id": id_post})
    return reply.json()
def get_dialog_text_list(id_post):
    """Fetch one dialog (with its text entries) by id."""
    url = 'http://127.0.0.1:8000/getDialogById/{0}'.format(id_post)
    return requests.get(url).json()
START = 'start'
CHOICE_GENRE = 'choice-genre'
CHOICE_DIALOG = 'choice-dialog'
CHOICE_DIALOG_TEXT = 'choice-dialog-text'
END = 'end'
def start(bot, update):
    """Handle /start: reset this chat's conversation state and show genres."""
    global states
    chat = update.message.chat_id
    states[chat] = {'step': START}
    main_function(bot, update)
def help(bot, update):
    """Handle /help with a static greeting message."""
    chat = update.message.chat_id
    bot.send_message(chat, "Hello, it's Majara Bot and Welcome to our Bot.")
def end_function(bot, update):
    """Send the farewell message (does not clear the chat's saved state)."""
    chat = update.message.chat_id
    bot.send_message(chat, "Thank You For Using This Bot!")
def main_function(bot, update):
    """State-machine dispatcher for every text message.

    Looks up this chat's saved step (START -> genre menu, CHOICE_GENRE ->
    dialog menu, CHOICE_DIALOG -> dialog contents) and advances it.
    NOTE(review): id_global_1 is a single module-level global shared by ALL
    chats, so concurrent users can clobber each other's genre selection.
    NOTE(review): end_function()/start() calls below do not `return`, so
    execution continues through the remaining branches afterwards.
    NOTE(review): indentation in the archived copy is ambiguous; it is
    reproduced here in source order with conventional nesting -- verify.
    """
    global states, id_global_1
    chat_id = update.message.chat_id
    if chat_id in states:
        step = states[chat_id]['step']
        if step == START:
            # build a one-column keyboard of genre titles plus Exit
            g_list = get_genre_list()
            keyboard = []
            for genre in g_list:
                title = genre.get('title', '')
                keyboard.append([title])
            keyboard.append(['Exit'])
            markup = ReplyKeyboardMarkup(keyboard, one_time_keyboard=True)
            bot.send_message(chat_id, "Genre's:", reply_markup=markup)
            states[chat_id]['step'] = CHOICE_GENRE
        if step == CHOICE_GENRE:
            text = update.message.text
            if text == "Exit":
                end_function(bot, update)
            # map the chosen title back to its genre id
            g_list = get_genre_list()
            for genre in g_list:
                title_get = genre.get('title', '')
                id_get = genre.get('id')
                if title_get == text:
                    # NOTE(review): chained assignment also mutates the
                    # incoming message's text to carry the numeric id
                    id_global_1 = update.message.text = id_get
            data = get_dialog_list(update.message.text)
            keyboard = []
            for dialog in data:
                title = dialog.get('title')
                keyboard.append([title])
            keyboard.append(['Back'])
            markup = ReplyKeyboardMarkup(keyboard, one_time_keyboard=True)
            bot.send_message(chat_id, "Dialog's:", reply_markup=markup)
            if len(keyboard) > 1:
                states[chat_id]['step'] = CHOICE_DIALOG
            else:
                markup = ReplyKeyboardMarkup(keyboard, one_time_keyboard=True)
                bot.send_message(chat_id, "Dialog List is Empty.", reply_markup=markup)
                states[chat_id]['step'] = CHOICE_DIALOG
        if step == CHOICE_DIALOG:
            if update.message.text == 'Back':
                start(bot, update)
            data = get_dialog_list(id_global_1)
            # map the chosen dialog title back to its id (again via mutation)
            for dialog in data:
                title_get = dialog.get('title')
                id_get = dialog.get('id')
                if title_get == update.message.text:
                    update.message.text = id_get
            data = get_dialog_text_list(update.message.text)[0]
            dialog_text = data.get('Dialog_Text')
            list_show = []  # NOTE(review): built but never read afterwards
            for dialog in dialog_text:
                # each entry is a positional tuple/list; rebinding dialog_text
                # below does not break iteration (the iterator is already held)
                dialog_id = dialog[0]
                dialog_from = dialog[1]
                dialog_text = dialog[3]
                dialog_type = dialog[2]
                dialog_image = dialog[4]
                data = "Dialog ID: {0}\n" \
                    "Dialog From: {1}\nDialog Text: {2}" \
                    "\nType: {3}\nImage:{4}".format(dialog_id, dialog_from, dialog_text, dialog_type, dialog_image)
                list_show.append(data)
                bot.send_message(update.message.chat_id, data)
def main():
    """Register the command/message handlers and start long-polling."""
    dispatcher = updater.dispatcher
    dispatcher.add_handler(CommandHandler('start', start))
    dispatcher.add_handler(CommandHandler('help', help))
    dispatcher.add_handler(MessageHandler([Filters.text], main_function))
    updater.start_polling()
if __name__ == '__main__':
main()
|
992,951 | d606c712cfcf14cb0e47630638c61da124ec9759 | """
Author: Arsene Bwasisi
Description: This program will transform a given grid of numbers
to where the rows become the column and vice versa.
"""
def get_col(grid, n):
    """Return list of values from column n in grid (top to bottom)."""
    if grid == []:
        return grid
    return [row[n] for row in grid]
def transpose(grid, n=0):
    """Return a new grid that has been transposed (columns n.. become rows)."""
    if n == len(grid):
        return []
    # inline column extraction, then recurse on the next column index
    return [[row[n] for row in grid]] + transpose(grid, n + 1)
def main():
    """Demonstrate transpose() on a small 3x3 grid of strings."""
    sample = [
        ['aa', 'bb', 'cc'],
        ['ee', 'ff', 'gg'],
        ['kk', 'll', 'mm'],
    ]
    print(transpose(sample))
if __name__ == "__main__":
    main()
992,952 | f71a57f2365187ebc3e26898bd06b137a31dd346 | date_data=[[[('企业', 1283), ('防控', 1066), ('组织', 1038), ('社会', 846), ('工作', 832), ('协会', 738), ('服务', 705), ('复工', 666), ('复产', 466), ('会员', 413), ('行业', 404), ('积极', 399), ('捐赠', 391), ('开展', 361), ('提供', 352), ('物资', 334), ('慈善', 314), ('防疫', 311), ('肺炎', 300), ('行业协会', 295), ('中国', 292), ('商会', 255), ('抗疫', 251), ('通过', 250), ('做好', 240), ('及时', 237), ('发展', 233), ('新冠', 223), ('社区', 221), ('心理', 219), ('专业', 210), ('进行', 205), ('单位', 203), ('支持', 201), ('相关', 199), ('宣传', 199), ('助力', 196), ('发挥', 193), ('活动', 191), ('政策', 190), ('帮助', 189), ('抗击', 187), ('参与', 187), ('问题', 185), ('会员单位', 174), ('万元', 174), ('一线', 171), ('联合', 170), ('保障', 169)], '2020/3/12', '2020/7/10'], [[('企业', 1532), ('防控', 1219), ('组织', 1087), ('工作', 933), ('社会', 893), ('协会', 818), ('复工', 813), ('服务', 777), ('复产', 552), ('积极', 484), ('行业', 471), ('会员', 452), ('捐赠', 417), ('提供', 388), ('开展', 384), ('物资', 365), ('防疫', 354), ('肺炎', 336), ('慈善', 330), ('行业协会', 326), ('商会', 297), ('中国', 283), ('及时', 281), ('抗疫', 280), ('通过', 278), ('做好', 276), ('发展', 247), ('支持', 245), ('新冠', 243), ('单位', 235), ('心理', 233), ('政策', 231), ('社区', 227), ('相关', 223), ('抗击', 222), ('专业', 222), ('帮助', 220), ('进行', 218), ('宣传', 217), ('发挥', 210), ('助力', 208), ('问题', 206), ('会员单位', 204), ('参与', 198), ('一线', 196), ('期间', 195), ('活动', 193), ('保障', 192), ('信息', 186)], '2020/3/11', '2020/6/15'], [[('企业', 1654), ('防控', 1211), ('组织', 1077), ('工作', 940), ('社会', 936), ('复工', 892), ('协会', 881), ('服务', 804), ('复产', 598), ('积极', 507), ('会员', 506), ('行业', 489), ('提供', 396), ('防疫', 383), ('开展', 377), ('捐赠', 362), ('物资', 353), ('商会', 335), ('行业协会', 333), ('肺炎', 306), ('通过', 280), ('及时', 275), ('做好', 271), ('中国', 265), ('发展', 264), ('抗疫', 264), ('支持', 254), ('慈善', 245), ('政策', 245), ('单位', 238), ('社区', 230), ('助力', 228), ('抗击', 228), ('帮助', 226), ('心理', 224), ('宣传', 223), ('新冠', 221), ('专业', 220), ('进行', 217), ('发挥', 216), ('会员单位', 213), ('相关', 212), ('万元', 212), ('一线', 207), ('保障', 
204), ('问题', 201), ('口罩', 201), ('引导', 201), ('参与', 200)], '2020/3/9', '2020/5/9'], [[('企业', 1585), ('防控', 1340), ('组织', 1104), ('工作', 1018), ('社会', 997), ('复工', 880), ('协会', 841), ('服务', 774), ('复产', 572), ('积极', 511), ('会员', 502), ('行业', 462), ('防疫', 410), ('提供', 392), ('捐赠', 385), ('物资', 376), ('开展', 371), ('商会', 331), ('肺炎', 328), ('行业协会', 326), ('社区', 295), ('及时', 292), ('做好', 278), ('通过', 261), ('支持', 255), ('万元', 251), ('发展', 249), ('抗疫', 244), ('抗击', 242), ('一线', 238), ('宣传', 232), ('心理', 232), ('助力', 228), ('单位', 228), ('参与', 228), ('新冠', 228), ('政策', 226), ('中国', 224), ('发挥', 222), ('口罩', 222), ('帮助', 219), ('引导', 218), ('会员单位', 215), ('进行', 208), ('保障', 205), ('相关', 202), ('活动', 196), ('指导', 192), ('联合', 189)], '2020/3/9', '2020/3/31'], [[('企业', 1674), ('防控', 1445), ('组织', 1183), ('工作', 1078), ('社会', 1067), ('复工', 887), ('协会', 875), ('服务', 810), ('复产', 581), ('积极', 517), ('会员', 502), ('行业', 480), ('防疫', 426), ('捐赠', 410), ('物资', 402), ('提供', 397), ('开展', 382), ('肺炎', 364), ('行业协会', 344), ('商会', 331), ('社区', 314), ('及时', 292), ('做好', 287), ('通过', 275), ('支持', 269), ('万元', 259), ('一线', 256), ('新冠', 255), ('心理', 255), ('抗疫', 254), ('抗击', 252), ('参与', 250), ('宣传', 243), ('发展', 242), ('政策', 235), ('口罩', 233), ('引导', 228), ('会员单位', 228), ('帮助', 228), ('中国', 225), ('助力', 222), ('单位', 221), ('发挥', 219), ('相关', 219), ('保障', 216), ('进行', 213), ('关于', 206), ('联合', 204), ('医院', 203)], '2020/3/6', '2020/3/27'], [[('企业', 1831), ('防控', 1487), ('组织', 1105), ('工作', 1074), ('社会', 1001), ('复工', 969), ('协会', 946), ('服务', 798), ('复产', 633), ('积极', 557), ('会员', 549), ('行业', 521), ('捐赠', 468), ('防疫', 462), ('物资', 453), ('提供', 409), ('开展', 381), ('肺炎', 366), ('商会', 356), ('行业协会', 334), ('社区', 321), ('做好', 309), ('通过', 302), ('万元', 289), ('及时', 279), ('支持', 279), ('口罩', 276), ('发展', 275), ('抗疫', 272), ('一线', 270), ('会员单位', 269), ('抗击', 265), ('助力', 256), ('参与', 251), ('新冠', 249), ('单位', 246), ('相关', 245), ('宣传', 243), ('中国', 238), ('政策', 233), ('帮助', 232), ('发挥', 231), ('心理', 
230), ('保障', 226), ('引导', 226), ('慈善', 226), ('关于', 218), ('活动', 210), ('捐款', 210)], '2020/3/5', '2020/3/25'], [[('企业', 1804), ('防控', 1506), ('组织', 1192), ('社会', 1083), ('工作', 1056), ('复工', 1010), ('协会', 949), ('服务', 779), ('复产', 663), ('积极', 581), ('行业', 549), ('会员', 518), ('物资', 496), ('捐赠', 490), ('防疫', 481), ('提供', 420), ('开展', 399), ('行业协会', 378), ('肺炎', 367), ('商会', 364), ('社区', 319), ('通过', 306), ('做好', 302), ('万元', 297), ('支持', 292), ('口罩', 290), ('会员单位', 283), ('发展', 278), ('一线', 277), ('抗疫', 273), ('及时', 268), ('中国', 264), ('参与', 263), ('抗击', 262), ('助力', 259), ('单位', 250), ('心理', 249), ('相关', 245), ('新冠', 243), ('发挥', 241), ('宣传', 240), ('引导', 233), ('政策', 232), ('慈善', 232), ('帮助', 230), ('保障', 224), ('关于', 221), ('联合', 218), ('活动', 214)], '2020/3/4', '2020/3/20'], [[('企业', 1791), ('防控', 1473), ('组织', 1158), ('社会', 1068), ('工作', 1055), ('复工', 1032), ('协会', 917), ('服务', 750), ('复产', 649), ('积极', 574), ('行业', 536), ('物资', 506), ('防疫', 504), ('会员', 490), ('捐赠', 478), ('提供', 410), ('开展', 387), ('行业协会', 370), ('商会', 360), ('肺炎', 358), ('做好', 319), ('通过', 313), ('社区', 313), ('支持', 295), ('万元', 293), ('会员单位', 290), ('口罩', 283), ('抗疫', 279), ('发展', 276), ('一线', 268), ('及时', 254), ('参与', 253), ('抗击', 252), ('助力', 251), ('心理', 251), ('相关', 250), ('宣传', 244), ('中国', 244), ('政策', 242), ('新冠', 241), ('单位', 241), ('发挥', 238), ('引导', 230), ('进行', 222), ('联合', 220), ('志愿者', 218), ('帮助', 217), ('关于', 215), ('生产', 213)], '2020/3/3', '2020/3/19'], [[('企业', 1850), ('防控', 1566), ('组织', 1167), ('复工', 1098), ('社会', 1077), ('工作', 1066), ('协会', 976), ('服务', 798), ('复产', 695), ('积极', 590), ('行业', 549), ('物资', 520), ('防疫', 511), ('会员', 510), ('捐赠', 506), ('提供', 427), ('开展', 408), ('肺炎', 379), ('行业协会', 368), ('商会', 363), ('做好', 350), ('通过', 330), ('社区', 320), ('会员单位', 315), ('万元', 305), ('口罩', 297), ('支持', 294), ('抗疫', 289), ('一线', 284), ('发展', 269), ('及时', 269), ('相关', 264), ('宣传', 260), ('抗击', 260), ('助力', 255), ('中国', 252), ('引导', 251), ('心理', 251), ('新冠', 250), ('发挥', 249), 
('单位', 242), ('参与', 239), ('保障', 237), ('慈善', 235), ('政策', 234), ('进行', 233), ('生产', 231), ('帮助', 227), ('联合', 221)], '2020/3/2', '2020/3/18'], [[('企业', 1795), ('防控', 1536), ('组织', 1111), ('复工', 1060), ('工作', 1032), ('社会', 1017), ('协会', 964), ('服务', 725), ('复产', 676), ('积极', 571), ('行业', 558), ('物资', 517), ('捐赠', 507), ('会员', 493), ('防疫', 481), ('提供', 390), ('开展', 389), ('肺炎', 377), ('做好', 356), ('通过', 325), ('行业协会', 319), ('商会', 313), ('会员单位', 308), ('口罩', 297), ('社区', 291), ('支持', 289), ('一线', 276), ('抗疫', 274), ('中国', 274), ('发展', 273), ('及时', 270), ('相关', 269), ('万元', 267), ('慈善', 264), ('抗击', 263), ('新冠', 248), ('引导', 246), ('助力', 244), ('宣传', 243), ('政策', 238), ('发挥', 238), ('保障', 235), ('单位', 233), ('参与', 227), ('生产', 225), ('进行', 224), ('医院', 224), ('人员', 222), ('关于', 221)], '2020/2/29', '2020/3/16'], [[('企业', 1850), ('防控', 1575), ('组织', 1119), ('复工', 1110), ('工作', 1036), ('社会', 1018), ('协会', 1003), ('服务', 730), ('复产', 723), ('积极', 580), ('物资', 568), ('行业', 554), ('捐赠', 551), ('会员', 517), ('防疫', 500), ('提供', 417), ('肺炎', 394), ('开展', 390), ('做好', 366), ('行业协会', 348), ('商会', 337), ('通过', 334), ('会员单位', 325), ('口罩', 301), ('支持', 292), ('一线', 286), ('社区', 286), ('万元', 281), ('发展', 280), ('及时', 279), ('相关', 277), ('抗击', 274), ('抗疫', 268), ('中国', 255), ('慈善', 255), ('宣传', 254), ('新冠', 253), ('引导', 251), ('助力', 249), ('单位', 246), ('政策', 238), ('保障', 236), ('发挥', 235), ('参与', 228), ('关于', 226), ('期间', 224), ('指导', 223), ('要求', 222), ('生产', 222)], '2020/2/28', '2020/3/12'], [[('企业', 1722), ('防控', 1559), ('组织', 1182), ('复工', 1079), ('社会', 1057), ('工作', 1023), ('协会', 984), ('服务', 739), ('复产', 716), ('物资', 597), ('捐赠', 581), ('会员', 580), ('积极', 548), ('行业', 542), ('防疫', 498), ('提供', 426), ('开展', 401), ('肺炎', 384), ('做好', 356), ('行业协会', 334), ('商会', 331), ('社区', 326), ('通过', 325), ('口罩', 314), ('万元', 301), ('会员单位', 300), ('一线', 292), ('发展', 281), ('支持', 279), ('相关', 262), ('抗击', 256), ('宣传', 256), ('慈善', 253), ('及时', 252), ('抗疫', 246), ('引导', 242), ('参与', 242), ('新冠', 
241), ('助力', 239), ('公益', 239), ('发挥', 236), ('保障', 233), ('单位', 230), ('中国', 230), ('政策', 227), ('关于', 224), ('医院', 223), ('要求', 222), ('指导', 220)], '2020/2/27', '2020/3/11'], [[('企业', 1706), ('防控', 1567), ('组织', 1128), ('复工', 1089), ('工作', 1026), ('社会', 991), ('协会', 970), ('复产', 733), ('服务', 719), ('物资', 595), ('捐赠', 576), ('会员', 550), ('积极', 544), ('行业', 529), ('防疫', 486), ('提供', 427), ('开展', 403), ('肺炎', 390), ('做好', 374), ('行业协会', 337), ('社区', 330), ('通过', 329), ('商会', 327), ('会员单位', 319), ('万元', 315), ('口罩', 312), ('一线', 296), ('发展', 274), ('相关', 271), ('支持', 270), ('慈善', 259), ('及时', 256), ('抗击', 255), ('宣传', 250), ('抗疫', 244), ('新冠', 244), ('参与', 239), ('发挥', 237), ('关于', 236), ('要求', 234), ('助力', 233), ('志愿者', 232), ('保障', 230), ('单位', 229), ('引导', 229), ('政策', 225), ('指导', 222), ('中国', 222), ('人员', 220)], '2020/2/27', '2020/3/9'], [[('企业', 1900), ('防控', 1603), ('复工', 1199), ('工作', 1038), ('协会', 995), ('组织', 994), ('社会', 878), ('复产', 818), ('服务', 740), ('物资', 613), ('会员', 608), ('积极', 573), ('捐赠', 565), ('行业', 562), ('防疫', 508), ('提供', 441), ('做好', 405), ('肺炎', 386), ('开展', 382), ('行业协会', 368), ('通过', 350), ('商会', 346), ('会员单位', 332), ('口罩', 316), ('万元', 305), ('一线', 297), ('发展', 291), ('相关', 289), ('社区', 275), ('支持', 274), ('及时', 264), ('抗疫', 256), ('抗击', 255), ('保障', 252), ('助力', 249), ('关于', 247), ('政策', 245), ('新冠', 244), ('单位', 244), ('宣传', 242), ('发挥', 242), ('生产', 239), ('要求', 238), ('指导', 230), ('帮助', 229), ('引导', 228), ('慈善', 228), ('中国', 224), ('情况', 221)], '2020/2/26', '2020/3/9'], [[('企业', 1842), ('防控', 1624), ('复工', 1190), ('工作', 1077), ('协会', 989), ('组织', 957), ('社会', 844), ('复产', 801), ('服务', 776), ('会员', 619), ('物资', 616), ('捐赠', 574), ('积极', 569), ('行业', 568), ('防疫', 559), ('提供', 451), ('做好', 417), ('开展', 411), ('肺炎', 378), ('通过', 360), ('行业协会', 357), ('商会', 334), ('社区', 332), ('口罩', 331), ('会员单位', 322), ('万元', 320), ('一线', 301), ('及时', 288), ('相关', 285), ('发展', 285), ('抗疫', 269), ('支持', 267), ('抗击', 263), ('宣传', 260), ('助力', 257), ('单位', 
249), ('保障', 248), ('发挥', 246), ('关于', 244), ('中国', 243), ('指导', 241), ('进行', 241), ('人员', 240), ('慈善', 239), ('生产', 238), ('要求', 236), ('新冠', 233), ('志愿者', 233), ('政策', 232)], '2020/2/26', '2020/3/6'], [[('企业', 1754), ('防控', 1581), ('复工', 1165), ('工作', 1017), ('协会', 941), ('组织', 932), ('社会', 793), ('复产', 785), ('服务', 771), ('物资', 603), ('会员', 600), ('防疫', 551), ('捐赠', 532), ('积极', 531), ('行业', 521), ('提供', 445), ('开展', 411), ('做好', 403), ('肺炎', 369), ('行业协会', 368), ('社区', 349), ('商会', 342), ('通过', 334), ('口罩', 312), ('万元', 304), ('一线', 299), ('会员单位', 295), ('及时', 276), ('抗击', 272), ('宣传', 269), ('支持', 265), ('相关', 263), ('志愿者', 260), ('发展', 257), ('生产', 253), ('保障', 252), ('抗疫', 250), ('进行', 243), ('关于', 238), ('要求', 236), ('单位', 235), ('新冠', 234), ('指导', 233), ('人员', 232), ('医院', 229), ('帮助', 227), ('中国', 224), ('心理', 223), ('发挥', 222)], '2020/2/25', '2020/3/5'], [[('企业', 1767), ('防控', 1616), ('复工', 1134), ('工作', 1047), ('组织', 930), ('协会', 889), ('社会', 796), ('复产', 766), ('服务', 754), ('会员', 634), ('物资', 590), ('捐赠', 571), ('防疫', 535), ('行业', 521), ('积极', 516), ('做好', 425), ('提供', 423), ('开展', 384), ('肺炎', 367), ('商会', 367), ('行业协会', 357), ('社区', 352), ('通过', 329), ('万元', 322), ('口罩', 311), ('一线', 299), ('会员单位', 282), ('相关', 272), ('抗击', 267), ('宣传', 266), ('发展', 265), ('支持', 263), ('及时', 261), ('关于', 260), ('生产', 252), ('志愿者', 251), ('保障', 250), ('进行', 241), ('抗疫', 239), ('指导', 237), ('人员', 234), ('帮助', 232), ('要求', 230), ('引导', 227), ('医院', 224), ('新冠', 223), ('单位', 222), ('助力', 220), ('发挥', 219)], '2020/2/24', '2020/3/4'], [[('企业', 1728), ('防控', 1694), ('工作', 1099), ('复工', 1099), ('组织', 1006), ('协会', 913), ('社会', 838), ('服务', 826), ('复产', 738), ('会员', 641), ('物资', 593), ('防疫', 585), ('捐赠', 583), ('积极', 526), ('行业', 480), ('提供', 457), ('做好', 434), ('开展', 429), ('社区', 419), ('肺炎', 403), ('商会', 386), ('行业协会', 375), ('万元', 364), ('通过', 343), ('一线', 337), ('口罩', 336), ('抗击', 297), ('志愿者', 289), ('宣传', 284), ('相关', 275), ('及时', 274), ('医院', 273), ('会员单位', 271), 
('生产', 262), ('保障', 261), ('人员', 258), ('发展', 256), ('关于', 254), ('帮助', 252), ('支持', 249), ('进行', 249), ('单位', 246), ('公益', 244), ('抗疫', 243), ('要求', 239), ('助力', 235), ('信息', 235), ('参与', 231), ('新冠', 229)], '2020/2/22', '2020/3/3'], [[('防控', 1698), ('企业', 1664), ('工作', 1120), ('组织', 1037), ('复工', 1033), ('协会', 892), ('社会', 888), ('服务', 775), ('复产', 687), ('会员', 628), ('物资', 587), ('捐赠', 582), ('防疫', 576), ('积极', 518), ('行业', 461), ('社区', 452), ('提供', 437), ('商会', 437), ('做好', 422), ('开展', 421), ('肺炎', 402), ('行业协会', 391), ('万元', 365), ('口罩', 339), ('一线', 332), ('通过', 323), ('志愿者', 311), ('抗击', 295), ('宣传', 272), ('医院', 269), ('及时', 263), ('生产', 258), ('保障', 257), ('关于', 256), ('公益', 256), ('单位', 253), ('发展', 249), ('相关', 248), ('人员', 247), ('会员单位', 246), ('参与', 246), ('支持', 237), ('帮助', 234), ('进行', 233), ('抗疫', 233), ('要求', 229), ('新冠', 229), ('慈善', 227), ('心理', 226)], '2020/2/21', '2020/3/2'], [[('防控', 1788), ('企业', 1717), ('工作', 1172), ('组织', 1120), ('复工', 1070), ('社会', 936), ('协会', 913), ('服务', 844), ('复产', 688), ('会员', 654), ('捐赠', 616), ('防疫', 600), ('物资', 587), ('积极', 529), ('社区', 528), ('商会', 469), ('提供', 458), ('行业', 449), ('开展', 431), ('万元', 427), ('做好', 427), ('行业协会', 412), ('肺炎', 410), ('志愿者', 367), ('一线', 365), ('口罩', 358), ('通过', 334), ('抗击', 289), ('心理', 284), ('生产', 283), ('参与', 282), ('人员', 280), ('单位', 279), ('宣传', 278), ('及时', 268), ('保障', 260), ('关于', 259), ('医院', 259), ('会员单位', 250), ('公益', 247), ('进行', 243), ('支持', 242), ('相关', 242), ('发展', 242), ('抗疫', 241), ('帮助', 240), ('新冠', 235), ('要求', 233), ('有序', 231)], '2020/2/21', '2020/2/29'], [[('防控', 1781), ('企业', 1649), ('工作', 1156), ('组织', 1130), ('复工', 1018), ('社会', 959), ('协会', 888), ('服务', 820), ('复产', 639), ('会员', 634), ('捐赠', 590), ('防疫', 579), ('物资', 545), ('积极', 536), ('社区', 517), ('行业', 444), ('商会', 440), ('提供', 420), ('做好', 418), ('肺炎', 411), ('开展', 409), ('万元', 404), ('行业协会', 385), ('口罩', 366), ('一线', 362), ('志愿者', 347), ('通过', 337), ('人员', 283), ('参与', 282), ('抗击', 281), ('生产', 
279), ('单位', 273), ('及时', 269), ('宣传', 268), ('保障', 260), ('心理', 260), ('关于', 259), ('医院', 257), ('进行', 246), ('抗疫', 243), ('新冠', 239), ('要求', 239), ('帮助', 238), ('有序', 238), ('公益', 236), ('支持', 236), ('会员单位', 235), ('相关', 232), ('发展', 227)], '2020/2/20', '2020/2/28'], [[('防控', 1805), ('企业', 1444), ('工作', 1175), ('组织', 1103), ('社会', 952), ('复工', 896), ('协会', 832), ('服务', 763), ('捐赠', 614), ('防疫', 569), ('复产', 553), ('会员', 543), ('物资', 532), ('积极', 515), ('社区', 511), ('万元', 439), ('商会', 418), ('肺炎', 417), ('做好', 405), ('开展', 401), ('行业', 392), ('提供', 380), ('一线', 370), ('口罩', 363), ('行业协会', 355), ('志愿者', 355), ('通过', 325), ('人员', 298), ('参与', 290), ('抗击', 284), ('医院', 283), ('单位', 281), ('心理', 269), ('宣传', 265), ('及时', 264), ('关于', 253), ('生产', 250), ('新冠', 244), ('抗疫', 242), ('要求', 240), ('进行', 239), ('保障', 238), ('会员单位', 235), ('有序', 231), ('相关', 227), ('慈善', 226), ('捐款', 223), ('公益', 217), ('发挥', 213)], '2020/2/20', '2020/2/27'], [[('防控', 1887), ('企业', 1359), ('工作', 1231), ('组织', 1162), ('社会', 1005), ('协会', 828), ('复工', 814), ('服务', 799), ('捐赠', 604), ('防疫', 567), ('社区', 541), ('积极', 530), ('会员', 529), ('物资', 528), ('复产', 493), ('肺炎', 418), ('做好', 407), ('万元', 406), ('开展', 404), ('行业', 394), ('商会', 384), ('一线', 378), ('提供', 369), ('志愿者', 369), ('口罩', 357), ('行业协会', 348), ('通过', 332), ('人员', 300), ('参与', 295), ('医院', 293), ('宣传', 287), ('抗击', 281), ('心理', 275), ('单位', 270), ('及时', 270), ('公益', 260), ('关于', 246), ('慈善', 244), ('志愿', 243), ('抗疫', 242), ('保障', 240), ('新冠', 238), ('生产', 238), ('要求', 235), ('进行', 234), ('服务中心', 234), ('会员单位', 224), ('有序', 222), ('支持', 221)], '2020/2/19', '2020/2/27'], [[('防控', 1815), ('企业', 1194), ('组织', 1194), ('工作', 1183), ('社会', 1051), ('协会', 837), ('服务', 791), ('复工', 728), ('捐赠', 617), ('社区', 550), ('物资', 536), ('防疫', 533), ('积极', 514), ('会员', 495), ('万元', 432), ('复产', 424), ('肺炎', 416), ('开展', 416), ('志愿者', 403), ('做好', 377), ('一线', 368), ('商会', 366), ('口罩', 362), ('行业', 354), ('提供', 346), ('通过', 319), ('行业协会', 318), ('人员', 310), 
('参与', 299), ('医院', 298), ('宣传', 296), ('抗击', 285), ('单位', 274), ('公益', 274), ('志愿', 270), ('慈善', 268), ('心理', 260), ('及时', 253), ('服务中心', 243), ('要求', 239), ('进行', 239), ('关于', 237), ('抗疫', 234), ('捐款', 233), ('有序', 231), ('新冠', 229), ('活动', 224), ('会员单位', 219), ('保障', 217)], '2020/2/19', '2020/2/26'], [[('防控', 1728), ('组织', 1169), ('工作', 1143), ('企业', 1117), ('社会', 1014), ('协会', 805), ('服务', 705), ('复工', 680), ('捐赠', 606), ('防疫', 581), ('物资', 558), ('积极', 494), ('社区', 489), ('会员', 464), ('万元', 418), ('肺炎', 399), ('志愿者', 392), ('开展', 383), ('复产', 382), ('一线', 371), ('商会', 366), ('做好', 356), ('口罩', 352), ('提供', 321), ('行业', 310), ('行业协会', 305), ('通过', 303), ('医院', 300), ('参与', 298), ('公益', 290), ('抗击', 284), ('人员', 283), ('单位', 275), ('宣传', 271), ('慈善', 262), ('心理', 249), ('志愿', 244), ('服务中心', 238), ('捐款', 237), ('要求', 230), ('会员单位', 226), ('进行', 226), ('有序', 223), ('及时', 220), ('关于', 217), ('新冠', 214), ('保障', 214), ('活动', 212), ('新型', 211)], '2020/2/18', '2020/2/26'], [[('防控', 1782), ('组织', 1209), ('工作', 1151), ('社会', 1103), ('企业', 1037), ('协会', 793), ('服务', 668), ('捐赠', 630), ('复工', 594), ('防疫', 592), ('物资', 538), ('社区', 522), ('积极', 506), ('会员', 454), ('万元', 438), ('肺炎', 399), ('志愿者', 395), ('一线', 379), ('开展', 373), ('口罩', 352), ('做好', 352), ('商会', 340), ('通过', 313), ('复产', 310), ('参与', 308), ('提供', 303), ('行业', 294), ('公益', 290), ('医院', 290), ('单位', 288), ('人员', 287), ('宣传', 280), ('行业协会', 277), ('抗击', 276), ('慈善', 263), ('捐款', 256), ('及时', 239), ('服务中心', 237), ('志愿', 235), ('关于', 229), ('会员单位', 228), ('要求', 225), ('心理', 225), ('新型', 223), ('发挥', 216), ('进行', 216), ('有序', 215), ('爱心', 214), ('抗疫', 211)], '2020/2/18', '2020/2/25'], [[('防控', 1836), ('组织', 1255), ('工作', 1171), ('社会', 1139), ('企业', 979), ('协会', 865), ('捐赠', 685), ('服务', 676), ('防疫', 640), ('社区', 570), ('物资', 559), ('复工', 534), ('积极', 527), ('万元', 482), ('志愿者', 448), ('一线', 426), ('会员', 425), ('口罩', 420), ('肺炎', 411), ('开展', 409), ('商会', 353), ('做好', 337), ('通过', 320), ('提供', 315), ('医院', 315), 
('参与', 310), ('人员', 309), ('捐款', 307), ('单位', 305), ('抗击', 302), ('宣传', 302), ('公益', 298), ('慈善', 280), ('复产', 270), ('行业协会', 261), ('行业', 254), ('志愿', 253), ('爱心', 251), ('新型', 248), ('服务中心', 247), ('心理', 240), ('会员单位', 237), ('进行', 236), ('活动', 234), ('要求', 229), ('冠状病毒', 225), ('及时', 223), ('抗疫', 217), ('关于', 212)], '2020/2/18', '2020/2/24'], [[('防控', 1782), ('组织', 1206), ('工作', 1124), ('社会', 1087), ('企业', 876), ('协会', 813), ('捐赠', 717), ('服务', 653), ('防疫', 614), ('物资', 591), ('社区', 575), ('万元', 518), ('积极', 501), ('志愿者', 459), ('复工', 458), ('一线', 431), ('口罩', 426), ('会员', 413), ('肺炎', 395), ('开展', 382), ('商会', 333), ('做好', 315), ('捐款', 313), ('参与', 308), ('通过', 308), ('人员', 305), ('提供', 300), ('抗击', 296), ('公益', 296), ('宣传', 294), ('医院', 294), ('慈善', 290), ('单位', 274), ('志愿', 257), ('爱心', 255), ('行业', 250), ('进行', 242), ('服务中心', 241), ('心理', 237), ('新型', 234), ('复产', 232), ('会员单位', 224), ('行业协会', 222), ('抗疫', 222), ('冠状病毒', 217), ('要求', 216), ('活动', 213), ('防护', 209), ('及时', 205)], '2020/2/17', '2020/2/21'], [[('防控', 1650), ('组织', 1181), ('社会', 1062), ('工作', 1052), ('企业', 815), ('协会', 741), ('捐赠', 704), ('服务', 639), ('防疫', 602), ('物资', 578), ('社区', 544), ('万元', 516), ('积极', 490), ('志愿者', 449), ('一线', 428), ('口罩', 413), ('复工', 404), ('会员', 401), ('肺炎', 375), ('开展', 356), ('捐款', 306), ('抗击', 305), ('人员', 301), ('医院', 300), ('参与', 297), ('通过', 296), ('做好', 286), ('商会', 284), ('宣传', 284), ('提供', 282), ('慈善', 277), ('公益', 265), ('单位', 259), ('志愿', 252), ('进行', 243), ('心理', 242), ('爱心', 234), ('行业', 232), ('会员单位', 231), ('新型', 222), ('活动', 214), ('抗疫', 213), ('服务中心', 213), ('冠状病毒', 207), ('防护', 204), ('发挥', 204), ('要求', 203), ('复产', 196), ('武汉', 195)], '2020/2/17', '2020/2/21'], [[('防控', 1660), ('组织', 1181), ('社会', 1083), ('工作', 1055), ('协会', 782), ('企业', 721), ('捐赠', 720), ('服务', 605), ('防疫', 587), ('物资', 557), ('社区', 519), ('积极', 481), ('万元', 481), ('志愿者', 441), ('一线', 404), ('口罩', 395), ('肺炎', 372), ('会员', 369), ('开展', 352), ('复工', 335), ('慈善', 315), ('抗击', 312), 
('医院', 298), ('参与', 293), ('宣传', 293), ('捐款', 291), ('通过', 291), ('做好', 288), ('公益', 283), ('人员', 275), ('提供', 266), ('商会', 260), ('志愿', 245), ('爱心', 244), ('进行', 236), ('单位', 232), ('新型', 226), ('会员单位', 217), ('活动', 217), ('行业', 212), ('冠状病毒', 210), ('要求', 206), ('武汉', 206), ('心理', 202), ('防护', 202), ('抗疫', 199), ('发挥', 198), ('服务中心', 195), ('关于', 187)], '2020/2/16', '2020/2/20'], [[('防控', 1685), ('组织', 1139), ('工作', 1079), ('社会', 1052), ('协会', 772), ('捐赠', 724), ('企业', 710), ('服务', 648), ('防疫', 590), ('物资', 573), ('社区', 555), ('万元', 507), ('积极', 481), ('志愿者', 454), ('一线', 419), ('口罩', 401), ('会员', 377), ('开展', 365), ('肺炎', 364), ('抗击', 320), ('复工', 320), ('慈善', 314), ('参与', 305), ('医院', 302), ('宣传', 300), ('做好', 299), ('捐款', 296), ('通过', 287), ('公益', 287), ('提供', 282), ('人员', 276), ('商会', 275), ('爱心', 260), ('志愿', 258), ('进行', 242), ('新型', 229), ('单位', 228), ('活动', 220), ('冠状病毒', 218), ('武汉', 218), ('行业', 216), ('会员单位', 213), ('发挥', 207), ('服务中心', 204), ('心理', 203), ('防护', 200), ('要求', 200), ('机构', 198), ('相关', 188)], '2020/2/16', '2020/2/20'], [[('防控', 1798), ('组织', 1233), ('社会', 1150), ('工作', 1129), ('协会', 844), ('捐赠', 788), ('企业', 715), ('服务', 681), ('物资', 598), ('万元', 581), ('防疫', 578), ('社区', 577), ('积极', 494), ('志愿者', 472), ('口罩', 435), ('一线', 428), ('会员', 404), ('肺炎', 380), ('开展', 377), ('抗击', 352), ('慈善', 325), ('参与', 324), ('做好', 319), ('复工', 318), ('宣传', 313), ('捐款', 309), ('医院', 304), ('商会', 300), ('通过', 295), ('提供', 294), ('公益', 290), ('人员', 286), ('爱心', 273), ('志愿', 263), ('新型', 262), ('进行', 260), ('冠状病毒', 248), ('行业', 234), ('单位', 229), ('心理', 227), ('服务中心', 222), ('会员单位', 220), ('活动', 219), ('机构', 210), ('发挥', 205), ('武汉', 204), ('防护', 202), ('抗疫', 201), ('要求', 199)], '2020/2/15', '2020/2/20'], [[('防控', 1754), ('组织', 1214), ('社会', 1143), ('工作', 1069), ('协会', 834), ('捐赠', 815), ('企业', 716), ('服务', 666), ('万元', 637), ('物资', 599), ('防疫', 568), ('社区', 541), ('积极', 488), ('志愿者', 448), ('口罩', 432), ('一线', 430), ('会员', 404), ('开展', 380), ('肺炎', 377), 
('抗击', 364), ('商会', 334), ('捐款', 331), ('参与', 321), ('复工', 321), ('慈善', 313), ('爱心', 305), ('做好', 303), ('宣传', 303), ('提供', 303), ('医院', 300), ('人员', 298), ('通过', 291), ('进行', 259), ('新型', 257), ('冠状病毒', 251), ('公益', 251), ('单位', 239), ('志愿', 239), ('会员单位', 231), ('心理', 230), ('行业', 228), ('抗疫', 215), ('活动', 212), ('价值', 205), ('发挥', 200), ('武汉', 198), ('服务中心', 198), ('机构', 197), ('防护', 196)], '2020/2/15', '2020/2/19'], [[('防控', 1793), ('组织', 1223), ('社会', 1148), ('工作', 1117), ('协会', 833), ('捐赠', 827), ('企业', 708), ('服务', 665), ('万元', 621), ('物资', 601), ('防疫', 581), ('社区', 569), ('积极', 486), ('一线', 457), ('志愿者', 428), ('口罩', 424), ('会员', 393), ('肺炎', 392), ('抗击', 387), ('开展', 379), ('商会', 351), ('参与', 329), ('爱心', 319), ('提供', 315), ('做好', 313), ('捐款', 308), ('医院', 302), ('慈善', 292), ('人员', 292), ('宣传', 289), ('复工', 288), ('新型', 278), ('冠状病毒', 275), ('通过', 274), ('公益', 266), ('心理', 261), ('进行', 258), ('行业', 256), ('抗疫', 238), ('会员单位', 222), ('单位', 221), ('志愿', 218), ('防护', 211), ('价值', 205), ('发挥', 200), ('活动', 199), ('服务中心', 197), ('相关', 195), ('机构', 192)], '2020/2/14', '2020/2/19'], [[('防控', 1880), ('组织', 1314), ('社会', 1264), ('工作', 1128), ('捐赠', 835), ('协会', 829), ('企业', 737), ('服务', 689), ('万元', 620), ('社区', 591), ('物资', 561), ('积极', 501), ('防疫', 473), ('一线', 451), ('会员', 417), ('肺炎', 416), ('口罩', 416), ('志愿者', 408), ('抗击', 380), ('商会', 378), ('开展', 377), ('做好', 334), ('爱心', 333), ('参与', 331), ('提供', 321), ('宣传', 312), ('捐款', 311), ('人员', 304), ('慈善', 301), ('复工', 295), ('医院', 295), ('新型', 291), ('冠状病毒', 289), ('通过', 272), ('行业', 264), ('抗疫', 244), ('进行', 240), ('公益', 239), ('心理', 232), ('会员单位', 225), ('单位', 215), ('志愿', 214), ('机构', 207), ('活动', 206), ('价值', 202), ('责任', 201), ('行业协会', 198), ('要求', 198), ('防护', 198)], '2020/2/14', '2020/2/18'], [[('防控', 1814), ('组织', 1304), ('社会', 1219), ('工作', 1122), ('协会', 809), ('捐赠', 798), ('服务', 698), ('企业', 655), ('万元', 596), ('社区', 568), ('物资', 549), ('积极', 476), ('防疫', 442), ('一线', 432), ('肺炎', 411), ('口罩', 411), 
('会员', 391), ('志愿者', 377), ('开展', 373), ('抗击', 372), ('做好', 340), ('商会', 334), ('参与', 327), ('提供', 324), ('爱心', 318), ('人员', 309), ('捐款', 302), ('慈善', 295), ('宣传', 294), ('冠状病毒', 291), ('新型', 287), ('医院', 281), ('复工', 250), ('通过', 248), ('进行', 247), ('心理', 246), ('行业', 246), ('抗疫', 234), ('公益', 229), ('会员单位', 223), ('防护', 212), ('机构', 210), ('活动', 205), ('要求', 204), ('单位', 203), ('志愿', 202), ('责任', 199), ('价值', 189), ('相关', 186)], '2020/2/14', '2020/2/18'], [[('防控', 1797), ('组织', 1298), ('社会', 1216), ('工作', 1113), ('协会', 779), ('捐赠', 723), ('服务', 704), ('企业', 617), ('万元', 568), ('社区', 568), ('物资', 528), ('积极', 448), ('防疫', 421), ('一线', 413), ('肺炎', 390), ('会员', 374), ('开展', 370), ('抗击', 353), ('口罩', 351), ('志愿者', 343), ('做好', 343), ('参与', 337), ('提供', 316), ('爱心', 287), ('人员', 286), ('商会', 285), ('宣传', 284), ('慈善', 276), ('医院', 270), ('冠状病毒', 264), ('新型', 259), ('行业', 254), ('捐款', 251), ('心理', 245), ('抗疫', 231), ('进行', 231), ('通过', 231), ('复工', 225), ('公益', 224), ('机构', 213), ('会员单位', 209), ('要求', 207), ('志愿', 205), ('责任', 197), ('防护', 193), ('活动', 192), ('累计', 189), ('保障', 185), ('单位', 182)], '2020/2/14', '2020/2/18'], [[('防控', 1851), ('组织', 1292), ('社会', 1217), ('工作', 1122), ('协会', 764), ('捐赠', 687), ('服务', 636), ('企业', 570), ('社区', 526), ('万元', 506), ('物资', 470), ('积极', 458), ('肺炎', 401), ('一线', 387), ('防疫', 382), ('会员', 362), ('做好', 359), ('开展', 350), ('参与', 342), ('口罩', 336), ('抗击', 335), ('志愿者', 298), ('提供', 286), ('人员', 285), ('宣传', 284), ('冠状病毒', 279), ('商会', 275), ('新型', 271), ('爱心', 270), ('慈善', 255), ('心理', 242), ('医院', 237), ('行业', 234), ('捐款', 230), ('进行', 216), ('通过', 212), ('抗疫', 211), ('要求', 206), ('会员单位', 203), ('责任', 200), ('活动', 197), ('复工', 197), ('公益', 196), ('党员', 193), ('志愿', 191), ('防护', 190), ('单位', 188), ('发挥', 188), ('累计', 183)], '2020/2/13', '2020/2/17'], [[('防控', 1979), ('组织', 1344), ('社会', 1276), ('工作', 1223), ('协会', 840), ('捐赠', 735), ('服务', 669), ('企业', 622), ('社区', 603), ('万元', 565), ('物资', 503), ('积极', 478), ('防疫', 438), ('肺炎', 
432), ('一线', 420), ('做好', 391), ('会员', 380), ('参与', 375), ('开展', 373), ('口罩', 365), ('抗击', 346), ('志愿者', 340), ('冠状病毒', 317), ('提供', 311), ('人员', 309), ('新型', 309), ('宣传', 300), ('爱心', 293), ('商会', 279), ('慈善', 267), ('通过', 255), ('行业', 248), ('心理', 247), ('捐款', 244), ('医院', 239), ('进行', 228), ('抗疫', 226), ('复工', 222), ('要求', 217), ('公益', 216), ('党员', 211), ('责任', 207), ('防护', 206), ('会员单位', 200), ('单位', 200), ('累计', 196), ('活动', 192), ('感染', 192), ('志愿', 191)], '2020/2/13', '2020/2/17'], [[('防控', 1915), ('组织', 1298), ('社会', 1238), ('工作', 1198), ('协会', 767), ('捐赠', 706), ('服务', 664), ('企业', 607), ('社区', 582), ('万元', 563), ('物资', 514), ('积极', 475), ('防疫', 446), ('肺炎', 429), ('一线', 428), ('做好', 378), ('口罩', 376), ('开展', 375), ('会员', 373), ('参与', 365), ('抗击', 335), ('志愿者', 331), ('冠状病毒', 322), ('提供', 317), ('新型', 314), ('人员', 308), ('宣传', 303), ('爱心', 287), ('商会', 284), ('心理', 268), ('行业', 257), ('通过', 249), ('捐款', 243), ('抗疫', 232), ('医院', 228), ('党员', 225), ('进行', 222), ('防护', 216), ('公益', 214), ('要求', 214), ('会员单位', 207), ('慈善', 205), ('复工', 205), ('志愿', 205), ('责任', 203), ('累计', 201), ('单位', 198), ('社工', 196), ('感染', 192)], '2020/2/13', '2020/2/16'], [[('防控', 1903), ('组织', 1358), ('社会', 1278), ('工作', 1187), ('协会', 782), ('捐赠', 730), ('服务', 651), ('万元', 575), ('企业', 572), ('社区', 564), ('物资', 531), ('积极', 469), ('防疫', 450), ('肺炎', 428), ('一线', 423), ('开展', 376), ('做好', 374), ('参与', 363), ('会员', 362), ('口罩', 353), ('志愿者', 335), ('冠状病毒', 332), ('人员', 331), ('新型', 329), ('抗击', 328), ('心理', 326), ('提供', 324), ('宣传', 295), ('商会', 288), ('爱心', 287), ('行业', 257), ('通过', 252), ('捐款', 248), ('慈善', 240), ('抗疫', 239), ('党员', 234), ('医院', 231), ('单位', 224), ('进行', 223), ('公益', 219), ('要求', 215), ('会员单位', 213), ('防护', 211), ('责任', 201), ('累计', 199), ('感染', 199), ('保障', 199), ('社工', 199), ('志愿', 197)], '2020/2/13', '2020/2/16'], [[('防控', 1782), ('组织', 1239), ('社会', 1169), ('工作', 1129), ('协会', 687), ('捐赠', 617), ('服务', 605), ('企业', 553), ('社区', 519), ('物资', 492), ('万元', 448), 
('防疫', 448), ('积极', 441), ('肺炎', 416), ('一线', 394), ('开展', 354), ('做好', 352), ('参与', 336), ('会员', 316), ('冠状病毒', 316), ('心理', 311), ('新型', 309), ('提供', 307), ('口罩', 305), ('人员', 301), ('志愿者', 297), ('抗击', 292), ('宣传', 277), ('爱心', 258), ('行业', 244), ('商会', 244), ('通过', 243), ('党员', 237), ('医院', 222), ('慈善', 217), ('捐款', 216), ('抗疫', 213), ('单位', 212), ('进行', 210), ('要求', 209), ('公益', 201), ('会员单位', 198), ('防护', 198), ('感染', 192), ('行业协会', 188), ('保障', 185), ('机构', 185), ('责任', 184), ('累计', 181)], '2020/2/12', '2020/2/15'], [[('防控', 1869), ('组织', 1234), ('工作', 1215), ('社会', 1181), ('协会', 720), ('捐赠', 635), ('企业', 617), ('服务', 600), ('社区', 528), ('物资', 481), ('万元', 457), ('积极', 456), ('防疫', 451), ('肺炎', 441), ('一线', 387), ('做好', 384), ('参与', 346), ('会员', 342), ('开展', 337), ('冠状病毒', 326), ('新型', 323), ('口罩', 323), ('提供', 310), ('心理', 308), ('抗击', 304), ('人员', 304), ('行业', 290), ('志愿者', 285), ('宣传', 270), ('商会', 251), ('通过', 245), ('医院', 237), ('捐款', 232), ('慈善', 226), ('要求', 226), ('爱心', 225), ('进行', 225), ('党员', 219), ('单位', 210), ('感染', 208), ('防护', 208), ('保障', 204), ('抗疫', 198), ('机构', 197), ('关于', 192), ('公益', 192), ('责任', 191), ('行业协会', 186), ('会员单位', 185)], '2020/2/12', '2020/2/14'], [[('防控', 1852), ('组织', 1312), ('社会', 1226), ('工作', 1175), ('协会', 739), ('捐赠', 664), ('企业', 587), ('服务', 581), ('社区', 527), ('物资', 474), ('万元', 467), ('积极', 455), ('防疫', 444), ('肺炎', 423), ('参与', 386), ('做好', 374), ('一线', 367), ('会员', 359), ('口罩', 349), ('志愿者', 342), ('开展', 336), ('人员', 321), ('心理', 320), ('宣传', 305), ('冠状病毒', 301), ('新型', 300), ('提供', 299), ('抗击', 278), ('行业', 260), ('捐款', 259), ('商会', 248), ('通过', 248), ('党员', 242), ('进行', 235), ('医院', 234), ('慈善', 232), ('要求', 226), ('爱心', 225), ('单位', 205), ('活动', 202), ('保障', 199), ('防护', 198), ('关于', 196), ('及时', 196), ('消毒', 196), ('感染', 195), ('行业协会', 192), ('会员单位', 191), ('引导', 188)], '2020/2/12', '2020/2/14'], [[('防控', 1781), ('组织', 1241), ('社会', 1148), ('工作', 1139), ('协会', 771), ('捐赠', 691), ('服务', 557), ('企业', 529), 
('万元', 525), ('社区', 492), ('物资', 477), ('防疫', 438), ('积极', 427), ('肺炎', 415), ('参与', 376), ('口罩', 367), ('一线', 362), ('做好', 347), ('会员', 344), ('志愿者', 337), ('心理', 332), ('开展', 329), ('人员', 311), ('抗击', 292), ('冠状病毒', 292), ('宣传', 292), ('提供', 290), ('新型', 289), ('捐款', 273), ('行业', 259), ('商会', 247), ('通过', 237), ('爱心', 234), ('进行', 231), ('医院', 231), ('党员', 226), ('慈善', 225), ('单位', 220), ('要求', 211), ('消毒', 203), ('防护', 203), ('关于', 192), ('有限公司', 192), ('公益', 192), ('活动', 190), ('感染', 189), ('及时', 186), ('保障', 186), ('行业协会', 181)], '2020/2/12', '2020/2/14'], [[('防控', 1851), ('组织', 1285), ('社会', 1204), ('工作', 1183), ('协会', 782), ('捐赠', 752), ('企业', 614), ('服务', 574), ('万元', 561), ('物资', 535), ('社区', 486), ('积极', 452), ('防疫', 452), ('肺炎', 436), ('参与', 387), ('口罩', 384), ('一线', 369), ('会员', 367), ('做好', 354), ('志愿者', 353), ('开展', 339), ('心理', 326), ('人员', 320), ('冠状病毒', 309), ('新型', 308), ('抗击', 305), ('宣传', 299), ('提供', 294), ('行业', 293), ('商会', 290), ('捐款', 290), ('医院', 270), ('通过', 257), ('爱心', 255), ('慈善', 250), ('党员', 250), ('单位', 227), ('行业协会', 224), ('进行', 220), ('关于', 215), ('要求', 208), ('感染', 207), ('防护', 205), ('公益', 197), ('有限公司', 194), ('及时', 193), ('消毒', 192), ('保障', 192), ('活动', 191)], '2020/2/11', '2020/2/14'], [[('防控', 1830), ('组织', 1196), ('工作', 1171), ('社会', 1124), ('捐赠', 773), ('协会', 736), ('企业', 629), ('万元', 582), ('服务', 557), ('物资', 522), ('社区', 468), ('积极', 467), ('肺炎', 449), ('防疫', 428), ('口罩', 399), ('会员', 373), ('参与', 363), ('做好', 361), ('一线', 351), ('心理', 332), ('新型', 330), ('冠状病毒', 329), ('志愿者', 329), ('开展', 325), ('人员', 325), ('商会', 322), ('抗击', 306), ('宣传', 305), ('捐款', 305), ('行业', 298), ('医院', 272), ('提供', 270), ('通过', 263), ('行业协会', 250), ('爱心', 250), ('党员', 249), ('慈善', 240), ('感染', 232), ('单位', 232), ('进行', 224), ('关于', 219), ('防护', 208), ('消毒', 201), ('要求', 200), ('复工', 200), ('公益', 197), ('有限公司', 194), ('发挥', 193), ('保障', 192)], '2020/2/11', '2020/2/14'], [[('防控', 1726), ('组织', 1138), ('工作', 1110), ('社会', 1071), ('捐赠', 789), 
('协会', 718), ('企业', 653), ('万元', 613), ('物资', 535), ('服务', 532), ('积极', 456), ('社区', 452), ('口罩', 447), ('肺炎', 439), ('防疫', 426), ('会员', 356), ('一线', 355), ('做好', 342), ('参与', 337), ('商会', 328), ('志愿者', 324), ('捐款', 323), ('开展', 318), ('心理', 313), ('冠状病毒', 313), ('人员', 311), ('行业', 311), ('新型', 311), ('抗击', 308), ('医院', 298), ('宣传', 291), ('通过', 279), ('提供', 269), ('爱心', 257), ('行业协会', 249), ('党员', 246), ('慈善', 240), ('单位', 235), ('感染', 224), ('进行', 221), ('关于', 219), ('防护', 210), ('有限公司', 206), ('复工', 204), ('公益', 202), ('保障', 201), ('要求', 200), ('消毒', 194), ('武汉', 188)], '2020/2/11', '2020/2/13'], [[('防控', 1783), ('组织', 1162), ('工作', 1121), ('社会', 1090), ('捐赠', 769), ('协会', 698), ('万元', 585), ('企业', 582), ('服务', 535), ('物资', 534), ('积极', 464), ('口罩', 454), ('肺炎', 440), ('社区', 417), ('防疫', 397), ('参与', 350), ('会员', 347), ('一线', 347), ('捐款', 341), ('做好', 334), ('开展', 328), ('商会', 321), ('志愿者', 321), ('心理', 320), ('新型', 308), ('冠状病毒', 306), ('抗击', 299), ('人员', 299), ('宣传', 298), ('医院', 292), ('行业', 291), ('提供', 264), ('爱心', 262), ('通过', 261), ('党员', 249), ('慈善', 237), ('行业协会', 232), ('单位', 223), ('关于', 223), ('进行', 214), ('防护', 213), ('感染', 213), ('要求', 212), ('消毒', 200), ('保障', 200), ('医用', 190), ('抗疫', 190), ('及时', 188), ('党支部', 188)], '2020/2/11', '2020/2/13'], [[('防控', 1862), ('组织', 1221), ('工作', 1168), ('社会', 1165), ('捐赠', 751), ('协会', 693), ('万元', 630), ('企业', 590), ('服务', 555), ('物资', 555), ('肺炎', 470), ('积极', 459), ('口罩', 450), ('社区', 404), ('防疫', 392), ('参与', 387), ('会员', 357), ('做好', 352), ('捐款', 351), ('开展', 339), ('一线', 332), ('新型', 325), ('志愿者', 319), ('冠状病毒', 314), ('抗击', 308), ('商会', 304), ('宣传', 298), ('心理', 298), ('行业', 292), ('人员', 284), ('医院', 276), ('慈善', 259), ('通过', 256), ('爱心', 256), ('提供', 255), ('党员', 244), ('关于', 238), ('单位', 232), ('行业协会', 225), ('感染', 224), ('要求', 220), ('进行', 218), ('保障', 215), ('防护', 205), ('复工', 190), ('及时', 189), ('武汉', 189), ('抗疫', 189), ('活动', 187)], '2020/2/11', '2020/2/13'], [[('防控', 1884), ('组织', 1236), ('工作', 
1171), ('社会', 1161), ('捐赠', 743), ('协会', 682), ('万元', 607), ('服务', 575), ('企业', 565), ('物资', 542), ('积极', 463), ('口罩', 462), ('肺炎', 461), ('社区', 415), ('防疫', 390), ('参与', 386), ('开展', 359), ('做好', 349), ('会员', 349), ('捐款', 337), ('一线', 330), ('宣传', 326), ('志愿者', 323), ('新型', 322), ('抗击', 315), ('商会', 314), ('冠状病毒', 311), ('心理', 299), ('医院', 296), ('行业', 283), ('人员', 265), ('党员', 261), ('爱心', 258), ('通过', 255), ('提供', 249), ('关于', 233), ('慈善', 232), ('感染', 224), ('进行', 219), ('防护', 211), ('行业协会', 210), ('党支部', 209), ('要求', 208), ('全市', 205), ('保障', 200), ('单位', 195), ('消毒', 193), ('阻击战', 192), ('引导', 191)], '2020/2/10', '2020/2/13'], [[('防控', 1852), ('组织', 1242), ('社会', 1169), ('工作', 1136), ('捐赠', 789), ('协会', 688), ('万元', 632), ('企业', 594), ('服务', 594), ('物资', 571), ('口罩', 482), ('积极', 473), ('肺炎', 441), ('社区', 416), ('防疫', 394), ('参与', 385), ('开展', 364), ('做好', 361), ('会员', 356), ('志愿者', 352), ('心理', 350), ('商会', 349), ('一线', 341), ('宣传', 337), ('捐款', 330), ('抗击', 323), ('新型', 309), ('医院', 299), ('冠状病毒', 291), ('爱心', 286), ('行业', 281), ('人员', 268), ('提供', 261), ('通过', 260), ('党员', 247), ('慈善', 242), ('进行', 220), ('关于', 219), ('全市', 218), ('防护', 217), ('行业协会', 214), ('要求', 205), ('保障', 204), ('感染', 203), ('单位', 200), ('公益', 198), ('志愿', 195), ('党支部', 194), ('引导', 193)], '2020/2/10', '2020/2/12'], [[('防控', 1750), ('组织', 1217), ('社会', 1118), ('工作', 1062), ('捐赠', 775), ('协会', 697), ('万元', 633), ('物资', 594), ('服务', 569), ('企业', 507), ('口罩', 475), ('积极', 462), ('社区', 423), ('肺炎', 417), ('防疫', 402), ('参与', 376), ('志愿者', 359), ('开展', 357), ('宣传', 354), ('会员', 352), ('一线', 352), ('心理', 350), ('做好', 343), ('商会', 325), ('捐款', 314), ('抗击', 309), ('新型', 308), ('医院', 304), ('冠状病毒', 292), ('爱心', 281), ('行业', 255), ('提供', 254), ('人员', 253), ('通过', 250), ('党员', 246), ('慈善', 225), ('进行', 218), ('防护', 215), ('武汉', 215), ('全市', 207), ('关于', 205), ('行业协会', 205), ('单位', 202), ('公益', 197), ('活动', 196), ('感染', 196), ('保障', 193), ('志愿', 193), ('要求', 192)], '2020/2/10', '2020/2/12'], 
[[('防控', 1683), ('组织', 1060), ('工作', 1049), ('社会', 1003), ('捐赠', 703), ('协会', 656), ('万元', 594), ('物资', 549), ('服务', 538), ('企业', 522), ('积极', 442), ('口罩', 422), ('肺炎', 417), ('防疫', 381), ('社区', 374), ('做好', 339), ('开展', 336), ('商会', 333), ('一线', 324), ('会员', 321), ('新型', 319), ('宣传', 311), ('参与', 309), ('心理', 308), ('冠状病毒', 306), ('抗击', 299), ('医院', 293), ('志愿者', 291), ('捐款', 289), ('行业', 269), ('爱心', 262), ('通过', 246), ('提供', 243), ('人员', 216), ('武汉', 216), ('党员', 216), ('慈善', 214), ('防护', 209), ('感染', 209), ('单位', 202), ('保障', 199), ('进行', 198), ('行业协会', 196), ('要求', 194), ('关于', 193), ('全市', 190), ('发挥', 181), ('志愿', 176), ('公益', 175)], '2020/2/10', '2020/2/12'], [[('防控', 1794), ('工作', 1141), ('组织', 1100), ('社会', 1016), ('捐赠', 712), ('协会', 693), ('企业', 602), ('物资', 597), ('服务', 586), ('万元', 579), ('积极', 479), ('口罩', 436), ('防疫', 427), ('肺炎', 425), ('社区', 389), ('商会', 384), ('做好', 380), ('开展', 364), ('会员', 351), ('新型', 343), ('一线', 336), ('冠状病毒', 329), ('医院', 327), ('心理', 326), ('宣传', 322), ('参与', 320), ('志愿者', 308), ('捐款', 294), ('抗击', 288), ('行业', 281), ('提供', 276), ('通过', 254), ('爱心', 250), ('武汉', 231), ('党员', 230), ('保障', 227), ('防护', 227), ('慈善', 227), ('要求', 225), ('行业协会', 222), ('人员', 222), ('感染', 221), ('进行', 216), ('会员单位', 216), ('发挥', 210), ('关于', 204), ('全市', 203), ('单位', 202), ('公益', 194)], '2020/2/10', '2020/2/12'], [[('防控', 1902), ('工作', 1224), ('组织', 1105), ('社会', 1019), ('捐赠', 737), ('协会', 701), ('企业', 626), ('服务', 605), ('物资', 595), ('万元', 578), ('积极', 484), ('肺炎', 445), ('防疫', 441), ('做好', 431), ('口罩', 430), ('社区', 421), ('商会', 388), ('开展', 385), ('新型', 362), ('会员', 349), ('冠状病毒', 346), ('宣传', 337), ('一线', 329), ('参与', 321), ('心理', 321), ('志愿者', 316), ('行业', 314), ('医院', 296), ('提供', 282), ('抗击', 280), ('捐款', 276), ('慈善', 269), ('要求', 264), ('通过', 261), ('进行', 247), ('爱心', 245), ('会员单位', 240), ('行业协会', 239), ('感染', 239), ('保障', 236), ('人员', 232), ('防护', 231), ('相关', 217), ('武汉', 217), ('公益', 217), ('单位', 212), ('发挥', 208), ('关于', 207), ('党员', 
204)], '2020/2/9', '2020/2/11'], [[('防控', 1839), ('工作', 1190), ('组织', 1136), ('社会', 1020), ('捐赠', 753), ('协会', 719), ('服务', 610), ('物资', 603), ('万元', 587), ('企业', 557), ('积极', 465), ('肺炎', 454), ('防疫', 447), ('口罩', 437), ('做好', 413), ('社区', 409), ('开展', 380), ('新型', 379), ('冠状病毒', 375), ('商会', 357), ('医院', 351), ('心理', 332), ('志愿者', 328), ('会员', 323), ('参与', 323), ('宣传', 322), ('慈善', 321), ('一线', 317), ('提供', 296), ('抗击', 282), ('行业', 281), ('捐款', 268), ('要求', 265), ('通过', 265), ('感染', 261), ('会员单位', 249), ('进行', 245), ('爱心', 242), ('人员', 230), ('防护', 226), ('保障', 225), ('相关', 225), ('武汉', 218), ('公益', 213), ('单位', 211), ('情况', 204), ('及时', 200), ('责任', 199), ('发挥', 199)], '2020/2/8', '2020/2/11'], [[('防控', 1982), ('工作', 1302), ('组织', 1209), ('社会', 1075), ('捐赠', 830), ('协会', 779), ('服务', 663), ('物资', 627), ('万元', 626), ('企业', 577), ('社区', 488), ('防疫', 479), ('积极', 474), ('肺炎', 451), ('做好', 438), ('口罩', 424), ('开展', 421), ('慈善', 417), ('商会', 406), ('新型', 405), ('志愿者', 399), ('冠状病毒', 397), ('心理', 361), ('会员', 361), ('医院', 355), ('一线', 350), ('参与', 345), ('宣传', 335), ('提供', 308), ('抗击', 307), ('捐款', 291), ('爱心', 281), ('感染', 270), ('要求', 267), ('进行', 265), ('通过', 261), ('行业', 260), ('会员单位', 249), ('公益', 246), ('人员', 244), ('相关', 232), ('防护', 227), ('保障', 219), ('情况', 217), ('活动', 215), ('单位', 212), ('发挥', 209), ('武汉', 208), ('及时', 206)], '2020/2/8', '2020/2/11'], [[('防控', 1951), ('工作', 1286), ('组织', 1160), ('社会', 1010), ('捐赠', 829), ('协会', 763), ('服务', 638), ('物资', 628), ('万元', 619), ('企业', 602), ('防疫', 469), ('积极', 467), ('做好', 456), ('社区', 456), ('肺炎', 438), ('商会', 430), ('口罩', 418), ('慈善', 417), ('新型', 400), ('冠状病毒', 396), ('开展', 395), ('医院', 366), ('志愿者', 362), ('会员', 353), ('心理', 338), ('一线', 337), ('宣传', 330), ('参与', 311), ('抗击', 304), ('提供', 299), ('行业', 292), ('会员单位', 269), ('公益', 268), ('爱心', 267), ('要求', 266), ('感染', 265), ('捐款', 262), ('进行', 259), ('相关', 247), ('通过', 241), ('人员', 239), ('行业协会', 238), ('情况', 236), ('活动', 225), ('防护', 224), ('保障', 223), 
('单位', 221), ('责任', 216), ('及时', 213)], '2020/2/7', '2020/2/11'], [[('防控', 1988), ('工作', 1345), ('组织', 1277), ('社会', 1118), ('捐赠', 886), ('协会', 814), ('服务', 666), ('物资', 640), ('企业', 628), ('万元', 582), ('防疫', 504), ('积极', 495), ('做好', 482), ('社区', 469), ('商会', 460), ('肺炎', 443), ('慈善', 439), ('新型', 433), ('冠状病毒', 433), ('口罩', 429), ('医院', 420), ('开展', 399), ('心理', 392), ('一线', 373), ('会员', 370), ('志愿者', 357), ('抗击', 335), ('宣传', 333), ('提供', 326), ('参与', 304), ('行业', 298), ('会员单位', 294), ('进行', 291), ('感染', 284), ('公益', 282), ('爱心', 278), ('要求', 273), ('通过', 270), ('相关', 269), ('人员', 269), ('行业协会', 263), ('捐款', 259), ('情况', 248), ('防护', 247), ('基金会', 230), ('发挥', 225), ('武汉', 224), ('活动', 224), ('保障', 223)], '2020/2/7', '2020/2/11'], [[('防控', 2035), ('工作', 1373), ('组织', 1223), ('社会', 1113), ('捐赠', 901), ('协会', 833), ('服务', 650), ('企业', 645), ('物资', 615), ('万元', 602), ('防疫', 500), ('积极', 492), ('做好', 487), ('肺炎', 477), ('冠状病毒', 461), ('商会', 460), ('新型', 459), ('慈善', 458), ('社区', 453), ('口罩', 437), ('医院', 406), ('心理', 393), ('开展', 391), ('会员', 383), ('一线', 382), ('志愿者', 373), ('抗击', 352), ('提供', 330), ('宣传', 319), ('参与', 313), ('会员单位', 310), ('感染', 308), ('人员', 296), ('进行', 291), ('行业', 288), ('要求', 287), ('相关', 282), ('捐款', 281), ('通过', 275), ('公益', 269), ('爱心', 267), ('防护', 264), ('行业协会', 262), ('情况', 248), ('武汉', 239), ('单位', 235), ('活动', 233), ('基金会', 231), ('责任', 228)], '2020/2/7', '2020/2/10'], [[('防控', 2003), ('工作', 1360), ('组织', 1197), ('社会', 1061), ('捐赠', 870), ('协会', 855), ('服务', 648), ('企业', 628), ('万元', 596), ('物资', 582), ('防疫', 493), ('积极', 482), ('冠状病毒', 474), ('肺炎', 472), ('做好', 466), ('新型', 465), ('社区', 458), ('慈善', 440), ('商会', 431), ('口罩', 421), ('会员', 399), ('医院', 399), ('开展', 381), ('一线', 369), ('心理', 368), ('志愿者', 354), ('抗击', 352), ('提供', 333), ('会员单位', 320), ('感染', 315), ('参与', 312), ('宣传', 305), ('行业', 299), ('人员', 296), ('进行', 291), ('要求', 288), ('相关', 286), ('捐款', 283), ('通过', 274), ('公益', 269), ('防护', 260), ('行业协会', 256), ('爱心', 255), 
('武汉', 253), ('情况', 241), ('单位', 233), ('活动', 230), ('基金会', 226), ('发挥', 222)], '2020/2/6', '2020/2/10'], [[('防控', 2025), ('工作', 1401), ('组织', 1214), ('社会', 1098), ('捐赠', 910), ('协会', 788), ('服务', 673), ('企业', 663), ('物资', 618), ('万元', 611), ('冠状病毒', 504), ('肺炎', 503), ('新型', 496), ('防疫', 492), ('做好', 477), ('慈善', 476), ('社区', 468), ('积极', 466), ('商会', 456), ('口罩', 443), ('开展', 397), ('医院', 392), ('会员', 380), ('抗击', 367), ('心理', 367), ('志愿者', 364), ('一线', 345), ('提供', 337), ('感染', 333), ('参与', 311), ('会员单位', 306), ('要求', 302), ('人员', 299), ('公益', 296), ('宣传', 295), ('捐款', 290), ('通过', 287), ('进行', 285), ('相关', 284), ('行业', 281), ('防护', 276), ('爱心', 267), ('基金会', 258), ('情况', 255), ('行业协会', 253), ('武汉', 236), ('活动', 231), ('医用', 231), ('及时', 230)], '2020/2/6', '2020/2/10'], [[('防控', 2065), ('工作', 1412), ('组织', 1358), ('社会', 1216), ('捐赠', 926), ('协会', 775), ('服务', 702), ('企业', 627), ('物资', 621), ('万元', 617), ('防疫', 511), ('肺炎', 506), ('冠状病毒', 499), ('慈善', 495), ('新型', 494), ('积极', 490), ('社区', 479), ('做好', 478), ('口罩', 460), ('商会', 430), ('开展', 412), ('医院', 384), ('志愿者', 375), ('会员', 375), ('抗击', 370), ('心理', 369), ('一线', 349), ('提供', 347), ('参与', 342), ('感染', 324), ('公益', 316), ('宣传', 309), ('人员', 295), ('会员单位', 295), ('要求', 294), ('捐款', 294), ('通过', 289), ('进行', 289), ('相关', 279), ('爱心', 279), ('防护', 275), ('行业', 269), ('基金会', 266), ('行业协会', 259), ('情况', 251), ('活动', 242), ('发挥', 236), ('医用', 235), ('武汉', 232)], '2020/2/6', '2020/2/10'], [[('防控', 2173), ('工作', 1454), ('组织', 1387), ('社会', 1256), ('捐赠', 1019), ('协会', 772), ('服务', 728), ('万元', 675), ('物资', 619), ('企业', 606), ('慈善', 561), ('肺炎', 527), ('新型', 520), ('冠状病毒', 520), ('社区', 512), ('积极', 502), ('防疫', 487), ('做好', 484), ('口罩', 477), ('开展', 419), ('志愿者', 397), ('抗击', 390), ('医院', 388), ('商会', 388), ('一线', 368), ('参与', 359), ('心理', 352), ('感染', 351), ('会员', 347), ('提供', 344), ('通过', 324), ('人员', 314), ('宣传', 311), ('捐款', 309), ('公益', 304), ('爱心', 302), ('要求', 294), ('进行', 288), ('行业', 286), ('防护', 286), 
('会员单位', 277), ('相关', 276), ('活动', 272), ('行业协会', 271), ('情况', 265), ('基金会', 261), ('武汉', 253), ('支持', 241), ('单位', 235)], '2020/2/6', '2020/2/10'], [[('防控', 2048), ('组织', 1358), ('工作', 1347), ('社会', 1231), ('捐赠', 993), ('协会', 833), ('服务', 724), ('万元', 675), ('物资', 594), ('慈善', 534), ('新型', 514), ('冠状病毒', 514), ('口罩', 511), ('企业', 509), ('肺炎', 497), ('防疫', 481), ('积极', 477), ('社区', 477), ('抗击', 424), ('做好', 416), ('志愿者', 411), ('开展', 407), ('心理', 391), ('医院', 383), ('一线', 382), ('参与', 364), ('商会', 355), ('提供', 346), ('感染', 332), ('会员', 328), ('通过', 314), ('捐款', 314), ('人员', 303), ('公益', 300), ('爱心', 296), ('宣传', 290), ('防护', 282), ('活动', 269), ('进行', 266), ('相关', 259), ('武汉', 251), ('要求', 249), ('志愿', 247), ('会员单位', 244), ('医用', 241), ('基金会', 241), ('情况', 237), ('募捐', 236), ('服务中心', 231)], '2020/2/5', '2020/2/9'], [[('防控', 2187), ('工作', 1447), ('组织', 1403), ('社会', 1301), ('捐赠', 1026), ('协会', 847), ('服务', 714), ('万元', 673), ('物资', 614), ('企业', 554), ('口罩', 529), ('慈善', 514), ('新型', 504), ('积极', 503), ('冠状病毒', 497), ('肺炎', 493), ('防疫', 479), ('社区', 463), ('做好', 445), ('抗击', 439), ('商会', 426), ('志愿者', 423), ('一线', 393), ('参与', 387), ('开展', 380), ('心理', 367), ('会员', 362), ('提供', 345), ('医院', 342), ('捐款', 342), ('通过', 322), ('爱心', 315), ('感染', 312), ('人员', 301), ('宣传', 299), ('防护', 297), ('公益', 290), ('武汉', 270), ('进行', 269), ('活动', 263), ('要求', 257), ('相关', 252), ('会员单位', 249), ('单位', 246), ('志愿', 242), ('行业', 241), ('情况', 239), ('医用', 239), ('关于', 235)], '2020/2/5', '2020/2/8'], [[('防控', 2047), ('工作', 1340), ('组织', 1321), ('社会', 1245), ('捐赠', 918), ('协会', 762), ('服务', 687), ('万元', 597), ('物资', 581), ('企业', 534), ('口罩', 503), ('新型', 497), ('冠状病毒', 489), ('肺炎', 485), ('积极', 475), ('防疫', 432), ('慈善', 429), ('抗击', 413), ('做好', 405), ('参与', 374), ('社区', 374), ('商会', 369), ('一线', 357), ('志愿者', 346), ('心理', 334), ('提供', 331), ('开展', 329), ('医院', 325), ('会员', 314), ('通过', 313), ('感染', 311), ('人员', 290), ('捐款', 290), ('宣传', 284), ('爱心', 283), ('防护', 282), ('武汉', 268), ('公益', 
262), ('行业', 257), ('要求', 255), ('会员单位', 255), ('相关', 241), ('行业协会', 240), ('活动', 237), ('进行', 236), ('基金会', 233), ('关于', 231), ('单位', 229), ('医用', 228)], '2020/2/5', '2020/2/8'], [[('防控', 1973), ('组织', 1335), ('工作', 1288), ('社会', 1274), ('捐赠', 889), ('协会', 741), ('服务', 684), ('万元', 587), ('物资', 546), ('企业', 509), ('新型', 493), ('冠状病毒', 486), ('口罩', 482), ('肺炎', 478), ('积极', 456), ('慈善', 449), ('防疫', 421), ('抗击', 397), ('社区', 389), ('做好', 378), ('参与', 374), ('商会', 360), ('心理', 348), ('一线', 341), ('志愿者', 339), ('开展', 338), ('提供', 325), ('通过', 318), ('感染', 315), ('会员', 314), ('捐款', 308), ('医院', 302), ('人员', 280), ('防护', 279), ('爱心', 274), ('宣传', 269), ('武汉', 257), ('公益', 256), ('要求', 243), ('进行', 241), ('活动', 236), ('会员单位', 232), ('关于', 230), ('行业', 228), ('单位', 224), ('相关', 223), ('动员', 217), ('发挥', 213), ('发出', 212)], '2020/2/4', '2020/2/7'], [[('防控', 1919), ('工作', 1232), ('组织', 1186), ('社会', 1134), ('捐赠', 867), ('协会', 739), ('服务', 700), ('万元', 629), ('企业', 528), ('物资', 515), ('新型', 505), ('口罩', 505), ('冠状病毒', 489), ('肺炎', 485), ('积极', 438), ('慈善', 431), ('防疫', 387), ('社区', 387), ('抗击', 384), ('心理', 368), ('商会', 359), ('做好', 357), ('参与', 352), ('提供', 339), ('志愿者', 329), ('开展', 323), ('通过', 316), ('捐款', 308), ('一线', 307), ('医院', 305), ('感染', 304), ('会员', 304), ('人员', 274), ('防护', 271), ('武汉', 267), ('宣传', 265), ('爱心', 254), ('单位', 252), ('要求', 241), ('公益', 240), ('会员单位', 228), ('行业', 223), ('相关', 222), ('活动', 219), ('关于', 219), ('社工', 215), ('基金会', 214), ('动员', 210), ('进行', 209)], '2020/2/4', '2020/2/7'], [[('防控', 1927), ('组织', 1307), ('工作', 1246), ('社会', 1215), ('捐赠', 883), ('协会', 721), ('服务', 700), ('万元', 681), ('物资', 543), ('企业', 535), ('口罩', 512), ('新型', 504), ('冠状病毒', 483), ('肺炎', 476), ('积极', 450), ('慈善', 423), ('社区', 392), ('防疫', 389), ('抗击', 363), ('做好', 362), ('参与', 361), ('商会', 355), ('开展', 336), ('提供', 325), ('心理', 323), ('捐款', 321), ('医院', 311), ('通过', 310), ('志愿者', 310), ('会员', 305), ('感染', 300), ('一线', 285), ('爱心', 265), ('公益', 264), ('防护', 263), ('人员', 
261), ('宣传', 259), ('单位', 257), ('武汉', 255), ('要求', 246), ('会员单位', 238), ('行业', 237), ('基金会', 227), ('动员', 224), ('活动', 217), ('关于', 217), ('行业协会', 216), ('进行', 215), ('发挥', 214)], '2020/2/4', '2020/2/7'], [[('防控', 1977), ('组织', 1350), ('工作', 1279), ('社会', 1264), ('捐赠', 901), ('协会', 693), ('服务', 692), ('万元', 661), ('物资', 549), ('企业', 538), ('口罩', 525), ('新型', 515), ('冠状病毒', 492), ('肺炎', 487), ('积极', 464), ('慈善', 444), ('做好', 393), ('防疫', 384), ('商会', 372), ('社区', 364), ('参与', 358), ('抗击', 358), ('开展', 335), ('感染', 317), ('提供', 317), ('捐款', 317), ('通过', 302), ('医院', 299), ('志愿者', 297), ('心理', 288), ('会员', 283), ('一线', 281), ('防护', 272), ('宣传', 271), ('单位', 264), ('人员', 262), ('要求', 258), ('爱心', 257), ('武汉', 245), ('行业', 243), ('公益', 242), ('动员', 231), ('行业协会', 225), ('关于', 222), ('倡议', 222), ('发出', 219), ('活动', 218), ('会员单位', 217), ('基金会', 217)], '2020/2/4', '2020/2/6'], [[('防控', 2093), ('组织', 1398), ('工作', 1355), ('社会', 1302), ('捐赠', 867), ('协会', 752), ('服务', 716), ('万元', 637), ('口罩', 533), ('新型', 523), ('企业', 516), ('物资', 505), ('冠状病毒', 497), ('肺炎', 496), ('积极', 493), ('慈善', 434), ('做好', 416), ('防疫', 395), ('社区', 387), ('商会', 385), ('参与', 381), ('抗击', 369), ('开展', 337), ('感染', 334), ('志愿者', 315), ('捐款', 315), ('提供', 312), ('通过', 303), ('一线', 302), ('会员', 301), ('心理', 300), ('医院', 297), ('宣传', 284), ('单位', 279), ('要求', 274), ('人员', 273), ('防护', 272), ('爱心', 258), ('行业', 248), ('动员', 244), ('倡议', 243), ('武汉', 242), ('行业协会', 234), ('会员单位', 233), ('关于', 229), ('公益', 227), ('发挥', 225), ('发出', 224), ('厦门市', 224)], '2020/2/3', '2020/2/6'], [[('防控', 2276), ('组织', 1550), ('工作', 1510), ('社会', 1454), ('捐赠', 960), ('协会', 803), ('服务', 728), ('万元', 722), ('新型', 587), ('口罩', 586), ('企业', 570), ('肺炎', 568), ('冠状病毒', 556), ('物资', 551), ('积极', 527), ('慈善', 483), ('做好', 477), ('商会', 437), ('社区', 425), ('参与', 406), ('抗击', 399), ('防疫', 396), ('感染', 385), ('会员', 356), ('捐款', 351), ('开展', 347), ('通过', 332), ('志愿者', 332), ('一线', 325), ('医院', 318), ('单位', 317), ('提供', 314), ('要求', 307), 
('人员', 306), ('宣传', 306), ('心理', 306), ('防护', 303), ('关于', 294), ('动员', 279), ('行业', 263), ('会员单位', 262), ('武汉', 260), ('发出', 259), ('倡议', 257), ('爱心', 257), ('行业协会', 248), ('活动', 235), ('加强', 233), ('全市', 230)], '2020/2/3', '2020/2/6'], [[('防控', 2124), ('组织', 1526), ('社会', 1443), ('工作', 1425), ('捐赠', 854), ('协会', 744), ('服务', 690), ('万元', 641), ('新型', 570), ('口罩', 564), ('肺炎', 550), ('冠状病毒', 543), ('物资', 513), ('企业', 505), ('积极', 500), ('做好', 455), ('慈善', 425), ('商会', 424), ('社区', 423), ('防疫', 410), ('参与', 402), ('抗击', 396), ('感染', 373), ('会员', 349), ('心理', 338), ('开展', 335), ('捐款', 323), ('志愿者', 321), ('防护', 307), ('单位', 307), ('提供', 306), ('宣传', 304), ('通过', 301), ('一线', 297), ('人员', 294), ('要求', 291), ('医院', 290), ('动员', 278), ('关于', 272), ('倡议', 252), ('发出', 241), ('会员单位', 238), ('行业', 231), ('武汉', 230), ('发挥', 229), ('爱心', 227), ('加强', 224), ('公益', 223), ('全市', 220)], '2020/2/2', '2020/2/6'], [[('防控', 2146), ('组织', 1548), ('社会', 1469), ('工作', 1446), ('捐赠', 839), ('协会', 704), ('服务', 690), ('万元', 639), ('新型', 577), ('肺炎', 562), ('冠状病毒', 547), ('口罩', 541), ('企业', 512), ('物资', 511), ('积极', 504), ('做好', 466), ('慈善', 437), ('社区', 433), ('商会', 425), ('参与', 409), ('防疫', 396), ('感染', 389), ('抗击', 368), ('会员', 350), ('开展', 327), ('捐款', 325), ('心理', 318), ('宣传', 314), ('志愿者', 309), ('通过', 308), ('单位', 302), ('要求', 302), ('医院', 300), ('防护', 296), ('人员', 287), ('提供', 285), ('动员', 283), ('一线', 283), ('关于', 277), ('倡议', 261), ('发出', 250), ('会员单位', 244), ('爱心', 237), ('武汉', 234), ('厦门市', 232), ('加强', 230), ('行业', 229), ('发挥', 223), ('进行', 223)], '2020/2/2', '2020/2/5'], [[('防控', 2071), ('组织', 1513), ('社会', 1434), ('工作', 1408), ('捐赠', 780), ('服务', 695), ('协会', 692), ('万元', 610), ('新型', 588), ('肺炎', 568), ('冠状病毒', 551), ('口罩', 506), ('企业', 485), ('积极', 476), ('物资', 476), ('做好', 466), ('慈善', 437), ('社区', 429), ('感染', 392), ('参与', 380), ('防疫', 374), ('开展', 361), ('抗击', 359), ('商会', 338), ('会员', 324), ('心理', 320), ('要求', 315), ('宣传', 310), ('医院', 306), ('捐款', 303), ('防护', 295), 
('通过', 294), ('人员', 294), ('志愿者', 289), ('关于', 282), ('单位', 277), ('提供', 276), ('一线', 270), ('动员', 269), ('会员单位', 259), ('倡议', 254), ('武汉', 253), ('发出', 246), ('行业', 241), ('全市', 236), ('厦门市', 232), ('公益', 231), ('加强', 226), ('爱心', 225)], '2020/2/1', '2020/2/5'], [[('防控', 2304), ('组织', 1706), ('社会', 1646), ('工作', 1576), ('捐赠', 801), ('协会', 743), ('服务', 722), ('新型', 649), ('肺炎', 633), ('万元', 620), ('冠状病毒', 611), ('做好', 557), ('积极', 525), ('口罩', 508), ('企业', 491), ('物资', 480), ('慈善', 462), ('感染', 439), ('社区', 424), ('参与', 409), ('防疫', 384), ('开展', 375), ('要求', 370), ('会员', 370), ('抗击', 366), ('商会', 355), ('关于', 347), ('心理', 339), ('宣传', 331), ('志愿者', 328), ('动员', 322), ('医院', 321), ('倡议', 314), ('防护', 314), ('捐款', 312), ('人员', 304), ('通过', 303), ('发出', 299), ('单位', 295), ('提供', 293), ('民政局', 282), ('全市', 272), ('会员单位', 266), ('一线', 261), ('行业', 254), ('爱心', 246), ('武汉', 245), ('活动', 241), ('发挥', 240)], '2020/2/1', '2020/2/5'], [[('防控', 2510), ('组织', 1928), ('社会', 1851), ('工作', 1773), ('捐赠', 785), ('协会', 766), ('服务', 738), ('新型', 679), ('肺炎', 663), ('冠状病毒', 640), ('做好', 636), ('万元', 604), ('积极', 572), ('口罩', 510), ('企业', 506), ('物资', 478), ('慈善', 466), ('感染', 464), ('参与', 455), ('社区', 420), ('要求', 405), ('防疫', 400), ('开展', 397), ('抗击', 387), ('会员', 372), ('宣传', 365), ('关于', 365), ('志愿者', 348), ('倡议', 345), ('商会', 343), ('防护', 338), ('心理', 336), ('动员', 327), ('医院', 320), ('通过', 317), ('人员', 317), ('发出', 313), ('全市', 307), ('捐款', 305), ('民政局', 304), ('单位', 300), ('会员单位', 288), ('提供', 288), ('行业', 275), ('加强', 275), ('一线', 259), ('管理', 258), ('发挥', 256), ('相关', 256)], '2020/1/31', '2020/2/4'], [[('防控', 2614), ('组织', 2050), ('社会', 1963), ('工作', 1829), ('捐赠', 756), ('协会', 743), ('服务', 686), ('做好', 674), ('新型', 656), ('肺炎', 641), ('冠状病毒', 630), ('积极', 613), ('万元', 575), ('感染', 475), ('慈善', 466), ('物资', 461), ('参与', 459), ('口罩', 459), ('企业', 456), ('要求', 419), ('社区', 407), ('开展', 392), ('关于', 384), ('宣传', 377), ('会员', 376), ('倡议', 374), ('抗击', 373), ('防疫', 369), ('商会', 343), 
('志愿者', 341), ('防护', 340), ('发出', 333), ('捐款', 321), ('民政局', 315), ('动员', 315), ('全市', 307), ('人员', 299), ('加强', 297), ('通过', 294), ('管理', 282), ('行业', 278), ('会员单位', 276), ('单位', 262), ('登记', 262), ('发挥', 261), ('医院', 258), ('有序', 258), ('机构', 258), ('爱心', 253)], '2020/1/31', '2020/2/4'], [[('防控', 2585), ('组织', 2009), ('社会', 1956), ('工作', 1835), ('协会', 787), ('捐赠', 727), ('做好', 700), ('服务', 671), ('新型', 646), ('肺炎', 640), ('冠状病毒', 622), ('积极', 603), ('万元', 554), ('慈善', 522), ('感染', 471), ('口罩', 458), ('物资', 457), ('参与', 455), ('企业', 437), ('要求', 421), ('社区', 391), ('宣传', 377), ('关于', 377), ('会员', 370), ('抗击', 366), ('倡议', 365), ('开展', 362), ('防疫', 355), ('志愿者', 349), ('防护', 337), ('商会', 327), ('发出', 321), ('动员', 315), ('人员', 303), ('加强', 303), ('全市', 299), ('捐款', 295), ('通过', 292), ('民政局', 291), ('管理', 277), ('行业', 272), ('有序', 271), ('会员单位', 268), ('登记', 263), ('机构', 262), ('发挥', 260), ('单位', 253), ('发布', 252), ('部署', 252)], '2020/1/30', '2020/2/4'], [[('防控', 2660), ('组织', 2106), ('社会', 2069), ('工作', 1926), ('协会', 811), ('做好', 726), ('捐赠', 719), ('新型', 680), ('肺炎', 673), ('服务', 671), ('冠状病毒', 649), ('积极', 617), ('万元', 576), ('慈善', 539), ('感染', 483), ('参与', 473), ('企业', 464), ('要求', 450), ('物资', 444), ('口罩', 439), ('社区', 406), ('关于', 393), ('倡议', 386), ('宣传', 385), ('会员', 383), ('开展', 375), ('抗击', 369), ('防疫', 356), ('志愿者', 354), ('防护', 345), ('商会', 335), ('发出', 333), ('人员', 316), ('加强', 313), ('动员', 311), ('捐款', 306), ('民政局', 303), ('通过', 301), ('管理', 295), ('全市', 293), ('有序', 289), ('会员单位', 284), ('机构', 282), ('登记', 275), ('行业', 273), ('责任', 264), ('发挥', 263), ('部署', 260), ('活动', 260)], '2020/1/30', '2020/2/4'], [[('防控', 2588), ('组织', 2117), ('社会', 2091), ('工作', 1874), ('协会', 771), ('做好', 712), ('新型', 681), ('捐赠', 678), ('肺炎', 676), ('冠状病毒', 643), ('服务', 634), ('积极', 612), ('慈善', 563), ('万元', 543), ('感染', 471), ('企业', 466), ('参与', 460), ('物资', 435), ('要求', 435), ('口罩', 410), ('关于', 397), ('倡议', 384), ('开展', 372), ('会员', 371), ('宣传', 364), ('抗击', 346), ('发出', 
343), ('社区', 340), ('防护', 338), ('防疫', 322), ('志愿者', 316), ('动员', 308), ('加强', 305), ('全市', 299), ('有序', 297), ('民政局', 295), ('通过', 291), ('人员', 290), ('管理', 286), ('捐款', 286), ('机构', 286), ('商会', 284), ('行业', 279), ('会员单位', 277), ('登记', 270), ('活动', 268), ('引导', 266), ('责任', 264), ('部署', 264)], '2020/1/29', '2020/2/3'], [[('防控', 2544), ('组织', 2075), ('社会', 2054), ('工作', 1849), ('协会', 705), ('做好', 698), ('服务', 656), ('新型', 643), ('肺炎', 620), ('冠状病毒', 610), ('积极', 605), ('捐赠', 588), ('慈善', 537), ('万元', 467), ('参与', 453), ('感染', 452), ('要求', 433), ('企业', 430), ('物资', 407), ('倡议', 371), ('开展', 369), ('口罩', 354), ('关于', 351), ('宣传', 349), ('会员', 346), ('防护', 328), ('抗击', 324), ('加强', 321), ('发出', 313), ('管理', 309), ('机构', 306), ('登记', 302), ('社区', 300), ('志愿者', 297), ('防疫', 296), ('有序', 294), ('引导', 289), ('动员', 285), ('部署', 279), ('责任', 274), ('行业', 273), ('人员', 273), ('通过', 265), ('民政局', 263), ('全市', 259), ('相关', 257), ('活动', 253), ('会员单位', 253), ('发挥', 251)], '2020/1/29', '2020/2/3'], [[('防控', 2541), ('组织', 2050), ('社会', 2021), ('工作', 1875), ('做好', 716), ('协会', 676), ('服务', 659), ('新型', 650), ('肺炎', 633), ('冠状病毒', 606), ('积极', 601), ('捐赠', 592), ('慈善', 551), ('万元', 471), ('要求', 448), ('感染', 445), ('参与', 438), ('物资', 433), ('企业', 432), ('倡议', 383), ('开展', 362), ('关于', 354), ('会员', 349), ('口罩', 345), ('宣传', 332), ('发出', 323), ('防护', 322), ('加强', 321), ('抗击', 314), ('管理', 310), ('机构', 305), ('登记', 304), ('武汉', 299), ('有序', 289), ('部署', 287), ('引导', 281), ('通过', 281), ('会员单位', 281), ('动员', 281), ('行业', 277), ('志愿者', 275), ('人员', 273), ('防疫', 272), ('责任', 268), ('相关', 266), ('全市', 265), ('社区', 258), ('民政局', 256), ('活动', 253)], '2020/1/28', '2020/2/2'], [[('防控', 2496), ('组织', 1996), ('社会', 1966), ('工作', 1872), ('做好', 739), ('肺炎', 631), ('新型', 628), ('协会', 614), ('服务', 601), ('捐赠', 593), ('冠状病毒', 588), ('积极', 588), ('慈善', 552), ('万元', 448), ('感染', 438), ('要求', 438), ('参与', 427), ('企业', 415), ('物资', 410), ('倡议', 383), ('开展', 342), ('关于', 341), ('会员', 337), ('防护', 328), 
('加强', 325), ('发出', 319), ('口罩', 316), ('宣传', 315), ('管理', 307), ('机构', 294), ('抗击', 292), ('武汉', 290), ('有序', 284), ('引导', 283), ('动员', 283), ('部署', 281), ('登记', 280), ('人员', 279), ('会员单位', 273), ('行业', 265), ('责任', 264), ('全市', 262), ('活动', 256), ('民政局', 253), ('相关', 253), ('通过', 248), ('志愿者', 245), ('防疫', 243), ('发挥', 239)], '2020/1/28', '2020/2/2'], [[('防控', 2433), ('组织', 1936), ('社会', 1918), ('工作', 1802), ('做好', 732), ('肺炎', 614), ('新型', 607), ('协会', 572), ('服务', 570), ('积极', 569), ('冠状病毒', 564), ('捐赠', 547), ('慈善', 538), ('感染', 428), ('参与', 420), ('要求', 413), ('万元', 404), ('物资', 399), ('企业', 387), ('倡议', 370), ('关于', 319), ('加强', 313), ('防护', 313), ('会员', 311), ('开展', 309), ('发出', 302), ('宣传', 301), ('管理', 295), ('口罩', 290), ('部署', 286), ('动员', 283), ('有序', 278), ('机构', 274), ('引导', 270), ('登记', 269), ('抗击', 266), ('责任', 264), ('行业', 253), ('会员单位', 252), ('武汉', 252), ('相关', 251), ('人员', 249), ('活动', 246), ('基金会', 237), ('志愿者', 233), ('发挥', 232), ('防疫', 232), ('全市', 232), ('通过', 230)], '2020/1/28', '2020/2/1'], [[('防控', 2163), ('组织', 1748), ('社会', 1704), ('工作', 1601), ('做好', 646), ('肺炎', 545), ('慈善', 541), ('捐赠', 537), ('新型', 536), ('服务', 532), ('积极', 517), ('冠状病毒', 495), ('协会', 492), ('万元', 395), ('物资', 390), ('感染', 380), ('参与', 379), ('企业', 361), ('要求', 347), ('倡议', 305), ('加强', 299), ('开展', 299), ('防护', 296), ('口罩', 289), ('管理', 278), ('基金会', 277), ('会员', 271), ('武汉', 269), ('宣传', 268), ('部署', 267), ('引导', 262), ('机构', 256), ('登记', 255), ('有序', 255), ('关于', 243), ('抗击', 243), ('发出', 242), ('相关', 235), ('责任', 235), ('动员', 234), ('募捐', 229), ('会员单位', 224), ('行业', 221), ('人员', 221), ('提供', 218), ('群众', 216), ('防疫', 214), ('通过', 208), ('活动', 207)], '2020/1/27', '2020/2/1'], [[('防控', 1882), ('组织', 1434), ('社会', 1397), ('工作', 1357), ('捐赠', 574), ('服务', 567), ('做好', 562), ('慈善', 511), ('新型', 501), ('肺炎', 500), ('冠状病毒', 459), ('积极', 452), ('物资', 432), ('协会', 423), ('万元', 400), ('企业', 361), ('感染', 352), ('参与', 319), ('武汉', 304), ('要求', 293), ('基金会', 287), ('口罩', 
270), ('倡议', 267), ('防护', 267), ('加强', 252), ('开展', 252), ('会员', 250), ('部署', 238), ('相关', 234), ('引导', 231), ('管理', 225), ('提供', 223), ('抗击', 220), ('有序', 220), ('动员', 219), ('宣传', 218), ('责任', 217), ('发出', 215), ('关于', 214), ('募捐', 211), ('机构', 207), ('人员', 201), ('会员单位', 198), ('群众', 197), ('行业', 196), ('登记', 195), ('通过', 189), ('志愿者', 186), ('活动', 183)], '2020/1/27', '2020/1/31'], [[('防控', 1656), ('组织', 1223), ('社会', 1198), ('工作', 1197), ('捐赠', 532), ('服务', 513), ('做好', 498), ('肺炎', 484), ('慈善', 484), ('新型', 469), ('物资', 427), ('冠状病毒', 424), ('积极', 384), ('协会', 362), ('企业', 340), ('万元', 340), ('感染', 328), ('武汉', 316), ('参与', 282), ('要求', 263), ('基金会', 260), ('口罩', 256), ('防护', 240), ('相关', 232), ('倡议', 230), ('加强', 224), ('开展', 223), ('会员', 219), ('部署', 219), ('提供', 214), ('动员', 201), ('募捐', 195), ('抗击', 194), ('责任', 193), ('有序', 191), ('引导', 191), ('人员', 191), ('会员单位', 183), ('宣传', 183), ('机构', 182), ('群众', 181), ('管理', 178), ('医院', 173), ('发出', 173), ('活动', 173), ('关于', 172), ('通过', 171), ('行业', 171), ('地区', 171)], '2020/1/26', '2020/1/31'], [[('防控', 1511), ('工作', 1082), ('组织', 1069), ('社会', 1037), ('捐赠', 495), ('肺炎', 459), ('服务', 459), ('新型', 452), ('做好', 443), ('慈善', 424), ('冠状病毒', 405), ('物资', 391), ('积极', 344), ('企业', 318), ('感染', 313), ('武汉', 292), ('万元', 259), ('协会', 250), ('参与', 242), ('要求', 241), ('口罩', 232), ('基金会', 230), ('相关', 228), ('防护', 226), ('加强', 223), ('部署', 207), ('开展', 199), ('倡议', 196), ('会员', 195), ('提供', 193), ('抗击', 183), ('募捐', 181), ('动员', 176), ('群众', 174), ('责任', 173), ('地区', 169), ('引导', 168), ('宣传', 166), ('及时', 166), ('武汉市', 165), ('湖北省', 165), ('有序', 164), ('机构', 160), ('管理', 160), ('关于', 159), ('中国', 157), ('人员', 156), ('医院', 154), ('发出', 153)], '2020/1/26', '2020/1/30']] |
992,953 | 426e37a039a32a6ad3e3c79f71407ef3cf4cf599 | """Automatically build a multiconformer residue"""
import numpy as np
import argparse
import logging
import copy
import os
import sys
import time
from string import ascii_uppercase
from . import Structure
from .structure import residue_type
def parse_args():
    """Parse command-line arguments for the multiconformer normalizer.

    Returns:
        argparse.Namespace with attributes:
            structure   -- path to the input PDB file (positional)
            directory   -- absolute output directory (default: cwd)
            verbose     -- verbose flag
            occ_cutoff  -- occupancy cutoff for pruning conformers (default 0.01)
            rmsd_cutoff -- RMSD cutoff for comparing conformers (default 0.01)
    """
    p = argparse.ArgumentParser(description=__doc__)
    p.add_argument("structure", type=str, help="PDB-file containing structure.")

    # Output options
    p.add_argument(
        "-d",
        "--directory",
        type=os.path.abspath,  # normalize to an absolute path at parse time
        default=".",
        metavar="<dir>",
        help="Directory to store results.",
    )
    p.add_argument("-v", "--verbose", action="store_true", help="Be verbose.")
    p.add_argument(
        "-occ",
        "--occ_cutoff",
        type=float,
        default=0.01,
        metavar="<float>",
        help="Remove conformers with occupancies below occ_cutoff. Default = 0.01",
    )
    p.add_argument(
        "-rmsd",
        "--rmsd_cutoff",
        type=float,
        default=0.01,
        metavar="<float>",
        # Fix: this was the only argument without a help string.
        help="RMSD cutoff for comparing conformers. Default = 0.01",
    )
    args = p.parse_args()
    return args
def main():
    """Collapse redundant conformers of a multiconformer model and
    renormalise occupancies.

    Reads the structure named on the command line, removes side-chain or
    whole-residue conformers that are duplicates (RMSD below --rmsd_cutoff)
    or negligibly occupied (below --occ_cutoff), then rescales the remaining
    occupancies per residue and writes <structure>_norm.pdb.
    """
    args = parse_args()
    try:
        os.makedirs(args.directory)
        output_file = os.path.join(args.directory, args.structure[:-4] + "_norm.pdb")
    except OSError:
        # Directory already exists (or cannot be created): write next to input.
        output_file = args.structure[:-4] + "_norm.pdb"
    structure = Structure.fromfile(args.structure).reorder()
    to_remove = []
    # Iterate over every residue...
    for chain in structure:
        for residue in chain:
            should_collapse = False
            # BUG FIX: the original compared against ("aa-residue" and
            # "rotamer-residue"), which evaluates to just "rotamer-residue",
            # so plain aa-residues slipped through unprocessed.
            if residue_type(residue) not in ("aa-residue", "rotamer-residue"):
                continue
            altlocs = list(set(residue.altloc))
            # Deal with the simplest case first: only a single conformer.
            if len(altlocs) == 1:
                residue._q[residue._selection] = 1.0
                continue
            # Should we collapse the backbone for the current residue?
            if "" not in altlocs:
                for i, altloc1 in enumerate(altlocs):
                    conf1 = residue.extract("altloc", altloc1)
                    conf1 = conf1.extract("name", ("N", "CA", "C", "O"))
                    for altloc2 in altlocs[i + 1:]:
                        conf2 = residue.extract("altloc", altloc2)
                        conf2 = conf2.extract("name", ("N", "CA", "C", "O"))
                        # If the conformer has occupancy greater than the cutoff
                        # and if it is not identical to all.
                        # NOTE(review): this assignment is a no-op
                        # (should_collapse is already False), so the collapse
                        # branch below is unreachable for fully-altloc'd
                        # residues. It was probably meant to start at True and
                        # be falsified here — confirm before changing, as
                        # enabling it alters output.
                        if (
                            np.mean(np.linalg.norm(conf2.coor - conf1.coor, axis=1))
                            > 0.05
                        ) and (np.min(conf2.q) > args.occ_cutoff):
                            should_collapse = False
            # Add the atoms of the collapsed backbone to the to_remove list
            # and fix altloc and occupancy of the backbone.
            if should_collapse:
                print("collapse!")
                conf1._q[conf1._selection] = 1.0
                conf1._altloc[conf1._selection] = ""
                for altloc2 in altlocs[1:]:
                    conf2 = residue.extract("altloc", altloc2)
                    conf2 = conf2.extract("name", ("N", "CA", "C", "O"))
                    to_remove.extend(conf2._selection)
                print(to_remove)
                conf1.tofile(str(residue.chain[0]) + str(residue.resi[0]) + ".pdb")
            # If the backbone is collapsed, we can remove identical side chain
            # conformers or ones that fall below the occupancy cutoff:
            if residue.resn[0] != "GLY" and (should_collapse or ("" in altlocs)):
                for i, altloc1 in enumerate(altlocs):
                    if altloc1 == "":
                        continue
                    conf1 = residue.extract("altloc", altloc1)
                    conf1 = conf1.extract("name", ("N", "CA", "C", "O"), "!=")
                    if np.min(conf1.q) < args.occ_cutoff:
                        to_remove.extend(conf1._selection)
                        continue
                    for altloc2 in altlocs[i + 1:]:
                        conf2 = residue.extract("altloc", altloc2)
                        conf2 = conf2.extract("name", ("N", "CA", "C", "O"), "!=")
                        if conf1.rmsd(conf2) < args.rmsd_cutoff:
                            to_remove.extend(conf2._selection)
            # Now, to the case where the backbone is not collapsed.
            else:
                # Here, we only want to remove if ALL conformers are identical
                # or below the occupancy cutoff.
                is_identical = True
                for i, altloc1 in enumerate(altlocs):
                    if not is_identical:
                        break
                    conf1 = residue.extract("altloc", altloc1)
                    conf1.tofile(
                        str(residue.chain[0]) + str(residue.resi[0]) + "_conf1.pdb"
                    )
                    for altloc2 in altlocs[i + 1:]:
                        conf2 = residue.extract("altloc", altloc2)
                        conf2.tofile(
                            str(residue.chain[0]) + str(residue.resi[0]) + "_conf2.pdb"
                        )
                        # Conformers count as distinct when both are
                        # sufficiently occupied and differ by more than the
                        # RMSD cutoff.
                        if (
                            (np.min(conf2.q) > args.occ_cutoff)
                            and (np.min(conf1.q) > args.occ_cutoff)
                            and (conf1.rmsd(conf2) > args.rmsd_cutoff)
                        ):
                            is_identical = False
                            break
                # If all conformers converged (either because of RMSD or
                # occupancy), keep one with occupancy > args.occ_cutoff.
                found_unique_conf = False
                if is_identical:
                    for altloc1 in altlocs:
                        conf1 = residue.extract("altloc", altloc1)
                        # BUG FIX: the original set found_unique_conf = True
                        # unconditionally, so a below-cutoff first conformer
                        # claimed the "kept" slot and every usable conformer
                        # of the residue could end up removed.
                        if np.min(conf1.q) > args.occ_cutoff:
                            if found_unique_conf:
                                to_remove.extend(conf1._selection)
                            else:
                                found_unique_conf = True
            # If the occupancy of the conformer fell below the cutoff...
            for altloc in altlocs:
                conf = residue.extract("altloc", altloc)
                if np.min(conf.q) < args.occ_cutoff:
                    to_remove.extend(conf._selection)
    # Remove conformers in to_remove list:
    mask = structure.active
    mask[to_remove] = False
    data = {}
    for attr in structure.data:
        data[attr] = getattr(structure, attr).copy()[mask]
    structure = Structure(data).reorder()
    # Normalize occupancies and fix altlocs:
    for chain in structure:
        for residue in chain:
            altlocs = list(set(residue.altloc))
            try:
                altlocs.remove("")
            except ValueError:
                pass
            naltlocs = len(altlocs)
            if naltlocs < 2:
                residue._q[residue._selection] = 1.0
                residue._altloc[residue._selection] = ""
            else:
                # Rescale so the per-conformer occupancies of this residue sum
                # to 1 for each atom.
                conf = residue.extract("altloc", altlocs)
                natoms = len(residue.extract("altloc", altlocs[-1]).name)
                factor = natoms / np.sum(conf.q)
                residue._q[conf._selection] *= factor
    structure.tofile(output_file)
    print(len(to_remove))
|
992,954 | 0de152fadb0b3b4c3111caabf129881df0e2be33 | from django.utils.translation import gettext_lazy as _
from rest_framework import exceptions as rf_exceptions
from rest_framework import serializers
from waldur_core.structure.models import CUSTOMER_DETAILS_FIELDS
from waldur_core.structure.serializers import (
CountrySerializerMixin,
ProjectDetailsSerializerMixin,
)
from waldur_mastermind.marketplace import models as marketplace_models
from waldur_mastermind.marketplace.serializers import BaseItemSerializer
from . import models
class ReviewSerializerMixin(serializers.HyperlinkedModelSerializer):
    """Shared read-only representation for review-request models.

    Exposes the review metadata plus a few fields proxied from the owning
    flow (uuid, created, requester's full name).
    """

    # Human-readable state label instead of the raw choice value.
    state = serializers.ReadOnlyField(source='get_state_display')
    # Proxied from the related flow object.
    uuid = serializers.ReadOnlyField(source='flow.uuid')
    created = serializers.ReadOnlyField(source='flow.created')
    requested_by_full_name = serializers.ReadOnlyField(
        source='flow.requested_by.full_name'
    )
    reviewed_by_full_name = serializers.ReadOnlyField(source='reviewed_by.full_name')

    class Meta:
        model = models.ReviewMixin
        extra_kwargs = {
            'reviewed_by': {'lookup_field': 'uuid', 'view_name': 'user-detail'},
        }
        fields = (
            'uuid',
            'reviewed_by',
            'reviewed_by_full_name',
            'requested_by_full_name',
            'reviewed_at',
            'review_comment',
            'state',
            'created',
        )
class CustomerCreateRequestSerializer(CountrySerializerMixin, ReviewSerializerMixin):
    """Review-request serializer extended with the customer detail fields."""

    class Meta(ReviewSerializerMixin.Meta):
        model = models.CustomerCreateRequest
        fields = ReviewSerializerMixin.Meta.fields + CUSTOMER_DETAILS_FIELDS
class ProjectCreateRequestSerializer(
    ProjectDetailsSerializerMixin, ReviewSerializerMixin
):
    """Review-request serializer extended with project detail fields."""

    class Meta(ReviewSerializerMixin.Meta):
        model = models.ProjectCreateRequest
        fields = ReviewSerializerMixin.Meta.fields + (
            'name',
            'description',
            'end_date',
            'is_industry',
        )
class ResourceCreateRequestSerializer(BaseItemSerializer, ReviewSerializerMixin):
    """Review-request serializer combined with the marketplace item fields."""

    # Re-declared here so the flow's uuid wins over BaseItemSerializer's field.
    uuid = serializers.ReadOnlyField(source='flow.uuid')

    class Meta(BaseItemSerializer.Meta):
        model = models.ResourceCreateRequest
        fields = (
            ReviewSerializerMixin.Meta.fields
            + BaseItemSerializer.Meta.fields
            + ('name', 'description', 'end_date')
        )
        extra_kwargs = {
            **BaseItemSerializer.Meta.extra_kwargs,
            'reviewed_by': {'lookup_field': 'uuid', 'view_name': 'user-detail'},
        }
class FlowSerializer(serializers.HyperlinkedModelSerializer):
    """Serializer for a resource-creation flow: an optional customer-creation
    request plus mandatory project- and resource-creation requests.
    """

    state = serializers.ReadOnlyField(source='get_state_display')
    # Either an inline customer-creation request or an existing customer must
    # be provided — validated in create().
    customer_create_request = CustomerCreateRequestSerializer(required=False)
    customer_name = serializers.ReadOnlyField(source='customer.name')
    project_create_request = ProjectCreateRequestSerializer()
    resource_create_request = ResourceCreateRequestSerializer()

    def get_fields(self):
        """Rebind the nested resource serializer to the stored instance on
        update requests so its context-dependent fields resolve correctly."""
        fields = super().get_fields()
        if self.instance is None:
            return fields
        try:
            request = self.context['view'].request
        except (KeyError, AttributeError):
            # No view/request in context (e.g. schema generation): keep as-is.
            return fields
        if request.method in ('PUT', 'PATCH'):
            fields['resource_create_request'] = ResourceCreateRequestSerializer(
                instance=self.instance.resource_create_request
            )
        return fields

    class Meta:
        model = models.FlowTracker
        fields = (
            'uuid',
            'url',
            'customer',
            'customer_name',
            'order_item',
            'customer_create_request',
            'project_create_request',
            'resource_create_request',
            'state',
        )
        extra_kwargs = {
            'url': {
                'lookup_field': 'uuid',
                'view_name': 'marketplace-resource-creation-flow-detail',
            },
            'customer': {'lookup_field': 'uuid', 'view_name': 'customer-detail'},
            'order_item': {
                'lookup_field': 'uuid',
                'view_name': 'marketplace-order-item-detail',
            },
        }
        read_only_fields = ('requested_by', 'order_item')

    def create(self, validated_data):
        """Create a flow plus its nested request objects.

        Enforces that exactly one of `customer` / `customer_create_request`
        is supplied and that a non-staff user belongs to the given customer.
        """
        request = self.context['request']
        customer = validated_data.get('customer')
        customer_create_request_data = validated_data.pop(
            'customer_create_request', None
        )
        project_create_request_data = validated_data.pop('project_create_request')
        resource_create_request_data = validated_data.pop('resource_create_request')
        if not customer_create_request_data and not customer:
            raise serializers.ValidationError(
                _('Either customer_create_request or customer should be specified.')
            )
        if customer_create_request_data and customer:
            raise serializers.ValidationError(
                _('customer_create_request and customer are mutually exclusive.')
            )
        if (
            customer
            and not request.user.is_staff
            and request.user not in customer.get_users()
        ):
            raise serializers.ValidationError(
                _('User is not connected to this customer.')
            )
        if not customer:
            validated_data[
                'customer_create_request'
            ] = models.CustomerCreateRequest.objects.create(
                **customer_create_request_data
            )
        validated_data[
            'project_create_request'
        ] = models.ProjectCreateRequest.objects.create(**project_create_request_data)
        validated_data[
            'resource_create_request'
        ] = models.ResourceCreateRequest.objects.create(**resource_create_request_data)
        validated_data['requested_by'] = request.user
        return super().create(validated_data)

    def update(self, instance, validated_data):
        """Apply nested request payloads field-by-field onto the existing
        related objects, then update the flow itself."""
        for field in (
            'customer_create_request',
            'project_create_request',
            'resource_create_request',
        ):
            data = validated_data.pop(field, None)
            section = getattr(instance, field)
            if data:
                for k, v in data.items():
                    setattr(section, k, v)
                if section:
                    section.save()
        return super().update(instance, validated_data)
class OfferingActivateRequestSerializer(serializers.HyperlinkedModelSerializer):
    """Serializer for requests to activate a draft marketplace offering."""

    state = serializers.ReadOnlyField(source='get_state_display')

    class Meta:
        model = models.OfferingStateRequest
        fields = (
            'reviewed_by',
            'reviewed_at',
            'review_comment',
            'state',
            'created',
            'url',
            'uuid',
            'offering',
            'requested_by',
        )
        extra_kwargs = {
            'url': {
                'lookup_field': 'uuid',
                'view_name': 'marketplace-offering-activate-request-detail',
            },
            'offering': {
                'lookup_field': 'uuid',
                'view_name': 'marketplace-provider-offering-detail',
            },
            'reviewed_by': {'lookup_field': 'uuid', 'view_name': 'user-detail'},
            'requested_by': {'lookup_field': 'uuid', 'view_name': 'user-detail'},
        }
        read_only_fields = (
            'reviewed_by',
            'reviewed_at',
            'review_comment',
            'state',
            'created',
            'url',
            'uuid',
            'requested_by',
            'issue',
        )

    def create(self, validated_data):
        """Stamp the request with the authenticated user before saving."""
        request = self.context['request']
        validated_data['requested_by'] = request.user
        return super().create(validated_data)

    def validate_offering(self, offering):
        """Only draft offerings may be activated, and at most one open
        (draft/pending) request per user and offering is allowed."""
        if offering.state != marketplace_models.Offering.States.DRAFT:
            raise rf_exceptions.ValidationError(_('Offering state must be draft.'))
        request = self.context['request']
        if models.OfferingStateRequest.objects.filter(
            offering=offering,
            requested_by=request.user,
            state__in=(
                models.OfferingStateRequest.States.DRAFT,
                models.OfferingStateRequest.States.PENDING,
            ),
        ).exists():
            raise rf_exceptions.ValidationError(
                _('Pending request for this offering already exists.')
            )
        return offering
|
992,955 | 3e3283931c7c7b23099d3a9f1c5dd6ac2ccd310a | import numpy as np
import collections
import os
def read_words(conf):
    """Read whitespace-tokenized sentences from every file in
    conf["directory"] and return one flat token list.

    Only lines with exactly conf["contextSize"]-2 tokens are kept; each kept
    sentence is wrapped as: filterH//2 '<pad>' tokens, '<s>', tokens, '</s>'.
    """
    words = []
    for file in os.listdir(conf["directory"]):
        with open(os.path.join(conf["directory"], file), 'r') as f:
            for line in f.readlines():
                tokens = line.split()
                # NOTE Currently, only sentences with a fixed size are chosen
                # to account for fixed convolutional layer size.
                if len(tokens) == conf["contextSize"] - 2:
                    # BUG FIX: `/` yields a float under Python 3, which breaks
                    # list repetition; `//` is identical on ints in Python 2.
                    words.extend((['<pad>'] * (conf["filterH"] // 2)) + ['<s>'] + tokens + ['</s>'])
    return words
def index_words(words, conf):
    """Build a vocabulary of the conf["vocabSize"]-1 most frequent words
    (index 0 is reserved for '<unk>') and encode *words* as an index array.

    Returns (data, word_to_idx, idx_to_word) where data is a numpy array of
    word indices with out-of-vocabulary words mapped to 0.
    """
    word_counter = collections.Counter(words).most_common(conf["vocabSize"] - 1)
    word_to_idx = {'<unk>': 0}
    idx_to_word = {0: '<unk>'}
    for i, (word, _count) in enumerate(word_counter):
        word_to_idx[word] = i + 1
        idx_to_word[i + 1] = word
    # dict.get with an explicit default replaces the fragile truthiness test
    # (`idx if idx else ...`) that conflated "missing" with index 0.
    data = [word_to_idx.get(word, 0) for word in words]
    return np.array(data), word_to_idx, idx_to_word
def create_batches(data, conf):
    """Slice *data* into conf["numBatches"] (x, y) batch pairs.

    y is x shifted left by one (next-word targets, wrapping at the end); the
    last column of every batch is dropped. Mutates conf by storing
    "numBatches".
    """
    conf["numBatches"] = int(len(data) / (conf["batchSize"] * conf["contextSize"]))
    data = data[:conf["numBatches"] * conf["batchSize"] * conf["contextSize"]]
    xdata = data
    ydata = np.copy(data)
    ydata[:-1] = xdata[1:]
    ydata[-1] = xdata[0]
    x_batches = np.split(xdata.reshape(conf["batchSize"], -1), conf["numBatches"], 1)
    y_batches = np.split(ydata.reshape(conf["batchSize"], -1), conf["numBatches"], 1)
    # BUG FIX: range() replaces Python-2-only xrange(); identical iteration.
    for i in range(conf["numBatches"]):
        x_batches[i] = x_batches[i][:, :-1]
        y_batches[i] = y_batches[i][:, :-1]
    return x_batches, y_batches, conf
def get_batch(x_batches, y_batches, batch_idx):
    """Return the (x, y) batch at *batch_idx* plus the index of the next
    batch, wrapping back to 0 after the last one.

    y is reshaped to a column vector of targets.
    """
    x = x_batches[batch_idx]
    y = y_batches[batch_idx]
    next_idx = (batch_idx + 1) % len(x_batches)
    return x, y.reshape(-1, 1), next_idx
def prepare_data(conf):
    """Full preprocessing pipeline: read tokens, index them, batch them.

    Returns the x and y batch lists produced by create_batches.
    """
    indexed, _word_to_idx, _idx_to_word = index_words(read_words(conf), conf)
    x_batches, y_batches, _ = create_batches(indexed, conf)
    return x_batches, y_batches
|
992,956 | f75cd630178c9bdac06be62122eb3251fc1a4169 | from __future__ import absolute_import
# flake8: noqa
# import apis into api package
from thornode_client.api.health_check_api import HealthCheckApi
from thornode_client.api.keygen__keysign_api import KeygenKeysignApi
from thornode_client.api.network_api import NetworkApi
from thornode_client.api.nodes_api import NodesApi
from thornode_client.api.pools_api import PoolsApi
from thornode_client.api.queue_api import QueueApi
from thornode_client.api.tx_api import TxApi
from thornode_client.api.vaults_api import VaultsApi
|
992,957 | 6de2b9ba31bfa6482625729f893d1a535a5a8370 | class SegTree:
def __init__(self, init_list, seg_func, id_el):
self.id_el = id_el
self.seg_func = seg_func
n = len(init_list)
self.next_pow_of_2 = 2**(n-1).bit_length()
self.tree = [self.id_el]*2*self.next_pow_of_2
for i in range(n):
self.tree[self.next_pow_of_2 + i] = init_list[i]
for i in range(self.next_pow_of_2 - 1, 0, -1):
self.tree[i] = self.seg_func(self.tree[2*i], self.tree[2*i+1])
def update(self, idx, val):
k = self.next_pow_of_2 + idx
self.tree[k] = val
while k > 1:
k //= 2
self.tree[k] = self.seg_func(self.tree[2*k], self.tree[2*k+1])
def query(self, l, r):
l += self.next_pow_of_2
r += self.next_pow_of_2
res = self.id_el
while l < r:
if l%2:
res = self.seg_func(res, self.tree[l])
l += 1
if r%2:
res = self.seg_func(res, self.tree[r-1])
l //= 2
r //= 2
return res
def get(self, idx):
return self.tree[self.next_pow_of_2 + idx]
# Read grid size (H rows, W cols) and the number of blocked cells M.
H, W, M = list(map(int, input().split()))
# hrz[y]: row indices of blocked cells in column y (sentinel H appended);
# vrt[x]: column indices of blocked cells in row x (sentinel W appended).
hrz = [[H] for _ in range(W+1)]
vrt = [[W] for _ in range(H+1)]
for _ in range(M):
    # Convert input coordinates to 0-based indices.
    X, Y = list(map(lambda x: int(x) - 1, input().split()))
    hrz[Y].append(X)
    vrt[X].append(Y)
# Extra sentinels so min() below is always well-defined.
hrz[W].append(0)
vrt[H].append(0)
# NOTE(review): competitive-programming solution; the problem statement is
# not visible here. The sums below appear to count cells covered from the
# top row / leftmost column up to the first obstacle, with the segment tree
# tracking which columns have been blocked so far — confirm against the task.
ans = sum([min(x) for x in hrz[:min(vrt[0])]])
init_list = [0 if i < min(vrt[0]) else 1 for i in range(W+1)]
st = SegTree(init_list, lambda x, y: x+y, 0)
for h in range(1, min(hrz[0])):
    ans += st.query(0, min(vrt[h]))
    for w in vrt[h]:
        st.update(w, 1)
print(ans)
|
992,958 | 24df5a008f43e2091633e3173a163206a1de2463 | # -*- coding: utf-8 -*-
"""
Database of C-Mod shots, corresponding atomic lines, times of interest, THT for Hirex-Sr data access, etc..
@author: sciortino
"""
from builtins import str
def get_shot_info(shot, imp_override=None):
    """Return the key parameters for BSFC fitting of atomic lines on a C-Mod shot.

    Parameters
    ----------
    shot : int
        C-Mod shot number; must be one of the shots catalogued below.
    imp_override : str, optional
        Impurity to use instead of the shot's default primary impurity
        (e.g. 'Ar' instead of 'Ca').

    Returns
    -------
    tuple
        (primary_impurity, primary_line, tbin, chbin, t_min, t_max, tht):
        impurity name, atomic line label, time-bin and channel-bin indices,
        time window of interest [s], and the Hirex-Sr THT analysis number.

    Raises
    ------
    Exception
        If the shot is not in this database.
    """
    if shot==1121002022:
        primary_impurity = 'Ar' if imp_override is None else imp_override
        primary_line = 'lya1'
        tbin=5; chbin=40
        t_min=0.7; t_max=0.8
        tht=0
    elif shot==1120914029: #I-mode FS
        primary_impurity = 'Ca' if imp_override is None else imp_override
        primary_line = 'lya1'
        tbin=104; chbin=11
        t_min= 1.29; t_max=1.4
        tht=9 ############# tht=0 has 15 chords; tht=9 has 32
    elif shot==1120914036: # I-mode FS
        primary_impurity = 'Ca' if imp_override is None else imp_override
        primary_line = 'lya1'
        tbin=104; chbin=11
        #t_min=1.05; t_max=1.27
        t_min= 0.89; t_max=1.05
        tht=5
    elif shot==1101014019: # EDA H-mode FS
        primary_impurity = 'Ca' if imp_override is None else imp_override
        primary_line = 'z' #'z' #'z' # 'w'
        #t_min = 0.83; t_max = 1.4 # entire LBO interval
        t_min=1.24; t_max=1.4
        #t_min=1.26; t_max=1.27
        if primary_impurity=='Ar': # for Ca: THT=0; for Ar: THT=1?
            tht=1
            tbin=1.25 # if set to float, fitSingleBin looks for corresponding bin
            chbin=11 # random
        else:
            tht=0
            tbin=125; chbin=11
    elif shot==1101014029: # I-mode FS
        primary_impurity = 'Ca' if imp_override is None else imp_override
        primary_line = 'w' # 'z' #'w'
        tbin=120; chbin=7 #good
        #tbin=6; chbin=19 # apparently below noise level ?
        #tbin=9; chbin = 4 # very little signal, fit should be thrown out
        #t_min=1.18; t_max=1.3
        t_min=0.78; t_max=1.55 # entire LBO interval
        tht=0
    elif shot==1101014030: # I-mode FS
        primary_impurity = 'Ca' if imp_override is None else imp_override
        primary_line = 'q' #'z' #'x' #'w' #'all' #'z' # 'w'
        #t_min = 1.2; t_max = 1.3
        t_min=1.185; t_max=1.3
        if primary_impurity=='Ar': # for Ca: THT=0; for Ar: THT=1
            tht=1
            tbin = 6; chbin = 18
        else:
            tht=0
            #tbin=128; chbin=31 # t=1.2695
            tbin=116; chbin=18 # t=1.2095, ~ peak Ca signal
            #tbin=116; chbin=8 # unknown signal comes up near 3.196A only in this channel, motivated shorter lambda bounds
            #tbin=135; chbin=8 # t=1.3115
            ####t_min=0.780; t_max = 1.5 # entire LBO interval
    elif shot==1100305019:
        primary_impurity = 'Ca' if imp_override is None else imp_override
        primary_line = 'w'
        # tbin=128; chbin=11
        tbin=116; chbin=18
        t_min=0.98; t_max=1.2
        tht=9
    elif shot==1160506007:
        primary_impurity = 'Ar' if imp_override is None else imp_override
        primary_line = 'w'
        tbin = 46; chbin = 40
        t_min=0.93; t_max=0.99 #counter-current rotation SOC
        #t_min=0.57; t_max=0.63 #co-current rotation LOC
        tht = 0
    elif shot==1150903021:
        primary_impurity = 'Ar' if imp_override is None else imp_override
        primary_line = 'w'
        tbin = 16; chbin = 6
        t_min=0.93; t_max=0.99
        tht = 2
    elif shot==1160920007:
        primary_impurity = 'Ar' if imp_override is None else imp_override
        primary_line = 'lya1'
        tbin = 12; chbin = 4
        t_min=0.81; t_max=0.84
        tht = 0
    elif shot==1101014006: # L-mode FS
        # for Ca: THT=0; for Ar: THT=1
        primary_impurity = 'Ca' if imp_override is None else imp_override
        primary_line = 'z' #'w' #'all' #'z' #'w'
        if primary_impurity=='Ar':
            tbin=14; chbin=20 # for Ar
            tht=2
        elif primary_impurity=='Ca':
            #tbin=116; chbin=18
            tbin=124; chbin=11 # good for Ca
            tht=0
        #t_min = 0.75; t_max = 1.5 #entire LBO interval
        t_min=1.155; t_max=1.265
    elif shot==1101014011: # L-mode FS
        primary_impurity = 'Ca' if imp_override is None else imp_override
        primary_line = 'z' #'w'
        #tbin=116; chbin=18
        tbin=124; chbin=11
        #t_min=0.7; t_max=0.95
        t_min = 0.75; t_max = 1.5 #entire LBO interval
        tht=0
    elif shot==1101014012: # L-mode FS
        primary_impurity = 'Ca' if imp_override is None else imp_override
        primary_line = 'w' # 'z'
        #tbin=116; chbin=18
        tbin=124; chbin=11
        #t_min=1.150; t_max=1.3
        t_min = 0.75; t_max = 1.5 #entire LBO interval
        tht=0
    elif str(shot).startswith('1140729'): #1140729021 or shot==1140729023 or shot==1140729030:
        primary_impurity = 'Ca' if imp_override is None else imp_override
        primary_line = 'w'
        tbin=155; chbin=1 #tbin=155 is t=1.43
        #t_min=0.98; t_max=1.2 #1.15
        t_min=1.38; t_max = 1.499
        tht=9
    else:
        # define more lines!
        raise Exception('Times of interest not set for this shot!')
    return primary_impurity, primary_line, tbin,chbin, t_min, t_max,tht
|
992,959 | fe61f3b4ffe02f42d47d5bf0d0305b16330b87b8 | import random
def gamewin(comp, you):
    """Decide the rock-paper-scissors round.

    comp is the computer's full choice name ("Rock"/"Paper"/"Scissors");
    you is the player's single-letter choice ("r"/"p"/"s").
    Returns True if the player wins, False if the computer wins, and None
    on a tie (or on unrecognised input, matching the original fall-through).
    """
    # BUG FIX: the original tie test compared the full name against the
    # single letter (comp == you), which could never match; ties were only
    # reported via the implicit None fall-through. Compare first letters.
    if you == comp[0].lower():
        return None
    elif comp == "Rock":
        if you == "s":
            return False
        elif you == "p":
            return True
    elif comp == "Paper":
        if you == "r":
            return False
        elif you == "s":
            return True
    elif comp == "Scissors":
        if you == "p":
            return False
        elif you == "r":
            return True
print("Computer's turn: ")
# Pick the computer's move uniformly at random (replaces the randint/if-chain).
comp = random.choice(["Rock", "Paper", "Scissors"])
you = input("Your turn: Rock(r), Paper(p), Scissors(s)")
a = gamewin(comp, you)
print("Computer chose: "+comp)
print("You chose: "+you)
# gamewin returns True (player wins), False (computer wins) or None (tie);
# use identity tests instead of the fragile `== True` comparison.
if a is True:
    print("You Win!")
elif a is False:
    print("You Lose")
else:
    print("It's a Tie")
|
992,960 | c5be0c79bcd4364316b79a263f51cde27068c3cd | #!/usr/bin/python
# -*- coding: utf-8 -*-
# A tuple cannot be modified once it has been initialised.
print '今天我们学习:%s' % '元组tuple'
classmates = ('Tom','John','Lili','merry')
print '打印元组',classmates
print '打印元组的长度',len(classmates)
print '打印元组第2个元素',classmates[1]
print '-------------元组tuple中文---------------'
# A single-element tuple needs the trailing comma below to avoid being
# parsed as a plain parenthesised expression.
hanzi = ('汉字',)
# Interesting output: the Chinese text shows as escaped bytes when the tuple
# itself is printed, but displays normally when the element is printed directly.
print '中文数组:', hanzi
print '中文数组元素:',hanzi[0]
992,961 | 46f60b0983f72cd11fdd7732eb00662c20283ba5 | #!/usr/bin/env python
"""Fetches diesel prices"""
import paho.mqtt.client as paho # pip install paho-mqtt
import time
import logging
import sys
import requests
from pathlib import Path
from config import *
from secrets import *
FREQUENCY = 3600 # 1h
TIMEOUT = 10 #sec
def fetch_data():
    """Fetch the current diesel price for each configured station from the
    tankerkoenig API.

    Returns a dict mapping station nickname -> diesel price (float).
    Stations without a diesel price are omitted; on timeout or request
    failure the error is logged and a (possibly empty) dict is returned.
    """
    # Station UUIDs as registered with tankerkoenig.de (was three copy-pasted
    # id/if blocks; a single dict keeps name and id together).
    stations = {
        'aral': '73ce263a-8b6a-4b3f-b283-a1f4dc0925c4',   # Aral Tankstelle Darmstädter Str.
        'metro': '51d4b70c-a095-1aa0-e100-80009459e03a',  # Supermarkt-Tankstelle WEITERSTADT IM ROEDLING 8 A
        'shell': '213e33be-8b98-4a3f-8f52-fec1edbb6403',  # Shell Buettelborn A67 Buettelborn Sued
    }
    values = {}
    url = None
    try:
        tankstellenlist = ','.join(stations.values())
        url = f"https://creativecommons.tankerkoenig.de/json/prices.php?ids={tankstellenlist}&apikey={TANKERKOENIG_API_KEY}"  # noqa E501
        r = requests.get(url, timeout=TIMEOUT)
        r.raise_for_status()
        data = r.json()
        if not data['ok']:
            raise RuntimeError('tankerkoenig result not ok')
        for name, station_id in stations.items():
            if 'diesel' in data['prices'][station_id]:
                values[name] = data['prices'][station_id]['diesel']
    except requests.exceptions.Timeout:
        logging.error(f"Timeout requesting {url}")
    except requests.exceptions.RequestException as e:
        logging.error(f"requests exception {e}")
    return values
def update():
    """Fetch fresh prices and publish each as a retained MQTT message."""
    values = fetch_data()
    for k, v in values.items():
        (result, mid) = mqttc.publish(f"{DIESEL_MQTT_PREFIX}/{k}", str(v), 0, retain=True)  # noqa E501
        # Fixed log-message typo: "Pubish" -> "Publish".
        logging.info(f"Publish Result: {result} MID: {mid} for {k}: {v}")  # noqa E501
if __name__ == '__main__':
    # Log to stdout so a supervisor (systemd/docker) captures the output.
    logging.basicConfig(stream=sys.stdout,
                        format='%(asctime)s %(levelname)-8s %(message)s',
                        datefmt='%Y-%m-%d %H:%M:%S',
                        level=logging.INFO)
    # Client id is derived from the script filename.
    mqttc = paho.Client(f'{Path(__file__).stem}-connector', clean_session=True)
    # mqttc.enable_logger()
    # Last-will message marks the connector as lost if the link drops uncleanly.
    mqttc.will_set(f"{DIESEL_MQTT_PREFIX}/connectorstatus", "Connector: LOST_CONNECTION", 0, retain=True)  # noqa E501
    mqttc.connect(BROKER_HOST, BROKER_PORT, 60)
    logging.info(f"Connected to {BROKER_HOST}:{BROKER_PORT}")
    mqttc.publish(f"{DIESEL_MQTT_PREFIX}/connectorstatus", "Connector: ON-LINE", retain=True)  # noqa E501
    mqttc.loop_start()
    # Poll once per FREQUENCY seconds until interrupted (Ctrl-C exits the
    # loop cleanly; any other exception propagates).
    while True:
        try:
            update()
            time.sleep(FREQUENCY)
        except KeyboardInterrupt:
            break
        except Exception:
            raise
    mqttc.publish(f"{DIESEL_MQTT_PREFIX}/connectorstatus", "Connector: OFF-LINE", retain=True)  # noqa E501
    mqttc.disconnect()
    mqttc.loop_stop() # waits, until DISCONNECT message is sent out
    logging.info(f"Disconnected from to {BROKER_HOST}:{BROKER_PORT}")
|
992,962 | 5dd095a02e7bdc04c3217b80b3c3f6c5995a6ad0 | import pygame
class GameObj(pygame.sprite.Sprite):
    """Base sprite with rectangle positioning and axis-separated wall collision.

    NOTE(review): the collision helpers read self.velX / self.velY and
    self.blocks, none of which are set here — subclasses apparently must
    define them (velocity components and a sprite group of solid blocks);
    confirm against the subclasses.
    """

    def __init__(self, x, y, image=None):
        super().__init__()
        # NOTE(review): when image is None, self.rect is never created, so the
        # assignments below raise AttributeError — looks like image is
        # effectively required, or subclasses set self.rect first; confirm.
        if image:
            self.image = pygame.image.load(image).convert_alpha()
            self.rect = self.image.get_rect()
        self.rect.x = x
        self.rect.y = y

    def _collide_with_blocks_x(self):
        """Resolve horizontal overlap after an x-axis movement update."""
        # Did this update cause us to hit a wall?
        blocks_hit = pygame.sprite.spritecollide(self, self.blocks, False)
        for block in blocks_hit:
            # If we are moving right, set our right side to the left side of
            # the item we hit
            if self.velX > 0:
                self.rect.right = block.rect.left
            else:
                # Otherwise if we are moving left, do the opposite.
                self.rect.left = block.rect.right

    def _collide_with_blocks_y(self):
        """Resolve vertical overlap after a y-axis movement update."""
        # Check and see if we hit anything
        blocks_hit = pygame.sprite.spritecollide(self, self.blocks, False)
        for block in blocks_hit:
            # Reset our position based on the top/bottom of the object.
            if self.velY > 0:
                self.rect.bottom = block.rect.top
            else:
                self.rect.top = block.rect.bottom
992,963 | 1a4ea63660e511d25a4d8ad88f2fa95a2e5b7f89 | from django.apps import AppConfig as OAppConfig
from proso.django.enrichment import register_object_type_enricher
class AppConfig(OAppConfig):
    """Django application configuration for the proso_user app."""

    name = 'proso_user'

    def ready(self):
        # Register the user_answers JSON enricher for 'user_question' objects
        # once the app registry is fully loaded.
        register_object_type_enricher(['user_question'], 'proso_user.json_enrich.user_answers')
992,964 | 7909fc8d175132d6c567ca1c0faabe1dfbf4098a | import numpy as np
import load
import utils.sampling as smp
import matplotlib.pyplot as plt
def gm2code(arr, info):
    """Convert GalaxyMaker positions (Mpc, box-centred) to code units
    (box fraction in [0, 1]).

    `arr / info.pboxsize` produces a NEW array, so the caller's array is
    left untouched — rebinding `arr` inside this function would not
    propagate to the outside anyway.
    """
    return (arr / info.pboxsize + 0.5)# * 0.999783599
# Snapshot number and GalaxyMaker galaxy id to inspect.
nout = 368
idgal = 18
s = load.sim.Sim(nout)
# GalaxyMaker dump
gal = load.rd_GM.rd_gal(nout, idgal, base='./')
# Convert the galaxy centre and star positions into code units.
gal.header['xg'] = gm2code(gal.header['xg'], s.info)
gal.star['x'] = gm2code(gal.star['x'], s.info)
gal.star['y'] = gm2code(gal.star['y'], s.info)
gal.star['z'] = gm2code(gal.star['z'], s.info)
# Region.
# Radius: half the largest extent of the stellar distribution.
radius = 0.5 * max([gal.star['x'].ptp(), gal.star['y'].ptp(), gal.star['z'].ptp()])
region = smp.set_region(centers=gal.header['xg'], radius=1.5*radius)
xc, yc, zc = gal.header['xg']
# DM particles in the Region.
s.add_part(ptypes=['dm id pos vel mass'], region=region)
# Cell dump
cell = load.rd_GM.rd_cell(nout, idgal)
# center on galaxy position
cell['x'] -= xc
cell['y'] -= yc
cell['z'] -= zc
# Unit-conversion constants (cgs).
kpc_in_cm = 3.0857e21
msun_in_g = 1.989e33
gas_mass = cell['var0'] * s.info.unit_d * (cell['dx'] * s.info.boxtokpc * kpc_in_cm)**3 / msun_in_g
# 'var0' = density.
# cell['var'] * s.info.unit_d : density in gram unit (cgs unit system).
# cell['dx'] * s.info.boxtokpc : cell size in kpc
# cell['dx'] * s.info.boxtokpc * kpc_in_cm : cell size in cm.
star = gal.star
# center on galaxy position
star['x'] -= xc
star['y'] -= yc
star['z'] -= zc
# Star masses stored in units of 1e11 Msun -- presumably; verify against rd_GM.
star['m'] *=1e11
dm = s.part.dm
# Keep only DM particles inside the galaxy radius (squared-distance cut).
ind = np.where(np.square(dm['x'] - xc) + \
               np.square(dm['y'] - yc) + \
               np.square(dm['z'] - zc) < np.square(radius))[0]
dm = dm[ind]
# center on galaxy position
dm['x'] -= xc
dm['y'] -= yc
dm['z'] -= zc
dm['m'] *= s.info.msun
# Distance of each component from the galaxy centre.
rdm = np.sqrt(dm['x']**2 + dm['y']**2 + dm['z']**2) * s.info.pboxsize * 1e3
rst = np.sqrt(star['x']**2 + star['y']**2 + star['z']**2) * s.info.pboxsize * 1e3
rgas = np.sqrt(cell['x']**2 + cell['y']**2 + cell['z']**2) * s.info.pboxsize * 1e3 # in kpc unit.
# Sort by distance
rdsort = np.argsort(rdm)
rssort = np.argsort(rst)
rgsort = np.argsort(rgas)
# cumulative mass sum
# All mass in Msun unit.
cmdm = np.cumsum(dm['m'][rdsort])
cmst = np.cumsum(star['m'][rssort])
cmgas = np.cumsum(gas_mass[rgsort])
# Plot cumulative mass profiles (DM, stars, gas) in log-log space.
fig, ax = plt.subplots()
ax.plot(np.log10(rdm[rdsort]), np.log10(cmdm))
ax.plot(np.log10(rst[rssort]), np.log10(cmst))
ax.plot(np.log10(rgas[rgsort]), np.log10(cmgas))
ax.set_xlabel("log(kpc)")
ax.set_ylabel("log(Msun)")
ax.set_title("Cumulative mass")
plt.show()
|
992,965 | 1479fdffa8fcba073a26fd1e14abf4a7f4ddf4e7 | try:
with open('sad.txt', mode='r') as input_file:
print(input_file.read())
except FileNotFoundError as err:
print('Oop!! File does not exists')
|
992,966 | aaf3190604ff70d71cadee1707fd790a66cace49 | # Improve the implementation of Question 3, reusing the basic class DynamicalModel defined in q7
import q7_rumor_simulation as q7
import matplotlib.pyplot as plt
from matplotlib.lines import Line2D
class LogisticGrowthModel(q7.DynamicalModel):
    """Logistic population growth dX/dt = r*X*(K - X)/K built on the
    generic DynamicalModel integrator from q7."""

    # basic settings and initial conditions
    def __init__(self, step_size=1, simulation_method="euler", growth_ratio=0.2, capacity=10, X_init=1):
        super().__init__(step_size=step_size, state_variables=[X_init], simulation_method=simulation_method)
        self.growth_ratio = growth_ratio  # r: intrinsic growth rate
        self.capacity = capacity          # K: carrying capacity
        # BUG FIX: the original hard-coded `self.X_init = 1`, silently
        # ignoring the X_init argument (the state_variables list above did
        # use it, so the two could disagree).
        self.X_init = X_init

    # continuous formula:
    # state_dots[0] = dx/dt = r * x * (K - x) / K
    def continuous_formula(self, state_variables):
        """Return the state derivative vector for the logistic equation."""
        state_dots = state_variables.copy()
        state_dots[0] = self.growth_ratio * state_variables[0] * (self.capacity-state_variables[0]) /self.capacity
        return state_dots
if __name__ == "__main__":
h = [0.1,1,5,10]
methods = ["Euler", "Heun"]
plt.figure(figsize=(14,6))
for j in range(len(methods)):
for i in range(len(h)):
plt.subplot(len(methods), len(h), j*len(h)+i+1)
model = LogisticGrowthModel(step_size=h[i], simulation_method=methods[j])
model.run_simulation(max_time=50)
df = model.get_data()
plt.plot(df.iloc[:,0], df.iloc[:,1], q7.constant.line_styles[j],color=q7.constant.colors[j], label=methods[j])
plt.xlabel("Time")
plt.ylabel("Population")
# Make a custom legend for all subplots
custom_lines = [ Line2D([0], [0], linestyle=q7.constant.line_styles[i], color=q7.constant.colors[i], lw=2) for i in range(len(methods)) ]
plt.figlegend(custom_lines, methods, loc='lower center', ncol=4)
plt.subplots_adjust(left=0.1, right=0.9, top=0.95, bottom=0.15, wspace=0.4, hspace=0.3)
plt.show() |
992,967 | 51578bf6e9155dacbf9807b247049ccfed952d76 | from expression import Expression
def main():
    # Demonstrate left-to-right evaluation of mixed * and / operators.
    exp = Expression("2 / 3 * 4")
    # Python 2 print statement; solve() takes a context argument, unused here.
    print exp.solve(None)
if __name__ == '__main__':
    main()
|
992,968 | cd187eebf6d512291bb18ab69102e66cb6bc92d9 | from .general_obj import General
from .message import Message, MessageStatus
from .bft import ByzantineMessages
|
992,969 | a2802311a5672cd1886a349b5df9aca682286f4d | import numpy as np
from tensorflow.keras.models import Sequential,load_model
from tensorflow.keras.layers import Dense, Conv2D,MaxPooling2D,Dropout,Flatten
from tensorflow.keras.wrappers.scikit_learn import KerasClassifier
from tensorflow.keras.callbacks import ModelCheckpoint
from sklearn.model_selection import KFold,cross_val_score,train_test_split
import matplotlib.image as Image
import PIL.Image as Image
# Checkpoint callback: save the model whenever validation loss improves.
m_check = ModelCheckpoint("model/--{epoch:02d}--{val_loss:.4f}.hdf5", monitor = 'val_loss',save_best_only=True)
# One-off label generation (kept for reference):
# y = np.zeros((4500,))
# c = 0
# d = 0
# for i in [1,2,3,4,5,6,7,8,9]:
#     for _ in range(500):
#         y[c]=d
#         c += 1
#         d +=1
# np.save("y.npy",y)
# Load training images and labels from pre-built numpy dumps.
x = np.load("train.npy")
print(x[0])
y = np.load("y.npy")
# Load one sample image and reshape it into a single (112, 150, 3) batch.
predx = Image.open("495.png").resize(((150,112)))
predx= np.asarray(predx).reshape(1,112,150,3)
def create_model():
    """Build the CNN classifier: six Conv2D/Dropout/MaxPool stages followed
    by a stack of Dense layers ending in a 9-way softmax.

    NOTE(review): compiled with 'categorical_crossentropy', which expects
    one-hot labels; the y loaded above looks like integer class indices —
    confirm, or switch to 'sparse_categorical_crossentropy'.
    """
    model = Sequential()
    model.add(Conv2D(200,(3,3),padding='same',activation='relu',input_shape=(112,150,3)))
    model.add(Dropout(0.3))
    model.add(MaxPooling2D(2,2))
    model.add(Conv2D(200,(3,3),padding='same',activation='relu'))
    model.add(Dropout(0.3))
    model.add(MaxPooling2D(2,2))
    model.add(Conv2D(100,(3,3),padding='same',activation='relu'))
    model.add(Dropout(0.3))
    model.add(MaxPooling2D(2,2))
    model.add(Conv2D(100,(3,3),padding='same',activation='relu'))
    model.add(Dropout(0.3))
    model.add(MaxPooling2D(2,2))
    model.add(Conv2D(100,(2,2),padding='same',activation='relu'))
    model.add(Dropout(0.3))
    model.add(MaxPooling2D(2,2))
    model.add(Conv2D(100,(2,2),padding='same',activation='relu'))
    model.add(Dropout(0.3))
    model.add(MaxPooling2D(2,2))
    # Flatten into the fully-connected classifier head.
    model.add(Flatten())
    model.add(Dense(500,activation="relu"))
    model.add(Dropout(0.3))
    model.add(Dense(100,activation="relu"))
    model.add(Dropout(0.3))
    model.add(Dense(100,activation="relu"))
    model.add(Dropout(0.3))
    model.add(Dense(100,activation="relu"))
    model.add(Dropout(0.3))
    model.add(Dense(100,activation="relu"))
    model.add(Dropout(0.3))
    model.add(Dense(9,activation="softmax"))
    model.compile(optimizer="adam",loss="categorical_crossentropy",metrics=['acc'])
    return model
# NOTE: np.random.seed returns None, so `seed` is None; kept as-is for
# compatibility with the original random_state usage.
seed = np.random.seed(7)
kf = KFold(n_splits=3, shuffle=True, random_state=seed)
x_train, x_test, y_train, y_test = train_test_split(x, y, shuffle=True, random_state=seed)
# model = load_model('./model/--100--0.0354.hdf5')
model = create_model()
# 3-fold cross-validation training pass.
for train_i, test_i in kf.split(x):
    train_x, train_y = x[train_i], y[train_i]
    test_x, test_y = x[test_i], y[test_i]
    model.fit(train_x, train_y, batch_size=30, epochs=100, validation_split=0.25, callbacks=[m_check])
    score = model.evaluate(test_x, test_y)
    print(score)
# BUG FIX: the original also called
#   model.fit(..., validation_data=[(x_val, y_val)])
# with x_val/y_val never defined (NameError at runtime) and an invalid
# validation_data format; that dead call has been removed.
model.fit(x_train, y_train, batch_size=30, epochs=100, validation_split=0.25, callbacks=[m_check])
#
predy = model.evaluate(x_test, y_test)
print(predy)
|
992,970 | ea19d2715e3be1a51a51fc8d8a2446f77dbf0dd2 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import asyncio
import websockets
async def send_data(url, data):
    """Open a websocket connection to *url*, send *data* and print the reply.

    :parm url: websocket endpoint to connect to (str)
    :parm data: payload to send out
    """
    async with websockets.connect(url) as socket:
        await socket.send(data)
        reply = await socket.recv()
        print(reply)
if __name__ == "__main__":
    # asyncio.run() creates and tears down the event loop for us; the old
    # get_event_loop()/run_until_complete pattern is deprecated since 3.10.
    asyncio.run(send_data("ws://localhost:12345", "hello"))
|
992,971 | bc8bc25e38e3a0d1bf5df936b9f7bdbdb88515c7 | class User:
    def __init__(self,name,email):
        """Create a user with a zero starting balance."""
        self.name = name  # shown in balance printouts
        self.email = email
        self.account_balance = 0  # running balance in dollars
#methods
def make_deposit(self,amount):
self.account_balance += amount
return self
def make_withdrawal(self,amount):
self.account_balance -= amount
return self
    def display_user_balance(self):
        """Print '<name>, Balance: $<balance>'; returns self for chaining."""
        print(self.name+", Balance: $"+str(self.account_balance))
        return self
def transfer_money(self,other_user,amount):
self.account_balance -= amount
other_user.account_balance += amount
print(f"{other_user.name} received my money and her balance is ${other_user.account_balance}")
return self
#instances
maria = User("Maria","mariahernandez@gyahoo.com")
janice = User("Janice","misschanandlerbong@yhotmail.com")
winston = User("Winston","winstonchurchill@uk.gov")
#1 Have the first user make 3 deposits and 1 withdrawal and then display their balance
#2 Have the second user make 2 deposits and 2 withdrawals and then display their balance
#3 Have the third user make 1 deposits and 3 withdrawals and then display their balance
maria.make_deposit(100.00).make_deposit(100.00).make_deposit(100.00).make_withdrawal(50.00).display_user_balance()
janice.make_deposit(5000.50).make_deposit(976.25).make_withdrawal(40.00).make_withdrawal(20.00).display_user_balance()
winston.make_deposit(25.00).make_withdrawal(10.00).make_withdrawal(10.00).make_withdrawal(4.75).display_user_balance()
#BONUS: Add a transfer_money method; have the first user transfer money to the third user and then print both users' balances
# BUG FIX: the bonus requires a transfer to the THIRD user (winston) and
# printing BOTH balances; the original transferred to janice and printed
# only maria's balance.
maria.transfer_money(winston,150.00).display_user_balance()
winston.display_user_balance()
|
992,972 | 452f8bc3d8fb70333befe087477a0b0a8d22864a | # -*- coding: utf-8 -*-
"""
@propertyの動作を確認するためのサンプルコード。
"""
class FixedResistance(object):
    """Resistor whose ohms value is write-once.

    The value is assigned directly in __init__, so any later assignment
    through the ``ohms`` property trips the guard and raises AttributeError.
    """
    def __init__(self, ohms):
        self._ohms = ohms   # direct write; the property setter guard fires later
        self._voltage = 0
        self._current = 0

    @property
    def ohms(self):
        """Resistance in ohms (effectively read-only after construction)."""
        return self._ohms

    @ohms.setter
    def ohms(self, value):
        self._check_attr('_ohms')
        self._ohms = value

    def _check_attr(self, name):
        """Raise AttributeError if attribute *name* already exists on self."""
        if not hasattr(self, name):
            return
        raise AttributeError("Can't set attribute " + name)
r4 = FixedResistance(1e3)
print('[DEBUG]---------------------------------------')
print('r4.ohms=%5r' % r4.ohms)
print('[DEBUG]---------------------------------------')
# NOTE: __init__ already set _ohms, so this assignment triggers the setter's
# _check_attr guard and raises AttributeError; the final print never runs.
r4.ohms = 2e3
print('r4.ohms=%5r' % r4.ohms)
|
992,973 | e14a9a7358128061104e8ca055dbea85fe9257bf | import anvil.node_types as nt
import anvil.runtime as rt
from base_test import TestBase, clean_up_scene
class TestBaseCurve(TestBase):
    """Shared fixture: every curve test starts with one empty transform."""
    def setUp(self):
        super(TestBaseCurve, self).setUp()
        self.null_transform = nt.Transform.build()
class TestCurveBuild(TestBaseCurve):
    """Exercises nt.Curve.build() with varying argument combinations."""
    @clean_up_scene
    def test_empty_input(self):
        # Building with no arguments must not raise.
        nt.Curve.build()
    @clean_up_scene
    def test_full_input(self):
        nt.Curve.build(name='test_curve',
                       append=False,
                       bezier=True,
                       degree=3,
                       objectSpace=False,
                       periodic=False,
                       point=[[0, 0, 0], [0, 1, 0], [0, 2, 0], [0, 3, 0]],
                       replace=False,
                       worldSpace=True)
    @clean_up_scene
    def test_partial_input(self):
        nt.Curve.build(bezier=True,
                       worldSpace=True,
                       point=[[0, 0, 0], [0, 1, 0], [0, 2, 0], [0, 3, 0]])
    @clean_up_scene
    def test_point_input(self):
        curve = nt.Curve.build(point=[[0, 0, 0], [0, 1, 0], [0, 2, 0], [0, 3, 0]])
        try:
            self.assertEqual(curve.getShape().numCVs(), 4)
        except AttributeError:
            # Engines without getShape() support must still return a curve.
            self.assertIsNotNone(curve)
    @clean_up_scene
    def test_shape_input(self):
        curve = nt.Curve.build(shape='jack')
        try:
            self.assertEqual(curve.getShape().numCVs(), len(nt.Curve.SHAPE_CACHE['jack']['point']))
        except AttributeError:
            # Engines without getShape() support must still return a curve.
            self.assertIsNotNone(curve)
    @clean_up_scene
    def test_with_parent(self):
        curve = nt.Curve.build(parent=nt.Transform.build())
        if 'standalone' in rt.dcc.ENGINE:
            # The standalone engine reports parents by name only.
            self.assertTrue(curve.get_parent() == 'curve')
        else:
            self.assertTrue(self.null_transform == curve.get_parent())
class TestCurveGetShapeConstructor(TestBaseCurve):
    """Tests for Curve._get_shape_constructor lookup behavior."""
    # BUG FIX: these methods lacked the `test_` prefix, so the unittest
    # runner never collected or executed them.
    @clean_up_scene
    def test_existing_shape(self):
        # A known shape name yields a callable constructor.
        shape_lambda = nt.Curve._get_shape_constructor('star')
        shape_lambda()
    @clean_up_scene
    def test_existing_shape_positions(self):
        # With return_positions=True we get the raw point/degree data instead.
        positions_dict = nt.Curve._get_shape_constructor('star', return_positions=True)
        self.assertListEqual(list(positions_dict), ['point', 'degree'])
    @clean_up_scene
    def test_non_existing_shape(self):
        # Unknown shapes resolve to None rather than raising.
        self.assertIsNone(nt.Curve._get_shape_constructor('corndog'))
class TestCurvePopulateShapeFileData(TestBaseCurve):
    """Tests for Curve.populate_shape_file_data cache loading."""
    # BUG FIX: methods lacked the `test_` prefix so they never ran, and
    # assertEquals is a deprecated alias of assertEqual.
    @clean_up_scene
    def test_existing_shape_file(self):
        self.assertIsNotNone(nt.Curve.populate_shape_file_data().SHAPE_CACHE)
    @clean_up_scene
    def test_non_existing_shape_file(self):
        # A missing file leaves the cache as an empty dict, not None.
        nt.Curve.SHAPE_CACHE = None
        self.assertEqual(nt.Curve.populate_shape_file_data('not a file').SHAPE_CACHE, {})
|
992,974 | 47bddbe06ef59bd37b39490cb78ab7fdce6149e9 | #!/usr/bin/python
n = input()
ar = map(int, raw_input().split())
res = [0] * (max(ar) - min(ar) + 1)
for i in ar:
res[i-1] += 1
print res.index(max(res)) + 1
|
992,975 | 75959e6058535e0ac96b440cd96cc0f383c4944e | from typing import Optional
from sqlmodel import SQLModel, Field, create_engine, Session
engine = create_engine(url="sqlite:///users.db", echo=True)
class User(SQLModel, table=True):
    """Persisted user row in users.db."""
    id:Optional[int] = Field(None, primary_key=True)  # autoincrement primary key
    username: str
    password:str  # NOTE(review): stored as plain text -- hash before persisting
def get_session():
    """Yield a Session bound to the module-level engine.

    Generator style so the session is closed automatically when the
    caller is done (usable as a dependency/context helper).
    """
    with Session(engine) as session:
        yield session
def init_db():
    """Create all tables declared on SQLModel metadata in users.db."""
    SQLModel.metadata.create_all(engine)
992,976 | 76196993beb6de2c6b5a2f77a82b3876bb3aca00 | #!/usr/bin/python
# -*- coding:utf-8 -*-
# Python 2 demo of while/else: the else clause runs only when the loop ends
# WITHOUT break, so "It's over" is never printed here.
while True:
    x = [i for i in range(10) if i%2 == 0]
    print x
    break
else:
    print "It's over"
print "end"
|
992,977 | 6a9911fb9cb5cd251bb5a8f811f81c47b8f83ce4 | def prenet(inputs, num_units=None, dropout_rate=0, is_training=True, scope="prenet", reuse=None):
    # Two Dense(relu) + Dropout stages; num_units must be a 2-element sequence.
    # NOTE(review): `tf` is not imported anywhere in this chunk -- confirm the
    # file imports TensorFlow 1.x (tf.variable_scope / tf.layers are TF1 APIs).
    with tf.variable_scope(scope, reuse=reuse):
        outputs = tf.layers.dense(inputs, units=num_units[0], activation=tf.nn.relu, name="dense1")
        outputs = tf.layers.dropout(outputs, rate=dropout_rate, training=is_training, name="dropout1")
        outputs = tf.layers.dense(outputs, units=num_units[1], activation=tf.nn.relu, name="dense2")
        outputs = tf.layers.dropout(outputs, rate=dropout_rate, training=is_training, name="dropout2")
        return outputs  # (N, T, num_units/2)
|
992,978 | 0aeb58463fd2ac23bc4e78eda872e533e90cc243 | import pickle
def psave(a):
    """Pickle *a* to debug/save.p (the debug/ directory must exist)."""
    # Use a context manager so the handle is closed even if dump() raises.
    with open("debug/save.p", "wb") as f:
        pickle.dump(a, f)
def popen(a):
    """Load and return the object pickled by psave().

    *a* is unused; it is kept so existing callers keep working.
    """
    # BUG FIX: the original called pickle.load("debug.save", wb), which
    # crashes twice over -- pickle.load needs an open binary file object,
    # `wb` is an undefined name, and the path did not match psave()'s.
    with open("debug/save.p", "rb") as f:
        return pickle.load(f)
992,979 | 574cc6c156ca98d1ac600db2e4cdcadeb0d2211c | from os import path, mkdir
import nrrd
import numpy as np
import matplotlib.pyplot as plt
# sample code for opening PET and CT files in nrrd format, cutting the head region, printing some data from the header,
# and saving them again.
def sample_stack(stack, rows=6, cols=6, start_with=10, show_every=3):
    """Show a rows*cols grid of axial slices from a 3-D volume.

    Slice indices start at *start_with* and step by *show_every*; each
    panel shows stack[:, :, ind] in grayscale with its index as title.
    """
    fig, ax = plt.subplots(rows, cols, figsize=[12, 12])
    for i in range(rows * cols):
        ind = start_with + i * show_every
        # BUG FIX: the original used (i / rows, i % rows), which only works
        # when rows == cols; the column count determines the grid position.
        r, c = divmod(i, cols)
        ax[r, c].set_title('slice %d' % ind)
        ax[r, c].imshow(stack[:, :, ind], cmap='gray')
        ax[r, c].axis('off')
if __name__ == '__main__':
    # directory of patient data
    # NOTE(review): backslashes in these Windows paths are not escaped; they
    # only work because \C, \D, \P happen not to be escape sequences --
    # prefer raw strings (r'...') when editing these literals.
    data_dir = 'D:\Christina\Data\PET-CT LKH'
    # directory to save outputs to
    dest_dir = 'D:\Christina\Data\PET-CT LKH\Database_for_upload'
    patient = 'Pat1'
    ct_path = path.join(data_dir, patient, 'PET-CT', patient + '_CT.nrrd')
    pet_path = path.join(data_dir, patient, 'PET-CT', patient + '_PET.nrrd')
    dest_dir = path.join(dest_dir, patient)
    # Create target Directory if it doesn't exist
    if not path.exists(dest_dir):
        mkdir(dest_dir)
    # read nrrd files
    print('reading files...')
    ct_data, ct_header = nrrd.read(ct_path)
    pet_data, pet_header = nrrd.read(pet_path)
    # parse nrrd header for spacing
    # The diagonal of 'space directions' holds the per-axis voxel size.
    ct_spacing = np.asarray([ct_header['space directions'][0, 0],
                             ct_header['space directions'][1, 1],
                             ct_header['space directions'][2, 2]])
    pet_spacing = np.asarray([pet_header['space directions'][0, 0],
                              pet_header['space directions'][1, 1],
                              pet_header['space directions'][2, 2]])
    print('cut...')
    # cut the head region. if the head is approx 25 cm high, calculate the number of slices using the slice thickness
    height = 250
    num_slices_ct = int(height / ct_spacing[2])
    ct_data = ct_data[:, :, -num_slices_ct:]
    num_slices_pet = int(height / pet_spacing[2])
    pet_data = pet_data[:, :, -num_slices_pet:]
    # print data information
    print('CT: ')
    print('    Volume Size')
    print(ct_data.shape)
    print('    Resolution')
    print(ct_spacing)
    print('PET: ')
    print('    Volume Size')
    print(pet_data.shape)
    print('    Resolution')
    print(pet_spacing)
    # save the files
    print('writing files...')
    nrrd.write(path.join(dest_dir, patient + '_CT.nrrd'), ct_data, ct_header)
    nrrd.write(path.join(dest_dir, patient + '_PET.nrrd'), pet_data, pet_header)
    print('done.')
|
992,980 | 16aed2d56a2174767146a4a40f3618ca1e881eef | """
Classes for Neural Net learning via NEAT
NeuroEvolution of Augmenting Topologies
Video that inspired this entire machine-learning project:
https://www.youtube.com/watch?v=qv6UVOQ0F44
Original NEAT paper (that I didn't read):
http://nn.cs.utexas.edu/downloads/papers/stanley.ec02.pdf
"""
from __future__ import print_function
from copy import deepcopy
import random
class Neuron(object):
    """
    Base Network Node
    """
    class State(object):
        """
        Enum of neuron states, ordered by resolution precedence.
        """
        NEUTRAL = 0
        POSITIVE = 1
        NEGATIVE = 2

        @staticmethod
        def resolve(a, b):
            """
            Resolve a compound state: the higher-precedence state wins.
            """
            return max(a, b)

    def __init__(self):
        # Maps downstream Neuron -> polarity (a State constant) of the edge.
        self.outputs = {}

    def eval(self, hot, traversal):
        """
        Determine actual state (can be overridden)
        """
        return hot

    def simulate(self, hot, traversal):
        """
        Evaluate this node and propagate the resulting state downstream.
        """
        hot = self.eval(hot, traversal)
        # First non-NEUTRAL state recorded for a node wins.
        traversal[self] = traversal.get(self, self.State.NEUTRAL) or hot
        for neuron, polarity in self.outputs.items():  # items() works on py2 and py3
            # BUG FIX: polarity is a plain int, so the original
            # polarity.resolve(hot) raised AttributeError; resolve through
            # the State helper instead.
            neuron.simulate(self.State.resolve(polarity, hot), traversal)

    def print(self, **kwargs):
        """
        Print this node's hash and its outgoing (polarity, node-hash) edges.
        """
        # BUG FIX: outputs maps node -> polarity; the original unpacked the
        # pair in the wrong order and hashed the polarity int instead of
        # the downstream node.
        output_hashes = [
            (polarity, hash(node))
            for node, polarity in self.outputs.items()
        ]
        print(hash(self), '->', *output_hashes, **kwargs)
class Network(object):
    """
    Neural network composed of input, middle (hidden) and output Neurons.
    """
    def __init__(self, input_layer, output_layer, middle_layer=None):
        # Deep-copy so mutations on this network never leak back into the
        # layer lists the caller handed in.
        self.inputs = deepcopy(input_layer)
        self.outputs = deepcopy(output_layer)
        self.middle = deepcopy(middle_layer) if middle_layer else []

    def deep_copy(self):
        """
        Create an independent copy of the network (constructor deep-copies).
        """
        return self.__class__(self.inputs, self.outputs, self.middle)

    def traverse(self):
        """
        Evaluate the network; returns {neuron: state} for every node reached.
        """
        traversal = {}
        for node in self.inputs:
            node.simulate(node.State.POSITIVE, traversal)
        return traversal

    def add_random_neuron(self, allow_middle=True, factory=Neuron):
        """
        Mutate the network by splicing a freshly built neuron onto a random edge.
        """
        edge = self._create_canidate_edge(allow_middle)
        new_input, input_polarity, new_output, output_polarity = edge
        node = factory()
        node.outputs[new_output] = output_polarity
        new_input.outputs[node] = input_polarity
        # BUG FIX: track the newly created node (the original appended
        # new_input, duplicating an existing neuron in self.middle and
        # losing the new one).
        self.middle.append(node)

    def add_random_connection(self):
        """
        Mutate the network with a new random edge between existing nodes.
        """
        edge = self._create_canidate_edge()
        new_input, input_polarity, new_output, _ = edge
        new_input.outputs[new_output] = input_polarity

    def _create_canidate_edge(self, allow_middle=True):
        # Pick random polarities and a random (source, target) node pair.
        possible_states = (Neuron.State.POSITIVE, Neuron.State.NEGATIVE)
        input_polarity = random.choice(possible_states)
        output_polarity = random.choice(possible_states)
        # BUG FIX: build fresh candidate lists.  The original aliased
        # self.inputs / self.outputs and then used +=, which mutated the
        # layer lists in place, growing them on every call.
        input_choices = list(self.inputs)
        if allow_middle:
            input_choices += self.middle
        new_input = random.choice(input_choices)
        output_choices = list(self.outputs)
        if allow_middle:
            output_choices += self.middle
        # A node must not connect to itself.
        output_choices = [_ for _ in output_choices if _ != new_input]
        new_output = random.choice(output_choices)
        return new_input, input_polarity, new_output, output_polarity

    def print(self):
        """
        Print every node, layer by layer.
        """
        for layer in [self.inputs, self.middle, self.outputs]:
            for node in layer:
                node.print(end=', ')
        print('')
992,981 | f59f5b02c71801a6bed3e359ddfac18711da543d | from tkinter import *
def calc():
    """Evaluate the expression in the entry widget and show '= <result>'."""
    # SECURITY: eval() on user-typed text executes arbitrary Python.  That is
    # acceptable for a local toy calculator but never for untrusted input.
    try:
        value = eval(entry.get())
        entry.delete(0,END)
        Entry.insert(entry,0,'= '+str(value))
    except Exception:
        # Was a bare except; Exception keeps Ctrl-C/SystemExit working while
        # still turning any evaluation error into "ERROR" in the display.
        entry.delete(0,END)
        Entry.insert(entry,0,"ERROR")
def clr():
    """Clear the entry widget (bound to the AC button)."""
    entry.delete(0,END)
def insert(txt):
    """Append *txt* to the entry; clear first when the global freeze flag is set."""
    # BUG FIX: `freeze` is never defined at module level, so referencing it
    # directly raised NameError on every key press.  Treat a missing flag
    # as False so the keypad works.
    if globals().get('freeze', False):
        entry.delete(0,END)
    Entry.insert(entry,END,txt)
# ---------------------------------------------------------------------------
# Build the calculator window.  The keypad layout is identical to the
# original hand-rolled version; repetitive Button boilerplate is generated
# by _make_key() instead of one copy-pasted stanza per key.
# ---------------------------------------------------------------------------
top = Tk()
top.configure(background='black')
top.iconbitmap(r'Data/calc.ico')
top.title("Basic Calculator")
top.geometry('340x325+100+100')

_BG, _FG = 'black', 'deep sky blue'          # shared colour scheme
_BOLD = ('Helvetica', 15, 'bold')
_PLAIN = ('Helvetica', 15)

l1 = Label(top, text="CALCULATOR", font=('Agency FB', 30, 'bold'), bg=_BG, fg=_FG)
l1.grid(row=0, column=0, pady=5, columnspan=4)

entry = Entry(top, bd=4, width=30, font=_BOLD, bg=_BG, fg=_FG)
entry.grid(row=1, column=0, pady=5, columnspan=4)

def _make_key(label, row, column, font, inserted=None):
    """Create one keypad button that inserts *inserted* (default: its label)."""
    btn = Button(top, text=label, font=font, width=6, bg=_BG, fg=_FG)
    txt = label if inserted is None else inserted
    btn.bind('<Button-1>', lambda event, txt=txt: insert(txt))
    btn.grid(row=row, column=column)
    return btn

# NUMBER BUTTONS (9..1, '.', 0) in the same grid cells as before.
for _label, _row, _col in [('9', 2, 0), ('8', 2, 1), ('7', 2, 2),
                           ('6', 3, 0), ('5', 3, 1), ('4', 3, 2),
                           ('3', 4, 0), ('2', 4, 1), ('1', 4, 2),
                           ('.', 5, 0), ('0', 5, 1)]:
    _make_key(_label, _row, _col, _BOLD)

# '^' displays a caret but inserts Python's power operator.
_make_key('^', 5, 2, _BOLD, inserted='**')

# OPERATION BUTTONS in the rightmost column.
for _label, _row in [('+', 2), ('-', 3), ('*', 4), ('/', 5)]:
    _make_key(_label, _row, 3, _PLAIN)

# RESULT BUTTON
# BUG FIX: the '=' button had no command and therefore did nothing; calc()
# existed but was never wired up.
res = Button(top, text='=', font=_BOLD, width=13, bg=_BG, fg=_FG, command=calc)
res.grid(row=6, column=2, columnspan=2)

# ALL CLEAR BUTTON (rebinds the name `clr`; the callback was captured first).
clr = Button(top, text='AC', font=_BOLD, command=clr, width=13, bg=_BG, fg=_FG)
clr.grid(row=6, column=0, columnspan=2)

top.mainloop()
|
992,982 | 377ca4caa42fb8869c796b9ad21b24437d65200f | print("Hello World\a");
# NOTE(review): this reads like a Ruby escape-sequence table typed into
# Python.  \b, \f, \n, \r, \t, \v and \xab are real Python escapes and the
# octal \756 yields chr(0o756); but \c, \C-x, \e, \M-\C-x and \s are NOT
# Python escapes -- the backslash is kept literally (with a
# DeprecationWarning on Python 3.6+).
print("Hello World\b");
print("Hello World\cx");
print("Hello World\C-x");
print("Hello World\e");
print("Hello World\f");
print("Hello World\M-\C-x");
print("Hello World\n");
print("Hello World\756");
print("Hello World\r");
print("Hello World\s");
print("Hello World\t");
print("Hello World\v");
print("Hello World\xab")
992,983 | d87b253695c109e77fb9bfc743970147edad5c62 | import socket
import time
from PyQt5.QtCore import QTimer, QThread
import queue
import logging
import pyaudio
import threading
logging.basicConfig(format="%(message)s", level=logging.INFO)
class AudioRec(QThread):
    """Receives raw audio frames over UDP and plays them through pyaudio.

    A background thread fills a bounded queue from the socket; a QTimer on
    the Qt side drains one frame per tick into the output stream.
    """
    def __init__(self, threadChat):
        super().__init__()
        self.threadChat = threadChat  # chat thread; its nickname gates reception
        self.host_name = socket.gethostname()
        self.host_ip = socket.gethostbyname(self.host_name)
        # self.host_ip = '127.0.0.1'
        self.port = 9634
        self.socket_address = (self.host_ip, self.port)
        # a maxsize 100 will be ideal but lags with video at the moment
        # must send frames from server VideoGen and make sync in client
        # using audio and frame timestamps
        self.q = queue.Queue(maxsize=5)
        self.BUFF_SIZE = 65536
        self.audio_socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        self.audio_socket.setsockopt(socket.SOL_SOCKET, socket.SO_RCVBUF, self.BUFF_SIZE)
        self.audio_socket.bind(self.socket_address)
        self.p = pyaudio.PyAudio()
        self.CHUNK = 1024
        # 16-bit stereo output at 44.1 kHz, one CHUNK per write.
        self.stream = self.p.open(format=self.p.get_format_from_width(2),
                                  channels=2,
                                  rate=44100,
                                  output=True,
                                  frames_per_buffer=self.CHUNK)
        self.timer = QTimer()
        self.timer.timeout.connect(self.play_audio)
        # Fire slightly faster (0.8x) than one CHUNK's duration to keep the
        # output stream fed.
        self.timer.start(1000 * 0.8 * self.CHUNK / 44100)
        t1 = threading.Thread(target=self.get_audio_data, args=())
        t1.start()
        print('Listening for audio...')
    def get_audio_data(self):
        """Background loop: wait for login, then pump UDP frames into the queue."""
        # NOTE(review): this is a busy-wait that pegs a CPU core until the
        # chat thread sets a nickname -- consider re-enabling the sleep.
        while self.threadChat.nickname == "":
            # print('wait audio')
            # time.sleep(0.1)
            pass
        while True:
            try:
                self.frame, _ = self.audio_socket.recvfrom(self.BUFF_SIZE)
                # Blocks when the queue is full, throttling reception.
                self.q.put(self.frame)
            except BlockingIOError:
                pass
            except Exception as e:
                logging.error(e)
    def play_audio(self):
        """QTimer slot: play one queued frame, if any is available."""
        if not self.q.empty():
            frame = self.q.get()
            self.stream.write(frame)
|
992,984 | 1c25c7fdc846346b6d8409c3a1e3959b74f3c356 | # Search Function
import re
line = "Welcome to PYTHON World By Keshav Kummari"
# Case-insensitive search for "python"; groups 1 and 2 capture the text
# before and after it (re.M has no effect on a single-line subject).
searchObj = re.search(r'(.*)python(.*)',line,re.M|re.I)
if searchObj:
    print(searchObj.group())   # whole match
    print(searchObj.groups())  # (prefix, suffix) tuple
else:
    print("No Match Found!")
992,985 | 7f46a69dcc918142b4c0c756768125d69b649167 | from django.conf.urls import patterns, url
from apps.pcvblog.views import EntryCreate, EntryList, EntryUpdate, TagJSON
# URL routes for the pcvblog app.
# NOTE(review): django.conf.urls.patterns() and string view references were
# deprecated in Django 1.8 and removed in 1.10; migrate to a plain list of
# url()/path() entries when upgrading.
urlpatterns = patterns('',
    url(r'entry/list/$', EntryList.as_view(), name='entry_list'),
    url(r'entry/create/$', EntryCreate.as_view(), name='entry_create'),
    url(r'entry/(?P<pk>\d+)/$', EntryUpdate.as_view(), name='entry_update'),
    url(r'entry/(?P<entry_pk>\d+)/delete/$', 'apps.pcvblog.views.entry_delete', name='entry_delete'),
    url(r'tags/$', TagJSON.as_view(), name='tag_json'),
)
|
992,986 | bae56b01ae55c90502679b80f504921a8ee1ef57 | import math,string,itertools,fractions,heapq,collections,re,array,bisect,sys,random,time,copy,functools
from collections import deque
sys.setrecursionlimit(10**7)
inf = 10**20
mod = 10**9 + 7
DR = [1, -1, 0, 0]
DC = [0, 0, 1, -1]
# stdin helpers used by competitive-programming templates:
# LI: line of ints, LI_: ints minus one, LF: floats, LS: whitespace tokens,
# I: single int, F: single float, S: one raw line.
def LI(): return [int(x) for x in sys.stdin.readline().split()]
def LI_(): return [int(x)-1 for x in sys.stdin.readline().split()]
def LF(): return [float(x) for x in sys.stdin.readline().split()]
def LS(): return sys.stdin.readline().split()
def I(): return int(sys.stdin.readline())
def F(): return float(sys.stdin.readline())
def S(): return input()
def main():
    """Read a string and print the number of positions where an 'A?C'
    pattern occurs (a '?' with 'A' immediately before and 'C' after).

    NOTE(review): despite the *_cumsum names, all four arrays are one-hot
    indicator arrays -- no prefix accumulation is ever performed, and the
    hatena arrays are built but never used.  Verify against the intended
    problem statement before relying on this.
    """
    string = S()
    a_cumsum = [0 for _ in range(len(string) + 1)]
    c_cumsum_inv = [0 for _ in range(len(string) + 1)]
    hatena_cumsum = [0 for _ in range(len(string)+1)]
    hatena_cumsum_inv = [0 for _ in range(len(string)+1)]
    for i, c in enumerate(string):
        if c == 'A':
            a_cumsum[i+1] += 1
        if c == '?':
            hatena_cumsum[i+1] += 1
    for i, c in enumerate(string[::-1]):
        if c == 'C':
            c_cumsum_inv[i+1] += 1
        if c == '?':
            hatena_cumsum_inv[i+1] += 1
    hatena_cumsum_inv = hatena_cumsum_inv[::-1]
    c_cumsum_inv = c_cumsum_inv[::-1]
    cnt = 0
    for i, c in enumerate(string):
        if c == '?':
            # a_cumsum[i] == 1 iff string[i-1] == 'A';
            # c_cumsum_inv[i+1] == 1 iff string[i+1] == 'C'.
            cnt += ((a_cumsum[i] * c_cumsum_inv[i+1]) % mod)
    print(cnt)
main()
|
992,987 | f7e2233c43491cd681d4701c82c3831c8c70cdbe | import sys
import urllib2
import urllib
from bs4 import BeautifulSoup
import re
import urlparse
import os
def main():
    """Entry point: start mirroring from the index URL given on argv[1]."""
    downloadSongsInFolder(sys.argv[1])
def downloadSongsInFolder(folder):
    """Recursively mirror every .mp3 linked from *folder* (an HTTP index
    page) into /Users/gchandok/Music, preserving the directory layout.

    Python 2 only (urllib2/urlparse, print statements); any non-mp3 link
    other than " Parent Directory" is assumed to be a sub-directory.
    """
    print folder
    response = urllib2.urlopen(folder)
    html = response.read()
    dom = BeautifulSoup(html,"html.parser")
    links = dom.find_all('a')
    for link in links:
        linkURL = link.get('href')
        linkName = link.get_text()
        print linkName
        if linkName == " Parent Directory":
            continue
        if linkURL.find(".mp3") == -1:
            # Not a song link: treat it as a sub-directory and recurse.
            downloadSongsInFolder(folder + linkURL)
            continue
        songURLFinal = folder + linkURL
        print songURLFinal
        parsedURL = urlparse.urlparse(songURLFinal)
        # Mirror the remote path under the local music directory,
        # decoding %20 back into spaces.
        folderPath = '/Users/gchandok/Music'+os.path.dirname(parsedURL.path)
        folderPath = folderPath.replace("%20"," ")
        print folderPath
        if not os.path.exists(folderPath):
            os.makedirs(folderPath)
        filePath = os.path.join(folderPath,os.path.basename(parsedURL.path))
        filePath = filePath.replace("%20"," ")
        print filePath
        result = urllib.urlretrieve(songURLFinal, filePath)
# Standard script entry guard.
if __name__ == '__main__':
    main()
|
992,988 | a980198c4bf76493edbc060e67bb9e177aee07b1 | info={'name':'egon','age':18,'sex':'male'}
# # essentially: info = dict({'name':'egon','age':18,'sex':'male'})
#
# Key lookup, and a membership test (membership checks KEYS, not values).
print(info['age'])
print("age" in info)
# info['height']=1.80
#
# print(info)
#
# for key in info:
# print(key)
#Dictionary keys must be an immutable type, also known as a hashable type
# info={(1,2):'a'}
# print(info[(1,2)])
#Commonly used dict methods (learn these first)
# info={'name':'egon','age':18,'sex':'male'}
# print(info.pop('name'))
# print(info)
# print(info.pop('asdfsadfasdfasfasdfasdfasdf',None))
#
#
#
# print(info['name'])
# print(info.get('name1'))
# print(info.get('nameasdfasdfasdfasdf','not key'))
#Other dict methods
# info={'name':'egon','age':18,'sex':'male'}
# print(info.popitem())
# print(info.popitem())
# print(info)
# #
# print(info.keys(),type(info.keys()))
# print(info.values())
# for key in info.keys():
# print(key)
# for key in info.values():
# print(key)
#
# for key in info:
# print(key,info[key])
# print(info.items())
# for key,value in info.items(): # key,value=('name','egon')
# print(key,value)
#
# msg_dic={
# 'apple':10,
# 'tesla':100000,
# 'mac':3000,
# 'lenovo':30000,
# 'chicken':10,
# # }
# for key,value in msg_dic.items():
# print(key,value)
#
# info={'name':'egon','age':18,'sex':'male'}
# info.clear()
# print(info)
#
# print(info.items())
# dic=info.fromkeys(['name','age','sex'],11111111)
# print(dic)
# dic=info.fromkeys(['name','age','sex'],None)
# print(dic)
#
# dic=dict(a=1,b=2,c=3)
# print(dic)
# print(info.items())
# print(dict([('name', 'egon'), ('age', 18), ('sex', 'male')]))
# dic=dict.fromkeys(['name','age','sex'],11111111)
# print(dic)
# print(info)
# print(info)
# dic={'a':1,'b':2,'name':'SHUAI'}
# info.update(dic)
# print(info)
#
# d=dict.setdefault(['a','b','c'],[])
# print(d)
# d={}
# print(d)
# d['name']='egon'
# d['age']=18
# d['sex']='male'
# d['hobby']=[]
# d['hobby'].append('play basketball')
# d['hobby'].append('play football')
#
# d.setdefault('hobby',[]).append('play1') #d['hobby']
# d.setdefault('hobby',[]).append('play2') #d['hobby']
# d.setdefault('hobby',[]).append('play3') #d['hobby']
# print(d) |
992,989 | 282575a432159ab1bb77c75eacde6368516e82b5 | from django.db import models
from django.contrib.postgres.fields import JSONField
from django.contrib.contenttypes.fields import GenericForeignKey
from django.contrib.contenttypes.models import ContentType
from opencivicdata.core.models import Jurisdiction
from opencivicdata.legislative.models import LegislativeSession
# (db value, human label) choices shared by the report models below.
OBJECT_TYPES = (
    ("jurisdiction", "Jurisdiction"),
    ("person", "Person"),
    ("organization", "Organization"),
    ("post", "Post"),
    ("membership", "Membership"),
    ("bill", "Bill"),
    ("vote_event", "VoteEvent"),
    ("event", "Event"),
)
class RunPlan(models.Model):
    """One scrape/import run for a jurisdiction, with outcome and timing."""
    jurisdiction = models.ForeignKey(
        Jurisdiction, related_name="runs", on_delete=models.CASCADE
    )
    success = models.BooleanField(default=True)
    start_time = models.DateTimeField()
    end_time = models.DateTimeField()
    # Populated only when the run failed.
    exception = models.TextField(blank=True, default="")
    traceback = models.TextField(blank=True, default="")
class ScrapeReport(models.Model):
    """Timing record for one scraper invocation within a RunPlan."""
    plan = models.ForeignKey(RunPlan, related_name="scrapers", on_delete=models.CASCADE)
    scraper = models.CharField(max_length=300)
    args = models.CharField(max_length=300)
    start_time = models.DateTimeField()
    end_time = models.DateTimeField()
class ScrapeObjects(models.Model):
    """Count of objects of one type produced by a ScrapeReport."""
    report = models.ForeignKey(
        ScrapeReport, related_name="scraped_objects", on_delete=models.CASCADE
    )
    object_type = models.CharField(max_length=20, choices=OBJECT_TYPES)
    count = models.PositiveIntegerField()
class ImportObjects(models.Model):
    """Per-type insert/update/no-op counters for one import phase of a run."""
    report = models.ForeignKey(
        RunPlan, related_name="imported_objects", on_delete=models.CASCADE
    )
    object_type = models.CharField(max_length=20, choices=OBJECT_TYPES)
    insert_count = models.PositiveIntegerField()
    update_count = models.PositiveIntegerField()
    noop_count = models.PositiveIntegerField()
    start_time = models.DateTimeField()
    end_time = models.DateTimeField()
class Identifier(models.Model):
    """Maps an external identifier to any object via a generic foreign key."""
    identifier = models.CharField(max_length=300)
    jurisdiction = models.ForeignKey(
        Jurisdiction,
        related_name="pupa_ids",
        on_delete=models.CASCADE,
    )
    content_type = models.ForeignKey(ContentType, on_delete=models.CASCADE)
    object_id = models.CharField(max_length=300)
    content_object = GenericForeignKey("content_type", "object_id")
    def __str__(self):
        return self.identifier
class SessionDataQualityReport(models.Model):
    """Per-session counters for common bill/vote data-quality problems."""
    legislative_session = models.ForeignKey(
        LegislativeSession, on_delete=models.CASCADE
    )
    bills_missing_actions = models.PositiveIntegerField()
    bills_missing_sponsors = models.PositiveIntegerField()
    bills_missing_versions = models.PositiveIntegerField()
    votes_missing_voters = models.PositiveIntegerField()
    votes_missing_bill = models.PositiveIntegerField()
    votes_missing_yes_count = models.PositiveIntegerField()
    votes_missing_no_count = models.PositiveIntegerField()
    votes_with_bad_counts = models.PositiveIntegerField()
    # these fields store lists of names mapped to numbers of occurrences
    # NOTE(review): django.contrib.postgres.fields.JSONField is deprecated
    # since Django 3.1 in favor of django.db.models.JSONField.
    unmatched_sponsor_people = JSONField()
    unmatched_sponsor_organizations = JSONField()
    unmatched_voters = JSONField()
|
992,990 | 447a149ae9f2f1186ee3dfe1d483c0ce47fdc974 | # 3. Пользователь вводит месяц в виде целого числа от 1 до 12.
# Сообщить к какому времени года относится месяц (зима, весна, лето, осень).
# Напишите решения через list и через dict.
# Season lookup demonstrated two ways: via a list index and via a dict.
dct = {1: "Зима", 2: "Зима", 3: "Весна", 4: "Весна", 5: "Весна", 6: "Лето", 7: "Лето", 8: "Лето", 9: "Осень",
       10: "Осень", 11: "Осень", 12: "Зима"}
m = "Зима,Весна,Лето,Осень".split(",")
mnth = int(input("Номер месяца: "))
if 1 <= mnth <= 12:
    # December wraps back to winter (index 0); otherwise month // 3 picks the season.
    season_index = 0 if mnth == 12 else mnth // 3
    print(m[season_index])
    print(dct[mnth])
|
992,991 | b21ab7a6ab8a6cf206ed8a8f4cfed76e5e3d50b6 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
# Horizon project directory
HORIZON_HOME = "/home/dengjy/horizon"
# Port used by runserver
SERVER_PORT = "8093"
# Whether runserver must be wrapped in: ip netns exec haproxy ...
IS_NEED_IP_NETNS = True
# Derived constants (paths into the system and project settings trees)
PUBLIC_LOCAL_SETTINGS = "/usr/share/openstack-dashboard/openstack_dashboard/local/local_settings.conf"
HORIZON_LOCAL_SETTINGS = "{}/openstack_dashboard/local/local_settings.conf".format(HORIZON_HOME)
HORIZON_SETTINGS = "{}/openstack_dashboard/settings.py".format(HORIZON_HOME)
HORIZON_SCRIPTS = "{}/horizon/templates/horizon/_scripts.html".format(HORIZON_HOME)
HORIZON_CONF = "{}/horizon/templates/horizon/_conf.html".format(HORIZON_HOME)
def system(cmd):
    """Run *cmd* through the shell; abort the whole script if it fails."""
    status = os.system(cmd)
    if status != 0:
        sys.exit(1)
def replace_file_str(filename, old_str, new_str):
    """Replace every occurrence of *old_str* with *new_str* in *filename*.

    The file is rewritten only when at least one occurrence was found,
    so untouched files keep their original timestamps.
    """
    lines = []
    found = False
    with open(filename, mode="r") as f:
        for line in f:
            if old_str in line:
                found = True
                line = line.replace(old_str, new_str)
            lines.append(line)
    if found:
        with open(filename, mode="w") as f:
            f.write("".join(lines))
def compile_proj():
    """Compile gettext message catalogs for both apps, then the JS i18n bundle."""
    os.chdir(HORIZON_HOME + "/horizon")
    system("django-admin compilemessages")
    os.chdir(HORIZON_HOME + "/openstack_dashboard")
    system("django-admin compilemessages")
    os.chdir(HORIZON_HOME)
    system("python manage.py compilejsi18n")
def compress_proj():
    """Run django-compressor over the project's static assets."""
    os.chdir(HORIZON_HOME)
    system("python manage.py compress")
def run_proj(nohup):
    """Start the dev server, optionally inside the haproxy netns and/or via nohup."""
    os.chdir(HORIZON_HOME)
    if IS_NEED_IP_NETNS:
        command = "ip netns exec haproxy python manage.py runserver 0:{}".format(SERVER_PORT)
    else:
        command = "python manage.py runserver 0:{}".format(SERVER_PORT)
    if nohup:
        # Detach and redirect all output to <HORIZON_HOME>_log.
        command = "nohup {} > {}_log 2>&1 &".format(command, HORIZON_HOME)
    system(command)
def set_sso_login(flag):
    """Write the ccas/sso_login flag into local settings.

    The captcha is force-disabled unconditionally afterwards, regardless
    of *flag*.
    """
    if flag:
        value = "True"
    else:
        value = "False"
    system("openstack-config --set {} ccas sso_login {}".format(HORIZON_LOCAL_SETTINGS, value))
    replace_file_str(
        HORIZON_SETTINGS,
        "HORIZON_CONFIG['captcha_disabled'] = False",
        "HORIZON_CONFIG['captcha_disabled'] = True"
    )
def set_compress(flag):
    """Toggle the {% compress %} template markers in both horizon templates.

    flag=True re-activates compression (uncomments the markers);
    flag=False comments the markers out.
    """
    marker_pairs = [
        ("{% compress js %}", "{#% compress js %#}"),
        ("{% endcompress %}", "{#% endcompress %#}"),
    ]
    for active, commented in marker_pairs:
        old, new = (commented, active) if flag else (active, commented)
        for template in (HORIZON_SCRIPTS, HORIZON_CONF):
            replace_file_str(template, old, new)
def stop_runserver():
    """Kill any runserver process bound to SERVER_PORT (best effort)."""
    # Grep the process list for our runserver command line; awk extracts PIDs.
    show_cmd = "ps aux | grep 'runserver 0:" + SERVER_PORT + "' | grep -v grep | awk '{print $2}'"
    # print("Done: {}".format(show_cmd))
    # noinspection PyBroadException
    try:
        process_ids = os.popen(show_cmd).read().split("\n")
        for process_id in process_ids:
            if process_id != "":
                kill_cmd = "kill -9 {}".format(process_id)
                print("Done: {}".format(kill_cmd))
                system(kill_cmd)
    except Exception:
        print("Error: Kill Process Fail!")
# ---------------------------------------------------------------------------
# CLI dispatch: no argument prints the menu; exactly one argument runs the
# matching action (either the number or the name is accepted).
# ---------------------------------------------------------------------------
if len(sys.argv) == 1:
    print(" 1.cpl                      Compile, compress and run server")
    print(" 2.cpr                      Compress and run server")
    print(" 3.run                      Only run server")
    print(" 4.reset_local_setting      Reset openstack_dashboard/local/local_settings.py")
    print(" 5.edit_local_setting       Edit openstack_dashboard/local/local_settings.py")
    print(" 6.edit_setting             Edit openstack_dashboard/settings.py")
    print(" 7.enable_sso_login         Enable SSO login")
    print(" 8.disable_sso_login        Disable SSO login")
    print(" 9.enable_compress          Enable JS compress")
    print(" 10.disable_compress        Disable JS compress")
    print(" 11.run_nohup               Run with nohup")
    print(" 12.stop_nohup              Stop the nohup runserver")
    sys.exit(0)
if len(sys.argv) > 2:
    print("Error: Unknown Command!")  # BUG FIX: was "Unknow"
    sys.exit(1)
arg = sys.argv[1]
if arg in ("1", "cpl"):
    compile_proj()
    compress_proj()
    run_proj(False)
elif arg in ("2", "cpr"):
    compress_proj()
    run_proj(False)
elif arg in ("3", "run"):
    run_proj(False)
elif arg in ("4", "reset_local_setting"):
    # Copy the stock local_settings over the project's copy
    system("cat %s > %s" % (PUBLIC_LOCAL_SETTINGS, HORIZON_LOCAL_SETTINGS,))
    # Turn on DEBUG
    system("openstack-config --set {} DEFAULT debug True".format(HORIZON_LOCAL_SETTINGS))
    # Disable captcha validation
    replace_file_str(
        HORIZON_SETTINGS,
        "HORIZON_CONFIG['captcha_disabled'] = False",
        "HORIZON_CONFIG['captcha_disabled'] = True",
    )
elif arg in ("5", "edit_local_setting"):
    system("vim {}".format(HORIZON_LOCAL_SETTINGS))
elif arg in ("6", "edit_setting"):
    system("vim {}".format(HORIZON_SETTINGS))
elif arg in ("7", "enable_sso_login"):
    set_sso_login(True)
elif arg in ("8", "disable_sso_login"):
    set_sso_login(False)
elif arg in ("9", "enable_compress"):
    set_compress(True)
elif arg in ("10", "disable_compress"):
    set_compress(False)
elif arg in ("11", "run_nohup"):
    stop_runserver()
    command = "nohup {} 1 > {}_log 2>&1 &".format(__file__, HORIZON_HOME)
    print("The log file is: {}_log".format(HORIZON_HOME))
    system(command)
elif arg in ("12", "stop_nohup"):
    stop_runserver()
elif arg in ("13", "run_nohup3"):
    # Undocumented action: like run_nohup but re-invokes this script with "3".
    stop_runserver()
    command = "nohup {} 3 > {}_log 2>&1 &".format(__file__, HORIZON_HOME)
    print("The log file is: {}_log".format(HORIZON_HOME))
    system(command)
else:
    print("Error: Unknown Command!")  # BUG FIX: was "Unknow"
    sys.exit(1)
|
992,992 | b3dcf8ef8b47e8bcac2d33fc137ff52fe36eb88f | #!/usr/bin/env python
#
# Patrick Jenkins 1/7/2011
#
# Parse the query string portion of a URL into name and value pairs.
#
# Example:
# parse_qs "parse_qs "sourceid=chrome&ie=UTF-8&q=patrick+jenkins""
from sys import argv
from urlparse import parse_qs
# Parse the query string passed as argv[1] into key/value pairs and print one
# "key=value" line per parameter.  parse_qs maps each key to a *list* of
# values; only the first value is printed here.  (Python 2: `urlparse` module
# and print statement.)
url_parts = parse_qs(argv[1])
for key, val in url_parts.items():
    print "%s=%s" % (key, val[0])
|
992,993 | bf45617dafa562af48cb63d9e231049dfd2b834f |
# Demonstrates that built-ins are first-class objects: `abs` is called
# immediately, while `cmp` (Python 2 only; removed in Python 3) is bound to a
# name and called later.  cmp(1, 2) returns -1 because 1 < 2.
a=abs(3)
b=cmp
print a
print b(1,2)
992,994 | 0c53cdc2a4075b06b144c14406fd356b7df1adfe | #Author(s) - Mukund Manikarnike
from igraph import *
from random import randint
import scipy
import scipy.stats
#Read from the anonymized edge list that was created
anonymizedEdgeListFile = open("anonymized_edge_list.csv", 'r')
#Create a graph object for the graph that has been created
graph = Graph()
# Fixed vertex count for this data set (1517 users).
graph.add_vertices(1517)
edges = []
i = 0
# Parse the CSV: skip the header row (i == 0), then collect
# (source, dest) integer pairs.  xreadlines() is Python 2 only.
for line in anonymizedEdgeListFile.xreadlines():
    if i == 0:
        i += 1
        continue
    edgeString = line.split(",")
    edgeSource = int(str(edgeString[0]))
    edgeDest = int(str(edgeString[1]).strip("\r\n"))
    edge = (edgeSource, edgeDest)
    edges.append(edge)
    i += 1
#Add all edges to the graph
graph.add_edges(edges)
#Compute a power law version of the graph with exponent = 2
# Static_Power_Law draws a new random graph with the same vertex/edge
# counts and a power-law degree distribution (it does not transform `graph`).
newGraph = graph.Static_Power_Law(1517, 547873, 2)
#Display the degree distribution of the original graph
print "Plotting distribution"
plot(graph.degree_distribution(), "DegreeDistribution.pdf")
#Display the degree distribution of the power law graph
print "Power Law Deg Dist."
plot(newGraph.degree_distribution(), "PowerLawDegreeDistribution.pdf")
# Basic statistics of the graph built from the CSV edge list.
print "--------Created Graph Stats---------"
print "Diamter: " + str(graph.diameter())
print "The created graph doesn't exhibit a power law because the graph was"
print "based on the location of the users and not actual connections."
# NOTE(review): cut_vertices() returns articulation points; halving the count
# assumes each is reported twice -- confirm against the igraph docs.
print "Bridges: " + str(len(graph.cut_vertices())/2)
print "Tempe, Chandler, Glendale and Surprise have common users and each of them are connected."
print "-------Power Law Graph Stats--------"
print "Diameter: " + str(newGraph.diameter())
print "Bridges: " + str(len(newGraph.cut_vertices())/2)
x = 1
while x < 100:
print str(x) + "percent of edges being removed"
tempGraph = newGraph
numEdgesToRemove = 547873 * (x / 100)
i = 0
edgesToRemove = []
while i < numEdgesToRemove:
rand1 = randint(1, 1516)
rand2 = randint(1, 1516)
edgeToRemove = (rand1, rand2)
edgesToRemove.append(edgeToRemove)
i += 1
#Removing edges
tempGraph.delete_edges(edgesToRemove)
#Computing size of largest connected component
sizes = tempGraph.components().sizes()
size = max(sizes)
print "Size of component: " + str(size)
print "===================================="
x += 1
# Global structural measures of the power-law graph.
print "=====================Network Measures============="
print "Average Local Clustering Coefficient: " + str(newGraph.transitivity_avglocal_undirected())
print "Global Clustering Coefficient: " + str(newGraph.transitivity_undirected())
print "Average Path Length: " + str(newGraph.average_path_length(directed = False))
#Compute PageRank
pageRanks = newGraph.pagerank()
pageRanks.sort(reverse = True)
# Print the ten largest PageRank scores (list sorted descending).
i = 0
while i < 10:
    print "pageRank No. " + str(i) + ":" + str(pageRanks[i])
    i += 1
#Compute EigenVector Centralities
eigenVectorCent = newGraph.evcent(directed = False)
eigenVectorCent.sort(reverse = True)
# Print the ten largest eigenvector centralities.
i = 0
while i < 10:
    print "eigen vector centrality No. " + str(i) + ": " + str(eigenVectorCent[i])
    i += 1
#Degree centrality
degrees = newGraph.degree()
degrees.sort(reverse = True)
i = 0
sum1 = 0
while i < len(degrees):
sum1 += degrees[i]
i += 1
avg = sum1/len(degrees)
print "Average degree: " + str(avg)
i = 0
while i < 10:
print "degree centrality No. " + str(i) + ": " + str(degrees[i])
i += 1
#Rank Correlation
# NOTE(review): the three lists were each sorted descending above, so these
# pearsonr values compare sorted sequences, not matched per-vertex ranks --
# confirm this is the intended measure.
pPageRankEigen = scipy.stats.pearsonr(pageRanks, eigenVectorCent)
pPageRankDegree = scipy.stats.pearsonr(pageRanks, degrees)
pEigenDegree = scipy.stats.pearsonr(eigenVectorCent, degrees)
print "Rank Correlation between pageRank and EigenVector: " + str(pPageRankEigen)
print "Rank Correlation between pageRank and degree: " + str(pPageRankDegree)
print "Rank Correlation between EigenVector and degree: " + str(pEigenDegree)
#Jaccard Similarity
# Scan the pairwise Jaccard matrix for the largest similarity strictly
# below 1 (self-similarity / identical neighborhoods are excluded).
jaccards = newGraph.similarity_jaccard(loops = False)
i = 0
max1 = 0
while i < len(jaccards):
    j = 1
    while j < len(jaccards[i]):
        if jaccards[i][j] > max1 and jaccards[i][j] != 1:
            max1 = jaccards[i][j]
        j += 1
    i += 1
print "Highest Similarity is " + str(max1)
print "===============Network Models=============="
#Simulate a random graph
# Erdos-Renyi G(n, m) with matching vertex and edge counts.
print "Random Graph"
randomGraph = Graph.Erdos_Renyi(n=1517, m=547873)
print "Global Clustering Coefficient: " + str(randomGraph.transitivity_undirected())
print "Average Path Length: " + str(randomGraph.average_path_length(directed = False))
print "Plot Degree Distribution for random graph"
plot(randomGraph.degree_distribution(), "PowerLawDegreeDistribution-RandomGraph.pdf")
print "Small World"
smallWorldGraph = newGraph
#Simulate a small world graph
# NOTE(review): if `avg` is an int (Python 2), (avg - 2)/(avg - 1) truncates
# to 0 and the rewiring probability p becomes 0 -- verify avg is a float.
p = 0.75 * ((avg - 2)/(avg - 1))
dim = 1
size = 1517
nei = 2
smallWorldGraph = Graph.Watts_Strogatz(dim, size, nei, p)
print "Global Clustering Coefficient: " + str(smallWorldGraph.transitivity_undirected())
print "Average Path Length: " + str(smallWorldGraph.average_path_length(directed = False))
print "Plot Degree Distribution for small world graph"
plot(smallWorldGraph.degree_distribution(), "PowerLawDegreeDistribution-SmallWorldGraph.pdf")
print "PreferentialAttachment"
prefGraph = newGraph
#Simulate a preferential attachment graph
# NOTE(review): `nei` is overwritten with a path length here but is not used
# by the Barabasi call below -- appears to be dead code.
nei = newGraph.average_path_length(directed = False)
prefGraph = Graph.Barabasi(1517, 547873)
print "Global Clustering Coefficient: " + str(prefGraph.transitivity_undirected())
print "Average Path Length: " + str(prefGraph.average_path_length(directed = False))
print "Plot Degree Distribution for Preferential Attachment graph"
plot(prefGraph.degree_distribution(), "PowerLawDegreeDistribution-PreferentialAttachmetGraph.pdf")
|
992,995 | e95880c40c7594445b446b31d964db11f2a44dc4 | import numpy as np
import pandas as pd
from lsst.sims.skybrightness import SkyModel
import lsst.sims.skybrightness_pre as sb
from lsst.sims.utils import raDec2Hpid, m5_flat_sed, Site, _approx_RaDec2AltAz
import healpy as hp
import sqlite3
import ephem
import sys
__all__ = ['mjd2night', 'obs2sqlite']
class mjd2night(object):
    """Map an MJD to an integer LSST night number.

    Nights are split at an approximate local noon, expressed as a fixed
    fractional-day offset from the survey start.  Assumes no observation
    falls within an hour of noon (UTC noon drifts through the year), so a
    simple floor of the shifted MJD is sufficient.
    """
    def __init__(self, mjd_start=59853, noon=(0.16-0.5)):
        # Survey start MJD and the noon offset (fraction of a day).
        self.mjd_start = mjd_start
        self.noon = noon
    def __call__(self, mjd):
        # Shift so night boundaries land on integers, then floor.
        elapsed = mjd - self.noon - self.mjd_start
        return np.floor(elapsed)
class mjd2night_sunset(object):
    """Convert MJD to 'night' after calculating actual times of sunsets. (deprecated?)

    Precomputes the MJDs of every sunset at the LSST site over ~13 years
    with pyephem, then labels a night as the index of the last sunset
    preceding the given MJD.
    """
    def __init__(self, mjd_start=59853.035):
        # LSST site coordinates feed the pyephem observer.
        self.site = Site(name='LSST')
        self.obs = ephem.Observer()
        self.obs.lat = self.site.latitude_rad
        self.obs.lon = self.site.longitude_rad
        self.obs.elevation = self.site.height
        self.mjd = mjd_start
        self.sun = ephem.Sun()
        # Build the sunset table once up front.
        self.generate_sunsets()
    def generate_sunsets(self, nyears=13, day_pad=50):
        """
        Generate the sunrise times for LSST so we can label nights by MJD
        """
        # Set observatory horizon to zero
        # Offset between pyephem's Dublin Julian Date epoch and MJD.
        doff = ephem.Date(0)-ephem.Date('1858/11/17')
        self.obs.horizon = 0.
        # Swipe dates to match sims_skybrightness_pre365
        mjd_start = self.mjd
        mjd_end = np.arange(mjd_start, mjd_start+365.25*nyears+day_pad+366, 366).max()
        step = 0.25
        # Sample every 6 hours and find the previous sunset at each sample;
        # duplicates are collapsed below.
        mjds = np.arange(mjd_start, mjd_end+step, step)
        setting = mjds*0.
        # Stupid Dublin Julian Date
        djds = mjds - doff
        sun = ephem.Sun()
        for i, (mjd, djd) in enumerate(zip(mjds, djds)):
            sun.compute(djd)
            setting[i] = self.obs.previous_setting(sun, start=djd, use_center=True)
        # Convert back from DJD to MJD.
        setting = setting + doff
        # zomg, round off crazy floating point precision issues
        setting_rough = np.round(setting*100.)
        u, indx = np.unique(setting_rough, return_index=True)
        self.setting_sun_mjds = setting[indx]
        # Drop any sunsets before the survey start.
        left = np.searchsorted(self.setting_sun_mjds, mjd_start)
        self.setting_sun_mjds = self.setting_sun_mjds[left:]
    def __call__(self, mjd):
        """
        Convert an mjd to a night integer.
        """
        return np.searchsorted(self.setting_sun_mjds, mjd)
def obs2sqlite(observations_in, location='LSST', outfile='observations.sqlite', slewtime_limit=5.,
               full_sky=False, radians=True):
    """
    Utility to take an array of observations and dump it to a sqlite file, filling in useful columns along the way.

    observations_in: numpy structured array with at least columns of
        ra : RA in degrees
        dec : dec in degrees
        mjd : MJD in day
        filter : string with the filter name
        exptime : the exposure time in seconds
    location : str
        Observatory site; only 'LSST' sets up the telescope Site object.
    outfile : str
        Path of the sqlite database to write.
    slewtime_limit : float
         Consider all slewtimes larger than this to be closed-dome time not part of a slew.
    full_sky : bool
        If True, compute sky brightness per-pointing with SkyModel;
        otherwise use the pre-computed SkyModelPre healpix maps.
    radians : bool
        If True, the input ra/dec are radians and are converted to degrees.

    Raises
    ------
    ValueError
        If one of the required columns is missing from observations_in.
    """
    # Set the location to be LSST
    # NOTE(review): `telescope` stays undefined for any other location and
    # is only referenced if the 'alt' column must be computed -- confirm
    # whether other sites should be supported.
    if location == 'LSST':
        telescope = Site('LSST')
    # Check that we have the columns we need.  (The original tested
    # `needed_cols not in in_cols` -- always True for a list vs a tuple of
    # names -- and constructed, but never raised, the ValueError, so the
    # validation was a no-op.)
    needed_cols = ['ra', 'dec', 'mjd', 'filter']
    in_cols = observations_in.dtype.names
    for col in needed_cols:
        if col not in in_cols:
            raise ValueError('%s column not found in observation array' % col)
    n_obs = observations_in.size
    sm = None
    # make sure they are in order by MJD
    observations_in.sort(order='mjd')
    # Take all the columns that are in the input and add any missing
    names = ['filter', 'ra', 'dec', 'mjd', 'exptime', 'alt', 'az', 'skybrightness',
             'seeing', 'night', 'slewtime', 'fivesigmadepth', 'airmass', 'sunAlt', 'moonAlt']
    types = ['|S1']
    types.extend([float]*(len(names)-1))
    observations = np.zeros(n_obs, dtype=list(zip(names, types)))
    # copy over the ones we have
    for col in in_cols:
        observations[col] = observations_in[col]
    # convert output to be in degrees like expected
    if radians:
        observations['ra'] = np.degrees(observations['ra'])
        observations['dec'] = np.degrees(observations['dec'])
    if 'exptime' not in in_cols:
        observations['exptime'] = 30.
    # Fill in the slewtime. Note that filterchange time gets included in slewtimes
    if 'slewtime' not in in_cols:
        # Assume MJD is midpoint of exposures
        mjd_sec = observations_in['mjd']*24.*3600.
        observations['slewtime'][1:] = mjd_sec[1:]-mjd_sec[0:-1] - observations['exptime'][0:-1]*0.5 - observations['exptime'][1:]*0.5
        # Gaps longer than slewtime_limit minutes count as closed dome, not slew.
        closed = np.where(observations['slewtime'] > slewtime_limit*60.)
        observations['slewtime'][closed] = 0.
    # Let's just use the stupid-fast approximation to get alt-az
    if 'alt' not in in_cols:
        alt, az = _approx_RaDec2AltAz(np.radians(observations['ra']), np.radians(observations['dec']),
                                      telescope.latitude_rad, telescope.longitude_rad, observations['mjd'])
        observations['alt'] = np.degrees(alt)
        observations['az'] = np.degrees(az)
    # Fill in the airmass (plane-parallel approximation, 1/cos(zenith angle))
    if 'airmass' not in in_cols:
        observations['airmass'] = 1./np.cos(np.pi/2. - np.radians(observations['alt']))
    # Fill in the seeing
    if 'seeing' not in in_cols:
        # XXX just fill in a dummy val
        observations['seeing'] = 0.8
    if 'night' not in in_cols:
        m2n = mjd2night()
        observations['night'] = m2n(observations['mjd'])
    # Sky Brightness
    if 'skybrightness' not in in_cols:
        if full_sky:
            sm = SkyModel(mags=True)
            for i, obs in enumerate(observations):
                sm.setRaDecMjd(obs['ra'], obs['dec'], obs['mjd'], degrees=True)
                observations['skybrightness'][i] = sm.returnMags()[obs['filter']]
        else:
            # Let's try using the pre-computed sky brightnesses
            sm = sb.SkyModelPre(preload=False)
            full = sm.returnMags(observations['mjd'][0])
            nside = hp.npix2nside(full['r'].size)
            imax = float(np.size(observations))
            for i, obs in enumerate(observations):
                indx = raDec2Hpid(nside, obs['ra'], obs['dec'])
                observations['skybrightness'][i] = sm.returnMags(obs['mjd'], indx=[indx])[obs['filter']]
                sunMoon = sm.returnSunMoon(obs['mjd'])
                observations['sunAlt'][i] = sunMoon['sunAlt']
                observations['moonAlt'][i] = sunMoon['moonAlt']
                # Simple in-place progress readout on stdout.
                progress = i/imax*100
                text = "\rprogress = %.2f%%"%progress
                sys.stdout.write(text)
                sys.stdout.flush()
            observations['sunAlt'] = np.degrees(observations['sunAlt'])
            observations['moonAlt'] = np.degrees(observations['moonAlt'])
    # 5-sigma depth, computed per filter over all observations in that filter.
    for fn in np.unique(observations['filter']):
        good = np.where(observations['filter'] == fn)
        observations['fivesigmadepth'][good] = m5_flat_sed(fn, observations['skybrightness'][good],
                                                           observations['seeing'][good],
                                                           observations['exptime'][good],
                                                           observations['airmass'][good])
    # Write the filled-in table to the sqlite database.
    conn = sqlite3.connect(outfile)
    df = pd.DataFrame(observations)
    df.to_sql('observations', conn)
|
992,996 | 8a893edb2d72354aa36e82c3325711a8eb4b73f7 | __author__ = 'Joe Linn'
from .abstract import AbstractQuery
import pylastica.script
class CustomScore(AbstractQuery):
    """custom_score query: wraps a sub-query and scores its hits with a script."""
    def __init__(self, script=None, query=None):
        """
        @param script: script used to compute the document score
        @type script: str or dict or pylastica.script.Script
        @param query: query whose hits are to be re-scored
        @type query: str or pylastica.query.AbstractQuery
        """
        super(CustomScore, self).__init__()
        if script is not None:
            self.set_script(script)
        self.set_query(query)

    def set_query(self, query):
        """
        Set the query object
        @param query:
        @type query: str or pylastica.query.Query or pylastica.query.AbstractQuery
        @return: this object (fluent interface)
        @rtype: self
        """
        query = pylastica.query.Query.create(query)
        data = query.to_dict()
        # Only the inner 'query' portion belongs in this query's params.
        return self.set_param('query', data['query'])

    def set_script(self, script):
        """
        Set the script
        @param script:
        @type script: str or pylastica.script.Script or dict
        @return: this object (fluent interface)
        @rtype: self
        """
        script = pylastica.script.Script.create(script)
        # dict.iteritems() is Python 2 only and raises AttributeError on
        # Python 3; items() is behavior-equivalent here on both versions.
        for param, value in script.to_dict().items():
            self.set_param(param, value)
        return self

    def add_params(self, params):
        """
        Add params for the script
        @param params: script parameters
        @type params: dict
        @return: this object (fluent interface)
        @rtype: self
        """
        return self.set_param('params', params)
|
992,997 | 117593e0a28fbd83acc8ae5972d344a9025c5ddf | #!/usr/bin/python
# -*- coding : utf-8 -*-
import os, sys
import os.path
import setting
class IngentiaError:
    """One crawler error record: the failing URL, a message, and detail text."""
    def __init__(self):
        # All fields start empty and are filled in while parsing the log.
        self.url = ''
        self.msg = ''
        self.detail = ''
    def __str__(self):
        # url, msg and detail on separate lines, same layout the report uses.
        fields = (self.url, self.msg, self.detail)
        return "%s \n %s \n %s\n" % fields
class Reporter:
    """Parse a crawler log for one source and emit an HTML error report."""
    def __init__(self, src_name):
        # Source name determines both the log to read and the report to write.
        self.src_name = src_name
        self.result_log = os.path.join(setting.RESULT_DIR, "%s.log" % self.src_name)
        self.result_html = os.path.join(setting.RESULT_DIR, "%s-error.html" % self.src_name)
    def get_all_errors(self):
        """Scan the log and return a list of IngentiaError records.

        An error block starts at the crawler's marker line and ends at the
        'Time Zone (GMT/UTC)' line; within a block the 'Caused by:' and
        'The url that failed:' lines populate the record.
        """
        errors = []
        with open(self.result_log) as file:
            flag = False
            error = None
            for line in file:
                if line.startswith('*** THIS IS AN ERROR WHICH IS SENT FROM THE INGENTIA CUSTOM NEWS CRAWLER***'):
                    flag = True
                    error = IngentiaError()
                if line.startswith('Time Zone (GMT/UTC)'):
                    flag = False
                    errors.append(error)
                if flag:
                    line = line.strip()
                    if line.startswith('Caused by: '):
                        # NOTE(review): the slice [10:] keeps the trailing
                        # space of 'Caused by: ' (11 chars) -- confirm intended.
                        msg = line[10:]
                        # First 'Caused by:' fills detail; a later one
                        # overwrites msg (outermost cause wins as message).
                        if error.detail == '':
                            error.detail = msg
                        else:
                            error.msg = msg
                    if line.startswith('The url that failed:'):
                        error.url = line[len('The url that failed:'):]
        return errors
    def gen(self):
        """Group errors by message and write the HTML report file.

        Exits the process with status 0 when the log contains no errors.
        """
        errs = self.get_all_errors()
        if 0 == len(errs):
            print "No error found in source [%s]" % self.src_name
            exit(0)
        # Map: error message -> list of failing URLs.
        errors = {}
        for err in errs:
            if err.msg in errors:
                errors[err.msg].append(err.url)
            else:
                errors[err.msg] = [err.url, ]
        # Overwrite any previous report.
        if os.path.exists(self.result_html):
            os.remove(self.result_html)
        report_file = open(self.result_html, 'w')
        report_file.write('''<html><head></head><body>''')
        # One section per error message, with a de-duplicated URL list.
        for key,value in errors.iteritems():
            if key == '':
                key = 'Http request errors'
            report_file.write("<h3>%s</h3>" % key)
            report_file.write("<ul>")
            for url in set(value):
                report_file.write("<li><a href='%s' target='_blank'>%s</a></li>" % (url, url))
            report_file.write("</ul>")
        report_file.write('''</body></html>''')
        report_file.close()
if __name__ == '__main__':
    # Interactive entry point: prompt for a source name, then build its
    # HTML error report.  (raw_input is Python 2 only.)
    src_name = raw_input("Please input source name: ")
    rpt = Reporter(src_name)
    rpt.gen()
|
992,998 | 159b7269b3520d49faf549097c42bc303c9d1caa |
from math import sqrt
class Point(object):
    """A 2-D point, doubling as a vector with basic vector arithmetic."""
    def __init__(self, x, y):
        self.x = x
        self.y = y
    def getPoint(self):
        """Return the coordinates as an (x, y) tuple."""
        return self.x, self.y
    def getNormal(self):
        """Return this vector rotated a quarter turn: (y, -x)."""
        return Point(self.y, -self.x)
    def getMagnitude(self):
        """Euclidean length of the vector from the origin to this point."""
        return sqrt(self.x ** 2 + self.y ** 2)
    def getUnit(self):
        """Return this vector scaled to unit length."""
        length = self.getMagnitude()
        return Point(self.x / length, self.y / length)
    def getDirection(self):
        # NOTE(review): `Fractals` is never imported in this module, so this
        # raises NameError when called -- confirm the intended import path.
        v = Fractals.Vector.Vector(Fractals.Point.Point(0, 0), self)
        return v.getDirection()
    def isParallel(self, other):
        """True when both vectors have the same direction."""
        return self.getDirection() == other.getDirection()
    def __mul__(self, other):
        # Point * Point is the dot product; Point * scalar scales the vector.
        if type(other) is Point:
            return self.x * other.x + self.y * other.y
        return Point(self.x * other, self.y * other)
    def __str__(self):
        return "X:%s Y:%s" % (self.x, self.y)
    def __add__(self, other):
        """Component-wise sum."""
        return Point(self.x + other.x, self.y + other.y)
    def __sub__(self, other):
        """Component-wise difference."""
        return Point(self.x - other.x, self.y - other.y)
|
992,999 | d8cac68ea2741bc426a516189a948cdf1bb48cd4 | #!/usr/bin/env python
# coding: utf-8
import numpy as np
import cv2
import argparse
from PIL import Image, ImageChops
from framing_helper import trim
from metricas_qualidade import metricas_qualidade
def crop_first_half(image):
    """Crop the top portion of a scanned document image and score its quality.

    Keeps a band whose height is (60/85) of the image width (plus a 5%
    margin), trims surrounding white space with PIL, re-crops with the new
    width, and returns the crop together with its quality metrics.

    image : BGR image array as produced by cv2.imread.
    Returns (crop, ind_image_quality) where ind_image_quality comes from
    metricas_qualidade().
    """
    #print("[INFO] Importando imagem de entrada ...\n")
    # Import image
    # Get image dimensions
    #print("[INFO] Dimensões da imagem de entrada:\n")
    height, width = image.shape[:2]
    #print("[INFO] Altura: {}".format(height))
    #print("[INFO] Largura: {}\n".format(width))
    #print("[INFO] Verificando a razão entre largura e altura do documento ...\n")
    #print("[INFO] A razão largura:altura padrão para o documento aberto deve ser 85:120 ~ {} \n".format(round(85/120,2)) )
    #print("[INFO] A razão largura:altura da imagem de entrada: {}:{} ~ {}\n".format( width,height,round(width/height,2)) )
    # Target height derived from the 85:120 document aspect ratio, with a
    # 5% safety margin.  NOTE(review): (60/85) relies on Python 3 true
    # division -- on Python 2 it would truncate to 0; confirm interpreter.
    new_height = int( (60/85)*width )
    new_height = int(new_height*1.05)
    crop = image[ 0:new_height, 0:width ]
    ## Remove extra white space left from the previous process and repeat the crop routine
    # trim() works on PIL images, so round-trip BGR -> RGB -> PIL and back.
    crop = cv2.cvtColor(crop, cv2.COLOR_BGR2RGB)
    crop = Image.fromarray(crop)
    crop = trim(crop)
    crop = np.asarray(crop)
    crop = cv2.cvtColor(crop, cv2.COLOR_RGB2BGR)
    # Re-crop with the trimmed image's dimensions.
    height, width = crop.shape[:2]
    new_height = int( (60/85)*width )
    #new_height = int(new_height*1.03)
    new_height = int(new_height*1.05)
    crop = crop[ 0:new_height, 0:width ]
    # Quality metrics for the final crop.
    ind_image_quality = metricas_qualidade(crop)
    return crop, ind_image_quality
if __name__ == "__main__":
    # CLI entry point: crop the input image and write the result.
    # construct the argument parser and parse the arguments
    ap = argparse.ArgumentParser()
    ap.add_argument("-i", "--input_image", required=True,
                    help="path to input image")
    ap.add_argument("-o", "--output_image", type=str, default="crop_half_output.png",
                    help="path to output image")
    args = vars(ap.parse_args())
    # Read, crop (quality metrics are discarded here), and save.
    image = cv2.imread( args["input_image"] )
    crop, ind_image_quality = crop_first_half(image)
    cv2.imwrite( args["output_image"] , crop)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.