blob_id stringlengths 40 40 | directory_id stringlengths 40 40 | path stringlengths 2 616 | content_id stringlengths 40 40 | detected_licenses listlengths 0 69 | license_type stringclasses 2
values | repo_name stringlengths 5 118 | snapshot_id stringlengths 40 40 | revision_id stringlengths 40 40 | branch_name stringlengths 4 63 | visit_date timestamp[us] | revision_date timestamp[us] | committer_date timestamp[us] | github_id int64 2.91k 686M ⌀ | star_events_count int64 0 209k | fork_events_count int64 0 110k | gha_license_id stringclasses 23
values | gha_event_created_at timestamp[us] | gha_created_at timestamp[us] | gha_language stringclasses 220
values | src_encoding stringclasses 30
values | language stringclasses 1
value | is_vendor bool 2
classes | is_generated bool 2
classes | length_bytes int64 2 10.3M | extension stringclasses 257
values | content stringlengths 2 10.3M | authors listlengths 1 1 | author_id stringlengths 0 212 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
5e5c6414271cbd989561de51d4ffc44f3105539d | ce47784499852d9f4f328ca39ac954ce7932e932 | /shop/catalogue/fields.py | b5500f1755a3b0aef78b36402dc988c121f0cca6 | [] | no_license | kshutashvili/multishop | 3f7ddb14a534009936f7cb0950555744f12c06f6 | 6f3c1b9a719b8ae6004bb819f492bbef9cc211b7 | refs/heads/master | 2022-11-30T23:43:22.807302 | 2018-01-12T16:39:02 | 2018-01-12T16:39:02 | 203,870,369 | 0 | 0 | null | 2022-11-22T01:39:59 | 2019-08-22T20:43:31 | JavaScript | UTF-8 | Python | false | false | 889 | py | from __future__ import unicode_literals
from django.utils.encoding import force_text
from django.forms.fields import MultipleChoiceField
class CustomFilterMultipleChoiceField(MultipleChoiceField):
    """MultipleChoiceField whose choices are 3-tuples (key, value, params)
    instead of Django's usual 2-tuples."""

    def valid_value(self, value):
        """Check to see if the provided value is a valid choice."""
        text_value = force_text(value)

        def matches(candidate):
            # A candidate matches by raw equality or by string form.
            return value == candidate or text_value == force_text(candidate)

        for key, label, params in self.choices:
            if isinstance(label, (list, tuple)):
                # This is an optgroup; the real keys live in the nested list.
                if any(matches(sub_key) for sub_key, _sub_label in label):
                    return True
            elif matches(key):
                return True
        return False
class NonValidationMultipleChoiceField(CustomFilterMultipleChoiceField):
    """Variant that accepts any submitted values without choice validation."""

    def validate(self, value):
        # Intentionally a no-op: skip the required/choice checks entirely.
        pass
| [
"dmyutro@ukr.net"
] | dmyutro@ukr.net |
75beee39f655ccdabb0e887a3fea8cafc7e95c8a | e2897c39ec494856e0f110c57f3f0bb4740ac4de | /task_2/task_2/wsgi.py | d926bbe042842485d5f7b8e6eef5a2e769852adf | [] | no_license | ksuvarna85/app_school | fc3f75eddf18535fff8cbf2b38d1fd39bf313102 | 9804cd0d9c629e37d72cd72738c675536ce1dd24 | refs/heads/master | 2022-12-10T23:24:30.967284 | 2020-09-02T05:13:16 | 2020-09-02T05:13:16 | 292,185,066 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 389 | py | """
WSGI config for task_2 project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.0/howto/deployment/wsgi/
"""
import os

from django.core.wsgi import get_wsgi_application

# Point Django at this project's settings before the application is built.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'task_2.settings')

# Module-level WSGI callable picked up by servers (gunicorn, mod_wsgi, ...).
application = get_wsgi_application()
| [
"you@example.com"
] | you@example.com |
953dfcb4dd312ccbcb7d455b544179ac4a617b59 | 2d4005c1bce1bad26fa9cba6c8ccab913e27c4ec | /Python高级/7丶http协议丶web服务器/4丶根据用户的需求返回相应的页面.py | 98eb031fe68132eb0345e8427d55a43e7c9ea1ae | [] | no_license | wfwf1990/learn | 4b801f2c082ce180a6d70d680c8cadbc5c6ec3cf | 5ed32454ddf083866fabd730d5b2ffb544a30e08 | refs/heads/master | 2020-03-21T18:16:20.284168 | 2018-07-18T11:35:29 | 2018-07-18T11:35:29 | 138,881,605 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,626 | py | # Author: wangfang
# Author: wangfang
import socket
import re
def handle_data(client_socket):
    """Serve one HTTP request on ``client_socket``.

    Reads the request, maps the request URI onto the ./html directory
    ("/" becomes "/index.html"), and replies with the file contents or a
    404 response.  The connection is closed before returning.
    """
    recv_data = client_socket.recv(1024)
    # The client may disconnect without sending anything; the original
    # code crashed here with an IndexError on recv_data[0].
    if not recv_data:
        client_socket.close()
        return
    request_lines = recv_data.decode("utf-8").splitlines()
    # Pull the URI out of the request line, e.g. "GET /index.html HTTP/1.1".
    match = re.match("[^/]+(/[^ ]*)", request_lines[0])
    if match is None:
        # Malformed request line: fall back to the site root.
        url = "/"
    else:
        url = match.group(1)
    if url == "/":
        url = "/index.html"
    # Read the file; a missing file triggers the 404 branch below.
    try:
        f1 = open("./html" + url, "rb")
    except IOError:
        # Status line must use the uppercase "HTTP/1.1" protocol token.
        response_header = "HTTP/1.1 404 Not Found\r\n"
        response_header += "\r\n"
        response_body = "file not found".encode("utf-8")
    else:
        response_header = "HTTP/1.1 200 OK\r\n"
        response_header += "\r\n"
        response_body = f1.read()
        f1.close()
    # Send header and body, then close (one request per connection).
    client_socket.send(response_header.encode("utf-8"))
    client_socket.send(response_body)
    client_socket.close()
def main():
    """Create the listening socket and serve connections forever."""
    tcp_server_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    # Allow quick restarts without "Address already in use" errors.
    tcp_server_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    server_ip = ""  # empty host = listen on all interfaces
    server_port = 8080
    server_addr = (server_ip, server_port)
    tcp_server_socket.bind(server_addr)
    tcp_server_socket.listen(128)
    try:
        while True:
            # Accept one client and answer its single request.
            client_socket, client_addr = tcp_server_socket.accept()
            handle_data(client_socket)
    finally:
        # The original close() sat after the infinite loop and was
        # unreachable; close on the way out (e.g. KeyboardInterrupt).
        tcp_server_socket.close()


if __name__ == "__main__":
    main()
"576589099@qq.com"
] | 576589099@qq.com |
51a63a31cdd2da9dd062609a777200d0a364cdf2 | d66fe51e587258c5aa989d91dec2b94409d1abd4 | /keras_model/nets/SqueezeNet_speed_wire_fit.py | d8e76dc18a66a1c6c2a3004b70a3dc7544006fdd | [] | no_license | izeki/RL_driving | 5519961a2f28f6c2d2ddf5f2f41ebb10bee5dfce | 7605d79504dd2af9d23b43d5588989c7cc049d40 | refs/heads/master | 2021-05-07T22:36:13.043987 | 2018-04-06T22:50:28 | 2018-04-06T22:50:28 | 109,063,940 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 10,906 | py | from keras.models import Model
from keras.layers import Input, BatchNormalization, AveragePooling2D, Conv2D, \
MaxPooling2D, Dense, ZeroPadding2D, Flatten, concatenate
from keras import optimizers
from keras import regularizers
from keras.layers.core import Lambda, Dropout, Reshape
from keras.layers import Activation, Merge
from keras import backend as K
from keras.engine import Layer, InputSpec
from keras import initializers, regularizers, constraints
from Net import Net
import tensorflow as tf
from utils import INPUT_SHAPE
import numpy as np
def fire(name, squeeze_planes, expand1x1_planes, expand3x3_planes, **kwargs):
    """Return a builder for a SqueezeNet "fire" module.

    The module squeezes the input with a 1x1 conv, expands it through
    parallel 1x1 and 3x3 conv branches, and concatenates both branches
    on the channel axis.
    """
    def build(input):
        squeezed = Conv2D(filters=squeeze_planes,
                          kernel_size=1,
                          padding='valid',
                          activation='relu',
                          name='squeeze1x1_' + name)(input)
        branch_1x1 = Conv2D(filters=expand1x1_planes,
                            kernel_size=1,
                            padding='valid',
                            activation='relu',
                            name='expand1x1_' + name)(squeezed)
        branch_3x3 = Conv2D(filters=expand3x3_planes,
                            kernel_size=3,
                            padding='valid',
                            activation='relu',
                            name='expand3x3_' + name)(squeezed)
        # The 'valid' 3x3 conv shrinks the feature map by one pixel per
        # side; pad it back so both branches can be concatenated.
        branch_3x3 = ZeroPadding2D(padding=(1, 1))(branch_3x3)
        return concatenate([branch_1x1, branch_3x3], axis=3,
                           name='concat' + name)
    return build
class SqueezeSpeedWireFitNet(Net):
    """SqueezeNet-style CNN that maps a camera image plus a speed metadata
    plane to an 'out_q_a' 24-vector: eight interleaved (Q-value, 2-D action)
    wires for wire-fitted Q-learning."""

    def __init__(self, input_shape = INPUT_SHAPE):
        super(SqueezeSpeedWireFitNet, self).__init__(input_shape)

    def _get_model(self):
        """Build and return the (uncompiled) Keras model.

        Inputs: 'IMG_input' with self.input_shape and 'speed_input' with
        shape (11, 20, 1), which is concatenated into the conv trunk.
        """
        IMG_data = Input(shape=self.input_shape, name='IMG_input')
        # Scale raw pixels from [0, 255] to [-1, 1].
        IMG_data_norm = Lambda(lambda x: x/127.5-1.0, input_shape=self.input_shape)(IMG_data)
        metadata = Input(shape=(11, 20, 1), name='speed_input')
        # Two 2x2 average poolings downsample the image 4x before conv1.
        IMG_data_pool1 = AveragePooling2D(pool_size=(2, 2),
                                      strides=(2,2),
                                      padding='valid',
                                      name='IMG_data_pool1')(IMG_data_norm)
        IMG_data_pool2 = AveragePooling2D(pool_size=(2, 2),
                                      strides=(2,2),
                                      padding='valid',
                                      name='IMG_data_pool2')(IMG_data_pool1)
        conv1 = Conv2D(filters=64,
                       kernel_size=2,
                       strides=(2,2),
                       padding='valid',
                       activation='relu',
                       name='conv1')(IMG_data_pool2)
        conv1_pool = MaxPooling2D(pool_size=(2, 2),
                                  strides=(2,2),
                                  padding='valid',
                                  name='conv1_pool')(conv1)
        fire1 = fire('1', 16, 64, 64)(conv1_pool)
        fire2 = fire('2', 16, 64, 64)(fire1)
        fire_pool1 = MaxPooling2D(pool_size=(3, 3),
                                  strides=(2,2),
                                  padding='valid',
                                  name='fire_pool1')(fire2)
        # Merge the speed plane into the feature maps as an extra channel.
        fire_pool1_metadata_concat = concatenate([fire_pool1, metadata], axis=3, name='fire_pool1_metadata_concat')
        fire3 = fire('3',32, 128, 128)(fire_pool1_metadata_concat)
        fire4 = fire('4',32, 128, 128)(fire3)
        fire_pool2 = MaxPooling2D(pool_size=(3, 3),
                                  strides=(2,2),
                                  padding='valid',
                                  name='fire_pool2')(fire4)
        fire5 = fire('5',48, 192, 192)(fire_pool2)
        fire6 = fire('6',48, 192, 192)(fire5)
        fire7 = fire('7',64, 256, 256)(fire6)
        fire8 = fire('8',64, 256, 256)(fire7)
        drop1 = Dropout(rate=0.5, name='drop1')(fire8)
        conv2 = Conv2D(filters=2 * self.N_STEPS,
                       kernel_size=1,
                       padding='valid',
                       name='conv2')(drop1)
        avg_pool1 = AveragePooling2D(pool_size=(4, 4),
                                     strides=(6,6),
                                     padding='valid',
                                     name='avg_pool1')(conv2)
        flat1 = Flatten(name='flat1')(avg_pool1)
        # 24 = 8 wires x (1 Q-value + 2 action components).
        out = Dense(units=24, name='out_q_a')(flat1)
        model = Model(inputs=[IMG_data, metadata], outputs=out)
        return model

    def model_init(self, weight_file_path=None):
        """Build the model, optionally load weights by name, and compile it
        with the wire-fit loss and fixed SGD hyperparameters."""
        def load_model_weight(model, weight_file_path):
            model.load_weights(weight_file_path, by_name=True)
            return model
        model = self._get_model()
        if weight_file_path != None:
            model = load_model_weight(model, weight_file_path)
        model.compile(
            loss = wire_fit_loss,
            optimizer = optimizers.SGD(
                lr = 0.01,
                momentum = 0.8,
                decay = 1.0e-6,
                nesterov = True),
            metrics=['accuracy'])
        self.net = model

    def model_compile(self,
                      learning_rate,
                      momentum,
                      decay,
                      nesterov = True):
        """Re-compile the existing model with caller-supplied SGD settings."""
        model = self.net
        model.compile(
            loss = wire_fit_loss,
            optimizer = optimizers.SGD(
                lr = learning_rate,
                momentum = momentum,
                decay = decay,
                nesterov = nesterov),
            metrics=['accuracy'])
        self.net = model

    def get_layer_output(self, model_input, training_flag = True):
        """Evaluate an intermediate layer for the given inputs.

        NOTE(review): layer indices 0/16/52 are hard-coded against this
        exact topology -- re-check them after any architecture change.
        """
        get_outputs = K.function([self.net.layers[0].input,
                                  self.net.layers[16].input, K.learning_phase()],
                                 [self.net.layers[52].output])
        layer_outputs = get_outputs([model_input['img'], model_input['speed'], training_flag])[0]
        return layer_outputs

    def forward_backward(self, model_input, target_output):
        """Run one training step on a batch; returns {metric_name: value}."""
        losses = self.net.train_on_batch({'IMG_input':model_input['img'],
                                          'speed_input':model_input['speed']},
                                         {'out_q_a': target_output['q_s_a']})
        return dict(zip(self.net.metrics_names, losses))

    def forward(self, model_input):
        """Predict on one sample and unpack the 24-vector into
        q (shape (8,)) and a (shape (8, 2))."""
        # Positions of the Q-values and action pairs inside the output.
        q_index = np.array([0,3,6,9,12,15,18,21])
        a_index = np.array([[1,2],
                            [4,5],
                            [7,8],
                            [10,11],
                            [13,14],
                            [16,17],
                            [19,20],
                            [22,23]])
        prediction = self.net.predict_on_batch({'IMG_input':model_input['img'],
                                                'speed_input':model_input['speed']})
        q = prediction[0][q_index]
        a = prediction[0][a_index]
        return q, a
# wire_fit learning
#
# y_true : [r + gamma * max_a(Q(s,a)), a_best]
#
# y_pred : <q_i, a_i>, q_i is the value function approximator, a_i is the policy approximator
#
# wsum(s,a)
# Q(s,a) = lim ----------
# e->0 norm(s,a)
#
# n q_i(s)
# wsum(s,a) = sum ---------
# i=0 d_i(s,a)
#
# n 1
# norm(s,a) = sum ---------
# i=0 d_i(s,a)
#
# d_i(s,a) = |a-a_i(s)|^2 + c_i * (q_max(s) - q_i(s)) + e
#
# dQ norm(s,a)*(d_k(s,a)+q_k*c_k) - wsum(s,a) * c_k
# ---- = lim -----------------------------------------------
# dq_k e->0 (norm(s,a) * d_k(s,a))^2
#
# dQ (wsum(s,a) - norm(s,a)* q_k)* 2 * (a_k - a)
# ---- = lim -----------------------------------------------
# da_k e->0 (norm(s,a) * d_k(s,a))^2
#
def wire_fit_loss(y_true, y_pred):
    """Wire-fitting Q-learning loss (derivation in the comment block above).

    y_true packs [r + gamma * max_a Q(s,a), a_best]; y_pred packs eight
    interleaved (q_i, a_i) wires. The loss is the mean squared magnitude of
    the analytic gradient steps dq/da scaled by the TD error.
    """
    # Positions of Q-values and action pairs inside the 24-vector.
    q_index = [0,3,6,9,12,15,18,21]
    a_index = [[1,2],
               [4,5],
               [7,8],
               [10,11],
               [13,14],
               [16,17],
               [19,20],
               [22,23]]
    # Internal step size, smoothing coefficient and epsilon of the
    # wire-fitting interpolator.
    lr = 0.001
    c = -0.001
    e = 1e-08
    q_idx = K.variable(q_index, dtype='int32')
    a_idx = K.variable(a_index, dtype='int32')
    q = tf.gather(y_pred, q_idx, axis = -1)
    a = tf.gather(y_pred, a_idx, axis = -1)
    q_prime_idx = K.variable([0], dtype='int32')
    a_best_idx = K.variable([1], dtype='int32')
    Q_prime = tf.gather(y_true, q_prime_idx, axis = -1)
    a_best = tf.gather(y_true, a_best_idx, axis = -1)
    q_max = K.max(q)
    # random explore
    #q_max_arg = K.argmax(q)
    #a_arg = K.gather(a, q_max_arg)
    #d = K.sqrt(K.sum(K.square((a-a_arg)), -1)) + c * (q - q_max) + e
    # Distance term d_i(s,a) of the interpolator.
    d = K.sqrt(K.sum(K.square((a-a_best)), -1)) + c * (q - q_max) + e
    wsum = K.sum(q / d)
    norm = K.sum(1/d)
    Q = wsum/norm
    # random explore
    #dq = lr * (y_true - Q) * (norm * (d + c * q) - wsum * c) / K.square(norm * d)
    #da = lr * (y_true - Q) *((wsum - norm * K.transpose(K.stack([q,q]))) * 2 * (a - a_arg)) / K.square(norm * K.transpose(K.stack([d,d])))
    # Analytic update directions for each wire's q and a components.
    dq = lr * (Q_prime - Q) * (norm * (d + c * q) - wsum * c) / K.square(norm * d)
    da = lr * (Q_prime - Q) *((wsum - norm * K.transpose(K.stack([q,q]))) * 2 * (a - a_best)) / K.square(norm * K.transpose(K.stack([d,d])))
    loss_q = K.mean(K.sum(K.square(dq)))
    loss_a = K.mean(K.sum(K.square(da)))
    loss = (loss_q + loss_a)/2
    # log state value and corresponding loss
    tf.summary.scalar("loss_state_value", K.sum(loss_q))
    tf.summary.scalar("loss_action", K.sum(loss_a))
    tf.summary.scalar("state_value", K.sum(q_max))
    return loss
# pass a custom metric function to model's compile() call
# which returns aggregated summary tensor.
# https://groups.google.com/forum/#!topic/keras-users/rEJ1xYqD3AM
def summary(y_true, y_pred):
    # Dummy "metric": returns the merged tf summary tensor so Keras
    # evaluates (and thus emits) the summaries registered inside the loss.
    return tf.summary.merge_all()
def unit_test():
    """Smoke test: build the model and run one forward pass on random input."""
    test_net = SqueezeSpeedWireFitNet((376, 672, 3))
    test_net.model_init()
    test_net.net.summary()
    a = test_net.forward({'img': np.random.rand(1, 376, 672, 3),
                          'speed': np.random.rand(1, 11, 20, 1)})
    print(a)

# NOTE: executes at import time, building the full model.
unit_test()
| [
"izeki@berkeley.edu"
] | izeki@berkeley.edu |
0c14f478ee49acb3207cb1a384fee040a3bdda58 | 236afd92083f18a053f03caa4b3c6fd212890ef6 | /utils.py | 86bc4b9338d450159791b1b626ed5316f308682f | [
"CC-BY-4.0"
] | permissive | IldikoPilan/linkage | 31f3293323ca357d38ee3c461ea99ec2dbe24bcd | 3832bcc2db9446cbfd836e3500896e446b99ea4c | refs/heads/master | 2023-06-04T01:01:46.591856 | 2021-06-21T07:52:55 | 2021-06-21T07:52:55 | 257,837,813 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,097 | py |
def read_relations(path_to_file):
    """Read relation annotations from a .uio file.

    Returns a nested list where each sub-list holds
    [relation, origin_tag, origin_token, target_tag, target_token]
    for one annotated relation; tokens are recovered from the sentence
    text via the character offsets stored in the file.
    """
    relation_info = []
    sent = ""
    with open(path_to_file) as f:
        for line in f:
            if line.startswith("# sent_id = "):
                continue
            if line.strip() == "":
                continue
            if line.startswith("# text = "):
                # Remember the sentence so the offsets below can be resolved.
                # BUGFIX: the prefix "# text = " is 9 characters, but the
                # original sliced from index 10 and dropped the sentence's
                # first character; [:-1] also ate the final character when
                # the file had no trailing newline.
                sent = line[len("# text = "):].rstrip("\n")
            else:
                line_elem = line.strip().split("\t")
                relation = line_elem[1]
                # Columns 2 and 3 are "tag,left_offset,right_offset".
                orig_tag, l_ix_orig, r_ix_orig = line_elem[2].split(",")
                orig_token = sent[int(l_ix_orig):int(r_ix_orig)]
                target_tag, l_ix_target, r_ix_target = line_elem[3].split(",")
                target_token = sent[int(l_ix_target):int(r_ix_target)]
                relation_info.append([relation, orig_tag, orig_token,
                                      target_tag, target_token])
    return relation_info
| [
"ildiko.pilan@gmail.com"
] | ildiko.pilan@gmail.com |
67e955dc2e70709a21813dde1e1e3ecf9da1ec54 | 41c26da9c57052a3c9cd17b81d91f41ef074cf8d | /MyLeetCode/FB/Sqrt(x).py | e57f5f518cdc5ab67b63761318a1cca55c7a2c24 | [] | no_license | ihuei801/leetcode | a82f59a16574f4781ce64a5faa099b75943de94e | fe79161211cc08c269cde9e1fdcfed27de11f2cb | refs/heads/master | 2021-06-08T05:12:53.934029 | 2020-05-07T07:22:25 | 2020-05-07T07:22:25 | 93,356,513 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 571 | py | ###
# Binary Search
# Time Complexity: O(logn)
# Space Complexity: O(1)
###
class Solution(object):
    def mySqrt(self, x):
        """Return the integer square root (floor of sqrt(x)) of x.

        Binary search; O(log x) time, O(1) space.

        :type x: int
        :rtype: int
        """
        if x <= 0:
            return 0
        if x == 1:
            return 1
        l, r = 1, x
        while l + 1 < r:
            # BUGFIX: use floor division. Under Python 3, "/" yields a
            # float, which corrupts the search bounds and the return type.
            mid = (l + r) // 2
            if mid * mid == x:
                return mid
            elif mid * mid < x:
                l = mid
            else:
                r = mid
        # l and r are adjacent: pick the larger one whose square fits.
        if r * r <= x:
            return r
        else:
            return l
"hhuang@pinterest.com"
] | hhuang@pinterest.com |
8251df20d34bd166007675aafc1b6abc6c317a2d | 4408859ff99069ade676ae47fd73e117339f1445 | /Data_Structure/Python/Generators/generators.py | b1e670cca1e201437acd65d567a8b6f5b6c7b536 | [] | no_license | stulsani/Additional-WorkSpace | 71309c28d8487ea1a080cf923b4efa8cbbfb9e6f | bba9140ff64ab83a63c6293719702d3939ab1b4e | refs/heads/master | 2022-06-12T04:47:11.251338 | 2020-04-26T03:41:28 | 2020-04-26T03:41:28 | 88,526,381 | 0 | 0 | null | 2022-03-02T08:21:22 | 2017-04-17T16:15:41 | C++ | UTF-8 | Python | false | false | 611 | py | input_list = [2,5,10,14,15,23,25]
def div_by_five(num):
    """Return True when ``num`` is evenly divisible by 5."""
    # The comparison already yields a bool; no if/else needed.
    return num % 5 == 0
# Generator expression: lazily filters input_list; printing it shows the
# generator object, and iterating it yields the multiples of five.
xyz = ( i for i in input_list if div_by_five(i) )
print(xyz)
for i in xyz:
    print(i)
# Nested list comprehension used purely for its print side effect.
[[ print(i,ii) for ii in range(5)] for i in range(3) ]
# Generator of lists of (i, ii) pairs; printing shows the generator object.
x = ([ (i,ii) for ii in range(5)] for i in range(3) )
print (x)
# The lock combination that the search loop below stops at.
CORRECT_COMBO = (3,5,7)
def generate_code():
    """Yield every 3-digit combination (0,0,0) through (9,9,9) in order."""
    for code in range(1000):
        # Split the counter into its three decimal digits.
        hundreds, remainder = divmod(code, 100)
        tens, ones = divmod(remainder, 10)
        yield (hundreds, tens, ones)
# Brute-force the combination, stopping once CORRECT_COMBO is produced.
for (c1,c2,c3) in generate_code():
    print(c1,c2,c3)
    if (c1,c2,c3) == CORRECT_COMBO:
        break
"sumeettulsani1@gmail.com"
] | sumeettulsani1@gmail.com |
bbd25fc97bb6bd34c80649e0f1729d4b08bd09aa | 13dd6f83cfa9bd227741abe1fe01445a88b93944 | /project/urls.py | d9199a27dc45611d7809c7867856b9caa101ca42 | [] | no_license | Rajeshkanchi/Revisedfile | f34a62450ebc9ef33ff41ed776e68e3b4d50227b | 5930a41b42603dcee88cc27fec778aad3dd6f6ea | refs/heads/master | 2022-12-25T18:57:48.017650 | 2020-09-17T12:41:41 | 2020-09-17T12:41:41 | 296,321,881 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,169 | py | """project URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/3.1/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path,include
from django.conf.urls import url
from jobs.views import home_view, signup_view, activation_sent_view, activate
urlpatterns = [
    path('admin/', admin.site.urls),
    path('',include ('jobs.urls')),
    # NOTE(review): the '' prefix is already routed to jobs.urls above, so
    # this entry is shadowed unless jobs.urls fails to match -- confirm
    # home_view is not duplicated inside jobs.urls.
    path('', home_view, name="home"),
    path('signup/', signup_view, name="signup"),
    path('sent/', activation_sent_view, name="activation_sent"),
    # Email-confirmation link: uidb64 identifies the user, token verifies it.
    path('activate/<slug:uidb64>/<slug:token>/', activate, name='activate'),
]
| [
"noreply@github.com"
] | Rajeshkanchi.noreply@github.com |
e45e92ac2a515b699091a99231db873b58ea6c9e | 09e57dd1374713f06b70d7b37a580130d9bbab0d | /data/cirq_new/cirq_program/startCirq_noisy786.py | f2b65f32ed029be9c4178c2a3b37eb138cc9f1e7 | [
"BSD-3-Clause"
] | permissive | UCLA-SEAL/QDiff | ad53650034897abb5941e74539e3aee8edb600ab | d968cbc47fe926b7f88b4adf10490f1edd6f8819 | refs/heads/main | 2023-08-05T04:52:24.961998 | 2021-09-19T02:56:16 | 2021-09-19T02:56:16 | 405,159,939 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,253 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time : 5/15/20 4:49 PM
# @File : grover.py
# qubit number=4
# total number=19
import cirq
import cirq.google as cg
from typing import Optional
import sys
from math import log2
import numpy as np
#thatsNoCode
def make_circuit(n: int, input_qubit):
    """Assemble the fixed 4-qubit demo circuit and append a measurement.

    Note: ``n`` is unused; the gate sequence below is hard-coded (the
    ``# number=N`` comments are generator bookkeeping, not gate counts).
    """
    c = cirq.Circuit()  # circuit begin
    c.append(cirq.H.on(input_qubit[0])) # number=1
    c.append(cirq.H.on(input_qubit[1])) # number=2
    c.append(cirq.H.on(input_qubit[1])) # number=7
    c.append(cirq.H.on(input_qubit[2])) # number=3
    c.append(cirq.H.on(input_qubit[3])) # number=4
    c.append(cirq.CNOT.on(input_qubit[3],input_qubit[0])) # number=5
    c.append(cirq.H.on(input_qubit[0])) # number=14
    c.append(cirq.CZ.on(input_qubit[3],input_qubit[0])) # number=15
    c.append(cirq.H.on(input_qubit[0])) # number=16
    c.append(cirq.Z.on(input_qubit[1])) # number=13
    c.append(cirq.SWAP.on(input_qubit[1],input_qubit[0])) # number=8
    c.append(cirq.SWAP.on(input_qubit[1],input_qubit[0])) # number=9
    c.append(cirq.SWAP.on(input_qubit[3],input_qubit[0])) # number=10
    c.append(cirq.SWAP.on(input_qubit[3],input_qubit[0])) # number=11
    c.append(cirq.Z.on(input_qubit[2])) # number=12
    c.append(cirq.Y.on(input_qubit[0])) # number=17
    c.append(cirq.Y.on(input_qubit[0])) # number=18
    # circuit end

    # Measure all qubits into a single 'result' key.
    c.append(cirq.measure(*input_qubit, key='result'))
    return c
def bitstring(bits):
    """Render a sequence of measurement bits as a string like '0110'."""
    return ''.join(map(str, (int(bit) for bit in bits)))
if __name__ == '__main__':
    qubit_count = 4

    input_qubits = [cirq.GridQubit(i, 0) for i in range(qubit_count)]
    circuit = make_circuit(qubit_count,input_qubits)
    # Rewrite the circuit into Sycamore-native gates.
    circuit = cg.optimized_for_sycamore(circuit, optimizer_type='sqrt_iswap')

    circuit_sample_count =2820
    # Apply a uniform 1% depolarizing noise channel to every moment.
    circuit = circuit.with_noise(cirq.depolarize(p=0.01))

    simulator = cirq.Simulator()
    result = simulator.run(circuit, repetitions=circuit_sample_count)

    # Histogram of measured bitstrings, written to the output CSV.
    frequencies = result.histogram(key='result', fold_func=bitstring)
    writefile = open("../data/startCirq_noisy786.csv","w+")
    print(format(frequencies),file=writefile)
    print("results end", file=writefile)
    print(circuit.__len__(), file=writefile)
    print(circuit,file=writefile)

    writefile.close()
"wangjiyuan123@yeah.net"
] | wangjiyuan123@yeah.net |
bdb1dcd0e7a5318ece8f7870d373b25ee900437c | 8d8772b873b2fc5bfe193453b23fc0ba9a270433 | /tools/benchmark.py | a648fcb3b422639121f27e396f417c98bcabae82 | [
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | hz-ants/BCNet | 5246c8bc90839c5d1bd040a88bed561232594068 | a027645a03a288115c703184bb9c8f5126221db8 | refs/heads/main | 2023-05-25T14:36:58.872327 | 2021-06-01T16:21:29 | 2021-06-01T16:21:29 | 373,832,572 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,462 | py | """
A script to benchmark builtin models.
Note: this script has an extra dependency of psutil.
"""
import itertools
import logging
import psutil
import torch
import tqdm
from fvcore.common.timer import Timer
from torch.nn.parallel import DistributedDataParallel
from detectron2.checkpoint import DetectionCheckpointer
from detectron2.config import get_cfg
from detectron2.data import (
DatasetFromList,
build_detection_test_loader,
build_detection_train_loader,
)
from detectron2.engine import SimpleTrainer, default_argument_parser, hooks, launch
from detectron2.modeling import build_model
from detectron2.solver import build_optimizer
from detectron2.utils import comm
from detectron2.utils.events import CommonMetricPrinter
from detectron2.utils.logger import setup_logger
logger = logging.getLogger("detectron2")
def setup(args):
    """Build a frozen detectron2 config from the CLI args and set up logging."""
    cfg = get_cfg()
    cfg.merge_from_file(args.config_file)
    cfg.SOLVER.BASE_LR = 0.001  # Avoid NaNs. Not useful in this script anyway.
    cfg.merge_from_list(args.opts)
    cfg.freeze()
    setup_logger(distributed_rank=comm.get_rank())
    return cfg
def benchmark_data(args):
    """Benchmark the training dataloader: startup time, throughput, RAM use."""
    cfg = setup(args)

    dataloader = build_detection_train_loader(cfg)

    timer = Timer()
    itr = iter(dataloader)
    for i in range(10):  # warmup
        next(itr)
        if i == 0:
            # First batch includes worker startup / first-read cost.
            startup_time = timer.seconds()
    timer = Timer()
    max_iter = 1000
    for _ in tqdm.trange(max_iter):
        next(itr)
    logger.info(
        "{} iters ({} images) in {} seconds.".format(
            max_iter, max_iter * cfg.SOLVER.IMS_PER_BATCH, timer.seconds()
        )
    )
    logger.info("Startup time: {} seconds".format(startup_time))
    # System RAM (not GPU memory), reported in GB.
    vram = psutil.virtual_memory()
    logger.info(
        "RAM Usage: {:.2f}/{:.2f} GB".format(
            (vram.total - vram.available) / 1024 ** 3, vram.total / 1024 ** 3
        )
    )
def benchmark_train(args):
    """Benchmark training speed on 100 cached batches replayed in a loop."""
    cfg = setup(args)
    model = build_model(cfg)
    logger.info("Model:\n{}".format(model))
    if comm.get_world_size() > 1:
        model = DistributedDataParallel(
            model, device_ids=[comm.get_local_rank()], broadcast_buffers=False
        )
    optimizer = build_optimizer(cfg, model)
    checkpointer = DetectionCheckpointer(model, optimizer=optimizer)
    checkpointer.load(cfg.MODEL.WEIGHTS)

    cfg.defrost()
    # Cache 100 real batches and replay them, so data loading cost is
    # excluded from the measured training speed.
    cfg.DATALOADER.NUM_WORKERS = 0
    data_loader = build_detection_train_loader(cfg)
    dummy_data = list(itertools.islice(data_loader, 100))

    def f():
        # Infinite stream over the cached batches.
        while True:
            yield from DatasetFromList(dummy_data, copy=False)

    max_iter = 400
    trainer = SimpleTrainer(model, f(), optimizer)
    trainer.register_hooks(
        [hooks.IterationTimer(), hooks.PeriodicWriter([CommonMetricPrinter(max_iter)])]
    )
    trainer.train(1, max_iter)
@torch.no_grad()
def benchmark_eval(args):
    """Benchmark inference speed on 100 cached test batches."""
    cfg = setup(args)
    model = build_model(cfg)
    model.eval()
    logger.info("Model:\n{}".format(model))
    DetectionCheckpointer(model).load(cfg.MODEL.WEIGHTS)

    cfg.defrost()
    # Cache 100 real batches so data loading is excluded from the timing.
    cfg.DATALOADER.NUM_WORKERS = 0
    data_loader = build_detection_test_loader(cfg, cfg.DATASETS.TEST[0])
    dummy_data = list(itertools.islice(data_loader, 100))

    def f():
        # Infinite stream over the cached batches.
        while True:
            yield from DatasetFromList(dummy_data, copy=False)

    for _ in range(5):  # warmup
        model(dummy_data[0])

    max_iter = 400
    timer = Timer()
    with tqdm.tqdm(total=max_iter) as pbar:
        for idx, d in enumerate(f()):
            if idx == max_iter:
                break
            model(d)
            pbar.update()
    logger.info("{} iters in {} seconds.".format(max_iter, timer.seconds()))
if __name__ == "__main__":
    parser = default_argument_parser()
    parser.add_argument("--task", choices=["train", "eval", "data"], required=True)
    args = parser.parse_args()
    assert not args.eval_only

    # Pick the benchmark function and dispatch it through detectron2's
    # (possibly multi-process) launcher.
    if args.task == "data":
        f = benchmark_data
    elif args.task == "train":
        """
        Note: training speed may not be representative.
        The training cost of a R-CNN model varies with the content of the data
        and the quality of the model.
        """
        f = benchmark_train
    elif args.task == "eval":
        f = benchmark_eval
        # only benchmark single-GPU inference.
        assert args.num_gpus == 1 and args.num_machines == 1
    launch(f, args.num_gpus, args.num_machines, args.machine_rank, args.dist_url, args=(args,))
| [
"keleiwhu@gmail.com"
] | keleiwhu@gmail.com |
7ed737bd9a5b2c998c3e7ecd20e75e8b6c05aa42 | e24fee7793cc7e89e78174b744cdaddbaecf45cd | /app/hypatio/wsgi.py | 7bb13ea6aa8ca74f101740b809c54ab1222b0ab9 | [] | no_license | hms-dbmi/hypatio-app | e63bfa9f123233ca33175d6d15bc0fd65294f749 | bff632ea72fa62ee503a1bff15b7e840e53a0d45 | refs/heads/master | 2023-09-01T02:14:03.432174 | 2023-08-09T15:14:08 | 2023-08-09T15:14:08 | 71,184,816 | 4 | 0 | null | 2023-09-13T16:04:50 | 2016-10-17T21:58:35 | Python | UTF-8 | Python | false | false | 392 | py | """
WSGI config for hypatio project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.10/howto/deployment/wsgi/
"""
import os

from django.core.wsgi import get_wsgi_application

# Point Django at this project's settings before the application is built.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "hypatio.settings")

# Module-level WSGI callable picked up by servers (gunicorn, mod_wsgi, ...).
application = get_wsgi_application()
| [
"mcduffie.michael@gmail.com"
] | mcduffie.michael@gmail.com |
33b7f9ac9953ad10274b3232c352058e44e69e71 | 0724193d3ddd21a428aabbdd16d84e944cf082dc | /tests/config/test_potable.py | 8d372186a4a4ab1d590d25e1e5fde35894af90bd | [
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | fengnianzhuang/atsim-potentials | 305b5aa33b9e7df7765abfda3a73d7f31241ba69 | 566020dc0d2df4b701b9c8cd00319a1c9461f56e | refs/heads/master | 2023-06-15T21:05:00.608645 | 2021-05-20T19:00:22 | 2021-05-20T19:00:22 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,210 | py | import pytest
from ._common import _get_dlpoly_resource_dir, _get_lammps_resource_dir
from atsim.potentials.tools.potable import _query_actions
from atsim.potentials.config import ConfigParser
def test_list_item_labels():
  """_list_item_labels returns every 'Section:key' label in the config."""
  expect = [
    "Tabulation:target",
    "Tabulation:nr",
    "Tabulation:dr",
    "Tabulation:nrho",
    "Tabulation:drho",
    "Potential-Form:buck_morse(r,A,rho,C,D,gamma,r0)",
    "Potential-Form:density(r,n)",
    "EAM-Embed:Th",
    "EAM-Embed:U",
    "EAM-Embed:O",
    "EAM-Density:Th",
    "EAM-Density:U",
    "EAM-Density:O",
    "Pair:O-O",
    "Pair:Th-Th",
    "Pair:U-U",
    "Pair:Th-O",
    "Pair:U-O" ]
  expect.sort()

  with _get_lammps_resource_dir().join("CRG_U_Th.aspot").open() as infile:
    cp = ConfigParser(infile)
    items = _query_actions._list_item_labels(cp)
  items.sort()
  # Sorted comparison: label order is not part of the contract.
  assert expect == items
def test_list_items():
  """_list_items returns (label, raw value) pairs for every config entry."""
  expect = [
    ("Tabulation:target", "setfl"),
    ("Tabulation:nr", "1000"),
    ("Tabulation:dr", "0.01"),
    ("Tabulation:nrho", "1000"),
    ("Tabulation:drho", "0.01"),
    ("Potential-Form:buck_morse(r,A,rho,C,D,gamma,r0)", "as.buck(r,A,rho,C) + as.morse(r, gamma, r0, D)" ),
    ("Potential-Form:density(r,n)", "(n/r^8) * 0.5 * (1+erf(20*(r-1.5)))" ),
    ("EAM-Embed:Th", "as.sqrt -1.185"),
    ("EAM-Embed:U", "as.sqrt -1.806"),
    ("EAM-Embed:O", "as.sqrt -0.690"),
    ("EAM-Density:Th", "density 1742.622"),
    ("EAM-Density:U", "density 3450.995"),
    ("EAM-Density:O", "density 106.856"),
    ("Pair:O-O","as.buck 830.283 0.352856 3.884372"),
    ("Pair:Th-Th","as.buck 18600 0.2884 0.0"),
    ("Pair:U-U","as.buck 18600 0.2747 0.0"),
    ("Pair:Th-O","buck_morse 315.544 0.395903 0.0 0.62614 1.85960 2.49788"),
    ("Pair:U-O" ,"buck_morse 448.779 0.387758 0.0 0.66080 2.05815 2.38051")]
  expect.sort()

  with _get_lammps_resource_dir().join("CRG_U_Th.aspot").open() as infile:
    cp = ConfigParser(infile)
    items = _query_actions._list_items(cp)
  items.sort()
  # Sorted comparison: entry order is not part of the contract.
  assert expect == items
def test_item_value():
  """_item_value resolves a single 'Section:key' label to its raw value."""
  with _get_lammps_resource_dir().join("CRG_U_Th.aspot").open() as infile:
    cp = ConfigParser(infile)
    value = _query_actions._item_value(cp, "Tabulation:drho")
  assert "0.01" == value
def test_list_plot_item_labels():
  """_list_plot_item_labels returns only the plottable (pair/EAM) labels,
  excluding Tabulation and Potential-Form entries."""
  expect = [
    "EAM-Embed:Th",
    "EAM-Embed:U",
    "EAM-Embed:O",
    "EAM-Density:Th",
    "EAM-Density:U",
    "EAM-Density:O",
    "Pair:O-O",
    "Pair:Th-Th",
    "Pair:U-U",
    "Pair:Th-O",
    "Pair:U-O" ]
  expect.sort()

  with _get_lammps_resource_dir().join("CRG_U_Th.aspot").open() as infile:
    cp = ConfigParser(infile)
    items = _query_actions._list_plot_item_labels(cp)
  items.sort()
  assert expect == items
def test_key_normalisation():
  """Whitespace inside keys (signatures, 'A -> B' pairs) is stripped out
  when labels are generated."""
  import io
  cfg1 = u"""[Potential-Form]
buck_morse(r, A, rho, C, D, gamma, r0) : test

[EAM-Density]
A -> B : test"""

  expect = [("Potential-Form:buck_morse(r,A,rho,C,D,gamma,r0)", "test"),
            ("EAM-Density:A->B", "test")
  ]
  expect.sort()

  with io.StringIO(cfg1) as infile:
    cp = ConfigParser(infile)
    items = _query_actions._list_items(cp)
  items.sort()
  assert expect == items
@pytest.mark.parametrize('cli_option, cli_attr', [('--override-item', 'override_item'), ('--add-item', 'add_item'), ('--remove-item', 'remove_item')])
def test_comandline_multiple_overrides(cli_option, cli_attr):
  """Each override flag may repeat and may take several values; argparse
  collects them as a list of per-flag value lists.

  (Name keeps the historical 'comandline' spelling; renaming would change
  the reported test id.)
  """
  from atsim.potentials.tools.potable import _parse_command_line
  # One flag, one value -> one single-item group.
  cli_args = [__file__, "OUT", cli_option, "Tabulation:target=GULP"]
  p, args = _parse_command_line(cli_args)
  argval = getattr(args, cli_attr)
  assert len(argval) == 1
  assert argval == [["Tabulation:target=GULP"]]

  # Flag given twice -> two groups.
  cli_args = [__file__, "OUT", cli_option, "Tabulation:target=GULP", cli_option, "Tabulation:cutoff=20"]
  p, args = _parse_command_line(cli_args)
  argval = getattr(args, cli_attr)
  assert len(argval) == 2
  assert argval == [["Tabulation:target=GULP"], ["Tabulation:cutoff=20"]]

  # Flag given once with two values -> one two-item group.
  cli_args = [__file__, "OUT", cli_option, "Tabulation:target=GULP", "Tabulation:cutoff=20"]
  p, args = _parse_command_line(cli_args)
  argval = getattr(args, cli_attr)
  assert len(argval) == 1
  assert argval == [["Tabulation:target=GULP", "Tabulation:cutoff=20"]]
| [
"m.j.d.rushton@gmail.com"
] | m.j.d.rushton@gmail.com |
82367d80dbeb2a3e6cf100cb33c3a7f9e7406328 | 71063487dc412ba4d7c24e169e14916172dc7756 | /Common/models.py | 6ae161cb4cd3f38193bc5027fd8e7057615a9d4f | [] | no_license | saurabhwahile/Stark | 420ff11bff6ffdf7bf02ea5b573e274815823e5f | ab683e80acffb57ecfc8c2c62abc550427129fa9 | refs/heads/master | 2020-04-16T07:28:19.681736 | 2015-06-08T17:17:21 | 2015-06-08T17:17:21 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 379 | py | __author__ = 'Saurabh'
class IncompleteDataException(Exception):
def __init__(self, message):
self.value = message
def __str__(self):
return repr('Missing '+self.value)
class DuplicateDataException(Exception):
def __init__(self, message):
self.value = message
def __str__(self):
return repr('Duplicate '+self.value)
| [
"saurabh-wahile@hotmail.com"
] | saurabh-wahile@hotmail.com |
4713452da4676073abf78c5b13eaf884b0b5121a | e11d8ccf73cbf5fb30dbde69832f1a9c95df467a | /src/Dj4sq/settings.py | 67a512912d343a74dda4f937c87587609134329a | [
"CC-BY-3.0"
] | permissive | martinjc/DjangoFoursq | 013060794cc1f83080597a800ecfd1ede7aad4f4 | 5f3736a9bff4f955160b603a74645ec447376b30 | refs/heads/master | 2020-04-26T23:49:44.058089 | 2011-09-08T17:42:02 | 2011-09-08T17:42:02 | 2,349,262 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,759 | py | import os
ROOT_PATH = os.path.dirname(__file__)
DEBUG = True
TEMPLATE_DEBUG = DEBUG
ADMINS = (
# ('Your Name', 'your_email@example.com'),
)
MANAGERS = ADMINS
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(ROOT_PATH, ''), # path to database file as using sqlite3.
}
}
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# On Unix systems, a value of None will cause Django to use the same
# timezone as the operating system.
# If running in a Windows environment this must be set to the same as your
# system time zone.
TIME_ZONE = 'Europe/London'
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-gb'
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale
USE_L10N = True
# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/home/media/media.lawrence.com/media/"
MEDIA_ROOT = os.path.join(ROOT_PATH, 'media')
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash.
# Examples: "http://media.lawrence.com/media/", "http://example.com/media/"
MEDIA_URL = 'http://127.0.0.1:8000/media/'
# Absolute path to the directory static files should be collected to.
# Don't put anything in this directory yourself; store your static files
# in apps' "static/" subdirectories and in STATICFILES_DIRS.
# Example: "/home/media/media.lawrence.com/static/"
STATIC_ROOT = ''
# URL prefix for static files.
# Example: "http://media.lawrence.com/static/"
STATIC_URL = '/static/'
# URL prefix for admin static files -- CSS, JavaScript and images.
# Make sure to use a trailing slash.
# Examples: "http://foo.com/static/admin/", "/static/admin/".
ADMIN_MEDIA_PREFIX = '/static/admin/'
# Additional locations of static files
STATICFILES_DIRS = (
# Put strings here, like "/home/html/static" or "C:/www/django/static".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
)
# List of finder classes that know how to find static files in
# various locations.
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
# 'django.contrib.staticfiles.finders.DefaultStorageFinder',
)
# Make this unique, and don't share it with anybody.
SECRET_KEY = 'n3$dy&8lw7#h%$%4%pqdpsuty+6b)agzpjgw1ek+4rm6+rb-^r'
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
# 'django.template.loaders.eggs.Loader',
)
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
)
ROOT_URLCONF = 'Dj4sq.urls'
TEMPLATE_DIRS = (
# Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
os.path.join(ROOT_PATH, 'templates'),
)
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.staticfiles',
'foursq_auth',
# Uncomment the next line to enable the admin:
# 'django.contrib.admin',
# Uncomment the next line to enable admin documentation:
# 'django.contrib.admindocs',
)
# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'handlers': {
'mail_admins': {
'level': 'ERROR',
'class': 'django.utils.log.AdminEmailHandler'
}
},
'loggers': {
'django.request': {
'handlers': ['mail_admins'],
'level': 'ERROR',
'propagate': True,
},
}
}
| [
"m.j.chorley@cs.cardiff.ac.uk"
] | m.j.chorley@cs.cardiff.ac.uk |
6a71d095d97872e0de1678319efb32d063a8d074 | 6ee6a5fcd84c6a3aaf3894a1f06fd4cc3ba8bc63 | /Chapter01/ch01_IceCream.py | aef3f86d14e9b96f8897a074717428b7e1f72a36 | [] | no_license | lyubomirShoylev/pythonBook | ff50952035b883ec3721da49d17112d4ef5b8b13 | f3eca2d47ea31aaaecaa5a0b00a45d1ccee80dae | refs/heads/master | 2020-03-21T14:09:30.826106 | 2018-08-22T17:41:14 | 2018-08-22T17:41:14 | 138,642,968 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 115 | py | #Challenge1
#Favourite ice cream
print("Chocolate, of course")
input("\nWhat's yours?\nPress Enter key to exit.")
| [
"luboshoilev@gmail.com"
] | luboshoilev@gmail.com |
d01a284894e33a20d0198937965703279aa95c84 | b42665a5b7c30ff9609d04a5fafaaffad2121fe7 | /leglib/__init__.py | b098b19c29352e149c9735cbefcbdb89aa9f22dd | [
"MIT"
] | permissive | joelegner/leglib | 71f7779d43f910a074ea59c8c0c1fab84773c7ae | 5f7f4cc48112302bb48857d85435c42fb8c72169 | refs/heads/master | 2021-06-13T22:13:11.414707 | 2020-11-26T16:02:45 | 2020-11-26T16:02:45 | 155,223,190 | 0 | 0 | MIT | 2021-03-20T04:57:58 | 2018-10-29T14:12:39 | HTML | UTF-8 | Python | false | false | 57 | py | __all__ = ["legmath", "util", "fmt", "tests", "unitval"]
| [
"joelegner.gmail.com"
] | joelegner.gmail.com |
4401d1e8caca6d642272910a81badbe024178ea0 | 99a8fee72524a221f3e754876a051cdc799d9ca5 | /bentoml/cli/yatai_service.py | 35edda58dc7a85b1c7feffaa0186f6c892c0b53a | [
"Apache-2.0"
] | permissive | AmeerHamza111/BentoML | d112f3d73e4ac1bb088a11548a04314eadafb2da | 7a23ebb986971d0cf0508c88a33304773926f3e8 | refs/heads/master | 2022-11-18T13:05:01.219977 | 2020-07-07T14:39:49 | 2020-07-07T14:39:49 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,581 | py | # Copyright 2019 Atalaya Tech, Inc.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import click
from bentoml.yatai.yatai_service import start_yatai_service_grpc_server
logger = logging.getLogger(__name__)
def add_yatai_service_sub_command(cli):
# pylint: disable=unused-variable
@cli.command(help='Start BentoML YataiService for model management and deployment')
@click.option(
'--db-url',
type=click.STRING,
help='Database URL following RFC-1738, and usually can include username, '
'password, hostname, database name as well as optional keyword arguments '
'for additional configuration',
envvar='BENTOML_DB_URL',
)
@click.option(
'--repo-base-url',
type=click.STRING,
help='Base URL for storing BentoML saved bundle files, this can be a filesystem'
'path(POSIX/Windows), or a S3 URL, usually starting with "s3://"',
envvar='BENTOML_REPO_BASE_URL',
)
@click.option(
'--grpc-port',
type=click.INT,
default=50051,
help='Port to run YataiService gRPC server',
envvar='BENTOML_GRPC_PORT',
)
@click.option(
'--ui-port',
type=click.INT,
default=3000,
help='Port to run YataiService Web UI server',
envvar='BENTOML_WEB_UI_PORT',
)
@click.option(
'--ui/--no-ui',
default=True,
help='Run YataiService with or without Web UI, when running with --no-ui, it '
'will only run the gRPC server',
envvar='BENTOML_ENABLE_WEB_UI',
)
@click.option(
'--s3-endpoint-url',
type=click.STRING,
help='S3 Endpoint URL is used for deploying with storage services that are '
'compatible with Amazon S3, such as MinIO',
envvar='BENTOML_S3_ENDPOINT_URL',
)
def yatai_service_start(
db_url, repo_base_url, grpc_port, ui_port, ui, s3_endpoint_url
):
start_yatai_service_grpc_server(
db_url, repo_base_url, grpc_port, ui_port, ui, s3_endpoint_url
)
| [
"noreply@github.com"
] | AmeerHamza111.noreply@github.com |
63a817e8557c763d366712c27c996a9e667b18c3 | ebe7c57183b0eeba9af1bdc72f0f81b9b8129ca9 | /23. HashTable/387.py | 8b7b7473758a3bccdd09c324c10a8ef2fb84a148 | [] | no_license | proTao/leetcode | f2e46392b56b69606e1dd25cf5738cb0ad275645 | 97533d53c8892b6519e99f344489fa4fd4c9ab93 | refs/heads/master | 2021-11-24T10:23:56.927122 | 2021-11-18T04:28:05 | 2021-11-18T04:28:05 | 110,225,265 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 688 | py | from collections import Counter
from math import inf
class Solution:
def firstUniqChar(self, s: str) -> int:
count = Counter(s)
for i, c in enumerate(s):
if count[c] == 1:
return i
return -1
def firstUniqChar(self, s: str) -> int:
alpha = "qwertyuiopasdfghjklzxcvbnm"
res = inf
for c in alpha:
i = s.find(c)
if i == -1:
continue
j = s.find(c, i+1)
if j == -1:
res = min(res, i)
return res if res is not inf else -1
if __name__ == "__main__":
print(Solution().firstUniqCharBetter("loveleetcode"))
| [
"836807627@qq.com"
] | 836807627@qq.com |
a76178923d7a753fe33c9b685cdc686a0032faa6 | cd117c1bcde2d7d7d7a389ef6889630b99a53059 | /Lesson 1/lesson1_task1.py | 3737200d9156fcddbe4c8288443bc813f35a04d3 | [] | no_license | mironnay/Python-Classes | e120cf5ea5403a907a9c567f9e634ab89cd04bea | 6f44618519983b28e2b743f9f7f982d556fee177 | refs/heads/master | 2022-08-31T17:28:59.064379 | 2020-05-27T12:57:46 | 2020-05-27T12:57:46 | 259,941,900 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 197 | py | num = input("Enter number of products you want to buy ")
price = input("Enter price for each product ")
final_price = int(num) * int(price)
print("Final sum of products is: " + str(final_price)) | [
"noreply@github.com"
] | mironnay.noreply@github.com |
fb7767bbf20ea1ac3b6d1db40ee1d0556a8dce91 | c8d39c580c3b0716d161c2d070335a713173c701 | /setting/urls.py | 1f6976e62f29c7c44830b9930fba3bd05219f778 | [] | no_license | NIKULAHIR/B2C | 2d58c95757314eb54a142b53ac3072e99639b2cf | cd24761a364a4314ac6857a621c859be5fea7122 | refs/heads/master | 2020-04-02T18:18:53.664016 | 2019-07-27T19:06:30 | 2019-07-27T19:06:30 | 154,695,303 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,077 | py | """FromDemo URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.11/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.urls import include, path
from django.contrib import admin
from app.Profiles.views import LoginView
urlpatterns = [
path('',LoginView.as_view(),),
path('admin/', admin.site.urls),
path('root/',include(('app.Profiles.urls','root'))),
path('product/',include(('app.Product.urls','product'))),
path('oredr/',include(('app.Order.urls','order'))),
#path('cart/',include(('app.Order.urls','cart'))),
]
| [
"nikulahir90@gmail.com"
] | nikulahir90@gmail.com |
a6664ec1cdda715d878aabeded1980ae5457a15c | 6f4f4d2ff85574a42a6e539d43becce5815e4530 | /lyman/tests/test_frontend.py | 639f8dc1d503c8f8798aa0fc2826f066d4bf4007 | [
"BSD-2-Clause"
] | permissive | toddt/lyman | b6aa656b6f8a6a235b9bf2f64d035a1b78dc188f | e3a5519fce41a765ae593d8d161e995c5f9aae8c | refs/heads/master | 2021-01-22T12:53:33.693352 | 2014-01-15T21:33:10 | 2014-01-15T21:33:10 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 661 | py | from argparse import Namespace
from nose.tools import assert_equal
from .. import frontend
def test_determine_engine():
plugin_dict = dict(linear="Linear",
multiproc="MultiProc",
ipython="IPython",
torque="PBS")
for arg, plugin_str in plugin_dict.items():
args = Namespace(plugin=arg, queue=None)
if arg == "multiproc":
args.nprocs = 4
plugin, plugin_args = frontend.determine_engine(args)
yield assert_equal, plugin, plugin_str
if arg == "multiproc":
yield assert_equal, plugin_args, dict(n_procs=4, qsub_args="")
| [
"mwaskom@stanford.edu"
] | mwaskom@stanford.edu |
99bdc1c699823b80e602a055a5b4a13d0a38c9ac | 4666793c3310dd20cf507dd4511a22d74365f851 | /database.py | b97bc5b343cf59b90d6b6036b605f601111db60e | [] | no_license | huomarc/covid19live | 00c102e7fe9829a9f790cf056ecb8d28ae34757c | 94923fe2c65b3e1d35190c69e08f56fb9bf3605c | refs/heads/master | 2022-11-05T00:10:23.290949 | 2020-06-18T21:29:34 | 2020-06-18T21:29:34 | 273,342,110 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 396 | py | from sqlalchemy import create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker
SQLALCHEMY_DATABASE_URL = "sqlite:///./covid.db"
engine = create_engine(
SQLALCHEMY_DATABASE_URL, connect_args={"check_same_thread": False}
)
SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
Base = declarative_base() | [
"noreply@github.com"
] | huomarc.noreply@github.com |
b6fc0625c6dd1554957b189643c7ecfa03533679 | f66d348063989778817eaeeaff868fbaadbdb68a | /euclidean.py | 5f715621e15aa74daac7745adc0c3a1d52a29751 | [] | no_license | william-richard/BowdoinMath252 | aca1f0247afe857b813dfc95511b898115cf8acb | 701c8d29c759d964970d006e3637c4c8b88b0dca | refs/heads/master | 2021-05-27T12:24:05.213768 | 2013-05-21T02:34:13 | 2013-05-21T02:34:13 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 821 | py | import sys
import math
def getInt(msg):
try:
data = int(raw_input(msg + " "))
except ValueError:
data = int(raw_input("Invalid input. Try again: "))
return data
def main():
#get our values
first = getInt("Input the 1st number you would like to get the GCD for: ")
second = getInt("Input the 2nd number you would like to get the GCD for: ")
#make sure we get a > b
if first > second:
a = first
b = second
else:
a = second
b = first
#set up r
r = a%b
while(r != 0):
print 'r = ' + repr(r) + ' a = ' + repr(a) + ' b = ' + repr(b)
a = b
b = r
r = a%b
print 'DONE: r = ' + repr(r) + ' a = ' + repr(a) + ' b = ' + repr(b)
print 'GCD = ' + repr(b)
if __name__ == "__main__":
main()
| [
"willster3021@gmail.com"
] | willster3021@gmail.com |
d2c956a7058044ec6ac1b4794ea0c4c38068efa6 | fb00807a844bcb9e828479ba206ee842469f27df | /sample_density_map.py | 3c7f49fa4886351509b2d64220db8c930032bdc2 | [] | no_license | eamalikaaa/CSRNet | fa60a84221fe403b1a05489f641cf5c6e2ef5fa0 | 156fa12bd4eabfea492b62401bbc30025994436a | refs/heads/main | 2023-04-09T21:39:51.437847 | 2021-04-18T17:30:36 | 2021-04-18T17:30:36 | 359,210,528 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 173 | py | gt_file = h5py.File(img_paths[0].replace('.jpg','.h5').replace('images','ground-truth'),'r')
groundtruth = np.asarray(gt_file['density'])
plt.imshow(groundtruth,cmap=CM.jet) | [
"noreply@github.com"
] | eamalikaaa.noreply@github.com |
8c6a5a3e278d1c8a19d73033246e3453833eb81e | 18f8a1c7122c0b320f17ea31192439779a8c63e8 | /web/apps/admin/groups.py | b0ad4bfe977abf42bf38d551d4f9ce035134e1a5 | [
"MIT"
] | permissive | RyanLainchbury/zoom | d49afa8d3506fca2c6e426707bd60ba640420a45 | 684a16f4fe3cea3d26f2d520c743a871ca84ecc5 | refs/heads/master | 2020-12-25T19:03:12.881247 | 2017-06-09T07:29:27 | 2017-06-09T07:29:27 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,650 | py | """
system users
"""
from zoom.components import success, error
from zoom.collect import Collection, CollectionController
from zoom.forms import Form
from zoom.helpers import link_to, url_for
from zoom.models import Group, Groups
from zoom.tools import now
import zoom.validators as v
import zoom.fields as f
from model import update_group_members
def group_fields(request):
fields = f.Fields([
f.TextField('Name', v.required, v.valid_name),
f.MemoField('Description'),
f.PulldownField('Administrators', default='administrators', options=request.site.user_groups),
])
personal_fields = f.Section('Includes',[
# f.ChosenMultiselectField('Groups', options=request.site.user_groups),
f.ChosenMultiselectField('Users', options=request.site.user_options),
])
return f.Fields(fields, personal_fields)
class GroupCollectionController(CollectionController):
def before_insert(self, record):
record['type'] = 'U'
update_group_members(record)
def before_update(self, record):
record['type'] = 'U'
update_group_members(record)
def main(route, request):
def user_group(group):
return group.type == 'U' and not group.name.startswith('a_')
db = request.site.db
users = Groups(db)
fields = group_fields(request)
columns = 'link', 'description', 'administrators'
return Collection(
fields,
model=Group,
controller=GroupCollectionController,
store=users,
item_name='group',
url='/admin/groups',
filter=user_group,
columns=columns,
)(route, request)
| [
"herb@dynamic-solutions.com"
] | herb@dynamic-solutions.com |
e112517d50528afa0bdebfab681278009f81d1f9 | e801dbaf4cf72782827e618e88bb1a460d297ba0 | /Scripts/StatisticalAnalysisScripts/csvGenerationConfigToNumeric.py | e17fa9f64da1a81935c0a9e6af3ed33050b174f8 | [] | no_license | maliha1234/StaticAnalysisFeatureSelection | 463345fd2d3f08ade1cfbc33d7851c930718bac3 | 9181ccb7271c02c64e7a72fc0721d8f3024ba202 | refs/heads/master | 2023-02-03T17:01:30.325084 | 2020-12-16T06:24:01 | 2020-12-16T06:24:01 | 234,459,863 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,278 | py | import pandas
import sys
import glob
import os
from collections import defaultdict
postAnalysisDirectory = sys.argv[1]
programName = sys.argv[2]
try:
aiCsvFileName = postAnalysisDirectory + programName + ".csv"
aiCsv = pandas.read_csv(aiCsvFileName)
print(aiCsvFileName)
# Create the dictionary with 1,2,3 random values
c1_dictionary ={'TD' : 1, 'BU' : 2, 'TD+BU' : 3}
c2_dictionary ={'AP' : 1, 'SO' : 2, 'AP+SO' : 3}
c3_dictionary ={'1-CFA' : 1, 'CI' : 2, '1-TYPE' : 3}
c4_dictionary ={'ALLOCATION' : 1, 'SMUSH_STRING' : 2, 'TYPE' : 3}
c5_dictionary ={'BOX' : 1, 'POLY' : 2}
# Add new columns
aiCsv['C1*'] = aiCsv['C1'].map(c1_dictionary)
aiCsv['C2*'] = aiCsv['C2'].map(c2_dictionary)
aiCsv['C3*'] = aiCsv['C3'].map(c3_dictionary)
aiCsv['C4*'] = aiCsv['C4'].map(c4_dictionary)
aiCsv['C5*'] = aiCsv['C5'].map(c5_dictionary)
aiCsv = aiCsv.drop(aiCsv.columns[[0]], axis=1) # delete the unnamed column
outputCsvFileName = postAnalysisDirectory + programName + "Config_to_numeric.csv"
aiCsv.to_csv(outputCsvFileName)
for i in range(9, 71):
print(aiCsv.columns[[i]])
except Exception as e:
print(e)
print("no file") | [
"malihasarwat@Malihas-MBP.lan1"
] | malihasarwat@Malihas-MBP.lan1 |
2c3a942a58a9f33ffbac28e938292022335ed2fa | 1db4518a0bc3782d46f1895d5592f81ed917868d | /src/VirtualNodeMap.py | d64b7b55b2b57a4502527302f4e7828692a54966 | [] | no_license | nilotpalsrkr/ConsistentHashing | fa8a1f4cf45d4b1bd2fd0d6d9ddf90d00a16e6d2 | 7e3e3b624606c9e52c3bd92fae4ae93eb1fcdcba | refs/heads/main | 2023-08-12T18:38:48.260161 | 2021-09-16T16:23:55 | 2021-09-16T16:23:55 | 407,236,074 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,555 | py | import random
import math
# Stores the vnode to node mapping
# Composed within a node so that every node has its own vnode mapping
class VirtualNodeMap:
def __init__(self, node_names, TOTAL_VIRTUAL_NODES):
self._vnode_map = {}
self._node_names = node_names
self._TOTAL_VIRTUAL_NODES = TOTAL_VIRTUAL_NODES
self._node_vnode_map = {}
@property
def vnode_map(self):
return self._vnode_map
@property
def node_names(self):
return self._node_names
# Populates the Virtual Node Nap, given the set of Node names.
# Creates a mapping of Virtual Node to corresponding assigned physical Node
def populate_map(self):
# Problem statement 1
# Generate a dict of vnode ids (0 to (TOTAL_VIRTUAL_NODES - 1) mapped randomly
# but equally (as far as maths permits) to node names
""" This assigns node to vnode in a sequential manner.
Meaning -
Lets say we have 4 nodes : node-1, node-2, node-3, node-4
Allocation happens as follows:
node-1 -> 0,4,8,12..
node-2 -> 1,5,9,13..
node-3 -> 2,6,10,14..
node-4 -> 3,7,11,15..
"""
total_node_count = len(self._node_names)
for v in range(-1, self._TOTAL_VIRTUAL_NODES, total_node_count): # The counter is increased by
# total_node_count. The outer loop increase by this integer.
t = v # This 't' is increased by 1 in inner loop for sequential effect and is assigned to each node
for node in self._node_names:
t = t + 1
self._vnode_map[t] = node
if node not in self._node_vnode_map:
self._node_vnode_map[node] = [t]
else:
self._node_vnode_map[node].append(t)
# Return the vnode name mapped to a particular vnode
def get_node_for_vnode(self, vnode):
return self._vnode_map[vnode]
# Returns the vnode name where a particular key is stored
# It finds the vnode for the key through modulo mapping, and then looks up the physical node
def get_assigned_node(self, key):
vnode = key % self._TOTAL_VIRTUAL_NODES
return self._vnode_map[vnode]
# Assign a new node name as mapping for a particular vnode
# This is useful when vnodes are remapped during node addition or removal
def set_new_assigned_node(self, vnode, new_node_name):
self._vnode_map[vnode] = new_node_name
| [
"nilotpalsarkar@Nilotpals-MacBook-Pro.local"
] | nilotpalsarkar@Nilotpals-MacBook-Pro.local |
e059a8bdbfc125439f694cd82fc8d44168c0cafe | 9aa46b74b6c3d927aa870306057b3d0d50236e87 | /weatherapp.py | 0b28f6000aebc18b2441bd129cd19117c72d59c6 | [] | no_license | McEdward/Mini-Weather-App-Python- | d81dede01a3e6039290f3b3605501a3cb3fcfa21 | 4badf3e75cd1c880e2fe714a66346b1bf1d411cb | refs/heads/master | 2021-01-21T12:11:53.748945 | 2017-08-31T22:42:29 | 2017-08-31T22:42:29 | 102,049,564 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,771 | py | def window():
ui = tkinter.Tk()
ui.geometry('300x250+550+200')
L1 = tkinter.Label(text = "Locate City")
L1.pack()
E1 = tkinter.Entry(width=45)
E1.pack()
go = tkinter.Button(master = ui, text = "Search", command = lambda: yahooweather(E1.get()))
go.pack()
ui.mainloop()
def yahooweather(name):
baseurl = "https://query.yahooapis.com/v1/public/yql?"
yql_query = "select * from weather.forecast where woeid in (select woeid from geo.places(1) where text=\""+ name +"\")"
yql_url = baseurl + urllib.parse.urlencode({'q':yql_query}) + "&format=json"
result = urllib.request.urlopen(yql_url).read()
data = json.loads(result)
print (data['query']['results'])
city = data['query']['results']['channel']["location"]["city"]
country = data['query']['results']['channel']["location"]["country"]
temp = data['query']['results']['channel']["item"]["condition"]["temp"] + "º F"
tex = data['query']['results']['channel']["item"]["condition"]["text"]
datentime = data['query']['results']['channel']["lastBuildDate"]
print (data['query']['results']['channel']["location"]["city"])
print (data['query']['results']['channel']["location"]["country"])
print (data['query']['results']['channel']["item"]["condition"]["temp"] + "F")
print (data['query']['results']['channel']["item"]["condition"]["text"])
print (data['query']['results']['channel']["lastBuildDate"])
details(city, country, temp, tex, datentime)
def details(city, country, temp, tex, time):
c = tkinter.Label(text = city + ", "+ country)
tem = tkinter.Label(text = temp)
txt = tkinter.Label(text = tex)
datentime = tkinter.Label(text = time)
c.pack()
tem.pack()
txt.pack()
datentime.pack()
import tkinter
import urllib.parse, urllib.request, json
city = "No city Selected yet!"
window()
| [
"mac114.md@gmail.com"
] | mac114.md@gmail.com |
d04bc1786001d02dc9ac9d0587bf959ff6a23588 | 3d1079a886b22081a7d8137e29ec47528a912d86 | /MT3FileNameChange/MT3FileNameChange.py | f688c351160fa8570ce67a1ab5a6e7607a134b62 | [] | no_license | satoshi246ss/MT3FileMove | 0273d20000dacc8f2b547f38489159081c0aa6ca | d3042a3743af9d368bce119debdcc3053f12a6e6 | refs/heads/master | 2021-01-19T03:19:58.949652 | 2017-01-07T06:53:46 | 2017-01-07T06:53:46 | 48,326,541 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,605 | py | ## -*- coding: utf-8 -*-
import os
import sys
import datetime
import shutil
import time
import cr2_file_move
import bmp2avi
#---------------------------------------------------
# 2_ .avi ファイル名変更
# datedir -> yyyymmdd
#
def proc_2_rename(BaseSoucePath,dir):
datedir = dir.replace("/","")
# 条件確認
if os.path.exists(BaseSoucePath) == False:
print "Base Souce path '%s' not exists!" % BaseSoucePath
return BaseSoucePath
# src list up
SoucePath = BaseSoucePath+"/"+datedir
FileList = os.listdir(SoucePath)
for f in FileList:
if ( f[-4:]==".avi"):
if ( f[:2]=="2_" ):
fn=f[2:]
fn=fn[:-4]+"_2.avi"
src = SoucePath +"/"+ f
dst = SoucePath +"/"+ fn
os.rename(src, dst)
if ( f[:2]=="3_" ):
fn=f[2:]
fn=fn[:-4]+"_3.avi"
src = SoucePath +"/"+ f
dst = SoucePath +"/"+ fn
os.rename(src, dst)
if ( f[:2]=="4_" ):
fn=f[2:]
fn=fn[:-4]+"_4.avi"
src = SoucePath +"/"+ f
dst = SoucePath +"/"+ fn
os.rename(src, dst)
if ( f[:2]=="7_" ):
fn=f[2:]
fn=fn[:-4]+"_7.avi"
src = SoucePath +"/"+ f
dst = SoucePath +"/"+ fn
os.rename(src, dst)
if ( f[:2]=="8_" ):
fn=f[2:]
fn=fn[:-4]+"_8.avi"
src = SoucePath +"/"+ f
dst = SoucePath +"/"+ fn
os.rename(src, dst)
if ( f[:3]=="11_" ):
fn=f[3:]
fn=fn[:-4]+"_11.avi"
src = SoucePath +"/"+ f
dst = SoucePath +"/"+ fn
os.rename(src, dst)
#---------------------------------------------------
# avi ファイルコピー
def mt3filemove(dt = datetime.datetime.now()):
dir = dt.strftime("/%Y%m%d")
print dt,dir
BaseSoucePath = "J:/MT"
# 2_のファイル名変更後、年月日ディレクトリに再振り分け
proc_2_rename(BaseSoucePath,dir)
#---------------------------------------------------
# main
# 日付指定
if __name__ == "__main__":
dtnow = datetime.datetime.now()
drange=1 #実行日数(戻り日数)
if len( sys.argv ) >= 5:
yyyy=int(sys.argv[1])
mm =int(sys.argv[2])
dd =int(sys.argv[3])
drange =int(sys.argv[4])
elif len( sys.argv ) == 4:
yyyy=int(sys.argv[1])
mm =int(sys.argv[2])
dd =int(sys.argv[3])
elif len( sys.argv ) == 3:
yyyy=dtnow.year
mm =int(sys.argv[1])
dd =int(sys.argv[2])
elif len( sys.argv ) == 2:
yyyy=dtnow.year
mm =dtnow.month
dd =int(sys.argv[1])
elif len( sys.argv ) == 1:
yyyy=dtnow.year
mm =dtnow.month
dd =dtnow.day
drange =7
if yyyy < 2000 or yyyy > dtnow.year :
print "Year '%s' 範囲外" % yyyy
sys.exit()
if mm < 1 or mm > 12 :
print "Month '%s' 範囲外" % mm
sys.exit()
if dd < 1 or dd > 31 :
print "Day '%s' 範囲外" % dd
sys.exit()
if drange < 1 or drange > 365 :
print "Drange '%s' 範囲外" % drange
sys.exit()
for i in range(drange):
dt = datetime.date(yyyy,mm,dd) -datetime.timedelta(days=i)
print dt
time.sleep(1)
mt3filemove(dt)
| [
"satoshi246ss@yahoo.co.jp"
] | satoshi246ss@yahoo.co.jp |
1c7cc732f20e33f9a1ba6643dbfd612499b6a9e0 | 7afafec3e6bde1b182581ca32d1dcd19279a8c34 | /server/venv/bin/easy_install | 2fa70b27267b04639af57eb0c2f883de7a16b7a5 | [] | no_license | LudSkywalker/PythonReactAI | 6849dead7db73833ed584c1df02779f950f37b6c | dc0dc216e99fe77f2ff2ab6a7a0e44c5fee86710 | refs/heads/master | 2023-07-04T02:26:43.610476 | 2021-07-29T18:39:00 | 2021-07-29T18:39:00 | 384,199,179 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 270 | #!/home/lud/Desktop/flask_react_AI/server/venv/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from setuptools.command.easy_install import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(main())
| [
"ludingnumpaque@gmail.com"
] | ludingnumpaque@gmail.com | |
892fbd06f8f8a270de2eff8e0b8cc19d184a457b | 2b11362f30d7e07fabfb8cd76085cfa4485d12ea | /mysqlite/__init__.py | f051ef662371cb1252bfb783c0c1296b8220777b | [] | no_license | stomer1/Python-Basic | 93e05032d2d7676a197be5a7c59acb18c3cffaed | 74b00bf9a909371bf2dd2f41c2880736c16aadd6 | refs/heads/master | 2023-05-11T05:48:10.523535 | 2021-06-04T08:41:00 | 2021-06-04T08:41:00 | 368,451,922 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 329 | py | from .database import *
# __init__.py
# 패키지 임포할 때 초기화 작업을 수행하는 파일
# 없어도 패키지로 인식
# from 패키지 import * : 내부에 있는 모든 객체를 import
__all__ = ["Database"] # 명시된 심볼만 export된다
#__all__ = [] # *로 임포트시 아무 것도 export 안함
| [
"stomer1@naver.com"
] | stomer1@naver.com |
cc247e80135181a627d1df3c82785a5871e3b13c | 255e19ddc1bcde0d3d4fe70e01cec9bb724979c9 | /all-gists/1485023/snippet.py | 55f1081e778224a3121589a27e60a6f8ebd07476 | [
"MIT"
] | permissive | gistable/gistable | 26c1e909928ec463026811f69b61619b62f14721 | 665d39a2bd82543d5196555f0801ef8fd4a3ee48 | refs/heads/master | 2023-02-17T21:33:55.558398 | 2023-02-11T18:20:10 | 2023-02-11T18:20:10 | 119,861,038 | 76 | 19 | null | 2020-07-26T03:14:55 | 2018-02-01T16:19:24 | Python | UTF-8 | Python | false | false | 1,053 | py | # You need gevent 1.0 and pyzmq 3.x
#
# pip install --user git://github.com/SiteSupport/gevent.git
# pip install --user pyzmq
#
import gevent
import zmq.green as zmq
import os, sys
ADDR = 'tcp://127.0.0.1:5555'
def run_parent():
ctx = zmq.Context()
sock = ctx.socket(zmq.PUSH)
sock.bind(ADDR)
for i in range(10):
sock.send('message: %d' % i)
gevent.sleep(1)
def run_child(ident):
# create a new context since we are forked in a new process
ctx = zmq.Context()
sock = ctx.socket(zmq.PULL)
sock.connect(ADDR)
while True:
msg = sock.recv()
print '%s: %s' % (ident, msg)
def fork_workers(num):
pids = []
for i in range(num):
pid = gevent.fork()
if pid == 0:
run_child(os.getpid())
sys.exit(0)
else:
pids.append(pid)
return pids
pids = fork_workers(3)
print 'workers:', ', '.join('%d' % p for p in pids)
run_parent()
# not cool, workers should die themselves actually
for pid in pids:
os.kill(pid, 15)
| [
"gistshub@gmail.com"
] | gistshub@gmail.com |
222a24bf377055ea4f4cd7687dc139e8332a4893 | 948a8fe4a46bbdda00f3af5d7a999092fd546808 | /src/QPS_simlearning.py | 01fd2a4dbf239f1fd79955040d1a9cf7af07bda0 | [] | no_license | wencanluo/QuantitativeSummarization | fcaf072566f0a4907f383042af0054ed1c47d82e | 8c34923e3447e517ee99fc00fda2bd81b34e25a0 | refs/heads/master | 2020-12-21T01:07:37.842895 | 2017-05-11T01:06:02 | 2017-05-11T01:06:02 | 56,019,382 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 19,737 | py | import sys
import re
import fio
import xml.etree.ElementTree as ET
from collections import defaultdict
import random
import NLTKWrapper
import SennaParser
import porter
import annotation
import os
import CourseMirror_Survey
import OracleExperiment
import json
from CourseMirror_Survey import stopwords, punctuations
import codecs
from nltk.tag import SennaPSGTagger
import pickle
import numpy as np
from sklearn import svm
from sklearn.metrics import mean_squared_error, precision_recall_fscore_support, accuracy_score
import pickle
import file_util
from AlignPhraseAnnotation import AlignPhraseAnnotation
from similarity import Similarity
import global_params
sim_exe = '.feature.sim'
def extractPhrasePaireFeature(phrasedir):
    """Compute similarity features for every ordered phrase pair of every
    lecture/prompt under `phrasedir` and dump them as JSON.

    The gold score is a 0.0 placeholder here (features only, no labels).
    Reads the module-level `method` global set in __main__.
    """
    for lec in annotation.Lectures:
        path = phrasedir + str(lec)+ '/'
        fio.NewPath(path)
        for prompt in ['q1', 'q2']:
            prefix = os.path.join(path, '%s.%s.'%(prompt, method))
            filename = path + prompt + sim_exe
            print filename
            featureset = []
            feature_extractor = Similarity(prefix)
            phrasefile = os.path.join(path, "%s.%s.key"%(prompt, method))
            phrases = fio.LoadList(phrasefile)
            # All ordered pairs, including each phrase paired with itself.
            for p1 in phrases:
                for p2 in phrases:
                    featureset.append((feature_extractor.get_features(p1, p2), 0.0, {'p1':p1, 'p2':p2}))
            fio.SaveDict2Json(featureset, filename)
            feature_extractor.save()
def extractPhrasePaireFromAnnotation(phrasedir, annotators, id):
    """Build labeled phrase-pair feature files from human annotations.

    Pairs of phrases annotated under the same rank get score 1.0, pairs
    from different ranks get 0.0.  Output is one JSON feature file per
    lecture/prompt.

    NOTE(review): the `id` parameter is unused -- confirm whether callers
    still need it.
    """
    for doc, lec, annotator in annotation.generate_all_files(annotation.datadir + 'json/', '.json', anotators = annotators, lectures=annotation.Lectures):
        print doc
        #load task
        task = annotation.Task()
        task.loadjson(doc)
        path = phrasedir + str(lec)+ '/'
        fio.NewPath(path)
        for prompt in ['q1', 'q2']:
            prefix = os.path.join(path, '%s.%s.'%(prompt, method))
            filename = path + prompt + sim_exe
            print filename
            featureset = []
            feature_extractor = Similarity(prefix)
            phrase_annotation = task.get_phrase_annotation(prompt)
            #positive examples
            for rank1 in sorted(phrase_annotation):
                for rank2 in sorted(phrase_annotation):
                    # Same rank => same cluster => positive pair.
                    if rank1 == rank2:
                        score = 1.0
                    else:
                        score = 0.0
                    phrases1 = phrase_annotation[rank1]
                    phrases2 = phrase_annotation[rank2]
                    for phrasedict1 in phrases1:
                        p1 = phrasedict1['phrase'].lower().strip()
                        for phrasedict2 in phrases2:
                            p2 = phrasedict2['phrase'].lower().strip()
                            featureset.append((feature_extractor.get_features(p1, p2), score, {'p1':p1, 'p2':p2}))
            fio.SaveDict2Json(featureset, filename)
            feature_extractor.save()
def combine_files_test(phrasedir, lectures, features=None, prompts=['q1', 'q2']):
    """Load phrase-pair feature rows and scores from a single phrase dir.

    :param phrasedir: root directory containing one subdirectory per lecture
    :param lectures: iterable of lecture numbers to load
    :param features: ordered feature names; defaults to all Similarity features
    :param prompts: prompts to include (default: both reflection prompts)
    :returns: (X, Y) -- list of feature rows and the parallel list of scores
    """
    if features is None:
        features = sorted(Similarity().features.keys())
    X = []
    Y = []
    # The original wrapped this in a pointless `for phrasedir in [phrasedir]`
    # loop that shadowed the parameter; it iterated exactly once.
    for lec in lectures:
        for q in prompts:
            path = phrasedir + str(lec) + '/'
            filename = os.path.join(path, q + sim_exe)
            for fdict, score, _ in fio.LoadDictJson(filename):
                row = []
                for name in features:
                    x = fdict[name]
                    if str(x) == 'nan':  # replace serialized NaNs with 0.0
                        x = 0.0
                    row.append(x)
                X.append(row)
                Y.append(score)
    return X, Y
def combine_files_course(course, lectures, features=None, prompts=['q1', 'q2']):
    """Load phrase-pair feature rows and gold scores for `course`.

    Reads the serialized feature files of both oracle annotators for every
    lecture/prompt combination.

    :param course: course id used to locate the data directories
    :param lectures: iterable of lecture numbers to load
    :param features: ordered feature names; defaults to all Similarity features
    :param prompts: prompts to include (default: both reflection prompts)
    :returns: (X, Y) -- list of feature rows and the parallel list of scores
    """
    phrasedirs = ['../data/%s/oracle_annotator_1/phrase/' % course,
                  '../data/%s/oracle_annotator_2/phrase/' % course]
    if features is None:
        features = sorted(Similarity().features.keys())
    X = []
    Y = []
    for lec in lectures:
        for q in prompts:
            for phrasedir in phrasedirs:
                filename = os.path.join(phrasedir + str(lec) + '/', q + sim_exe)
                for fdict, score, _ in fio.LoadDictJson(filename):
                    row = []
                    for name in features:
                        x = fdict[name]
                        if str(x) == 'nan':  # replace serialized NaNs with 0.0
                            x = 0.0
                        row.append(x)
                    X.append(row)
                    Y.append(score)
    return X, Y
def combine_files(lectures, features=None, prompts=['q1', 'q2']):
    """Load phrase-pair feature rows/scores for the module-level `course`.

    This was a verbatim copy of combine_files_course with the course taken
    from the `course` global (set in __main__); delegate instead of
    duplicating the loading logic.
    """
    return combine_files_course(course, lectures, features, prompts)
def correlation_analysis(course):
    """Dump a matrix of feature values, gold score, and model prediction for
    every annotated phrase pair of `course`, for offline correlation study.

    Reads one pickled regressor per lecture from the module-level
    `model_dir` global (set in __main__).  Output: <outdir>/data.txt.
    """
    phrasedir1 = '../data/%s/oracle_annotator_1/phrase/'%course
    phrasedir2 = '../data/%s/oracle_annotator_2/phrase/'%course
    outdir = '../data/%s/simlearning/'%course
    fio.NewPath(outdir)
    sim_extractor = Similarity()
    features = sorted(sim_extractor.features.keys())
    head = features + ['score', 'predict']
    body = []
    lectures = annotation.Lectures
    name = '_'.join(features)
    for i, lec in enumerate(lectures):
        # Model trained with this lecture held out.
        model_file = os.path.join(model_dir, '%d_%s.model'%(lec, name))
        with open(model_file, 'rb') as handle:
            clf = pickle.load(handle)
        for q in ['q1', 'q2']:
            outfile = os.path.join(outdir, str(lec), '%s%s'%(q, sim_exe))
            for phrasedir in [phrasedir1, phrasedir2]:
                path = phrasedir + str(lec)+ '/'
                filename = os.path.join(path, q + sim_exe)
                data = fio.LoadDictJson(filename)
                for fdict, score, _ in data:
                    row = []
                    for fname in features:
                        x = fdict[fname]
                        if str(x) == 'nan':
                            x = 0.0
                        row.append(x)
                    predict_score = clf.predict([row])
                    row.append(score)
                    row.append(predict_score[0])
                    body.append(row)
    out_correlation = os.path.join(outdir, 'data.txt')
    print out_correlation
    fio.WriteMatrix(out_correlation, body, head)
def correlation_analysis_noduplicate():
    """Like correlation_analysis, but skips self-pairs (p1 == p2) and omits
    the model prediction column.

    Uses the module-level `course` global (set in __main__).
    """
    phrasedir1 = '../data/%s/oracle_annotator_1/phrase/'%course
    phrasedir2 = '../data/%s/oracle_annotator_2/phrase/'%course
    outdir = '../data/%s/simlearning/'%course
    fio.NewPath(outdir)
    sim_extractor = Similarity()
    features = sorted(sim_extractor.features.keys())
    head = features + ['score']
    body = []
    lectures = annotation.Lectures
    for i, lec in enumerate(lectures):
        for q in ['q1', 'q2']:
            outfile = os.path.join(outdir, str(lec), '%s%s'%(q, sim_exe))
            for phrasedir in [phrasedir1, phrasedir2]:
                path = phrasedir + str(lec)+ '/'
                filename = os.path.join(path, q + sim_exe)
                data = fio.LoadDictJson(filename)
                for fdict, score, pd in data:
                    # Skip trivial pairs of a phrase with itself.
                    if pd['p1'] == pd['p2']:
                        print pd['p1']
                        continue
                    row = []
                    for name in features:
                        x = fdict[name]
                        if str(x) == 'nan':
                            x = 0.0
                        row.append(x)
                    row.append(score)
                    body.append(row)
    out_correlation = os.path.join(outdir, 'data.txt')
    fio.WriteMatrix(out_correlation, body, head)
def train_leave_one_lecture_out(model_dir, name='simlearn_cv'):
    """Leave-one-lecture-out cross-validation with an SVR regressor.

    For each lecture, trains (or loads a cached) SVR on all other lectures
    and records the per-prompt mean squared error.  Results are written to
    ../data/<course>/simlearning.cv.<name>.txt.

    NOTE(review): the `name` parameter is overwritten below from the
    feature list; the commented-out loops are leftovers from per-feature
    ablation runs.
    """
#     model_dir = '../data/IE256/%s/model/%s/'%(system, name)
#     fio.NewPath(model_dir)
#     
#     outputdir = '../data/IE256/%s/extraction/%s_output/'%(system, name)
#     fio.NewPath(outputdir)
    sim_extractor = Similarity()
    allfeatures = sorted(sim_extractor.features.keys())
    if True:
        k = len(allfeatures)
    #for k in range(len(allfeatures)+1):
        #features = allfeatures#['WordEmbedding']
        if k == len(allfeatures):#use all features
            features = allfeatures
        else:
            features = [allfeatures[k]]
        name = '_'.join(features)
        lectures = annotation.Lectures
        dict = defaultdict(int)
        MSE = []
        for i, lec in enumerate(lectures):
            # Hold out the current lecture as the test fold.
            train = [x for x in lectures if x != lec]
            test = [lec]
            print train
            print test
            model_file = os.path.join(model_dir, '%d_%s.model'%(lec, name))
            if fio.IsExist(model_file):
                with open(model_file, 'rb') as handle:
                    clf = pickle.load(handle)
            else:
                train_X, train_Y = combine_files(train, features)
                clf = svm.SVR()
                clf.fit(train_X, train_Y)
                with open(model_file, 'wb') as handle:
                    pickle.dump(clf, handle)
            for q in ['q1', 'q2']:
                test_X, test_Y = combine_files(test, features, prompts=[q])
                predict_Y = clf.predict(test_X)
                mse = mean_squared_error(test_Y, predict_Y)
                MSE.append([lec, q, mse])
        output = '../data/%s/simlearning.cv.%s.txt'%(course, name)
        fio.WriteMatrix(output, MSE, header=['lec', 'prompt', 'MSE'])
def train_IE256_svm(traincourse, model_dir, name='simlearn_cv'):
    """Train (or load from cache) one SVM classifier for `traincourse`.

    Uses all Similarity features and every training lecture of the course
    (IE256 skips lecture 22).  The fitted model is pickled to
    <model_dir>/<traincourse>_<feature names>.model.

    NOTE(review): the `name` parameter is ignored -- the model name is
    always derived from the feature list, matching the original behavior.
    """
    features = sorted(Similarity().features.keys())
    name = '_'.join(features)
    # Training lecture ranges differ per course.
    if traincourse == 'IE256':
        train = [x for x in range(14, 26) if x != 22]
    else:
        train = [x for x in range(3, 27)]
    model_file = os.path.join(model_dir, '%s_%s.model'%(traincourse, name))
    if fio.IsExist(model_file):
        with open(model_file, 'rb') as handle:
            clf = pickle.load(handle)
    else:
        train_X, train_Y = combine_files_course(traincourse, train, features)
        clf = svm.SVC()
        clf.fit(train_X, train_Y)
        with open(model_file, 'wb') as handle:
            pickle.dump(clf, handle)
def train_leave_one_lecture_out_svm(model_dir, name='simlearn_cv'):
    """Leave-one-lecture-out cross-validation with an SVM classifier.

    For each lecture, trains (or loads a cached) SVC on all other lectures
    and records per-prompt accuracy/precision/recall/F-score.  Results go
    to ../data/<course>/simlearning.cv.svm.<name>.txt.

    NOTE(review): the `name` parameter is overwritten below from the
    feature list; the commented-out loops are per-feature ablation
    leftovers.
    """
#     model_dir = '../data/IE256/%s/model/%s/'%(system, name)
#     fio.NewPath(model_dir)
#     
#     outputdir = '../data/IE256/%s/extraction/%s_output/'%(system, name)
#     fio.NewPath(outputdir)
    sim_extractor = Similarity()
    allfeatures = sorted(sim_extractor.features.keys())
    #for k in range(len(allfeatures)+1):
    k = len(allfeatures)
    if True:
    #for k in range(len(allfeatures)):
        #if allfeatures[k] != 'optimumComparerLSATasa': continue
        if k == len(allfeatures):#use all features
            features = allfeatures
        else:
            features = [allfeatures[k]]
        #features = allfeatures[0:k] + allfeatures[k+1:]
        name = '_'.join(features)
        lectures = annotation.Lectures
        dict = defaultdict(int)
        MSE = []
        for i, lec in enumerate(lectures):
            # Hold out the current lecture as the test fold.
            train = [x for x in lectures if x != lec]
            test = [lec]
            print train
            print test
            model_file = os.path.join(model_dir, '%d_%s.model'%(lec, name))
            if fio.IsExist(model_file):
                with open(model_file, 'rb') as handle:
                    clf = pickle.load(handle)
            else:
                train_X, train_Y = combine_files(train, features)
                clf = svm.SVC()
                clf.fit(train_X, train_Y)
                with open(model_file, 'wb') as handle:
                    pickle.dump(clf, handle)
            for q in ['q1', 'q2']:
                test_X, test_Y = combine_files(test, features, prompts=[q])
                predict_Y = clf.predict(test_X)
                prf = precision_recall_fscore_support(test_Y, predict_Y, average='weighted')
                accuracy = accuracy_score(test_Y, predict_Y)
                MSE.append([lec, q, accuracy] + [prf[0], prf[1], prf[2]])
        output = '../data/%s/simlearning.cv.svm.%s.txt'%(course, name)
        fio.WriteMatrix(output, MSE, header=['lec', 'prompt', 'accuracy', 'precision', 'recall', 'f-score'])
def predict_IE256(train_course, model_dir, phrasedir, modelname='svm'):
    """Score all phrase pairs of every lecture with the single model trained
    on `train_course`, writing one square similarity matrix per prompt.

    Reads the module-level `method` global set in __main__.
    """
    sim_extractor = Similarity()
    allfeatures = sorted(sim_extractor.features.keys())
    features = allfeatures
    name = '_'.join(features)
    lectures = annotation.Lectures
    for i, lec in enumerate(lectures):
        test = [lec]
        print test
        model_file = os.path.join(model_dir, '%s_%s.model'%(train_course, name))
        with open(model_file, 'rb') as handle:
            clf = pickle.load(handle)
        path = os.path.join(phrasedir, str(lec))
        for q in ['q1', 'q2']:
            test_X, test_Y = combine_files_test(phrasedir, test, features, prompts=[q])
            predict_Y = clf.predict(test_X)
            #write the output
            phrasefile = os.path.join(path, "%s.%s.key"%(q, method))
            phrases = fio.LoadList(phrasefile)
            # Feature rows were generated pairwise, so their count must be
            # the square of the phrase count.
            assert(len(predict_Y) == len(phrases)*len(phrases))
            k = 0
            body = []
            for p1 in phrases:
                row = []
                for p2 in phrases:
                    row.append(predict_Y[k])
                    k += 1
                body.append(row)
            output = os.path.join(path, "%s.%s.%s"%(q, method,modelname))
            fio.WriteMatrix(output, body, phrases)
def predict_leave_one_lecture_out(model_dir, phrasedir, modelname='svr'):
    """Score all phrase pairs of each lecture with that lecture's held-out
    model, writing one square similarity matrix per prompt.

    Reads the module-level `method` global set in __main__.
    """
    sim_extractor = Similarity()
    allfeatures = sorted(sim_extractor.features.keys())
    features = allfeatures
    name = '_'.join(features)
    lectures = annotation.Lectures
    for i, lec in enumerate(lectures):
        test = [lec]
        print test
        # Model trained with this lecture held out.
        model_file = os.path.join(model_dir, '%d_%s.model'%(lec, name))
        with open(model_file, 'rb') as handle:
            clf = pickle.load(handle)
        path = os.path.join(phrasedir, str(lec))
        for q in ['q1', 'q2']:
            test_X, test_Y = combine_files_test(phrasedir, test, features, prompts=[q])
            predict_Y = clf.predict(test_X)
            #write the output
            phrasefile = os.path.join(path, "%s.%s.key"%(q, method))
            phrases = fio.LoadList(phrasefile)
            # Feature rows were generated pairwise, so their count must be
            # the square of the phrase count.
            assert(len(predict_Y) == len(phrases)*len(phrases))
            k = 0
            body = []
            for p1 in phrases:
                row = []
                for p2 in phrases:
                    row.append(predict_Y[k])
                    k += 1
                body.append(row)
            output = os.path.join(path, "%s.%s.%s"%(q, method,modelname))
            fio.WriteMatrix(output, body, phrases)
def gather_performance(output):
    """Aggregate the per-run cross-validation tables into one summary.

    For each single-feature run, and finally the all-features run, read the
    result matrix and average every metric column; write one row per run
    (first column: run name) to `output`.
    """
    feature_names = sorted(Similarity().features.keys())
    # Single-feature runs first, then the run that used every feature --
    # the same order the original k-loop produced.
    feature_sets = [[f] for f in feature_names] + [feature_names]
    summary_rows = []
    for feature_set in feature_sets:
        run_name = '_'.join(feature_set)
        result_file = '../data/%s/simlearning.cv.svm.%s.txt' % (course, run_name)
        head, body = fio.ReadMatrix(result_file, hasHead=True)
        # First two columns are lec/prompt identifiers; average the rest.
        summary_head = ['name'] + head[2:]
        row = [run_name]
        for col in range(2, len(head)):
            row.append(np.mean([float(r[col]) for r in body]))
        summary_rows.append(row)
    fio.WriteMatrix(output, summary_rows, summary_head)
def check_stopword():
    """Print vocabulary words (count >= 5) that also appear in the stopword list."""
    from CourseMirror_Survey import stopwords
    vocab = fio.LoadDictJson(global_params.vocab)
    for word, count in vocab.items():
        if count < 5: continue
        if word in stopwords:
            print word, '\t', count
if __name__ == '__main__':
    course = global_params.g_cid
    # `course`, `system`, `method`, and `model_dir` become module globals
    # that the helper functions above read.
    for system, method in [
                            ('QPS_combine', 'crf'),
                           ]:
        phrasedir = "../data/"+course+"/"+system+"/phrase/"
#         extractPhrasePaireFeature(phrasedir)
        model_dir = "../data/"+course+"/simlearning/svm"
        fio.NewPath(model_dir)
        train_leave_one_lecture_out_svm(model_dir)
        predict_leave_one_lecture_out(model_dir, phrasedir, modelname='svm')
| [
"wencanluo.cn@gmail.com"
] | wencanluo.cn@gmail.com |
2868e0431b4695d3c0c1bf5f09a50754ff439a4e | 983f77449bbea7ae1993a93d7f4431f0f07193f0 | /lab/agent_exercising/model.py | 3593f4708457d6223c507bb9e459248134d29983 | [] | no_license | johnholl/TDnets | 09d45f2bab138639e3be107d2e44df01533c10c3 | 00afc8a5ad412047c658deed2f487a98f062788b | refs/heads/master | 2020-06-19T06:41:42.159903 | 2017-03-13T13:02:11 | 2017-03-13T13:02:11 | 74,916,539 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,714 | py | import numpy as np
import tensorflow as tf
import tensorflow.contrib.rnn as rnn
def normalized_columns_initializer(std=1.0):
    """Return a TF variable initializer whose columns have L2 norm `std`."""
    def _init(shape, dtype=None, partition_info=None):
        # Draw a standard-normal matrix, then rescale each column so its
        # Euclidean norm equals `std`.
        sample = np.random.randn(*shape).astype(np.float32)
        column_norms = np.sqrt(np.square(sample).sum(axis=0, keepdims=True))
        return tf.constant(sample * (std / column_norms))
    return _init
def flatten(x):
    """Collapse every dimension of `x` except the leading (batch) one."""
    flat_dim = np.prod(x.get_shape().as_list()[1:])
    return tf.reshape(x, [-1, flat_dim])
def conv2d(x, num_filters, name, filter_size=(3, 3), stride=(1, 1), pad="SAME", dtype=tf.float32, collections=None):
    """2-D convolution layer with a Xavier/Glorot-style uniform weight
    initialization (bound sqrt(6 / (fan_in + fan_out))) and a zero bias.

    Variables are created under variable scope `name`.
    """
    with tf.variable_scope(name):
        stride_shape = [1, stride[0], stride[1], 1]
        filter_shape = [filter_size[0], filter_size[1], int(x.get_shape()[3]), num_filters]
        # there are "num input feature maps * filter height * filter width"
        # inputs to each hidden unit
        fan_in = np.prod(filter_shape[:3])
        # each unit in the lower layer receives a gradient from:
        # "num output feature maps * filter height * filter width" /
        #   pooling size
        fan_out = np.prod(filter_shape[:2]) * num_filters
        # initialize weights with random weights
        w_bound = np.sqrt(6. / (fan_in + fan_out))
        w = tf.get_variable("W", filter_shape, dtype, tf.random_uniform_initializer(-w_bound, w_bound),
                            collections=collections)
        b = tf.get_variable("b", [1, 1, 1, num_filters], initializer=tf.constant_initializer(0.0),
                            collections=collections)
        return tf.nn.conv2d(x, w, stride_shape, pad) + b
def linear(x, size, name, initializer=None, bias_init=0):
    """Fully-connected layer: x @ W + b, with variables created under `name`."""
    weights = tf.get_variable(name + "/w", [x.get_shape()[1], size], initializer=initializer)
    bias = tf.get_variable(name + "/b", [size], initializer=tf.constant_initializer(bias_init))
    return tf.matmul(x, weights) + bias
def categorical_sample(logits, d):
    """Sample one index per row of `logits` and return it one-hot over `d` classes.

    Subtracting the per-row max before tf.multinomial improves numerical
    stability without changing the sampling distribution.
    """
    value = tf.squeeze(tf.multinomial(logits - tf.reduce_max(logits, [1], keep_dims=True), 1), [1])
    return tf.one_hot(value, d)
class LSTMPolicy(object):
    """Actor-critic policy: 4 conv layers -> LSTM -> action logits + value head.

    The LSTM runs over the time dimension of a single rollout, so the batch
    dimension is fixed to 1 and the rollout length acts as the sequence
    length.
    """
    def __init__(self, ob_space, ac_space):
        self.x = x = tf.placeholder(tf.float32, [None] + list(ob_space))
        for i in range(4):
            x = tf.nn.elu(conv2d(x, 32, "l{}".format(i + 1), [3, 3], [2, 2]))
        # introduce a "fake" batch dimension of 1 after flatten so that we can do LSTM over time dim
        x = tf.expand_dims(flatten(x), [0])
        size = 256
        lstm = rnn.rnn_cell.BasicLSTMCell(size, state_is_tuple=True)
        self.state_size = lstm.state_size
        step_size = tf.shape(self.x)[:1]
        # Zero initial LSTM state, plus placeholders to feed a carried state.
        c_init = np.zeros((1, lstm.state_size.c), np.float32)
        h_init = np.zeros((1, lstm.state_size.h), np.float32)
        self.state_init = [c_init, h_init]
        c_in = tf.placeholder(tf.float32, [1, lstm.state_size.c])
        h_in = tf.placeholder(tf.float32, [1, lstm.state_size.h])
        self.state_in = [c_in, h_in]
        state_in = rnn.rnn_cell.LSTMStateTuple(c_in, h_in)
        lstm_outputs, lstm_state = tf.nn.dynamic_rnn(
            lstm, x, initial_state=state_in, sequence_length=step_size,
            time_major=False)
        lstm_c, lstm_h = lstm_state
        x = tf.reshape(lstm_outputs, [-1, size])
        # Policy (logits) and value heads share the LSTM features.
        self.logits = linear(x, ac_space, "action", normalized_columns_initializer(0.01))
        self.vf = tf.reshape(linear(x, 1, "value", normalized_columns_initializer(1.0)), [-1])
        self.state_out = [lstm_c[:1, :], lstm_h[:1, :]]
        self.sample = categorical_sample(self.logits, ac_space)[0, :]
        self.var_list = tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES, tf.get_variable_scope().name)
    def get_initial_features(self):
        """Return the zero LSTM state used at the start of an episode."""
        return self.state_init
    def act(self, ob, c, h):
        """Sample an action for `ob`; returns [action, value, new_c, new_h]."""
        sess = tf.get_default_session()
        return sess.run([self.sample, self.vf] + self.state_out,
                        {self.x: [ob], self.state_in[0]: c, self.state_in[1]: h})
    def value(self, ob, c, h):
        """Return the scalar value estimate for `ob` with LSTM state (c, h)."""
        sess = tf.get_default_session()
        return sess.run(self.vf, {self.x: [ob], self.state_in[0]: c, self.state_in[1]: h})[0]
class AuxLSTMPolicy(object):
    """Actor-critic policy whose LSTM input also includes the previous
    action (one-hot) and previous reward, concatenated to the conv features.
    """
    def __init__(self, ob_space, ac_space):
        self.x = x = tf.placeholder(tf.float32, [None] + list(ob_space))
        # Previous action (one-hot over ac_space) and previous scalar reward.
        self.action = tf.placeholder(tf.float32, [None, ac_space])
        self.reward = tf.placeholder(tf.float32, [None, 1])
        x = tf.nn.relu(conv2d(x, 16, "l1", [8, 8], [4, 4]))
        x = conv_features = tf.nn.relu(conv2d(x, 32, "l2", [4, 4], [2, 2]))
        x = flatten(x)
        x = tf.nn.relu(linear(x, 256, "l3", normalized_columns_initializer(0.1)))
        # Append previous action/reward to the visual features.
        x = tf.concat(concat_dim=1, values=[x, self.action, self.reward])
        # introduce a "fake" batch dimension of 1 after flatten so that we can do LSTM over time dim
        x = tf.expand_dims(x, [0])
        size = 256
        lstm = rnn.rnn_cell.BasicLSTMCell(size, state_is_tuple=True)
        self.state_size = lstm.state_size
        step_size = tf.shape(self.x)[:1]
        c_init = np.zeros((1, lstm.state_size.c), np.float32)
        h_init = np.zeros((1, lstm.state_size.h), np.float32)
        self.state_init = [c_init, h_init]
        c_in = tf.placeholder(tf.float32, [1, lstm.state_size.c])
        h_in = tf.placeholder(tf.float32, [1, lstm.state_size.h])
        self.state_in = [c_in, h_in]
        state_in = rnn.rnn_cell.LSTMStateTuple(c_in, h_in)
        lstm_outputs, lstm_state = tf.nn.dynamic_rnn(
            lstm, x, initial_state=state_in, sequence_length=step_size,
            time_major=False)
        lstm_c, lstm_h = lstm_state
        x = tf.reshape(lstm_outputs, [-1, size])
        self.logits = linear(x, ac_space, "action", normalized_columns_initializer(0.01))
        self.vf = tf.reshape(linear(x, 1, "value", normalized_columns_initializer(1.0)), [-1])
        self.state_out = [lstm_c[:1, :], lstm_h[:1, :]]
        self.sample = categorical_sample(self.logits, ac_space)[0, :]
        self.var_list = tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES, tf.get_variable_scope().name)
    def get_initial_features(self):
        """Return the zero LSTM state used at the start of an episode."""
        return self.state_init
    def act(self, ob, prev_a, prev_r, c, h):
        """Sample an action; returns [action, value, new_c, new_h]."""
        sess = tf.get_default_session()
        return sess.run([self.sample, self.vf] + self.state_out,
                        {self.x: [ob], self.action: [prev_a], self.reward: [[prev_r]],
                         self.state_in[0]: c, self.state_in[1]: h})
    def value(self, ob, prev_a, prev_r, c, h):
        """Return the scalar value estimate for `ob` given the previous
        action/reward and LSTM state (c, h)."""
        sess = tf.get_default_session()
        return sess.run(self.vf, {self.x: [ob], self.action: [prev_a], self.reward: [[prev_r]],
                                  self.state_in[0]: c, self.state_in[1]: h})[0]
| [
"johnholl@umich.edu"
] | johnholl@umich.edu |
3a02692131fd90bae63c99554320c5133209474e | 26126f17914650c004b09013d12866cb4104703e | /Trabalhos/Matheus/ex6.py | 3a64aef85160b5644307109413d78e7785ea36f0 | [] | no_license | rogerroxbr/Treinamento-Analytics | b422c5e20db458186ff2ca475aaea58209b88f17 | a59a1bf0380cb3ce28090330ce293c549a6da5d6 | refs/heads/master | 2023-08-17T01:04:25.700227 | 2021-09-30T11:00:20 | 2021-09-30T11:00:20 | 404,708,898 | 3 | 1 | null | 2021-09-14T11:13:23 | 2021-09-09T12:13:30 | Python | UTF-8 | Python | false | false | 411 | py | typesOfPeople = 10
# String-formatting practice: f-strings, str.format(), and concatenation.
# (`typesOfPeople` is assigned above this block.)
sentence_one = f"There are {typesOfPeople} types of people."
word_binary = "binary"
word_do_not = "don't"
# No space before the placeholder, so this prints "...whodon't." exactly
# as the original did.
sentence_two = f"Those who know {word_binary} and those who{word_do_not}."
print(sentence_one)
print(sentence_two)
print(f"I said: {sentence_one}")
print(f"I said '{sentence_two}'")
is_hilarious = False
joke_evaluation = "isn't that joke so funny?! {}"
print(joke_evaluation.format(is_hilarious))
left_side = "This is the left side of ..."
right_side = "a string with a right side."
print(left_side + right_side)
"matheus.felipe@brf.com"
] | matheus.felipe@brf.com |
8a7c2bf0d5885b48aeeca6bc47e6a22239786f1d | 42844cba683edbc101fb709a91f63a08a6c11ccb | /next_greater_element_i.py | 1d72b36ddf4aeb63fc2e111e294a21b674773ca1 | [] | no_license | DucksOnFlame/LeetCodePy | 89c6e2cd471f4a8441efc7eb7603f8f6ee39b98a | e364742240c426475e8b7c47c69164838b20fc50 | refs/heads/master | 2021-06-28T03:33:34.591693 | 2017-09-16T09:36:45 | 2017-09-16T09:36:45 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 548 | py | class Solution(object):
def nextGreaterElement(self, findNums, nums):
results = []
length = len(nums)
for num in findNums:
index = nums.index(num)
found = False
for i in range(index + 1, length):
if nums[i] > num:
results.append(nums[i])
found = True
break
if not found:
results.append(-1)
return results
print(Solution().nextGreaterElement([4, 1, 2], [1, 3, 4, 2]))
| [
"bartlomiej.styczynski@gmail.com"
] | bartlomiej.styczynski@gmail.com |
16fa09d98ae94c03f350ec91e5c2d5e11ab9d306 | c3066292a71288b0b2597e6cc89000603d16412e | /capstone/activity-5.py | 0fcd5f7a7bdef35b58756937d13a76047a427601 | [] | no_license | valleyjo/cs0008 | db6727f02d7543a047bb522ee34c8be4d2a6715f | fa9b0181b268626250c241c7e4c08c99b4483acf | refs/heads/master | 2021-01-01T18:48:48.868221 | 2014-04-17T19:35:26 | 2014-04-17T19:35:26 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,410 | py | #Email: amv49@pitt.edu
#Name: Alex Vallejo
#ID: 3578411
#Date: 2/19/2014
#Description: This program is the game of craps!
from dice import *
from valid_input import *
def welcome():
    """Greet the player, show the craps rules, and ask how many rounds to play.

    :returns: tuple (user_name, times_to_play)
    """
    user_name = input("Enter your name: "); #Get the user's name
    print("\nWelcome " + user_name + "!"); #Print a nice welcome message
    print("This game of craps was written by Alex Vallejo <amv49@pitt.edu>\n") #Tell 'em who write this!
    print("Instructions:"); #Display the instructions!
    print("A new shooter (player) begins his roll. This is known as the come out " +
          "roll. If the shooter rolls a 7 or 11 you win. If the shooter rolls a 2, " +
          "3 or 12, you lose. If the shooter rolls any other number, that number " +
          "becomes the point number. The shooter must roll that number again before " +
          "a seven is rolled. If that happens, you win. If a seven is rolled before " +
          "the point number is rolled again, you lose. ");
    times_to_play = 0
    # NOTE(review): the initializer above is dead -- get_int overwrites it.
    times_to_play = valid_input.get_int("\nHow many times do you wanna play?: ");
    return user_name, times_to_play;
def main():
    """Play the requested number of craps rounds, announcing each win/loss."""
    user_name, times_to_play = welcome()
    times_played = 0
    # Was `<=`, which played one more round than the user asked for.
    while (times_played < times_to_play):
        game_over = False  # Boolean flag used to keep the game running
        shooter_roll = dice.roll() + dice.roll()
        print("\nShooter rolls: ", shooter_roll)
        # Player wins on a come-out roll of 7 or 11.
        if (shooter_roll == 7 or shooter_roll == 11):
            game_over = True
            print("Congrats, you win!")
        # Player loses on a come-out roll of 2, 3 or 12.
        elif (shooter_roll == 2 or shooter_roll == 3 or shooter_roll == 12):
            game_over = True
            print("Sorry, you lose!")
        # Any other total becomes the point number.
        else:
            point_number = shooter_roll
            print("The point number is: ", point_number)
        # Keep rolling until the point is made (win) or a 7 appears (loss).
        while (not game_over):
            roll = dice.roll() + dice.roll()
            print("Roll: ", roll)
            # If the computer rolls the point number, player wins!
            if (roll == point_number):
                game_over = True
                print("Congrats, you win!")
            # If the computer rolls 7, the computer wins!
            if (roll == 7):
                game_over = True
                print("Sorry, you lose!")
        times_played += 1
    # Print a nice message to thank the user for playing
    print("Thanks for playing", user_name, "!")
main();
| [
"vallejo.alex@gmail.com"
] | vallejo.alex@gmail.com |
99e8f87d77592c1ee10ddceb41fe07f5fd5cd44d | d8d45938c39b9b51a77264eddb77210a4894bfdd | /kml/io.py | 722a1f07eaf5ac99f8596b75578f98234dca1cf7 | [] | no_license | fiveham/map-tools | f331b4059e6608c7131b67ae5b654d412bff0b32 | 469dc6141d1f07f20bfac025f8e365301564dd05 | refs/heads/master | 2020-05-16T11:22:37.325916 | 2019-12-26T18:32:26 | 2019-12-26T18:32:26 | 183,014,239 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 4,852 | py | from bs4.element import CData, NavigableString, Tag
from bs4 import BeautifulSoup
_OPEN = open
def open(filepath, encoding=None):
    """Read `filepath` and parse it as a formatted KML document.

    :param filepath: the name of or relative path to a KML file
    :param encoding: optional character encoding (rarely needed)
    :returns: a formatted KML document (bs4.BeautifulSoup)
    """
    file_obj = _OPEN(filepath, encoding=encoding)
    return formatted(BeautifulSoup(file_obj, 'xml'))
def parse(filetext):
    """Parse `filetext` as a KML document.

    :param filetext: either valid XML or a file-like object
    :returns: a formatted KML document (bs4.BeautifulSoup)
    """
    soup = BeautifulSoup(filetext, 'xml')
    return formatted(soup)
def save(soup, filepath):
    """Save `soup` to a file at `filepath`.

    :param soup: a KML document (bs4.BeautifulSoup)
    :param filepath: the name of the file to save
    :returns: None
    """
    # A context manager guarantees the handle is flushed and closed;
    # the original leaked the file object.
    with _OPEN(filepath, 'w') as out:
        out.write(str(soup))
def format(soup, no_empty=False):
    """Normalize `soup` in place for KML output.

    Four transformations are applied:

    * Pure-whitespace strings are removed and all other strings are
      stripped of leading/trailing whitespace (neither carries meaning
      in a KML document).
    * The "kml" namespace prefix is removed from every tag.  Google Earth
      accepts "kml:"-prefixed files, but the Google Maps JavaScript API's
      KmlLayer rejects them as INVALID_DOCUMENT.
    * A tag's string is wrapped in bs4.element.CData when it contains
      markup characters and the CDATA form is no longer than the
      entity-escaped form, so it serializes as "<![CDATA[...]]>" instead
      of being entity-substituted.
    * Optionally (`no_empty=True`), empty or self-terminating tags --
      which do nothing in KML -- are removed.

    :param soup: a KML document (bs4.BeautifulSoup); modified in place
    :param no_empty: if True, remove empty tags.  Default False.
    :returns: None
    """
    strip = []
    destroy = []
    for e in soup.descendants:
        if isinstance(e, NavigableString):
            if e.isspace():
                destroy.append(e) #remove empty strings
            elif e.strip() != e:
                strip.append(e) #trim trimmable strings
        elif isinstance(e, Tag):
            if e.prefix == "kml":
                e.prefix = None #remove kml: prefixes
            if e.string and e.string.parent is e: #.string works indirectly
                e.string = e.string.strip() #trim some trimmable strings
                if any(c in e.string for c in REPLACE):
                    cdata = CData(e.string)
                    if len(str(cdata)) <= len(_as_html(e.string)):
                        e.string = cdata #use CDATA to wrap HTML
    # Apply the deferred removals/trims after the descendants walk so we
    # don't mutate the tree while iterating it.
    for d in destroy:
        d.extract()
    for s in strip:
        s.replace_with(s.strip())
    if no_empty:
        for tag in soup(lambda thing : isinstance(thing,Tag) and
                        len(list(thing.contents)) == 0):
            tag.decompose()
def formatted(soup, **kwargs):
    """Format `soup` in place and return it (convenience wrapper for `format`).

    :param soup: a KML document (bs4.BeautifulSoup)
    :param no_empty: (optional, default False) remove empty tags if True
    :returns: the same `soup` object, now formatted
    """
    format(soup, **kwargs)
    return soup
# Markup characters mapped to their HTML character entity equivalents.
# The values were evidently mangled to an identity mapping at some point
# (which would make _as_html a no-op and defeat format()'s CDATA-vs-escape
# length comparison); restore the entity strings.
REPLACE = {'<': '&lt;',
           '>': '&gt;',
           '&': '&amp;'}
def _as_html(string):
    """Return a copy of `string` with the REPLACE characters substituted by
    their HTML character entity equivalents.

    Uses a single-pass str.translate instead of sequential str.replace
    calls: replacing characters one mapping at a time re-escapes the
    ampersands introduced by earlier substitutions whenever '&' is not the
    first key processed (e.g. '<' -> '&lt;' -> '&amp;lt;').

    :param string: a string
    :returns: a string where certain chars are replaced by html entity codes
    """
    return string.translate(str.maketrans(REPLACE))
| [
"noreply@github.com"
] | fiveham.noreply@github.com |
588e197a43161c84c3ccafbfff2892dc41deacec | 9a85c309adab7bd0c13986a5ddb7ebc1136fc5b9 | /globals.py | cdada65722203ad5eb77324e56fa3506ae448d6e | [] | no_license | ivanovsaleksejs/leds_rpi | f32c4541b7d83c3e78ab6e28bc79046f5998c63c | 19f8aec4141b3266a4eccfd8017b63f32269bed6 | refs/heads/master | 2020-03-27T14:14:34.304980 | 2019-07-02T12:21:15 | 2019-07-02T12:21:15 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 556 | py | import json
# Returns public config
def dumpconf(config):
    """Serialize the "public" config section to a JSON string.

    NOTE(review): the `config` argument is ignored -- the configuration is
    re-read from disk via readConf() on every call.  Confirm whether
    callers rely on always getting the on-disk state.
    """
    return json.dumps(readConf()[0])
def readConf():
    """Load config.json and split it into its public and private sections.

    :returns: tuple ``(config, secrets)`` -- the "public" and "private"
        sections of config.json, respectively.
    """
    # A context manager guarantees the file is closed even if parsing
    # raises; the original left the handle open on a JSON error.  The
    # original's newline-stripping before parsing was redundant: json.load
    # accepts newlines as whitespace.
    with open('config.json', 'r') as config_file:
        whole = json.load(config_file)
    return (whole["public"], whole["private"])
# While this flag is True the redraw thread keeps running; the thread
# terminates once it is set to False.
redraw_active = True
# Additive per-frame delay used to tune the redraw rate.
# NOTE(review): the unit is not established here -- confirm at the usage site.
frameTime = 0
| [
"ivanovs.aleksejs@gmail.com"
] | ivanovs.aleksejs@gmail.com |
2f211ee9858ffddacd1a6b995f06cd8455450b80 | 4d9ce4ab1f0ce0a857f215edc2ffc99ce3b82623 | /tfx/orchestration/experimental/core/mlmd_state_test.py | 6faacc6cc12f8ce1e987bfdbb57b7de35f8efd41 | [
"Apache-2.0"
] | permissive | vpipkt/tfx | 448fd85a177f7e3a3a6dacf262eb0c93f459f534 | 42f4f4095ff3c3e23fe2ac1076c9a0fdfc631d23 | refs/heads/master | 2023-06-20T12:27:56.083959 | 2021-05-25T18:31:23 | 2021-05-25T18:33:12 | 370,820,614 | 0 | 0 | Apache-2.0 | 2021-05-25T20:31:22 | 2021-05-25T20:31:22 | null | UTF-8 | Python | false | false | 2,934 | py | # Copyright 2021 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for tfx.orchestration.experimental.core.mlmd_state."""
import os
import tensorflow as tf
from tfx.orchestration import metadata
from tfx.orchestration.experimental.core import mlmd_state
from tfx.orchestration.experimental.core import test_utils
from ml_metadata.proto import metadata_store_pb2
def _write_test_execution(mlmd_handle):
  """Registers a 'foo' execution type and writes one execution of it to MLMD.

  Returns the execution re-read from the store so it carries all
  store-assigned fields.
  """
  execution_type = metadata_store_pb2.ExecutionType(name='foo', version='bar')
  execution_type_id = mlmd_handle.store.put_execution_type(execution_type)
  [execution_id] = mlmd_handle.store.put_executions(
      [metadata_store_pb2.Execution(type_id=execution_type_id)])
  [execution] = mlmd_handle.store.get_executions_by_id([execution_id])
  return execution
class MlmdStateTest(test_utils.TfxTest):
  """Tests for mlmd_state.mlmd_execution_atomic_op against a SQLite MLMD."""
  def setUp(self):
    super().setUp()
    # Back the MLMD connection with a per-test SQLite database file.
    pipeline_root = os.path.join(
        os.environ.get('TEST_UNDECLARED_OUTPUTS_DIR', self.get_temp_dir()),
        self.id())
    metadata_path = os.path.join(pipeline_root, 'metadata', 'metadata.db')
    connection_config = metadata.sqlite_metadata_connection_config(
        metadata_path)
    connection_config.sqlite.SetInParent()
    self._mlmd_connection = metadata.Metadata(
        connection_config=connection_config)
  def test_mlmd_execution_update(self):
    """Mutations inside the atomic op are committed and cached consistently."""
    with self._mlmd_connection as m:
      expected_execution = _write_test_execution(m)
      # Mutate execution.
      with mlmd_state.mlmd_execution_atomic_op(
          m, expected_execution.id) as execution:
        self.assertEqual(expected_execution, execution)
        execution.last_known_state = metadata_store_pb2.Execution.CANCELED
      # Test that updated execution is committed to MLMD.
      [execution] = m.store.get_executions_by_id([execution.id])
      self.assertEqual(metadata_store_pb2.Execution.CANCELED,
                       execution.last_known_state)
      # Test that in-memory state is also in sync.
      with mlmd_state.mlmd_execution_atomic_op(
          m, expected_execution.id) as execution:
        self.assertEqual(metadata_store_pb2.Execution.CANCELED,
                         execution.last_known_state)
  def test_mlmd_execution_absent(self):
    """The atomic op yields None for an execution id that does not exist."""
    with self._mlmd_connection as m:
      with mlmd_state.mlmd_execution_atomic_op(m, 1) as execution:
        self.assertIsNone(execution)
self.assertIsNone(execution)
if __name__ == '__main__':
  # Delegate to the TF test runner so absl/TF flags are parsed correctly.
  tf.test.main()
| [
"tensorflow-extended-nonhuman@googlegroups.com"
] | tensorflow-extended-nonhuman@googlegroups.com |
07369c480633eed0c086cb3990217e6ff4a0c039 | 12ce75fc994395c9eb54c6fe30c0fffc6ee19ee1 | /Algorithms/Implementation/beautiful-days-at-the-movies.py | 89e6156da316e89614692a55390bac58509d5cba | [] | no_license | RobinDeHerdt/HackerRank | aeb8c1f080b9d8a116f66a0fffb6fbdfd4f79076 | b7ce29783845d0edd83e7e196ffe599143005a5d | refs/heads/master | 2021-07-10T13:55:34.099852 | 2020-06-13T13:51:38 | 2020-06-13T13:51:38 | 132,801,390 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 590 | py | #!/bin/python3
import os
def beautifulDays(i, j, k):
total_amount = 0
for date in range(i, j):
reversed_date = str(date)
reversed_date = int(reversed_date[::-1])
if (date - reversed_date) % k == 0:
# Date is considered beautiful
total_amount += 1
return total_amount
if __name__ == '__main__':
    # Write the answer to the path HackerRank provides via OUTPUT_PATH.
    fptr = open(os.environ['OUTPUT_PATH'], 'w')

    tokens = input().split()
    i, j, k = int(tokens[0]), int(tokens[1]), int(tokens[2])

    fptr.write(str(beautifulDays(i, j, k)) + '\n')
    fptr.close()
| [
"robindh95@gmail.com"
] | robindh95@gmail.com |
12b8b281870c0126256289845c95a9ce92329691 | 40b977d7657bc735f3705efd790b45d95130e8d5 | /week16homeworkbeyza.py | 82c96065b12fa8cd3e8308f7f651917f223e5aec | [] | no_license | beizaa/16.Hafta-Odevler | 8449f6e8b38a8d3ea3d342b085497e21a406468d | dedaa91aee2f51b0d3e4a87219f012ee3331532c | refs/heads/master | 2020-08-05T19:44:19.031904 | 2019-10-03T20:23:08 | 2019-10-03T20:23:08 | 212,682,277 | 0 | 0 | null | 2019-10-03T21:18:47 | 2019-10-03T21:18:47 | null | UTF-8 | Python | false | false | 2,233 | py | ###################SCHERLOCK'S MATH
def squares():
q = int(input()) #burada birden fazla line icin nasil input alacagim
for q_itr in range(q):
ab = list(map(int, input().split())) #araligin basi a, sonu b olmali so b+1 yapiyoruz asagida
a = ab[0]
b = ab[1]
mylist = [i for i in range(b+1) for j in range(a, (b+1)) if i ** 2 == j]
print(len(mylist))
squares()
# Correct and working, but it exceeded the judge's time limit.
# At first I tried going from square roots instead of checking which number
# is whose square, but then I didn't know a function to test whether the
# result is an integer: math.sqrt always returns a float, so isinteger-style
# checks don't apply, and casting to int would map both 4.0 and 4.5 to 4.
# That is why the squaring approach seemed more sensible.
#########################APPENDDELETE
def appendAndDelete(s, t, k):
    """Decide whether s can be turned into t in exactly k operations.

    An operation either deletes the last character of the working string
    (deleting from an empty string is allowed) or appends one character.
    Prints and returns 'Yes' or 'No'.

    The original version ignored its parameters (it re-read them from
    stdin), could print 'Yes' twice, and returned nothing.

    Args:
        s: Starting string.
        t: Target string.
        k: Exact number of operations that must be performed.

    Returns:
        The printed answer, 'Yes' or 'No'.
    """
    # Characters in the common prefix never need to be touched.
    common = 0
    for a, b in zip(s, t):
        if a != b:
            break
        common += 1

    # Minimum operations: delete s's tail past the prefix, append t's tail.
    base = (len(s) - common) + (len(t) - common)

    if k >= len(s) + len(t):
        # Enough moves to wipe s entirely (extra deletes on the empty
        # string are legal no-ops) and then type out t.
        answer = 'Yes'
    elif base <= k and (k - base) % 2 == 0:
        # Spare moves must come in delete+append pairs to end on t exactly.
        answer = 'Yes'
    else:
        answer = 'No'

    print(answer)
    return answer


if __name__ == '__main__':
    appendAndDelete('ayse', 'fatma', 5)
# In IDLE this prints both Yes and No, and HackerRank reports an error;
# I haven't figured out the "No" case yet -- will fix it, hopefully.
##################SOCK MERCHANT
def sockMerchant():
n = input() #numb of socks in th epile
ar = list(map(int, input().split())) #colors of each sock
#n space seperated integers describing colors of socks
remainders=[i for i in set(ar) if ar.count(i)%2 ==1] #her elemani br kere count edelim diye
#ar setindeki her bir elemani ar listesinde say demek istiyorum
print(int((len(ar)-len(remainders))/2))
sockMerchant() #print the pairs of socks
# It works this way, but it breaks when the parameters are passed inside the
# parentheses. Hello there: when I pass n and ar to the function as requested,
# it keeps raising an error for a reason I don't understand, and passing
# parameters in the call below also fails in a very odd way.
| [
"beyzaydin007@gmail.com"
] | beyzaydin007@gmail.com |
61568db31e9d7b2d8fa0d2c395d9da0c6d81ca53 | f4b8c90c1349c8740c1805f7b6b0e15eb5db7f41 | /starrez_client/models/transaction_dispute_item.py | d514f910513b38e744435d4c97d3d923c2655c8b | [] | no_license | CalPolyResDev/StarRezAPI | 012fb8351159f96a81352d6c7bfa36cd2d7df13c | b184e1863c37ff4fcf7a05509ad8ea8ba825b367 | refs/heads/master | 2021-01-25T10:29:37.966602 | 2018-03-15T01:01:35 | 2018-03-15T01:01:35 | 123,355,501 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 9,339 | py | # coding: utf-8
"""
StarRez API
This is a way to connect with the StarRez API. We are not the developers of the StarRez API, we are just an organization that uses it and wanted a better way to connect to it. # noqa: E501
OpenAPI spec version: 1.0.0
Contact: resdev@calpoly.edu
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
class TransactionDisputeItem(object):
    """NOTE: This class is auto generated by the swagger code generator program.

    Do not edit the class manually.
    """
    """
    Attributes:
      swagger_types (dict): The key is attribute name
                            and the value is attribute type.
      attribute_map (dict): The key is attribute name
                            and the value is json key in definition.
    """
    # NOTE(review): date_modified is declared 'str' while date_created is
    # 'datetime' -- this matches the generator output; confirm against the
    # StarRez API spec before changing.
    swagger_types = {
        'transaction_dispute_id': 'int',
        'entry_id': 'int',
        'transaction_dispute_status_enum': 'str',
        'date_created': 'datetime',
        'security_user_id': 'int',
        'created_by_security_user_id': 'int',
        'date_modified': 'str'
    }

    # Maps python attribute names to the JSON keys used on the wire.
    attribute_map = {
        'transaction_dispute_id': 'TransactionDisputeID',
        'entry_id': 'EntryID',
        'transaction_dispute_status_enum': 'TransactionDisputeStatusEnum',
        'date_created': 'DateCreated',
        'security_user_id': 'SecurityUserID',
        'created_by_security_user_id': 'CreatedBy_SecurityUserID',
        'date_modified': 'DateModified'
    }

    def __init__(self, transaction_dispute_id=None, entry_id=None, transaction_dispute_status_enum=None, date_created=None, security_user_id=None, created_by_security_user_id=None, date_modified=None):  # noqa: E501
        """TransactionDisputeItem - a model defined in Swagger"""  # noqa: E501
        # All fields are optional; only non-None arguments are assigned so
        # the property setters are not invoked with missing values.
        self._transaction_dispute_id = None
        self._entry_id = None
        self._transaction_dispute_status_enum = None
        self._date_created = None
        self._security_user_id = None
        self._created_by_security_user_id = None
        self._date_modified = None
        self.discriminator = None
        if transaction_dispute_id is not None:
            self.transaction_dispute_id = transaction_dispute_id
        if entry_id is not None:
            self.entry_id = entry_id
        if transaction_dispute_status_enum is not None:
            self.transaction_dispute_status_enum = transaction_dispute_status_enum
        if date_created is not None:
            self.date_created = date_created
        if security_user_id is not None:
            self.security_user_id = security_user_id
        if created_by_security_user_id is not None:
            self.created_by_security_user_id = created_by_security_user_id
        if date_modified is not None:
            self.date_modified = date_modified

    @property
    def transaction_dispute_id(self):
        """Gets the transaction_dispute_id of this TransactionDisputeItem.  # noqa: E501

        Transaction Dispute  # noqa: E501

        :return: The transaction_dispute_id of this TransactionDisputeItem.  # noqa: E501
        :rtype: int
        """
        return self._transaction_dispute_id

    @transaction_dispute_id.setter
    def transaction_dispute_id(self, transaction_dispute_id):
        """Sets the transaction_dispute_id of this TransactionDisputeItem.

        Transaction Dispute  # noqa: E501

        :param transaction_dispute_id: The transaction_dispute_id of this TransactionDisputeItem.  # noqa: E501
        :type: int
        """
        self._transaction_dispute_id = transaction_dispute_id

    @property
    def entry_id(self):
        """Gets the entry_id of this TransactionDisputeItem.  # noqa: E501

        Entry  # noqa: E501

        :return: The entry_id of this TransactionDisputeItem.  # noqa: E501
        :rtype: int
        """
        return self._entry_id

    @entry_id.setter
    def entry_id(self, entry_id):
        """Sets the entry_id of this TransactionDisputeItem.

        Entry  # noqa: E501

        :param entry_id: The entry_id of this TransactionDisputeItem.  # noqa: E501
        :type: int
        """
        self._entry_id = entry_id

    @property
    def transaction_dispute_status_enum(self):
        """Gets the transaction_dispute_status_enum of this TransactionDisputeItem.  # noqa: E501

        Transaction Dispute Status  # noqa: E501

        :return: The transaction_dispute_status_enum of this TransactionDisputeItem.  # noqa: E501
        :rtype: str
        """
        return self._transaction_dispute_status_enum

    @transaction_dispute_status_enum.setter
    def transaction_dispute_status_enum(self, transaction_dispute_status_enum):
        """Sets the transaction_dispute_status_enum of this TransactionDisputeItem.

        Transaction Dispute Status  # noqa: E501

        :param transaction_dispute_status_enum: The transaction_dispute_status_enum of this TransactionDisputeItem.  # noqa: E501
        :type: str
        """
        self._transaction_dispute_status_enum = transaction_dispute_status_enum

    @property
    def date_created(self):
        """Gets the date_created of this TransactionDisputeItem.  # noqa: E501

        Date Created  # noqa: E501

        :return: The date_created of this TransactionDisputeItem.  # noqa: E501
        :rtype: datetime
        """
        return self._date_created

    @date_created.setter
    def date_created(self, date_created):
        """Sets the date_created of this TransactionDisputeItem.

        Date Created  # noqa: E501

        :param date_created: The date_created of this TransactionDisputeItem.  # noqa: E501
        :type: datetime
        """
        self._date_created = date_created

    @property
    def security_user_id(self):
        """Gets the security_user_id of this TransactionDisputeItem.  # noqa: E501

        Security User  # noqa: E501

        :return: The security_user_id of this TransactionDisputeItem.  # noqa: E501
        :rtype: int
        """
        return self._security_user_id

    @security_user_id.setter
    def security_user_id(self, security_user_id):
        """Sets the security_user_id of this TransactionDisputeItem.

        Security User  # noqa: E501

        :param security_user_id: The security_user_id of this TransactionDisputeItem.  # noqa: E501
        :type: int
        """
        self._security_user_id = security_user_id

    @property
    def created_by_security_user_id(self):
        """Gets the created_by_security_user_id of this TransactionDisputeItem.  # noqa: E501

        Created By Security User  # noqa: E501

        :return: The created_by_security_user_id of this TransactionDisputeItem.  # noqa: E501
        :rtype: int
        """
        return self._created_by_security_user_id

    @created_by_security_user_id.setter
    def created_by_security_user_id(self, created_by_security_user_id):
        """Sets the created_by_security_user_id of this TransactionDisputeItem.

        Created By Security User  # noqa: E501

        :param created_by_security_user_id: The created_by_security_user_id of this TransactionDisputeItem.  # noqa: E501
        :type: int
        """
        self._created_by_security_user_id = created_by_security_user_id

    @property
    def date_modified(self):
        """Gets the date_modified of this TransactionDisputeItem.  # noqa: E501

        Date Modified  # noqa: E501

        :return: The date_modified of this TransactionDisputeItem.  # noqa: E501
        :rtype: str
        """
        return self._date_modified

    @date_modified.setter
    def date_modified(self, date_modified):
        """Sets the date_modified of this TransactionDisputeItem.

        Date Modified  # noqa: E501

        :param date_modified: The date_modified of this TransactionDisputeItem.  # noqa: E501
        :type: str
        """
        self._date_modified = date_modified

    def to_dict(self):
        """Returns the model properties as a dict"""
        result = {}

        # Recursively convert nested models, lists and dicts so the result
        # contains only plain python values.
        for attr, _ in six.iteritems(self.swagger_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else item,
                    value.items()
                ))
            else:
                result[attr] = value

        return result

    def to_str(self):
        """Returns the string representation of the model"""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`"""
        return self.to_str()

    def __eq__(self, other):
        """Returns true if both objects are equal"""
        # Generated code compares whole __dict__s; other types are unequal
        # (returns False rather than NotImplemented).
        if not isinstance(other, TransactionDisputeItem):
            return False

        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        return not self == other
| [
"fedorareis@gmail.com"
] | fedorareis@gmail.com |
91ce98caf5eb1d00a26968473e60714c1ab4630c | 19125a1bd5b1202ebafe3a26bf4b26de67d824f3 | /greenstreet/utils/sun.py | 943f71c01c0fe7009381d08c17fc983718f60586 | [] | no_license | qubixes/streetview-greenery | 06e1349172a3097628aa55e1d8088ef222f67552 | 0ddc40b646020fcf5c6d0becf433690b41f776a6 | refs/heads/master | 2021-08-16T21:40:18.881801 | 2021-07-12T10:37:03 | 2021-07-12T10:37:03 | 184,270,288 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,692 | py | #!/usr/bin/env python
import math
from datetime import datetime
from dateutil import tz
def degree_to_meter(avg_lat):
    """Meters per degree of latitude and longitude around ``avg_lat``.

    Returns:
        (lat_fac, long_fac): multiply a latitude difference in degrees by
        ``lat_fac``, and a longitude difference by ``long_fac``, to obtain
        meters.
    """
    R_earth = 6356e3  # polar radius [meters]
    # One degree of latitude spans a near-constant arc; one degree of
    # longitude shrinks with cos(latitude). The original assignment had the
    # two factors swapped, distorting distances away from the equator.
    lat_fac = math.pi / 180 * R_earth
    long_fac = math.pi * math.cos(math.pi * avg_lat / 180.0) / 180 * R_earth
    return (lat_fac, long_fac)


def fast_coor_to_dist(lat_1, long_1, lat_2, long_2):
    """Equirectangular approximation of the distance (meters) between two
    (lat, long) points; accurate for small separations."""
    lat_fac, long_fac = degree_to_meter((lat_1 + lat_2) / 2)
    dist = math.sqrt(((lat_1 - lat_2) * lat_fac) ** 2 +
                     ((long_1 - long_2) * long_fac) ** 2)
    return dist
class Sun:
    """Sunrise/sunset calculator (classic Ed Williams / NOAA-style formula).

    All computations are in UTC; `timeToDawnDusk` converts to a local zone.
    """

    def getSunriseTime(self, coords):
        """Sunrise time (UTC) for today at `coords`.

        Fix: this used to call self.calcSunTime(coords, True), a method that
        does not exist anywhere in the class, so it always raised
        AttributeError. It now delegates to sunTimeUTC.
        """
        return self.sunTimeUTC(coords=coords, isRiseTime=True)

    def getSunsetTime(self, coords):
        """Sunset time (UTC) for today at `coords` (same fix as above)."""
        return self.sunTimeUTC(coords=coords, isRiseTime=False)

    def getCurrentUTC(self):
        """Return today's [day, month, year] in UTC.

        Fix: previously used datetime.now() (local time) despite the name.
        """
        now = datetime.utcnow()
        return [now.day, now.month, now.year]

    def timeToDawnDusk(self, dt, time_zone='UTC', **kwargs):
        """Hours (float) from `dt` to the nearer of sunrise or sunset,
        with `dt` interpreted in `time_zone`."""
        sun_rise_set = self.sunRiseSetUTC(dt=dt, **kwargs)
        sunrise = sun_rise_set["sunrise"]
        sunset = sun_rise_set["sunset"]
        sr_dt = dt.replace(hour=sunrise["hour"], minute=sunrise["minute"])
        ss_dt = dt.replace(hour=sunset["hour"], minute=sunset["minute"])

        utc_zone = tz.gettz('UTC')
        local_zone = tz.gettz(time_zone)
        sr_dt = sr_dt.replace(tzinfo=utc_zone)
        ss_dt = ss_dt.replace(tzinfo=utc_zone)
        sr_dt = sr_dt.astimezone(local_zone)
        ss_dt = ss_dt.astimezone(local_zone)

        after_sunrise = dt.hour - sr_dt.hour + (dt.minute - sr_dt.minute) / 60.0
        before_sunset = ss_dt.hour - dt.hour + (ss_dt.minute - dt.minute) / 60.0
        return min(after_sunrise, before_sunset)

    def sunRiseSetUTC(self, **kwargs):
        """Both sunrise and sunset (UTC) as {'sunrise': ..., 'sunset': ...}."""
        sunrise = self.sunTimeUTC(isRiseTime=True, **kwargs)
        sunset = self.sunTimeUTC(isRiseTime=False, **kwargs)
        return {
            "sunrise": sunrise,
            "sunset": sunset,
        }

    def sunTimeUTC(self, coords=None, latitude=None, longitude=None, dt=None,
                   isRiseTime=True, zenith=90.8):
        """Sunrise/sunset time (UTC) for a day at the given coordinates.

        Args:
            coords: Optional {'latitude': ..., 'longitude': ...} dict; takes
                precedence over the separate latitude/longitude arguments.
            dt: Date to compute for (defaults to today, local clock).
            isRiseTime: True for sunrise, False for sunset.
            zenith: Solar zenith angle in degrees (90.8 = official rise/set).

        Returns:
            {'hour': h, 'minute': m} in UTC, or a {'status': False, 'msg': ...}
            dict when the sun never rises/sets there on that date.
        """
        if dt is None:
            dt = datetime.now()
        day, month, year = (dt.day, dt.month, dt.year)

        if coords is not None:
            longitude = coords['longitude']
            latitude = coords['latitude']
        elif latitude is None or longitude is None:
            raise ValueError(
                "Error: give coordinate for sunrise/set calculation.")

        TO_RAD = math.pi / 180

        # 1. first calculate the day of the year
        N1 = math.floor(275 * month / 9)
        N2 = math.floor((month + 9) / 12)
        N3 = (1 + math.floor((year - 4 * math.floor(year / 4) + 2) / 3))
        N = N1 - (N2 * N3) + day - 30

        # 2. convert the longitude to hours and calculate an approximate time
        lngHour = longitude / 15

        if isRiseTime:
            t = N + ((6 - lngHour) / 24)
        else:  # sunset
            t = N + ((18 - lngHour) / 24)

        # 3. calculate the Sun's mean anomaly
        M = (0.9856 * t) - 3.289

        # 4. calculate the Sun's true longitude, adjusted into [0, 360)
        L = M + (1.916 * math.sin(TO_RAD * M))
        L += (0.020 * math.sin(TO_RAD * 2 * M)) + 282.634
        L = self.forceRange(L, 360)

        # 5a. calculate the Sun's right ascension, adjusted into [0, 360)
        RA = (1 / TO_RAD) * math.atan(0.91764 * math.tan(TO_RAD * L))
        RA = self.forceRange(RA, 360)

        # 5b. right ascension value needs to be in the same quadrant as L
        Lquadrant = (math.floor(L / 90)) * 90
        RAquadrant = (math.floor(RA / 90)) * 90
        RA = RA + (Lquadrant - RAquadrant)

        # 5c. right ascension value needs to be converted into hours
        RA = RA / 15

        # 6. calculate the Sun's declination
        sinDec = 0.39782 * math.sin(TO_RAD * L)
        cosDec = math.cos(math.asin(sinDec))

        # 7a. calculate the Sun's local hour angle
        cosH = (math.cos(TO_RAD * zenith) -
                (sinDec * math.sin(TO_RAD * latitude))) / \
               (cosDec * math.cos(TO_RAD * latitude))

        if cosH > 1:
            return {'status': False,
                    'msg': 'the sun never rises on this location (on the specified date)'}

        if cosH < -1:
            return {'status': False,
                    'msg': 'the sun never sets on this location (on the specified date)'}

        # 7b. finish calculating H and convert into hours
        if isRiseTime:
            H = 360 - (1 / TO_RAD) * math.acos(cosH)
        else:  # setting
            H = (1 / TO_RAD) * math.acos(cosH)
        H = H / 15

        # 8. calculate local mean time of rising/setting
        T = H + RA - (0.06571 * t) - 6.622

        # 9. adjust back to UTC, in decimal hours (e.g. 23.23)
        UT = T - lngHour
        UT = self.forceRange(UT, 24)

        # 10. split the decimal hours into hour/minute, carrying overflow
        minute = int(round((UT - int(UT)) * 60, 0) + 0.5) % 60
        carry = int(round((UT - int(UT)) * 60, 0) + 0.5) // 60
        hr = self.forceRange(int(UT) + carry, 24)

        return {
            'hour': hr,
            'minute': minute
        }

    def forceRange(self, v, maxim):
        # force v to be >= 0 and < max (single wrap only)
        if v < 0:
            return v + maxim
        elif v >= maxim:
            return v - maxim

        return v
if __name__ == "__main__":
    # Smoke test: hours to the nearer of dawn/dusk at a fixed NL location.
    coordinates = {
        'latitude': 52.106175,
        'longitude': 5.177329,
    }
    print(Sun().timeToDawnDusk(datetime.now(), coords=coordinates))
"noreply@github.com"
] | qubixes.noreply@github.com |
ebe1d1196f0d572fd31fdb6a921ed3cd5958e996 | a8b0b3c20ea24000dd6820e74808d4ae939ea5b1 | /source/accounts/models.py | 5b55a9687c456144da5a63e7807f6deeb16446fc | [] | no_license | azamat3522/exam-8 | 8ac2ad82e5601d99859e6843e19e69c1ff64df1e | 71d128bbaec02d886f0300de4fa8cf8f32c82902 | refs/heads/master | 2023-04-28T21:46:15.533595 | 2019-11-16T13:21:54 | 2019-11-16T13:21:54 | 222,100,891 | 0 | 0 | null | 2023-04-21T20:40:48 | 2019-11-16T13:08:26 | Python | UTF-8 | Python | false | false | 623 | py | from django.contrib.auth.models import User
from django.db import models
class Profile(models.Model):
    """Extra per-user data attached one-to-one to Django's built-in User."""

    # Deleting the User cascades to the profile; reachable as user.profile.
    user = models.OneToOneField(User, related_name='profile', on_delete=models.CASCADE, verbose_name='Пользователь')
    # Both optional fields may be left empty in forms (blank) and in the
    # database (null).
    birth_date = models.DateField(null=True, blank=True, verbose_name='Дата рождения')
    avatar = models.ImageField(null=True, blank=True, upload_to='user_pics', verbose_name='Аватар')

    def __str__(self):
        # NOTE(review): a user with no first/last name renders as
        # "'s Profile" -- confirm whether a username fallback is wanted.
        return self.user.get_full_name() + "'s Profile"

    class Meta:
        verbose_name = 'Профиль'
        verbose_name_plural = 'Профили'
| [
"User@Users-MacBook-Pro.local"
] | User@Users-MacBook-Pro.local |
f7d962573d6c4eeb3ac79b56b3303e17fe27a433 | 8afb5afd38548c631f6f9536846039ef6cb297b9 | /_VSCODE-extensions/vscode-python/pythonFiles/runJediLanguageServer.py | a473bf76b3a84a8c79ff0f8fd1ea6b94dcf2f432 | [
"MIT"
] | permissive | bgoonz/UsefulResourceRepo2.0 | d87588ffd668bb498f7787b896cc7b20d83ce0ad | 2cb4b45dd14a230aa0e800042e893f8dfb23beda | refs/heads/master | 2023-03-17T01:22:05.254751 | 2022-08-11T03:18:22 | 2022-08-11T03:18:22 | 382,628,698 | 10 | 12 | MIT | 2022-10-10T14:13:54 | 2021-07-03T13:58:52 | null | UTF-8 | Python | false | false | 451 | py | import re
import sys
import os

# Add the lib path to our sys path so jedi_language_server can find its references
# (EXTENSION_ROOT is two directories above this file).
EXTENSION_ROOT = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
sys.path.append(os.path.join(EXTENSION_ROOT, "pythonFiles", "lib", "python"))

# Import must come after the sys.path mutation above.
from jedi_language_server.cli import cli

# Trick language server into thinking it started from 'jedi-language-server.exe'
sys.argv[0] = "jedi-language-server.exe"
# cli() returns the server's exit status; propagate it as our own.
sys.exit(cli())
| [
"bryan.guner@gmail.com"
] | bryan.guner@gmail.com |
ed59af213a3ddd53d04f70db62293d2e5eec2275 | 058aaa6d6ac3b03cf30fb5fccf443ef16aa90453 | /filter.py | ff6ad73fefa0da9d2beb874309b18aeb14acaf77 | [] | no_license | hadyer2/fasttext_embed | 74cd1a78720d37b273aa05151752b77991c4bd54 | c374b928d9e4ee234b09fea1c4f8db932273f8ac | refs/heads/main | 2023-01-09T12:44:04.068564 | 2020-11-05T02:47:30 | 2020-11-05T02:47:30 | 310,170,071 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 518 | py | import sys
import os
import json
input_folder = sys.argv[1]
output_file = sys.argv[2]
output_list = []
for file in os.listdir(input_folder):
file_contents = open(input_folder+'/'+file).read()
print(file)
tweet_list = json.loads(file_contents)
for tweet in tweet_list:
try:
if tweet['data']['lang'] == 'en':
output_list.append(tweet['data']['text'])
except:
print(tweet)
with open(output_file, 'w+') as of:
of.write(json.dumps(output_list))
| [
"noreply@github.com"
] | hadyer2.noreply@github.com |
0943ae201a1571e0b8c8803d9ed60f43beef1bc7 | 0b358a0d64eb03655c030b36c0ae87880b153951 | /mmdet/models/dense_heads/corner_head.py | 327094bad674975cefd305d5ab08d6505ed45dca | [] | permissive | jshilong/DDQ | db05ff309d63316c62faa59b28c66d65eef973d1 | de9331e4579aaafab4d69e3a9a3c6638efc5392c | refs/heads/main | 2023-06-03T15:02:09.949907 | 2023-05-24T03:32:12 | 2023-05-24T03:32:12 | 498,974,099 | 199 | 6 | Apache-2.0 | 2022-06-02T05:01:53 | 2022-06-02T03:10:25 | null | UTF-8 | Python | false | false | 48,420 | py | # Copyright (c) OpenMMLab. All rights reserved.
from logging import warning
from math import ceil, log
import torch
import torch.nn as nn
from mmcv.cnn import ConvModule, bias_init_with_prob
from mmcv.ops import CornerPool, batched_nms
from mmcv.runner import BaseModule
from mmdet.core import multi_apply
from ..builder import HEADS, build_loss
from ..utils import gaussian_radius, gen_gaussian_target
from ..utils.gaussian_target import (gather_feat, get_local_maximum,
get_topk_from_heatmap,
transpose_and_gather_feat)
from .base_dense_head import BaseDenseHead
from .dense_test_mixins import BBoxTestMixin
class BiCornerPool(BaseModule):
    """Bidirectional Corner Pooling Module (TopLeft, BottomRight, etc.)

    Args:
        in_channels (int): Input channels of module.
        out_channels (int): Output channels of module.
        feat_channels (int): Feature channels of module.
        directions (list[str]): Directions of two CornerPools.
        norm_cfg (dict): Dictionary to construct and config norm layer.
        init_cfg (dict or list[dict], optional): Initialization config dict.
            Default: None
    """

    def __init__(self,
                 in_channels,
                 directions,
                 feat_channels=128,
                 out_channels=128,
                 norm_cfg=dict(type='BN', requires_grad=True),
                 init_cfg=None):
        super(BiCornerPool, self).__init__(init_cfg)
        # One 3x3 conv feeding each of the two directional corner pools.
        self.direction1_conv = ConvModule(
            in_channels, feat_channels, 3, padding=1, norm_cfg=norm_cfg)
        self.direction2_conv = ConvModule(
            in_channels, feat_channels, 3, padding=1, norm_cfg=norm_cfg)

        # Fuses the two pooled maps; no activation here because the result
        # is summed with the shortcut below before the ReLU.
        self.aftpool_conv = ConvModule(
            feat_channels,
            out_channels,
            3,
            padding=1,
            norm_cfg=norm_cfg,
            act_cfg=None)

        # 1x1 shortcut projection of the raw input (residual-style branch).
        self.conv1 = ConvModule(
            in_channels, out_channels, 1, norm_cfg=norm_cfg, act_cfg=None)
        # NOTE(review): conv2 is declared with in_channels but consumes the
        # out_channels-wide fused map in forward(); this only works because
        # callers pass in_channels == out_channels -- confirm.
        self.conv2 = ConvModule(
            in_channels, out_channels, 3, padding=1, norm_cfg=norm_cfg)

        self.direction1_pool = CornerPool(directions[0])
        self.direction2_pool = CornerPool(directions[1])
        self.relu = nn.ReLU(inplace=True)

    def forward(self, x):
        """Forward features from the upstream network.

        Args:
            x (tensor): Input feature of BiCornerPool.

        Returns:
            conv2 (tensor): Output feature of BiCornerPool.
        """
        direction1_conv = self.direction1_conv(x)
        direction2_conv = self.direction2_conv(x)
        direction1_feat = self.direction1_pool(direction1_conv)
        direction2_feat = self.direction2_pool(direction2_conv)
        # Pooled features are fused, then combined with the 1x1 shortcut.
        aftpool_conv = self.aftpool_conv(direction1_feat + direction2_feat)
        conv1 = self.conv1(x)
        relu = self.relu(aftpool_conv + conv1)
        conv2 = self.conv2(relu)
        return conv2
@HEADS.register_module()
class CornerHead(BaseDenseHead, BBoxTestMixin):
"""Head of CornerNet: Detecting Objects as Paired Keypoints.
Code is modified from the `official github repo
<https://github.com/princeton-vl/CornerNet/blob/master/models/py_utils/
kp.py#L73>`_ .
More details can be found in the `paper
<https://arxiv.org/abs/1808.01244>`_ .
Args:
num_classes (int): Number of categories excluding the background
category.
in_channels (int): Number of channels in the input feature map.
num_feat_levels (int): Levels of feature from the previous module. 2
for HourglassNet-104 and 1 for HourglassNet-52. Because
HourglassNet-104 outputs the final feature and intermediate
supervision feature and HourglassNet-52 only outputs the final
feature. Default: 2.
corner_emb_channels (int): Channel of embedding vector. Default: 1.
train_cfg (dict | None): Training config. Useless in CornerHead,
but we keep this variable for SingleStageDetector. Default: None.
test_cfg (dict | None): Testing config of CornerHead. Default: None.
loss_heatmap (dict | None): Config of corner heatmap loss. Default:
GaussianFocalLoss.
loss_embedding (dict | None): Config of corner embedding loss. Default:
AssociativeEmbeddingLoss.
loss_offset (dict | None): Config of corner offset loss. Default:
SmoothL1Loss.
init_cfg (dict or list[dict], optional): Initialization config dict.
Default: None
"""
    def __init__(self,
                 num_classes,
                 in_channels,
                 num_feat_levels=2,
                 corner_emb_channels=1,
                 train_cfg=None,
                 test_cfg=None,
                 loss_heatmap=dict(
                     type='GaussianFocalLoss',
                     alpha=2.0,
                     gamma=4.0,
                     loss_weight=1),
                 loss_embedding=dict(
                     type='AssociativeEmbeddingLoss',
                     pull_weight=0.25,
                     push_weight=0.25),
                 loss_offset=dict(
                     type='SmoothL1Loss', beta=1.0, loss_weight=1),
                 init_cfg=None):
        """See the class docstring for the meaning of each argument."""
        assert init_cfg is None, 'To prevent abnormal initialization ' \
            'behavior, init_cfg is not allowed to be set'
        super(CornerHead, self).__init__(init_cfg)
        self.num_classes = num_classes
        self.in_channels = in_channels
        self.corner_emb_channels = corner_emb_channels
        # The embedding branch is only built when its channel count is > 0.
        self.with_corner_emb = self.corner_emb_channels > 0
        # Each corner offset is a 2-vector (x, y) of sub-pixel refinement.
        self.corner_offset_channels = 2
        self.num_feat_levels = num_feat_levels
        # A loss config of None disables the corresponding loss entirely.
        self.loss_heatmap = build_loss(
            loss_heatmap) if loss_heatmap is not None else None
        self.loss_embedding = build_loss(
            loss_embedding) if loss_embedding is not None else None
        self.loss_offset = build_loss(
            loss_offset) if loss_offset is not None else None
        self.train_cfg = train_cfg
        self.test_cfg = test_cfg

        self._init_layers()
    def _make_layers(self, out_channels, in_channels=256, feat_channels=256):
        """Initialize conv sequential for CornerHead.

        A 3x3 ConvModule (with its default norm/activation) followed by a
        1x1 prediction conv with neither norm nor activation.
        """
        return nn.Sequential(
            ConvModule(in_channels, feat_channels, 3, padding=1),
            ConvModule(
                feat_channels, out_channels, 1, norm_cfg=None, act_cfg=None))
    def _init_corner_kpt_layers(self):
        """Initialize corner keypoint layers.

        Including corner heatmap branch and corner offset branch. Each branch
        has two parts: prefix `tl_` for top-left and `br_` for bottom-right.
        """
        # One entry per feature level; the append order below must stay
        # stable because it determines the state-dict (checkpoint) keys.
        self.tl_pool, self.br_pool = nn.ModuleList(), nn.ModuleList()
        self.tl_heat, self.br_heat = nn.ModuleList(), nn.ModuleList()
        self.tl_off, self.br_off = nn.ModuleList(), nn.ModuleList()

        for _ in range(self.num_feat_levels):
            self.tl_pool.append(
                BiCornerPool(
                    self.in_channels, ['top', 'left'],
                    out_channels=self.in_channels))
            self.br_pool.append(
                BiCornerPool(
                    self.in_channels, ['bottom', 'right'],
                    out_channels=self.in_channels))

            # Per-class corner heatmap predictors.
            self.tl_heat.append(
                self._make_layers(
                    out_channels=self.num_classes,
                    in_channels=self.in_channels))
            self.br_heat.append(
                self._make_layers(
                    out_channels=self.num_classes,
                    in_channels=self.in_channels))

            # 2-channel (x, y) sub-pixel offset predictors.
            self.tl_off.append(
                self._make_layers(
                    out_channels=self.corner_offset_channels,
                    in_channels=self.in_channels))
            self.br_off.append(
                self._make_layers(
                    out_channels=self.corner_offset_channels,
                    in_channels=self.in_channels))
    def _init_corner_emb_layers(self):
        """Initialize corner embedding layers.

        Only include corner embedding branch with two parts: prefix `tl_` for
        top-left and `br_` for bottom-right.
        """
        # Append order per level must stay stable (state-dict keys).
        self.tl_emb, self.br_emb = nn.ModuleList(), nn.ModuleList()

        for _ in range(self.num_feat_levels):
            self.tl_emb.append(
                self._make_layers(
                    out_channels=self.corner_emb_channels,
                    in_channels=self.in_channels))
            self.br_emb.append(
                self._make_layers(
                    out_channels=self.corner_emb_channels,
                    in_channels=self.in_channels))
    def _init_layers(self):
        """Initialize layers for CornerHead.

        Including two parts: corner keypoint layers and corner embedding layers
        """
        self._init_corner_kpt_layers()
        # Embedding layers only exist when corner_emb_channels > 0.
        if self.with_corner_emb:
            self._init_corner_emb_layers()
    def init_weights(self):
        """Initialize weights of the prediction convs.

        Heatmap branches additionally get a focal-loss-friendly bias so
        that early training does not predict corners everywhere.
        """
        super(CornerHead, self).init_weights()
        bias_init = bias_init_with_prob(0.1)
        for i in range(self.num_feat_levels):
            # The initialization of parameters are different between
            # nn.Conv2d and ConvModule. Our experiments show that
            # using the original initialization of nn.Conv2d increases
            # the final mAP by about 0.2%
            self.tl_heat[i][-1].conv.reset_parameters()
            self.tl_heat[i][-1].conv.bias.data.fill_(bias_init)
            self.br_heat[i][-1].conv.reset_parameters()
            self.br_heat[i][-1].conv.bias.data.fill_(bias_init)
            self.tl_off[i][-1].conv.reset_parameters()
            self.br_off[i][-1].conv.reset_parameters()
            if self.with_corner_emb:
                self.tl_emb[i][-1].conv.reset_parameters()
                self.br_emb[i][-1].conv.reset_parameters()
def forward(self, feats):
"""Forward features from the upstream network.
Args:
feats (tuple[Tensor]): Features from the upstream network, each is
a 4D-tensor.
Returns:
tuple: Usually a tuple of corner heatmaps, offset heatmaps and
embedding heatmaps.
- tl_heats (list[Tensor]): Top-left corner heatmaps for all
levels, each is a 4D-tensor, the channels number is
num_classes.
- br_heats (list[Tensor]): Bottom-right corner heatmaps for all
levels, each is a 4D-tensor, the channels number is
num_classes.
- tl_embs (list[Tensor] | list[None]): Top-left embedding
heatmaps for all levels, each is a 4D-tensor or None.
If not None, the channels number is corner_emb_channels.
- br_embs (list[Tensor] | list[None]): Bottom-right embedding
heatmaps for all levels, each is a 4D-tensor or None.
If not None, the channels number is corner_emb_channels.
- tl_offs (list[Tensor]): Top-left offset heatmaps for all
levels, each is a 4D-tensor. The channels number is
corner_offset_channels.
- br_offs (list[Tensor]): Bottom-right offset heatmaps for all
levels, each is a 4D-tensor. The channels number is
corner_offset_channels.
"""
lvl_ind = list(range(self.num_feat_levels))
return multi_apply(self.forward_single, feats, lvl_ind)
def forward_single(self, x, lvl_ind, return_pool=False):
"""Forward feature of a single level.
Args:
x (Tensor): Feature of a single level.
lvl_ind (int): Level index of current feature.
return_pool (bool): Return corner pool feature or not.
Returns:
tuple[Tensor]: A tuple of CornerHead's output for current feature
level. Containing the following Tensors:
- tl_heat (Tensor): Predicted top-left corner heatmap.
- br_heat (Tensor): Predicted bottom-right corner heatmap.
- tl_emb (Tensor | None): Predicted top-left embedding heatmap.
None for `self.with_corner_emb == False`.
- br_emb (Tensor | None): Predicted bottom-right embedding
heatmap. None for `self.with_corner_emb == False`.
- tl_off (Tensor): Predicted top-left offset heatmap.
- br_off (Tensor): Predicted bottom-right offset heatmap.
- tl_pool (Tensor): Top-left corner pool feature. Not must
have.
- br_pool (Tensor): Bottom-right corner pool feature. Not must
have.
"""
tl_pool = self.tl_pool[lvl_ind](x)
tl_heat = self.tl_heat[lvl_ind](tl_pool)
br_pool = self.br_pool[lvl_ind](x)
br_heat = self.br_heat[lvl_ind](br_pool)
tl_emb, br_emb = None, None
if self.with_corner_emb:
tl_emb = self.tl_emb[lvl_ind](tl_pool)
br_emb = self.br_emb[lvl_ind](br_pool)
tl_off = self.tl_off[lvl_ind](tl_pool)
br_off = self.br_off[lvl_ind](br_pool)
result_list = [tl_heat, br_heat, tl_emb, br_emb, tl_off, br_off]
if return_pool:
result_list.append(tl_pool)
result_list.append(br_pool)
return result_list
def get_targets(self,
                gt_bboxes,
                gt_labels,
                feat_shape,
                img_shape,
                with_corner_emb=False,
                with_guiding_shift=False,
                with_centripetal_shift=False):
    """Generate corner targets.

    Including corner heatmap, corner offset.

    Optional: corner embedding, corner guiding shift, centripetal shift.

    For CornerNet, we generate corner heatmap, corner offset and corner
    embedding from this function.

    For CentripetalNet, we generate corner heatmap, corner offset, guiding
    shift and centripetal shift from this function.

    Args:
        gt_bboxes (list[Tensor]): Ground truth bboxes of each image, each
            has shape (num_gt, 4).
        gt_labels (list[Tensor]): Ground truth labels of each box, each has
            shape (num_gt,).
        feat_shape (list[int]): Shape of output feature,
            [batch, channel, height, width].
        img_shape (list[int]): Shape of input image,
            [height, width, channel].
        with_corner_emb (bool): Generate corner embedding target or not.
            Default: False.
        with_guiding_shift (bool): Generate guiding shift target or not.
            Default: False.
        with_centripetal_shift (bool): Generate centripetal shift target or
            not. Default: False.

    Returns:
        dict: Ground truth of corner heatmap, corner offset, corner
        embedding, guiding shift and centripetal shift. Containing the
        following keys:

            - topleft_heatmap (Tensor): Ground truth top-left corner
              heatmap.
            - bottomright_heatmap (Tensor): Ground truth bottom-right
              corner heatmap.
            - topleft_offset (Tensor): Ground truth top-left corner offset.
            - bottomright_offset (Tensor): Ground truth bottom-right corner
              offset.
            - corner_embedding (list[list[list[int]]]): Ground truth corner
              embedding. Not must have.
            - topleft_guiding_shift (Tensor): Ground truth top-left corner
              guiding shift. Not must have.
            - bottomright_guiding_shift (Tensor): Ground truth bottom-right
              corner guiding shift. Not must have.
            - topleft_centripetal_shift (Tensor): Ground truth top-left
              corner centripetal shift. Not must have.
            - bottomright_centripetal_shift (Tensor): Ground truth
              bottom-right corner centripetal shift. Not must have.
    """
    batch_size, _, height, width = feat_shape
    img_h, img_w = img_shape[:2]

    # Ratios mapping image-plane coordinates onto the feature-map grid.
    width_ratio = float(width / img_w)
    height_ratio = float(height / img_h)

    gt_tl_heatmap = gt_bboxes[-1].new_zeros(
        [batch_size, self.num_classes, height, width])
    gt_br_heatmap = gt_bboxes[-1].new_zeros(
        [batch_size, self.num_classes, height, width])
    gt_tl_offset = gt_bboxes[-1].new_zeros([batch_size, 2, height, width])
    gt_br_offset = gt_bboxes[-1].new_zeros([batch_size, 2, height, width])

    if with_corner_emb:
        match = []

    # Guiding shift is a kind of offset, from center to corner
    if with_guiding_shift:
        gt_tl_guiding_shift = gt_bboxes[-1].new_zeros(
            [batch_size, 2, height, width])
        gt_br_guiding_shift = gt_bboxes[-1].new_zeros(
            [batch_size, 2, height, width])
    # Centripetal shift is also a kind of offset, from center to corner
    # and normalized by log.
    if with_centripetal_shift:
        gt_tl_centripetal_shift = gt_bboxes[-1].new_zeros(
            [batch_size, 2, height, width])
        gt_br_centripetal_shift = gt_bboxes[-1].new_zeros(
            [batch_size, 2, height, width])

    for batch_id in range(batch_size):
        # Ground truth of corner embedding per image is a list of coord set
        corner_match = []
        for box_id in range(len(gt_labels[batch_id])):
            left, top, right, bottom = gt_bboxes[batch_id][box_id]
            center_x = (left + right) / 2.0
            center_y = (top + bottom) / 2.0
            label = gt_labels[batch_id][box_id]

            # Use coords in the feature level to generate ground truth
            scale_left = left * width_ratio
            scale_right = right * width_ratio
            scale_top = top * height_ratio
            scale_bottom = bottom * height_ratio
            scale_center_x = center_x * width_ratio
            scale_center_y = center_y * height_ratio

            # Int coords on feature map/ground truth tensor
            left_idx = int(min(scale_left, width - 1))
            right_idx = int(min(scale_right, width - 1))
            top_idx = int(min(scale_top, height - 1))
            bottom_idx = int(min(scale_bottom, height - 1))

            # Generate gaussian heatmap
            scale_box_width = ceil(scale_right - scale_left)
            scale_box_height = ceil(scale_bottom - scale_top)
            radius = gaussian_radius((scale_box_height, scale_box_width),
                                     min_overlap=0.3)
            radius = max(0, int(radius))
            gt_tl_heatmap[batch_id, label] = gen_gaussian_target(
                gt_tl_heatmap[batch_id, label], [left_idx, top_idx],
                radius)
            gt_br_heatmap[batch_id, label] = gen_gaussian_target(
                gt_br_heatmap[batch_id, label], [right_idx, bottom_idx],
                radius)

            # Generate corner offset: the fractional part discarded by the
            # int cast above, to be regressed back at inference time.
            left_offset = scale_left - left_idx
            top_offset = scale_top - top_idx
            right_offset = scale_right - right_idx
            bottom_offset = scale_bottom - bottom_idx
            gt_tl_offset[batch_id, 0, top_idx, left_idx] = left_offset
            gt_tl_offset[batch_id, 1, top_idx, left_idx] = top_offset
            gt_br_offset[batch_id, 0, bottom_idx, right_idx] = right_offset
            gt_br_offset[batch_id, 1, bottom_idx,
                         right_idx] = bottom_offset

            # Generate corner embedding
            if with_corner_emb:
                corner_match.append([[top_idx, left_idx],
                                     [bottom_idx, right_idx]])
            # Generate guiding shift
            if with_guiding_shift:
                gt_tl_guiding_shift[batch_id, 0, top_idx,
                                    left_idx] = scale_center_x - left_idx
                gt_tl_guiding_shift[batch_id, 1, top_idx,
                                    left_idx] = scale_center_y - top_idx
                gt_br_guiding_shift[batch_id, 0, bottom_idx,
                                    right_idx] = right_idx - scale_center_x
                gt_br_guiding_shift[
                    batch_id, 1, bottom_idx,
                    right_idx] = bottom_idx - scale_center_y
            # Generate centripetal shift
            if with_centripetal_shift:
                gt_tl_centripetal_shift[batch_id, 0, top_idx,
                                        left_idx] = log(scale_center_x -
                                                        scale_left)
                gt_tl_centripetal_shift[batch_id, 1, top_idx,
                                        left_idx] = log(scale_center_y -
                                                        scale_top)
                gt_br_centripetal_shift[batch_id, 0, bottom_idx,
                                        right_idx] = log(scale_right -
                                                         scale_center_x)
                gt_br_centripetal_shift[batch_id, 1, bottom_idx,
                                        right_idx] = log(scale_bottom -
                                                         scale_center_y)

        if with_corner_emb:
            match.append(corner_match)

    target_result = dict(
        topleft_heatmap=gt_tl_heatmap,
        topleft_offset=gt_tl_offset,
        bottomright_heatmap=gt_br_heatmap,
        bottomright_offset=gt_br_offset)

    if with_corner_emb:
        target_result.update(corner_embedding=match)
    if with_guiding_shift:
        target_result.update(
            topleft_guiding_shift=gt_tl_guiding_shift,
            bottomright_guiding_shift=gt_br_guiding_shift)
    if with_centripetal_shift:
        target_result.update(
            topleft_centripetal_shift=gt_tl_centripetal_shift,
            bottomright_centripetal_shift=gt_br_centripetal_shift)

    return target_result
def loss(self,
         tl_heats,
         br_heats,
         tl_embs,
         br_embs,
         tl_offs,
         br_offs,
         gt_bboxes,
         gt_labels,
         img_metas,
         gt_bboxes_ignore=None):
    """Compute losses of the head.

    Args:
        tl_heats (list[Tensor]): Top-left corner heatmaps for each level
            with shape (N, num_classes, H, W).
        br_heats (list[Tensor]): Bottom-right corner heatmaps for each
            level with shape (N, num_classes, H, W).
        tl_embs (list[Tensor]): Top-left corner embeddings for each level
            with shape (N, corner_emb_channels, H, W).
        br_embs (list[Tensor]): Bottom-right corner embeddings for each
            level with shape (N, corner_emb_channels, H, W).
        tl_offs (list[Tensor]): Top-left corner offsets for each level
            with shape (N, corner_offset_channels, H, W).
        br_offs (list[Tensor]): Bottom-right corner offsets for each level
            with shape (N, corner_offset_channels, H, W).
        gt_bboxes (list[Tensor]): Ground truth bboxes for each image with
            shape (num_gts, 4) in [left, top, right, bottom] format.
        gt_labels (list[Tensor]): Class indices corresponding to each box.
        img_metas (list[dict]): Meta information of each image, e.g.,
            image size, scaling factor, etc.
        gt_bboxes_ignore (list[Tensor] | None): Specify which bounding
            boxes can be ignored when computing the loss. Unused here.

    Returns:
        dict[str, Tensor]: A dictionary of loss components. Containing the
        following losses:

            - det_loss (list[Tensor]): Corner keypoint losses of all
              feature levels.
            - pull_loss (list[Tensor]): Part one of AssociativeEmbedding
              losses of all feature levels.
            - push_loss (list[Tensor]): Part two of AssociativeEmbedding
              losses of all feature levels.
            - off_loss (list[Tensor]): Corner offset losses of all feature
              levels.
    """
    # Targets are built once from the last level's shape and shared by
    # every feature level.
    targets = self.get_targets(
        gt_bboxes,
        gt_labels,
        tl_heats[-1].shape,
        img_metas[0]['pad_shape'],
        with_corner_emb=self.with_corner_emb)
    mlvl_targets = [targets for _ in range(self.num_feat_levels)]
    det_losses, pull_losses, push_losses, off_losses = multi_apply(
        self.loss_single, tl_heats, br_heats, tl_embs, br_embs, tl_offs,
        br_offs, mlvl_targets)
    loss_dict = dict(det_loss=det_losses, off_loss=off_losses)
    # Embedding (pull/push) losses only exist when corner embeddings are on.
    if self.with_corner_emb:
        loss_dict.update(pull_loss=pull_losses, push_loss=push_losses)
    return loss_dict
def loss_single(self, tl_hmp, br_hmp, tl_emb, br_emb, tl_off, br_off,
                targets):
    """Compute losses for single level.

    Args:
        tl_hmp (Tensor): Top-left corner heatmap for current level with
            shape (N, num_classes, H, W).
        br_hmp (Tensor): Bottom-right corner heatmap for current level with
            shape (N, num_classes, H, W).
        tl_emb (Tensor): Top-left corner embedding for current level with
            shape (N, corner_emb_channels, H, W).
        br_emb (Tensor): Bottom-right corner embedding for current level
            with shape (N, corner_emb_channels, H, W).
        tl_off (Tensor): Top-left corner offset for current level with
            shape (N, corner_offset_channels, H, W).
        br_off (Tensor): Bottom-right corner offset for current level with
            shape (N, corner_offset_channels, H, W).
        targets (dict): Corner target generated by `get_targets`.

    Returns:
        tuple[torch.Tensor]: Losses of the head's different branches
        containing the following losses:

            - det_loss (Tensor): Corner keypoint loss.
            - pull_loss (Tensor): Part one of AssociativeEmbedding loss.
            - push_loss (Tensor): Part two of AssociativeEmbedding loss.
            - off_loss (Tensor): Corner offset loss.
    """
    gt_tl_hmp = targets['topleft_heatmap']
    gt_br_hmp = targets['bottomright_heatmap']
    gt_tl_off = targets['topleft_offset']
    gt_br_off = targets['bottomright_offset']
    # NOTE(review): this assumes `targets` always contains
    # 'corner_embedding', i.e. get_targets was called with
    # with_corner_emb=True — confirm for subclasses that disable it.
    gt_embedding = targets['corner_embedding']

    # Detection loss: averaged over the number of exact-corner positions
    # (heatmap value == 1), with a floor of 1 to avoid division by zero.
    tl_det_loss = self.loss_heatmap(
        tl_hmp.sigmoid(),
        gt_tl_hmp,
        avg_factor=max(1,
                       gt_tl_hmp.eq(1).sum()))
    br_det_loss = self.loss_heatmap(
        br_hmp.sigmoid(),
        gt_br_hmp,
        avg_factor=max(1,
                       gt_br_hmp.eq(1).sum()))
    det_loss = (tl_det_loss + br_det_loss) / 2.0

    # AssociativeEmbedding loss
    if self.with_corner_emb and self.loss_embedding is not None:
        pull_loss, push_loss = self.loss_embedding(tl_emb, br_emb,
                                                   gt_embedding)
    else:
        pull_loss, push_loss = None, None

    # Offset loss
    # We only compute the offset loss at the real corner position.
    # The value of real corner would be 1 in heatmap ground truth.
    # The mask is computed in class agnostic mode and its shape is
    # batch * 1 * width * height.
    tl_off_mask = gt_tl_hmp.eq(1).sum(1).gt(0).unsqueeze(1).type_as(
        gt_tl_hmp)
    br_off_mask = gt_br_hmp.eq(1).sum(1).gt(0).unsqueeze(1).type_as(
        gt_br_hmp)
    tl_off_loss = self.loss_offset(
        tl_off,
        gt_tl_off,
        tl_off_mask,
        avg_factor=max(1, tl_off_mask.sum()))
    br_off_loss = self.loss_offset(
        br_off,
        gt_br_off,
        br_off_mask,
        avg_factor=max(1, br_off_mask.sum()))
    off_loss = (tl_off_loss + br_off_loss) / 2.0

    return det_loss, pull_loss, push_loss, off_loss
def get_bboxes(self,
               tl_heats,
               br_heats,
               tl_embs,
               br_embs,
               tl_offs,
               br_offs,
               img_metas,
               rescale=False,
               with_nms=True):
    """Transform network output for a batch into bbox predictions.

    Args:
        tl_heats (list[Tensor]): Top-left corner heatmaps for each level
            with shape (N, num_classes, H, W).
        br_heats (list[Tensor]): Bottom-right corner heatmaps for each
            level with shape (N, num_classes, H, W).
        tl_embs (list[Tensor]): Top-left corner embeddings for each level
            with shape (N, corner_emb_channels, H, W).
        br_embs (list[Tensor]): Bottom-right corner embeddings for each
            level with shape (N, corner_emb_channels, H, W).
        tl_offs (list[Tensor]): Top-left corner offsets for each level
            with shape (N, corner_offset_channels, H, W).
        br_offs (list[Tensor]): Bottom-right corner offsets for each level
            with shape (N, corner_offset_channels, H, W).
        img_metas (list[dict]): Meta information of each image, e.g.,
            image size, scaling factor, etc.
        rescale (bool): If True, return boxes in original image space.
            Default: False.
        with_nms (bool): If True, do nms before return boxes.
            Default: True.

    Returns:
        list[tuple]: Per-image ``(detections, labels)`` pairs as produced
        by :meth:`_get_bboxes_single`.
    """
    assert tl_heats[-1].shape[0] == br_heats[-1].shape[0] == len(img_metas)
    result_list = []
    # Only the last feature level's predictions are decoded; images are
    # processed one at a time (slices keep the batch dim as 1).
    for img_id in range(len(img_metas)):
        result_list.append(
            self._get_bboxes_single(
                tl_heats[-1][img_id:img_id + 1, :],
                br_heats[-1][img_id:img_id + 1, :],
                tl_offs[-1][img_id:img_id + 1, :],
                br_offs[-1][img_id:img_id + 1, :],
                img_metas[img_id],
                tl_emb=tl_embs[-1][img_id:img_id + 1, :],
                br_emb=br_embs[-1][img_id:img_id + 1, :],
                rescale=rescale,
                with_nms=with_nms))

    return result_list
def _get_bboxes_single(self,
                       tl_heat,
                       br_heat,
                       tl_off,
                       br_off,
                       img_meta,
                       tl_emb=None,
                       br_emb=None,
                       tl_centripetal_shift=None,
                       br_centripetal_shift=None,
                       rescale=False,
                       with_nms=True):
    """Transform outputs for a single batch item into bbox predictions.

    Args:
        tl_heat (Tensor): Top-left corner heatmap for current level with
            shape (N, num_classes, H, W).
        br_heat (Tensor): Bottom-right corner heatmap for current level
            with shape (N, num_classes, H, W).
        tl_off (Tensor): Top-left corner offset for current level with
            shape (N, corner_offset_channels, H, W).
        br_off (Tensor): Bottom-right corner offset for current level with
            shape (N, corner_offset_channels, H, W).
        img_meta (dict): Meta information of current image, e.g.,
            image size, scaling factor, etc.
        tl_emb (Tensor): Top-left corner embedding for current level with
            shape (N, corner_emb_channels, H, W).
        br_emb (Tensor): Bottom-right corner embedding for current level
            with shape (N, corner_emb_channels, H, W).
        tl_centripetal_shift: Top-left corner's centripetal shift for
            current level with shape (N, 2, H, W).
        br_centripetal_shift: Bottom-right corner's centripetal shift for
            current level with shape (N, 2, H, W).
        rescale (bool): If True, return boxes in original image space.
            Default: False.
        with_nms (bool): If True, do nms before return boxes.
            Default: True.

    Returns:
        tuple[Tensor, Tensor]: ``(detections, labels)`` where detections
        has shape (num_det, 5) as (x1, y1, x2, y2, score).
    """
    if isinstance(img_meta, (list, tuple)):
        img_meta = img_meta[0]

    batch_bboxes, batch_scores, batch_clses = self.decode_heatmap(
        tl_heat=tl_heat.sigmoid(),
        br_heat=br_heat.sigmoid(),
        tl_off=tl_off,
        br_off=br_off,
        tl_emb=tl_emb,
        br_emb=br_emb,
        tl_centripetal_shift=tl_centripetal_shift,
        br_centripetal_shift=br_centripetal_shift,
        img_meta=img_meta,
        k=self.test_cfg.corner_topk,
        kernel=self.test_cfg.local_maximum_kernel,
        distance_threshold=self.test_cfg.distance_threshold)

    if rescale:
        batch_bboxes /= batch_bboxes.new_tensor(img_meta['scale_factor'])

    bboxes = batch_bboxes.view([-1, 4])
    scores = batch_scores.view(-1)
    clses = batch_clses.view(-1)

    detections = torch.cat([bboxes, scores.unsqueeze(-1)], -1)
    # decode_heatmap marks invalid corner pairs with score -1; keep the
    # rest (threshold -0.1 keeps everything with a real score >= 0).
    keepinds = (detections[:, -1] > -0.1)
    detections = detections[keepinds]
    labels = clses[keepinds]

    if with_nms:
        detections, labels = self._bboxes_nms(detections, labels,
                                              self.test_cfg)

    return detections, labels
def _bboxes_nms(self, bboxes, labels, cfg):
    """Apply class-aware (batched) NMS to decoded detections.

    Args:
        bboxes (Tensor): Detections of shape (num_det, 5) as
            (x1, y1, x2, y2, score).
        labels (Tensor): Class indices of shape (num_det,).
        cfg: Test config providing ``nms`` (or deprecated ``nms_cfg``)
            and ``max_per_img``.

    Returns:
        tuple[Tensor, Tensor]: Kept (bboxes, labels), truncated to
        ``cfg.max_per_img`` when that value is positive.
    """
    if 'nms_cfg' in cfg:
        # BUG FIX: this previously called `warning.warn`, a NameError —
        # the stdlib module is `warnings`. Imported locally so the fix is
        # self-contained.
        import warnings
        warnings.warn('nms_cfg in test_cfg will be deprecated. '
                      'Please rename it as nms')
        if 'nms' not in cfg:
            cfg.nms = cfg.nms_cfg

    if labels.numel() > 0:
        max_num = cfg.max_per_img
        bboxes, keep = batched_nms(bboxes[:, :4], bboxes[:,
                                                         -1].contiguous(),
                                   labels, cfg.nms)
        if max_num > 0:
            bboxes = bboxes[:max_num]
            labels = labels[keep][:max_num]

    return bboxes, labels
def decode_heatmap(self,
                   tl_heat,
                   br_heat,
                   tl_off,
                   br_off,
                   tl_emb=None,
                   br_emb=None,
                   tl_centripetal_shift=None,
                   br_centripetal_shift=None,
                   img_meta=None,
                   k=100,
                   kernel=3,
                   distance_threshold=0.5,
                   num_dets=1000):
    """Transform outputs for a single batch item into raw bbox predictions.

    Args:
        tl_heat (Tensor): Top-left corner heatmap for current level with
            shape (N, num_classes, H, W).
        br_heat (Tensor): Bottom-right corner heatmap for current level
            with shape (N, num_classes, H, W).
        tl_off (Tensor): Top-left corner offset for current level with
            shape (N, corner_offset_channels, H, W).
        br_off (Tensor): Bottom-right corner offset for current level with
            shape (N, corner_offset_channels, H, W).
        tl_emb (Tensor | None): Top-left corner embedding for current
            level with shape (N, corner_emb_channels, H, W).
        br_emb (Tensor | None): Bottom-right corner embedding for current
            level with shape (N, corner_emb_channels, H, W).
        tl_centripetal_shift (Tensor | None): Top-left centripetal shift
            for current level with shape (N, 2, H, W).
        br_centripetal_shift (Tensor | None): Bottom-right centripetal
            shift for current level with shape (N, 2, H, W).
        img_meta (dict): Meta information of current image, e.g.,
            image size, scaling factor, etc.
        k (int): Get top k corner keypoints from heatmap.
        kernel (int): Max pooling kernel for extract local maximum pixels.
        distance_threshold (float): Distance threshold. Top-left and
            bottom-right corner keypoints with feature distance less than
            the threshold will be regarded as keypoints from same object.
        num_dets (int): Num of raw boxes before doing nms.

    Returns:
        tuple[torch.Tensor]: Decoded output of CornerHead, containing the
        following Tensors:

            - bboxes (Tensor): Coords of each box.
            - scores (Tensor): Scores of each box.
            - clses (Tensor): Categories of each box.
    """
    with_embedding = tl_emb is not None and br_emb is not None
    with_centripetal_shift = (
        tl_centripetal_shift is not None
        and br_centripetal_shift is not None)
    # Exactly one corner-grouping mechanism (embedding XOR centripetal
    # shift) must be supplied.
    assert with_embedding + with_centripetal_shift == 1
    batch, _, height, width = tl_heat.size()
    if torch.onnx.is_in_onnx_export():
        inp_h, inp_w = img_meta['pad_shape_for_onnx'][:2]
    else:
        inp_h, inp_w, _ = img_meta['pad_shape']

    # perform nms on heatmaps
    tl_heat = get_local_maximum(tl_heat, kernel=kernel)
    br_heat = get_local_maximum(br_heat, kernel=kernel)

    tl_scores, tl_inds, tl_clses, tl_ys, tl_xs = get_topk_from_heatmap(
        tl_heat, k=k)
    br_scores, br_inds, br_clses, br_ys, br_xs = get_topk_from_heatmap(
        br_heat, k=k)

    # We use repeat instead of expand here because expand is a
    # shallow-copy function. Thus it could cause unexpected testing result
    # sometimes. Using expand will decrease about 10% mAP during testing
    # compared to repeat.
    tl_ys = tl_ys.view(batch, k, 1).repeat(1, 1, k)
    tl_xs = tl_xs.view(batch, k, 1).repeat(1, 1, k)
    br_ys = br_ys.view(batch, 1, k).repeat(1, k, 1)
    br_xs = br_xs.view(batch, 1, k).repeat(1, k, 1)

    tl_off = transpose_and_gather_feat(tl_off, tl_inds)
    tl_off = tl_off.view(batch, k, 1, 2)
    br_off = transpose_and_gather_feat(br_off, br_inds)
    br_off = br_off.view(batch, 1, k, 2)

    # Refine integer corner positions with the predicted sub-pixel offsets.
    tl_xs = tl_xs + tl_off[..., 0]
    tl_ys = tl_ys + tl_off[..., 1]
    br_xs = br_xs + br_off[..., 0]
    br_ys = br_ys + br_off[..., 1]

    if with_centripetal_shift:
        tl_centripetal_shift = transpose_and_gather_feat(
            tl_centripetal_shift, tl_inds).view(batch, k, 1, 2).exp()
        br_centripetal_shift = transpose_and_gather_feat(
            br_centripetal_shift, br_inds).view(batch, 1, k, 2).exp()

        tl_ctxs = tl_xs + tl_centripetal_shift[..., 0]
        tl_ctys = tl_ys + tl_centripetal_shift[..., 1]
        br_ctxs = br_xs - br_centripetal_shift[..., 0]
        br_ctys = br_ys - br_centripetal_shift[..., 1]

    # all possible boxes based on top k corners (ignoring class)
    tl_xs *= (inp_w / width)
    tl_ys *= (inp_h / height)
    br_xs *= (inp_w / width)
    br_ys *= (inp_h / height)

    if with_centripetal_shift:
        tl_ctxs *= (inp_w / width)
        tl_ctys *= (inp_h / height)
        br_ctxs *= (inp_w / width)
        br_ctys *= (inp_h / height)

    x_off, y_off = 0, 0  # no crop
    if not torch.onnx.is_in_onnx_export():
        # since `RandomCenterCropPad` is done on CPU with numpy and it's
        # not dynamic traceable when exporting to ONNX, thus 'border'
        # does not appears as key in 'img_meta'. As a tmp solution,
        # we move this 'border' handle part to the postprocess after
        # finished exporting to ONNX, which is handle in
        # `mmdet/core/export/model_wrappers.py`. Though difference between
        # pytorch and exported onnx model, it might be ignored since
        # comparable performance is achieved between them (e.g. 40.4 vs
        # 40.6 on COCO val2017, for CornerNet without test-time flip)
        if 'border' in img_meta:
            x_off = img_meta['border'][2]
            y_off = img_meta['border'][0]

    tl_xs -= x_off
    tl_ys -= y_off
    br_xs -= x_off
    br_ys -= y_off

    # Clamp negative coordinates to zero.
    zeros = tl_xs.new_zeros(*tl_xs.size())
    tl_xs = torch.where(tl_xs > 0.0, tl_xs, zeros)
    tl_ys = torch.where(tl_ys > 0.0, tl_ys, zeros)
    br_xs = torch.where(br_xs > 0.0, br_xs, zeros)
    br_ys = torch.where(br_ys > 0.0, br_ys, zeros)

    bboxes = torch.stack((tl_xs, tl_ys, br_xs, br_ys), dim=3)
    area_bboxes = ((br_xs - tl_xs) * (br_ys - tl_ys)).abs()

    if with_centripetal_shift:
        tl_ctxs -= x_off
        tl_ctys -= y_off
        br_ctxs -= x_off
        br_ctys -= y_off

        tl_ctxs *= tl_ctxs.gt(0.0).type_as(tl_ctxs)
        tl_ctys *= tl_ctys.gt(0.0).type_as(tl_ctys)
        br_ctxs *= br_ctxs.gt(0.0).type_as(br_ctxs)
        br_ctys *= br_ctys.gt(0.0).type_as(br_ctys)

        ct_bboxes = torch.stack((tl_ctxs, tl_ctys, br_ctxs, br_ctys),
                                dim=3)
        area_ct_bboxes = ((br_ctxs - tl_ctxs) * (br_ctys - tl_ctys)).abs()

        rcentral = torch.zeros_like(ct_bboxes)
        # magic nums from paper section 4.1
        mu = torch.ones_like(area_bboxes) / 2.4
        mu[area_bboxes > 3500] = 1 / 2.1  # large bbox have smaller mu

        bboxes_center_x = (bboxes[..., 0] + bboxes[..., 2]) / 2
        bboxes_center_y = (bboxes[..., 1] + bboxes[..., 3]) / 2
        rcentral[..., 0] = bboxes_center_x - mu * (bboxes[..., 2] -
                                                   bboxes[..., 0]) / 2
        rcentral[..., 1] = bboxes_center_y - mu * (bboxes[..., 3] -
                                                   bboxes[..., 1]) / 2
        rcentral[..., 2] = bboxes_center_x + mu * (bboxes[..., 2] -
                                                   bboxes[..., 0]) / 2
        rcentral[..., 3] = bboxes_center_y + mu * (bboxes[..., 3] -
                                                   bboxes[..., 1]) / 2
        area_rcentral = ((rcentral[..., 2] - rcentral[..., 0]) *
                         (rcentral[..., 3] - rcentral[..., 1])).abs()
        dists = area_ct_bboxes / area_rcentral

        # A corner pair is rejected if either predicted center falls
        # outside the central region of its candidate box.
        tl_ctx_inds = (ct_bboxes[..., 0] <= rcentral[..., 0]) | (
            ct_bboxes[..., 0] >= rcentral[..., 2])
        tl_cty_inds = (ct_bboxes[..., 1] <= rcentral[..., 1]) | (
            ct_bboxes[..., 1] >= rcentral[..., 3])
        br_ctx_inds = (ct_bboxes[..., 2] <= rcentral[..., 0]) | (
            ct_bboxes[..., 2] >= rcentral[..., 2])
        br_cty_inds = (ct_bboxes[..., 3] <= rcentral[..., 1]) | (
            ct_bboxes[..., 3] >= rcentral[..., 3])

    if with_embedding:
        tl_emb = transpose_and_gather_feat(tl_emb, tl_inds)
        tl_emb = tl_emb.view(batch, k, 1)
        br_emb = transpose_and_gather_feat(br_emb, br_inds)
        br_emb = br_emb.view(batch, 1, k)
        dists = torch.abs(tl_emb - br_emb)

    tl_scores = tl_scores.view(batch, k, 1).repeat(1, 1, k)
    br_scores = br_scores.view(batch, 1, k).repeat(1, k, 1)

    scores = (tl_scores + br_scores) / 2  # scores for all possible boxes

    # tl and br should have same class
    tl_clses = tl_clses.view(batch, k, 1).repeat(1, 1, k)
    br_clses = br_clses.view(batch, 1, k).repeat(1, k, 1)
    cls_inds = (tl_clses != br_clses)

    # reject boxes based on distances
    dist_inds = dists > distance_threshold

    # reject boxes based on widths and heights
    width_inds = (br_xs <= tl_xs)
    height_inds = (br_ys <= tl_ys)

    # No use `scores[cls_inds]`, instead we use `torch.where` here.
    # Since only 1-D indices with type 'tensor(bool)' are supported
    # when exporting to ONNX, any other bool indices with more dimensions
    # (e.g. 2-D bool tensor) as input parameter in node is invalid
    negative_scores = -1 * torch.ones_like(scores)
    scores = torch.where(cls_inds, negative_scores, scores)
    scores = torch.where(width_inds, negative_scores, scores)
    scores = torch.where(height_inds, negative_scores, scores)
    scores = torch.where(dist_inds, negative_scores, scores)

    if with_centripetal_shift:
        scores[tl_ctx_inds] = -1
        scores[tl_cty_inds] = -1
        scores[br_ctx_inds] = -1
        scores[br_cty_inds] = -1

    scores = scores.view(batch, -1)
    scores, inds = torch.topk(scores, num_dets)
    scores = scores.unsqueeze(2)

    bboxes = bboxes.view(batch, -1, 4)
    bboxes = gather_feat(bboxes, inds)

    clses = tl_clses.contiguous().view(batch, -1, 1)
    clses = gather_feat(clses, inds).float()

    return bboxes, scores, clses
def onnx_export(self,
                tl_heats,
                br_heats,
                tl_embs,
                br_embs,
                tl_offs,
                br_offs,
                img_metas,
                rescale=False,
                with_nms=True):
    """Transform network output for a batch into bbox predictions.

    ONNX-export variant of :meth:`get_bboxes`; only batch size 1 is
    supported.

    Args:
        tl_heats (list[Tensor]): Top-left corner heatmaps for each level
            with shape (N, num_classes, H, W).
        br_heats (list[Tensor]): Bottom-right corner heatmaps for each
            level with shape (N, num_classes, H, W).
        tl_embs (list[Tensor]): Top-left corner embeddings for each level
            with shape (N, corner_emb_channels, H, W).
        br_embs (list[Tensor]): Bottom-right corner embeddings for each
            level with shape (N, corner_emb_channels, H, W).
        tl_offs (list[Tensor]): Top-left corner offsets for each level
            with shape (N, corner_offset_channels, H, W).
        br_offs (list[Tensor]): Bottom-right corner offsets for each level
            with shape (N, corner_offset_channels, H, W).
        img_metas (list[dict]): Meta information of each image, e.g.,
            image size, scaling factor, etc.
        rescale (bool): If True, return boxes in original image space.
            Default: False.
        with_nms (bool): If True, do nms before return boxes.
            Default: True.

    Returns:
        tuple[Tensor, Tensor]: First tensor bboxes with shape
        [N, num_det, 5], 5 arrange as (x1, y1, x2, y2, score)
        and second element is class labels of shape [N, num_det].
    """
    # ONNX export only handles a single image per batch.
    assert tl_heats[-1].shape[0] == br_heats[-1].shape[0] == len(
        img_metas) == 1
    result_list = []
    for img_id in range(len(img_metas)):
        result_list.append(
            self._get_bboxes_single(
                tl_heats[-1][img_id:img_id + 1, :],
                br_heats[-1][img_id:img_id + 1, :],
                tl_offs[-1][img_id:img_id + 1, :],
                br_offs[-1][img_id:img_id + 1, :],
                img_metas[img_id],
                tl_emb=tl_embs[-1][img_id:img_id + 1, :],
                br_emb=br_embs[-1][img_id:img_id + 1, :],
                rescale=rescale,
                with_nms=with_nms))

    detections, labels = result_list[0]
    # batch_size 1 here, [1, num_det, 5], [1, num_det]
    return detections.unsqueeze(0), labels.unsqueeze(0)
| [
"2392587229zsl@gmail.com"
] | 2392587229zsl@gmail.com |
38b90d79ce9d9eedfac0869b24130ae48261dec5 | c8e87ed447ba8e1ac25cf5a1b6e6a9a7bc6f8ca0 | /python/test.py | e5346ad04fac69122c4faa7921581df0f86f3574 | [] | no_license | hernando/libsonata | efd9476ee89fd542bffea620bcb747ccca8df506 | 3567b373529e32ebe9eec7d8a1324a970bbdac53 | refs/heads/master | 2020-04-29T00:05:23.937449 | 2018-11-20T13:56:42 | 2018-11-21T13:51:26 | 175,680,830 | 0 | 0 | null | 2019-03-14T18:47:46 | 2019-03-14T18:47:45 | null | UTF-8 | Python | false | false | 3,632 | py | import unittest
from sonata import *
class TestSelection(unittest.TestCase):
    """Unit tests for the ``Selection`` range wrapper."""

    def test_basic(self):
        """A selection built from ranges keeps them and flattens in order."""
        range_list = [(3, 5), (0, 3)]
        sel = Selection(range_list)
        self.assertTrue(sel)
        self.assertEqual(sel.ranges, range_list)
        self.assertEqual(sel.flat_size, 5)
        self.assertEqual(sel.flatten(), [3, 4, 0, 1, 2])

    def test_from_values(self):
        """Consecutive values collapse into ranges; repeats stay separate."""
        sel = Selection([1, 3, 4, 1])
        self.assertEqual(sel.ranges, [(1, 2), (3, 5), (1, 2)])
class TestNodePopulation(unittest.TestCase):
    """Exercise NodeStorage/NodePopulation read accessors on nodes1.h5."""

    def setUp(self):
        self.population = NodeStorage('./tests/data/nodes1.h5').open_population('nodes-A')

    def test_name(self):
        self.assertEqual(self.population.name, "nodes-A")

    def test_size(self):
        self.assertEqual(self.population.size, 6)

    def test_attribute_names(self):
        self.assertEqual(self.population.attribute_names, {"attr-X", "attr-Y", "attr-Z"})

    def test_get_attribute(self):
        pop = self.population
        self.assertEqual(pop.get_attribute('attr-X', 0), 11.)
        self.assertEqual(pop.get_attribute('attr-X', Selection([0, 5])).tolist(), [11., 16.])

        # different dtypes
        self.assertEqual(pop.get_attribute('attr-Y', 0), 21)
        self.assertEqual(pop.get_attribute('attr-Z', 0), 'aa')

        # default value
        self.assertEqual(pop.get_attribute('attr-X', Selection([0, 5]), 42.).tolist(), [11., 16.])

        self.assertRaises(SonataError, pop.get_attribute, 'no-such-attribute', 0)

    def test_get_dynamics_attribute(self):
        pop = self.population
        self.assertEqual(pop.get_dynamics_attribute('dparam-X', 0), 1011.)
        self.assertEqual(pop.get_dynamics_attribute('dparam-X', Selection([0, 5])).tolist(), [1011., 1016.])

        # different dtypes
        self.assertEqual(pop.get_dynamics_attribute('dparam-Y', 0), 1021)
        self.assertEqual(pop.get_dynamics_attribute('dparam-Z', 0), 'd-aa')

        # default value
        self.assertEqual(pop.get_dynamics_attribute('dparam-X', Selection([0, 5]), 42.).tolist(), [1011., 1016.])

        self.assertRaises(SonataError, pop.get_dynamics_attribute, 'no-such-attribute', 0)
class TestEdgePopulation(unittest.TestCase):
    """Exercise EdgeStorage/EdgePopulation connectivity queries on edges1.h5."""

    def setUp(self):
        self.edges = EdgeStorage('./tests/data/edges1.h5').open_population('edges-AB')

    def test_source(self):
        self.assertEqual(self.edges.source, 'nodes-A')

    def test_target(self):
        self.assertEqual(self.edges.target, 'nodes-B')

    def test_source_nodes(self):
        self.assertEqual(self.edges.source_node(1), 1)
        self.assertEqual(self.edges.source_nodes(Selection([0, 1, 2, 4])).tolist(), [1, 1, 2, 3])

    def test_target_nodes(self):
        self.assertEqual(self.edges.target_node(1), 2)
        self.assertEqual(self.edges.target_nodes(Selection([0, 1, 2, 4])).tolist(), [1, 2, 1, 0])

    def test_afferent_edges(self):
        # Edges arriving at the given target node(s).
        self.assertEqual(self.edges.afferent_edges([1, 2]).ranges, [(0, 4), (5, 6)])
        self.assertEqual(self.edges.afferent_edges(1).ranges, [(0, 1), (2, 4)])

    def test_efferent_edges(self):
        # Edges leaving the given source node(s).
        self.assertEqual(self.edges.efferent_edges([1, 2]).ranges, [(0, 4)])
        self.assertEqual(self.edges.efferent_edges(0).ranges, [])

    def test_connecting_edges(self):
        self.assertEqual(self.edges.connecting_edges([1, 2], [1, 2]).ranges, [(0, 4)])
        self.assertEqual(self.edges.connecting_edges(1, 1).ranges, [(0, 1)])
# Allow running this test module directly: python test.py
if __name__ == '__main__':
    unittest.main()
| [
"arseny.povolotsky@epfl.ch"
] | arseny.povolotsky@epfl.ch |
029cf01ff41bacef7626fa5dd1e70da81a945288 | a4f192ae2d8f53061df82d37505a06ebe09903da | /NeuralStyleTransfer/style_transfer.py | 00af79e7abcb065f2b11ff7310b1d24c4a7e2094 | [
"MIT"
] | permissive | MartinBCN/NeuralStyleTransfer | db2a09b9e0ac496c27f2305254f07208a15f0361 | e681257897643e18acefe3dc194551d2b08c2581 | refs/heads/main | 2023-02-27T19:38:52.009954 | 2021-02-06T18:56:25 | 2021-02-06T18:56:25 | 336,191,565 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,436 | py | from __future__ import print_function
import os
from typing import List
import torch
import torch.nn as nn
import torchvision.models as models
from torch import Tensor, optim
from NeuralStyleTransfer.content_loss import ContentLoss
from NeuralStyleTransfer.normalisation import Normalization
from NeuralStyleTransfer.style_loss import StyleLoss
def get_input_optimizer(input_img: Tensor) -> optim.LBFGS:
    """Create an L-BFGS optimizer whose sole parameter is the image itself.

    Marks ``input_img`` as requiring gradients in place: the pixel values
    are what gets optimized during style transfer.
    """
    return optim.LBFGS([input_img.requires_grad_()])
class NeuralStyleTransfer:
    """Neural style transfer (Gatys et al.) driven by a pretrained VGG-19.

    Typical use: construct, call :meth:`get_style_model_and_losses` with the
    style and content images, then :meth:`fit_transform` on the input image.

    NOTE: the class body below executes at import time — it loads the
    VGG-19 weights from ``MODEL_PATH`` on disk and moves the trunk to
    ``device``.
    """

    # Prefer CUDA unless it is unavailable or explicitly disabled via USE_CPU.
    device = torch.device("cuda" if torch.cuda.is_available() and (not os.environ.get('USE_CPU', False)) else "cpu")

    # Load the VGG-19 weights from a local file instead of downloading them.
    cnn = models.vgg19(pretrained=False)
    p = os.environ.get('MODEL_PATH', '/home/martin/Programming/Python/NeuralStyleTransfer/model/vgg19-dcbb9e9d.pth')
    cnn.load_state_dict(torch.load(p))
    # Only the convolutional feature extractor is needed, in eval mode.
    cnn = cnn.features.to(device).eval()

    normalization = Normalization().to(device)

    def __init__(self, content_layers: List[str] = None, style_layers: List[str] = None,
                 num_steps: int = 10):
        """
        Args:
            content_layers: VGG layer names after which a ContentLoss is
                inserted. Defaults to ``['conv_4']``.
            style_layers: VGG layer names after which a StyleLoss is
                inserted. Defaults to ``conv_1 .. conv_5``.
            num_steps: number of L-BFGS steps run by :meth:`fit_transform`.
        """
        self.num_steps = num_steps

        # Desired depth layers to compute style/content losses.
        self.content_layers = ['conv_4'] if content_layers is None else content_layers
        # BUG FIX: this branch previously tested `content_layers is None`,
        # so passing content_layers while leaving style_layers unset stored
        # None here and crashed later in `name in self.style_layers`.
        if style_layers is None:
            self.style_layers = ['conv_1', 'conv_2', 'conv_3', 'conv_4', 'conv_5']
        else:
            self.style_layers = style_layers

        self.style_losses = []
        self.content_losses = []

        # assuming that cnn is a nn.Sequential, so we make a new nn.Sequential
        # to put in modules that are supposed to be activated sequentially
        self.model = nn.Sequential(self.normalization)
        self.model = self.model.to(self.device)

    def __call__(self, t: Tensor):
        """Run the (loss-instrumented) model on a tensor."""
        return self.model(t)

    def get_style_model_and_losses(self, style_img: Tensor, content_img: Tensor) -> None:
        """Rebuild ``self.model`` from the VGG trunk, inserting ContentLoss
        and StyleLoss modules after the configured layers.

        Args:
            style_img: style reference image tensor.
            content_img: content reference image tensor.
        """
        content_losses = []
        style_losses = []

        i = 0  # increment every time we see a conv
        for layer in self.cnn.children():
            if isinstance(layer, nn.Conv2d):
                i += 1
                name = f'conv_{i}'
            elif isinstance(layer, nn.ReLU):
                name = f'relu_{i}'
                # The in-place version doesn't play very nicely with the ContentLoss
                # and StyleLoss we insert below. So we replace with out-of-place
                # ones here.
                layer = nn.ReLU(inplace=False)
            elif isinstance(layer, nn.MaxPool2d):
                name = f'pool_{i}'
            elif isinstance(layer, nn.BatchNorm2d):
                name = f'bn_{i}'
            else:
                raise RuntimeError(f'Unrecognized layer: {layer.__class__.__name__}')

            self.model.add_module(name, layer)

            if name in self.content_layers:
                # add content loss:
                target = self.model(content_img).detach()
                content_loss = ContentLoss(target)
                self.model.add_module(f"content_loss_{i}", content_loss)
                content_losses.append(content_loss)

            if name in self.style_layers:
                # add style loss:
                target_feature = self.model(style_img).detach()
                style_loss = StyleLoss(target_feature)
                self.model.add_module(f"style_loss_{i}", style_loss)
                style_losses.append(style_loss)

        # now we trim off the layers after the last content and style losses
        for i in range(len(self.model) - 1, -1, -1):
            if isinstance(self.model[i], ContentLoss) or isinstance(self.model[i], StyleLoss):
                break
        self.model = self.model[:(i + 1)]
        self.model = self.model.to(self.device)

        self.style_losses = style_losses
        self.content_losses = content_losses

    def fit_transform(self, input_img: Tensor) -> Tensor:
        """Optimize ``input_img`` in place with L-BFGS and return it.

        Runs ``self.num_steps`` optimizer steps; the pixel values are
        clamped to [0, 1] before each evaluation and once at the end.
        """
        optimizer = get_input_optimizer(input_img)

        style_weight = 1000000
        content_weight = 1

        print('Optimizing..')
        for step in range(self.num_steps):

            def closure():
                # correct the values of updated input image
                input_img.data.clamp_(0, 1)

                optimizer.zero_grad()
                self(input_img)

                style_score = 0
                content_score = 0
                for sl in self.style_losses:
                    style_score += sl.loss
                for cl in self.content_losses:
                    content_score += cl.loss

                style_score *= style_weight
                content_score *= content_weight

                loss = style_score + content_score
                loss.backward()

                # Progress report. (The original guard `i % 1 == 0` was
                # always true, so we simply report every evaluation.)
                print(f"run {step}:")
                print('Style Loss : {:4f} Content Loss: {:4f}'.format(
                    style_score.item(), content_score.item()))
                print()

                return style_score + content_score

            optimizer.step(closure)

        # a last correction...
        input_img.data.clamp_(0, 1)
        # (removed a leftover `print(input_img)` debug statement here)
        return input_img
if __name__ == '__main__':
nst = NeuralStyleTransfer()
| [
"martin-cleven@gmx.net"
] | martin-cleven@gmx.net |
b986eb7743e180367024e6fec8f37a1dcba074a2 | 192874fd96861ceb1864a71bf6f13932cc017d63 | /hue/desktop/core/ext-py/kombu-2.5.10/kombu/utils/__init__.py | 532fb883b49d9ef2073274f95e24dbbd98bbbd8a | [
"Apache-2.0",
"BSD-3-Clause"
] | permissive | OpenPOWER-BigData/HDP-hue | 1de3efc0ac773f1e7b1acd03675f11b65c6f477d | 23719febdaae26c916bdc9d0712645987ae7e0e4 | refs/heads/master | 2021-01-17T17:19:31.157051 | 2016-07-18T19:44:10 | 2016-07-18T19:44:10 | 63,631,863 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 11,546 | py | """
kombu.utils
===========
Internal utilities.
"""
from __future__ import absolute_import
import importlib
import random
import sys
from contextlib import contextmanager
from itertools import count, repeat
from time import sleep
from uuid import UUID, uuid4 as _uuid4, _uuid_generate_random
from .encoding import safe_repr as _safe_repr
try:
import ctypes
except:
ctypes = None # noqa
__all__ = ['EqualityDict', 'say', 'uuid', 'kwdict', 'maybe_list',
'fxrange', 'fxrangemax', 'retry_over_time',
'emergency_dump_state', 'cached_property',
'reprkwargs', 'reprcall', 'nested']
def symbol_by_name(name, aliases={}, imp=None, package=None,
sep='.', default=None, **kwargs):
"""Get symbol by qualified name.
The name should be the full dot-separated path to the class::
modulename.ClassName
Example::
celery.concurrency.processes.TaskPool
^- class name
or using ':' to separate module and symbol::
celery.concurrency.processes:TaskPool
If `aliases` is provided, a dict containing short name/long name
mappings, the name is looked up in the aliases first.
Examples:
>>> symbol_by_name('celery.concurrency.processes.TaskPool')
<class 'celery.concurrency.processes.TaskPool'>
>>> symbol_by_name('default', {
... 'default': 'celery.concurrency.processes.TaskPool'})
<class 'celery.concurrency.processes.TaskPool'>
# Does not try to look up non-string names.
>>> from celery.concurrency.processes import TaskPool
>>> symbol_by_name(TaskPool) is TaskPool
True
"""
if imp is None:
imp = importlib.import_module
if not isinstance(name, basestring):
return name # already a class
name = aliases.get(name) or name
sep = ':' if ':' in name else sep
module_name, _, cls_name = name.rpartition(sep)
if not module_name:
cls_name, module_name = None, package if package else cls_name
try:
try:
module = imp(module_name, package=package, **kwargs)
except ValueError, exc:
raise ValueError, ValueError(
"Couldn't import %r: %s" % (name, exc)), sys.exc_info()[2]
return getattr(module, cls_name) if cls_name else module
except (ImportError, AttributeError):
if default is None:
raise
return default
def eqhash(o):
try:
return o.__eqhash__()
except AttributeError:
return hash(o)
class EqualityDict(dict):
def __getitem__(self, key):
h = eqhash(key)
if h not in self:
return self.__missing__(key)
return dict.__getitem__(self, h)
def __setitem__(self, key, value):
return dict.__setitem__(self, eqhash(key), value)
def __delitem__(self, key):
return dict.__delitem__(self, eqhash(key))
def say(m, *s):
sys.stderr.write(str(m) % s + '\n')
def uuid4():
# Workaround for http://bugs.python.org/issue4607
if ctypes and _uuid_generate_random: # pragma: no cover
buffer = ctypes.create_string_buffer(16)
_uuid_generate_random(buffer)
return UUID(bytes=buffer.raw)
return _uuid4()
def uuid():
"""Generate a unique id, having - hopefully - a very small chance of
collision.
For now this is provided by :func:`uuid.uuid4`.
"""
return str(uuid4())
gen_unique_id = uuid
if sys.version_info >= (2, 6, 5):
def kwdict(kwargs):
return kwargs
else:
def kwdict(kwargs): # pragma: no cover # noqa
"""Make sure keyword arguments are not in Unicode.
This should be fixed in newer Python versions,
see: http://bugs.python.org/issue4978.
"""
return dict((key.encode('utf-8'), value)
for key, value in kwargs.items())
def maybe_list(v):
if v is None:
return []
if hasattr(v, '__iter__'):
return v
return [v]
def fxrange(start=1.0, stop=None, step=1.0, repeatlast=False):
cur = start * 1.0
while 1:
if not stop or cur <= stop:
yield cur
cur += step
else:
if not repeatlast:
break
yield cur - step
def fxrangemax(start=1.0, stop=None, step=1.0, max=100.0):
sum_, cur = 0, start * 1.0
while 1:
if sum_ >= max:
break
yield cur
if stop:
cur = min(cur + step, stop)
else:
cur += step
sum_ += cur
def retry_over_time(fun, catch, args=[], kwargs={}, errback=None,
max_retries=None, interval_start=2, interval_step=2,
interval_max=30, callback=None):
"""Retry the function over and over until max retries is exceeded.
For each retry we sleep a for a while before we try again, this interval
is increased for every retry until the max seconds is reached.
:param fun: The function to try
:param catch: Exceptions to catch, can be either tuple or a single
exception class.
:keyword args: Positional arguments passed on to the function.
:keyword kwargs: Keyword arguments passed on to the function.
:keyword errback: Callback for when an exception in ``catch`` is raised.
The callback must take two arguments: ``exc`` and ``interval``, where
``exc`` is the exception instance, and ``interval`` is the time in
seconds to sleep next..
:keyword max_retries: Maximum number of retries before we give up.
If this is not set, we will retry forever.
:keyword interval_start: How long (in seconds) we start sleeping between
retries.
:keyword interval_step: By how much the interval is increased for each
retry.
:keyword interval_max: Maximum number of seconds to sleep between retries.
"""
retries = 0
interval_range = fxrange(interval_start,
interval_max + interval_start,
interval_step, repeatlast=True)
for retries in count():
try:
return fun(*args, **kwargs)
except catch, exc:
if max_retries is not None and retries > max_retries:
raise
if callback:
callback()
tts = (errback(exc, interval_range, retries) if errback
else next(interval_range))
if tts:
for i in range(int(tts / interval_step)):
if callback:
callback()
sleep(interval_step)
def emergency_dump_state(state, open_file=open, dump=None):
from pprint import pformat
from tempfile import mktemp
if dump is None:
import pickle
dump = pickle.dump
persist = mktemp()
say('EMERGENCY DUMP STATE TO FILE -> %s <-' % persist)
fh = open_file(persist, 'w')
try:
try:
dump(state, fh, protocol=0)
except Exception, exc:
say('Cannot pickle state: %r. Fallback to pformat.' % (exc, ))
fh.write(pformat(state))
finally:
fh.flush()
fh.close()
return persist
class cached_property(object):
"""Property descriptor that caches the return value
of the get function.
*Examples*
.. code-block:: python
@cached_property
def connection(self):
return Connection()
@connection.setter # Prepares stored value
def connection(self, value):
if value is None:
raise TypeError('Connection must be a connection')
return value
@connection.deleter
def connection(self, value):
# Additional action to do at del(self.attr)
if value is not None:
print('Connection %r deleted' % (value, ))
"""
def __init__(self, fget=None, fset=None, fdel=None, doc=None):
self.__get = fget
self.__set = fset
self.__del = fdel
self.__doc__ = doc or fget.__doc__
self.__name__ = fget.__name__
self.__module__ = fget.__module__
def __get__(self, obj, type=None):
if obj is None:
return self
try:
return obj.__dict__[self.__name__]
except KeyError:
value = obj.__dict__[self.__name__] = self.__get(obj)
return value
def __set__(self, obj, value):
if obj is None:
return self
if self.__set is not None:
value = self.__set(obj, value)
obj.__dict__[self.__name__] = value
def __delete__(self, obj):
if obj is None:
return self
try:
value = obj.__dict__.pop(self.__name__)
except KeyError:
pass
else:
if self.__del is not None:
self.__del(obj, value)
def setter(self, fset):
return self.__class__(self.__get, fset, self.__del)
def deleter(self, fdel):
return self.__class__(self.__get, self.__set, fdel)
def reprkwargs(kwargs, sep=', ', fmt='%s=%s'):
return sep.join(fmt % (k, _safe_repr(v)) for k, v in kwargs.iteritems())
def reprcall(name, args=(), kwargs={}, sep=', '):
return '%s(%s%s%s)' % (name, sep.join(map(_safe_repr, args or ())),
(args and kwargs) and sep or '',
reprkwargs(kwargs, sep))
@contextmanager
def nested(*managers): # pragma: no cover
# flake8: noqa
"""Combine multiple context managers into a single nested
context manager."""
exits = []
vars = []
exc = (None, None, None)
try:
try:
for mgr in managers:
exit = mgr.__exit__
enter = mgr.__enter__
vars.append(enter())
exits.append(exit)
yield vars
except:
exc = sys.exc_info()
finally:
while exits:
exit = exits.pop()
try:
if exit(*exc):
exc = (None, None, None)
except:
exc = sys.exc_info()
if exc != (None, None, None):
# Don't rely on sys.exc_info() still containing
# the right information. Another exception may
# have been raised and caught by an exit method
raise exc[0], exc[1], exc[2]
finally:
del(exc)
def shufflecycle(it):
it = list(it) # don't modify callers list
shuffle = random.shuffle
for _ in repeat(None):
shuffle(it)
yield it[0]
def entrypoints(namespace):
try:
from pkg_resources import iter_entry_points
except ImportError:
return iter([])
return ((ep, ep.load()) for ep in iter_entry_points(namespace))
class ChannelPromise(object):
def __init__(self, contract):
self.__contract__ = contract
def __call__(self):
try:
return self.__value__
except AttributeError:
value = self.__value__ = self.__contract__()
return value
def __repr__(self):
return '<promise: %r>' % (self(), )
def escape_regex(p, white=''):
# what's up with re.escape? that code must be neglected or someting
return ''.join(c if c.isalnum() or c in white
else ('\\000' if c == '\000' else '\\' + c)
for c in p)
| [
"afsanjar@gmail.com"
] | afsanjar@gmail.com |
915e34925e1f50d0f7b5af4c7c49bce1281ae603 | 4f0f8990ff83745b9944ee4e06ecfa44874be325 | /test_metrics.py | 87273edddde013dbde2463a48c1155e4381db81a | [] | no_license | cy5e/lm_proj | 300def02e9dc1113fa77e2b9d4d81c8640404847 | af2ae6a38399c8d6d1db2cf8a3b4ceba7badccbb | refs/heads/master | 2020-03-19T13:09:06.762847 | 2018-06-08T03:55:24 | 2018-06-08T03:55:24 | 136,563,815 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,882 | py | import numpy as np
import sys
import copy
probs_file = sys.argv[1]
gts_file = sys.argv[2]
probs = np.load(probs_file) # N*6151 matrix
gts = np.load(gts_file) # N*6151 one-hot matrix
reals= np.argmax(gts,axis=1)
classes = set()
for label in reals:
classes.add(label)
cl_to_label = {} # dict having mapping from index number 0-6150 to actual class value
with open('retrained50_labels_ADAM0.0007_10000_512.txt','r') as f:
for i,line in enumerate(f.readlines()):
cl_to_label[i] = int(line.strip())
print(len(cl_to_label.keys()))
print("Num of classes",len(classes))
top1accu = sum(np.argmax(probs,1)==np.argmax(gts,1))/probs.shape[0]
print('Top-1 Accuracy is ',top1accu)
def cal_accu(inds, reals): # predict topk-accuracy given top-k indexes and ground truths
N, k = inds.shape
correct = 0
for i,label in enumerate(reals):
if label in inds[i]:
correct += 1
return float(correct)/N
for i in range(1,11):
inds = np.argpartition(probs,-1*i,axis=1)[:,-1*i:]
print('Top ',i,'Accuracy is',cal_accu(inds,reals))
preds = np.argmax(probs,1)
#inds5 = np.argpartition(probs,-5,axis=1)[:,-5:]
#inds10 = np.argpartition(probs,-10,axis=1)[:,-10:]
num_preds_per_class = {} # class : #predictions made in that class
num_corr_preds_pc = {} # class: #correct predictions
num_corr_10preds = {}
for cl in classes:
num_preds_per_class[cl] = 0
num_corr_preds_pc[cl] = 0
num_corr_10preds[cl] = 0
for label in preds:
num_preds_per_class[label] += 1
for i, label in enumerate(preds):
if label == reals[i]:
num_corr_preds_pc[label] += 1
prec_class = {}
recall_class = {}
f1_class = {}
for cl in classes:
if num_preds_per_class[cl] != 0:
prec_class[cl] = num_corr_preds_pc[cl]/float(num_preds_per_class[cl])
else:
prec_class[cl] = 0.0
recall_class[cl] = num_corr_preds_pc[cl]/2.0
if recall_class[cl] + prec_class[cl] > 0:
f1_class[cl] = 2*prec_class[cl]*recall_class[cl]/(recall_class[cl]+prec_class[cl])
else:
f1_class[cl] = 0.0
cum_prec, cum_recall, cum_f1 = 0.0,0.0,0.0
for cl in classes:
cum_prec += prec_class[cl]
cum_recall += recall_class[cl]
cum_f1 += f1_class[cl]
C = len(classes)
print("Avg precision over all classes is",cum_prec/C)
print("Avg recall over all classes is",cum_recall/C)
print("Avg F1 score over all classes is",cum_f1/C)
mis_classes = [] # classes where both test examples were misclassified with Top-10 accuracy metric
inds10 = np.argpartition(probs,-10,axis=1)[:,-10:]
for i, label in enumerate(reals):
if label in inds10[i]:
num_corr_10preds[label] += 1
for cl in classes:
if num_corr_10preds[cl] == 0:
mis_classes.append(cl_to_label[cl])
print('List of classes where both test examples were misclassified with Top-10 accuracy metric')
print(mis_classes)
| [
"32782504+cy5e@users.noreply.github.com"
] | 32782504+cy5e@users.noreply.github.com |
0046028dca7949cbaffeae14736998ad1ac8db95 | d69af4729640549a09d61b4ccef42dd76a0f4a90 | /pyjfuzz/core/pjf_external_fuzzer.py | da901978b63292244d321c502cf496f558bdeaa4 | [
"MIT"
] | permissive | rongqinglee/PyJFuzz | 86266a440df0fe7f15bcc77cb58b13b1f9d2263d | 615e243ef1f1358d8adcd4dfa057c5101cea90a5 | refs/heads/master | 2021-06-18T16:26:56.553846 | 2017-06-22T13:32:04 | 2017-06-22T13:32:04 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,265 | py | """
The MIT License (MIT)
Copyright (c) 2016 Daniele Linguaglossa <d.linguaglossa@mseclab.com>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NON INFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
from pjf_executor import PJFExecutor
from errors import PJFMissingArgument, PJFBaseException
import time
class PJFExternalFuzzer(PJFExecutor):
"""
Represent an instance of an external command line fuzzer
"""
def __init__(self, configuration):
"""
Init the class with fuzzer name (command), a boolean that represent whenever the fuzzer
accept arguments form stdin, otherwise specify a command line. The special keyword "@@"
will be replaced with the content of argument to fuzz
"""
self.logger = self.init_logger()
if ["command"] not in configuration:
raise PJFMissingArgument()
self.fuzzer = None
self.config = configuration
super(PJFExternalFuzzer, self).__init__(configuration)
self.logger.debug("[{0}] - PJFExternalFuzzer successfully initialized".format(time.strftime("%H:%M:%S")))
def execute_sigsegv(self, obj):
self.execute(obj)
self.logger.debug("[{0}] - PJFExternalFuzzer successfully completed".format(time.strftime("%H:%M:%S")))
return self.return_code in [-11, -6, -1]
def execute(self, obj):
"""
Perform the actual external fuzzing, you may replace this method in order to increase performance
"""
try:
if self.config.stdin:
self.spawn(self.config.command, stdin_content=obj, stdin=True, timeout=1)
else:
if "@@" not in self.config.command:
raise PJFMissingArgument("Missing @@ filename indicator while using non-stdin fuzzing method")
for x in self.config.command:
if "@@" in x:
self.config.command[self.config.command.index(x)] = x.replace("@@", obj)
self.spawn(self.config.command, timeout=2)
self.logger.debug("[{0}] - PJFExternalFuzzer successfully completed".format(time.strftime("%H:%M:%S")))
return self._out
except KeyboardInterrupt:
return ""
except Exception as e:
raise PJFBaseException(e.message)
| [
"d.linguaglossa@consulthink.it"
] | d.linguaglossa@consulthink.it |
f8cf9963e0fea96f539710e967763d67675c54aa | ec4664e6b14a426bb34808ac40047703872af5f3 | /date.py | 1474e6880ad0f5a79cc30a2f1b85375cf74c60a5 | [] | no_license | djh-sudo/data-visualization | 4ec50afe7dbc79f68fda655f93600145060a7ce9 | 597c49573ceecb334435f6869905e80cb44b07a4 | refs/heads/main | 2023-06-24T16:10:50.846635 | 2021-07-23T14:41:32 | 2021-07-23T14:41:32 | 388,826,555 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,477 | py | import random
import datetime
import handleExcel as handle
import pyecharts.options as opts
from pyecharts.charts import Calendar,Page
# Page.save_resize_html("render.html",cfg_file='chart_config.json')
def getData():
sh1 = handle.readByIndex('./stepover.xls', 4)
col = handle.readSheetAllContentByCol(sh1)
number = col[8]
return number
def calendar():
begin = datetime.date(2021, 7, 11)
end = datetime.date(2021, 8, 31)
peopleNumber = getData()
data = [
[str(begin + datetime.timedelta(days=i)), peopleNumber[i]]
for i in range((end - begin).days + 1)
]
can = (
Calendar(init_opts=opts.InitOpts(theme='dark',width="500px", height="300px"))
.add(
series_name="",
yaxis_data=data,
calendar_opts=opts.CalendarOpts(
pos_top="80",
pos_left="30",
pos_right="10",
range_= ['2021-07-11', '2021-08-31'],
yearlabel_opts=opts.CalendarYearLabelOpts(is_show= True),
),
)
.set_global_opts(
title_opts=opts.TitleOpts(pos_top="30", pos_left="150", title="2021一战到底每日打卡人数"),
visualmap_opts=opts.VisualMapOpts(
max_=178, min_=160, orient="horizontal", is_piecewise=False
),
)
# .render("calendar_heatmap.html")
)
return can
| [
"noreply@github.com"
] | djh-sudo.noreply@github.com |
7eaa11fb107f3c92f84e5d9f2af1744a99ffae2b | 0f384dcf55722c664f7dc46c2176c20b97fbafa5 | /utils/metrics.py | 3bbb3cb2b0c822b2b4b3bd92e775f1ffca6ec8ce | [] | no_license | Mingxiao-Li/Modeling-Coreference-Relations-in-Visual-Dialog | a516f7bf6b58a16b24f6ec51932c435511bc7d4a | e45f0a6c98b939ba6371fe5df0a8c231f71385be | refs/heads/master | 2023-04-11T11:18:37.068861 | 2021-04-21T13:02:03 | 2021-04-21T13:02:03 | 332,660,992 | 8 | 1 | null | null | null | null | UTF-8 | Python | false | false | 7,477 | py | """
A Metric observes output of certain model, for example, in form of logits or
scores, and accumulates a particular metric with reference to some provided
targets. In context of VisDial, we use Recall (@ 1, 5, 10), Mean Rank, Mean
Reciprocal Rank (MRR) and Normalized Discounted Cumulative Gain (NDCG).
Each ``Metric`` must atleast implement three methods:
- ``observe``, update accumulated metric with currently observed outputs
and targets.
- ``retrieve`` to return the accumulated metric., an optionally reset
internally accumulated metric (this is commonly done between two epochs
after validation).
- ``reset`` to explicitly reset the internally accumulated metric.
Caveat, if you wish to implement your own class of Metric, make sure you call
``detach`` on output tensors (like logits), else it will cause memory leaks.
"""
import torch
import numpy as np
def scores_to_ranks(scores: torch.Tensor):
"""Convert model output scores into ranks."""
batch_size, num_rounds, num_options = scores.size()
scores = scores.view(-1, num_options)
# sort in descending order - largest score gets highest rank
sorted_ranks, ranked_idx = scores.sort(1, descending=True)
# i-th position in ranked_idx specifies which score shall take this
# position but we want i-th position to have rank of score at that
# position, do this conversion
ranks = ranked_idx.clone().fill_(0)
for i in range(ranked_idx.size(0)):
for j in range(num_options):
ranks[i][ranked_idx[i][j]] = j
# convert from 0-99 ranks to 1-100 ranks
ranks += 1
ranks = ranks.view(batch_size, num_rounds, num_options)
return ranks
class SparseGTMetrics(object):
"""
A class to accumulate all metrics with sparse ground truth annotations.
These include Recall (@ 1, 5, 10), Mean Rank and Mean Reciprocal Rank.
"""
def __init__(self):
self._rank_list = []
self._rank_list_rnd = []
self.num_rounds = None
def observe(
self, predicted_scores: torch.Tensor, target_ranks: torch.Tensor
):
predicted_scores = predicted_scores.detach()
# shape: (batch_size, num_rounds, num_options)
predicted_ranks = scores_to_ranks(predicted_scores)
batch_size, num_rounds, num_options = predicted_ranks.size()
self.num_rounds = num_rounds
# collapse batch dimension
predicted_ranks = predicted_ranks.view(
batch_size * num_rounds, num_options
)
# shape: (batch_size * num_rounds, )
target_ranks = target_ranks.view(batch_size * num_rounds).long()
# shape: (batch_size * num_rounds, )
predicted_gt_ranks = predicted_ranks[
torch.arange(batch_size * num_rounds), target_ranks
]
self._rank_list.extend(list(predicted_gt_ranks.cpu().numpy()))
predicted_gt_ranks_rnd = predicted_gt_ranks.view(batch_size, num_rounds)
# predicted gt ranks
self._rank_list_rnd.append(predicted_gt_ranks_rnd.cpu().numpy())
def retrieve(self, reset: bool = True):
num_examples = len(self._rank_list)
if num_examples > 0:
# convert to numpy array for easy calculation.
__rank_list = torch.tensor(self._rank_list).float()
metrics = {
"r@1": torch.mean((__rank_list <= 1).float()).item(),
"r@5": torch.mean((__rank_list <= 5).float()).item(),
"r@10": torch.mean((__rank_list <= 10).float()).item(),
"mean": torch.mean(__rank_list).item(),
"mrr": torch.mean(__rank_list.reciprocal()).item()
}
# add round metrics
_rank_list_rnd = np.concatenate(self._rank_list_rnd)
_rank_list_rnd = _rank_list_rnd.astype(float)
r_1_rnd = np.mean(_rank_list_rnd <= 1, axis=0)
r_5_rnd = np.mean(_rank_list_rnd <= 5, axis=0)
r_10_rnd = np.mean(_rank_list_rnd <= 10, axis=0)
mean_rnd = np.mean(_rank_list_rnd, axis=0)
mrr_rnd = np.mean(np.reciprocal(_rank_list_rnd), axis=0)
for rnd in range(1, self.num_rounds + 1):
metrics["r_1" + "_round_" + str(rnd)] = r_1_rnd[rnd - 1]
metrics["r_5" + "_round_" + str(rnd)] = r_5_rnd[rnd - 1]
metrics["r_10" + "_round_" + str(rnd)] = r_10_rnd[rnd - 1]
metrics["mean" + "_round_" + str(rnd)] = mean_rnd[rnd - 1]
metrics["mrr" + "_round_" + str(rnd)] = mrr_rnd[rnd - 1]
else:
metrics = {}
if reset:
self.reset()
return metrics
def reset(self):
self._rank_list = []
self._rank_list_rnd = []
class NDCG(object):
def __init__(self):
self._ndcg_numerator = 0.0
self._ndcg_denominator = 0.0
def observe(
self, predicted_scores: torch.Tensor, target_relevance: torch.Tensor
):
"""
Observe model output scores and target ground truth relevance and
accumulate NDCG metric.
Parameters
----------
predicted_scores: torch.Tensor
A tensor of shape (batch_size, num_options), because dense
annotations are available for 1 randomly picked round out of 10.
target_relevance: torch.Tensor
A tensor of shape same as predicted scores, indicating ground truth
relevance of each answer option for a particular round.
"""
predicted_scores = predicted_scores.detach()
# shape: (batch_size, 1, num_options)
predicted_scores = predicted_scores.unsqueeze(1)
predicted_ranks = scores_to_ranks(predicted_scores)
# shape: (batch_size, num_options)
predicted_ranks = predicted_ranks.squeeze()
batch_size, num_options = predicted_ranks.size()
k = torch.sum(target_relevance != 0, dim=-1)
# shape: (batch_size, num_options)
_, rankings = torch.sort(predicted_ranks, dim=-1)
# Sort relevance in descending order so highest relevance gets top rnk.
_, best_rankings = torch.sort(
target_relevance, dim=-1, descending=True
)
# shape: (batch_size, )
batch_ndcg = []
for batch_index in range(batch_size):
num_relevant = k[batch_index]
dcg = self._dcg(
rankings[batch_index][:num_relevant],
target_relevance[batch_index],
)
best_dcg = self._dcg(
best_rankings[batch_index][:num_relevant],
target_relevance[batch_index],
)
batch_ndcg.append(dcg / best_dcg)
self._ndcg_denominator += batch_size
self._ndcg_numerator += sum(batch_ndcg)
def _dcg(self, rankings: torch.Tensor, relevance: torch.Tensor):
sorted_relevance = relevance[rankings].cpu().float()
discounts = torch.log2(torch.arange(len(rankings)).float() + 2)
return torch.sum(sorted_relevance / discounts, dim=-1)
def retrieve(self, reset: bool = True):
if self._ndcg_denominator > 0:
metrics = {
"ndcg": float(self._ndcg_numerator / self._ndcg_denominator)
}
else:
metrics = {}
if reset:
self.reset()
return metrics
def reset(self):
self._ndcg_numerator = 0.0
self._ndcg_denominator = 0.0 | [
"eric.lee.xiao@gmail.com"
] | eric.lee.xiao@gmail.com |
8d5f1d2cdc30cdbc8d5132fb442f67a1249267a4 | 9a1d36b9ec03464de74b4074fc1d70d5cadce73e | /src/both.py | eaab9de003af719e39eaf6deea92d968c9487598 | [] | no_license | birajdahal/PyChat | aba197e4bd5d80f5f89c71606ea2c8ae304f6b0a | fe6cee73dfb7dd6ffef0dee0ddf8d474f7904795 | refs/heads/master | 2021-06-21T18:49:14.821725 | 2017-07-26T20:03:10 | 2017-07-26T20:03:10 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,215 | py | import socket, sys, threading, queue, os
from encryption import *
def receive(connection, privk=None):
message = connection.recv(3)
if not message:
raise ConnectionError("Connection ended")
os._exit(0)
remaining = int(message.decode())
message = b''
try:
while remaining > 0:
r = connection.recv(min(remaining, 16))
#print("received next chunk: " + str(r))
message += r
remaining -= 16
except Exception as e:
print(e)
os._exit(0)
if privk:
decrypted = RSA_decrypt(privk, message).encode()
if decrypted.decode()[8] == '?':
return message
else:
return decrypted
else:
return message
def send(connection, message, pubk=None):
if pubk:
if len(message) <= 120:
print(message.decode().strip())
encrypted = RSA_encrypt(pubk, message.decode())
connection.sendall(str(len(encrypted)).zfill(3).encode())
connection.sendall(encrypted)
else:
raise ValueError("Size of message to be encrypted is too large")
else:
if len(message) < 999:
connection.sendall(str(len(message.decode())).zfill(3).encode())
connection.sendall(message)
else:
raise ValueError("Size of message to be sent is too large")
def add_input(input_queue):
while True:
input_queue.put(sys.stdin.read(1))
def get_message(sock, privk):
while True:
try:
message = receive(sock, privk)
print(message.decode().strip())
except ConnectionError:
print("GET_MESSAGE SOCKET CLOSED")
os._exit(0)
except UnicodeDecodeError:
print(message)
print("Generating keys...")
pubk, privk = generate_RSA_keypair()
print("Key pair generated\n")
if input("Server? (1 or 0): ").strip() == "1":
print("Server\n")
sock = socket.socket()
server_address = (input("IP: "), int(input("Port: ")))
#server_address = ("localhost", 80)
print("\nStarting server on {0}:{1}".format(*server_address))
sock.bind(server_address)
sock.listen(1)
cpubk = None
while not cpubk:
print("Waiting for a connection...")
connection, client_address = sock.accept()
if input("\nConnection from {0}:{1}, type 'accept' to accept: ".format(client_address[0], client_address[1])).strip() == "accept":
print("Accepted connection from {0}".format(client_address))
# Exchange keys
send(connection, str(pubk.key.n).encode())
send(connection, str(pubk.key.e).encode())
cpubk = RSA.construct((int(receive(connection).decode()), int(receive(connection).decode())))
else:
print("Declined connection from {0}".format(client_address))
connection.close()
# Start thread that handles the client's communication with us
#client_thread = threading.Thread(target = handle_client, args = (connection, client_address))
#client_thread.daemon = True
#client_thread.start()
# Handle our side of the conversation
input_queue = queue.Queue()
input_thread = threading.Thread(target=add_input, args=(input_queue,))
input_thread.daemon = True
input_thread.start()
receive_thread = threading.Thread(target=get_message, args=(connection, privk))
receive_thread.daemon = True
receive_thread.start()
while True:
try:
message = ""
while not input_queue.empty():
message += input_queue.get()
if message != "":
send(connection, ("Server: " + message).encode(), cpubk)
except ConnectionError:
break
if threading.active_count() < 3:
raise Exception("One of the threads broke")
break
print("Connection ended, closing server")
sock.close()
else:
print("Client\n")
sock = socket.socket()
server_address = None
#server_address = ("localhost", 80)
while not server_address:
potential = (input("IP: "), int(input("Port: ")))
if input("\nYou entered {0}:{1}\nType 'yes' to confirm: ".format(*potential)).strip() == 'yes':
server_address = potential
else:
print("\n")
print("\nConnecting to {0}:{1} ...".format(*server_address))
sock.connect(server_address)
try:
cpubk = RSA.construct((int(receive(sock).decode()), int(receive(sock).decode())))
send(sock, str(pubk.key.n).encode())
send(sock, str(pubk.key.e).encode())
print("Connected\n")
except:
raise ConnectionError("Server refused connection")
# Start thread that handles the client's communication with us
#client_thread = threading.Thread(target = handle_client, args = (connection, client_address))
#client_thread.daemon = True
#client_thread.start()
# Handle our side of the conversation
input_queue = queue.Queue()
input_thread = threading.Thread(target=add_input, args=(input_queue,))
input_thread.daemon = True
input_thread.start()
receive_thread = threading.Thread(target=get_message, args=(sock, privk))
receive_thread.daemon = True
receive_thread.start()
while True:
try:
message = ""
while not input_queue.empty():
message += input_queue.get()
if(message != ""):
send(sock, ("Client: " + message).encode(), cpubk)
except ConnectionError:
break
if threading.active_count() < 3:
raise Exception("One of the threads broke")
break
print("Connection ended, closing client")
sock.close()
'''
Generate key pair
Server:
Launch server
Wait for client
When client joins, ask to accept
If accepted, exchange key information (receive then send)
After that, run separate threads for IO
If client leaves, crash
Client:
Connect to a server
If accepted, exchange key information (send then receive)
After that, run seperate threads for IO
If server ends, crash
'''
| [
"bdahal@g.clemson.edu"
] | bdahal@g.clemson.edu |
59dbd4c1d9bc4461b445620b7ad952709f2b33e1 | 1c54c285e89d668b771554df65dbdc5307d1aa0a | /smallest-multiple.py | 3308096eb8d5b035ecb343e11087728edfce6ab7 | [] | no_license | adamcfro/project-euler-solutions | 2c825d580e9aec3ebe393f6ab05a1ad2ab742131 | 3bc63552619a3d5cc73163b2340ad4e6b0a1a6d7 | refs/heads/master | 2022-09-05T16:07:56.090798 | 2022-08-06T13:20:31 | 2022-08-06T13:20:31 | 153,854,428 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 439 | py | def smallest_multiple():
'''This program finds the smallest number than is evenly divisible by all of the numbers from 1 to 20.'''
i = 1
num = 1
while i < 10:
if num % i == 0:
i += 1
else:
num += 1
i = 1
return num
print(smallest_multiple())
# notes: this program works but there is a faster way to compute. if need to get rid of multiples of unacceptable numbers | [
"adamcfro@gmail.com"
] | adamcfro@gmail.com |
ae1a4e70e93a50b68f9c89b4e9f26d6c8ebb51fd | c9299c10a175a8d925839adc58bbc7f86d4650f5 | /tracker/apps.py | b74e83baa0ca93cb1542d3e047c81241d8fa30c1 | [] | no_license | bartwroblewski/strava_gear_wear_tracker | e2e082a04cfdd226b24f59d64f1a0651224eba68 | e12e3cd559d096668525ce2e0bd6047d3d508c4c | refs/heads/master | 2023-09-04T13:58:42.909991 | 2023-08-20T18:52:06 | 2023-08-20T18:52:06 | 296,386,864 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 88 | py | from django.apps import AppConfig
class TrackerConfig(AppConfig):
    """Django AppConfig for the ``tracker`` application."""

    # Dotted module path Django uses to locate and register this app.
    name = 'tracker'
| [
"barti.wroblewski@gmail.com"
] | barti.wroblewski@gmail.com |
b59c150b00f4f258483032fd787f347eff062302 | 229e1e103bc24dda4d8fef54b762009e19045a45 | /configs/nowd/gc/res101_d_gc.py | ad3d4ce5f80de72c915fef67bf6c818a89d6128a | [
"MIT"
] | permissive | yinmh17/CCNet | c0be71919877c0d44c51cd8fd8ad8f644ef618a6 | d5e90fe5ccfa16389fd25bdd3e2160ffe2dfbd22 | refs/heads/master | 2020-06-18T13:03:46.781284 | 2019-11-12T06:26:59 | 2019-11-12T06:26:59 | 196,311,075 | 1 | 1 | MIT | 2019-07-21T19:48:39 | 2019-07-11T03:10:01 | Python | UTF-8 | Python | false | false | 1,097 | py | model = dict(
    type='basenet',
    pretrained='',  # empty string: no pretrained checkpoint loaded here (backbone weights come from train_cfg.restore_from)
    # Backbone: ResNet-101 — depth=101 with the standard [3, 4, 23, 3]
    # residual-block counts across its 4 stages.
    backbone=dict(
        type='ResNet',
        depth=101,
        num_stages=4,
        block_num=[3, 4, 23, 3],
    ),
    # Attention insertion config.  with_att=False disables it entirely, so the
    # remaining fields (stage flags, positions, locations) are inert here.
    att=dict(
        with_att=False,
        type='glore',
        att_stage=[False,False,True,False],
        att_pos='after_add',
        att_location=[[],[],[5,11,17],[]],
    ),
    # Context module: with_gc=True enables the global-context branch,
    # with_nl=False disables the non-local branch; nowd/whiten lists are
    # empty, i.e. no no-weight-decay or whitening options applied.
    module=dict(
        type='nl_nowd',
        downsample=True,
        whiten_type=[],
        weight_init_scale=1.0,
        with_gc=True,
        with_nl=False,
        nowd=[],
        use_out=False,
        out_bn=False,
    )
)
# Training hyper-parameters: SGD with polynomial LR decay, 60k iterations,
# resuming the backbone from an ImageNet-pretrained ResNet-101 checkpoint.
train_cfg = {
    'batch_size': 8,
    'learning_rate': 1e-2,
    'momentum': 0.9,
    'num_steps': 60000,
    'power': 0.9,
    'random_seed': 1234,
    'restore_from': './dataset/resnet101-imagenet.pth',
    'save_num_images': 2,
    'start_iters': 0,
    'save_from': 59500,
    'save_pred_every': 100,
    'snapshot_dir': 'snapshots/',
    'weight_decay': 0.0005,
}
# Dataset locations and input geometry (Cityscapes, 19 classes,
# 769x769 crops, label 255 ignored in the loss).
data_cfg = {
    'data_dir': 'cityscapes',
    'data_list': './dataset/list/cityscapes/train.lst',
    'ignore_label': 255,
    'input_size': '769,769',
    'num_classes': 19,
}
"yaozhuliang13@gmail.com"
] | yaozhuliang13@gmail.com |
07d9768285b5f23b8c2bd34060f52da9cea03416 | c2a168ec9e91415eeadd53ba6042e614c3e8460c | /test.py | 47261bc4a60b18fa1e1040628cecc70dc7a3d94c | [] | no_license | LiYanChalmers/BoschProductionLine | 530098a9de0d08332511b24a31cdd4b4ec5473fb | de864e55be0e8cd174ccacb06afc77e3dc9ec42a | refs/heads/master | 2020-03-21T20:29:14.134812 | 2018-09-03T08:10:08 | 2018-09-03T08:10:08 | 139,010,159 | 5 | 0 | null | null | null | null | UTF-8 | Python | false | false | 39 | py | import xgboost as xgb
# Smoke check: reaching this line proves the xgboost import above succeeded.
print('xgboost')
"li.yan.chalmers@gmail.com"
] | li.yan.chalmers@gmail.com |
cfce560a7127bc84dcbc5306a488ae7911522e70 | 086a894b060a16adf46f08118f72c85b3861da63 | /music_controller/frontend/apps.py | c46657a9204ae10fb39cdaeeb14b6662b70eb2d6 | [] | no_license | tib-source/House-Party-Fullstack-practice | c650ced51d7f108c3acfed9a51329ae4c4de409f | 80baf8f5893e89eb688a86dc2bdb65d1c692c6fe | refs/heads/master | 2023-08-25T04:52:36.312353 | 2021-11-07T09:29:57 | 2021-11-07T09:29:57 | 413,433,028 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 149 | py | from django.apps import AppConfig
class FrontendConfig(AppConfig):
    """Django AppConfig for the ``frontend`` application."""

    # Use 64-bit auto-incrementing primary keys for models in this app.
    default_auto_field = 'django.db.models.BigAutoField'
    # Dotted module path Django uses to locate and register this app.
    name = 'frontend'
| [
"tibebe1234t@gmail.com"
] | tibebe1234t@gmail.com |
4701c0faa1cfafc48a1f8a72ab04e9a4dd53dae9 | def7ce6778e1b03938f82394ff7b800e8bdc37e3 | /web-app/AutoTomato.py | 86dcd44d7c6b17c67d609ca110458fb79e4833af | [
"MIT"
] | permissive | JeremyEudy/AutoTomato | b37059c74156283d87764d3c4079a0e7f714cb2d | 393b5e0cd9e9f6468ed7554087f6e6b7ec655198 | refs/heads/master | 2020-05-01T05:56:44.443895 | 2019-03-25T18:06:06 | 2019-03-25T18:06:06 | 177,316,551 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 296,751 | py | import tensorflow as tf
from tensorflow.keras.models import load_model
from tensorflow import keras
import numpy as np
import pandas as pd
FILENAME = 'files/script.txt'
word_to_id = {'3po': 0, 'disgusting': 1, 'idea': 2, 'jug': 3, 'rafters': 4, 'pants': 5, 'juno': 6, 'noose': 7, 'townspeople': 8, 'sawing': 9, 'n146': 10, 'ntakes': 11, 'veiled': 12, 'animal': 13, 'rips': 14, 'setting': 15, 'diploma': 16, 'crazed': 17, 'caribbean': 18, 'jaws': 19, 'yellow': 20, 'amateur': 21, 'architectural': 22, 'revolution': 23, 'unleashes': 24, 'hawaii': 25, 'nandy': 26, 'boosters': 27, 'opening': 28, 'century': 29, 'sniff': 30, 'beaches': 31, 'fr': 32, 'weaver': 33, 'attache': 34, 'patrolman': 35, 'needles': 36, 'come': 37, 'husbands': 38, 'henchmen': 39, 'clocked': 40, 'nremoves': 41, 'playpen': 42, 'meanwhile': 43, 'knows': 44, 'albums': 45, 'bedpost': 46, 'hopes': 47, 'tricks': 48, 'dashes': 49, 'embracing': 50, 'translation': 51, 'pinky': 52, 'scoot': 53, 'delta': 54, 'cruising': 55, 'sags': 56, 'jongewaard': 57, 'nsefelt': 58, 'liquor': 59, 'bonfire': 60, 'heaves': 61, 'realization': 62, 'pot': 63, 'jerked': 64, 'separates': 65, 'drenched': 66, 'token': 67, 'wino': 68, '69th': 69, 'visit': 70, 'stinson': 71, '159': 72, 'npasses': 73, 'woven': 74, 'firestair': 75, 'zealand': 76, 'ndrilling': 77, 'blackjack': 78, 'official': 79, 'saloon': 80, 'cowboy': 81, 'foster': 82, 'prick': 83, 'actor': 84, 'lens': 85, 'happily': 86, 'touched': 87, 'evaluation': 88, 'tavern': 89, 'coach': 90, 'hedge': 91, 'breeze': 92, 'grows': 93, 'patrols': 94, "i's": 95, 'intimidated': 96, 'catherine': 97, 'bro': 98, 'jutting': 99, 'leah': 100, 'punnoose': 101, '176': 102, 'helpless': 103, '222': 104, 'musketeer': 105, 'crunch': 106, 'live': 107, 'bleeding': 108, 'fascination': 109, 'vinyl': 110, 'sally': 111, 'posed': 112, 'nervousness': 113, 'rumors': 114, 'activities': 115, 'cornerman': 116, 'heights': 117, 'overcoats': 118, '249': 119, 'reinforced': 120, 'jots': 121, '34th': 122, 'medication': 123, 'hopeless': 124, 'timidly': 125, 'core': 126, 'coordinating': 127, "dougie's": 128, 'blink': 129, 'senator': 130, 'seclusion': 131, 'indicate': 132, 'cabs': 133, 
'smartest': 134, 'nwaiting': 135, 'etc': 136, 'swallowing': 137, 'french': 138, 'imaginary': 139, 'dating': 140, 'ultimately': 141, 'seagulls': 142, 'acted': 143, 'hinged': 144, 'fumes': 145, 'true': 146, 'vicious': 147, 'nnearby': 148, 'golf': 149, 'bert': 150, 'unbelievable': 151, 'attackers': 152, 'squirming': 153, 'nsecurity': 154, 'mystical': 155, 'brody': 156, 'investigator': 157, 'budge': 158, 'fifteen': 159, 'dealers': 160, 'era': 161, 'ulcer': 162, 'nshakes': 163, 'faxed': 164, 'harpoon': 165, 'ourselves': 166, 'confrontation': 167, 'it': 168, 'ashen': 169, 'eighty': 170, 'glaring': 171, 'dabs': 172, 'judgment': 173, 'cruiser': 174, 'ordered': 175, 'with': 176, 'marriage': 177, 'laughin': 178, 'genie': 179, 'helen': 180, 'skyscraper': 181, 'flights': 182, 'generic': 183, 'int': 184, 'shadows': 185, 'moms': 186, '337': 187, 'n151': 188, 'regard': 189, 'n20': 190, 'legit': 191, 'velvet': 192, 'booties': 193, 'george': 194, '1958': 195, 'patron': 196, 'helm': 197, 'wall': 198, 'undone': 199, 'condo': 200, 'pounds': 201, 'nunderstand': 202, 'selectmen': 203, 'masquerade': 204, 'blindly': 205, 'bitty': 206, 'heinrich': 207, 'rocket': 208, 'bellies': 209, 'glove': 210, 'greg': 211, 'switches': 212, 'peaceful': 213, 'stings': 214, 'escorts': 215, 'debris': 216, 'creaks': 217, 'town': 218, 'barbaric': 219, 'helps': 220, 'tree': 221, 'cranks': 222, 'reader': 223, 'ravi': 224, 'cannot': 225, 'glory': 226, 'shuffling': 227, 'some': 228, 'il': 229, 'cullens': 230, 'splat': 231, 'donovan': 232, 'compose': 233, 'barnes': 234, 'ayatollah': 235, 'testing': 236, 'roaches': 237, 'strewn': 238, 'tribune': 239, 'hush': 240, 'unanswered': 241, "nhe's": 242, 'xc3': 243, 'horizontal': 244, 'flickering': 245, 'sandpeople': 246, 'anchorwoman': 247, 'patients': 248, 'rachel': 249, 'pods': 250, 'thrusting': 251, 'mccoy': 252, 'nthan': 253, 'translates': 254, 'approaches': 255, 'nathan': 256, 'environment': 257, 'annoying': 258, 'modem': 259, 'jumpsuit': 260, 'jock': 261, 'veneer': 
262, 'dissolve': 263, 'regulations': 264, 'terminator': 265, 'speedometer': 266, 'bamboo': 267, 'nmax': 268, 'gingerly': 269, 'standing': 270, 'alarms': 271, 'cashier': 272, 'external': 273, 'balances': 274, 'march': 275, 'fetch': 276, 'forgiveness': 277, 'creeping': 278, 'providing': 279, 'increasing': 280, 'n41': 281, 'neverything': 282, 'conquer': 283, 'turrets': 284, 'nwoman': 285, 'gifts': 286, 'easily': 287, 'lodge': 288, 'n161': 289, 'controlled': 290, 'monologue': 291, "'cause": 292, '363': 293, 'catches': 294, 'infrared': 295, 'thermal': 296, 'lingerie': 297, 'wields': 298, 'consulate': 299, 'tunnel': 300, 'un': 301, 'placed': 302, 'dent': 303, 'soothing': 304, "who's": 305, 'ncontinues': 306, 'slashes': 307, 'feet': 308, 'wrist': 309, 'arkansas': 310, 'ngoes': 311, 'bb': 312, 'row': 313, 'razorhead': 314, 'sub': 315, 'opium': 316, 'skeptical': 317, 'x9d': 318, 'endearing': 319, 'grandma': 320, 'express': 321, 'standard': 322, 'wet': 323, 'stoeger': 324, 'lifting': 325, 'trumpets': 326, 'unfolds': 327, 'ntone': 328, 'favors': 329, 'xadt': 330, 'goose': 331, 'toolbox': 332, 'witnesses': 333, 'salvation': 334, 'gesturing': 335, 'pike': 336, 'nwhatever': 337, 'frame': 338, 'factor': 339, 'sweet': 340, 'aw': 341, '1986': 342, 'fairy': 343, 'shipyard': 344, 'twitch': 345, 'password': 346, 'robau': 347, 'drunkenly': 348, 'supernatural': 349, 'boss': 350, 'urges': 351, 'jogging': 352, 'decide': 353, 'destroyed': 354, 'exercise': 355, 'pong': 356, 'discussing': 357, 'skinny': 358, 'righteous': 359, 'enthusiastic': 360, 'locker': 361, 'royalty': 362, 'beers': 363, 'poldek': 364, 'repaired': 365, 'whatever': 366, 'nlaughing': 367, 'runway': 368, 'cadenza': 369, '132': 370, '191': 371, 'ago': 372, 'kitty': 373, 'tessie': 374, 'respectful': 375, 'towne': 376, 'sketch': 377, 'sucks': 378, 'ignite': 379, 'cooper': 380, 'onward': 381, 'widens': 382, 'indulgent': 383, '265': 384, 'stabilizes': 385, 'unhappy': 386, 'serious': 387, 'monk': 388, 'flung': 389, 'desire': 390, 
'cheese': 391, 'seventy': 392, 'shocked': 393, 'strack': 394, 'hawkins': 395, 'ntop': 396, 'thinking': 397, 'manner': 398, 'boiling': 399, 'phonograph': 400, 'widen': 401, 'randy': 402, '65': 403, 'shirtless': 404, 'herd': 405, 'ncloser': 406, '241': 407, 'oo': 408, 'very': 409, 'representative': 410, 'riff': 411, 'loretta': 412, 'ns': 413, 'whining': 414, 'ii': 415, 'danburry': 416, 'obeys': 417, 'dammit': 418, 'hospitality': 419, 'supper': 420, 'hoping': 421, 'talcott': 422, 'tupac': 423, 'pots': 424, 'perhaps': 425, 'massaging': 426, 'basketball': 427, 'lobby': 428, 'peaks': 429, 'mysterious': 430, 'thoughtful': 431, 'woken': 432, 'nproprietor': 433, 'lucite': 434, 'electronic': 435, 'remembers': 436, 'felix': 437, 'frosted': 438, 'stretches': 439, 'chases': 440, 'returned': 441, 'returning': 442, 'nbelle': 443, 'body': 444, 'original': 445, 'nsultan': 446, 'nap': 447, 'cookie': 448, 'means': 449, 'sidewinder': 450, 'schwartz': 451, 'despair': 452, 'radisson': 453, 'rams': 454, 'surging': 455, 'distraught': 456, 'foreign': 457, 'carson': 458, 'rookie': 459, 'helpful': 460, 'cheerfully': 461, 'brothel': 462, 'bora': 463, 'crouched': 464, 'smacks': 465, 'stairwell': 466, 'saddlebags': 467, 'sliced': 468, 'weaken': 469, 'obstacles': 470, 'pressurized': 471, '313': 472, 'engine': 473, 'tenderly': 474, 'sunglasses': 475, 'corny': 476, 'goddamit': 477, 'fine': 478, 'beldam': 479, 'lamppost': 480, 'n117': 481, 'leopold': 482, 'mathematics': 483, 'patches': 484, 'umm': 485, 'bar': 486, 'ambulance': 487, 'eames': 488, 'nighttime': 489, '225': 490, 'period': 491, 'manila': 492, 'recliner': 493, 'transmissions': 494, 'jittery': 495, 'gears': 496, 'obtain': 497, 'torres': 498, 'begbie': 499, '9mm': 500, 'exhaust': 501, 'sorts': 502, 'batsman': 503, 'shrubs': 504, 'lies': 505, 'innocently': 506, 'clumsily': 507, 'waldos': 508, 'cutting': 509, 'coverage': 510, 'lunging': 511, 'discovers': 512, 'mags': 513, 'secretaries': 514, 'parkway': 515, '291': 516, 'awareness': 517, 
'otto': 518, 'bounds': 519, 'bull': 520, 'jacinta': 521, 'tamil': 522, 'plot': 523, 'hall': 524, 'gunshots': 525, 'denver': 526, 'smirking': 527, 'softer': 528, 'strong': 529, 'fedex': 530, 'yay': 531, 'contribute': 532, 'debt': 533, 'atop': 534, 'unorthodox': 535, 'unlatches': 536, 'lax': 537, 'penetrating': 538, 'chew': 539, 'tattooed': 540, 'cockpit': 541, 'lemon': 542, 'janine': 543, 'woozy': 544, 'anne': 545, "b'": 546, 'cautious': 547, 'supervising': 548, '239': 549, 'pruitt': 550, 'muted': 551, 'steadily': 552, 'cock': 553, 'ridin': 554, 'sleeps': 555, '107': 556, 'citizens': 557, 'league': 558, 'cruise': 559, 'mcmurphy': 560, 'drawer': 561, 'routes': 562, 'scrawled': 563, 'italian': 564, 'hopkins': 565, 'surgery': 566, 'alderaan': 567, 'outlaw': 568, 'x88re': 569, 'clamps': 570, 'beaming': 571, 'colonies': 572, 'saturn': 573, 'finn': 574, 'hathaway': 575, 'necktie': 576, '184': 577, 'vertical': 578, 'ncrab': 579, 'barrage': 580, 'buoy': 581, 'jerking': 582, 'concerned': 583, 'smeared': 584, 'dear': 585, 'spaceships': 586, 'harvard': 587, 'lawn': 588, 'fanning': 589, 'unfurls': 590, 'emanating': 591, 'n130': 592, 'flight': 593, 'sterile': 594, 'roland': 595, 'equivalent': 596, 'npieces': 597, 'newscaster': 598, 'digs': 599, 'shakes': 600, 'attitude': 601, 'continuing': 602, 'napartment': 603, 'paramedic': 604, 'crap': 605, 'subtitled': 606, 'saw': 607, 'resistance': 608, 'se': 609, 'mixed': 610, 'collecting': 611, 'compares': 612, 'soy': 613, 'tightens': 614, 'extended': 615, 'ric': 616, 'drown': 617, 'junky': 618, 'nsmall': 619, 'outa': 620, 'accuse': 621, 'featuring': 622, 'steers': 623, 'alejandro': 624, 'health': 625, 'flutters': 626, 'resemble': 627, 'great': 628, 'vi': 629, 'paralyzed': 630, 'joke': 631, 'bearded': 632, 'cripple': 633, 'grandpa': 634, 'retard': 635, '228': 636, 'swings': 637, 'sinks': 638, 'talks': 639, 'lucky': 640, 'painful': 641, 'mount': 642, 'moaning': 643, 'wouldn': 644, 'cocktail': 645, 'forklift': 646, '201': 647, 'plunged': 
648, 'biggs': 649, 'personality': 650, 'holdaway': 651, 'nboth': 652, 'companies': 653, 'detailed': 654, 'carol': 655, 'shoveling': 656, 'sawyer': 657, 'connections': 658, 'splatter': 659, 'intellectual': 660, 'nail': 661, "where's": 662, 'nskip': 663, 'ncut': 664, 'cocktails': 665, 'realized': 666, 'osterman': 667, 'courses': 668, 'casino': 669, 'fleeing': 670, 'hunch': 671, 'savoring': 672, 'lingers': 673, 'balance': 674, 'n49': 675, 'molten': 676, 'spurs': 677, '114': 678, 'nwhite': 679, 'nbecause': 680, 'margin': 681, 'broadway': 682, 'salad': 683, 'solidly': 684, 'backing': 685, 'highest': 686, 'age': 687, 'magnante': 688, 'sad': 689, 'motto': 690, 'spaces': 691, 'sturdy': 692, 'mistress': 693, 'did': 694, 'catalog': 695, 'digital': 696, 'plaque': 697, 'abort': 698, 'cat': 699, 'raisins': 700, 'nturned': 701, 'lapels': 702, 'nnods': 703, 'hooks': 704, 'wandered': 705, 'warped': 706, 'ways': 707, 'weird': 708, 'flaring': 709, 'lauren': 710, 'nthroat': 711, 'tokyo': 712, 'rattle': 713, 'scuttles': 714, 'paste': 715, 'slick': 716, 'scan': 717, 'fading': 718, 'shackled': 719, 'sweep': 720, 'rolls': 721, 'awed': 722, 'squeezed': 723, 'wyatt': 724, 'muthafucka': 725, 'gentlemen': 726, 'crash': 727, '30s': 728, 'yella': 729, 'memorabilia': 730, 'clownfish': 731, 'reverend': 732, 'filed': 733, '154': 734, 'ncarmine': 735, 'cathedral': 736, 'des': 737, 'suitcases': 738, 'mmmm': 739, 'beef': 740, 'laboratory': 741, 'guyrich': 742, 'dimaso': 743, 'crescendo': 744, 'gravely': 745, 'cable': 746, 'fitzgerald': 747, 'deploy': 748, 'going': 749, 'laden': 750, 'dali': 751, 'lockers': 752, 'differences': 753, 'taller': 754, 'producer': 755, 'speaker': 756, 'tops': 757, 'host': 758, 'dorado': 759, 'fishermen': 760, 'ninety': 761, 'spooky': 762, 'vega': 763, 'claymore': 764, 'nkeeps': 765, 'sunlit': 766, 'ranch': 767, 'thrashes': 768, 'bulb': 769, 'dangles': 770, 'victorian': 771, 'vulnerable': 772, 'fax': 773, 'potential': 774, 'vu': 775, "cont'd": 776, 'grilled': 777, 'n181': 
778, 'plows': 779, 'metal': 780, 'god': 781, 'harry': 782, 'knees': 783, 'booty': 784, 'promoter': 785, 'actually': 786, 'ikea': 787, 'mesh': 788, 'flowers': 789, 'drab': 790, 'barge': 791, 'wrecked': 792, 'bartender': 793, 'entry': 794, 'hasn': 795, 'disc': 796, 'identification': 797, 'hmm': 798, 'represent': 799, 'absorbs': 800, 'expectantly': 801, 'morocco': 802, 'breath': 803, 'ndrives': 804, 'practices': 805, 'fry': 806, 'perfume': 807, 'teaching': 808, 'syd': 809, 'mommy': 810, 'homeless': 811, 'chuck': 812, 'jonathan': 813, 'n244': 814, 'blinded': 815, 'literally': 816, 'nam': 817, 'surely': 818, 'pch': 819, 'ja': 820, 'sheaf': 821, 'tackle': 822, 'where': 823, 'entangled': 824, 'buckles': 825, 'n164': 826, 'choir': 827, 'whales': 828, "couldn't": 829, 'demeanor': 830, 'gerta': 831, 'philip': 832, 'virtually': 833, 'topeka': 834, 'muscles': 835, '300': 836, 'practicing': 837, 'lace': 838, '276': 839, 'passport': 840, 'analyst': 841, 'partition': 842, 'term': 843, 'unfolded': 844, 'grits': 845, 'nbreak': 846, 'politicians': 847, 'milton': 848, 'cia': 849, 'wrestles': 850, 'peacefully': 851, 'bolts': 852, 'gm': 853, 'starship': 854, 'scamper': 855, 'drops': 856, 'alex': 857, '71': 858, 'anonymous': 859, 'components': 860, 'viewfinder': 861, 'snake': 862, 'ngroup': 863, 'drain': 864, 'error': 865, 'npaulie': 866, 'capitol': 867, 'versus': 868, 'incident': 869, 'jellybean': 870, 'including': 871, 'n185': 872, 'intriguing': 873, 'crews': 874, 'likeness': 875, 'unity': 876, 'veers': 877, 'gravitational': 878, 'dominant': 879, 'stunt': 880, 'apt': 881, 'dont': 882, 'rustles': 883, 'folding': 884, 'angeles': 885, 'granddaughter': 886, 'buying': 887, 'nrobbins': 888, 'convoy': 889, 'pronounced': 890, 'portal': 891, 'rhythm': 892, 'knapsack': 893, 'ndolly': 894, 'discussion': 895, 'purple': 896, 'toasts': 897, 'jonah': 898, 'ringo': 899, 'major': 900, 'gangs': 901, 'vegetation': 902, 'chin': 903, 'widely': 904, 'sample': 905, 'policy': 906, 'grain': 907, 'stretch': 
908, 'ahhhh': 909, '1': 910, 'shoppers': 911, 'white': 912, 'spice': 913, 'vow': 914, 'nshotgun': 915, 'super': 916, '109': 917, 'pillow': 918, 'nestled': 919, 'ashtray': 920, 'folds': 921, 'greedily': 922, 'soggy': 923, 'boyle': 924, 'atmospheric': 925, 'whips': 926, 'unit': 927, 'nactually': 928, 'superhub': 929, 'marco': 930, 'reference': 931, 'statement': 932, 'plainclothes': 933, 'n197': 934, 'niago': 935, 'aghast': 936, 'euro': 937, 'convincing': 938, 'dance': 939, 'concussion': 940, 'katanas': 941, 'bases': 942, 'buffalo': 943, 'groin': 944, 'accept': 945, '73': 946, '20s': 947, 'loring': 948, 'muck': 949, 'openly': 950, 'gloria': 951, 'rigged': 952, 'wedge': 953, 'candidate': 954, 'gathers': 955, 'janice': 956, 'doing': 957, 'mention': 958, 'thrust': 959, 'smug': 960, 'damage': 961, 'dah': 962, 'detonated': 963, 'slower': 964, 'floral': 965, 'peculiar': 966, 'rodney': 967, 'ngrandma': 968, 'dan': 969, 'goals': 970, 'wilkes': 971, 'fuselage': 972, 'unknown': 973, 'unnerved': 974, 'municipal': 975, 'underestimate': 976, 'vol': 977, 'effectively': 978, 'n168': 979, 'gardner': 980, 'venafro': 981, 'rooting': 982, 'ariadne': 983, 'greensly': 984, 'nbut': 985, 'restored': 986, 'settles': 987, 'drugstore': 988, 'skating': 989, 'notices': 990, 'hilltop': 991, 'sneezes': 992, 'crops': 993, 'ne': 994, 'game': 995, 'torpedo': 996, 'dimension': 997, 'sophisticated': 998, 'banking': 999, '113': 1000, 'tumbling': 1001, 'halsey': 1002, 'nwho': 1003, 'nsammy': 1004, 'meal': 1005, 'arch': 1006, 'nbeneath': 1007, 'stripper': 1008, 'separating': 1009, '255': 1010, 'breadfruit': 1011, 'heavy': 1012, 'giddy': 1013, 'looking': 1014, 'clings': 1015, 'durden': 1016, 'picasso': 1017, 'ballplayer': 1018, 'vodka': 1019, 'asleep': 1020, 'embrace': 1021, 'productions': 1022, 'dolphins': 1023, 'amusement': 1024, 'psychologist': 1025, 'freaking': 1026, 'dock': 1027, 'dougie': 1028, 'choosing': 1029, 'maintaining': 1030, 'stardate': 1031, 'rear': 1032, 'condescending': 1033, 'correctly': 
1034, 'hapless': 1035, 'dismembered': 1036, 'assed': 1037, '224f': 1038, 'assembled': 1039, 'ngoing': 1040, 'post': 1041, 'oogway': 1042, 'levels': 1043, 'central': 1044, 'puddles': 1045, 'stargate': 1046, 'spider': 1047, "steve's": 1048, 'fully': 1049, 'loaded': 1050, 'automobile': 1051, 'wad': 1052, 'taught': 1053, 'nbuilding': 1054, 'affair': 1055, 'nruns': 1056, 'cracow': 1057, 'awning': 1058, 'judging': 1059, 'securely': 1060, 'sorry': 1061, 'introduces': 1062, 'planted': 1063, 'airborne': 1064, 'nremember': 1065, 'nsummer': 1066, 'hound': 1067, 'furrows': 1068, '75': 1069, 'diaz': 1070, 'particles': 1071, 'root': 1072, 'maid': 1073, 'bulge': 1074, 'psychopath': 1075, 'genco': 1076, 'solemn': 1077, 'injury': 1078, 'stool': 1079, 'careful': 1080, 'evans': 1081, 'words': 1082, 'hesitate': 1083, 'whimper': 1084, 'beep': 1085, 'hakim': 1086, 'fi': 1087, 'astronauts': 1088, 'fucks': 1089, "'round": 1090, 'dillon': 1091, 'birth': 1092, 'reminiscent': 1093, 'atomic': 1094, 'knives': 1095, 'streams': 1096, 'jerky': 1097, 'bluff': 1098, 'jumble': 1099, 'musicians': 1100, 'sg13': 1101, 'ndoor': 1102, 'accepted': 1103, 'considered': 1104, 'monday': 1105, 'docking': 1106, 'film': 1107, 'nthrows': 1108, 'buddy': 1109, 'clasped': 1110, 'two': 1111, 'sixty': 1112, 'gettin': 1113, "i'm": 1114, 'caps': 1115, 'ruler': 1116, 'nwas': 1117, 'roadway': 1118, 'barney': 1119, 'galleria': 1120, 'five': 1121, 'rad': 1122, 'brings': 1123, 'crowbar': 1124, 'cities': 1125, 'still': 1126, 'effort': 1127, 'weightless': 1128, 'machine': 1129, 'nwork': 1130, 'nquiet': 1131, 'nstrange': 1132, 'sensors': 1133, 'nobel': 1134, 'greasy': 1135, 'natured': 1136, 'blinking': 1137, 'front': 1138, 'cracking': 1139, 'norad': 1140, 'whitaker': 1141, 'enjoyment': 1142, 'insurance': 1143, 'flemmer': 1144, 'gail': 1145, 'persons': 1146, 'isn': 1147, 'mustang': 1148, 'everywhere': 1149, 'cavity': 1150, 'lining': 1151, 'strokes': 1152, 'marquis': 1153, 'monkey': 1154, 'hefts': 1155, 'snoop': 1156, 'focusing': 
1157, 'given': 1158, 'films': 1159, 'forbid': 1160, 'uses': 1161, 'greater': 1162, 'moral': 1163, 'insecure': 1164, 'revision': 1165, 'nlooks': 1166, 'harp': 1167, 'detective': 1168, 'ammunition': 1169, 'mardi': 1170, 'taping': 1171, 'cut': 1172, 'dumps': 1173, 'signals': 1174, 'blushes': 1175, 'mistaken': 1176, 'wallah': 1177, 'furious': 1178, 'individuals': 1179, 'screams': 1180, 'buzz': 1181, 'fierce': 1182, 'nbriefcase': 1183, 'exposed': 1184, '345': 1185, 'grave': 1186, 'animals': 1187, 'full': 1188, 'cent': 1189, 'disguise': 1190, 'difference': 1191, 'thousands': 1192, 'accomplished': 1193, 'nscreams': 1194, 'inn': 1195, 'disgust': 1196, 'nslow': 1197, 'snell': 1198, 'decorations': 1199, 'robbins': 1200, 'rickety': 1201, 'limitations': 1202, 'nbegins': 1203, 'aria': 1204, 'corridors': 1205, "'the": 1206, 'ncolonel': 1207, 'onstage': 1208, 'possessions': 1209, 'enforcement': 1210, 'nlisten': 1211, 'nmetal': 1212, 'calming': 1213, 'floorboard': 1214, 'floats': 1215, 'irene': 1216, 'banister': 1217, 'n184': 1218, 'marker': 1219, 'seven': 1220, 'aladdin': 1221, 'stepped': 1222, 'pizzeria': 1223, 'groper': 1224, 'burt': 1225, 'ablaze': 1226, 'tracked': 1227, 'mahmoud': 1228, 'crawling': 1229, 'circuits': 1230, 'proximity': 1231, '289': 1232, 'committee': 1233, 'tearing': 1234, 'events': 1235, 'dorsal': 1236, 'assisting': 1237, 'hans': 1238, 'schoolgirl': 1239, 'goldberg': 1240, 'collapsing': 1241, 'plaid': 1242, 'arrested': 1243, 'maps': 1244, 'korea': 1245, 'shifu': 1246, 'ncabbie': 1247, 'savage': 1248, 'ngreat': 1249, 'saver': 1250, 'nrosie': 1251, 're': 1252, 'apartments': 1253, 'congressmen': 1254, 'saucer': 1255, 'duties': 1256, 'kentucky': 1257, 'field': 1258, 'motor': 1259, 'klonowska': 1260, 'eater': 1261, '09': 1262, 'cannon': 1263, 'roberto': 1264, 'skipping': 1265, 'tenth': 1266, 'nthings': 1267, 'semi': 1268, 'spilled': 1269, 'oldest': 1270, 'replacing': 1271, 'sewing': 1272, 'sac': 1273, '58': 1274, 'helicopter': 1275, 'elizabeth': 1276, 'scout': 
1277, 'gargantua': 1278, 'remarkably': 1279, 'leslie': 1280, 'sprays': 1281, 'ty': 1282, 'tempted': 1283, 'adopted': 1284, 'rail': 1285, 'connected': 1286, 'continuous': 1287, 'idly': 1288, 'hatch': 1289, 'defensive': 1290, 'isolated': 1291, 'crucified': 1292, 'sneaking': 1293, 'groupies': 1294, 'mulan': 1295, 'maltese': 1296, 'coursing': 1297, 'likewise': 1298, 'ante': 1299, 'useless': 1300, 'nyu': 1301, 'barriers': 1302, 'obligations': 1303, 'aims': 1304, 'blam': 1305, 'creak': 1306, 'sebastian': 1307, 'cheers': 1308, 'religious': 1309, 'badass': 1310, 'n37': 1311, 'outside': 1312, 'asteroid': 1313, 'disbelieving': 1314, 'snapper': 1315, 'driveway': 1316, 'flak': 1317, 'jameson': 1318, 'pursue': 1319, 'homie': 1320, 'concierge': 1321, "nthat's": 1322, 'nshoulders': 1323, 'expelled': 1324, 'scorpion': 1325, "'ra": 1326, 'sided': 1327, 'eyes': 1328, 'startling': 1329, 'officially': 1330, 'hueys': 1331, 'medieval': 1332, 'x93': 1333, 'hysterically': 1334, 'riverside': 1335, 'spine': 1336, "'": 1337, 'listening': 1338, 'superior': 1339, 'ncigarette': 1340, 'halls': 1341, "'a": 1342, 'slender': 1343, 'nabout': 1344, 'outward': 1345, 'auto': 1346, 'intersection': 1347, 'fn': 1348, 'ackbar': 1349, 'new': 1350, 'ndarkness': 1351, 'blanket': 1352, 'murky': 1353, 'communicate': 1354, 'had': 1355, 'chelsea': 1356, 'fuses': 1357, 'ancestors': 1358, 'stationary': 1359, 'canvas': 1360, 'refuse': 1361, 'knocking': 1362, 'nchi': 1363, 'neames': 1364, 'ripples': 1365, 'mouthing': 1366, 'telemetry': 1367, 'scream': 1368, 'skids': 1369, 'removing': 1370, 'tape': 1371, 'raven': 1372, 'nbuzz': 1373, 'rushes': 1374, 'lied': 1375, 'feelin': 1376, 'jade': 1377, 'tuning': 1378, 'anxiety': 1379, 'nlooking': 1380, 'approach': 1381, 'stockings': 1382, 'plague': 1383, 'nrenton': 1384, 'hannah': 1385, 'etched': 1386, 'kim': 1387, 'friday': 1388, 'roman': 1389, 'rash': 1390, 'breach': 1391, 'custody': 1392, 'negotiator': 1393, 'miguel': 1394, 'n136': 1395, 'hosty': 1396, 'involving': 1397, 
'eskimo': 1398, 'cross': 1399, 'besides': 1400, 'iranian': 1401, 's': 1402, 'nyusuf': 1403, 'gone': 1404, '138': 1405, 'finish': 1406, 'filming': 1407, 'beach': 1408, 'muzak': 1409, 'nbehind': 1410, 'n120': 1411, 'reeves': 1412, 'deafening': 1413, 'fabulous': 1414, 'lapd': 1415, 'fix': 1416, '155': 1417, 'throttle': 1418, 'sweating': 1419, 'nickname': 1420, 'trolley': 1421, 'bandage': 1422, 'fray': 1423, 'aningang': 1424, 'cheerleaders': 1425, 'minor': 1426, 'particularly': 1427, 'billows': 1428, 'edward': 1429, 'n186': 1430, '2nd': 1431, 'nurse': 1432, '1956': 1433, 'emotions': 1434, 'followers': 1435, 'submachine': 1436, 'explanation': 1437, 'mausoleum': 1438, 'positive': 1439, 'brushes': 1440, 'lanes': 1441, 'mistake': 1442, 'rabbi': 1443, 'elaborate': 1444, 'monster': 1445, 'handgun': 1446, 'gunner': 1447, 'les': 1448, 'passengers': 1449, 'snoring': 1450, 'undaunted': 1451, 'egyptian': 1452, 'breathe': 1453, 'momentary': 1454, 'year': 1455, 'tense': 1456, 'nag': 1457, 'plugs': 1458, 'divine': 1459, 'sake': 1460, 'wyborne': 1461, 'nyellow': 1462, 'thoroughfare': 1463, 'spectacle': 1464, 'picnic': 1465, 'ntwenty': 1466, 'ryerson': 1467, 'honor': 1468, 'sock': 1469, 'dunno': 1470, 'cares': 1471, 'cocks': 1472, 'labor': 1473, 'applied': 1474, 'nagent': 1475, 'morgue': 1476, 'nhave': 1477, 'montreal': 1478, 'carved': 1479, 'chad': 1480, 'magnum': 1481, 'blueberry': 1482, 'nhigh': 1483, 'ancestor': 1484, 'sheer': 1485, 'nc': 1486, 'darling': 1487, 'drilling': 1488, 'convulses': 1489, 'bobby': 1490, 'tarts': 1491, 'isla': 1492, 'shaking': 1493, 'follows': 1494, 'n39': 1495, 'sunset': 1496, 'nchecks': 1497, 'nknocks': 1498, 'tighter': 1499, 'dumb': 1500, 'nthank': 1501, 'gotham': 1502, 'symphony': 1503, 'promise': 1504, 'double': 1505, 'finished': 1506, 'bucking': 1507, 'temper': 1508, 'tagge': 1509, 'unlocking': 1510, 'chickens': 1511, 'landscape': 1512, 'crushes': 1513, 'touching': 1514, 'nceiling': 1515, 'grove': 1516, 'promises': 1517, 'gazes': 1518, 'travels': 
1519, 'surprising': 1520, 'venus': 1521, 'predicted': 1522, 'product': 1523, 'precious': 1524, 'hoses': 1525, 'handheld': 1526, 'injured': 1527, 'mates': 1528, 'toyota': 1529, 'woman': 1530, 'venom': 1531, 'trajectory': 1532, 'downey': 1533, 'lifted': 1534, 'hat': 1535, 'nmade': 1536, '338': 1537, 'bookcase': 1538, 'nsign': 1539, 'bobbing': 1540, 'compact': 1541, 'explains': 1542, 'heard': 1543, 'suffering': 1544, 'craziest': 1545, 'goal': 1546, 'hooded': 1547, 'cavern': 1548, 'n108': 1549, 'magazines': 1550, 'impress': 1551, 'santos': 1552, 'coal': 1553, 'humor': 1554, 'wilson': 1555, 'community': 1556, 'partying': 1557, 'opponents': 1558, 'regrets': 1559, 'nedgar': 1560, 'sensitive': 1561, '216': 1562, "'oh": 1563, 'gaping': 1564, 'darkly': 1565, 'garcia': 1566, 'federation': 1567, 'hit': 1568, 'kristoff': 1569, 'detail': 1570, 'betting': 1571, 'dumped': 1572, 'catch': 1573, 'butcher': 1574, 'ncontrol': 1575, 'deserves': 1576, 'begun': 1577, 'ngurgle': 1578, 'people': 1579, 'cooly': 1580, 'wolfi': 1581, 'dea': 1582, 'scaffolding': 1583, 'tourists': 1584, 'defendants': 1585, 'stone': 1586, 'universal': 1587, 'giambi': 1588, 'cates': 1589, 'was': 1590, 'clinch': 1591, 'poking': 1592, 'pupils': 1593, 'wiring': 1594, 'n67': 1595, 'manhole': 1596, 'dick': 1597, 'rosenfeld': 1598, 'whack': 1599, 'installation': 1600, 'ben': 1601, 'shriek': 1602, 'lonzo': 1603, 'jackson': 1604, 'character': 1605, 'deformed': 1606, 'easy': 1607, 'affairs': 1608, 'covers': 1609, 'n147': 1610, 'psst': 1611, 'exploding': 1612, 'jockey': 1613, 'belmonte': 1614, 'dogs': 1615, 'defence': 1616, 'criminals': 1617, 'destiny': 1618, 'rendition': 1619, 'volley': 1620, 'breathing': 1621, 'sipping': 1622, 'sixties': 1623, 'airport': 1624, 'paw': 1625, 'ndirection': 1626, '220': 1627, 'network': 1628, 'heroin': 1629, 'chet': 1630, 'mop': 1631, 'pal': 1632, 'n127': 1633, 'duffle': 1634, 'podium': 1635, 'troubles': 1636, 'hamburger': 1637, "'see": 1638, 'nasa': 1639, 'chaos': 1640, 'disney': 1641, 
'generals': 1642, '211': 1643, 'penetrate': 1644, 'extinction': 1645, 'beg': 1646, 'lot': 1647, 'schmuck': 1648, 'incense': 1649, 'globe': 1650, 'psychiatric': 1651, 'skyline': 1652, 'pocket': 1653, 'telegraph': 1654, 'loops': 1655, 'sworn': 1656, 'njen': 1657, 'roots': 1658, 'troops': 1659, 'uncertainly': 1660, 'interviews': 1661, 'them': 1662, 'casings': 1663, 'crate': 1664, 'gape': 1665, 'cadet': 1666, 'ntweed': 1667, 'guarding': 1668, 'rain': 1669, 'proudly': 1670, 'compys': 1671, 'searchlight': 1672, 'batting': 1673, 'vendor': 1674, 'n159': 1675, 'neuralyzer': 1676, 'quartet': 1677, 'excellent': 1678, 'patient': 1679, 'ceiling': 1680, 'majestic': 1681, 'translucent': 1682, 'elbows': 1683, 'match': 1684, 'amber': 1685, 'quote': 1686, 'asylum': 1687, 'unfortunately': 1688, 'boiler': 1689, 'cogsworth': 1690, 'cleats': 1691, 'viet': 1692, 'consular': 1693, 'unbeknownst': 1694, 'dishes': 1695, 'arrows': 1696, 'grenades': 1697, 'handler': 1698, 'ferociously': 1699, 'maxims': 1700, 'lucy': 1701, 'irony': 1702, 'emanates': 1703, 'buckle': 1704, 'heidi': 1705, 'interrupting': 1706, 'midair': 1707, 'engell': 1708, 'open': 1709, 'generous': 1710, 'softens': 1711, 'thermos': 1712, 'sleepless': 1713, 'stares': 1714, 'hearts': 1715, 'victor': 1716, 'psych': 1717, 'oui': 1718, 'publicist': 1719, 'smile': 1720, 'me': 1721, 'storefront': 1722, 'memo': 1723, '227': 1724, 'n18': 1725, 'nstand': 1726, 'npan': 1727, 'lafayette': 1728, 'nlarge': 1729, 'flashy': 1730, 'goat': 1731, 'diagram': 1732, 'hesitating': 1733, 'sargent': 1734, 'thomas': 1735, 'turf': 1736, 'vikings': 1737, 'steve': 1738, 'folder': 1739, 'nfive': 1740, 'revving': 1741, 'valuable': 1742, 'gives': 1743, 'bettina': 1744, 'nut': 1745, 'artoo': 1746, 'bmw': 1747, 'dissolving': 1748, 'console': 1749, 'includes': 1750, 'commence': 1751, 'lake': 1752, 'bugle': 1753, 'thumps': 1754, 'anything': 1755, 'hillbillies': 1756, 'sixth': 1757, 'separation': 1758, 'skylar': 1759, '40': 1760, 'wingmen': 1761, 'options': 1762, 
'altar': 1763, 'october': 1764, 'vine': 1765, 'corresponding': 1766, 'lance': 1767, 'sucking': 1768, 'pondering': 1769, 'commuter': 1770, 'nfischer': 1771, 'bloody': 1772, 'paraphernalia': 1773, 'sink': 1774, 'canada': 1775, 'potty': 1776, 'empty': 1777, 'detonate': 1778, 'expect': 1779, 'communists': 1780, 'printing': 1781, 'yoke': 1782, 'fitting': 1783, 'riggs': 1784, 'made': 1785, 'crushing': 1786, 'plastered': 1787, 'dispenser': 1788, 'bi': 1789, 'swoop': 1790, 'stirs': 1791, 'veil': 1792, 'musta': 1793, 'potts': 1794, 'somewhat': 1795, 'unspoken': 1796, 'nadder': 1797, 'thinks': 1798, 'one': 1799, 'admiral': 1800, 'presumably': 1801, 'neighborhood': 1802, 'wrath': 1803, 'majesty': 1804, 'soundstage': 1805, 'frederick': 1806, '304': 1807, 'towel': 1808, 'fitted': 1809, 'speeding': 1810, 'stuffs': 1811, 'soap': 1812, 'transformation': 1813, 'coated': 1814, 'both': 1815, 'marge': 1816, 'lightly': 1817, 'negative': 1818, 'development': 1819, 'shimmy': 1820, 'nbig': 1821, 'possession': 1822, 'backup': 1823, 'cheerful': 1824, 'hoot': 1825, 'absorbed': 1826, 'luther': 1827, 'altogether': 1828, 'spur': 1829, 'spinning': 1830, 'baggage': 1831, 'middle': 1832, 'comb': 1833, 'n192': 1834, 'fell': 1835, 'talk': 1836, 'stamper': 1837, 'decent': 1838, 'many': 1839, 'traditional': 1840, 'snickers': 1841, 'dust': 1842, 'nmoonfish': 1843, 'passed': 1844, 'nground': 1845, 'finds': 1846, 'extracts': 1847, 'son': 1848, 'answered': 1849, 'engineered': 1850, '1967': 1851, 'helmets': 1852, 'n193': 1853, 'hiccups': 1854, 'spidey': 1855, 'satisfaction': 1856, 'intermittently': 1857, 'nocean': 1858, 'fascinating': 1859, 'alone': 1860, 'pick': 1861, 'nerves': 1862, 'tinny': 1863, 'awk': 1864, 'separately': 1865, '8': 1866, 'leader': 1867, 'opposing': 1868, 'fianc': 1869, 'stripe': 1870, 'sgt': 1871, 'stallion': 1872, 'turns': 1873, 'despondent': 1874, 'hysterical': 1875, 'mafia': 1876, 'argo': 1877, 'slave': 1878, 'bulging': 1879, 'robots': 1880, 'connie': 1881, 'njoe': 1882, 'blaster': 
1883, 'until': 1884, 'footbridge': 1885, 'goddamned': 1886, 'epoque': 1887, 'expanding': 1888, '282': 1889, '44': 1890, 'revelation': 1891, 'rabbit': 1892, 'withered': 1893, 'groundhogs': 1894, 'whisper': 1895, 'sledge': 1896, 'heartbeat': 1897, 'graves': 1898, 'brandishing': 1899, 'trapping': 1900, 'slam': 1901, 'percent': 1902, 'octopus': 1903, 'ivy': 1904, 'players': 1905, 'coiled': 1906, 'darkened': 1907, 'powder': 1908, 'causeway': 1909, 'implodes': 1910, 'vat': 1911, 'overturned': 1912, 'barbershop': 1913, 'scumbag': 1914, 'vampires': 1915, 'wiggles': 1916, 'ape': 1917, 'corvette': 1918, 'arched': 1919, '5': 1920, 'assassination': 1921, 'arthur': 1922, 'clark': 1923, 'rehearse': 1924, 'haze': 1925, 'laurel': 1926, 'vanity': 1927, 'armadillo': 1928, 'armory': 1929, 'kissing': 1930, 'playful': 1931, 'humanly': 1932, 'dispose': 1933, 'rubber': 1934, 'crawled': 1935, 'brow': 1936, 'orsini': 1937, 'detectives': 1938, 'slightly': 1939, 'horrifying': 1940, 'shutting': 1941, 'clothing': 1942, "'cha": 1943, 'parole': 1944, 'twinkling': 1945, 'fake': 1946, 'nickel': 1947, 'privacy': 1948, 'nspivey': 1949, 'causes': 1950, 'weakness': 1951, 'urgently': 1952, 'watchtower': 1953, 'bin': 1954, 'shyly': 1955, 'distractedly': 1956, 'affected': 1957, 'pregnancy': 1958, 'drool': 1959, 'conspiratorially': 1960, 'deadpan': 1961, 'lane': 1962, 'stages': 1963, 'unbuttons': 1964, 'chapter': 1965, 'nmiranda': 1966, 'monotone': 1967, 'farmer': 1968, 'interstellar': 1969, 'mtv': 1970, 'detach': 1971, 'mcdonald': 1972, 'heads': 1973, 'dusts': 1974, 'nlawyer': 1975, 'twenty': 1976, 'harrier': 1977, 'working': 1978, 'help': 1979, 'lifestyle': 1980, 'retreating': 1981, 'del': 1982, 'throughout': 1983, 'operators': 1984, "c'mon": 1985, 'nangelo': 1986, 'tournament': 1987, 'mini': 1988, 'napalm': 1989, 'links': 1990, 'scratches': 1991, 'manned': 1992, 'jane': 1993, 'nsmiles': 1994, 'preliminary': 1995, 'backdrop': 1996, 'threats': 1997, 'takes': 1998, 'go': 1999, 'stupidity': 2000, 'elm': 
2001, 'waldo': 2002, 'checking': 2003, 'information': 2004, 'betsy': 2005, 'strange': 2006, 'platforms': 2007, 'views': 2008, 'unkar': 2009, 'scored': 2010, 'pervert': 2011, 'xadd': 2012, 'pupil': 2013, 'nrises': 2014, 'bedrooms': 2015, 'becomes': 2016, 'alice': 2017, 'photographs': 2018, 'snarls': 2019, 'morphine': 2020, 'brothers': 2021, 'fischer': 2022, '96': 2023, 'nday': 2024, 'slept': 2025, 'farquaad': 2026, 'mecca': 2027, 'drafting': 2028, 'orphan': 2029, 'snip': 2030, 'freeway': 2031, 'n133': 2032, 'premium': 2033, 'peaches': 2034, '116': 2035, 'groundhog': 2036, 'ships': 2037, 'straggle': 2038, 'tech': 2039, 'grime': 2040, 'diaper': 2041, 'unison': 2042, 'stormtrooper': 2043, 'snort': 2044, 'creatures': 2045, 'portrait': 2046, 'gallops': 2047, 'elders': 2048, 'crawls': 2049, 'knox': 2050, 'nchuck': 2051, 'dog': 2052, 'acquaintance': 2053, 'leaders': 2054, 'cluster': 2055, 'lice': 2056, 'billy': 2057, 'screw': 2058, '128': 2059, 'unconscious': 2060, 'observing': 2061, 'roberts': 2062, 'offering': 2063, 'eyeing': 2064, 'ndave': 2065, 'frond': 2066, 'huff': 2067, 'stripped': 2068, 'pouch': 2069, 'thunderbird': 2070, 'merit': 2071, 'strikes': 2072, 'verse': 2073, 'obscure': 2074, 'challenge': 2075, 'stains': 2076, 'c': 2077, 'cents': 2078, 'sprint': 2079, 'nseries': 2080, 'persona': 2081, 'ntime': 2082, 'salary': 2083, '292': 2084, 'pickup': 2085, 'volume': 2086, 'swam': 2087, 'suits': 2088, 'ghost': 2089, 'pinball': 2090, 'seed': 2091, 'expendable': 2092, 'chicken': 2093, 'scrolling': 2094, 'efforts': 2095, 'alive': 2096, 'philadelphia': 2097, 'reza': 2098, 'susan': 2099, 'nsmooth': 2100, 'arn': 2101, 'time': 2102, 'fisk': 2103, 'targeting': 2104, 'fabric': 2105, 'threatened': 2106, 'transferred': 2107, 'turbulence': 2108, 'ncould': 2109, 'parkland': 2110, 'fireplace': 2111, 'sin': 2112, 'raptor': 2113, 'shoved': 2114, 'champa': 2115, 'dancers': 2116, 'swipe': 2117, 'calm': 2118, 'spaz': 2119, 'delays': 2120, 'pajama': 2121, 'nrear': 2122, 'niggers': 2123, 
'wipers': 2124, 'unprecedented': 2125, 'chokes': 2126, 'plexiglas': 2127, 'echo': 2128, 'latch': 2129, '50': 2130, 'hangars': 2131, 'derelict': 2132, 'expansive': 2133, 'samples': 2134, 'fluegelheim': 2135, 'back': 2136, 'anticipate': 2137, 'former': 2138, 'confirmed': 2139, 'chamber': 2140, 'bandstand': 2141, 'boxes': 2142, 'outpost': 2143, 'nlegs': 2144, 'boats': 2145, 'teen': 2146, 'inches': 2147, 'damp': 2148, '141': 2149, 'chairman': 2150, 'ditches': 2151, 'third': 2152, 'naway': 2153, 'hammer': 2154, 'faux': 2155, 'puppet': 2156, 'strawberries': 2157, 'door': 2158, 'mashed': 2159, 'suntory': 2160, 'adorable': 2161, 'restrained': 2162, 'eagle': 2163, 'dumbly': 2164, 'burned': 2165, 'insulted': 2166, 'hiding': 2167, 'humiliated': 2168, 'njosh': 2169, 'dreiberg': 2170, 'nights': 2171, 'distance': 2172, 'returns': 2173, 'person': 2174, 'cover': 2175, 'pack': 2176, 'dicky': 2177, 'cages': 2178, 'radios': 2179, 'properly': 2180, 'foreman': 2181, 'wrenching': 2182, 'businessmen': 2183, 'nangle': 2184, 'clutches': 2185, 'needs': 2186, 'boy': 2187, 'reins': 2188, 'imaginable': 2189, 'via': 2190, 'failing': 2191, 'interview': 2192, 'willard': 2193, 'spills': 2194, 'lunge': 2195, 'ncarson': 2196, 'jagged': 2197, 'baked': 2198, 'leafs': 2199, 'casket': 2200, 'join': 2201, '1000': 2202, 'eat': 2203, 'shakin': 2204, 'emilie': 2205, 'areas': 2206, 'aware': 2207, 'quite': 2208, 'brain': 2209, 'sight': 2210, 'ni': 2211, 'walter': 2212, 'plain': 2213, 'squeak': 2214, 'margie': 2215, 'mamaji': 2216, 'hiya': 2217, 'swing': 2218, 'nsomething': 2219, 'paddy': 2220, 'punk': 2221, 'sleek': 2222, '57': 2223, 'dragged': 2224, 'flex': 2225, 'clenching': 2226, 'stylish': 2227, 'rape': 2228, 'ntim': 2229, 'test': 2230, 'punches': 2231, 'enjoys': 2232, 'inspect': 2233, 'clamped': 2234, 'exchange': 2235, 'shrimp': 2236, 'babysitter': 2237, 'truck': 2238, 'require': 2239, 'stadium': 2240, 'jiggles': 2241, 'bats': 2242, 'instruction': 2243, 'module': 2244, 'spink': 2245, 'office': 2246, 
'schott': 2247, 'hotel': 2248, 'babbit': 2249, 'n66': 2250, 'tracking': 2251, 'wheezing': 2252, 'dumbstruck': 2253, 'n190': 2254, 'assuming': 2255, 'tune': 2256, 'generate': 2257, 'shells': 2258, 'because': 2259, 'alt': 2260, 'inhuman': 2261, 'engineer': 2262, 'completely': 2263, 'expressions': 2264, 'cafeteria': 2265, 'gerald': 2266, 'nurses': 2267, 'n60': 2268, 'greeted': 2269, 'nenough': 2270, 'papigone': 2271, 'status': 2272, 'organize': 2273, 'windows': 2274, 'healing': 2275, 'hyper': 2276, 'al': 2277, 'warden': 2278, 'fellahs': 2279, 'crestfallen': 2280, 'n163': 2281, 'worries': 2282, 'gps': 2283, 'requires': 2284, 'nmine': 2285, 'bringin': 2286, 'recklessly': 2287, 'launched': 2288, 'fronds': 2289, 'unlike': 2290, 'sinatra': 2291, 'trans': 2292, 'amanda': 2293, 'parka': 2294, 'delighted': 2295, 'edinburgh': 2296, 'growing': 2297, 'stash': 2298, 'nbeatrice': 2299, 'insignia': 2300, 'evil': 2301, 'pappas': 2302, 'irv': 2303, 'dokey': 2304, 'backdoor': 2305, 'vain': 2306, 'cole': 2307, 'unsettled': 2308, 'eisley': 2309, 'ren': 2310, 'anyone': 2311, 'telescopes': 2312, 'punching': 2313, 'forcefully': 2314, 'moist': 2315, 'novel': 2316, 'growling': 2317, 'staying': 2318, 'irma': 2319, 'nquick': 2320, 'nmr': 2321, 'discharged': 2322, 'cinder': 2323, 'upset': 2324, 'alright': 2325, 'lifelessly': 2326, 'reload': 2327, 'common': 2328, 'sadness': 2329, 'haunted': 2330, 'penis': 2331, 'abandoned': 2332, 'coolidge': 2333, 'pass': 2334, 'receipt': 2335, 'whenever': 2336, 'nestablishing': 2337, 'gate': 2338, 'stride': 2339, 'complicated': 2340, 'headlight': 2341, '94': 2342, 'microphone': 2343, 'wardrobe': 2344, 'wiping': 2345, 'alexander': 2346, 'n206': 2347, 'recognition': 2348, 'sled': 2349, 'passive': 2350, 'nsmiling': 2351, 'swim': 2352, 'spread': 2353, "wouldn't": 2354, 'aspect': 2355, 'dab': 2356, 'likes': 2357, 'race': 2358, 'counting': 2359, 'bikes': 2360, 'ammo': 2361, 'sec': 2362, 'volvo': 2363, 'rosalie': 2364, 'sabatini': 2365, 'believe': 2366, 'alter': 2367, 
'attempting': 2368, 'wool': 2369, 'nhall': 2370, 'markinson': 2371, 'attention': 2372, 'crayon': 2373, 'batman': 2374, 'idiots': 2375, 'foul': 2376, 'comparison': 2377, 'contacted': 2378, 'gripped': 2379, 'nby': 2380, 'pd': 2381, 'w': 2382, 'underground': 2383, 'servants': 2384, 'harold': 2385, 'med': 2386, 'documentary': 2387, 'joints': 2388, 'switch': 2389, 'warehouse': 2390, 'expert': 2391, 'peephole': 2392, 'wood': 2393, 'grew': 2394, 'outdoors': 2395, 'harder': 2396, 'strained': 2397, 'razor': 2398, 'gross': 2399, 'wolfgang': 2400, 'scratched': 2401, '32': 2402, 'buys': 2403, 'controller': 2404, 'wandering': 2405, 'hurl': 2406, 'technique': 2407, 'hydrant': 2408, 'lie': 2409, 'james': 2410, 'arabic': 2411, 'screwing': 2412, 'irs': 2413, 'overflowing': 2414, 'ones': 2415, 'handwriting': 2416, 'soundtrack': 2417, 'spiritual': 2418, 'belted': 2419, 'wealthy': 2420, 'diabetic': 2421, 'gents': 2422, 'analysis': 2423, 'vette': 2424, 'reduced': 2425, 'ndeb': 2426, 'dumpling': 2427, "you're": 2428, 'desks': 2429, 'briefing': 2430, 'imprisoned': 2431, 'pinning': 2432, 'fuzzy': 2433, 'ncobb': 2434, 'catching': 2435, 'cuffed': 2436, 'loudspeakers': 2437, 'brake': 2438, 'dodonna': 2439, 'bruised': 2440, 'displaying': 2441, 'experiments': 2442, 'cascades': 2443, 'spotlight': 2444, 'bags': 2445, 'accidentally': 2446, 'tiangong': 2447, 'steely': 2448, 'color': 2449, 'excuses': 2450, 'rome': 2451, "let's": 2452, 'repellent': 2453, 'spectre': 2454, 'hooves': 2455, 'dorry': 2456, 'passenger': 2457, 'tenses': 2458, 'sheds': 2459, 'mightily': 2460, 'breen': 2461, 'guilt': 2462, 'gitmo': 2463, 'hunk': 2464, 'glances': 2465, 'climbing': 2466, 'drape': 2467, 'amusing': 2468, 'resignation': 2469, 'cultural': 2470, 'champion': 2471, 'slings': 2472, 'sized': 2473, 'transfixed': 2474, 'badchuck': 2475, 'arizona': 2476, 'hydraulic': 2477, 'noting': 2478, 'scheduled': 2479, 'scripts': 2480, '105': 2481, 'proceedings': 2482, 'tuxedo': 2483, 'utterly': 2484, 'yessir': 2485, 'escalators': 
2486, 'choose': 2487, 'suite': 2488, 'spell': 2489, 'talkback': 2490, 'honk': 2491, 'gil': 2492, 'mile': 2493, 'locket': 2494, 'poet': 2495, 'insanity': 2496, 'travis': 2497, 'brooklyn': 2498, 'sergeant': 2499, 'cannister': 2500, 'buildings': 2501, 'element': 2502, 'bastard': 2503, 'clothed': 2504, 'jokes': 2505, 'nend': 2506, 'mixture': 2507, 'anna': 2508, 'seraglio': 2509, 'bros': 2510, 'professors': 2511, 'ft': 2512, 'toast': 2513, 'nthey': 2514, 'wonderland': 2515, 'rioting': 2516, 'hangin': 2517, 'pair': 2518, 'latina': 2519, 'n169': 2520, 'incredulous': 2521, 'nwondering': 2522, 'boras': 2523, 'dapper': 2524, 'nrosalyn': 2525, 'daughtrey': 2526, 'steady': 2527, 'eats': 2528, 'labyrinth': 2529, 'mister': 2530, 'plow': 2531, 'elite': 2532, 'toward': 2533, 'zips': 2534, 'practice': 2535, 'free': 2536, 'hookers': 2537, 'darlin': 2538, 'situations': 2539, 'fact': 2540, 'snark': 2541, 'juggling': 2542, 'dr': 2543, 'hearse': 2544, 'push': 2545, 'jimmie': 2546, 'nmental': 2547, 'sporting': 2548, 'camera': 2549, '267': 2550, 'surrounds': 2551, 'reclines': 2552, 'concession': 2553, 'mock': 2554, 'uncertain': 2555, '264': 2556, 'eh': 2557, 'odds': 2558, 'hose': 2559, 'got': 2560, 'around': 2561, 'scrambling': 2562, 'jellyfish': 2563, 'developing': 2564, 'tilt': 2565, 'jennings': 2566, 'overcoat': 2567, 'kiss': 2568, 'guarded': 2569, 'trucks': 2570, 'cupboard': 2571, 'nicholls': 2572, 'nkeeping': 2573, 'qaeda': 2574, 'loose': 2575, 'sucked': 2576, 'trench': 2577, 'cutt': 2578, 'lots': 2579, 'woof': 2580, 'framework': 2581, 'blasts': 2582, 'morris': 2583, 'disperse': 2584, 'misty': 2585, 'traces': 2586, 'spotlights': 2587, 'barren': 2588, 'accountant': 2589, 'oakland': 2590, 'groggily': 2591, 'predawn': 2592, 'mendez': 2593, 'shoreline': 2594, 'flank': 2595, '117': 2596, 'clock': 2597, 'inferno': 2598, 'suggested': 2599, 'stance': 2600, 'zeppelin': 2601, 'naive': 2602, 'mutator': 2603, 'past': 2604, 'aleksei': 2605, 'seasoned': 2606, 'showers': 2607, 'sheeny': 2608, 
'baseball': 2609, 'parked': 2610, 'harlan': 2611, 'outsiders': 2612, 'belches': 2613, 'gift': 2614, 'flaming': 2615, 'cooperation': 2616, 'nwave': 2617, 'outs': 2618, 'lunges': 2619, 'internet': 2620, 'apart': 2621, 'wonders': 2622, 'hypnotized': 2623, 'heliport': 2624, '270': 2625, '172': 2626, 'outfits': 2627, 'nwaves': 2628, 'tonic': 2629, 'scraping': 2630, 'mumbles': 2631, 'glimpsed': 2632, 'diameter': 2633, 'christy': 2634, 'attractions': 2635, 'newspapers': 2636, 'chores': 2637, 'could': 2638, 'tomorrow': 2639, 'devices': 2640, 'ballots': 2641, 'shattered': 2642, 'bounced': 2643, 'household': 2644, 'halting': 2645, "there's": 2646, 'fromm': 2647, 'questioned': 2648, 'tranny': 2649, 'discovered': 2650, 'elder': 2651, 'grown': 2652, 'women': 2653, 'bathing': 2654, 'satan': 2655, 'n58': 2656, 'violent': 2657, 'belle': 2658, 'linen': 2659, 'businesses': 2660, 'quivering': 2661, 'unharmed': 2662, 'combined': 2663, 'directly': 2664, 'tortured': 2665, 'dilemma': 2666, 'stanley': 2667, 'vicki': 2668, 'lotta': 2669, 'nint': 2670, 'finch': 2671, 'stakeout': 2672, 'forrest': 2673, 'sofa': 2674, 'tip': 2675, 'shabby': 2676, 'acceptable': 2677, 'bombers': 2678, 'vs': 2679, 'flyboy': 2680, 'contacts': 2681, 'apple': 2682, 'gods': 2683, 'waist': 2684, 'flare': 2685, 'ghostbusters': 2686, 'nigga': 2687, 'portions': 2688, 'incredibly': 2689, 'museum': 2690, 'capsule': 2691, 'ncredits': 2692, 'spikes': 2693, 'opposed': 2694, 'altitude': 2695, 'fucker': 2696, 'holster': 2697, 'and': 2698, 'nchris': 2699, 'suppositories': 2700, 'blazer': 2701, 'phone': 2702, 'pekec': 2703, 'ndiane': 2704, 'stanzi': 2705, 'saber': 2706, 'appealing': 2707, '173': 2708, 'exits': 2709, '1966': 2710, 'tomato': 2711, 'adrian': 2712, 'triumphant': 2713, 'bomber': 2714, 'crest': 2715, 'undoes': 2716, 'nicky': 2717, 'descend': 2718, 'samir': 2719, 'expecting': 2720, 'journalism': 2721, 'coordinates': 2722, 'nibbles': 2723, 'n68': 2724, 'panties': 2725, 'rockefeller': 2726, 'lt': 2727, 'menace': 2728, 
'historic': 2729, 'horan': 2730, 'lamp': 2731, 'periscope': 2732, 'extinct': 2733, 'math': 2734, 'nfinally': 2735, 'n140': 2736, 'ale': 2737, 'shep': 2738, 'tab': 2739, 'plasma': 2740, 'branch': 2741, 'beer': 2742, 'buffet': 2743, 'tito': 2744, 'livid': 2745, 'stoop': 2746, 'nwait': 2747, 'morning': 2748, 'engineers': 2749, 'ransom': 2750, '1962': 2751, 'colt': 2752, 'extent': 2753, 'elderly': 2754, 'recently': 2755, 'inspired': 2756, 'unlocked': 2757, 'shivering': 2758, 'apron': 2759, 'stands': 2760, 'treeline': 2761, '1980': 2762, 'settlement': 2763, 'barred': 2764, '175': 2765, 'nboom': 2766, 'pinch': 2767, 'fiction': 2768, 'patched': 2769, 'froufrou': 2770, 'backwards': 2771, 'nirv': 2772, 'nite': 2773, 'dwelling': 2774, 'entertaining': 2775, 'raw': 2776, 'scoops': 2777, 'cucumber': 2778, 'over': 2779, 'valves': 2780, 'practiced': 2781, 'surgical': 2782, 'gliding': 2783, '167': 2784, 'europe': 2785, 'apparently': 2786, 'mankind': 2787, 'mark': 2788, 'wafts': 2789, 'davis': 2790, 'scenario': 2791, 'conjunction': 2792, 'flatten': 2793, 'playin': 2794, 'stand': 2795, 'grimaces': 2796, 'slide': 2797, 'rot': 2798, 'moishe': 2799, 'markings': 2800, 'pulpit': 2801, 'tora': 2802, 'experiencing': 2803, 'warn': 2804, 'float': 2805, 'civilized': 2806, 'hum': 2807, 'raging': 2808, '101': 2809, 'forgetting': 2810, 'shorter': 2811, 'throw': 2812, 'administration': 2813, 'shoot': 2814, 'stage': 2815, 'slate': 2816, 'homes': 2817, 'scherner': 2818, 'radiator': 2819, 'argument': 2820, 'tractor': 2821, 'socks': 2822, 'adjoining': 2823, 'composition': 2824, 'pretends': 2825, 'hacks': 2826, 'nwilson': 2827, 'pender': 2828, 'kisses': 2829, 'tentacle': 2830, 'tender': 2831, 'special': 2832, 'unloaded': 2833, 'squinting': 2834, 'steadies': 2835, '16': 2836, 'n34': 2837, 'groups': 2838, '157': 2839, 'carrots': 2840, 'propellers': 2841, 'hours': 2842, 'evacuation': 2843, 'cling': 2844, 'meetings': 2845, 'severely': 2846, 'valentine': 2847, 'starfleet': 2848, 'iran': 2849, 'instead': 
2850, 'announced': 2851, 'hurricane': 2852, 'tension': 2853, 'cul': 2854, 'conflicted': 2855, 'jerome': 2856, 'nursing': 2857, 'theirs': 2858, 'roll': 2859, 'einsatz': 2860, 'high': 2861, 'ill': 2862, 'stagger': 2863, 'marilyn': 2864, 'communist': 2865, 'staring': 2866, 'expectant': 2867, 'carrier': 2868, 'tis': 2869, '253': 2870, 'boyish': 2871, 'braced': 2872, 'dented': 2873, 'scrawls': 2874, 'modern': 2875, 'monet': 2876, 'upside': 2877, 'raft': 2878, "'bout": 2879, 'russia': 2880, 'heels': 2881, 'gammell': 2882, 'signed': 2883, 'reconsider': 2884, 'blanche': 2885, 'dollars': 2886, 'happening': 2887, '209': 2888, 'fixed': 2889, 'nintercut': 2890, 'chip': 2891, 'carcass': 2892, 'rocked': 2893, '76': 2894, 'hoarse': 2895, 'sala': 2896, 'brainerd': 2897, 'heavyset': 2898, 'peshawar': 2899, 'denial': 2900, 'breakdown': 2901, 'catatonic': 2902, 'nervously': 2903, 'wham': 2904, 'tad': 2905, 'sizzles': 2906, 'basement': 2907, 'seriousness': 2908, 'swims': 2909, 'risks': 2910, 'gasping': 2911, 'warm': 2912, 'edwards': 2913, 'timmy': 2914, 'sledgehammer': 2915, 'yang': 2916, 'photographed': 2917, 'plea': 2918, 'tug': 2919, 'absolutely': 2920, 'creepy': 2921, 'wearing': 2922, 'mis': 2923, 'battleships': 2924, 'riffling': 2925, 'nextreme': 2926, 'seen': 2927, 'plush': 2928, 'sacks': 2929, 'nbennie': 2930, 'nscud': 2931, '288': 2932, 'william': 2933, 'riveted': 2934, 'pote': 2935, 'blain': 2936, 'rifle': 2937, 'down': 2938, 'awestruck': 2939, 'skill': 2940, 'rooftops': 2941, 'thrower': 2942, 'clawed': 2943, 'flow': 2944, 'lullaby': 2945, 'nsame': 2946, 'gladly': 2947, 'improvise': 2948, 'vreeland': 2949, 'concept': 2950, 'ntaber': 2951, '274': 2952, 'ndad': 2953, 'uptown': 2954, 'accidents': 2955, 'n201': 2956, 'apprehension': 2957, 'olympic': 2958, 'n42': 2959, 'sox': 2960, 'interrupted': 2961, 'leveled': 2962, 'goldmill': 2963, 'heroic': 2964, 'relatively': 2965, 'loyalty': 2966, 'checkmate': 2967, 'scare': 2968, 'lay': 2969, 'pratt': 2970, 'ntravis': 2971, 'perched': 
2972, 'hardy': 2973, 'trapdoor': 2974, 'cheery': 2975, 'horribly': 2976, 'dat': 2977, 'habits': 2978, 'ave': 2979, 'september': 2980, 'noticed': 2981, 'mother': 2982, 'joseph': 2983, 'required': 2984, 'undresses': 2985, 'cats': 2986, 'cyclops': 2987, 'danny': 2988, 'launches': 2989, 'prominent': 2990, 'emotionless': 2991, 'iggy': 2992, 'adore': 2993, 'ensign': 2994, 'crazier': 2995, 'rung': 2996, 'con': 2997, 'vault': 2998, 'skinned': 2999, '2004': 3000, 'guantanamo': 3001, 'datastick': 3002, 'wolves': 3003, 'graceful': 3004, 'commands': 3005, 'blows': 3006, 'lasses': 3007, 'marie': 3008, 'election': 3009, 'holler': 3010, 'headline': 3011, 'december': 3012, 'ponders': 3013, 'aisles': 3014, 'staircase': 3015, 'appellplatz': 3016, 'pecks': 3017, 'park': 3018, 'amid': 3019, 'nsue': 3020, 'please': 3021, 'language': 3022, 'dances': 3023, 'stickers': 3024, 'produced': 3025, 'survivor': 3026, 'tendulkar': 3027, 'alpha': 3028, 'gulp': 3029, 'palace': 3030, 'burying': 3031, 'shannon': 3032, 'purchase': 3033, 'values': 3034, 'presented': 3035, 'tiptoes': 3036, 'readouts': 3037, 'relationship': 3038, 'released': 3039, 'madame': 3040, 'nair': 3041, '1968': 3042, 'volumes': 3043, 'presence': 3044, 'streak': 3045, 'surviving': 3046, 'patel': 3047, '150': 3048, 'repeatedly': 3049, 'halfway': 3050, 'oval': 3051, 'freighter': 3052, 'fright': 3053, 'earnest': 3054, 'martha': 3055, 'poetry': 3056, 'assistants': 3057, 'vibrate': 3058, 'xadve': 3059, 'halt': 3060, 'brandt': 3061, 'timing': 3062, 'quarry': 3063, 'retaliation': 3064, 'ambulances': 3065, 'thumping': 3066, 'aft': 3067, 'faking': 3068, 'unleashed': 3069, 'madness': 3070, 'panels': 3071, 'served': 3072, 'xc5': 3073, 'artist': 3074, 'cameraman': 3075, 'dot': 3076, 'fling': 3077, 'roby': 3078, 'doors': 3079, 'desk': 3080, 'remarkable': 3081, 'tending': 3082, 'divider': 3083, 'forecourt': 3084, 'pals': 3085, 'fissure': 3086, 'janey': 3087, 'n86': 3088, 'numb': 3089, 'npi': 3090, 'thinkin': 3091, 'hauls': 3092, 'calendar': 
3093, 'pale': 3094, 'watches': 3095, 'proceeding': 3096, 'sparrow': 3097, 'campers': 3098, 'hopeful': 3099, 'refugees': 3100, 'reception': 3101, 'fork': 3102, 'minister': 3103, 'dusty': 3104, 'spring': 3105, 'milwaukee': 3106, 'grill': 3107, 'screenplay': 3108, 'onlookers': 3109, 'hovers': 3110, 'merry': 3111, 'book': 3112, 'offend': 3113, 'jeanie': 3114, 'activated': 3115, 'small': 3116, 'videotapes': 3117, 'reappears': 3118, 'hangar': 3119, 'shoos': 3120, 'speak': 3121, "'ll": 3122, 'n59': 3123, 'lefou': 3124, 'paper': 3125, 'vague': 3126, 'dominate': 3127, 'doctors': 3128, 'po': 3129, 'startled': 3130, 'nsince': 3131, 'colonel': 3132, 'attacked': 3133, 'blacksmith': 3134, 'arcing': 3135, 'jammed': 3136, 'troopers': 3137, 'twist': 3138, 'unbuckles': 3139, 'fuckin': 3140, 'portfolio': 3141, 'shaves': 3142, 'guns': 3143, 'phil': 3144, 'emits': 3145, 'babies': 3146, 'stake': 3147, 'coup': 3148, 'reigns': 3149, 'oversized': 3150, 'believers': 3151, 'stack': 3152, 'logic': 3153, 'batmobile': 3154, 'delightedly': 3155, 'nbubbles': 3156, 'trust': 3157, 'hellish': 3158, 'unaffected': 3159, 'gagged': 3160, 'edgar': 3161, 'blazes': 3162, 'mort': 3163, 'mercedes': 3164, 'carpet': 3165, 'placing': 3166, 'apprehensively': 3167, 'uhhh': 3168, 'dork': 3169, 'taxi': 3170, 'usually': 3171, 'storage': 3172, 'x9cthe': 3173, '79': 3174, 'lancaster': 3175, 'martial': 3176, 'ivon': 3177, '147': 3178, 'controlling': 3179, 'lieutenant': 3180, 'dudes': 3181, 'knight': 3182, 'here': 3183, 'n171': 3184, '20th': 3185, 'wonder': 3186, '286': 3187, 'experience': 3188, 'n205': 3189, 'enough': 3190, 'cheeks': 3191, 'twisting': 3192, 'headset': 3193, 'dead': 3194, 'readies': 3195, 'pursuit': 3196, 'ammar': 3197, 'swear': 3198, 'nslams': 3199, 'nooh': 3200, 'names': 3201, 'mario': 3202, 'energized': 3203, 'training': 3204, 'procession': 3205, 'gosh': 3206, 'knots': 3207, 'admired': 3208, 'bow': 3209, 'sung': 3210, 'captain': 3211, 'curt': 3212, 'eckerton': 3213, 'his': 3214, 'ncharlie': 3215, 
'ed': 3216, 'hurriedly': 3217, 'naked': 3218, 'registration': 3219, 'tissue': 3220, 'uneasily': 3221, 'sheryl': 3222, 'swivels': 3223, 'diagonal': 3224, 'wastebasket': 3225, 'pillows': 3226, 'fraction': 3227, 'sparrin': 3228, 'blood': 3229, 'writer': 3230, 'ole': 3231, 'boil': 3232, 'villager': 3233, 'ndentist': 3234, 'eight': 3235, 'hardware': 3236, 'threw': 3237, 'nylon': 3238, 'qui': 3239, 'cigarette': 3240, 'nonce': 3241, 'study': 3242, 'mitchell': 3243, 'eavesdropping': 3244, 'yet': 3245, 'tankard': 3246, 'skippy': 3247, 'lull': 3248, 'musketeers': 3249, 'stakes': 3250, 'turning': 3251, 'bottom': 3252, 'ton': 3253, 'blaze': 3254, 'infantry': 3255, 'civic': 3256, 'sonofabitch': 3257, 'n153': 3258, 'harkness': 3259, 'gurgle': 3260, 'hotter': 3261, 'golden': 3262, 'enraged': 3263, 'mental': 3264, 'riders': 3265, 'bomb': 3266, 'seeming': 3267, 'maestro': 3268, 'nbegbie': 3269, 'island': 3270, 'incubation': 3271, 'jedi': 3272, 'waddles': 3273, 'panicking': 3274, 'climax': 3275, 'inform': 3276, 'stuffed': 3277, 'ninsert': 3278, 'etch': 3279, 'turret': 3280, 'flood': 3281, 'nhold': 3282, 'workman': 3283, 'retracts': 3284, 'tyranny': 3285, 'scientists': 3286, 'closets': 3287, 'storey': 3288, 'respectfully': 3289, 'science': 3290, 'colleague': 3291, 'avoiding': 3292, 'arrest': 3293, 'upturned': 3294, 'pan': 3295, 'guard': 3296, 'n116': 3297, 'lounging': 3298, 'n121': 3299, 'stroller': 3300, 'pictures': 3301, 'advertising': 3302, 'notorious': 3303, 'chuckling': 3304, 'baffled': 3305, 'nout': 3306, 'infirmary': 3307, 'ling': 3308, 'orona': 3309, 'vogler': 3310, 'debating': 3311, 'career': 3312, 'thrones': 3313, 'gaunt': 3314, 'cellular': 3315, 'frost': 3316, 'chips': 3317, 'moonpies': 3318, 'february': 3319, 'n71': 3320, 'n158': 3321, 'dully': 3322, 'guy': 3323, '170': 3324, 'consequences': 3325, 'revenge': 3326, 'n25': 3327, 'zooms': 3328, 'accessory': 3329, 'chosen': 3330, 'fixer': 3331, 'tatooine': 3332, 'spirits': 3333, 'exhibit': 3334, 'vibrating': 3335, 'ow': 3336, 
'mural': 3337, 'appearing': 3338, 'harlem': 3339, 'bachelorette': 3340, 'treat': 3341, 'transparent': 3342, 'nemma': 3343, 'slippers': 3344, 'rapidly': 3345, 'gibbs': 3346, "'hm": 3347, 'tens': 3348, 'surfer': 3349, 'agony': 3350, 'music': 3351, 'plunge': 3352, 'mon': 3353, 'receiving': 3354, 'happy': 3355, 'degree': 3356, 'figures': 3357, 'swiftly': 3358, 'soviet': 3359, 'seatbelt': 3360, 'congo': 3361, 'maru': 3362, 'approve': 3363, 'disregard': 3364, 'nmickey': 3365, 'sh': 3366, 'duffel': 3367, 'asap': 3368, 'releasing': 3369, 'watson': 3370, 'cowboys': 3371, 'transom': 3372, 'bullets': 3373, 'picked': 3374, 'visibly': 3375, 'purposes': 3376, 'nthere': 3377, 'goggles': 3378, 'miniature': 3379, 'colony': 3380, 'excitedly': 3381, 'n47': 3382, 'jealousy': 3383, 'round': 3384, 'complaining': 3385, 'toes': 3386, 'multiple': 3387, 'pressed': 3388, 'opportunity': 3389, 'reilly': 3390, 'elegant': 3391, 'cars': 3392, 'frankly': 3393, 'nunder': 3394, 'gawkers': 3395, 'virus': 3396, 'slumped': 3397, 'stops': 3398, 'managed': 3399, 'does': 3400, 'slow': 3401, 'speaks': 3402, 'pointing': 3403, 'instructions': 3404, 'thy': 3405, 'valium': 3406, 'friend': 3407, 'bernadette': 3408, 'puddle': 3409, 'respects': 3410, 'nview': 3411, 'dishonor': 3412, 'disconnected': 3413, 'applies': 3414, 'nblack': 3415, 'nokay': 3416, 'location': 3417, 'trails': 3418, 'rustle': 3419, 'ngrace': 3420, 'discuss': 3421, 'games': 3422, 'luggage': 3423, 'baldwin': 3424, 'sachin': 3425, 'ndre': 3426, 'meat': 3427, 'n101': 3428, 'circled': 3429, 'weed': 3430, 'planetary': 3431, 'perpetrator': 3432, 'incomplete': 3433, 'gruesome': 3434, 'abu': 3435, 'transports': 3436, 'catwalk': 3437, 'repairing': 3438, 'ads': 3439, 'removed': 3440, 'bubbles': 3441, 'heavyweight': 3442, 'pagoda': 3443, 'kirill': 3444, 'camouflaged': 3445, 'faculty': 3446, 'expense': 3447, 'sizzling': 3448, 'torture': 3449, 'impacts': 3450, 'deliverer': 3451, 'nbag': 3452, 'nhuge': 3453, 'plank': 3454, 'mill': 3455, 'dutifully': 3456, 
'fortress': 3457, 'shards': 3458, 'deflects': 3459, 'trot': 3460, 'platinum': 3461, 'movie': 3462, 'polished': 3463, 'searched': 3464, 'raped': 3465, 'tease': 3466, 'sum': 3467, 'sold': 3468, 'fireballs': 3469, 'partially': 3470, 'n22': 3471, 'stinking': 3472, 'porthole': 3473, 'excused': 3474, 'recede': 3475, 'behind': 3476, 'kind': 3477, 'y': 3478, 'faintest': 3479, 'verde': 3480, 'draining': 3481, 'teal': 3482, 'foam': 3483, 'clocks': 3484, 'gait': 3485, '72': 3486, 'givens': 3487, 'investigation': 3488, 'bothering': 3489, 'ushers': 3490, 'presently': 3491, 'tensing': 3492, 'telescope': 3493, 'reading': 3494, 'marrying': 3495, 'shades': 3496, 'magnesium': 3497, 'closed': 3498, '133': 3499, 'contorted': 3500, 'bigger': 3501, 'beauty': 3502, 'womb': 3503, 'nhead': 3504, 'karaoke': 3505, 'laptop': 3506, 'menu': 3507, 'fresh': 3508, 'aliens': 3509, 'ghoul': 3510, 'bearings': 3511, 'stitches': 3512, 'tok': 3513, 'grazing': 3514, 'moths': 3515, 'formidable': 3516, 'dean': 3517, 'three': 3518, 'enclosed': 3519, 'claustrophobic': 3520, 'pending': 3521, 'activates': 3522, 'smashed': 3523, '35a': 3524, 'yah': 3525, 'sidewalks': 3526, 'comment': 3527, 'ignore': 3528, 'prepares': 3529, 'wicked': 3530, 'reflection': 3531, 'jackrabbit': 3532, 'gulf': 3533, 'dropped': 3534, '221': 3535, 'detainee': 3536, 'moscow': 3537, 'vous': 3538, 'kirk': 3539, 'louisiana': 3540, 'bath': 3541, 'coin': 3542, 'sweetheart': 3543, 'laughed': 3544, 'maker': 3545, 'germans': 3546, 'fractured': 3547, 'eckhardt': 3548, 'bugged': 3549, 'arrived': 3550, 'nmusic': 3551, '245': 3552, 'equations': 3553, 'tim': 3554, 'flickers': 3555, 'warren': 3556, 'blaring': 3557, 'verdict': 3558, 'thorns': 3559, 'hatred': 3560, 'upon': 3561, 'house': 3562, 'salvy': 3563, 'milk': 3564, 'noses': 3565, 'mccandless': 3566, 'steep': 3567, 'plumbing': 3568, 'molester': 3569, 'glass': 3570, 'kindergarten': 3571, 'box': 3572, 'matthew': 3573, 'miracles': 3574, 'wh': 3575, 'sleeve': 3576, 'secret': 3577, 'emergency': 3578, 
'reluctant': 3579, 'sleepy': 3580, 'phenomenon': 3581, '18th': 3582, 'yup': 3583, 'pleasantries': 3584, 'conferring': 3585, 'nchristian': 3586, 'chugs': 3587, 'took': 3588, 'money': 3589, 'n177': 3590, 'christ': 3591, 'merge': 3592, 'edges': 3593, 'enthralled': 3594, "don't": 3595, 'grand': 3596, 'cal': 3597, 'ditch': 3598, 'facing': 3599, 'rattled': 3600, 'critical': 3601, 'burns': 3602, 'exhales': 3603, '59': 3604, 'mir': 3605, 'hallways': 3606, '12': 3607, 'yusuf': 3608, 'recent': 3609, 'amd': 3610, 'swanson': 3611, 'alicia': 3612, 'trees': 3613, '67': 3614, 'komiteh': 3615, 'severing': 3616, 'committed': 3617, 'interrogator': 3618, 'vip': 3619, 'supply': 3620, 'vengeance': 3621, 'cigarettes': 3622, 'nuh': 3623, 'ports': 3624, 'moan': 3625, 'pointedly': 3626, 'sends': 3627, 'sandwich': 3628, 'grandchildren': 3629, 'tina': 3630, 'waitress': 3631, 'intro': 3632, 'appearance': 3633, 'chance': 3634, 'humans': 3635, 'quieter': 3636, 'bud': 3637, 'seminarian': 3638, 'unto': 3639, 'stopped': 3640, 'veteran': 3641, 'lesbian': 3642, 'beeping': 3643, '202': 3644, 'patrol': 3645, 'bed': 3646, 'marking': 3647, 'fair': 3648, 'england': 3649, 'choked': 3650, 'trashed': 3651, 'nsheeny': 3652, 'sudden': 3653, 'zaps': 3654, "'fee": 3655, 'trap': 3656, 'ordinates': 3657, 'swept': 3658, 'insects': 3659, 'ncalm': 3660, 'sidelong': 3661, 'comlink': 3662, 'await': 3663, 'aback': 3664, 'projections': 3665, 'stars': 3666, '235': 3667, 'beth': 3668, 'masters': 3669, 'bicycles': 3670, 'appreciates': 3671, 'consciousness': 3672, 'chatter': 3673, 'vent': 3674, 'angry': 3675, 'commandant': 3676, 'sword': 3677, 'frees': 3678, 'pullin': 3679, 'harnesses': 3680, 'rears': 3681, 'district': 3682, 'destroys': 3683, 'power': 3684, 'overseers': 3685, 'doll': 3686, 'dreamy': 3687, 'spradling': 3688, 'nsilence': 3689, 'armored': 3690, 'fascinated': 3691, 'technically': 3692, 'determination': 3693, 'furnished': 3694, 'npark': 3695, 'communications': 3696, '83': 3697, 'juarez': 3698, 'housed': 3699, 
'lee': 3700, 'banks': 3701, 'frazzled': 3702, 'schematic': 3703, '195': 3704, 'paramount': 3705, 'hurts': 3706, 'sobs': 3707, 'scanned': 3708, 'du': 3709, 'copy': 3710, 'tightening': 3711, 'unsuccessfully': 3712, 'jarring': 3713, 'danbury': 3714, 'robbed': 3715, 'nwhoa': 3716, 'humping': 3717, 'following': 3718, 'stare': 3719, 'selection': 3720, 'argue': 3721, 'petey': 3722, 'sidelines': 3723, 'ajar': 3724, '236': 3725, 'intrigued': 3726, 'slug': 3727, 'parakeet': 3728, 'skins': 3729, 'nmirror': 3730, 'nino': 3731, 'site': 3732, 'hoist': 3733, 'nfloor': 3734, 'neveryone': 3735, 'eagerly': 3736, 'reservation': 3737, 'sculpted': 3738, 'poke': 3739, "'64": 3740, 'invite': 3741, 'tricky': 3742, 'beds': 3743, 'os': 3744, 'eclipsed': 3745, 'patience': 3746, 'quiet': 3747, 'adds': 3748, 'registers': 3749, 'faster': 3750, 'flyer': 3751, 'hearing': 3752, 'trough': 3753, 'motorcycle': 3754, 'n122': 3755, 'electricity': 3756, 'occupant': 3757, 'effect': 3758, 'mogwai': 3759, 'undocking': 3760, 'unaware': 3761, 'nlike': 3762, 'invented': 3763, 'nonly': 3764, 'barrel': 3765, 'part': 3766, 'nappears': 3767, 'industrial': 3768, 'straightened': 3769, 'witnessed': 3770, 'nlex': 3771, 'dic': 3772, 'forces': 3773, 'fails': 3774, "do't": 3775, 'domino': 3776, 'role': 3777, 'npassenger': 3778, 'toro': 3779, 'lemonade': 3780, 'address': 3781, 'runnin': 3782, 'kidnapped': 3783, 'sponge': 3784, 'lump': 3785, 'stomping': 3786, 'irvine': 3787, 'unceremoniously': 3788, 'urge': 3789, 'way': 3790, 'sheets': 3791, 'fantastic': 3792, 'carrie': 3793, 'brightly': 3794, 'courage': 3795, 'ping': 3796, 'floods': 3797, 'see': 3798, 'squeeze': 3799, 'alternate': 3800, 'neach': 3801, 'nbo': 3802, 'snoke': 3803, 'ah': 3804, 'hawaiian': 3805, 'street': 3806, 'pamphlet': 3807, 'imagine': 3808, 'regret': 3809, 'algeria': 3810, 'sears': 3811, '290': 3812, 'students': 3813, 'carpe': 3814, 'resolve': 3815, 'tour': 3816, 'muffled': 3817, 'maintenance': 3818, 'charles': 3819, 'suburban': 3820, 'between': 3821, 
'bonding': 3822, 'nyoung': 3823, 'owed': 3824, 'erupt': 3825, 'won': 3826, 'fathers': 3827, 'largely': 3828, 'bathtub': 3829, 'walkman': 3830, '707': 3831, 'component': 3832, 'nstruggles': 3833, 'clerks': 3834, 'proton': 3835, 'chaplain': 3836, 'politely': 3837, 'angie': 3838, 'curved': 3839, 'grimsrud': 3840, 'silver': 3841, 'spent': 3842, '136': 3843, 'scatter': 3844, 'hysterics': 3845, 'yankee': 3846, 'n94': 3847, 'covered': 3848, 'freak': 3849, 'npull': 3850, 'law': 3851, 'jack': 3852, 'starting': 3853, 'polexia': 3854, 'serenity': 3855, 'kinda': 3856, '194': 3857, 'modesty': 3858, 'norma': 3859, 'plugged': 3860, 'latino': 3861, 'a85': 3862, 'cassette': 3863, 'fiery': 3864, 'swoon': 3865, 'ntechnicians': 3866, 'nmuldoon': 3867, 'feminine': 3868, 'silhouettes': 3869, 'published': 3870, 'maiden': 3871, 'random': 3872, 'whoo': 3873, 'garden': 3874, 'marla': 3875, 'wrapped': 3876, 'fumbles': 3877, 'lodged': 3878, 'bacon': 3879, 'realty': 3880, 'spectrum': 3881, 'bent': 3882, 'various': 3883, 'creeps': 3884, 'sanchez': 3885, '208': 3886, 'roads': 3887, 'nf': 3888, 'pete': 3889, 'proof': 3890, 'extensive': 3891, 'antarctic': 3892, 'patrick': 3893, 'pries': 3894, 'gig': 3895, 'brush': 3896, 'sketches': 3897, 'sewage': 3898, 'woo': 3899, 'touch': 3900, 'populated': 3901, 'forward': 3902, 'oasis': 3903, 'located': 3904, 'opposite': 3905, 'narmadillo': 3906, 'shaye': 3907, 'blushing': 3908, 'clerk': 3909, 'predict': 3910, 'moorings': 3911, 'complete': 3912, 'rumpled': 3913, 'carolyn': 3914, 'protectively': 3915, 'nnow': 3916, 'takin': 3917, 'airfield': 3918, 'reputation': 3919, 'best': 3920, 'ncave': 3921, 'expenses': 3922, 'oswald': 3923, 'schlumberg': 3924, 'napproaches': 3925, '269': 3926, 'easel': 3927, 'tons': 3928, 'larger': 3929, 'sweater': 3930, 'realizing': 3931, 'ceremonial': 3932, 'remained': 3933, 'valet': 3934, 'humanity': 3935, 'pakistan': 3936, 'since': 3937, 'nuthin': 3938, 'forever': 3939, 'kuwait': 3940, 'njeans': 3941, 'geo': 3942, 'leading': 3943, 
'brea': 3944, 'international': 3945, 'ram': 3946, 'binds': 3947, 'pyramid': 3948, 'calling': 3949, 'link': 3950, 'takagi': 3951, '1976': 3952, 'concrete': 3953, 'eel': 3954, 'til': 3955, 'recovers': 3956, 'breakfast': 3957, 'homemade': 3958, 'pursued': 3959, 'throbbing': 3960, 'winded': 3961, 'concealing': 3962, 'quarantine': 3963, 'dell': 3964, 'shah': 3965, 'defiantly': 3966, 'industries': 3967, 'surgeon': 3968, 'nready': 3969, 'schaefer': 3970, 'ragged': 3971, 'deejay': 3972, 'terrible': 3973, 'shrimpin': 3974, 'perfect': 3975, 'toys': 3976, 'devoid': 3977, 'npockets': 3978, 'delay': 3979, 'woodward': 3980, 'warrant': 3981, '70s': 3982, 'irish': 3983, 'fancy': 3984, 'firelight': 3985, 'crappy': 3986, 'prevent': 3987, 'olive': 3988, 'narms': 3989, 'bedspread': 3990, 'splitting': 3991, 'weep': 3992, 'underwater': 3993, 'dividing': 3994, 'resigned': 3995, 'disturb': 3996, 'dixie': 3997, 'skipped': 3998, 'hunched': 3999, 'genes': 4000, 'grow': 4001, 'leaf': 4002, 'jojo': 4003, 'poem': 4004, 'yolanda': 4005, 'nightclub': 4006, 'oskar': 4007, 'nvery': 4008, 'digicam': 4009, 'vic': 4010, "'m": 4011, 'cornered': 4012, 'stepping': 4013, 'monument': 4014, 'weirdo': 4015, 'nfour': 4016, 'blockade': 4017, 'binding': 4018, 'centre': 4019, 'hint': 4020, 'bustle': 4021, '185': 4022, 'mad': 4023, 'nusing': 4024, 'stew': 4025, 'adorn': 4026, 'evolution': 4027, 'nrc': 4028, 'jew': 4029, 'shuddering': 4030, 'dessert': 4031, 'safer': 4032, 'manly': 4033, 'majestically': 4034, 'ngood': 4035, 'onto': 4036, 'pawn': 4037, 'angered': 4038, 'klondike': 4039, 'eccentric': 4040, 'zoom': 4041, '40s': 4042, 'schreber': 4043, 'blending': 4044, 'sales': 4045, 'visual': 4046, 'rousing': 4047, 'couples': 4048, 'fragile': 4049, 'behave': 4050, '213': 4051, 'breaker': 4052, 'greenish': 4053, 'dope': 4054, 'nred': 4055, 'beretta': 4056, 'darting': 4057, 'mexican': 4058, 'relieve': 4059, 'announcing': 4060, 'predator': 4061, 'gentleman': 4062, 'self': 4063, 'como': 4064, 'features': 4065, 
'impersonating': 4066, 'marshall': 4067, 'manuscript': 4068, '271': 4069, 'dropping': 4070, 'research': 4071, 'swerves': 4072, 'lost': 4073, 'unhook': 4074, 'trunk': 4075, 'ngirl': 4076, 'crashing': 4077, 'hundreds': 4078, 'tied': 4079, 'sheraton': 4080, 'rub': 4081, 'install': 4082, 'pavement': 4083, 'locks': 4084, 'screens': 4085, 'colossal': 4086, 'score': 4087, 'nthis': 4088, 'nharding': 4089, 'county': 4090, 'sophia': 4091, 'trike': 4092, 'uneasy': 4093, 'lunatic': 4094, 'flunky': 4095, 'rip': 4096, 'surveying': 4097, 'doorframe': 4098, 'burgundy': 4099, 'las': 4100, 'ornaments': 4101, '10': 4102, 'freezes': 4103, 'turbine': 4104, 'serial': 4105, 'tests': 4106, 'proposition': 4107, 'elements': 4108, 'subtle': 4109, 'spike': 4110, 'consumed': 4111, '04': 4112, 'prayed': 4113, 'beside': 4114, 'nsaito': 4115, 'method': 4116, 'differently': 4117, 'told': 4118, 'pulled': 4119, 'presses': 4120, 'gorge': 4121, 'bravely': 4122, '81': 4123, 'strings': 4124, 'australia': 4125, 'walker': 4126, 'steam': 4127, 'thrusts': 4128, 'nudging': 4129, 'perry': 4130, 'victoria': 4131, 'zeng': 4132, 'beard': 4133, 'nforward': 4134, 'migrants': 4135, 'nblows': 4136, 'joanne': 4137, 'prescription': 4138, 'shallows': 4139, 'bum': 4140, 'belong': 4141, 'slope': 4142, 'compartments': 4143, 'unmade': 4144, 'streaks': 4145, 'wine': 4146, 'ripped': 4147, 'copyright': 4148, 'nhands': 4149, 'squash': 4150, 'brimming': 4151, 'grooming': 4152, 'addressing': 4153, 'thud': 4154, 'worthy': 4155, 'nreveal': 4156, 'liquid': 4157, '243': 4158, 'cook': 4159, 'pockets': 4160, 'instinct': 4161, 'feared': 4162, 'pussy': 4163, 'america': 4164, 'ropes': 4165, 'spacious': 4166, 'nregis': 4167, 'holy': 4168, 'n2': 4169, 'frankie': 4170, '360': 4171, 'protein': 4172, 'handwritten': 4173, 'lincoln': 4174, 'iv': 4175, 'payin': 4176, 'protesters': 4177, 'fade': 4178, 'nyes': 4179, 'kneeling': 4180, 'hilarious': 4181, 'mammoth': 4182, 'hulk': 4183, 'brick': 4184, 'sotto': 4185, 'concentrate': 4186, 'clatter': 
4187, 'nreagan': 4188, 'blurts': 4189, 'nsee': 4190, 'indistinguishable': 4191, '8th': 4192, 'develop': 4193, 'thicker': 4194, 'nstunned': 4195, 'stumbling': 4196, 'helo': 4197, 'frau': 4198, 'turtles': 4199, 'life': 4200, 'nbeside': 4201, 'daytime': 4202, 'mesa': 4203, 'vietnam': 4204, 'innards': 4205, 'hunger': 4206, 'guardhouse': 4207, 'clippers': 4208, 'whiz': 4209, 'thelma': 4210, 'sonsabitches': 4211, 'composed': 4212, 'calligraphy': 4213, 'affectionately': 4214, 'spinal': 4215, 'channels': 4216, 'nmike': 4217, 'punctuated': 4218, 'stormy': 4219, 'boar': 4220, "we'll": 4221, 'navigate': 4222, 'nalright': 4223, 'lamb': 4224, 'heartfelt': 4225, 'stations': 4226, 'daze': 4227, 'est': 4228, 'disperses': 4229, 'shelves': 4230, 'lyrics': 4231, 'brad': 4232, 'bucket': 4233, 'hostility': 4234, 'tacked': 4235, 'neverybody': 4236, 'longing': 4237, '110': 4238, 'gallon': 4239, 'serum': 4240, 'everyone': 4241, 'appear': 4242, 'unfair': 4243, 'compliment': 4244, 'converge': 4245, 'china': 4246, 'inconvenience': 4247, 'nknife': 4248, 'skate': 4249, 'my': 4250, 'rude': 4251, 'doris': 4252, 'dwarf': 4253, 'caddy': 4254, 'reef': 4255, 'mall': 4256, 'seconds': 4257, 'floorboards': 4258, 'rat': 4259, 'explaining': 4260, 'essentially': 4261, 'patterns': 4262, 'relentless': 4263, 'pink': 4264, '182': 4265, 'fat': 4266, 'rebellion': 4267, 'fro': 4268, 'axe': 4269, 'legendary': 4270, 'tarkin': 4271, 'adjusts': 4272, 'n96': 4273, 'murdering': 4274, 'dough': 4275, 'glittering': 4276, 'joe': 4277, 'santiago': 4278, 'accomplishment': 4279, 'hadda': 4280, 'hardened': 4281, 'consume': 4282, 'blackened': 4283, 'cruz': 4284, 'roaming': 4285, 'loading': 4286, 'tiny': 4287, 'fight': 4288, 'ahmed': 4289, 'unable': 4290, 'regalia': 4291, 'initials': 4292, 'organic': 4293, 'mannequins': 4294, 'snorts': 4295, 'woulda': 4296, 'nalmost': 4297, 'clouds': 4298, 'hammaker': 4299, 'exhaustion': 4300, 'salon': 4301, 'viciously': 4302, 'admire': 4303, 'miraculously': 4304, 'agrees': 4305, 'nfew': 4306, 
'bourbon': 4307, 'booked': 4308, "you've": 4309, 'blackberry': 4310, 'chewing': 4311, 'joy': 4312, 'nconversation': 4313, 'wire': 4314, 'fremont': 4315, 'thru': 4316, 'goodnight': 4317, 'lapel': 4318, 'quickly': 4319, 'rooney': 4320, 'sometimes': 4321, 'endeavor': 4322, 'construction': 4323, 'candelabra': 4324, 'nwould': 4325, 'funds': 4326, 'smiley': 4327, 'nwon': 4328, 'urine': 4329, 'churning': 4330, 'fellow': 4331, 'shot': 4332, 'honeymoon': 4333, 'wasn': 4334, 'smelled': 4335, 'n62': 4336, 'unless': 4337, 'ak': 4338, 'expertly': 4339, 'residents': 4340, 'massage': 4341, 'yo': 4342, 'denny': 4343, 'actresses': 4344, '134': 4345, 'headache': 4346, 'chewbacca': 4347, 'bloodcurdling': 4348, 'whispering': 4349, 'weasel': 4350, 'bandhu': 4351, 'stuck': 4352, 'splashing': 4353, 'belly': 4354, 'inventor': 4355, 'squint': 4356, 'less': 4357, 'offensive': 4358, 'islands': 4359, 'guillermo': 4360, 'jaguar': 4361, 'phony': 4362, 'oath': 4363, 'sexual': 4364, 'speech': 4365, 'han': 4366, 'unprepared': 4367, 'bored': 4368, 'nleonard': 4369, 'appreciatively': 4370, 'mystique': 4371, 'scribbles': 4372, 'nsydney': 4373, 'compromised': 4374, 'measure': 4375, 'intercom': 4376, 'manny': 4377, 'njailer': 4378, 'babble': 4379, 'virginia': 4380, 'triangle': 4381, 'thoughtfully': 4382, 'ass': 4383, 'bushes': 4384, 'liu': 4385, 'franco': 4386, 'peter': 4387, 'view': 4388, 'formally': 4389, 'collapses': 4390, 'mortician': 4391, 'shuffle': 4392, '230': 4393, 'trade': 4394, 'flashing': 4395, 'scratching': 4396, 'vietnamese': 4397, 'operative': 4398, 'exhilarated': 4399, 'glimmer': 4400, 'ballpark': 4401, 'parties': 4402, 'creating': 4403, 'among': 4404, 'squish': 4405, 'n189': 4406, 'fuming': 4407, 'depending': 4408, 'sphere': 4409, 'prove': 4410, 'summer': 4411, 'nancestor': 4412, 'pet': 4413, 'gronckle': 4414, 'describe': 4415, 'hardcore': 4416, 'signal': 4417, 'cd': 4418, 'scribbling': 4419, 'masked': 4420, 'present': 4421, 'fearing': 4422, 'tiled': 4423, 'results': 4424, 'goddamn': 
4425, 'dee': 4426, 'zip': 4427, 'muthafuckas': 4428, 'reel': 4429, 'lor': 4430, 'camping': 4431, 'happier': 4432, 'nfor': 4433, 'thicket': 4434, 'pound': 4435, 'dons': 4436, 'strategy': 4437, 'flap': 4438, 'dormitory': 4439, 'curling': 4440, 'ngenie': 4441, 'drum': 4442, 'toothpaste': 4443, 'takeshi': 4444, 'ocean': 4445, 'arco': 4446, 'classroom': 4447, 'winona': 4448, 'requests': 4449, 'essential': 4450, 'ties': 4451, 'weaker': 4452, 'arm': 4453, 'detected': 4454, 'nwaits': 4455, 'sssh': 4456, 'split': 4457, 'blake': 4458, 'abreast': 4459, 'nick': 4460, 'drinking': 4461, 'leafing': 4462, 'funeral': 4463, 'plated': 4464, 'practically': 4465, 'attic': 4466, 'invoice': 4467, 'ntito': 4468, 'losing': 4469, 'firepower': 4470, 'rural': 4471, 'encourage': 4472, 'pleading': 4473, 'stove': 4474, 'embassy': 4475, 'skyscrapers': 4476, 'eternal': 4477, '251': 4478, 'plunks': 4479, 'sisters': 4480, 'downwards': 4481, 'shu': 4482, 'njay': 4483, 'nlittle': 4484, 'lizard': 4485, 'commercials': 4486, 'inspection': 4487, 'imagery': 4488, 'tammany': 4489, 'maximum': 4490, 'elephant': 4491, 'wounded': 4492, 'backed': 4493, 'relative': 4494, 'divided': 4495, 'haired': 4496, 'nplissken': 4497, 'droids': 4498, 'nthem': 4499, 'eerily': 4500, 'jazz': 4501, 'primary': 4502, 'ntrying': 4503, '275': 4504, 'file': 4505, 'winner': 4506, 'recording': 4507, 'slinky': 4508, 'chewie': 4509, 'infectious': 4510, 'recess': 4511, 'nsits': 4512, 'decorative': 4513, 'settings': 4514, 'elway': 4515, 'competing': 4516, 'madly': 4517, 'orca': 4518, 'nsome': 4519, 'waters': 4520, 'detoo': 4521, 'droid': 4522, 'price': 4523, 'skidding': 4524, 'puppeteer': 4525, 'rushed': 4526, 'adams': 4527, 'hm': 4528, 'movement': 4529, 'morts': 4530, 'inhabitants': 4531, 'splits': 4532, 'mantel': 4533, 'agrabah': 4534, 'hmmm': 4535, 'blurry': 4536, 'genius': 4537, 'shh': 4538, 'tentative': 4539, 'trading': 4540, 'wretched': 4541, 'misunderstanding': 4542, 'counters': 4543, 'perks': 4544, 'butch': 4545, 'frustrated': 4546, 
'delicious': 4547, 'heartsick': 4548, 'sugar': 4549, 'janus': 4550, 'vo': 4551, 'hauk': 4552, 'region': 4553, 'crooked': 4554, 'remains': 4555, 'decides': 4556, 'veterans': 4557, 'identity': 4558, 'order': 4559, 'puffing': 4560, 'addiction': 4561, 'nclear': 4562, 'pneumonia': 4563, 'needed': 4564, 'ntony': 4565, '189': 4566, 'galloway': 4567, 'shadowed': 4568, 'noisily': 4569, 'suffocating': 4570, 'grips': 4571, 'brief': 4572, 'acres': 4573, '153': 4574, 'wail': 4575, 'accented': 4576, 'slopes': 4577, 'wage': 4578, 'arguing': 4579, 'nfeet': 4580, 'creation': 4581, 'attached': 4582, '197': 4583, 'z': 4584, 'everything': 4585, 'roadies': 4586, '218': 4587, 'darryl': 4588, 'wells': 4589, 'gum': 4590, 'defend': 4591, 'wobbles': 4592, '1st': 4593, 'sorta': 4594, 'lasers': 4595, 'gotcha': 4596, 'caravan': 4597, 'buckboard': 4598, 'prakash': 4599, 'latin': 4600, 'camels': 4601, 'deeper': 4602, 'ntechnician': 4603, 'states': 4604, 'snicker': 4605, 'heating': 4606, 'smooths': 4607, 'muttered': 4608, 'containers': 4609, 'festival': 4610, 'adventure': 4611, 'bizarre': 4612, 'lena': 4613, 'nsurveillance': 4614, 'fastens': 4615, 'wash': 4616, 'devoted': 4617, 'fired': 4618, 'calculations': 4619, 'assured': 4620, 'knights': 4621, 'ingrid': 4622, 'obvious': 4623, 'rises': 4624, 'weighs': 4625, 'peers': 4626, 'property': 4627, 'beginning': 4628, 'supporting': 4629, 'relatives': 4630, 'like': 4631, 'hay': 4632, 'thudding': 4633, 'nostalgic': 4634, 'chilly': 4635, 'brass': 4636, 'modified': 4637, 'services': 4638, 'dripping': 4639, 'reveals': 4640, 'dsn': 4641, "'n": 4642, 'guilty': 4643, 'nstarts': 4644, 'pappa': 4645, 'canopy': 4646, 'catapults': 4647, 'band': 4648, 'harriers': 4649, 'smirk': 4650, 'beating': 4651, 'gallery': 4652, 'labored': 4653, 'tired': 4654, 'picture': 4655, 'nhim': 4656, 'testicular': 4657, 'colorado': 4658, 'bugging': 4659, 'tapping': 4660, 'canadians': 4661, 'horrible': 4662, 'archie': 4663, 'dipping': 4664, 'thorkel': 4665, 'tsch': 4666, 'spirited': 4667, 
'westwood': 4668, 'thunderous': 4669, 'splendor': 4670, 'turntable': 4671, 'deborah': 4672, 'unnaturally': 4673, 'assist': 4674, 'weak': 4675, 'threateningly': 4676, 'yourselves': 4677, 'rescue': 4678, 'big': 4679, 'implying': 4680, 'sarah': 4681, 'thinning': 4682, 'nbeyond': 4683, 'announcements': 4684, 'but': 4685, 'plantation': 4686, 'mongoose': 4687, 'monsters': 4688, 'nkeep': 4689, 'flick': 4690, 'listens': 4691, 'goons': 4692, 'ngreenpeace': 4693, 'showing': 4694, 'puff': 4695, 'lazily': 4696, 'prisoners': 4697, 'dried': 4698, 'mimes': 4699, 'agents': 4700, 'jergens': 4701, 'rig': 4702, 'united': 4703, 'mahogany': 4704, 'nhallway': 4705, 'coraline': 4706, 'hillside': 4707, 'alley': 4708, 'radius': 4709, 'hammered': 4710, 'searchlights': 4711, 'libs': 4712, 'junkie': 4713, 'joel': 4714, 'rival': 4715, 'motioning': 4716, 'pierce': 4717, 'circus': 4718, 'hope': 4719, 'spires': 4720, 'nfish': 4721, 'usual': 4722, 'fun': 4723, 'symbiote': 4724, 'owned': 4725, 'sharon': 4726, 'nhammond': 4727, 'lonesome': 4728, 'tidal': 4729, 'robin': 4730, 'envelops': 4731, 'reiss': 4732, 'deposits': 4733, 'hyena': 4734, 'nharry': 4735, 'lounge': 4736, 'fault': 4737, 'cords': 4738, 'fists': 4739, 'walked': 4740, 'watermelon': 4741, 'objective': 4742, 'sinister': 4743, 'farrell': 4744, 'parents': 4745, 'leonard': 4746, 'ngill': 4747, 'painting': 4748, 'lloyd': 4749, 'maynard': 4750, 'whirlybird': 4751, 'ndriver': 4752, 'cushion': 4753, 'congress': 4754, 'none': 4755, 'want': 4756, 'gurney': 4757, 'ndory': 4758, 'teens': 4759, 'pepper': 4760, 'gash': 4761, 'seventeen': 4762, 'nnicholas': 4763, 'comforting': 4764, 'nwhen': 4765, 'whose': 4766, 'restraints': 4767, 'homicide': 4768, '28': 4769, 'kings': 4770, 'salmon': 4771, 'happened': 4772, 'bail': 4773, 'snapshot': 4774, 'stella': 4775, 'wesley': 4776, 'obligation': 4777, 'precariously': 4778, 'saturday': 4779, 'felicia': 4780, 'sideways': 4781, 'n52': 4782, 'nambassador': 4783, 'nnative': 4784, 'uzi': 4785, 'removes': 4786, 'shhh': 
4787, 'windy': 4788, 'pulp': 4789, 'enter': 4790, '334': 4791, 'x94': 4792, 'malkovich': 4793, 'surveys': 4794, "he's": 4795, 'lightsaber': 4796, 'prop': 4797, 'force': 4798, 'biohazard': 4799, 'mu': 4800, 'leisure': 4801, 'ndead': 4802, 'rocketing': 4803, 'girlfriends': 4804, 'dancer': 4805, 'whitmore': 4806, 'snap': 4807, 'unpredictable': 4808, 'nwell': 4809, 'roxy': 4810, 'thug': 4811, 'binocular': 4812, 'squeals': 4813, 'spit': 4814, 'parapet': 4815, 'alleyway': 4816, 'bon': 4817, 'gaining': 4818, 'jeez': 4819, 'cotton': 4820, 'embedded': 4821, 'lennon': 4822, 'vibration': 4823, '1977': 4824, 'men': 4825, 'difficult': 4826, 'wallpaper': 4827, 'cups': 4828, 'anita': 4829, 'gently': 4830, 'amazing': 4831, 'attorneys': 4832, 'freda': 4833, 'creepers': 4834, 'anger': 4835, 'difficulty': 4836, 'peach': 4837, 'criminal': 4838, 'paying': 4839, 'gaze': 4840, 'coarse': 4841, 'containing': 4842, 'barreling': 4843, 'vets': 4844, 'punish': 4845, 'scanlon': 4846, 'blossom': 4847, 'sirens': 4848, 'muh': 4849, 'fritz': 4850, 'screeches': 4851, 'unloads': 4852, 'boxcar': 4853, 'ghastly': 4854, 'cabbie': 4855, 'sanders': 4856, 'sunroof': 4857, 'dish': 4858, 'cronies': 4859, 'yin': 4860, 'ntw': 4861, 'tuned': 4862, 'shares': 4863, 'motors': 4864, 'polaroids': 4865, 'raquel': 4866, 'nstares': 4867, 'overheard': 4868, 'lengths': 4869, 'javed': 4870, 'abdomen': 4871, 'rumble': 4872, 'kanjiklub': 4873, 'watchin': 4874, 'ax': 4875, 'aah': 4876, 'suitable': 4877, 'merciless': 4878, 'council': 4879, 'swerve': 4880, 'ability': 4881, 'surfing': 4882, 'impression': 4883, 'mph': 4884, 'kirby': 4885, 'protest': 4886, 'pacific': 4887, 'salutes': 4888, "'bravo": 4889, 'virtual': 4890, 'share': 4891, 'lifeboat': 4892, 'regal': 4893, 'nhelicopters': 4894, 'nmaggie': 4895, 'became': 4896, 'headlines': 4897, 'owe': 4898, 'flattened': 4899, 'stalker': 4900, 'chatting': 4901, 'nseems': 4902, 'buttons': 4903, 'n141': 4904, 'blade': 4905, 'hillbilly': 4906, 'bodies': 4907, 'detaches': 4908, 
'moonlight': 4909, 'ramps': 4910, 'clams': 4911, 'hatteberg': 4912, 'cricket': 4913, 'twins': 4914, 'usher': 4915, 'manor': 4916, 'pub': 4917, 'hitter': 4918, 'mos': 4919, 'interior': 4920, 'basic': 4921, 'stories': 4922, 'raise': 4923, 'jims': 4924, 'tag': 4925, 'tacs': 4926, 'slits': 4927, 'ushered': 4928, 'nmeeks': 4929, 'crouching': 4930, 'ncan': 4931, 'ross': 4932, 'clone': 4933, 'shuttle': 4934, 'buy': 4935, 'whir': 4936, 'get': 4937, 'unconsciously': 4938, 'squeal': 4939, 'thee': 4940, 'bosch': 4941, 'untie': 4942, 'ntammany': 4943, 'capital': 4944, 'conclusions': 4945, 'snack': 4946, 'cleared': 4947, 'painted': 4948, 'intercepts': 4949, 'n200': 4950, 'explode': 4951, 'zone': 4952, 'engineering': 4953, 'merchandise': 4954, 'government': 4955, 'hissing': 4956, 'hysteria': 4957, 'attach': 4958, 'ooo': 4959, 'gunmen': 4960, 'fu': 4961, 'whadda': 4962, 'black': 4963, 'warned': 4964, 'beings': 4965, 'nhome': 4966, 'cannons': 4967, 'shifting': 4968, 'fireball': 4969, '26': 4970, 'spoken': 4971, 'forte': 4972, 'wrench': 4973, 'unpacking': 4974, 'move': 4975, 'scrambles': 4976, 'boyo': 4977, 'nlight': 4978, '143': 4979, 'abyss': 4980, 'rest': 4981, 'scar': 4982, 'examining': 4983, 'fastened': 4984, 'randomly': 4985, 'laminated': 4986, 'evenly': 4987, 'barry': 4988, 'barking': 4989, 'turban': 4990, 'touches': 4991, 'crafts': 4992, 'thirsty': 4993, 'nhundred': 4994, 'boo': 4995, 'ejects': 4996, 'cuba': 4997, 'seller': 4998, 'pitchers': 4999, 'yanks': 5000, 'butler': 5001, 'anytime': 5002, 'pantagruel': 5003, 'favoring': 5004, 'explain': 5005, 'redhead': 5006, 'fiddles': 5007, 'consult': 5008, 'rick': 5009, 'bulk': 5010, 'object': 5011, 'rotors': 5012, 'morons': 5013, 'forgotten': 5014, 'prem': 5015, 'scurries': 5016, 'northern': 5017, 'rug': 5018, 'deck': 5019, 'overrun': 5020, 'naladdin': 5021, 'terms': 5022, 'face': 5023, 'klingon': 5024, '43': 5025, 'collection': 5026, 'towers': 5027, 'ghetto': 5028, 'ngraphic': 5029, 'hurrying': 5030, 'jumping': 5031, 'whistle': 
5032, 'gonna': 5033, 'give': 5034, 'murray': 5035, 'aerospatiale': 5036, 'spout': 5037, 'mary': 5038, 'bikers': 5039, 'apache': 5040, 'murmurs': 5041, 'so': 5042, 'pena': 5043, 'oak': 5044, 'whitney': 5045, 'sea': 5046, 'festivities': 5047, 'andre': 5048, 'leopard': 5049, 'dizzy': 5050, 'splinter': 5051, 'billions': 5052, 'upgrade': 5053, 'throat': 5054, 'queen': 5055, 'fastball': 5056, 'hungrily': 5057, 'furnace': 5058, 'boat': 5059, 'sputtering': 5060, 'converted': 5061, 'decades': 5062, 'uk': 5063, 'owner': 5064, 'nsam': 5065, 'brian': 5066, 'bliss': 5067, 'story': 5068, 'pilbow': 5069, 'nstanding': 5070, 'throne': 5071, 'ant': 5072, 'midday': 5073, 'writhe': 5074, 'practical': 5075, 'bless': 5076, 'analyzing': 5077, 'worrying': 5078, 'market': 5079, 'coco': 5080, 'opaque': 5081, 'sewn': 5082, 'lambeau': 5083, 'completed': 5084, 'opera': 5085, 'claimed': 5086, 'sepia': 5087, 'triggers': 5088, 'channel': 5089, 'nlloyd': 5090, 'completes': 5091, 'fridge': 5092, 'hi': 5093, 'feeling': 5094, 'rocking': 5095, 'locals': 5096, 'creep': 5097, 'reality': 5098, 'ext': 5099, 'sibert': 5100, 'nbeach': 5101, 'photographic': 5102, 'parachute': 5103, 'porters': 5104, 'sunken': 5105, 'forth': 5106, 'shaving': 5107, 'discreetly': 5108, 'ascend': 5109, 'eyeballs': 5110, 'crunches': 5111, 'crosses': 5112, 'eleven': 5113, 'undergrowth': 5114, 'composing': 5115, 'squirt': 5116, 'refrigerator': 5117, 'magnificent': 5118, 'setup': 5119, 'njack': 5120, 'ndipper': 5121, 'reasonable': 5122, 'pouring': 5123, 'gauze': 5124, 'clod': 5125, "'course": 5126, 'unshaven': 5127, 'obsession': 5128, 'reflecting': 5129, 'janie': 5130, 'cobb': 5131, 'n174': 5132, 'sounds': 5133, 'began': 5134, 'alot': 5135, 'transferring': 5136, 'reason': 5137, 'opponent': 5138, 'reset': 5139, 'viktor': 5140, 'nroom': 5141, 'armaments': 5142, 'nephew': 5143, 'stopping': 5144, 'nlet': 5145, 'closing': 5146, 'chess': 5147, 'fog': 5148, '5th': 5149, 'porsche': 5150, 'obscured': 5151, 'hug': 5152, 'wedged': 5153, 'zeke': 
5154, 'process': 5155, 'wincing': 5156, 'nrosenberg': 5157, 'hoisted': 5158, 'njacques': 5159, 'delight': 5160, 'innocence': 5161, 'aiight': 5162, 'stair': 5163, 'whimpering': 5164, 'groaning': 5165, 'keg': 5166, 'assholes': 5167, 'collect': 5168, 'supplies': 5169, 'bead': 5170, 'fossil': 5171, 'tumbles': 5172, 'undercover': 5173, 'boston': 5174, 'roster': 5175, 'range': 5176, 'juhu': 5177, 'sail': 5178, 'drips': 5179, 'felton': 5180, 'socket': 5181, 'walky': 5182, 'compelled': 5183, 'drillers': 5184, 'pregnant': 5185, 'forge': 5186, 'attend': 5187, 'children': 5188, 'switched': 5189, 'wore': 5190, 'air': 5191, 'ndid': 5192, 'ricochets': 5193, 'sacred': 5194, 'heh': 5195, 'startles': 5196, 'mortar': 5197, 'careens': 5198, 'explorer': 5199, 'nondescript': 5200, 'sleazy': 5201, 'nbreathing': 5202, 'pages': 5203, 'milkcrate': 5204, 'copper': 5205, 'bounce': 5206, 'n175': 5207, 'radiant': 5208, 'n142': 5209, 'laurent': 5210, 'awkward': 5211, 'machete': 5212, 'aren': 5213, 'tight': 5214, 'nneil': 5215, 'souvenir': 5216, 'glazed': 5217, 'renton': 5218, 'cliff': 5219, 'streets': 5220, 'generally': 5221, 'lightyear': 5222, 'gropes': 5223, 'underfoot': 5224, 'creates': 5225, 'springs': 5226, 'fellas': 5227, '23': 5228, 'ntries': 5229, 'newly': 5230, 'aww': 5231, 'robbing': 5232, 'comic': 5233, 'rollers': 5234, 'noodle': 5235, 'behalf': 5236, 'clueless': 5237, "nit's": 5238, 'rangers': 5239, 'dejected': 5240, 'nmay': 5241, 'serve': 5242, 'graduated': 5243, 'n73': 5244, 'therapy': 5245, 'relax': 5246, 'cattle': 5247, 'nroof': 5248, 'better': 5249, 'guessing': 5250, 'wound': 5251, 'tulip': 5252, 'bronson': 5253, 'overcome': 5254, 'grunts': 5255, 'lip': 5256, 'works': 5257, 'determined': 5258, 'n111': 5259, 'twirling': 5260, 'nfriend': 5261, 'partners': 5262, 'colloredo': 5263, 'engines': 5264, 'miriam': 5265, 'keypad': 5266, 'parisian': 5267, 'conducting': 5268, 'nsheldon': 5269, 'akbar': 5270, 'gorgeous': 5271, 'towels': 5272, 'curiosity': 5273, 'n63': 5274, 'shoe': 5275, 
'marsellus': 5276, 'appreciative': 5277, 'flakes': 5278, 'hers': 5279, 'responding': 5280, 'bringing': 5281, 'vulcan': 5282, 'doctor': 5283, 'cots': 5284, 'surrounding': 5285, 'n139': 5286, 'pushes': 5287, 'batwing': 5288, 'need': 5289, 'jeweler': 5290, 'social': 5291, 'nhe': 5292, 'n65': 5293, 'dismissive': 5294, 'collapsed': 5295, 'chimp': 5296, 'mulholland': 5297, 'pug': 5298, 'scraps': 5299, 'composure': 5300, 'barrow': 5301, 'grotesque': 5302, 'cubes': 5303, 'suggests': 5304, 'hux': 5305, 'ce': 5306, 'facial': 5307, 'heaving': 5308, 'ounce': 5309, 'stumbles': 5310, 'granted': 5311, 'n183': 5312, '283': 5313, 'shittiest': 5314, 'sal': 5315, 'ncandy': 5316, 'cooks': 5317, 'heats': 5318, 'hu': 5319, 'lays': 5320, 'thighs': 5321, 'maintain': 5322, 'talkie': 5323, 'guerrilla': 5324, 'nthese': 5325, 'nchair': 5326, 'lars': 5327, 'prowler': 5328, 'wooden': 5329, 'ravine': 5330, 'framed': 5331, 'clenches': 5332, 'shine': 5333, 'cleanly': 5334, 'reasons': 5335, 'pilothouse': 5336, 'conduct': 5337, 'adjustments': 5338, 'missed': 5339, 'sheet': 5340, 'punch': 5341, 'librarian': 5342, 'swedish': 5343, 'ledge': 5344, 'howls': 5345, 'exhibits': 5346, 'bet': 5347, 'leon': 5348, 'n149': 5349, 'fishes': 5350, 'hereby': 5351, 'darts': 5352, 'pistol': 5353, 'counselor': 5354, 'ndenise': 5355, 'killed': 5356, 'ballgame': 5357, 'multi': 5358, 'impeccably': 5359, 'sims': 5360, '293': 5361, 'not': 5362, 'shoves': 5363, 'hang': 5364, 'hologram': 5365, 'blizzard': 5366, 'nanchor': 5367, 'fuckers': 5368, 'tugging': 5369, 'cloth': 5370, 'virginity': 5371, 'uli': 5372, 'grommet': 5373, 'superimpose': 5374, 'pronounce': 5375, 'sedan': 5376, 'iss': 5377, 'papa': 5378, 'hemingway': 5379, 'janet': 5380, 'gaulle': 5381, 'balboa': 5382, 'oblivion': 5383, 'neyes': 5384, 'spooks': 5385, 'finding': 5386, 'folks': 5387, 'holsters': 5388, 'n124': 5389, "'malley": 5390, 'marina': 5391, 'embarrassment': 5392, 'n144': 5393, 'relents': 5394, 'nthree': 5395, 'preacher': 5396, 'shakespeare': 5397, 
'ponytail': 5398, 'converging': 5399, 'afterwards': 5400, 'jackets': 5401, 'moreno': 5402, 'shatner': 5403, 'revealed': 5404, 'emptied': 5405, 'often': 5406, 'freezer': 5407, 'sign': 5408, 'soaring': 5409, 'vermont': 5410, 'rapt': 5411, 'railway': 5412, 'invest': 5413, 'privately': 5414, 'prehistoric': 5415, 'impossible': 5416, 'partner': 5417, 'scrubs': 5418, 'nuttin': 5419, 'dumping': 5420, '1972': 5421, 'capture': 5422, 'willie': 5423, 'unnerving': 5424, 'teaches': 5425, 'batteries': 5426, 'tray': 5427, 'securing': 5428, 'ferrari': 5429, 'lawyer': 5430, 'x89': 5431, 'berets': 5432, 'scores': 5433, 'let': 5434, 'bottles': 5435, 'cedars': 5436, 'flexes': 5437, 'checks': 5438, 'provocatively': 5439, 'envelope': 5440, 'scalpel': 5441, 'statements': 5442, 'representation': 5443, 'previous': 5444, 'easter': 5445, 'clip': 5446, 'boxer': 5447, 'rather': 5448, 'extinguisher': 5449, 'blackboard': 5450, 'ominous': 5451, 'stiffens': 5452, 'melodramatic': 5453, 'tryin': 5454, 'shields': 5455, 'wrap': 5456, 'flustered': 5457, 'borrow': 5458, 'jenny': 5459, 'kowalsky': 5460, 'saucers': 5461, 'freshly': 5462, 'possibly': 5463, 'convention': 5464, 'bowling': 5465, 'overlooks': 5466, 'chart': 5467, 'dryer': 5468, 'berkeley': 5469, 'males': 5470, 'willing': 5471, 'rockhound': 5472, 'bother': 5473, 'absorb': 5474, 'wonderment': 5475, 'bulbs': 5476, 'passage': 5477, 'airplane': 5478, 'gush': 5479, 'potato': 5480, 'spasms': 5481, 'nyay': 5482, 'hydraulics': 5483, 'afterthought': 5484, 'exiting': 5485, 'arrests': 5486, 'youth': 5487, 'loosely': 5488, "'on": 5489, 'arabian': 5490, 'winston': 5491, 'yards': 5492, 'ants': 5493, 'southeast': 5494, 'symbols': 5495, 'rib': 5496, 'imperial': 5497, 'vigorously': 5498, 'tars': 5499, 'lumber': 5500, 'hop': 5501, 'grandfather': 5502, "'til": 5503, 'whaddaya': 5504, 'weather': 5505, 'coming': 5506, 'sigh': 5507, 'plummet': 5508, 'tongs': 5509, 'deagle': 5510, 'pit': 5511, 'apparition': 5512, 'standoff': 5513, 'northwest': 5514, 'impassively': 
5515, 'thrilling': 5516, 'saxophone': 5517, 'midst': 5518, 'zap': 5519, 'serves': 5520, 'unused': 5521, 'programmed': 5522, '203': 5523, 'hearin': 5524, 'ntheo': 5525, 'wan': 5526, 'faust': 5527, 'hash': 5528, 'pr': 5529, 'mansion': 5530, 'absurd': 5531, 'blow': 5532, 'viscous': 5533, 'skeletal': 5534, 'thanking': 5535, 'grateful': 5536, 'tremble': 5537, 'envy': 5538, "helen's": 5539, 'sticking': 5540, 'ruin': 5541, 'babe': 5542, 'whiskers': 5543, 'spends': 5544, 'mixes': 5545, 'string': 5546, 'grinning': 5547, 'nmrs': 5548, 'sensible': 5549, 'access': 5550, 'eyebrow': 5551, 'arches': 5552, 'runner': 5553, '2187': 5554, 'considering': 5555, 'ignores': 5556, 'cong': 5557, 'bachelor': 5558, 'however': 5559, 'freaky': 5560, 'signore': 5561, 'coffee': 5562, 'hurls': 5563, 'eighth': 5564, 'paperback': 5565, 'abe': 5566, 'hun': 5567, 'lettering': 5568, 'interrogation': 5569, 'blips': 5570, 'imitates': 5571, 'themselves': 5572, 'center': 5573, 'readings': 5574, 'responsible': 5575, 'nkids': 5576, 'line': 5577, 'tic': 5578, 'escapes': 5579, 'pandemonium': 5580, 'espionage': 5581, 'source': 5582, 'nixon': 5583, 'locking': 5584, 'pearls': 5585, 'roasted': 5586, 'mumbai': 5587, 'pads': 5588, 'thuds': 5589, 'non': 5590, 'aiyana': 5591, 'bought': 5592, 'casualties': 5593, 'acutes': 5594, 'clubhouse': 5595, "nyou're": 5596, 'sarcastic': 5597, 'dumplings': 5598, "'night": 5599, 'country': 5600, 'ashtrays': 5601, 'definition': 5602, 'cullen': 5603, 'nsay': 5604, 'slowing': 5605, 'damaging': 5606, 'fucked': 5607, 'stifles': 5608, 'burnett': 5609, 'clever': 5610, 'accused': 5611, 'drivin': 5612, 'divide': 5613, '151': 5614, 'angelo': 5615, 'compass': 5616, 'lionel': 5617, 'tiniest': 5618, 'astounding': 5619, 'reels': 5620, 'iris': 5621, 'intercept': 5622, 'whistlin': 5623, 'langley': 5624, 'unnecessary': 5625, 'yep': 5626, 'virtue': 5627, 'nboss': 5628, 'emmett': 5629, 'milky': 5630, 'jeopardy': 5631, 'hates': 5632, 'marionette': 5633, 'done': 5634, 'fountain': 5635, 'max': 5636, 
'deciding': 5637, 'prototype': 5638, 'cactus': 5639, 'peoples': 5640, 'restaurant': 5641, 'calf': 5642, 'under': 5643, 'nwatches': 5644, 'betrayal': 5645, 'srinivas': 5646, 'deserted': 5647, 'understands': 5648, 'treated': 5649, 'ew': 5650, 'deep': 5651, 'nbob': 5652, 'hobbs': 5653, 'ensues': 5654, 'elated': 5655, 'overpass': 5656, 'button': 5657, 'sara': 5658, 'floris': 5659, 'snowbank': 5660, 'infinity': 5661, 'recycle': 5662, '250': 5663, 'manning': 5664, 'nash': 5665, 'sacrifice': 5666, 'pennsylvania': 5667, 'cracked': 5668, 'intelligent': 5669, 'rey': 5670, 'bells': 5671, 'jaye': 5672, 'plus': 5673, 'jag': 5674, 'wasted': 5675, 'impossibly': 5676, 'drill': 5677, 'sticky': 5678, 'efficient': 5679, 'bing': 5680, 'dayton': 5681, 'thrift': 5682, 'tunic': 5683, 'supreme': 5684, 'sprints': 5685, 'bystander': 5686, 'barricades': 5687, 'sellin': 5688, 'pivot': 5689, 'mahal': 5690, 'reflex': 5691, 'previously': 5692, 'pulses': 5693, 'casually': 5694, 'icing': 5695, 'salinas': 5696, 'hike': 5697, 'soar': 5698, 'legs': 5699, 'lights': 5700, 'drilled': 5701, 'poole': 5702, 'totem': 5703, 'songs': 5704, 'wavering': 5705, 'nricky': 5706, 'ins': 5707, 'concertina': 5708, 'team': 5709, 'tilts': 5710, 'supernova': 5711, 'liberal': 5712, 'slicing': 5713, '400': 5714, 'n107': 5715, 'buyer': 5716, 'soon': 5717, 'marcel': 5718, 'chaotic': 5719, 'contains': 5720, 'lowrey': 5721, 'win': 5722, 'tippit': 5723, 'colored': 5724, 'matter': 5725, 'beverly': 5726, 'success': 5727, 'checkpoint': 5728, 'chest': 5729, 'futters': 5730, 'fresco': 5731, 'realize': 5732, 'upright': 5733, 'repeating': 5734, 'briefcase': 5735, 'walls': 5736, 'sewer': 5737, 'busy': 5738, 'times': 5739, 'darkness': 5740, 'drawings': 5741, 'recover': 5742, 'insignificant': 5743, 'occur': 5744, 'handing': 5745, 'n241': 5746, 'joyce': 5747, 'backyard': 5748, '223': 5749, 'blown': 5750, 'scent': 5751, 'rhonda': 5752, 'are': 5753, 'baker': 5754, 'suspicion': 5755, 'hallway': 5756, 'independence': 5757, 'pull': 5758, 
'message': 5759, 'tai': 5760, 'fudge': 5761, 'fantasies': 5762, 'frank': 5763, 'database': 5764, 'vomit': 5765, 'endlessly': 5766, 'retrieve': 5767, 'suburb': 5768, 'carried': 5769, 'ernest': 5770, 'circle': 5771, 'flattens': 5772, 'barracks': 5773, 'blowing': 5774, 'luxury': 5775, 'honestly': 5776, 'biscuit': 5777, 'cannibals': 5778, 'bitches': 5779, 'emptying': 5780, 'automatic': 5781, 'discern': 5782, 'hip': 5783, 'volunteer': 5784, 'bananas': 5785, 'strict': 5786, 'ntruck': 5787, 'ornate': 5788, 'overnight': 5789, 'revive': 5790, 'pokes': 5791, 'ghul': 5792, 'assumes': 5793, 'layout': 5794, 'philippe': 5795, 'woody': 5796, 'earphone': 5797, 'hollywood': 5798, 'jessica': 5799, 'judge': 5800, 'agonized': 5801, 'njane': 5802, 'brook': 5803, 'nhear': 5804, 'invade': 5805, 'n209': 5806, 'lunch': 5807, 'mortified': 5808, 'di': 5809, 'directs': 5810, 'refugee': 5811, 'happens': 5812, 'booby': 5813, 'cuffs': 5814, 'sharpened': 5815, 'news': 5816, 'bunny': 5817, 'lotsa': 5818, 'wallet': 5819, 'vanish': 5820, 'njohnny': 5821, 'laying': 5822, 'figuring': 5823, 'tile': 5824, 'confirms': 5825, 'evacuate': 5826, 'diapers': 5827, 'lien': 5828, 'aftermath': 5829, 'broussard': 5830, 'foredeck': 5831, 'leans': 5832, 'horror': 5833, 'dismounts': 5834, 'bowl': 5835, 'specific': 5836, 'cacophony': 5837, 'blinding': 5838, 'goodchuck': 5839, 'nglasses': 5840, 'tbd': 5841, 'binoculars': 5842, '82': 5843, 'shared': 5844, 'swung': 5845, 'waving': 5846, 'chopped': 5847, 'shocking': 5848, 'radio': 5849, 'cerebro': 5850, 'crashes': 5851, 'on': 5852, 'work': 5853, 'ever': 5854, 'shark': 5855, 'balloon': 5856, 'startin': 5857, 'disturbing': 5858, 'lever': 5859, 'teddy': 5860, 'npushes': 5861, 'embarrass': 5862, 'principal': 5863, 'nwalking': 5864, 'ruthless': 5865, 'azizi': 5866, 'nfollows': 5867, 'whizzes': 5868, 'foolish': 5869, 'nellis': 5870, 'outraged': 5871, 'pitcher': 5872, 'lynx': 5873, 'bathroom': 5874, 'alias': 5875, 'astonishment': 5876, 'nipples': 5877, 'illuminated': 5878, 
'steaming': 5879, 'e': 5880, 'fail': 5881, 'store': 5882, 'n4': 5883, 'mercy': 5884, 'scowls': 5885, 'sentimental': 5886, 'bulky': 5887, 'robotic': 5888, 'detonators': 5889, 'lamps': 5890, 'glinting': 5891, 'modules': 5892, 'reach': 5893, 'accelerating': 5894, 'meters': 5895, 'search': 5896, 'credits': 5897, 'spilling': 5898, 'answer': 5899, 'katrina': 5900, 'succession': 5901, 'department': 5902, 'sustained': 5903, 'court': 5904, 'lagoon': 5905, 'troop': 5906, 'toll': 5907, 'nemo': 5908, "'s": 5909, 'chant': 5910, 'cap': 5911, 'intern': 5912, 'cinderella': 5913, 'ba': 5914, 'momentarily': 5915, 'stated': 5916, 'blankets': 5917, 'truth': 5918, 'banned': 5919, 'nstop': 5920, 'garderobe': 5921, 'martinez': 5922, 'voice': 5923, 'contagious': 5924, 'ahead': 5925, 'extinguish': 5926, 'nbutton': 5927, 'paintings': 5928, 'gamble': 5929, 'diane': 5930, 'sing': 5931, 'floyd': 5932, 'sneak': 5933, 'countless': 5934, 'ndaddy': 5935, 'fluids': 5936, "'y": 5937, 'joins': 5938, 'stats': 5939, 'ferdy': 5940, 'kidding': 5941, 'customers': 5942, 'oblivious': 5943, 'ndesk': 5944, 'ripping': 5945, 'nwu': 5946, 'cavalry': 5947, 'underway': 5948, 'pitches': 5949, 'account': 5950, 'hooting': 5951, 'whacking': 5952, 'jumbo': 5953, 'eye': 5954, 'club': 5955, 'wear': 5956, 'liking': 5957, 'hail': 5958, 'engraved': 5959, 'streamers': 5960, 'pinto': 5961, 'ironic': 5962, 'thruster': 5963, 'ride': 5964, 'energy': 5965, 'nmichael': 5966, 'lib': 5967, 'osbourne': 5968, 'persian': 5969, 'sighs': 5970, 'beyond': 5971, "she's": 5972, 'commander': 5973, 'decline': 5974, 'whine': 5975, 'souls': 5976, 'feathered': 5977, 'john': 5978, 'enterprise': 5979, 'reached': 5980, 'nuclear': 5981, 'nimziki': 5982, 'situation': 5983, 'michigan': 5984, 'currently': 5985, 'thumb': 5986, 'justin': 5987, 'newsstand': 5988, 'rincon': 5989, 'snowing': 5990, 'shariff': 5991, 'reporters': 5992, 'square': 5993, 'food': 5994, 'shoulda': 5995, 'edna': 5996, 'cambridge': 5997, 'technological': 5998, 'birds': 5999, 'crook': 
6000, 'combing': 6001, 'surfboards': 6002, 'moose': 6003, 'decisions': 6004, 'ramp': 6005, 'hums': 6006, 'clicks': 6007, 'pittaro': 6008, 'nearing': 6009, 'arrival': 6010, 'freedom': 6011, 'hanger': 6012, 'dazed': 6013, 'doggie': 6014, 'mutilated': 6015, 'tonight': 6016, 'lacing': 6017, 'nsuper': 6018, 'duplicate': 6019, 'thirteen': 6020, 'turtle': 6021, 'craig': 6022, 'important': 6023, 'casual': 6024, 'ngail': 6025, 'generation': 6026, 'shuffles': 6027, 'crust': 6028, 'sandbagged': 6029, 'vivid': 6030, 'wake': 6031, 'suffer': 6032, 'cub': 6033, 'scrubbing': 6034, 'trent': 6035, 'airmen': 6036, 'goodbyes': 6037, 'crisis': 6038, 'welcome': 6039, 'side': 6040, 'mosaic': 6041, 'purposefully': 6042, 'posts': 6043, 'illusion': 6044, 'nelec': 6045, 'silk': 6046, 'painfully': 6047, 'substitute': 6048, 'bills': 6049, 'turner': 6050, 'stacy': 6051, 'brown': 6052, 'habighorst': 6053, 'docks': 6054, 'nshang': 6055, 'reactions': 6056, 'navy': 6057, 'pressures': 6058, 'studios': 6059, 'regaining': 6060, 'scans': 6061, 'conspiracy': 6062, 'limbs': 6063, 'ncogsworth': 6064, 'whether': 6065, 'n26': 6066, 'amnesia': 6067, 'ventilator': 6068, 'zoo': 6069, 'rents': 6070, 'kept': 6071, 'tripod': 6072, 'slows': 6073, 'nmiles': 6074, 'dept': 6075, 'specialty': 6076, 'computers': 6077, 'dauthuille': 6078, 'necessary': 6079, 'careening': 6080, 'lookout': 6081, 'headquarters': 6082, 'auxiliary': 6083, 'mantis': 6084, 'crush': 6085, 'york': 6086, 'vulnerability': 6087, 'mack': 6088, 'curled': 6089, 'descent': 6090, 'houston': 6091, 'gem': 6092, 'achievement': 6093, 'koons': 6094, 'sheriff': 6095, 'concert': 6096, 'mud': 6097, 'nhouse': 6098, 'hell': 6099, 'challenger': 6100, 'duloc': 6101, '80': 6102, 'tubular': 6103, 'thief': 6104, 'xavier': 6105, 'francine': 6106, '266': 6107, 'mature': 6108, 'cranes': 6109, 'existence': 6110, 'nsky': 6111, 'nmel': 6112, 'caroline': 6113, 'millennium': 6114, 'focused': 6115, 'trigger': 6116, 'slink': 6117, 'riddled': 6118, 'hardest': 6119, 'quicker': 
6120, 'nrocky': 6121, 'barrier': 6122, 'meghan': 6123, 'bretsaws': 6124, 'ugh': 6125, 'nfingers': 6126, 'lips': 6127, 'czurda': 6128, 'celebration': 6129, 'planet': 6130, 'robot': 6131, 'collegiate': 6132, 'casper': 6133, 'powerless': 6134, 'strung': 6135, 'squints': 6136, 'jody': 6137, 'motorcade': 6138, 'engulfs': 6139, 'fever': 6140, 'trennant': 6141, 'investigating': 6142, 'snappy': 6143, 'edgy': 6144, 'lamont': 6145, 'marsh': 6146, 'return': 6147, 'pause': 6148, 'diagrams': 6149, 'glare': 6150, 'guerrillas': 6151, 'month': 6152, 'rotted': 6153, 'erect': 6154, 'abagnale': 6155, 'lowenstein': 6156, 'photograph': 6157, 'disaster': 6158, 'hugs': 6159, 'rolled': 6160, 'rooftop': 6161, 'peking': 6162, 'shame': 6163, 'include': 6164, 'carry': 6165, 'captured': 6166, 'walks': 6167, 'marci': 6168, 'swerving': 6169, 'grouping': 6170, 'inhales': 6171, 'scurrying': 6172, 'nineteen': 6173, 'rosenberg': 6174, 'contact': 6175, 'nonetheless': 6176, 'incoherent': 6177, 'latrine': 6178, 'blank': 6179, 'oil': 6180, 'proven': 6181, 'command': 6182, 'tummy': 6183, 'almighty': 6184, 'smokes': 6185, 'tfm': 6186, 'blossoms': 6187, 'neatly': 6188, 'beatin': 6189, '2010': 6190, 'invitation': 6191, '3000': 6192, 'bates': 6193, 'activity': 6194, 'dejectedly': 6195, 'corps': 6196, 'shootin': 6197, 'bella': 6198, 'conventional': 6199, 'thanked': 6200, 'four': 6201, 'django': 6202, 'bracelet': 6203, 'orientation': 6204, 'coals': 6205, 'jasmine': 6206, 'solitary': 6207, 'fighting': 6208, 'protection': 6209, 'sharks': 6210, 'panting': 6211, 'evolving': 6212, 'aspen': 6213, 'napkins': 6214, '99': 6215, 'meaning': 6216, 'legal': 6217, 'hesitation': 6218, 'knowing': 6219, 'swearing': 6220, 'ncube': 6221, 'nkelly': 6222, 'laserbolts': 6223, 'charmed': 6224, 'tyree': 6225, 'cartoon': 6226, 'winter': 6227, 'consul': 6228, 'brilliantly': 6229, 'underwear': 6230, 'pig': 6231, 'backward': 6232, 'circuitry': 6233, 'danced': 6234, 'shattering': 6235, 'incidentally': 6236, 'clucking': 6237, 'cringing': 
6238, 'relaxed': 6239, 'kitten': 6240, 'villa': 6241, 'sleep': 6242, 'layers': 6243, 'senseless': 6244, 'spandex': 6245, 'torpedoes': 6246, 'disappearing': 6247, 'pastries': 6248, 'nbeast': 6249, 'meadow': 6250, 'calms': 6251, 'protester': 6252, 'classified': 6253, 'depicting': 6254, 'starts': 6255, 'horses': 6256, 'nreading': 6257, 'dwayne': 6258, 'track': 6259, 'sexually': 6260, 'stainless': 6261, 'slowed': 6262, 'flanked': 6263, 'dow': 6264, 'casement': 6265, 'distribution': 6266, 'holographic': 6267, 'adjusting': 6268, 'magneto': 6269, 'oww': 6270, 'drunken': 6271, 'benches': 6272, 'logical': 6273, 'flew': 6274, 'saddled': 6275, 'perfectly': 6276, 'spiked': 6277, 'tisserant': 6278, 'tourist': 6279, 'shiver': 6280, 'slipping': 6281, 'lyin': 6282, 'n132': 6283, 'facts': 6284, 'shit': 6285, 'orangutan': 6286, "'ly": 6287, 'buyin': 6288, 'aggressive': 6289, 'translator': 6290, 'demolition': 6291, 'ji': 6292, 'gulden': 6293, 'letting': 6294, 'mines': 6295, 'nwizard': 6296, 'investigators': 6297, 'moment': 6298, 'hangs': 6299, 'spectacles': 6300, 'sammy': 6301, 'glowing': 6302, 'caresses': 6303, 'partly': 6304, 'majordomo': 6305, 'shocks': 6306, 'robbie': 6307, 'smokey': 6308, 'sixteen': 6309, 'activate': 6310, 'n83': 6311, '298': 6312, 'ndidn': 6313, 'bows': 6314, 'gazing': 6315, 'ushering': 6316, 'weigh': 6317, 'henderson': 6318, 'quaint': 6319, 'overwhelming': 6320, 'becca': 6321, 'declared': 6322, 'tipped': 6323, 'newark': 6324, 'scorched': 6325, "'mon": 6326, 'mercer': 6327, 'every': 6328, 'tracts': 6329, 'minnesota': 6330, 'tax': 6331, 'top': 6332, 'nlook': 6333, 'embers': 6334, 'tattoo': 6335, 'wrapping': 6336, 'crackhead': 6337, 'foil': 6338, 'ing': 6339, 'raining': 6340, 'nline': 6341, 'pfc': 6342, 'weirder': 6343, 'neat': 6344, 'verify': 6345, 'confidence': 6346, 'bazaar': 6347, 'cashed': 6348, 'cancer': 6349, 'faraway': 6350, 'strapped': 6351, 'axes': 6352, 'ding': 6353, 'dude': 6354, 'penny': 6355, 'la': 6356, 'childish': 6357, 'company': 6358, 
'explosive': 6359, 'dubious': 6360, 'offered': 6361, 'quarters': 6362, 'chubby': 6363, 'artie': 6364, 'nisn': 6365, 'gripping': 6366, 'revolutionary': 6367, 'disbelief': 6368, 'urn': 6369, 'pecking': 6370, 'rattling': 6371, 'whooping': 6372, 'petal': 6373, 'normal': 6374, 'necessarily': 6375, 'challenges': 6376, 'cloud': 6377, 'swats': 6378, 'deaths': 6379, 'springing': 6380, 'survivors': 6381, 'suck': 6382, 'yells': 6383, 'successfully': 6384, 'ogre': 6385, 'nget': 6386, 'jerk': 6387, 'foot': 6388, 'season': 6389, 'do': 6390, 'nmaking': 6391, 'groomed': 6392, 'kneels': 6393, 'sliding': 6394, 'depression': 6395, 'menacing': 6396, 'npulls': 6397, 'winks': 6398, 'communicator': 6399, 'such': 6400, 'reminding': 6401, 'untouched': 6402, 'cloak': 6403, 'cu': 6404, 'mechanic': 6405, 'scissors': 6406, 'clap': 6407, 'extend': 6408, 'impressed': 6409, 'discount': 6410, 'jules': 6411, 'civilians': 6412, 'npast': 6413, 'boyfriend': 6414, 'reflexes': 6415, 'abrar': 6416, 'tasker': 6417, 'toad': 6418, 'nn': 6419, 'gown': 6420, 'movin': 6421, 'anchorhead': 6422, 'chuckie': 6423, 'erupts': 6424, 'vial': 6425, 'exertion': 6426, 'extension': 6427, 'managing': 6428, 'perk': 6429, 'yapping': 6430, 'snag': 6431, 'xb7': 6432, 'recruiter': 6433, 'addition': 6434, 'safe': 6435, 'rounding': 6436, 'slurred': 6437, 'ignoring': 6438, 'scud': 6439, 'gullfire': 6440, 'kay': 6441, 'economic': 6442, 'marvelous': 6443, 'canteen': 6444, 'makeup': 6445, 'hostages': 6446, 'wont': 6447, 'solution': 6448, 'wider': 6449, 'hunting': 6450, 'tube': 6451, 'mutant': 6452, 'nhalf': 6453, 'nother': 6454, 'church': 6455, 'prevented': 6456, 'fugue': 6457, 'wybie': 6458, 'coupling': 6459, 'horn': 6460, 'weiner': 6461, 'jakku': 6462, 'spoil': 6463, 'joanna': 6464, 'chase': 6465, 'malibu': 6466, 'slowly': 6467, 'hate': 6468, 'psychic': 6469, 'originally': 6470, 'blue': 6471, 'distress': 6472, 'nlev': 6473, 'enormous': 6474, 'skinner': 6475, 'mcguire': 6476, 'nosed': 6477, 'grossman': 6478, 'ntyler': 6479, 
'nsharp': 6480, 'stomp': 6481, 'perspective': 6482, 'photo': 6483, 'bra': 6484, 'queens': 6485, 'digits': 6486, 'russian': 6487, 'henry': 6488, 'ndrops': 6489, 'curve': 6490, 'n91': 6491, 'produces': 6492, 'nariadne': 6493, 'sons': 6494, 'successful': 6495, 'epic': 6496, 'n162': 6497, 'schoolhouse': 6498, 'flushing': 6499, 'egypt': 6500, 'puke': 6501, 'losin': 6502, 'should': 6503, 'thin': 6504, 'passing': 6505, 'boomer': 6506, 'paths': 6507, 'n29': 6508, 'fredrick': 6509, 'schism': 6510, 'tiger': 6511, 'unfamiliar': 6512, 'qualified': 6513, 'fer': 6514, 'peel': 6515, 'containment': 6516, 'second': 6517, 'regularly': 6518, 'notebooks': 6519, 'ndee': 6520, 'sensual': 6521, "'no": 6522, 'irrelevant': 6523, 'projected': 6524, 'handiwork': 6525, 'addict': 6526, 'brave': 6527, 'shrink': 6528, 'graph': 6529, 'lawyers': 6530, 'snooze': 6531, 'unloading': 6532, 'stacking': 6533, 'parking': 6534, 'sprinting': 6535, '13': 6536, 'gramma': 6537, 'governor': 6538, 'ntaken': 6539, 'worse': 6540, 'rigid': 6541, 'brushing': 6542, 'salesman': 6543, '205': 6544, 'secrets': 6545, 'figaro': 6546, 'sunk': 6547, 'attracted': 6548, 'stranded': 6549, 'n173': 6550, 'au': 6551, 'wedges': 6552, 'tremor': 6553, 'sculpture': 6554, 'sloane': 6555, 'pant': 6556, 'sneaks': 6557, 'examine': 6558, "'t": 6559, 'hurting': 6560, 'esmeralda': 6561, 'understood': 6562, 'nmoment': 6563, 'inspiring': 6564, 'ohio': 6565, 'jr': 6566, 'muthafuckin': 6567, 'fist': 6568, 'flipped': 6569, 'chalet': 6570, 'cute': 6571, 'drove': 6572, 'bonds': 6573, 'sighing': 6574, 'candles': 6575, 'greeting': 6576, 'covert': 6577, 'cuff': 6578, 'gag': 6579, 'tuck': 6580, 'fin': 6581, 'nlast': 6582, 'prosser': 6583, 'candyland': 6584, 'atlanta': 6585, 'nbelow': 6586, 'diva': 6587, 'bronx': 6588, 'cackle': 6589, 'spelled': 6590, 'available': 6591, 'illustration': 6592, 'winces': 6593, 'marry': 6594, 'article': 6595, 'dyin': 6596, 'baretta': 6597, 'suspect': 6598, 'dime': 6599, 'nedry': 6600, 'soccer': 6601, 'reserved': 6602, 
'skulls': 6603, 'administrator': 6604, '188': 6605, 'gasps': 6606, '240': 6607, 'constructed': 6608, 'flying': 6609, 'indians': 6610, 'scanner': 6611, 'meerkats': 6612, 'emerging': 6613, 'the': 6614, 'terminus': 6615, 'illuminates': 6616, 'centered': 6617, 'fans': 6618, 'load': 6619, 'xads': 6620, 'matchstick': 6621, 'head': 6622, 'distract': 6623, 'necks': 6624, 'someplace': 6625, 'nilsen': 6626, 'ngrant': 6627, 'urinal': 6628, 'n188': 6629, 'video': 6630, 'lean': 6631, 'parasite': 6632, 'soul': 6633, 'kite': 6634, 'niggas': 6635, 'morally': 6636, '405': 6637, 'lighthouse': 6638, 'nhmm': 6639, 'cheek': 6640, "'fore": 6641, 'nasty': 6642, 'benjamin': 6643, 'glorious': 6644, 'alfred': 6645, 'running': 6646, 'tailing': 6647, '60': 6648, 'operated': 6649, 'snarling': 6650, 'swirling': 6651, 'murders': 6652, 'unmoving': 6653, 'ntable': 6654, 'ndifferent': 6655, 'survives': 6656, 'ledgers': 6657, 'slime': 6658, 'drive': 6659, 'nadrian': 6660, '256': 6661, 'manhattan': 6662, 'acquired': 6663, 'protecting': 6664, 'diego': 6665, 'nelton': 6666, 'taped': 6667, 'memories': 6668, 'shaved': 6669, 'garb': 6670, 'waiting': 6671, 'umbrella': 6672, 'muldoon': 6673, 'far': 6674, 'outta': 6675, 'grasping': 6676, 'lavalier': 6677, 'bankers': 6678, 'pastry': 6679, 'alcohol': 6680, 'nread': 6681, 'mutters': 6682, 'stricken': 6683, 'baking': 6684, 'reloading': 6685, 'identities': 6686, 'warily': 6687, 'nexcept': 6688, 'siren': 6689, 'celebrate': 6690, 'terminal': 6691, 'nbed': 6692, 'blurred': 6693, 'n79': 6694, 'lenny': 6695, 'fan': 6696, 'upscale': 6697, 'weakly': 6698, 'clutch': 6699, 'aside': 6700, 'reward': 6701, 'factory': 6702, '50000': 6703, 'crushed': 6704, 'balcony': 6705, 'filing': 6706, 'descript': 6707, 'nwhich': 6708, 'throws': 6709, 'rob': 6710, 'unlocks': 6711, 'suggesting': 6712, 'employees': 6713, 'vanessa': 6714, 'vaguely': 6715, 'planetoid': 6716, 'owners': 6717, 'suckers': 6718, 'savannah': 6719, 'morgan': 6720, 'caught': 6721, 'xcb': 6722, 'schedule': 6723, 
'cordless': 6724, 'fires': 6725, 'frowning': 6726, 'gills': 6727, 'ready': 6728, 'crawl': 6729, 'contribution': 6730, 'reservoir': 6731, 'vein': 6732, 'chinese': 6733, 'lester': 6734, '284': 6735, 'khalkali': 6736, '6th': 6737, 'using': 6738, 'tongue': 6739, 'purpose': 6740, 'nstraight': 6741, 'shaffer': 6742, 'ds': 6743, 'n231': 6744, 'chirps': 6745, 'scottie': 6746, 'tagged': 6747, 'acknowledge': 6748, 'fund': 6749, 'nmom': 6750, 'while': 6751, 'he': 6752, 'xadm': 6753, 'revealing': 6754, 'drunks': 6755, 'lumiere': 6756, 'russell': 6757, 'diabetes': 6758, 'deflector': 6759, 'ice': 6760, 'shrill': 6761, 'helmsman': 6762, 'kidnapping': 6763, 'powell': 6764, 'tags': 6765, 'handshake': 6766, 'nbefore': 6767, 'nwarren': 6768, 'nshot': 6769, 'slight': 6770, 'ntell': 6771, 'rags': 6772, 'buses': 6773, 'passageway': 6774, 'sweats': 6775, 'swords': 6776, 'feed': 6777, 'surrounded': 6778, "'neill": 6779, 'expectation': 6780, 'gigantic': 6781, 'fa': 6782, 'slumps': 6783, 've': 6784, 'jars': 6785, 'listened': 6786, 'stuffing': 6787, 'execute': 6788, 'commentators': 6789, 'dilapidated': 6790, 'dial': 6791, 'formed': 6792, 'awe': 6793, 'percentage': 6794, 'constance': 6795, 'portion': 6796, 'justice': 6797, 'consists': 6798, 'mime': 6799, 'married': 6800, 'tailored': 6801, 'stony': 6802, 'bunker': 6803, 'nmontage': 6804, 'depth': 6805, 'chapters': 6806, 'presidential': 6807, 'madam': 6808, 'nvague': 6809, 'nplastic': 6810, 'sister': 6811, 'whipping': 6812, 'mars': 6813, 'porter': 6814, 'reagan': 6815, 'n1': 6816, 'abroad': 6817, 'nalong': 6818, 'loud': 6819, '84': 6820, 'pharmacy': 6821, 'restricted': 6822, 'chasm': 6823, 'nquickly': 6824, 'cowering': 6825, 'com': 6826, 'nclothes': 6827, 'list': 6828, 'pint': 6829, 'quadrant': 6830, 'marshals': 6831, 'nway': 6832, 'npatrick': 6833, '3rd': 6834, 'crew': 6835, 'fixture': 6836, 'greatly': 6837, 'materialize': 6838, 'nears': 6839, 'stretcher': 6840, 'n90': 6841, 'romulans': 6842, 'longer': 6843, 'adult': 6844, 'mitaka': 6845, 
'germany': 6846, 'copa': 6847, 'commitment': 6848, 'isringhausen': 6849, 'definitely': 6850, 'spacecraft': 6851, 'tattered': 6852, 'messed': 6853, 'burden': 6854, 'gym': 6855, 'crescent': 6856, 'pattern': 6857, 'wondered': 6858, 'boos': 6859, 'emptiness': 6860, 'ohh': 6861, 'bowing': 6862, 'chicks': 6863, 'xa2': 6864, 'boom': 6865, 'mice': 6866, "'mere": 6867, 'wonderful': 6868, 'jeffrey': 6869, 'shrieks': 6870, 'minivan': 6871, 'connally': 6872, 'njumps': 6873, 'intend': 6874, '1998': 6875, 'ham': 6876, 'introduction': 6877, 'connection': 6878, 'nods': 6879, 'workbench': 6880, 'sheath': 6881, 'cottage': 6882, 'cody': 6883, 'towering': 6884, '74': 6885, 'bridesmaids': 6886, 'bola': 6887, 'shitting': 6888, 'simple': 6889, 'dandy': 6890, 'veterinarian': 6891, 'squarely': 6892, 'boot': 6893, 'shops': 6894, 'strain': 6895, 'eve': 6896, 'attempt': 6897, 'boxing': 6898, 'cyclone': 6899, 'suitcase': 6900, 'lightening': 6901, 'honour': 6902, 'silences': 6903, 'limousine': 6904, 'n198': 6905, 'uncontrollably': 6906, 'angles': 6907, 'giang': 6908, 'wreck': 6909, 'trance': 6910, 'punishing': 6911, 'nazi': 6912, 'hager': 6913, 'bustles': 6914, 'honey': 6915, 'young': 6916, 'appetite': 6917, 'sniffs': 6918, 'quiets': 6919, 'ngulls': 6920, 'flirt': 6921, 'irate': 6922, 'dwarves': 6923, 'chimney': 6924, 'currency': 6925, 'calmer': 6926, 'bronze': 6927, 'hai': 6928, 'mugs': 6929, 'lap': 6930, 'cousin': 6931, 'blasters': 6932, 'marked': 6933, 'o2': 6934, 'benefits': 6935, 'affixed': 6936, 'bad': 6937, 'commentary': 6938, 'thorsen': 6939, 'whadaya': 6940, 'aerosol': 6941, 'fury': 6942, 'freeing': 6943, 'seat': 6944, 'wagon': 6945, 'nva': 6946, 'flicking': 6947, 'aka': 6948, 'memory': 6949, 'vacuum': 6950, 'wally': 6951, 'spiraling': 6952, 'funded': 6953, 'observation': 6954, 'sill': 6955, 'measuring': 6956, 'billiard': 6957, 'guys': 6958, 'comfortable': 6959, 'dolls': 6960, 'clothes': 6961, 'details': 6962, 'literature': 6963, 'camden': 6964, 'snapped': 6965, 'greet': 6966, 
'lineup': 6967, 'merging': 6968, 'sound': 6969, '1961': 6970, 'permit': 6971, 'splashes': 6972, 'tanker': 6973, 'accurate': 6974, 'fooling': 6975, 'overriding': 6976, 'nclementine': 6977, 'jalalabad': 6978, 'chief': 6979, 'visiting': 6980, 'charter': 6981, 'initiate': 6982, 'summit': 6983, 'n104': 6984, 'weber': 6985, 'objects': 6986, 'pedestal': 6987, 'comforts': 6988, 'berk': 6989, 'finishing': 6990, 'suge': 6991, 'minds': 6992, 'filthy': 6993, 'surreal': 6994, 'abrupt': 6995, 'rehme': 6996, 'ngod': 6997, 'ceremony': 6998, 'christian': 6999, 'lad': 7000, 'noland': 7001, 'roses': 7002, 'ngot': 7003, 'ngaston': 7004, 'flaw': 7005, 'tik': 7006, '77': 7007, 'stereo': 7008, "they're": 7009, 'nrehme': 7010, 'gums': 7011, 'wallets': 7012, '102': 7013, 'threshold': 7014, 'striding': 7015, 'romantic': 7016, 'sprayed': 7017, 'pretty': 7018, 'filter': 7019, 'farewell': 7020, 'ankles': 7021, 'turkey': 7022, 'nwoody': 7023, 'yank': 7024, 'simply': 7025, 'document': 7026, 'huntley': 7027, 'mp3': 7028, 'diamond': 7029, 'jewelry': 7030, 'ndoesn': 7031, 'slabs': 7032, 'delivered': 7033, 'threatening': 7034, 'ticks': 7035, 'cruel': 7036, 'finck': 7037, 'bandages': 7038, 'tits': 7039, 'refusal': 7040, 'ferocious': 7041, 'nameplate': 7042, 'brenda': 7043, 'nbarbara': 7044, 'rupees': 7045, 'patio': 7046, 'conscience': 7047, 'n194': 7048, 'battling': 7049, 'reliving': 7050, 'roaring': 7051, 'proposing': 7052, 'episode': 7053, 'astonished': 7054, 'accelerate': 7055, 'missiles': 7056, 'proportions': 7057, 'porno': 7058, 'grenade': 7059, 'bunk': 7060, 'aging': 7061, 'zombies': 7062, 'subtly': 7063, 'sandcrawler': 7064, 'corridor': 7065, 'textbook': 7066, 'ashland': 7067, 'disorder': 7068, 'eating': 7069, 'requiem': 7070, 'equipped': 7071, 'wherever': 7072, 'jupiter': 7073, 'narrow': 7074, '281': 7075, 'projector': 7076, 'schultz': 7077, 'swimming': 7078, 'greene': 7079, 'p': 7080, 'follow': 7081, 'fame': 7082, 'congressional': 7083, 'duane': 7084, 'positioned': 7085, 'flailing': 7086, 
'recovered': 7087, 'f': 7088, 'cell': 7089, 'waterfront': 7090, 'models': 7091, 'tasty': 7092, 'france': 7093, 'fours': 7094, 'frosty': 7095, 'paula': 7096, '33rd': 7097, 'appreciate': 7098, 'hideously': 7099, 'ladder': 7100, 'brewing': 7101, 'response': 7102, 'kills': 7103, 'flurry': 7104, 'electronics': 7105, 'aluminum': 7106, 'tragedy': 7107, 'prosecutor': 7108, 'corpse': 7109, 'sorrow': 7110, 'attract': 7111, 'steamroller': 7112, 'nboy': 7113, 'closer': 7114, 'slashing': 7115, 'bren': 7116, 'sex': 7117, 'nah': 7118, 'fats': 7119, 'explosions': 7120, 'hypnotic': 7121, 'lungs': 7122, 'rhodes': 7123, 'forget': 7124, 'hah': 7125, 'njimbo': 7126, 'lev': 7127, 'fairly': 7128, 'playing': 7129, 'tire': 7130, 'sincerely': 7131, 'although': 7132, 'laurie': 7133, 'fanfare': 7134, 'stromboli': 7135, 'place': 7136, 'yanking': 7137, 'bean': 7138, "'connell": 7139, 'boxers': 7140, 'furniture': 7141, 'hats': 7142, 'snarl': 7143, 'save': 7144, 'enemy': 7145, 'marvin': 7146, 'spitter': 7147, 'convince': 7148, 'gleam': 7149, 'hoffman': 7150, 'overture': 7151, 'clearance': 7152, 'point': 7153, 'hidden': 7154, 'session': 7155, 'stubby': 7156, 'reflected': 7157, 'aircraft': 7158, 'trunks': 7159, 'latest': 7160, 'cadence': 7161, 'stalks': 7162, 'dante': 7163, 'generations': 7164, 'giant': 7165, 'week': 7166, 'somersault': 7167, 'helos': 7168, 'egg': 7169, 'balancing': 7170, 'entitled': 7171, 'nunderneath': 7172, 'ngeneral': 7173, 'soil': 7174, 'megan': 7175, 'nholding': 7176, 'smiles': 7177, 'tarmac': 7178, 'bundle': 7179, 'guests': 7180, 'pairs': 7181, 'investigate': 7182, 'exchanges': 7183, 'antarctica': 7184, 'broken': 7185, 'nearl': 7186, 'freight': 7187, 'monitor': 7188, 'himself': 7189, 'rochelle': 7190, 'spaced': 7191, 'apples': 7192, 'translating': 7193, 'capability': 7194, 'pinches': 7195, 'straddles': 7196, 'envelopes': 7197, 'nbeat': 7198, 'discreet': 7199, 'drawing': 7200, 'corporate': 7201, 'chock': 7202, 'scrap': 7203, 'brutal': 7204, 'sleeves': 7205, 'n150': 7206, 
'cops': 7207, 'nus': 7208, 'titles': 7209, 'recruit': 7210, 'dionne': 7211, 'speeds': 7212, 'mountainside': 7213, 'trouble': 7214, 'investment': 7215, 'doin': 7216, 'grasp': 7217, 'formerly': 7218, 'faded': 7219, 'khomeini': 7220, 'roach': 7221, 'being': 7222, 'blessed': 7223, 'party': 7224, 'stills': 7225, 'spins': 7226, 'respected': 7227, 'defined': 7228, 'active': 7229, 'swell': 7230, 'pristine': 7231, 'n109': 7232, 'interrupt': 7233, 'nopen': 7234, 'television': 7235, 'blend': 7236, 'roy': 7237, 'erase': 7238, 'to': 7239, 'gaston': 7240, '186': 7241, 'receives': 7242, 'flails': 7243, 'survival': 7244, 'fox': 7245, 'fiancee': 7246, 'nsuit': 7247, "'all": 7248, 'intimidating': 7249, 'swigs': 7250, 'bends': 7251, 'security': 7252, 'vortex': 7253, 'throwers': 7254, 'teenage': 7255, 'actin': 7256, 'nnext': 7257, 'njust': 7258, 'pleasant': 7259, 'ncoral': 7260, "'gate": 7261, 'gauges': 7262, 'condemned': 7263, 'dreamer': 7264, 'nadders': 7265, 'noh': 7266, 'beane': 7267, 'niece': 7268, 'nonsense': 7269, 'expertise': 7270, 'breakneck': 7271, 'normally': 7272, 'dwindling': 7273, 'sabbath': 7274, 'en': 7275, 'donna': 7276, 'meet': 7277, 'screws': 7278, 'july': 7279, 'n40': 7280, 'spotting': 7281, 'phoney': 7282, 'snatch': 7283, 'hundred': 7284, 'wendy': 7285, 'id': 7286, 'acting': 7287, 'triple': 7288, 'everdeane': 7289, 'makin': 7290, 'homies': 7291, 'preoccupied': 7292, 'jordan': 7293, 'nhere': 7294, 'ntsai': 7295, 'corporation': 7296, 'desert': 7297, 'bolting': 7298, 'contrast': 7299, 'tobacco': 7300, 'glowering': 7301, 'scampers': 7302, 'nbee': 7303, 'cubans': 7304, 'defeated': 7305, 'ratched': 7306, 'auction': 7307, 'establishment': 7308, 'murmuring': 7309, 'ngreen': 7310, 'ntodd': 7311, 'claw': 7312, 'roust': 7313, 'utility': 7314, 'specimen': 7315, 'presenting': 7316, 'kuwaiti': 7317, 'cast': 7318, 'junebug': 7319, 'holly': 7320, 'getting': 7321, 'cackling': 7322, 'blare': 7323, 'ryan': 7324, 'couple': 7325, 'disabled': 7326, 'doorman': 7327, 'architecture': 
7328, 'earthquake': 7329, 'coaches': 7330, 'dips': 7331, 'crumples': 7332, 'except': 7333, 'parasites': 7334, 'exasperated': 7335, 'secured': 7336, 'theory': 7337, 'julius': 7338, 'sky': 7339, 'battles': 7340, 'daniel': 7341, 'swarming': 7342, 'airman': 7343, 'experiment': 7344, 'nearth': 7345, 'incredulously': 7346, 'poised': 7347, 'quarter': 7348, 'gust': 7349, 'votes': 7350, 'editor': 7351, 'amount': 7352, 'xa9': 7353, 'fortunate': 7354, 'nseat': 7355, 'swirl': 7356, 'farmers': 7357, 'drags': 7358, 'toothbrush': 7359, 'interested': 7360, 'homeland': 7361, 'wrestling': 7362, 'publicity': 7363, 'glob': 7364, 'loan': 7365, 'continues': 7366, 'firm': 7367, 'nfrom': 7368, 'disintegrating': 7369, 'crisp': 7370, 'barnard': 7371, 'k': 7372, 'nspace': 7373, 'ominously': 7374, 'cheat': 7375, "won't": 7376, 'tejada': 7377, 'silent': 7378, 'straps': 7379, 'marble': 7380, 'yelled': 7381, 'lower': 7382, 'mailman': 7383, 'sensor': 7384, 'houseboat': 7385, 'enthusiastically': 7386, 'dealership': 7387, 'cavalieri': 7388, 'dummy': 7389, '53': 7390, 'sidney': 7391, 'nhand': 7392, 'spark': 7393, 'hideous': 7394, 'classical': 7395, 'caf': 7396, 'stud': 7397, 'archbishop': 7398, 'severe': 7399, 'nbarnum': 7400, 'mirrors': 7401, 'pauses': 7402, 'priest': 7403, 'narc': 7404, 'athletic': 7405, 'strolls': 7406, 'flair': 7407, 'cholos': 7408, 'holding': 7409, 'sprouts': 7410, 'closes': 7411, 'contempt': 7412, 'aide': 7413, 'skis': 7414, 'nvice': 7415, 'tweed': 7416, '56': 7417, 'laughing': 7418, 'sausages': 7419, 'mung': 7420, 'floodlights': 7421, 'capacity': 7422, 'gnaws': 7423, 'wait': 7424, 'unusual': 7425, '85': 7426, 'loads': 7427, 'undulating': 7428, 'n100': 7429, 'nnatalie': 7430, 'hildi': 7431, 'cartons': 7432, 'piper': 7433, 'betray': 7434, 'maria': 7435, 'billowing': 7436, 'crater': 7437, 'sticker': 7438, 'symbol': 7439, 'entire': 7440, 'merchants': 7441, 'glisten': 7442, 'sylvia': 7443, 'lid': 7444, 'helipad': 7445, 'ambitious': 7446, 'roast': 7447, 'day': 7448, 'shoulder': 
7449, 'blush': 7450, 'those': 7451, 'stature': 7452, 'nbegin': 7453, 'restless': 7454, 'nmaybe': 7455, 'wanted': 7456, 'specifically': 7457, 'whipped': 7458, 'pfefferberg': 7459, 'majority': 7460, 'bureau': 7461, 'intact': 7462, 'seeking': 7463, '02': 7464, 'grab': 7465, 'emily': 7466, 'smell': 7467, 'apes': 7468, 'peanuts': 7469, 'mustard': 7470, 'frown': 7471, 'attaching': 7472, 'pours': 7473, 'marches': 7474, 'bums': 7475, 'ntom': 7476, 'pleasure': 7477, 'n102': 7478, 'nme': 7479, 'chrissakes': 7480, 'forest': 7481, 'flabbergasted': 7482, 'orphanage': 7483, 'loot': 7484, 'comrades': 7485, 'sources': 7486, 'stinks': 7487, 'methodically': 7488, 'gloves': 7489, 'nuzzles': 7490, 'voltron': 7491, 'pastor': 7492, 'genetic': 7493, 'shackles': 7494, 'chair': 7495, 'okey': 7496, 'introduced': 7497, 'rumbling': 7498, 'pirates': 7499, 'gagging': 7500, 'nli': 7501, 'michaels': 7502, 'cycles': 7503, 'medic': 7504, 'n88': 7505, 'n172': 7506, 'increased': 7507, 'mascara': 7508, 'jurassic': 7509, 'invasion': 7510, 'bumstead': 7511, 'janiro': 7512, 'familiar': 7513, 'resembling': 7514, 'bruce': 7515, 'harvest': 7516, 'leg': 7517, 'hole': 7518, 'probing': 7519, '1987': 7520, '06': 7521, 'intense': 7522, 'rajah': 7523, 'understanding': 7524, 'pigs': 7525, 'curran': 7526, 'officer': 7527, 'dashing': 7528, 'ix': 7529, 'nt': 7530, 'clothilde': 7531, 'fulfill': 7532, 'be': 7533, 'nmother': 7534, 'quarterback': 7535, 'cantina': 7536, 'answers': 7537, 'holder': 7538, 'nsitting': 7539, 'darth': 7540, 'dungeon': 7541, 'emblazoned': 7542, 'gazeem': 7543, 'israeli': 7544, 'moff': 7545, 'green': 7546, 'skywalker': 7547, 'asses': 7548, 'programming': 7549, 'talkin': 7550, 'nhannah': 7551, 'urging': 7552, 'civilian': 7553, 'eclipse': 7554, 'handle': 7555, 'ncreed': 7556, 'kobayashi': 7557, 'swish': 7558, 'popping': 7559, 'final': 7560, 'fence': 7561, 'nis': 7562, 'surveillance': 7563, 'hooked': 7564, 'octavius': 7565, 'marquee': 7566, 'buffeted': 7567, 'buster': 7568, 'pulsing': 7569, 'jelly': 
7570, 'whap': 7571, 'maman': 7572, 'heroes': 7573, 'rushing': 7574, 'tellin': 7575, 'alaska': 7576, 'momentum': 7577, 'laughs': 7578, 'jano': 7579, 'stores': 7580, 'trembles': 7581, 'nsteps': 7582, 'nhi': 7583, 'always': 7584, 'shotguns': 7585, 'picking': 7586, 'smoke': 7587, 'lawson': 7588, 'straightening': 7589, 'pulls': 7590, 'growls': 7591, 'will': 7592, 'mothafucka': 7593, 'prime': 7594, 'carcano': 7595, 'nostrils': 7596, 'moon': 7597, 'whatcha': 7598, 'cheating': 7599, 'noise': 7600, 'ng': 7601, 'slurps': 7602, 'assignment': 7603, 'urban': 7604, 'chekov': 7605, 'backs': 7606, 'sane': 7607, 'nora': 7608, 'magnitude': 7609, 'benny': 7610, 'khaled': 7611, 'poisoned': 7612, 'n11': 7613, '9th': 7614, 'domain': 7615, 'margaret': 7616, 'nfurther': 7617, 'been': 7618, 'nits': 7619, 'weeds': 7620, 'fuel': 7621, 'rakes': 7622, 'knob': 7623, 'brim': 7624, 'slice': 7625, 'grady': 7626, 'fit': 7627, 'begins': 7628, 'ohhhh': 7629, 'singing': 7630, 'india': 7631, 'typical': 7632, 'massacre': 7633, 'instantly': 7634, 'grabbing': 7635, 'pierre': 7636, 'length': 7637, 'closeup': 7638, 'deja': 7639, 'taste': 7640, 'embankment': 7641, 'mug': 7642, 'thankful': 7643, 'bank': 7644, 'junction': 7645, 'x99s': 7646, 'alonzo': 7647, 'na': 7648, 'know': 7649, 'glimpse': 7650, 'buckling': 7651, 'zippleback': 7652, 'concludes': 7653, 'trooper': 7654, 'superimposed': 7655, '112': 7656, 'voter': 7657, 'niima': 7658, 'disease': 7659, 'locating': 7660, 'establish': 7661, 'unlock': 7662, 'plotting': 7663, 'nminutes': 7664, 'punished': 7665, 'warlord': 7666, 'invested': 7667, 'jews': 7668, 'nightmarish': 7669, 'angled': 7670, 'alan': 7671, 'teach': 7672, 'denherder': 7673, 'schools': 7674, 'nedith': 7675, 'nest': 7676, 'clicking': 7677, 'shimmers': 7678, 'glad': 7679, 'pets': 7680, 'drapes': 7681, 'n38': 7682, '30th': 7683, 'institutional': 7684, 'nonto': 7685, 'judges': 7686, 'leadership': 7687, 'ignition': 7688, 'catholic': 7689, 'nnose': 7690, 'watch': 7691, 'ca': 7692, 'clamp': 7693, 'jet': 
7694, 'plants': 7695, 'sausage': 7696, 'bye': 7697, 'nailed': 7698, 'remotely': 7699, 'insect': 7700, 'defiant': 7701, 'limply': 7702, 'overhead': 7703, 'notes': 7704, 'nhas': 7705, 'mode': 7706, 'crippled': 7707, 'foreground': 7708, 'complex': 7709, 'mushu': 7710, 'gulping': 7711, 'advised': 7712, 'commune': 7713, 'breasts': 7714, 'worker': 7715, 'piano': 7716, 'gloom': 7717, 'planks': 7718, 'sarcastically': 7719, 'vickie': 7720, 'buzzing': 7721, 'flipping': 7722, 'embarrassed': 7723, 'laugh': 7724, 'marijuana': 7725, 'races': 7726, 'might': 7727, 'stuff': 7728, 'jacksonville': 7729, 'unnatural': 7730, 'collar': 7731, 'spots': 7732, 'nmore': 7733, 'pans': 7734, 'shrine': 7735, 'washrooms': 7736, 'ceases': 7737, 'frigid': 7738, 'inc': 7739, 'violently': 7740, "'am": 7741, 'fashion': 7742, 'tent': 7743, 'stones': 7744, 'lifebuoy': 7745, 'mortal': 7746, 'whole': 7747, 'dusk': 7748, 'dense': 7749, '193': 7750, 'preparation': 7751, 'tackles': 7752, 'diss': 7753, 'hasty': 7754, 'pov': 7755, 'angels': 7756, 'commerce': 7757, 'rifles': 7758, 'looks': 7759, 'supervisor': 7760, 'perform': 7761, 'noises': 7762, 'nabu': 7763, 'roger': 7764, 'ogres': 7765, 'manage': 7766, 'nmargaret': 7767, 'we': 7768, 'blond': 7769, 'gobber': 7770, 'premier': 7771, 'khan': 7772, 'chord': 7773, 'perceptible': 7774, 'stabilize': 7775, 'suggestion': 7776, 'screwed': 7777, 'remote': 7778, 'burning': 7779, "'63": 7780, 'popular': 7781, 'benicio': 7782, 'ketchup': 7783, 'sell': 7784, 'beret': 7785, 'cried': 7786, 'homecoming': 7787, 'gunderson': 7788, 'oliver': 7789, 'aerial': 7790, 'shacks': 7791, 'reaching': 7792, 'soaking': 7793, 'broom': 7794, 'nmeanwhile': 7795, 'faint': 7796, 'overseas': 7797, 'plexiglass': 7798, 'shook': 7799, 'coral': 7800, 'yvette': 7801, 'deejays': 7802, 'cocking': 7803, 'gobbler': 7804, '19': 7805, 'audience': 7806, 'leverage': 7807, 'bunch': 7808, 'prying': 7809, 'pursuers': 7810, 'schoolyard': 7811, 'cleaner': 7812, 'hoboes': 7813, 'cory': 7814, 'champ': 7815, 
'daughter': 7816, 'wade': 7817, 'toilet': 7818, 'gauntlet': 7819, 'harbour': 7820, 'boring': 7821, 'evan': 7822, 'bai': 7823, 'rise': 7824, 'musket': 7825, 'terrorists': 7826, 'service': 7827, 'delivers': 7828, 'sultan': 7829, 'candy': 7830, 'forgets': 7831, 'strippers': 7832, 'dolphin': 7833, 'aaaah': 7834, 'carrucan': 7835, 'uhh': 7836, 'linda': 7837, 'risk': 7838, 'sneakers': 7839, 'pierced': 7840, 'fixes': 7841, 'jessep': 7842, 'fitness': 7843, 'pistols': 7844, 'effects': 7845, 'measures': 7846, 'larry': 7847, 'nmia': 7848, 'earl': 7849, 'amon': 7850, 'firewood': 7851, 'n125': 7852, 'unique': 7853, 'nice': 7854, '104': 7855, 'narnold': 7856, 'sprawled': 7857, 'gamblers': 7858, 'wished': 7859, 'record': 7860, 'damon': 7861, 'bowie': 7862, 'swivel': 7863, 'intellect': 7864, 'scanning': 7865, 'brandy': 7866, "doesn't": 7867, 'thirty': 7868, 'produce': 7869, 'written': 7870, 'amplified': 7871, 'business': 7872, 'soyuz': 7873, 'estuary': 7874, 'towns': 7875, 'recovering': 7876, 'nedwards': 7877, 'erases': 7878, 'route': 7879, 'rake': 7880, 'abbie': 7881, 'velocity': 7882, '4': 7883, 'cracker': 7884, 'williams': 7885, 'tilting': 7886, 'rugged': 7887, 'rene': 7888, 'trudges': 7889, 'elliot': 7890, 'ball': 7891, 'brother': 7892, 'fish': 7893, 'toe': 7894, 'examiner': 7895, 'fender': 7896, 'slouched': 7897, 'sefelt': 7898, '87': 7899, 'battering': 7900, 'xbf': 7901, 'shots': 7902, 'song': 7903, 'notebook': 7904, 'ndriving': 7905, 'nineties': 7906, 'sawdust': 7907, 'uncharted': 7908, 'taj': 7909, 'cyclotron': 7910, 'coughs': 7911, 'giggles': 7912, 'reported': 7913, 'authority': 7914, 'within': 7915, 'buoyancy': 7916, 'struggles': 7917, 'enjoying': 7918, 'answering': 7919, 'snapping': 7920, 'slinks': 7921, 'eyebrows': 7922, 'marcus': 7923, 'intended': 7924, 'shaw': 7925, 'asphalt': 7926, 'slo': 7927, 'offer': 7928, 'create': 7929, 'bedside': 7930, 'sweatpants': 7931, 'matted': 7932, 'perpendicular': 7933, 'barricade': 7934, 'ecstatic': 7935, 'twirls': 7936, 'invisible': 
7937, 'vanished': 7938, '254': 7939, 'de': 7940, 'another': 7941, 'yuri': 7942, 'garren': 7943, 'km': 7944, 'xa9e': 7945, 'blasted': 7946, 'claus': 7947, 'dares': 7948, 'serene': 7949, 'whip': 7950, 'nhimself': 7951, 'gardener': 7952, 'wagner': 7953, 'advantage': 7954, 'anniversary': 7955, 'bony': 7956, 'nicer': 7957, 'straighten': 7958, 'points': 7959, 'nmonk': 7960, 'megaphone': 7961, 'jimbo': 7962, 'operational': 7963, 'ta': 7964, '5000': 7965, 'mistakes': 7966, 'backpack': 7967, 'leave': 7968, 'thunder': 7969, 'wearily': 7970, 'threatens': 7971, 'jams': 7972, 'sapphire': 7973, 'remembering': 7974, 'marching': 7975, 'inadvertently': 7976, 'discomfort': 7977, '68': 7978, 'wagons': 7979, 'wish': 7980, 'psychology': 7981, 'collide': 7982, 'slumber': 7983, 'rages': 7984, 'saving': 7985, 'monsoon': 7986, 'position': 7987, 'bravo': 7988, 'softball': 7989, 'louder': 7990, 'seems': 7991, 'diem': 7992, 'dusters': 7993, 'plume': 7994, 'loved': 7995, 'martini': 7996, 'panic': 7997, 'scouts': 7998, 'flu': 7999, 'dig': 8000, 'herded': 8001, 'propped': 8002, 'ultimate': 8003, 'nudges': 8004, 'nothin': 8005, 'stamp': 8006, 'prepare': 8007, 'feature': 8008, 'cards': 8009, 'amongst': 8010, 'paranoia': 8011, 'melting': 8012, 'amidst': 8013, 'couches': 8014, 'excess': 8015, '140': 8016, 'thick': 8017, 'ngets': 8018, 'ncovered': 8019, 'tout': 8020, 'tsai': 8021, 'geez': 8022, 'plates': 8023, 'scouting': 8024, 'beacon': 8025, 'prepping': 8026, 'cabot': 8027, 'chopper': 8028, 'credit': 8029, 'weapon': 8030, 'paradise': 8031, 'obviously': 8032, 'pyjamas': 8033, 'gustafson': 8034, 'caution': 8035, 'ncheswick': 8036, 'smaller': 8037, 'resembles': 8038, 'shits': 8039, 'eddie': 8040, 'cases': 8041, 'resume': 8042, 'reluctantly': 8043, 'childhood': 8044, 'experimenting': 8045, 'jeebs': 8046, 'nbrain': 8047, 'nigger': 8048, 'volcano': 8049, 'flicka': 8050, 'nnathaniel': 8051, 'tapes': 8052, 'nbetween': 8053, 'dusting': 8054, 'aid': 8055, 'fee': 8056, 'cnn': 8057, 'representing': 8058, 
'resolute': 8059, 'slams': 8060, 'rich': 8061, 'woohoo': 8062, 'swiss': 8063, 'spire': 8064, 'doorstep': 8065, 'enveloped': 8066, 'ponds': 8067, 'jheri': 8068, 'ndresser': 8069, 'plaza': 8070, 'gesture': 8071, 'gunman': 8072, 'receive': 8073, 'have': 8074, 'slapped': 8075, 'nas': 8076, 'dismay': 8077, 'tentacles': 8078, 'safely': 8079, 'skim': 8080, 'opportunities': 8081, '180': 8082, 'mind': 8083, 'hitman': 8084, 'oops': 8085, 'fingertips': 8086, 'martin': 8087, 'armor': 8088, 'father': 8089, 'bunkhouse': 8090, '35': 8091, 'wistfully': 8092, 'washed': 8093, 'tomica': 8094, '219': 8095, 'howe': 8096, 'reassure': 8097, '224b': 8098, 'who': 8099, 'prints': 8100, 'much': 8101, 'nshirt': 8102, 'trembling': 8103, 'combine': 8104, 'zeroes': 8105, 'mayor': 8106, 'slices': 8107, 'bungalow': 8108, 'grades': 8109, 'human': 8110, 'broke': 8111, 'voos': 8112, 'jeep': 8113, 'added': 8114, 'reciting': 8115, 'merc': 8116, 'wagging': 8117, 'flips': 8118, 'intruder': 8119, 'jus': 8120, 'wiry': 8121, 'cutters': 8122, 'impatiently': 8123, 'identify': 8124, 'alcove': 8125, 'pained': 8126, 'samson': 8127, 'grim': 8128, 'palms': 8129, 'dream': 8130, 'odd': 8131, 'jailer': 8132, 'misery': 8133, 'matt': 8134, 'distortion': 8135, 'u': 8136, 'hoods': 8137, 'loving': 8138, 'lu': 8139, 'dragons': 8140, 'insulin': 8141, 'viewscreen': 8142, 'wudan': 8143, 'child': 8144, 'knobs': 8145, 'arranging': 8146, 'zagging': 8147, 'siege': 8148, 'n98': 8149, 'outline': 8150, 'statue': 8151, 'conveyor': 8152, 'long': 8153, 'nacross': 8154, 'sale': 8155, 'break': 8156, 'snotty': 8157, 'fingerprint': 8158, 'drowned': 8159, 'dormant': 8160, 'task': 8161, 'compassion': 8162, 'admits': 8163, 'antonio': 8164, 'vastness': 8165, 'frequency': 8166, 'ribbons': 8167, 'patricia': 8168, 'adopt': 8169, 'reporter': 8170, 'abuse': 8171, 'nd': 8172, 'journey': 8173, 'barbecue': 8174, 'breakin': 8175, 'chemical': 8176, 'worried': 8177, 'mimosa': 8178, 'cornelius': 8179, 'spending': 8180, 'motti': 8181, 'njoel': 8182, 
'passion': 8183, 'charlie': 8184, 'trainer': 8185, 'n7': 8186, 'injection': 8187, 'julien': 8188, 'nfollow': 8189, 'pension': 8190, 'creaking': 8191, 'gotta': 8192, 'upstate': 8193, 'patting': 8194, 'skin': 8195, 'able': 8196, 'switchblade': 8197, 'checked': 8198, 'malkovichians': 8199, 'genitals': 8200, 'dignitaries': 8201, 'weeping': 8202, 'vendors': 8203, 'rundown': 8204, 'near': 8205, 'pope': 8206, 'plaszow': 8207, 'nmal': 8208, 'course': 8209, 't': 8210, 'lifts': 8211, 'witness': 8212, 'lively': 8213, 'charity': 8214, 'stubs': 8215, 'nails': 8216, '1989': 8217, 'duct': 8218, 'obscuring': 8219, 'warfare': 8220, 'devastation': 8221, 'complain': 8222, 'assembling': 8223, 'limping': 8224, 'san': 8225, 'maxine': 8226, 'ricky': 8227, 'hilary': 8228, 'organism': 8229, 'writing': 8230, 'pats': 8231, 'dagger': 8232, 'ock': 8233, 'bumper': 8234, 'n182': 8235, 'nnixon': 8236, 'shock': 8237, 'singin': 8238, 'chews': 8239, 'earring': 8240, 'spears': 8241, 'slater': 8242, 'wears': 8243, 'pretending': 8244, 'feel': 8245, 'terribly': 8246, 'bag': 8247, 'oughta': 8248, 'battlefield': 8249, 'butt': 8250, '108': 8251, 'quotes': 8252, 'softly': 8253, 'popcorn': 8254, 'apparatus': 8255, 'netting': 8256, 'nauseous': 8257, 'whisky': 8258, 'cold': 8259, 'quaking': 8260, 'gerry': 8261, 'admit': 8262, 'jawas': 8263, 'reginald': 8264, 'credentials': 8265, 'stomach': 8266, 'lightning': 8267, 'trucoat': 8268, 'roof': 8269, 'nalready': 8270, 'n76': 8271, 'holds': 8272, 'wuddup': 8273, 'showering': 8274, 'foliage': 8275, 'senate': 8276, 'logs': 8277, 'model': 8278, 'squirms': 8279, 'labelled': 8280, 'frames': 8281, 'antenna': 8282, 'nring': 8283, 'nattention': 8284, 'spying': 8285, 'squadron': 8286, 'combinations': 8287, 'add': 8288, 'tuesday': 8289, 'curtsey': 8290, 'held': 8291, 'several': 8292, 'bitterly': 8293, 'hollering': 8294, 'gritty': 8295, 'seeker': 8296, 'departments': 8297, 'posture': 8298, 'river': 8299, 'sniper': 8300, 'gags': 8301, 'drugs': 8302, 'manipulating': 8303, 
'indicator': 8304, 'fruit': 8305, 'topped': 8306, 'fractal': 8307, 'fluid': 8308, 'yao': 8309, 'fried': 8310, 'm': 8311, 'donkeys': 8312, 'fiberglass': 8313, 'bump': 8314, 'writes': 8315, 'knit': 8316, 'psychological': 8317, 'piss': 8318, 'mademoiselle': 8319, 'isolation': 8320, 'blinds': 8321, 'bowed': 8322, 'nold': 8323, 'avoid': 8324, 'lung': 8325, 'apologies': 8326, 'tableau': 8327, 'middleweight': 8328, 'webster': 8329, 'seawall': 8330, 'trips': 8331, 'inside': 8332, 'subject': 8333, 'celebrating': 8334, 'artillery': 8335, 'yawn': 8336, '2000': 8337, 'nmachine': 8338, 'dine': 8339, 'japan': 8340, 'josh': 8341, 'nonchalantly': 8342, 'roars': 8343, 'flutter': 8344, 'collector': 8345, 'splendid': 8346, 'dice': 8347, 'craps': 8348, 'zombie': 8349, 'swig': 8350, 'shout': 8351, 'vestibule': 8352, 'quigley': 8353, 'figure': 8354, 'curlers': 8355, 'visible': 8356, 'bark': 8357, 'tick': 8358, 'copies': 8359, 'npotato': 8360, 'regain': 8361, 'pontchartrain': 8362, 'beagle': 8363, 'crystal': 8364, '126': 8365, 'georgia': 8366, 'stay': 8367, 'typing': 8368, 'xmas': 8369, 'julia': 8370, 'cbs': 8371, 'keith': 8372, 'russel': 8373, 'without': 8374, 'slugs': 8375, 'knocks': 8376, 'clipboard': 8377, 'billee': 8378, 'wins': 8379, 'australian': 8380, 'hook': 8381, 'jacket': 8382, 'viewing': 8383, 'handles': 8384, 'nathaniel': 8385, 'problems': 8386, 'aggravated': 8387, 'movers': 8388, 'dulles': 8389, 'plunges': 8390, 'n221': 8391, 'puppies': 8392, 'lenore': 8393, 'applauding': 8394, 'jewish': 8395, 'relay': 8396, 'boulder': 8397, 'lives': 8398, 'ongoing': 8399, 'crazy': 8400, 'rotor': 8401, 'fights': 8402, 'shadow': 8403, 'illegal': 8404, 'unseen': 8405, 'seas': 8406, 'shoes': 8407, 'camps': 8408, 'miller': 8409, 'forced': 8410, 'kilometers': 8411, 'happenin': 8412, 'alarm': 8413, 'diner': 8414, 'ouch': 8415, 'housekeeper': 8416, 'earned': 8417, 'nyears': 8418, 'space': 8419, 'engages': 8420, 'german': 8421, 'shimmies': 8422, 'skies': 8423, 'absently': 8424, 'munoz': 8425, 
'intensity': 8426, 'havin': 8427, 'reduce': 8428, 'hockey': 8429, 'species': 8430, 'herr': 8431, 'tough': 8432, 'cozy': 8433, 'academy': 8434, 'ncloses': 8435, 'mccall': 8436, 'effective': 8437, 'preservation': 8438, 'grounded': 8439, 'frowns': 8440, 'yelling': 8441, 'disgustedly': 8442, 'lovingly': 8443, 'forrestal': 8444, 'prostitute': 8445, 'distinguished': 8446, 'retract': 8447, 'sincere': 8448, 'ntemple': 8449, 'rathtar': 8450, 'faucet': 8451, 'appeared': 8452, 'strut': 8453, 'rebuilt': 8454, 'catwalks': 8455, 'bodily': 8456, 'galactic': 8457, 'delirious': 8458, "'kay": 8459, 'xbd': 8460, 'hamburgers': 8461, 'farm': 8462, 'jury': 8463, 'web': 8464, 'excellency': 8465, 'belonged': 8466, 'kruczynski': 8467, 'jersey': 8468, 'ronald': 8469, 'earlier': 8470, 'hired': 8471, 'crumbles': 8472, 'throwing': 8473, 'vice': 8474, 'tv': 8475, 'knee': 8476, 'rod': 8477, 'cloudy': 8478, 'tenderness': 8479, 'crane': 8480, 'girder': 8481, 'n84': 8482, 'compound': 8483, 'condition': 8484, 'nrocks': 8485, 'mierzwiak': 8486, 'teacher': 8487, 'torso': 8488, 'colorful': 8489, 'annoyed': 8490, 'biology': 8491, 'paint': 8492, '21': 8493, 'ensemble': 8494, 'shudder': 8495, 'poncho': 8496, 'presidents': 8497, 'soften': 8498, 'shitty': 8499, 'prostrate': 8500, 'nvoice': 8501, 'investor': 8502, 'failed': 8503, 'musane': 8504, 'frayed': 8505, 'limp': 8506, 'flashback': 8507, 'surplus': 8508, 'surprises': 8509, 'seals': 8510, 'beast': 8511, 'sticks': 8512, 'checkbook': 8513, 'muscular': 8514, 'suspects': 8515, 'composer': 8516, 'depicted': 8517, 'program': 8518, 'heartbroken': 8519, '204': 8520, 'rusted': 8521, 'stall': 8522, 'unfold': 8523, 'ngrommet': 8524, 'fed': 8525, 'omit': 8526, 'craggy': 8527, 'snipers': 8528, 'depository': 8529, "'im": 8530, 'august': 8531, 'workin': 8532, 'avon': 8533, 'thundering': 8534, 'nface': 8535, 'gateway': 8536, 'delivery': 8537, 'union': 8538, 'fumbling': 8539, "'d": 8540, 'emerge': 8541, '231': 8542, 'reverses': 8543, 'zebra': 8544, 'n180': 8545, 
'shade': 8546, 'leaves': 8547, 'nimmediately': 8548, 'hides': 8549, 'linked': 8550, 'stolen': 8551, 'weeks': 8552, 'illegally': 8553, 'dispatch': 8554, "wasn't": 8555, 'fishin': 8556, 'only': 8557, 'chowder': 8558, 'pond': 8559, 'visited': 8560, 'zhou': 8561, 'hover': 8562, 'education': 8563, 'awac': 8564, 'value': 8565, 'treaty': 8566, 'giants': 8567, 'during': 8568, 'stunned': 8569, 'breaking': 8570, 'indian': 8571, 'keeping': 8572, 'dissolves': 8573, 'attempts': 8574, '2': 8575, 'supervision': 8576, 'peas': 8577, 'stan': 8578, 'raymond': 8579, 'hold': 8580, 'changed': 8581, 'wayne': 8582, 'straight': 8583, 'frustration': 8584, 'avalanche': 8585, 'nobody': 8586, '210': 8587, 'usta': 8588, 'fidgeting': 8589, '200': 8590, 'sonic': 8591, 'expectations': 8592, 'cod': 8593, 'revisions': 8594, 'whew': 8595, 'tank': 8596, 'nozzle': 8597, 'cry': 8598, 'fifth': 8599, 'amazed': 8600, 'adamantium': 8601, 'eazy': 8602, "'donnell": 8603, 'right': 8604, 'storyboard': 8605, 'dipper': 8606, 'iod': 8607, 'tracks': 8608, 'groans': 8609, 'coliseum': 8610, 'finger': 8611, 'lookin': 8612, 'ndo': 8613, '46': 8614, 'finale': 8615, 'recommended': 8616, 'motionless': 8617, 'staffers': 8618, 'swat': 8619, 'salieri': 8620, 'drink': 8621, 'nnew': 8622, 'turbolift': 8623, 'americans': 8624, '214': 8625, 'nmake': 8626, 'iconic': 8627, 'pensive': 8628, 'n118': 8629, 'macy': 8630, 'seb': 8631, 'backside': 8632, 'clank': 8633, 'accompanying': 8634, 'incapable': 8635, 'forgive': 8636, 'gobs': 8637, 'proudfoot': 8638, 'n143': 8639, 'pressure': 8640, 'sprinkler': 8641, 'parade': 8642, 'stubborn': 8643, 'outer': 8644, 'crackles': 8645, 'funky': 8646, 'hemisphere': 8647, 'rights': 8648, 'sunbeam': 8649, 'ndrill': 8650, 'shouldered': 8651, 'whiskey': 8652, 'douses': 8653, 'unbecoming': 8654, 'victorious': 8655, 'kill': 8656, 'trotting': 8657, 'exaggerated': 8658, 'choking': 8659, 'months': 8660, 'terri': 8661, 'steele': 8662, 'copter': 8663, 'hazel': 8664, 'nand': 8665, 'stupidly': 8666, 'away': 8667, 
'swissair': 8668, 'singers': 8669, 'unusually': 8670, 'amazingly': 8671, 'kung': 8672, 'receiver': 8673, "'bye": 8674, 'squawks': 8675, 'jen': 8676, 'ababwa': 8677, 'woods': 8678, 'pork': 8679, 'n160': 8680, 'tahiti': 8681, 'pulleys': 8682, 'ignored': 8683, 'media': 8684, 'ellis': 8685, 'grasps': 8686, 'echoing': 8687, 'replies': 8688, 'fast': 8689, 'bees': 8690, 'n119': 8691, 'nose': 8692, 'lumbering': 8693, 'canisters': 8694, 'ahhhhh': 8695, 'bottoms': 8696, 'mindless': 8697, 'yuan': 8698, 'geography': 8699, 'releases': 8700, 'mutual': 8701, 'kawasaki': 8702, 'emplacements': 8703, 'midwest': 8704, 'limps': 8705, 'bundled': 8706, 'x82': 8707, 'lbj': 8708, "we've": 8709, 'exactly': 8710, 'assault': 8711, 'spoon': 8712, 'murdered': 8713, 'inning': 8714, 'committing': 8715, 'waitresses': 8716, 'piled': 8717, 'agonizing': 8718, 'locator': 8719, 'strap': 8720, 'begs': 8721, 'lecture': 8722, 'crumbs': 8723, 'ruff': 8724, 'approval': 8725, 'njenny': 8726, 'skiing': 8727, 'trainers': 8728, 'harris': 8729, 'peace': 8730, 'trades': 8731, 'condensation': 8732, 'judgement': 8733, 'panicked': 8734, 'wednesday': 8735, 'harness': 8736, 'panel': 8737, 'talented': 8738, 'puffy': 8739, 'duke': 8740, 'suntan': 8741, 'stephanie': 8742, 'picket': 8743, 'shrub': 8744, 'nlumi': 8745, 'applaud': 8746, 'suit': 8747, 'widening': 8748, 'oily': 8749, 'sash': 8750, 'nemperor': 8751, 'volleyball': 8752, 'demented': 8753, 'agility': 8754, 'demand': 8755, 'nbetter': 8756, 'graveyard': 8757, 'nq': 8758, 'n154': 8759, 'silvia': 8760, 'water': 8761, 'stream': 8762, 'burps': 8763, 'alert': 8764, 'busts': 8765, 'slit': 8766, 'raising': 8767, 'dragon': 8768, 'financing': 8769, 'astonishing': 8770, 'advancing': 8771, 'smooth': 8772, 'hangers': 8773, 'carolers': 8774, 'invisibility': 8775, 'dread': 8776, 'noted': 8777, 'refills': 8778, 'skateboard': 8779, 'resign': 8780, 'outskirts': 8781, 'remembered': 8782, 'knuckles': 8783, 'heal': 8784, 'roughly': 8785, 'kangaroo': 8786, 'paces': 8787, 'renquist': 
8788, '95': 8789, 'shithead': 8790, 'truly': 8791, 'fragment': 8792, 'dennis': 8793, "i've": 8794, 'even': 8795, 'graffiti': 8796, 'intently': 8797, 'portraits': 8798, 'melt': 8799, 'amy': 8800, 'hurtle': 8801, 'arquillians': 8802, 'didn': 8803, 'crime': 8804, 'spencer': 8805, 'nfollowing': 8806, 'rests': 8807, 'span': 8808, 'spotted': 8809, 'venice': 8810, 'applicant': 8811, 'gimp': 8812, 'chattering': 8813, 'nlow': 8814, 'ahh': 8815, 'ndionne': 8816, 'treasure': 8817, 'shoulders': 8818, 'sandy': 8819, 'vale': 8820, 'cautiously': 8821, 'twists': 8822, 'acknowledgment': 8823, 'morton': 8824, 'reynolds': 8825, 'confirmation': 8826, 'personnel': 8827, 'peels': 8828, 'nscene': 8829, 'contender': 8830, "annie's": 8831, 'n93': 8832, 'stern': 8833, 'rollin': 8834, 'burst': 8835, 'arnold': 8836, 'amity': 8837, 'guides': 8838, 'calmed': 8839, 'accomplice': 8840, 'hideout': 8841, 'function': 8842, 'kick': 8843, 'slab': 8844, 'assess': 8845, 'fightin': 8846, 'scribble': 8847, 'jay': 8848, 'pensively': 8849, 'enchanted': 8850, 'alleyways': 8851, 'stiffly': 8852, 'dirty': 8853, 'recedes': 8854, 'wooded': 8855, 'confess': 8856, 'feeble': 8857, 'name': 8858, 'basis': 8859, 'teams': 8860, 'rounded': 8861, 'structures': 8862, 'brew': 8863, 'honorable': 8864, 'snout': 8865, 'dentist': 8866, 'nnothing': 8867, 'goddammit': 8868, 'wrong': 8869, 'philosophy': 8870, 'cube': 8871, 'alien': 8872, 'aimlessly': 8873, 'ndoctor': 8874, 'tripping': 8875, 'edge': 8876, 'gibson': 8877, 'hairs': 8878, 'trays': 8879, 'n32': 8880, 'handy': 8881, 'neven': 8882, 'motel': 8883, 'ad': 8884, 'sweaters': 8885, 'acquire': 8886, 'fatigue': 8887, 'militia': 8888, 'sways': 8889, 'smear': 8890, 'donald': 8891, 'geese': 8892, 'scales': 8893, 'allah': 8894, 'nour': 8895, 'technical': 8896, 'future': 8897, 'buckets': 8898, 'nhow': 8899, 'sullen': 8900, 'fiona': 8901, 'md': 8902, 'freezing': 8903, 'sunlight': 8904, 'whatsoever': 8905, 'inaudible': 8906, 'gurgling': 8907, 'virgin': 8908, 'nfigure': 8909, 'fella': 
8910, 'static': 8911, 'cleo': 8912, 'lacuna': 8913, 'gotten': 8914, 'ned': 8915, 'vision': 8916, 'clusters': 8917, 'rhyme': 8918, 'workshop': 8919, 'examines': 8920, 'coward': 8921, 'late': 8922, 'molina': 8923, 'prayer': 8924, 'cough': 8925, 'fifty': 8926, 'facility': 8927, 'oars': 8928, 'evenin': 8929, 'terrain': 8930, 'minded': 8931, 'mib': 8932, 'nkilloran': 8933, 'mood': 8934, 'kintner': 8935, 'coins': 8936, 'breed': 8937, 'medallion': 8938, 'swimmers': 8939, 'sweetie': 8940, 'owl': 8941, 'cooked': 8942, 'mehrabad': 8943, 'mouthed': 8944, 'sadly': 8945, 'n128': 8946, 'compton': 8947, 'castle': 8948, 'arsenal': 8949, 'sober': 8950, 'mia': 8951, 'succeed': 8952, 'deftly': 8953, 'helplessly': 8954, 'significance': 8955, 'keating': 8956, 'stacks': 8957, 'plastic': 8958, 'wishing': 8959, 'slogging': 8960, 'canister': 8961, 'seething': 8962, 'straining': 8963, 'writers': 8964, 'referring': 8965, 'jake': 8966, 'thousand': 8967, 'n95': 8968, 'left': 8969, 'goatee': 8970, 'volunteers': 8971, 'bubbling': 8972, 'bassoon': 8973, 'spanish': 8974, 'kamal': 8975, 'n85': 8976, 'other': 8977, 'ritual': 8978, 'popularity': 8979, 'visitor': 8980, 'pays': 8981, 'garth': 8982, 'restore': 8983, 'vera': 8984, 'nstars': 8985, 'show': 8986, 'shield': 8987, 'impulse': 8988, 'laura': 8989, 'bride': 8990, 'npitts': 8991, 'nbeen': 8992, "ndon't": 8993, 'chipped': 8994, 'breast': 8995, 'cadets': 8996, 'inspecting': 8997, 'tole': 8998, 'no': 8999, 'victim': 9000, 'tide': 9001, 'lesson': 9002, 'anybody': 9003, 'nnot': 9004, 'combed': 9005, 'dave': 9006, 'possessed': 9007, 'hacking': 9008, 'ntai': 9009, 'latter': 9010, 'storm': 9011, 'slack': 9012, 'dose': 9013, 'requesting': 9014, 'happiness': 9015, 'mechanics': 9016, 'washer': 9017, 'constantly': 9018, 'hugging': 9019, 'arius': 9020, "'ya": 9021, 'lead': 9022, 'nhole': 9023, 'rallies': 9024, 'faraj': 9025, 'plane': 9026, 'hyatt': 9027, 'birthing': 9028, 'n135': 9029, 'blonde': 9030, 'cabell': 9031, 'narrator': 9032, 'n99': 9033, 'scraped': 
9034, 'key': 9035, 'bay': 9036, 'courtiers': 9037, 'stashed': 9038, 'powerful': 9039, 'islamic': 9040, 'dingy': 9041, 'wormhole': 9042, '163': 9043, 'slammed': 9044, 'couldn': 9045, 'carter': 9046, 'snowmobile': 9047, 'cringes': 9048, 'kendrick': 9049, 'crowd': 9050, 'seizes': 9051, 'inspects': 9052, '4th': 9053, 'ticket': 9054, 'nohh': 9055, 'permits': 9056, 'niris': 9057, 'windbreaker': 9058, 'squawk': 9059, '152': 9060, 'nher': 9061, "nwhat's": 9062, 'mission': 9063, 'hygiene': 9064, 'read': 9065, 'halts': 9066, 'assumed': 9067, 'spar': 9068, 'cocked': 9069, 'sly': 9070, 'exchanged': 9071, 'appropriately': 9072, 'boarded': 9073, 'brutally': 9074, 'employer': 9075, 'adorned': 9076, 'policemen': 9077, 'crowded': 9078, 'apply': 9079, 'than': 9080, 'np': 9081, 'cries': 9082, 'shucks': 9083, 'whale': 9084, 'shuts': 9085, 'wildly': 9086, 'craft': 9087, 'raised': 9088, 'hoyt': 9089, 'colossus': 9090, 'whores': 9091, 'vacation': 9092, 'visibility': 9093, 'page': 9094, 'wired': 9095, 'merely': 9096, 'demonstration': 9097, 'masses': 9098, 'downward': 9099, 'sparkling': 9100, 'unstable': 9101, 'seventh': 9102, 'bitter': 9103, 'spin': 9104, 'immense': 9105, 'searches': 9106, 'consuming': 9107, 'puerto': 9108, 'conscription': 9109, 'overmyer': 9110, 'mutants': 9111, 'commercial': 9112, 'grunting': 9113, 'buyers': 9114, 'achievements': 9115, 'lettin': 9116, 'searing': 9117, 'arab': 9118, 'smoldering': 9119, 'hal': 9120, 'lined': 9121, 'fuse': 9122, 'mint': 9123, 'silence': 9124, 'n3': 9125, 'increasingly': 9126, 'drug': 9127, 'curbside': 9128, 'pak': 9129, 'ms': 9130, 'matters': 9131, 'steak': 9132, 'eggs': 9133, 'feast': 9134, 'metallic': 9135, 'inland': 9136, 'brat': 9137, 'firemen': 9138, 'bleed': 9139, 'scrub': 9140, 'dry': 9141, 'buick': 9142, 'carlos': 9143, 'feds': 9144, 'amen': 9145, 'event': 9146, 'studio': 9147, 'damnit': 9148, 'priority': 9149, 'pl': 9150, 'pens': 9151, 'height': 9152, 'speed': 9153, 'somethin': 9154, 'student': 9155, 'finder': 9156, 'stevens': 
9157, 'dreaded': 9158, 'sherby': 9159, 'winged': 9160, 'passionate': 9161, 'herschel': 9162, 'artifacts': 9163, 'kashmir': 9164, 'chassis': 9165, 'snow': 9166, 'thum': 9167, 'ballroom': 9168, 'medium': 9169, 'nalvy': 9170, 'eventually': 9171, 'partial': 9172, 'limo': 9173, 'fearful': 9174, 'compromise': 9175, 'sparkle': 9176, 'joking': 9177, 'boundaries': 9178, 'verge': 9179, 'ncars': 9180, 'extending': 9181, 'belt': 9182, '00': 9183, 'income': 9184, 'reggie': 9185, 'jawed': 9186, 'has': 9187, 'theater': 9188, 'stained': 9189, 'n106': 9190, 'ankle': 9191, 'hull': 9192, 'njerry': 9193, 'samoan': 9194, 'tucks': 9195, 'unearthly': 9196, 'taber': 9197, 'escaped': 9198, 'idle': 9199, 'executed': 9200, 'autograph': 9201, 'vomits': 9202, 'herman': 9203, 'glum': 9204, 'stamped': 9205, '33': 9206, 'proprietor': 9207, 'large': 9208, 'orchard': 9209, 'till': 9210, 'movies': 9211, 'gabrielle': 9212, 'harvey': 9213, 'clem': 9214, 'annie': 9215, 'clips': 9216, 'nfront': 9217, 'loneliness': 9218, 'shay': 9219, 'liam': 9220, 'wraps': 9221, 'doo': 9222, 'scott': 9223, 'num': 9224, 'clay': 9225, 'sonar': 9226, 'astronomer': 9227, 'taiwanese': 9228, 'selfish': 9229, 'miserable': 9230, 'nlong': 9231, 'pigeons': 9232, 'resting': 9233, 'louis': 9234, 'asks': 9235, 'punishment': 9236, 'carries': 9237, 'erroll': 9238, 'job': 9239, 'headed': 9240, 'skyward': 9241, 'njimmy': 9242, 'regarding': 9243, 'emphatically': 9244, 'lifepod': 9245, 'bedtime': 9246, 'roam': 9247, 'report': 9248, 'shell': 9249, 'decaying': 9250, 'elect': 9251, 'silverware': 9252, 'fireman': 9253, 'bone': 9254, 'think': 9255, 'forties': 9256, 'drafted': 9257, 'fieldstone': 9258, 'accelerator': 9259, 'mobster': 9260, 'knew': 9261, 'hammering': 9262, 'moss': 9263, 'crucial': 9264, 'shears': 9265, 'schnapps': 9266, 'nstill': 9267, 'pacing': 9268, 'n203': 9269, 'le': 9270, 'brewer': 9271, 'nnigel': 9272, 'staggering': 9273, 'thrashing': 9274, 'banner': 9275, 'wesson': 9276, 'wrinkles': 9277, 'inert': 9278, 'jungle': 9279, 
'gleaming': 9280, 'aggressively': 9281, 'sang': 9282, 'chalk': 9283, 'joint': 9284, 'leaps': 9285, 'wright': 9286, 'voting': 9287, 'louden': 9288, 'compliments': 9289, 'examined': 9290, 'grass': 9291, 'sins': 9292, 'flake': 9293, 'faintly': 9294, 'becky': 9295, 'charges': 9296, 'wit': 9297, 'enters': 9298, 'intention': 9299, 'misses': 9300, 'askew': 9301, 'dt': 9302, 'sweat': 9303, 'exit': 9304, 'icy': 9305, 'interviewer': 9306, 'muzzle': 9307, 'shorts': 9308, 'gap': 9309, 'chick': 9310, 'hiss': 9311, 'american': 9312, 'eagan': 9313, 'endure': 9314, 'franklin': 9315, 'swooping': 9316, 'hiccup': 9317, 'cam': 9318, 'idiot': 9319, 'ceramic': 9320, 'confirm': 9321, 'deserved': 9322, 'artificial': 9323, 'flute': 9324, 'improving': 9325, 'blender': 9326, 'wipe': 9327, 'facilities': 9328, 'appropriate': 9329, 'advances': 9330, 'digest': 9331, 'airplanes': 9332, 'thread': 9333, 'patiently': 9334, 'nsea': 9335, 'anguish': 9336, 'stalls': 9337, 'sympathetic': 9338, 'bradley': 9339, 'translated': 9340, 'art': 9341, 'goofy': 9342, 'adrenaline': 9343, 'myself': 9344, 'apologetic': 9345, 'scale': 9346, 'dreams': 9347, 'ferris': 9348, 'fiddling': 9349, 'tektel': 9350, 'dank': 9351, 'wearin': 9352, 'spaghetti': 9353, 'reflective': 9354, 'dinner': 9355, 'heated': 9356, 'recognizing': 9357, 'neighbors': 9358, 'cobwebs': 9359, 'attacking': 9360, 'devour': 9361, 'tnt': 9362, 'bulletproof': 9363, 'puny': 9364, 'shaky': 9365, '160': 9366, 'impromptu': 9367, 'native': 9368, 'archway': 9369, 'submerged': 9370, 'exterior': 9371, 'crown': 9372, 'magician': 9373, 'nshe': 9374, 'nsure': 9375, 'cooke': 9376, 'sausalito': 9377, 'mute': 9378, 'single': 9379, 'doubles': 9380, 'board': 9381, 'bein': 9382, 'biscuits': 9383, 'safari': 9384, 'wallace': 9385, 'aunt': 9386, 'marlin': 9387, '78': 9388, 'nkicks': 9389, 'ground': 9390, 'curses': 9391, 'suspiciously': 9392, 'director': 9393, 'housing': 9394, 'sobbing': 9395, 'ordinance': 9396, 'whirs': 9397, 'hafta': 9398, 'direction': 9399, 'vast': 9400, 
'hipster': 9401, 'polaroid': 9402, 'arrangements': 9403, 'celebrity': 9404, 'deadline': 9405, 'grease': 9406, 'nsharkbait': 9407, 'wink': 9408, 'guitar': 9409, 'thought': 9410, 'fills': 9411, 'inscribed': 9412, 'improvement': 9413, 'spud': 9414, 'welcomes': 9415, 'ordinary': 9416, 'woke': 9417, 'rodin': 9418, 'eatin': 9419, 'streaking': 9420, 'gilded': 9421, 'pretzel': 9422, 'hq': 9423, 'bouncing': 9424, 'guarantee': 9425, 'rendezvous': 9426, 'conclude': 9427, 'looters': 9428, 'studied': 9429, 'ex': 9430, 'accordion': 9431, 'european': 9432, 'financial': 9433, 'versailles': 9434, 'discussed': 9435, 'fishing': 9436, 'sc': 9437, 'nstay': 9438, 'gallons': 9439, 'beards': 9440, 'distinctive': 9441, 'pearl': 9442, 'charged': 9443, 'fools': 9444, 'concourse': 9445, 'trudge': 9446, 'npocket': 9447, 'waterfall': 9448, 'glitter': 9449, 'hesitant': 9450, 'surround': 9451, 'limited': 9452, 'gangsta': 9453, 'swipes': 9454, 'blurring': 9455, 'mcgloin': 9456, 'marty': 9457, 'astrid': 9458, 'mick': 9459, 'hears': 9460, 'gunport': 9461, 'masts': 9462, 'numa': 9463, 'dickey': 9464, 'rambling': 9465, 'norm': 9466, 'howling': 9467, 'grade': 9468, 'shenzhou': 9469, 'frat': 9470, 'myth': 9471, 'enraptured': 9472, 'reacting': 9473, 'description': 9474, 'bright': 9475, 'nflying': 9476, 'sickened': 9477, 'deathly': 9478, 'nfreedom': 9479, 'strangely': 9480, 'curiously': 9481, 'nsport': 9482, 'eliseo': 9483, 'member': 9484, 'natalie': 9485, 'repair': 9486, 'eager': 9487, 'assets': 9488, 'stab': 9489, 'born': 9490, 'bennett': 9491, 'tobin': 9492, 'knicks': 9493, 'bucks': 9494, 'uppercut': 9495, 'et': 9496, 'yes': 9497, 'dim': 9498, 'whites': 9499, 'neighbor': 9500, 'celebrated': 9501, 'nfox': 9502, 'hamm': 9503, 'nrockhound': 9504, 'snorting': 9505, 'irving': 9506, 'disappear': 9507, 'hooking': 9508, 'neutron': 9509, 'bell': 9510, 'theresa': 9511, 'mombasa': 9512, 'relentlessly': 9513, 'wastin': 9514, 'shivaji': 9515, 'pole': 9516, 'sexy': 9517, 'red': 9518, 'entertain': 9519, 'gracious': 
9520, 'steps': 9521, 'shhhh': 9522, 'chemicals': 9523, 'plays': 9524, 'traveling': 9525, 'stocking': 9526, 'code': 9527, 'funny': 9528, 'harmlessly': 9529, 'bracing': 9530, 'beam': 9531, 'incline': 9532, 'pop': 9533, 'extra': 9534, 'device': 9535, 'appears': 9536, 'momma': 9537, 'jimmy': 9538, 'burrow': 9539, 'vallon': 9540, 'mentioned': 9541, 'groupie': 9542, 'hospital': 9543, 'sensational': 9544, 'represents': 9545, 'struggling': 9546, 'therein': 9547, 'approached': 9548, 'relaxes': 9549, 'javal': 9550, 'n15': 9551, 'louise': 9552, 'nha': 9553, 'outburst': 9554, 'pain': 9555, 'nancy': 9556, 'prey': 9557, 'mosquito': 9558, 'announcement': 9559, 'escaping': 9560, 'monitoring': 9561, 'dildo': 9562, 'soprano': 9563, 'adrenalin': 9564, 'yer': 9565, 'ups': 9566, 'clinton': 9567, 'keeper': 9568, 'catcher': 9569, 'when': 9570, 'tha': 9571, 'kong': 9572, 'linoleum': 9573, 'mighty': 9574, 'suzanne': 9575, '111': 9576, 'cookies': 9577, 'levelled': 9578, 'limit': 9579, 'nso': 9580, "'er": 9581, 'excitement': 9582, 'philly': 9583, 'uncle': 9584, 'smacking': 9585, 'trickle': 9586, 'unload': 9587, 'devil': 9588, 'greenbow': 9589, 'ntimmy': 9590, 'guitarist': 9591, 'trip': 9592, 'trial': 9593, 'rosalyn': 9594, 'april': 9595, 'fits': 9596, 'onboard': 9597, 'cunt': 9598, 'iago': 9599, 'groom': 9600, 'manager': 9601, 'chester': 9602, 'supposedly': 9603, 'van': 9604, 'recommend': 9605, 'nkeating': 9606, 'main': 9607, 'carriage': 9608, 'shouldn': 9609, 'paddle': 9610, 'before': 9611, 'gravity': 9612, 'erasing': 9613, 'admirals': 9614, 'array': 9615, '273': 9616, 'zane': 9617, 'interviewed': 9618, 'cleopatra': 9619, 'nova': 9620, 'avenue': 9621, 'kingdom': 9622, 'barbed': 9623, 'thump': 9624, 'nblue': 9625, 'khrushchev': 9626, 'wheedle': 9627, 'hilt': 9628, 'signaling': 9629, 'leering': 9630, 'chapman': 9631, 'rider': 9632, 'blunt': 9633, 'jog': 9634, 'railing': 9635, 'tramell': 9636, 'manufacturing': 9637, 'iovine': 9638, 'types': 9639, 'nling': 9640, 'autopsy': 9641, 'discipline': 
9642, 'npalantine': 9643, 'forensics': 9644, 'fluttering': 9645, 'ar': 9646, 'leak': 9647, 'hunted': 9648, 'upcoming': 9649, 'bond': 9650, 'insane': 9651, 'performed': 9652, 'control': 9653, 'elaine': 9654, 'allen': 9655, 'chevy': 9656, 'indifferent': 9657, 'ncri': 9658, 'hilo': 9659, 'dodge': 9660, 'xe2': 9661, 'ch': 9662, 'lend': 9663, 'sherman': 9664, 'rid': 9665, 'grid': 9666, 'chris': 9667, 'jo': 9668, 'fouchet': 9669, 'tying': 9670, 'imagined': 9671, 'undressing': 9672, 'holes': 9673, 'madhouse': 9674, 'thunk': 9675, 'nsight': 9676, 'mounting': 9677, 'pinned': 9678, 'impassive': 9679, 'copters': 9680, 'pools': 9681, 'lime': 9682, 'jones': 9683, 'manners': 9684, 'garbled': 9685, 'vocal': 9686, 'elvis': 9687, 'karl': 9688, 'defense': 9689, "end'": 9690, '1993': 9691, 'bobinsky': 9692, 'hopping': 9693, 'situated': 9694, 'derrick': 9695, 'torturing': 9696, 'jean': 9697, 'sick': 9698, 'measured': 9699, 'lautrec': 9700, 'gore': 9701, 'minute': 9702, 'hollers': 9703, 'senior': 9704, 'brinnlitz': 9705, 'narm': 9706, 'colors': 9707, 'nbottom': 9708, '91': 9709, 'fiend': 9710, 'ate': 9711, 'depressed': 9712, 'informed': 9713, 'syringe': 9714, 'linger': 9715, 'breaks': 9716, 'endless': 9717, 'sgc': 9718, 'pedro': 9719, 'caped': 9720, 'handbill': 9721, 'swinging': 9722, 'levartov': 9723, 'brah': 9724, 'mikey': 9725, 'swanney': 9726, 'gin': 9727, 'oooo': 9728, 'pounding': 9729, 'conditioning': 9730, 'caption': 9731, 'appreciated': 9732, 'printed': 9733, 'chaz': 9734, 'simone': 9735, 'headlights': 9736, 'nwill': 9737, 'grin': 9738, 'oncoming': 9739, 'reverberates': 9740, 'piercing': 9741, 'peeling': 9742, 'outcropping': 9743, 'along': 9744, '61': 9745, 'clocking': 9746, 'messages': 9747, 'boathouse': 9748, 'n179': 9749, 'photographer': 9750, 'politics': 9751, 'accent': 9752, 'confusing': 9753, 'bancini': 9754, 'depend': 9755, 'glancing': 9756, 'milling': 9757, 'overwhelmed': 9758, 'businessman': 9759, 'sailors': 9760, 'bleeker': 9761, 'obnoxious': 9762, 'vertically': 9763, 
'pauline': 9764, 'reds': 9765, 'patsy': 9766, 'never': 9767, 'winding': 9768, 'prod': 9769, '169': 9770, 'goddamnit': 9771, 'cavett': 9772, 'rate': 9773, 'r2': 9774, 'nhello': 9775, 'entrance': 9776, 'clippings': 9777, 'hide': 9778, 'forearm': 9779, 'tumble': 9780, 'saudi': 9781, 'murph': 9782, 'rope': 9783, 'comfort': 9784, 'nfrightened': 9785, 'wide': 9786, 'lover': 9787, 'described': 9788, 'scholarship': 9789, 'dismissed': 9790, 'draft': 9791, 'parallel': 9792, 'cola': 9793, 'blistering': 9794, 'triumphs': 9795, 'dealing': 9796, 'gingerbread': 9797, 'kits': 9798, 'shore': 9799, 'predicament': 9800, 'rasping': 9801, 'sailing': 9802, 'divorced': 9803, 'cavernous': 9804, 'drooling': 9805, 'briefcases': 9806, 'clinging': 9807, 'whom': 9808, 'drop': 9809, 'sparkler': 9810, 'diplomatic': 9811, 'banter': 9812, 'restaurants': 9813, 'columns': 9814, 'habit': 9815, 'cot': 9816, 'grind': 9817, 'witch': 9818, 'tasted': 9819, 'swallowed': 9820, 'apollo': 9821, 'replica': 9822, 'guiding': 9823, 'tony': 9824, 'harriet': 9825, 'omitted': 9826, 'erupting': 9827, 'gala': 9828, 'fries': 9829, 'perplexed': 9830, 'lowering': 9831, 'prosthetic': 9832, 'loop': 9833, 'eases': 9834, 'beckoning': 9835, 'cutter': 9836, 'lulu': 9837, 'microphones': 9838, 'duet': 9839, 'tea': 9840, 'squats': 9841, 'chrissie': 9842, 'keenser': 9843, 'negasonic': 9844, 'version': 9845, 'chuckle': 9846, 'snakes': 9847, 'damaged': 9848, 'attack': 9849, 'yuh': 9850, 'welling': 9851, 'nmartini': 9852, 'doorbell': 9853, 'scrambled': 9854, 'yelp': 9855, 'hind': 9856, 'greatest': 9857, 'room': 9858, 'gras': 9859, 'bubble': 9860, 'oh': 9861, 'zipper': 9862, 'africa': 9863, 'illuminating': 9864, 'sleepers': 9865, 'death': 9866, 'tigress': 9867, 'robinson': 9868, 'defenses': 9869, 'evening': 9870, '97': 9871, 'tahoe': 9872, 'jubilant': 9873, 'horizon': 9874, 'nhair': 9875, 'migrant': 9876, 'replaces': 9877, 'temporary': 9878, 'dreadful': 9879, 'slacks': 9880, 'roz': 9881, 'doorknob': 9882, 'networks': 9883, 'selected': 
9884, 'guts': 9885, 'crossed': 9886, 'aimed': 9887, 'fill': 9888, 'contestant': 9889, 'outwards': 9890, 'pebble': 9891, 'amitabh': 9892, 'ndodd': 9893, 'dalton': 9894, 'flawless': 9895, 'loves': 9896, 'manipulate': 9897, 'suspicious': 9898, 'jacked': 9899, 'daggers': 9900, 'questioning': 9901, 'fatal': 9902, 'bits': 9903, 'indignant': 9904, 'apology': 9905, 'nbe': 9906, 'send': 9907, 'gals': 9908, 'pumpkin': 9909, 'followed': 9910, 'act': 9911, 'adoption': 9912, 'nnervously': 9913, 'liked': 9914, 'helsing': 9915, 'coconut': 9916, 'knockout': 9917, 'explore': 9918, 'tapped': 9919, 'morse': 9920, 'darcy': 9921, 'swallows': 9922, 'chump': 9923, 'wee': 9924, 'wax': 9925, 'sickening': 9926, 'xadre': 9927, 'division': 9928, 'fidgets': 9929, 'n187': 9930, 'breathtaking': 9931, 'nisland': 9932, 'turn': 9933, 'dynamite': 9934, 'missile': 9935, 'steven': 9936, 'radical': 9937, 'strands': 9938, 'ramirez': 9939, 'ridiculous': 9940, 'phones': 9941, 'beeper': 9942, 'vacant': 9943, 'pissing': 9944, 'bear': 9945, 'doom': 9946, 'sincerity': 9947, '9': 9948, 'allowed': 9949, 'elaborately': 9950, 'nrolling': 9951, 'zapped': 9952, 'chill': 9953, 'geraldo': 9954, '190': 9955, 'smithereens': 9956, 'nslimer': 9957, 'instrumentation': 9958, 'erotic': 9959, 'elevate': 9960, 'apparent': 9961, 'unidentified': 9962, 'brittle': 9963, 'nip': 9964, 'detonates': 9965, 'moguy': 9966, 'brent': 9967, 'bandaged': 9968, 'offended': 9969, 'dull': 9970, 'nflash': 9971, 'te': 9972, '277': 9973, 'set': 9974, 'imagining': 9975, 'elton': 9976, 'flee': 9977, 'gnarled': 9978, 'inspector': 9979, 'directed': 9980, 'sheldon': 9981, 'soft': 9982, 'london': 9983, 'respond': 9984, 'nvarious': 9985, 'unnoticed': 9986, 'scoreboard': 9987, 'patter': 9988, 'cleveland': 9989, 'fuck': 9990, 'innocent': 9991, 'charming': 9992, 'auschwitz': 9993, 'clapping': 9994, 'aloud': 9995, 'menacingly': 9996, 'acidosis': 9997, 'transmitting': 9998, 'train': 9999, 'mane': 10000, 'ring': 10001, 'horseback': 10002, 'smudge': 10003, 
'okun': 10004, 'average': 10005, 'taunting': 10006, 'hindu': 10007, 'goddam': 10008, 'motherfucker': 10009, 'forgot': 10010, 'snores': 10011, 'projectile': 10012, 'nbathroom': 10013, 'meditation': 10014, 'zipping': 10015, 'chooses': 10016, 'parrot': 10017, 'spaceship': 10018, 'ntalking': 10019, 'daylight': 10020, 'meeting': 10021, 'contrary': 10022, 'unfinished': 10023, 'ordering': 10024, 'at': 10025, 'irregular': 10026, 'comfy': 10027, 'legged': 10028, 'scratch': 10029, 'trains': 10030, 'saying': 10031, 'puppets': 10032, 'navigates': 10033, 'fainter': 10034, 'bones': 10035, '166': 10036, 'attentive': 10037, 'familiarity': 10038, 'laserfire': 10039, 'deserve': 10040, 'billion': 10041, 'restrains': 10042, 'conversation': 10043, 'sand': 10044, 'sling': 10045, 'pancake': 10046, 'maze': 10047, 'nfacing': 10048, 'simultaneous': 10049, 'shack': 10050, 'observe': 10051, 'crouch': 10052, 'last': 10053, 'siblings': 10054, 'defeat': 10055, 'walt': 10056, 'platform': 10057, 'count': 10058, 'armrest': 10059, 'nman': 10060, 'textbooks': 10061, 'congressman': 10062, 'bout': 10063, 'lines': 10064, 'krishna': 10065, 'smashes': 10066, 'painter': 10067, 'diesel': 10068, 'swath': 10069, 'nrain': 10070, 'nbreaks': 10071, 'thorough': 10072, 'spatters': 10073, 'bury': 10074, 'scientist': 10075, 'n115': 10076, 'overtake': 10077, 'ginny': 10078, 'frightening': 10079, 'rapping': 10080, 'similarly': 10081, 'wobbly': 10082, 'adjacent': 10083, 'support': 10084, 'chrome': 10085, 'lz': 10086, 'seductive': 10087, 'pieces': 10088, 'thus': 10089, 'mail': 10090, 'pledge': 10091, 'half': 10092, 'ballistic': 10093, 'bayou': 10094, 'struts': 10095, 'murmur': 10096, 'assistant': 10097, 'too': 10098, 'echoes': 10099, 'if': 10100, 'mere': 10101, 'chloe': 10102, 'clutching': 10103, 'jeff': 10104, 'stretching': 10105, 'album': 10106, 'shitless': 10107, 'surprised': 10108, 'bumping': 10109, 'daily': 10110, 'leaving': 10111, 'strapping': 10112, 'theme': 10113, 'tatum': 10114, 'callin': 10115, 'n167': 10116, 
'gourmet': 10117, 'malfunctioning': 10118, 'robe': 10119, 'nlights': 10120, 'winch': 10121, 'oar': 10122, 'pad': 10123, 'nlater': 10124, 'ncall': 10125, 'indeed': 10126, 'transmitter': 10127, 'cave': 10128, 'mystery': 10129, '49': 10130, 'tenement': 10131, 'subway': 10132, 'chapeau': 10133, 'real': 10134, 'suggestions': 10135, 'crumble': 10136, 'shipping': 10137, 'concerto': 10138, 'corral': 10139, 'comet': 10140, 'sap': 10141, 'wanders': 10142, 'lisa': 10143, 'iowa': 10144, 'out': 10145, 'underpass': 10146, 'absolute': 10147, 'beeps': 10148, 'crack': 10149, 'dopinder': 10150, 'derek': 10151, 'glows': 10152, 'lighting': 10153, 'yu': 10154, 'arrow': 10155, 'sl': 10156, 'tusken': 10157, 'costume': 10158, 'arrange': 10159, 'clang': 10160, 'psycho': 10161, 'bleached': 10162, 'ideas': 10163, 'against': 10164, 'demonstrates': 10165, 'antwan': 10166, 'speeder': 10167, 'chopping': 10168, 'south': 10169, 'significant': 10170, 'falcon': 10171, 'broad': 10172, 'uh': 10173, 'assaulted': 10174, 'temperature': 10175, 'squeaking': 10176, '98': 10177, 'crusher': 10178, 'already': 10179, 'lifeless': 10180, 'shrinks': 10181, 'bingo': 10182, 'sawed': 10183, 'terrifying': 10184, 'thumbs': 10185, 'more': 10186, 'perpetual': 10187, 'rigging': 10188, 'spreading': 10189, 'realizes': 10190, 'pry': 10191, 'morose': 10192, 'n110': 10193, 'anywhere': 10194, 'blindfolded': 10195, 'imploding': 10196, 'caltech': 10197, 'videos': 10198, 'sci': 10199, 'southwest': 10200, 'mr': 10201, 'gasoline': 10202, 'nkitchen': 10203, 'dana': 10204, 'curly': 10205, 'lands': 10206, 'runs': 10207, 'judy': 10208, 'ashley': 10209, 'conversations': 10210, 'crouches': 10211, 'mirror': 10212, 'blind': 10213, 'glares': 10214, 'dramatically': 10215, 'cleans': 10216, 'strobe': 10217, 'raj': 10218, 'cds': 10219, 'plunging': 10220, 'intent': 10221, 'papierman': 10222, 'nhappy': 10223, 'confidently': 10224, 'slimy': 10225, 'motorist': 10226, 'signature': 10227, 'squad': 10228, 'clementine': 10229, 'shrouded': 10230, 'drip': 
10231, 'benz': 10232, 'stationery': 10233, 'mayhem': 10234, 'danger': 10235, 'nok': 10236, 'luke': 10237, 'fashioned': 10238, 'pushed': 10239, 'evidence': 10240, 'booze': 10241, 'russians': 10242, 'toby': 10243, 'port': 10244, 'car': 10245, 'redgick': 10246, 'nguy': 10247, 'profound': 10248, 'nangela': 10249, 'muddy': 10250, 'vegetarian': 10251, 'dramatic': 10252, 'mirrored': 10253, 'curb': 10254, 'convinced': 10255, 'quince': 10256, 'groan': 10257, 'bands': 10258, 'heater': 10259, 'nsir': 10260, 'loft': 10261, 'trying': 10262, 'tables': 10263, 'shrek': 10264, '2001': 10265, 'buried': 10266, 'capable': 10267, 'nsomeone': 10268, 'knowledge': 10269, 'beckons': 10270, 'glassed': 10271, 'nreverend': 10272, 'dazzling': 10273, 'brownstone': 10274, 'wondering': 10275, 'manipulates': 10276, 'demands': 10277, 'oscillator': 10278, 'jeremy': 10279, '312': 10280, 'jacks': 10281, 'bewildered': 10282, 'mattie': 10283, 'gizmo': 10284, 'raincoat': 10285, 'trick': 10286, 'belongs': 10287, 'pardon': 10288, 'ellie': 10289, 'rippled': 10290, 'streaming': 10291, 'nreally': 10292, 'audible': 10293, 'peek': 10294, 'crude': 10295, 'n57': 10296, 'mustache': 10297, 'nita': 10298, 'pinocchio': 10299, 'dunes': 10300, 'oozes': 10301, 'icon': 10302, 'uniformed': 10303, 'irritated': 10304, 'cameras': 10305, 'praying': 10306, 'national': 10307, 'resident': 10308, 'scurry': 10309, 'tinted': 10310, 'nine': 10311, 'princess': 10312, 'bo': 10313, 'slobbering': 10314, 'suddenly': 10315, 'trailer': 10316, 'launchers': 10317, 'wise': 10318, 'costa': 10319, 'chunks': 10320, 'appeal': 10321, 'hulking': 10322, 'looms': 10323, 'um': 10324, 'corpses': 10325, 'toxic': 10326, 'caesar': 10327, 'cape': 10328, 'certain': 10329, 'end': 10330, 'subterranean': 10331, 'hotels': 10332, '233': 10333, 'admires': 10334, 'staging': 10335, 'chrysler': 10336, 'sort': 10337, 'hyperspace': 10338, 'weathered': 10339, 'substance': 10340, 'bouncer': 10341, 'slaves': 10342, 'interests': 10343, 'chanting': 10344, 'opposition': 
10345, 'grandmother': 10346, 'bonjour': 10347, 'stir': 10348, 'insist': 10349, 'hauled': 10350, 'marines': 10351, 'kaboom': 10352, 'penalty': 10353, '206': 10354, 'reckless': 10355, 'landlady': 10356, 'for': 10357, 'gardens': 10358, 'schindler': 10359, 'starbucks': 10360, 'requested': 10361, 'bashful': 10362, 'forbes': 10363, 'dozens': 10364, 'twinkle': 10365, 'lowered': 10366, 'perp': 10367, 'dickhead': 10368, 'naughty': 10369, 'outstretched': 10370, 'tailor': 10371, 'mellow': 10372, 'potentially': 10373, 'luckily': 10374, 'hits': 10375, 'gunshot': 10376, 'tsimtsum': 10377, 'deco': 10378, 'swieten': 10379, 'retreat': 10380, 'hernandez': 10381, 'cheaper': 10382, 'waking': 10383, 'approaching': 10384, 'sentry': 10385, 'ridge': 10386, 'titanium': 10387, 'strike': 10388, 'stylized': 10389, 'node': 10390, 'nsnake': 10391, '192': 10392, 'receptacle': 10393, 'lapse': 10394, 'identical': 10395, 'physically': 10396, 'intoxicated': 10397, 'cult': 10398, 'anyway': 10399, 'jasper': 10400, 'plummets': 10401, 'explosion': 10402, 'manages': 10403, 'debates': 10404, 'sid': 10405, 'quest': 10406, 'chavez': 10407, 'associate': 10408, 'mic': 10409, 'furnishings': 10410, 'lo': 10411, "it's": 10412, 'squeaks': 10413, 'chasing': 10414, 'same': 10415, 'operator': 10416, 'competition': 10417, 'twilight': 10418, 'holdin': 10419, 'leia': 10420, '1954': 10421, '137': 10422, 'movements': 10423, 'huh': 10424, 'highly': 10425, 'freezers': 10426, 'man': 10427, 'skipper': 10428, 'sun': 10429, 'n114': 10430, 'tones': 10431, 'straightens': 10432, 'loses': 10433, 'onrushing': 10434, 'dodd': 10435, 'discretion': 10436, 'realises': 10437, 'expensive': 10438, 'don': 10439, 'kiddo': 10440, 'stoick': 10441, 'moons': 10442, 'rumbles': 10443, 'pro': 10444, 'drizzle': 10445, 'sifts': 10446, 'operating': 10447, 'curtains': 10448, 'alcoholic': 10449, 'adding': 10450, 'introducing': 10451, 'hobbles': 10452, 'jumped': 10453, 'remain': 10454, 'marcia': 10455, 'powerfully': 10456, 'take': 10457, 'damn': 10458, 
'squawking': 10459, 'nguys': 10460, 'haunting': 10461, 'kevin': 10462, 'swells': 10463, 'vance': 10464, 'pelham': 10465, '93': 10466, 'ashes': 10467, 'bemused': 10468, 'corona': 10469, 'production': 10470, 'reads': 10471, 'ellen': 10472, 'thomson': 10473, 'eyelids': 10474, 'above': 10475, 'fourth': 10476, 'rods': 10477, 'buns': 10478, 'neon': 10479, 'especially': 10480, 'clientele': 10481, 'ncenter': 10482, 'hung': 10483, 'd2': 10484, 'motivated': 10485, 'pains': 10486, 'exotic': 10487, 'huge': 10488, 'n75': 10489, 'bodhi': 10490, 'mountain': 10491, 'mortuary': 10492, 'ceilings': 10493, 'frightened': 10494, 'gratefully': 10495, 'prefer': 10496, 'nscanlon': 10497, 'distracted': 10498, 'occupants': 10499, 'carefully': 10500, 'theatre': 10501, 'melkonis': 10502, 'tink': 10503, 'aha': 10504, 'kicks': 10505, 'slap': 10506, 'years': 10507, 'below': 10508, 'congratulations': 10509, 'collapse': 10510, 'travel': 10511, 'basin': 10512, 'masks': 10513, 'bellowing': 10514, 'panicky': 10515, 'lobster': 10516, 'deaf': 10517, 'fighters': 10518, 'matching': 10519, 'attendance': 10520, 'battered': 10521, 'streetlight': 10522, 'miles': 10523, 'vegetable': 10524, 'ninth': 10525, 'mocking': 10526, 'pillar': 10527, 'smitten': 10528, 'flower': 10529, 'disappoint': 10530, 'chorus': 10531, 'jewel': 10532, 'noble': 10533, 'matte': 10534, 'pins': 10535, 'cabin': 10536, 'regis': 10537, 'neasy': 10538, 'execution': 10539, 'nwater': 10540, 'sir': 10541, 'neck': 10542, 'determine': 10543, 'swallow': 10544, 'contain': 10545, 'shafts': 10546, 'orbits': 10547, 'nof': 10548, "'artagnan": 10549, 'bets': 10550, 'shirts': 10551, 'monsieur': 10552, 'propeller': 10553, 'hadrosaurs': 10554, 'thrown': 10555, 'n165': 10556, 'torch': 10557, 'pitiful': 10558, 'vase': 10559, 'compressor': 10560, 'feigns': 10561, 'islamabad': 10562, 'hill': 10563, 'faced': 10564, 'knitting': 10565, 'contd': 10566, 'ducky': 10567, 'narmy': 10568, 'elevated': 10569, 'liverpool': 10570, 'ntight': 10571, 'peep': 10572, 'shrugs': 
10573, 'genuine': 10574, 'guitars': 10575, 'crop': 10576, 'headlong': 10577, 'ladders': 10578, 'pushing': 10579, 'gates': 10580, 'rosko': 10581, 'bass': 10582, 'performing': 10583, 'relations': 10584, 'chi': 10585, 'mom': 10586, '279': 10587, 'flock': 10588, 'cropped': 10589, 'webbed': 10590, 'locate': 10591, 'dc': 10592, 'summon': 10593, 'meadows': 10594, 'nrichie': 10595, 'plops': 10596, 'tri': 10597, 'sire': 10598, 'african': 10599, 'wheeling': 10600, 'foreboding': 10601, 'hippie': 10602, 'headstone': 10603, 'hungry': 10604, 'tracer': 10605, 'celestial': 10606, 'crimes': 10607, 'nleaving': 10608, 'pipe': 10609, 'zooming': 10610, 'rode': 10611, 'hopefully': 10612, 'beak': 10613, 'bouquet': 10614, 'labels': 10615, 'chances': 10616, 'reminds': 10617, 'cruises': 10618, 'bikini': 10619, 'amdursky': 10620, "'melio": 10621, 'doc': 10622, 'pumps': 10623, 'corrupt': 10624, 'frozen': 10625, 'nothers': 10626, 'tipping': 10627, '20': 10628, 'beads': 10629, 'absent': 10630, 'kindness': 10631, 'flirtatious': 10632, 'try': 10633, 'nazis': 10634, 'gleason': 10635, 'browning': 10636, 'lundegaard': 10637, 'nplatform': 10638, 'chucks': 10639, 'teachers': 10640, 'nknow': 10641, 'gaps': 10642, 'n50': 10643, 'tiles': 10644, 'lair': 10645, 'haste': 10646, 'claims': 10647, 'nmushu': 10648, 'application': 10649, 'drifts': 10650, 'uncertainty': 10651, 'resources': 10652, 'trapeze': 10653, 'swan': 10654, 'expression': 10655, 'commit': 10656, 'indistinct': 10657, 'njaval': 10658, 'heartedly': 10659, 'police': 10660, 'nshan': 10661, 'killin': 10662, 'villains': 10663, 'marshal': 10664, 'planting': 10665, 'crotch': 10666, 'monkeys': 10667, 'passionately': 10668, 'desperation': 10669, 'stagehand': 10670, 'commuters': 10671, 'mix': 10672, 'curves': 10673, 'props': 10674, 'moody': 10675, 'cement': 10676, 'strictly': 10677, 'zelda': 10678, 'motive': 10679, 'somewhere': 10680, '287': 10681, 'funnel': 10682, 'ted': 10683, 'nbobbi': 10684, 'hadrosaur': 10685, 'imagination': 10686, '238': 10687, 
'peterson': 10688, 'burp': 10689, 'accepting': 10690, 'capt': 10691, 'jamming': 10692, 'nherself': 10693, 'n12': 10694, 'combat': 10695, 'sparse': 10696, 'rouge': 10697, 'coffin': 10698, 'destructive': 10699, 'files': 10700, 'comin': 10701, 'rustling': 10702, 'telling': 10703, 'nwant': 10704, 'scene': 10705, 'unpack': 10706, 'vibe': 10707, 'performance': 10708, 'giest': 10709, 'foggy': 10710, 'wrestle': 10711, 'ndoors': 10712, 'fistful': 10713, 'guardian': 10714, '247': 10715, 'antoine': 10716, 'lye': 10717, 'awakens': 10718, 'spill': 10719, 'nfalls': 10720, 'mechanically': 10721, 'rummaging': 10722, 'nwalther': 10723, 'circumstances': 10724, 'fond': 10725, 'dive': 10726, 'contemplates': 10727, 'confronts': 10728, 'yield': 10729, 'gazzo': 10730, 'focuses': 10731, 'squeaky': 10732, 'chains': 10733, 'submit': 10734, 'houses': 10735, 'shown': 10736, 'extreme': 10737, 'nirving': 10738, 'dare': 10739, 'scientific': 10740, 'flanks': 10741, 'nwa': 10742, 'nflip': 10743, '50s': 10744, 'reign': 10745, 'halo': 10746, 'iz': 10747, 'nmarlin': 10748, 'jim': 10749, 'experienced': 10750, 'rains': 10751, 'melancholy': 10752, '48': 10753, 'naval': 10754, 'vintage': 10755, 'racing': 10756, 'ndeafening': 10757, 'scope': 10758, 'nshoots': 10759, 'behold': 10760, 'nmierzwiak': 10761, 'frieda': 10762, 'dealer': 10763, 'watched': 10764, 'evac': 10765, 'titled': 10766, 'elephants': 10767, 'circling': 10768, 'madritsch': 10769, 'lara': 10770, 'carlo': 10771, 'humble': 10772, 'stranger': 10773, 'establishing': 10774, 'mel': 10775, 'pod': 10776, 'pitch': 10777, 'twice': 10778, 'ward': 10779, 'daring': 10780, 'candlestick': 10781, 'vans': 10782, 'singer': 10783, 'nwarchild': 10784, 'invited': 10785, 'title': 10786, 'rubs': 10787, '6': 10788, 'criss': 10789, 'nmoves': 10790, 'executive': 10791, 'vanilla': 10792, 'eerie': 10793, 'broadhurst': 10794, 'recall': 10795, 'reconnaissance': 10796, 'ron': 10797, 'mariners': 10798, 'awaits': 10799, 'texas': 10800, 'clients': 10801, 'fascism': 10802, 
'caliber': 10803, 'cylinder': 10804, 'actual': 10805, 'restroom': 10806, 'lame': 10807, 'narada': 10808, 'rotting': 10809, 'muslim': 10810, 'knoll': 10811, 'nseen': 10812, 'vijay': 10813, 'vincent': 10814, 'escape': 10815, 'accelerates': 10816, 'klaxon': 10817, 'radioactive': 10818, 'wreckage': 10819, 'glock': 10820, 'sheepishly': 10821, 'respectable': 10822, 'pile': 10823, 'firing': 10824, 'busily': 10825, 'effortlessly': 10826, 'policeman': 10827, 'authorized': 10828, 'chamberlain': 10829, 'glee': 10830, 'decay': 10831, 'jukebox': 10832, 'somber': 10833, 'shudders': 10834, 'paulson': 10835, 'theft': 10836, 'custom': 10837, 'ndon': 10838, 'shoeshine': 10839, 'radioed': 10840, 'psychiatrist': 10841, 'col': 10842, 'ghosts': 10843, 'script': 10844, 'ohhh': 10845, 'moving': 10846, 'beehive': 10847, 'rotation': 10848, 'asked': 10849, 'stoddard': 10850, 'queer': 10851, 'november': 10852, 'narthur': 10853, 'banners': 10854, 'payphone': 10855, 'awkwardly': 10856, 'smears': 10857, 'english': 10858, 'wrote': 10859, 'shirley': 10860, 'twelve': 10861, 'fearsome': 10862, 'butter': 10863, 'shredded': 10864, 'destroyer': 10865, '171': 10866, 'reed': 10867, 'nshoulder': 10868, 'n81': 10869, 'nsick': 10870, 'showalter': 10871, 'spectators': 10872, 'sprawls': 10873, 'gary': 10874, 'cronkite': 10875, '139': 10876, 'hittin': 10877, 'kaffee': 10878, 'brilliant': 10879, 'sections': 10880, 'bobbi': 10881, '1964': 10882, 'tropical': 10883, 'hammond': 10884, 'hinge': 10885, 'plaster': 10886, 'ndark': 10887, 'orderly': 10888, 'chambers': 10889, 'gardening': 10890, 'packs': 10891, 'pace': 10892, 'jabs': 10893, 'stock': 10894, 'ban': 10895, 'driver': 10896, 'plowing': 10897, 'transporter': 10898, 'auditorium': 10899, 'windmill': 10900, 'rank': 10901, 'recruits': 10902, 'clanging': 10903, 'pity': 10904, 'disguised': 10905, 'failure': 10906, 'crocodile': 10907, 'architects': 10908, 'stopwatch': 10909, 'christopher': 10910, 'wormholes': 10911, 'sports': 10912, 'decks': 10913, 'stephen': 10914, 
'particular': 10915, 'jam': 10916, 'nsuddenly': 10917, 'anxiously': 10918, 'si': 10919, 'twitching': 10920, 'ooh': 10921, 'upwards': 10922, 'piles': 10923, 'turnpike': 10924, 'mounted': 10925, 'ha': 10926, 'toy': 10927, 'nu': 10928, '1780': 10929, 'elsewhere': 10930, 'dollar': 10931, 'caused': 10932, 'clogged': 10933, 'slippery': 10934, 'once': 10935, 'reunion': 10936, 'nrolls': 10937, 'puppy': 10938, 'unexpectedly': 10939, 'burly': 10940, 'nearer': 10941, 'birthday': 10942, 'mess': 10943, 'stillness': 10944, 'must': 10945, 'jihad': 10946, 'religion': 10947, '41': 10948, "i'll": 10949, 'cakes': 10950, 'stale': 10951, 'rearview': 10952, 'zed': 10953, 'dragging': 10954, 'onions': 10955, 'drifting': 10956, 'welton': 10957, 'rental': 10958, 'fixing': 10959, 'lock': 10960, 'stupid': 10961, 'snaking': 10962, 'coaster': 10963, 'cubicles': 10964, 'edwardo': 10965, 'higher': 10966, 'grisly': 10967, 'lick': 10968, 'everybody': 10969, 'stronger': 10970, 'vote': 10971, 'souvenirs': 10972, 'savages': 10973, 'evident': 10974, 'theo': 10975, 'chandelier': 10976, 'pie': 10977, 'transforms': 10978, 'gradually': 10979, 'massive': 10980, 'depressing': 10981, 'krause': 10982, 'terror': 10983, 'action': 10984, 'cargo': 10985, 'punxsutawney': 10986, 'showered': 10987, 'distressed': 10988, 'regardless': 10989, 'orion': 10990, 'block': 10991, 'fall': 10992, 'materializes': 10993, 'placid': 10994, 'directions': 10995, 'wakes': 10996, 'n27': 10997, 'fireworks': 10998, 'naomi': 10999, 'landing': 11000, 'magic': 11001, 'whoa': 11002, 'den': 11003, 'quincy': 11004, 'hustler': 11005, 'betty': 11006, 'bolding': 11007, 'biting': 11008, 'installer': 11009, 'turbo': 11010, '88': 11011, 'cancel': 11012, 'selects': 11013, 'shooter': 11014, 'briskly': 11015, 'stumble': 11016, 'gal': 11017, 'attitudes': 11018, 'impact': 11019, 'charlotte': 11020, 'scotch': 11021, 'drummer': 11022, 'juicy': 11023, 'overweight': 11024, 'n129': 11025, 'dj': 11026, 'grainy': 11027, 'dallas': 11028, 'collected': 11029, 
'mast': 11030, 'smashing': 11031, 'meaningful': 11032, 'bala': 11033, 'babes': 11034, 'refill': 11035, 'rusting': 11036, 'machinery': 11037, 'trendy': 11038, 'giggle': 11039, 'hard': 11040, 'designed': 11041, 'mildly': 11042, 'eighteen': 11043, 'pounces': 11044, 'ol': 11045, 'thrill': 11046, 'transfer': 11047, 'bore': 11048, 'eastern': 11049, 'leaning': 11050, 'noooo': 11051, 'airspace': 11052, 'briefs': 11053, 'stop': 11054, 'walk': 11055, 'alleys': 11056, 'gawk': 11057, 'blades': 11058, 'chapped': 11059, 'teammates': 11060, 'beloved': 11061, 'git': 11062, 'nstart': 11063, 'bitterness': 11064, 'ninside': 11065, 'comedian': 11066, 'spec': 11067, 'pivots': 11068, 'nprobably': 11069, 'escorted': 11070, 'rainy': 11071, 'sig': 11072, 'stole': 11073, 'nearest': 11074, 'cradles': 11075, 'considerable': 11076, 'nlefou': 11077, 'throats': 11078, 'italy': 11079, 'tombs': 11080, 'willy': 11081, 'huddled': 11082, 'flashlights': 11083, 'nbreath': 11084, 'donkey': 11085, '148': 11086, 'passports': 11087, 'methodist': 11088, 'greenville': 11089, 'yuppie': 11090, 'grumbling': 11091, 'workroom': 11092, 'nicely': 11093, 'tradition': 11094, 'absorbing': 11095, 'falsetto': 11096, 'barges': 11097, 'dimensional': 11098, 'bricks': 11099, 'champagne': 11100, 'amounts': 11101, 'refer': 11102, 'stocky': 11103, 'tidy': 11104, 'acid': 11105, 'rings': 11106, 'contrite': 11107, 'costumes': 11108, 'kicked': 11109, 'advice': 11110, 'headlock': 11111, 'doesn': 11112, 'clear': 11113, 'updated': 11114, 'gaear': 11115, 'thoroughly': 11116, 'scrolls': 11117, 'legend': 11118, 'terrorist': 11119, 'okamoto': 11120, 'attempted': 11121, 'volts': 11122, 'boothe': 11123, 'fitts': 11124, 'accounts': 11125, 'melvin': 11126, 'nrex': 11127, 'traffic': 11128, "'mma": 11129, 'antechamber': 11130, 'swag': 11131, 'mannlicher': 11132, 'shrug': 11133, 'adjustment': 11134, 'gulls': 11135, 'drift': 11136, 'jerry': 11137, 'worry': 11138, 'nclose': 11139, 'forming': 11140, 'mate': 11141, 'pasadena': 11142, 'midsection': 
11143, 'drives': 11144, 'any': 11145, 'ngun': 11146, 'lotte': 11147, 'teacup': 11148, 'contemplating': 11149, 'swarm': 11150, 'cleaning': 11151, 'plate': 11152, 'worked': 11153, 'bowers': 11154, 'heck': 11155, 'badly': 11156, 'delightful': 11157, 'matchmaker': 11158, 'littered': 11159, 'sharp': 11160, 'masking': 11161, 'hectic': 11162, 'pajamas': 11163, 'frequencies': 11164, 'miranda': 11165, 'shows': 11166, 'largest': 11167, 'bend': 11168, 'daisy': 11169, 'blackout': 11170, '37': 11171, 'winds': 11172, 'mixing': 11173, 'hips': 11174, 'animated': 11175, 'sydney': 11176, 'retreats': 11177, 'busting': 11178, 'seth': 11179, 'huns': 11180, 'em': 11181, 'cheap': 11182, 'meantime': 11183, 'arranges': 11184, 'king': 11185, 'wishes': 11186, 'decided': 11187, 'chilled': 11188, 'nanyone': 11189, 'nhelp': 11190, 'anxious': 11191, '217': 11192, 'picker': 11193, 'izzy': 11194, 'strolling': 11195, 'marched': 11196, 'yusif': 11197, 'invention': 11198, '318': 11199, 'deliberate': 11200, 'drills': 11201, 'tally': 11202, 'stench': 11203, 'n145': 11204, 'grids': 11205, 'filled': 11206, 'exhausted': 11207, 'remainder': 11208, 'barks': 11209, 'felony': 11210, 'romulan': 11211, 'shapiro': 11212, '278': 11213, 'r': 11214, 'narrows': 11215, 'flour': 11216, 'rising': 11217, 'hilton': 11218, 'afford': 11219, "'you": 11220, 'pressing': 11221, 'seek': 11222, 'wonderfully': 11223, 'pride': 11224, 'lovers': 11225, '8000': 11226, 'concerns': 11227, 'campaign': 11228, 'accusing': 11229, 'acceptance': 11230, 'papers': 11231, 'alabama': 11232, 'bastards': 11233, 'lighter': 11234, 'lumps': 11235, 'lumi': 11236, 'nan': 11237, 'mega': 11238, 'stealth': 11239, 'displayed': 11240, 'transport': 11241, 'exact': 11242, 'whoosh': 11243, 'dryly': 11244, 'shop': 11245, 'farmhouse': 11246, 'pablo': 11247, 'litter': 11248, 'retrieves': 11249, 'ambush': 11250, 'batre': 11251, 'awesome': 11252, 'first': 11253, 'therefore': 11254, 'clawing': 11255, 'mothers': 11256, 'obi': 11257, 'slavers': 11258, 'tom': 11259, 
'nheads': 11260, 'haven': 11261, 'nvan': 11262, 'profile': 11263, 'award': 11264, 'ee': 11265, 'dangerously': 11266, 'burgers': 11267, 'nicholas': 11268, 'dashboard': 11269, 'heap': 11270, 'faith': 11271, 'menus': 11272, 'introduce': 11273, 'behaving': 11274, 'fifties': 11275, 'nyou': 11276, 'semester': 11277, 'reprise': 11278, '45': 11279, 'pumping': 11280, 'orkin': 11281, 'pa': 11282, 'rollo': 11283, 'hollow': 11284, 'rejoins': 11285, 'smallest': 11286, 'njafar': 11287, 'feelings': 11288, 'edged': 11289, 'nnumber': 11290, 'drifted': 11291, 'instant': 11292, 'suspended': 11293, 'trophies': 11294, 'fred': 11295, 'rough': 11296, 'kevlar': 11297, 'richard': 11298, 'locations': 11299, 'conditions': 11300, 'saves': 11301, 'reaction': 11302, 'rialto': 11303, 'matisse': 11304, 'ducked': 11305, 'shivers': 11306, 'dots': 11307, 'concentrates': 11308, 'bug': 11309, 'vaughn': 11310, 'sorting': 11311, 'wistful': 11312, 'approximately': 11313, 'allie': 11314, 'grier': 11315, 'aids': 11316, 'ferrie': 11317, 'raider': 11318, 'stewardess': 11319, 'kurtz': 11320, 'soup': 11321, 'getaway': 11322, 'huddle': 11323, 'crying': 11324, '103': 11325, 'blvd': 11326, 'talking': 11327, 'gutter': 11328, 'dimly': 11329, 'porridge': 11330, 'phillips': 11331, 'bid': 11332, 'avoids': 11333, 'sounded': 11334, 'cluck': 11335, 'gathered': 11336, 'fraiser': 11337, 'slicked': 11338, 'gloved': 11339, 'snatches': 11340, 'operate': 11341, 'debate': 11342, 'noon': 11343, 'n138': 11344, 'n31': 11345, 'resounding': 11346, 'pliers': 11347, 'nbox': 11348, 'ncameron': 11349, 'rigs': 11350, 'ponting': 11351, 'blocks': 11352, '55': 11353, 'goldie': 11354, 'certainly': 11355, 'commissioner': 11356, 'nclark': 11357, 'discovery': 11358, 'owes': 11359, 'cleaver': 11360, 'nnotice': 11361, 'seein': 11362, 'sooner': 11363, 'tossed': 11364, 'dugout': 11365, 'juice': 11366, 'bible': 11367, 'stays': 11368, 'flourish': 11369, 'destination': 11370, 'tres': 11371, 'napollo': 11372, 'sinking': 11373, 'gita': 11374, 'seized': 
11375, 'call': 11376, 'poland': 11377, 'landed': 11378, 'manicured': 11379, 'downhole': 11380, 'nif': 11381, 'distorted': 11382, 'skirt': 11383, 'paulie': 11384, 'packet': 11385, 'melts': 11386, 'conclusion': 11387, 'siddown': 11388, 'blazing': 11389, 'nglass': 11390, 'natives': 11391, 'driven': 11392, 'astride': 11393, 'donny': 11394, 'gun': 11395, 'shielding': 11396, 'ncrying': 11397, 'pathetic': 11398, 'breathes': 11399, 'tried': 11400, 'importance': 11401, 'appendage': 11402, 'lousy': 11403, 'bullpen': 11404, 'castro': 11405, 'stevie': 11406, 'multiply': 11407, 'juggernaut': 11408, 'cabinet': 11409, 'brooding': 11410, 'tattoos': 11411, 'triumphantly': 11412, 'plains': 11413, 'considerably': 11414, 'rescued': 11415, 'safety': 11416, 'campus': 11417, 'earn': 11418, 'gleams': 11419, 'ain': 11420, 'tellegio': 11421, 'nuke': 11422, 'hunters': 11423, 'fenceline': 11424, 'sandals': 11425, 'elevators': 11426, 'sputters': 11427, 'spotter': 11428, 'bruises': 11429, 'suspension': 11430, 'highway': 11431, 'scum': 11432, '2002': 11433, 'begin': 11434, 'nahead': 11435, 'sat': 11436, 'resuming': 11437, 'teenagers': 11438, 'nreverse': 11439, 'biltmore': 11440, 'jail': 11441, 'reserve': 11442, 'notice': 11443, 'click': 11444, 'jeans': 11445, 'doubt': 11446, 'riot': 11447, 'cracks': 11448, 'ultrasound': 11449, 'dated': 11450, 'probes': 11451, 'trailing': 11452, 'retirement': 11453, 'stationed': 11454, 'entranced': 11455, 'n92': 11456, 'ego': 11457, 'affect': 11458, 'assassins': 11459, 'wizard': 11460, 'executives': 11461, 'canto': 11462, 'illustrated': 11463, 'huckleberry': 11464, 'strobing': 11465, 'favorite': 11466, 'stoned': 11467, 'famous': 11468, 'ghouls': 11469, 'wades': 11470, 'cheque': 11471, '120': 11472, '261': 11473, 'slouches': 11474, 'territory': 11475, 'sweatshirt': 11476, 'physician': 11477, 'tracy': 11478, 'sheep': 11479, 'occurred': 11480, 'spite': 11481, 'sole': 11482, 'balawi': 11483, 'urgency': 11484, 'political': 11485, 'ended': 11486, 'nliving': 11487, 
'yawns': 11488, 'n199': 11489, 'shape': 11490, 'swab': 11491, 'clutched': 11492, 'mouths': 11493, 'icepick': 11494, 'meteor': 11495, 'gear': 11496, 'ntrack': 11497, 'smarter': 11498, 'kathy': 11499, 'flip': 11500, 'pedals': 11501, 'nbodhi': 11502, 'moved': 11503, 'eric': 11504, 'janek': 11505, 'n74': 11506, 'transformed': 11507, '262': 11508, 'aisle': 11509, 'employee': 11510, 'steals': 11511, 'laid': 11512, 'entryway': 11513, 'clad': 11514, 'wa': 11515, 'loosen': 11516, 'confused': 11517, 'efficiently': 11518, 'warms': 11519, 'anthony': 11520, 'seated': 11521, 'stealthily': 11522, 'leech': 11523, '285': 11524, 'trouser': 11525, 'unlikely': 11526, 'pondicherry': 11527, 'nforth': 11528, 'warhead': 11529, 'boys': 11530, 'floor': 11531, 'cunningham': 11532, 'nno': 11533, 'nandha': 11534, 'rabbits': 11535, 'nboxes': 11536, 'nmiss': 11537, 'glance': 11538, 'pyramids': 11539, 'honesty': 11540, 'resumes': 11541, 'fur': 11542, 'structure': 11543, 'throng': 11544, 'skid': 11545, 'upsetting': 11546, 'ndressed': 11547, 'volunteered': 11548, 'hopelessly': 11549, 'consider': 11550, 'letty': 11551, 'n113': 11552, 'ncatches': 11553, 'nevermind': 11554, "'posed": 11555, 'lashes': 11556, 'seaweed': 11557, 'ticking': 11558, 'jabba': 11559, 'riots': 11560, 'nrest': 11561, 'midnight': 11562, 'equally': 11563, 'gaff': 11564, 'allows': 11565, 'assassin': 11566, 'improved': 11567, 'complies': 11568, '1960': 11569, 'armpits': 11570, 'encouraging': 11571, 'pawnshop': 11572, 'naturally': 11573, 'agreement': 11574, 'balloons': 11575, 'ventilation': 11576, 'rows': 11577, 'realistic': 11578, 'starving': 11579, 'campfire': 11580, 'british': 11581, 'spirals': 11582, 'schikaneder': 11583, 'gut': 11584, 'dresser': 11585, 'chicago': 11586, 'struggle': 11587, 'insight': 11588, 'cream': 11589, 'detached': 11590, 'vegetables': 11591, 'scheme': 11592, 'cooler': 11593, 'grappling': 11594, 'senators': 11595, 'delicately': 11596, 'jolts': 11597, 'sense': 11598, 'honors': 11599, 'buttoning': 11600, 
'detachment': 11601, 'rethink': 11602, 'excuse': 11603, 'react': 11604, 'wary': 11605, 'scruffy': 11606, 'stroking': 11607, 'grumbles': 11608, 'filling': 11609, 'plummeting': 11610, 'nkay': 11611, 'wanna': 11612, 'emplacement': 11613, 'outrage': 11614, 'emblem': 11615, 'bears': 11616, 'slip': 11617, 'equation': 11618, 'player': 11619, 'swap': 11620, "'ve": 11621, 'typewriter': 11622, 'phasers': 11623, 'surges': 11624, 'raid': 11625, 'professionals': 11626, 'staff': 11627, 'freshman': 11628, 'dangle': 11629, 'gave': 11630, 'nwhy': 11631, 'pay': 11632, 'gav': 11633, '119': 11634, 'calvin': 11635, 'romance': 11636, 'alexis': 11637, 'belongings': 11638, 'blinks': 11639, 'cuts': 11640, '34': 11641, 'zapruder': 11642, 'plutt': 11643, 'splattered': 11644, 'interrupts': 11645, 'jolt': 11646, '199': 11647, 'manuscripts': 11648, 'whatta': 11649, 'thoughts': 11650, 'nsebastian': 11651, 'nsquirt': 11652, 'runners': 11653, 'l': 11654, 'doe': 11655, 'teenager': 11656, 'jason': 11657, 'nraises': 11658, 'willful': 11659, 'airlines': 11660, 'storyboards': 11661, 'lids': 11662, 'angle': 11663, 'askin': 11664, 'rhino': 11665, 'plumette': 11666, 'plunger': 11667, 'monitors': 11668, 'sahar': 11669, 'casey': 11670, 'materials': 11671, 'cheer': 11672, 'dockside': 11673, 'greenhouse': 11674, 'israel': 11675, 'turkle': 11676, 'detention': 11677, 'pure': 11678, 'wry': 11679, 'carton': 11680, 'radiation': 11681, 'awful': 11682, 'muttering': 11683, 'obstacle': 11684, 'cessna': 11685, 'building': 11686, 'sides': 11687, 'shutters': 11688, 'dominated': 11689, 'spoons': 11690, 'harkins': 11691, 'gurgles': 11692, 'hoover': 11693, 'challenging': 11694, 'ntomica': 11695, 'wafting': 11696, 'segments': 11697, 'leo': 11698, 'belltower': 11699, 'hatches': 11700, 'motherfuckers': 11701, 'pursuer': 11702, 'generator': 11703, 'nskin': 11704, 'ncorner': 11705, 'permanent': 11706, 'destroy': 11707, 'emaciated': 11708, 'spoke': 11709, 'east': 11710, 'robbers': 11711, 'ummm': 11712, '528': 11713, 'bombay': 
11714, 'president': 11715, 'agreed': 11716, 'goes': 11717, 'nbrush': 11718, 'perez': 11719, 'npressure': 11720, 'polyester': 11721, 'functioning': 11722, 'touchdown': 11723, 'felt': 11724, 'tall': 11725, 'whirl': 11726, 'hoped': 11727, 'explosives': 11728, 'nwide': 11729, 'climbs': 11730, 'powers': 11731, 'scary': 11732, 'up': 11733, 'n72': 11734, 'vehicles': 11735, 'ricardo': 11736, 'helluva': 11737, 'doubtful': 11738, 'developed': 11739, 'cherry': 11740, 'emerged': 11741, 'unpleasant': 11742, 'lemme': 11743, 'carving': 11744, 'preparations': 11745, 'planning': 11746, 'cycle': 11747, 'interstate': 11748, 'hbo': 11749, 'date': 11750, 'humorless': 11751, 'ncar': 11752, 'nroach': 11753, 'draped': 11754, 'world': 11755, 'filth': 11756, 'wiser': 11757, 'cooling': 11758, 'shrugging': 11759, "ni'll": 11760, 'cathy': 11761, 'adjust': 11762, 'stable': 11763, 'lijek': 11764, 'fare': 11765, 'lately': 11766, 'hoo': 11767, 'pies': 11768, 'swirls': 11769, 'n55': 11770, 'simulation': 11771, 'towed': 11772, "can't": 11773, 'miss': 11774, 'baton': 11775, 'ann': 11776, 'wings': 11777, 'clangs': 11778, 'write': 11779, 'nenters': 11780, 'memos': 11781, 'ninja': 11782, 'wolf': 11783, 'snaps': 11784, 'discharge': 11785, 'von': 11786, 'rent': 11787, 'seemingly': 11788, 'evaluate': 11789, 'dip': 11790, 'cavalier': 11791, 'docked': 11792, 'pausing': 11793, 'charging': 11794, 'scissor': 11795, 'struck': 11796, 'guidance': 11797, 'costumed': 11798, 'esmarelda': 11799, 'ncarrying': 11800, 'counterpart': 11801, 'guest': 11802, 'xac': 11803, 'hitchhiker': 11804, 'coughing': 11805, 'profusely': 11806, 'audition': 11807, 'n46': 11808, 'grabs': 11809, 'prepared': 11810, 'carrying': 11811, 'archbold': 11812, 'form': 11813, 'understand': 11814, "'11": 11815, 'li': 11816, 'frenzied': 11817, 'note': 11818, 'jigsaw': 11819, 'toweling': 11820, 'planes': 11821, 'selvam': 11822, 'countries': 11823, 'gaila': 11824, 'terrific': 11825, 'mumbling': 11826, 'learned': 11827, 'nstairs': 11828, 'viper': 11829, 
'fold': 11830, 'marbles': 11831, 'nco': 11832, 'mammy': 11833, 'finally': 11834, 'kitchen': 11835, 'plant': 11836, 'war': 11837, 'contraption': 11838, 'wondrous': 11839, 'spock': 11840, 'greedo': 11841, 'mentally': 11842, 'roosevelt': 11843, 'screening': 11844, 'flat': 11845, 'cluttered': 11846, 'gunports': 11847, 'anchorman': 11848, 'devastated': 11849, 'medal': 11850, 'january': 11851, 'rules': 11852, 'nbrakes': 11853, 'skilled': 11854, 'communication': 11855, 'boz': 11856, 'rosie': 11857, 'nbunker': 11858, 'learning': 11859, 'print': 11860, 'arena': 11861, 'disagree': 11862, 'repairs': 11863, 'satchel': 11864, 'bleachers': 11865, 'spirit': 11866, 'pellets': 11867, 'detector': 11868, 'confusion': 11869, 'girl': 11870, 'caramels': 11871, 'treetop': 11872, 'brightening': 11873, 'worst': 11874, 'welding': 11875, 'overall': 11876, 'traveled': 11877, 'oxford': 11878, 'hubble': 11879, 'heartily': 11880, 'utah': 11881, 'austere': 11882, 'caressing': 11883, 'whoop': 11884, 'compartment': 11885, '30': 11886, 'd': 11887, 'employment': 11888, 'kilgore': 11889, 'fucking': 11890, 'nup': 11891, 'listed': 11892, 'thwips': 11893, 'history': 11894, 'quietly': 11895, 'stayin': 11896, 'stomps': 11897, 'dale': 11898, "what's": 11899, 'peer': 11900, 'hau': 11901, 'yawp': 11902, 'nrunning': 11903, 'medals': 11904, 'salzburg': 11905, 'choices': 11906, 'relationships': 11907, 'transform': 11908, '491': 11909, 'underneath': 11910, 'impressive': 11911, 'gennero': 11912, 'massages': 11913, 'wookiee': 11914, 'vest': 11915, 'culture': 11916, 'nerve': 11917, 'nevertheless': 11918, 'moments': 11919, 'nhard': 11920, 'solvent': 11921, 'melody': 11922, 'strangers': 11923, 'taillights': 11924, 'patch': 11925, 'bully': 11926, 'management': 11927, 'trilby': 11928, 'bridesmaid': 11929, 'charm': 11930, 'crevice': 11931, 'chained': 11932, 'excited': 11933, 'pudgy': 11934, 'nit': 11935, 'improvised': 11936, 'circuit': 11937, 'drag': 11938, 'pitbull': 11939, 'houseguests': 11940, 'n176': 11941, 'plug': 
11942, 'elois': 11943, 'insured': 11944, 'wedding': 11945, 'nholds': 11946, 'weakened': 11947, 'soars': 11948, 'energetic': 11949, 'nuntil': 11950, 'extremely': 11951, 'said': 11952, 'abortion': 11953, 'ka': 11954, 'wisdom': 11955, 'cassidy': 11956, 'moloch': 11957, 'slyly': 11958, 'nwe': 11959, 'occupied': 11960, 'pakistani': 11961, 'nthe': 11962, 'as': 11963, 'whoops': 11964, 'pros': 11965, 'ntowards': 11966, 'numbered': 11967, 'restrain': 11968, 'foxhole': 11969, '1947': 11970, 'piece': 11971, 'n80': 11972, 'lyndon': 11973, 'cows': 11974, 'yeah': 11975, 'coast': 11976, 'citizen': 11977, 'proves': 11978, 'listlessly': 11979, 'flashlight': 11980, 'noff': 11981, 'koreans': 11982, 'shovel': 11983, 'mandingo': 11984, 'bandits': 11985, '70': 11986, 'clambers': 11987, 'stalled': 11988, 'extraction': 11989, 'dawns': 11990, 'futuristic': 11991, 'darn': 11992, 'stucco': 11993, 'nearby': 11994, 'carts': 11995, 'pounce': 11996, 'tyler': 11997, 'soaked': 11998, 'hobby': 11999, '244': 12000, 'possibility': 12001, 'xae': 12002, 'depths': 12003, 'supports': 12004, 'hoss': 12005, 'inception': 12006, 'disarray': 12007, 'related': 12008, 'may': 12009, 'deer': 12010, 'gimme': 12011, 'were': 12012, 'seriously': 12013, 'chisel': 12014, 'clumsy': 12015, 'xa8re': 12016, 'tipsy': 12017, 'activating': 12018, 'recon': 12019, 'blackhawk': 12020, '25': 12021, 'portable': 12022, 'riding': 12023, "'em": 12024, 'shines': 12025, '1945': 12026, 'chested': 12027, 'kylo': 12028, 'sniffing': 12029, 'squatting': 12030, 'daddy': 12031, 'browsing': 12032, 'cooking': 12033, 'approvingly': 12034, 'vines': 12035, '05': 12036, 'yaar': 12037, 'pointed': 12038, 'duh': 12039, 'soda': 12040, 'flecks': 12041, 'nplace': 12042, 'whistles': 12043, 'rim': 12044, 'destruction': 12045, 'chef': 12046, 'fourteen': 12047, 'silencing': 12048, 'old': 12049, 'xerox': 12050, 'nempty': 12051, 'colliding': 12052, 'repetition': 12053, 'waits': 12054, 'memphis': 12055, 'dna': 12056, 'outrun': 12057, 'nthose': 12058, 'bombing': 
12059, 'madison': 12060, 'girls': 12061, 'tommy': 12062, 'publication': 12063, 'protestors': 12064, '226': 12065, 'else': 12066, 'flinging': 12067, 'mechanical': 12068, 'tends': 12069, 'animation': 12070, '2003': 12071, 'tend': 12072, 'spotless': 12073, 'snowman': 12074, 'little': 12075, 'bang': 12076, 'become': 12077, 'straw': 12078, 'glow': 12079, 'grimace': 12080, 'unwraps': 12081, 'puzzlement': 12082, 'gray': 12083, 'after': 12084, 'calculating': 12085, 'noah': 12086, 'claps': 12087, 'guide': 12088, 'graduates': 12089, 'galveston': 12090, 'tongues': 12091, 'cb': 12092, 'wasting': 12093, 'monica': 12094, 'carves': 12095, 'corruption': 12096, 'nreads': 12097, 'arvind': 12098, 'jawa': 12099, 'n43': 12100, 'recoil': 12101, '03': 12102, 'functions': 12103, 'nboys': 12104, 'also': 12105, 'mine': 12106, 'watching': 12107, 'pleasantly': 12108, 'peering': 12109, 'relief': 12110, 'insult': 12111, 'sync': 12112, 'maintains': 12113, 'tornado': 12114, 'playfully': 12115, 'otherwise': 12116, 'sometime': 12117, 'curious': 12118, 'philippa': 12119, 'clearing': 12120, 'unkempt': 12121, 'stacked': 12122, 'tin': 12123, 'spokesman': 12124, 'irresponsible': 12125, 'included': 12126, 'troubled': 12127, 'bursting': 12128, 'arts': 12129, 'sauce': 12130, 'rage': 12131, 'seattle': 12132, 'hendricks': 12133, 'civil': 12134, 'toothless': 12135, 'laverie': 12136, 'suppose': 12137, 'bubba': 12138, 'continue': 12139, 'faces': 12140, 'formal': 12141, 'gifted': 12142, 'newspaper': 12143, 'parker': 12144, 'phoenix': 12145, 'gowns': 12146, 'motherfuckin': 12147, 'n134': 12148, 'payment': 12149, 'chute': 12150, 'interviewing': 12151, 'liar': 12152, 'nstreet': 12153, 'haystack': 12154, 'smouldering': 12155, 'gennaro': 12156, 'enjoy': 12157, 'unblinking': 12158, 'personally': 12159, 'catering': 12160, 'surprisingly': 12161, 'horns': 12162, 'hooper': 12163, 'bustling': 12164, 'massively': 12165, 'sloppy': 12166, 'expanse': 12167, 'baseman': 12168, 'petersburg': 12169, 'starched': 12170, 'chronics': 
12171, 'churn': 12172, 'foremost': 12173, 'freeze': 12174, 'days': 12175, 'mask': 12176, 'npresident': 12177, 'garish': 12178, 'exodus': 12179, 'maneuvering': 12180, 'early': 12181, 'nmakes': 12182, 'dexterity': 12183, 'hammers': 12184, 'flash': 12185, 'ngazzo': 12186, 'now': 12187, 'brennon': 12188, 'hut': 12189, 'diesels': 12190, 'luxurious': 12191, 'selling': 12192, 'heading': 12193, 'rooster': 12194, 'tires': 12195, 'light': 12196, 'dire': 12197, 'remaining': 12198, 'dorm': 12199, 'belching': 12200, 'talent': 12201, 'unmarked': 12202, 'trumpet': 12203, 'averts': 12204, 'locked': 12205, 'j': 12206, 'north': 12207, 'healthy': 12208, 'eternity': 12209, 'propels': 12210, 'gas': 12211, 'dre': 12212, 'found': 12213, 'addressed': 12214, 'hurtling': 12215, 'spies': 12216, 'billfold': 12217, 'impatient': 12218, 'spy': 12219, 'wails': 12220, 'there': 12221, 'journalists': 12222, 'keptin': 12223, 'survive': 12224, 'spare': 12225, 'members': 12226, 'data': 12227, 'mrs': 12228, 'roofs': 12229, 'tack': 12230, 'liberty': 12231, 'jab': 12232, 'worlds': 12233, 'submarine': 12234, 'station': 12235, 'houdini': 12236, 'clasps': 12237, 'asking': 12238, 'bebe': 12239, 'recites': 12240, 'responsibility': 12241, 'protective': 12242, 'fastest': 12243, 'coldly': 12244, 'geyser': 12245, 'starry': 12246, 'led': 12247, 'underbelly': 12248, 'causing': 12249, 'something': 12250, 'grimacing': 12251, 'antique': 12252, 'altered': 12253, 'nhuh': 12254, 'chickasaw': 12255, 'blah': 12256, 'katana': 12257, 'flees': 12258, 'allegedly': 12259, 'congested': 12260, 'sandbags': 12261, 'her': 12262, 'magnified': 12263, 'nsmile': 12264, 'dainard': 12265, 'die': 12266, 'sedans': 12267, "haven't": 12268, 'instincts': 12269, 'premises': 12270, 'lewis': 12271, 'subconscious': 12272, 'npearl': 12273, 'designated': 12274, 'reflexively': 12275, 'terrified': 12276, 'piling': 12277, 'senses': 12278, 'bueller': 12279, 'run': 12280, 'consulting': 12281, 'says': 12282, 'blessing': 12283, 'barrels': 12284, 'unscrew': 
12285, 'ace': 12286, 'hyperdrive': 12287, 'collects': 12288, 'courts': 12289, 'precise': 12290, 'entering': 12291, 'tear': 12292, 'seymour': 12293, 'mat': 12294, 'issued': 12295, 'backseat': 12296, 'ascending': 12297, '123': 12298, 'disrespect': 12299, 'battery': 12300, 'hatte': 12301, 'newman': 12302, 'exam': 12303, 'inez': 12304, 'admitted': 12305, 'sullivan': 12306, 'owen': 12307, 'proceeds': 12308, 'by': 12309, 'kit': 12310, '115': 12311, 'nare': 12312, 'sure': 12313, 'connecting': 12314, 'gib': 12315, '198': 12316, 'silhouette': 12317, 'wealth': 12318, 'clink': 12319, '129': 12320, 'husband': 12321, 'nnobody': 12322, 'comedy': 12323, 'public': 12324, 'wives': 12325, 'cognac': 12326, 'ayel': 12327, 'pillowcase': 12328, 'nwalks': 12329, 'society': 12330, 'congregation': 12331, 'jose': 12332, 'things': 12333, 'n64': 12334, 'typed': 12335, 'log': 12336, 'grey': 12337, 'narrowly': 12338, 'deliberately': 12339, 'perfection': 12340, 'classic': 12341, 'pallets': 12342, 'readout': 12343, 'neto': 12344, 'backlit': 12345, 'nwithout': 12346, 'wrought': 12347, 'signing': 12348, 'suppress': 12349, 'melon': 12350, 'announcer': 12351, 'swoops': 12352, 'saint': 12353, 'irons': 12354, 'prospects': 12355, 'vibrates': 12356, 'chain': 12357, 'hire': 12358, 'authorization': 12359, 'dishwasher': 12360, 'hunter': 12361, 'secure': 12362, 'taco': 12363, 'smack': 12364, 'theories': 12365, 'crossword': 12366, 'tiffany': 12367, 'overflow': 12368, 'chrissake': 12369, 'secondary': 12370, 'bound': 12371, 'rhinoceros': 12372, '177': 12373, 'neverywhere': 12374, 'n48': 12375, 'hanson': 12376, 'veins': 12377, 'defendant': 12378, 'freckled': 12379, '15': 12380, 'saito': 12381, '339': 12382, 'fatigues': 12383, 'smith': 12384, 'respective': 12385, 'kicking': 12386, "we're": 12387, 'naaaaaaaaaaaah': 12388, 'charts': 12389, 'revs': 12390, 'oooh': 12391, 'alvy': 12392, 'loudly': 12393, 'sit': 12394, 'deposit': 12395, 'swift': 12396, 'bicentennial': 12397, 'indicates': 12398, 'cab': 12399, 'frisked': 
12400, 'rioters': 12401, 'tremendous': 12402, '242': 12403, 'scattered': 12404, 'experts': 12405, 'solar': 12406, 'promotional': 12407, 'whispered': 12408, 'blues': 12409, 'crests': 12410, 'blame': 12411, 'gong': 12412, 'parks': 12413, 'wasteland': 12414, 'afro': 12415, 'weapons': 12416, 'poets': 12417, 'canceled': 12418, 'jump': 12419, 'these': 12420, 'nmedic': 12421, 'concealed': 12422, 'montagnards': 12423, 'saddle': 12424, 'cared': 12425, 'nblood': 12426, 'dispatcher': 12427, 'hustling': 12428, 'bread': 12429, 'dearest': 12430, 'pulling': 12431, 'vaulting': 12432, 'foyer': 12433, 'engage': 12434, 'gantry': 12435, 'wounds': 12436, 'shaped': 12437, 'nervous': 12438, 'nmatchmaker': 12439, 'los': 12440, 'frisk': 12441, 'junior': 12442, 'fought': 12443, 'transcripts': 12444, 'medicine': 12445, "black'": 12446, 'vents': 12447, 'marx': 12448, 'dotted': 12449, 'nevada': 12450, 'crackle': 12451, 'acknowledging': 12452, 'doomed': 12453, 'sliver': 12454, 'intercepted': 12455, 'haircut': 12456, 'millions': 12457, 'tether': 12458, 'oddly': 12459, 'nwerechicken': 12460, 'lady': 12461, '135': 12462, 'oven': 12463, '164': 12464, 'heavily': 12465, 'ngone': 12466, 'flea': 12467, 'grins': 12468, 'auuu': 12469, 'nmouth': 12470, 'nbruce': 12471, 'what': 12472, 'cutts': 12473, 'southpaw': 12474, 'idling': 12475, 'batcave': 12476, 'or': 12477, 'haul': 12478, 'client': 12479, 'blackness': 12480, 'skywayman': 12481, 'loudspeaker': 12482, 'languages': 12483, 'appalled': 12484, 'gruber': 12485, 'ngrabs': 12486, 'remnants': 12487, 'duster': 12488, 'onslaught': 12489, 'crib': 12490, 'farms': 12491, 'evasive': 12492, 'telex': 12493, 'bleeds': 12494, '118': 12495, 'el': 12496, 'reviews': 12497, 'promptly': 12498, 'archive': 12499, 'compared': 12500, 'kenobi': 12501, 'refilling': 12502, 'living': 12503, 'saliva': 12504, 'sizes': 12505, 'ntad': 12506, 'permission': 12507, 'entirely': 12508, 'thunders': 12509, 'recorded': 12510, 'teapot': 12511, 'crow': 12512, 'gratitude': 12513, 'guardsmen': 
12514, 'mertin': 12515, 'probe': 12516, 'namber': 12517, 'distinctly': 12518, 'rehearsal': 12519, 'lander': 12520, 'kiddin': 12521, 'palpable': 12522, 'arming': 12523, 'bit': 12524, 'cue': 12525, 'warchild': 12526, 'came': 12527, 'rebels': 12528, 'seaplane': 12529, 'harland': 12530, 'faction': 12531, 'nlaurel': 12532, 'taxis': 12533, 'ndissolve': 12534, 'stunning': 12535, 'memorial': 12536, 'protests': 12537, 'n51': 12538, 'avenge': 12539, 'ncrush': 12540, 'doubled': 12541, 'afraid': 12542, 'albert': 12543, 'musician': 12544, 'interfere': 12545, 'mayday': 12546, 'surf': 12547, 'suicide': 12548, 'corto': 12549, 'initial': 12550, 'shrunken': 12551, '263': 12552, 'dune': 12553, 'ncarolyn': 12554, '22': 12555, 'stressed': 12556, 'blindfold': 12557, 'itself': 12558, 'origin': 12559, 'ciera': 12560, 'changing': 12561, 'arrives': 12562, 'sneers': 12563, 'jacuzzi': 12564, 'hisses': 12565, 'nfighter': 12566, 'dialed': 12567, 'feels': 12568, 'clockwork': 12569, 'purses': 12570, 'boing': 12571, 'siegel': 12572, 'locates': 12573, 'shortly': 12574, 'giggling': 12575, '100': 12576, 'juvenile': 12577, 'manic': 12578, 'parlor': 12579, 'handful': 12580, 'increase': 12581, 'sour': 12582, 'intensely': 12583, 'ncamera': 12584, 'cunning': 12585, 'crossing': 12586, 'declines': 12587, 'equal': 12588, 'scarred': 12589, 'fingertip': 12590, 'instructed': 12591, 'nhauk': 12592, 'faints': 12593, 'tensely': 12594, 'plans': 12595, 'chronic': 12596, 'masai': 12597, 'courtesy': 12598, 'instinctively': 12599, '1963': 12600, 'servant': 12601, 'ntracking': 12602, 'covering': 12603, 'nstaring': 12604, 'associates': 12605, 'am': 12606, 'necto': 12607, 'barish': 12608, 'npeter': 12609, 'burnham': 12610, 'kneel': 12611, 'q': 12612, 'companions': 12613, 'deals': 12614, 'rep': 12615, 'boards': 12616, 'describes': 12617, 'explodes': 12618, 'doorways': 12619, 'bickle': 12620, 'accident': 12621, 'boost': 12622, 'decoy': 12623, '168': 12624, 'nnight': 12625, 'grimly': 12626, 'maya': 12627, 'physics': 12628, 
'caller': 12629, 'detonator': 12630, 'consumer': 12631, 'nburt': 12632, 'thank': 12633, 'shower': 12634, 'bittersweet': 12635, 'exists': 12636, 'tents': 12637, 'smoking': 12638, 'lion': 12639, 'holland': 12640, 'poisonous': 12641, 'n155': 12642, 'institution': 12643, 'opens': 12644, 'hearted': 12645, 'poker': 12646, 'punched': 12647, 'adults': 12648, 'jellyman': 12649, 'shreds': 12650, 'obliterated': 12651, 'cup': 12652, 'spices': 12653, 'ncher': 12654, 'editors': 12655, 'shipment': 12656, 'luminous': 12657, 'someday': 12658, 'suited': 12659, 'cradle': 12660, 'potatoes': 12661, 'royal': 12662, 'pirate': 12663, 'mulvaney': 12664, 'series': 12665, 'feat': 12666, 'pitched': 12667, 'windward': 12668, 'nsylvia': 12669, 'puck': 12670, 'awash': 12671, 'admiring': 12672, '183': 12673, 'bullshit': 12674, 'attire': 12675, 'nchum': 12676, 'oi': 12677, 'paddles': 12678, 'shave': 12679, 'ncontinued': 12680, 'ask': 12681, 'reflect': 12682, 'operates': 12683, 'heavier': 12684, "'61": 12685, '1790': 12686, 'departing': 12687, 'pimp': 12688, 'arrive': 12689, 'buttocks': 12690, 'class': 12691, 'aim': 12692, 'vw': 12693, 'glides': 12694, 'watchmen': 12695, 'reverse': 12696, 'nmoving': 12697, 'narcotics': 12698, 'commotion': 12699, 'shrapnel': 12700, 'microscope': 12701, 'arrangement': 12702, 'population': 12703, 'battle': 12704, 'spaceman': 12705, 'darla': 12706, 'fisherman': 12707, 'adventures': 12708, 'curl': 12709, 'nbrowning': 12710, 'ntree': 12711, 'expand': 12712, 'porch': 12713, 'yanked': 12714, 'tail': 12715, 'ricochet': 12716, 'fong': 12717, 'grip': 12718, 'farther': 12719, 'resists': 12720, 'monique': 12721, 'assembly': 12722, 'natural': 12723, 'honking': 12724, 'bunks': 12725, 'pitching': 12726, 'ovens': 12727, 'learn': 12728, 'ken': 12729, 'bu': 12730, 'watts': 12731, 'poorly': 12732, 'dealin': 12733, 'purse': 12734, 'trio': 12735, 'assortment': 12736, 'glasses': 12737, 'nslowly': 12738, 'walled': 12739, 'feverishly': 12740, 'loser': 12741, 'tangle': 12742, 'minimum': 
12743, 'ruiz': 12744, 'journalist': 12745, 'boyz': 12746, 'flops': 12747, 'treatment': 12748, 'sparks': 12749, 'him': 12750, 'weave': 12751, 'asshole': 12752, 'nanother': 12753, 'uptight': 12754, 'varying': 12755, 'truman': 12756, 'slats': 12757, 'njacket': 12758, 'flapping': 12759, 'rewinds': 12760, 'mp': 12761, 'leads': 12762, 'cadillac': 12763, 'tale': 12764, 'hudson': 12765, 'poured': 12766, 'needing': 12767, 'donnas': 12768, 'sidewalk': 12769, 'overload': 12770, 'sleeping': 12771, 'provided': 12772, '124': 12773, 'gossip': 12774, 'residential': 12775, 'registered': 12776, 'din': 12777, 'warning': 12778, 'drone': 12779, 'shatter': 12780, 'chords': 12781, 'deepest': 12782, 'npoints': 12783, 'ribs': 12784, 'tasu': 12785, 'nany': 12786, 'mm': 12787, 'evade': 12788, 'individual': 12789, 'unscrews': 12790, 'specialist': 12791, 'n170': 12792, 'bhai': 12793, 'cured': 12794, 'rehearsing': 12795, 'flooding': 12796, 'archives': 12797, '54': 12798, 'nreaches': 12799, 'few': 12800, 'twirl': 12801, 'mattress': 12802, 'froeling': 12803, 'levers': 12804, 'surfaces': 12805, 'chime': 12806, 'knocked': 12807, 'use': 12808, 'carriages': 12809, 'racquet': 12810, 'wheels': 12811, 'steed': 12812, 'duel': 12813, 'chien': 12814, 'coils': 12815, 'engagement': 12816, 'scramble': 12817, 'vehicle': 12818, 'radar': 12819, 'cage': 12820, 'sheba': 12821, 'snotlout': 12822, 'slips': 12823, 'galaxy': 12824, 'humming': 12825, 'claire': 12826, 'laughter': 12827, 'bars': 12828, "'62": 12829, 'rag': 12830, 'powdered': 12831, 'knot': 12832, 'dealey': 12833, '127': 12834, 'cannery': 12835, 'slipped': 12836, 'tupperware': 12837, '52': 12838, 'nsabatini': 12839, 'duck': 12840, 'chemistry': 12841, 'rating': 12842, 'printer': 12843, 'staked': 12844, 'nlate': 12845, 'gorilla': 12846, 'topple': 12847, 'nput': 12848, 'remark': 12849, 'malone': 12850, 'catapult': 12851, 'dutch': 12852, 'bleach': 12853, 'wu': 12854, 'paws': 12855, 'mantini': 12856, 'specially': 12857, 'jared': 12858, 'shockwave': 12859, 
'biker': 12860, 'based': 12861, 'hello': 12862, 'drones': 12863, 'snowy': 12864, 'everyday': 12865, 'engulfed': 12866, 'stored': 12867, 'entered': 12868, 'manual': 12869, 'digging': 12870, 'rotating': 12871, 'hopper': 12872, 'herself': 12873, 'mi': 12874, 'newsman': 12875, 'headsets': 12876, 'license': 12877, 'hayes': 12878, 'andrews': 12879, 'mountains': 12880, 'transit': 12881, 'yelps': 12882, 'trophy': 12883, 'homo': 12884, 'home': 12885, 'regains': 12886, 'nostalgia': 12887, 'eaten': 12888, 'nightmare': 12889, 'considers': 12890, 'traded': 12891, 'crab': 12892, 'gallop': 12893, 'friggin': 12894, 'stark': 12895, 'afloat': 12896, 'mouse': 12897, 'standin': 12898, 'convenience': 12899, 'tequila': 12900, 'adam': 12901, 'nnemo': 12902, 'proposal': 12903, 'dialog': 12904, 'flanking': 12905, 'unconcerned': 12906, 'nthen': 12907, 'evidently': 12908, 'tie': 12909, 'handed': 12910, 'propelled': 12911, 'about': 12912, 'table': 12913, 'cleaned': 12914, 'gee': 12915, 'cans': 12916, 'complaint': 12917, 'ksm': 12918, 'pops': 12919, 'section': 12920, 'teetering': 12921, 'ops': 12922, 'clipping': 12923, '174': 12924, 'loony': 12925, 'rinse': 12926, 'disengage': 12927, 'hurled': 12928, 'hatchet': 12929, 'independent': 12930, 'mg': 12931, 'vantage': 12932, 'inserts': 12933, 'whirls': 12934, 'how': 12935, 'molitor': 12936, 'alarmed': 12937, 'civilization': 12938, 'violence': 12939, 'photographers': 12940, 'combination': 12941, 'fiddle': 12942, 'nagain': 12943, 'heaven': 12944, 'lowers': 12945, 'tangled': 12946, 'make': 12947, 'n54': 12948, 'gideon': 12949, 'nfar': 12950, 'einstein': 12951, 'honks': 12952, 'sells': 12953, 'pin': 12954, 'noffice': 12955, 'bench': 12956, 'towards': 12957, 'dresses': 12958, 'graton': 12959, 'mouth': 12960, 'jackie': 12961, 'democratic': 12962, 'dietz': 12963, 'peninsula': 12964, 'berserk': 12965, 'rails': 12966, 'nbill': 12967, 'objection': 12968, 'garrison': 12969, 'upward': 12970, 'laps': 12971, 'believing': 12972, 'replaced': 12973, 'waste': 12974, 
'assigned': 12975, 'rule': 12976, 'howard': 12977, 'nweirdo': 12978, 'occasional': 12979, '324': 12980, 'missing': 12981, 'omega': 12982, 'sting': 12983, 'cocoons': 12984, 'miracle': 12985, 'melted': 12986, 'chopsticks': 12987, 'detonation': 12988, 'ho': 12989, 'lipstick': 12990, 'focus': 12991, 'regards': 12992, 'glassy': 12993, 'eastchester': 12994, 'nwhat': 12995, 'mulligan': 12996, 'seedy': 12997, 'alfonse': 12998, 'ngent': 12999, 'chauffeur': 13000, 'blocking': 13001, 'avail': 13002, 'u2': 13003, 'squid': 13004, 'cri': 13005, 'puffs': 13006, 'condoms': 13007, 'har': 13008, 'commission': 13009, 'reactor': 13010, 'nright': 13011, 'henriques': 13012, '60s': 13013, 'jets': 13014, 'insanely': 13015, 'n': 13016, 'flirting': 13017, 'logan': 13018, 'empties': 13019, 'buckley': 13020, 'advise': 13021, 'standby': 13022, 'outdoor': 13023, 'display': 13024, 'irth': 13025, 'drums': 13026, 'congratulate': 13027, '321': 13028, 'stress': 13029, 'maurice': 13030, 'aaron': 13031, 'crucifix': 13032, 'brand': 13033, 'bolt': 13034, 'ray': 13035, 'blacks': 13036, 'honored': 13037, 'inward': 13038, 'mobile': 13039, 'spewing': 13040, 'outfit': 13041, 'perimeter': 13042, 'nsecret': 13043, 'railroad': 13044, 'kelvin': 13045, 'hauling': 13046, 'why': 13047, 'can': 13048, 'jon': 13049, 'aged': 13050, 'slump': 13051, 'hid': 13052, 'pipes': 13053, 'trackers': 13054, 'explained': 13055, 'edition': 13056, 'squeezes': 13057, 'again': 13058, 'someone': 13059, "i'd": 13060, 'overhang': 13061, 'dreaming': 13062, 'batter': 13063, 'torn': 13064, 'zoe': 13065, 'n195': 13066, 'namsterdam': 13067, 'ye': 13068, 'then': 13069, '234': 13070, 'suitor': 13071, 'tekka': 13072, 'lapping': 13073, 'hollis': 13074, 'nalone': 13075, 'noticing': 13076, 'connect': 13077, 'expose': 13078, 'slim': 13079, 'orchestra': 13080, 'horrified': 13081, 'nplaying': 13082, 'mound': 13083, '1978': 13084, 'ncomputer': 13085, 'cora': 13086, 'rays': 13087, "'est": 13088, 'nknox': 13089, 'engrossed': 13090, 'alongside': 13091, 
'chats': 13092, 'tux': 13093, 'demonstrate': 13094, 'sam': 13095, 'nslide': 13096, '1969': 13097, 'jerks': 13098, 'withdraw': 13099, 'lamott': 13100, 'ngennaro': 13101, 'grubby': 13102, 'brig': 13103, 'unrecognizable': 13104, 'canyon': 13105, 'topic': 13106, 'nhelicopter': 13107, 'durham': 13108, 'n196': 13109, 'precisely': 13110, 'pritchard': 13111, 'accents': 13112, 'earnestly': 13113, 'characters': 13114, 'housewife': 13115, 'college': 13116, 'nowhere': 13117, 'stealing': 13118, 'chancery': 13119, 'fargo': 13120, 'scoop': 13121, 'nhun': 13122, 'atmosphere': 13123, 'malcolm': 13124, 'ink': 13125, 'corner': 13126, 'briefly': 13127, 'sank': 13128, 'inviting': 13129, 'specimens': 13130, 'guatemala': 13131, 'nnervous': 13132, 'feebly': 13133, 'ash': 13134, 'sailboat': 13135, 'gene': 13136, 'affects': 13137, 'similar': 13138, 'proper': 13139, 'nnear': 13140, 'barnum': 13141, 'illuminate': 13142, 'conflict': 13143, 'tasting': 13144, 'catcalls': 13145, 'thinly': 13146, 'cake': 13147, 'coulda': 13148, 'decade': 13149, 'star': 13150, 'current': 13151, 'richie': 13152, 'brewery': 13153, 'remind': 13154, 'napier': 13155, 'extraordinary': 13156, 'soo': 13157, 'telephoto': 13158, 'nthing': 13159, 'noil': 13160, 'nfalling': 13161, 'courthouse': 13162, 'fantasy': 13163, 'attacker': 13164, 'mockingly': 13165, 'baggy': 13166, 'fort': 13167, 'rides': 13168, 'enterprises': 13169, 'helped': 13170, 'carl': 13171, 'presser': 13172, 'opinion': 13173, 'claim': 13174, 'discover': 13175, '106': 13176, 'federal': 13177, 'leavin': 13178, 'quivers': 13179, 'mcluhan': 13180, 'organisms': 13181, 'ralph': 13182, 'upstairs': 13183, 'tiered': 13184, 'ntoo': 13185, 'signs': 13186, 'nmy': 13187, 'roar': 13188, 'spraying': 13189, 'frenchman': 13190, 'peak': 13191, 'nwatching': 13192, 'tara': 13193, 'indicated': 13194, 'supposed': 13195, 'yours': 13196, 'wander': 13197, 'fences': 13198, 'hickey': 13199, 'sew': 13200, 'christie': 13201, 'quitting': 13202, 'neither': 13203, 'swelling': 13204, 'laws': 
13205, 'v': 13206, 'owns': 13207, 'messing': 13208, 'pat': 13209, 'schatz': 13210, 'n237': 13211, 'winning': 13212, 'sushi': 13213, 'badge': 13214, 'fbi': 13215, 'bewilderment': 13216, 'lottery': 13217, 'request': 13218, 'cemetery': 13219, 'emotionally': 13220, 'salvage': 13221, 'you': 13222, '600': 13223, 'polite': 13224, 'diet': 13225, 'banana': 13226, 'twenties': 13227, 'relevant': 13228, 'downed': 13229, 'madonna': 13230, 'legends': 13231, 'rican': 13232, 'munch': 13233, 'gasp': 13234, 'presume': 13235, 'tub': 13236, 'concur': 13237, 'laced': 13238, 'alluring': 13239, 'directing': 13240, '89': 13241, 'profit': 13242, 'customer': 13243, 'jogs': 13244, 'tool': 13245, 'cord': 13246, 'ncarrie': 13247, 'bonno': 13248, 'singles': 13249, 'builds': 13250, 'aboard': 13251, 'passes': 13252, 'named': 13253, 'verna': 13254, 'ducts': 13255, 'shanghai': 13256, 'parent': 13257, 'benefit': 13258, 'basket': 13259, 'pas': 13260, 'nturtle': 13261, 'contents': 13262, 'bale': 13263, 'takeoff': 13264, 'dignified': 13265, 'murder': 13266, 'filtered': 13267, 'eastman': 13268, 'victims': 13269, 'hushed': 13270, 'cryin': 13271, 'belts': 13272, 'inherited': 13273, 'nakatomi': 13274, 'downhill': 13275, 'valve': 13276, 'genuinely': 13277, 'tex': 13278, 'nhey': 13279, 'enlisted': 13280, "'till": 13281, 'walkin': 13282, 'boyd': 13283, 'bam': 13284, 'transcript': 13285, 'cityscape': 13286, 'unwieldy': 13287, 'nb': 13288, '260': 13289, 'prince': 13290, 'love': 13291, 'escalator': 13292, 'depends': 13293, 'detroit': 13294, 'reaches': 13295, 'supermarket': 13296, 'tinkering': 13297, 'b': 13298, 'finest': 13299, 'showed': 13300, 'infinite': 13301, 'observes': 13302, 'coroner': 13303, 'olds': 13304, 'scarf': 13305, 'hot': 13306, 'rex': 13307, 'pour': 13308, 'dealt': 13309, 'nfamily': 13310, 'wildlife': 13311, 'troy': 13312, 'asian': 13313, 'lurch': 13314, 'necklace': 13315, 'column': 13316, 'vaults': 13317, 'trace': 13318, 'nbrad': 13319, 'downstairs': 13320, 'pilot': 13321, 'styrofoam': 13322, 
'ignorant': 13323, 'accepts': 13324, 'motion': 13325, 'da': 13326, 'firework': 13327, 'sent': 13328, 'savagely': 13329, 'dreamed': 13330, 'quizzically': 13331, 'paul': 13332, 'their': 13333, 'physical': 13334, 'exposure': 13335, 'ingredient': 13336, 'beautifully': 13337, 'exciting': 13338, 'apologize': 13339, 'pulse': 13340, 'meter': 13341, 'nroad': 13342, '280': 13343, 'handset': 13344, 'beans': 13345, 'vista': 13346, 'probably': 13347, 'npush': 13348, 'folk': 13349, 'rare': 13350, 'faisil': 13351, 'lab': 13352, 'drinkin': 13353, 'mounts': 13354, 'naaah': 13355, 'ncome': 13356, 'glue': 13357, 'scrape': 13358, 'cabinets': 13359, 'coke': 13360, 'formula': 13361, 'recognized': 13362, 'elbow': 13363, 'trampled': 13364, 'manually': 13365, 'bounty': 13366, 'spend': 13367, 'sweaty': 13368, 'writhing': 13369, 'editorial': 13370, 'nublar': 13371, 'stifling': 13372, 'eying': 13373, 'email': 13374, 'boardroom': 13375, 'repeat': 13376, 'gargantuan': 13377, 'deadpool': 13378, 'negotiate': 13379, 'disable': 13380, 'outnumbered': 13381, 'teeth': 13382, 'pricks': 13383, 'creed': 13384, 'useful': 13385, 'paranoid': 13386, 'mets': 13387, 'tugs': 13388, 'camp': 13389, 'ndetonator': 13390, 'cultists': 13391, '229': 13392, 'assure': 13393, 'remember': 13394, 'joined': 13395, 'chose': 13396, 'speck': 13397, 'hating': 13398, 'presents': 13399, 'pasted': 13400, 'nspeaks': 13401, 'industry': 13402, 'cocaine': 13403, 'nwhere': 13404, 'nfast': 13405, 'fiercely': 13406, 'total': 13407, 'wheeled': 13408, 'overstreet': 13409, 'tenor': 13410, 'departure': 13411, 'rotate': 13412, 'ga': 13413, 'taut': 13414, 'nbc': 13415, 'nfather': 13416, 'nleft': 13417, 'limbo': 13418, 'stilts': 13419, 'shang': 13420, 'rubble': 13421, 'resisting': 13422, 'vitamin': 13423, 'wits': 13424, 'cardboard': 13425, 'je': 13426, 'carmen': 13427, 'naw': 13428, 'flask': 13429, 'farsi': 13430, '158': 13431, 'nabove': 13432, 'blot': 13433, 'cobbler': 13434, 'rapid': 13435, 'vienna': 13436, 'ranger': 13437, 'peddler': 13438, 
'blip': 13439, 'insistent': 13440, 'continued': 13441, 'seeps': 13442, 'blanches': 13443, 'ish': 13444, 'scottsdale': 13445, 'disdain': 13446, 'heller': 13447, 'noisy': 13448, 'nightgown': 13449, 'constable': 13450, 'cease': 13451, 'satisfied': 13452, 'cruisers': 13453, 'postcard': 13454, 'infant': 13455, 'grinding': 13456, 'drunk': 13457, 'redlicht': 13458, 'pentagon': 13459, 'pedal': 13460, 'dates': 13461, 'hadn': 13462, 'nelse': 13463, 'traced': 13464, 'settling': 13465, 'mild': 13466, 'canal': 13467, 'tap': 13468, 'deflated': 13469, 'rc': 13470, 'wheat': 13471, 'n123': 13472, 'n87': 13473, 'either': 13474, 'dynamic': 13475, 'rented': 13476, 'steaks': 13477, 'bumps': 13478, 'brute': 13479, 'jango': 13480, 'brett': 13481, 'pyro': 13482, 'lose': 13483, 'nto': 13484, 'lush': 13485, 'behavior': 13486, 'expressionless': 13487, 'hovering': 13488, 'nannie': 13489, 'greetings': 13490, 'satellites': 13491, 'splatters': 13492, 'simultaneously': 13493, 'colonial': 13494, 'nnash': 13495, 'studying': 13496, 'trained': 13497, 'ufo': 13498, 'bolted': 13499, 'greedy': 13500, 'gangsters': 13501, 'believed': 13502, 'combs': 13503, 'maude': 13504, 'result': 13505, 'overlooking': 13506, 'mississippi': 13507, 'subpoena': 13508, 'footman': 13509, 'sending': 13510, 'footage': 13511, 'damned': 13512, 'murderer': 13513, 'indicating': 13514, 'nevery': 13515, 'courtroom': 13516, 'nfilled': 13517, 'permeates': 13518, 'escalade': 13519, 'jacobs': 13520, 'recognizable': 13521, 'n44': 13522, 'ahhh': 13523, 'stick': 13524, 'spasm': 13525, 'rush': 13526, 'stafford': 13527, 'poses': 13528, 'lightbulb': 13529, 'recruited': 13530, 'dinosaur': 13531, 'bold': 13532, 'goods': 13533, 'nduke': 13534, 'dialogue': 13535, 'panics': 13536, 'off': 13537, 'occupation': 13538, 'fins': 13539, 'balanced': 13540, 'online': 13541, 'molly': 13542, 'rotates': 13543, 'breaths': 13544, 'jfk': 13545, 'constanze': 13546, 'carriers': 13547, 'npushing': 13548, 'hump': 13549, 'good': 13550, 'nsurprised': 13551, 'lil': 
13552, 'jurors': 13553, 'teasing': 13554, 'smash': 13555, 'rehearsed': 13556, 'tennis': 13557, 'centuries': 13558, 'darker': 13559, 'midwife': 13560, 'mercilessly': 13561, 'awake': 13562, 'sunny': 13563, 'flooded': 13564, 'imperceptibly': 13565, 'outstanding': 13566, '1823': 13567, 'pilots': 13568, 'notepad': 13569, 'oklahoma': 13570, 'freeman': 13571, 'unimpressed': 13572, 'closely': 13573, "'clock": 13574, 'debbie': 13575, 'mouthful': 13576, '272': 13577, 'paco': 13578, 'slot': 13579, 'slum': 13580, 'impulsively': 13581, 'rats': 13582, 'uhura': 13583, 'scaring': 13584, 'val': 13585, 'sitting': 13586, 'worship': 13587, 'exercises': 13588, 'checker': 13589, 'matrix': 13590, 'arquillian': 13591, 'repeated': 13592, 'viking': 13593, 'crewman': 13594, 'tries': 13595, 'italians': 13596, 'hooker': 13597, 'growth': 13598, 'forehead': 13599, 'n61': 13600, 'junked': 13601, 'photos': 13602, 'dressed': 13603, 'storms': 13604, 'applauds': 13605, 'risky': 13606, 'loyal': 13607, 'gleefully': 13608, 'landspeeder': 13609, 'stonesipher': 13610, 'nwindow': 13611, 'subsides': 13612, 'shooters': 13613, 'chewed': 13614, 'sheriffs': 13615, 'silencer': 13616, 'maybe': 13617, 'bus': 13618, 'younger': 13619, '29': 13620, 'n131': 13621, 'dress': 13622, 'sidekick': 13623, 'legitimate': 13624, 'imposing': 13625, 'lucas': 13626, 'excrement': 13627, 'gymnasium': 13628, 'gets': 13629, 'nquincy': 13630, 'male': 13631, 'fearfully': 13632, 'roommate': 13633, 'gargoyles': 13634, 'shopping': 13635, 'sequence': 13636, 'stillwater': 13637, 'nwatts': 13638, 'nj': 13639, 'unsure': 13640, 'darkening': 13641, 'clunk': 13642, 'handcuffed': 13643, 'splash': 13644, 'shepherds': 13645, 'steering': 13646, 'instruments': 13647, 'glistening': 13648, 'tigers': 13649, 'nwith': 13650, 'betrayed': 13651, 'gains': 13652, 'nback': 13653, 'hanging': 13654, 'cove': 13655, 'specks': 13656, 'triumph': 13657, 'suzy': 13658, 'nseveral': 13659, 'cooperate': 13660, 'romulus': 13661, 'mangled': 13662, 'utter': 13663, 'wooley': 
13664, 'today': 13665, 'nvallon': 13666, 'printout': 13667, 'banged': 13668, 'smitty': 13669, 'numerous': 13670, 'certificates': 13671, 'raleigh': 13672, 'korean': 13673, 'n166': 13674, 'sore': 13675, 'nolan': 13676, 'muscle': 13677, 'pillars': 13678, 'fangs': 13679, 'pedestrian': 13680, 'whirling': 13681, 'pulsating': 13682, 'dozen': 13683, 'spectacular': 13684, 'beautiful': 13685, 'gestapo': 13686, 'mohrenschildt': 13687, 'edie': 13688, 'lighted': 13689, 'dove': 13690, 'dirt': 13691, 'gunfire': 13692, 'n36': 13693, 'suv': 13694, 'lunar': 13695, 'styles': 13696, '500': 13697, 'anguished': 13698, 'close': 13699, 'mouthpiece': 13700, "you'll": 13701, '39': 13702, 'climb': 13703, 'kuhnpast': 13704, 'bugs': 13705, 'fortified': 13706, 'attendant': 13707, 'ladies': 13708, 'nstation': 13709, 'nmulan': 13710, 'oscar': 13711, '302': 13712, 'poe': 13713, 'weight': 13714, 'n8': 13715, 'across': 13716, 'cartridge': 13717, 'mollusk': 13718, 'slung': 13719, 'taken': 13720, 'weaves': 13721, 'jesus': 13722, 'hangman': 13723, 'warmer': 13724, 'shuttles': 13725, 'sets': 13726, 'listen': 13727, 'investigates': 13728, 'countdown': 13729, 'ally': 13730, 'nbromden': 13731, 'nearly': 13732, 'brochure': 13733, 'flown': 13734, 'continually': 13735, 'knowingly': 13736, 'waitin': 13737, 'terrace': 13738, 'steal': 13739, 'fighter': 13740, 'press': 13741, 'responds': 13742, 'private': 13743, 'descending': 13744, 'scrapyard': 13745, 'n89': 13746, 'calls': 13747, 'spitting': 13748, 'revolving': 13749, 'counter': 13750, 'undo': 13751, 'jodie': 13752, 'mart': 13753, 'lets': 13754, 'unbearable': 13755, 'floating': 13756, 'question': 13757, 'grassy': 13758, 'swaying': 13759, 'destroying': 13760, 'mesmerized': 13761, 'rotary': 13762, 'elijah': 13763, 'surface': 13764, 'tragic': 13765, 'is': 13766, 'offices': 13767, 'voices': 13768, 'which': 13769, 'shouts': 13770, 'columbus': 13771, 'clustered': 13772, 'niner': 13773, 'uncomprehending': 13774, 'injects': 13775, 'forms': 13776, 'ntruman': 13777, 
'ndoes': 13778, '232': 13779, 'nrevealing': 13780, 'prods': 13781, 'panda': 13782, 'monstrous': 13783, 'basically': 13784, 'agent': 13785, 'nypd': 13786, 'shadowy': 13787, 'distortions': 13788, 'feathers': 13789, 'prom': 13790, 'immigration': 13791, 'splintered': 13792, 'curtain': 13793, 'clubs': 13794, 'whacks': 13795, 'demon': 13796, 'opened': 13797, 'joining': 13798, 'lex': 13799, 'look': 13800, 'attaches': 13801, 'glossy': 13802, 'xadll': 13803, 'brightens': 13804, 'nray': 13805, 'nuwanda': 13806, 'directional': 13807, 'lethal': 13808, 'survey': 13809, 'admission': 13810, 'notch': 13811, 'golacinski': 13812, 'astronaut': 13813, 'ada': 13814, 'personal': 13815, "'know": 13816, '1973': 13817, 'gou': 13818, 'whoever': 13819, 'bleak': 13820, 'lying': 13821, 'maggie': 13822, 'bitten': 13823, 'criticize': 13824, 'clues': 13825, "here's": 13826, 'reflections': 13827, 'nphone': 13828, 'salim': 13829, 'workstation': 13830, 'freaks': 13831, 'downtown': 13832, 'scowling': 13833, 'bodyguards': 13834, 'rearrange': 13835, 'threads': 13836, 'our': 13837, 'exclusive': 13838, 'beneath': 13839, 'n112': 13840, 'restraining': 13841, 'n78': 13842, 'ambassador': 13843, 'positions': 13844, 'ford': 13845, 'todashi': 13846, 'scrubbed': 13847, 'bridge': 13848, 'tossing': 13849, 'n97': 13850, 'stein': 13851, 'bathers': 13852, 'deegan': 13853, 'abraham': 13854, 'enthusiasm': 13855, 'jaw': 13856, 'tuna': 13857, 'upper': 13858, 'offers': 13859, 'climate': 13860, 'skylight': 13861, 'shatters': 13862, 'jann': 13863, 'fishlegs': 13864, 'nature': 13865, 'preserved': 13866, 'dawson': 13867, 'disgrace': 13868, 'nharp': 13869, 'tightly': 13870, 'harem': 13871, 'splayed': 13872, 'repeaters': 13873, 'graphic': 13874, 'burbank': 13875, 'cerdan': 13876, 'accompaniment': 13877, 'questions': 13878, 'unsteadily': 13879, 'broadcast': 13880, 'jonas': 13881, 'malik': 13882, '69': 13883, 'windowsill': 13884, 'saved': 13885, 'sunrise': 13886, 'strangling': 13887, 'caged': 13888, 'theatrical': 13889, 
'whirring': 13890, 'parameters': 13891, "'re": 13892, 'ntoward': 13893, 'inky': 13894, 'lonely': 13895, 'hub': 13896, 'peanut': 13897, 'strongly': 13898, 'hustle': 13899, 'merciful': 13900, 'taunt': 13901, 'sparking': 13902, 'butts': 13903, 'bryan': 13904, 'additional': 13905, 'reacts': 13906, 'squealing': 13907, 'ascends': 13908, 'chimps': 13909, 'cartridges': 13910, 'articles': 13911, 'rotten': 13912, 'forks': 13913, 'dunne': 13914, 'harmless': 13915, 'buries': 13916, 'tears': 13917, 'cliffs': 13918, 'emitting': 13919, 'darkens': 13920, 'exposing': 13921, 'kinds': 13922, 'herds': 13923, 'nmcmurphy': 13924, 'permanently': 13925, 'moves': 13926, 'huts': 13927, 'pump': 13928, 'pine': 13929, 'builder': 13930, 'gloomy': 13931, 'manuel': 13932, 'technology': 13933, 'smoothly': 13934, 'paid': 13935, 'miko': 13936, 'sip': 13937, 'vile': 13938, 'statura': 13939, 'lovely': 13940, 'devils': 13941, 'portland': 13942, 'hastily': 13943, 'joystick': 13944, 'leaping': 13945, 'dom': 13946, 'wild': 13947, 'fleet': 13948, 'forty': 13949, 'gauguin': 13950, 'plutonium': 13951, 'camouflage': 13952, 'gestures': 13953, 'n17': 13954, 'nnurse': 13955, 'honest': 13956, 'slamming': 13957, 'putting': 13958, 'n191': 13959, 'hog': 13960, 'immediately': 13961, 'expedition': 13962, 'positioning': 13963, 'accomplish': 13964, 'staples': 13965, 'tellers': 13966, 'brakes': 13967, 'plead': 13968, 'targets': 13969, 'exceptional': 13970, 'seem': 13971, 'nmary': 13972, 'rock': 13973, 'designer': 13974, 'blur': 13975, 'arcade': 13976, 'fart': 13977, 'wilshire': 13978, 'overhears': 13979, 'liz': 13980, 'muffin': 13981, 'nnedry': 13982, 'dart': 13983, 'attorney': 13984, 'nstan': 13985, 'que': 13986, 'ooooh': 13987, 'rosner': 13988, 'dreamily': 13989, 'stirring': 13990, 'tan': 13991, 'polito': 13992, 'torches': 13993, 'consciously': 13994, 'bins': 13995, 'occasionally': 13996, 'jut': 13997, 'economy': 13998, 'figured': 13999, 'worth': 14000, 'nnotices': 14001, 'smart': 14002, 'x9ccause': 14003, 'beats': 
14004, 'overboard': 14005, '122': 14006, 'weaponry': 14007, 'coincidence': 14008, 'igniting': 14009, 'gentle': 14010, 'washing': 14011, 'they': 14012, 'mining': 14013, 'tentatively': 14014, 'headphones': 14015, 'giving': 14016, 'somebody': 14017, 'packets': 14018, 'maitre': 14019, 'image': 14020, 'separate': 14021, 'salt': 14022, 'fredrickson': 14023, 'es': 14024, 'lorl': 14025, 'ncarefully': 14026, 'mainly': 14027, 'nlester': 14028, 'troll': 14029, 'emma': 14030, 'phasma': 14031, 'intimidate': 14032, 'nteddy': 14033, 'flicker': 14034, 'nonchalant': 14035, 'overlapping': 14036, 'unmistakable': 14037, 'overlap': 14038, 'bottle': 14039, '215': 14040, 'hand': 14041, 'hoists': 14042, 'dominates': 14043, 'handlers': 14044, 'decked': 14045, 'shooting': 14046, 'nside': 14047, 'intricate': 14048, 'wanting': 14049, 'images': 14050, 'aspirin': 14051, 'tuff': 14052, 'immediate': 14053, 'hurry': 14054, 'tell': 14055, 'chod': 14056, 'robert': 14057, 'whooshes': 14058, 'transmit': 14059, 'fortune': 14060, 'taking': 14061, 'hey': 14062, 'ruined': 14063, 'taft': 14064, 'mais': 14065, 'rolling': 14066, 'jess': 14067, 'shepherd': 14068, 'harried': 14069, 'waiter': 14070, 'elegantly': 14071, 'programs': 14072, 'gook': 14073, 'defying': 14074, 'nutah': 14075, 'baron': 14076, 'fragments': 14077, 'shall': 14078, 'flinches': 14079, 'drivers': 14080, 'keisel': 14081, 'exceptionally': 14082, 'orange': 14083, 'shopkeeper': 14084, 'drying': 14085, 'stray': 14086, 'loaf': 14087, 'mcallister': 14088, 'brought': 14089, 'deputy': 14090, 'mannequin': 14091, 'oriental': 14092, 'kablam': 14093, 'automatics': 14094, 'aye': 14095, 'anderson': 14096, 'increases': 14097, 'dal': 14098, 'routine': 14099, 'murphy': 14100, 'ma': 14101, 'harmony': 14102, 'nimble': 14103, 'branches': 14104, 'mushroom': 14105, 'exist': 14106, 'sepulveda': 14107, 'capsules': 14108, 'frighten': 14109, 'sharply': 14110, 'turned': 14111, 'lived': 14112, 'beat': 14113, 'hairy': 14114, 'iii': 14115, 'groggy': 14116, 'pebbles': 
14117, 'sickly': 14118, 'making': 14119, 'fascist': 14120, 'depot': 14121, 'group': 14122, 'coma': 14123, 'machines': 14124, 'cont': 14125, 'weighing': 14126, 'werechicken': 14127, 'cure': 14128, 'faggots': 14129, 'accurately': 14130, 'flattered': 14131, 'withdrawing': 14132, 'jealous': 14133, 'splinters': 14134, 'unarmed': 14135, 'nsid': 14136, 'comparing': 14137, 'passersby': 14138, 'sunday': 14139, 'pi': 14140, 'vessels': 14141, 'handling': 14142, 'warmly': 14143, 'mailbox': 14144, 'garage': 14145, 'youngsters': 14146, 'cigars': 14147, 'walking': 14148, 'distant': 14149, 'crowding': 14150, 'brace': 14151, 'cheering': 14152, 'advisor': 14153, 'kettle': 14154, 'fingers': 14155, 'bearing': 14156, 'porcelain': 14157, 'sedative': 14158, 'trapped': 14159, 'systems': 14160, 'jumps': 14161, 'assent': 14162, 'gill': 14163, 'weary': 14164, 'nsa': 14165, 'notion': 14166, 'offense': 14167, 'wow': 14168, 'trash': 14169, 'restlessly': 14170, 'disciple': 14171, 'anchor': 14172, 'dedication': 14173, 'drains': 14174, 'fades': 14175, 'snail': 14176, 'pm': 14177, 'onion': 14178, 'thou': 14179, 'electrified': 14180, 'solid': 14181, 'forests': 14182, 'nicole': 14183, 'homestead': 14184, 'bird': 14185, 'rap': 14186, 'lasts': 14187, 'torrance': 14188, 'disappointment': 14189, 'nmurray': 14190, 'pirateship': 14191, 'poles': 14192, 'gay': 14193, 'bookstore': 14194, 'strips': 14195, 'furtively': 14196, 'n53': 14197, 'withdraws': 14198, 'plan': 14199, 'shelby': 14200, 'unzips': 14201, 'ran': 14202, 'rodent': 14203, 'naah': 14204, 'vincente': 14205, 'skills': 14206, 'giovanni': 14207, 'fool': 14208, 'dancing': 14209, 'homosexual': 14210, 'organized': 14211, 'gracefully': 14212, 'wrinkled': 14213, '18': 14214, 'functional': 14215, 'ruffles': 14216, 'launching': 14217, 'nthat': 14218, 'twitches': 14219, 'strains': 14220, 'wannabe': 14221, '64': 14222, 'draws': 14223, 'vicinity': 14224, 'conductor': 14225, 'shed': 14226, 'quiver': 14227, 'n19': 14228, 'walther': 14229, 'agathe': 14230, 
'gambling': 14231, 'tehran': 14232, 'npappas': 14233, 'dew': 14234, 'bullhorn': 14235, 'encampment': 14236, 'tosses': 14237, 'shiny': 14238, 'beige': 14239, 'rippling': 14240, 'archival': 14241, 'package': 14242, 'leap': 14243, 'zig': 14244, 'ox': 14245, 'courier': 14246, 'dozes': 14247, 'bargain': 14248, 'comments': 14249, 'pico': 14250, 'mozart': 14251, 'simon': 14252, 'spiel': 14253, 'duplicated': 14254, 'bile': 14255, 'regulars': 14256, 'abruptly': 14257, 'shining': 14258, 'confession': 14259, 'trauma': 14260, 'workers': 14261, 'ngo': 14262, 'recognizes': 14263, 'chairs': 14264, 'dash': 14265, 'micker': 14266, 'homework': 14267, 'lench': 14268, 'thanks': 14269, 'balding': 14270, 'shotgun': 14271, 'pissed': 14272, 'extravagant': 14273, 'underworld': 14274, 'silvio': 14275, 'incubator': 14276, 'kee': 14277, 'frontiersman': 14278, 'underdog': 14279, 'shyres': 14280, 'assemble': 14281, 'refueling': 14282, '144': 14283, 'burn': 14284, 'bertrand': 14285, 'bumped': 14286, 'denied': 14287, 'avoided': 14288, 'friendly': 14289, 'powered': 14290, 'educated': 14291, 'begging': 14292, 'mattered': 14293, 'badges': 14294, 'container': 14295, '187': 14296, 'lynn': 14297, 'bending': 14298, 'freaked': 14299, 'petrified': 14300, 'village': 14301, 'cocky': 14302, 'pizza': 14303, 'governess': 14304, 'banging': 14305, 'flings': 14306, 'conspiratorial': 14307, 'filtering': 14308, 'rebel': 14309, 'howl': 14310, 'direct': 14311, 'facade': 14312, 'unlucky': 14313, "goin'": 14314, 'dinosaurs': 14315, 'spontaneous': 14316, 'groping': 14317, 'nhanging': 14318, 'n13': 14319, 'op': 14320, 'tylenol': 14321, 'chomp': 14322, 'lou': 14323, 'doyle': 14324, 'camel': 14325, 'grissom': 14326, 'meant': 14327, 'jokers': 14328, 'register': 14329, 'definite': 14330, 'jamal': 14331, 'allergic': 14332, 'ponys': 14333, 'broomhilda': 14334, 'primitive': 14335, 'n5': 14336, 'stung': 14337, 'calmly': 14338, 'greenpeace': 14339, 'i': 14340, 'volcanic': 14341, 'wassup': 14342, 'corrects': 14343, 'teller': 
14344, 'stalking': 14345, 'blares': 14346, 'visor': 14347, 'lenses': 14348, 'nmemento': 14349, 'pelt': 14350, 'frail': 14351, 'jankis': 14352, 'anticipation': 14353, 'projects': 14354, 'dreamlike': 14355, 'anti': 14356, 'launch': 14357, 'weekend': 14358, 'forcing': 14359, 'haggerty': 14360, 'attractive': 14361, 'sinclair': 14362, 'gold': 14363, 'planets': 14364, 'desolate': 14365, 'wipes': 14366, 'david': 14367, 'christmas': 14368, 'trusted': 14369, 'helmet': 14370, 'nstudents': 14371, 'clucks': 14372, 'bombs': 14373, 'fleshy': 14374, 'indication': 14375, 'moisture': 14376, 'freddy': 14377, 'ear': 14378, 'snatching': 14379, 'restraint': 14380, 'n23': 14381, 'assassinate': 14382, 'n157': 14383, 'mo': 14384, 'anandi': 14385, 'homing': 14386, 'clutter': 14387, 'growl': 14388, 'floors': 14389, 'opie': 14390, 'peckawood': 14391, 'recognize': 14392, 'flavor': 14393, 'huey': 14394, 'looked': 14395, '31': 14396, 'taxes': 14397, 'flail': 14398, 'dining': 14399, 'nets': 14400, 'sentences': 14401, 'welder': 14402, 'mmmmm': 14403, 'mornin': 14404, 'observer': 14405, 'loo': 14406, 'cosmetics': 14407, 'cursing': 14408, 'piscine': 14409, 'females': 14410, 'starter': 14411, 'confront': 14412, 'flashes': 14413, 'beru': 14414, 'stain': 14415, 'choice': 14416, 'skip': 14417, 'strawberry': 14418, 'gang': 14419, 'involve': 14420, 'lequint': 14421, 'obscene': 14422, 'viggo': 14423, 'reassuring': 14424, 'introductions': 14425, 'stroke': 14426, 'confronted': 14427, 'sits': 14428, 'crenshaw': 14429, 'combo': 14430, 'bitch': 14431, 'ion': 14432, 'bill': 14433, 'pinkie': 14434, 'nbutcher': 14435, 'ease': 14436, 'coats': 14437, 'sabretooth': 14438, 'goers': 14439, 'electrical': 14440, 'cackles': 14441, 'beaten': 14442, 'each': 14443, 'find': 14444, 'mercury': 14445, 'nhot': 14446, 'advisors': 14447, 'expected': 14448, 'ringing': 14449, 'atrium': 14450, 'nation': 14451, 'ski': 14452, 'sunshine': 14453, 'studded': 14454, 'later': 14455, 'agency': 14456, 'suggest': 14457, 'finishes': 14458, 
'tunnels': 14459, 'style': 14460, 'flannel': 14461, 'warner': 14462, 'atlantis': 14463, 'slaps': 14464, 'academic': 14465, 'speakers': 14466, 'performs': 14467, 'unfolding': 14468, 'stabbing': 14469, 'blast': 14470, 'keep': 14471, 'peeks': 14472, 'pee': 14473, 'challenged': 14474, 'lettuce': 14475, 'mist': 14476, 'exploration': 14477, 'graduate': 14478, 'instance': 14479, 'maneuvers': 14480, 'methods': 14481, 'saddened': 14482, 'wailing': 14483, 'retired': 14484, 'flushed': 14485, 'fraulein': 14486, 'maidens': 14487, 'probation': 14488, 'n24': 14489, 'shouting': 14490, 'medics': 14491, 'su': 14492, 'comprehend': 14493, 'dims': 14494, 'dory': 14495, 'squeezing': 14496, 'sith': 14497, 'actress': 14498, 'hinges': 14499, 'earth': 14500, 'angel': 14501, 'unauthorized': 14502, 'ahem': 14503, 'inevitable': 14504, 'arranged': 14505, 'hostess': 14506, 'clamor': 14507, 'mandingos': 14508, 'manifest': 14509, 'spot': 14510, 'dials': 14511, 'crowds': 14512, 'collides': 14513, 'flesh': 14514, 'hurries': 14515, '131': 14516, 'bradford': 14517, 'lift': 14518, 'nwritten': 14519, 'newton': 14520, 'nbusiness': 14521, 'x80': 14522, 'all': 14523, 'headless': 14524, 'phrase': 14525, 'nwider': 14526, 'telescopic': 14527, 'shelter': 14528, 'agree': 14529, 'h': 14530, 'mosque': 14531, 'counsel': 14532, 'tights': 14533, 'taiwan': 14534, 'wuss': 14535, 'secretary': 14536, 'shake': 14537, 'nbloat': 14538, 'lamotta': 14539, 'cobra': 14540, 'mechanism': 14541, 'swollen': 14542, 'correct': 14543, 'russ': 14544, 'stairs': 14545, 'flicks': 14546, 'disturbed': 14547, 'moonlit': 14548, 'prayers': 14549, 'worn': 14550, 'lazy': 14551, '11': 14552, 'glide': 14553, 'jinx': 14554, 'boarding': 14555, 'calendars': 14556, 'spook': 14557, 'youkilis': 14558, 'snatched': 14559, 'drinks': 14560, 'chuckles': 14561, 'barf': 14562, 'repulsed': 14563, 'spivey': 14564, 'wallaby': 14565, 'duty': 14566, 'master': 14567, 'scenery': 14568, 'text': 14569, 'polly': 14570, 'falling': 14571, 'candlelight': 14572, 
'packages': 14573, 'travelling': 14574, 'nthough': 14575, 'according': 14576, 'belonging': 14577, 'technicians': 14578, 'importantly': 14579, 'reports': 14580, 'minks': 14581, 'shifts': 14582, 'wrists': 14583, 'skull': 14584, 'lanky': 14585, 'spacesuit': 14586, 'horrific': 14587, 'sharing': 14588, 'givin': 14589, 'deliver': 14590, 'change': 14591, 'outcome': 14592, 'bundles': 14593, 'would': 14594, 'beatrice': 14595, 'problem': 14596, 'global': 14597, 'ellington': 14598, 'nor': 14599, 'june': 14600, 'membranous': 14601, 'exception': 14602, 'puts': 14603, 'fallen': 14604, 'warmth': 14605, 'raps': 14606, 'candle': 14607, 'pretend': 14608, 'bennie': 14609, 'joey': 14610, 'skag': 14611, 'replace': 14612, 'trademark': 14613, 'deployed': 14614, 'nexcuse': 14615, 'spiral': 14616, 'choke': 14617, 'ya': 14618, 'traumatized': 14619, 'chow': 14620, 'helicopters': 14621, 'clipped': 14622, 'dylan': 14623, 'offscreen': 14624, 'vanishes': 14625, 'prone': 14626, 'daryl': 14627, 'recorder': 14628, 'remove': 14629, 'vests': 14630, 'booths': 14631, 'terry': 14632, 'mcclane': 14633, '17': 14634, 'republic': 14635, 'enhanced': 14636, 'ntommy': 14637, 'ignites': 14638, "'arque": 14639, 'nodding': 14640, 'npig': 14641, 'spreads': 14642, 'affecting': 14643, 'flows': 14644, 'feather': 14645, 'bullet': 14646, 'blouse': 14647, 'casts': 14648, 'creature': 14649, 'th': 14650, 'knife': 14651, 'surrender': 14652, 'steamy': 14653, 'nhis': 14654, 'quality': 14655, 'n103': 14656, 'voyage': 14657, 'background': 14658, 'n156': 14659, 'shelf': 14660, 'underside': 14661, 'planned': 14662, 'bounding': 14663, 'ambles': 14664, 'chiefs': 14665, 'delicate': 14666, 'dad': 14667, 'shovels': 14668, 'grant': 14669, 'lima': 14670, 'accompanied': 14671, 'strand': 14672, 'frazier': 14673, 'broadly': 14674, 'fiber': 14675, 'warming': 14676, 'ndown': 14677, 'possible': 14678, 'dangerous': 14679, 'inexplicably': 14680, 'subtitle': 14681, 'antidote': 14682, 'emerges': 14683, 'packing': 14684, 'inject': 14685, 
'hearty': 14686, 'sockets': 14687, 'prices': 14688, 'stabs': 14689, 'nthrough': 14690, 'stoically': 14691, 'awakening': 14692, 'protect': 14693, 'family': 14694, 'sails': 14695, 'indifference': 14696, 'nagainst': 14697, 'triceratops': 14698, 'erased': 14699, 'embarrassing': 14700, 'professional': 14701, 'care': 14702, 'commando': 14703, 'ending': 14704, 'together': 14705, 'tarp': 14706, 'gather': 14707, 'involved': 14708, 'flag': 14709, 'messy': 14710, 'smiling': 14711, 'kilos': 14712, 'experimental': 14713, 'scenes': 14714, 'heart': 14715, 'citizenship': 14716, 'slash': 14717, 'flares': 14718, 'beggars': 14719, 'carmine': 14720, 'reminded': 14721, 'hero': 14722, 'stamps': 14723, 'paris': 14724, 'solve': 14725, 'cop': 14726, 'overgrown': 14727, 'resort': 14728, 'drowning': 14729, 'lantern': 14730, 'barbara': 14731, 'rotelli': 14732, 'walkway': 14733, 'strip': 14734, 'monroe': 14735, 'cleaners': 14736, 'parts': 14737, 'release': 14738, 'privilege': 14739, 'area': 14740, 'layer': 14741, 'dicks': 14742, 'compassionate': 14743, 'actors': 14744, 'clenched': 14745, 'believes': 14746, 'footprints': 14747, 'trippin': 14748, 'maz': 14749, 'nwears': 14750, 'camaro': 14751, 'nshould': 14752, 'she': 14753, 'arctic': 14754, 'nuts': 14755, 'easier': 14756, 'sulu': 14757, 'peggy': 14758, 'item': 14759, 'pray': 14760, 'beasts': 14761, 'material': 14762, 'sampled': 14763, 'corey': 14764, 'beginnings': 14765, 'stump': 14766, 'todd': 14767, 'pitts': 14768, 'discarded': 14769, 'nguard': 14770, 'scotty': 14771, 'shag': 14772, 'n69': 14773, 'quits': 14774, 'coolly': 14775, 'rusty': 14776, "'head": 14777, 'temporarily': 14778, 'funk': 14779, 'chocolate': 14780, 'livin': 14781, 'penthouse': 14782, 'circular': 14783, 'conference': 14784, 'corn': 14785, 'beams': 14786, 'infection': 14787, 'roller': 14788, 'size': 14789, 'built': 14790, 'seventies': 14791, 'punks': 14792, 'ducking': 14793, 'treating': 14794, 'pickin': 14795, 'noodles': 14796, 'departs': 14797, 'corporal': 14798, 'tracing': 
14799, 'plausible': 14800, 'roles': 14801, 'commanding': 14802, 'grocery': 14803, 'outrageous': 14804, 'parody': 14805, 'abandon': 14806, 'harding': 14807, 'system': 14808, 'sandman': 14809, 'falters': 14810, 'strides': 14811, 'nellie': 14812, 'squat': 14813, 'supported': 14814, 'binder': 14815, 'n77': 14816, '207': 14817, 'npoliceman': 14818, 'crazily': 14819, '911': 14820, 'ruby': 14821, 'drama': 14822, 'leaking': 14823, 'playboy': 14824, 'ajax': 14825, 'flames': 14826, 'vital': 14827, 'toddler': 14828, 'florist': 14829, 'fronts': 14830, 'nbetsy': 14831, 'imitation': 14832, 'chateau': 14833, 'gushing': 14834, 'desperately': 14835, 'thrilled': 14836, 'dental': 14837, 'almost': 14838, 'disappears': 14839, 'trim': 14840, 'symptoms': 14841, 'robber': 14842, 'hated': 14843, 'threaten': 14844, 'toss': 14845, 'switching': 14846, 'ridden': 14847, 'saunters': 14848, 'ranks': 14849, 'slid': 14850, 'packed': 14851, 'windshield': 14852, 'smelling': 14853, 'videotape': 14854, 'shoving': 14855, 'degas': 14856, 'grief': 14857, '142': 14858, 'nunable': 14859, 'teedo': 14860, 'hails': 14861, 'pointless': 14862, 'thirties': 14863, 'raises': 14864, 'steel': 14865, 'twisted': 14866, 'bald': 14867, 'michael': 14868, 'ndrunk': 14869, 'petals': 14870, 'mal': 14871, 'heavens': 14872, 'bridal': 14873, 'wha': 14874, 'newborn': 14875, 'ruth': 14876, 'skims': 14877, 'ostinato': 14878, 'knude': 14879, '10000': 14880, 'kate': 14881, 'grimy': 14882, 'screech': 14883, 'administrative': 14884, 'cascade': 14885, 'billboards': 14886, 'collins': 14887, 'rawalpindi': 14888, 'njoy': 14889, 'klein': 14890, 'announces': 14891, 'clears': 14892, 'otherworldly': 14893, 'mops': 14894, 'burnt': 14895, 'ringside': 14896, 'longingly': 14897, 'breathless': 14898, 'limits': 14899, 'gump': 14900, 'imitating': 14901, 'managers': 14902, 'groove': 14903, 'moans': 14904, 'fag': 14905, 'mac': 14906, 'registry': 14907, 'furtive': 14908, 'techs': 14909, 'seemed': 14910, 'treetops': 14911, 'confer': 14912, 'next': 
14913, 'smells': 14914, 'darned': 14915, 'revulsion': 14916, 'gymnast': 14917, 'bach': 14918, 'bee': 14919, 'beware': 14920, 'appointed': 14921, 'thompson': 14922, 'wiped': 14923, 'bolivia': 14924, 'formation': 14925, 'automotive': 14926, 'thirds': 14927, 'nyao': 14928, 'curls': 14929, 'precision': 14930, 'payroll': 14931, 'greek': 14932, 'road': 14933, 'hatched': 14934, 'feral': 14935, 'chilling': 14936, 'keeps': 14937, 'mourning': 14938, 'project': 14939, 'dribbles': 14940, 'gnarly': 14941, 'nat': 14942, 'rockets': 14943, 'strongarm': 14944, 'worriedly': 14945, 'creek': 14946, 'tells': 14947, 'fly': 14948, 'moustache': 14949, 'smoky': 14950, '301': 14951, 'mickey': 14952, 'frying': 14953, 'keen': 14954, 'crewmen': 14955, 'snowball': 14956, 'n178': 14957, 'twa': 14958, '0': 14959, 'trickles': 14960, 'fishy': 14961, 'pre': 14962, 'stealthy': 14963, 'traps': 14964, 'ntheir': 14965, 'spray': 14966, 'ngolden': 14967, 'mantra': 14968, 'conscious': 14969, '3': 14970, 'nasteroid': 14971, 'sizzle': 14972, 'guiltily': 14973, 'junkies': 14974, 'nspud': 14975, 'happiest': 14976, 'n45': 14977, 'taps': 14978, 'holiday': 14979, 'slightest': 14980, 'neil': 14981, 'mila': 14982, 'ndoorway': 14983, 'npicks': 14984, 'tract': 14985, 'observatory': 14986, 'n30': 14987, 'arson': 14988, 'bait': 14989, 'nations': 14990, 'admiration': 14991, 'scroll': 14992, 'shallow': 14993, 'mike': 14994, 'youngest': 14995, 'politician': 14996, 'andy': 14997, 'hypersleep': 14998, 'uncomfortably': 14999, 'operations': 15000, 'silverlake': 15001, '63': 15002, 'goo': 15003, 'itsu': 15004, 'salute': 15005, 'hon': 15006, 'build': 15007, 'regular': 15008, 'flush': 15009, 'glowers': 15010, 'karate': 15011, 'waylon': 15012, 'deny': 15013, 'cuban': 15014, 'nbilly': 15015, 'yavin': 15016, 'nwashington': 15017, 'makeshift': 15018, 'moretti': 15019, 'thornburg': 15020, 'earphones': 15021, 'library': 15022, 'boisterous': 15023, 'sport': 15024, '224': 15025, 'malinov': 15026, '259': 15027, 'gated': 15028, 'rates': 
15029, 'hack': 15030, 'recreation': 15031, 'charred': 15032, 'smirks': 15033, 'atlantic': 15034, 'prison': 15035, 'preston': 15036, 'drumming': 15037, 'rosomorf': 15038, 'marriott': 15039, 'clowns': 15040, 'oc': 15041, 'dmz': 15042, 'maniac': 15043, 'daughters': 15044, 'shy': 15045, 'border': 15046, 'overtakes': 15047, 'interference': 15048, 'possess': 15049, 'intelligence': 15050, 'folded': 15051, 'nglances': 15052, '92': 15053, 'counts': 15054, 'leaks': 15055, 'poodle': 15056, 'ltd': 15057, 'buddies': 15058, 'bernie': 15059, 'magical': 15060, 'procedure': 15061, 'kimberly': 15062, 'yesterday': 15063, 'ranging': 15064, 'marketplace': 15065, 'having': 15066, 'meals': 15067, '27': 15068, 'customary': 15069, 'ml320': 15070, 'nightstand': 15071, 'dumpster': 15072, 'striking': 15073, 'chunk': 15074, 'bonnie': 15075, 'sympathy': 15076, 'intercourse': 15077, 'mostly': 15078, 'emotion': 15079, 'annual': 15080, 'ncop': 15081, 'naturedly': 15082, 'ncole': 15083, 'armed': 15084, 'soldiers': 15085, 'arcs': 15086, 'elections': 15087, 'older': 15088, 'chop': 15089, 'ntraffic': 15090, 'anders': 15091, 'negon': 15092, 'townhouse': 15093, 'husk': 15094, 'inter': 15095, 'washes': 15096, 'leporello': 15097, 'frenzy': 15098, 'lifetime': 15099, 'sounding': 15100, 'known': 15101, 'ninto': 15102, 'speechless': 15103, 'exquisite': 15104, 'shoots': 15105, 'ratty': 15106, 'ours': 15107, 'sway': 15108, 'pere': 15109, 'reporting': 15110, 'disappointed': 15111, 'hunt': 15112, 'pen': 15113, 'bureaucrat': 15114, 'ok': 15115, 'saigon': 15116, 'backstage': 15117, 'keyboard': 15118, 'launcher': 15119, 'previa': 15120, 'a8': 15121, 'driftwood': 15122, 'ample': 15123, 'sportcoat': 15124, 'impenetrable': 15125, 'speakerbox': 15126, 'navigating': 15127, 'cabbies': 15128, 'endurance': 15129, 'us': 15130, '212': 15131, 'stink': 15132, 'picks': 15133, 'elected': 15134, 'content': 15135, 'pleasing': 15136, "that's": 15137, 'investigations': 15138, 'ashamed': 15139, 'underbrush': 15140, 'barn': 15141, 
'excruciating': 15142, 'decision': 15143, 'parish': 15144, 'fausto': 15145, '178': 15146, 'gutted': 15147, 'airline': 15148, 'terminate': 15149, 'falls': 15150, 'threat': 15151, "isn't": 15152, 'bust': 15153, 'matches': 15154, 'moines': 15155, 'ramshackle': 15156, 'contract': 15157, 'lists': 15158, 'whaddya': 15159, 'raiders': 15160, 'thrusters': 15161, 'villeneuve': 15162, 'brightest': 15163, 'keough': 15164, 'astro': 15165, 'awfully': 15166, 'clearly': 15167, 'grounds': 15168, 'nown': 15169, 'wheelbarrow': 15170, 'bio': 15171, 'hypnotist': 15172, '121': 15173, 'fencing': 15174, 'amused': 15175, 'defending': 15176, 'rack': 15177, 'female': 15178, "'h": 15179, 'ten': 15180, 'cost': 15181, 'artists': 15182, 'nafter': 15183, 'carbon': 15184, 'grace': 15185, 'refined': 15186, 'fives': 15187, 'advanced': 15188, 'nsorry': 15189, 'balls': 15190, '156': 15191, 'parting': 15192, 'of': 15193, 'investors': 15194, 'tennessee': 15195, 'exec': 15196, 'bedsit': 15197, 'seductively': 15198, 'encouragement': 15199, 'wheel': 15200, 'define': 15201, 'puttin': 15202, 'prominently': 15203, 'needle': 15204, 'tips': 15205, 'furry': 15206, 'acknowledges': 15207, 'bobs': 15208, 'climbed': 15209, 'twigs': 15210, 'paperwork': 15211, 'feeding': 15212, 'sips': 15213, 'halves': 15214, 'prowl': 15215, 'started': 15216, 'flags': 15217, 'leash': 15218, 'perspiration': 15219, 'flaps': 15220, 'poof': 15221, 'posted': 15222, 'dismisses': 15223, 'map': 15224, 'investigative': 15225, 'lydia': 15226, 'heat': 15227, 'trailers': 15228, '62': 15229, 'documents': 15230, 'hitting': 15231, '7th': 15232, 'cocoon': 15233, 'hunches': 15234, 'skillfully': 15235, 'hottest': 15236, 'utmost': 15237, 'buzzes': 15238, 'existed': 15239, 'shelving': 15240, 'kindly': 15241, 'huffing': 15242, 'bathrobe': 15243, 'muster': 15244, 'graham': 15245, 'garson': 15246, 'redneck': 15247, 'styled': 15248, 'g': 15249, 'scoots': 15250, 'screeching': 15251, '146': 15252, 'bellows': 15253, 'fearless': 15254, 'comm': 15255, 'controls': 
15256, 'screaming': 15257, 'jafar': 15258, 'ansen': 15259, 'razzolini': 15260, 'footsteps': 15261, 'changes': 15262, 'used': 15263, 'hitler': 15264, 'amazement': 15265, 'dye': 15266, 'holidays': 15267, 'quake': 15268, 'crashed': 15269, 'rink': 15270, 'weaving': 15271, 'ferns': 15272, 'commentator': 15273, 'baltimore': 15274, 'thanksgiving': 15275, 'fingernails': 15276, 'professor': 15277, 'waited': 15278, 'refused': 15279, 'flame': 15280, 'turkish': 15281, 'cables': 15282, 'nothing': 15283, 'poison': 15284, 'moonby': 15285, 'appliances': 15286, 'tallest': 15287, 'lava': 15288, 'victory': 15289, 'army': 15290, 'aaah': 15291, 'eyeball': 15292, 'harsh': 15293, 'sob': 15294, '165': 15295, 'goon': 15296, 'tribe': 15297, 'dodging': 15298, 'bystanders': 15299, 'mixer': 15300, 'garbage': 15301, 'jeeps': 15302, 'ugly': 15303, 'thursday': 15304, 'ribbon': 15305, 'affirmative': 15306, 'nplaces': 15307, 'dignity': 15308, 'robbery': 15309, 'pointer': 15310, 'okay': 15311, 'intimate': 15312, 'fran': 15313, 'keys': 15314, 'pedestrians': 15315, 'glider': 15316, 'sympathetically': 15317, 'grammy': 15318, 'dug': 15319, 'deblois': 15320, 'dodges': 15321, 'nslides': 15322, 'beaded': 15323, 'casting': 15324, 'nedge': 15325, 'subtitles': 15326, 'surrenders': 15327, 'incoming': 15328, 'clown': 15329, 'hear': 15330, 'inmates': 15331, 'buck': 15332, 'deeply': 15333, 'pencils': 15334, 'visitors': 15335, 'gathering': 15336, 'charge': 15337, 'ny': 15338, 'corners': 15339, 'n105': 15340, 'susie': 15341, 'pursues': 15342, 'mutt': 15343, 'crates': 15344, 'poor': 15345, 'coil': 15346, 'ages': 15347, 'different': 15348, 'npeople': 15349, 'awaiting': 15350, 'toffel': 15351, 'issues': 15352, 'andrew': 15353, 'tickles': 15354, 'bunuel': 15355, 'bullseye': 15356, 'occurs': 15357, 'cramped': 15358, 'dump': 15359, 'fraud': 15360, 'questioningly': 15361, 'starboard': 15362, 'dumbwaiter': 15363, 'chili': 15364, 'receptionist': 15365, 'sizing': 15366, 'potter': 15367, 'bush': 15368, 'institute': 15369, 
'xad': 15370, 'despite': 15371, 'contained': 15372, 'fenced': 15373, '125': 15374, 'hujar': 15375, 'alas': 15376, 'umbrellas': 15377, 'case': 15378, 'number': 15379, 'pursuing': 15380, 'headmaster': 15381, 'harbor': 15382, 'fishtails': 15383, 'colby': 15384, 'yourself': 15385, 'yoo': 15386, 'comes': 15387, 'prior': 15388, 'minutes': 15389, 'approved': 15390, '257': 15391, 'roadie': 15392, 'grayson': 15393, 'clue': 15394, 'braces': 15395, 'starkiller': 15396, 'royals': 15397, 'bodyguard': 15398, 'sandwiches': 15399, '145': 15400, 'protestant': 15401, 'bromden': 15402, 'der': 15403, 'countryside': 15404, 'kid': 15405, 'npolice': 15406, '320': 15407, 'mob': 15408, 'dumbfounded': 15409, 'scanners': 15410, 'overhanging': 15411, 'others': 15412, 'nall': 15413, 'handprint': 15414, 'jurisdiction': 15415, 'assistance': 15416, 'scattering': 15417, 'hops': 15418, 'a': 15419, 'mexico': 15420, 'shift': 15421, 'thoran': 15422, 'label': 15423, 'spooked': 15424, 'brunette': 15425, 'clam': 15426, 'blurs': 15427, 'burger': 15428, 'examination': 15429, 'draw': 15430, 'anticipating': 15431, 'wife': 15432, 'marks': 15433, 'clatters': 15434, 'demanding': 15435, 'bat': 15436, 'biggest': 15437, 'residence': 15438, 'celebrities': 15439, 'nanny': 15440, 'nothingness': 15441, 'daniels': 15442, 'centerfold': 15443, 'convertible': 15444, 'neighboring': 15445, 'squishy': 15446, 'through': 15447, 'permitted': 15448, 'glimpses': 15449, 'maneuver': 15450, 'progress': 15451, 'latika': 15452, 'wick': 15453, 'respect': 15454, '294': 15455, 'guess': 15456, 'loss': 15457, 'seize': 15458, 'couch': 15459, 'snags': 15460, 'elevator': 15461, 'aides': 15462, 'booming': 15463, 'rubbish': 15464, 'mason': 15465, 'eraser': 15466, 'beggar': 15467, 'scars': 15468, 'abc': 15469, 'sr': 15470, 'wants': 15471, 'parcel': 15472, 'primal': 15473, 'horny': 15474, 'maternity': 15475, "'keefe": 15476, 'check': 15477, '357': 15478, '130': 15479, 'dickinson': 15480, 'infected': 15481, 'consoles': 15482, 'soldier': 15483, 
'rumor': 15484, 'roadside': 15485, 'penetrates': 15486, 'halloween': 15487, 'displays': 15488, 'arc': 15489, 'scares': 15490, 'silenced': 15491, 'trek': 15492, 'utensils': 15493, 'rocky': 15494, 'cues': 15495, 'gravy': 15496, 'elk': 15497, 'cheswick': 15498, 'gs': 15499, 'blocked': 15500, 'honda': 15501, 'announce': 15502, 'seeing': 15503, 'insults': 15504, 'review': 15505, 'latches': 15506, 'handbag': 15507, 'ncrowd': 15508, 'shut': 15509, 'crunching': 15510, 'holstered': 15511, 'paramedics': 15512, 'measurements': 15513, 'false': 15514, 'midtown': 15515, 'this': 15516, 'certainty': 15517, 'stocked': 15518, 'acts': 15519, 'gulps': 15520, 'nlife': 15521, 'ancient': 15522, 'rebuild': 15523, 'airstrip': 15524, 'bodhisattva': 15525, 'microwave': 15526, 'allison': 15527, 'hee': 15528, 'waiters': 15529, 'rainbow': 15530, 'nprince': 15531, 'separated': 15532, 'tastes': 15533, 'nmen': 15534, 'contorts': 15535, 'inspiration': 15536, 'stoops': 15537, 'freely': 15538, 'ss': 15539, 'shaken': 15540, 'rose': 15541, 'panther': 15542, 'motions': 15543, 'bulletin': 15544, 'received': 15545, 'kenny': 15546, 'spoils': 15547, 'bleek': 15548, 'macguff': 15549, 'whistling': 15550, 'greatness': 15551, 'cubicle': 15552, '237': 15553, 'formations': 15554, 'design': 15555, 'screwdriver': 15556, 'mean': 15557, 'drawers': 15558, 'shapes': 15559, 'horse': 15560, 'samuel': 15561, 'arms': 15562, 'lavetta': 15563, 'patriotic': 15564, 'cary': 15565, 'swamp': 15566, 'stood': 15567, 'plucks': 15568, 'completing': 15569, 'sharkbait': 15570, 'counted': 15571, 'favor': 15572, '179': 15573, 'waltz': 15574, 'jacob': 15575, 'santa': 15576, '196': 15577, 'entourage': 15578, 'consults': 15579, 'det': 15580, 'shrieking': 15581, 'worm': 15582, 'montage': 15583, 'positively': 15584, 'your': 15585, 'nin': 15586, 'poster': 15587, 'cousins': 15588, 'colleagues': 15589, '1970': 15590, 'xaf': 15591, 'rand': 15592, 'conviction': 15593, 'legally': 15594, 'ruins': 15595, 'thirst': 15596, 'trims': 15597, 'randolph': 
15598, 'sweeping': 15599, 'whilst': 15600, 'impala': 15601, 'costs': 15602, 'carla': 15603, 'entrances': 15604, 'sarek': 15605, 'internal': 15606, 'armoire': 15607, 'cafe': 15608, 'target': 15609, 'resist': 15610, 'weights': 15611, 'nscreen': 15612, 'grinds': 15613, 'enclosure': 15614, 'mmm': 15615, 'sensing': 15616, 'blasting': 15617, 'frantic': 15618, 'roxanne': 15619, 'kai': 15620, 'disturbs': 15621, 'ferguson': 15622, 'nfa': 15623, 'bloods': 15624, 'kennel': 15625, 'graduation': 15626, 'abilities': 15627, 'lawrence': 15628, 'bounces': 15629, 'dolly': 15630, 'trots': 15631, 'contracts': 15632, 'taylor': 15633, 'assorted': 15634, 'blew': 15635, 'faggot': 15636, 'hardly': 15637, 'mlb': 15638, 'universe': 15639, 'strongroom': 15640, 'roughnecks': 15641, 'nbody': 15642, 'sopranos': 15643, 'coop': 15644, 'crammed': 15645, 'driving': 15646, 'budget': 15647, 'ferry': 15648, 'awards': 15649, 'tested': 15650, 'francisco': 15651, 'play': 15652, 'watering': 15653, 'protesting': 15654, 'motorcycles': 15655, 'dies': 15656, 'lobotomy': 15657, 'peters': 15658, 'bike': 15659, 'frog': 15660, 'lumbers': 15661, 'licks': 15662, 'longest': 15663, 'wielding': 15664, 'haggard': 15665, '181': 15666, 'weinberg': 15667, 'projection': 15668, 'japanese': 15669, 'terminals': 15670, 'studies': 15671, 'doorway': 15672, 'magazine': 15673, 'bare': 15674, 'connors': 15675, 'noooh': 15676, 'concern': 15677, 'trousers': 15678, 'astounded': 15679, 'flamethrower': 15680, 'cradled': 15681, 'scrapes': 15682, 'furiously': 15683, 'apprehensive': 15684, 'relish': 15685, 'empire': 15686, 'drained': 15687, 'nrob': 15688, 'nope': 15689, 'sweetly': 15690, 'hoagie': 15691, 'bloodied': 15692, 'erratic': 15693, 'ncomes': 15694, 'phase': 15695, 'sketching': 15696, 'borrowed': 15697, 'laundry': 15698, 'lestercorp': 15699, 'ncoming': 15700, 'circles': 15701, 'just': 15702, 'claws': 15703, 'twin': 15704, 'bangs': 15705, 'smoked': 15706, 'co': 15707, 'operation': 15708, 'quint': 15709, 'chat': 15710, 'ndarla': 
15711, 'stanford': 15712, 'stroll': 15713, 'decor': 15714, 'racks': 15715, 'defensively': 15716, '14': 15717, 'ali': 15718, 'downs': 15719, 'reveal': 15720, 'delivering': 15721, 'weddings': 15722, 'silly': 15723, 'backhands': 15724, 'drawn': 15725, 'fusion': 15726, 'handsome': 15727, 'er': 15728, 'hazard': 15729, 'silently': 15730, 'impending': 15731, 'survived': 15732, 'ultra': 15733, 'scavenger': 15734, 'puzzled': 15735, 'nlo': 15736, 'nducky': 15737, 'sufficient': 15738, 'boots': 15739, 'fire': 15740, 'afternoon': 15741, 'yummy': 15742, 'n6': 15743, 'loosens': 15744, 'grille': 15745, 'further': 15746, 'arrivals': 15747, 'cosmic': 15748, 'contest': 15749, 'doubts': 15750, 'hawley': 15751, 'instrument': 15752, 'pittsburgh': 15753, 'skips': 15754, 'workmen': 15755, 'harm': 15756, 'officials': 15757, 'nturns': 15758, 'cart': 15759, 'gusto': 15760, 'johnny': 15761, 'mounds': 15762, 'weirdly': 15763, 'curse': 15764, 'screen': 15765, 'somehow': 15766, 'desktop': 15767, 'iraq': 15768, 'chirp': 15769, 'ship': 15770, 'decorated': 15771, 'ntie': 15772, 'path': 15773, 'n152': 15774, 'ruining': 15775, 'lone': 15776, 'rhythmic': 15777, 'rattles': 15778, 'telephone': 15779, 'nthanks': 15780, 'arnie': 15781, 'correspondent': 15782, 'panning': 15783, 'huddles': 15784, 'barber': 15785, 'n28': 15786, 'meets': 15787, 'toothpick': 15788, 'emcee': 15789, 'kansas': 15790, 'kids': 15791, 'pencil': 15792, 'nobu': 15793, 'clarise': 15794, 'tools': 15795, 'stoic': 15796, 'hustles': 15797, 'addresses': 15798, 'computer': 15799, 'ought': 15800, 'feeds': 15801, 'villagers': 15802, 'rorschach': 15803, 'went': 15804, 'chased': 15805, 'tu': 15806, 'california': 15807, 'companion': 15808, 'tooth': 15809, 'cork': 15810, 'hoofs': 15811, 'issue': 15812, 'merchant': 15813, 'licking': 15814, 'level': 15815, 'nnever': 15816, 'bellow': 15817, 'letters': 15818, 'sights': 15819, 'units': 15820, 'type': 15821, 'whawhak': 15822, 'audio': 15823, 'washington': 15824, 'goeth': 15825, 'locomotive': 15826, 
'rivers': 15827, 'prize': 15828, 'disco': 15829, 'warrior': 15830, 'n56': 15831, 'assume': 15832, 'missions': 15833, 'base': 15834, 'n137': 15835, 'biological': 15836, 'npaper': 15837, 'modest': 15838, 'registering': 15839, 'tanks': 15840, 'waves': 15841, 'ecto': 15842, 'amsterdam': 15843, 'hostile': 15844, 'njasmine': 15845, 'comms': 15846, 'nstops': 15847, 'glint': 15848, 'ncrosses': 15849, 'lawns': 15850, 'card': 15851, 'mid': 15852, 'football': 15853, 'wingman': 15854, 'suites': 15855, 'vomiting': 15856, 'serving': 15857, 'blob': 15858, 'orbit': 15859, 'zurich': 15860, 'neazy': 15861, 'grunt': 15862, 'interest': 15863, 'slashed': 15864, 'records': 15865, 'beauties': 15866, 'puzzle': 15867, 'referee': 15868, 'intel': 15869, 'comfortably': 15870, 'brains': 15871, 'iosef': 15872, 'shite': 15873, 'nboyo': 15874, 'nwhile': 15875, 'alike': 15876, 'numbers': 15877, 'retching': 15878, 'relaxing': 15879, 'nchien': 15880, '149': 15881, 'stinging': 15882, 'pose': 15883, 'tanned': 15884, 'drank': 15885, 'meeks': 15886, 'lois': 15887, 'junk': 15888, 'cells': 15889, 'allow': 15890, 'meaningless': 15891, 'orbiting': 15892, 'cane': 15893, '162': 15894, 'tactical': 15895, 'lighten': 15896, 'most': 15897, 'johnson': 15898, 'heave': 15899, 'sonny': 15900, 'nindependence': 15901, 'experiences': 15902, 'settle': 15903, 'pier': 15904, 'catbox': 15905, 'dedicated': 15906, 'factly': 15907, 'corrigan': 15908, 'refers': 15909, 'mimicking': 15910, 'girlfriend': 15911, 'producing': 15912, 'applause': 15913, 'attendants': 15914, '248': 15915, 'fabienne': 15916, 'neutral': 15917, 'tuffnut': 15918, 'stretched': 15919, 'violin': 15920, 'disappeared': 15921, 'provide': 15922, 'amado': 15923, 'sack': 15924, 'severed': 15925, 'tapas': 15926, 'makes': 15927, 'gain': 15928, 'dumas': 15929, 'variety': 15930, 'sheepish': 15931, 'caked': 15932, 'hesitantly': 15933, 'booms': 15934, 'incredible': 15935, 'sittin': 15936, 'derringer': 15937, 'timecut': 15938, 'wig': 15939, 'nsequence': 15940, 'military': 
15941, 'crudely': 15942, 'vin': 15943, 'into': 15944, "'morning": 15945, 'reich': 15946, 'ntake': 15947, 'mover': 15948, 'deadly': 15949, 'scavengers': 15950, '51': 15951, 'crank': 15952, 'forearms': 15953, 'desperate': 15954, 'technician': 15955, 'jar': 15956, 'nmove': 15957, 'ndistance': 15958, 'plateau': 15959, 'slaver': 15960, 'instructor': 15961, 'yankees': 15962, 'transportation': 15963, 'inclined': 15964, 'chalkboard': 15965, 'wanta': 15966, 'cigar': 15967, 'spits': 15968, 'ark': 15969, 'hostage': 15970, 'nwatch': 15971, 'palantine': 15972, 'convenient': 15973, 'stinger': 15974, 'sachiko': 15975, 'n21': 15976, 'rice': 15977, 'wars': 15978, 'ottavio': 15979, 'aiming': 15980, 'shoo': 15981, 'ghostly': 15982, 'howdy': 15983, '24': 15984, 'sakes': 15985, 'dymond': 15986, 'crocs': 15987, 'x': 15988, 'western': 15989, 'curtie': 15990, 'scatters': 15991, 'rescuing': 15992, 'shirt': 15993, 'safeway': 15994, 'rita': 15995, 'visa': 15996, 'waffles': 15997, 'charismatic': 15998, 'lotion': 15999, 'staggers': 16000, 'drew': 16001, 'per': 16002, 'killoran': 16003, 'creem': 16004, 'casull': 16005, 'marcellus': 16006, 'squirts': 16007, 'organ': 16008, 'neglected': 16009, 'rogue': 16010, "lillian's": 16011, 'disturbance': 16012, 'napproaching': 16013, 'yard': 16014, 'janitor': 16015, 'nfade': 16016, 'unclear': 16017, 'filters': 16018, 'walkie': 16019, 'clinic': 16020, 'nfredrickson': 16021, 'proud': 16022, 'clattering': 16023, 'steer': 16024, 'millionaire': 16025, 'flowing': 16026, 'julie': 16027, 'whiteness': 16028, 'killing': 16029, 'thigh': 16030, 'admiringly': 16031, 'kelly': 16032, 'popped': 16033, 'peeking': 16034, 'fields': 16035, 'bothers': 16036, 'automatically': 16037, 'sealed': 16038, 'cellar': 16039, 'electrodes': 16040, 'ostrich': 16041, 'boulders': 16042, 'hurt': 16043, 'agape': 16044, 'mosquitos': 16045, 'degrees': 16046, 'barefoot': 16047, 'bathe': 16048, 'stormtroopers': 16049, 'savings': 16050, 'killer': 16051, 'rounds': 16052, 'conservative': 16053, 
'wing': 16054, 'veidt': 16055, 'stu': 16056, 'revised': 16057, 'distinct': 16058, 'bites': 16059, 'orleans': 16060, 'dark': 16061, 'letter': 16062, 'regulation': 16063, 'greets': 16064, 'velcro': 16065, 'bandit': 16066, 'coat': 16067, 'nzed': 16068, 'futile': 16069, 'unmoved': 16070, 'potted': 16071, 'adriana': 16072, 'plankton': 16073, 'lord': 16074, 'paulina': 16075, "'i": 16076, 'leather': 16077, 'denise': 16078, 'tower': 16079, 'diving': 16080, 'scoring': 16081, 'argyle': 16082, 'translate': 16083, 'wheelchair': 16084, 'noutside': 16085, 'nwearing': 16086, 'peck': 16087, 'chapel': 16088, 'ecu': 16089, 'distraction': 16090, 'decorating': 16091, 'from': 16092, 'magnetic': 16093, 'southern': 16094, 'apologetically': 16095, 'confined': 16096, 'whiskerandos': 16097, 'beet': 16098, 'stripes': 16099, 'platoon': 16100, 'olsen': 16101, 'erica': 16102, 'pancakes': 16103, 'compete': 16104, 'sector': 16105, 'wellhead': 16106, 'fear': 16107, 'boils': 16108, 'nover': 16109, 'entertainment': 16110, 'books': 16111, 'pool': 16112, 'mantle': 16113, 'inch': 16114, 'its': 16115, 'choppers': 16116, 'rally': 16117, 'bowls': 16118, 'casing': 16119, 'cash': 16120, 'hood': 16121, 'simulator': 16122, 'relieved': 16123, 'flies': 16124, 'met': 16125, 'rover': 16126, 'prisoner': 16127, 'allowing': 16128, 'jetty': 16129, 'officers': 16130, 'bob': 16131, 'glued': 16132, 'testify': 16133, 'land': 16134, 'gertrude': 16135, 'freed': 16136, 'peg': 16137, 'nandre': 16138, 'whore': 16139, 'estate': 16140, '36': 16141, 'nwinston': 16142, 'sprawling': 16143, 'harrigan': 16144, 'seal': 16145, 'surge': 16146, 'ornamental': 16147, 'propelling': 16148, 'tattooist': 16149, 'treasures': 16150, 'posing': 16151, 'heel': 16152, 'descends': 16153, 'rooms': 16154, 'strangled': 16155, 'that': 16156, 'sheik': 16157, 'nopposite': 16158, 'nfirst': 16159, 'carnival': 16160, 'roth': 16161, 'formica': 16162, 'nbeing': 16163, 'lack': 16164, 'overly': 16165, 'demons': 16166, 'umpire': 16167, 'losers': 16168, 
'whispers': 16169, 'rectangular': 16170, 'quack': 16171, 'prefers': 16172, '47': 16173, 'tubes': 16174, 'appointment': 16175, 'apricot': 16176, 'defibrillator': 16177, 'lust': 16178, 'ruffnut': 16179, 'confident': 16180, 'dangling': 16181, 'gadgets': 16182, 'amerigo': 16183, 'skeleton': 16184, 'slob': 16185, 'nturkle': 16186, 'word': 16187, 'shan': 16188, 'sighting': 16189, 'ripple': 16190, 'pees': 16191, 'dis': 16192, 'speaking': 16193, 'maniacally': 16194, 'escort': 16195, 'closet': 16196, 'perch': 16197, 'philips': 16198, 'n16': 16199, 'revolver': 16200, 'pumped': 16201, 'angela': 16202, 'eyed': 16203, 'winchester': 16204, 'murderers': 16205, 'cheated': 16206, 'elementary': 16207, 'runaway': 16208, 'hurling': 16209, 'armchair': 16210, 'purchased': 16211, "'ol": 16212, 'betina': 16213, 'nstands': 16214, 'friendship': 16215, 'nopens': 16216, 'coastline': 16217, 'extends': 16218, 'devastating': 16219, 'therapist': 16220, 'pills': 16221, 'handcuffs': 16222, 'roundhouse': 16223, 'karnak': 16224, '161': 16225, 'dying': 16226, 'welfare': 16227, 'buzzer': 16228, 'jabbing': 16229, '90': 16230, 'nfull': 16231, 'commendatore': 16232, 'thugs': 16233, 'cone': 16234, 'bacall': 16235, 'praise': 16236, 'six': 16237, 'trail': 16238, 'mingle': 16239, 'alternative': 16240, 'nfine': 16241, 'bicycle': 16242, 'ful': 16243, 'doug': 16244, 'cow': 16245, 'nanything': 16246, 'n33': 16247, 'clean': 16248, 'prob': 16249, 'galloping': 16250, 'eleanor': 16251, 'protected': 16252, 'harpsichord': 16253, 'processing': 16254, 'moron': 16255, 'xc2': 16256, 'employed': 16257, 'peltzer': 16258, 'demolished': 16259, 'bathed': 16260, 'liaison': 16261, 'quit': 16262, 'eisenhower': 16263, 'grate': 16264, 'crimson': 16265, 'orders': 16266, 'diamonds': 16267, 'kissed': 16268, 'reeling': 16269, 'uniforms': 16270, 'denying': 16271, 'berries': 16272, 'chhatrapati': 16273, 'void': 16274, 'n35': 16275, '16mm': 16276, 'pornography': 16277, 'short': 16278, 'petting': 16279, 'low': 16280, 'shaft': 16281, 
'pelts': 16282, 'n70': 16283, 'interfering': 16284, 'liepold': 16285, 'sean': 16286, 'archer': 16287, 'wigs': 16288, 'handkerchief': 16289, 'families': 16290, 'attending': 16291, 'in': 16292, 'becoming': 16293, 'well': 16294, 'replacement': 16295, 'crepe': 16296, 'anymore': 16297, 'esme': 16298, 'tethered': 16299, 'courtyard': 16300, '7': 16301, 'medical': 16302, 'influence': 16303, 'nod': 16304, '246': 16305, 'deal': 16306, '86': 16307, 'warp': 16308, 'mass': 16309, 'nwow': 16310, 'mama': 16311, 'obsessed': 16312, 'involuntarily': 16313, 'threepio': 16314, 'christine': 16315, 'warsaw': 16316, 'sweeps': 16317, 'dawning': 16318, 'index': 16319, 'aloft': 16320, 'encounter': 16321, 'example': 16322, 'sparring': 16323, 'whimpers': 16324, 'wave': 16325, '38': 16326, 'flinch': 16327, 'bambi': 16328, '66': 16329, 'haphazard': 16330, 'sully': 16331, 'journal': 16332, 'firmly': 16333, 'advance': 16334, 'divers': 16335, 'forcible': 16336, 'angrily': 16337, 'airforce': 16338, 'crossbow': 16339, 'skitters': 16340, 'an': 16341, 'adolescent': 16342, 'nputs': 16343, 'fitzgeralds': 16344, 'nsfx': 16345, 'diary': 16346, 'opener': 16347, 'musical': 16348, 'florida': 16349, 'disgusted': 16350, 'pings': 16351, 'baldy': 16352, 'goodbye': 16353, 'treacherous': 16354, 'fluorescent': 16355, 'tunes': 16356, 'strength': 16357, 'wires': 16358, 'baby': 16359, 'defiance': 16360, 'lizzy': 16361, 'booth': 16362, 'drugged': 16363, 'championship': 16364, 'tow': 16365, 'zero': 16366, 'solo': 16367, 'writhes': 16368, 'scared': 16369, 'squares': 16370, 'estrella': 16371, 'put': 16372, 'disoriented': 16373, 'nsees': 16374, 'identified': 16375, 'nbear': 16376, 'confidentially': 16377, 'napkin': 16378, 'tucked': 16379, 'gauge': 16380, 'sagging': 16381, 'cameron': 16382, 'protocol': 16383, 'npeach': 16384, 'strangest': 16385, 'crackling': 16386, 'stiff': 16387, 'weren': 16388, 'reply': 16389, 'forwards': 16390, 'fears': 16391, 'ntogether': 16392, 'shimmering': 16393, 'rocks': 16394, 'foundation': 16395, 
'afghanistan': 16396, 'bursts': 16397, 'racket': 16398, 'connects': 16399, 'hesitates': 16400, 'step': 16401, 'plenty': 16402, 'sculptures': 16403, 'jingle': 16404, 'shushes': 16405, 'solved': 16406, 'grating': 16407, 'lillian': 16408, 'satellite': 16409, 'slimer': 16410, 'mercs': 16411, 'occasion': 16412, 'trickling': 16413, 'affection': 16414, 'organization': 16415, '747': 16416, 'lit': 16417, 'hordes': 16418, 'donut': 16419, 'guided': 16420, 'rummages': 16421, 'massassi': 16422, 'due': 16423, 'installed': 16424, 'night': 16425, 'cartwheels': 16426, 'spheres': 16427, 'dialing': 16428, 'cool': 16429, 'surroundings': 16430, 'looming': 16431, 'chimes': 16432, 'net': 16433, 'palapa': 16434, 'uniform': 16435, 'cause': 16436, 'ballet': 16437, 'lowest': 16438, 'laser': 16439, 'nelson': 16440, 'preparing': 16441, 'lactic': 16442, 'neptune': 16443, 'intensifies': 16444, 'signalling': 16445, 'ian': 16446, 'lurches': 16447, 'egon': 16448, 'belief': 16449, 'awakened': 16450, 'oxman': 16451, 'edith': 16452, 'posters': 16453, 'receding': 16454, '252': 16455, 'constant': 16456, 'handled': 16457, 'nous': 16458, 'nchick': 16459, 'hem': 16460, 'million': 16461, 'squads': 16462, 'n9': 16463, 'sayin': 16464, 'refuses': 16465, 'bothered': 16466, 'timer': 16467, 'washroom': 16468, 'n82': 16469, 'grabbed': 16470, 'talked': 16471, 'devouring': 16472, 'window': 16473, 'emotional': 16474, 'pleased': 16475, 'thieves': 16476, 'general': 16477, 'tone': 16478, 'n14': 16479, 'sentence': 16480, 'rasch': 16481, 'groceries': 16482, 'remarks': 16483, 'flexing': 16484, 'reflects': 16485, 'possibilities': 16486, 'appliance': 16487, 'carnage': 16488, 'gronkle': 16489, 'collective': 16490, 'sings': 16491, 'withering': 16492, 'sparkles': 16493, 'happen': 16494, 'applying': 16495, 'patrons': 16496, 'encased': 16497, 'bring': 16498, 'forging': 16499, 'statues': 16500, 'slaughter': 16501, 'airlock': 16502, 'prep': 16503, 'sue': 16504, 'mystified': 16505, 'cindy': 16506, 'limb': 16507, 'warriors': 16508, 
'enemies': 16509, 'gravel': 16510, 'apartment': 16511, 'dressing': 16512, 'scowl': 16513, 'surprise': 16514, 'say': 16515, 'ears': 16516, 'highness': 16517, 'gallagher': 16518, 'tracker': 16519, 'tier': 16520, 'madman': 16521, 'spaceport': 16522, 'minus': 16523, 'died': 16524, 'places': 16525, 'mainland': 16526, 'emperor': 16527, 'trucker': 16528, 'attacks': 16529, 'dawn': 16530, 'ntwo': 16531, 'attended': 16532, 'totally': 16533, 'decrepit': 16534, 'arriving': 16535, 'vegas': 16536, 'chops': 16537, 'dives': 16538, 'hills': 16539, 'settled': 16540, 'bedroom': 16541, 'thing': 16542, 'repeats': 16543, 'sa': 16544, 'canadian': 16545, 'architect': 16546, 'ecstasy': 16547, 'vet': 16548, 'crumbling': 16549, 'hurtles': 16550, 'depresses': 16551, 'griffith': 16552, 'vcr': 16553, 'engaged': 16554, 'stairway': 16555, 'school': 16556, 'equipment': 16557, 'called': 16558, 'violated': 16559, 'surfboard': 16560, 'transistor': 16561, 'scenic': 16562, 'surfers': 16563, 'really': 16564, 'stayed': 16565, 'appreciation': 16566, 'o': 16567, 'dumbass': 16568, 'coronary': 16569, 'robes': 16570, 'least': 16571, 'violation': 16572, 'frantically': 16573, 'sailor': 16574, 'storefronts': 16575, 'n10': 16576, 'nslinky': 16577, 'worthless': 16578, 'concentration': 16579, 'interesting': 16580, 'oxygen': 16581, 'urgent': 16582, 'tails': 16583, 'brighter': 16584, 'polish': 16585, 'paddling': 16586, 'peppers': 16587, 'bartholomew': 16588, 'vessel': 16589, 'armada': 16590, 'ducks': 16591, 'st': 16592, 'orthodox': 16593, 'prosperous': 16594, 'yale': 16595, 'disheveled': 16596, 'cereal': 16597, 'aurelio': 16598, 'dries': 16599, 'airways': 16600, 'sealing': 16601, 'landcruiser': 16602, 'chayka': 16603, 'gordon': 16604, 'billboard': 16605, 'godfather': 16606, '258': 16607, 'asia': 16608, 'traitor': 16609, 'own': 16610, 'closest': 16611, 'goodness': 16612, 'concentrating': 16613, 'sternly': 16614, 'orgasm': 16615, 'attracts': 16616, 'gangster': 16617, 'exploring': 16618, 'nmiddle': 16619, 'n126': 16620, 
'west': 16621, 'banquet': 16622, 'killers': 16623, 'n148': 16624, 'jobs': 16625, 'marine': 16626, 'ernie': 16627, 'mam': 16628, 'inner': 16629, 'uncomfortable': 16630, 'authorities': 16631, 'recoils': 16632, 'naround': 16633, 'wind': 16634, 'canned': 16635, 'tomb': 16636, 'university': 16637, 'pony': 16638, 'temple': 16639, 'yell': 16640, 'disconnects': 16641, 'papageno': 16642, 'carlisle': 16643, 'phaser': 16644, 'fate': 16645, 'diners': 16646, 'cuz': 16647, 'wrenches': 16648, 'local': 16649, 'francis': 16650, 'zippo': 16651, 'nmost': 16652, 'shove': 16653, 'flushes': 16654, 'psychedelic': 16655, 'athlete': 16656, 'guttural': 16657, 'picturesque': 16658, 'created': 16659, 'friends': 16660, 'start': 16661, 'rubbing': 16662, 'nheard': 16663, 'cradling': 16664, 'slides': 16665, 'embraces': 16666, 'labeled': 16667, 'carrot': 16668, 'vaseline': 16669, 'nsong': 16670, 'palm': 16671, 'valley': 16672, 'tighten': 16673, 'testimony': 16674, 'unexpected': 16675, 'sees': 16676, 'inscription': 16677, 'update': 16678, 'info': 16679, 'broadcasting': 16680, 'busted': 16681, 'solemnly': 16682, 'hands': 16683, 'meticulously': 16684, 'retarded': 16685, 'likely': 16686, 'calculated': 16687, 'yearbook': 16688, 'actions': 16689, 'rev': 16690, 'jill': 16691, 'divorce': 16692, 'paso': 16693, 'pierces': 16694, 'nursery': 16695, 'goin': 16696, 'nero': 16697, 'nsound': 16698, 'gus': 16699, 'nyeah': 16700, 'footing': 16701, 'loosened': 16702, 'plissken': 16703, 'dobkins': 16704, 'slapping': 16705, 'adrenalized': 16706, 'nwall': 16707, 'domestic': 16708, 'miami': 16709, 'ncliff': 16710, 'trusting': 16711, 'waistband': 16712, 'computerized': 16713, 'maple': 16714, 'destined': 16715, 'saws': 16716, 'products': 16717, 'kindling': 16718, 'vader': 16719, 'insert': 16720, 'promised': 16721, 'city': 16722, 'iron': 16723, 'dome': 16724, 'vince': 16725, 'blankly': 16726, 'searching': 16727, 'lexus': 16728, 'pow': 16729, 'highland': 16730, 'subhallway': 16731, 'lessons': 16732, "'c": 16733, 'mower': 
16734, 'candie': 16735, 'awhile': 16736, 'rejected': 16737, 'boulevard': 16738, 'kickin': 16739, 'bite': 16740, 'ponder': 16741, 'yawning': 16742, 'n208': 16743, 'kennedy': 16744, '42': 16745, 'weeps': 16746, 'mourners': 16747, 'logo': 16748, 'flo': 16749, 'collision': 16750, 'guards': 16751, 'curtained': 16752, 'unfortunate': 16753, 'butterfly': 16754, 'ends': 16755, 'npelican': 16756, 'firebird': 16757, 'nwomen': 16758, 'played': 16759, 'joker': 16760, 'burglary': 16761, 'sucker': 16762, 'agitated': 16763, 'marsha': 16764, 'voltage': 16765, 'capone': 16766, 'tribal': 16767, 'deputies': 16768, 'charly': 16769, 'poop': 16770, 'cosmo': 16771, 'narrowing': 16772, 'knock': 16773, 'ph': 16774, 'bulls': 16775, 'liver': 16776, 'satin': 16777, 'quick': 16778, 'hour': 16779, 'seats': 16780, 'enjoyed': 16781, 'classes': 16782, 'stalling': 16783, 'puss': 16784, 'intercut': 16785, 'crumpled': 16786, "didn't": 16787, 'topples': 16788, 'items': 16789, 'silhouetted': 16790, 'chocolates': 16791, 'state': 16792, 'pill': 16793, 'cher': 16794, 'luck': 16795, 'scribbled': 16796, 'nyour': 16797, 'striped': 16798, 'spear': 16799, 'worms': 16800, 'kidney': 16801, 'utters': 16802, 'secretly': 16803, 'paints': 16804, 'jewels': 16805, 'taffy': 16806, 'hyperventilating': 16807, 'transmission': 16808, 'frannie': 16809, 'giraffe': 16810, 'option': 16811, 'though': 16812, 'hair': 16813, 'nsecond': 16814, 'securty': 16815, 'ndr': 16816, 'barely': 16817, 'earrings': 16818, 'electric': 16819, 'protruding': 16820, 'hatchway': 16821, 'continental': 16822, 'chum': 16823, 'cobol': 16824, 'helping': 16825, 'nbye': 16826, 'tee': 16827, 'proceed': 16828, "ni'm": 16829, 'tickets': 16830, 'welcoming': 16831, 'lightspeed': 16832, 'humiliation': 16833}
trope_list = ['"the reason you suck" speech', '"well done, son!" guy', '"what now?" ending', '"where are they now?" epilogue', 'a date with rosie palms', 'a god am i', 'abusive parents', 'action film, quiet drama scene', 'action girl', 'action prologue', 'actually pretty funny', 'adaptation distillation', 'adaptation expansion', 'adaptation personality change', 'adaptational attractiveness', 'adaptational heroism', 'adaptational villainy', 'adapted out', 'adorkable', 'adult fear', 'affably evil', 'affectionate parody', 'air-vent passageway', 'alas, poor villain', 'all girls want bad boys', 'all there in the manual', 'all there in the script', 'alliterative name', 'alone with the psycho', 'alternative foreign theme song', 'aluminum christmas trees', 'ambiguous disorder', 'ambiguous situation', 'ambiguously gay', 'amusing injuries', 'an aesop', 'an arm and a leg', 'anachronic order', 'anachronism stew', 'analogy backfire', 'and i must scream', 'and starring', 'answer cut', 'anti-hero', 'anti-villain', 'anyone can die', 'apocalypse how', 'armor-piercing question', 'arson, murder, and jaywalking', 'artistic license', 'artistic license – biology', 'artistic license – geography', 'artistic license – gun safety', 'artistic license – history', 'artistic license – physics', 'as you know', 'ascended extra', 'ask a stupid question...', 'asshole victim', 'audience surrogate', 'author appeal', 'author avatar', 'award-bait song', 'awesome mc coolname', 'ax-crazy', 'bad boss', 'bad-guy bar', 'badass bookworm', 'badass crew', 'badass in a nice suit', 'badass longcoat', 'bald of awesome', 'bald of evil', 'bar brawl', 'bare your midriff', 'batman gambit', 'be careful what you wish for', 'beauty is never tarnished', 'behind the black', 'beware the nice ones', 'big "no!"', 'big "what?!"', 'big eater', "bitch in sheep's clothing", 'black and gray morality', 'black and white morality', 'black comedy', 'black dude dies first', 'blatant lies', 'bloodless carnage', 'body horror', 'bond 
one-liner', 'book dumb', 'book-ends', 'bookends', 'boom, headshot!', 'bottomless magazines', 'bowdlerise', 'break the cutie', 'break the haughty', 'break-up/make-up scenario', 'breaking the fourth wall', 'brief accent imitation', 'brilliant, but lazy', 'bring my brown pants', 'broken ace', 'broken pedestal', 'buffy speak', 'bullet time', 'bullying a dragon', 'bunny-ears lawyer', 'call-back', 'calling the old man out', 'cannot spit it out', 'captain obvious', 'cassandra truth', 'casting gag', 'casual danger dialogue', 'catch-phrase', 'celebrity paradox', 'central theme', 'character development', "chekhov's gunman", "chekhov's hobby", "chekhov's skill", 'chewing the scenery', 'cloud cuckoo lander', 'cloudcuckoolander', 'cluster f-bomb', 'cold-blooded torture', 'color-coded characters', 'color-coded for your convenience', 'combat pragmatist', 'comically missing the point', 'coming-of-age story', 'composite character', 'contrived coincidence', 'cool car', 'cool old guy', 'corpsing', 'costume porn', 'covers always lie', 'crapsaccharine world', 'crapsack world', 'creative closing credits', 'credits gag', 'crouching moron, hidden badass', 'crowd song', 'cruel and unusual death', 'curb-stomp battle', 'curbstomp battle', 'curse cut short', 'cursed with awesome', 'damsel in distress', 'dark reprise', 'darker and edgier', 'darkest hour', 'david vs. 
goliath', 'death by adaptation', 'death glare', 'deconstruction', 'decoy protagonist', 'defiant to the end', 'defrosting ice queen', 'deliberate values dissonance', 'deliberately monochrome', 'demoted to extra', 'department of redundancy department', 'description cut', 'despair event horizon', 'destination defenestration', 'determinator', 'deus ex machina', 'did not get the girl', "didn't think this through", 'diegetic switch', 'dirty cop', 'dirty coward', 'disappeared dad', 'disney death', 'disney villain death', 'disproportionate retribution', 'distracted by the sexy', 'does this remind you of anything?', 'double entendre', 'downer ending', 'dramatic irony', 'driven to suicide', 'drives like crazy', 'drowning my sorrows', 'dub name change', 'dumb blonde', 'dwindling party', 'dying moment of awesome', 'early-bird cameo', 'earn your happy ending', 'easter egg', 'eerie pale-skinned brunette', 'embarrassing first name', 'enemy mine', 'enemy rising behind', 'epic fail', 'eureka moment', 'even evil has loved ones', 'even evil has standards', 'everyone has standards', 'evil brit', 'evil cannot comprehend good', 'evil counterpart', 'evil is petty', 'evil laugh', 'exact words', 'exactly what it says on the tin', 'explain, explain... 
oh, crap!', 'expy', 'extremely short timespan', 'eye scream', 'face death with dignity', 'face palm', 'failed a spot check', 'fainting', 'family-unfriendly death', 'famous last words', 'fan disservice', 'fanservice', 'fantastic racism', 'fatal family photo', 'fatal flaw', 'fate worse than death', 'faux affably evil', 'film noir', 'fire-forged friends', 'five-man band', 'five-second foreshadowing', 'flat "what."', 'flipping the bird', 'foil', 'for the evulz', 'for want of a nail', 'foregone conclusion', 'four-temperament ensemble', 'freudian excuse', 'friendly enemy', 'from bad to worse', 'genre throwback', 'genre-busting', 'gentle giant', 'getting crap past the radar', 'gilligan cut', 'go out with a smile', 'gone horribly right', 'good is not nice', 'good scars, evil scars', 'gorn', 'gory discretion shot', 'guile hero', 'gunship rescue', 'hair-trigger temper', 'hammerspace', 'hand cannon', 'handicapped badass', 'happily married', 'hate sink', 'he who fights monsters', 'heel–face turn', 'held gaze', 'hero of another story', 'heroic sacrifice', 'heterosexual life-partners', 'hidden depths', 'historical in-joke', 'hoist by his own petard', 'hollywood law', 'homage', 'honor before reason', 'hope spot', 'how we got here', 'humiliation conga', 'hypocrite', 'hypocritical humor', 'i have your wife', 'i just want to be special', 'i need a freaking drink', 'i surrender, suckers', 'i want my beloved to be happy', 'idiot ball', 'imagine spot', 'impaled with extreme prejudice', 'imperial stormtrooper marksmanship academy', 'improbable aiming skills', 'improvised weapon', 'in medias res', 'indy ploy', 'informed attribute', 'insane troll logic', 'insistent terminology', 'instant death bullet', 'insult backfire', 'intergenerational friendship', 'irony', 'it will never catch on', "it's all about me", "it's personal", 'jack bauer interrogation technique', 'jerk with a heart of gold', 'jerkass has a point', 'jump scare', 'karmic death', 'kick the son of a bitch', 'kids are cruel', 
'kill it with fire', 'killed mid-sentence', 'kneel before zod', 'knight of cerebus', 'laser-guided karma', 'laughing mad', 'leaning on the fourth wall', 'left the background music on', 'leitmotif', 'lens flare', "let's get dangerous!", 'light is not good', 'lighter and softer', 'line-of-sight name', 'loads and loads of characters', 'lock and load montage', 'logo joke', 'love at first sight', 'love triangle', 'macguffin', 'mad scientist', 'made of iron', 'magic a is magic a', 'male gaze', 'manchild', 'manic pixie dream girl', 'manly tears', 'mass "oh, crap!"', 'match cut', 'meaningful background event', 'mercy kill', 'mind rape', 'mind screw', 'missing mom', 'mistaken for gay', 'mohs scale of violence hardness', 'moment killer', 'mook horror show', 'more dakka', 'mr. exposition', 'mr. fanservice', 'ms. fanservice', 'mugging the monster', 'my god, what have i done?', 'mythology gag', 'names to run away from really fast', 'neck lift', 'neck snap', 'never give the captain a straight answer', 'never my fault', 'never trust a trailer', 'nice guy', 'nice hat', 'nice job breaking it, hero!', 'nice job fixing it, villain!', 'nice to the waiter', 'no antagonist', 'no celebrities were harmed', 'no name given', 'no one gets left behind', 'no osha compliance', 'no-holds-barred beatdown', 'nobody poops', 'not even bothering with the accent', 'not quite dead', 'not so above it all', 'not so different', 'not so stoic', 'not what it looks like', 'nothing is scarier', 'o.o.c. 
is serious business', 'obfuscating stupidity', 'obstructive bureaucrat', 'obviously evil', 'odd friendship', 'off with his head!', 'offscreen moment of awesome', 'once more, with clarity!', 'one dialogue, two conversations', 'one steve limit', 'one-man army', 'one-word title', 'only a flesh wound', 'only known by their nickname', 'only sane man', "ooh, me accent's slipping", 'outrun the fireball', 'painting the medium', 'pants-positive safety', 'papa wolf', 'paper-thin disguise', 'parental abandonment', 'parental bonus', 'parental substitute', 'pay evil unto evil', 'pet the dog', 'plot armor', 'police are useless', 'politically correct history', 'politically incorrect villain', 'poor communication kills', 'posthumous character', 'power walk', 'pragmatic adaptation', 'pragmatic villainy', 'pre-asskicking one-liner', 'pre-mortem one-liner', 'product placement', 'properly paranoid', 'protagonist title', 'protagonist-centered morality', 'punch-clock villain', 'punctuated! for! emphasis!', 'punny name', 'race against the clock', 'race lift', 'rage against the reflection', 'rage breaking point', 'rated m for manly', 'real life writes the plot', 'reality has no subtitles', 'reality is unrealistic', 'reasonable authority figure', 'recycled in space', 'red herring', 'red oni, blue oni', 'red shirt', 'refuge in audacity', 'refusal of the call', 'reptiles are abhorrent', 'revenge before reason', 'revised ending', 'rewatch bonus', 'ridiculously cute critter', 'roaring rampage of revenge', 'rousing speech', 'rousseau was right', 'rule of cool', 'rule of symbolism', 'rule of three', 'sacrificial lamb', 'sacrificial lion', 'sanity slippage', 'save the villain', 'say my name', 'scare chord', 'scary black man', 'scenery gorn', 'screw the rules, i have money!', "screw the rules, i'm doing what's right!", "screw this, i'm outta here!", 'sean connery is about to shoot you', 'seinfeldian conversation', 'self-deprecation', 'sherlock scan', 'ship tease', 'shipper on deck', 'shirtless 
scene', 'shoo out the clowns', 'shoot the shaggy dog', "show, don't tell", 'shown their work', 'sir not-appearing-in-this-trailer', 'sir swears-a-lot', 'skyward scream', 'sliding scale of idealism vs. cynicism', 'small name, big ego', 'small role, big impact', 'smug snake', 'so proud of you', 'so what do we do now?', 'sole survivor', 'soundtrack dissonance', 'space is noisy', 'spanner in the works', 'spared by the adaptation', 'stalker with a crush', 'stealth insult', 'stealth pun', 'stepford smiler', 'stock scream', 'stuff blowing up', 'suddenly shouting!', 'tagline', 'take my hand', 'take that!', 'taking you with me', 'testosterone poisoning', "the '80s", 'the atoner', 'the bad guy wins', 'the big board', 'the chessmaster', 'the ditz', 'the dog bites back', 'the dragon', 'the dreaded', 'the dulcinea effect', 'the film of the book', 'the ghost', 'the load', 'the lost lenore', 'the only one allowed to defeat you', 'the quiet one', 'the reveal', 'the smurfette principle', 'the sociopath', 'the stinger', 'the stoic', 'the un-reveal', 'the unfettered', 'the worf effect', 'theme naming', 'there is no kill like overkill', 'this is gonna suck', 'those two bad guys', 'those two guys', 'thousand-yard stare', 'title drop', 'toilet humor', 'took a level in badass', 'trailers always lie', 'trailers always spoil', 'training from hell', 'tranquil fury', 'true companions', 'truth in television', 'uncle tomfoolery', 'understatement', 'undying loyalty', 'unflinching walk', 'ungrateful bastard', 'unreliable narrator', 'unresolved sexual tension', 'unspoken plan guarantee', 'unusually uninteresting sight', 'unwitting instigator of doom', 'very loosely based on a true story', 'villain ball', 'villain protagonist', 'villainous breakdown', 'villainous crush', 'visual pun', 'vitriolic best buds', 'vomit discretion shot', 'vomit indiscretion shot', 'walking shirtless scene', 'wham shot', 'what measure is a mook?', 'what the hell, hero?', 'what you are in the dark', 'worthy opponent', 
'would hit a girl', 'would hurt a child', 'wounded gazelle gambit', 'writers cannot do math', 'wrong genre savvy', 'you are better than you think you are', "you can't fight fate", 'you have outlived your usefulness', 'you have to believe me!', 'you killed my father', 'you monster!', 'your cheating heart']
# Size of the bag-of-words vocabulary; every script is encoded as a
# fixed-length binary presence vector of this many entries.
padding = 16834

#############################################################################################
model = load_model('TropeClassifier.model')

with open(FILENAME, 'r') as f:
    test_text = f.read()


def text_processor(t):
    """Tokenize raw script text into a list of lowercase words."""
    # Same filter set keras uses by default, kept explicit for reproducibility.
    return keras.preprocessing.text.text_to_word_sequence(
        t, filters='!"#$%&()*+,-./:;<=>?@[\\]^_`{|}~\t\n', lower=True, split=' ')


test_text_clean = text_processor(test_text)

# Encode the script as a binary bag-of-words vector over the known vocabulary;
# words outside word_to_id are simply ignored.
test_array = []
for script in [test_text_clean]:
    s = [0] * padding
    for word in script:
        if word in word_to_id:
            s[word_to_id[word]] = 1
    test_array.append(s)
test_array = np.array(test_array)

predictions = model.predict(test_array)

# Pair each trope name with its confidence as an integer percentage and keep
# the ten highest-scoring tropes.  (A bare `L` expression was removed here:
# it only had an effect in a notebook, where it displayed the value.)
L = sorted([(trope_list[i], trope)
            for i, trope in enumerate((predictions[0] * 100).astype('uint32'))],
           key=lambda x: x[1], reverse=True)[:10]
L = [' - '.join([str(ll) for ll in l]) for l in L]

df_out = pd.DataFrame({'Results': L})
# index=None previously relied on None being falsy; False states the intent.
df_out.to_csv(r'static/Results.csv', index=False)
| [
"noreply@github.com"
] | JeremyEudy.noreply@github.com |
7f0d8ddfb5d1865d4d1c3ef2676385f16418ee94 | 332b9de1b4679c95303a36cfa6edb5109b8895d6 | /categories/adminx.py | e8121eb6dac558e0744c19aec26982c43e634944 | [] | no_license | centyuan/centyuan-blog | 3c17bff07c62ce01140367ef9c4522a46beda2f8 | 8911a2308ecbcd838fc07bee5aa18f52bf7c38be | refs/heads/master | 2022-11-26T13:26:52.744820 | 2019-02-28T09:11:46 | 2019-02-28T09:11:46 | 148,744,820 | 0 | 0 | null | 2022-11-22T03:12:46 | 2018-09-14T06:23:30 | JavaScript | UTF-8 | Python | false | false | 352 | py | #-*- coding:utf-8 -*-
#author:centyuan
#@time:18-11-8 下午5:17
import xadmin
from .models import CategoriesModel
class CategoriesAdminx(object):
    """xadmin display options for CategoriesModel."""

    # Columns rendered in the admin change-list view.
    list_display = ['name', 'created_time', 'numbers', 'get_num']
    # Fields reachable from the admin search box.
    search_fields = ['name', 'numbers']
    # Sidebar filter controls.
    list_filter = ['name', 'created_time', 'numbers']
xadmin.site.register(CategoriesModel,CategoriesAdminx) | [
"centyuan@outlook.com"
] | centyuan@outlook.com |
dd922c4631d1621cba552c04e7a3a18198c51259 | 728ba26a9f26f706211124e8532c839d0fc155b1 | /corpus/apps/user/urls.py | 270c4499c5d56c53a6fe208ce07fc6251aacfd13 | [] | no_license | git00000/corpus | 62eeeac4350ea3ff4706ea61d10d18dfe810c5be | 9341a6d411db27295e3ac60a8308a116dac5fa59 | refs/heads/master | 2023-01-10T02:43:17.987180 | 2019-10-15T15:59:43 | 2019-10-15T15:59:43 | 214,669,617 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 938 | py | from django.urls import path
from .views import (
UserProfileView,
TranslationTaskUserView,
TranslationTaskNextItemAPIView,
TranslationTaskItemPhraseSavingAPIView)
app_name = "user"

# Pages rendered for the logged-in user.
urlpatterns = [
    path('', UserProfileView.as_view(), name="profile"),
    path('taches/<int:translation_task_id>',
         TranslationTaskUserView.as_view(),
         name="translation-task"),
]

# XHR/JSON endpoints consumed by the front end.
api_patterns = [
    path('translation-task-next-item/',
         TranslationTaskNextItemAPIView.as_view(),
         name="corpus-xhr-user-translation-task-next-item"),
    path('save-translation-task-item-phrase/',
         TranslationTaskItemPhraseSavingAPIView.as_view(),
         name="corpus-xhr-user-save-translation-task-item-phrase"),
]
"mohamedibrahima@protonmail.com"
] | mohamedibrahima@protonmail.com |
e1cb23bb5b50d46abe817c714906c781f2609d32 | dd0de36f03f313efb28cd52dc25a10a7c5c7f10b | /midi_inverter/midi_inverter.py | 61b2f3f97969f8d262f4abbf87115be6dc180105 | [
"ISC"
] | permissive | ebabel-eu/midi-inverter | ae6bfb87dc7ac291b40829a1fae57141b318472d | b1a018e47056e525c77613afb72acd9a42e9fbf4 | refs/heads/master | 2020-12-05T01:17:13.081323 | 2017-02-23T03:06:06 | 2017-02-23T03:06:06 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,175 | py | import mido, tempfile
def invert_around_middle(note, middle_note):
    """Reflect *note* to the opposite side of *middle_note*, same distance away."""
    offset = note - middle_note
    return note - 2 * offset
def invert_midi(infile, invert_drums=False):
    """Invert every note in a MIDI file around the midpoint of its pitch range.

    Reads a MIDI file from the file-like object *infile*, reflects each note
    across the middle of the piece's observed range, and returns a temporary
    file object (rewound to position 0) containing the inverted MIDI data.

    Percussion is skipped unless *invert_drums* is True.  Fixes versus the
    original: (a) percussion lives on MIDI *channel* 10 (mido channel 9), not
    track index 10, so the check is on ``message.channel``; (b) the pivot was
    ``(highest + lowest) / 2`` — a float under Python 3, which made inverted
    notes floats and mido rejects non-int note values.  The algebraically
    identical integer form ``highest + lowest - note`` is used instead.
    """
    mid = mido.MidiFile(file=infile)

    # First pass: find the extremes of the (non-drum) pitch range.
    highest_note = -1
    lowest_note = 1000
    for track in mid.tracks:
        for message in track:
            if message.type in ('note_on', 'note_off'):
                if not invert_drums and message.channel == 9:
                    continue  # General MIDI percussion channel
                lowest_note = min(lowest_note, message.note)
                highest_note = max(highest_note, message.note)

    # note - 2*(note - middle) == (highest + lowest) - note, which stays an
    # int and stays inside [lowest_note, highest_note].
    pivot_sum = highest_note + lowest_note

    # Second pass: reflect every eligible note across the midpoint.
    for track in mid.tracks:
        for message in track:
            if message.type in ('note_on', 'note_off'):
                if not invert_drums and message.channel == 9:
                    continue
                message.note = pivot_sum - message.note

    outfile = tempfile.TemporaryFile()
    mid.save(file=outfile)
    outfile.seek(0)
    return outfile
| [
"dan@dancusher.com"
] | dan@dancusher.com |
e758c96bcacadac46fa2e1e9c9e97c4040b4e08c | 85163b7a3a7d1e8f5867ef4b0b03a4fb5f8df54c | /control_and_ai/DDPG/train_ddpg_gpu.py | b8f73f1291c3559a49c941df8362c6591ef5459b | [] | no_license | abhinavkssk/rocket-lander | 63a1da6f158905c3e608e61136586cf2e89bf54b | f546f227c2f9d5cdb4cd02f0f1c8999df2a03a20 | refs/heads/master | 2021-05-06T14:24:37.632392 | 2017-08-10T09:41:53 | 2017-08-10T09:41:53 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,562 | py | from DDPG.ddpg import DDPG
from DDPG.train import set_up
from DDPG.train_third_model_normalized import train as train_third_model_normalized
from constants import DEGTORAD
from control_and_ai.DDPG.exploration import OUPolicy
from rocketlander_v2 import RocketLander
# Command-line / configuration flags for this training run.
FLAGS = set_up()
# Bounds for the three actions: main engine, side engines, nozzle angle (rad).
action_bounds = [1, 1, 15*DEGTORAD]
# One Ornstein-Uhlenbeck exploration process per action dimension.
eps = []
eps.append(OUPolicy(0, 0.2, 0.4))
eps.append(OUPolicy(0, 0.2, 0.4))
eps.append(OUPolicy(0, 0.2, 0.4))
# Environment configuration passed straight to RocketLander.
simulation_settings = {'Side Engines': True,
'Clouds': True,
'Vectorized Nozzle': True,
'Graph': False,
'Render': False,
'Starting Y-Pos Constant': 1,
'Initial Force': 'random',
'Rows': 1,
'Columns': 2,
'Episodes': 300}
env = RocketLander(simulation_settings)
#env = wrappers.Monitor(env, '/tmp/contlunarlander', force=True, write_upon_reset=True)
FLAGS.retrain = True # Restore weights if False
FLAGS.test = False
FLAGS.num_episodes = 300
# NOTE(review): hard-coded absolute Windows path — adjust for your machine.
model_dir = 'C://Users//REUBS_LEN//PycharmProjects//RocketLanding//DDPG//models_unlimited_episodes_full_normalized_normal_state'
# Actor-critic agent; state size is taken from the environment's observation space.
agent = DDPG(
action_bounds,
eps,
env.observation_space.shape[0],
actor_learning_rate=0.0001,
critic_learning_rate=0.001,
retrain=FLAGS.retrain,
log_dir=FLAGS.log_dir,
model_dir=model_dir)
#test(env, agent, simulation_settings)
train_third_model_normalized(env, agent, FLAGS)
#train_second_model(env, agent, FLAGS)
| [
"reuben.ferrante@gmail.com"
] | reuben.ferrante@gmail.com |
a76ac90843514fd223703c25311d3db82fdcb1d9 | fb86f0dca6e525b8a8ddb63f10b8d220ddd7f7fe | /test/functional/sapling_changeaddresses.py | f8aa5d49517df753cb930c26fe101290083c7303 | [
"MIT"
] | permissive | ORO-mlm/UNO-Core | 14fcdb3c2db4bde256e48ea661ada61579ccf403 | d6e6769ce57466cfc9e7cab681eab880cdb8e3e8 | refs/heads/main | 2023-06-16T08:21:00.808606 | 2021-07-12T07:08:35 | 2021-07-12T07:08:35 | 383,350,655 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,141 | py | #!/usr/bin/env python3
# Copyright (c) 2019 The Zcash developers
# Copyright (c) 2020 The PIVX developers
# Copyright (c) 2021- The UNO developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or https://www.opensource.org/licenses/mit-license.php.
from test_framework.test_framework import UnoTestFramework
from test_framework.util import *
from decimal import Decimal
# Test wallet change address behaviour
class WalletChangeAddressesTest(UnoTestFramework):
    """Checks that two consecutive shielded sends from the same transparent
    address use two different transparent change addresses (no reuse)."""
    def set_test_params(self):
        self.num_nodes = 2
        self.setup_clean_chain = True
        # Activate the v5 shield (Sapling) upgrade from block 1 on both nodes.
        saplingUpgrade = ['-nuparams=v5_shield:1']
        self.extra_args = [saplingUpgrade, saplingUpgrade]

    def run_test(self):
        self.nodes[0].generate(110)
        # Obtain some transparent funds
        midAddr = self.nodes[0].getnewshieldaddress()
        # Shield almost all the balance
        txid = self.nodes[0].shieldsendmany(get_coinstake_address(self.nodes[0]), [{"address": midAddr, "amount": Decimal(2400)}])
        self.sync_all()
        self.nodes[1].generate(1)
        self.sync_all()
        taddrSource = self.nodes[0].getnewaddress()
        # Unshield six small amounts to taddrSource so it holds several
        # UTXOs; later sends from it will then need change outputs.
        for _ in range(6):
            recipients = [{"address": taddrSource, "amount": Decimal('3')}]
            txid = self.nodes[0].shieldsendmany(midAddr, recipients, 1)
            self.sync_all()
            self.nodes[1].generate(1)
            self.sync_all()

        # NOTE(review): isTargetShielded is never used inside this helper.
        def check_change_taddr_reuse(target, isTargetShielded):
            recipients = [{"address": target, "amount": Decimal('1')}]
            # Send funds to recipient address twice
            txid1 = self.nodes[0].shieldsendmany(taddrSource, recipients, 1)
            self.nodes[1].generate(1)
            self.sync_all()
            txid2 = self.nodes[0].shieldsendmany(taddrSource, recipients, 1)
            self.nodes[1].generate(1)
            self.sync_all()
            # Verify that the two transactions used different change addresses
            tx1 = self.nodes[0].getrawtransaction(txid1, 1)
            tx2 = self.nodes[0].getrawtransaction(txid2, 1)
            assert_true(len(tx1['vout']) >= 1) # at least one output
            assert_true(len(tx2['vout']) >= 1)
            for i in range(len(tx1['vout'])):
                tx1OutAddrs = tx1['vout'][i]['scriptPubKey']['addresses']
                tx2OutAddrs = tx2['vout'][i]['scriptPubKey']['addresses']
                # Any output that is not the payment itself is change.
                if tx1OutAddrs != [target]:
                    print('Source address: %s' % taddrSource)
                    print('TX1 change address: %s' % tx1OutAddrs[0])
                    print('TX2 change address: %s' % tx2OutAddrs[0])
                    assert(tx1OutAddrs != tx2OutAddrs)

        taddr = self.nodes[0].getnewaddress()
        saplingAddr = self.nodes[0].getnewshieldaddress()
        print()
        print('Checking shieldsendmany(taddr->Sapling)')
        check_change_taddr_reuse(saplingAddr, True)
        print()
        print('Checking shieldsendmany(taddr->taddr)')
        check_change_taddr_reuse(taddr, False)
if __name__ == '__main__':
WalletChangeAddressesTest().main()
| [
"brandon2davincci@gmail.com"
] | brandon2davincci@gmail.com |
e71c89dfca11bf541afd663e30de86f024c6fb9e | 33e472f39027ac49a2297b740aff4e5989a15c4b | /pytypo/pytypo.py | 9fffa9e72e0e9b2fcce114a3f5f4bccc1fde6ab9 | [
"MIT"
] | permissive | pombredanne/pytypo | 96617e606ad12c21be3b4c09a9b8ec4d3c364366 | 9d6c3f588f86ec98971807a1037c55c8a4030e8b | refs/heads/master | 2021-01-18T14:42:20.112992 | 2017-11-22T22:49:56 | 2017-11-22T22:49:56 | 62,485,349 | 1 | 0 | null | 2017-11-22T22:49:57 | 2016-07-03T07:05:52 | Python | UTF-8 | Python | false | false | 1,458 | py | # -*- coding: utf-8 -*-
from sys import version_info
import re
from . import _conv_table
re_symbol = re.compile("[^a-zA-Z0-9']{2,256}$")
if version_info < (3, 0, 0):
range = xrange
def cut_repeat(text, threshold):
    """Reduce repeated characters until threshold

    Collapses any run of identical consecutive characters so that at most
    ``threshold`` occurrences are kept (e.g. ``cut_repeat("aaab", 2)`` ->
    ``"aab"``).

    Param:
        <str> text: input string (may be empty)
        <int> threshold: maximum allowed run length
    Return:
        <str> result: text with over-long character runs truncated
    """
    # Bug fix: the original indexed text[0] unconditionally, raising
    # IndexError on an empty string.
    if not text:
        return text
    # Collect kept characters in a list and join once, instead of
    # quadratic repeated string concatenation.
    kept = [text[0]]
    count = 0
    for i in range(1, len(text)):
        if text[i - 1] == text[i]:
            count += 1
            # Keep the repeated character only while the run length is
            # still below the threshold.
            if count < threshold:
                kept.append(text[i])
        else:
            count = 0
            kept.append(text[i])
    return ''.join(kept)
def correct(word):
    """Normalize repeat expression for English

    Param:
        <str> word
    Return:
        <str> normalized_word
    """
    # Detach a trailing run of symbols (e.g. "!!!") so the lookup tables
    # see the bare word; the deduplicated symbols are re-attached below.
    matches = re_symbol.findall(word)
    suffix = matches[0] if matches else None
    if suffix is not None:
        word = word.replace(suffix, '')
    word = cut_repeat(word, 2)
    # Try the lengthened-word table first, then fall back to the typo table.
    fixed = _conv_table.eng_lengthened.get(word, word)
    if fixed == word:
        fixed = _conv_table.eng_typo.get(word, word)
    if suffix is None:
        return fixed
    return fixed + cut_repeat(suffix, 1)
def correct_sentence(sentence):
    """Normalize for each word

    Param:
        <str> sentence
    Return:
        <str> normalized_sentence
    """
    # Normalize every whitespace-separated token and re-join with spaces.
    return ' '.join(map(correct, sentence.split()))
| [
"yknikgm@gmail.com"
] | yknikgm@gmail.com |
82b7bb23549063fd6da0b3b4d5e445361ee671fa | 18d74b7044f83f6dfca27f037cb3ad651f100ee1 | /ガウスザイデル法.py | 8a2001236998723768b3edfa8681a537a732a214 | [] | no_license | youngstar152/Numerical-Analysis | 539e02a90f29df2327a0ec9a6d11c11fe8f939f4 | 65d48a2134207a88797cb2de3af0c4acbc6a8a0a | refs/heads/main | 2023-05-29T06:14:07.803221 | 2021-06-13T13:07:42 | 2021-06-13T13:07:42 | 376,480,653 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 868 | py | import math
def yakobi(a,y):
    # Iteratively solve the 3x3 linear system a*x = y, printing each
    # iterate and its error until successive iterates differ by < 1e-6.
    # NOTE(review): despite the name ("yakobi" = Jacobi), rows 2 and 3
    # reuse x2[0]/x2[1] computed earlier in the same sweep (Gauss-Seidel
    # style), while row 2 also reads x[0] from the previous sweep -- a
    # hybrid update order; confirm which method was intended.
    error = pow(10,-6)  # convergence tolerance on the step norm
    length = len(y)
    x = [0] * length    # previous iterate
    x2 = [0] * length   # current iterate (persists across sweeps)
    count = 0           # sweep counter, for the printout only
    while True:
        sum=0
        # Row updates hard-coded for a 3-equation system.
        # NOTE(review): x2[2] read in the second line still holds the
        # previous sweep's value -- verify this ordering is intentional.
        x2[0] = (y[0] - a[0][1]*x[1] - a[0][2]*x[2]) / a[0][0]
        x2[1] = (y[1] - a[1][2]*x2[2] - a[1][0]*x[0]) / a[1][1]
        x2[2] = (y[2] - a[2][0]*x2[0] - a[2][1]*x2[1]) / a[2][2]
        # Euclidean distance between successive iterates.
        for i in range(length):
            sum+=pow((x2[i] - x[i]),2)
        sum=math.sqrt(sum)
        count += 1
        print("------" + str(count)+ "------")
        # z enumerates 1.0, 2.0, 3.0 -- apparently the known exact solution
        # of the sample system below, so "gosa" (= error) is printed
        # per component.
        z=1.0
        for i in range(length):
            print(str(x2[i])+"//gosa:"+str(z-x2[i]))
            z+=1.0
        print("total-gosa:"+str(sum))
        if(sum<error):
            break
        # Carry the current iterate over as the previous one.
        for i in range(length):
            x[i] = x2[i]

# Sample diagonally-dominant system; exact solution is x = (1, 2, 3).
a = [[7,-2,1],[-1,5,-2],[-2,-1,6]]
y = [6,3,14]
yakobi(a,y)
"noreply@github.com"
] | youngstar152.noreply@github.com |
97bdd84951ddab4fee9fb5bee69c5773d03d9543 | 39887d6b15bc6227b4fdf2b75ad180ceb5e950d1 | /TestBlog/blog/forms.py | e39295d3a0a488eec4a381157dcbc9d845d40d21 | [] | no_license | vonxar/mycode | 95d060a2d6eecdc72d503ea4fe2ae18a798d41f0 | c848fcc62b4d8005d9509b3a5d85bd1364558d85 | refs/heads/master | 2022-12-05T18:47:56.313598 | 2020-09-01T12:47:39 | 2020-09-01T12:47:39 | 289,490,124 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,402 | py | from django.forms import ModelForm, TextInput, Textarea
from django import forms
from django.conf import settings
from django.core.mail import BadHeaderError, send_mail
from django.http import HttpResponse
from blog.models import Comment, Reply
class CommentForm(ModelForm):
    """Comment posting form: labels are suppressed, Japanese placeholders
    ("name" / "comment body") guide the user instead."""
    class Meta:
        model = Comment
        fields = ('author', 'text')
        widgets = {
            'author': TextInput(attrs={
                'class': 'form-control',
                'placeholder': '名前',
            }),
            'text': Textarea(attrs={
                'class': 'form-control',
                'placeholder': 'コメント内容',
            }),
        }
        # Empty labels: the placeholders double as field captions.
        labels = {
            'author': '',
            'text': '',
        }
class ReplyForm(ModelForm):
    """Reply posting form; mirrors CommentForm but targets the Reply model
    ("name" / "reply body" placeholders)."""
    class Meta:
        model = Reply
        fields = ('author', 'text')
        widgets = {
            'author': TextInput(attrs={
                'class': 'form-control',
                'placeholder': '名前',
            }),
            'text': Textarea(attrs={
                'class': 'form-control',
                'placeholder': '返信内容',
            }),
        }
        # Empty labels: the placeholders double as field captions.
        labels = {
            'author': '',
            'text': '',
        }
class ContactForm(forms.Form):
    """Contact form (name / email / message) that emails the site owner."""
    name = forms.CharField(
        label='',
        max_length=100,
        widget=forms.TextInput(attrs={
            'class': 'form-control',
            'placeholder': "お名前",
        }),
    )
    email = forms.EmailField(
        label='',
        widget=forms.EmailInput(attrs={
            'class': 'form-control',
            'placeholder': "メールアドレス",
        }),
    )
    message = forms.CharField(
        label='',
        widget=forms.Textarea(attrs={
            'class': 'form-control',
            'placeholder': "お問い合わせ内容",
        }),
    )

    def send_email(self):
        """Send the cleaned form data to the configured host user.

        Must be called only after is_valid(); reads self.cleaned_data.
        Returns an HttpResponse only on BadHeaderError, otherwise None.
        """
        subject = "お問い合わせ"
        message = self.cleaned_data['message']
        name = self.cleaned_data['name']
        email = self.cleaned_data['email']
        # Show the sender as "Name <address>" in the From header.
        from_email = '{name} <{email}>'.format(name=name, email=email)
        recipient_list = [settings.EMAIL_HOST_USER]  # recipient: the site's own inbox
        try:
            send_mail(subject, message, from_email, recipient_list)
        except BadHeaderError:
            # Header injection attempt (newlines in user input).
            return HttpResponse("無効なヘッダが検出されました。")
"p02d1108@gmail.com"
] | p02d1108@gmail.com |
c99e55694550686a94afe335dad8b6cf9520cbf2 | 15e668cbbe5751156d9ec7925a94ccb4dea0c472 | /finalexam/urls.py | d2fd409b12f3217e2ac029b84f5fbdb82446c68b | [] | no_license | hamzanawaz31997/exam | b8cab7cb9c90970c4b3d2eedec49f5fb49b830dd | 65c99517cc6e1961731b7d04afb94a49d2bcb696 | refs/heads/master | 2020-05-22T14:54:36.828101 | 2019-05-13T10:24:03 | 2019-05-13T10:24:03 | 186,396,045 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 163 | py | from django.contrib import admin
from django.urls import path, include
urlpatterns = [
    # Django admin site.
    path('admin/', admin.site.urls),
    # Everything else is delegated to the exam app's URLconf.
    path('', include('exam.urls')),
]
| [
"hamzanawaz31997@yahoo.com"
] | hamzanawaz31997@yahoo.com |
88502bd6321b55185ed3f16746ae614c21a1f572 | 83534233897a52087f3758706a63f0c1cf46e79e | /arrays/130_SurroundRegions.py | 5f68755855610da0a31d04d5cd2105d7969c0a38 | [] | no_license | Jayesh97/programmming | 2a2bf1af5689e421774a40b0f1db5f123722f603 | 2b68a9a863207061d44dc50e0fd533e28eb2e010 | refs/heads/master | 2020-04-29T15:10:12.371878 | 2020-01-15T22:32:30 | 2020-01-15T22:32:30 | 176,219,531 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,184 | py | board = [["X","X","X","X"],
["X","O","O","X"],
["X","X","O","X"],
["X","O","X","X"]]
def surround(board):
    """Capture all 'O' regions fully enclosed by 'X' (LeetCode 130).

    Mutates *board* in place and prints the final grid; regions of 'O'
    connected to the border are left untouched.
    """
    if not board:
        return
    rows, cols = len(board), len(board[0])

    def open_neighbors(r, c):
        # Yield in-bounds 4-neighbors of (r, c) that still hold 'O'.
        for dr, dc in ((1, 0), (0, 1), (-1, 0), (0, -1)):
            nr, nc = r + dr, c + dc
            if 0 <= nr < rows and 0 <= nc < cols and board[nr][nc] == "O":
                yield nr, nc

    def mark_safe(r, c):
        # Flood-fill from a border 'O', tagging its whole region as safe.
        board[r][c] = "S"
        for nr, nc in open_neighbors(r, c):
            mark_safe(nr, nc)

    # Any 'O' on the border (plus everything connected to it) is safe.
    for r in range(rows):
        if board[r][0] == "O":
            mark_safe(r, 0)
        if board[r][cols - 1] == "O":
            mark_safe(r, cols - 1)
    for c in range(cols):
        if board[0][c] == "O":
            mark_safe(0, c)
        if board[rows - 1][c] == "O":
            mark_safe(rows - 1, c)

    # Remaining 'O's were enclosed -> capture; safe cells revert to 'O'.
    for r in range(rows):
        for c in range(cols):
            if board[r][c] == "O":
                board[r][c] = "X"
            if board[r][c] == "S":
                board[r][c] = "O"
    print(board)
surround(board) | [
"jayesh5397@gmail.com"
] | jayesh5397@gmail.com |
014921c7ff08df286253143df4ec0c42b80ba68d | 70b751bd12645fbcd47adec44b7c2e9dfb96e4c0 | /Twitter_Clustering_Model.py | edefe81a1c3b39ce4544ac74ae61b70879d50ae0 | [] | no_license | pdmnvj/Samples | 39fd4424f9dfbb664d4961edecd402991d934278 | d2c4759bec23dc0a8afe1e4e333f93fac8ed09a6 | refs/heads/master | 2020-03-19T12:26:17.377026 | 2018-06-08T20:54:02 | 2018-06-08T20:54:02 | 136,517,112 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 17,446 | py | # -*- coding: utf-8 -*-
"""
Created on Sat Feb 25 20:39:16 2017
@author: akshp
"""
import tweepy
from tweepy import OAuthHandler
from tweepy import Stream
from tweepy.streaming import StreamListener
import json
import re
import nltk
from nltk.tokenize import word_tokenize
from nltk.corpus import stopwords
import string
from collections import Counter
from nltk import bigrams
# load nltk's English stopwords as variable called 'stopwords'
#stopwords = nltk.corpus.stopwords.words('english')
# load nltk's SnowballStemmer as variabled 'stemmer'
from nltk.stem.snowball import SnowballStemmer
stemmer = SnowballStemmer("english")
emoticons_str = r"""
(?:
[:=;] # Eyes
[oO\-]? # Nose (optional)
[D\)\]\(\]/\\OpP] # Mouth
)"""
regex_str = [
emoticons_str,
r'<[^>]+>', # HTML tags
r'(?:@[\w_]+)', # @-mentions
r"(?:\#+[\w_]+[\w\'_\-]*[\w_]+)", # hash-tags
r'http[s]?://(?:[a-z]|[0-9]|[$-_@.&+]|[!*\(\),]|(?:%[0-9a-f][0-9a-f]))+', # URLs
r'(?:(?:\d+,?)+(?:\.?\d+)?)', # numbers
r"(?:[a-z][a-z'\-_]+[a-z])", # words with - and '
r'(?:[\w_]+)', # other words
r'(?:\S)' # anything else
]
tokens_re = re.compile(r'('+'|'.join(regex_str)+')', re.VERBOSE | re.IGNORECASE)
emoticon_re = re.compile(r'^'+emoticons_str+'$', re.VERBOSE | re.IGNORECASE)
punctuation = list(string.punctuation)
stop = stopwords.words('english') + punctuation + ['rt', 'via', '\n']
#grammar = r"""
# NBAR:
# {<NN.*|JJ>*<NN.*>} # Nouns and Adjectives, terminated with Nouns
#
# NP:
# {<NBAR>}
# {<NBAR><IN><NBAR>} # Above, connected with in/of/etc...
#"""
grammar = r"""
NBAR:
{<NN.*|JJ>*<NN.*>} # Nouns and Adjectives, terminated with Nouns
NP:
{<NBAR>}
"""
#grammar=r'KT: {(<JJ>* <NN.*>+ <IN>)? <JJ>* <NN.*>+}'
#grammar = """
# NP: {<DT|PP\$>?<JJ>*<NN>}
# {<NNP>+}
# {<NN>+}
# """
good_tags=set(['JJ','JJR','JJS','NN','NNP','NNS','NNPS'])
def tokenize(s):
    # Return every match of the combined token regex above (emoticons,
    # HTML tags, @-mentions, hash-tags, URLs, numbers, words) in order.
    return tokens_re.findall(s)
def preprocess(s, lowercase=False):
    """Tokenize *s*; if lowercase, fold case of everything but emoticons."""
    tokens = tokenize(s)
    if lowercase:
        # Emoticons (e.g. ':D') are case-sensitive, so leave them as-is.
        tokens = [token if emoticon_re.search(token) else token.lower() for token in tokens]
    return tokens
def tokenize_and_stem(text):
    """Tokenize tweet text, drop stopwords/mentions/hashtags/apostrophe
    tokens, keep only tokens containing a letter, and Snowball-stem them."""
    # first tokenize by sentence, then by word to ensure that punctuation is caught as it's own token
    #tokens = [word for sent in nltk.sent_tokenize(text) for word in nltk.word_tokenize(sent)]
    # tokens = [word.lower() for word in preprocess(text)
    #          if word not in stop and
    #          not word.startswith(('#', '@'))
    #          ]
    tokens = [word.lower() for sent in nltk.sent_tokenize(text)
              for word in preprocess(sent)
              if word not in stop and
              not word.startswith(('#', '@'))
              and not word.startswith("'")
              and not word.endswith("'")
              and "'" not in word]
    filtered_tokens = []
    # filter out any tokens not containing letters (e.g., numeric tokens, raw punctuation)
    for token in tokens:
        if re.search('[a-zA-Z]', token):
            filtered_tokens.append(token)
    stems = [stemmer.stem(t) for t in filtered_tokens]
    return stems
def tokenize_only(text):
    """Same filtering pipeline as tokenize_and_stem, but without stemming."""
    # first tokenize by sentence, then by word to ensure that punctuation is caught as it's own token
    tokens = [word.lower() for sent in nltk.sent_tokenize(text)
              for word in preprocess(sent)
              if word not in stop and
              not word.startswith(('#', '@'))
              and not word.startswith("'")
              and not word.endswith("'")
              and "'" not in word]
    # tokens = [word.lower() for word in preprocess(text)
    #           if word not in stop and
    #           not word.startswith(('#', '@'))]
    filtered_tokens = []
    # filter out any tokens not containing letters (e.g., numeric tokens, raw punctuation)
    for token in tokens:
        if re.search('[a-zA-Z]', token):
            filtered_tokens.append(token)
    return filtered_tokens
def leaves(tree):
    """Finds NP (nounphrase) leaf nodes of a chunk tree."""
    # Generator over the leaf lists (word, POS-tag pairs) of every
    # subtree the RegexpParser grammar labelled 'NP'.
    for subtree in tree.subtrees(filter = lambda t: t.label()=='NP'):
        yield subtree.leaves()
def normalise(word):
    """Lowercase *word*; stemming/lemmatization are currently disabled."""
    # Only case-folding is active -- the stem/lemmatize steps were
    # commented out in the original and are intentionally omitted.
    return word.lower()
def acceptable_word(word):
    """Checks conditions for acceptable word: length, stopword."""
    # A token qualifies when it is 2..40 characters long and its
    # lowercase form is not in the stopword list.
    return 2 <= len(word) <= 40 and word.lower() not in stop
def get_terms(tree):
    # For each NP leaf list, keep only acceptable words and normalise
    # them; each yielded item is a list of words forming one phrase.
    for leaf in leaves(tree):
        term = [ normalise(w) for w,t in leaf if acceptable_word(w) ]
        yield term
def get_chunks(text):
    """POS-tag and chunk *text* into noun phrases, then map each phrase
    through the ecdict lookup table; phrases absent from ecdict (or mapped
    to '') are dropped."""
    # Strip URLs before tagging.
    text = re.sub(r'http[s]?://(?:[a-z]|[0-9]|[$-_@.&+]|[!*\(\),]|(?:%[0-9a-f][0-9a-f]))+',r'',text)
    text = text.lower()
    toks = tokenize_only(text)
    postoks = nltk.tag.pos_tag(toks)
    # Chunk with the module-level NP grammar.
    chunker = nltk.RegexpParser(grammar)
    tree = chunker.parse(postoks)
    terms = get_terms(tree)
    allwords_tokenized = []
    for term in terms:
        phrase = []
        for word in term:
            phrase.append(word)
        key = ' '.join(phrase)
        # Only phrases with a non-empty mapping in ecdict survive.
        if key in ecdict:
            if ecdict.get(key) != '':
                allwords_tokenized.append(ecdict.get(key))
            #else:
            #allwords_tokenized.append(key)
            #allwords_tokenized.append(str(phrase))
    return allwords_tokenized
def get_words(text):
    """Return lowercased nouns/adjectives (tags in good_tags) from *text*,
    with URLs stripped and stopwords removed."""
    text = re.sub(r'http[s]?://(?:[a-z]|[0-9]|[$-_@.&+]|[!*\(\),]|(?:%[0-9a-f][0-9a-f]))+',r'',text)
    text = text.lower()
    toks = tokenize_only(text)
    tagged_words = nltk.tag.pos_tag(toks)
    # filter on certain POS tags and lowercase all words
    allwords_tokenized = [word.lower() for word, tag in tagged_words
                       if tag in good_tags and word.lower() not in stop
                       ]
    return allwords_tokenized
def get_phrases_and_terms(text,candidates='chunks'):
    """Dispatch candidate extraction: 'chunks' -> noun-phrase chunks,
    'words' -> POS-filtered words. Any other value yields an empty list.

    Note: get_chunks/get_words repeat the URL-strip and lowercasing done
    here, which is redundant but harmless.
    """
    boc_texts = []
    text = re.sub(r'http[s]?://(?:[a-z]|[0-9]|[$-_@.&+]|[!*\(\),]|(?:%[0-9a-f][0-9a-f]))+',r'',text)
    text = text.lower()
    if candidates == 'chunks':
        boc_texts.extend(get_chunks(text))
    elif candidates == 'words':
        boc_texts.extend(get_words(text))
    return boc_texts
import csv
dirname = 'C:/Users/akshp/Google Drive/Predict 453 Text Analytics/Project Twitter Text Analytics/Data/'
import pandas as pd
corpus = []
df = pd.read_csv(dirname+'TweetsTrumpPresidency.csv')
Tweets = df[(df.RetweetCount > 0)].Text #you can also use df['column_name']
for tweet in Tweets:
corpus.append(tweet)
#print(Tweets)
dirname = 'C:/Users/akshp/Google Drive/Predict 453 Text Analytics/Project Twitter Text Analytics/'
ecdict = {}
with open(dirname+'terms.csv', mode='r') as infile:
reader = csv.reader(infile)
ecdict = {row[1]:row[2] for row in reader}
totalvocab_tokenized=[]
for text in corpus:
text = re.sub(r'http[s]?://(?:[a-z]|[0-9]|[$-_@.&+]|[!*\(\),]|(?:%[0-9a-f][0-9a-f]))+',r'',text)
text = text.lower()
totalvocab_tokenized.extend(get_chunks(text))
vocab_frame = pd.DataFrame({'words': totalvocab_tokenized}, index = totalvocab_tokenized)
from sklearn.feature_extraction.text import TfidfVectorizer
#tfidf_vectorizer = TfidfVectorizer( stop_words=stop,tokenizer=tokenize, ngram_range=(1,1))
tfidf_vectorizer = TfidfVectorizer(max_df=0.8, max_features=100,
min_df=2, stop_words=stop,
use_idf=True, tokenizer=get_phrases_and_terms, ngram_range=(1,1))
%time tfidf_matrix = tfidf_vectorizer.fit_transform(corpus)
print(tfidf_matrix.shape)
terms = tfidf_vectorizer.get_feature_names()
dense = tfidf_matrix.todense()
from sklearn.metrics.pairwise import cosine_similarity
#cosine similarity if document 1 with others
cosine_similarity(tfidf_matrix[1], tfidf_matrix)
dist = 1 - cosine_similarity(tfidf_matrix)
#Using LSA to check for clustering
from sklearn.decomposition import TruncatedSVD
from sklearn.preprocessing import Normalizer
from sklearn.pipeline import make_pipeline
svd = TruncatedSVD(n_components=2)
normalizer = Normalizer(copy=False)
lsa = make_pipeline(svd, normalizer)
%time tfidf_matrix_lsa = lsa.fit_transform(tfidf_matrix)
explained_variance = svd.explained_variance_ratio_.sum()
print("Explained variance of the SVD step: {}%".format(int(explained_variance * 100)))
#hierarchical document clustering
from scipy.cluster.hierarchy import ward, dendrogram
linkage_matrix = ward(dist) #define the linkage_matrix using ward clustering pre-computed distances
from matplotlib import pyplot as plt
plt.tick_params(\
axis= 'x', # changes apply to the x-axis
which='both', # both major and minor ticks are affected
bottom='off', # ticks along the bottom edge are off
top='off', # ticks along the top edge are off
labelbottom='off')
plt.tight_layout() #show plot with tight layout
fig, ax = plt.subplots(figsize=(15, 20)) # set size
ax = dendrogram(linkage_matrix, orientation="right");
#k-means with td-idf matrix
from sklearn.cluster import KMeans
num_clusters = 2
km = KMeans(n_clusters=num_clusters, init='k-means++', max_iter=100, n_init=1,verbose=2)
%time tfidf_Xfm = km.fit_transform(tfidf_matrix)
cluster_labels = km.fit_predict(tfidf_matrix)
clusters = km.labels_.tolist()
#display clusters
import matplotlib.pyplot as plt
import matplotlib.cm as cm
import numpy as np
#create data frame that has the result of the LSA plus the cluster numbers
df = pd.DataFrame(dict(x=tfidf_matrix_lsa[:,0], y=tfidf_matrix_lsa[:,1], label=clusters))
#group by cluster
groups = df.groupby('label')
# set up plot
fig, ax = plt.subplots(figsize=(17, 9)) # set size
ax.margins(0.05) # Optional, just adds 5% padding to the autoscaling
#iterate through groups to layer the plot
#note that I use the cluster_name and cluster_color dicts with the 'name' lookup to return the appropriate color/label
for name, group in groups:
ax.plot(group.x, group.y, marker='o', linestyle='', ms=12,
mec='none')
ax.set_aspect('auto')
ax.tick_params(\
axis= 'x', # changes apply to the x-axis
which='both', # both major and minor ticks are affected
bottom='off', # ticks along the bottom edge are off
top='off', # ticks along the top edge are off
labelbottom='off')
ax.tick_params(\
axis= 'y', # changes apply to the y-axis
which='both', # both major and minor ticks are affected
left='off', # ticks along the bottom edge are off
top='off', # ticks along the top edge are off
labelleft='off')
ax.legend(numpoints=1) #show legend with only 1 point
#add label in x,y position with the label as the film title
for i in range(len(df)):
ax.text(df.ix[i]['x'], df.ix[i]['y'], size=8)
plt.show() #show the plot
#another way of showing clusters
# 2nd Plot showing the actual clusters formed
import matplotlib.pyplot as plt
import matplotlib.cm as cm
import numpy as np
fig, ax = plt.subplots(figsize=(17, 9)) # set size
ax.margins(0.05) #
#colors = cm.spectral(clusters.astype(float) / num_clusters)
ax.scatter(tfidf_matrix_lsa[:,0], tfidf_matrix_lsa[:,1],s=30, lw=0, alpha=0.7)
# Labeling the clusters
centers = km.cluster_centers_
# Draw white circles at cluster centers
ax.scatter(centers[:, 0], centers[:, 1],
marker='o', c="white", alpha=1, s=200)
for i, c in enumerate(centers):
ax.scatter(c[0], c[1], marker='$%d$' % i, alpha=1, s=50)
ax.set_title("The visualization of the clustered data.")
ax.set_xlabel("Feature space for the 1st feature")
ax.set_ylabel("Feature space for the 2nd feature")
plt.show()
tweetdata = { 'tweets': corpus, 'cluster':clusters}
frame = pd.DataFrame(tweetdata, index = [clusters] , columns = ['tweets','cluster'])
print("Top terms per cluster:")
print()
#sort cluster centers by proximity to centroid
order_centroids = km.cluster_centers_.argsort()[:, ::-1]
for i in range(num_clusters):
print("Cluster %d:" % i, end='')
for ind in order_centroids[i, :25]:
print(' %s' % terms[ind], end='')
print()
#for i in range(num_clusters):
# print("Cluster %d words:" % i, end='')
#
# for ind in order_centroids[i, :20]: #replace 6 with n words per cluster
#
# print(' %s' % vocab_frame.ix[ind], end=',')
# print() #add whitespace
# print() #add whitespace
#
# print("Cluster %d titles:" % i, end='')
# for title in frame.ix[i]['title']:
# print(' %s,' % title, end='')
# print() #add whitespace
# print() #add whitespace
from sklearn.metrics import silhouette_samples, silhouette_score
# This gives a perspective into the density and separation of the formed
# clusters
silhouette_avg = silhouette_score(tfidf_matrix, cluster_labels)
print("For n_clusters =", num_clusters,
"The average silhouette_score is :", silhouette_avg)
# Compute the silhouette scores for each sample
sample_silhouette_values = silhouette_samples(tfidf_matrix, cluster_labels)
fig, ax1 = plt.subplots(figsize=(17, 9))
y_lower = 10
for i in range(num_clusters):
# Aggregate the silhouette scores for samples belonging to
# cluster i, and sort them
ith_cluster_silhouette_values = \
sample_silhouette_values[cluster_labels == i]
ith_cluster_silhouette_values.sort()
size_cluster_i = ith_cluster_silhouette_values.shape[0]
y_upper = y_lower + size_cluster_i
color = cm.spectral(float(i) / num_clusters)
ax1.fill_betweenx(np.arange(y_lower, y_upper),
0, ith_cluster_silhouette_values,
facecolor=color, edgecolor=color, alpha=0.7)
# Label the silhouette plots with their cluster numbers at the middle
ax1.text(-0.05, y_lower + 0.5 * size_cluster_i, str(i))
# Compute the new y_lower for next plot
y_lower = y_upper + 10 # 10 for the 0 samples
ax1.set_title("The silhouette plot for the various clusters.")
ax1.set_xlabel("The silhouette coefficient values")
ax1.set_ylabel("Cluster label")
# The vertical line for average silhouette score of all the values
ax1.axvline(x=silhouette_avg, color="red", linestyle="--")
ax1.set_yticks([]) # Clear the yaxis labels / ticks
ax1.set_xticks([-0.1, 0, 0.2, 0.4, 0.6, 0.8, 1])
plt.show()
# 2nd Plot showing the actual clusters formed
fig, ax2 = plt.subplots(figsize=(17, 9))
colors = cm.spectral(cluster_labels.astype(float) / num_clusters)
ax2.scatter(tfidf_Xfm[:, 0], tfidf_Xfm[:, 1], marker='.', s=30, lw=0, alpha=0.7,
c=colors)
# Labeling the clusters
centers = km.cluster_centers_
# Draw white circles at cluster centers
ax2.scatter(centers[:, 0], centers[:, 1],
marker='o', c="white", alpha=1, s=200)
for i, c in enumerate(centers):
ax2.scatter(c[0], c[1], marker='$%d$' % i, alpha=1, s=50)
ax2.set_title("The visualization of the clustered data.")
ax2.set_xlabel("Feature space for the 1st feature")
ax2.set_ylabel("Feature space for the 2nd feature")
plt.show()
#LSA applied to tfidf-matrix
from sklearn.decomposition import TruncatedSVD
from sklearn.preprocessing import Normalizer
from sklearn.pipeline import make_pipeline
svd = TruncatedSVD(n_components=2)
normalizer = Normalizer(copy=False)
lsa = make_pipeline(svd, normalizer)
%time tfidf_matrix_lsa = lsa.fit_transform(tfidf_matrix)
explained_variance = svd.explained_variance_ratio_.sum()
print("Explained variance of the SVD step: {}%".format(int(explained_variance * 100)))
#from sklearn.cluster import KMeans
#num_clusters = 2
#km_lsa = KMeans(n_clusters=num_clusters, init='k-means++', max_iter=100, n_init=1,verbose=2)
#%time km.fit(tfidf_matrix_lsa)
#clusters_lsa = km.labels_.tolist()
#original_space_centroids = svd.inverse_transform(km_lsa.cluster_centers_)
#order_centroids = original_space_centroids.argsort()[:, ::-1]
#Topic modelling with LDA
import gensim, nltk
def lda_score_keyphrases_by_tfidf(texts, candidates='chunks'):
    """Build a gensim bag-of-candidates corpus from *texts* and weight it
    with TF-IDF; returns (corpus_tfidf, dictionary, corpus)."""
    boc_texts = []
    for text in texts:
        # Strip URLs and lowercase before candidate extraction.
        text = re.sub(r'http[s]?://(?:[a-z]|[0-9]|[$-_@.&+]|[!*\(\),]|(?:%[0-9a-f][0-9a-f]))+',r'',text)
        text = text.lower()
        if candidates == 'chunks':
            boc_texts.append(get_chunks(text))
        elif candidates == 'words':
            boc_texts.append(get_words(text))
    #make gensim dictionary and corpus
    dictionary = gensim.corpora.Dictionary(boc_texts)
    # NOTE(review): gensim's filter_extremes expects no_below to be an
    # absolute document count (int); 0.4 looks like a fraction -- confirm
    # the intended pruning thresholds.
    dictionary.filter_extremes(no_below=0.4, no_above=0.8)
    corpus = [dictionary.doc2bow(boc_text) for boc_text in boc_texts]
    # transform corpus with tf*idf model
    tfidf = gensim.models.TfidfModel(corpus)
    corpus_tfidf = tfidf[corpus]
    return corpus_tfidf, dictionary, corpus
#LDA model
tfidif, dictionary, bow = lda_score_keyphrases_by_tfidf(corpus,'chunks')
#remove extremes (similar to the min/max df step used when creating the tf-idf matrix)
print(dictionary.token2id)
print(bow[0])
ldamodel = gensim.models.ldamodel.LdaModel(bow, num_topics=2, id2word = dictionary, passes=100, update_every=5,chunksize=100)
ldamodel.show_topics()
import numpy as np
topics_matrix = ldamodel.show_topics(formatted=False, num_words=20)
topics_matrix | [
"padmini@nealanalytics.com"
] | padmini@nealanalytics.com |
9669e42fb9711079f8410219882d8ad76a059912 | 49c872af9455f1e3663b8c250c9375852a28ce29 | /cart/contexts.py | c13f189d474b58fd4a927a9c7e609aca0c0dc461 | [] | no_license | Daanivd/msp4-unicornattractor | 4917a9e8e8188c71f8b0aa148d289b194d250247 | 86ca2d68cecd2f002545e369cf4031bf74126e4c | refs/heads/master | 2022-12-13T05:17:39.872688 | 2019-08-15T15:27:49 | 2019-08-15T15:27:49 | 195,424,511 | 0 | 1 | null | 2022-12-08T05:51:06 | 2019-07-05T14:39:19 | HTML | UTF-8 | Python | false | false | 595 | py | from django.shortcuts import get_object_or_404
from features.models import Feature
def cart_contents(request):
    """
    Ensures that the cart contents are available when rendering
    every page.

    Reads the session cart mapping {feature_id: contribution}, resolves
    each Feature, and returns a context dict with the item list and the
    total contribution. Raises Http404 (via get_object_or_404) if a
    feature id in the session no longer exists.
    """
    cart = request.session.get('cart', {})

    cart_items = []
    total = 0
    # 'item_id' instead of 'id' to avoid shadowing the builtin.
    for item_id, contribution in cart.items():
        feature = get_object_or_404(Feature, pk=item_id)
        total += contribution
        # Keep the raw id alongside the model instance so templates can
        # build adjust/remove links directly.
        cart_items.append({'id': item_id, 'contribution': contribution, 'feature': feature})
    # Removed leftover debug print(cart_items): context processors run on
    # every request and must not write to stdout.
    return {'cart_items': cart_items, 'total': total}
"ubuntu@ip-172-31-35-5.ec2.internal"
] | ubuntu@ip-172-31-35-5.ec2.internal |
2ed6f964e8ae60b2213f6ebac2a9ea76990a1b32 | 530deb444cb7ff34368b64cd93f23d480e53902a | /20170913_Googleplace_api.py | 71305064f1ebdefd3c9b3a16437da39c758e7b36 | [] | no_license | kuonumber/crawling | 083b9f21e05f56da74c9884d8e337a893406006f | 8a4572d87526edf154fd203cae144796547a1a47 | refs/heads/master | 2021-05-02T02:06:21.189296 | 2018-02-09T09:21:03 | 2018-02-09T09:21:03 | 120,852,388 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,370 | py |
# Python package currently in use:
# GitHub page:
# https://github.com/slimkrazy/python-google-places
from googleplaces import GooglePlaces, types, lang
import pandas as pd
# NOTE(review): api_key is never defined in this script -- supply your own
# Google Places API key before running.
google_places = GooglePlaces(api_key)
# enter your own api_key above
radar_result = (google_places.radar_search(language = 'zh-TW',
                            lat_lng ={'lat':25.0513848,
                                      'lng':121.5475527},
                            # GPS point of the Taipei Arena
                            keyword = '室內設計',
                            # keyword '室內設計' = "interior design"
                            radius = 3000,
                            location = '松山,台北'
                            # search radius, in meters
                            ))
for p in radar_result.places:
    p.get_details(language='zh-TW')
    # fetch the detailed data for each place
for i in range(0, len(radar_result.places)):
    print(radar_result.places[i])
    # print them out for a quick look
results = radar_result.places
result_list = []
# Data cleaning: keep only name and coordinates for each result.
for result in results:
    n_lat = float(result.geo_location['lat'])
    n_lng = float(result.geo_location['lng'])
    n_name = result.name
    n_list = [n_name, n_lat, n_lng]
    result_list.append(n_list)
# Collect the rows into a pandas DataFrame.
df_place = pd.DataFrame(result_list,columns=['name','lat','lng'])
| [
"v7368858@gmail.com"
] | v7368858@gmail.com |
5769fa876cdfe6d70262c3cd07eb71f328ad565a | 9bd632b952e8935eb1bca638ed88a4aae8ba52b5 | /sp_report_invoice/models/account_invoice_line.py | 4bef2b5a9f8d77a1fc39295136965f442ae8a011 | [] | no_license | supercoopbdx/odoo-addons | e8d4e9272d742ffa640ddd25d6f000f5fa0d2b2e | 5107148ac5378fabe45138e539cdc8a134c12062 | refs/heads/master | 2022-05-20T11:51:57.014738 | 2022-03-10T17:45:29 | 2022-03-10T17:45:29 | 239,326,231 | 2 | 5 | null | 2021-02-11T10:59:33 | 2020-02-09T15:23:41 | JavaScript | UTF-8 | Python | false | false | 1,381 | py | # -*- coding: utf-8 -*-
from openerp import models, fields, api
import openerp.addons.decimal_precision as dp
class AccountInvoiceLine(models.Model):
    """Extends account.invoice.line with a stored tax-included subtotal."""
    _inherit = 'account.invoice.line'

    # Stored computed field; recomputed whenever any dependency below changes.
    price_subtotal_tax = fields.Monetary(string=' Total including tax',
                                 compute='_compute_price_tax',
                                 readonly= True,
                                 store=True)

    @api.one
    @api.depends('price_unit', 'discount', 'invoice_line_tax_ids', 'quantity',
                 'product_id', 'invoice_id.partner_id',
                 'invoice_id.currency_id', 'invoice_id.company_id')
    def _compute_price_tax(self):
        """Compute the line subtotal including all applicable taxes."""
        currency = self.invoice_id and self.invoice_id.currency_id or None
        # Unit price net of the line discount percentage.
        price = self.price_unit * (1 - (self.discount or 0.0) / 100.0)
        taxes = False
        if self.invoice_line_tax_ids:
            taxes = self.invoice_line_tax_ids.compute_all(
                price,
                currency,
                self.quantity,
                product=self.product_id,
                partner=self.invoice_id.partner_id
            )
        # With taxes: the engine's tax-included total; otherwise qty * price.
        self.price_subtotal_tax = taxes['total_included'] if taxes else self.quantity * price
        # if self.invoice_id:
        #     self.price_subtotal_tax = self.invoice_id.currency_id.round(
        #         self.price_subtotal_tax
        #     )
| [
"erivard@vps318408.ovh.net"
] | erivard@vps318408.ovh.net |
e771760e3355c5972d4ce3aa46d6b55a5cda00de | 3e17d21ff07674783981f261557e0803544d381f | /MajorProjectServerEnd/classes/migrations/0001_initial.py | e03aef7e1514f19abb91a768e3e2979cc0d17f7d | [] | no_license | meghalagrawal/minor_project | 46c4e4bf73402202e7cde2ddac5e3f64d0cd1491 | 95b58edac75b055a4216596226abbd47fe5812d3 | refs/heads/master | 2021-09-14T16:44:26.327567 | 2018-05-16T05:07:08 | 2018-05-16T05:07:08 | 113,749,310 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,321 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.9.4 on 2018-04-03 15:44
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Initial schema for the app: creates the ClassData and ProfessorData tables."""

    # First migration of this app, so it depends on nothing.
    initial = True

    dependencies = [
    ]

    operations = [
        # Simple named entity with created/modified audit timestamps.
        migrations.CreateModel(
            name='ClassData',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(blank=True, max_length=255, null=True)),
                ('created', models.DateTimeField(auto_now_add=True)),
                ('modified', models.DateTimeField(auto_now=True)),
            ],
        ),
        # Professor account record.
        # NOTE(review): 'password' is a plain CharField here — confirm the
        # application hashes it before saving.
        migrations.CreateModel(
            name='ProfessorData',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(blank=True, max_length=255, null=True)),
                ('email', models.CharField(blank=True, max_length=255)),
                ('password', models.CharField(blank=True, max_length=255, null=True)),
                ('created', models.DateTimeField(auto_now_add=True)),
                ('modified', models.DateTimeField(auto_now=True)),
            ],
        ),
    ]
| [
"meghal.nit@gmail.com"
] | meghal.nit@gmail.com |
68b4c4489787df435861b44a9c27322813ccf918 | e74c7f7b55caf6a23429b66626a9246870e77c60 | /hw06/lab.py | 1efda010baeede137ebd3b32d9708197c6d33e68 | [] | no_license | dlui220/softdev-assignments | b4b7269bf361f99e2b7223d6c3e465a33066185b | 77999d52a55c5a9ad5f45e12e430aa4142f698cb | refs/heads/master | 2021-01-13T00:16:37.622979 | 2016-04-13T16:33:28 | 2016-04-13T16:33:28 | 51,458,144 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 2,779 | py | # nums = []
# for x in range(8):
# nums.append(x)
# print "nums = " + str(nums)
# squares = []
# for x in range(8):
# squares.append(x**2)
# print "squares = " + str(squares)
# print "list comprehensions = " + str([x for x in range(8)])
# print "list comprehensions with squares = " + str([x*x for x in range(8)])
# print "list of tuples" + str([ (x, x*x, x*x*x) for x in range(8) ])
# p="myNoobPass1234"
# print [x for x in p]
# print [x for x in "1234"]
# UC_LETTERS="ABCDEFGHIJKLMNOPQRSTUVWXYZ"
# print [ x for x in p if x in UC_LETTERS ]
# print [ 1 if x in UC_LETTERS else 0 for x in p ]
# Character classes used by the password-strength helpers below.
UC_LETTERS = "ABCDEFGHIJKLMNOPQRSTUVWXYZ"
# Bug fix: the lowercase alphabet previously ended "...wyxz" (transposed).
# Membership tests were unaffected (same character set), but the constant is
# now in proper order.
LC_LETTERS = "abcdefghijklmnopqrstuvwxyz"
NUMERALS = "1234567890"
CHARS = ".?!&#,;:-_*"  # punctuation/symbols that count toward strength
def checkPasswordSimple(password):
    """Return True if *password* has at least one uppercase letter, one
    lowercase letter and one digit (per the module-level character sets).

    Rewritten with any() generators instead of building three full
    indicator lists and summing them — same result, no temporaries.
    """
    has_upper = any(c in UC_LETTERS for c in password)
    has_lower = any(c in LC_LETTERS for c in password)
    has_digit = any(c in NUMERALS for c in password)
    return has_upper and has_lower and has_digit
# print checkPasswordSimple("HelloThisIsPassword")
# print checkPasswordSimple("HelloThisIsPassword1")
def checkPasswordStrength(password):
    """Rate *password* and return a human-readable message.

    Scoring: base 1; +2 for mixing upper and lower case; +1 for a digit;
    +1 for a symbol.  Any score above the base is doubled, giving the
    original 1..10 scale (1, 4, 6, 8, 10).

    Bug fix: the original defined local NUM/SYM constants but then used the
    *module-level* NUMERALS/CHARS globals (identical content); the locals
    are now actually used, which also makes the function self-contained.
    """
    UC_LETTERS = "ABCDEFGHIJKLMNOPQRSTUVWXYZ"
    LC_LETTERS = "abcdefghijklmnopqrstuvwxyz"
    NUM = "1234567890"
    SYM = ".?!&#,;:-_*"

    strength = 1
    has_upper = any(c in UC_LETTERS for c in password)
    has_lower = any(c in LC_LETTERS for c in password)
    if has_upper and has_lower:
        strength += 2
    if any(c in NUM for c in password):
        strength += 1
    if any(c in SYM for c in password):
        strength += 1
    if strength > 1:
        strength *= 2  # scale any non-trivial score up to the 1-10 range
    return "From 1-10, your password (" + password + ") strength is: " + str(strength)
# Demo output (Python 2 print statements).  Ratings actually produced:
print checkPasswordStrength("hello") # strength 1: lowercase only
print checkPasswordStrength("hello1") # strength 4: +1 digit, doubled (original comment said 8)
print checkPasswordStrength("hello_1") # strength 6: digit + symbol, doubled
print checkPasswordStrength("Hello1") # strength 8: case mix + digit, doubled
print checkPasswordStrength("Hello_1") # strength 10: case mix + digit + symbol, doubled
def strengthCheck(p):
    """Return True if *p* mixes uppercase, lowercase and digits.

    Duplicate of checkPasswordSimple's rule, kept for the demo below.
    Rewritten with any() instead of classifying every character into a
    list of category codes and probing it with three membership tests.
    """
    return (any(c in UC_LETTERS for c in p)
            and any(c in LC_LETTERS for c in p)
            and any(c in NUMERALS for c in p))
# strengthCheck requires upper + lower + digit all present:
print strengthCheck("hello") # False - no uppercase, no digit
print strengthCheck("Hello") # False - no digit
print strengthCheck("Hello1") # True
print strengthCheck("hello1") # False - no uppercase
def strength_rate(p):
    """Count the character classes present in *p* (unfinished stub).

    Bug fixes: the original referenced an undefined name ``chars`` (the
    module constant is ``CHARS``), which raised NameError on every call,
    and every count was inverted (``len(p) - l.count(k)`` counts the
    characters *outside* the class, despite the uc/lc/... names).
    The function still returns None; no caller uses a result yet.
    """
    categories = [1 if x in UC_LETTERS else
                  2 if x in LC_LETTERS else
                  3 if x in NUMERALS else
                  4 if x in CHARS else
                  0 for x in p]
    uc = categories.count(1)    # uppercase letters
    lc = categories.count(2)    # lowercase letters
    nums = categories.count(3)  # digits
    chrs = categories.count(4)  # symbols
    # TODO: derive and return a rating from these counts.
| [
"derricklui1@gmail.com"
] | derricklui1@gmail.com |
986a98c7a191eded558d64c0115171a971068428 | 42545e522c676789e253c04a7f37d2e56c92529b | /FailedTests/kerasTest.py | 72628723ef812eba81e0681cb83896122c028ec5 | [
"MIT"
] | permissive | esslushy/AbstractArtGenerator | 8abec8bc727008e98767e2b42c609f8e33189b06 | 48ebfee04673bc109fc202c3368dbbc80f0e7021 | refs/heads/master | 2020-03-29T17:43:00.370812 | 2019-05-02T15:42:30 | 2019-05-02T15:42:30 | 150,177,422 | 5 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,365 | py | import time
import os
import matplotlib.pyplot as plt
import matplotlib.gridspec as gridspec
import numpy as np
from keras.models import Sequential
from keras.layers import Conv2D, Conv2DTranspose, Reshape
from keras.layers import Flatten, BatchNormalization, Dense, Activation
from keras.layers.advanced_activations import LeakyReLU
from keras.optimizers import Adam
from keras.preprocessing.image import ImageDataGenerator
def construct_generator():
    """Build the DCGAN generator: (1, 1, 100) noise -> 64x64x3 image in [-1, 1].

    The three intermediate upsampling stages were previously three verbatim
    copies of the same Conv2DTranspose/BatchNorm/ReLU triple differing only
    in the filter count; they are now produced by one loop.  The layer
    sequence is identical to the original.
    """
    generator = Sequential()

    # Project the flat noise vector onto a 4x4x512 feature map.
    generator.add(Dense(units=4 * 4 * 512,
                        kernel_initializer='glorot_uniform',
                        input_shape=(1, 1, 100)))
    generator.add(Reshape(target_shape=(4, 4, 512)))
    generator.add(BatchNormalization(momentum=0.5))
    generator.add(Activation('relu'))

    # Three identical stride-2 upsampling stages: 4x4 -> 8x8 -> 16x16 -> 32x32.
    for filters in (256, 128, 64):
        generator.add(Conv2DTranspose(filters=filters, kernel_size=(5, 5),
                                      strides=(2, 2), padding='same',
                                      data_format='channels_last',
                                      kernel_initializer='glorot_uniform'))
        generator.add(BatchNormalization(momentum=0.5))
        generator.add(Activation('relu'))

    # Final upsample to 3 channels, squashed to [-1, 1] with tanh
    # (no batch norm on the output layer).
    generator.add(Conv2DTranspose(filters=3, kernel_size=(5, 5),
                                  strides=(2, 2), padding='same',
                                  data_format='channels_last',
                                  kernel_initializer='glorot_uniform'))
    generator.add(Activation('tanh'))

    return generator
noise = np.random.normal(0, 1, size=(4,) + (1, 1, 100))
# Generate images
generated_images = generator.predict(noise)
print(generated_images)
| [
"ess090502@gmail.com"
] | ess090502@gmail.com |
711e796f08056e19d7c2e63ae6360a8924cd6f5e | 9b6109b1d71bc6c514e38a1b23103df8a115e1da | /binding_energies/c-2.000o-3.375/input.py | 586d9b9dbcb4433acc724de6e8606c6960437441 | [] | no_license | rwest/linear-scaling-tests | 4fd53c50ca9a91023ee9c1b94506155ed32af179 | 9d4678cb1e46786023642fbc28ad00ddff748aa6 | refs/heads/master | 2023-03-07T05:36:40.382146 | 2017-10-31T16:44:33 | 2017-10-31T16:44:33 | 108,619,306 | 1 | 2 | null | 2020-10-17T16:05:18 | 2017-10-28T04:20:42 | HTML | UTF-8 | Python | false | false | 2,903 | py | # Data sources
database(
    # Thermo libraries searched (in order) before group additivity.
    thermoLibraries=['surfaceThermo', 'primaryThermoLibrary', 'thermo_DFT_CCSDTF12_BAC','DFT_QCI_thermo'],
    reactionLibraries = [('Deutschmann_Ni', False)],
    seedMechanisms = [],
    kineticsDepositories = ['training'],
    kineticsFamilies = 'default',
    kineticsEstimator = 'rate rules',
    # Adsorbate binding energies used by linear scaling: C and O come from
    # this run's grid point (cf. the directory name c-2.000o-3.375); H keeps
    # the Ni(111) default.
    bindingEnergies = { # default values for Ni(111)
        'C':(-2.000000, 'eV/molecule'),
        'H':(-2.778, 'eV/molecule'),
        'O':(-3.375000, 'eV/molecule'),
    }
)
# List of species
species(
label='X',
reactive=True,
structure=adjacencyList("1 X u0"),
)
species(
label='CH4',
reactive=True,
structure=SMILES("[CH4]"),
)
species(
label='O2',
reactive=True,
structure=adjacencyList(
"""
1 O u1 p2 c0 {2,S}
2 O u1 p2 c0 {1,S}
"""),
)
species(
label='N2',
reactive=False,
structure=SMILES("N#N"),
)
species(
label='CO2',
reactive=True,
structure=SMILES("O=C=O"),
)
species(
label='H2O',
reactive=True,
structure=SMILES("O"),
)
species(
label='H2',
reactive=True,
structure=SMILES("[H][H]"),
)
species(
label='CO',
reactive=True,
structure=SMILES("[C-]#[O+]"),
)
species(
label='C2H6',
reactive=True,
structure=SMILES("CC"),
)
species(
label='CH2O',
reactive=True,
structure=SMILES("C=O"),
)
species(
label='CH3',
reactive=True,
structure=SMILES("[CH3]"),
)
species(
label='C3H8',
reactive=True,
structure=SMILES("CCC"),
)
species(
label='H',
reactive=True,
structure=SMILES("[H]"),
)
species(
label='C2H5',
reactive=True,
structure=SMILES("C[CH2]"),
)
species(
label='CH3OH',
reactive=True,
structure=SMILES("CO"),
)
species(
label='HCO',
reactive=True,
structure=SMILES("[CH]=O"),
)
species(
label='CH3CHO',
reactive=True,
structure=SMILES("CC=O"),
)
species(
label='OH',
reactive=True,
structure=SMILES("[OH]"),
)
species(
label='C2H4',
reactive=True,
structure=SMILES("C=C"),
)
#----------
# Reaction systems
# Dry-reforming style feed: CH4 + CO2 diluted in N2 over a clean catalyst
# surface (all sites initially vacant).
surfaceReactor(
    temperature=(1000,'K'),
    initialPressure=(1.0, 'bar'),
    initialGasMoleFractions={
        "CH4": 0.1,
        "CO2": 0.1,
        "N2": 0.8,
    },
    initialSurfaceCoverages={
        "X": 1.0,
    },
    surfaceVolumeRatio=(1.e5, 'm^-1'),
    surfaceSiteDensity=(2.9e-9, 'mol/cm^2'),
    # terminationConversion = { "CH4":0.9,},
    terminationTime=(1.0, 's'),  # stop on time rather than conversion
)

# Tight ODE tolerances for the stiff surface-chemistry system.
simulator(
    atol=1e-18,
    rtol=1e-12,
)

# Mechanism-enlargement tolerances (rate-based algorithm).
model(
    toleranceKeepInEdge=0.0,
    toleranceMoveToCore=1e-4,
    toleranceInterruptSimulation=0.1,
    maximumEdgeSpecies=100000
)

options(
    units='si',
    saveRestartPeriod=None,
    generateOutputHTML=True,
    generatePlots=False,
    saveEdgeSpecies=True,
    saveSimulationProfiles=True,
)
| [
"r.west@northeastern.edu"
] | r.west@northeastern.edu |
8d7c026f98cc43856dcf74350d178b0b6ea068c0 | 1327667dc39d7fba09b9d98716a0818ff2677ce7 | /contact_list.py | 8c0a774ed412b9d4f48035cdfdae901aa1997afa | [] | no_license | darkwlf/contact_list | 8019564bb8b80b059af2526041ee490b1498bc32 | 17e120fe269e09c1098ec4663eaeabd0ffdb3fa1 | refs/heads/main | 2023-05-04T12:37:40.572511 | 2021-05-24T13:28:20 | 2021-05-24T13:28:20 | 370,361,790 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 154 | py | import sqlite3
# Print every row of the "contact" table, sorted by name.
# Fix: the connection was never closed; a try/finally now releases the
# sqlite handle even when the query fails (e.g. missing table).
con = sqlite3.connect('contacts.db')
try:
    cur = con.cursor()
    for row in cur.execute('SELECT * FROM contact ORDER BY name'):
        print(row)
finally:
    con.close()
| [
"noreply@github.com"
] | darkwlf.noreply@github.com |
055feb63859169aa5ad1f36d99b7a8cc679480d0 | 5969684b386d25881e349ff55184c222ea0d6f3e | /vkusnooo_app/views.py | 787a1de959919bb877153bf591bd2d1683e1492d | [] | no_license | BAndonova/Python-Web-Framework-2020-VkusNooo_project | b505af43794e03e8ec0acd7ece383a2678b3e1e5 | f94a877b57610c12280a130e4bd2c7bc4722a3f8 | refs/heads/main | 2023-02-21T15:42:22.847599 | 2021-01-23T09:40:24 | 2021-01-23T09:40:24 | 315,734,335 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,480 | py | import djclick as click
from django.contrib.auth.decorators import login_required
from django.shortcuts import render, redirect
from recipe_core.decorators import group_required
from vkusnooo_app.models import Recipe, Like
from vkusnooo_app.forms import RecipeForm
from vkusnooo_auth.models import UserProfile
from vkusnooo_auth.views import user_count
def index(request):
    """Landing page: recipe list plus recipe/user counts, or a bare page
    when no recipe exists yet."""
    # Guard clause: nothing to show, render the template without context.
    if not Recipe.objects.exists():
        return render(request, 'index.html')

    recipes = Recipe.objects.all()
    context = {
        'recipes': recipes,
        'recipes_count': recipes.count(),
        'users_count': UserProfile.objects.all().count(),
    }
    return render(request, 'index.html', context)
def all_recipes(request):
    """List every recipe; fall back to the landing page when there are none.

    Bug fixes:
    - the template previously received the imported ``user_count`` *view
      function* under 'users_count' instead of the computed number;
    - the empty-case redirect used 'index.html', which is not a registered
      URL name (the sibling views redirect to 'index').
    """
    if not Recipe.objects.exists():
        return redirect('index')

    recipes = Recipe.objects.all()
    context = {
        'recipes': recipes,
        'recipes_count': recipes.count(),
        'users_count': UserProfile.objects.all().count(),
    }
    return render(request, 'all_recipes.html', context)
@login_required
# @group_required(groups=['Regular Users'])
def create_recipe(request):
    """Show (GET) or process (POST) the recipe-creation form.

    The saved recipe is always owned by the logged-in user.
    """
    if request.method == 'GET':
        # NOTE(review): this unsaved instance is only passed to the template
        # as 'created_by'; the form itself is rendered unbound.
        instance = Recipe(created_by=request.user)
        context = {
            'form': RecipeForm(),
            'current_page': 'create',
            'created_by': instance,
        }
        return render(request, 'create.html', context)
    else:
        instance = Recipe(created_by=request.user)
        form = RecipeForm(request.POST, request.FILES, instance=instance)
        if form.is_valid():
            # commit=False lets us (re)assign the owner before the first save.
            recipe = form.save(commit=False)
            recipe.created_by = request.user
            recipe.save()
            return redirect('index')
        # Invalid submission: re-render with the form's validation errors.
        context = {
            'form': form,
        }
        return render(request, 'create.html', context)
@login_required
def edit_recipe(request, pk):
    """Show (GET) or process (POST) the edit form for recipe *pk*.

    NOTE(review): 'can_delete' only controls template display; nothing here
    stops a non-owner from POSTing changes — confirm that is intended.
    """
    recipe = Recipe.objects.get(pk=pk)
    # Owners and superusers get the delete option in the template.
    if recipe.created_by_id == request.user.id or request.user.is_superuser:
        recipe.can_delete = True
    else:
        recipe.can_delete = False
    if request.method == 'GET':
        context = {
            'form': RecipeForm(instance=recipe),
            'recipe': recipe,
            'can_delete': recipe.can_delete
        }
        return render(request, 'edit.html', context)
    else:
        form = RecipeForm(request.POST, request.FILES, instance=recipe)
        if form.is_valid():
            form.save()
            return redirect('index')
        # Invalid submission: re-render with validation errors.
        context = {
            'form': form,
            'recipe': recipe,
        }
        return render(request, 'edit.html', context)
def details_recipe(request, pk):
    """Render the detail page for recipe *pk* (GET only).

    NOTE(review): any non-GET request falls through and returns None, which
    Django reports as "view didn't return an HttpResponse" — confirm the URL
    conf only routes GET here.
    """
    recipe = Recipe.objects.get(pk=pk)
    # user = Recipe.created_by.get(isinstance=recipe)
    # Ingredients are stored as one comma-separated string on the model.
    ingredients = recipe.ingredients.split(',')
    # Owners and superusers may delete.
    if recipe.created_by_id == request.user.id or request.user.is_superuser:
        recipe.can_delete = True
    else:
        recipe.can_delete = False
    if request.method == 'GET':
        # user = Recipe.created_by
        context = {
            'form': RecipeForm(instance=recipe),
            'recipe': recipe,
            'ingredients': ingredients,
            'can_delete': recipe.can_delete,
            # Owners cannot like their own recipe (superusers always can).
            'can_like': recipe.created_by_id != request.user.id or request.user.is_superuser,
            # Queryset; truthy when the current user has an active like.
            'has_liked': recipe.like_set.filter(user_id=request.user.id, value=True),
            'likes_count': recipe.like_set.filter(value=True).count(),
            'current_page': 'all recipes',
        }
        return render(request, 'details.html', context)
@login_required
def delete_recipe(request, pk):
    """Confirmation page (GET) and deletion (POST) for recipe *pk*.

    NOTE(review): the permission check below is broken — it compares the
    owner's *id* with the request.user *object* (always unequal) and its body
    is just 'pass', so any logged-in user can delete any recipe.  It should
    compare against request.user.id and actually forbid.
    """
    recipe = Recipe.objects.get(pk=pk)
    if recipe.created_by_id != request.user or request.user.is_superuser:
        # forbid
        pass
    if request.method == 'GET':
        context = {
            'form': RecipeForm(instance=recipe),
            'recipe': recipe,
        }
        return render(request, 'delete.html', context)
    else:
        recipe.delete()
        return redirect('index')
def _recipes_by_type(request, recipe_type, template):
    """Render *template* with every recipe whose ``type`` equals *recipe_type*.

    Shared helper: the seven category views below previously duplicated this
    filter-and-render body verbatim, differing only in the two strings.
    """
    context = {
        'recipes': Recipe.objects.filter(type=recipe_type),
    }
    return render(request, template, context)


def desserts(request):
    return _recipes_by_type(request, 'Desserts', 'meals/desserts.html')


def meat_meals(request):
    return _recipes_by_type(request, 'Meat Meals', 'meals/meat.html')


def meatless_meals(request):
    return _recipes_by_type(request, 'Meatless Meals', 'meals/meatless.html')


def other(request):
    return _recipes_by_type(request, 'Other', 'meals/other.html')


def pasta_dough(request):
    return _recipes_by_type(request, 'Pasta and Dough', 'meals/pasta_and_dough.html')


def vegan(request):
    return _recipes_by_type(request, 'Vegan', 'meals/vegan.html')


def healthy(request):
    return _recipes_by_type(request, 'Healthy and Dietetic', 'meals/healthy.html')
@login_required
def like_recipe(request, pk):
    """Toggle the logged-in user's like on recipe *pk*, then return to details.

    A Like row is created on first use; afterwards its boolean 'value' flips.
    NOTE(review): lookups filter by request.user.userprofile.id while the new
    Like is created with request.user.id — these only coincide if User and
    UserProfile share primary keys; verify the model relations.
    """
    likes = Like.objects.filter(recipe_id=pk).all()
    user_like = likes.filter(user_id=request.user.userprofile.id).first()
    if user_like and user_like.value == True:
        # Active like -> retract it.
        Like.objects.filter(recipe_id=pk, user_id=request.user.userprofile.id).update(value=False)
    elif user_like and user_like.value == False:
        # Previously retracted -> re-activate.
        Like.objects.filter(recipe_id=pk, user_id=request.user.userprofile.id).update(value=True)
    else:
        # First like by this user for this recipe.
        # recipe = Recipe.objects.get(pk=pk)
        like = Like(value=True, user_id=request.user.id, recipe_id=pk)
        # likes.recipe = recipe
        like.save()
    return redirect('details recipe', pk)
| [
"60629112+BAndonova@users.noreply.github.com"
] | 60629112+BAndonova@users.noreply.github.com |
e74a4232dc7fc3b1de106635f6beb9dc191f4f63 | 373e44ad5fba391d86543f28b91a2cdf9a22f874 | /model/TestPar45_60_varydatasize/60/PowerPredEDFA_average.py | d70aad24fd3ba2b6baa9d93a86d69d68d3e1e57a | [
"BSD-3-Clause",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | szhu3210/oopt-gnpy | c311fe00b869d3bcfabae4e22366cbc2215eb91d | 83768480eb9aedad560ab9a722493f04cfe80c9c | refs/heads/master | 2020-04-02T00:24:48.608431 | 2019-04-01T18:32:11 | 2019-04-01T18:32:11 | 153,803,494 | 1 | 0 | BSD-3-Clause | 2018-10-19T15:22:16 | 2018-10-19T15:22:15 | null | UTF-8 | Python | false | false | 1,521 | py | # coding: utf-8
# In[171]:
# Ido Michael
import tensorflow as tf
import os, struct
import numpy as np
import matplotlib.pyplot as plt
import ParsePowerEDFA
from sklearn.metrics import mean_squared_error
from sklearn.metrics import mean_absolute_error
import math
import sys
import configparser
import random
print(tf.__version__)
# In case we need to average results of 5 different debug files and plot them on a graph.
# ParsePowerEDFA.getTestFiles()
# Average files by name and then write collected results into a csv file.
[testdb, testmse, testmae, tr2, tr4, tr6, tr8, tr1, mse_tr, mae_tr] = ParsePowerEDFA.averageResults("TestPar45_60_60")
[val2, val4, val6, val8, val1, mse_val, mae_val] = ParsePowerEDFA.averageResults_val("TestPar45_60_60")
ParsePowerEDFA.plot_to_matrix(tr2, tr4, tr6, tr8, tr1, mse_tr, mae_tr)
ParsePowerEDFA.plot_to_matrix_Val(val2, val4, val6, val8, val1, mse_val, mae_val)
ParsePowerEDFA.plot_to_matrix_test(testdb, testmse, testmae)
# 20%
# [testdb, val2, val4, val6, val8, val1] = ParsePowerEDFA.averageResults([
# "./TestPar29.ini140-debug.log",
# "./TestPar29.ini84-debug.log",
# "./TestPar29.ini150-debug.log"
# ])
# [testdb, val2, val4, val6, val8, val1] = ParsePowerEDFA.averageResults(["./test/TestPar25.ini-smaller53-debug.log", "./test/TestPar25.ini-smaller103-debug.log", "./test/TestPar25.ini-smaller25-debug.log", "./test/TestPar25.ini-smaller37-debug.log", "./test/TestPar25.ini-smaller30-debug.log"])
# ParsePowerEDFA.plotGraph(val2, val4, val6, val8, val1)
| [
"szhu@email.arizona.edu"
] | szhu@email.arizona.edu |
a030538c5ca7316deb104f9555029252bad5e681 | 3c40dce2af71dd6216f4b64e5f42d4d6d5bc6b25 | /auto_client/libs/plugins/__init__.py | 02afa5f43fb806ca16c94112950069f750df338e | [] | no_license | huzhou520/cmdb | 1c93ad47d2a5e564c1e8f34ec9015590208fafeb | b6b4aba4184ed316a0a0b5f2b1a876473ec4cdbc | refs/heads/master | 2020-09-11T22:36:31.588900 | 2019-12-27T01:55:40 | 2019-12-27T01:55:40 | 222,212,934 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,078 | py | from settings import PLUGIN_DICT
def get_server_info(hostname, ssh_func):
"""
:param hostname: 要操作的远程主机
:param ssh_func: 要执行的方法
:return:
"""
info_dict = {}
for key, path in PLUGIN_DICT.items():
# 1.切割settings文件中的字典
"""
例:libs.plugins.board.Board,切割settings文件中的values切成如下:
key:libs.plugins.board(模块路径) value: Board(对应模块下面的方法)
"""
module_name, class_name = path.rsplit('.', maxsplit=1)
# 2.以字符串的方式加载模块
import importlib
module = importlib.import_module(module_name)
# print(module_name,class_name)
# 3.通过反射找模块下面的方法
cls = getattr(module, class_name)
# print(module_name, class_name)
# 4.实例化对象
obj = cls()
# 5.执行对象的process方法
ret = obj.process(hostname, ssh_func)
info_dict[key] = ret
# print(info_dict)
return info_dict
| [
"you@example.com"
] | you@example.com |
315dde5190931ae95728751f22a8752b3de8b9e1 | a439ca43178d38cfe6daaee50ea134ca6c52b502 | /thaniya_client/src/thaniya_client/tools/ThaniyaMySQL_native.py | a110c96dca5ffc807f8d9a6352e11b529c08a02e | [
"Apache-2.0"
] | permissive | jkpubsrc/Thaniya | 37ca727abdc6f9f605257813889fe3a033995bba | 4ebdf2854e3d7888af7396adffa22628b4ab2267 | refs/heads/master | 2023-03-05T20:58:59.528746 | 2021-02-15T19:31:06 | 2021-02-15T19:31:06 | 331,318,787 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,503 | py |
import os
import typing
import tarfile
import jk_simpleexec
import jk_utils
from ..ThaniyaBackupContext import ThaniyaBackupContext
from .EnumTarPathMode import EnumTarPathMode
from .ThaniyaService import ThaniyaService
class ThaniyaMySQL_native:
	"""Backup helpers that drive the native ``mysqldump`` binary, plus a
	dump-size estimator that queries ``information_schema`` via
	``mysql.connector``."""

	@staticmethod
	def mySQLDump(ctx:ThaniyaBackupContext, dbName:str, dbUserName:str, dbPassword:str, outputDumpFilePath:str) -> int:
		"""Dump database *dbName* to *outputDumpFilePath* using /usr/bin/mysqldump.

		Returns the size of the created dump file in bytes.
		Raises Exception if mysqldump exits with a non-zero return code.
		"""
		assert isinstance(ctx, ThaniyaBackupContext)
		assert isinstance(dbName, str)
		assert dbName
		assert isinstance(outputDumpFilePath, str)
		assert outputDumpFilePath

		ctx = ctx.descend("Creating dump file " + repr(outputDumpFilePath) + " ...")
		with ctx.log as nestedLog:
			outputDumpFilePath = ctx.absPath(outputDumpFilePath)

			# Credentials go into a private temp defaults file so the password
			# never appears on the command line / in the process list.
			authFile = ctx.privateTempDir.writeTextFile("[mysqldump]\nuser=" + dbUserName + "\npassword=" + dbPassword + "\n")

			# NOTE(review): "--r" relies on mysqldump's option-prefix matching
			# and looks ambiguous (--replace/--result-file/--routines exist);
			# the explicit form would be "--result-file" — confirm against the
			# mysqldump version in use.
			result = jk_simpleexec.invokeCmd("/usr/bin/mysqldump", [
				"--defaults-extra-file=" + authFile,
				"--r",
				outputDumpFilePath,
				"--routines", # Include stored routines (procedures and functions) for the dumped databases in the output.
				"--triggers", # Include triggers for each dumped table in the output.
				dbName,
			], workingDirectory=os.path.dirname(authFile))

			if result.returnCode == 0:
				nestedLog.notice("Succeeded.")
				return os.path.getsize(outputDumpFilePath)
			else:
				result.dump(nestedLog.error)
				raise Exception("Failed to backup database '" + dbName + "'!")
	#

	@staticmethod
	def mySQLDumpCalculateSize(ctx:ThaniyaBackupContext, dbName:str, dbUserName:str, dbPassword:str) -> int:
		"""Estimate the dump size of *dbName* by summing data_length over its
		tables in information_schema; returns -1 when no row was produced.

		NOTE(review): *dbName* is interpolated into the SQL string — assumed
		to come from trusted configuration, not user input.
		"""
		import mysql.connector

		assert isinstance(ctx, ThaniyaBackupContext)

		ctx = ctx.descend("Calculating size for the MySQL dump ...")
		with ctx.log as nestedLog:
			con = None
			try:
				# Segmentation fault
				# see: https://bugs.mysql.com/bug.php?id=89889
				# (but this does not work)
				print("> Connecting ....")
				con = mysql.connector.connect(host="localhost", database=dbName, user=dbUserName, passwd=dbPassword)
				print("> Connected.")

				sqlQuery = "SELECT SUM(data_length) FROM information_schema.tables WHERE table_schema = '" + dbName + "';"
				cursor = con.cursor()
				cursor.execute(sqlQuery)
				records = cursor.fetchall()
				assert cursor.rowcount == 1

				nEstimatedSize = -1
				for row in records:
					nEstimatedSize = row[0]
					break

				return nEstimatedSize

			finally:
				# NOTE(review): if con.cursor() itself fails, 'cursor' is
				# unbound here and this raises NameError, masking the original
				# error — confirm/guard if that path matters.
				if con and con.is_connected():
					cursor.close()
					con.close()
	#
#
| [
"pubsrc@binary-overflow.de"
] | pubsrc@binary-overflow.de |
d678a6a0cdb4da16ce9b9f35ad49253187d46ab1 | 222abe72fdf538587184c45d819d32363f8c3f13 | /AssignmentAnalysis.py | 504f8d65ac4e8fc01f624ac1ad5c6f17b784d18f | [] | no_license | Vojtech-Sassmann/programmingProblemsAnalysis | 07763b9802eed6d3c61b018a573a8d4dc569e8be | 707aea739a5b2a9b1b937a6e827b3150a47b2013 | refs/heads/master | 2021-09-11T01:45:51.539623 | 2018-04-05T21:48:49 | 2018-04-05T21:48:49 | 103,759,752 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 603 | py | from data import tasks
import codecs
from new_bagOfWords_vector import searched_nodes
import new_bagOfWords_vector as bagAnalysis
def foo():
    """Configure the bag-of-words analysis module, then analyse every task's
    reference solution and append its results to the output CSV."""
    # Module-level knobs for new_bagOfWords_vector, applied in one sweep.
    settings = {
        "run": False,
        "binary": False,
        "solution_number": 1,
        "submission_limit": 1,
        "minimal_vector_size": 1,
        "output_path": "resources/example/XexampleBoW.csv",
    }
    for option, value in settings.items():
        setattr(bagAnalysis, option, value)

    bagAnalysis.save_header()
    for task in tasks:
        outcome = bagAnalysis.AnalyseResults()
        bagAnalysis.analyze_solution(task.solution, outcome)
        bagAnalysis.save_results(outcome, task.name)
foo()
| [
"vojtech.sassmann@gmail.com"
] | vojtech.sassmann@gmail.com |
bdb719cc2a2ea4518e62f7168541578a0df9488e | 544d7951fa13346b395499255a9fc2bf7001bbc2 | /Study_API/common/__init__.py | 0a881170bcefad66f7afdd708ffcfdb9633c48ee | [] | no_license | hanfan0510/superme | dea074a4ab29824960836b2ead76930fc9cf3799 | 31600bf189211e909af82d3e6bfd415ef0cd51de | refs/heads/master | 2020-05-24T11:16:51.433542 | 2019-05-21T20:41:28 | 2019-05-21T20:41:28 | 187,244,622 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 52 | py | # _*_ coding: utf-8 _*_
# @Time : 2019-04-28 23:32 | [
"2379696379@qq.vom"
] | 2379696379@qq.vom |
9101590f371cd95848d965d2fcdfe6031f0b94b9 | 9945fa3eddbf9e823fedd24544a6435ab3b61a10 | /bronzegaming/migrations/0008_auto_20190301_2308.py | ce51e2dbe055a11c496d0e77648d316b92a52b91 | [] | no_license | cmk112/BronzeGaming | 345552e59c349d8c70ff774fce36d1725b43a062 | aeea7329cbb103f20a240404845513794d093dd1 | refs/heads/master | 2020-04-25T22:53:54.103100 | 2019-08-21T21:54:09 | 2019-08-21T21:54:09 | 173,126,709 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 796 | py | # Generated by Django 2.1.7 on 2019-03-02 05:08
from django.db import migrations
import sorl.thumbnail.fields
class Migration(migrations.Migration):
    """Switch the image fields on Game, Platform and Profile to
    sorl-thumbnail's ImageField so thumbnails can be generated."""

    dependencies = [
        ('bronzegaming', '0007_profile'),
    ]

    operations = [
        migrations.AlterField(
            model_name='game',
            name='image',
            field=sorl.thumbnail.fields.ImageField(upload_to='./games/'),
        ),
        # NOTE(review): platform images share the './games/' upload folder.
        migrations.AlterField(
            model_name='platform',
            name='image',
            field=sorl.thumbnail.fields.ImageField(upload_to='./games/'),
        ),
        migrations.AlterField(
            model_name='profile',
            name='profile_image',
            field=sorl.thumbnail.fields.ImageField(null=True, upload_to='./profiles/'),
        ),
    ]
"cody.kostyak@gmail.com"
] | cody.kostyak@gmail.com |
130b4eb9d1fb8e5a74b3cd23a2e9762ed8b8cffa | 054f45adf45d43f6b8aeecbf7b45793dedaa4255 | /venv/Scripts/pasteurize-script.py | a418e05e3f7f9d47a64868d8a8f973f587516e4d | [] | no_license | PolinaRubinova/rz1_words | 82eb07bdcfac6d540a91a017c0e199a0dc2c1321 | 0dc97c6d4538e618209904ad07be9773e41514e5 | refs/heads/master | 2023-04-10T05:13:45.636368 | 2021-04-17T12:59:33 | 2021-04-17T12:59:33 | 358,879,355 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,011 | py | #!C:\Users\User\PycharmProjects\rz1_words\venv\Scripts\python.exe
# EASY-INSTALL-ENTRY-SCRIPT: 'future==0.18.2','console_scripts','pasteurize'
# Auto-generated console-script wrapper (setuptools/easy_install): resolves
# and runs the 'pasteurize' entry point of the 'future' distribution.
import re
import sys

# for compatibility with easy_install; see #2198
__requires__ = 'future==0.18.2'

# Prefer the stdlib entry-point API, then the importlib_metadata backport,
# and finally fall back to legacy pkg_resources.load_entry_point.
try:
    from importlib.metadata import distribution
except ImportError:
    try:
        from importlib_metadata import distribution
    except ImportError:
        from pkg_resources import load_entry_point


def importlib_load_entry_point(spec, group, name):
    """Load entry point *name* of *group* from the distribution named in
    *spec* ('dist==version'); importlib-based stand-in for
    pkg_resources.load_entry_point."""
    dist_name, _, _ = spec.partition('==')
    matches = (
        entry_point
        for entry_point in distribution(dist_name).entry_points
        if entry_point.group == group and entry_point.name == name
    )
    return next(matches).load()


# Only takes effect when pkg_resources' load_entry_point was not imported.
globals().setdefault('load_entry_point', importlib_load_entry_point)


if __name__ == '__main__':
    # Strip the '-script.py'/'.exe' suffix so argv[0] shows the command name.
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(load_entry_point('future==0.18.2', 'console_scripts', 'pasteurize')())
| [
"jerrygonsales@mail.ru"
] | jerrygonsales@mail.ru |
630090193b6e2470241c4ed36f5cebbe2eea7935 | 2002956ea01eec2c71c8f0a3c14b57129c74c04b | /sdk/conf_pb2.py | 67443d473a26f9ec00b098abae0179ef481bc2fa | [
"Apache-2.0"
] | permissive | kjchavez/jarvis | 4cecea353eefc50a4a9692a8e8b751c3d36f6b87 | 56eb65d959d82e5643797ad16cda9d7d378d4385 | refs/heads/master | 2020-05-31T17:36:31.128666 | 2015-08-04T04:38:12 | 2015-08-04T04:38:12 | 39,577,094 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | true | 7,170 | py | # Generated by the protocol buffer compiler. DO NOT EDIT!
# source: conf.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor(
name='conf.proto',
package='jarvis',
serialized_pb=_b('\n\nconf.proto\x12\x06jarvis\"3\n\x07\x41udioIO\x12\x0c\n\x04host\x18\x01 \x02(\t\x12\x0c\n\x04port\x18\x02 \x02(\x05\x12\x0c\n\x04name\x18\x03 \x02(\t\"$\n\x06Server\x12\x0c\n\x04host\x18\x01 \x02(\t\x12\x0c\n\x04port\x18\x02 \x02(\x05\"\xbb\x01\n\nJarvisConf\x12\x10\n\x08root_dir\x18\x06 \x02(\t\x12\x0f\n\x07\x61pp_dir\x18\x01 \x02(\t\x12\x1e\n\x06memory\x18\x02 \x02(\x0b\x32\x0e.jarvis.Server\x12\x1d\n\x05state\x18\x03 \x02(\x0b\x32\x0e.jarvis.Server\x12$\n\x0b\x61udio_input\x18\x04 \x03(\x0b\x32\x0f.jarvis.AudioIO\x12%\n\x0c\x61udio_output\x18\x05 \x03(\x0b\x32\x0f.jarvis.AudioIO')
)
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
_AUDIOIO = _descriptor.Descriptor(
name='AudioIO',
full_name='jarvis.AudioIO',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='host', full_name='jarvis.AudioIO.host', index=0,
number=1, type=9, cpp_type=9, label=2,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='port', full_name='jarvis.AudioIO.port', index=1,
number=2, type=5, cpp_type=1, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='name', full_name='jarvis.AudioIO.name', index=2,
number=3, type=9, cpp_type=9, label=2,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
oneofs=[
],
serialized_start=22,
serialized_end=73,
)
_SERVER = _descriptor.Descriptor(
name='Server',
full_name='jarvis.Server',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='host', full_name='jarvis.Server.host', index=0,
number=1, type=9, cpp_type=9, label=2,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='port', full_name='jarvis.Server.port', index=1,
number=2, type=5, cpp_type=1, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
oneofs=[
],
serialized_start=75,
serialized_end=111,
)
_JARVISCONF = _descriptor.Descriptor(
name='JarvisConf',
full_name='jarvis.JarvisConf',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='root_dir', full_name='jarvis.JarvisConf.root_dir', index=0,
number=6, type=9, cpp_type=9, label=2,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='app_dir', full_name='jarvis.JarvisConf.app_dir', index=1,
number=1, type=9, cpp_type=9, label=2,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='memory', full_name='jarvis.JarvisConf.memory', index=2,
number=2, type=11, cpp_type=10, label=2,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='state', full_name='jarvis.JarvisConf.state', index=3,
number=3, type=11, cpp_type=10, label=2,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='audio_input', full_name='jarvis.JarvisConf.audio_input', index=4,
number=4, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='audio_output', full_name='jarvis.JarvisConf.audio_output', index=5,
number=5, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
oneofs=[
],
serialized_start=114,
serialized_end=301,
)
_JARVISCONF.fields_by_name['memory'].message_type = _SERVER
_JARVISCONF.fields_by_name['state'].message_type = _SERVER
_JARVISCONF.fields_by_name['audio_input'].message_type = _AUDIOIO
_JARVISCONF.fields_by_name['audio_output'].message_type = _AUDIOIO
DESCRIPTOR.message_types_by_name['AudioIO'] = _AUDIOIO
DESCRIPTOR.message_types_by_name['Server'] = _SERVER
DESCRIPTOR.message_types_by_name['JarvisConf'] = _JARVISCONF
AudioIO = _reflection.GeneratedProtocolMessageType('AudioIO', (_message.Message,), dict(
DESCRIPTOR = _AUDIOIO,
__module__ = 'conf_pb2'
# @@protoc_insertion_point(class_scope:jarvis.AudioIO)
))
_sym_db.RegisterMessage(AudioIO)
Server = _reflection.GeneratedProtocolMessageType('Server', (_message.Message,), dict(
DESCRIPTOR = _SERVER,
__module__ = 'conf_pb2'
# @@protoc_insertion_point(class_scope:jarvis.Server)
))
_sym_db.RegisterMessage(Server)
JarvisConf = _reflection.GeneratedProtocolMessageType('JarvisConf', (_message.Message,), dict(
DESCRIPTOR = _JARVISCONF,
__module__ = 'conf_pb2'
# @@protoc_insertion_point(class_scope:jarvis.JarvisConf)
))
_sym_db.RegisterMessage(JarvisConf)
# @@protoc_insertion_point(module_scope)
| [
"kjchavez@stanford.edu"
] | kjchavez@stanford.edu |
6b9da091c4e7f39403cfd8adbe13a8a58faacb97 | 62605ca9e27cd33a7743dd449e9d2d3232f67e3d | /Ch5 - Python Crash Course/Code/5.6_some_dictionary_operations.py | 5ed2cb7541cb8009b1ce52a3aacc72778d49bbc4 | [
"MIT"
] | permissive | The-Ineffable-Alias/ProgrammingDigitalHumanitiesBook | 70e4488a3aa3893d6359d028a66c848b126a24b3 | 9ce1bef19e28fc3c1775e1135d9d177820e69f08 | refs/heads/master | 2022-12-13T18:03:27.647824 | 2020-09-10T15:56:55 | 2020-09-10T15:56:55 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 998 | py | # Dictionaries operations
# Tutorial: common dictionary operations.
address_book = {
    "Alice": "123", # Number as string
    "Bob": 231, # This number is an int
    "Carl": "312"
}
# Get the value for a certain key
# (and return the default, if provided.)
address_book.get("Alice")
address_book.get("Ann") # Returns None: "Ann" is not a key and no default was given
# Print all values in the dictionary, one by one:
for x in address_book: # iterating a dict yields its keys, so x is a key
    print(address_book[x]) # dict[key] returns the value
# You can use the values() view to iterate the values directly:
for x in address_book.values():
    # Now x is whatever is found in values() - i.e. a value already
    print(x)
# Loop through both keys and values:
# use the items() view, which yields (key, value) pairs
# from the dictionary:
for x, y in address_book.items():
    print(x, y)
# You can replace x and y with names that say what they stand for,
# i.e. key, value
for key, value in address_book.items():
    print(key, value)
| [
"55744890+1110sillabo@users.noreply.github.com"
] | 55744890+1110sillabo@users.noreply.github.com |
9b9069b00e19b7f1119e5df9d8f405c2e310aae3 | 67635134685261d087653d380b0aeb1475e5286e | /d.py | d4cad3b5951c67ff60c78e7b67b38afb1fa7dc49 | [] | no_license | simgenurcankaya/Network_TP1 | 1d252ca1ee96d29fe9aeaa0e6d664018c92693d2 | ca77f447aa2b58e8f28a02fb88af232eb8e2bd67 | refs/heads/master | 2020-09-10T17:55:45.390295 | 2019-11-22T22:46:43 | 2019-11-22T22:46:43 | 221,786,801 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,696 | py | import threading
import socket
# SERVER NEEDS TO START BEFORE CLIENT
# IP and Port is the same with client
ip_send_r1 = "10.10.4.1"
ip_get_r1 = "10.10.4.2"
ip_send_r2= "10.10.5.1"
ip_get_r2 = "10.10.5.2"
ip_send_r3 = "10.10.7.2"
ip_get_r3 = "10.10.7.1"
port_r1= 23426
port_r2= 44004
port_r3= 45678
sockR1 = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
sockR2 = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
sockR3 = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
def getR1(ip,port):
sockR1.bind((ip,port))
i = 1000
while i:
data, addr = sockR1.recvfrom(1024)
print "Message from R1: ", data
sockR1.sendto(data, addr)
i -= 1
def getR2(ip,port):
sockR2.bind((ip,port))
i =1000
while i:
data, addr = sockR2.recvfrom(1024)
print "Message from R2: ", data
sockR2.sendto(data, addr)
i -= 1
def getR3(ip,port):
sockR3.bind((ip,port))
i = 1000
while i:
data, addr = sockR3.recvfrom(1024)
print "Message from R3: ", data
sockR3.sendto(data, addr)
i -= 1
if __name__ == "__main__":
t1 = threading.Thread(target=getR1, args=(ip_get_r1,port_r1))
t2 = threading.Thread(target=getR2, args=(ip_get_r2,port_r2))
t3 = threading.Thread(target=getR3, args=(ip_get_r3,port_r3))
# starting thread 1
t1.start()
# starting thread 2
t2.start()
t3.start()
# wait until thread 1 is completely executed
t1.join()
# wait until thread 2 is completely executed
t2.join()
t3.join()
print t1.isAlive()
print t2.isAlive()
print t3.isAlive()
# both threads completely executed
print("Done!") | [
"simgenurcankaya@gmail.com"
] | simgenurcankaya@gmail.com |
697d726a199f108a0730d881badc4c2040eb0a1c | 3027b528e95c10ffe9245bf4421ff1f9155acfd4 | /code/gamescene.py | 2c31a024dccd00a7e17a289a6b5ec9699ad4bb7a | [
"MIT"
] | permissive | prake71/blackandwhite | 4f3d8158d79e04a83fa873e70ae08495da6b92ea | 2f23f4b3dc57080d230a4c423332000fe02b8024 | refs/heads/master | 2020-03-10T23:53:16.846879 | 2018-04-22T01:09:34 | 2018-04-22T01:09:34 | 129,650,171 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,749 | py | import pygame
import constants
from player import *
from scene import *
from level01 import *
from level03 import *
from level02 import *
from customscene import *
import titlescene
class GameScene(Scene):
scr_w = constants.SCREENWIDTH
scr_h = constants.SCREENHEIGHT
def __init__(self, levelno):
super(GameScene, self).__init__()
# Create the player
self.player = Player()
self.player.inlevelno = levelno
# Create all the levels
self.level_list = []
self.level_list.append(Level_01(self.player))
self.level_list.append(Level_03(self.player))
# Set the current level
self.current_level_no = levelno
self.current_level = self.level_list[self.current_level_no]
self.player.level = self.current_level
self.active_sprite_list = pygame.sprite.Group()
self.set_player_pos()
# music
pygame.mixer.init()
self.music = pygame.mixer.music.load("music/jumpandrun.ogg")
pygame.mixer.music.play(-1)
def set_player_pos(self):
if self.current_level_no == 0:
self.player.rect.x = 0
self.player.rect.y = self.scr_h - self.player.rect.height
self.active_sprite_list.add(self.player)
else:
print("in player mirror")
self.player.rect.x = constants.SCREENWIDTH - 20
self.player.rect.y = 0
self.active_sprite_list.add(self.player)
def render(self, screen):
# ALL CODE TO DRAW SHOULD GO BELOW THIS COMMENT
self.current_level.draw(screen)
self.active_sprite_list.draw(screen)
# ALL CODE TO DRAW SHOULD GO ABOVE THIS COMMENT
def update(self):
# Update the player.
self.active_sprite_list.update()
# Update items in the level
self.current_level.update()
# If the player gets near the right side, shift the world left (-x)
if self.player.rect.right > self.scr_w:
self.player.rect.right = self.scr_w
# If the player gets near the left side, shift the world right (+x)
if self.player.rect.left < 0:
self.player.rect.left = 0
if self.player.level_completed():
self.player.goal_reached = False
self.current_level_no += 1
if self.current_level_no > len(self.level_list) - 1:
self.exit()
else:
self.current_level = self.level_list[self.current_level_no]
self.manager.go_to(GameScene(self.current_level_no))
def exit(self):
self.manager.go_to(CustomScene("You Won!"))
def die(self):
self.manager.go_to(CustomScene("You lose!"))
def handle_events(self, events):
if not self.current_level_no % 2:
for e in events:
if e.type == pygame.KEYDOWN and e.key == pygame.K_ESCAPE:
self.manager.go_to(titlescene.TitleScene())
if e.type == pygame.KEYDOWN:
if e.key == pygame.K_LEFT:
self.player.go_left()
if e.key == pygame.K_RIGHT:
self.player.go_right()
if e.key == pygame.K_SPACE:
self.player.jump()
if e.type == pygame.KEYUP:
if e.key == pygame.K_LEFT and self.player.change_x < 0:
self.player.stop()
if e.key == pygame.K_RIGHT and self.player.change_x > 0:
self.player.stop()
if e.key == pygame.K_r:
self.set_player_pos()
# skip level (for testing)
if e.key == pygame.K_s:
self.manager.go_to(GameScene(1))
else:
for e in events:
if e.type == pygame.KEYDOWN and e.key == pygame.K_ESCAPE:
self.manager.go_to(titlescene.TitleScene())
if e.type == pygame.KEYDOWN:
if e.key == pygame.K_LEFT:
self.player.go_right()
if e.key == pygame.K_RIGHT:
self.player.go_left()
if e.key == pygame.K_SPACE:
self.player.jump_mirror()
if e.type == pygame.KEYUP:
if e.key == pygame.K_LEFT and self.player.change_x > 0:
self.player.stop()
if e.key == pygame.K_RIGHT and self.player.change_x < 0:
self.player.stop()
if e.key == pygame.K_r:
self.set_player_pos()
#self.current_level.check_keys()
| [
"prake71@gmail.com"
] | prake71@gmail.com |
716ed2177858886621060abad9ac3e5c264f152a | ec0b8bfe19b03e9c3bb13d9cfa9bd328fb9ca3f1 | /res/packages/scripts/scripts/client/gui/Scaleform/daapi/view/battle/shared/timers_common.py | fa0fb189e2e6d1344d6ee8d161432c625338e6e9 | [] | no_license | webiumsk/WOT-0.9.20.0 | de3d7441c5d442f085c47a89fa58a83f1cd783f2 | 811cb4e1bca271372a1d837a268b6e0e915368bc | refs/heads/master | 2021-01-20T22:11:45.505844 | 2017-08-29T20:11:38 | 2017-08-29T20:11:38 | 101,803,045 | 0 | 1 | null | null | null | null | WINDOWS-1250 | Python | false | false | 2,809 | py | # 2017.08.29 21:46:09 Střední Evropa (letní čas)
# Embedded file name: scripts/client/gui/Scaleform/daapi/view/battle/shared/timers_common.py
import BigWorld
from gui.shared.utils.TimeInterval import TimeInterval
class TimerComponent(object):
__slots__ = ('_panel', '_typeID', '_viewID', '_totalTime', '_startTime', '_finishTime')
def __init__(self, panel, typeID, viewID, totalTime):
super(TimerComponent, self).__init__()
self._panel = panel
self._typeID = typeID
self._viewID = viewID
self._totalTime = totalTime
self._startTime = BigWorld.serverTime()
self._finishTime = self._startTime + totalTime if totalTime else 0
def __repr__(self):
return 'TimerComponent(typeID = {}, viewID = {}, totalTime = {})'.format(self._typeID, self._viewID, self._totalTime)
def clear(self):
self._panel = None
return
def show(self, isBubble = True):
self._showView(isBubble)
self._startTick()
def hide(self):
self._stopTick()
self._hideView()
@property
def typeID(self):
return self._typeID
@property
def viewID(self):
return self._viewID
@property
def finishTime(self):
return self._finishTime
@property
def totalTime(self):
return self._totalTime
def _startTick(self):
raise NotImplementedError
def _stopTick(self):
raise NotImplementedError
def _hideView(self):
raise NotImplementedError
def _showView(self, isBubble):
raise NotImplementedError
class PythonTimer(TimerComponent):
__slots__ = ('_timeInterval', '__weakref__')
def __init__(self, panel, typeID, viewID, totalTime):
super(PythonTimer, self).__init__(panel, typeID, viewID, totalTime)
self._timeInterval = TimeInterval(1.0, self, '_tick')
def clear(self):
self._timeInterval.stop()
super(PythonTimer, self).clear()
def _startTick(self):
if self._totalTime:
timeLeft = max(0, self._finishTime - BigWorld.serverTime())
if timeLeft:
self._setViewSnapshot(timeLeft)
self._timeInterval.start()
def _stopTick(self):
self._timeInterval.stop()
def _tick(self):
timeLeft = self._finishTime - BigWorld.serverTime()
if timeLeft >= 0:
self._setViewSnapshot(timeLeft)
else:
self.hide()
def _setViewSnapshot(self, timeLeft):
raise NotImplementedError
# okay decompyling c:\Users\PC\wotmods\files\originals\res\packages\scripts\scripts\client\gui\Scaleform\daapi\view\battle\shared\timers_common.pyc
# decompiled 1 files: 1 okay, 0 failed, 0 verify failed
# 2017.08.29 21:46:09 Střední Evropa (letní čas)
| [
"info@webium.sk"
] | info@webium.sk |
09ab077c784360c58d2374981a7f02a9991ab938 | a301c3a8702fdf39a96e6f0a509a2c90a5969d4a | /framac/codeeditor/admin.py | d2c3e69f950a75451b9672d70a9c00f8544113b7 | [] | no_license | rklimek123/awww1 | fe656bcabad4bbc2682f43d65b64e84c043d39c0 | 972f4ab40ba71dd963cd9f18f3171328b4b37de2 | refs/heads/master | 2023-05-10T15:30:05.814803 | 2021-06-17T21:58:23 | 2021-06-17T21:58:23 | 344,237,433 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 285 | py | from django.contrib import admin
from .models import *
admin.site.register(Directory)
admin.site.register(File)
admin.site.register(FileSection)
admin.site.register(SectionCategory)
admin.site.register(SectionStatus)
admin.site.register(SectionStatusData)
admin.site.register(Prover)
| [
"rafal1klimek@gmail.com"
] | rafal1klimek@gmail.com |
d42c548a74719a1d6a26357b3b649727be0cedff | 6e5c18ff8dc132580defa69128a6195525862e9e | /fizzbuzz.py | 4ae7c0fc1e48744651c5b0ee7a626a5fc6b5ffef | [] | no_license | vukster/python | 9c660c69fdc25b8b6b64ac8dcd6e97def8e3ace1 | 5465f69fbae26818dcac6aa69c2c78c087d8aeb0 | refs/heads/main | 2023-03-12T12:49:04.114165 | 2021-03-07T10:29:07 | 2021-03-07T10:29:07 | 340,090,475 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 347 | py | input1 = int(input("Select a number between 1 and 100"))
for x in range(0, input1):
check_count = x+1
if ((check_count % 3) == 0) and ((check_count % 5) == 0):
print("fizzbuzz")
elif (check_count % 3) == 0:
print("fizz")
elif (check_count % 5) == 0:
print("buzz")
else:
print(check_count)
| [
"dvukicev@cisco.com"
] | dvukicev@cisco.com |
ef8a75b0ff9cd7873c4b6b19ff53e7782fa3b405 | 5835f580ce78ead9e76d8bddc54a1efb54a51402 | /driver/fixcode.py | 48045486f3556ea711d69cfedccdb6dc57245ee7 | [
"MIT"
] | permissive | SimonLarsen/mmlgb | 82b9f0295f689cbe529a64414c724f865880cf7c | aa0864d4391490057760a2a84dd4ab117f0b04e8 | refs/heads/master | 2023-02-08T04:32:27.052886 | 2019-07-28T19:19:45 | 2019-07-28T19:19:45 | 28,891,338 | 43 | 8 | MIT | 2023-01-28T08:06:38 | 2015-01-07T01:00:14 | C | UTF-8 | Python | false | false | 853 | py | #!/usr/bin/env python3
import argparse
import re
def main():
parser = argparse.ArgumentParser()
parser.add_argument("infile", help="Code file.", type=str)
parser.add_argument("outfile", help="Outfile file.", type=str)
args = parser.parse_args()
infile = open(args.infile, "r")
lines = infile.readlines()
infile.close()
outfile = open(args.outfile, "w")
for line in lines:
skip = False
if re.match("^\s*\.optsdcc", line):
line = "; " + line
if re.match("^\s*;", line) and len(line) > 128:
line = line[0:127] + "\n"
skip = skip or re.match("^\s*.area\s+_CABS", line) != None
skip = skip or re.match("^\s*.area\s+_DABS", line) != None
if not skip:
outfile.write(line)
outfile.close()
if __name__ == "__main__":
main()
| [
"simonhffh@gmail.com"
] | simonhffh@gmail.com |
852cbc1af2b4d54d3ba0a58bc1812c1ddf7cbe0c | c61283c61442a4413dd13696ff513bfe732cdcdc | /test/test_api.py | a292b58aca87a0c7c71aae5e88b194f6f235d44e | [
"BSD-2-Clause",
"BSD-2-Clause-Views"
] | permissive | quietguoguo/nginx-config-builder | 138a07478518b146ad4b0f3e4c2c70c1af80ead9 | e3030c879b008fbb73033ba639359407038edefc | refs/heads/master | 2021-01-20T03:06:38.988737 | 2017-07-13T19:59:46 | 2017-07-13T19:59:46 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,418 | py | from nginx.config.api.blocks import Block, EmptyBlock
from nginx.config.api.options import KeyOption, KeyValueOption, KeyMultiValueOption
from nginx.config.helpers import duplicate_options
def test_block_options():
block = Block('test')
assert block.options == {'_owner': block}
assert block.sections == {'_owner': block}
block.options.opt = 'val1'
assert repr(block) == '\ntest {\n opt val1;\n}'
block.options.opt = 'val2 val3'
assert repr(block) == '\ntest {\n opt val2 val3;\n}'
block.options.opt = ''
assert repr(block) == '\ntest {\n opt;\n}'
def test_emptyblock_options():
block = EmptyBlock()
assert block.options == {'_owner': block}
assert block.sections == {'_owner': block}
block.options.opt = 'val'
assert repr(block) == '\nopt val;'
def test_options():
opt1 = KeyOption('opt')
assert repr(opt1) == '\nopt;'
opt2 = KeyValueOption('opt', value='value')
assert repr(opt2) == '\nopt value;'
opt3 = KeyMultiValueOption('opt', value=['v', 'a', 'l'])
assert repr(opt3) == '\nopt v a l;'
def test_sections():
block = Block('test')
block.sections.add(EmptyBlock(test=1))
assert repr(block) == '\ntest {\n test 1;\n}'
def test_duplicates():
dupes = duplicate_options('test', [1, 2, 3])
assert sorted(repr(dupes).splitlines()) == sorted('\ntest 1;\ntest 2;\ntest 3;'.splitlines())
| [
"lcarvalho@linkedin.com"
] | lcarvalho@linkedin.com |
295065fa1ab2d76f7cb08c19a6d47d4aa00dda02 | 46e028809514ab7dcdac97e370f15fee85d9fb22 | /vappio-twisted/vappio_tx/credentials/ctypes/nimbus.py | 252dd9aa1d500998d92921d140580043378e3aee | [] | no_license | carze/vappio | c26c1f07511ad0dd6041c540651c4e6397569e61 | 23d85308ec51299233ade086e1f3df86d3452a4f | refs/heads/master | 2021-01-21T00:08:12.685666 | 2014-11-19T12:36:12 | 2014-11-19T12:36:12 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,035 | py | import os
import urlparse
from twisted.internet import defer
from twisted.python import log
from igs_tx.utils import commands
from igs_tx.utils import defer_utils
from igs.utils import functional as func
from igs.utils import config
from vappio_tx.credentials.ctypes import ec2
##
# This module wants to go by
NAME = 'Nimbus'
DESC = """Control module for Nimbus-based users"""
RETRY_ATTEMPTS = 30
def instantiateCredential(conf, cred):
if not conf('config_loaded', default=False):
conf = config.configFromConfig(conf,
base=config.configFromStream(open(conf('conf_file')),
base=conf))
certFile = os.path.join(conf('general.secure_tmp'), cred.name + '_cert.pem')
keyFile = os.path.join(conf('general.secure_tmp'), cred.name + '_key.pem')
mainDeferred = defer.succeed(None)
if not os.path.exists(certFile) and not os.path.exists(keyFile):
tmpCertFile = os.path.join(conf('general.secure_tmp'), cred.name + '_cert-tmp.pem')
tmpKeyFile = os.path.join(conf('general.secure_tmp'), cred.name + '_key-tmp.pem')
if 'ec2_url' not in cred.metadata:
return defer.fail(Exception('You must have an ec2_url'))
parsedUrl = urlparse.urlparse(cred.metadata['ec2_url'])
if ':' not in parsedUrl.netloc:
return defer.fail(Exception('Your URL must contain a port'))
host, port = parsedUrl.netloc.split(':')
fout = open(tmpCertFile, 'w')
fout.write(cred.cert)
fout.close()
fout = open(tmpKeyFile, 'w')
fout.write(cred.pkey)
fout.close()
d = commands.runProcess(['nimbusCerts2EC2.py',
'--in-cert=' + tmpCertFile,
'--out-cert=' + certFile,
'--in-key=' + tmpKeyFile,
'--out-key=' + keyFile,
'--java-cert-dir=/tmp',
'--java-cert-host=' + host,
'--java-cert-port=' + port],
stdoutf=None,
stderrf=None,
log=True)
def _chmod(_exitCode):
return commands.runProcess(['chmod', '+r', keyFile], stdoutf=None, stderrf=None)
d.addCallback(_chmod)
def _unlink(v):
os.unlink(tmpCertFile)
os.unlink(tmpKeyFile)
return v
d.addCallback(_unlink)
d.addErrback(_unlink)
mainDeferred.addCallback(lambda _ : d)
ec2Home = cred.metadata.get('ec2_api_tools', '/opt/ec2-api-tools-1.3-57419')
newCred = func.Record(name=cred.name, conf=conf, cert=certFile, pkey=keyFile, ec2Path=os.path.join(ec2Home, 'bin'),
env=dict(EC2_JVM_ARGS='-Djavax.net.ssl.trustStore=/tmp/jssecacerts',
EC2_HOME=ec2Home,
EC2_URL=cred.metadata['ec2_url']))
if os.path.exists(conf('cluster.cluster_private_key') + '.pub'):
pubKey = open(conf('cluster.cluster_private_key') + '.pub').read().rstrip()
def _addKeypair():
keyPairDefer = ec2.addKeypair(newCred, conf('cluster.key') + '||' + pubKey)
def _printError(f):
log.msg('Adding keypair failed, retrying')
log.err(f)
return f
keyPairDefer.addErrback(_printError)
return keyPairDefer
mainDeferred.addCallback(lambda _ : defer_utils.tryUntil(10, _addKeypair, onFailure=defer_utils.sleep(30)))
mainDeferred.addCallback(lambda _ : newCred)
return mainDeferred
def retryIfTTLError(fail):
try:
fail.raiseException()
except commands.ProgramRunError, err:
return defer.succeed('General security' in err.stderr or 'Read timeout' in err.stderr)
except Exception:
return fail
def retry(n, f):
def _(*args, **kwargs):
return defer_utils.tryUntil(n,
lambda : f(*args, **kwargs),
onFailure=defer_utils.sleep(30),
retry=retryIfTTLError)
return _
# Set all of these to what ec2 does
Instance = ec2.Instance
instanceFromDict = ec2.instanceFromDict
instanceToDict = ec2.instanceToDict
addGroup = retry(RETRY_ATTEMPTS, ec2.addGroup)
addKeypair = retry(RETRY_ATTEMPTS, ec2.addKeypair)
authorizeGroup = retry(RETRY_ATTEMPTS, ec2.authorizeGroup)
listGroups = retry(RETRY_ATTEMPTS, ec2.listGroups)
listInstances = retry(RETRY_ATTEMPTS, ec2.listInstances)
listKeypairs = retry(RETRY_ATTEMPTS, ec2.listKeypairs)
runInstances = retry(RETRY_ATTEMPTS, ec2.runInstances)
runSpotInstances = retry(RETRY_ATTEMPTS, ec2.runSpotInstances)
updateInstances = retry(RETRY_ATTEMPTS, ec2.updateInstances)
terminateInstances = retry(RETRY_ATTEMPTS, ec2.terminateInstances)
| [
"orbitz@gmail.com"
] | orbitz@gmail.com |
1e2101b0b45013b2cd6d6af25611d357c69532db | 035c466890f0daf424acb5b59a1ab2f470bc9ab5 | /check_data.py | bdce0cc5453893f9b5955d9afa52c9dcf2c5bbc6 | [] | no_license | poastertoaster/NBA-Twitter | 75d12f576e542c634d41a2f9da68c10a3be8af70 | a2e8ed863c56de3461c86cee3160b61899eb964d | refs/heads/master | 2020-11-24T15:19:06.052964 | 2020-01-13T21:56:36 | 2020-01-13T21:56:36 | 228,212,825 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,497 | py | import datetime
from nba_api.stats.endpoints import scoreboardv2
from nba_api.stats.endpoints import boxscoretraditionalv2
#Classes
from image import image
from set_data import set_data
from tracker import tracker
from update_twitter import update_twitter
class check_data():
def check_game(self, game_index, games_list, dayOffset):
todaysGames = scoreboardv2.ScoreboardV2(day_offset=dayOffset, game_date=datetime.datetime.today()).game_header.get_data_frame()
#Go through all of the games for today
game = todaysGames.iloc[game_index]
gameStats = set_data().create_data(game, dayOffset)
#If the game is over ...
if game.GAME_STATUS_TEXT == 'Final':
#Grab the game stat line
teamStats = boxscoretraditionalv2.BoxScoreTraditionalV2(game_id=game.GAME_ID).team_stats.get_data_frame()
#Check for the team's collective stats
gameStats = set_data().set_team_points(game, teamStats, gameStats)
#Check that the box score has a final total
if gameStats['home']['boxscore']['team_points'] != None:
#Grab the boxscore
boxScore = boxscoretraditionalv2.BoxScoreTraditionalV2(game_id=game.GAME_ID).player_stats.get_data_frame()
#Set this game to complete in the tracking file
tracker().write_games(game_index, games_list)
#Set the category leaders
gameStats = set_data().set_player_stats(boxScore, gameStats)
#Create the image summary
image().create_image(gameStats)
#Print the tagline for the game.
if gameStats['home']['boxscore']['team_points'] > gameStats['away']['boxscore']['team_points']:
status = f"The {gameStats['home']['team_info'][0]} defeat the {gameStats['away']['team_info'][0]} {gameStats['home']['boxscore']['team_points']}-{gameStats['away']['boxscore']['team_points']} off of {gameStats['home']['boxscore']['Player_Points'][2]} points from {gameStats['home']['boxscore']['Player_Points'][0]}. #{gameStats['away']['team_info'][3]}at{gameStats['home']['team_info'][3]}"
else:
status = f"The {gameStats['away']['team_info'][0]} defeat the {gameStats['home']['team_info'][0]} {gameStats['away']['boxscore']['team_points']}-{gameStats['home']['boxscore']['team_points']} off of {gameStats['away']['boxscore']['Player_Points'][2]} points from {gameStats['away']['boxscore']['Player_Points'][0]}. #{gameStats['away']['team_info'][3]}at{gameStats['home']['team_info'][3]}"
print(status)
update_twitter().send_update(status)
#When the last game has been checked, set up for tomorrow
if '0' not in games_list:
tracker().reset_games(dayOffset+1, True)
#Break the snooze loop
return False
else:
#Continue the snooze loop if there are more games to check
return True
def check_tracker(self, dayOffset):
#Open the tracking file
file = open("todays_games.txt", "r")
#Filter the data in the file to create an iterable list
gamesString = str(file.read().strip())
games_list = gamesString.split(",")
file.close()
#Go through the data in the list and check games that haven't finished
for index, game in enumerate(games_list):
if int(game) == 0:
return check_data().check_game(index, games_list, dayOffset)
def check_start(self, dayOffset):
#Check the games for today
todaysGames = scoreboardv2.ScoreboardV2(day_offset=dayOffset, game_date=datetime.datetime.today()).game_header.get_data_frame()
#If a game has started keep going. If not, go back to napping.
gamesStarted = [[index, game['GAME_STATUS_TEXT']] for index, game in todaysGames.iterrows() if 'ET' not in game['GAME_STATUS_TEXT']]
if len(gamesStarted) > 0:
gamesFinished = [finished[0] for finished in gamesStarted if finished[1] == 'Final']
#If a game has finished run the rest of the script
if len(gamesFinished) > 0:
return check_data().check_tracker(dayOffset)
'''#If there are games that haven't finished, continue waiting to check them
if len(gamesFinished) != len(todaysGames):
return True
#If all the games have finished, break the loop and go to sleep
else:
return False'''
#If a game hasn't finished, snooze until it's over
else:
return False
def check_start_2(self, dayOffset):
todaysGames = scoreboardv2.ScoreboardV2(day_offset=dayOffset, game_date=datetime.datetime.today()).game_header.get_data_frame()
gamesStarted = [[index, game['GAME_STATUS_TEXT']] for index, game in todaysGames.iterrows() if 'ET' not in game['GAME_STATUS_TEXT']]
return gamesStarted
def check_game_2(self, game_index, dayOffset):
todaysGames = scoreboardv2.ScoreboardV2(day_offset=dayOffset, game_date=datetime.datetime.today()).game_header.get_data_frame()
#Go through all of the games for today
game = todaysGames.iloc[game_index]
gameStats = set_data().create_data(game, dayOffset)
#If the game is over ...
if game.GAME_STATUS_TEXT == 'Final':
#Grab the game stat line
teamStats = boxscoretraditionalv2.BoxScoreTraditionalV2(game_id=game.GAME_ID).team_stats.get_data_frame()
#Check for the team's collective stats
gameStats = set_data().set_team_points(game, teamStats, gameStats)
#Check that the box score has a final total
if gameStats['home']['boxscore']['team_points'] != None:
#Grab the boxscore
boxScore = boxscoretraditionalv2.BoxScoreTraditionalV2(game_id=game.GAME_ID).player_stats.get_data_frame()
#Set the category leaders
gameStats = set_data().set_player_stats(boxScore, gameStats)
#Create the image summary
image().create_image(gameStats)
#Print the tagline for the game.
if gameStats['home']['boxscore']['team_points'] > gameStats['away']['boxscore']['team_points']:
status = f"The {gameStats['home']['team_info'][0]} defeat the {gameStats['away']['team_info'][0]} {gameStats['home']['boxscore']['team_points']}-{gameStats['away']['boxscore']['team_points']} off of {gameStats['home']['boxscore']['Player_Points'][2]} points from {gameStats['home']['boxscore']['Player_Points'][0]}. #{gameStats['away']['team_info'][3]}at{gameStats['home']['team_info'][3]}"
else:
status = f"The {gameStats['away']['team_info'][0]} defeat the {gameStats['home']['team_info'][0]} {gameStats['away']['boxscore']['team_points']}-{gameStats['home']['boxscore']['team_points']} off of {gameStats['away']['boxscore']['Player_Points'][2]} points from {gameStats['away']['boxscore']['Player_Points'][0]}. #{gameStats['away']['team_info'][3]}at{gameStats['home']['team_info'][3]}"
print(status)
#update_twitter().send_update(status)
return 1
else:
return 0 | [
"46764889+poastertoaster@users.noreply.github.com"
] | 46764889+poastertoaster@users.noreply.github.com |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.