code
stringlengths 13
6.09M
| order_type
stringclasses 2
values | original_example
dict | step_ids
listlengths 1
5
|
|---|---|---|---|
"""NumPy cheat sheet: creation, boolean masks, broadcasting, dtypes, stacking."""
import numpy as np

z = np.linspace(2, 10, 5)  # from 2 to 10, with 5 elements
# OUT: array([ 2.,  4.,  6.,  8., 10.])

np.random.seed(0)
z1 = np.random.randint(10, size=6)
# OUT: array([5, 0, 3, 3, 7, 9])

z = np.array([1, 2, 3, 4, 5])
z < 3
# OUT: array([ True,  True, False, False, False])
z[z < 3]
# OUT: array([1, 2])

a = np.array([1, 2, 3, 4, 5])
b = np.array([6, 7, 8, 9, 10])

a + b  # elementwise; likewise - * /
# OUT: array([ 7,  9, 11, 13, 15])
a + 30  # scalar broadcast; likewise - * /
# OUT: array([31, 32, 33, 34, 35])

a = np.array([[1, 2, 3], [4, 5, 6]])
print(a)
# OUT: [[1 2 3]
#       [4 5 6]]
a.shape  # attribute, not a method: a.shape() raises TypeError
# OUT: (2, 3)
a.ndim  # attribute, not a method
# OUT: 2
a[0, 2]
# OUT: 3
a[0, :]
# OUT: array([1, 2, 3])
a[:, 1]
# OUT: array([2, 5])  (second column)

np.min(a)  # likewise np.max, np.sum
# OUT: 1

np.zeros(5)
# OUT: array([0., 0., 0., 0., 0.])
np.zeros_like([[10, 10], [1, 1]])
# OUT: array([[0, 0], [0, 0]])
np.ones((3, 2))  # the shape must be a single tuple argument; np.ones(3, 2) is a TypeError
# OUT: array([[1., 1.],
#             [1., 1.],
#             [1., 1.]])
np.full((2, 2), 100)
# OUT: array([[100, 100],
#             [100, 100]])
np.full_like(np.zeros((2, 2)), 10, dtype=int)  # shape is copied from an existing array
# OUT: array([[10, 10],
#             [10, 10]])

np.random.rand(2, 4)
# OUT: array([[x, x, x, x],
#             [x, x, x, x]])

np.random.randint(10)
# OUT: x  # random int from 0 to 10 (10 excluded)

np.random.randint(5, 10, size=(2, 2))  # from 5 to 10 (10 excluded)
# OUT: array([[x, x],
#             [x, x]])

a = [np.pi, -np.pi, 0]
np.cos(a)
# OUT: array([-1., -1.,  1.])

np.arange(10)
# OUT: array([0, 1, ..., 9])

v1 = np.array([1, 2, 3])
v2 = np.array([4, 5, 6])

np.vstack([v1, v2, v1])
# OUT: [[1 2 3]
#       [4 5 6]
#       [1 2 3]]

a = np.array([1, 2, 3, 4, 5, 6, 7, 8, 9])
# a[[1, 2, 8]]  (fancy indexing)
# OUT: array([2, 3, 9])

filedata = np.genfromtxt("name.txt", delimiter=",")  # needs name.txt next to the script
filedata = filedata.astype("float32")  # cast with a concrete dtype name
# filedata[filedata > 50]
# ((filedata > 50) & (filedata < 100))

# bool     Boolean (True or False) stored as a bit
# intp     Platform integer (normally either int32 or int64)
# int8     Byte (-128 to 127)
# int16    Integer (-32768 to 32767)
# int32    Integer (-2**31 to 2**31 - 1)
# int64    Integer (-2**63 to 2**63 - 1)
# uint8    Unsigned integer (0 to 255)
# uint16   Unsigned integer (0 to 65535)
# uint32   Unsigned integer (0 to 2**32 - 1)
# uint64   Unsigned integer (0 to 2**64 - 1)
# float16  Half precision float: sign bit, 5 bits exponent, 10 bits mantissa
# float32  Single precision float: sign bit, 8 bits exponent, 23 bits mantissa
# float64  Double precision float: sign bit, 11 bits exponent, 52 bits mantissa

a = np.arange(7, dtype='f')  # single-character dtype codes:
# Integer                 i
# Unsigned integer        u
# Single precision float  f
# Double precision float  d
# Boolean                 b
# Complex                 D
# String                  S
# Unicode                 U
# Void                    V

x = np.arange(0, 10, 2)  # x = array([0, 2, 4, 6, 8])
y = np.arange(5)         # y = array([0, 1, 2, 3, 4])
m = np.vstack([x, y])    # m = array([[0, 2, 4, 6, 8],
                         #            [0, 1, 2, 3, 4]])
xy = np.hstack([x, y])   # xy = array([0, 2, 4, 6, 8, 0, 1, 2, 3, 4])
|
normal
|
{
"blob_id": "be5147efda879165107378527ebf44890c03be75",
"index": 6679,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nnp.random.seed(0)\n<mask token>\nz < 3\nz[z < 3]\n<mask token>\na + b\na + 30\n<mask token>\nprint(a)\na.shape()\na.ndim()\na[0, 2]\na[0, :]\na[:, 1]\nnp.min(a)\nnp.zeros(5)\nnp.zeros_like([[10, 10], [1, 1]])\nnp.ones(3, 2)\nnp.full((2, 2), 100)\nnp.full_like((2, 2), 10, dtype=np.int)\nnp.random.rand(2, 4)\nnp.random.randint(10)\nnp.random.randint(5, 10, size=(2, 2))\n<mask token>\nnp.cos(a)\nnp.arange(10)\n<mask token>\nnp.vstack([v1, v2, v1])\n<mask token>\n",
"step-3": "<mask token>\nz = np.linspace(2, 10, 5)\nnp.random.seed(0)\nz1 = np.random.randint(10, size=6)\nz = np.array([1, 2, 3, 4, 5])\nz < 3\nz[z < 3]\na = np.array([1, 2, 3, 4, 5])\nb = np.array([6, 7, 8, 9, 10])\na + b\na + 30\na = np.array([[1, 2, 3], [4, 5, 6]])\nprint(a)\na.shape()\na.ndim()\na[0, 2]\na[0, :]\na[:, 1]\nnp.min(a)\nnp.zeros(5)\nnp.zeros_like([[10, 10], [1, 1]])\nnp.ones(3, 2)\nnp.full((2, 2), 100)\nnp.full_like((2, 2), 10, dtype=np.int)\nnp.random.rand(2, 4)\nnp.random.randint(10)\nnp.random.randint(5, 10, size=(2, 2))\na = [np.pi, -np.pi, 0]\nnp.cos(a)\nnp.arange(10)\nv1 = np.array([1, 2, 3])\nv2 = np.array([4, 5, 6])\nnp.vstack([v1, v2, v1])\na = np.array([1, 2, 3, 4, 5, 6, 7, 8, 9])\nfiledata = np.genfromtxt('name.txt', delimiter=',')\nfiledata = filedata.astype('type')\na = np.arange(7, dtype='f')\nx = np.arange(0, 10, 2)\ny = np.arange(5)\nm = np.vstack([x, y])\nxy = np.hstack([x, y])\n",
"step-4": "import numpy as np\nz = np.linspace(2, 10, 5)\nnp.random.seed(0)\nz1 = np.random.randint(10, size=6)\nz = np.array([1, 2, 3, 4, 5])\nz < 3\nz[z < 3]\na = np.array([1, 2, 3, 4, 5])\nb = np.array([6, 7, 8, 9, 10])\na + b\na + 30\na = np.array([[1, 2, 3], [4, 5, 6]])\nprint(a)\na.shape()\na.ndim()\na[0, 2]\na[0, :]\na[:, 1]\nnp.min(a)\nnp.zeros(5)\nnp.zeros_like([[10, 10], [1, 1]])\nnp.ones(3, 2)\nnp.full((2, 2), 100)\nnp.full_like((2, 2), 10, dtype=np.int)\nnp.random.rand(2, 4)\nnp.random.randint(10)\nnp.random.randint(5, 10, size=(2, 2))\na = [np.pi, -np.pi, 0]\nnp.cos(a)\nnp.arange(10)\nv1 = np.array([1, 2, 3])\nv2 = np.array([4, 5, 6])\nnp.vstack([v1, v2, v1])\na = np.array([1, 2, 3, 4, 5, 6, 7, 8, 9])\nfiledata = np.genfromtxt('name.txt', delimiter=',')\nfiledata = filedata.astype('type')\na = np.arange(7, dtype='f')\nx = np.arange(0, 10, 2)\ny = np.arange(5)\nm = np.vstack([x, y])\nxy = np.hstack([x, y])\n",
"step-5": "import numpy as np\n\n\nz = np.linspace(2,10,5) #from 2 to 10, with 5 elements\n# OUT: array( [ 2. , 4. , 6. , 8. , 10. ] )\n\nnp.random.seed(0)\nz1 = np.random.randint(10, size = 6)\n# OUT: array( [5, 0, 3, 3, 7, 9] )\n\nz = np.array([1,2,3,4,5])\nz < 3\n# OUT: array([T,T,F,F,F])\nz[z<3]\n# OUT: array([1,2])\n\na = np.array([1,2,3,4,5])\nb = np.array([6,7,8,9,10])\n\na + b # - * /\n# OUT: array([7,9,11,13,15])\na + 30 # - * /\n# OUT: array([31,32,33,34,35])\n\na = np.array([[1,2,3],[4,5,6]])\nprint(a)\n# OUT: [[1 2 3]\n# [4 5 6]]\na.shape()\n# OUT: (2,3)\na.ndim()\n# OUT: 2\na[0,2]\n# OUT: 3\na[0,:]\n# array([1,2,3])\na[:,1]\n# array([2,4])\n\nnp.min(a) #or MAX|SUM\n# OUT: 1\n\n\n\nnp.zeros(5)\n# OUT: array([0.,0.,0.,0.,0.])\nnp.zeros_like([[10,10],[1,1]])\n# OUT: [[0,0],[0,0]]\nnp.ones(3,2)\n# OUT: array([[1,1],\n#\t [1,1],\n#\t [1,1]])\nnp.full((2,2),100)\n# OUT: array([[100,100],\n#\t [100,100]])\nnp.full_like((2,2), 10, dtype = np.int)\n# OUT: [[10,10][10,10]]\n\n\nnp.random.rand(2,4)\n#OUT: array([[x,x,x,x],\n#\t [x,x,x,x]])\n\nnp.random.randint(10) \n#OUT: x # random from 0 to 10 (non include)\n\nnp.random.randint(5,10, size=(2,2)) #from 5 to 10(non include)\n#OUT: array([[x,x],\n#\t [x,x]])\n\n\na = [np.pi,-np.pi,0]\nnp.cos(a) \n#OUT: [-1,-1,1]\n\n\nnp.arange(10)\n#OUT: [0,1,...,9]\n\n\nv1 = np.array([1,2,3])\nv2 = np.array([4,5,6])\n\nnp.vstack([v1,v2,v1])\n\n#1 2 3\n#4 5 6\n#1 2 3\n\n\n\na = np.array([1,2,3,4,5,6,7,8,9])\n#a[[1,2,8]]\n#OUT: 2,3,9\n\n\nfiledata = np.genfromtxt(\"name.txt\", delimiter = \",\")\n# ?\nfiledata = filedata.astype(\"type\") #!\n# filedata[filedata > 50] \n# ((filedata > 50) & (filedata < 100))\n\n\n\n\n# bool Boolean (True or False) stored as a bit\n# inti Platform integer (normally either int32 or int64)\n# int8 Byte (-128 to 127)\n# int16 Integer (-32768 to 32767)\n# int32 Integer (-2 ** 31 to 2 ** 31 -1)\n# int64 Integer (-2 ** 63 to 2 ** 63 -1)\n# uint8 Unsigned integer (0 to 255)\n# uint16 Unsigned integer (0 to 
65535)\n# uint32 Unsigned integer (0 to 2 ** 32 - 1)\n# uint64 Unsigned integer (0 to 2 ** 64 - 1)\n# float16 Half precision float: sign bit, 5 bits exponent, 10 bits mantissa\n# float32 Single precision float: sign bit, 8 bits exponent, 23 bits mantissa\n# float64 Double precision float: sign bit, 11 bits exponent, 52 bits mantissa\n\n\na = np.arange(7, dtype='f')\n# Integer i\n# Unsigned integer u\n# Single precision float f\n# Double precision float d\n# Boolean b\n# Complex D\n# String S\n# Unicode U\n# Void V\n\n\n\nx = np.arange(0,10,2) # x=([0,2,4,6,8])\ny = np.arange(5) # y=([0,1,2,3,4])\nm = np.vstack([x,y]) # m=([[0,2,4,6,8],\n # [0,1,2,3,4]])\nxy = np.hstack([x,y]) # xy =([0,2,4,6,8,0,1,2,3,4])",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
from http import HTTPStatus
#from pytest_chalice.handlers import RequestHandler
import app
from chalice.test import Client
def test_index_with_url():
    """A request carrying a ?url= query parameter is answered with a 301 redirect."""
    with Client(app.app) as http_client:
        resp = http_client.http.get('/?url=https://google.com')
        assert resp.status_code == HTTPStatus.MOVED_PERMANENTLY
        assert resp.headers['Location'] is not None
def test_index_without_url():
    """Omitting the url parameter yields the error body."""
    with Client(app.app) as http_client:
        resp = http_client.http.get('/')
        assert resp.body == b'Invalid or missing url'
def test_link_received_by_sns():
    """An SNS-delivered link message is handled by the lambda handler."""
    with Client(app.app) as sns_client:
        with open('sns_message.txt') as message_file:
            event = sns_client.events.generate_sns_event(message=message_file.read())
        # Dump the generated event for manual inspection.
        import json
        with open('/tmp/event.json', 'w') as dump_file:
            dump_file.write(json.dumps(event))
        result = sns_client.lambda_.invoke('handle_link_visit', event)
        assert result.payload['message'] == 'link visited'
|
normal
|
{
"blob_id": "e7e9a53d4c41448521b324d51641a46827faa692",
"index": 2607,
"step-1": "<mask token>\n\n\ndef test_index_with_url():\n with Client(app.app) as client:\n response = client.http.get('/?url=https://google.com')\n assert response.status_code == HTTPStatus.MOVED_PERMANENTLY\n assert response.headers['Location'] is not None\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef test_index_with_url():\n with Client(app.app) as client:\n response = client.http.get('/?url=https://google.com')\n assert response.status_code == HTTPStatus.MOVED_PERMANENTLY\n assert response.headers['Location'] is not None\n\n\n<mask token>\n\n\ndef test_link_received_by_sns():\n with Client(app.app) as client:\n with open('sns_message.txt') as f:\n event = client.events.generate_sns_event(message=f.read())\n with open('/tmp/event.json', 'w') as f:\n import json\n f.write(json.dumps(event))\n response = client.lambda_.invoke('handle_link_visit', event)\n assert response.payload['message'] == 'link visited'\n",
"step-3": "<mask token>\n\n\ndef test_index_with_url():\n with Client(app.app) as client:\n response = client.http.get('/?url=https://google.com')\n assert response.status_code == HTTPStatus.MOVED_PERMANENTLY\n assert response.headers['Location'] is not None\n\n\ndef test_index_without_url():\n with Client(app.app) as client:\n response = client.http.get('/')\n assert response.body == b'Invalid or missing url'\n\n\ndef test_link_received_by_sns():\n with Client(app.app) as client:\n with open('sns_message.txt') as f:\n event = client.events.generate_sns_event(message=f.read())\n with open('/tmp/event.json', 'w') as f:\n import json\n f.write(json.dumps(event))\n response = client.lambda_.invoke('handle_link_visit', event)\n assert response.payload['message'] == 'link visited'\n",
"step-4": "from http import HTTPStatus\nimport app\nfrom chalice.test import Client\n\n\ndef test_index_with_url():\n with Client(app.app) as client:\n response = client.http.get('/?url=https://google.com')\n assert response.status_code == HTTPStatus.MOVED_PERMANENTLY\n assert response.headers['Location'] is not None\n\n\ndef test_index_without_url():\n with Client(app.app) as client:\n response = client.http.get('/')\n assert response.body == b'Invalid or missing url'\n\n\ndef test_link_received_by_sns():\n with Client(app.app) as client:\n with open('sns_message.txt') as f:\n event = client.events.generate_sns_event(message=f.read())\n with open('/tmp/event.json', 'w') as f:\n import json\n f.write(json.dumps(event))\n response = client.lambda_.invoke('handle_link_visit', event)\n assert response.payload['message'] == 'link visited'\n",
"step-5": "from http import HTTPStatus\n#from pytest_chalice.handlers import RequestHandler\nimport app\nfrom chalice.test import Client\n\ndef test_index_with_url():\n with Client(app.app) as client:\n response = client.http.get('/?url=https://google.com')\n assert response.status_code == HTTPStatus.MOVED_PERMANENTLY\n assert response.headers['Location'] is not None\n\ndef test_index_without_url():\n with Client(app.app) as client:\n response = client.http.get('/')\n assert response.body == b'Invalid or missing url'\n\ndef test_link_received_by_sns():\n with Client(app.app) as client:\n with open('sns_message.txt') as f:\n event = client.events.generate_sns_event(message=f.read())\n with open('/tmp/event.json', 'w') as f:\n import json\n f.write(json.dumps(event))\n response = client.lambda_.invoke('handle_link_visit', event)\n assert response.payload['message'] == 'link visited'",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
<|reserved_special_token_0|>
def kde_Gaussian_fitting(miu, bandwidth):
kde_analyzer = KernelDensity(kernel='gaussian', bandwidth=bandwidth).fit(
miu)
return kde_analyzer
<|reserved_special_token_0|>
def second_moment_all_dist(batch_dim_dist):
return batch_dim_dist.pow(2).sum(dim=1).mean(dim=0)
def inprod_average(batch_dim_1, batch_dim_2):
assert batch_dim_1.shape[0] == batch_dim_2.shape[0]
batch_size = batch_dim_1.shape[0]
inner_product_avg = torch.dot(batch_dim_1.reshape(-1), batch_dim_2.
reshape(-1)) / batch_size
return inner_product_avg
def inprod(batch_dim_1, batch_dim_2):
innner_product = torch.dot(batch_dim_1.reshape(-1), batch_dim_2.reshape(-1)
)
return innner_product
<|reserved_special_token_0|>
def w2_distance_samples_solver(sample1_n_d, sample2_n_d):
assert sample1_n_d.shape == sample2_n_d.shape
num_sample = sample1_n_d.shape[0]
a = np.ones([num_sample]) / num_sample
b = np.ones([num_sample]) / num_sample
tmp_marginal_1 = np.expand_dims(sample1_n_d, axis=0)
tmp_marginal_2 = np.expand_dims(sample2_n_d, axis=1)
M = tmp_marginal_1 - tmp_marginal_2
M = np.sum(np.abs(M) ** 2, axis=2)
return ot.emd2(a, b, M)
<|reserved_special_token_0|>
class ReshapeTransform:
def __init__(self, new_size):
self.new_size = new_size
def __call__(self, img):
return torch.reshape(img, self.new_size)
class CustomMnistDataset(Dataset):
def __init__(self, data, target, transform=None):
self.data = data
self.target = target
self.transform = transform
def __len__(self):
assert len(self.target) == len(self.data)
return len(self.target)
def __getitem__(self, idx):
if torch.is_tensor(idx):
idx = idx.tolist()
data_idxed = self.data[idx]
target_idxed = self.target[idx].float()
if self.transform:
data_idxed = self.transform(data_idxed)
return [data_idxed, target_idxed]
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def kde_Gaussian_fitting(miu, bandwidth):
kde_analyzer = KernelDensity(kernel='gaussian', bandwidth=bandwidth).fit(
miu)
return kde_analyzer
<|reserved_special_token_0|>
def second_moment_all_dist(batch_dim_dist):
return batch_dim_dist.pow(2).sum(dim=1).mean(dim=0)
def inprod_average(batch_dim_1, batch_dim_2):
assert batch_dim_1.shape[0] == batch_dim_2.shape[0]
batch_size = batch_dim_1.shape[0]
inner_product_avg = torch.dot(batch_dim_1.reshape(-1), batch_dim_2.
reshape(-1)) / batch_size
return inner_product_avg
def inprod(batch_dim_1, batch_dim_2):
innner_product = torch.dot(batch_dim_1.reshape(-1), batch_dim_2.reshape(-1)
)
return innner_product
def grad_of_function(input_samples, network):
g_of_y = network(input_samples).sum()
gradient = torch.autograd.grad(g_of_y, input_samples, create_graph=True)[0]
return gradient
<|reserved_special_token_0|>
def w2_distance_samples_solver(sample1_n_d, sample2_n_d):
assert sample1_n_d.shape == sample2_n_d.shape
num_sample = sample1_n_d.shape[0]
a = np.ones([num_sample]) / num_sample
b = np.ones([num_sample]) / num_sample
tmp_marginal_1 = np.expand_dims(sample1_n_d, axis=0)
tmp_marginal_2 = np.expand_dims(sample2_n_d, axis=1)
M = tmp_marginal_1 - tmp_marginal_2
M = np.sum(np.abs(M) ** 2, axis=2)
return ot.emd2(a, b, M)
<|reserved_special_token_0|>
class ReshapeTransform:
def __init__(self, new_size):
self.new_size = new_size
def __call__(self, img):
return torch.reshape(img, self.new_size)
class CustomMnistDataset(Dataset):
def __init__(self, data, target, transform=None):
self.data = data
self.target = target
self.transform = transform
def __len__(self):
assert len(self.target) == len(self.data)
return len(self.target)
def __getitem__(self, idx):
if torch.is_tensor(idx):
idx = idx.tolist()
data_idxed = self.data[idx]
target_idxed = self.target[idx].float()
if self.transform:
data_idxed = self.transform(data_idxed)
return [data_idxed, target_idxed]
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def kde_Gaussian_fitting(miu, bandwidth):
kde_analyzer = KernelDensity(kernel='gaussian', bandwidth=bandwidth).fit(
miu)
return kde_analyzer
<|reserved_special_token_0|>
def second_moment_single_dist(batch_dim):
return batch_dim.pow(2).sum(dim=1).mean()
def second_moment_all_dist(batch_dim_dist):
return batch_dim_dist.pow(2).sum(dim=1).mean(dim=0)
def inprod_average(batch_dim_1, batch_dim_2):
assert batch_dim_1.shape[0] == batch_dim_2.shape[0]
batch_size = batch_dim_1.shape[0]
inner_product_avg = torch.dot(batch_dim_1.reshape(-1), batch_dim_2.
reshape(-1)) / batch_size
return inner_product_avg
def inprod(batch_dim_1, batch_dim_2):
innner_product = torch.dot(batch_dim_1.reshape(-1), batch_dim_2.reshape(-1)
)
return innner_product
def grad_of_function(input_samples, network):
g_of_y = network(input_samples).sum()
gradient = torch.autograd.grad(g_of_y, input_samples, create_graph=True)[0]
return gradient
def two_loop_loss_in_W2(convex_f_list, grad_g_of_y, miu_i, dist_weight,
idx_dist):
n_dist = dist_weight.shape[0]
f_grad_g_y = convex_f_list[idx_dist](grad_g_of_y).mean()
for j in range(n_dist):
f_grad_g_y -= dist_weight[j] * convex_f_list[j](grad_g_of_y).mean()
inner_product = inprod_average(grad_g_of_y, miu_i)
half_moment_grad_of_g = 0.5 * second_moment_single_dist(grad_g_of_y)
loss_gi = (f_grad_g_y - inner_product + half_moment_grad_of_g
) * dist_weight[idx_dist]
return loss_gi
<|reserved_special_token_0|>
def w2_distance_samples_solver(sample1_n_d, sample2_n_d):
assert sample1_n_d.shape == sample2_n_d.shape
num_sample = sample1_n_d.shape[0]
a = np.ones([num_sample]) / num_sample
b = np.ones([num_sample]) / num_sample
tmp_marginal_1 = np.expand_dims(sample1_n_d, axis=0)
tmp_marginal_2 = np.expand_dims(sample2_n_d, axis=1)
M = tmp_marginal_1 - tmp_marginal_2
M = np.sum(np.abs(M) ** 2, axis=2)
return ot.emd2(a, b, M)
<|reserved_special_token_0|>
class ReshapeTransform:
def __init__(self, new_size):
self.new_size = new_size
def __call__(self, img):
return torch.reshape(img, self.new_size)
class CustomMnistDataset(Dataset):
def __init__(self, data, target, transform=None):
self.data = data
self.target = target
self.transform = transform
def __len__(self):
assert len(self.target) == len(self.data)
return len(self.target)
def __getitem__(self, idx):
if torch.is_tensor(idx):
idx = idx.tolist()
data_idxed = self.data[idx]
target_idxed = self.target[idx].float()
if self.transform:
data_idxed = self.transform(data_idxed)
return [data_idxed, target_idxed]
<|reserved_special_token_0|>
def average_nn(args, **kwargs):
averaged_parameters = np.zeros([args.N_SAMPLES, args.INPUT_DIM])
tmp_data = averaged_parameters
n_samp_of_subset = int(args.N_SAMPLES / args.NUM_DISTRIBUTION)
for i in range(args.NUM_DISTRIBUTION):
model_param = io.load(args.get_nn(**kwargs) +
f'/subset_{i + 1}_samples_{args.subset_samples}/trial_26/storing_models/nn_2layer_epoch200.pt'
)
assert args.N_SAMPLES == model_param['layer1.weight'].shape[0]
tmp_data[:, :-1] = PTU.torch2numpy(model_param['layer1.weight'])
tmp_data[:, -1] = PTU.torch2numpy(model_param['last_layer.weight'].
squeeze())
if i == args.NUM_DISTRIBUTION - 1:
averaged_parameters[i * n_samp_of_subset:] = tmp_data[i *
n_samp_of_subset:]
else:
averaged_parameters[i * n_samp_of_subset:(i + 1) * n_samp_of_subset
] = tmp_data[i * n_samp_of_subset:(i + 1) * n_samp_of_subset]
return averaged_parameters
<|reserved_special_token_0|>
def get_marginal_list(cfg, type_data='2block'):
if type_data == '2block':
marginal_data = g_data.marginal_data_blocks_3loop_ficnn(cfg)[:, :, :-1]
elif type_data == 'circ_squa':
marginal_data = g_data.marginal_data_circ_squ_3loop_ficnn(cfg)[:, :,
:-1]
elif type_data == 'mnist0-1':
marginal_data = g_data.marginal_mnist_3loop_ficnn_handle(cfg)
elif type_data == '3digit':
marginal_data = g_data.marginal_data_3digit_3loop_ficnn(cfg)[:, :, :-1]
elif type_data == 'ellipse':
marginal_data = g_data.marginal_data_ellipse_3loop_ficnn(cfg)[:, :, :-1
]
elif type_data == 'line':
marginal_data = g_data.marginal_data_line_3loop_ficnn(cfg)[:, :, :-1]
elif type_data == 'usps_mnist':
marginal_data = g_data.marginal_usps_3loop_ficnn_handle(cfg)[0][
torch.randperm(5000), :, :-1]
elif type_data == 'mnist_group':
if cfg.N_TEST == 25:
idx_digit = torch.zeros(25).long()
for idx in range(5):
idx_digit[idx * 5:(idx + 1) * 5] = 5000 * idx + torch.arange(5)
marginal_data = g_data.marginal_mnist_3loop_ficnn_handle(cfg)[
idx_digit]
else:
marginal_data = g_data.marginal_mnist_3loop_ficnn_handle(cfg)[torch
.randperm(25000)]
elif type_data == 'cifar':
marginal_data = g_data.marginal_cifar_handle(cfg)
elif type_data == 'gmm':
marginal_data = g_data.marginal_data_gmm_3loop_ficnn(cfg)[:, :, :-1]
return marginal_data.permute(2, 0, 1)
<|reserved_special_token_1|>
from __future__ import print_function
import ot
import torch
import numpy as np
from sklearn.neighbors import KernelDensity
from torch.utils.data import Dataset
import jacinle.io as io
import optimal_transport_modules.pytorch_utils as PTU
import optimal_transport_modules.generate_data as g_data
from optimal_transport_modules.record_mean_cov import select_mean_and_cov
<|reserved_special_token_0|>
def kde_Gaussian_fitting(miu, bandwidth):
kde_analyzer = KernelDensity(kernel='gaussian', bandwidth=bandwidth).fit(
miu)
return kde_analyzer
def second_moment_no_average(batch_dim):
return batch_dim.pow(2).sum(dim=1)
def second_moment_single_dist(batch_dim):
return batch_dim.pow(2).sum(dim=1).mean()
def second_moment_all_dist(batch_dim_dist):
return batch_dim_dist.pow(2).sum(dim=1).mean(dim=0)
def inprod_average(batch_dim_1, batch_dim_2):
assert batch_dim_1.shape[0] == batch_dim_2.shape[0]
batch_size = batch_dim_1.shape[0]
inner_product_avg = torch.dot(batch_dim_1.reshape(-1), batch_dim_2.
reshape(-1)) / batch_size
return inner_product_avg
def inprod(batch_dim_1, batch_dim_2):
innner_product = torch.dot(batch_dim_1.reshape(-1), batch_dim_2.reshape(-1)
)
return innner_product
def grad_of_function(input_samples, network):
g_of_y = network(input_samples).sum()
gradient = torch.autograd.grad(g_of_y, input_samples, create_graph=True)[0]
return gradient
def two_loop_loss_in_W2(convex_f_list, grad_g_of_y, miu_i, dist_weight,
idx_dist):
n_dist = dist_weight.shape[0]
f_grad_g_y = convex_f_list[idx_dist](grad_g_of_y).mean()
for j in range(n_dist):
f_grad_g_y -= dist_weight[j] * convex_f_list[j](grad_g_of_y).mean()
inner_product = inprod_average(grad_g_of_y, miu_i)
half_moment_grad_of_g = 0.5 * second_moment_single_dist(grad_g_of_y)
loss_gi = (f_grad_g_y - inner_product + half_moment_grad_of_g
) * dist_weight[idx_dist]
return loss_gi
<|reserved_special_token_0|>
def w2_distance_samples_solver(sample1_n_d, sample2_n_d):
assert sample1_n_d.shape == sample2_n_d.shape
num_sample = sample1_n_d.shape[0]
a = np.ones([num_sample]) / num_sample
b = np.ones([num_sample]) / num_sample
tmp_marginal_1 = np.expand_dims(sample1_n_d, axis=0)
tmp_marginal_2 = np.expand_dims(sample2_n_d, axis=1)
M = tmp_marginal_1 - tmp_marginal_2
M = np.sum(np.abs(M) ** 2, axis=2)
return ot.emd2(a, b, M)
def free_support_barycenter(measures_locations, measures_weights, X_init, b
=None, weights=None, numItermax=100, stopThr=1e-07, use_sinkhorn=False):
g_sinkhorn_reg = 0.1
iter_count = 0
N = len(measures_locations)
k = X_init.shape[0]
d = X_init.shape[1]
if b is None:
b = np.ones((k,)) / k
if weights is None:
weights = np.ones((N,)) / N
X = X_init
log_dict = {}
displacement_square_norm = stopThr + 1.0
while displacement_square_norm > stopThr and iter_count < numItermax:
T_sum = np.zeros((k, d))
for measure_locations_i, measure_weights_i, weight_i in zip(
measures_locations, measures_weights, weights.tolist()):
M_i = ot.dist(X, measure_locations_i)
if use_sinkhorn:
T_i = ot.bregman.sinkhorn(b, measure_weights_i, M_i,
g_sinkhorn_reg)
else:
T_i = ot.emd(b, measure_weights_i, M_i)
T_sum = T_sum + weight_i * np.reshape(1.0 / b, (-1, 1)
) * np.matmul(T_i, measure_locations_i)
displacement_square_norm = np.sum(np.square(T_sum - X))
X = T_sum
print('iteration %d, displacement_square_norm=%f\n', iter_count,
displacement_square_norm)
iter_count += 1
return X
<|reserved_special_token_0|>
class ReshapeTransform:
def __init__(self, new_size):
self.new_size = new_size
def __call__(self, img):
return torch.reshape(img, self.new_size)
class CustomMnistDataset(Dataset):
def __init__(self, data, target, transform=None):
self.data = data
self.target = target
self.transform = transform
def __len__(self):
assert len(self.target) == len(self.data)
return len(self.target)
def __getitem__(self, idx):
if torch.is_tensor(idx):
idx = idx.tolist()
data_idxed = self.data[idx]
target_idxed = self.target[idx].float()
if self.transform:
data_idxed = self.transform(data_idxed)
return [data_idxed, target_idxed]
<|reserved_special_token_0|>
def get_gmm_param(trial, cond=-1):
if cond > 0:
MEAN, COV = select_mean_and_cov(trial, range_cond=cond)
else:
MEAN, COV = select_mean_and_cov(trial)
INPUT_DIM = MEAN[0].shape[1]
OUTPUT_DIM = INPUT_DIM
NUM_DISTRIBUTION = len(MEAN)
NUM_GMM_COMPONENT = []
for i in range(NUM_DISTRIBUTION):
NUM_GMM_COMPONENT.append(MEAN[i].shape[0])
high_dim_flag = INPUT_DIM > 2
return (MEAN, COV, INPUT_DIM, OUTPUT_DIM, NUM_DISTRIBUTION,
NUM_GMM_COMPONENT, high_dim_flag)
<|reserved_special_token_0|>
def average_nn(args, **kwargs):
averaged_parameters = np.zeros([args.N_SAMPLES, args.INPUT_DIM])
tmp_data = averaged_parameters
n_samp_of_subset = int(args.N_SAMPLES / args.NUM_DISTRIBUTION)
for i in range(args.NUM_DISTRIBUTION):
model_param = io.load(args.get_nn(**kwargs) +
f'/subset_{i + 1}_samples_{args.subset_samples}/trial_26/storing_models/nn_2layer_epoch200.pt'
)
assert args.N_SAMPLES == model_param['layer1.weight'].shape[0]
tmp_data[:, :-1] = PTU.torch2numpy(model_param['layer1.weight'])
tmp_data[:, -1] = PTU.torch2numpy(model_param['last_layer.weight'].
squeeze())
if i == args.NUM_DISTRIBUTION - 1:
averaged_parameters[i * n_samp_of_subset:] = tmp_data[i *
n_samp_of_subset:]
else:
averaged_parameters[i * n_samp_of_subset:(i + 1) * n_samp_of_subset
] = tmp_data[i * n_samp_of_subset:(i + 1) * n_samp_of_subset]
return averaged_parameters
<|reserved_special_token_0|>
def get_marginal_list(cfg, type_data='2block'):
if type_data == '2block':
marginal_data = g_data.marginal_data_blocks_3loop_ficnn(cfg)[:, :, :-1]
elif type_data == 'circ_squa':
marginal_data = g_data.marginal_data_circ_squ_3loop_ficnn(cfg)[:, :,
:-1]
elif type_data == 'mnist0-1':
marginal_data = g_data.marginal_mnist_3loop_ficnn_handle(cfg)
elif type_data == '3digit':
marginal_data = g_data.marginal_data_3digit_3loop_ficnn(cfg)[:, :, :-1]
elif type_data == 'ellipse':
marginal_data = g_data.marginal_data_ellipse_3loop_ficnn(cfg)[:, :, :-1
]
elif type_data == 'line':
marginal_data = g_data.marginal_data_line_3loop_ficnn(cfg)[:, :, :-1]
elif type_data == 'usps_mnist':
marginal_data = g_data.marginal_usps_3loop_ficnn_handle(cfg)[0][
torch.randperm(5000), :, :-1]
elif type_data == 'mnist_group':
if cfg.N_TEST == 25:
idx_digit = torch.zeros(25).long()
for idx in range(5):
idx_digit[idx * 5:(idx + 1) * 5] = 5000 * idx + torch.arange(5)
marginal_data = g_data.marginal_mnist_3loop_ficnn_handle(cfg)[
idx_digit]
else:
marginal_data = g_data.marginal_mnist_3loop_ficnn_handle(cfg)[torch
.randperm(25000)]
elif type_data == 'cifar':
marginal_data = g_data.marginal_cifar_handle(cfg)
elif type_data == 'gmm':
marginal_data = g_data.marginal_data_gmm_3loop_ficnn(cfg)[:, :, :-1]
return marginal_data.permute(2, 0, 1)
<|reserved_special_token_1|>
from __future__ import print_function
import ot
import torch
import numpy as np
from sklearn.neighbors import KernelDensity
from torch.utils.data import Dataset
import jacinle.io as io
import optimal_transport_modules.pytorch_utils as PTU
import optimal_transport_modules.generate_data as g_data
from optimal_transport_modules.record_mean_cov import select_mean_and_cov
'''
PyTorch type
'''
def kde_Gaussian_fitting(miu, bandwidth):
    """Fit a Gaussian kernel-density estimator to the samples in *miu*."""
    estimator = KernelDensity(kernel='gaussian', bandwidth=bandwidth)
    return estimator.fit(miu)
def second_moment_no_average(batch_dim):
    """Per-sample squared Euclidean norm: one value per batch row."""
    return (batch_dim ** 2).sum(dim=1)
def second_moment_single_dist(batch_dim):
    """Batch-averaged squared Euclidean norm of the samples of one distribution."""
    return (batch_dim ** 2).sum(dim=1).mean()
def second_moment_all_dist(batch_dim_dist):
    """Squared norms summed over dim 1, then averaged over the leading dim."""
    return (batch_dim_dist ** 2).sum(dim=1).mean(dim=0)
def inprod_average(batch_dim_1, batch_dim_2):
    """Inner product of two flattened tensors, averaged over the batch size.

    Both arguments must share the same leading (batch) dimension.
    """
    assert batch_dim_1.shape[0] == batch_dim_2.shape[0]
    n_batch = batch_dim_1.shape[0]
    flat_a = batch_dim_1.reshape(-1)
    flat_b = batch_dim_2.reshape(-1)
    return torch.dot(flat_a, flat_b) / n_batch
def inprod(batch_dim_1, batch_dim_2):
    """Unnormalized inner product of two tensors, compared element-for-element."""
    return torch.dot(batch_dim_1.reshape(-1), batch_dim_2.reshape(-1))
def grad_of_function(input_samples, network):
    """Gradient of sum(network(x)) w.r.t. *input_samples*.

    ``create_graph=True`` keeps the graph so the gradient itself remains
    differentiable (needed when the gradient feeds a further loss).
    """
    scalar_out = network(input_samples).sum()
    return torch.autograd.grad(scalar_out, input_samples, create_graph=True)[0]
def two_loop_loss_in_W2(convex_f_list, grad_g_of_y, miu_i, dist_weight, idx_dist):
    """Loss contribution of marginal *idx_dist* built from grad g(y).

    Combines four terms (inline markers below): the convex potentials f_j
    evaluated at grad g(y), minus the inner product with the marginal's
    samples *miu_i*, plus half the second moment of grad g(y); the total
    is scaled by dist_weight[idx_dist].

    :param convex_f_list: indexable collection of convex potential networks f_j
    :param grad_g_of_y: gradient of g at the batch of samples y
    :param miu_i: batch of samples from marginal i
    :param dist_weight: 1-D tensor of per-distribution weights
    :param idx_dist: index of the marginal this loss term belongs to
    :return: scalar loss for this marginal
    """
    n_dist = dist_weight.shape[0]
    #! The 2nd loss part useful for f/g parameters
    f_grad_g_y = convex_f_list[idx_dist](grad_g_of_y).mean()
    #! The 4th loss part useful for f/g parameters
    for j in range(n_dist):
        f_grad_g_y -= dist_weight[j] * convex_f_list[j](grad_g_of_y).mean()
    #! The 1st loss part useful for g parameters
    inner_product = inprod_average(grad_g_of_y, miu_i)
    #! The 3rd loss part useful for g parameters
    half_moment_grad_of_g = 0.5 * second_moment_single_dist(grad_g_of_y)
    loss_gi = (f_grad_g_y - inner_product +
               half_moment_grad_of_g) * dist_weight[idx_dist]
    return loss_gi
'''
localized POT library
'''
def w2_distance_samples_solver(sample1_n_d, sample2_n_d):
    """Exact squared-W2 distance between two equal-sized empirical measures.

    Solves the discrete optimal-transport problem with POT's EMD solver:
    https://pythonot.github.io/all.html#ot.emd
    https://pythonot.github.io/all.html#ot.emd2
    """
    assert sample1_n_d.shape == sample2_n_d.shape
    n = sample1_n_d.shape[0]
    # Uniform weights on both marginals.
    a = np.ones([n]) / n
    b = np.ones([n]) / n
    # Cost matrix of pairwise squared Euclidean distances.
    diff = np.expand_dims(sample1_n_d, axis=0) - np.expand_dims(sample2_n_d, axis=1)
    cost = (np.abs(diff) ** 2).sum(axis=2)
    return ot.emd2(a, b, cost)
def free_support_barycenter(measures_locations, measures_weights, X_init, b=None, weights=None, numItermax=100, stopThr=1e-7, use_sinkhorn=False):
    """Fixed-point iteration for a free-support Wasserstein barycenter.

    Repeatedly transports the current support X to every input measure and
    replaces X by the weighted barycentric projection, until the squared
    displacement drops below *stopThr* or *numItermax* iterations pass.

    :param measures_locations: list of (n_i, d) sample arrays, one per measure
    :param measures_weights: list of (n_i,) weight arrays, one per measure
    :param X_init: (k, d) initial barycenter support locations
    :param b: (k,) barycenter weights; uniform when None
    :param weights: (N,) mixing weights over the measures; uniform when None
    :param use_sinkhorn: use entropic Sinkhorn plans instead of exact EMD
    :return: (k, d) final support locations
    """
    g_sinkhorn_reg = 0.1  # entropic regularization, only used when use_sinkhorn=True
    iter_count = 0
    N = len(measures_locations)
    k = X_init.shape[0]
    d = X_init.shape[1]
    if b is None:
        b = np.ones((k,)) / k
    if weights is None:
        weights = np.ones((N,)) / N
    X = X_init
    log_dict = {}  # NOTE(review): never filled nor returned — dead variable?
    displacement_square_norm = stopThr + 1.
    while (displacement_square_norm > stopThr and iter_count < numItermax):
        T_sum = np.zeros((k, d))
        for (measure_locations_i, measure_weights_i, weight_i) in zip(measures_locations, measures_weights, weights.tolist()):
            M_i = ot.dist(X, measure_locations_i)
            if use_sinkhorn:
                T_i = ot.bregman.sinkhorn(
                    b, measure_weights_i, M_i, g_sinkhorn_reg)
            else:
                T_i = ot.emd(b, measure_weights_i, M_i)
            # Accumulate the barycentric projection of measure i through plan T_i.
            T_sum = T_sum + weight_i * \
                np.reshape(1. / b, (-1, 1)) * \
                np.matmul(T_i, measure_locations_i)
        displacement_square_norm = np.sum(np.square(T_sum - X))
        X = T_sum
        # NOTE(review): %-style placeholders are not interpolated by print();
        # this prints the format string and values as separate items.
        print('iteration %d, displacement_square_norm=%f\n',
              iter_count, displacement_square_norm)
        iter_count += 1
    return X
'''
MNIST utils
'''
class ReshapeTransform:
    """Callable transform that reshapes an input tensor to a fixed size."""

    def __init__(self, new_size):
        # Target shape applied on every call.
        self.new_size = new_size

    def __call__(self, img):
        return img.reshape(self.new_size)
# def extract_three_number(total_data):
# idx_train = (total_data.targets == 0) + (total_data.targets ==
# 1) + (total_data.targets == 7)
# total_data.targets = total_data.targets[idx_train]
# total_data.data = total_data.data[idx_train]
# return total_data
class CustomMnistDataset(Dataset):
    """Wrap paired (data, target) tensors as an indexable MNIST-style dataset."""

    def __init__(self, data, target, transform=None):
        self.data = data
        self.target = target
        self.transform = transform

    def __len__(self):
        assert len(self.target) == len(self.data)
        return len(self.target)

    def __getitem__(self, idx):
        # Samplers may hand over tensor indices; normalize them first.
        if torch.is_tensor(idx):
            idx = idx.tolist()
        sample = self.data[idx]
        label = self.target[idx].float()
        if self.transform:
            sample = self.transform(sample)
        return [sample, label]
'''
Gaussian utils
'''
def get_gmm_param(trial, cond=-1):
    """Load GMM means/covariances for *trial* and derive problem dimensions.

    When ``cond > 0`` it is forwarded to ``select_mean_and_cov`` as
    ``range_cond``; otherwise the default selection is used.

    Returns (MEAN, COV, INPUT_DIM, OUTPUT_DIM, NUM_DISTRIBUTION,
    NUM_GMM_COMPONENT, high_dim_flag).
    """
    if cond > 0:
        MEAN, COV = select_mean_and_cov(trial, range_cond=cond)
    else:
        MEAN, COV = select_mean_and_cov(trial)
    INPUT_DIM = MEAN[0].shape[1]
    OUTPUT_DIM = INPUT_DIM  # input and output share dimensionality
    NUM_DISTRIBUTION = len(MEAN)
    # Number of mixture components of each marginal distribution.
    NUM_GMM_COMPONENT = [mean.shape[0] for mean in MEAN]
    high_dim_flag = INPUT_DIM > 2
    return MEAN, COV, INPUT_DIM, OUTPUT_DIM, NUM_DISTRIBUTION, NUM_GMM_COMPONENT, high_dim_flag
'''
Average the 2 layer neural networks
'''
def average_nn(args, **kwargs):
    """Stitch together 2-layer NN parameters sampled from several subset models.

    Each of the ``args.NUM_DISTRIBUTION`` trained models contributes one
    contiguous slice of rows to the output. A row holds the hidden-layer
    weights (``layer1.weight``) with the matching last-layer weight appended
    as the final column.

    Returns an (args.N_SAMPLES, args.INPUT_DIM) numpy array.
    """
    averaged_parameters = np.zeros([args.N_SAMPLES, args.INPUT_DIM])
    # Bug fix: the original aliased tmp_data to averaged_parameters, so each
    # model overwrote the whole output and the per-subset slice copies were
    # no-ops — the result held only the last model's parameters. Use a
    # separate scratch buffer so each subset keeps its own slice.
    tmp_data = np.zeros([args.N_SAMPLES, args.INPUT_DIM])
    n_samp_of_subset = int(args.N_SAMPLES / args.NUM_DISTRIBUTION)
    for i in range(args.NUM_DISTRIBUTION):
        model_param = io.load(args.get_nn(**kwargs) +
                              f"/subset_{i+1}_samples_{args.subset_samples}/trial_26/storing_models/nn_2layer_epoch200.pt")
        assert args.N_SAMPLES == model_param['layer1.weight'].shape[0]
        tmp_data[:, :-1] = PTU.torch2numpy(model_param['layer1.weight'])
        tmp_data[:, -1] = PTU.torch2numpy(
            model_param['last_layer.weight'].squeeze())
        if i == args.NUM_DISTRIBUTION - 1:
            # Last subset also absorbs remainder rows from integer division.
            averaged_parameters[i * n_samp_of_subset:] = \
                tmp_data[i * n_samp_of_subset:]
        else:
            averaged_parameters[i * n_samp_of_subset:
                                (i + 1) * n_samp_of_subset] = \
                tmp_data[i * n_samp_of_subset:(i + 1) * n_samp_of_subset]
    return averaged_parameters
'''
get marginal data handle
'''
def get_marginal_list(cfg, type_data='2block'):
    """Fetch the marginal samples for *type_data* and permute axes (2, 0, 1).

    Most generators return tensors with a trailing label column that is
    stripped via ``[:, :, :-1]``.

    Raises
    ------
    ValueError
        If *type_data* is not one of the recognized dataset names.
    """
    if type_data == '2block':
        marginal_data = g_data.marginal_data_blocks_3loop_ficnn(
            cfg)[:, :, :-1]
    elif type_data == 'circ_squa':
        marginal_data = g_data.marginal_data_circ_squ_3loop_ficnn(
            cfg)[:, :, :-1]
    elif type_data == 'mnist0-1':
        marginal_data = g_data.marginal_mnist_3loop_ficnn_handle(
            cfg)
    elif type_data == '3digit':
        marginal_data = g_data.marginal_data_3digit_3loop_ficnn(
            cfg)[:, :, :-1]
    elif type_data == 'ellipse':
        marginal_data = g_data.marginal_data_ellipse_3loop_ficnn(
            cfg)[:, :, :-1]
    elif type_data == 'line':
        marginal_data = g_data.marginal_data_line_3loop_ficnn(
            cfg)[:, :, :-1]
    elif type_data == 'usps_mnist':
        marginal_data = g_data.marginal_usps_3loop_ficnn_handle(
            cfg)[0][torch.randperm(5000), :, :-1]
    elif type_data == 'mnist_group':
        if cfg.N_TEST == 25:
            # Deterministically take the first 5 samples of each of 5 digits
            # (digit blocks are 5000 samples apart).
            idx_digit = torch.zeros(25).long()
            for idx in range(5):
                idx_digit[idx * 5:(idx + 1) * 5] = 5000 * idx + torch.arange(5)
            marginal_data = g_data.marginal_mnist_3loop_ficnn_handle(
                cfg)[idx_digit]
        else:
            marginal_data = g_data.marginal_mnist_3loop_ficnn_handle(
                cfg)[torch.randperm(25000)]
    elif type_data == 'cifar':
        marginal_data = g_data.marginal_cifar_handle(cfg)
    elif type_data == 'gmm':
        marginal_data = g_data.marginal_data_gmm_3loop_ficnn(
            cfg)[:, :, :-1]
    else:
        # Bug fix: an unrecognized name previously fell through to an
        # UnboundLocalError on the return line; fail with a clear message.
        raise ValueError(f"unknown type_data: {type_data!r}")
    return marginal_data.permute(2, 0, 1)
|
flexible
|
{
"blob_id": "0ee902d59d3d01b6ec8bb4cc8d5e8aa583644397",
"index": 1298,
"step-1": "<mask token>\n\n\ndef kde_Gaussian_fitting(miu, bandwidth):\n kde_analyzer = KernelDensity(kernel='gaussian', bandwidth=bandwidth).fit(\n miu)\n return kde_analyzer\n\n\n<mask token>\n\n\ndef second_moment_all_dist(batch_dim_dist):\n return batch_dim_dist.pow(2).sum(dim=1).mean(dim=0)\n\n\ndef inprod_average(batch_dim_1, batch_dim_2):\n assert batch_dim_1.shape[0] == batch_dim_2.shape[0]\n batch_size = batch_dim_1.shape[0]\n inner_product_avg = torch.dot(batch_dim_1.reshape(-1), batch_dim_2.\n reshape(-1)) / batch_size\n return inner_product_avg\n\n\ndef inprod(batch_dim_1, batch_dim_2):\n innner_product = torch.dot(batch_dim_1.reshape(-1), batch_dim_2.reshape(-1)\n )\n return innner_product\n\n\n<mask token>\n\n\ndef w2_distance_samples_solver(sample1_n_d, sample2_n_d):\n assert sample1_n_d.shape == sample2_n_d.shape\n num_sample = sample1_n_d.shape[0]\n a = np.ones([num_sample]) / num_sample\n b = np.ones([num_sample]) / num_sample\n tmp_marginal_1 = np.expand_dims(sample1_n_d, axis=0)\n tmp_marginal_2 = np.expand_dims(sample2_n_d, axis=1)\n M = tmp_marginal_1 - tmp_marginal_2\n M = np.sum(np.abs(M) ** 2, axis=2)\n return ot.emd2(a, b, M)\n\n\n<mask token>\n\n\nclass ReshapeTransform:\n\n def __init__(self, new_size):\n self.new_size = new_size\n\n def __call__(self, img):\n return torch.reshape(img, self.new_size)\n\n\nclass CustomMnistDataset(Dataset):\n\n def __init__(self, data, target, transform=None):\n self.data = data\n self.target = target\n self.transform = transform\n\n def __len__(self):\n assert len(self.target) == len(self.data)\n return len(self.target)\n\n def __getitem__(self, idx):\n if torch.is_tensor(idx):\n idx = idx.tolist()\n data_idxed = self.data[idx]\n target_idxed = self.target[idx].float()\n if self.transform:\n data_idxed = self.transform(data_idxed)\n return [data_idxed, target_idxed]\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef kde_Gaussian_fitting(miu, bandwidth):\n kde_analyzer = KernelDensity(kernel='gaussian', bandwidth=bandwidth).fit(\n miu)\n return kde_analyzer\n\n\n<mask token>\n\n\ndef second_moment_all_dist(batch_dim_dist):\n return batch_dim_dist.pow(2).sum(dim=1).mean(dim=0)\n\n\ndef inprod_average(batch_dim_1, batch_dim_2):\n assert batch_dim_1.shape[0] == batch_dim_2.shape[0]\n batch_size = batch_dim_1.shape[0]\n inner_product_avg = torch.dot(batch_dim_1.reshape(-1), batch_dim_2.\n reshape(-1)) / batch_size\n return inner_product_avg\n\n\ndef inprod(batch_dim_1, batch_dim_2):\n innner_product = torch.dot(batch_dim_1.reshape(-1), batch_dim_2.reshape(-1)\n )\n return innner_product\n\n\ndef grad_of_function(input_samples, network):\n g_of_y = network(input_samples).sum()\n gradient = torch.autograd.grad(g_of_y, input_samples, create_graph=True)[0]\n return gradient\n\n\n<mask token>\n\n\ndef w2_distance_samples_solver(sample1_n_d, sample2_n_d):\n assert sample1_n_d.shape == sample2_n_d.shape\n num_sample = sample1_n_d.shape[0]\n a = np.ones([num_sample]) / num_sample\n b = np.ones([num_sample]) / num_sample\n tmp_marginal_1 = np.expand_dims(sample1_n_d, axis=0)\n tmp_marginal_2 = np.expand_dims(sample2_n_d, axis=1)\n M = tmp_marginal_1 - tmp_marginal_2\n M = np.sum(np.abs(M) ** 2, axis=2)\n return ot.emd2(a, b, M)\n\n\n<mask token>\n\n\nclass ReshapeTransform:\n\n def __init__(self, new_size):\n self.new_size = new_size\n\n def __call__(self, img):\n return torch.reshape(img, self.new_size)\n\n\nclass CustomMnistDataset(Dataset):\n\n def __init__(self, data, target, transform=None):\n self.data = data\n self.target = target\n self.transform = transform\n\n def __len__(self):\n assert len(self.target) == len(self.data)\n return len(self.target)\n\n def __getitem__(self, idx):\n if torch.is_tensor(idx):\n idx = idx.tolist()\n data_idxed = self.data[idx]\n target_idxed = self.target[idx].float()\n if self.transform:\n data_idxed = 
self.transform(data_idxed)\n return [data_idxed, target_idxed]\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef kde_Gaussian_fitting(miu, bandwidth):\n kde_analyzer = KernelDensity(kernel='gaussian', bandwidth=bandwidth).fit(\n miu)\n return kde_analyzer\n\n\n<mask token>\n\n\ndef second_moment_single_dist(batch_dim):\n return batch_dim.pow(2).sum(dim=1).mean()\n\n\ndef second_moment_all_dist(batch_dim_dist):\n return batch_dim_dist.pow(2).sum(dim=1).mean(dim=0)\n\n\ndef inprod_average(batch_dim_1, batch_dim_2):\n assert batch_dim_1.shape[0] == batch_dim_2.shape[0]\n batch_size = batch_dim_1.shape[0]\n inner_product_avg = torch.dot(batch_dim_1.reshape(-1), batch_dim_2.\n reshape(-1)) / batch_size\n return inner_product_avg\n\n\ndef inprod(batch_dim_1, batch_dim_2):\n innner_product = torch.dot(batch_dim_1.reshape(-1), batch_dim_2.reshape(-1)\n )\n return innner_product\n\n\ndef grad_of_function(input_samples, network):\n g_of_y = network(input_samples).sum()\n gradient = torch.autograd.grad(g_of_y, input_samples, create_graph=True)[0]\n return gradient\n\n\ndef two_loop_loss_in_W2(convex_f_list, grad_g_of_y, miu_i, dist_weight,\n idx_dist):\n n_dist = dist_weight.shape[0]\n f_grad_g_y = convex_f_list[idx_dist](grad_g_of_y).mean()\n for j in range(n_dist):\n f_grad_g_y -= dist_weight[j] * convex_f_list[j](grad_g_of_y).mean()\n inner_product = inprod_average(grad_g_of_y, miu_i)\n half_moment_grad_of_g = 0.5 * second_moment_single_dist(grad_g_of_y)\n loss_gi = (f_grad_g_y - inner_product + half_moment_grad_of_g\n ) * dist_weight[idx_dist]\n return loss_gi\n\n\n<mask token>\n\n\ndef w2_distance_samples_solver(sample1_n_d, sample2_n_d):\n assert sample1_n_d.shape == sample2_n_d.shape\n num_sample = sample1_n_d.shape[0]\n a = np.ones([num_sample]) / num_sample\n b = np.ones([num_sample]) / num_sample\n tmp_marginal_1 = np.expand_dims(sample1_n_d, axis=0)\n tmp_marginal_2 = np.expand_dims(sample2_n_d, axis=1)\n M = tmp_marginal_1 - tmp_marginal_2\n M = np.sum(np.abs(M) ** 2, axis=2)\n return ot.emd2(a, b, M)\n\n\n<mask token>\n\n\nclass 
ReshapeTransform:\n\n def __init__(self, new_size):\n self.new_size = new_size\n\n def __call__(self, img):\n return torch.reshape(img, self.new_size)\n\n\nclass CustomMnistDataset(Dataset):\n\n def __init__(self, data, target, transform=None):\n self.data = data\n self.target = target\n self.transform = transform\n\n def __len__(self):\n assert len(self.target) == len(self.data)\n return len(self.target)\n\n def __getitem__(self, idx):\n if torch.is_tensor(idx):\n idx = idx.tolist()\n data_idxed = self.data[idx]\n target_idxed = self.target[idx].float()\n if self.transform:\n data_idxed = self.transform(data_idxed)\n return [data_idxed, target_idxed]\n\n\n<mask token>\n\n\ndef average_nn(args, **kwargs):\n averaged_parameters = np.zeros([args.N_SAMPLES, args.INPUT_DIM])\n tmp_data = averaged_parameters\n n_samp_of_subset = int(args.N_SAMPLES / args.NUM_DISTRIBUTION)\n for i in range(args.NUM_DISTRIBUTION):\n model_param = io.load(args.get_nn(**kwargs) +\n f'/subset_{i + 1}_samples_{args.subset_samples}/trial_26/storing_models/nn_2layer_epoch200.pt'\n )\n assert args.N_SAMPLES == model_param['layer1.weight'].shape[0]\n tmp_data[:, :-1] = PTU.torch2numpy(model_param['layer1.weight'])\n tmp_data[:, -1] = PTU.torch2numpy(model_param['last_layer.weight'].\n squeeze())\n if i == args.NUM_DISTRIBUTION - 1:\n averaged_parameters[i * n_samp_of_subset:] = tmp_data[i *\n n_samp_of_subset:]\n else:\n averaged_parameters[i * n_samp_of_subset:(i + 1) * n_samp_of_subset\n ] = tmp_data[i * n_samp_of_subset:(i + 1) * n_samp_of_subset]\n return averaged_parameters\n\n\n<mask token>\n\n\ndef get_marginal_list(cfg, type_data='2block'):\n if type_data == '2block':\n marginal_data = g_data.marginal_data_blocks_3loop_ficnn(cfg)[:, :, :-1]\n elif type_data == 'circ_squa':\n marginal_data = g_data.marginal_data_circ_squ_3loop_ficnn(cfg)[:, :,\n :-1]\n elif type_data == 'mnist0-1':\n marginal_data = g_data.marginal_mnist_3loop_ficnn_handle(cfg)\n elif type_data == '3digit':\n marginal_data 
= g_data.marginal_data_3digit_3loop_ficnn(cfg)[:, :, :-1]\n elif type_data == 'ellipse':\n marginal_data = g_data.marginal_data_ellipse_3loop_ficnn(cfg)[:, :, :-1\n ]\n elif type_data == 'line':\n marginal_data = g_data.marginal_data_line_3loop_ficnn(cfg)[:, :, :-1]\n elif type_data == 'usps_mnist':\n marginal_data = g_data.marginal_usps_3loop_ficnn_handle(cfg)[0][\n torch.randperm(5000), :, :-1]\n elif type_data == 'mnist_group':\n if cfg.N_TEST == 25:\n idx_digit = torch.zeros(25).long()\n for idx in range(5):\n idx_digit[idx * 5:(idx + 1) * 5] = 5000 * idx + torch.arange(5)\n marginal_data = g_data.marginal_mnist_3loop_ficnn_handle(cfg)[\n idx_digit]\n else:\n marginal_data = g_data.marginal_mnist_3loop_ficnn_handle(cfg)[torch\n .randperm(25000)]\n elif type_data == 'cifar':\n marginal_data = g_data.marginal_cifar_handle(cfg)\n elif type_data == 'gmm':\n marginal_data = g_data.marginal_data_gmm_3loop_ficnn(cfg)[:, :, :-1]\n return marginal_data.permute(2, 0, 1)\n",
"step-4": "from __future__ import print_function\nimport ot\nimport torch\nimport numpy as np\nfrom sklearn.neighbors import KernelDensity\nfrom torch.utils.data import Dataset\nimport jacinle.io as io\nimport optimal_transport_modules.pytorch_utils as PTU\nimport optimal_transport_modules.generate_data as g_data\nfrom optimal_transport_modules.record_mean_cov import select_mean_and_cov\n<mask token>\n\n\ndef kde_Gaussian_fitting(miu, bandwidth):\n kde_analyzer = KernelDensity(kernel='gaussian', bandwidth=bandwidth).fit(\n miu)\n return kde_analyzer\n\n\ndef second_moment_no_average(batch_dim):\n return batch_dim.pow(2).sum(dim=1)\n\n\ndef second_moment_single_dist(batch_dim):\n return batch_dim.pow(2).sum(dim=1).mean()\n\n\ndef second_moment_all_dist(batch_dim_dist):\n return batch_dim_dist.pow(2).sum(dim=1).mean(dim=0)\n\n\ndef inprod_average(batch_dim_1, batch_dim_2):\n assert batch_dim_1.shape[0] == batch_dim_2.shape[0]\n batch_size = batch_dim_1.shape[0]\n inner_product_avg = torch.dot(batch_dim_1.reshape(-1), batch_dim_2.\n reshape(-1)) / batch_size\n return inner_product_avg\n\n\ndef inprod(batch_dim_1, batch_dim_2):\n innner_product = torch.dot(batch_dim_1.reshape(-1), batch_dim_2.reshape(-1)\n )\n return innner_product\n\n\ndef grad_of_function(input_samples, network):\n g_of_y = network(input_samples).sum()\n gradient = torch.autograd.grad(g_of_y, input_samples, create_graph=True)[0]\n return gradient\n\n\ndef two_loop_loss_in_W2(convex_f_list, grad_g_of_y, miu_i, dist_weight,\n idx_dist):\n n_dist = dist_weight.shape[0]\n f_grad_g_y = convex_f_list[idx_dist](grad_g_of_y).mean()\n for j in range(n_dist):\n f_grad_g_y -= dist_weight[j] * convex_f_list[j](grad_g_of_y).mean()\n inner_product = inprod_average(grad_g_of_y, miu_i)\n half_moment_grad_of_g = 0.5 * second_moment_single_dist(grad_g_of_y)\n loss_gi = (f_grad_g_y - inner_product + half_moment_grad_of_g\n ) * dist_weight[idx_dist]\n return loss_gi\n\n\n<mask token>\n\n\ndef 
w2_distance_samples_solver(sample1_n_d, sample2_n_d):\n assert sample1_n_d.shape == sample2_n_d.shape\n num_sample = sample1_n_d.shape[0]\n a = np.ones([num_sample]) / num_sample\n b = np.ones([num_sample]) / num_sample\n tmp_marginal_1 = np.expand_dims(sample1_n_d, axis=0)\n tmp_marginal_2 = np.expand_dims(sample2_n_d, axis=1)\n M = tmp_marginal_1 - tmp_marginal_2\n M = np.sum(np.abs(M) ** 2, axis=2)\n return ot.emd2(a, b, M)\n\n\ndef free_support_barycenter(measures_locations, measures_weights, X_init, b\n =None, weights=None, numItermax=100, stopThr=1e-07, use_sinkhorn=False):\n g_sinkhorn_reg = 0.1\n iter_count = 0\n N = len(measures_locations)\n k = X_init.shape[0]\n d = X_init.shape[1]\n if b is None:\n b = np.ones((k,)) / k\n if weights is None:\n weights = np.ones((N,)) / N\n X = X_init\n log_dict = {}\n displacement_square_norm = stopThr + 1.0\n while displacement_square_norm > stopThr and iter_count < numItermax:\n T_sum = np.zeros((k, d))\n for measure_locations_i, measure_weights_i, weight_i in zip(\n measures_locations, measures_weights, weights.tolist()):\n M_i = ot.dist(X, measure_locations_i)\n if use_sinkhorn:\n T_i = ot.bregman.sinkhorn(b, measure_weights_i, M_i,\n g_sinkhorn_reg)\n else:\n T_i = ot.emd(b, measure_weights_i, M_i)\n T_sum = T_sum + weight_i * np.reshape(1.0 / b, (-1, 1)\n ) * np.matmul(T_i, measure_locations_i)\n displacement_square_norm = np.sum(np.square(T_sum - X))\n X = T_sum\n print('iteration %d, displacement_square_norm=%f\\n', iter_count,\n displacement_square_norm)\n iter_count += 1\n return X\n\n\n<mask token>\n\n\nclass ReshapeTransform:\n\n def __init__(self, new_size):\n self.new_size = new_size\n\n def __call__(self, img):\n return torch.reshape(img, self.new_size)\n\n\nclass CustomMnistDataset(Dataset):\n\n def __init__(self, data, target, transform=None):\n self.data = data\n self.target = target\n self.transform = transform\n\n def __len__(self):\n assert len(self.target) == len(self.data)\n return 
len(self.target)\n\n def __getitem__(self, idx):\n if torch.is_tensor(idx):\n idx = idx.tolist()\n data_idxed = self.data[idx]\n target_idxed = self.target[idx].float()\n if self.transform:\n data_idxed = self.transform(data_idxed)\n return [data_idxed, target_idxed]\n\n\n<mask token>\n\n\ndef get_gmm_param(trial, cond=-1):\n if cond > 0:\n MEAN, COV = select_mean_and_cov(trial, range_cond=cond)\n else:\n MEAN, COV = select_mean_and_cov(trial)\n INPUT_DIM = MEAN[0].shape[1]\n OUTPUT_DIM = INPUT_DIM\n NUM_DISTRIBUTION = len(MEAN)\n NUM_GMM_COMPONENT = []\n for i in range(NUM_DISTRIBUTION):\n NUM_GMM_COMPONENT.append(MEAN[i].shape[0])\n high_dim_flag = INPUT_DIM > 2\n return (MEAN, COV, INPUT_DIM, OUTPUT_DIM, NUM_DISTRIBUTION,\n NUM_GMM_COMPONENT, high_dim_flag)\n\n\n<mask token>\n\n\ndef average_nn(args, **kwargs):\n averaged_parameters = np.zeros([args.N_SAMPLES, args.INPUT_DIM])\n tmp_data = averaged_parameters\n n_samp_of_subset = int(args.N_SAMPLES / args.NUM_DISTRIBUTION)\n for i in range(args.NUM_DISTRIBUTION):\n model_param = io.load(args.get_nn(**kwargs) +\n f'/subset_{i + 1}_samples_{args.subset_samples}/trial_26/storing_models/nn_2layer_epoch200.pt'\n )\n assert args.N_SAMPLES == model_param['layer1.weight'].shape[0]\n tmp_data[:, :-1] = PTU.torch2numpy(model_param['layer1.weight'])\n tmp_data[:, -1] = PTU.torch2numpy(model_param['last_layer.weight'].\n squeeze())\n if i == args.NUM_DISTRIBUTION - 1:\n averaged_parameters[i * n_samp_of_subset:] = tmp_data[i *\n n_samp_of_subset:]\n else:\n averaged_parameters[i * n_samp_of_subset:(i + 1) * n_samp_of_subset\n ] = tmp_data[i * n_samp_of_subset:(i + 1) * n_samp_of_subset]\n return averaged_parameters\n\n\n<mask token>\n\n\ndef get_marginal_list(cfg, type_data='2block'):\n if type_data == '2block':\n marginal_data = g_data.marginal_data_blocks_3loop_ficnn(cfg)[:, :, :-1]\n elif type_data == 'circ_squa':\n marginal_data = g_data.marginal_data_circ_squ_3loop_ficnn(cfg)[:, :,\n :-1]\n elif type_data == 
'mnist0-1':\n marginal_data = g_data.marginal_mnist_3loop_ficnn_handle(cfg)\n elif type_data == '3digit':\n marginal_data = g_data.marginal_data_3digit_3loop_ficnn(cfg)[:, :, :-1]\n elif type_data == 'ellipse':\n marginal_data = g_data.marginal_data_ellipse_3loop_ficnn(cfg)[:, :, :-1\n ]\n elif type_data == 'line':\n marginal_data = g_data.marginal_data_line_3loop_ficnn(cfg)[:, :, :-1]\n elif type_data == 'usps_mnist':\n marginal_data = g_data.marginal_usps_3loop_ficnn_handle(cfg)[0][\n torch.randperm(5000), :, :-1]\n elif type_data == 'mnist_group':\n if cfg.N_TEST == 25:\n idx_digit = torch.zeros(25).long()\n for idx in range(5):\n idx_digit[idx * 5:(idx + 1) * 5] = 5000 * idx + torch.arange(5)\n marginal_data = g_data.marginal_mnist_3loop_ficnn_handle(cfg)[\n idx_digit]\n else:\n marginal_data = g_data.marginal_mnist_3loop_ficnn_handle(cfg)[torch\n .randperm(25000)]\n elif type_data == 'cifar':\n marginal_data = g_data.marginal_cifar_handle(cfg)\n elif type_data == 'gmm':\n marginal_data = g_data.marginal_data_gmm_3loop_ficnn(cfg)[:, :, :-1]\n return marginal_data.permute(2, 0, 1)\n",
"step-5": "from __future__ import print_function\nimport ot\nimport torch\nimport numpy as np\nfrom sklearn.neighbors import KernelDensity\nfrom torch.utils.data import Dataset\nimport jacinle.io as io\nimport optimal_transport_modules.pytorch_utils as PTU\nimport optimal_transport_modules.generate_data as g_data\nfrom optimal_transport_modules.record_mean_cov import select_mean_and_cov\n\n'''\nPyTorch type\n'''\n\n\ndef kde_Gaussian_fitting(miu, bandwidth):\n kde_analyzer = KernelDensity(\n kernel='gaussian', bandwidth=bandwidth).fit(miu)\n return kde_analyzer\n\n\ndef second_moment_no_average(batch_dim):\n return batch_dim.pow(2).sum(dim=1)\n\n\ndef second_moment_single_dist(batch_dim):\n return batch_dim.pow(2).sum(dim=1).mean()\n\n\ndef second_moment_all_dist(batch_dim_dist):\n return batch_dim_dist.pow(2).sum(dim=1).mean(dim=0)\n\n\ndef inprod_average(batch_dim_1, batch_dim_2):\n assert batch_dim_1.shape[0] == batch_dim_2.shape[0]\n batch_size = batch_dim_1.shape[0]\n inner_product_avg = torch.dot(batch_dim_1.reshape(-1),\n batch_dim_2.reshape(-1)) / batch_size\n return inner_product_avg\n\n\ndef inprod(batch_dim_1, batch_dim_2):\n innner_product = torch.dot(batch_dim_1.reshape(-1),\n batch_dim_2.reshape(-1))\n return innner_product\n\n\ndef grad_of_function(input_samples, network):\n g_of_y = network(input_samples).sum()\n gradient = torch.autograd.grad(\n g_of_y, input_samples, create_graph=True)[0]\n return gradient\n\n\ndef two_loop_loss_in_W2(convex_f_list, grad_g_of_y, miu_i, dist_weight, idx_dist):\n n_dist = dist_weight.shape[0]\n\n #! The 2nd loss part useful for f/g parameters\n f_grad_g_y = convex_f_list[idx_dist](grad_g_of_y).mean()\n\n #! The 4th loss part useful for f/g parameters\n for j in range(n_dist):\n f_grad_g_y -= dist_weight[j] * convex_f_list[j](grad_g_of_y).mean()\n\n #! The 1st loss part useful for g parameters\n inner_product = inprod_average(grad_g_of_y, miu_i)\n\n #! 
The 3rd loss part useful for g parameters\n half_moment_grad_of_g = 0.5 * second_moment_single_dist(grad_g_of_y)\n\n loss_gi = (f_grad_g_y - inner_product +\n half_moment_grad_of_g) * dist_weight[idx_dist]\n return loss_gi\n\n\n'''\nlocalized POT library\n'''\n\n\ndef w2_distance_samples_solver(sample1_n_d, sample2_n_d):\n # see here for details\n # https://pythonot.github.io/all.html#ot.emd\n # https://pythonot.github.io/all.html#ot.emd2\n assert sample1_n_d.shape == sample2_n_d.shape\n num_sample = sample1_n_d.shape[0]\n a = np.ones([num_sample]) / num_sample\n b = np.ones([num_sample]) / num_sample\n tmp_marginal_1 = np.expand_dims(sample1_n_d, axis=0)\n tmp_marginal_2 = np.expand_dims(sample2_n_d, axis=1)\n M = tmp_marginal_1 - tmp_marginal_2\n M = np.sum(np.abs(M)**2, axis=2)\n return ot.emd2(a, b, M)\n\n\ndef free_support_barycenter(measures_locations, measures_weights, X_init, b=None, weights=None, numItermax=100, stopThr=1e-7, use_sinkhorn=False):\n g_sinkhorn_reg = 0.1\n iter_count = 0\n N = len(measures_locations)\n k = X_init.shape[0]\n d = X_init.shape[1]\n if b is None:\n b = np.ones((k,)) / k\n if weights is None:\n weights = np.ones((N,)) / N\n\n X = X_init\n\n log_dict = {}\n displacement_square_norm = stopThr + 1.\n while (displacement_square_norm > stopThr and iter_count < numItermax):\n T_sum = np.zeros((k, d))\n for (measure_locations_i, measure_weights_i, weight_i) in zip(measures_locations, measures_weights, weights.tolist()):\n M_i = ot.dist(X, measure_locations_i)\n if use_sinkhorn:\n T_i = ot.bregman.sinkhorn(\n b, measure_weights_i, M_i, g_sinkhorn_reg)\n else:\n T_i = ot.emd(b, measure_weights_i, M_i)\n T_sum = T_sum + weight_i * \\\n np.reshape(1. 
/ b, (-1, 1)) * \\\n np.matmul(T_i, measure_locations_i)\n\n displacement_square_norm = np.sum(np.square(T_sum - X))\n\n X = T_sum\n print('iteration %d, displacement_square_norm=%f\\n',\n iter_count, displacement_square_norm)\n\n iter_count += 1\n\n return X\n\n\n'''\nMNIST utils\n'''\n\n\nclass ReshapeTransform:\n def __init__(self, new_size):\n self.new_size = new_size\n\n def __call__(self, img):\n return torch.reshape(img, self.new_size)\n\n\n# def extract_three_number(total_data):\n# idx_train = (total_data.targets == 0) + (total_data.targets ==\n# 1) + (total_data.targets == 7)\n# total_data.targets = total_data.targets[idx_train]\n# total_data.data = total_data.data[idx_train]\n# return total_data\n\n\nclass CustomMnistDataset(Dataset):\n def __init__(self, data, target, transform=None):\n\n self.data = data\n self.target = target\n self.transform = transform\n\n def __len__(self):\n assert len(self.target) == len(self.data)\n return len(self.target)\n\n def __getitem__(self, idx):\n if torch.is_tensor(idx):\n idx = idx.tolist()\n\n data_idxed = self.data[idx]\n target_idxed = self.target[idx].float()\n # sample = {'data': data_idxed, 'target': target_idxed}\n\n if self.transform:\n data_idxed = self.transform(data_idxed)\n\n return [data_idxed, target_idxed]\n\n\n'''\nGaussian utils\n'''\n\n\ndef get_gmm_param(trial, cond=-1):\n if cond > 0:\n MEAN, COV = select_mean_and_cov(trial, range_cond=cond)\n else:\n MEAN, COV = select_mean_and_cov(trial)\n INPUT_DIM = MEAN[0].shape[1]\n OUTPUT_DIM = INPUT_DIM\n NUM_DISTRIBUTION = len(MEAN)\n NUM_GMM_COMPONENT = []\n for i in range(NUM_DISTRIBUTION):\n NUM_GMM_COMPONENT.append(MEAN[i].shape[0])\n high_dim_flag = INPUT_DIM > 2\n return MEAN, COV, INPUT_DIM, OUTPUT_DIM, NUM_DISTRIBUTION, NUM_GMM_COMPONENT, high_dim_flag\n\n\n'''\nAverage the 2 layer neural networks\n'''\n\n\ndef average_nn(args, **kwargs):\n averaged_parameters = np.zeros([args.N_SAMPLES, args.INPUT_DIM])\n tmp_data = averaged_parameters\n 
n_samp_of_subset = int(args.N_SAMPLES / args.NUM_DISTRIBUTION)\n for i in range(args.NUM_DISTRIBUTION):\n model_param = io.load(args.get_nn(**kwargs) +\n f\"/subset_{i+1}_samples_{args.subset_samples}/trial_26/storing_models/nn_2layer_epoch200.pt\")\n\n assert args.N_SAMPLES == model_param['layer1.weight'].shape[0]\n tmp_data[:, :-1] = PTU.torch2numpy(model_param['layer1.weight'])\n tmp_data[:, -\n 1] = PTU.torch2numpy(model_param['last_layer.weight'].squeeze())\n if i == args.NUM_DISTRIBUTION - 1:\n averaged_parameters[(i * n_samp_of_subset)\n :] = tmp_data[(i * n_samp_of_subset):]\n else:\n averaged_parameters[i * n_samp_of_subset:\n (i + 1) * n_samp_of_subset] = tmp_data[i * n_samp_of_subset:\n (i + 1) * n_samp_of_subset]\n\n return averaged_parameters\n\n\n'''\nget marginal data handle\n'''\n\n\ndef get_marginal_list(cfg, type_data='2block'):\n if type_data == '2block':\n marginal_data = g_data.marginal_data_blocks_3loop_ficnn(\n cfg)[:, :, :-1]\n elif type_data == 'circ_squa':\n marginal_data = g_data.marginal_data_circ_squ_3loop_ficnn(\n cfg)[:, :, :-1]\n elif type_data == 'mnist0-1':\n marginal_data = g_data.marginal_mnist_3loop_ficnn_handle(\n cfg)\n elif type_data == '3digit':\n marginal_data = g_data.marginal_data_3digit_3loop_ficnn(\n cfg)[:, :, :-1]\n elif type_data == 'ellipse':\n marginal_data = g_data.marginal_data_ellipse_3loop_ficnn(\n cfg)[:, :, :-1]\n elif type_data == 'line':\n marginal_data = g_data.marginal_data_line_3loop_ficnn(\n cfg)[:, :, :-1]\n elif type_data == 'usps_mnist':\n marginal_data = g_data.marginal_usps_3loop_ficnn_handle(\n cfg)[0][torch.randperm(5000), :, :-1]\n elif type_data == 'mnist_group':\n if cfg.N_TEST == 25:\n idx_digit = torch.zeros(25).long()\n for idx in range(5):\n idx_digit[idx * 5:(idx + 1) * 5] = 5000 * idx + torch.arange(5)\n marginal_data = g_data.marginal_mnist_3loop_ficnn_handle(\n cfg)[idx_digit]\n else:\n marginal_data = g_data.marginal_mnist_3loop_ficnn_handle(\n cfg)[torch.randperm(25000)]\n elif 
type_data == 'cifar':\n marginal_data = g_data.marginal_cifar_handle(cfg)\n elif type_data == 'gmm':\n marginal_data = g_data.marginal_data_gmm_3loop_ficnn(\n cfg)[:, :, :-1]\n return marginal_data.permute(2, 0, 1)\n",
"step-ids": [
12,
13,
17,
21,
22
]
}
|
[
12,
13,
17,
21,
22
] |
# Copyright 2014 Rackspace Hosting
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from datetime import datetime
import json
import netaddr
from time import sleep
import uuid
from proboscis import after_class
from proboscis.asserts import assert_equal
from proboscis.asserts import assert_not_equal
from proboscis.asserts import assert_raises
from proboscis.asserts import assert_true
from proboscis.asserts import fail
from proboscis import before_class
from proboscis.decorators import time_out
from proboscis import SkipTest
from proboscis import test
from troveclient.compat import exceptions
from trove.common.utils import poll_until
from trove import tests
from trove.tests.api.instances import assert_unprocessable
from trove.tests.api.instances import instance_info
from trove.tests.api.instances import InstanceTestInfo
from trove.tests.api.instances import TIMEOUT_INSTANCE_CREATE
from trove.tests.api.instances import TIMEOUT_INSTANCE_DELETE
from trove.tests.config import CONFIG
from trove.tests.util.check import AttrCheck
from trove.tests.util.check import CollectionCheck
from trove.tests.util.check import TypeCheck
from trove.tests.util import create_dbaas_client
from trove.tests.util.mysql import create_mysql_connection
from trove.tests.util.users import Requirements
# Name and description given to configurations created by these tests.
CONFIG_NAME = "test_configuration"
CONFIG_DESC = "configuration description"
# Module-level state shared between the test groups below; populated as
# the groups run.
configuration_default = None
configuration_info = None
configuration_href = None
configuration_instance = InstanceTestInfo()
configuration_instance_id = None
# MySQL server variables exercised by the configuration tests.
sql_variables = [
    'key_buffer_size',
    'connect_timeout',
    'join_buffer_size',
]
def _is_valid_timestamp(time_string):
try:
datetime.strptime(time_string, "%Y-%m-%dT%H:%M:%S")
except ValueError:
return False
return True
# helper methods to validate configuration is applied to instance
def _execute_query(host, user_name, password, query):
    """Run *query* against the MySQL server at *host* and return the result."""
    print("Starting to query database, host: %s, user: %s, password: %s, "
          "query: %s" % (host, user_name, password, query))
    with create_mysql_connection(host, user_name, password) as connection:
        # Return while the connection is still open, matching the original
        # control flow.
        return connection.execute(query)
def _get_address(instance_id):
    """Return the first IPv4 address of the instance, or fail the test."""
    result = instance_info.dbaas_admin.mgmt.instances.show(instance_id)
    for ip in result.ip:
        if netaddr.valid_ipv4(ip):
            return str(ip)
    fail("No IPV4 ip found")
def _test_configuration_is_applied_to_instance(instance, configuration_id):
    """Assert that *configuration_id* is attached to *instance* and that its
    values are live on the MySQL server.

    Connects as each of the instance's users and compares the server's
    ``show variables`` output against the configuration's expected values.
    """
    if CONFIG.fake_mode:
        raise SkipTest("configuration from sql does not work in fake mode")
    instance_test = instance_info.dbaas.instances.get(instance.id)
    assert_equal(configuration_id, instance_test.configuration['id'])
    if configuration_id:
        testconfig_info = instance_info.dbaas.configurations.get(
            configuration_id)
    else:
        # No id given: read the configuration currently attached to the
        # instance instead.
        testconfig_info = instance_info.dbaas.instance.configuration(
            instance.id)
        # NOTE(review): this lookup result is unused — looks like leftover
        # code; confirm before removing.
        testconfig_info['configuration']
    # The instance must appear in the configuration's consumer list.
    conf_instances = instance_info.dbaas.configurations.instances(
        configuration_id)
    config_instance_ids = [inst.id for inst in conf_instances]
    assert_true(instance_test.id in config_instance_ids)
    cfg_names = testconfig_info.values.keys()
    host = _get_address(instance.id)
    for user in instance.users:
        username = user['name']
        password = user['password']
        # Ask the server only for the variables this configuration sets.
        concat_variables = "','".join(cfg_names)
        query = ("show variables where Variable_name "
                 "in ('%s');" % concat_variables)
        actual_values = _execute_query(host, username, password, query)
        print("actual_values %s" % actual_values)
        print("testconfig_info.values %s" % testconfig_info.values)
        assert_true(len(actual_values) == len(cfg_names))
        # check the configs exist
        attrcheck = AttrCheck()
        allowed_attrs = [actual_key for actual_key, actual_value
                         in actual_values]
        attrcheck.contains_allowed_attrs(
            testconfig_info.values, allowed_attrs,
            msg="Configurations parameters")

        def _get_parameter_type(name):
            # The declared type of a parameter is not returned directly;
            # it is read back off the client's last_response body.
            instance_info.dbaas.configuration_parameters.get_parameter(
                instance_info.dbaas_datastore,
                instance_info.dbaas_datastore_version,
                name)
            resp, body = instance_info.dbaas.client.last_response
            print(resp)
            print(body)
            return json.loads(body.decode())['type']

        # check the config values are correct
        for key, value in actual_values:
            key_type = _get_parameter_type(key)
            # mysql returns 'ON' and 'OFF' for True and False respectively
            if value == 'ON':
                converted_key_value = (str(key), 1)
            elif value == 'OFF':
                converted_key_value = (str(key), 0)
            else:
                if key_type == 'integer':
                    value = int(value)
                converted_key_value = (str(key), value)
            print("converted_key_value: %s" % str(converted_key_value))
            assert_true(converted_key_value in
                        testconfig_info.values.items())
class ConfigurationsTestBase(object):
    """Shared helpers for looking up expected test configurations."""

    @staticmethod
    def expected_instance_datastore_configs(instance_id):
        """Return the expected test configurations for the datastore of
        the instance identified by *instance_id*.
        """
        inst = instance_info.dbaas.instances.get(instance_id)
        ds_type = inst.datastore['type']
        ds_test_configs = CONFIG.get(ds_type, {})
        return ds_test_configs.get("configurations", {})

    @staticmethod
    def expected_default_datastore_configs():
        """Return the expected test configurations for the default
        datastore named by dbaas_datastore in the Test Config.
        """
        ds_name = CONFIG.get('dbaas_datastore', None)
        ds_test_configs = CONFIG.get(ds_name, {})
        return ds_test_configs.get("configurations", {})
@test(depends_on_groups=[tests.DBAAS_API_BACKUPS],
      groups=[tests.DBAAS_API_CONFIGURATIONS])
class CreateConfigurations(ConfigurationsTestBase):
    """API tests for configuration-parameter discovery and for creating
    and editing configuration groups.

    Invalid creation requests must be rejected with 422
    (UnprocessableEntity); a valid creation stores the result in the
    module-level ``configuration_info`` for the follow-up test classes.
    """

    @test
    def test_expected_configurations_parameters(self):
        """Test get expected configurations parameters."""
        allowed_attrs = ["configuration-parameters"]
        instance_info.dbaas.configuration_parameters.parameters(
            instance_info.dbaas_datastore,
            instance_info.dbaas_datastore_version)
        # last_response is read immediately after the call that issued it
        resp, body = instance_info.dbaas.client.last_response
        attrcheck = AttrCheck()
        config_parameters_dict = json.loads(body.decode())
        attrcheck.contains_allowed_attrs(
            config_parameters_dict, allowed_attrs,
            msg="Configurations parameters")
        # sanity check that a few options are in the list
        config_params_list = config_parameters_dict['configuration-parameters']
        config_param_keys = [param['name'] for param in config_params_list]
        expected_configs = self.expected_default_datastore_configs()
        expected_config_params = expected_configs.get('parameters_list')
        # check for duplicate configuration parameters
        msg = "check for duplicate configuration parameters"
        assert_equal(len(config_param_keys), len(set(config_param_keys)), msg)
        for expected_config_item in expected_config_params:
            assert_true(expected_config_item in config_param_keys)

    @test
    def test_expected_get_configuration_parameter(self):
        # tests get on a single parameter to verify it has expected attributes
        param_name = 'key_buffer_size'
        allowed_config_params = ['name', 'restart_required',
                                 'max', 'min', 'type',
                                 'deleted', 'deleted_at',
                                 'datastore_version_id']
        param = instance_info.dbaas.configuration_parameters.get_parameter(
            instance_info.dbaas_datastore,
            instance_info.dbaas_datastore_version,
            param_name)
        resp, body = instance_info.dbaas.client.last_response
        print("params: %s" % param)
        print("resp: %s" % resp)
        print("body: %s" % body)
        attrcheck = AttrCheck()
        config_parameter_dict = json.loads(body.decode())
        print("config_parameter_dict: %s" % config_parameter_dict)
        attrcheck.contains_allowed_attrs(
            config_parameter_dict,
            allowed_config_params,
            msg="Get Configuration parameter")
        assert_equal(param_name, config_parameter_dict['name'])
        with TypeCheck('ConfigurationParameter', param) as parameter:
            parameter.has_field('name', str)
            parameter.has_field('restart_required', bool)
            parameter.has_field('max', int)
            parameter.has_field('min', int)
            parameter.has_field('type', str)
            parameter.has_field('datastore_version_id', str)

    @test
    def test_configurations_create_invalid_values(self):
        """Test create configurations with invalid values."""
        values = '{"this_is_invalid": 123}'
        try:
            instance_info.dbaas.configurations.create(
                CONFIG_NAME,
                values,
                CONFIG_DESC)
        except exceptions.UnprocessableEntity:
            resp, body = instance_info.dbaas.client.last_response
            assert_equal(resp.status, 422)
        else:
            # Bug fix: the test previously passed silently when the
            # create unexpectedly succeeded; fail explicitly instead.
            assert_true(False, "Expected UnprocessableEntity (422) when "
                               "creating a configuration with an invalid "
                               "parameter name.")

    @test
    def test_configurations_create_invalid_value_type(self):
        """Test create configuration with invalid value type."""
        values = '{"key_buffer_size": "this is a string not int"}'
        assert_unprocessable(instance_info.dbaas.configurations.create,
                             CONFIG_NAME, values, CONFIG_DESC)

    @test
    def test_configurations_create_value_out_of_bounds(self):
        """Test create configuration with value out of bounds."""
        expected_configs = self.expected_default_datastore_configs()
        # both over- and under-range values must be rejected
        values = json.dumps(expected_configs.get('out_of_bounds_over'))
        assert_unprocessable(instance_info.dbaas.configurations.create,
                             CONFIG_NAME, values, CONFIG_DESC)
        values = json.dumps(expected_configs.get('out_of_bounds_under'))
        assert_unprocessable(instance_info.dbaas.configurations.create,
                             CONFIG_NAME, values, CONFIG_DESC)

    @test
    def test_valid_configurations_create(self):
        """create a configuration with valid parameters from config."""
        expected_configs = self.expected_default_datastore_configs()
        values = json.dumps(expected_configs.get('valid_values'))
        expected_values = json.loads(values)
        result = instance_info.dbaas.configurations.create(
            CONFIG_NAME,
            values,
            CONFIG_DESC,
            datastore=instance_info.dbaas_datastore,
            datastore_version=instance_info.dbaas_datastore_version)
        resp, body = instance_info.dbaas.client.last_response
        assert_equal(resp.status, 200)
        with TypeCheck('Configuration', result) as configuration:
            configuration.has_field('name', str)
            configuration.has_field('description', str)
            configuration.has_field('values', dict)
            configuration.has_field('datastore_name', str)
            configuration.has_field('datastore_version_id', str)
            configuration.has_field('datastore_version_name', str)
        # publish the created configuration for the follow-up test classes
        global configuration_info
        configuration_info = result
        assert_equal(configuration_info.name, CONFIG_NAME)
        assert_equal(configuration_info.description, CONFIG_DESC)
        assert_equal(configuration_info.values, expected_values)

    @test(runs_after=[test_valid_configurations_create])
    def test_appending_to_existing_configuration(self):
        """test_appending_to_existing_configuration"""
        # test being able to update and insert new parameter name and values
        # to an existing configuration
        expected_configs = self.expected_default_datastore_configs()
        values = json.dumps(expected_configs.get('appending_values'))
        # ensure updated timestamp is different than created
        if not CONFIG.fake_mode:
            sleep(1)
        instance_info.dbaas.configurations.edit(configuration_info.id,
                                                values)
        resp, body = instance_info.dbaas.client.last_response
        assert_equal(resp.status, 200)
@test(depends_on_classes=[CreateConfigurations],
      groups=[tests.DBAAS_API_CONFIGURATIONS])
class AfterConfigurationsCreation(ConfigurationsTestBase):
    """Tests that run once a configuration group exists: assigning it to
    instances and verifying the attached configuration through the API.
    """

    @test
    def test_assign_configuration_to_invalid_instance(self):
        """test assigning to an instance that does not exist"""
        invalid_id = "invalid-inst-id"
        try:
            instance_info.dbaas.instances.modify(invalid_id,
                                                 configuration_info.id)
        except exceptions.NotFound:
            resp, body = instance_info.dbaas.client.last_response
            assert_equal(resp.status, 404)
        else:
            # Bug fix: the test previously passed silently if modify did
            # not raise; an invalid instance id must yield NotFound (404).
            assert_true(False, "Expected NotFound when assigning a "
                               "configuration to an invalid instance id.")

    @test
    def test_assign_configuration_to_valid_instance(self):
        """test assigning a configuration to an instance"""
        print("instance_info.id: %s" % instance_info.id)
        print("configuration_info: %s" % configuration_info)
        print("configuration_info.id: %s" % configuration_info.id)
        config_id = configuration_info.id
        instance_info.dbaas.instances.modify(instance_info.id,
                                             configuration=config_id)
        resp, body = instance_info.dbaas.client.last_response
        assert_equal(resp.status, 202)

    @test(depends_on=[test_assign_configuration_to_valid_instance])
    def test_assign_configuration_to_instance_with_config(self):
        """test assigning a configuration to an instance conflicts"""
        # assigning while one is already attached must be a BadRequest
        config_id = configuration_info.id
        assert_raises(exceptions.BadRequest,
                      instance_info.dbaas.instances.modify, instance_info.id,
                      configuration=config_id)

    @test(depends_on=[test_assign_configuration_to_valid_instance])
    @time_out(30)
    def test_get_configuration_details_from_instance_validation(self):
        """validate the configuration after attaching"""
        print("instance_info.id: %s" % instance_info.id)
        inst = instance_info.dbaas.instances.get(instance_info.id)
        configuration_id = inst.configuration['id']
        print("configuration_info: %s" % configuration_id)
        assert_not_equal(None, configuration_id)
        _test_configuration_is_applied_to_instance(instance_info,
                                                   configuration_id)

    @test(depends_on=[test_get_configuration_details_from_instance_validation])
    def test_configurations_get(self):
        """test that the instance shows up on the assigned configuration"""
        result = instance_info.dbaas.configurations.get(configuration_info.id)
        assert_equal(configuration_info.id, result.id)
        assert_equal(configuration_info.name, result.name)
        assert_equal(configuration_info.description, result.description)
        # check the result field types
        with TypeCheck("configuration", result) as check:
            check.has_field("id", str)
            check.has_field("name", str)
            check.has_field("description", str)
            check.has_field("values", dict)
            check.has_field("created", str)
            check.has_field("updated", str)
            check.has_field("instance_count", int)
        print(result.values)
        # check for valid timestamps
        assert_true(_is_valid_timestamp(result.created))
        assert_true(_is_valid_timestamp(result.updated))
        # check that created and updated timestamps differ, since
        # test_appending_to_existing_configuration should have changed the
        # updated timestamp
        if not CONFIG.fake_mode:
            assert_not_equal(result.created, result.updated)
        assert_equal(result.instance_count, 1)
        with CollectionCheck("configuration_values", result.values) as check:
            # check each item has the correct type according to the rules
            for (item_key, item_val) in result.values.items():
                print("item_key: %s" % item_key)
                print("item_val: %s" % item_val)
                dbaas = instance_info.dbaas
                param = dbaas.configuration_parameters.get_parameter(
                    instance_info.dbaas_datastore,
                    instance_info.dbaas_datastore_version,
                    item_key)
                if param.type == 'integer':
                    check.has_element(item_key, int)
                if param.type == 'string':
                    check.has_element(item_key, str)
                if param.type == 'boolean':
                    check.has_element(item_key, bool)
        # Test to make sure that another user is not able to GET this config
        reqs = Requirements(is_admin=False)
        test_auth_user = instance_info.user.auth_user
        other_user = CONFIG.users.find_user(reqs, black_list=[test_auth_user])
        other_user_tenant_id = other_user.tenant_id
        client_tenant_id = instance_info.user.tenant_id
        if other_user_tenant_id == client_tenant_id:
            # same tenant would be allowed to see it; pick a different user
            other_user = CONFIG.users.find_user(
                reqs, black_list=[instance_info.user.auth_user,
                                  other_user])
        print(other_user)
        print(other_user.__dict__)
        other_client = create_dbaas_client(other_user)
        assert_raises(exceptions.NotFound, other_client.configurations.get,
                      configuration_info.id)
@test(depends_on_classes=[AfterConfigurationsCreation],
      groups=[tests.DBAAS_API_CONFIGURATIONS])
class ListConfigurations(ConfigurationsTestBase):
    # Verifies configuration listing, the default-configuration endpoint,
    # and the RESTART_REQUIRED workflow triggered by updating a
    # non-dynamic parameter on the attached configuration.
    @test
    def test_configurations_list(self):
        # test listing configurations show up
        result = instance_info.dbaas.configurations.list()
        for conf in result:
            with TypeCheck("Configuration", conf) as check:
                check.has_field('id', str)
                check.has_field('name', str)
                check.has_field('description', str)
                check.has_field('datastore_version_id', str)
                check.has_field('datastore_version_name', str)
                check.has_field('datastore_name', str)
        # exactly one entry must match the configuration created earlier
        exists = [config for config in result if
                  config.id == configuration_info.id]
        assert_equal(1, len(exists))
        configuration = exists[0]
        assert_equal(configuration.id, configuration_info.id)
        assert_equal(configuration.name, configuration_info.name)
        assert_equal(configuration.description, configuration_info.description)
    @test
    def test_configurations_list_for_instance(self):
        # test getting an instance shows the configuration assigned shows up
        instance = instance_info.dbaas.instances.get(instance_info.id)
        assert_equal(instance.configuration['id'], configuration_info.id)
        assert_equal(instance.configuration['name'], configuration_info.name)
        # expecting two things in links, href and bookmark
        assert_equal(2, len(instance.configuration['links']))
        link = instance.configuration['links'][0]
        # save the href for StartInstanceWithConfiguration, which creates
        # a new instance directly from this configuration reference
        global configuration_href
        configuration_href = link['href']
    @test
    def test_get_default_configuration_on_instance(self):
        # test the api call to get the default template of an instance exists
        result = instance_info.dbaas.instances.configuration(instance_info.id)
        global configuration_default
        configuration_default = result
        assert_not_equal(None, result.configuration)
    @test
    def test_changing_configuration_with_nondynamic_parameter(self):
        """test_changing_configuration_with_nondynamic_parameter"""
        # a non-dynamic parameter only takes effect after a restart, so
        # this update is expected to leave the instance RESTART_REQUIRED
        expected_configs = self.expected_default_datastore_configs()
        values = json.dumps(expected_configs.get('nondynamic_parameter'))
        instance_info.dbaas.configurations.update(configuration_info.id,
                                                  values)
        resp, body = instance_info.dbaas.client.last_response
        assert_equal(resp.status, 202)
        instance_info.dbaas.configurations.get(configuration_info.id)
        resp, body = instance_info.dbaas.client.last_response
        assert_equal(resp.status, 200)
    @test(depends_on=[test_changing_configuration_with_nondynamic_parameter])
    @time_out(20)
    def test_waiting_for_instance_in_restart_required(self):
        """test_waiting_for_instance_in_restart_required"""
        def result_is_not_active():
            # True once the instance has left the running state
            instance = instance_info.dbaas.instances.get(
                instance_info.id)
            if instance.status in CONFIG.running_status:
                return False
            else:
                return True
        poll_until(result_is_not_active)
        instance = instance_info.dbaas.instances.get(instance_info.id)
        resp, body = instance_info.dbaas.client.last_response
        assert_equal(resp.status, 200)
        assert_equal('RESTART_REQUIRED', instance.status)
    @test(depends_on=[test_waiting_for_instance_in_restart_required])
    def test_restart_service_should_return_active(self):
        """test_restart_service_should_return_active"""
        instance_info.dbaas.instances.restart(instance_info.id)
        resp, body = instance_info.dbaas.client.last_response
        assert_equal(resp.status, 202)
        def result_is_active():
            instance = instance_info.dbaas.instances.get(
                instance_info.id)
            if instance.status in CONFIG.running_status:
                return True
            else:
                # only these transient states are legal during a restart
                assert_true(instance.status in ['REBOOT', 'SHUTDOWN'])
                return False
        poll_until(result_is_active)
    @test(depends_on=[test_restart_service_should_return_active])
    @time_out(30)
    def test_get_configuration_details_from_instance_validation(self):
        """test_get_configuration_details_from_instance_validation"""
        inst = instance_info.dbaas.instances.get(instance_info.id)
        configuration_id = inst.configuration['id']
        assert_not_equal(None, inst.configuration['id'])
        _test_configuration_is_applied_to_instance(instance_info,
                                                   configuration_id)
    @test(depends_on=[test_configurations_list])
    def test_compare_list_and_details_timestamps(self):
        # compare config timestamps between list and details calls
        result = instance_info.dbaas.configurations.list()
        list_config = [config for config in result if
                       config.id == configuration_info.id]
        assert_equal(1, len(list_config))
        details_config = instance_info.dbaas.configurations.get(
            configuration_info.id)
        assert_equal(list_config[0].created, details_config.created)
        assert_equal(list_config[0].updated, details_config.updated)
@test(depends_on_classes=[ListConfigurations],
      groups=[tests.DBAAS_API_CONFIGURATIONS])
class StartInstanceWithConfiguration(ConfigurationsTestBase):
    # Boots a second instance with the configuration group attached at
    # create time, using the href saved by ListConfigurations.
    @test
    def test_start_instance_with_configuration(self):
        """test that a new instance will apply the configuration on create"""
        global configuration_instance
        databases = []
        databases.append({"name": "firstdbconfig", "character_set": "latin2",
                          "collate": "latin2_general_ci"})
        databases.append({"name": "db2"})
        configuration_instance.databases = databases
        users = []
        users.append({"name": "liteconf", "password": "liteconfpass",
                      "databases": [{"name": "firstdbconfig"}]})
        configuration_instance.users = users
        configuration_instance.name = "TEST_" + str(uuid.uuid4()) + "_config"
        # mirror the flavor/volume/datastore of the primary test instance
        flavor_href = instance_info.dbaas_flavor_href
        configuration_instance.dbaas_flavor_href = flavor_href
        configuration_instance.volume = instance_info.volume
        configuration_instance.dbaas_datastore = instance_info.dbaas_datastore
        configuration_instance.dbaas_datastore_version = \
            instance_info.dbaas_datastore_version
        configuration_instance.nics = instance_info.nics
        # configuration_href was stored by
        # ListConfigurations.test_configurations_list_for_instance
        result = instance_info.dbaas.instances.create(
            configuration_instance.name,
            configuration_instance.dbaas_flavor_href,
            configuration_instance.volume,
            configuration_instance.databases,
            configuration_instance.users,
            nics=configuration_instance.nics,
            availability_zone="nova",
            datastore=configuration_instance.dbaas_datastore,
            datastore_version=configuration_instance.dbaas_datastore_version,
            configuration=configuration_href)
        assert_equal(200, instance_info.dbaas.last_http_code)
        assert_equal("BUILD", result.status)
        # remember the new instance id for the follow-up test classes
        configuration_instance.id = result.id
@test(depends_on_classes=[StartInstanceWithConfiguration],
      groups=[tests.DBAAS_API_CONFIGURATIONS])
class WaitForConfigurationInstanceToFinish(ConfigurationsTestBase):
    # Waits for the configuration-created instance to become active and
    # then validates that the configuration was actually applied to it.
    @test
    @time_out(TIMEOUT_INSTANCE_CREATE)
    def test_instance_with_configuration_active(self):
        """wait for the instance created with configuration"""
        def result_is_active():
            instance = instance_info.dbaas.instances.get(
                configuration_instance.id)
            if instance.status in CONFIG.running_status:
                return True
            else:
                # any state other than BUILD while waiting is a failure
                assert_equal("BUILD", instance.status)
                return False
        poll_until(result_is_active)
    @test(depends_on=[test_instance_with_configuration_active])
    @time_out(30)
    def test_get_configuration_details_from_instance_validation(self):
        """Test configuration is applied correctly to the instance."""
        inst = instance_info.dbaas.instances.get(configuration_instance.id)
        configuration_id = inst.configuration['id']
        assert_not_equal(None, configuration_id)
        _test_configuration_is_applied_to_instance(configuration_instance,
                                                   configuration_id)
@test(depends_on=[WaitForConfigurationInstanceToFinish],
      groups=[tests.DBAAS_API_CONFIGURATIONS])
class DeleteConfigurations(ConfigurationsTestBase):
    """Tests for unassigning and deleting configuration groups, plus the
    mgmt-API parameter delete/re-create round trip.

    setUp snapshots the parameter that will be deleted so tearDown can
    restore it even when the tests fail.
    """

    @before_class
    def setUp(self):
        # need to store the parameter details that will be deleted
        config_param_name = sql_variables[1]
        instance_info.dbaas.configuration_parameters.get_parameter(
            instance_info.dbaas_datastore,
            instance_info.dbaas_datastore_version,
            config_param_name)
        resp, body = instance_info.dbaas.client.last_response
        print(resp)
        print(body)
        self.config_parameter_dict = json.loads(body.decode())

    @after_class(always_run=True)
    def tearDown(self):
        # need to "undelete" the parameter that was deleted from the mgmt call
        if instance_info.dbaas:
            ds = instance_info.dbaas_datastore
            ds_v = instance_info.dbaas_datastore_version
            version = instance_info.dbaas.datastore_versions.get(
                ds, ds_v)
            client = instance_info.dbaas_admin.mgmt_configs
            print(self.config_parameter_dict)
            client.create(version.id,
                          self.config_parameter_dict['name'],
                          self.config_parameter_dict['restart_required'],
                          self.config_parameter_dict['type'],
                          self.config_parameter_dict['max'],
                          self.config_parameter_dict['min'])

    @test
    def test_delete_invalid_configuration_not_found(self):
        # test deleting a configuration that does not exist throws exception
        invalid_configuration_id = "invalid-config-id"
        assert_raises(exceptions.NotFound,
                      instance_info.dbaas.configurations.delete,
                      invalid_configuration_id)

    @test(depends_on=[test_delete_invalid_configuration_not_found])
    def test_delete_configuration_parameter_with_mgmt_api(self):
        # testing a param that is assigned to an instance can be deleted
        # and doesn't affect an unassign later. So we delete a parameter
        # that is used by a test (connect_timeout)
        ds = instance_info.dbaas_datastore
        ds_v = instance_info.dbaas_datastore_version
        version = instance_info.dbaas.datastore_versions.get(
            ds, ds_v)
        client = instance_info.dbaas_admin.mgmt_configs
        config_param_name = self.config_parameter_dict['name']
        client.delete(version.id, config_param_name)
        assert_raises(
            exceptions.NotFound,
            instance_info.dbaas.configuration_parameters.get_parameter,
            ds,
            ds_v,
            config_param_name)

    @test(depends_on=[test_delete_configuration_parameter_with_mgmt_api])
    def test_unable_delete_instance_configurations(self):
        # test deleting a configuration that is assigned to
        # an instance is not allowed.
        assert_raises(exceptions.BadRequest,
                      instance_info.dbaas.configurations.delete,
                      configuration_info.id)

    @test(depends_on=[test_unable_delete_instance_configurations])
    @time_out(30)
    def test_unassign_configuration_from_instances(self):
        """test to unassign configuration from instance"""
        instance_info.dbaas.instances.update(configuration_instance.id,
                                             remove_configuration=True)
        resp, body = instance_info.dbaas.client.last_response
        assert_equal(resp.status, 202)
        instance_info.dbaas.instances.update(instance_info.id,
                                             remove_configuration=True)
        resp, body = instance_info.dbaas.client.last_response
        assert_equal(resp.status, 202)
        instance_info.dbaas.instances.get(instance_info.id)

        def result_has_no_configuration(inst):
            # True once the instance no longer reports a configuration
            instance = instance_info.dbaas.instances.get(inst.id)
            return not hasattr(instance, 'configuration')
        # Cleanup: the original reassigned a shared closure variable
        # between polls; pass the target instance explicitly instead.
        poll_until(lambda: result_has_no_configuration(instance_info))
        poll_until(lambda: result_has_no_configuration(configuration_instance))
        instance = instance_info.dbaas.instances.get(instance_info.id)
        assert_equal('RESTART_REQUIRED', instance.status)

    @test(depends_on=[test_unassign_configuration_from_instances])
    def test_assign_in_wrong_state(self):
        # test assigning a config to an instance in RESTART state
        assert_raises(exceptions.BadRequest,
                      instance_info.dbaas.instances.modify,
                      configuration_instance.id,
                      configuration=configuration_info.id)

    @test(depends_on=[test_assign_in_wrong_state])
    def test_no_instances_on_configuration(self):
        """test_no_instances_on_configuration"""
        # after unassigning, the configuration must report zero instances
        result = instance_info.dbaas.configurations.get(configuration_info.id)
        assert_equal(configuration_info.id, result.id)
        assert_equal(configuration_info.name, result.name)
        assert_equal(configuration_info.description, result.description)
        assert_equal(result.instance_count, 0)
        print(configuration_instance.id)
        print(instance_info.id)

    @test(depends_on=[test_unassign_configuration_from_instances])
    @time_out(120)
    def test_restart_service_should_return_active(self):
        """test that after restarting the instance it becomes active"""
        instance_info.dbaas.instances.restart(instance_info.id)
        resp, body = instance_info.dbaas.client.last_response
        assert_equal(resp.status, 202)

        def result_is_active():
            instance = instance_info.dbaas.instances.get(
                instance_info.id)
            if instance.status in CONFIG.running_status:
                return True
            else:
                assert_equal("REBOOT", instance.status)
                return False
        poll_until(result_is_active)

    @test(depends_on=[test_restart_service_should_return_active])
    def test_assign_config_and_name_to_instance_using_patch(self):
        """test_assign_config_and_name_to_instance_using_patch"""
        new_name = 'new_name'
        report = CONFIG.get_report()
        report.log("instance_info.id: %s" % instance_info.id)
        report.log("configuration_info: %s" % configuration_info)
        report.log("configuration_info.id: %s" % configuration_info.id)
        report.log("instance name:%s" % instance_info.name)
        report.log("instance new name:%s" % new_name)
        saved_name = instance_info.name
        config_id = configuration_info.id
        # a single PATCH can both attach a configuration and rename
        instance_info.dbaas.instances.update(instance_info.id,
                                             configuration=config_id,
                                             name=new_name)
        assert_equal(202, instance_info.dbaas.last_http_code)
        check = instance_info.dbaas.instances.get(instance_info.id)
        assert_equal(200, instance_info.dbaas.last_http_code)
        assert_equal(check.name, new_name)
        # restore instance name
        instance_info.dbaas.instances.update(instance_info.id,
                                             name=saved_name)
        assert_equal(202, instance_info.dbaas.last_http_code)
        instance = instance_info.dbaas.instances.get(instance_info.id)
        assert_equal('RESTART_REQUIRED', instance.status)
        # restart to be sure configuration is applied
        instance_info.dbaas.instances.restart(instance_info.id)
        assert_equal(202, instance_info.dbaas.last_http_code)
        sleep(2)

        def result_is_active():
            instance = instance_info.dbaas.instances.get(
                instance_info.id)
            if instance.status in CONFIG.running_status:
                return True
            else:
                assert_equal("REBOOT", instance.status)
                return False
        poll_until(result_is_active)
        # test assigning a configuration to an instance that
        # already has an assigned configuration with patch
        config_id = configuration_info.id
        assert_raises(exceptions.BadRequest,
                      instance_info.dbaas.instances.update,
                      instance_info.id, configuration=config_id)

    @test(runs_after=[test_assign_config_and_name_to_instance_using_patch])
    def test_unassign_configuration_after_patch(self):
        """Remove the configuration from the instance"""
        instance_info.dbaas.instances.update(instance_info.id,
                                             remove_configuration=True)
        assert_equal(202, instance_info.dbaas.last_http_code)
        instance = instance_info.dbaas.instances.get(instance_info.id)
        assert_equal('RESTART_REQUIRED', instance.status)
        # restart to be sure configuration has been unassigned
        instance_info.dbaas.instances.restart(instance_info.id)
        assert_equal(202, instance_info.dbaas.last_http_code)
        sleep(2)

        def result_is_active():
            instance = instance_info.dbaas.instances.get(
                instance_info.id)
            if instance.status in CONFIG.running_status:
                return True
            else:
                assert_equal("REBOOT", instance.status)
                return False
        poll_until(result_is_active)
        result = instance_info.dbaas.configurations.get(configuration_info.id)
        assert_equal(result.instance_count, 0)

    @test
    def test_unassign_configuration_from_invalid_instance_using_patch(self):
        # test unassign config group from an invalid instance
        invalid_id = "invalid-inst-id"
        try:
            instance_info.dbaas.instances.update(invalid_id,
                                                 remove_configuration=True)
        except exceptions.NotFound:
            resp, body = instance_info.dbaas.client.last_response
            assert_equal(resp.status, 404)
        else:
            # Bug fix: the test previously passed silently if update did
            # not raise; an invalid instance id must yield NotFound (404).
            assert_true(False, "Expected NotFound when unassigning a "
                               "configuration from an invalid instance id.")

    @test(runs_after=[test_unassign_configuration_after_patch])
    def test_delete_unassigned_configuration(self):
        """test_delete_unassigned_configuration"""
        instance_info.dbaas.configurations.delete(configuration_info.id)
        resp, body = instance_info.dbaas.client.last_response
        assert_equal(resp.status, 202)

    @test(depends_on=[test_delete_unassigned_configuration])
    @time_out(TIMEOUT_INSTANCE_DELETE)
    def test_delete_configuration_instance(self):
        """test_delete_configuration_instance"""
        instance_info.dbaas.instances.delete(configuration_instance.id)
        assert_equal(202, instance_info.dbaas.last_http_code)

        def instance_is_gone():
            try:
                instance_info.dbaas.instances.get(configuration_instance.id)
                return False
            except exceptions.NotFound:
                return True
        poll_until(instance_is_gone)
        assert_raises(exceptions.NotFound, instance_info.dbaas.instances.get,
                      configuration_instance.id)
|
normal
|
{
"blob_id": "120021e44f6df9745db35ea2f38f25acecca9252",
"index": 3201,
"step-1": "<mask token>\n\n\n@test(depends_on_classes=[AfterConfigurationsCreation], groups=[tests.\n DBAAS_API_CONFIGURATIONS])\nclass ListConfigurations(ConfigurationsTestBase):\n\n @test\n def test_configurations_list(self):\n result = instance_info.dbaas.configurations.list()\n for conf in result:\n with TypeCheck('Configuration', conf) as check:\n check.has_field('id', str)\n check.has_field('name', str)\n check.has_field('description', str)\n check.has_field('datastore_version_id', str)\n check.has_field('datastore_version_name', str)\n check.has_field('datastore_name', str)\n exists = [config for config in result if config.id ==\n configuration_info.id]\n assert_equal(1, len(exists))\n configuration = exists[0]\n assert_equal(configuration.id, configuration_info.id)\n assert_equal(configuration.name, configuration_info.name)\n assert_equal(configuration.description, configuration_info.description)\n\n @test\n def test_configurations_list_for_instance(self):\n instance = instance_info.dbaas.instances.get(instance_info.id)\n assert_equal(instance.configuration['id'], configuration_info.id)\n assert_equal(instance.configuration['name'], configuration_info.name)\n assert_equal(2, len(instance.configuration['links']))\n link = instance.configuration['links'][0]\n global configuration_href\n configuration_href = link['href']\n\n @test\n def test_get_default_configuration_on_instance(self):\n result = instance_info.dbaas.instances.configuration(instance_info.id)\n global configuration_default\n configuration_default = result\n assert_not_equal(None, result.configuration)\n\n @test\n def test_changing_configuration_with_nondynamic_parameter(self):\n \"\"\"test_changing_configuration_with_nondynamic_parameter\"\"\"\n expected_configs = self.expected_default_datastore_configs()\n values = json.dumps(expected_configs.get('nondynamic_parameter'))\n instance_info.dbaas.configurations.update(configuration_info.id, values\n )\n resp, body = 
instance_info.dbaas.client.last_response\n assert_equal(resp.status, 202)\n instance_info.dbaas.configurations.get(configuration_info.id)\n resp, body = instance_info.dbaas.client.last_response\n assert_equal(resp.status, 200)\n\n @test(depends_on=[test_changing_configuration_with_nondynamic_parameter])\n @time_out(20)\n def test_waiting_for_instance_in_restart_required(self):\n \"\"\"test_waiting_for_instance_in_restart_required\"\"\"\n\n def result_is_not_active():\n instance = instance_info.dbaas.instances.get(instance_info.id)\n if instance.status in CONFIG.running_status:\n return False\n else:\n return True\n poll_until(result_is_not_active)\n instance = instance_info.dbaas.instances.get(instance_info.id)\n resp, body = instance_info.dbaas.client.last_response\n assert_equal(resp.status, 200)\n assert_equal('RESTART_REQUIRED', instance.status)\n\n @test(depends_on=[test_waiting_for_instance_in_restart_required])\n def test_restart_service_should_return_active(self):\n \"\"\"test_restart_service_should_return_active\"\"\"\n instance_info.dbaas.instances.restart(instance_info.id)\n resp, body = instance_info.dbaas.client.last_response\n assert_equal(resp.status, 202)\n\n def result_is_active():\n instance = instance_info.dbaas.instances.get(instance_info.id)\n if instance.status in CONFIG.running_status:\n return True\n else:\n assert_true(instance.status in ['REBOOT', 'SHUTDOWN'])\n return False\n poll_until(result_is_active)\n\n @test(depends_on=[test_restart_service_should_return_active])\n @time_out(30)\n def test_get_configuration_details_from_instance_validation(self):\n \"\"\"test_get_configuration_details_from_instance_validation\"\"\"\n inst = instance_info.dbaas.instances.get(instance_info.id)\n configuration_id = inst.configuration['id']\n assert_not_equal(None, inst.configuration['id'])\n _test_configuration_is_applied_to_instance(instance_info,\n configuration_id)\n\n @test(depends_on=[test_configurations_list])\n def 
test_compare_list_and_details_timestamps(self):\n result = instance_info.dbaas.configurations.list()\n list_config = [config for config in result if config.id ==\n configuration_info.id]\n assert_equal(1, len(list_config))\n details_config = instance_info.dbaas.configurations.get(\n configuration_info.id)\n assert_equal(list_config[0].created, details_config.created)\n assert_equal(list_config[0].updated, details_config.updated)\n\n\n@test(depends_on_classes=[ListConfigurations], groups=[tests.\n DBAAS_API_CONFIGURATIONS])\nclass StartInstanceWithConfiguration(ConfigurationsTestBase):\n\n @test\n def test_start_instance_with_configuration(self):\n \"\"\"test that a new instance will apply the configuration on create\"\"\"\n global configuration_instance\n databases = []\n databases.append({'name': 'firstdbconfig', 'character_set':\n 'latin2', 'collate': 'latin2_general_ci'})\n databases.append({'name': 'db2'})\n configuration_instance.databases = databases\n users = []\n users.append({'name': 'liteconf', 'password': 'liteconfpass',\n 'databases': [{'name': 'firstdbconfig'}]})\n configuration_instance.users = users\n configuration_instance.name = 'TEST_' + str(uuid.uuid4()) + '_config'\n flavor_href = instance_info.dbaas_flavor_href\n configuration_instance.dbaas_flavor_href = flavor_href\n configuration_instance.volume = instance_info.volume\n configuration_instance.dbaas_datastore = instance_info.dbaas_datastore\n configuration_instance.dbaas_datastore_version = (instance_info.\n dbaas_datastore_version)\n configuration_instance.nics = instance_info.nics\n result = instance_info.dbaas.instances.create(configuration_instance\n .name, configuration_instance.dbaas_flavor_href,\n configuration_instance.volume, configuration_instance.databases,\n configuration_instance.users, nics=configuration_instance.nics,\n availability_zone='nova', datastore=configuration_instance.\n dbaas_datastore, datastore_version=configuration_instance.\n dbaas_datastore_version, 
configuration=configuration_href)\n assert_equal(200, instance_info.dbaas.last_http_code)\n assert_equal('BUILD', result.status)\n configuration_instance.id = result.id\n\n\n@test(depends_on_classes=[StartInstanceWithConfiguration], groups=[tests.\n DBAAS_API_CONFIGURATIONS])\nclass WaitForConfigurationInstanceToFinish(ConfigurationsTestBase):\n\n @test\n @time_out(TIMEOUT_INSTANCE_CREATE)\n def test_instance_with_configuration_active(self):\n \"\"\"wait for the instance created with configuration\"\"\"\n\n def result_is_active():\n instance = instance_info.dbaas.instances.get(configuration_instance\n .id)\n if instance.status in CONFIG.running_status:\n return True\n else:\n assert_equal('BUILD', instance.status)\n return False\n poll_until(result_is_active)\n\n @test(depends_on=[test_instance_with_configuration_active])\n @time_out(30)\n def test_get_configuration_details_from_instance_validation(self):\n \"\"\"Test configuration is applied correctly to the instance.\"\"\"\n inst = instance_info.dbaas.instances.get(configuration_instance.id)\n configuration_id = inst.configuration['id']\n assert_not_equal(None, configuration_id)\n _test_configuration_is_applied_to_instance(configuration_instance,\n configuration_id)\n\n\n@test(depends_on=[WaitForConfigurationInstanceToFinish], groups=[tests.\n DBAAS_API_CONFIGURATIONS])\nclass DeleteConfigurations(ConfigurationsTestBase):\n\n @before_class\n def setUp(self):\n config_param_name = sql_variables[1]\n instance_info.dbaas.configuration_parameters.get_parameter(\n instance_info.dbaas_datastore, instance_info.\n dbaas_datastore_version, config_param_name)\n resp, body = instance_info.dbaas.client.last_response\n print(resp)\n print(body)\n self.config_parameter_dict = json.loads(body.decode())\n\n @after_class(always_run=True)\n def tearDown(self):\n if instance_info.dbaas:\n ds = instance_info.dbaas_datastore\n ds_v = instance_info.dbaas_datastore_version\n version = instance_info.dbaas.datastore_versions.get(ds, 
ds_v)\n client = instance_info.dbaas_admin.mgmt_configs\n print(self.config_parameter_dict)\n client.create(version.id, self.config_parameter_dict['name'],\n self.config_parameter_dict['restart_required'], self.\n config_parameter_dict['type'], self.config_parameter_dict[\n 'max'], self.config_parameter_dict['min'])\n\n @test\n def test_delete_invalid_configuration_not_found(self):\n invalid_configuration_id = 'invalid-config-id'\n assert_raises(exceptions.NotFound, instance_info.dbaas.\n configurations.delete, invalid_configuration_id)\n\n @test(depends_on=[test_delete_invalid_configuration_not_found])\n def test_delete_configuration_parameter_with_mgmt_api(self):\n ds = instance_info.dbaas_datastore\n ds_v = instance_info.dbaas_datastore_version\n version = instance_info.dbaas.datastore_versions.get(ds, ds_v)\n client = instance_info.dbaas_admin.mgmt_configs\n config_param_name = self.config_parameter_dict['name']\n client.delete(version.id, config_param_name)\n assert_raises(exceptions.NotFound, instance_info.dbaas.\n configuration_parameters.get_parameter, ds, ds_v, config_param_name\n )\n\n @test(depends_on=[test_delete_configuration_parameter_with_mgmt_api])\n def test_unable_delete_instance_configurations(self):\n assert_raises(exceptions.BadRequest, instance_info.dbaas.\n configurations.delete, configuration_info.id)\n\n @test(depends_on=[test_unable_delete_instance_configurations])\n @time_out(30)\n def test_unassign_configuration_from_instances(self):\n \"\"\"test to unassign configuration from instance\"\"\"\n instance_info.dbaas.instances.update(configuration_instance.id,\n remove_configuration=True)\n resp, body = instance_info.dbaas.client.last_response\n assert_equal(resp.status, 202)\n instance_info.dbaas.instances.update(instance_info.id,\n remove_configuration=True)\n resp, body = instance_info.dbaas.client.last_response\n assert_equal(resp.status, 202)\n instance_info.dbaas.instances.get(instance_info.id)\n\n def result_has_no_configuration():\n 
instance = instance_info.dbaas.instances.get(inst_info.id)\n if hasattr(instance, 'configuration'):\n return False\n else:\n return True\n inst_info = instance_info\n poll_until(result_has_no_configuration)\n inst_info = configuration_instance\n poll_until(result_has_no_configuration)\n instance = instance_info.dbaas.instances.get(instance_info.id)\n assert_equal('RESTART_REQUIRED', instance.status)\n\n @test(depends_on=[test_unassign_configuration_from_instances])\n def test_assign_in_wrong_state(self):\n assert_raises(exceptions.BadRequest, instance_info.dbaas.instances.\n modify, configuration_instance.id, configuration=\n configuration_info.id)\n\n @test(depends_on=[test_assign_in_wrong_state])\n def test_no_instances_on_configuration(self):\n \"\"\"test_no_instances_on_configuration\"\"\"\n result = instance_info.dbaas.configurations.get(configuration_info.id)\n assert_equal(configuration_info.id, result.id)\n assert_equal(configuration_info.name, result.name)\n assert_equal(configuration_info.description, result.description)\n assert_equal(result.instance_count, 0)\n print(configuration_instance.id)\n print(instance_info.id)\n\n @test(depends_on=[test_unassign_configuration_from_instances])\n @time_out(120)\n def test_restart_service_should_return_active(self):\n \"\"\"test that after restarting the instance it becomes active\"\"\"\n instance_info.dbaas.instances.restart(instance_info.id)\n resp, body = instance_info.dbaas.client.last_response\n assert_equal(resp.status, 202)\n\n def result_is_active():\n instance = instance_info.dbaas.instances.get(instance_info.id)\n if instance.status in CONFIG.running_status:\n return True\n else:\n assert_equal('REBOOT', instance.status)\n return False\n poll_until(result_is_active)\n\n @test(depends_on=[test_restart_service_should_return_active])\n def test_assign_config_and_name_to_instance_using_patch(self):\n \"\"\"test_assign_config_and_name_to_instance_using_patch\"\"\"\n new_name = 'new_name'\n report = 
CONFIG.get_report()\n report.log('instance_info.id: %s' % instance_info.id)\n report.log('configuration_info: %s' % configuration_info)\n report.log('configuration_info.id: %s' % configuration_info.id)\n report.log('instance name:%s' % instance_info.name)\n report.log('instance new name:%s' % new_name)\n saved_name = instance_info.name\n config_id = configuration_info.id\n instance_info.dbaas.instances.update(instance_info.id,\n configuration=config_id, name=new_name)\n assert_equal(202, instance_info.dbaas.last_http_code)\n check = instance_info.dbaas.instances.get(instance_info.id)\n assert_equal(200, instance_info.dbaas.last_http_code)\n assert_equal(check.name, new_name)\n instance_info.dbaas.instances.update(instance_info.id, name=saved_name)\n assert_equal(202, instance_info.dbaas.last_http_code)\n instance = instance_info.dbaas.instances.get(instance_info.id)\n assert_equal('RESTART_REQUIRED', instance.status)\n instance_info.dbaas.instances.restart(instance_info.id)\n assert_equal(202, instance_info.dbaas.last_http_code)\n sleep(2)\n\n def result_is_active():\n instance = instance_info.dbaas.instances.get(instance_info.id)\n if instance.status in CONFIG.running_status:\n return True\n else:\n assert_equal('REBOOT', instance.status)\n return False\n poll_until(result_is_active)\n config_id = configuration_info.id\n assert_raises(exceptions.BadRequest, instance_info.dbaas.instances.\n update, instance_info.id, configuration=config_id)\n\n @test(runs_after=[test_assign_config_and_name_to_instance_using_patch])\n def test_unassign_configuration_after_patch(self):\n \"\"\"Remove the configuration from the instance\"\"\"\n instance_info.dbaas.instances.update(instance_info.id,\n remove_configuration=True)\n assert_equal(202, instance_info.dbaas.last_http_code)\n instance = instance_info.dbaas.instances.get(instance_info.id)\n assert_equal('RESTART_REQUIRED', instance.status)\n instance_info.dbaas.instances.restart(instance_info.id)\n assert_equal(202, 
instance_info.dbaas.last_http_code)\n sleep(2)\n\n def result_is_active():\n instance = instance_info.dbaas.instances.get(instance_info.id)\n if instance.status in CONFIG.running_status:\n return True\n else:\n assert_equal('REBOOT', instance.status)\n return False\n poll_until(result_is_active)\n result = instance_info.dbaas.configurations.get(configuration_info.id)\n assert_equal(result.instance_count, 0)\n\n @test\n def test_unassign_configuration_from_invalid_instance_using_patch(self):\n invalid_id = 'invalid-inst-id'\n try:\n instance_info.dbaas.instances.update(invalid_id,\n remove_configuration=True)\n except exceptions.NotFound:\n resp, body = instance_info.dbaas.client.last_response\n assert_equal(resp.status, 404)\n\n @test(runs_after=[test_unassign_configuration_after_patch])\n def test_delete_unassigned_configuration(self):\n \"\"\"test_delete_unassigned_configuration\"\"\"\n instance_info.dbaas.configurations.delete(configuration_info.id)\n resp, body = instance_info.dbaas.client.last_response\n assert_equal(resp.status, 202)\n\n @test(depends_on=[test_delete_unassigned_configuration])\n @time_out(TIMEOUT_INSTANCE_DELETE)\n def test_delete_configuration_instance(self):\n \"\"\"test_delete_configuration_instance\"\"\"\n instance_info.dbaas.instances.delete(configuration_instance.id)\n assert_equal(202, instance_info.dbaas.last_http_code)\n\n def instance_is_gone():\n try:\n instance_info.dbaas.instances.get(configuration_instance.id)\n return False\n except exceptions.NotFound:\n return True\n poll_until(instance_is_gone)\n assert_raises(exceptions.NotFound, instance_info.dbaas.instances.\n get, configuration_instance.id)\n",
"step-2": "<mask token>\n\n\n@test(depends_on_groups=[tests.DBAAS_API_BACKUPS], groups=[tests.\n DBAAS_API_CONFIGURATIONS])\nclass CreateConfigurations(ConfigurationsTestBase):\n\n @test\n def test_expected_configurations_parameters(self):\n \"\"\"Test get expected configurations parameters.\"\"\"\n allowed_attrs = ['configuration-parameters']\n instance_info.dbaas.configuration_parameters.parameters(instance_info\n .dbaas_datastore, instance_info.dbaas_datastore_version)\n resp, body = instance_info.dbaas.client.last_response\n attrcheck = AttrCheck()\n config_parameters_dict = json.loads(body.decode())\n attrcheck.contains_allowed_attrs(config_parameters_dict,\n allowed_attrs, msg='Configurations parameters')\n config_params_list = config_parameters_dict['configuration-parameters']\n config_param_keys = []\n for param in config_params_list:\n config_param_keys.append(param['name'])\n expected_configs = self.expected_default_datastore_configs()\n expected_config_params = expected_configs.get('parameters_list')\n msg = 'check for duplicate configuration parameters'\n assert_equal(len(config_param_keys), len(set(config_param_keys)), msg)\n for expected_config_item in expected_config_params:\n assert_true(expected_config_item in config_param_keys)\n <mask token>\n\n @test\n def test_configurations_create_invalid_values(self):\n \"\"\"Test create configurations with invalid values.\"\"\"\n values = '{\"this_is_invalid\": 123}'\n try:\n instance_info.dbaas.configurations.create(CONFIG_NAME, values,\n CONFIG_DESC)\n except exceptions.UnprocessableEntity:\n resp, body = instance_info.dbaas.client.last_response\n assert_equal(resp.status, 422)\n\n @test\n def test_configurations_create_invalid_value_type(self):\n \"\"\"Test create configuration with invalid value type.\"\"\"\n values = '{\"key_buffer_size\": \"this is a string not int\"}'\n assert_unprocessable(instance_info.dbaas.configurations.create,\n CONFIG_NAME, values, CONFIG_DESC)\n <mask token>\n <mask token>\n\n 
@test(runs_after=[test_valid_configurations_create])\n def test_appending_to_existing_configuration(self):\n \"\"\"test_appending_to_existing_configuration\"\"\"\n expected_configs = self.expected_default_datastore_configs()\n values = json.dumps(expected_configs.get('appending_values'))\n if not CONFIG.fake_mode:\n sleep(1)\n instance_info.dbaas.configurations.edit(configuration_info.id, values)\n resp, body = instance_info.dbaas.client.last_response\n assert_equal(resp.status, 200)\n\n\n@test(depends_on_classes=[CreateConfigurations], groups=[tests.\n DBAAS_API_CONFIGURATIONS])\nclass AfterConfigurationsCreation(ConfigurationsTestBase):\n\n @test\n def test_assign_configuration_to_invalid_instance(self):\n \"\"\"test assigning to an instance that does not exist\"\"\"\n invalid_id = 'invalid-inst-id'\n try:\n instance_info.dbaas.instances.modify(invalid_id,\n configuration_info.id)\n except exceptions.NotFound:\n resp, body = instance_info.dbaas.client.last_response\n assert_equal(resp.status, 404)\n\n @test\n def test_assign_configuration_to_valid_instance(self):\n \"\"\"test assigning a configuration to an instance\"\"\"\n print('instance_info.id: %s' % instance_info.id)\n print('configuration_info: %s' % configuration_info)\n print('configuration_info.id: %s' % configuration_info.id)\n config_id = configuration_info.id\n instance_info.dbaas.instances.modify(instance_info.id,\n configuration=config_id)\n resp, body = instance_info.dbaas.client.last_response\n assert_equal(resp.status, 202)\n\n @test(depends_on=[test_assign_configuration_to_valid_instance])\n def test_assign_configuration_to_instance_with_config(self):\n \"\"\"test assigning a configuration to an instance conflicts\"\"\"\n config_id = configuration_info.id\n assert_raises(exceptions.BadRequest, instance_info.dbaas.instances.\n modify, instance_info.id, configuration=config_id)\n\n @test(depends_on=[test_assign_configuration_to_valid_instance])\n @time_out(30)\n def 
test_get_configuration_details_from_instance_validation(self):\n \"\"\"validate the configuration after attaching\"\"\"\n print('instance_info.id: %s' % instance_info.id)\n inst = instance_info.dbaas.instances.get(instance_info.id)\n configuration_id = inst.configuration['id']\n print('configuration_info: %s' % configuration_id)\n assert_not_equal(None, configuration_id)\n _test_configuration_is_applied_to_instance(instance_info,\n configuration_id)\n\n @test(depends_on=[test_get_configuration_details_from_instance_validation])\n def test_configurations_get(self):\n \"\"\"test that the instance shows up on the assigned configuration\"\"\"\n result = instance_info.dbaas.configurations.get(configuration_info.id)\n assert_equal(configuration_info.id, result.id)\n assert_equal(configuration_info.name, result.name)\n assert_equal(configuration_info.description, result.description)\n with TypeCheck('configuration', result) as check:\n check.has_field('id', str)\n check.has_field('name', str)\n check.has_field('description', str)\n check.has_field('values', dict)\n check.has_field('created', str)\n check.has_field('updated', str)\n check.has_field('instance_count', int)\n print(result.values)\n assert_true(_is_valid_timestamp(result.created))\n assert_true(_is_valid_timestamp(result.updated))\n if not CONFIG.fake_mode:\n assert_not_equal(result.created, result.updated)\n assert_equal(result.instance_count, 1)\n with CollectionCheck('configuration_values', result.values) as check:\n for item_key, item_val in result.values.items():\n print('item_key: %s' % item_key)\n print('item_val: %s' % item_val)\n dbaas = instance_info.dbaas\n param = dbaas.configuration_parameters.get_parameter(\n instance_info.dbaas_datastore, instance_info.\n dbaas_datastore_version, item_key)\n if param.type == 'integer':\n check.has_element(item_key, int)\n if param.type == 'string':\n check.has_element(item_key, str)\n if param.type == 'boolean':\n check.has_element(item_key, bool)\n reqs = 
Requirements(is_admin=False)\n test_auth_user = instance_info.user.auth_user\n other_user = CONFIG.users.find_user(reqs, black_list=[test_auth_user])\n other_user_tenant_id = other_user.tenant_id\n client_tenant_id = instance_info.user.tenant_id\n if other_user_tenant_id == client_tenant_id:\n other_user = CONFIG.users.find_user(reqs, black_list=[\n instance_info.user.auth_user, other_user])\n print(other_user)\n print(other_user.__dict__)\n other_client = create_dbaas_client(other_user)\n assert_raises(exceptions.NotFound, other_client.configurations.get,\n configuration_info.id)\n\n\n@test(depends_on_classes=[AfterConfigurationsCreation], groups=[tests.\n DBAAS_API_CONFIGURATIONS])\nclass ListConfigurations(ConfigurationsTestBase):\n\n @test\n def test_configurations_list(self):\n result = instance_info.dbaas.configurations.list()\n for conf in result:\n with TypeCheck('Configuration', conf) as check:\n check.has_field('id', str)\n check.has_field('name', str)\n check.has_field('description', str)\n check.has_field('datastore_version_id', str)\n check.has_field('datastore_version_name', str)\n check.has_field('datastore_name', str)\n exists = [config for config in result if config.id ==\n configuration_info.id]\n assert_equal(1, len(exists))\n configuration = exists[0]\n assert_equal(configuration.id, configuration_info.id)\n assert_equal(configuration.name, configuration_info.name)\n assert_equal(configuration.description, configuration_info.description)\n\n @test\n def test_configurations_list_for_instance(self):\n instance = instance_info.dbaas.instances.get(instance_info.id)\n assert_equal(instance.configuration['id'], configuration_info.id)\n assert_equal(instance.configuration['name'], configuration_info.name)\n assert_equal(2, len(instance.configuration['links']))\n link = instance.configuration['links'][0]\n global configuration_href\n configuration_href = link['href']\n\n @test\n def test_get_default_configuration_on_instance(self):\n result = 
instance_info.dbaas.instances.configuration(instance_info.id)\n global configuration_default\n configuration_default = result\n assert_not_equal(None, result.configuration)\n\n @test\n def test_changing_configuration_with_nondynamic_parameter(self):\n \"\"\"test_changing_configuration_with_nondynamic_parameter\"\"\"\n expected_configs = self.expected_default_datastore_configs()\n values = json.dumps(expected_configs.get('nondynamic_parameter'))\n instance_info.dbaas.configurations.update(configuration_info.id, values\n )\n resp, body = instance_info.dbaas.client.last_response\n assert_equal(resp.status, 202)\n instance_info.dbaas.configurations.get(configuration_info.id)\n resp, body = instance_info.dbaas.client.last_response\n assert_equal(resp.status, 200)\n\n @test(depends_on=[test_changing_configuration_with_nondynamic_parameter])\n @time_out(20)\n def test_waiting_for_instance_in_restart_required(self):\n \"\"\"test_waiting_for_instance_in_restart_required\"\"\"\n\n def result_is_not_active():\n instance = instance_info.dbaas.instances.get(instance_info.id)\n if instance.status in CONFIG.running_status:\n return False\n else:\n return True\n poll_until(result_is_not_active)\n instance = instance_info.dbaas.instances.get(instance_info.id)\n resp, body = instance_info.dbaas.client.last_response\n assert_equal(resp.status, 200)\n assert_equal('RESTART_REQUIRED', instance.status)\n\n @test(depends_on=[test_waiting_for_instance_in_restart_required])\n def test_restart_service_should_return_active(self):\n \"\"\"test_restart_service_should_return_active\"\"\"\n instance_info.dbaas.instances.restart(instance_info.id)\n resp, body = instance_info.dbaas.client.last_response\n assert_equal(resp.status, 202)\n\n def result_is_active():\n instance = instance_info.dbaas.instances.get(instance_info.id)\n if instance.status in CONFIG.running_status:\n return True\n else:\n assert_true(instance.status in ['REBOOT', 'SHUTDOWN'])\n return False\n 
poll_until(result_is_active)\n\n @test(depends_on=[test_restart_service_should_return_active])\n @time_out(30)\n def test_get_configuration_details_from_instance_validation(self):\n \"\"\"test_get_configuration_details_from_instance_validation\"\"\"\n inst = instance_info.dbaas.instances.get(instance_info.id)\n configuration_id = inst.configuration['id']\n assert_not_equal(None, inst.configuration['id'])\n _test_configuration_is_applied_to_instance(instance_info,\n configuration_id)\n\n @test(depends_on=[test_configurations_list])\n def test_compare_list_and_details_timestamps(self):\n result = instance_info.dbaas.configurations.list()\n list_config = [config for config in result if config.id ==\n configuration_info.id]\n assert_equal(1, len(list_config))\n details_config = instance_info.dbaas.configurations.get(\n configuration_info.id)\n assert_equal(list_config[0].created, details_config.created)\n assert_equal(list_config[0].updated, details_config.updated)\n\n\n@test(depends_on_classes=[ListConfigurations], groups=[tests.\n DBAAS_API_CONFIGURATIONS])\nclass StartInstanceWithConfiguration(ConfigurationsTestBase):\n\n @test\n def test_start_instance_with_configuration(self):\n \"\"\"test that a new instance will apply the configuration on create\"\"\"\n global configuration_instance\n databases = []\n databases.append({'name': 'firstdbconfig', 'character_set':\n 'latin2', 'collate': 'latin2_general_ci'})\n databases.append({'name': 'db2'})\n configuration_instance.databases = databases\n users = []\n users.append({'name': 'liteconf', 'password': 'liteconfpass',\n 'databases': [{'name': 'firstdbconfig'}]})\n configuration_instance.users = users\n configuration_instance.name = 'TEST_' + str(uuid.uuid4()) + '_config'\n flavor_href = instance_info.dbaas_flavor_href\n configuration_instance.dbaas_flavor_href = flavor_href\n configuration_instance.volume = instance_info.volume\n configuration_instance.dbaas_datastore = instance_info.dbaas_datastore\n 
configuration_instance.dbaas_datastore_version = (instance_info.\n dbaas_datastore_version)\n configuration_instance.nics = instance_info.nics\n result = instance_info.dbaas.instances.create(configuration_instance\n .name, configuration_instance.dbaas_flavor_href,\n configuration_instance.volume, configuration_instance.databases,\n configuration_instance.users, nics=configuration_instance.nics,\n availability_zone='nova', datastore=configuration_instance.\n dbaas_datastore, datastore_version=configuration_instance.\n dbaas_datastore_version, configuration=configuration_href)\n assert_equal(200, instance_info.dbaas.last_http_code)\n assert_equal('BUILD', result.status)\n configuration_instance.id = result.id\n\n\n@test(depends_on_classes=[StartInstanceWithConfiguration], groups=[tests.\n DBAAS_API_CONFIGURATIONS])\nclass WaitForConfigurationInstanceToFinish(ConfigurationsTestBase):\n\n @test\n @time_out(TIMEOUT_INSTANCE_CREATE)\n def test_instance_with_configuration_active(self):\n \"\"\"wait for the instance created with configuration\"\"\"\n\n def result_is_active():\n instance = instance_info.dbaas.instances.get(configuration_instance\n .id)\n if instance.status in CONFIG.running_status:\n return True\n else:\n assert_equal('BUILD', instance.status)\n return False\n poll_until(result_is_active)\n\n @test(depends_on=[test_instance_with_configuration_active])\n @time_out(30)\n def test_get_configuration_details_from_instance_validation(self):\n \"\"\"Test configuration is applied correctly to the instance.\"\"\"\n inst = instance_info.dbaas.instances.get(configuration_instance.id)\n configuration_id = inst.configuration['id']\n assert_not_equal(None, configuration_id)\n _test_configuration_is_applied_to_instance(configuration_instance,\n configuration_id)\n\n\n@test(depends_on=[WaitForConfigurationInstanceToFinish], groups=[tests.\n DBAAS_API_CONFIGURATIONS])\nclass DeleteConfigurations(ConfigurationsTestBase):\n\n @before_class\n def setUp(self):\n 
config_param_name = sql_variables[1]\n instance_info.dbaas.configuration_parameters.get_parameter(\n instance_info.dbaas_datastore, instance_info.\n dbaas_datastore_version, config_param_name)\n resp, body = instance_info.dbaas.client.last_response\n print(resp)\n print(body)\n self.config_parameter_dict = json.loads(body.decode())\n\n @after_class(always_run=True)\n def tearDown(self):\n if instance_info.dbaas:\n ds = instance_info.dbaas_datastore\n ds_v = instance_info.dbaas_datastore_version\n version = instance_info.dbaas.datastore_versions.get(ds, ds_v)\n client = instance_info.dbaas_admin.mgmt_configs\n print(self.config_parameter_dict)\n client.create(version.id, self.config_parameter_dict['name'],\n self.config_parameter_dict['restart_required'], self.\n config_parameter_dict['type'], self.config_parameter_dict[\n 'max'], self.config_parameter_dict['min'])\n\n @test\n def test_delete_invalid_configuration_not_found(self):\n invalid_configuration_id = 'invalid-config-id'\n assert_raises(exceptions.NotFound, instance_info.dbaas.\n configurations.delete, invalid_configuration_id)\n\n @test(depends_on=[test_delete_invalid_configuration_not_found])\n def test_delete_configuration_parameter_with_mgmt_api(self):\n ds = instance_info.dbaas_datastore\n ds_v = instance_info.dbaas_datastore_version\n version = instance_info.dbaas.datastore_versions.get(ds, ds_v)\n client = instance_info.dbaas_admin.mgmt_configs\n config_param_name = self.config_parameter_dict['name']\n client.delete(version.id, config_param_name)\n assert_raises(exceptions.NotFound, instance_info.dbaas.\n configuration_parameters.get_parameter, ds, ds_v, config_param_name\n )\n\n @test(depends_on=[test_delete_configuration_parameter_with_mgmt_api])\n def test_unable_delete_instance_configurations(self):\n assert_raises(exceptions.BadRequest, instance_info.dbaas.\n configurations.delete, configuration_info.id)\n\n @test(depends_on=[test_unable_delete_instance_configurations])\n @time_out(30)\n def 
test_unassign_configuration_from_instances(self):\n \"\"\"test to unassign configuration from instance\"\"\"\n instance_info.dbaas.instances.update(configuration_instance.id,\n remove_configuration=True)\n resp, body = instance_info.dbaas.client.last_response\n assert_equal(resp.status, 202)\n instance_info.dbaas.instances.update(instance_info.id,\n remove_configuration=True)\n resp, body = instance_info.dbaas.client.last_response\n assert_equal(resp.status, 202)\n instance_info.dbaas.instances.get(instance_info.id)\n\n def result_has_no_configuration():\n instance = instance_info.dbaas.instances.get(inst_info.id)\n if hasattr(instance, 'configuration'):\n return False\n else:\n return True\n inst_info = instance_info\n poll_until(result_has_no_configuration)\n inst_info = configuration_instance\n poll_until(result_has_no_configuration)\n instance = instance_info.dbaas.instances.get(instance_info.id)\n assert_equal('RESTART_REQUIRED', instance.status)\n\n @test(depends_on=[test_unassign_configuration_from_instances])\n def test_assign_in_wrong_state(self):\n assert_raises(exceptions.BadRequest, instance_info.dbaas.instances.\n modify, configuration_instance.id, configuration=\n configuration_info.id)\n\n @test(depends_on=[test_assign_in_wrong_state])\n def test_no_instances_on_configuration(self):\n \"\"\"test_no_instances_on_configuration\"\"\"\n result = instance_info.dbaas.configurations.get(configuration_info.id)\n assert_equal(configuration_info.id, result.id)\n assert_equal(configuration_info.name, result.name)\n assert_equal(configuration_info.description, result.description)\n assert_equal(result.instance_count, 0)\n print(configuration_instance.id)\n print(instance_info.id)\n\n @test(depends_on=[test_unassign_configuration_from_instances])\n @time_out(120)\n def test_restart_service_should_return_active(self):\n \"\"\"test that after restarting the instance it becomes active\"\"\"\n instance_info.dbaas.instances.restart(instance_info.id)\n resp, body = 
instance_info.dbaas.client.last_response\n assert_equal(resp.status, 202)\n\n def result_is_active():\n instance = instance_info.dbaas.instances.get(instance_info.id)\n if instance.status in CONFIG.running_status:\n return True\n else:\n assert_equal('REBOOT', instance.status)\n return False\n poll_until(result_is_active)\n\n @test(depends_on=[test_restart_service_should_return_active])\n def test_assign_config_and_name_to_instance_using_patch(self):\n \"\"\"test_assign_config_and_name_to_instance_using_patch\"\"\"\n new_name = 'new_name'\n report = CONFIG.get_report()\n report.log('instance_info.id: %s' % instance_info.id)\n report.log('configuration_info: %s' % configuration_info)\n report.log('configuration_info.id: %s' % configuration_info.id)\n report.log('instance name:%s' % instance_info.name)\n report.log('instance new name:%s' % new_name)\n saved_name = instance_info.name\n config_id = configuration_info.id\n instance_info.dbaas.instances.update(instance_info.id,\n configuration=config_id, name=new_name)\n assert_equal(202, instance_info.dbaas.last_http_code)\n check = instance_info.dbaas.instances.get(instance_info.id)\n assert_equal(200, instance_info.dbaas.last_http_code)\n assert_equal(check.name, new_name)\n instance_info.dbaas.instances.update(instance_info.id, name=saved_name)\n assert_equal(202, instance_info.dbaas.last_http_code)\n instance = instance_info.dbaas.instances.get(instance_info.id)\n assert_equal('RESTART_REQUIRED', instance.status)\n instance_info.dbaas.instances.restart(instance_info.id)\n assert_equal(202, instance_info.dbaas.last_http_code)\n sleep(2)\n\n def result_is_active():\n instance = instance_info.dbaas.instances.get(instance_info.id)\n if instance.status in CONFIG.running_status:\n return True\n else:\n assert_equal('REBOOT', instance.status)\n return False\n poll_until(result_is_active)\n config_id = configuration_info.id\n assert_raises(exceptions.BadRequest, instance_info.dbaas.instances.\n update, instance_info.id, 
configuration=config_id)\n\n @test(runs_after=[test_assign_config_and_name_to_instance_using_patch])\n def test_unassign_configuration_after_patch(self):\n \"\"\"Remove the configuration from the instance\"\"\"\n instance_info.dbaas.instances.update(instance_info.id,\n remove_configuration=True)\n assert_equal(202, instance_info.dbaas.last_http_code)\n instance = instance_info.dbaas.instances.get(instance_info.id)\n assert_equal('RESTART_REQUIRED', instance.status)\n instance_info.dbaas.instances.restart(instance_info.id)\n assert_equal(202, instance_info.dbaas.last_http_code)\n sleep(2)\n\n def result_is_active():\n instance = instance_info.dbaas.instances.get(instance_info.id)\n if instance.status in CONFIG.running_status:\n return True\n else:\n assert_equal('REBOOT', instance.status)\n return False\n poll_until(result_is_active)\n result = instance_info.dbaas.configurations.get(configuration_info.id)\n assert_equal(result.instance_count, 0)\n\n @test\n def test_unassign_configuration_from_invalid_instance_using_patch(self):\n invalid_id = 'invalid-inst-id'\n try:\n instance_info.dbaas.instances.update(invalid_id,\n remove_configuration=True)\n except exceptions.NotFound:\n resp, body = instance_info.dbaas.client.last_response\n assert_equal(resp.status, 404)\n\n @test(runs_after=[test_unassign_configuration_after_patch])\n def test_delete_unassigned_configuration(self):\n \"\"\"test_delete_unassigned_configuration\"\"\"\n instance_info.dbaas.configurations.delete(configuration_info.id)\n resp, body = instance_info.dbaas.client.last_response\n assert_equal(resp.status, 202)\n\n @test(depends_on=[test_delete_unassigned_configuration])\n @time_out(TIMEOUT_INSTANCE_DELETE)\n def test_delete_configuration_instance(self):\n \"\"\"test_delete_configuration_instance\"\"\"\n instance_info.dbaas.instances.delete(configuration_instance.id)\n assert_equal(202, instance_info.dbaas.last_http_code)\n\n def instance_is_gone():\n try:\n 
instance_info.dbaas.instances.get(configuration_instance.id)\n return False\n except exceptions.NotFound:\n return True\n poll_until(instance_is_gone)\n assert_raises(exceptions.NotFound, instance_info.dbaas.instances.\n get, configuration_instance.id)\n",
"step-3": "<mask token>\n\n\n@test(depends_on_groups=[tests.DBAAS_API_BACKUPS], groups=[tests.\n DBAAS_API_CONFIGURATIONS])\nclass CreateConfigurations(ConfigurationsTestBase):\n\n @test\n def test_expected_configurations_parameters(self):\n \"\"\"Test get expected configurations parameters.\"\"\"\n allowed_attrs = ['configuration-parameters']\n instance_info.dbaas.configuration_parameters.parameters(instance_info\n .dbaas_datastore, instance_info.dbaas_datastore_version)\n resp, body = instance_info.dbaas.client.last_response\n attrcheck = AttrCheck()\n config_parameters_dict = json.loads(body.decode())\n attrcheck.contains_allowed_attrs(config_parameters_dict,\n allowed_attrs, msg='Configurations parameters')\n config_params_list = config_parameters_dict['configuration-parameters']\n config_param_keys = []\n for param in config_params_list:\n config_param_keys.append(param['name'])\n expected_configs = self.expected_default_datastore_configs()\n expected_config_params = expected_configs.get('parameters_list')\n msg = 'check for duplicate configuration parameters'\n assert_equal(len(config_param_keys), len(set(config_param_keys)), msg)\n for expected_config_item in expected_config_params:\n assert_true(expected_config_item in config_param_keys)\n\n @test\n def test_expected_get_configuration_parameter(self):\n param_name = 'key_buffer_size'\n allowed_config_params = ['name', 'restart_required', 'max', 'min',\n 'type', 'deleted', 'deleted_at', 'datastore_version_id']\n param = instance_info.dbaas.configuration_parameters.get_parameter(\n instance_info.dbaas_datastore, instance_info.\n dbaas_datastore_version, param_name)\n resp, body = instance_info.dbaas.client.last_response\n print('params: %s' % param)\n print('resp: %s' % resp)\n print('body: %s' % body)\n attrcheck = AttrCheck()\n config_parameter_dict = json.loads(body.decode())\n print('config_parameter_dict: %s' % config_parameter_dict)\n attrcheck.contains_allowed_attrs(config_parameter_dict,\n 
allowed_config_params, msg='Get Configuration parameter')\n assert_equal(param_name, config_parameter_dict['name'])\n with TypeCheck('ConfigurationParameter', param) as parameter:\n parameter.has_field('name', str)\n parameter.has_field('restart_required', bool)\n parameter.has_field('max', int)\n parameter.has_field('min', int)\n parameter.has_field('type', str)\n parameter.has_field('datastore_version_id', str)\n\n @test\n def test_configurations_create_invalid_values(self):\n \"\"\"Test create configurations with invalid values.\"\"\"\n values = '{\"this_is_invalid\": 123}'\n try:\n instance_info.dbaas.configurations.create(CONFIG_NAME, values,\n CONFIG_DESC)\n except exceptions.UnprocessableEntity:\n resp, body = instance_info.dbaas.client.last_response\n assert_equal(resp.status, 422)\n\n @test\n def test_configurations_create_invalid_value_type(self):\n \"\"\"Test create configuration with invalid value type.\"\"\"\n values = '{\"key_buffer_size\": \"this is a string not int\"}'\n assert_unprocessable(instance_info.dbaas.configurations.create,\n CONFIG_NAME, values, CONFIG_DESC)\n\n @test\n def test_configurations_create_value_out_of_bounds(self):\n \"\"\"Test create configuration with value out of bounds.\"\"\"\n expected_configs = self.expected_default_datastore_configs()\n values = json.dumps(expected_configs.get('out_of_bounds_over'))\n assert_unprocessable(instance_info.dbaas.configurations.create,\n CONFIG_NAME, values, CONFIG_DESC)\n values = json.dumps(expected_configs.get('out_of_bounds_under'))\n assert_unprocessable(instance_info.dbaas.configurations.create,\n CONFIG_NAME, values, CONFIG_DESC)\n\n @test\n def test_valid_configurations_create(self):\n \"\"\"create a configuration with valid parameters from config.\"\"\"\n expected_configs = self.expected_default_datastore_configs()\n values = json.dumps(expected_configs.get('valid_values'))\n expected_values = json.loads(values)\n result = instance_info.dbaas.configurations.create(CONFIG_NAME,\n 
values, CONFIG_DESC, datastore=instance_info.dbaas_datastore,\n datastore_version=instance_info.dbaas_datastore_version)\n resp, body = instance_info.dbaas.client.last_response\n assert_equal(resp.status, 200)\n with TypeCheck('Configuration', result) as configuration:\n configuration.has_field('name', str)\n configuration.has_field('description', str)\n configuration.has_field('values', dict)\n configuration.has_field('datastore_name', str)\n configuration.has_field('datastore_version_id', str)\n configuration.has_field('datastore_version_name', str)\n global configuration_info\n configuration_info = result\n assert_equal(configuration_info.name, CONFIG_NAME)\n assert_equal(configuration_info.description, CONFIG_DESC)\n assert_equal(configuration_info.values, expected_values)\n\n @test(runs_after=[test_valid_configurations_create])\n def test_appending_to_existing_configuration(self):\n \"\"\"test_appending_to_existing_configuration\"\"\"\n expected_configs = self.expected_default_datastore_configs()\n values = json.dumps(expected_configs.get('appending_values'))\n if not CONFIG.fake_mode:\n sleep(1)\n instance_info.dbaas.configurations.edit(configuration_info.id, values)\n resp, body = instance_info.dbaas.client.last_response\n assert_equal(resp.status, 200)\n\n\n@test(depends_on_classes=[CreateConfigurations], groups=[tests.\n DBAAS_API_CONFIGURATIONS])\nclass AfterConfigurationsCreation(ConfigurationsTestBase):\n\n @test\n def test_assign_configuration_to_invalid_instance(self):\n \"\"\"test assigning to an instance that does not exist\"\"\"\n invalid_id = 'invalid-inst-id'\n try:\n instance_info.dbaas.instances.modify(invalid_id,\n configuration_info.id)\n except exceptions.NotFound:\n resp, body = instance_info.dbaas.client.last_response\n assert_equal(resp.status, 404)\n\n @test\n def test_assign_configuration_to_valid_instance(self):\n \"\"\"test assigning a configuration to an instance\"\"\"\n print('instance_info.id: %s' % instance_info.id)\n 
print('configuration_info: %s' % configuration_info)\n print('configuration_info.id: %s' % configuration_info.id)\n config_id = configuration_info.id\n instance_info.dbaas.instances.modify(instance_info.id,\n configuration=config_id)\n resp, body = instance_info.dbaas.client.last_response\n assert_equal(resp.status, 202)\n\n @test(depends_on=[test_assign_configuration_to_valid_instance])\n def test_assign_configuration_to_instance_with_config(self):\n \"\"\"test assigning a configuration to an instance conflicts\"\"\"\n config_id = configuration_info.id\n assert_raises(exceptions.BadRequest, instance_info.dbaas.instances.\n modify, instance_info.id, configuration=config_id)\n\n @test(depends_on=[test_assign_configuration_to_valid_instance])\n @time_out(30)\n def test_get_configuration_details_from_instance_validation(self):\n \"\"\"validate the configuration after attaching\"\"\"\n print('instance_info.id: %s' % instance_info.id)\n inst = instance_info.dbaas.instances.get(instance_info.id)\n configuration_id = inst.configuration['id']\n print('configuration_info: %s' % configuration_id)\n assert_not_equal(None, configuration_id)\n _test_configuration_is_applied_to_instance(instance_info,\n configuration_id)\n\n @test(depends_on=[test_get_configuration_details_from_instance_validation])\n def test_configurations_get(self):\n \"\"\"test that the instance shows up on the assigned configuration\"\"\"\n result = instance_info.dbaas.configurations.get(configuration_info.id)\n assert_equal(configuration_info.id, result.id)\n assert_equal(configuration_info.name, result.name)\n assert_equal(configuration_info.description, result.description)\n with TypeCheck('configuration', result) as check:\n check.has_field('id', str)\n check.has_field('name', str)\n check.has_field('description', str)\n check.has_field('values', dict)\n check.has_field('created', str)\n check.has_field('updated', str)\n check.has_field('instance_count', int)\n print(result.values)\n 
assert_true(_is_valid_timestamp(result.created))\n assert_true(_is_valid_timestamp(result.updated))\n if not CONFIG.fake_mode:\n assert_not_equal(result.created, result.updated)\n assert_equal(result.instance_count, 1)\n with CollectionCheck('configuration_values', result.values) as check:\n for item_key, item_val in result.values.items():\n print('item_key: %s' % item_key)\n print('item_val: %s' % item_val)\n dbaas = instance_info.dbaas\n param = dbaas.configuration_parameters.get_parameter(\n instance_info.dbaas_datastore, instance_info.\n dbaas_datastore_version, item_key)\n if param.type == 'integer':\n check.has_element(item_key, int)\n if param.type == 'string':\n check.has_element(item_key, str)\n if param.type == 'boolean':\n check.has_element(item_key, bool)\n reqs = Requirements(is_admin=False)\n test_auth_user = instance_info.user.auth_user\n other_user = CONFIG.users.find_user(reqs, black_list=[test_auth_user])\n other_user_tenant_id = other_user.tenant_id\n client_tenant_id = instance_info.user.tenant_id\n if other_user_tenant_id == client_tenant_id:\n other_user = CONFIG.users.find_user(reqs, black_list=[\n instance_info.user.auth_user, other_user])\n print(other_user)\n print(other_user.__dict__)\n other_client = create_dbaas_client(other_user)\n assert_raises(exceptions.NotFound, other_client.configurations.get,\n configuration_info.id)\n\n\n@test(depends_on_classes=[AfterConfigurationsCreation], groups=[tests.\n DBAAS_API_CONFIGURATIONS])\nclass ListConfigurations(ConfigurationsTestBase):\n\n @test\n def test_configurations_list(self):\n result = instance_info.dbaas.configurations.list()\n for conf in result:\n with TypeCheck('Configuration', conf) as check:\n check.has_field('id', str)\n check.has_field('name', str)\n check.has_field('description', str)\n check.has_field('datastore_version_id', str)\n check.has_field('datastore_version_name', str)\n check.has_field('datastore_name', str)\n exists = [config for config in result if config.id ==\n 
configuration_info.id]\n assert_equal(1, len(exists))\n configuration = exists[0]\n assert_equal(configuration.id, configuration_info.id)\n assert_equal(configuration.name, configuration_info.name)\n assert_equal(configuration.description, configuration_info.description)\n\n @test\n def test_configurations_list_for_instance(self):\n instance = instance_info.dbaas.instances.get(instance_info.id)\n assert_equal(instance.configuration['id'], configuration_info.id)\n assert_equal(instance.configuration['name'], configuration_info.name)\n assert_equal(2, len(instance.configuration['links']))\n link = instance.configuration['links'][0]\n global configuration_href\n configuration_href = link['href']\n\n @test\n def test_get_default_configuration_on_instance(self):\n result = instance_info.dbaas.instances.configuration(instance_info.id)\n global configuration_default\n configuration_default = result\n assert_not_equal(None, result.configuration)\n\n @test\n def test_changing_configuration_with_nondynamic_parameter(self):\n \"\"\"test_changing_configuration_with_nondynamic_parameter\"\"\"\n expected_configs = self.expected_default_datastore_configs()\n values = json.dumps(expected_configs.get('nondynamic_parameter'))\n instance_info.dbaas.configurations.update(configuration_info.id, values\n )\n resp, body = instance_info.dbaas.client.last_response\n assert_equal(resp.status, 202)\n instance_info.dbaas.configurations.get(configuration_info.id)\n resp, body = instance_info.dbaas.client.last_response\n assert_equal(resp.status, 200)\n\n @test(depends_on=[test_changing_configuration_with_nondynamic_parameter])\n @time_out(20)\n def test_waiting_for_instance_in_restart_required(self):\n \"\"\"test_waiting_for_instance_in_restart_required\"\"\"\n\n def result_is_not_active():\n instance = instance_info.dbaas.instances.get(instance_info.id)\n if instance.status in CONFIG.running_status:\n return False\n else:\n return True\n poll_until(result_is_not_active)\n instance = 
instance_info.dbaas.instances.get(instance_info.id)\n resp, body = instance_info.dbaas.client.last_response\n assert_equal(resp.status, 200)\n assert_equal('RESTART_REQUIRED', instance.status)\n\n @test(depends_on=[test_waiting_for_instance_in_restart_required])\n def test_restart_service_should_return_active(self):\n \"\"\"test_restart_service_should_return_active\"\"\"\n instance_info.dbaas.instances.restart(instance_info.id)\n resp, body = instance_info.dbaas.client.last_response\n assert_equal(resp.status, 202)\n\n def result_is_active():\n instance = instance_info.dbaas.instances.get(instance_info.id)\n if instance.status in CONFIG.running_status:\n return True\n else:\n assert_true(instance.status in ['REBOOT', 'SHUTDOWN'])\n return False\n poll_until(result_is_active)\n\n @test(depends_on=[test_restart_service_should_return_active])\n @time_out(30)\n def test_get_configuration_details_from_instance_validation(self):\n \"\"\"test_get_configuration_details_from_instance_validation\"\"\"\n inst = instance_info.dbaas.instances.get(instance_info.id)\n configuration_id = inst.configuration['id']\n assert_not_equal(None, inst.configuration['id'])\n _test_configuration_is_applied_to_instance(instance_info,\n configuration_id)\n\n @test(depends_on=[test_configurations_list])\n def test_compare_list_and_details_timestamps(self):\n result = instance_info.dbaas.configurations.list()\n list_config = [config for config in result if config.id ==\n configuration_info.id]\n assert_equal(1, len(list_config))\n details_config = instance_info.dbaas.configurations.get(\n configuration_info.id)\n assert_equal(list_config[0].created, details_config.created)\n assert_equal(list_config[0].updated, details_config.updated)\n\n\n@test(depends_on_classes=[ListConfigurations], groups=[tests.\n DBAAS_API_CONFIGURATIONS])\nclass StartInstanceWithConfiguration(ConfigurationsTestBase):\n\n @test\n def test_start_instance_with_configuration(self):\n \"\"\"test that a new instance will apply 
the configuration on create\"\"\"\n global configuration_instance\n databases = []\n databases.append({'name': 'firstdbconfig', 'character_set':\n 'latin2', 'collate': 'latin2_general_ci'})\n databases.append({'name': 'db2'})\n configuration_instance.databases = databases\n users = []\n users.append({'name': 'liteconf', 'password': 'liteconfpass',\n 'databases': [{'name': 'firstdbconfig'}]})\n configuration_instance.users = users\n configuration_instance.name = 'TEST_' + str(uuid.uuid4()) + '_config'\n flavor_href = instance_info.dbaas_flavor_href\n configuration_instance.dbaas_flavor_href = flavor_href\n configuration_instance.volume = instance_info.volume\n configuration_instance.dbaas_datastore = instance_info.dbaas_datastore\n configuration_instance.dbaas_datastore_version = (instance_info.\n dbaas_datastore_version)\n configuration_instance.nics = instance_info.nics\n result = instance_info.dbaas.instances.create(configuration_instance\n .name, configuration_instance.dbaas_flavor_href,\n configuration_instance.volume, configuration_instance.databases,\n configuration_instance.users, nics=configuration_instance.nics,\n availability_zone='nova', datastore=configuration_instance.\n dbaas_datastore, datastore_version=configuration_instance.\n dbaas_datastore_version, configuration=configuration_href)\n assert_equal(200, instance_info.dbaas.last_http_code)\n assert_equal('BUILD', result.status)\n configuration_instance.id = result.id\n\n\n@test(depends_on_classes=[StartInstanceWithConfiguration], groups=[tests.\n DBAAS_API_CONFIGURATIONS])\nclass WaitForConfigurationInstanceToFinish(ConfigurationsTestBase):\n\n @test\n @time_out(TIMEOUT_INSTANCE_CREATE)\n def test_instance_with_configuration_active(self):\n \"\"\"wait for the instance created with configuration\"\"\"\n\n def result_is_active():\n instance = instance_info.dbaas.instances.get(configuration_instance\n .id)\n if instance.status in CONFIG.running_status:\n return True\n else:\n assert_equal('BUILD', 
instance.status)\n return False\n poll_until(result_is_active)\n\n @test(depends_on=[test_instance_with_configuration_active])\n @time_out(30)\n def test_get_configuration_details_from_instance_validation(self):\n \"\"\"Test configuration is applied correctly to the instance.\"\"\"\n inst = instance_info.dbaas.instances.get(configuration_instance.id)\n configuration_id = inst.configuration['id']\n assert_not_equal(None, configuration_id)\n _test_configuration_is_applied_to_instance(configuration_instance,\n configuration_id)\n\n\n@test(depends_on=[WaitForConfigurationInstanceToFinish], groups=[tests.\n DBAAS_API_CONFIGURATIONS])\nclass DeleteConfigurations(ConfigurationsTestBase):\n\n @before_class\n def setUp(self):\n config_param_name = sql_variables[1]\n instance_info.dbaas.configuration_parameters.get_parameter(\n instance_info.dbaas_datastore, instance_info.\n dbaas_datastore_version, config_param_name)\n resp, body = instance_info.dbaas.client.last_response\n print(resp)\n print(body)\n self.config_parameter_dict = json.loads(body.decode())\n\n @after_class(always_run=True)\n def tearDown(self):\n if instance_info.dbaas:\n ds = instance_info.dbaas_datastore\n ds_v = instance_info.dbaas_datastore_version\n version = instance_info.dbaas.datastore_versions.get(ds, ds_v)\n client = instance_info.dbaas_admin.mgmt_configs\n print(self.config_parameter_dict)\n client.create(version.id, self.config_parameter_dict['name'],\n self.config_parameter_dict['restart_required'], self.\n config_parameter_dict['type'], self.config_parameter_dict[\n 'max'], self.config_parameter_dict['min'])\n\n @test\n def test_delete_invalid_configuration_not_found(self):\n invalid_configuration_id = 'invalid-config-id'\n assert_raises(exceptions.NotFound, instance_info.dbaas.\n configurations.delete, invalid_configuration_id)\n\n @test(depends_on=[test_delete_invalid_configuration_not_found])\n def test_delete_configuration_parameter_with_mgmt_api(self):\n ds = 
instance_info.dbaas_datastore\n ds_v = instance_info.dbaas_datastore_version\n version = instance_info.dbaas.datastore_versions.get(ds, ds_v)\n client = instance_info.dbaas_admin.mgmt_configs\n config_param_name = self.config_parameter_dict['name']\n client.delete(version.id, config_param_name)\n assert_raises(exceptions.NotFound, instance_info.dbaas.\n configuration_parameters.get_parameter, ds, ds_v, config_param_name\n )\n\n @test(depends_on=[test_delete_configuration_parameter_with_mgmt_api])\n def test_unable_delete_instance_configurations(self):\n assert_raises(exceptions.BadRequest, instance_info.dbaas.\n configurations.delete, configuration_info.id)\n\n @test(depends_on=[test_unable_delete_instance_configurations])\n @time_out(30)\n def test_unassign_configuration_from_instances(self):\n \"\"\"test to unassign configuration from instance\"\"\"\n instance_info.dbaas.instances.update(configuration_instance.id,\n remove_configuration=True)\n resp, body = instance_info.dbaas.client.last_response\n assert_equal(resp.status, 202)\n instance_info.dbaas.instances.update(instance_info.id,\n remove_configuration=True)\n resp, body = instance_info.dbaas.client.last_response\n assert_equal(resp.status, 202)\n instance_info.dbaas.instances.get(instance_info.id)\n\n def result_has_no_configuration():\n instance = instance_info.dbaas.instances.get(inst_info.id)\n if hasattr(instance, 'configuration'):\n return False\n else:\n return True\n inst_info = instance_info\n poll_until(result_has_no_configuration)\n inst_info = configuration_instance\n poll_until(result_has_no_configuration)\n instance = instance_info.dbaas.instances.get(instance_info.id)\n assert_equal('RESTART_REQUIRED', instance.status)\n\n @test(depends_on=[test_unassign_configuration_from_instances])\n def test_assign_in_wrong_state(self):\n assert_raises(exceptions.BadRequest, instance_info.dbaas.instances.\n modify, configuration_instance.id, configuration=\n configuration_info.id)\n\n 
@test(depends_on=[test_assign_in_wrong_state])\n def test_no_instances_on_configuration(self):\n \"\"\"test_no_instances_on_configuration\"\"\"\n result = instance_info.dbaas.configurations.get(configuration_info.id)\n assert_equal(configuration_info.id, result.id)\n assert_equal(configuration_info.name, result.name)\n assert_equal(configuration_info.description, result.description)\n assert_equal(result.instance_count, 0)\n print(configuration_instance.id)\n print(instance_info.id)\n\n @test(depends_on=[test_unassign_configuration_from_instances])\n @time_out(120)\n def test_restart_service_should_return_active(self):\n \"\"\"test that after restarting the instance it becomes active\"\"\"\n instance_info.dbaas.instances.restart(instance_info.id)\n resp, body = instance_info.dbaas.client.last_response\n assert_equal(resp.status, 202)\n\n def result_is_active():\n instance = instance_info.dbaas.instances.get(instance_info.id)\n if instance.status in CONFIG.running_status:\n return True\n else:\n assert_equal('REBOOT', instance.status)\n return False\n poll_until(result_is_active)\n\n @test(depends_on=[test_restart_service_should_return_active])\n def test_assign_config_and_name_to_instance_using_patch(self):\n \"\"\"test_assign_config_and_name_to_instance_using_patch\"\"\"\n new_name = 'new_name'\n report = CONFIG.get_report()\n report.log('instance_info.id: %s' % instance_info.id)\n report.log('configuration_info: %s' % configuration_info)\n report.log('configuration_info.id: %s' % configuration_info.id)\n report.log('instance name:%s' % instance_info.name)\n report.log('instance new name:%s' % new_name)\n saved_name = instance_info.name\n config_id = configuration_info.id\n instance_info.dbaas.instances.update(instance_info.id,\n configuration=config_id, name=new_name)\n assert_equal(202, instance_info.dbaas.last_http_code)\n check = instance_info.dbaas.instances.get(instance_info.id)\n assert_equal(200, instance_info.dbaas.last_http_code)\n 
assert_equal(check.name, new_name)\n instance_info.dbaas.instances.update(instance_info.id, name=saved_name)\n assert_equal(202, instance_info.dbaas.last_http_code)\n instance = instance_info.dbaas.instances.get(instance_info.id)\n assert_equal('RESTART_REQUIRED', instance.status)\n instance_info.dbaas.instances.restart(instance_info.id)\n assert_equal(202, instance_info.dbaas.last_http_code)\n sleep(2)\n\n def result_is_active():\n instance = instance_info.dbaas.instances.get(instance_info.id)\n if instance.status in CONFIG.running_status:\n return True\n else:\n assert_equal('REBOOT', instance.status)\n return False\n poll_until(result_is_active)\n config_id = configuration_info.id\n assert_raises(exceptions.BadRequest, instance_info.dbaas.instances.\n update, instance_info.id, configuration=config_id)\n\n @test(runs_after=[test_assign_config_and_name_to_instance_using_patch])\n def test_unassign_configuration_after_patch(self):\n \"\"\"Remove the configuration from the instance\"\"\"\n instance_info.dbaas.instances.update(instance_info.id,\n remove_configuration=True)\n assert_equal(202, instance_info.dbaas.last_http_code)\n instance = instance_info.dbaas.instances.get(instance_info.id)\n assert_equal('RESTART_REQUIRED', instance.status)\n instance_info.dbaas.instances.restart(instance_info.id)\n assert_equal(202, instance_info.dbaas.last_http_code)\n sleep(2)\n\n def result_is_active():\n instance = instance_info.dbaas.instances.get(instance_info.id)\n if instance.status in CONFIG.running_status:\n return True\n else:\n assert_equal('REBOOT', instance.status)\n return False\n poll_until(result_is_active)\n result = instance_info.dbaas.configurations.get(configuration_info.id)\n assert_equal(result.instance_count, 0)\n\n @test\n def test_unassign_configuration_from_invalid_instance_using_patch(self):\n invalid_id = 'invalid-inst-id'\n try:\n instance_info.dbaas.instances.update(invalid_id,\n remove_configuration=True)\n except exceptions.NotFound:\n resp, body = 
instance_info.dbaas.client.last_response\n assert_equal(resp.status, 404)\n\n @test(runs_after=[test_unassign_configuration_after_patch])\n def test_delete_unassigned_configuration(self):\n \"\"\"test_delete_unassigned_configuration\"\"\"\n instance_info.dbaas.configurations.delete(configuration_info.id)\n resp, body = instance_info.dbaas.client.last_response\n assert_equal(resp.status, 202)\n\n @test(depends_on=[test_delete_unassigned_configuration])\n @time_out(TIMEOUT_INSTANCE_DELETE)\n def test_delete_configuration_instance(self):\n \"\"\"test_delete_configuration_instance\"\"\"\n instance_info.dbaas.instances.delete(configuration_instance.id)\n assert_equal(202, instance_info.dbaas.last_http_code)\n\n def instance_is_gone():\n try:\n instance_info.dbaas.instances.get(configuration_instance.id)\n return False\n except exceptions.NotFound:\n return True\n poll_until(instance_is_gone)\n assert_raises(exceptions.NotFound, instance_info.dbaas.instances.\n get, configuration_instance.id)\n",
"step-4": "from datetime import datetime\nimport json\nimport netaddr\nfrom time import sleep\nimport uuid\nfrom proboscis import after_class\nfrom proboscis.asserts import assert_equal\nfrom proboscis.asserts import assert_not_equal\nfrom proboscis.asserts import assert_raises\nfrom proboscis.asserts import assert_true\nfrom proboscis.asserts import fail\nfrom proboscis import before_class\nfrom proboscis.decorators import time_out\nfrom proboscis import SkipTest\nfrom proboscis import test\nfrom troveclient.compat import exceptions\nfrom trove.common.utils import poll_until\nfrom trove import tests\nfrom trove.tests.api.instances import assert_unprocessable\nfrom trove.tests.api.instances import instance_info\nfrom trove.tests.api.instances import InstanceTestInfo\nfrom trove.tests.api.instances import TIMEOUT_INSTANCE_CREATE\nfrom trove.tests.api.instances import TIMEOUT_INSTANCE_DELETE\nfrom trove.tests.config import CONFIG\nfrom trove.tests.util.check import AttrCheck\nfrom trove.tests.util.check import CollectionCheck\nfrom trove.tests.util.check import TypeCheck\nfrom trove.tests.util import create_dbaas_client\nfrom trove.tests.util.mysql import create_mysql_connection\nfrom trove.tests.util.users import Requirements\nCONFIG_NAME = 'test_configuration'\nCONFIG_DESC = 'configuration description'\nconfiguration_default = None\nconfiguration_info = None\nconfiguration_href = None\nconfiguration_instance = InstanceTestInfo()\nconfiguration_instance_id = None\nsql_variables = ['key_buffer_size', 'connect_timeout', 'join_buffer_size']\n\n\ndef _is_valid_timestamp(time_string):\n try:\n datetime.strptime(time_string, '%Y-%m-%dT%H:%M:%S')\n except ValueError:\n return False\n return True\n\n\ndef _execute_query(host, user_name, password, query):\n print(\n 'Starting to query database, host: %s, user: %s, password: %s, query: %s'\n % (host, user_name, password, query))\n with create_mysql_connection(host, user_name, password) as db:\n result = db.execute(query)\n 
return result\n\n\ndef _get_address(instance_id):\n result = instance_info.dbaas_admin.mgmt.instances.show(instance_id)\n try:\n return next(str(ip) for ip in result.ip if netaddr.valid_ipv4(ip))\n except StopIteration:\n fail('No IPV4 ip found')\n\n\ndef _test_configuration_is_applied_to_instance(instance, configuration_id):\n if CONFIG.fake_mode:\n raise SkipTest('configuration from sql does not work in fake mode')\n instance_test = instance_info.dbaas.instances.get(instance.id)\n assert_equal(configuration_id, instance_test.configuration['id'])\n if configuration_id:\n testconfig_info = instance_info.dbaas.configurations.get(\n configuration_id)\n else:\n testconfig_info = instance_info.dbaas.instance.configuration(instance\n .id)\n testconfig_info['configuration']\n conf_instances = instance_info.dbaas.configurations.instances(\n configuration_id)\n config_instance_ids = [inst.id for inst in conf_instances]\n assert_true(instance_test.id in config_instance_ids)\n cfg_names = testconfig_info.values.keys()\n host = _get_address(instance.id)\n for user in instance.users:\n username = user['name']\n password = user['password']\n concat_variables = \"','\".join(cfg_names)\n query = (\"show variables where Variable_name in ('%s');\" %\n concat_variables)\n actual_values = _execute_query(host, username, password, query)\n print('actual_values %s' % actual_values)\n print('testconfig_info.values %s' % testconfig_info.values)\n assert_true(len(actual_values) == len(cfg_names))\n attrcheck = AttrCheck()\n allowed_attrs = [actual_key for actual_key, actual_value in actual_values]\n attrcheck.contains_allowed_attrs(testconfig_info.values, allowed_attrs,\n msg='Configurations parameters')\n\n def _get_parameter_type(name):\n instance_info.dbaas.configuration_parameters.get_parameter(\n instance_info.dbaas_datastore, instance_info.\n dbaas_datastore_version, name)\n resp, body = instance_info.dbaas.client.last_response\n print(resp)\n print(body)\n return 
json.loads(body.decode())['type']\n for key, value in actual_values:\n key_type = _get_parameter_type(key)\n if value == 'ON':\n converted_key_value = str(key), 1\n elif value == 'OFF':\n converted_key_value = str(key), 0\n else:\n if key_type == 'integer':\n value = int(value)\n converted_key_value = str(key), value\n print('converted_key_value: %s' % str(converted_key_value))\n assert_true(converted_key_value in testconfig_info.values.items())\n\n\nclass ConfigurationsTestBase(object):\n\n @staticmethod\n def expected_instance_datastore_configs(instance_id):\n \"\"\"Given an instance retrieve the expected test configurations for\n instance's datastore.\n \"\"\"\n instance = instance_info.dbaas.instances.get(instance_id)\n datastore_type = instance.datastore['type']\n datastore_test_configs = CONFIG.get(datastore_type, {})\n return datastore_test_configs.get('configurations', {})\n\n @staticmethod\n def expected_default_datastore_configs():\n \"\"\"Returns the expected test configurations for the default datastore\n defined in the Test Config as dbaas_datastore.\n \"\"\"\n default_datastore = CONFIG.get('dbaas_datastore', None)\n datastore_test_configs = CONFIG.get(default_datastore, {})\n return datastore_test_configs.get('configurations', {})\n\n\n@test(depends_on_groups=[tests.DBAAS_API_BACKUPS], groups=[tests.\n DBAAS_API_CONFIGURATIONS])\nclass CreateConfigurations(ConfigurationsTestBase):\n\n @test\n def test_expected_configurations_parameters(self):\n \"\"\"Test get expected configurations parameters.\"\"\"\n allowed_attrs = ['configuration-parameters']\n instance_info.dbaas.configuration_parameters.parameters(instance_info\n .dbaas_datastore, instance_info.dbaas_datastore_version)\n resp, body = instance_info.dbaas.client.last_response\n attrcheck = AttrCheck()\n config_parameters_dict = json.loads(body.decode())\n attrcheck.contains_allowed_attrs(config_parameters_dict,\n allowed_attrs, msg='Configurations parameters')\n config_params_list = 
config_parameters_dict['configuration-parameters']\n config_param_keys = []\n for param in config_params_list:\n config_param_keys.append(param['name'])\n expected_configs = self.expected_default_datastore_configs()\n expected_config_params = expected_configs.get('parameters_list')\n msg = 'check for duplicate configuration parameters'\n assert_equal(len(config_param_keys), len(set(config_param_keys)), msg)\n for expected_config_item in expected_config_params:\n assert_true(expected_config_item in config_param_keys)\n\n @test\n def test_expected_get_configuration_parameter(self):\n param_name = 'key_buffer_size'\n allowed_config_params = ['name', 'restart_required', 'max', 'min',\n 'type', 'deleted', 'deleted_at', 'datastore_version_id']\n param = instance_info.dbaas.configuration_parameters.get_parameter(\n instance_info.dbaas_datastore, instance_info.\n dbaas_datastore_version, param_name)\n resp, body = instance_info.dbaas.client.last_response\n print('params: %s' % param)\n print('resp: %s' % resp)\n print('body: %s' % body)\n attrcheck = AttrCheck()\n config_parameter_dict = json.loads(body.decode())\n print('config_parameter_dict: %s' % config_parameter_dict)\n attrcheck.contains_allowed_attrs(config_parameter_dict,\n allowed_config_params, msg='Get Configuration parameter')\n assert_equal(param_name, config_parameter_dict['name'])\n with TypeCheck('ConfigurationParameter', param) as parameter:\n parameter.has_field('name', str)\n parameter.has_field('restart_required', bool)\n parameter.has_field('max', int)\n parameter.has_field('min', int)\n parameter.has_field('type', str)\n parameter.has_field('datastore_version_id', str)\n\n @test\n def test_configurations_create_invalid_values(self):\n \"\"\"Test create configurations with invalid values.\"\"\"\n values = '{\"this_is_invalid\": 123}'\n try:\n instance_info.dbaas.configurations.create(CONFIG_NAME, values,\n CONFIG_DESC)\n except exceptions.UnprocessableEntity:\n resp, body = 
instance_info.dbaas.client.last_response
        assert_equal(resp.status, 422)

    @test
    def test_configurations_create_invalid_value_type(self):
        """Test create configuration with invalid value type."""
        # A string supplied for an integer parameter must be rejected
        # (assert_unprocessable expects an HTTP 422 from the API).
        values = '{"key_buffer_size": "this is a string not int"}'
        assert_unprocessable(instance_info.dbaas.configurations.create,
            CONFIG_NAME, values, CONFIG_DESC)

    @test
    def test_configurations_create_value_out_of_bounds(self):
        """Test create configuration with value out of bounds."""
        expected_configs = self.expected_default_datastore_configs()
        # Both over- and under-range values must be rejected.
        values = json.dumps(expected_configs.get('out_of_bounds_over'))
        assert_unprocessable(instance_info.dbaas.configurations.create,
            CONFIG_NAME, values, CONFIG_DESC)
        values = json.dumps(expected_configs.get('out_of_bounds_under'))
        assert_unprocessable(instance_info.dbaas.configurations.create,
            CONFIG_NAME, values, CONFIG_DESC)

    @test
    def test_valid_configurations_create(self):
        """create a configuration with valid parameters from config."""
        expected_configs = self.expected_default_datastore_configs()
        values = json.dumps(expected_configs.get('valid_values'))
        expected_values = json.loads(values)
        result = instance_info.dbaas.configurations.create(CONFIG_NAME,
            values, CONFIG_DESC, datastore=instance_info.dbaas_datastore,
            datastore_version=instance_info.dbaas_datastore_version)
        resp, body = instance_info.dbaas.client.last_response
        assert_equal(resp.status, 200)
        with TypeCheck('Configuration', result) as configuration:
            configuration.has_field('name', str)
            configuration.has_field('description', str)
            configuration.has_field('values', dict)
            configuration.has_field('datastore_name', str)
            configuration.has_field('datastore_version_id', str)
            configuration.has_field('datastore_version_name', str)
        # Publish the created configuration for the dependent test
        # classes that follow (AfterConfigurationsCreation etc.).
        global configuration_info
        configuration_info = result
        assert_equal(configuration_info.name, CONFIG_NAME)
        assert_equal(configuration_info.description, CONFIG_DESC)
        assert_equal(configuration_info.values, expected_values)

    @test(runs_after=[test_valid_configurations_create])
    def test_appending_to_existing_configuration(self):
        """test_appending_to_existing_configuration"""
        expected_configs = self.expected_default_datastore_configs()
        values = json.dumps(expected_configs.get('appending_values'))
        if not CONFIG.fake_mode:
            # NOTE(review): presumably ensures the 'updated' timestamp
            # differs from 'created' (asserted later) -- confirm.
            sleep(1)
        instance_info.dbaas.configurations.edit(configuration_info.id, values)
        resp, body = instance_info.dbaas.client.last_response
        assert_equal(resp.status, 200)


@test(depends_on_classes=[CreateConfigurations], groups=[tests.
    DBAAS_API_CONFIGURATIONS])
class AfterConfigurationsCreation(ConfigurationsTestBase):
    """Attach the created configuration to the main test instance.

    Relies on the module-global ``configuration_info`` published by
    CreateConfigurations.
    """

    @test
    def test_assign_configuration_to_invalid_instance(self):
        """test assigning to an instance that does not exist"""
        invalid_id = 'invalid-inst-id'
        try:
            instance_info.dbaas.instances.modify(invalid_id,
                configuration_info.id)
        except exceptions.NotFound:
            resp, body = instance_info.dbaas.client.last_response
            assert_equal(resp.status, 404)
        # NOTE(review): if NotFound is NOT raised this test silently
        # passes; an explicit failure in the no-exception path would
        # be stricter.

    @test
    def test_assign_configuration_to_valid_instance(self):
        """test assigning a configuration to an instance"""
        print('instance_info.id: %s' % instance_info.id)
        print('configuration_info: %s' % configuration_info)
        print('configuration_info.id: %s' % configuration_info.id)
        config_id = configuration_info.id
        instance_info.dbaas.instances.modify(instance_info.id,
            configuration=config_id)
        resp, body = instance_info.dbaas.client.last_response
        assert_equal(resp.status, 202)

    @test(depends_on=[test_assign_configuration_to_valid_instance])
    def test_assign_configuration_to_instance_with_config(self):
        """test assigning a configuration to an instance conflicts"""
        # Re-assigning while a configuration is already attached must
        # fail with BadRequest.
        config_id = configuration_info.id
        assert_raises(exceptions.BadRequest, instance_info.dbaas.instances.
            modify, instance_info.id, configuration=config_id)

    @test(depends_on=[test_assign_configuration_to_valid_instance])
    @time_out(30)
    def test_get_configuration_details_from_instance_validation(self):
        """validate the configuration after attaching"""
        print('instance_info.id: %s' % instance_info.id)
        inst = instance_info.dbaas.instances.get(instance_info.id)
        configuration_id = inst.configuration['id']
        print('configuration_info: %s' % configuration_id)
        assert_not_equal(None, configuration_id)
        _test_configuration_is_applied_to_instance(instance_info,
            configuration_id)

    @test(depends_on=[test_get_configuration_details_from_instance_validation])
    def test_configurations_get(self):
        """test that the instance shows up on the assigned configuration"""
        result = instance_info.dbaas.configurations.get(configuration_info.id)
        assert_equal(configuration_info.id, result.id)
        assert_equal(configuration_info.name, result.name)
        assert_equal(configuration_info.description, result.description)
        with TypeCheck('configuration', result) as check:
            check.has_field('id', str)
            check.has_field('name', str)
            check.has_field('description', str)
            check.has_field('values', dict)
            check.has_field('created', str)
            check.has_field('updated', str)
            check.has_field('instance_count', int)
        print(result.values)
        assert_true(_is_valid_timestamp(result.created))
        assert_true(_is_valid_timestamp(result.updated))
        if not CONFIG.fake_mode:
            # The earlier edit should have bumped the update timestamp.
            assert_not_equal(result.created, result.updated)
        assert_equal(result.instance_count, 1)
        # Every returned value must match its declared parameter type.
        with CollectionCheck('configuration_values', result.values) as check:
            for item_key, item_val in result.values.items():
                print('item_key: %s' % item_key)
                print('item_val: %s' % item_val)
                dbaas = instance_info.dbaas
                param = dbaas.configuration_parameters.get_parameter(
                    instance_info.dbaas_datastore, instance_info.
                    dbaas_datastore_version, item_key)
                if param.type == 'integer':
                    check.has_element(item_key, int)
                if param.type == 'string':
                    check.has_element(item_key, str)
                if param.type == 'boolean':
                    check.has_element(item_key, bool)
        # A non-admin user from a different tenant must not be able to
        # see this configuration (NotFound expected).
        reqs = Requirements(is_admin=False)
        test_auth_user = instance_info.user.auth_user
        other_user = CONFIG.users.find_user(reqs, black_list=[test_auth_user])
        other_user_tenant_id = other_user.tenant_id
        client_tenant_id = instance_info.user.tenant_id
        if other_user_tenant_id == client_tenant_id:
            # Picked a user from the same tenant; exclude it and retry.
            other_user = CONFIG.users.find_user(reqs, black_list=[
                instance_info.user.auth_user, other_user])
        print(other_user)
        print(other_user.__dict__)
        other_client = create_dbaas_client(other_user)
        assert_raises(exceptions.NotFound, other_client.configurations.get,
            configuration_info.id)


@test(depends_on_classes=[AfterConfigurationsCreation], groups=[tests.
    DBAAS_API_CONFIGURATIONS])
class ListConfigurations(ConfigurationsTestBase):
    """Listing/detail checks plus the non-dynamic parameter flow."""

    @test
    def test_configurations_list(self):
        """The created configuration appears exactly once in the list."""
        result = instance_info.dbaas.configurations.list()
        for conf in result:
            with TypeCheck('Configuration', conf) as check:
                check.has_field('id', str)
                check.has_field('name', str)
                check.has_field('description', str)
                check.has_field('datastore_version_id', str)
                check.has_field('datastore_version_name', str)
                check.has_field('datastore_name', str)
        exists = [config for config in result if config.id ==
            configuration_info.id]
        assert_equal(1, len(exists))
        configuration = exists[0]
        assert_equal(configuration.id, configuration_info.id)
        assert_equal(configuration.name, configuration_info.name)
        assert_equal(configuration.description, configuration_info.description)

    @test
    def test_configurations_list_for_instance(self):
        """The instance reports its attached configuration and links."""
        instance = instance_info.dbaas.instances.get(instance_info.id)
        assert_equal(instance.configuration['id'], configuration_info.id)
        assert_equal(instance.configuration['name'], configuration_info.name)
        assert_equal(2, len(instance.configuration['links']))
        link = instance.configuration['links'][0]
        # Publish the href; StartInstanceWithConfiguration uses it to
        # boot a new instance with this configuration.
        global configuration_href
        configuration_href = link['href']

    @test
    def test_get_default_configuration_on_instance(self):
        """The instance exposes a non-empty configuration document."""
        result = instance_info.dbaas.instances.configuration(instance_info.id)
        # Published globally; not referenced again within this chunk --
        # presumably consumed elsewhere.
        global configuration_default
        configuration_default = result
        assert_not_equal(None, result.configuration)

    @test
    def test_changing_configuration_with_nondynamic_parameter(self):
        """test_changing_configuration_with_nondynamic_parameter"""
        # Updating a restart-required parameter should drive the
        # instance into RESTART_REQUIRED (checked by the next test).
        expected_configs = self.expected_default_datastore_configs()
        values = json.dumps(expected_configs.get('nondynamic_parameter'))
        instance_info.dbaas.configurations.update(configuration_info.id, values
            )
        resp, body = instance_info.dbaas.client.last_response
        assert_equal(resp.status, 202)
        instance_info.dbaas.configurations.get(configuration_info.id)
        resp, body = instance_info.dbaas.client.last_response
        assert_equal(resp.status, 200)

    @test(depends_on=[test_changing_configuration_with_nondynamic_parameter])
    @time_out(20)
    def test_waiting_for_instance_in_restart_required(self):
        """test_waiting_for_instance_in_restart_required"""

        def result_is_not_active():
            # True once the instance has left the running state.
            instance = instance_info.dbaas.instances.get(instance_info.id)
            if instance.status in CONFIG.running_status:
                return False
            else:
                return True
        poll_until(result_is_not_active)
        instance = instance_info.dbaas.instances.get(instance_info.id)
        resp, body = instance_info.dbaas.client.last_response
        assert_equal(resp.status, 200)
        assert_equal('RESTART_REQUIRED', instance.status)

    @test(depends_on=[test_waiting_for_instance_in_restart_required])
    def test_restart_service_should_return_active(self):
        """test_restart_service_should_return_active"""
        instance_info.dbaas.instances.restart(instance_info.id)
        resp, body = instance_info.dbaas.client.last_response
        assert_equal(resp.status, 202)

        def result_is_active():
            instance = instance_info.dbaas.instances.get(instance_info.id)
            if instance.status in CONFIG.running_status:
                return True
            else:
                # Only transitional states are acceptable while waiting.
                assert_true(instance.status in ['REBOOT', 'SHUTDOWN'])
                return False
        poll_until(result_is_active)

    @test(depends_on=[test_restart_service_should_return_active])
    @time_out(30)
    def test_get_configuration_details_from_instance_validation(self):
        """test_get_configuration_details_from_instance_validation"""
        inst = instance_info.dbaas.instances.get(instance_info.id)
        configuration_id = inst.configuration['id']
        assert_not_equal(None, inst.configuration['id'])
        _test_configuration_is_applied_to_instance(instance_info,
            configuration_id)

    @test(depends_on=[test_configurations_list])
    def test_compare_list_and_details_timestamps(self):
        """List and detail views agree on created/updated timestamps."""
        result = instance_info.dbaas.configurations.list()
        list_config = [config for config in result if config.id ==
            configuration_info.id]
        assert_equal(1, len(list_config))
        details_config = instance_info.dbaas.configurations.get(
            configuration_info.id)
        assert_equal(list_config[0].created, details_config.created)
        assert_equal(list_config[0].updated, details_config.updated)


@test(depends_on_classes=[ListConfigurations], groups=[tests.
    DBAAS_API_CONFIGURATIONS])
class StartInstanceWithConfiguration(ConfigurationsTestBase):
    """Boot a brand-new instance with the configuration attached."""

    @test
    def test_start_instance_with_configuration(self):
        """test that a new instance will apply the configuration on create"""
        global configuration_instance
        databases = []
        databases.append({'name': 'firstdbconfig', 'character_set':
            'latin2', 'collate': 'latin2_general_ci'})
        databases.append({'name': 'db2'})
        configuration_instance.databases = databases
        users = []
        users.append({'name': 'liteconf', 'password': 'liteconfpass',
            'databases': [{'name': 'firstdbconfig'}]})
        configuration_instance.users = users
        configuration_instance.name = 'TEST_' + str(uuid.uuid4()) + '_config'
        # Reuse flavor/volume/datastore/nics from the main test instance.
        flavor_href = instance_info.dbaas_flavor_href
        configuration_instance.dbaas_flavor_href = flavor_href
        configuration_instance.volume = instance_info.volume
        configuration_instance.dbaas_datastore = instance_info.dbaas_datastore
        configuration_instance.dbaas_datastore_version = (instance_info.
            dbaas_datastore_version)
        configuration_instance.nics = instance_info.nics
        result = instance_info.dbaas.instances.create(configuration_instance
            .name, configuration_instance.dbaas_flavor_href,
            configuration_instance.volume, configuration_instance.databases,
            configuration_instance.users, nics=configuration_instance.nics,
            availability_zone='nova', datastore=configuration_instance.
            dbaas_datastore, datastore_version=configuration_instance.
            dbaas_datastore_version, configuration=configuration_href)
        assert_equal(200, instance_info.dbaas.last_http_code)
        assert_equal('BUILD', result.status)
        configuration_instance.id = result.id


@test(depends_on_classes=[StartInstanceWithConfiguration], groups=[tests.
    DBAAS_API_CONFIGURATIONS])
class WaitForConfigurationInstanceToFinish(ConfigurationsTestBase):
    """Wait for the configuration-backed instance to become active."""

    @test
    @time_out(TIMEOUT_INSTANCE_CREATE)
    def test_instance_with_configuration_active(self):
        """wait for the instance created with configuration"""

        def result_is_active():
            instance = instance_info.dbaas.instances.get(configuration_instance
                .id)
            if instance.status in CONFIG.running_status:
                return True
            else:
                # While not running, only BUILD is acceptable.
                assert_equal('BUILD', instance.status)
                return False
        poll_until(result_is_active)

    @test(depends_on=[test_instance_with_configuration_active])
    @time_out(30)
    def test_get_configuration_details_from_instance_validation(self):
        """Test configuration is applied correctly to the instance."""
        inst = instance_info.dbaas.instances.get(configuration_instance.id)
        configuration_id = inst.configuration['id']
        assert_not_equal(None, configuration_id)
        _test_configuration_is_applied_to_instance(configuration_instance,
            configuration_id)


@test(depends_on=[WaitForConfigurationInstanceToFinish], groups=[tests.
    DBAAS_API_CONFIGURATIONS])
class DeleteConfigurations(ConfigurationsTestBase):
    """Detach/delete flows, including mgmt-API parameter removal."""

    @before_class
    def setUp(self):
        """Snapshot a parameter definition for restoration in tearDown."""
        config_param_name = sql_variables[1]
        instance_info.dbaas.configuration_parameters.get_parameter(
            instance_info.dbaas_datastore, instance_info.
            dbaas_datastore_version, config_param_name)
        # Keep the raw response body so tearDown can re-create the
        # parameter after test_delete_configuration_parameter_with_mgmt_api
        # removes it.
        resp, body = instance_info.dbaas.client.last_response
        print(resp)
        print(body)
        self.config_parameter_dict = json.loads(body.decode())

    @after_class(always_run=True)
    def tearDown(self):
        """Restore the configuration parameter deleted by the tests."""
        if instance_info.dbaas:
            ds = instance_info.dbaas_datastore
            ds_v = instance_info.dbaas_datastore_version
            version = instance_info.dbaas.datastore_versions.get(ds, ds_v)
            client = instance_info.dbaas_admin.mgmt_configs
            print(self.config_parameter_dict)
            client.create(version.id, self.config_parameter_dict['name'],
                self.config_parameter_dict['restart_required'], self.
                config_parameter_dict['type'], self.config_parameter_dict[
                'max'], self.config_parameter_dict['min'])

    @test
    def test_delete_invalid_configuration_not_found(self):
        """Deleting a nonexistent configuration raises NotFound."""
        invalid_configuration_id = 'invalid-config-id'
        assert_raises(exceptions.NotFound, instance_info.dbaas.
            configurations.delete, invalid_configuration_id)

    @test(depends_on=[test_delete_invalid_configuration_not_found])
    def test_delete_configuration_parameter_with_mgmt_api(self):
        """A parameter deleted via the mgmt API is no longer readable."""
        ds = instance_info.dbaas_datastore
        ds_v = instance_info.dbaas_datastore_version
        version = instance_info.dbaas.datastore_versions.get(ds, ds_v)
        client = instance_info.dbaas_admin.mgmt_configs
        config_param_name = self.config_parameter_dict['name']
        client.delete(version.id, config_param_name)
        assert_raises(exceptions.NotFound, instance_info.dbaas.
            configuration_parameters.get_parameter, ds, ds_v, config_param_name
            )

    @test(depends_on=[test_delete_configuration_parameter_with_mgmt_api])
    def test_unable_delete_instance_configurations(self):
        """A configuration still attached to instances cannot be deleted."""
        assert_raises(exceptions.BadRequest, instance_info.dbaas.
            configurations.delete, configuration_info.id)

    @test(depends_on=[test_unable_delete_instance_configurations])
    @time_out(30)
    def test_unassign_configuration_from_instances(self):
        """test to unassign configuration from instance"""
        instance_info.dbaas.instances.update(configuration_instance.id,
            remove_configuration=True)
        resp, body = instance_info.dbaas.client.last_response
        assert_equal(resp.status, 202)
        instance_info.dbaas.instances.update(instance_info.id,
            remove_configuration=True)
        resp, body = instance_info.dbaas.client.last_response
        assert_equal(resp.status, 202)
        instance_info.dbaas.instances.get(instance_info.id)

        def result_has_no_configuration():
            # Reads ``inst_info`` from the enclosing scope at call
            # time; it is rebound below so the same closure checks
            # both instances in turn.
            instance = instance_info.dbaas.instances.get(inst_info.id)
            if hasattr(instance, 'configuration'):
                return False
            else:
                return True
        inst_info = instance_info
        poll_until(result_has_no_configuration)
        inst_info = configuration_instance
        poll_until(result_has_no_configuration)
        instance = instance_info.dbaas.instances.get(instance_info.id)
        assert_equal('RESTART_REQUIRED', instance.status)

    @test(depends_on=[test_unassign_configuration_from_instances])
    def test_assign_in_wrong_state(self):
        """Re-assigning right after unassign must fail with BadRequest."""
        assert_raises(exceptions.BadRequest, instance_info.dbaas.instances.
            modify, configuration_instance.id, configuration=
            configuration_info.id)

    @test(depends_on=[test_assign_in_wrong_state])
    def test_no_instances_on_configuration(self):
        """test_no_instances_on_configuration"""
        result = instance_info.dbaas.configurations.get(configuration_info.id)
        assert_equal(configuration_info.id, result.id)
        assert_equal(configuration_info.name, result.name)
        assert_equal(configuration_info.description, result.description)
        # After both instances were unassigned the count must be zero.
        assert_equal(result.instance_count, 0)
        print(configuration_instance.id)
        print(instance_info.id)

    @test(depends_on=[test_unassign_configuration_from_instances])
    @time_out(120)
    def test_restart_service_should_return_active(self):
        """test that after restarting the instance it becomes active"""
        instance_info.dbaas.instances.restart(instance_info.id)
        resp, body = instance_info.dbaas.client.last_response
        assert_equal(resp.status, 202)

        def result_is_active():
            instance = instance_info.dbaas.instances.get(instance_info.id)
            if instance.status in CONFIG.running_status:
                return True
            else:
                # REBOOT is the only acceptable transitional state here.
                assert_equal('REBOOT', instance.status)
                return False
        poll_until(result_is_active)

    @test(depends_on=[test_restart_service_should_return_active])
    def test_assign_config_and_name_to_instance_using_patch(self):
        """test_assign_config_and_name_to_instance_using_patch"""
        new_name = 'new_name'
        report = CONFIG.get_report()
        report.log('instance_info.id: %s' % instance_info.id)
        report.log('configuration_info: %s' % configuration_info)
        report.log('configuration_info.id: %s' % configuration_info.id)
        report.log('instance name:%s' % instance_info.name)
        report.log('instance new name:%s' % new_name)
        saved_name = instance_info.name
        config_id = configuration_info.id
        # Update both the configuration and the name in one call.
        instance_info.dbaas.instances.update(instance_info.id,
            configuration=config_id, name=new_name)
        assert_equal(202, instance_info.dbaas.last_http_code)
        check = instance_info.dbaas.instances.get(instance_info.id)
        assert_equal(200, instance_info.dbaas.last_http_code)
        assert_equal(check.name, new_name)
        # Restore the original name.
        instance_info.dbaas.instances.update(instance_info.id, name=saved_name)
        assert_equal(202, instance_info.dbaas.last_http_code)
        instance = instance_info.dbaas.instances.get(instance_info.id)
        assert_equal('RESTART_REQUIRED', instance.status)
        instance_info.dbaas.instances.restart(instance_info.id)
        assert_equal(202, instance_info.dbaas.last_http_code)
        sleep(2)

        def result_is_active():
            instance = instance_info.dbaas.instances.get(instance_info.id)
            if instance.status in CONFIG.running_status:
                return True
            else:
                assert_equal('REBOOT', instance.status)
                return False
        poll_until(result_is_active)
        config_id = configuration_info.id
        
assert_raises(exceptions.BadRequest, instance_info.dbaas.instances.\n update, instance_info.id, configuration=config_id)\n\n @test(runs_after=[test_assign_config_and_name_to_instance_using_patch])\n def test_unassign_configuration_after_patch(self):\n \"\"\"Remove the configuration from the instance\"\"\"\n instance_info.dbaas.instances.update(instance_info.id,\n remove_configuration=True)\n assert_equal(202, instance_info.dbaas.last_http_code)\n instance = instance_info.dbaas.instances.get(instance_info.id)\n assert_equal('RESTART_REQUIRED', instance.status)\n instance_info.dbaas.instances.restart(instance_info.id)\n assert_equal(202, instance_info.dbaas.last_http_code)\n sleep(2)\n\n def result_is_active():\n instance = instance_info.dbaas.instances.get(instance_info.id)\n if instance.status in CONFIG.running_status:\n return True\n else:\n assert_equal('REBOOT', instance.status)\n return False\n poll_until(result_is_active)\n result = instance_info.dbaas.configurations.get(configuration_info.id)\n assert_equal(result.instance_count, 0)\n\n @test\n def test_unassign_configuration_from_invalid_instance_using_patch(self):\n invalid_id = 'invalid-inst-id'\n try:\n instance_info.dbaas.instances.update(invalid_id,\n remove_configuration=True)\n except exceptions.NotFound:\n resp, body = instance_info.dbaas.client.last_response\n assert_equal(resp.status, 404)\n\n @test(runs_after=[test_unassign_configuration_after_patch])\n def test_delete_unassigned_configuration(self):\n \"\"\"test_delete_unassigned_configuration\"\"\"\n instance_info.dbaas.configurations.delete(configuration_info.id)\n resp, body = instance_info.dbaas.client.last_response\n assert_equal(resp.status, 202)\n\n @test(depends_on=[test_delete_unassigned_configuration])\n @time_out(TIMEOUT_INSTANCE_DELETE)\n def test_delete_configuration_instance(self):\n \"\"\"test_delete_configuration_instance\"\"\"\n instance_info.dbaas.instances.delete(configuration_instance.id)\n assert_equal(202, 
instance_info.dbaas.last_http_code)\n\n def instance_is_gone():\n try:\n instance_info.dbaas.instances.get(configuration_instance.id)\n return False\n except exceptions.NotFound:\n return True\n poll_until(instance_is_gone)\n assert_raises(exceptions.NotFound, instance_info.dbaas.instances.\n get, configuration_instance.id)\n",
"step-5": "# Copyright 2014 Rackspace Hosting\n# All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\"); you may\n# not use this file except in compliance with the License. You may obtain\n# a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the\n# License for the specific language governing permissions and limitations\n# under the License.\n\n\nfrom datetime import datetime\nimport json\nimport netaddr\nfrom time import sleep\nimport uuid\n\nfrom proboscis import after_class\nfrom proboscis.asserts import assert_equal\nfrom proboscis.asserts import assert_not_equal\nfrom proboscis.asserts import assert_raises\nfrom proboscis.asserts import assert_true\nfrom proboscis.asserts import fail\nfrom proboscis import before_class\nfrom proboscis.decorators import time_out\nfrom proboscis import SkipTest\nfrom proboscis import test\nfrom troveclient.compat import exceptions\n\nfrom trove.common.utils import poll_until\nfrom trove import tests\nfrom trove.tests.api.instances import assert_unprocessable\nfrom trove.tests.api.instances import instance_info\nfrom trove.tests.api.instances import InstanceTestInfo\nfrom trove.tests.api.instances import TIMEOUT_INSTANCE_CREATE\nfrom trove.tests.api.instances import TIMEOUT_INSTANCE_DELETE\nfrom trove.tests.config import CONFIG\nfrom trove.tests.util.check import AttrCheck\nfrom trove.tests.util.check import CollectionCheck\nfrom trove.tests.util.check import TypeCheck\nfrom trove.tests.util import create_dbaas_client\nfrom trove.tests.util.mysql import create_mysql_connection\nfrom trove.tests.util.users import Requirements\n\nCONFIG_NAME = \"test_configuration\"\nCONFIG_DESC = \"configuration description\"\n\nconfiguration_default = 
None\nconfiguration_info = None\nconfiguration_href = None\nconfiguration_instance = InstanceTestInfo()\nconfiguration_instance_id = None\nsql_variables = [\n 'key_buffer_size',\n 'connect_timeout',\n 'join_buffer_size',\n]\n\n\ndef _is_valid_timestamp(time_string):\n try:\n datetime.strptime(time_string, \"%Y-%m-%dT%H:%M:%S\")\n except ValueError:\n return False\n return True\n\n\n# helper methods to validate configuration is applied to instance\ndef _execute_query(host, user_name, password, query):\n print(\"Starting to query database, host: %s, user: %s, password: %s, \"\n \"query: %s\" % (host, user_name, password, query))\n\n with create_mysql_connection(host, user_name, password) as db:\n result = db.execute(query)\n return result\n\n\ndef _get_address(instance_id):\n result = instance_info.dbaas_admin.mgmt.instances.show(instance_id)\n try:\n return next(str(ip) for ip in result.ip\n if netaddr.valid_ipv4(ip))\n except StopIteration:\n fail(\"No IPV4 ip found\")\n\n\ndef _test_configuration_is_applied_to_instance(instance, configuration_id):\n if CONFIG.fake_mode:\n raise SkipTest(\"configuration from sql does not work in fake mode\")\n instance_test = instance_info.dbaas.instances.get(instance.id)\n assert_equal(configuration_id, instance_test.configuration['id'])\n if configuration_id:\n testconfig_info = instance_info.dbaas.configurations.get(\n configuration_id)\n else:\n testconfig_info = instance_info.dbaas.instance.configuration(\n instance.id)\n testconfig_info['configuration']\n conf_instances = instance_info.dbaas.configurations.instances(\n configuration_id)\n config_instance_ids = [inst.id for inst in conf_instances]\n assert_true(instance_test.id in config_instance_ids)\n cfg_names = testconfig_info.values.keys()\n\n host = _get_address(instance.id)\n for user in instance.users:\n username = user['name']\n password = user['password']\n concat_variables = \"','\".join(cfg_names)\n query = (\"show variables where Variable_name \"\n \"in ('%s');\" 
% concat_variables)\n actual_values = _execute_query(host, username, password, query)\n print(\"actual_values %s\" % actual_values)\n print(\"testconfig_info.values %s\" % testconfig_info.values)\n assert_true(len(actual_values) == len(cfg_names))\n\n # check the configs exist\n attrcheck = AttrCheck()\n allowed_attrs = [actual_key for actual_key, actual_value in actual_values]\n attrcheck.contains_allowed_attrs(\n testconfig_info.values, allowed_attrs,\n msg=\"Configurations parameters\")\n\n def _get_parameter_type(name):\n instance_info.dbaas.configuration_parameters.get_parameter(\n instance_info.dbaas_datastore,\n instance_info.dbaas_datastore_version,\n name)\n resp, body = instance_info.dbaas.client.last_response\n print(resp)\n print(body)\n return json.loads(body.decode())['type']\n\n # check the config values are correct\n for key, value in actual_values:\n key_type = _get_parameter_type(key)\n # mysql returns 'ON' and 'OFF' for True and False respectively\n if value == 'ON':\n converted_key_value = (str(key), 1)\n elif value == 'OFF':\n converted_key_value = (str(key), 0)\n else:\n if key_type == 'integer':\n value = int(value)\n converted_key_value = (str(key), value)\n print(\"converted_key_value: %s\" % str(converted_key_value))\n assert_true(converted_key_value in testconfig_info.values.items())\n\n\nclass ConfigurationsTestBase(object):\n\n @staticmethod\n def expected_instance_datastore_configs(instance_id):\n \"\"\"Given an instance retrieve the expected test configurations for\n instance's datastore.\n \"\"\"\n instance = instance_info.dbaas.instances.get(instance_id)\n datastore_type = instance.datastore['type']\n datastore_test_configs = CONFIG.get(datastore_type, {})\n return datastore_test_configs.get(\"configurations\", {})\n\n @staticmethod\n def expected_default_datastore_configs():\n \"\"\"Returns the expected test configurations for the default datastore\n defined in the Test Config as dbaas_datastore.\n \"\"\"\n default_datastore = 
CONFIG.get('dbaas_datastore', None)\n datastore_test_configs = CONFIG.get(default_datastore, {})\n return datastore_test_configs.get(\"configurations\", {})\n\n\n@test(depends_on_groups=[tests.DBAAS_API_BACKUPS],\n groups=[tests.DBAAS_API_CONFIGURATIONS])\nclass CreateConfigurations(ConfigurationsTestBase):\n\n @test\n def test_expected_configurations_parameters(self):\n \"\"\"Test get expected configurations parameters.\"\"\"\n allowed_attrs = [\"configuration-parameters\"]\n instance_info.dbaas.configuration_parameters.parameters(\n instance_info.dbaas_datastore,\n instance_info.dbaas_datastore_version)\n resp, body = instance_info.dbaas.client.last_response\n attrcheck = AttrCheck()\n config_parameters_dict = json.loads(body.decode())\n attrcheck.contains_allowed_attrs(\n config_parameters_dict, allowed_attrs,\n msg=\"Configurations parameters\")\n # sanity check that a few options are in the list\n config_params_list = config_parameters_dict['configuration-parameters']\n config_param_keys = []\n for param in config_params_list:\n config_param_keys.append(param['name'])\n expected_configs = self.expected_default_datastore_configs()\n expected_config_params = expected_configs.get('parameters_list')\n # check for duplicate configuration parameters\n msg = \"check for duplicate configuration parameters\"\n assert_equal(len(config_param_keys), len(set(config_param_keys)), msg)\n for expected_config_item in expected_config_params:\n assert_true(expected_config_item in config_param_keys)\n\n @test\n def test_expected_get_configuration_parameter(self):\n # tests get on a single parameter to verify it has expected attributes\n param_name = 'key_buffer_size'\n allowed_config_params = ['name', 'restart_required',\n 'max', 'min', 'type',\n 'deleted', 'deleted_at',\n 'datastore_version_id']\n param = instance_info.dbaas.configuration_parameters.get_parameter(\n instance_info.dbaas_datastore,\n instance_info.dbaas_datastore_version,\n param_name)\n resp, body = 
instance_info.dbaas.client.last_response\n print(\"params: %s\" % param)\n print(\"resp: %s\" % resp)\n print(\"body: %s\" % body)\n attrcheck = AttrCheck()\n config_parameter_dict = json.loads(body.decode())\n print(\"config_parameter_dict: %s\" % config_parameter_dict)\n attrcheck.contains_allowed_attrs(\n config_parameter_dict,\n allowed_config_params,\n msg=\"Get Configuration parameter\")\n assert_equal(param_name, config_parameter_dict['name'])\n with TypeCheck('ConfigurationParameter', param) as parameter:\n parameter.has_field('name', str)\n parameter.has_field('restart_required', bool)\n parameter.has_field('max', int)\n parameter.has_field('min', int)\n parameter.has_field('type', str)\n parameter.has_field('datastore_version_id', str)\n\n @test\n def test_configurations_create_invalid_values(self):\n \"\"\"Test create configurations with invalid values.\"\"\"\n values = '{\"this_is_invalid\": 123}'\n try:\n instance_info.dbaas.configurations.create(\n CONFIG_NAME,\n values,\n CONFIG_DESC)\n except exceptions.UnprocessableEntity:\n resp, body = instance_info.dbaas.client.last_response\n assert_equal(resp.status, 422)\n\n @test\n def test_configurations_create_invalid_value_type(self):\n \"\"\"Test create configuration with invalid value type.\"\"\"\n values = '{\"key_buffer_size\": \"this is a string not int\"}'\n assert_unprocessable(instance_info.dbaas.configurations.create,\n CONFIG_NAME, values, CONFIG_DESC)\n\n @test\n def test_configurations_create_value_out_of_bounds(self):\n \"\"\"Test create configuration with value out of bounds.\"\"\"\n expected_configs = self.expected_default_datastore_configs()\n values = json.dumps(expected_configs.get('out_of_bounds_over'))\n assert_unprocessable(instance_info.dbaas.configurations.create,\n CONFIG_NAME, values, CONFIG_DESC)\n values = json.dumps(expected_configs.get('out_of_bounds_under'))\n assert_unprocessable(instance_info.dbaas.configurations.create,\n CONFIG_NAME, values, CONFIG_DESC)\n\n @test\n def 
test_valid_configurations_create(self):\n \"\"\"create a configuration with valid parameters from config.\"\"\"\n expected_configs = self.expected_default_datastore_configs()\n values = json.dumps(expected_configs.get('valid_values'))\n expected_values = json.loads(values)\n result = instance_info.dbaas.configurations.create(\n CONFIG_NAME,\n values,\n CONFIG_DESC,\n datastore=instance_info.dbaas_datastore,\n datastore_version=instance_info.dbaas_datastore_version)\n resp, body = instance_info.dbaas.client.last_response\n assert_equal(resp.status, 200)\n with TypeCheck('Configuration', result) as configuration:\n configuration.has_field('name', str)\n configuration.has_field('description', str)\n configuration.has_field('values', dict)\n configuration.has_field('datastore_name', str)\n configuration.has_field('datastore_version_id', str)\n configuration.has_field('datastore_version_name', str)\n global configuration_info\n configuration_info = result\n assert_equal(configuration_info.name, CONFIG_NAME)\n assert_equal(configuration_info.description, CONFIG_DESC)\n assert_equal(configuration_info.values, expected_values)\n\n @test(runs_after=[test_valid_configurations_create])\n def test_appending_to_existing_configuration(self):\n \"\"\"test_appending_to_existing_configuration\"\"\"\n # test being able to update and insert new parameter name and values\n # to an existing configuration\n expected_configs = self.expected_default_datastore_configs()\n values = json.dumps(expected_configs.get('appending_values'))\n # ensure updated timestamp is different than created\n if not CONFIG.fake_mode:\n sleep(1)\n instance_info.dbaas.configurations.edit(configuration_info.id,\n values)\n resp, body = instance_info.dbaas.client.last_response\n assert_equal(resp.status, 200)\n\n\n@test(depends_on_classes=[CreateConfigurations],\n groups=[tests.DBAAS_API_CONFIGURATIONS])\nclass AfterConfigurationsCreation(ConfigurationsTestBase):\n\n @test\n def 
test_assign_configuration_to_invalid_instance(self):\n \"\"\"test assigning to an instance that does not exist\"\"\"\n invalid_id = \"invalid-inst-id\"\n try:\n instance_info.dbaas.instances.modify(invalid_id,\n configuration_info.id)\n except exceptions.NotFound:\n resp, body = instance_info.dbaas.client.last_response\n assert_equal(resp.status, 404)\n\n @test\n def test_assign_configuration_to_valid_instance(self):\n \"\"\"test assigning a configuration to an instance\"\"\"\n print(\"instance_info.id: %s\" % instance_info.id)\n print(\"configuration_info: %s\" % configuration_info)\n print(\"configuration_info.id: %s\" % configuration_info.id)\n config_id = configuration_info.id\n instance_info.dbaas.instances.modify(instance_info.id,\n configuration=config_id)\n resp, body = instance_info.dbaas.client.last_response\n assert_equal(resp.status, 202)\n\n @test(depends_on=[test_assign_configuration_to_valid_instance])\n def test_assign_configuration_to_instance_with_config(self):\n \"\"\"test assigning a configuration to an instance conflicts\"\"\"\n config_id = configuration_info.id\n assert_raises(exceptions.BadRequest,\n instance_info.dbaas.instances.modify, instance_info.id,\n configuration=config_id)\n\n @test(depends_on=[test_assign_configuration_to_valid_instance])\n @time_out(30)\n def test_get_configuration_details_from_instance_validation(self):\n \"\"\"validate the configuration after attaching\"\"\"\n print(\"instance_info.id: %s\" % instance_info.id)\n inst = instance_info.dbaas.instances.get(instance_info.id)\n configuration_id = inst.configuration['id']\n print(\"configuration_info: %s\" % configuration_id)\n assert_not_equal(None, configuration_id)\n _test_configuration_is_applied_to_instance(instance_info,\n configuration_id)\n\n @test(depends_on=[test_get_configuration_details_from_instance_validation])\n def test_configurations_get(self):\n \"\"\"test that the instance shows up on the assigned configuration\"\"\"\n result = 
instance_info.dbaas.configurations.get(configuration_info.id)\n assert_equal(configuration_info.id, result.id)\n assert_equal(configuration_info.name, result.name)\n assert_equal(configuration_info.description, result.description)\n\n # check the result field types\n with TypeCheck(\"configuration\", result) as check:\n check.has_field(\"id\", str)\n check.has_field(\"name\", str)\n check.has_field(\"description\", str)\n check.has_field(\"values\", dict)\n check.has_field(\"created\", str)\n check.has_field(\"updated\", str)\n check.has_field(\"instance_count\", int)\n\n print(result.values)\n\n # check for valid timestamps\n assert_true(_is_valid_timestamp(result.created))\n assert_true(_is_valid_timestamp(result.updated))\n\n # check that created and updated timestamps differ, since\n # test_appending_to_existing_configuration should have changed the\n # updated timestamp\n if not CONFIG.fake_mode:\n assert_not_equal(result.created, result.updated)\n\n assert_equal(result.instance_count, 1)\n\n with CollectionCheck(\"configuration_values\", result.values) as check:\n # check each item has the correct type according to the rules\n for (item_key, item_val) in result.values.items():\n print(\"item_key: %s\" % item_key)\n print(\"item_val: %s\" % item_val)\n dbaas = instance_info.dbaas\n param = dbaas.configuration_parameters.get_parameter(\n instance_info.dbaas_datastore,\n instance_info.dbaas_datastore_version,\n item_key)\n if param.type == 'integer':\n check.has_element(item_key, int)\n if param.type == 'string':\n check.has_element(item_key, str)\n if param.type == 'boolean':\n check.has_element(item_key, bool)\n\n # Test to make sure that another user is not able to GET this config\n reqs = Requirements(is_admin=False)\n test_auth_user = instance_info.user.auth_user\n other_user = CONFIG.users.find_user(reqs, black_list=[test_auth_user])\n other_user_tenant_id = other_user.tenant_id\n client_tenant_id = instance_info.user.tenant_id\n if other_user_tenant_id == 
client_tenant_id:\n other_user = CONFIG.users.find_user(\n reqs, black_list=[instance_info.user.auth_user,\n other_user])\n print(other_user)\n print(other_user.__dict__)\n other_client = create_dbaas_client(other_user)\n assert_raises(exceptions.NotFound, other_client.configurations.get,\n configuration_info.id)\n\n\n@test(depends_on_classes=[AfterConfigurationsCreation],\n groups=[tests.DBAAS_API_CONFIGURATIONS])\nclass ListConfigurations(ConfigurationsTestBase):\n\n @test\n def test_configurations_list(self):\n # test listing configurations show up\n result = instance_info.dbaas.configurations.list()\n for conf in result:\n with TypeCheck(\"Configuration\", conf) as check:\n check.has_field('id', str)\n check.has_field('name', str)\n check.has_field('description', str)\n check.has_field('datastore_version_id', str)\n check.has_field('datastore_version_name', str)\n check.has_field('datastore_name', str)\n\n exists = [config for config in result if\n config.id == configuration_info.id]\n assert_equal(1, len(exists))\n configuration = exists[0]\n assert_equal(configuration.id, configuration_info.id)\n assert_equal(configuration.name, configuration_info.name)\n assert_equal(configuration.description, configuration_info.description)\n\n @test\n def test_configurations_list_for_instance(self):\n # test getting an instance shows the configuration assigned shows up\n instance = instance_info.dbaas.instances.get(instance_info.id)\n assert_equal(instance.configuration['id'], configuration_info.id)\n assert_equal(instance.configuration['name'], configuration_info.name)\n # expecting two things in links, href and bookmark\n assert_equal(2, len(instance.configuration['links']))\n link = instance.configuration['links'][0]\n global configuration_href\n configuration_href = link['href']\n\n @test\n def test_get_default_configuration_on_instance(self):\n # test the api call to get the default template of an instance exists\n result = 
instance_info.dbaas.instances.configuration(instance_info.id)\n global configuration_default\n configuration_default = result\n assert_not_equal(None, result.configuration)\n\n @test\n def test_changing_configuration_with_nondynamic_parameter(self):\n \"\"\"test_changing_configuration_with_nondynamic_parameter\"\"\"\n expected_configs = self.expected_default_datastore_configs()\n values = json.dumps(expected_configs.get('nondynamic_parameter'))\n instance_info.dbaas.configurations.update(configuration_info.id,\n values)\n resp, body = instance_info.dbaas.client.last_response\n assert_equal(resp.status, 202)\n\n instance_info.dbaas.configurations.get(configuration_info.id)\n resp, body = instance_info.dbaas.client.last_response\n assert_equal(resp.status, 200)\n\n @test(depends_on=[test_changing_configuration_with_nondynamic_parameter])\n @time_out(20)\n def test_waiting_for_instance_in_restart_required(self):\n \"\"\"test_waiting_for_instance_in_restart_required\"\"\"\n def result_is_not_active():\n instance = instance_info.dbaas.instances.get(\n instance_info.id)\n if instance.status in CONFIG.running_status:\n return False\n else:\n return True\n poll_until(result_is_not_active)\n\n instance = instance_info.dbaas.instances.get(instance_info.id)\n resp, body = instance_info.dbaas.client.last_response\n assert_equal(resp.status, 200)\n assert_equal('RESTART_REQUIRED', instance.status)\n\n @test(depends_on=[test_waiting_for_instance_in_restart_required])\n def test_restart_service_should_return_active(self):\n \"\"\"test_restart_service_should_return_active\"\"\"\n instance_info.dbaas.instances.restart(instance_info.id)\n resp, body = instance_info.dbaas.client.last_response\n assert_equal(resp.status, 202)\n\n def result_is_active():\n instance = instance_info.dbaas.instances.get(\n instance_info.id)\n if instance.status in CONFIG.running_status:\n return True\n else:\n assert_true(instance.status in ['REBOOT', 'SHUTDOWN'])\n return False\n 
poll_until(result_is_active)\n\n @test(depends_on=[test_restart_service_should_return_active])\n @time_out(30)\n def test_get_configuration_details_from_instance_validation(self):\n \"\"\"test_get_configuration_details_from_instance_validation\"\"\"\n inst = instance_info.dbaas.instances.get(instance_info.id)\n configuration_id = inst.configuration['id']\n assert_not_equal(None, inst.configuration['id'])\n _test_configuration_is_applied_to_instance(instance_info,\n configuration_id)\n\n @test(depends_on=[test_configurations_list])\n def test_compare_list_and_details_timestamps(self):\n # compare config timestamps between list and details calls\n result = instance_info.dbaas.configurations.list()\n list_config = [config for config in result if\n config.id == configuration_info.id]\n assert_equal(1, len(list_config))\n details_config = instance_info.dbaas.configurations.get(\n configuration_info.id)\n assert_equal(list_config[0].created, details_config.created)\n assert_equal(list_config[0].updated, details_config.updated)\n\n\n@test(depends_on_classes=[ListConfigurations],\n groups=[tests.DBAAS_API_CONFIGURATIONS])\nclass StartInstanceWithConfiguration(ConfigurationsTestBase):\n\n @test\n def test_start_instance_with_configuration(self):\n \"\"\"test that a new instance will apply the configuration on create\"\"\"\n global configuration_instance\n databases = []\n databases.append({\"name\": \"firstdbconfig\", \"character_set\": \"latin2\",\n \"collate\": \"latin2_general_ci\"})\n databases.append({\"name\": \"db2\"})\n configuration_instance.databases = databases\n users = []\n users.append({\"name\": \"liteconf\", \"password\": \"liteconfpass\",\n \"databases\": [{\"name\": \"firstdbconfig\"}]})\n configuration_instance.users = users\n configuration_instance.name = \"TEST_\" + str(uuid.uuid4()) + \"_config\"\n flavor_href = instance_info.dbaas_flavor_href\n configuration_instance.dbaas_flavor_href = flavor_href\n configuration_instance.volume = 
instance_info.volume\n configuration_instance.dbaas_datastore = instance_info.dbaas_datastore\n configuration_instance.dbaas_datastore_version = \\\n instance_info.dbaas_datastore_version\n configuration_instance.nics = instance_info.nics\n\n result = instance_info.dbaas.instances.create(\n configuration_instance.name,\n configuration_instance.dbaas_flavor_href,\n configuration_instance.volume,\n configuration_instance.databases,\n configuration_instance.users,\n nics=configuration_instance.nics,\n availability_zone=\"nova\",\n datastore=configuration_instance.dbaas_datastore,\n datastore_version=configuration_instance.dbaas_datastore_version,\n configuration=configuration_href)\n assert_equal(200, instance_info.dbaas.last_http_code)\n assert_equal(\"BUILD\", result.status)\n configuration_instance.id = result.id\n\n\n@test(depends_on_classes=[StartInstanceWithConfiguration],\n groups=[tests.DBAAS_API_CONFIGURATIONS])\nclass WaitForConfigurationInstanceToFinish(ConfigurationsTestBase):\n\n @test\n @time_out(TIMEOUT_INSTANCE_CREATE)\n def test_instance_with_configuration_active(self):\n \"\"\"wait for the instance created with configuration\"\"\"\n\n def result_is_active():\n instance = instance_info.dbaas.instances.get(\n configuration_instance.id)\n if instance.status in CONFIG.running_status:\n return True\n else:\n assert_equal(\"BUILD\", instance.status)\n return False\n\n poll_until(result_is_active)\n\n @test(depends_on=[test_instance_with_configuration_active])\n @time_out(30)\n def test_get_configuration_details_from_instance_validation(self):\n \"\"\"Test configuration is applied correctly to the instance.\"\"\"\n inst = instance_info.dbaas.instances.get(configuration_instance.id)\n configuration_id = inst.configuration['id']\n assert_not_equal(None, configuration_id)\n _test_configuration_is_applied_to_instance(configuration_instance,\n configuration_id)\n\n\n@test(depends_on=[WaitForConfigurationInstanceToFinish],\n 
groups=[tests.DBAAS_API_CONFIGURATIONS])\nclass DeleteConfigurations(ConfigurationsTestBase):\n\n @before_class\n def setUp(self):\n # need to store the parameter details that will be deleted\n config_param_name = sql_variables[1]\n instance_info.dbaas.configuration_parameters.get_parameter(\n instance_info.dbaas_datastore,\n instance_info.dbaas_datastore_version,\n config_param_name)\n resp, body = instance_info.dbaas.client.last_response\n print(resp)\n print(body)\n self.config_parameter_dict = json.loads(body.decode())\n\n @after_class(always_run=True)\n def tearDown(self):\n # need to \"undelete\" the parameter that was deleted from the mgmt call\n if instance_info.dbaas:\n ds = instance_info.dbaas_datastore\n ds_v = instance_info.dbaas_datastore_version\n version = instance_info.dbaas.datastore_versions.get(\n ds, ds_v)\n client = instance_info.dbaas_admin.mgmt_configs\n print(self.config_parameter_dict)\n client.create(version.id,\n self.config_parameter_dict['name'],\n self.config_parameter_dict['restart_required'],\n self.config_parameter_dict['type'],\n self.config_parameter_dict['max'],\n self.config_parameter_dict['min'])\n\n @test\n def test_delete_invalid_configuration_not_found(self):\n # test deleting a configuration that does not exist throws exception\n invalid_configuration_id = \"invalid-config-id\"\n assert_raises(exceptions.NotFound,\n instance_info.dbaas.configurations.delete,\n invalid_configuration_id)\n\n @test(depends_on=[test_delete_invalid_configuration_not_found])\n def test_delete_configuration_parameter_with_mgmt_api(self):\n # testing a param that is assigned to an instance can be deleted\n # and doesn't affect an unassign later. 
So we delete a parameter\n # that is used by a test (connect_timeout)\n ds = instance_info.dbaas_datastore\n ds_v = instance_info.dbaas_datastore_version\n version = instance_info.dbaas.datastore_versions.get(\n ds, ds_v)\n client = instance_info.dbaas_admin.mgmt_configs\n config_param_name = self.config_parameter_dict['name']\n client.delete(version.id, config_param_name)\n assert_raises(\n exceptions.NotFound,\n instance_info.dbaas.configuration_parameters.get_parameter,\n ds,\n ds_v,\n config_param_name)\n\n @test(depends_on=[test_delete_configuration_parameter_with_mgmt_api])\n def test_unable_delete_instance_configurations(self):\n # test deleting a configuration that is assigned to\n # an instance is not allowed.\n assert_raises(exceptions.BadRequest,\n instance_info.dbaas.configurations.delete,\n configuration_info.id)\n\n @test(depends_on=[test_unable_delete_instance_configurations])\n @time_out(30)\n def test_unassign_configuration_from_instances(self):\n \"\"\"test to unassign configuration from instance\"\"\"\n instance_info.dbaas.instances.update(configuration_instance.id,\n remove_configuration=True)\n resp, body = instance_info.dbaas.client.last_response\n assert_equal(resp.status, 202)\n\n instance_info.dbaas.instances.update(instance_info.id,\n remove_configuration=True)\n resp, body = instance_info.dbaas.client.last_response\n assert_equal(resp.status, 202)\n instance_info.dbaas.instances.get(instance_info.id)\n\n def result_has_no_configuration():\n instance = instance_info.dbaas.instances.get(inst_info.id)\n if hasattr(instance, 'configuration'):\n return False\n else:\n return True\n\n inst_info = instance_info\n poll_until(result_has_no_configuration)\n inst_info = configuration_instance\n poll_until(result_has_no_configuration)\n\n instance = instance_info.dbaas.instances.get(instance_info.id)\n assert_equal('RESTART_REQUIRED', instance.status)\n\n @test(depends_on=[test_unassign_configuration_from_instances])\n def 
test_assign_in_wrong_state(self):\n # test assigning a config to an instance in RESTART state\n assert_raises(exceptions.BadRequest,\n instance_info.dbaas.instances.modify,\n configuration_instance.id,\n configuration=configuration_info.id)\n\n @test(depends_on=[test_assign_in_wrong_state])\n def test_no_instances_on_configuration(self):\n \"\"\"test_no_instances_on_configuration\"\"\"\n result = instance_info.dbaas.configurations.get(configuration_info.id)\n assert_equal(configuration_info.id, result.id)\n assert_equal(configuration_info.name, result.name)\n assert_equal(configuration_info.description, result.description)\n assert_equal(result.instance_count, 0)\n print(configuration_instance.id)\n print(instance_info.id)\n\n @test(depends_on=[test_unassign_configuration_from_instances])\n @time_out(120)\n def test_restart_service_should_return_active(self):\n \"\"\"test that after restarting the instance it becomes active\"\"\"\n instance_info.dbaas.instances.restart(instance_info.id)\n resp, body = instance_info.dbaas.client.last_response\n assert_equal(resp.status, 202)\n\n def result_is_active():\n instance = instance_info.dbaas.instances.get(\n instance_info.id)\n if instance.status in CONFIG.running_status:\n return True\n else:\n assert_equal(\"REBOOT\", instance.status)\n return False\n poll_until(result_is_active)\n\n @test(depends_on=[test_restart_service_should_return_active])\n def test_assign_config_and_name_to_instance_using_patch(self):\n \"\"\"test_assign_config_and_name_to_instance_using_patch\"\"\"\n new_name = 'new_name'\n report = CONFIG.get_report()\n report.log(\"instance_info.id: %s\" % instance_info.id)\n report.log(\"configuration_info: %s\" % configuration_info)\n report.log(\"configuration_info.id: %s\" % configuration_info.id)\n report.log(\"instance name:%s\" % instance_info.name)\n report.log(\"instance new name:%s\" % new_name)\n saved_name = instance_info.name\n config_id = configuration_info.id\n 
instance_info.dbaas.instances.update(instance_info.id,\n configuration=config_id,\n name=new_name)\n assert_equal(202, instance_info.dbaas.last_http_code)\n check = instance_info.dbaas.instances.get(instance_info.id)\n assert_equal(200, instance_info.dbaas.last_http_code)\n assert_equal(check.name, new_name)\n\n # restore instance name\n instance_info.dbaas.instances.update(instance_info.id,\n name=saved_name)\n assert_equal(202, instance_info.dbaas.last_http_code)\n\n instance = instance_info.dbaas.instances.get(instance_info.id)\n assert_equal('RESTART_REQUIRED', instance.status)\n # restart to be sure configuration is applied\n instance_info.dbaas.instances.restart(instance_info.id)\n assert_equal(202, instance_info.dbaas.last_http_code)\n sleep(2)\n\n def result_is_active():\n instance = instance_info.dbaas.instances.get(\n instance_info.id)\n if instance.status in CONFIG.running_status:\n return True\n else:\n assert_equal(\"REBOOT\", instance.status)\n return False\n poll_until(result_is_active)\n # test assigning a configuration to an instance that\n # already has an assigned configuration with patch\n config_id = configuration_info.id\n assert_raises(exceptions.BadRequest,\n instance_info.dbaas.instances.update,\n instance_info.id, configuration=config_id)\n\n @test(runs_after=[test_assign_config_and_name_to_instance_using_patch])\n def test_unassign_configuration_after_patch(self):\n \"\"\"Remove the configuration from the instance\"\"\"\n instance_info.dbaas.instances.update(instance_info.id,\n remove_configuration=True)\n assert_equal(202, instance_info.dbaas.last_http_code)\n instance = instance_info.dbaas.instances.get(instance_info.id)\n assert_equal('RESTART_REQUIRED', instance.status)\n # restart to be sure configuration has been unassigned\n instance_info.dbaas.instances.restart(instance_info.id)\n assert_equal(202, instance_info.dbaas.last_http_code)\n sleep(2)\n\n def result_is_active():\n instance = instance_info.dbaas.instances.get(\n 
instance_info.id)\n if instance.status in CONFIG.running_status:\n return True\n else:\n assert_equal(\"REBOOT\", instance.status)\n return False\n\n poll_until(result_is_active)\n result = instance_info.dbaas.configurations.get(configuration_info.id)\n assert_equal(result.instance_count, 0)\n\n @test\n def test_unassign_configuration_from_invalid_instance_using_patch(self):\n # test unassign config group from an invalid instance\n invalid_id = \"invalid-inst-id\"\n try:\n instance_info.dbaas.instances.update(invalid_id,\n remove_configuration=True)\n except exceptions.NotFound:\n resp, body = instance_info.dbaas.client.last_response\n assert_equal(resp.status, 404)\n\n @test(runs_after=[test_unassign_configuration_after_patch])\n def test_delete_unassigned_configuration(self):\n \"\"\"test_delete_unassigned_configuration\"\"\"\n instance_info.dbaas.configurations.delete(configuration_info.id)\n resp, body = instance_info.dbaas.client.last_response\n assert_equal(resp.status, 202)\n\n @test(depends_on=[test_delete_unassigned_configuration])\n @time_out(TIMEOUT_INSTANCE_DELETE)\n def test_delete_configuration_instance(self):\n \"\"\"test_delete_configuration_instance\"\"\"\n instance_info.dbaas.instances.delete(configuration_instance.id)\n assert_equal(202, instance_info.dbaas.last_http_code)\n\n def instance_is_gone():\n try:\n instance_info.dbaas.instances.get(configuration_instance.id)\n return False\n except exceptions.NotFound:\n return True\n\n poll_until(instance_is_gone)\n assert_raises(exceptions.NotFound, instance_info.dbaas.instances.get,\n configuration_instance.id)\n",
"step-ids": [
29,
40,
43,
52,
53
]
}
|
[
29,
40,
43,
52,
53
] |
from distutils.core import setup

# Package metadata collected in one mapping so the setup() call stays short.
_METADATA = {
    'name': 'json_config',
    'version': '0.0.01',
    'packages': ['', 'test'],
    'url': '',
    'license': '',
    'author': 'craig.ferguson',
    'author_email': '',
    'description': 'Simple Functional Config For Changing Environments',
}

setup(**_METADATA)
|
normal
|
{
"blob_id": "ee57e6a1ccbec93f3def8966f5621ea459f3d228",
"index": 6538,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nsetup(name='json_config', version='0.0.01', packages=['', 'test'], url='',\n license='', author='craig.ferguson', author_email='', description=\n 'Simple Functional Config For Changing Environments')\n",
"step-3": "from distutils.core import setup\nsetup(name='json_config', version='0.0.01', packages=['', 'test'], url='',\n license='', author='craig.ferguson', author_email='', description=\n 'Simple Functional Config For Changing Environments')\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
import copy
import datetime
from sacred import Experiment
from tqdm import tqdm
from mms_msg.databases.classical.full_overlap import WSJ2Mix
import paderbox as pb
import padertorch as pt
ex = Experiment('mixture_generator_create_json')
@ex.config
def defaults():
    """Sacred config scope: default JSON output path and database factory."""
    # Where the generated database description will be written.
    json_path = 'database.json'
    # Database specification; padertorch resolves the remaining defaults.
    database = {'factory': WSJ2Mix}
    pt.Configurable.get_config(database)
@ex.automain
def main(json_path, database, _log):
    """Materialize every dataset of the configured database and dump it as JSON."""
    database_config = database
    database = pt.configurable.config_to_instance(database)

    # Freeze each lazy dataset into a plain dict; tqdm shows per-dataset progress.
    datasets = {}
    for name in database.dataset_names:
        examples = tqdm(database.get_dataset(name).items(), desc=name)
        datasets[name] = dict(examples)

    meta = {
        'config': pt.configurable.recursive_class_to_str(
            copy.deepcopy(database_config)
        ),
        'generated': datetime.datetime.now(),
    }

    pb.io.dump({'datasets': datasets, 'meta': meta}, json_path)
    _log.info(f'Wrote file: {json_path}')
|
normal
|
{
"blob_id": "f39130099ccf467623d65ac328fd02538044d36a",
"index": 6476,
"step-1": "<mask token>\n\n\n@ex.automain\ndef main(json_path, database, _log):\n database_config = database\n database = pt.configurable.config_to_instance(database)\n database_dict = {'datasets': {dataset_name: dict(tqdm(database.\n get_dataset(dataset_name).items(), desc=dataset_name)) for\n dataset_name in database.dataset_names}, 'meta': {'config': pt.\n configurable.recursive_class_to_str(copy.deepcopy(database_config)),\n 'generated': datetime.datetime.now()}}\n pb.io.dump(database_dict, json_path)\n _log.info(f'Wrote file: {json_path}')\n",
"step-2": "<mask token>\n\n\n@ex.config\ndef defaults():\n json_path = 'database.json'\n database = {'factory': WSJ2Mix}\n pt.Configurable.get_config(database)\n\n\n@ex.automain\ndef main(json_path, database, _log):\n database_config = database\n database = pt.configurable.config_to_instance(database)\n database_dict = {'datasets': {dataset_name: dict(tqdm(database.\n get_dataset(dataset_name).items(), desc=dataset_name)) for\n dataset_name in database.dataset_names}, 'meta': {'config': pt.\n configurable.recursive_class_to_str(copy.deepcopy(database_config)),\n 'generated': datetime.datetime.now()}}\n pb.io.dump(database_dict, json_path)\n _log.info(f'Wrote file: {json_path}')\n",
"step-3": "<mask token>\nex = Experiment('mixture_generator_create_json')\n\n\n@ex.config\ndef defaults():\n json_path = 'database.json'\n database = {'factory': WSJ2Mix}\n pt.Configurable.get_config(database)\n\n\n@ex.automain\ndef main(json_path, database, _log):\n database_config = database\n database = pt.configurable.config_to_instance(database)\n database_dict = {'datasets': {dataset_name: dict(tqdm(database.\n get_dataset(dataset_name).items(), desc=dataset_name)) for\n dataset_name in database.dataset_names}, 'meta': {'config': pt.\n configurable.recursive_class_to_str(copy.deepcopy(database_config)),\n 'generated': datetime.datetime.now()}}\n pb.io.dump(database_dict, json_path)\n _log.info(f'Wrote file: {json_path}')\n",
"step-4": "import copy\nimport datetime\nfrom sacred import Experiment\nfrom tqdm import tqdm\nfrom mms_msg.databases.classical.full_overlap import WSJ2Mix\nimport paderbox as pb\nimport padertorch as pt\nex = Experiment('mixture_generator_create_json')\n\n\n@ex.config\ndef defaults():\n json_path = 'database.json'\n database = {'factory': WSJ2Mix}\n pt.Configurable.get_config(database)\n\n\n@ex.automain\ndef main(json_path, database, _log):\n database_config = database\n database = pt.configurable.config_to_instance(database)\n database_dict = {'datasets': {dataset_name: dict(tqdm(database.\n get_dataset(dataset_name).items(), desc=dataset_name)) for\n dataset_name in database.dataset_names}, 'meta': {'config': pt.\n configurable.recursive_class_to_str(copy.deepcopy(database_config)),\n 'generated': datetime.datetime.now()}}\n pb.io.dump(database_dict, json_path)\n _log.info(f'Wrote file: {json_path}')\n",
"step-5": "import copy\nimport datetime\n\nfrom sacred import Experiment\nfrom tqdm import tqdm\n\nfrom mms_msg.databases.classical.full_overlap import WSJ2Mix\nimport paderbox as pb\nimport padertorch as pt\n\nex = Experiment('mixture_generator_create_json')\n\n\n@ex.config\ndef defaults():\n json_path = 'database.json'\n database = {\n 'factory': WSJ2Mix,\n }\n pt.Configurable.get_config(database)\n\n\n@ex.automain\ndef main(json_path, database, _log):\n database_config = database\n database = pt.configurable.config_to_instance(database)\n database_dict = {\n 'datasets': {\n dataset_name: dict(tqdm(\n database.get_dataset(dataset_name).items(),\n desc=dataset_name,\n )) for dataset_name in database.dataset_names\n },\n 'meta': {\n 'config': pt.configurable.recursive_class_to_str(\n copy.deepcopy(database_config)\n ),\n 'generated': datetime.datetime.now(),\n }\n }\n pb.io.dump(database_dict, json_path)\n _log.info(f'Wrote file: {json_path}')\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
<|reserved_special_token_0|>
def get_signature(now_):
h = hmac.new(key='d1b964811afb40118a12068ff74a12f4'.encode('utf-8'),
digestmod=sha1)
grant_type = 'password'
client_id = 'c3cef7c66a1843f8b3a9e6a1e3160e20'
source = 'com.zhihu.web'
now = now_
h.update((grant_type + client_id + source + now).encode('utf-8'))
return h.hexdigest()
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def get_signature(now_):
h = hmac.new(key='d1b964811afb40118a12068ff74a12f4'.encode('utf-8'),
digestmod=sha1)
grant_type = 'password'
client_id = 'c3cef7c66a1843f8b3a9e6a1e3160e20'
source = 'com.zhihu.web'
now = now_
h.update((grant_type + client_id + source + now).encode('utf-8'))
return h.hexdigest()
<|reserved_special_token_0|>
print(signature)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def get_signature(now_):
h = hmac.new(key='d1b964811afb40118a12068ff74a12f4'.encode('utf-8'),
digestmod=sha1)
grant_type = 'password'
client_id = 'c3cef7c66a1843f8b3a9e6a1e3160e20'
source = 'com.zhihu.web'
now = now_
h.update((grant_type + client_id + source + now).encode('utf-8'))
return h.hexdigest()
timestamp = str(int(time.time() * 1000))
signature = get_signature(timestamp)
print(signature)
<|reserved_special_token_1|>
import hmac
import time
from hashlib import sha1
def get_signature(now_):
    """Return the HMAC-SHA1 hex signature over grant_type+client_id+source+timestamp."""
    payload = ''.join(('password',
                       'c3cef7c66a1843f8b3a9e6a1e3160e20',
                       'com.zhihu.web',
                       now_))
    mac = hmac.new(key='d1b964811afb40118a12068ff74a12f4'.encode('utf-8'),
                   msg=payload.encode('utf-8'), digestmod=sha1)
    return mac.hexdigest()
# Current time in milliseconds, stringified: this is the signed timestamp field.
timestamp = str(int(time.time() * 1000))
signature = get_signature(timestamp)
print(signature)
<|reserved_special_token_1|>
# encoding = utf-8
import hmac
import time
from hashlib import sha1
def get_signature(now_):
    """Return the HMAC-SHA1 login signature as a hex digest.

    Parameters
    ----------
    now_ : str
        Timestamp string (milliseconds since epoch) appended to the payload.

    Returns
    -------
    str
        40-character hexadecimal HMAC-SHA1 digest.
    """
    # The signature is generated from four fields: clientId, grantType,
    # source and the timestamp.  (Translated from the original comment.)
    h = hmac.new(
        key='d1b964811afb40118a12068ff74a12f4'.encode('utf-8'),
        digestmod=sha1)
    grant_type = 'password'
    client_id = 'c3cef7c66a1843f8b3a9e6a1e3160e20'
    source = 'com.zhihu.web'
    # Field order matters for the server-side check.  The redundant
    # `now = now_` alias from the original was removed.
    h.update((grant_type + client_id + source + now_).encode('utf-8'))
    return h.hexdigest()
# Millisecond timestamp is used both inside the signed payload and, presumably,
# as a separate request field by the caller -- TODO confirm against the client.
timestamp = str(int(time.time() * 1000))
signature = get_signature(timestamp)
print(signature)
|
flexible
|
{
"blob_id": "757a69f9ceaa3434c6d9f8b1fcdbadd991190f29",
"index": 9315,
"step-1": "<mask token>\n\n\ndef get_signature(now_):\n h = hmac.new(key='d1b964811afb40118a12068ff74a12f4'.encode('utf-8'),\n digestmod=sha1)\n grant_type = 'password'\n client_id = 'c3cef7c66a1843f8b3a9e6a1e3160e20'\n source = 'com.zhihu.web'\n now = now_\n h.update((grant_type + client_id + source + now).encode('utf-8'))\n return h.hexdigest()\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef get_signature(now_):\n h = hmac.new(key='d1b964811afb40118a12068ff74a12f4'.encode('utf-8'),\n digestmod=sha1)\n grant_type = 'password'\n client_id = 'c3cef7c66a1843f8b3a9e6a1e3160e20'\n source = 'com.zhihu.web'\n now = now_\n h.update((grant_type + client_id + source + now).encode('utf-8'))\n return h.hexdigest()\n\n\n<mask token>\nprint(signature)\n",
"step-3": "<mask token>\n\n\ndef get_signature(now_):\n h = hmac.new(key='d1b964811afb40118a12068ff74a12f4'.encode('utf-8'),\n digestmod=sha1)\n grant_type = 'password'\n client_id = 'c3cef7c66a1843f8b3a9e6a1e3160e20'\n source = 'com.zhihu.web'\n now = now_\n h.update((grant_type + client_id + source + now).encode('utf-8'))\n return h.hexdigest()\n\n\ntimestamp = str(int(time.time() * 1000))\nsignature = get_signature(timestamp)\nprint(signature)\n",
"step-4": "import hmac\nimport time\nfrom hashlib import sha1\n\n\ndef get_signature(now_):\n h = hmac.new(key='d1b964811afb40118a12068ff74a12f4'.encode('utf-8'),\n digestmod=sha1)\n grant_type = 'password'\n client_id = 'c3cef7c66a1843f8b3a9e6a1e3160e20'\n source = 'com.zhihu.web'\n now = now_\n h.update((grant_type + client_id + source + now).encode('utf-8'))\n return h.hexdigest()\n\n\ntimestamp = str(int(time.time() * 1000))\nsignature = get_signature(timestamp)\nprint(signature)\n",
"step-5": "# encoding = utf-8\nimport hmac\nimport time\nfrom hashlib import sha1\n\n\ndef get_signature(now_):\n # 签名由clientId,grantType,source,timestamp四个参数生成\n h = hmac.new(\n key='d1b964811afb40118a12068ff74a12f4'.encode('utf-8'),\n digestmod=sha1)\n grant_type = 'password'\n client_id = 'c3cef7c66a1843f8b3a9e6a1e3160e20'\n source = 'com.zhihu.web'\n now = now_\n h.update((grant_type + client_id + source + now).encode('utf-8'))\n return h.hexdigest()\n\n\ntimestamp = str(int(time.time() * 1000))\nsignature = get_signature(timestamp)\nprint(signature)\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
from django.apps import AppConfig
class PrimaryuserConfig(AppConfig):
    """Django application configuration for the PrimaryUser app."""
    # Must match the app's package name so Django can locate it.
    name = 'PrimaryUser'
|
normal
|
{
"blob_id": "82c10076ba73723b696e3e33280296c2a24f20b9",
"index": 4187,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass PrimaryuserConfig(AppConfig):\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass PrimaryuserConfig(AppConfig):\n name = 'PrimaryUser'\n",
"step-4": "from django.apps import AppConfig\n\n\nclass PrimaryuserConfig(AppConfig):\n name = 'PrimaryUser'\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
class Rect:
def __init__(self, w, h):
self.w = w
self.h = h
def half(self):
return self.w / 2
<|reserved_special_token_0|>
def setup():
size(500, 500)
noLoop()
<|reserved_special_token_0|>
<|reserved_special_token_1|>
class Rect:
def __init__(self, w, h):
self.w = w
self.h = h
def half(self):
return self.w / 2
<|reserved_special_token_0|>
def setup():
size(500, 500)
noLoop()
def draw():
posx = 0
posy = 0
i = 0
for y in range(20):
posx = 0
for x in range(50):
fill(random(100, 250))
brick = get_brick(i)
rect(posx, posy, brick.w, brick.h)
posx += brick.w
i += 1
posy += brick.h
<|reserved_special_token_0|>
<|reserved_special_token_1|>
class Rect:
def __init__(self, w, h):
self.w = w
self.h = h
def half(self):
return self.w / 2
<|reserved_special_token_0|>
def setup():
size(500, 500)
noLoop()
def draw():
posx = 0
posy = 0
i = 0
for y in range(20):
posx = 0
for x in range(50):
fill(random(100, 250))
brick = get_brick(i)
rect(posx, posy, brick.w, brick.h)
posx += brick.w
i += 1
posy += brick.h
def get_brick(index):
i = int(random(len(bricks)))
return bricks[i]
<|reserved_special_token_1|>
class Rect:
    """A brick size: width ``w`` and height ``h`` in pixels."""
    def __init__(self, w, h):
        self.w = w
        self.h = h
    def half(self):
        """Return half of the brick's width (not called in the visible sketch)."""
        return self.w / 2
# Candidate brick sizes (uniform 25 px height) sampled at random while drawing.
bricks = [Rect(40, 25), Rect(30, 25), Rect(28, 25), Rect(13, 25)]
def setup():
    """Processing setup: 500x500 canvas; noLoop() makes draw() run only once."""
    size(500, 500)
    noLoop()
def draw():
    """Draw 20 rows of 50 randomly shaded, randomly sized bricks."""
    posx = 0
    posy = 0
    i = 0
    for y in range(20):
        posx = 0  # restart at the left edge for every row
        for x in range(50):
            fill(random(100, 250))  # random grey-scale fill per brick
            brick = get_brick(i)
            rect(posx, posy, brick.w, brick.h)
            posx += brick.w
            i += 1
        # Row advance uses the last brick drawn; all bricks are 25 px tall.
        posy += brick.h
def get_brick(index):
    """Return a random brick from ``bricks``; ``index`` is accepted but unused."""
    i = int(random(len(bricks)))
    return bricks[i]
<|reserved_special_token_1|>
class Rect:
    """Axis-aligned brick size: width ``w`` and height ``h`` in pixels.

    Fixes versus the original: dropped the non-Python trailing semicolon in
    ``half`` and the redundant empty parentheses in the class statement.
    """

    def __init__(self, w, h):
        self.w = w
        self.h = h

    def half(self):
        """Return half of the brick's width."""
        return self.w / 2
# Pool of brick sizes (all 25 px tall) sampled at random while drawing the wall.
bricks = [Rect(40, 25), Rect(30, 25), Rect(28, 25), Rect(13, 25)]
def setup():
    """Processing entry point: 500x500 canvas; noLoop() runs draw() once."""
    size(500, 500)
    noLoop()
def draw():
    """Paint 20 rows of 50 randomly sized, randomly shaded bricks."""
    y_cursor = 0
    brick_count = 0
    for _row in range(20):
        x_cursor = 0
        for _col in range(50):
            # Random grey-scale fill for each brick.
            fill(random(100, 250))
            brick = get_brick(brick_count)
            rect(x_cursor, y_cursor, brick.w, brick.h)
            x_cursor += brick.w
            brick_count += 1
        # Advance by the height of the last brick drawn in this row
        # (all bricks share the same 25 px height).
        y_cursor += brick.h
def get_brick(index):
    """Return a random entry from ``bricks``; ``index`` is accepted but unused."""
    choice = int(random(len(bricks)))
    return bricks[choice]
|
flexible
|
{
"blob_id": "807f0094a9736abdfa3f5b629615a80f1e0d13ef",
"index": 3037,
"step-1": "class Rect:\n\n def __init__(self, w, h):\n self.w = w\n self.h = h\n\n def half(self):\n return self.w / 2\n\n\n<mask token>\n\n\ndef setup():\n size(500, 500)\n noLoop()\n\n\n<mask token>\n",
"step-2": "class Rect:\n\n def __init__(self, w, h):\n self.w = w\n self.h = h\n\n def half(self):\n return self.w / 2\n\n\n<mask token>\n\n\ndef setup():\n size(500, 500)\n noLoop()\n\n\ndef draw():\n posx = 0\n posy = 0\n i = 0\n for y in range(20):\n posx = 0\n for x in range(50):\n fill(random(100, 250))\n brick = get_brick(i)\n rect(posx, posy, brick.w, brick.h)\n posx += brick.w\n i += 1\n posy += brick.h\n\n\n<mask token>\n",
"step-3": "class Rect:\n\n def __init__(self, w, h):\n self.w = w\n self.h = h\n\n def half(self):\n return self.w / 2\n\n\n<mask token>\n\n\ndef setup():\n size(500, 500)\n noLoop()\n\n\ndef draw():\n posx = 0\n posy = 0\n i = 0\n for y in range(20):\n posx = 0\n for x in range(50):\n fill(random(100, 250))\n brick = get_brick(i)\n rect(posx, posy, brick.w, brick.h)\n posx += brick.w\n i += 1\n posy += brick.h\n\n\ndef get_brick(index):\n i = int(random(len(bricks)))\n return bricks[i]\n",
"step-4": "class Rect:\n\n def __init__(self, w, h):\n self.w = w\n self.h = h\n\n def half(self):\n return self.w / 2\n\n\nbricks = [Rect(40, 25), Rect(30, 25), Rect(28, 25), Rect(13, 25)]\n\n\ndef setup():\n size(500, 500)\n noLoop()\n\n\ndef draw():\n posx = 0\n posy = 0\n i = 0\n for y in range(20):\n posx = 0\n for x in range(50):\n fill(random(100, 250))\n brick = get_brick(i)\n rect(posx, posy, brick.w, brick.h)\n posx += brick.w\n i += 1\n posy += brick.h\n\n\ndef get_brick(index):\n i = int(random(len(bricks)))\n return bricks[i]\n",
"step-5": "class Rect():\n def __init__(self, w, h):\n self.w = w\n self.h = h\n \n def half(self):\n return self.w / 2;\n \nbricks = [Rect(40, 25), Rect(30, 25), Rect(28, 25), Rect(13, 25)]\n\ndef setup():\n size(500, 500)\n noLoop()\n \ndef draw():\n \n posx = 0\n posy = 0\n i = 0\n for y in range(20):\n posx = 0\n for x in range(50):\n fill(random(100, 250))\n brick = get_brick(i)\n rect(posx, posy, brick.w, brick.h)\n posx += brick.w\n i += 1\n posy += brick.h\n\ndef get_brick(index):\n i = int(random(len(bricks)))\n# i = index % len(bricks)\n return bricks[i]\n",
"step-ids": [
4,
5,
6,
7,
8
]
}
|
[
4,
5,
6,
7,
8
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def leaveKout_CV(X, y, n_scz_te, rep, perms, classifiers, parameters, count,
                 freq_bands, x_size, auc, nz_coef_idx, nz_coef_val,
                 n_BAitaSig=None):
    """
    Calculates the leave K out cross validation.

    Parameters
    ----------
    X : array of arrays
        Matrix containing a vector with all the features for each subject.
        Dimension (number of subjects)x(number of features).
    y : array
        A vector containing the class-information.
        Remember: 1 = healthy controls, 0 = schizophrenic
    n_scz_te : int
        Desired number of schizophrenic patients in each test set.
    rep : integer
        The number of the repetition that has been used so far.
    perms : range(*)
        Range with desired number (*) of permutations.
        *=1 indicates no permutations.
    classifiers : dictionary
        Dictionary containing classifiers. E.g. {'lasso' : Lasso(max_iter = 10000)}
    parameters : dictionary
        Dictionary containing parameters to the classifiers as in "classifiers"
    count : integer
        Used to know how many loops that have been made due to the pre
        allocated space for AUC.
    freq_bands : list of strings
        Either ['all'] or ['delta','theta','alpha','beta1','beta2','gamma'].
    x_size : integer
        The size each X has which changes depending on freq_bands.
    auc : dictionary
        Contains the auc-scores for each loop, either divided into bands or
        with the key "all".
    nz_coef_idx : dictionary
        Contains the non-zero coefficient indices for each loop, either
        divided into bands or with the key "all".
    nz_coef_val : dictionary
        Contains the non-zero coefficient values (the weights) for each
        loop, either divided into bands or with the key "all".
    n_BAitaSig : list of integers, optional
        The number of connections in each band when BAitaSig is used.
        The default is None.

    Returns
    -------
    auc, nz_coef_idx, nz_coef_val : dictionary
        Updated versions of the input dictionaries.
    count : integer
        Updated loop counter.
    """
    # One outer fold per disjoint group of n_scz_te schizophrenic subjects.
    skf = StratifiedKFold(n_splits=int(sum(y == 0) // n_scz_te),
                          shuffle=True, random_state=rep)
    count_plt = 0
    fig, ax = plt.subplots(2, 3, figsize=(10, 6.5))
    for tr_idx, te_idx in skf.split(X, y):
        y_tr = np.ravel(y[tr_idx])
        y_te = np.ravel(y[te_idx])
        clf_name = list(classifiers.keys())[0]  # only the first classifier is used
        count += 1
        sns.set(font_scale=1.5)
        for i in range(1):
            if count_plt == 6:
                # Six subplots collected: finalize the figure, save it, then
                # abort on purpose (deliberate quick-and-dirty early exit).
                plt.suptitle(
                    'Example of line search for the regularization parameter',
                    fontsize=18)
                plt.tight_layout()
                plt.subplots_adjust(top=0.84, bottom=0.15, hspace=0.5,
                                    wspace=0.45)
                fig.legend(['Train', 'Validation'],
                           bbox_to_anchor=(0.5, 0.89), borderaxespad=0.0,
                           loc='upper center', ncol=2)
                plt.show()
                fig.savefig(
                    '/share/FannyMaster/PythonNew/Figures/LineSearchEx.jpg',
                    bbox_inches='tight')
                sns.reset_orig()
                raise NameError(
                    'This is just a dumb way of stopping the code after 6 iterations'
                    )
            # NOTE(review): the loop variable is immediately overridden, so only
            # band index 1 is ever evaluated here; this would raise IndexError
            # when freq_bands == ['all'] -- confirm intended debug-only usage.
            i = 1
            # Inner CV line search over the regularization strength alpha.
            clf = GridSearchCV(classifiers[clf_name],
                               {'alpha': parameters[freq_bands[i]]},
                               cv=StratifiedKFold(n_splits=int(
                                   sum(y_tr == 0) // n_scz_te)),
                               scoring='roc_auc', n_jobs=-1,
                               return_train_score=True)
            # Select the feature columns belonging to the current band.
            if n_BAitaSig is None:  # fixed: identity comparison (was `== None`)
                X_tr = X[tr_idx, x_size * i:x_size * (i + 1)]
                X_te = X[te_idx, x_size * i:x_size * (i + 1)]
            elif x_size == sum(n_BAitaSig):
                X_tr = X[tr_idx, :]
                X_te = X[te_idx, :]
            else:
                # Bands have unequal widths; use cumulative offsets.
                n_temp = [0]
                n_temp.extend(np.cumsum(n_BAitaSig))
                X_tr = X[tr_idx, n_temp[i]:n_temp[i + 1]]
                X_te = X[te_idx, n_temp[i]:n_temp[i + 1]]
            # Standardize using statistics from the training split only.
            scaler_out = preprocessing.StandardScaler().fit(X_tr)
            X_tr = scaler_out.transform(X_tr)
            X_te = scaler_out.transform(X_te)
            fit = clf.fit(X_tr, y_tr)
            auc[freq_bands[i]][count] = fit.score(X_te, y_te)
            # Plot train/validation AUC against log(lambda) for this fold.
            cv_results = clf.cv_results_
            metric = 'score'
            grid_param_1 = parameters[freq_bands[i]]
            scores_mean = cv_results['mean_test_' + metric]
            scores_mean_tr = cv_results['mean_train_' + metric]
            sns.set(font_scale=1.5)
            x_label = 'log($\\lambda$)'
            log_params = [math.log(lam) for lam in grid_param_1]
            df1 = pd.DataFrame({x_label: log_params,
                                'CV Average AUC': scores_mean_tr,
                                'type': ['train'] * len(scores_mean_tr)})
            df2 = pd.DataFrame({x_label: log_params,
                                'CV Average AUC': scores_mean,
                                'type': ['test'] * len(scores_mean_tr)})
            panel = ax[count_plt // 3][count_plt % 3]
            sns.lineplot(x=x_label, y='CV Average AUC', style='type',
                         legend=False, markers='o', data=df1, ax=panel)
            sns.lineplot(x=x_label, y='CV Average AUC', style='type',
                         legend=False, markers='o', data=df2, ax=panel)
            panel.set_xlabel(x_label, fontsize=14)
            panel.set_ylabel('CV Average AUC', fontsize=14)
            count_plt += 1
            if len(perms) == 1:
                # Only record non-zero coefficients for the unpermuted run.
                coef_idx = np.nonzero(fit.best_estimator_.coef_)
                nz_coef_idx[freq_bands[i]].append(coef_idx)
                nz_coef_val[freq_bands[i]].append(
                    fit.best_estimator_.coef_[coef_idx])
    return auc, nz_coef_idx, nz_coef_val, count
def CV_classifier(X, y, n_scz_te, reps, separate_bands, perms, dir_save,
    classifiers, parameters, n_BAitaSig=None):
    """
    Parameters
    ----------
    X : np.array
        Matrix with dimension (subjects)x(feature vector).
    y : np.array
        Vector with classifications (0: healthy, 1: schizo).
    n_scz_te : int
        Desired number of schizophrenic patients in each test set.
    reps : range(*)
        Range with desired number (*) of extra times the code should run.
    separate_bands : boolean
        True = separate data into frequency bands. False = don't separate.
    perms : range(*)
        Range with desired number (*) of permutations.
        *=1 indicates no permutations.
    dir_save : string
        Directory path to where the results should be saved.
    classifiers : dictionary
        Dictionary containing classifiers. E.g. {'lasso' : Lasso(max_iter = 10000)}
    parameters : dictionary
        Dictionary containing parameters to the classifiers as in "classifiers"
    Notes
    -------
    Saves three different values in the dir_save path:
    auc : dictionary
        Contains the auc-scores for each loop, either divided into bands or
        with the key "all".
    nz_coef_idx : dictionary
        Contains the non-zero coefficient indices for each loop, either
        divided into bands or with the key "all".
    nz_coef_val : dictionary
        Contains the non-zero coefficient values (the weights) for each
        loop, either divided into bands or with the key "all".
    """
    if separate_bands:
        freq_bands = ['delta', 'theta', 'alpha', 'beta1', 'beta2', 'gamma']
    else:
        freq_bands = ['all']
    # Attach the progress bar to whichever loop actually varies.
    if len(perms) > 1:
        y_org = y
        tqdm_perms = tqdm(perms)
        tqdm_reps = reps
    else:
        tqdm_perms = perms
        tqdm_reps = tqdm(reps)
    auc = {}
    nz_coef_idx = {}
    nz_coef_val = {}
    # Pre-allocate one AUC slot per (repetition x fold x permutation).
    nb_loops = len(reps) * (sum(y == 0) // n_scz_te) * len(perms)
    x_size = int(X.shape[1] / len(freq_bands))
    for i in freq_bands:
        auc[i] = np.zeros(nb_loops)
        nz_coef_idx[i] = []
        nz_coef_val[i] = []
    count = -1
    for perm in tqdm_perms:
        if len(perms) > 1:
            # Permutation test: shuffle labels with a fixed per-permutation seed.
            y = shuffle(y_org, random_state=perm).reset_index(drop=True)
        for rep in tqdm_reps:
            # leaveKout_CV mutates/returns the shared accumulators and counter.
            # NOTE(review): the docstring says results are saved to dir_save,
            # but no save step is visible here -- confirm against full source.
            auc, nz_coef_idx, nz_coef_val, count = leaveKout_CV(X, y,
                n_scz_te, rep, perms, classifiers, parameters, count,
                freq_bands, x_size, auc, nz_coef_idx, nz_coef_val, n_BAitaSig)
<|reserved_special_token_0|>
sns.set(font_scale=1.5)
<|reserved_special_token_0|>
if atlas == 'DKEgill':
X = getEgillX(X)
n_BAitaSig = None
parameters = getEgillParameters(con_type, separate_bands)
elif atlas == 'BAitaSig':
X, n_BAitaSig = significant_connected_areasBAitaSigX(X)
parameters = getBAitaSigParameters(con_type, separate_bands)
elif atlas == 'BAita':
parameters = getBAitaParameters(con_type, separate_bands)
n_BAitaSig = None
<|reserved_special_token_0|>
CV_classifier(X, y, n_scz_te, reps, separate_bands, perms, dir_save,
classifiers, parameters)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def leaveKout_CV(X, y, n_scz_te, rep, perms, classifiers, parameters, count,
freq_bands, x_size, auc, nz_coef_idx, nz_coef_val, n_BAitaSig=None):
"""
Calculates the leave K out cross validation.
Parameters
----------
X : array of arrays
Matrix containing a vector with all the features for each subject.
Dimension (number of subjects)x(number of features).
y : array
A vector containing the class-information.
Remember: 1 = healty controls, 0 = schizophrenic
n_scz_te : int
Desired number of schizophrenic patients in each test set.
rep : integer
The number of repition that has been used so far.
perms : range(*)
Range with desired number (*) of permutations.
*=1 indicates no permutations.
classifiers : dictionary
Dictionary containing classifiers. E.g. {'lasso' : Lasso(max_iter = 10000)}
parameters : dictionary
Dictionary containing parameters to the classifiers as in "classifiers"
count : integer
Used to know how many loops that have been made due to the pre
allocated space for AUC.
freq_bands : list of strings
Either ['all'] or ['detla','theta','alpha','beta1','beta2','gamma'].
x_size : integer
The size each X has which changes depending on freq_bands.
auc : dictionary
Contains the auc-scores for each loop, either divided into bands or
with the key "all".
nz_coef_idx : dictionary
Contains the non-zero coefficient indices for each loop, either
divided into bands or with the key "all".
nz_coef_val : dictionary
Contains the non-zero coefficient values (the weights) for each
loop, either divided into bands or with the key "all".
n_BAitaSig : list of integers, optional
The number of connections in each band when BAitaSig is used.
The default is None.
Returns
-------
auc : dictionary
Contains the updated auc-scores for each loop, either divided into
bands or with the key "all".
nz_coef_idx : dictionary
Contains the updated non-zero coefficient indices for each loop,
either divided into bands or with the key "all".
nz_coef_val : dictionary
Contains the updated non-zero coefficient values (the weights) for
each loop, either divided into bands or with the key "all".
count : integer
Used to know how many loops that have been made due to the pre
allocated space for AUC.
"""
skf = StratifiedKFold(n_splits=int(sum(y == 0) // n_scz_te), shuffle=
True, random_state=rep)
count_plt = 0
fig, ax = plt.subplots(2, 3, figsize=(10, 6.5))
for tr_idx, te_idx in skf.split(X, y):
y_tr = np.ravel(y[tr_idx])
y_te = np.ravel(y[te_idx])
clf_name = list(classifiers.keys())[0]
count += 1
sns.set(font_scale=1.5)
for i in range(1):
if count_plt == 6:
plt.suptitle(
'Example of line search for the regularization parameter',
fontsize=18)
plt.tight_layout()
plt.subplots_adjust(top=0.84, bottom=0.15, hspace=0.5,
wspace=0.45)
fig.legend(['Train', 'Validation'], bbox_to_anchor=(0.5,
0.89), borderaxespad=0.0, loc='upper center', ncol=2)
plt.show()
fig.savefig(
'/share/FannyMaster/PythonNew/Figures/LineSearchEx.jpg',
bbox_inches='tight')
sns.reset_orig()
raise NameError(
'This is just a dumb way of stopping the code after 6 iterations'
)
i = 1
clf = GridSearchCV(classifiers[clf_name], {'alpha': parameters[
freq_bands[i]]}, cv=StratifiedKFold(n_splits=int(sum(y_tr ==
0) // n_scz_te)), scoring='roc_auc', n_jobs=-1,
return_train_score=True)
if n_BAitaSig == None:
X_tr = X[tr_idx, x_size * i:x_size * (i + 1)]
X_te = X[te_idx, x_size * i:x_size * (i + 1)]
elif x_size == sum(n_BAitaSig):
X_tr = X[tr_idx, :]
X_te = X[te_idx, :]
else:
n_temp = [0]
n_temp.extend(np.cumsum(n_BAitaSig))
X_tr = X[tr_idx, n_temp[i]:n_temp[i + 1]]
X_te = X[te_idx, n_temp[i]:n_temp[i + 1]]
scaler_out = preprocessing.StandardScaler().fit(X_tr)
X_tr = scaler_out.transform(X_tr)
X_te = scaler_out.transform(X_te)
fit = clf.fit(X_tr, y_tr)
auc[freq_bands[i]][count] = fit.score(X_te, y_te)
cv_results = clf.cv_results_
metric = 'score'
grid_param_1 = parameters[freq_bands[i]]
scores_mean = cv_results['mean_test_' + metric]
scores_mean_tr = cv_results['mean_train_' + metric]
sns.set(font_scale=1.5)
df1 = pd.DataFrame({'log($\\lambda$)': [math.log(i) for i in
grid_param_1], 'CV Average AUC': scores_mean_tr, 'type': [
'train'] * len(scores_mean_tr)})
df2 = pd.DataFrame({'log($\\lambda$)': [math.log(i) for i in
grid_param_1], 'CV Average AUC': scores_mean, 'type': [
'test'] * len(scores_mean_tr)})
sns.lineplot(x='log($\\lambda$)', y='CV Average AUC', style=
'type', legend=False, markers='o', data=df1, ax=ax[
count_plt // 3][count_plt % 3])
sns.lineplot(x='log($\\lambda$)', y='CV Average AUC', style=
'type', legend=False, markers='o', data=df2, ax=ax[
count_plt // 3][count_plt % 3])
ax[count_plt // 3][count_plt % 3].set_xlabel('log($\\lambda$)',
fontsize=14)
ax[count_plt // 3][count_plt % 3].set_ylabel('CV Average AUC',
fontsize=14)
count_plt += 1
if len(perms) == 1:
coef_idx = np.nonzero(fit.best_estimator_.coef_)
nz_coef_idx[freq_bands[i]].append(coef_idx)
nz_coef_val[freq_bands[i]].append(fit.best_estimator_.coef_
[coef_idx])
return auc, nz_coef_idx, nz_coef_val, count
def CV_classifier(X, y, n_scz_te, reps, separate_bands, perms, dir_save,
    classifiers, parameters, n_BAitaSig=None):
    """
    Drive the repeated leave-K-out cross-validation.

    Parameters
    ----------
    X : np.array
        Feature matrix of dimension (subjects)x(feature vector).
    y : np.array
        Class labels (0: healthy, 1: schizo).
    n_scz_te : int
        Number of schizophrenic patients wanted in every test fold.
    reps : range(*)
        One entry per repetition (*) of the whole CV.
    separate_bands : boolean
        True = split the features into six frequency bands,
        False = treat everything as a single band 'all'.
    perms : range(*)
        One entry per permutation (*); length 1 means no permutation test.
    dir_save : string
        Directory path to where the results should be saved.
    classifiers : dictionary
        Name -> estimator, e.g. {'lasso': Lasso(max_iter=10000)}.
    parameters : dictionary
        Name -> hyper-parameter grid matching ``classifiers``.
    n_BAitaSig : list of int, optional
        Per-band connection counts for the BAitaSig atlas. Default None.

    Notes
    -----
    Accumulates, via ``leaveKout_CV``, the AUC scores and the non-zero
    coefficient indices/values per frequency band.
    """
    bands = (['delta', 'theta', 'alpha', 'beta1', 'beta2', 'gamma']
        if separate_bands else ['all'])
    permuting = len(perms) > 1
    # Attach the progress bar to whichever loop actually repeats.
    if permuting:
        y_org = y
        perm_iter = tqdm(perms)
        rep_iter = reps
    else:
        perm_iter = perms
        rep_iter = tqdm(reps)
    # Pre-allocate one AUC slot per (permutation, repetition, outer fold).
    nb_loops = len(reps) * (sum(y == 0) // n_scz_te) * len(perms)
    auc = {band: np.zeros(nb_loops) for band in bands}
    nz_coef_idx = {band: [] for band in bands}
    nz_coef_val = {band: [] for band in bands}
    # Width of one band's feature slice.
    x_size = int(X.shape[1] / len(bands))
    count = -1
    for perm in perm_iter:
        if permuting:
            # Permutation test: reshuffle labels for this round.
            y = shuffle(y_org, random_state=perm).reset_index(drop=True)
        for rep in rep_iter:
            auc, nz_coef_idx, nz_coef_val, count = leaveKout_CV(
                X, y, n_scz_te, rep, perms, classifiers, parameters,
                count, bands, x_size, auc, nz_coef_idx, nz_coef_val,
                n_BAitaSig)
# --- Script configuration: load features and run the classification CV ---
con_type = 'lps'  # connectivity measure to load
separate_bands = True  # True = one model per frequency band
partialData = True
atlas = 'BAita'  # one of: 'DKEgill', 'BAita', 'BAitaSig'
sns.set(font_scale=1.5)
freq_band_type = 'DiLorenzo'
# Directory layout: <base>/<atlas>_timeseries_<newest date>/<band type>/...
dir_folders = '/share/FannyMaster/PythonNew/' + atlas + '_timeseries_'
newest_date = getNewestFolderDate(dir_folders)
dir_features = dir_folders + newest_date + '/' + freq_band_type + '/Features'
dir_y_ID = '/share/FannyMaster/PythonNew/Age_Gender.csv'
n_scz_te = 2  # schizophrenic patients per test fold
reps = range(1)  # number of repetitions of the whole CV
classifiers = {'lasso': Lasso(max_iter=10000)}
dir_save = (dir_folders + newest_date + '/' + freq_band_type +
    '/classificationResults/' + con_type.capitalize())
X, y = get_Xy(dir_features, dir_y_ID, con_type, partialData)
# Atlas-specific feature selection and hyper-parameter grids.
if atlas == 'DKEgill':
    X = getEgillX(X)
    n_BAitaSig = None
    parameters = getEgillParameters(con_type, separate_bands)
elif atlas == 'BAitaSig':
    X, n_BAitaSig = significant_connected_areasBAitaSigX(X)
    parameters = getBAitaSigParameters(con_type, separate_bands)
elif atlas == 'BAita':
    parameters = getBAitaParameters(con_type, separate_bands)
    n_BAitaSig = None
perms = range(1)  # length 1 = no permutation test
# NOTE(review): n_BAitaSig is computed above but not forwarded here, so for
# atlas == 'BAitaSig' the CV would fall back to equal-width band slicing —
# confirm whether n_BAitaSig should be passed as the last argument.
CV_classifier(X, y, n_scz_te, reps, separate_bands, perms, dir_save,
    classifiers, parameters)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
import numpy as np
import matplotlib.pyplot as plt
import pandas as pd
from tqdm import tqdm
import math
from sklearn.model_selection import GridSearchCV, StratifiedKFold
from sklearn import preprocessing
from sklearn.utils import shuffle
from sklearn.linear_model import Lasso
from utils_runOnce_classification import getEgillX, getEgillParameters
from utils_runOnce_classification import significant_connected_areasBAitaSigX, getBAitaSigParameters, getBAitaParameters
import seaborn as sns
from utils_joint import getNewestFolderDate, get_Xy
import pdb
def leaveKout_CV(X, y, n_scz_te, rep, perms, classifiers, parameters, count,
    freq_bands, x_size, auc, nz_coef_idx, nz_coef_val, n_BAitaSig=None):
    """
    Calculates the leave K out cross validation.

    Parameters
    ----------
    X : array of arrays
        Matrix containing a vector with all the features for each subject.
        Dimension (number of subjects)x(number of features).
    y : array
        A vector containing the class-information.
        Remember: 1 = healty controls, 0 = schizophrenic
    n_scz_te : int
        Desired number of schizophrenic patients in each test set.
    rep : integer
        The number of repition that has been used so far.
    perms : range(*)
        Range with desired number (*) of permutations.
        *=1 indicates no permutations.
    classifiers : dictionary
        Dictionary containing classifiers. E.g. {'lasso' : Lasso(max_iter = 10000)}
    parameters : dictionary
        Dictionary containing parameters to the classifiers as in "classifiers"
    count : integer
        Used to know how many loops that have been made due to the pre
        allocated space for AUC.
    freq_bands : list of strings
        Either ['all'] or ['detla','theta','alpha','beta1','beta2','gamma'].
    x_size : integer
        The size each X has which changes depending on freq_bands.
    auc : dictionary
        Contains the auc-scores for each loop, either divided into bands or
        with the key "all".
    nz_coef_idx : dictionary
        Contains the non-zero coefficient indices for each loop, either
        divided into bands or with the key "all".
    nz_coef_val : dictionary
        Contains the non-zero coefficient values (the weights) for each
        loop, either divided into bands or with the key "all".
    n_BAitaSig : list of integers, optional
        The number of connections in each band when BAitaSig is used.
        The default is None.

    Returns
    -------
    auc : dictionary
        Contains the updated auc-scores for each loop.
    nz_coef_idx : dictionary
        Contains the updated non-zero coefficient indices for each loop.
    nz_coef_val : dictionary
        Contains the updated non-zero coefficient values for each loop.
    count : integer
        Index of the last filled AUC slot.
    """
    # One outer fold per group of n_scz_te class-0 subjects.
    skf = StratifiedKFold(n_splits=int(sum(y == 0) // n_scz_te), shuffle=
        True, random_state=rep)
    count_plt = 0
    fig, ax = plt.subplots(2, 3, figsize=(10, 6.5))
    for tr_idx, te_idx in skf.split(X, y):
        y_tr = np.ravel(y[tr_idx])
        y_te = np.ravel(y[te_idx])
        clf_name = list(classifiers.keys())[0]
        count += 1
        sns.set(font_scale=1.5)
        # NOTE(review): range(1) + the "i = 1" below pin the band to index 1;
        # this looks like temporary line-search/plotting code rather than the
        # full band loop range(len(freq_bands)) — confirm before reuse.
        for i in range(1):
            if count_plt == 6:
                # Six subplots collected: finalize and save the figure, then
                # abort on purpose (deliberate early exit via exception).
                plt.suptitle(
                    'Example of line search for the regularization parameter',
                    fontsize=18)
                plt.tight_layout()
                plt.subplots_adjust(top=0.84, bottom=0.15, hspace=0.5,
                    wspace=0.45)
                fig.legend(['Train', 'Validation'], bbox_to_anchor=(0.5, 
                    0.89), borderaxespad=0.0, loc='upper center', ncol=2)
                plt.show()
                fig.savefig(
                    '/share/FannyMaster/PythonNew/Figures/LineSearchEx.jpg',
                    bbox_inches='tight')
                sns.reset_orig()
                raise NameError(
                    'This is just a dumb way of stopping the code after 6 iterations'
                    )
            i = 1
            # Inner CV line search over the regularization strength alpha.
            clf = GridSearchCV(classifiers[clf_name], {'alpha': parameters[
                freq_bands[i]]}, cv=StratifiedKFold(n_splits=int(sum(y_tr ==
                0) // n_scz_te)), scoring='roc_auc', n_jobs=-1,
                return_train_score=True)
            # Slice out the columns belonging to the current frequency band.
            if n_BAitaSig is None:
                X_tr = X[tr_idx, x_size * i:x_size * (i + 1)]
                X_te = X[te_idx, x_size * i:x_size * (i + 1)]
            elif x_size == sum(n_BAitaSig):
                # Bands are not separated: use every column.
                X_tr = X[tr_idx, :]
                X_te = X[te_idx, :]
            else:
                # BAitaSig band widths differ, so use cumulative offsets.
                n_temp = [0]
                n_temp.extend(np.cumsum(n_BAitaSig))
                X_tr = X[tr_idx, n_temp[i]:n_temp[i + 1]]
                X_te = X[te_idx, n_temp[i]:n_temp[i + 1]]
            # Standardize with statistics from the training split only.
            scaler_out = preprocessing.StandardScaler().fit(X_tr)
            X_tr = scaler_out.transform(X_tr)
            X_te = scaler_out.transform(X_te)
            fit = clf.fit(X_tr, y_tr)
            auc[freq_bands[i]][count] = fit.score(X_te, y_te)
            # Plot train/validation AUC over log(lambda) for this fold.
            cv_results = clf.cv_results_
            metric = 'score'
            grid_param_1 = parameters[freq_bands[i]]
            scores_mean = cv_results['mean_test_' + metric]
            scores_mean_tr = cv_results['mean_train_' + metric]
            sns.set(font_scale=1.5)
            df1 = pd.DataFrame({'log($\\lambda$)': [math.log(i) for i in
                grid_param_1], 'CV Average AUC': scores_mean_tr, 'type': [
                'train'] * len(scores_mean_tr)})
            df2 = pd.DataFrame({'log($\\lambda$)': [math.log(i) for i in
                grid_param_1], 'CV Average AUC': scores_mean, 'type': [
                'test'] * len(scores_mean_tr)})
            sns.lineplot(x='log($\\lambda$)', y='CV Average AUC', style=
                'type', legend=False, markers='o', data=df1, ax=ax[
                count_plt // 3][count_plt % 3])
            sns.lineplot(x='log($\\lambda$)', y='CV Average AUC', style=
                'type', legend=False, markers='o', data=df2, ax=ax[
                count_plt // 3][count_plt % 3])
            ax[count_plt // 3][count_plt % 3].set_xlabel('log($\\lambda$)',
                fontsize=14)
            ax[count_plt // 3][count_plt % 3].set_ylabel('CV Average AUC',
                fontsize=14)
            count_plt += 1
            if len(perms) == 1:
                # Only track coefficients when not running permutations.
                coef_idx = np.nonzero(fit.best_estimator_.coef_)
                nz_coef_idx[freq_bands[i]].append(coef_idx)
                nz_coef_val[freq_bands[i]].append(fit.best_estimator_.coef_
                    [coef_idx])
    return auc, nz_coef_idx, nz_coef_val, count
def CV_classifier(X, y, n_scz_te, reps, separate_bands, perms, dir_save,
    classifiers, parameters, n_BAitaSig=None):
    """
    Drive the repeated leave-K-out cross-validation.

    Parameters
    ----------
    X : np.array
        Feature matrix of dimension (subjects)x(feature vector).
    y : np.array
        Class labels (0: healthy, 1: schizo).
    n_scz_te : int
        Number of schizophrenic patients wanted in every test fold.
    reps : range(*)
        One entry per repetition (*) of the whole CV.
    separate_bands : boolean
        True = split the features into six frequency bands,
        False = treat everything as a single band 'all'.
    perms : range(*)
        One entry per permutation (*); length 1 means no permutation test.
    dir_save : string
        Directory path to where the results should be saved.
    classifiers : dictionary
        Name -> estimator, e.g. {'lasso': Lasso(max_iter=10000)}.
    parameters : dictionary
        Name -> hyper-parameter grid matching ``classifiers``.
    n_BAitaSig : list of int, optional
        Per-band connection counts for the BAitaSig atlas. Default None.

    Notes
    -----
    Accumulates, via ``leaveKout_CV``, the AUC scores and the non-zero
    coefficient indices/values per frequency band.
    """
    bands = (['delta', 'theta', 'alpha', 'beta1', 'beta2', 'gamma']
        if separate_bands else ['all'])
    permuting = len(perms) > 1
    # Attach the progress bar to whichever loop actually repeats.
    if permuting:
        y_org = y
        perm_iter = tqdm(perms)
        rep_iter = reps
    else:
        perm_iter = perms
        rep_iter = tqdm(reps)
    # Pre-allocate one AUC slot per (permutation, repetition, outer fold).
    nb_loops = len(reps) * (sum(y == 0) // n_scz_te) * len(perms)
    auc = {band: np.zeros(nb_loops) for band in bands}
    nz_coef_idx = {band: [] for band in bands}
    nz_coef_val = {band: [] for band in bands}
    # Width of one band's feature slice.
    x_size = int(X.shape[1] / len(bands))
    count = -1
    for perm in perm_iter:
        if permuting:
            # Permutation test: reshuffle labels for this round.
            y = shuffle(y_org, random_state=perm).reset_index(drop=True)
        for rep in rep_iter:
            auc, nz_coef_idx, nz_coef_val, count = leaveKout_CV(
                X, y, n_scz_te, rep, perms, classifiers, parameters,
                count, bands, x_size, auc, nz_coef_idx, nz_coef_val,
                n_BAitaSig)
# --- Script configuration: load features and run the classification CV ---
con_type = 'lps'  # connectivity measure to load
separate_bands = True  # True = one model per frequency band
partialData = True
atlas = 'BAita'  # one of: 'DKEgill', 'BAita', 'BAitaSig'
sns.set(font_scale=1.5)
freq_band_type = 'DiLorenzo'
# Directory layout: <base>/<atlas>_timeseries_<newest date>/<band type>/...
dir_folders = '/share/FannyMaster/PythonNew/' + atlas + '_timeseries_'
newest_date = getNewestFolderDate(dir_folders)
dir_features = dir_folders + newest_date + '/' + freq_band_type + '/Features'
dir_y_ID = '/share/FannyMaster/PythonNew/Age_Gender.csv'
n_scz_te = 2  # schizophrenic patients per test fold
reps = range(1)  # number of repetitions of the whole CV
classifiers = {'lasso': Lasso(max_iter=10000)}
dir_save = (dir_folders + newest_date + '/' + freq_band_type +
    '/classificationResults/' + con_type.capitalize())
X, y = get_Xy(dir_features, dir_y_ID, con_type, partialData)
# Atlas-specific feature selection and hyper-parameter grids.
if atlas == 'DKEgill':
    X = getEgillX(X)
    n_BAitaSig = None
    parameters = getEgillParameters(con_type, separate_bands)
elif atlas == 'BAitaSig':
    X, n_BAitaSig = significant_connected_areasBAitaSigX(X)
    parameters = getBAitaSigParameters(con_type, separate_bands)
elif atlas == 'BAita':
    parameters = getBAitaParameters(con_type, separate_bands)
    n_BAitaSig = None
perms = range(1)  # length 1 = no permutation test
# NOTE(review): n_BAitaSig is computed above but not forwarded here, so for
# atlas == 'BAitaSig' the CV would fall back to equal-width band slicing —
# confirm whether n_BAitaSig should be passed as the last argument.
CV_classifier(X, y, n_scz_te, reps, separate_bands, perms, dir_save,
    classifiers, parameters)
<|reserved_special_token_1|>
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Tue May 26 18:39:26 2020
@author: Fanny Fredriksson and Karen Marie Sandø Ambrosen
"""
import numpy as np
import matplotlib.pyplot as plt
import pandas as pd
from tqdm import tqdm #count ffor loops
import math
from sklearn.model_selection import GridSearchCV, StratifiedKFold
from sklearn import preprocessing
from sklearn.utils import shuffle
from sklearn.linear_model import Lasso
from utils_runOnce_classification import getEgillX, getEgillParameters
from utils_runOnce_classification import significant_connected_areasBAitaSigX, getBAitaSigParameters, getBAitaParameters
import seaborn as sns
from utils_joint import getNewestFolderDate, get_Xy
import pdb
#{}
#[]
##############################################################################
def leaveKout_CV(X, y, n_scz_te, rep, perms, classifiers, parameters, count,
    freq_bands, x_size, auc, nz_coef_idx, nz_coef_val, n_BAitaSig=None):
    """
    Calculates the leave K out cross validation.

    Parameters
    ----------
    X : array of arrays
        Matrix containing a vector with all the features for each subject.
        Dimension (number of subjects)x(number of features).
    y : array
        A vector containing the class-information.
        Remember: 1 = healty controls, 0 = schizophrenic
    n_scz_te : int
        Desired number of schizophrenic patients in each test set.
    rep : integer
        The number of repition that has been used so far.
    perms : range(*)
        Range with desired number (*) of permutations.
        *=1 indicates no permutations.
    classifiers : dictionary
        Dictionary containing classifiers. E.g. {'lasso' : Lasso(max_iter = 10000)}
    parameters : dictionary
        Dictionary containing parameters to the classifiers as in "classifiers"
    count : integer
        Used to know how many loops that have been made due to the pre
        allocated space for AUC.
    freq_bands : list of strings
        Either ['all'] or ['detla','theta','alpha','beta1','beta2','gamma'].
    x_size : integer
        The size each X has which changes depending on freq_bands.
    auc : dictionary
        Contains the auc-scores for each loop, either divided into bands or
        with the key "all".
    nz_coef_idx : dictionary
        Contains the non-zero coefficient indices for each loop, either
        divided into bands or with the key "all".
    nz_coef_val : dictionary
        Contains the non-zero coefficient values (the weights) for each
        loop, either divided into bands or with the key "all".
    n_BAitaSig : list of integers, optional
        The number of connections in each band when BAitaSig is used.
        The default is None.

    Returns
    -------
    auc : dictionary
        Contains the updated auc-scores for each loop.
    nz_coef_idx : dictionary
        Contains the updated non-zero coefficient indices for each loop.
    nz_coef_val : dictionary
        Contains the updated non-zero coefficient values for each loop.
    count : integer
        Index of the last filled AUC slot.
    """
    # One outer fold per group of n_scz_te class-0 subjects.
    skf = StratifiedKFold(n_splits=int(sum(y == 0) // n_scz_te), shuffle=True,
                          random_state=rep)
    count_plt = 0
    fig, ax = plt.subplots(2, 3, figsize=(10, 6.5))
    for tr_idx, te_idx in skf.split(X, y):
        # Compute test and train targets
        y_tr = np.ravel(y[tr_idx])
        y_te = np.ravel(y[te_idx])
        # Make gridsearch function
        clf_name = list(classifiers.keys())[0]
        count += 1
        sns.set(font_scale=1.5)
        # NOTE(review): range(1) + the "i = 1" below pin the band to index 1;
        # this looks like temporary line-search/plotting code rather than the
        # full band loop range(len(freq_bands)) — confirm before reuse.
        for i in range(1):
            if count_plt == 6:
                # Six subplots collected: finalize and save the figure, then
                # abort on purpose (deliberate early exit via exception).
                plt.suptitle('Example of line search for the regularization parameter', fontsize=18)
                plt.tight_layout()
                plt.subplots_adjust(top=0.84, bottom=0.15, hspace=0.5, wspace=0.45)
                fig.legend(['Train', 'Validation'], bbox_to_anchor=(0.5, 0.89),
                           borderaxespad=0., loc='upper center', ncol=2)
                plt.show()
                fig.savefig('/share/FannyMaster/PythonNew/Figures/LineSearchEx.jpg', bbox_inches='tight')
                sns.reset_orig()
                raise NameError('This is just a dumb way of stopping the code after 6 iterations')
            i = 1
            # Inner CV line search over the regularization strength alpha.
            clf = GridSearchCV(classifiers[clf_name], {'alpha': parameters[freq_bands[i]]},
                               cv=StratifiedKFold(n_splits=int(sum(y_tr == 0) // n_scz_te)),
                               scoring='roc_auc', n_jobs=-1, return_train_score=True)
            # Compute test and train sets (slice out this band's columns).
            if n_BAitaSig is None:
                X_tr = X[tr_idx, x_size * i:x_size * (i + 1)]
                X_te = X[te_idx, x_size * i:x_size * (i + 1)]
            elif x_size == sum(n_BAitaSig):
                # Bands are not separated: use every column.
                X_tr = X[tr_idx, :]
                X_te = X[te_idx, :]
            else:
                # BAitaSig band widths differ, so use cumulative offsets.
                n_temp = [0]
                n_temp.extend(np.cumsum(n_BAitaSig))
                X_tr = X[tr_idx, n_temp[i]:n_temp[i + 1]]
                X_te = X[te_idx, n_temp[i]:n_temp[i + 1]]
            # Standardize with statistics from the training split only.
            scaler_out = preprocessing.StandardScaler().fit(X_tr)
            X_tr = scaler_out.transform(X_tr)
            X_te = scaler_out.transform(X_te)
            # Fit data and save auc scores
            fit = clf.fit(X_tr, y_tr)
            auc[freq_bands[i]][count] = fit.score(X_te, y_te)
            # Plot train/validation AUC over log(lambda) for this fold.
            cv_results = clf.cv_results_
            metric = 'score'
            grid_param_1 = parameters[freq_bands[i]]
            scores_mean = cv_results['mean_test_' + metric]
            scores_mean_tr = cv_results['mean_train_' + metric]
            sns.set(font_scale=1.5)
            df1 = pd.DataFrame({'log($\\lambda$)': [math.log(i) for i in grid_param_1],
                                'CV Average AUC': scores_mean_tr,
                                'type': ['train'] * len(scores_mean_tr)})
            df2 = pd.DataFrame({'log($\\lambda$)': [math.log(i) for i in grid_param_1],
                                'CV Average AUC': scores_mean,
                                'type': ['test'] * len(scores_mean_tr)})
            sns.lineplot(x='log($\\lambda$)', y='CV Average AUC', style='type',
                         legend=False, markers='o', data=df1,
                         ax=ax[count_plt // 3][count_plt % 3])
            sns.lineplot(x='log($\\lambda$)', y='CV Average AUC', style='type',
                         legend=False, markers='o', data=df2,
                         ax=ax[count_plt // 3][count_plt % 3])
            ax[count_plt // 3][count_plt % 3].set_xlabel('log($\\lambda$)', fontsize=14)
            ax[count_plt // 3][count_plt % 3].set_ylabel('CV Average AUC', fontsize=14)
            count_plt += 1
            if len(perms) == 1:
                # Only track coefficients when not running permutations.
                coef_idx = np.nonzero(fit.best_estimator_.coef_)
                nz_coef_idx[freq_bands[i]].append(coef_idx)
                nz_coef_val[freq_bands[i]].append(fit.best_estimator_.coef_[coef_idx])
    return auc, nz_coef_idx, nz_coef_val, count
##############################################################################
def CV_classifier(X, y, n_scz_te, reps, separate_bands, perms, dir_save,
    classifiers, parameters, n_BAitaSig=None):
    """
    Drive the repeated leave-K-out cross-validation.

    Parameters
    ----------
    X : np.array
        Feature matrix of dimension (subjects)x(feature vector).
    y : np.array
        Class labels (0: healthy, 1: schizo).
    n_scz_te : int
        Number of schizophrenic patients wanted in every test fold.
    reps : range(*)
        One entry per repetition (*) of the whole CV.
    separate_bands : boolean
        True = split the features into six frequency bands,
        False = treat everything as a single band 'all'.
    perms : range(*)
        One entry per permutation (*); length 1 means no permutation test.
    dir_save : string
        Directory path to where the results should be saved.
    classifiers : dictionary
        Name -> estimator, e.g. {'lasso': Lasso(max_iter=10000)}.
    parameters : dictionary
        Name -> hyper-parameter grid matching ``classifiers``.
    n_BAitaSig : list of int, optional
        Per-band connection counts for the BAitaSig atlas. Default None.

    Notes
    -----
    Accumulates, via ``leaveKout_CV``, the AUC scores and the non-zero
    coefficient indices/values per frequency band.
    """
    bands = (['delta', 'theta', 'alpha', 'beta1', 'beta2', 'gamma']
        if separate_bands else ['all'])
    permuting = len(perms) > 1
    # Attach the progress bar to whichever loop actually repeats.
    if permuting:
        y_org = y
        perm_iter = tqdm(perms)
        rep_iter = reps
    else:
        perm_iter = perms
        rep_iter = tqdm(reps)
    # Pre-allocate one AUC slot per (permutation, repetition, outer fold).
    nb_loops = len(reps) * (sum(y == 0) // n_scz_te) * len(perms)
    auc = {band: np.zeros(nb_loops) for band in bands}
    nz_coef_idx = {band: [] for band in bands}
    nz_coef_val = {band: [] for band in bands}
    # Width of one band's feature slice.
    x_size = int(X.shape[1] / len(bands))
    count = -1
    for perm in perm_iter:
        if permuting:
            # Permutation test: reshuffle labels for this round.
            y = shuffle(y_org, random_state=perm).reset_index(drop=True)
        for rep in rep_iter:
            auc, nz_coef_idx, nz_coef_val, count = leaveKout_CV(
                X, y, n_scz_te, rep, perms, classifiers, parameters,
                count, bands, x_size, auc, nz_coef_idx, nz_coef_val,
                n_BAitaSig)
#%%
# --- Script configuration: load features and run the classification CV ---
con_type = 'lps'  # connectivity measure to load
separate_bands = True # False = All bands together
partialData = True
atlas = 'BAita' # DKEgill, BAita, BAitaSig
sns.set(font_scale=1.5)
freq_band_type = 'DiLorenzo'
# Directories (layout: <base>/<atlas>_timeseries_<newest date>/<band type>/...)
dir_folders = r'/share/FannyMaster/PythonNew/' + atlas + '_timeseries_'
newest_date = getNewestFolderDate(dir_folders)
dir_features = dir_folders + newest_date + '/' + freq_band_type + '/Features'
dir_y_ID = r'/share/FannyMaster/PythonNew/Age_Gender.csv'
n_scz_te = 2  # schizophrenic patients per test fold
reps = range(1)  # number of repetitions of the whole CV
classifiers = {'lasso' : Lasso(max_iter = 10000)}
dir_save = dir_folders + newest_date + '/' + freq_band_type + '/classificationResults/' + con_type.capitalize()
X,y = get_Xy(dir_features, dir_y_ID, con_type, partialData)
# Atlas-specific feature selection and hyper-parameter grids.
if atlas == 'DKEgill':
    X = getEgillX(X)
    n_BAitaSig = None
    parameters = getEgillParameters(con_type, separate_bands)
elif atlas == 'BAitaSig':
    X, n_BAitaSig = significant_connected_areasBAitaSigX(X)
    parameters = getBAitaSigParameters(con_type, separate_bands)
elif atlas == 'BAita':
    parameters = getBAitaParameters(con_type, separate_bands)
    n_BAitaSig = None
perms = range(1) # 1 = No permutations
# NOTE(review): n_BAitaSig is computed above but not forwarded here, so for
# atlas == 'BAitaSig' the CV would fall back to equal-width band slicing —
# confirm whether n_BAitaSig should be passed as the last argument.
CV_classifier(X, y, n_scz_te, reps, separate_bands, perms, dir_save,
              classifiers, parameters)
|
flexible
|
{
"blob_id": "69511933697905fb4f365c895264596f19dc1d8d",
"index": 5021,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef leaveKout_CV(X, y, n_scz_te, rep, perms, classifiers, parameters, count,\n freq_bands, x_size, auc, nz_coef_idx, nz_coef_val, n_BAitaSig=None):\n \"\"\"\n Calculates the leave K out cross validation. \n\n Parameters\n ----------\n X : array of arrays\n Matrix containing a vector with all the features for each subject.\n Dimension (number of subjects)x(number of features).\n y : array\n A vector containing the class-information. \n Remember: 1 = healty controls, 0 = schizophrenic \n n_scz_te : int\n Desired number of schizophrenic patients in each test set.\n rep : integer\n The number of repition that has been used so far.\n perms : range(*)\n Range with desired number (*) of permutations. \n *=1 indicates no permutations.\n classifiers : dictionary\n Dictionary containing classifiers. E.g. {'lasso' : Lasso(max_iter = 10000)}\n parameters : dictionary\n Dictionary containing parameters to the classifiers as in \"classifiers\"\n count : integer\n Used to know how many loops that have been made due to the pre \n allocated space for AUC.\n freq_bands : list of strings\n Either ['all'] or ['detla','theta','alpha','beta1','beta2','gamma'].\n x_size : integer\n The size each X has which changes depending on freq_bands.\n auc : dictionary\n Contains the auc-scores for each loop, either divided into bands or \n with the key \"all\".\n nz_coef_idx : dictionary\n Contains the non-zero coefficient indices for each loop, either \n divided into bands or with the key \"all\".\n nz_coef_val : dictionary\n Contains the non-zero coefficient values (the weights) for each \n loop, either divided into bands or with the key \"all\".\n n_BAitaSig : list of integers, optional\n The number of connections in each band when BAitaSig is used. 
\n The default is None.\n Returns\n -------\n auc : dictionary\n Contains the updated auc-scores for each loop, either divided into \n bands or with the key \"all\".\n nz_coef_idx : dictionary\n Contains the updated non-zero coefficient indices for each loop, \n either divided into bands or with the key \"all\".\n nz_coef_val : dictionary\n Contains the updated non-zero coefficient values (the weights) for \n each loop, either divided into bands or with the key \"all\".\n count : integer\n Used to know how many loops that have been made due to the pre \n allocated space for AUC.\n\n \"\"\"\n skf = StratifiedKFold(n_splits=int(sum(y == 0) // n_scz_te), shuffle=\n True, random_state=rep)\n count_plt = 0\n fig, ax = plt.subplots(2, 3, figsize=(10, 6.5))\n for tr_idx, te_idx in skf.split(X, y):\n y_tr = np.ravel(y[tr_idx])\n y_te = np.ravel(y[te_idx])\n clf_name = list(classifiers.keys())[0]\n count += 1\n sns.set(font_scale=1.5)\n for i in range(1):\n if count_plt == 6:\n plt.suptitle(\n 'Example of line search for the regularization parameter',\n fontsize=18)\n plt.tight_layout()\n plt.subplots_adjust(top=0.84, bottom=0.15, hspace=0.5,\n wspace=0.45)\n fig.legend(['Train', 'Validation'], bbox_to_anchor=(0.5, \n 0.89), borderaxespad=0.0, loc='upper center', ncol=2)\n plt.show()\n fig.savefig(\n '/share/FannyMaster/PythonNew/Figures/LineSearchEx.jpg',\n bbox_inches='tight')\n sns.reset_orig()\n raise NameError(\n 'This is just a dumb way of stopping the code after 6 iterations'\n )\n i = 1\n clf = GridSearchCV(classifiers[clf_name], {'alpha': parameters[\n freq_bands[i]]}, cv=StratifiedKFold(n_splits=int(sum(y_tr ==\n 0) // n_scz_te)), scoring='roc_auc', n_jobs=-1,\n return_train_score=True)\n if n_BAitaSig == None:\n X_tr = X[tr_idx, x_size * i:x_size * (i + 1)]\n X_te = X[te_idx, x_size * i:x_size * (i + 1)]\n elif x_size == sum(n_BAitaSig):\n X_tr = X[tr_idx, :]\n X_te = X[te_idx, :]\n else:\n n_temp = [0]\n n_temp.extend(np.cumsum(n_BAitaSig))\n X_tr = X[tr_idx, 
n_temp[i]:n_temp[i + 1]]\n X_te = X[te_idx, n_temp[i]:n_temp[i + 1]]\n scaler_out = preprocessing.StandardScaler().fit(X_tr)\n X_tr = scaler_out.transform(X_tr)\n X_te = scaler_out.transform(X_te)\n fit = clf.fit(X_tr, y_tr)\n auc[freq_bands[i]][count] = fit.score(X_te, y_te)\n cv_results = clf.cv_results_\n metric = 'score'\n grid_param_1 = parameters[freq_bands[i]]\n scores_mean = cv_results['mean_test_' + metric]\n scores_mean_tr = cv_results['mean_train_' + metric]\n sns.set(font_scale=1.5)\n df1 = pd.DataFrame({'log($\\\\lambda$)': [math.log(i) for i in\n grid_param_1], 'CV Average AUC': scores_mean_tr, 'type': [\n 'train'] * len(scores_mean_tr)})\n df2 = pd.DataFrame({'log($\\\\lambda$)': [math.log(i) for i in\n grid_param_1], 'CV Average AUC': scores_mean, 'type': [\n 'test'] * len(scores_mean_tr)})\n sns.lineplot(x='log($\\\\lambda$)', y='CV Average AUC', style=\n 'type', legend=False, markers='o', data=df1, ax=ax[\n count_plt // 3][count_plt % 3])\n sns.lineplot(x='log($\\\\lambda$)', y='CV Average AUC', style=\n 'type', legend=False, markers='o', data=df2, ax=ax[\n count_plt // 3][count_plt % 3])\n ax[count_plt // 3][count_plt % 3].set_xlabel('log($\\\\lambda$)',\n fontsize=14)\n ax[count_plt // 3][count_plt % 3].set_ylabel('CV Average AUC',\n fontsize=14)\n count_plt += 1\n if len(perms) == 1:\n coef_idx = np.nonzero(fit.best_estimator_.coef_)\n nz_coef_idx[freq_bands[i]].append(coef_idx)\n nz_coef_val[freq_bands[i]].append(fit.best_estimator_.coef_\n [coef_idx])\n return auc, nz_coef_idx, nz_coef_val, count\n\n\ndef CV_classifier(X, y, n_scz_te, reps, separate_bands, perms, dir_save,\n classifiers, parameters, n_BAitaSig=None):\n \"\"\"\n Parameters\n ----------\n X : np.array \n Matrix with dimension (subjects)x(feature vector).\n y : np.array\n Vector with classifications (0: healthy, 1: schizo).\n n_scz_te : int\n Desired number of schizophrenic patients in each test set.\n reps : range(*)\n Range with desired number (*) of extra times the code 
should run.\n separate_bands : boolean\n True = seperate data into frequency bands. False = don't separate.\n perms : range(*)\n Range with desired number (*) of permutations. \n *=1 indicates no permutations.\n dir_save : string\n Directory path to where the results should be saved.\n classifiers : dictionary\n Dictionary containing classifiers. E.g. {'lasso' : Lasso(max_iter = 10000)}\n parameters : dictionary\n Dictionary containing parameters to the classifiers as in \"classifiers\"\n\n Notes\n -------\n Saves three different values in the dir_save path: \n auc : dictionary\n Contains the auc-scores for each loop, either divided into bands or \n with the key \"all\".\n nz_coef_idx : dictionary\n Contains the non-zero coefficient indices for each loop, either \n divided into bands or with the key \"all\".\n nz_coef_val : dictionary\n Contains the non-zero coefficient values (the weights) for each \n loop, either divided into bands or with the key \"all\".\n \n \"\"\"\n if separate_bands:\n freq_bands = ['delta', 'theta', 'alpha', 'beta1', 'beta2', 'gamma']\n else:\n freq_bands = ['all']\n if len(perms) > 1:\n y_org = y\n tqdm_perms = tqdm(perms)\n tqdm_reps = reps\n else:\n tqdm_perms = perms\n tqdm_reps = tqdm(reps)\n auc = {}\n nz_coef_idx = {}\n nz_coef_val = {}\n nb_loops = len(reps) * (sum(y == 0) // n_scz_te) * len(perms)\n x_size = int(X.shape[1] / len(freq_bands))\n for i in freq_bands:\n auc[i] = np.zeros(nb_loops)\n nz_coef_idx[i] = []\n nz_coef_val[i] = []\n count = -1\n for perm in tqdm_perms:\n if len(perms) > 1:\n y = shuffle(y_org, random_state=perm).reset_index(drop=True)\n for rep in tqdm_reps:\n auc, nz_coef_idx, nz_coef_val, count = leaveKout_CV(X, y,\n n_scz_te, rep, perms, classifiers, parameters, count,\n freq_bands, x_size, auc, nz_coef_idx, nz_coef_val, n_BAitaSig)\n\n\n<mask token>\nsns.set(font_scale=1.5)\n<mask token>\nif atlas == 'DKEgill':\n X = getEgillX(X)\n n_BAitaSig = None\n parameters = getEgillParameters(con_type, 
separate_bands)\nelif atlas == 'BAitaSig':\n X, n_BAitaSig = significant_connected_areasBAitaSigX(X)\n parameters = getBAitaSigParameters(con_type, separate_bands)\nelif atlas == 'BAita':\n parameters = getBAitaParameters(con_type, separate_bands)\n n_BAitaSig = None\n<mask token>\nCV_classifier(X, y, n_scz_te, reps, separate_bands, perms, dir_save,\n classifiers, parameters)\n",
"step-3": "<mask token>\n\n\ndef leaveKout_CV(X, y, n_scz_te, rep, perms, classifiers, parameters, count,\n freq_bands, x_size, auc, nz_coef_idx, nz_coef_val, n_BAitaSig=None):\n \"\"\"\n Calculates the leave K out cross validation. \n\n Parameters\n ----------\n X : array of arrays\n Matrix containing a vector with all the features for each subject.\n Dimension (number of subjects)x(number of features).\n y : array\n A vector containing the class-information. \n Remember: 1 = healty controls, 0 = schizophrenic \n n_scz_te : int\n Desired number of schizophrenic patients in each test set.\n rep : integer\n The number of repition that has been used so far.\n perms : range(*)\n Range with desired number (*) of permutations. \n *=1 indicates no permutations.\n classifiers : dictionary\n Dictionary containing classifiers. E.g. {'lasso' : Lasso(max_iter = 10000)}\n parameters : dictionary\n Dictionary containing parameters to the classifiers as in \"classifiers\"\n count : integer\n Used to know how many loops that have been made due to the pre \n allocated space for AUC.\n freq_bands : list of strings\n Either ['all'] or ['detla','theta','alpha','beta1','beta2','gamma'].\n x_size : integer\n The size each X has which changes depending on freq_bands.\n auc : dictionary\n Contains the auc-scores for each loop, either divided into bands or \n with the key \"all\".\n nz_coef_idx : dictionary\n Contains the non-zero coefficient indices for each loop, either \n divided into bands or with the key \"all\".\n nz_coef_val : dictionary\n Contains the non-zero coefficient values (the weights) for each \n loop, either divided into bands or with the key \"all\".\n n_BAitaSig : list of integers, optional\n The number of connections in each band when BAitaSig is used. 
\n The default is None.\n Returns\n -------\n auc : dictionary\n Contains the updated auc-scores for each loop, either divided into \n bands or with the key \"all\".\n nz_coef_idx : dictionary\n Contains the updated non-zero coefficient indices for each loop, \n either divided into bands or with the key \"all\".\n nz_coef_val : dictionary\n Contains the updated non-zero coefficient values (the weights) for \n each loop, either divided into bands or with the key \"all\".\n count : integer\n Used to know how many loops that have been made due to the pre \n allocated space for AUC.\n\n \"\"\"\n skf = StratifiedKFold(n_splits=int(sum(y == 0) // n_scz_te), shuffle=\n True, random_state=rep)\n count_plt = 0\n fig, ax = plt.subplots(2, 3, figsize=(10, 6.5))\n for tr_idx, te_idx in skf.split(X, y):\n y_tr = np.ravel(y[tr_idx])\n y_te = np.ravel(y[te_idx])\n clf_name = list(classifiers.keys())[0]\n count += 1\n sns.set(font_scale=1.5)\n for i in range(1):\n if count_plt == 6:\n plt.suptitle(\n 'Example of line search for the regularization parameter',\n fontsize=18)\n plt.tight_layout()\n plt.subplots_adjust(top=0.84, bottom=0.15, hspace=0.5,\n wspace=0.45)\n fig.legend(['Train', 'Validation'], bbox_to_anchor=(0.5, \n 0.89), borderaxespad=0.0, loc='upper center', ncol=2)\n plt.show()\n fig.savefig(\n '/share/FannyMaster/PythonNew/Figures/LineSearchEx.jpg',\n bbox_inches='tight')\n sns.reset_orig()\n raise NameError(\n 'This is just a dumb way of stopping the code after 6 iterations'\n )\n i = 1\n clf = GridSearchCV(classifiers[clf_name], {'alpha': parameters[\n freq_bands[i]]}, cv=StratifiedKFold(n_splits=int(sum(y_tr ==\n 0) // n_scz_te)), scoring='roc_auc', n_jobs=-1,\n return_train_score=True)\n if n_BAitaSig == None:\n X_tr = X[tr_idx, x_size * i:x_size * (i + 1)]\n X_te = X[te_idx, x_size * i:x_size * (i + 1)]\n elif x_size == sum(n_BAitaSig):\n X_tr = X[tr_idx, :]\n X_te = X[te_idx, :]\n else:\n n_temp = [0]\n n_temp.extend(np.cumsum(n_BAitaSig))\n X_tr = X[tr_idx, 
n_temp[i]:n_temp[i + 1]]\n X_te = X[te_idx, n_temp[i]:n_temp[i + 1]]\n scaler_out = preprocessing.StandardScaler().fit(X_tr)\n X_tr = scaler_out.transform(X_tr)\n X_te = scaler_out.transform(X_te)\n fit = clf.fit(X_tr, y_tr)\n auc[freq_bands[i]][count] = fit.score(X_te, y_te)\n cv_results = clf.cv_results_\n metric = 'score'\n grid_param_1 = parameters[freq_bands[i]]\n scores_mean = cv_results['mean_test_' + metric]\n scores_mean_tr = cv_results['mean_train_' + metric]\n sns.set(font_scale=1.5)\n df1 = pd.DataFrame({'log($\\\\lambda$)': [math.log(i) for i in\n grid_param_1], 'CV Average AUC': scores_mean_tr, 'type': [\n 'train'] * len(scores_mean_tr)})\n df2 = pd.DataFrame({'log($\\\\lambda$)': [math.log(i) for i in\n grid_param_1], 'CV Average AUC': scores_mean, 'type': [\n 'test'] * len(scores_mean_tr)})\n sns.lineplot(x='log($\\\\lambda$)', y='CV Average AUC', style=\n 'type', legend=False, markers='o', data=df1, ax=ax[\n count_plt // 3][count_plt % 3])\n sns.lineplot(x='log($\\\\lambda$)', y='CV Average AUC', style=\n 'type', legend=False, markers='o', data=df2, ax=ax[\n count_plt // 3][count_plt % 3])\n ax[count_plt // 3][count_plt % 3].set_xlabel('log($\\\\lambda$)',\n fontsize=14)\n ax[count_plt // 3][count_plt % 3].set_ylabel('CV Average AUC',\n fontsize=14)\n count_plt += 1\n if len(perms) == 1:\n coef_idx = np.nonzero(fit.best_estimator_.coef_)\n nz_coef_idx[freq_bands[i]].append(coef_idx)\n nz_coef_val[freq_bands[i]].append(fit.best_estimator_.coef_\n [coef_idx])\n return auc, nz_coef_idx, nz_coef_val, count\n\n\ndef CV_classifier(X, y, n_scz_te, reps, separate_bands, perms, dir_save,\n classifiers, parameters, n_BAitaSig=None):\n \"\"\"\n Parameters\n ----------\n X : np.array \n Matrix with dimension (subjects)x(feature vector).\n y : np.array\n Vector with classifications (0: healthy, 1: schizo).\n n_scz_te : int\n Desired number of schizophrenic patients in each test set.\n reps : range(*)\n Range with desired number (*) of extra times the code 
should run.\n separate_bands : boolean\n True = seperate data into frequency bands. False = don't separate.\n perms : range(*)\n Range with desired number (*) of permutations. \n *=1 indicates no permutations.\n dir_save : string\n Directory path to where the results should be saved.\n classifiers : dictionary\n Dictionary containing classifiers. E.g. {'lasso' : Lasso(max_iter = 10000)}\n parameters : dictionary\n Dictionary containing parameters to the classifiers as in \"classifiers\"\n\n Notes\n -------\n Saves three different values in the dir_save path: \n auc : dictionary\n Contains the auc-scores for each loop, either divided into bands or \n with the key \"all\".\n nz_coef_idx : dictionary\n Contains the non-zero coefficient indices for each loop, either \n divided into bands or with the key \"all\".\n nz_coef_val : dictionary\n Contains the non-zero coefficient values (the weights) for each \n loop, either divided into bands or with the key \"all\".\n \n \"\"\"\n if separate_bands:\n freq_bands = ['delta', 'theta', 'alpha', 'beta1', 'beta2', 'gamma']\n else:\n freq_bands = ['all']\n if len(perms) > 1:\n y_org = y\n tqdm_perms = tqdm(perms)\n tqdm_reps = reps\n else:\n tqdm_perms = perms\n tqdm_reps = tqdm(reps)\n auc = {}\n nz_coef_idx = {}\n nz_coef_val = {}\n nb_loops = len(reps) * (sum(y == 0) // n_scz_te) * len(perms)\n x_size = int(X.shape[1] / len(freq_bands))\n for i in freq_bands:\n auc[i] = np.zeros(nb_loops)\n nz_coef_idx[i] = []\n nz_coef_val[i] = []\n count = -1\n for perm in tqdm_perms:\n if len(perms) > 1:\n y = shuffle(y_org, random_state=perm).reset_index(drop=True)\n for rep in tqdm_reps:\n auc, nz_coef_idx, nz_coef_val, count = leaveKout_CV(X, y,\n n_scz_te, rep, perms, classifiers, parameters, count,\n freq_bands, x_size, auc, nz_coef_idx, nz_coef_val, n_BAitaSig)\n\n\ncon_type = 'lps'\nseparate_bands = True\npartialData = True\natlas = 'BAita'\nsns.set(font_scale=1.5)\nfreq_band_type = 'DiLorenzo'\ndir_folders = 
'/share/FannyMaster/PythonNew/' + atlas + '_timeseries_'\nnewest_date = getNewestFolderDate(dir_folders)\ndir_features = dir_folders + newest_date + '/' + freq_band_type + '/Features'\ndir_y_ID = '/share/FannyMaster/PythonNew/Age_Gender.csv'\nn_scz_te = 2\nreps = range(1)\nclassifiers = {'lasso': Lasso(max_iter=10000)}\ndir_save = (dir_folders + newest_date + '/' + freq_band_type +\n '/classificationResults/' + con_type.capitalize())\nX, y = get_Xy(dir_features, dir_y_ID, con_type, partialData)\nif atlas == 'DKEgill':\n X = getEgillX(X)\n n_BAitaSig = None\n parameters = getEgillParameters(con_type, separate_bands)\nelif atlas == 'BAitaSig':\n X, n_BAitaSig = significant_connected_areasBAitaSigX(X)\n parameters = getBAitaSigParameters(con_type, separate_bands)\nelif atlas == 'BAita':\n parameters = getBAitaParameters(con_type, separate_bands)\n n_BAitaSig = None\nperms = range(1)\nCV_classifier(X, y, n_scz_te, reps, separate_bands, perms, dir_save,\n classifiers, parameters)\n",
"step-4": "<mask token>\nimport numpy as np\nimport matplotlib.pyplot as plt\nimport pandas as pd\nfrom tqdm import tqdm\nimport math\nfrom sklearn.model_selection import GridSearchCV, StratifiedKFold\nfrom sklearn import preprocessing\nfrom sklearn.utils import shuffle\nfrom sklearn.linear_model import Lasso\nfrom utils_runOnce_classification import getEgillX, getEgillParameters\nfrom utils_runOnce_classification import significant_connected_areasBAitaSigX, getBAitaSigParameters, getBAitaParameters\nimport seaborn as sns\nfrom utils_joint import getNewestFolderDate, get_Xy\nimport pdb\n\n\ndef leaveKout_CV(X, y, n_scz_te, rep, perms, classifiers, parameters, count,\n freq_bands, x_size, auc, nz_coef_idx, nz_coef_val, n_BAitaSig=None):\n \"\"\"\n Calculates the leave K out cross validation. \n\n Parameters\n ----------\n X : array of arrays\n Matrix containing a vector with all the features for each subject.\n Dimension (number of subjects)x(number of features).\n y : array\n A vector containing the class-information. \n Remember: 1 = healty controls, 0 = schizophrenic \n n_scz_te : int\n Desired number of schizophrenic patients in each test set.\n rep : integer\n The number of repition that has been used so far.\n perms : range(*)\n Range with desired number (*) of permutations. \n *=1 indicates no permutations.\n classifiers : dictionary\n Dictionary containing classifiers. E.g. 
{'lasso' : Lasso(max_iter = 10000)}\n parameters : dictionary\n Dictionary containing parameters to the classifiers as in \"classifiers\"\n count : integer\n Used to know how many loops that have been made due to the pre \n allocated space for AUC.\n freq_bands : list of strings\n Either ['all'] or ['detla','theta','alpha','beta1','beta2','gamma'].\n x_size : integer\n The size each X has which changes depending on freq_bands.\n auc : dictionary\n Contains the auc-scores for each loop, either divided into bands or \n with the key \"all\".\n nz_coef_idx : dictionary\n Contains the non-zero coefficient indices for each loop, either \n divided into bands or with the key \"all\".\n nz_coef_val : dictionary\n Contains the non-zero coefficient values (the weights) for each \n loop, either divided into bands or with the key \"all\".\n n_BAitaSig : list of integers, optional\n The number of connections in each band when BAitaSig is used. \n The default is None.\n Returns\n -------\n auc : dictionary\n Contains the updated auc-scores for each loop, either divided into \n bands or with the key \"all\".\n nz_coef_idx : dictionary\n Contains the updated non-zero coefficient indices for each loop, \n either divided into bands or with the key \"all\".\n nz_coef_val : dictionary\n Contains the updated non-zero coefficient values (the weights) for \n each loop, either divided into bands or with the key \"all\".\n count : integer\n Used to know how many loops that have been made due to the pre \n allocated space for AUC.\n\n \"\"\"\n skf = StratifiedKFold(n_splits=int(sum(y == 0) // n_scz_te), shuffle=\n True, random_state=rep)\n count_plt = 0\n fig, ax = plt.subplots(2, 3, figsize=(10, 6.5))\n for tr_idx, te_idx in skf.split(X, y):\n y_tr = np.ravel(y[tr_idx])\n y_te = np.ravel(y[te_idx])\n clf_name = list(classifiers.keys())[0]\n count += 1\n sns.set(font_scale=1.5)\n for i in range(1):\n if count_plt == 6:\n plt.suptitle(\n 'Example of line search for the regularization 
parameter',\n fontsize=18)\n plt.tight_layout()\n plt.subplots_adjust(top=0.84, bottom=0.15, hspace=0.5,\n wspace=0.45)\n fig.legend(['Train', 'Validation'], bbox_to_anchor=(0.5, \n 0.89), borderaxespad=0.0, loc='upper center', ncol=2)\n plt.show()\n fig.savefig(\n '/share/FannyMaster/PythonNew/Figures/LineSearchEx.jpg',\n bbox_inches='tight')\n sns.reset_orig()\n raise NameError(\n 'This is just a dumb way of stopping the code after 6 iterations'\n )\n i = 1\n clf = GridSearchCV(classifiers[clf_name], {'alpha': parameters[\n freq_bands[i]]}, cv=StratifiedKFold(n_splits=int(sum(y_tr ==\n 0) // n_scz_te)), scoring='roc_auc', n_jobs=-1,\n return_train_score=True)\n if n_BAitaSig == None:\n X_tr = X[tr_idx, x_size * i:x_size * (i + 1)]\n X_te = X[te_idx, x_size * i:x_size * (i + 1)]\n elif x_size == sum(n_BAitaSig):\n X_tr = X[tr_idx, :]\n X_te = X[te_idx, :]\n else:\n n_temp = [0]\n n_temp.extend(np.cumsum(n_BAitaSig))\n X_tr = X[tr_idx, n_temp[i]:n_temp[i + 1]]\n X_te = X[te_idx, n_temp[i]:n_temp[i + 1]]\n scaler_out = preprocessing.StandardScaler().fit(X_tr)\n X_tr = scaler_out.transform(X_tr)\n X_te = scaler_out.transform(X_te)\n fit = clf.fit(X_tr, y_tr)\n auc[freq_bands[i]][count] = fit.score(X_te, y_te)\n cv_results = clf.cv_results_\n metric = 'score'\n grid_param_1 = parameters[freq_bands[i]]\n scores_mean = cv_results['mean_test_' + metric]\n scores_mean_tr = cv_results['mean_train_' + metric]\n sns.set(font_scale=1.5)\n df1 = pd.DataFrame({'log($\\\\lambda$)': [math.log(i) for i in\n grid_param_1], 'CV Average AUC': scores_mean_tr, 'type': [\n 'train'] * len(scores_mean_tr)})\n df2 = pd.DataFrame({'log($\\\\lambda$)': [math.log(i) for i in\n grid_param_1], 'CV Average AUC': scores_mean, 'type': [\n 'test'] * len(scores_mean_tr)})\n sns.lineplot(x='log($\\\\lambda$)', y='CV Average AUC', style=\n 'type', legend=False, markers='o', data=df1, ax=ax[\n count_plt // 3][count_plt % 3])\n sns.lineplot(x='log($\\\\lambda$)', y='CV Average AUC', style=\n 'type', 
legend=False, markers='o', data=df2, ax=ax[\n count_plt // 3][count_plt % 3])\n ax[count_plt // 3][count_plt % 3].set_xlabel('log($\\\\lambda$)',\n fontsize=14)\n ax[count_plt // 3][count_plt % 3].set_ylabel('CV Average AUC',\n fontsize=14)\n count_plt += 1\n if len(perms) == 1:\n coef_idx = np.nonzero(fit.best_estimator_.coef_)\n nz_coef_idx[freq_bands[i]].append(coef_idx)\n nz_coef_val[freq_bands[i]].append(fit.best_estimator_.coef_\n [coef_idx])\n return auc, nz_coef_idx, nz_coef_val, count\n\n\ndef CV_classifier(X, y, n_scz_te, reps, separate_bands, perms, dir_save,\n classifiers, parameters, n_BAitaSig=None):\n \"\"\"\n Parameters\n ----------\n X : np.array \n Matrix with dimension (subjects)x(feature vector).\n y : np.array\n Vector with classifications (0: healthy, 1: schizo).\n n_scz_te : int\n Desired number of schizophrenic patients in each test set.\n reps : range(*)\n Range with desired number (*) of extra times the code should run.\n separate_bands : boolean\n True = seperate data into frequency bands. False = don't separate.\n perms : range(*)\n Range with desired number (*) of permutations. \n *=1 indicates no permutations.\n dir_save : string\n Directory path to where the results should be saved.\n classifiers : dictionary\n Dictionary containing classifiers. E.g. 
{'lasso' : Lasso(max_iter = 10000)}\n parameters : dictionary\n Dictionary containing parameters to the classifiers as in \"classifiers\"\n\n Notes\n -------\n Saves three different values in the dir_save path: \n auc : dictionary\n Contains the auc-scores for each loop, either divided into bands or \n with the key \"all\".\n nz_coef_idx : dictionary\n Contains the non-zero coefficient indices for each loop, either \n divided into bands or with the key \"all\".\n nz_coef_val : dictionary\n Contains the non-zero coefficient values (the weights) for each \n loop, either divided into bands or with the key \"all\".\n \n \"\"\"\n if separate_bands:\n freq_bands = ['delta', 'theta', 'alpha', 'beta1', 'beta2', 'gamma']\n else:\n freq_bands = ['all']\n if len(perms) > 1:\n y_org = y\n tqdm_perms = tqdm(perms)\n tqdm_reps = reps\n else:\n tqdm_perms = perms\n tqdm_reps = tqdm(reps)\n auc = {}\n nz_coef_idx = {}\n nz_coef_val = {}\n nb_loops = len(reps) * (sum(y == 0) // n_scz_te) * len(perms)\n x_size = int(X.shape[1] / len(freq_bands))\n for i in freq_bands:\n auc[i] = np.zeros(nb_loops)\n nz_coef_idx[i] = []\n nz_coef_val[i] = []\n count = -1\n for perm in tqdm_perms:\n if len(perms) > 1:\n y = shuffle(y_org, random_state=perm).reset_index(drop=True)\n for rep in tqdm_reps:\n auc, nz_coef_idx, nz_coef_val, count = leaveKout_CV(X, y,\n n_scz_te, rep, perms, classifiers, parameters, count,\n freq_bands, x_size, auc, nz_coef_idx, nz_coef_val, n_BAitaSig)\n\n\ncon_type = 'lps'\nseparate_bands = True\npartialData = True\natlas = 'BAita'\nsns.set(font_scale=1.5)\nfreq_band_type = 'DiLorenzo'\ndir_folders = '/share/FannyMaster/PythonNew/' + atlas + '_timeseries_'\nnewest_date = getNewestFolderDate(dir_folders)\ndir_features = dir_folders + newest_date + '/' + freq_band_type + '/Features'\ndir_y_ID = '/share/FannyMaster/PythonNew/Age_Gender.csv'\nn_scz_te = 2\nreps = range(1)\nclassifiers = {'lasso': Lasso(max_iter=10000)}\ndir_save = (dir_folders + newest_date + '/' + 
freq_band_type +\n '/classificationResults/' + con_type.capitalize())\nX, y = get_Xy(dir_features, dir_y_ID, con_type, partialData)\nif atlas == 'DKEgill':\n X = getEgillX(X)\n n_BAitaSig = None\n parameters = getEgillParameters(con_type, separate_bands)\nelif atlas == 'BAitaSig':\n X, n_BAitaSig = significant_connected_areasBAitaSigX(X)\n parameters = getBAitaSigParameters(con_type, separate_bands)\nelif atlas == 'BAita':\n parameters = getBAitaParameters(con_type, separate_bands)\n n_BAitaSig = None\nperms = range(1)\nCV_classifier(X, y, n_scz_te, reps, separate_bands, perms, dir_save,\n classifiers, parameters)\n",
"step-5": "#!/usr/bin/env python3\n# -*- coding: utf-8 -*-\n\"\"\"\nCreated on Tue May 26 18:39:26 2020\n\n@author: Fanny Fredriksson and Karen Marie Sandø Ambrosen\n\"\"\"\n\nimport numpy as np\nimport matplotlib.pyplot as plt\nimport pandas as pd\nfrom tqdm import tqdm #count ffor loops\nimport math\nfrom sklearn.model_selection import GridSearchCV, StratifiedKFold\nfrom sklearn import preprocessing\nfrom sklearn.utils import shuffle\nfrom sklearn.linear_model import Lasso\nfrom utils_runOnce_classification import getEgillX, getEgillParameters\nfrom utils_runOnce_classification import significant_connected_areasBAitaSigX, getBAitaSigParameters, getBAitaParameters\nimport seaborn as sns\nfrom utils_joint import getNewestFolderDate, get_Xy\n\nimport pdb\n#{}\n#[]\n\n \n##############################################################################\ndef leaveKout_CV(X, y, n_scz_te, rep, perms, classifiers, parameters, count,\n freq_bands, x_size, auc, nz_coef_idx, nz_coef_val, n_BAitaSig = None):\n \"\"\"\n Calculates the leave K out cross validation. \n\n Parameters\n ----------\n X : array of arrays\n Matrix containing a vector with all the features for each subject.\n Dimension (number of subjects)x(number of features).\n y : array\n A vector containing the class-information. \n Remember: 1 = healty controls, 0 = schizophrenic \n n_scz_te : int\n Desired number of schizophrenic patients in each test set.\n rep : integer\n The number of repition that has been used so far.\n perms : range(*)\n Range with desired number (*) of permutations. \n *=1 indicates no permutations.\n classifiers : dictionary\n Dictionary containing classifiers. E.g. 
{'lasso' : Lasso(max_iter = 10000)}\n parameters : dictionary\n Dictionary containing parameters to the classifiers as in \"classifiers\"\n count : integer\n Used to know how many loops that have been made due to the pre \n allocated space for AUC.\n freq_bands : list of strings\n Either ['all'] or ['detla','theta','alpha','beta1','beta2','gamma'].\n x_size : integer\n The size each X has which changes depending on freq_bands.\n auc : dictionary\n Contains the auc-scores for each loop, either divided into bands or \n with the key \"all\".\n nz_coef_idx : dictionary\n Contains the non-zero coefficient indices for each loop, either \n divided into bands or with the key \"all\".\n nz_coef_val : dictionary\n Contains the non-zero coefficient values (the weights) for each \n loop, either divided into bands or with the key \"all\".\n n_BAitaSig : list of integers, optional\n The number of connections in each band when BAitaSig is used. \n The default is None.\n Returns\n -------\n auc : dictionary\n Contains the updated auc-scores for each loop, either divided into \n bands or with the key \"all\".\n nz_coef_idx : dictionary\n Contains the updated non-zero coefficient indices for each loop, \n either divided into bands or with the key \"all\".\n nz_coef_val : dictionary\n Contains the updated non-zero coefficient values (the weights) for \n each loop, either divided into bands or with the key \"all\".\n count : integer\n Used to know how many loops that have been made due to the pre \n allocated space for AUC.\n\n \"\"\"\n \n skf = StratifiedKFold(n_splits=int(sum(y==0)//n_scz_te),shuffle=True, random_state = rep)\n count_plt = 0\n fig, ax = plt.subplots(2,3 , figsize=(10,6.5))\n for tr_idx, te_idx in skf.split(X,y):\n # Compute test and train targets\n y_tr = np.ravel(y[tr_idx])\n y_te = np.ravel(y[te_idx])\n \n # Make gridsearch function\n clf_name = list(classifiers.keys())[0]\n count += 1\n sns.set(font_scale=1.5)\n for i in range(1): #range(len(freq_bands)):\n if 
count_plt == 6:\n plt.suptitle('Example of line search for the regularization parameter', fontsize= 18)\n plt.tight_layout()\n plt.subplots_adjust(top = 0.84, bottom = 0.15, hspace = 0.5, wspace = 0.45)\n fig.legend(['Train', 'Validation'], bbox_to_anchor = (0.5, 0.89), \n borderaxespad = 0., loc = 'upper center', ncol = 2)\n \n plt.show()\n fig.savefig('/share/FannyMaster/PythonNew/Figures/LineSearchEx.jpg', bbox_inches = 'tight')\n sns.reset_orig()\n raise NameError('This is just a dumb way of stopping the code after 6 iterations')\n \n i = 1\n clf = GridSearchCV(classifiers[clf_name], {'alpha' :parameters[freq_bands[i]]}, \n cv = StratifiedKFold(n_splits = int(sum(y_tr==0)//n_scz_te)), \n scoring = 'roc_auc', n_jobs = -1, return_train_score=True)\n # Compute test and train sets \n if n_BAitaSig == None:\n X_tr = X[tr_idx, x_size*i:x_size*(i+1)]\n X_te = X[te_idx, x_size*i:x_size*(i+1)]\n else:\n if x_size == sum(n_BAitaSig):\n X_tr = X[tr_idx, :]\n X_te = X[te_idx, :]\n else:\n n_temp = [0]\n n_temp.extend(np.cumsum(n_BAitaSig))\n X_tr = X[tr_idx, n_temp[i]:n_temp[i+1]]\n X_te = X[te_idx, n_temp[i]:n_temp[i+1]]\n \n \n # Standardize\n scaler_out = preprocessing.StandardScaler().fit(X_tr)\n X_tr = scaler_out.transform(X_tr)\n X_te = scaler_out.transform(X_te)\n\n # Fit data and save auc scores\n fit = clf.fit(X_tr, y_tr)\n auc[freq_bands[i]][count] = fit.score(X_te, y_te)\n \n # Make parameter plot\n #plot_grid_search(clf.cv_results_, 'score', parameters[freq_bands[i]], 'log($\\lambda$) ' + freq_bands[i])\n cv_results = clf.cv_results_\n metric = 'score'\n grid_param_1 = parameters[freq_bands[i]]\n \n scores_mean = cv_results[('mean_test_' + metric)]\n # scores_sd = cv_results[('std_test_' + metric)]\n scores_mean_tr = cv_results[('mean_train_' + metric)]\n \n # Set plot style\n #plt.style.use('seaborn')\n \n # Plot Grid search scores\n\n sns.set(font_scale=1.5)\n df1 = pd.DataFrame({'log($\\lambda$)':[math.log(i) for i in grid_param_1], 'CV Average AUC' : 
scores_mean_tr, 'type' : ['train']*len(scores_mean_tr)})\n df2 = pd.DataFrame({'log($\\lambda$)':[math.log(i) for i in grid_param_1], 'CV Average AUC' : scores_mean, 'type' : ['test']*len(scores_mean_tr)})\n sns.lineplot(x = 'log($\\lambda$)', y = 'CV Average AUC', style='type', legend = False, markers = \"o\", data = df1, ax = ax[count_plt//3][count_plt%3])\n sns.lineplot(x = 'log($\\lambda$)', y = 'CV Average AUC', style='type', legend = False, markers = \"o\", data = df2, ax = ax[count_plt//3][count_plt%3])\n\n ax[count_plt//3][count_plt%3].set_xlabel('log($\\lambda$)', fontsize=14)\n ax[count_plt//3][count_plt%3].set_ylabel('CV Average AUC' , fontsize=14) \n \n #pprint(clf.cv_results_)\n #pdb.set_trace() # Type \"exit\" to get out, type \"c\" to continue\n count_plt += 1\n if len(perms) == 1:\n coef_idx = np.nonzero(fit.best_estimator_.coef_)\n nz_coef_idx[freq_bands[i]].append(coef_idx)\n nz_coef_val[freq_bands[i]].append(fit.best_estimator_.coef_[coef_idx])\n\n return auc, nz_coef_idx, nz_coef_val, count\n\n##############################################################################\ndef CV_classifier(X, y, n_scz_te, reps, separate_bands, perms, dir_save, \n classifiers, parameters, n_BAitaSig = None):\n \"\"\"\n Parameters\n ----------\n X : np.array \n Matrix with dimension (subjects)x(feature vector).\n y : np.array\n Vector with classifications (0: healthy, 1: schizo).\n n_scz_te : int\n Desired number of schizophrenic patients in each test set.\n reps : range(*)\n Range with desired number (*) of extra times the code should run.\n separate_bands : boolean\n True = seperate data into frequency bands. False = don't separate.\n perms : range(*)\n Range with desired number (*) of permutations. \n *=1 indicates no permutations.\n dir_save : string\n Directory path to where the results should be saved.\n classifiers : dictionary\n Dictionary containing classifiers. E.g. 
{'lasso' : Lasso(max_iter = 10000)}\n parameters : dictionary\n Dictionary containing parameters to the classifiers as in \"classifiers\"\n\n Notes\n -------\n Saves three different values in the dir_save path: \n auc : dictionary\n Contains the auc-scores for each loop, either divided into bands or \n with the key \"all\".\n nz_coef_idx : dictionary\n Contains the non-zero coefficient indices for each loop, either \n divided into bands or with the key \"all\".\n nz_coef_val : dictionary\n Contains the non-zero coefficient values (the weights) for each \n loop, either divided into bands or with the key \"all\".\n \n \"\"\" \n \n # Check if data should be seperated into bands or not:\n if separate_bands:\n freq_bands = ['delta', 'theta', 'alpha', 'beta1', 'beta2', 'gamma']\n else:\n freq_bands = ['all']\n \n if len(perms) > 1:\n y_org = y\n tqdm_perms = tqdm(perms)\n tqdm_reps = reps\n else: \n tqdm_perms = perms\n tqdm_reps = tqdm(reps)\n \n # Initialize space for values \n auc = {}\n nz_coef_idx= {}\n nz_coef_val= {}\n nb_loops = len(reps)*(sum(y==0)//n_scz_te)*len(perms)\n # Define the size of X\n x_size = int(X.shape[1]/len(freq_bands))\n for i in freq_bands:\n auc[i] = np.zeros(nb_loops) # e.g. 
auc = {'delta':[] , 'theta': [], 'alpha': [], ....}\n nz_coef_idx[i] = []\n nz_coef_val[i] = []\n \n count = -1\n for perm in tqdm_perms:\n if len(perms) > 1:\n y = shuffle(y_org, random_state=perm).reset_index(drop=True)\n \n for rep in tqdm_reps:\n auc, nz_coef_idx, nz_coef_val, count = leaveKout_CV(X, y, n_scz_te, rep, \n perms, classifiers, parameters, count, \n freq_bands, x_size, auc, nz_coef_idx, \n nz_coef_val, n_BAitaSig)\n\n\n\n#%%\ncon_type = 'lps'\nseparate_bands = True # False = All bands together\npartialData = True\n\natlas = 'BAita' # DKEgill, BAita, BAitaSig\n\nsns.set(font_scale=1.5)\nfreq_band_type = 'DiLorenzo'\n# Directories\ndir_folders = r'/share/FannyMaster/PythonNew/' + atlas + '_timeseries_'\nnewest_date = getNewestFolderDate(dir_folders)\ndir_features = dir_folders + newest_date + '/' + freq_band_type + '/Features' \ndir_y_ID = r'/share/FannyMaster/PythonNew/Age_Gender.csv'\nn_scz_te = 2\nreps = range(1)\nclassifiers = {'lasso' : Lasso(max_iter = 10000)} \ndir_save = dir_folders + newest_date + '/' + freq_band_type + '/classificationResults/' + con_type.capitalize() \nX,y = get_Xy(dir_features, dir_y_ID, con_type, partialData)\n\nif atlas == 'DKEgill':\n X = getEgillX(X)\n n_BAitaSig = None\n parameters = getEgillParameters(con_type, separate_bands)\nelif atlas == 'BAitaSig':\n X, n_BAitaSig = significant_connected_areasBAitaSigX(X)\n parameters = getBAitaSigParameters(con_type, separate_bands)\nelif atlas == 'BAita':\n parameters = getBAitaParameters(con_type, separate_bands)\n n_BAitaSig = None\n\nperms = range(1) # 1 = No permutations\nCV_classifier(X, y, n_scz_te, reps, separate_bands, perms, dir_save, \n classifiers, parameters)\n\n\n\n\n",
"step-ids": [
0,
3,
4,
5,
6
]
}
|
[
0,
3,
4,
5,
6
] |
"""Test radix sort."""
import random
from collections import OrderedDict
from que_ import Queue
def test_stringify_nums():
    """A list of ints is converted to the matching list of decimal strings."""
    from radixsort import stringify_nums
    assert stringify_nums([1, 2, 3, 4, 5]) == ['1', '2', '3', '4', '5']
def test_while_condition():
    """The loop bound equals the digit count of the longest stringified number."""
    from radixsort import while_condition
    assert while_condition(['1', '2', '3', '4', '5000']) == 4
def test_unravel_buckets():
    """Buckets each holding their own label unravel back in key order."""
    from radixsort import unravel_buckets
    # Same key order as the radix buckets: 'none' first, then digits 0-9.
    labels = ['none'] + [str(digit) for digit in range(10)]
    buckets = OrderedDict((label, Queue()) for label in labels)

    for label, queue in buckets.items():
        queue.enqueue(label)

    assert unravel_buckets(buckets) == labels
def test_push_into_buckets():
    """Each single-digit string lands in the bucket labelled with that digit."""
    from radixsort import push_into_buckets

    labels = ['none'] + [str(digit) for digit in range(10)]
    buckets = OrderedDict((label, Queue()) for label in labels)
    digits = [str(digit) for digit in range(10)]

    filled = push_into_buckets(digits, 0, buckets)

    # Only non-empty buckets are checked; 'none' stays empty at place 0.
    for label, queue in filled.items():
        if queue.peek():
            assert queue.dequeue() == label
def test_radix_sort():
    """A small unordered list comes back in ascending order."""
    from radixsort import radixsort
    assert radixsort([5, 3, 2, 7, 9, 4, 0, 1]) == [0, 1, 2, 3, 4, 5, 7, 9]
def test_radix_sort_verbose():
    """Radix sort agrees with the builtin sort on 100 randomly built lists."""
    from radixsort import radixsort
    for _ in range(100):
        # Random length first, then that many random elements in [0, 100].
        unsorted_list = [random.randint(0, 100)
                         for _ in range(random.randint(0, 100))]
        assert radixsort(unsorted_list) == sorted(unsorted_list)
|
normal
|
{
"blob_id": "fd907dbcea01679c08aeae6bcbf6e61786f40260",
"index": 2511,
"step-1": "<mask token>\n\n\ndef test_stringify_nums():\n \"\"\".\"\"\"\n from radixsort import stringify_nums\n nums = [1, 2, 3, 4, 5]\n stringified_nums = stringify_nums(nums)\n assert stringified_nums == ['1', '2', '3', '4', '5']\n\n\ndef test_while_condition():\n \"\"\".\"\"\"\n from radixsort import while_condition\n stringified_nums = ['1', '2', '3', '4', '5000']\n assert while_condition(stringified_nums) == 4\n\n\n<mask token>\n\n\ndef test_push_into_buckets():\n \"\"\".\"\"\"\n from radixsort import push_into_buckets\n buckets_dict = OrderedDict({'none': Queue(), '0': Queue(), '1': Queue(),\n '2': Queue(), '3': Queue(), '4': Queue(), '5': Queue(), '6': Queue(\n ), '7': Queue(), '8': Queue(), '9': Queue()})\n nums = ['0', '1', '2', '3', '4', '5', '6', '7', '8', '9']\n full_buckets_dict = push_into_buckets(nums, 0, buckets_dict)\n for key in full_buckets_dict:\n if full_buckets_dict[key].peek():\n assert full_buckets_dict[key].dequeue() == key\n\n\n<mask token>\n\n\ndef test_radix_sort_verbose():\n \"\"\"Test with many lists.\"\"\"\n from radixsort import radixsort\n for i in range(100):\n list_length = random.randint(0, 100)\n unsorted_list = []\n for x in range(list_length):\n unsorted_list.append(random.randint(0, 100))\n assert radixsort(unsorted_list) == sorted(unsorted_list)\n",
"step-2": "<mask token>\n\n\ndef test_stringify_nums():\n \"\"\".\"\"\"\n from radixsort import stringify_nums\n nums = [1, 2, 3, 4, 5]\n stringified_nums = stringify_nums(nums)\n assert stringified_nums == ['1', '2', '3', '4', '5']\n\n\ndef test_while_condition():\n \"\"\".\"\"\"\n from radixsort import while_condition\n stringified_nums = ['1', '2', '3', '4', '5000']\n assert while_condition(stringified_nums) == 4\n\n\ndef test_unravel_buckets():\n \"\"\".\"\"\"\n from radixsort import unravel_buckets\n buckets_dict = OrderedDict({'none': Queue(), '0': Queue(), '1': Queue(),\n '2': Queue(), '3': Queue(), '4': Queue(), '5': Queue(), '6': Queue(\n ), '7': Queue(), '8': Queue(), '9': Queue()})\n for bucket in buckets_dict:\n buckets_dict[bucket].enqueue(bucket)\n assert unravel_buckets(buckets_dict) == ['none', '0', '1', '2', '3',\n '4', '5', '6', '7', '8', '9']\n\n\ndef test_push_into_buckets():\n \"\"\".\"\"\"\n from radixsort import push_into_buckets\n buckets_dict = OrderedDict({'none': Queue(), '0': Queue(), '1': Queue(),\n '2': Queue(), '3': Queue(), '4': Queue(), '5': Queue(), '6': Queue(\n ), '7': Queue(), '8': Queue(), '9': Queue()})\n nums = ['0', '1', '2', '3', '4', '5', '6', '7', '8', '9']\n full_buckets_dict = push_into_buckets(nums, 0, buckets_dict)\n for key in full_buckets_dict:\n if full_buckets_dict[key].peek():\n assert full_buckets_dict[key].dequeue() == key\n\n\n<mask token>\n\n\ndef test_radix_sort_verbose():\n \"\"\"Test with many lists.\"\"\"\n from radixsort import radixsort\n for i in range(100):\n list_length = random.randint(0, 100)\n unsorted_list = []\n for x in range(list_length):\n unsorted_list.append(random.randint(0, 100))\n assert radixsort(unsorted_list) == sorted(unsorted_list)\n",
"step-3": "<mask token>\n\n\ndef test_stringify_nums():\n \"\"\".\"\"\"\n from radixsort import stringify_nums\n nums = [1, 2, 3, 4, 5]\n stringified_nums = stringify_nums(nums)\n assert stringified_nums == ['1', '2', '3', '4', '5']\n\n\ndef test_while_condition():\n \"\"\".\"\"\"\n from radixsort import while_condition\n stringified_nums = ['1', '2', '3', '4', '5000']\n assert while_condition(stringified_nums) == 4\n\n\ndef test_unravel_buckets():\n \"\"\".\"\"\"\n from radixsort import unravel_buckets\n buckets_dict = OrderedDict({'none': Queue(), '0': Queue(), '1': Queue(),\n '2': Queue(), '3': Queue(), '4': Queue(), '5': Queue(), '6': Queue(\n ), '7': Queue(), '8': Queue(), '9': Queue()})\n for bucket in buckets_dict:\n buckets_dict[bucket].enqueue(bucket)\n assert unravel_buckets(buckets_dict) == ['none', '0', '1', '2', '3',\n '4', '5', '6', '7', '8', '9']\n\n\ndef test_push_into_buckets():\n \"\"\".\"\"\"\n from radixsort import push_into_buckets\n buckets_dict = OrderedDict({'none': Queue(), '0': Queue(), '1': Queue(),\n '2': Queue(), '3': Queue(), '4': Queue(), '5': Queue(), '6': Queue(\n ), '7': Queue(), '8': Queue(), '9': Queue()})\n nums = ['0', '1', '2', '3', '4', '5', '6', '7', '8', '9']\n full_buckets_dict = push_into_buckets(nums, 0, buckets_dict)\n for key in full_buckets_dict:\n if full_buckets_dict[key].peek():\n assert full_buckets_dict[key].dequeue() == key\n\n\ndef test_radix_sort():\n \"\"\"Test with simple list.\"\"\"\n from radixsort import radixsort\n nums = [5, 3, 2, 7, 9, 4, 0, 1]\n assert radixsort(nums) == [0, 1, 2, 3, 4, 5, 7, 9]\n\n\ndef test_radix_sort_verbose():\n \"\"\"Test with many lists.\"\"\"\n from radixsort import radixsort\n for i in range(100):\n list_length = random.randint(0, 100)\n unsorted_list = []\n for x in range(list_length):\n unsorted_list.append(random.randint(0, 100))\n assert radixsort(unsorted_list) == sorted(unsorted_list)\n",
"step-4": "<mask token>\nimport random\nfrom collections import OrderedDict\nfrom que_ import Queue\n\n\ndef test_stringify_nums():\n \"\"\".\"\"\"\n from radixsort import stringify_nums\n nums = [1, 2, 3, 4, 5]\n stringified_nums = stringify_nums(nums)\n assert stringified_nums == ['1', '2', '3', '4', '5']\n\n\ndef test_while_condition():\n \"\"\".\"\"\"\n from radixsort import while_condition\n stringified_nums = ['1', '2', '3', '4', '5000']\n assert while_condition(stringified_nums) == 4\n\n\ndef test_unravel_buckets():\n \"\"\".\"\"\"\n from radixsort import unravel_buckets\n buckets_dict = OrderedDict({'none': Queue(), '0': Queue(), '1': Queue(),\n '2': Queue(), '3': Queue(), '4': Queue(), '5': Queue(), '6': Queue(\n ), '7': Queue(), '8': Queue(), '9': Queue()})\n for bucket in buckets_dict:\n buckets_dict[bucket].enqueue(bucket)\n assert unravel_buckets(buckets_dict) == ['none', '0', '1', '2', '3',\n '4', '5', '6', '7', '8', '9']\n\n\ndef test_push_into_buckets():\n \"\"\".\"\"\"\n from radixsort import push_into_buckets\n buckets_dict = OrderedDict({'none': Queue(), '0': Queue(), '1': Queue(),\n '2': Queue(), '3': Queue(), '4': Queue(), '5': Queue(), '6': Queue(\n ), '7': Queue(), '8': Queue(), '9': Queue()})\n nums = ['0', '1', '2', '3', '4', '5', '6', '7', '8', '9']\n full_buckets_dict = push_into_buckets(nums, 0, buckets_dict)\n for key in full_buckets_dict:\n if full_buckets_dict[key].peek():\n assert full_buckets_dict[key].dequeue() == key\n\n\ndef test_radix_sort():\n \"\"\"Test with simple list.\"\"\"\n from radixsort import radixsort\n nums = [5, 3, 2, 7, 9, 4, 0, 1]\n assert radixsort(nums) == [0, 1, 2, 3, 4, 5, 7, 9]\n\n\ndef test_radix_sort_verbose():\n \"\"\"Test with many lists.\"\"\"\n from radixsort import radixsort\n for i in range(100):\n list_length = random.randint(0, 100)\n unsorted_list = []\n for x in range(list_length):\n unsorted_list.append(random.randint(0, 100))\n assert radixsort(unsorted_list) == sorted(unsorted_list)\n",
"step-5": "\"\"\"Test radix sort.\"\"\"\n\nimport random\nfrom collections import OrderedDict\nfrom que_ import Queue\n\n\ndef test_stringify_nums():\n \"\"\".\"\"\"\n from radixsort import stringify_nums\n nums = [1, 2, 3, 4, 5]\n stringified_nums = stringify_nums(nums)\n assert stringified_nums == ['1', '2', '3', '4', '5']\n\n\ndef test_while_condition():\n \"\"\".\"\"\"\n from radixsort import while_condition\n stringified_nums = ['1', '2', '3', '4', '5000']\n assert while_condition(stringified_nums) == 4\n\n\ndef test_unravel_buckets():\n \"\"\".\"\"\"\n from radixsort import unravel_buckets\n buckets_dict = OrderedDict({\n 'none': Queue(),\n '0': Queue(),\n '1': Queue(),\n '2': Queue(),\n '3': Queue(),\n '4': Queue(),\n '5': Queue(),\n '6': Queue(),\n '7': Queue(),\n '8': Queue(),\n '9': Queue(),\n })\n\n for bucket in buckets_dict:\n buckets_dict[bucket].enqueue(bucket)\n\n assert unravel_buckets(buckets_dict) == ['none', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9']\n\n\ndef test_push_into_buckets():\n \"\"\".\"\"\"\n from radixsort import push_into_buckets\n\n buckets_dict = OrderedDict({\n 'none': Queue(),\n '0': Queue(),\n '1': Queue(),\n '2': Queue(),\n '3': Queue(),\n '4': Queue(),\n '5': Queue(),\n '6': Queue(),\n '7': Queue(),\n '8': Queue(),\n '9': Queue(),\n })\n\n nums = ['0', '1', '2', '3', '4', '5', '6', '7', '8', '9']\n\n full_buckets_dict = push_into_buckets(nums, 0, buckets_dict)\n\n for key in full_buckets_dict:\n if full_buckets_dict[key].peek():\n assert full_buckets_dict[key].dequeue() == key\n\n\ndef test_radix_sort():\n \"\"\"Test with simple list.\"\"\"\n from radixsort import radixsort\n nums = [5, 3, 2, 7, 9, 4, 0, 1]\n assert radixsort(nums) == [0, 1, 2, 3, 4, 5, 7, 9]\n\n\ndef test_radix_sort_verbose():\n \"\"\"Test with many lists.\"\"\"\n from radixsort import radixsort\n # test on 100 lists\n for i in range(100):\n # generate random length of list\n list_length = random.randint(0, 100)\n unsorted_list = []\n for x in 
range(list_length):\n # generate random numbers for random length list\n unsorted_list.append(random.randint(0, 100))\n\n # test that list is sorted\n assert radixsort(unsorted_list) == sorted(unsorted_list)\n",
"step-ids": [
4,
5,
6,
7,
8
]
}
|
[
4,
5,
6,
7,
8
] |
from docutils import nodes
from docutils.parsers.rst import directives, Directive
from pygments import highlight
from pygments.lexers import get_lexer_by_name
from pygments.lexers.special import TextLexer
from pygments.formatters.html import HtmlFormatter
class Pygments(Directive):
    """ Source code syntax highlighting.
    """
    # Directive interface: exactly one argument (the lexer name); the code
    # to highlight is the directive's content.
    required_arguments = 1
    optional_arguments = 0
    final_argument_whitespace = True
    # Options forwarded (almost) verbatim to pygments' HtmlFormatter.
    option_spec = {
        'anchorlinenos': directives.flag,
        'classprefix': directives.unchanged,
        'hl_lines': directives.unchanged,
        'lineanchors': directives.unchanged,
        'linenos': directives.unchanged,
        'linenospecial': directives.nonnegative_int,
        'linenostart': directives.nonnegative_int,
        'linenostep': directives.nonnegative_int,
        'lineseparator': directives.unchanged,
        'linespans': directives.unchanged,
        'nobackground': directives.flag,
        'nowrap': directives.flag,
        'tagsfile': directives.unchanged,
        'tagurlformat': directives.unchanged,
    }
    has_content = True
    def run(self):
        """Highlight the directive content and return it as one raw HTML node."""
        self.assert_has_content()
        try:
            lexer = get_lexer_by_name(self.arguments[0])
        except ValueError:
            # no lexer found - use the text one instead of an exception
            lexer = TextLexer()
        # Normalise 'linenos': 'none' disables numbering; any value other
        # than 'table'/'inline' falls back to 'table'.
        if 'linenos' in self.options and self.options['linenos'] not in ('table', 'inline'):
            if self.options['linenos'] == 'none':
                self.options.pop('linenos')
            else:
                self.options['linenos'] = 'table'
        # docutils stores flag options as None; HtmlFormatter expects booleans.
        for flag in ('nowrap', 'nobackground', 'anchorlinenos'):
            if flag in self.options:
                self.options[flag] = True
        # noclasses should already default to False, but just in case...
        formatter = HtmlFormatter(noclasses=False, **self.options)
        parsed = highlight('\n'.join(self.content), lexer, formatter)
        return [nodes.raw('', parsed, format='html')]
def register():
    """Register the Pygments directive under both conventional names."""
    for directive_name in ('code-block', 'sourcecode'):
        directives.register_directive(directive_name, Pygments)
|
normal
|
{
"blob_id": "d3dcef6a1a6bcfc1161c4de46081703b8fe7016d",
"index": 9606,
"step-1": "<mask token>\n\n\nclass Pygments(Directive):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def run(self):\n self.assert_has_content()\n try:\n lexer = get_lexer_by_name(self.arguments[0])\n except ValueError:\n lexer = TextLexer()\n if 'linenos' in self.options and self.options['linenos'] not in (\n 'table', 'inline'):\n if self.options['linenos'] == 'none':\n self.options.pop('linenos')\n else:\n self.options['linenos'] = 'table'\n for flag in ('nowrap', 'nobackground', 'anchorlinenos'):\n if flag in self.options:\n self.options[flag] = True\n formatter = HtmlFormatter(noclasses=False, **self.options)\n parsed = highlight('\\n'.join(self.content), lexer, formatter)\n return [nodes.raw('', parsed, format='html')]\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass Pygments(Directive):\n \"\"\" Source code syntax hightlighting.\n \"\"\"\n required_arguments = 1\n optional_arguments = 0\n final_argument_whitespace = True\n option_spec = {'anchorlinenos': directives.flag, 'classprefix':\n directives.unchanged, 'hl_lines': directives.unchanged,\n 'lineanchors': directives.unchanged, 'linenos': directives.\n unchanged, 'linenospecial': directives.nonnegative_int,\n 'linenostart': directives.nonnegative_int, 'linenostep': directives\n .nonnegative_int, 'lineseparator': directives.unchanged,\n 'linespans': directives.unchanged, 'nobackground': directives.flag,\n 'nowrap': directives.flag, 'tagsfile': directives.unchanged,\n 'tagurlformat': directives.unchanged}\n has_content = True\n\n def run(self):\n self.assert_has_content()\n try:\n lexer = get_lexer_by_name(self.arguments[0])\n except ValueError:\n lexer = TextLexer()\n if 'linenos' in self.options and self.options['linenos'] not in (\n 'table', 'inline'):\n if self.options['linenos'] == 'none':\n self.options.pop('linenos')\n else:\n self.options['linenos'] = 'table'\n for flag in ('nowrap', 'nobackground', 'anchorlinenos'):\n if flag in self.options:\n self.options[flag] = True\n formatter = HtmlFormatter(noclasses=False, **self.options)\n parsed = highlight('\\n'.join(self.content), lexer, formatter)\n return [nodes.raw('', parsed, format='html')]\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass Pygments(Directive):\n \"\"\" Source code syntax hightlighting.\n \"\"\"\n required_arguments = 1\n optional_arguments = 0\n final_argument_whitespace = True\n option_spec = {'anchorlinenos': directives.flag, 'classprefix':\n directives.unchanged, 'hl_lines': directives.unchanged,\n 'lineanchors': directives.unchanged, 'linenos': directives.\n unchanged, 'linenospecial': directives.nonnegative_int,\n 'linenostart': directives.nonnegative_int, 'linenostep': directives\n .nonnegative_int, 'lineseparator': directives.unchanged,\n 'linespans': directives.unchanged, 'nobackground': directives.flag,\n 'nowrap': directives.flag, 'tagsfile': directives.unchanged,\n 'tagurlformat': directives.unchanged}\n has_content = True\n\n def run(self):\n self.assert_has_content()\n try:\n lexer = get_lexer_by_name(self.arguments[0])\n except ValueError:\n lexer = TextLexer()\n if 'linenos' in self.options and self.options['linenos'] not in (\n 'table', 'inline'):\n if self.options['linenos'] == 'none':\n self.options.pop('linenos')\n else:\n self.options['linenos'] = 'table'\n for flag in ('nowrap', 'nobackground', 'anchorlinenos'):\n if flag in self.options:\n self.options[flag] = True\n formatter = HtmlFormatter(noclasses=False, **self.options)\n parsed = highlight('\\n'.join(self.content), lexer, formatter)\n return [nodes.raw('', parsed, format='html')]\n\n\ndef register():\n directives.register_directive('code-block', Pygments)\n directives.register_directive('sourcecode', Pygments)\n",
"step-4": "from docutils import nodes\nfrom docutils.parsers.rst import directives, Directive\nfrom pygments import highlight\nfrom pygments.lexers import get_lexer_by_name\nfrom pygments.lexers.special import TextLexer\nfrom pygments.formatters.html import HtmlFormatter\n\n\nclass Pygments(Directive):\n \"\"\" Source code syntax hightlighting.\n \"\"\"\n required_arguments = 1\n optional_arguments = 0\n final_argument_whitespace = True\n option_spec = {'anchorlinenos': directives.flag, 'classprefix':\n directives.unchanged, 'hl_lines': directives.unchanged,\n 'lineanchors': directives.unchanged, 'linenos': directives.\n unchanged, 'linenospecial': directives.nonnegative_int,\n 'linenostart': directives.nonnegative_int, 'linenostep': directives\n .nonnegative_int, 'lineseparator': directives.unchanged,\n 'linespans': directives.unchanged, 'nobackground': directives.flag,\n 'nowrap': directives.flag, 'tagsfile': directives.unchanged,\n 'tagurlformat': directives.unchanged}\n has_content = True\n\n def run(self):\n self.assert_has_content()\n try:\n lexer = get_lexer_by_name(self.arguments[0])\n except ValueError:\n lexer = TextLexer()\n if 'linenos' in self.options and self.options['linenos'] not in (\n 'table', 'inline'):\n if self.options['linenos'] == 'none':\n self.options.pop('linenos')\n else:\n self.options['linenos'] = 'table'\n for flag in ('nowrap', 'nobackground', 'anchorlinenos'):\n if flag in self.options:\n self.options[flag] = True\n formatter = HtmlFormatter(noclasses=False, **self.options)\n parsed = highlight('\\n'.join(self.content), lexer, formatter)\n return [nodes.raw('', parsed, format='html')]\n\n\ndef register():\n directives.register_directive('code-block', Pygments)\n directives.register_directive('sourcecode', Pygments)\n",
"step-5": "from docutils import nodes\nfrom docutils.parsers.rst import directives, Directive\n\nfrom pygments import highlight\nfrom pygments.lexers import get_lexer_by_name\nfrom pygments.lexers.special import TextLexer\nfrom pygments.formatters.html import HtmlFormatter\n\n\nclass Pygments(Directive):\n \"\"\" Source code syntax hightlighting.\n \"\"\"\n required_arguments = 1\n optional_arguments = 0\n final_argument_whitespace = True\n option_spec = {\n 'anchorlinenos': directives.flag,\n 'classprefix': directives.unchanged,\n 'hl_lines': directives.unchanged,\n 'lineanchors': directives.unchanged,\n 'linenos': directives.unchanged,\n 'linenospecial': directives.nonnegative_int,\n 'linenostart': directives.nonnegative_int,\n 'linenostep': directives.nonnegative_int,\n 'lineseparator': directives.unchanged,\n 'linespans': directives.unchanged,\n 'nobackground': directives.flag,\n 'nowrap': directives.flag,\n 'tagsfile': directives.unchanged,\n 'tagurlformat': directives.unchanged,\n }\n has_content = True\n\n def run(self):\n self.assert_has_content()\n try:\n lexer = get_lexer_by_name(self.arguments[0])\n except ValueError:\n # no lexer found - use the text one instead of an exception\n lexer = TextLexer()\n\n if 'linenos' in self.options and self.options['linenos'] not in ('table', 'inline'):\n if self.options['linenos'] == 'none':\n self.options.pop('linenos')\n else:\n self.options['linenos'] = 'table'\n\n for flag in ('nowrap', 'nobackground', 'anchorlinenos'):\n if flag in self.options:\n self.options[flag] = True\n\n # noclasses should already default to False, but just in case...\n formatter = HtmlFormatter(noclasses=False, **self.options)\n parsed = highlight('\\n'.join(self.content), lexer, formatter)\n return [nodes.raw('', parsed, format='html')]\n\n\ndef register():\n directives.register_directive('code-block', Pygments)\n directives.register_directive('sourcecode', Pygments)\n",
"step-ids": [
2,
4,
5,
6,
7
]
}
|
[
2,
4,
5,
6,
7
] |
import requests
from bs4 import BeautifulSoup
import json
import geojson
import re
import time
_apiKey = "SNgeI1tCT-oihjeZDGi6WqcM0a9QAttLhKTecPaaETQ"
def Geocode(address, apiKey):
    """Forward-geocode *address* via the HERE Geocoding API.

    Returns a dict with the canonical 'address' label and its numeric
    'lat'/'lng' coordinates, taken from the first (best) match.
    Raises IndexError if the service returns no matches.

    Fix: removed a leftover ``import pdb; pdb.set_trace()`` debugger
    breakpoint that halted every call.
    """
    URL = 'https://geocode.search.hereapi.com/v1/geocode'

    # Request parameters: free-text query plus the API key.
    params = {
        'q': address,
        'apiKey': apiKey
    }

    # Parse the JSON response and take the first result.
    response = requests.get(URL, params=params).json()
    item = response['items'][0]

    address = item['address']
    position = item['position']

    result = {
        'address': address['label'],
        'lat': position['lat'],
        'lng': position['lng'],
    }

    return result
if __name__ == "__main__":
    # Demo run: geocode a sample address with the module-level API key.
    address = "Украина, Александрия, Соборный проспект 98"
    res = Geocode(address, _apiKey)
|
normal
|
{
"blob_id": "d32496c9bce86f455b24cd9c6dc263aee1bf82af",
"index": 3552,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef Geocode(address, apiKey):\n URL = 'https://geocode.search.hereapi.com/v1/geocode'\n params = {'q': address, 'apiKey': apiKey}\n import pdb\n pdb.set_trace()\n response = requests.get(URL, params=params).json()\n item = response['items'][0]\n address = item['address']\n position = item['position']\n result = {'address': address['label'], 'lat': position['lat'], 'lng':\n position['lng']}\n return result\n\n\nif __name__ == '__main__':\n address = 'Украина, Александрия, Соборный проспект 98'\n res = Geocode(address, _apiKey)\n",
"step-3": "<mask token>\n_apiKey = 'SNgeI1tCT-oihjeZDGi6WqcM0a9QAttLhKTecPaaETQ'\n\n\ndef Geocode(address, apiKey):\n URL = 'https://geocode.search.hereapi.com/v1/geocode'\n params = {'q': address, 'apiKey': apiKey}\n import pdb\n pdb.set_trace()\n response = requests.get(URL, params=params).json()\n item = response['items'][0]\n address = item['address']\n position = item['position']\n result = {'address': address['label'], 'lat': position['lat'], 'lng':\n position['lng']}\n return result\n\n\nif __name__ == '__main__':\n address = 'Украина, Александрия, Соборный проспект 98'\n res = Geocode(address, _apiKey)\n",
"step-4": "import requests\nfrom bs4 import BeautifulSoup\nimport json\nimport geojson\nimport re\nimport time\n_apiKey = 'SNgeI1tCT-oihjeZDGi6WqcM0a9QAttLhKTecPaaETQ'\n\n\ndef Geocode(address, apiKey):\n URL = 'https://geocode.search.hereapi.com/v1/geocode'\n params = {'q': address, 'apiKey': apiKey}\n import pdb\n pdb.set_trace()\n response = requests.get(URL, params=params).json()\n item = response['items'][0]\n address = item['address']\n position = item['position']\n result = {'address': address['label'], 'lat': position['lat'], 'lng':\n position['lng']}\n return result\n\n\nif __name__ == '__main__':\n address = 'Украина, Александрия, Соборный проспект 98'\n res = Geocode(address, _apiKey)\n",
"step-5": "import requests\nfrom bs4 import BeautifulSoup\nimport json\nimport geojson\nimport re\nimport time\n\n_apiKey = \"SNgeI1tCT-oihjeZDGi6WqcM0a9QAttLhKTecPaaETQ\"\n\ndef Geocode(address, apiKey):\n URL = 'https://geocode.search.hereapi.com/v1/geocode'\n\n # Параметры запроса\n params = {\n 'q': address,\n 'apiKey': apiKey\n }\n \n import pdb; pdb.set_trace()\n # Парсинг ответа в JSON формате\n response = requests.get(URL, params=params).json()\n item = response['items'][0]\n\n address = item['address']\n position = item['position']\n\n result = {\n 'address': address['label'],\n 'lat': position['lat'],\n 'lng': position['lng'],\n }\n \n return result\n\nif __name__ == \"__main__\":\n address = \"Украина, Александрия, Соборный проспект 98\"\n res = Geocode(address, _apiKey)",
"step-ids": [
0,
2,
3,
4,
5
]
}
|
[
0,
2,
3,
4,
5
] |
#!/usr/bin/env python
from bumblebee.motion import *
from simulation.path import *
from simulation.settings import *
import tf.transformations
from geometry_msgs.msg import TransformStamped,Transform,Quaternion,Vector3
from bumblebee.baseTypes import basicGraph,slidingGraph
from simulation.dataset import stereo_simulator_node
import pickle
import os
import rospy
import time
import scipy.stats.mstats as stat
from scipy.stats import norm,cauchy
import matplotlib.pyplot as plt
import matplotlib.style as sty
from mpl_toolkits.mplot3d import Axes3D
sty.use("seaborn")
from tf import TransformListener,TransformBroadcaster
from tf.transformations import *
import numpy as np
# Destination for the extracted summaries and the directory holding the
# recorded pose-graph files.
out="/home/ryan/recording/poseGraph/ORB/summary"
inNet="/home/ryan/recording/poseGraph/ORB"
#["5000_A1","5000_A2","5000_A3",
# Recording names to process; each maps to "<inNet>/<name>.pose".
replayFiles=["5000_A5","5000_A6","5000_A12","5000_A13","5000_A14"]#,"/media/ryan/EXTRA/Simulation/50/G_0.3.gauss"]#,"/home/ryan/recording/poseGraph/5000_A2_full.pose"]
rospy.init_node("graph_poses_extract")
# For each recording: unpickle the sliding pose graph, then dump four
# summary pickles: inlier motion, inlier RMS, track counts, delta motion.
# NOTE(review): text-mode "r"/"w" pickle I/O is Python-2 style -- confirm
# this script runs under Python 2 (the ROS imports suggest it may).
for f in replayFiles:
    print("new SLiding Graph")
    inlierData=[]
    rmsData=[]
    inlierRatio=[]
    inFile=inNet+"/"+f+".pose"
    with open(inFile,"r") as fread:
        print(f)
        data=pickle.load(fread)
        print("Loaded")
        with open(out+"/"+f+".inlier",'w') as outFIle:
            pickle.dump(data.getInlierMotion(),outFIle)
            print("1")
        with open(out+"/"+f+".inlierRMS",'w') as outFIle:
            pickle.dump(data.getInlierRMS(),outFIle)
            print("extracted2")
        with open(out+"/"+f+".tracks",'w') as outFIle:
            pickle.dump(data.getTotalTracks(),outFIle)
            print("extracted3")
        with open(out+"/"+f+".delta",'w') as outFIle:
            pickle.dump(data.getDeltaMotion(),outFIle)
            print("extracted4")
        # pickle.data.getInlierMotion())
        # print("inlier")
        # rmsData.append(data.getInlierRMS())
        # print("rms")
        # inlierRatio.append(data.getTotalTracks())
        # print("totalTrc")
|
normal
|
{
"blob_id": "4b3de2d817aa6f8b92d513bcdba612362becefdc",
"index": 9070,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nsty.use('seaborn')\n<mask token>\nrospy.init_node('graph_poses_extract')\nfor f in replayFiles:\n print('new SLiding Graph')\n inlierData = []\n rmsData = []\n inlierRatio = []\n inFile = inNet + '/' + f + '.pose'\n with open(inFile, 'r') as fread:\n print(f)\n data = pickle.load(fread)\n print('Loaded')\n with open(out + '/' + f + '.inlier', 'w') as outFIle:\n pickle.dump(data.getInlierMotion(), outFIle)\n print('1')\n with open(out + '/' + f + '.inlierRMS', 'w') as outFIle:\n pickle.dump(data.getInlierRMS(), outFIle)\n print('extracted2')\n with open(out + '/' + f + '.tracks', 'w') as outFIle:\n pickle.dump(data.getTotalTracks(), outFIle)\n print('extracted3')\n with open(out + '/' + f + '.delta', 'w') as outFIle:\n pickle.dump(data.getDeltaMotion(), outFIle)\n print('extracted4')\n",
"step-3": "<mask token>\nsty.use('seaborn')\n<mask token>\nout = '/home/ryan/recording/poseGraph/ORB/summary'\ninNet = '/home/ryan/recording/poseGraph/ORB'\nreplayFiles = ['5000_A5', '5000_A6', '5000_A12', '5000_A13', '5000_A14']\nrospy.init_node('graph_poses_extract')\nfor f in replayFiles:\n print('new SLiding Graph')\n inlierData = []\n rmsData = []\n inlierRatio = []\n inFile = inNet + '/' + f + '.pose'\n with open(inFile, 'r') as fread:\n print(f)\n data = pickle.load(fread)\n print('Loaded')\n with open(out + '/' + f + '.inlier', 'w') as outFIle:\n pickle.dump(data.getInlierMotion(), outFIle)\n print('1')\n with open(out + '/' + f + '.inlierRMS', 'w') as outFIle:\n pickle.dump(data.getInlierRMS(), outFIle)\n print('extracted2')\n with open(out + '/' + f + '.tracks', 'w') as outFIle:\n pickle.dump(data.getTotalTracks(), outFIle)\n print('extracted3')\n with open(out + '/' + f + '.delta', 'w') as outFIle:\n pickle.dump(data.getDeltaMotion(), outFIle)\n print('extracted4')\n",
"step-4": "from bumblebee.motion import *\nfrom simulation.path import *\nfrom simulation.settings import *\nimport tf.transformations\nfrom geometry_msgs.msg import TransformStamped, Transform, Quaternion, Vector3\nfrom bumblebee.baseTypes import basicGraph, slidingGraph\nfrom simulation.dataset import stereo_simulator_node\nimport pickle\nimport os\nimport rospy\nimport time\nimport scipy.stats.mstats as stat\nfrom scipy.stats import norm, cauchy\nimport matplotlib.pyplot as plt\nimport matplotlib.style as sty\nfrom mpl_toolkits.mplot3d import Axes3D\nsty.use('seaborn')\nfrom tf import TransformListener, TransformBroadcaster\nfrom tf.transformations import *\nimport numpy as np\nout = '/home/ryan/recording/poseGraph/ORB/summary'\ninNet = '/home/ryan/recording/poseGraph/ORB'\nreplayFiles = ['5000_A5', '5000_A6', '5000_A12', '5000_A13', '5000_A14']\nrospy.init_node('graph_poses_extract')\nfor f in replayFiles:\n print('new SLiding Graph')\n inlierData = []\n rmsData = []\n inlierRatio = []\n inFile = inNet + '/' + f + '.pose'\n with open(inFile, 'r') as fread:\n print(f)\n data = pickle.load(fread)\n print('Loaded')\n with open(out + '/' + f + '.inlier', 'w') as outFIle:\n pickle.dump(data.getInlierMotion(), outFIle)\n print('1')\n with open(out + '/' + f + '.inlierRMS', 'w') as outFIle:\n pickle.dump(data.getInlierRMS(), outFIle)\n print('extracted2')\n with open(out + '/' + f + '.tracks', 'w') as outFIle:\n pickle.dump(data.getTotalTracks(), outFIle)\n print('extracted3')\n with open(out + '/' + f + '.delta', 'w') as outFIle:\n pickle.dump(data.getDeltaMotion(), outFIle)\n print('extracted4')\n",
"step-5": "#!/usr/bin/env python\n\nfrom bumblebee.motion import *\n\nfrom simulation.path import *\nfrom simulation.settings import *\nimport tf.transformations\nfrom geometry_msgs.msg import TransformStamped,Transform,Quaternion,Vector3\nfrom bumblebee.baseTypes import basicGraph,slidingGraph\nfrom simulation.dataset import stereo_simulator_node\nimport pickle\nimport os\nimport rospy\n\nimport time\nimport scipy.stats.mstats as stat\nfrom scipy.stats import norm,cauchy\nimport matplotlib.pyplot as plt\nimport matplotlib.style as sty\nfrom mpl_toolkits.mplot3d import Axes3D\nsty.use(\"seaborn\")\n\nfrom tf import TransformListener,TransformBroadcaster\nfrom tf.transformations import *\nimport numpy as np\n\n\nout=\"/home/ryan/recording/poseGraph/ORB/summary\"\ninNet=\"/home/ryan/recording/poseGraph/ORB\"\n#[\"5000_A1\",\"5000_A2\",\"5000_A3\",\nreplayFiles=[\"5000_A5\",\"5000_A6\",\"5000_A12\",\"5000_A13\",\"5000_A14\"]#,\"/media/ryan/EXTRA/Simulation/50/G_0.3.gauss\"]#,\"/home/ryan/recording/poseGraph/5000_A2_full.pose\"]\n\nrospy.init_node(\"graph_poses_extract\")\n\n\nfor f in replayFiles:\n print(\"new SLiding Graph\")\n inlierData=[]\n rmsData=[]\n inlierRatio=[]\n inFile=inNet+\"/\"+f+\".pose\"\n with open(inFile,\"r\") as fread:\n print(f)\n data=pickle.load(fread)\n print(\"Loaded\")\n with open(out+\"/\"+f+\".inlier\",'w') as outFIle:\n pickle.dump(data.getInlierMotion(),outFIle)\n print(\"1\")\n with open(out+\"/\"+f+\".inlierRMS\",'w') as outFIle:\n pickle.dump(data.getInlierRMS(),outFIle)\n print(\"extracted2\")\n with open(out+\"/\"+f+\".tracks\",'w') as outFIle:\n pickle.dump(data.getTotalTracks(),outFIle)\n print(\"extracted3\")\n with open(out+\"/\"+f+\".delta\",'w') as outFIle:\n pickle.dump(data.getDeltaMotion(),outFIle)\n print(\"extracted4\")\n # pickle.data.getInlierMotion())\n # print(\"inlier\")\n # rmsData.append(data.getInlierRMS())\n # print(\"rms\")\n # inlierRatio.append(data.getTotalTracks())\n # print(\"totalTrc\")",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
# Generated by Django 3.0.6 on 2020-06-23 10:58
from django.db import migrations
class Migration(migrations.Migration):
    # Auto-generated migration: drops the obsolete 'type_printers'
    # field from the printers_stat model.

    dependencies = [
        ('printer', '0001_initial'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='printers_stat',
            name='type_printers',
        ),
    ]
|
normal
|
{
"blob_id": "e7bb5e9a91ec6a1644ddecd52a676c8136087941",
"index": 4719,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass Migration(migrations.Migration):\n <mask token>\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass Migration(migrations.Migration):\n dependencies = [('printer', '0001_initial')]\n operations = [migrations.RemoveField(model_name='printers_stat', name=\n 'type_printers')]\n",
"step-4": "from django.db import migrations\n\n\nclass Migration(migrations.Migration):\n dependencies = [('printer', '0001_initial')]\n operations = [migrations.RemoveField(model_name='printers_stat', name=\n 'type_printers')]\n",
"step-5": "# Generated by Django 3.0.6 on 2020-06-23 10:58\n\nfrom django.db import migrations\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('printer', '0001_initial'),\n ]\n\n operations = [\n migrations.RemoveField(\n model_name='printers_stat',\n name='type_printers',\n ),\n ]\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
import pygame
import os
from time import sleep
# Create the 900x700 main window, clear it to white and set the title.
screen = pygame.display.set_mode((900,700))
screen.fill((255,255,255))
pygame.display.set_caption("NTUFOODIERECOMMENDSYSTEM")
'''
###########################
──╔╗────╔╗
──║║───╔╝╚╗
╔═╝╠╦══╬╗╔╬╦══╦═╗╔══╦═╦╗─╔╗
║╔╗╠╣╔═╝║║╠╣╔╗║╔╗╣╔╗║╔╣║─║║
║╚╝║║╚═╗║╚╣║╚╝║║║║╔╗║║║╚═╝║
╚══╩╩══╝╚═╩╩══╩╝╚╩╝╚╩╝╚═╗╔╝
──────────────────────╔═╝║
──────────────────────╚══╝
###########################
● Database is stored on site.
● Updating is relatively simple.
● Programme runs on the basis of pygame, it's hard to update it without text input.
● However, it can easily be done so on shell/console accordingly.
'''
# Food court lists is sorted by [Highest Cost, Lowest Cost, Cuisines Available, Closing Time, Food Preferences Available, Coordinates on NTU Map] ; THE items have keys and corresponding values expressed as a pair, key: value
# where the keys would be that of the canteen names and this would be associated with that of the corresponding properties tht is alloted to it.
# Canteen database, keyed by canteen name. Each value is:
#   [highest cost, lowest cost, cuisines available, closing time (24h),
#    food preferences available, (x, y) pixel coordinates on the NTU map]
canteen_list = {
    "Food Court 1": [12, 3.5, ["Korean", "Japanese", "Western"], 2100, ["Halal", "Non-Halal/Non-Vegetarian"], (442, 473)],
    "Food Court 2": [10, 3.6, ["Korean", "Chinese", "Malay", ], 2100, ["Halal", "Vegetarian", "Non-Halal/Non-Vegetarian"], (477, 409)],
    "Food Court 4": [10, 3, ["Chinese", "Western"], 2100, ["Non-Halal/Non-Vegetarian"], (358,526)],
    "Food Court 9": [10, 3.5, ["Chinese"], 2100, ["Halal", "Vegetarian", "Non-Halal/Non-Vegetarian"], (582, 288)],
    "Food Court 11": [10, 2.5, ["Chinese", "Indian", "Japanese", "Western"], 2100, ["Halal", "Vegetarian", "Non-Halal/Non-Vegetarian"], (682, 243)],
    "Food Court 13": [9, 2, ["Western", "Korean", "Japanese", "Chinese"], 2100, ["Halal", "Vegetarian", "Non-Halal/Non-Vegetarian"], (445, 176)],
    "Food Court 14": [8, 3, ["Western", "Chinese", "Korean", "Malay"], 2100, ["Halal", "Vegetarian", "Non-Halal/Non-Vegetarian"], (509, 182)],
    "Food Court 16": [10, 3.3, ["Japanese", "Chinese", "Korean", "Indian"], 2100, ["Halal", "Vegetarian", "Non-Halal/Non-Vegetarian"], (405, 221)],
    "Tamarind Food Court": [10, 3, ["Malay", "Chinese", "Korean", "Western"], 2100, ["Halal", "Non-Halal", "Vegetarian","Non-Halal/Non-Vegetarian"], (627, 200)],
    "Pioneer Food Court": [20, 2.3, ["Thai", "Chinese"], 0000, ["Vegetarian", "Non-Halal/Non-Vegetarian"], (497, 561)],  # NOTE(review): 0000 == 0 (midnight) -- confirm intended closing time
    "North Spine Food Court": [10, 2.5, ["Korean", "Japanese", "Chinese", "Western", "Malay"], 2100, ["Vegetarian", "Non-Halal/Non-Vegetarian"], (275, 293)],
    "North Spine Plaza": [10, 4, ["Western", "Korean"], 2130, ["Vegetarian", "Halal", "Non-Halal/Non-Vegetarian"], (287, 339)],
    "South Spine Food Court": [10, 2, ["Chinese", "Malay", "Korean", "Japanese", "Western"], 2100, ["Vegetarian", "Halal", "Non-Halal/Non-Vegetarian"], (227, 496)],
    "Quad Cafe": [10, 2.4, ["Korean", "Chinese", "Indian", "Malay"], 2100, ["Vegetarian", "Halal", "Non-Halal/Non-Vegetarian"], (224, 351)],
    "Coffee Bean": [20, 4, ["Western"], 2000, ["Vegetarian", "Halal", "Non-Halal/Non-Vegetarian"], (219, 389)],
    "North Hill Food Court": [10, 3.8, ["Chinese", "Malay", "Indian"], 2100, ["Vegetarian", "Halal", "Non-Halal/Non-Vegetarian"], (720,314)]
}
'''
###########################################
───╔╗───────────╔═╗─────╔╗─────╔╗─╔╗
───║║───────────║╔╝─────║║────╔╝╚╦╝╚╗
╔══╣║╔══╦══╦══╗╔╝╚╦══╦═╗║╚═╦╗╔╬╗╔╩╗╔╬══╦═╗
║╔═╣║║╔╗║══╣══╣╚╗╔╣╔╗║╔╝║╔╗║║║║║║─║║║╔╗║╔╗╗
║╚═╣╚╣╔╗╠══╠══║─║║║╚╝║║─║╚╝║╚╝║║╚╗║╚╣╚╝║║║║
╚══╩═╩╝╚╩══╩══╝─╚╝╚══╩╝─╚══╩══╝╚═╝╚═╩══╩╝╚╝
###########################################
● We had help from online tutorials to workout the UI buttons functionality.
● A bit of corresponding tweaks incorporating into project from the tutorial that I learnt from
● ref: https://www.youtube.com/watch?v=4_9twnEduFA
'''
class button():
    """A clickable rectangular UI button with an optional centred text label."""

    def __init__(self, colour, x, y, width, height, text=''):
        self.colour = colour
        self.x = x
        self.y = y
        self.width = width
        self.height = height
        self.text = text

    def draw(self, win, outline=None):
        """Draw the button on *win*; *outline* adds a 2-px border colour."""
        if outline:
            # draw a bigger rectangle behind to create a border
            pygame.draw.rect(win, outline, (self.x-2, self.y-2, self.width+4, self.height+4), 0)
        # draws the button rectangle
        pygame.draw.rect(win, self.colour, (self.x, self.y, self.width, self.height), 0)
        if self.text != '':
            font = pygame.font.SysFont('calligrapher.ttf', 60)
            text = font.render(self.text, 1, (0, 0, 0))
            # centre the label inside the button rectangle
            win.blit(text, (self.x + (self.width/2 - text.get_width()/2), self.y + (self.height/2 - text.get_height()/2)))

    def isOver(self, pos):
        """Return True iff *pos* (x, y) lies strictly inside the button.

        Fix: the original returned None (instead of False) whenever the
        x-coordinate was outside the button; now it always returns a bool.
        """
        return (self.x < pos[0] < self.x + self.width
                and self.y < pos[1] < self.y + self.height)
'''
##################################
─╔═╗─────────╔╗
─║╔╝────────╔╝╚╗
╔╝╚╦╗╔╦═╗╔══╬╗╔╬╦══╦═╗╔══╗
╚╗╔╣║║║╔╗╣╔═╝║║╠╣╔╗║╔╗╣══╣
─║║║╚╝║║║║╚═╗║╚╣║╚╝║║║╠══║
─╚╝╚══╩╝╚╩══╝╚═╩╩══╩╝╚╩══╝
##################################
╔═╗────────╔╗
║═╬═╦╦╗╔═╦╦╬╣
║╔╣╬║╔╝║╬║║║║
╚╝╚═╩╝─╠╗╠═╩╝
───────╚═╝
#################
● Most of the functions here help to draw out the different states of the screen, that the screen could be in
● The redraw functions help to update the display based on it's respective transitory states
'''
#3 functions here controls the Surface Text appearancese
def text(text, win, x, y):
    """Blit *text* onto *win* at (x, y) in the size-50 body font."""
    surface = pygame.font.SysFont('freesansbold.ttf', 50).render(text, 1, (0, 0, 0))
    win.blit(surface, (x, y))
def instructionText(text, win, x, y):
    """Blit *text* onto *win* at (x, y) in the small (Arial 20) caption font."""
    surface = pygame.font.SysFont('Arial', 20).render(text, 1, (0, 0, 0))
    win.blit(surface, (x, y))
def header(text, win, x, y):
    """Blit *text* onto *win* at (x, y) in the large (Times New Roman 70) heading font."""
    surface = pygame.font.SysFont('TimesNewRoman', 70).render(text, 1, (0, 0, 0))
    win.blit(surface, (x, y))
def mouseClick(screen):
    """Read the mouse position; if it falls inside the map area, mark it
    with a pink dot and accept it. Returns (accepted, x, y)."""
    x, y = pygame.mouse.get_pos()
    if 65 <= x <= 727 and 82 <= y <= 618:
        pygame.draw.circle(screen, (255, 0, 150), (x, y), 15)
        return True, x, y
    print("Out of bounds!")
    return False, x, y
def skeleExit(win):
    """Play the exit animation: 41 skeleton frames over the splash image."""
    splash = pygame.image.load(os.path.join("NTUFoodieRecsv1.png"))
    win.blit(splash, (0, 0))
    pygame.display.update()
    xaxis = 100
    for frame_no in range(1, 42):
        frame = pygame.image.load(os.path.join(str(frame_no) + ".png"))
        win.blit(frame, (250, 200))
        text("Exiting...", win, xaxis + 20, 600)
        pygame.display.update()
        sleep(0.09)
def loading(win):
    """Play the loading animation: frames load0..load3.png, 0.3 s each,
    cycled three times.

    Fix: replaced four copy-pasted frame blocks and a manual while-counter
    with a single nested loop (identical frame order and timing).
    """
    for _ in range(3):
        for idx in range(4):
            frame = pygame.image.load(os.path.join("load" + str(idx) + ".png"))
            win.blit(frame, (0, 0))
            pygame.display.update()
            sleep(0.3)
# ---------------------------------------------------------------------------#
def redrawMap(screen):
    """Draw the campus map overlaid with a 50-px red grid and a prompt."""
    campus = pygame.image.load(os.path.join("NTUMap.jpg"))
    screen.blit(campus, (0, 0))
    # vertical grid lines
    for gx in range(50, 900, 50):
        pygame.draw.rect(screen, (255, 0, 0), (gx, 0, 1, 700), 0)
    # horizontal grid lines
    for gy in range(50, 700, 50):
        pygame.draw.rect(screen, (255, 0, 0), (0, gy, 900, 1), 0)
    text('Please click on your current location!', screen, 200, 100)
def redrawGPSMap(screen, top3, x, y):
    """Redraw the map with the user's pin at (x, y) plus one distinctly
    coloured pin per recommended canteen in *top3* (at most three).

    Fix: collapsed three near-identical counter branches (each ending in a
    useless ``pass``) into a table-driven loop over the pin icons.
    """
    campus = pygame.image.load(os.path.join("NTUMap.jpg"))
    screen.blit(campus, (0, 0))
    # User's own position pin.
    redGPS = pygame.image.load(os.path.join("redgps.png"))
    screen.blit(redGPS, (x - 16, y - 32))
    instructionText("You are currently at this position.", screen, x + 4, y - 10)
    # One icon per rank: 1st blue, 2nd black, 3rd yellow (zip caps at 3,
    # matching the original which ignored any entries past the third).
    icons = ["bluegps.png", "blackgps.png", "yellowgps.png"]
    for canteen, icon in zip(top3, icons):
        coor = canteen_list[canteen][5]  # (x, y) map coordinates
        pin = pygame.image.load(os.path.join(icon))
        screen.blit(pin, (coor[0] - 12, coor[1] - 24))
        instructionText(canteen, screen, coor[0] - 24, coor[1])
    restartButton.draw(screen, (0, 0, 0))
def redrawMainWin(screen):
    """Render the main menu: background image, the three navigation buttons
    with their hint texts, and the creator credit at the bottom."""
    background = pygame.image.load(os.path.join("NTUFoodieRecsv1.png"))
    screen.blit(background, (0, 0))
    mapButton.draw(screen, (0, 0, 0))
    instructionText("(Choose your cuisines, preferences and budget for the meal here!)", screen, 215, 320)
    predictButton.draw(screen, (0, 0, 0))
    instructionText("(Find the nearest canteen!)", screen, 132, 470)
    exitButton.draw(screen, (0, 0, 0))
    screen.blit(pygame.image.load(os.path.join("ice.png")), (500, 670))
    # creator credit rendered in blue next to the ice icon
    credit_font = pygame.font.SysFont('verdana', 20)
    credit = credit_font.render("Made by HweeHean X Arya", 1, (0, 0, 200))
    screen.blit(credit, (535, 670))
def redrawCustWin(screen):
    """Render the customisation screen: the food-preference, cuisine-type and
    budget button groups, each under its own heading."""
    screen.blit(pygame.image.load(os.path.join("gradient.jpg")), (0, 0))
    instructionText('Left click again to reset!', screen, 300, 20)
    text('Please select your food preference: ', screen, 100, 50)
    for btn in (halalButton, vegButton, nonhalalButton):
        btn.draw(screen, (0, 0, 0))
    text('Please select your cuisine type: ', screen, 100, 200)
    for btn in (koreanButton, malayButton, japanButton,
                chineseButton, indianButton, westernButton):
        btn.draw(screen, (0, 0, 0))
    text('Please select your maximum budget: ', screen, 100, 430)
    for btn in (button3, button5, button7, button9, nextButton):
        btn.draw(screen, (0, 0, 0))
def redrawSearchWin(screen, x, y):
    """Show the prediction results screen for location (x, y).

    Computes the three nearest canteens (over ALL canteens), shows a picture
    of the nearest one, and lists all three ranked by distance.

    Returns:
        list[str]: the top-3 canteen names, closest first.

    Fix: a 16-branch if-chain mapping canteen name to picture file is
    replaced with a lookup dict, and the candidate list is built with
    ``list(canteen_list)`` instead of a manual append loop.
    """
    # canteen name -> picture file shown for the rank-1 (nearest) result
    canteen_pics = {
        "Food Court 1": "Canteen1.jpg",
        "Food Court 2": "Canteen2.png",
        "Food Court 4": "Canteen4.png",
        "Food Court 9": "Canteen9.png",
        "Food Court 11": "Canteen11.png",
        "Food Court 13": "Canteen13.png",
        "Food Court 14": "Canteen14.png",
        "Food Court 16": "Canteen16.png",
        "Tamarind Food Court": "Tamarind.jpg",
        "Pioneer Food Court": "Pioneer.png",
        "North Spine Food Court": "NorthSpine.jpg",
        "North Spine Plaza": "NorthSpinePlaza.jpg",
        "South Spine Food Court": "SouthSpineKoufuFoodCourt.png",
        "Quad Cafe": "Quad.jpg",
        "Coffee Bean": "Coffee.jpg",
        "North Hill Food Court": "NorthHill.jpg",
    }
    screen.blit(pygame.image.load(os.path.join("NTUFoodieRecsv1.png")), (0, 0))
    screen.blit(pygame.image.load(os.path.join("GordonRamsay.png")), (400, 100))
    distList = list(canteen_list)   # every canteen is a candidate here
    print(distList)                 # debug
    top3 = nearest_can(distList, x, y)
    print(top3)                     # debug
    text("Nearest Canteen:", screen, 110, 400)
    yaxis = 490
    for rank, name in enumerate(top3, start=1):
        # only the nearest canteen gets its picture displayed
        if rank == 1 and name in canteen_pics:
            pic = pygame.image.load(os.path.join(canteen_pics[name]))
            screen.blit(pic, (150, 200))
        text(str(rank), screen, 110, yaxis)
        text(".", screen, 135, yaxis)
        text(name, screen, 150, yaxis)
        yaxis += 70
    return top3
def complicatedSearchWin(screen, top3):
    """Display the customised-search results for a precomputed *top3* list:
    a picture of the nearest canteen plus the three names ranked by distance.

    Unlike :func:`redrawSearchWin`, the top-3 list is supplied by the caller
    (already filtered by budget/cuisine/preference) and nothing is returned.

    Fix: the 16-branch if-chain mapping canteen name to picture file is
    replaced with a lookup dict.
    """
    # canteen name -> picture file shown for the rank-1 (nearest) result
    canteen_pics = {
        "Food Court 1": "Canteen1.jpg",
        "Food Court 2": "Canteen2.png",
        "Food Court 4": "Canteen4.png",
        "Food Court 9": "Canteen9.png",
        "Food Court 11": "Canteen11.png",
        "Food Court 13": "Canteen13.png",
        "Food Court 14": "Canteen14.png",
        "Food Court 16": "Canteen16.png",
        "Tamarind Food Court": "Tamarind.jpg",
        "Pioneer Food Court": "Pioneer.png",
        "North Spine Food Court": "NorthSpine.jpg",
        "North Spine Plaza": "NorthSpinePlaza.jpg",
        "South Spine Food Court": "SouthSpineKoufuFoodCourt.png",
        "Quad Cafe": "Quad.jpg",
        "Coffee Bean": "Coffee.jpg",
        "North Hill Food Court": "NorthHill.jpg",
    }
    screen.blit(pygame.image.load(os.path.join("NTUFoodieRecsv1.png")), (0, 0))
    screen.blit(pygame.image.load(os.path.join("GordonRamsay.png")), (400, 100))
    text("Nearest Canteen:", screen, 110, 400)
    yaxis = 490
    for rank, name in enumerate(top3, start=1):
        # only the nearest canteen gets its picture displayed
        if rank == 1 and name in canteen_pics:
            pic = pygame.image.load(os.path.join(canteen_pics[name]))
            screen.blit(pic, (150, 200))
        text(str(rank), screen, 110, yaxis)
        text(".", screen, 135, yaxis)
        text(name, screen, 150, yaxis)
        yaxis += 70
'''
╔═╗────╔═╗───╔╗╔╗
║═╬═╦╦╗║═╬═╦╦╣╚╬╬═╦╦═╗
║╔╣╬║╔╝╠═║╬║╔╣╔╣║║║║╬║
╚╝╚═╩╝─╚═╩═╩╝╚═╩╩╩═╬╗║
───────────────────╚═╝
###########################
● Functions below control how we do the sorting for the distance
and the different cuisines
'''
#function provided by ARYA
def final_list(user_budget, user_cuisine, user_preference):
    """Compile the deduplicated list of food courts matching the user's
    budget, any of the chosen cuisines, or any of the food preferences.

    Reads the global ``canteen_list`` (name -> attribute tuple, where index 1
    is the price, index 2 the cuisines and index 4 the preferences).  When no
    criterion matches anything, every canteen is returned instead.
    """
    # courts within budget
    candidates = [fc for fc in canteen_list if user_budget >= canteen_list[fc][1]]
    # courts offering any of the selected cuisines
    candidates += [fc for c in user_cuisine
                   for fc in canteen_list if c in canteen_list[fc][2]]
    # courts satisfying any of the selected food preferences
    candidates += [fc for p in user_preference
                   for fc in canteen_list if p in canteen_list[fc][4]]
    # drop duplicates (ordering is not significant downstream)
    candidates = list(set(candidates))
    if not candidates:
        # nothing selected / nothing matched -> fall back to every canteen
        candidates = list(canteen_list)
    return candidates
#function to calculate the straight-line (Euclidean) distance between two points
def calc_dis(x1, y1, x2, y2):
    """Return the Euclidean distance between (x1, y1) and (x2, y2).

    Bug fix: the original ``** 1/2`` parsed as ``(...) ** 1 / 2`` because
    ``**`` binds tighter than ``/``, yielding HALF the SQUARED distance.
    Rankings produced via this function were unaffected (the mapping is
    monotonic), but the returned value was not a distance.  ``** 0.5``
    computes the intended square root.
    """
    return ((x1 - x2) ** 2 + (y1 - y2) ** 2) ** 0.5
#function to find out the nearest suitable food outlets/food courts
def nearest_can(new_list, x, y):
    """Return up to three canteen names from *new_list*, closest first.

    Distances are measured from (x, y) to each canteen's coordinates,
    read from the global ``canteen_list[name][5]``.  Repeated selection
    sort: each pass finds the current minimum, removes it from the working
    copy and appends it to the result.

    Fix: the original ``while len(top3) != 3`` raised IndexError (and could
    never terminate) when fewer than three candidates were supplied; the
    loop now also stops when the candidate pool is exhausted.
    """
    top3 = []
    copy_list = new_list.copy()
    while len(top3) < 3 and copy_list:
        # seed the minimum with the first remaining candidate
        j = copy_list[0]
        coor = canteen_list[j][5]
        Min = calc_dis(x, y, coor[0], coor[1])
        food_court = j
        for k in copy_list:
            # coordinates of the food court under consideration
            coor = canteen_list[k][5]
            dist = calc_dis(x, y, coor[0], coor[1])
            if Min >= dist:
                Min = dist
                food_court = k
        copy_list.pop(copy_list.index(food_court))
        top3.append(food_court)
    print(top3)   # debug: chosen nearest canteens, closest first
    return top3
'''
#########################
╔╗─────╔╗─╔╗
║║────╔╝╚╦╝╚╗
║╚═╦╗╔╬╗╔╩╗╔╬══╦═╗╔══╗
║╔╗║║║║║║─║║║╔╗║╔╗╣══╣
║╚╝║╚╝║║╚╗║╚╣╚╝║║║╠══║
╚══╩══╝╚═╝╚═╩══╩╝╚╩══╝
#########################
● This is where the buttons are defined. Using the class...
● They are relatively self-explanatory
'''
# All buttons share a white idle colour; the event loop recolours them
# on hover/click.
WHITE = (255, 255, 255)
#buttons for the main landing page:
mapButton = button(WHITE, 200, 250, 500, 100, 'Canteen Customisation')
predictButton = button(WHITE, 100, 400, 300, 100, 'Prediction')
exitButton = button(WHITE, 500, 400, 300, 100, 'Exit')
#buttons for the customisation screen:
halalButton = button(WHITE, 50, 120, 250, 50, 'Halal')
vegButton = button(WHITE, 320, 120, 250, 50, 'Vegetarian')
nonhalalButton = button(WHITE, 590, 120, 250, 50, 'Non-Halal')
koreanButton = button(WHITE, 50, 270, 250, 50, 'Korean')
malayButton = button(WHITE, 320, 270, 250, 50, 'Malay')
japanButton = button(WHITE, 590, 270, 250, 50, 'Japanese')
chineseButton = button(WHITE, 50, 340, 250, 50, 'Chinese')
indianButton = button(WHITE, 320, 340, 250, 50, 'Indian')
westernButton = button(WHITE, 590, 340, 250, 50, 'Western')
#budget buttons (note: button9 is labelled '$10')
button3 = button(WHITE, 235, 490, 70, 50, '$3')
button5 = button(WHITE, 355, 490, 70, 50, '$5')
button7 = button(WHITE, 475, 490, 70, 50, '$7')
button9 = button(WHITE, 595, 490, 70, 50, '$10')
nextButton = button(WHITE, 730, 580, 120, 70, 'Next')
#buttons to showcase GPS:
gpsButton = button(WHITE, 700, 600, 170, 50, 'to Map')
restartButton = button(WHITE, 700, 600, 190, 50, 'Restart?')
'''
#############################
────╔╗────╔╗
───╔╝╚╗──╔╝╚╗
╔══╬╗╔╬══╬╗╔╬══╦══╗
║══╣║║║╔╗║║║║║═╣══╣
╠══║║╚╣╔╗║║╚╣║═╬══║
╚══╝╚═╩╝╚╝╚═╩══╩══╝
#############################
● Since I'm only using one while loop and all the functions are in here,
it is important to note that none of the "if" statements interfere with
each other
● Acts like a flip-flop which stores the data of the different STATES
'''
#original (reset) state of the customisation buttons: nothing selected yet
halalButtonPressed = vegButtonPressed = nonhalalButtonPressed = False
koreanButtonPressed = malayButtonPressed = japanButtonPressed = False
chineseButtonPressed = indianButtonPressed = westernButtonPressed = False
button3Pressed = button5Pressed = button7Pressed = button9Pressed = False
nextButtonPressed = gpsButtonPressed = False
#original state of the screen-flow flags: start on the main menu,
#with the one-shot search computation still pending
checkButton = True
mapCoor = customisationMenu = mapCoor2 = False
easySearch = complicatedMenu = False
oneTime = True
'''
####################################
╔═╗╔═╗───────╔═══╗
║║╚╝║║───────║╔═╗║
║╔╗╔╗╠══╦╦═╗─║╚═╝╠═╦══╦══╦═╦══╦╗╔╗
║║║║║║╔╗╠╣╔╗╗║╔══╣╔╣╔╗║╔╗║╔╣╔╗║╚╝║
║║║║║║╔╗║║║║║║║──║║║╚╝║╚╝║║║╔╗║║║║
╚╝╚╝╚╩╝╚╩╩╝╚╝╚╝──╚╝╚══╩═╗╠╝╚╝╚╩╩╩╝
──────────────────────╔═╝║
──────────────────────╚══╝
####################################
● It involves a lot of existing predefined states, turning on and off to display
multiple things without them interfering with each other's functionality
● I.e. Clicking customisation button will disable itself, hence
if the mouse is clicked over at the same area, it will not
be activated again.
    ● This is very important to have a smooth flow.
● Also left some debugging messages within the console to help
understand what is going on behind the scenes
'''
pygame.init()
run = True
clock = pygame.time.Clock()
#start the pygame programme.  One draw/event loop drives every screen; the
#boolean flags set above act as a state machine deciding what is drawn and
#which click handlers are live on each frame.
while run:
    #if true, redraws the main window
    if checkButton:
        redrawMainWin(screen)
    #if true, redraws the customisation window
    if customisationMenu:
        redrawCustWin(screen)
    #quick-prediction results; oneTime guards the expensive search so it
    #runs only once per visit to this screen
    if easySearch:
        if oneTime:
            nearest_canteen = redrawSearchWin(screen, x, y)
            sleep(2)
            oneTime = False
        gpsButton.draw(screen, (0,0,0))
    #if true, redraws the customisation search results (same oneTime guard)
    if complicatedMenu:
        if oneTime:
            complicatedSearchWin(screen, nearest_canteen)
            sleep(2)
            oneTime = False
        gpsButton.draw(screen, (0,0,0))
    #redraws the GPS map, with point locators indicated
    if gpsButtonPressed == True:
        redrawGPSMap(screen, nearest_canteen, x, y)
    pygame.display.update()
    clock.tick(30)  # cap the loop at 30 frames per second
    #checks events queued since the last frame
    for event in pygame.event.get():
        #Fetches the mouse position
        pos = pygame.mouse.get_pos()
        #Quits the pygame programme
        if event.type == pygame.QUIT:
            run = False
            pygame.quit()
        #GPS-map screen: restart button resets every flag to the boot state
        if gpsButtonPressed:
            if event.type == pygame.MOUSEBUTTONDOWN:
                if restartButton.isOver(pos):
                    restartButton.colour = (50,50,50)
                    restartButton.draw(screen, (0,0,0))
                    pygame.display.update()
                    print('clicked the restart button')
                    #original state of customisation buttons
                    halalButtonPressed = False
                    vegButtonPressed = False
                    nonhalalButtonPressed = False
                    koreanButtonPressed = False
                    malayButtonPressed = False
                    japanButtonPressed = False
                    chineseButtonPressed = False
                    indianButtonPressed = False
                    westernButtonPressed = False
                    button3Pressed = False
                    button5Pressed = False
                    button7Pressed = False
                    button9Pressed = False
                    nextButtonPressed = False
                    gpsButtonPressed = False
                    #original state of events
                    checkButton = True
                    mapCoor = False
                    customisationMenu = False
                    mapCoor2 = False
                    easySearch = False
                    complicatedMenu = False
                    oneTime = True
            #hover highlight for the restart button
            if event.type == pygame.MOUSEMOTION:
                if restartButton.isOver(pos):
                    restartButton.colour = (0,255,0)
                    continue
                else:
                    restartButton.colour = (255,255,255)
                    continue
        #results screens: the "to Map" button switches to the GPS-map screen
        if easySearch == True or complicatedMenu == True:
            if event.type == pygame.MOUSEBUTTONDOWN:
                if gpsButton.isOver(pos):
                    gpsButton.colour = (50,50,50)
                    gpsButton.draw(screen, (0,0,0))
                    pygame.display.update()
                    print('clicked gps button')
                    gpsButtonPressed = True
                    easySearch = False
                    complicatedMenu = False
                    continue
            #hover highlight for the gps button
            if event.type == pygame.MOUSEMOTION:
                if gpsButton.isOver(pos):
                    gpsButton.colour = (0,255,0)
                    continue
                else:
                    gpsButton.colour = (255,255,255)
                    continue
        #if mouse is clicked over buttons (main page)
        if checkButton:
            if event.type == pygame.MOUSEBUTTONDOWN:
                #customisation flow: show the map and wait for a click (mapCoor)
                if mapButton.isOver(pos):
                    mapButton.colour = (0,255,0)
                    redrawMainWin(screen)
                    pygame.display.update()
                    print('clicked map button')
                    sleep(0.5)
                    redrawMap(screen)
                    checkButton = False
                    mapCoor = True
                    continue
                #prediction flow: show the map and wait for a click (mapCoor2)
                if predictButton.isOver(pos):
                    predictButton.colour = (0,255,0)
                    redrawMainWin(screen)
                    pygame.display.update()
                    print('clicked predict button')
                    sleep(0.5)
                    redrawMap(screen)
                    checkButton = False
                    mapCoor2 = True
                    continue
                #exit: play the skeleton animation, then quit the process
                if exitButton.isOver(pos):
                    exitButton.colour = (0,255,0)
                    print('Exiting...')
                    skeleExit(screen)
                    pygame.quit()
                    run = False
                    exit()
            #if mouse hovered over the button (main page)
            if event.type == pygame.MOUSEMOTION:
                if mapButton.isOver(pos):
                    mapButton.colour = (255,0,0)
                else:
                    mapButton.colour = (255,255,255)
                if predictButton.isOver(pos):
                    predictButton.colour = (255,0,0)
                else:
                    predictButton.colour = (255,255,255)
                if exitButton.isOver(pos):
                    exitButton.colour = (255,0,0)
                else:
                    exitButton.colour = (255,255,255)
        #clicking buttons in the customisation menu.  Each button toggles on
        #first click and resets on the second; mutually exclusive choices
        #(halal/veg vs non-halal, the four budgets) clear their rivals first.
        if customisationMenu:
            if event.type == pygame.MOUSEMOTION:
                if nextButton.isOver(pos):
                    nextButton.colour = (0,0,255)
                else:
                    nextButton.colour = (255,255,255)
                continue
            if event.type == pygame.MOUSEBUTTONDOWN:
                #clicking on next button
                if nextButton.isOver(pos):
                    nextButton.colour = (255,255,0)
                    nextButtonPressed = True
                    customisationMenu = False
                    continue
                #halal excludes non-halal
                if halalButton.isOver(pos):
                    if halalButtonPressed == False:
                        if nonhalalButtonPressed:
                            nonhalalButton.colour = (255,255,255)
                            nonhalalButtonPressed = False
                        halalButton.colour = (0,255,0)
                        print('clicked Halal button')
                        halalButtonPressed = True
                        continue
                    else:
                        halalButton.colour = (255,255,255)
                        halalButtonPressed = False
                        continue
                #vegetarian excludes non-halal
                if vegButton.isOver(pos):
                    if vegButtonPressed == False:
                        if nonhalalButtonPressed:
                            nonhalalButton.colour = (255,255,255)
                            nonhalalButtonPressed = False
                        vegButton.colour = (0,255,0)
                        print('clicked Vegetarian button')
                        vegButtonPressed = True
                        continue
                    else:
                        vegButton.colour = (255,255,255)
                        vegButtonPressed = False
                        continue
                #non-halal excludes both halal and vegetarian
                if nonhalalButton.isOver(pos):
                    if nonhalalButtonPressed == False:
                        if halalButtonPressed:
                            halalButton.colour = (255,255,255)
                            halalButtonPressed = False
                        if vegButtonPressed:
                            vegButton.colour = (255,255,255)
                            vegButtonPressed = False
                        nonhalalButton.colour = (0,255,0)
                        print('clicked non-halal button')
                        nonhalalButtonPressed = True
                        continue
                    else:
                        nonhalalButton.colour = (255,255,255)
                        nonhalalButtonPressed = False
                #cuisine buttons are independent toggles
                if koreanButton.isOver(pos):
                    if koreanButtonPressed == False:
                        koreanButton.colour = (0,255,0)
                        print('clicked korean button')
                        koreanButtonPressed = True
                        continue
                    else:
                        koreanButton.colour = (255,255,255)
                        koreanButtonPressed = False
                if malayButton.isOver(pos):
                    if malayButtonPressed == False:
                        malayButton.colour = (0,255,0)
                        print('clicked Malay button')
                        malayButtonPressed = True
                        continue
                    else:
                        malayButton.colour = (255,255,255)
                        malayButtonPressed = False
                if japanButton.isOver(pos):
                    if japanButtonPressed == False:
                        japanButton.colour = (0,255,0)
                        print('clicked japan button')
                        japanButtonPressed = True
                        continue
                    else:
                        japanButton.colour = (255,255,255)
                        japanButtonPressed = False
                if chineseButton.isOver(pos):
                    if chineseButtonPressed == False:
                        chineseButton.colour = (0,255,0)
                        print('clicked chinese button')
                        chineseButtonPressed = True
                        continue
                    else:
                        chineseButton.colour = (255,255,255)
                        chineseButtonPressed = False
                if indianButton.isOver(pos):
                    if indianButtonPressed == False:
                        indianButton.colour = (0,255,0)
                        print('clicked indian button')
                        indianButtonPressed = True
                        continue
                    else:
                        indianButton.colour = (255,255,255)
                        indianButtonPressed = False
                if westernButton.isOver(pos):
                    if westernButtonPressed == False:
                        westernButton.colour = (0,255,0)
                        print('clicked western button')
                        westernButtonPressed = True
                        continue
                    else:
                        westernButton.colour = (255,255,255)
                        westernButtonPressed = False
                #budget buttons are mutually exclusive: picking one clears
                #the other three
                if button3.isOver(pos):
                    if button3Pressed == False:
                        if button5Pressed == True:
                            button5.colour = (255,255,255)
                            button5Pressed = False
                        if button7Pressed == True:
                            button7.colour = (255,255,255)
                            button7Pressed = False
                        if button9Pressed == True:
                            button9.colour = (255,255,255)
                            button9Pressed = False
                        button3.colour = (0,255,0)
                        print('clicked $3')
                        button3Pressed = True
                        continue
                    else:
                        button3.colour = (255,255,255)
                        button3Pressed = False
                if button5.isOver(pos):
                    if button5Pressed == False:
                        if button3Pressed == True:
                            button3.colour = (255,255,255)
                            button3Pressed = False
                        if button7Pressed == True:
                            button7.colour = (255,255,255)
                            button7Pressed = False
                        if button9Pressed == True:
                            button9.colour = (255,255,255)
                            button9Pressed = False
                        button5.colour = (0,255,0)
                        print('Clicked $5')
                        button5Pressed = True
                        continue
                    else:
                        button5.colour = (255,255,255)
                        button5Pressed = False
                if button7.isOver(pos):
                    if button7Pressed == False:
                        if button3Pressed == True:
                            button3.colour = (255,255,255)
                            button3Pressed = False
                        if button5Pressed == True:
                            button5.colour = (255,255,255)
                            button5Pressed = False
                        if button9Pressed == True:
                            button9.colour = (255,255,255)
                            button9Pressed = False
                        button7.colour = (0,255,0)
                        print('Clicked $7')
                        button7Pressed = True
                        continue
                    else:
                        button7.colour = (255,255,255)
                        button7Pressed = False
                if button9.isOver(pos):
                    if button9Pressed == False:
                        if button3Pressed == True:
                            button3.colour = (255,255,255)
                            button3Pressed = False
                        if button5Pressed == True:
                            button5.colour = (255,255,255)
                            button5Pressed = False
                        if button7Pressed == True:
                            button7.colour = (255,255,255)
                            button7Pressed = False
                        button9.colour = (0,255,0)
                        print('Clicked $10')
                        button9Pressed = True
                        continue
                    else:
                        button9.colour = (255,255,255)
                        button9Pressed = False
        #customisation flow: a click on the displayed map records the user's
        #location (x, y) and advances to the customisation menu
        if mapCoor == True and event.type == pygame.MOUSEBUTTONDOWN:
            mouseclick = mouseClick(screen)
            if mouseclick[0]:
                pygame.display.update()
                x = mouseclick[1]
                y = mouseclick[2]
                print(x, ',', y)
                #pygame.time.delay(2000)
                mapCoor = False
                sleep(1)
                customisationMenu = True
        #prediction flow: a click on the map records (x, y) and goes straight
        #to the quick-search results
        if mapCoor2 == True and event.type == pygame.MOUSEBUTTONDOWN:
            mouseclick = mouseClick(screen)
            if mouseclick[0]:
                pygame.display.update()
                x = mouseclick[1]
                y = mouseclick[2]
                print(x, ',', y)
                #pygame.time.delay(2000)
                mapCoor2 = False
                sleep(1)
                loading(screen)
                easySearch = True
        #things that happen after the next button is pressed: gather the
        #selections into lists, run the filtered search and show the results
        if nextButtonPressed:
            sleep(1)
            loading(screen)
            user_prefList = []
            user_cuisineList = []
            user_budget = 0
            if halalButtonPressed:
                user_prefList.append("Halal")
            if vegButtonPressed:
                user_prefList.append("Vegetarian")
            if nonhalalButtonPressed:
                user_prefList.append("Non-Halal/Non-Vegetarian")
            if koreanButtonPressed:
                user_cuisineList.append("Korean")
            if malayButtonPressed:
                user_cuisineList.append("Malay")
            if japanButtonPressed:
                user_cuisineList.append("Japanese")
            if chineseButtonPressed:
                user_cuisineList.append("Chinese")
            if indianButtonPressed:
                user_cuisineList.append("Indian")
            if westernButtonPressed:
                user_cuisineList.append("Western")
            if button3Pressed:
                user_budget = 3
            if button5Pressed:
                user_budget = 5
            if button7Pressed:
                user_budget = 7
            if button9Pressed:
                user_budget = 9
            #debug
            print(user_cuisineList)
            print(user_prefList)
            print(user_budget)
            #continue#
            finalID = final_list(user_budget, user_cuisineList, user_prefList)
            print(finalID)
            nearest_canteen = nearest_can(finalID, x, y)
            print(nearest_canteen)
            sleep(1)
            nextButtonPressed = False
            complicatedMenu = True
|
normal
|
{
"blob_id": "2a8032c23e3c7aa3a7b0593c79db7adbc0353f93",
"index": 2125,
"step-1": "<mask token>\n\n\nclass button:\n\n def __init__(self, colour, x, y, width, height, text=''):\n self.colour = colour\n self.x = x\n self.y = y\n self.width = width\n self.height = height\n self.text = text\n\n def draw(self, win, outline=None):\n if outline:\n pygame.draw.rect(win, outline, (self.x - 2, self.y - 2, self.\n width + 4, self.height + 4), 0)\n pygame.draw.rect(win, self.colour, (self.x, self.y, self.width,\n self.height), 0)\n if self.text != '':\n font = pygame.font.SysFont('calligrapher.ttf', 60)\n text = font.render(self.text, 1, (0, 0, 0))\n win.blit(text, (self.x + (self.width / 2 - text.get_width() / 2\n ), self.y + (self.height / 2 - text.get_height() / 2)))\n\n def isOver(self, pos):\n if pos[0] > self.x and pos[0] < self.x + self.width:\n if pos[1] > self.y and pos[1] < self.y + self.height:\n return True\n else:\n return False\n\n\n<mask token>\n\n\ndef mouseClick(screen):\n x, y = pygame.mouse.get_pos()\n if (x >= 65 and x <= 727) and (y >= 82 and y <= 618):\n pygame.draw.circle(screen, (255, 0, 150), (x, y), 15)\n return True, x, y\n else:\n print('Out of bounds!')\n return False, x, y\n\n\ndef skeleExit(win):\n aryadelight = pygame.image.load(os.path.join('NTUFoodieRecsv1.png'))\n win.blit(aryadelight, (0, 0))\n pygame.display.update()\n xaxis = 100\n for i in range(1, 42):\n image = str(i) + '.png'\n skele = pygame.image.load(os.path.join(image))\n win.blit(skele, (250, 200))\n text('Exiting...', win, xaxis + 20, 600)\n pygame.display.update()\n sleep(0.09)\n\n\n<mask token>\n\n\ndef redrawMainWin(screen):\n aryadelight = pygame.image.load(os.path.join('NTUFoodieRecsv1.png'))\n screen.blit(aryadelight, (0, 0))\n mapButton.draw(screen, (0, 0, 0))\n instructionText(\n '(Choose your cuisines, preferences and budget for the meal here!)',\n screen, 215, 320)\n predictButton.draw(screen, (0, 0, 0))\n instructionText('(Find the nearest canteen!)', screen, 132, 470)\n exitButton.draw(screen, (0, 0, 0))\n ice = 
pygame.image.load(os.path.join('ice.png'))\n screen.blit(ice, (500, 670))\n font = pygame.font.SysFont('verdana', 20)\n creator = font.render('Made by HweeHean X Arya', 1, (0, 0, 200))\n screen.blit(creator, (535, 670))\n\n\n<mask token>\n\n\ndef redrawSearchWin(screen, x, y):\n bp = pygame.image.load(os.path.join('NTUFoodieRecsv1.png'))\n screen.blit(bp, (0, 0))\n GordonRamsay = pygame.image.load(os.path.join('GordonRamsay.png'))\n screen.blit(GordonRamsay, (400, 100))\n distList = []\n for i in canteen_list:\n distList.append(i)\n print(distList)\n top3 = nearest_can(distList, x, y)\n print(top3)\n text('Nearest Canteen:', screen, 110, 400)\n yaxis = 490\n canteenCount = 1\n for k in top3:\n if canteenCount == 1:\n if k == 'Food Court 1':\n canteenPic = pygame.image.load(os.path.join('Canteen1.jpg'))\n screen.blit(canteenPic, (150, 200))\n if k == 'Food Court 2':\n canteenPic = pygame.image.load(os.path.join('Canteen2.png'))\n screen.blit(canteenPic, (150, 200))\n if k == 'Food Court 4':\n canteenPic = pygame.image.load(os.path.join('Canteen4.png'))\n screen.blit(canteenPic, (150, 200))\n if k == 'Food Court 9':\n canteenPic = pygame.image.load(os.path.join('Canteen9.png'))\n screen.blit(canteenPic, (150, 200))\n if k == 'Food Court 11':\n canteenPic = pygame.image.load(os.path.join('Canteen11.png'))\n screen.blit(canteenPic, (150, 200))\n if k == 'Food Court 13':\n canteenPic = pygame.image.load(os.path.join('Canteen13.png'))\n screen.blit(canteenPic, (150, 200))\n if k == 'Food Court 14':\n canteenPic = pygame.image.load(os.path.join('Canteen14.png'))\n screen.blit(canteenPic, (150, 200))\n if k == 'Food Court 16':\n canteenPic = pygame.image.load(os.path.join('Canteen16.png'))\n screen.blit(canteenPic, (150, 200))\n if k == 'Tamarind Food Court':\n canteenPic = pygame.image.load(os.path.join('Tamarind.jpg'))\n screen.blit(canteenPic, (150, 200))\n if k == 'Pioneer Food Court':\n canteenPic = pygame.image.load(os.path.join('Pioneer.png'))\n 
screen.blit(canteenPic, (150, 200))\n if k == 'North Spine Food Court':\n canteenPic = pygame.image.load(os.path.join('NorthSpine.jpg'))\n screen.blit(canteenPic, (150, 200))\n if k == 'North Spine Plaza':\n canteenPic = pygame.image.load(os.path.join(\n 'NorthSpinePlaza.jpg'))\n screen.blit(canteenPic, (150, 200))\n if k == 'South Spine Food Court':\n canteenPic = pygame.image.load(os.path.join(\n 'SouthSpineKoufuFoodCourt.png'))\n screen.blit(canteenPic, (150, 200))\n if k == 'Quad Cafe':\n canteenPic = pygame.image.load(os.path.join('Quad.jpg'))\n screen.blit(canteenPic, (150, 200))\n if k == 'Coffee Bean':\n canteenPic = pygame.image.load(os.path.join('Coffee.jpg'))\n screen.blit(canteenPic, (150, 200))\n if k == 'North Hill Food Court':\n canteenPic = pygame.image.load(os.path.join('NorthHill.jpg'))\n screen.blit(canteenPic, (150, 200))\n text(str(canteenCount), screen, 110, yaxis)\n text('.', screen, 135, yaxis)\n text(k, screen, 150, yaxis)\n canteenCount += 1\n yaxis += 70\n return top3\n\n\n<mask token>\n\n\ndef final_list(user_budget, user_cuisine, user_preference):\n new_list = []\n for i in canteen_list:\n if user_budget >= canteen_list[i][1]:\n new_list.append(i)\n for c in user_cuisine:\n for i in canteen_list:\n if c in canteen_list[i][2]:\n new_list.append(i)\n for c in user_preference:\n for i in canteen_list:\n if c in canteen_list[i][4]:\n new_list.append(i)\n new_list = list(set(new_list))\n if len(new_list) == 0:\n for i in canteen_list:\n new_list.append(i)\n return new_list\n\n\ndef calc_dis(x1, y1, x2, y2):\n return ((x1 - x2) ** 2 + (y1 - y2) ** 2) ** 1 / 2\n\n\ndef nearest_can(new_list, x, y):\n top3 = []\n copy_list = new_list.copy()\n while len(top3) != 3:\n j = copy_list[0]\n coor = canteen_list[j][5]\n Min = calc_dis(x, y, coor[0], coor[1])\n food_court = ''\n for k in copy_list:\n coor = canteen_list[k][5]\n dist = calc_dis(x, y, coor[0], coor[1])\n if Min >= dist:\n Min = dist\n food_court = k\n index = copy_list.index(food_court)\n 
copy_list.pop(index)\n top3.append(food_court)\n print(top3)\n return top3\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass button:\n\n def __init__(self, colour, x, y, width, height, text=''):\n self.colour = colour\n self.x = x\n self.y = y\n self.width = width\n self.height = height\n self.text = text\n\n def draw(self, win, outline=None):\n if outline:\n pygame.draw.rect(win, outline, (self.x - 2, self.y - 2, self.\n width + 4, self.height + 4), 0)\n pygame.draw.rect(win, self.colour, (self.x, self.y, self.width,\n self.height), 0)\n if self.text != '':\n font = pygame.font.SysFont('calligrapher.ttf', 60)\n text = font.render(self.text, 1, (0, 0, 0))\n win.blit(text, (self.x + (self.width / 2 - text.get_width() / 2\n ), self.y + (self.height / 2 - text.get_height() / 2)))\n\n def isOver(self, pos):\n if pos[0] > self.x and pos[0] < self.x + self.width:\n if pos[1] > self.y and pos[1] < self.y + self.height:\n return True\n else:\n return False\n\n\n<mask token>\n\n\ndef instructionText(text, win, x, y):\n font = pygame.font.SysFont('Arial', 20)\n phrase = font.render(text, 1, (0, 0, 0))\n win.blit(phrase, (x, y))\n\n\n<mask token>\n\n\ndef mouseClick(screen):\n x, y = pygame.mouse.get_pos()\n if (x >= 65 and x <= 727) and (y >= 82 and y <= 618):\n pygame.draw.circle(screen, (255, 0, 150), (x, y), 15)\n return True, x, y\n else:\n print('Out of bounds!')\n return False, x, y\n\n\ndef skeleExit(win):\n aryadelight = pygame.image.load(os.path.join('NTUFoodieRecsv1.png'))\n win.blit(aryadelight, (0, 0))\n pygame.display.update()\n xaxis = 100\n for i in range(1, 42):\n image = str(i) + '.png'\n skele = pygame.image.load(os.path.join(image))\n win.blit(skele, (250, 200))\n text('Exiting...', win, xaxis + 20, 600)\n pygame.display.update()\n sleep(0.09)\n\n\n<mask token>\n\n\ndef redrawMainWin(screen):\n aryadelight = pygame.image.load(os.path.join('NTUFoodieRecsv1.png'))\n screen.blit(aryadelight, (0, 0))\n mapButton.draw(screen, (0, 0, 0))\n instructionText(\n '(Choose your cuisines, preferences and budget for the meal here!)',\n screen, 215, 320)\n 
predictButton.draw(screen, (0, 0, 0))\n instructionText('(Find the nearest canteen!)', screen, 132, 470)\n exitButton.draw(screen, (0, 0, 0))\n ice = pygame.image.load(os.path.join('ice.png'))\n screen.blit(ice, (500, 670))\n font = pygame.font.SysFont('verdana', 20)\n creator = font.render('Made by HweeHean X Arya', 1, (0, 0, 200))\n screen.blit(creator, (535, 670))\n\n\n<mask token>\n\n\ndef redrawSearchWin(screen, x, y):\n bp = pygame.image.load(os.path.join('NTUFoodieRecsv1.png'))\n screen.blit(bp, (0, 0))\n GordonRamsay = pygame.image.load(os.path.join('GordonRamsay.png'))\n screen.blit(GordonRamsay, (400, 100))\n distList = []\n for i in canteen_list:\n distList.append(i)\n print(distList)\n top3 = nearest_can(distList, x, y)\n print(top3)\n text('Nearest Canteen:', screen, 110, 400)\n yaxis = 490\n canteenCount = 1\n for k in top3:\n if canteenCount == 1:\n if k == 'Food Court 1':\n canteenPic = pygame.image.load(os.path.join('Canteen1.jpg'))\n screen.blit(canteenPic, (150, 200))\n if k == 'Food Court 2':\n canteenPic = pygame.image.load(os.path.join('Canteen2.png'))\n screen.blit(canteenPic, (150, 200))\n if k == 'Food Court 4':\n canteenPic = pygame.image.load(os.path.join('Canteen4.png'))\n screen.blit(canteenPic, (150, 200))\n if k == 'Food Court 9':\n canteenPic = pygame.image.load(os.path.join('Canteen9.png'))\n screen.blit(canteenPic, (150, 200))\n if k == 'Food Court 11':\n canteenPic = pygame.image.load(os.path.join('Canteen11.png'))\n screen.blit(canteenPic, (150, 200))\n if k == 'Food Court 13':\n canteenPic = pygame.image.load(os.path.join('Canteen13.png'))\n screen.blit(canteenPic, (150, 200))\n if k == 'Food Court 14':\n canteenPic = pygame.image.load(os.path.join('Canteen14.png'))\n screen.blit(canteenPic, (150, 200))\n if k == 'Food Court 16':\n canteenPic = pygame.image.load(os.path.join('Canteen16.png'))\n screen.blit(canteenPic, (150, 200))\n if k == 'Tamarind Food Court':\n canteenPic = pygame.image.load(os.path.join('Tamarind.jpg'))\n 
screen.blit(canteenPic, (150, 200))\n if k == 'Pioneer Food Court':\n canteenPic = pygame.image.load(os.path.join('Pioneer.png'))\n screen.blit(canteenPic, (150, 200))\n if k == 'North Spine Food Court':\n canteenPic = pygame.image.load(os.path.join('NorthSpine.jpg'))\n screen.blit(canteenPic, (150, 200))\n if k == 'North Spine Plaza':\n canteenPic = pygame.image.load(os.path.join(\n 'NorthSpinePlaza.jpg'))\n screen.blit(canteenPic, (150, 200))\n if k == 'South Spine Food Court':\n canteenPic = pygame.image.load(os.path.join(\n 'SouthSpineKoufuFoodCourt.png'))\n screen.blit(canteenPic, (150, 200))\n if k == 'Quad Cafe':\n canteenPic = pygame.image.load(os.path.join('Quad.jpg'))\n screen.blit(canteenPic, (150, 200))\n if k == 'Coffee Bean':\n canteenPic = pygame.image.load(os.path.join('Coffee.jpg'))\n screen.blit(canteenPic, (150, 200))\n if k == 'North Hill Food Court':\n canteenPic = pygame.image.load(os.path.join('NorthHill.jpg'))\n screen.blit(canteenPic, (150, 200))\n text(str(canteenCount), screen, 110, yaxis)\n text('.', screen, 135, yaxis)\n text(k, screen, 150, yaxis)\n canteenCount += 1\n yaxis += 70\n return top3\n\n\n<mask token>\n\n\ndef final_list(user_budget, user_cuisine, user_preference):\n new_list = []\n for i in canteen_list:\n if user_budget >= canteen_list[i][1]:\n new_list.append(i)\n for c in user_cuisine:\n for i in canteen_list:\n if c in canteen_list[i][2]:\n new_list.append(i)\n for c in user_preference:\n for i in canteen_list:\n if c in canteen_list[i][4]:\n new_list.append(i)\n new_list = list(set(new_list))\n if len(new_list) == 0:\n for i in canteen_list:\n new_list.append(i)\n return new_list\n\n\ndef calc_dis(x1, y1, x2, y2):\n return ((x1 - x2) ** 2 + (y1 - y2) ** 2) ** 1 / 2\n\n\ndef nearest_can(new_list, x, y):\n top3 = []\n copy_list = new_list.copy()\n while len(top3) != 3:\n j = copy_list[0]\n coor = canteen_list[j][5]\n Min = calc_dis(x, y, coor[0], coor[1])\n food_court = ''\n for k in copy_list:\n coor = 
canteen_list[k][5]\n dist = calc_dis(x, y, coor[0], coor[1])\n if Min >= dist:\n Min = dist\n food_court = k\n index = copy_list.index(food_court)\n copy_list.pop(index)\n top3.append(food_court)\n print(top3)\n return top3\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass button:\n\n def __init__(self, colour, x, y, width, height, text=''):\n self.colour = colour\n self.x = x\n self.y = y\n self.width = width\n self.height = height\n self.text = text\n\n def draw(self, win, outline=None):\n if outline:\n pygame.draw.rect(win, outline, (self.x - 2, self.y - 2, self.\n width + 4, self.height + 4), 0)\n pygame.draw.rect(win, self.colour, (self.x, self.y, self.width,\n self.height), 0)\n if self.text != '':\n font = pygame.font.SysFont('calligrapher.ttf', 60)\n text = font.render(self.text, 1, (0, 0, 0))\n win.blit(text, (self.x + (self.width / 2 - text.get_width() / 2\n ), self.y + (self.height / 2 - text.get_height() / 2)))\n\n def isOver(self, pos):\n if pos[0] > self.x and pos[0] < self.x + self.width:\n if pos[1] > self.y and pos[1] < self.y + self.height:\n return True\n else:\n return False\n\n\n<mask token>\n\n\ndef text(text, win, x, y):\n font = pygame.font.SysFont('freesansbold.ttf', 50)\n phrase = font.render(text, 1, (0, 0, 0))\n win.blit(phrase, (x, y))\n\n\ndef instructionText(text, win, x, y):\n font = pygame.font.SysFont('Arial', 20)\n phrase = font.render(text, 1, (0, 0, 0))\n win.blit(phrase, (x, y))\n\n\n<mask token>\n\n\ndef mouseClick(screen):\n x, y = pygame.mouse.get_pos()\n if (x >= 65 and x <= 727) and (y >= 82 and y <= 618):\n pygame.draw.circle(screen, (255, 0, 150), (x, y), 15)\n return True, x, y\n else:\n print('Out of bounds!')\n return False, x, y\n\n\ndef skeleExit(win):\n aryadelight = pygame.image.load(os.path.join('NTUFoodieRecsv1.png'))\n win.blit(aryadelight, (0, 0))\n pygame.display.update()\n xaxis = 100\n for i in range(1, 42):\n image = str(i) + '.png'\n skele = pygame.image.load(os.path.join(image))\n win.blit(skele, (250, 200))\n text('Exiting...', win, xaxis + 20, 600)\n pygame.display.update()\n sleep(0.09)\n\n\n<mask token>\n\n\ndef redrawMap(screen):\n NTUmap = pygame.image.load(os.path.join('NTUMap.jpg'))\n screen.blit(NTUmap, (0, 0))\n for x in 
range(50, 900, 50):\n pygame.draw.rect(screen, (255, 0, 0), (x, 0, 1, 700), 0)\n for y in range(50, 700, 50):\n pygame.draw.rect(screen, (255, 0, 0), (0, y, 900, 1), 0)\n text('Please click on your current location!', screen, 200, 100)\n\n\n<mask token>\n\n\ndef redrawMainWin(screen):\n aryadelight = pygame.image.load(os.path.join('NTUFoodieRecsv1.png'))\n screen.blit(aryadelight, (0, 0))\n mapButton.draw(screen, (0, 0, 0))\n instructionText(\n '(Choose your cuisines, preferences and budget for the meal here!)',\n screen, 215, 320)\n predictButton.draw(screen, (0, 0, 0))\n instructionText('(Find the nearest canteen!)', screen, 132, 470)\n exitButton.draw(screen, (0, 0, 0))\n ice = pygame.image.load(os.path.join('ice.png'))\n screen.blit(ice, (500, 670))\n font = pygame.font.SysFont('verdana', 20)\n creator = font.render('Made by HweeHean X Arya', 1, (0, 0, 200))\n screen.blit(creator, (535, 670))\n\n\n<mask token>\n\n\ndef redrawSearchWin(screen, x, y):\n bp = pygame.image.load(os.path.join('NTUFoodieRecsv1.png'))\n screen.blit(bp, (0, 0))\n GordonRamsay = pygame.image.load(os.path.join('GordonRamsay.png'))\n screen.blit(GordonRamsay, (400, 100))\n distList = []\n for i in canteen_list:\n distList.append(i)\n print(distList)\n top3 = nearest_can(distList, x, y)\n print(top3)\n text('Nearest Canteen:', screen, 110, 400)\n yaxis = 490\n canteenCount = 1\n for k in top3:\n if canteenCount == 1:\n if k == 'Food Court 1':\n canteenPic = pygame.image.load(os.path.join('Canteen1.jpg'))\n screen.blit(canteenPic, (150, 200))\n if k == 'Food Court 2':\n canteenPic = pygame.image.load(os.path.join('Canteen2.png'))\n screen.blit(canteenPic, (150, 200))\n if k == 'Food Court 4':\n canteenPic = pygame.image.load(os.path.join('Canteen4.png'))\n screen.blit(canteenPic, (150, 200))\n if k == 'Food Court 9':\n canteenPic = pygame.image.load(os.path.join('Canteen9.png'))\n screen.blit(canteenPic, (150, 200))\n if k == 'Food Court 11':\n canteenPic = 
pygame.image.load(os.path.join('Canteen11.png'))\n screen.blit(canteenPic, (150, 200))\n if k == 'Food Court 13':\n canteenPic = pygame.image.load(os.path.join('Canteen13.png'))\n screen.blit(canteenPic, (150, 200))\n if k == 'Food Court 14':\n canteenPic = pygame.image.load(os.path.join('Canteen14.png'))\n screen.blit(canteenPic, (150, 200))\n if k == 'Food Court 16':\n canteenPic = pygame.image.load(os.path.join('Canteen16.png'))\n screen.blit(canteenPic, (150, 200))\n if k == 'Tamarind Food Court':\n canteenPic = pygame.image.load(os.path.join('Tamarind.jpg'))\n screen.blit(canteenPic, (150, 200))\n if k == 'Pioneer Food Court':\n canteenPic = pygame.image.load(os.path.join('Pioneer.png'))\n screen.blit(canteenPic, (150, 200))\n if k == 'North Spine Food Court':\n canteenPic = pygame.image.load(os.path.join('NorthSpine.jpg'))\n screen.blit(canteenPic, (150, 200))\n if k == 'North Spine Plaza':\n canteenPic = pygame.image.load(os.path.join(\n 'NorthSpinePlaza.jpg'))\n screen.blit(canteenPic, (150, 200))\n if k == 'South Spine Food Court':\n canteenPic = pygame.image.load(os.path.join(\n 'SouthSpineKoufuFoodCourt.png'))\n screen.blit(canteenPic, (150, 200))\n if k == 'Quad Cafe':\n canteenPic = pygame.image.load(os.path.join('Quad.jpg'))\n screen.blit(canteenPic, (150, 200))\n if k == 'Coffee Bean':\n canteenPic = pygame.image.load(os.path.join('Coffee.jpg'))\n screen.blit(canteenPic, (150, 200))\n if k == 'North Hill Food Court':\n canteenPic = pygame.image.load(os.path.join('NorthHill.jpg'))\n screen.blit(canteenPic, (150, 200))\n text(str(canteenCount), screen, 110, yaxis)\n text('.', screen, 135, yaxis)\n text(k, screen, 150, yaxis)\n canteenCount += 1\n yaxis += 70\n return top3\n\n\ndef complicatedSearchWin(screen, top3):\n bp = pygame.image.load(os.path.join('NTUFoodieRecsv1.png'))\n screen.blit(bp, (0, 0))\n GordonRamsay = pygame.image.load(os.path.join('GordonRamsay.png'))\n screen.blit(GordonRamsay, (400, 100))\n text('Nearest Canteen:', screen, 110, 
400)\n yaxis = 490\n canteenCount = 1\n for k in top3:\n if canteenCount == 1:\n if k == 'Food Court 1':\n canteenPic = pygame.image.load(os.path.join('Canteen1.jpg'))\n screen.blit(canteenPic, (150, 200))\n if k == 'Food Court 2':\n canteenPic = pygame.image.load(os.path.join('Canteen2.png'))\n screen.blit(canteenPic, (150, 200))\n if k == 'Food Court 4':\n canteenPic = pygame.image.load(os.path.join('Canteen4.png'))\n screen.blit(canteenPic, (150, 200))\n if k == 'Food Court 9':\n canteenPic = pygame.image.load(os.path.join('Canteen9.png'))\n screen.blit(canteenPic, (150, 200))\n if k == 'Food Court 11':\n canteenPic = pygame.image.load(os.path.join('Canteen11.png'))\n screen.blit(canteenPic, (150, 200))\n if k == 'Food Court 13':\n canteenPic = pygame.image.load(os.path.join('Canteen13.png'))\n screen.blit(canteenPic, (150, 200))\n if k == 'Food Court 14':\n canteenPic = pygame.image.load(os.path.join('Canteen14.png'))\n screen.blit(canteenPic, (150, 200))\n if k == 'Food Court 16':\n canteenPic = pygame.image.load(os.path.join('Canteen16.png'))\n screen.blit(canteenPic, (150, 200))\n if k == 'Tamarind Food Court':\n canteenPic = pygame.image.load(os.path.join('Tamarind.jpg'))\n screen.blit(canteenPic, (150, 200))\n if k == 'Pioneer Food Court':\n canteenPic = pygame.image.load(os.path.join('Pioneer.png'))\n screen.blit(canteenPic, (150, 200))\n if k == 'North Spine Food Court':\n canteenPic = pygame.image.load(os.path.join('NorthSpine.jpg'))\n screen.blit(canteenPic, (150, 200))\n if k == 'North Spine Plaza':\n canteenPic = pygame.image.load(os.path.join(\n 'NorthSpinePlaza.jpg'))\n screen.blit(canteenPic, (150, 200))\n if k == 'South Spine Food Court':\n canteenPic = pygame.image.load(os.path.join(\n 'SouthSpineKoufuFoodCourt.png'))\n screen.blit(canteenPic, (150, 200))\n if k == 'Quad Cafe':\n canteenPic = pygame.image.load(os.path.join('Quad.jpg'))\n screen.blit(canteenPic, (150, 200))\n if k == 'Coffee Bean':\n canteenPic = 
pygame.image.load(os.path.join('Coffee.jpg'))\n screen.blit(canteenPic, (150, 200))\n if k == 'North Hill Food Court':\n canteenPic = pygame.image.load(os.path.join('NorthHill.jpg'))\n screen.blit(canteenPic, (150, 200))\n text(str(canteenCount), screen, 110, yaxis)\n text('.', screen, 135, yaxis)\n text(k, screen, 150, yaxis)\n canteenCount += 1\n yaxis += 70\n\n\n<mask token>\n\n\ndef final_list(user_budget, user_cuisine, user_preference):\n new_list = []\n for i in canteen_list:\n if user_budget >= canteen_list[i][1]:\n new_list.append(i)\n for c in user_cuisine:\n for i in canteen_list:\n if c in canteen_list[i][2]:\n new_list.append(i)\n for c in user_preference:\n for i in canteen_list:\n if c in canteen_list[i][4]:\n new_list.append(i)\n new_list = list(set(new_list))\n if len(new_list) == 0:\n for i in canteen_list:\n new_list.append(i)\n return new_list\n\n\ndef calc_dis(x1, y1, x2, y2):\n return ((x1 - x2) ** 2 + (y1 - y2) ** 2) ** 1 / 2\n\n\ndef nearest_can(new_list, x, y):\n top3 = []\n copy_list = new_list.copy()\n while len(top3) != 3:\n j = copy_list[0]\n coor = canteen_list[j][5]\n Min = calc_dis(x, y, coor[0], coor[1])\n food_court = ''\n for k in copy_list:\n coor = canteen_list[k][5]\n dist = calc_dis(x, y, coor[0], coor[1])\n if Min >= dist:\n Min = dist\n food_court = k\n index = copy_list.index(food_court)\n copy_list.pop(index)\n top3.append(food_court)\n print(top3)\n return top3\n\n\n<mask token>\n",
"step-4": "import pygame\nimport os\nfrom time import sleep\nscreen = pygame.display.set_mode((900, 700))\nscreen.fill((255, 255, 255))\npygame.display.set_caption('NTUFOODIERECOMMENDSYSTEM')\n<mask token>\ncanteen_list = {'Food Court 1': [12, 3.5, ['Korean', 'Japanese', 'Western'],\n 2100, ['Halal', 'Non-Halal/Non-Vegetarian'], (442, 473)],\n 'Food Court 2': [10, 3.6, ['Korean', 'Chinese', 'Malay'], 2100, [\n 'Halal', 'Vegetarian', 'Non-Halal/Non-Vegetarian'], (477, 409)],\n 'Food Court 4': [10, 3, ['Chinese', 'Western'], 2100, [\n 'Non-Halal/Non-Vegetarian'], (358, 526)], 'Food Court 9': [10, 3.5, [\n 'Chinese'], 2100, ['Halal', 'Vegetarian', 'Non-Halal/Non-Vegetarian'],\n (582, 288)], 'Food Court 11': [10, 2.5, ['Chinese', 'Indian',\n 'Japanese', 'Western'], 2100, ['Halal', 'Vegetarian',\n 'Non-Halal/Non-Vegetarian'], (682, 243)], 'Food Court 13': [9, 2, [\n 'Western', 'Korean', 'Japanese', 'Chinese'], 2100, ['Halal',\n 'Vegetarian', 'Non-Halal/Non-Vegetarian'], (445, 176)], 'Food Court 14':\n [8, 3, ['Western', 'Chinese', 'Korean', 'Malay'], 2100, ['Halal',\n 'Vegetarian', 'Non-Halal/Non-Vegetarian'], (509, 182)], 'Food Court 16':\n [10, 3.3, ['Japanese', 'Chinese', 'Korean', 'Indian'], 2100, ['Halal',\n 'Vegetarian', 'Non-Halal/Non-Vegetarian'], (405, 221)],\n 'Tamarind Food Court': [10, 3, ['Malay', 'Chinese', 'Korean', 'Western'\n ], 2100, ['Halal', 'Non-Halal', 'Vegetarian',\n 'Non-Halal/Non-Vegetarian'], (627, 200)], 'Pioneer Food Court': [20, \n 2.3, ['Thai', 'Chinese'], 0, ['Vegetarian', 'Non-Halal/Non-Vegetarian'],\n (497, 561)], 'North Spine Food Court': [10, 2.5, ['Korean', 'Japanese',\n 'Chinese', 'Western', 'Malay'], 2100, ['Vegetarian',\n 'Non-Halal/Non-Vegetarian'], (275, 293)], 'North Spine Plaza': [10, 4,\n ['Western', 'Korean'], 2130, ['Vegetarian', 'Halal',\n 'Non-Halal/Non-Vegetarian'], (287, 339)], 'South Spine Food Court': [10,\n 2, ['Chinese', 'Malay', 'Korean', 'Japanese', 'Western'], 2100, [\n 'Vegetarian', 'Halal', 
'Non-Halal/Non-Vegetarian'], (227, 496)],\n 'Quad Cafe': [10, 2.4, ['Korean', 'Chinese', 'Indian', 'Malay'], 2100,\n ['Vegetarian', 'Halal', 'Non-Halal/Non-Vegetarian'], (224, 351)],\n 'Coffee Bean': [20, 4, ['Western'], 2000, ['Vegetarian', 'Halal',\n 'Non-Halal/Non-Vegetarian'], (219, 389)], 'North Hill Food Court': [10,\n 3.8, ['Chinese', 'Malay', 'Indian'], 2100, ['Vegetarian', 'Halal',\n 'Non-Halal/Non-Vegetarian'], (720, 314)]}\n<mask token>\n\n\nclass button:\n\n def __init__(self, colour, x, y, width, height, text=''):\n self.colour = colour\n self.x = x\n self.y = y\n self.width = width\n self.height = height\n self.text = text\n\n def draw(self, win, outline=None):\n if outline:\n pygame.draw.rect(win, outline, (self.x - 2, self.y - 2, self.\n width + 4, self.height + 4), 0)\n pygame.draw.rect(win, self.colour, (self.x, self.y, self.width,\n self.height), 0)\n if self.text != '':\n font = pygame.font.SysFont('calligrapher.ttf', 60)\n text = font.render(self.text, 1, (0, 0, 0))\n win.blit(text, (self.x + (self.width / 2 - text.get_width() / 2\n ), self.y + (self.height / 2 - text.get_height() / 2)))\n\n def isOver(self, pos):\n if pos[0] > self.x and pos[0] < self.x + self.width:\n if pos[1] > self.y and pos[1] < self.y + self.height:\n return True\n else:\n return False\n\n\n<mask token>\n\n\ndef text(text, win, x, y):\n font = pygame.font.SysFont('freesansbold.ttf', 50)\n phrase = font.render(text, 1, (0, 0, 0))\n win.blit(phrase, (x, y))\n\n\ndef instructionText(text, win, x, y):\n font = pygame.font.SysFont('Arial', 20)\n phrase = font.render(text, 1, (0, 0, 0))\n win.blit(phrase, (x, y))\n\n\ndef header(text, win, x, y):\n font = pygame.font.SysFont('TimesNewRoman', 70)\n phrase = font.render(text, 1, (0, 0, 0))\n win.blit(phrase, (x, y))\n\n\ndef mouseClick(screen):\n x, y = pygame.mouse.get_pos()\n if (x >= 65 and x <= 727) and (y >= 82 and y <= 618):\n pygame.draw.circle(screen, (255, 0, 150), (x, y), 15)\n return True, x, y\n else:\n print('Out of 
bounds!')\n return False, x, y\n\n\ndef skeleExit(win):\n aryadelight = pygame.image.load(os.path.join('NTUFoodieRecsv1.png'))\n win.blit(aryadelight, (0, 0))\n pygame.display.update()\n xaxis = 100\n for i in range(1, 42):\n image = str(i) + '.png'\n skele = pygame.image.load(os.path.join(image))\n win.blit(skele, (250, 200))\n text('Exiting...', win, xaxis + 20, 600)\n pygame.display.update()\n sleep(0.09)\n\n\ndef loading(win):\n x = 0\n while x < 3:\n load0 = pygame.image.load(os.path.join('load0.png'))\n win.blit(load0, (0, 0))\n pygame.display.update()\n sleep(0.3)\n load1 = pygame.image.load(os.path.join('load1.png'))\n win.blit(load1, (0, 0))\n pygame.display.update()\n sleep(0.3)\n load2 = pygame.image.load(os.path.join('load2.png'))\n win.blit(load2, (0, 0))\n pygame.display.update()\n sleep(0.3)\n load3 = pygame.image.load(os.path.join('load3.png'))\n win.blit(load3, (0, 0))\n pygame.display.update()\n sleep(0.3)\n x += 1\n\n\ndef redrawMap(screen):\n NTUmap = pygame.image.load(os.path.join('NTUMap.jpg'))\n screen.blit(NTUmap, (0, 0))\n for x in range(50, 900, 50):\n pygame.draw.rect(screen, (255, 0, 0), (x, 0, 1, 700), 0)\n for y in range(50, 700, 50):\n pygame.draw.rect(screen, (255, 0, 0), (0, y, 900, 1), 0)\n text('Please click on your current location!', screen, 200, 100)\n\n\ndef redrawGPSMap(screen, top3, x, y):\n NTUmap = pygame.image.load(os.path.join('NTUMap.jpg'))\n screen.blit(NTUmap, (0, 0))\n redGPS = pygame.image.load(os.path.join('redgps.png'))\n screen.blit(redGPS, (x - 16, y - 32))\n instructionText('You are currently at this position.', screen, x + 4, y -\n 10)\n counter = 1\n for i in top3:\n coor = canteen_list[i][5]\n if counter == 1:\n blueGPS = pygame.image.load(os.path.join('bluegps.png'))\n screen.blit(blueGPS, (coor[0] - 12, coor[1] - 24))\n instructionText(i, screen, coor[0] - 24, coor[1])\n pass\n if counter == 2:\n blackGPS = pygame.image.load(os.path.join('blackgps.png'))\n screen.blit(blackGPS, (coor[0] - 12, coor[1] - 
24))\n instructionText(i, screen, coor[0] - 24, coor[1])\n pass\n if counter == 3:\n yellowGPS = pygame.image.load(os.path.join('yellowgps.png'))\n screen.blit(yellowGPS, (coor[0] - 12, coor[1] - 24))\n instructionText(i, screen, coor[0] - 24, coor[1])\n pass\n counter += 1\n restartButton.draw(screen, (0, 0, 0))\n\n\ndef redrawMainWin(screen):\n aryadelight = pygame.image.load(os.path.join('NTUFoodieRecsv1.png'))\n screen.blit(aryadelight, (0, 0))\n mapButton.draw(screen, (0, 0, 0))\n instructionText(\n '(Choose your cuisines, preferences and budget for the meal here!)',\n screen, 215, 320)\n predictButton.draw(screen, (0, 0, 0))\n instructionText('(Find the nearest canteen!)', screen, 132, 470)\n exitButton.draw(screen, (0, 0, 0))\n ice = pygame.image.load(os.path.join('ice.png'))\n screen.blit(ice, (500, 670))\n font = pygame.font.SysFont('verdana', 20)\n creator = font.render('Made by HweeHean X Arya', 1, (0, 0, 200))\n screen.blit(creator, (535, 670))\n\n\ndef redrawCustWin(screen):\n bp = pygame.image.load(os.path.join('gradient.jpg'))\n screen.blit(bp, (0, 0))\n instructionText('Left click again to reset!', screen, 300, 20)\n text('Please select your food preference: ', screen, 100, 50)\n halalButton.draw(screen, (0, 0, 0))\n vegButton.draw(screen, (0, 0, 0))\n nonhalalButton.draw(screen, (0, 0, 0))\n text('Please select your cuisine type: ', screen, 100, 200)\n koreanButton.draw(screen, (0, 0, 0))\n malayButton.draw(screen, (0, 0, 0))\n japanButton.draw(screen, (0, 0, 0))\n chineseButton.draw(screen, (0, 0, 0))\n indianButton.draw(screen, (0, 0, 0))\n westernButton.draw(screen, (0, 0, 0))\n text('Please select your maximum budget: ', screen, 100, 430)\n button3.draw(screen, (0, 0, 0))\n button5.draw(screen, (0, 0, 0))\n button7.draw(screen, (0, 0, 0))\n button9.draw(screen, (0, 0, 0))\n nextButton.draw(screen, (0, 0, 0))\n\n\ndef redrawSearchWin(screen, x, y):\n bp = pygame.image.load(os.path.join('NTUFoodieRecsv1.png'))\n screen.blit(bp, (0, 0))\n 
GordonRamsay = pygame.image.load(os.path.join('GordonRamsay.png'))\n screen.blit(GordonRamsay, (400, 100))\n distList = []\n for i in canteen_list:\n distList.append(i)\n print(distList)\n top3 = nearest_can(distList, x, y)\n print(top3)\n text('Nearest Canteen:', screen, 110, 400)\n yaxis = 490\n canteenCount = 1\n for k in top3:\n if canteenCount == 1:\n if k == 'Food Court 1':\n canteenPic = pygame.image.load(os.path.join('Canteen1.jpg'))\n screen.blit(canteenPic, (150, 200))\n if k == 'Food Court 2':\n canteenPic = pygame.image.load(os.path.join('Canteen2.png'))\n screen.blit(canteenPic, (150, 200))\n if k == 'Food Court 4':\n canteenPic = pygame.image.load(os.path.join('Canteen4.png'))\n screen.blit(canteenPic, (150, 200))\n if k == 'Food Court 9':\n canteenPic = pygame.image.load(os.path.join('Canteen9.png'))\n screen.blit(canteenPic, (150, 200))\n if k == 'Food Court 11':\n canteenPic = pygame.image.load(os.path.join('Canteen11.png'))\n screen.blit(canteenPic, (150, 200))\n if k == 'Food Court 13':\n canteenPic = pygame.image.load(os.path.join('Canteen13.png'))\n screen.blit(canteenPic, (150, 200))\n if k == 'Food Court 14':\n canteenPic = pygame.image.load(os.path.join('Canteen14.png'))\n screen.blit(canteenPic, (150, 200))\n if k == 'Food Court 16':\n canteenPic = pygame.image.load(os.path.join('Canteen16.png'))\n screen.blit(canteenPic, (150, 200))\n if k == 'Tamarind Food Court':\n canteenPic = pygame.image.load(os.path.join('Tamarind.jpg'))\n screen.blit(canteenPic, (150, 200))\n if k == 'Pioneer Food Court':\n canteenPic = pygame.image.load(os.path.join('Pioneer.png'))\n screen.blit(canteenPic, (150, 200))\n if k == 'North Spine Food Court':\n canteenPic = pygame.image.load(os.path.join('NorthSpine.jpg'))\n screen.blit(canteenPic, (150, 200))\n if k == 'North Spine Plaza':\n canteenPic = pygame.image.load(os.path.join(\n 'NorthSpinePlaza.jpg'))\n screen.blit(canteenPic, (150, 200))\n if k == 'South Spine Food Court':\n canteenPic = 
pygame.image.load(os.path.join(\n 'SouthSpineKoufuFoodCourt.png'))\n screen.blit(canteenPic, (150, 200))\n if k == 'Quad Cafe':\n canteenPic = pygame.image.load(os.path.join('Quad.jpg'))\n screen.blit(canteenPic, (150, 200))\n if k == 'Coffee Bean':\n canteenPic = pygame.image.load(os.path.join('Coffee.jpg'))\n screen.blit(canteenPic, (150, 200))\n if k == 'North Hill Food Court':\n canteenPic = pygame.image.load(os.path.join('NorthHill.jpg'))\n screen.blit(canteenPic, (150, 200))\n text(str(canteenCount), screen, 110, yaxis)\n text('.', screen, 135, yaxis)\n text(k, screen, 150, yaxis)\n canteenCount += 1\n yaxis += 70\n return top3\n\n\ndef complicatedSearchWin(screen, top3):\n bp = pygame.image.load(os.path.join('NTUFoodieRecsv1.png'))\n screen.blit(bp, (0, 0))\n GordonRamsay = pygame.image.load(os.path.join('GordonRamsay.png'))\n screen.blit(GordonRamsay, (400, 100))\n text('Nearest Canteen:', screen, 110, 400)\n yaxis = 490\n canteenCount = 1\n for k in top3:\n if canteenCount == 1:\n if k == 'Food Court 1':\n canteenPic = pygame.image.load(os.path.join('Canteen1.jpg'))\n screen.blit(canteenPic, (150, 200))\n if k == 'Food Court 2':\n canteenPic = pygame.image.load(os.path.join('Canteen2.png'))\n screen.blit(canteenPic, (150, 200))\n if k == 'Food Court 4':\n canteenPic = pygame.image.load(os.path.join('Canteen4.png'))\n screen.blit(canteenPic, (150, 200))\n if k == 'Food Court 9':\n canteenPic = pygame.image.load(os.path.join('Canteen9.png'))\n screen.blit(canteenPic, (150, 200))\n if k == 'Food Court 11':\n canteenPic = pygame.image.load(os.path.join('Canteen11.png'))\n screen.blit(canteenPic, (150, 200))\n if k == 'Food Court 13':\n canteenPic = pygame.image.load(os.path.join('Canteen13.png'))\n screen.blit(canteenPic, (150, 200))\n if k == 'Food Court 14':\n canteenPic = pygame.image.load(os.path.join('Canteen14.png'))\n screen.blit(canteenPic, (150, 200))\n if k == 'Food Court 16':\n canteenPic = pygame.image.load(os.path.join('Canteen16.png'))\n 
screen.blit(canteenPic, (150, 200))\n if k == 'Tamarind Food Court':\n canteenPic = pygame.image.load(os.path.join('Tamarind.jpg'))\n screen.blit(canteenPic, (150, 200))\n if k == 'Pioneer Food Court':\n canteenPic = pygame.image.load(os.path.join('Pioneer.png'))\n screen.blit(canteenPic, (150, 200))\n if k == 'North Spine Food Court':\n canteenPic = pygame.image.load(os.path.join('NorthSpine.jpg'))\n screen.blit(canteenPic, (150, 200))\n if k == 'North Spine Plaza':\n canteenPic = pygame.image.load(os.path.join(\n 'NorthSpinePlaza.jpg'))\n screen.blit(canteenPic, (150, 200))\n if k == 'South Spine Food Court':\n canteenPic = pygame.image.load(os.path.join(\n 'SouthSpineKoufuFoodCourt.png'))\n screen.blit(canteenPic, (150, 200))\n if k == 'Quad Cafe':\n canteenPic = pygame.image.load(os.path.join('Quad.jpg'))\n screen.blit(canteenPic, (150, 200))\n if k == 'Coffee Bean':\n canteenPic = pygame.image.load(os.path.join('Coffee.jpg'))\n screen.blit(canteenPic, (150, 200))\n if k == 'North Hill Food Court':\n canteenPic = pygame.image.load(os.path.join('NorthHill.jpg'))\n screen.blit(canteenPic, (150, 200))\n text(str(canteenCount), screen, 110, yaxis)\n text('.', screen, 135, yaxis)\n text(k, screen, 150, yaxis)\n canteenCount += 1\n yaxis += 70\n\n\n<mask token>\n\n\ndef final_list(user_budget, user_cuisine, user_preference):\n new_list = []\n for i in canteen_list:\n if user_budget >= canteen_list[i][1]:\n new_list.append(i)\n for c in user_cuisine:\n for i in canteen_list:\n if c in canteen_list[i][2]:\n new_list.append(i)\n for c in user_preference:\n for i in canteen_list:\n if c in canteen_list[i][4]:\n new_list.append(i)\n new_list = list(set(new_list))\n if len(new_list) == 0:\n for i in canteen_list:\n new_list.append(i)\n return new_list\n\n\ndef calc_dis(x1, y1, x2, y2):\n return ((x1 - x2) ** 2 + (y1 - y2) ** 2) ** 1 / 2\n\n\ndef nearest_can(new_list, x, y):\n top3 = []\n copy_list = new_list.copy()\n while len(top3) != 3:\n j = copy_list[0]\n coor = 
canteen_list[j][5]\n Min = calc_dis(x, y, coor[0], coor[1])\n food_court = ''\n for k in copy_list:\n coor = canteen_list[k][5]\n dist = calc_dis(x, y, coor[0], coor[1])\n if Min >= dist:\n Min = dist\n food_court = k\n index = copy_list.index(food_court)\n copy_list.pop(index)\n top3.append(food_court)\n print(top3)\n return top3\n\n\n<mask token>\nmapButton = button((255, 255, 255), 200, 250, 500, 100, 'Canteen Customisation'\n )\npredictButton = button((255, 255, 255), 100, 400, 300, 100, 'Prediction')\nexitButton = button((255, 255, 255), 500, 400, 300, 100, 'Exit')\nhalalButton = button((255, 255, 255), 50, 120, 250, 50, 'Halal')\nvegButton = button((255, 255, 255), 320, 120, 250, 50, 'Vegetarian')\nnonhalalButton = button((255, 255, 255), 590, 120, 250, 50, 'Non-Halal')\nkoreanButton = button((255, 255, 255), 50, 270, 250, 50, 'Korean')\nmalayButton = button((255, 255, 255), 320, 270, 250, 50, 'Malay')\njapanButton = button((255, 255, 255), 590, 270, 250, 50, 'Japanese')\nchineseButton = button((255, 255, 255), 50, 340, 250, 50, 'Chinese')\nindianButton = button((255, 255, 255), 320, 340, 250, 50, 'Indian')\nwesternButton = button((255, 255, 255), 590, 340, 250, 50, 'Western')\nbutton3 = button((255, 255, 255), 235, 490, 70, 50, '$3')\nbutton5 = button((255, 255, 255), 355, 490, 70, 50, '$5')\nbutton7 = button((255, 255, 255), 475, 490, 70, 50, '$7')\nbutton9 = button((255, 255, 255), 595, 490, 70, 50, '$10')\nnextButton = button((255, 255, 255), 730, 580, 120, 70, 'Next')\ngpsButton = button((255, 255, 255), 700, 600, 170, 50, 'to Map')\nrestartButton = button((255, 255, 255), 700, 600, 190, 50, 'Restart?')\n<mask token>\nhalalButtonPressed = False\nvegButtonPressed = False\nnonhalalButtonPressed = False\nkoreanButtonPressed = False\nmalayButtonPressed = False\njapanButtonPressed = False\nchineseButtonPressed = False\nindianButtonPressed = False\nwesternButtonPressed = False\nbutton3Pressed = False\nbutton5Pressed = False\nbutton7Pressed = 
False\nbutton9Pressed = False\nnextButtonPressed = False\ngpsButtonPressed = False\ncheckButton = True\nmapCoor = False\ncustomisationMenu = False\nmapCoor2 = False\neasySearch = False\ncomplicatedMenu = False\noneTime = True\n<mask token>\npygame.init()\nrun = True\nclock = pygame.time.Clock()\nwhile run:\n if checkButton:\n redrawMainWin(screen)\n if customisationMenu:\n redrawCustWin(screen)\n if easySearch:\n if oneTime:\n nearest_canteen = redrawSearchWin(screen, x, y)\n sleep(2)\n oneTime = False\n gpsButton.draw(screen, (0, 0, 0))\n if complicatedMenu:\n if oneTime:\n complicatedSearchWin(screen, nearest_canteen)\n sleep(2)\n oneTime = False\n gpsButton.draw(screen, (0, 0, 0))\n if gpsButtonPressed == True:\n redrawGPSMap(screen, nearest_canteen, x, y)\n pygame.display.update()\n clock.tick(30)\n for event in pygame.event.get():\n pos = pygame.mouse.get_pos()\n if event.type == pygame.QUIT:\n run = False\n pygame.quit()\n if gpsButtonPressed:\n if event.type == pygame.MOUSEBUTTONDOWN:\n if restartButton.isOver(pos):\n restartButton.colour = 50, 50, 50\n restartButton.draw(screen, (0, 0, 0))\n pygame.display.update()\n print('clicked the restart button')\n halalButtonPressed = False\n vegButtonPressed = False\n nonhalalButtonPressed = False\n koreanButtonPressed = False\n malayButtonPressed = False\n japanButtonPressed = False\n chineseButtonPressed = False\n indianButtonPressed = False\n westernButtonPressed = False\n button3Pressed = False\n button5Pressed = False\n button7Pressed = False\n button9Pressed = False\n nextButtonPressed = False\n gpsButtonPressed = False\n checkButton = True\n mapCoor = False\n customisationMenu = False\n mapCoor2 = False\n easySearch = False\n complicatedMenu = False\n oneTime = True\n if event.type == pygame.MOUSEMOTION:\n if restartButton.isOver(pos):\n restartButton.colour = 0, 255, 0\n continue\n else:\n restartButton.colour = 255, 255, 255\n continue\n if easySearch == True or complicatedMenu == True:\n if event.type == 
pygame.MOUSEBUTTONDOWN:\n if gpsButton.isOver(pos):\n gpsButton.colour = 50, 50, 50\n gpsButton.draw(screen, (0, 0, 0))\n pygame.display.update()\n print('clicked gps button')\n gpsButtonPressed = True\n easySearch = False\n complicatedMenu = False\n continue\n if event.type == pygame.MOUSEMOTION:\n if gpsButton.isOver(pos):\n gpsButton.colour = 0, 255, 0\n continue\n else:\n gpsButton.colour = 255, 255, 255\n continue\n if checkButton:\n if event.type == pygame.MOUSEBUTTONDOWN:\n if mapButton.isOver(pos):\n mapButton.colour = 0, 255, 0\n redrawMainWin(screen)\n pygame.display.update()\n print('clicked map button')\n sleep(0.5)\n redrawMap(screen)\n checkButton = False\n mapCoor = True\n continue\n if predictButton.isOver(pos):\n predictButton.colour = 0, 255, 0\n redrawMainWin(screen)\n pygame.display.update()\n print('clicked predict button')\n sleep(0.5)\n redrawMap(screen)\n checkButton = False\n mapCoor2 = True\n continue\n if exitButton.isOver(pos):\n exitButton.colour = 0, 255, 0\n print('Exiting...')\n skeleExit(screen)\n pygame.quit()\n run = False\n exit()\n if event.type == pygame.MOUSEMOTION:\n if mapButton.isOver(pos):\n mapButton.colour = 255, 0, 0\n else:\n mapButton.colour = 255, 255, 255\n if predictButton.isOver(pos):\n predictButton.colour = 255, 0, 0\n else:\n predictButton.colour = 255, 255, 255\n if exitButton.isOver(pos):\n exitButton.colour = 255, 0, 0\n else:\n exitButton.colour = 255, 255, 255\n if customisationMenu:\n if event.type == pygame.MOUSEMOTION:\n if nextButton.isOver(pos):\n nextButton.colour = 0, 0, 255\n else:\n nextButton.colour = 255, 255, 255\n continue\n if event.type == pygame.MOUSEBUTTONDOWN:\n if nextButton.isOver(pos):\n nextButton.colour = 255, 255, 0\n nextButtonPressed = True\n customisationMenu = False\n continue\n if halalButton.isOver(pos):\n if halalButtonPressed == False:\n if nonhalalButtonPressed:\n nonhalalButton.colour = 255, 255, 255\n nonhalalButtonPressed = False\n halalButton.colour = 0, 255, 0\n 
print('clicked Halal button')\n halalButtonPressed = True\n continue\n else:\n halalButton.colour = 255, 255, 255\n halalButtonPressed = False\n continue\n if vegButton.isOver(pos):\n if vegButtonPressed == False:\n if nonhalalButtonPressed:\n nonhalalButton.colour = 255, 255, 255\n nonhalalButtonPressed = False\n vegButton.colour = 0, 255, 0\n print('clicked Vegetarian button')\n vegButtonPressed = True\n continue\n else:\n vegButton.colour = 255, 255, 255\n vegButtonPressed = False\n continue\n if nonhalalButton.isOver(pos):\n if nonhalalButtonPressed == False:\n if halalButtonPressed:\n halalButton.colour = 255, 255, 255\n halalButtonPressed = False\n if vegButtonPressed:\n vegButton.colour = 255, 255, 255\n vegButtonPressed = False\n nonhalalButton.colour = 0, 255, 0\n print('clicked non-halal button')\n nonhalalButtonPressed = True\n continue\n else:\n nonhalalButton.colour = 255, 255, 255\n nonhalalButtonPressed = False\n if koreanButton.isOver(pos):\n if koreanButtonPressed == False:\n koreanButton.colour = 0, 255, 0\n print('clicked korean button')\n koreanButtonPressed = True\n continue\n else:\n koreanButton.colour = 255, 255, 255\n koreanButtonPressed = False\n if malayButton.isOver(pos):\n if malayButtonPressed == False:\n malayButton.colour = 0, 255, 0\n print('clicked Malay button')\n malayButtonPressed = True\n continue\n else:\n malayButton.colour = 255, 255, 255\n malayButtonPressed = False\n if japanButton.isOver(pos):\n if japanButtonPressed == False:\n japanButton.colour = 0, 255, 0\n print('clicked japan button')\n japanButtonPressed = True\n continue\n else:\n japanButton.colour = 255, 255, 255\n japanButtonPressed = False\n if chineseButton.isOver(pos):\n if chineseButtonPressed == False:\n chineseButton.colour = 0, 255, 0\n print('clicked chinese button')\n chineseButtonPressed = True\n continue\n else:\n chineseButton.colour = 255, 255, 255\n chineseButtonPressed = False\n if indianButton.isOver(pos):\n if indianButtonPressed == False:\n 
indianButton.colour = 0, 255, 0\n print('clicked indian button')\n indianButtonPressed = True\n continue\n else:\n indianButton.colour = 255, 255, 255\n indianButtonPressed = False\n if westernButton.isOver(pos):\n if westernButtonPressed == False:\n westernButton.colour = 0, 255, 0\n print('clicked western button')\n westernButtonPressed = True\n continue\n else:\n westernButton.colour = 255, 255, 255\n westernButtonPressed = False\n if button3.isOver(pos):\n if button3Pressed == False:\n if button5Pressed == True:\n button5.colour = 255, 255, 255\n button5Pressed = False\n if button7Pressed == True:\n button7.colour = 255, 255, 255\n button7Pressed = False\n if button9Pressed == True:\n button9.colour = 255, 255, 255\n button9Pressed = False\n button3.colour = 0, 255, 0\n print('clicked $3')\n button3Pressed = True\n continue\n else:\n button3.colour = 255, 255, 255\n button3Pressed = False\n if button5.isOver(pos):\n if button5Pressed == False:\n if button3Pressed == True:\n button3.colour = 255, 255, 255\n button3Pressed = False\n if button7Pressed == True:\n button7.colour = 255, 255, 255\n button7Pressed = False\n if button9Pressed == True:\n button9.colour = 255, 255, 255\n button9Pressed = False\n button5.colour = 0, 255, 0\n print('Clicked $5')\n button5Pressed = True\n continue\n else:\n button5.colour = 255, 255, 255\n button5Pressed = False\n if button7.isOver(pos):\n if button7Pressed == False:\n if button3Pressed == True:\n button3.colour = 255, 255, 255\n button3Pressed = False\n if button5Pressed == True:\n button5.colour = 255, 255, 255\n button5Pressed = False\n if button9Pressed == True:\n button9.colour = 255, 255, 255\n button9Pressed = False\n button7.colour = 0, 255, 0\n print('Clicked $7')\n button7Pressed = True\n continue\n else:\n button7.colour = 255, 255, 255\n button7Pressed = False\n if button9.isOver(pos):\n if button9Pressed == False:\n if button3Pressed == True:\n button3.colour = 255, 255, 255\n button3Pressed = False\n if 
button5Pressed == True:\n button5.colour = 255, 255, 255\n button5Pressed = False\n if button7Pressed == True:\n button7.colour = 255, 255, 255\n button7Pressed = False\n button9.colour = 0, 255, 0\n print('Clicked $10')\n button9Pressed = True\n continue\n else:\n button9.colour = 255, 255, 255\n button9Pressed = False\n if mapCoor == True and event.type == pygame.MOUSEBUTTONDOWN:\n mouseclick = mouseClick(screen)\n if mouseclick[0]:\n pygame.display.update()\n x = mouseclick[1]\n y = mouseclick[2]\n print(x, ',', y)\n mapCoor = False\n sleep(1)\n customisationMenu = True\n if mapCoor2 == True and event.type == pygame.MOUSEBUTTONDOWN:\n mouseclick = mouseClick(screen)\n if mouseclick[0]:\n pygame.display.update()\n x = mouseclick[1]\n y = mouseclick[2]\n print(x, ',', y)\n mapCoor2 = False\n sleep(1)\n loading(screen)\n easySearch = True\n if nextButtonPressed:\n sleep(1)\n loading(screen)\n user_prefList = []\n user_cuisineList = []\n user_budget = 0\n if halalButtonPressed:\n user_prefList.append('Halal')\n if vegButtonPressed:\n user_prefList.append('Vegetarian')\n if nonhalalButtonPressed:\n user_prefList.append('Non-Halal/Non-Vegetarian')\n if koreanButtonPressed:\n user_cuisineList.append('Korean')\n if malayButtonPressed:\n user_cuisineList.append('Malay')\n if japanButtonPressed:\n user_cuisineList.append('Japanese')\n if chineseButtonPressed:\n user_cuisineList.append('Chinese')\n if indianButtonPressed:\n user_cuisineList.append('Indian')\n if westernButtonPressed:\n user_cuisineList.append('Western')\n if button3Pressed:\n user_budget = 3\n if button5Pressed:\n user_budget = 5\n if button7Pressed:\n user_budget = 7\n if button9Pressed:\n user_budget = 9\n print(user_cuisineList)\n print(user_prefList)\n print(user_budget)\n finalID = final_list(user_budget, user_cuisineList, user_prefList)\n print(finalID)\n nearest_canteen = nearest_can(finalID, x, y)\n print(nearest_canteen)\n sleep(1)\n nextButtonPressed = False\n complicatedMenu = True\n",
"step-5": "\r\n\r\nimport pygame\r\nimport os\r\nfrom time import sleep\r\n\r\nscreen = pygame.display.set_mode((900,700))\r\nscreen.fill((255,255,255))\r\npygame.display.set_caption(\"NTUFOODIERECOMMENDSYSTEM\")\r\n\r\n'''\r\n###########################\r\n──╔╗────╔╗\r\n──║║───╔╝╚╗\r\n╔═╝╠╦══╬╗╔╬╦══╦═╗╔══╦═╦╗─╔╗\r\n║╔╗╠╣╔═╝║║╠╣╔╗║╔╗╣╔╗║╔╣║─║║\r\n║╚╝║║╚═╗║╚╣║╚╝║║║║╔╗║║║╚═╝║\r\n╚══╩╩══╝╚═╩╩══╩╝╚╩╝╚╩╝╚═╗╔╝\r\n──────────────────────╔═╝║\r\n──────────────────────╚══╝\r\n###########################\r\n● Database is stored on site.\r\n● Updating is relatively simple.\r\n● Programme runs on the basis of pygame, it's hard to update it without text input.\r\n● However, it can easily be done so on shell/console accordingly. \r\n'''\r\n# Food court lists is sorted by [Highest Cost, Lowest Cost, Cuisines Available, Closing Time, Food Preferences Available, Coordinates on NTU Map] ; THE items have keys and corresponding values expressed as a pair, key: value\r\n# where the keys would be that of the canteen names and this would be associated with that of the corresponding properties tht is alloted to it. 
\r\ncanteen_list = {\r\n \"Food Court 1\": [12, 3.5, [\"Korean\", \"Japanese\", \"Western\"], 2100, [\"Halal\", \"Non-Halal/Non-Vegetarian\"], (442, 473)],\r\n \"Food Court 2\": [10, 3.6, [\"Korean\", \"Chinese\", \"Malay\", ], 2100, [\"Halal\", \"Vegetarian\", \"Non-Halal/Non-Vegetarian\"], (477, 409)],\r\n \"Food Court 4\": [10, 3, [\"Chinese\", \"Western\"], 2100, [\"Non-Halal/Non-Vegetarian\"], (358,526)],\r\n \"Food Court 9\": [10, 3.5, [\"Chinese\"], 2100, [\"Halal\", \"Vegetarian\", \"Non-Halal/Non-Vegetarian\"], (582, 288)],\r\n \"Food Court 11\": [10, 2.5, [\"Chinese\", \"Indian\", \"Japanese\", \"Western\"], 2100, [\"Halal\", \"Vegetarian\", \"Non-Halal/Non-Vegetarian\"], (682, 243)],\r\n \"Food Court 13\": [9, 2, [\"Western\", \"Korean\", \"Japanese\", \"Chinese\"], 2100, [\"Halal\", \"Vegetarian\", \"Non-Halal/Non-Vegetarian\"], (445, 176)],\r\n \"Food Court 14\": [8, 3, [\"Western\", \"Chinese\", \"Korean\", \"Malay\"], 2100, [\"Halal\", \"Vegetarian\", \"Non-Halal/Non-Vegetarian\"], (509, 182)],\r\n \"Food Court 16\": [10, 3.3, [\"Japanese\", \"Chinese\", \"Korean\", \"Indian\"], 2100, [\"Halal\", \"Vegetarian\", \"Non-Halal/Non-Vegetarian\"], (405, 221)],\r\n \"Tamarind Food Court\": [10, 3, [\"Malay\", \"Chinese\", \"Korean\", \"Western\"], 2100, [\"Halal\", \"Non-Halal\", \"Vegetarian\",\"Non-Halal/Non-Vegetarian\"], (627, 200)],\r\n \"Pioneer Food Court\": [20, 2.3, [\"Thai\", \"Chinese\"], 0000, [\"Vegetarian\", \"Non-Halal/Non-Vegetarian\"], (497, 561)],\r\n \"North Spine Food Court\": [10, 2.5, [\"Korean\", \"Japanese\", \"Chinese\", \"Western\", \"Malay\"], 2100, [\"Vegetarian\", \"Non-Halal/Non-Vegetarian\"], (275, 293)],\r\n \"North Spine Plaza\": [10, 4, [\"Western\", \"Korean\"], 2130, [\"Vegetarian\", \"Halal\", \"Non-Halal/Non-Vegetarian\"], (287, 339)],\r\n \"South Spine Food Court\": [10, 2, [\"Chinese\", \"Malay\", \"Korean\", \"Japanese\", \"Western\"], 2100, [\"Vegetarian\", \"Halal\", \"Non-Halal/Non-Vegetarian\"], (227, 496)],\r\n 
\"Quad Cafe\": [10, 2.4, [\"Korean\", \"Chinese\", \"Indian\", \"Malay\"], 2100, [\"Vegetarian\", \"Halal\", \"Non-Halal/Non-Vegetarian\"], (224, 351)],\r\n \"Coffee Bean\": [20, 4, [\"Western\"], 2000, [\"Vegetarian\", \"Halal\", \"Non-Halal/Non-Vegetarian\"], (219, 389)],\r\n \"North Hill Food Court\": [10, 3.8, [\"Chinese\", \"Malay\", \"Indian\"], 2100, [\"Vegetarian\", \"Halal\", \"Non-Halal/Non-Vegetarian\"], (720,314)]\r\n }\r\n\r\n'''\r\n###########################################\r\n───╔╗───────────╔═╗─────╔╗─────╔╗─╔╗\r\n───║║───────────║╔╝─────║║────╔╝╚╦╝╚╗\r\n╔══╣║╔══╦══╦══╗╔╝╚╦══╦═╗║╚═╦╗╔╬╗╔╩╗╔╬══╦═╗\r\n║╔═╣║║╔╗║══╣══╣╚╗╔╣╔╗║╔╝║╔╗║║║║║║─║║║╔╗║╔╗╗\r\n║╚═╣╚╣╔╗╠══╠══║─║║║╚╝║║─║╚╝║╚╝║║╚╗║╚╣╚╝║║║║\r\n╚══╩═╩╝╚╩══╩══╝─╚╝╚══╩╝─╚══╩══╝╚═╝╚═╩══╩╝╚╝\r\n###########################################\r\n● We had help from online tutorials to workout the UI buttons functionality. \r\n● A bit of corresponding tweaks incorporating into project from the tutorial that I learnt from\r\n● ref: https://www.youtube.com/watch?v=4_9twnEduFA\r\n'''\r\nclass button():\r\n def __init__(self, colour, x, y, width, height, text=''):\r\n self.colour = colour\r\n self.x = x\r\n self.y = y\r\n self.width = width\r\n self.height = height\r\n self.text = text\r\n\r\n def draw(self,win,outline = None):\r\n if outline:\r\n #draw a bigger rectangle behind to create a border\r\n pygame.draw.rect(win, outline, (self.x-2, self.y-2, self.width+4, self.height+4),0)\r\n #draws the button rectangle\r\n pygame.draw.rect(win, self.colour, (self.x, self.y, self.width, self.height),0)\r\n\r\n if self.text != '':\r\n font = pygame.font.SysFont('calligrapher.ttf', 60)\r\n text = font.render(self.text, 1, (0,0,0))\r\n win.blit(text, (self.x + (self.width/2 - text.get_width()/2), self.y + (self.height/2 - text.get_height()/2)))\r\n\r\n def isOver(self, pos):\r\n #pos is the mouse position (x,y) coordinates\r\n if pos[0] > self.x and pos[0] < self.x + self.width:\r\n if pos[1] > self.y and pos[1] < self.y + 
self.height:\r\n return True\r\n else: \r\n return False\r\n\r\n'''\r\n##################################\r\n─╔═╗─────────╔╗\r\n─║╔╝────────╔╝╚╗\r\n╔╝╚╦╗╔╦═╗╔══╬╗╔╬╦══╦═╗╔══╗\r\n╚╗╔╣║║║╔╗╣╔═╝║║╠╣╔╗║╔╗╣══╣\r\n─║║║╚╝║║║║╚═╗║╚╣║╚╝║║║╠══║\r\n─╚╝╚══╩╝╚╩══╝╚═╩╩══╩╝╚╩══╝\r\n##################################\r\n╔═╗────────╔╗\r\n║═╬═╦╦╗╔═╦╦╬╣\r\n║╔╣╬║╔╝║╬║║║║\r\n╚╝╚═╩╝─╠╗╠═╩╝\r\n───────╚═╝\r\n#################\r\n● Most of the functions here help to draw out the different states of the screen, that the screen could be in\r\n● The redraw functions help to update the display based on it's respective transitory states\r\n'''\r\n#3 functions here controls the Surface Text appearancese\r\ndef text(text,win,x,y):\r\n font = pygame.font.SysFont('freesansbold.ttf', 50)\r\n phrase = font.render(text, 1, (0,0,0))\r\n win.blit(phrase, (x,y))\r\n\r\ndef instructionText(text,win,x,y):\r\n font = pygame.font.SysFont('Arial', 20)\r\n phrase = font.render(text, 1, (0,0,0))\r\n win.blit(phrase, (x,y))\r\n\r\ndef header(text,win,x,y):\r\n font = pygame.font.SysFont('TimesNewRoman', 70)\r\n phrase = font.render(text, 1, (0,0,0))\r\n win.blit(phrase, (x,y))\r\n\r\ndef mouseClick(screen):\r\n #checks for mouseclick event, and fetches corresp. 
positions \r\n x,y = pygame.mouse.get_pos()\r\n \r\n if (x >= 65 and x <=727) and (y >=82 and y <= 618):\r\n #print(event.button)\r\n pygame.draw.circle(screen, (255,0,150), (x,y), 15)\r\n return True, x, y\r\n else:\r\n print(\"Out of bounds!\")\r\n return False, x, y\r\n\r\ndef skeleExit(win):\r\n #exit event\r\n aryadelight = pygame.image.load(os.path.join(\"NTUFoodieRecsv1.png\"))\r\n win.blit(aryadelight,(0,0))\r\n pygame.display.update()\r\n xaxis = 100\r\n for i in range(1,42):\r\n image = str(i) + \".png\"\r\n skele = pygame.image.load(os.path.join(image))\r\n win.blit(skele, (250,200))\r\n text(\"Exiting...\", win, (xaxis+20), 600)\r\n pygame.display.update()\r\n sleep(0.09)\r\n\r\ndef loading(win):\r\n #loading screen, slep interval defined as 0.3 seconds to load subs. frame \r\n x = 0\r\n while x < 3:\r\n load0 = pygame.image.load(os.path.join(\"load0.png\"))\r\n win.blit(load0, (0,0))\r\n pygame.display.update()\r\n sleep(0.3)\r\n load1 = pygame.image.load(os.path.join(\"load1.png\"))\r\n win.blit(load1, (0,0))\r\n pygame.display.update()\r\n sleep(0.3)\r\n load2 = pygame.image.load(os.path.join(\"load2.png\"))\r\n win.blit(load2, (0,0))\r\n pygame.display.update()\r\n sleep(0.3)\r\n load3 = pygame.image.load(os.path.join(\"load3.png\"))\r\n win.blit(load3, (0,0))\r\n pygame.display.update()\r\n sleep(0.3)\r\n x += 1\r\n# ---------------------------------------------------------------------------# \r\ndef redrawMap(screen):\r\n #draws the embedded NTU map image provided \r\n NTUmap = pygame.image.load(os.path.join(\"NTUMap.jpg\"))\r\n screen.blit(NTUmap, (0,0))\r\n for x in range(50,900,50):\r\n #y axial grids\r\n pygame.draw.rect(screen, (255,0,0), (x, 0, 1, 700), 0)\r\n for y in range(50,700,50):\r\n #x axial grids\r\n pygame.draw.rect(screen, (255,0,0), (0, y, 900, 1), 0)\r\n text('Please click on your current location!',screen,200,100)\r\n\r\ndef redrawGPSMap(screen, top3, x, y):\r\n #redraw NTU map, but this time with corresponding location 
coordinates\r\n NTUmap = pygame.image.load(os.path.join(\"NTUMap.jpg\"))\r\n screen.blit(NTUmap, (0,0))\r\n redGPS = pygame.image.load(os.path.join(\"redgps.png\"))\r\n screen.blit(redGPS, (x-16,y-32))\r\n instructionText(\"You are currently at this position.\", screen, x+4, y-10)\r\n counter = 1\r\n for i in top3:\r\n coor = canteen_list[i][5]\r\n if counter == 1:\r\n blueGPS = pygame.image.load(os.path.join(\"bluegps.png\"))\r\n screen.blit(blueGPS, (coor[0]-12,coor[1]-24))\r\n instructionText(i, screen, coor[0]-24, coor[1])\r\n pass\r\n if counter == 2:\r\n blackGPS = pygame.image.load(os.path.join(\"blackgps.png\"))\r\n screen.blit(blackGPS, (coor[0]-12,coor[1]-24))\r\n instructionText(i, screen, coor[0]-24, coor[1])\r\n pass\r\n if counter == 3:\r\n yellowGPS = pygame.image.load(os.path.join(\"yellowgps.png\"))\r\n screen.blit(yellowGPS, (coor[0]-12,coor[1]-24))\r\n instructionText(i, screen, coor[0]-24, coor[1])\r\n pass\r\n counter += 1\r\n restartButton.draw(screen, (0,0,0))\r\n\r\ndef redrawMainWin(screen):\r\n #functionality that controls what is displayed on the main window\r\n aryadelight = pygame.image.load(os.path.join(\"NTUFoodieRecsv1.png\"))\r\n screen.blit(aryadelight,(0,0))\r\n mapButton.draw(screen, (0,0,0))\r\n instructionText(\"(Choose your cuisines, preferences and budget for the meal here!)\",screen,215,320)\r\n predictButton.draw(screen, (0,0,0))\r\n instructionText(\"(Find the nearest canteen!)\",screen,132,470)\r\n exitButton.draw(screen, (0,0,0))\r\n ice = pygame.image.load(os.path.join(\"ice.png\"))\r\n screen.blit(ice, (500,670))\r\n font = pygame.font.SysFont('verdana', 20)\r\n creator = font.render(\"Made by HweeHean X Arya\", 1, (0,0,200))\r\n screen.blit(creator, (535,670))\r\n\r\ndef redrawCustWin(screen):\r\n #controls what is displayed on the customisation window\r\n bp = pygame.image.load(os.path.join(\"gradient.jpg\"))\r\n screen.blit(bp,(0,0))\r\n instructionText('Left click again to reset!',screen,300,20)\r\n text('Please 
select your food preference: ', screen, 100, 50)\r\n halalButton.draw(screen, (0,0,0))\r\n vegButton.draw(screen, (0,0,0))\r\n nonhalalButton.draw(screen, (0,0,0))\r\n text('Please select your cuisine type: ', screen, 100, 200)\r\n koreanButton.draw(screen, (0,0,0))\r\n malayButton.draw(screen, (0,0,0))\r\n japanButton.draw(screen, (0,0,0))\r\n chineseButton.draw(screen, (0,0,0))\r\n indianButton.draw(screen, (0,0,0))\r\n westernButton.draw(screen, (0,0,0))\r\n text('Please select your maximum budget: ', screen, 100, 430)\r\n button3.draw(screen, (0,0,0))\r\n button5.draw(screen, (0,0,0))\r\n button7.draw(screen, (0,0,0))\r\n button9.draw(screen, (0,0,0))\r\n nextButton.draw(screen, (0,0,0))\r\n\r\ndef redrawSearchWin(screen,x,y):\r\n #gives the top 3 most relevant results for the prediction tab\r\n bp = pygame.image.load(os.path.join(\"NTUFoodieRecsv1.png\"))\r\n screen.blit(bp,(0,0))\r\n GordonRamsay = pygame.image.load(os.path.join(\"GordonRamsay.png\"))\r\n screen.blit(GordonRamsay, (400,100))\r\n distList = []\r\n for i in canteen_list:\r\n distList.append(i)\r\n print(distList)\r\n top3 = nearest_can(distList, x, y)\r\n print(top3)\r\n text(\"Nearest Canteen:\",screen,110,400)\r\n yaxis = 490\r\n canteenCount = 1\r\n for k in top3:\r\n if canteenCount == 1:\r\n if k == \"Food Court 1\":\r\n canteenPic = pygame.image.load(os.path.join(\"Canteen1.jpg\"))\r\n screen.blit(canteenPic, (150,200))\r\n if k == \"Food Court 2\":\r\n canteenPic = pygame.image.load(os.path.join(\"Canteen2.png\"))\r\n screen.blit(canteenPic, (150,200))\r\n if k == \"Food Court 4\":\r\n canteenPic = pygame.image.load(os.path.join(\"Canteen4.png\"))\r\n screen.blit(canteenPic, (150,200))\r\n if k == \"Food Court 9\":\r\n canteenPic = pygame.image.load(os.path.join(\"Canteen9.png\"))\r\n screen.blit(canteenPic, (150,200))\r\n if k == \"Food Court 11\":\r\n canteenPic = pygame.image.load(os.path.join(\"Canteen11.png\"))\r\n screen.blit(canteenPic, (150,200))\r\n if k == \"Food Court 
13\":\r\n canteenPic = pygame.image.load(os.path.join(\"Canteen13.png\"))\r\n screen.blit(canteenPic, (150,200))\r\n if k == \"Food Court 14\":\r\n canteenPic = pygame.image.load(os.path.join(\"Canteen14.png\"))\r\n screen.blit(canteenPic, (150,200))\r\n if k == \"Food Court 16\":\r\n canteenPic = pygame.image.load(os.path.join(\"Canteen16.png\"))\r\n screen.blit(canteenPic, (150,200))\r\n if k == \"Tamarind Food Court\":\r\n canteenPic = pygame.image.load(os.path.join(\"Tamarind.jpg\"))\r\n screen.blit(canteenPic, (150,200))\r\n if k == \"Pioneer Food Court\":\r\n canteenPic = pygame.image.load(os.path.join(\"Pioneer.png\"))\r\n screen.blit(canteenPic, (150,200))\r\n if k == \"North Spine Food Court\":\r\n canteenPic = pygame.image.load(os.path.join(\"NorthSpine.jpg\"))\r\n screen.blit(canteenPic, (150,200))\r\n if k == \"North Spine Plaza\":\r\n canteenPic = pygame.image.load(os.path.join(\"NorthSpinePlaza.jpg\"))\r\n screen.blit(canteenPic, (150,200))\r\n if k == \"South Spine Food Court\":\r\n canteenPic = pygame.image.load(os.path.join(\"SouthSpineKoufuFoodCourt.png\"))\r\n screen.blit(canteenPic, (150,200))\r\n if k == \"Quad Cafe\":\r\n canteenPic = pygame.image.load(os.path.join(\"Quad.jpg\"))\r\n screen.blit(canteenPic, (150,200))\r\n if k == \"Coffee Bean\":\r\n canteenPic = pygame.image.load(os.path.join(\"Coffee.jpg\"))\r\n screen.blit(canteenPic, (150,200))\r\n if k == \"North Hill Food Court\":\r\n canteenPic = pygame.image.load(os.path.join(\"NorthHill.jpg\"))\r\n screen.blit(canteenPic, (150,200))\r\n text(str(canteenCount), screen, 110, yaxis)\r\n text(\".\", screen, 135, yaxis)\r\n text(k,screen,150,yaxis)\r\n canteenCount += 1\r\n yaxis += 70\r\n return top3\r\n\r\ndef complicatedSearchWin(screen,top3):\r\n #displays the top3 results for the end user after clicking customisation\r\n bp = pygame.image.load(os.path.join(\"NTUFoodieRecsv1.png\"))\r\n screen.blit(bp,(0,0))\r\n GordonRamsay = pygame.image.load(os.path.join(\"GordonRamsay.png\"))\r\n 
screen.blit(GordonRamsay, (400,100))\r\n text(\"Nearest Canteen:\",screen,110,400)\r\n yaxis = 490\r\n canteenCount = 1\r\n for k in top3:\r\n if canteenCount == 1:\r\n if k == \"Food Court 1\":\r\n canteenPic = pygame.image.load(os.path.join(\"Canteen1.jpg\"))\r\n screen.blit(canteenPic, (150,200))\r\n if k == \"Food Court 2\":\r\n canteenPic = pygame.image.load(os.path.join(\"Canteen2.png\"))\r\n screen.blit(canteenPic, (150,200))\r\n if k == \"Food Court 4\":\r\n canteenPic = pygame.image.load(os.path.join(\"Canteen4.png\"))\r\n screen.blit(canteenPic, (150,200))\r\n if k == \"Food Court 9\":\r\n canteenPic = pygame.image.load(os.path.join(\"Canteen9.png\"))\r\n screen.blit(canteenPic, (150,200))\r\n if k == \"Food Court 11\":\r\n canteenPic = pygame.image.load(os.path.join(\"Canteen11.png\"))\r\n screen.blit(canteenPic, (150,200))\r\n if k == \"Food Court 13\":\r\n canteenPic = pygame.image.load(os.path.join(\"Canteen13.png\"))\r\n screen.blit(canteenPic, (150,200))\r\n if k == \"Food Court 14\":\r\n canteenPic = pygame.image.load(os.path.join(\"Canteen14.png\"))\r\n screen.blit(canteenPic, (150,200))\r\n if k == \"Food Court 16\":\r\n canteenPic = pygame.image.load(os.path.join(\"Canteen16.png\"))\r\n screen.blit(canteenPic, (150,200))\r\n if k == \"Tamarind Food Court\":\r\n canteenPic = pygame.image.load(os.path.join(\"Tamarind.jpg\"))\r\n screen.blit(canteenPic, (150,200))\r\n if k == \"Pioneer Food Court\":\r\n canteenPic = pygame.image.load(os.path.join(\"Pioneer.png\"))\r\n screen.blit(canteenPic, (150,200))\r\n if k == \"North Spine Food Court\":\r\n canteenPic = pygame.image.load(os.path.join(\"NorthSpine.jpg\"))\r\n screen.blit(canteenPic, (150,200))\r\n if k == \"North Spine Plaza\":\r\n canteenPic = pygame.image.load(os.path.join(\"NorthSpinePlaza.jpg\"))\r\n screen.blit(canteenPic, (150,200))\r\n if k == \"South Spine Food Court\":\r\n canteenPic = pygame.image.load(os.path.join(\"SouthSpineKoufuFoodCourt.png\"))\r\n screen.blit(canteenPic, 
(150,200))\r\n if k == \"Quad Cafe\":\r\n canteenPic = pygame.image.load(os.path.join(\"Quad.jpg\"))\r\n screen.blit(canteenPic, (150,200))\r\n if k == \"Coffee Bean\":\r\n canteenPic = pygame.image.load(os.path.join(\"Coffee.jpg\"))\r\n screen.blit(canteenPic, (150,200))\r\n if k == \"North Hill Food Court\":\r\n canteenPic = pygame.image.load(os.path.join(\"NorthHill.jpg\"))\r\n screen.blit(canteenPic, (150,200))\r\n text(str(canteenCount), screen, 110, yaxis)\r\n text(\".\", screen, 135, yaxis)\r\n text(k,screen,150,yaxis)\r\n canteenCount += 1\r\n yaxis += 70\r\n\r\n'''\r\n╔═╗────╔═╗───╔╗╔╗\r\n║═╬═╦╦╗║═╬═╦╦╣╚╬╬═╦╦═╗\r\n║╔╣╬║╔╝╠═║╬║╔╣╔╣║║║║╬║\r\n╚╝╚═╩╝─╚═╩═╩╝╚═╩╩╩═╬╗║\r\n───────────────────╚═╝\r\n###########################\r\n● Functions below control how we do the sorting for the distance\r\n and the different cuisines\r\n'''\r\n#function provided by ARYA\r\n#function to compile a list of all the relevant food courts\r\ndef final_list(user_budget, user_cuisine, user_preference):\r\n new_list = []\r\n\r\n #Creating a list of all food courts that fit in the user's budget\r\n for i in canteen_list:\r\n if user_budget >= canteen_list[i][1]:\r\n new_list.append(i) \r\n \r\n #Creating a list of all food courts according to the imposed constraints on cuisine\r\n for c in user_cuisine:\r\n for i in canteen_list:\r\n if c in canteen_list[i][2]:\r\n new_list.append(i)\r\n\r\n #Adding to the list, all the food courts according to the food preferences specified \r\n for c in user_preference:\r\n for i in canteen_list:\r\n if c in canteen_list[i][4]:\r\n new_list.append(i)\r\n\r\n #eliminating all the repeated options\r\n new_list = list(set(new_list))\r\n\r\n #if new_list is empty due to no selection made\r\n if len(new_list) == 0:\r\n for i in canteen_list:\r\n new_list.append(i)\r\n return(new_list)\r\n\r\n#function to calulate the horizontal distance from you to proposed option\r\ndef calc_dis(x1, y1, x2, y2):\r\n return ((x1-x2)**2 + (y1-y2)**2)**1/2\r\n\r\n#function 
to find out the nearest suitable food outlet/food court\r\ndef nearest_can(new_list, x, y):\r\n top3 = []\r\n copy_list = new_list.copy()\r\n while len(top3) != 3:\r\n j = copy_list[0]\r\n coor = canteen_list[j][5]\r\n Min = calc_dis(x, y, coor[0], coor[1])\r\n food_court = ''\r\n for k in copy_list:\r\n #coordinates of the food court\r\n coor = canteen_list[k][5]\r\n dist = calc_dis(x, y, coor[0], coor[1])\r\n if Min >= dist:\r\n Min = dist\r\n food_court = k\r\n index = copy_list.index(food_court)\r\n copy_list.pop(index)\r\n top3.append(food_court)\r\n print(top3)\r\n return top3\r\n\r\n'''\r\n#########################\r\n╔╗─────╔╗─╔╗\r\n║║────╔╝╚╦╝╚╗\r\n║╚═╦╗╔╬╗╔╩╗╔╬══╦═╗╔══╗\r\n║╔╗║║║║║║─║║║╔╗║╔╗╣══╣\r\n║╚╝║╚╝║║╚╗║╚╣╚╝║║║╠══║\r\n╚══╩══╝╚═╝╚═╩══╩╝╚╩══╝\r\n#########################\r\n● This is where the buttons are defined. Using the class...\r\n● They are relatively self-explanatory\r\n'''\r\n\r\n#buttons for the main loading page:\r\nmapButton = button((255,255,255), 200, 250, 500, 100, 'Canteen Customisation')\r\npredictButton = button((255,255,255), 100, 400, 300, 100, 'Prediction')\r\nexitButton = button((255,255,255), 500, 400, 300, 100, 'Exit')\r\n\r\n#buttons for the custimisation screen:\r\nhalalButton = button((255,255,255), 50, 120, 250, 50, 'Halal')\r\nvegButton = button((255,255,255), 320, 120, 250, 50, 'Vegetarian')\r\nnonhalalButton = button((255,255,255), 590, 120, 250, 50, 'Non-Halal')\r\nkoreanButton = button((255,255,255), 50, 270, 250, 50, 'Korean')\r\nmalayButton = button((255,255,255), 320, 270, 250, 50, 'Malay')\r\njapanButton = button((255,255,255), 590, 270, 250, 50, 'Japanese')\r\nchineseButton = button((255,255,255), 50, 340, 250, 50, 'Chinese')\r\nindianButton = button((255,255,255), 320, 340, 250, 50, 'Indian')\r\nwesternButton = button((255,255,255), 590, 340, 250, 50, 'Western')\r\nbutton3 = button((255,255,255), 235, 490, 70, 50, '$3')\r\nbutton5 = button((255,255,255), 355, 490, 70, 50, '$5')\r\nbutton7 = button((255,255,255), 
475, 490, 70, 50, '$7')\r\nbutton9 = button((255,255,255), 595, 490, 70, 50, '$10')\r\nnextButton = button((255,255,255), 730, 580, 120, 70, 'Next')\r\n\r\n#buttons to showcase GPS:\r\ngpsButton = button((255,255,255), 700, 600, 170, 50, 'to Map')\r\nrestartButton = button((255,255,255), 700, 600, 190, 50, 'Restart?')\r\n\r\n'''\r\n#############################\r\n────╔╗────╔╗\r\n───╔╝╚╗──╔╝╚╗\r\n╔══╬╗╔╬══╬╗╔╬══╦══╗\r\n║══╣║║║╔╗║║║║║═╣══╣\r\n╠══║║╚╣╔╗║║╚╣║═╬══║\r\n╚══╝╚═╩╝╚╝╚═╩══╩══╝\r\n#############################\r\n● Since I'm only using one while loop and all the functions are in here,\r\n it is important to note that none of the \"if\" statements interfere with\r\n each other\r\n● Acts like a flip-flop which stores the data of the different STATES\r\n'''\r\n#originalstate of customisation buttons\r\nhalalButtonPressed = False\r\nvegButtonPressed = False\r\nnonhalalButtonPressed = False\r\nkoreanButtonPressed = False\r\nmalayButtonPressed = False\r\njapanButtonPressed = False\r\nchineseButtonPressed = False\r\nindianButtonPressed = False\r\nwesternButtonPressed = False\r\nbutton3Pressed = False\r\nbutton5Pressed = False\r\nbutton7Pressed = False\r\nbutton9Pressed = False\r\nnextButtonPressed = False\r\ngpsButtonPressed = False\r\n\r\n#original state of events\r\ncheckButton = True\r\nmapCoor = False\r\ncustomisationMenu = False\r\nmapCoor2 = False\r\neasySearch = False\r\ncomplicatedMenu = False\r\noneTime = True\r\n\r\n'''\r\n####################################\r\n╔═╗╔═╗───────╔═══╗\r\n║║╚╝║║───────║╔═╗║\r\n║╔╗╔╗╠══╦╦═╗─║╚═╝╠═╦══╦══╦═╦══╦╗╔╗\r\n║║║║║║╔╗╠╣╔╗╗║╔══╣╔╣╔╗║╔╗║╔╣╔╗║╚╝║\r\n║║║║║║╔╗║║║║║║║──║║║╚╝║╚╝║║║╔╗║║║║\r\n╚╝╚╝╚╩╝╚╩╩╝╚╝╚╝──╚╝╚══╩═╗╠╝╚╝╚╩╩╩╝\r\n──────────────────────╔═╝║\r\n──────────────────────╚══╝\r\n####################################\r\n● It involves a lot of existing predefined states, turning on and off to display\r\n multiple things without them interfering with each other's functionality\r\n● I.e. 
Clicking customisation button will disable itself, hence\r\n if the mouse is clicked over at the same area, it will not\r\n be activated again.\r\n● This is every important to have a smooth flow. \r\n● Also left some debugging messages within the console to help\r\n understand what is going on behind the scenes\r\n'''\r\npygame.init()\r\nrun = True\r\nclock = pygame.time.Clock()\r\n#start the pygame programme \r\nwhile run:\r\n #if true, redraws the main window\r\n if checkButton:\r\n redrawMainWin(screen)\r\n #if true, redraws the customisation window\r\n if customisationMenu:\r\n redrawCustWin(screen)\r\n if easySearch:\r\n if oneTime:\r\n nearest_canteen = redrawSearchWin(screen, x, y)\r\n sleep(2)\r\n oneTime = False\r\n gpsButton.draw(screen, (0,0,0))\r\n #if true, redraws the complicated cusomisation results\r\n if complicatedMenu:\r\n if oneTime:\r\n complicatedSearchWin(screen, nearest_canteen)\r\n sleep(2)\r\n oneTime = False\r\n gpsButton.draw(screen, (0,0,0))\r\n #redraws the GPS map, with point locaters indicated\r\n if gpsButtonPressed == True:\r\n redrawGPSMap(screen, nearest_canteen, x, y)\r\n pygame.display.update()\r\n clock.tick(30)\r\n\r\n #checks event\r\n for event in pygame.event.get():\r\n #Fetches the mouse position\r\n pos = pygame.mouse.get_pos()\r\n\r\n #Quits the pygame programme\r\n if event.type == pygame.QUIT:\r\n run = False\r\n pygame.quit()\r\n\r\n if gpsButtonPressed:\r\n if event.type == pygame.MOUSEBUTTONDOWN:\r\n if restartButton.isOver(pos):\r\n restartButton.colour = (50,50,50)\r\n restartButton.draw(screen, (0,0,0))\r\n pygame.display.update()\r\n print('clicked the restart button')\r\n #original state of customisation buttons\r\n halalButtonPressed = False\r\n vegButtonPressed = False\r\n nonhalalButtonPressed = False\r\n koreanButtonPressed = False\r\n malayButtonPressed = False\r\n japanButtonPressed = False\r\n chineseButtonPressed = False\r\n indianButtonPressed = False\r\n westernButtonPressed = False\r\n 
button3Pressed = False\r\n button5Pressed = False\r\n button7Pressed = False\r\n button9Pressed = False\r\n nextButtonPressed = False\r\n gpsButtonPressed = False\r\n #original state of events\r\n checkButton = True\r\n mapCoor = False\r\n customisationMenu = False\r\n mapCoor2 = False\r\n easySearch = False\r\n complicatedMenu = False\r\n oneTime = True\r\n\r\n if event.type == pygame.MOUSEMOTION:\r\n if restartButton.isOver(pos):\r\n restartButton.colour = (0,255,0)\r\n continue\r\n else:\r\n restartButton.colour = (255,255,255)\r\n continue\r\n\r\n if easySearch == True or complicatedMenu == True:\r\n if event.type == pygame.MOUSEBUTTONDOWN:\r\n if gpsButton.isOver(pos):\r\n gpsButton.colour = (50,50,50)\r\n gpsButton.draw(screen, (0,0,0))\r\n pygame.display.update()\r\n print('clicked gps button')\r\n gpsButtonPressed = True\r\n easySearch = False\r\n complicatedMenu = False\r\n continue\r\n\r\n if event.type == pygame.MOUSEMOTION:\r\n if gpsButton.isOver(pos):\r\n gpsButton.colour = (0,255,0)\r\n continue\r\n else:\r\n gpsButton.colour = (255,255,255)\r\n continue\r\n \r\n #if mouse is clicked over buttons (main page)\r\n if checkButton:\r\n if event.type == pygame.MOUSEBUTTONDOWN:\r\n if mapButton.isOver(pos):\r\n mapButton.colour = (0,255,0)\r\n redrawMainWin(screen)\r\n pygame.display.update()\r\n print('clicked map button')\r\n sleep(0.5)\r\n redrawMap(screen)\r\n checkButton = False\r\n mapCoor = True\r\n continue\r\n \r\n if predictButton.isOver(pos):\r\n predictButton.colour = (0,255,0)\r\n redrawMainWin(screen)\r\n pygame.display.update()\r\n print('clicked predict button')\r\n sleep(0.5)\r\n redrawMap(screen)\r\n checkButton = False\r\n mapCoor2 = True\r\n continue\r\n\r\n if exitButton.isOver(pos):\r\n exitButton.colour = (0,255,0)\r\n print('Exiting...')\r\n skeleExit(screen)\r\n pygame.quit()\r\n run = False\r\n exit()\r\n\r\n #if mouse hovered over the button (main page)\r\n if event.type == pygame.MOUSEMOTION:\r\n if mapButton.isOver(pos):\r\n 
mapButton.colour = (255,0,0)\r\n else:\r\n mapButton.colour = (255,255,255)\r\n\r\n if predictButton.isOver(pos):\r\n predictButton.colour = (255,0,0)\r\n else:\r\n predictButton.colour = (255,255,255)\r\n\r\n if exitButton.isOver(pos):\r\n exitButton.colour = (255,0,0)\r\n else: \r\n exitButton.colour = (255,255,255)\r\n\r\n #clicking buttons in the customisation menu:\r\n if customisationMenu:\r\n if event.type == pygame.MOUSEMOTION:\r\n if nextButton.isOver(pos):\r\n nextButton.colour = (0,0,255)\r\n else:\r\n nextButton.colour = (255,255,255)\r\n continue\r\n if event.type == pygame.MOUSEBUTTONDOWN:\r\n\r\n #clicking on next button\r\n if nextButton.isOver(pos):\r\n nextButton.colour = (255,255,0)\r\n nextButtonPressed = True\r\n customisationMenu = False\r\n continue\r\n\r\n if halalButton.isOver(pos):\r\n if halalButtonPressed == False:\r\n if nonhalalButtonPressed:\r\n nonhalalButton.colour = (255,255,255)\r\n nonhalalButtonPressed = False\r\n halalButton.colour = (0,255,0)\r\n print('clicked Halal button')\r\n halalButtonPressed = True\r\n continue\r\n else:\r\n halalButton.colour = (255,255,255)\r\n halalButtonPressed = False\r\n continue\r\n \r\n if vegButton.isOver(pos):\r\n if vegButtonPressed == False:\r\n if nonhalalButtonPressed:\r\n nonhalalButton.colour = (255,255,255)\r\n nonhalalButtonPressed = False\r\n vegButton.colour = (0,255,0)\r\n print('clicked Vegetarian button')\r\n vegButtonPressed = True\r\n continue\r\n else:\r\n vegButton.colour = (255,255,255)\r\n vegButtonPressed = False\r\n continue\r\n\r\n if nonhalalButton.isOver(pos):\r\n if nonhalalButtonPressed == False:\r\n if halalButtonPressed:\r\n halalButton.colour = (255,255,255)\r\n halalButtonPressed = False\r\n if vegButtonPressed:\r\n vegButton.colour = (255,255,255)\r\n vegButtonPressed = False\r\n nonhalalButton.colour = (0,255,0)\r\n print('clicked non-halal button')\r\n nonhalalButtonPressed = True\r\n continue\r\n else:\r\n nonhalalButton.colour = (255,255,255)\r\n 
nonhalalButtonPressed = False\r\n\r\n if koreanButton.isOver(pos):\r\n if koreanButtonPressed == False:\r\n koreanButton.colour = (0,255,0)\r\n print('clicked korean button')\r\n koreanButtonPressed = True\r\n continue\r\n else:\r\n koreanButton.colour = (255,255,255)\r\n koreanButtonPressed = False\r\n\r\n if malayButton.isOver(pos):\r\n if malayButtonPressed == False:\r\n malayButton.colour = (0,255,0)\r\n print('clicked Malay button')\r\n malayButtonPressed = True\r\n continue\r\n else:\r\n malayButton.colour = (255,255,255)\r\n malayButtonPressed = False\r\n\r\n if japanButton.isOver(pos):\r\n if japanButtonPressed == False:\r\n japanButton.colour = (0,255,0)\r\n print('clicked japan button')\r\n japanButtonPressed = True\r\n continue\r\n else:\r\n japanButton.colour = (255,255,255)\r\n japanButtonPressed = False\r\n\r\n if chineseButton.isOver(pos):\r\n if chineseButtonPressed == False:\r\n chineseButton.colour = (0,255,0)\r\n print('clicked chinese button')\r\n chineseButtonPressed = True\r\n continue\r\n else:\r\n chineseButton.colour = (255,255,255)\r\n chineseButtonPressed = False\r\n\r\n if indianButton.isOver(pos):\r\n if indianButtonPressed == False:\r\n indianButton.colour = (0,255,0)\r\n print('clicked indian button')\r\n indianButtonPressed = True\r\n continue\r\n else:\r\n indianButton.colour = (255,255,255)\r\n indianButtonPressed = False\r\n\r\n if westernButton.isOver(pos):\r\n if westernButtonPressed == False:\r\n westernButton.colour = (0,255,0)\r\n print('clicked western button')\r\n westernButtonPressed = True\r\n continue\r\n else:\r\n westernButton.colour = (255,255,255)\r\n westernButtonPressed = False\r\n \r\n if button3.isOver(pos):\r\n if button3Pressed == False:\r\n if button5Pressed == True:\r\n button5.colour = (255,255,255)\r\n button5Pressed = False\r\n if button7Pressed == True:\r\n button7.colour = (255,255,255)\r\n button7Pressed = False\r\n if button9Pressed == True:\r\n button9.colour = (255,255,255)\r\n button9Pressed = 
False\r\n button3.colour = (0,255,0)\r\n print('clicked $3')\r\n button3Pressed = True\r\n continue\r\n else:\r\n button3.colour = (255,255,255)\r\n button3Pressed = False\r\n \r\n if button5.isOver(pos):\r\n if button5Pressed == False:\r\n if button3Pressed == True:\r\n button3.colour = (255,255,255)\r\n button3Pressed = False\r\n if button7Pressed == True:\r\n button7.colour = (255,255,255)\r\n button7Pressed = False\r\n if button9Pressed == True:\r\n button9.colour = (255,255,255)\r\n button9Pressed = False\r\n button5.colour = (0,255,0)\r\n print('Clicked $5')\r\n button5Pressed = True\r\n continue\r\n else:\r\n button5.colour = (255,255,255)\r\n button5Pressed = False\r\n\r\n if button7.isOver(pos):\r\n if button7Pressed == False:\r\n if button3Pressed == True:\r\n button3.colour = (255,255,255)\r\n button3Pressed = False\r\n if button5Pressed == True:\r\n button5.colour = (255,255,255)\r\n button5Pressed = False\r\n if button9Pressed == True:\r\n button9.colour = (255,255,255)\r\n button9Pressed = False\r\n button7.colour = (0,255,0)\r\n print('Clicked $7')\r\n button7Pressed = True\r\n continue\r\n else:\r\n button7.colour = (255,255,255)\r\n button7Pressed = False\r\n\r\n if button9.isOver(pos):\r\n if button9Pressed == False:\r\n if button3Pressed == True:\r\n button3.colour = (255,255,255)\r\n button3Pressed = False\r\n if button5Pressed == True:\r\n button5.colour = (255,255,255)\r\n button5Pressed = False\r\n if button7Pressed == True:\r\n button7.colour = (255,255,255)\r\n button7Pressed = False\r\n button9.colour = (0,255,0)\r\n print('Clicked $10')\r\n button9Pressed = True\r\n continue\r\n else:\r\n button9.colour = (255,255,255)\r\n button9Pressed = False \r\n\r\n #if mousebuttondown and map is already displayed\r\n if mapCoor == True and event.type == pygame.MOUSEBUTTONDOWN:\r\n mouseclick = mouseClick(screen)\r\n if mouseclick[0]:\r\n pygame.display.update()\r\n x = mouseclick[1]\r\n y = mouseclick[2]\r\n print(x, ',', y)\r\n 
#pygame.time.delay(2000) \r\n mapCoor = False\r\n sleep(1)\r\n customisationMenu = True\r\n\r\n #if prediction button is clicked\r\n if mapCoor2 == True and event.type == pygame.MOUSEBUTTONDOWN:\r\n mouseclick = mouseClick(screen)\r\n if mouseclick[0]:\r\n pygame.display.update()\r\n x = mouseclick[1]\r\n y = mouseclick[2]\r\n print(x, ',', y)\r\n #pygame.time.delay(2000) \r\n mapCoor2 = False\r\n sleep(1)\r\n loading(screen)\r\n easySearch = True\r\n\r\n #things that happen after the next button is pressed\r\n if nextButtonPressed:\r\n sleep(1)\r\n loading(screen)\r\n user_prefList = []\r\n user_cuisineList = []\r\n user_budget = 0\r\n if halalButtonPressed:\r\n user_prefList.append(\"Halal\")\r\n if vegButtonPressed:\r\n user_prefList.append(\"Vegetarian\")\r\n if nonhalalButtonPressed:\r\n user_prefList.append(\"Non-Halal/Non-Vegetarian\")\r\n if koreanButtonPressed:\r\n user_cuisineList.append(\"Korean\")\r\n if malayButtonPressed:\r\n user_cuisineList.append(\"Malay\")\r\n if japanButtonPressed:\r\n user_cuisineList.append(\"Japanese\")\r\n if chineseButtonPressed:\r\n user_cuisineList.append(\"Chinese\")\r\n if indianButtonPressed:\r\n user_cuisineList.append(\"Indian\")\r\n if westernButtonPressed:\r\n user_cuisineList.append(\"Western\")\r\n if button3Pressed:\r\n user_budget = 3\r\n if button5Pressed:\r\n user_budget = 5\r\n if button7Pressed:\r\n user_budget = 7\r\n if button9Pressed:\r\n user_budget = 9\r\n #debug\r\n print(user_cuisineList)\r\n print(user_prefList)\r\n print(user_budget)\r\n #continue#\r\n finalID = final_list(user_budget, user_cuisineList, user_prefList)\r\n print(finalID)\r\n nearest_canteen = nearest_can(finalID, x, y)\r\n print(nearest_canteen)\r\n sleep(1)\r\n nextButtonPressed = False\r\n complicatedMenu = True\r\n \r\n",
"step-ids": [
11,
12,
15,
22,
23
]
}
|
[
11,
12,
15,
22,
23
] |
<|reserved_special_token_0|>
class Skip_GAN(object):
    """Conditional GAN built on SRResNet-style generator/discriminator parts.

    NOTE(review): this duplicate of the class defines only the constructor;
    the graph-building and training methods appear in the later duplicates
    of the same class within this file.
    """

    def __init__(self, sess, epoch, batch_size, dataset_name, result_dir,
        z_dim, y_dim, checkpoint_dir, num_resblock, Cycle_lr, Class_weight,
        Resnet_weight):
        """Record hyper-parameters and load the requested dataset.

        Args:
            sess: TensorFlow session used for graph execution.
            epoch: number of training iterations.
            batch_size: mini-batch size.
            dataset_name: 'anime' or 'celebA'; any other value exits.
            result_dir: directory for periodically saved sample grids.
            z_dim: dimensionality of the noise vector.
            y_dim: dimensionality of the condition (tag) vector.
            checkpoint_dir: directory for model checkpoints.
            num_resblock: number of residual blocks in the generator.
            Cycle_lr: if truthy, cycle the discriminator learning rate.
            Class_weight: if truthy, grow ``self.la`` during training.
            Resnet_weight: residual-path weight passed to the generator.
        """
        self.sess = sess
        self.dataset_name = dataset_name
        self.result_dir = result_dir
        self.epoch = epoch
        self.batch_size = batch_size
        self.z_dim = z_dim
        self.y_dim = y_dim
        self.checkpoint_dir = checkpoint_dir
        self.num_resblock = num_resblock
        self.Cycle_lr = Cycle_lr
        self.Class_weight = Class_weight
        # Weight of the real/fake (authenticity) loss relative to the
        # classification loss.
        self.la = 10
        self.learningRateD = 0.0002
        self.learningRateG = 0.0002
        self.Resnet_weight = Resnet_weight
        # Load the whole selected dataset into memory (project helpers).
        if self.dataset_name == 'anime':
            print('loading anime .............')
            self.height = 96
            self.width = 96
            self.c_dim = 3
            self.data_X, self.data_Y = load_anime_old()
            print('self.data_X:', self.data_X.shape, 'self.data_y:', self.
                data_Y.shape)
        elif self.dataset_name == 'celebA':
            print('loading celebA ...............')
            self.height = 96
            self.width = 96
            self.c_dim = 3
            self.data_X, self.data_Y = load_CelebA()
            print('self.data_X:', self.data_X.shape, 'self.data_y:', self.
                data_Y.shape)
        else:
            # Unknown dataset: report and abort the process.
            print('Sorry there is no option for ', self.dataset_name)
            sys.exit()
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Skip_GAN(object):
    """Conditional GAN trainer (SRResNet generator/discriminator).

    NOTE(review): this duplicate of the class defines only ``__init__`` and
    ``train``; the graph construction that creates the ``d_updates`` /
    ``g_updates`` / ``sampler`` ops used below is not part of this block.
    """

    def __init__(self, sess, epoch, batch_size, dataset_name, result_dir,
        z_dim, y_dim, checkpoint_dir, num_resblock, Cycle_lr, Class_weight,
        Resnet_weight):
        """Record hyper-parameters and load the requested dataset.

        ``dataset_name`` must be 'anime' or 'celebA'; anything else prints
        a message and exits the process.
        """
        self.sess = sess
        self.dataset_name = dataset_name
        self.result_dir = result_dir
        self.epoch = epoch
        self.batch_size = batch_size
        self.z_dim = z_dim
        self.y_dim = y_dim
        self.checkpoint_dir = checkpoint_dir
        self.num_resblock = num_resblock
        self.Cycle_lr = Cycle_lr
        self.Class_weight = Class_weight
        # Weight of the real/fake (authenticity) loss relative to the
        # classification loss; optionally grown in train().
        self.la = 10
        self.learningRateD = 0.0002
        self.learningRateG = 0.0002
        self.Resnet_weight = Resnet_weight
        # Load the whole selected dataset into memory (project helpers).
        if self.dataset_name == 'anime':
            print('loading anime .............')
            self.height = 96
            self.width = 96
            self.c_dim = 3
            self.data_X, self.data_Y = load_anime_old()
            print('self.data_X:', self.data_X.shape, 'self.data_y:', self.
                data_Y.shape)
        elif self.dataset_name == 'celebA':
            print('loading celebA ...............')
            self.height = 96
            self.width = 96
            self.c_dim = 3
            self.data_X, self.data_Y = load_CelebA()
            print('self.data_X:', self.data_X.shape, 'self.data_y:', self.
                data_Y.shape)
        else:
            # Unknown dataset: report and abort the process.
            print('Sorry there is no option for ', self.dataset_name)
            sys.exit()

    def train(self):
        """Run the training loop: three D updates then one G update per step.

        Every 10 steps prints losses, every 500 saves a sample grid, every
        1000 saves a checkpoint; optional schedules adjust the D learning
        rate and the authenticity weight ``la``.
        """
        print('begin training ...........')
        tf.global_variables_initializer().run()
        # Fixed noise/label pair reused for every saved sample grid.
        sample_num = 64
        tot_num_samples = min(sample_num, self.batch_size)
        manifold_h = int(np.floor(np.sqrt(tot_num_samples)))
        manifold_w = int(np.floor(np.sqrt(tot_num_samples)))
        self.sample = np.random.uniform(-1, 1, size=(self.batch_size, self.
            z_dim)).astype(np.float32)
        self.sample_y = self.data_Y[0:self.batch_size]
        counter = 0
        batch_offset = 0
        # Initial shuffle of the whole dataset.
        data_index = np.arange(self.data_X.shape[0])
        np.random.shuffle(data_index)
        self.data_X = self.data_X[data_index, :, :, :]
        self.data_Y = self.data_Y[data_index]
        # NOTE(review): despite the name, each loop iteration is one
        # mini-batch step, not one full pass over the data.
        for epoch in range(self.epoch):
            if batch_offset + self.batch_size > len(self.data_X):
                # End of a pass: rewind and reshuffle.  No training update
                # happens this iteration, but counter still advances and
                # the prints below reuse the previous d_loss/g_loss.
                batch_offset = 0
                data_index = np.arange(self.data_X.shape[0])
                np.random.shuffle(data_index)
                self.data_X = self.data_X[data_index, :, :, :]
                self.data_Y = self.data_Y[data_index]
            else:
                batch_images = self.data_X[batch_offset:batch_offset + self
                    .batch_size]
                batch_codes = self.data_Y[batch_offset:batch_offset + self.
                    batch_size]
                batch_z = np.random.uniform(-1, 1, [self.batch_size, self.
                    z_dim]).astype(np.float32)
                # Three discriminator updates per generator update.
                for i_d_loss in range(3):
                    _, d_loss = self.sess.run([self.d_updates, self.DC_loss
                        ], feed_dict={self.img: batch_images, self.y:
                        batch_codes, self.z: batch_z})
                for i_g_loss in range(1):
                    _, g_loss, _ = self.sess.run([self.g_updates, self.
                        GC_loss, self.G_sample], feed_dict={self.y:
                        batch_codes, self.img: batch_images, self.z: batch_z})
                batch_offset = batch_offset + self.batch_size
            if counter % 10 == 0:
                print(
                    'Epoch: %2d counter: %5d d_loss: %.8f, g_loss: %.8f' %
                    (epoch, counter, d_loss, g_loss))
            if counter % 500 == 0:
                # Render a grid of generator samples from the fixed inputs.
                samples = self.sess.run(self.sampler, feed_dict={self.z:
                    self.sample, self.y: self.sample_y})
                save_images(samples[:manifold_h * manifold_w, :, :, :],
                    [manifold_h, manifold_w], self.result_dir +
                    '/{}.png'.format(str(counter).zfill(7)))
            if counter % 1000 == 0:
                saver = tf.train.Saver(max_to_keep=5)
                saver.save(self.sess, self.checkpoint_dir + '/{}'.
                    format(str(counter).zfill(7)))
            if counter % 100 == 0:
                if self.Cycle_lr:
                    # NOTE(review): this mutates a Python float; whether it
                    # reaches the optimizer depends on how the (unseen here)
                    # build_model consumes it -- verify.
                    self.learningRateD = self.learningRateD * 0.99
                    if self.learningRateD < 0.0001:
                        self.learningRateD = 0.0002
            if counter % 500 == 0:
                if self.Class_weight:
                    # Grow the authenticity weight; the cap at 25 is only
                    # applied one step late (la can transiently exceed it).
                    if self.la > 25:
                        self.la = 25
                    else:
                        self.la = self.la * 1.5
            counter += 1
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Skip_GAN(object):
    """Conditional GAN with an SRResNet generator and a two-headed
    discriminator (real/fake logit plus multi-label tag logit).

    Call ``build_model()`` once, then ``train()``.
    """

    def __init__(self, sess, epoch, batch_size, dataset_name, result_dir,
        z_dim, y_dim, checkpoint_dir, num_resblock, Cycle_lr, Class_weight,
        Resnet_weight):
        """Record hyper-parameters and load the requested dataset.

        ``dataset_name`` must be 'anime' or 'celebA'; anything else prints
        a message and exits the process.
        """
        self.sess = sess
        self.dataset_name = dataset_name
        self.result_dir = result_dir
        self.epoch = epoch
        self.batch_size = batch_size
        self.z_dim = z_dim
        self.y_dim = y_dim
        self.checkpoint_dir = checkpoint_dir
        self.num_resblock = num_resblock
        self.Cycle_lr = Cycle_lr
        self.Class_weight = Class_weight
        # Weight of the real/fake (authenticity) loss relative to the
        # classification loss.
        self.la = 10
        self.learningRateD = 0.0002
        self.learningRateG = 0.0002
        self.Resnet_weight = Resnet_weight
        # Load the whole selected dataset into memory (project helpers).
        if self.dataset_name == 'anime':
            print('loading anime .............')
            self.height = 96
            self.width = 96
            self.c_dim = 3
            self.data_X, self.data_Y = load_anime_old()
            print('self.data_X:', self.data_X.shape, 'self.data_y:', self.
                data_Y.shape)
        elif self.dataset_name == 'celebA':
            print('loading celebA ...............')
            self.height = 96
            self.width = 96
            self.c_dim = 3
            self.data_X, self.data_Y = load_CelebA()
            print('self.data_X:', self.data_X.shape, 'self.data_y:', self.
                data_Y.shape)
        else:
            # Unknown dataset: report and abort the process.
            print('Sorry there is no option for ', self.dataset_name)
            sys.exit()

    def build_model(self):
        """Build the TF graph: placeholders, G/D nets, losses, optimizers.

        Defines ``self.DC_loss``/``self.GC_loss``, their Adam update ops,
        and ``self.sampler`` (generator in inference mode, shared weights).
        """
        # Placeholders: condition vector, real images, noise vector.
        self.y = tf.placeholder(tf.float32, [None, self.y_dim], name='y')
        self.img = tf.placeholder(tf.float32, [self.batch_size, self.height,
            self.width, 3], name='img')
        self.z = tf.placeholder(tf.float32, [None, self.z_dim])
        self.G_sample = Generator_srresnet(self.z, self.y, self.
            num_resblock, self.Resnet_weight)
        print('The return of Generator:', self.G_sample)
        # Discriminator on real images.
        D_real, C_real = Discriminator_srresnet(self.img, dataset=self.
            dataset_name)
        print('The return of Discriminator:', D_real, C_real)
        # Discriminator on generated images (shared weights via reuse).
        D_fake, C_fake = Discriminator_srresnet(self.G_sample, dataset=self
            .dataset_name, reuse=True)
        print('The return of Discriminator:', D_fake, C_fake)
        # Multi-label tag classification losses (real / fake images).
        self.C_real_loss = tf.reduce_mean(tf.reduce_sum(tf.nn.
            sigmoid_cross_entropy_with_logits(logits=C_real, labels=self.y),
            axis=1))
        self.C_fake_loss = tf.reduce_mean(tf.reduce_sum(tf.nn.
            sigmoid_cross_entropy_with_logits(logits=C_fake, labels=self.y),
            axis=1))
        # Real/fake (authenticity) losses.
        D_real_loss = tf.reduce_mean(tf.nn.
            sigmoid_cross_entropy_with_logits(logits=D_real, labels=tf.
            ones_like(D_real)))
        D_fake_loss = tf.reduce_mean(tf.nn.
            sigmoid_cross_entropy_with_logits(logits=D_fake, labels=tf.
            zeros_like(D_fake)))
        # (Translation of the note below) ``la`` is the parameter to watch
        # when using the dynamic schedule; the goal is to make the class
        # loss dominate, not the real/fake loss.
        """注意 la也即是我是用动态学习率的时候要关注的参数
        但是我的目标是使得类别损失变得更加的大 而不是真伪的损失"""
        D_loss = D_real_loss + D_fake_loss
        # NOTE(review): self.la and self.learningRateD are Python floats
        # captured here at graph-construction time, so the later mutations
        # in train() cannot affect the built losses/optimizer.
        self.DC_loss = self.la * D_loss + self.C_real_loss
        G_loss = tf.reduce_mean(tf.nn.sigmoid_cross_entropy_with_logits(
            logits=D_fake, labels=tf.ones_like(D_fake)))
        self.GC_loss = self.la * G_loss + self.C_fake_loss
        print('Calualtion the loss of Optimizer')
        # Split trainable variables by network-name prefix.
        self.theta_D = [v for v in tf.global_variables() if 'd_net' in v.name]
        self.theta_G = [v for v in tf.global_variables() if 'g_net' in v.name]
        with tf.control_dependencies(tf.get_collection(tf.GraphKeys.UPDATE_OPS)
            ):
            self.d_updates = tf.train.AdamOptimizer(self.learningRateD,
                beta1=0.5, beta2=0.9).minimize(self.DC_loss, var_list=self.
                theta_D)
            self.g_updates = tf.train.AdamOptimizer(self.learningRateG,
                beta1=0.5, beta2=0.9).minimize(self.GC_loss, var_list=self.
                theta_G)
        # NOTE(review): argument order here is (self.y, self.z, ...) while
        # G_sample above was built with (self.z, self.y, ...); with
        # reuse=True this swaps noise and condition -- looks like a bug,
        # confirm against Generator_srresnet's signature.
        self.sampler = Generator_srresnet(self.y, self.z, self.num_resblock,
            self.Resnet_weight, reuse=True, train=False)

    def train(self):
        """Run the training loop: three D updates then one G update per step.

        Every 10 steps prints losses, every 500 saves a sample grid, every
        1000 saves a checkpoint; optional schedules adjust the D learning
        rate and ``la`` (but see the NOTE in build_model -- they are frozen
        into the graph).
        """
        print('begin training ...........')
        tf.global_variables_initializer().run()
        # Fixed noise/label pair reused for every saved sample grid.
        sample_num = 64
        tot_num_samples = min(sample_num, self.batch_size)
        manifold_h = int(np.floor(np.sqrt(tot_num_samples)))
        manifold_w = int(np.floor(np.sqrt(tot_num_samples)))
        self.sample = np.random.uniform(-1, 1, size=(self.batch_size, self.
            z_dim)).astype(np.float32)
        self.sample_y = self.data_Y[0:self.batch_size]
        counter = 0
        batch_offset = 0
        # Initial shuffle of the whole dataset.
        data_index = np.arange(self.data_X.shape[0])
        np.random.shuffle(data_index)
        self.data_X = self.data_X[data_index, :, :, :]
        self.data_Y = self.data_Y[data_index]
        # NOTE(review): despite the name, each loop iteration is one
        # mini-batch step, not one full pass over the data.
        for epoch in range(self.epoch):
            if batch_offset + self.batch_size > len(self.data_X):
                # End of a pass: rewind and reshuffle.  No training update
                # happens this iteration, but counter still advances.
                batch_offset = 0
                data_index = np.arange(self.data_X.shape[0])
                np.random.shuffle(data_index)
                self.data_X = self.data_X[data_index, :, :, :]
                self.data_Y = self.data_Y[data_index]
            else:
                batch_images = self.data_X[batch_offset:batch_offset + self
                    .batch_size]
                batch_codes = self.data_Y[batch_offset:batch_offset + self.
                    batch_size]
                batch_z = np.random.uniform(-1, 1, [self.batch_size, self.
                    z_dim]).astype(np.float32)
                # Three discriminator updates per generator update.
                for i_d_loss in range(3):
                    _, d_loss = self.sess.run([self.d_updates, self.DC_loss
                        ], feed_dict={self.img: batch_images, self.y:
                        batch_codes, self.z: batch_z})
                for i_g_loss in range(1):
                    _, g_loss, _ = self.sess.run([self.g_updates, self.
                        GC_loss, self.G_sample], feed_dict={self.y:
                        batch_codes, self.img: batch_images, self.z: batch_z})
                batch_offset = batch_offset + self.batch_size
            if counter % 10 == 0:
                print(
                    'Epoch: %2d counter: %5d d_loss: %.8f, g_loss: %.8f' %
                    (epoch, counter, d_loss, g_loss))
            if counter % 500 == 0:
                # Render a grid of generator samples from the fixed inputs.
                samples = self.sess.run(self.sampler, feed_dict={self.z:
                    self.sample, self.y: self.sample_y})
                save_images(samples[:manifold_h * manifold_w, :, :, :],
                    [manifold_h, manifold_w], self.result_dir +
                    '/{}.png'.format(str(counter).zfill(7)))
            if counter % 1000 == 0:
                saver = tf.train.Saver(max_to_keep=5)
                saver.save(self.sess, self.checkpoint_dir + '/{}'.
                    format(str(counter).zfill(7)))
            if counter % 100 == 0:
                if self.Cycle_lr:
                    # Decay, then reset below 1e-4 (intended cyclic lr).
                    self.learningRateD = self.learningRateD * 0.99
                    if self.learningRateD < 0.0001:
                        self.learningRateD = 0.0002
            if counter % 500 == 0:
                if self.Class_weight:
                    # Grow the authenticity weight; the cap at 25 is only
                    # applied one step late (la can transiently exceed it).
                    if self.la > 25:
                        self.la = 25
                    else:
                        self.la = self.la * 1.5
            counter += 1
<|reserved_special_token_1|>
from Dataload import load_anime_old, save_images, load_CelebA
from Srresnet_Model import Generator_srresnet, Discriminator_srresnet
import tensorflow as tf
import numpy as np
import sys
class Skip_GAN(object):
    """Conditional GAN with an SRResNet generator and a two-headed
    discriminator (real/fake logit plus multi-label tag logit).

    Both objectives combine the two heads, with the real/fake term scaled
    by ``self.la``.  Call ``build_model()`` once, then ``train()``.

    Fixes relative to the original:
    - ``self.sampler`` is built with the same (z, y) argument order as
      ``self.G_sample`` (the original passed them swapped).
    - The D learning rate and ``la`` are fed through placeholders, so the
      ``Cycle_lr`` / ``Class_weight`` schedules in ``train()`` actually
      take effect (plain Python floats were frozen into the graph).
    - ``la`` is hard-capped at 25 immediately instead of one step late.
    """

    def __init__(self, sess, epoch, batch_size, dataset_name, result_dir,
        z_dim, y_dim, checkpoint_dir, num_resblock, Cycle_lr, Class_weight,
        Resnet_weight):
        """Record hyper-parameters and load the requested dataset.

        Args:
            sess: TensorFlow session used for all graph execution.
            epoch: number of training iterations run by ``train``.
            batch_size: mini-batch size.
            dataset_name: 'anime' or 'celebA'; any other value exits.
            result_dir: directory where sample image grids are written.
            z_dim: dimensionality of the input noise vector.
            y_dim: dimensionality of the condition (tag) vector.
            checkpoint_dir: directory where checkpoints are saved.
            num_resblock: number of residual blocks in the generator.
            Cycle_lr: if truthy, cycle the discriminator learning rate.
            Class_weight: if truthy, grow ``self.la`` during training.
            Resnet_weight: residual-path weight handed to the generator.
        """
        self.sess = sess
        self.dataset_name = dataset_name
        self.result_dir = result_dir
        self.epoch = epoch
        self.batch_size = batch_size
        self.z_dim = z_dim
        self.y_dim = y_dim
        self.checkpoint_dir = checkpoint_dir
        self.num_resblock = num_resblock
        self.Cycle_lr = Cycle_lr
        self.Class_weight = Class_weight
        # la weights the real/fake (authenticity) loss relative to the
        # classification loss; optionally grown during training.
        self.la = 10
        self.learningRateD = 2e-4
        self.learningRateG = 2e-4
        self.Resnet_weight = Resnet_weight
        # Load the whole selected dataset into memory (project helpers).
        if self.dataset_name == 'anime':
            print('loading anime .............')
            self.height = 96
            self.width = 96
            self.c_dim = 3
            self.data_X, self.data_Y = load_anime_old()
            print('self.data_X:', self.data_X.shape, 'self.data_y:', self.data_Y.shape)
        elif self.dataset_name == 'celebA':
            print('loading celebA ...............')
            self.height = 96
            self.width = 96
            self.c_dim = 3
            self.data_X, self.data_Y = load_CelebA()
            print('self.data_X:', self.data_X.shape, 'self.data_y:', self.data_Y.shape)
        else:
            # Unknown dataset: report and abort the process.
            print('Sorry there is no option for ', self.dataset_name)
            sys.exit()

    def build_model(self):
        """Build the TF graph: placeholders, G/D nets, losses, optimizers.

        Defines ``self.DC_loss``/``self.GC_loss``, their Adam update ops
        (``self.d_updates``/``self.g_updates``), and ``self.sampler``
        (generator in inference mode, sharing the training weights).
        """
        # Placeholders: condition vector, real images, noise vector.
        self.y = tf.placeholder(tf.float32, [None, self.y_dim], name='y')
        self.img = tf.placeholder(tf.float32,
            [self.batch_size, self.height, self.width, 3], name='img')
        self.z = tf.placeholder(tf.float32, [None, self.z_dim])
        # Fed at run time so train() can actually vary them; plain Python
        # floats would be frozen into the graph at construction time,
        # making the Cycle_lr / Class_weight schedules no-ops.
        self.lr_d = tf.placeholder(tf.float32, shape=[], name='lr_d')
        self.la_ph = tf.placeholder(tf.float32, shape=[], name='la')
        self.G_sample = Generator_srresnet(self.z, self.y,
            self.num_resblock, self.Resnet_weight)
        print('The return of Generator:', self.G_sample)
        # Discriminator on real images.
        D_real, C_real = Discriminator_srresnet(self.img,
            dataset=self.dataset_name)
        print('The return of Discriminator:', D_real, C_real)
        # Discriminator on generated images (shared weights via reuse).
        D_fake, C_fake = Discriminator_srresnet(self.G_sample,
            dataset=self.dataset_name, reuse=True)
        print('The return of Discriminator:', D_fake, C_fake)
        # Multi-label tag classification losses (real / fake images).
        self.C_real_loss = tf.reduce_mean(tf.reduce_sum(
            tf.nn.sigmoid_cross_entropy_with_logits(logits=C_real, labels=self.y),
            axis=1))
        self.C_fake_loss = tf.reduce_mean(tf.reduce_sum(
            tf.nn.sigmoid_cross_entropy_with_logits(logits=C_fake, labels=self.y),
            axis=1))
        # Real/fake (authenticity) losses.
        D_real_loss = tf.reduce_mean(tf.nn.sigmoid_cross_entropy_with_logits(
            logits=D_real, labels=tf.ones_like(D_real)))
        D_fake_loss = tf.reduce_mean(tf.nn.sigmoid_cross_entropy_with_logits(
            logits=D_fake, labels=tf.zeros_like(D_fake)))
        # NOTE (translated from the original Chinese comment): ``la`` is
        # the parameter to watch when using the dynamic schedule; the goal
        # is to let the class loss dominate, not the real/fake loss.
        D_loss = D_real_loss + D_fake_loss
        self.DC_loss = self.la_ph * D_loss + self.C_real_loss
        G_loss = tf.reduce_mean(tf.nn.sigmoid_cross_entropy_with_logits(
            logits=D_fake, labels=tf.ones_like(D_fake)))
        self.GC_loss = self.la_ph * G_loss + self.C_fake_loss
        print('Calualtion the loss of Optimizer')
        # Split trainable variables by network-name prefix.
        self.theta_D = [v for v in tf.global_variables() if 'd_net' in v.name]
        self.theta_G = [v for v in tf.global_variables() if 'g_net' in v.name]
        # Run batch-norm update ops before each optimizer step.
        with tf.control_dependencies(tf.get_collection(tf.GraphKeys.UPDATE_OPS)):
            self.d_updates = tf.train.AdamOptimizer(self.lr_d, beta1=0.5,
                beta2=0.9).minimize(self.DC_loss, var_list=self.theta_D)
            self.g_updates = tf.train.AdamOptimizer(self.learningRateG,
                beta1=0.5, beta2=0.9).minimize(self.GC_loss,
                var_list=self.theta_G)
        # Inference-mode sampler sharing the generator weights.  Argument
        # order matches self.G_sample above (z first, then y); the original
        # passed (y, z), feeding swapped tensors into the reused network.
        self.sampler = Generator_srresnet(self.z, self.y, self.num_resblock,
            self.Resnet_weight, reuse=True, train=False)

    def train(self):
        """Run the training loop: three D updates then one G update per step.

        Every 10 steps prints losses, every 500 saves a sample grid, every
        1000 saves a checkpoint; optional schedules adjust the D learning
        rate (``Cycle_lr``) and the authenticity weight ``la``
        (``Class_weight``), fed into the graph via placeholders.
        """
        print('begin training ...........')
        tf.global_variables_initializer().run()
        # Fixed noise/label pair reused for every saved sample grid.
        sample_num = 64
        tot_num_samples = min(sample_num, self.batch_size)
        manifold_h = int(np.floor(np.sqrt(tot_num_samples)))
        manifold_w = int(np.floor(np.sqrt(tot_num_samples)))
        self.sample = np.random.uniform(
            -1, 1, size=(self.batch_size, self.z_dim)).astype(np.float32)
        self.sample_y = self.data_Y[0:self.batch_size]
        counter = 0
        batch_offset = 0
        # Initial shuffle of the whole dataset.
        data_index = np.arange(self.data_X.shape[0])
        np.random.shuffle(data_index)
        self.data_X = self.data_X[data_index, :, :, :]
        self.data_Y = self.data_Y[data_index]
        # NOTE: despite the name, each iteration is one mini-batch step.
        for epoch in range(self.epoch):
            if batch_offset + self.batch_size > len(self.data_X):
                # End of a pass: rewind and reshuffle (no update this step;
                # the prints below reuse the previous d_loss/g_loss).
                batch_offset = 0
                data_index = np.arange(self.data_X.shape[0])
                np.random.shuffle(data_index)
                self.data_X = self.data_X[data_index, :, :, :]
                self.data_Y = self.data_Y[data_index]
            else:
                batch_images = self.data_X[batch_offset:batch_offset + self.batch_size]
                batch_codes = self.data_Y[batch_offset:batch_offset + self.batch_size]
                batch_z = np.random.uniform(
                    -1, 1, [self.batch_size, self.z_dim]).astype(np.float32)
                # Three discriminator updates per generator update.  The
                # scheduled values are fed so they take effect (see
                # build_model).
                for i_d_loss in range(3):
                    _, d_loss = self.sess.run(
                        [self.d_updates, self.DC_loss],
                        feed_dict={self.img: batch_images,
                                   self.y: batch_codes,
                                   self.z: batch_z,
                                   self.lr_d: self.learningRateD,
                                   self.la_ph: self.la})
                for i_g_loss in range(1):
                    _, g_loss, _ = self.sess.run(
                        [self.g_updates, self.GC_loss, self.G_sample],
                        feed_dict={self.y: batch_codes,
                                   self.img: batch_images,
                                   self.z: batch_z,
                                   self.la_ph: self.la})
                batch_offset = batch_offset + self.batch_size
            if counter % 10 == 0:
                print(
                    'Epoch: %2d counter: %5d d_loss: %.8f, g_loss: %.8f' %
                    (epoch, counter, d_loss, g_loss))
            if counter % 500 == 0:
                # Render a grid of generator samples from the fixed inputs.
                samples = self.sess.run(self.sampler, feed_dict={
                    self.z: self.sample, self.y: self.sample_y})
                save_images(samples[:manifold_h * manifold_w, :, :, :],
                    [manifold_h, manifold_w],
                    self.result_dir + '/{}.png'.format(str(counter).zfill(7)))
            if counter % 1000 == 0:
                saver = tf.train.Saver(max_to_keep=5)
                saver.save(self.sess,
                    self.checkpoint_dir + '/{}'.format(str(counter).zfill(7)))
            if counter % 100 == 0:
                if self.Cycle_lr:
                    # Decay, then reset once it falls below 1e-4 (cyclic).
                    self.learningRateD = self.learningRateD * 0.99
                    if self.learningRateD < 0.0001:
                        self.learningRateD = 0.0002
            if counter % 500 == 0:
                if self.Class_weight:
                    # Grow the authenticity weight, hard-capped at 25.
                    self.la = min(self.la * 1.5, 25)
            counter += 1
<|reserved_special_token_1|>
# -*- coding: utf-8 -*-
# @Time : 2020/3/4 10:34
# @Author : YYLin
# @Email : 854280599@qq.com
# @File : Skip_GAN.py
from Dataload import load_anime_old, save_images, load_CelebA
from Srresnet_Model import Generator_srresnet, Discriminator_srresnet
import tensorflow as tf
import numpy as np
import sys
class Skip_GAN(object):
    """Conditional GAN built from SRResNet-style generator/discriminator blocks.

    The discriminator returns both a real/fake logit (D) and per-class logits
    (C).  Both networks are trained on a combined loss in which the
    adversarial term is weighted by ``self.la``.

    Args:
        sess: TensorFlow session used for all graph execution.
        epoch: number of training iterations (one batch per iteration).
        batch_size: mini-batch size.
        dataset_name: 'anime' or 'celebA'; anything else aborts the program.
        result_dir: directory where sampled image grids are written.
        z_dim: dimensionality of the latent noise vector.
        y_dim: dimensionality of the conditioning label vector.
        checkpoint_dir: directory where model checkpoints are saved.
        num_resblock: number of residual blocks in the generator.
        Cycle_lr: if True, decay/reset the discriminator learning rate.
        Class_weight: if True, periodically grow the adversarial weight ``la``.
        Resnet_weight: weight forwarded to the generator's residual blocks.
    """

    def __init__(self, sess, epoch, batch_size, dataset_name, result_dir, z_dim, y_dim, checkpoint_dir, num_resblock,
                 Cycle_lr, Class_weight, Resnet_weight):
        self.sess = sess
        self.dataset_name = dataset_name
        self.result_dir = result_dir
        self.epoch = epoch
        self.batch_size = batch_size
        self.z_dim = z_dim
        self.y_dim = y_dim
        self.checkpoint_dir = checkpoint_dir
        self.num_resblock = num_resblock
        self.Cycle_lr = Cycle_lr
        self.Class_weight = Class_weight

        # la weights the real/fake (authenticity) loss relative to the
        # classification loss in both DC_loss and GC_loss.
        self.la = 10
        self.learningRateD = 2e-4
        self.learningRateG = 2e-4

        self.Resnet_weight = Resnet_weight

        # Load the requested dataset; both supported options use 96x96 RGB.
        if self.dataset_name == 'anime':
            print('loading anime .............')
            self.height = 96
            self.width = 96
            self.c_dim = 3

            self.data_X, self.data_Y = load_anime_old()
            print('self.data_X:', self.data_X.shape, 'self.data_y:', self.data_Y.shape)
        elif self.dataset_name == 'celebA':
            print('loading celebA ...............')
            self.height = 96
            self.width = 96
            self.c_dim = 3

            self.data_X, self.data_Y = load_CelebA()
            print('self.data_X:', self.data_X.shape, 'self.data_y:', self.data_Y.shape)
        else:
            print('Sorry there is no option for ', self.dataset_name)
            sys.exit()

    def build_model(self):
        """Build placeholders, the G/D graphs, the losses, and the optimizers."""
        # Model inputs: labels, real images, and latent noise.
        self.y = tf.placeholder(tf.float32, [None, self.y_dim], name='y')
        self.img = tf.placeholder(tf.float32, [self.batch_size, self.height, self.width, self.c_dim], name='img')
        self.z = tf.placeholder(tf.float32, [None, self.z_dim])

        self.G_sample = Generator_srresnet(self.z, self.y, self.num_resblock, self.Resnet_weight)
        print('The return of Generator:', self.G_sample)

        # Discriminator on real images.
        D_real, C_real = Discriminator_srresnet(self.img, dataset=self.dataset_name)
        print('The return of Discriminator:', D_real, C_real)

        # Discriminator on generated images (shared weights via reuse=True).
        D_fake, C_fake = Discriminator_srresnet(self.G_sample, dataset=self.dataset_name, reuse=True)
        print('The return of Discriminator:', D_fake, C_fake)

        # Classification losses: multi-label sigmoid cross-entropy vs. y.
        self.C_real_loss = tf.reduce_mean(
            tf.reduce_sum(tf.nn.sigmoid_cross_entropy_with_logits(logits=C_real, labels=self.y), axis=1))
        self.C_fake_loss = tf.reduce_mean(
            tf.reduce_sum(tf.nn.sigmoid_cross_entropy_with_logits(logits=C_fake, labels=self.y), axis=1))

        # Adversarial losses: real images should score 1, fakes 0.
        D_real_loss = tf.reduce_mean(
            tf.nn.sigmoid_cross_entropy_with_logits(logits=D_real, labels=tf.ones_like(D_real)))
        D_fake_loss = tf.reduce_mean(
            tf.nn.sigmoid_cross_entropy_with_logits(logits=D_fake, labels=tf.zeros_like(D_fake)))

        # NOTE: la scales the adversarial term only; the class loss is
        # unweighted, so increasing la emphasises authenticity over class.
        D_loss = D_real_loss + D_fake_loss
        self.DC_loss = (self.la * D_loss + self.C_real_loss)

        # The generator wants its fakes judged as real.
        G_loss = tf.reduce_mean(tf.nn.sigmoid_cross_entropy_with_logits(logits=D_fake, labels=tf.ones_like(D_fake)))
        self.GC_loss = (self.la * G_loss + self.C_fake_loss)

        print('Calculation the loss of Optimizer')
        self.theta_D = [v for v in tf.global_variables() if 'd_net' in v.name]
        self.theta_G = [v for v in tf.global_variables() if 'g_net' in v.name]

        with tf.control_dependencies(tf.get_collection(tf.GraphKeys.UPDATE_OPS)):
            self.d_updates = tf.train.AdamOptimizer(self.learningRateD, beta1=0.5, beta2=0.9).minimize(self.DC_loss,
                                                                                                       var_list=self.theta_D)
            self.g_updates = tf.train.AdamOptimizer(self.learningRateG, beta1=0.5, beta2=0.9).minimize(self.GC_loss,
                                                                                                       var_list=self.theta_G)
        # BUG FIX: the sampler must feed (z, y) in the same order as the
        # training G_sample call above; the original passed (y, z) swapped,
        # which exchanges noise and labels under reuse=True.
        self.sampler = Generator_srresnet(self.z, self.y, self.num_resblock, self.Resnet_weight, reuse=True, train=False)

    def train(self):
        """Run the alternating D/G training loop; periodically sample and save."""
        print('begin training ...........')
        tf.global_variables_initializer().run()

        # sample_num bounds how many images go into each saved grid.
        sample_num = 64
        tot_num_samples = min(sample_num, self.batch_size)
        manifold_h = int(np.floor(np.sqrt(tot_num_samples)))
        manifold_w = int(np.floor(np.sqrt(tot_num_samples)))

        # Fixed noise/labels so saved samples are comparable across steps.
        self.sample = np.random.uniform(-1, 1, size=(self.batch_size, self.z_dim)).astype(np.float32)
        self.sample_y = self.data_Y[0:self.batch_size]

        counter = 0
        # Guard against printing the losses before the first update step
        # (possible when the very first iteration triggers a re-shuffle).
        d_loss, g_loss = 0.0, 0.0

        # Initial shuffle of the dataset.
        batch_offset = 0
        data_index = np.arange(self.data_X.shape[0])
        np.random.shuffle(data_index)
        self.data_X = self.data_X[data_index, :, :, :]
        self.data_Y = self.data_Y[data_index]

        # NOTE: each "epoch" consumes a single batch, and the tail smaller
        # than batch_size is dropped at every re-shuffle.
        for epoch in range(self.epoch):
            if batch_offset + self.batch_size > len(self.data_X):
                # Out of data: reset the offset and re-shuffle; no update
                # is performed on this iteration.
                batch_offset = 0
                data_index = np.arange(self.data_X.shape[0])
                np.random.shuffle(data_index)
                self.data_X = self.data_X[data_index, :, :, :]
                self.data_Y = self.data_Y[data_index]
            else:
                # Slice the next batch of images/labels and draw fresh noise.
                batch_images = self.data_X[batch_offset:batch_offset + self.batch_size]
                batch_codes = self.data_Y[batch_offset:batch_offset + self.batch_size]
                batch_z = np.random.uniform(-1, 1, [self.batch_size, self.z_dim]).astype(np.float32)

                # Update the discriminator three times per generator update.
                for _ in range(3):
                    _, d_loss = self.sess.run([self.d_updates, self.DC_loss],
                                              feed_dict={self.img: batch_images,
                                                         self.y: batch_codes,
                                                         self.z: batch_z})
                # Then update the generator once.
                _, g_loss, _ = self.sess.run([self.g_updates, self.GC_loss, self.G_sample],
                                             feed_dict={self.y: batch_codes, self.img: batch_images, self.z: batch_z})

                batch_offset = batch_offset + self.batch_size

            # Report losses every 10 steps.
            if (counter % 10) == 0:
                print('Epoch: %2d counter: %5d d_loss: %.8f, g_loss: %.8f' % (epoch, counter, d_loss, g_loss))

            # Save an image grid every 500 steps.
            if counter % 500 == 0:
                samples = self.sess.run(self.sampler,
                                        feed_dict={self.z: self.sample, self.y: self.sample_y})

                save_images(samples[:manifold_h * manifold_w, :, :, :], [manifold_h, manifold_w],
                            self.result_dir + '/{}.png'.format(str(counter).zfill(7)))

            # Checkpoint the model every 1000 steps.
            if counter % 1000 == 0:
                saver = tf.train.Saver(max_to_keep=5)
                saver.save(self.sess, self.checkpoint_dir + '/{}'.format(str(counter).zfill(7)))

            # Optional cyclic learning-rate schedule for the discriminator.
            # NOTE(review): learningRateD was baked into AdamOptimizer as a
            # Python float in build_model, so mutating it here does not
            # affect the already-built graph -- TODO confirm intent.
            if (counter % 100) == 0:
                if self.Cycle_lr:
                    self.learningRateD = self.learningRateD * 0.99
                    if self.learningRateD < 0.0001:
                        self.learningRateD = 2e-4

            # Optionally grow the adversarial weight la, capped at 25.
            # NOTE(review): la was likewise folded into the loss graph as a
            # constant, so this update does not change the trained loss.
            if (counter % 500) == 0:
                if self.Class_weight:
                    if self.la > 25:
                        self.la = 25
                    else:
                        self.la = self.la * 1.5

            counter += 1
|
flexible
|
{
"blob_id": "d3b00a8d410248aedb1c43354e89ccc298b56a3c",
"index": 7693,
"step-1": "<mask token>\n\n\nclass Skip_GAN(object):\n\n def __init__(self, sess, epoch, batch_size, dataset_name, result_dir,\n z_dim, y_dim, checkpoint_dir, num_resblock, Cycle_lr, Class_weight,\n Resnet_weight):\n self.sess = sess\n self.dataset_name = dataset_name\n self.result_dir = result_dir\n self.epoch = epoch\n self.batch_size = batch_size\n self.z_dim = z_dim\n self.y_dim = y_dim\n self.checkpoint_dir = checkpoint_dir\n self.num_resblock = num_resblock\n self.Cycle_lr = Cycle_lr\n self.Class_weight = Class_weight\n self.la = 10\n self.learningRateD = 0.0002\n self.learningRateG = 0.0002\n self.Resnet_weight = Resnet_weight\n if self.dataset_name == 'anime':\n print('loading anime .............')\n self.height = 96\n self.width = 96\n self.c_dim = 3\n self.data_X, self.data_Y = load_anime_old()\n print('self.data_X:', self.data_X.shape, 'self.data_y:', self.\n data_Y.shape)\n elif self.dataset_name == 'celebA':\n print('loading celebA ...............')\n self.height = 96\n self.width = 96\n self.c_dim = 3\n self.data_X, self.data_Y = load_CelebA()\n print('self.data_X:', self.data_X.shape, 'self.data_y:', self.\n data_Y.shape)\n else:\n print('Sorry there is no option for ', self.dataset_name)\n sys.exit()\n <mask token>\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass Skip_GAN(object):\n\n def __init__(self, sess, epoch, batch_size, dataset_name, result_dir,\n z_dim, y_dim, checkpoint_dir, num_resblock, Cycle_lr, Class_weight,\n Resnet_weight):\n self.sess = sess\n self.dataset_name = dataset_name\n self.result_dir = result_dir\n self.epoch = epoch\n self.batch_size = batch_size\n self.z_dim = z_dim\n self.y_dim = y_dim\n self.checkpoint_dir = checkpoint_dir\n self.num_resblock = num_resblock\n self.Cycle_lr = Cycle_lr\n self.Class_weight = Class_weight\n self.la = 10\n self.learningRateD = 0.0002\n self.learningRateG = 0.0002\n self.Resnet_weight = Resnet_weight\n if self.dataset_name == 'anime':\n print('loading anime .............')\n self.height = 96\n self.width = 96\n self.c_dim = 3\n self.data_X, self.data_Y = load_anime_old()\n print('self.data_X:', self.data_X.shape, 'self.data_y:', self.\n data_Y.shape)\n elif self.dataset_name == 'celebA':\n print('loading celebA ...............')\n self.height = 96\n self.width = 96\n self.c_dim = 3\n self.data_X, self.data_Y = load_CelebA()\n print('self.data_X:', self.data_X.shape, 'self.data_y:', self.\n data_Y.shape)\n else:\n print('Sorry there is no option for ', self.dataset_name)\n sys.exit()\n <mask token>\n\n def train(self):\n print('begin training ...........')\n tf.global_variables_initializer().run()\n sample_num = 64\n tot_num_samples = min(sample_num, self.batch_size)\n manifold_h = int(np.floor(np.sqrt(tot_num_samples)))\n manifold_w = int(np.floor(np.sqrt(tot_num_samples)))\n self.sample = np.random.uniform(-1, 1, size=(self.batch_size, self.\n z_dim)).astype(np.float32)\n self.sample_y = self.data_Y[0:self.batch_size]\n counter = 0\n batch_offset = 0\n data_index = np.arange(self.data_X.shape[0])\n np.random.shuffle(data_index)\n self.data_X = self.data_X[data_index, :, :, :]\n self.data_Y = self.data_Y[data_index]\n for epoch in range(self.epoch):\n if batch_offset + self.batch_size > len(self.data_X):\n batch_offset = 0\n 
data_index = np.arange(self.data_X.shape[0])\n np.random.shuffle(data_index)\n self.data_X = self.data_X[data_index, :, :, :]\n self.data_Y = self.data_Y[data_index]\n else:\n batch_images = self.data_X[batch_offset:batch_offset + self\n .batch_size]\n batch_codes = self.data_Y[batch_offset:batch_offset + self.\n batch_size]\n batch_z = np.random.uniform(-1, 1, [self.batch_size, self.\n z_dim]).astype(np.float32)\n for i_d_loss in range(3):\n _, d_loss = self.sess.run([self.d_updates, self.DC_loss\n ], feed_dict={self.img: batch_images, self.y:\n batch_codes, self.z: batch_z})\n for i_g_loss in range(1):\n _, g_loss, _ = self.sess.run([self.g_updates, self.\n GC_loss, self.G_sample], feed_dict={self.y:\n batch_codes, self.img: batch_images, self.z: batch_z})\n batch_offset = batch_offset + self.batch_size\n if counter % 10 == 0:\n print(\n 'Epoch: %2d counter: %5d d_loss: %.8f, g_loss: %.8f' %\n (epoch, counter, d_loss, g_loss))\n if counter % 500 == 0:\n samples = self.sess.run(self.sampler, feed_dict={self.z:\n self.sample, self.y: self.sample_y})\n save_images(samples[:manifold_h * manifold_w, :, :, :],\n [manifold_h, manifold_w], self.result_dir +\n '/{}.png'.format(str(counter).zfill(7)))\n if counter % 1000 == 0:\n saver = tf.train.Saver(max_to_keep=5)\n saver.save(self.sess, self.checkpoint_dir + '/{}'.\n format(str(counter).zfill(7)))\n if counter % 100 == 0:\n if self.Cycle_lr:\n self.learningRateD = self.learningRateD * 0.99\n if self.learningRateD < 0.0001:\n self.learningRateD = 0.0002\n if counter % 500 == 0:\n if self.Class_weight:\n if self.la > 25:\n self.la = 25\n else:\n self.la = self.la * 1.5\n counter += 1\n",
"step-3": "<mask token>\n\n\nclass Skip_GAN(object):\n\n def __init__(self, sess, epoch, batch_size, dataset_name, result_dir,\n z_dim, y_dim, checkpoint_dir, num_resblock, Cycle_lr, Class_weight,\n Resnet_weight):\n self.sess = sess\n self.dataset_name = dataset_name\n self.result_dir = result_dir\n self.epoch = epoch\n self.batch_size = batch_size\n self.z_dim = z_dim\n self.y_dim = y_dim\n self.checkpoint_dir = checkpoint_dir\n self.num_resblock = num_resblock\n self.Cycle_lr = Cycle_lr\n self.Class_weight = Class_weight\n self.la = 10\n self.learningRateD = 0.0002\n self.learningRateG = 0.0002\n self.Resnet_weight = Resnet_weight\n if self.dataset_name == 'anime':\n print('loading anime .............')\n self.height = 96\n self.width = 96\n self.c_dim = 3\n self.data_X, self.data_Y = load_anime_old()\n print('self.data_X:', self.data_X.shape, 'self.data_y:', self.\n data_Y.shape)\n elif self.dataset_name == 'celebA':\n print('loading celebA ...............')\n self.height = 96\n self.width = 96\n self.c_dim = 3\n self.data_X, self.data_Y = load_CelebA()\n print('self.data_X:', self.data_X.shape, 'self.data_y:', self.\n data_Y.shape)\n else:\n print('Sorry there is no option for ', self.dataset_name)\n sys.exit()\n\n def build_model(self):\n self.y = tf.placeholder(tf.float32, [None, self.y_dim], name='y')\n self.img = tf.placeholder(tf.float32, [self.batch_size, self.height,\n self.width, 3], name='img')\n self.z = tf.placeholder(tf.float32, [None, self.z_dim])\n self.G_sample = Generator_srresnet(self.z, self.y, self.\n num_resblock, self.Resnet_weight)\n print('The return of Generator:', self.G_sample)\n D_real, C_real = Discriminator_srresnet(self.img, dataset=self.\n dataset_name)\n print('The return of Discriminator:', D_real, C_real)\n D_fake, C_fake = Discriminator_srresnet(self.G_sample, dataset=self\n .dataset_name, reuse=True)\n print('The return of Discriminator:', D_fake, C_fake)\n self.C_real_loss = tf.reduce_mean(tf.reduce_sum(tf.nn.\n 
sigmoid_cross_entropy_with_logits(logits=C_real, labels=self.y),\n axis=1))\n self.C_fake_loss = tf.reduce_mean(tf.reduce_sum(tf.nn.\n sigmoid_cross_entropy_with_logits(logits=C_fake, labels=self.y),\n axis=1))\n D_real_loss = tf.reduce_mean(tf.nn.\n sigmoid_cross_entropy_with_logits(logits=D_real, labels=tf.\n ones_like(D_real)))\n D_fake_loss = tf.reduce_mean(tf.nn.\n sigmoid_cross_entropy_with_logits(logits=D_fake, labels=tf.\n zeros_like(D_fake)))\n \"\"\"注意 la也即是我是用动态学习率的时候要关注的参数 \n 但是我的目标是使得类别损失变得更加的大 而不是真伪的损失\"\"\"\n D_loss = D_real_loss + D_fake_loss\n self.DC_loss = self.la * D_loss + self.C_real_loss\n G_loss = tf.reduce_mean(tf.nn.sigmoid_cross_entropy_with_logits(\n logits=D_fake, labels=tf.ones_like(D_fake)))\n self.GC_loss = self.la * G_loss + self.C_fake_loss\n print('Calualtion the loss of Optimizer')\n self.theta_D = [v for v in tf.global_variables() if 'd_net' in v.name]\n self.theta_G = [v for v in tf.global_variables() if 'g_net' in v.name]\n with tf.control_dependencies(tf.get_collection(tf.GraphKeys.UPDATE_OPS)\n ):\n self.d_updates = tf.train.AdamOptimizer(self.learningRateD,\n beta1=0.5, beta2=0.9).minimize(self.DC_loss, var_list=self.\n theta_D)\n self.g_updates = tf.train.AdamOptimizer(self.learningRateG,\n beta1=0.5, beta2=0.9).minimize(self.GC_loss, var_list=self.\n theta_G)\n self.sampler = Generator_srresnet(self.y, self.z, self.num_resblock,\n self.Resnet_weight, reuse=True, train=False)\n\n def train(self):\n print('begin training ...........')\n tf.global_variables_initializer().run()\n sample_num = 64\n tot_num_samples = min(sample_num, self.batch_size)\n manifold_h = int(np.floor(np.sqrt(tot_num_samples)))\n manifold_w = int(np.floor(np.sqrt(tot_num_samples)))\n self.sample = np.random.uniform(-1, 1, size=(self.batch_size, self.\n z_dim)).astype(np.float32)\n self.sample_y = self.data_Y[0:self.batch_size]\n counter = 0\n batch_offset = 0\n data_index = np.arange(self.data_X.shape[0])\n np.random.shuffle(data_index)\n self.data_X = 
self.data_X[data_index, :, :, :]\n self.data_Y = self.data_Y[data_index]\n for epoch in range(self.epoch):\n if batch_offset + self.batch_size > len(self.data_X):\n batch_offset = 0\n data_index = np.arange(self.data_X.shape[0])\n np.random.shuffle(data_index)\n self.data_X = self.data_X[data_index, :, :, :]\n self.data_Y = self.data_Y[data_index]\n else:\n batch_images = self.data_X[batch_offset:batch_offset + self\n .batch_size]\n batch_codes = self.data_Y[batch_offset:batch_offset + self.\n batch_size]\n batch_z = np.random.uniform(-1, 1, [self.batch_size, self.\n z_dim]).astype(np.float32)\n for i_d_loss in range(3):\n _, d_loss = self.sess.run([self.d_updates, self.DC_loss\n ], feed_dict={self.img: batch_images, self.y:\n batch_codes, self.z: batch_z})\n for i_g_loss in range(1):\n _, g_loss, _ = self.sess.run([self.g_updates, self.\n GC_loss, self.G_sample], feed_dict={self.y:\n batch_codes, self.img: batch_images, self.z: batch_z})\n batch_offset = batch_offset + self.batch_size\n if counter % 10 == 0:\n print(\n 'Epoch: %2d counter: %5d d_loss: %.8f, g_loss: %.8f' %\n (epoch, counter, d_loss, g_loss))\n if counter % 500 == 0:\n samples = self.sess.run(self.sampler, feed_dict={self.z:\n self.sample, self.y: self.sample_y})\n save_images(samples[:manifold_h * manifold_w, :, :, :],\n [manifold_h, manifold_w], self.result_dir +\n '/{}.png'.format(str(counter).zfill(7)))\n if counter % 1000 == 0:\n saver = tf.train.Saver(max_to_keep=5)\n saver.save(self.sess, self.checkpoint_dir + '/{}'.\n format(str(counter).zfill(7)))\n if counter % 100 == 0:\n if self.Cycle_lr:\n self.learningRateD = self.learningRateD * 0.99\n if self.learningRateD < 0.0001:\n self.learningRateD = 0.0002\n if counter % 500 == 0:\n if self.Class_weight:\n if self.la > 25:\n self.la = 25\n else:\n self.la = self.la * 1.5\n counter += 1\n",
"step-4": "from Dataload import load_anime_old, save_images, load_CelebA\nfrom Srresnet_Model import Generator_srresnet, Discriminator_srresnet\nimport tensorflow as tf\nimport numpy as np\nimport sys\n\n\nclass Skip_GAN(object):\n\n def __init__(self, sess, epoch, batch_size, dataset_name, result_dir,\n z_dim, y_dim, checkpoint_dir, num_resblock, Cycle_lr, Class_weight,\n Resnet_weight):\n self.sess = sess\n self.dataset_name = dataset_name\n self.result_dir = result_dir\n self.epoch = epoch\n self.batch_size = batch_size\n self.z_dim = z_dim\n self.y_dim = y_dim\n self.checkpoint_dir = checkpoint_dir\n self.num_resblock = num_resblock\n self.Cycle_lr = Cycle_lr\n self.Class_weight = Class_weight\n self.la = 10\n self.learningRateD = 0.0002\n self.learningRateG = 0.0002\n self.Resnet_weight = Resnet_weight\n if self.dataset_name == 'anime':\n print('loading anime .............')\n self.height = 96\n self.width = 96\n self.c_dim = 3\n self.data_X, self.data_Y = load_anime_old()\n print('self.data_X:', self.data_X.shape, 'self.data_y:', self.\n data_Y.shape)\n elif self.dataset_name == 'celebA':\n print('loading celebA ...............')\n self.height = 96\n self.width = 96\n self.c_dim = 3\n self.data_X, self.data_Y = load_CelebA()\n print('self.data_X:', self.data_X.shape, 'self.data_y:', self.\n data_Y.shape)\n else:\n print('Sorry there is no option for ', self.dataset_name)\n sys.exit()\n\n def build_model(self):\n self.y = tf.placeholder(tf.float32, [None, self.y_dim], name='y')\n self.img = tf.placeholder(tf.float32, [self.batch_size, self.height,\n self.width, 3], name='img')\n self.z = tf.placeholder(tf.float32, [None, self.z_dim])\n self.G_sample = Generator_srresnet(self.z, self.y, self.\n num_resblock, self.Resnet_weight)\n print('The return of Generator:', self.G_sample)\n D_real, C_real = Discriminator_srresnet(self.img, dataset=self.\n dataset_name)\n print('The return of Discriminator:', D_real, C_real)\n D_fake, C_fake = 
Discriminator_srresnet(self.G_sample, dataset=self\n .dataset_name, reuse=True)\n print('The return of Discriminator:', D_fake, C_fake)\n self.C_real_loss = tf.reduce_mean(tf.reduce_sum(tf.nn.\n sigmoid_cross_entropy_with_logits(logits=C_real, labels=self.y),\n axis=1))\n self.C_fake_loss = tf.reduce_mean(tf.reduce_sum(tf.nn.\n sigmoid_cross_entropy_with_logits(logits=C_fake, labels=self.y),\n axis=1))\n D_real_loss = tf.reduce_mean(tf.nn.\n sigmoid_cross_entropy_with_logits(logits=D_real, labels=tf.\n ones_like(D_real)))\n D_fake_loss = tf.reduce_mean(tf.nn.\n sigmoid_cross_entropy_with_logits(logits=D_fake, labels=tf.\n zeros_like(D_fake)))\n \"\"\"注意 la也即是我是用动态学习率的时候要关注的参数 \n 但是我的目标是使得类别损失变得更加的大 而不是真伪的损失\"\"\"\n D_loss = D_real_loss + D_fake_loss\n self.DC_loss = self.la * D_loss + self.C_real_loss\n G_loss = tf.reduce_mean(tf.nn.sigmoid_cross_entropy_with_logits(\n logits=D_fake, labels=tf.ones_like(D_fake)))\n self.GC_loss = self.la * G_loss + self.C_fake_loss\n print('Calualtion the loss of Optimizer')\n self.theta_D = [v for v in tf.global_variables() if 'd_net' in v.name]\n self.theta_G = [v for v in tf.global_variables() if 'g_net' in v.name]\n with tf.control_dependencies(tf.get_collection(tf.GraphKeys.UPDATE_OPS)\n ):\n self.d_updates = tf.train.AdamOptimizer(self.learningRateD,\n beta1=0.5, beta2=0.9).minimize(self.DC_loss, var_list=self.\n theta_D)\n self.g_updates = tf.train.AdamOptimizer(self.learningRateG,\n beta1=0.5, beta2=0.9).minimize(self.GC_loss, var_list=self.\n theta_G)\n self.sampler = Generator_srresnet(self.y, self.z, self.num_resblock,\n self.Resnet_weight, reuse=True, train=False)\n\n def train(self):\n print('begin training ...........')\n tf.global_variables_initializer().run()\n sample_num = 64\n tot_num_samples = min(sample_num, self.batch_size)\n manifold_h = int(np.floor(np.sqrt(tot_num_samples)))\n manifold_w = int(np.floor(np.sqrt(tot_num_samples)))\n self.sample = np.random.uniform(-1, 1, size=(self.batch_size, self.\n 
z_dim)).astype(np.float32)\n self.sample_y = self.data_Y[0:self.batch_size]\n counter = 0\n batch_offset = 0\n data_index = np.arange(self.data_X.shape[0])\n np.random.shuffle(data_index)\n self.data_X = self.data_X[data_index, :, :, :]\n self.data_Y = self.data_Y[data_index]\n for epoch in range(self.epoch):\n if batch_offset + self.batch_size > len(self.data_X):\n batch_offset = 0\n data_index = np.arange(self.data_X.shape[0])\n np.random.shuffle(data_index)\n self.data_X = self.data_X[data_index, :, :, :]\n self.data_Y = self.data_Y[data_index]\n else:\n batch_images = self.data_X[batch_offset:batch_offset + self\n .batch_size]\n batch_codes = self.data_Y[batch_offset:batch_offset + self.\n batch_size]\n batch_z = np.random.uniform(-1, 1, [self.batch_size, self.\n z_dim]).astype(np.float32)\n for i_d_loss in range(3):\n _, d_loss = self.sess.run([self.d_updates, self.DC_loss\n ], feed_dict={self.img: batch_images, self.y:\n batch_codes, self.z: batch_z})\n for i_g_loss in range(1):\n _, g_loss, _ = self.sess.run([self.g_updates, self.\n GC_loss, self.G_sample], feed_dict={self.y:\n batch_codes, self.img: batch_images, self.z: batch_z})\n batch_offset = batch_offset + self.batch_size\n if counter % 10 == 0:\n print(\n 'Epoch: %2d counter: %5d d_loss: %.8f, g_loss: %.8f' %\n (epoch, counter, d_loss, g_loss))\n if counter % 500 == 0:\n samples = self.sess.run(self.sampler, feed_dict={self.z:\n self.sample, self.y: self.sample_y})\n save_images(samples[:manifold_h * manifold_w, :, :, :],\n [manifold_h, manifold_w], self.result_dir +\n '/{}.png'.format(str(counter).zfill(7)))\n if counter % 1000 == 0:\n saver = tf.train.Saver(max_to_keep=5)\n saver.save(self.sess, self.checkpoint_dir + '/{}'.\n format(str(counter).zfill(7)))\n if counter % 100 == 0:\n if self.Cycle_lr:\n self.learningRateD = self.learningRateD * 0.99\n if self.learningRateD < 0.0001:\n self.learningRateD = 0.0002\n if counter % 500 == 0:\n if self.Class_weight:\n if self.la > 25:\n self.la = 25\n 
else:\n self.la = self.la * 1.5\n counter += 1\n",
"step-5": "# -*- coding: utf-8 -*-\r\n# @Time : 2020/3/4 10:34\r\n# @Author : YYLin\r\n# @Email : 854280599@qq.com\r\n# @File : Skip_GAN.py\r\nfrom Dataload import load_anime_old, save_images, load_CelebA\r\nfrom Srresnet_Model import Generator_srresnet, Discriminator_srresnet\r\nimport tensorflow as tf\r\nimport numpy as np\r\nimport sys\r\n\r\n\r\nclass Skip_GAN(object):\r\n def __init__(self, sess, epoch, batch_size, dataset_name, result_dir, z_dim, y_dim, checkpoint_dir, num_resblock,\r\n Cycle_lr, Class_weight, Resnet_weight):\r\n self.sess = sess\r\n self.dataset_name = dataset_name\r\n self.result_dir = result_dir\r\n self.epoch = epoch\r\n self.batch_size = batch_size\r\n self.z_dim = z_dim\r\n self.y_dim = y_dim\r\n self.checkpoint_dir = checkpoint_dir\r\n self.num_resblock = num_resblock\r\n self.Cycle_lr = Cycle_lr\r\n self.Class_weight = Class_weight\r\n\r\n # La is used to increase the weight of image authenticity\r\n self.la = 10\r\n self.learningRateD = 2e-4\r\n self.learningRateG = 2e-4\r\n\r\n #\r\n self.Resnet_weight = Resnet_weight\r\n\r\n # 加载不同的数据集\r\n if self.dataset_name == 'anime':\r\n print('loading anime .............')\r\n self.height = 96\r\n self.width = 96\r\n self.c_dim = 3\r\n\r\n self.data_X, self.data_Y = load_anime_old()\r\n print('self.data_X:', self.data_X.shape, 'self.data_y:', self.data_Y.shape)\r\n\r\n elif self.dataset_name == 'celebA':\r\n print('loading celebA ...............')\r\n self.height = 96\r\n self.width = 96\r\n self.c_dim = 3\r\n\r\n self.data_X, self.data_Y = load_CelebA()\r\n print('self.data_X:', self.data_X.shape, 'self.data_y:', self.data_Y.shape)\r\n else:\r\n print('Sorry there is no option for ', self.dataset_name)\r\n sys.exit()\r\n\r\n def build_model(self):\r\n # some placeholder in our model\r\n self.y = tf.placeholder(tf.float32, [None, self.y_dim], name='y')\r\n self.img = tf.placeholder(tf.float32, [self.batch_size, self.height, self.width, 3], name='img')\r\n self.z = tf.placeholder(tf.float32, 
[None, self.z_dim])\r\n\r\n self.G_sample = Generator_srresnet(self.z, self.y, self.num_resblock, self.Resnet_weight)\r\n print('The return of Generator:', self.G_sample)\r\n\r\n # 识别器对真实图像进行判断\r\n D_real, C_real = Discriminator_srresnet(self.img, dataset=self.dataset_name)\r\n print('The return of Discriminator:', D_real, C_real)\r\n\r\n # 识别器对生成图像进行判断\r\n D_fake, C_fake = Discriminator_srresnet(self.G_sample, dataset=self.dataset_name, reuse=True)\r\n print('The return of Discriminator:', D_fake, C_fake)\r\n\r\n # 判断图像的类别\r\n self.C_real_loss = tf.reduce_mean(\r\n tf.reduce_sum(tf.nn.sigmoid_cross_entropy_with_logits(logits=C_real, labels=self.y), axis=1))\r\n self.C_fake_loss = tf.reduce_mean(\r\n tf.reduce_sum(tf.nn.sigmoid_cross_entropy_with_logits(logits=C_fake, labels=self.y), axis=1))\r\n\r\n # D_Loss 希望真实图像被判断为1 希望生成图像被判断为0\r\n D_real_loss = tf.reduce_mean(\r\n tf.nn.sigmoid_cross_entropy_with_logits(logits=D_real, labels=tf.ones_like(D_real)))\r\n D_fake_loss = tf.reduce_mean(\r\n tf.nn.sigmoid_cross_entropy_with_logits(logits=D_fake, labels=tf.zeros_like(D_fake)))\r\n\r\n '''注意 la也即是我是用动态学习率的时候要关注的参数 \r\n 但是我的目标是使得类别损失变得更加的大 而不是真伪的损失'''\r\n D_loss = D_real_loss + D_fake_loss\r\n self.DC_loss = (self.la * D_loss + self.C_real_loss)\r\n\r\n # 对生成模型的损失也在关注该模型\r\n G_loss = tf.reduce_mean(tf.nn.sigmoid_cross_entropy_with_logits(logits=D_fake, labels=tf.ones_like(D_fake)))\r\n self.GC_loss = (self.la * G_loss + self.C_fake_loss)\r\n\r\n print('Calualtion the loss of Optimizer')\r\n self.theta_D = [v for v in tf.global_variables() if 'd_net' in v.name]\r\n self.theta_G = [v for v in tf.global_variables() if 'g_net' in v.name]\r\n\r\n with tf.control_dependencies(tf.get_collection(tf.GraphKeys.UPDATE_OPS)):\r\n self.d_updates = tf.train.AdamOptimizer(self.learningRateD, beta1=0.5, beta2=0.9).minimize(self.DC_loss,\r\n var_list=self.theta_D)\r\n self.g_updates = tf.train.AdamOptimizer(self.learningRateG, beta1=0.5, beta2=0.9).minimize(self.GC_loss,\r\n 
var_list=self.theta_G)\r\n self.sampler = Generator_srresnet(self.y, self.z, self.num_resblock, self.Resnet_weight, reuse=True, train=False)\r\n\r\n def train(self):\r\n print('begin training ...........')\r\n tf.global_variables_initializer().run()\r\n\r\n # sample_num 用于控制存储图像\r\n sample_num = 64\r\n tot_num_samples = min(sample_num, self.batch_size)\r\n manifold_h = int(np.floor(np.sqrt(tot_num_samples)))\r\n manifold_w = int(np.floor(np.sqrt(tot_num_samples)))\r\n\r\n # 定义随机噪音以及标签 2019/09/29\r\n self.sample = np.random.uniform(-1, 1, size=(self.batch_size, self.z_dim)).astype(np.float32)\r\n self.sample_y = self.data_Y[0:self.batch_size]\r\n\r\n counter = 0\r\n\r\n # shuffle the dataset 2019/9/29\r\n batch_offset = 0\r\n data_index = np.arange(self.data_X.shape[0])\r\n np.random.shuffle(data_index)\r\n self.data_X = self.data_X[data_index, :, :, :]\r\n self.data_Y = self.data_Y[data_index]\r\n\r\n # 这种方式会有使得小于batch_size个数据用不上\r\n for epoch in range(self.epoch):\r\n if batch_offset + self.batch_size > len(self.data_X):\r\n batch_offset = 0\r\n # shuffle dataset\r\n data_index = np.arange(self.data_X.shape[0])\r\n np.random.shuffle(data_index)\r\n self.data_X = self.data_X[data_index, :, :, :]\r\n self.data_Y = self.data_Y[data_index]\r\n else:\r\n # 首先是得到输入的数据\r\n batch_images = self.data_X[batch_offset:batch_offset + self.batch_size]\r\n batch_codes = self.data_Y[batch_offset:batch_offset + self.batch_size]\r\n\r\n batch_z = np.random.uniform(-1, 1, [self.batch_size, self.z_dim]).astype(np.float32)\r\n\r\n # 然后更新识别器\r\n for i_d_loss in range(3):\r\n _, d_loss = self.sess.run([self.d_updates, self.DC_loss], feed_dict={self.img: batch_images,\r\n self.y: batch_codes,\r\n self.z: batch_z})\r\n for i_g_loss in range(1):\r\n # 最后更新生成器模型\r\n _, g_loss, _ = self.sess.run([self.g_updates, self.GC_loss, self.G_sample],\r\n feed_dict={self.y: batch_codes, self.img: batch_images, self.z: batch_z})\r\n\r\n batch_offset = batch_offset + self.batch_size\r\n\r\n # display the 
loss every 10 steps\r\n if (counter % 10) == 0:\r\n print('Epoch: %2d counter: %5d d_loss: %.8f, g_loss: %.8f' % (epoch, counter, d_loss, g_loss))\r\n\r\n # save image every 500 steps\r\n if counter % 500 == 0:\r\n samples = self.sess.run(self.sampler,\r\n feed_dict={self.z: self.sample, self.y: self.sample_y})\r\n\r\n save_images(samples[:manifold_h * manifold_w, :, :, :], [manifold_h, manifold_w],\r\n self.result_dir + '/{}.png'.format(str(counter).zfill(7)))\r\n\r\n # save the model every 1000 steps\r\n if counter % 1000 == 0:\r\n saver = tf.train.Saver(max_to_keep=5)\r\n saver.save(self.sess, self.checkpoint_dir + '/{}'.format(str(counter).zfill(7)))\r\n\r\n if (counter % 100) == 0:\r\n if self.Cycle_lr:\r\n self.learningRateD = self.learningRateD * 0.99\r\n if self.learningRateD < 0.0001:\r\n self.learningRateD = 2e-4\r\n\r\n if (counter % 500) == 0:\r\n if self.Class_weight:\r\n if self.la > 25:\r\n self.la = 25\r\n else:\r\n self.la = self.la * 1.5\r\n\r\n counter += 1\r\n",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Migration(migrations.Migration):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Migration(migrations.Migration):
dependencies = [('Store', '0004_remove_product_mcat')]
operations = [migrations.RemoveField(model_name='category', name=
'main_cat'), migrations.AddField(model_name='category', name=
'main_cat', field=models.ForeignKey(blank=True, null=True,
on_delete=django.db.models.deletion.SET_NULL, to='Store.maincategory'))
]
<|reserved_special_token_1|>
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Replace Category.main_cat with a nullable SET_NULL FK to MainCategory."""

    dependencies = [
        ('Store', '0004_remove_product_mcat'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='category',
            name='main_cat',
        ),
        migrations.AddField(
            model_name='category',
            name='main_cat',
            field=models.ForeignKey(
                blank=True,
                null=True,
                on_delete=django.db.models.deletion.SET_NULL,
                to='Store.maincategory',
            ),
        ),
    ]
<|reserved_special_token_1|>
# Generated by Django 3.1.1 on 2020-10-14 16:26
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Swap Category.main_cat for a nullable FK to Store.MainCategory."""

    dependencies = [('Store', '0004_remove_product_mcat')]

    operations = [
        migrations.RemoveField(model_name='category', name='main_cat'),
        migrations.AddField(
            model_name='category',
            name='main_cat',
            field=models.ForeignKey(blank=True, null=True,
                                    on_delete=django.db.models.deletion.SET_NULL,
                                    to='Store.maincategory'),
        ),
    ]
|
flexible
|
{
"blob_id": "ec39dae7217ddc48b1ab5163d234542cb36c1d48",
"index": 5351,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass Migration(migrations.Migration):\n <mask token>\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass Migration(migrations.Migration):\n dependencies = [('Store', '0004_remove_product_mcat')]\n operations = [migrations.RemoveField(model_name='category', name=\n 'main_cat'), migrations.AddField(model_name='category', name=\n 'main_cat', field=models.ForeignKey(blank=True, null=True,\n on_delete=django.db.models.deletion.SET_NULL, to='Store.maincategory'))\n ]\n",
"step-4": "from django.db import migrations, models\nimport django.db.models.deletion\n\n\nclass Migration(migrations.Migration):\n dependencies = [('Store', '0004_remove_product_mcat')]\n operations = [migrations.RemoveField(model_name='category', name=\n 'main_cat'), migrations.AddField(model_name='category', name=\n 'main_cat', field=models.ForeignKey(blank=True, null=True,\n on_delete=django.db.models.deletion.SET_NULL, to='Store.maincategory'))\n ]\n",
"step-5": "# Generated by Django 3.1.1 on 2020-10-14 16:26\n\nfrom django.db import migrations, models\nimport django.db.models.deletion\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('Store', '0004_remove_product_mcat'),\n ]\n\n operations = [\n migrations.RemoveField(\n model_name='category',\n name='main_cat',\n ),\n migrations.AddField(\n model_name='category',\n name='main_cat',\n field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='Store.maincategory'),\n ),\n ]\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
import sys
import os  # NOTE(review): appears unused in this script
# NOTE(review): hard-coded, machine-specific path; breaks on any other machine.
sys.path.append("C:/Users/Laptop/Documents/Repos/udacity_stats_functions/descriptive")
# Local course module; presumably provides z-score helpers such as
# get_z_from_p — TODO confirm.
import normal_distribution_06
#import sampling_distributions_07
def lower_upper_confidence_intervals(avg, SD):
    """Return the (lower, upper) ~95% confidence bounds around *avg*.

    avg -- the sample mean (x-bar), e.g. the value at the point of interest.
    SD  -- the standard error (population SD divided by sqrt(sample size)).
    """
    margin = 2 * SD
    return (avg - margin, avg + margin)
#7. Quiz: Confidence Interval Bounds
print(lower_upper_confidence_intervals(40, 2.71))

#8. Quiz: Exact Z-Scores
# BUG FIX: get_z_from_p was called as a bare name, but only the module
# `normal_distribution_06` is imported, so this raised NameError; qualify
# the call (assumes the module defines get_z_from_p — TODO confirm).
print(normal_distribution_06.get_z_from_p(0.975))
|
normal
|
{
"blob_id": "d423b0bc6cd9ea9795317750141ad5f5eab01636",
"index": 1886,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef lower_upper_confidence_intervals(avg, SD):\n lower = avg - 2 * SD\n upper = avg + 2 * SD\n return lower, upper\n\n\n<mask token>\n",
"step-3": "<mask token>\nsys.path.append(\n 'C:/Users/Laptop/Documents/Repos/udacity_stats_functions/descriptive')\n<mask token>\n\n\ndef lower_upper_confidence_intervals(avg, SD):\n lower = avg - 2 * SD\n upper = avg + 2 * SD\n return lower, upper\n\n\nprint(lower_upper_confidence_intervals(40, 2.71))\nprint(get_z_from_p(0.975))\n",
"step-4": "import sys\nimport os\nsys.path.append(\n 'C:/Users/Laptop/Documents/Repos/udacity_stats_functions/descriptive')\nimport normal_distribution_06\n\n\ndef lower_upper_confidence_intervals(avg, SD):\n lower = avg - 2 * SD\n upper = avg + 2 * SD\n return lower, upper\n\n\nprint(lower_upper_confidence_intervals(40, 2.71))\nprint(get_z_from_p(0.975))\n",
"step-5": "import sys\nimport os\nsys.path.append(\"C:/Users/Laptop/Documents/Repos/udacity_stats_functions/descriptive\")\nimport normal_distribution_06\n#import sampling_distributions_07\n\ndef lower_upper_confidence_intervals(avg, SD):\n #avg is x bar. The mean value at the \"would be\" point. ie Bieber Tweeter\n #SD is standard error (standard deviation of population dataset dvided by sqrt(number_in_sample)\n lower = avg-2*SD\n upper = avg+2*SD\n return((lower, upper))\n \n#7. Quiz: Confidence Interval Bounds\nprint(lower_upper_confidence_intervals(40, 2.71))\n\n#8. Quiz: Exact Z-Scores\nprint(get_z_from_p(0.975))",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
class GoalCategory(NestedSet):
nsm_parent_field = 'parent_goal_category'
def on_update(self):
self.validate_name_with_goal()
super(GoalCategory, self).on_update()
self.validate_one_root()
def validate_name_with_goal(self):
if frappe.db.exists('Goal', self.name):
frappe.msgprint(_('A goal with the same name already exists'),
raise_exception=1)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class GoalCategory(NestedSet):
nsm_parent_field = 'parent_goal_category'
def on_update(self):
self.validate_name_with_goal()
super(GoalCategory, self).on_update()
self.validate_one_root()
def validate_name_with_goal(self):
if frappe.db.exists('Goal', self.name):
frappe.msgprint(_('A goal with the same name already exists'),
raise_exception=1)
def get_parent_goal_categories(goal_category):
lft, rgt = frappe.db.get_value('Goal Category', goal_category, ['lft',
'rgt'])
return frappe.db.sql(
"""select name from `tabGoal Category`
where lft <= %s and rgt >= %s
order by lft asc"""
, (lft, rgt), as_dict=True)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class GoalCategory(NestedSet):
nsm_parent_field = 'parent_goal_category'
def on_update(self):
self.validate_name_with_goal()
super(GoalCategory, self).on_update()
self.validate_one_root()
def validate_name_with_goal(self):
if frappe.db.exists('Goal', self.name):
frappe.msgprint(_('A goal with the same name already exists'),
raise_exception=1)
def get_parent_goal_categories(goal_category):
lft, rgt = frappe.db.get_value('Goal Category', goal_category, ['lft',
'rgt'])
return frappe.db.sql(
"""select name from `tabGoal Category`
where lft <= %s and rgt >= %s
order by lft asc"""
, (lft, rgt), as_dict=True)
def on_doctype_update():
frappe.db.add_index('Goal Category', ['lft', 'rgt'])
<|reserved_special_token_1|>
from __future__ import unicode_literals
import frappe
from frappe import _
from frappe.utils.nestedset import NestedSet
class GoalCategory(NestedSet):
nsm_parent_field = 'parent_goal_category'
def on_update(self):
self.validate_name_with_goal()
super(GoalCategory, self).on_update()
self.validate_one_root()
def validate_name_with_goal(self):
if frappe.db.exists('Goal', self.name):
frappe.msgprint(_('A goal with the same name already exists'),
raise_exception=1)
def get_parent_goal_categories(goal_category):
lft, rgt = frappe.db.get_value('Goal Category', goal_category, ['lft',
'rgt'])
return frappe.db.sql(
"""select name from `tabGoal Category`
where lft <= %s and rgt >= %s
order by lft asc"""
, (lft, rgt), as_dict=True)
def on_doctype_update():
frappe.db.add_index('Goal Category', ['lft', 'rgt'])
<|reserved_special_token_1|>
# -*- coding: utf-8 -*-
# Copyright (c) 2018, HSCH and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe
from frappe import _
from frappe.utils.nestedset import NestedSet
class GoalCategory(NestedSet):
	"""Tree node (frappe NestedSet) for a hierarchy of goal categories."""

	# Doctype field that points at this node's parent in the tree.
	nsm_parent_field = 'parent_goal_category'

	def on_update(self):
		# Reject name clashes with Goal docs before NestedSet rebuilds the tree.
		self.validate_name_with_goal()
		super(GoalCategory, self).on_update()
		self.validate_one_root()

	def validate_name_with_goal(self):
		# Category names share a namespace with Goal documents.
		if frappe.db.exists("Goal", self.name):
			frappe.msgprint(_("A goal with the same name already exists"), raise_exception=1)
def get_parent_goal_categories(goal_category):
	"""Return the ancestors of *goal_category* (itself included), root first.

	In a NestedSet tree the ancestors are exactly the nodes whose
	(lft, rgt) interval encloses this node's interval.
	"""
	node_lft, node_rgt = frappe.db.get_value("Goal Category", goal_category, ['lft', 'rgt'])
	return frappe.db.sql("""select name from `tabGoal Category`
		where lft <= %s and rgt >= %s
		order by lft asc""", (node_lft, node_rgt), as_dict=True)
def on_doctype_update():
	"""Create a composite (lft, rgt) index to speed up NestedSet queries."""
	index_columns = ["lft", "rgt"]
	frappe.db.add_index("Goal Category", index_columns)
|
flexible
|
{
"blob_id": "c6055c6b67ac28d304ed34ddc2f81e59da8e7f1b",
"index": 1103,
"step-1": "<mask token>\n\n\nclass GoalCategory(NestedSet):\n nsm_parent_field = 'parent_goal_category'\n\n def on_update(self):\n self.validate_name_with_goal()\n super(GoalCategory, self).on_update()\n self.validate_one_root()\n\n def validate_name_with_goal(self):\n if frappe.db.exists('Goal', self.name):\n frappe.msgprint(_('A goal with the same name already exists'),\n raise_exception=1)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass GoalCategory(NestedSet):\n nsm_parent_field = 'parent_goal_category'\n\n def on_update(self):\n self.validate_name_with_goal()\n super(GoalCategory, self).on_update()\n self.validate_one_root()\n\n def validate_name_with_goal(self):\n if frappe.db.exists('Goal', self.name):\n frappe.msgprint(_('A goal with the same name already exists'),\n raise_exception=1)\n\n\ndef get_parent_goal_categories(goal_category):\n lft, rgt = frappe.db.get_value('Goal Category', goal_category, ['lft',\n 'rgt'])\n return frappe.db.sql(\n \"\"\"select name from `tabGoal Category`\n\t\twhere lft <= %s and rgt >= %s\n\t\torder by lft asc\"\"\"\n , (lft, rgt), as_dict=True)\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass GoalCategory(NestedSet):\n nsm_parent_field = 'parent_goal_category'\n\n def on_update(self):\n self.validate_name_with_goal()\n super(GoalCategory, self).on_update()\n self.validate_one_root()\n\n def validate_name_with_goal(self):\n if frappe.db.exists('Goal', self.name):\n frappe.msgprint(_('A goal with the same name already exists'),\n raise_exception=1)\n\n\ndef get_parent_goal_categories(goal_category):\n lft, rgt = frappe.db.get_value('Goal Category', goal_category, ['lft',\n 'rgt'])\n return frappe.db.sql(\n \"\"\"select name from `tabGoal Category`\n\t\twhere lft <= %s and rgt >= %s\n\t\torder by lft asc\"\"\"\n , (lft, rgt), as_dict=True)\n\n\ndef on_doctype_update():\n frappe.db.add_index('Goal Category', ['lft', 'rgt'])\n",
"step-4": "from __future__ import unicode_literals\nimport frappe\nfrom frappe import _\nfrom frappe.utils.nestedset import NestedSet\n\n\nclass GoalCategory(NestedSet):\n nsm_parent_field = 'parent_goal_category'\n\n def on_update(self):\n self.validate_name_with_goal()\n super(GoalCategory, self).on_update()\n self.validate_one_root()\n\n def validate_name_with_goal(self):\n if frappe.db.exists('Goal', self.name):\n frappe.msgprint(_('A goal with the same name already exists'),\n raise_exception=1)\n\n\ndef get_parent_goal_categories(goal_category):\n lft, rgt = frappe.db.get_value('Goal Category', goal_category, ['lft',\n 'rgt'])\n return frappe.db.sql(\n \"\"\"select name from `tabGoal Category`\n\t\twhere lft <= %s and rgt >= %s\n\t\torder by lft asc\"\"\"\n , (lft, rgt), as_dict=True)\n\n\ndef on_doctype_update():\n frappe.db.add_index('Goal Category', ['lft', 'rgt'])\n",
"step-5": "# -*- coding: utf-8 -*-\n# Copyright (c) 2018, HSCH and contributors\n# For license information, please see license.txt\n\nfrom __future__ import unicode_literals\nimport frappe\nfrom frappe import _\n\n\nfrom frappe.utils.nestedset import NestedSet\nclass GoalCategory(NestedSet):\n\tnsm_parent_field = 'parent_goal_category';\n\n\tdef on_update(self):\n\t\tself.validate_name_with_goal()\n\t\tsuper(GoalCategory, self).on_update()\n\t\tself.validate_one_root()\n\n\tdef validate_name_with_goal(self):\n\t\tif frappe.db.exists(\"Goal\", self.name):\n\t\t\tfrappe.msgprint(_(\"A goal with the same name already exists\"), raise_exception=1)\n\ndef get_parent_goal_categories(goal_category):\n\tlft, rgt = frappe.db.get_value(\"Goal Category\", goal_category, ['lft', 'rgt'])\n\n\treturn frappe.db.sql(\"\"\"select name from `tabGoal Category`\n\t\twhere lft <= %s and rgt >= %s\n\t\torder by lft asc\"\"\", (lft, rgt), as_dict=True)\n\ndef on_doctype_update():\n\tfrappe.db.add_index(\"Goal Category\", [\"lft\", \"rgt\"])\n\n\n",
"step-ids": [
4,
5,
6,
7,
8
]
}
|
[
4,
5,
6,
7,
8
] |
from django.shortcuts import render
from django.http import HttpResponse
from chats.models import Chat
from usuario.models import Usuario
# Create your views here.
def chat(request):
    """Render the chat page with every Chat row, ordered by id_chat."""
    context = {'chat': Chat.objects.order_by('id_chat')}
    return render(request, 'chats/Chat.html', context=context)
|
normal
|
{
"blob_id": "4a14265a9a2338be66e31110bba696e224b6a70f",
"index": 8395,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef chat(request):\n chat_list = Chat.objects.order_by('id_chat')\n chat_dict = {'chat': chat_list}\n return render(request, 'chats/Chat.html', context=chat_dict)\n",
"step-3": "from django.shortcuts import render\nfrom django.http import HttpResponse\nfrom chats.models import Chat\nfrom usuario.models import Usuario\n\n\ndef chat(request):\n chat_list = Chat.objects.order_by('id_chat')\n chat_dict = {'chat': chat_list}\n return render(request, 'chats/Chat.html', context=chat_dict)\n",
"step-4": "from django.shortcuts import render\nfrom django.http import HttpResponse\nfrom chats.models import Chat\nfrom usuario.models import Usuario\n\n# Create your views here.\ndef chat(request):\n \n chat_list = Chat.objects.order_by(\"id_chat\")\n chat_dict = {'chat': chat_list}\n\n return render(request,'chats/Chat.html', context=chat_dict)",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def test_answer():
import sys
answer1 = None
answer2 = None
answer3 = None
try:
answer1 = fizz_buzz(3, 5, 16)
answer2 = fizz_buzz(2, 7, 20)
answer3 = fizz_buzz(100)
except:
print('An error occurred:', sys.exc_info()[1])
assert answer1 == [1, 2, 'Fizz', 4, 'Buzz', 'Fizz', 7, 8, 'Fizz',
'Buzz', 11, 'Fizz', 13, 14, 'FizzBuzz']
assert answer2 == [1, 'Fizz', 3, 'Fizz', 5, 'Fizz', 'Buzz', 'Fizz', 9,
'Fizz', 11, 'Fizz', 13, 'FizzBuzz', 15, 'Fizz', 17, 'Fizz', 19]
assert answer3 == None
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def fizz_buzz(num1, num2, end_range):
if not (isinstance(num1, int) and isinstance(num2, int) and isinstance(
end_range, int)) or (num1 < 0 or num2 < 0 or end_range < 0):
return 'Input should be a positive integer'
result = []
for i in range(1, end_range):
output = i
if i % num1 == 0 and i % num2 == 0:
output = 'FizzBuzz'
elif i % num1 == 0:
output = 'Fizz'
elif i % num2 == 0:
output = 'Buzz'
result.append(output)
print(output)
return result
def test_answer():
import sys
answer1 = None
answer2 = None
answer3 = None
try:
answer1 = fizz_buzz(3, 5, 16)
answer2 = fizz_buzz(2, 7, 20)
answer3 = fizz_buzz(100)
except:
print('An error occurred:', sys.exc_info()[1])
assert answer1 == [1, 2, 'Fizz', 4, 'Buzz', 'Fizz', 7, 8, 'Fizz',
'Buzz', 11, 'Fizz', 13, 14, 'FizzBuzz']
assert answer2 == [1, 'Fizz', 3, 'Fizz', 5, 'Fizz', 'Buzz', 'Fizz', 9,
'Fizz', 11, 'Fizz', 13, 'FizzBuzz', 15, 'Fizz', 17, 'Fizz', 19]
assert answer3 == None
<|reserved_special_token_1|>
__doc__
def fizz_buzz(num1, num2, end_range):
if not (isinstance(num1, int) and isinstance(num2, int) and isinstance(
end_range, int)) or (num1 < 0 or num2 < 0 or end_range < 0):
return 'Input should be a positive integer'
result = []
for i in range(1, end_range):
output = i
if i % num1 == 0 and i % num2 == 0:
output = 'FizzBuzz'
elif i % num1 == 0:
output = 'Fizz'
elif i % num2 == 0:
output = 'Buzz'
result.append(output)
print(output)
return result
def test_answer():
import sys
answer1 = None
answer2 = None
answer3 = None
try:
answer1 = fizz_buzz(3, 5, 16)
answer2 = fizz_buzz(2, 7, 20)
answer3 = fizz_buzz(100)
except:
print('An error occurred:', sys.exc_info()[1])
assert answer1 == [1, 2, 'Fizz', 4, 'Buzz', 'Fizz', 7, 8, 'Fizz',
'Buzz', 11, 'Fizz', 13, 14, 'FizzBuzz']
assert answer2 == [1, 'Fizz', 3, 'Fizz', 5, 'Fizz', 'Buzz', 'Fizz', 9,
'Fizz', 11, 'Fizz', 13, 'FizzBuzz', 15, 'Fizz', 17, 'Fizz', 19]
assert answer3 == None
<|reserved_special_token_1|>
__doc__
def fizz_buzz(num1, num2, end_range):
    """Parameterized FizzBuzz over 1 .. end_range-1.

    Multiples of num1 become "Fizz", of num2 "Buzz", of both "FizzBuzz";
    every other value stays a plain int. Each entry is printed as it is
    produced and the full list is returned. If any argument is not an int,
    or any is negative, an error string is returned instead.
    """
    all_ints = all(isinstance(v, int) for v in (num1, num2, end_range))
    if not all_ints or min(num1, num2, end_range) < 0:
        return "Input should be a positive integer"

    # The list of outputs is kept so callers (and tests) can inspect it.
    result = []
    for n in range(1, end_range):
        hits_first = n % num1 == 0
        hits_second = n % num2 == 0
        if hits_first and hits_second:
            entry = "FizzBuzz"
        elif hits_first:
            entry = "Fizz"
        elif hits_second:
            entry = "Buzz"
        else:
            entry = n
        result.append(entry)
        print(entry)
    return result
def test_answer():
    """Smoke-test fizz_buzz: two happy paths plus a wrong-arity call.

    fizz_buzz(100) passes one argument to a three-argument function, so it
    raises TypeError inside the try block and answer3 stays None.
    """
    import sys

    answer1 = None
    answer2 = None
    answer3 = None
    try:
        answer1 = fizz_buzz(3, 5, 16)
        answer2 = fizz_buzz(2, 7, 20)
        answer3 = fizz_buzz(100)
    except Exception:
        # FIX: the bare `except:` also swallowed SystemExit and
        # KeyboardInterrupt; Exception is wide enough for the intended
        # TypeError while letting real interrupts through.
        print("An error occurred:", sys.exc_info()[1])

    assert answer1 == [1, 2, "Fizz", 4, "Buzz", "Fizz", 7, 8, "Fizz",
                       "Buzz", 11, "Fizz", 13, 14, "FizzBuzz"]
    assert answer2 == [1, "Fizz", 3, "Fizz", 5, "Fizz", "Buzz", "Fizz", 9,
                       "Fizz", 11, "Fizz", 13, "FizzBuzz", 15, "Fizz", 17,
                       "Fizz", 19]
    assert answer3 is None
|
flexible
|
{
"blob_id": "d00873c3ee72b55cb5b74f78a98de61a25b3cc21",
"index": 7227,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef test_answer():\n import sys\n answer1 = None\n answer2 = None\n answer3 = None\n try:\n answer1 = fizz_buzz(3, 5, 16)\n answer2 = fizz_buzz(2, 7, 20)\n answer3 = fizz_buzz(100)\n except:\n print('An error occurred:', sys.exc_info()[1])\n assert answer1 == [1, 2, 'Fizz', 4, 'Buzz', 'Fizz', 7, 8, 'Fizz',\n 'Buzz', 11, 'Fizz', 13, 14, 'FizzBuzz']\n assert answer2 == [1, 'Fizz', 3, 'Fizz', 5, 'Fizz', 'Buzz', 'Fizz', 9,\n 'Fizz', 11, 'Fizz', 13, 'FizzBuzz', 15, 'Fizz', 17, 'Fizz', 19]\n assert answer3 == None\n",
"step-3": "<mask token>\n\n\ndef fizz_buzz(num1, num2, end_range):\n if not (isinstance(num1, int) and isinstance(num2, int) and isinstance(\n end_range, int)) or (num1 < 0 or num2 < 0 or end_range < 0):\n return 'Input should be a positive integer'\n result = []\n for i in range(1, end_range):\n output = i\n if i % num1 == 0 and i % num2 == 0:\n output = 'FizzBuzz'\n elif i % num1 == 0:\n output = 'Fizz'\n elif i % num2 == 0:\n output = 'Buzz'\n result.append(output)\n print(output)\n return result\n\n\ndef test_answer():\n import sys\n answer1 = None\n answer2 = None\n answer3 = None\n try:\n answer1 = fizz_buzz(3, 5, 16)\n answer2 = fizz_buzz(2, 7, 20)\n answer3 = fizz_buzz(100)\n except:\n print('An error occurred:', sys.exc_info()[1])\n assert answer1 == [1, 2, 'Fizz', 4, 'Buzz', 'Fizz', 7, 8, 'Fizz',\n 'Buzz', 11, 'Fizz', 13, 14, 'FizzBuzz']\n assert answer2 == [1, 'Fizz', 3, 'Fizz', 5, 'Fizz', 'Buzz', 'Fizz', 9,\n 'Fizz', 11, 'Fizz', 13, 'FizzBuzz', 15, 'Fizz', 17, 'Fizz', 19]\n assert answer3 == None\n",
"step-4": "__doc__\n\n\ndef fizz_buzz(num1, num2, end_range):\n if not (isinstance(num1, int) and isinstance(num2, int) and isinstance(\n end_range, int)) or (num1 < 0 or num2 < 0 or end_range < 0):\n return 'Input should be a positive integer'\n result = []\n for i in range(1, end_range):\n output = i\n if i % num1 == 0 and i % num2 == 0:\n output = 'FizzBuzz'\n elif i % num1 == 0:\n output = 'Fizz'\n elif i % num2 == 0:\n output = 'Buzz'\n result.append(output)\n print(output)\n return result\n\n\ndef test_answer():\n import sys\n answer1 = None\n answer2 = None\n answer3 = None\n try:\n answer1 = fizz_buzz(3, 5, 16)\n answer2 = fizz_buzz(2, 7, 20)\n answer3 = fizz_buzz(100)\n except:\n print('An error occurred:', sys.exc_info()[1])\n assert answer1 == [1, 2, 'Fizz', 4, 'Buzz', 'Fizz', 7, 8, 'Fizz',\n 'Buzz', 11, 'Fizz', 13, 14, 'FizzBuzz']\n assert answer2 == [1, 'Fizz', 3, 'Fizz', 5, 'Fizz', 'Buzz', 'Fizz', 9,\n 'Fizz', 11, 'Fizz', 13, 'FizzBuzz', 15, 'Fizz', 17, 'Fizz', 19]\n assert answer3 == None\n",
"step-5": "__doc__\n\n\ndef fizz_buzz(num1, num2, end_range):\n if not (\n isinstance(num1, int) and isinstance(num2, int) and isinstance(end_range, int)\n ) or (num1 < 0 or num2 < 0 or end_range < 0):\n return \"Input should be a positive integer\"\n\n # I'm storing the result to test the returned value aka a list of outputs\n result = []\n\n for i in range(1, end_range):\n output = i\n if i % num1 == 0 and i % num2 == 0:\n output = \"FizzBuzz\"\n elif i % num1 == 0:\n output = \"Fizz\"\n elif i % num2 == 0:\n output = \"Buzz\"\n result.append(output)\n print(output)\n\n return result\n\n\ndef test_answer():\n import sys\n\n answer1 = None\n answer2 = None\n answer3 = None\n try:\n answer1 = fizz_buzz(3, 5, 16)\n answer2 = fizz_buzz(2, 7, 20)\n answer3 = fizz_buzz(100)\n except:\n print(\"An error occurred:\", sys.exc_info()[1])\n\n assert answer1 == [\n 1,\n 2,\n \"Fizz\",\n 4,\n \"Buzz\",\n \"Fizz\",\n 7,\n 8,\n \"Fizz\",\n \"Buzz\",\n 11,\n \"Fizz\",\n 13,\n 14,\n \"FizzBuzz\",\n ]\n assert answer2 == [\n 1,\n \"Fizz\",\n 3,\n \"Fizz\",\n 5,\n \"Fizz\",\n \"Buzz\",\n \"Fizz\",\n 9,\n \"Fizz\",\n 11,\n \"Fizz\",\n 13,\n \"FizzBuzz\",\n 15,\n \"Fizz\",\n 17,\n \"Fizz\",\n 19,\n ]\n assert answer3 == None\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
def incr_reads(request, book_id):
if request.POST:
try:
readers = Book.objects.get(id=book_id).incr_reads()
return HttpResponse(readers)
except Book.DoesNotExist:
pass
return HttpResponse('FAILED')
def index(request):
"""
No processing, should use direct to template.
"""
return render_to_response('index.html', {}, context_instance=
RequestContext(request))
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def incr_reads(request, book_id):
if request.POST:
try:
readers = Book.objects.get(id=book_id).incr_reads()
return HttpResponse(readers)
except Book.DoesNotExist:
pass
return HttpResponse('FAILED')
def index(request):
"""
No processing, should use direct to template.
"""
return render_to_response('index.html', {}, context_instance=
RequestContext(request))
<|reserved_special_token_0|>
def suggest_image(request, book_id):
"""
So this is a helper view for staff to update the picture.
"""
b = Book.objects.get(id=book_id)
_img = b.get_image_suggestions(first=False)
return render_to_response('books/image_suggestor.html', {'images': _img,
'book': b}, context_instance=RequestContext(request))
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def incr_reads(request, book_id):
if request.POST:
try:
readers = Book.objects.get(id=book_id).incr_reads()
return HttpResponse(readers)
except Book.DoesNotExist:
pass
return HttpResponse('FAILED')
def index(request):
"""
No processing, should use direct to template.
"""
return render_to_response('index.html', {}, context_instance=
RequestContext(request))
def search(request):
if request.GET and 'q' in request.GET:
b = Book.search.query(request.GET['q'])
return render_to_response('books/book_list.html', {'object_list': b},
context_instance=RequestContext(request))
def suggest_image(request, book_id):
"""
So this is a helper view for staff to update the picture.
"""
b = Book.objects.get(id=book_id)
_img = b.get_image_suggestions(first=False)
return render_to_response('books/image_suggestor.html', {'images': _img,
'book': b}, context_instance=RequestContext(request))
<|reserved_special_token_1|>
from django.shortcuts import render_to_response, Http404, render
from django.template import RequestContext
from books.models import Book
from django.http import HttpResponse, HttpResponseRedirect
import urllib, urllib2
import json
def incr_reads(request, book_id):
if request.POST:
try:
readers = Book.objects.get(id=book_id).incr_reads()
return HttpResponse(readers)
except Book.DoesNotExist:
pass
return HttpResponse('FAILED')
def index(request):
"""
No processing, should use direct to template.
"""
return render_to_response('index.html', {}, context_instance=
RequestContext(request))
def search(request):
if request.GET and 'q' in request.GET:
b = Book.search.query(request.GET['q'])
return render_to_response('books/book_list.html', {'object_list': b},
context_instance=RequestContext(request))
def suggest_image(request, book_id):
"""
So this is a helper view for staff to update the picture.
"""
b = Book.objects.get(id=book_id)
_img = b.get_image_suggestions(first=False)
return render_to_response('books/image_suggestor.html', {'images': _img,
'book': b}, context_instance=RequestContext(request))
<|reserved_special_token_1|>
# Create your views here.
from django.shortcuts import render_to_response, Http404, render
from django.template import RequestContext
from books.models import Book
from django.http import HttpResponse, HttpResponseRedirect
import urllib, urllib2
import json
def incr_reads(request, book_id):
    """Bump one Book's read counter (POST only).

    Responds with the counter value returned by Book.incr_reads(), or the
    literal body 'FAILED' when the request is not a POST or the book does
    not exist.
    """
    if request.POST:
        try:
            readers = Book.objects.get(id=book_id).incr_reads()
        except Book.DoesNotExist:
            pass
        else:
            return HttpResponse(readers)
    return HttpResponse('FAILED')
def index(request):
    """Static landing page; no view-side processing."""
    context = RequestContext(request)
    return render_to_response('index.html', {}, context_instance=context)
def search(request):
    """Full-text search over books via the 'q' GET parameter.

    BUG FIX: `b` was only assigned when a 'q' parameter was present, so a
    bare GET raised NameError at render time; an empty result list is now
    rendered instead.
    """
    b = []
    if request.GET and 'q' in request.GET:
        b = Book.search.query(request.GET['q'])
    return render_to_response('books/book_list.html', {'object_list': b},
        context_instance=RequestContext(request))
def suggest_image(request, book_id):
    """Staff helper view: show candidate cover images for one book."""
    book = Book.objects.get(id=book_id)
    images = book.get_image_suggestions(first=False)
    context = {'images': images, 'book': book}
    return render_to_response('books/image_suggestor.html', context,
        context_instance=RequestContext(request))
|
flexible
|
{
"blob_id": "bcbcb4ea3a3b8b5c11e9b107103418ae79a3921c",
"index": 3628,
"step-1": "<mask token>\n\n\ndef incr_reads(request, book_id):\n if request.POST:\n try:\n readers = Book.objects.get(id=book_id).incr_reads()\n return HttpResponse(readers)\n except Book.DoesNotExist:\n pass\n return HttpResponse('FAILED')\n\n\ndef index(request):\n \"\"\"\n No processing, should use direct to template.\n \"\"\"\n return render_to_response('index.html', {}, context_instance=\n RequestContext(request))\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef incr_reads(request, book_id):\n if request.POST:\n try:\n readers = Book.objects.get(id=book_id).incr_reads()\n return HttpResponse(readers)\n except Book.DoesNotExist:\n pass\n return HttpResponse('FAILED')\n\n\ndef index(request):\n \"\"\"\n No processing, should use direct to template.\n \"\"\"\n return render_to_response('index.html', {}, context_instance=\n RequestContext(request))\n\n\n<mask token>\n\n\ndef suggest_image(request, book_id):\n \"\"\"\n So this is a helper view for staff to update the picture.\n \"\"\"\n b = Book.objects.get(id=book_id)\n _img = b.get_image_suggestions(first=False)\n return render_to_response('books/image_suggestor.html', {'images': _img,\n 'book': b}, context_instance=RequestContext(request))\n",
"step-3": "<mask token>\n\n\ndef incr_reads(request, book_id):\n if request.POST:\n try:\n readers = Book.objects.get(id=book_id).incr_reads()\n return HttpResponse(readers)\n except Book.DoesNotExist:\n pass\n return HttpResponse('FAILED')\n\n\ndef index(request):\n \"\"\"\n No processing, should use direct to template.\n \"\"\"\n return render_to_response('index.html', {}, context_instance=\n RequestContext(request))\n\n\ndef search(request):\n if request.GET and 'q' in request.GET:\n b = Book.search.query(request.GET['q'])\n return render_to_response('books/book_list.html', {'object_list': b},\n context_instance=RequestContext(request))\n\n\ndef suggest_image(request, book_id):\n \"\"\"\n So this is a helper view for staff to update the picture.\n \"\"\"\n b = Book.objects.get(id=book_id)\n _img = b.get_image_suggestions(first=False)\n return render_to_response('books/image_suggestor.html', {'images': _img,\n 'book': b}, context_instance=RequestContext(request))\n",
"step-4": "from django.shortcuts import render_to_response, Http404, render\nfrom django.template import RequestContext\nfrom books.models import Book\nfrom django.http import HttpResponse, HttpResponseRedirect\nimport urllib, urllib2\nimport json\n\n\ndef incr_reads(request, book_id):\n if request.POST:\n try:\n readers = Book.objects.get(id=book_id).incr_reads()\n return HttpResponse(readers)\n except Book.DoesNotExist:\n pass\n return HttpResponse('FAILED')\n\n\ndef index(request):\n \"\"\"\n No processing, should use direct to template.\n \"\"\"\n return render_to_response('index.html', {}, context_instance=\n RequestContext(request))\n\n\ndef search(request):\n if request.GET and 'q' in request.GET:\n b = Book.search.query(request.GET['q'])\n return render_to_response('books/book_list.html', {'object_list': b},\n context_instance=RequestContext(request))\n\n\ndef suggest_image(request, book_id):\n \"\"\"\n So this is a helper view for staff to update the picture.\n \"\"\"\n b = Book.objects.get(id=book_id)\n _img = b.get_image_suggestions(first=False)\n return render_to_response('books/image_suggestor.html', {'images': _img,\n 'book': b}, context_instance=RequestContext(request))\n",
"step-5": "# Create your views here.\nfrom django.shortcuts import render_to_response, Http404, render\nfrom django.template import RequestContext\nfrom books.models import Book\nfrom django.http import HttpResponse, HttpResponseRedirect\nimport urllib, urllib2\nimport json \n\ndef incr_reads(request, book_id):\n if request.POST:\n try:\n readers = Book.objects.get(id=book_id).incr_reads()\n return HttpResponse(readers)\n except Book.DoesNotExist:\n pass\n return HttpResponse('FAILED')\n\ndef index(request):\n '''\n No processing, should use direct to template.\n '''\n return render_to_response('index.html', {}, context_instance=RequestContext(request))\n\ndef search(request):\n if request.GET and 'q' in request.GET:\n b = Book.search.query(request.GET['q'])\n return render_to_response('books/book_list.html', {'object_list':b}, context_instance=RequestContext(request))\n\ndef suggest_image(request, book_id):\n '''\n So this is a helper view for staff to update the picture.\n '''\n b = Book.objects.get(id=book_id)\n _img = b.get_image_suggestions(first=False)\n return render_to_response('books/image_suggestor.html', {'images':_img, 'book':b}, context_instance=RequestContext(request))\n\n",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
from sklearn.datasets import fetch_mldata
from sklearn.preprocessing import OneHotEncoder
from sklearn.model_selection import train_test_split
import numpy as np
import os
import tarfile
import pickle
import subprocess
import sys
if sys.version_info.major == 2:
# Backward compatibility with python 2.
from six.moves import urllib
urlretrieve = urllib.request.urlretrieve
else:
from urllib.request import urlretrieve
def get_gpu_name():
    """Return the GPU model names reported by nvidia-smi, or None on failure.

    Any error (nvidia-smi missing, driver trouble) is printed and swallowed
    — this is a best-effort diagnostic helper.
    """
    cmd = ["nvidia-smi", "--query-gpu=gpu_name", "--format=csv"]
    try:
        raw = subprocess.run(cmd, stdout=subprocess.PIPE).stdout
        lines = raw.decode("utf-8").split('\n')
        # Drop the CSV header row and the trailing empty line.
        return lines[1:-1]
    except Exception as e:
        print(e)
def read_batch(src):
    """Deserialize one pickled batch file at *src* and return its contents.

    The CIFAR batches are Python 2 pickles; on Python 3 their byte strings
    are decoded as latin1, which round-trips arbitrary bytes.
    """
    running_py2 = sys.version_info.major == 2
    with open(src, 'rb') as handle:
        if running_py2:
            return pickle.load(handle)
        return pickle.load(handle, encoding='latin1')
def shuffle_data(X, y):
    """Shuffle X and y with one shared random permutation (pairs stay aligned).

    Uses the global numpy RNG; X and y must support integer-array indexing
    (e.g. numpy arrays) and have equal length.
    """
    perm = np.arange(len(X))
    np.random.shuffle(perm)
    return X[perm], y[perm]
def yield_mb(X, y, batchsize=64, shuffle=False):
    """Yield (X, y) minibatches of exactly *batchsize* rows.

    A trailing partial batch is dropped. With shuffle=True the pair is
    permuted first via shuffle_data (global numpy RNG).
    """
    assert len(X) == len(y)
    if shuffle:
        X, y = shuffle_data(X, y)
    n_batches = len(X) // batchsize
    for b in range(n_batches):
        lo = b * batchsize
        hi = lo + batchsize
        yield X[lo:hi], y[lo:hi]
def download_cifar(download_dir, src="http://www.cs.toronto.edu/~kriz/cifar-10-python.tar.gz"):
    """Download (if needed), extract and assemble the CIFAR-10 dataset.

    Returns raw (x_train, x_test, y_train, y_test); images are still the
    flat rows stored in the pickled batches.

    BUG FIX: `fname` used to be assigned only inside the download branch,
    so a second call (archive already cached on disk) crashed with
    NameError at tarfile.open; the archive path is now computed
    unconditionally.
    """
    fname = "{}/cifar-10-python.tar.gz".format(download_dir)
    if not os.path.isfile(fname):
        print ('Downloading ' + src)
        # urlretrieve returns the path it saved to (same as fname here).
        fname, h = urlretrieve(src, fname)
        print ('Done.')
    print ('Extracting files...')
    with tarfile.open(fname) as tar:
        tar.extractall(download_dir)
    print ('Done.')
    print ('Preparing train set...')
    # Five training batches; read_batch unpickles each one.
    train_list = [read_batch('{0}/cifar-10-batches-py/data_batch_{1}'.format(download_dir, i + 1)) for i in range(5)]
    x_train = np.concatenate([t['data'] for t in train_list])
    y_train = np.concatenate([t['labels'] for t in train_list])
    print ('Preparing test set...')
    tst = read_batch('{0}/cifar-10-batches-py/test_batch'.format(download_dir))
    x_test = tst['data']
    y_test = np.asarray(tst['labels'])
    print ('Done.')
    return x_train, x_test, y_train, y_test
def download_imdb(src="https://s3.amazonaws.com/text-datasets/imdb.npz"):
    '''Load the training and testing data

    Downloads the pre-tokenized IMDB .npz archive to a throwaway local
    file, pulls the four arrays out, and always deletes the file.
    NOTE(review): the archive is re-downloaded on every call — nothing is
    cached.
    '''
    # FLAG: should we host this on azure?
    print ('Downloading ' + src)
    # './delete.me' is a deliberately disposable scratch path.
    fname, h = urlretrieve(src, './delete.me')
    print ('Done.')
    try:
        print ('Extracting files...')
        with np.load(fname) as f:
            x_train, y_train = f['x_train'], f['y_train']
            x_test, y_test = f['x_test'], f['y_test']
        print ('Done.')
    finally:
        # Remove the scratch file even if extraction fails.
        os.remove(fname)
    return x_train, x_test, y_train, y_test
def cifar_for_library(download_dir, channel_first=True, one_hot=False):
    """Load CIFAR-10 as numpy arrays ready for a deep-learning framework.

    download_dir  -- directory used to cache/extract the CIFAR archive.
    channel_first -- True gives (N, 3, 32, 32); False gives (N, 32, 32, 3).
    one_hot       -- True one-hot encodes the labels instead of int ids.

    Returns x_train, x_test (float32, scaled to [0, 1]) and y_train,
    y_test (int32).
    """
    # Raw data (downloads/extracts on first call).
    x_train, x_test, y_train, y_test = download_cifar(download_dir)
    # Scale pixel intensity to [0, 1].
    x_train = x_train / 255.0
    x_test = x_test / 255.0
    # Reshape flat rows into (N, C, H, W).
    x_train = x_train.reshape(-1, 3, 32, 32)
    x_test = x_test.reshape(-1, 3, 32, 32)
    if not channel_first:
        # Swap to channels-last (N, H, W, C) layout.
        x_train = np.swapaxes(x_train, 1, 3)
        x_test = np.swapaxes(x_test, 1, 3)
    if one_hot:
        # FIX: OneHotEncoder(categorical_features='all') was removed from
        # scikit-learn; an identity-matrix lookup yields the same encoding
        # for the dense 0..9 CIFAR label ids (classes taken from y_train,
        # matching the old fit-on-train behaviour).
        n_classes = int(y_train.max()) + 1
        eye = np.eye(n_classes)
        y_train = eye[y_train]
        y_test = eye[y_test]
    # Cast to the dtypes the frameworks expect.
    x_train = x_train.astype(np.float32)
    x_test = x_test.astype(np.float32)
    y_train = y_train.astype(np.int32)
    y_test = y_test.astype(np.int32)
    return x_train, x_test, y_train, y_test
def imdb_for_library(seq_len=100, max_features=20000, one_hot=False):
    ''' Replicates same pre-processing as:
    https://github.com/fchollet/keras/blob/master/keras/datasets/imdb.py

    Args:
        :seq_len: pad/truncate every review to this many word indices
        :max_features: vocabulary cut-off; rarer words map to the OOV token
        :one_hot: if True one-hot encode the label vectors

    Returns:
        x_train, x_test, y_train, y_test (int32 numpy arrays)
    '''
    # 0 (padding), 1 (start), 2 (OOV)
    START_CHAR = 1
    OOV_CHAR = 2
    INDEX_FROM = 3
    # Raw data (has been encoded into word indices already)
    x_train, x_test, y_train, y_test = download_imdb()
    # Combine train and test for joint processing; idx marks the split point
    idx = len(x_train)
    _xs = np.concatenate([x_train, x_test])
    # Words will start from INDEX_FROM (shift by 3)
    _xs = [[START_CHAR] + [w + INDEX_FROM for w in x] for x in _xs]
    # Max-features - replace words bigger than index with oov_char
    # E.g. if max_features = 5 then keep 0, 1, 2, 3, 4 i.e. words 3 and 4
    if max_features:
        print("Trimming to {} max-features".format(max_features))
        _xs = [[w if (w < max_features) else OOV_CHAR for w in x] for x in _xs]
    # Pad to same sequence length.
    # np.int was removed in numpy 1.24; use an explicit dtype instead.
    print("Padding to length {}".format(seq_len))
    xs = np.zeros((len(_xs), seq_len), dtype=np.int64)
    for o_idx, obs in enumerate(_xs):
        # Match keras pre-processing of taking the last seq_len elements
        obs = obs[-seq_len:]
        # obs is at most seq_len long, so this never overflows the row
        xs[o_idx, :len(obs)] = obs
    # One-hot
    if one_hot:
        y_train = np.expand_dims(y_train, axis=-1)
        y_test = np.expand_dims(y_test, axis=-1)
        # 'categorical_features' was removed in scikit-learn 0.22; the
        # default already encodes every column.
        enc = OneHotEncoder(categories='auto')
        fit = enc.fit(y_train)
        y_train = fit.transform(y_train).toarray()
        y_test = fit.transform(y_test).toarray()
    # dtypes
    x_train = np.array(xs[:idx]).astype(np.int32)
    x_test = np.array(xs[idx:]).astype(np.int32)
    y_train = y_train.astype(np.int32)
    y_test = y_test.astype(np.int32)
    return x_train, x_test, y_train, y_test
|
normal
|
{
"blob_id": "6eec95932ef445ba588f200233495f59c4d77aac",
"index": 5396,
"step-1": "<mask token>\n\n\ndef get_gpu_name():\n try:\n out_str = subprocess.run(['nvidia-smi', '--query-gpu=gpu_name',\n '--format=csv'], stdout=subprocess.PIPE).stdout\n out_list = out_str.decode('utf-8').split('\\n')\n out_list = out_list[1:-1]\n return out_list\n except Exception as e:\n print(e)\n\n\ndef read_batch(src):\n \"\"\"Unpack the pickle files\n \"\"\"\n with open(src, 'rb') as f:\n if sys.version_info.major == 2:\n data = pickle.load(f)\n else:\n data = pickle.load(f, encoding='latin1')\n return data\n\n\ndef shuffle_data(X, y):\n s = np.arange(len(X))\n np.random.shuffle(s)\n X = X[s]\n y = y[s]\n return X, y\n\n\n<mask token>\n\n\ndef download_imdb(src='https://s3.amazonaws.com/text-datasets/imdb.npz'):\n \"\"\"Load the training and testing data\n \"\"\"\n print('Downloading ' + src)\n fname, h = urlretrieve(src, './delete.me')\n print('Done.')\n try:\n print('Extracting files...')\n with np.load(fname) as f:\n x_train, y_train = f['x_train'], f['y_train']\n x_test, y_test = f['x_test'], f['y_test']\n print('Done.')\n finally:\n os.remove(fname)\n return x_train, x_test, y_train, y_test\n\n\n<mask token>\n\n\ndef imdb_for_library(seq_len=100, max_features=20000, one_hot=False):\n \"\"\" Replicates same pre-processing as:\n https://github.com/fchollet/keras/blob/master/keras/datasets/imdb.py\n \n I'm not sure if we want to load another version of IMDB that has got \n words, but if it does have words we would still convert to index in this \n backend script that is not meant for others to see ... 
\n \n But I'm worried this obfuscates the data a bit?\n \"\"\"\n START_CHAR = 1\n OOV_CHAR = 2\n INDEX_FROM = 3\n x_train, x_test, y_train, y_test = download_imdb()\n idx = len(x_train)\n _xs = np.concatenate([x_train, x_test])\n _xs = [([START_CHAR] + [(w + INDEX_FROM) for w in x]) for x in _xs]\n if max_features:\n print('Trimming to {} max-features'.format(max_features))\n _xs = [[(w if w < max_features else OOV_CHAR) for w in x] for x in _xs]\n print('Padding to length {}'.format(seq_len))\n xs = np.zeros((len(_xs), seq_len), dtype=np.int)\n for o_idx, obs in enumerate(_xs):\n obs = obs[-seq_len:]\n for i_idx in range(len(obs)):\n if i_idx < seq_len:\n xs[o_idx][i_idx] = obs[i_idx]\n if one_hot:\n y_train = np.expand_dims(y_train, axis=-1)\n y_test = np.expand_dims(y_test, axis=-1)\n enc = OneHotEncoder(categorical_features='all')\n fit = enc.fit(y_train)\n y_train = fit.transform(y_train).toarray()\n y_test = fit.transform(y_test).toarray()\n x_train = np.array(xs[:idx]).astype(np.int32)\n x_test = np.array(xs[idx:]).astype(np.int32)\n y_train = y_train.astype(np.int32)\n y_test = y_test.astype(np.int32)\n return x_train, x_test, y_train, y_test\n",
"step-2": "<mask token>\n\n\ndef get_gpu_name():\n try:\n out_str = subprocess.run(['nvidia-smi', '--query-gpu=gpu_name',\n '--format=csv'], stdout=subprocess.PIPE).stdout\n out_list = out_str.decode('utf-8').split('\\n')\n out_list = out_list[1:-1]\n return out_list\n except Exception as e:\n print(e)\n\n\ndef read_batch(src):\n \"\"\"Unpack the pickle files\n \"\"\"\n with open(src, 'rb') as f:\n if sys.version_info.major == 2:\n data = pickle.load(f)\n else:\n data = pickle.load(f, encoding='latin1')\n return data\n\n\ndef shuffle_data(X, y):\n s = np.arange(len(X))\n np.random.shuffle(s)\n X = X[s]\n y = y[s]\n return X, y\n\n\n<mask token>\n\n\ndef download_imdb(src='https://s3.amazonaws.com/text-datasets/imdb.npz'):\n \"\"\"Load the training and testing data\n \"\"\"\n print('Downloading ' + src)\n fname, h = urlretrieve(src, './delete.me')\n print('Done.')\n try:\n print('Extracting files...')\n with np.load(fname) as f:\n x_train, y_train = f['x_train'], f['y_train']\n x_test, y_test = f['x_test'], f['y_test']\n print('Done.')\n finally:\n os.remove(fname)\n return x_train, x_test, y_train, y_test\n\n\ndef cifar_for_library(download_dir, channel_first=True, one_hot=False):\n x_train, x_test, y_train, y_test = download_cifar(download_dir)\n x_train = x_train / 255.0\n x_test = x_test / 255.0\n x_train = x_train.reshape(-1, 3, 32, 32)\n x_test = x_test.reshape(-1, 3, 32, 32)\n if not channel_first:\n x_train = np.swapaxes(x_train, 1, 3)\n x_test = np.swapaxes(x_test, 1, 3)\n if one_hot:\n y_train = np.expand_dims(y_train, axis=-1)\n y_test = np.expand_dims(y_test, axis=-1)\n enc = OneHotEncoder(categorical_features='all')\n fit = enc.fit(y_train)\n y_train = fit.transform(y_train).toarray()\n y_test = fit.transform(y_test).toarray()\n x_train = x_train.astype(np.float32)\n x_test = x_test.astype(np.float32)\n y_train = y_train.astype(np.int32)\n y_test = y_test.astype(np.int32)\n return x_train, x_test, y_train, y_test\n\n\ndef imdb_for_library(seq_len=100, 
max_features=20000, one_hot=False):\n \"\"\" Replicates same pre-processing as:\n https://github.com/fchollet/keras/blob/master/keras/datasets/imdb.py\n \n I'm not sure if we want to load another version of IMDB that has got \n words, but if it does have words we would still convert to index in this \n backend script that is not meant for others to see ... \n \n But I'm worried this obfuscates the data a bit?\n \"\"\"\n START_CHAR = 1\n OOV_CHAR = 2\n INDEX_FROM = 3\n x_train, x_test, y_train, y_test = download_imdb()\n idx = len(x_train)\n _xs = np.concatenate([x_train, x_test])\n _xs = [([START_CHAR] + [(w + INDEX_FROM) for w in x]) for x in _xs]\n if max_features:\n print('Trimming to {} max-features'.format(max_features))\n _xs = [[(w if w < max_features else OOV_CHAR) for w in x] for x in _xs]\n print('Padding to length {}'.format(seq_len))\n xs = np.zeros((len(_xs), seq_len), dtype=np.int)\n for o_idx, obs in enumerate(_xs):\n obs = obs[-seq_len:]\n for i_idx in range(len(obs)):\n if i_idx < seq_len:\n xs[o_idx][i_idx] = obs[i_idx]\n if one_hot:\n y_train = np.expand_dims(y_train, axis=-1)\n y_test = np.expand_dims(y_test, axis=-1)\n enc = OneHotEncoder(categorical_features='all')\n fit = enc.fit(y_train)\n y_train = fit.transform(y_train).toarray()\n y_test = fit.transform(y_test).toarray()\n x_train = np.array(xs[:idx]).astype(np.int32)\n x_test = np.array(xs[idx:]).astype(np.int32)\n y_train = y_train.astype(np.int32)\n y_test = y_test.astype(np.int32)\n return x_train, x_test, y_train, y_test\n",
"step-3": "<mask token>\n\n\ndef get_gpu_name():\n try:\n out_str = subprocess.run(['nvidia-smi', '--query-gpu=gpu_name',\n '--format=csv'], stdout=subprocess.PIPE).stdout\n out_list = out_str.decode('utf-8').split('\\n')\n out_list = out_list[1:-1]\n return out_list\n except Exception as e:\n print(e)\n\n\ndef read_batch(src):\n \"\"\"Unpack the pickle files\n \"\"\"\n with open(src, 'rb') as f:\n if sys.version_info.major == 2:\n data = pickle.load(f)\n else:\n data = pickle.load(f, encoding='latin1')\n return data\n\n\ndef shuffle_data(X, y):\n s = np.arange(len(X))\n np.random.shuffle(s)\n X = X[s]\n y = y[s]\n return X, y\n\n\n<mask token>\n\n\ndef download_cifar(download_dir, src=\n 'http://www.cs.toronto.edu/~kriz/cifar-10-python.tar.gz'):\n \"\"\"Load the training and testing data\n \"\"\"\n if not os.path.isfile('{}/cifar-10-python.tar.gz'.format(download_dir)):\n print('Downloading ' + src)\n fname, h = urlretrieve(src, '{}/cifar-10-python.tar.gz'.format(\n download_dir))\n print('Done.')\n print('Extracting files...')\n with tarfile.open(fname) as tar:\n tar.extractall(download_dir)\n print('Done.')\n print('Preparing train set...')\n train_list = [read_batch('{0}/cifar-10-batches-py/data_batch_{1}'.\n format(download_dir, i + 1)) for i in range(5)]\n x_train = np.concatenate([t['data'] for t in train_list])\n y_train = np.concatenate([t['labels'] for t in train_list])\n print('Preparing test set...')\n tst = read_batch('{0}/cifar-10-batches-py/test_batch'.format(download_dir))\n x_test = tst['data']\n y_test = np.asarray(tst['labels'])\n print('Done.')\n return x_train, x_test, y_train, y_test\n\n\ndef download_imdb(src='https://s3.amazonaws.com/text-datasets/imdb.npz'):\n \"\"\"Load the training and testing data\n \"\"\"\n print('Downloading ' + src)\n fname, h = urlretrieve(src, './delete.me')\n print('Done.')\n try:\n print('Extracting files...')\n with np.load(fname) as f:\n x_train, y_train = f['x_train'], f['y_train']\n x_test, y_test = 
f['x_test'], f['y_test']\n print('Done.')\n finally:\n os.remove(fname)\n return x_train, x_test, y_train, y_test\n\n\ndef cifar_for_library(download_dir, channel_first=True, one_hot=False):\n x_train, x_test, y_train, y_test = download_cifar(download_dir)\n x_train = x_train / 255.0\n x_test = x_test / 255.0\n x_train = x_train.reshape(-1, 3, 32, 32)\n x_test = x_test.reshape(-1, 3, 32, 32)\n if not channel_first:\n x_train = np.swapaxes(x_train, 1, 3)\n x_test = np.swapaxes(x_test, 1, 3)\n if one_hot:\n y_train = np.expand_dims(y_train, axis=-1)\n y_test = np.expand_dims(y_test, axis=-1)\n enc = OneHotEncoder(categorical_features='all')\n fit = enc.fit(y_train)\n y_train = fit.transform(y_train).toarray()\n y_test = fit.transform(y_test).toarray()\n x_train = x_train.astype(np.float32)\n x_test = x_test.astype(np.float32)\n y_train = y_train.astype(np.int32)\n y_test = y_test.astype(np.int32)\n return x_train, x_test, y_train, y_test\n\n\ndef imdb_for_library(seq_len=100, max_features=20000, one_hot=False):\n \"\"\" Replicates same pre-processing as:\n https://github.com/fchollet/keras/blob/master/keras/datasets/imdb.py\n \n I'm not sure if we want to load another version of IMDB that has got \n words, but if it does have words we would still convert to index in this \n backend script that is not meant for others to see ... 
\n \n But I'm worried this obfuscates the data a bit?\n \"\"\"\n START_CHAR = 1\n OOV_CHAR = 2\n INDEX_FROM = 3\n x_train, x_test, y_train, y_test = download_imdb()\n idx = len(x_train)\n _xs = np.concatenate([x_train, x_test])\n _xs = [([START_CHAR] + [(w + INDEX_FROM) for w in x]) for x in _xs]\n if max_features:\n print('Trimming to {} max-features'.format(max_features))\n _xs = [[(w if w < max_features else OOV_CHAR) for w in x] for x in _xs]\n print('Padding to length {}'.format(seq_len))\n xs = np.zeros((len(_xs), seq_len), dtype=np.int)\n for o_idx, obs in enumerate(_xs):\n obs = obs[-seq_len:]\n for i_idx in range(len(obs)):\n if i_idx < seq_len:\n xs[o_idx][i_idx] = obs[i_idx]\n if one_hot:\n y_train = np.expand_dims(y_train, axis=-1)\n y_test = np.expand_dims(y_test, axis=-1)\n enc = OneHotEncoder(categorical_features='all')\n fit = enc.fit(y_train)\n y_train = fit.transform(y_train).toarray()\n y_test = fit.transform(y_test).toarray()\n x_train = np.array(xs[:idx]).astype(np.int32)\n x_test = np.array(xs[idx:]).astype(np.int32)\n y_train = y_train.astype(np.int32)\n y_test = y_test.astype(np.int32)\n return x_train, x_test, y_train, y_test\n",
"step-4": "<mask token>\n\n\ndef get_gpu_name():\n try:\n out_str = subprocess.run(['nvidia-smi', '--query-gpu=gpu_name',\n '--format=csv'], stdout=subprocess.PIPE).stdout\n out_list = out_str.decode('utf-8').split('\\n')\n out_list = out_list[1:-1]\n return out_list\n except Exception as e:\n print(e)\n\n\ndef read_batch(src):\n \"\"\"Unpack the pickle files\n \"\"\"\n with open(src, 'rb') as f:\n if sys.version_info.major == 2:\n data = pickle.load(f)\n else:\n data = pickle.load(f, encoding='latin1')\n return data\n\n\ndef shuffle_data(X, y):\n s = np.arange(len(X))\n np.random.shuffle(s)\n X = X[s]\n y = y[s]\n return X, y\n\n\ndef yield_mb(X, y, batchsize=64, shuffle=False):\n assert len(X) == len(y)\n if shuffle:\n X, y = shuffle_data(X, y)\n for i in range(len(X) // batchsize):\n yield X[i * batchsize:(i + 1) * batchsize], y[i * batchsize:(i + 1) *\n batchsize]\n\n\ndef download_cifar(download_dir, src=\n 'http://www.cs.toronto.edu/~kriz/cifar-10-python.tar.gz'):\n \"\"\"Load the training and testing data\n \"\"\"\n if not os.path.isfile('{}/cifar-10-python.tar.gz'.format(download_dir)):\n print('Downloading ' + src)\n fname, h = urlretrieve(src, '{}/cifar-10-python.tar.gz'.format(\n download_dir))\n print('Done.')\n print('Extracting files...')\n with tarfile.open(fname) as tar:\n tar.extractall(download_dir)\n print('Done.')\n print('Preparing train set...')\n train_list = [read_batch('{0}/cifar-10-batches-py/data_batch_{1}'.\n format(download_dir, i + 1)) for i in range(5)]\n x_train = np.concatenate([t['data'] for t in train_list])\n y_train = np.concatenate([t['labels'] for t in train_list])\n print('Preparing test set...')\n tst = read_batch('{0}/cifar-10-batches-py/test_batch'.format(download_dir))\n x_test = tst['data']\n y_test = np.asarray(tst['labels'])\n print('Done.')\n return x_train, x_test, y_train, y_test\n\n\ndef download_imdb(src='https://s3.amazonaws.com/text-datasets/imdb.npz'):\n \"\"\"Load the training and testing data\n \"\"\"\n 
print('Downloading ' + src)\n fname, h = urlretrieve(src, './delete.me')\n print('Done.')\n try:\n print('Extracting files...')\n with np.load(fname) as f:\n x_train, y_train = f['x_train'], f['y_train']\n x_test, y_test = f['x_test'], f['y_test']\n print('Done.')\n finally:\n os.remove(fname)\n return x_train, x_test, y_train, y_test\n\n\ndef cifar_for_library(download_dir, channel_first=True, one_hot=False):\n x_train, x_test, y_train, y_test = download_cifar(download_dir)\n x_train = x_train / 255.0\n x_test = x_test / 255.0\n x_train = x_train.reshape(-1, 3, 32, 32)\n x_test = x_test.reshape(-1, 3, 32, 32)\n if not channel_first:\n x_train = np.swapaxes(x_train, 1, 3)\n x_test = np.swapaxes(x_test, 1, 3)\n if one_hot:\n y_train = np.expand_dims(y_train, axis=-1)\n y_test = np.expand_dims(y_test, axis=-1)\n enc = OneHotEncoder(categorical_features='all')\n fit = enc.fit(y_train)\n y_train = fit.transform(y_train).toarray()\n y_test = fit.transform(y_test).toarray()\n x_train = x_train.astype(np.float32)\n x_test = x_test.astype(np.float32)\n y_train = y_train.astype(np.int32)\n y_test = y_test.astype(np.int32)\n return x_train, x_test, y_train, y_test\n\n\ndef imdb_for_library(seq_len=100, max_features=20000, one_hot=False):\n \"\"\" Replicates same pre-processing as:\n https://github.com/fchollet/keras/blob/master/keras/datasets/imdb.py\n \n I'm not sure if we want to load another version of IMDB that has got \n words, but if it does have words we would still convert to index in this \n backend script that is not meant for others to see ... 
\n \n But I'm worried this obfuscates the data a bit?\n \"\"\"\n START_CHAR = 1\n OOV_CHAR = 2\n INDEX_FROM = 3\n x_train, x_test, y_train, y_test = download_imdb()\n idx = len(x_train)\n _xs = np.concatenate([x_train, x_test])\n _xs = [([START_CHAR] + [(w + INDEX_FROM) for w in x]) for x in _xs]\n if max_features:\n print('Trimming to {} max-features'.format(max_features))\n _xs = [[(w if w < max_features else OOV_CHAR) for w in x] for x in _xs]\n print('Padding to length {}'.format(seq_len))\n xs = np.zeros((len(_xs), seq_len), dtype=np.int)\n for o_idx, obs in enumerate(_xs):\n obs = obs[-seq_len:]\n for i_idx in range(len(obs)):\n if i_idx < seq_len:\n xs[o_idx][i_idx] = obs[i_idx]\n if one_hot:\n y_train = np.expand_dims(y_train, axis=-1)\n y_test = np.expand_dims(y_test, axis=-1)\n enc = OneHotEncoder(categorical_features='all')\n fit = enc.fit(y_train)\n y_train = fit.transform(y_train).toarray()\n y_test = fit.transform(y_test).toarray()\n x_train = np.array(xs[:idx]).astype(np.int32)\n x_test = np.array(xs[idx:]).astype(np.int32)\n y_train = y_train.astype(np.int32)\n y_test = y_test.astype(np.int32)\n return x_train, x_test, y_train, y_test\n",
"step-5": "from sklearn.datasets import fetch_mldata\nfrom sklearn.preprocessing import OneHotEncoder\nfrom sklearn.model_selection import train_test_split\n\nimport numpy as np\nimport os\nimport tarfile\nimport pickle\nimport subprocess\nimport sys\nif sys.version_info.major == 2:\n # Backward compatibility with python 2.\n from six.moves import urllib\n urlretrieve = urllib.request.urlretrieve\nelse:\n from urllib.request import urlretrieve\n\ndef get_gpu_name():\n try:\n out_str = subprocess.run([\"nvidia-smi\", \"--query-gpu=gpu_name\", \"--format=csv\"], stdout=subprocess.PIPE).stdout\n out_list = out_str.decode(\"utf-8\").split('\\n')\n out_list = out_list[1:-1]\n return out_list\n except Exception as e:\n print(e)\n \ndef read_batch(src):\n '''Unpack the pickle files\n '''\n with open(src, 'rb') as f:\n if sys.version_info.major == 2:\n data = pickle.load(f)\n else:\n data = pickle.load(f, encoding='latin1')\n return data\n\ndef shuffle_data(X, y):\n s = np.arange(len(X))\n np.random.shuffle(s)\n X = X[s]\n y = y[s]\n return X, y\n\ndef yield_mb(X, y, batchsize=64, shuffle=False):\n assert len(X) == len(y)\n if shuffle:\n X, y = shuffle_data(X, y)\n # Only complete batches are submitted\n for i in range(len(X)//batchsize):\n yield X[i*batchsize:(i+1)*batchsize], y[i*batchsize:(i+1)*batchsize]\n\ndef download_cifar(download_dir, src=\"http://www.cs.toronto.edu/~kriz/cifar-10-python.tar.gz\"):\n '''Load the training and testing data\n '''\n\n if not os.path.isfile(\"{}/cifar-10-python.tar.gz\".format(download_dir)):\n print ('Downloading ' + src)\n fname, h = urlretrieve(src, '{}/cifar-10-python.tar.gz'.format(download_dir))\n print ('Done.')\n\n print ('Extracting files...')\n with tarfile.open(fname) as tar:\n tar.extractall(download_dir)\n print ('Done.')\n \n print ('Preparing train set...')\n train_list = [read_batch('{0}/cifar-10-batches-py/data_batch_{1}'.format(download_dir, i + 1)) for i in range(5)]\n x_train = np.concatenate([t['data'] for t in 
train_list])\n y_train = np.concatenate([t['labels'] for t in train_list])\n print ('Preparing test set...')\n tst = read_batch('{0}/cifar-10-batches-py/test_batch'.format(download_dir))\n x_test = tst['data']\n y_test = np.asarray(tst['labels'])\n print ('Done.')\n \n return x_train, x_test, y_train, y_test\n\ndef download_imdb(src=\"https://s3.amazonaws.com/text-datasets/imdb.npz\"):\n '''Load the training and testing data\n '''\n # FLAG: should we host this on azure?\n print ('Downloading ' + src)\n fname, h = urlretrieve(src, './delete.me')\n print ('Done.')\n try:\n print ('Extracting files...')\n with np.load(fname) as f:\n x_train, y_train = f['x_train'], f['y_train']\n x_test, y_test = f['x_test'], f['y_test']\n print ('Done.')\n finally:\n os.remove(fname)\n return x_train, x_test, y_train, y_test\n\ndef cifar_for_library(download_dir, channel_first=True, one_hot=False): \n # Raw data\n x_train, x_test, y_train, y_test = download_cifar(download_dir)\n # Scale pixel intensity\n x_train = x_train/255.0\n x_test = x_test/255.0\n # Reshape\n x_train = x_train.reshape(-1, 3, 32, 32)\n x_test = x_test.reshape(-1, 3, 32, 32) \n # Channel last\n if not channel_first:\n x_train = np.swapaxes(x_train, 1, 3)\n x_test = np.swapaxes(x_test, 1, 3)\n # One-hot encode y\n if one_hot:\n y_train = np.expand_dims(y_train, axis=-1)\n y_test = np.expand_dims(y_test, axis=-1)\n enc = OneHotEncoder(categorical_features='all')\n fit = enc.fit(y_train)\n y_train = fit.transform(y_train).toarray()\n y_test = fit.transform(y_test).toarray()\n # dtypes\n x_train = x_train.astype(np.float32)\n x_test = x_test.astype(np.float32)\n y_train = y_train.astype(np.int32)\n y_test = y_test.astype(np.int32)\n return x_train, x_test, y_train, y_test\n \ndef imdb_for_library(seq_len=100, max_features=20000, one_hot=False):\n ''' Replicates same pre-processing as:\n https://github.com/fchollet/keras/blob/master/keras/datasets/imdb.py\n \n I'm not sure if we want to load another version of IMDB 
that has got \n words, but if it does have words we would still convert to index in this \n backend script that is not meant for others to see ... \n \n But I'm worried this obfuscates the data a bit?\n '''\n # 0 (padding), 1 (start), 2 (OOV)\n START_CHAR=1\n OOV_CHAR=2\n INDEX_FROM=3\n # Raw data (has been encoded into words already)\n x_train, x_test, y_train, y_test = download_imdb()\n # Combine for processing\n idx = len(x_train)\n _xs = np.concatenate([x_train, x_test])\n # Words will start from INDEX_FROM (shift by 3)\n _xs = [[START_CHAR] + [w + INDEX_FROM for w in x] for x in _xs]\n # Max-features - replace words bigger than index with oov_char\n # E.g. if max_features = 5 then keep 0, 1, 2, 3, 4 i.e. words 3 and 4\n if max_features:\n print(\"Trimming to {} max-features\".format(max_features))\n _xs = [[w if (w < max_features) else OOV_CHAR for w in x] for x in _xs] \n # Pad to same sequences\n print(\"Padding to length {}\".format(seq_len))\n xs = np.zeros((len(_xs), seq_len), dtype=np.int)\n for o_idx, obs in enumerate(_xs): \n # Match keras pre-processing of taking last elements\n obs = obs[-seq_len:]\n for i_idx in range(len(obs)):\n if i_idx < seq_len:\n xs[o_idx][i_idx] = obs[i_idx]\n # One-hot\n if one_hot:\n y_train = np.expand_dims(y_train, axis=-1)\n y_test = np.expand_dims(y_test, axis=-1)\n enc = OneHotEncoder(categorical_features='all')\n fit = enc.fit(y_train)\n y_train = fit.transform(y_train).toarray()\n y_test = fit.transform(y_test).toarray()\n # dtypes\n x_train = np.array(xs[:idx]).astype(np.int32)\n x_test = np.array(xs[idx:]).astype(np.int32)\n y_train = y_train.astype(np.int32)\n y_test = y_test.astype(np.int32)\n return x_train, x_test, y_train, y_test\n",
"step-ids": [
5,
6,
7,
8,
11
]
}
|
[
5,
6,
7,
8,
11
] |
# Emit every element of the list, one per line.
array = [1, 2, 3, 4, 5]
print(*array, sep="\n")
|
normal
|
{
"blob_id": "224e13331ad93278f47a5582bbd24208d9ce5dcc",
"index": 3705,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nfor x in array:\n print(x)\n",
"step-3": "array = [1, 2, 3, 4, 5]\nfor x in array:\n print(x)\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
<|reserved_special_token_0|>
class Cluster(object):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Cluster(object):
<|reserved_special_token_0|>
def __init__(self, cluster_json):
"""
Initialize the cluster object from JSON payload
Args:
:cluster_json: JSON data of the cluster
"""
self.datapoint_name = cluster_json[constants.REST_CONFIG.
JSON_CLUSTERING_ANALYSIS_DATA_POINT_NAME]
self.cluster = int(cluster_json[constants.REST_CONFIG.
JSON_CLUSTERING_ANALYSIS_CLUSTER])
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Cluster(object):
"""
Represents a Cluster in Cluster Analysis computed for a featuregroup or training dataset in the featurestore
"""
def __init__(self, cluster_json):
"""
Initialize the cluster object from JSON payload
Args:
:cluster_json: JSON data of the cluster
"""
self.datapoint_name = cluster_json[constants.REST_CONFIG.
JSON_CLUSTERING_ANALYSIS_DATA_POINT_NAME]
self.cluster = int(cluster_json[constants.REST_CONFIG.
JSON_CLUSTERING_ANALYSIS_CLUSTER])
<|reserved_special_token_1|>
from hops import constants
class Cluster(object):
    """A single cluster entry from a cluster-analysis result.

    Wraps one record of the clustering analysis computed for a
    featuregroup or training dataset in the featurestore.
    """

    def __init__(self, cluster_json):
        """Build the cluster object from its JSON representation.

        Args:
            :cluster_json: JSON data of the cluster
        """
        cfg = constants.REST_CONFIG
        self.datapoint_name = cluster_json[cfg.JSON_CLUSTERING_ANALYSIS_DATA_POINT_NAME]
        self.cluster = int(cluster_json[cfg.JSON_CLUSTERING_ANALYSIS_CLUSTER])
|
flexible
|
{
"blob_id": "753c87a3d22aeca1001eb770831b846b175d873e",
"index": 9139,
"step-1": "<mask token>\n\n\nclass Cluster(object):\n <mask token>\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass Cluster(object):\n <mask token>\n\n def __init__(self, cluster_json):\n \"\"\"\n Initialize the cluster object from JSON payload\n\n Args:\n :cluster_json: JSON data of the cluster\n \"\"\"\n self.datapoint_name = cluster_json[constants.REST_CONFIG.\n JSON_CLUSTERING_ANALYSIS_DATA_POINT_NAME]\n self.cluster = int(cluster_json[constants.REST_CONFIG.\n JSON_CLUSTERING_ANALYSIS_CLUSTER])\n",
"step-3": "<mask token>\n\n\nclass Cluster(object):\n \"\"\"\n Represents a Cluster in Cluster Analysis computed for a featuregroup or training dataset in the featurestore\n \"\"\"\n\n def __init__(self, cluster_json):\n \"\"\"\n Initialize the cluster object from JSON payload\n\n Args:\n :cluster_json: JSON data of the cluster\n \"\"\"\n self.datapoint_name = cluster_json[constants.REST_CONFIG.\n JSON_CLUSTERING_ANALYSIS_DATA_POINT_NAME]\n self.cluster = int(cluster_json[constants.REST_CONFIG.\n JSON_CLUSTERING_ANALYSIS_CLUSTER])\n",
"step-4": "from hops import constants\n\n\nclass Cluster(object):\n \"\"\"\n Represents a Cluster in Cluster Analysis computed for a featuregroup or training dataset in the featurestore\n \"\"\"\n\n def __init__(self, cluster_json):\n \"\"\"\n Initialize the cluster object from JSON payload\n\n Args:\n :cluster_json: JSON data of the cluster\n \"\"\"\n self.datapoint_name = cluster_json[constants.REST_CONFIG.\n JSON_CLUSTERING_ANALYSIS_DATA_POINT_NAME]\n self.cluster = int(cluster_json[constants.REST_CONFIG.\n JSON_CLUSTERING_ANALYSIS_CLUSTER])\n",
"step-5": null,
"step-ids": [
1,
2,
3,
4
]
}
|
[
1,
2,
3,
4
] |
import io
import os
from setuptools import setup
# Minimal package definition used to reproduce a coverage bug with
# namespace-package plugins.
setup(
    name='testcov-plugin',
    version='1.0',
    description="Test for coverage bug",
    packages=['testcov'],
    namespace_packages=['testcov'],
    entry_points={'plugins': ['testp = testcov.plugin:testp']},
)
|
normal
|
{
"blob_id": "88f5aa56eca6b61ba2b428bff0efdf4ec7f5f5d9",
"index": 1913,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nsetup(name='testcov-plugin', version='1.0', packages=['testcov'],\n namespace_packages=['testcov'], entry_points={'plugins': [\n 'testp = testcov.plugin:testp']}, description='Test for coverage bug')\n",
"step-3": "import io\nimport os\nfrom setuptools import setup\nsetup(name='testcov-plugin', version='1.0', packages=['testcov'],\n namespace_packages=['testcov'], entry_points={'plugins': [\n 'testp = testcov.plugin:testp']}, description='Test for coverage bug')\n",
"step-4": "import io\nimport os\nfrom setuptools import setup\n\n\nsetup(name='testcov-plugin',\n version='1.0',\n packages=['testcov'],\n namespace_packages=['testcov'],\n entry_points={\n 'plugins': ['testp = testcov.plugin:testp'],\n },\n description=\"Test for coverage bug\")\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
########################################################################################################################
# DEVELOPER README: #
# This is the main script, where the GUI is initialised from. All of the main layout objects live in their own scripts #
# under ./gui_scripts (i.e. the tab content). The settings and preferences script sets up all of the directory paths #
# and contains dictionaries defining the top menu, push buttons and the tables held in the main tabs. The layout #
# script contains functions for performing simple layout tasks, such as adding a combobox, and contains init. #
# functions for all of the main layout functions. #
# #
# In the future, the functions associated with buttons and frames etc. should be moved into the relevant script, but #
# this is a bit more complicated. For now, they are separated out into sections within this script. The only GUI stuff #
# going on in here is calling the initialisation functions. To change the layout of a tab, edit it in its own script,  #
# and add any new functions in this script, in the relevant section. (If there is one yet) #
# #
# There's still a lot of cleaning up to be done in the future... #
########################################################################################################################
# solve gtk startup error
#import gtk
#gtk.set_interactive(False)
import base64
import getpass
import glob
import math
import multiprocessing
import pickle
import subprocess
import sys, os
import webbrowser
from datetime import datetime
from PyQt4 import QtGui, QtCore, QtWebKit
sys.path.append(os.path.join(os.getenv('XChemExplorer_DIR'), 'lib'))
sys.path.append(os.path.join(os.getenv('XChemExplorer_DIR'), 'web'))
sys.path.append(os.path.join(os.getenv('XChemExplorer_DIR'), 'gui_scripts'))
from settings_preferences import *
from layout import *
from stylesheet import set_stylesheet
from XChemUtils import parse
import XChemThread
import XChemDB
import XChemPANDDA
import XChemToolTips
import XChemMain
import XChemPlots
import XChemLog
import XChemProcess
import XChemDeposit
import XChemWeb
import matplotlib.pyplot as plt
from matplotlib.backends.backend_qt4agg import FigureCanvasQTAgg as FigureCanvas
class XChemExplorer(QtGui.QApplication):
    def __init__(self, args):
        """Create the Qt application, build the main window and enter the event loop.

        Args:
            :args: command line arguments forwarded to QApplication
        """
        # init a QApplication object to hold XCE
        QtGui.QApplication.__init__(self, args)
        # start GUI; widgets are built before the stylesheet is applied
        self.start_GUI()
        # set stylesheet - how the gui looks
        set_stylesheet(self)
        # enter the Qt event loop; blocks until the application quits
        self.exec_()
    def start_GUI(self):
        """Initialise settings, preferences and all main-window layouts."""
        # check http://doc.qt.io/qt-4.8/stylesheet-customizing.html#the-box-model
        # This needs moving somewhere more appropriate...
        self.headlineLabelfont = QtGui.QFont("Arial", 20, QtGui.QFont.Bold)
        # load directory paths, preferences and the tab table definitions
        # (defined in gui_scripts/settings_preferences.py)
        setup().settings(self)
        setup().preferences(self)
        setup().tables(self)
        self.layout_funcs = LayoutFuncs()
        # GUI setup: main window, workflow and tab layouts
        self.window = QtGui.QWidget()
        self.window.setWindowTitle("XChemExplorer")
        self.screen = QtGui.QDesktopWidget().screenGeometry()
        LayoutObjects(self).workflow(self)
        LayoutObjects(self).main_layout(self)
        LayoutFuncs().add_widgets_layouts(self)
        # warn if XCE was launched outside the expected labxchem directory
        self.checkLabXChemDir()
        # only back up the database if one already exists
        if os.path.isfile(os.path.join(self.database_directory, self.data_source_file)):
            self.backup_soakDB()
def backup_soakDB(self):
XChemMain.backup_soakDB(os.path.join(self.database_directory, self.data_source_file),self.xce_logfile)
def checkLabXChemDir(self):
dirCheck = QtGui.QMessageBox()
dirCheckLayout = dirCheck.layout()
vbox = QtGui.QVBoxLayout()
try:
warning = (
'Are you sure you want to launch XCE here:\n\n'
+self.labxchem_directory_current+'\n\n'
'If this is not where you should be running XCE, please close!\n'
)
except AttributeError:
return
vbox.addWidget(QtGui.QLabel(warning))
dirCheckLayout.addLayout(vbox, 0, 0)
dirCheck.exec_();
# function to update datasource
def datasource_menu_reload_samples(self):
self.update_log.insert(
'reading samples from data source: ' + os.path.join(self.database_directory, self.data_source_file))
self.update_status_bar(
'reading samples from data source: ' + os.path.join(self.database_directory, self.data_source_file))
self.update_header_and_data_from_datasource()
self.update_all_tables()
self.overview_datasource_table.resizeColumnsToContents()
# function to create new datasource
def create_new_data_source(self):
file_name = str(QtGui.QFileDialog.getSaveFileName(self.window, 'Save file', self.database_directory))
# make sure that the file always has .sqlite extension
if file_name.rfind('.') != -1:
file_name = file_name[:file_name.rfind('.')] + '.sqlite'
else:
file_name = file_name + '.sqlite'
self.db = XChemDB.data_source(file_name)
print('==> XCE: creating new data source')
self.db.create_empty_data_source_file()
self.db.create_missing_columns()
self.database_directory = file_name[:file_name.rfind('/')]
self.data_source_file = file_name[file_name.rfind('/') + 1:]
self.data_source_file_label.setText(os.path.join(self.database_directory, self.data_source_file))
self.settings['database_directory'] = self.database_directory
self.settings['data_source'] = self.data_source_file
self.data_source_set = True
self.datasource_menu_reload_samples()
####################################################################################################################
# #
# DATASETS TAB #
# #
####################################################################################################################
def continously_check_for_new_data_collection(self, state):
self.timer_to_check_for_new_data_collection.timeout.connect(
lambda: self.check_for_new_autoprocessing_or_rescore(False))
if state == QtCore.Qt.Checked:
print('==> XCE: checking automatically every 120s for new data collection')
self.timer_to_check_for_new_data_collection.start(120000)
else:
print('==> XCE: stopped checking for new data collections')
self.timer_to_check_for_new_data_collection.stop()
    def target_selection_combobox_activated(self, text):
        """Remember the target chosen in the target selection combobox.

        Args:
            text: QString/str of the selected combobox entry.
        """
        self.target = str(text)
def select_diffraction_data_directory(self):
self.diffraction_data_directory = str(QtGui.QFileDialog.getExistingDirectory(self.window, "Select Directory"))
self.diffraction_data_dir_label.setText(self.diffraction_data_directory)
self.settings['diffraction_data_directory'] = self.diffraction_data_directory
self.update_log.insert('setting diffraction data directory to ' + self.diffraction_data_directory)
    def search_for_datasets(self):
        """Search the diffraction data directory for datasets in a background thread.

        Results come back via Qt signals which update the reprocessing
        table, progress bar and status bar.
        """
        self.update_log.insert('search diffraction data directory for datasets...')
        print('will search ' + str(self.diffraction_data_directory))
        self.work_thread = XChemMain.find_diffraction_image_directory_fast(self.diffraction_data_directory)
        self.explorer_active = 1
        # wire worker signals to GUI slots before starting the thread
        self.connect(self.work_thread, QtCore.SIGNAL("update_datasets_reprocess_table"),
                     self.update_datasets_reprocess_table)
        self.connect(self.work_thread, QtCore.SIGNAL("update_progress_bar"), self.update_progress_bar)
        self.connect(self.work_thread, QtCore.SIGNAL("update_status_bar(QString)"), self.update_status_bar)
        self.connect(self.work_thread, QtCore.SIGNAL("finished()"), self.thread_finished)
        self.work_thread.start()
        #self.work_thread = self.update_datasets_reprocess_table(self.diffraction_data_directory)
def translate_datasetID_to_sampleID(self):
translate = QtGui.QMessageBox()
translateLayout = translate.layout()
self.translate_datasetID_to_sampleID_file = '-'
vbox = QtGui.QVBoxLayout()
button = QtGui.QPushButton('Open CSV')
button.clicked.connect(self.open_csv_file_translate_datasetID_to_sampleID)
vbox.addWidget(button)
self.translate_datasetID_to_sampleID_csv_label = QtGui.QLabel(self.translate_datasetID_to_sampleID_file)
vbox.addWidget(self.translate_datasetID_to_sampleID_csv_label)
translateLayout.addLayout(vbox, 0, 0)
translate.addButton(QtGui.QPushButton('OK'), QtGui.QMessageBox.YesRole)
translate.addButton(QtGui.QPushButton('Cancel'), QtGui.QMessageBox.RejectRole)
reply = translate.exec_();
if reply == 0:
if os.path.isfile(self.translate_datasetID_to_sampleID_file):
trans_dict = {}
for line in open(self.translate_datasetID_to_sampleID_file):
if len(line.split(',')) == 2:
dataset = line.split(',')[0]
new_sample_id = line.split(',')[1]
trans_dict[dataset] = new_sample_id
if len(trans_dict) >= 1:
allRows = self.datasets_reprocess_table.rowCount()
for row in xrange(0, allRows):
dataset_id = str(self.datasets_reprocess_table.item(row, 0).text())
sample_id = str(self.datasets_reprocess_table.item(row, 1).text())
if dataset_id in trans_dict:
cell_text = QtGui.QTableWidgetItem()
cell_text.setText(trans_dict[dataset_id])
cell_text.setTextAlignment(QtCore.Qt.AlignCenter | QtCore.Qt.AlignCenter)
self.datasets_reprocess_table.setItem(row, 1, cell_text)
self.update_log.insert(
'dataset: {0!s} -> changing sampleID to: {1!s}'.format(dataset_id,
trans_dict[dataset_id]))
def select_sample_for_xia2(self):
indexes = self.datasets_reprocess_table.selectionModel().selectedRows()
for index in sorted(indexes):
xtal = str(self.datasets_reprocess_table.item(index.row(), 1).text())
print(xtal, self.diffraction_data_table_dict[xtal][0])
self.update_log.insert('{0!s} marked for reprocessing'.format(index.row()))
self.diffraction_data_table_dict[xtal][0].setChecked(True)
def select_reprocess_reference_mtz(self):
self.update_log.insert('trying to set new reference mtz file for reprocessing with xia2')
file_name = str(QtGui.QFileDialog.getOpenFileName(self.window, 'Select file', self.database_directory))
if os.path.isfile(file_name):
if file_name.endswith('.mtz'):
self.diffraction_data_reference_mtz = file_name
self.update_log.insert(
'new reference file for data processing with xia2: ' + self.diffraction_data_reference_mtz)
self.reprocess_reference_mtz_file_label.setText(self.diffraction_data_reference_mtz)
else:
self.update_log.insert('this does not seem to be a mtz file: ' + file_name)
    def check_for_new_autoprocessing_or_rescore(self, rescore_only):
        """Read autoprocessing results from disc in a background thread, or rescore them.

        Args:
            rescore_only: if True, only rescore existing results; the user
                is asked to confirm first because rescoring overwrites all
                manual selections.

        Does nothing unless a target has been selected. Progress and
        results are delivered via Qt signals.
        """
        self.update_log.insert('checking for new data collection')
        start_thread = False
        if rescore_only:
            # first pop up a warning message as this will overwrite all user selections
            msgBox = QtGui.QMessageBox()
            msgBox.setText("*** WARNING ***\nThis will overwrite all your manual selections!\nDo you want to continue?")
            msgBox.addButton(QtGui.QPushButton('Yes'), QtGui.QMessageBox.YesRole)
            msgBox.addButton(QtGui.QPushButton('No'), QtGui.QMessageBox.RejectRole)
            reply = msgBox.exec_();
            # reply == 0 means the first button added ('Yes') was pressed
            if reply == 0:
                start_thread = True
            else:
                start_thread = False
        else:
            start_thread = True
        if start_thread:
            if self.target == '=== SELECT TARGET ===':
                # NOTE(review): msgBox is built but never shown (exec_ is in the
                # commented-out block below) - the warning only disables the thread
                msgBox = QtGui.QMessageBox()
                warning = ('*** WARNING ***\n'
                           'Please select a target or\n'
                           'select "=== project directory ===" if you want to read reprocessed results\n'
                           'In case target list is empty, make sure that you have selected the actual\n'
                           'data collection visit (e.g. /dls/i04-1/data/2018/lb18145-70)' )
                msgBox.setText(warning)
                start_thread = False
            # msgBox.setText(warning)
            # msgBox.addButton(QtGui.QPushButton('Yes'), QtGui.QMessageBox.YesRole)
            # msgBox.addButton(QtGui.QPushButton('No'), QtGui.QMessageBox.RejectRole)
            # reply = msgBox.exec_();
            # if reply == 0:
            # start_thread = True
            # else:
            # start_thread = False
            # else:
            # start_thread = True
        if start_thread:
            self.work_thread = XChemThread.read_autoprocessing_results_from_disc(self.visit_list,
                                                                                 self.target,
                                                                                 self.reference_file_list,
                                                                                 self.database_directory,
                                                                                 self.data_collection_dict,
                                                                                 self.preferences,
                                                                                 self.datasets_summary_file,
                                                                                 self.initial_model_directory,
                                                                                 rescore_only,
                                                                                 self.acceptable_low_resolution_limit_for_data,
                                                                                 os.path.join(self.database_directory,
                                                                                              self.data_source_file),
                                                                                 self.xce_logfile)
            self.explorer_active = 1
            # wire worker signals to GUI slots before starting the thread
            self.connect(self.work_thread, QtCore.SIGNAL("update_progress_bar"), self.update_progress_bar)
            self.connect(self.work_thread, QtCore.SIGNAL("update_status_bar(QString)"), self.update_status_bar)
            self.connect(self.work_thread, QtCore.SIGNAL("finished()"), self.thread_finished)
            self.connect(self.work_thread, QtCore.SIGNAL("create_widgets_for_autoprocessing_results_only"),
                         self.create_widgets_for_autoprocessing_results_only)
            self.work_thread.start()
#################################################################################################################
#
#
#
# => for new module from hell
# > start
    def update_gdaLog_parsing_instructions_and_score(self, gdaLogInstructions):
        """Store updated gda-log parsing instructions, then rescore autoprocessing results.

        Slot for the signal emitted by the read_pinIDs_from_gda_logs worker.
        """
        self.gdaLogInstructions = gdaLogInstructions
        self.select_best_autoprocessing_result()
    def read_pinIDs_from_gda_logs(self):
        """Parse pin IDs from the gda logfiles of the current visit in a background thread.

        On completion the worker emits
        'update_gdaLog_parsing_instructions_and_score', which triggers
        rescoring of the autoprocessing results.
        """
        self.update_log.insert('reading pinIDs from gda logfiles...')
        visit, beamline = XChemMain.getVisitAndBeamline(self.beamline_directory)
        self.work_thread = XChemThread.read_pinIDs_from_gda_logs(beamline,
                                                                 visit,
                                                                 os.path.join(
                                                                     self.database_directory,
                                                                     self.data_source_file),
                                                                 self.gdaLogInstructions,
                                                                 self.xce_logfile)
        self.explorer_active = 1
        # wire worker signals to GUI slots before starting the thread
        self.connect(self.work_thread, QtCore.SIGNAL("update_progress_bar"), self.update_progress_bar)
        self.connect(self.work_thread, QtCore.SIGNAL("update_status_bar(QString)"), self.update_status_bar)
        self.connect(self.work_thread, QtCore.SIGNAL("finished()"), self.thread_finished)
        self.connect(self.work_thread, QtCore.SIGNAL("update_gdaLog_parsing_instructions_and_score"),
                     self.update_gdaLog_parsing_instructions_and_score)
        self.work_thread.start()
    def check_for_new_autoprocessing_results(self):
        """Read/write autoprocessing results from the processed-data directory in a thread.

        The directory searched depends on the selected target:
        '=== project directory ===' uses the initial model directory,
        any real target uses <beamline_directory>/processed/<target>.
        Aborts with an error message if no target is selected. On
        completion the worker emits 'read_pinIDs_from_gda_logs'.
        """
        self.update_log.insert('checking for new data collection')
        if self.target == '=== SELECT TARGET ===':
            self.update_log.error('NO TARGET SELECTED, PLEASE SELECT A TARGET AND TRY AGAIN!')
            start_thread = False
        elif self.target == '=== project directory ===':
            processedDir = self.initial_model_directory
            start_thread = True
        # elif self.read_agamemnon.isChecked():
        # tmp = '/'.join(self.beamline_directory.split('/')[:6])
        # processedDir = tmp[:tmp.rfind('-')]
        ## processedDir = os.path.join(self.beamline_directory[:self.beamline_directory.rfind('-') + 1] + '*/processed/agamemnon/'+self.target)
        ## processedDir = os.path.join(self.beamline_directory[:self.beamline_directory.rfind('-') + 1] + '*/processed/*/'+self.target)
        # start_thread = True
        else:
            processedDir = os.path.join(self.beamline_directory, 'processed', self.target)
            start_thread = True
        if start_thread:
            # processedDir=os.path.join(self.beamline_directory,'processed',self.target)
            self.work_thread = XChemThread.read_write_autoprocessing_results_from_to_disc(processedDir,
                                                                                          os.path.join(
                                                                                              self.database_directory,
                                                                                              self.data_source_file),
                                                                                          self.initial_model_directory,
                                                                                          self.xce_logfile,
                                                                                          self.target,
                                                                                          self.read_agamemnon.isChecked())
            self.explorer_active = 1
            # wire worker signals to GUI slots before starting the thread
            self.connect(self.work_thread, QtCore.SIGNAL("update_progress_bar"), self.update_progress_bar)
            self.connect(self.work_thread, QtCore.SIGNAL("update_status_bar(QString)"), self.update_status_bar)
            self.connect(self.work_thread, QtCore.SIGNAL("finished()"), self.thread_finished)
            self.connect(self.work_thread, QtCore.SIGNAL("read_pinIDs_from_gda_logs"),
                         self.read_pinIDs_from_gda_logs)
            self.work_thread.start()
    def select_best_autoprocessing_result(self):
        """Choose the best autoprocessing outcome for each sample in a background thread.

        If self.rescore is set, the user is warned first because
        rescoring overwrites all manual selections. The worker emits
        'populate_datasets_summary_table_NEW' when results are ready.
        """
        if self.rescore:
            # first pop up a warning message as this will overwrite all user selections
            msgBox = QtGui.QMessageBox()
            msgBox.setText("*** WARNING ***\nThis will overwrite all your manual selections!\nDo you want to continue?")
            msgBox.addButton(QtGui.QPushButton('Yes'), QtGui.QMessageBox.YesRole)
            msgBox.addButton(QtGui.QPushButton('No'), QtGui.QMessageBox.RejectRole)
            reply = msgBox.exec_();
            # reply == 0 means the first button added ('Yes') was pressed
            if reply != 0:
                start_thread = False
            else:
                start_thread = True
        else:
            start_thread = True
        if start_thread:
            self.update_log.insert('selecting best autoprocessing result')
            self.update_log.insert('samples where user made manual changes will be ignored!')
            if self.target == '=== project directory ===':
                processedDir = self.initial_model_directory
            else:
                processedDir = os.path.join(self.beamline_directory, 'processed', self.target)
            visit,beamline = XChemMain.getVisitAndBeamline(processedDir)
            if self.read_agamemnon.isChecked():
                # agamemnon mode: collect all sibling visit directories instead
                visit = []
                for v in glob.glob(
                        os.path.join(self.beamline_directory[:self.beamline_directory.rfind('-') + 1] + '*')):
                    visit.append(v[v.rfind('/') + 1:])
            self.work_thread = XChemThread.choose_autoprocessing_outcome(os.path.join(self.database_directory,
                                                                                      self.data_source_file),
                                                                         visit,
                                                                         self.reference_file_list,
                                                                         self.preferences,
                                                                         self.initial_model_directory,
                                                                         self.rescore,
                                                                         self.xce_logfile,
                                                                         self.read_agamemnon.isChecked())
            self.explorer_active = 1
            # wire worker signals to GUI slots before starting the thread
            self.connect(self.work_thread, QtCore.SIGNAL("update_progress_bar"), self.update_progress_bar)
            self.connect(self.work_thread, QtCore.SIGNAL("update_status_bar(QString)"), self.update_status_bar)
            self.connect(self.work_thread, QtCore.SIGNAL("finished()"), self.thread_finished)
            self.connect(self.work_thread, QtCore.SIGNAL("populate_datasets_summary_table_NEW"),
                         self.populate_datasets_summary_table_NEW)
            self.work_thread.start()
# < end
###################################################################################################################
####################################################################################################################
# #
# MAPS TAB #
# #
####################################################################################################################
def set_new_reference_if_applicable(self):
print('hallo')
reference_root = str(self.reference_file_selection_combobox.currentText())
pg_ref = ''
ucVol_ref = 0.0
for reference in self.reference_file_list:
print(reference[0], reference_root)
if reference[0] == reference_root:
pg_ref = reference[5]
ucVol_ref = reference[4]
break
if ucVol_ref == 0.0:
self.update_log.insert('cannot set reference file since unit cell volume of reference pdb is 0!')
return
for xtal in self.initial_model_dimple_dict:
reference_file_selection_combobox = self.initial_model_dimple_dict[xtal][1]
self.populate_reference_combobox(reference_file_selection_combobox)
db_dict = self.xtal_db_dict[xtal]
pg_xtal = db_dict['DataProcessingPointGroup']
ucVol_xtal = db_dict['DataProcessingUnitCellVolume']
try:
difference = math.fabs(1 - (float(ucVol_xtal) / float(ucVol_ref))) * 100
except ValueError:
self.update_log.insert(xtal + ' -> cannot calculate unit cell volume difference')
continue
if pg_xtal == pg_ref and difference < self.allowed_unitcell_difference_percent:
print(xtal, pg_xtal, ucVol_xtal)
index = reference_file_selection_combobox.findText(reference_root, QtCore.Qt.MatchFixedString)
reference_file_selection_combobox.setCurrentIndex(index)
self.update_log.insert(xtal + ' -> setting ' + reference_root + ' as input PDB file for DIMPLE')
def refresh_reference_file_list(self):
self.reference_file_list = self.get_reference_file_list(' ')
self.populate_reference_combobox(self.reference_file_selection_combobox)
    def on_context_menu_initial_model(self, point):
        """Show the maps-table context menu at the given widget-local position."""
        # show context menu
        self.popMenu_for_maps_table.exec_(self.sender().mapToGlobal(point))
####################################################################################################################
# #
# PANDDA TAB #
# #
####################################################################################################################
    def select_pandda_input_template(self):
        """Derive PanDDA input templates from an example PDB or MTZ picked by the user.

        Fills the pandda data-directory, pdb-style and mtz-style entry
        fields. If the sibling file (same name, other extension) exists
        next to the selected one, both styles are set; otherwise the
        missing one is left empty.
        """
        mtzin = ''
        filepath_temp = QtGui.QFileDialog.getOpenFileNameAndFilter(self.window, 'Select Example PDB or MTZ File',
                                                                   self.initial_model_directory, '*.pdb;;*.mtz')
        filepath = str(tuple(filepath_temp)[0])
        pdbin = filepath.split('/')[-1]
        if filepath.endswith('.pdb'):
            pdbin = filepath.split('/')[-1]
            # look for the matching MTZ next to the selected PDB
            mtzin_temp = pdbin.replace('.pdb', '.mtz')
            if os.path.isfile(filepath.replace(pdbin, mtzin_temp)):
                mtzin = mtzin_temp
            else:
                mtzin = ''
        if filepath.endswith('.mtz'):
            mtzin = filepath.split('/')[-1]
            # look for the matching PDB next to the selected MTZ
            pdbin_temp = pdbin.replace('.mtz', '.pdb')
            if os.path.isfile(filepath.replace(mtzin, pdbin_temp)):
                pdbin = pdbin_temp
            else:
                pdbin = ''
        try:
            # data dir = two levels above the selected file (strip file name and sample dir)
            self.pandda_input_data_dir_entry.setText(
                '/'+os.path.join(*filepath.split('/')[0:len(filepath.split('/'))-2]))
        except TypeError:
            self.update_log.error('directory selection invalid')
        # if len(filepath.split('/')) - len(self.initial_model_directory.split('/')) == 2:
        # self.pandda_input_data_dir_entry.setText(os.path.join(self.initial_model_directory, '*'))
        # elif len(filepath.split('/')) - len(self.initial_model_directory.split('/')) > 2:
        # subdir = os.path.join(
        # *filepath.split('/')[len(self.initial_model_directory.split('/')) + 1:len(filepath.split('/')) - 1])
        # self.pandda_input_data_dir_entry.setText(os.path.join(self.initial_model_directory, '*', subdir))
        # else:
        # pass
        self.pandda_pdb_style_entry.setText(pdbin)
        self.pandda_mtz_style_entry.setText(mtzin)
def change_pandda_spg_label(self):
combo_text = str(self.pandda_reference_file_selection_combobox.currentText())
for file in self.reference_file_list:
if file[0] == combo_text:
self.pandda_reference_file_spg_label.setText(file[1])
break
    def on_context_menu_pandda(self, point):
        """Show the pandda-table context menu at the given widget-local position."""
        # show context menu
        self.popMenu_for_pandda_table.exec_(self.sender().mapToGlobal(point))
####################################################################################################################
# #
# DEPO TAB #
# #
####################################################################################################################
def export_to_html(self):
XChemWeb.export_to_html(self.html_export_directory,
self.initial_model_directory,
os.path.join(self.database_directory, self.data_source_file),
self.xce_logfile).prepare('0')
def export_to_html_CompChem(self):
XChemWeb.export_to_html(self.html_export_directory,
self.initial_model_directory,
os.path.join(self.database_directory, self.data_source_file),
self.xce_logfile).prepare('4')
def export_to_html_deposition_ready(self):
XChemWeb.export_to_html(self.html_export_directory,
self.initial_model_directory,
os.path.join(self.database_directory, self.data_source_file),
self.xce_logfile).prepare('5')
# self.update_log.insert('exporting contents of SQLite database into ' + self.html_export_directory)
# os.system(
# 'ccp4-python ' + os.getenv('XChemExplorer_DIR') + '/web/process_sqlite.py -t Summary -s ' + os.path.join(
# self.database_directory, self.data_source_file) + ' -d ' + self.html_export_directory)
# XChemWeb.create_ICM_input_file(self.html_export_directory,
# os.path.join(self.database_directory, self.data_source_file))
# self.update_log.insert('open ICMpro:')
# self.update_log.insert('/dls/science/groups/i04-1/software/icm-3.8-5/icm64 -g')
# self.update_log.insert('open file browser and navigate to ' + self.html_export_directory)
# self.update_log.insert('drag and drop dsEvent_sqlite.icm into the main window')
# self.update_log.insert('the script will appear in the Workspace Panel')
# self.update_log.insert('right click on the script and select RUN')
# self.update_log.insert('be patient, this may take a while, depending on the number of events')
# self.status_bar.showMessage('please check terminal window for further information')
# def select_ground_state_pdb(self):
# p = QtGui.QFileDialog.getOpenFileNameAndFilter(self.window, 'Select File', os.getcwd(),'*.pdb')
# pdb = str(tuple(p)[0])
# self.ground_state_pdb_button_label.setText(pdb)
def select_ground_state_mtz(self):
m = QtGui.QFileDialog.getOpenFileNameAndFilter(self.window, 'Select File', os.getcwd(),'*.mtz')
mtz = str(tuple(m)[0])
self.ground_state_mtz_button_label.setText(mtz)
def add_ground_state_db(self):
pdb, mtz = self.auto_select_ground_state_reference_PDB()
if pdb != None:
db_dict = {'DimplePANDDApath': self.panddas_directory,
'PDB_file': pdb,
'MTZ_file': mtz}
self.db.create_or_remove_missing_records_in_depositTable(self.xce_logfile, 'ground_state', 'ground_state',
db_dict)
else:
self.update_log.error('could not find a suitable reference file; see messages above!')
def auto_select_ground_state_reference_PDB(self):
pdb = None
mtz = None
xtalList = []
for dirs in glob.glob(os.path.join(self.panddas_directory,'processed_datasets','*')):
xtal = dirs[dirs.rfind('/')+1:]
if os.path.isfile(os.path.join(dirs,xtal+'-pandda-input.pdb')):
pdbHeader = parse().PDBheader(os.path.join(dirs,xtal+'-pandda-input.pdb'))
try:
xtalList.append( [xtal, float(pdbHeader['Rfree']) , float(pdbHeader['ResolutionHigh']) ] )
except ValueError:
self.update_log.error('%s: cannot read Rfree or Resolution from PDB header; skipping...')
pass
self.update_log.insert('found %s PDB files in %s' %(str(len(xtalList)),os.path.join(self.panddas_directory,'processed_datasets')))
if len(xtalList) >= 10:
self.update_log.insert('sorting PDBs by Rfree and selecting the 10 with lowest value')
rfree = sorted(xtalList, key=lambda x: x[1])[:10]
self.update_log.insert('top 10 PDB files with lowest Rfree:')
for item in rfree:
self.update_log.insert('%s: Rfree = %s | Resolution = %s' %(item[0],str(round(item[1],3)),str(round(item[2],2))))
self.update_log.insert('selecting PDB with highest resolution')
reso = sorted(rfree, key=lambda x: x[2])[:1]
self.update_log.insert('selected the following PDB file: %s: Rfree = %s | Resolution = %s' %(reso[0][0],str(round(reso[0][1],3)),str(round(reso[0][2],2))))
pdb = os.path.join(self.panddas_directory,'processed_datasets',reso[0][0],reso[0][0]+'-pandda-input.pdb')
mtz = os.path.join(self.panddas_directory,'processed_datasets',reso[0][0],reso[0][0]+'-pandda-input.mtz')
else:
self.update_log.error('found less than 10 valid PDB files in %s' %os.path.join(self.panddas_directory,'processed_datasets'))
return pdb, mtz
    def prepare_ground_state_mmcif(self):
        """Prepare the mmCIF file for apo (ground-state) structure deposition."""
        self.update_log.insert('preparing mmcif file for apo structure deposition')
        self.prepare_models_for_deposition_ligand_bound('ground_state')
    def open_icm(self):
        """Launch ICM on the html export directory in a background thread."""
        self.update_log.insert('starting ICM...')
        self.work_thread = XChemThread.start_ICM(self.html_export_directory)
        self.connect(self.work_thread, QtCore.SIGNAL("finished()"), self.thread_finished)
        self.work_thread.start()
def prepare_files_for_zenodo_upload(self):
self.update_log.insert('preparing files for ZENODO upload...')
os.system('ccp4-python ' + os.getenv(
'XChemExplorer_DIR') + '/helpers/prepare_for_zenodo_upload.py ' + self.html_export_directory)
def update_html_for_zenodo_upload(self):
try:
uploadID = int(self.zenodo_upload_id_entry.text())
self.update_log.insert('updating html files for ZENODO upload,...')
self.update_log.insert('ZENODO upload = ' + str(uploadID))
os.system('ccp4-python ' + os.getenv(
'XChemExplorer_DIR') + '/helpers/prepare_for_zenodo_upload.py {0!s} {1!s}'.format(
self.html_export_directory, uploadID))
except ValueError:
self.update_log.insert('zenodo upload ID must be an integer!')
####################################################################################################################
# #
# SETTINGS TAB #
# #
####################################################################################################################
    def settings_button_clicked(self):
        """Dispatch all Settings-tab buttons based on the sender button's text.

        Each branch lets the user pick a directory or file, updates the
        matching attribute, label widget and self.settings entry, and
        performs any follow-up work (reloading reference files, reloading
        the data source, refreshing target lists, ...).
        """
        # --- project directory ---
        if self.sender().text() == 'Select Project Directory':
            self.initial_model_directory = str(QtGui.QFileDialog.getExistingDirectory(self.window, "Select Directory"))
            self.initial_model_directory_label.setText(self.initial_model_directory)
            self.pandda_input_data_dir_entry.setText(self.initial_model_directory)
            self.settings['initial_model_directory'] = self.initial_model_directory
        # --- reference structure directory ---
        if self.sender().text() == 'Select Reference Structure Directory':
            reference_directory_temp = str(QtGui.QFileDialog.getExistingDirectory(self.window, "Select Directory"))
            # only re-scan reference files if the directory actually changed
            if reference_directory_temp != self.reference_directory:
                self.reference_directory = reference_directory_temp
                self.update_reference_files(' ')
            self.reference_directory_label.setText(self.reference_directory)
            self.settings['reference_directory'] = self.reference_directory
        # --- data source (SQLite) file ---
        if self.sender().text() == 'Select Data Source File':
            filepath_temp = QtGui.QFileDialog.getOpenFileNameAndFilter(self.window, 'Select File',
                                                                       self.database_directory, '*.sqlite')
            filepath = str(tuple(filepath_temp)[0])
            self.data_source_file = filepath.split('/')[-1]
            self.database_directory = filepath[:filepath.rfind('/')]
            self.settings['database_directory'] = self.database_directory
            self.settings['data_source'] = os.path.join(self.database_directory, self.data_source_file)
            # data source is only usable if we can write to it
            write_enabled = self.check_write_permissions_of_data_source()
            if not write_enabled:
                self.data_source_set = False
            else:
                self.data_source_set = True
                self.data_source_file_label.setText(os.path.join(self.database_directory, self.data_source_file))
                self.db = XChemDB.data_source(os.path.join(self.database_directory, self.data_source_file))
                self.db.create_missing_columns()
                self.datasource_menu_reload_samples()
        # --- beamline / data collection directory ---
        if self.sender().text() == 'Select Data Collection Directory':
            dir_name = str(QtGui.QFileDialog.getExistingDirectory(self.window, "Select Directory"))
            # only re-read target and visit lists if the directory changed
            if dir_name != self.beamline_directory:
                self.beamline_directory = dir_name
                self.target_list, self.visit_list = XChemMain.get_target_and_visit_list(self.beamline_directory,self.read_agamemnon.isChecked())
                self.populate_target_selection_combobox(self.target_selection_combobox)
            self.beamline_directory_label.setText(self.beamline_directory)
            self.settings['beamline_directory'] = self.beamline_directory
        # --- existing collection summary (.pkl) file ---
        if self.sender().text() == 'Select Existing\nCollection Summary File':
            if self.datasets_summary_file != '':
                # start browsing in the directory of the current summary file
                filepath_temp = QtGui.QFileDialog.getOpenFileNameAndFilter(self.window, 'Select File',
                                                                           self.datasets_summary_file[
                                                                           :self.datasets_summary_file.rfind(
                                                                               '/')], '*.pkl')
            else:
                filepath_temp = QtGui.QFileDialog.getOpenFileNameAndFilter(self.window, 'Select File', os.getcwd(),
                                                                           '*.pkl')
            filepath = str(tuple(filepath_temp)[0])
            self.datasets_summary_file = filepath
            self.datasets_summary_file_label.setText(self.datasets_summary_file)
            self.settings['datasets_summary'] = self.datasets_summary_file
        # --- new collection summary (.pkl) file ---
        if self.sender().text() == 'Assign New\nCollection Summary File':
            if self.datasets_summary_file != '':
                file_name = str(QtGui.QFileDialog.getSaveFileName(self.window, 'New file',
                                                                  self.datasets_summary_file[
                                                                  :self.datasets_summary_file.rfind('/')]))
            else:
                file_name = str(QtGui.QFileDialog.getSaveFileName(self.window, 'New file', self.current_directory))
            # make sure that the file always has .pkl extension
            if str(file_name).rfind('.') != -1:
                file_name = file_name[:file_name.rfind('.')] + '.pkl'
            else:
                file_name = file_name + '.pkl'
            self.datasets_summary_file = file_name
            self.datasets_summary_file_label.setText(self.datasets_summary_file)
            self.settings['datasets_summary'] = self.datasets_summary_file
        # --- CCP4 scratch directory ---
        if self.sender().text() == 'Select CCP4_SCR Directory':
            self.ccp4_scratch_directory = str(QtGui.QFileDialog.getExistingDirectory(self.window, "Select Directory"))
            self.ccp4_scratch_directory_label.setText(self.ccp4_scratch_directory)
            self.settings['ccp4_scratch'] = self.ccp4_scratch_directory
        # --- PanDDA directory ---
        if self.sender().text() == 'Select PanDDA Directory':
            self.panddas_directory = str(QtGui.QFileDialog.getExistingDirectory(self.window, "Select Directory"))
            self.panddas_directory_label.setText(self.panddas_directory)
            self.pandda_output_data_dir_entry.setText(self.panddas_directory)
            self.ground_state_pandda_directory_label.setText(self.panddas_directory)
            print('PANDDA', self.panddas_directory)
            self.settings['panddas_directory'] = self.panddas_directory
            self.layout_funcs.pandda_html(self)
        # --- HTML export directory ---
        if self.sender().text() == 'Select HTML Export Directory':
            self.html_export_directory = str(QtGui.QFileDialog.getExistingDirectory(self.window, "Select Directory"))
            self.html_export_directory_label.setText(self.html_export_directory)
            self.settings['html_export_directory'] = self.html_export_directory
        # --- group deposition directory ---
        if self.sender().text() == 'Select Group deposition Directory':
            self.group_deposit_directory = str(QtGui.QFileDialog.getExistingDirectory(self.window, "Select Directory"))
            self.group_deposition_directory_label.setText(self.group_deposit_directory)
            self.settings['group_deposit_directory'] = self.group_deposit_directory
        #self.datasource_menu_reload_samples()
######################################### sort stuff below here ####################################################
def select_sample_for_dimple(self):
indexes = self.maps_table.selectionModel().selectedRows()
for index in sorted(indexes):
xtal = str(self.maps_table.item(index.row(), 0).text())
self.update_log.insert('{0!s} is marked for DIMPLE'.format(index.row()))
self.initial_model_dimple_dict[xtal][0].setChecked(True)
def update_summary_plot(self):
if self.data_source_set:
XChemPlots.summary_plot(os.path.join(self.database_directory, self.data_source_file),
self.overview_axes).update_overview()
self.overview_canvas.draw()
    def show_preferences(self):
        """Show the modal Preferences dialog.

        Builds a QMessageBox whose layout is populated with one row per
        setting; most widgets are wired to ``change_*`` /
        ``preferences_*_changed`` callbacks on self, so edits take effect
        immediately rather than on dialog close.
        """
        preferences = QtGui.QMessageBox()
        preferencesLayout = preferences.layout()
        vbox = QtGui.QVBoxLayout()
        # --- filename root ---
        settings_hbox_filename_root = QtGui.QHBoxLayout()
        filename_root_label = QtGui.QLabel('filename root:')
        settings_hbox_filename_root.addWidget(filename_root_label)
        filename_root_input = QtGui.QLineEdit()
        filename_root_input.setFixedWidth(400)
        filename_root_input.setText(str(self.filename_root))
        filename_root_input.textChanged[str].connect(self.change_filename_root)
        settings_hbox_filename_root.addWidget(filename_root_input)
        vbox.addLayout(settings_hbox_filename_root)
        # --- max. allowed unit-cell difference (%) ---
        settings_hbox_adjust_allowed_unit_cell_difference = QtGui.QHBoxLayout()
        adjust_allowed_unit_cell_difference_label = QtGui.QLabel(
            'Max. Allowed Unit Cell Difference between Reference and Target (%):')
        settings_hbox_adjust_allowed_unit_cell_difference.addWidget(adjust_allowed_unit_cell_difference_label)
        adjust_allowed_unit_cell_difference = QtGui.QLineEdit()
        adjust_allowed_unit_cell_difference.setFixedWidth(200)
        adjust_allowed_unit_cell_difference.setText(str(self.allowed_unitcell_difference_percent))
        adjust_allowed_unit_cell_difference.textChanged[str].connect(self.change_allowed_unitcell_difference_percent)
        settings_hbox_adjust_allowed_unit_cell_difference.addWidget(adjust_allowed_unit_cell_difference)
        vbox.addLayout(settings_hbox_adjust_allowed_unit_cell_difference)
        # --- acceptable low-resolution limit (Angstrom) ---
        settings_hbox_acceptable_low_resolution_limit = QtGui.QHBoxLayout()
        adjust_acceptable_low_resolution_limit_label = QtGui.QLabel(
            'Acceptable low resolution limit for datasets (in Angstrom):')
        settings_hbox_acceptable_low_resolution_limit.addWidget(adjust_acceptable_low_resolution_limit_label)
        adjust_acceptable_low_resolution_limit = QtGui.QLineEdit()
        adjust_acceptable_low_resolution_limit.setFixedWidth(200)
        adjust_acceptable_low_resolution_limit.setText(str(self.acceptable_low_resolution_limit_for_data))
        adjust_acceptable_low_resolution_limit.textChanged[str].connect(self.change_acceptable_low_resolution_limit)
        settings_hbox_acceptable_low_resolution_limit.addWidget(adjust_acceptable_low_resolution_limit)
        vbox.addLayout(settings_hbox_acceptable_low_resolution_limit)
        # --- amount of processed data to copy (combobox of [name, ...] entries) ---
        vbox_data = QtGui.QVBoxLayout()
        vbox_data.addWidget(
            QtGui.QLabel('Select amount of processed data you wish to copy to initial_model directory:'))
        self.preferences_data_to_copy_combobox = QtGui.QComboBox()
        for item in self.preferences_data_to_copy:
            self.preferences_data_to_copy_combobox.addItem(item[0])
        self.preferences_data_to_copy_combobox.currentIndexChanged.connect(
            self.preferences_data_to_copy_combobox_changed)
        vbox_data.addWidget(self.preferences_data_to_copy_combobox)
        vbox.addLayout(vbox_data)
        # --- dataset selection mechanism; preselect the current preference ---
        vbox_select = QtGui.QVBoxLayout()
        vbox_select.addWidget(QtGui.QLabel('Dataset Selection Mechanism:'))
        self.preferences_selection_mechanism_combobox = QtGui.QComboBox()
        for item in self.preferences_selection_mechanism:
            self.preferences_selection_mechanism_combobox.addItem(item)
        self.preferences_selection_mechanism_combobox.currentIndexChanged.connect(
            self.preferences_selection_mechanism_combobox_changed)
        index = self.preferences_selection_mechanism_combobox.findText(self.preferences['dataset_selection_mechanism'], QtCore.Qt.MatchFixedString)
        self.preferences_selection_mechanism_combobox.setCurrentIndex(index)
        vbox_select.addWidget(self.preferences_selection_mechanism_combobox)
        vbox.addLayout(vbox_select)
        # vbox_inital_refinement = QtGui.QVBoxLayout()
        # vbox_inital_refinement.addWidget(QtGui.QLabel('Initial Refinement Pipeline:'))
        # self.preferences_initial_refinement_combobox = QtGui.QComboBox()
        # for item in self.preferences_initial_refinement_pipeline:
        #     self.preferences_initial_refinement_combobox.addItem(item)
        # self.preferences_initial_refinement_combobox.currentIndexChanged.connect(
        #     self.preferences_initial_refinement_combobox_changed)
        # index = self.preferences_initial_refinement_combobox.findText(self.preferences['initial_refinement_pipeline'], QtCore.Qt.MatchFixedString)
        # self.preferences_initial_refinement_combobox.setCurrentIndex(index)
        # vbox_inital_refinement.addWidget(self.preferences_initial_refinement_combobox)
        # vbox.addLayout(vbox_inital_refinement)
        # --- restraints generation program: only offer programs found on this system;
        #     acedrg is preferred (becomes the default when available) ---
        vbox_restraints = QtGui.QVBoxLayout()
        vbox_restraints.addWidget(QtGui.QLabel('Restraints generation program:'))
        self.preferences_restraints_generation_combobox = QtGui.QComboBox()
        program_list = []
        if self.external_software['acedrg']:
            program_list.append('acedrg')
            self.restraints_program = 'acedrg'
        if self.external_software['phenix.elbow']: program_list.append('phenix.elbow')
        if self.external_software['grade']: program_list.append('grade')
        for item in program_list:
            self.preferences_restraints_generation_combobox.addItem(item)
        self.preferences_restraints_generation_combobox.currentIndexChanged.connect(
            self.preferences_restraints_generation_combobox_changed)
        index = self.preferences_restraints_generation_combobox.findText(self.restraints_program,
                                                                         QtCore.Qt.MatchFixedString)
        self.preferences_restraints_generation_combobox.setCurrentIndex(index)
        vbox_restraints.addWidget(self.preferences_restraints_generation_combobox)
        vbox.addLayout(vbox_restraints)
        # --- XCE logfile location ---
        hbox = QtGui.QHBoxLayout()
        hbox.addWidget(QtGui.QLabel('XCE logfile:'))
        self.xce_logfile_label = QtGui.QLabel(self.xce_logfile)
        hbox.addWidget(self.xce_logfile_label)
        button = QtGui.QPushButton("Change")
        button.clicked.connect(self.set_xce_logfile)
        hbox.addWidget(button)
        vbox.addLayout(hbox)
        # --- max. concurrent cluster jobs ---
        settings_hbox_max_queue_jobs = QtGui.QHBoxLayout()
        adjust_max_queue_jobs_label = QtGui.QLabel('Max. number of jobs running at once on DLS cluster:')
        settings_hbox_max_queue_jobs.addWidget(adjust_max_queue_jobs_label)
        adjust_max_queue_jobs = QtGui.QLineEdit()
        adjust_max_queue_jobs.setFixedWidth(200)
        adjust_max_queue_jobs.setText(str(self.max_queue_jobs))
        adjust_max_queue_jobs.textChanged[str].connect(self.change_max_queue_jobs)
        settings_hbox_max_queue_jobs.addWidget(adjust_max_queue_jobs)
        vbox.addLayout(settings_hbox_max_queue_jobs)
        # --- remote qsub submission; NOTE: the DIMPLE twin-mode row below is
        #     created and added to vbox before the remote-qsub row is finished,
        #     so twin-mode appears above remote-qsub in the dialog ---
        settings_hbox_remote_qsub = QtGui.QHBoxLayout()
        remote_qsub_label = QtGui.QLabel('remote qsub:')
        settings_hbox_remote_qsub.addWidget(remote_qsub_label)
        self.remote_qsub_checkbox = QtGui.QCheckBox('use')
        self.remote_qsub_checkbox.toggled.connect(self.run_qsub_remotely)
        # --- run DIMPLE in TWIN mode ---
        settings_hbox_dimple_twin_mode = QtGui.QHBoxLayout()
        self.dimple_twin_mode_label_checkbox = QtGui.QCheckBox('run DIMPLE in TWIN mode')
        if self.preferences['dimple_twin_mode']:
            self.dimple_twin_mode_label_checkbox.setChecked(True)
        self.dimple_twin_mode_label_checkbox.toggled.connect(self.dimple_change_twin_mode)
        settings_hbox_dimple_twin_mode.addWidget(self.dimple_twin_mode_label_checkbox)
        vbox.addLayout(settings_hbox_dimple_twin_mode)
        if self.using_remote_qsub_submission:
            self.remote_qsub_checkbox.setChecked(True)
        settings_hbox_remote_qsub.addWidget(self.remote_qsub_checkbox)
        # command line used to submit jobs on the remote host (read by run_qsub_remotely)
        self.remote_qsub_command = QtGui.QLineEdit()
        self.remote_qsub_command.setFixedWidth(550)
        self.remote_qsub_command.setText(self.remote_qsub_submission)
        settings_hbox_remote_qsub.addWidget(self.remote_qsub_command)
        vbox.addLayout(settings_hbox_remote_qsub)
        # --- additional CIF file for non-standard ligands ---
        hbox = QtGui.QHBoxLayout()
        hbox.addWidget(QtGui.QLabel('Additional CIF file for non-standard ligand:'))
        self.second_cif_file_label = QtGui.QLabel(self.second_cif_file)
        hbox.addWidget(self.second_cif_file_label)
        button = QtGui.QPushButton("Select")
        button.clicked.connect(self.set_second_cif_file)
        hbox.addWidget(button)
        vbox.addLayout(hbox)
        # settings_hbox_max_queue_jobs.addWidget(adjust_max_queue_jobs_label)
        # adjust_max_queue_jobs = QtGui.QLineEdit()
        # adjust_max_queue_jobs.setFixedWidth(200)
        # adjust_max_queue_jobs.setText(str(self.max_queue_jobs))
        # adjust_max_queue_jobs.textChanged[str].connect(self.change_max_queue_jobs)
        # settings_hbox_max_queue_jobs.addWidget(adjust_max_queue_jobs)
        #
        # apply_button = QtGui.QPushButton('Apply')
        # apply_button.clicked.connect(self.run_qsub_remotely)
        # settings_hbox_remote_qsub.addWidget(apply_button)
        preferencesLayout.addLayout(vbox, 0, 0)
        preferences.exec_();
# def set_second_cif_file(self):
# mb = QtGui.QMessageBox()
# mbLayout = mb.layout()
# vbox = QtGui.QVBoxLayout()
# vbox.addWidget(QtGui.QLabel('CIF file to be merged into ligand CIF files:'))
# self.second_cif_file_label = QtGui.QLabel(self.second_cif_file)
# vbox.addWidget(self.second_cif_file_label)
# button = QtGui.QPushButton("Select")
# button.clicked.connect(self.set_second_cif_file)
# vbox.addWidget(button)
# mbLayout.addLayout(vbox, 0, 0)
# mb.addButton(QtGui.QPushButton('Yes'), QtGui.QMessageBox.YesRole)
# mb.addButton(QtGui.QPushButton('No'), QtGui.QMessageBox.RejectRole)
# reply = mb.exec_();
def dimple_change_twin_mode(self):
if self.preferences['dimple_twin_mode']:
self.update_log.insert('changing preferences: turning off DIMPLE in TWIN mode')
self.preferences['dimple_twin_mode'] = False
else:
self.update_log.insert('changing preferences: changing DIMPLE to TWIN mode')
self.preferences['dimple_twin_mode'] = True
def run_qsub_remotely(self):
self.remote_qsub_submission = str(self.remote_qsub_command.text())
print(str(self.remote_qsub_submission))
if self.remote_qsub_checkbox.isChecked():
self.update_log.insert('submitting jobs to remote machine with: %s' % self.remote_qsub_submission)
self.external_software['qsub_remote'] = self.remote_qsub_submission
self.using_remote_qsub_submission = True
self.settings['remote_qsub'] = self.remote_qsub_submission
else:
self.update_log.insert('switching off remote job submission')
self.external_software['qsub_remote'] = ''
self.settings['remote_qsub'] = ''
self.using_remote_qsub_submission = False
def enter_pdb_codes(self):
pdbID_entry = QtGui.QMessageBox()
pdbID_entryLayout = pdbID_entry.layout()
vbox = QtGui.QVBoxLayout()
frame = QtGui.QFrame()
frame.setFrameShape(QtGui.QFrame.StyledPanel)
grid = QtGui.QGridLayout()
grid.addWidget(QtGui.QLabel('Text from PDB email'), 0, 0)
self.pdb_code_entry = QtGui.QTextEdit()
self.pdb_code_entry.setText('')
self.pdb_code_entry.setFixedWidth(500)
grid.addWidget(self.pdb_code_entry, 1, 0, 20, 1)
frame.setLayout(grid)
vbox.addWidget(frame)
hbox = QtGui.QHBoxLayout()
button = QtGui.QPushButton('Update Database')
button.clicked.connect(self.update_database_with_pdb_codes)
hbox.addWidget(button)
vbox.addLayout(hbox)
pdbID_entryLayout.addLayout(vbox, 0, 0)
pdbID_entry.exec_();
def add_label_information(self):
label_entry = QtGui.QMessageBox()
label_entryLayout = label_entry.layout()
try:
labelInfo = self.db.get_label_info_from_db()
except AttributeError:
self.update_log.warning('please specify DB file first')
return None
vbox = QtGui.QVBoxLayout()
frame = QtGui.QFrame()
frame.setFrameShape(QtGui.QFrame.StyledPanel)
grid = QtGui.QGridLayout()
grid.addWidget(QtGui.QLabel('label'), 0, 0)
grid.addWidget(QtGui.QLabel('description'), 0, 1)
self.remote_qsub_command = QtGui.QLineEdit()
self.remote_qsub_command.setFixedWidth(550)
self.remote_qsub_command.setText(self.remote_qsub_submission)
self.labelList = []
for i in range(5):
labelEdit = QtGui.QLineEdit()
descriptionEdit = QtGui.QLineEdit()
grid.addWidget(labelEdit, i + 1, 0)
grid.addWidget(descriptionEdit, i + 1, 1)
try:
labelEdit.setText(labelInfo[i][0])
descriptionEdit.setText(labelInfo[i][1])
except IndexError:
labelEdit.setText('')
descriptionEdit.setText('')
labelEdit.setFixedWidth(100)
descriptionEdit.setFixedWidth(500)
self.labelList.append([labelEdit,descriptionEdit])
frame.setLayout(grid)
vbox.addWidget(frame)
hbox = QtGui.QHBoxLayout()
button = QtGui.QPushButton('Update Database')
button.clicked.connect(self.update_database_with_labelInfo)
hbox.addWidget(button)
vbox.addLayout(hbox)
label_entryLayout.addLayout(vbox, 0, 0)
label_entry.exec_();
def create_missing_apo_records_in_depositTable(self):
self.db.create_missing_apo_records_for_all_structures_in_depositTable(self.initial_model_directory,
self.xce_logfile)
# def update_file_information_of_apo_records(self):
# XChemDeposit.update_file_locations_of_apo_structuresin_DB(
# os.path.join(self.database_directory, self.data_source_file), self.initial_model_directory,
# self.xce_logfile)
    def prepare_models_for_deposition_ligand_bound(self,structureType):
        """Start a worker thread that writes mmcif files for PDB group deposition.

        structureType: when 'ground_state', the ground-state PDB/MTZ pair is
        looked up in the database and validated before the thread starts; any
        other value leaves ground_state empty (plain ligand-bound preparation).
        The thread is only launched if all required files were found.
        """
        start_thread = True
        self.update_log.insert('preparing mmcif files for PDB group deposition...')
        ignore_event_map = False
        if structureType == 'ground_state':
            try:
                self.update_log.insert('ground-state deposition')
                data_template_dict = self.db.get_deposit_dict_for_sample('ground_state')
                pdb = data_template_dict['PDB_file']
                self.update_log.insert('looking for ground-state PDB: ' + pdb)
                if not os.path.isfile(pdb):
                    self.update_log.error('ground-state PDB does not exist; stopping...')
                    start_thread = False
                mtz = data_template_dict['MTZ_file']
                self.update_log.insert('looking for ground-state MTZ: ' + mtz)
                if not os.path.isfile(mtz):
                    self.update_log.error('ground-state MTZ does not exist; stopping...')
                    start_thread = False
                ground_state = [ pdb,
                                 mtz,
                                 self.panddas_directory ]
            except KeyError:
                # deposit dict lacks PDB_file/MTZ_file keys -> no ground-state entry
                self.update_log.error('seems like there is no entry for ground-state in database')
                start_thread = False
        else:
            ground_state = []
        # NOTE(review): attribute name contains a typo ('bounnd') but it is
        # defined elsewhere in this class with the same spelling - keep as-is.
        if self.deposition_bounnd_state_preparation_ignore_event_map.isChecked():
            ignore_event_map = True
        # structureType = "ligand_bound"
        if start_thread:
            if ground_state != []:
                self.update_log.insert('apo PDB: ' + ground_state[0])
                self.update_log.insert('apo MTZ: ' + ground_state[1])
                self.update_log.insert('pandda directory: ' + ground_state[2])
            overwrite_existing_mmcif = True
            self.work_thread = XChemDeposit.prepare_mmcif_files_for_deposition(
                os.path.join(self.database_directory, self.data_source_file),
                self.xce_logfile,
                overwrite_existing_mmcif,
                self.initial_model_directory,
                ground_state,
                ignore_event_map)
            self.explorer_active = 1
            self.connect(self.work_thread, QtCore.SIGNAL("update_progress_bar"), self.update_progress_bar)
            self.connect(self.work_thread, QtCore.SIGNAL("update_status_bar(QString)"), self.update_status_bar)
            self.connect(self.work_thread, QtCore.SIGNAL("finished()"), self.thread_finished)
            self.work_thread.start()
def prepare_models_for_deposition_apo(self):
structureType = "apo"
overwrite_existing_mmcif = True
self.work_thread = XChemDeposit.prepare_mmcif_files_for_deposition(
os.path.join(self.database_directory, self.data_source_file),
self.xce_logfile,
overwrite_existing_mmcif,
self.initial_model_directory,
structureType)
self.explorer_active = 1
self.connect(self.work_thread, QtCore.SIGNAL("update_progress_bar"), self.update_progress_bar)
self.connect(self.work_thread, QtCore.SIGNAL("update_status_bar(QString)"), self.update_status_bar)
self.connect(self.work_thread, QtCore.SIGNAL("finished()"), self.thread_finished)
self.work_thread.start()
def prepare_for_group_deposition_upload_ligand_bound(self):
self.work_thread = XChemDeposit.prepare_for_group_deposition_upload(
os.path.join(self.database_directory, self.data_source_file),
self.xce_logfile,
self.group_deposit_directory,self.initial_model_directory,'ligand_bound')
self.explorer_active = 1
self.connect(self.work_thread, QtCore.SIGNAL("update_progress_bar"), self.update_progress_bar)
self.connect(self.work_thread, QtCore.SIGNAL("update_status_bar(QString)"), self.update_status_bar)
self.connect(self.work_thread, QtCore.SIGNAL("finished()"), self.thread_finished)
self.work_thread.start()
def prepare_for_group_deposition_upload_ground_state(self):
self.work_thread = XChemDeposit.prepare_for_group_deposition_upload(
os.path.join(self.database_directory, self.data_source_file),
self.xce_logfile,
self.group_deposit_directory,self.initial_model_directory,'ground_state')
self.explorer_active = 1
self.connect(self.work_thread, QtCore.SIGNAL("update_progress_bar"), self.update_progress_bar)
self.connect(self.work_thread, QtCore.SIGNAL("update_status_bar(QString)"), self.update_status_bar)
self.connect(self.work_thread, QtCore.SIGNAL("finished()"), self.thread_finished)
self.work_thread.start()
def check_smiles_in_db_and_pdb(self):
self.work_thread = XChemDeposit.compare_smiles_in_db_with_ligand_in_pdb(self.initial_model_directory,
os.path.join(self.database_directory,
self.data_source_file),
self.xce_logfile)
self.explorer_active = 1
self.connect(self.work_thread, QtCore.SIGNAL("update_progress_bar"), self.update_progress_bar)
self.connect(self.work_thread, QtCore.SIGNAL("update_status_bar(QString)"), self.update_status_bar)
self.connect(self.work_thread, QtCore.SIGNAL("finished()"), self.thread_finished)
self.connect(self.work_thread, QtCore.SIGNAL("show_error_dict"), self.show_error_dict)
self.work_thread.start()
def deposition_data(self):
depositData = QtGui.QMessageBox()
depositDataLayout = depositData.layout()
vbox = QtGui.QVBoxLayout()
deposit_tab_widget = QtGui.QTabWidget()
deposit_tab_list = ['Contact',
'General',
'Authors',
'Citation',
'Molecule',
'Misc',
'Methods',
'Software',
'Funding' ]
deposit_tab_dict = {}
for page in deposit_tab_list:
tab = QtGui.QWidget()
vb = QtGui.QVBoxLayout(tab)
deposit_tab_widget.addTab(tab, page)
deposit_tab_dict[page] = [tab, vb]
## PI and scientist info
vb = QtGui.QVBoxLayout()
hbox = QtGui.QHBoxLayout()
frame = QtGui.QFrame()
frame.setFrameShape(QtGui.QFrame.StyledPanel)
grid = QtGui.QGridLayout()
grid.addWidget(QtGui.QLabel('Principal Investigator'), 0, 0)
grid.addWidget(QtGui.QLabel('Salutation'), 1, 0)
self.contact_author_PI_salutation = QtGui.QLineEdit()
self.contact_author_PI_salutation.setText('Dr.')
self.contact_author_PI_salutation.setFixedWidth(200)
grid.addWidget(self.contact_author_PI_salutation, 1, 1)
grid.addWidget(QtGui.QLabel('First name'), 2, 0)
self.contact_author_PI_first_name = QtGui.QLineEdit()
self.contact_author_PI_first_name.setText('')
self.contact_author_PI_first_name.setFixedWidth(200)
grid.addWidget(self.contact_author_PI_first_name, 2, 1)
grid.addWidget(QtGui.QLabel('Last name'), 3, 0)
self.contact_author_PI_last_name = QtGui.QLineEdit()
self.contact_author_PI_last_name.setText('')
self.contact_author_PI_last_name.setFixedWidth(200)
grid.addWidget(self.contact_author_PI_last_name, 3, 1)
grid.addWidget(QtGui.QLabel('Middle name'), 4, 0)
self.contact_author_PI_middle_name = QtGui.QLineEdit()
self.contact_author_PI_middle_name.setText('')
self.contact_author_PI_middle_name.setFixedWidth(200)
self.contact_author_PI_middle_name.setStyleSheet("background-color: rgb(192, 192, 192);")
grid.addWidget(self.contact_author_PI_middle_name, 4, 1)
grid.addWidget(QtGui.QLabel('PI role'), 5, 0)
self.contact_author_PI_role = QtGui.QComboBox()
# PIroles = ['group leader', 'principal investigator/group leader', 'investigator']
PIroles = ['principal investigator/group leader']
for item in PIroles: self.contact_author_PI_role.addItem(item)
grid.addWidget(self.contact_author_PI_role, 5, 1)
grid.addWidget(QtGui.QLabel('Organization type'), 6, 0)
self.contact_author_PI_organization_type = QtGui.QComboBox()
Organizations = ['academic', 'commercial', 'government']
for item in Organizations: self.contact_author_PI_organization_type.addItem(item)
grid.addWidget(self.contact_author_PI_organization_type, 6, 1)
grid.addWidget(QtGui.QLabel('Organization Name'), 7, 0)
self.contact_author_PI_organization_name = QtGui.QLineEdit()
self.contact_author_PI_organization_name.setText('')
self.contact_author_PI_organization_name.setFixedWidth(200)
grid.addWidget(self.contact_author_PI_organization_name, 7, 1)
grid.addWidget(QtGui.QLabel('Email'), 8, 0)
self.contact_author_PI_email = QtGui.QLineEdit()
self.contact_author_PI_email.setText('')
self.contact_author_PI_email.setFixedWidth(200)
grid.addWidget(self.contact_author_PI_email, 8, 1)
grid.addWidget(QtGui.QLabel('Street'), 9, 0)
self.contact_author_PI_address = QtGui.QLineEdit()
self.contact_author_PI_address.setText('')
self.contact_author_PI_address.setFixedWidth(200)
grid.addWidget(self.contact_author_PI_address, 9, 1)
grid.addWidget(QtGui.QLabel('City'), 10, 0)
self.contact_author_PI_city = QtGui.QLineEdit()
self.contact_author_PI_city.setText('')
self.contact_author_PI_city.setFixedWidth(200)
grid.addWidget(self.contact_author_PI_city, 10, 1)
grid.addWidget(QtGui.QLabel('State'), 11, 0)
self.contact_author_PI_State_or_Province = QtGui.QLineEdit()
self.contact_author_PI_State_or_Province.setText('')
self.contact_author_PI_State_or_Province.setFixedWidth(200)
self.contact_author_PI_State_or_Province.setStyleSheet("background-color: rgb(192, 192, 192);")
grid.addWidget(self.contact_author_PI_State_or_Province, 11, 1)
grid.addWidget(QtGui.QLabel('ZIP code'), 12, 0)
self.contact_author_PI_Zip_Code = QtGui.QLineEdit()
self.contact_author_PI_Zip_Code.setText('')
self.contact_author_PI_Zip_Code.setFixedWidth(200)
grid.addWidget(self.contact_author_PI_Zip_Code, 12, 1)
grid.addWidget(QtGui.QLabel('Country'), 13, 0)
self.contact_author_PI_Country = QtGui.QLineEdit()
self.contact_author_PI_Country.setText('')
self.contact_author_PI_Country.setFixedWidth(200)
grid.addWidget(self.contact_author_PI_Country, 13, 1)
grid.addWidget(QtGui.QLabel('Phone'), 14, 0)
self.contact_author_PI_phone_number = QtGui.QLineEdit()
self.contact_author_PI_phone_number.setText('')
self.contact_author_PI_phone_number.setFixedWidth(200)
grid.addWidget(self.contact_author_PI_phone_number, 14, 1)
grid.addWidget(QtGui.QLabel('ORCID'), 15, 0)
self.contact_author_PI_ORCID = QtGui.QLineEdit()
self.contact_author_PI_ORCID.setText('')
self.contact_author_PI_ORCID.setFixedWidth(200)
grid.addWidget(self.contact_author_PI_ORCID, 15, 1)
frame.setLayout(grid)
hbox.addWidget(frame)
frame = QtGui.QFrame()
frame.setFrameShape(QtGui.QFrame.StyledPanel)
grid = QtGui.QGridLayout()
grid.addWidget(QtGui.QLabel('Responsible Scientist'), 0, 0)
grid.addWidget(QtGui.QLabel('Salutation'), 1, 0)
self.contact_author_salutation = QtGui.QLineEdit()
self.contact_author_salutation.setText('Dr.')
self.contact_author_salutation.setFixedWidth(200)
grid.addWidget(self.contact_author_salutation, 1, 1)
grid.addWidget(QtGui.QLabel('First name'), 2, 0)
self.contact_author_first_name = QtGui.QLineEdit()
self.contact_author_first_name.setText('')
self.contact_author_first_name.setFixedWidth(200)
grid.addWidget(self.contact_author_first_name, 2, 1)
grid.addWidget(QtGui.QLabel('Last name'), 3, 0)
self.contact_author_last_name = QtGui.QLineEdit()
self.contact_author_last_name.setText('')
self.contact_author_last_name.setFixedWidth(200)
grid.addWidget(self.contact_author_last_name, 3, 1)
grid.addWidget(QtGui.QLabel('Middle name'), 4, 0)
self.contact_author_middle_name = QtGui.QLineEdit()
self.contact_author_middle_name.setText('')
self.contact_author_middle_name.setFixedWidth(200)
self.contact_author_middle_name.setStyleSheet("background-color: rgb(192, 192, 192);")
grid.addWidget(self.contact_author_middle_name, 4, 1)
grid.addWidget(QtGui.QLabel('Role'), 5, 0)
self.contact_author_role = QtGui.QComboBox()
ScientistRoles = ['responsible scientist', 'investigator']
for item in ScientistRoles: self.contact_author_role.addItem(item)
grid.addWidget(self.contact_author_role, 5, 1)
grid.addWidget(QtGui.QLabel('Organization type'), 6, 0)
self.contact_author_organization_type = QtGui.QComboBox()
for item in Organizations: self.contact_author_organization_type.addItem(item)
grid.addWidget(self.contact_author_organization_type, 6, 1)
grid.addWidget(QtGui.QLabel('Organization Name'), 7, 0)
self.contact_author_organization_name = QtGui.QLineEdit()
self.contact_author_organization_name.setText('')
self.contact_author_organization_name.setFixedWidth(200)
grid.addWidget(self.contact_author_organization_name, 7, 1)
grid.addWidget(QtGui.QLabel('Email'), 8, 0)
self.contact_author_email = QtGui.QLineEdit()
self.contact_author_email.setText('')
self.contact_author_email.setFixedWidth(200)
grid.addWidget(self.contact_author_email, 8, 1)
grid.addWidget(QtGui.QLabel('Street'), 9, 0)
self.contact_author_address = QtGui.QLineEdit()
self.contact_author_address.setText('')
self.contact_author_address.setFixedWidth(200)
grid.addWidget(self.contact_author_address, 9, 1)
grid.addWidget(QtGui.QLabel('City'), 10, 0)
self.contact_author_city = QtGui.QLineEdit()
self.contact_author_city.setText('')
self.contact_author_city.setFixedWidth(200)
grid.addWidget(self.contact_author_city, 10, 1)
grid.addWidget(QtGui.QLabel('State'), 11, 0)
self.contact_author_State_or_Province = QtGui.QLineEdit()
self.contact_author_State_or_Province.setText('')
self.contact_author_State_or_Province.setFixedWidth(200)
self.contact_author_State_or_Province.setStyleSheet("background-color: rgb(192, 192, 192);")
grid.addWidget(self.contact_author_State_or_Province, 11, 1)
grid.addWidget(QtGui.QLabel('ZIP code'), 12, 0)
self.contact_author_Zip_Code = QtGui.QLineEdit()
self.contact_author_Zip_Code.setText('')
self.contact_author_Zip_Code.setFixedWidth(200)
grid.addWidget(self.contact_author_Zip_Code, 12, 1)
grid.addWidget(QtGui.QLabel('Country'), 13, 0)
self.contact_author_Country = QtGui.QLineEdit()
self.contact_author_Country.setText('')
self.contact_author_Country.setFixedWidth(200)
grid.addWidget(self.contact_author_Country, 13, 1)
grid.addWidget(QtGui.QLabel('Phone'), 14, 0)
self.contact_author_phone_number = QtGui.QLineEdit()
self.contact_author_phone_number.setText('')
self.contact_author_phone_number.setFixedWidth(200)
grid.addWidget(self.contact_author_phone_number, 14, 1)
grid.addWidget(QtGui.QLabel('ORCID'), 15, 0)
self.contact_author_ORCID = QtGui.QLineEdit()
self.contact_author_ORCID.setText('')
self.contact_author_ORCID.setFixedWidth(200)
grid.addWidget(self.contact_author_ORCID, 15, 1)
frame.setLayout(grid)
hbox.addWidget(frame)
vb.addLayout(hbox)
vb.addWidget(QtGui.QLabel(XChemToolTips.deposition_interface_note()))
vb.addStretch(1)
deposit_tab_dict['Contact'][1].addLayout(vb)
## release status
vb = QtGui.QVBoxLayout()
frame = QtGui.QFrame()
frame.setFrameShape(QtGui.QFrame.StyledPanel)
grid = QtGui.QGridLayout()
grid.addWidget(QtGui.QLabel('Release status'), 0, 0)
grid.addWidget(QtGui.QLabel('Release Status for sequence'), 4, 0)
self.Release_status_for_sequence = QtGui.QComboBox()
codeStatus = ['RELEASE NOW', 'HOLD FOR RELEASE']
for item in codeStatus: self.Release_status_for_sequence.addItem(item)
grid.addWidget(self.Release_status_for_sequence, 4, 1)
grid.addWidget(QtGui.QLabel('Release Status for coordinates/ SF'), 8, 0)
self.Release_status_for_coordinates = QtGui.QComboBox()
coordStatus = ['RELEASE NOW', 'HOLD FOR PUBLICATION', 'HOLD FOR 4 WEEKS', 'HOLD FOR 6 MONTHS',
'HOLD FOR 1 YEAR']
for item in coordStatus: self.Release_status_for_coordinates.addItem(item)
grid.addWidget(self.Release_status_for_coordinates, 8, 1)
frame.setLayout(grid)
vb.addWidget(frame)
frame = QtGui.QFrame()
frame.setFrameShape(QtGui.QFrame.StyledPanel)
grid = QtGui.QGridLayout()
grid.addWidget(QtGui.QLabel('Title & Details'), 0, 0)
note = (
'Note: supported wildcards: $ProteinName,$CompoundName; e.g. "Crystal Structure of human JMJD2D in complex with N2317a"')
grid.addWidget(QtGui.QLabel(note), 1, 0)
grid.addWidget(QtGui.QLabel('Group deposition title'), 2, 0)
self.group_deposition_title = QtGui.QLineEdit()
self.group_deposition_title.setText('PanDDA analysis group deposition')
self.group_deposition_title.setFixedWidth(600)
# self.group_deposition_title.setStyleSheet("background-color: rgb(192, 192, 192);")
grid.addWidget(self.group_deposition_title, 2, 1)
grid.addWidget(QtGui.QLabel('Description'), 3, 0)
self.group_description = QtGui.QLineEdit()
self.group_description.setText(
'XDomainX of XOrganismX $ProteinName screened against the XXX Fragment Library by X-ray Crystallography at the XChem facility of Diamond Light Source beamline I04-1')
self.group_description.setFixedWidth(600)
grid.addWidget(self.group_description, 3, 1)
grid.addWidget(QtGui.QLabel('Structure Title (ligand bound)'), 4, 0)
self.structure_title = QtGui.QLineEdit()
self.structure_title.setText('Crystal Structure of $ProteinName in complex with $CompoundName')
self.structure_title.setFixedWidth(600)
grid.addWidget(self.structure_title, 4, 1)
note = ('\n\nApo Structure:\nonly use if you want to deposit PanDDA models!')
grid.addWidget(QtGui.QLabel(note), 6, 0)
grid.addWidget(QtGui.QLabel('Structure Title (apo)'), 7, 0)
self.structure_title_apo = QtGui.QLineEdit()
self.structure_title_apo.setText(
'PanDDA analysis group deposition of ground-state model of $ProteinName')
self.structure_title_apo.setFixedWidth(600)
grid.addWidget(self.structure_title_apo, 7, 1)
frame.setLayout(grid)
vb.addWidget(frame)
vb.addStretch(1)
deposit_tab_dict['General'][1].addLayout(vb)
## authors
vb = QtGui.QVBoxLayout()
frame = QtGui.QFrame()
frame.setFrameShape(QtGui.QFrame.StyledPanel)
grid = QtGui.QGridLayout()
grid.addWidget(QtGui.QLabel('Deposition authors (e.g. Surname, F.M.)'), 0, 0)
self.structure_author_name_List = []
for column in range(0, 2):
for row in range(1, 15):
structure_author_name = QtGui.QLineEdit()
structure_author_name.setText('')
structure_author_name.setFixedWidth(300)
grid.addWidget(structure_author_name, row, column)
self.structure_author_name_List.append(structure_author_name)
frame.setLayout(grid)
vb.addWidget(frame)
vb.addStretch(1)
deposit_tab_dict['Authors'][1].addLayout(vb)
## primary citation
vb = QtGui.QVBoxLayout()
frame = QtGui.QFrame()
frame.setFrameShape(QtGui.QFrame.StyledPanel)
grid = QtGui.QGridLayout()
grid.addWidget(QtGui.QLabel('Primary Citation'), 0, 0)
grid.addWidget(QtGui.QLabel('ID'), 1, 0)
self.primary_citation_id = QtGui.QLineEdit()
self.primary_citation_id.setText('primary')
self.primary_citation_id.setFixedWidth(500)
grid.addWidget(self.primary_citation_id, 1, 1)
grid.addWidget(QtGui.QLabel('Journal'), 2, 0)
self.primary_citation_journal_abbrev = QtGui.QLineEdit()
self.primary_citation_journal_abbrev.setText('To be published')
self.primary_citation_journal_abbrev.setFixedWidth(500)
grid.addWidget(self.primary_citation_journal_abbrev, 2, 1)
grid.addWidget(QtGui.QLabel('Title'), 3, 0)
self.primary_citation_title = QtGui.QLineEdit()
self.primary_citation_title.setText('')
self.primary_citation_title.setFixedWidth(500)
self.primary_citation_title.setStyleSheet("background-color: rgb(192, 192, 192);")
grid.addWidget(self.primary_citation_title, 3, 1)
grid.addWidget(QtGui.QLabel('Year'), 4, 0)
self.primary_citation_year = QtGui.QLineEdit()
self.primary_citation_year.setText('')
self.primary_citation_year.setFixedWidth(500)
self.primary_citation_year.setStyleSheet("background-color: rgb(192, 192, 192);")
grid.addWidget(self.primary_citation_year, 4, 1)
grid.addWidget(QtGui.QLabel('Volume'), 5, 0)
self.primary_citation_journal_volume = QtGui.QLineEdit()
self.primary_citation_journal_volume.setText('')
self.primary_citation_journal_volume.setFixedWidth(500)
self.primary_citation_journal_volume.setStyleSheet("background-color: rgb(192, 192, 192);")
grid.addWidget(self.primary_citation_journal_volume, 5, 1)
grid.addWidget(QtGui.QLabel('Page, first'), 6, 0)
self.primary_citation_page_first = QtGui.QLineEdit()
self.primary_citation_page_first.setText('')
self.primary_citation_page_first.setFixedWidth(500)
self.primary_citation_page_first.setStyleSheet("background-color: rgb(192, 192, 192);")
grid.addWidget(self.primary_citation_page_first, 6, 1)
grid.addWidget(QtGui.QLabel('Page, last'), 7, 0)
self.primary_citation_page_last = QtGui.QLineEdit()
self.primary_citation_page_last.setText('')
self.primary_citation_page_last.setFixedWidth(500)
self.primary_citation_page_last.setStyleSheet("background-color: rgb(192, 192, 192);")
grid.addWidget(self.primary_citation_page_last, 7, 1)
frame.setLayout(grid)
vb.addWidget(frame)
## citation authors
frame = QtGui.QFrame()
frame.setFrameShape(QtGui.QFrame.StyledPanel)
grid = QtGui.QGridLayout()
self.set_primary_citation_authors = QtGui.QCheckBox('same as deposition authors')
self.layout_funcs.add_checkbox(self, self.set_primary_citation_authors,
'xce_object.set_primary_citation_as_structure_authors')
grid.addWidget(self.set_primary_citation_authors, 0, 0)
self.primary_citation_author_name_List = []
for column in range(0, 2):
for row in range(1, 15):
primary_citation_author_name = QtGui.QLineEdit()
primary_citation_author_name.setText('')
primary_citation_author_name.setFixedWidth(300)
grid.addWidget(primary_citation_author_name, row, column)
self.primary_citation_author_name_List.append(primary_citation_author_name)
frame.setLayout(grid)
vb.addWidget(frame)
vb.addStretch(1)
deposit_tab_dict['Citation'][1].addLayout(vb)
## molecule info
vb = QtGui.QVBoxLayout()
frame = QtGui.QFrame()
frame.setFrameShape(QtGui.QFrame.StyledPanel)
grid = QtGui.QGridLayout()
grid.addWidget(QtGui.QLabel('Entity 1'), 1, 0)
grid.addWidget(QtGui.QLabel('Molecule Name'), 2, 0)
self.molecule_name = QtGui.QLineEdit()
self.molecule_name.setText('')
self.molecule_name.setFixedWidth(300)
# self.molecule_name.setStyleSheet("background-color: rgb(192, 192, 192);")
grid.addWidget(self.molecule_name, 2, 1)
grid.addWidget(QtGui.QLabel('(e.g. RNA Hammerhead Ribozyme)'), 2, 2)
grid.addWidget(QtGui.QLabel('Fragment Name'), 3, 0)
self.fragment_name_one = QtGui.QLineEdit()
self.fragment_name_one.setText('')
self.fragment_name_one.setFixedWidth(300)
self.fragment_name_one.setStyleSheet("background-color: rgb(192, 192, 192);")
grid.addWidget(self.fragment_name_one, 3, 1)
grid.addWidget(QtGui.QLabel('(e.g. ligand binding domain, hairpin)'), 3, 2)
grid.addWidget(QtGui.QLabel('Specific Mutation'), 4, 0)
self.fragment_name_one_specific_mutation = QtGui.QLineEdit()
self.fragment_name_one_specific_mutation.setText('')
self.fragment_name_one_specific_mutation.setFixedWidth(300)
self.fragment_name_one_specific_mutation.setStyleSheet("background-color: rgb(192, 192, 192);")
grid.addWidget(self.fragment_name_one_specific_mutation, 4, 1)
grid.addWidget(QtGui.QLabel('(e.g. C280S)'), 4, 2)
grid.addWidget(QtGui.QLabel('Enzyme Comission Number'), 5, 0)
self.fragment_name_one_enzyme_comission_number = QtGui.QLineEdit()
self.fragment_name_one_enzyme_comission_number.setText('')
self.fragment_name_one_enzyme_comission_number.setFixedWidth(300)
self.fragment_name_one_enzyme_comission_number.setStyleSheet("background-color: rgb(192, 192, 192);")
grid.addWidget(self.fragment_name_one_enzyme_comission_number, 5, 1)
grid.addWidget(QtGui.QLabel('(if known: e.g. 2.7.7.7)'), 5, 2)
grid.addWidget(QtGui.QLabel('Genetically Manipulated Source'), 6, 0)
grid.addWidget(QtGui.QLabel('Source organism scientific name'), 7, 0)
self.Source_organism_scientific_name = QtGui.QComboBox()
taxonomy_dict = XChemMain.NCBI_taxonomy_ID()
for item in taxonomy_dict:
self.Source_organism_scientific_name.addItem(taxonomy_dict[item])
grid.addWidget(self.Source_organism_scientific_name, 7, 1)
grid.addWidget(QtGui.QLabel('Source organism gene'), 8, 0)
self.Source_organism_gene = QtGui.QLineEdit()
self.Source_organism_gene.setText('')
self.Source_organism_gene.setFixedWidth(300)
grid.addWidget(self.Source_organism_gene, 8, 1)
grid.addWidget(QtGui.QLabel('(e.g. RPOD, ALKA...)'), 8, 2)
grid.addWidget(QtGui.QLabel('Source organism strain'), 9, 0)
self.Source_organism_strain = QtGui.QLineEdit()
self.Source_organism_strain.setText('')
self.Source_organism_strain.setFixedWidth(300)
self.Source_organism_strain.setStyleSheet("background-color: rgb(192, 192, 192);")
grid.addWidget(self.Source_organism_strain, 9, 1)
grid.addWidget(QtGui.QLabel('(e.g. BH10 ISOLATE, K-12...)'), 9, 2)
grid.addWidget(QtGui.QLabel('Expression system scientific name'), 10, 0)
self.Expression_system_scientific_name = QtGui.QComboBox()
for item in taxonomy_dict:
self.Expression_system_scientific_name.addItem(taxonomy_dict[item])
grid.addWidget(self.Expression_system_scientific_name, 10, 1)
grid.addWidget(QtGui.QLabel('Expression system strain'), 11, 0)
self.Expression_system_strain = QtGui.QLineEdit()
self.Expression_system_strain.setText('')
self.Expression_system_strain.setFixedWidth(300)
self.Expression_system_strain.setStyleSheet("background-color: rgb(192, 192, 192);")
grid.addWidget(self.Expression_system_strain, 11, 1)
grid.addWidget(QtGui.QLabel('(e.g. BL21(DE3))'), 11, 2)
grid.addWidget(QtGui.QLabel('Expression system vector type'), 12, 0)
self.Expression_system_vector_type = QtGui.QLineEdit()
self.Expression_system_vector_type.setText('')
self.Expression_system_vector_type.setFixedWidth(300)
self.Expression_system_vector_type.setStyleSheet("background-color: rgb(192, 192, 192);")
grid.addWidget(self.Expression_system_vector_type, 12, 1)
grid.addWidget(QtGui.QLabel('(e.g. plasmid)'), 12, 2)
grid.addWidget(QtGui.QLabel('Expression_system_plasmid_name'), 13, 0)
self.Expression_system_plasmid_name = QtGui.QLineEdit()
self.Expression_system_plasmid_name.setText('')
self.Expression_system_plasmid_name.setFixedWidth(300)
self.Expression_system_plasmid_name.setStyleSheet("background-color: rgb(192, 192, 192);")
grid.addWidget(self.Expression_system_plasmid_name, 13, 1)
grid.addWidget(QtGui.QLabel('(e.g. pET26)'), 13, 2)
grid.addWidget(QtGui.QLabel('Manipulated_source_details'), 14, 0)
self.Manipulated_source_details = QtGui.QLineEdit()
self.Manipulated_source_details.setText('')
self.Manipulated_source_details.setFixedWidth(300)
self.Manipulated_source_details.setStyleSheet("background-color: rgb(192, 192, 192);")
grid.addWidget(self.Manipulated_source_details, 14, 1)
grid.addWidget(QtGui.QLabel('(any other relevant information)'), 14, 2)
grid.addWidget(QtGui.QLabel('Chains'), 15, 0)
self.molecule_chain_one = QtGui.QLineEdit()
self.molecule_chain_one.setText('')
self.molecule_chain_one.setFixedWidth(300)
grid.addWidget(self.molecule_chain_one, 15, 1)
grid.addWidget(QtGui.QLabel('(e.g. A or A,B)'), 15, 2)
frame.setLayout(grid)
vb.addWidget(frame)
### entity 2
frame = QtGui.QFrame()
frame.setFrameShape(QtGui.QFrame.StyledPanel)
grid = QtGui.QGridLayout()
grid.addWidget(QtGui.QLabel('Entity 2 (IMPORTANT: only fill in if you are working with a protein-protein complex!)'), 1, 0)
grid.addWidget(QtGui.QLabel('Molecule Name'), 2, 0)
self.molecule_name_two = QtGui.QLineEdit()
self.molecule_name_two.setText('')
self.molecule_name_two.setFixedWidth(300)
# self.molecule_name_two.setStyleSheet("background-color: rgb(192, 192, 192);")
grid.addWidget(self.molecule_name_two, 2, 1)
grid.addWidget(QtGui.QLabel('(e.g. RNA Hammerhead Ribozyme)'), 2, 2)
grid.addWidget(QtGui.QLabel('Fragment Name'), 3, 0)
self.fragment_name_two = QtGui.QLineEdit()
self.fragment_name_two.setText('')
self.fragment_name_two.setFixedWidth(300)
self.fragment_name_two.setStyleSheet("background-color: rgb(192, 192, 192);")
grid.addWidget(self.fragment_name_two, 3, 1)
grid.addWidget(QtGui.QLabel('(e.g. ligand binding domain, hairpin)'), 3, 2)
grid.addWidget(QtGui.QLabel('Specific Mutation'), 4, 0)
self.fragment_name_two_specific_mutation = QtGui.QLineEdit()
self.fragment_name_two_specific_mutation.setText('')
self.fragment_name_two_specific_mutation.setFixedWidth(300)
self.fragment_name_two_specific_mutation.setStyleSheet("background-color: rgb(192, 192, 192);")
grid.addWidget(self.fragment_name_two_specific_mutation, 4, 1)
grid.addWidget(QtGui.QLabel('(e.g. C280S)'), 4, 2)
grid.addWidget(QtGui.QLabel('Enzyme Comission Number'), 5, 0)
self.fragment_name_two_enzyme_comission_number = QtGui.QLineEdit()
self.fragment_name_two_enzyme_comission_number.setText('')
self.fragment_name_two_enzyme_comission_number.setFixedWidth(300)
self.fragment_name_two_enzyme_comission_number.setStyleSheet("background-color: rgb(192, 192, 192);")
grid.addWidget(self.fragment_name_two_enzyme_comission_number, 5, 1)
grid.addWidget(QtGui.QLabel('(if known: e.g. 2.7.7.7)'), 5, 2)
grid.addWidget(QtGui.QLabel('Genetically Manipulated Source'), 6, 0)
grid.addWidget(QtGui.QLabel('Source organism scientific name'), 7, 0)
self.Source_organism_scientific_name_two = QtGui.QComboBox()
taxonomy_dict = XChemMain.NCBI_taxonomy_ID()
for item in taxonomy_dict:
self.Source_organism_scientific_name_two.addItem(taxonomy_dict[item])
grid.addWidget(self.Source_organism_scientific_name_two, 7, 1)
grid.addWidget(QtGui.QLabel('Source organism gene'), 8, 0)
self.Source_organism_gene_two = QtGui.QLineEdit()
self.Source_organism_gene_two.setText('')
self.Source_organism_gene_two.setFixedWidth(300)
grid.addWidget(self.Source_organism_gene_two, 8, 1)
grid.addWidget(QtGui.QLabel('(e.g. RPOD, ALKA...)'), 8, 2)
grid.addWidget(QtGui.QLabel('Source organism strain'), 9, 0)
self.Source_organism_strain_two = QtGui.QLineEdit()
self.Source_organism_strain_two.setText('')
self.Source_organism_strain_two.setFixedWidth(300)
self.Source_organism_strain_two.setStyleSheet("background-color: rgb(192, 192, 192);")
grid.addWidget(self.Source_organism_strain_two, 9, 1)
grid.addWidget(QtGui.QLabel('(e.g. BH10 ISOLATE, K-12...)'), 9, 2)
grid.addWidget(QtGui.QLabel('Expression system scientific name'), 10, 0)
self.Expression_system_scientific_name_two = QtGui.QComboBox()
for item in taxonomy_dict:
self.Expression_system_scientific_name_two.addItem(taxonomy_dict[item])
grid.addWidget(self.Expression_system_scientific_name_two, 10, 1)
grid.addWidget(QtGui.QLabel('Expression system strain'), 11, 0)
self.Expression_system_strain_two = QtGui.QLineEdit()
self.Expression_system_strain_two.setText('')
self.Expression_system_strain_two.setFixedWidth(300)
self.Expression_system_strain_two.setStyleSheet("background-color: rgb(192, 192, 192);")
grid.addWidget(self.Expression_system_strain_two, 11, 1)
grid.addWidget(QtGui.QLabel('(e.g. BL21(DE3))'), 11, 2)
grid.addWidget(QtGui.QLabel('Expression system vector type'), 12, 0)
self.Expression_system_vector_type_two = QtGui.QLineEdit()
self.Expression_system_vector_type_two.setText('')
self.Expression_system_vector_type_two.setFixedWidth(300)
self.Expression_system_vector_type_two.setStyleSheet("background-color: rgb(192, 192, 192);")
grid.addWidget(self.Expression_system_vector_type_two, 12, 1)
grid.addWidget(QtGui.QLabel('(e.g. plasmid)'), 12, 2)
grid.addWidget(QtGui.QLabel('Expression_system_plasmid_name'), 13, 0)
self.Expression_system_plasmid_name_two = QtGui.QLineEdit()
self.Expression_system_plasmid_name_two.setText('')
self.Expression_system_plasmid_name_two.setFixedWidth(300)
self.Expression_system_plasmid_name_two.setStyleSheet("background-color: rgb(192, 192, 192);")
grid.addWidget(self.Expression_system_plasmid_name_two, 13, 1)
grid.addWidget(QtGui.QLabel('(e.g. pET26)'), 13, 2)
grid.addWidget(QtGui.QLabel('Manipulated_source_details'), 14, 0)
self.Manipulated_source_details_two = QtGui.QLineEdit()
self.Manipulated_source_details_two.setText('')
self.Manipulated_source_details_two.setFixedWidth(300)
self.Manipulated_source_details_two.setStyleSheet("background-color: rgb(192, 192, 192);")
grid.addWidget(self.Manipulated_source_details_two, 14, 1)
grid.addWidget(QtGui.QLabel('(any other relevant information)'), 14, 2)
grid.addWidget(QtGui.QLabel('Chains'), 15, 0)
self.molecule_chain_two = QtGui.QLineEdit()
self.molecule_chain_two.setText('')
self.molecule_chain_two.setFixedWidth(300)
grid.addWidget(self.molecule_chain_two, 15, 1)
grid.addWidget(QtGui.QLabel('(e.g. A or A,B)'), 15, 2)
frame.setLayout(grid)
vb.addWidget(frame)
### entity 2 --- END
vb.addStretch(1)
deposit_tab_dict['Molecule'][1].addLayout(vb)
## misc
vb = QtGui.QVBoxLayout()
frame = QtGui.QFrame()
frame.setFrameShape(QtGui.QFrame.StyledPanel)
grid = QtGui.QGridLayout()
grid.addWidget(QtGui.QLabel('Keywords'), 1, 0)
self.structure_keywords = QtGui.QLineEdit()
self.structure_keywords.setText('SGC - Diamond I04-1 fragment screening, PanDDA, XChemExplorer')
self.structure_keywords.setFixedWidth(300)
grid.addWidget(self.structure_keywords, 1, 1)
grid.addWidget(QtGui.QLabel('(e.g. beta barrel, protein-DNA complex)'), 1, 2)
grid.addWidget(QtGui.QLabel('Type'), 2, 0)
self.structure_keywords_type = QtGui.QComboBox()
self.structure_keywords_type.setStyleSheet("background-color: rgb(192, 192, 192);")
for item in XChemMain.pdbx_keywords(): self.structure_keywords_type.addItem(item)
grid.addWidget(self.structure_keywords_type, 2, 1)
# self.structure_keywords = QtGui.QLineEdit()
# self.structure_keywords.setText('SGC - Diamond I04-1 fragment screening, PanDDA, XChemExplorer')
# self.structure_keywords.setFixedWidth(300)
# grid.addWidget(self.structure_keywords, 1, 1)
# grid.addWidget(QtGui.QLabel('(e.g. beta barrel, protein-DNA complex)'), 1, 2)
grid.addWidget(QtGui.QLabel('Biological Assembly'), 3, 0)
self.biological_assembly_chain_number = QtGui.QLineEdit()
self.biological_assembly_chain_number.setText('')
self.biological_assembly_chain_number.setFixedWidth(300)
grid.addWidget(self.biological_assembly_chain_number, 3, 1)
grid.addWidget(QtGui.QLabel('(e.g. 1 for monomer, 2 for dimer ..)'), 3, 2)
grid.addWidget(QtGui.QLabel('Sequence UNIPROT ID'), 4, 0)
self.molecule_one_letter_sequence_uniprot_id = QtGui.QLineEdit()
self.molecule_one_letter_sequence_uniprot_id.setText('')
self.molecule_one_letter_sequence_uniprot_id.setFixedWidth(300)
grid.addWidget(self.molecule_one_letter_sequence_uniprot_id, 4, 1)
grid.addWidget(QtGui.QLabel('(e.g. Q6B0I6)'), 4, 2)
grid.addWidget(QtGui.QLabel('Sequence'), 5, 0)
self.molecule_one_letter_sequence = QtGui.QTextEdit()
self.molecule_one_letter_sequence.setStyleSheet("background-color: rgb(255, 255, 255);")
# self.molecule_one_letter_sequence.setStyleSheet("background-color: rgb(192, 192, 192);")
self.molecule_one_letter_sequence.setText('')
self.molecule_one_letter_sequence.setFixedWidth(300)
grid.addWidget(self.molecule_one_letter_sequence, 5, 1, 8, 2)
# grid.addWidget(QtGui.QLabel('Sequence information for entity 2'), 10, 0)
# grid.addWidget(QtGui.QLabel('(Important: only for protein-protein complex'), 10, 1)
grid.addWidget(QtGui.QLabel('Sequence UNIPROT ID (Entity 2) - optional'), 13, 0)
self.molecule_one_letter_sequence_uniprot_id_two = QtGui.QLineEdit()
self.molecule_one_letter_sequence_uniprot_id_two.setText('')
self.molecule_one_letter_sequence_uniprot_id_two.setStyleSheet("background-color: rgb(192, 192, 192);")
self.molecule_one_letter_sequence_uniprot_id_two.setFixedWidth(300)
grid.addWidget(self.molecule_one_letter_sequence_uniprot_id_two, 13, 1)
grid.addWidget(QtGui.QLabel('(e.g. Q6B0I6)'), 13, 2)
grid.addWidget(QtGui.QLabel('Sequence (Entity 2) - optional'), 14, 0)
self.molecule_one_letter_sequence_two = QtGui.QTextEdit()
self.molecule_one_letter_sequence_two.setText('')
self.molecule_one_letter_sequence_two.setFixedWidth(300)
grid.addWidget(self.molecule_one_letter_sequence_two, 14, 1, 19, 2)
grid.addWidget(QtGui.QLabel('Structural Genomic (optional)'), 21, 0)
grid.addWidget(QtGui.QLabel('Project Name'), 22, 0)
self.SG_project_name = QtGui.QLineEdit()
self.SG_project_name.setText('')
self.SG_project_name.setStyleSheet("background-color: rgb(192, 192, 192);")
self.SG_project_name.setFixedWidth(300)
grid.addWidget(self.SG_project_name, 22, 1)
grid.addWidget(QtGui.QLabel('(e.g. SGC, Structural Genomics Consortium)'), 22, 2)
grid.addWidget(QtGui.QLabel('Full Name'), 23, 0)
self.full_name_of_SG_center = QtGui.QLineEdit()
self.full_name_of_SG_center.setText('')
self.full_name_of_SG_center.setStyleSheet("background-color: rgb(192, 192, 192);")
self.full_name_of_SG_center.setFixedWidth(300)
grid.addWidget(self.full_name_of_SG_center, 23, 1)
grid.addWidget(QtGui.QLabel('(e.g. Structural Genomics Consortium)'), 23, 2)
frame.setLayout(grid)
vb.addWidget(frame)
vb.addStretch(1)
deposit_tab_dict['Misc'][1].addLayout(vb)
## methods
vb = QtGui.QVBoxLayout()
frame = QtGui.QFrame()
frame.setFrameShape(QtGui.QFrame.StyledPanel)
grid = QtGui.QGridLayout()
grid.addWidget(QtGui.QLabel('Crystallization'), 1, 0)
grid.addWidget(QtGui.QLabel('Method'), 2, 0)
self.crystallization_method = QtGui.QComboBox()
for item in XChemMain.crystal_growth_methods(): self.crystallization_method.addItem(item)
grid.addWidget(self.crystallization_method, 2, 1)
grid.addWidget(QtGui.QLabel('pH'), 3, 0)
self.crystallization_pH = QtGui.QLineEdit()
self.crystallization_pH.setText('')
self.crystallization_pH.setFixedWidth(300)
grid.addWidget(self.crystallization_pH, 3, 1)
grid.addWidget(QtGui.QLabel('(e.g. 7.5 ...)'), 3, 2)
grid.addWidget(QtGui.QLabel('Temperature'), 4, 0)
self.crystallization_temperature = QtGui.QLineEdit()
self.crystallization_temperature.setText('')
self.crystallization_temperature.setFixedWidth(300)
grid.addWidget(self.crystallization_temperature, 4, 1)
grid.addWidget(QtGui.QLabel('(e.g. 298) (in Kelvin)'), 4, 2)
grid.addWidget(QtGui.QLabel('Condition'), 5, 0)
self.crystallization_details = QtGui.QLineEdit()
self.crystallization_details.setText('')
self.crystallization_details.setFixedWidth(300)
grid.addWidget(self.crystallization_details, 5, 1)
grid.addWidget(QtGui.QLabel('(e.g. PEG 4000, NaCl etc.)'), 5, 2)
grid.addWidget(QtGui.QLabel('Diffraction Experiment'), 6, 0)
note = ('Note: this information will only be used if it is\n'
'not already available in the mainTable!\n'
'Ignore if data were collected at DLS')
grid.addWidget(QtGui.QLabel(note), 7, 0)
grid.addWidget(QtGui.QLabel('Source'), 8, 0)
self.radiation_source = QtGui.QComboBox()
for item in XChemMain.radiationSource(): self.radiation_source.addItem(item)
grid.addWidget(self.radiation_source, 8, 1)
grid.addWidget(QtGui.QLabel('Source Type'), 9, 0)
self.radiation_source_type = QtGui.QComboBox()
for item in XChemMain.wwBeamlines(): self.radiation_source_type.addItem(item)
grid.addWidget(self.radiation_source_type, 9, 1)
grid.addWidget(QtGui.QLabel('Wavelength'), 10, 0)
self.radiation_wavelengths = QtGui.QLineEdit()
self.radiation_wavelengths.setText('')
self.radiation_wavelengths.setFixedWidth(300)
grid.addWidget(self.radiation_wavelengths, 10, 1)
grid.addWidget(QtGui.QLabel('(e.g. 1.502)'), 10, 2)
grid.addWidget(QtGui.QLabel('Detector'), 11, 0)
self.radiation_detector = QtGui.QComboBox()
for item in XChemMain.detector(): self.radiation_detector.addItem(item)
grid.addWidget(self.radiation_detector, 11, 1)
grid.addWidget(QtGui.QLabel('Detector Type'), 12, 0)
self.radiation_detector_type = QtGui.QComboBox()
for item in XChemMain.detectorType(): self.radiation_detector_type.addItem(item)
grid.addWidget(self.radiation_detector_type, 12, 1)
grid.addWidget(QtGui.QLabel('Date'), 13, 0)
self.data_collection_date = QtGui.QLineEdit()
self.data_collection_date.setText('')
self.data_collection_date.setFixedWidth(300)
grid.addWidget(self.data_collection_date, 13, 1)
grid.addWidget(QtGui.QLabel('(e.g. 2004-01-07)'), 13, 2)
grid.addWidget(QtGui.QLabel('Temperature'), 14, 0)
self.data_collection_temperature = QtGui.QLineEdit()
self.data_collection_temperature.setText('')
self.data_collection_temperature.setFixedWidth(300)
grid.addWidget(self.data_collection_temperature, 14, 1)
grid.addWidget(QtGui.QLabel('(e.g. 100) (in Kelvin)'), 14, 2)
grid.addWidget(QtGui.QLabel('Protocol'), 15, 0)
self.data_collection_protocol = QtGui.QLineEdit()
self.data_collection_protocol.setText('SINGLE WAVELENGTH')
self.data_collection_protocol.setFixedWidth(300)
grid.addWidget(self.data_collection_protocol, 15, 1)
grid.addWidget(QtGui.QLabel('(e.g. SINGLE WAVELENGTH, MAD, ...)'), 15, 2)
frame.setLayout(grid)
vb.addWidget(frame)
vb.addStretch(1)
deposit_tab_dict['Methods'][1].addLayout(vb)
## software
vb = QtGui.QVBoxLayout()
frame = QtGui.QFrame()
frame.setFrameShape(QtGui.QFrame.StyledPanel)
grid = QtGui.QGridLayout()
grid.addWidget(QtGui.QLabel('PDB starting model'), 1, 0)
self.pdbx_starting_model = QtGui.QLineEdit()
self.pdbx_starting_model.setText('')
self.pdbx_starting_model.setFixedWidth(300)
grid.addWidget(self.pdbx_starting_model, 1, 1)
grid.addWidget(QtGui.QLabel('(e.g. 7.5 ...)'), 1, 2)
grid.addWidget(QtGui.QLabel('Data reduction'), 2, 0)
self.data_integration_software = QtGui.QComboBox()
for item in XChemMain.data_integration_software(): self.data_integration_software.addItem(item)
grid.addWidget(self.data_integration_software, 2, 1)
grid.addWidget(QtGui.QLabel('Phasing'), 3, 0)
self.phasing_software = QtGui.QComboBox()
for item in XChemMain.phasing_software(): self.phasing_software.addItem(item)
grid.addWidget(self.phasing_software, 3, 1)
frame.setLayout(grid)
vb.addWidget(frame)
vb.addStretch(1)
deposit_tab_dict['Software'][1].addLayout(vb)
## Funding
vb = QtGui.QVBoxLayout()
frame = QtGui.QFrame()
frame.setFrameShape(QtGui.QFrame.StyledPanel)
grid = QtGui.QGridLayout()
grid.addWidget(QtGui.QLabel('Funding Organization'), 1, 0)
self.pdbx_funding_organization_one = QtGui.QLineEdit()
self.pdbx_funding_organization_one.setText('')
self.pdbx_funding_organization_one.setFixedWidth(700)
grid.addWidget(self.pdbx_funding_organization_one, 1, 1)
grid.addWidget(QtGui.QLabel('Grant Number'), 2, 0)
self.pdbx_grant_number_one = QtGui.QLineEdit()
self.pdbx_grant_number_one.setText('')
self.pdbx_grant_number_one.setFixedWidth(700)
grid.addWidget(self.pdbx_grant_number_one, 2, 1)
grid.addWidget(QtGui.QLabel('Country'), 3, 0)
self.pdbx_grant_country_one = QtGui.QComboBox()
for item in XChemMain.pdbx_country(): self.pdbx_grant_country_one.addItem(item)
grid.addWidget(self.pdbx_grant_country_one, 3, 1)
frame.setLayout(grid)
vb.addWidget(frame)
frame = QtGui.QFrame()
frame.setFrameShape(QtGui.QFrame.StyledPanel)
grid = QtGui.QGridLayout()
grid.addWidget(QtGui.QLabel('Funding Organization'), 1, 0)
self.pdbx_funding_organization_two = QtGui.QLineEdit()
self.pdbx_funding_organization_two.setText('')
self.pdbx_funding_organization_two.setFixedWidth(700)
grid.addWidget(self.pdbx_funding_organization_two, 1, 1)
grid.addWidget(QtGui.QLabel('Grant Number'), 2, 0)
self.pdbx_grant_number_two = QtGui.QLineEdit()
self.pdbx_grant_number_two.setText('')
self.pdbx_grant_number_two.setFixedWidth(700)
grid.addWidget(self.pdbx_grant_number_two, 2, 1)
grid.addWidget(QtGui.QLabel('Country'), 3, 0)
self.pdbx_grant_country_two = QtGui.QComboBox()
for item in XChemMain.pdbx_country(): self.pdbx_grant_country_two.addItem(item)
grid.addWidget(self.pdbx_grant_country_two, 3, 1)
frame.setLayout(grid)
vb.addWidget(frame)
frame = QtGui.QFrame()
frame.setFrameShape(QtGui.QFrame.StyledPanel)
grid = QtGui.QGridLayout()
grid.addWidget(QtGui.QLabel('Funding Organization'), 1, 0)
self.pdbx_funding_organization_three = QtGui.QLineEdit()
self.pdbx_funding_organization_three.setText('')
self.pdbx_funding_organization_three.setFixedWidth(700)
grid.addWidget(self.pdbx_funding_organization_three, 1, 1)
grid.addWidget(QtGui.QLabel('Grant Number'), 2, 0)
self.pdbx_grant_number_three = QtGui.QLineEdit()
self.pdbx_grant_number_three.setText('')
self.pdbx_grant_number_three.setFixedWidth(700)
grid.addWidget(self.pdbx_grant_number_three, 2, 1)
grid.addWidget(QtGui.QLabel('Country'), 3, 0)
self.pdbx_grant_country_three = QtGui.QComboBox()
for item in XChemMain.pdbx_country(): self.pdbx_grant_country_three.addItem(item)
grid.addWidget(self.pdbx_grant_country_three, 3, 1)
frame.setLayout(grid)
vb.addWidget(frame)
vb.addStretch(1)
deposit_tab_dict['Funding'][1].addLayout(vb)
vbox.addWidget(deposit_tab_widget)
hbox = QtGui.QHBoxLayout()
button = QtGui.QPushButton('Load\nFile')
button.clicked.connect(self.load_deposit_config_file)
hbox.addWidget(button)
button = QtGui.QPushButton('Save\nFile')
button.clicked.connect(self.save_deposit_config_file)
hbox.addWidget(button)
button = QtGui.QPushButton('Load from\nDatabase')
button.clicked.connect(self.load_deposit_from_database)
button.setEnabled(False)
hbox.addWidget(button)
button = QtGui.QPushButton('Save to\nDatabase')
button.clicked.connect(self.save_deposit_to_database)
hbox.addWidget(button)
vbox.addLayout(hbox)
depositDataLayout.addLayout(vbox, 0, 0)
depositData.exec_()
def save_deposit_config_file(self):
self.update_deposit_dict()
file_name = str(QtGui.QFileDialog.getSaveFileName(self.window, 'Save file', self.current_directory))
# make sure that the file always has .deposit extension
if str(file_name).rfind('.') != -1:
file_name = file_name[:file_name.rfind('.')] + '.deposit'
else:
file_name = file_name + '.deposit'
pickle.dump(self.deposit_dict, open(file_name, 'wb'))
def update_database_with_pdb_codes(self):
self.work_thread = XChemDeposit.import_PDB_IDs(str(self.pdb_code_entry.toPlainText()),
os.path.join(self.database_directory, self.data_source_file),
self.xce_logfile)
self.explorer_active = 1
self.connect(self.work_thread, QtCore.SIGNAL("update_progress_bar"), self.update_progress_bar)
self.connect(self.work_thread, QtCore.SIGNAL("update_status_bar(QString)"), self.update_status_bar)
self.connect(self.work_thread, QtCore.SIGNAL("finished()"), self.thread_finished)
self.work_thread.start()
def update_database_with_labelInfo(self):
for n,l in enumerate(self.labelList):
label = str(l[0].text())
description = str(l[1].text())
# print "update labelTable set Label='%s',Description='%s' where ID=%s" %(label,description,str(n+1))
self.db.execute_statement("update labelTable set Label='%s',Description='%s' where ID=%s" %(label,description,str(n+1)))
# print label,description
def load_deposit_config_file(self):
file_name_temp = QtGui.QFileDialog.getOpenFileNameAndFilter(self.window, 'Open file', self.current_directory,
'*.deposit')
file_name = tuple(file_name_temp)[0]
self.deposit_dict = pickle.load(open(file_name, "rb"))
# print self.deposit_dict
for key in self.get_deposit_dict_template():
if key not in self.deposit_dict:
self.update_log.warning('field not in .deposit file: ' + str(key))
self.deposit_dict[key] = ''
self.update_deposit_input()
def load_deposit_from_database(self):
print('hallo')
def save_deposit_to_database(self):
self.update_deposit_dict()
msgBox = QtGui.QMessageBox()
msgBox.setText(
"*** WARNING ***\nAre you sure you want to update the database?\nThis will overwrite previous entries!")
msgBox.addButton(QtGui.QPushButton('Yes'), QtGui.QMessageBox.YesRole)
msgBox.addButton(QtGui.QPushButton('No'), QtGui.QMessageBox.RejectRole)
reply = msgBox.exec_();
if reply == 0:
self.work_thread = XChemDeposit.update_depositTable(self.deposit_dict,
os.path.join(self.database_directory,
self.data_source_file),
self.xce_logfile)
self.explorer_active = 1
self.connect(self.work_thread, QtCore.SIGNAL("update_progress_bar"), self.update_progress_bar)
self.connect(self.work_thread, QtCore.SIGNAL("update_status_bar(QString)"), self.update_status_bar)
self.connect(self.work_thread, QtCore.SIGNAL("finished()"), self.thread_finished)
self.work_thread.start()
def update_deposit_input(self):
try:
self.contact_author_PI_salutation.setText(self.deposit_dict['contact_author_PI_salutation'])
self.contact_author_PI_first_name.setText(self.deposit_dict['contact_author_PI_first_name'])
self.contact_author_PI_last_name.setText(self.deposit_dict['contact_author_PI_last_name'])
self.contact_author_PI_middle_name.setText(self.deposit_dict['contact_author_PI_middle_name'])
index = self.contact_author_PI_role.findText(self.deposit_dict['contact_author_PI_role'],
QtCore.Qt.MatchFixedString)
self.contact_author_PI_role.setCurrentIndex(index)
index = self.contact_author_PI_organization_type.findText(
self.deposit_dict['contact_author_PI_organization_type'], QtCore.Qt.MatchFixedString)
self.contact_author_PI_organization_type.setCurrentIndex(index)
self.contact_author_PI_organization_name.setText(self.deposit_dict['contact_author_PI_organization_name'])
self.contact_author_PI_email.setText(self.deposit_dict['contact_author_PI_email'])
self.contact_author_PI_address.setText(self.deposit_dict['contact_author_PI_address'])
self.contact_author_PI_city.setText(self.deposit_dict['contact_author_PI_city'])
self.contact_author_PI_State_or_Province.setText(self.deposit_dict['contact_author_PI_State_or_Province'])
self.contact_author_PI_Zip_Code.setText(self.deposit_dict['contact_author_PI_Zip_Code'])
self.contact_author_PI_Country.setText(self.deposit_dict['contact_author_PI_Country'])
self.contact_author_PI_phone_number.setText(self.deposit_dict['contact_author_PI_phone_number'])
self.contact_author_PI_ORCID.setText(self.deposit_dict['contact_author_PI_ORCID'])
self.contact_author_salutation.setText(self.deposit_dict['contact_author_salutation'])
self.contact_author_first_name.setText(self.deposit_dict['contact_author_first_name'])
self.contact_author_last_name.setText(self.deposit_dict['contact_author_last_name'])
self.contact_author_middle_name.setText(self.deposit_dict['contact_author_middle_name'])
index = self.contact_author_role.findText(self.deposit_dict['contact_author_role'],
QtCore.Qt.MatchFixedString)
self.contact_author_role.setCurrentIndex(index)
index = self.contact_author_organization_type.findText(
self.deposit_dict['contact_author_organization_type'], QtCore.Qt.MatchFixedString)
self.contact_author_organization_type.setCurrentIndex(index)
self.contact_author_organization_name.setText(self.deposit_dict['contact_author_organization_name'])
self.contact_author_email.setText(self.deposit_dict['contact_author_email'])
self.contact_author_address.setText(self.deposit_dict['contact_author_address'])
self.contact_author_city.setText(self.deposit_dict['contact_author_city'])
self.contact_author_State_or_Province.setText(self.deposit_dict['contact_author_State_or_Province'])
self.contact_author_Zip_Code.setText(self.deposit_dict['contact_author_Zip_Code'])
self.contact_author_Country.setText(self.deposit_dict['contact_author_Country'])
self.contact_author_phone_number.setText(self.deposit_dict['contact_author_phone_number'])
self.contact_author_ORCID.setText(self.deposit_dict['contact_author_ORCID'])
index = self.Release_status_for_coordinates.findText(self.deposit_dict['Release_status_for_coordinates'],
QtCore.Qt.MatchFixedString)
self.Release_status_for_coordinates.setCurrentIndex(index)
index = self.Release_status_for_sequence.findText(self.deposit_dict['Release_status_for_sequence'],
QtCore.Qt.MatchFixedString)
self.Release_status_for_sequence.setCurrentIndex(index)
self.group_deposition_title.setText(self.deposit_dict['group_deposition_title'])
self.group_description.setText(self.deposit_dict['group_description'])
self.structure_title.setText(self.deposit_dict['structure_title'])
self.structure_title_apo.setText(self.deposit_dict['structure_title_apo'])
for n, name in enumerate(self.deposit_dict['structure_author_name'].split(';')):
self.structure_author_name_List[n].setText(name)
self.primary_citation_id.setText(self.deposit_dict['primary_citation_id'])
self.primary_citation_journal_abbrev.setText(self.deposit_dict['primary_citation_journal_abbrev'])
self.primary_citation_title.setText(self.deposit_dict['primary_citation_title'])
self.primary_citation_year.setText(self.deposit_dict['primary_citation_year'])
self.primary_citation_journal_volume.setText(self.deposit_dict['primary_citation_journal_volume'])
self.primary_citation_page_first.setText(self.deposit_dict['primary_citation_page_first'])
self.primary_citation_page_last.setText(self.deposit_dict['primary_citation_page_last'])
for n, name in enumerate(self.deposit_dict['primary_citation_author_name'].split(';')):
self.primary_citation_author_name_List[n].setText(name)
### entity 1
self.molecule_name.setText(self.deposit_dict['molecule_name'])
self.fragment_name_one_specific_mutation.setText(self.deposit_dict['fragment_name_one_specific_mutation'])
index = self.Source_organism_scientific_name.findText(self.deposit_dict['Source_organism_scientific_name'],
QtCore.Qt.MatchFixedString)
self.Source_organism_scientific_name.setCurrentIndex(index)
self.Source_organism_gene.setText(self.deposit_dict['Source_organism_gene'])
self.Source_organism_strain.setText(self.deposit_dict['Source_organism_strain'])
index = self.Expression_system_scientific_name.findText(
self.deposit_dict['Expression_system_scientific_name'], QtCore.Qt.MatchFixedString)
self.Expression_system_scientific_name.setCurrentIndex(index)
self.Expression_system_strain.setText(self.deposit_dict['Expression_system_strain'])
self.Expression_system_vector_type.setText(self.deposit_dict['Expression_system_vector_type'])
self.Expression_system_plasmid_name.setText(self.deposit_dict['Expression_system_plasmid_name'])
self.Manipulated_source_details.setText(self.deposit_dict['Manipulated_source_details'])
# try:
self.molecule_chain_one.setText(self.deposit_dict['molecule_chain_one'])
### entity 2
self.molecule_name_two.setText(self.deposit_dict['molecule_name_two'])
self.fragment_name_two_specific_mutation.setText(self.deposit_dict['fragment_name_two_specific_mutation'])
index = self.Source_organism_scientific_name_two.findText(self.deposit_dict['Source_organism_scientific_name_two'],
QtCore.Qt.MatchFixedString)
self.Source_organism_scientific_name_two.setCurrentIndex(index)
self.Source_organism_gene_two.setText(self.deposit_dict['Source_organism_gene_two'])
self.Source_organism_strain_two.setText(self.deposit_dict['Source_organism_strain_two'])
index = self.Expression_system_scientific_name_two.findText(
self.deposit_dict['Expression_system_scientific_name_two'], QtCore.Qt.MatchFixedString)
self.Expression_system_scientific_name_two.setCurrentIndex(index)
self.Expression_system_strain_two.setText(self.deposit_dict['Expression_system_strain_two'])
self.Expression_system_vector_type_two.setText(self.deposit_dict['Expression_system_vector_type_two'])
self.Expression_system_plasmid_name_two.setText(self.deposit_dict['Expression_system_plasmid_name_two'])
self.Manipulated_source_details_two.setText(self.deposit_dict['Manipulated_source_details_two'])
self.molecule_chain_two.setText(self.deposit_dict['molecule_chain_two'])
self.molecule_one_letter_sequence_uniprot_id_two.setText(
self.deposit_dict['molecule_two_letter_sequence_uniprot_id'])
self.molecule_one_letter_sequence_two.setText(self.deposit_dict['molecule_two_letter_sequence'])
# except KeyError:
# self.molecule_chain_one.setText('')
# ### entity 2
# self.molecule_name_two.setText('')
# self.fragment_name_two_specific_mutation.setText('')
# self.Source_organism_scientific_name_two.setCurrentIndex(0)
# self.Source_organism_gene_two.setText('')
# self.Source_organism_strain_two.setText('')
# self.Expression_system_scientific_name_two.setCurrentIndex(0)
# self.Expression_system_strain_two.setText('')
# self.Expression_system_vector_type_two.setText('')
# self.Expression_system_plasmid_name_two.setText('')
# self.Manipulated_source_details_two.setText('')
# self.molecule_chain_two.setText('')
# self.molecule_one_letter_sequence_uniprot_id_two.setText('')
# self.molecule_one_letter_sequence_two.setText('')
###
self.structure_keywords.setText(self.deposit_dict['structure_keywords'])
self.biological_assembly_chain_number.setText(self.deposit_dict['biological_assembly_chain_number'])
self.molecule_one_letter_sequence_uniprot_id.setText(
self.deposit_dict['molecule_one_letter_sequence_uniprot_id'])
self.molecule_one_letter_sequence.setText(self.deposit_dict['molecule_one_letter_sequence'])
self.SG_project_name.setText(self.deposit_dict['SG_project_name'])
self.full_name_of_SG_center.setText(self.deposit_dict['full_name_of_SG_center'])
index = self.crystallization_method.findText(self.deposit_dict['crystallization_method'],
QtCore.Qt.MatchFixedString)
self.crystallization_method.setCurrentIndex(index)
self.crystallization_pH.setText(self.deposit_dict['crystallization_pH'])
self.crystallization_temperature.setText(self.deposit_dict['crystallization_temperature'])
self.crystallization_details.setText(self.deposit_dict['crystallization_details'])
index = self.radiation_source.findText(self.deposit_dict['radiation_source'], QtCore.Qt.MatchFixedString)
self.radiation_source.setCurrentIndex(index)
index = self.radiation_source_type.findText(self.deposit_dict['radiation_source_type'],
QtCore.Qt.MatchFixedString)
self.radiation_source_type.setCurrentIndex(index)
self.radiation_wavelengths.setText(self.deposit_dict['radiation_wavelengths'])
index = self.radiation_detector.findText(self.deposit_dict['radiation_detector'],
QtCore.Qt.MatchFixedString)
self.radiation_detector.setCurrentIndex(index)
index = self.radiation_detector_type.findText(self.deposit_dict['radiation_detector_type'],
QtCore.Qt.MatchFixedString)
self.radiation_detector_type.setCurrentIndex(index)
self.data_collection_date.setText(self.deposit_dict['data_collection_date'])
self.data_collection_temperature.setText(self.deposit_dict['data_collection_temperature'])
self.data_collection_protocol.setText(self.deposit_dict['data_collection_protocol'])
self.pdbx_starting_model.setText(self.deposit_dict['pdbx_starting_model'])
index = self.data_integration_software.findText(self.deposit_dict['data_integration_software'],
QtCore.Qt.MatchFixedString)
self.data_integration_software.setCurrentIndex(index)
index = self.phasing_software.findText(self.deposit_dict['phasing_software'], QtCore.Qt.MatchFixedString)
self.phasing_software.setCurrentIndex(index)
self.pdbx_funding_organization_one.setText(self.deposit_dict['pdbx_funding_organization_one'])
self.pdbx_grant_number_one.setText(self.deposit_dict['pdbx_grant_number_one'])
index = self.pdbx_grant_country_one.findText(
self.deposit_dict['pdbx_grant_country_one'], QtCore.Qt.MatchFixedString)
self.pdbx_grant_country_one.setCurrentIndex(index)
self.pdbx_funding_organization_two.setText(self.deposit_dict['pdbx_funding_organization_two'])
self.pdbx_grant_number_two.setText(self.deposit_dict['pdbx_grant_number_two'])
index = self.pdbx_grant_country_two.findText(
self.deposit_dict['pdbx_grant_country_two'], QtCore.Qt.MatchFixedString)
self.pdbx_grant_country_two.setCurrentIndex(index)
self.pdbx_funding_organization_three.setText(self.deposit_dict['pdbx_funding_organization_three'])
self.pdbx_grant_number_three.setText(self.deposit_dict['pdbx_grant_number_three'])
index = self.pdbx_grant_country_three.findText(
self.deposit_dict['pdbx_grant_country_three'], QtCore.Qt.MatchFixedString)
self.pdbx_grant_country_three.setCurrentIndex(index)
except ValueError, e:
# self.update_status_bar('Sorry, this is not a XChemExplorer deposit file!')
self.update_log.error('file is not a valid .deposit file: ' + str(e))
def update_deposit_dict(self):
pdbx_funding_ordinal_one = ''
pdbx_funding_organization_one = ''
pdbx_grant_number_one = ''
pdbx_grant_country_one = ''
if str(self.pdbx_funding_organization_one.text()).replace(' ','') != '':
pdbx_funding_ordinal_one = '1'
pdbx_funding_organization_one = str(self.pdbx_funding_organization_one.text())
pdbx_grant_number_one = str(self.pdbx_grant_number_one.text())
pdbx_grant_country_one = str(self.pdbx_grant_country_one.currentText())
pdbx_funding_ordinal_two = ''
pdbx_funding_organization_two = ''
pdbx_grant_number_two = ''
pdbx_grant_country_two = ''
if str(self.pdbx_funding_organization_two.text()).replace(' ','') != '':
pdbx_funding_ordinal_two = '2'
pdbx_funding_organization_two = str(self.pdbx_funding_organization_two.text())
pdbx_grant_number_two = str(self.pdbx_grant_number_two.text())
pdbx_grant_country_two = str(self.pdbx_grant_country_two.currentText())
pdbx_funding_ordinal_three = ''
pdbx_funding_organization_three = ''
pdbx_grant_number_three = ''
pdbx_grant_country_three = ''
if str(self.pdbx_funding_organization_three.text()).replace(' ','') != '':
pdbx_funding_ordinal_three = '3'
pdbx_funding_organization_three = str(self.pdbx_funding_organization_three.text())
pdbx_grant_number_three = str(self.pdbx_grant_number_three.text())
pdbx_grant_country_three = str(self.pdbx_grant_country_three.currentText())
self.deposit_dict = {
'contact_author_PI_salutation': str(self.contact_author_PI_salutation.text()),
'contact_author_PI_first_name': str(self.contact_author_PI_first_name.text()),
'contact_author_PI_last_name': str(self.contact_author_PI_last_name.text()),
'contact_author_PI_middle_name': str(self.contact_author_PI_middle_name.text()),
'contact_author_PI_role': str(self.contact_author_PI_role.currentText()),
'contact_author_PI_organization_type': str(self.contact_author_PI_organization_type.currentText()),
'contact_author_PI_organization_name': str(self.contact_author_PI_organization_name.text()),
'contact_author_PI_email': str(self.contact_author_PI_email.text()),
'contact_author_PI_address': str(self.contact_author_PI_address.text()),
'contact_author_PI_city': str(self.contact_author_PI_city.text()),
'contact_author_PI_State_or_Province': str(self.contact_author_PI_State_or_Province.text()),
'contact_author_PI_Zip_Code': str(self.contact_author_PI_Zip_Code.text()),
'contact_author_PI_Country': str(self.contact_author_PI_Country.text()),
'contact_author_PI_phone_number': str(self.contact_author_PI_phone_number.text()),
'contact_author_PI_ORCID': str(self.contact_author_PI_ORCID.text()),
'contact_author_salutation': str(self.contact_author_salutation.text()),
'contact_author_first_name': str(self.contact_author_first_name.text()),
'contact_author_last_name': str(self.contact_author_last_name.text()),
'contact_author_middle_name': str(self.contact_author_middle_name.text()),
'contact_author_role': str(self.contact_author_role.currentText()),
'contact_author_organization_type': str(self.contact_author_organization_type.currentText()),
'contact_author_organization_name': str(self.contact_author_organization_name.text()),
'contact_author_email': str(self.contact_author_email.text()),
'contact_author_address': str(self.contact_author_address.text()),
'contact_author_city': str(self.contact_author_city.text()),
'contact_author_State_or_Province': str(self.contact_author_State_or_Province.text()),
'contact_author_Zip_Code': str(self.contact_author_Zip_Code.text()),
'contact_author_Country': str(self.contact_author_Country.text()),
'contact_author_phone_number': str(self.contact_author_phone_number.text()),
'contact_author_ORCID': str(self.contact_author_ORCID.text()),
'Release_status_for_coordinates': str(self.Release_status_for_coordinates.currentText()),
'Release_status_for_sequence': str(self.Release_status_for_sequence.currentText()),
'group_deposition_title': str(self.group_deposition_title.text()),
'group_description': str(self.group_description.text()),
'structure_title': str(self.structure_title.text()),
'structure_title_apo': str(self.structure_title_apo.text()),
'primary_citation_id': str(self.primary_citation_id.text()),
'primary_citation_journal_abbrev': str(self.primary_citation_journal_abbrev.text()),
'primary_citation_title': str(self.primary_citation_title.text()),
'primary_citation_year': str(self.primary_citation_year.text()),
'primary_citation_journal_volume': str(self.primary_citation_journal_volume.text()),
'primary_citation_page_first': str(self.primary_citation_page_first.text()),
'primary_citation_page_last': str(self.primary_citation_page_last.text()),
### entity 1
'molecule_name': str(self.molecule_name.text()),
'Source_organism_scientific_name': str(self.Source_organism_scientific_name.currentText()),
'Source_organism_gene': str(self.Source_organism_gene.text()),
'Source_organism_strain': str(self.Source_organism_strain.text()),
'Expression_system_scientific_name': str(self.Expression_system_scientific_name.currentText()),
'Expression_system_strain': str(self.Expression_system_strain.text()),
'Expression_system_plasmid_name': str(self.Expression_system_plasmid_name.text()),
'Expression_system_vector_type': str(self.Expression_system_vector_type.text()),
'Manipulated_source_details': str(self.Manipulated_source_details.text()),
'fragment_name_one_specific_mutation': str(self.fragment_name_one_specific_mutation.text()),
'molecule_chain_one': str(self.molecule_chain_one.text()),
### entity 2
'molecule_name_two': str(self.molecule_name_two.text()),
'Source_organism_scientific_name_two': str(self.Source_organism_scientific_name_two.currentText()),
'Source_organism_gene_two': str(self.Source_organism_gene_two.text()),
'Source_organism_strain_two': str(self.Source_organism_strain_two.text()),
'Expression_system_scientific_name_two': str(self.Expression_system_scientific_name_two.currentText()),
'Expression_system_strain_two': str(self.Expression_system_strain_two.text()),
'Expression_system_plasmid_name_two': str(self.Expression_system_plasmid_name_two.text()),
'Expression_system_vector_type_two': str(self.Expression_system_vector_type_two.text()),
'Manipulated_source_details_two': str(self.Manipulated_source_details_two.text()),
'fragment_name_two_specific_mutation': str(self.fragment_name_two_specific_mutation.text()),
'molecule_chain_two': str(self.molecule_chain_two.text()),
'structure_keywords': str(self.structure_keywords.text()),
'biological_assembly_chain_number': str(self.biological_assembly_chain_number.text()),
'molecule_one_letter_sequence_uniprot_id': str(self.molecule_one_letter_sequence_uniprot_id.text()),
'molecule_two_letter_sequence_uniprot_id': str(self.molecule_one_letter_sequence_uniprot_id_two.text()),
'SG_project_name': str(self.SG_project_name.text()),
'full_name_of_SG_center': str(self.full_name_of_SG_center.text()),
'molecule_one_letter_sequence': str(self.molecule_one_letter_sequence.toPlainText()).replace(' ',
'').replace(
'\n', '').replace('\r', ''),
'molecule_two_letter_sequence': str(self.molecule_one_letter_sequence_two.toPlainText()).replace(' ',
'').replace(
'\n', '').replace('\r', ''),
'crystallization_method': str(self.crystallization_method.currentText()),
'crystallization_pH': str(self.crystallization_pH.text()),
'crystallization_temperature': str(self.crystallization_temperature.text()),
'crystallization_details': str(self.crystallization_details.text()),
'radiation_source': str(self.radiation_source.currentText()),
'radiation_source_type': str(self.radiation_source_type.currentText()),
'radiation_wavelengths': str(self.radiation_wavelengths.text()),
'radiation_detector': str(self.radiation_detector.currentText()),
'radiation_detector_type': str(self.radiation_detector_type.currentText()),
'data_collection_date': str(self.data_collection_date.text()),
'data_collection_temperature': str(self.data_collection_temperature.text()),
'data_collection_protocol': str(self.data_collection_protocol.text()),
'pdbx_starting_model': str(self.pdbx_starting_model.text()),
'data_integration_software': str(self.data_integration_software.currentText()),
'phasing_software': str(self.phasing_software.currentText()),
'pdbx_funding_ordinal_one': pdbx_funding_ordinal_one,
'pdbx_funding_organization_one': pdbx_funding_organization_one,
'pdbx_grant_number_one': pdbx_grant_number_one,
'pdbx_grant_country_one': pdbx_grant_country_one,
'pdbx_funding_ordinal_two': pdbx_funding_ordinal_two,
'pdbx_funding_organization_two': pdbx_funding_organization_two,
'pdbx_grant_number_two': pdbx_grant_number_two,
'pdbx_grant_country_two': pdbx_grant_country_two,
'pdbx_funding_ordinal_three': pdbx_funding_ordinal_three,
'pdbx_funding_organization_three': pdbx_funding_organization_three,
'pdbx_grant_number_three': pdbx_grant_number_three,
'pdbx_grant_country_three': pdbx_grant_country_three
}
structure_author_name = ''
for widget in self.structure_author_name_List:
structure_author_name += str(widget.text()) + ';'
self.deposit_dict['structure_author_name'] = structure_author_name[:-1]
primary_citation_author_name = ''
for widget in self.primary_citation_author_name_List:
primary_citation_author_name += str(widget.text()) + ';'
self.deposit_dict['primary_citation_author_name'] = primary_citation_author_name[:-1]
    def get_deposit_dict_template(self):
        """Return a fresh template dict containing every deposition key.

        Most values default to None; the optional pdbx funding fields default
        to '' so that unused funding blocks serialize as empty strings.
        The key set mirrors what update_deposit_dict() produces.
        """
        deposit_dict_template = {
            'contact_author_PI_salutation': None,
            'contact_author_PI_first_name': None,
            'contact_author_PI_last_name': None,
            'contact_author_PI_middle_name': None,
            'contact_author_PI_role': None,
            'contact_author_PI_organization_type': None,
            'contact_author_PI_organization_name': None,
            'contact_author_PI_email': None,
            'contact_author_PI_address': None,
            'contact_author_PI_city': None,
            'contact_author_PI_State_or_Province': None,
            'contact_author_PI_Zip_Code': None,
            'contact_author_PI_Country': None,
            'contact_author_PI_phone_number': None,
            'contact_author_PI_ORCID': None,
            'contact_author_salutation': None,
            'contact_author_first_name': None,
            'contact_author_last_name': None,
            'contact_author_middle_name': None,
            'contact_author_role': None,
            'contact_author_organization_type': None,
            'contact_author_organization_name': None,
            'contact_author_email': None,
            'contact_author_address': None,
            'contact_author_city': None,
            'contact_author_State_or_Province': None,
            'contact_author_Zip_Code': None,
            'contact_author_Country': None,
            'contact_author_phone_number': None,
            'contact_author_ORCID': None,
            'Release_status_for_coordinates': None,
            'Release_status_for_sequence': None,
            'group_deposition_title': None,
            'group_description': None,
            'structure_title': None,
            'structure_title_apo': None,
            'primary_citation_id': None,
            'primary_citation_journal_abbrev': None,
            'primary_citation_title': None,
            'primary_citation_year': None,
            'primary_citation_journal_volume': None,
            'primary_citation_page_first': None,
            'primary_citation_page_last': None,
            ### entity 1
            'molecule_name': None,
            'Source_organism_scientific_name': None,
            'Source_organism_gene': None,
            'Source_organism_strain': None,
            'Expression_system_scientific_name': None,
            'Expression_system_strain': None,
            'Expression_system_plasmid_name': None,
            'Expression_system_vector_type': None,
            'Manipulated_source_details': None,
            'fragment_name_one_specific_mutation': None,
            'molecule_chain_one': None,
            ### entity 2
            'molecule_name_two': None,
            'Source_organism_scientific_name_two': None,
            'Source_organism_gene_two': None,
            'Source_organism_strain_two': None,
            'Expression_system_scientific_name_two': None,
            'Expression_system_strain_two': None,
            'Expression_system_plasmid_name_two': None,
            'Expression_system_vector_type_two': None,
            'Manipulated_source_details_two': None,
            'fragment_name_two_specific_mutation': None,
            'molecule_chain_two': None,
            'structure_keywords': None,
            'biological_assembly_chain_number': None,
            'molecule_one_letter_sequence_uniprot_id': None,
            'molecule_two_letter_sequence_uniprot_id': None,
            'SG_project_name': None,
            'full_name_of_SG_center': None,
            'molecule_one_letter_sequence': None,
            'molecule_two_letter_sequence': None,
            'crystallization_method': None,
            'crystallization_pH': None,
            'crystallization_temperature': None,
            'crystallization_details': None,
            'radiation_source': None,
            'radiation_source_type': None,
            'radiation_wavelengths': None,
            'radiation_detector': None,
            'radiation_detector_type': None,
            'data_collection_date': None,
            'data_collection_temperature': None,
            'data_collection_protocol': None,
            'pdbx_starting_model': None,
            'data_integration_software': None,
            'phasing_software': None,
            'structure_author_name': None,
            'primary_citation_author_name': None,
            # funding fields are optional -> empty string, not None
            'pdbx_funding_organization_one': '',
            'pdbx_grant_number_one': '',
            'pdbx_grant_country_one': '',
            'pdbx_funding_organization_two': '',
            'pdbx_grant_number_two': '',
            'pdbx_grant_country_two': '',
            'pdbx_funding_organization_three': '',
            'pdbx_grant_number_three': '',
            'pdbx_grant_country_three': ''
        }
        return deposit_dict_template
def set_primary_citation_as_structure_authors(self, state):
if state == QtCore.Qt.Checked:
for n, entry in enumerate(self.structure_author_name_List):
self.primary_citation_author_name_List[n].setText(str(entry.text()))
else:
for n, entry in enumerate(self.primary_citation_author_name_List):
entry.setText('')
def set_xce_logfile(self):
file_name = str(QtGui.QFileDialog.getSaveFileName(self.window, 'Save file', self.current_directory))
self.xce_logfile = str(file_name)
self.xce_logfile_label.setText(str(self.xce_logfile))
if self.xce_logfile == '' or self.xce_logfile[self.xce_logfile.rfind('/') + 1:] == '':
print('==> XCE: invalid file format')
else:
XChemLog.startLog(self.xce_logfile).create_logfile(self.xce_version)
self.update_log = XChemLog.updateLog(self.xce_logfile)
def set_second_cif_file(self):
filepath_temp = QtGui.QFileDialog.getOpenFileNameAndFilter(self.window, 'Select CIF File',
self.initial_model_directory, '*.cif')
filepath = str(tuple(filepath_temp)[0])
self.second_cif_file = str(filepath)
self.second_cif_file_label.setText(str(self.second_cif_file))
self.update_log.insert('user selected %s as CIF file for merging into ligand CIF files' %self.second_cif_file)
def select_datasource_columns_to_display(self):
columns_to_show = QtGui.QMessageBox()
columns_to_showLayout = columns_to_show.layout()
columns_in_data_source = self.db.return_column_list()
try:
columns_in_data_source = self.db.return_column_list()
except AttributeError:
print('==> XCE: please select a datasource file')
self.status_bar.showMessage('please select a datasource file')
return
column_dict = {}
vbox = QtGui.QVBoxLayout()
number_of_entries = len(columns_in_data_source)
columns_shown_in_dialog_column = 15
grid = QtGui.QGridLayout()
x = 0
y = 0
columns_to_ignore = self.db.columns_not_to_display()
for entries_added in range(number_of_entries):
if not columns_in_data_source[entries_added][1] in columns_to_ignore:
data_source_column = QtGui.QCheckBox(columns_in_data_source[entries_added][1])
column_dict[entries_added] = data_source_column
if columns_in_data_source[entries_added][1] in self.overview_datasource_table_columns:
data_source_column.setChecked(True)
grid.addWidget(data_source_column, y, x)
y += 1
if y == columns_shown_in_dialog_column:
y = 0
x += 1
vbox.addLayout(grid)
columns_to_showLayout.addLayout(vbox, 0, 0)
columns_to_show.addButton(QtGui.QPushButton('OK'), QtGui.QMessageBox.YesRole)
columns_to_show.addButton(QtGui.QPushButton('Cancel'), QtGui.QMessageBox.RejectRole)
reply = columns_to_show.exec_();
if reply == 0:
columns_to_show_list = ['Sample ID']
for key in column_dict:
if column_dict[key].isChecked():
columns_to_show_list.append(columns_in_data_source[key][1])
self.overview_datasource_table_columns = columns_to_show_list
self.populate_and_update_datasource_table()
def update_header_and_data_from_datasource(self):
self.update_log.insert('getting information for all samples from data source...')
self.db = XChemDB.data_source(os.path.join(self.database_directory, self.data_source_file))
self.update_log.insert('creating missing columns in data source')
self.db.create_missing_columns()
self.update_log.insert('load header and data from data source')
self.header, self.data = self.db.load_samples_from_data_source()
self.update_log.insert('get all samples in data source')
all_samples_in_db = self.db.execute_statement("select CrystalName from mainTable where CrystalName is not '';")
self.xtal_db_dict = {}
sampleID_column = 0
for n, entry in enumerate(self.header):
if entry == 'CrystalName':
sampleID_column = n
break
for line in self.data:
if str(line[sampleID_column]) != '':
db_dict = {}
for n, entry in enumerate(line):
if n != sampleID_column:
db_dict[str(self.header[n])] = str(entry)
self.xtal_db_dict[str(line[sampleID_column])] = db_dict
print('==> XCE: found ' + str(len(self.xtal_db_dict)) + ' samples')
    def datasource_menu_save_samples(self):
        # TODO: not implemented yet - debug placeholder only
        print('hallo')
def datasource_menu_export_csv_file(self):
file_name = str(QtGui.QFileDialog.getSaveFileName(self.window, 'Save file', self.database_directory))
if file_name.rfind('.') != -1:
file_name = file_name[:file_name.rfind('.')] + '.csv'
else:
file_name = file_name + '.csv'
self.db.export_to_csv_file(file_name)
def datasource_menu_import_csv_file(self):
if self.data_source_set:
file_name = QtGui.QFileDialog.getOpenFileName(self.window, 'Open file', self.database_directory)
self.db.import_csv_file(file_name)
else:
self.update_status_bar('Please load a data source file first')
def datasource_menu_update_datasource(self):
self.work_thread = XChemThread.synchronise_db_and_filesystem(self.initial_model_directory,
os.path.join(self.database_directory,
self.data_source_file),
self.panddas_directory, self.xce_logfile,
'project_directory')
self.connect(self.work_thread, QtCore.SIGNAL("update_progress_bar"), self.update_progress_bar)
self.connect(self.work_thread, QtCore.SIGNAL("update_status_bar(QString)"), self.update_status_bar)
self.connect(self.work_thread, QtCore.SIGNAL("finished()"), self.thread_finished)
self.connect(self.work_thread, QtCore.SIGNAL("datasource_menu_reload_samples"),
self.datasource_menu_reload_samples)
self.work_thread.start()
    def export_data_for_WONKA(self):
        """Export a CSV file from the datasource for input into WONKA."""
        self.update_log.insert('exporting CSV file for input into WONKA')
        self.db.export_csv_for_WONKA()
def on_context_menu(self, point):
# show context menu
for key in self.dewar_configuration_dict:
if self.dewar_configuration_dict[key] == self.sender():
self.dewar_label_active = key
self.popMenu.exec_(self.sender().mapToGlobal(point))
def on_context_menu_reprocess_data(self, point):
# show context menu
self.popMenu_for_datasets_reprocess_table.exec_(self.sender().mapToGlobal(point))
def flag_sample_for_recollection(self):
self.dewar_configuration_dict[self.dewar_label_active].setStyleSheet("background-color: yellow")
def undo_flag_sample_for_recollection(self):
self.dewar_configuration_dict[self.dewar_label_active].setStyleSheet("background-color: gray")
def show_html_summary_in_firefox(self, xtal):
html_summary = self.albula_button_dict[xtal][2]
print('html_summary', html_summary)
new = 2
webbrowser.open(html_summary, new=new)
def update_pandda_crystal_from_combobox(self):
self.pandda_analyse_crystal_from_selection_combobox.clear()
self.pandda_analyse_crystal_from_selection_combobox.addItem('use all datasets')
if os.path.isfile(os.path.join(self.database_directory, self.data_source_file)):
self.load_crystal_form_from_datasource()
if self.xtalform_dict != {}:
print(self.xtalform_dict)
for key in self.xtalform_dict:
self.pandda_analyse_crystal_from_selection_combobox.addItem(key)
def populate_reference_combobox(self, combobox):
combobox.clear()
for reference_file in self.reference_file_list:
combobox.addItem(reference_file[0])
def populate_refinement_outcome_combobox(self, combobox):
combobox.clear()
for stage in self.refinement_stage:
combobox.addItem(stage)
def populate_target_selection_combobox(self, combobox):
combobox.clear()
for target in self.target_list:
combobox.addItem(target)
def combo_selected(self, text):
self.map_url = str(self.panddas_directory + '/analyses/html_summaries/pandda_map_' + text + '.html')
self.pandda_maps_html.load(QtCore.QUrl(self.map_url))
self.pandda_maps_html.show()
def add_map_html(self):
self.map_list = glob.glob(str(self.panddas_directory + '/analyses/html_summaries/pandda_map_*.html'))
self.list_options = []
for i in range(0, len(self.map_list)):
string = self.map_list[i]
string = string.replace('/analyses/html_summaries/pandda_map_', '')
string = string.replace('.html', '')
string = string.replace(self.panddas_directory, '')
self.list_options.append(string)
self.pandda_map_list.clear()
for i in range(0, len(self.list_options)):
self.pandda_map_list.addItem(self.list_options[i])
self.connect(self.pandda_map_list, QtCore.SIGNAL('activated(QString)'), self.combo_selected)
def open_config_file(self):
file_name_temp = QtGui.QFileDialog.getOpenFileNameAndFilter(self.window, 'Open file', self.current_directory,
'*.conf')
file_name = tuple(file_name_temp)[0]
try:
pickled_settings = pickle.load(open(file_name, 'rb'))
except:
print('==> XCE: failed to open config file...')
key_list = {#'beamline_directory': 'beamline_directory',
'initial_model_directory': 'initial_model_directory',
'panddas_directory': 'panddas_directory',
'html_export_directory': 'html_export_directory',
'group_deposit_directory': 'group_deposit_directory',
'database_directory': 'database_directory',
'datasets_summary_file': 'datasets_summary',
#"'data_source_file': 'data_source',
'ccp4_scratch_directory': 'ccp4_scratch',
'allowed_unitcell_difference_percent': 'unitcell_difference',
'acceptable_low_resolution_limit_for_data': 'too_low_resolution_data',
#'reference_directory_temp': 'reference_directory'
}
# self.pandda_input_data_dir_entry.setText(os.path.join(self.initial_model_directory, '*'))
for current_key in key_list:
try:
command = str('self.' + current_key + " = pickled_settings['" + key_list[current_key] +"']")
exec(command)
command = str('self.settings["' + key_list[current_key]+ '"]= self.' + current_key)
exec(command)
print('==> XCE: found ' + key_list[current_key])
except:
print('==> XCE: WARNING: Failed to find settings for: ' + key_list[current_key] + ' Error type: '
+ str(sys.exc_info()[0]))
exec(str(current_key + " = ''"))
continue
try:
pickled_settings = pickle.load(open(file_name, "rb"))
if pickled_settings['beamline_directory'] != self.beamline_directory:
self.beamline_directory = pickled_settings['beamline_directory']
self.target_list, self.visit_list = XChemMain.get_target_and_visit_list(self.beamline_directory,self.read_agamemnon.isChecked())
self.settings['beamline_directory'] = self.beamline_directory
self.populate_target_selection_combobox(self.target_selection_combobox)
self.layout_funcs.pandda_html(self)
self.show_pandda_html_summary()
self.html_export_directory_label.setText(self.html_export_directory)
self.group_deposition_directory_label.setText(self.group_deposit_directory)
self.datasets_summary_file_label.setText(self.datasets_summary_file)
self.data_source_file = pickled_settings['data_source']
if self.data_source_file != '':
self.settings['data_source'] = os.path.join(self.database_directory, self.data_source_file)
# this is probably not necessary
if os.path.isfile(self.settings['data_source']):
write_enabled = self.check_write_permissions_of_data_source()
if not write_enabled:
self.data_source_file_label.setText('')
self.data_source_set = False
else:
self.data_source_file_label.setText(
os.path.join(self.database_directory, self.data_source_file))
self.data_source_set = True
self.db = XChemDB.data_source(os.path.join(self.database_directory, self.data_source_file))
self.datasource_menu_reload_samples()
reference_directory_temp = pickled_settings['reference_directory']
if reference_directory_temp != self.reference_directory:
self.reference_directory = reference_directory_temp
self.settings['reference_directory'] = self.reference_directory
self.update_reference_files(' ')
for xtal in self.initial_model_dimple_dict:
reference_file_selection_combobox = self.initial_model_dimple_dict[xtal][1]
self.populate_reference_combobox(reference_file_selection_combobox)
self.initial_model_directory_label.setText(self.initial_model_directory)
self.panddas_directory_label.setText(self.panddas_directory)
self.pandda_output_data_dir_entry.setText(self.panddas_directory)
self.reference_directory_label.setText(self.reference_directory)
self.beamline_directory_label.setText(self.beamline_directory)
self.ccp4_scratch_directory_label.setText(self.ccp4_scratch_directory)
self.reference_file_list = self.get_reference_file_list(' ')
self.pandda_input_data_dir_entry.setText(os.path.join(self.initial_model_directory, '*'))
self.update_all_tables()
except KeyError:
self.update_status_bar('Sorry, this is not a XChemExplorer config file!')
self.update_log.insert('Sorry, this is not a XChemExplorer config file!')
except:
print("Unexpected error:", sys.exc_info()[0])
raise
def save_config_file(self):
file_name = str(QtGui.QFileDialog.getSaveFileName(self.window, 'Save file', self.current_directory))
# make sure that the file always has .conf extension
if str(file_name).rfind('.') != -1:
file_name = file_name[:file_name.rfind('.')] + '.conf'
else:
file_name = file_name + '.conf'
pickle.dump(self.settings, open(file_name, 'wb'))
def update_reference_files(self, reference_root):
self.reference_file_list = self.get_reference_file_list(reference_root)
self.populate_reference_combobox(self.reference_file_selection_combobox)
self.populate_reference_combobox(self.pandda_reference_file_selection_combobox)
    def check_status_rerun_dimple_on_all_autoprocessing_files(self):
        # TODO: status check not implemented yet - debug placeholder only
        print('hallo')
    def rerun_dimple_on_all_autoprocessing_files(self):
        """Queue DIMPLE jobs for every autoprocessing MTZ file referenced in
        self.data_collection_dict and hand the job list to the pre-run check.

        NOTE(review): the nested try/except deliberately relies on KeyError
        being raised by the dict lookups inside os.path.join when
        'DataProcessingMTZfileName' (outer) or 'DataProcessingPathToMTZfile'
        (inner) is missing from db_dict; kept byte-identical for that reason.
        """
        job_list = []
        self.update_log.insert('preparing to run DIMPLE on all autoprocessing files')
        for xtal in self.data_collection_dict:
            for entry in self.data_collection_dict[xtal]:
                if entry[0] == 'logfile':
                    # entry[6] holds the per-dataset database dict
                    db_dict = entry[6]
                    try:
                        # accept either dir+filename or a full file path in
                        # 'DataProcessingPathToMTZfile'
                        if os.path.isfile(os.path.join(db_dict['DataProcessingPathToMTZfile'],
                                                       db_dict['DataProcessingMTZfileName'])) or \
                                os.path.isfile(os.path.join(db_dict['DataProcessingPathToMTZfile'])):
                            job_list = self.get_job_list_for_dimple_rerun(xtal, job_list, db_dict, entry)
                    except KeyError:
                        # no MTZ file name recorded; try the path on its own
                        try:
                            if os.path.isfile(os.path.join(db_dict['DataProcessingPathToMTZfile'])):
                                job_list = self.get_job_list_for_dimple_rerun(xtal, job_list, db_dict, entry)
                        except KeyError:
                            # no path recorded either; skip this dataset
                            continue
        if job_list:
            self.update_log.insert('trying to run DIMPLE on ALL auto-processing files')
            self.check_before_running_dimple(job_list)
    def run_dimple_on_selected_autoprocessing_file(self, instruction):
        """Queue an initial-refinement job for every crystal ticked in the
        initial-model table and pass the list to check_before_running_dimple().

        Parameters:
            instruction: workflow string; the pipeline (dimple / pipedream /
                phenix.ligand_pipeline) is derived from it downstream.
        """
        job_list = []
        for xtal in sorted(self.initial_model_dimple_dict):
            # print(xtal)
            # initial_model_dimple_dict[xtal][0] is the per-sample checkbox,
            # [1] the reference-file combobox
            if self.initial_model_dimple_dict[xtal][0].isChecked():
                # print(xtal + ' is checked...')
                db_dict = self.xtal_db_dict[xtal]
                # the if statement below is so convoluted, so that it is compatible with older data source files
                if os.path.isfile(
                        os.path.join(db_dict['ProjectDirectory'], xtal, db_dict['DataProcessingPathToMTZfile'],
                                     db_dict['DataProcessingMTZfileName'])) or \
                        os.path.isfile(
                            os.path.join(db_dict['ProjectDirectory'], xtal, db_dict['DataProcessingPathToMTZfile'])) or \
                        os.path.isfile(os.path.join(db_dict['DataProcessingPathToMTZfile'],
                                                    db_dict['DataProcessingMTZfileName'])) or \
                        os.path.isfile(os.path.join(db_dict['DataProcessingPathToMTZfile'])):
                    # probe the possible path layouts and keep the first MTZ that exists;
                    # NOTE(review): this probe order differs from the check above — the
                    # absolute-path variants are tried first here. TODO confirm intended.
                    if os.path.isfile(
                            os.path.join(db_dict['DataProcessingPathToMTZfile'], db_dict['DataProcessingMTZfileName'])):
                        mtzin = os.path.join(db_dict['DataProcessingPathToMTZfile'],
                                             db_dict['DataProcessingMTZfileName'])
                    elif os.path.isfile(os.path.join(db_dict['DataProcessingPathToMTZfile'])):
                        mtzin = os.path.join(db_dict['DataProcessingPathToMTZfile'])
                    elif os.path.isfile(
                            os.path.join(db_dict['ProjectDirectory'], xtal, db_dict['DataProcessingPathToMTZfile'],
                                         db_dict['DataProcessingMTZfileName'])):
                        mtzin = os.path.join(db_dict['ProjectDirectory'], xtal, db_dict['DataProcessingPathToMTZfile'],
                                             db_dict['DataProcessingMTZfileName'])
                    elif os.path.isfile(
                            os.path.join(db_dict['ProjectDirectory'], xtal, db_dict['DataProcessingPathToMTZfile'])):
                        mtzin = os.path.join(db_dict['ProjectDirectory'], xtal, db_dict['DataProcessingPathToMTZfile'])
                    # the reference PDB is mandatory; MTZ/CIF are optional extras
                    # appended as dimple command-line fragments
                    reference_file = str(self.initial_model_dimple_dict[xtal][1].currentText())
                    reference_file_pdb = os.path.join(self.reference_directory, reference_file + '.pdb')
                    if not os.path.isfile(reference_file_pdb):
                        continue
                    if os.path.isfile(os.path.join(self.reference_directory, reference_file + '.mtz')):
                        reference_file_mtz = ' -R ' + os.path.join(self.reference_directory, reference_file + '.mtz')
                    else:
                        reference_file_mtz = ''
                    if os.path.isfile(os.path.join(self.reference_directory, reference_file + '.cif')):
                        reference_file_cif = ' --libin ' + os.path.join(self.reference_directory,
                                                                        reference_file + '.cif')
                    else:
                        reference_file_cif = ''
                    job_list.append([xtal,
                                     'dimple_rerun_on_selected_file',
                                     mtzin,
                                     reference_file_pdb,
                                     reference_file_mtz,
                                     reference_file_cif])
                else:
                    print('WARNING: ' + xtal + ' has not been submitted to dimple because no files were found: ')
                    if not os.path.isfile(os.path.join(db_dict['ProjectDirectory'], xtal, db_dict['DataProcessingPathToMTZfile'],
                                                       db_dict['DataProcessingMTZfileName'])):
                        print('    ' + str(os.path.join(db_dict['ProjectDirectory'], xtal, db_dict['DataProcessingPathToMTZfile'],
                                                        db_dict['DataProcessingMTZfileName'])) + ' is missing')
                    if not os.path.isfile(os.path.join(db_dict['ProjectDirectory'], xtal, db_dict['DataProcessingPathToMTZfile'])):
                        print('    ' + str(os.path.join(db_dict['ProjectDirectory'], xtal, db_dict['DataProcessingPathToMTZfile'])) + ' is missing')
                    if not os.path.isfile(os.path.join(db_dict['DataProcessingPathToMTZfile'])):
                        print('    ' + str(os.path.join(db_dict['DataProcessingPathToMTZfile']) + ' is missing'))
        if job_list:
            self.update_log.insert('trying to run DIMPLE on SELECTED auto-processing files')
            self.check_before_running_dimple(job_list,instruction)
    def remove_selected_dimple_files(self,instruction):
        """Delete initial-refinement output for all ticked crystals after a
        user confirmation dialog.

        Parameters:
            instruction: workflow string; the pipeline whose files are removed
                (dimple / pipedream / phenix.ligand_pipeline) is derived from it.
        """
        # NOTE(review): if *instruction* names none of the three pipelines,
        # 'pipeline' stays unbound and the showMessage below raises NameError —
        # callers appear to always pass a matching string. TODO confirm.
        if 'dimple' in instruction.lower():
            pipeline = 'dimple'
        elif 'pipedream' in instruction.lower():
            pipeline = 'pipedream'
        elif 'phenix' in instruction.lower():
            pipeline = 'phenix.ligand_pipeline'
        job_list = []
        for xtal in sorted(self.initial_model_dimple_dict):
            # [0] is the per-sample checkbox widget
            if self.initial_model_dimple_dict[xtal][0].isChecked():
                job_list.append(xtal)
        if job_list:
            msgBox = QtGui.QMessageBox()
            # NOTE(review): the prompt reports the preferred pipeline from
            # self.preferences, not the one selected via *instruction* —
            # possibly stale; TODO confirm which is intended.
            msgBox.setText("Do you really want to delete {0!s} {1!s} files?".format(len(job_list),self.preferences['initial_refinement_pipeline']))
            msgBox.addButton(QtGui.QPushButton('Go'), QtGui.QMessageBox.YesRole)
            msgBox.addButton(QtGui.QPushButton('Cancel'), QtGui.QMessageBox.RejectRole)
            reply = msgBox.exec_();
            # reply == 0 corresponds to the first button added ('Go')
            if reply == 0:
                self.status_bar.showMessage('preparing to remove {0!s} files'.format(pipeline))
                self.update_log.insert('preparing to remove {0!s} files'.format(pipeline))
                # deletion happens in a worker thread so the GUI stays responsive
                self.work_thread = XChemThread.remove_selected_dimple_files(job_list,
                                                                            self.initial_model_directory,
                                                                            self.xce_logfile,
                                                                            self.database_directory,
                                                                            self.data_source_file,
                                                                            pipeline)
                self.explorer_active = 1
                self.connect(self.work_thread, QtCore.SIGNAL("finished()"), self.thread_finished)
                self.connect(self.work_thread, QtCore.SIGNAL("update_progress_bar"), self.update_progress_bar)
                self.connect(self.work_thread, QtCore.SIGNAL("update_status_bar(QString)"), self.update_status_bar)
                # NOTE(review): "finished()" is connected twice, so thread_finished
                # runs twice per completion; the same pattern recurs in this file,
                # so it may be deliberate — TODO confirm.
                self.connect(self.work_thread, QtCore.SIGNAL("finished()"), self.thread_finished)
                self.connect(self.work_thread, QtCore.SIGNAL("datasource_menu_reload_samples"),
                             self.datasource_menu_reload_samples)
                self.work_thread.start()
    def set_results_from_selected_pipeline(self,instruction):
        """Mark the output of one initial-refinement pipeline as the current
        result for every ticked crystal (runs in a worker thread).

        Parameters:
            instruction: workflow string naming the pipeline to promote
                (dimple / pipedream / phenix.ligand_pipeline).
        """
        # NOTE(review): if *instruction* matches none of the three pipelines,
        # 'pipeline' stays unbound and the warning below raises NameError —
        # callers appear to always pass a matching string. TODO confirm.
        if 'dimple' in instruction.lower():
            pipeline = 'dimple'
        elif 'pipedream' in instruction.lower():
            pipeline = 'pipedream'
        elif 'phenix' in instruction.lower():
            pipeline = 'phenix.ligand_pipeline'
        self.update_log.warning('selecting initial refinement results from '+pipeline)
        job_list = []
        for xtal in sorted(self.initial_model_dimple_dict):
            # [0] is the per-sample checkbox widget
            if self.initial_model_dimple_dict[xtal][0].isChecked():
                job_list.append(xtal)
        self.work_thread = XChemThread.set_results_from_selected_pipeline(job_list,
                                                                          self.initial_model_directory,
                                                                          self.xce_logfile,
                                                                          self.database_directory,
                                                                          self.data_source_file,
                                                                          pipeline)
        self.explorer_active = 1
        self.connect(self.work_thread, QtCore.SIGNAL("finished()"), self.thread_finished)
        self.connect(self.work_thread, QtCore.SIGNAL("update_progress_bar"), self.update_progress_bar)
        self.connect(self.work_thread, QtCore.SIGNAL("update_status_bar(QString)"), self.update_status_bar)
        # NOTE(review): "finished()" connected twice (see remove_selected_dimple_files)
        self.connect(self.work_thread, QtCore.SIGNAL("finished()"), self.thread_finished)
        self.connect(self.work_thread, QtCore.SIGNAL("datasource_menu_reload_samples"),
                     self.datasource_menu_reload_samples)
        self.work_thread.start()
    def run_xia2_on_selected_datasets(self, overwrite):
        """Launch xia2 reprocessing for every dataset ticked in the
        reprocessing table, using the protocols/options currently selected
        in the GUI.

        Parameters:
            overwrite: bool forwarded to XChemProcess.run_xia2; True reruns
                datasets that already have results.
        """
        # check which programs should be run
        protocol = []
        if self.xia2_3d_checkbox.isChecked():
            protocol.append('3d')
        if self.xia2_3dii_checkbox.isChecked():
            protocol.append('3dii')
        if self.xia2_dials_checkbox.isChecked():
            protocol.append('dials')
        # space group
        spg = []
        if str(self.reprocess_space_group_comboxbox.currentText()) != 'ignore':
            spg.append(str(self.reprocess_space_group_comboxbox.currentText()))
        # reference file
        ref = []
        if os.path.isfile(self.diffraction_data_reference_mtz):
            ref.append(self.diffraction_data_reference_mtz)
        # resolution limit
        reso_limit = []
        if str(self.reprocess_isigma_combobox.currentText()) != 'default':
            reso_limit.append(str(self.reprocess_isigma_combobox.currentText()))
        # cc 1/2
        cc_half = []
        if str(self.reprocess_cc_half_combobox.currentText()) != 'default':
            cc_half.append(str(self.reprocess_cc_half_combobox.currentText()))
        # map sample IDs to diffraction data for every ticked row
        run_dict = {}
        allRows = self.datasets_reprocess_table.rowCount()
        for row in xrange(0, allRows):
            dataset_id = str(self.datasets_reprocess_table.item(row, 0).text())
            sample_id = str(self.datasets_reprocess_table.item(row, 1).text())
            # diffraction_data_table_dict[dataset_id][0] is the 'Run xia2' checkbox
            if self.diffraction_data_table_dict[dataset_id][0].isChecked():
                run_dict[sample_id] = self.diffraction_data_dict[dataset_id]
        # only launch when at least one protocol AND one dataset were selected
        if protocol != [] and run_dict != {}:
            self.work_thread = XChemProcess.run_xia2(self.initial_model_directory,
                                                     run_dict,
                                                     protocol,
                                                     spg,
                                                     ref,
                                                     reso_limit,
                                                     cc_half,
                                                     self.xce_logfile,
                                                     self.external_software,
                                                     self.ccp4_scratch_directory,
                                                     self.max_queue_jobs,
                                                     os.path.join(self.database_directory, self.data_source_file),
                                                     overwrite)
            self.explorer_active = 1
            self.connect(self.work_thread, QtCore.SIGNAL("finished()"), self.thread_finished)
            self.connect(self.work_thread, QtCore.SIGNAL("update_progress_bar"), self.update_progress_bar)
            self.connect(self.work_thread, QtCore.SIGNAL("update_status_bar(QString)"), self.update_status_bar)
            # NOTE(review): "finished()" connected twice, as elsewhere in this file
            self.connect(self.work_thread, QtCore.SIGNAL("finished()"), self.thread_finished)
            self.work_thread.start()
        else:
            self.update_log.insert('please select datasets and/ or data processing protocol')
            self.update_status_bar('please select datasets and/ or data processing protocol')
def update_reprocessing_table(self):
allRows = self.datasets_reprocess_table.rowCount()
for row in xrange(0, allRows):
sample_id = str(self.datasets_reprocess_table.item(row, 1).text())
if sample_id in self.xtal_db_dict:
db_dict = self.xtal_db_dict[sample_id]
cell_text = QtGui.QTableWidgetItem()
cell_text.setText(db_dict['DataProcessingStatus'])
cell_text.setTextAlignment(QtCore.Qt.AlignCenter | QtCore.Qt.AlignCenter)
if db_dict['DataProcessingStatus'] == 'running':
cell_text.setBackground(QtGui.QColor(100, 230, 150))
elif db_dict['DataProcessingStatus'] == 'pending':
cell_text.setBackground(QtGui.QColor(20, 100, 230))
elif db_dict['DataProcessingStatus'] == 'started':
cell_text.setBackground(QtGui.QColor(230, 240, 110))
elif db_dict['DataProcessingStatus'] == 'finished':
cell_text.setBackground(QtGui.QColor(255, 255, 255))
self.datasets_reprocess_table.setItem(row, 7, cell_text)
def get_job_list_for_dimple_rerun(self, xtal, job_list, db_dict, entry):
self.status_bar.showMessage('checking: ' + str(
os.path.join(db_dict['DataProcessingPathToMTZfile'], db_dict['DataProcessingMTZfileName'])))
suitable_reference = []
for reference in self.reference_file_list:
# first we need one in the same pointgroup
if reference[5] == db_dict['DataProcessingPointGroup']:
try:
difference = math.fabs(1 - (float(db_dict['DataProcessingUnitCellVolume']) / float(reference[4])))
suitable_reference.append([reference[0], difference])
except ValueError:
continue
if suitable_reference:
reference_file = min(suitable_reference, key=lambda x: x[1])[0]
visit = entry[1]
run = entry[2]
autoproc = entry[4]
reference_file_pdb = os.path.join(self.reference_directory, reference_file + '.pdb')
if os.path.isfile(os.path.join(self.reference_directory, reference_file + '.mtz')):
reference_file_mtz = ' -R ' + os.path.join(self.reference_directory, reference_file + '.mtz')
else:
reference_file_mtz = ''
if os.path.isfile(os.path.join(self.reference_directory, reference_file + '.cif')):
reference_file_cif = ' --libin ' + os.path.join(self.reference_directory, reference_file + '.cif')
else:
reference_file_cif = ''
if os.path.isfile(os.path.join(self.initial_model_directory, xtal, xtal +'.mtz')):
mtzin = os.path.join(self.initial_model_directory, xtal, xtal +'.mtz')
self.update_log.insert('adding ' + xtal + visit + '-' + run + autoproc + ' to list')
job_list.append([xtal,
visit + '-' + run + autoproc,
mtzin,
reference_file_pdb,
reference_file_mtz,
reference_file_cif])
self.status_bar.showMessage('idle')
return job_list
    def check_before_running_dimple(self, job_list,instruction):
        """Ask the user to confirm, then launch the selected initial-refinement
        pipeline for every job in *job_list* via a worker thread.

        Parameters:
            job_list: list of [xtal, label, mtzin, ref_pdb, ref_mtz, ref_cif]
                entries as built by the run_dimple_* methods.
            instruction: workflow string; selects dimple / pipedream /
                phenix.ligand_pipeline.
        """
        msgBox = QtGui.QMessageBox()
        msgBox.setText(
            "Do you really want to run {0!s} {1!s} jobs?\nNote: we will not run more than {2!s} at once on the cluster!".format(
                len(job_list),self.preferences['initial_refinement_pipeline'],self.preferences['max_queue_jobs']))
        msgBox.addButton(QtGui.QPushButton('Go'), QtGui.QMessageBox.YesRole)
        msgBox.addButton(QtGui.QPushButton('Cancel'), QtGui.QMessageBox.RejectRole)
        reply = msgBox.exec_();
        # reply == 0 corresponds to the first button added ('Go')
        if reply == 0:
            # NOTE(review): if *instruction* matches none of the three pipelines,
            # 'pipeline' stays unbound and the thread construction below raises
            # NameError — callers appear to always pass a matching string.
            if 'dimple' in instruction.lower():
                pipeline = 'dimple'
            elif 'pipedream' in instruction.lower():
                pipeline = 'pipedream'
            elif 'phenix' in instruction.lower():
                pipeline = 'phenix.ligand_pipeline'
            self.status_bar.showMessage('preparing {0!s} DIMPLE jobs'.format(len(job_list)))
            self.update_log.insert('preparing to run {0!s} DIMPLE jobs'.format(len(job_list)))
            if self.external_software['qsub_array']:
                self.update_log.insert('we will be running an ARRAY job on the DLS computer cluster')
                self.update_log.insert(
                    'please note that the maximum number of jobs that will be running at once is {0!s}'.format(
                        self.max_queue_jobs))
                self.update_log.insert(
                    'you can change this in the PREFERENCES menu, but be warned that to high a number might break the cluster!')
            self.update_log.insert('preparing input files for DIMPLE...')
            self.work_thread = XChemThread.run_dimple_on_all_autoprocessing_files_new(job_list,
                                                                                     self.initial_model_directory,
                                                                                     self.external_software,
                                                                                     self.ccp4_scratch_directory,
                                                                                     self.database_directory,
                                                                                     self.data_source_file,
                                                                                     self.max_queue_jobs,
                                                                                     self.xce_logfile,
                                                                                     self.using_remote_qsub_submission,
                                                                                     self.remote_qsub_submission,
                                                                                     self.preferences['dimple_twin_mode'],
                                                                                     pipeline )
            self.explorer_active = 1
            self.connect(self.work_thread, QtCore.SIGNAL("finished()"), self.thread_finished)
            self.connect(self.work_thread, QtCore.SIGNAL("update_progress_bar"), self.update_progress_bar)
            self.connect(self.work_thread, QtCore.SIGNAL("update_status_bar(QString)"), self.update_status_bar)
            # NOTE(review): "finished()" connected twice, as elsewhere in this file
            self.connect(self.work_thread, QtCore.SIGNAL("finished()"), self.thread_finished)
            self.connect(self.work_thread, QtCore.SIGNAL("datasource_menu_reload_samples"),
                         self.datasource_menu_reload_samples)
            self.work_thread.start()
def open_csv_file_translate_datasetID_to_sampleID(self):
file_name_temp = QtGui.QFileDialog.getOpenFileNameAndFilter(self.window, 'Open file', self.current_directory,
'*.csv')
file_name = tuple(file_name_temp)[0]
self.translate_datasetID_to_sampleID_csv_label.setText(file_name)
self.translate_datasetID_to_sampleID_file = file_name
    def update_datasets_reprocess_table(self, data_dict):
        """Rebuild the reprocessing table from a fresh diffraction-data search.

        Parameters:
            data_dict: mapping of dataset ID -> diffraction data record, as
                produced by the diffraction-data search.
        """
        self.update_log.insert('updating reprocess datasets table')
        print('updating reprocess datasets table')
        # per-dataset widget registry; [0] is the 'Run xia2' checkbox
        self.diffraction_data_table_dict = {}
        self.diffraction_data_dict = data_dict
        self.diffraction_data_search_info = 'found ' + str(len(self.diffraction_data_dict)) + ' datasets'
        self.diffraction_data_search_label.setText(self.diffraction_data_search_info)
        self.update_log.insert(self.diffraction_data_search_info)
        self.datasource_menu_reload_samples()
        # update table
        column_name = self.db.translate_xce_column_list_to_sqlite(self.datasets_reprocess_columns)
        # set rows to 0
        self.datasets_reprocess_table.setRowCount(0)
        for entry in sorted(self.diffraction_data_dict):
            self.update_log.insert(str(self.diffraction_data_dict[entry]))
            # db_dict stays empty for datasets not yet in the database
            if entry in self.xtal_db_dict:
                db_dict = self.xtal_db_dict[entry]
            else:
                db_dict = {}
            row = self.datasets_reprocess_table.rowCount()
            self.datasets_reprocess_table.insertRow(row)
            # column_name entries are (display header, sqlite column) pairs
            for column, header in enumerate(column_name):
                if header[0] == 'Dataset ID' or header[0] == 'Sample ID':
                    cell_text = QtGui.QTableWidgetItem()
                    cell_text.setText(str(entry))
                    cell_text.setTextAlignment(QtCore.Qt.AlignCenter | QtCore.Qt.AlignCenter)
                    self.datasets_reprocess_table.setItem(row, column, cell_text)
                elif header[0] == 'Run\nxia2':
                    # checkbox starts unticked; kept in diffraction_data_table_dict
                    # so run_xia2_on_selected_datasets can read it later
                    run_xia2 = QtGui.QCheckBox()
                    run_xia2.toggle()
                    self.datasets_reprocess_table.setCellWidget(row, column, run_xia2)
                    run_xia2.setChecked(False)
                    self.diffraction_data_table_dict[entry] = [run_xia2]
                else:
                    cell_text = QtGui.QTableWidgetItem()
                    if db_dict != {}:
                        if header[0] == 'DataProcessing\nStatus':
                            # colour-code the status cell
                            if str(db_dict[header[1]]) == 'running':
                                cell_text.setBackground(QtGui.QColor(100, 230, 150))
                            elif str(db_dict[header[1]]) == 'pending':
                                cell_text.setBackground(QtGui.QColor(20, 100, 230))
                            elif str(db_dict[header[1]]) == 'started':
                                cell_text.setBackground(QtGui.QColor(230, 240, 110))
                            elif str(db_dict[header[1]]) == 'finished':
                                cell_text.setBackground(QtGui.QColor(255, 255, 255))
                        cell_text.setText(str(db_dict[header[1]]))
                    else:
                        cell_text.setText('')
                    cell_text.setTextAlignment(QtCore.Qt.AlignCenter | QtCore.Qt.AlignCenter)
                    self.datasets_reprocess_table.setItem(row, column, cell_text)
def update_all_tables(self):
self.update_log.insert('checking for new reference files')
self.update_status_bar('checking for new reference files')
self.reference_file_list = self.get_reference_file_list(' ')
self.update_log.insert('updating Overview table')
self.update_status_bar('updating Overview table')
self.populate_and_update_datasource_table()
self.update_log.insert('updating Maps table')
self.update_status_bar('updating Maps table')
self.create_maps_table()
self.update_log.insert('updating PANDDA table')
self.update_status_bar('updating PANDDA table')
self.populate_pandda_analyse_input_table()
self.update_log.insert('updating REFINEMENT table')
self.update_status_bar('updating REFINEMENT table')
self.populate_and_update_refinement_table()
self.update_log.insert('updating REPROCESSING table')
self.update_status_bar('updating REPROCESSING table')
self.update_reprocessing_table()
self.update_status_bar('idle')
self.update_summary_plot()
def change_allowed_unitcell_difference_percent(self, text):
try:
self.allowed_unitcell_difference_percent = int(text)
self.settings['unitcell_difference'] = self.allowed_unitcell_difference_percent
self.update_log.insert(
'changing max allowed unit cell difference between reference and xtal to {0!s} percent'.format(
self.allowed_unitcell_difference_percent))
except ValueError:
if str(text).find('.') != -1:
self.allowed_unitcell_difference_percent = int(str(text)[:str(text).find('.')])
self.settings['unitcell_difference'] = self.allowed_unitcell_difference_percent
self.update_log.insert(
'changing max allowed unit cell difference between reference and xtal to {0!s} percent'.format(
self.allowed_unitcell_difference_percent))
else:
pass
def change_max_queue_jobs(self, text):
try:
self.max_queue_jobs = int(text)
self.settings['max_queue_jobs'] = self.max_queue_jobs
self.update_log.insert('changing max number of jobs running simultaneously on DLS cluster to {0!s}'.format(
self.max_queue_jobs))
except ValueError:
if str(text).find('.') != -1:
self.max_queue_jobs = int(str(text)[:str(text).find('.')])
self.settings['max_queue_jobs'] = self.max_queue_jobs
self.update_log.insert(
'changing max number of jobs running simultaneously on DLS cluster to {0!s}'.format(
self.max_queue_jobs))
else:
pass
def change_acceptable_low_resolution_limit(self, text):
try:
self.acceptable_low_resolution_limit_for_data = float(text)
self.settings['too_low_resolution_data'] = self.acceptable_low_resolution_limit_for_data
except ValueError:
pass
def change_filename_root(self, text):
self.filename_root = str(text)
self.settings['filename_root'] = self.filename_root
    def button_clicked(self):
        """Central click handler for the per-task 'Run'/'Status' buttons.

        If no data source is set yet, first offers to create one (or warns).
        Then identifies which workflow task's widget sent the signal and either
        runs the selected instruction or reports its status.
        """
        if not self.data_source_set:
            print('sender text bit')
            if self.sender().text() == "Create New Data\nSource (SQLite)":
                file_name = str(QtGui.QFileDialog.getSaveFileName(self.window, 'Save file', self.database_directory))
                # make sure that the file always has .sqlite extension
                if file_name.rfind('.') != -1:
                    file_name = file_name[:file_name.rfind('.')] + '.sqlite'
                else:
                    file_name = file_name + '.sqlite'
                self.db = XChemDB.data_source(file_name)
                print('==> XCE: creating new data source')
                self.db.create_empty_data_source_file()
                self.db.create_missing_columns()
                if self.data_source_file == '':
                    self.database_directory = file_name[:file_name.rfind('/')]
                    self.data_source_file = file_name[file_name.rfind('/') + 1:]
                    self.data_source_file_label.setText(os.path.join(self.database_directory, self.data_source_file))
                    self.settings['database_directory'] = self.database_directory
                    self.settings['data_source'] = self.data_source_file
                    self.data_source_set = True
            else:
                self.no_data_source_selected()
                print('No datasource selected')
                # NOTE(review): execution falls through to the dispatch loop below
                # even without a data source; the explorer_active/data_source_set
                # check there prevents anything from running. TODO confirm intended.
                pass
        # first find out which of the 'Run' or 'Status' buttons is sending
        for item in self.workflow_widget_dict:
            for widget in self.workflow_widget_dict[item]:
                if widget == self.sender():
                    # get index of item in self.workflow; Note this index should be the same as the index
                    # of the self.main_tab_widget which belongs to this task
                    task_index = self.workflow.index(item)
                    # [0] of each widget list is the instruction combobox
                    instruction = str(self.workflow_widget_dict[item][0].currentText())
                    print(instruction)
                    action = str(self.sender().text())
                    if self.main_tab_widget.currentIndex() == task_index:
                        if self.explorer_active == 0 and self.data_source_set == True:
                            if action == 'Run':
                                print('==> XCE: Remote submission status = ' + str(self.using_remote_qsub_submission))
                                # print(instruction)
                                self.prepare_and_run_task(instruction)
                            elif action == 'Status':
                                self.get_status_of_workflow_milestone(instruction)
                                # PanDDA status is derived from marker files in the
                                # pandda output directory
                                if os.path.exists(str(self.panddas_directory + '/pandda.done')):
                                    self.pandda_status = 'Finished!'
                                    self.pandda_status_label.setStyleSheet('color: green')
                                if os.path.exists(str(self.panddas_directory + '/pandda.running')):
                                    self.pandda_status = 'Running...'
                                    self.pandda_status_label.setStyleSheet('color: orange')
                                if os.path.exists(str(self.panddas_directory + '/pandda.errored')):
                                    self.pandda_status = 'Error encountered... please check the log files for pandda!'
                                    self.pandda_status_label.setStyleSheet('color: red')
                                self.pandda_status_label.setText(str('STATUS: ' + self.pandda_status))
                    else:
                        self.need_to_switch_main_tab(task_index)
def get_status_of_workflow_milestone(self, instruction):
# first update all tables
self.datasource_menu_reload_samples()
cluster_dict = XChemMain.get_jobs_running_on_cluster()
self.update_log.insert('getting status updates...')
self.status_bar.showMessage('please check terminal window for further information')
self.update_log.insert('{0!s} samples are currently in database'.format(str(len(self.xtal_db_dict))))
if 'DIMPLE' in instruction:
XChemMain.print_cluster_status_message('dimple', cluster_dict, self.xce_logfile)
elif 'Create CIF/PDB/PNG file' in instruction:
XChemMain.print_acedrg_status(self.xce_logfile, self.xtal_db_dict)
XChemMain.print_cluster_status_message('acedrg', cluster_dict, self.xce_logfile)
elif instruction.startswith('Run xia2 on selected datasets'):
XChemMain.print_cluster_status_message('xia2', cluster_dict, self.xce_logfile)
elif 'pandda' in instruction.lower():
XChemMain.print_cluster_status_message('pandda', cluster_dict, self.xce_logfile)
elif 'coot' in instruction.lower():
XChemMain.print_cluster_status_message('refmac', cluster_dict, self.xce_logfile)
    def prepare_and_run_task(self, instruction):
        """Dispatch a workflow *instruction* string (the text of the selected
        combobox entry) to the method that carries it out.

        The commented-out branches document retired instructions and are kept
        as history.
        """
        # --- dataset retrieval / rescoring ---
        if instruction == 'Get New Results from Autoprocessing':
            self.rescore = False
            self.check_for_new_autoprocessing_results()
        elif instruction == 'Rescore Datasets':
            self.rescore = True
            self.select_best_autoprocessing_result()
        # if instruction == 'Get New Results from Autoprocessing':
        #    self.check_for_new_autoprocessing_or_rescore(False)
        #    self.update_header_and_data_from_datasource()
        #    self.update_all_tables()
        #
        # elif instruction == 'Rescore Datasets':
        #    self.check_for_new_autoprocessing_or_rescore(True)
        # elif instruction == "Read PKL file":
        #    summary = pickle.load(open(self.datasets_summary_file, "rb"))
        #    self.create_widgets_for_autoprocessing_results_only(summary)
        # --- reprocessing with xia2 ---
        elif instruction == 'Run xia2 on selected datasets':
            self.run_xia2_on_selected_datasets(False)
        elif instruction == 'Run xia2 on selected datasets - overwrite':
            self.run_xia2_on_selected_datasets(True)
        # elif instruction == 'Run DIMPLE on All Autoprocessing MTZ files':
        #    self.rerun_dimple_on_all_autoprocessing_files()
        # elif instruction == 'Run initial refinement on selected MTZ files':
        #    self.run_dimple_on_selected_autoprocessing_file()
        # --- initial refinement pipelines (instruction selects the pipeline) ---
        elif instruction == 'Run DIMPLE on selected MTZ files':
            self.run_dimple_on_selected_autoprocessing_file(instruction)
        elif instruction == 'Run PIPEDREAM on selected MTZ files':
            self.run_dimple_on_selected_autoprocessing_file(instruction)
        elif instruction == 'Run PHENIX.LIGAND_PIPELINE on selected MTZ files':
            self.run_dimple_on_selected_autoprocessing_file(instruction)
        # elif instruction == 'Remove selected initial refinement files':
        #    self.remove_selected_dimple_files()
        elif instruction == 'Remove selected DIMPLE files':
            self.remove_selected_dimple_files(instruction)
        elif instruction == 'Remove selected PIPEDREAM files':
            self.remove_selected_dimple_files(instruction)
        elif instruction == 'Remove selected PHENIX.LIGAND_PIPELINE files':
            self.remove_selected_dimple_files(instruction)
        # elif instruction == 'Set only results from selected pipeline':
        #    self.set_results_from_selected_pipeline()
        elif instruction == 'Set DIMPLE output':
            self.set_results_from_selected_pipeline(instruction)
        elif instruction == 'Set PIPEDREAM output':
            self.set_results_from_selected_pipeline(instruction)
        elif instruction == 'Set PHENIX.LIGAND_PIPELINE output':
            self.set_results_from_selected_pipeline(instruction)
        # --- compound restraint generation ---
        # elif instruction == 'Create CIF/PDB/PNG file of ALL compounds':
        #    self.create_cif_pdb_png_files('ALL')
        # elif instruction == 'Create CIF/PDB/PNG file of NEW compounds':
        #    self.create_cif_pdb_png_files('NEW')
        elif instruction == 'Create CIF/PDB/PNG file of SELECTED compounds':
            self.create_cif_pdb_png_files('SELECTED')
        elif instruction == 'Merge ligand CIF file with selected compounds':
            self.merge_cif_files('merge')
        elif instruction == 'Restore original CIF file of selected compounds':
            self.merge_cif_files('restore')
        elif instruction == 'Fit ligands into maps after initial refinement':
            self.fit_ligands_into_dimple_maps()
        # --- PanDDA ---
        elif instruction == 'pandda.analyse':
            self.run_pandda_analyse('production_run')
        elif instruction == 'pandda.analyse (PanDDA2)':
            self.run_pandda_analyse('production_run_pandda_two')
        elif instruction == 'pre-run for ground state model':
            self.run_pandda_analyse('pre_run')
        elif instruction == 'pandda.inspect':
            self.run_pandda_inspect()
        elif instruction == 'run pandda.inspect at home':
            self.run_pandda_inspect_at_home()
        elif instruction == 'Export NEW PANDDA models':
            update_datasource_only = False
            which_models = 'new'
            self.run_pandda_export(update_datasource_only, which_models)
        elif instruction == 'Export ALL PANDDA models':
            update_datasource_only = False
            which_models = 'all'
            self.run_pandda_export(update_datasource_only, which_models)
        elif instruction == 'Export SELECTED PANDDA models':
            update_datasource_only = False
            which_models = 'selected'
            self.run_pandda_export(update_datasource_only, which_models)
        elif instruction == 'refine ALL bound-state models with BUSTER':
            self.run_refine_bound_state_with_buster('all')
        elif instruction == 'refine NEW bound-state models with BUSTER':
            self.run_refine_bound_state_with_buster('new')
        elif instruction == 'refine ALL bound-state models with BUSTER (no sanity check)':
            self.run_refine_bound_state_with_buster('allnocheck')
        elif instruction == 'refine NEW bound-state models with BUSTER (no sanity check)':
            self.run_refine_bound_state_with_buster('newnocheck')
        # elif instruction == 'refine NEW bound-state models with BUSTER - NEW':
        #    self.run_refine_bound_state_with_buster_new('new')
        elif instruction == 'cluster datasets':
            self.cluster_datasets_for_pandda()
        elif instruction == 'Update datasource with results from pandda.inspect':
            update_datasource_only = True
            which_models = 'all'
            self.run_pandda_export(update_datasource_only, which_models)
        elif instruction == 'Show HTML summary':
            self.show_pandda_html_summary()
        elif instruction == 'Event Map -> SF':
            self.convert_event_maps_to_SF()
        elif instruction == 'apo -> mmcif':
            self.convert_apo_to_mmcif()
        elif instruction == 'check modelled ligands':
            self.compare_modelled_ligands_and_panddaTable()
        # --- COOT launchers: pick the interface flavour, then start the thread ---
        elif instruction.startswith("Open COOT") or instruction == 'Build ground state model':
            if not self.coot_running:
                self.update_log.insert('starting coot...')
                if instruction == "Open COOT":
                    interface = 'new'
                elif instruction == "Open COOT - REFMAC refinement -":
                    interface = 'new'
                elif instruction == "Open COOT - test -":
                    interface = 'test'
                elif instruction == "Open COOT for old PanDDA":
                    interface = 'panddaV1'
                elif instruction == 'Build ground state model':
                    interface = 'reference'
                elif instruction == 'Open COOT - BUSTER refinement -':
                    interface = 'buster'
                elif instruction == 'Open COOT - dimple_twin -':
                    interface = 'dimple_twin'
                else:
                    interface = 'old'
                # print self.settings
                self.work_thread = XChemThread.start_COOT(self.settings, interface)
                self.connect(self.work_thread, QtCore.SIGNAL("finished()"), self.thread_finished)
                self.work_thread.start()
        elif instruction == 'Update Deposition Table':
            self.update_deposition_table()
    def check_status_create_png_of_soaked_compound(self):
        """Report progress of compound-restraint (CIF/PNG) generation.

        Scans every <initial_model_directory>/*/compound folder: a
        RESTRAINTS_IN_PROGRESS marker means the job is still running, a *.cif
        file means it finished; the summary goes to the status bar.
        """
        number_of_samples = 0
        running = 0
        timestamp_list = []
        cif_file_generated = 0
        for folder in glob.glob(os.path.join(self.initial_model_directory, '*', 'compound')):
            number_of_samples += 1
            if os.path.isfile(os.path.join(folder, 'RESTRAINTS_IN_PROGRESS')):
                running += 1
                # mtime of the marker file approximates the submission time
                timestamp = datetime.fromtimestamp(
                    os.path.getmtime(os.path.join(folder, 'RESTRAINTS_IN_PROGRESS'))).strftime('%Y-%m-%d %H:%M:%S')
                timestamp_list.append(timestamp)
            for cif_file in glob.glob(os.path.join(folder, '*.cif')):
                if os.path.isfile(cif_file):
                    cif_file_generated += 1
        if timestamp_list:
            # ISO-formatted strings sort chronologically, so max() is the latest
            last_timestamp = max(timestamp_list)
        else:
            last_timestamp = 'n/a'
        message = 'Datasets: ' + str(number_of_samples) + ', jobs running: ' + str(running) + ', jobs finished: ' + str(
            cif_file_generated) + ', last job submmitted: ' + str(last_timestamp)
        self.status_bar.showMessage(message)
        # NOTE(review): from here on, 'start_thread' and 'rescore_only' are used
        # but never defined in this method — this section looks like it belongs
        # to a different method (e.g. the autoprocessing-results check) and will
        # raise NameError if reached. TODO confirm / restore the missing code.
        if start_thread:
            if self.target == '=== SELECT TARGET ===':
                msgBox = QtGui.QMessageBox()
                warning = ('*** WARNING ***\n'
                           'You did not select a target!\n'
                           'In this case we will only parse the project directory!\n'
                           'Please note that this option is usually only useful in case you reprocessed your data.\n'
                           'Do you want to continue?')
                msgBox.setText(warning)
                msgBox.addButton(QtGui.QPushButton('Yes'), QtGui.QMessageBox.YesRole)
                msgBox.addButton(QtGui.QPushButton('No'), QtGui.QMessageBox.RejectRole)
                reply = msgBox.exec_();
                if reply == 0:
                    start_thread = True
                else:
                    start_thread = False
            else:
                start_thread = True
        if start_thread:
            self.work_thread = XChemThread.read_autoprocessing_results_from_disc(self.visit_list,
                                                                                 self.target,
                                                                                 self.reference_file_list,
                                                                                 self.database_directory,
                                                                                 self.data_collection_dict,
                                                                                 self.preferences,
                                                                                 self.datasets_summary_file,
                                                                                 self.initial_model_directory,
                                                                                 rescore_only,
                                                                                 self.acceptable_low_resolution_limit_for_data,
                                                                                 os.path.join(self.database_directory,
                                                                                              self.data_source_file),
                                                                                 self.xce_logfile)
            self.explorer_active = 1
            self.connect(self.work_thread, QtCore.SIGNAL("update_progress_bar"), self.update_progress_bar)
            self.connect(self.work_thread, QtCore.SIGNAL("update_status_bar(QString)"), self.update_status_bar)
            self.connect(self.work_thread, QtCore.SIGNAL("finished()"), self.thread_finished)
            self.connect(self.work_thread, QtCore.SIGNAL("create_widgets_for_autoprocessing_results_only"),
                         self.create_widgets_for_autoprocessing_results_only)
            self.work_thread.start()
    def save_files_to_initial_model_folder(self):
        """Copy the chosen autoprocessing results into the initial-model
        project folders via a worker thread."""
        self.work_thread = XChemThread.save_autoprocessing_results_to_disc(self.dataset_outcome_dict,
                                                                           self.data_collection_table_dict,
                                                                           self.data_collection_column_three_dict,
                                                                           self.data_collection_dict,
                                                                           self.database_directory,
                                                                           self.data_source_file,
                                                                           self.initial_model_directory,
                                                                           self.preferences,
                                                                           self.datasets_summary_file)
        self.explorer_active = 1
        self.connect(self.work_thread, QtCore.SIGNAL("finished()"), self.thread_finished)
        self.connect(self.work_thread, QtCore.SIGNAL("update_progress_bar"), self.update_progress_bar)
        self.connect(self.work_thread, QtCore.SIGNAL("update_status_bar(QString)"), self.update_status_bar)
        # NOTE(review): "finished()" is connected twice (thread_finished runs twice
        # per completion); the same pattern recurs throughout this file — TODO confirm.
        self.connect(self.work_thread, QtCore.SIGNAL("finished()"), self.thread_finished)
        self.work_thread.start()
def run_pandda_analyse(self, run):
        """Collect pandda.analyse settings from the GUI and launch the run.

        run -- 'pre_run': ask for confirmation (with an editable appendix tag)
               and start a reduced test run;
               'production_run_pandda_two': use the PanDDA2 wrapper;
               anything else: start a normal pandda.analyse run.
        """
        # gather every pandda.analyse option from its GUI widget;
        # note that 'data_dir' and 'pandda_dir_structure' both read the same entry
        pandda_params = {
            'data_dir': str(self.pandda_input_data_dir_entry.text()),
            'out_dir': str(self.pandda_output_data_dir_entry.text()),
            'submit_mode': str(self.pandda_submission_mode_selection_combobox.currentText()),
            'nproc': str(self.pandda_nproc_entry.text()),
            'min_build_datasets': str(self.pandda_min_build_dataset_entry.text()),
            'pdb_style': str(self.pandda_pdb_style_entry.text()),
            'mtz_style': str(self.pandda_mtz_style_entry.text()),
            'sort_event': str(self.pandda_sort_event_combobox.currentText()),
            'average_map': str(self.pandda_calc_map_combobox.currentText()),
            'max_new_datasets': str(self.pandda_max_new_datasets_entry.text()),
            'grid_spacing': str(self.pandda_grid_spacing_entry.text()),
            'keyword_arguments': str(self.pandda_keyword_arguments_entry.text()),
            'pandda_dir_structure': str(self.pandda_input_data_dir_entry.text()),
            'perform_diffraction_data_scaling': str(self.wilson_checkbox.isChecked()),
            'filter_pdb': str(self.pandda_reference_file_selection_combobox.currentText()),
            'reference_dir': self.reference_directory,
            'appendix': '',
            # number of datasets with a dimple.pdb in the project directory
            'N_datasets': len(glob.glob(os.path.join(self.initial_model_directory, '*', 'dimple.pdb'))),
            'write_mean_map': 'interesting',
            'pandda_table': self.pandda_analyse_data_table,
            'use_remote': self.using_remote_qsub_submission,
            'remote_string': self.remote_qsub_submission
        }
        if run == 'pre_run':
            # confirmation dialog with an editable 'appendix' tag (default 'pre')
            msgBox = QtGui.QMessageBox()
            msgBoxLayout = msgBox.layout()
            vbox = QtGui.QVBoxLayout()
            vbox.addWidget(QtGui.QLabel(XChemToolTips.pandda_pre_run(self.reference_directory)))
            hbox = QtGui.QHBoxLayout()
            hbox.addWidget(QtGui.QLabel('appendix:'))
            appendix = QtGui.QLineEdit()
            appendix.setText('pre')
            appendix.setFixedWidth(200)
            hbox.addWidget(appendix)
            vbox.addLayout(hbox)
            msgBoxLayout.addLayout(vbox, 0, 0)
            msgBox.addButton(QtGui.QPushButton('Go'), QtGui.QMessageBox.YesRole)
            msgBox.addButton(QtGui.QPushButton('Cancel'), QtGui.QMessageBox.RejectRole)
            reply = msgBox.exec_();
            # 0 == 'Go' (first button added, YesRole); anything else aborts
            if reply == 0:
                # pre-run overrides: cap at 100 datasets and write all mean maps
                pandda_params['appendix'] = str(appendix.text())
                pandda_params['max_new_datasets'] = '100'
                pandda_params['N_datasets'] = 100
                pandda_params['write_mean_map'] = 'all'
            else:
                return None
        self.update_log.insert('preparing pandda.analyse input script')
        # PanDDA2 and classic pandda.analyse use different wrapper threads
        if run == 'production_run_pandda_two':
            self.work_thread = XChemPANDDA.run_pandda_two_analyse(pandda_params, self.xce_logfile,
                                                                  os.path.join(self.database_directory, self.data_source_file))
        else:
            self.work_thread = XChemPANDDA.run_pandda_analyse(pandda_params, self.xce_logfile,
                                                              os.path.join(self.database_directory, self.data_source_file))
        #self.connect(self.work_thread, QtCore.SIGNAL("datasource_menu_reload_samples"),
        #self.datasource_menu_reload_samples)
        self.connect(self.work_thread, QtCore.SIGNAL("finished()"), self.thread_finished)
        self.work_thread.start()
def cluster_datasets_for_pandda(self):
        """Cluster datasets with giant.cluster_mtzs_and_pdbs before a PanDDA run."""
        pandda_params = {
            'out_dir': str(self.pandda_output_data_dir_entry.text()),
            'pdb_style': str(self.pandda_pdb_style_entry.text()),
            'mtz_style': str(self.pandda_mtz_style_entry.text())
        }
        self.update_log.insert('starting giant.cluster_mtzs_and_pdbs')
        # NOTE(review): 'run_pandda_analyse' below is a bare name; in this scope it
        # is unresolved (the method of the same name would be self.run_pandda_analyse)
        # and would raise NameError when this line executes - verify whether a bound
        # method or a flag was intended and what giant_cluster_datasets expects.
        self.work_thread = XChemPANDDA.giant_cluster_datasets(self.initial_model_directory, pandda_params,
                                                              self.xce_logfile, os.path.join(self.database_directory,
                                                                                             self.data_source_file),
                                                              run_pandda_analyse)
        self.explorer_active = 1
        self.connect(self.work_thread, QtCore.SIGNAL("update_progress_bar"), self.update_progress_bar)
        self.connect(self.work_thread, QtCore.SIGNAL("update_status_bar(QString)"), self.update_status_bar)
        self.connect(self.work_thread, QtCore.SIGNAL("datasource_menu_reload_samples"),
                     self.datasource_menu_reload_samples)
        self.connect(self.work_thread, QtCore.SIGNAL("finished()"), self.thread_finished)
        self.work_thread.start()
def run_pandda_inspect(self):
        """Start pandda.inspect in a worker thread for the current output directory."""
        pandda_dir = str(self.pandda_output_data_dir_entry.text())
        self.settings['panddas_directory'] = pandda_dir
        print('==> XCE: starting pandda.inspect')
        inspect_thread = XChemThread.start_pandda_inspect(self.settings, self.xce_logfile)
        self.work_thread = inspect_thread
        self.connect(inspect_thread, QtCore.SIGNAL("finished()"), self.thread_finished)
        inspect_thread.start()
def run_pandda_inspect_at_home(self):
        """Run pandda.inspect on a local copy of the PanDDA directory.

        BUGFIX: the original connected the progress/status signals AFTER
        work_thread.start() (a race in which early signals are silently lost)
        and connected "finished()" twice, making thread_finished() run twice.
        All signals are now wired up exactly once, before the thread starts.
        """
        self.work_thread = XChemPANDDA.run_pandda_inspect_at_home(self.panddas_directory, self.xce_logfile)
        self.connect(self.work_thread, QtCore.SIGNAL("update_progress_bar"), self.update_progress_bar)
        self.connect(self.work_thread, QtCore.SIGNAL("update_status_bar(QString)"), self.update_status_bar)
        self.connect(self.work_thread, QtCore.SIGNAL("finished()"), self.thread_finished)
        self.work_thread.start()
def convert_event_maps_to_SF(self):
        """Convert PanDDA event maps for modelled ligands into MTZ files."""
        self.update_log.insert('converting all event maps in {0!s} to mtz files'.format(self.initial_model_directory))
        worker = XChemPANDDA.find_event_map_for_ligand(self.initial_model_directory,
                                                       self.xce_logfile, self.external_software)
        self.work_thread = worker
        self.explorer_active = 1
        for signal, slot in (("update_progress_bar", self.update_progress_bar),
                             ("update_status_bar(QString)", self.update_status_bar),
                             ("finished()", self.thread_finished)):
            self.connect(worker, QtCore.SIGNAL(signal), slot)
        worker.start()
def convert_apo_to_mmcif(self):
        """Convert PanDDA apo structures to mmCIF format in a worker thread."""
        worker = XChemPANDDA.convert_apo_structures_to_mmcif(self.panddas_directory,
                                                             self.xce_logfile)
        self.work_thread = worker
        self.explorer_active = 1
        for signal, slot in (("update_progress_bar", self.update_progress_bar),
                             ("update_status_bar(QString)", self.update_status_bar),
                             ("finished()", self.thread_finished)):
            self.connect(worker, QtCore.SIGNAL(signal), slot)
        worker.start()
def compare_modelled_ligands_and_panddaTable(self):
        """Check that ligands modelled in refine.pdb agree with panddaTable entries."""
        self.update_log.insert('checking agreement of ligands in refine.pdb and entries in panddaTable')
        worker = XChemPANDDA.check_number_of_modelled_ligands(self.initial_model_directory,
                                                              self.xce_logfile,
                                                              os.path.join(self.database_directory,
                                                                           self.data_source_file))
        self.work_thread = worker
        self.explorer_active = 1
        for signal, slot in (("update_progress_bar", self.update_progress_bar),
                             ("update_status_bar(QString)", self.update_status_bar),
                             ("finished()", self.thread_finished),
                             ("show_error_dict", self.show_error_dict)):
            self.connect(worker, QtCore.SIGNAL(signal), slot)
        worker.start()
def run_pandda_export(self, update_datasource_only, which_models):
        """Export PanDDA models and/or update the data source.

        update_datasource_only -- if True, only write pandda.inspect results to
                                  the database; otherwise also export models and
                                  launch initial refinement.
        which_models           -- 'all' exports every model (after an explicit
                                  overwrite warning); any other value exports
                                  only new models.
        """
        # snapshot of the current pandda GUI settings, passed to the export thread
        pandda_params = {
            'data_dir': str(self.pandda_input_data_dir_entry.text()),
            'out_dir': str(self.pandda_output_data_dir_entry.text()),
            'submit_mode': str(self.pandda_submission_mode_selection_combobox.currentText()),
            'nproc': str(self.pandda_nproc_entry.text()),
            'min_build_datasets': str(self.pandda_min_build_dataset_entry.text()),
            'pdb_style': str(self.pandda_pdb_style_entry.text()),
            'mtz_style': str(self.pandda_mtz_style_entry.text()),
            'sort_event': str(self.pandda_sort_event_combobox.currentText()),
            'average_map': str(self.pandda_calc_map_combobox.currentText()),
            'max_new_datasets': str(self.pandda_max_new_datasets_entry.text()),
            'grid_spacing': str(self.pandda_grid_spacing_entry.text()),
            'pandda_dir_structure': str(self.pandda_input_data_dir_entry.text()),
            'perform_diffraction_data_scaling': str(self.wilson_checkbox.isChecked()),
            'filter_pdb': str(self.pandda_reference_file_selection_combobox.currentText()),
            'reference_dir': self.reference_directory,
            'appendix': '',
            'N_datasets': len(glob.glob(os.path.join(self.initial_model_directory, '*', 'dimple.pdb'))),
            'write_mean_map': 'interesting',
            'pandda_table': self.pandda_analyse_data_table,
            'use_remote': self.using_remote_qsub_submission,
            'remote_string': self.remote_qsub_submission
        }
        self.settings['panddas_directory'] = str(self.pandda_output_data_dir_entry.text())
        if update_datasource_only:
            self.update_log.insert('updating data source with results from pandda.inspect')
        else:
            self.update_log.insert(
                'exporting PANDDA models, updating data source and launching inital refinement for new models')
        start_thread = False
        if which_models == 'all':
            # exporting everything can clobber manual work, so ask for confirmation
            self.update_log.insert('exporting ALL models! *** WARNING *** This may overwrite previous refinements!!!')
            msgBox = QtGui.QMessageBox()
            msgBox.setText("*** WARNING ***\nThis will overwrite all your manual selections!\nDo you want to continue?")
            msgBox.addButton(QtGui.QPushButton('Yes'), QtGui.QMessageBox.YesRole)
            msgBox.addButton(QtGui.QPushButton('No'), QtGui.QMessageBox.RejectRole)
            reply = msgBox.exec_();
            # 0 == 'Yes' (first button added, YesRole)
            if reply == 0:
                if update_datasource_only:
                    self.update_log.insert('will update panddaTable in database only')
                else:
                    self.update_log.insert('will export ALL models!')
                start_thread = True
            else:
                start_thread = False
        else:
            # default: only models not previously exported
            self.update_log.insert('exporting new models only')
            start_thread = True
        if start_thread:
            self.work_thread = XChemPANDDA.run_pandda_export(self.panddas_directory,
                                                             os.path.join(self.database_directory,
                                                                          self.data_source_file),
                                                             self.initial_model_directory, self.xce_logfile,
                                                             update_datasource_only, which_models, pandda_params)
            self.connect(self.work_thread, QtCore.SIGNAL("finished()"), self.thread_finished)
            self.work_thread.start()
# def run_refine_bound_state_with_buster(self,which_models):
# start_thread = True
# if start_thread:
# self.work_thread = XChemPANDDA.refine_bound_state_with_buster(self.panddas_directory,
# os.path.join(self.database_directory,
# self.data_source_file),
# self.initial_model_directory, self.xce_logfile,
# which_models)
# self.connect(self.work_thread, QtCore.SIGNAL("finished()"), self.thread_finished)
# self.work_thread.start()
def run_refine_bound_state_with_buster(self, which_models):
        """Export ligand-bound models and start their refinement in a worker thread."""
        # the original guarded this with an always-True flag; launch directly
        worker = XChemPANDDA.export_and_refine_ligand_bound_models(self.panddas_directory,
                                                                   os.path.join(self.database_directory,
                                                                                self.data_source_file),
                                                                   self.initial_model_directory,
                                                                   self.xce_logfile,
                                                                   which_models)
        self.work_thread = worker
        self.connect(worker, QtCore.SIGNAL("finished()"), self.thread_finished)
        worker.start()
def show_pandda_html_summary(self):
        """Load and display the initial, analyse and inspect PanDDA HTML reports."""
        for view, html_file in ((self.pandda_initial_html, self.pandda_initial_html_file),
                                (self.pandda_analyse_html, self.pandda_analyse_html_file)):
            view.load(QtCore.QUrl(html_file))
            view.show()
        self.add_map_html()
        self.pandda_inspect_html.load(QtCore.QUrl(self.pandda_inspect_html_file))
        self.pandda_inspect_html.show()
def create_cif_pdb_png_files(self, todo):
        """Generate ligand restraint files (cif/pdb/png) for compounds via ACEDRG.

        todo -- 'ALL':      every crystal with a SMILES string in the database;
                'NEW':      only crystals without an existing <compoundID>.cif;
                'SELECTED': only crystals ticked in the maps table.
        """
        # every crystal that has a compound SMILES recorded in the database
        tmp = self.db.execute_statement(
            "select CrystalName,CompoundCode,CompoundSmiles from mainTable where CrystalName is not '' and CompoundSmiles is not '' and CompoundSmiles is not NULL;")
        compound_list = []
        for item in tmp:
            # fall back to the generic ID 'compound' if no compound code is set
            if str(item[1]) == '' or str(item[1]) == 'NULL':
                compoundID = 'compound'
            else:
                compoundID = str(item[1])
            if todo == 'ALL':
                compound_list.append([str(item[0]), compoundID, str(item[2])])
            elif todo == 'NEW':
                if not os.path.isfile(os.path.join(self.initial_model_directory, str(item[0]), compoundID + '.cif')):
                    compound_list.append([str(item[0]), compoundID, str(item[2])])
            elif todo == 'SELECTED':
                # element [0] of the dimple dict entry is the row's checkbox widget
                if str(item[0]) in self.initial_model_dimple_dict:
                    if self.initial_model_dimple_dict[str(item[0])][0].isChecked():
                        compound_list.append([str(item[0]), compoundID, str(item[2])])
        if compound_list:
            self.update_log.insert(
                'trying to create cif and pdb files for ' + str(len(compound_list)) + ' compounds using ACEDRG...')
            # report how the jobs will be executed (cluster, array job, or locally)
            if self.external_software['qsub']:
                self.update_log.insert(
                    'will try sending ' + str(len(compound_list)) + ' jobs to your computer cluster!')
            elif self.external_software['qsub_array']:
                self.update_log.insert('will try sending ' + str(
                    len(compound_list)) + ' jobs as part of an ARRAY job to your computer cluster!')
            else:
                self.update_log.insert('apparently no cluster available, so will run ' + str(
                    len(compound_list)) + ' sequential jobs on one core of your local machine.')
                self.update_log.insert('this could take a while...')
            self.explorer_active = 1
            self.work_thread = XChemThread.create_png_and_cif_of_compound(self.external_software,
                                                                          self.initial_model_directory,
                                                                          compound_list,
                                                                          self.database_directory,
                                                                          self.data_source_file,
                                                                          todo,
                                                                          self.ccp4_scratch_directory,
                                                                          self.xce_logfile,
                                                                          self.max_queue_jobs,
                                                                          self.restraints_program)
            self.connect(self.work_thread, QtCore.SIGNAL("update_progress_bar"), self.update_progress_bar)
            self.connect(self.work_thread, QtCore.SIGNAL("update_status_bar(QString)"), self.update_status_bar)
            self.connect(self.work_thread, QtCore.SIGNAL("finished()"), self.thread_finished)
            self.connect(self.work_thread, QtCore.SIGNAL("datasource_menu_reload_samples"),
                         self.datasource_menu_reload_samples)
            self.work_thread.start()
def fit_ligands_into_dimple_maps(self):
        """Auto-fit ligands into initial (dimple) maps for the crystals selected
        in the maps table, running the jobs in a background thread.
        """
        # every crystal that has a compound SMILES recorded in the database
        tmp = self.db.execute_statement(
            "select CrystalName,CompoundCode,CompoundSmiles from mainTable where CrystalName is not '' and CompoundSmiles is not '' and CompoundSmiles is not NULL;")
        compound_list = []
        for item in tmp:
            # fall back to the generic ID 'compound' if no compound code is set
            if str(item[1]) == '' or str(item[1]) == 'NULL':
                compoundID = 'compound'
            else:
                compoundID = str(item[1])
            # only crystals whose checkbox in the maps table is ticked
            if str(item[0]) in self.initial_model_dimple_dict:
                if self.initial_model_dimple_dict[str(item[0])][0].isChecked():
                    compound_list.append([str(item[0]), compoundID, str(item[2])])
        if compound_list:
            self.update_log.insert(
                'trying to auto-fitting into inital maps for ' + str(len(compound_list)) + ' compounds...')
            # report how the jobs will be executed (cluster, array job, or locally)
            if self.external_software['qsub']:
                self.update_log.insert(
                    'will try sending ' + str(len(compound_list)) + ' jobs to your computer cluster!')
            elif self.external_software['qsub_array']:
                self.update_log.insert('will try sending ' + str(
                    len(compound_list)) + ' jobs as part of an ARRAY job to your computer cluster!')
            else:
                self.update_log.insert('apparently no cluster available, so will run ' + str(
                    len(compound_list)) + ' sequential jobs on one core of your local machine.')
                self.update_log.insert('this could take a while...')
            self.explorer_active = 1
            self.work_thread = XChemThread.fit_ligands(self.external_software,
                                                       self.initial_model_directory,
                                                       compound_list,
                                                       self.database_directory,
                                                       self.data_source_file,
                                                       self.ccp4_scratch_directory,
                                                       self.xce_logfile,
                                                       self.max_queue_jobs)
            self.connect(self.work_thread, QtCore.SIGNAL("update_progress_bar"), self.update_progress_bar)
            self.connect(self.work_thread, QtCore.SIGNAL("update_status_bar(QString)"), self.update_status_bar)
            self.connect(self.work_thread, QtCore.SIGNAL("finished()"), self.thread_finished)
            self.connect(self.work_thread, QtCore.SIGNAL("datasource_menu_reload_samples"),
                         self.datasource_menu_reload_samples)
            self.work_thread.start()
def merge_cif_files(self, todo):
        """Merge a second CIF file into (or restore) the per-crystal ligand
        restraint files, for crystals selected in the maps table.

        todo -- 'merge': validate self.second_cif_file (its compound code must
                not be LIG or DRG), ask for confirmation, then merge;
                'restore': put the original CIF files back.
        """
        start_thread = False
        if todo == 'merge':
            self.update_log.insert('trying to merge %s with ligand restraint files in project directory' %self.second_cif_file)
        elif todo == 'restore':
            self.update_log.insert('restoring original CIF files')
            start_thread = True
        if todo == 'merge':
            if os.path.isfile(str(self.second_cif_file)):
                self.update_log.insert('checking compound code of second CIF file (%s)' % self.second_cif_file)
                self.update_log.insert('Note: LIG and DRG are not allowed!')
                # local import: iotbx (cctbx) is only needed for this validation
                import iotbx.cif
                cif_model = iotbx.cif.reader(file_path=self.second_cif_file).model()
                cif_block = cif_model["comp_list"]
                ligID = cif_block["_chem_comp.id"]
                self.update_log.insert('found the following compound codes in the supplied CIF file: %s' % str(list(ligID)))
                # LIG/DRG would clash with the restraint files being merged into
                if 'LIG' in list(ligID) or 'DRG' in list(ligID):
                    self.update_log.error('please change compound code to something other than LIG or DRG')
                    start_thread = False
                else:
                    start_thread = True
            else:
                self.update_log.error(XChemToolTips.second_cif_file_not_exists())
                start_thread = False
        if start_thread:
            # final confirmation dialog; 0 == 'OK' (first button added, YesRole)
            msgBox = QtGui.QMessageBox()
            msgBox.setText(XChemToolTips.second_cif_file_info(self.second_cif_file))
            msgBox.addButton(QtGui.QPushButton('OK'), QtGui.QMessageBox.YesRole)
            msgBox.addButton(QtGui.QPushButton('Cancel'), QtGui.QMessageBox.RejectRole)
            reply = msgBox.exec_();
            if reply == 0:
                start_thread = True
            else:
                start_thread = False
        else:
            # NOTE(review): this branch is also reached when the user merely has
            # nothing to do yet; the message assumes a preceding validation error
            self.status_bar.showMessage('Error. Please check terminal window for further information')
        # collect the crystals flagged (checkbox ticked) for merging
        tmp = self.db.execute_statement(
            "select CrystalName,CompoundCode from mainTable where CrystalName is not '' and CompoundSmiles is not '' and CompoundSmiles is not NULL;")
        compound_list = []
        for item in tmp:
            xtal = str(item[0])
            compoundID = str(item[1])
            if compoundID == '' or compoundID == 'NULL':
                self.update_log.warning('%s: no compound ID in database; skipping...' %xtal)
            else:
                if str(item[0]) in self.initial_model_dimple_dict:
                    if self.initial_model_dimple_dict[str(item[0])][0].isChecked():
                        self.update_log.warning('%s: %s is flagged for merging' % (xtal, compoundID))
                        compound_list.append([xtal, compoundID])
        if compound_list == []:
            self.update_log.error('Either no compound ID information in database or no sample selected!')
            start_thread = False
        if start_thread:
            self.explorer_active = 1
            self.work_thread = XChemThread.merge_cif_files(self.initial_model_directory,
                                                           self.xce_logfile,
                                                           self.second_cif_file,
                                                           compound_list,
                                                           todo)
            self.connect(self.work_thread, QtCore.SIGNAL("update_progress_bar"), self.update_progress_bar)
            self.connect(self.work_thread, QtCore.SIGNAL("update_status_bar(QString)"), self.update_status_bar)
            self.connect(self.work_thread, QtCore.SIGNAL("finished()"), self.thread_finished)
            self.connect(self.work_thread, QtCore.SIGNAL("datasource_menu_reload_samples"),
                         self.datasource_menu_reload_samples)
            self.work_thread.start()
def update_deposition_table(self):
        """Create/refresh depositTable records for all deposit-ready PanDDA models.

        Aborts (returns None) if any sample contains ligands that are not ready
        for deposition, listing the offending entries in the log.
        """
        # check if PanDDA models are ready for deposition
        depositChecks = XChemDeposit.update_deposition_table(
            os.path.join(self.database_directory, self.data_source_file))
        toDeposit, mismatch = depositChecks.PanDDA_models_to_deposit()
        if mismatch != {}:
            self.update_log.insert('The following samples contain ligand that are not ready for deposition:')
            # NOTE(review): if mismatch is a plain dict, iterating it yields its
            # keys, so entry[0]..entry[4] index into each key; this only works if
            # the keys are tuples/lists with >= 5 elements - verify against
            # XChemDeposit.PanDDA_models_to_deposit()
            for entry in mismatch:
                self.update_log.insert(entry[0] + ' -> site: ' + entry[1] + ' @ ' + entry[2] + ' => ' + entry[4])
            self.update_log.insert('You need to change this before you can continue!')
            return None
        for xtal in toDeposit:
            self.db.update_insert_depositTable(xtal, {})
def show_html_summary_and_diffraction_image(self):
        """Open the html summary for whichever dataset button emitted the signal."""
        clicked = self.sender()
        for sample_id, widgets in self.albula_button_dict.items():
            if widgets[0] == clicked:
                print('==> XCE: showing html summary in firefox')
                self.show_html_summary_in_firefox(sample_id)
def need_to_switch_main_tab(self, task_index):
        """Ask the user whether to switch the main tab; switch to task_index on 'Yes'."""
        msgBox = QtGui.QMessageBox()
        msgBox.setText("Need to switch main tab before you can launch this job")
        msgBox.addButton(QtGui.QPushButton('Yes'), QtGui.QMessageBox.YesRole)
        msgBox.addButton(QtGui.QPushButton('No'), QtGui.QMessageBox.RejectRole)
        # 0 == 'Yes' (first button added, YesRole)
        if msgBox.exec_() == 0:
            self.main_tab_widget.setCurrentIndex(task_index)
def check_write_permissions_of_data_source(self):
        """Return True if the data source file is writable, else warn and return False."""
        data_source = os.path.join(self.database_directory, self.data_source_file)
        if os.access(data_source, os.W_OK):
            return True
        QtGui.QMessageBox.warning(self.window, "Data Source Problem",
                                  '\nData Source is Read-Only\n',
                                  QtGui.QMessageBox.Cancel, QtGui.QMessageBox.NoButton,
                                  QtGui.QMessageBox.NoButton)
        return False
def no_data_source_selected(self):
        """Tell the user that no data source file is set and how to select/create one."""
        message = ('Please set or create a data source file\n'
                   'Options:\n'
                   '1. Use an existing file:\n'
                   '- Settings -> Select Data Source File\n'
                   '2. Create a new file\n'
                   '- Data Source -> Create New Data\nSource (SQLite)')
        QtGui.QMessageBox.warning(self.window, "Data Source Problem", message,
                                  QtGui.QMessageBox.Cancel, QtGui.QMessageBox.NoButton,
                                  QtGui.QMessageBox.NoButton)
def update_progress_bar(self, progress):
        """Set the main-window progress bar to *progress*."""
        self.progress_bar.setValue(progress)
def update_status_bar(self, message):
        """Show *message* in the main-window status bar."""
        self.status_bar.showMessage(message)
def thread_finished(self):
        """Reset GUI state (busy flag, progress bar, status bar) after a worker thread ends."""
        self.explorer_active = 0
        self.update_progress_bar(0)
        self.update_status_bar('idle')
def show_error_dict(self, errorDict):
        """Display the collected per-sample error messages in a message box."""
        lines = []
        for key in errorDict:
            lines.append('{0!s}:\n'.format(key))
            for entry in errorDict[key]:
                lines.append(' - ' + entry + '\n')
        msgBox = QtGui.QMessageBox()
        msgBox.setText(''.join(lines))
        msgBox.exec_()
def create_widgets_for_autoprocessing_results_only(self, data_dict):
        """Build/refresh the per-crystal tables showing auto-processing results.

        data_dict -- data-collection records keyed by sample ID; stored as
                     self.data_collection_dict. Each record is a list of entries
                     whose first element tags the kind ('image' or 'logfile').
        """
        self.status_bar.showMessage('Building details table for data processing results')
        self.data_collection_dict = data_dict
        column_name = ['Program',
                       'Resolution\nOverall',
                       'Resolution\n[Mn<I/sig(I)> = 2.0]',
                       'DataProcessing\nSpaceGroup',
                       'Mn<I/sig(I)>\nHigh',
                       'Rmerge\nLow',
                       'Completeness\nOverall',
                       'DataProcessing\nUnitCell',
                       'DataProcessing\nRfree',
                       'DataProcessing\nScore']
        # need to do this because db_dict keys are SQLite column names
        diffraction_data_column_name = XChemDB.data_source(
            os.path.join(self.database_directory, self.data_source_file)).translate_xce_column_list_to_sqlite(
            column_name)
        for xtal in sorted(self.data_collection_dict):
            # NOTE(review): this flag is set but never read in this method
            if os.path.isfile(os.path.join(self.initial_model_directory, xtal, xtal + '.mtz')):
                mtz_already_in_inital_model_directory = True
            # column 2: data collection date
            # this one should always be there; it may need updating in case another run appears
            # first find latest run
            tmp = []
            for entry in self.data_collection_dict[xtal]:
                if entry[0] == 'image':
                    tmp.append([entry[3], datetime.strptime(entry[3], '%Y-%m-%d %H:%M:%S')])
            # NOTE(review): latest_run is computed but not used below; raises
            # ValueError if a crystal has no 'image' entries - verify upstream
            latest_run = max(tmp, key=lambda x: x[1])[0]
            # first check if it does already exist
            if xtal not in self.data_collection_column_three_dict:
                # generate all the widgets which can later be appended and add them to the dictionary
                data_collection_table = QtGui.QTableWidget()  # table with data processing results for each pipeline
                selection_changed_by_user = False
                self.data_collection_column_three_dict[xtal] = [data_collection_table, selection_changed_by_user]
                xtal_in_table = True
            else:
                data_collection_table = self.data_collection_column_three_dict[xtal][0]
                selection_changed_by_user = self.data_collection_column_three_dict[xtal][1]
            data_collection_table.setVerticalScrollBarPolicy(QtCore.Qt.ScrollBarAlwaysOff)
            data_collection_table.setColumnCount(len(column_name))
            font = QtGui.QFont()
            font.setPointSize(8)
            data_collection_table.setFont(font)
            data_collection_table.setHorizontalHeaderLabels(column_name)
            data_collection_table.horizontalHeader().setFont(font)
            data_collection_table.setSelectionBehavior(QtGui.QAbstractItemView.SelectRows)
            #############################################################################
            # crystal images
            # first check there are new images that are not displayed yet; i.e. they are not in the self.data_collection_image_dict
            if xtal not in self.data_collection_image_dict:
                # OK this is the first time
                self.data_collection_image_dict[xtal] = []
            # sort crystal images by timestamp
            # reminder: ['image',visit,run,timestamp,image_list,diffraction_image,run_number]
            # a) get only image entries from self.data_collection_dict
            tmp = []
            for entry in self.data_collection_dict[xtal]:
                if entry[0] == 'image':
                    tmp.append(entry)
            # b) sort by the previously assigned run number
            # note: entry[6]==run_number
            for entry in sorted(tmp, key=lambda x: x[6]):
                run_number = entry[6]
                images_already_in_table = False
                for image in self.data_collection_image_dict[xtal]:
                    if run_number == image[0]:
                        images_already_in_table = True
                        break
                if not images_already_in_table:
                    # not if there is a run, but images are for whatever reason not present in self.data_collection_dict
                    # then use image not available from $XChemExplorer_DIR/image/IMAGE_NOT_AVAILABLE.png
                    # not sure how to do this at the moment; it will probably trigger an error that I can catch
                    self.data_collection_image_dict[xtal].append([entry[6], entry[1], entry[2], entry[3], entry[5]])
            #############################################################################
            # initialize dataset_outcome_dict for xtal
            if xtal not in self.dataset_outcome_dict:
                self.dataset_outcome_dict[xtal] = []
            # dataset outcome buttons
            #############################################################################
            # table for data processing results
            # check if results from particular pipeline are already in table;
            # not really looking at the table here, but compare it to self.data_collection_table_dict
            row_position = data_collection_table.rowCount()
            if not xtal in self.data_collection_table_dict:
                self.data_collection_table_dict[xtal] = []
            # reminder: ['logfile',visit,run,timestamp,autoproc,file_name,aimless_results,<aimless_index>,False]
            logfile_list = []
            for entry in self.data_collection_dict[xtal]:
                if entry[0] == 'logfile':
                    logfile_list.append(entry)
            for entry in sorted(logfile_list, key=lambda x: x[7]):  # sort by aimless_index and so make sure
                entry_already_in_table = False  # that aimless_index == row
                for logfile in self.data_collection_table_dict[xtal]:
                    if entry[1] == logfile[1] and entry[2] == logfile[2] and entry[3] == logfile[3] and entry[4] == \
                            logfile[4]:
                        entry_already_in_table = True
                        # might have to update Rfree column
                        for column, header in enumerate(diffraction_data_column_name):
                            # NOTE(review): elsewhere header is indexed as header[0]/
                            # header[1], i.e. it is a pair, so this comparison with a
                            # plain string can never be True and the update below is
                            # dead code; if it ever ran, db_dict would be referenced
                            # before assignment here - verify intended behaviour
                            if header == 'DataProcessing\nRfree':
                                # entry[7]==aimless_index, i.e. row number
                                cell_text = QtGui.QTableWidgetItem()
                                cell_text.setText(str(db_dict[header[1]]))
                                cell_text.setTextAlignment(QtCore.Qt.AlignCenter | QtCore.Qt.AlignCenter)
                                data_collection_table.setItem(entry[7], column, cell_text)
                                break
                        break
                if not entry_already_in_table:
                    data_collection_table.insertRow(row_position)
                    db_dict = entry[6]
                    for column, header in enumerate(diffraction_data_column_name):
                        cell_text = QtGui.QTableWidgetItem()
                        try:
                            cell_text.setText(str(db_dict[header[1]]))
                        except KeyError:
                            # this may happen if not score exists
                            cell_text.setText('0')
                        cell_text.setTextAlignment(QtCore.Qt.AlignCenter | QtCore.Qt.AlignCenter)
                        data_collection_table.setItem(row_position, column, cell_text)
                    data_collection_table.setRowHeight(row_position, 20)
                    row_position += 1
                    self.data_collection_table_dict[xtal].append(
                        ['logfile', entry[1], entry[2], entry[3], entry[4]])  # 'logfile' is just added to have
                    # same index numbers between lists
            data_collection_table.cellClicked.connect(self.user_update_selected_autoproc_datasets_summary_table)
            # select best resolution file + set data collection outcome
            # the assumption is that index in data_collection_dict and row number are identical
            # the assumption for data collection outcome is that as long as a logfile is found, it's a success
            logfile_found = False
            for entry in self.data_collection_dict[xtal]:
                if entry[0] == 'logfile':
                    index = entry[7]
                    best_file = entry[8]
                    logfile_found = True
                    if best_file:
                        # we change the selection only if the user did not touch it, assuming that he/she knows best
                        # if not selection_changed_by_user:
                        data_collection_table.selectRow(index)
        self.populate_datasets_summary_table()
def find_suitable_reference_file(self, db_dict):
        """Return [reference, unit-cell-volume difference in %] pairs for every
        reference file whose point group matches the dataset in *db_dict*.

        The returned list always starts with a placeholder entry ('...' with a
        difference of 999) so callers have a fallback when nothing matches.
        """
        dummy = ['...', '', '', '', 0, '0']
        candidates = [[dummy, 999]]
        for reference in self.reference_file_list:
            # only references in the same point group are comparable
            if reference[5] != db_dict['DataProcessingPointGroup']:
                continue
            try:
                volume_ratio = float(db_dict['DataProcessingUnitCellVolume']) / float(reference[4])
            except ValueError:
                continue
            candidates.append([reference, math.fabs(1 - volume_ratio) * 100])
        return candidates
def create_maps_table(self):
        """Populate/refresh the maps table with one row per successfully
        collected crystal: reference selection, unit-cell difference, and
        colour-coded dimple/restraint status columns.
        """
        # need SQLite column names because db_dict is keyed by them
        column_name = self.db.translate_xce_column_list_to_sqlite(self.maps_table_columns)
        for xtal in sorted(self.xtal_db_dict):
            new_xtal = False
            db_dict = self.xtal_db_dict[xtal]
            # only crystals whose data collection succeeded get a row
            if str(db_dict['DataCollectionOutcome']).lower().startswith('success'):
                reference_file = self.find_suitable_reference_file(db_dict)
                # entry with the smallest %-unit-cell-volume difference
                smallest_uc_difference = min(reference_file, key=lambda x: x[1])
                row = self.maps_table.rowCount()
                if xtal not in self.initial_model_dimple_dict:
                    self.maps_table.insertRow(row)
                    current_row = row
                    new_xtal = True
                else:
                    # NOTE(review): if the crystal is in initial_model_dimple_dict but
                    # no table cell matches it, current_row stays unbound and the
                    # setItem calls below raise NameError - verify this cannot happen
                    for table_row in range(row):
                        if self.maps_table.item(table_row, 0).text() == xtal:
                            current_row = table_row
                            break
                # header is a pair: header[0] == XCE column label, header[1] == SQLite name
                for column, header in enumerate(column_name):
                    if header[0] == 'Sample ID':
                        cell_text = QtGui.QTableWidgetItem()
                        cell_text.setText(str(xtal))
                        cell_text.setTextAlignment(QtCore.Qt.AlignCenter | QtCore.Qt.AlignCenter)
                        self.maps_table.setItem(current_row, column, cell_text)
                    elif header[0] == 'Select':
                        # checkbox deciding whether dimple runs for this crystal
                        if new_xtal:
                            run_dimple = QtGui.QCheckBox()
                            run_dimple.toggle()
                            self.maps_table.setCellWidget(current_row, column, run_dimple)
                            run_dimple.setChecked(False)
                    elif header[0] == 'Reference\nSpaceGroup':
                        cell_text = QtGui.QTableWidgetItem()
                        cell_text.setText(str(smallest_uc_difference[0][1]))
                        cell_text.setTextAlignment(QtCore.Qt.AlignCenter | QtCore.Qt.AlignCenter)
                        self.maps_table.setItem(current_row, column, cell_text)
                    elif header[0] == 'Difference\nUC Volume (%)':
                        cell_text = QtGui.QTableWidgetItem()
                        # NOTE(review): recomputed here although unchanged since above
                        smallest_uc_difference = min(reference_file, key=lambda x: x[1])
                        cell_text.setText(str(round(float(smallest_uc_difference[1]), 1)))
                        cell_text.setTextAlignment(QtCore.Qt.AlignCenter | QtCore.Qt.AlignCenter)
                        self.maps_table.setItem(current_row, column, cell_text)
                    elif header[0] == 'Reference File':
                        # pre-select the best-matching reference if its unit-cell
                        # volume difference is below the allowed threshold
                        if new_xtal:
                            reference_file_selection_combobox = QtGui.QComboBox()
                            self.populate_reference_combobox(reference_file_selection_combobox)
                            if float(smallest_uc_difference[1]) < self.allowed_unitcell_difference_percent:
                                index = reference_file_selection_combobox.findText(str(smallest_uc_difference[0][0]),
                                                                                   QtCore.Qt.MatchFixedString)
                                reference_file_selection_combobox.setCurrentIndex(index)
                            else:
                                reference_file_selection_combobox.setCurrentIndex(0)
                            self.maps_table.setCellWidget(current_row, column,
                                                          reference_file_selection_combobox)
                        else:
                            # reuse the existing combobox stored for this crystal
                            reference_file_selection_combobox = self.initial_model_dimple_dict[xtal][1]
                            self.populate_reference_combobox(reference_file_selection_combobox)
                            if float(smallest_uc_difference[1]) < self.allowed_unitcell_difference_percent:
                                index = reference_file_selection_combobox.findText(str(smallest_uc_difference[0][0]),
                                                                                   QtCore.Qt.MatchFixedString)
                                reference_file_selection_combobox.setCurrentIndex(index)
                            else:
                                reference_file_selection_combobox.setCurrentIndex(0)
                    else:
                        # plain database value; status columns get a colour code
                        cell_text = QtGui.QTableWidgetItem()
                        cell_text.setText(str(db_dict[header[1]]))
                        cell_text.setTextAlignment(QtCore.Qt.AlignCenter | QtCore.Qt.AlignCenter)
                        if header[0] == 'Dimple\nStatus':
                            if str(db_dict[header[1]]) == 'running':
                                cell_text.setBackground(QtGui.QColor(100, 230, 150))
                            elif str(db_dict[header[1]]) == 'pending':
                                cell_text.setBackground(QtGui.QColor(20, 100, 230))
                            elif str(db_dict[header[1]]) == 'started':
                                cell_text.setBackground(QtGui.QColor(230, 240, 110))
                            elif str(db_dict[header[1]]) == 'finished':
                                cell_text.setBackground(QtGui.QColor(255, 255, 255))
                        if header[0] == 'Compound\nStatus':
                            if str(db_dict[header[1]]) == 'running':
                                cell_text.setBackground(QtGui.QColor(100, 230, 150))
                            elif str(db_dict[header[1]]) == 'pending':
                                cell_text.setBackground(QtGui.QColor(20, 100, 230))
                            elif str(db_dict[header[1]]) == 'started':
                                cell_text.setBackground(QtGui.QColor(230, 240, 110))
                            elif str(db_dict[header[1]]) == 'restraints generated':
                                cell_text.setBackground(QtGui.QColor(255, 255, 255))
                            elif str(db_dict[header[1]]) == 'restraints failed':
                                cell_text.setBackground(QtGui.QColor(255, 0, 0))
                            elif str(db_dict[header[1]]) == 'missing smiles':
                                cell_text.setBackground(QtGui.QColor(240, 150, 20))
                        self.maps_table.setItem(current_row, column, cell_text)
                if new_xtal:
                    # remember the per-crystal widgets for later lookups
                    self.initial_model_dimple_dict[xtal] = [run_dimple, reference_file_selection_combobox]
def preferences_data_to_copy_combobox_changed(self, i):
        """Store the data-to-copy option matching the combobox text in self.preferences."""
        selected = str(self.preferences_data_to_copy_combobox.currentText())
        for entry in self.preferences_data_to_copy:
            if entry[0] == selected:
                self.preferences['processed_data_to_copy'] = entry[1]
                break
def preferences_selection_mechanism_combobox_changed(self, i):
        """Record the chosen dataset-selection mechanism and log the change."""
        mechanism = str(self.preferences_selection_mechanism_combobox.currentText())
        self.preferences['dataset_selection_mechanism'] = mechanism
        self.update_log.insert('setting datasets selection mechanism to ' + mechanism)
def preferences_initial_refinement_combobox_changed(self, i):
        """Record the chosen initial-refinement pipeline and log the change."""
        pipeline = str(self.preferences_initial_refinement_combobox.currentText())
        self.preferences['initial_refinement_pipeline'] = pipeline
        self.update_log.insert('setting initial refinement pipeline to ' + pipeline)
def preferences_restraints_generation_combobox_changed(self):
text = str(self.preferences_restraints_generation_combobox.currentText())
self.restraints_program = text
self.update_log.insert('will use {0!s} for generation of ligand coordinates and restraints'.format(text))
def refinement_outcome_combobox_changed(self):
for xtal in self.refinement_table_dict:
if self.sender() == self.refinement_table_dict[xtal]:
# db_dict = {'RefinementOutcome': str(self.sender().currentText())}
db_dict = {}
db_dict['RefinementOutcome'] = str(self.sender().currentText())
db_dict['RefinementOutcomePerson'] = getpass.getuser()
db_dict['RefinementOutcomeDate'] = datetime.strftime(datetime.now(), '%Y-%m-%d_%H-%M-%S.%f')[:-4]
self.db.create_or_remove_missing_records_in_depositTable(self.xce_logfile, xtal, 'ligand_bound',
db_dict)
def get_reference_file_list(self, reference_root):
# check available reference files
reference_file_list = []
dummy = ['...', '', '', '', 0, '0']
reference_file_list.append(dummy)
if os.path.isfile(os.path.join(self.reference_directory, reference_root + '.pdb')):
pdb_reference = parse().PDBheader(os.path.join(self.reference_directory, reference_root + '.pdb'))
spg_reference = pdb_reference['SpaceGroup']
unitcell_reference = pdb_reference['UnitCell']
lattice_reference = pdb_reference['Lattice']
unitcell_volume_reference = pdb_reference['UnitCellVolume']
pointgroup_reference = pdb_reference['PointGroup']
reference_file_list.append([reference_root,
spg_reference,
unitcell_reference,
lattice_reference,
unitcell_volume_reference,
pointgroup_reference])
else:
for files in glob.glob(self.reference_directory + '/*'):
if files.endswith('.pdb'):
reference_root = files[files.rfind('/') + 1:files.rfind('.')]
if os.path.isfile(os.path.join(self.reference_directory, reference_root + '.pdb')):
# reference_file = reference_root + '.pdb'
pdb_reference = parse().PDBheader(
os.path.join(self.reference_directory, reference_root + '.pdb'))
spg_reference = pdb_reference['SpaceGroup']
unitcell_reference = pdb_reference['UnitCell']
lattice_reference = pdb_reference['Lattice']
unitcell_volume_reference = pdb_reference['UnitCellVolume']
pointgroup_reference = pdb_reference['PointGroup']
reference_file_list.append([reference_root,
spg_reference,
unitcell_reference,
lattice_reference,
unitcell_volume_reference,
pointgroup_reference])
for n, file in enumerate(reference_file_list):
self.update_log.insert('reference file {0!s}: {1!s}'.format(n, file))
return reference_file_list
def dataset_outcome_combobox_change_outcome(self, text):
outcome = str(text)
xtal = ''
for key in self.dataset_outcome_combobox_dict:
if self.dataset_outcome_combobox_dict[key] == self.sender():
xtal = key
self.update_log.insert('user changed data collection outcome of {0!s} to {1!s}'.format(xtal, outcome))
break
self.dataset_outcome_dict[xtal] = outcome
if xtal != '':
# # need to also update if not yet done
# user_already_changed_selection = False
# for n, entry in enumerate(self.data_collection_dict[xtal]):
# if entry[0] == 'user_changed_selection':
# user_already_changed_selection = True
# if entry[0] == 'logfile':
# db_dict = entry[6]
# db_dict['DataCollectionOutcome'] = outcome
# entry[6] = db_dict
# self.data_collection_dict[xtal][n] = entry
# if not user_already_changed_selection:
# self.data_collection_dict[xtal].append(['user_changed_selection'])
# # finally need to update outcome field in data source accordingly
self.update_log.insert('updating dataset outcome in datasource for {0!s}'.format(xtal))
update_dict = {'DataCollectionOutcome': outcome}
self.db.update_insert_data_source(xtal, update_dict)
def set_run_dimple_flag(self, state):
if state == QtCore.Qt.Checked:
for key in self.initial_model_dimple_dict:
self.initial_model_dimple_dict[key][0].setChecked(True)
else:
for key in self.initial_model_dimple_dict:
self.initial_model_dimple_dict[key][0].setChecked(False)
    def show_data_collection_details(self, state):
        """Show the detail widget for the sample whose 'Show Details' checkbox fired.

        Hides any detail widget currently on display, selects the sample's row
        in the summary table and unchecks the checkboxes of all other samples,
        so at most one detail widget is visible at a time.
        """
        # first remove currently displayed widget
        if self.data_collection_details_currently_on_display is not None:
            self.data_collection_details_currently_on_display.hide()
            self.data_collection_details_currently_on_display = None
        # build [sample ID, row] pairs so the matching row can be selected below
        tmp = []
        allRows = self.datasets_summary_table.rowCount()
        for table_row in range(allRows):
            tmp.append([self.datasets_summary_table.item(table_row, 0).text(), table_row])
        for key in self.datasets_summary_dict:
            # element [3] of each summary entry is the 'Show Details' checkbox
            if self.datasets_summary_dict[key][3] == self.sender():
                if self.sender().isChecked():
                    for item in tmp:
                        if item[0] == key:
                            self.datasets_summary_table.selectRow(item[1])
                    # element [0] of the column-three entry is the detail widget
                    self.data_collection_details_currently_on_display = self.data_collection_column_three_dict[key][0]
                    self.datasets_summarys_vbox_for_details.addWidget(
                        self.data_collection_details_currently_on_display)
                    self.data_collection_details_currently_on_display.show()
            else:
                # un-check all other ones
                self.datasets_summary_dict[key][3].setChecked(False)
# def populate_datasets_summary_table(self):
# self.status_bar.showMessage(
# 'Building summary table for data processing results; be patient this may take a while')
# row = self.datasets_summary_table.rowCount()
# column_name = self.db.translate_xce_column_list_to_sqlite(self.datasets_summary_table_columns)
#
# pinList = self.db.execute_statement(
# "Select CrystalName,PinBarcode,DataCollectionPinBarcode from mainTable where CrystalName is not ''")
# pinDict = {}
# for item in pinList:
# pinDict[str(item[0])] = [str(item[1]), str(item[2])]
#
# for xtal in sorted(self.data_collection_dict):
# new_xtal = False
# if xtal not in self.datasets_summary_dict:
# row = self.datasets_summary_table.rowCount()
# self.datasets_summary_table.insertRow(row)
# self.datasets_summary_dict[xtal] = []
# new_xtal = True
#
# # check for dataset outcome
# outcome = ''
# logfile_found = False
# too_low_resolution = True
# db_dict = {}
# for entry in self.data_collection_dict[xtal]:
# if entry[0] == 'logfile':
# logfile_found = True
# if entry[8]: # if this was auto-selected best resolution file
# db_dict = entry[6]
# try:
# if float(db_dict['DataProcessingResolutionHigh']) <= float(
# self.acceptable_low_resolution_limit_for_data):
# too_low_resolution = False
# except ValueError:
# pass
#
# try:
# outcome = str(self.db.get_value_from_field(xtal, 'DataCollectionOutcome')[0])
# except TypeError:
# outcome = 'Failed - unknown'
# self.update_log.insert('cannot find DataCollectionOutcome for {0!s}'.format(xtal))
# self.dataset_outcome_dict[xtal] = outcome
#
# # find latest run for crystal and diffraction images
# tmp = []
# for entry in self.data_collection_dict[xtal]:
# if entry[0] == 'image':
# tmp.append([entry, datetime.strptime(entry[3], '%Y-%m-%d %H:%M:%S')])
# latest_run = max(tmp, key=lambda x: x[1])[0]
#
# new_run_for_exisiting_crystal_or_new_sample = True
# if new_xtal:
# self.datasets_summary_dict[xtal] = [outcome, db_dict, latest_run]
# else:
# # check if newer run appeared
# old_run_timestamp = self.datasets_summary_dict[xtal][2][3]
# new_run_timestamp = latest_run[3]
# if old_run_timestamp == new_run_timestamp:
# new_run_for_exisiting_crystal_or_new_sample = False
# else:
# checkbox_for_details = self.datasets_summary_dict[xtal][3]
# self.datasets_summary_dict[xtal] = [outcome, db_dict, latest_run, checkbox_for_details]
#
# if new_xtal:
# current_row = row
# else:
# allRows = self.datasets_summary_table.rowCount()
# for table_row in range(allRows):
# if self.datasets_summary_table.item(table_row, 0).text() == xtal:
# current_row = table_row
# break
#
# image_number = 0
# for column, header in enumerate(column_name):
# if header[0] == 'Sample ID':
# cell_text = QtGui.QTableWidgetItem()
# cell_text.setText(str(xtal))
# cell_text.setTextAlignment(QtCore.Qt.AlignCenter | QtCore.Qt.AlignCenter)
# self.datasets_summary_table.setItem(current_row, column, cell_text)
# elif header[0] == 'DataCollection\nOutcome':
# if new_xtal:
# dataset_outcome_combobox = QtGui.QComboBox()
# for outcomeItem in self.dataset_outcome:
# dataset_outcome_combobox.addItem(outcomeItem)
# self.datasets_summary_table.setCellWidget(current_row, column, dataset_outcome_combobox)
# dataset_outcome_combobox.activated[str].connect(self.dataset_outcome_combobox_change_outcome)
# self.dataset_outcome_combobox_dict[xtal] = dataset_outcome_combobox
# index = self.dataset_outcome_combobox_dict[xtal].findText(str(outcome), QtCore.Qt.MatchFixedString)
# self.dataset_outcome_combobox_dict[xtal].setCurrentIndex(index)
# continue
#
# elif header[0].startswith('img'):
# if new_run_for_exisiting_crystal_or_new_sample:
# img = latest_run[4]
# pixmap = QtGui.QPixmap()
# # can do this (img[image_number][1]) because made sure in the threading module
# # that there are always exactly 5 images in there
# pixmap.loadFromData(base64.b64decode(img[image_number][1]))
# image = QtGui.QLabel()
# image.resize(128, 80)
# image.setPixmap(pixmap.scaled(image.size(), QtCore.Qt.KeepAspectRatio))
# self.datasets_summary_table.setCellWidget(current_row, column, image)
# image_number += 1
#
# elif header[0].startswith('Show Diffraction\nImage'):
# if new_run_for_exisiting_crystal_or_new_sample:
# diffraction_image = latest_run[5]
# diffraction_image_name = diffraction_image[diffraction_image.rfind('/') + 1:]
# try: # need to try because older pkl file may not have this item in list
# html_summary = latest_run[7]
# except IndexError:
# html_summary = ''
# if new_xtal:
# start_albula_button = QtGui.QPushButton('Show: \n' + diffraction_image_name)
# start_albula_button.clicked.connect(self.show_html_summary_and_diffraction_image)
# self.albula_button_dict[xtal] = [start_albula_button, diffraction_image, html_summary]
# self.datasets_summary_table.setCellWidget(current_row, column, start_albula_button)
# else:
# self.albula_button_dict[xtal][1] = diffraction_image
# elif header[0].startswith('Show\nDetails'):
# if new_xtal:
# show_data_collection_details_checkbox = QtGui.QCheckBox()
# show_data_collection_details_checkbox.toggle()
# show_data_collection_details_checkbox.setChecked(False)
# show_data_collection_details_checkbox.stateChanged.connect(self.show_data_collection_details)
# self.datasets_summary_table.setCellWidget(current_row, column,
# show_data_collection_details_checkbox)
# self.datasets_summary_dict[xtal].append(show_data_collection_details_checkbox)
# elif header[0].startswith('SoakDB\nBarcode') or header[0].startswith('GDA\nBarcode'):
# if new_xtal:
# cell_text = QtGui.QTableWidgetItem()
# if xtal in pinDict:
# if header[0].startswith('SoakDB\nBarcode'):
# cell_text.setText(str(pinDict[xtal][0]))
# elif header[0].startswith('GDA\nBarcode'):
# cell_text.setText(str(pinDict[xtal][1]))
# if pinDict[xtal][0] == 'NULL' or pinDict[xtal][1] == 'NULL':
# cell_text.setBackground(QtGui.QColor(255, 215, 0))
# elif pinDict[xtal][0] != pinDict[xtal][1]:
# cell_text.setBackground(QtGui.QColor(255, 0, 0))
# else:
# cell_text.setText('')
# cell_text.setTextAlignment(QtCore.Qt.AlignCenter | QtCore.Qt.AlignCenter)
# self.datasets_summary_table.setItem(current_row, column, cell_text)
# else:
# cell_text = QtGui.QTableWidgetItem()
# # in case data collection failed for whatever reason
# if logfile_found:
# try:
# cell_text.setText(str(db_dict[header[1]]))
# except KeyError: # older pkl files may not have all the columns
# cell_text.setText('n/a')
# else:
# if header[0].startswith('Resolution\n[Mn<I/sig(I)> = 1.5]'):
# cell_text.setText('999')
# elif header[0].startswith('DataProcessing\nRfree'):
# cell_text.setText('999')
# elif header[0].startswith('Rmerge\nLow'):
# cell_text.setText('999')
# else:
# cell_text.setText('')
# cell_text.setTextAlignment(QtCore.Qt.AlignCenter | QtCore.Qt.AlignCenter)
# self.datasets_summary_table.setItem(current_row, column, cell_text)
#
# row += 1
#
# self.datasets_summary_table.resizeRowsToContents()
# self.datasets_summary_table.resizeColumnsToContents()
#
# self.status_bar.showMessage('updating Overview table')
#
# self.status_bar.showMessage('idle')
#
# self.save_files_to_initial_model_folder()
#
################################################################################################################
#
#
#
# => new data collection summary table
# > start
def get_sample_list_from_table(self,table):
sampleList = []
allRows = table.rowCount()
for row in xrange(0, allRows):
sample_id = str(table.item(row, 0).text())
sampleList.append(sample_id)
return sorted(sampleList)
def get_row_of_sample_in_table(self,table,xtal):
allRows = table.rowCount()
sampleRow = allRows
for n,row in enumerate(xrange(0, allRows)):
sample_id = str(table.item(row, 0).text())
if sample_id == xtal:
sampleRow = n
break
return sampleRow
    def update_row_in_table(self,sample,row,db_dict,table,columns_to_show):
        """Populate one row of *table* with the values for *sample*.

        Parameters
        ----------
        sample : sample ID (converted to str)
        row : 0-based row index in *table* to write into
        db_dict : dict of database values for this sample, keyed by sqlite
            column name
        table : the QTableWidget to update
        columns_to_show : XCE column headers; translated by the database layer
            into (XCE header, sqlite column name) pairs
        """
        xtal = str(sample)
        column_name = self.db.translate_xce_column_list_to_sqlite(columns_to_show)
        for column, header in enumerate(column_name):
            # header[0] is the XCE column title, header[1] the sqlite column name
            if header[0] == 'Sample ID':
                cell_text = QtGui.QTableWidgetItem()
                cell_text.setText(str(xtal))
                cell_text.setTextAlignment(QtCore.Qt.AlignCenter | QtCore.Qt.AlignCenter)
                table.setItem(row, column, cell_text)
            elif header[0] == 'DataCollection\nOutcome':
                # create the outcome combobox only once per sample; afterwards
                # just set its current index to the stored outcome
                if xtal not in self.dataset_outcome_combobox_dict:
                    dataset_outcome_combobox = QtGui.QComboBox()
                    for outcomeItem in self.dataset_outcome:
                        dataset_outcome_combobox.addItem(outcomeItem)
                    dataset_outcome_combobox.activated[str].connect(self.dataset_outcome_combobox_change_outcome)
                    self.dataset_outcome_combobox_dict[xtal] = dataset_outcome_combobox
                    table.setCellWidget(row, column, dataset_outcome_combobox)
                index = self.dataset_outcome_combobox_dict[xtal].findText(str(db_dict['DataCollectionOutcome']), QtCore.Qt.MatchFixedString)
                self.dataset_outcome_combobox_dict[xtal].setCurrentIndex(index)
            elif header[0].startswith('img'):
                # show the crystal image if the file exists, else a placeholder
                if os.path.isfile(db_dict[header[1]]):
                    pixmap = QtGui.QPixmap(db_dict[header[1]])
                else:
                    pixmap = QtGui.QPixmap(
                        os.path.join(os.getenv('XChemExplorer_DIR'), 'image', 'IMAGE_NOT_AVAILABLE.png'))
                image = QtGui.QLabel()
                image.resize(128, 80)
                image.setPixmap(pixmap.scaled(image.size(), QtCore.Qt.KeepAspectRatio))
                table.setCellWidget(row, column, image)
            elif header[0] == 'Select':
                checkbox = QtGui.QCheckBox()
                checkbox.toggle()
                # remember deposition-table checkboxes so their state can be
                # queried later; registered only once per sample
                if table == self.deposition_table_apo:
                    if xtal not in self.deposition_table_apo_dict:
                        self.deposition_table_apo_dict[xtal] = checkbox
                if table == self.deposition_table_bound:
                    if xtal not in self.deposition_table_bound_dict:
                        self.deposition_table_bound_dict[xtal] = checkbox
                table.setCellWidget(row, column, checkbox)
                checkbox.setChecked(False)
            #elif header[0].startswith('SoakDB\nBarcode') or header[0].startswith('GDA\nBarcode'):
            #    if new_xtal:
            #        cell_text = QtGui.QTableWidgetItem()
            #        if xtal in pinDict:
            #            if header[0].startswith('SoakDB\nBarcode'):
            #                cell_text.setText(str(pinDict[xtal][0]))
            #            elif header[0].startswith('GDA\nBarcode'):
            #                cell_text.setText(str(pinDict[xtal][1]))
            #            if pinDict[xtal][0] == 'NULL' or pinDict[xtal][1] == 'NULL':
            #                cell_text.setBackground(QtGui.QColor(255, 215, 0))
            #            elif pinDict[xtal][0] != pinDict[xtal][1]:
            #                cell_text.setBackground(QtGui.QColor(255, 0, 0))
            #        else:
            #            cell_text.setText('')
            #        cell_text.setTextAlignment(QtCore.Qt.AlignCenter | QtCore.Qt.AlignCenter)
            #        self.datasets_summary_table.setItem(current_row, column, cell_text)
            else:
                cell_text = QtGui.QTableWidgetItem()
                # in case data collection failed for whatever reason
                try:
                    cell_text.setText(str(db_dict[header[1]]))
                except KeyError: # older pkl files may not have all the columns
                    cell_text.setText('n/a')
                # else:
                #    if header[0].startswith('Resolution\n[Mn<I/sig(I)> = 1.5]'):
                #        cell_text.setText('999')
                #    elif header[0].startswith('DataProcessing\nRfree'):
                #        cell_text.setText('999')
                #    elif header[0].startswith('Rmerge\nLow'):
                #        cell_text.setText('999')
                #    else:
                #        cell_text.setText('')
                cell_text.setTextAlignment(QtCore.Qt.AlignCenter | QtCore.Qt.AlignCenter)
                table.setItem(row, column, cell_text)
        # NOTE(review): debug output after the loop; cell_text/header refer to
        # the last iteration and cell_text may be stale (or unbound) when the
        # last column was a widget column — confirm before relying on it
        print('row: {0!s} column: {1!s} value: {2!s} header: {3!s}'.format(row, column, cell_text, header[0]))
        print('column_name {0!s}'.format(column_name))
def populate_datasets_summary_table_NEW(self):
self.status_bar.showMessage(
'Building summary table for data processing results; be patient this may take a while')
# get information about all samples collected during the current visit
visit, beamline = XChemMain.getVisitAndBeamline(self.beamline_directory)
if self.read_agamemnon.isChecked():
visit = []
for v in glob.glob(os.path.join(self.beamline_directory[:self.beamline_directory.rfind('-') + 1] + '*')):
visit.append(v[v.rfind('/')+1:])
self.update_log.insert('reading information about collected crystals from database...')
collectedXtalsDict = self.db.xtals_collected_during_visit_as_dict(visit)
# instead of using dictionaries, query table of which crystals are in table
samples_in_table = self.get_sample_list_from_table(self.datasets_summary_table)
for xtal in sorted(collectedXtalsDict):
if xtal not in samples_in_table:
row = self.datasets_summary_table.rowCount()
self.datasets_summary_table.insertRow(row)
else:
row = self.get_row_of_sample_in_table(self.datasets_summary_table,xtal)
db_dict = collectedXtalsDict[xtal]
self.update_row_in_table(xtal, row, db_dict, self.datasets_summary_table,
self.datasets_summary_table_columns)
self.datasets_summary_table.resizeRowsToContents()
self.datasets_summary_table.resizeColumnsToContents()
self.status_bar.showMessage('updating Overview table')
self.status_bar.showMessage('idle')
def get_selected_row(self,table):
indexes = table.selectionModel().selectedRows()
for index in sorted(indexes):
selected_row = index.row()
return selected_row
    def show_results_from_all_pipelines(self):
        """Pop up a table of every autoprocessing result for the selected sample.

        Builds a fresh results table, fills one row per valid result and
        pre-selects the row matching the currently active result (same visit,
        run, program and score), then shows it in a modal message box so the
        user can pick a different result.
        """
        selected_row=self.get_selected_row(self.datasets_summary_table)
        xtal = self.datasets_summary_table.item(selected_row, 0).text()
        # get details of currently selected autoprocessing result
        selectedResultDict = self.db.get_db_dict_for_sample(xtal)
        dbList=self.db.all_autoprocessing_results_for_xtal_as_dict(xtal)
        self.make_data_collection_table()
        self.msgBox = QtGui.QMessageBox() # needs to be created here, otherwise the cellClicked function
        # will reference it before it exists
        for db_dict in dbList:
            # skip results without a valid space group
            if str(db_dict['DataProcessingSpaceGroup']).lower() == 'null' or str(db_dict['DataProcessingSpaceGroup']).lower() == 'none':
                continue
            row = self.data_collection_table.rowCount()
            self.data_collection_table.insertRow(row)
            self.update_row_in_table(xtal, row, db_dict, self.data_collection_table, self.data_collection_table_columns)
            # remember and highlight the row of the currently active result
            if selectedResultDict['DataCollectionVisit'] == db_dict['DataCollectionVisit'] \
                    and selectedResultDict['DataCollectionRun'] == db_dict['DataCollectionRun'] \
                    and selectedResultDict['DataProcessingProgram'] == db_dict['DataProcessingProgram'] \
                    and selectedResultDict['DataProcessingScore'] == db_dict['DataProcessingScore']:
                self.current_row = row
                self.data_collection_table.selectRow(row)
        self.data_collection_table.cellClicked.connect(self.select_different_autoprocessing_result)
        self.data_collection_table_popup()
def make_data_collection_table(self):
# this creates a new table widget every time
# more elegant would be to delete or reset an existing widget...
self.data_collection_table = QtGui.QTableWidget()
self.data_collection_table.setVerticalScrollBarPolicy(QtCore.Qt.ScrollBarAlwaysOff)
self.data_collection_table.setColumnCount(len(self.data_collection_table_columns))
font = QtGui.QFont()
font.setPointSize(8)
self.data_collection_table.setFont(font)
self.data_collection_table.setHorizontalHeaderLabels(self.data_collection_table_columns)
self.data_collection_table.horizontalHeader().setFont(font)
self.data_collection_table.setSelectionBehavior(QtGui.QAbstractItemView.SelectRows)
def data_collection_table_popup(self):
# self.msgBox = QtGui.QMessageBox()
msgBoxLayout = self.msgBox.layout()
qWid = QtGui.QWidget()
qWid.setFixedWidth(3000)
qWid.setFixedHeight(500)
vbox = QtGui.QVBoxLayout()
vbox.addWidget(self.data_collection_table)
qWid.setLayout(vbox)
# msgBoxLayout.addLayout(vbox, 0, 0)
msgBoxLayout.addWidget(qWid)
self.msgBox.addButton(QtGui.QPushButton('Cancel'), QtGui.QMessageBox.RejectRole)
self.msgBox.resize(1000,200)
self.msgBox.exec_();
    def select_different_autoprocessing_result(self):
        """Slot for clicks in the autoprocessing-results popup table.

        If the clicked row differs from the currently active result, the chosen
        result's files are linked into the project directory, the data source
        is updated and the sample's row in the Datasets summary table is
        refreshed. The popup is closed in either case.
        """
        selected_row=self.get_selected_row(self.data_collection_table)
        if selected_row != self.current_row:
            # columns 0-3 and 12 hold sample, visit, run, pipeline and score
            xtal = self.data_collection_table.item(selected_row, 0).text()
            visit = self.data_collection_table.item(selected_row, 1).text()
            run = self.data_collection_table.item(selected_row, 2).text()
            autoproc = self.data_collection_table.item(selected_row, 3).text()
            score = self.data_collection_table.item(selected_row, 12).text()
            # debug dump of the whole selected row (widget cells print as None)
            for q in range(13):
                try:
                    print('--> {0!s}: {1!s}'.format(q, self.data_collection_table.item(selected_row, q).text()))
                except AttributeError:
                    print('--> {0!s}: None'.format(q))
            # get db_dict from collectionTable for visit, run, autoproc
            # dbDict = self.db.get_db_dict_for_visit_run_autoproc(xtal,visit,run,autoproc)
            dbDict = self.db.get_db_dict_for_visit_run_autoproc_score(xtal, visit, run, autoproc, score)
            dbDict['DataProcessingAutoAssigned'] = 'False'
            self.update_log.insert('%s: changing selected autoprocessing result to %s %s %s' %(xtal,visit,run,autoproc))
            # xtal is QString -> str(xtal)
            XChemMain.linkAutoProcessingResult(str(xtal), dbDict, self.initial_model_directory,self.xce_logfile)
            self.update_log.insert('%s: updating row in Datasets table' %xtal)
            self.db.update_data_source(str(xtal),dbDict)
            self.update_log.insert('%s: getting updated information from DB mainTable' %xtal)
            dbDict = self.db.get_db_dict_for_sample(xtal)
            row = self.get_row_of_sample_in_table(self.datasets_summary_table,xtal)
            self.update_row_in_table(xtal, row, dbDict, self.datasets_summary_table,
                                     self.datasets_summary_table_columns)
        else:
            print('nothing to change')
        self.msgBox.done(1)
# < end
#################################################################################################################
def update_outcome_datasets_summary_table(self, sample, outcome):
rows_in_table = self.datasets_summary_table.rowCount()
for row in range(rows_in_table):
if self.datasets_summary_table.item(row, 0).text() == sample:
cell_text = QtGui.QTableWidgetItem()
cell_text.setText(outcome)
self.datasets_summary_table.setItem(row, 3, cell_text)
def user_update_selected_autoproc_datasets_summary_table(self):
for key in self.data_collection_column_three_dict:
if self.data_collection_column_three_dict[key][0] == self.sender():
self.update_log.insert('here: ' + self.sender())
self.update_log.insert('herere' + str(self.data_collection_column_three_dict))
dbTmp = self.xtal_db_dict[key]
stage = dbTmp['RefinementOutcome'].split()[0]
print('===>', key, stage)
if int(stage) > 2:
msgBox = QtGui.QMessageBox()
msgBox.setText(
"*** WARNING ***\n%s is currently %s\nIt will disappear from the Refinement table,\n"
"when you refresh it next time.\nDo you want to continue?" % (
key, dbTmp['RefinementOutcome']))
msgBox.addButton(QtGui.QPushButton('No'), QtGui.QMessageBox.YesRole)
msgBox.addButton(QtGui.QPushButton('Yes'), QtGui.QMessageBox.RejectRole)
reply = msgBox.exec_();
if reply == 0:
self.update_log.insert('will not change data processing selection')
# restore previous selection
for n, entry in enumerate(self.data_collection_dict[key]):
print('==>', n)
if entry[0] == 'logfile':
if entry[8]:
print('===> found:', n)
self.data_collection_column_three_dict[key][0].selectRow(n)
break
indexes = self.sender().selectionModel().selectedRows()
selected_processing_result = 1000000
for index in sorted(indexes):
selected_processing_result = index.row()
# the user changed the selection, i.e. no automated selection will update it
self.update_log.insert('user changed selection')
self.data_collection_column_three_dict[key][1] = True
# need to also update if not yet done
user_already_changed_selection = False
for n, entry in enumerate(self.data_collection_dict[key]):
if entry[0] == 'user_changed_selection':
user_already_changed_selection = True
if entry[0] == 'logfile':
db_dict = entry[6]
db_dict['DataProcessingAutoAssigned'] = 'False'
if entry[7] == selected_processing_result:
db_dict_current = entry[6]
program = db_dict['DataProcessingProgram']
visit = db_dict['DataCollectionVisit']
run = db_dict['DataCollectionRun']
self.update_log.insert(
'user changed data processing files for {0!s} to visit={1!s}, '
'run={2!s}, program={3!s}'.format(key, visit, run, program))
# update datasource
self.update_log.insert('updating datasource...')
self.update_data_source(key, db_dict)
entry[8] = True
else:
entry[8] = False
entry[6] = db_dict
self.data_collection_dict[key][n] = entry
if not user_already_changed_selection:
self.data_collection_dict[key].append(['user_changed_selection'])
XChemMain.change_links_to_selected_data_collection_outcome(key, self.data_collection_dict,
self.data_collection_column_three_dict,
self.dataset_outcome_dict,
self.initial_model_directory,
os.path.join(self.database_directory,
self.data_source_file),
self.xce_logfile)
# update 'Datasets' table
column_name = XChemDB.data_source(
os.path.join(self.database_directory, self.data_source_file)).translate_xce_column_list_to_sqlite(
self.datasets_summary_table_columns)
rows_in_table = self.datasets_summary_table.rowCount()
for row in range(rows_in_table):
if self.datasets_summary_table.item(row, 0).text() == key:
for column, header in enumerate(column_name):
if header[0] == 'Sample ID':
continue
elif header[0] == 'DataCollection\nOutcome':
continue
elif header[0].startswith('img'):
continue
elif header[0].startswith('Show'):
continue
else:
cell_text = QtGui.QTableWidgetItem()
try:
cell_text.setText(str(db_dict_current[header[1]]))
cell_text.setTextAlignment(QtCore.Qt.AlignCenter | QtCore.Qt.AlignCenter)
self.datasets_summary_table.setItem(row, column, cell_text)
except KeyError:
pass
    def update_selected_autoproc_datasets_summary_table(self):
        """Apply a selection change in a per-sample autoprocessing results table.

        Determines which sample's table emitted the signal, flags the selected
        logfile entry as the active result (entry[8]), updates the data source
        and rewrites the plain-text columns of the sample's row in the
        Datasets summary table.

        NOTE(review): assumes the sender is found in
        data_collection_column_three_dict and that a matching 'logfile' entry
        exists — otherwise 'sample' / 'db_dict' would be unbound; confirm with
        the signal wiring.
        """
        for key in self.data_collection_column_three_dict:
            if self.data_collection_column_three_dict[key][0] == self.sender():
                sample = key
                break
        indexes = self.sender().selectionModel().selectedRows()
        for index in sorted(indexes):
            selected_processing_result = index.row()
        for n, entry in enumerate(self.data_collection_dict[sample]):
            if entry[0] == 'logfile':
                # entry[7] is this result's table row; entry[8] flags the
                # currently selected result
                if entry[7] == selected_processing_result:
                    db_dict = entry[6]
                    program = db_dict['DataProcessingProgram']
                    visit = db_dict['DataCollectionVisit']
                    run = db_dict['DataCollectionRun']
                    self.update_log.insert(
                        'user changed data processing files for {0!s} to visit={1!s}, run={2!s}, program={3!s}'.format(
                            sample, visit, run, program))
                    # update datasource
                    self.update_log.insert('updating datasource...')
                    self.update_data_source(sample, db_dict)
                    entry[8] = True
                else:
                    entry[8] = False
                self.data_collection_dict[sample][n] = entry
        # update 'Datasets' table
        column_name = XChemDB.data_source(
            os.path.join(self.database_directory, self.data_source_file)).translate_xce_column_list_to_sqlite(
            self.datasets_summary_table_columns)
        rows_in_table = self.datasets_summary_table.rowCount()
        for row in range(rows_in_table):
            if self.datasets_summary_table.item(row, 0).text() == sample:
                for column, header in enumerate(column_name):
                    # widget-backed columns are left untouched
                    if header[0] == 'Sample ID':
                        continue
                    elif header[0] == 'DataCollection\nOutcome':
                        continue
                    elif header[0].startswith('img'):
                        continue
                    elif header[0].startswith('Show'):
                        continue
                    else:
                        cell_text = QtGui.QTableWidgetItem()
                        cell_text.setText(str(db_dict[header[1]]))
                        cell_text.setTextAlignment(QtCore.Qt.AlignCenter | QtCore.Qt.AlignCenter)
                        self.datasets_summary_table.setItem(row, column, cell_text)
    def populate_and_update_datasource_table(self):
        """Fill or refresh the Overview data source table from self.data.

        Rows with an empty/None sample ID are skipped; known samples are
        updated in place, new samples get a fresh row. The Sample ID cell is
        made read-only.

        NOTE(review): n_rows is computed but not used below — confirm whether
        it can be dropped.
        """
        self.overview_datasource_table.setColumnCount(len(self.overview_datasource_table_columns))
        # first get a list of all the samples that are already in the table and which will be updated
        samples_in_table = []
        current_row = self.overview_datasource_table.rowCount()
        for row in range(current_row):
            sampleID = str(self.overview_datasource_table.item(row, 0).text()) # this must be the case
            samples_in_table.append(sampleID)
        columns_to_show = self.get_columns_to_show(self.overview_datasource_table_columns)
        n_rows = self.get_rows_with_sample_id_not_null_from_datasource()
        sample_id_column = self.get_columns_to_show(['Sample ID'])
        for row in self.data:
            if str(row[sample_id_column[0]]).lower() == 'none' or str(row[sample_id_column[0]]).replace(' ', '') == '':
                # do not show rows where sampleID is null
                continue
            else:
                if not str(row[sample_id_column[0]]) in samples_in_table:
                    # insert row, this is a new sample
                    x = self.overview_datasource_table.rowCount()
                    self.overview_datasource_table.insertRow(x)
                else:
                    # find row of this sample in data_source_table
                    for present_rows in range(self.overview_datasource_table.rowCount()):
                        if str(row[sample_id_column[0]]) == str(
                                self.overview_datasource_table.item(present_rows, 0).text()):
                            x = present_rows
                            break
                # x is the target row; fill every displayed column
                for y, item in enumerate(columns_to_show):
                    cell_text = QtGui.QTableWidgetItem()
                    if row[item] is None:
                        cell_text.setText('')
                    else:
                        cell_text.setText(str(row[item]))
                    if self.overview_datasource_table_columns[y] == 'Sample ID': # assumption is that column 0 is always sampleID
                        cell_text.setFlags(QtCore.Qt.ItemIsEnabled) # and this field cannot be changed
                    cell_text.setTextAlignment(QtCore.Qt.AlignCenter | QtCore.Qt.AlignCenter)
                    self.overview_datasource_table.setItem(x, y, cell_text)
        self.overview_datasource_table.setHorizontalHeaderLabels(self.overview_datasource_table_columns)
    def kill_other_pandda_options(self):
        """Enforce mutual exclusion between the PanDDA per-row checkboxes.

        For each row, the checkbox in column 7 and the checkboxes in columns
        8/9 are mutually exclusive: ticking one side clears the other.

        NOTE(review): checkbox0 (column 1) is read but never used. The second
        condition parses as '(checkbox1 and checkbox2) or checkbox3' due to
        operator precedence — 'checkbox1 and (checkbox2 or checkbox3)' may
        have been intended; it is also subsumed by the third condition.
        Confirm intent before changing.
        """
        for i in range(0, self.pandda_analyse_data_table.rowCount()):
            checkbox0 = self.pandda_analyse_data_table.cellWidget(i,1)
            checkbox1 = self.pandda_analyse_data_table.cellWidget(i,7)
            checkbox2 = self.pandda_analyse_data_table.cellWidget(i,8)
            checkbox3 = self.pandda_analyse_data_table.cellWidget(i,9)
            if checkbox1.isChecked():
                checkbox2.setChecked(False)
                checkbox3.setChecked(False)
            if checkbox1.isChecked() and checkbox2.isChecked() or checkbox3.isChecked():
                checkbox1.setChecked(False)
            if checkbox2.isChecked() or checkbox3.isChecked():
                checkbox1.setChecked(False)
    def populate_pandda_analyse_input_table(self):
        """Fill the PanDDA analysis input table, one row per crystal with a
        dimple model.

        Crystals already tracked in pandda_analyse_input_table_dict are
        updated in their existing row; new crystals are appended. The
        Exclude/Ignore/Export columns get checkboxes wired to the
        mutual-exclusion handler; the PanDDA status column is colour-coded.
        """
        column_name = self.db.translate_xce_column_list_to_sqlite(self.pandda_table_columns)
        print(column_name)
        for xtal in sorted(self.xtal_db_dict):
            new_xtal = False
            db_dict = self.xtal_db_dict[xtal]
            # only samples with a dimple PDB file are eligible for pandda.analyse
            if os.path.isfile(db_dict['DimplePathToPDB']):
                row = self.pandda_analyse_data_table.rowCount()
                if xtal not in self.pandda_analyse_input_table_dict:
                    self.pandda_analyse_data_table.insertRow(row)
                    current_row = row
                    new_xtal = True
                else:
                    # locate the sample's existing row by its ID in column 0
                    for table_row in range(row):
                        if self.pandda_analyse_data_table.item(table_row, 0).text() == xtal:
                            current_row = table_row
                            break
                for column, header in enumerate(column_name):
                    # header[0] is the XCE column title, header[1] the sqlite column name
                    if header[0]=='Exclude':
                        deselect_button = QtGui.QCheckBox()
                        deselect_button.stateChanged.connect(self.kill_other_pandda_options)
                        self.pandda_analyse_data_table.setCellWidget(current_row, column, deselect_button)
                    elif header[0]=='Ignore':
                        deselect_button = QtGui.QCheckBox()
                        deselect_button.stateChanged.connect(self.kill_other_pandda_options)
                        self.pandda_analyse_data_table.setCellWidget(current_row, column, deselect_button)
                    elif header[0]=='Export':
                        deselect_button = QtGui.QCheckBox()
                        deselect_button.stateChanged.connect(self.kill_other_pandda_options)
                        self.pandda_analyse_data_table.setCellWidget(current_row, column, deselect_button)
                    elif header[0] == 'Sample ID':
                        cell_text = QtGui.QTableWidgetItem()
                        cell_text.setText(str(xtal))
                        cell_text.setTextAlignment(QtCore.Qt.AlignCenter | QtCore.Qt.AlignCenter)
                        self.pandda_analyse_data_table.setItem(current_row, column, cell_text)
                    else:
                        cell_text = QtGui.QTableWidgetItem()
                        cell_text.setText(str(db_dict[header[1]]))
                        # colour-code the PanDDA status column by its value
                        if header[0] == 'PanDDA\nStatus':
                            if str(db_dict[header[1]]) == 'running':
                                cell_text.setBackground(QtGui.QColor(100, 230, 150))
                            elif str(db_dict[header[1]]) == 'pending':
                                cell_text.setBackground(QtGui.QColor(20, 100, 230))
                            elif str(db_dict[header[1]]) == 'started':
                                cell_text.setBackground(QtGui.QColor(230, 240, 110))
                            elif str(db_dict[header[1]]) == 'finished':
                                cell_text.setBackground(QtGui.QColor(255, 255, 255))
                            elif 'problem' in str(db_dict[header[1]]):
                                cell_text.setBackground(QtGui.QColor(255, 0, 0))
                        cell_text.setTextAlignment(QtCore.Qt.AlignCenter | QtCore.Qt.AlignCenter)
                        self.pandda_analyse_data_table.setItem(current_row, column, cell_text)
            if new_xtal:
                self.pandda_analyse_input_table_dict[xtal] = []
def select_sample_for_pandda(self, option):
indexes = self.pandda_analyse_data_table.selectionModel().selectedRows()
if option == 'deselect':
for index in sorted(indexes):
self.pandda_analyse_data_table.cellWidget(index.row(), 6).setChecked(False)
self.pandda_analyse_data_table.cellWidget(index.row(), 7).setChecked(False)
self.pandda_analyse_data_table.cellWidget(index.row(), 8).setChecked(False)
else:
for index in sorted(indexes):
self.pandda_analyse_data_table.cellWidget(index.row(), 6).setChecked(False)
self.pandda_analyse_data_table.cellWidget(index.row(), 7).setChecked(False)
self.pandda_analyse_data_table.cellWidget(index.row(), 8).setChecked(False)
if option =='ignore':
checkbox = self.pandda_analyse_data_table.cellWidget(index.row(), 6)
if option == 'char':
checkbox = self.pandda_analyse_data_table.cellWidget(index.row(), 7)
if option == 'zmap':
checkbox = self.pandda_analyse_data_table.cellWidget(index.row(), 8)
checkbox.setChecked(True)
self.kill_other_pandda_options()
    def populate_and_update_refinement_table(self):
        """Populate or refresh the refinement overview table.

        Iterates over all crystals in ``self.xtal_db_dict`` and displays only
        those whose 'RefinementOutcome' stage is in [3, 7).  A crystal not yet
        present in ``self.refinement_table_dict`` gets a fresh row plus a
        refinement-outcome combobox; rows of known crystals are updated
        in place.
        """
        # panddaList = self.db.execute_statement(
        #     "select CrystalName,PANDDA_site_index,PANDDA_site_name,RefinementOutcome "
        #     "from panddaTable where CrystalName is not '' and PANDDA_site_ligand_placed is 'True';")
        # panddaDict = {}
        # for item in panddaList:
        #     if str(item[0]) not in panddaDict:
        #         panddaDict[str(item[0])] = []
        #     panddaDict[str(item[0])].append([str(item[1]), str(item[2]), str(item[3])])
        column_name = self.db.translate_xce_column_list_to_sqlite(self.refinement_table_columns)
        for xtal in sorted(self.xtal_db_dict):
            new_xtal = False
            db_dict = self.xtal_db_dict[xtal]
            try:
                # 'RefinementOutcome' appears to be "<stage> <label>"; the
                # leading integer decides whether the sample is shown at all
                stage = int(str(db_dict['RefinementOutcome']).split()[0])
                refinementStage = db_dict['RefinementOutcome']
            except ValueError:
                # non-numeric first token -> stage 0, sample is skipped below
                stage = 0
            except IndexError:
                # empty outcome string -> stage 0, sample is skipped below
                stage = 0
            if stage >= 3 and stage < 7:
                row = self.refinement_table.rowCount()
                if xtal not in self.refinement_table_dict:
                    self.refinement_table.insertRow(row)
                    current_row = row
                    new_xtal = True
                else:
                    # locate the existing row for this crystal; column 0
                    # holds the Sample ID
                    for table_row in range(row):
                        if self.refinement_table.item(table_row, 0).text() == xtal:
                            current_row = table_row
                            break
                for column, header in enumerate(column_name):
                    # header is a (display name, DB column name) pair
                    if header[0] == 'Sample ID':
                        cell_text = QtGui.QTableWidgetItem()
                        cell_text.setText(str(xtal))
                        cell_text.setTextAlignment(QtCore.Qt.AlignCenter | QtCore.Qt.AlignCenter)
                        self.refinement_table.setItem(current_row, column, cell_text)
                    elif header[0] == 'Refinement\nOutcome':
                        if new_xtal:
                            refinement_outcome_combobox = QtGui.QComboBox()
                            self.populate_refinement_outcome_combobox(refinement_outcome_combobox)
                            self.refinement_table.setCellWidget(current_row, column, refinement_outcome_combobox)
                        else:
                            refinement_outcome_combobox = self.refinement_table_dict[xtal]
                        index = refinement_outcome_combobox.findText(refinementStage, QtCore.Qt.MatchFixedString)
                        refinement_outcome_combobox.setCurrentIndex(index)
                        refinement_outcome_combobox.currentIndexChanged.connect(
                            self.refinement_outcome_combobox_changed)
                    elif header[0] == 'buster-reports':
                        #"<a href=\"{0!s}">'NAME'</a>".format(db_dict['RefinementBusterReportHTML'])
                        # db_dict['RefinementBusterReportHTML'] = 'www.google.com'
                        # render the report path as a clickable link; the
                        # second-to-last path component is used as link text
                        buster_report = db_dict['RefinementBusterReportHTML']
                        ref_name = buster_report.split('/')[len(buster_report.split('/'))-2]
                        buster_report_link = QtGui.QLabel("<a href=\"{0!s}\">{1!s}</a>".format(buster_report,ref_name))
                        buster_report_link.setOpenExternalLinks(True)
                        # buster_report_link.setTextInteractionFlags(QtCore.Qt.TextBrowserInteraction)
                        # buster_report_link.setTextFormat(QtCore.Qt.RichText)
                        # self.refinement_table.setItem(current_row, column, buster_report_link)
                        self.refinement_table.setCellWidget(current_row, column, buster_report_link)
                    # elif header[0] == 'PanDDA site details':
                    #     try:
                    #         panddaDict[xtal].insert(0, ['Index', 'Name', 'Status'])
                    #         outerFrame = QtGui.QFrame()
                    #         outerFrame.setFrameShape(QtGui.QFrame.Box)
                    #         grid = QtGui.QGridLayout()
                    #         for y, entry in enumerate(panddaDict[xtal]):
                    #             for x, info in enumerate(entry):
                    #                 frame = QtGui.QFrame()
                    #                 frame.setFrameShape(QtGui.QFrame.Box)
                    #                 vbox = QtGui.QVBoxLayout()
                    #                 vbox.addWidget(QtGui.QLabel(str(entry[x])))
                    #                 frame.setLayout(vbox)
                    #                 grid.addWidget(frame, y, x)
                    #         outerFrame.setLayout(grid)
                    #         self.refinement_table.setCellWidget(current_row, column, outerFrame)
                    #     except KeyError:
                    #         cell_text = QtGui.QTableWidgetItem()
                    #         cell_text.setText('*** N/A ***')
                    #         cell_text.setTextAlignment(QtCore.Qt.AlignCenter | QtCore.Qt.AlignCenter)
                    #         self.refinement_table.setItem(current_row, column, cell_text)
                    else:
                        # plain text cell; the refinement status column is
                        # additionally colour-coded by its value
                        cell_text = QtGui.QTableWidgetItem()
                        cell_text.setText(str(db_dict[header[1]]))
                        if header[0] == 'Refinement\nStatus':
                            if str(db_dict[header[1]]) == 'running':
                                cell_text.setBackground(QtGui.QColor(100, 230, 150))
                            elif str(db_dict[header[1]]) == 'pending':
                                cell_text.setBackground(QtGui.QColor(20, 100, 230))
                            elif str(db_dict[header[1]]) == 'started':
                                cell_text.setBackground(QtGui.QColor(230, 240, 110))
                            elif str(db_dict[header[1]]) == 'finished':
                                cell_text.setBackground(QtGui.QColor(255, 255, 255))
                            elif 'problem' in str(db_dict[header[1]]):
                                cell_text.setBackground(QtGui.QColor(255, 0, 0))
                        cell_text.setTextAlignment(QtCore.Qt.AlignCenter | QtCore.Qt.AlignCenter)
                        self.refinement_table.setItem(current_row, column, cell_text)
                if new_xtal:
                    # NOTE(review): assumes the 'Refinement\nOutcome' column is
                    # always present in column_name, otherwise
                    # refinement_outcome_combobox would be unbound here -- confirm
                    self.refinement_table_dict[xtal] = refinement_outcome_combobox
        self.refinement_table.resizeColumnsToContents()
        self.refinement_table.resizeRowsToContents()
def get_columns_to_show(self, column_list):
# maybe I coded some garbage before, but I need to find out which column name in the
# data source corresponds to the actually displayed column name in the table
# reason being that the unique column ID for DB may not be nice to look at
columns_to_show = []
for column in column_list:
# first find out what the column name in the header is:
column_name = ''
for name in self.all_columns_in_data_source:
if column == name[1]:
column_name = name[0]
for n, all_column in enumerate(self.header):
if column_name == all_column:
columns_to_show.append(n)
break
return columns_to_show
def get_rows_with_sample_id_not_null_from_datasource(self):
sample_id_column = self.get_columns_to_show(['Sample ID'])
n_rows = 0
for row in self.data:
if not str(row[sample_id_column[0]]).lower() != 'none' or not str(row[sample_id_column[0]]).replace \
(' ', '') == '':
n_rows += 1
return n_rows
    def update_data_source(self, sample, db_dict):
        # NOTE(review): this looks like an unfinished stub -- it builds a
        # data_source handle but never uses it, and the 'sample' and 'db_dict'
        # arguments are ignored.  Confirm whether callers rely on side effects
        # of constructing XChemDB.data_source, or whether an update call is
        # missing here.
        data_source = XChemDB.data_source(os.path.join(self.database_directory, self.data_source_file))
def quit_xce(self):
# save pkl file
if self.data_collection_dict != {}:
if os.path.isfile(self.datasets_summary_file):
self.update_log.insert('saving results to PKL file')
pickle.dump(self.data_collection_dict, open(self.datasets_summary_file, 'wb'))
self.update_log.insert('quitting XCE... bye,bye!')
QtGui.qApp.quit()
if __name__ == "__main__":
    # Entry point: construct the XChemExplorer application, forwarding any
    # command-line arguments (sys.argv[0], the script name, is dropped).
    # Presumably the QApplication subclass starts the Qt event loop itself,
    # since no further call follows — confirm against its __init__.
    app = XChemExplorer(sys.argv[1:])
# "Debugging is twice as hard as writing the code in the first
# place. Therefore, if you write the code as cleverly as
# possible, you are, by definition, not smart enough to debug it."
# -- Brian W. Kernighan
# ^^ Who did this? :P
|
normal
|
{
"blob_id": "cc58e3944ee2bfb55cc2867395782a94c196e635",
"index": 6784,
"step-1": "########################################################################################################################\n# DEVELOPER README: #\n# This is the main script, where the GUI is initialised from. All of the main layout objects live in their own scripts #\n# under ./gui_scripts (i.e. the tab content). The settings and preferences script sets up all of the directory paths #\n# and contains dictionaries defining the top menu, push buttons and the tables held in the main tabs. The layout #\n# script contains functions for performing simple layout tasks, such as adding a combobox, and contains init. #\n# functions for all of the main layout functions. #\n# #\n# In the future, the functions associated with buttons and frames etc. should be moved into the relevant script, but #\n# this is a bit more complicated. For now, they are separated out into sections within this script. The only GUI stuff #\n# going on in here is calling the initialisation functions. To change the layout of a tab, edit it in it's own script, #\n# and add any new functions in this script, in the relevant section. (If there is one yet) #\n# #\n# There's still a lot of cleaning up to be done in the future... 
#\n########################################################################################################################\n\n# solve gtk startup error\n#import gtk\n\n#gtk.set_interactive(False)\n\nimport base64\nimport getpass\nimport glob\nimport math\nimport multiprocessing\nimport pickle\nimport subprocess\nimport sys, os\nimport webbrowser\nfrom datetime import datetime\nfrom PyQt4 import QtGui, QtCore, QtWebKit\n\nsys.path.append(os.path.join(os.getenv('XChemExplorer_DIR'), 'lib'))\nsys.path.append(os.path.join(os.getenv('XChemExplorer_DIR'), 'web'))\nsys.path.append(os.path.join(os.getenv('XChemExplorer_DIR'), 'gui_scripts'))\n\nfrom settings_preferences import *\nfrom layout import *\nfrom stylesheet import set_stylesheet\n\n\nfrom XChemUtils import parse\nimport XChemThread\nimport XChemDB\nimport XChemPANDDA\nimport XChemToolTips\nimport XChemMain\nimport XChemPlots\nimport XChemLog\nimport XChemProcess\nimport XChemDeposit\nimport XChemWeb\n\n\nimport matplotlib.pyplot as plt\nfrom matplotlib.backends.backend_qt4agg import FigureCanvasQTAgg as FigureCanvas\n\nclass XChemExplorer(QtGui.QApplication):\n def __init__(self, args):\n\n # init a QApplication object to hold XCE\n QtGui.QApplication.__init__(self, args)\n\n # start GUI\n self.start_GUI()\n\n # set stylesheet - how the gui looks\n set_stylesheet(self)\n\n self.exec_()\n\n def start_GUI(self):\n\n # check http://doc.qt.io/qt-4.8/stylesheet-customizing.html#the-box-model\n # This needs moving somewhere more appropriate...\n self.headlineLabelfont = QtGui.QFont(\"Arial\", 20, QtGui.QFont.Bold)\n\n setup().settings(self)\n setup().preferences(self)\n setup().tables(self)\n\n self.layout_funcs = LayoutFuncs()\n\n # GUI setup\n self.window = QtGui.QWidget()\n self.window.setWindowTitle(\"XChemExplorer\")\n self.screen = QtGui.QDesktopWidget().screenGeometry()\n\n LayoutObjects(self).workflow(self)\n LayoutObjects(self).main_layout(self)\n LayoutFuncs().add_widgets_layouts(self)\n\n 
self.checkLabXChemDir()\n\n if os.path.isfile(os.path.join(self.database_directory, self.data_source_file)):\n self.backup_soakDB()\n\n def backup_soakDB(self):\n XChemMain.backup_soakDB(os.path.join(self.database_directory, self.data_source_file),self.xce_logfile)\n\n def checkLabXChemDir(self):\n dirCheck = QtGui.QMessageBox()\n dirCheckLayout = dirCheck.layout()\n vbox = QtGui.QVBoxLayout()\n try:\n warning = (\n 'Are you sure you want to launch XCE here:\\n\\n'\n +self.labxchem_directory_current+'\\n\\n'\n 'If this is not where you should be running XCE, please close!\\n'\n )\n except AttributeError:\n return\n vbox.addWidget(QtGui.QLabel(warning))\n dirCheckLayout.addLayout(vbox, 0, 0)\n dirCheck.exec_();\n\n\n # function to update datasource\n def datasource_menu_reload_samples(self):\n self.update_log.insert(\n 'reading samples from data source: ' + os.path.join(self.database_directory, self.data_source_file))\n self.update_status_bar(\n 'reading samples from data source: ' + os.path.join(self.database_directory, self.data_source_file))\n self.update_header_and_data_from_datasource()\n self.update_all_tables()\n self.overview_datasource_table.resizeColumnsToContents()\n\n # function to create new datasource\n def create_new_data_source(self):\n file_name = str(QtGui.QFileDialog.getSaveFileName(self.window, 'Save file', self.database_directory))\n # make sure that the file always has .sqlite extension\n if file_name.rfind('.') != -1:\n file_name = file_name[:file_name.rfind('.')] + '.sqlite'\n else:\n file_name = file_name + '.sqlite'\n self.db = XChemDB.data_source(file_name)\n print('==> XCE: creating new data source')\n self.db.create_empty_data_source_file()\n self.db.create_missing_columns()\n self.database_directory = file_name[:file_name.rfind('/')]\n self.data_source_file = file_name[file_name.rfind('/') + 1:]\n self.data_source_file_label.setText(os.path.join(self.database_directory, self.data_source_file))\n self.settings['database_directory'] = 
self.database_directory\n self.settings['data_source'] = self.data_source_file\n self.data_source_set = True\n self.datasource_menu_reload_samples()\n\n\n ####################################################################################################################\n # #\n # DATASETS TAB #\n # #\n ####################################################################################################################\n def continously_check_for_new_data_collection(self, state):\n self.timer_to_check_for_new_data_collection.timeout.connect(\n lambda: self.check_for_new_autoprocessing_or_rescore(False))\n if state == QtCore.Qt.Checked:\n print('==> XCE: checking automatically every 120s for new data collection')\n self.timer_to_check_for_new_data_collection.start(120000)\n else:\n print('==> XCE: stopped checking for new data collections')\n self.timer_to_check_for_new_data_collection.stop()\n\n def target_selection_combobox_activated(self, text):\n self.target = str(text)\n\n def select_diffraction_data_directory(self):\n self.diffraction_data_directory = str(QtGui.QFileDialog.getExistingDirectory(self.window, \"Select Directory\"))\n self.diffraction_data_dir_label.setText(self.diffraction_data_directory)\n self.settings['diffraction_data_directory'] = self.diffraction_data_directory\n self.update_log.insert('setting diffraction data directory to ' + self.diffraction_data_directory)\n\n def search_for_datasets(self):\n self.update_log.insert('search diffraction data directory for datasets...')\n print('will search ' + str(self.diffraction_data_directory))\n self.work_thread = XChemMain.find_diffraction_image_directory_fast(self.diffraction_data_directory)\n self.explorer_active = 1\n\n self.connect(self.work_thread, QtCore.SIGNAL(\"update_datasets_reprocess_table\"),\n self.update_datasets_reprocess_table)\n self.connect(self.work_thread, QtCore.SIGNAL(\"update_progress_bar\"), self.update_progress_bar)\n self.connect(self.work_thread, 
QtCore.SIGNAL(\"update_status_bar(QString)\"), self.update_status_bar)\n self.connect(self.work_thread, QtCore.SIGNAL(\"finished()\"), self.thread_finished)\n\n self.work_thread.start()\n\n #self.work_thread = self.update_datasets_reprocess_table(self.diffraction_data_directory)\n\n def translate_datasetID_to_sampleID(self):\n translate = QtGui.QMessageBox()\n translateLayout = translate.layout()\n self.translate_datasetID_to_sampleID_file = '-'\n vbox = QtGui.QVBoxLayout()\n button = QtGui.QPushButton('Open CSV')\n button.clicked.connect(self.open_csv_file_translate_datasetID_to_sampleID)\n vbox.addWidget(button)\n self.translate_datasetID_to_sampleID_csv_label = QtGui.QLabel(self.translate_datasetID_to_sampleID_file)\n vbox.addWidget(self.translate_datasetID_to_sampleID_csv_label)\n translateLayout.addLayout(vbox, 0, 0)\n translate.addButton(QtGui.QPushButton('OK'), QtGui.QMessageBox.YesRole)\n translate.addButton(QtGui.QPushButton('Cancel'), QtGui.QMessageBox.RejectRole)\n reply = translate.exec_();\n if reply == 0:\n if os.path.isfile(self.translate_datasetID_to_sampleID_file):\n trans_dict = {}\n for line in open(self.translate_datasetID_to_sampleID_file):\n if len(line.split(',')) == 2:\n dataset = line.split(',')[0]\n new_sample_id = line.split(',')[1]\n trans_dict[dataset] = new_sample_id\n if len(trans_dict) >= 1:\n allRows = self.datasets_reprocess_table.rowCount()\n for row in xrange(0, allRows):\n dataset_id = str(self.datasets_reprocess_table.item(row, 0).text())\n sample_id = str(self.datasets_reprocess_table.item(row, 1).text())\n if dataset_id in trans_dict:\n cell_text = QtGui.QTableWidgetItem()\n cell_text.setText(trans_dict[dataset_id])\n cell_text.setTextAlignment(QtCore.Qt.AlignCenter | QtCore.Qt.AlignCenter)\n self.datasets_reprocess_table.setItem(row, 1, cell_text)\n self.update_log.insert(\n 'dataset: {0!s} -> changing sampleID to: {1!s}'.format(dataset_id,\n trans_dict[dataset_id]))\n\n def select_sample_for_xia2(self):\n indexes = 
self.datasets_reprocess_table.selectionModel().selectedRows()\n for index in sorted(indexes):\n xtal = str(self.datasets_reprocess_table.item(index.row(), 1).text())\n print(xtal, self.diffraction_data_table_dict[xtal][0])\n self.update_log.insert('{0!s} marked for reprocessing'.format(index.row()))\n self.diffraction_data_table_dict[xtal][0].setChecked(True)\n\n def select_reprocess_reference_mtz(self):\n self.update_log.insert('trying to set new reference mtz file for reprocessing with xia2')\n file_name = str(QtGui.QFileDialog.getOpenFileName(self.window, 'Select file', self.database_directory))\n if os.path.isfile(file_name):\n if file_name.endswith('.mtz'):\n self.diffraction_data_reference_mtz = file_name\n self.update_log.insert(\n 'new reference file for data processing with xia2: ' + self.diffraction_data_reference_mtz)\n self.reprocess_reference_mtz_file_label.setText(self.diffraction_data_reference_mtz)\n else:\n self.update_log.insert('this does not seem to be a mtz file: ' + file_name)\n\n def check_for_new_autoprocessing_or_rescore(self, rescore_only):\n self.update_log.insert('checking for new data collection')\n start_thread = False\n if rescore_only:\n # first pop up a warning message as this will overwrite all user selections\n msgBox = QtGui.QMessageBox()\n msgBox.setText(\"*** WARNING ***\\nThis will overwrite all your manual selections!\\nDo you want to continue?\")\n msgBox.addButton(QtGui.QPushButton('Yes'), QtGui.QMessageBox.YesRole)\n msgBox.addButton(QtGui.QPushButton('No'), QtGui.QMessageBox.RejectRole)\n reply = msgBox.exec_();\n if reply == 0:\n start_thread = True\n else:\n start_thread = False\n else:\n start_thread = True\n\n if start_thread:\n if self.target == '=== SELECT TARGET ===':\n msgBox = QtGui.QMessageBox()\n warning = ('*** WARNING ***\\n'\n 'Please select a target or\\n'\n 'select \"=== project directory ===\" if you want to read reprocessed results\\n'\n 'In case target list is empty, make sure that you have selected the 
actual\\n'\n 'data collection visit (e.g. /dls/i04-1/data/2018/lb18145-70)' )\n msgBox.setText(warning)\n start_thread = False\n\n# msgBox.setText(warning)\n# msgBox.addButton(QtGui.QPushButton('Yes'), QtGui.QMessageBox.YesRole)\n# msgBox.addButton(QtGui.QPushButton('No'), QtGui.QMessageBox.RejectRole)\n# reply = msgBox.exec_();\n# if reply == 0:\n# start_thread = True\n# else:\n# start_thread = False\n# else:\n# start_thread = True\n\n if start_thread:\n self.work_thread = XChemThread.read_autoprocessing_results_from_disc(self.visit_list,\n self.target,\n self.reference_file_list,\n self.database_directory,\n self.data_collection_dict,\n self.preferences,\n self.datasets_summary_file,\n self.initial_model_directory,\n rescore_only,\n self.acceptable_low_resolution_limit_for_data,\n os.path.join(self.database_directory,\n self.data_source_file),\n self.xce_logfile)\n self.explorer_active = 1\n self.connect(self.work_thread, QtCore.SIGNAL(\"update_progress_bar\"), self.update_progress_bar)\n self.connect(self.work_thread, QtCore.SIGNAL(\"update_status_bar(QString)\"), self.update_status_bar)\n self.connect(self.work_thread, QtCore.SIGNAL(\"finished()\"), self.thread_finished)\n self.connect(self.work_thread, QtCore.SIGNAL(\"create_widgets_for_autoprocessing_results_only\"),\n self.create_widgets_for_autoprocessing_results_only)\n self.work_thread.start()\n\n\n #################################################################################################################\n #\n #\n #\n # => for new module from hell\n # > start\n\n def update_gdaLog_parsing_instructions_and_score(self, gdaLogInstructions):\n self.gdaLogInstructions = gdaLogInstructions\n self.select_best_autoprocessing_result()\n\n def read_pinIDs_from_gda_logs(self):\n self.update_log.insert('reading pinIDs from gda logfiles...')\n visit, beamline = XChemMain.getVisitAndBeamline(self.beamline_directory)\n self.work_thread = XChemThread.read_pinIDs_from_gda_logs(beamline,\n visit,\n os.path.join(\n 
self.database_directory,\n self.data_source_file),\n self.gdaLogInstructions,\n self.xce_logfile)\n\n self.explorer_active = 1\n self.connect(self.work_thread, QtCore.SIGNAL(\"update_progress_bar\"), self.update_progress_bar)\n self.connect(self.work_thread, QtCore.SIGNAL(\"update_status_bar(QString)\"), self.update_status_bar)\n self.connect(self.work_thread, QtCore.SIGNAL(\"finished()\"), self.thread_finished)\n self.connect(self.work_thread, QtCore.SIGNAL(\"update_gdaLog_parsing_instructions_and_score\"),\n self.update_gdaLog_parsing_instructions_and_score)\n self.work_thread.start()\n\n\n def check_for_new_autoprocessing_results(self):\n self.update_log.insert('checking for new data collection')\n if self.target == '=== SELECT TARGET ===':\n self.update_log.error('NO TARGET SELECTED, PLEASE SELECT A TARGET AND TRY AGAIN!')\n start_thread = False\n elif self.target == '=== project directory ===':\n processedDir = self.initial_model_directory\n start_thread = True\n# elif self.read_agamemnon.isChecked():\n# tmp = '/'.join(self.beamline_directory.split('/')[:6])\n# processedDir = tmp[:tmp.rfind('-')]\n## processedDir = os.path.join(self.beamline_directory[:self.beamline_directory.rfind('-') + 1] + '*/processed/agamemnon/'+self.target)\n## processedDir = os.path.join(self.beamline_directory[:self.beamline_directory.rfind('-') + 1] + '*/processed/*/'+self.target)\n# start_thread = True\n else:\n processedDir = os.path.join(self.beamline_directory, 'processed', self.target)\n start_thread = True\n\n if start_thread:\n# processedDir=os.path.join(self.beamline_directory,'processed',self.target)\n self.work_thread = XChemThread.read_write_autoprocessing_results_from_to_disc(processedDir,\n os.path.join(\n self.database_directory,\n self.data_source_file),\n self.initial_model_directory,\n self.xce_logfile,\n self.target,\n self.read_agamemnon.isChecked())\n\n self.explorer_active = 1\n self.connect(self.work_thread, QtCore.SIGNAL(\"update_progress_bar\"), 
self.update_progress_bar)\n self.connect(self.work_thread, QtCore.SIGNAL(\"update_status_bar(QString)\"), self.update_status_bar)\n self.connect(self.work_thread, QtCore.SIGNAL(\"finished()\"), self.thread_finished)\n self.connect(self.work_thread, QtCore.SIGNAL(\"read_pinIDs_from_gda_logs\"),\n self.read_pinIDs_from_gda_logs)\n self.work_thread.start()\n\n def select_best_autoprocessing_result(self):\n if self.rescore:\n # first pop up a warning message as this will overwrite all user selections\n msgBox = QtGui.QMessageBox()\n msgBox.setText(\"*** WARNING ***\\nThis will overwrite all your manual selections!\\nDo you want to continue?\")\n msgBox.addButton(QtGui.QPushButton('Yes'), QtGui.QMessageBox.YesRole)\n msgBox.addButton(QtGui.QPushButton('No'), QtGui.QMessageBox.RejectRole)\n reply = msgBox.exec_();\n if reply != 0:\n start_thread = False\n else:\n start_thread = True\n else:\n start_thread = True\n\n if start_thread:\n self.update_log.insert('selecting best autoprocessing result')\n self.update_log.insert('samples where user made manual changes will be ignored!')\n\n if self.target == '=== project directory ===':\n processedDir = self.initial_model_directory\n else:\n processedDir = os.path.join(self.beamline_directory, 'processed', self.target)\n\n visit,beamline = XChemMain.getVisitAndBeamline(processedDir)\n\n if self.read_agamemnon.isChecked():\n visit = []\n for v in glob.glob(\n os.path.join(self.beamline_directory[:self.beamline_directory.rfind('-') + 1] + '*')):\n visit.append(v[v.rfind('/') + 1:])\n\n self.work_thread = XChemThread.choose_autoprocessing_outcome(os.path.join(self.database_directory,\n self.data_source_file),\n visit,\n self.reference_file_list,\n self.preferences,\n self.initial_model_directory,\n self.rescore,\n self.xce_logfile,\n self.read_agamemnon.isChecked())\n\n self.explorer_active = 1\n self.connect(self.work_thread, QtCore.SIGNAL(\"update_progress_bar\"), self.update_progress_bar)\n self.connect(self.work_thread, 
QtCore.SIGNAL(\"update_status_bar(QString)\"), self.update_status_bar)\n self.connect(self.work_thread, QtCore.SIGNAL(\"finished()\"), self.thread_finished)\n self.connect(self.work_thread, QtCore.SIGNAL(\"populate_datasets_summary_table_NEW\"),\n self.populate_datasets_summary_table_NEW)\n self.work_thread.start()\n\n # < end\n ###################################################################################################################\n\n\n ####################################################################################################################\n # #\n # MAPS TAB #\n # #\n ####################################################################################################################\n def set_new_reference_if_applicable(self):\n print('hallo')\n reference_root = str(self.reference_file_selection_combobox.currentText())\n pg_ref = ''\n ucVol_ref = 0.0\n for reference in self.reference_file_list:\n print(reference[0], reference_root)\n if reference[0] == reference_root:\n pg_ref = reference[5]\n ucVol_ref = reference[4]\n break\n if ucVol_ref == 0.0:\n self.update_log.insert('cannot set reference file since unit cell volume of reference pdb is 0!')\n return\n\n for xtal in self.initial_model_dimple_dict:\n reference_file_selection_combobox = self.initial_model_dimple_dict[xtal][1]\n self.populate_reference_combobox(reference_file_selection_combobox)\n db_dict = self.xtal_db_dict[xtal]\n pg_xtal = db_dict['DataProcessingPointGroup']\n ucVol_xtal = db_dict['DataProcessingUnitCellVolume']\n\n try:\n difference = math.fabs(1 - (float(ucVol_xtal) / float(ucVol_ref))) * 100\n except ValueError:\n self.update_log.insert(xtal + ' -> cannot calculate unit cell volume difference')\n continue\n\n if pg_xtal == pg_ref and difference < self.allowed_unitcell_difference_percent:\n print(xtal, pg_xtal, ucVol_xtal)\n index = reference_file_selection_combobox.findText(reference_root, QtCore.Qt.MatchFixedString)\n 
reference_file_selection_combobox.setCurrentIndex(index)\n self.update_log.insert(xtal + ' -> setting ' + reference_root + ' as input PDB file for DIMPLE')\n\n def refresh_reference_file_list(self):\n self.reference_file_list = self.get_reference_file_list(' ')\n self.populate_reference_combobox(self.reference_file_selection_combobox)\n\n def on_context_menu_initial_model(self, point):\n # show context menu\n self.popMenu_for_maps_table.exec_(self.sender().mapToGlobal(point))\n\n ####################################################################################################################\n # #\n # PANDDA TAB #\n # #\n ####################################################################################################################\n def select_pandda_input_template(self):\n mtzin = ''\n filepath_temp = QtGui.QFileDialog.getOpenFileNameAndFilter(self.window, 'Select Example PDB or MTZ File',\n self.initial_model_directory, '*.pdb;;*.mtz')\n filepath = str(tuple(filepath_temp)[0])\n pdbin = filepath.split('/')[-1]\n if filepath.endswith('.pdb'):\n pdbin = filepath.split('/')[-1]\n mtzin_temp = pdbin.replace('.pdb', '.mtz')\n if os.path.isfile(filepath.replace(pdbin, mtzin_temp)):\n mtzin = mtzin_temp\n else:\n mtzin = ''\n if filepath.endswith('.mtz'):\n mtzin = filepath.split('/')[-1]\n pdbin_temp = pdbin.replace('.mtz', '.pdb')\n if os.path.isfile(filepath.replace(mtzin, pdbin_temp)):\n pdbin = pdbin_temp\n else:\n pdbin = ''\n\n try:\n self.pandda_input_data_dir_entry.setText(\n '/'+os.path.join(*filepath.split('/')[0:len(filepath.split('/'))-2]))\n except TypeError:\n self.update_log.error('directory selection invalid') \n# if len(filepath.split('/')) - len(self.initial_model_directory.split('/')) == 2:\n# self.pandda_input_data_dir_entry.setText(os.path.join(self.initial_model_directory, '*'))\n# elif len(filepath.split('/')) - len(self.initial_model_directory.split('/')) > 2:\n# subdir = os.path.join(\n# 
*filepath.split('/')[len(self.initial_model_directory.split('/')) + 1:len(filepath.split('/')) - 1])\n# self.pandda_input_data_dir_entry.setText(os.path.join(self.initial_model_directory, '*', subdir))\n# else:\n# pass\n self.pandda_pdb_style_entry.setText(pdbin)\n self.pandda_mtz_style_entry.setText(mtzin)\n\n def change_pandda_spg_label(self):\n combo_text = str(self.pandda_reference_file_selection_combobox.currentText())\n for file in self.reference_file_list:\n if file[0] == combo_text:\n self.pandda_reference_file_spg_label.setText(file[1])\n break\n\n def on_context_menu_pandda(self, point):\n # show context menu\n self.popMenu_for_pandda_table.exec_(self.sender().mapToGlobal(point))\n\n ####################################################################################################################\n # #\n # DEPO TAB #\n # #\n ####################################################################################################################\n def export_to_html(self):\n XChemWeb.export_to_html(self.html_export_directory,\n self.initial_model_directory,\n os.path.join(self.database_directory, self.data_source_file),\n self.xce_logfile).prepare('0')\n\n def export_to_html_CompChem(self):\n XChemWeb.export_to_html(self.html_export_directory,\n self.initial_model_directory,\n os.path.join(self.database_directory, self.data_source_file),\n self.xce_logfile).prepare('4')\n\n def export_to_html_deposition_ready(self):\n XChemWeb.export_to_html(self.html_export_directory,\n self.initial_model_directory,\n os.path.join(self.database_directory, self.data_source_file),\n self.xce_logfile).prepare('5')\n\n# self.update_log.insert('exporting contents of SQLite database into ' + self.html_export_directory)\n# os.system(\n# 'ccp4-python ' + os.getenv('XChemExplorer_DIR') + '/web/process_sqlite.py -t Summary -s ' + os.path.join(\n# self.database_directory, self.data_source_file) + ' -d ' + self.html_export_directory)\n# 
XChemWeb.create_ICM_input_file(self.html_export_directory,\n# os.path.join(self.database_directory, self.data_source_file))\n# self.update_log.insert('open ICMpro:')\n# self.update_log.insert('/dls/science/groups/i04-1/software/icm-3.8-5/icm64 -g')\n# self.update_log.insert('open file browser and navigate to ' + self.html_export_directory)\n# self.update_log.insert('drag and drop dsEvent_sqlite.icm into the main window')\n# self.update_log.insert('the script will appear in the Workspace Panel')\n# self.update_log.insert('right click on the script and select RUN')\n# self.update_log.insert('be patient, this may take a while, depending on the number of events')\n# self.status_bar.showMessage('please check terminal window for further information')\n\n# def select_ground_state_pdb(self):\n# p = QtGui.QFileDialog.getOpenFileNameAndFilter(self.window, 'Select File', os.getcwd(),'*.pdb')\n# pdb = str(tuple(p)[0])\n# self.ground_state_pdb_button_label.setText(pdb)\n\n def select_ground_state_mtz(self):\n m = QtGui.QFileDialog.getOpenFileNameAndFilter(self.window, 'Select File', os.getcwd(),'*.mtz')\n mtz = str(tuple(m)[0])\n self.ground_state_mtz_button_label.setText(mtz)\n\n def add_ground_state_db(self):\n pdb, mtz = self.auto_select_ground_state_reference_PDB()\n if pdb != None:\n db_dict = {'DimplePANDDApath': self.panddas_directory,\n 'PDB_file': pdb,\n 'MTZ_file': mtz}\n self.db.create_or_remove_missing_records_in_depositTable(self.xce_logfile, 'ground_state', 'ground_state',\n db_dict)\n else:\n self.update_log.error('could not find a suitable reference file; see messages above!')\n\n def auto_select_ground_state_reference_PDB(self):\n pdb = None\n mtz = None\n xtalList = []\n for dirs in glob.glob(os.path.join(self.panddas_directory,'processed_datasets','*')):\n xtal = dirs[dirs.rfind('/')+1:]\n if os.path.isfile(os.path.join(dirs,xtal+'-pandda-input.pdb')):\n pdbHeader = parse().PDBheader(os.path.join(dirs,xtal+'-pandda-input.pdb'))\n try:\n xtalList.append( 
[xtal, float(pdbHeader['Rfree']) , float(pdbHeader['ResolutionHigh']) ] )\n except ValueError:\n self.update_log.error('%s: cannot read Rfree or Resolution from PDB header; skipping...')\n pass\n self.update_log.insert('found %s PDB files in %s' %(str(len(xtalList)),os.path.join(self.panddas_directory,'processed_datasets')))\n if len(xtalList) >= 10:\n self.update_log.insert('sorting PDBs by Rfree and selecting the 10 with lowest value')\n rfree = sorted(xtalList, key=lambda x: x[1])[:10]\n self.update_log.insert('top 10 PDB files with lowest Rfree:')\n for item in rfree:\n self.update_log.insert('%s: Rfree = %s | Resolution = %s' %(item[0],str(round(item[1],3)),str(round(item[2],2))))\n self.update_log.insert('selecting PDB with highest resolution')\n reso = sorted(rfree, key=lambda x: x[2])[:1]\n self.update_log.insert('selected the following PDB file: %s: Rfree = %s | Resolution = %s' %(reso[0][0],str(round(reso[0][1],3)),str(round(reso[0][2],2))))\n pdb = os.path.join(self.panddas_directory,'processed_datasets',reso[0][0],reso[0][0]+'-pandda-input.pdb')\n mtz = os.path.join(self.panddas_directory,'processed_datasets',reso[0][0],reso[0][0]+'-pandda-input.mtz')\n else:\n self.update_log.error('found less than 10 valid PDB files in %s' %os.path.join(self.panddas_directory,'processed_datasets'))\n return pdb, mtz\n\n\n def prepare_ground_state_mmcif(self):\n self.update_log.insert('preparing mmcif file for apo structure deposition')\n self.prepare_models_for_deposition_ligand_bound('ground_state')\n\n def open_icm(self):\n self.update_log.insert('starting ICM...')\n self.work_thread = XChemThread.start_ICM(self.html_export_directory)\n self.connect(self.work_thread, QtCore.SIGNAL(\"finished()\"), self.thread_finished)\n self.work_thread.start()\n\n def prepare_files_for_zenodo_upload(self):\n self.update_log.insert('preparing files for ZENODO upload...')\n os.system('ccp4-python ' + os.getenv(\n 'XChemExplorer_DIR') + '/helpers/prepare_for_zenodo_upload.py ' + 
self.html_export_directory)\n\n def update_html_for_zenodo_upload(self):\n try:\n uploadID = int(self.zenodo_upload_id_entry.text())\n self.update_log.insert('updating html files for ZENODO upload,...')\n self.update_log.insert('ZENODO upload = ' + str(uploadID))\n os.system('ccp4-python ' + os.getenv(\n 'XChemExplorer_DIR') + '/helpers/prepare_for_zenodo_upload.py {0!s} {1!s}'.format(\n self.html_export_directory, uploadID))\n except ValueError:\n self.update_log.insert('zenodo upload ID must be an integer!')\n\n ####################################################################################################################\n # #\n # SETTINGS TAB #\n # #\n ####################################################################################################################\n def settings_button_clicked(self):\n if self.sender().text() == 'Select Project Directory':\n self.initial_model_directory = str(QtGui.QFileDialog.getExistingDirectory(self.window, \"Select Directory\"))\n self.initial_model_directory_label.setText(self.initial_model_directory)\n self.pandda_input_data_dir_entry.setText(self.initial_model_directory)\n self.settings['initial_model_directory'] = self.initial_model_directory\n if self.sender().text() == 'Select Reference Structure Directory':\n reference_directory_temp = str(QtGui.QFileDialog.getExistingDirectory(self.window, \"Select Directory\"))\n if reference_directory_temp != self.reference_directory:\n self.reference_directory = reference_directory_temp\n self.update_reference_files(' ')\n self.reference_directory_label.setText(self.reference_directory)\n self.settings['reference_directory'] = self.reference_directory\n if self.sender().text() == 'Select Data Source File':\n filepath_temp = QtGui.QFileDialog.getOpenFileNameAndFilter(self.window, 'Select File',\n self.database_directory, '*.sqlite')\n filepath = str(tuple(filepath_temp)[0])\n self.data_source_file = filepath.split('/')[-1]\n self.database_directory = 
filepath[:filepath.rfind('/')]\n self.settings['database_directory'] = self.database_directory\n self.settings['data_source'] = os.path.join(self.database_directory, self.data_source_file)\n write_enabled = self.check_write_permissions_of_data_source()\n if not write_enabled:\n self.data_source_set = False\n else:\n self.data_source_set = True\n self.data_source_file_label.setText(os.path.join(self.database_directory, self.data_source_file))\n self.db = XChemDB.data_source(os.path.join(self.database_directory, self.data_source_file))\n self.db.create_missing_columns()\n self.datasource_menu_reload_samples()\n if self.sender().text() == 'Select Data Collection Directory':\n dir_name = str(QtGui.QFileDialog.getExistingDirectory(self.window, \"Select Directory\"))\n if dir_name != self.beamline_directory:\n self.beamline_directory = dir_name\n self.target_list, self.visit_list = XChemMain.get_target_and_visit_list(self.beamline_directory,self.read_agamemnon.isChecked())\n self.populate_target_selection_combobox(self.target_selection_combobox)\n self.beamline_directory_label.setText(self.beamline_directory)\n self.settings['beamline_directory'] = self.beamline_directory\n\n if self.sender().text() == 'Select Existing\\nCollection Summary File':\n if self.datasets_summary_file != '':\n filepath_temp = QtGui.QFileDialog.getOpenFileNameAndFilter(self.window, 'Select File',\n self.datasets_summary_file[\n :self.datasets_summary_file.rfind(\n '/')], '*.pkl')\n else:\n filepath_temp = QtGui.QFileDialog.getOpenFileNameAndFilter(self.window, 'Select File', os.getcwd(),\n '*.pkl')\n filepath = str(tuple(filepath_temp)[0])\n self.datasets_summary_file = filepath\n self.datasets_summary_file_label.setText(self.datasets_summary_file)\n self.settings['datasets_summary'] = self.datasets_summary_file\n\n if self.sender().text() == 'Assign New\\nCollection Summary File':\n if self.datasets_summary_file != '':\n file_name = str(QtGui.QFileDialog.getSaveFileName(self.window, 'New 
file',\n self.datasets_summary_file[\n :self.datasets_summary_file.rfind('/')]))\n else:\n file_name = str(QtGui.QFileDialog.getSaveFileName(self.window, 'New file', self.current_directory))\n # make sure that the file always has .pkl extension\n if str(file_name).rfind('.') != -1:\n file_name = file_name[:file_name.rfind('.')] + '.pkl'\n else:\n file_name = file_name + '.pkl'\n self.datasets_summary_file = file_name\n self.datasets_summary_file_label.setText(self.datasets_summary_file)\n self.settings['datasets_summary'] = self.datasets_summary_file\n\n if self.sender().text() == 'Select CCP4_SCR Directory':\n self.ccp4_scratch_directory = str(QtGui.QFileDialog.getExistingDirectory(self.window, \"Select Directory\"))\n self.ccp4_scratch_directory_label.setText(self.ccp4_scratch_directory)\n self.settings['ccp4_scratch'] = self.ccp4_scratch_directory\n if self.sender().text() == 'Select PanDDA Directory':\n self.panddas_directory = str(QtGui.QFileDialog.getExistingDirectory(self.window, \"Select Directory\"))\n self.panddas_directory_label.setText(self.panddas_directory)\n self.pandda_output_data_dir_entry.setText(self.panddas_directory)\n self.ground_state_pandda_directory_label.setText(self.panddas_directory)\n print('PANDDA', self.panddas_directory)\n self.settings['panddas_directory'] = self.panddas_directory\n\n self.layout_funcs.pandda_html(self)\n\n if self.sender().text() == 'Select HTML Export Directory':\n self.html_export_directory = str(QtGui.QFileDialog.getExistingDirectory(self.window, \"Select Directory\"))\n self.html_export_directory_label.setText(self.html_export_directory)\n self.settings['html_export_directory'] = self.html_export_directory\n\n if self.sender().text() == 'Select Group deposition Directory':\n self.group_deposit_directory = str(QtGui.QFileDialog.getExistingDirectory(self.window, \"Select Directory\"))\n self.group_deposition_directory_label.setText(self.group_deposit_directory)\n self.settings['group_deposit_directory'] = 
self.group_deposit_directory\n\n #self.datasource_menu_reload_samples()\n\n\n\n ######################################### sort stuff below here ####################################################\n def select_sample_for_dimple(self):\n indexes = self.maps_table.selectionModel().selectedRows()\n for index in sorted(indexes):\n xtal = str(self.maps_table.item(index.row(), 0).text())\n self.update_log.insert('{0!s} is marked for DIMPLE'.format(index.row()))\n self.initial_model_dimple_dict[xtal][0].setChecked(True)\n\n def update_summary_plot(self):\n if self.data_source_set:\n XChemPlots.summary_plot(os.path.join(self.database_directory, self.data_source_file),\n self.overview_axes).update_overview()\n self.overview_canvas.draw()\n\n def show_preferences(self):\n preferences = QtGui.QMessageBox()\n preferencesLayout = preferences.layout()\n\n vbox = QtGui.QVBoxLayout()\n settings_hbox_filename_root = QtGui.QHBoxLayout()\n filename_root_label = QtGui.QLabel('filename root:')\n settings_hbox_filename_root.addWidget(filename_root_label)\n filename_root_input = QtGui.QLineEdit()\n filename_root_input.setFixedWidth(400)\n filename_root_input.setText(str(self.filename_root))\n filename_root_input.textChanged[str].connect(self.change_filename_root)\n settings_hbox_filename_root.addWidget(filename_root_input)\n vbox.addLayout(settings_hbox_filename_root)\n\n settings_hbox_adjust_allowed_unit_cell_difference = QtGui.QHBoxLayout()\n adjust_allowed_unit_cell_difference_label = QtGui.QLabel(\n 'Max. 
Allowed Unit Cell Difference between Reference and Target (%):')\n settings_hbox_adjust_allowed_unit_cell_difference.addWidget(adjust_allowed_unit_cell_difference_label)\n adjust_allowed_unit_cell_difference = QtGui.QLineEdit()\n adjust_allowed_unit_cell_difference.setFixedWidth(200)\n adjust_allowed_unit_cell_difference.setText(str(self.allowed_unitcell_difference_percent))\n adjust_allowed_unit_cell_difference.textChanged[str].connect(self.change_allowed_unitcell_difference_percent)\n settings_hbox_adjust_allowed_unit_cell_difference.addWidget(adjust_allowed_unit_cell_difference)\n vbox.addLayout(settings_hbox_adjust_allowed_unit_cell_difference)\n\n settings_hbox_acceptable_low_resolution_limit = QtGui.QHBoxLayout()\n adjust_acceptable_low_resolution_limit_label = QtGui.QLabel(\n 'Acceptable low resolution limit for datasets (in Angstrom):')\n settings_hbox_acceptable_low_resolution_limit.addWidget(adjust_acceptable_low_resolution_limit_label)\n adjust_acceptable_low_resolution_limit = QtGui.QLineEdit()\n adjust_acceptable_low_resolution_limit.setFixedWidth(200)\n adjust_acceptable_low_resolution_limit.setText(str(self.acceptable_low_resolution_limit_for_data))\n adjust_acceptable_low_resolution_limit.textChanged[str].connect(self.change_acceptable_low_resolution_limit)\n settings_hbox_acceptable_low_resolution_limit.addWidget(adjust_acceptable_low_resolution_limit)\n vbox.addLayout(settings_hbox_acceptable_low_resolution_limit)\n\n vbox_data = QtGui.QVBoxLayout()\n vbox_data.addWidget(\n QtGui.QLabel('Select amount of processed data you wish to copy to initial_model directory:'))\n self.preferences_data_to_copy_combobox = QtGui.QComboBox()\n for item in self.preferences_data_to_copy:\n self.preferences_data_to_copy_combobox.addItem(item[0])\n self.preferences_data_to_copy_combobox.currentIndexChanged.connect(\n self.preferences_data_to_copy_combobox_changed)\n vbox_data.addWidget(self.preferences_data_to_copy_combobox)\n vbox.addLayout(vbox_data)\n\n 
vbox_select = QtGui.QVBoxLayout()\n vbox_select.addWidget(QtGui.QLabel('Dataset Selection Mechanism:'))\n self.preferences_selection_mechanism_combobox = QtGui.QComboBox()\n for item in self.preferences_selection_mechanism:\n self.preferences_selection_mechanism_combobox.addItem(item)\n self.preferences_selection_mechanism_combobox.currentIndexChanged.connect(\n self.preferences_selection_mechanism_combobox_changed)\n index = self.preferences_selection_mechanism_combobox.findText(self.preferences['dataset_selection_mechanism'], QtCore.Qt.MatchFixedString)\n self.preferences_selection_mechanism_combobox.setCurrentIndex(index)\n vbox_select.addWidget(self.preferences_selection_mechanism_combobox)\n vbox.addLayout(vbox_select)\n\n# vbox_inital_refinement = QtGui.QVBoxLayout()\n# vbox_inital_refinement.addWidget(QtGui.QLabel('Initial Refinement Pipeline:'))\n# self.preferences_initial_refinement_combobox = QtGui.QComboBox()\n# for item in self.preferences_initial_refinement_pipeline:\n# self.preferences_initial_refinement_combobox.addItem(item)\n# self.preferences_initial_refinement_combobox.currentIndexChanged.connect(\n# self.preferences_initial_refinement_combobox_changed)\n# index = self.preferences_initial_refinement_combobox.findText(self.preferences['initial_refinement_pipeline'], QtCore.Qt.MatchFixedString)\n# self.preferences_initial_refinement_combobox.setCurrentIndex(index)\n# vbox_inital_refinement.addWidget(self.preferences_initial_refinement_combobox)\n# vbox.addLayout(vbox_inital_refinement)\n\n vbox_restraints = QtGui.QVBoxLayout()\n vbox_restraints.addWidget(QtGui.QLabel('Restraints generation program:'))\n self.preferences_restraints_generation_combobox = QtGui.QComboBox()\n program_list = []\n\n if self.external_software['acedrg']:\n program_list.append('acedrg')\n self.restraints_program = 'acedrg'\n if self.external_software['phenix.elbow']: program_list.append('phenix.elbow')\n if self.external_software['grade']: program_list.append('grade')\n for 
item in program_list:\n self.preferences_restraints_generation_combobox.addItem(item)\n self.preferences_restraints_generation_combobox.currentIndexChanged.connect(\n self.preferences_restraints_generation_combobox_changed)\n index = self.preferences_restraints_generation_combobox.findText(self.restraints_program,\n QtCore.Qt.MatchFixedString)\n self.preferences_restraints_generation_combobox.setCurrentIndex(index)\n vbox_restraints.addWidget(self.preferences_restraints_generation_combobox)\n vbox.addLayout(vbox_restraints)\n\n hbox = QtGui.QHBoxLayout()\n hbox.addWidget(QtGui.QLabel('XCE logfile:'))\n self.xce_logfile_label = QtGui.QLabel(self.xce_logfile)\n hbox.addWidget(self.xce_logfile_label)\n button = QtGui.QPushButton(\"Change\")\n button.clicked.connect(self.set_xce_logfile)\n hbox.addWidget(button)\n vbox.addLayout(hbox)\n\n settings_hbox_max_queue_jobs = QtGui.QHBoxLayout()\n adjust_max_queue_jobs_label = QtGui.QLabel('Max. number of jobs running at once on DLS cluster:')\n settings_hbox_max_queue_jobs.addWidget(adjust_max_queue_jobs_label)\n adjust_max_queue_jobs = QtGui.QLineEdit()\n adjust_max_queue_jobs.setFixedWidth(200)\n adjust_max_queue_jobs.setText(str(self.max_queue_jobs))\n adjust_max_queue_jobs.textChanged[str].connect(self.change_max_queue_jobs)\n settings_hbox_max_queue_jobs.addWidget(adjust_max_queue_jobs)\n vbox.addLayout(settings_hbox_max_queue_jobs)\n\n settings_hbox_remote_qsub = QtGui.QHBoxLayout()\n remote_qsub_label = QtGui.QLabel('remote qsub:')\n settings_hbox_remote_qsub.addWidget(remote_qsub_label)\n self.remote_qsub_checkbox = QtGui.QCheckBox('use')\n self.remote_qsub_checkbox.toggled.connect(self.run_qsub_remotely)\n\n settings_hbox_dimple_twin_mode = QtGui.QHBoxLayout()\n self.dimple_twin_mode_label_checkbox = QtGui.QCheckBox('run DIMPLE in TWIN mode')\n if self.preferences['dimple_twin_mode']:\n self.dimple_twin_mode_label_checkbox.setChecked(True)\n 
self.dimple_twin_mode_label_checkbox.toggled.connect(self.dimple_change_twin_mode)\n settings_hbox_dimple_twin_mode.addWidget(self.dimple_twin_mode_label_checkbox)\n vbox.addLayout(settings_hbox_dimple_twin_mode)\n\n if self.using_remote_qsub_submission:\n self.remote_qsub_checkbox.setChecked(True)\n settings_hbox_remote_qsub.addWidget(self.remote_qsub_checkbox)\n self.remote_qsub_command = QtGui.QLineEdit()\n self.remote_qsub_command.setFixedWidth(550)\n self.remote_qsub_command.setText(self.remote_qsub_submission)\n settings_hbox_remote_qsub.addWidget(self.remote_qsub_command)\n vbox.addLayout(settings_hbox_remote_qsub)\n\n hbox = QtGui.QHBoxLayout()\n hbox.addWidget(QtGui.QLabel('Additional CIF file for non-standard ligand:'))\n self.second_cif_file_label = QtGui.QLabel(self.second_cif_file)\n hbox.addWidget(self.second_cif_file_label)\n button = QtGui.QPushButton(\"Select\")\n button.clicked.connect(self.set_second_cif_file)\n hbox.addWidget(button)\n vbox.addLayout(hbox)\n\n\n# settings_hbox_max_queue_jobs.addWidget(adjust_max_queue_jobs_label)\n# adjust_max_queue_jobs = QtGui.QLineEdit()\n# adjust_max_queue_jobs.setFixedWidth(200)\n# adjust_max_queue_jobs.setText(str(self.max_queue_jobs))\n# adjust_max_queue_jobs.textChanged[str].connect(self.change_max_queue_jobs)\n# settings_hbox_max_queue_jobs.addWidget(adjust_max_queue_jobs)\n# vbox.addLayout(settings_hbox_max_queue_jobs)\n#\n# apply_button = QtGui.QPushButton('Apply')\n# apply_button.clicked.connect(self.run_qsub_remotely)\n# settings_hbox_remote_qsub.addWidget(apply_button)\n\n\n preferencesLayout.addLayout(vbox, 0, 0)\n\n preferences.exec_();\n\n# def set_second_cif_file(self):\n# mb = QtGui.QMessageBox()\n# mbLayout = mb.layout()\n# vbox = QtGui.QVBoxLayout()\n# vbox.addWidget(QtGui.QLabel('CIF file to be merged into ligand CIF files:'))\n# self.second_cif_file_label = QtGui.QLabel(self.second_cif_file)\n# vbox.addWidget(self.second_cif_file_label)\n# button = QtGui.QPushButton(\"Select\")\n# 
button.clicked.connect(self.set_second_cif_file)
#        vbox.addWidget(button)
#        mbLayout.addLayout(vbox, 0, 0)
#        mb.addButton(QtGui.QPushButton('Yes'), QtGui.QMessageBox.YesRole)
#        mb.addButton(QtGui.QPushButton('No'), QtGui.QMessageBox.RejectRole)
#        reply = mb.exec_();



    def dimple_change_twin_mode(self):
        """Toggle the 'dimple_twin_mode' preference and log the change.

        Connected to the TWIN-mode checkbox in the preferences dialog.
        """
        if self.preferences['dimple_twin_mode']:
            self.update_log.insert('changing preferences: turning off DIMPLE in TWIN mode')
            self.preferences['dimple_twin_mode'] = False
        else:
            self.update_log.insert('changing preferences: changing DIMPLE to TWIN mode')
            self.preferences['dimple_twin_mode'] = True

    def run_qsub_remotely(self):
        """Enable or disable remote qsub job submission.

        Reads the command from the preferences dialog's line edit; depending on
        the checkbox state, stores it in (or clears it from) external_software
        and settings, and updates the using_remote_qsub_submission flag.
        """
        self.remote_qsub_submission = str(self.remote_qsub_command.text())
        print(str(self.remote_qsub_submission))
        if self.remote_qsub_checkbox.isChecked():
            self.update_log.insert('submitting jobs to remote machine with: %s' % self.remote_qsub_submission)
            self.external_software['qsub_remote'] = self.remote_qsub_submission
            self.using_remote_qsub_submission = True
            self.settings['remote_qsub'] = self.remote_qsub_submission
        else:
            self.update_log.insert('switching off remote job submission')
            self.external_software['qsub_remote'] = ''
            self.settings['remote_qsub'] = ''
            self.using_remote_qsub_submission = False

    def enter_pdb_codes(self):
        """Open a modal dialog for pasting PDB email text.

        The 'Update Database' button feeds the pasted text to
        update_database_with_pdb_codes.
        """
        pdbID_entry = QtGui.QMessageBox()
        pdbID_entryLayout = pdbID_entry.layout()

        vbox = QtGui.QVBoxLayout()

        frame = QtGui.QFrame()
        frame.setFrameShape(QtGui.QFrame.StyledPanel)

        grid = QtGui.QGridLayout()

        grid.addWidget(QtGui.QLabel('Text from PDB email'), 0, 0)
        self.pdb_code_entry = QtGui.QTextEdit()
        self.pdb_code_entry.setText('')
        self.pdb_code_entry.setFixedWidth(500)
        grid.addWidget(self.pdb_code_entry, 1, 0, 20, 1)

        frame.setLayout(grid)
        vbox.addWidget(frame)

        hbox = QtGui.QHBoxLayout()
        button = QtGui.QPushButton('Update Database')
        button.clicked.connect(self.update_database_with_pdb_codes)
        hbox.addWidget(button)

        vbox.addLayout(hbox)
        pdbID_entryLayout.addLayout(vbox, 0, 0)
        pdbID_entry.exec_();

    def add_label_information(self):
        """Open a modal dialog for editing up to five label/description pairs.

        Existing values are loaded from the database; if no data source has been
        configured yet (db attribute missing), warns and returns early.
        The 'Update Database' button triggers update_database_with_labelInfo.
        """
        label_entry = QtGui.QMessageBox()
        label_entryLayout = label_entry.layout()

        try:
            labelInfo = self.db.get_label_info_from_db()
        except AttributeError:
            # self.db does not exist until a data source has been selected
            self.update_log.warning('please specify DB file first')
            return None

        vbox = QtGui.QVBoxLayout()

        frame = QtGui.QFrame()
        frame.setFrameShape(QtGui.QFrame.StyledPanel)

        grid = QtGui.QGridLayout()
        grid.addWidget(QtGui.QLabel('label'), 0, 0)
        grid.addWidget(QtGui.QLabel('description'), 0, 1)

        # NOTE(review): this re-creates self.remote_qsub_command (a widget owned by
        # the preferences dialog) but never adds it to this dialog's layout — looks
        # like a copy-paste leftover; confirm before removing
        self.remote_qsub_command = QtGui.QLineEdit()
        self.remote_qsub_command.setFixedWidth(550)
        self.remote_qsub_command.setText(self.remote_qsub_submission)

        # five editable label/description rows; rows beyond the available DB
        # entries are left blank (IndexError path)
        self.labelList = []
        for i in range(5):
            labelEdit = QtGui.QLineEdit()
            descriptionEdit = QtGui.QLineEdit()
            grid.addWidget(labelEdit, i + 1, 0)
            grid.addWidget(descriptionEdit, i + 1, 1)
            try:
                labelEdit.setText(labelInfo[i][0])
                descriptionEdit.setText(labelInfo[i][1])
            except IndexError:
                labelEdit.setText('')
                descriptionEdit.setText('')
            labelEdit.setFixedWidth(100)
            descriptionEdit.setFixedWidth(500)
            self.labelList.append([labelEdit,descriptionEdit])
        frame.setLayout(grid)
        vbox.addWidget(frame)

        hbox = QtGui.QHBoxLayout()
        button = QtGui.QPushButton('Update Database')
        button.clicked.connect(self.update_database_with_labelInfo)
        hbox.addWidget(button)

        vbox.addLayout(hbox)
        label_entryLayout.addLayout(vbox, 0, 0)
        label_entry.exec_();







    def create_missing_apo_records_in_depositTable(self):
        """Create apo (ground-state) deposition records for all structures that lack one."""
        self.db.create_missing_apo_records_for_all_structures_in_depositTable(self.initial_model_directory,
                                                                              self.xce_logfile)

#    def update_file_information_of_apo_records(self):
#        XChemDeposit.update_file_locations_of_apo_structuresin_DB(
#            os.path.join(self.database_directory, self.data_source_file), self.initial_model_directory,
#            self.xce_logfile)

    def 
prepare_models_for_deposition_ligand_bound(self,structureType):
        """Prepare mmCIF files for PDB group deposition in a background thread.

        structureType: 'ground_state' validates and passes the ground-state
        PDB/MTZ/pandda-directory triple; any other value prepares ligand-bound
        models (ground_state stays empty) and honours the 'ignore event map'
        checkbox. Aborts (thread not started) if ground-state files are missing
        or the database has no ground-state entry.
        """
        start_thread = True
        self.update_log.insert('preparing mmcif files for PDB group deposition...')
        ignore_event_map = False
        if structureType == 'ground_state':
            try:
                self.update_log.insert('ground-state deposition')
                data_template_dict = self.db.get_deposit_dict_for_sample('ground_state')
                pdb = data_template_dict['PDB_file']
                self.update_log.insert('looking for ground-state PDB: ' + pdb)
                if not os.path.isfile(pdb):
                    self.update_log.error('ground-state PDB does not exist; stopping...')
                    start_thread = False
                mtz = data_template_dict['MTZ_file']
                self.update_log.insert('looking for ground-state MTZ: ' + mtz)
                if not os.path.isfile(mtz):
                    self.update_log.error('ground-state MTZ does not exist; stopping...')
                    start_thread = False
                ground_state = [ pdb,
                                 mtz,
                                 self.panddas_directory ]
            except KeyError:
                # DB row for 'ground_state' is missing one of the expected keys
                self.update_log.error('seems like there is no entry for ground-state in database')
                start_thread = False
        else:
            ground_state = []
            if self.deposition_bounnd_state_preparation_ignore_event_map.isChecked():
                ignore_event_map = True

#        structureType = "ligand_bound"

        if start_thread:
            if ground_state != []:
                self.update_log.insert('apo PDB: ' + ground_state[0])
                self.update_log.insert('apo MTZ: ' + ground_state[1])
                self.update_log.insert('pandda directory: ' + ground_state[2])
            overwrite_existing_mmcif = True
            # launch worker thread; progress/status/finished signals are routed
            # to the common GUI handlers (old-style Qt signal connections)
            self.work_thread = XChemDeposit.prepare_mmcif_files_for_deposition(
                os.path.join(self.database_directory, self.data_source_file),
                self.xce_logfile,
                overwrite_existing_mmcif,
                self.initial_model_directory,
                ground_state,
                ignore_event_map)
            self.explorer_active = 1
            self.connect(self.work_thread, QtCore.SIGNAL("update_progress_bar"), self.update_progress_bar)
            self.connect(self.work_thread, QtCore.SIGNAL("update_status_bar(QString)"), self.update_status_bar)
            self.connect(self.work_thread, QtCore.SIGNAL("finished()"), self.thread_finished)
            self.work_thread.start()

    def prepare_models_for_deposition_apo(self):
        """Prepare mmCIF files for deposition of apo structures in a background thread."""

        structureType = "apo"

        overwrite_existing_mmcif = True
        # NOTE(review): here the 5th positional argument is the string 'apo',
        # whereas the ligand-bound variant passes a list — presumably
        # prepare_mmcif_files_for_deposition accepts both; confirm in XChemDeposit
        self.work_thread = XChemDeposit.prepare_mmcif_files_for_deposition(
            os.path.join(self.database_directory, self.data_source_file),
            self.xce_logfile,
            overwrite_existing_mmcif,
            self.initial_model_directory,
            structureType)
        self.explorer_active = 1
        self.connect(self.work_thread, QtCore.SIGNAL("update_progress_bar"), self.update_progress_bar)
        self.connect(self.work_thread, QtCore.SIGNAL("update_status_bar(QString)"), self.update_status_bar)
        self.connect(self.work_thread, QtCore.SIGNAL("finished()"), self.thread_finished)
        self.work_thread.start()

    def prepare_for_group_deposition_upload_ligand_bound(self):
        """Bundle ligand-bound models for group deposition upload (background thread)."""

        self.work_thread = XChemDeposit.prepare_for_group_deposition_upload(
            os.path.join(self.database_directory, self.data_source_file),
            self.xce_logfile,
            self.group_deposit_directory,self.initial_model_directory,'ligand_bound')
        self.explorer_active = 1
        self.connect(self.work_thread, QtCore.SIGNAL("update_progress_bar"), self.update_progress_bar)
        self.connect(self.work_thread, QtCore.SIGNAL("update_status_bar(QString)"), self.update_status_bar)
        self.connect(self.work_thread, QtCore.SIGNAL("finished()"), self.thread_finished)
        self.work_thread.start()

    def prepare_for_group_deposition_upload_ground_state(self):
        """Bundle ground-state models for group deposition upload (background thread)."""

        self.work_thread = XChemDeposit.prepare_for_group_deposition_upload(
            os.path.join(self.database_directory, self.data_source_file),
            self.xce_logfile,
            self.group_deposit_directory,self.initial_model_directory,'ground_state')
        self.explorer_active = 1
        self.connect(self.work_thread, QtCore.SIGNAL("update_progress_bar"), self.update_progress_bar)
        self.connect(self.work_thread, QtCore.SIGNAL("update_status_bar(QString)"), self.update_status_bar)
        self.connect(self.work_thread, QtCore.SIGNAL("finished()"), self.thread_finished)
        self.work_thread.start()

    def 
check_smiles_in_db_and_pdb(self):\n\n self.work_thread = XChemDeposit.compare_smiles_in_db_with_ligand_in_pdb(self.initial_model_directory,\n os.path.join(self.database_directory,\n self.data_source_file),\n self.xce_logfile)\n self.explorer_active = 1\n self.connect(self.work_thread, QtCore.SIGNAL(\"update_progress_bar\"), self.update_progress_bar)\n self.connect(self.work_thread, QtCore.SIGNAL(\"update_status_bar(QString)\"), self.update_status_bar)\n self.connect(self.work_thread, QtCore.SIGNAL(\"finished()\"), self.thread_finished)\n self.connect(self.work_thread, QtCore.SIGNAL(\"show_error_dict\"), self.show_error_dict)\n self.work_thread.start()\n\n def deposition_data(self):\n\n depositData = QtGui.QMessageBox()\n depositDataLayout = depositData.layout()\n\n vbox = QtGui.QVBoxLayout()\n\n deposit_tab_widget = QtGui.QTabWidget()\n deposit_tab_list = ['Contact',\n 'General',\n 'Authors',\n 'Citation',\n 'Molecule',\n 'Misc',\n 'Methods',\n 'Software',\n 'Funding' ]\n\n deposit_tab_dict = {}\n for page in deposit_tab_list:\n tab = QtGui.QWidget()\n vb = QtGui.QVBoxLayout(tab)\n deposit_tab_widget.addTab(tab, page)\n deposit_tab_dict[page] = [tab, vb]\n\n ## PI and scientist info\n vb = QtGui.QVBoxLayout()\n hbox = QtGui.QHBoxLayout()\n\n frame = QtGui.QFrame()\n frame.setFrameShape(QtGui.QFrame.StyledPanel)\n\n grid = QtGui.QGridLayout()\n grid.addWidget(QtGui.QLabel('Principal Investigator'), 0, 0)\n\n grid.addWidget(QtGui.QLabel('Salutation'), 1, 0)\n self.contact_author_PI_salutation = QtGui.QLineEdit()\n self.contact_author_PI_salutation.setText('Dr.')\n self.contact_author_PI_salutation.setFixedWidth(200)\n grid.addWidget(self.contact_author_PI_salutation, 1, 1)\n\n grid.addWidget(QtGui.QLabel('First name'), 2, 0)\n self.contact_author_PI_first_name = QtGui.QLineEdit()\n self.contact_author_PI_first_name.setText('')\n self.contact_author_PI_first_name.setFixedWidth(200)\n grid.addWidget(self.contact_author_PI_first_name, 2, 1)\n\n 
grid.addWidget(QtGui.QLabel('Last name'), 3, 0)\n self.contact_author_PI_last_name = QtGui.QLineEdit()\n self.contact_author_PI_last_name.setText('')\n self.contact_author_PI_last_name.setFixedWidth(200)\n grid.addWidget(self.contact_author_PI_last_name, 3, 1)\n\n grid.addWidget(QtGui.QLabel('Middle name'), 4, 0)\n self.contact_author_PI_middle_name = QtGui.QLineEdit()\n self.contact_author_PI_middle_name.setText('')\n self.contact_author_PI_middle_name.setFixedWidth(200)\n self.contact_author_PI_middle_name.setStyleSheet(\"background-color: rgb(192, 192, 192);\")\n grid.addWidget(self.contact_author_PI_middle_name, 4, 1)\n\n grid.addWidget(QtGui.QLabel('PI role'), 5, 0)\n self.contact_author_PI_role = QtGui.QComboBox()\n# PIroles = ['group leader', 'principal investigator/group leader', 'investigator']\n PIroles = ['principal investigator/group leader']\n for item in PIroles: self.contact_author_PI_role.addItem(item)\n grid.addWidget(self.contact_author_PI_role, 5, 1)\n\n grid.addWidget(QtGui.QLabel('Organization type'), 6, 0)\n self.contact_author_PI_organization_type = QtGui.QComboBox()\n Organizations = ['academic', 'commercial', 'government']\n for item in Organizations: self.contact_author_PI_organization_type.addItem(item)\n grid.addWidget(self.contact_author_PI_organization_type, 6, 1)\n\n grid.addWidget(QtGui.QLabel('Organization Name'), 7, 0)\n self.contact_author_PI_organization_name = QtGui.QLineEdit()\n self.contact_author_PI_organization_name.setText('')\n self.contact_author_PI_organization_name.setFixedWidth(200)\n grid.addWidget(self.contact_author_PI_organization_name, 7, 1)\n\n grid.addWidget(QtGui.QLabel('Email'), 8, 0)\n self.contact_author_PI_email = QtGui.QLineEdit()\n self.contact_author_PI_email.setText('')\n self.contact_author_PI_email.setFixedWidth(200)\n grid.addWidget(self.contact_author_PI_email, 8, 1)\n\n grid.addWidget(QtGui.QLabel('Street'), 9, 0)\n self.contact_author_PI_address = QtGui.QLineEdit()\n 
self.contact_author_PI_address.setText('')\n self.contact_author_PI_address.setFixedWidth(200)\n grid.addWidget(self.contact_author_PI_address, 9, 1)\n\n grid.addWidget(QtGui.QLabel('City'), 10, 0)\n self.contact_author_PI_city = QtGui.QLineEdit()\n self.contact_author_PI_city.setText('')\n self.contact_author_PI_city.setFixedWidth(200)\n grid.addWidget(self.contact_author_PI_city, 10, 1)\n\n grid.addWidget(QtGui.QLabel('State'), 11, 0)\n self.contact_author_PI_State_or_Province = QtGui.QLineEdit()\n self.contact_author_PI_State_or_Province.setText('')\n self.contact_author_PI_State_or_Province.setFixedWidth(200)\n self.contact_author_PI_State_or_Province.setStyleSheet(\"background-color: rgb(192, 192, 192);\")\n grid.addWidget(self.contact_author_PI_State_or_Province, 11, 1)\n\n grid.addWidget(QtGui.QLabel('ZIP code'), 12, 0)\n self.contact_author_PI_Zip_Code = QtGui.QLineEdit()\n self.contact_author_PI_Zip_Code.setText('')\n self.contact_author_PI_Zip_Code.setFixedWidth(200)\n grid.addWidget(self.contact_author_PI_Zip_Code, 12, 1)\n\n grid.addWidget(QtGui.QLabel('Country'), 13, 0)\n self.contact_author_PI_Country = QtGui.QLineEdit()\n self.contact_author_PI_Country.setText('')\n self.contact_author_PI_Country.setFixedWidth(200)\n grid.addWidget(self.contact_author_PI_Country, 13, 1)\n\n grid.addWidget(QtGui.QLabel('Phone'), 14, 0)\n self.contact_author_PI_phone_number = QtGui.QLineEdit()\n self.contact_author_PI_phone_number.setText('')\n self.contact_author_PI_phone_number.setFixedWidth(200)\n grid.addWidget(self.contact_author_PI_phone_number, 14, 1)\n\n grid.addWidget(QtGui.QLabel('ORCID'), 15, 0)\n self.contact_author_PI_ORCID = QtGui.QLineEdit()\n self.contact_author_PI_ORCID.setText('')\n self.contact_author_PI_ORCID.setFixedWidth(200)\n grid.addWidget(self.contact_author_PI_ORCID, 15, 1)\n\n frame.setLayout(grid)\n hbox.addWidget(frame)\n\n frame = QtGui.QFrame()\n frame.setFrameShape(QtGui.QFrame.StyledPanel)\n grid = QtGui.QGridLayout()\n 
grid.addWidget(QtGui.QLabel('Responsible Scientist'), 0, 0)\n\n grid.addWidget(QtGui.QLabel('Salutation'), 1, 0)\n self.contact_author_salutation = QtGui.QLineEdit()\n self.contact_author_salutation.setText('Dr.')\n self.contact_author_salutation.setFixedWidth(200)\n grid.addWidget(self.contact_author_salutation, 1, 1)\n\n grid.addWidget(QtGui.QLabel('First name'), 2, 0)\n self.contact_author_first_name = QtGui.QLineEdit()\n self.contact_author_first_name.setText('')\n self.contact_author_first_name.setFixedWidth(200)\n grid.addWidget(self.contact_author_first_name, 2, 1)\n\n grid.addWidget(QtGui.QLabel('Last name'), 3, 0)\n self.contact_author_last_name = QtGui.QLineEdit()\n self.contact_author_last_name.setText('')\n self.contact_author_last_name.setFixedWidth(200)\n grid.addWidget(self.contact_author_last_name, 3, 1)\n\n grid.addWidget(QtGui.QLabel('Middle name'), 4, 0)\n self.contact_author_middle_name = QtGui.QLineEdit()\n self.contact_author_middle_name.setText('')\n self.contact_author_middle_name.setFixedWidth(200)\n self.contact_author_middle_name.setStyleSheet(\"background-color: rgb(192, 192, 192);\")\n grid.addWidget(self.contact_author_middle_name, 4, 1)\n\n grid.addWidget(QtGui.QLabel('Role'), 5, 0)\n\n self.contact_author_role = QtGui.QComboBox()\n ScientistRoles = ['responsible scientist', 'investigator']\n for item in ScientistRoles: self.contact_author_role.addItem(item)\n grid.addWidget(self.contact_author_role, 5, 1)\n\n grid.addWidget(QtGui.QLabel('Organization type'), 6, 0)\n\n self.contact_author_organization_type = QtGui.QComboBox()\n for item in Organizations: self.contact_author_organization_type.addItem(item)\n grid.addWidget(self.contact_author_organization_type, 6, 1)\n\n grid.addWidget(QtGui.QLabel('Organization Name'), 7, 0)\n self.contact_author_organization_name = QtGui.QLineEdit()\n self.contact_author_organization_name.setText('')\n self.contact_author_organization_name.setFixedWidth(200)\n 
grid.addWidget(self.contact_author_organization_name, 7, 1)\n\n grid.addWidget(QtGui.QLabel('Email'), 8, 0)\n self.contact_author_email = QtGui.QLineEdit()\n self.contact_author_email.setText('')\n self.contact_author_email.setFixedWidth(200)\n grid.addWidget(self.contact_author_email, 8, 1)\n\n grid.addWidget(QtGui.QLabel('Street'), 9, 0)\n self.contact_author_address = QtGui.QLineEdit()\n self.contact_author_address.setText('')\n self.contact_author_address.setFixedWidth(200)\n grid.addWidget(self.contact_author_address, 9, 1)\n\n grid.addWidget(QtGui.QLabel('City'), 10, 0)\n self.contact_author_city = QtGui.QLineEdit()\n self.contact_author_city.setText('')\n self.contact_author_city.setFixedWidth(200)\n grid.addWidget(self.contact_author_city, 10, 1)\n\n grid.addWidget(QtGui.QLabel('State'), 11, 0)\n self.contact_author_State_or_Province = QtGui.QLineEdit()\n self.contact_author_State_or_Province.setText('')\n self.contact_author_State_or_Province.setFixedWidth(200)\n self.contact_author_State_or_Province.setStyleSheet(\"background-color: rgb(192, 192, 192);\")\n grid.addWidget(self.contact_author_State_or_Province, 11, 1)\n\n grid.addWidget(QtGui.QLabel('ZIP code'), 12, 0)\n self.contact_author_Zip_Code = QtGui.QLineEdit()\n self.contact_author_Zip_Code.setText('')\n self.contact_author_Zip_Code.setFixedWidth(200)\n grid.addWidget(self.contact_author_Zip_Code, 12, 1)\n\n grid.addWidget(QtGui.QLabel('Country'), 13, 0)\n self.contact_author_Country = QtGui.QLineEdit()\n self.contact_author_Country.setText('')\n self.contact_author_Country.setFixedWidth(200)\n grid.addWidget(self.contact_author_Country, 13, 1)\n\n grid.addWidget(QtGui.QLabel('Phone'), 14, 0)\n self.contact_author_phone_number = QtGui.QLineEdit()\n self.contact_author_phone_number.setText('')\n self.contact_author_phone_number.setFixedWidth(200)\n grid.addWidget(self.contact_author_phone_number, 14, 1)\n\n grid.addWidget(QtGui.QLabel('ORCID'), 15, 0)\n self.contact_author_ORCID = 
QtGui.QLineEdit()\n self.contact_author_ORCID.setText('')\n self.contact_author_ORCID.setFixedWidth(200)\n grid.addWidget(self.contact_author_ORCID, 15, 1)\n\n frame.setLayout(grid)\n hbox.addWidget(frame)\n\n vb.addLayout(hbox)\n vb.addWidget(QtGui.QLabel(XChemToolTips.deposition_interface_note()))\n vb.addStretch(1)\n\n deposit_tab_dict['Contact'][1].addLayout(vb)\n\n ## release status\n vb = QtGui.QVBoxLayout()\n\n frame = QtGui.QFrame()\n frame.setFrameShape(QtGui.QFrame.StyledPanel)\n\n grid = QtGui.QGridLayout()\n grid.addWidget(QtGui.QLabel('Release status'), 0, 0)\n\n grid.addWidget(QtGui.QLabel('Release Status for sequence'), 4, 0)\n\n self.Release_status_for_sequence = QtGui.QComboBox()\n codeStatus = ['RELEASE NOW', 'HOLD FOR RELEASE']\n for item in codeStatus: self.Release_status_for_sequence.addItem(item)\n grid.addWidget(self.Release_status_for_sequence, 4, 1)\n\n grid.addWidget(QtGui.QLabel('Release Status for coordinates/ SF'), 8, 0)\n self.Release_status_for_coordinates = QtGui.QComboBox()\n coordStatus = ['RELEASE NOW', 'HOLD FOR PUBLICATION', 'HOLD FOR 4 WEEKS', 'HOLD FOR 6 MONTHS',\n 'HOLD FOR 1 YEAR']\n for item in coordStatus: self.Release_status_for_coordinates.addItem(item)\n grid.addWidget(self.Release_status_for_coordinates, 8, 1)\n\n frame.setLayout(grid)\n vb.addWidget(frame)\n\n frame = QtGui.QFrame()\n frame.setFrameShape(QtGui.QFrame.StyledPanel)\n\n grid = QtGui.QGridLayout()\n grid.addWidget(QtGui.QLabel('Title & Details'), 0, 0)\n note = (\n 'Note: supported wildcards: $ProteinName,$CompoundName; e.g. 
\"Crystal Structure of human JMJD2D in complex with N2317a\"')\n grid.addWidget(QtGui.QLabel(note), 1, 0)\n\n grid.addWidget(QtGui.QLabel('Group deposition title'), 2, 0)\n self.group_deposition_title = QtGui.QLineEdit()\n self.group_deposition_title.setText('PanDDA analysis group deposition')\n self.group_deposition_title.setFixedWidth(600)\n # self.group_deposition_title.setStyleSheet(\"background-color: rgb(192, 192, 192);\")\n grid.addWidget(self.group_deposition_title, 2, 1)\n\n grid.addWidget(QtGui.QLabel('Description'), 3, 0)\n self.group_description = QtGui.QLineEdit()\n self.group_description.setText(\n 'XDomainX of XOrganismX $ProteinName screened against the XXX Fragment Library by X-ray Crystallography at the XChem facility of Diamond Light Source beamline I04-1')\n self.group_description.setFixedWidth(600)\n grid.addWidget(self.group_description, 3, 1)\n\n grid.addWidget(QtGui.QLabel('Structure Title (ligand bound)'), 4, 0)\n self.structure_title = QtGui.QLineEdit()\n self.structure_title.setText('Crystal Structure of $ProteinName in complex with $CompoundName')\n self.structure_title.setFixedWidth(600)\n grid.addWidget(self.structure_title, 4, 1)\n\n note = ('\\n\\nApo Structure:\\nonly use if you want to deposit PanDDA models!')\n grid.addWidget(QtGui.QLabel(note), 6, 0)\n\n grid.addWidget(QtGui.QLabel('Structure Title (apo)'), 7, 0)\n self.structure_title_apo = QtGui.QLineEdit()\n self.structure_title_apo.setText(\n 'PanDDA analysis group deposition of ground-state model of $ProteinName')\n self.structure_title_apo.setFixedWidth(600)\n grid.addWidget(self.structure_title_apo, 7, 1)\n\n frame.setLayout(grid)\n vb.addWidget(frame)\n\n vb.addStretch(1)\n\n deposit_tab_dict['General'][1].addLayout(vb)\n\n ## authors\n vb = QtGui.QVBoxLayout()\n\n frame = QtGui.QFrame()\n frame.setFrameShape(QtGui.QFrame.StyledPanel)\n\n grid = QtGui.QGridLayout()\n grid.addWidget(QtGui.QLabel('Deposition authors (e.g. 
Surname, F.M.)'), 0, 0)\n\n self.structure_author_name_List = []\n\n for column in range(0, 2):\n for row in range(1, 15):\n structure_author_name = QtGui.QLineEdit()\n structure_author_name.setText('')\n structure_author_name.setFixedWidth(300)\n grid.addWidget(structure_author_name, row, column)\n self.structure_author_name_List.append(structure_author_name)\n\n frame.setLayout(grid)\n vb.addWidget(frame)\n\n vb.addStretch(1)\n\n deposit_tab_dict['Authors'][1].addLayout(vb)\n\n ## primary citation\n vb = QtGui.QVBoxLayout()\n\n frame = QtGui.QFrame()\n frame.setFrameShape(QtGui.QFrame.StyledPanel)\n\n grid = QtGui.QGridLayout()\n grid.addWidget(QtGui.QLabel('Primary Citation'), 0, 0)\n\n grid.addWidget(QtGui.QLabel('ID'), 1, 0)\n self.primary_citation_id = QtGui.QLineEdit()\n self.primary_citation_id.setText('primary')\n self.primary_citation_id.setFixedWidth(500)\n grid.addWidget(self.primary_citation_id, 1, 1)\n\n grid.addWidget(QtGui.QLabel('Journal'), 2, 0)\n self.primary_citation_journal_abbrev = QtGui.QLineEdit()\n self.primary_citation_journal_abbrev.setText('To be published')\n self.primary_citation_journal_abbrev.setFixedWidth(500)\n grid.addWidget(self.primary_citation_journal_abbrev, 2, 1)\n\n grid.addWidget(QtGui.QLabel('Title'), 3, 0)\n self.primary_citation_title = QtGui.QLineEdit()\n self.primary_citation_title.setText('')\n self.primary_citation_title.setFixedWidth(500)\n self.primary_citation_title.setStyleSheet(\"background-color: rgb(192, 192, 192);\")\n grid.addWidget(self.primary_citation_title, 3, 1)\n\n grid.addWidget(QtGui.QLabel('Year'), 4, 0)\n self.primary_citation_year = QtGui.QLineEdit()\n self.primary_citation_year.setText('')\n self.primary_citation_year.setFixedWidth(500)\n self.primary_citation_year.setStyleSheet(\"background-color: rgb(192, 192, 192);\")\n grid.addWidget(self.primary_citation_year, 4, 1)\n\n grid.addWidget(QtGui.QLabel('Volume'), 5, 0)\n self.primary_citation_journal_volume = QtGui.QLineEdit()\n 
self.primary_citation_journal_volume.setText('')\n self.primary_citation_journal_volume.setFixedWidth(500)\n self.primary_citation_journal_volume.setStyleSheet(\"background-color: rgb(192, 192, 192);\")\n grid.addWidget(self.primary_citation_journal_volume, 5, 1)\n\n grid.addWidget(QtGui.QLabel('Page, first'), 6, 0)\n self.primary_citation_page_first = QtGui.QLineEdit()\n self.primary_citation_page_first.setText('')\n self.primary_citation_page_first.setFixedWidth(500)\n self.primary_citation_page_first.setStyleSheet(\"background-color: rgb(192, 192, 192);\")\n grid.addWidget(self.primary_citation_page_first, 6, 1)\n\n grid.addWidget(QtGui.QLabel('Page, last'), 7, 0)\n self.primary_citation_page_last = QtGui.QLineEdit()\n self.primary_citation_page_last.setText('')\n self.primary_citation_page_last.setFixedWidth(500)\n self.primary_citation_page_last.setStyleSheet(\"background-color: rgb(192, 192, 192);\")\n grid.addWidget(self.primary_citation_page_last, 7, 1)\n\n frame.setLayout(grid)\n vb.addWidget(frame)\n\n ## citation authors\n frame = QtGui.QFrame()\n frame.setFrameShape(QtGui.QFrame.StyledPanel)\n\n grid = QtGui.QGridLayout()\n self.set_primary_citation_authors = QtGui.QCheckBox('same as deposition authors')\n self.layout_funcs.add_checkbox(self, self.set_primary_citation_authors,\n 'xce_object.set_primary_citation_as_structure_authors')\n grid.addWidget(self.set_primary_citation_authors, 0, 0)\n\n self.primary_citation_author_name_List = []\n\n for column in range(0, 2):\n for row in range(1, 15):\n primary_citation_author_name = QtGui.QLineEdit()\n primary_citation_author_name.setText('')\n primary_citation_author_name.setFixedWidth(300)\n grid.addWidget(primary_citation_author_name, row, column)\n self.primary_citation_author_name_List.append(primary_citation_author_name)\n\n frame.setLayout(grid)\n vb.addWidget(frame)\n\n vb.addStretch(1)\n\n deposit_tab_dict['Citation'][1].addLayout(vb)\n\n ## molecule info\n vb = QtGui.QVBoxLayout()\n\n frame = 
QtGui.QFrame()\n frame.setFrameShape(QtGui.QFrame.StyledPanel)\n\n grid = QtGui.QGridLayout()\n\n grid.addWidget(QtGui.QLabel('Entity 1'), 1, 0)\n\n grid.addWidget(QtGui.QLabel('Molecule Name'), 2, 0)\n self.molecule_name = QtGui.QLineEdit()\n self.molecule_name.setText('')\n self.molecule_name.setFixedWidth(300)\n# self.molecule_name.setStyleSheet(\"background-color: rgb(192, 192, 192);\")\n grid.addWidget(self.molecule_name, 2, 1)\n grid.addWidget(QtGui.QLabel('(e.g. RNA Hammerhead Ribozyme)'), 2, 2)\n\n grid.addWidget(QtGui.QLabel('Fragment Name'), 3, 0)\n self.fragment_name_one = QtGui.QLineEdit()\n self.fragment_name_one.setText('')\n self.fragment_name_one.setFixedWidth(300)\n self.fragment_name_one.setStyleSheet(\"background-color: rgb(192, 192, 192);\")\n grid.addWidget(self.fragment_name_one, 3, 1)\n grid.addWidget(QtGui.QLabel('(e.g. ligand binding domain, hairpin)'), 3, 2)\n\n grid.addWidget(QtGui.QLabel('Specific Mutation'), 4, 0)\n self.fragment_name_one_specific_mutation = QtGui.QLineEdit()\n self.fragment_name_one_specific_mutation.setText('')\n self.fragment_name_one_specific_mutation.setFixedWidth(300)\n self.fragment_name_one_specific_mutation.setStyleSheet(\"background-color: rgb(192, 192, 192);\")\n grid.addWidget(self.fragment_name_one_specific_mutation, 4, 1)\n grid.addWidget(QtGui.QLabel('(e.g. C280S)'), 4, 2)\n\n grid.addWidget(QtGui.QLabel('Enzyme Comission Number'), 5, 0)\n self.fragment_name_one_enzyme_comission_number = QtGui.QLineEdit()\n self.fragment_name_one_enzyme_comission_number.setText('')\n self.fragment_name_one_enzyme_comission_number.setFixedWidth(300)\n self.fragment_name_one_enzyme_comission_number.setStyleSheet(\"background-color: rgb(192, 192, 192);\")\n grid.addWidget(self.fragment_name_one_enzyme_comission_number, 5, 1)\n grid.addWidget(QtGui.QLabel('(if known: e.g. 
2.7.7.7)'), 5, 2)\n\n grid.addWidget(QtGui.QLabel('Genetically Manipulated Source'), 6, 0)\n\n grid.addWidget(QtGui.QLabel('Source organism scientific name'), 7, 0)\n\n self.Source_organism_scientific_name = QtGui.QComboBox()\n taxonomy_dict = XChemMain.NCBI_taxonomy_ID()\n for item in taxonomy_dict:\n self.Source_organism_scientific_name.addItem(taxonomy_dict[item])\n grid.addWidget(self.Source_organism_scientific_name, 7, 1)\n\n grid.addWidget(QtGui.QLabel('Source organism gene'), 8, 0)\n self.Source_organism_gene = QtGui.QLineEdit()\n self.Source_organism_gene.setText('')\n self.Source_organism_gene.setFixedWidth(300)\n grid.addWidget(self.Source_organism_gene, 8, 1)\n grid.addWidget(QtGui.QLabel('(e.g. RPOD, ALKA...)'), 8, 2)\n\n grid.addWidget(QtGui.QLabel('Source organism strain'), 9, 0)\n self.Source_organism_strain = QtGui.QLineEdit()\n self.Source_organism_strain.setText('')\n self.Source_organism_strain.setFixedWidth(300)\n self.Source_organism_strain.setStyleSheet(\"background-color: rgb(192, 192, 192);\")\n grid.addWidget(self.Source_organism_strain, 9, 1)\n grid.addWidget(QtGui.QLabel('(e.g. BH10 ISOLATE, K-12...)'), 9, 2)\n\n grid.addWidget(QtGui.QLabel('Expression system scientific name'), 10, 0)\n\n self.Expression_system_scientific_name = QtGui.QComboBox()\n for item in taxonomy_dict:\n self.Expression_system_scientific_name.addItem(taxonomy_dict[item])\n grid.addWidget(self.Expression_system_scientific_name, 10, 1)\n\n grid.addWidget(QtGui.QLabel('Expression system strain'), 11, 0)\n self.Expression_system_strain = QtGui.QLineEdit()\n self.Expression_system_strain.setText('')\n self.Expression_system_strain.setFixedWidth(300)\n self.Expression_system_strain.setStyleSheet(\"background-color: rgb(192, 192, 192);\")\n grid.addWidget(self.Expression_system_strain, 11, 1)\n grid.addWidget(QtGui.QLabel('(e.g. 
BL21(DE3))'), 11, 2)\n\n grid.addWidget(QtGui.QLabel('Expression system vector type'), 12, 0)\n self.Expression_system_vector_type = QtGui.QLineEdit()\n self.Expression_system_vector_type.setText('')\n self.Expression_system_vector_type.setFixedWidth(300)\n self.Expression_system_vector_type.setStyleSheet(\"background-color: rgb(192, 192, 192);\")\n grid.addWidget(self.Expression_system_vector_type, 12, 1)\n grid.addWidget(QtGui.QLabel('(e.g. plasmid)'), 12, 2)\n\n grid.addWidget(QtGui.QLabel('Expression_system_plasmid_name'), 13, 0)\n self.Expression_system_plasmid_name = QtGui.QLineEdit()\n self.Expression_system_plasmid_name.setText('')\n self.Expression_system_plasmid_name.setFixedWidth(300)\n self.Expression_system_plasmid_name.setStyleSheet(\"background-color: rgb(192, 192, 192);\")\n grid.addWidget(self.Expression_system_plasmid_name, 13, 1)\n grid.addWidget(QtGui.QLabel('(e.g. pET26)'), 13, 2)\n\n grid.addWidget(QtGui.QLabel('Manipulated_source_details'), 14, 0)\n self.Manipulated_source_details = QtGui.QLineEdit()\n self.Manipulated_source_details.setText('')\n self.Manipulated_source_details.setFixedWidth(300)\n self.Manipulated_source_details.setStyleSheet(\"background-color: rgb(192, 192, 192);\")\n grid.addWidget(self.Manipulated_source_details, 14, 1)\n grid.addWidget(QtGui.QLabel('(any other relevant information)'), 14, 2)\n\n grid.addWidget(QtGui.QLabel('Chains'), 15, 0)\n self.molecule_chain_one = QtGui.QLineEdit()\n self.molecule_chain_one.setText('')\n self.molecule_chain_one.setFixedWidth(300)\n grid.addWidget(self.molecule_chain_one, 15, 1)\n grid.addWidget(QtGui.QLabel('(e.g. 
A or A,B)'), 15, 2)\n\n frame.setLayout(grid)\n vb.addWidget(frame)\n\n ### entity 2\n\n frame = QtGui.QFrame()\n frame.setFrameShape(QtGui.QFrame.StyledPanel)\n\n grid = QtGui.QGridLayout()\n\n grid.addWidget(QtGui.QLabel('Entity 2 (IMPORTANT: only fill in if you are working with a protein-protein complex!)'), 1, 0)\n\n grid.addWidget(QtGui.QLabel('Molecule Name'), 2, 0)\n self.molecule_name_two = QtGui.QLineEdit()\n self.molecule_name_two.setText('')\n self.molecule_name_two.setFixedWidth(300)\n# self.molecule_name_two.setStyleSheet(\"background-color: rgb(192, 192, 192);\")\n grid.addWidget(self.molecule_name_two, 2, 1)\n grid.addWidget(QtGui.QLabel('(e.g. RNA Hammerhead Ribozyme)'), 2, 2)\n\n grid.addWidget(QtGui.QLabel('Fragment Name'), 3, 0)\n self.fragment_name_two = QtGui.QLineEdit()\n self.fragment_name_two.setText('')\n self.fragment_name_two.setFixedWidth(300)\n self.fragment_name_two.setStyleSheet(\"background-color: rgb(192, 192, 192);\")\n grid.addWidget(self.fragment_name_two, 3, 1)\n grid.addWidget(QtGui.QLabel('(e.g. ligand binding domain, hairpin)'), 3, 2)\n\n grid.addWidget(QtGui.QLabel('Specific Mutation'), 4, 0)\n self.fragment_name_two_specific_mutation = QtGui.QLineEdit()\n self.fragment_name_two_specific_mutation.setText('')\n self.fragment_name_two_specific_mutation.setFixedWidth(300)\n self.fragment_name_two_specific_mutation.setStyleSheet(\"background-color: rgb(192, 192, 192);\")\n grid.addWidget(self.fragment_name_two_specific_mutation, 4, 1)\n grid.addWidget(QtGui.QLabel('(e.g. 
C280S)'), 4, 2)\n\n grid.addWidget(QtGui.QLabel('Enzyme Comission Number'), 5, 0)\n self.fragment_name_two_enzyme_comission_number = QtGui.QLineEdit()\n self.fragment_name_two_enzyme_comission_number.setText('')\n self.fragment_name_two_enzyme_comission_number.setFixedWidth(300)\n self.fragment_name_two_enzyme_comission_number.setStyleSheet(\"background-color: rgb(192, 192, 192);\")\n grid.addWidget(self.fragment_name_two_enzyme_comission_number, 5, 1)\n grid.addWidget(QtGui.QLabel('(if known: e.g. 2.7.7.7)'), 5, 2)\n\n grid.addWidget(QtGui.QLabel('Genetically Manipulated Source'), 6, 0)\n\n grid.addWidget(QtGui.QLabel('Source organism scientific name'), 7, 0)\n\n self.Source_organism_scientific_name_two = QtGui.QComboBox()\n taxonomy_dict = XChemMain.NCBI_taxonomy_ID()\n for item in taxonomy_dict:\n self.Source_organism_scientific_name_two.addItem(taxonomy_dict[item])\n grid.addWidget(self.Source_organism_scientific_name_two, 7, 1)\n\n grid.addWidget(QtGui.QLabel('Source organism gene'), 8, 0)\n self.Source_organism_gene_two = QtGui.QLineEdit()\n self.Source_organism_gene_two.setText('')\n self.Source_organism_gene_two.setFixedWidth(300)\n grid.addWidget(self.Source_organism_gene_two, 8, 1)\n grid.addWidget(QtGui.QLabel('(e.g. RPOD, ALKA...)'), 8, 2)\n\n grid.addWidget(QtGui.QLabel('Source organism strain'), 9, 0)\n self.Source_organism_strain_two = QtGui.QLineEdit()\n self.Source_organism_strain_two.setText('')\n self.Source_organism_strain_two.setFixedWidth(300)\n self.Source_organism_strain_two.setStyleSheet(\"background-color: rgb(192, 192, 192);\")\n grid.addWidget(self.Source_organism_strain_two, 9, 1)\n grid.addWidget(QtGui.QLabel('(e.g. 
BH10 ISOLATE, K-12...)'), 9, 2)\n\n grid.addWidget(QtGui.QLabel('Expression system scientific name'), 10, 0)\n\n self.Expression_system_scientific_name_two = QtGui.QComboBox()\n for item in taxonomy_dict:\n self.Expression_system_scientific_name_two.addItem(taxonomy_dict[item])\n grid.addWidget(self.Expression_system_scientific_name_two, 10, 1)\n\n grid.addWidget(QtGui.QLabel('Expression system strain'), 11, 0)\n self.Expression_system_strain_two = QtGui.QLineEdit()\n self.Expression_system_strain_two.setText('')\n self.Expression_system_strain_two.setFixedWidth(300)\n self.Expression_system_strain_two.setStyleSheet(\"background-color: rgb(192, 192, 192);\")\n grid.addWidget(self.Expression_system_strain_two, 11, 1)\n grid.addWidget(QtGui.QLabel('(e.g. BL21(DE3))'), 11, 2)\n\n grid.addWidget(QtGui.QLabel('Expression system vector type'), 12, 0)\n self.Expression_system_vector_type_two = QtGui.QLineEdit()\n self.Expression_system_vector_type_two.setText('')\n self.Expression_system_vector_type_two.setFixedWidth(300)\n self.Expression_system_vector_type_two.setStyleSheet(\"background-color: rgb(192, 192, 192);\")\n grid.addWidget(self.Expression_system_vector_type_two, 12, 1)\n grid.addWidget(QtGui.QLabel('(e.g. plasmid)'), 12, 2)\n\n grid.addWidget(QtGui.QLabel('Expression_system_plasmid_name'), 13, 0)\n self.Expression_system_plasmid_name_two = QtGui.QLineEdit()\n self.Expression_system_plasmid_name_two.setText('')\n self.Expression_system_plasmid_name_two.setFixedWidth(300)\n self.Expression_system_plasmid_name_two.setStyleSheet(\"background-color: rgb(192, 192, 192);\")\n grid.addWidget(self.Expression_system_plasmid_name_two, 13, 1)\n grid.addWidget(QtGui.QLabel('(e.g. 
pET26)'), 13, 2)\n\n grid.addWidget(QtGui.QLabel('Manipulated_source_details'), 14, 0)\n self.Manipulated_source_details_two = QtGui.QLineEdit()\n self.Manipulated_source_details_two.setText('')\n self.Manipulated_source_details_two.setFixedWidth(300)\n self.Manipulated_source_details_two.setStyleSheet(\"background-color: rgb(192, 192, 192);\")\n grid.addWidget(self.Manipulated_source_details_two, 14, 1)\n grid.addWidget(QtGui.QLabel('(any other relevant information)'), 14, 2)\n\n grid.addWidget(QtGui.QLabel('Chains'), 15, 0)\n self.molecule_chain_two = QtGui.QLineEdit()\n self.molecule_chain_two.setText('')\n self.molecule_chain_two.setFixedWidth(300)\n grid.addWidget(self.molecule_chain_two, 15, 1)\n grid.addWidget(QtGui.QLabel('(e.g. A or A,B)'), 15, 2)\n\n frame.setLayout(grid)\n\n vb.addWidget(frame)\n\n ### entity 2 --- END\n\n\n\n\n vb.addStretch(1)\n\n deposit_tab_dict['Molecule'][1].addLayout(vb)\n\n ## misc\n vb = QtGui.QVBoxLayout()\n\n frame = QtGui.QFrame()\n frame.setFrameShape(QtGui.QFrame.StyledPanel)\n\n grid = QtGui.QGridLayout()\n\n grid.addWidget(QtGui.QLabel('Keywords'), 1, 0)\n self.structure_keywords = QtGui.QLineEdit()\n self.structure_keywords.setText('SGC - Diamond I04-1 fragment screening, PanDDA, XChemExplorer')\n self.structure_keywords.setFixedWidth(300)\n grid.addWidget(self.structure_keywords, 1, 1)\n grid.addWidget(QtGui.QLabel('(e.g. 
beta barrel, protein-DNA complex)'), 1, 2)\n\n grid.addWidget(QtGui.QLabel('Type'), 2, 0)\n self.structure_keywords_type = QtGui.QComboBox()\n self.structure_keywords_type.setStyleSheet(\"background-color: rgb(192, 192, 192);\")\n for item in XChemMain.pdbx_keywords(): self.structure_keywords_type.addItem(item)\n grid.addWidget(self.structure_keywords_type, 2, 1)\n# self.structure_keywords = QtGui.QLineEdit()\n# self.structure_keywords.setText('SGC - Diamond I04-1 fragment screening, PanDDA, XChemExplorer')\n# self.structure_keywords.setFixedWidth(300)\n# grid.addWidget(self.structure_keywords, 1, 1)\n# grid.addWidget(QtGui.QLabel('(e.g. beta barrel, protein-DNA complex)'), 1, 2)\n\n grid.addWidget(QtGui.QLabel('Biological Assembly'), 3, 0)\n self.biological_assembly_chain_number = QtGui.QLineEdit()\n self.biological_assembly_chain_number.setText('')\n self.biological_assembly_chain_number.setFixedWidth(300)\n grid.addWidget(self.biological_assembly_chain_number, 3, 1)\n grid.addWidget(QtGui.QLabel('(e.g. 1 for monomer, 2 for dimer ..)'), 3, 2)\n\n grid.addWidget(QtGui.QLabel('Sequence UNIPROT ID'), 4, 0)\n self.molecule_one_letter_sequence_uniprot_id = QtGui.QLineEdit()\n self.molecule_one_letter_sequence_uniprot_id.setText('')\n self.molecule_one_letter_sequence_uniprot_id.setFixedWidth(300)\n grid.addWidget(self.molecule_one_letter_sequence_uniprot_id, 4, 1)\n grid.addWidget(QtGui.QLabel('(e.g. 
Q6B0I6)'), 4, 2)\n\n grid.addWidget(QtGui.QLabel('Sequence'), 5, 0)\n self.molecule_one_letter_sequence = QtGui.QTextEdit()\n self.molecule_one_letter_sequence.setStyleSheet(\"background-color: rgb(255, 255, 255);\")\n# self.molecule_one_letter_sequence.setStyleSheet(\"background-color: rgb(192, 192, 192);\")\n self.molecule_one_letter_sequence.setText('')\n self.molecule_one_letter_sequence.setFixedWidth(300)\n grid.addWidget(self.molecule_one_letter_sequence, 5, 1, 8, 2)\n\n# grid.addWidget(QtGui.QLabel('Sequence information for entity 2'), 10, 0)\n# grid.addWidget(QtGui.QLabel('(Important: only for protein-protein complex'), 10, 1)\n\n grid.addWidget(QtGui.QLabel('Sequence UNIPROT ID (Entity 2) - optional'), 13, 0)\n self.molecule_one_letter_sequence_uniprot_id_two = QtGui.QLineEdit()\n self.molecule_one_letter_sequence_uniprot_id_two.setText('')\n self.molecule_one_letter_sequence_uniprot_id_two.setStyleSheet(\"background-color: rgb(192, 192, 192);\")\n self.molecule_one_letter_sequence_uniprot_id_two.setFixedWidth(300)\n grid.addWidget(self.molecule_one_letter_sequence_uniprot_id_two, 13, 1)\n grid.addWidget(QtGui.QLabel('(e.g. Q6B0I6)'), 13, 2)\n\n grid.addWidget(QtGui.QLabel('Sequence (Entity 2) - optional'), 14, 0)\n self.molecule_one_letter_sequence_two = QtGui.QTextEdit()\n self.molecule_one_letter_sequence_two.setText('')\n self.molecule_one_letter_sequence_two.setFixedWidth(300)\n grid.addWidget(self.molecule_one_letter_sequence_two, 14, 1, 19, 2)\n\n\n grid.addWidget(QtGui.QLabel('Structural Genomic (optional)'), 21, 0)\n\n grid.addWidget(QtGui.QLabel('Project Name'), 22, 0)\n self.SG_project_name = QtGui.QLineEdit()\n self.SG_project_name.setText('')\n self.SG_project_name.setStyleSheet(\"background-color: rgb(192, 192, 192);\")\n self.SG_project_name.setFixedWidth(300)\n grid.addWidget(self.SG_project_name, 22, 1)\n grid.addWidget(QtGui.QLabel('(e.g. 
SGC, Structural Genomics Consortium)'), 22, 2)\n\n grid.addWidget(QtGui.QLabel('Full Name'), 23, 0)\n self.full_name_of_SG_center = QtGui.QLineEdit()\n self.full_name_of_SG_center.setText('')\n self.full_name_of_SG_center.setStyleSheet(\"background-color: rgb(192, 192, 192);\")\n self.full_name_of_SG_center.setFixedWidth(300)\n grid.addWidget(self.full_name_of_SG_center, 23, 1)\n grid.addWidget(QtGui.QLabel('(e.g. Structural Genomics Consortium)'), 23, 2)\n\n frame.setLayout(grid)\n vb.addWidget(frame)\n\n vb.addStretch(1)\n\n deposit_tab_dict['Misc'][1].addLayout(vb)\n\n ## methods\n vb = QtGui.QVBoxLayout()\n\n frame = QtGui.QFrame()\n frame.setFrameShape(QtGui.QFrame.StyledPanel)\n\n grid = QtGui.QGridLayout()\n\n grid.addWidget(QtGui.QLabel('Crystallization'), 1, 0)\n\n grid.addWidget(QtGui.QLabel('Method'), 2, 0)\n\n self.crystallization_method = QtGui.QComboBox()\n for item in XChemMain.crystal_growth_methods(): self.crystallization_method.addItem(item)\n grid.addWidget(self.crystallization_method, 2, 1)\n\n grid.addWidget(QtGui.QLabel('pH'), 3, 0)\n self.crystallization_pH = QtGui.QLineEdit()\n self.crystallization_pH.setText('')\n self.crystallization_pH.setFixedWidth(300)\n grid.addWidget(self.crystallization_pH, 3, 1)\n grid.addWidget(QtGui.QLabel('(e.g. 7.5 ...)'), 3, 2)\n\n grid.addWidget(QtGui.QLabel('Temperature'), 4, 0)\n self.crystallization_temperature = QtGui.QLineEdit()\n self.crystallization_temperature.setText('')\n self.crystallization_temperature.setFixedWidth(300)\n grid.addWidget(self.crystallization_temperature, 4, 1)\n grid.addWidget(QtGui.QLabel('(e.g. 298) (in Kelvin)'), 4, 2)\n\n grid.addWidget(QtGui.QLabel('Condition'), 5, 0)\n self.crystallization_details = QtGui.QLineEdit()\n self.crystallization_details.setText('')\n self.crystallization_details.setFixedWidth(300)\n grid.addWidget(self.crystallization_details, 5, 1)\n grid.addWidget(QtGui.QLabel('(e.g. 
PEG 4000, NaCl etc.)'), 5, 2)\n\n grid.addWidget(QtGui.QLabel('Diffraction Experiment'), 6, 0)\n note = ('Note: this information will only be used if it is\\n'\n 'not already available in the mainTable!\\n'\n 'Ignore if data were collected at DLS')\n grid.addWidget(QtGui.QLabel(note), 7, 0)\n\n grid.addWidget(QtGui.QLabel('Source'), 8, 0)\n\n self.radiation_source = QtGui.QComboBox()\n for item in XChemMain.radiationSource(): self.radiation_source.addItem(item)\n grid.addWidget(self.radiation_source, 8, 1)\n\n grid.addWidget(QtGui.QLabel('Source Type'), 9, 0)\n\n self.radiation_source_type = QtGui.QComboBox()\n for item in XChemMain.wwBeamlines(): self.radiation_source_type.addItem(item)\n grid.addWidget(self.radiation_source_type, 9, 1)\n\n grid.addWidget(QtGui.QLabel('Wavelength'), 10, 0)\n self.radiation_wavelengths = QtGui.QLineEdit()\n self.radiation_wavelengths.setText('')\n self.radiation_wavelengths.setFixedWidth(300)\n grid.addWidget(self.radiation_wavelengths, 10, 1)\n grid.addWidget(QtGui.QLabel('(e.g. 1.502)'), 10, 2)\n\n grid.addWidget(QtGui.QLabel('Detector'), 11, 0)\n\n self.radiation_detector = QtGui.QComboBox()\n for item in XChemMain.detector(): self.radiation_detector.addItem(item)\n grid.addWidget(self.radiation_detector, 11, 1)\n\n grid.addWidget(QtGui.QLabel('Detector Type'), 12, 0)\n\n self.radiation_detector_type = QtGui.QComboBox()\n for item in XChemMain.detectorType(): self.radiation_detector_type.addItem(item)\n grid.addWidget(self.radiation_detector_type, 12, 1)\n\n grid.addWidget(QtGui.QLabel('Date'), 13, 0)\n self.data_collection_date = QtGui.QLineEdit()\n self.data_collection_date.setText('')\n self.data_collection_date.setFixedWidth(300)\n grid.addWidget(self.data_collection_date, 13, 1)\n grid.addWidget(QtGui.QLabel('(e.g. 
2004-01-07)'), 13, 2)\n\n grid.addWidget(QtGui.QLabel('Temperature'), 14, 0)\n self.data_collection_temperature = QtGui.QLineEdit()\n self.data_collection_temperature.setText('')\n self.data_collection_temperature.setFixedWidth(300)\n grid.addWidget(self.data_collection_temperature, 14, 1)\n grid.addWidget(QtGui.QLabel('(e.g. 100) (in Kelvin)'), 14, 2)\n\n grid.addWidget(QtGui.QLabel('Protocol'), 15, 0)\n self.data_collection_protocol = QtGui.QLineEdit()\n self.data_collection_protocol.setText('SINGLE WAVELENGTH')\n self.data_collection_protocol.setFixedWidth(300)\n grid.addWidget(self.data_collection_protocol, 15, 1)\n grid.addWidget(QtGui.QLabel('(e.g. SINGLE WAVELENGTH, MAD, ...)'), 15, 2)\n\n frame.setLayout(grid)\n vb.addWidget(frame)\n\n vb.addStretch(1)\n\n deposit_tab_dict['Methods'][1].addLayout(vb)\n\n ## software\n vb = QtGui.QVBoxLayout()\n\n frame = QtGui.QFrame()\n frame.setFrameShape(QtGui.QFrame.StyledPanel)\n\n grid = QtGui.QGridLayout()\n\n grid.addWidget(QtGui.QLabel('PDB starting model'), 1, 0)\n self.pdbx_starting_model = QtGui.QLineEdit()\n self.pdbx_starting_model.setText('')\n self.pdbx_starting_model.setFixedWidth(300)\n grid.addWidget(self.pdbx_starting_model, 1, 1)\n grid.addWidget(QtGui.QLabel('(e.g. 
7.5 ...)'), 1, 2)\n\n grid.addWidget(QtGui.QLabel('Data reduction'), 2, 0)\n self.data_integration_software = QtGui.QComboBox()\n for item in XChemMain.data_integration_software(): self.data_integration_software.addItem(item)\n grid.addWidget(self.data_integration_software, 2, 1)\n\n grid.addWidget(QtGui.QLabel('Phasing'), 3, 0)\n self.phasing_software = QtGui.QComboBox()\n for item in XChemMain.phasing_software(): self.phasing_software.addItem(item)\n grid.addWidget(self.phasing_software, 3, 1)\n\n frame.setLayout(grid)\n vb.addWidget(frame)\n vb.addStretch(1)\n\n deposit_tab_dict['Software'][1].addLayout(vb)\n\n ## Funding\n\n vb = QtGui.QVBoxLayout()\n\n frame = QtGui.QFrame()\n frame.setFrameShape(QtGui.QFrame.StyledPanel)\n\n grid = QtGui.QGridLayout()\n\n grid.addWidget(QtGui.QLabel('Funding Organization'), 1, 0)\n self.pdbx_funding_organization_one = QtGui.QLineEdit()\n self.pdbx_funding_organization_one.setText('')\n self.pdbx_funding_organization_one.setFixedWidth(700)\n grid.addWidget(self.pdbx_funding_organization_one, 1, 1)\n\n grid.addWidget(QtGui.QLabel('Grant Number'), 2, 0)\n self.pdbx_grant_number_one = QtGui.QLineEdit()\n self.pdbx_grant_number_one.setText('')\n self.pdbx_grant_number_one.setFixedWidth(700)\n grid.addWidget(self.pdbx_grant_number_one, 2, 1)\n\n grid.addWidget(QtGui.QLabel('Country'), 3, 0)\n self.pdbx_grant_country_one = QtGui.QComboBox()\n for item in XChemMain.pdbx_country(): self.pdbx_grant_country_one.addItem(item)\n grid.addWidget(self.pdbx_grant_country_one, 3, 1)\n\n frame.setLayout(grid)\n vb.addWidget(frame)\n\n frame = QtGui.QFrame()\n frame.setFrameShape(QtGui.QFrame.StyledPanel)\n\n grid = QtGui.QGridLayout()\n\n grid.addWidget(QtGui.QLabel('Funding Organization'), 1, 0)\n self.pdbx_funding_organization_two = QtGui.QLineEdit()\n self.pdbx_funding_organization_two.setText('')\n self.pdbx_funding_organization_two.setFixedWidth(700)\n grid.addWidget(self.pdbx_funding_organization_two, 1, 1)\n\n 
grid.addWidget(QtGui.QLabel('Grant Number'), 2, 0)\n self.pdbx_grant_number_two = QtGui.QLineEdit()\n self.pdbx_grant_number_two.setText('')\n self.pdbx_grant_number_two.setFixedWidth(700)\n grid.addWidget(self.pdbx_grant_number_two, 2, 1)\n\n grid.addWidget(QtGui.QLabel('Country'), 3, 0)\n self.pdbx_grant_country_two = QtGui.QComboBox()\n for item in XChemMain.pdbx_country(): self.pdbx_grant_country_two.addItem(item)\n grid.addWidget(self.pdbx_grant_country_two, 3, 1)\n\n frame.setLayout(grid)\n vb.addWidget(frame)\n\n frame = QtGui.QFrame()\n frame.setFrameShape(QtGui.QFrame.StyledPanel)\n\n grid = QtGui.QGridLayout()\n\n grid.addWidget(QtGui.QLabel('Funding Organization'), 1, 0)\n self.pdbx_funding_organization_three = QtGui.QLineEdit()\n self.pdbx_funding_organization_three.setText('')\n self.pdbx_funding_organization_three.setFixedWidth(700)\n grid.addWidget(self.pdbx_funding_organization_three, 1, 1)\n\n grid.addWidget(QtGui.QLabel('Grant Number'), 2, 0)\n self.pdbx_grant_number_three = QtGui.QLineEdit()\n self.pdbx_grant_number_three.setText('')\n self.pdbx_grant_number_three.setFixedWidth(700)\n grid.addWidget(self.pdbx_grant_number_three, 2, 1)\n\n grid.addWidget(QtGui.QLabel('Country'), 3, 0)\n self.pdbx_grant_country_three = QtGui.QComboBox()\n for item in XChemMain.pdbx_country(): self.pdbx_grant_country_three.addItem(item)\n grid.addWidget(self.pdbx_grant_country_three, 3, 1)\n\n frame.setLayout(grid)\n vb.addWidget(frame)\n\n\n vb.addStretch(1)\n\n deposit_tab_dict['Funding'][1].addLayout(vb)\n\n\n\n\n\n\n\n vbox.addWidget(deposit_tab_widget)\n\n hbox = QtGui.QHBoxLayout()\n button = QtGui.QPushButton('Load\\nFile')\n button.clicked.connect(self.load_deposit_config_file)\n hbox.addWidget(button)\n button = QtGui.QPushButton('Save\\nFile')\n button.clicked.connect(self.save_deposit_config_file)\n hbox.addWidget(button)\n button = QtGui.QPushButton('Load from\\nDatabase')\n button.clicked.connect(self.load_deposit_from_database)\n 
button.setEnabled(False)  # NOTE(review): 'Load from Database' is deliberately disabled (see load_deposit_from_database placeholder)
        hbox.addWidget(button)
        button = QtGui.QPushButton('Save to\nDatabase')
        button.clicked.connect(self.save_deposit_to_database)
        hbox.addWidget(button)

        vbox.addLayout(hbox)
        depositDataLayout.addLayout(vbox, 0, 0)

        # show the deposition dialog modally
        depositData.exec_()

    def save_deposit_config_file(self):
        """Ask the user for a file name and pickle self.deposit_dict to it.

        The chosen name is forced to carry a '.deposit' extension: an
        existing extension is replaced, otherwise '.deposit' is appended.
        """
        self.update_deposit_dict()
        file_name = str(QtGui.QFileDialog.getSaveFileName(self.window, 'Save file', self.current_directory))
        # make sure that the file always has .deposit extension
        if str(file_name).rfind('.') != -1:
            file_name = file_name[:file_name.rfind('.')] + '.deposit'
        else:
            file_name = file_name + '.deposit'
        # NOTE(review): the file handle is never closed explicitly; consider 'with open(...) as f'
        pickle.dump(self.deposit_dict, open(file_name, 'wb'))

    def update_database_with_pdb_codes(self):
        """Start a worker thread that imports the PDB IDs typed into
        self.pdb_code_entry into the data source file, reporting progress
        and status back to the GUI via Qt signals.
        """
        self.work_thread = XChemDeposit.import_PDB_IDs(str(self.pdb_code_entry.toPlainText()),
                                                       os.path.join(self.database_directory, self.data_source_file),
                                                       self.xce_logfile)
        self.explorer_active = 1
        self.connect(self.work_thread, QtCore.SIGNAL("update_progress_bar"), self.update_progress_bar)
        self.connect(self.work_thread, QtCore.SIGNAL("update_status_bar(QString)"), self.update_status_bar)
        self.connect(self.work_thread, QtCore.SIGNAL("finished()"), self.thread_finished)
        self.work_thread.start()

    def update_database_with_labelInfo(self):
        """Write the Label/Description text of every row widget pair in
        self.labelList back into labelTable; rows are matched by 1-based ID
        (list position + 1).
        """
        for n,l in enumerate(self.labelList):
            label = str(l[0].text())
            description = str(l[1].text())
#            print "update labelTable set Label='%s',Description='%s' where ID=%s" %(label,description,str(n+1))
            # NOTE(review): SQL built by %-interpolation breaks (and is injectable)
            # if the label/description contains a single quote - consider parameters
            self.db.execute_statement("update labelTable set Label='%s',Description='%s' where ID=%s" %(label,description,str(n+1)))
#            print label,description

    def load_deposit_config_file(self):
        """Load a previously saved .deposit pickle into self.deposit_dict and
        refresh the dialog widgets; keys missing from an older file are added
        as empty strings.
        """
        file_name_temp = QtGui.QFileDialog.getOpenFileNameAndFilter(self.window, 'Open file', self.current_directory,
                                                                    '*.deposit')
        file_name = tuple(file_name_temp)[0]
        # NOTE(review): unpickling a user-selected file can execute arbitrary code
        # if the file is malicious; the file handle is also never closed explicitly
        self.deposit_dict = pickle.load(open(file_name, "rb"))
#        print self.deposit_dict
        # back-fill fields that the (possibly older) .deposit file does not contain
        for key in 
self.get_deposit_dict_template():
            if key not in self.deposit_dict:
                self.update_log.warning('field not in .deposit file: ' + str(key))
                self.deposit_dict[key] = ''
        self.update_deposit_input()

    def load_deposit_from_database(self):
        # placeholder - not implemented yet (the corresponding button is disabled)
        print('hallo')

    def save_deposit_to_database(self):
        """After explicit user confirmation, push self.deposit_dict into the
        data source's deposit table via a worker thread. Overwrites previous
        entries, hence the warning dialog.
        """
        self.update_deposit_dict()
        msgBox = QtGui.QMessageBox()
        msgBox.setText(
            "*** WARNING ***\nAre you sure you want to update the database?\nThis will overwrite previous entries!")
        msgBox.addButton(QtGui.QPushButton('Yes'), QtGui.QMessageBox.YesRole)
        msgBox.addButton(QtGui.QPushButton('No'), QtGui.QMessageBox.RejectRole)
        reply = msgBox.exec_();
        # reply == 0 corresponds to the first button added above, i.e. 'Yes'
        if reply == 0:
            self.work_thread = XChemDeposit.update_depositTable(self.deposit_dict,
                                                                os.path.join(self.database_directory,
                                                                             self.data_source_file),
                                                                self.xce_logfile)
            self.explorer_active = 1
            self.connect(self.work_thread, QtCore.SIGNAL("update_progress_bar"), self.update_progress_bar)
            self.connect(self.work_thread, QtCore.SIGNAL("update_status_bar(QString)"), self.update_status_bar)
            self.connect(self.work_thread, QtCore.SIGNAL("finished()"), self.thread_finished)
            self.work_thread.start()

    def update_deposit_input(self):
        """Populate the deposition dialog widgets from self.deposit_dict
        (the inverse of update_deposit_dict). Combo boxes are set via
        findText with case-insensitive exact matching.
        """
        try:
            self.contact_author_PI_salutation.setText(self.deposit_dict['contact_author_PI_salutation'])
            self.contact_author_PI_first_name.setText(self.deposit_dict['contact_author_PI_first_name'])
            self.contact_author_PI_last_name.setText(self.deposit_dict['contact_author_PI_last_name'])
            self.contact_author_PI_middle_name.setText(self.deposit_dict['contact_author_PI_middle_name'])
            index = self.contact_author_PI_role.findText(self.deposit_dict['contact_author_PI_role'],
                                                         QtCore.Qt.MatchFixedString)
            self.contact_author_PI_role.setCurrentIndex(index)
            index = self.contact_author_PI_organization_type.findText(
                self.deposit_dict['contact_author_PI_organization_type'], QtCore.Qt.MatchFixedString)
            self.contact_author_PI_organization_type.setCurrentIndex(index)
            
self.contact_author_PI_organization_name.setText(self.deposit_dict['contact_author_PI_organization_name'])\n self.contact_author_PI_email.setText(self.deposit_dict['contact_author_PI_email'])\n self.contact_author_PI_address.setText(self.deposit_dict['contact_author_PI_address'])\n self.contact_author_PI_city.setText(self.deposit_dict['contact_author_PI_city'])\n self.contact_author_PI_State_or_Province.setText(self.deposit_dict['contact_author_PI_State_or_Province'])\n self.contact_author_PI_Zip_Code.setText(self.deposit_dict['contact_author_PI_Zip_Code'])\n self.contact_author_PI_Country.setText(self.deposit_dict['contact_author_PI_Country'])\n self.contact_author_PI_phone_number.setText(self.deposit_dict['contact_author_PI_phone_number'])\n self.contact_author_PI_ORCID.setText(self.deposit_dict['contact_author_PI_ORCID'])\n\n self.contact_author_salutation.setText(self.deposit_dict['contact_author_salutation'])\n self.contact_author_first_name.setText(self.deposit_dict['contact_author_first_name'])\n self.contact_author_last_name.setText(self.deposit_dict['contact_author_last_name'])\n self.contact_author_middle_name.setText(self.deposit_dict['contact_author_middle_name'])\n index = self.contact_author_role.findText(self.deposit_dict['contact_author_role'],\n QtCore.Qt.MatchFixedString)\n self.contact_author_role.setCurrentIndex(index)\n index = self.contact_author_organization_type.findText(\n self.deposit_dict['contact_author_organization_type'], QtCore.Qt.MatchFixedString)\n self.contact_author_organization_type.setCurrentIndex(index)\n self.contact_author_organization_name.setText(self.deposit_dict['contact_author_organization_name'])\n self.contact_author_email.setText(self.deposit_dict['contact_author_email'])\n self.contact_author_address.setText(self.deposit_dict['contact_author_address'])\n self.contact_author_city.setText(self.deposit_dict['contact_author_city'])\n 
self.contact_author_State_or_Province.setText(self.deposit_dict['contact_author_State_or_Province'])\n self.contact_author_Zip_Code.setText(self.deposit_dict['contact_author_Zip_Code'])\n self.contact_author_Country.setText(self.deposit_dict['contact_author_Country'])\n self.contact_author_phone_number.setText(self.deposit_dict['contact_author_phone_number'])\n self.contact_author_ORCID.setText(self.deposit_dict['contact_author_ORCID'])\n index = self.Release_status_for_coordinates.findText(self.deposit_dict['Release_status_for_coordinates'],\n QtCore.Qt.MatchFixedString)\n self.Release_status_for_coordinates.setCurrentIndex(index)\n index = self.Release_status_for_sequence.findText(self.deposit_dict['Release_status_for_sequence'],\n QtCore.Qt.MatchFixedString)\n self.Release_status_for_sequence.setCurrentIndex(index)\n\n self.group_deposition_title.setText(self.deposit_dict['group_deposition_title'])\n self.group_description.setText(self.deposit_dict['group_description'])\n\n self.structure_title.setText(self.deposit_dict['structure_title'])\n self.structure_title_apo.setText(self.deposit_dict['structure_title_apo'])\n\n for n, name in enumerate(self.deposit_dict['structure_author_name'].split(';')):\n self.structure_author_name_List[n].setText(name)\n\n self.primary_citation_id.setText(self.deposit_dict['primary_citation_id'])\n self.primary_citation_journal_abbrev.setText(self.deposit_dict['primary_citation_journal_abbrev'])\n self.primary_citation_title.setText(self.deposit_dict['primary_citation_title'])\n self.primary_citation_year.setText(self.deposit_dict['primary_citation_year'])\n self.primary_citation_journal_volume.setText(self.deposit_dict['primary_citation_journal_volume'])\n self.primary_citation_page_first.setText(self.deposit_dict['primary_citation_page_first'])\n self.primary_citation_page_last.setText(self.deposit_dict['primary_citation_page_last'])\n\n for n, name in enumerate(self.deposit_dict['primary_citation_author_name'].split(';')):\n 
self.primary_citation_author_name_List[n].setText(name)\n\n ### entity 1\n\n self.molecule_name.setText(self.deposit_dict['molecule_name'])\n self.fragment_name_one_specific_mutation.setText(self.deposit_dict['fragment_name_one_specific_mutation'])\n index = self.Source_organism_scientific_name.findText(self.deposit_dict['Source_organism_scientific_name'],\n QtCore.Qt.MatchFixedString)\n self.Source_organism_scientific_name.setCurrentIndex(index)\n\n self.Source_organism_gene.setText(self.deposit_dict['Source_organism_gene'])\n self.Source_organism_strain.setText(self.deposit_dict['Source_organism_strain'])\n index = self.Expression_system_scientific_name.findText(\n self.deposit_dict['Expression_system_scientific_name'], QtCore.Qt.MatchFixedString)\n self.Expression_system_scientific_name.setCurrentIndex(index)\n\n self.Expression_system_strain.setText(self.deposit_dict['Expression_system_strain'])\n self.Expression_system_vector_type.setText(self.deposit_dict['Expression_system_vector_type'])\n self.Expression_system_plasmid_name.setText(self.deposit_dict['Expression_system_plasmid_name'])\n self.Manipulated_source_details.setText(self.deposit_dict['Manipulated_source_details'])\n\n# try:\n self.molecule_chain_one.setText(self.deposit_dict['molecule_chain_one'])\n ### entity 2\n self.molecule_name_two.setText(self.deposit_dict['molecule_name_two'])\n self.fragment_name_two_specific_mutation.setText(self.deposit_dict['fragment_name_two_specific_mutation'])\n index = self.Source_organism_scientific_name_two.findText(self.deposit_dict['Source_organism_scientific_name_two'],\n QtCore.Qt.MatchFixedString)\n self.Source_organism_scientific_name_two.setCurrentIndex(index)\n self.Source_organism_gene_two.setText(self.deposit_dict['Source_organism_gene_two'])\n self.Source_organism_strain_two.setText(self.deposit_dict['Source_organism_strain_two'])\n index = self.Expression_system_scientific_name_two.findText(\n self.deposit_dict['Expression_system_scientific_name_two'], 
QtCore.Qt.MatchFixedString)\n self.Expression_system_scientific_name_two.setCurrentIndex(index)\n\n self.Expression_system_strain_two.setText(self.deposit_dict['Expression_system_strain_two'])\n self.Expression_system_vector_type_two.setText(self.deposit_dict['Expression_system_vector_type_two'])\n self.Expression_system_plasmid_name_two.setText(self.deposit_dict['Expression_system_plasmid_name_two'])\n self.Manipulated_source_details_two.setText(self.deposit_dict['Manipulated_source_details_two'])\n self.molecule_chain_two.setText(self.deposit_dict['molecule_chain_two'])\n self.molecule_one_letter_sequence_uniprot_id_two.setText(\n self.deposit_dict['molecule_two_letter_sequence_uniprot_id'])\n self.molecule_one_letter_sequence_two.setText(self.deposit_dict['molecule_two_letter_sequence'])\n# except KeyError:\n# self.molecule_chain_one.setText('')\n# ### entity 2\n# self.molecule_name_two.setText('')\n# self.fragment_name_two_specific_mutation.setText('')\n# self.Source_organism_scientific_name_two.setCurrentIndex(0)\n# self.Source_organism_gene_two.setText('')\n# self.Source_organism_strain_two.setText('')\n# self.Expression_system_scientific_name_two.setCurrentIndex(0)\n# self.Expression_system_strain_two.setText('')\n# self.Expression_system_vector_type_two.setText('')\n# self.Expression_system_plasmid_name_two.setText('')\n# self.Manipulated_source_details_two.setText('')\n# self.molecule_chain_two.setText('')\n# self.molecule_one_letter_sequence_uniprot_id_two.setText('')\n# self.molecule_one_letter_sequence_two.setText('')\n\n ###\n\n self.structure_keywords.setText(self.deposit_dict['structure_keywords'])\n self.biological_assembly_chain_number.setText(self.deposit_dict['biological_assembly_chain_number'])\n self.molecule_one_letter_sequence_uniprot_id.setText(\n self.deposit_dict['molecule_one_letter_sequence_uniprot_id'])\n self.molecule_one_letter_sequence.setText(self.deposit_dict['molecule_one_letter_sequence'])\n 
self.SG_project_name.setText(self.deposit_dict['SG_project_name'])\n self.full_name_of_SG_center.setText(self.deposit_dict['full_name_of_SG_center'])\n\n index = self.crystallization_method.findText(self.deposit_dict['crystallization_method'],\n QtCore.Qt.MatchFixedString)\n self.crystallization_method.setCurrentIndex(index)\n\n self.crystallization_pH.setText(self.deposit_dict['crystallization_pH'])\n self.crystallization_temperature.setText(self.deposit_dict['crystallization_temperature'])\n self.crystallization_details.setText(self.deposit_dict['crystallization_details'])\n index = self.radiation_source.findText(self.deposit_dict['radiation_source'], QtCore.Qt.MatchFixedString)\n self.radiation_source.setCurrentIndex(index)\n\n index = self.radiation_source_type.findText(self.deposit_dict['radiation_source_type'],\n QtCore.Qt.MatchFixedString)\n self.radiation_source_type.setCurrentIndex(index)\n\n self.radiation_wavelengths.setText(self.deposit_dict['radiation_wavelengths'])\n index = self.radiation_detector.findText(self.deposit_dict['radiation_detector'],\n QtCore.Qt.MatchFixedString)\n self.radiation_detector.setCurrentIndex(index)\n\n index = self.radiation_detector_type.findText(self.deposit_dict['radiation_detector_type'],\n QtCore.Qt.MatchFixedString)\n self.radiation_detector_type.setCurrentIndex(index)\n\n self.data_collection_date.setText(self.deposit_dict['data_collection_date'])\n self.data_collection_temperature.setText(self.deposit_dict['data_collection_temperature'])\n self.data_collection_protocol.setText(self.deposit_dict['data_collection_protocol'])\n\n self.pdbx_starting_model.setText(self.deposit_dict['pdbx_starting_model'])\n index = self.data_integration_software.findText(self.deposit_dict['data_integration_software'],\n QtCore.Qt.MatchFixedString)\n self.data_integration_software.setCurrentIndex(index)\n index = self.phasing_software.findText(self.deposit_dict['phasing_software'], QtCore.Qt.MatchFixedString)\n 
self.phasing_software.setCurrentIndex(index)\n\n self.pdbx_funding_organization_one.setText(self.deposit_dict['pdbx_funding_organization_one'])\n self.pdbx_grant_number_one.setText(self.deposit_dict['pdbx_grant_number_one'])\n index = self.pdbx_grant_country_one.findText(\n self.deposit_dict['pdbx_grant_country_one'], QtCore.Qt.MatchFixedString)\n self.pdbx_grant_country_one.setCurrentIndex(index)\n\n self.pdbx_funding_organization_two.setText(self.deposit_dict['pdbx_funding_organization_two'])\n self.pdbx_grant_number_two.setText(self.deposit_dict['pdbx_grant_number_two'])\n index = self.pdbx_grant_country_two.findText(\n self.deposit_dict['pdbx_grant_country_two'], QtCore.Qt.MatchFixedString)\n self.pdbx_grant_country_two.setCurrentIndex(index)\n\n self.pdbx_funding_organization_three.setText(self.deposit_dict['pdbx_funding_organization_three'])\n self.pdbx_grant_number_three.setText(self.deposit_dict['pdbx_grant_number_three'])\n index = self.pdbx_grant_country_three.findText(\n self.deposit_dict['pdbx_grant_country_three'], QtCore.Qt.MatchFixedString)\n self.pdbx_grant_country_three.setCurrentIndex(index)\n\n except ValueError, e:\n# self.update_status_bar('Sorry, this is not a XChemExplorer deposit file!')\n self.update_log.error('file is not a valid .deposit file: ' + str(e))\n\n def update_deposit_dict(self):\n pdbx_funding_ordinal_one = ''\n pdbx_funding_organization_one = ''\n pdbx_grant_number_one = ''\n pdbx_grant_country_one = ''\n if str(self.pdbx_funding_organization_one.text()).replace(' ','') != '':\n pdbx_funding_ordinal_one = '1'\n pdbx_funding_organization_one = str(self.pdbx_funding_organization_one.text())\n pdbx_grant_number_one = str(self.pdbx_grant_number_one.text())\n pdbx_grant_country_one = str(self.pdbx_grant_country_one.currentText())\n\n pdbx_funding_ordinal_two = ''\n pdbx_funding_organization_two = ''\n pdbx_grant_number_two = ''\n pdbx_grant_country_two = ''\n if str(self.pdbx_funding_organization_two.text()).replace(' ','') != 
'':\n pdbx_funding_ordinal_two = '2'\n pdbx_funding_organization_two = str(self.pdbx_funding_organization_two.text())\n pdbx_grant_number_two = str(self.pdbx_grant_number_two.text())\n pdbx_grant_country_two = str(self.pdbx_grant_country_two.currentText())\n\n pdbx_funding_ordinal_three = ''\n pdbx_funding_organization_three = ''\n pdbx_grant_number_three = ''\n pdbx_grant_country_three = ''\n if str(self.pdbx_funding_organization_three.text()).replace(' ','') != '':\n pdbx_funding_ordinal_three = '3'\n pdbx_funding_organization_three = str(self.pdbx_funding_organization_three.text())\n pdbx_grant_number_three = str(self.pdbx_grant_number_three.text())\n pdbx_grant_country_three = str(self.pdbx_grant_country_three.currentText())\n\n self.deposit_dict = {\n 'contact_author_PI_salutation': str(self.contact_author_PI_salutation.text()),\n 'contact_author_PI_first_name': str(self.contact_author_PI_first_name.text()),\n 'contact_author_PI_last_name': str(self.contact_author_PI_last_name.text()),\n 'contact_author_PI_middle_name': str(self.contact_author_PI_middle_name.text()),\n 'contact_author_PI_role': str(self.contact_author_PI_role.currentText()),\n 'contact_author_PI_organization_type': str(self.contact_author_PI_organization_type.currentText()),\n 'contact_author_PI_organization_name': str(self.contact_author_PI_organization_name.text()),\n 'contact_author_PI_email': str(self.contact_author_PI_email.text()),\n 'contact_author_PI_address': str(self.contact_author_PI_address.text()),\n 'contact_author_PI_city': str(self.contact_author_PI_city.text()),\n 'contact_author_PI_State_or_Province': str(self.contact_author_PI_State_or_Province.text()),\n 'contact_author_PI_Zip_Code': str(self.contact_author_PI_Zip_Code.text()),\n 'contact_author_PI_Country': str(self.contact_author_PI_Country.text()),\n 'contact_author_PI_phone_number': str(self.contact_author_PI_phone_number.text()),\n 'contact_author_PI_ORCID': str(self.contact_author_PI_ORCID.text()),\n\n 
'contact_author_salutation': str(self.contact_author_salutation.text()),\n 'contact_author_first_name': str(self.contact_author_first_name.text()),\n 'contact_author_last_name': str(self.contact_author_last_name.text()),\n 'contact_author_middle_name': str(self.contact_author_middle_name.text()),\n 'contact_author_role': str(self.contact_author_role.currentText()),\n 'contact_author_organization_type': str(self.contact_author_organization_type.currentText()),\n 'contact_author_organization_name': str(self.contact_author_organization_name.text()),\n 'contact_author_email': str(self.contact_author_email.text()),\n 'contact_author_address': str(self.contact_author_address.text()),\n 'contact_author_city': str(self.contact_author_city.text()),\n 'contact_author_State_or_Province': str(self.contact_author_State_or_Province.text()),\n 'contact_author_Zip_Code': str(self.contact_author_Zip_Code.text()),\n 'contact_author_Country': str(self.contact_author_Country.text()),\n 'contact_author_phone_number': str(self.contact_author_phone_number.text()),\n 'contact_author_ORCID': str(self.contact_author_ORCID.text()),\n\n 'Release_status_for_coordinates': str(self.Release_status_for_coordinates.currentText()),\n 'Release_status_for_sequence': str(self.Release_status_for_sequence.currentText()),\n\n 'group_deposition_title': str(self.group_deposition_title.text()),\n 'group_description': str(self.group_description.text()),\n\n 'structure_title': str(self.structure_title.text()),\n 'structure_title_apo': str(self.structure_title_apo.text()),\n\n 'primary_citation_id': str(self.primary_citation_id.text()),\n 'primary_citation_journal_abbrev': str(self.primary_citation_journal_abbrev.text()),\n 'primary_citation_title': str(self.primary_citation_title.text()),\n 'primary_citation_year': str(self.primary_citation_year.text()),\n 'primary_citation_journal_volume': str(self.primary_citation_journal_volume.text()),\n 'primary_citation_page_first': 
str(self.primary_citation_page_first.text()),\n 'primary_citation_page_last': str(self.primary_citation_page_last.text()),\n ### entity 1\n 'molecule_name': str(self.molecule_name.text()),\n 'Source_organism_scientific_name': str(self.Source_organism_scientific_name.currentText()),\n 'Source_organism_gene': str(self.Source_organism_gene.text()),\n 'Source_organism_strain': str(self.Source_organism_strain.text()),\n 'Expression_system_scientific_name': str(self.Expression_system_scientific_name.currentText()),\n 'Expression_system_strain': str(self.Expression_system_strain.text()),\n 'Expression_system_plasmid_name': str(self.Expression_system_plasmid_name.text()),\n 'Expression_system_vector_type': str(self.Expression_system_vector_type.text()),\n 'Manipulated_source_details': str(self.Manipulated_source_details.text()),\n 'fragment_name_one_specific_mutation': str(self.fragment_name_one_specific_mutation.text()),\n 'molecule_chain_one': str(self.molecule_chain_one.text()),\n\n ### entity 2\n 'molecule_name_two': str(self.molecule_name_two.text()),\n 'Source_organism_scientific_name_two': str(self.Source_organism_scientific_name_two.currentText()),\n 'Source_organism_gene_two': str(self.Source_organism_gene_two.text()),\n 'Source_organism_strain_two': str(self.Source_organism_strain_two.text()),\n 'Expression_system_scientific_name_two': str(self.Expression_system_scientific_name_two.currentText()),\n 'Expression_system_strain_two': str(self.Expression_system_strain_two.text()),\n 'Expression_system_plasmid_name_two': str(self.Expression_system_plasmid_name_two.text()),\n 'Expression_system_vector_type_two': str(self.Expression_system_vector_type_two.text()),\n 'Manipulated_source_details_two': str(self.Manipulated_source_details_two.text()),\n 'fragment_name_two_specific_mutation': str(self.fragment_name_two_specific_mutation.text()),\n 'molecule_chain_two': str(self.molecule_chain_two.text()),\n\n 'structure_keywords': str(self.structure_keywords.text()),\n 
'biological_assembly_chain_number': str(self.biological_assembly_chain_number.text()),\n 'molecule_one_letter_sequence_uniprot_id': str(self.molecule_one_letter_sequence_uniprot_id.text()),\n 'molecule_two_letter_sequence_uniprot_id': str(self.molecule_one_letter_sequence_uniprot_id_two.text()),\n 'SG_project_name': str(self.SG_project_name.text()),\n 'full_name_of_SG_center': str(self.full_name_of_SG_center.text()),\n 'molecule_one_letter_sequence': str(self.molecule_one_letter_sequence.toPlainText()).replace(' ',\n '').replace(\n '\\n', '').replace('\\r', ''),\n 'molecule_two_letter_sequence': str(self.molecule_one_letter_sequence_two.toPlainText()).replace(' ',\n '').replace(\n '\\n', '').replace('\\r', ''),\n\n 'crystallization_method': str(self.crystallization_method.currentText()),\n 'crystallization_pH': str(self.crystallization_pH.text()),\n 'crystallization_temperature': str(self.crystallization_temperature.text()),\n 'crystallization_details': str(self.crystallization_details.text()),\n\n 'radiation_source': str(self.radiation_source.currentText()),\n 'radiation_source_type': str(self.radiation_source_type.currentText()),\n 'radiation_wavelengths': str(self.radiation_wavelengths.text()),\n 'radiation_detector': str(self.radiation_detector.currentText()),\n 'radiation_detector_type': str(self.radiation_detector_type.currentText()),\n 'data_collection_date': str(self.data_collection_date.text()),\n 'data_collection_temperature': str(self.data_collection_temperature.text()),\n 'data_collection_protocol': str(self.data_collection_protocol.text()),\n 'pdbx_starting_model': str(self.pdbx_starting_model.text()),\n 'data_integration_software': str(self.data_integration_software.currentText()),\n 'phasing_software': str(self.phasing_software.currentText()),\n\n 'pdbx_funding_ordinal_one': pdbx_funding_ordinal_one,\n 'pdbx_funding_organization_one': pdbx_funding_organization_one,\n 'pdbx_grant_number_one': pdbx_grant_number_one,\n 'pdbx_grant_country_one': 
pdbx_grant_country_one,\n 'pdbx_funding_ordinal_two': pdbx_funding_ordinal_two,\n 'pdbx_funding_organization_two': pdbx_funding_organization_two,\n 'pdbx_grant_number_two': pdbx_grant_number_two,\n 'pdbx_grant_country_two': pdbx_grant_country_two,\n 'pdbx_funding_ordinal_three': pdbx_funding_ordinal_three,\n 'pdbx_funding_organization_three': pdbx_funding_organization_three,\n 'pdbx_grant_number_three': pdbx_grant_number_three,\n 'pdbx_grant_country_three': pdbx_grant_country_three\n\n }\n\n structure_author_name = ''\n for widget in self.structure_author_name_List:\n structure_author_name += str(widget.text()) + ';'\n self.deposit_dict['structure_author_name'] = structure_author_name[:-1]\n\n primary_citation_author_name = ''\n for widget in self.primary_citation_author_name_List:\n primary_citation_author_name += str(widget.text()) + ';'\n self.deposit_dict['primary_citation_author_name'] = primary_citation_author_name[:-1]\n\n def get_deposit_dict_template(self):\n deposit_dict_template = {\n 'contact_author_PI_salutation': None,\n 'contact_author_PI_first_name': None,\n 'contact_author_PI_last_name': None,\n 'contact_author_PI_middle_name': None,\n 'contact_author_PI_role': None,\n 'contact_author_PI_organization_type': None,\n 'contact_author_PI_organization_name': None,\n 'contact_author_PI_email': None,\n 'contact_author_PI_address': None,\n 'contact_author_PI_city': None,\n 'contact_author_PI_State_or_Province': None,\n 'contact_author_PI_Zip_Code': None,\n 'contact_author_PI_Country': None,\n 'contact_author_PI_phone_number': None,\n 'contact_author_PI_ORCID': None,\n\n 'contact_author_salutation': None,\n 'contact_author_first_name': None,\n 'contact_author_last_name': None,\n 'contact_author_middle_name': None,\n 'contact_author_role': None,\n 'contact_author_organization_type': None,\n 'contact_author_organization_name': None,\n 'contact_author_email': None,\n 'contact_author_address': None,\n 'contact_author_city': None,\n 
'contact_author_State_or_Province': None,\n 'contact_author_Zip_Code': None,\n 'contact_author_Country': None,\n 'contact_author_phone_number': None,\n 'contact_author_ORCID': None,\n\n 'Release_status_for_coordinates': None,\n 'Release_status_for_sequence': None,\n\n 'group_deposition_title': None,\n 'group_description': None,\n\n 'structure_title': None,\n 'structure_title_apo': None,\n\n 'primary_citation_id': None,\n 'primary_citation_journal_abbrev': None,\n 'primary_citation_title': None,\n 'primary_citation_year': None,\n 'primary_citation_journal_volume': None,\n 'primary_citation_page_first': None,\n 'primary_citation_page_last': None,\n ### entity 1\n 'molecule_name': None,\n 'Source_organism_scientific_name': None,\n 'Source_organism_gene': None,\n 'Source_organism_strain': None,\n 'Expression_system_scientific_name': None,\n 'Expression_system_strain': None,\n 'Expression_system_plasmid_name': None,\n 'Expression_system_vector_type': None,\n 'Manipulated_source_details': None,\n 'fragment_name_one_specific_mutation': None,\n 'molecule_chain_one': None,\n\n ### entity 2\n 'molecule_name_two': None,\n 'Source_organism_scientific_name_two': None,\n 'Source_organism_gene_two': None,\n 'Source_organism_strain_two': None,\n 'Expression_system_scientific_name_two': None,\n 'Expression_system_strain_two': None,\n 'Expression_system_plasmid_name_two': None,\n 'Expression_system_vector_type_two': None,\n 'Manipulated_source_details_two': None,\n 'fragment_name_two_specific_mutation': None,\n 'molecule_chain_two': None,\n\n 'structure_keywords': None,\n 'biological_assembly_chain_number': None,\n 'molecule_one_letter_sequence_uniprot_id': None,\n 'molecule_two_letter_sequence_uniprot_id': None,\n 'SG_project_name': None,\n 'full_name_of_SG_center': None,\n 'molecule_one_letter_sequence': None,\n 'molecule_two_letter_sequence': None,\n\n 'crystallization_method': None,\n 'crystallization_pH': None,\n 'crystallization_temperature': None,\n 'crystallization_details': 
None,\n\n 'radiation_source': None,\n 'radiation_source_type': None,\n 'radiation_wavelengths': None,\n 'radiation_detector': None,\n 'radiation_detector_type': None,\n 'data_collection_date': None,\n 'data_collection_temperature': None,\n 'data_collection_protocol': None,\n 'pdbx_starting_model': None,\n 'data_integration_software': None,\n 'phasing_software': None,\n 'structure_author_name': None,\n 'primary_citation_author_name': None,\n\n 'pdbx_funding_organization_one': '',\n 'pdbx_grant_number_one': '',\n 'pdbx_grant_country_one': '',\n 'pdbx_funding_organization_two': '',\n 'pdbx_grant_number_two': '',\n 'pdbx_grant_country_two': '',\n 'pdbx_funding_organization_three': '',\n 'pdbx_grant_number_three': '',\n 'pdbx_grant_country_three': ''\n\n }\n\n return deposit_dict_template\n\n def set_primary_citation_as_structure_authors(self, state):\n if state == QtCore.Qt.Checked:\n for n, entry in enumerate(self.structure_author_name_List):\n self.primary_citation_author_name_List[n].setText(str(entry.text()))\n else:\n for n, entry in enumerate(self.primary_citation_author_name_List):\n entry.setText('')\n\n def set_xce_logfile(self):\n file_name = str(QtGui.QFileDialog.getSaveFileName(self.window, 'Save file', self.current_directory))\n self.xce_logfile = str(file_name)\n self.xce_logfile_label.setText(str(self.xce_logfile))\n if self.xce_logfile == '' or self.xce_logfile[self.xce_logfile.rfind('/') + 1:] == '':\n print('==> XCE: invalid file format')\n else:\n XChemLog.startLog(self.xce_logfile).create_logfile(self.xce_version)\n self.update_log = XChemLog.updateLog(self.xce_logfile)\n\n def set_second_cif_file(self):\n filepath_temp = QtGui.QFileDialog.getOpenFileNameAndFilter(self.window, 'Select CIF File',\n self.initial_model_directory, '*.cif')\n filepath = str(tuple(filepath_temp)[0])\n self.second_cif_file = str(filepath)\n self.second_cif_file_label.setText(str(self.second_cif_file))\n self.update_log.insert('user selected %s as CIF file for merging into 
ligand CIF files' %self.second_cif_file)\n\n def select_datasource_columns_to_display(self):\n columns_to_show = QtGui.QMessageBox()\n columns_to_showLayout = columns_to_show.layout()\n columns_in_data_source = self.db.return_column_list()\n try:\n columns_in_data_source = self.db.return_column_list()\n except AttributeError:\n print('==> XCE: please select a datasource file')\n self.status_bar.showMessage('please select a datasource file')\n return\n\n column_dict = {}\n vbox = QtGui.QVBoxLayout()\n number_of_entries = len(columns_in_data_source)\n columns_shown_in_dialog_column = 15\n grid = QtGui.QGridLayout()\n x = 0\n y = 0\n columns_to_ignore = self.db.columns_not_to_display()\n for entries_added in range(number_of_entries):\n if not columns_in_data_source[entries_added][1] in columns_to_ignore:\n data_source_column = QtGui.QCheckBox(columns_in_data_source[entries_added][1])\n column_dict[entries_added] = data_source_column\n if columns_in_data_source[entries_added][1] in self.overview_datasource_table_columns:\n data_source_column.setChecked(True)\n grid.addWidget(data_source_column, y, x)\n y += 1\n if y == columns_shown_in_dialog_column:\n y = 0\n x += 1\n vbox.addLayout(grid)\n columns_to_showLayout.addLayout(vbox, 0, 0)\n\n columns_to_show.addButton(QtGui.QPushButton('OK'), QtGui.QMessageBox.YesRole)\n columns_to_show.addButton(QtGui.QPushButton('Cancel'), QtGui.QMessageBox.RejectRole)\n reply = columns_to_show.exec_();\n if reply == 0:\n columns_to_show_list = ['Sample ID']\n for key in column_dict:\n if column_dict[key].isChecked():\n columns_to_show_list.append(columns_in_data_source[key][1])\n self.overview_datasource_table_columns = columns_to_show_list\n self.populate_and_update_datasource_table()\n\n def update_header_and_data_from_datasource(self):\n self.update_log.insert('getting information for all samples from data source...')\n self.db = XChemDB.data_source(os.path.join(self.database_directory, self.data_source_file))\n 
self.update_log.insert('creating missing columns in data source')\n self.db.create_missing_columns()\n self.update_log.insert('load header and data from data source')\n self.header, self.data = self.db.load_samples_from_data_source()\n self.update_log.insert('get all samples in data source')\n all_samples_in_db = self.db.execute_statement(\"select CrystalName from mainTable where CrystalName is not '';\")\n\n self.xtal_db_dict = {}\n sampleID_column = 0\n for n, entry in enumerate(self.header):\n if entry == 'CrystalName':\n sampleID_column = n\n break\n for line in self.data:\n if str(line[sampleID_column]) != '':\n db_dict = {}\n for n, entry in enumerate(line):\n if n != sampleID_column:\n db_dict[str(self.header[n])] = str(entry)\n self.xtal_db_dict[str(line[sampleID_column])] = db_dict\n\n print('==> XCE: found ' + str(len(self.xtal_db_dict)) + ' samples')\n\n def datasource_menu_save_samples(self):\n print('hallo')\n\n def datasource_menu_export_csv_file(self):\n file_name = str(QtGui.QFileDialog.getSaveFileName(self.window, 'Save file', self.database_directory))\n if file_name.rfind('.') != -1:\n file_name = file_name[:file_name.rfind('.')] + '.csv'\n else:\n file_name = file_name + '.csv'\n self.db.export_to_csv_file(file_name)\n\n def datasource_menu_import_csv_file(self):\n if self.data_source_set:\n file_name = QtGui.QFileDialog.getOpenFileName(self.window, 'Open file', self.database_directory)\n self.db.import_csv_file(file_name)\n else:\n self.update_status_bar('Please load a data source file first')\n\n def datasource_menu_update_datasource(self):\n self.work_thread = XChemThread.synchronise_db_and_filesystem(self.initial_model_directory,\n os.path.join(self.database_directory,\n self.data_source_file),\n self.panddas_directory, self.xce_logfile,\n 'project_directory')\n self.connect(self.work_thread, QtCore.SIGNAL(\"update_progress_bar\"), self.update_progress_bar)\n self.connect(self.work_thread, QtCore.SIGNAL(\"update_status_bar(QString)\"), 
self.update_status_bar)\n self.connect(self.work_thread, QtCore.SIGNAL(\"finished()\"), self.thread_finished)\n self.connect(self.work_thread, QtCore.SIGNAL(\"datasource_menu_reload_samples\"),\n self.datasource_menu_reload_samples)\n self.work_thread.start()\n\n def export_data_for_WONKA(self):\n self.update_log.insert('exporting CSV file for input into WONKA')\n self.db.export_csv_for_WONKA()\n\n def on_context_menu(self, point):\n # show context menu\n for key in self.dewar_configuration_dict:\n if self.dewar_configuration_dict[key] == self.sender():\n self.dewar_label_active = key\n self.popMenu.exec_(self.sender().mapToGlobal(point))\n\n\n\n def on_context_menu_reprocess_data(self, point):\n # show context menu\n self.popMenu_for_datasets_reprocess_table.exec_(self.sender().mapToGlobal(point))\n\n def flag_sample_for_recollection(self):\n self.dewar_configuration_dict[self.dewar_label_active].setStyleSheet(\"background-color: yellow\")\n\n def undo_flag_sample_for_recollection(self):\n self.dewar_configuration_dict[self.dewar_label_active].setStyleSheet(\"background-color: gray\")\n\n def show_html_summary_in_firefox(self, xtal):\n html_summary = self.albula_button_dict[xtal][2]\n print('html_summary', html_summary)\n new = 2\n webbrowser.open(html_summary, new=new)\n\n def update_pandda_crystal_from_combobox(self):\n self.pandda_analyse_crystal_from_selection_combobox.clear()\n self.pandda_analyse_crystal_from_selection_combobox.addItem('use all datasets')\n if os.path.isfile(os.path.join(self.database_directory, self.data_source_file)):\n self.load_crystal_form_from_datasource()\n if self.xtalform_dict != {}:\n print(self.xtalform_dict)\n for key in self.xtalform_dict:\n self.pandda_analyse_crystal_from_selection_combobox.addItem(key)\n\n def populate_reference_combobox(self, combobox):\n combobox.clear()\n for reference_file in self.reference_file_list:\n combobox.addItem(reference_file[0])\n\n\n\n def populate_refinement_outcome_combobox(self, combobox):\n 
combobox.clear()\n for stage in self.refinement_stage:\n combobox.addItem(stage)\n\n\n\n def populate_target_selection_combobox(self, combobox):\n combobox.clear()\n for target in self.target_list:\n combobox.addItem(target)\n\n def combo_selected(self, text):\n self.map_url = str(self.panddas_directory + '/analyses/html_summaries/pandda_map_' + text + '.html')\n self.pandda_maps_html.load(QtCore.QUrl(self.map_url))\n self.pandda_maps_html.show()\n\n def add_map_html(self):\n self.map_list = glob.glob(str(self.panddas_directory + '/analyses/html_summaries/pandda_map_*.html'))\n self.list_options = []\n for i in range(0, len(self.map_list)):\n string = self.map_list[i]\n string = string.replace('/analyses/html_summaries/pandda_map_', '')\n string = string.replace('.html', '')\n string = string.replace(self.panddas_directory, '')\n self.list_options.append(string)\n self.pandda_map_list.clear()\n for i in range(0, len(self.list_options)):\n self.pandda_map_list.addItem(self.list_options[i])\n self.connect(self.pandda_map_list, QtCore.SIGNAL('activated(QString)'), self.combo_selected)\n\n def open_config_file(self):\n file_name_temp = QtGui.QFileDialog.getOpenFileNameAndFilter(self.window, 'Open file', self.current_directory,\n '*.conf')\n file_name = tuple(file_name_temp)[0]\n\n try:\n pickled_settings = pickle.load(open(file_name, 'rb'))\n\n except:\n print('==> XCE: failed to open config file...')\n\n key_list = {#'beamline_directory': 'beamline_directory',\n 'initial_model_directory': 'initial_model_directory',\n 'panddas_directory': 'panddas_directory',\n 'html_export_directory': 'html_export_directory',\n 'group_deposit_directory': 'group_deposit_directory',\n 'database_directory': 'database_directory',\n 'datasets_summary_file': 'datasets_summary',\n #\"'data_source_file': 'data_source',\n 'ccp4_scratch_directory': 'ccp4_scratch',\n 'allowed_unitcell_difference_percent': 'unitcell_difference',\n 'acceptable_low_resolution_limit_for_data': 
'too_low_resolution_data',\n #'reference_directory_temp': 'reference_directory'\n }\n# self.pandda_input_data_dir_entry.setText(os.path.join(self.initial_model_directory, '*'))\n\n for current_key in key_list:\n try:\n command = str('self.' + current_key + \" = pickled_settings['\" + key_list[current_key] +\"']\")\n exec(command)\n command = str('self.settings[\"' + key_list[current_key]+ '\"]= self.' + current_key)\n exec(command)\n print('==> XCE: found ' + key_list[current_key])\n except:\n print('==> XCE: WARNING: Failed to find settings for: ' + key_list[current_key] + ' Error type: '\n + str(sys.exc_info()[0]))\n exec(str(current_key + \" = ''\"))\n continue\n\n\n try:\n pickled_settings = pickle.load(open(file_name, \"rb\"))\n if pickled_settings['beamline_directory'] != self.beamline_directory:\n self.beamline_directory = pickled_settings['beamline_directory']\n self.target_list, self.visit_list = XChemMain.get_target_and_visit_list(self.beamline_directory,self.read_agamemnon.isChecked())\n self.settings['beamline_directory'] = self.beamline_directory\n self.populate_target_selection_combobox(self.target_selection_combobox)\n\n\n self.layout_funcs.pandda_html(self)\n self.show_pandda_html_summary()\n\n self.html_export_directory_label.setText(self.html_export_directory)\n\n self.group_deposition_directory_label.setText(self.group_deposit_directory)\n\n self.datasets_summary_file_label.setText(self.datasets_summary_file)\n\n self.data_source_file = pickled_settings['data_source']\n if self.data_source_file != '':\n self.settings['data_source'] = os.path.join(self.database_directory, self.data_source_file)\n # this is probably not necessary\n if os.path.isfile(self.settings['data_source']):\n write_enabled = self.check_write_permissions_of_data_source()\n if not write_enabled:\n self.data_source_file_label.setText('')\n self.data_source_set = False\n else:\n self.data_source_file_label.setText(\n os.path.join(self.database_directory, self.data_source_file))\n 
self.data_source_set = True\n self.db = XChemDB.data_source(os.path.join(self.database_directory, self.data_source_file))\n self.datasource_menu_reload_samples()\n\n reference_directory_temp = pickled_settings['reference_directory']\n if reference_directory_temp != self.reference_directory:\n self.reference_directory = reference_directory_temp\n self.settings['reference_directory'] = self.reference_directory\n self.update_reference_files(' ')\n for xtal in self.initial_model_dimple_dict:\n reference_file_selection_combobox = self.initial_model_dimple_dict[xtal][1]\n self.populate_reference_combobox(reference_file_selection_combobox)\n\n self.initial_model_directory_label.setText(self.initial_model_directory)\n self.panddas_directory_label.setText(self.panddas_directory)\n self.pandda_output_data_dir_entry.setText(self.panddas_directory)\n self.reference_directory_label.setText(self.reference_directory)\n self.beamline_directory_label.setText(self.beamline_directory)\n self.ccp4_scratch_directory_label.setText(self.ccp4_scratch_directory)\n self.reference_file_list = self.get_reference_file_list(' ')\n self.pandda_input_data_dir_entry.setText(os.path.join(self.initial_model_directory, '*'))\n\n self.update_all_tables()\n\n except KeyError:\n self.update_status_bar('Sorry, this is not a XChemExplorer config file!')\n self.update_log.insert('Sorry, this is not a XChemExplorer config file!')\n\n except:\n print(\"Unexpected error:\", sys.exc_info()[0])\n raise\n\n def save_config_file(self):\n file_name = str(QtGui.QFileDialog.getSaveFileName(self.window, 'Save file', self.current_directory))\n # make sure that the file always has .conf extension\n if str(file_name).rfind('.') != -1:\n file_name = file_name[:file_name.rfind('.')] + '.conf'\n else:\n file_name = file_name + '.conf'\n pickle.dump(self.settings, open(file_name, 'wb'))\n\n def update_reference_files(self, reference_root):\n self.reference_file_list = self.get_reference_file_list(reference_root)\n 
        self.populate_reference_combobox(self.reference_file_selection_combobox)
        self.populate_reference_combobox(self.pandda_reference_file_selection_combobox)



    def check_status_rerun_dimple_on_all_autoprocessing_files(self):
        # NOTE(review): debug placeholder — only prints 'hallo'; no status check implemented.
        print('hallo')

    def rerun_dimple_on_all_autoprocessing_files(self):
        # Collect a DIMPLE job for every dataset that has an auto-processing MTZ file
        # on disc, then hand the list to check_before_running_dimple().
        job_list = []
        self.update_log.insert('preparing to run DIMPLE on all autoprocessing files')
        for xtal in self.data_collection_dict:
            for entry in self.data_collection_dict[xtal]:
                if entry[0] == 'logfile':
                    db_dict = entry[6]
                    try:
                        if os.path.isfile(os.path.join(db_dict['DataProcessingPathToMTZfile'],
                                                       db_dict['DataProcessingMTZfileName'])) or \
                                os.path.isfile(os.path.join(db_dict['DataProcessingPathToMTZfile'])):
                            job_list = self.get_job_list_for_dimple_rerun(xtal, job_list, db_dict, entry)
                    except KeyError:
                        # older records may lack 'DataProcessingMTZfileName'; retry with the path only
                        try:
                            if os.path.isfile(os.path.join(db_dict['DataProcessingPathToMTZfile'])):
                                job_list = self.get_job_list_for_dimple_rerun(xtal, job_list, db_dict, entry)
                        except KeyError:
                            continue
        if job_list:
            self.update_log.insert('trying to run DIMPLE on ALL auto-processing files')
            # NOTE(review): check_before_running_dimple is defined elsewhere in this file
            # as (self, job_list, instruction) — this call passes no instruction; verify.
            self.check_before_running_dimple(job_list)

    def run_dimple_on_selected_autoprocessing_file(self, instruction):
        # Build initial-refinement jobs (DIMPLE/pipedream/phenix, per 'instruction')
        # for every crystal whose checkbox is ticked in the dimple table.
        job_list = []
        for xtal in sorted(self.initial_model_dimple_dict):
            # print(xtal)
            if self.initial_model_dimple_dict[xtal][0].isChecked():
                # print(xtal + ' is checked...')
                db_dict = self.xtal_db_dict[xtal]

                # the if statement below is so convoluted, so that it is compatible with older data source files

                if os.path.isfile(
                        os.path.join(db_dict['ProjectDirectory'], xtal, db_dict['DataProcessingPathToMTZfile'],
                                     db_dict['DataProcessingMTZfileName'])) or \
                        os.path.isfile(
                            os.path.join(db_dict['ProjectDirectory'], xtal, db_dict['DataProcessingPathToMTZfile'])) or \
                        os.path.isfile(os.path.join(db_dict['DataProcessingPathToMTZfile'],
                                                    db_dict['DataProcessingMTZfileName'])) or \
                        os.path.isfile(os.path.join(db_dict['DataProcessingPathToMTZfile'])):

                    # pick the first existing MTZ location, preferring absolute path + filename
                    if os.path.isfile(
                            os.path.join(db_dict['DataProcessingPathToMTZfile'], db_dict['DataProcessingMTZfileName'])):
                        mtzin = os.path.join(db_dict['DataProcessingPathToMTZfile'],
                                             db_dict['DataProcessingMTZfileName'])
                    elif os.path.isfile(os.path.join(db_dict['DataProcessingPathToMTZfile'])):
                        mtzin = os.path.join(db_dict['DataProcessingPathToMTZfile'])
                    elif os.path.isfile(
                            os.path.join(db_dict['ProjectDirectory'], xtal, db_dict['DataProcessingPathToMTZfile'],
                                         db_dict['DataProcessingMTZfileName'])):
                        mtzin = os.path.join(db_dict['ProjectDirectory'], xtal, db_dict['DataProcessingPathToMTZfile'],
                                             db_dict['DataProcessingMTZfileName'])
                    elif os.path.isfile(
                            os.path.join(db_dict['ProjectDirectory'], xtal, db_dict['DataProcessingPathToMTZfile'])):
                        mtzin = os.path.join(db_dict['ProjectDirectory'], xtal, db_dict['DataProcessingPathToMTZfile'])

                    reference_file = str(self.initial_model_dimple_dict[xtal][1].currentText())

                    reference_file_pdb = os.path.join(self.reference_directory, reference_file + '.pdb')

                    # a reference PDB is mandatory; skip the crystal otherwise
                    if not os.path.isfile(reference_file_pdb):
                        continue

                    # optional reference MTZ / restraints CIF become command-line flags
                    if os.path.isfile(os.path.join(self.reference_directory, reference_file + '.mtz')):
                        reference_file_mtz = ' -R ' + os.path.join(self.reference_directory, reference_file + '.mtz')
                    else:
                        reference_file_mtz = ''

                    if os.path.isfile(os.path.join(self.reference_directory, reference_file + '.cif')):
                        reference_file_cif = ' --libin ' + os.path.join(self.reference_directory,
                                                                        reference_file + '.cif')
                    else:
                        reference_file_cif = ''

                    job_list.append([xtal,
                                     'dimple_rerun_on_selected_file',
                                     mtzin,
                                     reference_file_pdb,
                                     reference_file_mtz,
                                     reference_file_cif])
                else:
                    print('WARNING: ' + xtal + ' has not been submitted to dimple because no files were found: ')
                    if not os.path.isfile(os.path.join(db_dict['ProjectDirectory'], xtal, db_dict['DataProcessingPathToMTZfile'],
                                                       db_dict['DataProcessingMTZfileName'])):
                        print(' ' + str(os.path.join(db_dict['ProjectDirectory'], xtal, db_dict['DataProcessingPathToMTZfile'],
                                                     db_dict['DataProcessingMTZfileName'])) + ' is missing')
                    if not os.path.isfile(os.path.join(db_dict['ProjectDirectory'], xtal, db_dict['DataProcessingPathToMTZfile'])):
                        print(' ' + str(os.path.join(db_dict['ProjectDirectory'], xtal, db_dict['DataProcessingPathToMTZfile'])) + ' is missing')
                    if not os.path.isfile(os.path.join(db_dict['DataProcessingPathToMTZfile'])):
                        print(' ' + str(os.path.join(db_dict['DataProcessingPathToMTZfile']) + ' is missing'))


        if job_list:
            self.update_log.insert('trying to run DIMPLE on SELECTED auto-processing files')
            self.check_before_running_dimple(job_list,instruction)

    def remove_selected_dimple_files(self,instruction):
        # Delete initial-refinement output of the pipeline named in 'instruction'
        # for all ticked crystals, after user confirmation.
        # NOTE(review): 'pipeline' stays unbound if none of the keywords match —
        # verify callers only pass dimple/pipedream/phenix instructions.
        if 'dimple' in instruction.lower():
            pipeline = 'dimple'
        elif 'pipedream' in instruction.lower():
            pipeline = 'pipedream'
        elif 'phenix' in instruction.lower():
            pipeline = 'phenix.ligand_pipeline'

        job_list = []
        for xtal in sorted(self.initial_model_dimple_dict):
            if self.initial_model_dimple_dict[xtal][0].isChecked():
                job_list.append(xtal)

        if job_list:
            msgBox = QtGui.QMessageBox()
            msgBox.setText("Do you really want to delete {0!s} {1!s} files?".format(len(job_list),self.preferences['initial_refinement_pipeline']))
            msgBox.addButton(QtGui.QPushButton('Go'), QtGui.QMessageBox.YesRole)
            msgBox.addButton(QtGui.QPushButton('Cancel'), QtGui.QMessageBox.RejectRole)
            reply = msgBox.exec_();

            # reply == 0 corresponds to the first button added ('Go')
            if reply == 0:
                self.status_bar.showMessage('preparing to remove {0!s} files'.format(pipeline))
                self.update_log.insert('preparing to remove {0!s} files'.format(pipeline))
                self.work_thread = XChemThread.remove_selected_dimple_files(job_list,
                                                                            self.initial_model_directory,
                                                                            self.xce_logfile,
                                                                            self.database_directory,
                                                                            self.data_source_file,
                                                                            pipeline)
                self.explorer_active = 1
                # NOTE(review): "finished()" is connected twice below — verify intended.
                self.connect(self.work_thread, QtCore.SIGNAL("finished()"), self.thread_finished)
                self.connect(self.work_thread, QtCore.SIGNAL("update_progress_bar"), self.update_progress_bar)
                self.connect(self.work_thread, QtCore.SIGNAL("update_status_bar(QString)"), self.update_status_bar)
                self.connect(self.work_thread, QtCore.SIGNAL("finished()"), self.thread_finished)
                self.connect(self.work_thread, QtCore.SIGNAL("datasource_menu_reload_samples"),
                             self.datasource_menu_reload_samples)
                self.work_thread.start()

    def set_results_from_selected_pipeline(self,instruction):
        # Mark the results of one initial-refinement pipeline as the current ones
        # for all ticked crystals (background thread updates the database).
        # NOTE(review): 'pipeline' stays unbound if no keyword matches — see above.
        if 'dimple' in instruction.lower():
            pipeline = 'dimple'
        elif 'pipedream' in instruction.lower():
            pipeline = 'pipedream'
        elif 'phenix' in instruction.lower():
            pipeline = 'phenix.ligand_pipeline'

        self.update_log.warning('selecting initial refinement results from '+pipeline)

        job_list = []
        for xtal in sorted(self.initial_model_dimple_dict):
            if self.initial_model_dimple_dict[xtal][0].isChecked():
                job_list.append(xtal)

        self.work_thread = XChemThread.set_results_from_selected_pipeline(job_list,
                                                                          self.initial_model_directory,
                                                                          self.xce_logfile,
                                                                          self.database_directory,
                                                                          self.data_source_file,
                                                                          pipeline)
        self.explorer_active = 1
        self.connect(self.work_thread, QtCore.SIGNAL("finished()"), self.thread_finished)
        self.connect(self.work_thread, QtCore.SIGNAL("update_progress_bar"), self.update_progress_bar)
        self.connect(self.work_thread, QtCore.SIGNAL("update_status_bar(QString)"), self.update_status_bar)
        self.connect(self.work_thread, QtCore.SIGNAL("finished()"), self.thread_finished)
        self.connect(self.work_thread, QtCore.SIGNAL("datasource_menu_reload_samples"),
                     self.datasource_menu_reload_samples)
        self.work_thread.start()



    def run_xia2_on_selected_datasets(self, overwrite):
        # Reprocess the ticked datasets with xia2 using the protocols/options
        # currently chosen in the reprocessing tab.

        # check which programs should be run
        protocol = []
        if self.xia2_3d_checkbox.isChecked():
            protocol.append('3d')
        if self.xia2_3dii_checkbox.isChecked():
            protocol.append('3dii')
        if self.xia2_dials_checkbox.isChecked():
            protocol.append('dials')

        # space group
        spg = []
        if str(self.reprocess_space_group_comboxbox.currentText()) != 'ignore':
            spg.append(str(self.reprocess_space_group_comboxbox.currentText()))

        # reference file
        ref = []
        if os.path.isfile(self.diffraction_data_reference_mtz):
            ref.append(self.diffraction_data_reference_mtz)

        # resolution limit
        reso_limit = []
        if str(self.reprocess_isigma_combobox.currentText()) != 'default':
            reso_limit.append(str(self.reprocess_isigma_combobox.currentText()))

        # cc 1/2
        cc_half = []
        if str(self.reprocess_cc_half_combobox.currentText()) != 'default':
            cc_half.append(str(self.reprocess_cc_half_combobox.currentText()))

        # map ticked dataset IDs to their diffraction data records
        run_dict = {}
        allRows = self.datasets_reprocess_table.rowCount()
        for row in xrange(0, allRows):
            dataset_id = str(self.datasets_reprocess_table.item(row, 0).text())
            sample_id = str(self.datasets_reprocess_table.item(row, 1).text())
            if self.diffraction_data_table_dict[dataset_id][0].isChecked():
                run_dict[sample_id] = self.diffraction_data_dict[dataset_id]

        if protocol != [] and run_dict != {}:
            self.work_thread = XChemProcess.run_xia2(self.initial_model_directory,
                                                     run_dict,
                                                     protocol,
                                                     spg,
                                                     ref,
                                                     reso_limit,
                                                     cc_half,
                                                     self.xce_logfile,
                                                     self.external_software,
                                                     self.ccp4_scratch_directory,
                                                     self.max_queue_jobs,
                                                     os.path.join(self.database_directory, self.data_source_file),
                                                     overwrite)
            self.explorer_active = 1
            self.connect(self.work_thread, QtCore.SIGNAL("finished()"), self.thread_finished)
            self.connect(self.work_thread, QtCore.SIGNAL("update_progress_bar"), self.update_progress_bar)
            self.connect(self.work_thread, QtCore.SIGNAL("update_status_bar(QString)"), self.update_status_bar)
            self.connect(self.work_thread, QtCore.SIGNAL("finished()"), self.thread_finished)
            self.work_thread.start()
        else:
            self.update_log.insert('please select datasets and/ or data processing protocol')
            self.update_status_bar('please select datasets and/ or data processing protocol')

    def update_reprocessing_table(self):
        # Refresh the 'DataProcessingStatus' column (column 7) of the reprocessing
        # table, colour-coding the cell by status.
        allRows = self.datasets_reprocess_table.rowCount()
        for row in xrange(0, allRows):
            sample_id = str(self.datasets_reprocess_table.item(row, 1).text())
            if sample_id in self.xtal_db_dict:
                db_dict = self.xtal_db_dict[sample_id]
                cell_text = QtGui.QTableWidgetItem()
                cell_text.setText(db_dict['DataProcessingStatus'])
                cell_text.setTextAlignment(QtCore.Qt.AlignCenter | QtCore.Qt.AlignCenter)
                if db_dict['DataProcessingStatus'] == 'running':
                    cell_text.setBackground(QtGui.QColor(100, 230, 150))
                elif db_dict['DataProcessingStatus'] == 'pending':
                    cell_text.setBackground(QtGui.QColor(20, 100, 230))
                elif db_dict['DataProcessingStatus'] == 'started':
                    cell_text.setBackground(QtGui.QColor(230, 240, 110))
                elif db_dict['DataProcessingStatus'] == 'finished':
                    cell_text.setBackground(QtGui.QColor(255, 255, 255))
                self.datasets_reprocess_table.setItem(row, 7, cell_text)

    def get_job_list_for_dimple_rerun(self, xtal, job_list, db_dict, entry):
        # Append one DIMPLE job for 'xtal' to job_list, choosing the reference file
        # in the same point group whose unit-cell volume is closest to the dataset's.
        self.status_bar.showMessage('checking: ' + str(
            os.path.join(db_dict['DataProcessingPathToMTZfile'], db_dict['DataProcessingMTZfileName'])))
        suitable_reference = []
        for reference in self.reference_file_list:
            # first we need one in the same pointgroup
            if reference[5] == db_dict['DataProcessingPointGroup']:
                try:
                    difference = math.fabs(1 - (float(db_dict['DataProcessingUnitCellVolume']) / float(reference[4])))
                    suitable_reference.append([reference[0], difference])
                except ValueError:
                    continue
        if suitable_reference:
            # smallest relative volume difference wins
            reference_file = min(suitable_reference, key=lambda x: x[1])[0]
            visit = entry[1]
            run = entry[2]
            autoproc = entry[4]

            reference_file_pdb = os.path.join(self.reference_directory, reference_file + '.pdb')

            if os.path.isfile(os.path.join(self.reference_directory, reference_file + '.mtz')):
                reference_file_mtz = ' -R ' + os.path.join(self.reference_directory, reference_file + '.mtz')
            else:
                reference_file_mtz = ''

            if os.path.isfile(os.path.join(self.reference_directory, reference_file + '.cif')):
                reference_file_cif = ' --libin ' + os.path.join(self.reference_directory, reference_file + '.cif')
            else:
                reference_file_cif = ''

            if os.path.isfile(os.path.join(self.initial_model_directory, xtal, xtal +'.mtz')):
                mtzin = os.path.join(self.initial_model_directory, xtal, xtal +'.mtz')

            # NOTE(review): if the MTZ above is missing, 'mtzin' is unbound and the
            # append below raises NameError — verify upstream guarantees its existence.
            self.update_log.insert('adding ' + xtal + visit + '-' + run + autoproc + ' to list')
            job_list.append([xtal,
                             visit + '-' + run + autoproc,
                             mtzin,
                             reference_file_pdb,
                             reference_file_mtz,
                             reference_file_cif])
        self.status_bar.showMessage('idle')
        return job_list

    def check_before_running_dimple(self, job_list,instruction):
        # Confirm with the user, then launch the selected initial-refinement
        # pipeline for every job in job_list on a background thread.

        msgBox = QtGui.QMessageBox()
        msgBox.setText(
            "Do you really want to run {0!s} {1!s} jobs?\nNote: we will not run more than {2!s} at once on the cluster!".format(
                len(job_list),self.preferences['initial_refinement_pipeline'],self.preferences['max_queue_jobs']))
        msgBox.addButton(QtGui.QPushButton('Go'), QtGui.QMessageBox.YesRole)
        msgBox.addButton(QtGui.QPushButton('Cancel'), QtGui.QMessageBox.RejectRole)
        reply = msgBox.exec_();

        # reply == 0 corresponds to the first button added ('Go')
        if reply == 0:
            # NOTE(review): 'pipeline' stays unbound if no keyword matches 'instruction'.
            if 'dimple' in instruction.lower():
                pipeline = 'dimple'
            elif 'pipedream' in instruction.lower():
                pipeline = 'pipedream'
            elif 'phenix' in instruction.lower():
                pipeline = 'phenix.ligand_pipeline'

            self.status_bar.showMessage('preparing {0!s} DIMPLE jobs'.format(len(job_list)))
            self.update_log.insert('preparing to run {0!s} DIMPLE jobs'.format(len(job_list)))
            if self.external_software['qsub_array']:
                self.update_log.insert('we will be running an ARRAY job on the DLS computer cluster')
                self.update_log.insert(
                    'please note that the maximum number of jobs that will be running at once is {0!s}'.format(
                        self.max_queue_jobs))
                self.update_log.insert(
                    'you can change this in the PREFERENCES menu, but be warned that to high a number might break the cluster!')
            self.update_log.insert('preparing input files for DIMPLE...')
            self.work_thread = XChemThread.run_dimple_on_all_autoprocessing_files_new(job_list,
                                                                                     self.initial_model_directory,
                                                                                     self.external_software,
                                                                                     self.ccp4_scratch_directory,
                                                                                     self.database_directory,
                                                                                     self.data_source_file,
                                                                                     self.max_queue_jobs,
                                                                                     self.xce_logfile,
                                                                                     self.using_remote_qsub_submission,
                                                                                     self.remote_qsub_submission,
                                                                                     self.preferences['dimple_twin_mode'],
                                                                                     pipeline )
            self.explorer_active = 1
            self.connect(self.work_thread, QtCore.SIGNAL("finished()"), self.thread_finished)
            self.connect(self.work_thread, QtCore.SIGNAL("update_progress_bar"), self.update_progress_bar)
            self.connect(self.work_thread, QtCore.SIGNAL("update_status_bar(QString)"), self.update_status_bar)
            self.connect(self.work_thread, QtCore.SIGNAL("finished()"), self.thread_finished)
            self.connect(self.work_thread, QtCore.SIGNAL("datasource_menu_reload_samples"),
                         self.datasource_menu_reload_samples)
            self.work_thread.start()









    def open_csv_file_translate_datasetID_to_sampleID(self):
        # Let the user pick the CSV that maps dataset IDs to sample IDs and
        # remember the chosen path.
        file_name_temp = QtGui.QFileDialog.getOpenFileNameAndFilter(self.window, 'Open file', self.current_directory,
                                                                    '*.csv')
        file_name = tuple(file_name_temp)[0]
        self.translate_datasetID_to_sampleID_csv_label.setText(file_name)
        self.translate_datasetID_to_sampleID_file = file_name



    def update_datasets_reprocess_table(self, data_dict):
        # Rebuild the reprocessing table from 'data_dict' (dataset ID -> record).
        self.update_log.insert('updating reprocess datasets table')
        print('updating reprocess datasets table')
        self.diffraction_data_table_dict = {}
        self.diffraction_data_dict = data_dict

        self.diffraction_data_search_info = 'found ' + str(len(self.diffraction_data_dict)) + ' datasets'
        self.diffraction_data_search_label.setText(self.diffraction_data_search_info)
        self.update_log.insert(self.diffraction_data_search_info)
        self.datasource_menu_reload_samples()
        # update table
table\n column_name = self.db.translate_xce_column_list_to_sqlite(self.datasets_reprocess_columns)\n # set rows to 0\n self.datasets_reprocess_table.setRowCount(0)\n for entry in sorted(self.diffraction_data_dict):\n self.update_log.insert(str(self.diffraction_data_dict[entry]))\n if entry in self.xtal_db_dict:\n db_dict = self.xtal_db_dict[entry]\n else:\n db_dict = {}\n row = self.datasets_reprocess_table.rowCount()\n self.datasets_reprocess_table.insertRow(row)\n for column, header in enumerate(column_name):\n if header[0] == 'Dataset ID' or header[0] == 'Sample ID':\n cell_text = QtGui.QTableWidgetItem()\n cell_text.setText(str(entry))\n cell_text.setTextAlignment(QtCore.Qt.AlignCenter | QtCore.Qt.AlignCenter)\n self.datasets_reprocess_table.setItem(row, column, cell_text)\n elif header[0] == 'Run\\nxia2':\n run_xia2 = QtGui.QCheckBox()\n run_xia2.toggle()\n self.datasets_reprocess_table.setCellWidget(row, column, run_xia2)\n run_xia2.setChecked(False)\n self.diffraction_data_table_dict[entry] = [run_xia2]\n else:\n cell_text = QtGui.QTableWidgetItem()\n if db_dict != {}:\n if header[0] == 'DataProcessing\\nStatus':\n if str(db_dict[header[1]]) == 'running':\n cell_text.setBackground(QtGui.QColor(100, 230, 150))\n elif str(db_dict[header[1]]) == 'pending':\n cell_text.setBackground(QtGui.QColor(20, 100, 230))\n elif str(db_dict[header[1]]) == 'started':\n cell_text.setBackground(QtGui.QColor(230, 240, 110))\n elif str(db_dict[header[1]]) == 'finished':\n cell_text.setBackground(QtGui.QColor(255, 255, 255))\n cell_text.setText(str(db_dict[header[1]]))\n else:\n cell_text.setText('')\n cell_text.setTextAlignment(QtCore.Qt.AlignCenter | QtCore.Qt.AlignCenter)\n self.datasets_reprocess_table.setItem(row, column, cell_text)\n\n def update_all_tables(self):\n self.update_log.insert('checking for new reference files')\n self.update_status_bar('checking for new reference files')\n self.reference_file_list = self.get_reference_file_list(' ')\n 
self.update_log.insert('updating Overview table')\n self.update_status_bar('updating Overview table')\n self.populate_and_update_datasource_table()\n self.update_log.insert('updating Maps table')\n self.update_status_bar('updating Maps table')\n self.create_maps_table()\n self.update_log.insert('updating PANDDA table')\n self.update_status_bar('updating PANDDA table')\n self.populate_pandda_analyse_input_table()\n self.update_log.insert('updating REFINEMENT table')\n self.update_status_bar('updating REFINEMENT table')\n self.populate_and_update_refinement_table()\n self.update_log.insert('updating REPROCESSING table')\n self.update_status_bar('updating REPROCESSING table')\n self.update_reprocessing_table()\n self.update_status_bar('idle')\n self.update_summary_plot()\n\n\n\n def change_allowed_unitcell_difference_percent(self, text):\n try:\n self.allowed_unitcell_difference_percent = int(text)\n self.settings['unitcell_difference'] = self.allowed_unitcell_difference_percent\n self.update_log.insert(\n 'changing max allowed unit cell difference between reference and xtal to {0!s} percent'.format(\n self.allowed_unitcell_difference_percent))\n except ValueError:\n if str(text).find('.') != -1:\n self.allowed_unitcell_difference_percent = int(str(text)[:str(text).find('.')])\n self.settings['unitcell_difference'] = self.allowed_unitcell_difference_percent\n self.update_log.insert(\n 'changing max allowed unit cell difference between reference and xtal to {0!s} percent'.format(\n self.allowed_unitcell_difference_percent))\n else:\n pass\n\n def change_max_queue_jobs(self, text):\n try:\n self.max_queue_jobs = int(text)\n self.settings['max_queue_jobs'] = self.max_queue_jobs\n self.update_log.insert('changing max number of jobs running simultaneously on DLS cluster to {0!s}'.format(\n self.max_queue_jobs))\n except ValueError:\n if str(text).find('.') != -1:\n self.max_queue_jobs = int(str(text)[:str(text).find('.')])\n self.settings['max_queue_jobs'] = 
self.max_queue_jobs\n self.update_log.insert(\n 'changing max number of jobs running simultaneously on DLS cluster to {0!s}'.format(\n self.max_queue_jobs))\n else:\n pass\n\n def change_acceptable_low_resolution_limit(self, text):\n try:\n self.acceptable_low_resolution_limit_for_data = float(text)\n self.settings['too_low_resolution_data'] = self.acceptable_low_resolution_limit_for_data\n except ValueError:\n pass\n\n def change_filename_root(self, text):\n self.filename_root = str(text)\n self.settings['filename_root'] = self.filename_root\n\n def button_clicked(self):\n if not self.data_source_set:\n print('sender text bit')\n if self.sender().text() == \"Create New Data\\nSource (SQLite)\":\n file_name = str(QtGui.QFileDialog.getSaveFileName(self.window, 'Save file', self.database_directory))\n # make sure that the file always has .sqlite extension\n if file_name.rfind('.') != -1:\n file_name = file_name[:file_name.rfind('.')] + '.sqlite'\n else:\n file_name = file_name + '.sqlite'\n self.db = XChemDB.data_source(file_name)\n print('==> XCE: creating new data source')\n self.db.create_empty_data_source_file()\n self.db.create_missing_columns()\n if self.data_source_file == '':\n self.database_directory = file_name[:file_name.rfind('/')]\n self.data_source_file = file_name[file_name.rfind('/') + 1:]\n self.data_source_file_label.setText(os.path.join(self.database_directory, self.data_source_file))\n self.settings['database_directory'] = self.database_directory\n self.settings['data_source'] = self.data_source_file\n self.data_source_set = True\n else:\n self.no_data_source_selected()\n print('No datasource selected')\n pass\n\n # first find out which of the 'Run' or 'Status' buttons is sending\n for item in self.workflow_widget_dict:\n for widget in self.workflow_widget_dict[item]:\n if widget == self.sender():\n # get index of item in self.workflow; Note this index should be the same as the index\n # of the self.main_tab_widget which belongs to this task\n 
                    task_index = self.workflow.index(item)
                    instruction = str(self.workflow_widget_dict[item][0].currentText())
                    print(instruction)
                    action = str(self.sender().text())
                    # only act if the sending button belongs to the visible tab
                    if self.main_tab_widget.currentIndex() == task_index:
                        if self.explorer_active == 0 and self.data_source_set == True:
                            if action == 'Run':
                                print('==> XCE: Remote submission status = ' + str(self.using_remote_qsub_submission))
                                # print(instruction)
                                self.prepare_and_run_task(instruction)
                            elif action == 'Status':
                                self.get_status_of_workflow_milestone(instruction)
                                # reflect PanDDA marker files in the status label
                                if os.path.exists(str(self.panddas_directory + '/pandda.done')):
                                    self.pandda_status = 'Finished!'
                                    self.pandda_status_label.setStyleSheet('color: green')
                                if os.path.exists(str(self.panddas_directory + '/pandda.running')):
                                    self.pandda_status = 'Running...'
                                    self.pandda_status_label.setStyleSheet('color: orange')
                                if os.path.exists(str(self.panddas_directory + '/pandda.errored')):
                                    self.pandda_status = 'Error encountered... please check the log files for pandda!'
                                    self.pandda_status_label.setStyleSheet('color: red')
                                self.pandda_status_label.setText(str('STATUS: ' + self.pandda_status))
                    else:
                        self.need_to_switch_main_tab(task_index)

    def get_status_of_workflow_milestone(self, instruction):
        # Print a cluster/job status summary for the tool named in 'instruction'.
        # first update all tables
        self.datasource_menu_reload_samples()

        cluster_dict = XChemMain.get_jobs_running_on_cluster()

        self.update_log.insert('getting status updates...')

        self.status_bar.showMessage('please check terminal window for further information')

        self.update_log.insert('{0!s} samples are currently in database'.format(str(len(self.xtal_db_dict))))

        if 'DIMPLE' in instruction:
            XChemMain.print_cluster_status_message('dimple', cluster_dict, self.xce_logfile)

        elif 'Create CIF/PDB/PNG file' in instruction:
            XChemMain.print_acedrg_status(self.xce_logfile, self.xtal_db_dict)
            XChemMain.print_cluster_status_message('acedrg', cluster_dict, self.xce_logfile)

        elif instruction.startswith('Run xia2 on selected datasets'):
            XChemMain.print_cluster_status_message('xia2', cluster_dict, self.xce_logfile)

        elif 'pandda' in instruction.lower():
            XChemMain.print_cluster_status_message('pandda', cluster_dict, self.xce_logfile)

        elif 'coot' in instruction.lower():
            XChemMain.print_cluster_status_message('refmac', cluster_dict, self.xce_logfile)

    def prepare_and_run_task(self, instruction):
        # Map the selected workflow instruction string onto the matching action.

        if instruction == 'Get New Results from Autoprocessing':
            self.rescore = False
            self.check_for_new_autoprocessing_results()

        elif instruction == 'Rescore Datasets':
            self.rescore = True
            self.select_best_autoprocessing_result()

#        if instruction == 'Get New Results from Autoprocessing':
#            self.check_for_new_autoprocessing_or_rescore(False)
#            self.update_header_and_data_from_datasource()
#            self.update_all_tables()
#
#        elif instruction == 'Rescore Datasets':
#            self.check_for_new_autoprocessing_or_rescore(True)

#        elif instruction == "Read PKL file":
#            summary = pickle.load(open(self.datasets_summary_file, "rb"))
#            self.create_widgets_for_autoprocessing_results_only(summary)

        elif instruction == 'Run xia2 on selected datasets':
            self.run_xia2_on_selected_datasets(False)

        elif instruction == 'Run xia2 on selected datasets - overwrite':
            self.run_xia2_on_selected_datasets(True)

#        elif instruction == 'Run DIMPLE on All Autoprocessing MTZ files':
#            self.rerun_dimple_on_all_autoprocessing_files()

#        elif instruction == 'Run initial refinement on selected MTZ files':
#            self.run_dimple_on_selected_autoprocessing_file()

        elif instruction == 'Run DIMPLE on selected MTZ files':
            self.run_dimple_on_selected_autoprocessing_file(instruction)

        elif instruction == 'Run PIPEDREAM on selected MTZ files':
            self.run_dimple_on_selected_autoprocessing_file(instruction)

        elif instruction == 'Run PHENIX.LIGAND_PIPELINE on selected MTZ files':
            self.run_dimple_on_selected_autoprocessing_file(instruction)


#        elif instruction == 'Remove selected initial refinement files':
#            self.remove_selected_dimple_files()

        elif instruction == 'Remove selected DIMPLE files':
            self.remove_selected_dimple_files(instruction)

        elif instruction == 'Remove selected PIPEDREAM files':
            self.remove_selected_dimple_files(instruction)

        elif instruction == 'Remove selected PHENIX.LIGAND_PIPELINE files':
            self.remove_selected_dimple_files(instruction)

#        elif instruction == 'Set only results from selected pipeline':
#            self.set_results_from_selected_pipeline()

        elif instruction == 'Set DIMPLE output':
            self.set_results_from_selected_pipeline(instruction)

        elif instruction == 'Set PIPEDREAM output':
            self.set_results_from_selected_pipeline(instruction)

        elif instruction == 'Set PHENIX.LIGAND_PIPELINE output':
            self.set_results_from_selected_pipeline(instruction)


#        elif instruction == 'Create CIF/PDB/PNG file of ALL compounds':
#            self.create_cif_pdb_png_files('ALL')

#        elif instruction == 'Create CIF/PDB/PNG file of NEW compounds':
#            self.create_cif_pdb_png_files('NEW')

        elif instruction == 'Create CIF/PDB/PNG file of SELECTED compounds':
            self.create_cif_pdb_png_files('SELECTED')

        elif instruction == 'Merge ligand CIF file with selected compounds':
            self.merge_cif_files('merge')

        elif instruction == 'Restore original CIF file of selected compounds':
            self.merge_cif_files('restore')

        elif instruction == 'Fit ligands into maps after initial refinement':
            self.fit_ligands_into_dimple_maps()

        elif instruction == 'pandda.analyse':
            self.run_pandda_analyse('production_run')

        elif instruction == 'pandda.analyse (PanDDA2)':
            self.run_pandda_analyse('production_run_pandda_two')

        elif instruction == 'pre-run for ground state model':
            self.run_pandda_analyse('pre_run')

        elif instruction == 'pandda.inspect':
            self.run_pandda_inspect()

        elif instruction == 'run pandda.inspect at home':
            self.run_pandda_inspect_at_home()

        elif instruction == 'Export NEW PANDDA models':
            update_datasource_only = False
            which_models = 'new'
            self.run_pandda_export(update_datasource_only, which_models)

        elif instruction == 'Export ALL PANDDA models':
            update_datasource_only = False
            which_models = 'all'
            self.run_pandda_export(update_datasource_only, which_models)

        elif instruction == 'Export SELECTED PANDDA models':
            update_datasource_only = False
            which_models = 'selected'
            self.run_pandda_export(update_datasource_only, which_models)

        elif instruction == 'refine ALL bound-state models with BUSTER':
            self.run_refine_bound_state_with_buster('all')

        elif instruction == 'refine NEW bound-state models with BUSTER':
            self.run_refine_bound_state_with_buster('new')

        elif instruction == 'refine ALL bound-state models with BUSTER (no sanity check)':
            self.run_refine_bound_state_with_buster('allnocheck')

        elif instruction == 'refine NEW bound-state models with BUSTER (no sanity check)':
            self.run_refine_bound_state_with_buster('newnocheck')

#        elif instruction == 'refine NEW bound-state models with BUSTER - NEW':
#            self.run_refine_bound_state_with_buster_new('new')

        elif instruction == 'cluster datasets':
            self.cluster_datasets_for_pandda()

        elif instruction == 'Update datasource with results from pandda.inspect':
            update_datasource_only = True
            which_models = 'all'
            self.run_pandda_export(update_datasource_only, which_models)

        elif instruction == 'Show HTML summary':
            self.show_pandda_html_summary()

        elif instruction == 'Event Map -> SF':
            self.convert_event_maps_to_SF()

        elif instruction == 'apo -> mmcif':
            self.convert_apo_to_mmcif()

        elif instruction == 'check modelled ligands':
            self.compare_modelled_ligands_and_panddaTable()

        elif instruction.startswith("Open COOT") or instruction == 'Build ground state model':
            if not self.coot_running:
                self.update_log.insert('starting coot...')
                # choose the COOT interface flavour from the exact instruction text
                if instruction == "Open COOT":
                    interface = 'new'
                elif instruction == "Open COOT - REFMAC refinement -":
                    interface = 'new'
                elif instruction == "Open COOT - test -":
                    interface = 'test'
                elif instruction == "Open COOT for old PanDDA":
                    interface = 'panddaV1'
                elif instruction == 'Build ground state model':
                    interface = 'reference'
                elif instruction == 'Open COOT - BUSTER refinement -':
                    interface = 'buster'
                elif instruction == 'Open COOT - dimple_twin -':
                    interface = 'dimple_twin'
                else:
                    interface = 'old'
#                print self.settings
                self.work_thread = XChemThread.start_COOT(self.settings, interface)
                self.connect(self.work_thread, QtCore.SIGNAL("finished()"), self.thread_finished)
                self.work_thread.start()


        elif instruction == 'Update Deposition Table':
            self.update_deposition_table()



    def check_status_create_png_of_soaked_compound(self):
        # Summarise restraint-generation progress over all '<sample>/compound' folders
        # and show the counts in the status bar.
        number_of_samples = 0
        running = 0
        timestamp_list = []
        cif_file_generated = 0
        for folder in glob.glob(os.path.join(self.initial_model_directory, '*', 'compound')):
            number_of_samples += 1
            if os.path.isfile(os.path.join(folder, 'RESTRAINTS_IN_PROGRESS')):
                running += 1
                timestamp = datetime.fromtimestamp(
                    os.path.getmtime(os.path.join(folder, 'RESTRAINTS_IN_PROGRESS'))).strftime('%Y-%m-%d %H:%M:%S')
                timestamp_list.append(timestamp)
            for cif_file in glob.glob(os.path.join(folder, '*.cif')):
                if os.path.isfile(cif_file):
                    cif_file_generated += 1
        if timestamp_list:
            last_timestamp = max(timestamp_list)
        else:
            last_timestamp = 'n/a'
        message = 'Datasets: ' + str(number_of_samples) + ', jobs running: ' + str(running) + ', jobs finished: ' + str(
            cif_file_generated) + ', last job submmitted: ' + str(last_timestamp)
        self.status_bar.showMessage(message)



        # NOTE(review): the fragment below references 'start_thread' and 'self.target'
        # but its enclosing 'def' is not visible in this chunk — indentation is
        # reconstructed; verify against the full file.
        if start_thread:
            if self.target == '=== SELECT TARGET ===':
                msgBox = QtGui.QMessageBox()
                warning = ('*** WARNING ***\n'
                           'You did not select a target!\n'
                           'In this case we will only parse the project directory!\n'
                           'Please note that this option is usually only useful in case you reprocessed your data.\n'
                           'Do you want to continue?')
your data.\\n'\n 'Do you want to continue?')\n msgBox.setText(warning)\n msgBox.addButton(QtGui.QPushButton('Yes'), QtGui.QMessageBox.YesRole)\n msgBox.addButton(QtGui.QPushButton('No'), QtGui.QMessageBox.RejectRole)\n reply = msgBox.exec_();\n if reply == 0:\n start_thread = True\n else:\n start_thread = False\n else:\n start_thread = True\n\n if start_thread:\n self.work_thread = XChemThread.read_autoprocessing_results_from_disc(self.visit_list,\n self.target,\n self.reference_file_list,\n self.database_directory,\n self.data_collection_dict,\n self.preferences,\n self.datasets_summary_file,\n self.initial_model_directory,\n rescore_only,\n self.acceptable_low_resolution_limit_for_data,\n os.path.join(self.database_directory,\n self.data_source_file),\n self.xce_logfile)\n self.explorer_active = 1\n self.connect(self.work_thread, QtCore.SIGNAL(\"update_progress_bar\"), self.update_progress_bar)\n self.connect(self.work_thread, QtCore.SIGNAL(\"update_status_bar(QString)\"), self.update_status_bar)\n self.connect(self.work_thread, QtCore.SIGNAL(\"finished()\"), self.thread_finished)\n self.connect(self.work_thread, QtCore.SIGNAL(\"create_widgets_for_autoprocessing_results_only\"),\n self.create_widgets_for_autoprocessing_results_only)\n self.work_thread.start()\n\n def save_files_to_initial_model_folder(self):\n self.work_thread = XChemThread.save_autoprocessing_results_to_disc(self.dataset_outcome_dict,\n self.data_collection_table_dict,\n self.data_collection_column_three_dict,\n self.data_collection_dict,\n self.database_directory,\n self.data_source_file,\n self.initial_model_directory,\n self.preferences,\n self.datasets_summary_file)\n self.explorer_active = 1\n self.connect(self.work_thread, QtCore.SIGNAL(\"finished()\"), self.thread_finished)\n self.connect(self.work_thread, QtCore.SIGNAL(\"update_progress_bar\"), self.update_progress_bar)\n self.connect(self.work_thread, QtCore.SIGNAL(\"update_status_bar(QString)\"), self.update_status_bar)\n 
        self.connect(self.work_thread, QtCore.SIGNAL("finished()"), self.thread_finished)
        self.work_thread.start()

    def run_pandda_analyse(self, run):
        """Collect pandda.analyse parameters from the GUI and launch the analysis thread.

        run -- 'pre_run' first shows a confirmation dialog with an appendix
               field and caps the dataset count; 'production_run_pandda_two'
               launches the pandda-two backend; any other value launches the
               standard pandda.analyse thread.
        """
        # snapshot of all pandda.analyse settings currently entered in the GUI
        pandda_params = {
            'data_dir': str(self.pandda_input_data_dir_entry.text()),
            'out_dir': str(self.pandda_output_data_dir_entry.text()),
            'submit_mode': str(self.pandda_submission_mode_selection_combobox.currentText()),
            'nproc': str(self.pandda_nproc_entry.text()),
            'min_build_datasets': str(self.pandda_min_build_dataset_entry.text()),
            'pdb_style': str(self.pandda_pdb_style_entry.text()),
            'mtz_style': str(self.pandda_mtz_style_entry.text()),
            'sort_event': str(self.pandda_sort_event_combobox.currentText()),
            'average_map': str(self.pandda_calc_map_combobox.currentText()),
            'max_new_datasets': str(self.pandda_max_new_datasets_entry.text()),
            'grid_spacing': str(self.pandda_grid_spacing_entry.text()),
            'keyword_arguments': str(self.pandda_keyword_arguments_entry.text()),
            'pandda_dir_structure': str(self.pandda_input_data_dir_entry.text()),
            'perform_diffraction_data_scaling': str(self.wilson_checkbox.isChecked()),
            'filter_pdb': str(self.pandda_reference_file_selection_combobox.currentText()),
            'reference_dir': self.reference_directory,
            'appendix': '',
            'N_datasets': len(glob.glob(os.path.join(self.initial_model_directory, '*', 'dimple.pdb'))),
            'write_mean_map': 'interesting',
            'pandda_table': self.pandda_analyse_data_table,
            'use_remote': self.using_remote_qsub_submission,
            'remote_string': self.remote_qsub_submission
        }

        if run == 'pre_run':
            # confirmation dialog with an editable appendix label for the pre-run
            msgBox = QtGui.QMessageBox()
            msgBoxLayout = msgBox.layout()
            vbox = QtGui.QVBoxLayout()
            vbox.addWidget(QtGui.QLabel(XChemToolTips.pandda_pre_run(self.reference_directory)))
            hbox = QtGui.QHBoxLayout()
            hbox.addWidget(QtGui.QLabel('appendix:'))
            appendix = QtGui.QLineEdit()
            appendix.setText('pre')
            appendix.setFixedWidth(200)
            hbox.addWidget(appendix)
            vbox.addLayout(hbox)

            msgBoxLayout.addLayout(vbox, 0, 0)
            msgBox.addButton(QtGui.QPushButton('Go'), QtGui.QMessageBox.YesRole)
            msgBox.addButton(QtGui.QPushButton('Cancel'), QtGui.QMessageBox.RejectRole)
            # reply == 0 corresponds to the first button added ('Go')
            reply = msgBox.exec_();
            if reply == 0:
                # pre-run overrides: cap the run at 100 datasets and write all mean maps
                pandda_params['appendix'] = str(appendix.text())
                pandda_params['max_new_datasets'] = '100'
                pandda_params['N_datasets'] = 100
                pandda_params['write_mean_map'] = 'all'
            else:
                return None

        self.update_log.insert('preparing pandda.analyse input script')
        if run == 'production_run_pandda_two':
            self.work_thread = XChemPANDDA.run_pandda_two_analyse(pandda_params, self.xce_logfile,
                                                                  os.path.join(self.database_directory, self.data_source_file))
        else:
            self.work_thread = XChemPANDDA.run_pandda_analyse(pandda_params, self.xce_logfile,
                                                              os.path.join(self.database_directory, self.data_source_file))
        #self.connect(self.work_thread, QtCore.SIGNAL("datasource_menu_reload_samples"),
        #self.datasource_menu_reload_samples)
        self.connect(self.work_thread, QtCore.SIGNAL("finished()"), self.thread_finished)
        self.work_thread.start()

    def cluster_datasets_for_pandda(self):
        """Launch giant.cluster_mtzs_and_pdbs over the project directory in a worker thread."""

        pandda_params = {
            'out_dir': str(self.pandda_output_data_dir_entry.text()),
            'pdb_style': str(self.pandda_pdb_style_entry.text()),
            'mtz_style': str(self.pandda_mtz_style_entry.text())
        }
        self.update_log.insert('starting giant.cluster_mtzs_and_pdbs')
        # NOTE(review): `run_pandda_analyse` below is passed as a bare name; no
        # local or parameter of that name exists in this method, so unless it is
        # defined at module scope this raises NameError at runtime — verify.
        self.work_thread = XChemPANDDA.giant_cluster_datasets(self.initial_model_directory, pandda_params,
                                                              self.xce_logfile, os.path.join(self.database_directory,
                                                                                             self.data_source_file),
                                                              run_pandda_analyse)
        self.explorer_active = 1
        self.connect(self.work_thread, QtCore.SIGNAL("update_progress_bar"), self.update_progress_bar)
        self.connect(self.work_thread, QtCore.SIGNAL("update_status_bar(QString)"), self.update_status_bar)
        self.connect(self.work_thread, QtCore.SIGNAL("datasource_menu_reload_samples"),
                     self.datasource_menu_reload_samples)
        self.connect(self.work_thread, QtCore.SIGNAL("finished()"), self.thread_finished)
        self.work_thread.start()

    def run_pandda_inspect(self):
        """Launch pandda.inspect in a worker thread using the current GUI settings."""
        # pick up the output directory currently entered in the GUI
        self.settings['panddas_directory'] = str(self.pandda_output_data_dir_entry.text())
        print('==> XCE: starting pandda.inspect')
        self.work_thread = XChemThread.start_pandda_inspect(self.settings, self.xce_logfile)
        self.connect(self.work_thread, QtCore.SIGNAL("finished()"), self.thread_finished)
        self.work_thread.start()

    def run_pandda_inspect_at_home(self):
        """Launch the 'inspect at home' variant of pandda.inspect in a worker thread."""
        self.work_thread = XChemPANDDA.run_pandda_inspect_at_home(self.panddas_directory, self.xce_logfile)
        self.connect(self.work_thread, QtCore.SIGNAL("finished()"), self.thread_finished)
        self.work_thread.start()
        # NOTE(review): the connects below happen after start() and "finished()"
        # is connected a second time; signals emitted before the connection are
        # lost and thread_finished may fire twice — verify intent.
        self.connect(self.work_thread, QtCore.SIGNAL("update_progress_bar"), self.update_progress_bar)
        self.connect(self.work_thread, QtCore.SIGNAL("update_status_bar(QString)"), self.update_status_bar)
        self.connect(self.work_thread, QtCore.SIGNAL("finished()"), self.thread_finished)

    def convert_event_maps_to_SF(self):
        """Convert PanDDA event maps in the project directory to structure factors (mtz)."""
        self.update_log.insert('converting all event maps in {0!s} to mtz files'.format(self.initial_model_directory))
        # earlier implementation, kept for reference:
#        self.work_thread = XChemPANDDA.convert_all_event_maps_in_database(self.initial_model_directory,
#                                                                          self.xce_logfile,
#                                                                          os.path.join(self.database_directory,
#                                                                                       self.data_source_file))
        self.work_thread = XChemPANDDA.find_event_map_for_ligand(self.initial_model_directory,
                                                                 self.xce_logfile,self.external_software)

        self.explorer_active = 1
        self.connect(self.work_thread, QtCore.SIGNAL("update_progress_bar"), self.update_progress_bar)
        self.connect(self.work_thread, QtCore.SIGNAL("update_status_bar(QString)"), self.update_status_bar)
        self.connect(self.work_thread, QtCore.SIGNAL("finished()"), self.thread_finished)
        self.work_thread.start()

    def convert_apo_to_mmcif(self):
        """Convert apo structures in the pandda directory to mmCIF in a worker thread."""
        self.work_thread = XChemPANDDA.convert_apo_structures_to_mmcif(self.panddas_directory,
                                                                       self.xce_logfile)

        self.explorer_active = 1
        self.connect(self.work_thread, QtCore.SIGNAL("update_progress_bar"),
                     self.update_progress_bar)
        self.connect(self.work_thread, QtCore.SIGNAL("update_status_bar(QString)"), self.update_status_bar)
        self.connect(self.work_thread, QtCore.SIGNAL("finished()"), self.thread_finished)
        self.work_thread.start()


    def compare_modelled_ligands_and_panddaTable(self):
        """Check that ligands modelled in refine.pdb agree with the panddaTable entries.

        Runs XChemPANDDA.check_number_of_modelled_ligands in a worker thread;
        discrepancies are reported back through the "show_error_dict" signal.
        """
        self.update_log.insert('checking agreement of ligands in refine.pdb and entries in panddaTable')
        self.work_thread = XChemPANDDA.check_number_of_modelled_ligands(self.initial_model_directory,
                                                                        self.xce_logfile,
                                                                        os.path.join(self.database_directory,
                                                                                     self.data_source_file))
        self.explorer_active = 1
        self.connect(self.work_thread, QtCore.SIGNAL("update_progress_bar"), self.update_progress_bar)
        self.connect(self.work_thread, QtCore.SIGNAL("update_status_bar(QString)"), self.update_status_bar)
        self.connect(self.work_thread, QtCore.SIGNAL("finished()"), self.thread_finished)
        self.connect(self.work_thread, QtCore.SIGNAL("show_error_dict"), self.show_error_dict)
        self.work_thread.start()

    def run_pandda_export(self, update_datasource_only, which_models):
        """Export PanDDA models and/or update the data source.

        update_datasource_only -- when true, only the panddaTable in the
            database is refreshed; no files are exported.
        which_models -- 'all' exports every model after user confirmation
            (may overwrite previous refinements); otherwise only new models.
        """

        # snapshot of all pandda settings currently entered in the GUI
        pandda_params = {
            'data_dir': str(self.pandda_input_data_dir_entry.text()),
            'out_dir': str(self.pandda_output_data_dir_entry.text()),
            'submit_mode': str(self.pandda_submission_mode_selection_combobox.currentText()),
            'nproc': str(self.pandda_nproc_entry.text()),
            'min_build_datasets': str(self.pandda_min_build_dataset_entry.text()),
            'pdb_style': str(self.pandda_pdb_style_entry.text()),
            'mtz_style': str(self.pandda_mtz_style_entry.text()),
            'sort_event': str(self.pandda_sort_event_combobox.currentText()),
            'average_map': str(self.pandda_calc_map_combobox.currentText()),
            'max_new_datasets': str(self.pandda_max_new_datasets_entry.text()),
            'grid_spacing': str(self.pandda_grid_spacing_entry.text()),
            'pandda_dir_structure': str(self.pandda_input_data_dir_entry.text()),
            'perform_diffraction_data_scaling': str(self.wilson_checkbox.isChecked()),
            'filter_pdb': 
str(self.pandda_reference_file_selection_combobox.currentText()),\n 'reference_dir': self.reference_directory,\n 'appendix': '',\n 'N_datasets': len(glob.glob(os.path.join(self.initial_model_directory, '*', 'dimple.pdb'))),\n 'write_mean_map': 'interesting',\n 'pandda_table': self.pandda_analyse_data_table,\n 'use_remote': self.using_remote_qsub_submission,\n 'remote_string': self.remote_qsub_submission\n }\n\n self.settings['panddas_directory'] = str(self.pandda_output_data_dir_entry.text())\n if update_datasource_only:\n self.update_log.insert('updating data source with results from pandda.inspect')\n else:\n self.update_log.insert(\n 'exporting PANDDA models, updating data source and launching inital refinement for new models')\n\n start_thread = False\n if which_models == 'all':\n self.update_log.insert('exporting ALL models! *** WARNING *** This may overwrite previous refinements!!!')\n msgBox = QtGui.QMessageBox()\n msgBox.setText(\"*** WARNING ***\\nThis will overwrite all your manual selections!\\nDo you want to continue?\")\n msgBox.addButton(QtGui.QPushButton('Yes'), QtGui.QMessageBox.YesRole)\n msgBox.addButton(QtGui.QPushButton('No'), QtGui.QMessageBox.RejectRole)\n reply = msgBox.exec_();\n if reply == 0:\n if update_datasource_only:\n self.update_log.insert('will update panddaTable in database only')\n else:\n self.update_log.insert('will export ALL models!')\n start_thread = True\n else:\n start_thread = False\n else:\n self.update_log.insert('exporting new models only')\n start_thread = True\n\n if start_thread:\n self.work_thread = XChemPANDDA.run_pandda_export(self.panddas_directory,\n os.path.join(self.database_directory,\n self.data_source_file),\n self.initial_model_directory, self.xce_logfile,\n update_datasource_only, which_models, pandda_params)\n self.connect(self.work_thread, QtCore.SIGNAL(\"finished()\"), self.thread_finished)\n self.work_thread.start()\n\n# def run_refine_bound_state_with_buster(self,which_models):\n# start_thread = 
True\n# if start_thread:\n# self.work_thread = XChemPANDDA.refine_bound_state_with_buster(self.panddas_directory,\n# os.path.join(self.database_directory,\n# self.data_source_file),\n# self.initial_model_directory, self.xce_logfile,\n# which_models)\n# self.connect(self.work_thread, QtCore.SIGNAL(\"finished()\"), self.thread_finished)\n# self.work_thread.start()\n\n def run_refine_bound_state_with_buster(self,which_models):\n start_thread = True\n if start_thread:\n self.work_thread = XChemPANDDA.export_and_refine_ligand_bound_models(self.panddas_directory,\n os.path.join(self.database_directory,\n self.data_source_file),\n self.initial_model_directory, self.xce_logfile,\n which_models)\n self.connect(self.work_thread, QtCore.SIGNAL(\"finished()\"), self.thread_finished)\n self.work_thread.start()\n\n\n\n\n\n def show_pandda_html_summary(self):\n self.pandda_initial_html.load(QtCore.QUrl(self.pandda_initial_html_file))\n self.pandda_initial_html.show()\n self.pandda_analyse_html.load(QtCore.QUrl(self.pandda_analyse_html_file))\n self.pandda_analyse_html.show()\n self.add_map_html()\n self.pandda_inspect_html.load(QtCore.QUrl(self.pandda_inspect_html_file))\n self.pandda_inspect_html.show()\n\n def create_cif_pdb_png_files(self, todo):\n tmp = self.db.execute_statement(\n \"select CrystalName,CompoundCode,CompoundSmiles from mainTable where CrystalName is not '' and CompoundSmiles is not '' and CompoundSmiles is not NULL;\")\n compound_list = []\n for item in tmp:\n if str(item[1]) == '' or str(item[1]) == 'NULL':\n compoundID = 'compound'\n else:\n compoundID = str(item[1])\n\n if todo == 'ALL':\n compound_list.append([str(item[0]), compoundID, str(item[2])])\n elif todo == 'NEW':\n if not os.path.isfile(os.path.join(self.initial_model_directory, str(item[0]), compoundID + '.cif')):\n compound_list.append([str(item[0]), compoundID, str(item[2])])\n elif todo == 'SELECTED':\n if str(item[0]) in self.initial_model_dimple_dict:\n if 
self.initial_model_dimple_dict[str(item[0])][0].isChecked():\n compound_list.append([str(item[0]), compoundID, str(item[2])])\n\n if compound_list:\n self.update_log.insert(\n 'trying to create cif and pdb files for ' + str(len(compound_list)) + ' compounds using ACEDRG...')\n if self.external_software['qsub']:\n self.update_log.insert(\n 'will try sending ' + str(len(compound_list)) + ' jobs to your computer cluster!')\n elif self.external_software['qsub_array']:\n self.update_log.insert('will try sending ' + str(\n len(compound_list)) + ' jobs as part of an ARRAY job to your computer cluster!')\n else:\n self.update_log.insert('apparently no cluster available, so will run ' + str(\n len(compound_list)) + ' sequential jobs on one core of your local machine.')\n self.update_log.insert('this could take a while...')\n self.explorer_active = 1\n self.work_thread = XChemThread.create_png_and_cif_of_compound(self.external_software,\n self.initial_model_directory,\n compound_list,\n self.database_directory,\n self.data_source_file,\n todo,\n self.ccp4_scratch_directory,\n self.xce_logfile,\n self.max_queue_jobs,\n self.restraints_program)\n self.connect(self.work_thread, QtCore.SIGNAL(\"update_progress_bar\"), self.update_progress_bar)\n self.connect(self.work_thread, QtCore.SIGNAL(\"update_status_bar(QString)\"), self.update_status_bar)\n self.connect(self.work_thread, QtCore.SIGNAL(\"finished()\"), self.thread_finished)\n self.connect(self.work_thread, QtCore.SIGNAL(\"datasource_menu_reload_samples\"),\n self.datasource_menu_reload_samples)\n self.work_thread.start()\n\n def fit_ligands_into_dimple_maps(self):\n tmp = self.db.execute_statement(\n \"select CrystalName,CompoundCode,CompoundSmiles from mainTable where CrystalName is not '' and CompoundSmiles is not '' and CompoundSmiles is not NULL;\")\n compound_list = []\n for item in tmp:\n if str(item[1]) == '' or str(item[1]) == 'NULL':\n compoundID = 'compound'\n else:\n compoundID = str(item[1])\n\n if str(item[0]) 
in self.initial_model_dimple_dict:\n if self.initial_model_dimple_dict[str(item[0])][0].isChecked():\n compound_list.append([str(item[0]), compoundID, str(item[2])])\n\n if compound_list:\n self.update_log.insert(\n 'trying to auto-fitting into inital maps for ' + str(len(compound_list)) + ' compounds...')\n if self.external_software['qsub']:\n self.update_log.insert(\n 'will try sending ' + str(len(compound_list)) + ' jobs to your computer cluster!')\n elif self.external_software['qsub_array']:\n self.update_log.insert('will try sending ' + str(\n len(compound_list)) + ' jobs as part of an ARRAY job to your computer cluster!')\n else:\n self.update_log.insert('apparently no cluster available, so will run ' + str(\n len(compound_list)) + ' sequential jobs on one core of your local machine.')\n self.update_log.insert('this could take a while...')\n self.explorer_active = 1\n self.work_thread = XChemThread.fit_ligands(self.external_software,\n self.initial_model_directory,\n compound_list,\n self.database_directory,\n self.data_source_file,\n self.ccp4_scratch_directory,\n self.xce_logfile,\n self.max_queue_jobs)\n self.connect(self.work_thread, QtCore.SIGNAL(\"update_progress_bar\"), self.update_progress_bar)\n self.connect(self.work_thread, QtCore.SIGNAL(\"update_status_bar(QString)\"), self.update_status_bar)\n self.connect(self.work_thread, QtCore.SIGNAL(\"finished()\"), self.thread_finished)\n self.connect(self.work_thread, QtCore.SIGNAL(\"datasource_menu_reload_samples\"),\n self.datasource_menu_reload_samples)\n self.work_thread.start()\n\n\n\n\n def merge_cif_files(self,todo):\n start_thread = False\n if todo == 'merge':\n self.update_log.insert('trying to merge %s with ligand restraint files in project directory' %self.second_cif_file)\n elif todo == 'restore':\n self.update_log.insert('restoring original CIF files')\n start_thread = True\n\n if todo == 'merge':\n if os.path.isfile(str(self.second_cif_file)):\n self.update_log.insert('checking compound code 
of second CIF file (%s)' % self.second_cif_file)\n self.update_log.insert('Note: LIG and DRG are not allowed!')\n import iotbx.cif\n cif_model = iotbx.cif.reader(file_path=self.second_cif_file).model()\n cif_block = cif_model[\"comp_list\"]\n ligID = cif_block[\"_chem_comp.id\"]\n self.update_log.insert('found the following compound codes in the supplied CIF file: %s' % str(list(ligID)))\n if 'LIG' in list(ligID) or 'DRG' in list(ligID):\n self.update_log.error('please change compound code to something other than LIG or DRG')\n start_thread = False\n else:\n start_thread = True\n else:\n self.update_log.error(XChemToolTips.second_cif_file_not_exists())\n start_thread = False\n\n if start_thread:\n msgBox = QtGui.QMessageBox()\n msgBox.setText(XChemToolTips.second_cif_file_info(self.second_cif_file))\n msgBox.addButton(QtGui.QPushButton('OK'), QtGui.QMessageBox.YesRole)\n msgBox.addButton(QtGui.QPushButton('Cancel'), QtGui.QMessageBox.RejectRole)\n reply = msgBox.exec_();\n if reply == 0:\n start_thread = True\n else:\n start_thread = False\n else:\n self.status_bar.showMessage('Error. Please check terminal window for further information')\n\n tmp = self.db.execute_statement(\n \"select CrystalName,CompoundCode from mainTable where CrystalName is not '' and CompoundSmiles is not '' and CompoundSmiles is not NULL;\")\n compound_list = []\n for item in tmp:\n xtal = str(item[0])\n compoundID = str(item[1])\n if compoundID == '' or compoundID == 'NULL':\n self.update_log.warning('%s: no compound ID in database; skipping...' 
%xtal)\n else:\n if str(item[0]) in self.initial_model_dimple_dict:\n if self.initial_model_dimple_dict[str(item[0])][0].isChecked():\n self.update_log.warning('%s: %s is flagged for merging' % (xtal, compoundID))\n compound_list.append([xtal, compoundID])\n\n if compound_list == []:\n self.update_log.error('Either no compound ID information in database or no sample selected!')\n start_thread = False\n\n if start_thread:\n\n self.explorer_active = 1\n self.work_thread = XChemThread.merge_cif_files(self.initial_model_directory,\n self.xce_logfile,\n self.second_cif_file,\n compound_list,\n todo)\n self.connect(self.work_thread, QtCore.SIGNAL(\"update_progress_bar\"), self.update_progress_bar)\n self.connect(self.work_thread, QtCore.SIGNAL(\"update_status_bar(QString)\"), self.update_status_bar)\n self.connect(self.work_thread, QtCore.SIGNAL(\"finished()\"), self.thread_finished)\n self.connect(self.work_thread, QtCore.SIGNAL(\"datasource_menu_reload_samples\"),\n self.datasource_menu_reload_samples)\n self.work_thread.start()\n\n\n def update_deposition_table(self):\n # check if PanDDA models are ready for deposition\n\n depositChecks = XChemDeposit.update_deposition_table(\n os.path.join(self.database_directory, self.data_source_file))\n\n toDeposit, mismatch = depositChecks.PanDDA_models_to_deposit()\n\n if mismatch != {}:\n self.update_log.insert('The following samples contain ligand that are not ready for deposition:')\n for entry in mismatch:\n self.update_log.insert(entry[0] + ' -> site: ' + entry[1] + ' @ ' + entry[2] + ' => ' + entry[4])\n self.update_log.insert('You need to change this before you can continue!')\n return None\n\n for xtal in toDeposit:\n self.db.update_insert_depositTable(xtal, {})\n\n def show_html_summary_and_diffraction_image(self):\n for key in self.albula_button_dict:\n if self.albula_button_dict[key][0] == self.sender():\n print('==> XCE: showing html summary in firefox')\n self.show_html_summary_in_firefox(key)\n\n def 
need_to_switch_main_tab(self, task_index):\n msgBox = QtGui.QMessageBox()\n msgBox.setText(\"Need to switch main tab before you can launch this job\")\n msgBox.addButton(QtGui.QPushButton('Yes'), QtGui.QMessageBox.YesRole)\n msgBox.addButton(QtGui.QPushButton('No'), QtGui.QMessageBox.RejectRole)\n reply = msgBox.exec_();\n if reply == 0:\n self.main_tab_widget.setCurrentIndex(task_index)\n\n def check_write_permissions_of_data_source(self):\n write_enabled = True\n if not os.access(os.path.join(self.database_directory, self.data_source_file), os.W_OK):\n QtGui.QMessageBox.warning(self.window, \"Data Source Problem\",\n '\\nData Source is Read-Only\\n',\n QtGui.QMessageBox.Cancel, QtGui.QMessageBox.NoButton,\n QtGui.QMessageBox.NoButton)\n write_enabled = False\n return write_enabled\n\n def no_data_source_selected(self):\n QtGui.QMessageBox.warning(self.window, \"Data Source Problem\",\n ('Please set or create a data source file\\n') +\n ('Options:\\n') +\n ('1. Use an existing file:\\n') +\n ('- Settings -> Select Data Source File\\n') +\n ('2. 
Create a new file\n') +
                                  ('- Data Source -> Create New Data\nSource (SQLite)'),
                                  QtGui.QMessageBox.Cancel, QtGui.QMessageBox.NoButton,
                                  QtGui.QMessageBox.NoButton)

    def update_progress_bar(self, progress):
        """Set the main progress bar to `progress` (an int percentage)."""
        self.progress_bar.setValue(progress)

    def update_status_bar(self, message):
        """Show `message` in the main status bar."""
        self.status_bar.showMessage(message)

    def thread_finished(self):
        """Reset GUI state after a worker thread completes."""
        self.explorer_active = 0
        self.update_progress_bar(0)
        self.update_status_bar('idle')

    def show_error_dict(self, errorDict):
        """Display a {key: [messages]} error dict in a message box, one key per section."""
        text = ''
        for key in errorDict:
            text += '{0!s}:\n'.format(key)
            for entry in errorDict[key]:
                text += ' - ' + entry + '\n'
        msgBox = QtGui.QMessageBox()
        msgBox.setText(text)
        msgBox.exec_()

    def create_widgets_for_autoprocessing_results_only(self, data_dict):
        """Build/refresh the per-sample data-processing results tables.

        data_dict -- mapping of sample id to a list of data-collection entries
            (replaces self.data_collection_dict); entries tagged 'image' and
            'logfile' are consumed below.
        """
        self.status_bar.showMessage('Building details table for data processing results')
        self.data_collection_dict = data_dict

        # XCE-style column headers for the per-pipeline results table
        column_name = ['Program',
                       'Resolution\nOverall',
                       'Resolution\n[Mn<I/sig(I)> = 2.0]',
                       'DataProcessing\nSpaceGroup',
                       'Mn<I/sig(I)>\nHigh',
                       'Rmerge\nLow',
                       'Completeness\nOverall',
                       'DataProcessing\nUnitCell',
                       'DataProcessing\nRfree',
                       'DataProcessing\nScore']

        # need to do this because db_dict keys are SQLite column names
        diffraction_data_column_name = XChemDB.data_source(
            os.path.join(self.database_directory, self.data_source_file)).translate_xce_column_list_to_sqlite(
            column_name)

        for xtal in sorted(self.data_collection_dict):
            # NOTE(review): this flag is set but never read in the visible code
            if os.path.isfile(os.path.join(self.initial_model_directory, xtal, xtal + '.mtz')):
                mtz_already_in_inital_model_directory = True

            # column 2: data collection date
            # this one should always be there; it may need updating in case another run appears
            # first find latest run
            tmp = []
            for entry in self.data_collection_dict[xtal]:
                if entry[0] == 'image':
                    tmp.append([entry[3], datetime.strptime(entry[3], '%Y-%m-%d %H:%M:%S')])
            latest_run = max(tmp, key=lambda x: x[1])[0]

            # first check if it does already 
exist\n if xtal not in self.data_collection_column_three_dict:\n # generate all the widgets which can later be appended and add them to the dictionary\n data_collection_table = QtGui.QTableWidget() # table with data processing results for each pipeline\n selection_changed_by_user = False\n self.data_collection_column_three_dict[xtal] = [data_collection_table, selection_changed_by_user]\n xtal_in_table = True\n else:\n data_collection_table = self.data_collection_column_three_dict[xtal][0]\n selection_changed_by_user = self.data_collection_column_three_dict[xtal][1]\n\n data_collection_table.setVerticalScrollBarPolicy(QtCore.Qt.ScrollBarAlwaysOff)\n data_collection_table.setColumnCount(len(column_name))\n font = QtGui.QFont()\n font.setPointSize(8)\n data_collection_table.setFont(font)\n data_collection_table.setHorizontalHeaderLabels(column_name)\n data_collection_table.horizontalHeader().setFont(font)\n data_collection_table.setSelectionBehavior(QtGui.QAbstractItemView.SelectRows)\n\n #############################################################################\n # crystal images\n # first check there are new images that are not displayed yet; i.e. 
they are not in the self.data_collection_image_dict\n if xtal not in self.data_collection_image_dict:\n # OK this is the first time\n self.data_collection_image_dict[xtal] = []\n\n # sort crystal images by timestamp\n # reminder: ['image',visit,run,timestamp,image_list,diffraction_image,run_number]\n # a) get only image entries from self.data_collection_dict\n tmp = []\n for entry in self.data_collection_dict[xtal]:\n if entry[0] == 'image':\n tmp.append(entry)\n\n # b) sort by the previously assigned run number\n # note: entry[6]==run_number\n for entry in sorted(tmp, key=lambda x: x[6]):\n run_number = entry[6]\n images_already_in_table = False\n for image in self.data_collection_image_dict[xtal]:\n if run_number == image[0]:\n images_already_in_table = True\n break\n if not images_already_in_table:\n # not if there is a run, but images are for whatever reason not present in self.data_collection_dict\n # then use image not available from $XChemExplorer_DIR/image/IMAGE_NOT_AVAILABLE.png\n # not sure how to do this at the moment; it will probably trigger an error that I can catch\n self.data_collection_image_dict[xtal].append([entry[6], entry[1], entry[2], entry[3], entry[5]])\n\n #############################################################################\n # initialize dataset_outcome_dict for xtal\n if xtal not in self.dataset_outcome_dict:\n self.dataset_outcome_dict[xtal] = []\n # dataset outcome buttons\n\n #############################################################################\n # table for data processing results\n # check if results from particular pipeline are already in table;\n # not really looking at the table here, but compare it to self.data_collection_table_dict\n row_position = data_collection_table.rowCount()\n if not xtal in self.data_collection_table_dict:\n self.data_collection_table_dict[xtal] = []\n # reminder: ['logfile',visit,run,timestamp,autoproc,file_name,aimless_results,<aimless_index>,False]\n logfile_list = []\n for entry in 
self.data_collection_dict[xtal]:\n if entry[0] == 'logfile':\n logfile_list.append(entry)\n for entry in sorted(logfile_list, key=lambda x: x[7]): # sort by aimless_index and so make sure\n entry_already_in_table = False # that aimless_index == row\n for logfile in self.data_collection_table_dict[xtal]:\n if entry[1] == logfile[1] and entry[2] == logfile[2] and entry[3] == logfile[3] and entry[4] == \\\n logfile[4]:\n entry_already_in_table = True\n # might have to update Rfree column\n for column, header in enumerate(diffraction_data_column_name):\n if header == 'DataProcessing\\nRfree':\n # entry[7]==aimless_index, i.e. row number\n cell_text = QtGui.QTableWidgetItem()\n cell_text.setText(str(db_dict[header[1]]))\n cell_text.setTextAlignment(QtCore.Qt.AlignCenter | QtCore.Qt.AlignCenter)\n data_collection_table.setItem(entry[7], column, cell_text)\n break\n break\n if not entry_already_in_table:\n data_collection_table.insertRow(row_position)\n db_dict = entry[6]\n for column, header in enumerate(diffraction_data_column_name):\n cell_text = QtGui.QTableWidgetItem()\n try:\n cell_text.setText(str(db_dict[header[1]]))\n except KeyError:\n # this may happen if not score exists\n cell_text.setText('0')\n cell_text.setTextAlignment(QtCore.Qt.AlignCenter | QtCore.Qt.AlignCenter)\n data_collection_table.setItem(row_position, column, cell_text)\n data_collection_table.setRowHeight(row_position, 20)\n row_position += 1\n\n self.data_collection_table_dict[xtal].append(\n ['logfile', entry[1], entry[2], entry[3], entry[4]]) # 'logfile' is just added to have\n # same index numbers between lists\n data_collection_table.cellClicked.connect(self.user_update_selected_autoproc_datasets_summary_table)\n\n # select best resolution file + set data collection outcome\n # the assumption is that index in data_collection_dict and row number are identical\n # the assumption for data collection outcome is that as long as a logfile is found, it's a success\n logfile_found = False\n for 
entry in self.data_collection_dict[xtal]:\n if entry[0] == 'logfile':\n index = entry[7]\n best_file = entry[8]\n logfile_found = True\n if best_file:\n # we change the selection only if the user did not touch it, assuming that he/she knows best\n # if not selection_changed_by_user:\n data_collection_table.selectRow(index)\n\n self.populate_datasets_summary_table()\n\n def find_suitable_reference_file(self, db_dict):\n reference_file = []\n dummy = ['...', '', '', '', 0, '0']\n reference_file.append([dummy, 999])\n suitable_reference = []\n for reference in self.reference_file_list:\n # first we need one in the same pointgroup\n if reference[5] == db_dict['DataProcessingPointGroup']:\n try:\n difference = math.fabs(\n 1 - (float(db_dict['DataProcessingUnitCellVolume']) / float(reference[4]))) * 100\n reference_file.append([reference, difference])\n except ValueError:\n continue\n return reference_file\n\n def create_maps_table(self):\n column_name = self.db.translate_xce_column_list_to_sqlite(self.maps_table_columns)\n\n for xtal in sorted(self.xtal_db_dict):\n new_xtal = False\n db_dict = self.xtal_db_dict[xtal]\n if str(db_dict['DataCollectionOutcome']).lower().startswith('success'):\n reference_file = self.find_suitable_reference_file(db_dict)\n smallest_uc_difference = min(reference_file, key=lambda x: x[1])\n row = self.maps_table.rowCount()\n if xtal not in self.initial_model_dimple_dict:\n self.maps_table.insertRow(row)\n current_row = row\n new_xtal = True\n else:\n for table_row in range(row):\n if self.maps_table.item(table_row, 0).text() == xtal:\n current_row = table_row\n break\n for column, header in enumerate(column_name):\n if header[0] == 'Sample ID':\n cell_text = QtGui.QTableWidgetItem()\n cell_text.setText(str(xtal))\n cell_text.setTextAlignment(QtCore.Qt.AlignCenter | QtCore.Qt.AlignCenter)\n self.maps_table.setItem(current_row, column, cell_text)\n elif header[0] == 'Select':\n if new_xtal:\n run_dimple = QtGui.QCheckBox()\n 
run_dimple.toggle()\n self.maps_table.setCellWidget(current_row, column, run_dimple)\n run_dimple.setChecked(False)\n elif header[0] == 'Reference\\nSpaceGroup':\n cell_text = QtGui.QTableWidgetItem()\n cell_text.setText(str(smallest_uc_difference[0][1]))\n cell_text.setTextAlignment(QtCore.Qt.AlignCenter | QtCore.Qt.AlignCenter)\n self.maps_table.setItem(current_row, column, cell_text)\n elif header[0] == 'Difference\\nUC Volume (%)':\n cell_text = QtGui.QTableWidgetItem()\n smallest_uc_difference = min(reference_file, key=lambda x: x[1])\n cell_text.setText(str(round(float(smallest_uc_difference[1]), 1)))\n cell_text.setTextAlignment(QtCore.Qt.AlignCenter | QtCore.Qt.AlignCenter)\n self.maps_table.setItem(current_row, column, cell_text)\n elif header[0] == 'Reference File':\n if new_xtal:\n reference_file_selection_combobox = QtGui.QComboBox()\n self.populate_reference_combobox(reference_file_selection_combobox)\n if float(smallest_uc_difference[1]) < self.allowed_unitcell_difference_percent:\n index = reference_file_selection_combobox.findText(str(smallest_uc_difference[0][0]),\n QtCore.Qt.MatchFixedString)\n reference_file_selection_combobox.setCurrentIndex(index)\n else:\n reference_file_selection_combobox.setCurrentIndex(0)\n self.maps_table.setCellWidget(current_row, column,\n reference_file_selection_combobox)\n else:\n reference_file_selection_combobox = self.initial_model_dimple_dict[xtal][1]\n self.populate_reference_combobox(reference_file_selection_combobox)\n if float(smallest_uc_difference[1]) < self.allowed_unitcell_difference_percent:\n index = reference_file_selection_combobox.findText(str(smallest_uc_difference[0][0]),\n QtCore.Qt.MatchFixedString)\n reference_file_selection_combobox.setCurrentIndex(index)\n else:\n reference_file_selection_combobox.setCurrentIndex(0)\n else:\n cell_text = QtGui.QTableWidgetItem()\n cell_text.setText(str(db_dict[header[1]]))\n cell_text.setTextAlignment(QtCore.Qt.AlignCenter | QtCore.Qt.AlignCenter)\n if 
header[0] == 'Dimple\\nStatus':\n if str(db_dict[header[1]]) == 'running':\n cell_text.setBackground(QtGui.QColor(100, 230, 150))\n elif str(db_dict[header[1]]) == 'pending':\n cell_text.setBackground(QtGui.QColor(20, 100, 230))\n elif str(db_dict[header[1]]) == 'started':\n cell_text.setBackground(QtGui.QColor(230, 240, 110))\n elif str(db_dict[header[1]]) == 'finished':\n cell_text.setBackground(QtGui.QColor(255, 255, 255))\n if header[0] == 'Compound\\nStatus':\n if str(db_dict[header[1]]) == 'running':\n cell_text.setBackground(QtGui.QColor(100, 230, 150))\n elif str(db_dict[header[1]]) == 'pending':\n cell_text.setBackground(QtGui.QColor(20, 100, 230))\n elif str(db_dict[header[1]]) == 'started':\n cell_text.setBackground(QtGui.QColor(230, 240, 110))\n elif str(db_dict[header[1]]) == 'restraints generated':\n cell_text.setBackground(QtGui.QColor(255, 255, 255))\n elif str(db_dict[header[1]]) == 'restraints failed':\n cell_text.setBackground(QtGui.QColor(255, 0, 0))\n elif str(db_dict[header[1]]) == 'missing smiles':\n cell_text.setBackground(QtGui.QColor(240, 150, 20))\n self.maps_table.setItem(current_row, column, cell_text)\n if new_xtal:\n self.initial_model_dimple_dict[xtal] = [run_dimple, reference_file_selection_combobox]\n\n def preferences_data_to_copy_combobox_changed(self, i):\n text = str(self.preferences_data_to_copy_combobox.currentText())\n for item in self.preferences_data_to_copy:\n if item[0] == text:\n self.preferences['processed_data_to_copy'] = item[1]\n break\n\n def preferences_selection_mechanism_combobox_changed(self, i):\n text = str(self.preferences_selection_mechanism_combobox.currentText())\n self.preferences['dataset_selection_mechanism'] = text\n self.update_log.insert('setting datasets selection mechanism to ' + text)\n\n def preferences_initial_refinement_combobox_changed(self, i):\n text = str(self.preferences_initial_refinement_combobox.currentText())\n self.preferences['initial_refinement_pipeline'] = text\n 
self.update_log.insert('setting initial refinement pipeline to ' + text)

    def preferences_restraints_generation_combobox_changed(self):
        """Store the selected ligand-restraints generation program in self.restraints_program."""
        text = str(self.preferences_restraints_generation_combobox.currentText())
        self.restraints_program = text
        self.update_log.insert('will use {0!s} for generation of ligand coordinates and restraints'.format(text))

    def refinement_outcome_combobox_changed(self):
        """Record a refinement-outcome change (value, user, timestamp) for the crystal
        whose combobox emitted the signal, and sync the deposit table records."""
        for xtal in self.refinement_table_dict:
            if self.sender() == self.refinement_table_dict[xtal]:
# db_dict = {'RefinementOutcome': str(self.sender().currentText())}
                db_dict = {}
                db_dict['RefinementOutcome'] = str(self.sender().currentText())
                db_dict['RefinementOutcomePerson'] = getpass.getuser()
                db_dict['RefinementOutcomeDate'] = datetime.strftime(datetime.now(), '%Y-%m-%d_%H-%M-%S.%f')[:-4]
                self.db.create_or_remove_missing_records_in_depositTable(self.xce_logfile, xtal, 'ligand_bound',
                                                                         db_dict)

    def get_reference_file_list(self, reference_root):
        """Return a list of [root, spacegroup, unitcell, lattice, uc_volume, pointgroup]
        entries for available reference PDB files; entry 0 is a '...' placeholder.

        If <reference_directory>/<reference_root>.pdb exists, only that file is
        listed; otherwise every *.pdb in the reference directory is scanned.
        """
        # check available reference files
        reference_file_list = []
        dummy = ['...', '', '', '', 0, '0']
        reference_file_list.append(dummy)
        if os.path.isfile(os.path.join(self.reference_directory, reference_root + '.pdb')):
            pdb_reference = parse().PDBheader(os.path.join(self.reference_directory, reference_root + '.pdb'))
            spg_reference = pdb_reference['SpaceGroup']
            unitcell_reference = pdb_reference['UnitCell']
            lattice_reference = pdb_reference['Lattice']
            unitcell_volume_reference = pdb_reference['UnitCellVolume']
            pointgroup_reference = pdb_reference['PointGroup']
            reference_file_list.append([reference_root,
                                        spg_reference,
                                        unitcell_reference,
                                        lattice_reference,
                                        unitcell_volume_reference,
                                        pointgroup_reference])
        else:
            # fall back to scanning the whole reference directory for PDB files
            for files in glob.glob(self.reference_directory + '/*'):
                if files.endswith('.pdb'):
                    reference_root = files[files.rfind('/') + 1:files.rfind('.')]

                    if os.path.isfile(os.path.join(self.reference_directory, reference_root + '.pdb')):
                        # reference_file = 
reference_root + '.pdb'
                        pdb_reference = parse().PDBheader(
                            os.path.join(self.reference_directory, reference_root + '.pdb'))
                        spg_reference = pdb_reference['SpaceGroup']
                        unitcell_reference = pdb_reference['UnitCell']
                        lattice_reference = pdb_reference['Lattice']
                        unitcell_volume_reference = pdb_reference['UnitCellVolume']
                        pointgroup_reference = pdb_reference['PointGroup']
                        reference_file_list.append([reference_root,
                                                    spg_reference,
                                                    unitcell_reference,
                                                    lattice_reference,
                                                    unitcell_volume_reference,
                                                    pointgroup_reference])
        # NOTE(review): loop variable 'file' shadows the Python 2 builtin of the same name
        for n, file in enumerate(reference_file_list):
            self.update_log.insert('reference file {0!s}: {1!s}'.format(n, file))
        return reference_file_list

    def dataset_outcome_combobox_change_outcome(self, text):
        """Update the data collection outcome for the crystal whose combobox fired,
        both in self.dataset_outcome_dict and in the data source."""
        outcome = str(text)
        xtal = ''
        for key in self.dataset_outcome_combobox_dict:
            if self.dataset_outcome_combobox_dict[key] == self.sender():
                xtal = key
                self.update_log.insert('user changed data collection outcome of {0!s} to {1!s}'.format(xtal, outcome))
                break
        self.dataset_outcome_dict[xtal] = outcome
        if xtal != '':
# # need to also update if not yet done
# user_already_changed_selection = False
# for n, entry in enumerate(self.data_collection_dict[xtal]):
# if entry[0] == 'user_changed_selection':
# user_already_changed_selection = True
# if entry[0] == 'logfile':
# db_dict = entry[6]
# db_dict['DataCollectionOutcome'] = outcome
# entry[6] = db_dict
# self.data_collection_dict[xtal][n] = entry
# if not user_already_changed_selection:
# self.data_collection_dict[xtal].append(['user_changed_selection'])
# # finally need to update outcome field in data source accordingly
            self.update_log.insert('updating dataset outcome in datasource for {0!s}'.format(xtal))
            update_dict = {'DataCollectionOutcome': outcome}
            self.db.update_insert_data_source(xtal, update_dict)

    def set_run_dimple_flag(self, state):
        """Check or un-check every per-crystal 'run dimple' checkbox in one go."""
        if state == QtCore.Qt.Checked:
            for key in self.initial_model_dimple_dict:
                
self.initial_model_dimple_dict[key][0].setChecked(True)
        else:
            for key in self.initial_model_dimple_dict:
                self.initial_model_dimple_dict[key][0].setChecked(False)


    def show_data_collection_details(self, state):
        """Show the details widget for the crystal whose 'Show Details' checkbox fired;
        the checkboxes behave radio-style: all others are un-checked."""
        # first remove currently displayed widget
        if self.data_collection_details_currently_on_display is not None:
            self.data_collection_details_currently_on_display.hide()
            self.data_collection_details_currently_on_display = None

        # map sample IDs in the summary table to their row numbers
        tmp = []
        allRows = self.datasets_summary_table.rowCount()
        for table_row in range(allRows):
            tmp.append([self.datasets_summary_table.item(table_row, 0).text(), table_row])

        for key in self.datasets_summary_dict:
            if self.datasets_summary_dict[key][3] == self.sender():
                if self.sender().isChecked():
                    for item in tmp:
                        if item[0] == key:
                            self.datasets_summary_table.selectRow(item[1])
                    self.data_collection_details_currently_on_display = self.data_collection_column_three_dict[key][0]
                    self.datasets_summarys_vbox_for_details.addWidget(
                        self.data_collection_details_currently_on_display)
                    self.data_collection_details_currently_on_display.show()
            else:
                # un-check all other ones
                self.datasets_summary_dict[key][3].setChecked(False)

# def populate_datasets_summary_table(self):
# self.status_bar.showMessage(
# 'Building summary table for data processing results; be patient this may take a while')
# row = self.datasets_summary_table.rowCount()
# column_name = self.db.translate_xce_column_list_to_sqlite(self.datasets_summary_table_columns)
#
# pinList = self.db.execute_statement(
# "Select CrystalName,PinBarcode,DataCollectionPinBarcode from mainTable where CrystalName is not ''")
# pinDict = {}
# for item in pinList:
# pinDict[str(item[0])] = [str(item[1]), str(item[2])]
#
# for xtal in sorted(self.data_collection_dict):
# new_xtal = False
# if xtal not in self.datasets_summary_dict:
# row = self.datasets_summary_table.rowCount()
# self.datasets_summary_table.insertRow(row)
# 
self.datasets_summary_dict[xtal] = []\n# new_xtal = True\n#\n# # check for dataset outcome\n# outcome = ''\n# logfile_found = False\n# too_low_resolution = True\n# db_dict = {}\n# for entry in self.data_collection_dict[xtal]:\n# if entry[0] == 'logfile':\n# logfile_found = True\n# if entry[8]: # if this was auto-selected best resolution file\n# db_dict = entry[6]\n# try:\n# if float(db_dict['DataProcessingResolutionHigh']) <= float(\n# self.acceptable_low_resolution_limit_for_data):\n# too_low_resolution = False\n# except ValueError:\n# pass\n#\n# try:\n# outcome = str(self.db.get_value_from_field(xtal, 'DataCollectionOutcome')[0])\n# except TypeError:\n# outcome = 'Failed - unknown'\n# self.update_log.insert('cannot find DataCollectionOutcome for {0!s}'.format(xtal))\n# self.dataset_outcome_dict[xtal] = outcome\n#\n# # find latest run for crystal and diffraction images\n# tmp = []\n# for entry in self.data_collection_dict[xtal]:\n# if entry[0] == 'image':\n# tmp.append([entry, datetime.strptime(entry[3], '%Y-%m-%d %H:%M:%S')])\n# latest_run = max(tmp, key=lambda x: x[1])[0]\n#\n# new_run_for_exisiting_crystal_or_new_sample = True\n# if new_xtal:\n# self.datasets_summary_dict[xtal] = [outcome, db_dict, latest_run]\n# else:\n# # check if newer run appeared\n# old_run_timestamp = self.datasets_summary_dict[xtal][2][3]\n# new_run_timestamp = latest_run[3]\n# if old_run_timestamp == new_run_timestamp:\n# new_run_for_exisiting_crystal_or_new_sample = False\n# else:\n# checkbox_for_details = self.datasets_summary_dict[xtal][3]\n# self.datasets_summary_dict[xtal] = [outcome, db_dict, latest_run, checkbox_for_details]\n#\n# if new_xtal:\n# current_row = row\n# else:\n# allRows = self.datasets_summary_table.rowCount()\n# for table_row in range(allRows):\n# if self.datasets_summary_table.item(table_row, 0).text() == xtal:\n# current_row = table_row\n# break\n#\n# image_number = 0\n# for column, header in enumerate(column_name):\n# if header[0] == 'Sample ID':\n# cell_text = 
QtGui.QTableWidgetItem()\n# cell_text.setText(str(xtal))\n# cell_text.setTextAlignment(QtCore.Qt.AlignCenter | QtCore.Qt.AlignCenter)\n# self.datasets_summary_table.setItem(current_row, column, cell_text)\n# elif header[0] == 'DataCollection\\nOutcome':\n# if new_xtal:\n# dataset_outcome_combobox = QtGui.QComboBox()\n# for outcomeItem in self.dataset_outcome:\n# dataset_outcome_combobox.addItem(outcomeItem)\n# self.datasets_summary_table.setCellWidget(current_row, column, dataset_outcome_combobox)\n# dataset_outcome_combobox.activated[str].connect(self.dataset_outcome_combobox_change_outcome)\n# self.dataset_outcome_combobox_dict[xtal] = dataset_outcome_combobox\n# index = self.dataset_outcome_combobox_dict[xtal].findText(str(outcome), QtCore.Qt.MatchFixedString)\n# self.dataset_outcome_combobox_dict[xtal].setCurrentIndex(index)\n# continue\n#\n# elif header[0].startswith('img'):\n# if new_run_for_exisiting_crystal_or_new_sample:\n# img = latest_run[4]\n# pixmap = QtGui.QPixmap()\n# # can do this (img[image_number][1]) because made sure in the threading module\n# # that there are always exactly 5 images in there\n# pixmap.loadFromData(base64.b64decode(img[image_number][1]))\n# image = QtGui.QLabel()\n# image.resize(128, 80)\n# image.setPixmap(pixmap.scaled(image.size(), QtCore.Qt.KeepAspectRatio))\n# self.datasets_summary_table.setCellWidget(current_row, column, image)\n# image_number += 1\n#\n# elif header[0].startswith('Show Diffraction\\nImage'):\n# if new_run_for_exisiting_crystal_or_new_sample:\n# diffraction_image = latest_run[5]\n# diffraction_image_name = diffraction_image[diffraction_image.rfind('/') + 1:]\n# try: # need to try because older pkl file may not have this item in list\n# html_summary = latest_run[7]\n# except IndexError:\n# html_summary = ''\n# if new_xtal:\n# start_albula_button = QtGui.QPushButton('Show: \\n' + diffraction_image_name)\n# start_albula_button.clicked.connect(self.show_html_summary_and_diffraction_image)\n# 
self.albula_button_dict[xtal] = [start_albula_button, diffraction_image, html_summary]\n# self.datasets_summary_table.setCellWidget(current_row, column, start_albula_button)\n# else:\n# self.albula_button_dict[xtal][1] = diffraction_image\n# elif header[0].startswith('Show\\nDetails'):\n# if new_xtal:\n# show_data_collection_details_checkbox = QtGui.QCheckBox()\n# show_data_collection_details_checkbox.toggle()\n# show_data_collection_details_checkbox.setChecked(False)\n# show_data_collection_details_checkbox.stateChanged.connect(self.show_data_collection_details)\n# self.datasets_summary_table.setCellWidget(current_row, column,\n# show_data_collection_details_checkbox)\n# self.datasets_summary_dict[xtal].append(show_data_collection_details_checkbox)\n# elif header[0].startswith('SoakDB\\nBarcode') or header[0].startswith('GDA\\nBarcode'):\n# if new_xtal:\n# cell_text = QtGui.QTableWidgetItem()\n# if xtal in pinDict:\n# if header[0].startswith('SoakDB\\nBarcode'):\n# cell_text.setText(str(pinDict[xtal][0]))\n# elif header[0].startswith('GDA\\nBarcode'):\n# cell_text.setText(str(pinDict[xtal][1]))\n# if pinDict[xtal][0] == 'NULL' or pinDict[xtal][1] == 'NULL':\n# cell_text.setBackground(QtGui.QColor(255, 215, 0))\n# elif pinDict[xtal][0] != pinDict[xtal][1]:\n# cell_text.setBackground(QtGui.QColor(255, 0, 0))\n# else:\n# cell_text.setText('')\n# cell_text.setTextAlignment(QtCore.Qt.AlignCenter | QtCore.Qt.AlignCenter)\n# self.datasets_summary_table.setItem(current_row, column, cell_text)\n# else:\n# cell_text = QtGui.QTableWidgetItem()\n# # in case data collection failed for whatever reason\n# if logfile_found:\n# try:\n# cell_text.setText(str(db_dict[header[1]]))\n# except KeyError: # older pkl files may not have all the columns\n# cell_text.setText('n/a')\n# else:\n# if header[0].startswith('Resolution\\n[Mn<I/sig(I)> = 1.5]'):\n# cell_text.setText('999')\n# elif header[0].startswith('DataProcessing\\nRfree'):\n# cell_text.setText('999')\n# elif 
header[0].startswith('Rmerge\nLow'):
# cell_text.setText('999')
# else:
# cell_text.setText('')
# cell_text.setTextAlignment(QtCore.Qt.AlignCenter | QtCore.Qt.AlignCenter)
# self.datasets_summary_table.setItem(current_row, column, cell_text)
#
# row += 1
#
# self.datasets_summary_table.resizeRowsToContents()
# self.datasets_summary_table.resizeColumnsToContents()
#
# self.status_bar.showMessage('updating Overview table')
#
# self.status_bar.showMessage('idle')
#
# self.save_files_to_initial_model_folder()
#

    ################################################################################################################
    #
    #
    #
    # => new data collection summary table
    # > start

    def get_sample_list_from_table(self,table):
        """Return the sorted list of sample IDs found in column 0 of *table*."""
        sampleList = []
        allRows = table.rowCount()
        for row in xrange(0, allRows):
            sample_id = str(table.item(row, 0).text())
            sampleList.append(sample_id)
        return sorted(sampleList)

    def get_row_of_sample_in_table(self,table,xtal):
        """Return the row index of sample *xtal* in *table* (column 0);
        returns table.rowCount() when the sample is not present."""
        allRows = table.rowCount()
        sampleRow = allRows
        for n,row in enumerate(xrange(0, allRows)):
            sample_id = str(table.item(row, 0).text())
            if sample_id == xtal:
                sampleRow = n
                break
        return sampleRow

    def update_row_in_table(self,sample,row,db_dict,table,columns_to_show):
        """Fill one table row for *sample* from *db_dict*, creating the
        appropriate widget (text item, combobox, image or checkbox) per column."""
        xtal = str(sample)
        column_name = self.db.translate_xce_column_list_to_sqlite(columns_to_show)

        for column, header in enumerate(column_name):

            if header[0] == 'Sample ID':
                cell_text = QtGui.QTableWidgetItem()
                cell_text.setText(str(xtal))
                cell_text.setTextAlignment(QtCore.Qt.AlignCenter | QtCore.Qt.AlignCenter)
                table.setItem(row, column, cell_text)

            elif header[0] == 'DataCollection\nOutcome':
                # one combobox per crystal, created only on first encounter
                if xtal not in self.dataset_outcome_combobox_dict:
                    dataset_outcome_combobox = QtGui.QComboBox()
                    for outcomeItem in self.dataset_outcome:
                        dataset_outcome_combobox.addItem(outcomeItem)
                    dataset_outcome_combobox.activated[str].connect(self.dataset_outcome_combobox_change_outcome)
                    
self.dataset_outcome_combobox_dict[xtal] = dataset_outcome_combobox
                    table.setCellWidget(row, column, dataset_outcome_combobox)
                index = self.dataset_outcome_combobox_dict[xtal].findText(str(db_dict['DataCollectionOutcome']), QtCore.Qt.MatchFixedString)
                self.dataset_outcome_combobox_dict[xtal].setCurrentIndex(index)

            elif header[0].startswith('img'):
                # crystal snapshot column: show placeholder image if file missing
                if os.path.isfile(db_dict[header[1]]):
                    pixmap = QtGui.QPixmap(db_dict[header[1]])
                else:
                    pixmap = QtGui.QPixmap(
                        os.path.join(os.getenv('XChemExplorer_DIR'), 'image', 'IMAGE_NOT_AVAILABLE.png'))
                image = QtGui.QLabel()
                image.resize(128, 80)
                image.setPixmap(pixmap.scaled(image.size(), QtCore.Qt.KeepAspectRatio))
                table.setCellWidget(row, column, image)

            elif header[0] == 'Select':
                # deposition tables get a per-sample selection checkbox
                checkbox = QtGui.QCheckBox()
                checkbox.toggle()
                if table == self.deposition_table_apo:
                    if xtal not in self.deposition_table_apo_dict:
                        self.deposition_table_apo_dict[xtal] = checkbox
                if table == self.deposition_table_bound:
                    if xtal not in self.deposition_table_bound_dict:
                        self.deposition_table_bound_dict[xtal] = checkbox
                table.setCellWidget(row, column, checkbox)
                checkbox.setChecked(False)

            #elif header[0].startswith('SoakDB\nBarcode') or header[0].startswith('GDA\nBarcode'):
            # if new_xtal:
            # cell_text = QtGui.QTableWidgetItem()
            # if xtal in pinDict:
            # if header[0].startswith('SoakDB\nBarcode'):
            # cell_text.setText(str(pinDict[xtal][0]))
            # elif header[0].startswith('GDA\nBarcode'):
            # cell_text.setText(str(pinDict[xtal][1]))
            # if pinDict[xtal][0] == 'NULL' or pinDict[xtal][1] == 'NULL':
            # cell_text.setBackground(QtGui.QColor(255, 215, 0))
            # elif pinDict[xtal][0] != pinDict[xtal][1]:
            # cell_text.setBackground(QtGui.QColor(255, 0, 0))
            # else:
            # cell_text.setText('')
            # cell_text.setTextAlignment(QtCore.Qt.AlignCenter | QtCore.Qt.AlignCenter)
            # self.datasets_summary_table.setItem(current_row, column, cell_text)
            else:
                cell_text = QtGui.QTableWidgetItem()
                # in case 
data collection failed for whatever reason
                try:
                    cell_text.setText(str(db_dict[header[1]]))
                except KeyError: # older pkl files may not have all the columns
                    cell_text.setText('n/a')
                # else:
                # if header[0].startswith('Resolution\n[Mn<I/sig(I)> = 1.5]'):
                # cell_text.setText('999')
                # elif header[0].startswith('DataProcessing\nRfree'):
                # cell_text.setText('999')
                # elif header[0].startswith('Rmerge\nLow'):
                # cell_text.setText('999')
                # else:
                # cell_text.setText('')
                cell_text.setTextAlignment(QtCore.Qt.AlignCenter | QtCore.Qt.AlignCenter)
                table.setItem(row, column, cell_text)
        # NOTE(review): debug prints reference the last loop iteration only
        print('row: {0!s} column: {1!s} value: {2!s} header: {3!s}'.format(row, column, cell_text, header[0]))
        print('column_name {0!s}'.format(column_name))

    def populate_datasets_summary_table_NEW(self):
        """Rebuild/refresh the Datasets summary table from the database,
        inserting rows for newly collected crystals and updating existing ones."""
        self.status_bar.showMessage(
            'Building summary table for data processing results; be patient this may take a while')

        # get information about all samples collected during the current visit
        visit, beamline = XChemMain.getVisitAndBeamline(self.beamline_directory)
        if self.read_agamemnon.isChecked():
            # agamemnon mode: derive the visit list from sibling directories
            visit = []
            for v in glob.glob(os.path.join(self.beamline_directory[:self.beamline_directory.rfind('-') + 1] + '*')):
                visit.append(v[v.rfind('/')+1:])

        self.update_log.insert('reading information about collected crystals from database...')
        collectedXtalsDict = self.db.xtals_collected_during_visit_as_dict(visit)

        # instead of using dictionaries, query table of which crystals are in table
        samples_in_table = self.get_sample_list_from_table(self.datasets_summary_table)
        for xtal in sorted(collectedXtalsDict):
            if xtal not in samples_in_table:
                row = self.datasets_summary_table.rowCount()
                self.datasets_summary_table.insertRow(row)
            else:
                row = self.get_row_of_sample_in_table(self.datasets_summary_table,xtal)
            db_dict = collectedXtalsDict[xtal]
            self.update_row_in_table(xtal, row, db_dict, self.datasets_summary_table,
                                     
self.datasets_summary_table_columns)

        self.datasets_summary_table.resizeRowsToContents()
        self.datasets_summary_table.resizeColumnsToContents()

        self.status_bar.showMessage('updating Overview table')

        self.status_bar.showMessage('idle')


    def get_selected_row(self,table):
        """Return the row index of the (last) selected row in *table*.
        NOTE(review): assumes at least one row is selected; otherwise the
        return variable is never bound."""
        indexes = table.selectionModel().selectedRows()
        for index in sorted(indexes):
            selected_row = index.row()
        return selected_row

    def show_results_from_all_pipelines(self):
        """Pop up a table of all autoprocessing results for the crystal that is
        selected in the Datasets summary table, pre-selecting the current choice."""
        selected_row=self.get_selected_row(self.datasets_summary_table)
        xtal = self.datasets_summary_table.item(selected_row, 0).text()
        # get details of currently selected autoprocessing result
        selectedResultDict = self.db.get_db_dict_for_sample(xtal)

        dbList=self.db.all_autoprocessing_results_for_xtal_as_dict(xtal)

        self.make_data_collection_table()
        self.msgBox = QtGui.QMessageBox() # needs to be created here, otherwise the cellClicked function
        # will reference it before it exists
        for db_dict in dbList:
            # skip results without a valid space group
            if str(db_dict['DataProcessingSpaceGroup']).lower() == 'null' or str(db_dict['DataProcessingSpaceGroup']).lower() == 'none':
                continue
            row = self.data_collection_table.rowCount()
            self.data_collection_table.insertRow(row)
            self.update_row_in_table(xtal, row, db_dict, self.data_collection_table, self.data_collection_table_columns)
            # highlight the result that is currently linked for this crystal
            if selectedResultDict['DataCollectionVisit'] == db_dict['DataCollectionVisit'] \
                    and selectedResultDict['DataCollectionRun'] == db_dict['DataCollectionRun'] \
                    and selectedResultDict['DataProcessingProgram'] == db_dict['DataProcessingProgram'] \
                    and selectedResultDict['DataProcessingScore'] == db_dict['DataProcessingScore']:
                self.current_row = row
                self.data_collection_table.selectRow(row)
        self.data_collection_table.cellClicked.connect(self.select_different_autoprocessing_result)
        self.data_collection_table_popup()

    def make_data_collection_table(self):
        # this creates a new table widget every time
        # more elegant would be to delete or reset 
an existing widget...
        self.data_collection_table = QtGui.QTableWidget()
        self.data_collection_table.setVerticalScrollBarPolicy(QtCore.Qt.ScrollBarAlwaysOff)
        self.data_collection_table.setColumnCount(len(self.data_collection_table_columns))
        font = QtGui.QFont()
        font.setPointSize(8)
        self.data_collection_table.setFont(font)
        self.data_collection_table.setHorizontalHeaderLabels(self.data_collection_table_columns)
        self.data_collection_table.horizontalHeader().setFont(font)
        self.data_collection_table.setSelectionBehavior(QtGui.QAbstractItemView.SelectRows)

    def data_collection_table_popup(self):
        """Embed self.data_collection_table in self.msgBox (created by the caller)
        and show it modally."""
# self.msgBox = QtGui.QMessageBox()
        msgBoxLayout = self.msgBox.layout()
        qWid = QtGui.QWidget()
        qWid.setFixedWidth(3000)
        qWid.setFixedHeight(500)
        vbox = QtGui.QVBoxLayout()
        vbox.addWidget(self.data_collection_table)
        qWid.setLayout(vbox)
# msgBoxLayout.addLayout(vbox, 0, 0)
        msgBoxLayout.addWidget(qWid)
        self.msgBox.addButton(QtGui.QPushButton('Cancel'), QtGui.QMessageBox.RejectRole)
        self.msgBox.resize(1000,200)
        self.msgBox.exec_();

    def select_different_autoprocessing_result(self):
        """Link the autoprocessing result the user clicked in the popup table and
        propagate the change to the database and the Datasets summary table."""
        selected_row=self.get_selected_row(self.data_collection_table)
        if selected_row != self.current_row:
            xtal = self.data_collection_table.item(selected_row, 0).text()
            visit = self.data_collection_table.item(selected_row, 1).text()
            run = self.data_collection_table.item(selected_row, 2).text()
            autoproc = self.data_collection_table.item(selected_row, 3).text()
            score = self.data_collection_table.item(selected_row, 12).text()
            for q in range(13):
                try:
                    print('--> {0!s}: {1!s}'.format(q, self.data_collection_table.item(selected_row, q).text()))
                except AttributeError:
                    print('--> {0!s}: None'.format(q))
            # get db_dict from collectionTable for visit, run, autoproc
# dbDict = self.db.get_db_dict_for_visit_run_autoproc(xtal,visit,run,autoproc)
            dbDict = self.db.get_db_dict_for_visit_run_autoproc_score(xtal, visit, run, autoproc, score)
            
dbDict['DataProcessingAutoAssigned'] = 'False'\n self.update_log.insert('%s: changing selected autoprocessing result to %s %s %s' %(xtal,visit,run,autoproc))\n # xtal is QString -> str(xtal)\n XChemMain.linkAutoProcessingResult(str(xtal), dbDict, self.initial_model_directory,self.xce_logfile)\n self.update_log.insert('%s: updating row in Datasets table' %xtal)\n self.db.update_data_source(str(xtal),dbDict)\n self.update_log.insert('%s: getting updated information from DB mainTable' %xtal)\n dbDict = self.db.get_db_dict_for_sample(xtal)\n row = self.get_row_of_sample_in_table(self.datasets_summary_table,xtal)\n self.update_row_in_table(xtal, row, dbDict, self.datasets_summary_table,\n self.datasets_summary_table_columns)\n else:\n print('nothing to change')\n self.msgBox.done(1)\n\n\n\n # < end\n #################################################################################################################\n\n\n\n\n\n\n\n\n\n\n\n\n def update_outcome_datasets_summary_table(self, sample, outcome):\n rows_in_table = self.datasets_summary_table.rowCount()\n for row in range(rows_in_table):\n if self.datasets_summary_table.item(row, 0).text() == sample:\n cell_text = QtGui.QTableWidgetItem()\n cell_text.setText(outcome)\n self.datasets_summary_table.setItem(row, 3, cell_text)\n\n def user_update_selected_autoproc_datasets_summary_table(self):\n for key in self.data_collection_column_three_dict:\n if self.data_collection_column_three_dict[key][0] == self.sender():\n self.update_log.insert('here: ' + self.sender())\n self.update_log.insert('herere' + str(self.data_collection_column_three_dict))\n dbTmp = self.xtal_db_dict[key]\n stage = dbTmp['RefinementOutcome'].split()[0]\n print('===>', key, stage)\n if int(stage) > 2:\n msgBox = QtGui.QMessageBox()\n msgBox.setText(\n \"*** WARNING ***\\n%s is currently %s\\nIt will disappear from the Refinement table,\\n\"\n \"when you refresh it next time.\\nDo you want to continue?\" % (\n key, dbTmp['RefinementOutcome']))\n 
msgBox.addButton(QtGui.QPushButton('No'), QtGui.QMessageBox.YesRole)
                    msgBox.addButton(QtGui.QPushButton('Yes'), QtGui.QMessageBox.RejectRole)
                    reply = msgBox.exec_();
                    if reply == 0:
                        self.update_log.insert('will not change data processing selection')
                        # restore previous selection
                        for n, entry in enumerate(self.data_collection_dict[key]):
                            print('==>', n)
                            if entry[0] == 'logfile':
                                if entry[8]:
                                    print('===> found:', n)
                                    self.data_collection_column_three_dict[key][0].selectRow(n)
                                    break

                indexes = self.sender().selectionModel().selectedRows()
                # sentinel: no table row will ever match this value
                selected_processing_result = 1000000
                for index in sorted(indexes):
                    selected_processing_result = index.row()
                # the user changed the selection, i.e. no automated selection will update it
                self.update_log.insert('user changed selection')
                self.data_collection_column_three_dict[key][1] = True
                # need to also update if not yet done
                user_already_changed_selection = False
                for n, entry in enumerate(self.data_collection_dict[key]):
                    if entry[0] == 'user_changed_selection':
                        user_already_changed_selection = True
                    if entry[0] == 'logfile':
                        db_dict = entry[6]
                        db_dict['DataProcessingAutoAssigned'] = 'False'
                        # entry[7] is the row index of this result; entry[8] the 'selected' flag
                        if entry[7] == selected_processing_result:
                            db_dict_current = entry[6]
                            program = db_dict['DataProcessingProgram']
                            visit = db_dict['DataCollectionVisit']
                            run = db_dict['DataCollectionRun']
                            self.update_log.insert(
                                'user changed data processing files for {0!s} to visit={1!s}, '
                                'run={2!s}, program={3!s}'.format(key, visit, run, program))
                            # update datasource
                            self.update_log.insert('updating datasource...')
                            self.update_data_source(key, db_dict)
                            entry[8] = True
                        else:
                            entry[8] = False

                        entry[6] = db_dict
                        self.data_collection_dict[key][n] = entry
                if not user_already_changed_selection:
                    self.data_collection_dict[key].append(['user_changed_selection'])
                XChemMain.change_links_to_selected_data_collection_outcome(key, self.data_collection_dict,
                                                                           
self.data_collection_column_three_dict,
                                                                           self.dataset_outcome_dict,
                                                                           self.initial_model_directory,
                                                                           os.path.join(self.database_directory,
                                                                                        self.data_source_file),
                                                                           self.xce_logfile)

                # update 'Datasets' table
                column_name = XChemDB.data_source(
                    os.path.join(self.database_directory, self.data_source_file)).translate_xce_column_list_to_sqlite(
                    self.datasets_summary_table_columns)
                rows_in_table = self.datasets_summary_table.rowCount()
                for row in range(rows_in_table):
                    if self.datasets_summary_table.item(row, 0).text() == key:
                        for column, header in enumerate(column_name):
                            # widget columns are managed elsewhere; only plain text cells refresh here
                            if header[0] == 'Sample ID':
                                continue
                            elif header[0] == 'DataCollection\nOutcome':
                                continue
                            elif header[0].startswith('img'):
                                continue
                            elif header[0].startswith('Show'):
                                continue
                            else:
                                cell_text = QtGui.QTableWidgetItem()
                                try:
                                    cell_text.setText(str(db_dict_current[header[1]]))
                                    cell_text.setTextAlignment(QtCore.Qt.AlignCenter | QtCore.Qt.AlignCenter)
                                    self.datasets_summary_table.setItem(row, column, cell_text)
                                except KeyError:
                                    pass

    def update_selected_autoproc_datasets_summary_table(self):
        """Programmatic counterpart of the user-driven handler above: apply the
        autoprocessing result currently selected in the sender table to the
        datasource and refresh the Datasets summary row."""
        for key in self.data_collection_column_three_dict:
            if self.data_collection_column_three_dict[key][0] == self.sender():
                sample = key
                break
        indexes = self.sender().selectionModel().selectedRows()
        for index in sorted(indexes):
            selected_processing_result = index.row()

        for n, entry in enumerate(self.data_collection_dict[sample]):
            if entry[0] == 'logfile':
                if entry[7] == selected_processing_result:
                    db_dict = entry[6]
                    program = db_dict['DataProcessingProgram']
                    visit = db_dict['DataCollectionVisit']
                    run = db_dict['DataCollectionRun']
                    self.update_log.insert(
                        'user changed data processing files for {0!s} to visit={1!s}, run={2!s}, program={3!s}'.format(
                            sample, visit, run, program))
                    # update datasource
                    self.update_log.insert('updating datasource...')
                    self.update_data_source(sample, db_dict)
                    entry[8] = True
                else:
                    entry[8] = 
False
                self.data_collection_dict[sample][n] = entry

        # update 'Datasets' table
        column_name = XChemDB.data_source(
            os.path.join(self.database_directory, self.data_source_file)).translate_xce_column_list_to_sqlite(
            self.datasets_summary_table_columns)
        rows_in_table = self.datasets_summary_table.rowCount()
        for row in range(rows_in_table):
            if self.datasets_summary_table.item(row, 0).text() == sample:
                for column, header in enumerate(column_name):
                    # widget columns are managed elsewhere; only plain text cells refresh here
                    if header[0] == 'Sample ID':
                        continue
                    elif header[0] == 'DataCollection\nOutcome':
                        continue
                    elif header[0].startswith('img'):
                        continue
                    elif header[0].startswith('Show'):
                        continue
                    else:
                        cell_text = QtGui.QTableWidgetItem()
                        cell_text.setText(str(db_dict[header[1]]))
                        cell_text.setTextAlignment(QtCore.Qt.AlignCenter | QtCore.Qt.AlignCenter)
                        self.datasets_summary_table.setItem(row, column, cell_text)

    def populate_and_update_datasource_table(self):
        """Fill the Overview datasource table from self.data, inserting rows for
        new samples and updating rows of samples already shown."""
        self.overview_datasource_table.setColumnCount(len(self.overview_datasource_table_columns))

        # first get a list of all the samples that are already in the table and which will be updated
        samples_in_table = []
        current_row = self.overview_datasource_table.rowCount()
        for row in range(current_row):
            sampleID = str(self.overview_datasource_table.item(row, 0).text()) # this must be the case
            samples_in_table.append(sampleID)

        columns_to_show = self.get_columns_to_show(self.overview_datasource_table_columns)
        n_rows = self.get_rows_with_sample_id_not_null_from_datasource()
        sample_id_column = self.get_columns_to_show(['Sample ID'])

        for row in self.data:
            if str(row[sample_id_column[0]]).lower() == 'none' or str(row[sample_id_column[0]]).replace(' ', '') == '':
                # do not show rows where sampleID is null
                continue
            else:
                if not str(row[sample_id_column[0]]) in samples_in_table:
                    # insert row, this is a new sample
                    x = self.overview_datasource_table.rowCount()
                    self.overview_datasource_table.insertRow(x)
                else:
                    # find 
row of this sample in data_source_table
                    for present_rows in range(self.overview_datasource_table.rowCount()):
                        if str(row[sample_id_column[0]]) == str(
                                self.overview_datasource_table.item(present_rows, 0).text()):
                            x = present_rows
                            break
            for y, item in enumerate(columns_to_show):
                cell_text = QtGui.QTableWidgetItem()
                if row[item] is None:
                    cell_text.setText('')
                else:
                    cell_text.setText(str(row[item]))
                if self.overview_datasource_table_columns[y] == 'Sample ID': # assumption is that column 0 is always sampleID
                    cell_text.setFlags(QtCore.Qt.ItemIsEnabled) # and this field cannot be changed
                cell_text.setTextAlignment(QtCore.Qt.AlignCenter | QtCore.Qt.AlignCenter)
                self.overview_datasource_table.setItem(x, y, cell_text)
        self.overview_datasource_table.setHorizontalHeaderLabels(self.overview_datasource_table_columns)

    def kill_other_pandda_options(self):
        """Keep the per-row PanDDA checkboxes (columns 7/8/9) mutually consistent.
        NOTE(review): checkbox0 (column 1) is fetched but never used, and the
        second condition relies on Python precedence
        ('a and b or c' == '(a and b) or c') — confirm this is intended."""
        for i in range(0, self.pandda_analyse_data_table.rowCount()):
            checkbox0 = self.pandda_analyse_data_table.cellWidget(i,1)
            checkbox1 = self.pandda_analyse_data_table.cellWidget(i,7)
            checkbox2 = self.pandda_analyse_data_table.cellWidget(i,8)
            checkbox3 = self.pandda_analyse_data_table.cellWidget(i,9)
            if checkbox1.isChecked():
                checkbox2.setChecked(False)
                checkbox3.setChecked(False)
            if checkbox1.isChecked() and checkbox2.isChecked() or checkbox3.isChecked():
                checkbox1.setChecked(False)
            if checkbox2.isChecked() or checkbox3.isChecked():
                checkbox1.setChecked(False)

    def populate_pandda_analyse_input_table(self):
        """Fill the pandda.analyse input table with every crystal that has a
        dimple PDB, adding rows for new crystals and updating existing ones."""

        column_name = self.db.translate_xce_column_list_to_sqlite(self.pandda_table_columns)
        print(column_name)
        for xtal in sorted(self.xtal_db_dict):
            new_xtal = False
            db_dict = self.xtal_db_dict[xtal]
            if os.path.isfile(db_dict['DimplePathToPDB']):
                row = self.pandda_analyse_data_table.rowCount()
                if xtal not in self.pandda_analyse_input_table_dict:
                    self.pandda_analyse_data_table.insertRow(row)
                    current_row = row
                    new_xtal = True
                else:
                    for table_row 
in range(row):
                        if self.pandda_analyse_data_table.item(table_row, 0).text() == xtal:
                            current_row = table_row
                            break
                for column, header in enumerate(column_name):
                    if header[0]=='Exclude':
                        deselect_button = QtGui.QCheckBox()
                        deselect_button.stateChanged.connect(self.kill_other_pandda_options)
                        self.pandda_analyse_data_table.setCellWidget(current_row, column, deselect_button)

                    elif header[0]=='Ignore':
                        deselect_button = QtGui.QCheckBox()
                        deselect_button.stateChanged.connect(self.kill_other_pandda_options)
                        self.pandda_analyse_data_table.setCellWidget(current_row, column, deselect_button)

                    elif header[0]=='Export':
                        deselect_button = QtGui.QCheckBox()
                        deselect_button.stateChanged.connect(self.kill_other_pandda_options)
                        self.pandda_analyse_data_table.setCellWidget(current_row, column, deselect_button)

                    elif header[0] == 'Sample ID':
                        cell_text = QtGui.QTableWidgetItem()
                        cell_text.setText(str(xtal))
                        cell_text.setTextAlignment(QtCore.Qt.AlignCenter | QtCore.Qt.AlignCenter)
                        self.pandda_analyse_data_table.setItem(current_row, column, cell_text)
                    else:
                        cell_text = QtGui.QTableWidgetItem()
                        cell_text.setText(str(db_dict[header[1]]))
                        if header[0] == 'PanDDA\nStatus':
                            # colour-code the PanDDA status cell by run state
                            if str(db_dict[header[1]]) == 'running':
                                cell_text.setBackground(QtGui.QColor(100, 230, 150))
                            elif str(db_dict[header[1]]) == 'pending':
                                cell_text.setBackground(QtGui.QColor(20, 100, 230))
                            elif str(db_dict[header[1]]) == 'started':
                                cell_text.setBackground(QtGui.QColor(230, 240, 110))
                            elif str(db_dict[header[1]]) == 'finished':
                                cell_text.setBackground(QtGui.QColor(255, 255, 255))
                            elif 'problem' in str(db_dict[header[1]]):
                                cell_text.setBackground(QtGui.QColor(255, 0, 0))
                        cell_text.setTextAlignment(QtCore.Qt.AlignCenter | QtCore.Qt.AlignCenter)
                        self.pandda_analyse_data_table.setItem(current_row, column, cell_text)
            if new_xtal:
                self.pandda_analyse_input_table_dict[xtal] = []

    def select_sample_for_pandda(self, option):
        """Apply *option* ('deselect', 'ignore', 'char' or 'zmap') to the PanDDA
        checkboxes of every currently selected table row."""
        indexes = 
self.pandda_analyse_data_table.selectionModel().selectedRows()\n if option == 'deselect':\n for index in sorted(indexes):\n self.pandda_analyse_data_table.cellWidget(index.row(), 6).setChecked(False)\n self.pandda_analyse_data_table.cellWidget(index.row(), 7).setChecked(False)\n self.pandda_analyse_data_table.cellWidget(index.row(), 8).setChecked(False)\n else:\n for index in sorted(indexes):\n self.pandda_analyse_data_table.cellWidget(index.row(), 6).setChecked(False)\n self.pandda_analyse_data_table.cellWidget(index.row(), 7).setChecked(False)\n self.pandda_analyse_data_table.cellWidget(index.row(), 8).setChecked(False)\n if option =='ignore':\n checkbox = self.pandda_analyse_data_table.cellWidget(index.row(), 6)\n if option == 'char':\n checkbox = self.pandda_analyse_data_table.cellWidget(index.row(), 7)\n if option == 'zmap':\n checkbox = self.pandda_analyse_data_table.cellWidget(index.row(), 8)\n\n checkbox.setChecked(True)\n self.kill_other_pandda_options()\n\n def populate_and_update_refinement_table(self):\n\n# panddaList = self.db.execute_statement(\n# \"select CrystalName,PANDDA_site_index,PANDDA_site_name,RefinementOutcome \"\n# \"from panddaTable where CrystalName is not '' and PANDDA_site_ligand_placed is 'True';\")\n# panddaDict = {}\n# for item in panddaList:\n# if str(item[0]) not in panddaDict:\n# panddaDict[str(item[0])] = []\n# panddaDict[str(item[0])].append([str(item[1]), str(item[2]), str(item[3])])\n\n column_name = self.db.translate_xce_column_list_to_sqlite(self.refinement_table_columns)\n for xtal in sorted(self.xtal_db_dict):\n new_xtal = False\n db_dict = self.xtal_db_dict[xtal]\n try:\n stage = int(str(db_dict['RefinementOutcome']).split()[0])\n refinementStage = db_dict['RefinementOutcome']\n except ValueError:\n stage = 0\n except IndexError:\n stage = 0\n\n if stage >= 3 and stage < 7:\n row = self.refinement_table.rowCount()\n if xtal not in self.refinement_table_dict:\n self.refinement_table.insertRow(row)\n current_row = row\n 
new_xtal = True\n else:\n for table_row in range(row):\n if self.refinement_table.item(table_row, 0).text() == xtal:\n current_row = table_row\n break\n for column, header in enumerate(column_name):\n if header[0] == 'Sample ID':\n cell_text = QtGui.QTableWidgetItem()\n cell_text.setText(str(xtal))\n cell_text.setTextAlignment(QtCore.Qt.AlignCenter | QtCore.Qt.AlignCenter)\n self.refinement_table.setItem(current_row, column, cell_text)\n\n elif header[0] == 'Refinement\\nOutcome':\n if new_xtal:\n refinement_outcome_combobox = QtGui.QComboBox()\n self.populate_refinement_outcome_combobox(refinement_outcome_combobox)\n self.refinement_table.setCellWidget(current_row, column, refinement_outcome_combobox)\n else:\n refinement_outcome_combobox = self.refinement_table_dict[xtal]\n index = refinement_outcome_combobox.findText(refinementStage, QtCore.Qt.MatchFixedString)\n refinement_outcome_combobox.setCurrentIndex(index)\n refinement_outcome_combobox.currentIndexChanged.connect(\n self.refinement_outcome_combobox_changed)\n\n elif header[0] == 'buster-reports':\n #\"<a href=\\\"{0!s}\">'NAME'</a>\".format(db_dict['RefinementBusterReportHTML'])\n # db_dict['RefinementBusterReportHTML'] = 'www.google.com'\n buster_report = db_dict['RefinementBusterReportHTML']\n ref_name = buster_report.split('/')[len(buster_report.split('/'))-2]\n buster_report_link = QtGui.QLabel(\"<a href=\\\"{0!s}\\\">{1!s}</a>\".format(buster_report,ref_name))\n buster_report_link.setOpenExternalLinks(True)\n# buster_report_link.setTextInteractionFlags(QtCore.Qt.TextBrowserInteraction)\n# buster_report_link.setTextFormat(QtCore.Qt.RichText)\n# self.refinement_table.setItem(current_row, column, buster_report_link)\n self.refinement_table.setCellWidget(current_row, column, buster_report_link)\n\n\n# elif header[0] == 'PanDDA site details':\n# try:\n# panddaDict[xtal].insert(0, ['Index', 'Name', 'Status'])\n# outerFrame = QtGui.QFrame()\n# outerFrame.setFrameShape(QtGui.QFrame.Box)\n# grid = 
QtGui.QGridLayout()\n# for y, entry in enumerate(panddaDict[xtal]):\n# for x, info in enumerate(entry):\n# frame = QtGui.QFrame()\n# frame.setFrameShape(QtGui.QFrame.Box)\n# vbox = QtGui.QVBoxLayout()\n# vbox.addWidget(QtGui.QLabel(str(entry[x])))\n# frame.setLayout(vbox)\n# grid.addWidget(frame, y, x)\n# outerFrame.setLayout(grid)\n# self.refinement_table.setCellWidget(current_row, column, outerFrame)\n# except KeyError:\n# cell_text = QtGui.QTableWidgetItem()\n# cell_text.setText('*** N/A ***')\n# cell_text.setTextAlignment(QtCore.Qt.AlignCenter | QtCore.Qt.AlignCenter)\n# self.refinement_table.setItem(current_row, column, cell_text)\n else:\n cell_text = QtGui.QTableWidgetItem()\n cell_text.setText(str(db_dict[header[1]]))\n if header[0] == 'Refinement\\nStatus':\n if str(db_dict[header[1]]) == 'running':\n cell_text.setBackground(QtGui.QColor(100, 230, 150))\n elif str(db_dict[header[1]]) == 'pending':\n cell_text.setBackground(QtGui.QColor(20, 100, 230))\n elif str(db_dict[header[1]]) == 'started':\n cell_text.setBackground(QtGui.QColor(230, 240, 110))\n elif str(db_dict[header[1]]) == 'finished':\n cell_text.setBackground(QtGui.QColor(255, 255, 255))\n elif 'problem' in str(db_dict[header[1]]):\n cell_text.setBackground(QtGui.QColor(255, 0, 0))\n cell_text.setTextAlignment(QtCore.Qt.AlignCenter | QtCore.Qt.AlignCenter)\n self.refinement_table.setItem(current_row, column, cell_text)\n if new_xtal:\n self.refinement_table_dict[xtal] = refinement_outcome_combobox\n\n self.refinement_table.resizeColumnsToContents()\n self.refinement_table.resizeRowsToContents()\n\n def get_columns_to_show(self, column_list):\n # maybe I coded some garbage before, but I need to find out which column name in the\n # data source corresponds to the actually displayed column name in the table\n # reason being that the unique column ID for DB may not be nice to look at\n columns_to_show = []\n for column in column_list:\n # first find out what the column name in the header is:\n 
column_name = ''\n for name in self.all_columns_in_data_source:\n if column == name[1]:\n column_name = name[0]\n for n, all_column in enumerate(self.header):\n if column_name == all_column:\n columns_to_show.append(n)\n break\n return columns_to_show\n\n def get_rows_with_sample_id_not_null_from_datasource(self):\n sample_id_column = self.get_columns_to_show(['Sample ID'])\n n_rows = 0\n for row in self.data:\n if not str(row[sample_id_column[0]]).lower() != 'none' or not str(row[sample_id_column[0]]).replace \\\n (' ', '') == '':\n n_rows += 1\n return n_rows\n\n def update_data_source(self, sample, db_dict):\n data_source = XChemDB.data_source(os.path.join(self.database_directory, self.data_source_file))\n\n def quit_xce(self):\n # save pkl file\n if self.data_collection_dict != {}:\n if os.path.isfile(self.datasets_summary_file):\n self.update_log.insert('saving results to PKL file')\n pickle.dump(self.data_collection_dict, open(self.datasets_summary_file, 'wb'))\n self.update_log.insert('quitting XCE... bye,bye!')\n QtGui.qApp.quit()\n\n\nif __name__ == \"__main__\":\n app = XChemExplorer(sys.argv[1:])\n\n\n# \"Debugging is twice as hard as writing the code in the first\n# place. Therefore, if you write the code as cleverly as\n# possible, you are, by definition, not smart enough to debug it.\"\n# -- Brian W. Kernighan\n# ^^ Who did this? :P\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
print('Hello! What is your name?')
<|reserved_special_token_0|>
print('Well, ' + myName + ', I am thinking of a number between 1 and 20.')
while guesses_taken < 6:
print('Take a guess.')
guess = input()
guess = int(guess)
guesses_taken += 1
if guess < number:
print('Your guess is too low.')
if guess > number:
print('Your guess is too high.')
if guess == number:
break
if guess == number:
guesses_taken = str(guesses_taken)
print('Good job, ' + myName + '! You guessed my number in ' +
guesses_taken + ' guesses!')
if guess != number:
number = str(number)
print('Nope. The number I was thinking of was ' + number)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
guesses_taken = 0
print('Hello! What is your name?')
myName = input()
number = random.randint(1, 20)
print('Well, ' + myName + ', I am thinking of a number between 1 and 20.')
while guesses_taken < 6:
print('Take a guess.')
guess = input()
guess = int(guess)
guesses_taken += 1
if guess < number:
print('Your guess is too low.')
if guess > number:
print('Your guess is too high.')
if guess == number:
break
if guess == number:
guesses_taken = str(guesses_taken)
print('Good job, ' + myName + '! You guessed my number in ' +
guesses_taken + ' guesses!')
if guess != number:
number = str(number)
print('Nope. The number I was thinking of was ' + number)
<|reserved_special_token_1|>
import random  # for the secret number

# Simple guess-the-number game: the player has six tries to find a
# randomly chosen integer between 1 and 20 (inclusive).
guesses_taken = 0  # attempts used so far

print('Hello! What is your name?')
myName = input()

number = random.randint(1, 20)  # secret number; randint includes both endpoints
print('Well, ' + myName + ', I am thinking of a number between 1 and 20.')

while guesses_taken < 6:  # at most six attempts
    print('Take a guess.')
    guess = input()
    guess = int(guess)  # input() returns a string; compare as an integer

    guesses_taken += 1  # count this attempt

    if guess < number:
        print('Your guess is too low.')

    if guess > number:
        print('Your guess is too high.')

    if guess == number:  # correct guess ends the loop early
        break

if guess == number:
    # success: report how many attempts were needed
    guesses_taken = str(guesses_taken)
    print('Good job, ' + myName + '! You guessed my number in ' +
        guesses_taken + ' guesses!')

if guess != number:
    # out of attempts: reveal the secret number
    number = str(number)
    print('Nope. The number I was thinking of was ' + number)
<|reserved_special_token_1|>
import random  # random module provides randint for the secret number

guesses_taken = 0  # number of attempts used so far

print('Hello! What is your name?')  # greet the player
myName = input()  # read the player's name

number = random.randint(1, 20)  # pick the secret number, 1..20 inclusive (randint includes both ends)
print('Well, ' + myName + ', I am thinking of a number between 1 and 20.')  # explain the rules

while guesses_taken < 6:  # loop while attempts remain (six maximum)
    print('Take a guess.')  # prompt for a guess
    guess = input()  # read the guess as text
    guess = int(guess)  # convert the input to an integer for comparison

    guesses_taken += 1  # count this attempt

    if guess < number:  # guess below the secret number
        print('Your guess is too low.')

    if guess > number:  # guess above the secret number
        print('Your guess is too high.')

    if guess == number:  # correct guess: stop guessing
        break

if guess == number:  # player won: report the attempt count
    guesses_taken = str(guesses_taken)
    print('Good job, ' + myName + '! You guessed my number in ' + guesses_taken + ' guesses!')

if guess != number:  # player ran out of attempts: reveal the number
    number = str(number)
    print('Nope. The number I was thinking of was ' + number)
|
flexible
|
{
"blob_id": "3302dc058032d9fe412bde6fd89699203526a72d",
"index": 4695,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nprint('Hello! What is your name?')\n<mask token>\nprint('Well, ' + myName + ', I am thinking of a number between 1 and 20.')\nwhile guesses_taken < 6:\n print('Take a guess.')\n guess = input()\n guess = int(guess)\n guesses_taken += 1\n if guess < number:\n print('Your guess is too low.')\n if guess > number:\n print('Your guess is too high.')\n if guess == number:\n break\nif guess == number:\n guesses_taken = str(guesses_taken)\n print('Good job, ' + myName + '! You guessed my number in ' +\n guesses_taken + ' guesses!')\nif guess != number:\n number = str(number)\n print('Nope. The number I was thinking of was ' + number)\n",
"step-3": "<mask token>\nguesses_taken = 0\nprint('Hello! What is your name?')\nmyName = input()\nnumber = random.randint(1, 20)\nprint('Well, ' + myName + ', I am thinking of a number between 1 and 20.')\nwhile guesses_taken < 6:\n print('Take a guess.')\n guess = input()\n guess = int(guess)\n guesses_taken += 1\n if guess < number:\n print('Your guess is too low.')\n if guess > number:\n print('Your guess is too high.')\n if guess == number:\n break\nif guess == number:\n guesses_taken = str(guesses_taken)\n print('Good job, ' + myName + '! You guessed my number in ' +\n guesses_taken + ' guesses!')\nif guess != number:\n number = str(number)\n print('Nope. The number I was thinking of was ' + number)\n",
"step-4": "import random\nguesses_taken = 0\nprint('Hello! What is your name?')\nmyName = input()\nnumber = random.randint(1, 20)\nprint('Well, ' + myName + ', I am thinking of a number between 1 and 20.')\nwhile guesses_taken < 6:\n print('Take a guess.')\n guess = input()\n guess = int(guess)\n guesses_taken += 1\n if guess < number:\n print('Your guess is too low.')\n if guess > number:\n print('Your guess is too high.')\n if guess == number:\n break\nif guess == number:\n guesses_taken = str(guesses_taken)\n print('Good job, ' + myName + '! You guessed my number in ' +\n guesses_taken + ' guesses!')\nif guess != number:\n number = str(number)\n print('Nope. The number I was thinking of was ' + number)\n",
"step-5": "import random #import random module\n\nguesses_taken = 0 #assign 0 to guesses_taken variable\n\nprint('Hello! What is your name?')# print Hello! What is your name? to console\nmyName = input()#take an input from user(name)\n\nnumber = random.randint(1, 20)# make random number between 1 and 19 and save in number variable\nprint('Well, ' + myName + ', I am thinking of a number between 1 and 20.') #print the explanation\n\nwhile guesses_taken < 6: #while loop looping until guesses_taken < 6\n print('Take a guess.') # print the introduction\n guess = input() # user input\n guess = int(guess) #convert the input to integer\n\n\n guesses_taken += 1 #guess opportunity reduce\n\n if guess < number:#if guess less than number print Your guess is too low.\n print('Your guess is too low.')\n\n if guess > number:#if guess bigger than number print Your guess is too low.\n print('Your guess is too high.')\n\n if guess == number:#if guess equal to number break\n break\n\nif guess == number:#if guess equal to number, user guessed the number and print the underline\n guesses_taken = str(guesses_taken)\n print('Good job, ' + myName + '! You guessed my number in ' + guesses_taken + ' guesses!')\n\nif guess != number:#if guess not equal to number user try till guess_take is 6 and print under\n number = str(number)\n print('Nope. The number I was thinking of was ' + number)\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
# -*- coding: utf-8 -*-
"""
@author: chris
Modified from THOMAS MCTAVISH (2010-11-04).
mpiexec -f ~/machinefile -enable-x -n 96 python Population.py --noplot
"""
from __future__ import with_statement
from __future__ import division
import sys
sys.path.append('../NET/sheff/weasel/')
sys.path.append('../NET/sheffprk/template/')
import os
#use_pc = True
import sys

argv = sys.argv
# Choose the parallel backend from the command line: "-python" in argv
# presumably indicates a launch via NEURON (nrniv/special -python), in which
# case NEURON's ParallelContext is used directly -- TODO confirm.
if "-python" in argv:
    use_pc = True
else:
    use_pc = False

if use_pc == True:
    # ParallelContext backend: rank and host count come from NEURON itself.
    from neuron import h
    pc = h.ParallelContext()
    rank = int(pc.id())
    nhost = pc.nhost()
else:
    # mpi4py backend: rank comes from the MPI world communicator.
    from mpi4py import MPI
    from neuron import h
    rank = MPI.COMM_WORLD.rank
#print sys.version
if __name__ == '__main__':
    import argparse
    # Command-line switches; all flags default to False unless supplied.
    parser = argparse.ArgumentParser()
    parser.add_argument('-o', action='store', dest='opt')
    parser.add_argument('--noplot', action='store_true')
    parser.add_argument('--norun', action='store_true')
    parser.add_argument('--noconst', action='store_true')
    parser.add_argument('--noqual', action='store_true')
    # NOTE(review): a hard-coded list is parsed here instead of sys.argv,
    # and '-o' consumes '--noplot' as its stored value, so pars.noplot is
    # False regardless of the real command line. This looks like debugging
    # residue -- confirm whether parse_known_args() with no argument
    # (i.e. the actual sys.argv) was intended.
    pars, unknown = parser.parse_known_args(['-o','--noplot','--norun','--noconst','--noqual'])
if __name__ == '__main__':
    import matplotlib
    # Only rank 0 may open interactive windows; all other ranks render
    # off-screen with the Agg backend.
    if rank == 0:
        matplotlib.use('Tkagg', warn=True)
    else:
        matplotlib.use('Agg', warn=True)
if __name__ == '__main__':
do_plot = 1
if results.noplot: # do not plot to windows
matplotlib.use('Agg', warn=True)
if rank == 0: print "- No plotting"
do_plot = 0
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.mlab as mlab
import random as rnd
import neuronpy.util.spiketrain
#set_printoptions(threshold='nan')
from Stimulation import *
from Stimhelp import *
from units import *
from cells.PassiveCell import *
from itertools import izip
try:
import cPickle as pickle
except:
import pickle
import gzip
import h5py
from templates.synapse.synapse import Synapse
from synapsepfpurk import Synapse as Synapse2
if use_pc is False: import mdp
import time as ttime
from scipy.optimize import fmin, leastsq
from NeuroTools import stgen, signals
import md5
#from guppy import hpy
#hpy = hpy()
class Population:
"""
A population of N cells
"""
    def __init__(self, cellimport = [], celltype = None, N = [10], temperature = 6.3, cell_exe = 0, ihold = [0*nA], ihold_sigma = [0*nA], amp = [0*nA], amod = [None], anoise = [None], give_freq = False, do_run = 1, pickle_prefix = "default", istart = 0, istop = 0.07, di = 0.001, dt = 0.025*ms, use_mpi = True, use_pc = False):
        """Build the population and all of its bookkeeping state.

        :param cellimport: import statement string(s) for the cell classes;
            autofilled from *celltype* when given as [].
        :param celltype: cell class name(s); a scalar is promoted to a list.
        :param N: number of cells per cell type (scalar promoted to list).
        :param temperature: simulation temperature (degrees C, presumably).
        :param cell_exe: constructor expression string(s), exec'd per cell
            in create_cells().
        :param ihold, ihold_sigma, amp, amod: stimulation settings, stored
            via set_init().
        :param anoise: additive-noise setting per cell type.
        :param give_freq: presumably makes ihold a target rate rather than a
            current (consumed by set_i(), not visible here) -- TODO confirm.
        :param do_run: if falsy, cell creation/simulation steps are skipped.
        :param pickle_prefix: prefix for result/pickle file names.
        :param istart, istop, di: current range and step for the i/f search.
        :param dt: integration time step.
        :param use_mpi, use_pc: parallel backend selection (mpi4py vs
            NEURON ParallelContext).
        """
        self.use_pc = use_pc

        # --- normalize scalar arguments to per-celltype lists -------------
        if type(celltype) is not list: celltype = [celltype] #convert to list if it is not given as one
        self.celltype = celltype

        if type(cell_exe) is not list: cell_exe = [cell_exe] #convert to list if it is not given as one
        self.cell_exe = cell_exe

        # NOTE(review): cellimport=[] is a mutable default argument; the
        # append below mutates the shared default across instances -- confirm
        # that Population is only instantiated once per process.
        if cellimport is not None:
            if cellimport == []:
                for n in range(len(celltype)):
                    cellimport.append("from cells." + self.celltype[n] + " import *")
        self.cellimport = cellimport

        if type(N) is not list: N = [N]
        self.N = N # Total number of cells in the net
        self.n_celltypes = len(self.N)
        self.a_celltype = [0] # celltype to analyse
        self.factor_celltype = [1]*self.n_celltypes

        self.set_init(ihold, ihold_sigma, amp, amod)

        self.CF_var = False
        self.inh_hold_sigma = [0]
        self.intr_hold_sigma = [0]
        #self.sigma_inh_hold = 0
        #self.sigma_ihold = 0

        if type(anoise) is not list: anoise = [anoise]*self.n_celltypes
        if len(anoise) < self.n_celltypes: anoise = [anoise[0]]*self.n_celltypes
        self.anoise = anoise # RUN self.set_i()

        self.give_freq = give_freq # RUN self.set_i()
        self.temperature = temperature

        # --- gid bookkeeping and per-host containers ----------------------
        self.gid_count = 0
        self.gidlist = [] # List of global identifiers on this host
        self.global_gidlist = [] # List of global identifiers
        self.cells = [] # Cells on this host

        # spike-time / id / voltage recording vectors, one per cell type
        self.t_vec = []
        self.id_vec = []
        self.rec_v = []
        for n in range(self.n_celltypes):
            if use_mpi:
                self.t_vec.append(h.Vector()) # np.array([0])
                self.id_vec.append(h.Vector()) # np.array([-1], dtype=int)
            else:
                self.t_vec.append([])
            self.rec_v.append(h.Vector())
        #self.t_vec = h.Vector(np.array([0])) # Spike time of all cells on this host
        #self.id_vec = h.Vector(np.array([-1])) # Ids of spike times on this host

        # --- fluctuating / noise / playback input state -------------------
        self.flucts = [] # Fluctuating inputs on this host
        self.fluct_m = 0 # [nA]
        self.fluct_s = [0] # [nA]
        self.fluct_tau = 0*ms # [ms]
        self.noises = [] # Random number generators on this host
        self.plays = [] # Play inputs on this host
        self.rec_is = []
        self.trains = []
        self.vecstim = []
        self.nc_vecstim = []
        self.spike_vec = []

        # --- virtual target synapse / timing parameters -------------------
        self.syn_tau1 = 5*ms # Synapse of virtual target neuron
        self.syn_tau2 = 5*ms # Synapse of virtual target neuron
        self.tmax = 10*sec # maximum length of plot that should be plotted!!
        self.nc_delay = 0 #500*ms # only important if syn_output is used, not used currently
        self.dt = dt
        self.bin_width = dt
        self.jitter = 0*ms
        self.delta_t = 0*ms

        # current range for the i/f curve search (see get_i)
        self.istart = istart
        self.istop = istop
        self.di = di

        # --- per-run recording containers ---------------------------------
        self.ic_holds = []
        self.i_holdrs = []
        self.i_holds = []
        self.ic_starts = []
        self.vc_starts = []
        self.ic_steps = []
        self.rec_step = []
        self.tvecs = []
        self.ivecs = []
        # NOTE(review): self.noises is re-initialized here (also set above).
        self.noises = []
        self.record_syn = []
        self.id_all_vec_input = []
        self.t_all_vec_input = []

        # sanity check: the three per-celltype lists must line up
        if len(self.N) == len(self.cell_exe) == len(self.celltype):
            pass
        else:
            raise ValueError('N, cell_exe, celltype do NOT have equal length!')

        # --- parallel context setup ---------------------------------------
        self.use_mpi = use_mpi
        self.use_pc = use_pc
        if self.use_mpi:
            #### Make a new ParallelContext object
            self.pc = h.ParallelContext()
            self.id = self.pc.id()
            self.nhost = int(self.pc.nhost())

            if self.use_pc == False:
                s = "mpi4py thinks I am %d of %d on %s, NEURON thinks I am %d of %d\n"
                processorname = MPI.Get_processor_name()
                self.comm = MPI.COMM_WORLD
                if self.id == 0:
                    print s % (self.comm.rank, self.comm.size, processorname, self.id, self.nhost)
            else:
                s = "NEURON thinks I am %d of %d\n"
                if self.id == 0:
                    print s % (self.id, self.nhost)
            self.barrier()
        else:
            self.id = 0
            self.nhost = 1

        self.do_run = do_run
        self.first_run = True

        self.set_numcells() # Build the portion of cells on this host.

        self.pickle_prefix = pickle_prefix

        # plot options
        self.ymax = 0
        self.ax = None
        self.linewidth = 1.5
        self.color_vec = None
        self.alpha = 0.8
        self.method_interpol = np.array(['bin','syn'])
        self.dumpsave = 1
        self.called_syn_out_all = False
        self.no_fmean=False

        # --- synapse defaults, one entry per cell type --------------------
        self.tau1_ex=[0*ms]*self.n_celltypes
        self.tau2_ex=[10*ms]*self.n_celltypes
        self.tau1_inh=[0*ms]*self.n_celltypes
        self.tau2_inh=[100*ms]*self.n_celltypes

        self.n_syn_ex = [0]*self.n_celltypes
        self.g_syn_ex = [1]*self.n_celltypes
        self.g_syn_ex_s = [0]*self.n_celltypes
        self.mglufac_ex = [1,0]

        self.noise_syn = [0]*self.n_celltypes
        self.noise_syn_tau = [0*ms]*self.n_celltypes
        self.noise_syn_inh = [0]*self.n_celltypes
        self.noise_syn_tau_inh = [0*ms]*self.n_celltypes
        self.noise_a = [1e9]*self.n_celltypes
        self.noise_a_inh = [1e9]*self.n_celltypes

        self.inh_hold = [0]*self.n_celltypes
        self.n_syn_inh = [0]*self.n_celltypes
        self.g_syn_inh = [1]*self.n_celltypes
        self.g_syn_inh_s = [0]*self.n_celltypes

        self.intr_hold = [0]*self.n_celltypes
        self.n_syn_intr = [0]*self.n_celltypes
        self.g_syn_intr = [0]*self.n_celltypes

        self.syn_max_mf = [1]*self.n_celltypes # possible mossy fibres per synapse
        self.syn_max_inh = [1]*self.n_celltypes # possible Golgi cells per synapse
        self.syn_max_intr = [1]*self.n_celltypes # possible Intruding cells per synapse

        # --- misc run configuration ---------------------------------------
        self.seed = 50
        self.force_run = False
        self.give_psd = False
        self.do_if = True

        self.fluct_g_e0 = []
        self.fluct_g_i0 = []
        self.fluct_std_e = []
        self.fluct_std_i = []
        self.fluct_tau_e = []
        self.fluct_tau_i = []

        self.adjinh = True # adjust inhibition to get CFo instead of g_ex
        self.adjfinh = True # adjust frequency of inhibition to get CFo instead of g_ex

        self.syn_ex_dist = []
        self.syn_inh_dist = []

        self.stdp_used = False
        self.xmax = 20

        self.use_multisplit = False
        self.use_local_dt = False
        self.simstep = 0
        self.plot_train = True
        self.inh_delay = 0 # in ms
        self.plot_input = True
        self.delay_baseline = 8

        self.tstop_if = 1
        self.gsyn_in_fac = []

        self.netcons = [] # keeping track of!
        self.nclist = []
        self.ST_stims = []
        self.PF_stims = []

        self.data_dir = "./data"
        self.minimal_dir = False
def set_init(self, ihold, ihold_sigma, amp, amod):
# important for all methods:
if type(ihold) is not list: ihold = [ihold] #convert to list if it is not given as one
self.ihold = ihold
self.ihold_orig = ihold
if type(amp) is not list: amp = [amp]
if len(amp) < self.n_celltypes: amp = [amp[0]]*self.n_celltypes
self.amp = amp
if type(amod) is not list: amod = [amod]*self.n_celltypes
self.amod = amod # RUN self.set_i()
self.ihold_sigma = ihold_sigma
def barrier(self):
if self.use_mpi:
if self.use_pc == True:
self.pc.barrier()
else:
self.comm.Barrier()
def broadcast(self, vec, root = 0, fast = False):
if self.use_mpi:
if self.use_pc:
if fast:
hvec = h.Vector(vec)
v = self.pc.broadcast(hvec,root)
vec = np.array(hvec)
else:
sendlist = [None]*self.nhost
if self.id == root:
for i in range(self.nhost):
sendlist[i] = vec
getlist = self.pc.py_alltoall(sendlist)
vec = getlist[root]
else:
#vec = np.array(vec, dtype=np.float64)
#self.comm.Bcast([vec, MPI.DOUBLE])
vec = self.comm.bcast(vec, root=0)
return vec
def set_numcells(self, N = []):
"""
Create, layout, and connect N cells.
"""
self.set_gids(N)
self.create_cells()
#self.syn_output() # generate synaptic "output" in neuron
#self.connect_cells()
    def set_gids(self, N = []):
        """Set the gidlist on this host.

        Round-robin counting: each host has an id from 0 to pc.nhost()-1.
        Example:
            if N = 5 cells and nhost() = 3
            node id() = 0 will get cells [0, 3]
            node id() = 1 will get cells [1, 4]
            node id() = 2 will get cells [2]

        Also fills self.n_borders (cumulative cell counts marking where each
        cell type's gid range begins) and self.global_gidlist (all gids per
        cell type, across every host).
        """
        self.gidlist = []

        if N == []:
            N = self.N

        # borders where another celltype begins
        self.global_gidlist = []
        self.n_borders = [0]
        for l in range(1,self.n_celltypes+1):
            self.n_borders.append(sum(N[0:l]))
            self.global_gidlist.append(range(self.n_borders[-2], self.n_borders[-1]))

        for n in range(self.n_celltypes): # create list in list
            self.gidlist.append([])

        for i in range(int(self.id), sum(N), int(self.nhost)): # round-robin over all cells
            # first border strictly greater than i marks the next cell type,
            # so the owning cell type is that index minus one
            n = np.where((np.array(self.n_borders)-i)>0)[0][0]-1 # find out what cell type this is
            self.gidlist[n].append(i) # put in specific gidlist for that celltype

        self.gid_count = self.gid_count + sum(N)

        if self.id == 0: print "nodeid:" , self.id , ", gidlist:" , self.gidlist , ", total gids:" , len(self.global_gidlist) , ", sum(N):" , sum(N) # check gids of node
    def del_cells(self):
        """Drop all cell objects on this host and clear the gid registry."""
        if self.cells != []:
            for n in range(self.n_celltypes):
                for m in self.cells[n]:
                    print "deleting cell", m
                    # NOTE(review): `del m` only unbinds the loop variable;
                    # the cell itself is freed when self.cells is reset below.
                    del m
            del self.cells
            self.cells = []
        # forget all gid-to-node associations so cells can be re-created
        if self.use_mpi: self.pc.gid_clear()
    def create_cells(self):
        """Create cell objects on this host.

        For every gid assigned to this host, exec's the per-celltype import
        and constructor strings (self.cellimport / self.cell_exe), registers
        the cell with the ParallelContext, and wires up spike recording.

        NOTE(review): the constructor strings are executed with `exec`; this
        is only safe because they are built internally -- never feed
        untrusted input into cell_exe/cellimport.
        """
        if self.do_run:
            self.del_cells()  # start from a clean slate

            if self.id == 0: print "creating cells"

            for n in range(self.n_celltypes):
                self.cells.append([]) # create list in list
                #print self.cellimport[n]
                exec self.cellimport[n]
                #print self.gidlist

                for i in self.gidlist[n]:
                    if (self.celltype[n] == "IfCell") or (self.celltype[n] == "Grc"):
                        # splice the gid into the constructor call so the cell
                        # knows its own global id
                        if self.cell_exe[n][-2] == "(":
                            exec self.cell_exe[n][0:-1] + "gid=" + str(i) + ")"
                        else:
                            exec self.cell_exe[n][0:-1] + ", gid=" + str(i) + ")"
                    else:
                        # constructor has no gid parameter; attach it afterwards
                        exec self.cell_exe[n]
                        cell.gid = i

                    self.cells[n].append(cell) # add to (local) list

                    if self.use_mpi:
                        #### Tell this host it has this gid; gids can be any
                        #### integer, they just need to be unique.
                        self.pc.set_gid2node(i, int(self.id))
                        self.pc.cell(i, cell.nc_spike) # Associate the cell with this host and gid
                        #### Record spikes of this cell into the shared vectors
                        self.pc.spike_record(i, self.t_vec[n], self.id_vec[n])
                    else:
                        # serial run: one spike-time vector per cell
                        self.t_vec[n].append(h.Vector())
                        cell.nc_spike.record(self.t_vec[n][-1])
    def connect_cells(self, conntype=[], stdp=[], tend=1e9):
        """Wire up synaptic projections between cell populations.

        :param conntype: list of dicts, one per projection, with keys
            'type' ('gogr', 'grgo', 'grgom', 'grstl', 'e2inh', 'e2ex'),
            'conv' (convergence: source cells per target cell),
            'src'/'tgt' (cell-type indices), 'w' (weight, or weight array),
            'var' (relative weight jitter), 'tau1'/'tau2' (synaptic time
            constants), and optionally 'mgr2'/'mgr2_var' (mGluR factor) and
            'e_inh'/'e_ex' (reversal potentials, defaults -65 / 0).
        :param stdp: optional list of dicts ('wmax', 'taupre', 'taupost',
            'apre', 'apost') parallel to *conntype*; all-zero entries
            disable plasticity.
        :param tend: end time forwarded to create_synapses for STDP synapses.
        """
        if self.do_run:

            # copy so the caller's lists are not mutated
            stdp = stdp[:]
            conntype = conntype[:]

            if len(stdp) == 0:
                # no plasticity requested: pad with inert STDP entries
                for i in conntype:
                    stdp.append({'wmax':0, 'taupre':0, 'taupost':0, 'apre':0, 'apost':0})
            else:
                self.stdp_used = True

            for i, conn in enumerate(conntype):

                # unpack the projection description
                typ = conn['type']
                conv = conn['conv']
                src = conn['src']
                tgt = conn['tgt']
                w0 = conn['w']
                var = conn['var']
                tau1 = conn['tau1']
                tau2 = conn['tau2']

                if 'mgr2' in conn.keys():
                    mgr2 = conn['mgr2']
                    mgr2_var = conn['mgr2_var']
                else:
                    mgr2 = 0
                    mgr2_var = 0

                if 'e_inh' in conn.keys():
                    e_inh = conn['e_inh']
                else:
                    e_inh = -65

                if 'e_ex' in conn.keys():
                    e_ex = conn['e_ex']
                else:
                    e_ex = 0

                wmax = stdp[i]['wmax']
                taupre = stdp[i]['taupre']
                taupost = stdp[i]['taupost']
                apre = stdp[i]['apre']
                apost = stdp[i]['apost']

                # Connect conv cells of celltype src to every cell of celltype tgt
                for ni, i in enumerate(self.cells[tgt]):

                    # deterministic per-target source selection
                    rnd.seed(i.gid*10*self.seed)
                    if conv >= len(self.global_gidlist[src]):
                        gids = self.global_gidlist[src]
                        if self.id == 0: print "more or equal conv to len(self.global_gidlist[src])"
                    else:
                        gids = rnd.sample(self.global_gidlist[src],conv)
                        if self.id == 0: print conn['type'], ":", ni, ":", gids[0], "\n"

                    for ng, g in enumerate(gids):

                        # deterministic per-source weight jitter
                        np.random.seed(g*12)
                        #np.random.seed(int(g%10+1)*12)

                        # `shape` presumably comes from a wildcard numpy/pylab
                        # import at file level -- TODO confirm.
                        if len(shape(w0))>0: # array is given
                            print "w array is given"
                            # NOTE(review): if len(w0[ng]) != self.N[0], `w`
                            # stays unbound and the code below raises
                            # NameError -- confirm intended precondition.
                            if len(w0[ng]) == self.N[0]:
                                w = w0[ng][ni]
                        elif (var > 0) and (w0>0):
                            w = np.random.normal(w0, w0*var, 1).clip(min=0)
                        else:
                            w = w0

                        if (mgr2_var > 0) and (mgr2>0):
                            mg = np.random.normal(mgr2, mgr2*mgr2_var, 1).clip(min=0)
                        else:
                            mg = mgr2

                        #print conn['type'], ":", i.gid, ":", g, ", w:", w, "\n"

                        if self.celltype[tgt] == 'IfCell':

                            if typ == 'gogr':
                                i.whatami = "grc"
                                i.synlist_inh.append(Synapse('goc', i, i.soma, nrel=0, record_all=0, weight_gmax=w))
                                i0 = int(len(i.synlist_inh)-1)
                                i.nc_inh.append(self.pc.gid_connect(g, i.synlist_inh[i0].input))
                                i.nc_inh[-1].delay = 1
                                i.nc_inh[-1].weight[0] = 1

                            if typ == 'grgo':
                                i.whatami = "goc"
                                i.synlist.append(Synapse('grc', i, i.soma, syntype = 'D', nrel=0, record_all=0, weight_gmax=w))
                                e0 = int(len(i.synlist)-1)
                                i.nc.append(self.pc.gid_connect(g, i.synlist[e0].input))
                                i.nc[-1].delay = 1
                                i.nc[-1].weight[0] = 1

                            if typ == 'grgom':
                                i.whatami = "goc"
                                i.synlist.append(Synapse('grc', i, i.soma, syntype = 'DM', nrel=0, record_all=0, weight_gmax=w, mglufac = mg))
                                e0 = int(len(i.synlist)-1)
                                i.nc.append(self.pc.gid_connect(g, i.synlist[e0].input))
                                i.nc[-1].delay = 1
                                i.nc[-1].weight[0] = 1

                            if typ == 'e2inh':
                                # generic inhibitory Exp2 synapse; with STDP the
                                # NetCon weight is fixed at 1 and the mechanism
                                # scales the conductance itself
                                i.create_synapses(n_inh=1, tau1_inh=tau1, tau2_inh=tau2, e_inh=e_inh, w = w, wmax = wmax, taupre = taupre, taupost = taupost, apre = apre, apost = apost, tend=tend)
                                i0 = len(i.synlist_inh)-1
                                if self.use_mpi:
                                    if wmax == 0:
                                        i.pconnect_target(self.pc, source=g, target=i0, syntype='inh', weight=w, delay=1)
                                    else:
                                        i.pconnect_target(self.pc, source=g, target=i0, syntype='inh', weight=1, delay=1)
                                else:
                                    if wmax == 0:
                                        i.nc_inh.append(self.cells[1][g-self.N[0]].connect_target(target=i.synlist_inh[i0], weight=w, delay=1))
                                    else:
                                        i.nc_inh.append(self.cells[1][g-self.N[0]].connect_target(target=i.synlist_inh[i0], weight=1, delay=1))

                            if typ == 'e2ex':
                                # generic excitatory Exp2 synapse (same weight
                                # convention as e2inh above)
                                i.create_synapses(n_ex = 1, tau1 = tau1, tau2 = tau2, e_ex=e_ex, w = w, wmax = wmax, taupre = taupre, taupost = taupost, apre = apre, apost = apost, tend=tend)
                                e0 = len(i.synlist)-1
                                if self.use_mpi:
                                    if wmax == 0:
                                        i.pconnect_target(self.pc, source=g, target=e0, syntype='ex', weight=w, delay=1)
                                    else:
                                        i.pconnect_target(self.pc, source=g, target=e0, syntype='ex', weight=1, delay=1)
                                else:
                                    if wmax == 0:
                                        i.nc.append(self.cells[0][g].connect_target(target=i.synlist[e0], weight=w, delay=1))
                                    else:
                                        i.nc.append(self.cells[0][g].connect_target(target=i.synlist[e0], weight=1, delay=1))

                        else: # No IfCell

                            if typ == 'gogr':
                                i.createsyn(ngoc = 1, weight_gmax=w) # multiplication factor
                                i0 = len(i.GOC_L)-1 # get number of current synapse!
                                i.pconnect(self.pc,g,i0,'goc')

                            if typ == 'grgo':
                                i.createsyn(ngrc = 1, weight_gmax=w) # multiplication factor
                                i0 = len(i.GRC_L)-1 # get number of current synapse!
                                i.pconnect(self.pc,g,i0,'grc',conduction_speed=0,grc_positions=[1])

                            if typ == 'grgom':
                                #print w, mg
                                i.createsyn(ngrcm = 1, weight_gmax=w, mglufac = mg) # multiplication factor
                                i0 = len(i.GRC_L)-1 # get number of current synapse!
                                i.pconnect(self.pc,g,i0,'grc',conduction_speed=0,grc_positions=[1])

                            if typ == 'grstl':
                                i.createsyn(ngrc = 1, weight_gmax=w) # multiplication factor
                                i0 = len(i.GRC_L)-1 # get number of current synapse!
                                i.pconnect(self.pc,g,i0,'grc',conduction_speed=0,grc_positions=[1])

                            if 'e2' in typ:

                                if 'inh' in typ:
                                    Erev = -65
                                elif 'ex' in typ:
                                    Erev = 0

                                # choose the point-process flavour
                                if tau1 == 0:
                                    syn = h.ExpSyn(i.soma(0.5))
                                    syn.tau = tau2/ms
                                else:
                                    if wmax == 0:
                                        syn = h.Exp2Syn(i.soma(0.5))
                                        syn.tau1 = tau1/ms
                                        syn.tau2 = tau2/ms
                                    else: # STDP
                                        syn = h.stdpE2S(i.soma(0.5))
                                        syn.tau1 = tau1/ms
                                        syn.tau2 = tau2/ms
                                        syn.on = 1
                                        syn.thresh = -20
                                        syn.wmax = wmax
                                        syn.w = w
                                        syn.taupre = taupre/ms
                                        syn.taupost = taupost/ms
                                        syn.apre = apre
                                        syn.apost = apost

                                syn.e = Erev/mV

                                if self.celltype[tgt] == 'Grc':
                                    i.GOC_L.append(syn)
                                    i0 = int(len(i.GOC_L)-1) # get number of current synapse!
                                    i.gocncpc.append(self.pc.gid_connect(g, i.GOC_L[i0]))
                                    i.gocncpc[-1].delay = 1
                                    if wmax == 0:
                                        i.gocncpc[-1].weight[0] = w
                                    else:
                                        i.gocncpc[-1].weight[0] = 1

                                elif self.celltype[tgt] == 'Goc':
                                    i.GRC_L.append(syn)
                                    e0 = int(len(i.GRC_L)-1) # get number of current synapse!
                                    i.pfncpc.append(self.pc.gid_connect(g, i.GRC_L[e0]))
                                    i.pfncpc[-1].delay = 1
                                    # NOTE(review): this assignment is
                                    # immediately overwritten by the
                                    # if/else below -- likely dead code.
                                    i.pfncpc[-1].weight[0] = w
                                    if wmax == 0:
                                        i.pfncpc[-1].weight[0] = w
                                    else:
                                        i.pfncpc[-1].weight[0] = 1

        #self.rec_s1 = h.Vector()
        #self.rec_s1.record(self.cells[0][0].synlist_inh[0]._ref_g)
        #self.rec_s2 = h.Vector()
        #self.rec_s2.record(self.cells[1][0].synlist_inh[0]._ref_g)
    def syn_output(self):
        """
        Sum the spike output of the primary cell type into one passive target.

        On rank 0 only: creates a PassiveCell with a single synapse
        (tau1/tau2 from self.syn_tau1/self.syn_tau2; equal taus give an
        alpha synapse) and connects every cell of celltype
        self.a_celltype[0] to it with weight 1.  The synaptic conductance
        is recorded into self.rec_g, giving a population-summed response.
        Consumes one new gid from self.gid_count for the target.
        """
        if self.id == 0: # create target cell
            tgt_gid = self.gid_count
            self.gid_count = self.gid_count + 1
            # Synaptic integrated response
            self.rec_g = h.Vector()
            self.passive_target = PassiveCell()
            if self.use_mpi: self.pc.set_gid2node(tgt_gid, 0) # Tell this host it has this gid
            syn = self.passive_target.create_synapses(tau1 = self.syn_tau1, tau2 = self.syn_tau2) # if tau1=tau2: alpha synapse!
            for i in range(self.n_borders[self.a_celltype[0]],self.n_borders[self.a_celltype[0]+1]): # take all cells, corresponding to self.a_celltype, not just the ones in self.gidlist:
                src_gid = i
                if self.use_mpi:
                    nc = self.pc.gid_connect(src_gid, syn)
                    nc.weight[0] = 1
                    nc.delay = self.nc_delay/ms #0.05 # MUST be larger than dt!!!
                else:
                    nc = self.cells[self.a_celltype[0]][src_gid].connect_target(target=syn, weight=1, delay=self.nc_delay/ms)
                self.nclist.append(nc)
            self.rec_g.record(syn._ref_g)
def syn_out_all(self, tau1 = 1*ms, tau2 = 30*ms):
if self.do_run:
for n in range(self.n_celltypes):
for i, gid in enumerate(self.gidlist[n]):
self.cells[n][i].start_record(tau1 = tau1/ms, tau2 = tau2/ms)
self.called_syn_out_all = True
    def get_i(self, a, n, do_plot = True):
        """
        Convert firing rate(s) a into injected current(s) for celltype n
        via the cell's i/f curve.

        The i/f curve is computed once per unique cell-constructor string
        (md5 of self.cell_exe[n], any ", sigma..." suffix stripped) and
        cached as a gzipped pickle under self.data_dir.  The cache is
        regenerated when missing or when self.force_run is True.  Only
        rank 0 touches the file system and does the interpolation; the
        result is broadcast to all ranks.

        Returns the interpolated current(s) from if_extrap (same length
        as a) on every rank.
        """
        import md5
        m = md5.new()
        # Strip a trailing ", sigma..." argument so the cache key depends
        # only on the deterministic part of the constructor call.
        if ", sigma" in self.cell_exe[n]:
            cell_exe_new = self.cell_exe[n].split(", sigma")[0] + ")"
        else:
            cell_exe_new = self.cell_exe[n]
        m.update(cell_exe_new)
        filename = self.data_dir + '/if_' + self.celltype[n] + '_' + m.hexdigest() + '.p'
        #print filename
        # Only rank 0 checks for the cached file; broadcast the answer.
        if self.id == 0:
            is_there = os.path.isfile(filename)
        else:
            is_there = None
        is_there = self.broadcast(is_there)
        if (is_there is not True) or (self.force_run is True): # run i/f estimation
            if self.id == 0: print '- running i/f estimation for ', self.celltype[n], ' id: ' , m.hexdigest()
            # Build the cell from its import/constructor strings (defines `cell`).
            exec self.cellimport[n]
            exec cell_exe_new
            sim = Stimulation(cell, temperature = self.temperature, use_multisplit = self.use_multisplit)
            sim.spikes_from_neuron = False
            sim.celltype = self.celltype[n]
            current_vector, freq_vector, freq_onset_vector = sim.get_if(istart = self.istart, istop = self.istop, di = self.di, tstop = self.tstop_if)
            # Drop references so NEURON state is released before the next run.
            sim = None
            cell = None
            if self.id == 0:
                if do_plot:
                    plt.figure(99)
                    plt.plot(current_vector, freq_vector, 'r*-')
                    plt.plot(current_vector, freq_onset_vector, 'b*-')
                    plt.savefig("./figs/dump/latest_if_" + self.celltype[n] + ".pdf", dpi = 300) # save it
                    plt.clf()
                    #plt.show()
                ifv = {'i':current_vector,'f':freq_vector}
                print ifv
                pickle.dump(ifv, gzip.GzipFile(filename, "wb" ))
            self.barrier()
        else:
            if self.id == 0:
                ifv = pickle.load(gzip.GzipFile(filename, "rb" ))
                #print ifv
            self.barrier()
        if self.id == 0:
            f = ifv.get('f')
            i = ifv.get('i')
            # Remove NaN rate entries before interpolating the i/f curve.
            i = i[~isnan(f)]
            f = f[~isnan(f)]
            iin = if_extrap(a, f, i)
        else:
            iin = [0]
        iin = self.broadcast(iin, root=0, fast = True)
        self.barrier()
        return iin
    def set_i(self, ihold = [0]):
        """
        Resolve per-celltype holding levels into holding currents.

        If self.give_freq is set, each ihold[n] is treated as a target rate
        and converted to a current via get_i (the i/f curve); otherwise it
        is used as a current directly.  Also derives, per cell type:
        - self.amp[n] from self.amod[n] (relative modulation depth), and
        - self.fluct_s[n] from self.anoise[n] (relative noise depth,
          halved so that noise spans roughly +-2*std).
        The original request is kept in self.ihold_orig.  Returns the list
        of holding currents.
        """
        ihold = list(ihold)
        self.ihold_orig = list(ihold)
        self.barrier() # wait for other nodes
        # Ihold given as frequency, convert to current
        if ((self.give_freq)):
            ihold0 = [[] for _ in range(self.n_celltypes)]
            for n in range(self.n_celltypes):
                a = np.array([ihold[n]])
                #print "a:", a
                iin = self.get_i(a, n)
                #print "iin:", iin
                ihold0[n] = iin[0]
            if self.id == 0: print '- ihold: ', ihold, 'Hz, => ihold: ', ihold0, 'nA'
        # Modulation depth given, not always applied to current!
        for n in range(self.n_celltypes):
            if self.amod[n] is not None:
                if self.give_freq:
                    # Apply to amplitude: modulated rate is converted through
                    # the i/f curve, amplitude is the current difference.
                    a = np.array([ihold[n]]) + self.amod[n]*np.array([ihold[n]])
                    self.amp[n] = self.get_i(a, n) - ihold0[n]
                    if self.id == 0:
                        print '- amp: ihold: ', ihold[n], 'Hz , amod: ', self.amod[n], ', => amp: ', self.amp[n], 'nA (' #, self.get_i(a, n), ')'
                elif self.n_syn_ex[n] > 0:
                    # Synaptic drive: amplitude handled later per spike generator.
                    if self.id == 0:
                        print '- amp: ihold: ', ihold[n], 'Hz , amod: ', self.amod[n], ', => amp will be set for each spike generator'
                else:
                    self.amp[n] = self.amod[n] * ihold[n]
                    if self.id == 0:
                        print '- amp: ihold: ', ihold[n], 'nA , amod: ', self.amod[n], ', => amp: ', self.amp[n], 'nA'
            # Noise depth given, not always applied to current!
            if self.anoise[n] is not None:
                if (self.give_freq is True) or (self.n_syn_ex[n] > 0):
                    # Apply to amplitude:
                    a = np.array([ihold[n]]) + self.anoise[n]*np.array([ihold[n]])
                    self.fluct_s[n] = ((self.get_i(a, n) - ihold0[n]))/2. # adjust with /2 so that noise = +-2*std
                    if self.id == 0:
                        print '- noise: ihold: ', ihold[n], 'Hz , anoise: ', self.anoise[n], ', => fluct_s: ', self.fluct_s[n], 'nA'
                else:
                    self.fluct_s[n] = self.anoise[n] * ihold[n]
                    if self.id == 0:
                        print '- noise: ihold: ', ihold[n], 'nA , anoise: ', self.anoise[n], ', => fluct_s: ', self.fluct_s[n], 'nA'
        if self.give_freq is True:
            ihold = ihold0
        return ihold
def calc_fmean(self, t_vec, t_startstop):
#t_startstop[0] = 1
#t_startstop[1] = 5
f_cells_mean = 0
f_cells_cv = np.nan
f_cells_std = np.nan
if len(t_vec) > 0:
f_start_in = mlab.find(t_vec >= t_startstop[0]) # 1
f_stop_in = mlab.find(t_vec <= t_startstop[1]) # 5
if (len(f_start_in) > 0) & (len(f_stop_in) > 0):
f_start = f_start_in[0]
f_stop = f_stop_in[-1]+1
use_spikes = t_vec[f_start:f_stop]*1e3
if len(use_spikes) > 1:
s1 = signals.SpikeTrain(use_spikes)
isi = s1.isi()
f_cells_mean = s1.mean_rate() # use mean of single cells
f_cells_cv = np.std(isi)/np.mean(isi)
f_cells_std = np.std(isi)
#f_start_in = mlab.find(t_vec >= 1)
#f_stop_in = mlab.find(t_vec <= 2)
#if (len(f_start_in) > 0) & (len(f_stop_in) > 0):
# f_start = f_start_in[0]
# f_stop = f_stop_in[-1]+1
# use_spikes = t_vec[f_start:f_stop]*1e3
# if len(use_spikes) > 1:
# s1 = signals.SpikeTrain(use_spikes)
# isi = s1.isi()
# f_cells_cv = np.std(isi)/np.mean(isi)
return f_cells_mean, f_cells_cv, f_cells_std
    def get_fmean(self, t_all_vec_vecn, id_all_vec_vecn, t_startstop, gidlist, facborder = 3): # 1e9
        """
        Population rate statistics over all cells in gidlist.

        Per local cell: pulls its spike times out of the flat
        (t_all_vec_vecn, id_all_vec_vecn) arrays and computes mean rate,
        ISI CV and ISI std via calc_fmean, both in t_startstop and in a
        baseline window ending at self.delay_baseline.  Results are
        gathered across ranks; rank 0 computes the population means and
        flags cells whose rate exceeds fmean + facborder*fmstd (their
        gids are returned in gid_del; facborder >= 1e9 disables this).

        Returns (fmean, fmax, fmstd, fcvm, fstdm, gid_del,
        f_cells_mean_all, f_cells_cv_all, f_cells_std_all, fbase,
        f_cells_base_all); the aggregate values are only meaningful on
        rank 0.
        """
        f_cells_mean = zeros(len(gidlist))
        f_cells_base = zeros(len(gidlist))
        f_cells_std = nans(len(gidlist))
        f_cells_cv = nans(len(gidlist))
        f_cells_gid = nans(len(gidlist))
        fbase = np.nan
        fmean = np.nan
        fmax = np.nan
        fmstd = np.nan
        fcvm = np.nan
        fstdm = np.nan
        f_cells_mean_all = []
        f_cells_base_all = []
        f_cells_cv_all = []
        f_cells_std_all = []
        gid_del = np.array([])
        if self.no_fmean == False:
            if self.id == 0: print "- sorting for fmean"
            for i, l in enumerate(gidlist):
                # spike times belonging to cell gid l
                t_0_vec = t_all_vec_vecn[where(id_all_vec_vecn==l)]
                f_cells_mean[i], f_cells_cv[i], f_cells_std[i] = self.calc_fmean(t_0_vec, t_startstop)
                f_cells_base[i], _, _ = self.calc_fmean(t_0_vec, [self.delay_baseline-4,self.delay_baseline])
                f_cells_gid[i] = l
            if self.id == 0: print "- gather fmean"
            f_cells_mean_all = self.do_gather(f_cells_mean)
            f_cells_base_all = self.do_gather(f_cells_base)
            f_cells_std_all = self.do_gather(f_cells_std)
            f_cells_cv_all = self.do_gather(f_cells_cv)
            f_cells_gid_all = self.do_gather(f_cells_gid)
            if self.id == 0:
                #print f_cells_mean_all
                f_cells_mean_all = np.nan_to_num(f_cells_mean_all)
                fmean = mean(f_cells_mean_all) # compute mean of mean rate for all cells
                fmstd = std(f_cells_mean_all)
                fmax = max(f_cells_mean_all)
                f_cells_base_all = np.nan_to_num(f_cells_base_all)
                fbase = mean(f_cells_base_all) # compute mean of mean rate for all cells
                # CV/std means are taken over cells that actually spiked
                f_cells_cv_all = f_cells_cv_all[~np.isnan(f_cells_cv_all)]
                f_cells_std_all = f_cells_std_all[~np.isnan(f_cells_std_all)]
                fcvm = mean(f_cells_cv_all)
                fstdm = mean(f_cells_std_all)
                print "- get_fmean, fmean: ",fmean, "fmax: ",fmax, "Hz", "fmstd: ",fmstd, "Hz", "fcvm: ",fcvm, "fstdm: ",fstdm, "Hz" ,"fbase: ", fbase, "Hz"
                if facborder < 1e9:
                    # outlier cells: rate beyond facborder standard deviations
                    fborder = fmean + facborder*fmstd
                    i = mlab.find(f_cells_mean_all > fborder)
                    gid_del = f_cells_gid_all[i]
                    # f_cells_mean_all[i] = 0
                    # f_cells_cv_all[i] = np.nan
                    # f_cells_std_all[i] = np.nan
                    # fmean2 = mean(np.nan_to_num(f_cells_mean_all)) # compute mean of mean rate for all cells
                    # fmstd2 = std(np.nan_to_num(f_cells_mean_all))
                    # fmax2 = max(np.nan_to_num(f_cells_mean_all))
                    # fcvm2 = mean(f_cells_cv_all[~np.isnan(f_cells_cv_all)])
                    # fstdm2 = mean(f_cells_std_all[~np.isnan(f_cells_std_all)])
                    # print "- after facborder: get_fmean, fmean: ",fmean2, "fmax: ",fmax2, "Hz", "fmstd: ",fmstd2, "Hz", "fcvm: ",fcvm2, "fstdm: ",fstdm2, "Hz, gid_del: ", gid_del
        return fmean, fmax, fmstd, fcvm, fstdm, gid_del, f_cells_mean_all, f_cells_cv_all, f_cells_std_all, fbase, f_cells_base_all
    def connect_fluct(self):
        """
        Create fluctuating input onto every cell.

        Attaches an Ifluct2 current-noise point process (mean self.fluct_m,
        std self.fluct_s[n], correlation time self.fluct_tau) to the soma
        of every local cell.  Each process gets its own h.Random stream
        seeded from the cell's gid (MCellRan4 lowindex gid+1) so the noise
        is reproducible per cell and independent across cells.  Replaces
        any fluct/noise objects from a previous call.
        """
        if self.do_run:
            # drop noise sources created by an earlier call
            for m in self.flucts:
                del m
            del self.flucts
            for m in self.noises:
                del m
            del self.noises
            self.flucts = []
            self.noises = []
            for n in range(self.n_celltypes):
                for i, gid in enumerate(self.gidlist[n]): # for every cell in the gidlist
                    #h.mcell_ran4_init(gid)
                    noiseRandObj = h.Random()  # provides NOISE with random stream
                    self.noises.append(noiseRandObj)  # has to be set here not inside the nmodl function!!
                    # print str(gid) + ": " + str(noiseRandObj.normal(0,1))
                    fluct = h.Ifluct2(self.cells[n][i].soma(0.5))
                    fluct.m = self.fluct_m/nA      # [nA]
                    fluct.s = self.fluct_s[n]/nA   # [nA]
                    fluct.tau = self.fluct_tau/ms  # [ms]
                    self.flucts.append(fluct)      # add to list
                    self.flucts[-1].noiseFromRandom(self.noises[-1])  # connect random generator!
                    self.noises[-1].MCellRan4(1, gid+1)  # set lowindex to gid+1, set highindex to > 0
                    self.noises[-1].normal(0,1)
    def connect_gfluct(self, E_e=0, E_i=-65):
        """
        Create fluctuating conductance input onto every cell.

        Attaches a Gfluct3 point process (Ornstein-Uhlenbeck-style
        excitatory/inhibitory conductances with reversal potentials E_e and
        E_i) to the soma of every local cell, parameterized from
        self.fluct_g_e0/g_i0/std_e/std_i and self.fluct_tau_e/tau_i.
        self.fluct_g_i0[n] may be a per-cell array (matched against
        global_gidlist) or a scalar that is broadcast to all cells.  Each
        process gets its own gid-seeded h.Random stream, as in
        connect_fluct.  Replaces objects from a previous call.
        """
        if self.do_run:
            # drop noise sources created by an earlier call
            for m in self.flucts:
                del m
            del self.flucts
            for m in self.noises:
                del m
            del self.noises
            self.flucts = []
            self.noises = []
            for n in range(self.n_celltypes):
                fluct_g_i0_n = self.fluct_g_i0[n]
                if type(fluct_g_i0_n) is not ndarray: fluct_g_i0_n = np.array([fluct_g_i0_n])
                # one g_i0 per cell, or a single value replicated for all
                if len(fluct_g_i0_n) == len(self.global_gidlist[n]):
                    pass
                else:
                    fluct_g_i0_n = np.ones(int(len(self.global_gidlist[n])))*fluct_g_i0_n[0]
                    if self.id == 0: print "- single value in fluct_g_i0_n"
                #print fluct_g_i0_n
                for i, gid in enumerate(self.gidlist[n]): # for every cell in the gidlist
                    #h.mcell_ran4_init(gid)
                    noiseRandObj = h.Random()  # provides NOISE with random stream
                    self.noises.append(noiseRandObj)  # has to be set here not inside the nmodl function!!
                    # print str(gid) + ": " + str(noiseRandObj.normal(0,1))
                    fluct = h.Gfluct3(self.cells[n][i].soma(0.5))
                    fluct.E_e = E_e/mV # [mV]
                    fluct.E_i = E_i/mV # [mV]
                    fluct.g_e0 = self.fluct_g_e0[n]/uS # [uS]
                    fluct.g_i0 = fluct_g_i0_n[i]/uS # [uS]
                    fluct.std_e = self.fluct_std_e[n]/uS # [uS]
                    fluct.std_i = self.fluct_std_i[n]/uS # [uS]
                    fluct.tau_e = self.fluct_tau_e/ms #tau_e/ms # [ms]
                    fluct.tau_i = self.fluct_tau_i/ms #tau_i/ms # [ms]
                    self.flucts.append(fluct) # add to list
                    self.flucts[-1].noiseFromRandom(self.noises[-1]) # connect random generator!
                    self.noises[-1].MCellRan4(1, gid+1)  # set lowindex to gid+1, set highindex to > 0
                    self.noises[-1].normal(0,1)
def connect_synfluct(self, PF_BG_rate=6, PF_BG_cv=1, STL_BG_rate=20, STL_BG_cv=1):
"""
Create fluctuating synaptic input onto every cell.
"""
if self.do_run:
for m in self.ST_stims:
del m
del self.ST_stims
for m in self.PF_stims:
del m
del self.PF_stims
self.ST_stims = []
self.PF_stims = []
for n in range(self.n_celltypes):
for i, gid in enumerate(self.gidlist[n]): # for every cell in the gidlist
PF_syn_list = self.cells[n][i].createsyn_PF()
for d in PF_syn_list:
d.input.newnetstim.number = 1e9
d.input.newnetstim.noise = PF_BG_cv
d.input.newnetstim.interval = 1000.0 / PF_BG_rate
d.input.newnetstim.start = 0
self.PF_stims.append(PF_syn_list)
ST_stim_list = self.cells[n][i].createsyn_ST(record_all=0)
for d in ST_stim_list:
d.newnetstim.number = 1e9
d.newnetstim.noise = STL_BG_cv
d.newnetstim.interval = 1000.0 / STL_BG_rate
d.newnetstim.start = 0
self.ST_stims.append(ST_stim_list)
if self.id == 0: print "- PF and ST stimulation added."
    def set_IStim(self, ihold = None, ihold_sigma = None, random_start = True, tstart_offset = 0):
        """
        Add (random) ihold for each cell and offset!

        Installs a constant holding current (IClamp, dur = 1e9) on every
        local cell.  ihold defaults to self.ihold and, when nonzero for the
        primary cell type, is first converted via set_i.  Per cell the
        amplitude is drawn from a gid-seeded normal distribution
        (ihold_sigma[n] relative width, clipped at 0); alternatively, when
        self.CF_var is set, a target rate is drawn within the given bounds
        and converted through the i/f curve.  If random_start is True each
        cell is additionally clamped to -80 mV by an SEClamp for a random
        time in [tstart_offset, tstart_offset+0.5] to desynchronize onset.
        Drawn currents are stored in self.i_holdrs; requested values in
        self.i_holds.
        """
        if self.do_run:
            # if not given, use the one in self
            if ihold == None:
                ihold = self.ihold
            if ihold_sigma == None:
                ihold_sigma = self.ihold_sigma
            if ihold[self.a_celltype[0]] != 0:
                ihold = self.set_i(ihold)
            # drop clamps from a previous call
            for m in self.ic_holds:
                #m.destroy()
                del m
            del self.ic_holds
            for m in self.ic_starts:
                #m.destroy()
                del m
            del self.ic_starts
            for m in self.vc_starts:
                #m.destroy()
                del m
            del self.vc_starts
            self.ic_holds = []
            self.ic_starts = []
            self.vc_starts = []
            self.i_holdrs = []
            self.i_holds = ihold
            for n in range(self.n_celltypes):
                self.i_holdrs.append([])
                for i, gid in enumerate(self.gidlist[n]): # for every cell in the gidlist
                    # reproducible per-cell randomness
                    np.random.seed(gid*20)
                    tis = 1
                    if random_start == True:
                        # random start time
                        tstart = np.random.uniform(tstart_offset+0, tstart_offset+0.5)
                        #if self.id == 0: print "tstart:", tstart
                        vc_start = h.SEClamp(self.cells[n][i].soma(0.5))
                        vc_start.dur1 = tstart/ms
                        vc_start.amp1 = -80
                        self.vc_starts.append(vc_start)
                        tis = 0
                    else:
                        tis = 0
                    if ihold_sigma[n] != 0:
                        #print ihold_sigma[n], ihold[n]
                        ihold_r = np.random.normal(ihold[n], ihold[n]*ihold_sigma[n], 1).clip(min=0)
                        #ihold_r = np.random.uniform(ihold[n]*ihold_sigma[n], ihold[n])
                    elif self.CF_var is not False: # CF gets not adapted to current but final frequnecy!
                        # redraw until the rate falls inside [CF_var[n][0], CF_var[n][2]]
                        r_ok = False
                        while r_ok == False:
                            r_temp = np.random.normal(self.ihold_orig[n], self.CF_var[n][1], 1)
                            if (r_temp <= self.CF_var[n][2]) and (r_temp >= self.CF_var[n][0]): # check borders!
                                r_ok = True
                        #print r_temp
                        ihold_r = self.get_i(r_temp, n)
                        #print ihold_r
                        #if self.id == 0:
                        print "set self.CF_var", r_temp, ihold_r
                    else: # same ihold for all cells!
                        ihold_r = ihold[n]
                    self.i_holdrs[n].append(ihold_r)
                    if ihold_r != 0:
                        if hasattr(self.cells[n][i], 'input_vec'):
                            # distribute the hold current over the cell's input sites
                            ic_hold = []
                            for vec in self.cells[n][i].input_vec:
                                for inv in vec:
                                    #print ihold_r
                                    ic_hold.append(h.IClamp(inv(0.5)))
                                    ic_hold[-1].amp = self.cells[n][i].ifac * ihold_r / self.cells[n][i].n_input_spiny / nA
                                    ic_hold[-1].delay = tis/ms
                                    ic_hold[-1].dur = 1e9
                        else:
                            # holding current
                            ic_hold = h.IClamp(self.cells[n][i].soma(0.5))
                            ic_hold.delay = tis/ms
                            ic_hold.dur = 1e9
                            ic_hold.amp = ihold_r/nA
                        self.ic_holds.append(ic_hold)
            if self.id == 0: print "set_IStim finished. ihold: ", ihold, ", ihold_sigma: ", ihold_sigma
    def set_IStep(self, istep = [0], istep_sigma = [0], tstep = 5, tdur = 1e6, give_freq = True):
        """
        Add istep for each cell and offset!

        Installs a current step (IClamp) of duration tdur starting at
        tstep on every local cell.  When give_freq is True each istep[n]
        is interpreted as a target rate and converted to a current via
        get_i.  Negative steps are handled by converting the magnitude and
        restoring the sign afterwards.  If holding currents were set
        before (self.i_holdrs nonempty), the step is applied as a delta on
        top of the hold; istep[n] == 0 then cancels the cell's individual
        hold current.  Per-cell jitter uses a gid-seeded normal
        distribution with relative width istep_sigma[n].
        """
        if self.do_run:
            #for m in self.ic_steps:
            #    m.destroy()
            #    del m
            #del self.ic_steps
            #self.ic_steps = []
            istep = list(istep)
            neg = False
            for n in range(self.n_celltypes):
                if istep[n] < 0:
                    neg = True
                    istep[n] = abs(istep[n]) # make positive again
                if istep[n] != 0:
                    if give_freq is True:
                        a = np.array([istep[n]])
                        iin = self.get_i(a, n)[0]
                        if self.id == 0: print "celltype: ", n, " istep: ", istep[n], "Hz => ", iin, " nA"
                        istep[n] = iin
            for n in range(self.n_celltypes):
                for i, gid in enumerate(self.gidlist[n]): # for every cell in the gidlist
                    # reproducible per-cell randomness
                    np.random.seed(gid*30)
                    if self.i_holdrs == []:
                        if istep_sigma[n] != 0:
                            istep_r = np.random.normal(istep[n], istep[n]*istep_sigma[n], 1).clip(min=0)
                        else: # same ihold for all cells!
                            istep_r = istep[n]
                    else: # ihold has been set!
                        if istep_sigma[n] != 0:
                            istep_r = np.random.normal(istep[n]-self.i_holds[n], (istep[n]-self.i_holds[n])*istep_sigma[n], 1).clip(min=0) # delta now! put on top of hold!
                        else: # same ihold for all cells!
                            istep_r = istep[n]-self.i_holds[n] # delta now! put on top of hold!
                    if neg:
                        istep_r = -1*istep_r
                    if istep[n] == 0:
                        # zero request: cancel this cell's individual hold current
                        istep_r = -1*self.i_holdrs[n][i]
                        #print 'is:' + str(istep_r) + 'was:' + str(self.i_holdrs[n][i])
                    if istep_r != 0:
                        # step current
                        ic_step = h.IClamp(self.cells[n][i].soma(0.5))
                        ic_step.delay = tstep/ms
                        ic_step.dur = tdur/ms
                        ic_step.amp = istep_r/nA
                        self.ic_steps.append(ic_step)
            if self.id == 0: print "set_IStep finished. istep: ", istep, ", istep_sigma: ", istep_sigma
def set_IPlay(self, stimulus, t):
"""
Initializes values for current clamp to play a signal.
"""
if self.do_run:
for m in self.tvecs:
#m.destroy()
del m
del self.tvecs
for m in self.ivecs:
#m.destroy()
del m
del self.ivecs
for m in self.plays:
#m.destroy()
del m
del self.plays
self.tvecs = []
self.ivecs = []
self.plays = []
for i, gid in enumerate(self.gidlist[self.a_celltype[0]]): # for every cell in the gidlist
tvec = h.Vector(t/ms)
ivec = h.Vector(stimulus/nA)
play = h.IClamp(self.cells[self.a_celltype[0]][i].soma(0.5))
play.delay = 0
play.dur = 1e9
ivec.play(play._ref_amp, tvec, 1)
self.plays.append(play) # add to list
self.tvecs.append(tvec) # add to list
self.ivecs.append(ivec) # add to list
if self.id == 0: print "set_IPlay finished."
    def set_IPlay2(self, stimulus, t):
        """
        Initializes values for current clamp to play a signal.

        Like set_IPlay but supports multiple signals and all cell types in
        self.a_celltype: stimulus is a sequence of waveforms sharing time
        base t.  Cells exposing input_vec get one IClamp per input section,
        with the signal index chosen per location from self.syn_ex_dist
        and the amplitude scaled by ifac/n_input_spiny; other cells get a
        single somatic IClamp playing signal 0.  Amplitudes are scaled by
        self.factor_celltype[j].  Previously created vectors/clamps are
        released first.
        """
        if self.do_run:
            # release vectors/clamps from an earlier call
            for m in self.tvecs:
                #m.destroy()
                del m
            del self.tvecs
            for m in self.ivecs:
                #m.destroy()
                del m
            del self.ivecs
            for m in self.plays:
                #m.destroy()
                del m
            del self.plays
            self.tvecs = []
            self.ivecs = []
            self.plays = []
            for j in self.a_celltype:
                tvec = h.Vector(t/ms)
                ivec = []
                # one h.Vector per signal, pre-scaled for this cell type
                for s in stimulus:
                    if hasattr(self.cells[j][0], 'input_vec'):
                        ivec.append(h.Vector(self.factor_celltype[j] * self.cells[j][0].ifac * s / self.cells[j][0].n_input_spiny / nA))
                    else:
                        ivec.append(h.Vector(self.factor_celltype[j]*s/nA))
                self.tvecs.append(tvec) # add to list
                self.ivecs.append(ivec) # add to list
                for i, gid in enumerate(self.gidlist[j]): # for every cell in the gidlist
                    if hasattr(self.cells[j][i], 'input_vec'):
                        play = []
                        for iloc, vec in enumerate(self.cells[j][i].input_vec):
                            # signal index assigned to this input location
                            isig = self.syn_ex_dist[j][iloc]-1
                            #print isig
                            for inv in vec:
                                play.append(h.IClamp(inv(0.5)))
                                play[-1].delay = 0
                                play[-1].dur = 1e9
                                ivec[isig].play(play[-1]._ref_amp, tvec, 1)
                    else:
                        #fluctuating current
                        play = h.IClamp(self.cells[j][i].soma(0.5))
                        play.delay = 0
                        play.dur = 1e9
                        ivec[0].play(play._ref_amp, tvec, 1)
                    self.plays.append(play) # add to list
            if self.id == 0: print "set_IPlay2 finished."
    def set_IPlay3(self, stimulus, t, amp = None):
        """
        Initializes values for current clamp to play a signal.

        Like set_IPlay but with per-cell amplitude scaling: each cell of
        each type in self.a_celltype plays stimulus*amp[j] (amp None means
        amplitude 0) into a somatic IClamp.  self.factor_celltype[j] is
        either a scalar factor, or a (mean, sigma, invert_flag) triple:
        sigma > 0 draws a gid-seeded per-cell factor (clipped at 0), and
        invert_flag > 0 flips the factor's sign with 50% probability.
        Previously created vectors/clamps are released first.
        """
        if self.do_run:
            # release vectors/clamps from an earlier call
            for m in self.tvecs:
                #m.destroy()
                del m
            del self.tvecs
            for m in self.ivecs:
                #m.destroy()
                del m
            del self.ivecs
            for m in self.plays:
                #m.destroy()
                del m
            del self.plays
            self.tvecs = []
            self.ivecs = []
            self.plays = []
            for j in self.a_celltype:
                if amp is None:
                    amp0 = 0
                else:
                    amp0 = amp[j]
                tvec = h.Vector(t/ms)
                self.tvecs.append(tvec) # add to list
                for i, gid in enumerate(self.gidlist[j]): # for every cell in the gidlist
                    if isinstance(self.factor_celltype[j], ( int, long ) ):
                        ivec = h.Vector(self.factor_celltype[j]*(stimulus*amp0)/nA)
                    else:
                        # per-cell randomized factor; seed both RNGs from gid
                        np.random.seed(gid*40)
                        rnd.seed(gid*40)
                        if self.factor_celltype[j][1] > 0:
                            f = np.random.normal(self.factor_celltype[j][0], self.factor_celltype[j][1], 1).clip(min=0)
                        else:
                            f = self.factor_celltype[j][0]
                        if self.factor_celltype[j][2] > 0: # add inverted input with 50% probability, in future versions this will indicate the propability for -1 and 1
                            f = rnd.sample([-1,1],1)[0] * f
                            if self.id == 0: print "- inverted input with 50% probability:", f
                        if self.id == 0: print "- randomize play stimulus height"
                        ivec = h.Vector(f*(stimulus*amp0)/nA)
                    self.ivecs.append(ivec) # add to list
                    #fluctuating current
                    play = h.IClamp(self.cells[j][i].soma(0.5))
                    play.delay = 0
                    play.dur = 1e9
                    ivec.play(play._ref_amp, tvec, 1)
                    self.plays.append(play) # add to list
            if self.id == 0: print "set_IPlay3 finished."
def set_PulseStim(self, start_time=[100*ms], dur=[1500*ms], steadyf=[100*Hz], pulsef=[150*Hz], pulse_start=[500*ms], pulse_len=[500*ms], weight0=1, tau01=[1*ms], tau02=[20*ms], weight1=1, tau11=[0*ms], tau12=[1*ms], noise = 1):
if self.do_run:
modulation_vec = []
for n in range(self.n_celltypes):
t_input = np.arange(0, dur[n], self.dt) # create stimulus time vector has to be in ms!!
mod = np.concatenate(([np.zeros(round(start_time[n]/self.dt)), steadyf[n]*np.ones(round((pulse_start[n]-start_time[n])/self.dt)), pulsef[n]*np.ones(round(pulse_len[n]/self.dt)),steadyf[n]*np.ones(round((dur[n]-pulse_start[n]-pulse_len[n])/self.dt)) ]))
modulation = (t_input, mod)
#print shape(t_input), shape(mod), shape(modulation)
for i, gid in enumerate(self.gidlist[n]): # for every cell in the gidlist
if dur[n] > 0:
if self.celltype[n] == 'Grc':
nmf = 4
for j in range(nmf):
self.cells[n][i].createsyn(nmf = 1, ngoc = 0, weight = weight0)
e0 = len(self.cells[n][i].MF_L)-1 # get number of current synapse!
pulse_gid = int(self.gid_count + gid*1000 + j)
train = mod_spike_train(modulation, noise = noise, seed = pulse_gid)
self.setup_Play_train(train = train, input_gid = pulse_gid)
self.cells[n][i].pconnect(self.pc,pulse_gid,int(e0),'mf')
elif self.celltype[n] == 'Goc':
nmf = 53
for j in range(nmf):
self.cells[n][i].createsyn(nmf = 1, weight = weight1)
e0 = len(self.cells[n][i].MF_L)-1 # get number of current synapse!
pulse_gid = int(self.gid_count + gid*1000 + j)
train = mod_spike_train(modulation, noise = noise, seed = pulse_gid)
self.setup_Play_train(train = train, input_gid = pulse_gid)
self.cells[n][i].pconnect(self.pc,pulse_gid,int(e0),'mf')
elif self.celltype[n] == 'Goc_noloop':
ngrc = 100
for j in range(ngrc):
self.cells[n][i].createsyn(ngrc = 1, weight = weight0)
e0 = len(self.cells[n][i].GRC_L)-1 # get number of current synapse!
pulse_gid = int(self.gid_count + gid*1000 + j)
train = mod_spike_train(modulation, noise = noise, seed=pulse_gid)
self.setup_Play_train(train = train, input_gid = pulse_gid)
self.cells[n][i].pconnect(self.pc,pulse_gid,int(e0),'grc')
else:
pulse_gid = int(self.gid_count + gid*1000 + 100)
train = mod_spike_train(modulation, noise = noise, seed = pulse_gid)
self.trains.append(train)
setup_Play_train(train = train, input_gid = pulse_gid)
# NMDA
self.cells[n][i].create_synapses(n_ex=1, tau1=tau01[n], tau2=tau02[n])
e0 = len(self.cells[n][i].synlist)-1
weight=weight0[n]
np.random.seed(gid*60)
#weight = np.random.normal(weight, weight*0.5, 1).clip(min=0)
self.cells[n][i].pconnect_target(self.pc, source=pulse_gid, target=e0, syntype='ex', weight=weight, delay=1)
# AMPA
self.cells[n][i].create_synapses(n_ex=1, tau1=tau11[n], tau2=tau12[n])
e0 = len(self.cells[n][i].synlist)-1
weight=weight1[n]
np.random.seed(gid*60)
#weight = np.random.normal(weight, weight*0.5, 1).clip(min=0)
self.cells[n][i].pconnect_target(self.pc, source=pulse_gid, target=e0, syntype='ex', weight=weight, delay=1)
modulation = (t_input, mod) # mack to s!
modulation_vec.append(modulation)
return modulation_vec
def connect_Synapse(self, pulse_gid, nt, i, n, gid, j, syntype = "ex", nsyn=0):
if self.do_run:
if 'gsyn_in' in self.method_interpol:
if isinstance(self.factor_celltype[nt], ( int, long ) ):
f = self.factor_celltype[nt]
else:
f = self.factor_celltype[nt][0]
if syntype == "ex":
# each cell can receive different g_syn_ex !
if type(self.g_syn_ex[nt]) is ndarray:
if len(self.g_syn_ex[nt]) == len(self.global_gidlist[nt]):
w = self.g_syn_ex[nt][n]
else:
w = self.g_syn_ex[nt]
else:
w = self.g_syn_ex[nt]
seed = int(10000 + 10*gid + j)
np.random.seed(seed*41)
if self.g_syn_ex_s[nt] > 0:
w = np.random.normal(w, w*self.g_syn_ex_s[nt], 1).clip(min=0) # self.g_syn_ex_s[nt]
if self.celltype[nt] == 'Grc':
# delete old
if j == 0:
self.cells[nt][i].MF_L = []
self.cells[nt][i].mfncpc = []
if "gr" not in str(self.tau1_ex[nt]):
if "amfit" in str(self.tau1_ex[nt]):
syn = h.ExpZSyn(self.cells[nt][i].soma(0.5))
syn.tau1_ampa = 0.254
syn.tau2_ampa = 0.254
syn.tau3_ampa = 0.363
syn.tau4_ampa = 6.523
syn.f1_ampa = 8.8376e-05
syn.f2_ampa = 5.5257e-05
syn.f1_nmda = 0
elif "nmfit" in str(self.tau1_ex[nt]):
syn = h.ExpYSyn(self.cells[nt][i].soma(0.5))
syn.f1_ampa = 0
syn.f2_ampa = 0
syn.tau1_nmda = 1.902
syn.tau2_nmda = 82.032
syn.f1_nmda = 7.853857483005277e-05
elif "fit" in str(self.tau1_ex[nt]):
syn = h.ExpGrcSyn(self.cells[nt][i].soma(0.5))
syn.tau1_ampa = 0.254
syn.tau2_ampa = 0.254
syn.tau3_ampa = 0.363
syn.tau4_ampa = 6.523
syn.f1_ampa = 8.8376e-05
syn.f2_ampa = 5.5257e-05
syn.tau1_nmda = 1.902
syn.tau2_nmda = 82.032
syn.f1_nmda = 7.853857483005277e-05
else:
tau1 = self.tau1_ex[nt]
tau2 = self.tau2_ex[nt]
if tau1 == 0:
syn = h.ExpSyn(self.cells[nt][i].soma(0.5))
syn.tau = tau2/ms
else:
syn = h.Exp2Syn(self.cells[nt][i].soma(0.5))
syn.tau1 = tau1/ms
syn.tau2 = tau2/ms
syn.e = 0/mV
self.cells[nt][i].MF_L.append(syn)
e0 = len(self.cells[nt][i].MF_L)-1 # get number of current synapse!
syn_idx = int(e0)
source = int(pulse_gid)
self.cells[nt][i].mfncpc.append(self.pc.gid_connect(source, self.cells[nt][i].MF_L[syn_idx]))
self.cells[nt][i].mfncpc[-1].delay = 1
self.cells[nt][i].mfncpc[-1].weight[0] = w
if 'gsyn_in' in self.method_interpol:
self.record_syn.append(h.Vector())
self.record_syn[-1].record(self.cells[nt][i].MF_L[-1]._ref_g)
self.gsyn_in_fac.append(f)
else:
nrel = 0
if "stoch" in str(self.tau1_ex[nt]):
nrel = 4
self.cells[nt][i].createsyn(nmf = 1, ngoc = 0, weight_gmax = w, nrel=nrel)
if "ampa" in str(self.tau1_ex[nt]):
self.cells[nt][i].MF_L[-1].postsyns['NMDA'][0].gmax_factor = 0
if "nopre" in str(self.tau1_ex[nt]):
print "- no pre"
self.cells[nt][i].MF_L[-1].postsyns['AMPA'][0].tau_rec = 1e-9
self.cells[nt][i].MF_L[-1].postsyns['AMPA'][0].tau_facil = 1e-9
self.cells[nt][i].MF_L[-1].postsyns['AMPA'][0].tau_1 = 0
if "nostdampa" in str(self.tau1_ex[nt]):
self.cells[nt][i].MF_L[-1].postsyns['NMDA'][0].gmax_factor = 0
self.cells[nt][i].MF_L[-1].postsyns['AMPA'][0].tau_rec = 1e-9
self.cells[nt][i].MF_L[-1].postsyns['AMPA'][0].tau_facil = 1e-9
self.cells[nt][i].MF_L[-1].postsyns['AMPA'][0].tau_1 = 0
self.cells[nt][i].MF_L[-1].postsyns['AMPA'][0].r6FIX = 0
if "nostdnmda" in str(self.tau1_ex[nt]):
self.cells[nt][i].MF_L[-1].postsyns['AMPA'][0].gmax_factor = 0
self.cells[nt][i].MF_L[-1].postsyns['NMDA'][0].tau_rec = 1e-9
self.cells[nt][i].MF_L[-1].postsyns['NMDA'][0].tau_facil = 1e-9
self.cells[nt][i].MF_L[-1].postsyns['NMDA'][0].tau_1 = 0
self.cells[nt][i].MF_L[-1].postsyns['NMDA'][0].RdRate = 0
if "nmda" in str(self.tau1_ex[nt]):
self.cells[nt][i].MF_L[-1].postsyns['AMPA'][0].gmax_factor = 0
if "nopre" in str(self.tau1_ex[nt]):
self.cells[nt][i].MF_L[-1].postsyns['NMDA'][0].tau_rec = 1e-9
self.cells[nt][i].MF_L[-1].postsyns['NMDA'][0].tau_facil = 1e-9
self.cells[nt][i].MF_L[-1].postsyns['NMDA'][0].tau_1 = 0
if "nostdgr" in str(self.tau1_ex[nt]):
self.cells[nt][i].MF_L[-1].postsyns['AMPA'][0].r6FIX = 0 #1.12
self.cells[nt][i].MF_L[-1].postsyns['NMDA'][0].RdRate = 0 #12e-3
print "- no std"
if "nomggr" in str(self.tau1_ex[nt]):
self.cells[nt][i].MF_L[-1].postsyns['NMDA'][0].v0_block = -1e9
print "- no mg block"
e0 = len(self.cells[nt][i].MF_L)-1 # get number of current synapse!
self.cells[nt][i].pconnect(self.pc,pulse_gid,int(e0),'mf')
if 'gsyn_in' in self.method_interpol:
self.record_syn.append(h.Vector())
self.record_syn[-1].record(self.cells[nt][i].MF_L[-1].postsyns['AMPA'][0]._ref_g)
self.record_syn.append(h.Vector())
self.record_syn[-1].record(self.cells[nt][i].MF_L[-1].postsyns['NMDA'][0]._ref_g)
self.gsyn_in_fac.append(f)
self.gsyn_in_fac.append(f)
elif self.celltype[nt] == 'Goc':
# delete old
if j == 0:
self.cells[nt][i].MF_L = []
self.cells[nt][i].mfncpc = []
if "go" not in str(self.tau1_ex[nt]):
tau1 = self.tau1_ex[nt]
tau2 = self.tau2_ex[nt]
if tau1 == 0:
syn = h.ExpSyn(self.cells[nt][i].soma(0.5))
syn.tau = tau2/ms
else:
syn = h.Exp2Syn(self.cells[nt][i].soma(0.5))
syn.tau1 = tau1/ms
syn.tau2 = tau2/ms
syn.e = 0/mV
self.cells[nt][i].MF_L.append(syn)
e0 = len(self.cells[nt][i].MF_L)-1 # get number of current synapse!
syn_idx = int(e0)
source = int(pulse_gid)
self.cells[nt][i].mfncpc.append(self.pc.gid_connect(source, self.cells[nt][i].MF_L[syn_idx]))
self.cells[nt][i].mfncpc[-1].delay = 1
self.cells[nt][i].mfncpc[-1].weight[0] = w
if 'gsyn_in' in self.method_interpol:
self.record_syn.append(h.Vector())
self.record_syn[-1].record(self.cells[nt][i].MF_L[-1]._ref_g)
self.gsyn_in_fac.append(f)
else:
nrel = 0
mg = self.mglufac_ex[0]
if self.mglufac_ex[1] > 0:
mg = np.random.normal(self.mglufac_ex[0], self.mglufac_ex[1]*self.mglufac_ex[0], 1).clip(min=0) # self.g_syn_ex_s[nt]
if "stoch" in str(self.tau1_ex[nt]):
nrel = 4
self.cells[nt][i].createsyn(nmf = 1, weight_gmax = w, nrel=nrel, mglufac = mg)
e0 = len(self.cells[nt][i].MF_L)-1 # get number of current synapse!
self.cells[nt][i].pconnect(self.pc,pulse_gid,int(e0),'mf')
if 'gsyn_in' in self.method_interpol:
self.record_syn.append(h.Vector())
self.record_syn[-1].record(self.cells[nt][i].MF_L[-1].postsyns['AMPA'][0]._ref_g)
self.record_syn.append(h.Vector())
self.record_syn[-1].record(self.cells[nt][i].MF_L[-1].postsyns['NMDA'][0]._ref_g)
self.gsyn_in_fac.append(f)
self.gsyn_in_fac.append(f)
elif self.celltype[nt] == 'IfCell':
# delete old
if j == 0:
self.cells[nt][i].synlist = []
self.cells[nt][i].nc = []
if "gr" in str(self.tau1_ex[nt]):
self.cells[nt][i].whatami = "grc"
nrel = 0
if "stoch" in str(self.tau1_ex[nt]):
nrel = 4
self.cells[nt][i].MF_L = self.cells[nt][i].synlist
self.cells[nt][i].synlist.append(Synapse('glom', self.cells[nt][i], self.cells[nt][i].soma, nrel=nrel, record_all=0, weight_gmax = w))
if "ampa" in str(self.tau1_ex[nt]):
self.cells[nt][i].synlist[-1].postsyns['NMDA'][0].gmax_factor = 0
if "nopre" in str(self.tau1_ex[nt]):
print "- no pre"
self.cells[nt][i].synlist[-1].postsyns['AMPA'][0].tau_rec = 1e-9
self.cells[nt][i].synlist[-1].postsyns['AMPA'][0].tau_facil = 1e-9
self.cells[nt][i].synlist[-1].postsyns['AMPA'][0].tau_1 = 0
if "nmda" in str(self.tau1_ex[nt]):
self.cells[nt][i].synlist[-1].postsyns['AMPA'][0].gmax_factor = 0
if "nopre" in str(self.tau1_ex[nt]):
self.cells[nt][i].synlist[-1].postsyns['NMDA'][0].tau_rec = 1e-9
self.cells[nt][i].synlist[-1].postsyns['NMDA'][0].tau_facil = 1e-9
self.cells[nt][i].synlist[-1].postsyns['NMDA'][0].tau_1 = 0
if "nostdampa" in str(self.tau1_ex[nt]):
self.cells[nt][i].synlist[-1].postsyns['AMPA'][0].tau_rec = 1e-9
self.cells[nt][i].synlist[-1].postsyns['AMPA'][0].tau_facil = 1e-9
self.cells[nt][i].synlist[-1].postsyns['AMPA'][0].tau_1 = 0
self.cells[nt][i].synlist[-1].postsyns['AMPA'][0].r6FIX = 0 #1.12
if "nostdnmda" in str(self.tau1_ex[nt]):
self.cells[nt][i].synlist[-1].postsyns['NMDA'][0].tau_rec = 1e-9
self.cells[nt][i].synlist[-1].postsyns['NMDA'][0].tau_facil = 1e-9
self.cells[nt][i].synlist[-1].postsyns['NMDA'][0].tau_1 = 0
self.cells[nt][i].synlist[-1].postsyns['NMDA'][0].RdRate = 0
if "nostdgr" in str(self.tau1_ex[nt]):
self.cells[nt][i].synlist[-1].postsyns['AMPA'][0].r6FIX = 0 #1.12
self.cells[nt][i].synlist[-1].postsyns['NMDA'][0].RdRate = 0 #12e-3
print "- no std"
if "nomggr" in str(self.tau1_ex[nt]):
self.cells[nt][i].synlist[-1].postsyns['NMDA'][0].v0_block = -1e9 #.k_block = 1e-9
print "- no mg block"
e0 = len(self.cells[nt][i].synlist)-1
syn_idx = int(e0)
source = int(pulse_gid)
self.cells[nt][i].nc.append(self.pc.gid_connect(source, self.cells[nt][i].synlist[syn_idx].input))
self.cells[nt][i].nc[-1].delay = 1
self.cells[nt][i].nc[-1].weight[0] = 1
if 'gsyn_in' in self.method_interpol:
self.record_syn.append(h.Vector())
self.record_syn[-1].record(self.cells[nt][i].synlist[syn_idx].postsyns['AMPA'][0]._ref_g)
self.record_syn.append(h.Vector())
self.record_syn[-1].record(self.cells[nt][i].synlist[syn_idx].postsyns['NMDA'][0]._ref_g)
self.gsyn_in_fac.append(f)
self.gsyn_in_fac.append(f)
else:
if "amfit" in str(self.tau1_ex):
syn = h.ExpGrcSyn(self.cells[nt][i].soma(0.5))
syn.tau1_ampa = 0.254
syn.tau2_ampa = 0.254
syn.tau3_ampa = 0.363
syn.tau4_ampa = 6.523
syn.f1_ampa = 8.8376e-05
syn.f2_ampa = 5.5257e-05
syn.f1_nmda = 0
self.cells[nt][i].synlist.append(syn) # synlist is defined in Cell
elif "nmfit" in str(self.tau1_ex):
syn = h.ExpGrcSyn(self.cells[nt][i].soma(0.5))
syn.f1_ampa = 0
syn.f2_ampa = 0
syn.tau1_nmda = 1.902
syn.tau2_nmda = 82.032
syn.f1_nmda = 7.853857483005277e-05
self.cells[nt][i].synlist.append(syn) # synlist is defined in Cell
elif "fit" in str(self.tau1_ex):
syn = h.ExpGrcSyn(self.cells[nt][i].soma(0.5))
syn.tau1_ampa = 0.254
syn.tau2_ampa = 0.254
syn.tau3_ampa = 0.363
syn.tau4_ampa = 6.523
syn.f1_ampa = 8.8376e-05
syn.f2_ampa = 5.5257e-05
syn.tau1_nmda = 1.902
syn.tau2_nmda = 82.032
syn.f1_nmda = 7.853857483005277e-05
self.cells[nt][i].synlist.append(syn) # synlist is defined in Cell
else:
self.cells[nt][i].create_synapses(n_ex=1, tau1=self.tau1_ex[nt], tau2=self.tau2_ex[nt])
e0 = len(self.cells[nt][i].synlist)-1
syn_idx = int(e0)
self.cells[nt][i].pconnect_target(self.pc, source=pulse_gid, target=int(e0), syntype='ex', weight=w, delay=1)
if 'gsyn_in' in self.method_interpol:
self.record_syn.append(h.Vector())
self.record_syn[-1].record(self.cells[nt][i].synlist[syn_idx]._ref_g)
self.gsyn_in_fac.append(f)
elif self.celltype[nt] == 'Prk':
# delete old
if j == 0:
self.cells[nt][i].PF_Lsync = []
self.cells[nt][i].spk_nc_pfsync = []
self.cells[nt][i].pfrand = []
m = len(self.cells[nt][i].dendrange)
seed = int(4*gid)
np.random.seed(seed)
for k in xrange(nsyn):
m -= 1
mi = np.random.randint(0, m)
self.cells[nt][i].dendrange[mi], self.cells[nt][i].dendrange[m] = self.cells[nt][i].dendrange[m], self.cells[nt][i].dendrange[mi]
self.cells[nt][i].pfrand.append(self.cells[nt][i].dendrange[m])
#print self.cells[nt][i].pfrand
if "prk" not in str(self.tau1_ex[nt]):
pass
else:
self.cells[nt][i].PF_Lsync.append(Synapse2('pf',self.cells[nt][i],self.cells[nt][i].pfrand[j],record_all=0))
e0 = len(self.cells[nt][i].PF_Lsync)-1 # get number of current synapse!
syn_idx = int(e0)
self.cells[nt][i].spk_nc_pfsync.append(self.pc.gid_connect(pulse_gid, self.cells[nt][i].PF_Lsync[syn_idx].input.newnetstim))
self.cells[nt][i].spk_nc_pfsync[-1].delay = 1
self.cells[nt][i].spk_nc_pfsync[-1].weight[0] = 1
if 'gsyn_in' in self.method_interpol:
self.record_syn.append(h.Vector())
self.record_syn[-1].record(self.cells[nt][i].PF_Lsync[-1].postsyns['AMPA'][0]._ref_g)
self.gsyn_in_fac.append(f)
elif syntype == "inh":
w = self.g_syn_inh[nt]
seed = int(10000 + 10*gid + j)
np.random.seed(seed*42)
if self.g_syn_inh_s[nt] > 0:
w = np.random.normal(w, w*self.g_syn_inh_s[nt], 1).clip(min=w*0.1) # self.g_syn_inh_s[nt]
if self.celltype[nt] == 'Grc':
if j == 0:
self.cells[nt][i].GOC_L = []
self.cells[nt][i].gocncpc = []
if "gr" not in str(self.tau1_inh[nt]):
tau1 = self.tau1_inh[nt]
tau2 = self.tau2_inh[nt]
if tau1 == 0:
syn = h.ExpSyn(self.cells[nt][i].soma(0.5))
syn.tau = tau2/ms
else:
syn = h.Exp2Syn(self.cells[nt][i].soma(0.5))
syn.tau1 = tau1/ms
syn.tau2 = tau2/ms
syn.e = -65
self.cells[nt][i].GOC_L.append(syn)
i0 = len(self.cells[nt][i].GOC_L)-1 # get number of current synapse!
syn_idx = int(i0)
source = int(pulse_gid)
self.cells[nt][i].gocncpc.append(self.pc.gid_connect(source, self.cells[nt][i].GOC_L[syn_idx]))
self.cells[nt][i].gocncpc[-1].delay = 1
self.cells[nt][i].gocncpc[-1].weight[0] = w
else:
self.cells[nt][i].createsyn(nmf = 0, ngoc = 1, weight_gmax = w)
i0 = len(self.cells[nt][i].GOC_L)-1 # get number of current synapse!
self.cells[nt][i].pconnect(self.pc,pulse_gid,int(i0),'goc')
if self.celltype[nt] == 'IfCell':
if j == 0:
self.cells[nt][i].synlist_inh = []
self.cells[nt][i].nc_inh = []
if "gr" in str(self.tau1_inh[nt]):
nrel = 0
if "stoch" in str(self.tau1_ex[nt]):
nrel = 4
self.cells[nt][i].GOC_L = self.cells[nt][i].synlist
self.cells[nt][i].whatami = "grc"
self.cells[nt][i].synlist_inh.append(Synapse('goc', self.cells[nt][i], self.cells[nt][i].soma, nrel=nrel, record_all=0, weight_gmax = w))
i0 = len(self.cells[nt][i].synlist_inh)-1
syn_idx = int(i0)
source = int(pulse_gid)
self.cells[nt][i].nc_inh.append(self.pc.gid_connect(source, self.cells[nt][i].synlist_inh[syn_idx].input))
self.cells[nt][i].nc_inh[-1].delay = 1
self.cells[nt][i].nc_inh[-1].weight[0] = 1
if "gaba" in str(self.tau1_ex[nt]):
if 'gsyn_in' in self.method_interpol:
if "nostdgaba" in str(self.tau1_ex[nt]):
self.cells[nt][i].synlist_inh[syn_idx].postsyns['GABA'][0].tau_rec = 1e-9
self.cells[nt][i].synlist_inh[syn_idx].postsyns['GABA'][0].tau_facil = 1e-9
self.cells[nt][i].synlist_inh[syn_idx].postsyns['GABA'][0].tau_1 = 0
self.cells[nt][i].synlist_inh[syn_idx].postsyns['GABA'][0].d3 = 0
self.cells[nt][i].synlist_inh[syn_idx].postsyns['GABA'][0].d1d2 = 0
self.cells[nt][i].synlist_inh[syn_idx].postsyns['GABA'][0].d1 = 0
self.cells[nt][i].synlist_inh[syn_idx].postsyns['GABA'][0].d2 = 0
self.cells[nt][i].synlist_inh[syn_idx].postsyns['GABA'][0].d3_a6 = 0
self.cells[nt][i].synlist_inh[syn_idx].postsyns['GABA'][0].d1d2_a6 = 0
self.cells[nt][i].synlist_inh[syn_idx].postsyns['GABA'][0].d1_a6 = 0
self.cells[nt][i].synlist_inh[syn_idx].postsyns['GABA'][0].d2_a6 = 0
self.record_syn.append(h.Vector())
self.record_syn[-1].record(self.cells[nt][i].synlist_inh[syn_idx].postsyns['GABA'][0]._ref_g)
self.gsyn_in_fac.append(f)
else:
self.cells[nt][i].create_synapses(n_inh=1, tau1_inh=self.tau1_inh[nt], tau2_inh=self.tau2_inh[nt], e_inh=-65)
i0 = len(self.cells[nt][i].synlist_inh)-1
syn_idx = int(i0)
self.cells[nt][i].pconnect_target(self.pc, source=pulse_gid, target=int(i0), syntype='inh', weight=w, delay=1)
elif syntype == "intr":
if self.celltype[nt] == 'Prk':
pass
def set_SynPlay(self, farray, tarray, N = [], t_startstop = [], amode = 1):
    """Create and connect played-back spike-train generators for all synapse classes.

    For every cell type, this builds global gid tables for excitatory ('ex'),
    inhibitory ('inh') and intruder ('intr') train generators, generates the
    modulated spike trains on the ranks that own them (round-robin over
    ``self.nhost``), plays them in via ``setup_Play_train``, and finally wires
    each local cell to its generators with ``connect_Synapse``.

    Parameters
    ----------
    farray : sequence of modulation waveforms (indexed by noise-source number)
    tarray : time vector matching the waveforms in ``farray``
    N : per-celltype cell counts; defaults to ``self.N`` when ``[]``
    t_startstop : window passed to ``self.calc_fmean`` for rate statistics
    amode : 1 -> modulation amplitude scales with the per-generator hold rate
            ``ih_use``; 2 -> scales with the nominal ``self.ihold[nt]``

    Side effects: extends ``self.global_pulse_list*``, ``self.pulse_list``,
    advances ``self.gid_count``, gathers input-train statistics into
    ``self.fmean_input`` etc. on rank 0.
    """
    if self.do_run:
        delay = 1
        if (self.use_pc is False):
            delay = 0.1
        if N == []:
            N = self.N
        self.pulse_list = []
        self.global_pulse_list = []
        self.global_pulse_list_inh = []
        self.global_pulse_list_intr = []
        f_cells_mean_local = []
        f_cells_cv_local = []
        f_cells_std_local = []
        for nt in range(self.n_celltypes): # loop over all cells
            if (self.n_syn_ex[nt] > 0) or (self.n_syn_inh[nt] > 0) or (self.n_syn_intr[nt] > 0):
                # local_gid_count_type[k] = [kind, cell-index, synapse-index] for generator k
                local_gid_count = 0
                local_gid_count_type = []
                # EXCITATION
                if str(type(self.g_syn_ex[nt] )) is not ndarray: self.g_syn_ex[nt] = np.array([self.g_syn_ex[nt] ]) # each cell can receive different g_syn_ex !
                if len(self.g_syn_ex[nt]) == len(self.global_gidlist[nt]):
                    pass
                else:
                    # broadcast a single conductance value to every cell of this type
                    self.g_syn_ex[nt] = np.ones(len(self.global_gidlist[nt]))*self.g_syn_ex[nt][0]
                    #print "- single value in g_syn_ex, cells:", len(self.global_gidlist[nt])
                self.global_pulse_list.append([])
                for ns in range(self.n_syn_ex[nt]): # loop over all excitatory synapses!
                    self.global_pulse_list[-1].append([])
                    for n in range(self.syn_max_mf[nt]): # number of cells of this celltype
                        self.global_pulse_list[-1][-1].append(local_gid_count+self.gid_count)
                        local_gid_count += 1
                        local_gid_count_type.append([])
                        local_gid_count_type[-1].append('ex')
                        local_gid_count_type[-1].append(n) # number of cell within their population 0..N[nt]
                        local_gid_count_type[-1].append(ns) # number of synapse
                # INHIBITION
                if np.array(self.inh_hold[nt]).size <= 1:
                    self.inh_hold[nt] = np.ones(len(self.global_gidlist[nt]))*self.inh_hold[nt]
                    #print "- single value in inh_hold", self.inh_hold[nt]
                self.global_pulse_list_inh.append([])
                for ns in range(self.n_syn_inh[nt]): # loop over all inhibitory synapses!
                    self.global_pulse_list_inh[-1].append([])
                    for n in range(self.syn_max_inh[nt]): # number of cells of this celltype
                        self.global_pulse_list_inh[-1][-1].append(local_gid_count+self.gid_count)
                        local_gid_count += 1
                        local_gid_count_type.append([])
                        local_gid_count_type[-1].append('inh')
                        local_gid_count_type[-1].append(n) # number of cell within their population 0..N[nt]
                        local_gid_count_type[-1].append(ns) # number of synapse
                # INTRUDER SYNAPSE
                if str(type(self.g_syn_intr[nt] )) is not ndarray: self.g_syn_intr[nt] = np.array([self.g_syn_intr[nt] ]) # each cell can receive different g_syn_intr !
                if len(self.g_syn_intr[nt]) == len(self.global_gidlist[nt]):
                    pass
                else:
                    self.g_syn_intr[nt] = np.ones(len(self.global_gidlist[nt]))*self.g_syn_intr[nt][0]
                    #print "- single value in g_syn_intr, cells:", len(self.global_gidlist[nt])
                self.global_pulse_list_intr.append([])
                for ns in range(self.n_syn_intr[nt]): # loop over all intruding synapses!
                    self.global_pulse_list_intr[-1].append([])
                    for n in range(self.syn_max_intr[nt]): # number of generators for this celltype
                        self.global_pulse_list_intr[-1][-1].append(local_gid_count+self.gid_count)
                        local_gid_count += 1
                        local_gid_count_type.append([])
                        local_gid_count_type[-1].append('intr')
                        local_gid_count_type[-1].append(n) # number of cell within their population 0..N[nt]
                        local_gid_count_type[-1].append(ns) # number of synapse
                t_vec_input = np.array([]) # input trains
                id_vec_input = np.array([]) # input trains id
                fs = 1 / self.dt
                ih_use_v = []
                # round-robin over ranks: each host generates every nhost-th train
                for i in range(int(self.id), local_gid_count, int(self.nhost)): # loop over all train generators and generate them
                    self.pulse_list.append(i+self.gid_count)
                    pulse_gid = self.pulse_list[-1]
                    gid = local_gid_count_type[i][1] # should correspond to this gid when multiple values inserted
                    if local_gid_count_type[i][0] == 'ex':
                        seed = int(10001 + pulse_gid) # unique gid for generators!
                        np.random.seed(seed*423)
                        if self.ihold_sigma[nt] > 0:
                            ih_use = np.random.normal(self.ihold[nt], self.ihold[nt]*self.ihold_sigma[nt], 1).clip(min=0) # self.ihold[nt]*self.ihold_sigma[nt]
                        elif self.ihold_sigma[nt] < 0:
                            # negative sigma selects a uniform draw instead of a normal one
                            ih_use = np.random.uniform(0.1, self.ihold[nt])
                        else:
                            ih_use = self.ihold[nt]
                        ih_use_v.append(ih_use)
                        if ih_use > 0:
                            # train has to be contructed here, to insert different train into each "dendrite"
                            ## different ihold has to be implemented here!!
                            iholdvec = concatenate((zeros(round(fs)), ones(round(len(tarray) - 1 * fs)) * ih_use))
                            if isinstance(self.syn_ex_dist[nt], ( tuple ) ): # distribution of amplitude, only one noise source!
                                np.random.seed(pulse_gid*40)
                                if self.syn_ex_dist[nt][1] > 0:
                                    f = np.random.normal(self.syn_ex_dist[nt][0], self.syn_ex_dist[nt][1], 1).clip(min=0)
                                else:
                                    f = self.syn_ex_dist[nt][0]
                                f2 = f
                                rnd.seed(pulse_gid*40) # use gid so type 1, 2 is identical for each cell
                                #rnd.seed(gid*40) # use gid so type 1, 2 is identical for each cell
                                if self.syn_ex_dist[nt][2] > 0: # add inverted input with 50% probability, in future versions this will indicate the propability for -1 and 1
                                    f2 = rnd.sample([-1,1],1)[0] * f
                                    #f2 = f
                                if amode == 1:
                                    inamp = (f2 * self.amod[nt] * ih_use)
                                elif amode == 2:
                                    inamp = (f2 * self.amod[nt] * self.ihold[nt])
                                modulation = (tarray, inamp * farray[0] + iholdvec)
                                #if self.id == 0: print "- randomize play stimulus height, pulse_gid=", pulse_gid, " gid=", gid ," f=", f
                                if (gid==0): print "- randomize play stimulus height, pulse_gid=", pulse_gid, " gid=", gid ," f2=", f2,"inamp=",inamp
                                #rnd.seed(local_gid_count_type[i][1]*300) # pick seed based on number of cell
                                #nj = rnd.sample(range(len(farray)),1)[0]
                                nj = 1
                            else: # different noise sources can be used at different synapses, linear combination test in openloop
                                nj = self.syn_ex_dist[nt][local_gid_count_type[i][2]]
                                if nj == 0:
                                    # noise source 0 means: hold current only, no modulation
                                    modulation = (tarray, iholdvec)
                                else:
                                    if amode == 1:
                                        inamp = (self.factor_celltype[nt] * self.amod[nt] * ih_use)
                                    elif amode == 2:
                                        inamp = (self.factor_celltype[nt] * self.amod[nt] * self.ihold[nt])
                                    modulation = (tarray, inamp * farray[nj-1] + iholdvec)
                                    if self.id == 0: print "ex farray number:", nj-1, "ih_use:", ih_use, "self.amod[nt]:", self.amod[nt], "inamp: ", inamp
                            # will be done n_syn_ex * number of cells!
                            if self.noise_syn_tau[nt] < 0: # variable threshold
                                no = self.noise_syn[nt]
                            else:
                                no = self.noise_syn[nt]*ih_use
                            train, self.n_train_ex = mod_spike_train(modulation, noise = no, seed = seed, noise_tau = self.noise_syn_tau[nt], noise_a = self.noise_a[nt])
                            #plt.figure("input")
                            #plt.plot(train, train*0, '|')
                            #plt.show()
                            t_vec_input = np.append(t_vec_input, train*ms).flatten() # use ms to save!!
                            id_vec_input = np.append(id_vec_input, np.ones(len(train))*pulse_gid).flatten()
                            f_cells_mean_local0, f_cells_cv_local0, f_cells_std_local0 = self.calc_fmean(train*ms, t_startstop)
                            f_cells_mean_local.append(f_cells_mean_local0); f_cells_cv_local.append(f_cells_cv_local0); f_cells_std_local.append(f_cells_std_local0)
                            if self.id == 0: print "TRAIN: requ. mean:", ih_use ,"eff. mean:", f_cells_mean_local0, "cv: " , f_cells_cv_local0, "std:" , f_cells_std_local0
                        else:
                            # zero hold rate: no spikes for this generator
                            train = []
                            self.n_train_ex = []
                    elif local_gid_count_type[i][0] == 'intr':
                        # train has to be contructed here, to insert different train into each "dendrite"
                        nj = 0
                        seed = int(10001 + pulse_gid)
                        np.random.seed(seed*4411)
                        if self.intr_hold_sigma[nt] > 0:
                            ih_use = np.random.normal(self.intr_hold[nt], self.intr_hold[nt]*self.intr_hold_sigma[nt], 1).clip(min=0)
                        else:
                            ih_use = self.intr_hold[nt]
                        ih_use_v.append(ih_use)
                        # NOTE(review): if ih_use == 0 here, 'train' keeps its value from a
                        # previous loop iteration (or is undefined) — confirm intended.
                        if ih_use > 0:
                            iholdvec = concatenate((zeros(round(fs)), ones(round(len(tarray) - 1 * fs)) * ih_use))
                            modulation = (tarray, iholdvec)
                            # will be done n_syn_in * number of cells!
                            if self.noise_syn_tau_intr[nt] < 0: # variable threshold
                                no = self.noise_syn_intr[nt]
                            else:
                                no = self.noise_syn_intr[nt]*ih_use
                            if self.noise_syn_tau_intr[nt] >= -1:
                                train, _ = mod_spike_train(modulation, noise = no, seed = seed, noise_tau = self.noise_syn_tau_intr[nt], noise_a = self.noise_a_intr[nt]) # train in ms
                            else:
                                train = oscill_spike_train(sor = 4, spike_prob = 1/4, noise_fraction = 4, end_time = tarray[-1]/ms, seed = seed)
                    elif local_gid_count_type[i][0] == 'inh':
                        # train has to be contructed here, to insert different train into each "dendrite"
                        seed = int(10001 + pulse_gid)
                        np.random.seed(seed*44)
                        if self.inh_hold_sigma[nt] > 0:
                            ih_use = np.random.normal(self.inh_hold[nt][gid], self.inh_hold[nt][gid]*self.inh_hold_sigma[nt], 1).clip(min=0)
                        else:
                            ih_use = self.inh_hold[nt][gid]
                        iholdvec = concatenate((zeros(round(fs)), ones(round(len(tarray) - 1 * fs)) * ih_use))
                        nj = self.syn_inh_dist[nt][local_gid_count_type[i][2]]
                        if nj == 0:
                            modulation = (tarray, iholdvec)
                        else:
                            inamp = (self.amod[nt] * ih_use)
                            modulation = (tarray, inamp * farray[nj-1] + iholdvec)
                            #print "inh farray number:", nj-1, "ih_use:", ih_use, "amp: ", inamp #old: nj-1+nemax
                        # will be done n_syn_in * number of cells!
                        if self.noise_syn_tau_inh[nt] < 0: # variable threshold
                            no = self.noise_syn_inh[nt]
                        else:
                            no = self.noise_syn_inh[nt]*ih_use
                        train, _ = mod_spike_train(modulation, noise = no, seed = seed, noise_tau = self.noise_syn_tau_inh[nt], noise_a = self.noise_a_inh[nt]) # train in ms
                        #print train
                    #print train
                    if len(train) > 0:
                        if self.id == 0:
                            print "-", pulse_gid, local_gid_count_type[i], "seed: ", seed, "ih_use:", ih_use, no, nj #, "first spike: ", train[0]
                        self.setup_Play_train(train = train+self.inh_delay, input_gid = pulse_gid, delay = delay) # train in ms
                self.gid_count += local_gid_count # increase gid count
                self.barrier()
                # connect every local cell of this type to its train generators
                for i, gid in enumerate(self.gidlist[nt]): # for all input cells
                    rnd.seed(gid*200)
                    n = self.global_gidlist[nt].index(gid) # index of cell within their population 0..N[nt]
                    # i is index on this node only!
                    self.record_syn = []
                    for j in range(self.n_syn_ex[nt]):
                        if N[nt] == len(self.global_pulse_list[nt][j]):
                            pulse_gid = self.global_pulse_list[nt][j][n] #every cell of this type receives one pulse gid
                            if self.id == 0: print "- gid:", gid ," n:", n ," one ex train for each synapse:", pulse_gid, "self.g_syn_ex[nt][n]:", self.g_syn_ex[nt][n]
                        else:
                            pulse_gid = rnd.sample(self.global_pulse_list[nt][j],1)[0] # not enough, just pick one at random, for inh/f search only one synapse available!
                            if self.id == 0: print "- gid:", gid ," n:", n ," one ex train from", len(self.global_pulse_list[nt][j]), ":", pulse_gid, "self.g_syn_ex[nt][n]:", self.g_syn_ex[nt][n]
                        if "gaba" in str(self.tau1_ex[nt]):
                            # "gaba"-flagged excitatory input is wired as an inhibitory synapse
                            self.connect_Synapse(pulse_gid, nt, i, n, gid, j, syntype = "inh")
                        else:
                            self.connect_Synapse(pulse_gid, nt, i, n, gid, j, syntype = "ex", nsyn = self.n_syn_ex[nt])
                    if self.n_syn_inh[nt] > 0:
                        for j in range(self.n_syn_inh[nt]):
                            if N[nt] == len(self.global_pulse_list_inh[nt][j]):
                                pulse_gid = self.global_pulse_list_inh[nt][j][n] #every cell of this type receives one pulse gid
                                if self.id == 0: print "- one inh train for each synapse:", pulse_gid
                            else:
                                pulse_gid = rnd.sample(self.global_pulse_list_inh[nt][j],1)[0] # not enough, just pick one at random
                                if self.id == 0: print "- one inh train from", len(self.global_pulse_list_inh[nt][j]), ":", pulse_gid
                            self.connect_Synapse(pulse_gid, nt, i, n, gid, j, syntype = "inh")
                    if self.n_syn_intr[nt] > 0:
                        for j in range(self.n_syn_intr[nt]):
                            if N[nt] == len(self.global_pulse_list_intr[nt][j]):
                                pulse_gid = self.global_pulse_list_intr[nt][j][n] #every cell of this type receives one pulse gid
                                if self.id == 0: print "- one intruding train for each synapse:", pulse_gid
                            else:
                                pulse_gid = rnd.sample(self.global_pulse_list_intr[nt][j],1)[0] # not enough, just pick one at random
                                if self.id == 0: print "- one intruding train from", len(self.global_pulse_list_intr[nt][j]), ":", pulse_gid
                            if (self.use_pc is False):
                                if self.celltype[nt] == 'Prk': self.cells[nt][i].delrerun()
                                (msg,CF_input) = self.cells[nt][i].createsyn_CF(record_all=0,factor=self.g_syn_intr[nt][0],cf_setup_select='old')
                                CF_input.number = 3 # three bursts
                                CF_input.start = -0.3 # See synapsepfpurk.py
                                CF_input.interval = 3 # 3 ms interval between bursts
                                self.cells[nt][i].input_to_CF_nc.append(h.NetCon(self.vecstim[j], CF_input, 0, 0.1, 1))
                                self.netcons.append(self.cells[nt][i].input_to_CF_nc[-1])
                            else:
                                print "NOT IMPLEMENTED"
                if self.id == 0: print "trains connected"
                # NOTE(review): 'i' below is left over from the loops above, so only the
                # last generated/connected entry decides the 'intr' test — confirm intended.
                if local_gid_count_type[i][0] == 'intr':
                    pass
                else:
                    self.id_all_vec_input.append(self.do_gather(id_vec_input, dtype = 'i'))
                    self.t_all_vec_input.append(self.do_gather(t_vec_input))
                f_cells_mean = self.do_gather(f_cells_mean_local)
                f_cells_cv = self.do_gather(f_cells_cv_local)
                f_cells_std = self.do_gather(f_cells_std_local)
                self.fmean_input = np.nan
                self.fmax_input = np.nan
                self.fmstd_input = np.nan
                self.fcvm_input = np.nan
                self.fstdm_input = np.nan
                ih_use_v_all = self.do_gather(ih_use_v)
                if self.id == 0 and local_gid_count_type[i][0] != 'intr':
                    self.fmean_input = mean(np.nan_to_num(f_cells_mean)) # compute mean of mean rate for all cells
                    self.fmstd_input = std(np.nan_to_num(f_cells_mean))
                    self.fmax_input = max(np.nan_to_num(f_cells_mean))
                    self.fcvm_input = mean(f_cells_cv[~np.isnan(f_cells_cv)])
                    self.fstdm_input = mean(f_cells_std[~np.isnan(f_cells_std)])
                    self.ih_use_max = max(ih_use_v_all)
                    print "- trains, fmean: ",self.fmean_input, "fmax: ",self.fmax_input, "Hz", "fmstd: ",self.fmstd_input, "Hz", "fcvm: ",self.fcvm_input, "fstdm: ",self.fstdm_input, "Hz, ih_use_max:", self.ih_use_max
            else:
                # no synapses for this celltype: keep list shapes aligned with nt
                self.global_pulse_list.append([])
                self.global_pulse_list_inh.append([])
def do_gather(self, v_local, dtype = 'd'):
    """Gather a per-rank 1-D array onto rank 0.

    Parameters
    ----------
    v_local : array-like local to this rank; flattened before gathering
    dtype : 'd' (double) or 'i' (int) — selects the MPI datatype used

    Returns
    -------
    On rank 0: the concatenated array from all ranks.
    On other ranks (mpi4py path): None.
    Without MPI: simply ``np.hstack(v_local)``.
    """
    if self.use_mpi:
        self.barrier()
        #v_local = v_local.astype(dtype).flatten()
        v_local = np.array(v_local, dtype=dtype).flatten()
        if self.use_pc == False:
            # mpi4py path: variable-length Gatherv, so first gather the counts
            v_global = None
            counts_local = np.array(len(v_local), dtype='i')
            counts = 0
            if self.id == 0:
                counts = np.empty(self.nhost, dtype='i')
            self.comm.Gather(sendbuf=[counts_local, MPI.INT], recvbuf=[counts, MPI.INT], root=0)
            if self.id == 0:
                v_global = np.empty(sum(counts), dtype=dtype)
            if dtype == 'd':
                self.comm.Gatherv(sendbuf=[v_local, MPI.DOUBLE], recvbuf=[v_global, (counts, None), MPI.DOUBLE], root=0)
            elif dtype == 'i':
                self.comm.Gatherv(sendbuf=[v_local, MPI.INT], recvbuf=[v_global, (counts, None), MPI.INT], root=0)
            #v_global = np.hstack(v_global)
        else:
            # NEURON ParallelContext path: send everything to rank 0 via py_alltoall
            sendlist = [None]*self.nhost
            sendlist[0] = v_local
            getlist = self.pc.py_alltoall(sendlist)
            v_global = np.hstack(getlist)
    else:
        v_global = np.hstack(v_local)
    return v_global
def setup_Play_train(self, train = [], input_gid = 0, delay = 1):
    """Register a pre-computed spike train (in ms) so it can be played into the net.

    Creates a VecStim driven by the spike times and a NetCon with the given
    delay; with MPI, the generator gid is bound to this host's spike source.
    """
    self.trains.append(train)
    # possibility to play spikes into the cells!
    stim = h.VecStim(.5)
    netcon = h.NetCon(stim, None)
    netcon.delay = delay
    spikes = h.Vector(self.trains[-1])
    stim.play(spikes)
    self.vecstim.append(stim)
    self.nc_vecstim.append(netcon)
    self.spike_vec.append(spikes)
    if (self.use_mpi):
        self.pc.set_gid2node(input_gid, self.id)  # associate gid with this host
        self.pc.cell(input_gid, self.nc_vecstim[-1])  # associate gid with spike detector
def record(self):
    """
    Initializes recording vectors. Internal function

    Sets up h.Vector recorders for: the first cell's somatic voltage of each
    celltype (on the rank owning it), and — on rank 0 only — the stimulus
    currents, noise, step current and the time base.
    """
    if self.n_celltypes > 1:
        #print "self.n_borders:",self.n_borders
        for n in range(self.n_celltypes):
            # only the rank that owns the first cell of celltype n records its voltage
            if self.n_borders[n] in self.gidlist[n]:
                #print "np.shape(self.rec_v):",np.shape(self.rec_v)
                #print "np.shape(self.cells):",np.shape(self.cells)
                self.rec_v[n].record(self.cells[n][0].soma(0.5)._ref_v)
    if self.id == 0: # only for first node and first cell
        # Voltage
        self.rec_v[0].record(self.cells[self.a_celltype[0]][0].soma(0.5)._ref_v)
        # Stimuli
        self.rec_i = h.Vector()
        if (self.plays != []):
            if (isinstance(self.plays[0], list) is False):
                self.rec_i.record(self.plays[0]._ref_i)
            else:
                self.rec_i.record(self.plays[0][0]._ref_i)
        self.rec_ich = h.Vector()
        if self.ic_holds != [] and (isinstance(self.ic_holds[0], list) is False):
            self.rec_ich.record(self.ic_holds[0]._ref_i)
        self.rec_ics = h.Vector()
        if self.ic_starts != []:
            self.rec_ics.record(self.ic_starts[0]._ref_i)
        self.rec_n = h.Vector()
        if self.fluct_s[0] > 0:
            # Fluctuating input
            self.rec_n.record(self.flucts[0]._ref_i)
            print "recording noise"
        elif (len(self.flucts) > 0) and (len(self.fluct_g_i0)>0):
            # conductance noise instead of current noise
            self.rec_n.record(self.flucts[0]._ref_g_i)
            print "recording g noise"
        else:
            print "nonoise"
        if hasattr(self.cells[self.a_celltype[0]][0], 'lkg2_noise'):
            if self.cells[self.a_celltype[0]][0].lkg2_noise > 0:
                # tonic GABA leak noise overrides the recorder set above
                self.rec_n.record(self.cells[self.a_celltype[0]][0].fluct._ref_il)
                print "recording tonic gaba noise"
        self.rec_step = h.Vector()
        if self.ic_steps != []:
            self.rec_step.record(self.ic_steps[0]._ref_i)
        # Time
        self.rec_t = h.Vector()
        self.rec_t.record(h._ref_t)
def run(self, tstop = 10*s, do_loadstate = True):
    """
    Starts the stimulation.

    Parameters
    ----------
    tstop : simulation end time (seconds; converted to NEURON ms via /ms)
    do_loadstate : when True and the first cell supports it, restores saved
        channel states from a md5-keyed file before running

    On the first call this also performs one-time NEURON setup (multisplit or
    stdrun, dt, temperature, v_init). With ``self.simstep > 0`` the run is
    chunked via ``h.continuerun`` with a barrier and progress print per chunk.
    """
    self.record()
    if self.first_run:
        if self.use_mpi: self.pc.set_maxstep(100)
        #self.pc.spike_compress(1) #test
        if self.use_multisplit:
            import multiprocessing
            # disable CVode when using multisplit (incompatible)
            Hines = h.CVode()
            Hines.active(0)
            h.load_file("parcom.hoc")
            p = h.ParallelComputeTool()
            if self.use_mpi:
                cpus = multiprocessing.cpu_count() #32 #self.pc.nhost()
            else:
                cpus = multiprocessing.cpu_count() #32
            p.change_nthread(cpus,1)
            p.multisplit(1)
            print "Using multisplit, cpus:", cpus
        else:
            h.load_file("stdrun.hoc")
            if self.use_local_dt:
                h.cvode.active(1)
                h.cvode.use_local_dt(1)
        h.celsius = self.temperature
        h.dt = self.dt/ms # Fixed dt
        h.steps_per_ms = 1 / (self.dt/ms)
        if self.cells[self.a_celltype[0]] != []:
            if hasattr(self.cells[self.a_celltype[0]][0], 'v_init'):
                h.v_init = self.cells[self.a_celltype[0]][0].v_init # v_init is supplied by cell itself!
            else:
                h.v_init = -60
        h.stdinit()
        h.finitialize()
        if hasattr(self.cells[self.a_celltype[0]][0], 'load_states') and do_loadstate:
            # state file is keyed by a hash of the cell construction string
            m = md5.new()
            cell_exe_new = self.cell_exe[0]
            m.update(cell_exe_new)
            filename = './states_' + self.celltype[0] + '_' + m.hexdigest() + '_Population.b'
            self.cells[self.a_celltype[0]][0].load_states(filename)
        else:
            pass
    if self.id == 0:
        import time
        t0 = time.time()
    if self.simstep == 0:
        if self.id == 0: print "Running without steps",
        if self.use_mpi:
            self.pc.psolve(tstop/ms)
        else:
            h.init()
            h.tstop = tstop/ms
            h.run()
    else:
        h.finitialize()
        cnt = 1
        #if self.id == 50:
        #    print len(self.cells[1][0].nc), self.cells[1][0].nc[0].weight[0]
        #    print len(self.cells[0][0].nc_inh), self.cells[0][0].nc_inh[0].weight[0]
        h.t = 0
        while h.t < tstop/ms:
            if self.id == 0:
                print "Running...",
                if self.use_mpi:
                    past_time = self.pc.time()
            h.continuerun(cnt*self.simstep/ms)
            if self.use_mpi: self.pc.barrier()
            if self.id == 0:
                if self.use_mpi:
                    print "Simulated time =",h.t*ms, "s, Real time = ", (self.pc.time()-past_time), 's'
                else:
                    print "Simulated time =",h.t*ms, "s"
            #if self.id == 0:
            #    print hpy.heap().byrcs
            cnt += 1
    if self.id == 0: print "psolve took ", time.time() - t0, "seconds"
    self.first_run = False
    self.barrier() # wait for other nodes
    self.tstop = tstop
def get(self, t_startstop=[], i_startstop=[], N = []):
    """
    Gets the recordings.

    Gathers spike times, voltages, currents and synaptic conductances from
    all ranks onto rank 0, computes per-cell and population rate statistics,
    optionally applies jitter/time shift, and returns everything in a dict.

    Parameters
    ----------
    t_startstop : [start, stop] window for rate statistics; defaults to
        [2, self.tstop] when ``[]``
    i_startstop : unused in this function body (kept for interface
        compatibility)
    N : per-celltype cell counts; defaults to ``self.N``

    Returns
    -------
    dict with time/voltage/current traces, per-celltype spike vectors,
    rate statistics, gathered synaptic conductances and STDP weight matrices.
    Entries are only populated on rank 0.
    """
    if N == []:
        N = self.N
    if t_startstop == []:
        t_startstop = np.array([2, self.tstop])
    t_all_vec = []
    id_all_vec = []
    fmean = []
    fbase = []
    fmax = []
    fmstd = []
    fcvm = []
    fstdm = []
    gid_del = []
    f_cells_mean_all = []
    f_cells_base_all = []
    f_cells_cv_all = []
    f_cells_std_all = []
    fmeanA = []
    fmstdA = []
    fmaxA = []
    fcvmA = []
    fstdmA = []
    fbaseA = []
    fbstdA = []
    if self.id == 0: print "start gathering spikes"
    # --- gather spikes and per-cell rate statistics per celltype ---
    for n in range(self.n_celltypes):
        if self.use_mpi:
            self.barrier() # wait for other node
            t_vec = np.array(self.t_vec[n]).flatten()*ms - 1*ms # shift time because of output delay
            id_vec = np.array(self.id_vec[n]).flatten()
        else:
            t_vec = np.array([])
            id_vec = np.array([])
            print np.shape(self.t_vec)
            for i in self.gidlist[n]:
                t_vec0 = np.array(self.t_vec[n][i]).flatten()*ms
                t_vec = np.append(t_vec, t_vec0).flatten()
                id_vec = np.append(id_vec, np.ones(len(t_vec0))*i).flatten()
        fmean0, fmax0, fmstd0, fcvm0, fstdm0, gid_del0, f_cells_mean_all0, f_cells_cv_all0, f_cells_std_all0, fbase0, f_cells_base_all0 = self.get_fmean(t_vec, id_vec, t_startstop = t_startstop, gidlist = self.gidlist[n])
        fmean.append(fmean0); fmax.append(fmax0), fmstd.append(fmstd0), fcvm.append(fcvm0), fstdm.append(fstdm0), gid_del.append(gid_del0), f_cells_mean_all.append(f_cells_mean_all0), f_cells_cv_all.append(f_cells_cv_all0), f_cells_std_all.append(f_cells_std_all0)
        fbase.append(fbase0); f_cells_base_all.append(f_cells_base_all0)
        t_all_vec.append(self.do_gather(t_vec))
        id_all_vec.append(self.do_gather(id_vec))
    if (self.id == 0) and (self.no_fmean == False):
        # population-level statistics across all cells of all types
        f_cells_mean_all = np.array(f_cells_mean_all).flatten()
        fmeanA = mean(f_cells_mean_all) # compute mean of mean rate for all cells
        fmstdA = std(f_cells_mean_all)
        fmaxA = max(f_cells_mean_all)
        f_cells_base_all = np.array(f_cells_base_all).flatten()
        fbaseA = mean(f_cells_base_all) # compute mean of mean rate for all cells
        fbstdA = std(f_cells_base_all)
        f_cells_cv_all = np.concatenate((np.array(f_cells_cv_all)))
        f_cells_std_all = np.concatenate((np.array(f_cells_std_all)))
        fcvmA = mean(f_cells_cv_all)
        fstdmA = mean(f_cells_std_all)
        print "- ALL, fmean: ",fmeanA, "fmax: ",fmaxA, "Hz", "fmstd: ",fmstdA, "Hz", "fcvm: ",fcvmA, "fstdm: ",fstdmA, "Hz", "fbase: ",fbaseA, "Hz", "fbstd: ", fbstdA, "Hz"
    if self.id == 0: print "all spikes have been gathered"
    self.barrier()
    # do this here to have something to return
    voltage = []
    current = []
    time = []
    freq_times = []
    spike_freq = []
    gsyn = []
    if self.id == 0: # only for first node
        time = np.array(self.rec_t)*ms
        # use self.bin_width as bin width!
        freq_times = arange(0, time[-1], self.bin_width)
        voltage.append(np.array(self.rec_v[0])*mV)
        # total input current = sum of every recorded stimulus component
        current = np.zeros(len(time))
        if len(np.array(self.rec_ics)) > 0:
            current = current + np.array(self.rec_ics)
        if len(np.array(self.rec_ich)) > 0:
            current = current + np.array(self.rec_ich)
        if len(np.array(self.rec_i)) > 0:
            current = current + np.array(self.rec_i)
        if len(np.array(self.rec_n)) > 0:
            current = current + np.array(self.rec_n)
            print np.array(self.rec_n)
        if len(np.array(self.rec_step)) > 0:
            current = current + np.array(self.rec_step)
    else:
        time = [0]
    self.barrier()
    time = self.broadcast(time, fast = True)
    # --- gather summed input conductance (gsyn_in) ---
    gsyn_in = []
    gsyn_in0 = []
    if 'gsyn_in' in self.method_interpol:
        gsyn_in = None
        if self.id == 0: print "- collecting gsyn_in"
        gsyn_in0 = np.zeros(len(time), dtype='d')
        # NOTE(review): 'is not []' is an identity check and therefore always
        # True; likely '!= []' was intended (harmless here, the loop is empty).
        if self.record_syn is not []:
            for i, j in enumerate(self.record_syn):
                gsyn_in0 = gsyn_in0 + self.gsyn_in_fac[i] * np.array(j, dtype='d')
        if self.use_mpi:
            count = len(time)
            #if self.id == 0: gsyn_in = np.empty(count*self.nhost, dtype='d')
            #self.comm.Gatherv(sendbuf=[gsyn_in0, MPI.DOUBLE], recvbuf=[gsyn_in, MPI.DOUBLE], root=0)
            gsyn_in = self.do_gather(gsyn_in0)
            if self.id == 0:
                # sum the per-rank conductance traces
                gsyn_in = np.reshape(gsyn_in, (self.nhost,count))
                gsyn_in = sum(gsyn_in,0)
        else:
            gsyn_in = gsyn_in0
    self.barrier() # wait for other nodes
    # --- send first-cell voltage of each additional celltype to rank 0 ---
    if self.n_celltypes > 1:
        if self.id == 0: print "more than one celltype send voltage of first other cell to root"
        for n in range(1, self.n_celltypes):
            if self.use_pc == True:
                srclist = [None]*self.nhost
                if (self.n_borders[n] in self.gidlist[n]):
                    srclist[0] = np.array(self.rec_v[n])*mV
                destlist = self.pc.py_alltoall(srclist)
                if self.id == 0:
                    idx = [i for i, x in enumerate(destlist) if x is not None]
                    if len(idx) > 1: raise ValueError('Error, too many vectors sent, should be one at a time!')
                    voltage.append(np.array(destlist[idx[0]]))
            else:
                if self.id == 0:
                    if (self.n_borders[n] in self.gidlist[n]): # first node has it, do not wait to receive it!
                        v_temp = np.array(self.rec_v[n])*mV
                    else:
                        v_temp = np.zeros(len(voltage[0]))
                        self.comm.Recv([v_temp, MPI.DOUBLE], source = MPI.ANY_SOURCE, tag=int(sum(N)+33))
                    voltage.append(v_temp)
                else:
                    if self.n_borders[n] in self.gidlist[n]:
                        voltage = np.array(self.rec_v[n])*mV
                        self.comm.Ssend([voltage, MPI.DOUBLE], dest=0, tag=int(sum(N)+33))
    self.barrier() # wait for other nodes
    # --- gather per-cell synaptic output conductances (resampled to 1 ms) ---
    times = arange(0, time[-1], 1*ms)
    gsyns = []
    if self.called_syn_out_all == True:
        for n in range(self.n_celltypes):
            gsyns.append([])
            if self.use_pc == True:
                for i, gid in enumerate(self.global_gidlist[n]):
                    srclist = [None]*self.nhost
                    if gid in self.gidlist[n]: #only one node does this
                        a = np.array(self.cells[n][self.gidlist[n].index(gid)].record['gsyn'])
                        # pad, drop the first 1 ms (output delay), resample onto 'times'
                        c = np.zeros(int((1*ms)/self.dt))
                        temp = np.append(a, c).flatten()
                        temp = temp[int((1*ms)/self.dt):len(temp)+1]
                        gtemp = interp(times,time,temp)
                        srclist[0] = gtemp # send to root only
                    destlist = self.pc.py_alltoall(srclist)
                    if self.id == 0:
                        idx = [i for i, x in enumerate(destlist) if x is not None]
                        if len(idx) > 1: raise ValueError('Error, too many vectors sent, should be one at a time!')
                        gsyns[n].append(np.array(destlist[idx[0]]))
            else:
                for i, gid in enumerate(self.global_gidlist[n]):
                    if self.id == 0:
                        if gid in self.gidlist[n]:
                            a = np.array(self.cells[n][self.gidlist[n].index(gid)].record['gsyn'])
                            c = np.zeros(int((1*ms)/self.dt))
                            temp = np.append(a, c).flatten()
                            temp = temp[int((1*ms)/self.dt):len(temp)+1]
                            gtemp = interp(times,time,temp)
                        else:
                            gtemp = np.zeros(len(times))
                            self.comm.Recv([gtemp, MPI.DOUBLE], source = MPI.ANY_SOURCE, tag=int(gid))
                        gsyns[n].append(np.array(gtemp))
                    else:
                        if gid in self.gidlist[n]:
                            a = np.array(self.cells[n][self.gidlist[n].index(gid)].record['gsyn'])
                            c = np.zeros(int((1*ms)/self.dt))
                            temp = np.append(a, c).flatten()
                            temp = temp[int((1*ms)/self.dt):len(temp)+1]
                            gtemp = interp(times,time,temp)
                            #np.array(self.cells[n][self.gidlist[n].index(gid)].record['gsyn'])
                            self.comm.Ssend([gtemp, MPI.DOUBLE], dest=0, tag=int(gid))
        if self.id == 0: print "root gathered synaptic output conductance"
    self.barrier() # wait for other nodes
    # --- gather STDP weight traces (resampled to 10 ms) ---
    times = arange(0, time[-1], 10*ms)
    w_mat = []
    winh_mat = []
    if self.stdp_used == True:
        for n in range(self.n_celltypes):
            w_mat.append([])
            for i, gid in enumerate(self.global_gidlist[n]):
                if self.id == 0:
                    wall = []
                    if gid in self.gidlist[n]:
                        walltemp = self.cells[n][self.gidlist[n].index(gid)].record['w']
                        if len(walltemp) > 0:
                            for l in range(len(walltemp)):
                                wtemp = np.array(walltemp[l])
                                wtemp = interp(times,time,wtemp)
                                wall.append(wtemp)
                    else:
                        # receive until the -1 sentinel vector terminates the stream
                        while 1:
                            wtemp = np.zeros(len(times))
                            self.comm.Recv([wtemp, MPI.DOUBLE], source = MPI.ANY_SOURCE, tag=int(gid))
                            if wtemp[0] == -1:
                                break
                            else:
                                wall.append(wtemp)
                    w_mat[n].append(wall)
                else:
                    if gid in self.gidlist[n]:
                        walltemp = self.cells[n][self.gidlist[n].index(gid)].record['w']
                        if len(walltemp) > 0:
                            for l in range(len(walltemp)):
                                wtemp = np.array(walltemp[l])
                                wtemp = interp(times,time,wtemp)
                                self.comm.Ssend([wtemp, MPI.DOUBLE], dest=0, tag=int(gid))
                        # terminate this gid's stream with a -1 sentinel
                        wtemp = np.ones(len(times))*-1
                        self.comm.Ssend([wtemp, MPI.DOUBLE], dest=0, tag=int(gid))
        if self.id == 0:
            print "root gathered synaptic input conductance"
        self.barrier() # wait for other nodes
        # same protocol for the inhibitory weights
        for n in range(self.n_celltypes):
            winh_mat.append([])
            for i, gid in enumerate(self.global_gidlist[n]):
                if self.id == 0:
                    wall = []
                    if gid in self.gidlist[n]:
                        walltemp = self.cells[n][self.gidlist[n].index(gid)].record['w_inh']
                        if len(walltemp) > 0:
                            for l in range(len(walltemp)):
                                wtemp = np.array(walltemp[l])
                                wtemp = interp(times,time,wtemp)
                                wall.append(wtemp)
                    else:
                        while 1:
                            wtemp = np.zeros(len(times))
                            self.comm.Recv([wtemp, MPI.DOUBLE], source = MPI.ANY_SOURCE, tag=int(gid))
                            if wtemp[0] == -1:
                                break
                            else:
                                wall.append(wtemp)
                    winh_mat[n].append(wall)
                else:
                    if gid in self.gidlist[n]:
                        walltemp = self.cells[n][self.gidlist[n].index(gid)].record['w_inh']
                        if len(walltemp) > 0:
                            for l in range(len(walltemp)):
                                wtemp = np.array(walltemp[l])
                                wtemp = interp(times,time,wtemp)
                                self.comm.Ssend([wtemp, MPI.DOUBLE], dest=0, tag=int(gid))
                        wtemp = np.ones(len(times))*-1
                        self.comm.Ssend([wtemp, MPI.DOUBLE], dest=0, tag=int(gid))
        if self.id == 0:
            print "root gathered synaptic input conductance"
        self.barrier() # wait for other nodes
    # --- sort spikes, apply jitter/shift, build population rate histogram ---
    t_all_vec_vec = []
    id_all_vec_vec = []
    f_cells_mean = []
    if self.id == 0: # only for first node
        for n in range(self.n_celltypes):
            ie = argsort(t_all_vec[n])
            t_all_vec_vec.append( t_all_vec[n][ie] )
            id_all_vec_vec.append( id_all_vec[n][ie].astype(int) ) #
        print "all spikes have been sorted"
        if self.jitter > 0: # add jitter!
            np.random.seed(40)
            x = np.random.normal(0, self.jitter, len(t_all_vec_vec[self.a_celltype[0]]))
            t_all_vec_vec[self.a_celltype[0]] = t_all_vec_vec[self.a_celltype[0]] + x
        if self.delta_t > 0:
            t_all_vec_vec[self.a_celltype[0]] = t_all_vec_vec[self.a_celltype[0]] + self.delta_t
        gsyn = zeros(len(freq_times))
        if 'gsyn_in' in self.method_interpol:
            pass
        else:
            bvec = ["syn" in st for st in self.method_interpol]
            if np.any(bvec):
                if (not hasattr(self, 'passive_target')) | (self.jitter > 0): # if not already done in neuron via artificial cell
                    # reconstruct gsyn by convolving the spike histogram with a synaptic kernel
                    [resp, _] = neuronpy.util.spiketrain.get_histogram(t_all_vec_vec[self.a_celltype[0]], bins = freq_times)
                    resp = np.concatenate((zeros(1),resp))
                    Ksyn = syn_kernel(arange(0,10*self.syn_tau2,self.bin_width), self.syn_tau1, self.syn_tau2)
                    Ksyn = np.concatenate((zeros(len(Ksyn)-1),Ksyn))
                    gsyn = np.convolve(Ksyn, resp, mode='same')
                    print "Generated gsyn by convolution with Ksyn"
                    self.nc_delay = 0
                else:
                    gsyn = interp(freq_times,time,np.array(self.rec_g))
        spike_freq = np.zeros(len(freq_times))
        for j in self.a_celltype:
            #plt.figure('results_voltage')
            #ax99 = plt.subplot(2,1,1)
            #ax99.plot(time,voltage[j])
            #plt.text(0.5, 1.1, r'CF=' + str(round(fmean,1)) + ',fmax=' + str(round(fmax,1)) + ',fmstd=' + str(round(fmstd,1)), transform=ax99.transAxes, fontsize=10, va='center', ha='center')
            #plt.savefig("./figs/Pub/Voltage_" + str(self.pickle_prefix) + "_cell" + str(j) + "_N" + str(self.N[j]) + ".pdf", dpi = 300, transparent=True) # save it
            #plt.show()
            #plt.clf()
            [num_spikes, _] = neuronpy.util.spiketrain.get_histogram(t_all_vec_vec[j], bins = freq_times)
            if isinstance(self.factor_celltype[j], ( int, long ) ):
                f = self.factor_celltype[j]
            else:
                f = self.factor_celltype[j][0]
            spike_freq = spike_freq + f * np.concatenate((zeros(1),num_spikes)) / self.bin_width
    self.barrier() # wait for other nodes
    #figure('1')
    #plot(time,np.array(self.rec_s1),'b', time,np.array(self.rec_s2),'r')
    #plt.show()
    return {'time':time, 'voltage':voltage, 'current':current, 'fmean':fmean, 'f_cells_mean':f_cells_mean,
        'gsyn':gsyn, 'freq_times':freq_times, 'spike_freq':spike_freq, 'gsyn_in':gsyn_in, 'fmeanA':fmeanA, 'fmaxA':fmaxA, 'fmstdA':fmstdA, 'fcvmA':fcvmA, 'fstdmA':fstdmA, 'fbstdA':fbstdA,
        't_all_vec_vec':t_all_vec_vec, 'id_all_vec_vec':id_all_vec_vec, 'gsyns':gsyns, 'w_mat':w_mat, 'winh_mat':winh_mat, 'fmax':fmax, 'fmstd':fmstd, 'fcvm':fcvm, 'fbaseA':fbaseA, 'fbase':fbase}
def clean(self):
    """Tear down the parallel runtime for this population.

    Returns the worker processes from their worker loop and then
    releases the bulletin-board/parallel context (presumably NEURON's
    ParallelContext -- confirm against the class constructor).
    Must be called once at the end of a parallel session.
    """
    parallel_context = self.pc
    parallel_context.runworker()
    parallel_context.done()
def compute_Transfer(self, stimulus, spike_freq, freq_times, t, noise_data_points, gsyn, gsyn_in, do_csd, t_qual, K_mat_old, t_startstop, inh_factor=[1]):
    """Build the effective (summed) input signal and delegate to the
    module-level compute_Transfer() routine.

    The excitatory and inhibitory noise channels listed in
    self.syn_ex_dist / self.syn_inh_dist are combined (weighted by
    inh_factor) into a single signal stimulus0, which is the linear
    input the reconstruction should recover.  The heavy lifting
    (spectra, kernels, quality measures) happens in the global
    compute_Transfer function called at the end -- NOT a recursive
    call to this method.

    NOTE: inh_factor is a mutable default argument; callers in this
    file always pass their own list, so the shared-default pitfall is
    not triggered here, but be careful when adding new call sites.
    """
    stimulus0 = np.zeros(len(stimulus[0]))
    for a in self.a_celltype:
        # sum input to produce linear input that should be reconstructed!
        # NOTE(review): any(...) returns a bool, so "any(...) > 0" is
        # effectively truth-testing the distribution lists -- confirm
        # this matches the intended "both ex and inh present" check.
        if (any(self.syn_inh_dist) > 0) and (any(self.syn_ex_dist) > 0):
            if max(self.syn_inh_dist) == max(self.syn_ex_dist): # same signal through ex and inh
                print "inh_factor = [0,1]"
                inh_factor = [0,1]
        # Channel indices are 1-based; index 0 means "no channel".
        for ni in self.syn_ex_dist[a]:
            if ni != 0:
                stimulus0 += inh_factor[ni-1] * stimulus[ni-1]
                print "+ex:", ni-1
        for ni in self.syn_inh_dist[a]:
            if ni != 0:
                stimulus0 -= inh_factor[ni-1] * stimulus[ni-1] #old: +nemax
                print "-inh:", ni-1 #old: +nemax
    # No synapses configured at all => the stimulus was injected as
    # current; the first channel is the signal itself.
    if (max(self.n_syn_ex) == 0) and (max(self.n_syn_inh) == 0):
        stimulus0 += stimulus[0]
        print "current"
    #if self.n_syn_ex[self.celltype_syn[0]] == 0:
    #    stimulus0 += stimulus[0]
    # amplitude should not matter since filter amplitude is simply adjusted
    #stimulus = stimulus0 #/len(self.syn_ex_dist)
    # Normalize to a fixed amplitude; `std` comes from the file's
    # numeric star import (presumably numpy/pylab -- confirm).
    stimulus0 = stimulus0 / std(stimulus0) / 2
    # linear interpolation inside compute_Transfer !!!
    print "max(stimulus0):",max(stimulus0)
    # Delegate to the GLOBAL compute_Transfer (same name, different
    # scope) which performs the actual transfer-function estimation.
    results = compute_Transfer(spike_freq = spike_freq, freq_times = freq_times,
        stimulus = stimulus0, t = t, noise_data_points = noise_data_points, gsyn = gsyn, gsyn_in = gsyn_in, do_csd = do_csd, t_kernel = 1*s,
        method_interpol = self.method_interpol, nc_delay = self.nc_delay, w_length = 3, t_qual = t_qual, K_mat_old = K_mat_old, t_startstop = t_startstop, give_psd = self.give_psd) # freq_wp not defined, use all frequencies
    # TEST:
    #VAF = results.get('VAFf_mat')
    #freq_used = results.get('freq_used')
    #iend = mlab.find(freq_used >= self.xmax)[0]
    #err = 1-mean(VAF[1][0,1:iend-1])
    #print "err: ", err
    return results
def residuals_compute_Transfer(self, p, stimulus, spike_freq, freq_times, t, noise_data_points, gsyn, gsyn_in, do_csd, t_qual, K_mat_old, t_startstop, inh_factor):
inh_factor_in = inh_factor[:]
ip = 0
for i, inhf in enumerate(inh_factor_in):
if inhf < 0:
inh_factor_in[i] = p[ip]
ip += 1
results = self.compute_Transfer(stimulus = stimulus, spike_freq = spike_freq, freq_times = freq_times,
t = t, noise_data_points = noise_data_points, gsyn = gsyn, gsyn_in = gsyn_in,
do_csd = do_csd, t_qual = t_qual, K_mat_old = K_mat_old, t_startstop = t_startstop, inh_factor = inh_factor_in)
VAF = results.get('VAFf_mat')
freq_used = results.get('freq_used')
iend = mlab.find(freq_used >= self.xmax)[0]
err = 1-mean(VAF[1][0,0:iend])
print "inh_factor:", inh_factor_in, "err: ", err
return err
#@profile
def fun_cnoise_Stim(self, t_stim = 10*s, sexp = 0, cutf = 0, do_csd = 1, t_qual = 0, freq_used = np.array([]), K_mat_old = np.array([]), inh_factor = [1], onf = None, equi = 0):
    """
    Stimulate cell with colored noise
    sexp = spectral exponent: Power ~ 1/freq^sexp
    cutf = frequency cutoff: Power flat (white) for freq <~ cutf
    do_csd = 1: use cross spectral density function for computation

    Additional parameters (as used below):
    t_qual     -- length (s) of the stretch used for reconstruction quality
    freq_used  -- if non-empty, use multi-sine stimulation at these
                  frequencies instead of colored noise
    K_mat_old  -- previously computed kernel matrix to reuse
    inh_factor -- per-channel signal weights; negative entries are fitted
                  via fmin / residuals_compute_Transfer
    onf, equi  -- passed to create_colnoise / trigger state equilibration

    Runs the population simulation (or loads pickled results when
    self.do_run is false and the file exists), estimates the transfer
    function via self.compute_Transfer, pickles and optionally plots
    the outcome.  MPI-aware: rank 0 (self.id == 0) does the analysis
    and I/O; all ranks meet at self.barrier() calls and return the
    results dict (empty on non-root ranks in the load path).
    """
    self.barrier() # wait for other nodes

    filename = str(self.pickle_prefix) + "_results_pop_cnoise.p"
    filepath = self.data_dir + "/" + filename

    if self.id == 0: print "- filepath:", filepath

    if self.do_run or (os.path.isfile(filepath) is False):

        tstart = 0;
        fs = 1 / self.dt # sampling rate
        fmax = fs / 2 # maximum frequency (nyquist)

        t_noise = arange(tstart, t_stim, self.dt) # create stimulus time vector, make sure stimulus is even!!!

        #print self.syn_ex_dist
        #print self.syn_inh_dist
        #exit()

        # Fill in default synapse->noise-channel mappings when not set:
        # every excitatory synapse listens to channel 1, no inhibition.
        if (self.syn_ex_dist == []):
            for nt in range(self.n_celltypes): # loop over all cells
                #print "nt", nt
                if hasattr(self.cells[nt][0], 'input_vec'):
                    self.syn_ex_dist.append([1] * len(self.cells[nt][0].input_vec)) # default ex for all by default!!!
                else:
                    self.syn_ex_dist.append([1] * self.n_syn_ex[nt]) # default ex for all by default!!!

        #print self.syn_ex_dist

        if (self.syn_ex_dist[0] == []):
            nemax = 1
        else:
            nemax = max([item for sublist in self.syn_ex_dist for item in sublist])

        if (self.syn_inh_dist == []): # and (any(self.n_syn_inh) > 0)
            for nt in range(self.n_celltypes): # loop over all cells
                self.syn_inh_dist.append([0] * self.n_syn_inh[nt]) # default no inh for all by default!!!

        #print self.syn_inh_dist
        #exit()

        if (self.syn_inh_dist[0] == []):
            nimax = 0
        else:
            nimax = max([item for sublist in self.syn_inh_dist for item in sublist])

        #print "self.syn_inh_dist, self.syn_ex_dist", self.syn_inh_dist, self.syn_ex_dist

        n_noise = max([nemax,nimax]) # number of noise sources
        #print n_noise,nemax,nimax

        # create reproduceable input
        # Rank 0 generates each noise channel and broadcasts it so every
        # MPI rank works on the identical signal.
        noise_data = []
        for nj in range(n_noise):
            if self.id == 0: # make sure all have the same signal !!!
                if len(freq_used) == 0:
                    noise_data0 = create_colnoise(t_noise, sexp, cutf, self.seed+nj, onf = onf)
                else:
                    noise_data0, _, _, _ = create_multisines(t_noise, freq_used) # create multi sine signal
            else:
                noise_data0 = np.empty(len(t_noise), dtype=np.float64)
            noise_data0 = self.broadcast(noise_data0, fast = True)
            noise_data.append(noise_data0)
        noise_data0 = []

        noise_data_points = len(noise_data[0])

        # Create signal weight vector inh_factor if it is not fully given
        if len(noise_data) > len(inh_factor):
            inh_factor = [inh_factor[0]] * len(noise_data)
        print "inh_factor:", inh_factor

        #if equi:
            #pass
            # tstop = t_stim

        if max(self.n_syn_ex) == 0: # this means current input
            self.set_IStim() # sets amp

            if self.fluct_s != []:
                if self.fluct_s[self.a_celltype[0]] > 0:
                    if self.id == 0: print "- adding i fluct"
                    self.connect_fluct()

            for i, m in enumerate(self.method_interpol):
                if "syn" in m: self.method_interpol[i] = "syn " + str(self.syn_tau1/ms) + "/" + str(self.syn_tau2/ms) + "ms"
                if "bin" in m: self.method_interpol[i] = "bin " + str(self.bin_width/ms) + "ms"

            stimulus = []
            for nj in range(len(noise_data)):
                stimulus0, t, t_startstop = construct_Stimulus(noise_data[nj], fs, self.amp[self.a_celltype[0]], ihold = 0, delay_baseline = self.delay_baseline) # , tail_points = 0
                stimulus.append(stimulus0)
            tstop = t[-1]

            self.set_IPlay2(stimulus, t)
            if self.id == 0: print "- starting colored noise transfer function estimation! with amp = " + str(np.round(self.amp[self.a_celltype[0]],4)) + ", ihold = " + str(np.round(self.ihold[self.a_celltype[0]],4)) + ", ihold_sigma = " + str(np.round(self.ihold_sigma,4)) + ", dt = " + str(self.dt) + " => maximum frequency = " + str(fmax) + "\r"

        else:
            # Synaptic input path: play the noise through synapses.
            self.give_freq = False
            ihold = self.set_i(self.ihold) # just sets amp, ihold should not change!

            if 'gsyn_in' not in self.method_interpol:
                pass
            else:
                self.g_syn_ex = [1]*len(self.N)

            if ((self.fluct_g_e0 != []) or (self.fluct_g_i0 != [])):
                if ((self.fluct_g_e0[self.a_celltype[0]] > 0) or (self.fluct_g_i0[self.a_celltype[0]] > 0)):
                    if self.id == 0: print "- adding g fluct"
                    self.connect_gfluct(E_i=-65)

            stimulus = []
            for nj in range(len(noise_data)):
                stimulus0, t, t_startstop = construct_Stimulus(noise_data[nj], fs, amp=1, ihold = 0, tail_points = 0, delay_baseline = self.delay_baseline) # self.amp
                stimulus.append(stimulus0)

            noise_data = []
            tstop = t[-1]

            if self.N[self.a_celltype[0]] > 1:
                self.set_IStim(ihold = [0]*self.n_celltypes, ihold_sigma = [0]*self.n_celltypes, random_start = True, tstart_offset = 1)
                if self.id == 0: print "- add random start"

            #print "Enter Synplay()"
            self.set_SynPlay(stimulus, t, t_startstop = t_startstop)
            #print "Exit Synplay()"

            if self.id == 0: print "- starting colored noise transfer function estimation with synaptic input! with amp = " + str(np.round(self.amp,4)) + ", ihold = " + str(np.round(self.ihold,4)) + ", ihold_sigma = " + str(np.round(self.ihold_sigma,4)) + ", dt = " + str(self.dt) + " => maximum frequency = " + str(fmax) + "\r"

        # Pre-initialize every result container so that non-root ranks
        # (which skip the analysis) can still build the results dict.
        amp_vec = []
        mag_vec = []
        pha_vec = []
        freq_used = []
        ca = []
        SNR_mat = []
        VAFf_mat = []
        Qual_mat = []
        CF_mat = []
        VAF_mat = []
        stim = []
        stim_re_mat = []
        resp_mat = []
        current_re = []
        ihold1 = []
        tk = []
        K_mat = []
        gsyn_in = []
        fmean = []
        fmax = []
        fmstd = []
        fcvm = []
        fmeanA = []
        fmaxA = []
        fmstdA = []
        fcvmA = []
        t_all_vec_input_sorted = []
        id_all_vec_input_sorted = []

        # Sort the recorded input spike trains by time (root only).
        if (self.id == 0) and (max(self.n_syn_ex) > 0):
            print range(self.n_celltypes), np.shape(self.t_all_vec_input)
            for l in range(self.n_celltypes):
                ie = argsort(self.t_all_vec_input[l])
                t_all_vec_input_sorted.append( self.t_all_vec_input[l][ie] )
                id_all_vec_input_sorted.append( self.id_all_vec_input[l][ie].astype(int) )

        #if (self.id == 0):
        #    print self.g_syn_ex
        #    print np.array(self.g_syn_ex)>= 0
        #print "g_syn_ex:",self.g_syn_ex

        # g_syn_ex >= 0 means the simulation is actually run; a negative
        # value means "analyse the generated input only" (else branch).
        if np.array(np.array(self.g_syn_ex)>= 0).any():
            if hasattr(self.cells[self.a_celltype[0]][0], 'get_states') and equi:
                # Equilibration run: save cell state keyed by an md5 of
                # the cell construction code so it can be reloaded later.
                print "- Equilibrate!"
                self.run(tstop, do_loadstate = False)
                m = md5.new()
                cell_exe_new = self.cell_exe[0]
                m.update(cell_exe_new)
                filename = './states_' + self.celltype[0] + '_' + m.hexdigest() + '_Population.b'
                self.cells[self.a_celltype[0]][0].get_states(filename)
            else:
                self.run(tstop, do_loadstate = False)

            i_startstop = []

            results = self.get(t_startstop, i_startstop)
            time = results.get('time')
            current = results.get('current')
            voltage = results.get('voltage')
            fmean = results.get('fmean')
            gsyn = results.get('gsyn')
            freq_times = results.get('freq_times')
            spike_freq = results.get('spike_freq')
            t_all_vec_vec = results.get('t_all_vec_vec')
            id_all_vec_vec = results.get('id_all_vec_vec')
            gsyns = results.get('gsyns')
            gsyn_in = results.get('gsyn_in')
            fmax = results.get('fmax')
            fmstd = results.get('fmstd')
            fcvm = results.get('fcvm')
            fmeanA = results.get('fmeanA')
            fmaxA = results.get('fmaxA')
            fmstdA = results.get('fmstdA')
            fcvmA = results.get('fcvmA')
            fbaseA = results.get('fbaseA')
            fbase = results.get('fbase')
            fbstdA = results.get('fbstdA')

        else: # do not run, analyse input!!!

            time = t
            voltage = []
            for l in range(self.n_celltypes):
                voltage.append(np.zeros(len(t)))
            current = []

            freq_times = []
            spike_freq = []
            gsyn = []
            gsyn_in = []

            t_all_vec_vec = []
            id_all_vec_vec = []

            fmean = []
            fmax = []
            fmstd = []
            fcvm = []
            fstdm = []

            fmeanA = []
            fmaxA = []
            fmstdA = []
            fcvmA = []
            fbaseA = []
            fbase = []
            fbstdA = []

            if self.id == 0:

                current = self.n_train_ex

                #t_all_vec = self.t_all_vec_input
                #id_all_vec = self.id_all_vec_input
                #ie = argsort(t_all_vec)
                #t_all_vec_vec.append( t_all_vec[ie] )
                #id_all_vec_vec.append( id_all_vec[ie].astype(int) )

                t_all_vec_vec = t_all_vec_input_sorted
                id_all_vec_vec = id_all_vec_input_sorted

                freq_times = arange(0, tstop, self.bin_width)
                spike_freq = np.zeros(len(freq_times))

                for j in self.a_celltype:

                    [num_spikes, _] = neuronpy.util.spiketrain.get_histogram(t_all_vec_vec[j], bins = freq_times)

                    if self.tau2_ex[0] > 0:
                        # Emulate the synaptic filtering of the rate by
                        # convolving the PSTH with the synapse kernel.
                        spike_freq = np.concatenate((zeros(1),num_spikes))
                        print "NOSYN TEST: start convolution with Ksyn"
                        Ksyn = syn_kernel(arange(0,10*self.tau2_ex[0],self.bin_width), self.tau1_ex[0], self.tau2_ex[0])
                        Ksyn = np.concatenate((zeros(len(Ksyn)-1),Ksyn))
                        spike_freq = np.convolve(Ksyn, spike_freq, mode='same')
                        print "NOSYN TEST: convolution finished"
                    else:
                        if isinstance(self.factor_celltype[j], ( int, long ) ):
                            f = self.factor_celltype[j]
                        else:
                            f = self.factor_celltype[j][0]
                        spike_freq = spike_freq + f * np.concatenate((zeros(1),num_spikes)) / self.bin_width

                fmean.append(self.fmean_input)
                fmax.append(self.fmax_input)
                fmstd.append(self.fmstd_input)
                fcvm.append(self.fcvm_input)
                fstdm.append(self.fstdm_input)

                if self.no_fmean == True:
                    fmean.append(ihold)

                #plt.figure('spike_freq')
                #plt.plot(freq_times, spike_freq)
                #plt.savefig("./figs/Pub/Spike_freq_" + str(self.pickle_prefix) + ".pdf", dpi = 300, transparent=True) # save it
                #plt.clf()

                fmeanA = fmean[0]
                fmaxA = fmax[0]
                fmstdA = fmstd [0]
                fcvmA = fcvm[0]
                fstdmA = fstdm[0]

        if self.id == 0:

            # If any inh_factor entry is negative it is a free parameter:
            # fit it by minimizing residuals_compute_Transfer with fmin
            # (presumably scipy.optimize.fmin -- confirm import).
            if any([i<0 for i in inh_factor]):

                p0 = []
                inhf_idx = []
                for i, inhf in enumerate(inh_factor):
                    if inhf < 0:
                        p0.append(0)
                        inhf_idx.append(i)

                plsq = fmin(self.residuals_compute_Transfer, p0, args=(stimulus, spike_freq, freq_times, t, noise_data_points, gsyn, gsyn_in, do_csd, t_qual, K_mat_old, t_startstop, inh_factor))
                p = plsq

                ip = 0
                for i in inhf_idx:
                    inh_factor[i] = p[ip]
                    ip += 1

                print "Final inh_factor: ", inh_factor

            results = self.compute_Transfer(stimulus, spike_freq = spike_freq, freq_times = freq_times,
                t = t, noise_data_points = noise_data_points, gsyn = gsyn, gsyn_in = gsyn_in,
                do_csd = do_csd, t_qual = t_qual, K_mat_old = K_mat_old, t_startstop = t_startstop, inh_factor=inh_factor)

            mag_vec, pha_vec, ca, freq, freq_used, fmean_all = results.get('mag_mat'), results.get('pha_mat'), results.get('ca_mat'), results.get('freq'), results.get('freq_used'), results.get('fmean')
            SNR_mat, VAFf_mat, Qual_mat, CF_mat, VAF_mat = results.get('SNR_mat'), results.get('VAFf_mat'), results.get('Qual_mat'), results.get('CF_mat'), results.get('VAF_mat')
            stim, resp_mat, stim_re_mat, tk, K_mat = results.get('stim'), results.get('resp_mat'), results.get('stim_re_mat'), results.get('tk'), results.get('K_mat')

        self.barrier() # wait for other nodes

        if self.id == 0:

            if t_qual > 0:
                #print t_startstop[0], t_startstop[0]/self.dt, (t_startstop[0]+t_qual)/self.dt
                # Cut the quality-check stretch out of the current trace
                # and trim the kernel length off both ends.
                current_re = current[int(t_startstop[0]/self.dt):int((t_startstop[0]+t_qual)/self.dt)]
                current_re = current_re[int(len(K_mat[self.a_celltype[0]])):int(len(current_re))-int(len(K_mat[self.a_celltype[0]]))]

            if len(self.i_holdrs) > 0:
                ihold1 = self.i_holdrs[self.a_celltype[0]][0]
            else:
                ihold1 = []

            for l in range(len(self.method_interpol)): # unwrap
                pha_vec[l,:] = unwrap(pha_vec[l,:] * (pi / 180)) * (180 / pi) # unwrap for smooth phase

            # only return fraction of actual signal, it is too long!!!
            if time[-1] > self.tmax:
                imax = -1*int(self.tmax/self.dt)
                time = time[imax:]; current = current[imax:]; gsyn = gsyn[imax:]; gsyn_in = gsyn_in[imax:]
                for n in range(self.n_celltypes):
                    voltage[n] = voltage[n][imax:]

            if freq_times != []:
                if freq_times[-1] > self.tmax:
                    imax2 = where(freq_times > self.tmax)[0][0] # for spike frequency
                    freq_times = freq_times[0:imax2]; spike_freq = spike_freq[0:imax2]

            bvec = ["_syn" in st for st in self.method_interpol]
            if np.any(bvec):
                # normalize synaptic integration with others
                mag_vec[1,:]= mag_vec[0,0]*mag_vec[1,:]/mag_vec[1,0]

        if self.id == 0: print "start pickle"

        results = {'freq_used':freq_used, 'amp':amp_vec,'mag':mag_vec,'pha':pha_vec,'ca':ca,'voltage':voltage,'tk':tk,'K_mat':K_mat, 'ihold1': ihold1, 't_startstop':t_startstop, #'stimulus':stimulus,
            'current':current,'t1':time,'freq_times':freq_times,'spike_freq':spike_freq, 'stim':stim, 'stim_re_mat':stim_re_mat, 'resp_mat':resp_mat, 'current_re':current_re, 'gsyn_in':gsyn_in, 'fmeanA':fmeanA, 'fmaxA':fmaxA, 'fmstdA':fmstdA, 'fcvmA':fcvmA, 'fbaseA':fbaseA, 'fbase':fbase, 'fbstdA':fbstdA,
            'fmean':fmean,'method_interpol':self.method_interpol, 'SNR':SNR_mat, 'VAF':VAFf_mat, 'Qual':Qual_mat, 'CF':CF_mat, 'VAFs':VAF_mat, 'fmax':fmax, 'fmstd':fmstd, 'fcvm':fcvm, 'inh_factor':inh_factor, 't_all_vec_vec':t_all_vec_vec, 'id_all_vec_vec':id_all_vec_vec}

        if self.id == 0:

            if self.dumpsave == 1:
                pickle.dump( results, gzip.GzipFile( filepath, "wb" ) )
                print "pickle done"

            if self.plot_train:

                for a in self.a_celltype:

                    #i_start = mlab.find(t_all_vec_vec[a] >= 0)[0]
                    #i_stop = mlab.find(t_all_vec_vec[a] >= 5)[0]
                    #t_all_cut = t_all_vec_vec[a][i_start:i_stop]
                    #id_all_cut = id_all_vec_vec[a][i_start:i_stop]

                    t_all_cut = t_all_vec_vec[a]
                    id_all_cut = id_all_vec_vec[a]

                    # Raster plot of the first 10 s of spiking.
                    f_start_in = mlab.find(t_all_cut >= 0)
                    f_stop_in = mlab.find(t_all_cut <= 10)
                    f_start = f_start_in[0]
                    f_stop = f_stop_in[-1]+1
                    use_spikes = t_all_cut[f_start:f_stop]
                    use_id = id_all_cut[f_start:f_stop]

                    plt.figure('results_train')
                    ax99 = plt.subplot(1,1,1)
                    ax99.plot(use_spikes,use_id,'|', ms=2)
                    # NOTE(review): fmean/fmax/fmstd are lists here, so
                    # round(fmean,1) may raise TypeError -- confirm.
                    plt.text(0.5, 1.1, r'CF=' + str(round(fmean,1)) + ',fmax=' + str(round(fmax,1)) + ',fmstd=' + str(round(fmstd,1)), transform=ax99.transAxes, fontsize=10, va='center', ha='center')
                    plt.savefig("./figs/Pub/Train_" + str(self.pickle_prefix) + "_cell" + str(a) + "_N" + str(self.N[a]) + ".pdf", dpi = 300, transparent=True) # save it
                    plt.clf()

                    if len(t_all_cut) > 0:
                        # Population PSTH in 100 ms bins.
                        tbin = 100*ms
                        tb = np.arange(0,t[-1],tbin)
                        [all_rate, _] = neuronpy.util.spiketrain.get_histogram(t_all_cut, bins = tb)
                        all_rate = np.concatenate((np.zeros(1),all_rate)) / self.N[a] / tbin

                        plt.figure('results_train2')
                        plt.plot(tb,all_rate)
                        plt.savefig("./figs/Pub/PSTH_" + str(self.pickle_prefix) + "_cell" + str(a) + "_N" + str(self.N[a]) + ".pdf", dpi = 300, transparent=True) # save it
                        plt.clf()

                    plt.figure('results_noise')
                    plt.plot(time,current)
                    plt.savefig("./figs/Pub/Noise_" + str(self.pickle_prefix) + "_cell" + str(a) + "_N" + str(self.N[a]) + ".pdf", dpi = 300, transparent=True) # save it
                    plt.clf()

            if self.plot_input:

                if len(t_all_vec_input_sorted[0]) > 0:
                    # Raster of the first 5 s of the generated input trains.
                    i_start = mlab.find(t_all_vec_input_sorted[0] >= 0)[0]
                    i_stop = mlab.find(t_all_vec_input_sorted[0] >= 5)[0]
                    t_all_cut = t_all_vec_input_sorted[0][i_start:i_stop]
                    id_all_cut = id_all_vec_input_sorted[0][i_start:i_stop]

                    plt.figure('results_input')
                    ax99 = plt.subplot(1,1,1)
                    ax99.plot(t_all_cut,id_all_cut,'|', ms=2)
                    plt.text(0.5, 1.1, r'fmean=' + str(round(self.fmean_input,1)) + ',fmax=' + str(round(self.fmax_input,1)) + ',fmstd=' + str(round(self.fmstd_input,1)) + ',fcvm=' + str(round(self.fcvm_input,1)) + ',fstdm=' + str(round(self.fstdm_input,1)), transform=ax99.transAxes, fontsize=10, va='center', ha='center')
                    plt.savefig("./figs/Pub/Input_" + str(self.pickle_prefix) + "_N" + str(self.N[self.a_celltype[0]]) + ".pdf", dpi = 300, transparent=True) # save it
                    plt.clf()

    else:
        # Load path: results were pickled by a previous run.

        if self.id == 0:

            results = pickle.load( gzip.GzipFile( filepath, "rb" ) )
            #print results
            #print {key:np.shape(value) for key,value in results.iteritems()}

            if self.minimal_dir: # save only info needed for plot

                print {key:np.shape(value) for key,value in results.iteritems()}

                # Strip the heavy arrays that the corresponding figure
                # does not need before re-saving into minimal_dir.
                if "Fig6_pop_transfer_grc_syngr_nsyn4_cn_a1_noisesynlow_inhlow_adjfinh_varih_N100_CFo6.0_results_pop_cnoise.p" in filename:
                    results['ca'] = []
                    results['resp_mat'] = []
                    results['stim'] = []
                    results['current'] = []
                    results['tk'] = []
                    results['K_mat'] = []
                    results['freq_times'] = []
                    results['spike_freq'] = []
                    results['stim_re_mat'] = []
                    results['current_re'] = []
                    results['t_all_vec_vec'] = []
                    results['id_all_vec_vec'] = []
                    results['gsyn_in'] = []

                elif ("Fig8_pop_transfer_none_synno_cn_cutf30_a1_noisesynlow_ih20_varih_N100_CFo-1_results_pop_cnoise.p" in filename) \
                    or ("Fig8_pop_transfer_none_synno_cn_cutf30_a10_noisesynlow_ih20_varih_N100_CFo-1_results_pop_cnoise.p" in filename) \
                    or ("Fig8_pop_transfer_grc_syngr_nsyn4_cn_cutf30_a1_noisesynlow_inhlow_adjfinh_varih_varinhn_N100_CFo9.0_results_pop_cnoise.p" in filename) \
                    or ("Fig8_pop_transfer_grc_syngr_nsyn4_cn_cutf30_a10_noisesynlow_inhlow_adjfinh_varih_varinhn_N100_is0.14_CFo9.0_results_pop_cnoise.p" in filename) \
                    :
                    results['ca'] = []
                    results['resp_mat'] = []
                    results['current'] = []
                    results['tk'] = []
                    results['K_mat'] = []
                    results['voltage'] = []
                    results['current_re'] = []
                    results['t_all_vec_vec'] = []
                    results['id_all_vec_vec'] = []
                    results['t1'] = []
                    results['gsyn_in'] = []

                elif ("Fig8_pop_transfer_none_synno_cn_cutf30_a1_noisesynlow_ih20_varih_N50_twopop_CFo-1_results_pop_cnoise.p" in filename) \
                    or ("Fig8_pop_transfer_none_synno_cn_cutf30_a10_noisesynlow_ih20_varih_N50_twopop_CFo-1_results_pop_cnoise.p" in filename) \
                    or ("Fig8_pop_transfer_grc_syngr_nsyn4_cn_cutf30_a1_noisesynlow_inhlow_adjfinh_varih_varinhn_N50_twopop_CFo9.0_results_pop_cnoise.p" in filename) \
                    or ("Fig8_pop_transfer_grc_syngr_nsyn4_cn_cutf30_a10_noisesynlow_inhlow_adjfinh_varih_varinhn_N50_is0.14_twopop_CFo9.0_results_pop_cnoise.p" in filename) \
                    or ("Fig8_pop_transfer_grc_syngr_nsyn4_cn_cutf5_a1_noisesynlow_inhlow_adjfinh_varih_varinhn_N100_CFo14.0_results_pop_cnoise.p" in filename) \
                    or ("Fig8_pop_transfer_grc_syngr_nsyn4_cn_cutf5_a1_noisesynlow_inhlow_adjfinh_varih_varinhn_N50_twopop_CFo14.0_results_pop_cnoise.p" in filename) \
                    :
                    results['ca'] = []
                    results['resp_mat'] = []
                    results['current'] = []
                    results['tk'] = []
                    results['K_mat'] = []
                    results['voltage'] = []
                    results['current_re'] = []
                    results['t_all_vec_vec'] = []
                    results['id_all_vec_vec'] = []
                    results['t1'] = []
                    results['gsyn_in'] = []
                    results['freq_times'] = []
                    results['spike_freq'] = []

                elif ("Fig4_pop_transfer_grc_cn_addn100_N[100]_CF[40]_amod[1]_results_pop_cnoise.p" in filename) \
                    or ("Fig4_pop_transfer_grc_cn_addn1_N[100]_CF[40]_amod[1]_results_pop_cnoise.p" in filename) \
                    or ("Fig4b_pop_transfer_grc_lowcf_cn_twopop_N[50, 50]_CF[0.0055, 0.0055]_amod[None, None]_results_pop_cnoise.p" in filename) \
                    or ("Fig4b_pop_transfer_grc_lowcf_cn_N[100]_CF[0.0055]_amod[None]_results_pop_cnoise.p" in filename) \
                    or ("Fig4b_pop_transfer_grc_lowcf_slownoise_cn_twopop_N[50, 50]_CF[0.0051, 0.0051]_amod[None, None]_results_pop_cnoise.p" in filename) \
                    or ("Fig4b_pop_transfer_grc_lowcf_slownoise_cn_N[100]_CF[0.0051]_amod[None]_results_pop_cnoise.p" in filename) \
                    :
                    results['ca'] = []
                    results['resp_mat'] = []
                    results['current'] = []
                    results['tk'] = []
                    results['K_mat'] = []
                    results['voltage'] = []
                    results['t_all_vec_vec'] = []
                    results['id_all_vec_vec'] = []
                    results['t1'] = []
                    results['gsyn_in'] = []
                    results['freq_times'] = []
                    results['spike_freq'] = []

                elif ("Fig2_pop_transfer_" in filename) \
                    :
                    results['ca'] = []
                    results['resp_mat'] = []
                    results['current'] = []
                    results['t1'] = []
                    results['voltage'] = []
                    results['freq_times'] = []
                    results['spike_freq'] = []
                    results['current_re'] = []
                    results['t_all_vec_vec'] = []
                    results['id_all_vec_vec'] = []
                    results['gsyn_in'] = []

                else:
                    results['ca'] = []
                    results['resp_mat'] = []
                    results['stim'] = []
                    results['current'] = []
                    results['tk'] = []
                    results['K_mat'] = []
                    results['t1'] = []
                    results['voltage'] = []
                    results['freq_times'] = []
                    results['spike_freq'] = []
                    results['stim_re_mat'] = []
                    results['current_re'] = []
                    results['t_all_vec_vec'] = []
                    results['id_all_vec_vec'] = []
                    results['gsyn_in'] = []

                print {key:np.shape(value) for key,value in results.iteritems()}
                pickle.dump( results, gzip.GzipFile( self.minimal_dir + "/" + filename, "wb" ) )

        else:
            # Non-root ranks return an empty results skeleton.
            results = {'freq_used':[], 'amp':[],'mag':[],'pha':[],'ca':[],'voltage':[], 'tk':[],'K_mat':[], 'ihold1':[], 't_startstop':[], #'stimulus':[],
                'current':[],'t1':[],'freq_times':[],'spike_freq':[], 'stim':[], 'stim_re_mat':[], 'current_re':[], 'gsyn_in':[], 'fmeanA':[], 'fmaxA':[], 'fmstdA':[], 'fcvmA':[], 'fbaseA':[], 'fbase':[], 'fbstdA':[],
                'fmean':[],'method_interpol':self.method_interpol, 'SNR':[], 'VAF':[], 'Qual':[], 'CF':[], 'VAFs':[], 'fmax':[], 'fmstd':[], 'fcvm':[], 'inh_factor':[], 't_all_vec_vec':[], 'id_all_vec_vec':[]}

        if self.id == 0:

            if self.plot_train:

                for a in self.a_celltype:

                    t1 = results.get('t1')
                    voltage = results.get('voltage')
                    fmean = results.get('fmean')
                    fmax = results.get('fmax')
                    fmstd = results.get('fmstd')

                    if results.has_key('t_all_vec_vec'):
                        if len(results['t_all_vec_vec']) > 0:
                            t_all_vec_vec = results.get('t_all_vec_vec')
                            id_all_vec_vec = results.get('id_all_vec_vec')

                            t_all_cut = t_all_vec_vec[a]
                            id_all_cut = id_all_vec_vec[a]

                            f_start_in = mlab.find(t_all_cut >= 0)
                            f_stop_in = mlab.find(t_all_cut <= 10)
                            f_start = f_start_in[0]
                            f_stop = f_stop_in[-1]+1
                            use_spikes = t_all_cut[f_start:f_stop]
                            use_id = id_all_cut[f_start:f_stop]

                            plt.figure('results_train')
                            ax97 = plt.subplot(1,1,1)
                            ax97.plot(use_spikes,use_id,'|', ms=6)
                            plt.text(0.5, 1.1, r'CF=' + str(round(fmean,1)) + ',fmax=' + str(round(fmax,1)) + ',fmstd=' + str(round(fmstd,1)), transform=ax97.transAxes, fontsize=10, va='center', ha='center')
                            plt.savefig("./figs/Pub/Train_" + str(self.pickle_prefix) + "_cell" + str(a) + "_N" + str(self.N[a]) + ".pdf", dpi = 300, transparent=True) # save it

                    plt.figure('results_voltage')
                    ax99 = plt.subplot(2,1,1)
                    ax99.plot(t1,voltage[a])

                    # Regenerate a matching stimulus (fixed seed 50) so
                    # it can be shown next to the loaded voltage trace.
                    t_noise = arange(0, t_stim, self.dt)
                    noise_data = create_colnoise(t_noise, sexp, cutf, 50, onf = onf)
                    stimulus, t, t_startstop = construct_Stimulus(noise_data, 1/self.dt, amp=1, ihold = 0, tail_points = 0, delay_baseline = self.delay_baseline)

                    ax98 = plt.subplot(2,1,2)
                    ax98.plot(t[0:10/self.dt],stimulus[0:10/self.dt],color='k')

                    plt.text(0.5, 1.1, r'CF=' + str(round(fmean,1)) + ',fmax=' + str(round(fmax,1)) + ',fmstd=' + str(round(fmstd,1)), transform=ax99.transAxes, fontsize=10, va='center', ha='center')
                    plt.savefig("./figs/Pub/Voltage_" + str(self.pickle_prefix) + "_cell" + str(a) + "_N" + str(self.N[a]) + ".pdf", dpi = 300, transparent=True) # save it
                    plt.show()
                    plt.clf()

    if (self.id == 0) and (do_csd == 1):
        # Print the reconstruction quality summary for each
        # interpolation method.
        Qual = results.get('Qual')
        for i, ii in enumerate(self.method_interpol):
            print "\n[QUAL:] Interpol:", ii, "SNR0:", Qual[i,0,0], "SNR_cutff:", Qual[i,0,1], "SNR_mean:", Qual[i,0,2], "\n VAF0:", Qual[i,1,0], "VAF_cutff:", Qual[i,1,1], "VAF_mean:", Qual[i,1,2], "\n CF(subtracted):", Qual[i,2,0], "VAF(subtracted):", Qual[i,2,1]

        VAF = results.get('VAF')
        freq_used = results.get('freq_used')
        iend = mlab.find(freq_used >= self.xmax)[0]
        print 'm(VAF)=' + str(np.mean(VAF[1][0,0:iend]))

    self.barrier() # wait for other nodes

    return results
# def fun_ssine_Stim(self, freq_used = np.array([1, 10, 100, 1000])*Hz):
# """
# Compute impedance and/or transfer function using Single sine stimulation
# Only compute transfer function if there is a steady state (resting) firing rate!
# """
# self.barrier() # wait for other nodes
#
# filepath = "./data/" + str(self.pickle_prefix) + "_results_pop_ssine.p"
#
# if self.do_run or (os.path.isfile(filepath) is False):
#
# fs = 1 / self.dt # sampling rate
# fmax = fs / 2 # maximum frequency (nyquist)
#
# if self.id == 0: print "- starting single sine transfer function estimation! with amp = " + str(np.round(self.amp[a_celltype[0]],4)) + ", ihold = " + str(np.round(self.ihold[self.a_celltype[0]],4)) + ", dt = " + str(self.dt) + " => maximum frequency = " + str(fmax) + "\r"
#
# if max(self.n_syn_ex) == 0:
# self.set_IStim()
#
# if self.fluct_s != []:
# if self.fluct_s[self.a_celltype[0]] > 0:
# if self.id == 0: print "- adding i fluct"
# self.connect_fluct()
#
# for i, m in enumerate(self.method_interpol):
# if "syn" in m: self.method_interpol[i] = "syn " + str(self.syn_tau1/ms) + "/" + str(self.syn_tau2/ms) + "ms"
# if "bin" in m: self.method_interpol[i] = "bin " + str(self.bin_width/ms) + "ms"
#
# else:
# self.give_freq = False
# ihold = self.set_i(self.ihold) # just sets amp, ihold should not change!
#
# if ((self.fluct_g_e0 != []) or (self.fluct_g_i0 != [])):
# if ((self.fluct_g_e0[self.a_celltype[0]] > 0) or (self.fluct_g_i0[self.a_celltype[0]] > 0)):
# if self.id == 0: print "- adding g fluct"
# self.connect_gfluct(E_i=-65)
#
# #if ((self.fluct_std_e[self.a_celltype[0]] != []) or (self.fluct_std_i[self.a_celltype[0]] != [])):
# # if ((self.fluct_std_e[self.a_celltype[0]] > 0) or (self.fluct_std_i[self.a_celltype[0]] > 0)):
# # if self.id == 0: print "- adding g fluct"
# # self.connect_gfluct(E_i=-65)
#
# if 'gsyn_in' not in self.method_interpol:
# pass
# else:
# self.g_syn_ex = 1
#
#
# for i, fu in enumerate(freq_used):
#
# if self.id == 0: print "- single sine processing frequency = " + str(fu)
#
# t, stimulus, i_startstop, t_startstop = create_singlesine(fu = fu, amp = self.amp[a_celltype[0]], ihold = 0, dt = self.dt, periods = 20, minlength = 2*s, t_prestim = 1*s)
# tstop = t[-1]
#
# if i == 0: t_startstop_plot = t_startstop
#
# if max(self.n_syn_ex) == 0:
# self.set_IPlay(stimulus, t)
# else:
# self.set_SynPlay(stimulus, t)
#
# if self.g_syn_ex >= 0: # should also be true for current input!!!
#
# self.run(tstop)
#
# if i == 0: # do this here to have something to return
#
# # select first sinusoidal to plot, later
# voltage_plot = []
# current_plot = []
# time_plot = []
# freq_times_plot = []
# spike_freq_plot = []
# gsyn_plot = []
#
# # construct vectors
# amp_vec = zeros(len(freq_used)) # amplitude vector
# fmean_all = zeros(len(freq_used)) # mean firing frequency (all cells combined)
# fmean = zeros(len(freq_used)) # mean firing frequency (one cell)
# ca = zeros(len(freq_used), dtype=complex)
#
# # create matrix to hold all different interpolation methods:
# mag_vec = zeros((len(self.method_interpol),len(freq_used))) # magnitude vector
# pha_vec = zeros((len(self.method_interpol),len(freq_used))) # phase vector
# NI_vec = zeros((len(self.method_interpol),len(freq_used))) # NI vector
# VAF_vec = zeros((len(self.method_interpol),len(freq_used))) # VAF vector
#
# results = self.get(t_startstop, i_startstop) # t1 should be equal to t!!!
# time, voltage, current, fmean0, gsyn = results.get('time'), results.get('voltage'), results.get('current'), results.get('fmean'), results.get('gsyn')
# freq_times, spike_freq, t_all_vec_vec, id_all_vec_vec, gsyns = results.get('freq_times'), results.get('spike_freq'), results.get('t_all_vec_vec'), results.get('id_all_vec_vec'), results.get('gsyns')
#
# else:
#
# time = t
# voltage = []
# voltage.append(np.zeros(len(t)))
# current = stimulus
#
# freq_times = []
# spike_freq = []
# fmean0 = ihold
# gsyn = []
# gsyn_in = []
#
# t_all_vec_vec = []
# id_all_vec_vec = []
#
#
# if self.id == 0:
#
# t_all_vec = []
# t_all_vec.append([])
# t_all_vec[0] = np.concatenate(self.t_all_vec_input)
#
# id_all_vec = []
# id_all_vec.append([])
# id_all_vec[0] = np.concatenate(self.id_all_vec_input)
#
# ie = argsort(t_all_vec[0])
# t_all_vec_vec.append( t_all_vec[0][ie] )
# id_all_vec_vec.append( id_all_vec[0][ie].astype(int) ) #
#
#
# freq_times = arange(0, tstop, self.bin_width)
# [num_spikes, _] = neuronpy.util.spiketrain.get_histogram(t_all_vec_vec[0], bins = freq_times)
# spike_freq = np.concatenate((zeros(1),num_spikes)) / self.bin_width
#
#
# if self.id == 0:
#
# fmean[i] = fmean0[0]
#
# if i == 0:
#
# # select first sinusoidal to plot
# voltage_plot = voltage
# current_plot = current
# time_plot = time
# freq_times_plot = freq_times
# spike_freq_plot = spike_freq
# gsyn_plot = gsyn
#
#
# for l in range(len(self.method_interpol)):
#
# if "bin" in self.method_interpol[l]:
#
# # binning and linear interpolation
# stimulus_signal = stimulus[i_startstop[0]:i_startstop[1]] # cut out relevant signal
# t_input_signal = t[i_startstop[0]:i_startstop[1]] - t[i_startstop[0]]
#
# spike_freq_interp = interp(t, freq_times, spike_freq, left=0, right=0) # interpolate to be eqivalent with input, set zero at beginning and end!
# freq_out_signal_interp = spike_freq_interp[i_startstop[0]:i_startstop[1]] # cut out relevant signal
# vamp, mag_vec[l,i], pha_vec[l,i], fmean_all[i], _ = get_magphase(stimulus_signal, t_input_signal, freq_out_signal_interp, t_input_signal, method = "fft", f = fu)
#
# results = est_quality(t_input_signal, fu, freq_out_signal_interp, self.amp[a_celltype[0]]*mag_vec[l,i], pha_vec[l,i]/ (180 / pi), fmean_all[i])
# NI_vec[l,i], VAF_vec[l,i] = results.get('NI'), results.get('VAF')
# print "-[bin] NI: " + str(NI_vec[l,i]) + ", VAF: " + str(VAF_vec[l,i])
#
# if "syn" in self.method_interpol[l]:
#
# # synaptic integration
# dt_out = t_input_signal[2] - t_input_signal[1]
# shift = self.nc_delay/dt_out # shift response by the nc delay to remove offset
# freq_out_signal_syn = gsyn[i_startstop[0]+shift:i_startstop[1]+shift] # cut out relevant signal
#
# vamp, mag_vec[l,i], pha_vec[l,i], fm, _ = get_magphase(stimulus_signal, t_input_signal, freq_out_signal_syn, t_input_signal, method = "fft", f = fu)
#
# results = est_quality(t_input_signal, fu, freq_out_signal_syn, self.amp[a_celltype[0]]*mag_vec[l,i], pha_vec[l,i]/ (180 / pi), fm)
# NI_vec[l,i], VAF_vec[l,i] = results.get('NI'), results.get('VAF')
# print "-[syn] NI: " + str(NI_vec[l,i]) + ", VAF: " + str(VAF_vec[l,i])
#
#
# self.barrier() # wait for other nodes
#
# #print "rest: " + str(vrest) + " freq_used:" + str(freq_used) + " amp_vec:" + str(amp_vec) + " mag_vec:" + str(mag_vec) + " pha_vec:" + str(pha_vec)
#
# if self.id == 0:
#
# for l in range(len(self.method_interpol)): # unwrap
# pha_vec[l,:] = unwrap(pha_vec[l,:] * (pi / 180)) * (180 / pi) # unwrap for smooth phase
#
# # only return fraction of actual signal, it is too long!!!
# if time_plot[-1] > self.tmax:
# imax = where(time_plot > self.tmax)[0][0] # for voltage, current and time
# time_plot = time_plot[0:imax]; current_plot = current_plot[0:imax]; gsyn_plot = gsyn_plot[0:imax]
# for n in range(self.n_celltypes):
# voltage_plot[n] = voltage_plot[n][0:imax]
#
# if freq_times_plot != []:
# if freq_times_plot[-1] > self.tmax:
# imax2 = where(freq_times_plot > self.tmax)[0][0] # for spike frequency
# freq_times_plot = freq_times_plot[0:imax2]; spike_freq_plot = spike_freq_plot[0:imax2]
#
# # normalize synaptic integration with with first magnitude, may by syn itself!
# bvec = ["syn" in st for st in self.method_interpol]
# if np.any(bvec):
# k = where(bvec)
# mag_vec[k,:]= mag_vec[0,0]*mag_vec[k,:]/mag_vec[k,0]
#
# NI_vec = (freq_used, NI_vec)
# VAF_vec = (freq_used, VAF_vec)
# results = {'freq_used':freq_used, 'amp':amp_vec,'mag':mag_vec,'pha':pha_vec,'ca':ca,'voltage':voltage_plot, 't_startstop':t_startstop_plot,
# 'current':current_plot,'t1':time_plot,'freq_times':freq_times_plot,'spike_freq':spike_freq_plot,
# 'fmean':mean(fmean),'method_interpol':self.method_interpol, 'NI':NI_vec, 'VAF':VAF_vec}
#
# if self.id == 0:
# pickle.dump( results, gzip.GzipFile( filepath, "wb" ) )
#
# else:
#
# if self.id == 0:
# results = pickle.load( gzip.GzipFile( filepath, "rb" ) )
# else:
# results = {'freq_used':[], 'amp':[],'mag':[],'pha':[],'ca':[],'voltage':[], 't_startstop':[],
# 'current':[],'t1':[],'freq_times':[],'spike_freq':[],
# 'fmean':[],'method_interpol':self.method_interpol,'NI':[],'VAF':[]}
#
# return results
def get_RC(self, opt_plot):
# Return passive membrane properties (rm, cm, taum) and IF spike parameters
# (Vreset, Vth, Vrest) used for analytical overlays in transfer-function
# plots. Only rank 0 computes real values; all other ranks return zeros
# and default thresholds so the collective call stays cheap.
if self.id == 0:
if "analytical" in opt_plot: # simplest case, only uses rm and tau, scaling necessary
exec self.cell_exe[self.a_celltype[0]]  # Py2 exec: binds a local `cell` from the cell-construction string
sim = Stimulation(cell, temperature = self.temperature)
rm, cm, taum = sim.get_RCtau()
else:
rm = cm = taum = 0
if "if" in opt_plot:
# NOTE(review): `cell` only exists when "analytical" was also in
# opt_plot above; "if" alone would raise NameError here — confirm
# callers always pass both options together.
Vrest = cell.soma(0.5).pas.e*mV
Vth = cell.spkout.thresh*mV
Vreset = cell.spkout.vrefrac*mV
else:
Vreset = 0*mV; Vth = 1*mV; Vrest = 0*mV
sim = None
cell = None
else:
# non-root ranks: placeholder values only
rm = cm = taum = 0
Vreset = 0*mV; Vth = 1*mV; Vrest = 0*mV
return rm, cm, taum, Vreset, Vth, Vrest
def fun_plot(self, currlabel="control", dowhat="cnoise", freq_used=np.array([]), cutf=10, sexp=0, t_stim=100*s, ymax=0, ax=None, SNR=None, VAF=None, t_qual=0, opt_plot=np.array([]), method_interpol_plot=[], do_csd = 1):
SNR_switch = SNR
VAF_switch = VAF
rm, cm, taum, Vreset, Vth, Vrest = self.get_RC(opt_plot)
if dowhat == "cnoise":
if do_csd == 0:
t_qual = 0; SNR_switch = 0; VAF_switch = 0
results = self.fun_cnoise_Stim(t_stim = t_stim, cutf = cutf, sexp = sexp, t_qual = t_qual, freq_used = freq_used, do_csd = do_csd)
freq_used, amp_vec, mag, pha, ca, voltage, current, t1 = results.get('freq_used'), results.get('amp'), results.get('mag'), results.get('pha'), results.get('ca'), results.get('voltage'), results.get('current'), results.get('t1')
freq_times, spike_freq, fmean, method_interpol, SNR, VAF, Qual = results.get('freq_times'), results.get('spike_freq'), results.get('fmean'), results.get('method_interpol'), results.get('SNR'), results.get('VAF'), results.get('Qual')
stim, stim_re_mat, current_re, tk, K_mat_old = results.get('stim'), results.get('stim_re_mat'), results.get('current_re'), results.get('tk'), results.get('K_mat')
elif dowhat == "ssine":
results = self.fun_ssine_Stim(freq_used = freq_used0)
freq_used, amp_vec, mag, pha, ca, voltage, current, t1 = results.get('freq_used'), results.get('amp'), results.get('mag'), results.get('pha'), results.get('ca'), results.get('voltage'), results.get('current'), results.get('t1')
freq_times, spike_freq, fmean, method_interpol, VAF = results.get('freq_times'), results.get('spike_freq'), results.get('fmean'), results.get('method_interpol'), results.get('VAF')
tk = []
K_mat_old = []
# analyse
if self.id == 0:
print "Mean rate: " + str(fmean)
# Turn it off if set to zero
if SNR_switch == 0: SNR = None
if VAF_switch == 0: VAF = None
if t_qual > 0:
plt.figure("Reconstruct")
ax1 = subplot(2,1,1)
ax1.plot(np.arange(len(stim))*dt-1, current_re*1e3, 'b', linewidth=1)
ax1.plot(np.arange(len(stim))*dt-1, (stim)*1e3, 'k-', linewidth=1)
ax1.plot(np.arange(len(stim))*dt-1, (stim_re_mat[0,:])*1e3, 'r', linewidth=1, alpha=1)
#adjust_spines(ax1, ['left','bottom'], d_out = 10)
#ax1.axis(xmin=0, xmax=1)
#ax1.axis(ymin=8.3, ymax=10.7)
#ax1.yaxis.set_ticks(array([8.5,9,9.5,10,10.5]))
#ax1.set_title("Reconstruction")
#ax1.set_xlabel("s")
#ax1.set_ylabel("pA")
#ax1.text(0.15, 10.7, "Input current", color=color3, fontsize = 8)
#ax1.text(0.8, 10.7, "Signal", color="#000000", fontsize = 8)
#ax1.text(0.0, 8.2, "Reconstruction", color=color2, fontsize = 8)
ax2 = subplot(2,1,2)
ax2.plot(tk, K_mat_old[0], 'k', linewidth=1)
self.save_plot(directory = "./figs/dump/", prefix = "reconstruct")
plt.figure("Transfer")
currtitle = currlabel + " pop " + dowhat + ", " + self.celltype[self.a_celltype[0]]
ax = plot_transfer(currtitle, freq_used, mag, pha, t1, current, voltage[self.a_celltype[0]], freq_times, spike_freq, taum, fmean, self.ihold, rm, Vreset, Vth, Vrest, method_interpol, method_interpol_plot, SNR = SNR, VAF = VAF, ymax = self.ymax, ax = self.ax, linewidth = self.linewidth, color_vec = self.color_vec, alpha = self.alpha, opt_plot = opt_plot)
suptitle("Population transfer function of " + str(self.N[self.a_celltype[0]]) + " " + self.celltype[self.a_celltype[0]] + ", amp: " + str(np.round(self.amp[self.a_celltype[0]],4)) + ", amod: " + str(self.amod) + ", ih: " + str(np.round(self.ihold,4)) + ", ih_s: " + str(np.round(self.ihold_sigma,4)) + ", fm: " + str(np.round(fmean,2)) + ", fl_s: " + str(self.fluct_s))
return VAF, SNR, ax, tk, K_mat_old
def save_plot(self, directory = "./figs/dump/", prefix = " "):
if pop.id == 0:
from datetime import datetime
idate = datetime.now().strftime('%Y%m%d_%H%M') # %S
savefig(directory + idate + "-pop_transfer_" + prefix + "_" + self.celltype[self.a_celltype[0]] + "_N" + str(self.N[self.a_celltype[0]]) + "_ihold" + str(np.round(self.ihold,4)) + "_amp" + str(np.round(self.amp[self.a_celltype[0]],4)) + ".pdf", dpi = 300) # save it
def do_pca_ica(self, t_analysis_delay=0, t_analysis_stop=1, time=0, signals=0, output_dim=10, n_processes=32, n_chunks=32, do_ica=1, n_celltype = 0):
# Run PCA (and optionally FastICA) on the per-cell signal matrix of one
# cell type, restricted to the window [t_analysis_delay, t_analysis_stop].
# MPI path: parallel MDP flow over n_chunks chunks, with results cached to
# a gzip pickle keyed by pickle_prefix. Non-MPI path: plain serial PCANode.
# Returns dict with keys 't', 'pca', 'pca_var', 'pca_var_expl', 'ica'.
if self.use_mpi:
filepath = self.data_dir + "/" + str(self.pickle_prefix) + "_results_pop_pca_ica.p"
if self.do_run or (os.path.isfile(filepath) is False):
# PCA
# remove beginning
dt = time[2]-time[1]  # sampling step inferred from the time vector
t = time[int(t_analysis_delay/dt):int(t_analysis_stop/dt)]
pca_mat = np.array(signals[n_celltype]).T[int(t_analysis_delay/dt):int(t_analysis_stop/dt),:]
node = mdp.nodes.PCANode(output_dim=output_dim, svd=True)
# pad with zeros to be able to split into chunks!
n_add = n_chunks-np.remainder(np.shape(pca_mat)[0],n_chunks)
mat_add = np.zeros((n_add, np.shape(pca_mat)[1]))
pca_mat_add = np.concatenate((pca_mat, mat_add))
pca_mat_iter = np.split(pca_mat_add, n_chunks)
flow = mdp.parallel.ParallelFlow([node])
start_time = ttime.time()
with mdp.parallel.ProcessScheduler(n_processes=n_processes, verbose=True) as scheduler:
flow.train([pca_mat_iter], scheduler=scheduler) # input has to be list, why??
process_time = ttime.time() - start_time
s = np.array(flow.execute(pca_mat_iter))
s = s[0:len(t),:] # resize to length of t!
#print "node.d: ",node.d
var_vec = node.d/sum(node.d)  # normalized per-component variance
print 'Explained variance (', 0, ') : ', round(node.explained_variance,4)
print 'Variance (' , 0, ') : ', var_vec
print 'Time to run (' , 0, ') : ', process_time
s2 = []
if do_ica:
# ICA
#s2 = mdp.fastica(s)
ica = mdp.nodes.FastICANode() #CuBICANode()
ica.train(s)
s2 = ica(s)
results = {'t':t, 'pca':s,'pca_var':var_vec,'pca_var_expl':round(node.explained_variance,4), 'ica':s2}
if self.id == 0:
if self.dumpsave == 1:
pickle.dump( results, gzip.GzipFile( filepath, "wb" ) )
else:
# cached result: only rank 0 reloads it.
# NOTE(review): on non-root ranks `results` is never assigned in this
# path, so the final return would raise NameError — confirm only rank 0
# consumes the return value here.
if self.id == 0:
results = pickle.load( gzip.GzipFile( filepath, "rb" ) )
else:
# serial (non-MPI) version: same analysis without chunking or caching
# remove beginning
dt = time[2]-time[1]
t = time[int(t_analysis_delay/dt):int(t_analysis_stop/dt)]
pca_mat = np.array(signals[n_celltype]).T[int(t_analysis_delay/dt):int(t_analysis_stop/dt),:]
node = mdp.nodes.PCANode(output_dim=output_dim, svd=True)
start_time = ttime.time()
node.train(pca_mat)
s = node(pca_mat)
process_time = ttime.time() - start_time
#print "node.d: ",node.d
var_vec = node.d/sum(node.d)
print 'Explained variance (', 0, ') : ', round(node.explained_variance,4)
print 'Variance (' , 0, ') : ', var_vec
print 'Time to run (' , 0, ') : ', process_time
s2 = []
if do_ica:
# ICA
#s2 = mdp.fastica(s)
ica = mdp.nodes.FastICANode() #CuBICANode()
ica.train(s)
s2 = ica(s)
results = {'t':t, 'pca':s,'pca_var':var_vec,'pca_var_expl':round(node.explained_variance,4), 'ica':s2}
return results
def net_run(self, tstop, simprop = "default", t_analysis_delay=0, t_analysis_stop=1, stim_start=0):
# Run the network until tstop and collect spikes, voltages, currents and
# synaptic conductances; results are cached in an HDF5 file so subsequent
# calls with do_run unset just reload. Returns
# (time, voltage, current, t_all_vec_vec, id_all_vec_vec, gsyns,
#  freq_times, w_mat, winh_mat).
# NOTE(review): w_mat/winh_mat are only populated on a fresh run, not when
# reloading from HDF5 — confirm callers tolerate the empty lists.
freq_times = []
t_all_vec_vec = []
id_all_vec_vec = []
gsyns = []
w_mat = []
winh_mat = []
time = []
voltage = []
current = []
filepath = self.data_dir + "/" + str(self.pickle_prefix) + "_results_pop_randomnet.hdf5"
if self.do_run or (os.path.isfile(filepath) is False):
self.run(tstop)
self.no_fmean = True  # mean rate not needed for the network run
results = self.get()
time, voltage, current, fmean, gsyn = results.get('time'), results.get('voltage'), results.get('current'), results.get('fmean'), results.get('gsyn')
freq_times, spike_freq, t_all_vec_vec, id_all_vec_vec, gsyns, w_mat, winh_mat = results.get('freq_times'), results.get('spike_freq'), results.get('t_all_vec_vec'), results.get('id_all_vec_vec'), results.get('gsyns'), results.get('w_mat'), results.get('winh_mat')
if self.id == 0:
if self.dumpsave == 1:
#pickle.dump( results, open( filepath, "wb" ) ) # gzip.GzipFile
print "- Saving", filepath
f = h5py.File(filepath, 'w')
f.create_dataset('time', data=time, compression='gzip', shuffle=True)
f.create_dataset('voltage', data=np.array(voltage), compression='gzip', shuffle=True)
f.create_dataset('current', data=current, compression='gzip', shuffle=True)
f.create_dataset('freq_times', data=freq_times, compression='gzip', shuffle=True)
#f.create_dataset('t_all_vec_vec', data=np.array(t_all_vec_vec), compression='lzf', shuffle=True)
#f.create_dataset('id_all_vec_vec', data=np.array(id_all_vec_vec), compression='lzf', shuffle=True)
#f.create_dataset('gsyns', data=np.array(gsyns), compression='lzf', shuffle=True)
# one HDF5 group per cell type: spikes (times + cell ids) and conductances
for i in range(len(self.N)):
subgroup = f.create_group("cell" + str(i))
subgroup.create_dataset('t_all_vec_vec', data=t_all_vec_vec[i], compression='gzip', shuffle=True)
subgroup.create_dataset('id_all_vec_vec', data=id_all_vec_vec[i], compression='gzip', shuffle=True)
subgroup.create_dataset('g', data=gsyns[i], compression='gzip', shuffle=True)
#for j in range(len(gsyns[i])):
# subsubgroup = subgroup.create_group("gsyn" + str(j))
# subsubgroup.create_dataset('g', data=gsyns[i][j], compression='lzf', shuffle=True)
f.close()
print "- Save finished"
#filename = slugify(simprop)
#syn_grc = np.array(gsyns[0])
#import scipy
#from scipy import io
#print "Saving .mat"
#data = {}
#data['syn_grc'] = syn_grc[:,int(t_analysis_delay/self.bin_width):int(t_analysis_stop/self.bin_width)]
#data['time'] = freq_times[int(t_analysis_delay/self.bin_width):int(t_analysis_stop/self.bin_width)]-stim_start
#scipy.io.savemat('./figs/' + filename + '.mat',data)
else:
# reload cached run (rank 0 only); layout mirrors the save branch above
if self.id == 0:
#results = pickle.load( open( filepath, "rb" ) ) #gzip.GzipFile
f = h5py.File(filepath, 'r')
time = np.array(f['time'])
voltage = np.array(f['voltage'])
current = np.array(f['current'])
freq_times = np.array(f['freq_times'])
for i in range(len(self.N)):
t_all_vec_vec.append(np.array(f['/cell' + str(i) + '/t_all_vec_vec']))
id_all_vec_vec.append(np.array(f['/cell' + str(i) + '/id_all_vec_vec']))
gsyns.append(np.array(f['/cell' + str(i) + '/g']))
#gsyns.append([])
#for j in range(self.N[i]):
# gsyns[i].append(np.array(f['/cell' + str(i) + '/gsyn' + str(j) + '/g' ]))
f.close()
return time, voltage, current, t_all_vec_vec, id_all_vec_vec, gsyns, freq_times, w_mat, winh_mat
def delall(self):
# Tear down the whole population: clear NEURON gids (MPI), destroy every
# cell object and drop the spike/netcon containers so NEURON can free the
# underlying sections. Prints the remaining topology as a sanity check.
if self.use_mpi:
self.pc.gid_clear()
print "- clearing gids"
else:
pass
#h.topology()
#for sec in h.allsec():
# print "- deleting section:", sec.name()
# #h("%s{delete_section()}"%sec.name())
# sec.push()
# h.delete_section()
#h.topology()
for n in range(self.n_celltypes):
for m in self.cells[n]:
m.destroy()  # cell-level cleanup before dropping the reference
del m
del self.cells
del self.nc_vecstim
del self.netcons
del self.nclist
print h.topology()
def delrerun(self):
# Reset all stimulation and recording state so the same population can be
# run again: drop old netcons/stimulators, recreate empty containers, and
# re-attach fresh spike recorders to every cell.
del self.nc_vecstim
del self.netcons
del self.nclist
del self.vecstim
del self.spike_vec
del self.ST_stims
del self.PF_stims
self.netcons = []
self.nclist = []
self.nc_vecstim = []
self.vecstim = []
self.spike_vec = []
self.ST_stims = []
self.PF_stims = []
self.t_vec = []
self.id_vec = []
self.rec_v = []
for n in range(self.n_celltypes):
if self.use_mpi:
# MPI: one global spike-time / id vector per cell type
self.t_vec.append(h.Vector()) # np.array([0])
self.id_vec.append(h.Vector()) # np.array([-1], dtype=int)
else:
# serial: per-cell spike-time vectors, filled below
self.t_vec.append([])
self.rec_v.append(h.Vector())
for cell in self.cells[n]:
self.t_vec[n].append(h.Vector())
cell.nc_spike.record(self.t_vec[n][-1])  # re-attach spike recorder
# reset all input/stimulus bookkeeping for the next run
self.flucts = [] # Fluctuating inputs on this host
self.noises = [] # Random number generators on this host
self.plays = [] # Play inputs on this host
self.rec_is = []
self.trains = []
self.ic_holds = []
self.i_holdrs = []
self.i_holds = []
self.ic_starts = []
self.vc_starts = []
self.ic_steps = []
self.tvecs = []
self.ivecs = []
self.noises = []
self.record_syn = []
self.id_all_vec_input = []
self.t_all_vec_input = []
self.syn_ex_dist = []
self.syn_inh_dist = []
# test code
if __name__ == '__main__':
# mpiexec -f ~/machinefile -enable-x -n 96 python Population.py --noplot
from Stimulation import *
from Plotter import *
from Stimhelp import *
from cells.IfCell import *
import scipy
from scipy import io
dt = 0.1*ms
dt = 0.025*ms
do_run = 1
if results.norun: # do not run again use pickled files!
print "- Not running, using saved files"
do_run = 0
do = np.array(["transfer"])
opts = np.array(["if_cnoise", "grc_cnoise"]) #ssine
#opts = np.array(["if_cnoise"]) #ssine
#opts = np.array(["if_recon"]) #ssine
opts = np.array(["if_syn_CFvec"])
#opts = np.array(["prk_cnoise"])
opts = np.array(["if_cnoise", "if_ssine"]) #ssine
opts = np.array(["if_ssine"]) #ssine
opts = np.array(["grc_cnoise_addn_cn_", "grc_cnoise_cn_", "grc_cnoise_addn_cn_a01"])
opts = np.array(["grc_cnoise_addn100_cn_", "grc_cnoise_addn_cn_", "grc_cnoise_cn_"])
opts = np.array(["grc_cnoise_addn100_cn_"])
opts = np.array(["grc_cnoise_addn100_"])
opts = np.array(["grc_cnoise_addn_cn_"])
#opts = np.array(["grc_cnoise"])
#opts = np.array(["grc_cnoise_cn", "grc_cnoise_addn_cn"])
#opts = np.array(["if_cnoise_addn", "if_cnoise"])
do = np.array(["timeconst"])
#do = np.array(["transfer"])
#opts = np.array(["grc_cnoise_syn"])
#opts = np.array(["grc_recon_syn"])
#do = np.array(["prk_test"])
if "prk_test" in do:
import multiprocessing
from Purkinje import Purkinje
cell = Purkinje()
# set up recording
# Time
rec_t = h.Vector()
rec_t.record(h._ref_t)
# Voltage
rec_v = h.Vector()
rec_v.record(cell.soma(0.5)._ref_v)
tstop = 500
v_init = -60
stim = h.IClamp(cell.soma(0.5))
stim.amp = 0.0/nA
stim.delay = 1
stim.dur = 1000
cpu = multiprocessing.cpu_count()
h.load_file("parcom.hoc")
p = h.ParallelComputeTool()
p.change_nthread(cpu,1)
p.multisplit(1)
print 'cpus:', cpu
h.load_file("stdrun.hoc")
h.celsius = 37
h.init()
h.tstop = tstop
dt = 0.025 # ms
h.dt = dt
h.steps_per_ms = 1 / dt
h.v_init = v_init
h.finitialize()
h.run()
t1 = np.array(rec_t)
voltage = np.array(rec_v)
s, spike_times = get_spikes(voltage, -20, t1)
print 1000/diff( spike_times)
plt.figure()
plt.subplot(2,1,1)
plt.plot(t1, voltage)
plt.show()
if "transfer" in do:
# SET DEFAULT VALUES FOR THIS PLOT
fig_size = [11.7, 8.3]
params = {'backend': 'ps', 'axes.labelsize': 9, 'axes.linewidth' : 0.5, 'title.fontsize': 8, 'text.fontsize': 9,
'legend.borderpad': 0.2, 'legend.fontsize': 8, 'legend.linewidth': 0.1, 'legend.loc': 'best', # 'lower right'
'legend.ncol': 4, 'xtick.labelsize': 8, 'ytick.labelsize': 8, 'text.usetex': False, 'figure.figsize': fig_size}
rcParams.update(params)
freq_used0 = np.array([1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 35, 40, 45, 50, 55, 60, 65, 70, 80, 100, 1000])*Hz
#freq_used0 = np.concatenate((arange(0.1, 1, 0.1), arange(1, 501, 1) ))
freq_used0 = np.array([1, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30, 32, 34, 36, 38, 40, 42, 44, 46, 48, 50, 52, 54, 56, 58, 60, 62, 64, 66, 68, 70, 72, 74, 76, 78, 80, 82, 84, 86, 88, 90, 92, 94, 96, 98, 100, 200, 400, 600, 800, 1000])
SNR = None
NI = None
VAF = None
t_stim = 1000*s # only for cnoise
opt_plot = np.array(["only_mag","normalize", "dB"]) #
#opt_plot = np.array(["normalize", "dB"]) #
color_vec = (np.array(["Red", "Blue", "HotPink", "Indigo"]), np.array(["Blue", "Orange", "HotPink", "Indigo"]))
#color=cm.jet(1.*i/x)
method_interpol = np.array(['bin','syn'])
method_interpol = np.array(['bin'])
for i, o in enumerate(opts):
dt = 0.025*ms
bin_width = 5*ms
bin_width = dt
jitter = 0*ms
n_syn_ex = [0]
g_syn_ex = [1]
noise_syn = 0
inh_hold = 0
n_syn_inh = [0]
g_syn_inh = [1]
tau1_ex = 0
tau2_ex = 10*ms
tau1_inh = 0
tau2_inh = 100*ms
cutf = 20
sexp = -1
cutf = 0
sexp = 0
ihold = [10]
amod = 0.1 # relative value
give_freq = True
anoise = [0]
fluct_tau = 0*ms
N = [100]
amp = 0 # absolute value
fluct_s = [0] # absolute value 0.0008
ihold_sigma = [0] # 0.01 absolute value
CF_var = [[5,10,20]]
CF_var = False
syn_tau1 = 5*ms
syn_tau2 = 5*ms
do_csd = 1
if "if" in o:
do_csd = 1
color_vec = (np.array(["Blue"]), np.array(["Blue"]))
#color_vec = (np.array(["Red"]), np.array(["Red"]))
cellimport = []
celltype = ["IfCell"]
#cell_exe = ["cell = IfCell()"]
#cell_exe = ["cell = IfCell(e = -70*mV, thresh = -69*mV, vrefrac = -70*mV)"]
#cell_exe = ["cell = IfCell(e = 0*mV, thresh = 1*mV, vrefrac = 0*mV)"]
# Brunel
#cell_exe = ["cell = IfCell(C = 0.0005 *uF, R = 40*MOhm, e = -70*mV, thresh = -50*mV, vrefrac = -56*mV); cell.add_resonance(tau_r = 100*ms, gr = 0.025*uS)"]
#cell_exe = ["cell = IfCell(C = 0.0001*uF, R = 40*MOhm, sigma_C = 0.2, sigma_R = 0.2)"]
#cell_exe = ["cell = IfCell(C = 0.0001*uF, R = 40*MOhm)"] # tau = 4 ms
#cell_exe = ["cell = IfCell(C = 0.0001*uF, R = 40*MOhm, s_reset_noise = 0*mV)"] # tau = 4 ms
#GrC resting: 737 MOhm, 2.985e-06 uF tau: 0.0022 s
#GrC transfer fit: tau: 0.027 s => with 2.985e-06 uF, R = 0.027/2.985e-12 = 9045 MOhm
#cell_exe = ["cell = IfCell(C = 2.985e-06*uF, R = 9045*MOhm)"]
thresh = -41.8
R = 5227*MOhm
#tau_passive = 3e-06*5227 = 15.7ms
cell_exe = ["cell = IfCell(C = 3.0e-06*uF, R = " + str(R) + ", e = -71.5*mV, thresh =" + str(thresh) + ", vrefrac = -71.5*mV)"]
prefix = "if_tf"
istart = 0
istop = 0.01
di = 0.00001
syn_tau1 = 10*ms
syn_tau2 = 10*ms
# Indirect
give_freq = True
ihold = [40]
amod = 1 # relative value
anoise = [0]
fluct_tau = 0*ms
#anoise = 0.1
#fluct_tau = 100*ms
# # Direct
# give_freq = False
# ihold = [0.00569223341176]
# amod = None
# amp = 7.31353725e-06
#
# anoise = None
# fluct_s = [3.65676863e-06]
# fluct_tau = 0*ms
#
# # Low CF, No low noise
# N = [10000]
# give_freq = False
# ihold = [0.004]
# ihold_sigma = [0.1/2] # 0.1/2 0.01 realtive value
# amod = None
# amp = 0.0021
#
# anoise = None
# fluct_s = [0.00] # .005
# fluct_tau = 0*ms
# # Low CF, With low noise
# N = [10000]
# give_freq = False
# ihold = [0.002]
# ihold_sigma = [0.1/2] # 0.1/2 0.01 realtive value
# amod = None
# amp = 0.001
#
# anoise = None
# fluct_s = [0.002] # .005
# fluct_tau = 100*ms
if "resif" in o:
do_csd = 1
color_vec = (np.array(["Blue"]), np.array(["Blue"]))
#color_vec = (np.array(["Red"]), np.array(["Red"]))
cellimport = []
celltype = ["IfCell"]
gr = 5.56e-05*uS
tau_r = 19.6*ms
R = 5227*MOhm
delta_t = 4.85*ms
thresh = (0.00568*nA * R) - 71.5*mV #
thresh = -41.8
cellimport = []
celltype = "IfCell"
cell_exe = "cell = IfCell(C = 3e-06*uF, R = " + str(R) + ", e = -71.5*mV, thresh =" + str(thresh) + ", vrefrac = -71.5*mV, dgk =" + str(gr) + ", egk = -71.5*mV, ctau =" + str(tau_r) + ")"
prefix = "resif_tf"
istart = 0
istop = 0.01
di = 0.00001
syn_tau1 = 10*ms
syn_tau2 = 10*ms
# Indirect
give_freq = True
ihold = [40]
amod = 1 # relative value
anoise = [0]
fluct_tau = 0*ms
dt = 0.1*ms
if "if_syn" in o:
N = [1]
ihold = [40]
amod = 1 # relative value
prefix = "if_syntf"
n_syn_ex = 1
g_syn_ex = 0
noise_syn = 0
fluct_tau = 0*ms
freq_used = np.array([])
tau1_ex=0*ms
tau2_ex=10*ms
anoise = [0]
if "grc" in o:
color_vec = (np.array(["Blue"]), np.array(["Blue"]))
cellimport = ["from GRANULE_Cell import Grc"]
celltype = ["Grc"]
cell_exe = ["cell = Grc(np.array([0.,0.,0.]))"]
prefix = "grc_tf"
istart = 0
istop = 0.1
di = 0.01
syn_tau1 = 10*ms
syn_tau2 = 10*ms
# Indirect
give_freq = True
ihold = [40]
amod = 1 # relative value
anoise = [0]
fluct_tau = 0*ms
#anoise = 0.1
#fluct_tau = 100*ms
# # Direct
# give_freq = False
# ihold = [0.0058021085712642992]
# amod = None
# amp = 7.31353725e-06
#
# anoise = None
# fluct_s = [3.65676863e-06]
# fluct_tau = 0*ms
#
# # Low CF, No low noise
# N = [50]
# give_freq = False
# ihold = [0.0049]
# ihold_sigma = [0.1/2] # 0.1/2 0.01 realtive value
# amod = None
# amp = 0.0021
#
# anoise = None
# fluct_s = [0.00] # .005
# fluct_tau = 0*ms
#
#
# # Low CF, With low noise
# N = [10000]
# give_freq = False
# ihold = [0.003]
# ihold_sigma = [0.1/2] # 0.1/2 0.01 realtive value
# amod = None
# amp = 0.001
#
# anoise = None
# fluct_s = [0.002] # .005
# fluct_tau = 100*ms
use_multisplit = False
use_mpi = True
simstep = 1*s
if "prk" in o:
N = [1]
ihold = [60]
color_vec = (np.array(["Blue"]), np.array(["Blue"]))
cellimport = ["from Purkinje import Purkinje"]
celltype = ["Prk"]
cell_exe = ["cell = Purkinje()"]
prefix = "prk_tf"
temperature = 37
istart = 0
istop = 0.1
di = 0.005
use_multisplit = True
use_mpi = False
t_stim = 5*s # only for cnoise
simstep = 1*s
if "grc_syn" in o:
N = [1]
ihold = [125]
amod = 1 # relative value
prefix = "grc_syntf"
cutf = 20
sexp = -1
cutf = 0
sexp = 0
n_syn_ex = 1
g_syn_ex = -1
noise_syn = 1
n_syn_inh = -1
inh_hold = 0
g_syn_inh = 0
fluct_tau = 0*ms
freq_used = np.array([])
anoise = 0
if "_addn" in o:
anoise = [6] # RESPONSIBLE FOR FILTERING EFFECT!!!
fluct_tau = 1*ms
prefix = prefix + "_addn"
color_vec = (np.array(["Red"]), np.array(["Red"]))
if "_addn100" in o:
anoise = [2] # RESPONSIBLE FOR FILTERING EFFECT!!!
fluct_tau = 100*ms
prefix = prefix + "100"
color_vec = (np.array(["Green"]), np.array(["Green"]))
if "_cn_" in o:
cutf = 20
sexp = -1
prefix = prefix + "_cn"
if "_a01" in o:
amod=0.1
prefix = prefix + "_a01"
plt.figure(i)
pickle_prefix = "Population.py_" + prefix
#comm = MPI.COMM_WORLD
#comm.Barrier() # wait for other nodes
pop = Population(cellimport = cellimport, celltype = celltype, cell_exe = cell_exe, N = N, temperature = 37, ihold = ihold, ihold_sigma = ihold_sigma, amp = amp, amod = amod, give_freq = give_freq, do_run = do_run, pickle_prefix = pickle_prefix, istart = istart, istop = istop, di = di, dt = dt)
pop.bin_width = bin_width
pop.jitter = jitter
pop.anoise = anoise
pop.fluct_s = fluct_s
pop.fluct_tau = fluct_tau
pop.method_interpol = method_interpol
pop.no_fmean = False
pop.CF_var = CF_var
pop.tau1_ex=tau1_ex
pop.tau2_ex=tau2_ex
pop.tau1_inh=tau1_inh
pop.tau2_inh=tau2_inh
pop.n_syn_ex = n_syn_ex
pop.g_syn_ex = g_syn_ex
pop.noise_syn = noise_syn
pop.inh_hold = inh_hold
pop.n_syn_inh = n_syn_inh
pop.g_syn_inh = g_syn_inh
pop.force_run = False
pop.use_multisplit = use_multisplit
pop.use_mpi = use_mpi
pop.simstep = simstep
pop.use_local_dt = False
pop.syn_tau1 = syn_tau1
pop.syn_tau2 = syn_tau2
pop.plot_input = False
if n_syn_inh == -1:
pop.connect_gfluct(g_i0=g_syn_inh)
#pop.test_mod(n_syn_ex = n_syn_ex, g_syn_ex = g_syn_ex, noise_syn = noise_syn, inh_hold = inh_hold, n_syn_inh = n_syn_inh, g_syn_inh = g_syn_inh, do_plot = True)
if "ssine" in o:
pop.color_vec = color_vec
#pop.color_vec = (np.array(["Red", "Orange", "HotPink", "Indigo"]), np.array(["Red", "Orange", "HotPink", "Indigo"]))
pop.fun_plot(currlabel = "control", dowhat = "ssine", freq_used = freq_used0, opt_plot = opt_plot)
pop.save_plot(directory = "./figs/dump/")
if "cnoise" in o:
freq_used = np.array([])
pop.color_vec = color_vec
#pop.color_vec = (np.array(["Blue", "Green", "DimGray", "DarkGoldenRod"]), np.array(["Blue", "Green", "DimGray", "DarkGoldenRod"]))
pop.fun_plot(currlabel = "control", dowhat = "cnoise", t_stim = t_stim, cutf = cutf, sexp = sexp, t_qual = 0, opt_plot = opt_plot, freq_used = freq_used, do_csd = do_csd)
pop.save_plot(directory = "./figs/dump/")
if "recon" in o:
pop.color_vec = color_vec
#VAF, SNR, ax, tk, K_mat_old = pop.fun_plot(currlabel = "control", dowhat = "cnoise", t_stim = t_stim, cutf = cutf, sexp = sexp, t_qual = 0, opt_plot = opt_plot, n_syn_ex = n_syn_ex, g_syn_ex = g_syn_ex, noise_syn = noise_syn, inh_hold = inh_hold, n_syn_inh = n_syn_inh, g_syn_inh = g_syn_inh, SNR=0, freq_used = freq_used)
# RECONSTRUCT!
freq_used = np.array([9, 47, 111, 1000])*Hz
t_stim = 10*s
tk = arange(0,0.8192*2,pop.dt)
K_mat_old = zeros((len(method_interpol),len(tk)), dtype=complex)
if pop.id == 0:
sigma = 0.1e-3
a=0.1
t0 = tk[floor(len(tk)/2)]
K_mat_old[0] = gauss_func(tk, a, t0, sigma)
K_mat_old = np.array([])
results = pop.fun_cnoise_Stim(t_stim = t_stim, cutf = cutf, sexp = sexp, t_qual = 5, n_syn_ex = n_syn_ex, g_syn_ex = g_syn_ex, noise_syn = noise_syn, inh_hold = inh_hold, n_syn_inh = n_syn_inh, g_syn_inh = g_syn_inh, freq_used = freq_used, K_mat_old = K_mat_old, seed = 311)
freq_used, amp_vec, mag, pha, ca, voltage, current, t1 = results.get('freq_used'), results.get('amp'), results.get('mag'), results.get('pha'), results.get('ca'), results.get('voltage'), results.get('current'), results.get('t1')
freq_times, spike_freq, fmean, method_interpol, SNR, VAF, Qual = results.get('freq_times'), results.get('spike_freq'), results.get('fmean'), results.get('method_interpol'), results.get('SNR'), results.get('VAF'), results.get('Qual')
stim, resp_mat, stim_re_mat = results.get('stim'), results.get('resp_mat'), results.get('stim_re_mat')
if pop.id == 0:
plt.figure('Reconstruct')
axR0 = plt.subplot(4,1,1)
axR1 = plt.subplot(4,1,2)
axR2 = plt.subplot(4,1,3)
axR3 = plt.subplot(4,1,4)
axR0.plot(np.arange(len(stim))*pop.dt, resp_mat[0,:])
axR0.axis(xmin=0.9, xmax=1)
#axR0.plot(t1, voltage[0])
axR1.plot(np.arange(len(stim))*pop.dt, stim, 'b')
axR1.axis(xmin=0.9, xmax=1)
axR2.plot(np.arange(len(stim))*pop.dt, stim_re_mat[0,:], 'r')
axR2.axis(xmin=0.9, xmax=1)
axR3.plot(tk, K_mat_old[0])
plt.savefig("./figs/dump/Reconstruct.pdf", dpi = 300, transparent=True) # save it
pop = None
plt.show()
if "timeconst" in do:
from lmfit import minimize, Parameters
# SET DEFAULT VALUES FOR THIS PLOT
fig_size = [11.7, 8.3]
params = {'backend': 'ps', 'axes.labelsize': 9, 'axes.linewidth' : 0.5, 'title.fontsize': 8, 'text.fontsize': 9,
'legend.borderpad': 0.2, 'legend.fontsize': 8, 'legend.linewidth': 0.1, 'legend.loc': 'best', # 'lower right'
'legend.ncol': 4, 'xtick.labelsize': 8, 'ytick.labelsize': 8, 'text.usetex': False, 'figure.figsize': fig_size}
rcParams.update(params)
dt = 0.025*ms
prefix = "timeconst"
pickle_prefix = "Population.py_" + prefix
stimtype = "inh_50ms_20ms"
if stimtype == "ex_20ms":
trun = 2.9
tstart = 1.8
tstop = 2.7
celltype = ["IfCell"]
cell_exe = ["cell = IfCell(C = 0.0001*uF, R = 200*MOhm)"]
N = [5000]
pop = Population(celltype = celltype, cell_exe = cell_exe, N = N, temperature = 0, do_run = do_run, pickle_prefix = pickle_prefix, dt = dt)
pop.method_interpol = np.array(["bin", "syn"])
pop.method_interpol = np.array(["bin"])
modulation_vec = pop.set_PulseStim(start_time=[100*ms], dur=[3000*ms], steadyf=[100*Hz], pulsef=[150*Hz], pulse_start=[2000*ms], pulse_len=[500*ms], weight0=[1*nS], tau01=[0*ms], tau02=[20*ms], weight1=[0*nS], tau11=[0*ms], tau12=[1*ms])
params = Parameters()
params.add('amp', value=0.1)
params.add('shift', value=10)
params.add('tau1', value=1, vary=False) # alpha!
params.add('tau2', value=20*ms)
if stimtype == "ex_gr":
trun = 6.9
tstart = 4.8
tstop = 6.5
cellimport = ["from GRANULE_Cell import Grc"]
celltype = ["Grc"]
cell_exe = ["cell = Grc(np.array([0.,0.,0.]))"]
N = [4096*10]
pop = Population(cellimport = cellimport, celltype = celltype, cell_exe = cell_exe, N = N, temperature = 37, do_run = do_run, pickle_prefix = pickle_prefix, dt = dt)
pop.method_interpol = np.array(["bin", "syn"])
pop.method_interpol = np.array(["bin"])
modulation_vec = pop.set_PulseStim(start_time=[100*ms], dur=[7000*ms], steadyf=[20*Hz], pulsef=[30*Hz], pulse_start=[5000*ms], pulse_len=[500*ms])
params = Parameters()
params.add('amp', value=0.1)
params.add('shift', value=10)
params.add('tau1', value=1, vary=False) # alpha!
params.add('tau2', value=20*ms)
if stimtype == "inh_50ms_20ms":
trun = 2.9
tstart = 1.8
tstop = 2.7
celltype = ["IfCell", "IfCell"]
cell_exe = ["cell = IfCell()", "cell = IfCell()"]
N = [10000,10000]
pop = Population(celltype = celltype, cell_exe = cell_exe, N = N, temperature = 0, do_run = do_run, pickle_prefix = pickle_prefix, dt = dt)
pop.method_interpol = np.array(["bin", "syn"])
pop.method_interpol = np.array(["bin"])
modulation_vec = pop.set_PulseStim(start_time=[100*ms,100*ms], dur=[3000*ms,3000*ms], steadyf=[100*Hz,50*Hz], pulsef=[100*Hz,80*Hz], pulse_start=[2000*ms,2000*ms], pulse_len=[500*ms,500*ms], weight0=[1*nS,1*nS], tau01=[1*ms,1*ms], tau02=[20*ms,20*ms], weight1=[0,0], tau11=[0*ms,0*ms], tau12=[1*ms,1*ms])
pop.connect_cells(conntype='inh', weight=0.001, tau=50)
params = Parameters()
params.add('amp', value=-0.1)
params.add('shift', value=10)
params.add('tau1', value=1, vary=False) # alpha!
params.add('tau2', value=20*ms)
if stimtype == "inh_gr":
trun = 9.9
tstart = 4.8
tstop = 8
cellimport = ["from GRANULE_Cell import Grc", "from templates.golgi.Golgi_template import Goc"]
celltype = ["Grc","Goc_noloop"]
cell_exe = ["cell = Grc(np.array([0.,0.,0.]))","cell = Goc(np.array([0.,0.,0.]))"]
N = [100,4]
#N = [4096, 27]
#N = [4096*5, 27*5]
pop = Population(cellimport = cellimport, celltype = celltype, cell_exe = cell_exe, N = N, temperature = 37, do_run = do_run, pickle_prefix = pickle_prefix, dt = dt)
pop.method_interpol = np.array(["bin", "syn"])
pop.method_interpol = np.array(["bin"])
modulation_vec = pop.set_PulseStim(start_time=[100*ms,100*ms], dur=[9800*ms,9800*ms], steadyf=[60*Hz,10*Hz], pulsef=[60*Hz,22*Hz], pulse_start=[5000*ms,5000*ms], pulse_len=[1500*ms,1500*ms])
pop.connect_cells(conntype='inh_gr', weight = 0.3)
params = Parameters()
params.add('amp', value=-0.1)
params.add('shift', value=10)
params.add('tau1', value=1, vary=False) # alpha!
params.add('tau2', value=20*ms)
if stimtype == "inh_50ms_curr":
trun = 2.9
tstart = 1.8
tstop = 2.8
celltype = ["IfCell", "IfCell"]
cell_exe = ["cell = IfCell()", "cell = IfCell()"]
N = [1000,1000]
give_freq = True
istart = 0
istop = 0.2
di = 0.01
ihold = [100, 50]
ihold_sigma = [0.01, 0.01] # relative sigma
pop = Population(celltype = celltype, cell_exe = cell_exe, N = N, temperature = 0, ihold = ihold, ihold_sigma = ihold_sigma, give_freq = give_freq, do_run = do_run, pickle_prefix = pickle_prefix, istart = istart, istop = istop, di = di, dt = dt)
pop.method_interpol = np.array(["bin", "syn"])
pop.method_interpol = np.array(["bin"])
tstep = 2
tdur = 0.5
istep = [100,100]
current1 = np.concatenate(([ihold[1]*np.ones(round((tstep)/pop.dt)), istep[1]*np.ones(round(tdur/pop.dt)),ihold[1]*np.ones(round((trun-tstep-tdur)/pop.dt)) ]))
pop.set_IStim()
pop.set_IStep(istep = istep, istep_sigma = [0.01,0.01], tstep = tstep, tdur = tdur)
pop.connect_cells(conntype='inh', weight=0.0003, tau=50)
pop.fluct_s = [0.02,0.05]
pop.connect_fluct()
params = Parameters()
params.add('amp', value=-0.1)
params.add('shift', value=10)
params.add('tau1', value=1, vary=False) # alpha!
params.add('tau2', value=20*ms)
if stimtype == "inh_gr_curr":
trun = 9.9
tstart = 4.8
tstop = 8
cellimport = ["from GRANULE_Cell import Grc", "from templates.golgi.Golgi_template import Goc"]
celltype = ["Grc","Goc_noloop"]
cell_exe = ["cell = Grc(np.array([0.,0.,0.]))","cell = Goc(np.array([0.,0.,0.]))"]
N = [100,4]
N = [4096, 27]
N = [4096*10, 27*10]
give_freq = True
# GRC
#istart = 0
#istop = 0.1
#di = 0.01
#GOC
istart = 0
istop = 0.5
di = 0.02
ihold = [100, 10]
ihold_sigma = [0, 0] # relative sigma
pop = Population(cellimport = cellimport, celltype = celltype, cell_exe = cell_exe, N = N, temperature = 37, ihold = ihold, ihold_sigma = ihold_sigma, give_freq = give_freq, do_run = do_run, pickle_prefix = pickle_prefix, istart = istart, istop = istop, di = di, dt = dt)
pop.method_interpol = np.array(["bin", "syn"])
pop.method_interpol = np.array(["bin"])
tstep = 5
tdur = 2
istep = [100,50]
current1 = np.concatenate(([ihold[1]*np.ones(round((tstep)/pop.dt)), istep[1]*np.ones(round(tdur/pop.dt)),ihold[1]*np.ones(round((trun-tstep-tdur)/pop.dt)) ]))
pop.set_IStim()
pop.set_IStep(istep = istep, istep_sigma = [0,0], tstep = tstep, tdur = tdur)
pop.connect_cells(conntype='inh_gr', weight = 0.4)
pop.fluct_s = [0.05,2]
pop.connect_fluct()
params = Parameters()
params.add('amp', value=-0.1)
params.add('shift', value=10)
params.add('tau1', value=1, vary=False) # alpha!
params.add('tau2', value=20*ms)
pop.run_steps(trun)
self.no_fmean = True
results = pop.get()
time, voltage, current, fmean, gsyn = results.get('time'), results.get('voltage'), results.get('current'), results.get('fmean'), results.get('gsyn')
freq_times, spike_freq, t_all_vec_vec, id_all_vec_vec, gsyns = results.get('freq_times'), results.get('spike_freq'), results.get('t_all_vec_vec'), results.get('id_all_vec_vec'), results.get('gsyns')
if pop.id == 0:
bin_width = 1*ms
freq_times = arange(0, time[-1], bin_width)
[num_spikes, _] = neuronpy.util.spiketrain.get_histogram(t_all_vec_vec[0], bins = freq_times)
spike_freq = np.concatenate((zeros(1),num_spikes)) / bin_width / N[0]
if "inh" in stimtype: # generate input current, to complicated to get it out
if "curr" in stimtype:
time1 = np.arange(0, trun, pop.dt)
r_mod = interp(freq_times, time1, current1, left=0, right=0)
[num_spikes, _] = neuronpy.util.spiketrain.get_histogram(t_all_vec_vec[1], bins = freq_times)
spike_freq1 = np.concatenate((zeros(1),num_spikes)) / bin_width / N[1]
else:
r_mod = interp(freq_times, modulation_vec[1][0], modulation_vec[1][1], left=0, right=0)
[num_spikes, _] = neuronpy.util.spiketrain.get_histogram(t_all_vec_vec[1], bins = freq_times)
spike_freq1 = np.concatenate((zeros(1),num_spikes)) / bin_width / N[1]
elif "ex" in stimtype:
r_mod = interp(freq_times, modulation_vec[0][0], modulation_vec[0][1], left=0, right=0)
def modelfun(amp, shift, tau1, tau2, bin_width, r_mod):
    """Predicted rate: scaled synaptic kernel convolved with the input rate.

    Parameters
    ----------
    amp : float
        Amplitude scaling of the synaptic kernel.
    shift : float
        Constant rate offset added after the convolution.
    tau1, tau2 : float
        Rise and decay time constants forwarded to ``syn_kernel``.
    bin_width : float
        Sampling step (s) used to discretise the kernel.
    r_mod : array_like
        Input rate vector to convolve with the kernel.

    Returns
    -------
    numpy.ndarray
        Model rate; with ``mode='same'`` its length is
        ``max(len(K), len(r_mod))``.
    """
    # Sample the kernel over 10 decay time constants.
    t1 = np.arange(0, 10 * tau2, bin_width)
    K = amp * syn_kernel(t1, tau1, tau2)
    # Left-pad with zeros so the kernel is causal around each output bin.
    K = np.concatenate((np.zeros(len(K) - 1), K))
    # NOTE(review): mode='same' only matches the data length when
    # len(r_mod) >= len(K); callers slice by time so this assumes the
    # input trace is longer than the padded kernel -- confirm.
    model = np.convolve(K, r_mod, mode='same') + shift
    return model
def residual(params, r_mod, data=None, bin_width=1*ms, tstart=0, tstop=3):
    """Residual (data - model) for lmfit's ``minimize``.

    Reads the current parameter values out of *params*, evaluates
    ``modelfun`` on *r_mod*, and returns the difference against *data*
    restricted to the [tstart, tstop) time window.
    """
    model = modelfun(params['amp'].value,
                     params['shift'].value,
                     params['tau1'].value,
                     params['tau2'].value,
                     bin_width, r_mod)
    # Translate the time window into bin indices; compare only inside it.
    lo = int(tstart / bin_width)
    hi = int(tstop / bin_width)
    return data[lo:hi] - model[lo:hi]
result = minimize(residual, params, args=(r_mod, spike_freq, bin_width, tstart, tstop))
print "chisqr: ", result.chisqr
print 'Best-Fit Values:'
for name, par in params.items():
print ' %s = %.4f +/- %.4f ' % (name, par.value, par.stderr)
amp = params['amp'].value
shift = params['shift'].value
tau1 = params['tau1'].value
tau2 = params['tau2'].value
model = modelfun(amp, shift, tau1, tau2, bin_width = bin_width, r_mod = r_mod)
if "ex" in stimtype:
plt.figure(0)
plt.plot(freq_times[int(0.5/bin_width):int(trun/bin_width)], spike_freq[int(0.5/bin_width):int(trun/bin_width)], freq_times[int(0.5/bin_width):int(trun/bin_width)], model[int(0.5/bin_width):int(trun/bin_width)])
plt.figure(1)
plt.plot(time, voltage[0]), freq_times, r_mod, time, current
#plt.figure(100)
#plt.plot(t_all_vec_vec[0],id_all_vec_vec[0],'k|')
#plt.savefig("./figs/dump/taufit_" + str(stimtype) + "_spikes.pdf", dpi = 300) # save it
else:
plt.figure(0)
plt.plot(freq_times[int(0.5/bin_width):int(trun/bin_width)], spike_freq1[int(0.5/bin_width):int(trun/bin_width)], freq_times[int(0.5/bin_width):int(trun/bin_width)], spike_freq[int(0.5/bin_width):int(trun/bin_width)], freq_times[int(0.5/bin_width):int(trun/bin_width)], model[int(0.5/bin_width):int(trun/bin_width)])
plt.figure(1)
plt.plot(time, voltage[0], time, voltage[1], freq_times, r_mod, time, current)
plt.figure(100)
#plt.plot(t_all_vec_vec[0],id_all_vec_vec[0],'k|')
#plt.plot(t_all_vec_vec[1],id_all_vec_vec[1],'b|')
#plt.savefig("./figs/dump/taufit_" + str(stimtype) + "_spikes.pdf", dpi = 300) # save it
plt.figure(0)
plt.title('Fit: ' + str(stimtype) + ', tau1=' + str(tau1) + ' tau2=' + str(tau2))
plt.savefig("./figs/dump/taufit_" + str(stimtype) + "_rate.png", dpi = 300) # save it
plt.figure(1)
plt.savefig("./figs/dump/taufit_" + str(stimtype) + "_voltage.png", dpi = 300) # save it
plt.show()
|
normal
|
{
"blob_id": "06ea697989f8f9ac539559690dcfd7aa73151e0f",
"index": 2700,
"step-1": "# -*- coding: utf-8 -*-\n\"\"\"\n@author: chris\n\nModified from THOMAS MCTAVISH (2010-11-04).\n\nmpiexec -f ~/machinefile -enable-x -n 96 python Population.py --noplot\n\"\"\"\n\nfrom __future__ import with_statement\nfrom __future__ import division\n\nimport sys\nsys.path.append('../NET/sheff/weasel/')\nsys.path.append('../NET/sheffprk/template/')\n\nimport os\n\n#use_pc = True\n\nimport sys\nargv = sys.argv\n\nif \"-python\" in argv:\n use_pc = True\nelse:\n use_pc = False \n\nif use_pc == True:\n \n from neuron import h\n pc = h.ParallelContext()\n rank = int(pc.id())\n nhost = pc.nhost()\n \nelse:\n \n from mpi4py import MPI\n from neuron import h\n rank = MPI.COMM_WORLD.rank\n\n#print sys.version\n\nif __name__ == '__main__':\n \n import argparse\n parser = argparse.ArgumentParser()\n parser.add_argument('-o', action='store', dest='opt')\n parser.add_argument('--noplot', action='store_true')\n parser.add_argument('--norun', action='store_true')\n parser.add_argument('--noconst', action='store_true')\n parser.add_argument('--noqual', action='store_true')\n pars, unknown = parser.parse_known_args(['-o','--noplot','--norun','--noconst','--noqual'])\n\nif __name__ == '__main__':\n \n import matplotlib\n if rank == 0: \n matplotlib.use('Tkagg', warn=True) \n else: \n matplotlib.use('Agg', warn=True) \n\nif __name__ == '__main__':\n \n do_plot = 1\n if results.noplot: # do not plot to windows\n matplotlib.use('Agg', warn=True)\n if rank == 0: print \"- No plotting\"\n do_plot = 0\n\nimport numpy as np\nimport matplotlib.pyplot as plt\nimport matplotlib.mlab as mlab\n\nimport random as rnd\nimport neuronpy.util.spiketrain\n\n#set_printoptions(threshold='nan')\n\nfrom Stimulation import *\nfrom Stimhelp import *\nfrom units import *\n\nfrom cells.PassiveCell import *\n\nfrom itertools import izip\n\ntry:\n import cPickle as pickle\nexcept:\n import pickle\n \nimport gzip\nimport h5py\n\nfrom templates.synapse.synapse import Synapse\nfrom synapsepfpurk 
import Synapse as Synapse2\nif use_pc is False: import mdp\n \nimport time as ttime\nfrom scipy.optimize import fmin, leastsq\n\nfrom NeuroTools import stgen, signals\n\nimport md5\n\n#from guppy import hpy\n#hpy = hpy()\n \n\nclass Population:\n \"\"\"\n A population of N cells\n \"\"\"\n \n def __init__(self, cellimport = [], celltype = None, N = [10], temperature = 6.3, cell_exe = 0, ihold = [0*nA], ihold_sigma = [0*nA], amp = [0*nA], amod = [None], anoise = [None], give_freq = False, do_run = 1, pickle_prefix = \"default\", istart = 0, istop = 0.07, di = 0.001, dt = 0.025*ms, use_mpi = True, use_pc = False):\n \"\"\"\n :param N: Number of cells.\n :param fluct_m: \n :param fluct_s: \n :param fluct_tau: \n \"\"\"\n\n self.use_pc = use_pc\n \n if type(celltype) is not list: celltype = [celltype] #convert to list if it is not given as one\n self.celltype = celltype\n \n if type(cell_exe) is not list: cell_exe = [cell_exe] #convert to list if it is not given as one\n self.cell_exe = cell_exe \n \n if cellimport is not None:\n if cellimport == []: \n for n in range(len(celltype)):\n cellimport.append(\"from cells.\" + self.celltype[n] + \" import *\")\n self.cellimport = cellimport\n \n if type(N) is not list: N = [N]\n self.N = N # Total number of cells in the net\n \n self.n_celltypes = len(self.N)\n self.a_celltype = [0] # celltype to analyse\n \n self.factor_celltype = [1]*self.n_celltypes\n \n self.set_init(ihold, ihold_sigma, amp, amod)\n \n self.CF_var = False\n\n self.inh_hold_sigma = [0]\n self.intr_hold_sigma = [0]\n \n #self.sigma_inh_hold = 0\n #self.sigma_ihold = 0\n \n \n if type(anoise) is not list: anoise = [anoise]*self.n_celltypes\n if len(anoise) < self.n_celltypes: anoise = [anoise[0]]*self.n_celltypes\n self.anoise = anoise # RUN self.set_i()\n \n self.give_freq = give_freq # RUN self.set_i()\n \n self.temperature = temperature\n \n self.gid_count = 0\n self.gidlist = [] # List of global identifiers on this host\n self.global_gidlist = [] # List 
of global identifiers\n self.cells = [] # Cells on this host\n \n self.t_vec = []\n self.id_vec = []\n self.rec_v = []\n \n for n in range(self.n_celltypes):\n if use_mpi:\n self.t_vec.append(h.Vector()) # np.array([0])\n self.id_vec.append(h.Vector()) # np.array([-1], dtype=int)\n else:\n self.t_vec.append([])\n \n self.rec_v.append(h.Vector())\n \n #self.t_vec = h.Vector(np.array([0])) # Spike time of all cells on this host\n #self.id_vec = h.Vector(np.array([-1])) # Ids of spike times on this host\n \n self.flucts = [] # Fluctuating inputs on this host\n self.fluct_m = 0 # [nA]\n self.fluct_s = [0] # [nA]\n self.fluct_tau = 0*ms # [ms]\n \n self.noises = [] # Random number generators on this host\n self.plays = [] # Play inputs on this host\n self.rec_is = []\n \n self.trains = []\n self.vecstim = []\n self.nc_vecstim = []\n self.spike_vec = [] \n \n self.syn_tau1 = 5*ms # Synapse of virtual target neuron\n self.syn_tau2 = 5*ms # Synapse of virtual target neuron\n self.tmax = 10*sec # maximum length of plot that should be plotted!!\n \n self.nc_delay = 0 #500*ms # only important if syn_output is used, not used currently\n self.dt = dt\n self.bin_width = dt\n self.jitter = 0*ms\n self.delta_t = 0*ms\n \n self.istart = istart\n self.istop = istop\n self.di = di\n \n self.ic_holds = []\n self.i_holdrs = []\n self.i_holds = []\n self.ic_starts = [] \n self.vc_starts = []\n self.ic_steps = []\n \n self.rec_step = []\n \n self.tvecs = []\n self.ivecs = [] \n\n self.noises = []\n \n self.record_syn = []\n self.id_all_vec_input = []\n self.t_all_vec_input = []\n \n if len(self.N) == len(self.cell_exe) == len(self.celltype):\n pass\n else:\n raise ValueError('N, cell_exe, celltype do NOT have equal length!')\n\n self.use_mpi = use_mpi\n self.use_pc = use_pc\n \n if self.use_mpi:\n \n #### Make a new ParallelContext object\n self.pc = h.ParallelContext()\n self.id = self.pc.id()\n self.nhost = int(self.pc.nhost())\n \n if self.use_pc == False:\n\n s = \"mpi4py thinks I am 
%d of %d on %s, NEURON thinks I am %d of %d\\n\"\n processorname = MPI.Get_processor_name()\n self.comm = MPI.COMM_WORLD\n \n if self.id == 0:\n print s % (self.comm.rank, self.comm.size, processorname, self.id, self.nhost)\n \n else:\n \n s = \"NEURON thinks I am %d of %d\\n\"\n if self.id == 0:\n print s % (self.id, self.nhost)\n \n self.barrier()\n \n else:\n self.id = 0\n self.nhost = 1\n \n self.do_run = do_run\n\n self.first_run = True\n \n self.set_numcells() # Build the portion of cells on this host. \n \n self.pickle_prefix = pickle_prefix\n \n # plot options\n self.ymax = 0 \n self.ax = None \n self.linewidth = 1.5\n self.color_vec = None \n self.alpha = 0.8 \n self.method_interpol = np.array(['bin','syn']) \n self.dumpsave = 1 \n self.called_syn_out_all = False\n self.no_fmean=False\n \n self.tau1_ex=[0*ms]*self.n_celltypes\n self.tau2_ex=[10*ms]*self.n_celltypes\n self.tau1_inh=[0*ms]*self.n_celltypes\n self.tau2_inh=[100*ms]*self.n_celltypes\n \n self.n_syn_ex = [0]*self.n_celltypes \n self.g_syn_ex = [1]*self.n_celltypes\n self.g_syn_ex_s = [0]*self.n_celltypes\n self.mglufac_ex = [1,0] \n \n self.noise_syn = [0]*self.n_celltypes \n self.noise_syn_tau = [0*ms]*self.n_celltypes\n self.noise_syn_inh = [0]*self.n_celltypes\n self.noise_syn_tau_inh = [0*ms]*self.n_celltypes\n \n self.noise_a = [1e9]*self.n_celltypes\n self.noise_a_inh = [1e9]*self.n_celltypes\n \n self.inh_hold = [0]*self.n_celltypes\n self.n_syn_inh = [0]*self.n_celltypes\n self.g_syn_inh = [1]*self.n_celltypes\n self.g_syn_inh_s = [0]*self.n_celltypes\n self.intr_hold = [0]*self.n_celltypes\n self.n_syn_intr = [0]*self.n_celltypes\n self.g_syn_intr = [0]*self.n_celltypes\n self.syn_max_mf = [1]*self.n_celltypes # possible mossy fibres per synapse\n self.syn_max_inh = [1]*self.n_celltypes # possible Golgi cells per synapse\n self.syn_max_intr = [1]*self.n_celltypes # possible Intruding cells per synapse\n \n \n self.seed = 50\n \n self.force_run = False\n self.give_psd = False\n 
self.do_if = True\n \n self.fluct_g_e0 = []\n self.fluct_g_i0 = []\n self.fluct_std_e = [] \n self.fluct_std_i = [] \n self.fluct_tau_e = [] \n self.fluct_tau_i = [] \n \n self.adjinh = True # adjust inhibition to get CFo instead of g_ex\n self.adjfinh = True # adjust frequnecy of inhibition to get CFo instead of g_ex\n \n self.syn_ex_dist = []\n self.syn_inh_dist = []\n \n self.stdp_used = False\n self.xmax = 20\n self.use_multisplit = False\n self.use_local_dt = False\n self.simstep = 0\n self.plot_train = True\n self.inh_delay = 0 # in ms\n self.plot_input = True\n self.delay_baseline = 8\n \n self.tstop_if = 1\n self.gsyn_in_fac = []\n \n self.netcons = [] # keeping track of!\n self.nclist = []\n \n self.ST_stims = []\n self.PF_stims = []\n \n self.data_dir = \"./data\"\n self.minimal_dir = False\n \n\n def set_init(self, ihold, ihold_sigma, amp, amod):\n # important for all methods:\n if type(ihold) is not list: ihold = [ihold] #convert to list if it is not given as one\n self.ihold = ihold\n self.ihold_orig = ihold\n \n if type(amp) is not list: amp = [amp]\n if len(amp) < self.n_celltypes: amp = [amp[0]]*self.n_celltypes\n self.amp = amp \n \n if type(amod) is not list: amod = [amod]*self.n_celltypes\n self.amod = amod # RUN self.set_i()\n \n self.ihold_sigma = ihold_sigma\n \n def barrier(self):\n if self.use_mpi:\n if self.use_pc == True:\n self.pc.barrier()\n else:\n self.comm.Barrier()\n \n def broadcast(self, vec, root = 0, fast = False):\n if self.use_mpi: \n if self.use_pc:\n \n if fast:\n hvec = h.Vector(vec)\n v = self.pc.broadcast(hvec,root)\n vec = np.array(hvec)\n else:\n \n sendlist = [None]*self.nhost\n if self.id == root:\n for i in range(self.nhost):\n sendlist[i] = vec \n getlist = self.pc.py_alltoall(sendlist)\n vec = getlist[root]\n \n else:\n #vec = np.array(vec, dtype=np.float64)\n #self.comm.Bcast([vec, MPI.DOUBLE])\n vec = self.comm.bcast(vec, root=0)\n\n return vec \n \n def set_numcells(self, N = []):\n \"\"\"\n Create, layout, and 
connect N cells.\n \"\"\"\n self.set_gids(N)\n self.create_cells()\n\n #self.syn_output() # generate synaptic \"output\" in neuron\n #self.connect_cells()\n \n\n def set_gids(self, N = []):\n \"\"\"Set the gidlist on this host.\n Round-robin counting. Each host as an id from 0 to pc.nhost()-1.\n Example:\n if N = 5 cells and nhost() = 3\n node id() = 0 will get cells [0, 3]\n node id() = 1 will get cells [1, 4]\n node id() = 2 will get cells [2] \n \"\"\"\n \n self.gidlist = [] \n \n if N == []:\n N = self.N\n \n # borders where another celltype begins\n self.global_gidlist = []\n self.n_borders = [0]\n for l in range(1,self.n_celltypes+1):\n self.n_borders.append(sum(N[0:l]))\n self.global_gidlist.append(range(self.n_borders[-2], self.n_borders[-1]))\n\n for n in range(self.n_celltypes): # create list in list \n self.gidlist.append([]) \n \n for i in range(int(self.id), sum(N), int(self.nhost)): # loop over all cells\n \n n = np.where((np.array(self.n_borders)-i)>0)[0][0]-1 # find out what cell type this is\n self.gidlist[n].append(i) # put in specific gidlist for that celltype\n \n self.gid_count = self.gid_count + sum(N)\n \n if self.id == 0: print \"nodeid:\" , self.id , \", gidlist:\" , self.gidlist , \", total gids:\" , len(self.global_gidlist) , \", sum(N):\" , sum(N) # check gids of node\n \n \n def del_cells(self):\n if self.cells != []: \n for n in range(self.n_celltypes): \n for m in self.cells[n]:\n print \"deleting cell\", m\n del m \n del self.cells\n self.cells = [] \n if self.use_mpi: self.pc.gid_clear() \n\n\n def create_cells(self):\n \"\"\"\n Create cell objects on this host.\n \"\"\"\n if self.do_run:\n \n self.del_cells()\n \n if self.id == 0: print \"creating cells\"\n \n for n in range(self.n_celltypes): \n self.cells.append([]) # create list in list \n \n #print self.cellimport[n]\n exec self.cellimport[n]\n \n #print self.gidlist\n for i in self.gidlist[n]:\n \n #if \"sigma\" not in self.cell_exe[n]:\n # exec self.cell_exe[n]\n # cell.gid = 
i # tell cell it's gid!\n # print i\n #else:\n \n if (self.celltype[n] == \"IfCell\") or (self.celltype[n] == \"Grc\"):\n \n # add gid to cell and execute!\n if self.cell_exe[n][-2] == \"(\":\n exec self.cell_exe[n][0:-1] + \"gid=\" + str(i) + \")\"\n else:\n exec self.cell_exe[n][0:-1] + \", gid=\" + str(i) + \")\"\n \n else:\n exec self.cell_exe[n] \n cell.gid = i\n \n self.cells[n].append(cell) # add to (local) list\n \n if self.use_mpi:\n #### Tell this host it has this gid\n #### gids can be any integer, they just need to be unique.\n #### In this simple case, we set the gid to i.\n self.pc.set_gid2node(i, int(self.id))\n self.pc.cell(i, cell.nc_spike) # Associate the cell with this host and gid\n \n ## NOT NECESSARY ANYMORE ##\n #### Means to tell the ParallelContext that this cell is a source.\n #nc = cell.connect_target(None)\n #self.ncs[n].append(nc) \n \n #### Record spikes of this cell\n self.pc.spike_record(i, self.t_vec[n], self.id_vec[n])\n \n #print n, self.cells[n][-1].nc_spike.thresh\n else:\n \n self.t_vec[n].append(h.Vector())\n cell.nc_spike.record(self.t_vec[n][-1]) \n \n\n\n def connect_cells(self, conntype=[], stdp=[], tend=1e9):\n \"\"\"\n Connect cells as specified.\n \"\"\"\n \n if self.do_run:\n \n stdp = stdp[:]\n conntype = conntype[:]\n \n if len(stdp) == 0:\n for i in conntype:\n stdp.append({'wmax':0, 'taupre':0, 'taupost':0, 'apre':0, 'apost':0}) \n else:\n self.stdp_used = True\n \n for i, conn in enumerate(conntype): \n \n typ = conn['type']\n conv = conn['conv']\n src = conn['src']\n tgt = conn['tgt']\n w0 = conn['w']\n var = conn['var']\n tau1 = conn['tau1']\n tau2 = conn['tau2']\n \n if 'mgr2' in conn.keys():\n mgr2 = conn['mgr2']\n mgr2_var = conn['mgr2_var']\n else:\n mgr2 = 0\n mgr2_var = 0\n \n if 'e_inh' in conn.keys(): \n e_inh = conn['e_inh']\n else:\n e_inh = -65\n \n if 'e_ex' in conn.keys(): \n e_ex = conn['e_ex']\n else:\n e_ex = 0\n \n wmax = stdp[i]['wmax']\n taupre = stdp[i]['taupre']\n taupost = 
stdp[i]['taupost']\n apre = stdp[i]['apre']\n apost = stdp[i]['apost']\n \n # Connect conv cells of celltype src to every cell of celltype tgt\n for ni, i in enumerate(self.cells[tgt]):\n \n rnd.seed(i.gid*10*self.seed)\n \n if conv >= len(self.global_gidlist[src]):\n gids = self.global_gidlist[src]\n if self.id == 0: print \"more or equal conv to len(self.global_gidlist[src])\"\n else:\n gids = rnd.sample(self.global_gidlist[src],conv) \n \n if self.id == 0: print conn['type'], \":\", ni, \":\", gids[0], \"\\n\"\n \n for ng, g in enumerate(gids):\n \n np.random.seed(g*12) \n #np.random.seed(int(g%10+1)*12) \n \n if len(shape(w0))>0: # array is given\n print \"w array is given\"\n \n if len(w0[ng]) == self.N[0]:\n w = w0[ng][ni]\n \n elif (var > 0) and (w0>0):\n w = np.random.normal(w0, w0*var, 1).clip(min=0)\n else:\n w = w0\n \n if (mgr2_var > 0) and (mgr2>0):\n mg = np.random.normal(mgr2, mgr2*mgr2_var, 1).clip(min=0)\n else:\n mg = mgr2\n \n \n #print conn['type'], \":\", i.gid, \":\", g, \", w:\", w, \"\\n\"\n \n if self.celltype[tgt] == 'IfCell':\n \n if typ == 'gogr':\n \n i.whatami = \"grc\"\n i.synlist_inh.append(Synapse('goc', i, i.soma, nrel=0, record_all=0, weight_gmax=w))\n i0 = int(len(i.synlist_inh)-1)\n \n i.nc_inh.append(self.pc.gid_connect(g, i.synlist_inh[i0].input))\n i.nc_inh[-1].delay = 1\n i.nc_inh[-1].weight[0] = 1\n \n if typ == 'grgo':\n \n i.whatami = \"goc\"\n i.synlist.append(Synapse('grc', i, i.soma, syntype = 'D', nrel=0, record_all=0, weight_gmax=w))\n e0 = int(len(i.synlist)-1)\n \n i.nc.append(self.pc.gid_connect(g, i.synlist[e0].input))\n i.nc[-1].delay = 1\n i.nc[-1].weight[0] = 1\n \n if typ == 'grgom':\n \n i.whatami = \"goc\"\n i.synlist.append(Synapse('grc', i, i.soma, syntype = 'DM', nrel=0, record_all=0, weight_gmax=w, mglufac = mg))\n e0 = int(len(i.synlist)-1)\n \n i.nc.append(self.pc.gid_connect(g, i.synlist[e0].input))\n i.nc[-1].delay = 1\n i.nc[-1].weight[0] = 1\n \n \n if typ == 'e2inh':\n \n 
i.create_synapses(n_inh=1, tau1_inh=tau1, tau2_inh=tau2, e_inh=e_inh, w = w, wmax = wmax, taupre = taupre, taupost = taupost, apre = apre, apost = apost, tend=tend)\n i0 = len(i.synlist_inh)-1\n \n if self.use_mpi:\n if wmax == 0:\n i.pconnect_target(self.pc, source=g, target=i0, syntype='inh', weight=w, delay=1)\n else:\n i.pconnect_target(self.pc, source=g, target=i0, syntype='inh', weight=1, delay=1)\n \n else: \n if wmax == 0:\n i.nc_inh.append(self.cells[1][g-self.N[0]].connect_target(target=i.synlist_inh[i0], weight=w, delay=1))\n else:\n i.nc_inh.append(self.cells[1][g-self.N[0]].connect_target(target=i.synlist_inh[i0], weight=1, delay=1))\n \n if typ == 'e2ex':\n \n i.create_synapses(n_ex = 1, tau1 = tau1, tau2 = tau2, e_ex=e_ex, w = w, wmax = wmax, taupre = taupre, taupost = taupost, apre = apre, apost = apost, tend=tend)\n e0 = len(i.synlist)-1\n \n if self.use_mpi:\n if wmax == 0:\n i.pconnect_target(self.pc, source=g, target=e0, syntype='ex', weight=w, delay=1) \n else:\n i.pconnect_target(self.pc, source=g, target=e0, syntype='ex', weight=1, delay=1) \n \n else: \n if wmax == 0:\n i.nc.append(self.cells[0][g].connect_target(target=i.synlist[e0], weight=w, delay=1))\n else:\n i.nc.append(self.cells[0][g].connect_target(target=i.synlist[e0], weight=1, delay=1))\n \n else: # No IfCell\n \n if typ == 'gogr':\n i.createsyn(ngoc = 1, weight_gmax=w) # multiplication factor\n i0 = len(i.GOC_L)-1 # get number of current synapse!\n i.pconnect(self.pc,g,i0,'goc')\n \n if typ == 'grgo':\n i.createsyn(ngrc = 1, weight_gmax=w) # multiplication factor\n i0 = len(i.GRC_L)-1 # get number of current synapse!\n i.pconnect(self.pc,g,i0,'grc',conduction_speed=0,grc_positions=[1])\n \n if typ == 'grgom':\n #print w, mg\n i.createsyn(ngrcm = 1, weight_gmax=w, mglufac = mg) # multiplication factor\n i0 = len(i.GRC_L)-1 # get number of current synapse!\n i.pconnect(self.pc,g,i0,'grc',conduction_speed=0,grc_positions=[1])\n \n if typ == 'grstl':\n i.createsyn(ngrc = 1, 
weight_gmax=w) # multiplication factor\n i0 = len(i.GRC_L)-1 # get number of current synapse!\n i.pconnect(self.pc,g,i0,'grc',conduction_speed=0,grc_positions=[1])\n \n \n if 'e2' in typ:\n \n if 'inh' in typ:\n Erev = -65\n elif 'ex' in typ:\n Erev = 0\n \n if tau1 == 0:\n syn = h.ExpSyn(i.soma(0.5))\n syn.tau = tau2/ms\n else: \n if wmax == 0:\n syn = h.Exp2Syn(i.soma(0.5))\n syn.tau1 = tau1/ms\n syn.tau2 = tau2/ms\n \n else: # STDP\n syn = h.stdpE2S(i.soma(0.5))\n syn.tau1 = tau1/ms\n syn.tau2 = tau2/ms\n \n syn.on = 1\n syn.thresh = -20\n \n syn.wmax = wmax\n syn.w = w\n \n syn.taupre = taupre/ms\t\n syn.taupost = taupost/ms\n syn.apre = apre\n syn.apost = apost\n \n syn.e = Erev/mV\n \n if self.celltype[tgt] == 'Grc':\n \n i.GOC_L.append(syn)\n i0 = int(len(i.GOC_L)-1) # get number of current synapse!\n \n i.gocncpc.append(self.pc.gid_connect(g, i.GOC_L[i0]))\n i.gocncpc[-1].delay = 1\n \n if wmax == 0:\n i.gocncpc[-1].weight[0] = w\n else:\n i.gocncpc[-1].weight[0] = 1\n \n elif self.celltype[tgt] == 'Goc':\n \n i.GRC_L.append(syn)\n e0 = int(len(i.GRC_L)-1) # get number of current synapse!\n \n i.pfncpc.append(self.pc.gid_connect(g, i.GRC_L[e0]))\n i.pfncpc[-1].delay = 1\n i.pfncpc[-1].weight[0] = w\n \n if wmax == 0:\n i.pfncpc[-1].weight[0] = w\n else:\n i.pfncpc[-1].weight[0] = 1\n \n #self.rec_s1 = h.Vector()\n #self.rec_s1.record(self.cells[0][0].synlist_inh[0]._ref_g) \n #self.rec_s2 = h.Vector()\n #self.rec_s2.record(self.cells[1][0].synlist_inh[0]._ref_g) \n \n \n def syn_output(self):\n \"\"\"\n Connect cell n to target cell sum(self.N) + 100.\n \"\"\"\n \n if self.id == 0: # create target cell\n\n tgt_gid = self.gid_count\n self.gid_count = self.gid_count + 1 \n \n # Synaptic integrated response\n self.rec_g = h.Vector()\n self.passive_target = PassiveCell()\n if self.use_mpi: self.pc.set_gid2node(tgt_gid, 0) # Tell this host it has this gid\n \n syn = self.passive_target.create_synapses(tau1 = self.syn_tau1, tau2 = self.syn_tau2) # if tau1=tau2: 
alpha synapse!\n \n for i in range(self.n_borders[self.a_celltype[0]],self.n_borders[self.a_celltype[0]+1]): # take all cells, corresponding to self.a_celltype, not just the ones in self.gidlist:\n \n src_gid = i\n \n if self.use_mpi:\n nc = self.pc.gid_connect(src_gid, syn)\n nc.weight[0] = 1\n nc.delay = self.nc_delay/ms #0.05 # MUST be larger than dt!!!\n \n else:\n nc = self.cells[self.a_celltype[0]][src_gid].connect_target(target=syn, weight=1, delay=self.nc_delay/ms)\n \n self.nclist.append(nc) \n \n self.rec_g.record(syn._ref_g)\n \n \n def syn_out_all(self, tau1 = 1*ms, tau2 = 30*ms):\n \n if self.do_run:\n \n for n in range(self.n_celltypes): \n for i, gid in enumerate(self.gidlist[n]):\n \n self.cells[n][i].start_record(tau1 = tau1/ms, tau2 = tau2/ms)\n \n self.called_syn_out_all = True\n \n \n def get_i(self, a, n, do_plot = True):\n \n import md5\n m = md5.new()\n \n if \", sigma\" in self.cell_exe[n]: \n cell_exe_new = self.cell_exe[n].split(\", sigma\")[0] + \")\"\n else:\n cell_exe_new = self.cell_exe[n]\n \n m.update(cell_exe_new)\n filename = self.data_dir + '/if_' + self.celltype[n] + '_' + m.hexdigest() + '.p'\n \n #print filename\n \n if self.id == 0:\n is_there = os.path.isfile(filename)\n else:\n is_there = None\n \n is_there = self.broadcast(is_there)\n \n if (is_there is not True) or (self.force_run is True): # run i/f estimation\n \n if self.id == 0: print '- running i/f estimation for ', self.celltype[n], ' id: ' , m.hexdigest() \n exec self.cellimport[n]\n exec cell_exe_new\n sim = Stimulation(cell, temperature = self.temperature, use_multisplit = self.use_multisplit)\n sim.spikes_from_neuron = False\n sim.celltype = self.celltype[n]\n current_vector, freq_vector, freq_onset_vector = sim.get_if(istart = self.istart, istop = self.istop, di = self.di, tstop = self.tstop_if) \n \n sim = None\n cell = None\n \n if self.id == 0:\n if do_plot:\n plt.figure(99)\n plt.plot(current_vector, freq_vector, 'r*-')\n plt.plot(current_vector, 
freq_onset_vector, 'b*-')\n plt.savefig(\"./figs/dump/latest_if_\" + self.celltype[n] + \".pdf\", dpi = 300) # save it \n plt.clf()\n #plt.show()\n \n ifv = {'i':current_vector,'f':freq_vector}\n print ifv\n \n pickle.dump(ifv, gzip.GzipFile(filename, \"wb\" ))\n \n self.barrier()\n \n else:\n \n if self.id == 0: \n ifv = pickle.load(gzip.GzipFile(filename, \"rb\" ))\n #print ifv\n \n self.barrier()\n \n if self.id == 0:\n \n f = ifv.get('f') \n i = ifv.get('i')\n \n i = i[~isnan(f)]\n f = f[~isnan(f)]\n \n iin = if_extrap(a, f, i)\n \n else:\n \n iin = [0]\n \n iin = self.broadcast(iin, root=0, fast = True)\n self.barrier()\n \n return iin\n\n\n def set_i(self, ihold = [0]):\n \n ihold = list(ihold)\n self.ihold_orig = list(ihold)\n \n self.barrier() # wait for other nodes\n \n # Ihold given as frequency, convert to current\n \n if ((self.give_freq)): \n \n ihold0 = [[] for _ in range(self.n_celltypes)]\n \n for n in range(self.n_celltypes):\n a = np.array([ihold[n]])\n #print \"a:\", a\n iin = self.get_i(a, n)\n #print \"iin:\", iin\n ihold0[n] = iin[0]\n \n if self.id == 0: print '- ihold: ', ihold, 'Hz, => ihold: ', ihold0, 'nA' \n \n # Modulation depth given, not always applied to current!\n for n in range(self.n_celltypes):\n \n if self.amod[n] is not None:\n \n if self.give_freq:\n \n # Apply to amplitude:\n a = np.array([ihold[n]]) + self.amod[n]*np.array([ihold[n]])\n self.amp[n] = self.get_i(a, n) - ihold0[n]\n \n if self.id == 0:\n print '- amp: ihold: ', ihold[n], 'Hz , amod: ', self.amod[n], ', => amp: ', self.amp[n], 'nA (' #, self.get_i(a, n), ')'\n \n elif self.n_syn_ex[n] > 0:\n \n if self.id == 0:\n print '- amp: ihold: ', ihold[n], 'Hz , amod: ', self.amod[n], ', => amp will be set for each spike generator'\n\n else:\n \n self.amp[n] = self.amod[n] * ihold[n] \n \n if self.id == 0:\n print '- amp: ihold: ', ihold[n], 'nA , amod: ', self.amod[n], ', => amp: ', self.amp[n], 'nA'\n \n # Noise depth given, not always applied to current!\n if 
self.anoise[n] is not None:\n \n if (self.give_freq is True) or (self.n_syn_ex[n] > 0):\n \n # Apply to amplitude:\n a = np.array([ihold[n]]) + self.anoise[n]*np.array([ihold[n]])\n self.fluct_s[n] = ((self.get_i(a, n) - ihold0[n]))/2. # adjust with /2 so that noise = +-2*std\n \n if self.id == 0:\n print '- noise: ihold: ', ihold[n], 'Hz , anoise: ', self.anoise[n], ', => fluct_s: ', self.fluct_s[n], 'nA'\n \n else:\n \n self.fluct_s[n] = self.anoise[n] * ihold[n] \n \n if self.id == 0:\n print '- noise: ihold: ', ihold[n], 'nA , anoise: ', self.anoise[n], ', => fluct_s: ', self.fluct_s[n], 'nA'\n \n \n if self.give_freq is True: \n ihold = ihold0\n \n return ihold\n \n \n def calc_fmean(self, t_vec, t_startstop):\n \n #t_startstop[0] = 1\n #t_startstop[1] = 5\n \n f_cells_mean = 0\n f_cells_cv = np.nan\n f_cells_std = np.nan\n \n if len(t_vec) > 0: \n \n f_start_in = mlab.find(t_vec >= t_startstop[0]) # 1\n f_stop_in = mlab.find(t_vec <= t_startstop[1]) # 5\n \n if (len(f_start_in) > 0) & (len(f_stop_in) > 0):\n \n f_start = f_start_in[0] \n f_stop = f_stop_in[-1]+1 \n use_spikes = t_vec[f_start:f_stop]*1e3\n \n if len(use_spikes) > 1:\n s1 = signals.SpikeTrain(use_spikes)\n isi = s1.isi()\n f_cells_mean = s1.mean_rate() # use mean of single cells\n f_cells_cv = np.std(isi)/np.mean(isi)\n f_cells_std = np.std(isi)\n \n #f_start_in = mlab.find(t_vec >= 1) \n #f_stop_in = mlab.find(t_vec <= 2) \n \n #if (len(f_start_in) > 0) & (len(f_stop_in) > 0):\n \n # f_start = f_start_in[0] \n # f_stop = f_stop_in[-1]+1 \n # use_spikes = t_vec[f_start:f_stop]*1e3\n \n # if len(use_spikes) > 1:\n # s1 = signals.SpikeTrain(use_spikes)\n # isi = s1.isi()\n # f_cells_cv = np.std(isi)/np.mean(isi)\n \n return f_cells_mean, f_cells_cv, f_cells_std \n \n \n def get_fmean(self, t_all_vec_vecn, id_all_vec_vecn, t_startstop, gidlist, facborder = 3): # 1e9\n \n f_cells_mean = zeros(len(gidlist))\n f_cells_base = zeros(len(gidlist))\n f_cells_std = nans(len(gidlist))\n f_cells_cv = 
nans(len(gidlist))\n f_cells_gid = nans(len(gidlist))\n \n fbase = np.nan\n fmean = np.nan\n fmax = np.nan\n fmstd = np.nan\n fcvm = np.nan\n fstdm = np.nan\n \n f_cells_mean_all = []\n f_cells_base_all = []\n f_cells_cv_all = []\n f_cells_std_all = []\n \n gid_del = np.array([])\n \n if self.no_fmean == False:\n \n if self.id == 0: print \"- sorting for fmean\"\n\n for i, l in enumerate(gidlist):\n \n t_0_vec = t_all_vec_vecn[where(id_all_vec_vecn==l)]\n f_cells_mean[i], f_cells_cv[i], f_cells_std[i] = self.calc_fmean(t_0_vec, t_startstop)\n f_cells_base[i], _, _ = self.calc_fmean(t_0_vec, [self.delay_baseline-4,self.delay_baseline])\n f_cells_gid[i] = l\n \n if self.id == 0: print \"- gather fmean\" \n f_cells_mean_all = self.do_gather(f_cells_mean)\n f_cells_base_all = self.do_gather(f_cells_base)\n f_cells_std_all = self.do_gather(f_cells_std)\n f_cells_cv_all = self.do_gather(f_cells_cv)\n f_cells_gid_all = self.do_gather(f_cells_gid)\n\n if self.id == 0:\n \n #print f_cells_mean_all\n \n f_cells_mean_all = np.nan_to_num(f_cells_mean_all)\n fmean = mean(f_cells_mean_all) # compute mean of mean rate for all cells\n fmstd = std(f_cells_mean_all) \n fmax = max(f_cells_mean_all)\n \n f_cells_base_all = np.nan_to_num(f_cells_base_all)\n fbase = mean(f_cells_base_all) # compute mean of mean rate for all cells\n \n f_cells_cv_all = f_cells_cv_all[~np.isnan(f_cells_cv_all)]\n f_cells_std_all = f_cells_std_all[~np.isnan(f_cells_std_all)]\n fcvm = mean(f_cells_cv_all)\n fstdm = mean(f_cells_std_all)\n \n print \"- get_fmean, fmean: \",fmean, \"fmax: \",fmax, \"Hz\", \"fmstd: \",fmstd, \"Hz\", \"fcvm: \",fcvm, \"fstdm: \",fstdm, \"Hz\" ,\"fbase: \", fbase, \"Hz\"\n \n if facborder < 1e9:\n \n fborder = fmean + facborder*fmstd\n i = mlab.find(f_cells_mean_all > fborder)\n gid_del = f_cells_gid_all[i]\n \n # f_cells_mean_all[i] = 0\n # f_cells_cv_all[i] = np.nan\n # f_cells_std_all[i] = np.nan\n \n # fmean2 = mean(np.nan_to_num(f_cells_mean_all)) # compute mean of mean 
rate for all cells\n # fmstd2 = std(np.nan_to_num(f_cells_mean_all)) \n # fmax2 = max(np.nan_to_num(f_cells_mean_all))\n \n # fcvm2 = mean(f_cells_cv_all[~np.isnan(f_cells_cv_all)])\n # fstdm2 = mean(f_cells_std_all[~np.isnan(f_cells_std_all)])\n \n # print \"- after facborder: get_fmean, fmean: \",fmean2, \"fmax: \",fmax2, \"Hz\", \"fmstd: \",fmstd2, \"Hz\", \"fcvm: \",fcvm2, \"fstdm: \",fstdm2, \"Hz, gid_del: \", gid_del\n \n\n return fmean, fmax, fmstd, fcvm, fstdm, gid_del, f_cells_mean_all, f_cells_cv_all, f_cells_std_all, fbase, f_cells_base_all \n\n \n def connect_fluct(self):\n \"\"\"\n Create fluctuating input onto every cell.\n \"\"\"\n \n if self.do_run:\n \n for m in self.flucts:\n del m \n del self.flucts\n \n for m in self.noises:\n del m \n del self.noises\n \n self.flucts = []\n self.noises = []\n \n for n in range(self.n_celltypes):\n \n for i, gid in enumerate(self.gidlist[n]): # for every cell in the gidlist \n \n #h.mcell_ran4_init(gid)\n \n noiseRandObj = h.Random() # provides NOISE with random stream\n self.noises.append(noiseRandObj) # has to be set here not inside the nmodl function!! 
\n \n # print str(gid) + \": \" + str(noiseRandObj.normal(0,1))\n \n fluct = h.Ifluct2(self.cells[n][i].soma(0.5))\n fluct.m = self.fluct_m/nA # [nA]\n fluct.s = self.fluct_s[n]/nA # [nA]\n fluct.tau = self.fluct_tau/ms # [ms]\n self.flucts.append(fluct) # add to list \n self.flucts[-1].noiseFromRandom(self.noises[-1]) # connect random generator!\n \n self.noises[-1].MCellRan4(1, gid+1) # set lowindex to gid+1, set highindex to > 0 \n self.noises[-1].normal(0,1)\n \n \n def connect_gfluct(self, E_e=0, E_i=-65):\n \"\"\"\n Create fluctuating conductance input onto every cell.\n \"\"\"\n if self.do_run:\n \n for m in self.flucts:\n del m \n del self.flucts\n \n for m in self.noises:\n del m \n del self.noises\n \n self.flucts = []\n self.noises = []\n \n for n in range(self.n_celltypes):\n \n fluct_g_i0_n = self.fluct_g_i0[n]\n \n if type(fluct_g_i0_n) is not ndarray: fluct_g_i0_n = np.array([fluct_g_i0_n])\n \n if len(fluct_g_i0_n) == len(self.global_gidlist[n]):\n pass\n else:\n fluct_g_i0_n = np.ones(int(len(self.global_gidlist[n])))*fluct_g_i0_n[0]\n if self.id == 0: print \"- single value in fluct_g_i0_n\"\n \n #print fluct_g_i0_n\n \n for i, gid in enumerate(self.gidlist[n]): # for every cell in the gidlist \n \n #h.mcell_ran4_init(gid)\n \n noiseRandObj = h.Random() # provides NOISE with random stream\n self.noises.append(noiseRandObj) # has to be set here not inside the nmodl function!! 
\n \n # print str(gid) + \": \" + str(noiseRandObj.normal(0,1))\n \n fluct = h.Gfluct3(self.cells[n][i].soma(0.5))\n fluct.E_e = E_e/mV # [mV]\n fluct.E_i = E_i/mV # [mV]\n fluct.g_e0 = self.fluct_g_e0[n]/uS # [uS]\n fluct.g_i0 = fluct_g_i0_n[i]/uS # [uS]\n fluct.std_e = self.fluct_std_e[n]/uS # [uS] \n fluct.std_i = self.fluct_std_i[n]/uS # [uS] \n fluct.tau_e = self.fluct_tau_e/ms #tau_e/ms # [ms] \n fluct.tau_i = self.fluct_tau_i/ms #tau_i/ms # [ms]\n \n self.flucts.append(fluct) # add to list \n self.flucts[-1].noiseFromRandom(self.noises[-1]) # connect random generator!\n \n self.noises[-1].MCellRan4(1, gid+1) # set lowindex to gid+1, set highindex to > 0 \n self.noises[-1].normal(0,1)\n \n \n def connect_synfluct(self, PF_BG_rate=6, PF_BG_cv=1, STL_BG_rate=20, STL_BG_cv=1):\n \"\"\"\n Create fluctuating synaptic input onto every cell.\n \"\"\"\n \n if self.do_run:\n \n for m in self.ST_stims:\n del m \n del self.ST_stims\n \n for m in self.PF_stims:\n del m \n del self.PF_stims\n \n self.ST_stims = []\n self.PF_stims = []\n \n \n for n in range(self.n_celltypes):\n \n for i, gid in enumerate(self.gidlist[n]): # for every cell in the gidlist \n \n PF_syn_list = self.cells[n][i].createsyn_PF()\n \n for d in PF_syn_list:\n d.input.newnetstim.number = 1e9\n d.input.newnetstim.noise = PF_BG_cv\n d.input.newnetstim.interval = 1000.0 / PF_BG_rate\n d.input.newnetstim.start = 0\n \n self.PF_stims.append(PF_syn_list)\n \n ST_stim_list = self.cells[n][i].createsyn_ST(record_all=0)\n\n for d in ST_stim_list:\n d.newnetstim.number = 1e9\n d.newnetstim.noise = STL_BG_cv\n d.newnetstim.interval = 1000.0 / STL_BG_rate\n d.newnetstim.start = 0\n \n self.ST_stims.append(ST_stim_list)\n \n if self.id == 0: print \"- PF and ST stimulation added.\"\n \n \n\n def set_IStim(self, ihold = None, ihold_sigma = None, random_start = True, tstart_offset = 0):\n \"\"\"\n Add (random) ihold for each cell and offset!\n \"\"\"\n if self.do_run:\n \n # if not given, use the one in self\n if 
ihold == None:\n ihold = self.ihold\n if ihold_sigma == None:\n ihold_sigma = self.ihold_sigma\n \n if ihold[self.a_celltype[0]] != 0:\n ihold = self.set_i(ihold) \n \n for m in self.ic_holds:\n #m.destroy()\n del m \n del self.ic_holds\n \n for m in self.ic_starts:\n #m.destroy()\n del m \n del self.ic_starts\n \n for m in self.vc_starts:\n #m.destroy()\n del m \n del self.vc_starts\n \n self.ic_holds = []\n self.ic_starts = [] \n self.vc_starts = []\n self.i_holdrs = []\n self.i_holds = ihold\n \n for n in range(self.n_celltypes):\n self.i_holdrs.append([])\n \n for i, gid in enumerate(self.gidlist[n]): # for every cell in the gidlist \n \n np.random.seed(gid*20)\n \n tis = 1\n \n if random_start == True:\n \n # random start time\n tstart = np.random.uniform(tstart_offset+0, tstart_offset+0.5)\n #if self.id == 0: print \"tstart:\", tstart\n vc_start = h.SEClamp(self.cells[n][i].soma(0.5))\n vc_start.dur1 = tstart/ms\n vc_start.amp1 = -80\n self.vc_starts.append(vc_start)\n tis = 0\n \n else:\n \n tis = 0 \n \n \n if ihold_sigma[n] != 0:\n #print ihold_sigma[n], ihold[n]\n ihold_r = np.random.normal(ihold[n], ihold[n]*ihold_sigma[n], 1).clip(min=0)\n #ihold_r = np.random.uniform(ihold[n]*ihold_sigma[n], ihold[n])\n \n elif self.CF_var is not False: # CF gets not adapted to current but final frequnecy!\n \n r_ok = False\n while r_ok == False:\n r_temp = np.random.normal(self.ihold_orig[n], self.CF_var[n][1], 1) \n if (r_temp <= self.CF_var[n][2]) and (r_temp >= self.CF_var[n][0]): # check borders!\n r_ok = True\n \n #print r_temp \n ihold_r = self.get_i(r_temp, n)\n #print ihold_r\n #if self.id == 0: \n print \"set self.CF_var\", r_temp, ihold_r\n \n else: # same ihold for all cells!\n ihold_r = ihold[n]\n \n self.i_holdrs[n].append(ihold_r)\n \n if ihold_r != 0:\n \n if hasattr(self.cells[n][i], 'input_vec'):\n \n ic_hold = []\n for vec in self.cells[n][i].input_vec:\n for inv in vec:\n #print ihold_r\n ic_hold.append(h.IClamp(inv(0.5))) \n ic_hold[-1].amp = 
self.cells[n][i].ifac * ihold_r / self.cells[n][i].n_input_spiny / nA\n ic_hold[-1].delay = tis/ms\n ic_hold[-1].dur = 1e9\n \n else: \n\n # holding current\n ic_hold = h.IClamp(self.cells[n][i].soma(0.5))\n ic_hold.delay = tis/ms\n ic_hold.dur = 1e9\n ic_hold.amp = ihold_r/nA\n \n self.ic_holds.append(ic_hold)\n \n if self.id == 0: print \"set_IStim finished. ihold: \", ihold, \", ihold_sigma: \", ihold_sigma\n \n \n def set_IStep(self, istep = [0], istep_sigma = [0], tstep = 5, tdur = 1e6, give_freq = True):\n \"\"\"\n Add istep for each cell and offset!\n \"\"\"\n if self.do_run:\n #for m in self.ic_steps:\n # m.destroy()\n # del m \n #del self.ic_steps\n \n #self.ic_steps = []\n \n istep = list(istep)\n neg = False\n \n for n in range(self.n_celltypes):\n \n if istep[n] < 0: \n neg = True\n istep[n] = abs(istep[n]) # make positive again\n \n if istep[n] != 0:\n if give_freq is True:\n a = np.array([istep[n]])\n iin = self.get_i(a, n)[0]\n if self.id == 0: print \"celltype: \", n, \" istep: \", istep[n], \"Hz => \", iin, \" nA\"\n istep[n] = iin \n \n for n in range(self.n_celltypes):\n \n for i, gid in enumerate(self.gidlist[n]): # for every cell in the gidlist \n \n np.random.seed(gid*30)\n \n if self.i_holdrs == []:\n \n if istep_sigma[n] != 0:\n istep_r = np.random.normal(istep[n], istep[n]*istep_sigma[n], 1).clip(min=0)\n else: # same ihold for all cells!\n istep_r = istep[n]\n \n else: # ihold has been set!\n \n if istep_sigma[n] != 0:\n istep_r = np.random.normal(istep[n]-self.i_holds[n], (istep[n]-self.i_holds[n])*istep_sigma[n], 1).clip(min=0) # delta now! put on top of hold!\n else: # same ihold for all cells!\n istep_r = istep[n]-self.i_holds[n] # delta now! 
put on top of hold!\n \n if neg:\n istep_r = -1*istep_r\n \n if istep[n] == 0:\n istep_r = -1*self.i_holdrs[n][i] \n \n #print 'is:' + str(istep_r) + 'was:' + str(self.i_holdrs[n][i])\n \n if istep_r != 0: \n # step current\n ic_step = h.IClamp(self.cells[n][i].soma(0.5))\n ic_step.delay = tstep/ms\n ic_step.dur = tdur/ms\n ic_step.amp = istep_r/nA\n self.ic_steps.append(ic_step)\n \n \n if self.id == 0: print \"set_IStep finished. istep: \", istep, \", istep_sigma: \", istep_sigma\n \n\n def set_IPlay(self, stimulus, t):\n \"\"\"\n Initializes values for current clamp to play a signal. \n \"\"\"\n \n if self.do_run:\n \n for m in self.tvecs:\n #m.destroy()\n del m \n del self.tvecs\n \n for m in self.ivecs:\n #m.destroy()\n del m \n del self.ivecs\n \n for m in self.plays:\n #m.destroy()\n del m \n del self.plays\n \n self.tvecs = []\n self.ivecs = []\n self.plays = []\n \n for i, gid in enumerate(self.gidlist[self.a_celltype[0]]): # for every cell in the gidlist \n \n tvec = h.Vector(t/ms)\n ivec = h.Vector(stimulus/nA)\n \n play = h.IClamp(self.cells[self.a_celltype[0]][i].soma(0.5))\n play.delay = 0\n play.dur = 1e9\n \n ivec.play(play._ref_amp, tvec, 1)\n \n self.plays.append(play) # add to list\n self.tvecs.append(tvec) # add to list\n self.ivecs.append(ivec) # add to list \n \n if self.id == 0: print \"set_IPlay finished.\"\n \n \n def set_IPlay2(self, stimulus, t):\n \"\"\"\n Initializes values for current clamp to play a signal. 
\n \"\"\"\n \n if self.do_run:\n \n for m in self.tvecs:\n #m.destroy()\n del m \n del self.tvecs\n \n for m in self.ivecs:\n #m.destroy()\n del m \n del self.ivecs\n \n for m in self.plays:\n #m.destroy()\n del m \n del self.plays\n \n self.tvecs = []\n self.ivecs = []\n self.plays = []\n \n for j in self.a_celltype:\n \n tvec = h.Vector(t/ms)\n ivec = []\n for s in stimulus:\n if hasattr(self.cells[j][0], 'input_vec'):\n ivec.append(h.Vector(self.factor_celltype[j] * self.cells[j][0].ifac * s / self.cells[j][0].n_input_spiny / nA))\n else:\n ivec.append(h.Vector(self.factor_celltype[j]*s/nA))\n\n self.tvecs.append(tvec) # add to list\n self.ivecs.append(ivec) # add to list \n \n for i, gid in enumerate(self.gidlist[j]): # for every cell in the gidlist \n\n if hasattr(self.cells[j][i], 'input_vec'):\n \n play = []\n for iloc, vec in enumerate(self.cells[j][i].input_vec):\n isig = self.syn_ex_dist[j][iloc]-1\n #print isig\n for inv in vec:\n play.append(h.IClamp(inv(0.5))) \n play[-1].delay = 0\n play[-1].dur = 1e9\n ivec[isig].play(play[-1]._ref_amp, tvec, 1)\n \n else: \n #fluctuating current\n play = h.IClamp(self.cells[j][i].soma(0.5))\n play.delay = 0\n play.dur = 1e9\n ivec[0].play(play._ref_amp, tvec, 1)\n \n self.plays.append(play) # add to list\n\n \n if self.id == 0: print \"set_IPlay2 finished.\"\n \n \n def set_IPlay3(self, stimulus, t, amp = None):\n \"\"\"\n Initializes values for current clamp to play a signal. 
\n \"\"\"\n \n if self.do_run:\n \n for m in self.tvecs:\n #m.destroy()\n del m \n del self.tvecs\n \n for m in self.ivecs:\n #m.destroy()\n del m \n del self.ivecs\n \n for m in self.plays:\n #m.destroy()\n del m \n del self.plays\n \n self.tvecs = []\n self.ivecs = []\n self.plays = []\n \n for j in self.a_celltype:\n \n if amp is None:\n amp0 = 0\n else:\n amp0 = amp[j]\n \n tvec = h.Vector(t/ms)\n self.tvecs.append(tvec) # add to list\n \n for i, gid in enumerate(self.gidlist[j]): # for every cell in the gidlist \n \n if isinstance(self.factor_celltype[j], ( int, long ) ): \n ivec = h.Vector(self.factor_celltype[j]*(stimulus*amp0)/nA) \n else:\n np.random.seed(gid*40)\n rnd.seed(gid*40)\n if self.factor_celltype[j][1] > 0:\n f = np.random.normal(self.factor_celltype[j][0], self.factor_celltype[j][1], 1).clip(min=0)\n else:\n f = self.factor_celltype[j][0] \n if self.factor_celltype[j][2] > 0: # add inverted input with 50% probability, in future versions this will indicate the propability for -1 and 1\n f = rnd.sample([-1,1],1)[0] * f\n if self.id == 0: print \"- inverted input with 50% probability:\", f \n if self.id == 0: print \"- randomize play stimulus height\" \n ivec = h.Vector(f*(stimulus*amp0)/nA)\n \n self.ivecs.append(ivec) # add to list \n \n #fluctuating current\n play = h.IClamp(self.cells[j][i].soma(0.5))\n play.delay = 0\n play.dur = 1e9\n ivec.play(play._ref_amp, tvec, 1)\n \n self.plays.append(play) # add to list\n \n if self.id == 0: print \"set_IPlay3 finished.\"\n \n \n def set_PulseStim(self, start_time=[100*ms], dur=[1500*ms], steadyf=[100*Hz], pulsef=[150*Hz], pulse_start=[500*ms], pulse_len=[500*ms], weight0=1, tau01=[1*ms], tau02=[20*ms], weight1=1, tau11=[0*ms], tau12=[1*ms], noise = 1):\n \n if self.do_run:\n \n modulation_vec = []\n \n for n in range(self.n_celltypes):\n \n t_input = np.arange(0, dur[n], self.dt) # create stimulus time vector has to be in ms!! 
\n mod = np.concatenate(([np.zeros(round(start_time[n]/self.dt)), steadyf[n]*np.ones(round((pulse_start[n]-start_time[n])/self.dt)), pulsef[n]*np.ones(round(pulse_len[n]/self.dt)),steadyf[n]*np.ones(round((dur[n]-pulse_start[n]-pulse_len[n])/self.dt)) ])) \n modulation = (t_input, mod)\n \n #print shape(t_input), shape(mod), shape(modulation)\n \n for i, gid in enumerate(self.gidlist[n]): # for every cell in the gidlist \n \n if dur[n] > 0:\n \n if self.celltype[n] == 'Grc':\n \n nmf = 4\n \n for j in range(nmf):\n \n self.cells[n][i].createsyn(nmf = 1, ngoc = 0, weight = weight0) \n e0 = len(self.cells[n][i].MF_L)-1 # get number of current synapse!\n \n pulse_gid = int(self.gid_count + gid*1000 + j)\n \n train = mod_spike_train(modulation, noise = noise, seed = pulse_gid)\n \n self.setup_Play_train(train = train, input_gid = pulse_gid)\n \n self.cells[n][i].pconnect(self.pc,pulse_gid,int(e0),'mf') \n \n elif self.celltype[n] == 'Goc':\n \n nmf = 53\n \n for j in range(nmf):\n \n self.cells[n][i].createsyn(nmf = 1, weight = weight1)\n e0 = len(self.cells[n][i].MF_L)-1 # get number of current synapse!\n \n pulse_gid = int(self.gid_count + gid*1000 + j)\n \n train = mod_spike_train(modulation, noise = noise, seed = pulse_gid)\n \n self.setup_Play_train(train = train, input_gid = pulse_gid)\n \n self.cells[n][i].pconnect(self.pc,pulse_gid,int(e0),'mf') \n \n \n elif self.celltype[n] == 'Goc_noloop':\n \n ngrc = 100\n \n for j in range(ngrc):\n \n self.cells[n][i].createsyn(ngrc = 1, weight = weight0)\n e0 = len(self.cells[n][i].GRC_L)-1 # get number of current synapse!\n \n pulse_gid = int(self.gid_count + gid*1000 + j)\n \n train = mod_spike_train(modulation, noise = noise, seed=pulse_gid)\n \n self.setup_Play_train(train = train, input_gid = pulse_gid)\n \n self.cells[n][i].pconnect(self.pc,pulse_gid,int(e0),'grc') \n \n else:\n \n pulse_gid = int(self.gid_count + gid*1000 + 100)\n \n train = mod_spike_train(modulation, noise = noise, seed = pulse_gid)\n 
self.trains.append(train)\n \n setup_Play_train(train = train, input_gid = pulse_gid)\n \n # NMDA\n self.cells[n][i].create_synapses(n_ex=1, tau1=tau01[n], tau2=tau02[n])\n e0 = len(self.cells[n][i].synlist)-1\n \n weight=weight0[n]\n np.random.seed(gid*60)\n #weight = np.random.normal(weight, weight*0.5, 1).clip(min=0)\n self.cells[n][i].pconnect_target(self.pc, source=pulse_gid, target=e0, syntype='ex', weight=weight, delay=1)\n \n # AMPA\n self.cells[n][i].create_synapses(n_ex=1, tau1=tau11[n], tau2=tau12[n])\n e0 = len(self.cells[n][i].synlist)-1\n \n weight=weight1[n]\n np.random.seed(gid*60)\n #weight = np.random.normal(weight, weight*0.5, 1).clip(min=0)\n self.cells[n][i].pconnect_target(self.pc, source=pulse_gid, target=e0, syntype='ex', weight=weight, delay=1)\n \n \n modulation = (t_input, mod) # mack to s!\n modulation_vec.append(modulation) \n \n return modulation_vec\n \n \n def connect_Synapse(self, pulse_gid, nt, i, n, gid, j, syntype = \"ex\", nsyn=0): \n \n if self.do_run:\n \n if 'gsyn_in' in self.method_interpol: \n if isinstance(self.factor_celltype[nt], ( int, long ) ):\n f = self.factor_celltype[nt] \n else:\n f = self.factor_celltype[nt][0] \n \n if syntype == \"ex\":\n \n # each cell can receive different g_syn_ex ! 
\n if type(self.g_syn_ex[nt]) is ndarray:\n if len(self.g_syn_ex[nt]) == len(self.global_gidlist[nt]):\n w = self.g_syn_ex[nt][n]\n else:\n w = self.g_syn_ex[nt] \n else:\n w = self.g_syn_ex[nt] \n \n seed = int(10000 + 10*gid + j)\n np.random.seed(seed*41)\n \n if self.g_syn_ex_s[nt] > 0:\n w = np.random.normal(w, w*self.g_syn_ex_s[nt], 1).clip(min=0) # self.g_syn_ex_s[nt] \n \n if self.celltype[nt] == 'Grc':\n \n # delete old\n if j == 0: \n self.cells[nt][i].MF_L = []\n self.cells[nt][i].mfncpc = []\n \n if \"gr\" not in str(self.tau1_ex[nt]):\n \n if \"amfit\" in str(self.tau1_ex[nt]):\n syn = h.ExpZSyn(self.cells[nt][i].soma(0.5)) \n \n syn.tau1_ampa = 0.254\n syn.tau2_ampa = 0.254\n syn.tau3_ampa = 0.363\n syn.tau4_ampa = 6.523\n syn.f1_ampa = 8.8376e-05\n syn.f2_ampa = 5.5257e-05\n \n syn.f1_nmda = 0\n \n elif \"nmfit\" in str(self.tau1_ex[nt]):\n syn = h.ExpYSyn(self.cells[nt][i].soma(0.5))\n \n syn.f1_ampa = 0\n syn.f2_ampa = 0\n \n syn.tau1_nmda = 1.902\n syn.tau2_nmda = 82.032\n syn.f1_nmda = 7.853857483005277e-05\n \n elif \"fit\" in str(self.tau1_ex[nt]): \n syn = h.ExpGrcSyn(self.cells[nt][i].soma(0.5))\n \n syn.tau1_ampa = 0.254\n syn.tau2_ampa = 0.254\n syn.tau3_ampa = 0.363\n syn.tau4_ampa = 6.523\n syn.f1_ampa = 8.8376e-05\n syn.f2_ampa = 5.5257e-05\n \n syn.tau1_nmda = 1.902\n syn.tau2_nmda = 82.032\n syn.f1_nmda = 7.853857483005277e-05\n \n else:\n tau1 = self.tau1_ex[nt]\n tau2 = self.tau2_ex[nt]\n \n if tau1 == 0:\n syn = h.ExpSyn(self.cells[nt][i].soma(0.5))\n syn.tau = tau2/ms\n \n else: \n syn = h.Exp2Syn(self.cells[nt][i].soma(0.5))\n syn.tau1 = tau1/ms\n syn.tau2 = tau2/ms\n \n syn.e = 0/mV\n \n self.cells[nt][i].MF_L.append(syn)\n \n e0 = len(self.cells[nt][i].MF_L)-1 # get number of current synapse!\n \n syn_idx = int(e0)\n \n source = int(pulse_gid)\n self.cells[nt][i].mfncpc.append(self.pc.gid_connect(source, self.cells[nt][i].MF_L[syn_idx]))\n self.cells[nt][i].mfncpc[-1].delay = 1\n self.cells[nt][i].mfncpc[-1].weight[0] = w\n \n if 
'gsyn_in' in self.method_interpol:\n self.record_syn.append(h.Vector())\n self.record_syn[-1].record(self.cells[nt][i].MF_L[-1]._ref_g)\n self.gsyn_in_fac.append(f)\n \n else:\n \n nrel = 0\n \n if \"stoch\" in str(self.tau1_ex[nt]):\n nrel = 4\n \n self.cells[nt][i].createsyn(nmf = 1, ngoc = 0, weight_gmax = w, nrel=nrel) \n \n if \"ampa\" in str(self.tau1_ex[nt]):\n self.cells[nt][i].MF_L[-1].postsyns['NMDA'][0].gmax_factor = 0\n if \"nopre\" in str(self.tau1_ex[nt]):\n print \"- no pre\"\n self.cells[nt][i].MF_L[-1].postsyns['AMPA'][0].tau_rec = 1e-9\n self.cells[nt][i].MF_L[-1].postsyns['AMPA'][0].tau_facil = 1e-9\n self.cells[nt][i].MF_L[-1].postsyns['AMPA'][0].tau_1 = 0\n \n if \"nostdampa\" in str(self.tau1_ex[nt]):\n self.cells[nt][i].MF_L[-1].postsyns['NMDA'][0].gmax_factor = 0\n self.cells[nt][i].MF_L[-1].postsyns['AMPA'][0].tau_rec = 1e-9\n self.cells[nt][i].MF_L[-1].postsyns['AMPA'][0].tau_facil = 1e-9\n self.cells[nt][i].MF_L[-1].postsyns['AMPA'][0].tau_1 = 0\n self.cells[nt][i].MF_L[-1].postsyns['AMPA'][0].r6FIX = 0\n \n if \"nostdnmda\" in str(self.tau1_ex[nt]):\n self.cells[nt][i].MF_L[-1].postsyns['AMPA'][0].gmax_factor = 0\n self.cells[nt][i].MF_L[-1].postsyns['NMDA'][0].tau_rec = 1e-9\n self.cells[nt][i].MF_L[-1].postsyns['NMDA'][0].tau_facil = 1e-9\n self.cells[nt][i].MF_L[-1].postsyns['NMDA'][0].tau_1 = 0\n self.cells[nt][i].MF_L[-1].postsyns['NMDA'][0].RdRate = 0\t\n \n if \"nmda\" in str(self.tau1_ex[nt]):\n self.cells[nt][i].MF_L[-1].postsyns['AMPA'][0].gmax_factor = 0\n if \"nopre\" in str(self.tau1_ex[nt]):\n self.cells[nt][i].MF_L[-1].postsyns['NMDA'][0].tau_rec = 1e-9\n self.cells[nt][i].MF_L[-1].postsyns['NMDA'][0].tau_facil = 1e-9\n self.cells[nt][i].MF_L[-1].postsyns['NMDA'][0].tau_1 = 0\n \n if \"nostdgr\" in str(self.tau1_ex[nt]):\n self.cells[nt][i].MF_L[-1].postsyns['AMPA'][0].r6FIX\t= 0 #1.12\t\n self.cells[nt][i].MF_L[-1].postsyns['NMDA'][0].RdRate = 0 #12e-3\n print \"- no std\"\n \n if \"nomggr\" in str(self.tau1_ex[nt]): \n 
self.cells[nt][i].MF_L[-1].postsyns['NMDA'][0].v0_block = -1e9\n print \"- no mg block\"\n \n e0 = len(self.cells[nt][i].MF_L)-1 # get number of current synapse!\n \n self.cells[nt][i].pconnect(self.pc,pulse_gid,int(e0),'mf') \n \n if 'gsyn_in' in self.method_interpol:\n self.record_syn.append(h.Vector())\n self.record_syn[-1].record(self.cells[nt][i].MF_L[-1].postsyns['AMPA'][0]._ref_g)\n self.record_syn.append(h.Vector())\n self.record_syn[-1].record(self.cells[nt][i].MF_L[-1].postsyns['NMDA'][0]._ref_g)\n self.gsyn_in_fac.append(f)\n self.gsyn_in_fac.append(f)\n \n \n elif self.celltype[nt] == 'Goc':\n \n # delete old\n if j == 0: \n self.cells[nt][i].MF_L = []\n self.cells[nt][i].mfncpc = []\n \n if \"go\" not in str(self.tau1_ex[nt]):\n \n tau1 = self.tau1_ex[nt]\n tau2 = self.tau2_ex[nt]\n \n if tau1 == 0:\n syn = h.ExpSyn(self.cells[nt][i].soma(0.5))\n syn.tau = tau2/ms\n \n else: \n syn = h.Exp2Syn(self.cells[nt][i].soma(0.5))\n syn.tau1 = tau1/ms\n syn.tau2 = tau2/ms\n \n syn.e = 0/mV\n \n self.cells[nt][i].MF_L.append(syn)\n \n e0 = len(self.cells[nt][i].MF_L)-1 # get number of current synapse!\n \n syn_idx = int(e0)\n \n source = int(pulse_gid)\n self.cells[nt][i].mfncpc.append(self.pc.gid_connect(source, self.cells[nt][i].MF_L[syn_idx]))\n self.cells[nt][i].mfncpc[-1].delay = 1\n self.cells[nt][i].mfncpc[-1].weight[0] = w\n \n if 'gsyn_in' in self.method_interpol:\n self.record_syn.append(h.Vector())\n self.record_syn[-1].record(self.cells[nt][i].MF_L[-1]._ref_g)\n self.gsyn_in_fac.append(f)\n else:\n \n nrel = 0\n \n mg = self.mglufac_ex[0]\n if self.mglufac_ex[1] > 0:\n mg = np.random.normal(self.mglufac_ex[0], self.mglufac_ex[1]*self.mglufac_ex[0], 1).clip(min=0) # self.g_syn_ex_s[nt] \n \n if \"stoch\" in str(self.tau1_ex[nt]):\n nrel = 4\n \n self.cells[nt][i].createsyn(nmf = 1, weight_gmax = w, nrel=nrel, mglufac = mg) \n \n e0 = len(self.cells[nt][i].MF_L)-1 # get number of current synapse!\n \n 
self.cells[nt][i].pconnect(self.pc,pulse_gid,int(e0),'mf') \n \n if 'gsyn_in' in self.method_interpol:\n self.record_syn.append(h.Vector())\n self.record_syn[-1].record(self.cells[nt][i].MF_L[-1].postsyns['AMPA'][0]._ref_g)\n self.record_syn.append(h.Vector())\n self.record_syn[-1].record(self.cells[nt][i].MF_L[-1].postsyns['NMDA'][0]._ref_g)\n self.gsyn_in_fac.append(f)\n self.gsyn_in_fac.append(f)\n \n elif self.celltype[nt] == 'IfCell': \n \n # delete old\n if j == 0: \n self.cells[nt][i].synlist = []\n self.cells[nt][i].nc = []\n \n if \"gr\" in str(self.tau1_ex[nt]):\n \n self.cells[nt][i].whatami = \"grc\"\n \n nrel = 0\n if \"stoch\" in str(self.tau1_ex[nt]):\n nrel = 4\n \n self.cells[nt][i].MF_L = self.cells[nt][i].synlist\n self.cells[nt][i].synlist.append(Synapse('glom', self.cells[nt][i], self.cells[nt][i].soma, nrel=nrel, record_all=0, weight_gmax = w))\n \n if \"ampa\" in str(self.tau1_ex[nt]):\n self.cells[nt][i].synlist[-1].postsyns['NMDA'][0].gmax_factor = 0\n if \"nopre\" in str(self.tau1_ex[nt]):\n print \"- no pre\"\n self.cells[nt][i].synlist[-1].postsyns['AMPA'][0].tau_rec = 1e-9\n self.cells[nt][i].synlist[-1].postsyns['AMPA'][0].tau_facil = 1e-9\n self.cells[nt][i].synlist[-1].postsyns['AMPA'][0].tau_1 = 0\n \n if \"nmda\" in str(self.tau1_ex[nt]):\n self.cells[nt][i].synlist[-1].postsyns['AMPA'][0].gmax_factor = 0\n if \"nopre\" in str(self.tau1_ex[nt]):\n self.cells[nt][i].synlist[-1].postsyns['NMDA'][0].tau_rec = 1e-9\n self.cells[nt][i].synlist[-1].postsyns['NMDA'][0].tau_facil = 1e-9\n self.cells[nt][i].synlist[-1].postsyns['NMDA'][0].tau_1 = 0\n \n if \"nostdampa\" in str(self.tau1_ex[nt]):\n self.cells[nt][i].synlist[-1].postsyns['AMPA'][0].tau_rec = 1e-9\n self.cells[nt][i].synlist[-1].postsyns['AMPA'][0].tau_facil = 1e-9\n self.cells[nt][i].synlist[-1].postsyns['AMPA'][0].tau_1 = 0\n self.cells[nt][i].synlist[-1].postsyns['AMPA'][0].r6FIX\t= 0 #1.12\t\n \n if \"nostdnmda\" in str(self.tau1_ex[nt]):\n 
self.cells[nt][i].synlist[-1].postsyns['NMDA'][0].tau_rec = 1e-9\n self.cells[nt][i].synlist[-1].postsyns['NMDA'][0].tau_facil = 1e-9\n self.cells[nt][i].synlist[-1].postsyns['NMDA'][0].tau_1 = 0\n self.cells[nt][i].synlist[-1].postsyns['NMDA'][0].RdRate = 0\t\n \n if \"nostdgr\" in str(self.tau1_ex[nt]):\n self.cells[nt][i].synlist[-1].postsyns['AMPA'][0].r6FIX\t= 0 #1.12\t\n self.cells[nt][i].synlist[-1].postsyns['NMDA'][0].RdRate = 0 #12e-3\n print \"- no std\"\n \n if \"nomggr\" in str(self.tau1_ex[nt]): \n self.cells[nt][i].synlist[-1].postsyns['NMDA'][0].v0_block = -1e9 #.k_block = 1e-9\n print \"- no mg block\"\n \n e0 = len(self.cells[nt][i].synlist)-1\n syn_idx = int(e0)\n \n source = int(pulse_gid)\n self.cells[nt][i].nc.append(self.pc.gid_connect(source, self.cells[nt][i].synlist[syn_idx].input))\n self.cells[nt][i].nc[-1].delay = 1\n self.cells[nt][i].nc[-1].weight[0] = 1\n \n if 'gsyn_in' in self.method_interpol:\n self.record_syn.append(h.Vector())\n self.record_syn[-1].record(self.cells[nt][i].synlist[syn_idx].postsyns['AMPA'][0]._ref_g)\n self.record_syn.append(h.Vector())\n self.record_syn[-1].record(self.cells[nt][i].synlist[syn_idx].postsyns['NMDA'][0]._ref_g) \n self.gsyn_in_fac.append(f)\n self.gsyn_in_fac.append(f)\n else:\n \n if \"amfit\" in str(self.tau1_ex):\n \n syn = h.ExpGrcSyn(self.cells[nt][i].soma(0.5)) \n \n syn.tau1_ampa = 0.254\n syn.tau2_ampa = 0.254\n syn.tau3_ampa = 0.363\n syn.tau4_ampa = 6.523\n syn.f1_ampa = 8.8376e-05\n syn.f2_ampa = 5.5257e-05\n \n syn.f1_nmda = 0\n \n self.cells[nt][i].synlist.append(syn) # synlist is defined in Cell \n \n elif \"nmfit\" in str(self.tau1_ex):\n \n syn = h.ExpGrcSyn(self.cells[nt][i].soma(0.5))\n \n syn.f1_ampa = 0\n syn.f2_ampa = 0\n \n syn.tau1_nmda = 1.902\n syn.tau2_nmda = 82.032\n syn.f1_nmda = 7.853857483005277e-05\n \n self.cells[nt][i].synlist.append(syn) # synlist is defined in Cell \n \n elif \"fit\" in str(self.tau1_ex): \n \n syn = h.ExpGrcSyn(self.cells[nt][i].soma(0.5))\n \n 
syn.tau1_ampa = 0.254\n syn.tau2_ampa = 0.254\n syn.tau3_ampa = 0.363\n syn.tau4_ampa = 6.523\n syn.f1_ampa = 8.8376e-05\n syn.f2_ampa = 5.5257e-05\n \n syn.tau1_nmda = 1.902\n syn.tau2_nmda = 82.032\n syn.f1_nmda = 7.853857483005277e-05 \n \n self.cells[nt][i].synlist.append(syn) # synlist is defined in Cell \n \n else:\n \n self.cells[nt][i].create_synapses(n_ex=1, tau1=self.tau1_ex[nt], tau2=self.tau2_ex[nt])\n \n \n e0 = len(self.cells[nt][i].synlist)-1\n syn_idx = int(e0)\n \n self.cells[nt][i].pconnect_target(self.pc, source=pulse_gid, target=int(e0), syntype='ex', weight=w, delay=1)\n \n if 'gsyn_in' in self.method_interpol:\n self.record_syn.append(h.Vector())\n self.record_syn[-1].record(self.cells[nt][i].synlist[syn_idx]._ref_g)\n self.gsyn_in_fac.append(f)\n \n elif self.celltype[nt] == 'Prk':\n \n # delete old\n if j == 0: \n self.cells[nt][i].PF_Lsync = []\n self.cells[nt][i].spk_nc_pfsync = []\n self.cells[nt][i].pfrand = []\n \n m = len(self.cells[nt][i].dendrange)\n \n seed = int(4*gid)\n np.random.seed(seed)\n \n for k in xrange(nsyn):\n m -= 1\n \t mi = np.random.randint(0, m)\t \n \t self.cells[nt][i].dendrange[mi], self.cells[nt][i].dendrange[m] = self.cells[nt][i].dendrange[m], self.cells[nt][i].dendrange[mi]\n \t self.cells[nt][i].pfrand.append(self.cells[nt][i].dendrange[m])\n \n #print self.cells[nt][i].pfrand\n\n if \"prk\" not in str(self.tau1_ex[nt]):\n pass\n else:\n self.cells[nt][i].PF_Lsync.append(Synapse2('pf',self.cells[nt][i],self.cells[nt][i].pfrand[j],record_all=0))\n\n e0 = len(self.cells[nt][i].PF_Lsync)-1 # get number of current synapse!\n syn_idx = int(e0)\n\n self.cells[nt][i].spk_nc_pfsync.append(self.pc.gid_connect(pulse_gid, self.cells[nt][i].PF_Lsync[syn_idx].input.newnetstim))\n self.cells[nt][i].spk_nc_pfsync[-1].delay = 1\n self.cells[nt][i].spk_nc_pfsync[-1].weight[0] = 1\n \n if 'gsyn_in' in self.method_interpol:\n self.record_syn.append(h.Vector())\n 
self.record_syn[-1].record(self.cells[nt][i].PF_Lsync[-1].postsyns['AMPA'][0]._ref_g)\n self.gsyn_in_fac.append(f) \n \n elif syntype == \"inh\":\n \n w = self.g_syn_inh[nt]\n \n seed = int(10000 + 10*gid + j)\n np.random.seed(seed*42)\n \n if self.g_syn_inh_s[nt] > 0:\n w = np.random.normal(w, w*self.g_syn_inh_s[nt], 1).clip(min=w*0.1) # self.g_syn_inh_s[nt] \n \n if self.celltype[nt] == 'Grc':\n \n if j == 0: \n self.cells[nt][i].GOC_L = []\n self.cells[nt][i].gocncpc = []\n \n if \"gr\" not in str(self.tau1_inh[nt]):\n \n tau1 = self.tau1_inh[nt]\n tau2 = self.tau2_inh[nt]\n \n if tau1 == 0:\n syn = h.ExpSyn(self.cells[nt][i].soma(0.5))\n syn.tau = tau2/ms\n \n else: \n syn = h.Exp2Syn(self.cells[nt][i].soma(0.5))\n syn.tau1 = tau1/ms\n syn.tau2 = tau2/ms\n \n syn.e = -65\n \n self.cells[nt][i].GOC_L.append(syn)\n \n i0 = len(self.cells[nt][i].GOC_L)-1 # get number of current synapse!\n \n syn_idx = int(i0)\n source = int(pulse_gid)\n self.cells[nt][i].gocncpc.append(self.pc.gid_connect(source, self.cells[nt][i].GOC_L[syn_idx]))\n self.cells[nt][i].gocncpc[-1].delay = 1\n self.cells[nt][i].gocncpc[-1].weight[0] = w\n \n else:\n \n self.cells[nt][i].createsyn(nmf = 0, ngoc = 1, weight_gmax = w) \n i0 = len(self.cells[nt][i].GOC_L)-1 # get number of current synapse!\n self.cells[nt][i].pconnect(self.pc,pulse_gid,int(i0),'goc')\n \n \n if self.celltype[nt] == 'IfCell': \n \n if j == 0: \n self.cells[nt][i].synlist_inh = []\n self.cells[nt][i].nc_inh = []\n \n if \"gr\" in str(self.tau1_inh[nt]):\n \n nrel = 0\n if \"stoch\" in str(self.tau1_ex[nt]):\n nrel = 4\n \n self.cells[nt][i].GOC_L = self.cells[nt][i].synlist\n self.cells[nt][i].whatami = \"grc\"\n self.cells[nt][i].synlist_inh.append(Synapse('goc', self.cells[nt][i], self.cells[nt][i].soma, nrel=nrel, record_all=0, weight_gmax = w))\n \n i0 = len(self.cells[nt][i].synlist_inh)-1\n syn_idx = int(i0)\n \n source = int(pulse_gid)\n self.cells[nt][i].nc_inh.append(self.pc.gid_connect(source, 
self.cells[nt][i].synlist_inh[syn_idx].input))\n self.cells[nt][i].nc_inh[-1].delay = 1\n self.cells[nt][i].nc_inh[-1].weight[0] = 1\n \n if \"gaba\" in str(self.tau1_ex[nt]):\n \n if 'gsyn_in' in self.method_interpol:\n \n if \"nostdgaba\" in str(self.tau1_ex[nt]):\n \n self.cells[nt][i].synlist_inh[syn_idx].postsyns['GABA'][0].tau_rec = 1e-9 \n self.cells[nt][i].synlist_inh[syn_idx].postsyns['GABA'][0].tau_facil = 1e-9 \n self.cells[nt][i].synlist_inh[syn_idx].postsyns['GABA'][0].tau_1 = 0 \n self.cells[nt][i].synlist_inh[syn_idx].postsyns['GABA'][0].d3 = 0 \n self.cells[nt][i].synlist_inh[syn_idx].postsyns['GABA'][0].d1d2 = 0 \n self.cells[nt][i].synlist_inh[syn_idx].postsyns['GABA'][0].d1 = 0 \n self.cells[nt][i].synlist_inh[syn_idx].postsyns['GABA'][0].d2 = 0 \n self.cells[nt][i].synlist_inh[syn_idx].postsyns['GABA'][0].d3_a6 = 0 \n self.cells[nt][i].synlist_inh[syn_idx].postsyns['GABA'][0].d1d2_a6 = 0 \n self.cells[nt][i].synlist_inh[syn_idx].postsyns['GABA'][0].d1_a6 = 0 \n self.cells[nt][i].synlist_inh[syn_idx].postsyns['GABA'][0].d2_a6 = 0 \n \n self.record_syn.append(h.Vector())\n self.record_syn[-1].record(self.cells[nt][i].synlist_inh[syn_idx].postsyns['GABA'][0]._ref_g)\n self.gsyn_in_fac.append(f)\n \n else:\n \n self.cells[nt][i].create_synapses(n_inh=1, tau1_inh=self.tau1_inh[nt], tau2_inh=self.tau2_inh[nt], e_inh=-65)\n i0 = len(self.cells[nt][i].synlist_inh)-1\n syn_idx = int(i0)\n self.cells[nt][i].pconnect_target(self.pc, source=pulse_gid, target=int(i0), syntype='inh', weight=w, delay=1)\n \n \n elif syntype == \"intr\":\n \n if self.celltype[nt] == 'Prk':\n \n pass\n\n \n def set_SynPlay(self, farray, tarray, N = [], t_startstop = [], amode = 1):\n \n if self.do_run:\n \n delay = 1\n if (self.use_pc is False):\n delay = 0.1\n \n if N == []:\n N = self.N\n \n self.pulse_list = [] \n self.global_pulse_list = []\n self.global_pulse_list_inh = []\n self.global_pulse_list_intr = []\n \n f_cells_mean_local = []\n f_cells_cv_local = []\n 
f_cells_std_local = []\n \n for nt in range(self.n_celltypes): # loop over all cells\n \n if (self.n_syn_ex[nt] > 0) or (self.n_syn_inh[nt] > 0) or (self.n_syn_intr[nt] > 0):\n\n local_gid_count = 0\n local_gid_count_type = []\n \n \n # EXCITATION\n if str(type(self.g_syn_ex[nt] )) is not ndarray: self.g_syn_ex[nt] = np.array([self.g_syn_ex[nt] ]) # each cell can receive different g_syn_ex !\n \n if len(self.g_syn_ex[nt]) == len(self.global_gidlist[nt]):\n pass\n else:\n self.g_syn_ex[nt] = np.ones(len(self.global_gidlist[nt]))*self.g_syn_ex[nt][0]\n #print \"- single value in g_syn_ex, cells:\", len(self.global_gidlist[nt])\n \n self.global_pulse_list.append([])\n for ns in range(self.n_syn_ex[nt]): # loop over all excitatory synapses!\n self.global_pulse_list[-1].append([])\n for n in range(self.syn_max_mf[nt]): # number of cells of this celltype\n self.global_pulse_list[-1][-1].append(local_gid_count+self.gid_count)\n local_gid_count += 1\n local_gid_count_type.append([])\n local_gid_count_type[-1].append('ex')\n local_gid_count_type[-1].append(n) # number of cell within their population 0..N[nt]\n local_gid_count_type[-1].append(ns) # number of synapse \n \n \n # INHIBITION \n if np.array(self.inh_hold[nt]).size <= 1:\n self.inh_hold[nt] = np.ones(len(self.global_gidlist[nt]))*self.inh_hold[nt]\n #print \"- single value in inh_hold\", self.inh_hold[nt] \n \n \n self.global_pulse_list_inh.append([])\n for ns in range(self.n_syn_inh[nt]): # loop over all inhibitory synapses!\n self.global_pulse_list_inh[-1].append([])\n for n in range(self.syn_max_inh[nt]): # number of cells of this celltype\n self.global_pulse_list_inh[-1][-1].append(local_gid_count+self.gid_count)\n local_gid_count += 1\n local_gid_count_type.append([])\n local_gid_count_type[-1].append('inh')\n local_gid_count_type[-1].append(n) # number of cell within their population 0..N[nt]\n local_gid_count_type[-1].append(ns) # number of synapse \n\n \n # INTRUDER SYNAPSE\n if 
str(type(self.g_syn_intr[nt] )) is not ndarray: self.g_syn_intr[nt] = np.array([self.g_syn_intr[nt] ]) # each cell can receive different g_syn_intr !\n \n if len(self.g_syn_intr[nt]) == len(self.global_gidlist[nt]):\n pass \n else:\n self.g_syn_intr[nt] = np.ones(len(self.global_gidlist[nt]))*self.g_syn_intr[nt][0]\n #print \"- single value in g_syn_intr, cells:\", len(self.global_gidlist[nt])\n \n self.global_pulse_list_intr.append([])\n for ns in range(self.n_syn_intr[nt]): # loop over all intruding synapses!\n self.global_pulse_list_intr[-1].append([])\n for n in range(self.syn_max_intr[nt]): # number of generators for this celltype\n self.global_pulse_list_intr[-1][-1].append(local_gid_count+self.gid_count)\n local_gid_count += 1\n local_gid_count_type.append([])\n local_gid_count_type[-1].append('intr')\n local_gid_count_type[-1].append(n) # number of cell within their population 0..N[nt]\n local_gid_count_type[-1].append(ns) # number of synapse \n \n \n t_vec_input = np.array([]) # input trains \n id_vec_input = np.array([]) # input trains id\n fs = 1 / self.dt\n ih_use_v = []\n \n for i in range(int(self.id), local_gid_count, int(self.nhost)): # loop over all train generators and generate them\n \n self.pulse_list.append(i+self.gid_count)\n pulse_gid = self.pulse_list[-1] \n gid = local_gid_count_type[i][1] # should correspond to this gid when multiple values inserted\n \n if local_gid_count_type[i][0] == 'ex':\n \n seed = int(10001 + pulse_gid) # unique gid for generators! 
\n np.random.seed(seed*423)\n \n if self.ihold_sigma[nt] > 0:\n ih_use = np.random.normal(self.ihold[nt], self.ihold[nt]*self.ihold_sigma[nt], 1).clip(min=0) # self.ihold[nt]*self.ihold_sigma[nt] \n \n elif self.ihold_sigma[nt] < 0:\n ih_use = np.random.uniform(0.1, self.ihold[nt])\n \n else:\n ih_use = self.ihold[nt] \n \n ih_use_v.append(ih_use)\n \n if ih_use > 0:\n # train has to be contructed here, to insert different train into each \"dendrite\"\n ## different ihold has to be implemented here!!\n iholdvec = concatenate((zeros(round(fs)), ones(round(len(tarray) - 1 * fs)) * ih_use))\n \n if isinstance(self.syn_ex_dist[nt], ( tuple ) ): # distribution of amplitude, only one noise source!\n \n np.random.seed(pulse_gid*40)\n if self.syn_ex_dist[nt][1] > 0:\n f = np.random.normal(self.syn_ex_dist[nt][0], self.syn_ex_dist[nt][1], 1).clip(min=0)\n else:\n f = self.syn_ex_dist[nt][0]\n \n f2 = f\n rnd.seed(pulse_gid*40) # use gid so type 1, 2 is identical for each cell\n #rnd.seed(gid*40) # use gid so type 1, 2 is identical for each cell\n if self.syn_ex_dist[nt][2] > 0: # add inverted input with 50% probability, in future versions this will indicate the propability for -1 and 1 \n f2 = rnd.sample([-1,1],1)[0] * f\n #f2 = f\n \n if amode == 1:\n inamp = (f2 * self.amod[nt] * ih_use)\n elif amode == 2:\n inamp = (f2 * self.amod[nt] * self.ihold[nt]) \n \n modulation = (tarray, inamp * farray[0] + iholdvec)\n \n #if self.id == 0: print \"- randomize play stimulus height, pulse_gid=\", pulse_gid, \" gid=\", gid ,\" f=\", f \n if (gid==0): print \"- randomize play stimulus height, pulse_gid=\", pulse_gid, \" gid=\", gid ,\" f2=\", f2,\"inamp=\",inamp \n \n #rnd.seed(local_gid_count_type[i][1]*300) # pick seed based on number of cell\n #nj = rnd.sample(range(len(farray)),1)[0] \n nj = 1\n \n else: # different noise sources can be used at different synapses, linear combination test in openloop\n \n nj = self.syn_ex_dist[nt][local_gid_count_type[i][2]]\n \n if nj == 0:\n 
modulation = (tarray, iholdvec)\n else:\n if amode == 1:\n inamp = (self.factor_celltype[nt] * self.amod[nt] * ih_use)\n elif amode == 2:\n inamp = (self.factor_celltype[nt] * self.amod[nt] * self.ihold[nt]) \n\n modulation = (tarray, inamp * farray[nj-1] + iholdvec)\n if self.id == 0: print \"ex farray number:\", nj-1, \"ih_use:\", ih_use, \"self.amod[nt]:\", self.amod[nt], \"inamp: \", inamp\n \n \n # will be done n_syn_ex * number of cells!\n if self.noise_syn_tau[nt] < 0: # variable threshold\n no = self.noise_syn[nt]\n else: \n no = self.noise_syn[nt]*ih_use\n\n train, self.n_train_ex = mod_spike_train(modulation, noise = no, seed = seed, noise_tau = self.noise_syn_tau[nt], noise_a = self.noise_a[nt]) \n \n #plt.figure(\"input\")\n #plt.plot(train, train*0, '|')\n #plt.show()\n \n t_vec_input = np.append(t_vec_input, train*ms).flatten() # use ms to save!!\n id_vec_input = np.append(id_vec_input, np.ones(len(train))*pulse_gid).flatten()\n \n f_cells_mean_local0, f_cells_cv_local0, f_cells_std_local0 = self.calc_fmean(train*ms, t_startstop)\n f_cells_mean_local.append(f_cells_mean_local0); f_cells_cv_local.append(f_cells_cv_local0); f_cells_std_local.append(f_cells_std_local0)\n \n if self.id == 0: print \"TRAIN: requ. mean:\", ih_use ,\"eff. 
mean:\", f_cells_mean_local0, \"cv: \" , f_cells_cv_local0, \"std:\" , f_cells_std_local0\n \n else:\n train = []\n self.n_train_ex = []\n \n\n\n elif local_gid_count_type[i][0] == 'intr':\n \n # train has to be contructed here, to insert different train into each \"dendrite\"\n nj = 0\n \n seed = int(10001 + pulse_gid)\n np.random.seed(seed*4411)\n \n if self.intr_hold_sigma[nt] > 0: \n ih_use = np.random.normal(self.intr_hold[nt], self.intr_hold[nt]*self.intr_hold_sigma[nt], 1).clip(min=0) \n else:\n ih_use = self.intr_hold[nt]\n \n ih_use_v.append(ih_use)\n \n if ih_use > 0: \n \n iholdvec = concatenate((zeros(round(fs)), ones(round(len(tarray) - 1 * fs)) * ih_use))\n modulation = (tarray, iholdvec)\n \n # will be done n_syn_in * number of cells! \n if self.noise_syn_tau_intr[nt] < 0: # variable threshold\n no = self.noise_syn_intr[nt]\n else: \n no = self.noise_syn_intr[nt]*ih_use\n \n if self.noise_syn_tau_intr[nt] >= -1:\n train, _ = mod_spike_train(modulation, noise = no, seed = seed, noise_tau = self.noise_syn_tau_intr[nt], noise_a = self.noise_a_intr[nt]) # train in ms\n else:\n train = oscill_spike_train(sor = 4, spike_prob = 1/4, noise_fraction = 4, end_time = tarray[-1]/ms, seed = seed) \n \n \n elif local_gid_count_type[i][0] == 'inh':\n \n # train has to be contructed here, to insert different train into each \"dendrite\"\n \n seed = int(10001 + pulse_gid)\n \n np.random.seed(seed*44)\n \n if self.inh_hold_sigma[nt] > 0: \n ih_use = np.random.normal(self.inh_hold[nt][gid], self.inh_hold[nt][gid]*self.inh_hold_sigma[nt], 1).clip(min=0) \n else:\n ih_use = self.inh_hold[nt][gid]\n \n \n iholdvec = concatenate((zeros(round(fs)), ones(round(len(tarray) - 1 * fs)) * ih_use))\n \n nj = self.syn_inh_dist[nt][local_gid_count_type[i][2]]\n if nj == 0:\n modulation = (tarray, iholdvec)\n else:\n inamp = (self.amod[nt] * ih_use)\n modulation = (tarray, inamp * farray[nj-1] + iholdvec)\n #print \"inh farray number:\", nj-1, \"ih_use:\", ih_use, \"amp: \", inamp 
#old: nj-1+nemax\n \n # will be done n_syn_in * number of cells! \n if self.noise_syn_tau_inh[nt] < 0: # variable threshold\n no = self.noise_syn_inh[nt]\n else: \n no = self.noise_syn_inh[nt]*ih_use\n \n train, _ = mod_spike_train(modulation, noise = no, seed = seed, noise_tau = self.noise_syn_tau_inh[nt], noise_a = self.noise_a_inh[nt]) # train in ms\n #print train\n \n #print train\n if len(train) > 0:\n if self.id == 0: \n print \"-\", pulse_gid, local_gid_count_type[i], \"seed: \", seed, \"ih_use:\", ih_use, no, nj #, \"first spike: \", train[0] \n self.setup_Play_train(train = train+self.inh_delay, input_gid = pulse_gid, delay = delay) # train in ms\n \n \n self.gid_count += local_gid_count # increase gid count\n \n self.barrier()\n \n for i, gid in enumerate(self.gidlist[nt]): # for all input cells\n \n rnd.seed(gid*200)\n n = self.global_gidlist[nt].index(gid) # index of cell within their population 0..N[nt]\n # i is index on this node only!\n \n self.record_syn = []\n for j in range(self.n_syn_ex[nt]):\n if N[nt] == len(self.global_pulse_list[nt][j]):\n pulse_gid = self.global_pulse_list[nt][j][n] #every cell of this type receives one pulse gid \n if self.id == 0: print \"- gid:\", gid ,\" n:\", n ,\" one ex train for each synapse:\", pulse_gid, \"self.g_syn_ex[nt][n]:\", self.g_syn_ex[nt][n] \n else:\n pulse_gid = rnd.sample(self.global_pulse_list[nt][j],1)[0] # not enough, just pick one at random, for inh/f search only one synapse available!\n if self.id == 0: print \"- gid:\", gid ,\" n:\", n ,\" one ex train from\", len(self.global_pulse_list[nt][j]), \":\", pulse_gid, \"self.g_syn_ex[nt][n]:\", self.g_syn_ex[nt][n] \n \n if \"gaba\" in str(self.tau1_ex[nt]):\n self.connect_Synapse(pulse_gid, nt, i, n, gid, j, syntype = \"inh\") \n else:\n self.connect_Synapse(pulse_gid, nt, i, n, gid, j, syntype = \"ex\", nsyn = self.n_syn_ex[nt]) \n \n \n if self.n_syn_inh[nt] > 0:\n for j in range(self.n_syn_inh[nt]):\n \n if N[nt] == 
len(self.global_pulse_list_inh[nt][j]):\n pulse_gid = self.global_pulse_list_inh[nt][j][n] #every cell of this type receives one pulse gid \n if self.id == 0: print \"- one inh train for each synapse:\", pulse_gid\n else:\n pulse_gid = rnd.sample(self.global_pulse_list_inh[nt][j],1)[0] # not enough, just pick one at random \n if self.id == 0: print \"- one inh train from\", len(self.global_pulse_list_inh[nt][j]), \":\", pulse_gid\n \n self.connect_Synapse(pulse_gid, nt, i, n, gid, j, syntype = \"inh\") \n \n \n if self.n_syn_intr[nt] > 0:\n for j in range(self.n_syn_intr[nt]):\n \n if N[nt] == len(self.global_pulse_list_intr[nt][j]):\n pulse_gid = self.global_pulse_list_intr[nt][j][n] #every cell of this type receives one pulse gid \n if self.id == 0: print \"- one intruding train for each synapse:\", pulse_gid\n else:\n pulse_gid = rnd.sample(self.global_pulse_list_intr[nt][j],1)[0] # not enough, just pick one at random \n if self.id == 0: print \"- one intruding train from\", len(self.global_pulse_list_intr[nt][j]), \":\", pulse_gid\n \n if (self.use_pc is False):\n \n if self.celltype[nt] == 'Prk': self.cells[nt][i].delrerun() \n \n (msg,CF_input) = self.cells[nt][i].createsyn_CF(record_all=0,factor=self.g_syn_intr[nt][0],cf_setup_select='old')\n CF_input.number = 3 # three bursts\n CF_input.start = -0.3 # See synapsepfpurk.py\n CF_input.interval = 3 # 3 ms interval between bursts\n\n self.cells[nt][i].input_to_CF_nc.append(h.NetCon(self.vecstim[j], CF_input, 0, 0.1, 1))\n self.netcons.append(self.cells[nt][i].input_to_CF_nc[-1])\n \n else:\n print \"NOT IMPLEMENTED\"\n \n \n if self.id == 0: print \"trains connected\"\n \n if local_gid_count_type[i][0] == 'intr':\n pass\n else:\n self.id_all_vec_input.append(self.do_gather(id_vec_input, dtype = 'i')) \n self.t_all_vec_input.append(self.do_gather(t_vec_input)) \n \n f_cells_mean = self.do_gather(f_cells_mean_local) \n f_cells_cv = self.do_gather(f_cells_cv_local) \n f_cells_std = 
self.do_gather(f_cells_std_local)

            # Root-side summary statistics of the generated input trains.
            # Initialized to NaN so they are defined on every node even when
            # no statistics are computed below.
            self.fmean_input = np.nan
            self.fmax_input = np.nan
            self.fmstd_input = np.nan
            self.fcvm_input = np.nan
            self.fstdm_input = np.nan

            # Per-node holding currents, gathered to root.
            ih_use_v_all = self.do_gather(ih_use_v)

            if self.id == 0 and local_gid_count_type[i][0] != 'intr':

                self.fmean_input = mean(np.nan_to_num(f_cells_mean)) # compute mean of mean rate for all cells
                self.fmstd_input = std(np.nan_to_num(f_cells_mean))
                self.fmax_input = max(np.nan_to_num(f_cells_mean))

                # CV and std averages ignore NaN entries rather than zeroing them.
                self.fcvm_input = mean(f_cells_cv[~np.isnan(f_cells_cv)])
                self.fstdm_input = mean(f_cells_std[~np.isnan(f_cells_std)])

                self.ih_use_max = max(ih_use_v_all)

                print "- trains, fmean: ",self.fmean_input, "fmax: ",self.fmax_input, "Hz", "fmstd: ",self.fmstd_input, "Hz", "fcvm: ",self.fcvm_input, "fstdm: ",self.fstdm_input, "Hz, ih_use_max:", self.ih_use_max

        else:
            # No trains constructed for this celltype: keep the per-celltype
            # pulse lists aligned by appending empty placeholders.
            self.global_pulse_list.append([])
            self.global_pulse_list_inh.append([])



    def do_gather(self, v_local, dtype = 'd'):
        """
        Gather a per-node array onto the root node (id 0).

        v_local -- local array-like; flattened before sending.
        dtype   -- 'd' (double) or 'i' (integer); selects the MPI datatype.

        With MPI and mpi4py (use_pc == False) a variable-length Gatherv is
        used: root returns the concatenated global array, the other nodes
        return None. With the NEURON ParallelContext (use_pc == True) the
        data is routed to rank 0 via py_alltoall. Without MPI the flattened
        local array is returned unchanged.
        """

        if self.use_mpi:

            self.barrier()

            #v_local = v_local.astype(dtype).flatten()
            v_local = np.array(v_local, dtype=dtype).flatten()

            if self.use_pc == False:

                v_global = None
                # First gather the per-node element counts so root can size
                # the receive buffer for the variable-length Gatherv.
                counts_local = np.array(len(v_local), dtype='i')

                counts = 0
                if self.id == 0:
                    counts = np.empty(self.nhost, dtype='i')

                self.comm.Gather(sendbuf=[counts_local, MPI.INT], recvbuf=[counts, MPI.INT], root=0)

                if self.id == 0:
                    v_global = np.empty(sum(counts), dtype=dtype)


                if dtype == 'd':
                    self.comm.Gatherv(sendbuf=[v_local, MPI.DOUBLE], recvbuf=[v_global, (counts, None), MPI.DOUBLE], root=0)
                elif dtype == 'i':
                    self.comm.Gatherv(sendbuf=[v_local, MPI.INT], recvbuf=[v_global, (counts, None), MPI.INT], root=0)

                #v_global = np.hstack(v_global)

            else:
                # ParallelContext transport: only slot 0 of the send list is
                # filled, so every node sends its local data to rank 0.
                sendlist = [None]*self.nhost
                sendlist[0] = v_local
                getlist = self.pc.py_alltoall(sendlist)

                v_global = np.hstack(getlist)

        else:

            # Serial run: nothing to gather.
            v_global = np.hstack(v_local)

        return v_global


    def setup_Play_train(self, train = [], input_gid = 0, delay = 1):
        """
        Register a precomputed spike train (times in ms) as an artificial
        spike source with gid input_gid, played through a VecStim/NetCon
        pair so it can drive synapses like a real presynaptic cell.
        """

        self.trains.append(train)

        # possibility to play spikes into the cells!
        self.vecstim.append(h.VecStim(.5))
        self.nc_vecstim.append(h.NetCon(self.vecstim[-1],None))
        self.nc_vecstim[-1].delay = delay

        self.spike_vec.append(h.Vector(self.trains[-1]))
        self.vecstim[-1].play(self.spike_vec[-1])

        if (self.use_mpi):
            self.pc.set_gid2node(input_gid, self.id) # associate gid with this host
            self.pc.cell(input_gid,self.nc_vecstim[-1]) # associate gid with spike detector


    def record(self):
        """
        Initializes recording vectors. Internal function
        """

        if self.n_celltypes > 1:
            #print "self.n_borders:",self.n_borders
            # Record the soma voltage of the first cell of each celltype on
            # whichever node owns that cell.
            for n in range(self.n_celltypes):
                if self.n_borders[n] in self.gidlist[n]:
                    #print "np.shape(self.rec_v):",np.shape(self.rec_v)
                    #print "np.shape(self.cells):",np.shape(self.cells)
                    self.rec_v[n].record(self.cells[n][0].soma(0.5)._ref_v)


        if self.id == 0: # only for first node and first cell

            # Voltage
            self.rec_v[0].record(self.cells[self.a_celltype[0]][0].soma(0.5)._ref_v)

            # Stimuli
            self.rec_i = h.Vector()

            if (self.plays != []):
                if (isinstance(self.plays[0], list) is False):
                    self.rec_i.record(self.plays[0]._ref_i)
                else:
                    self.rec_i.record(self.plays[0][0]._ref_i)

            self.rec_ich = h.Vector()
            if self.ic_holds != [] and (isinstance(self.ic_holds[0], list) is False):
                self.rec_ich.record(self.ic_holds[0]._ref_i)

            self.rec_ics = h.Vector()
            if self.ic_starts != []:
                self.rec_ics.record(self.ic_starts[0]._ref_i)

            self.rec_n = h.Vector()

            if self.fluct_s[0] > 0:
                # Fluctuating input
                self.rec_n.record(self.flucts[0]._ref_i)
                print "recording noise"
            elif (len(self.flucts) > 0) and (len(self.fluct_g_i0)>0):
                self.rec_n.record(self.flucts[0]._ref_g_i)
                print "recording g noise"
            else:
                print "nonoise"

            if hasattr(self.cells[self.a_celltype[0]][0], 'lkg2_noise'):
if self.cells[self.a_celltype[0]][0].lkg2_noise > 0:
                    self.rec_n.record(self.cells[self.a_celltype[0]][0].fluct._ref_il)
                    print "recording tonic gaba noise"

            self.rec_step = h.Vector()
            if self.ic_steps != []:
                self.rec_step.record(self.ic_steps[0]._ref_i)

            # Time
            self.rec_t = h.Vector()
            self.rec_t.record(h._ref_t)


    def run(self, tstop = 10*s, do_loadstate = True):
        """
        Starts the stimulation.

        tstop        -- simulation end time (in units of s, converted with ms
                        for NEURON).
        do_loadstate -- if True and the cell supports load_states, restore a
                        previously saved state file before running.

        On the first call this sets up the solver (multisplit or stdrun,
        optional local variable time step), dt, temperature and v_init; later
        calls reuse that setup. The simulation itself either runs in one
        psolve/run call (simstep == 0) or in chunks of self.simstep.
        """
        self.record()

        if self.first_run:

            if self.use_mpi: self.pc.set_maxstep(100)
            #self.pc.spike_compress(1) #test

            if self.use_multisplit:
                import multiprocessing

                # Multisplit: fixed-step only, so disable CVode, then split
                # cells across threads.
                Hines = h.CVode()
                Hines.active(0)

                h.load_file("parcom.hoc")
                p = h.ParallelComputeTool()

                if self.use_mpi:
                    cpus = multiprocessing.cpu_count() #32 #self.pc.nhost()
                else:
                    cpus = multiprocessing.cpu_count() #32

                p.change_nthread(cpus,1)
                p.multisplit(1)
                print "Using multisplit, cpus:", cpus

            else:

                h.load_file("stdrun.hoc")

            if self.use_local_dt:
                h.cvode.active(1)
                h.cvode.use_local_dt(1)

            h.celsius = self.temperature
            h.dt = self.dt/ms # Fixed dt
            h.steps_per_ms = 1 / (self.dt/ms)

            if self.cells[self.a_celltype[0]] != []:
                if hasattr(self.cells[self.a_celltype[0]][0], 'v_init'):
                    h.v_init = self.cells[self.a_celltype[0]][0].v_init # v_init is supplied by cell itself!
                else:
                    h.v_init = -60

            h.stdinit()

            h.finitialize()

            # Optionally restore saved channel states; the file name is keyed
            # by an md5 hash of the cell construction string.
            if hasattr(self.cells[self.a_celltype[0]][0], 'load_states') and do_loadstate:
                m = md5.new()
                cell_exe_new = self.cell_exe[0]
                m.update(cell_exe_new)
                filename = './states_' + self.celltype[0] + '_' + m.hexdigest() + '_Population.b'
                self.cells[self.a_celltype[0]][0].load_states(filename)

        else:

            pass


        if self.id == 0:
            import time
            t0 = time.time()

        if self.simstep == 0:
            if self.id == 0: print "Running without steps",

            if self.use_mpi:
                self.pc.psolve(tstop/ms)
            else:
                h.init()
                h.tstop = tstop/ms
                h.run()

        else:

h.finitialize()
                cnt = 1

                #if self.id == 50:
                #    print len(self.cells[1][0].nc), self.cells[1][0].nc[0].weight[0]
                #    print len(self.cells[0][0].nc_inh), self.cells[0][0].nc_inh[0].weight[0]

                # Stepped run: advance the simulation in chunks of
                # self.simstep, printing progress after each chunk.
                h.t = 0
                while h.t < tstop/ms:

                    if self.id == 0:
                        print "Running...",
                        if self.use_mpi:
                            past_time = self.pc.time()

                    h.continuerun(cnt*self.simstep/ms)
                    if self.use_mpi: self.pc.barrier()

                    if self.id == 0:
                        if self.use_mpi:
                            print "Simulated time =",h.t*ms, "s, Real time = ", (self.pc.time()-past_time), 's'
                        else:
                            print "Simulated time =",h.t*ms, "s"

                    #if self.id == 0:
                    #    print hpy.heap().byrcs
                    cnt += 1

        if self.id == 0: print "psolve took ", time.time() - t0, "seconds"

        self.first_run = False

        self.barrier() # wait for other nodes

        self.tstop = tstop


    def get(self, t_startstop=[], i_startstop=[], N = []):
        """
        Gets the recordings.

        Collects spikes, voltages, currents and conductances from all nodes
        onto root (id 0) and returns them in a dict. Non-root nodes return
        the same dict with mostly empty entries. Phases:
          1. gather spike times/ids per celltype and compute rate statistics
          2. assemble recorded current/voltage traces on root
          3. optionally collect synaptic input conductance (gsyn_in)
          4. send first-cell voltages of further celltypes to root
          5. optionally collect per-cell output conductances (gsyns)
          6. optionally collect STDP weight traces (w_mat / winh_mat)
          7. sort spikes, apply jitter/delta_t, build gsyn and spike_freq
        """

        if N == []:
            N = self.N

        if t_startstop == []:
            t_startstop = np.array([2, self.tstop])

        t_all_vec = []
        id_all_vec = []

        fmean = []
        fbase = []
        fmax = []
        fmstd = []
        fcvm = []
        fstdm = []
        gid_del = []
        f_cells_mean_all = []
        f_cells_base_all = []
        f_cells_cv_all = []
        f_cells_std_all = []

        fmeanA = []
        fmstdA = []
        fmaxA = []
        fcvmA = []
        fstdmA = []
        fbaseA = []
        fbstdA = []

        if self.id == 0: print "start gathering spikes"

        # Phase 1: per-celltype spike gathering and rate statistics.
        for n in range(self.n_celltypes):

            if self.use_mpi:

                self.barrier() # wait for other node
                t_vec = np.array(self.t_vec[n]).flatten()*ms - 1*ms # shift time because of output delay
                id_vec = np.array(self.id_vec[n]).flatten()

            else:

                t_vec = np.array([])
                id_vec = np.array([])
                print np.shape(self.t_vec)
                for i in self.gidlist[n]:
                    t_vec0 = np.array(self.t_vec[n][i]).flatten()*ms
                    t_vec = np.append(t_vec, t_vec0).flatten()
                    id_vec = np.append(id_vec, np.ones(len(t_vec0))*i).flatten()

            fmean0, fmax0, fmstd0, fcvm0, fstdm0, gid_del0, f_cells_mean_all0, f_cells_cv_all0, f_cells_std_all0, fbase0, f_cells_base_all0 = self.get_fmean(t_vec, id_vec, t_startstop = t_startstop, gidlist = self.gidlist[n])
            fmean.append(fmean0); fmax.append(fmax0), fmstd.append(fmstd0), fcvm.append(fcvm0), fstdm.append(fstdm0), gid_del.append(gid_del0), f_cells_mean_all.append(f_cells_mean_all0), f_cells_cv_all.append(f_cells_cv_all0), f_cells_std_all.append(f_cells_std_all0)
            fbase.append(fbase0); f_cells_base_all.append(f_cells_base_all0)

            t_all_vec.append(self.do_gather(t_vec))
            id_all_vec.append(self.do_gather(id_vec))

        if (self.id == 0) and (self.no_fmean == False):
            f_cells_mean_all = np.array(f_cells_mean_all).flatten()
            fmeanA = mean(f_cells_mean_all) # compute mean of mean rate for all cells
            fmstdA = std(f_cells_mean_all)
            fmaxA = max(f_cells_mean_all)

            f_cells_base_all = np.array(f_cells_base_all).flatten()
            fbaseA = mean(f_cells_base_all) # compute mean of mean rate for all cells
            fbstdA = std(f_cells_base_all)

            f_cells_cv_all = np.concatenate((np.array(f_cells_cv_all)))
            f_cells_std_all = np.concatenate((np.array(f_cells_std_all)))

            fcvmA = mean(f_cells_cv_all)
            fstdmA = mean(f_cells_std_all)

            print "- ALL, fmean: ",fmeanA, "fmax: ",fmaxA, "Hz", "fmstd: ",fmstdA, "Hz", "fcvm: ",fcvmA, "fstdm: ",fstdmA, "Hz", "fbase: ",fbaseA, "Hz", "fbstd: ", fbstdA, "Hz"

        if self.id == 0: print "all spikes have been gathered"

        self.barrier()

        # do this here to have something to return
        voltage = []
        current = []
        time = []

        freq_times = []
        spike_freq = []
        gsyn = []

        # Phase 2: assemble recorded traces on root; current is the sum of
        # all stimulus/noise recordings that are non-empty.
        if self.id == 0: # only for first node

            time = np.array(self.rec_t)*ms

            # use self.bin_width as bin width!
            freq_times = arange(0, time[-1], self.bin_width)

            voltage.append(np.array(self.rec_v[0])*mV)
            current = np.zeros(len(time))

            if len(np.array(self.rec_ics)) > 0:
                current = current + np.array(self.rec_ics)

            if len(np.array(self.rec_ich)) > 0:
                current = current + np.array(self.rec_ich)

            if len(np.array(self.rec_i)) > 0:
                current = current + np.array(self.rec_i)

            if len(np.array(self.rec_n)) > 0:
                current = current + np.array(self.rec_n)
                print np.array(self.rec_n)

            if len(np.array(self.rec_step)) > 0:
                current = current + np.array(self.rec_step)

        else:
            time = [0]

        self.barrier()
        time = self.broadcast(time, fast = True)

        gsyn_in = []
        gsyn_in0 = []

        # Phase 3: summed synaptic input conductance, gathered and summed
        # over nodes.
        if 'gsyn_in' in self.method_interpol:

            gsyn_in = None
            if self.id == 0: print "- collecting gsyn_in"
            gsyn_in0 = np.zeros(len(time), dtype='d')
            # NOTE(review): "is not []" is an identity test and is always
            # True; "!= []" was probably intended. Harmless here since the
            # loop body is a no-op for an empty list.
            if self.record_syn is not []:
                for i, j in enumerate(self.record_syn):
                    gsyn_in0 = gsyn_in0 + self.gsyn_in_fac[i] * np.array(j, dtype='d')

            if self.use_mpi:
                count = len(time)

                #if self.id == 0: gsyn_in = np.empty(count*self.nhost, dtype='d')
                #self.comm.Gatherv(sendbuf=[gsyn_in0, MPI.DOUBLE], recvbuf=[gsyn_in, MPI.DOUBLE], root=0)

                gsyn_in = self.do_gather(gsyn_in0)

                if self.id == 0:
                    gsyn_in = np.reshape(gsyn_in, (self.nhost,count))
                    gsyn_in = sum(gsyn_in,0)

            else:
                gsyn_in = gsyn_in0

        self.barrier() # wait for other nodes

        # Phase 4: voltage of the first cell of each further celltype is sent
        # to root, either via py_alltoall or point-to-point Ssend/Recv.
        if self.n_celltypes > 1:
            if self.id == 0: print "more than one celltype send voltage of first other cell to root"

            for n in range(1, self.n_celltypes):

                if self.use_pc == True:

                    srclist = [None]*self.nhost

                    if (self.n_borders[n] in self.gidlist[n]):
                        srclist[0] = np.array(self.rec_v[n])*mV

                    destlist = self.pc.py_alltoall(srclist)

                    if self.id == 0:
                        idx = [i for i, x in enumerate(destlist) if x is not None]
                        if len(idx) > 1: raise ValueError('Error, too many vectors sent, should be one at a time!')
                        voltage.append(np.array(destlist[idx[0]]))

                else:

                    if self.id == 0:
                        if (self.n_borders[n] in self.gidlist[n]): # first node has it, do not wait to receive it!
                            v_temp = np.array(self.rec_v[n])*mV
                        else:
                            v_temp = np.zeros(len(voltage[0]))
                            self.comm.Recv([v_temp, MPI.DOUBLE], source = MPI.ANY_SOURCE, tag=int(sum(N)+33))

                        voltage.append(v_temp)
                    else:
                        if self.n_borders[n] in self.gidlist[n]:
                            voltage = np.array(self.rec_v[n])*mV
                            self.comm.Ssend([voltage, MPI.DOUBLE], dest=0, tag=int(sum(N)+33))

        self.barrier() # wait for other nodes

        # Phase 5: per-cell synaptic output conductance, trimmed by 1 ms,
        # resampled onto a 1 ms grid and collected on root.
        times = arange(0, time[-1], 1*ms)
        gsyns = []
        if self.called_syn_out_all == True:

            for n in range(self.n_celltypes):
                gsyns.append([])

                if self.use_pc == True:

                    for i, gid in enumerate(self.global_gidlist[n]):

                        srclist = [None]*self.nhost

                        if gid in self.gidlist[n]: #only one node does this
                            a = np.array(self.cells[n][self.gidlist[n].index(gid)].record['gsyn'])
                            c = np.zeros(int((1*ms)/self.dt))
                            temp = np.append(a, c).flatten()
                            temp = temp[int((1*ms)/self.dt):len(temp)+1]
                            gtemp = interp(times,time,temp)

                            srclist[0] = gtemp # send to root only

                        destlist = self.pc.py_alltoall(srclist)

                        if self.id == 0:
                            idx = [i for i, x in enumerate(destlist) if x is not None]
                            if len(idx) > 1: raise ValueError('Error, too many vectors sent, should be one at a time!')
                            gsyns[n].append(np.array(destlist[idx[0]]))

                else:

                    for i, gid in enumerate(self.global_gidlist[n]):

                        if self.id == 0:
                            if gid in self.gidlist[n]:
                                a = np.array(self.cells[n][self.gidlist[n].index(gid)].record['gsyn'])
                                c = np.zeros(int((1*ms)/self.dt))
                                temp = np.append(a, c).flatten()
                                temp = temp[int((1*ms)/self.dt):len(temp)+1]
                                gtemp = interp(times,time,temp)

                            else:
                                gtemp = np.zeros(len(times))
                                self.comm.Recv([gtemp, MPI.DOUBLE], source = MPI.ANY_SOURCE, tag=int(gid))

                            gsyns[n].append(np.array(gtemp))

                        else:
                            if gid in self.gidlist[n]:
                                a = np.array(self.cells[n][self.gidlist[n].index(gid)].record['gsyn'])
                                c = np.zeros(int((1*ms)/self.dt))
                                temp = np.append(a, c).flatten()
                                temp = temp[int((1*ms)/self.dt):len(temp)+1]
                                gtemp = interp(times,time,temp)
                                #np.array(self.cells[n][self.gidlist[n].index(gid)].record['gsyn'])
                                self.comm.Ssend([gtemp, MPI.DOUBLE], dest=0, tag=int(gid))

            if self.id == 0: print "root gathered synaptic output conductance"


        self.barrier() # wait for other nodes

        times = arange(0, time[-1], 10*ms)

        w_mat = []
        winh_mat = []

        # Phase 6: STDP weight traces; each owning node streams its per-cell
        # weight vectors to root, terminated by an all -1 sentinel vector.
        if self.stdp_used == True:

            for n in range(self.n_celltypes):
                w_mat.append([])

                for i, gid in enumerate(self.global_gidlist[n]):

                    if self.id == 0:

                        wall = []

                        if gid in self.gidlist[n]:

                            walltemp = self.cells[n][self.gidlist[n].index(gid)].record['w']
                            if len(walltemp) > 0:
                                for l in range(len(walltemp)):
                                    wtemp = np.array(walltemp[l])
                                    wtemp = interp(times,time,wtemp)
                                    wall.append(wtemp)

                        else:

                            while 1:
                                wtemp = np.zeros(len(times))
                                self.comm.Recv([wtemp, MPI.DOUBLE], source = MPI.ANY_SOURCE, tag=int(gid))

                                if wtemp[0] == -1:
                                    break
                                else:
                                    wall.append(wtemp)

                        w_mat[n].append(wall)

                    else:
                        if gid in self.gidlist[n]:
                            walltemp = self.cells[n][self.gidlist[n].index(gid)].record['w']

                            if len(walltemp) > 0:
                                for l in range(len(walltemp)):
                                    wtemp = np.array(walltemp[l])
                                    wtemp = interp(times,time,wtemp)
                                    self.comm.Ssend([wtemp, MPI.DOUBLE], dest=0, tag=int(gid))

                            wtemp = np.ones(len(times))*-1
                            self.comm.Ssend([wtemp, MPI.DOUBLE], dest=0, tag=int(gid))

            if self.id == 0:
                print "root gathered synaptic input conductance"


            self.barrier() # wait for other nodes

            # Same protocol for the inhibitory weights ('w_inh').
            for n in range(self.n_celltypes):
                winh_mat.append([])

                for i, gid in enumerate(self.global_gidlist[n]):

                    if self.id == 0:

                        wall = []

                        if gid in self.gidlist[n]:

                            walltemp = self.cells[n][self.gidlist[n].index(gid)].record['w_inh']
                            if len(walltemp) > 0:
                                for l in range(len(walltemp)):
                                    wtemp = np.array(walltemp[l])
                                    wtemp = interp(times,time,wtemp)
                                    wall.append(wtemp)

                        else:

                            while 1:
                                wtemp = np.zeros(len(times))
                                self.comm.Recv([wtemp, MPI.DOUBLE], source = MPI.ANY_SOURCE, tag=int(gid))

                                if wtemp[0] == -1:
                                    break
                                else:
                                    wall.append(wtemp)

                        winh_mat[n].append(wall)

                    else:
                        if gid in self.gidlist[n]:
                            walltemp = self.cells[n][self.gidlist[n].index(gid)].record['w_inh']

                            if len(walltemp) > 0:
                                for l in range(len(walltemp)):
                                    wtemp = np.array(walltemp[l])
                                    wtemp = interp(times,time,wtemp)
                                    self.comm.Ssend([wtemp, MPI.DOUBLE], dest=0, tag=int(gid))

                            wtemp = np.ones(len(times))*-1
                            self.comm.Ssend([wtemp, MPI.DOUBLE], dest=0, tag=int(gid))


            if self.id == 0:
                print "root gathered synaptic input conductance"


            self.barrier() # wait for other nodes


        t_all_vec_vec = []
        id_all_vec_vec = []
        f_cells_mean = []

        # Phase 7 (root only): sort gathered spikes by time, optionally add
        # jitter / a fixed delay, then build gsyn and the population rate.
        if self.id == 0: # only for first node

            for n in range(self.n_celltypes):

                ie = argsort(t_all_vec[n])
                t_all_vec_vec.append( t_all_vec[n][ie] )
                id_all_vec_vec.append( id_all_vec[n][ie].astype(int) ) #

            print "all spikes have been sorted"

            if self.jitter > 0: # add jitter!
                np.random.seed(40)
                x = np.random.normal(0, self.jitter, len(t_all_vec_vec[self.a_celltype[0]]))
                t_all_vec_vec[self.a_celltype[0]] = t_all_vec_vec[self.a_celltype[0]] + x

            if self.delta_t > 0:
                t_all_vec_vec[self.a_celltype[0]] = t_all_vec_vec[self.a_celltype[0]] + self.delta_t

            gsyn = zeros(len(freq_times))

            if 'gsyn_in' in self.method_interpol:
                pass
            else:
                bvec = ["syn" in st for st in self.method_interpol]
                if np.any(bvec):

                    if (not hasattr(self, 'passive_target')) | (self.jitter > 0): # if not already done in neuron via artificial cell

                        # Convolve the population spike histogram with the
                        # synaptic kernel Ksyn to obtain gsyn.
                        [resp, _] = neuronpy.util.spiketrain.get_histogram(t_all_vec_vec[self.a_celltype[0]], bins = freq_times)
                        resp = np.concatenate((zeros(1),resp))

                        Ksyn = syn_kernel(arange(0,10*self.syn_tau2,self.bin_width), self.syn_tau1, self.syn_tau2)
                        Ksyn = np.concatenate((zeros(len(Ksyn)-1),Ksyn))
                        gsyn = np.convolve(Ksyn, resp, mode='same')
                        print "Generated gsyn by convolution with Ksyn"
                        self.nc_delay = 0

                    else:
                        gsyn = interp(freq_times,time,np.array(self.rec_g))

            spike_freq = np.zeros(len(freq_times))

            # Weighted instantaneous population rate per analyzed celltype.
            for j in self.a_celltype:

                #plt.figure('results_voltage')
                #ax99 = plt.subplot(2,1,1)
                #ax99.plot(time,voltage[j])

                #plt.text(0.5, 1.1, r'CF=' + str(round(fmean,1)) + ',fmax=' + str(round(fmax,1)) + ',fmstd=' + str(round(fmstd,1)), transform=ax99.transAxes, fontsize=10, va='center', ha='center')
                #plt.savefig("./figs/Pub/Voltage_" + str(self.pickle_prefix) + "_cell" + str(j) + "_N" + str(self.N[j]) + ".pdf", dpi = 300, transparent=True) # save it
                #plt.show()
                #plt.clf()

                [num_spikes, _] = neuronpy.util.spiketrain.get_histogram(t_all_vec_vec[j], bins = freq_times)

                if isinstance(self.factor_celltype[j], ( int, long ) ):
                    f = self.factor_celltype[j]
                else:
                    f = self.factor_celltype[j][0]

                spike_freq = spike_freq + f * np.concatenate((zeros(1),num_spikes)) / self.bin_width

        self.barrier() # wait for other nodes

        #figure('1')
        #plot(time,np.array(self.rec_s1),'b', time,np.array(self.rec_s2),'r')
        #plt.show()

        return {'time':time, 'voltage':voltage, 'current':current, 'fmean':fmean, 'f_cells_mean':f_cells_mean,
                'gsyn':gsyn, 'freq_times':freq_times, 'spike_freq':spike_freq, 'gsyn_in':gsyn_in, 'fmeanA':fmeanA, 'fmaxA':fmaxA, 'fmstdA':fmstdA, 'fcvmA':fcvmA, 'fstdmA':fstdmA, 'fbstdA':fbstdA,
                't_all_vec_vec':t_all_vec_vec, 'id_all_vec_vec':id_all_vec_vec, 'gsyns':gsyns, 'w_mat':w_mat, 'winh_mat':winh_mat, 'fmax':fmax, 'fmstd':fmstd, 'fcvm':fcvm, 'fbaseA':fbaseA, 'fbase':fbase}


    def clean(self):
        """
        Shut down the NEURON ParallelContext worker loop (runworker/done).
        """

        self.pc.runworker()
        self.pc.done()


    def compute_Transfer(self, stimulus, spike_freq, freq_times, t, noise_data_points, gsyn, gsyn_in, do_csd, t_qual, K_mat_old, t_startstop, inh_factor=[1]):
        """
        Build the effective (summed, weighted) input signal from the per-noise
        stimuli and hand everything to the module-level compute_Transfer for
        the transfer-function estimate; returns its results dict.
        """

        stimulus0 = np.zeros(len(stimulus[0]))

        for a in self.a_celltype:
            # sum input to produce linear input that should be reconstructed!

            if (any(self.syn_inh_dist) > 0) and (any(self.syn_ex_dist) > 0):
                if max(self.syn_inh_dist) == max(self.syn_ex_dist): # same signal through 
ex and inh
                    print "inh_factor = [0,1]"
                    inh_factor = [0,1]

            # Excitatory contributions add, inhibitory ones subtract; each is
            # scaled by its inh_factor weight (index ni-1 selects the noise
            # source, 0 means "no input").
            for ni in self.syn_ex_dist[a]:
                if ni != 0:
                    stimulus0 += inh_factor[ni-1] * stimulus[ni-1]
                    print "+ex:", ni-1

            for ni in self.syn_inh_dist[a]:
                if ni != 0:
                    stimulus0 -= inh_factor[ni-1] * stimulus[ni-1] #old: +nemax
                    print "-inh:", ni-1 #old: +nemax

            if (max(self.n_syn_ex) == 0) and (max(self.n_syn_inh) == 0):
                stimulus0 += stimulus[0]
                print "current"

            #if self.n_syn_ex[self.celltype_syn[0]] == 0:
            #    stimulus0 += stimulus[0]

            # amplitude should not matter since filter amplitude is simply adjusted
            #stimulus = stimulus0 #/len(self.syn_ex_dist)

        # Normalize the summed signal (std = 0.5).
        stimulus0 = stimulus0 / std(stimulus0) / 2

        # linear interpolation inside compute_Transfer !!!
        print "max(stimulus0):",max(stimulus0)
        results = compute_Transfer(spike_freq = spike_freq, freq_times = freq_times,
                   stimulus = stimulus0, t = t, noise_data_points = noise_data_points, gsyn = gsyn, gsyn_in = gsyn_in, do_csd = do_csd, t_kernel = 1*s,
                   method_interpol = self.method_interpol, nc_delay = self.nc_delay, w_length = 3, t_qual = t_qual, K_mat_old = K_mat_old, t_startstop = t_startstop, give_psd = self.give_psd) # freq_wp not defined, use all frequencies

        # TEST:
        #VAF = results.get('VAFf_mat')
        #freq_used = results.get('freq_used')

        #iend = mlab.find(freq_used >= self.xmax)[0]
        #err = 1-mean(VAF[1][0,1:iend-1])
        #print "err: ", err

        return results


    def residuals_compute_Transfer(self, p, stimulus, spike_freq, freq_times, t, noise_data_points, gsyn, gsyn_in, do_csd, t_qual, K_mat_old, t_startstop, inh_factor):
        """
        Objective function for fitting the negative entries of inh_factor:
        substitute the free parameters p into those slots, run
        compute_Transfer, and return 1 - mean(VAF) below self.xmax as the
        error to be minimized (used with fmin).
        """

        inh_factor_in = inh_factor[:]
        ip = 0
        # Replace each negative (i.e. "to be fitted") weight by the next
        # element of the parameter vector p.
        for i, inhf in enumerate(inh_factor_in):
            if inhf < 0:
                inh_factor_in[i] = p[ip]
                ip += 1

        results = self.compute_Transfer(stimulus = stimulus, spike_freq = spike_freq, freq_times = freq_times,
                     t = t, noise_data_points = noise_data_points, gsyn = gsyn, gsyn_in = gsyn_in,
                     do_csd = do_csd, t_qual = t_qual, K_mat_old = K_mat_old, t_startstop = t_startstop, inh_factor = inh_factor_in)

        VAF = results.get('VAFf_mat')
        freq_used = results.get('freq_used')

        iend = mlab.find(freq_used >= self.xmax)[0]
        err = 1-mean(VAF[1][0,0:iend])
        print "inh_factor:", inh_factor_in, "err: ", err

        return err

    #@profile
    def fun_cnoise_Stim(self, t_stim = 10*s, sexp = 0, cutf = 0, do_csd = 1, t_qual = 0, freq_used = np.array([]), K_mat_old = np.array([]), inh_factor = [1], onf = None, equi = 0):
        """
        Stimulate cell with colored noise
        sexp = spectral exponent: Power ~ 1/freq^sexp
        cutf = frequency cutoff: Power flat (white) for freq <~ cutf
        do_csd = 1: use cross spectral density function for computation
        """
        self.barrier() # wait for other nodes

        filename = str(self.pickle_prefix) + "_results_pop_cnoise.p"
        filepath = self.data_dir + "/" + filename

        if self.id == 0: print "- filepath:", filepath

        if self.do_run or (os.path.isfile(filepath) is False):

            tstart = 0;
            fs = 1 / self.dt # sampling rate
            fmax = fs / 2 # maximum frequency (nyquist)

            t_noise = arange(tstart, t_stim, self.dt) # create stimulus time vector, make sure stimulus is even!!!

            #print self.syn_ex_dist
            #print self.syn_inh_dist
            #exit()

            if (self.syn_ex_dist == []):
                for nt in range(self.n_celltypes): # loop over all cells
                    #print "nt", nt
                    if hasattr(self.cells[nt][0], 'input_vec'):
                        self.syn_ex_dist.append([1] * len(self.cells[nt][0].input_vec)) # default ex for all by default!!!
                    else:
                        self.syn_ex_dist.append([1] * self.n_syn_ex[nt]) # default ex for all by default!!!

            #print self.syn_ex_dist

            if (self.syn_ex_dist[0] == []):
                nemax = 1
            else:
                nemax = max([item for sublist in self.syn_ex_dist for item in sublist])

            if (self.syn_inh_dist == []): # and (any(self.n_syn_inh) > 0)
                for nt in range(self.n_celltypes): # loop over all cells
                    self.syn_inh_dist.append([0] * self.n_syn_inh[nt]) # default no inh for all by 
default!!!\n \n #print self.syn_inh_dist\n #exit()\n \n if (self.syn_inh_dist[0] == []):\n nimax = 0\n else:\n nimax = max([item for sublist in self.syn_inh_dist for item in sublist]) \n \n #print \"self.syn_inh_dist, self.syn_ex_dist\", self.syn_inh_dist, self.syn_ex_dist\n \n n_noise = max([nemax,nimax]) # number of noise sources\n #print n_noise,nemax,nimax\n # create reproduceable input\n noise_data = []\n\n for nj in range(n_noise):\n \n if self.id == 0: # make sure all have the same signal !!!\n if len(freq_used) == 0: \n noise_data0 = create_colnoise(t_noise, sexp, cutf, self.seed+nj, onf = onf)\n else:\n noise_data0, _, _, _ = create_multisines(t_noise, freq_used) # create multi sine signal\n else:\n noise_data0 = np.empty(len(t_noise), dtype=np.float64)\n\n noise_data0 = self.broadcast(noise_data0, fast = True) \n \n noise_data.append(noise_data0)\n noise_data0 = [] \n \n noise_data_points = len(noise_data[0]) \n\n # Create signal weight vector inh_factor if it is not fully given\n if len(noise_data) > len(inh_factor):\n inh_factor = [inh_factor[0]] * len(noise_data) \n print \"inh_factor:\", inh_factor\n\n #if equi:\n #pass\n # tstop = t_stim\n \n if max(self.n_syn_ex) == 0: # this means current input\n \n self.set_IStim() # sets amp\n \n if self.fluct_s != []:\n if self.fluct_s[self.a_celltype[0]] > 0:\n if self.id == 0: print \"- adding i fluct\"\n self.connect_fluct()\n \n for i, m in enumerate(self.method_interpol):\n if \"syn\" in m: self.method_interpol[i] = \"syn \" + str(self.syn_tau1/ms) + \"/\" + str(self.syn_tau2/ms) + \"ms\"\n if \"bin\" in m: self.method_interpol[i] = \"bin \" + str(self.bin_width/ms) + \"ms\"\n \n stimulus = []\n for nj in range(len(noise_data)):\n stimulus0, t, t_startstop = construct_Stimulus(noise_data[nj], fs, self.amp[self.a_celltype[0]], ihold = 0, delay_baseline = self.delay_baseline) # , tail_points = 0\n stimulus.append(stimulus0)\n tstop = t[-1]\n \n self.set_IPlay2(stimulus, t)\n if self.id == 0: print \"- 
starting colored noise transfer function estimation! with amp = \" + str(np.round(self.amp[self.a_celltype[0]],4)) + \", ihold = \" + str(np.round(self.ihold[self.a_celltype[0]],4)) + \", ihold_sigma = \" + str(np.round(self.ihold_sigma,4)) + \", dt = \" + str(self.dt) + \" => maximum frequency = \" + str(fmax) + \"\\r\" \n \n else:\n\n self.give_freq = False\n ihold = self.set_i(self.ihold) # just sets amp, ihold should not change! \n\n if 'gsyn_in' not in self.method_interpol: \n pass\n else:\n self.g_syn_ex = [1]*len(self.N)\n \n \n if ((self.fluct_g_e0 != []) or (self.fluct_g_i0 != [])):\n if ((self.fluct_g_e0[self.a_celltype[0]] > 0) or (self.fluct_g_i0[self.a_celltype[0]] > 0)):\n if self.id == 0: print \"- adding g fluct\"\n self.connect_gfluct(E_i=-65)\n \n stimulus = []\n for nj in range(len(noise_data)):\n stimulus0, t, t_startstop = construct_Stimulus(noise_data[nj], fs, amp=1, ihold = 0, tail_points = 0, delay_baseline = self.delay_baseline) # self.amp\n stimulus.append(stimulus0)\n \n noise_data = [] \n tstop = t[-1]\n \n if self.N[self.a_celltype[0]] > 1:\n self.set_IStim(ihold = [0]*self.n_celltypes, ihold_sigma = [0]*self.n_celltypes, random_start = True, tstart_offset = 1)\n if self.id == 0: print \"- add random start\"\n \n #print \"Enter Synplay()\"\n self.set_SynPlay(stimulus, t, t_startstop = t_startstop) \n #print \"Exit Synplay()\"\n\n if self.id == 0: print \"- starting colored noise transfer function estimation with synaptic input! 
with amp = \" + str(np.round(self.amp,4)) + \", ihold = \" + str(np.round(self.ihold,4)) + \", ihold_sigma = \" + str(np.round(self.ihold_sigma,4)) + \", dt = \" + str(self.dt) + \" => maximum frequency = \" + str(fmax) + \"\\r\" \n \n amp_vec = []\n mag_vec = [] \n pha_vec = []\n freq_used = []\n ca = []\n SNR_mat = []\n VAFf_mat = []\n Qual_mat = []\n CF_mat = [] \n VAF_mat = []\n stim = []\n stim_re_mat = []\n resp_mat = []\n current_re = []\n ihold1 = []\n tk = []\n K_mat = []\n gsyn_in = []\n fmean = []\n fmax = [] \n fmstd = [] \n fcvm = [] \n fmeanA = []\n fmaxA = [] \n fmstdA = [] \n fcvmA = [] \n t_all_vec_input_sorted = []\n id_all_vec_input_sorted = []\n \n if (self.id == 0) and (max(self.n_syn_ex) > 0):\n print range(self.n_celltypes), np.shape(self.t_all_vec_input)\n for l in range(self.n_celltypes): \n ie = argsort(self.t_all_vec_input[l]) \n t_all_vec_input_sorted.append( self.t_all_vec_input[l][ie] )\n id_all_vec_input_sorted.append( self.id_all_vec_input[l][ie].astype(int) )\n \n #if (self.id == 0): \n # print self.g_syn_ex\n # print np.array(self.g_syn_ex)>= 0\n \n #print \"g_syn_ex:\",self.g_syn_ex\n if np.array(np.array(self.g_syn_ex)>= 0).any():\n \n if hasattr(self.cells[self.a_celltype[0]][0], 'get_states') and equi:\n print \"- Equilibrate!\"\n self.run(tstop, do_loadstate = False)\n m = md5.new()\n cell_exe_new = self.cell_exe[0]\n m.update(cell_exe_new)\n filename = './states_' + self.celltype[0] + '_' + m.hexdigest() + '_Population.b'\n self.cells[self.a_celltype[0]][0].get_states(filename)\n else:\n self.run(tstop, do_loadstate = False)\n \n i_startstop = []\n \n results = self.get(t_startstop, i_startstop) \n time = results.get('time')\n current = results.get('current') \n voltage = results.get('voltage') \n fmean = results.get('fmean') \n gsyn = results.get('gsyn') \n freq_times = results.get('freq_times')\n spike_freq = results.get('spike_freq')\n t_all_vec_vec = results.get('t_all_vec_vec')\n id_all_vec_vec = 
results.get('id_all_vec_vec')\n gsyns = results.get('gsyns')\n gsyn_in = results.get('gsyn_in')\n \n fmax = results.get('fmax')\n fmstd = results.get('fmstd')\n fcvm = results.get('fcvm')\n \n fmeanA = results.get('fmeanA') \n fmaxA = results.get('fmaxA')\n fmstdA = results.get('fmstdA')\n fcvmA = results.get('fcvmA')\n \n fbaseA = results.get('fbaseA') \n fbase = results.get('fbase')\n fbstdA = results.get('fbstdA')\n \n \n else: # do not run, analyse input!!!\n \n time = t\n voltage = []\n for l in range(self.n_celltypes): \n voltage.append(np.zeros(len(t)))\n current = []\n \n freq_times = []\n spike_freq = []\n gsyn = []\n gsyn_in = []\n \n t_all_vec_vec = []\n id_all_vec_vec = []\n \n fmean = []\n fmax = []\n fmstd = []\n fcvm = []\n fstdm = []\n \n fmeanA = []\n fmaxA = []\n fmstdA = []\n fcvmA = []\n fbaseA = []\n fbase = []\n fbstdA = []\n \n if self.id == 0:\n \n current = self.n_train_ex\n \n #t_all_vec = self.t_all_vec_input\n #id_all_vec = self.id_all_vec_input\n\n #ie = argsort(t_all_vec) \n #t_all_vec_vec.append( t_all_vec[ie] )\n #id_all_vec_vec.append( id_all_vec[ie].astype(int) )\n \n t_all_vec_vec = t_all_vec_input_sorted\n id_all_vec_vec = id_all_vec_input_sorted\n \n freq_times = arange(0, tstop, self.bin_width)\n spike_freq = np.zeros(len(freq_times))\n \n for j in self.a_celltype:\n \n [num_spikes, _] = neuronpy.util.spiketrain.get_histogram(t_all_vec_vec[j], bins = freq_times)\n\n if self.tau2_ex[0] > 0:\n spike_freq = np.concatenate((zeros(1),num_spikes)) \n print \"NOSYN TEST: start convolution with Ksyn\"\n Ksyn = syn_kernel(arange(0,10*self.tau2_ex[0],self.bin_width), self.tau1_ex[0], self.tau2_ex[0]) \n Ksyn = np.concatenate((zeros(len(Ksyn)-1),Ksyn))\n spike_freq = np.convolve(Ksyn, spike_freq, mode='same')\n print \"NOSYN TEST: convolution finished\"\n else:\n\n if isinstance(self.factor_celltype[j], ( int, long ) ):\n f = self.factor_celltype[j] \n else:\n f = self.factor_celltype[j][0] \n \n spike_freq = spike_freq + f * 
np.concatenate((zeros(1),num_spikes)) / self.bin_width\n\n fmean.append(self.fmean_input)\n fmax.append(self.fmax_input) \n fmstd.append(self.fmstd_input) \n fcvm.append(self.fcvm_input) \n fstdm.append(self.fstdm_input)\n\n if self.no_fmean == True:\n fmean.append(ihold)\n \n #plt.figure('spike_freq') \n #plt.plot(freq_times, spike_freq)\n #plt.savefig(\"./figs/Pub/Spike_freq_\" + str(self.pickle_prefix) + \".pdf\", dpi = 300, transparent=True) # save it \n #plt.clf()\n \n fmeanA = fmean[0]\n fmaxA = fmax[0]\n fmstdA = fmstd [0] \n fcvmA = fcvm[0]\n fstdmA = fstdm[0]\n \n \n if self.id == 0: \n \n if any([i<0 for i in inh_factor]):\n \n p0 = []\n inhf_idx = []\n for i, inhf in enumerate(inh_factor):\n if inhf < 0: \n p0.append(0) \n inhf_idx.append(i)\n \n plsq = fmin(self.residuals_compute_Transfer, p0, args=(stimulus, spike_freq, freq_times, t, noise_data_points, gsyn, gsyn_in, do_csd, t_qual, K_mat_old, t_startstop, inh_factor))\n p = plsq\n \n ip = 0\n for i in inhf_idx:\n inh_factor[i] = p[ip]\n ip += 1\n \n\n print \"Final inh_factor: \", inh_factor\n \n \n results = self.compute_Transfer(stimulus, spike_freq = spike_freq, freq_times = freq_times, \n t = t, noise_data_points = noise_data_points, gsyn = gsyn, gsyn_in = gsyn_in, \n do_csd = do_csd, t_qual = t_qual, K_mat_old = K_mat_old, t_startstop = t_startstop, inh_factor=inh_factor)\n \n mag_vec, pha_vec, ca, freq, freq_used, fmean_all = results.get('mag_mat'), results.get('pha_mat'), results.get('ca_mat'), results.get('freq'), results.get('freq_used'), results.get('fmean') \n SNR_mat, VAFf_mat, Qual_mat, CF_mat, VAF_mat = results.get('SNR_mat'), results.get('VAFf_mat'), results.get('Qual_mat'), results.get('CF_mat'), results.get('VAF_mat') \n stim, resp_mat, stim_re_mat, tk, K_mat = results.get('stim'), results.get('resp_mat'), results.get('stim_re_mat'), results.get('tk'), results.get('K_mat') \n \n \n self.barrier() # wait for other nodes\n \n \n if self.id == 0:\n \n if t_qual > 0:\n #print 
t_startstop[0], t_startstop[0]/self.dt, (t_startstop[0]+t_qual)/self.dt\n current_re = current[int(t_startstop[0]/self.dt):int((t_startstop[0]+t_qual)/self.dt)]\n current_re = current_re[int(len(K_mat[self.a_celltype[0]])):int(len(current_re))-int(len(K_mat[self.a_celltype[0]]))]\n \n if len(self.i_holdrs) > 0:\n ihold1 = self.i_holdrs[self.a_celltype[0]][0]\n else:\n ihold1 = []\n \n for l in range(len(self.method_interpol)): # unwrap \n pha_vec[l,:] = unwrap(pha_vec[l,:] * (pi / 180)) * (180 / pi) # unwrap for smooth phase\n \n # only return fraction of actual signal, it is too long!!! \n if time[-1] > self.tmax: \n imax = -1*int(self.tmax/self.dt)\n time = time[imax:]; current = current[imax:]; gsyn = gsyn[imax:]; gsyn_in = gsyn_in[imax:]\n for n in range(self.n_celltypes): \n voltage[n] = voltage[n][imax:]\n \n if freq_times != []: \n if freq_times[-1] > self.tmax:\n imax2 = where(freq_times > self.tmax)[0][0] # for spike frequency \n freq_times = freq_times[0:imax2]; spike_freq = spike_freq[0:imax2] \n \n bvec = [\"_syn\" in st for st in self.method_interpol]\n if np.any(bvec):\n # normalize synaptic integration with others \n mag_vec[1,:]= mag_vec[0,0]*mag_vec[1,:]/mag_vec[1,0] \n \n if self.id == 0: print \"start pickle\"\n \n results = {'freq_used':freq_used, 'amp':amp_vec,'mag':mag_vec,'pha':pha_vec,'ca':ca,'voltage':voltage,'tk':tk,'K_mat':K_mat, 'ihold1': ihold1, 't_startstop':t_startstop, #'stimulus':stimulus,\n 'current':current,'t1':time,'freq_times':freq_times,'spike_freq':spike_freq, 'stim':stim, 'stim_re_mat':stim_re_mat, 'resp_mat':resp_mat, 'current_re':current_re, 'gsyn_in':gsyn_in, 'fmeanA':fmeanA, 'fmaxA':fmaxA, 'fmstdA':fmstdA, 'fcvmA':fcvmA, 'fbaseA':fbaseA, 'fbase':fbase, 'fbstdA':fbstdA,\n 'fmean':fmean,'method_interpol':self.method_interpol, 'SNR':SNR_mat, 'VAF':VAFf_mat, 'Qual':Qual_mat, 'CF':CF_mat, 'VAFs':VAF_mat, 'fmax':fmax, 'fmstd':fmstd, 'fcvm':fcvm, 'inh_factor':inh_factor, 't_all_vec_vec':t_all_vec_vec, 
'id_all_vec_vec':id_all_vec_vec} \n \n if self.id == 0:\n if self.dumpsave == 1:\n pickle.dump( results, gzip.GzipFile( filepath, \"wb\" ) )\n print \"pickle done\" \n \n \n if self.plot_train:\n \n for a in self.a_celltype:\n\n #i_start = mlab.find(t_all_vec_vec[a] >= 0)[0]\n #i_stop = mlab.find(t_all_vec_vec[a] >= 5)[0]\n \n #t_all_cut = t_all_vec_vec[a][i_start:i_stop]\n #id_all_cut = id_all_vec_vec[a][i_start:i_stop]\n \n t_all_cut = t_all_vec_vec[a]\n id_all_cut = id_all_vec_vec[a]\n \n f_start_in = mlab.find(t_all_cut >= 0) \n f_stop_in = mlab.find(t_all_cut <= 10) \n \n f_start = f_start_in[0] \n f_stop = f_stop_in[-1]+1 \n use_spikes = t_all_cut[f_start:f_stop]\n use_id = id_all_cut[f_start:f_stop]\n \n plt.figure('results_train') \n ax99 = plt.subplot(1,1,1)\n ax99.plot(use_spikes,use_id,'|', ms=2)\n plt.text(0.5, 1.1, r'CF=' + str(round(fmean,1)) + ',fmax=' + str(round(fmax,1)) + ',fmstd=' + str(round(fmstd,1)), transform=ax99.transAxes, fontsize=10, va='center', ha='center')\n plt.savefig(\"./figs/Pub/Train_\" + str(self.pickle_prefix) + \"_cell\" + str(a) + \"_N\" + str(self.N[a]) + \".pdf\", dpi = 300, transparent=True) # save it \n \n plt.clf()\n \n if len(t_all_cut) > 0:\n \n tbin = 100*ms\n tb = np.arange(0,t[-1],tbin)\n [all_rate, _] = neuronpy.util.spiketrain.get_histogram(t_all_cut, bins = tb)\n all_rate = np.concatenate((np.zeros(1),all_rate)) / self.N[a] / tbin\n \n plt.figure('results_train2') \n plt.plot(tb,all_rate)\n plt.savefig(\"./figs/Pub/PSTH_\" + str(self.pickle_prefix) + \"_cell\" + str(a) + \"_N\" + str(self.N[a]) + \".pdf\", dpi = 300, transparent=True) # save it \n plt.clf()\n \n plt.figure('results_noise') \n plt.plot(time,current)\n plt.savefig(\"./figs/Pub/Noise_\" + str(self.pickle_prefix) + \"_cell\" + str(a) + \"_N\" + str(self.N[a]) + \".pdf\", dpi = 300, transparent=True) # save it \n plt.clf()\n \n \n if self.plot_input:\n \n if len(t_all_vec_input_sorted[0]) > 0:\n \n i_start = mlab.find(t_all_vec_input_sorted[0] >= 
0)[0]\n i_stop = mlab.find(t_all_vec_input_sorted[0] >= 5)[0]\n \n t_all_cut = t_all_vec_input_sorted[0][i_start:i_stop]\n id_all_cut = id_all_vec_input_sorted[0][i_start:i_stop]\n \n plt.figure('results_input') \n ax99 = plt.subplot(1,1,1)\n ax99.plot(t_all_cut,id_all_cut,'|', ms=2)\n plt.text(0.5, 1.1, r'fmean=' + str(round(self.fmean_input,1)) + ',fmax=' + str(round(self.fmax_input,1)) + ',fmstd=' + str(round(self.fmstd_input,1)) + ',fcvm=' + str(round(self.fcvm_input,1)) + ',fstdm=' + str(round(self.fstdm_input,1)), transform=ax99.transAxes, fontsize=10, va='center', ha='center')\n plt.savefig(\"./figs/Pub/Input_\" + str(self.pickle_prefix) + \"_N\" + str(self.N[self.a_celltype[0]]) + \".pdf\", dpi = 300, transparent=True) # save it \n plt.clf()\n \n\n else:\n \n if self.id == 0:\n results = pickle.load( gzip.GzipFile( filepath, \"rb\" ) )\n \n #print results\n #print {key:np.shape(value) for key,value in results.iteritems()}\n \n if self.minimal_dir: # save only info needed for plot\n \n print {key:np.shape(value) for key,value in results.iteritems()}\n \n if \"Fig6_pop_transfer_grc_syngr_nsyn4_cn_a1_noisesynlow_inhlow_adjfinh_varih_N100_CFo6.0_results_pop_cnoise.p\" in filename:\n results['ca'] = [] \n results['resp_mat'] = []\n results['stim'] = []\n results['current'] = []\n results['tk'] = []\n results['K_mat'] = []\n results['freq_times'] = []\n results['spike_freq'] = []\n results['stim_re_mat'] = []\n results['current_re'] = []\n results['t_all_vec_vec'] = []\n results['id_all_vec_vec'] = [] \n results['gsyn_in'] = []\n \n elif (\"Fig8_pop_transfer_none_synno_cn_cutf30_a1_noisesynlow_ih20_varih_N100_CFo-1_results_pop_cnoise.p\" in filename) \\\n or (\"Fig8_pop_transfer_none_synno_cn_cutf30_a10_noisesynlow_ih20_varih_N100_CFo-1_results_pop_cnoise.p\" in filename) \\\n or (\"Fig8_pop_transfer_grc_syngr_nsyn4_cn_cutf30_a1_noisesynlow_inhlow_adjfinh_varih_varinhn_N100_CFo9.0_results_pop_cnoise.p\" in filename) \\\n or 
(\"Fig8_pop_transfer_grc_syngr_nsyn4_cn_cutf30_a10_noisesynlow_inhlow_adjfinh_varih_varinhn_N100_is0.14_CFo9.0_results_pop_cnoise.p\" in filename) \\\n :\n\n results['ca'] = [] \n results['resp_mat'] = []\n results['current'] = []\n results['tk'] = []\n results['K_mat'] = []\n results['voltage'] = [] \n results['current_re'] = []\n results['t_all_vec_vec'] = []\n results['id_all_vec_vec'] = []\n results['t1'] = []\n results['gsyn_in'] = []\n \n elif (\"Fig8_pop_transfer_none_synno_cn_cutf30_a1_noisesynlow_ih20_varih_N50_twopop_CFo-1_results_pop_cnoise.p\" in filename) \\\n or (\"Fig8_pop_transfer_none_synno_cn_cutf30_a10_noisesynlow_ih20_varih_N50_twopop_CFo-1_results_pop_cnoise.p\" in filename) \\\n or (\"Fig8_pop_transfer_grc_syngr_nsyn4_cn_cutf30_a1_noisesynlow_inhlow_adjfinh_varih_varinhn_N50_twopop_CFo9.0_results_pop_cnoise.p\" in filename) \\\n or (\"Fig8_pop_transfer_grc_syngr_nsyn4_cn_cutf30_a10_noisesynlow_inhlow_adjfinh_varih_varinhn_N50_is0.14_twopop_CFo9.0_results_pop_cnoise.p\" in filename) \\\n or (\"Fig8_pop_transfer_grc_syngr_nsyn4_cn_cutf5_a1_noisesynlow_inhlow_adjfinh_varih_varinhn_N100_CFo14.0_results_pop_cnoise.p\" in filename) \\\n or (\"Fig8_pop_transfer_grc_syngr_nsyn4_cn_cutf5_a1_noisesynlow_inhlow_adjfinh_varih_varinhn_N50_twopop_CFo14.0_results_pop_cnoise.p\" in filename) \\\n :\n \n results['ca'] = [] \n results['resp_mat'] = []\n results['current'] = []\n results['tk'] = []\n results['K_mat'] = []\n results['voltage'] = [] \n results['current_re'] = []\n results['t_all_vec_vec'] = []\n results['id_all_vec_vec'] = []\n results['t1'] = []\n results['gsyn_in'] = []\n results['freq_times'] = []\n results['spike_freq'] = []\n \n elif (\"Fig4_pop_transfer_grc_cn_addn100_N[100]_CF[40]_amod[1]_results_pop_cnoise.p\" in filename) \\\n or (\"Fig4_pop_transfer_grc_cn_addn1_N[100]_CF[40]_amod[1]_results_pop_cnoise.p\" in filename) \\\n or (\"Fig4b_pop_transfer_grc_lowcf_cn_twopop_N[50, 50]_CF[0.0055, 0.0055]_amod[None, None]_results_pop_cnoise.p\" 
in filename) \\\n or (\"Fig4b_pop_transfer_grc_lowcf_cn_N[100]_CF[0.0055]_amod[None]_results_pop_cnoise.p\" in filename) \\\n or (\"Fig4b_pop_transfer_grc_lowcf_slownoise_cn_twopop_N[50, 50]_CF[0.0051, 0.0051]_amod[None, None]_results_pop_cnoise.p\" in filename) \\\n or (\"Fig4b_pop_transfer_grc_lowcf_slownoise_cn_N[100]_CF[0.0051]_amod[None]_results_pop_cnoise.p\" in filename) \\\n :\n \n results['ca'] = [] \n results['resp_mat'] = []\n results['current'] = []\n results['tk'] = []\n results['K_mat'] = []\n results['voltage'] = [] \n results['t_all_vec_vec'] = []\n results['id_all_vec_vec'] = []\n results['t1'] = []\n results['gsyn_in'] = []\n results['freq_times'] = []\n results['spike_freq'] = []\n \n elif (\"Fig2_pop_transfer_\" in filename) \\\n :\n \n results['ca'] = [] \n results['resp_mat'] = []\n results['current'] = []\n results['t1'] = []\n results['voltage'] = [] \n results['freq_times'] = []\n results['spike_freq'] = []\n results['current_re'] = []\n results['t_all_vec_vec'] = []\n results['id_all_vec_vec'] = []\n results['gsyn_in'] = []\n \n else:\n results['ca'] = [] \n results['resp_mat'] = []\n results['stim'] = []\n results['current'] = []\n results['tk'] = []\n results['K_mat'] = []\n results['t1'] = []\n results['voltage'] = [] \n results['freq_times'] = []\n results['spike_freq'] = []\n results['stim_re_mat'] = []\n results['current_re'] = []\n results['t_all_vec_vec'] = []\n results['id_all_vec_vec'] = []\n results['gsyn_in'] = []\n\n print {key:np.shape(value) for key,value in results.iteritems()}\n\n pickle.dump( results, gzip.GzipFile( self.minimal_dir + \"/\" + filename, \"wb\" ) ) \n \n else:\n results = {'freq_used':[], 'amp':[],'mag':[],'pha':[],'ca':[],'voltage':[], 'tk':[],'K_mat':[], 'ihold1':[], 't_startstop':[], #'stimulus':[],\n 'current':[],'t1':[],'freq_times':[],'spike_freq':[], 'stim':[], 'stim_re_mat':[], 'current_re':[], 'gsyn_in':[], 'fmeanA':[], 'fmaxA':[], 'fmstdA':[], 'fcvmA':[], 'fbaseA':[], 'fbase':[], 'fbstdA':[],\n 
'fmean':[],'method_interpol':self.method_interpol, 'SNR':[], 'VAF':[], 'Qual':[], 'CF':[], 'VAFs':[], 'fmax':[], 'fmstd':[], 'fcvm':[], 'inh_factor':[], 't_all_vec_vec':[], 'id_all_vec_vec':[]} \n \n if self.id == 0: \n\n if self.plot_train: \n\n for a in self.a_celltype:\n \n t1 = results.get('t1') \n voltage = results.get('voltage') \n fmean = results.get('fmean') \n fmax = results.get('fmax') \n fmstd = results.get('fmstd') \n \n \n if results.has_key('t_all_vec_vec'):\n \n if len(results['t_all_vec_vec']) > 0: \n t_all_vec_vec = results.get('t_all_vec_vec') \n id_all_vec_vec = results.get('id_all_vec_vec') \n \n t_all_cut = t_all_vec_vec[a]\n id_all_cut = id_all_vec_vec[a]\n \n f_start_in = mlab.find(t_all_cut >= 0) \n f_stop_in = mlab.find(t_all_cut <= 10) \n \n f_start = f_start_in[0] \n f_stop = f_stop_in[-1]+1 \n use_spikes = t_all_cut[f_start:f_stop]\n use_id = id_all_cut[f_start:f_stop]\n \n plt.figure('results_train') \n ax97 = plt.subplot(1,1,1)\n ax97.plot(use_spikes,use_id,'|', ms=6)\n plt.text(0.5, 1.1, r'CF=' + str(round(fmean,1)) + ',fmax=' + str(round(fmax,1)) + ',fmstd=' + str(round(fmstd,1)), transform=ax97.transAxes, fontsize=10, va='center', ha='center')\n plt.savefig(\"./figs/Pub/Train_\" + str(self.pickle_prefix) + \"_cell\" + str(a) + \"_N\" + str(self.N[a]) + \".pdf\", dpi = 300, transparent=True) # save it \n\n \n plt.figure('results_voltage') \n ax99 = plt.subplot(2,1,1)\n ax99.plot(t1,voltage[a])\n \n t_noise = arange(0, t_stim, self.dt)\n noise_data = create_colnoise(t_noise, sexp, cutf, 50, onf = onf)\n stimulus, t, t_startstop = construct_Stimulus(noise_data, 1/self.dt, amp=1, ihold = 0, tail_points = 0, delay_baseline = self.delay_baseline) \n ax98 = plt.subplot(2,1,2)\n ax98.plot(t[0:10/self.dt],stimulus[0:10/self.dt],color='k')\n \n plt.text(0.5, 1.1, r'CF=' + str(round(fmean,1)) + ',fmax=' + str(round(fmax,1)) + ',fmstd=' + str(round(fmstd,1)), transform=ax99.transAxes, fontsize=10, va='center', ha='center')\n 
plt.savefig(\"./figs/Pub/Voltage_\" + str(self.pickle_prefix) + \"_cell\" + str(a) + \"_N\" + str(self.N[a]) + \".pdf\", dpi = 300, transparent=True) # save it \n plt.show()\n plt.clf()\n \n if (self.id == 0) and (do_csd == 1):\n Qual = results.get('Qual') \n for i, ii in enumerate(self.method_interpol):\n print \"\\n[QUAL:] Interpol:\", ii, \"SNR0:\", Qual[i,0,0], \"SNR_cutff:\", Qual[i,0,1], \"SNR_mean:\", Qual[i,0,2], \"\\n VAF0:\", Qual[i,1,0], \"VAF_cutff:\", Qual[i,1,1], \"VAF_mean:\", Qual[i,1,2], \"\\n CF(subtracted):\", Qual[i,2,0], \"VAF(subtracted):\", Qual[i,2,1] \n \n VAF = results.get('VAF')\n freq_used = results.get('freq_used') \n iend = mlab.find(freq_used >= self.xmax)[0] \n print 'm(VAF)=' + str(np.mean(VAF[1][0,0:iend])) \n \n self.barrier() # wait for other nodes\n \n return results\n\n\n# def fun_ssine_Stim(self, freq_used = np.array([1, 10, 100, 1000])*Hz):\n# \"\"\"\n# Compute impedance and/or transfer function using Single sine stimulation\n# Only compute transfer function if there is a steady state (resting) firing rate!\n# \"\"\"\n# self.barrier() # wait for other nodes\n# \n# filepath = \"./data/\" + str(self.pickle_prefix) + \"_results_pop_ssine.p\"\n# \n# if self.do_run or (os.path.isfile(filepath) is False):\n# \n# fs = 1 / self.dt # sampling rate \n# fmax = fs / 2 # maximum frequency (nyquist)\n# \n# if self.id == 0: print \"- starting single sine transfer function estimation! 
with amp = \" + str(np.round(self.amp[a_celltype[0]],4)) + \", ihold = \" + str(np.round(self.ihold[self.a_celltype[0]],4)) + \", dt = \" + str(self.dt) + \" => maximum frequency = \" + str(fmax) + \"\\r\" \n# \n# if max(self.n_syn_ex) == 0:\n# self.set_IStim() \n# \n# if self.fluct_s != []:\n# if self.fluct_s[self.a_celltype[0]] > 0:\n# if self.id == 0: print \"- adding i fluct\"\n# self.connect_fluct()\n# \n# for i, m in enumerate(self.method_interpol):\n# if \"syn\" in m: self.method_interpol[i] = \"syn \" + str(self.syn_tau1/ms) + \"/\" + str(self.syn_tau2/ms) + \"ms\"\n# if \"bin\" in m: self.method_interpol[i] = \"bin \" + str(self.bin_width/ms) + \"ms\"\n# \n# else:\n# self.give_freq = False\n# ihold = self.set_i(self.ihold) # just sets amp, ihold should not change! \n# \n# if ((self.fluct_g_e0 != []) or (self.fluct_g_i0 != [])):\n# if ((self.fluct_g_e0[self.a_celltype[0]] > 0) or (self.fluct_g_i0[self.a_celltype[0]] > 0)):\n# if self.id == 0: print \"- adding g fluct\"\n# self.connect_gfluct(E_i=-65)\n# \n# #if ((self.fluct_std_e[self.a_celltype[0]] != []) or (self.fluct_std_i[self.a_celltype[0]] != [])):\n# # if ((self.fluct_std_e[self.a_celltype[0]] > 0) or (self.fluct_std_i[self.a_celltype[0]] > 0)):\n# # if self.id == 0: print \"- adding g fluct\"\n# # self.connect_gfluct(E_i=-65)\n# \n# if 'gsyn_in' not in self.method_interpol: \n# pass\n# else:\n# self.g_syn_ex = 1\n# \n# \n# for i, fu in enumerate(freq_used):\n# \n# if self.id == 0: print \"- single sine processing frequency = \" + str(fu)\n# \n# t, stimulus, i_startstop, t_startstop = create_singlesine(fu = fu, amp = self.amp[a_celltype[0]], ihold = 0, dt = self.dt, periods = 20, minlength = 2*s, t_prestim = 1*s)\n# tstop = t[-1]\n# \n# if i == 0: t_startstop_plot = t_startstop\n# \n# if max(self.n_syn_ex) == 0:\n# self.set_IPlay(stimulus, t)\n# else:\n# self.set_SynPlay(stimulus, t) \n# \n# if self.g_syn_ex >= 0: # should also be true for current input!!!\n# \n# self.run(tstop)\n# \n# if i == 0: # 
do this here to have something to return\n# \n# # select first sinusoidal to plot, later\n# voltage_plot = []\n# current_plot = []\n# time_plot = []\n# freq_times_plot = []\n# spike_freq_plot = []\n# gsyn_plot = []\n# \n# # construct vectors\n# amp_vec = zeros(len(freq_used)) # amplitude vector\n# fmean_all = zeros(len(freq_used)) # mean firing frequency (all cells combined)\n# fmean = zeros(len(freq_used)) # mean firing frequency (one cell)\n# ca = zeros(len(freq_used), dtype=complex)\n# \n# # create matrix to hold all different interpolation methods:\n# mag_vec = zeros((len(self.method_interpol),len(freq_used))) # magnitude vector\n# pha_vec = zeros((len(self.method_interpol),len(freq_used))) # phase vector \n# NI_vec = zeros((len(self.method_interpol),len(freq_used))) # NI vector\n# VAF_vec = zeros((len(self.method_interpol),len(freq_used))) # VAF vector\n# \n# results = self.get(t_startstop, i_startstop) # t1 should be equal to t!!!\n# time, voltage, current, fmean0, gsyn = results.get('time'), results.get('voltage'), results.get('current'), results.get('fmean'), results.get('gsyn')\n# freq_times, spike_freq, t_all_vec_vec, id_all_vec_vec, gsyns = results.get('freq_times'), results.get('spike_freq'), results.get('t_all_vec_vec'), results.get('id_all_vec_vec'), results.get('gsyns')\n# \n# else:\n# \n# time = t\n# voltage = []\n# voltage.append(np.zeros(len(t)))\n# current = stimulus\n# \n# freq_times = []\n# spike_freq = []\n# fmean0 = ihold\n# gsyn = []\n# gsyn_in = []\n# \n# t_all_vec_vec = []\n# id_all_vec_vec = []\n# \n# \n# if self.id == 0:\n# \n# t_all_vec = []\n# t_all_vec.append([])\n# t_all_vec[0] = np.concatenate(self.t_all_vec_input)\n# \n# id_all_vec = []\n# id_all_vec.append([])\n# id_all_vec[0] = np.concatenate(self.id_all_vec_input)\n# \n# ie = argsort(t_all_vec[0]) \n# t_all_vec_vec.append( t_all_vec[0][ie] )\n# id_all_vec_vec.append( id_all_vec[0][ie].astype(int) ) # \n# \n# \n# freq_times = arange(0, tstop, self.bin_width)\n# [num_spikes, _] = 
neuronpy.util.spiketrain.get_histogram(t_all_vec_vec[0], bins = freq_times)\n# spike_freq = np.concatenate((zeros(1),num_spikes)) / self.bin_width\n#\n# \n# if self.id == 0:\n#\n# fmean[i] = fmean0[0]\n#\n# if i == 0: \n# \n# # select first sinusoidal to plot\n# voltage_plot = voltage\n# current_plot = current\n# time_plot = time\n# freq_times_plot = freq_times\n# spike_freq_plot = spike_freq\n# gsyn_plot = gsyn\n# \n# \n# for l in range(len(self.method_interpol)):\n# \n# if \"bin\" in self.method_interpol[l]:\n# \n# # binning and linear interpolation\n# stimulus_signal = stimulus[i_startstop[0]:i_startstop[1]] # cut out relevant signal\n# t_input_signal = t[i_startstop[0]:i_startstop[1]] - t[i_startstop[0]]\n# \n# spike_freq_interp = interp(t, freq_times, spike_freq, left=0, right=0) # interpolate to be eqivalent with input, set zero at beginning and end!\n# freq_out_signal_interp = spike_freq_interp[i_startstop[0]:i_startstop[1]] # cut out relevant signal\n# vamp, mag_vec[l,i], pha_vec[l,i], fmean_all[i], _ = get_magphase(stimulus_signal, t_input_signal, freq_out_signal_interp, t_input_signal, method = \"fft\", f = fu)\n# \n# results = est_quality(t_input_signal, fu, freq_out_signal_interp, self.amp[a_celltype[0]]*mag_vec[l,i], pha_vec[l,i]/ (180 / pi), fmean_all[i]) \n# NI_vec[l,i], VAF_vec[l,i] = results.get('NI'), results.get('VAF')\n# print \"-[bin] NI: \" + str(NI_vec[l,i]) + \", VAF: \" + str(VAF_vec[l,i])\n# \n# if \"syn\" in self.method_interpol[l]:\n# \n# # synaptic integration \n# dt_out = t_input_signal[2] - t_input_signal[1]\n# shift = self.nc_delay/dt_out # shift response by the nc delay to remove offset\n# freq_out_signal_syn = gsyn[i_startstop[0]+shift:i_startstop[1]+shift] # cut out relevant signal\n# \n# vamp, mag_vec[l,i], pha_vec[l,i], fm, _ = get_magphase(stimulus_signal, t_input_signal, freq_out_signal_syn, t_input_signal, method = \"fft\", f = fu)\n# \n# results = est_quality(t_input_signal, fu, freq_out_signal_syn, 
self.amp[a_celltype[0]]*mag_vec[l,i], pha_vec[l,i]/ (180 / pi), fm) \n# NI_vec[l,i], VAF_vec[l,i] = results.get('NI'), results.get('VAF')\n# print \"-[syn] NI: \" + str(NI_vec[l,i]) + \", VAF: \" + str(VAF_vec[l,i])\n# \n# \n# self.barrier() # wait for other nodes\n# \n# #print \"rest: \" + str(vrest) + \" freq_used:\" + str(freq_used) + \" amp_vec:\" + str(amp_vec) + \" mag_vec:\" + str(mag_vec) + \" pha_vec:\" + str(pha_vec)\n# \n# if self.id == 0:\n# \n# for l in range(len(self.method_interpol)): # unwrap \n# pha_vec[l,:] = unwrap(pha_vec[l,:] * (pi / 180)) * (180 / pi) # unwrap for smooth phase\n# \n# # only return fraction of actual signal, it is too long!!! \n# if time_plot[-1] > self.tmax: \n# imax = where(time_plot > self.tmax)[0][0] # for voltage, current and time\n# time_plot = time_plot[0:imax]; current_plot = current_plot[0:imax]; gsyn_plot = gsyn_plot[0:imax]\n# for n in range(self.n_celltypes): \n# voltage_plot[n] = voltage_plot[n][0:imax]\n# \n# if freq_times_plot != []: \n# if freq_times_plot[-1] > self.tmax:\n# imax2 = where(freq_times_plot > self.tmax)[0][0] # for spike frequency \n# freq_times_plot = freq_times_plot[0:imax2]; spike_freq_plot = spike_freq_plot[0:imax2] \n# \n# # normalize synaptic integration with with first magnitude, may by syn itself! 
\n# bvec = [\"syn\" in st for st in self.method_interpol]\n# if np.any(bvec):\n# k = where(bvec) \n# mag_vec[k,:]= mag_vec[0,0]*mag_vec[k,:]/mag_vec[k,0]\n# \n# NI_vec = (freq_used, NI_vec)\n# VAF_vec = (freq_used, VAF_vec)\n# results = {'freq_used':freq_used, 'amp':amp_vec,'mag':mag_vec,'pha':pha_vec,'ca':ca,'voltage':voltage_plot, 't_startstop':t_startstop_plot,\n# 'current':current_plot,'t1':time_plot,'freq_times':freq_times_plot,'spike_freq':spike_freq_plot,\n# 'fmean':mean(fmean),'method_interpol':self.method_interpol, 'NI':NI_vec, 'VAF':VAF_vec}\n# \n# if self.id == 0:\n# pickle.dump( results, gzip.GzipFile( filepath, \"wb\" ) )\n# \n# else:\n# \n# if self.id == 0:\n# results = pickle.load( gzip.GzipFile( filepath, \"rb\" ) )\n# else:\n# results = {'freq_used':[], 'amp':[],'mag':[],'pha':[],'ca':[],'voltage':[], 't_startstop':[],\n# 'current':[],'t1':[],'freq_times':[],'spike_freq':[],\n# 'fmean':[],'method_interpol':self.method_interpol,'NI':[],'VAF':[]} \n# \n# return results\n \n def get_RC(self, opt_plot):\n \n if self.id == 0:\n if \"analytical\" in opt_plot: # simplest case, only uses rm and tau, scaling necessary \n exec self.cell_exe[self.a_celltype[0]]\n sim = Stimulation(cell, temperature = self.temperature)\n rm, cm, taum = sim.get_RCtau()\n else:\n rm = cm = taum = 0\n \n if \"if\" in opt_plot:\n Vrest = cell.soma(0.5).pas.e*mV\n Vth = cell.spkout.thresh*mV \n Vreset = cell.spkout.vrefrac*mV\n else:\n Vreset = 0*mV; Vth = 1*mV; Vrest = 0*mV\n \n sim = None\n cell = None \n else:\n rm = cm = taum = 0\n Vreset = 0*mV; Vth = 1*mV; Vrest = 0*mV\n \n return rm, cm, taum, Vreset, Vth, Vrest\n\n\n def fun_plot(self, currlabel=\"control\", dowhat=\"cnoise\", freq_used=np.array([]), cutf=10, sexp=0, t_stim=100*s, ymax=0, ax=None, SNR=None, VAF=None, t_qual=0, opt_plot=np.array([]), method_interpol_plot=[], do_csd = 1):\n\n SNR_switch = SNR\n VAF_switch = VAF\n \n rm, cm, taum, Vreset, Vth, Vrest = self.get_RC(opt_plot)\n \n if dowhat == \"cnoise\":\n \n if 
do_csd == 0:\n t_qual = 0; SNR_switch = 0; VAF_switch = 0\n\n results = self.fun_cnoise_Stim(t_stim = t_stim, cutf = cutf, sexp = sexp, t_qual = t_qual, freq_used = freq_used, do_csd = do_csd)\n \n freq_used, amp_vec, mag, pha, ca, voltage, current, t1 = results.get('freq_used'), results.get('amp'), results.get('mag'), results.get('pha'), results.get('ca'), results.get('voltage'), results.get('current'), results.get('t1') \n freq_times, spike_freq, fmean, method_interpol, SNR, VAF, Qual = results.get('freq_times'), results.get('spike_freq'), results.get('fmean'), results.get('method_interpol'), results.get('SNR'), results.get('VAF'), results.get('Qual') \n stim, stim_re_mat, current_re, tk, K_mat_old = results.get('stim'), results.get('stim_re_mat'), results.get('current_re'), results.get('tk'), results.get('K_mat')\n \n elif dowhat == \"ssine\":\n \n results = self.fun_ssine_Stim(freq_used = freq_used0)\n \n freq_used, amp_vec, mag, pha, ca, voltage, current, t1 = results.get('freq_used'), results.get('amp'), results.get('mag'), results.get('pha'), results.get('ca'), results.get('voltage'), results.get('current'), results.get('t1') \n freq_times, spike_freq, fmean, method_interpol, VAF = results.get('freq_times'), results.get('spike_freq'), results.get('fmean'), results.get('method_interpol'), results.get('VAF') \n tk = []\n K_mat_old = []\n\n # analyse\n if self.id == 0:\n \n print \"Mean rate: \" + str(fmean)\n \n # Turn it off if set to zero\n if SNR_switch == 0: SNR = None\n if VAF_switch == 0: VAF = None \n\n \n if t_qual > 0:\n \n plt.figure(\"Reconstruct\")\n \n ax1 = subplot(2,1,1)\n \n ax1.plot(np.arange(len(stim))*dt-1, current_re*1e3, 'b', linewidth=1) \n ax1.plot(np.arange(len(stim))*dt-1, (stim)*1e3, 'k-', linewidth=1)\n ax1.plot(np.arange(len(stim))*dt-1, (stim_re_mat[0,:])*1e3, 'r', linewidth=1, alpha=1)\n \n #adjust_spines(ax1, ['left','bottom'], d_out = 10) \n #ax1.axis(xmin=0, xmax=1) \n \n #ax1.axis(ymin=8.3, ymax=10.7)\n 
#ax1.yaxis.set_ticks(array([8.5,9,9.5,10,10.5]))\n #ax1.set_title(\"Reconstruction\") \n \n #ax1.set_xlabel(\"s\") \n #ax1.set_ylabel(\"pA\")\n \n #ax1.text(0.15, 10.7, \"Input current\", color=color3, fontsize = 8)\n #ax1.text(0.8, 10.7, \"Signal\", color=\"#000000\", fontsize = 8)\n #ax1.text(0.0, 8.2, \"Reconstruction\", color=color2, fontsize = 8)\n \n ax2 = subplot(2,1,2)\n ax2.plot(tk, K_mat_old[0], 'k', linewidth=1) \n \n \n self.save_plot(directory = \"./figs/dump/\", prefix = \"reconstruct\")\n \n plt.figure(\"Transfer\")\n \n currtitle = currlabel + \" pop \" + dowhat + \", \" + self.celltype[self.a_celltype[0]] \n \n ax = plot_transfer(currtitle, freq_used, mag, pha, t1, current, voltage[self.a_celltype[0]], freq_times, spike_freq, taum, fmean, self.ihold, rm, Vreset, Vth, Vrest, method_interpol, method_interpol_plot, SNR = SNR, VAF = VAF, ymax = self.ymax, ax = self.ax, linewidth = self.linewidth, color_vec = self.color_vec, alpha = self.alpha, opt_plot = opt_plot) \n \n suptitle(\"Population transfer function of \" + str(self.N[self.a_celltype[0]]) + \" \" + self.celltype[self.a_celltype[0]] + \", amp: \" + str(np.round(self.amp[self.a_celltype[0]],4)) + \", amod: \" + str(self.amod) + \", ih: \" + str(np.round(self.ihold,4)) + \", ih_s: \" + str(np.round(self.ihold_sigma,4)) + \", fm: \" + str(np.round(fmean,2)) + \", fl_s: \" + str(self.fluct_s)) \n \n return VAF, SNR, ax, tk, K_mat_old \n \n\n def save_plot(self, directory = \"./figs/dump/\", prefix = \" \"):\n \n if pop.id == 0:\n \n from datetime import datetime\n idate = datetime.now().strftime('%Y%m%d_%H%M') # %S\n savefig(directory + idate + \"-pop_transfer_\" + prefix + \"_\" + self.celltype[self.a_celltype[0]] + \"_N\" + str(self.N[self.a_celltype[0]]) + \"_ihold\" + str(np.round(self.ihold,4)) + \"_amp\" + str(np.round(self.amp[self.a_celltype[0]],4)) + \".pdf\", dpi = 300) # save it\n\n \n def do_pca_ica(self, t_analysis_delay=0, t_analysis_stop=1, time=0, signals=0, output_dim=10, 
n_processes=32, n_chunks=32, do_ica=1, n_celltype = 0):
        """Run PCA (and optionally ICA, via the MDP toolkit) on recorded signals.

        time    -- sample time vector; time[2]-time[1] is used as the step
        signals -- per-celltype list of signal matrices; signals[n_celltype] is analysed
        t_analysis_delay, t_analysis_stop -- analysis window (same units as time)
        output_dim -- number of principal components to keep
        n_processes, n_chunks -- parallelisation of the MDP flow (MPI path only)
        do_ica  -- if true, additionally run FastICA on the PCA output

        Returns a dict with keys 't', 'pca', 'pca_var', 'pca_var_expl', 'ica'.
        Under MPI the result is cached as a gzipped pickle next to the data.
        """
        
        if self.use_mpi:
            
            filepath = self.data_dir + "/" + str(self.pickle_prefix) + "_results_pop_pca_ica.p"
            
            # Recompute unless a cached result file already exists.
            if self.do_run or (os.path.isfile(filepath) is False):
                
                # PCA
                
                # remove beginning: keep only the analysis window
                dt = time[2]-time[1]
                t = time[int(t_analysis_delay/dt):int(t_analysis_stop/dt)] 
                pca_mat = np.array(signals[n_celltype]).T[int(t_analysis_delay/dt):int(t_analysis_stop/dt),:]
                
                node = mdp.nodes.PCANode(output_dim=output_dim, svd=True)
                
                # pad with zeros to be able to split into chunks!
                n_add = n_chunks-np.remainder(np.shape(pca_mat)[0],n_chunks)
                mat_add = np.zeros((n_add, np.shape(pca_mat)[1]))
                pca_mat_add = np.concatenate((pca_mat, mat_add))
                pca_mat_iter = np.split(pca_mat_add, n_chunks) 
                
                flow = mdp.parallel.ParallelFlow([node])
                
                start_time = ttime.time()
                
                with mdp.parallel.ProcessScheduler(n_processes=n_processes, verbose=True) as scheduler:
                    flow.train([pca_mat_iter], scheduler=scheduler) # input has to be list, why??
                
                process_time = ttime.time() - start_time
                
                s = np.array(flow.execute(pca_mat_iter))
                s = s[0:len(t),:] # resize to length of t! (drops the zero padding)
                
                #print "node.d: ",node.d
                # Normalised eigenvalue spectrum = per-component variance fraction.
                var_vec = node.d/sum(node.d)
                print 'Explained variance (', 0, ') : ', round(node.explained_variance,4)
                print 'Variance (' , 0, ') : ', var_vec
                print 'Time to run (' , 0, ') : ', process_time
                
                s2 = []
                if do_ica:
                    # ICA
                    #s2 = mdp.fastica(s)
                    ica = mdp.nodes.FastICANode() #CuBICANode()
                    ica.train(s)
                    s2 = ica(s)
                
                results = {'t':t, 'pca':s,'pca_var':var_vec,'pca_var_expl':round(node.explained_variance,4), 'ica':s2}
                
                if self.id == 0:
                    if self.dumpsave == 1:
                        pickle.dump( results, gzip.GzipFile( filepath, "wb" ) )
                
            else:
                
                # NOTE(review): non-root ranks leave `results` unbound in this
                # cached path, so the final `return results` would raise
                # NameError there — confirm only rank 0 uses the return value.
                if self.id == 0:
                    results = pickle.load( gzip.GzipFile( filepath, "rb" ) ) 
            
        else:
            
            # Serial (non-MPI) path: same windowing, single-process training.
            # remove beginning
            dt = time[2]-time[1]
            t = time[int(t_analysis_delay/dt):int(t_analysis_stop/dt)] 
            pca_mat = np.array(signals[n_celltype]).T[int(t_analysis_delay/dt):int(t_analysis_stop/dt),:]
            
            node = mdp.nodes.PCANode(output_dim=output_dim, svd=True)

            start_time = ttime.time()
            
            node.train(pca_mat)
            s = node(pca_mat)
            
            process_time = ttime.time() - start_time 
            #print "node.d: ",node.d
            var_vec = node.d/sum(node.d)
            print 'Explained variance (', 0, ') : ', round(node.explained_variance,4)
            print 'Variance (' , 0, ') : ', var_vec
            print 'Time to run (' , 0, ') : ', process_time
            
            s2 = []
            if do_ica:
                # ICA
                #s2 = mdp.fastica(s)
                ica = mdp.nodes.FastICANode() #CuBICANode()
                ica.train(s)
                s2 = ica(s)
            
            results = {'t':t, 'pca':s,'pca_var':var_vec,'pca_var_expl':round(node.explained_variance,4), 'ica':s2}

        return results
    
    
    def net_run(self, tstop, simprop = "default", t_analysis_delay=0, t_analysis_stop=1, stim_start=0):
        """Run the network to tstop (or load cached results) and return traces.

        Results are cached in an HDF5 file keyed by self.pickle_prefix; set
        self.do_run to force a fresh simulation.  Returns (time, voltage,
        current, t_all_vec_vec, id_all_vec_vec, gsyns, freq_times, w_mat,
        winh_mat); the list outputs stay empty on non-root ranks when loading
        from cache.
        """

        freq_times = []
        t_all_vec_vec = []
        id_all_vec_vec = []
        gsyns = []
        w_mat = []
        winh_mat = []
        time = []
        voltage = []
        current = []
        
        filepath = self.data_dir + "/" + str(self.pickle_prefix) + "_results_pop_randomnet.hdf5"
        
        if self.do_run or (os.path.isfile(filepath) is False):
            
            self.run(tstop)
            
            self.no_fmean = True
            results = self.get()
            
            time, voltage, current, fmean, gsyn = results.get('time'), results.get('voltage'), results.get('current'), results.get('fmean'), results.get('gsyn')
            freq_times, spike_freq, t_all_vec_vec, id_all_vec_vec, gsyns, w_mat, winh_mat = results.get('freq_times'), results.get('spike_freq'), results.get('t_all_vec_vec'), results.get('id_all_vec_vec'), results.get('gsyns'), results.get('w_mat'), results.get('winh_mat')
            
            if self.id == 0:
                if self.dumpsave == 1:
                    #pickle.dump( results, open( filepath, "wb" ) ) # gzip.GzipFile
                    
                    print "- Saving", filepath
                    
                    f = h5py.File(filepath, 'w')
                    f.create_dataset('time', data=time, compression='gzip', shuffle=True)
                    f.create_dataset('voltage', data=np.array(voltage), compression='gzip', 
shuffle=True)
                    f.create_dataset('current', data=current, compression='gzip', shuffle=True)
                    f.create_dataset('freq_times', data=freq_times, compression='gzip', shuffle=True)
                    
                    #f.create_dataset('t_all_vec_vec', data=np.array(t_all_vec_vec), compression='lzf', shuffle=True)
                    #f.create_dataset('id_all_vec_vec', data=np.array(id_all_vec_vec), compression='lzf', shuffle=True)
                    #f.create_dataset('gsyns', data=np.array(gsyns), compression='lzf', shuffle=True)

                    # One HDF5 group per cell type: spike times, spike ids,
                    # and synaptic conductances.
                    for i in range(len(self.N)):
                        subgroup = f.create_group("cell" + str(i))
                        subgroup.create_dataset('t_all_vec_vec', data=t_all_vec_vec[i], compression='gzip', shuffle=True)
                        subgroup.create_dataset('id_all_vec_vec', data=id_all_vec_vec[i], compression='gzip', shuffle=True)
                        subgroup.create_dataset('g', data=gsyns[i], compression='gzip', shuffle=True)

                        #for j in range(len(gsyns[i])):
                        # subsubgroup = subgroup.create_group("gsyn" + str(j))
                        # subsubgroup.create_dataset('g', data=gsyns[i][j], compression='lzf', shuffle=True)
                    
                    f.close() 
                    print "- Save finished"
                
                #filename = slugify(simprop)

                #syn_grc = np.array(gsyns[0])
                
                #import scipy
                #from scipy import io
                
                #print "Saving .mat"
                #data = {}
                #data['syn_grc'] = syn_grc[:,int(t_analysis_delay/self.bin_width):int(t_analysis_stop/self.bin_width)]
                #data['time'] = freq_times[int(t_analysis_delay/self.bin_width):int(t_analysis_stop/self.bin_width)]-stim_start
                #scipy.io.savemat('./figs/' + filename + '.mat',data)
            
        else:
            
            # Cached path: only the root rank reads the HDF5 file back;
            # other ranks return the empty lists initialised above.
            if self.id == 0:
                #results = pickle.load( open( filepath, "rb" ) ) #gzip.GzipFile
                f = h5py.File(filepath, 'r')
                
                time = np.array(f['time'])
                voltage = np.array(f['voltage'])
                current = np.array(f['current'])
                freq_times = np.array(f['freq_times'])
                
                
                for i in range(len(self.N)):
                    t_all_vec_vec.append(np.array(f['/cell' + str(i) + '/t_all_vec_vec'])) 
                    id_all_vec_vec.append(np.array(f['/cell' + str(i) + '/id_all_vec_vec'])) 
                    gsyns.append(np.array(f['/cell' + str(i) + '/g'])) 
                
                #gsyns.append([])
                #for j in range(self.N[i]):
                # gsyns[i].append(np.array(f['/cell' + str(i) + '/gsyn' + str(j) + '/g' ])) 

                f.close()
            
        return time, voltage, current, t_all_vec_vec, id_all_vec_vec, gsyns, freq_times, w_mat, winh_mat 

    
    def delall(self): 
        """Tear down the network completely: clear gids, destroy all cells,
        and drop the netcon bookkeeping lists."""
        
        if self.use_mpi: 
            # Release every gid registered with the NEURON ParallelContext.
            self.pc.gid_clear()
            print "- clearing gids"
        else:
            pass
            #h.topology() 
            #for sec in h.allsec():
            # print "- deleting section:", sec.name()
            # #h("%s{delete_section()}"%sec.name())
            # sec.push()
            # h.delete_section()
            #h.topology()
        
        for n in range(self.n_celltypes): 
            for m in self.cells[n]:
                m.destroy()
                del m 
        del self.cells
        del self.nc_vecstim
        del self.netcons
        del self.nclist
        print h.topology() 
    
    
    def delrerun(self): 
        """Reset stimulation and recording state so the same network can be
        run again with fresh inputs; the cells themselves stay alive."""
        
        del self.nc_vecstim
        del self.netcons
        del self.nclist
        del self.vecstim
        del self.spike_vec
        del self.ST_stims
        del self.PF_stims
        
        self.netcons = [] 
        self.nclist = []
        self.nc_vecstim = []
        self.vecstim = []
        self.spike_vec = []
        self.ST_stims = []
        self.PF_stims = []
        
        self.t_vec = []
        self.id_vec = []
        self.rec_v = []
        
        # Re-create per-celltype spike/voltage recording vectors.
        for n in range(self.n_celltypes):
            if self.use_mpi:
                self.t_vec.append(h.Vector()) # np.array([0])
                self.id_vec.append(h.Vector()) # np.array([-1], dtype=int)
            else:
                self.t_vec.append([])
            
            self.rec_v.append(h.Vector())

            # NOTE(review): in the MPI branch t_vec[n] is an h.Vector, so
            # appending another h.Vector to it looks suspicious — confirm
            # spikes are recorded via the ParallelContext in that mode.
            for cell in self.cells[n]:
                self.t_vec[n].append(h.Vector())
                cell.nc_spike.record(self.t_vec[n][-1]) 

        self.flucts = [] # Fluctuating inputs on this host
        self.noises = [] # Random number generators on this host
        self.plays = [] # Play inputs on this host
        self.rec_is = []
        self.trains = [] 
        
        self.ic_holds = []
        self.i_holdrs = []
        self.i_holds = []
        self.ic_starts = [] 
        self.vc_starts = []
        self.ic_steps = []
        self.tvecs = []
        self.ivecs = [] 
        self.noises = []
        self.record_syn = []
        self.id_all_vec_input = []
        self.t_all_vec_input = []
        self.syn_ex_dist = []
        self.syn_inh_dist = []

    
# test code
if 
__name__ == '__main__':\n \n # mpiexec -f ~/machinefile -enable-x -n 96 python Population.py --noplot\n \n from Stimulation import *\n from Plotter import *\n from Stimhelp import *\n\n from cells.IfCell import *\n import scipy\n from scipy import io\n \n dt = 0.1*ms\n dt = 0.025*ms\n \n do_run = 1\n if results.norun: # do not run again use pickled files!\n print \"- Not running, using saved files\"\n do_run = 0\n \n \n do = np.array([\"transfer\"])\n opts = np.array([\"if_cnoise\", \"grc_cnoise\"]) #ssine \n #opts = np.array([\"if_cnoise\"]) #ssine\n #opts = np.array([\"if_recon\"]) #ssine\n opts = np.array([\"if_syn_CFvec\"]) \n #opts = np.array([\"prk_cnoise\"])\n opts = np.array([\"if_cnoise\", \"if_ssine\"]) #ssine \n opts = np.array([\"if_ssine\"]) #ssine \n opts = np.array([\"grc_cnoise_addn_cn_\", \"grc_cnoise_cn_\", \"grc_cnoise_addn_cn_a01\"]) \n opts = np.array([\"grc_cnoise_addn100_cn_\", \"grc_cnoise_addn_cn_\", \"grc_cnoise_cn_\"]) \n opts = np.array([\"grc_cnoise_addn100_cn_\"])\n opts = np.array([\"grc_cnoise_addn100_\"])\n opts = np.array([\"grc_cnoise_addn_cn_\"])\n #opts = np.array([\"grc_cnoise\"])\n #opts = np.array([\"grc_cnoise_cn\", \"grc_cnoise_addn_cn\"]) \n #opts = np.array([\"if_cnoise_addn\", \"if_cnoise\"]) \n \n do = np.array([\"timeconst\"])\n \n #do = np.array([\"transfer\"])\n #opts = np.array([\"grc_cnoise_syn\"])\n #opts = np.array([\"grc_recon_syn\"])\n \n #do = np.array([\"prk_test\"])\n \n \n if \"prk_test\" in do:\n \n import multiprocessing\n from Purkinje import Purkinje\n cell = Purkinje() \n\n # set up recording\n # Time\n rec_t = h.Vector()\n rec_t.record(h._ref_t)\n \n # Voltage\n rec_v = h.Vector()\n rec_v.record(cell.soma(0.5)._ref_v)\n\n tstop = 500\n v_init = -60\n \n stim = h.IClamp(cell.soma(0.5))\n stim.amp = 0.0/nA\n stim.delay = 1\n stim.dur = 1000\n \n cpu = multiprocessing.cpu_count()\n h.load_file(\"parcom.hoc\")\n p = h.ParallelComputeTool()\n p.change_nthread(cpu,1)\n p.multisplit(1)\n print 'cpus:', cpu\n 
\n h.load_file(\"stdrun.hoc\")\n h.celsius = 37 \n h.init()\n h.tstop = tstop\n dt = 0.025 # ms\n h.dt = dt\n h.steps_per_ms = 1 / dt \n h.v_init = v_init \n \n h.finitialize()\n h.run()\n \n t1 = np.array(rec_t)\n voltage = np.array(rec_v)\n s, spike_times = get_spikes(voltage, -20, t1)\n\n print 1000/diff( spike_times)\n\n plt.figure()\n plt.subplot(2,1,1)\n plt.plot(t1, voltage)\n \n plt.show()\n\n\n if \"transfer\" in do:\n \n # SET DEFAULT VALUES FOR THIS PLOT\n fig_size = [11.7, 8.3]\n params = {'backend': 'ps', 'axes.labelsize': 9, 'axes.linewidth' : 0.5, 'title.fontsize': 8, 'text.fontsize': 9,\n 'legend.borderpad': 0.2, 'legend.fontsize': 8, 'legend.linewidth': 0.1, 'legend.loc': 'best', # 'lower right' \n 'legend.ncol': 4, 'xtick.labelsize': 8, 'ytick.labelsize': 8, 'text.usetex': False, 'figure.figsize': fig_size}\n rcParams.update(params) \n \n \n freq_used0 = np.array([1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 35, 40, 45, 50, 55, 60, 65, 70, 80, 100, 1000])*Hz\n #freq_used0 = np.concatenate((arange(0.1, 1, 0.1), arange(1, 501, 1) ))\n freq_used0 = np.array([1, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30, 32, 34, 36, 38, 40, 42, 44, 46, 48, 50, 52, 54, 56, 58, 60, 62, 64, 66, 68, 70, 72, 74, 76, 78, 80, 82, 84, 86, 88, 90, 92, 94, 96, 98, 100, 200, 400, 600, 800, 1000])\n \n SNR = None \n NI = None\n VAF = None\n \n t_stim = 1000*s # only for cnoise \n \n opt_plot = np.array([\"only_mag\",\"normalize\", \"dB\"]) # \n #opt_plot = np.array([\"normalize\", \"dB\"]) # \n \n color_vec = (np.array([\"Red\", \"Blue\", \"HotPink\", \"Indigo\"]), np.array([\"Blue\", \"Orange\", \"HotPink\", \"Indigo\"]))\n #color=cm.jet(1.*i/x)\n \n method_interpol = np.array(['bin','syn']) \n method_interpol = np.array(['bin']) \n \n for i, o in enumerate(opts):\n \n dt = 0.025*ms\n bin_width = 5*ms\n bin_width = dt\n jitter = 0*ms\n \n n_syn_ex = [0] \n g_syn_ex = [1]\n noise_syn = 0 \n inh_hold = 
0 \n n_syn_inh = [0] \n g_syn_inh = [1]\n tau1_ex = 0\n tau2_ex = 10*ms\n tau1_inh = 0\n tau2_inh = 100*ms\n \n cutf = 20\n sexp = -1\n\n cutf = 0\n sexp = 0\n \n ihold = [10]\n amod = 0.1 # relative value\n give_freq = True\n \n anoise = [0]\n fluct_tau = 0*ms \n \n N = [100]\n \n amp = 0 # absolute value\n fluct_s = [0] # absolute value 0.0008\n ihold_sigma = [0] # 0.01 absolute value\n \n CF_var = [[5,10,20]]\n CF_var = False\n \n syn_tau1 = 5*ms\n syn_tau2 = 5*ms\n \n do_csd = 1\n \n if \"if\" in o:\n \n do_csd = 1\n \n color_vec = (np.array([\"Blue\"]), np.array([\"Blue\"]))\n #color_vec = (np.array([\"Red\"]), np.array([\"Red\"]))\n \n cellimport = []\n celltype = [\"IfCell\"]\n #cell_exe = [\"cell = IfCell()\"]\n #cell_exe = [\"cell = IfCell(e = -70*mV, thresh = -69*mV, vrefrac = -70*mV)\"] \n #cell_exe = [\"cell = IfCell(e = 0*mV, thresh = 1*mV, vrefrac = 0*mV)\"]\n \n # Brunel\n #cell_exe = [\"cell = IfCell(C = 0.0005 *uF, R = 40*MOhm, e = -70*mV, thresh = -50*mV, vrefrac = -56*mV); cell.add_resonance(tau_r = 100*ms, gr = 0.025*uS)\"] \n \n #cell_exe = [\"cell = IfCell(C = 0.0001*uF, R = 40*MOhm, sigma_C = 0.2, sigma_R = 0.2)\"] \n #cell_exe = [\"cell = IfCell(C = 0.0001*uF, R = 40*MOhm)\"] # tau = 4 ms\n #cell_exe = [\"cell = IfCell(C = 0.0001*uF, R = 40*MOhm, s_reset_noise = 0*mV)\"] # tau = 4 ms\n \n #GrC resting: 737 MOhm, 2.985e-06 uF tau: 0.0022 s\n #GrC transfer fit: tau: 0.027 s => with 2.985e-06 uF, R = 0.027/2.985e-12 = 9045 MOhm\n \n #cell_exe = [\"cell = IfCell(C = 2.985e-06*uF, R = 9045*MOhm)\"] \n \n thresh = -41.8 \n R = 5227*MOhm\n #tau_passive = 3e-06*5227 = 15.7ms\n \n cell_exe = [\"cell = IfCell(C = 3.0e-06*uF, R = \" + str(R) + \", e = -71.5*mV, thresh =\" + str(thresh) + \", vrefrac = -71.5*mV)\"]\n \n prefix = \"if_tf\"\n \n istart = 0 \n istop = 0.01\n di = 0.00001\n \n \n syn_tau1 = 10*ms\n syn_tau2 = 10*ms\n \n # Indirect\n give_freq = True\n ihold = [40]\n amod = 1 # relative value\n \n anoise = [0] \n fluct_tau = 0*ms \n \n 
#anoise = 0.1\n #fluct_tau = 100*ms\n \n# # Direct\n# give_freq = False\n# ihold = [0.00569223341176]\n# amod = None\n# amp = 7.31353725e-06\n# \n# anoise = None\n# fluct_s = [3.65676863e-06] \n# fluct_tau = 0*ms\n# \n# # Low CF, No low noise\n# N = [10000]\n# give_freq = False\n# ihold = [0.004]\n# ihold_sigma = [0.1/2] # 0.1/2 0.01 realtive value\n# amod = None\n# amp = 0.0021\n# \n# anoise = None\n# fluct_s = [0.00] # .005\n# fluct_tau = 0*ms\n \n \n# # Low CF, With low noise\n# N = [10000]\n# give_freq = False\n# ihold = [0.002]\n# ihold_sigma = [0.1/2] # 0.1/2 0.01 realtive value\n# amod = None\n# amp = 0.001\n# \n# anoise = None\n# fluct_s = [0.002] # .005\n# fluct_tau = 100*ms\n \n if \"resif\" in o:\n \n do_csd = 1\n \n color_vec = (np.array([\"Blue\"]), np.array([\"Blue\"]))\n #color_vec = (np.array([\"Red\"]), np.array([\"Red\"]))\n \n cellimport = []\n celltype = [\"IfCell\"]\n \n gr = 5.56e-05*uS \n tau_r = 19.6*ms\n R = 5227*MOhm\n delta_t = 4.85*ms\n thresh = (0.00568*nA * R) - 71.5*mV # \n thresh = -41.8 \n \n cellimport = []\n celltype = \"IfCell\"\n cell_exe = \"cell = IfCell(C = 3e-06*uF, R = \" + str(R) + \", e = -71.5*mV, thresh =\" + str(thresh) + \", vrefrac = -71.5*mV, dgk =\" + str(gr) + \", egk = -71.5*mV, ctau =\" + str(tau_r) + \")\"\n\n prefix = \"resif_tf\"\n \n istart = 0 \n istop = 0.01\n di = 0.00001\n \n \n syn_tau1 = 10*ms\n syn_tau2 = 10*ms\n \n # Indirect\n give_freq = True\n ihold = [40]\n amod = 1 # relative value\n \n anoise = [0] \n fluct_tau = 0*ms \n dt = 0.1*ms\n \n \n \n if \"if_syn\" in o:\n \n N = [1] \n ihold = [40] \n amod = 1 # relative value\n \n prefix = \"if_syntf\" \n \n n_syn_ex = 1 \n\n g_syn_ex = 0 \n \n noise_syn = 0\n\n fluct_tau = 0*ms \n \n freq_used = np.array([])\n \n tau1_ex=0*ms\n tau2_ex=10*ms\n \n anoise = [0]\n\n \n if \"grc\" in o:\n \n color_vec = (np.array([\"Blue\"]), np.array([\"Blue\"]))\n\n cellimport = [\"from GRANULE_Cell import Grc\"]\n celltype = [\"Grc\"]\n cell_exe = [\"cell = 
Grc(np.array([0.,0.,0.]))\"] \n \n prefix = \"grc_tf\" \n\n istart = 0 \n istop = 0.1\n di = 0.01\n \n syn_tau1 = 10*ms\n syn_tau2 = 10*ms\n \n # Indirect\n give_freq = True\n ihold = [40]\n amod = 1 # relative value\n \n anoise = [0]\n fluct_tau = 0*ms \n \n #anoise = 0.1\n #fluct_tau = 100*ms\n \n# # Direct\n# give_freq = False\n# ihold = [0.0058021085712642992]\n# amod = None\n# amp = 7.31353725e-06\n# \n# anoise = None\n# fluct_s = [3.65676863e-06] \n# fluct_tau = 0*ms\n# \n# # Low CF, No low noise\n# N = [50]\n# give_freq = False\n# ihold = [0.0049]\n# ihold_sigma = [0.1/2] # 0.1/2 0.01 realtive value\n# amod = None\n# amp = 0.0021\n# \n# anoise = None\n# fluct_s = [0.00] # .005\n# fluct_tau = 0*ms\n# \n# \n# # Low CF, With low noise\n# N = [10000]\n# give_freq = False\n# ihold = [0.003]\n# ihold_sigma = [0.1/2] # 0.1/2 0.01 realtive value\n# amod = None\n# amp = 0.001\n# \n# anoise = None\n# fluct_s = [0.002] # .005\n# fluct_tau = 100*ms\n \n \n use_multisplit = False\n use_mpi = True\n simstep = 1*s\n \n if \"prk\" in o:\n \n N = [1] \n ihold = [60] \n \n color_vec = (np.array([\"Blue\"]), np.array([\"Blue\"]))\n\n cellimport = [\"from Purkinje import Purkinje\"]\n celltype = [\"Prk\"]\n cell_exe = [\"cell = Purkinje()\"] \n \n prefix = \"prk_tf\" \n\n temperature = 37\n\n istart = 0 \n istop = 0.1\n di = 0.005\n \n use_multisplit = True\n use_mpi = False\n \n t_stim = 5*s # only for cnoise \n simstep = 1*s\n\n\n if \"grc_syn\" in o:\n \n N = [1] \n ihold = [125] \n amod = 1 # relative value\n \n prefix = \"grc_syntf\" \n \n cutf = 20\n sexp = -1\n \n cutf = 0\n sexp = 0\n \n n_syn_ex = 1 \n g_syn_ex = -1\n noise_syn = 1\n\n n_syn_inh = -1\n inh_hold = 0\n g_syn_inh = 0\n \n fluct_tau = 0*ms \n \n freq_used = np.array([])\n \n anoise = 0\n \n \n if \"_addn\" in o:\n \n anoise = [6] # RESPONSIBLE FOR FILTERING EFFECT!!!\n fluct_tau = 1*ms \n prefix = prefix + \"_addn\"\n color_vec = (np.array([\"Red\"]), np.array([\"Red\"]))\n \n if \"_addn100\" in o:\n \n 
anoise = [2] # RESPONSIBLE FOR FILTERING EFFECT!!!\n fluct_tau = 100*ms \n prefix = prefix + \"100\"\n color_vec = (np.array([\"Green\"]), np.array([\"Green\"]))\n \n if \"_cn_\" in o:\n \n cutf = 20\n sexp = -1\n prefix = prefix + \"_cn\"\n \n if \"_a01\" in o:\n \n amod=0.1\n prefix = prefix + \"_a01\"\n \n\n \n plt.figure(i)\n pickle_prefix = \"Population.py_\" + prefix\n \n #comm = MPI.COMM_WORLD\n #comm.Barrier() # wait for other nodes\n \n pop = Population(cellimport = cellimport, celltype = celltype, cell_exe = cell_exe, N = N, temperature = 37, ihold = ihold, ihold_sigma = ihold_sigma, amp = amp, amod = amod, give_freq = give_freq, do_run = do_run, pickle_prefix = pickle_prefix, istart = istart, istop = istop, di = di, dt = dt) \n pop.bin_width = bin_width\n pop.jitter = jitter\n pop.anoise = anoise\n pop.fluct_s = fluct_s \n pop.fluct_tau = fluct_tau \n pop.method_interpol = method_interpol \n pop.no_fmean = False\n pop.CF_var = CF_var\n \n pop.tau1_ex=tau1_ex\n pop.tau2_ex=tau2_ex\n pop.tau1_inh=tau1_inh\n pop.tau2_inh=tau2_inh\n \n pop.n_syn_ex = n_syn_ex \n pop.g_syn_ex = g_syn_ex \n \n pop.noise_syn = noise_syn \n pop.inh_hold = inh_hold \n pop.n_syn_inh = n_syn_inh \n pop.g_syn_inh = g_syn_inh\n \n pop.force_run = False\n pop.use_multisplit = use_multisplit\n pop.use_mpi = use_mpi\n pop.simstep = simstep\n pop.use_local_dt = False\n pop.syn_tau1 = syn_tau1\n pop.syn_tau2 = syn_tau2\n pop.plot_input = False\n \n \n if n_syn_inh == -1:\n pop.connect_gfluct(g_i0=g_syn_inh)\n \n #pop.test_mod(n_syn_ex = n_syn_ex, g_syn_ex = g_syn_ex, noise_syn = noise_syn, inh_hold = inh_hold, n_syn_inh = n_syn_inh, g_syn_inh = g_syn_inh, do_plot = True)\n \n if \"ssine\" in o:\n pop.color_vec = color_vec\n #pop.color_vec = (np.array([\"Red\", \"Orange\", \"HotPink\", \"Indigo\"]), np.array([\"Red\", \"Orange\", \"HotPink\", \"Indigo\"])) \n pop.fun_plot(currlabel = \"control\", dowhat = \"ssine\", freq_used = freq_used0, opt_plot = opt_plot)\n\n pop.save_plot(directory = 
\"./figs/dump/\") \n \n if \"cnoise\" in o:\n \n freq_used = np.array([])\n pop.color_vec = color_vec\n #pop.color_vec = (np.array([\"Blue\", \"Green\", \"DimGray\", \"DarkGoldenRod\"]), np.array([\"Blue\", \"Green\", \"DimGray\", \"DarkGoldenRod\"])) \n pop.fun_plot(currlabel = \"control\", dowhat = \"cnoise\", t_stim = t_stim, cutf = cutf, sexp = sexp, t_qual = 0, opt_plot = opt_plot, freq_used = freq_used, do_csd = do_csd)\n \n pop.save_plot(directory = \"./figs/dump/\") \n \n \n if \"recon\" in o:\n \n pop.color_vec = color_vec \n #VAF, SNR, ax, tk, K_mat_old = pop.fun_plot(currlabel = \"control\", dowhat = \"cnoise\", t_stim = t_stim, cutf = cutf, sexp = sexp, t_qual = 0, opt_plot = opt_plot, n_syn_ex = n_syn_ex, g_syn_ex = g_syn_ex, noise_syn = noise_syn, inh_hold = inh_hold, n_syn_inh = n_syn_inh, g_syn_inh = g_syn_inh, SNR=0, freq_used = freq_used)\n \n # RECONSTRUCT!\n freq_used = np.array([9, 47, 111, 1000])*Hz\n t_stim = 10*s\n\n tk = arange(0,0.8192*2,pop.dt)\n K_mat_old = zeros((len(method_interpol),len(tk)), dtype=complex)\n \n if pop.id == 0:\n\n sigma = 0.1e-3\n a=0.1\n t0 = tk[floor(len(tk)/2)]\n K_mat_old[0] = gauss_func(tk, a, t0, sigma)\n \n K_mat_old = np.array([])\n\n results = pop.fun_cnoise_Stim(t_stim = t_stim, cutf = cutf, sexp = sexp, t_qual = 5, n_syn_ex = n_syn_ex, g_syn_ex = g_syn_ex, noise_syn = noise_syn, inh_hold = inh_hold, n_syn_inh = n_syn_inh, g_syn_inh = g_syn_inh, freq_used = freq_used, K_mat_old = K_mat_old, seed = 311)\n \n freq_used, amp_vec, mag, pha, ca, voltage, current, t1 = results.get('freq_used'), results.get('amp'), results.get('mag'), results.get('pha'), results.get('ca'), results.get('voltage'), results.get('current'), results.get('t1') \n freq_times, spike_freq, fmean, method_interpol, SNR, VAF, Qual = results.get('freq_times'), results.get('spike_freq'), results.get('fmean'), results.get('method_interpol'), results.get('SNR'), results.get('VAF'), results.get('Qual') \n stim, resp_mat, stim_re_mat = 
results.get('stim'), results.get('resp_mat'), results.get('stim_re_mat')\n \n if pop.id == 0:\n \n plt.figure('Reconstruct')\n axR0 = plt.subplot(4,1,1)\n axR1 = plt.subplot(4,1,2)\n axR2 = plt.subplot(4,1,3)\n axR3 = plt.subplot(4,1,4)\n \n axR0.plot(np.arange(len(stim))*pop.dt, resp_mat[0,:])\n axR0.axis(xmin=0.9, xmax=1)\n #axR0.plot(t1, voltage[0])\n axR1.plot(np.arange(len(stim))*pop.dt, stim, 'b')\n axR1.axis(xmin=0.9, xmax=1)\n axR2.plot(np.arange(len(stim))*pop.dt, stim_re_mat[0,:], 'r')\n axR2.axis(xmin=0.9, xmax=1)\n axR3.plot(tk, K_mat_old[0])\n plt.savefig(\"./figs/dump/Reconstruct.pdf\", dpi = 300, transparent=True) # save it\n \n pop = None\n \n \n plt.show()\n \n \n if \"timeconst\" in do:\n \n from lmfit import minimize, Parameters\n \n # SET DEFAULT VALUES FOR THIS PLOT\n fig_size = [11.7, 8.3]\n params = {'backend': 'ps', 'axes.labelsize': 9, 'axes.linewidth' : 0.5, 'title.fontsize': 8, 'text.fontsize': 9,\n 'legend.borderpad': 0.2, 'legend.fontsize': 8, 'legend.linewidth': 0.1, 'legend.loc': 'best', # 'lower right' \n 'legend.ncol': 4, 'xtick.labelsize': 8, 'ytick.labelsize': 8, 'text.usetex': False, 'figure.figsize': fig_size}\n rcParams.update(params) \n \n dt = 0.025*ms\n \n prefix = \"timeconst\"\n pickle_prefix = \"Population.py_\" + prefix\n \n stimtype = \"inh_50ms_20ms\"\n \n if stimtype == \"ex_20ms\":\n \n trun = 2.9\n tstart = 1.8\n tstop = 2.7\n\n celltype = [\"IfCell\"]\n cell_exe = [\"cell = IfCell(C = 0.0001*uF, R = 200*MOhm)\"]\n N = [5000]\n \n pop = Population(celltype = celltype, cell_exe = cell_exe, N = N, temperature = 0, do_run = do_run, pickle_prefix = pickle_prefix, dt = dt) \n \n pop.method_interpol = np.array([\"bin\", \"syn\"])\n pop.method_interpol = np.array([\"bin\"])\n \n modulation_vec = pop.set_PulseStim(start_time=[100*ms], dur=[3000*ms], steadyf=[100*Hz], pulsef=[150*Hz], pulse_start=[2000*ms], pulse_len=[500*ms], weight0=[1*nS], tau01=[0*ms], tau02=[20*ms], weight1=[0*nS], tau11=[0*ms], tau12=[1*ms])\n \n 
params = Parameters()\n params.add('amp', value=0.1)\n params.add('shift', value=10)\n params.add('tau1', value=1, vary=False) # alpha! \n params.add('tau2', value=20*ms) \n \n \n if stimtype == \"ex_gr\":\n \n trun = 6.9\n tstart = 4.8\n tstop = 6.5\n\n cellimport = [\"from GRANULE_Cell import Grc\"]\n celltype = [\"Grc\"]\n cell_exe = [\"cell = Grc(np.array([0.,0.,0.]))\"]\n N = [4096*10]\n \n pop = Population(cellimport = cellimport, celltype = celltype, cell_exe = cell_exe, N = N, temperature = 37, do_run = do_run, pickle_prefix = pickle_prefix, dt = dt) \n \n pop.method_interpol = np.array([\"bin\", \"syn\"])\n pop.method_interpol = np.array([\"bin\"])\n \n modulation_vec = pop.set_PulseStim(start_time=[100*ms], dur=[7000*ms], steadyf=[20*Hz], pulsef=[30*Hz], pulse_start=[5000*ms], pulse_len=[500*ms])\n \n params = Parameters()\n params.add('amp', value=0.1)\n params.add('shift', value=10)\n params.add('tau1', value=1, vary=False) # alpha! \n params.add('tau2', value=20*ms) \n \n \n if stimtype == \"inh_50ms_20ms\":\n \n trun = 2.9\n tstart = 1.8\n tstop = 2.7\n \n celltype = [\"IfCell\", \"IfCell\"]\n cell_exe = [\"cell = IfCell()\", \"cell = IfCell()\"]\n \n N = [10000,10000]\n \n pop = Population(celltype = celltype, cell_exe = cell_exe, N = N, temperature = 0, do_run = do_run, pickle_prefix = pickle_prefix, dt = dt) \n \n pop.method_interpol = np.array([\"bin\", \"syn\"])\n pop.method_interpol = np.array([\"bin\"])\n \n modulation_vec = pop.set_PulseStim(start_time=[100*ms,100*ms], dur=[3000*ms,3000*ms], steadyf=[100*Hz,50*Hz], pulsef=[100*Hz,80*Hz], pulse_start=[2000*ms,2000*ms], pulse_len=[500*ms,500*ms], weight0=[1*nS,1*nS], tau01=[1*ms,1*ms], tau02=[20*ms,20*ms], weight1=[0,0], tau11=[0*ms,0*ms], tau12=[1*ms,1*ms])\n\n pop.connect_cells(conntype='inh', weight=0.001, tau=50)\n \n params = Parameters()\n params.add('amp', value=-0.1)\n params.add('shift', value=10)\n params.add('tau1', value=1, vary=False) # alpha! 
\n params.add('tau2', value=20*ms)\n \n \n if stimtype == \"inh_gr\":\n\n trun = 9.9 \n tstart = 4.8\n tstop = 8\n \n cellimport = [\"from GRANULE_Cell import Grc\", \"from templates.golgi.Golgi_template import Goc\"]\n celltype = [\"Grc\",\"Goc_noloop\"]\n cell_exe = [\"cell = Grc(np.array([0.,0.,0.]))\",\"cell = Goc(np.array([0.,0.,0.]))\"]\n N = [100,4]\n #N = [4096, 27]\n #N = [4096*5, 27*5]\n\n pop = Population(cellimport = cellimport, celltype = celltype, cell_exe = cell_exe, N = N, temperature = 37, do_run = do_run, pickle_prefix = pickle_prefix, dt = dt) \n \n pop.method_interpol = np.array([\"bin\", \"syn\"])\n pop.method_interpol = np.array([\"bin\"])\n \n modulation_vec = pop.set_PulseStim(start_time=[100*ms,100*ms], dur=[9800*ms,9800*ms], steadyf=[60*Hz,10*Hz], pulsef=[60*Hz,22*Hz], pulse_start=[5000*ms,5000*ms], pulse_len=[1500*ms,1500*ms])\n\n pop.connect_cells(conntype='inh_gr', weight = 0.3)\n \n params = Parameters()\n params.add('amp', value=-0.1)\n params.add('shift', value=10)\n params.add('tau1', value=1, vary=False) # alpha! 
\n params.add('tau2', value=20*ms)\n \n \n if stimtype == \"inh_50ms_curr\":\n \n trun = 2.9\n tstart = 1.8\n tstop = 2.8\n \n celltype = [\"IfCell\", \"IfCell\"]\n cell_exe = [\"cell = IfCell()\", \"cell = IfCell()\"]\n \n N = [1000,1000]\n \n give_freq = True\n \n istart = 0 \n istop = 0.2\n di = 0.01\n \n ihold = [100, 50] \n ihold_sigma = [0.01, 0.01] # relative sigma\n \n pop = Population(celltype = celltype, cell_exe = cell_exe, N = N, temperature = 0, ihold = ihold, ihold_sigma = ihold_sigma, give_freq = give_freq, do_run = do_run, pickle_prefix = pickle_prefix, istart = istart, istop = istop, di = di, dt = dt) \n \n pop.method_interpol = np.array([\"bin\", \"syn\"])\n pop.method_interpol = np.array([\"bin\"])\n \n tstep = 2 \n tdur = 0.5\n \n istep = [100,100]\n current1 = np.concatenate(([ihold[1]*np.ones(round((tstep)/pop.dt)), istep[1]*np.ones(round(tdur/pop.dt)),ihold[1]*np.ones(round((trun-tstep-tdur)/pop.dt)) ])) \n \n pop.set_IStim()\n pop.set_IStep(istep = istep, istep_sigma = [0.01,0.01], tstep = tstep, tdur = tdur)\n \n pop.connect_cells(conntype='inh', weight=0.0003, tau=50)\n \n pop.fluct_s = [0.02,0.05]\n pop.connect_fluct() \n \n params = Parameters()\n params.add('amp', value=-0.1)\n params.add('shift', value=10)\n params.add('tau1', value=1, vary=False) # alpha! 
\n params.add('tau2', value=20*ms)\n \n \n if stimtype == \"inh_gr_curr\":\n \n trun = 9.9 \n tstart = 4.8\n tstop = 8\n \n cellimport = [\"from GRANULE_Cell import Grc\", \"from templates.golgi.Golgi_template import Goc\"]\n celltype = [\"Grc\",\"Goc_noloop\"]\n cell_exe = [\"cell = Grc(np.array([0.,0.,0.]))\",\"cell = Goc(np.array([0.,0.,0.]))\"]\n N = [100,4]\n N = [4096, 27]\n N = [4096*10, 27*10] \n\n give_freq = True\n \n # GRC \n #istart = 0 \n #istop = 0.1\n #di = 0.01\n \n #GOC\n istart = 0 \n istop = 0.5\n di = 0.02\n \n ihold = [100, 10] \n ihold_sigma = [0, 0] # relative sigma\n \n pop = Population(cellimport = cellimport, celltype = celltype, cell_exe = cell_exe, N = N, temperature = 37, ihold = ihold, ihold_sigma = ihold_sigma, give_freq = give_freq, do_run = do_run, pickle_prefix = pickle_prefix, istart = istart, istop = istop, di = di, dt = dt) \n \n pop.method_interpol = np.array([\"bin\", \"syn\"])\n pop.method_interpol = np.array([\"bin\"])\n \n tstep = 5 \n tdur = 2\n \n istep = [100,50]\n current1 = np.concatenate(([ihold[1]*np.ones(round((tstep)/pop.dt)), istep[1]*np.ones(round(tdur/pop.dt)),ihold[1]*np.ones(round((trun-tstep-tdur)/pop.dt)) ])) \n \n pop.set_IStim()\n pop.set_IStep(istep = istep, istep_sigma = [0,0], tstep = tstep, tdur = tdur)\n \n pop.connect_cells(conntype='inh_gr', weight = 0.4)\n \n pop.fluct_s = [0.05,2]\n pop.connect_fluct() \n \n params = Parameters()\n params.add('amp', value=-0.1)\n params.add('shift', value=10)\n params.add('tau1', value=1, vary=False) # alpha! 
\n params.add('tau2', value=20*ms) \n \n \n pop.run_steps(trun)\n \n self.no_fmean = True\n results = pop.get()\n time, voltage, current, fmean, gsyn = results.get('time'), results.get('voltage'), results.get('current'), results.get('fmean'), results.get('gsyn')\n freq_times, spike_freq, t_all_vec_vec, id_all_vec_vec, gsyns = results.get('freq_times'), results.get('spike_freq'), results.get('t_all_vec_vec'), results.get('id_all_vec_vec'), results.get('gsyns')\n \n if pop.id == 0:\n \n bin_width = 1*ms\n freq_times = arange(0, time[-1], bin_width)\n [num_spikes, _] = neuronpy.util.spiketrain.get_histogram(t_all_vec_vec[0], bins = freq_times)\n spike_freq = np.concatenate((zeros(1),num_spikes)) / bin_width / N[0]\n \n \n if \"inh\" in stimtype: # generate input current, to complicated to get it out\n \n if \"curr\" in stimtype:\n time1 = np.arange(0, trun, pop.dt)\n \n r_mod = interp(freq_times, time1, current1, left=0, right=0)\n \n [num_spikes, _] = neuronpy.util.spiketrain.get_histogram(t_all_vec_vec[1], bins = freq_times)\n spike_freq1 = np.concatenate((zeros(1),num_spikes)) / bin_width / N[1]\n else:\n r_mod = interp(freq_times, modulation_vec[1][0], modulation_vec[1][1], left=0, right=0)\n \n [num_spikes, _] = neuronpy.util.spiketrain.get_histogram(t_all_vec_vec[1], bins = freq_times)\n spike_freq1 = np.concatenate((zeros(1),num_spikes)) / bin_width / N[1]\n \n elif \"ex\" in stimtype:\n r_mod = interp(freq_times, modulation_vec[0][0], modulation_vec[0][1], left=0, right=0) \n\n\n def modelfun(amp, shift, tau1, tau2, bin_width, r_mod):\n \n tau1 = tau1\n tau2 = tau2\n \n t1 = np.arange(0,10*tau2,bin_width)\n K = amp*syn_kernel(t1, tau1, tau2) \n K = np.concatenate((np.zeros(len(K)-1),K))\n t2 = np.arange(0,len(K)*bin_width,bin_width)\n \n model = np.convolve(K, r_mod, mode='same') + shift\n \n return model\n\n \n def residual(params, r_mod, data=None, bin_width=1*ms, tstart=0, tstop=3):\n \n amp = params['amp'].value\n shift = params['shift'].value\n tau1 = 
params['tau1'].value\n tau2 = params['tau2'].value\n \n model = modelfun(amp, shift, tau1, tau2, bin_width, r_mod)\n \n return (data[int(tstart/bin_width):int(tstop/bin_width)]-model[int(tstart/bin_width):int(tstop/bin_width)])\n \n \n result = minimize(residual, params, args=(r_mod, spike_freq, bin_width, tstart, tstop))\n \n print \"chisqr: \", result.chisqr\n print 'Best-Fit Values:'\n for name, par in params.items():\n print ' %s = %.4f +/- %.4f ' % (name, par.value, par.stderr)\n \n amp = params['amp'].value\n shift = params['shift'].value\n tau1 = params['tau1'].value\n tau2 = params['tau2'].value\n \n model = modelfun(amp, shift, tau1, tau2, bin_width = bin_width, r_mod = r_mod) \n \n \n if \"ex\" in stimtype:\n plt.figure(0)\n plt.plot(freq_times[int(0.5/bin_width):int(trun/bin_width)], spike_freq[int(0.5/bin_width):int(trun/bin_width)], freq_times[int(0.5/bin_width):int(trun/bin_width)], model[int(0.5/bin_width):int(trun/bin_width)])\n plt.figure(1)\n plt.plot(time, voltage[0]), freq_times, r_mod, time, current\n #plt.figure(100) \n #plt.plot(t_all_vec_vec[0],id_all_vec_vec[0],'k|')\n #plt.savefig(\"./figs/dump/taufit_\" + str(stimtype) + \"_spikes.pdf\", dpi = 300) # save it \n \n else:\n plt.figure(0)\n plt.plot(freq_times[int(0.5/bin_width):int(trun/bin_width)], spike_freq1[int(0.5/bin_width):int(trun/bin_width)], freq_times[int(0.5/bin_width):int(trun/bin_width)], spike_freq[int(0.5/bin_width):int(trun/bin_width)], freq_times[int(0.5/bin_width):int(trun/bin_width)], model[int(0.5/bin_width):int(trun/bin_width)])\n plt.figure(1)\n plt.plot(time, voltage[0], time, voltage[1], freq_times, r_mod, time, current)\n plt.figure(100) \n #plt.plot(t_all_vec_vec[0],id_all_vec_vec[0],'k|')\n #plt.plot(t_all_vec_vec[1],id_all_vec_vec[1],'b|')\n #plt.savefig(\"./figs/dump/taufit_\" + str(stimtype) + \"_spikes.pdf\", dpi = 300) # save it \n \n \n plt.figure(0)\n plt.title('Fit: ' + str(stimtype) + ', tau1=' + str(tau1) + ' tau2=' + str(tau2))\n 
plt.savefig(\"./figs/dump/taufit_\" + str(stimtype) + \"_rate.png\", dpi = 300) # save it \n \n plt.figure(1)\n plt.savefig(\"./figs/dump/taufit_\" + str(stimtype) + \"_voltage.png\", dpi = 300) # save it \n \n \n plt.show()\n ",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
# -*- coding: utf-8 -*-
def rotate_right(values, times):
    """Return a new list equal to *values* rotated right by *times* positions.

    Rotating right by one moves the last element to the front.  The shift is
    taken modulo ``len(values)`` so arbitrarily large counts cost a single
    slice instead of ``times`` pop/insert passes, and an empty list is
    returned unchanged instead of raising ``IndexError`` on ``pop``.
    """
    if not values:
        return []
    shift = times % len(values)
    if shift == 0:
        # Guard explicitly: values[-0:] would be the whole list, which
        # would duplicate elements if used in the slice expression below.
        return list(values)
    return values[-shift:] + values[:-shift]


if __name__ == "__main__":
    # input() already returns str, so the original str() wrapper was redundant.
    elements = list(map(float, input("Type the elements of list: ").split()))
    times = int(input("How many times you wish shift to right: "))
    print(rotate_right(elements, times))
|
normal
|
{
"blob_id": "307bb7461a729ba979f6a862fe7c292c42f96ce6",
"index": 1164,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nfor _ in range(times):\n removed = elements.pop()\n elements.insert(0, removed)\nprint(elements)\n",
"step-3": "elements = str(input('Type the elements of list: ')).split()\nelements = list(map(float, elements))\ntimes = int(input('How many times you wish shift to right: '))\nfor _ in range(times):\n removed = elements.pop()\n elements.insert(0, removed)\nprint(elements)\n",
"step-4": "# -*- coding: utf-8 -*-\n\nelements = str(input(\"Type the elements of list: \")).split()\nelements = list(map(float,elements))\n\ntimes = int(input(\"How many times you wish shift to right: \"))\n\nfor _ in range(times):\n\tremoved = elements.pop()\n\telements.insert(0,removed)\n\nprint(elements)",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
import numpy as np
from scipy.stats import loguniform
import sys


def generate_parameters(seed):
    """Draw one random hyper-parameter configuration for the given *seed*.

    Seeds NumPy's legacy global RNG, then samples each hyper-parameter in a
    fixed order so identical seeds always reproduce identical draws.
    """
    np.random.seed(seed)
    # Dict literals evaluate values left-to-right, which preserves the
    # RNG call order and therefore the seeded results.
    return {
        'nfeatures': np.random.randint(3, 25),
        'lr': float(loguniform.rvs(0.001, 0.01, size=1)),
        'gamma': np.random.uniform(0.75, 0.05),
        'penalty': float(loguniform.rvs(0.00001, 0.1, size=1)),
        'batch': np.random.choice([32, 64]),
    }


if __name__ == '__main__':
    params = generate_parameters(int(sys.argv[1]))
    print('--nfeatures {nfeatures} --lr {lr} --gamma {gamma} --penalty {penalty} --batch {batch}'.format(**params))
|
normal
|
{
"blob_id": "7571e86be1077ae0f7ae542824cfcaaa2949dc83",
"index": 8731,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef generate_parameters(seed):\n np.random.seed(seed)\n out = {}\n out['nfeatures'] = np.random.randint(3, 25)\n out['lr'] = float(loguniform.rvs(0.001, 0.01, size=1))\n out['gamma'] = np.random.uniform(0.75, 0.05)\n out['penalty'] = float(loguniform.rvs(1e-05, 0.1, size=1))\n out['batch'] = np.random.choice([32, 64])\n return out\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef generate_parameters(seed):\n np.random.seed(seed)\n out = {}\n out['nfeatures'] = np.random.randint(3, 25)\n out['lr'] = float(loguniform.rvs(0.001, 0.01, size=1))\n out['gamma'] = np.random.uniform(0.75, 0.05)\n out['penalty'] = float(loguniform.rvs(1e-05, 0.1, size=1))\n out['batch'] = np.random.choice([32, 64])\n return out\n\n\nif __name__ == '__main__':\n out = generate_parameters(int(sys.argv[1]))\n out_str = ('--nfeatures {} --lr {} --gamma {} --penalty {} --batch {}'.\n format(out['nfeatures'], out['lr'], out['gamma'], out['penalty'],\n out['batch']))\n print(out_str)\n",
"step-4": "import numpy as np\nfrom scipy.stats import loguniform\nimport sys\n\n\ndef generate_parameters(seed):\n np.random.seed(seed)\n out = {}\n out['nfeatures'] = np.random.randint(3, 25)\n out['lr'] = float(loguniform.rvs(0.001, 0.01, size=1))\n out['gamma'] = np.random.uniform(0.75, 0.05)\n out['penalty'] = float(loguniform.rvs(1e-05, 0.1, size=1))\n out['batch'] = np.random.choice([32, 64])\n return out\n\n\nif __name__ == '__main__':\n out = generate_parameters(int(sys.argv[1]))\n out_str = ('--nfeatures {} --lr {} --gamma {} --penalty {} --batch {}'.\n format(out['nfeatures'], out['lr'], out['gamma'], out['penalty'],\n out['batch']))\n print(out_str)\n",
"step-5": "import numpy as np\nfrom scipy.stats import loguniform\nimport sys\n\ndef generate_parameters(seed):\n np.random.seed(seed)\n out={}\n out['nfeatures'] = np.random.randint(3, 25)\n out['lr'] = float(loguniform.rvs(0.001, 0.01, size=1))\n out['gamma'] = np.random.uniform(0.75, 0.05)\n out['penalty'] = float(loguniform.rvs(0.00001, 0.1, size=1))\n out['batch'] = np.random.choice([32,64])\n return out\n\nif __name__ == '__main__':\n out = generate_parameters(int(sys.argv[1]))\n out_str = '--nfeatures {} --lr {} --gamma {} --penalty {} --batch {}'.format(out['nfeatures'], out['lr'], out['gamma'], out['penalty'], out['batch'])\n print(out_str)\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
class TestSchedule(RunbotCase):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class TestSchedule(RunbotCase):
<|reserved_special_token_0|>
@patch('odoo.addons.runbot.models.build.os.path.getmtime')
@patch('odoo.addons.runbot.models.build.docker_state')
def test_schedule_mark_done(self, mock_docker_state, mock_getmtime):
""" Test that results are set even when job_30_run is skipped """
job_end_time = datetime.datetime.now()
mock_getmtime.return_value = job_end_time.timestamp()
build = self.Build.create({'local_state': 'testing', 'branch_id':
self.branch.id, 'name':
'd0d0caca0000ffffffffffffffffffffffffffff', 'port': '1234',
'host': 'runbotxx', 'job_start': datetime.datetime.now(),
'config_id': self.env.ref('runbot.runbot_build_config_default')
.id, 'active_step': self.env.ref(
'runbot.runbot_build_config_step_run').id})
domain = [('repo_id', 'in', (self.repo.id,))]
domain_host = domain + [('host', '=', 'runbotxx')]
build_ids = self.Build.search(domain_host + [('local_state', 'in',
['testing', 'running'])])
mock_docker_state.return_value = 'UNKNOWN'
self.assertEqual(build.local_state, 'testing')
build_ids._schedule()
self.assertEqual(build.local_state, 'testing')
build_ids.write({'job_start': datetime.datetime.now() - datetime.
timedelta(seconds=70)})
build_ids._schedule()
self.assertEqual(build.local_state, 'done')
self.assertEqual(build.local_result, 'ok')
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class TestSchedule(RunbotCase):
def setUp(self):
registry = odoo.registry()
super(TestSchedule, self).setUp()
self.repo = self.Repo.create({'name': 'bla@example.com:foo/bar'})
self.branch = self.Branch.create({'repo_id': self.repo.id, 'name':
'refs/heads/master'})
@patch('odoo.addons.runbot.models.build.os.path.getmtime')
@patch('odoo.addons.runbot.models.build.docker_state')
def test_schedule_mark_done(self, mock_docker_state, mock_getmtime):
""" Test that results are set even when job_30_run is skipped """
job_end_time = datetime.datetime.now()
mock_getmtime.return_value = job_end_time.timestamp()
build = self.Build.create({'local_state': 'testing', 'branch_id':
self.branch.id, 'name':
'd0d0caca0000ffffffffffffffffffffffffffff', 'port': '1234',
'host': 'runbotxx', 'job_start': datetime.datetime.now(),
'config_id': self.env.ref('runbot.runbot_build_config_default')
.id, 'active_step': self.env.ref(
'runbot.runbot_build_config_step_run').id})
domain = [('repo_id', 'in', (self.repo.id,))]
domain_host = domain + [('host', '=', 'runbotxx')]
build_ids = self.Build.search(domain_host + [('local_state', 'in',
['testing', 'running'])])
mock_docker_state.return_value = 'UNKNOWN'
self.assertEqual(build.local_state, 'testing')
build_ids._schedule()
self.assertEqual(build.local_state, 'testing')
build_ids.write({'job_start': datetime.datetime.now() - datetime.
timedelta(seconds=70)})
build_ids._schedule()
self.assertEqual(build.local_state, 'done')
self.assertEqual(build.local_result, 'ok')
<|reserved_special_token_1|>
import datetime
from unittest.mock import patch
from odoo.tests import common
import odoo
from .common import RunbotCase
class TestSchedule(RunbotCase):
def setUp(self):
registry = odoo.registry()
super(TestSchedule, self).setUp()
self.repo = self.Repo.create({'name': 'bla@example.com:foo/bar'})
self.branch = self.Branch.create({'repo_id': self.repo.id, 'name':
'refs/heads/master'})
@patch('odoo.addons.runbot.models.build.os.path.getmtime')
@patch('odoo.addons.runbot.models.build.docker_state')
def test_schedule_mark_done(self, mock_docker_state, mock_getmtime):
""" Test that results are set even when job_30_run is skipped """
job_end_time = datetime.datetime.now()
mock_getmtime.return_value = job_end_time.timestamp()
build = self.Build.create({'local_state': 'testing', 'branch_id':
self.branch.id, 'name':
'd0d0caca0000ffffffffffffffffffffffffffff', 'port': '1234',
'host': 'runbotxx', 'job_start': datetime.datetime.now(),
'config_id': self.env.ref('runbot.runbot_build_config_default')
.id, 'active_step': self.env.ref(
'runbot.runbot_build_config_step_run').id})
domain = [('repo_id', 'in', (self.repo.id,))]
domain_host = domain + [('host', '=', 'runbotxx')]
build_ids = self.Build.search(domain_host + [('local_state', 'in',
['testing', 'running'])])
mock_docker_state.return_value = 'UNKNOWN'
self.assertEqual(build.local_state, 'testing')
build_ids._schedule()
self.assertEqual(build.local_state, 'testing')
build_ids.write({'job_start': datetime.datetime.now() - datetime.
timedelta(seconds=70)})
build_ids._schedule()
self.assertEqual(build.local_state, 'done')
self.assertEqual(build.local_result, 'ok')
<|reserved_special_token_1|>
# -*- coding: utf-8 -*-
import datetime
from unittest.mock import patch
from odoo.tests import common
import odoo
from .common import RunbotCase
class TestSchedule(RunbotCase):
    """Tests for the runbot build scheduling loop (``_schedule``)."""
    def setUp(self):
        # entering test mode to avoid that the _schedule method commits records
        registry = odoo.registry()
        super(TestSchedule, self).setUp()
        # Minimal repo/branch fixture that the build under test hangs off.
        self.repo = self.Repo.create({'name': 'bla@example.com:foo/bar'})
        self.branch = self.Branch.create({
            'repo_id': self.repo.id,
            'name': 'refs/heads/master'
        })
    # docker_state is patched so no real container is inspected; getmtime is
    # patched because _schedule reads file modification times when deciding
    # how long a job has been running — TODO confirm exact use in build model.
    @patch('odoo.addons.runbot.models.build.os.path.getmtime')
    @patch('odoo.addons.runbot.models.build.docker_state')
    def test_schedule_mark_done(self, mock_docker_state, mock_getmtime):
        """ Test that results are set even when job_30_run is skipped """
        job_end_time = datetime.datetime.now()
        mock_getmtime.return_value = job_end_time.timestamp()
        # A build already in the 'run' step of the default config, pretending
        # it just started on host 'runbotxx'.
        build = self.Build.create({
            'local_state': 'testing',
            'branch_id': self.branch.id,
            'name': 'd0d0caca0000ffffffffffffffffffffffffffff',
            'port': '1234',
            'host': 'runbotxx',
            'job_start': datetime.datetime.now(),
            'config_id': self.env.ref('runbot.runbot_build_config_default').id,
            'active_step': self.env.ref('runbot.runbot_build_config_step_run').id,
        })
        # Recreate the search the scheduler performs: active builds on this host.
        domain = [('repo_id', 'in', (self.repo.id, ))]
        domain_host = domain + [('host', '=', 'runbotxx')]
        build_ids = self.Build.search(domain_host + [('local_state', 'in', ['testing', 'running'])])
        # Simulate a container whose state was never reported.
        mock_docker_state.return_value = 'UNKNOWN'
        self.assertEqual(build.local_state, 'testing')
        build_ids._schedule()  # too fast, docker not started
        # With a fresh job_start, _schedule must keep waiting for the docker.
        self.assertEqual(build.local_state, 'testing')
        build_ids.write({'job_start': datetime.datetime.now() - datetime.timedelta(seconds=70)})  # docker never started
        build_ids._schedule()
        # Once job_start is old enough, the build is closed and still gets a
        # result even though the run step never actually executed.
        self.assertEqual(build.local_state, 'done')
        self.assertEqual(build.local_result, 'ok')
|
flexible
|
{
"blob_id": "aa515b1b919eb557cd8c7e5f4d22773980b5af96",
"index": 8213,
"step-1": "<mask token>\n\n\nclass TestSchedule(RunbotCase):\n <mask token>\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass TestSchedule(RunbotCase):\n <mask token>\n\n @patch('odoo.addons.runbot.models.build.os.path.getmtime')\n @patch('odoo.addons.runbot.models.build.docker_state')\n def test_schedule_mark_done(self, mock_docker_state, mock_getmtime):\n \"\"\" Test that results are set even when job_30_run is skipped \"\"\"\n job_end_time = datetime.datetime.now()\n mock_getmtime.return_value = job_end_time.timestamp()\n build = self.Build.create({'local_state': 'testing', 'branch_id':\n self.branch.id, 'name':\n 'd0d0caca0000ffffffffffffffffffffffffffff', 'port': '1234',\n 'host': 'runbotxx', 'job_start': datetime.datetime.now(),\n 'config_id': self.env.ref('runbot.runbot_build_config_default')\n .id, 'active_step': self.env.ref(\n 'runbot.runbot_build_config_step_run').id})\n domain = [('repo_id', 'in', (self.repo.id,))]\n domain_host = domain + [('host', '=', 'runbotxx')]\n build_ids = self.Build.search(domain_host + [('local_state', 'in',\n ['testing', 'running'])])\n mock_docker_state.return_value = 'UNKNOWN'\n self.assertEqual(build.local_state, 'testing')\n build_ids._schedule()\n self.assertEqual(build.local_state, 'testing')\n build_ids.write({'job_start': datetime.datetime.now() - datetime.\n timedelta(seconds=70)})\n build_ids._schedule()\n self.assertEqual(build.local_state, 'done')\n self.assertEqual(build.local_result, 'ok')\n",
"step-3": "<mask token>\n\n\nclass TestSchedule(RunbotCase):\n\n def setUp(self):\n registry = odoo.registry()\n super(TestSchedule, self).setUp()\n self.repo = self.Repo.create({'name': 'bla@example.com:foo/bar'})\n self.branch = self.Branch.create({'repo_id': self.repo.id, 'name':\n 'refs/heads/master'})\n\n @patch('odoo.addons.runbot.models.build.os.path.getmtime')\n @patch('odoo.addons.runbot.models.build.docker_state')\n def test_schedule_mark_done(self, mock_docker_state, mock_getmtime):\n \"\"\" Test that results are set even when job_30_run is skipped \"\"\"\n job_end_time = datetime.datetime.now()\n mock_getmtime.return_value = job_end_time.timestamp()\n build = self.Build.create({'local_state': 'testing', 'branch_id':\n self.branch.id, 'name':\n 'd0d0caca0000ffffffffffffffffffffffffffff', 'port': '1234',\n 'host': 'runbotxx', 'job_start': datetime.datetime.now(),\n 'config_id': self.env.ref('runbot.runbot_build_config_default')\n .id, 'active_step': self.env.ref(\n 'runbot.runbot_build_config_step_run').id})\n domain = [('repo_id', 'in', (self.repo.id,))]\n domain_host = domain + [('host', '=', 'runbotxx')]\n build_ids = self.Build.search(domain_host + [('local_state', 'in',\n ['testing', 'running'])])\n mock_docker_state.return_value = 'UNKNOWN'\n self.assertEqual(build.local_state, 'testing')\n build_ids._schedule()\n self.assertEqual(build.local_state, 'testing')\n build_ids.write({'job_start': datetime.datetime.now() - datetime.\n timedelta(seconds=70)})\n build_ids._schedule()\n self.assertEqual(build.local_state, 'done')\n self.assertEqual(build.local_result, 'ok')\n",
"step-4": "import datetime\nfrom unittest.mock import patch\nfrom odoo.tests import common\nimport odoo\nfrom .common import RunbotCase\n\n\nclass TestSchedule(RunbotCase):\n\n def setUp(self):\n registry = odoo.registry()\n super(TestSchedule, self).setUp()\n self.repo = self.Repo.create({'name': 'bla@example.com:foo/bar'})\n self.branch = self.Branch.create({'repo_id': self.repo.id, 'name':\n 'refs/heads/master'})\n\n @patch('odoo.addons.runbot.models.build.os.path.getmtime')\n @patch('odoo.addons.runbot.models.build.docker_state')\n def test_schedule_mark_done(self, mock_docker_state, mock_getmtime):\n \"\"\" Test that results are set even when job_30_run is skipped \"\"\"\n job_end_time = datetime.datetime.now()\n mock_getmtime.return_value = job_end_time.timestamp()\n build = self.Build.create({'local_state': 'testing', 'branch_id':\n self.branch.id, 'name':\n 'd0d0caca0000ffffffffffffffffffffffffffff', 'port': '1234',\n 'host': 'runbotxx', 'job_start': datetime.datetime.now(),\n 'config_id': self.env.ref('runbot.runbot_build_config_default')\n .id, 'active_step': self.env.ref(\n 'runbot.runbot_build_config_step_run').id})\n domain = [('repo_id', 'in', (self.repo.id,))]\n domain_host = domain + [('host', '=', 'runbotxx')]\n build_ids = self.Build.search(domain_host + [('local_state', 'in',\n ['testing', 'running'])])\n mock_docker_state.return_value = 'UNKNOWN'\n self.assertEqual(build.local_state, 'testing')\n build_ids._schedule()\n self.assertEqual(build.local_state, 'testing')\n build_ids.write({'job_start': datetime.datetime.now() - datetime.\n timedelta(seconds=70)})\n build_ids._schedule()\n self.assertEqual(build.local_state, 'done')\n self.assertEqual(build.local_result, 'ok')\n",
"step-5": "# -*- coding: utf-8 -*-\nimport datetime\nfrom unittest.mock import patch\nfrom odoo.tests import common\nimport odoo\nfrom .common import RunbotCase\n\n\nclass TestSchedule(RunbotCase):\n\n def setUp(self):\n # entering test mode to avoid that the _schedule method commits records\n registry = odoo.registry()\n super(TestSchedule, self).setUp()\n\n self.repo = self.Repo.create({'name': 'bla@example.com:foo/bar'})\n self.branch = self.Branch.create({\n 'repo_id': self.repo.id,\n 'name': 'refs/heads/master'\n })\n\n @patch('odoo.addons.runbot.models.build.os.path.getmtime')\n @patch('odoo.addons.runbot.models.build.docker_state')\n def test_schedule_mark_done(self, mock_docker_state, mock_getmtime):\n \"\"\" Test that results are set even when job_30_run is skipped \"\"\"\n job_end_time = datetime.datetime.now()\n mock_getmtime.return_value = job_end_time.timestamp()\n\n build = self.Build.create({\n 'local_state': 'testing',\n 'branch_id': self.branch.id,\n 'name': 'd0d0caca0000ffffffffffffffffffffffffffff',\n 'port': '1234',\n 'host': 'runbotxx',\n 'job_start': datetime.datetime.now(),\n 'config_id': self.env.ref('runbot.runbot_build_config_default').id,\n 'active_step': self.env.ref('runbot.runbot_build_config_step_run').id,\n })\n domain = [('repo_id', 'in', (self.repo.id, ))]\n domain_host = domain + [('host', '=', 'runbotxx')]\n build_ids = self.Build.search(domain_host + [('local_state', 'in', ['testing', 'running'])])\n mock_docker_state.return_value = 'UNKNOWN'\n self.assertEqual(build.local_state, 'testing')\n build_ids._schedule() # too fast, docker not started\n self.assertEqual(build.local_state, 'testing')\n\n build_ids.write({'job_start': datetime.datetime.now() - datetime.timedelta(seconds=70)}) # docker never started\n build_ids._schedule()\n self.assertEqual(build.local_state, 'done')\n self.assertEqual(build.local_result, 'ok')\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
#!/usr/bin/env python
# -*- coding:utf-8 -*-
# Author: Yuan
import time
import sys


def jindutiao(jindu, zonge):
    """Render a one-line text progress bar on stdout.

    Writes a carriage return followed by one ``=`` per completed unit and
    the integer percentage done, overwriting the previous line in place.
    """
    percent = jindu / zonge * 100
    bar = "=" * jindu
    sys.stdout.write("\r%s%d%%" % (bar, percent))
    sys.stdout.flush()


if __name__ == "__main__":
    # Demo: animate 0..100% in 0.1 s steps.
    for step in range(101):
        time.sleep(0.1)
        jindutiao(step, 100)
|
normal
|
{
"blob_id": "f7afd08fb8316e44c314d17ef382b98dde7eef91",
"index": 1605,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef jindutiao(jindu, zonge):\n ret = jindu / zonge * 100\n r = '\\r%s%d%%' % ('=' * jindu, ret)\n sys.stdout.write(r)\n sys.stdout.flush()\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef jindutiao(jindu, zonge):\n ret = jindu / zonge * 100\n r = '\\r%s%d%%' % ('=' * jindu, ret)\n sys.stdout.write(r)\n sys.stdout.flush()\n\n\nif __name__ == '__main__':\n for i in range(101):\n time.sleep(0.1)\n jindutiao(i, 100)\n",
"step-4": "import time\nimport sys\n\n\ndef jindutiao(jindu, zonge):\n ret = jindu / zonge * 100\n r = '\\r%s%d%%' % ('=' * jindu, ret)\n sys.stdout.write(r)\n sys.stdout.flush()\n\n\nif __name__ == '__main__':\n for i in range(101):\n time.sleep(0.1)\n jindutiao(i, 100)\n",
"step-5": "#!/usr/bin/env python\n# -*- coding:utf-8 -*-\n# Author: Yuan\n\n\nimport time\n\nimport sys\n\ndef jindutiao(jindu,zonge):\n\n ret = (jindu/zonge)*100\n\n r = \"\\r%s%d%%\"%(\"=\"*jindu,ret)\n sys.stdout.write(r)\n sys.stdout.flush()\n\n\nif __name__ ==\"__main__\":\n for i in range(101):\n time.sleep(0.1)\n jindutiao(i,100)\n\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
class total_land_value_if_in_plan_type_group_SSS(Variable):
<|reserved_special_token_0|>
def __init__(self, group):
self.group = group
Variable.__init__(self)
def dependencies(self):
return [my_attribute_label('is_in_plan_type_group_%s' % self.group),
my_attribute_label('total_land_value')]
def compute(self, dataset_pool):
return self.get_dataset().get_attribute('is_in_plan_type_group_%s' %
self.group) * self.get_dataset().get_attribute('total_land_value')
<|reserved_special_token_0|>
<|reserved_special_token_0|>
class Tests(opus_unittest.OpusTestCase):
def test_my_inputs(self):
total_land_value = array([100, 200, 300])
is_in_plan_type_group_residential = array([1, 0, 1])
tester = VariableTester(__file__, package_order=['urbansim'],
test_data={'gridcell': {'grid_id': array([1, 2, 3]),
'total_land_value': total_land_value,
'is_in_plan_type_group_residential':
is_in_plan_type_group_residential}})
should_be = array([100, 0, 300])
instance_name = (
'urbansim.gridcell.total_land_value_if_in_plan_type_group_residential'
)
tester.test_is_equal_for_family_variable(self, should_be, instance_name
)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class total_land_value_if_in_plan_type_group_SSS(Variable):
<|reserved_special_token_0|>
def __init__(self, group):
self.group = group
Variable.__init__(self)
def dependencies(self):
return [my_attribute_label('is_in_plan_type_group_%s' % self.group),
my_attribute_label('total_land_value')]
def compute(self, dataset_pool):
return self.get_dataset().get_attribute('is_in_plan_type_group_%s' %
self.group) * self.get_dataset().get_attribute('total_land_value')
def post_check(self, values, dataset_pool):
self.do_check('x >= 0', values)
<|reserved_special_token_0|>
class Tests(opus_unittest.OpusTestCase):
def test_my_inputs(self):
total_land_value = array([100, 200, 300])
is_in_plan_type_group_residential = array([1, 0, 1])
tester = VariableTester(__file__, package_order=['urbansim'],
test_data={'gridcell': {'grid_id': array([1, 2, 3]),
'total_land_value': total_land_value,
'is_in_plan_type_group_residential':
is_in_plan_type_group_residential}})
should_be = array([100, 0, 300])
instance_name = (
'urbansim.gridcell.total_land_value_if_in_plan_type_group_residential'
)
tester.test_is_equal_for_family_variable(self, should_be, instance_name
)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class total_land_value_if_in_plan_type_group_SSS(Variable):
"""Sum of land values of locations if in plan_type_group SSS, 0 otherwise."""
def __init__(self, group):
self.group = group
Variable.__init__(self)
def dependencies(self):
return [my_attribute_label('is_in_plan_type_group_%s' % self.group),
my_attribute_label('total_land_value')]
def compute(self, dataset_pool):
return self.get_dataset().get_attribute('is_in_plan_type_group_%s' %
self.group) * self.get_dataset().get_attribute('total_land_value')
def post_check(self, values, dataset_pool):
self.do_check('x >= 0', values)
<|reserved_special_token_0|>
class Tests(opus_unittest.OpusTestCase):
def test_my_inputs(self):
total_land_value = array([100, 200, 300])
is_in_plan_type_group_residential = array([1, 0, 1])
tester = VariableTester(__file__, package_order=['urbansim'],
test_data={'gridcell': {'grid_id': array([1, 2, 3]),
'total_land_value': total_land_value,
'is_in_plan_type_group_residential':
is_in_plan_type_group_residential}})
should_be = array([100, 0, 300])
instance_name = (
'urbansim.gridcell.total_land_value_if_in_plan_type_group_residential'
)
tester.test_is_equal_for_family_variable(self, should_be, instance_name
)
if __name__ == '__main__':
opus_unittest.main()
<|reserved_special_token_1|>
from opus_core.variables.variable import Variable
from variable_functions import my_attribute_label
class total_land_value_if_in_plan_type_group_SSS(Variable):
"""Sum of land values of locations if in plan_type_group SSS, 0 otherwise."""
def __init__(self, group):
self.group = group
Variable.__init__(self)
def dependencies(self):
return [my_attribute_label('is_in_plan_type_group_%s' % self.group),
my_attribute_label('total_land_value')]
def compute(self, dataset_pool):
return self.get_dataset().get_attribute('is_in_plan_type_group_%s' %
self.group) * self.get_dataset().get_attribute('total_land_value')
def post_check(self, values, dataset_pool):
self.do_check('x >= 0', values)
from opus_core.tests import opus_unittest
from opus_core.tests.utils.variable_tester import VariableTester
from numpy import array
class Tests(opus_unittest.OpusTestCase):
def test_my_inputs(self):
total_land_value = array([100, 200, 300])
is_in_plan_type_group_residential = array([1, 0, 1])
tester = VariableTester(__file__, package_order=['urbansim'],
test_data={'gridcell': {'grid_id': array([1, 2, 3]),
'total_land_value': total_land_value,
'is_in_plan_type_group_residential':
is_in_plan_type_group_residential}})
should_be = array([100, 0, 300])
instance_name = (
'urbansim.gridcell.total_land_value_if_in_plan_type_group_residential'
)
tester.test_is_equal_for_family_variable(self, should_be, instance_name
)
if __name__ == '__main__':
opus_unittest.main()
<|reserved_special_token_1|>
# Opus/UrbanSim urban simulation software.
# Copyright (C) 2010-2011 University of California, Berkeley, 2005-2009 University of Washington
# See opus_core/LICENSE
from opus_core.variables.variable import Variable
from variable_functions import my_attribute_label
class total_land_value_if_in_plan_type_group_SSS(Variable):
    """Sum of land values of locations if in plan_type_group SSS, 0 otherwise."""

    def __init__(self, group):
        # Remember which plan-type group this family instance targets.
        self.group = group
        Variable.__init__(self)

    def dependencies(self):
        # Needs the 0/1 group-membership flag and the raw land value.
        membership_attr = "is_in_plan_type_group_%s" % self.group
        return [my_attribute_label(membership_attr),
                my_attribute_label("total_land_value")]

    def compute(self, dataset_pool):
        # Membership is 0/1, so multiplying zeroes out non-member cells.
        dataset = self.get_dataset()
        membership = dataset.get_attribute("is_in_plan_type_group_%s" % self.group)
        return membership * dataset.get_attribute("total_land_value")

    def post_check(self, values, dataset_pool):
        # Land values are non-negative, so the masked result must be too.
        self.do_check("x >= 0", values)
from opus_core.tests import opus_unittest
from opus_core.tests.utils.variable_tester import VariableTester
from numpy import array
class Tests(opus_unittest.OpusTestCase):
    """Unit test for the 'residential' instance of the SSS variable family."""

    def test_my_inputs(self):
        """Land value is kept for group members and zeroed for non-members."""
        total_land_value = array([100, 200, 300])
        # Cells 1 and 3 belong to the residential plan-type group; cell 2 does not.
        is_in_plan_type_group_residential = array([1, 0, 1])

        tester = VariableTester(
            __file__,
            package_order=['urbansim'],
            test_data={
                "gridcell":{ 
                    "grid_id":array([1,2,3]),
                    "total_land_value":total_land_value, 
                    "is_in_plan_type_group_residential":is_in_plan_type_group_residential
                }
            }
        )
        
        # Expected: member cells keep their land value, the non-member is zeroed.
        should_be = array([100, 0, 300])
        instance_name = "urbansim.gridcell.total_land_value_if_in_plan_type_group_residential"
        tester.test_is_equal_for_family_variable(self, should_be, instance_name)
# Allow running this module's tests directly from the command line.
if __name__=='__main__':
    opus_unittest.main()
|
flexible
|
{
"blob_id": "52bb10e19c7a5645ca3cf91705b9b0affe75f570",
"index": 4764,
"step-1": "<mask token>\n\n\nclass total_land_value_if_in_plan_type_group_SSS(Variable):\n <mask token>\n\n def __init__(self, group):\n self.group = group\n Variable.__init__(self)\n\n def dependencies(self):\n return [my_attribute_label('is_in_plan_type_group_%s' % self.group),\n my_attribute_label('total_land_value')]\n\n def compute(self, dataset_pool):\n return self.get_dataset().get_attribute('is_in_plan_type_group_%s' %\n self.group) * self.get_dataset().get_attribute('total_land_value')\n <mask token>\n\n\n<mask token>\n\n\nclass Tests(opus_unittest.OpusTestCase):\n\n def test_my_inputs(self):\n total_land_value = array([100, 200, 300])\n is_in_plan_type_group_residential = array([1, 0, 1])\n tester = VariableTester(__file__, package_order=['urbansim'],\n test_data={'gridcell': {'grid_id': array([1, 2, 3]),\n 'total_land_value': total_land_value,\n 'is_in_plan_type_group_residential':\n is_in_plan_type_group_residential}})\n should_be = array([100, 0, 300])\n instance_name = (\n 'urbansim.gridcell.total_land_value_if_in_plan_type_group_residential'\n )\n tester.test_is_equal_for_family_variable(self, should_be, instance_name\n )\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass total_land_value_if_in_plan_type_group_SSS(Variable):\n <mask token>\n\n def __init__(self, group):\n self.group = group\n Variable.__init__(self)\n\n def dependencies(self):\n return [my_attribute_label('is_in_plan_type_group_%s' % self.group),\n my_attribute_label('total_land_value')]\n\n def compute(self, dataset_pool):\n return self.get_dataset().get_attribute('is_in_plan_type_group_%s' %\n self.group) * self.get_dataset().get_attribute('total_land_value')\n\n def post_check(self, values, dataset_pool):\n self.do_check('x >= 0', values)\n\n\n<mask token>\n\n\nclass Tests(opus_unittest.OpusTestCase):\n\n def test_my_inputs(self):\n total_land_value = array([100, 200, 300])\n is_in_plan_type_group_residential = array([1, 0, 1])\n tester = VariableTester(__file__, package_order=['urbansim'],\n test_data={'gridcell': {'grid_id': array([1, 2, 3]),\n 'total_land_value': total_land_value,\n 'is_in_plan_type_group_residential':\n is_in_plan_type_group_residential}})\n should_be = array([100, 0, 300])\n instance_name = (\n 'urbansim.gridcell.total_land_value_if_in_plan_type_group_residential'\n )\n tester.test_is_equal_for_family_variable(self, should_be, instance_name\n )\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass total_land_value_if_in_plan_type_group_SSS(Variable):\n \"\"\"Sum of land values of locations if in plan_type_group SSS, 0 otherwise.\"\"\"\n\n def __init__(self, group):\n self.group = group\n Variable.__init__(self)\n\n def dependencies(self):\n return [my_attribute_label('is_in_plan_type_group_%s' % self.group),\n my_attribute_label('total_land_value')]\n\n def compute(self, dataset_pool):\n return self.get_dataset().get_attribute('is_in_plan_type_group_%s' %\n self.group) * self.get_dataset().get_attribute('total_land_value')\n\n def post_check(self, values, dataset_pool):\n self.do_check('x >= 0', values)\n\n\n<mask token>\n\n\nclass Tests(opus_unittest.OpusTestCase):\n\n def test_my_inputs(self):\n total_land_value = array([100, 200, 300])\n is_in_plan_type_group_residential = array([1, 0, 1])\n tester = VariableTester(__file__, package_order=['urbansim'],\n test_data={'gridcell': {'grid_id': array([1, 2, 3]),\n 'total_land_value': total_land_value,\n 'is_in_plan_type_group_residential':\n is_in_plan_type_group_residential}})\n should_be = array([100, 0, 300])\n instance_name = (\n 'urbansim.gridcell.total_land_value_if_in_plan_type_group_residential'\n )\n tester.test_is_equal_for_family_variable(self, should_be, instance_name\n )\n\n\nif __name__ == '__main__':\n opus_unittest.main()\n",
"step-4": "from opus_core.variables.variable import Variable\nfrom variable_functions import my_attribute_label\n\n\nclass total_land_value_if_in_plan_type_group_SSS(Variable):\n \"\"\"Sum of land values of locations if in plan_type_group SSS, 0 otherwise.\"\"\"\n\n def __init__(self, group):\n self.group = group\n Variable.__init__(self)\n\n def dependencies(self):\n return [my_attribute_label('is_in_plan_type_group_%s' % self.group),\n my_attribute_label('total_land_value')]\n\n def compute(self, dataset_pool):\n return self.get_dataset().get_attribute('is_in_plan_type_group_%s' %\n self.group) * self.get_dataset().get_attribute('total_land_value')\n\n def post_check(self, values, dataset_pool):\n self.do_check('x >= 0', values)\n\n\nfrom opus_core.tests import opus_unittest\nfrom opus_core.tests.utils.variable_tester import VariableTester\nfrom numpy import array\n\n\nclass Tests(opus_unittest.OpusTestCase):\n\n def test_my_inputs(self):\n total_land_value = array([100, 200, 300])\n is_in_plan_type_group_residential = array([1, 0, 1])\n tester = VariableTester(__file__, package_order=['urbansim'],\n test_data={'gridcell': {'grid_id': array([1, 2, 3]),\n 'total_land_value': total_land_value,\n 'is_in_plan_type_group_residential':\n is_in_plan_type_group_residential}})\n should_be = array([100, 0, 300])\n instance_name = (\n 'urbansim.gridcell.total_land_value_if_in_plan_type_group_residential'\n )\n tester.test_is_equal_for_family_variable(self, should_be, instance_name\n )\n\n\nif __name__ == '__main__':\n opus_unittest.main()\n",
"step-5": "# Opus/UrbanSim urban simulation software.\r\n# Copyright (C) 2010-2011 University of California, Berkeley, 2005-2009 University of Washington\r\n# See opus_core/LICENSE\r\n\r\nfrom opus_core.variables.variable import Variable\r\nfrom variable_functions import my_attribute_label\r\n\r\nclass total_land_value_if_in_plan_type_group_SSS(Variable):\r\n \"\"\"Sum of land values of locations if in plan_type_group SSS, 0 otherwise.\"\"\"\r\n\r\n def __init__(self, group):\r\n self.group = group\r\n Variable.__init__(self)\r\n\r\n def dependencies(self):\r\n return [my_attribute_label(\"is_in_plan_type_group_%s\" % self.group), \r\n my_attribute_label(\"total_land_value\")]\r\n\r\n def compute(self, dataset_pool):\r\n return self.get_dataset().get_attribute(\"is_in_plan_type_group_%s\" % self.group) * \\\r\n self.get_dataset().get_attribute(\"total_land_value\")\r\n\r\n def post_check(self, values, dataset_pool):\r\n self.do_check(\"x >= 0\", values)\r\n\r\n\r\nfrom opus_core.tests import opus_unittest\r\nfrom opus_core.tests.utils.variable_tester import VariableTester\r\nfrom numpy import array\r\nclass Tests(opus_unittest.OpusTestCase):\r\n def test_my_inputs(self):\r\n total_land_value = array([100, 200, 300])\r\n is_in_plan_type_group_residential = array([1, 0, 1])\r\n\r\n tester = VariableTester(\r\n __file__,\r\n package_order=['urbansim'],\r\n test_data={\r\n \"gridcell\":{ \r\n \"grid_id\":array([1,2,3]),\r\n \"total_land_value\":total_land_value, \r\n \"is_in_plan_type_group_residential\":is_in_plan_type_group_residential\r\n }\r\n }\r\n )\r\n \r\n should_be = array([100, 0, 300])\r\n instance_name = \"urbansim.gridcell.total_land_value_if_in_plan_type_group_residential\"\r\n tester.test_is_equal_for_family_variable(self, should_be, instance_name)\r\n\r\n\r\nif __name__=='__main__':\r\n opus_unittest.main()",
"step-ids": [
6,
7,
9,
10,
11
]
}
|
[
6,
7,
9,
10,
11
] |
<|reserved_special_token_0|>
class Session(Destroyable):
def __init__(self, physical_device, queue_index=None):
super(Session, self).__init__()
self.instance = lava.instance()
if physical_device not in lava.devices():
raise RuntimeError('Provided invalid / outdated device object')
self.queue_index = queue_index or physical_device.get_queue_indices(
QueueType.COMPUTE)[0]
self.device = Device(physical_device, [(QueueType.COMPUTE, self.
queue_index)], validation_lvl=lava.VALIDATION_LEVEL)
self.buffers = set()
self.shaders = set()
self.stages = set()
sessions.add(self)
def _destroy(self):
for stage in self.stages:
stage.destroy()
for shader in self.shaders:
shader.destroy()
for buffer in self.buffers:
buffer.destroy()
self.device.destroy()
def register_buffer(self, buffer):
self.buffers.add(buffer)
<|reserved_special_token_0|>
def register_stage(self, stage):
self.stages.add(stage)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Session(Destroyable):
def __init__(self, physical_device, queue_index=None):
super(Session, self).__init__()
self.instance = lava.instance()
if physical_device not in lava.devices():
raise RuntimeError('Provided invalid / outdated device object')
self.queue_index = queue_index or physical_device.get_queue_indices(
QueueType.COMPUTE)[0]
self.device = Device(physical_device, [(QueueType.COMPUTE, self.
queue_index)], validation_lvl=lava.VALIDATION_LEVEL)
self.buffers = set()
self.shaders = set()
self.stages = set()
sessions.add(self)
def _destroy(self):
for stage in self.stages:
stage.destroy()
for shader in self.shaders:
shader.destroy()
for buffer in self.buffers:
buffer.destroy()
self.device.destroy()
def register_buffer(self, buffer):
self.buffers.add(buffer)
def register_shader(self, shader):
self.shaders.add(shader)
def register_stage(self, stage):
self.stages.add(stage)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
__all__ = ['Session']
sessions = set()
class Session(Destroyable):
def __init__(self, physical_device, queue_index=None):
super(Session, self).__init__()
self.instance = lava.instance()
if physical_device not in lava.devices():
raise RuntimeError('Provided invalid / outdated device object')
self.queue_index = queue_index or physical_device.get_queue_indices(
QueueType.COMPUTE)[0]
self.device = Device(physical_device, [(QueueType.COMPUTE, self.
queue_index)], validation_lvl=lava.VALIDATION_LEVEL)
self.buffers = set()
self.shaders = set()
self.stages = set()
sessions.add(self)
def _destroy(self):
for stage in self.stages:
stage.destroy()
for shader in self.shaders:
shader.destroy()
for buffer in self.buffers:
buffer.destroy()
self.device.destroy()
def register_buffer(self, buffer):
self.buffers.add(buffer)
def register_shader(self, shader):
self.shaders.add(shader)
def register_stage(self, stage):
self.stages.add(stage)
<|reserved_special_token_1|>
import lava
from lava.api.constants.vk import QueueType
from lava.api.device import Device
from lava.api.util import Destroyable
__all__ = ['Session']
sessions = set()
class Session(Destroyable):
def __init__(self, physical_device, queue_index=None):
super(Session, self).__init__()
self.instance = lava.instance()
if physical_device not in lava.devices():
raise RuntimeError('Provided invalid / outdated device object')
self.queue_index = queue_index or physical_device.get_queue_indices(
QueueType.COMPUTE)[0]
self.device = Device(physical_device, [(QueueType.COMPUTE, self.
queue_index)], validation_lvl=lava.VALIDATION_LEVEL)
self.buffers = set()
self.shaders = set()
self.stages = set()
sessions.add(self)
def _destroy(self):
for stage in self.stages:
stage.destroy()
for shader in self.shaders:
shader.destroy()
for buffer in self.buffers:
buffer.destroy()
self.device.destroy()
def register_buffer(self, buffer):
self.buffers.add(buffer)
def register_shader(self, shader):
self.shaders.add(shader)
def register_stage(self, stage):
self.stages.add(stage)
<|reserved_special_token_1|>
# -*- coding: UTF-8 -*-
import lava
from lava.api.constants.vk import QueueType
from lava.api.device import Device
from lava.api.util import Destroyable
__all__ = ["Session"]
sessions = set()
class Session(Destroyable):
    """Owns a compute device and tracks every buffer/shader/stage created on it."""

    def __init__(self, physical_device, queue_index=None):
        super(Session, self).__init__()

        # Re-query the instance here: the validation level may have been
        # changed since module import.
        self.instance = lava.instance()
        if physical_device not in lava.devices():
            raise RuntimeError("Provided invalid / outdated device object")

        # NOTE(review): a falsy queue_index (e.g. 0) also falls back to the
        # first compute queue — this mirrors the original truthiness test.
        if queue_index:
            self.queue_index = queue_index
        else:
            self.queue_index = physical_device.get_queue_indices(QueueType.COMPUTE)[0]

        self.device = Device(
            physical_device,
            [(QueueType.COMPUTE, self.queue_index)],
            validation_lvl=lava.VALIDATION_LEVEL,
        )

        # Resources registered against this session; torn down in _destroy().
        self.buffers = set()
        self.shaders = set()
        self.stages = set()

        sessions.add(self)

    def _destroy(self):
        """Destroy every registered resource, then the device itself."""
        for resource in (*self.stages, *self.shaders, *self.buffers):
            resource.destroy()
        self.device.destroy()

    def register_buffer(self, buffer):
        self.buffers.add(buffer)

    def register_shader(self, shader):
        self.shaders.add(shader)

    def register_stage(self, stage):
        self.stages.add(stage)
|
flexible
|
{
"blob_id": "193dcf7bd658f88afe0a1f2fa28605f262e45bc2",
"index": 1554,
"step-1": "<mask token>\n\n\nclass Session(Destroyable):\n\n def __init__(self, physical_device, queue_index=None):\n super(Session, self).__init__()\n self.instance = lava.instance()\n if physical_device not in lava.devices():\n raise RuntimeError('Provided invalid / outdated device object')\n self.queue_index = queue_index or physical_device.get_queue_indices(\n QueueType.COMPUTE)[0]\n self.device = Device(physical_device, [(QueueType.COMPUTE, self.\n queue_index)], validation_lvl=lava.VALIDATION_LEVEL)\n self.buffers = set()\n self.shaders = set()\n self.stages = set()\n sessions.add(self)\n\n def _destroy(self):\n for stage in self.stages:\n stage.destroy()\n for shader in self.shaders:\n shader.destroy()\n for buffer in self.buffers:\n buffer.destroy()\n self.device.destroy()\n\n def register_buffer(self, buffer):\n self.buffers.add(buffer)\n <mask token>\n\n def register_stage(self, stage):\n self.stages.add(stage)\n",
"step-2": "<mask token>\n\n\nclass Session(Destroyable):\n\n def __init__(self, physical_device, queue_index=None):\n super(Session, self).__init__()\n self.instance = lava.instance()\n if physical_device not in lava.devices():\n raise RuntimeError('Provided invalid / outdated device object')\n self.queue_index = queue_index or physical_device.get_queue_indices(\n QueueType.COMPUTE)[0]\n self.device = Device(physical_device, [(QueueType.COMPUTE, self.\n queue_index)], validation_lvl=lava.VALIDATION_LEVEL)\n self.buffers = set()\n self.shaders = set()\n self.stages = set()\n sessions.add(self)\n\n def _destroy(self):\n for stage in self.stages:\n stage.destroy()\n for shader in self.shaders:\n shader.destroy()\n for buffer in self.buffers:\n buffer.destroy()\n self.device.destroy()\n\n def register_buffer(self, buffer):\n self.buffers.add(buffer)\n\n def register_shader(self, shader):\n self.shaders.add(shader)\n\n def register_stage(self, stage):\n self.stages.add(stage)\n",
"step-3": "<mask token>\n__all__ = ['Session']\nsessions = set()\n\n\nclass Session(Destroyable):\n\n def __init__(self, physical_device, queue_index=None):\n super(Session, self).__init__()\n self.instance = lava.instance()\n if physical_device not in lava.devices():\n raise RuntimeError('Provided invalid / outdated device object')\n self.queue_index = queue_index or physical_device.get_queue_indices(\n QueueType.COMPUTE)[0]\n self.device = Device(physical_device, [(QueueType.COMPUTE, self.\n queue_index)], validation_lvl=lava.VALIDATION_LEVEL)\n self.buffers = set()\n self.shaders = set()\n self.stages = set()\n sessions.add(self)\n\n def _destroy(self):\n for stage in self.stages:\n stage.destroy()\n for shader in self.shaders:\n shader.destroy()\n for buffer in self.buffers:\n buffer.destroy()\n self.device.destroy()\n\n def register_buffer(self, buffer):\n self.buffers.add(buffer)\n\n def register_shader(self, shader):\n self.shaders.add(shader)\n\n def register_stage(self, stage):\n self.stages.add(stage)\n",
"step-4": "import lava\nfrom lava.api.constants.vk import QueueType\nfrom lava.api.device import Device\nfrom lava.api.util import Destroyable\n__all__ = ['Session']\nsessions = set()\n\n\nclass Session(Destroyable):\n\n def __init__(self, physical_device, queue_index=None):\n super(Session, self).__init__()\n self.instance = lava.instance()\n if physical_device not in lava.devices():\n raise RuntimeError('Provided invalid / outdated device object')\n self.queue_index = queue_index or physical_device.get_queue_indices(\n QueueType.COMPUTE)[0]\n self.device = Device(physical_device, [(QueueType.COMPUTE, self.\n queue_index)], validation_lvl=lava.VALIDATION_LEVEL)\n self.buffers = set()\n self.shaders = set()\n self.stages = set()\n sessions.add(self)\n\n def _destroy(self):\n for stage in self.stages:\n stage.destroy()\n for shader in self.shaders:\n shader.destroy()\n for buffer in self.buffers:\n buffer.destroy()\n self.device.destroy()\n\n def register_buffer(self, buffer):\n self.buffers.add(buffer)\n\n def register_shader(self, shader):\n self.shaders.add(shader)\n\n def register_stage(self, stage):\n self.stages.add(stage)\n",
"step-5": "# -*- coding: UTF-8 -*-\n\nimport lava\nfrom lava.api.constants.vk import QueueType\nfrom lava.api.device import Device\nfrom lava.api.util import Destroyable\n\n__all__ = [\"Session\"]\n\nsessions = set()\n\n\nclass Session(Destroyable):\n\n def __init__(self, physical_device, queue_index=None):\n super(Session, self).__init__()\n\n self.instance = lava.instance() # validation level might has been changed\n if physical_device not in lava.devices():\n raise RuntimeError(\"Provided invalid / outdated device object\")\n\n self.queue_index = queue_index or physical_device.get_queue_indices(QueueType.COMPUTE)[0]\n self.device = Device(physical_device, [(QueueType.COMPUTE, self.queue_index)],\n validation_lvl=lava.VALIDATION_LEVEL)\n\n self.buffers = set()\n self.shaders = set()\n self.stages = set()\n\n sessions.add(self)\n\n def _destroy(self):\n for stage in self.stages:\n stage.destroy()\n for shader in self.shaders:\n shader.destroy()\n for buffer in self.buffers:\n buffer.destroy()\n self.device.destroy()\n\n def register_buffer(self, buffer):\n self.buffers.add(buffer)\n\n def register_shader(self, shader):\n self.shaders.add(shader)\n\n def register_stage(self, stage):\n self.stages.add(stage)\n",
"step-ids": [
5,
6,
7,
8,
9
]
}
|
[
5,
6,
7,
8,
9
] |
#!/usr/bin/env python3
"""Transfer learning with xception"""
import tensorflow.keras as K
from GPyOpt.methods import BayesianOptimization
import pickle
import os
import numpy as np
class my_model():
"""A model bassed on xception"""
def make_model(self, param):
"""makes the model"""
self.lr = param[0][0]
dr = param[0][1]
layer_units0 = param[0][2]
layer_units1 = param[0][3]
layer_units2 = param[0][4]
def learning_rate(epoch):
"""The learning rate scheduler"""
self.lr = self.lr / 1.00000001
return self.lr
"""Do not touch from here..."""
# load data
(X, Y), (X_test, Y_test) = K.datasets.cifar10.load_data()
# uncomment for rapid test
# X = X[0:256, :, :, :]
# Y = Y[0:256, :]
# X_test = X_test[0:256, :, :, :]
# Y_test = Y_test[0:256, :]
# preprocessing
Y = K.utils.to_categorical(Y[:])
X = K.applications.xception.preprocess_input(X)
Y_test = K.utils.to_categorical(Y_test[:])
X_test = K.applications.xception.preprocess_input(X_test)
# data format
df = "channels_last"
# call backs
save_best = K.callbacks.ModelCheckpoint(filepath="model_lr{:.2f}_dr{:.2f}_l0{}_l1{}_l2{}.h5"
.format(self.lr,
dr,
layer_units0,
layer_units1,
layer_units2),
monitor="val_loss",
save_best_only=True,
)
early_stop = K.callbacks.EarlyStopping(monitor="val_loss",
patience=7
)
learning_rate_0 = K.callbacks.LearningRateScheduler(learning_rate,
verbose=1
)
# input layer and lambda layer save and load for faster training
try:
loaded_model = K.models.load_model("frozen_layers.h5")
print("Loaded frozen layers!")
except Exception as e:
if isinstance(e, OSError):
pass
else:
exit()
print("Failed to load frozen layers.")
inputs = K.Input(shape=(32, 32, 3))
l = K.layers.Lambda(lambda X:
K.backend.resize_images(X,
height_factor=7,
width_factor=7,
data_format="channels_last"
))(inputs)
# Transfer learning layers
xception = K.applications.Xception(include_top=False,
input_tensor=l,
weights="imagenet",
pooling="max"
)
# freeze the resnet50 layers
for layer in xception.layers:
layer.trainable = False
# get outputs
outputs = xception.layers[-1].output
outputs = K.layers.Dense(units=10,
activation="softmax",
kernel_initializer=K.initializers.he_normal()
)(outputs)
# compile frozen model
model = K.Model(inputs=inputs, outputs=outputs)
model.compile(optimizer="adam",
loss="categorical_crossentropy",
metrics=["accuracy"])
model.fit(X,
Y,
epochs=1,
verbose=True,
batch_size=128
)
model.save("frozen_layers.h5")
loaded_model = K.models.load_model("frozen_layers.h5")
except MemoryError("Try lowering the batch size"):
exit()
# set up new model
if os.path.exists("X_inputs") and os.path.exists("X_test_inputs"):
with open("X_inputs", "rb") as X_file:
X = pickle.load(X_file)
with open("X_test_inputs", "rb") as X_test_file:
X_test = pickle.load(X_test_file)
else:
frozen_layers = K.Model(inputs=loaded_model.input,
outputs=loaded_model.layers[-2].output
)
X = frozen_layers.predict(X,
verbose=True
)
X_test = frozen_layers.predict(X_test,
verbose=True
)
with open("X_inputs", "wb") as X_file:
pickle.dump(X, X_file)
with open("X_test_inputs", "wb") as X_test_file:
pickle.dump(X_test, X_test_file)
# inputs
inputs = K.Input((2048,))
"""... to here!!!"""
# new layers here
layer = K.layers.Dense(units=layer_units0,
activation="relu",
kernel_initializer=K.initializers.he_normal()
)(inputs)
layer = K.layers.Dropout(dr)(layer)
layer = K.layers.Dense(units=layer_units1,
activation="relu",
kernel_initializer=K.initializers.he_normal()
)(layer)
# layer = K.layers.Dropout(dr)(layer)
layer = K.layers.Dense(units=layer_units2,
activation="relu",
kernel_initializer=K.initializers.he_normal()
)(layer)
# layer = K.layers.Dropout(dr)(layer)
outputs = K.layers.Dense(units=10,
activation="softmax",
kernel_initializer=K.initializers.he_normal()
)(layer)
model = K.Model(inputs=inputs, outputs=outputs)
model.compile(optimizer="adam",
loss="categorical_crossentropy",
metrics=["accuracy"])
# train
h = model.fit(X,
Y,
validation_data=(X_test, Y_test),
epochs=64,
verbose=True,
batch_size=128,
shuffle=True,
callbacks=[early_stop, learning_rate_0, save_best]
)
val_accuracy = np.min(h.history["val_loss"])
return val_accuracy
def opt(self):
"""the optimization function"""
search_space = [
{"name": "lr", "type": "continuous", "domain": (0.01, 0.001)},
{"name": "dr", "type": "continuous", "domain": (0.1, 0.3)},
{"name": "layer_units0", "type": "discrete", "domain": (32, 64, 128, 256, 512)},
{"name": "layer_units1", "type": "discrete", "domain": (32, 64, 128, 256, 512)},
{"name": "layer_units2", "type": "discrete", "domain": (32, 64, 128, 256, 512)}
]
my_bayesian_opt = BayesianOptimization(self.make_model,
domain=search_space,
model_type="GP",
initial_design_numdata=1,
acquisition_type="EI",
maximize=False,
verbosity=True
)
print("==============================")
my_bayesian_opt.run_optimization(max_iter=29,
report_file="report",
evaluations_file="evaluation",
models_file="models")
print("PLOTTING")
my_bayesian_opt.plot_acquisition()
my_bayesian_opt.plot_convergence()
print("==============================")
def preprocess_data(X, Y):
"""The data preprocessing"""
Y_p = K.utils.to_categorical(Y[:])
X_p = K.applications.xception.preprocess_input(X)
loaded_model = K.models.load_model("frozen_layers.h5")
frozen_layers = K.Model(inputs=loaded_model.input,
outputs=loaded_model.layers[-2].output
)
X_p = frozen_layers.predict(X_p,
verbose=True
)
with open("Preprocessed_data_Xs", "wb") as my_file0:
pickle.dump(X_p, my_file0)
with open("Preprocessed_data_Ys", "wb") as my_file1:
pickle.dump(Y_p, my_file1)
return X_p, Y_p
|
normal
|
{
"blob_id": "d015a1b27a3a9e7f5e6614da752137064000b905",
"index": 239,
"step-1": "<mask token>\n\n\nclass my_model:\n <mask token>\n\n def make_model(self, param):\n \"\"\"makes the model\"\"\"\n self.lr = param[0][0]\n dr = param[0][1]\n layer_units0 = param[0][2]\n layer_units1 = param[0][3]\n layer_units2 = param[0][4]\n\n def learning_rate(epoch):\n \"\"\"The learning rate scheduler\"\"\"\n self.lr = self.lr / 1.00000001\n return self.lr\n \"\"\"Do not touch from here...\"\"\"\n (X, Y), (X_test, Y_test) = K.datasets.cifar10.load_data()\n Y = K.utils.to_categorical(Y[:])\n X = K.applications.xception.preprocess_input(X)\n Y_test = K.utils.to_categorical(Y_test[:])\n X_test = K.applications.xception.preprocess_input(X_test)\n df = 'channels_last'\n save_best = K.callbacks.ModelCheckpoint(filepath=\n 'model_lr{:.2f}_dr{:.2f}_l0{}_l1{}_l2{}.h5'.format(self.lr, dr,\n layer_units0, layer_units1, layer_units2), monitor='val_loss',\n save_best_only=True)\n early_stop = K.callbacks.EarlyStopping(monitor='val_loss', patience=7)\n learning_rate_0 = K.callbacks.LearningRateScheduler(learning_rate,\n verbose=1)\n try:\n loaded_model = K.models.load_model('frozen_layers.h5')\n print('Loaded frozen layers!')\n except Exception as e:\n if isinstance(e, OSError):\n pass\n else:\n exit()\n print('Failed to load frozen layers.')\n inputs = K.Input(shape=(32, 32, 3))\n l = K.layers.Lambda(lambda X: K.backend.resize_images(X,\n height_factor=7, width_factor=7, data_format='channels_last'))(\n inputs)\n xception = K.applications.Xception(include_top=False,\n input_tensor=l, weights='imagenet', pooling='max')\n for layer in xception.layers:\n layer.trainable = False\n outputs = xception.layers[-1].output\n outputs = K.layers.Dense(units=10, activation='softmax',\n kernel_initializer=K.initializers.he_normal())(outputs)\n model = K.Model(inputs=inputs, outputs=outputs)\n model.compile(optimizer='adam', loss='categorical_crossentropy',\n metrics=['accuracy'])\n model.fit(X, Y, epochs=1, verbose=True, batch_size=128)\n model.save('frozen_layers.h5')\n 
loaded_model = K.models.load_model('frozen_layers.h5')\n except MemoryError('Try lowering the batch size'):\n exit()\n if os.path.exists('X_inputs') and os.path.exists('X_test_inputs'):\n with open('X_inputs', 'rb') as X_file:\n X = pickle.load(X_file)\n with open('X_test_inputs', 'rb') as X_test_file:\n X_test = pickle.load(X_test_file)\n else:\n frozen_layers = K.Model(inputs=loaded_model.input, outputs=\n loaded_model.layers[-2].output)\n X = frozen_layers.predict(X, verbose=True)\n X_test = frozen_layers.predict(X_test, verbose=True)\n with open('X_inputs', 'wb') as X_file:\n pickle.dump(X, X_file)\n with open('X_test_inputs', 'wb') as X_test_file:\n pickle.dump(X_test, X_test_file)\n inputs = K.Input((2048,))\n \"\"\"... to here!!!\"\"\"\n layer = K.layers.Dense(units=layer_units0, activation='relu',\n kernel_initializer=K.initializers.he_normal())(inputs)\n layer = K.layers.Dropout(dr)(layer)\n layer = K.layers.Dense(units=layer_units1, activation='relu',\n kernel_initializer=K.initializers.he_normal())(layer)\n layer = K.layers.Dense(units=layer_units2, activation='relu',\n kernel_initializer=K.initializers.he_normal())(layer)\n outputs = K.layers.Dense(units=10, activation='softmax',\n kernel_initializer=K.initializers.he_normal())(layer)\n model = K.Model(inputs=inputs, outputs=outputs)\n model.compile(optimizer='adam', loss='categorical_crossentropy',\n metrics=['accuracy'])\n h = model.fit(X, Y, validation_data=(X_test, Y_test), epochs=64,\n verbose=True, batch_size=128, shuffle=True, callbacks=[\n early_stop, learning_rate_0, save_best])\n val_accuracy = np.min(h.history['val_loss'])\n return val_accuracy\n\n def opt(self):\n \"\"\"the optimization function\"\"\"\n search_space = [{'name': 'lr', 'type': 'continuous', 'domain': (\n 0.01, 0.001)}, {'name': 'dr', 'type': 'continuous', 'domain': (\n 0.1, 0.3)}, {'name': 'layer_units0', 'type': 'discrete',\n 'domain': (32, 64, 128, 256, 512)}, {'name': 'layer_units1',\n 'type': 'discrete', 'domain': (32, 64, 
128, 256, 512)}, {'name':\n 'layer_units2', 'type': 'discrete', 'domain': (32, 64, 128, 256,\n 512)}]\n my_bayesian_opt = BayesianOptimization(self.make_model, domain=\n search_space, model_type='GP', initial_design_numdata=1,\n acquisition_type='EI', maximize=False, verbosity=True)\n print('==============================')\n my_bayesian_opt.run_optimization(max_iter=29, report_file='report',\n evaluations_file='evaluation', models_file='models')\n print('PLOTTING')\n my_bayesian_opt.plot_acquisition()\n my_bayesian_opt.plot_convergence()\n print('==============================')\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass my_model:\n \"\"\"A model bassed on xception\"\"\"\n\n def make_model(self, param):\n \"\"\"makes the model\"\"\"\n self.lr = param[0][0]\n dr = param[0][1]\n layer_units0 = param[0][2]\n layer_units1 = param[0][3]\n layer_units2 = param[0][4]\n\n def learning_rate(epoch):\n \"\"\"The learning rate scheduler\"\"\"\n self.lr = self.lr / 1.00000001\n return self.lr\n \"\"\"Do not touch from here...\"\"\"\n (X, Y), (X_test, Y_test) = K.datasets.cifar10.load_data()\n Y = K.utils.to_categorical(Y[:])\n X = K.applications.xception.preprocess_input(X)\n Y_test = K.utils.to_categorical(Y_test[:])\n X_test = K.applications.xception.preprocess_input(X_test)\n df = 'channels_last'\n save_best = K.callbacks.ModelCheckpoint(filepath=\n 'model_lr{:.2f}_dr{:.2f}_l0{}_l1{}_l2{}.h5'.format(self.lr, dr,\n layer_units0, layer_units1, layer_units2), monitor='val_loss',\n save_best_only=True)\n early_stop = K.callbacks.EarlyStopping(monitor='val_loss', patience=7)\n learning_rate_0 = K.callbacks.LearningRateScheduler(learning_rate,\n verbose=1)\n try:\n loaded_model = K.models.load_model('frozen_layers.h5')\n print('Loaded frozen layers!')\n except Exception as e:\n if isinstance(e, OSError):\n pass\n else:\n exit()\n print('Failed to load frozen layers.')\n inputs = K.Input(shape=(32, 32, 3))\n l = K.layers.Lambda(lambda X: K.backend.resize_images(X,\n height_factor=7, width_factor=7, data_format='channels_last'))(\n inputs)\n xception = K.applications.Xception(include_top=False,\n input_tensor=l, weights='imagenet', pooling='max')\n for layer in xception.layers:\n layer.trainable = False\n outputs = xception.layers[-1].output\n outputs = K.layers.Dense(units=10, activation='softmax',\n kernel_initializer=K.initializers.he_normal())(outputs)\n model = K.Model(inputs=inputs, outputs=outputs)\n model.compile(optimizer='adam', loss='categorical_crossentropy',\n metrics=['accuracy'])\n model.fit(X, Y, epochs=1, verbose=True, batch_size=128)\n 
model.save('frozen_layers.h5')\n loaded_model = K.models.load_model('frozen_layers.h5')\n except MemoryError('Try lowering the batch size'):\n exit()\n if os.path.exists('X_inputs') and os.path.exists('X_test_inputs'):\n with open('X_inputs', 'rb') as X_file:\n X = pickle.load(X_file)\n with open('X_test_inputs', 'rb') as X_test_file:\n X_test = pickle.load(X_test_file)\n else:\n frozen_layers = K.Model(inputs=loaded_model.input, outputs=\n loaded_model.layers[-2].output)\n X = frozen_layers.predict(X, verbose=True)\n X_test = frozen_layers.predict(X_test, verbose=True)\n with open('X_inputs', 'wb') as X_file:\n pickle.dump(X, X_file)\n with open('X_test_inputs', 'wb') as X_test_file:\n pickle.dump(X_test, X_test_file)\n inputs = K.Input((2048,))\n \"\"\"... to here!!!\"\"\"\n layer = K.layers.Dense(units=layer_units0, activation='relu',\n kernel_initializer=K.initializers.he_normal())(inputs)\n layer = K.layers.Dropout(dr)(layer)\n layer = K.layers.Dense(units=layer_units1, activation='relu',\n kernel_initializer=K.initializers.he_normal())(layer)\n layer = K.layers.Dense(units=layer_units2, activation='relu',\n kernel_initializer=K.initializers.he_normal())(layer)\n outputs = K.layers.Dense(units=10, activation='softmax',\n kernel_initializer=K.initializers.he_normal())(layer)\n model = K.Model(inputs=inputs, outputs=outputs)\n model.compile(optimizer='adam', loss='categorical_crossentropy',\n metrics=['accuracy'])\n h = model.fit(X, Y, validation_data=(X_test, Y_test), epochs=64,\n verbose=True, batch_size=128, shuffle=True, callbacks=[\n early_stop, learning_rate_0, save_best])\n val_accuracy = np.min(h.history['val_loss'])\n return val_accuracy\n\n def opt(self):\n \"\"\"the optimization function\"\"\"\n search_space = [{'name': 'lr', 'type': 'continuous', 'domain': (\n 0.01, 0.001)}, {'name': 'dr', 'type': 'continuous', 'domain': (\n 0.1, 0.3)}, {'name': 'layer_units0', 'type': 'discrete',\n 'domain': (32, 64, 128, 256, 512)}, {'name': 'layer_units1',\n 
'type': 'discrete', 'domain': (32, 64, 128, 256, 512)}, {'name':\n 'layer_units2', 'type': 'discrete', 'domain': (32, 64, 128, 256,\n 512)}]\n my_bayesian_opt = BayesianOptimization(self.make_model, domain=\n search_space, model_type='GP', initial_design_numdata=1,\n acquisition_type='EI', maximize=False, verbosity=True)\n print('==============================')\n my_bayesian_opt.run_optimization(max_iter=29, report_file='report',\n evaluations_file='evaluation', models_file='models')\n print('PLOTTING')\n my_bayesian_opt.plot_acquisition()\n my_bayesian_opt.plot_convergence()\n print('==============================')\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass my_model:\n \"\"\"A model bassed on xception\"\"\"\n\n def make_model(self, param):\n \"\"\"makes the model\"\"\"\n self.lr = param[0][0]\n dr = param[0][1]\n layer_units0 = param[0][2]\n layer_units1 = param[0][3]\n layer_units2 = param[0][4]\n\n def learning_rate(epoch):\n \"\"\"The learning rate scheduler\"\"\"\n self.lr = self.lr / 1.00000001\n return self.lr\n \"\"\"Do not touch from here...\"\"\"\n (X, Y), (X_test, Y_test) = K.datasets.cifar10.load_data()\n Y = K.utils.to_categorical(Y[:])\n X = K.applications.xception.preprocess_input(X)\n Y_test = K.utils.to_categorical(Y_test[:])\n X_test = K.applications.xception.preprocess_input(X_test)\n df = 'channels_last'\n save_best = K.callbacks.ModelCheckpoint(filepath=\n 'model_lr{:.2f}_dr{:.2f}_l0{}_l1{}_l2{}.h5'.format(self.lr, dr,\n layer_units0, layer_units1, layer_units2), monitor='val_loss',\n save_best_only=True)\n early_stop = K.callbacks.EarlyStopping(monitor='val_loss', patience=7)\n learning_rate_0 = K.callbacks.LearningRateScheduler(learning_rate,\n verbose=1)\n try:\n loaded_model = K.models.load_model('frozen_layers.h5')\n print('Loaded frozen layers!')\n except Exception as e:\n if isinstance(e, OSError):\n pass\n else:\n exit()\n print('Failed to load frozen layers.')\n inputs = K.Input(shape=(32, 32, 3))\n l = K.layers.Lambda(lambda X: K.backend.resize_images(X,\n height_factor=7, width_factor=7, data_format='channels_last'))(\n inputs)\n xception = K.applications.Xception(include_top=False,\n input_tensor=l, weights='imagenet', pooling='max')\n for layer in xception.layers:\n layer.trainable = False\n outputs = xception.layers[-1].output\n outputs = K.layers.Dense(units=10, activation='softmax',\n kernel_initializer=K.initializers.he_normal())(outputs)\n model = K.Model(inputs=inputs, outputs=outputs)\n model.compile(optimizer='adam', loss='categorical_crossentropy',\n metrics=['accuracy'])\n model.fit(X, Y, epochs=1, verbose=True, batch_size=128)\n 
model.save('frozen_layers.h5')\n loaded_model = K.models.load_model('frozen_layers.h5')\n except MemoryError('Try lowering the batch size'):\n exit()\n if os.path.exists('X_inputs') and os.path.exists('X_test_inputs'):\n with open('X_inputs', 'rb') as X_file:\n X = pickle.load(X_file)\n with open('X_test_inputs', 'rb') as X_test_file:\n X_test = pickle.load(X_test_file)\n else:\n frozen_layers = K.Model(inputs=loaded_model.input, outputs=\n loaded_model.layers[-2].output)\n X = frozen_layers.predict(X, verbose=True)\n X_test = frozen_layers.predict(X_test, verbose=True)\n with open('X_inputs', 'wb') as X_file:\n pickle.dump(X, X_file)\n with open('X_test_inputs', 'wb') as X_test_file:\n pickle.dump(X_test, X_test_file)\n inputs = K.Input((2048,))\n \"\"\"... to here!!!\"\"\"\n layer = K.layers.Dense(units=layer_units0, activation='relu',\n kernel_initializer=K.initializers.he_normal())(inputs)\n layer = K.layers.Dropout(dr)(layer)\n layer = K.layers.Dense(units=layer_units1, activation='relu',\n kernel_initializer=K.initializers.he_normal())(layer)\n layer = K.layers.Dense(units=layer_units2, activation='relu',\n kernel_initializer=K.initializers.he_normal())(layer)\n outputs = K.layers.Dense(units=10, activation='softmax',\n kernel_initializer=K.initializers.he_normal())(layer)\n model = K.Model(inputs=inputs, outputs=outputs)\n model.compile(optimizer='adam', loss='categorical_crossentropy',\n metrics=['accuracy'])\n h = model.fit(X, Y, validation_data=(X_test, Y_test), epochs=64,\n verbose=True, batch_size=128, shuffle=True, callbacks=[\n early_stop, learning_rate_0, save_best])\n val_accuracy = np.min(h.history['val_loss'])\n return val_accuracy\n\n def opt(self):\n \"\"\"the optimization function\"\"\"\n search_space = [{'name': 'lr', 'type': 'continuous', 'domain': (\n 0.01, 0.001)}, {'name': 'dr', 'type': 'continuous', 'domain': (\n 0.1, 0.3)}, {'name': 'layer_units0', 'type': 'discrete',\n 'domain': (32, 64, 128, 256, 512)}, {'name': 'layer_units1',\n 
'type': 'discrete', 'domain': (32, 64, 128, 256, 512)}, {'name':\n 'layer_units2', 'type': 'discrete', 'domain': (32, 64, 128, 256,\n 512)}]\n my_bayesian_opt = BayesianOptimization(self.make_model, domain=\n search_space, model_type='GP', initial_design_numdata=1,\n acquisition_type='EI', maximize=False, verbosity=True)\n print('==============================')\n my_bayesian_opt.run_optimization(max_iter=29, report_file='report',\n evaluations_file='evaluation', models_file='models')\n print('PLOTTING')\n my_bayesian_opt.plot_acquisition()\n my_bayesian_opt.plot_convergence()\n print('==============================')\n\n\ndef preprocess_data(X, Y):\n \"\"\"The data preprocessing\"\"\"\n Y_p = K.utils.to_categorical(Y[:])\n X_p = K.applications.xception.preprocess_input(X)\n loaded_model = K.models.load_model('frozen_layers.h5')\n frozen_layers = K.Model(inputs=loaded_model.input, outputs=loaded_model\n .layers[-2].output)\n X_p = frozen_layers.predict(X_p, verbose=True)\n with open('Preprocessed_data_Xs', 'wb') as my_file0:\n pickle.dump(X_p, my_file0)\n with open('Preprocessed_data_Ys', 'wb') as my_file1:\n pickle.dump(Y_p, my_file1)\n return X_p, Y_p\n",
"step-4": "<mask token>\nimport tensorflow.keras as K\nfrom GPyOpt.methods import BayesianOptimization\nimport pickle\nimport os\nimport numpy as np\n\n\nclass my_model:\n \"\"\"A model bassed on xception\"\"\"\n\n def make_model(self, param):\n \"\"\"makes the model\"\"\"\n self.lr = param[0][0]\n dr = param[0][1]\n layer_units0 = param[0][2]\n layer_units1 = param[0][3]\n layer_units2 = param[0][4]\n\n def learning_rate(epoch):\n \"\"\"The learning rate scheduler\"\"\"\n self.lr = self.lr / 1.00000001\n return self.lr\n \"\"\"Do not touch from here...\"\"\"\n (X, Y), (X_test, Y_test) = K.datasets.cifar10.load_data()\n Y = K.utils.to_categorical(Y[:])\n X = K.applications.xception.preprocess_input(X)\n Y_test = K.utils.to_categorical(Y_test[:])\n X_test = K.applications.xception.preprocess_input(X_test)\n df = 'channels_last'\n save_best = K.callbacks.ModelCheckpoint(filepath=\n 'model_lr{:.2f}_dr{:.2f}_l0{}_l1{}_l2{}.h5'.format(self.lr, dr,\n layer_units0, layer_units1, layer_units2), monitor='val_loss',\n save_best_only=True)\n early_stop = K.callbacks.EarlyStopping(monitor='val_loss', patience=7)\n learning_rate_0 = K.callbacks.LearningRateScheduler(learning_rate,\n verbose=1)\n try:\n loaded_model = K.models.load_model('frozen_layers.h5')\n print('Loaded frozen layers!')\n except Exception as e:\n if isinstance(e, OSError):\n pass\n else:\n exit()\n print('Failed to load frozen layers.')\n inputs = K.Input(shape=(32, 32, 3))\n l = K.layers.Lambda(lambda X: K.backend.resize_images(X,\n height_factor=7, width_factor=7, data_format='channels_last'))(\n inputs)\n xception = K.applications.Xception(include_top=False,\n input_tensor=l, weights='imagenet', pooling='max')\n for layer in xception.layers:\n layer.trainable = False\n outputs = xception.layers[-1].output\n outputs = K.layers.Dense(units=10, activation='softmax',\n kernel_initializer=K.initializers.he_normal())(outputs)\n model = K.Model(inputs=inputs, outputs=outputs)\n model.compile(optimizer='adam', 
loss='categorical_crossentropy',\n metrics=['accuracy'])\n model.fit(X, Y, epochs=1, verbose=True, batch_size=128)\n model.save('frozen_layers.h5')\n loaded_model = K.models.load_model('frozen_layers.h5')\n except MemoryError('Try lowering the batch size'):\n exit()\n if os.path.exists('X_inputs') and os.path.exists('X_test_inputs'):\n with open('X_inputs', 'rb') as X_file:\n X = pickle.load(X_file)\n with open('X_test_inputs', 'rb') as X_test_file:\n X_test = pickle.load(X_test_file)\n else:\n frozen_layers = K.Model(inputs=loaded_model.input, outputs=\n loaded_model.layers[-2].output)\n X = frozen_layers.predict(X, verbose=True)\n X_test = frozen_layers.predict(X_test, verbose=True)\n with open('X_inputs', 'wb') as X_file:\n pickle.dump(X, X_file)\n with open('X_test_inputs', 'wb') as X_test_file:\n pickle.dump(X_test, X_test_file)\n inputs = K.Input((2048,))\n \"\"\"... to here!!!\"\"\"\n layer = K.layers.Dense(units=layer_units0, activation='relu',\n kernel_initializer=K.initializers.he_normal())(inputs)\n layer = K.layers.Dropout(dr)(layer)\n layer = K.layers.Dense(units=layer_units1, activation='relu',\n kernel_initializer=K.initializers.he_normal())(layer)\n layer = K.layers.Dense(units=layer_units2, activation='relu',\n kernel_initializer=K.initializers.he_normal())(layer)\n outputs = K.layers.Dense(units=10, activation='softmax',\n kernel_initializer=K.initializers.he_normal())(layer)\n model = K.Model(inputs=inputs, outputs=outputs)\n model.compile(optimizer='adam', loss='categorical_crossentropy',\n metrics=['accuracy'])\n h = model.fit(X, Y, validation_data=(X_test, Y_test), epochs=64,\n verbose=True, batch_size=128, shuffle=True, callbacks=[\n early_stop, learning_rate_0, save_best])\n val_accuracy = np.min(h.history['val_loss'])\n return val_accuracy\n\n def opt(self):\n \"\"\"the optimization function\"\"\"\n search_space = [{'name': 'lr', 'type': 'continuous', 'domain': (\n 0.01, 0.001)}, {'name': 'dr', 'type': 'continuous', 'domain': (\n 0.1, 
0.3)}, {'name': 'layer_units0', 'type': 'discrete',\n 'domain': (32, 64, 128, 256, 512)}, {'name': 'layer_units1',\n 'type': 'discrete', 'domain': (32, 64, 128, 256, 512)}, {'name':\n 'layer_units2', 'type': 'discrete', 'domain': (32, 64, 128, 256,\n 512)}]\n my_bayesian_opt = BayesianOptimization(self.make_model, domain=\n search_space, model_type='GP', initial_design_numdata=1,\n acquisition_type='EI', maximize=False, verbosity=True)\n print('==============================')\n my_bayesian_opt.run_optimization(max_iter=29, report_file='report',\n evaluations_file='evaluation', models_file='models')\n print('PLOTTING')\n my_bayesian_opt.plot_acquisition()\n my_bayesian_opt.plot_convergence()\n print('==============================')\n\n\ndef preprocess_data(X, Y):\n \"\"\"The data preprocessing\"\"\"\n Y_p = K.utils.to_categorical(Y[:])\n X_p = K.applications.xception.preprocess_input(X)\n loaded_model = K.models.load_model('frozen_layers.h5')\n frozen_layers = K.Model(inputs=loaded_model.input, outputs=loaded_model\n .layers[-2].output)\n X_p = frozen_layers.predict(X_p, verbose=True)\n with open('Preprocessed_data_Xs', 'wb') as my_file0:\n pickle.dump(X_p, my_file0)\n with open('Preprocessed_data_Ys', 'wb') as my_file1:\n pickle.dump(Y_p, my_file1)\n return X_p, Y_p\n",
"step-5": "#!/usr/bin/env python3\n\"\"\"Transfer learning with xception\"\"\"\nimport tensorflow.keras as K\nfrom GPyOpt.methods import BayesianOptimization\nimport pickle\nimport os\nimport numpy as np\n\n\nclass my_model():\n \"\"\"A model bassed on xception\"\"\"\n\n def make_model(self, param):\n \"\"\"makes the model\"\"\"\n self.lr = param[0][0]\n dr = param[0][1]\n layer_units0 = param[0][2]\n layer_units1 = param[0][3]\n layer_units2 = param[0][4]\n\n def learning_rate(epoch):\n \"\"\"The learning rate scheduler\"\"\"\n self.lr = self.lr / 1.00000001\n return self.lr\n\n \"\"\"Do not touch from here...\"\"\"\n # load data\n (X, Y), (X_test, Y_test) = K.datasets.cifar10.load_data()\n # uncomment for rapid test\n # X = X[0:256, :, :, :]\n # Y = Y[0:256, :]\n # X_test = X_test[0:256, :, :, :]\n # Y_test = Y_test[0:256, :]\n # preprocessing\n Y = K.utils.to_categorical(Y[:])\n X = K.applications.xception.preprocess_input(X)\n Y_test = K.utils.to_categorical(Y_test[:])\n X_test = K.applications.xception.preprocess_input(X_test)\n # data format\n df = \"channels_last\"\n # call backs\n save_best = K.callbacks.ModelCheckpoint(filepath=\"model_lr{:.2f}_dr{:.2f}_l0{}_l1{}_l2{}.h5\"\n .format(self.lr,\n dr,\n layer_units0,\n layer_units1,\n layer_units2),\n monitor=\"val_loss\",\n save_best_only=True,\n )\n early_stop = K.callbacks.EarlyStopping(monitor=\"val_loss\",\n patience=7\n )\n learning_rate_0 = K.callbacks.LearningRateScheduler(learning_rate,\n verbose=1\n )\n # input layer and lambda layer save and load for faster training\n try:\n loaded_model = K.models.load_model(\"frozen_layers.h5\")\n print(\"Loaded frozen layers!\")\n except Exception as e:\n if isinstance(e, OSError):\n pass\n else:\n exit()\n print(\"Failed to load frozen layers.\")\n inputs = K.Input(shape=(32, 32, 3))\n l = K.layers.Lambda(lambda X:\n K.backend.resize_images(X,\n height_factor=7,\n width_factor=7,\n data_format=\"channels_last\"\n ))(inputs)\n # Transfer learning layers\n 
xception = K.applications.Xception(include_top=False,\n input_tensor=l,\n weights=\"imagenet\",\n pooling=\"max\"\n )\n # freeze the resnet50 layers\n for layer in xception.layers:\n layer.trainable = False\n # get outputs\n outputs = xception.layers[-1].output\n outputs = K.layers.Dense(units=10,\n activation=\"softmax\",\n kernel_initializer=K.initializers.he_normal()\n )(outputs)\n # compile frozen model\n model = K.Model(inputs=inputs, outputs=outputs)\n model.compile(optimizer=\"adam\",\n loss=\"categorical_crossentropy\",\n metrics=[\"accuracy\"])\n model.fit(X,\n Y,\n epochs=1,\n verbose=True,\n batch_size=128\n )\n model.save(\"frozen_layers.h5\")\n loaded_model = K.models.load_model(\"frozen_layers.h5\")\n except MemoryError(\"Try lowering the batch size\"):\n exit()\n # set up new model\n if os.path.exists(\"X_inputs\") and os.path.exists(\"X_test_inputs\"):\n with open(\"X_inputs\", \"rb\") as X_file:\n X = pickle.load(X_file)\n with open(\"X_test_inputs\", \"rb\") as X_test_file:\n X_test = pickle.load(X_test_file)\n else:\n frozen_layers = K.Model(inputs=loaded_model.input,\n outputs=loaded_model.layers[-2].output\n )\n X = frozen_layers.predict(X,\n verbose=True\n )\n X_test = frozen_layers.predict(X_test,\n verbose=True\n )\n with open(\"X_inputs\", \"wb\") as X_file:\n pickle.dump(X, X_file)\n with open(\"X_test_inputs\", \"wb\") as X_test_file:\n pickle.dump(X_test, X_test_file)\n\n # inputs\n inputs = K.Input((2048,))\n \"\"\"... 
to here!!!\"\"\"\n # new layers here\n layer = K.layers.Dense(units=layer_units0,\n activation=\"relu\",\n kernel_initializer=K.initializers.he_normal()\n )(inputs)\n layer = K.layers.Dropout(dr)(layer)\n layer = K.layers.Dense(units=layer_units1,\n activation=\"relu\",\n kernel_initializer=K.initializers.he_normal()\n )(layer)\n # layer = K.layers.Dropout(dr)(layer)\n layer = K.layers.Dense(units=layer_units2,\n activation=\"relu\",\n kernel_initializer=K.initializers.he_normal()\n )(layer)\n # layer = K.layers.Dropout(dr)(layer)\n outputs = K.layers.Dense(units=10,\n activation=\"softmax\",\n kernel_initializer=K.initializers.he_normal()\n )(layer)\n model = K.Model(inputs=inputs, outputs=outputs)\n model.compile(optimizer=\"adam\",\n loss=\"categorical_crossentropy\",\n metrics=[\"accuracy\"])\n # train\n h = model.fit(X,\n Y,\n validation_data=(X_test, Y_test),\n epochs=64,\n verbose=True,\n batch_size=128,\n shuffle=True,\n callbacks=[early_stop, learning_rate_0, save_best]\n )\n\n val_accuracy = np.min(h.history[\"val_loss\"])\n\n return val_accuracy\n\n def opt(self):\n \"\"\"the optimization function\"\"\"\n search_space = [\n {\"name\": \"lr\", \"type\": \"continuous\", \"domain\": (0.01, 0.001)},\n {\"name\": \"dr\", \"type\": \"continuous\", \"domain\": (0.1, 0.3)},\n {\"name\": \"layer_units0\", \"type\": \"discrete\", \"domain\": (32, 64, 128, 256, 512)},\n {\"name\": \"layer_units1\", \"type\": \"discrete\", \"domain\": (32, 64, 128, 256, 512)},\n {\"name\": \"layer_units2\", \"type\": \"discrete\", \"domain\": (32, 64, 128, 256, 512)}\n ]\n my_bayesian_opt = BayesianOptimization(self.make_model,\n domain=search_space,\n model_type=\"GP\",\n initial_design_numdata=1,\n acquisition_type=\"EI\",\n maximize=False,\n verbosity=True\n )\n print(\"==============================\")\n my_bayesian_opt.run_optimization(max_iter=29,\n report_file=\"report\",\n evaluations_file=\"evaluation\",\n models_file=\"models\")\n print(\"PLOTTING\")\n 
my_bayesian_opt.plot_acquisition()\n my_bayesian_opt.plot_convergence()\n print(\"==============================\")\n\n\ndef preprocess_data(X, Y):\n \"\"\"The data preprocessing\"\"\"\n Y_p = K.utils.to_categorical(Y[:])\n X_p = K.applications.xception.preprocess_input(X)\n loaded_model = K.models.load_model(\"frozen_layers.h5\")\n frozen_layers = K.Model(inputs=loaded_model.input,\n outputs=loaded_model.layers[-2].output\n )\n X_p = frozen_layers.predict(X_p,\n verbose=True\n )\n with open(\"Preprocessed_data_Xs\", \"wb\") as my_file0:\n pickle.dump(X_p, my_file0)\n with open(\"Preprocessed_data_Ys\", \"wb\") as my_file1:\n pickle.dump(Y_p, my_file1)\n return X_p, Y_p\n",
"step-ids": [
3,
4,
5,
6,
7
]
}
|
[
3,
4,
5,
6,
7
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
print(dir(math))
<|reserved_special_token_1|>
import math
print(dir(math))
<|reserved_special_token_1|>
import math
print(dir(math))
# Prints a list of entities residing in the math module
|
flexible
|
{
"blob_id": "94056e8920d265831da67bd1d999330a47a7ef0d",
"index": 1991,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nprint(dir(math))\n",
"step-3": "import math\nprint(dir(math))\n",
"step-4": "import math\nprint(dir(math))\n\n# Prints a list of entities residing in the math module",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
# coding=UTF-8
"""
View for managing accounts
"""
from django import forms
from django.contrib import messages
from django.core.paginator import Paginator
from django.http import Http404, HttpResponse
from django.shortcuts import redirect

from athena.core import render_to_response
from athena.users import must_be_admin
from athena.users.models import User
def klist(**kwargs):
    """Return *kwargs* extended with the sidebar user lists.

    Adds 'teachers' (status 1 users, demo accounts filtered out) and
    'admins' (status 2 users) so templates can render the account tree.
    """
    teachers = [u for u in User.objects.filter(status=1) if not u.is_demo()]
    kwargs['teachers'] = teachers
    kwargs['admins'] = User.objects.filter(status=2)
    return kwargs
@must_be_admin
def list(request):
    """Render the account-management overview page (admins only)."""
    context = klist()
    return render_to_response('radmin/manage_accounts_list.html', request, **context)
@must_be_admin
def account(request, account_id):
    """Display and update a single user account.

    GET renders the edit form; POST validates and saves it.  Staff
    accounts (status != 0) and student accounts use different templates.
    Raises Http404 when *account_id* is not a valid id of an existing
    user.
    """
    try:
        acc = User.objects.get(id=int(account_id))
    except (User.DoesNotExist, ValueError, TypeError):
        # Narrowed from a bare except: only "no such user" / "not an int"
        # should become a 404; anything else is a real error.
        raise Http404

    class AccountBaseForm(forms.ModelForm):
        # Minimal edit form: only the fields an admin may change here.
        class Meta:
            model = User
            fields = ['name', 'surname', 'number']
            widgets = {
                'name': forms.TextInput(),
                'surname': forms.TextInput(),
            }

    if request.method == 'POST':
        form = AccountBaseForm(request.POST, instance=acc)
        if form.is_valid():
            form.save()
            messages.add_message(request, messages.SUCCESS, u'Zapisano.')
    else:
        form = AccountBaseForm(instance=acc)

    if acc.status != 0:
        # Teacher/admin account: include the sidebar lists from klist().
        return render_to_response('radmin/manage_accounts_acc.html', request, **klist(
            account=acc,
            selected_user_id=acc.id,
            form=form))
    else:
        # Student account: show page 1 of the student list (30 per page).
        return render_to_response('radmin/manage_accounts_students_acc.html', request,
            account=acc,
            selected_user_id=acc.id,
            form=form,
            page=Paginator(User.objects.filter(status=0).order_by('surname', 'name'), 30).page(1))
@must_be_admin
def reset_pwd(request, account_id):
    """Set a random 7-character password on the account (POST only).

    Returns HTTP 400 for non-POST requests, 404 for an unknown or
    malformed *account_id*; on success redirects back to the account
    page with the new password shown in a flash message.
    """
    if request.method != 'POST':
        return HttpResponse(status=400)

    try:
        acc = User.objects.get(id=int(account_id))
    except (User.DoesNotExist, ValueError, TypeError):
        raise Http404

    # Password material must come from the cryptographic RNG, not `random`.
    from secrets import choice
    randompass = ''.join([choice('1234567890qwertyupasdfghjklzxcvbnm') for i in range(7)])

    acc.set_password(randompass)

    messages.add_message(request, messages.SUCCESS, u'Nowe hasło to %s' % (randompass, ))

    return redirect('/admin/accounts/%s/' % (acc.id, ))
@must_be_admin
def su(request, account_id):
    """Login as this user (POST only).

    Logs the current admin out, logs in as the target account, and
    redirects to the site root.  404 for unknown/malformed ids.
    """
    if request.method != 'POST':
        return HttpResponse(status=400)

    try:
        acc = User.objects.get(id=int(account_id))
    except (User.DoesNotExist, ValueError, TypeError):
        raise Http404

    request.logout()
    request.login(acc.login)

    messages.add_message(request, messages.SUCCESS, u'Zalogowano jako %s' % (acc.login, ))

    return redirect('/')
@must_be_admin
def delete(request, account_id):
    """Delete an account (POST only); built-in accounts are protected.

    Redirects back to the account page with an error for the three
    built-in logins, otherwise deletes the account and redirects to the
    account list.
    """
    if request.method != 'POST':
        return HttpResponse(status=400)

    try:
        acc = User.objects.get(id=int(account_id))
    except (User.DoesNotExist, ValueError, TypeError):
        raise Http404

    if acc.login in ('demo@example.com', 'teacher@example.com', 'root@example.com'):
        messages.add_message(request, messages.ERROR, u'Nie można usunąć konta wbudowanego')
        return redirect('/admin/accounts/%s/' % (acc.id, ))

    if acc.status == 1:
        # This is a teacher. You should reparent all of it's tests
        # and groups to user to teacher@example.com
        # TODO(review): reparenting is not implemented yet — deleting a
        # teacher currently orphans their tests/groups.
        pass

    messages.add_message(request, messages.SUCCESS, u'Konto "%s %s" usunięte.' % (acc.name, acc.surname))

    acc.delete()

    return redirect('/admin/accounts/')
@must_be_admin
def create(request):
    """Create a teacher or administrator account with a random password.

    GET renders the form; valid POST creates the user, sets a random
    7-character password, shows it in a flash message and redirects to
    the new account's page.
    """
    class NewAccountForm(forms.Form):
        _CHOICE = ((1, 'Nauczyciel'), (2, 'Adminstrator'))
        login = forms.EmailField(label=u'E-mail')
        name = forms.CharField(label=u'Imię', required=False)
        surname = forms.CharField(label=u'Nazwisko', required=False)
        status = forms.ChoiceField(choices=_CHOICE, initial=1, label=u'Typ')

    if request.method == 'POST':
        form = NewAccountForm(request.POST)

        if form.is_valid():
            # Grab a random password from the cryptographic RNG.
            from secrets import choice
            randompass = ''.join([choice('1234567890qwertyupasdfghjklzxcvbnm') for i in range(7)])

            u = User(login=form.cleaned_data['login'],
                     name=form.cleaned_data['name'],
                     surname=form.cleaned_data['surname'],
                     status=form.cleaned_data['status'])
            u.save()
            u.set_password(randompass)

            messages.add_message(request, messages.SUCCESS, u'Konto stworzone. Nowe hasło to %s' % (randompass, ))

            return redirect('/admin/accounts/%s/' % (u.id, ))

    else:
        form = NewAccountForm()

    return render_to_response('radmin/manage_accounts_add.html', request, **klist(
        selected_user_id='create',
        form=form))
from django.core.paginator import Paginator
@must_be_admin
def view_students(request, page='1'):
    """Render one 30-row page of the student list, sorted by name."""
    page_number = int(page)
    everyone = User.objects.filter(status=0).order_by('surname', 'name')
    students = [stud for stud in everyone if not stud.is_demo()]
    paginator = Paginator(students, 30)
    return render_to_response('radmin/manage_accounts_students_list.html', request,
        page=paginator.page(page_number))
|
normal
|
{
"blob_id": "a01ca49c3fa8ea76de2880c1b04bf15ccd341edd",
"index": 924,
"step-1": "<mask token>\n\n\ndef klist(**kwargs):\n kwargs.update({'teachers': [x for x in User.objects.filter(status=1) if\n not x.is_demo()], 'admins': User.objects.filter(status=2)})\n return kwargs\n\n\n<mask token>\n\n\n@must_be_admin\ndef account(request, account_id):\n try:\n acc = User.objects.get(id=int(account_id))\n except:\n raise Http404\n\n\n class AccountBaseForm(forms.ModelForm):\n\n\n class Meta:\n model = User\n fields = ['name', 'surname', 'number']\n widgets = {'name': forms.TextInput(), 'surname': forms.TextInput()}\n if request.method == 'POST':\n form = AccountBaseForm(request.POST, instance=acc)\n if form.is_valid():\n form.save()\n messages.add_message(request, messages.SUCCESS, u'Zapisano.')\n else:\n form = AccountBaseForm(instance=acc)\n if acc.status != 0:\n return render_to_response('radmin/manage_accounts_acc.html',\n request, **klist(account=acc, selected_user_id=acc.id, form=form))\n else:\n return render_to_response('radmin/manage_accounts_students_acc.html',\n request, account=acc, selected_user_id=acc.id, form=form, page=\n Paginator(User.objects.filter(status=0).order_by('surname',\n 'name'), 30).page(1))\n\n\n<mask token>\n\n\n@must_be_admin\ndef view_students(request, page='1'):\n page = int(page)\n students = User.objects.filter(status=0).order_by('surname', 'name')\n students = [x for x in students if not x.is_demo()]\n p = Paginator(students, 30)\n cpage = p.page(page)\n return render_to_response('radmin/manage_accounts_students_list.html',\n request, page=cpage)\n",
"step-2": "<mask token>\n\n\ndef klist(**kwargs):\n kwargs.update({'teachers': [x for x in User.objects.filter(status=1) if\n not x.is_demo()], 'admins': User.objects.filter(status=2)})\n return kwargs\n\n\n<mask token>\n\n\n@must_be_admin\ndef account(request, account_id):\n try:\n acc = User.objects.get(id=int(account_id))\n except:\n raise Http404\n\n\n class AccountBaseForm(forms.ModelForm):\n\n\n class Meta:\n model = User\n fields = ['name', 'surname', 'number']\n widgets = {'name': forms.TextInput(), 'surname': forms.TextInput()}\n if request.method == 'POST':\n form = AccountBaseForm(request.POST, instance=acc)\n if form.is_valid():\n form.save()\n messages.add_message(request, messages.SUCCESS, u'Zapisano.')\n else:\n form = AccountBaseForm(instance=acc)\n if acc.status != 0:\n return render_to_response('radmin/manage_accounts_acc.html',\n request, **klist(account=acc, selected_user_id=acc.id, form=form))\n else:\n return render_to_response('radmin/manage_accounts_students_acc.html',\n request, account=acc, selected_user_id=acc.id, form=form, page=\n Paginator(User.objects.filter(status=0).order_by('surname',\n 'name'), 30).page(1))\n\n\n@must_be_admin\ndef reset_pwd(request, account_id):\n if request.method != 'POST':\n return HttpResponse(status=400)\n try:\n acc = User.objects.get(id=int(account_id))\n except:\n raise Http404\n from random import choice\n randompass = ''.join([choice('1234567890qwertyupasdfghjklzxcvbnm') for\n i in range(7)])\n acc.set_password(randompass)\n messages.add_message(request, messages.SUCCESS, u'Nowe hasło to %s' % (\n randompass,))\n return redirect('/admin/accounts/%s/' % (acc.id,))\n\n\n@must_be_admin\ndef su(request, account_id):\n \"\"\"Login as this user\"\"\"\n if request.method != 'POST':\n return HttpResponse(status=400)\n try:\n acc = User.objects.get(id=int(account_id))\n except:\n raise Http404\n request.logout()\n request.login(acc.login)\n messages.add_message(request, messages.SUCCESS, u'Zalogowano jako %s' 
%\n (acc.login,))\n return redirect('/')\n\n\n@must_be_admin\ndef delete(request, account_id):\n if request.method != 'POST':\n return HttpResponse(status=400)\n try:\n acc = User.objects.get(id=int(account_id))\n except:\n raise Http404\n if acc.login in ('demo@example.com', 'teacher@example.com',\n 'root@example.com'):\n messages.add_message(request, messages.ERROR,\n u'Nie można usunąć konta wbudowanego')\n return redirect('/admin/accounts/%s/' % (acc.id,))\n if acc.status == 1:\n pass\n messages.add_message(request, messages.SUCCESS, \n u'Konto \"%s %s\" usunięte.' % (acc.name, acc.surname))\n acc.delete()\n return redirect('/admin/accounts/')\n\n\n<mask token>\n\n\n@must_be_admin\ndef view_students(request, page='1'):\n page = int(page)\n students = User.objects.filter(status=0).order_by('surname', 'name')\n students = [x for x in students if not x.is_demo()]\n p = Paginator(students, 30)\n cpage = p.page(page)\n return render_to_response('radmin/manage_accounts_students_list.html',\n request, page=cpage)\n",
"step-3": "<mask token>\n\n\ndef klist(**kwargs):\n kwargs.update({'teachers': [x for x in User.objects.filter(status=1) if\n not x.is_demo()], 'admins': User.objects.filter(status=2)})\n return kwargs\n\n\n@must_be_admin\ndef list(request):\n return render_to_response('radmin/manage_accounts_list.html', request,\n **klist())\n\n\n@must_be_admin\ndef account(request, account_id):\n try:\n acc = User.objects.get(id=int(account_id))\n except:\n raise Http404\n\n\n class AccountBaseForm(forms.ModelForm):\n\n\n class Meta:\n model = User\n fields = ['name', 'surname', 'number']\n widgets = {'name': forms.TextInput(), 'surname': forms.TextInput()}\n if request.method == 'POST':\n form = AccountBaseForm(request.POST, instance=acc)\n if form.is_valid():\n form.save()\n messages.add_message(request, messages.SUCCESS, u'Zapisano.')\n else:\n form = AccountBaseForm(instance=acc)\n if acc.status != 0:\n return render_to_response('radmin/manage_accounts_acc.html',\n request, **klist(account=acc, selected_user_id=acc.id, form=form))\n else:\n return render_to_response('radmin/manage_accounts_students_acc.html',\n request, account=acc, selected_user_id=acc.id, form=form, page=\n Paginator(User.objects.filter(status=0).order_by('surname',\n 'name'), 30).page(1))\n\n\n@must_be_admin\ndef reset_pwd(request, account_id):\n if request.method != 'POST':\n return HttpResponse(status=400)\n try:\n acc = User.objects.get(id=int(account_id))\n except:\n raise Http404\n from random import choice\n randompass = ''.join([choice('1234567890qwertyupasdfghjklzxcvbnm') for\n i in range(7)])\n acc.set_password(randompass)\n messages.add_message(request, messages.SUCCESS, u'Nowe hasło to %s' % (\n randompass,))\n return redirect('/admin/accounts/%s/' % (acc.id,))\n\n\n@must_be_admin\ndef su(request, account_id):\n \"\"\"Login as this user\"\"\"\n if request.method != 'POST':\n return HttpResponse(status=400)\n try:\n acc = User.objects.get(id=int(account_id))\n except:\n raise Http404\n 
request.logout()\n request.login(acc.login)\n messages.add_message(request, messages.SUCCESS, u'Zalogowano jako %s' %\n (acc.login,))\n return redirect('/')\n\n\n@must_be_admin\ndef delete(request, account_id):\n if request.method != 'POST':\n return HttpResponse(status=400)\n try:\n acc = User.objects.get(id=int(account_id))\n except:\n raise Http404\n if acc.login in ('demo@example.com', 'teacher@example.com',\n 'root@example.com'):\n messages.add_message(request, messages.ERROR,\n u'Nie można usunąć konta wbudowanego')\n return redirect('/admin/accounts/%s/' % (acc.id,))\n if acc.status == 1:\n pass\n messages.add_message(request, messages.SUCCESS, \n u'Konto \"%s %s\" usunięte.' % (acc.name, acc.surname))\n acc.delete()\n return redirect('/admin/accounts/')\n\n\n<mask token>\n\n\n@must_be_admin\ndef view_students(request, page='1'):\n page = int(page)\n students = User.objects.filter(status=0).order_by('surname', 'name')\n students = [x for x in students if not x.is_demo()]\n p = Paginator(students, 30)\n cpage = p.page(page)\n return render_to_response('radmin/manage_accounts_students_list.html',\n request, page=cpage)\n",
"step-4": "<mask token>\n\n\ndef klist(**kwargs):\n kwargs.update({'teachers': [x for x in User.objects.filter(status=1) if\n not x.is_demo()], 'admins': User.objects.filter(status=2)})\n return kwargs\n\n\n@must_be_admin\ndef list(request):\n return render_to_response('radmin/manage_accounts_list.html', request,\n **klist())\n\n\n@must_be_admin\ndef account(request, account_id):\n try:\n acc = User.objects.get(id=int(account_id))\n except:\n raise Http404\n\n\n class AccountBaseForm(forms.ModelForm):\n\n\n class Meta:\n model = User\n fields = ['name', 'surname', 'number']\n widgets = {'name': forms.TextInput(), 'surname': forms.TextInput()}\n if request.method == 'POST':\n form = AccountBaseForm(request.POST, instance=acc)\n if form.is_valid():\n form.save()\n messages.add_message(request, messages.SUCCESS, u'Zapisano.')\n else:\n form = AccountBaseForm(instance=acc)\n if acc.status != 0:\n return render_to_response('radmin/manage_accounts_acc.html',\n request, **klist(account=acc, selected_user_id=acc.id, form=form))\n else:\n return render_to_response('radmin/manage_accounts_students_acc.html',\n request, account=acc, selected_user_id=acc.id, form=form, page=\n Paginator(User.objects.filter(status=0).order_by('surname',\n 'name'), 30).page(1))\n\n\n@must_be_admin\ndef reset_pwd(request, account_id):\n if request.method != 'POST':\n return HttpResponse(status=400)\n try:\n acc = User.objects.get(id=int(account_id))\n except:\n raise Http404\n from random import choice\n randompass = ''.join([choice('1234567890qwertyupasdfghjklzxcvbnm') for\n i in range(7)])\n acc.set_password(randompass)\n messages.add_message(request, messages.SUCCESS, u'Nowe hasło to %s' % (\n randompass,))\n return redirect('/admin/accounts/%s/' % (acc.id,))\n\n\n@must_be_admin\ndef su(request, account_id):\n \"\"\"Login as this user\"\"\"\n if request.method != 'POST':\n return HttpResponse(status=400)\n try:\n acc = User.objects.get(id=int(account_id))\n except:\n raise Http404\n 
request.logout()\n request.login(acc.login)\n messages.add_message(request, messages.SUCCESS, u'Zalogowano jako %s' %\n (acc.login,))\n return redirect('/')\n\n\n@must_be_admin\ndef delete(request, account_id):\n if request.method != 'POST':\n return HttpResponse(status=400)\n try:\n acc = User.objects.get(id=int(account_id))\n except:\n raise Http404\n if acc.login in ('demo@example.com', 'teacher@example.com',\n 'root@example.com'):\n messages.add_message(request, messages.ERROR,\n u'Nie można usunąć konta wbudowanego')\n return redirect('/admin/accounts/%s/' % (acc.id,))\n if acc.status == 1:\n pass\n messages.add_message(request, messages.SUCCESS, \n u'Konto \"%s %s\" usunięte.' % (acc.name, acc.surname))\n acc.delete()\n return redirect('/admin/accounts/')\n\n\n@must_be_admin\ndef create(request):\n\n\n class NewAccountForm(forms.Form):\n _CHOICE = (1, 'Nauczyciel'), (2, 'Adminstrator')\n login = forms.EmailField(label=u'E-mail')\n name = forms.CharField(label=u'Imię', required=False)\n surname = forms.CharField(label=u'Nazwisko', required=False)\n status = forms.ChoiceField(choices=_CHOICE, initial=1, label=u'Typ')\n if request.method == 'POST':\n form = NewAccountForm(request.POST)\n if form.is_valid():\n from random import choice\n randompass = ''.join([choice(\n '1234567890qwertyupasdfghjklzxcvbnm') for i in range(7)])\n u = User(login=form.cleaned_data['login'], name=form.\n cleaned_data['name'], surname=form.cleaned_data['surname'],\n status=form.cleaned_data['status'])\n u.save()\n u.set_password(randompass)\n messages.add_message(request, messages.SUCCESS, \n u'Konto stworzone. 
Nowe hasło to %s' % (randompass,))\n return redirect('/admin/accounts/%s/' % (u.id,))\n else:\n form = NewAccountForm()\n return render_to_response('radmin/manage_accounts_add.html', request,\n **klist(selected_user_id='create', form=form))\n\n\n<mask token>\n\n\n@must_be_admin\ndef view_students(request, page='1'):\n page = int(page)\n students = User.objects.filter(status=0).order_by('surname', 'name')\n students = [x for x in students if not x.is_demo()]\n p = Paginator(students, 30)\n cpage = p.page(page)\n return render_to_response('radmin/manage_accounts_students_list.html',\n request, page=cpage)\n",
"step-5": "# coding=UTF-8\n\"\"\"\nView for managing accounts\n\"\"\"\n\nfrom django.contrib import messages\nfrom django.http import Http404, HttpResponse\nfrom django.shortcuts import redirect\nfrom django import forms\nfrom athena.core import render_to_response\nfrom athena.users.models import User\nfrom athena.users import must_be_admin\n\n\ndef klist(**kwargs):\n kwargs.update({\n 'teachers': [x for x in User.objects.filter(status=1) if not x.is_demo()],\n 'admins': User.objects.filter(status=2),\n })\n return kwargs\n\n\n@must_be_admin\ndef list(request):\n return render_to_response('radmin/manage_accounts_list.html', request, **klist())\n\n@must_be_admin\ndef account(request, account_id):\n try:\n acc = User.objects.get(id=int(account_id))\n except:\n raise Http404\n\n class AccountBaseForm(forms.ModelForm):\n class Meta:\n model = User\n fields = ['name', 'surname', 'number']\n widgets = {\n 'name': forms.TextInput(),\n 'surname': forms.TextInput(),\n }\n\n if request.method == 'POST':\n form = AccountBaseForm(request.POST, instance=acc)\n\n if form.is_valid():\n form.save()\n messages.add_message(request, messages.SUCCESS, u'Zapisano.')\n\n else:\n form = AccountBaseForm(instance=acc)\n\n if acc.status != 0:\n return render_to_response('radmin/manage_accounts_acc.html', request, **klist(\n account=acc,\n selected_user_id=acc.id,\n form=form))\n else:\n return render_to_response('radmin/manage_accounts_students_acc.html', request,\n account=acc,\n selected_user_id=acc.id,\n form=form,\n page=Paginator(User.objects.filter(status=0).order_by('surname', 'name'), 30).page(1))\n\n\n@must_be_admin\ndef reset_pwd(request, account_id):\n if request.method != 'POST':\n return HttpResponse(status=400)\n\n try:\n acc = User.objects.get(id=int(account_id))\n except:\n raise Http404\n\n from random import choice\n randompass = ''.join([choice('1234567890qwertyupasdfghjklzxcvbnm') for i in range(7)])\n\n acc.set_password(randompass)\n\n messages.add_message(request, 
messages.SUCCESS, u'Nowe hasło to %s' % (randompass, ))\n\n return redirect('/admin/accounts/%s/' % (acc.id, ))\n\n\n@must_be_admin\ndef su(request, account_id):\n \"\"\"Login as this user\"\"\"\n if request.method != 'POST':\n return HttpResponse(status=400)\n\n try:\n acc = User.objects.get(id=int(account_id))\n except:\n raise Http404\n\n request.logout()\n request.login(acc.login)\n\n messages.add_message(request, messages.SUCCESS, u'Zalogowano jako %s' % (acc.login, ))\n\n return redirect('/')\n\n@must_be_admin\ndef delete(request, account_id):\n if request.method != 'POST':\n return HttpResponse(status=400)\n\n try:\n acc = User.objects.get(id=int(account_id))\n except:\n raise Http404\n\n if acc.login in ('demo@example.com', 'teacher@example.com', 'root@example.com'):\n messages.add_message(request, messages.ERROR, u'Nie można usunąć konta wbudowanego')\n return redirect('/admin/accounts/%s/' % (acc.id, ))\n\n if acc.status == 1:\n # This is a teacher. You should reparent all of it's tests\n # and groups to user to teacher@example.com\n pass\n\n messages.add_message(request, messages.SUCCESS, u'Konto \"%s %s\" usunięte.' 
% (acc.name, acc.surname))\n\n acc.delete()\n\n return redirect('/admin/accounts/')\n\n\n@must_be_admin\ndef create(request):\n\n class NewAccountForm(forms.Form):\n _CHOICE = ((1, 'Nauczyciel'), (2, 'Adminstrator'))\n login = forms.EmailField(label=u'E-mail')\n name = forms.CharField(label=u'Imię', required=False) \n surname = forms.CharField(label=u'Nazwisko', required=False)\n status = forms.ChoiceField(choices=_CHOICE, initial=1, label=u'Typ')\n\n if request.method == 'POST':\n form = NewAccountForm(request.POST)\n\n if form.is_valid():\n\n # grab a random password\n from random import choice\n randompass = ''.join([choice('1234567890qwertyupasdfghjklzxcvbnm') for i in range(7)])\n\n u = User(login=form.cleaned_data['login'],\n name=form.cleaned_data['name'],\n surname=form.cleaned_data['surname'],\n status=form.cleaned_data['status'])\n u.save()\n u.set_password(randompass)\n\n messages.add_message(request, messages.SUCCESS, u'Konto stworzone. Nowe hasło to %s' % (randompass, ))\n\n return redirect('/admin/accounts/%s/' % (u.id, ))\n\n else:\n form = NewAccountForm()\n\n return render_to_response('radmin/manage_accounts_add.html', request, **klist(\n selected_user_id='create',\n form=form))\n\nfrom django.core.paginator import Paginator\n\n@must_be_admin\ndef view_students(request, page='1'):\n page = int(page)\n students = User.objects.filter(status=0).order_by('surname', 'name')\n students = [x for x in students if not x.is_demo()]\n p = Paginator(students, 30)\n\n cpage = p.page(page)\n\n return render_to_response('radmin/manage_accounts_students_list.html', request,\n page=cpage)",
"step-ids": [
3,
6,
7,
8,
10
]
}
|
[
3,
6,
7,
8,
10
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
router.register('post', PostViewSet)
router.register('post_upvote', UpvoteView)
router.register('comment', CommentViewSet)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
router = SimpleRouter()
router.register('post', PostViewSet)
router.register('post_upvote', UpvoteView)
router.register('comment', CommentViewSet)
urlpatterns = [path('', include(router.urls))]
<|reserved_special_token_1|>
from django.urls import path, include
from rest_framework.routers import SimpleRouter
from board_api.views import PostViewSet, UpvoteView, CommentViewSet
router = SimpleRouter()
router.register('post', PostViewSet)
router.register('post_upvote', UpvoteView)
router.register('comment', CommentViewSet)
urlpatterns = [path('', include(router.urls))]
<|reserved_special_token_1|>
from django.urls import path, include
from rest_framework.routers import SimpleRouter
from board_api.views import PostViewSet, UpvoteView, CommentViewSet
# Wire each viewset onto the API router under its URL prefix.
router = SimpleRouter()
for prefix, viewset in (
    ("post", PostViewSet),
    ("post_upvote", UpvoteView),
    ("comment", CommentViewSet),
):
    router.register(prefix, viewset)

urlpatterns = [
    path("", include(router.urls)),
]
|
flexible
|
{
"blob_id": "db309283137383cd698f235e7326c6e5c50f6cf3",
"index": 6671,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nrouter.register('post', PostViewSet)\nrouter.register('post_upvote', UpvoteView)\nrouter.register('comment', CommentViewSet)\n<mask token>\n",
"step-3": "<mask token>\nrouter = SimpleRouter()\nrouter.register('post', PostViewSet)\nrouter.register('post_upvote', UpvoteView)\nrouter.register('comment', CommentViewSet)\nurlpatterns = [path('', include(router.urls))]\n",
"step-4": "from django.urls import path, include\nfrom rest_framework.routers import SimpleRouter\nfrom board_api.views import PostViewSet, UpvoteView, CommentViewSet\nrouter = SimpleRouter()\nrouter.register('post', PostViewSet)\nrouter.register('post_upvote', UpvoteView)\nrouter.register('comment', CommentViewSet)\nurlpatterns = [path('', include(router.urls))]\n",
"step-5": "from django.urls import path, include\nfrom rest_framework.routers import SimpleRouter\n\nfrom board_api.views import PostViewSet, UpvoteView, CommentViewSet\n\nrouter = SimpleRouter()\n\nrouter.register(r\"post\", PostViewSet)\nrouter.register(r\"post_upvote\", UpvoteView)\nrouter.register(r\"comment\", CommentViewSet)\n\nurlpatterns = [\n path(\"\", include(router.urls)),\n]\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def main():
if len(sys.argv) < 2:
print('usage: sqlite_file ...')
sys.exit()
db_filenames = sys.argv[1:]
num_of_dbs = len(db_filenames)
conn = sqlite3.connect(':memory:')
c = conn.cursor()
for i in range(num_of_dbs):
sql = "ATTACH DATABASE '{}' as db{}".format(db_filenames[i], i)
c.execute(sql)
sql = 'SELECT text'
for i in range(num_of_dbs):
sql += ', SUM(db{}) as db{}'.format(i, i)
sql += ' FROM (\n'
for i in range(num_of_dbs):
if i > 0:
sql += ' UNION\n'
sql += ' SELECT text'
for j in range(num_of_dbs):
if i == j:
sql += ', SUM(end - start)'
else:
sql += ', 0'
sql += ' as db{}'.format(j)
sql += (' FROM db{}.NVTX_EVENTS WHERE text IS NOT NULL GROUP BY text\n'
.format(i))
sql += ') GROUP BY text'
labels = []
durations = []
i = 0
for j in range(num_of_dbs):
durations.append([])
for row in c.execute(sql):
labels.append(row[0])
lst = []
for j in range(num_of_dbs):
durations[j].append(row[1 + j])
i += 1
conn.close()
x = np.arange(len(labels))
width = 1.5 / (num_of_dbs * len(labels))
fig, ax = plt.subplots()
def autolabel(rects):
"""Attach a text label above each bar in *rects*, displaying its height."""
for rect in rects:
height = rect.get_height()
ax.annotate('{:.1f}'.format(height / 1000000000.0), xy=(rect.
get_x() + rect.get_width() / 2, height), xytext=(0, 3),
textcoords='offset points', ha='center', va='bottom')
for i in range(num_of_dbs):
autolabel(ax.bar(-(num_of_dbs * width) / 2 + width / 2 + x + width *
i, durations[i], width * 0.95, label=os.path.splitext(
db_filenames[i])[0]))
plt.xticks(x, labels, rotation=60, rotation_mode='anchor',
horizontalalignment='right', verticalalignment='center')
ax.legend(bbox_to_anchor=(1.1, 1.05))
plt.yticks([])
plt.ylabel('Time(sec)')
x1, x2, y1, y2 = plt.axis()
plt.axis((x1, x2, y1, y2 * 1.05))
plt.tight_layout()
plt.show()
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def main():
if len(sys.argv) < 2:
print('usage: sqlite_file ...')
sys.exit()
db_filenames = sys.argv[1:]
num_of_dbs = len(db_filenames)
conn = sqlite3.connect(':memory:')
c = conn.cursor()
for i in range(num_of_dbs):
sql = "ATTACH DATABASE '{}' as db{}".format(db_filenames[i], i)
c.execute(sql)
sql = 'SELECT text'
for i in range(num_of_dbs):
sql += ', SUM(db{}) as db{}'.format(i, i)
sql += ' FROM (\n'
for i in range(num_of_dbs):
if i > 0:
sql += ' UNION\n'
sql += ' SELECT text'
for j in range(num_of_dbs):
if i == j:
sql += ', SUM(end - start)'
else:
sql += ', 0'
sql += ' as db{}'.format(j)
sql += (' FROM db{}.NVTX_EVENTS WHERE text IS NOT NULL GROUP BY text\n'
.format(i))
sql += ') GROUP BY text'
labels = []
durations = []
i = 0
for j in range(num_of_dbs):
durations.append([])
for row in c.execute(sql):
labels.append(row[0])
lst = []
for j in range(num_of_dbs):
durations[j].append(row[1 + j])
i += 1
conn.close()
x = np.arange(len(labels))
width = 1.5 / (num_of_dbs * len(labels))
fig, ax = plt.subplots()
def autolabel(rects):
"""Attach a text label above each bar in *rects*, displaying its height."""
for rect in rects:
height = rect.get_height()
ax.annotate('{:.1f}'.format(height / 1000000000.0), xy=(rect.
get_x() + rect.get_width() / 2, height), xytext=(0, 3),
textcoords='offset points', ha='center', va='bottom')
for i in range(num_of_dbs):
autolabel(ax.bar(-(num_of_dbs * width) / 2 + width / 2 + x + width *
i, durations[i], width * 0.95, label=os.path.splitext(
db_filenames[i])[0]))
plt.xticks(x, labels, rotation=60, rotation_mode='anchor',
horizontalalignment='right', verticalalignment='center')
ax.legend(bbox_to_anchor=(1.1, 1.05))
plt.yticks([])
plt.ylabel('Time(sec)')
x1, x2, y1, y2 = plt.axis()
plt.axis((x1, x2, y1, y2 * 1.05))
plt.tight_layout()
plt.show()
if __name__ == '__main__':
main()
<|reserved_special_token_1|>
import sys
import os
import sqlite3
from matplotlib import pyplot as plt
import numpy as np
def main():
if len(sys.argv) < 2:
print('usage: sqlite_file ...')
sys.exit()
db_filenames = sys.argv[1:]
num_of_dbs = len(db_filenames)
conn = sqlite3.connect(':memory:')
c = conn.cursor()
for i in range(num_of_dbs):
sql = "ATTACH DATABASE '{}' as db{}".format(db_filenames[i], i)
c.execute(sql)
sql = 'SELECT text'
for i in range(num_of_dbs):
sql += ', SUM(db{}) as db{}'.format(i, i)
sql += ' FROM (\n'
for i in range(num_of_dbs):
if i > 0:
sql += ' UNION\n'
sql += ' SELECT text'
for j in range(num_of_dbs):
if i == j:
sql += ', SUM(end - start)'
else:
sql += ', 0'
sql += ' as db{}'.format(j)
sql += (' FROM db{}.NVTX_EVENTS WHERE text IS NOT NULL GROUP BY text\n'
.format(i))
sql += ') GROUP BY text'
labels = []
durations = []
i = 0
for j in range(num_of_dbs):
durations.append([])
for row in c.execute(sql):
labels.append(row[0])
lst = []
for j in range(num_of_dbs):
durations[j].append(row[1 + j])
i += 1
conn.close()
x = np.arange(len(labels))
width = 1.5 / (num_of_dbs * len(labels))
fig, ax = plt.subplots()
def autolabel(rects):
"""Attach a text label above each bar in *rects*, displaying its height."""
for rect in rects:
height = rect.get_height()
ax.annotate('{:.1f}'.format(height / 1000000000.0), xy=(rect.
get_x() + rect.get_width() / 2, height), xytext=(0, 3),
textcoords='offset points', ha='center', va='bottom')
for i in range(num_of_dbs):
autolabel(ax.bar(-(num_of_dbs * width) / 2 + width / 2 + x + width *
i, durations[i], width * 0.95, label=os.path.splitext(
db_filenames[i])[0]))
plt.xticks(x, labels, rotation=60, rotation_mode='anchor',
horizontalalignment='right', verticalalignment='center')
ax.legend(bbox_to_anchor=(1.1, 1.05))
plt.yticks([])
plt.ylabel('Time(sec)')
x1, x2, y1, y2 = plt.axis()
plt.axis((x1, x2, y1, y2 * 1.05))
plt.tight_layout()
plt.show()
if __name__ == '__main__':
main()
<|reserved_special_token_1|>
#!/usr/bin/python
import sys
import os
import sqlite3
from matplotlib import pyplot as plt
import numpy as np
def main():
    """Compare NVTX range durations across one or more nsys sqlite exports.

    Usage: script.py sqlite_file ...

    Attaches every database to a single in-memory connection, sums the
    ``end - start`` duration per NVTX ``text`` label in each database,
    and draws the totals as a grouped bar chart (one group per label,
    one bar per database), labelled in seconds.
    """
    if len(sys.argv) < 2:
        print('usage: sqlite_file ...')
        sys.exit()

    db_filenames = sys.argv[1:]
    num_of_dbs = len(db_filenames)

    conn = sqlite3.connect(":memory:")
    c = conn.cursor()
    for i in range(num_of_dbs):
        # NOTE(review): the filename is interpolated straight into SQL; a
        # name containing a quote breaks the statement.  Acceptable for a
        # CLI tool run on trusted files, but worth knowing.
        sql = "ATTACH DATABASE '{}' as db{}".format(db_filenames[i], i)
        c.execute(sql)

    # Build one UNION query that yields, per label, one summed-duration
    # column per attached database (zero-filled for the other databases
    # so the outer SUM/GROUP BY can merge the rows).
    sql = 'SELECT text'
    for i in range(num_of_dbs):
        sql += ', SUM(db{}) as db{}'.format(i, i)
    sql += ' FROM (\n'
    for i in range(num_of_dbs):
        if i > 0:
            sql += ' UNION\n'
        sql += ' SELECT text'
        for j in range(num_of_dbs):
            if i == j:
                sql += ', SUM(end - start)'
            else:
                sql += ', 0'
            sql += ' as db{}'.format(j)
        sql += ' FROM db{}.NVTX_EVENTS WHERE text IS NOT NULL GROUP BY text\n'.format(i)
    sql += ') GROUP BY text'

    labels = []
    durations = [[] for _ in range(num_of_dbs)]
    for row in c.execute(sql):
        labels.append(row[0])
        for j in range(num_of_dbs):
            durations[j].append(row[1 + j])
    conn.close()

    x = np.arange(len(labels))
    width = 1.5 / (num_of_dbs * len(labels))

    fig, ax = plt.subplots()

    def autolabel(rects):
        """Attach a text label above each bar in *rects*, displaying its height."""
        for rect in rects:
            height = rect.get_height()
            ax.annotate('{:.1f}'.format(height / 1e9),  # ns -> seconds
                        xy=(rect.get_x() + rect.get_width() / 2, height),
                        xytext=(0, 3),  # 3 points vertical offset
                        textcoords="offset points",
                        ha='center', va='bottom')

    for i in range(num_of_dbs):
        autolabel(ax.bar(-(num_of_dbs * width) / 2 + width / 2 + x + width * i,
                         durations[i], width * 0.95,
                         label=os.path.splitext(db_filenames[i])[0]))

    plt.xticks(x, labels, rotation=60, rotation_mode="anchor",
               horizontalalignment="right", verticalalignment="center")
    ax.legend(bbox_to_anchor=(1.1, 1.05))
    plt.yticks([])
    plt.ylabel('Time(sec)')
    x1, x2, y1, y2 = plt.axis()
    plt.axis((x1, x2, y1, y2 * 1.05))
    plt.tight_layout()
    plt.show()
# plt.savefig(os.path.splitext(db_filenames[0])[0] + ".svg")
# Run only when executed as a script, not when imported as a module.
if __name__ == "__main__":
    main()
|
flexible
|
{
"blob_id": "b24ce9ed2df11df4cbf47949915685c09ec7543a",
"index": 7070,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef main():\n if len(sys.argv) < 2:\n print('usage: sqlite_file ...')\n sys.exit()\n db_filenames = sys.argv[1:]\n num_of_dbs = len(db_filenames)\n conn = sqlite3.connect(':memory:')\n c = conn.cursor()\n for i in range(num_of_dbs):\n sql = \"ATTACH DATABASE '{}' as db{}\".format(db_filenames[i], i)\n c.execute(sql)\n sql = 'SELECT text'\n for i in range(num_of_dbs):\n sql += ', SUM(db{}) as db{}'.format(i, i)\n sql += ' FROM (\\n'\n for i in range(num_of_dbs):\n if i > 0:\n sql += ' UNION\\n'\n sql += ' SELECT text'\n for j in range(num_of_dbs):\n if i == j:\n sql += ', SUM(end - start)'\n else:\n sql += ', 0'\n sql += ' as db{}'.format(j)\n sql += (' FROM db{}.NVTX_EVENTS WHERE text IS NOT NULL GROUP BY text\\n'\n .format(i))\n sql += ') GROUP BY text'\n labels = []\n durations = []\n i = 0\n for j in range(num_of_dbs):\n durations.append([])\n for row in c.execute(sql):\n labels.append(row[0])\n lst = []\n for j in range(num_of_dbs):\n durations[j].append(row[1 + j])\n i += 1\n conn.close()\n x = np.arange(len(labels))\n width = 1.5 / (num_of_dbs * len(labels))\n fig, ax = plt.subplots()\n\n def autolabel(rects):\n \"\"\"Attach a text label above each bar in *rects*, displaying its height.\"\"\"\n for rect in rects:\n height = rect.get_height()\n ax.annotate('{:.1f}'.format(height / 1000000000.0), xy=(rect.\n get_x() + rect.get_width() / 2, height), xytext=(0, 3),\n textcoords='offset points', ha='center', va='bottom')\n for i in range(num_of_dbs):\n autolabel(ax.bar(-(num_of_dbs * width) / 2 + width / 2 + x + width *\n i, durations[i], width * 0.95, label=os.path.splitext(\n db_filenames[i])[0]))\n plt.xticks(x, labels, rotation=60, rotation_mode='anchor',\n horizontalalignment='right', verticalalignment='center')\n ax.legend(bbox_to_anchor=(1.1, 1.05))\n plt.yticks([])\n plt.ylabel('Time(sec)')\n x1, x2, y1, y2 = plt.axis()\n plt.axis((x1, x2, y1, y2 * 1.05))\n plt.tight_layout()\n plt.show()\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef main():\n if len(sys.argv) < 2:\n print('usage: sqlite_file ...')\n sys.exit()\n db_filenames = sys.argv[1:]\n num_of_dbs = len(db_filenames)\n conn = sqlite3.connect(':memory:')\n c = conn.cursor()\n for i in range(num_of_dbs):\n sql = \"ATTACH DATABASE '{}' as db{}\".format(db_filenames[i], i)\n c.execute(sql)\n sql = 'SELECT text'\n for i in range(num_of_dbs):\n sql += ', SUM(db{}) as db{}'.format(i, i)\n sql += ' FROM (\\n'\n for i in range(num_of_dbs):\n if i > 0:\n sql += ' UNION\\n'\n sql += ' SELECT text'\n for j in range(num_of_dbs):\n if i == j:\n sql += ', SUM(end - start)'\n else:\n sql += ', 0'\n sql += ' as db{}'.format(j)\n sql += (' FROM db{}.NVTX_EVENTS WHERE text IS NOT NULL GROUP BY text\\n'\n .format(i))\n sql += ') GROUP BY text'\n labels = []\n durations = []\n i = 0\n for j in range(num_of_dbs):\n durations.append([])\n for row in c.execute(sql):\n labels.append(row[0])\n lst = []\n for j in range(num_of_dbs):\n durations[j].append(row[1 + j])\n i += 1\n conn.close()\n x = np.arange(len(labels))\n width = 1.5 / (num_of_dbs * len(labels))\n fig, ax = plt.subplots()\n\n def autolabel(rects):\n \"\"\"Attach a text label above each bar in *rects*, displaying its height.\"\"\"\n for rect in rects:\n height = rect.get_height()\n ax.annotate('{:.1f}'.format(height / 1000000000.0), xy=(rect.\n get_x() + rect.get_width() / 2, height), xytext=(0, 3),\n textcoords='offset points', ha='center', va='bottom')\n for i in range(num_of_dbs):\n autolabel(ax.bar(-(num_of_dbs * width) / 2 + width / 2 + x + width *\n i, durations[i], width * 0.95, label=os.path.splitext(\n db_filenames[i])[0]))\n plt.xticks(x, labels, rotation=60, rotation_mode='anchor',\n horizontalalignment='right', verticalalignment='center')\n ax.legend(bbox_to_anchor=(1.1, 1.05))\n plt.yticks([])\n plt.ylabel('Time(sec)')\n x1, x2, y1, y2 = plt.axis()\n plt.axis((x1, x2, y1, y2 * 1.05))\n plt.tight_layout()\n plt.show()\n\n\nif __name__ == '__main__':\n 
main()\n",
"step-4": "import sys\nimport os\nimport sqlite3\nfrom matplotlib import pyplot as plt\nimport numpy as np\n\n\ndef main():\n if len(sys.argv) < 2:\n print('usage: sqlite_file ...')\n sys.exit()\n db_filenames = sys.argv[1:]\n num_of_dbs = len(db_filenames)\n conn = sqlite3.connect(':memory:')\n c = conn.cursor()\n for i in range(num_of_dbs):\n sql = \"ATTACH DATABASE '{}' as db{}\".format(db_filenames[i], i)\n c.execute(sql)\n sql = 'SELECT text'\n for i in range(num_of_dbs):\n sql += ', SUM(db{}) as db{}'.format(i, i)\n sql += ' FROM (\\n'\n for i in range(num_of_dbs):\n if i > 0:\n sql += ' UNION\\n'\n sql += ' SELECT text'\n for j in range(num_of_dbs):\n if i == j:\n sql += ', SUM(end - start)'\n else:\n sql += ', 0'\n sql += ' as db{}'.format(j)\n sql += (' FROM db{}.NVTX_EVENTS WHERE text IS NOT NULL GROUP BY text\\n'\n .format(i))\n sql += ') GROUP BY text'\n labels = []\n durations = []\n i = 0\n for j in range(num_of_dbs):\n durations.append([])\n for row in c.execute(sql):\n labels.append(row[0])\n lst = []\n for j in range(num_of_dbs):\n durations[j].append(row[1 + j])\n i += 1\n conn.close()\n x = np.arange(len(labels))\n width = 1.5 / (num_of_dbs * len(labels))\n fig, ax = plt.subplots()\n\n def autolabel(rects):\n \"\"\"Attach a text label above each bar in *rects*, displaying its height.\"\"\"\n for rect in rects:\n height = rect.get_height()\n ax.annotate('{:.1f}'.format(height / 1000000000.0), xy=(rect.\n get_x() + rect.get_width() / 2, height), xytext=(0, 3),\n textcoords='offset points', ha='center', va='bottom')\n for i in range(num_of_dbs):\n autolabel(ax.bar(-(num_of_dbs * width) / 2 + width / 2 + x + width *\n i, durations[i], width * 0.95, label=os.path.splitext(\n db_filenames[i])[0]))\n plt.xticks(x, labels, rotation=60, rotation_mode='anchor',\n horizontalalignment='right', verticalalignment='center')\n ax.legend(bbox_to_anchor=(1.1, 1.05))\n plt.yticks([])\n plt.ylabel('Time(sec)')\n x1, x2, y1, y2 = plt.axis()\n plt.axis((x1, x2, y1, y2 
* 1.05))\n plt.tight_layout()\n plt.show()\n\n\nif __name__ == '__main__':\n main()\n",
"step-5": "#!/usr/bin/python\n\nimport sys\nimport os\nimport sqlite3\nfrom matplotlib import pyplot as plt\nimport numpy as np\n\ndef main():\n if len(sys.argv) < 2:\n print('usage: sqlite_file ...')\n sys.exit()\n db_filenames = sys.argv[1:]\n num_of_dbs = len(db_filenames)\n conn = sqlite3.connect(\":memory:\")\n c = conn.cursor()\n\n for i in range(num_of_dbs):\n sql = \"ATTACH DATABASE '{}' as db{}\".format(db_filenames[i], i)\n c.execute(sql)\n\n sql = 'SELECT text'\n for i in range(num_of_dbs):\n sql += ', SUM(db{}) as db{}'.format(i, i)\n sql += ' FROM (\\n'\n for i in range(num_of_dbs):\n if i > 0:\n sql += ' UNION\\n'\n sql += ' SELECT text'\n for j in range(num_of_dbs):\n if i == j:\n sql += ', SUM(end - start)'\n else:\n sql += ', 0'\n sql += ' as db{}'.format(j)\n sql += ' FROM db{}.NVTX_EVENTS WHERE text IS NOT NULL GROUP BY text\\n'.format(i)\n sql += ') GROUP BY text'\n # print(sql)\n\n labels = []\n durations = []\n i = 0\n for j in range(num_of_dbs):\n durations.append([])\n for row in c.execute(sql):\n #print(row)\n labels.append(row[0])\n lst = []\n for j in range(num_of_dbs):\n durations[j].append(row[1+j])\n i += 1\n conn.close()\n x = np.arange(len(labels))\n width = 1.5 / (num_of_dbs * len(labels))\n fig, ax = plt.subplots()\n\n def autolabel(rects):\n \"\"\"Attach a text label above each bar in *rects*, displaying its height.\"\"\"\n for rect in rects:\n height = rect.get_height()\n ax.annotate('{:.1f}'.format(height/1e9),\n xy=(rect.get_x() + rect.get_width() / 2, height),\n xytext=(0, 3), # 3 points vertical offset\n textcoords=\"offset points\",\n ha='center', va='bottom')\n\n\n for i in range(num_of_dbs):\n autolabel(ax.bar(-(num_of_dbs*width)/2 + width/2 + x + width*i, durations[i], width * 0.95, label=os.path.splitext(db_filenames[i])[0]))\n plt.xticks(x, labels, rotation=60, rotation_mode=\"anchor\", horizontalalignment=\"right\", verticalalignment=\"center\")\n ax.legend(bbox_to_anchor=(1.1, 1.05))\n # plt.yticks([1e8, 1e8 * 5, 1e9, 
1e9 * 5])\n plt.yticks([])\n plt.ylabel('Time(sec)')\n\n x1,x2,y1,y2 = plt.axis()\n plt.axis((x1,x2,y1,y2*1.05))\n\n plt.tight_layout()\n plt.show()\n # plt.savefig(os.path.splitext(db_filenames[0])[0] + \".svg\")\n\nif __name__ == \"__main__\":\n main()\n\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
import pytest
import os
import sys
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
import extract_tables_columns
def test_get_tables():
sql_str = "SELECT * FROM table1, table2 WHERE table1.column1 = table2.column1;"
assert(extract_tables_columns.get_tables(sql_str)) == [('TABLE1', 'TABLE1'), ('TABLE2', 'TABLE2')]
def test_get_tables_mutiline():
sql_str = """
SELECT *
FROM table1, table2
WHERE table1.column1 = table2.column1;
"""
assert(extract_tables_columns.get_tables(sql_str)) == [('TABLE1', 'TABLE1'), ('TABLE2', 'TABLE2')]
def test_get_tables_tables_on_muti_lines():
sql_str = """
SELECT *
FROM table1, table2,
table3
WHERE table1.column1 = table2.column1;
"""
assert(extract_tables_columns.get_tables(sql_str)) == [('TABLE1', 'TABLE1'), ('TABLE2', 'TABLE2'), ('TABLE3', 'TABLE3')]
def test_get_tables_single_table():
sql_str = """
SELECT *
FROM table1
WHERE table1.column1 = table2.column1;
"""
assert(extract_tables_columns.get_tables(sql_str)) == [('TABLE1', 'TABLE1')]
def test_get_tables_left_join():
sql_str = """
SELECT *
FROM table1
LEFT JOIN table2 ON table1.column1 = table2.column2
WHERE table1.column1 < 10;
"""
assert(extract_tables_columns.get_tables(sql_str)) == [('TABLE1', 'TABLE1'), ('TABLE2', 'TABLE2')]
|
normal
|
{
"blob_id": "72286078841c7fe5b297767576741dbbd0a80411",
"index": 3457,
"step-1": "<mask token>\n\n\ndef test_get_tables():\n sql_str = (\n 'SELECT * FROM table1, table2 WHERE table1.column1 = table2.column1;')\n assert extract_tables_columns.get_tables(sql_str) == [('TABLE1',\n 'TABLE1'), ('TABLE2', 'TABLE2')]\n\n\ndef test_get_tables_mutiline():\n sql_str = \"\"\"\n SELECT * \n FROM table1, table2 \n WHERE table1.column1 = table2.column1;\n \"\"\"\n assert extract_tables_columns.get_tables(sql_str) == [('TABLE1',\n 'TABLE1'), ('TABLE2', 'TABLE2')]\n\n\n<mask token>\n\n\ndef test_get_tables_left_join():\n sql_str = \"\"\"\n SELECT * \n FROM table1\n LEFT JOIN table2 ON table1.column1 = table2.column2\n WHERE table1.column1 < 10;\n \"\"\"\n assert extract_tables_columns.get_tables(sql_str) == [('TABLE1',\n 'TABLE1'), ('TABLE2', 'TABLE2')]\n",
"step-2": "<mask token>\n\n\ndef test_get_tables():\n sql_str = (\n 'SELECT * FROM table1, table2 WHERE table1.column1 = table2.column1;')\n assert extract_tables_columns.get_tables(sql_str) == [('TABLE1',\n 'TABLE1'), ('TABLE2', 'TABLE2')]\n\n\ndef test_get_tables_mutiline():\n sql_str = \"\"\"\n SELECT * \n FROM table1, table2 \n WHERE table1.column1 = table2.column1;\n \"\"\"\n assert extract_tables_columns.get_tables(sql_str) == [('TABLE1',\n 'TABLE1'), ('TABLE2', 'TABLE2')]\n\n\n<mask token>\n\n\ndef test_get_tables_single_table():\n sql_str = \"\"\"\n SELECT * \n FROM table1\n WHERE table1.column1 = table2.column1;\n \"\"\"\n assert extract_tables_columns.get_tables(sql_str) == [('TABLE1', 'TABLE1')]\n\n\ndef test_get_tables_left_join():\n sql_str = \"\"\"\n SELECT * \n FROM table1\n LEFT JOIN table2 ON table1.column1 = table2.column2\n WHERE table1.column1 < 10;\n \"\"\"\n assert extract_tables_columns.get_tables(sql_str) == [('TABLE1',\n 'TABLE1'), ('TABLE2', 'TABLE2')]\n",
"step-3": "<mask token>\nsys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__),\n '..')))\n<mask token>\n\n\ndef test_get_tables():\n sql_str = (\n 'SELECT * FROM table1, table2 WHERE table1.column1 = table2.column1;')\n assert extract_tables_columns.get_tables(sql_str) == [('TABLE1',\n 'TABLE1'), ('TABLE2', 'TABLE2')]\n\n\ndef test_get_tables_mutiline():\n sql_str = \"\"\"\n SELECT * \n FROM table1, table2 \n WHERE table1.column1 = table2.column1;\n \"\"\"\n assert extract_tables_columns.get_tables(sql_str) == [('TABLE1',\n 'TABLE1'), ('TABLE2', 'TABLE2')]\n\n\ndef test_get_tables_tables_on_muti_lines():\n sql_str = \"\"\"\n SELECT * \n FROM table1, table2,\n table3\n WHERE table1.column1 = table2.column1;\n \"\"\"\n assert extract_tables_columns.get_tables(sql_str) == [('TABLE1',\n 'TABLE1'), ('TABLE2', 'TABLE2'), ('TABLE3', 'TABLE3')]\n\n\ndef test_get_tables_single_table():\n sql_str = \"\"\"\n SELECT * \n FROM table1\n WHERE table1.column1 = table2.column1;\n \"\"\"\n assert extract_tables_columns.get_tables(sql_str) == [('TABLE1', 'TABLE1')]\n\n\ndef test_get_tables_left_join():\n sql_str = \"\"\"\n SELECT * \n FROM table1\n LEFT JOIN table2 ON table1.column1 = table2.column2\n WHERE table1.column1 < 10;\n \"\"\"\n assert extract_tables_columns.get_tables(sql_str) == [('TABLE1',\n 'TABLE1'), ('TABLE2', 'TABLE2')]\n",
"step-4": "import pytest\nimport os\nimport sys\nsys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__),\n '..')))\nimport extract_tables_columns\n\n\ndef test_get_tables():\n sql_str = (\n 'SELECT * FROM table1, table2 WHERE table1.column1 = table2.column1;')\n assert extract_tables_columns.get_tables(sql_str) == [('TABLE1',\n 'TABLE1'), ('TABLE2', 'TABLE2')]\n\n\ndef test_get_tables_mutiline():\n sql_str = \"\"\"\n SELECT * \n FROM table1, table2 \n WHERE table1.column1 = table2.column1;\n \"\"\"\n assert extract_tables_columns.get_tables(sql_str) == [('TABLE1',\n 'TABLE1'), ('TABLE2', 'TABLE2')]\n\n\ndef test_get_tables_tables_on_muti_lines():\n sql_str = \"\"\"\n SELECT * \n FROM table1, table2,\n table3\n WHERE table1.column1 = table2.column1;\n \"\"\"\n assert extract_tables_columns.get_tables(sql_str) == [('TABLE1',\n 'TABLE1'), ('TABLE2', 'TABLE2'), ('TABLE3', 'TABLE3')]\n\n\ndef test_get_tables_single_table():\n sql_str = \"\"\"\n SELECT * \n FROM table1\n WHERE table1.column1 = table2.column1;\n \"\"\"\n assert extract_tables_columns.get_tables(sql_str) == [('TABLE1', 'TABLE1')]\n\n\ndef test_get_tables_left_join():\n sql_str = \"\"\"\n SELECT * \n FROM table1\n LEFT JOIN table2 ON table1.column1 = table2.column2\n WHERE table1.column1 < 10;\n \"\"\"\n assert extract_tables_columns.get_tables(sql_str) == [('TABLE1',\n 'TABLE1'), ('TABLE2', 'TABLE2')]\n",
"step-5": "import pytest\nimport os\nimport sys\nsys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))\n\nimport extract_tables_columns\n\ndef test_get_tables():\n sql_str = \"SELECT * FROM table1, table2 WHERE table1.column1 = table2.column1;\"\n assert(extract_tables_columns.get_tables(sql_str)) == [('TABLE1', 'TABLE1'), ('TABLE2', 'TABLE2')]\n\ndef test_get_tables_mutiline():\n sql_str = \"\"\"\n SELECT * \n FROM table1, table2 \n WHERE table1.column1 = table2.column1;\n \"\"\"\n assert(extract_tables_columns.get_tables(sql_str)) == [('TABLE1', 'TABLE1'), ('TABLE2', 'TABLE2')]\n\ndef test_get_tables_tables_on_muti_lines():\n sql_str = \"\"\"\n SELECT * \n FROM table1, table2,\n table3\n WHERE table1.column1 = table2.column1;\n \"\"\"\n assert(extract_tables_columns.get_tables(sql_str)) == [('TABLE1', 'TABLE1'), ('TABLE2', 'TABLE2'), ('TABLE3', 'TABLE3')]\n\ndef test_get_tables_single_table():\n sql_str = \"\"\"\n SELECT * \n FROM table1\n WHERE table1.column1 = table2.column1;\n \"\"\"\n assert(extract_tables_columns.get_tables(sql_str)) == [('TABLE1', 'TABLE1')]\n\ndef test_get_tables_left_join():\n sql_str = \"\"\"\n SELECT * \n FROM table1\n LEFT JOIN table2 ON table1.column1 = table2.column2\n WHERE table1.column1 < 10;\n \"\"\"\n assert(extract_tables_columns.get_tables(sql_str)) == [('TABLE1', 'TABLE1'), ('TABLE2', 'TABLE2')]",
"step-ids": [
3,
4,
6,
7,
8
]
}
|
[
3,
4,
6,
7,
8
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class User(AbstractNamedUser):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class User(AbstractNamedUser):
USERNAME_FIELD = 'email'
REQUIRED_FIELDS = ['name']
<|reserved_special_token_1|>
from authtools.models import AbstractNamedUser
class User(AbstractNamedUser):
USERNAME_FIELD = 'email'
REQUIRED_FIELDS = ['name']
|
flexible
|
{
"blob_id": "e7d7a002547047a9bcae830be96dd35db80a86e8",
"index": 7001,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass User(AbstractNamedUser):\n <mask token>\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass User(AbstractNamedUser):\n USERNAME_FIELD = 'email'\n REQUIRED_FIELDS = ['name']\n",
"step-4": "from authtools.models import AbstractNamedUser\n\n\nclass User(AbstractNamedUser):\n USERNAME_FIELD = 'email'\n REQUIRED_FIELDS = ['name']\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
cnn.add(Convolution2D(32, 3, 3, input_shape=(rgb, rgb, 3), activation='relu'))
cnn.add(MaxPool2D(pool_size=(2, 2)))
cnn.add(Flatten())
cnn.add(Dense(output_dim=128, activation='relu'))
cnn.add(Dense(output_dim=1, activation='sigmoid'))
cnn.compile(optimizer='adam', loss='binary_crossentropy', metrics=['accuracy'])
<|reserved_special_token_0|>
cnn.fit_generator(train_set, steps_per_epoch=8000, epochs=10,
validation_data=test_set, validation_steps=2000)
print(cnn.summary())
cnn.save('CatDogModel.h5')
<|reserved_special_token_1|>
<|reserved_special_token_0|>
cnn = Sequential()
rgb = 64
cnn.add(Convolution2D(32, 3, 3, input_shape=(rgb, rgb, 3), activation='relu'))
cnn.add(MaxPool2D(pool_size=(2, 2)))
cnn.add(Flatten())
cnn.add(Dense(output_dim=128, activation='relu'))
cnn.add(Dense(output_dim=1, activation='sigmoid'))
cnn.compile(optimizer='adam', loss='binary_crossentropy', metrics=['accuracy'])
train_datagen = ImageDataGenerator(rescale=1.0 / 255, shear_range=0.2,
zoom_range=0.2, horizontal_flip=True)
test_datagen = ImageDataGenerator(rescale=1.0 / 255)
train_set = train_datagen.flow_from_directory('dataset/training_set',
target_size=(rgb, rgb), batch_size=32, class_mode='binary')
test_set = test_datagen.flow_from_directory('dataset/test_set', target_size
=(rgb, rgb), batch_size=32, class_mode='binary')
cnn.fit_generator(train_set, steps_per_epoch=8000, epochs=10,
validation_data=test_set, validation_steps=2000)
print(cnn.summary())
cnn.save('CatDogModel.h5')
<|reserved_special_token_1|>
from keras.models import Sequential
from keras.layers import Convolution2D
from keras.layers import MaxPool2D
from keras.layers import Flatten
from keras.layers import Dense
import tensorflow as tf
from keras_preprocessing.image import ImageDataGenerator
cnn = Sequential()
rgb = 64
cnn.add(Convolution2D(32, 3, 3, input_shape=(rgb, rgb, 3), activation='relu'))
cnn.add(MaxPool2D(pool_size=(2, 2)))
cnn.add(Flatten())
cnn.add(Dense(output_dim=128, activation='relu'))
cnn.add(Dense(output_dim=1, activation='sigmoid'))
cnn.compile(optimizer='adam', loss='binary_crossentropy', metrics=['accuracy'])
train_datagen = ImageDataGenerator(rescale=1.0 / 255, shear_range=0.2,
zoom_range=0.2, horizontal_flip=True)
test_datagen = ImageDataGenerator(rescale=1.0 / 255)
train_set = train_datagen.flow_from_directory('dataset/training_set',
target_size=(rgb, rgb), batch_size=32, class_mode='binary')
test_set = test_datagen.flow_from_directory('dataset/test_set', target_size
=(rgb, rgb), batch_size=32, class_mode='binary')
cnn.fit_generator(train_set, steps_per_epoch=8000, epochs=10,
validation_data=test_set, validation_steps=2000)
print(cnn.summary())
cnn.save('CatDogModel.h5')
<|reserved_special_token_1|>
from keras.models import Sequential
from keras.layers import Convolution2D # for 2d images
from keras.layers import MaxPool2D
from keras.layers import Flatten
from keras.layers import Dense
import tensorflow as tf
from keras_preprocessing.image import ImageDataGenerator
cnn = Sequential()
rgb = 64
# step 1: convolution
# slide feature detectors ("filters") along image
# results feature maps that form convolutional layer
cnn.add(Convolution2D(32, 3, 3, input_shape=(rgb, rgb, 3), activation='relu')) # 32, 3x3 filters
# step 2: pooling
cnn.add(MaxPool2D(pool_size=(2, 2)))
# step 3: flatten
# this vector will be the input of a future ann
cnn.add(Flatten())
# step 4: full connection
cnn.add(Dense(output_dim=128, activation='relu')) # add hidden layers
cnn.add(Dense(output_dim=1, activation='sigmoid')) # sigmoid for binary output
# compile cnn
cnn.compile(optimizer='adam', loss='binary_crossentropy', metrics=['accuracy'])
# image augmentation - prevent overfitting
train_datagen = ImageDataGenerator(
rescale=1./255,
shear_range=0.2,
zoom_range=0.2,
horizontal_flip=True)
test_datagen = ImageDataGenerator(rescale=1./255)
train_set = train_datagen.flow_from_directory(
'dataset/training_set',
target_size=(rgb, rgb),
batch_size=32,
class_mode='binary')
test_set = test_datagen.flow_from_directory(
'dataset/test_set',
target_size=(rgb, rgb),
batch_size=32,
class_mode='binary')
cnn.fit_generator(
train_set,
steps_per_epoch=8000, # we have 8k images in our training set
epochs=10,
validation_data=test_set,
validation_steps=2000)
print(cnn.summary())
cnn.save('CatDogModel.h5')
|
flexible
|
{
"blob_id": "9fa5f4b4aeb7fe42d313a0ec4e57ce15acbfcf46",
"index": 3960,
"step-1": "<mask token>\n",
"step-2": "<mask token>\ncnn.add(Convolution2D(32, 3, 3, input_shape=(rgb, rgb, 3), activation='relu'))\ncnn.add(MaxPool2D(pool_size=(2, 2)))\ncnn.add(Flatten())\ncnn.add(Dense(output_dim=128, activation='relu'))\ncnn.add(Dense(output_dim=1, activation='sigmoid'))\ncnn.compile(optimizer='adam', loss='binary_crossentropy', metrics=['accuracy'])\n<mask token>\ncnn.fit_generator(train_set, steps_per_epoch=8000, epochs=10,\n validation_data=test_set, validation_steps=2000)\nprint(cnn.summary())\ncnn.save('CatDogModel.h5')\n",
"step-3": "<mask token>\ncnn = Sequential()\nrgb = 64\ncnn.add(Convolution2D(32, 3, 3, input_shape=(rgb, rgb, 3), activation='relu'))\ncnn.add(MaxPool2D(pool_size=(2, 2)))\ncnn.add(Flatten())\ncnn.add(Dense(output_dim=128, activation='relu'))\ncnn.add(Dense(output_dim=1, activation='sigmoid'))\ncnn.compile(optimizer='adam', loss='binary_crossentropy', metrics=['accuracy'])\ntrain_datagen = ImageDataGenerator(rescale=1.0 / 255, shear_range=0.2,\n zoom_range=0.2, horizontal_flip=True)\ntest_datagen = ImageDataGenerator(rescale=1.0 / 255)\ntrain_set = train_datagen.flow_from_directory('dataset/training_set',\n target_size=(rgb, rgb), batch_size=32, class_mode='binary')\ntest_set = test_datagen.flow_from_directory('dataset/test_set', target_size\n =(rgb, rgb), batch_size=32, class_mode='binary')\ncnn.fit_generator(train_set, steps_per_epoch=8000, epochs=10,\n validation_data=test_set, validation_steps=2000)\nprint(cnn.summary())\ncnn.save('CatDogModel.h5')\n",
"step-4": "from keras.models import Sequential\nfrom keras.layers import Convolution2D\nfrom keras.layers import MaxPool2D\nfrom keras.layers import Flatten\nfrom keras.layers import Dense\nimport tensorflow as tf\nfrom keras_preprocessing.image import ImageDataGenerator\ncnn = Sequential()\nrgb = 64\ncnn.add(Convolution2D(32, 3, 3, input_shape=(rgb, rgb, 3), activation='relu'))\ncnn.add(MaxPool2D(pool_size=(2, 2)))\ncnn.add(Flatten())\ncnn.add(Dense(output_dim=128, activation='relu'))\ncnn.add(Dense(output_dim=1, activation='sigmoid'))\ncnn.compile(optimizer='adam', loss='binary_crossentropy', metrics=['accuracy'])\ntrain_datagen = ImageDataGenerator(rescale=1.0 / 255, shear_range=0.2,\n zoom_range=0.2, horizontal_flip=True)\ntest_datagen = ImageDataGenerator(rescale=1.0 / 255)\ntrain_set = train_datagen.flow_from_directory('dataset/training_set',\n target_size=(rgb, rgb), batch_size=32, class_mode='binary')\ntest_set = test_datagen.flow_from_directory('dataset/test_set', target_size\n =(rgb, rgb), batch_size=32, class_mode='binary')\ncnn.fit_generator(train_set, steps_per_epoch=8000, epochs=10,\n validation_data=test_set, validation_steps=2000)\nprint(cnn.summary())\ncnn.save('CatDogModel.h5')\n",
"step-5": "from keras.models import Sequential\nfrom keras.layers import Convolution2D # for 2d images\nfrom keras.layers import MaxPool2D\nfrom keras.layers import Flatten\nfrom keras.layers import Dense\nimport tensorflow as tf\nfrom keras_preprocessing.image import ImageDataGenerator\n\ncnn = Sequential()\n\nrgb = 64\n\n# step 1: convolution\n# slide feature detectors (\"filters\") along image\n# results feature maps that form convolutional layer\ncnn.add(Convolution2D(32, 3, 3, input_shape=(rgb, rgb, 3), activation='relu')) # 32, 3x3 filters\n\n# step 2: pooling\ncnn.add(MaxPool2D(pool_size=(2, 2)))\n\n# step 3: flatten\n# this vector will be the input of a future ann\ncnn.add(Flatten())\n\n# step 4: full connection\ncnn.add(Dense(output_dim=128, activation='relu')) # add hidden layers\ncnn.add(Dense(output_dim=1, activation='sigmoid')) # sigmoid for binary output\n\n# compile cnn\ncnn.compile(optimizer='adam', loss='binary_crossentropy', metrics=['accuracy'])\n\n# image augmentation - prevent overfitting\ntrain_datagen = ImageDataGenerator(\n rescale=1./255,\n shear_range=0.2,\n zoom_range=0.2,\n horizontal_flip=True)\n\ntest_datagen = ImageDataGenerator(rescale=1./255)\n\ntrain_set = train_datagen.flow_from_directory(\n 'dataset/training_set',\n target_size=(rgb, rgb),\n batch_size=32,\n class_mode='binary')\n\ntest_set = test_datagen.flow_from_directory(\n 'dataset/test_set',\n target_size=(rgb, rgb),\n batch_size=32,\n class_mode='binary')\n\ncnn.fit_generator(\n train_set,\n steps_per_epoch=8000, # we have 8k images in our training set\n epochs=10,\n validation_data=test_set,\n validation_steps=2000)\n\nprint(cnn.summary())\n\ncnn.save('CatDogModel.h5')\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
from yoloPydarknet import pydarknetYOLO
import cv2
import imutils
import time
yolo = pydarknetYOLO(obdata="../darknet/cfg/coco.data", weights="yolov3.weights",
cfg="../darknet/cfg/yolov3.cfg")
video_out = "yolo_output.avi"
start_time = time.time()
if __name__ == "__main__":
VIDEO_IN = cv2.VideoCapture(0)
if(video_out!=""):
width = int(VIDEO_IN.get(cv2.CAP_PROP_FRAME_WIDTH)) # float
height = int(VIDEO_IN.get(cv2.CAP_PROP_FRAME_HEIGHT)) # float
fourcc = cv2.VideoWriter_fourcc(*'MJPG')
out = cv2.VideoWriter(video_out,fourcc, 30.0, (int(width),int(height)))
frameID = 0
while True:
hasFrame, frame = VIDEO_IN.read()
# Stop the program if reached end of video
if not hasFrame:
print("Done processing !!!")
print("--- %s seconds ---" % (time.time() - start_time))
break
yolo.getObject(frame, labelWant="", drawBox=True, bold=1, textsize=0.6, bcolor=(0,0,255), tcolor=(255,255,255))
print ("Object counts:", yolo.objCounts)
cv2.imshow("Frame", imutils.resize(frame, width=850))
if(video_out!=""):
out.write(frame)
k = cv2.waitKey(1)
if k == 0xFF & ord("q"):
out.release()
break
|
normal
|
{
"blob_id": "669eb2e898c3a127ae01e0ee3020a3674e5e340d",
"index": 1091,
"step-1": "from yoloPydarknet import pydarknetYOLO\nimport cv2\nimport imutils\nimport time\n\nyolo = pydarknetYOLO(obdata=\"../darknet/cfg/coco.data\", weights=\"yolov3.weights\", \n cfg=\"../darknet/cfg/yolov3.cfg\")\nvideo_out = \"yolo_output.avi\"\n\nstart_time = time.time()\n\nif __name__ == \"__main__\":\n\n VIDEO_IN = cv2.VideoCapture(0)\n if(video_out!=\"\"):\n width = int(VIDEO_IN.get(cv2.CAP_PROP_FRAME_WIDTH)) # float\n height = int(VIDEO_IN.get(cv2.CAP_PROP_FRAME_HEIGHT)) # float\n fourcc = cv2.VideoWriter_fourcc(*'MJPG')\n out = cv2.VideoWriter(video_out,fourcc, 30.0, (int(width),int(height)))\n\n frameID = 0\n while True:\n hasFrame, frame = VIDEO_IN.read()\n # Stop the program if reached end of video\n if not hasFrame:\n print(\"Done processing !!!\")\n print(\"--- %s seconds ---\" % (time.time() - start_time))\n break\n\n yolo.getObject(frame, labelWant=\"\", drawBox=True, bold=1, textsize=0.6, bcolor=(0,0,255), tcolor=(255,255,255))\n print (\"Object counts:\", yolo.objCounts)\n cv2.imshow(\"Frame\", imutils.resize(frame, width=850))\n if(video_out!=\"\"):\n out.write(frame)\n\n k = cv2.waitKey(1)\n if k == 0xFF & ord(\"q\"):\n out.release()\n break\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
__author__ = 'sudab'
""" Generate a grid world """
import os, sys, getopt, pdb, string
import random
import numpy as np
import pygame
from skimage import io
import cv2
import pygame.locals as pgl
class Gridworld():
# a gridworld with uneven terrain
def __init__(self, filename=None, initial=0, nrows=8, ncols=8, nagents=1, targets=[], obstacles=[], moveobstacles = [], regions=dict()):
# walls are the obstacles. The edges of the gridworld will be included into the walls.
# region is a string and can be one of: ['pavement','gravel', 'grass', 'sand']
if filename != None:
data = io.imread(filename)
data = cv2.resize(data, dsize=(16, 16), interpolation=cv2.INTER_AREA)
regionkeys = {'pavement', 'gravel', 'grass', 'sand', 'deterministic'}
(nrows,ncols) = data.shape
data = data.flatten()
obstacles = list(np.where(data==0)[0])
regions = dict.fromkeys(regionkeys, {-1})
regions['deterministic'] = range(nrows * ncols)
self.current = initial
self.nrows = nrows
self.ncols = ncols
self.obstacles = obstacles
self.regions = regions
self.nagents = nagents
self.nstates = nrows * ncols
self.nactions = 5
self.obstacles = obstacles
self.actlist = ['R','N', 'S', 'W', 'E']
self.targets = targets
self.left_edge = []
self.right_edge = []
self.top_edge = []
self.bottom_edge = []
self.regions = regions
self.moveobstacles = moveobstacles
self.states = range(nrows*ncols)
self.colorstates = set()
for x in range(self.nstates):
# note that edges are not disjoint, so we cannot use elif
if x % self.ncols == 0:
self.left_edge.append(x)
if 0 <= x < self.ncols:
self.top_edge.append(x)
if x % self.ncols == self.ncols - 1:
self.right_edge.append(x)
if (self.nrows - 1) * self.ncols <= x <= self.nstates:
self.bottom_edge.append(x)
self.edges = self.left_edge + self.top_edge + self.right_edge + self.bottom_edge
self.walls = self.edges + obstacles
self.prob = {a: np.zeros((self.nstates, self.nstates)) for a in self.actlist}
self.probOfSuccess = dict([])
self.getProbRegions()
for s in self.states:
for a in self.actlist:
self.getProbs(s, a)
def coords(self, s):
return (s / self.ncols, s % self.ncols) # the coordinate for state s.
def isAllowed(self, (row,col)):
if col not in range(self.ncols) or row not in range(self.nrows):
return False
return True
def isAllowedState(self,(row,col),returnState):
if self.isAllowed((row,col)):
return self.rcoords((row,col))
return returnState
def getProbRegions(self):
probOfSuccess = dict([])
for ground in self.regions.keys():
for direction in ['N', 'S', 'E', 'W']:
if ground == 'pavement':
mass = random.choice(range(90, 95))
massleft = 100 - mass
oneleft = random.choice(range(1, massleft))
twoleft = massleft - oneleft
if ground == 'gravel':
mass = random.choice(range(80, 85))
massleft = 100 - mass
oneleft = random.choice(range(1, massleft))
twoleft = massleft - oneleft
if ground == 'grass':
mass = random.choice(range(85, 90))
massleft = 100 - mass
oneleft = random.choice(range(1, massleft))
twoleft = massleft - oneleft
if ground == 'sand':
mass = random.choice(range(65, 70))
massleft = 100 - mass
oneleft = random.choice(range(1, massleft))
twoleft = massleft - oneleft
if ground == 'deterministic':
mass = 100
oneleft = 0
twoleft = 0
probOfSuccess[(ground, direction)] = [float(mass) / 100, float(oneleft) / 100, float(twoleft) / 100]
self.probOfSuccess = probOfSuccess
return
def rcoords(self, coords):
s = coords[0] * self.ncols + coords[1]
return s
def getProbs(self, state, action):
successors = []
if state in self.obstacles:
successors = [(state, 1)]
for (next_state, p) in successors:
self.prob[action][state, next_state] = p
return
row,col = self.coords(state)
northState = self.isAllowedState((row-1,col),state)
northwestState = self.isAllowedState((row-1,col-1),state)
northeastState = self.isAllowedState((row-1,col+1),state)
southState = self.isAllowedState((row+1,col),state)
southeastState = self.isAllowedState((row+1,col+1),state)
southwestState = self.isAllowedState((row+1,col-1),state)
westState = self.isAllowedState((row,col-1),state)
eastState = self.isAllowedState((row,col+1),state)
reg = self.getStateRegion(state)
if action == 'N':
[p0, p1, p2] = self.probOfSuccess[(reg, 'N')]
successors.append((northState, p0))
successors.append((northwestState, p1))
successors.append((northeastState, p2))
if action == 'S':
[p0, p1, p2] = self.probOfSuccess[(reg, 'S')]
successors.append((southState, p0))
successors.append((southwestState, p1))
successors.append((southeastState, p2))
if action == 'W':
[p0, p1, p2] = self.probOfSuccess[(reg, 'W')]
successors.append((westState, p0))
successors.append((southwestState, p1))
successors.append((northwestState, p2))
if action == 'E':
[p0, p1, p2] = self.probOfSuccess[(reg, 'W')]
successors.append((eastState, p0))
successors.append((southeastState, p1))
successors.append((northeastState, p2))
if action == 'R':
successors.append((state,1))
for (next_state, p) in successors:
self.prob[action][state, next_state] += p
def getStateRegion(self, state):
if state in self.regions['pavement']:
return 'pavement'
if state in self.regions['grass']:
return 'grass'
if state in self.regions['gravel']:
return 'gravel'
if state in self.regions['sand']:
return 'sand'
if state in self.regions['deterministic']:
return 'deterministic'
## Everything from here onwards is for creating the image
def render(self, size=10):
self.height = self.nrows * size + self.nrows + 1
self.width = self.ncols * size + self.ncols + 1
self.size = size
# # initialize pygame ( SDL extensions )
pygame.init()
pygame.display.set_mode((self.width, self.height))
pygame.display.set_caption('Gridworld')
self.screen = pygame.display.get_surface()
self.surface = pygame.Surface(self.screen.get_size())
self.bg = pygame.Surface(self.screen.get_size())
self.bg_rendered = False # optimize background render
self.background()
self.screen.blit(self.surface, (0, 0))
pygame.display.flip()
self.build_templates()
self.updategui = True # switch to stop updating gui if you want to collect a trace quickly
self.state2circle(self.current)
def getkeyinput(self):
events = pygame.event.get()
for event in events:
if event.type == pygame.KEYDOWN:
if event.key == pygame.K_LEFT:
return 'W'
elif event.key == pygame.K_RIGHT:
return 'E'
if event.key == pygame.K_UP:
return 'N'
elif event.key == pygame.K_DOWN:
return 'S'
elif event.key == pygame.K_SPACE:
return 'Space'
def build_templates(self):
# Note: template already in "graphics" coordinates
template = np.array([(-1, 0), (0, 0), (1, 0), (0, 1), (1, 0), (0, -1)])
template = self.size / 3 * template # scale template
v = 1.0 / np.sqrt(2)
rot90 = np.array([(0, 1), (-1, 0)])
rot45 = np.array([(v, -v), (v, v)]) # neg
#
# align the template with the first action.
t0 = np.dot(template, rot90)
t0 = np.dot(t0, rot90)
t0 = np.dot(t0, rot90)
t1 = np.dot(t0, rot45)
t2 = np.dot(t1, rot45)
t3 = np.dot(t2, rot45)
t4 = np.dot(t3, rot45)
t5 = np.dot(t4, rot45)
t6 = np.dot(t5, rot45)
t7 = np.dot(t6, rot45)
self.t = [t0, t1, t2, t3, t4, t5, t6, t7]
def indx2coord(self, s, center=False):
# the +1 indexing business is to ensure that the grid cells
# have borders of width 1px
i, j = self.coords(s)
if center:
return i * (self.size + 1) + 1 + self.size / 2, \
j * (self.size + 1) + 1 + self.size / 2
else:
return i * (self.size + 1) + 1, j * (self.size + 1) + 1
def accessible_blocks(self, s):
"""
For a give state s, generate the list of walls around it.
"""
W = []
if s in self.walls:
return W
if s - self.ncols < 0 or s - self.ncols in self.walls:
pass
else:
W.append(s - self.ncols)
if s - 1 < 0 or s - 1 in self.walls:
pass
else:
W.append(s - 1)
if s + 1 in self.walls:
pass
else:
W.append(s + 1)
if s + self.ncols in self.walls:
pass
else:
W.append(s + self.ncols)
return W
def coord2indx(self, (x, y)):
return self.rcoords((x / (self.size + 1), y / (self.size + 1)))
def draw_state_labels(self):
font = pygame.font.SysFont("FreeSans", 10)
for s in range(self.nstates):
x, y = self.indx2coord(s, False)
txt = font.render("%d" % s, True, (0, 0, 0))
self.surface.blit(txt, (y, x))
self.screen.blit(self.surface, (0, 0))
pygame.display.flip()
def coord2state(self, coord):
s = self.coord2indx((coord[0], coord[1]))
return s
def state2circle(self, state, bg=True, blit=True):
if bg:
self.background()
for n in range(self.nagents):
x, y = self.indx2coord(state[n], center=True)
pygame.draw.circle(self.surface, (0+(50*n), 0+(20*n), 255.0/(n+1)), (y, x), self.size / 2)
if len(self.moveobstacles) > 0:
for s in self.moveobstacles:
x, y = self.indx2coord(s, center=True)
pygame.draw.circle(self.surface, (205, 92, 0), (y, x), self.size / 2)
if blit:
self.screen.blit(self.surface, (0, 0))
pygame.display.flip()
def draw_values(self, vals):
"""
vals: a dict with state labels as the key
"""
font = pygame.font.SysFont("FreeSans", 10)
for s in range(self.nstates):
x, y = self.indx2coord(s, False)
v = vals[s]
txt = font.render("%.1f" % v, True, (0, 0, 0))
self.surface.blit(txt, (y, x))
self.screen.blit(self.surface, (0, 0))
pygame.display.flip()
#
    def save(self, filename):
        """Write the current drawing surface to *filename* as an image file."""
        pygame.image.save(self.surface, filename)
    def redraw(self):
        """Blit the off-screen surface to the window and flip the buffer."""
        self.screen.blit(self.surface, (0, 0))
        pygame.display.flip()
def move_obj(self, s, bg=True, blit=True):
"""Including A moving object into the gridworld, which moves uniformly at
random in all accessible directions (including idle), without
hitting the wall or another other statitic obstacle. Input: a
gridworld gui, the current state index for the obstacle and the
number of steps.
"""
if bg:
self.background()
x, y = self.indx2coord(s, center=True)
pygame.draw.circle(self.surface, (205, 92, 0), (y, x), self.size / 2)
if blit:
self.screen.blit(self.surface, (0, 0))
pygame.display.flip()
return
    def move_deter(self, next_state):
        """Deterministically set the agent's current state to *next_state*."""
        self.current = next_state
        return
def background(self):
if self.bg_rendered:
self.surface.blit(self.bg, (0, 0))
else:
self.bg.fill((84, 84, 84))
font = pygame.font.SysFont("FreeSans", 10)
for s in range(self.nstates):
x, y = self.indx2coord(s, False)
coords = pygame.Rect(y, x, self.size, self.size)
pygame.draw.rect(self.bg, ((250, 250, 250)), coords)
for n in range(self.nagents):
for t in self.targets[n]:
x, y = self.indx2coord(t, center=True)
coords = pygame.Rect(y - self.size / 2, x - self.size / 2, self.size, self.size)
pygame.draw.rect(self.bg, (0+(50*n), 204.0/(n+1), 102.0+(50*n)/(n+1)), coords)
for s in self.obstacles:
(x, y) = self.indx2coord(s)
coords = pygame.Rect(y, x, self.size, self.size)
pygame.draw.rect(self.bg, (255, 0, 0), coords) # the obstacles are in color red
color = {'sand': (223, 225, 179), 'gravel': (255, 255, 255), 'grass': (211, 255, 192),
'pavement': (192, 255, 253),'deterministic': (255,255,255)}
for s in range(self.nstates):
if s not in self.edges and not any(s in x for x in self.targets) and s not in self.obstacles and not any(s in x for x in self.colorstates):
(x, y) = self.indx2coord(s)
coords = pygame.Rect(y - self.size / 2, x - self.size / 2, self.size, self.size)
coords = pygame.Rect(y, x, self.size, self.size)
pygame.draw.rect(self.bg, color[self.getStateRegion(s)], coords) # the obstacles are in color grey
statecols = [(0,0,0),(150,150,150)]
for i in range(len(self.colorstates)):
for s in self.colorstates[i]:
if s not in self.edges and not any(s in x for x in self.targets) and s not in self.obstacles:
(x, y) = self.indx2coord(s)
coords = pygame.Rect(y, x, self.size, self.size)
pygame.draw.rect(self.bg, statecols[i], coords) # the obstacles are in color grey
self.bg_rendered = True # don't render again unless flag is set
self.surface.blit(self.bg, (0, 0))
|
normal
|
{
"blob_id": "1fbd4e45b061b4d6cefb46e3bc612533ec94250b",
"index": 481,
"step-1": "__author__ = 'sudab'\n\"\"\" Generate a grid world \"\"\"\nimport os, sys, getopt, pdb, string\nimport random\nimport numpy as np\nimport pygame\nfrom skimage import io\nimport cv2\nimport pygame.locals as pgl\n\nclass Gridworld():\n # a gridworld with uneven terrain\n def __init__(self, filename=None, initial=0, nrows=8, ncols=8, nagents=1, targets=[], obstacles=[], moveobstacles = [], regions=dict()):\n # walls are the obstacles. The edges of the gridworld will be included into the walls.\n # region is a string and can be one of: ['pavement','gravel', 'grass', 'sand']\n if filename != None:\n data = io.imread(filename)\n data = cv2.resize(data, dsize=(16, 16), interpolation=cv2.INTER_AREA)\n regionkeys = {'pavement', 'gravel', 'grass', 'sand', 'deterministic'}\n (nrows,ncols) = data.shape\n data = data.flatten()\n obstacles = list(np.where(data==0)[0])\n regions = dict.fromkeys(regionkeys, {-1})\n regions['deterministic'] = range(nrows * ncols)\n\n self.current = initial\n self.nrows = nrows\n self.ncols = ncols\n self.obstacles = obstacles\n self.regions = regions\n self.nagents = nagents\n self.nstates = nrows * ncols\n self.nactions = 5\n self.obstacles = obstacles\n self.actlist = ['R','N', 'S', 'W', 'E']\n self.targets = targets\n self.left_edge = []\n self.right_edge = []\n self.top_edge = []\n self.bottom_edge = []\n self.regions = regions\n self.moveobstacles = moveobstacles\n self.states = range(nrows*ncols)\n self.colorstates = set()\n for x in range(self.nstates):\n # note that edges are not disjoint, so we cannot use elif\n if x % self.ncols == 0:\n self.left_edge.append(x)\n if 0 <= x < self.ncols:\n self.top_edge.append(x)\n if x % self.ncols == self.ncols - 1:\n self.right_edge.append(x)\n if (self.nrows - 1) * self.ncols <= x <= self.nstates:\n self.bottom_edge.append(x)\n self.edges = self.left_edge + self.top_edge + self.right_edge + self.bottom_edge\n self.walls = self.edges + obstacles\n self.prob = {a: np.zeros((self.nstates, 
self.nstates)) for a in self.actlist}\n\n self.probOfSuccess = dict([])\n self.getProbRegions()\n\n for s in self.states:\n for a in self.actlist:\n self.getProbs(s, a)\n\n def coords(self, s):\n return (s / self.ncols, s % self.ncols) # the coordinate for state s.\n\n def isAllowed(self, (row,col)):\n if col not in range(self.ncols) or row not in range(self.nrows):\n return False\n return True\n\n def isAllowedState(self,(row,col),returnState):\n if self.isAllowed((row,col)):\n return self.rcoords((row,col))\n return returnState\n\n def getProbRegions(self):\n probOfSuccess = dict([])\n for ground in self.regions.keys():\n for direction in ['N', 'S', 'E', 'W']:\n if ground == 'pavement':\n mass = random.choice(range(90, 95))\n massleft = 100 - mass\n oneleft = random.choice(range(1, massleft))\n twoleft = massleft - oneleft\n if ground == 'gravel':\n mass = random.choice(range(80, 85))\n massleft = 100 - mass\n oneleft = random.choice(range(1, massleft))\n twoleft = massleft - oneleft\n if ground == 'grass':\n mass = random.choice(range(85, 90))\n massleft = 100 - mass\n oneleft = random.choice(range(1, massleft))\n twoleft = massleft - oneleft\n if ground == 'sand':\n mass = random.choice(range(65, 70))\n massleft = 100 - mass\n oneleft = random.choice(range(1, massleft))\n twoleft = massleft - oneleft\n if ground == 'deterministic':\n mass = 100\n oneleft = 0\n twoleft = 0\n probOfSuccess[(ground, direction)] = [float(mass) / 100, float(oneleft) / 100, float(twoleft) / 100]\n self.probOfSuccess = probOfSuccess\n return\n\n def rcoords(self, coords):\n s = coords[0] * self.ncols + coords[1]\n return s\n\n def getProbs(self, state, action):\n successors = []\n\n if state in self.obstacles:\n successors = [(state, 1)]\n for (next_state, p) in successors:\n self.prob[action][state, next_state] = p\n return\n row,col = self.coords(state)\n northState = self.isAllowedState((row-1,col),state)\n northwestState = self.isAllowedState((row-1,col-1),state)\n northeastState 
= self.isAllowedState((row-1,col+1),state)\n southState = self.isAllowedState((row+1,col),state)\n southeastState = self.isAllowedState((row+1,col+1),state)\n southwestState = self.isAllowedState((row+1,col-1),state)\n westState = self.isAllowedState((row,col-1),state)\n eastState = self.isAllowedState((row,col+1),state)\n\n reg = self.getStateRegion(state)\n if action == 'N':\n [p0, p1, p2] = self.probOfSuccess[(reg, 'N')]\n successors.append((northState, p0))\n successors.append((northwestState, p1))\n successors.append((northeastState, p2))\n\n if action == 'S':\n [p0, p1, p2] = self.probOfSuccess[(reg, 'S')]\n successors.append((southState, p0))\n successors.append((southwestState, p1))\n successors.append((southeastState, p2))\n\n if action == 'W':\n [p0, p1, p2] = self.probOfSuccess[(reg, 'W')]\n successors.append((westState, p0))\n successors.append((southwestState, p1))\n successors.append((northwestState, p2))\n\n if action == 'E':\n [p0, p1, p2] = self.probOfSuccess[(reg, 'W')]\n successors.append((eastState, p0))\n successors.append((southeastState, p1))\n successors.append((northeastState, p2))\n\n if action == 'R':\n successors.append((state,1))\n\n for (next_state, p) in successors:\n self.prob[action][state, next_state] += p\n\n def getStateRegion(self, state):\n if state in self.regions['pavement']:\n return 'pavement'\n if state in self.regions['grass']:\n return 'grass'\n if state in self.regions['gravel']:\n return 'gravel'\n if state in self.regions['sand']:\n return 'sand'\n if state in self.regions['deterministic']:\n return 'deterministic'\n\n ## Everything from here onwards is for creating the image\n\n def render(self, size=10):\n self.height = self.nrows * size + self.nrows + 1\n self.width = self.ncols * size + self.ncols + 1\n self.size = size\n\n # # initialize pygame ( SDL extensions )\n pygame.init()\n pygame.display.set_mode((self.width, self.height))\n pygame.display.set_caption('Gridworld')\n self.screen = 
pygame.display.get_surface()\n self.surface = pygame.Surface(self.screen.get_size())\n self.bg = pygame.Surface(self.screen.get_size())\n self.bg_rendered = False # optimize background render\n\n self.background()\n self.screen.blit(self.surface, (0, 0))\n pygame.display.flip()\n\n self.build_templates()\n self.updategui = True # switch to stop updating gui if you want to collect a trace quickly\n\n self.state2circle(self.current)\n\n def getkeyinput(self):\n events = pygame.event.get()\n for event in events:\n if event.type == pygame.KEYDOWN:\n if event.key == pygame.K_LEFT:\n return 'W'\n elif event.key == pygame.K_RIGHT:\n return 'E'\n if event.key == pygame.K_UP:\n return 'N'\n elif event.key == pygame.K_DOWN:\n return 'S'\n elif event.key == pygame.K_SPACE:\n return 'Space'\n\n def build_templates(self):\n\n # Note: template already in \"graphics\" coordinates\n template = np.array([(-1, 0), (0, 0), (1, 0), (0, 1), (1, 0), (0, -1)])\n template = self.size / 3 * template # scale template\n\n v = 1.0 / np.sqrt(2)\n rot90 = np.array([(0, 1), (-1, 0)])\n rot45 = np.array([(v, -v), (v, v)]) # neg\n\n\n #\n # align the template with the first action.\n t0 = np.dot(template, rot90)\n t0 = np.dot(t0, rot90)\n t0 = np.dot(t0, rot90)\n\n t1 = np.dot(t0, rot45)\n t2 = np.dot(t1, rot45)\n t3 = np.dot(t2, rot45)\n t4 = np.dot(t3, rot45)\n t5 = np.dot(t4, rot45)\n t6 = np.dot(t5, rot45)\n t7 = np.dot(t6, rot45)\n\n self.t = [t0, t1, t2, t3, t4, t5, t6, t7]\n\n def indx2coord(self, s, center=False):\n # the +1 indexing business is to ensure that the grid cells\n # have borders of width 1px\n i, j = self.coords(s)\n if center:\n return i * (self.size + 1) + 1 + self.size / 2, \\\n j * (self.size + 1) + 1 + self.size / 2\n else:\n return i * (self.size + 1) + 1, j * (self.size + 1) + 1\n\n def accessible_blocks(self, s):\n \"\"\"\n For a give state s, generate the list of walls around it.\n \"\"\"\n W = []\n if s in self.walls:\n return W\n if s - self.ncols < 0 or s - 
self.ncols in self.walls:\n pass\n else:\n W.append(s - self.ncols)\n if s - 1 < 0 or s - 1 in self.walls:\n pass\n else:\n W.append(s - 1)\n if s + 1 in self.walls:\n pass\n else:\n W.append(s + 1)\n if s + self.ncols in self.walls:\n pass\n else:\n W.append(s + self.ncols)\n return W\n\n def coord2indx(self, (x, y)):\n return self.rcoords((x / (self.size + 1), y / (self.size + 1)))\n\n def draw_state_labels(self):\n font = pygame.font.SysFont(\"FreeSans\", 10)\n for s in range(self.nstates):\n x, y = self.indx2coord(s, False)\n txt = font.render(\"%d\" % s, True, (0, 0, 0))\n self.surface.blit(txt, (y, x))\n\n self.screen.blit(self.surface, (0, 0))\n pygame.display.flip()\n\n def coord2state(self, coord):\n s = self.coord2indx((coord[0], coord[1]))\n return s\n\n def state2circle(self, state, bg=True, blit=True):\n if bg:\n self.background()\n\n for n in range(self.nagents):\n x, y = self.indx2coord(state[n], center=True)\n pygame.draw.circle(self.surface, (0+(50*n), 0+(20*n), 255.0/(n+1)), (y, x), self.size / 2)\n if len(self.moveobstacles) > 0:\n for s in self.moveobstacles:\n x, y = self.indx2coord(s, center=True)\n pygame.draw.circle(self.surface, (205, 92, 0), (y, x), self.size / 2)\n if blit:\n self.screen.blit(self.surface, (0, 0))\n pygame.display.flip()\n\n def draw_values(self, vals):\n \"\"\"\n vals: a dict with state labels as the key\n \"\"\"\n font = pygame.font.SysFont(\"FreeSans\", 10)\n\n for s in range(self.nstates):\n x, y = self.indx2coord(s, False)\n v = vals[s]\n txt = font.render(\"%.1f\" % v, True, (0, 0, 0))\n self.surface.blit(txt, (y, x))\n\n self.screen.blit(self.surface, (0, 0))\n pygame.display.flip()\n\n #\n def save(self, filename):\n pygame.image.save(self.surface, filename)\n\n def redraw(self):\n self.screen.blit(self.surface, (0, 0))\n pygame.display.flip()\n\n def move_obj(self, s, bg=True, blit=True):\n\n \"\"\"Including A moving object into the gridworld, which moves uniformly at\n random in all accessible directions 
(including idle), without\n hitting the wall or another other statitic obstacle. Input: a\n gridworld gui, the current state index for the obstacle and the\n number of steps.\n\n \"\"\"\n if bg:\n self.background()\n x, y = self.indx2coord(s, center=True)\n pygame.draw.circle(self.surface, (205, 92, 0), (y, x), self.size / 2)\n\n if blit:\n self.screen.blit(self.surface, (0, 0))\n pygame.display.flip()\n\n return\n\n def move_deter(self, next_state):\n self.current = next_state\n\n return\n\n def background(self):\n\n if self.bg_rendered:\n self.surface.blit(self.bg, (0, 0))\n else:\n self.bg.fill((84, 84, 84))\n font = pygame.font.SysFont(\"FreeSans\", 10)\n\n for s in range(self.nstates):\n x, y = self.indx2coord(s, False)\n coords = pygame.Rect(y, x, self.size, self.size)\n pygame.draw.rect(self.bg, ((250, 250, 250)), coords)\n for n in range(self.nagents):\n\n for t in self.targets[n]:\n x, y = self.indx2coord(t, center=True)\n coords = pygame.Rect(y - self.size / 2, x - self.size / 2, self.size, self.size)\n pygame.draw.rect(self.bg, (0+(50*n), 204.0/(n+1), 102.0+(50*n)/(n+1)), coords)\n\n for s in self.obstacles:\n (x, y) = self.indx2coord(s)\n coords = pygame.Rect(y, x, self.size, self.size)\n pygame.draw.rect(self.bg, (255, 0, 0), coords) # the obstacles are in color red\n\n color = {'sand': (223, 225, 179), 'gravel': (255, 255, 255), 'grass': (211, 255, 192),\n 'pavement': (192, 255, 253),'deterministic': (255,255,255)}\n for s in range(self.nstates):\n if s not in self.edges and not any(s in x for x in self.targets) and s not in self.obstacles and not any(s in x for x in self.colorstates):\n (x, y) = self.indx2coord(s)\n coords = pygame.Rect(y - self.size / 2, x - self.size / 2, self.size, self.size)\n coords = pygame.Rect(y, x, self.size, self.size)\n pygame.draw.rect(self.bg, color[self.getStateRegion(s)], coords) # the obstacles are in color grey\n statecols = [(0,0,0),(150,150,150)]\n for i in range(len(self.colorstates)):\n for s in 
self.colorstates[i]:\n if s not in self.edges and not any(s in x for x in self.targets) and s not in self.obstacles:\n (x, y) = self.indx2coord(s)\n coords = pygame.Rect(y, x, self.size, self.size)\n pygame.draw.rect(self.bg, statecols[i], coords) # the obstacles are in color grey\n\n self.bg_rendered = True # don't render again unless flag is set\n self.surface.blit(self.bg, (0, 0))",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
setup(name='coding_exercises', version='1.0', description=
'Coding Exercises in Python', author='Gustavo Gama', author_email=
'gustavo.gama@gmail.com', url='https://gama.igenesis.com.br', packages=
find_packages())
<|reserved_special_token_1|>
from setuptools import find_packages, setup
setup(name='coding_exercises', version='1.0', description=
'Coding Exercises in Python', author='Gustavo Gama', author_email=
'gustavo.gama@gmail.com', url='https://gama.igenesis.com.br', packages=
find_packages())
<|reserved_special_token_1|>
#!/usr/bin/env python
# pylama:ignore=E221,E251
from setuptools import find_packages, setup
setup(
name = 'coding_exercises',
version = '1.0',
description = 'Coding Exercises in Python',
author = 'Gustavo Gama',
author_email = 'gustavo.gama@gmail.com',
url = 'https://gama.igenesis.com.br',
packages = find_packages()
)
|
flexible
|
{
"blob_id": "5f4abc7e9397034737ee214b0d0aae39ebf1548b",
"index": 8098,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nsetup(name='coding_exercises', version='1.0', description=\n 'Coding Exercises in Python', author='Gustavo Gama', author_email=\n 'gustavo.gama@gmail.com', url='https://gama.igenesis.com.br', packages=\n find_packages())\n",
"step-3": "from setuptools import find_packages, setup\nsetup(name='coding_exercises', version='1.0', description=\n 'Coding Exercises in Python', author='Gustavo Gama', author_email=\n 'gustavo.gama@gmail.com', url='https://gama.igenesis.com.br', packages=\n find_packages())\n",
"step-4": "#!/usr/bin/env python\n# pylama:ignore=E221,E251\n\nfrom setuptools import find_packages, setup\n\nsetup(\n name = 'coding_exercises',\n version = '1.0',\n description = 'Coding Exercises in Python',\n author = 'Gustavo Gama',\n author_email = 'gustavo.gama@gmail.com',\n url = 'https://gama.igenesis.com.br',\n packages = find_packages()\n)\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
# Basic script which seeds the test-management-tool with suites and test
# cases via its REST API.
# Be sure to set the host and api_token variables before running.
import http.client
import json

host = "localhost:8000"
api_token = "fuukp8LhdxxwoVdtJu5K8LQtpTods8ddLMq66wSUFXGsqJKpmJAa1YyqkHN3"

# One persistent keep-alive connection reused for all requests.
conn = http.client.HTTPConnection(host)

# Header of every http request (Bearer token authentication, JSON body).
headers = {
    'authorization': "Bearer " + api_token,
    'content-type': "application/json",
    'cache-control': "no-cache",
    'postman-token': "44709a5c-ca4a-bbce-4b24-f0632a29bde4"
}


def post(endpoint, body):
    """POST *body* (a dict, serialised to JSON) to *endpoint*.

    The response is drained and returned; reading it is mandatory so the
    shared keep-alive connection is ready for the next request (the
    original script forgot this on its very last request).
    """
    conn.request("POST", endpoint, json.dumps(body), headers)
    res = conn.getresponse()
    return res.read()


def create_suite(name):
    """Create a test suite; the server assigns ids sequentially (1, 2, ...)."""
    return post("/api/v1/testsuites", {"Name": name})


def create_case(suite_id, name):
    """Create a test case inside the suite with the given (assumed) id."""
    return post("/api/v1/testcases", {"TestSuite_id": suite_id, "Name": name})


# Suites 1 and 2 are created up front, then filled; the remaining suites
# are created right before their cases.  This mirrors the original request
# order exactly, so the hard-coded suite ids (1..5) stay valid on a fresh
# database.
create_suite("Create and edit project")          # suite 1
create_suite("Create and edit requirement")      # suite 2

for name in ("Not selected project", "Create project",
             "Create project without name", "Check if overview contains project",
             "Edit project"):
    create_case(1, name)

for name in ("Create project", "Create requirement",
             "Create requirement without name", "Overview contains requirement",
             "Edit requirement", "Cover requirement"):
    create_case(2, name)

create_suite("Create and edit TestSuites and TestCase")  # suite 3
for name in ("Create test suite", "Create test suite without name",
             "Check if overview contains suite", "Edit test suite",
             "Create test case without details", "Create test case with details",
             "Create test case without name", "Check if overview contains case",
             "Edit test case"):
    create_case(3, name)

create_suite("Create test set and run")  # suite 4
for name in ("Create project", "Create set", "Overview contains set",
             "Create set without name", "Create set without tests",
             "Edit test set", "Create test run", "Overview contains run",
             "Execute contains tests"):
    create_case(4, name)

create_suite("Registration and log test")  # suite 5
for name in ("Redirect to login page", "Registration",
             "Registrate same user", "Log and logout"):
    create_case(5, name)
|
normal
|
{
"blob_id": "0cc1aaa182fcf002ff2ae6cbcd6cbb84a08a3bc1",
"index": 936,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nconn.request('POST', '/api/v1/testsuites', payload, headers)\n<mask token>\nconn.request('POST', '/api/v1/testsuites', payload, headers)\n<mask token>\nconn.request('POST', '/api/v1/testcases', payload, headers)\n<mask token>\nconn.request('POST', '/api/v1/testcases', payload, headers)\n<mask token>\nconn.request('POST', '/api/v1/testcases', payload, headers)\n<mask token>\nconn.request('POST', '/api/v1/testcases', payload, headers)\n<mask token>\nconn.request('POST', '/api/v1/testcases', payload, headers)\n<mask token>\nconn.request('POST', '/api/v1/testcases', payload, headers)\n<mask token>\nconn.request('POST', '/api/v1/testcases', payload, headers)\n<mask token>\nconn.request('POST', '/api/v1/testcases', payload, headers)\n<mask token>\nconn.request('POST', '/api/v1/testcases', payload, headers)\n<mask token>\nconn.request('POST', '/api/v1/testcases', payload, headers)\n<mask token>\nconn.request('POST', '/api/v1/testcases', payload, headers)\n<mask token>\nconn.request('POST', '/api/v1/testsuites', payload, headers)\n<mask token>\nconn.request('POST', '/api/v1/testcases', payload, headers)\n<mask token>\nconn.request('POST', '/api/v1/testcases', payload, headers)\n<mask token>\nconn.request('POST', '/api/v1/testcases', payload, headers)\n<mask token>\nconn.request('POST', '/api/v1/testcases', payload, headers)\n<mask token>\nconn.request('POST', '/api/v1/testcases', payload, headers)\n<mask token>\nconn.request('POST', '/api/v1/testcases', payload, headers)\n<mask token>\nconn.request('POST', '/api/v1/testcases', payload, headers)\n<mask token>\nconn.request('POST', '/api/v1/testcases', payload, headers)\n<mask token>\nconn.request('POST', '/api/v1/testcases', payload, headers)\n<mask token>\nconn.request('POST', '/api/v1/testsuites', payload, headers)\n<mask token>\nconn.request('POST', '/api/v1/testcases', payload, headers)\n<mask token>\nconn.request('POST', '/api/v1/testcases', payload, headers)\n<mask token>\nconn.request('POST', 
'/api/v1/testcases', payload, headers)\n<mask token>\nconn.request('POST', '/api/v1/testcases', payload, headers)\n<mask token>\nconn.request('POST', '/api/v1/testcases', payload, headers)\n<mask token>\nconn.request('POST', '/api/v1/testcases', payload, headers)\n<mask token>\nconn.request('POST', '/api/v1/testcases', payload, headers)\n<mask token>\nconn.request('POST', '/api/v1/testcases', payload, headers)\n<mask token>\nconn.request('POST', '/api/v1/testcases', payload, headers)\n<mask token>\nconn.request('POST', '/api/v1/testsuites', payload, headers)\n<mask token>\nconn.request('POST', '/api/v1/testcases', payload, headers)\n<mask token>\nconn.request('POST', '/api/v1/testcases', payload, headers)\n<mask token>\nconn.request('POST', '/api/v1/testcases', payload, headers)\n<mask token>\nconn.request('POST', '/api/v1/testcases', payload, headers)\n",
"step-3": "<mask token>\nhost = 'localhost:8000'\napi_token = 'fuukp8LhdxxwoVdtJu5K8LQtpTods8ddLMq66wSUFXGsqJKpmJAa1YyqkHN3'\nconn = http.client.HTTPConnection(host)\nheaders = {'authorization': 'Bearer ' + api_token, 'content-type':\n 'application/json', 'cache-control': 'no-cache', 'postman-token':\n '44709a5c-ca4a-bbce-4b24-f0632a29bde4'}\npayload = \"\"\"{\n \"Name\": \"Create and edit project\"\n}\"\"\"\nconn.request('POST', '/api/v1/testsuites', payload, headers)\nres = conn.getresponse()\ndata = res.read()\npayload = \"\"\"{\n \"Name\": \"Create and edit requirement\"\n}\"\"\"\nconn.request('POST', '/api/v1/testsuites', payload, headers)\nres = conn.getresponse()\ndata = res.read()\npayload = \"\"\"{\n \"TestSuite_id\": 1,\n \"Name\": \"Not selected project\"\n}\"\"\"\nconn.request('POST', '/api/v1/testcases', payload, headers)\nres = conn.getresponse()\ndata = res.read()\npayload = \"\"\"{\n \"TestSuite_id\": 1,\n \"Name\": \"Create project\"\n}\"\"\"\nconn.request('POST', '/api/v1/testcases', payload, headers)\nres = conn.getresponse()\ndata = res.read()\npayload = \"\"\"{\n \"TestSuite_id\": 1,\n \"Name\": \"Create project without name\"\n}\"\"\"\nconn.request('POST', '/api/v1/testcases', payload, headers)\nres = conn.getresponse()\ndata = res.read()\npayload = \"\"\"{\n \"TestSuite_id\": 1,\n \"Name\": \"Check if overview contains project\"\n}\"\"\"\nconn.request('POST', '/api/v1/testcases', payload, headers)\nres = conn.getresponse()\ndata = res.read()\npayload = \"\"\"{\n \"TestSuite_id\": 1,\n \"Name\": \"Edit project\"\n}\"\"\"\nconn.request('POST', '/api/v1/testcases', payload, headers)\nres = conn.getresponse()\ndata = res.read()\npayload = \"\"\"{\n \"TestSuite_id\": 2,\n \"Name\": \"Create project\"\n}\"\"\"\nconn.request('POST', '/api/v1/testcases', payload, headers)\nres = conn.getresponse()\ndata = res.read()\npayload = \"\"\"{\n \"TestSuite_id\": 2,\n \"Name\": \"Create requirement\"\n}\"\"\"\nconn.request('POST', '/api/v1/testcases', 
payload, headers)\nres = conn.getresponse()\ndata = res.read()\npayload = \"\"\"{\n \"TestSuite_id\": 2,\n \"Name\": \"Create requirement without name\"\n}\"\"\"\nconn.request('POST', '/api/v1/testcases', payload, headers)\nres = conn.getresponse()\ndata = res.read()\npayload = \"\"\"{\n \"TestSuite_id\": 2,\n \"Name\": \"Overview contains requirement\"\n}\"\"\"\nconn.request('POST', '/api/v1/testcases', payload, headers)\nres = conn.getresponse()\ndata = res.read()\npayload = \"\"\"{\n \"TestSuite_id\": 2,\n \"Name\": \"Edit requirement\"\n}\"\"\"\nconn.request('POST', '/api/v1/testcases', payload, headers)\nres = conn.getresponse()\ndata = res.read()\npayload = \"\"\"{\n \"TestSuite_id\": 2,\n \"Name\": \"Cover requirement\"\n}\"\"\"\nconn.request('POST', '/api/v1/testcases', payload, headers)\nres = conn.getresponse()\ndata = res.read()\npayload = \"\"\"{\n \"Name\": \"Create and edit TestSuites and TestCase\"\n}\"\"\"\nconn.request('POST', '/api/v1/testsuites', payload, headers)\nres = conn.getresponse()\ndata = res.read()\npayload = \"\"\"{\n \"TestSuite_id\": 3,\n \"Name\": \"Create test suite\"\n}\"\"\"\nconn.request('POST', '/api/v1/testcases', payload, headers)\nres = conn.getresponse()\ndata = res.read()\npayload = \"\"\"{\n \"TestSuite_id\": 3,\n \"Name\": \"Create test suite without name\"\n}\"\"\"\nconn.request('POST', '/api/v1/testcases', payload, headers)\nres = conn.getresponse()\ndata = res.read()\npayload = \"\"\"{\n \"TestSuite_id\": 3,\n \"Name\": \"Check if overview contains suite\"\n}\"\"\"\nconn.request('POST', '/api/v1/testcases', payload, headers)\nres = conn.getresponse()\ndata = res.read()\npayload = \"\"\"{\n \"TestSuite_id\": 3,\n \"Name\": \"Edit test suite\"\n}\"\"\"\nconn.request('POST', '/api/v1/testcases', payload, headers)\nres = conn.getresponse()\ndata = res.read()\npayload = \"\"\"{\n \"TestSuite_id\": 3,\n \"Name\": \"Create test case without details\"\n}\"\"\"\nconn.request('POST', '/api/v1/testcases', payload, headers)\nres 
= conn.getresponse()\ndata = res.read()\npayload = \"\"\"{\n \"TestSuite_id\": 3,\n \"Name\": \"Create test case with details\"\n}\"\"\"\nconn.request('POST', '/api/v1/testcases', payload, headers)\nres = conn.getresponse()\ndata = res.read()\npayload = \"\"\"{\n \"TestSuite_id\": 3,\n \"Name\": \"Create test case without name\"\n}\"\"\"\nconn.request('POST', '/api/v1/testcases', payload, headers)\nres = conn.getresponse()\ndata = res.read()\npayload = \"\"\"{\n \"TestSuite_id\": 3,\n \"Name\": \"Check if overview contains case\"\n}\"\"\"\nconn.request('POST', '/api/v1/testcases', payload, headers)\nres = conn.getresponse()\ndata = res.read()\npayload = \"\"\"{\n \"TestSuite_id\": 3,\n \"Name\": \"Edit test case\"\n}\"\"\"\nconn.request('POST', '/api/v1/testcases', payload, headers)\nres = conn.getresponse()\ndata = res.read()\npayload = \"\"\"{\n \"Name\": \"Create test set and run\"\n}\"\"\"\nconn.request('POST', '/api/v1/testsuites', payload, headers)\nres = conn.getresponse()\ndata = res.read()\npayload = \"\"\"{\n \"TestSuite_id\": 4,\n \"Name\": \"Create project\"\n}\"\"\"\nconn.request('POST', '/api/v1/testcases', payload, headers)\nres = conn.getresponse()\ndata = res.read()\npayload = \"\"\"{\n \"TestSuite_id\": 4,\n \"Name\": \"Create set\"\n}\"\"\"\nconn.request('POST', '/api/v1/testcases', payload, headers)\nres = conn.getresponse()\ndata = res.read()\npayload = \"\"\"{\n \"TestSuite_id\": 4,\n \"Name\": \"Overview contains set\"\n}\"\"\"\nconn.request('POST', '/api/v1/testcases', payload, headers)\nres = conn.getresponse()\ndata = res.read()\npayload = \"\"\"{\n \"TestSuite_id\": 4,\n \"Name\": \"Create set without name\"\n}\"\"\"\nconn.request('POST', '/api/v1/testcases', payload, headers)\nres = conn.getresponse()\ndata = res.read()\npayload = \"\"\"{\n \"TestSuite_id\": 4,\n \"Name\": \"Create set without tests\"\n}\"\"\"\nconn.request('POST', '/api/v1/testcases', payload, headers)\nres = conn.getresponse()\ndata = res.read()\npayload = \"\"\"{\n 
\"TestSuite_id\": 4,\n \"Name\": \"Edit test set\"\n}\"\"\"\nconn.request('POST', '/api/v1/testcases', payload, headers)\nres = conn.getresponse()\ndata = res.read()\npayload = \"\"\"{\n \"TestSuite_id\": 4,\n \"Name\": \"Create test run\"\n}\"\"\"\nconn.request('POST', '/api/v1/testcases', payload, headers)\nres = conn.getresponse()\ndata = res.read()\npayload = \"\"\"{\n \"TestSuite_id\": 4,\n \"Name\": \"Overview contains run\"\n}\"\"\"\nconn.request('POST', '/api/v1/testcases', payload, headers)\nres = conn.getresponse()\ndata = res.read()\npayload = \"\"\"{\n \"TestSuite_id\": 4,\n \"Name\": \"Execute contains tests\"\n}\"\"\"\nconn.request('POST', '/api/v1/testcases', payload, headers)\nres = conn.getresponse()\ndata = res.read()\npayload = \"\"\"{\n \"Name\": \"Registration and log test\"\n}\"\"\"\nconn.request('POST', '/api/v1/testsuites', payload, headers)\nres = conn.getresponse()\ndata = res.read()\npayload = \"\"\"{\n \"TestSuite_id\": 5,\n \"Name\": \"Redirect to login page\"\n}\"\"\"\nconn.request('POST', '/api/v1/testcases', payload, headers)\nres = conn.getresponse()\ndata = res.read()\npayload = \"\"\"{\n \"TestSuite_id\": 5,\n \"Name\": \"Registration\"\n}\"\"\"\nconn.request('POST', '/api/v1/testcases', payload, headers)\nres = conn.getresponse()\ndata = res.read()\npayload = \"\"\"{\n \"TestSuite_id\": 5,\n \"Name\": \"Registrate same user\"\n}\"\"\"\nconn.request('POST', '/api/v1/testcases', payload, headers)\nres = conn.getresponse()\ndata = res.read()\npayload = \"\"\"{\n \"TestSuite_id\": 5,\n \"Name\": \"Log and logout\"\n}\"\"\"\nconn.request('POST', '/api/v1/testcases', payload, headers)\n",
"step-4": "import http.client\nhost = 'localhost:8000'\napi_token = 'fuukp8LhdxxwoVdtJu5K8LQtpTods8ddLMq66wSUFXGsqJKpmJAa1YyqkHN3'\nconn = http.client.HTTPConnection(host)\nheaders = {'authorization': 'Bearer ' + api_token, 'content-type':\n 'application/json', 'cache-control': 'no-cache', 'postman-token':\n '44709a5c-ca4a-bbce-4b24-f0632a29bde4'}\npayload = \"\"\"{\n \"Name\": \"Create and edit project\"\n}\"\"\"\nconn.request('POST', '/api/v1/testsuites', payload, headers)\nres = conn.getresponse()\ndata = res.read()\npayload = \"\"\"{\n \"Name\": \"Create and edit requirement\"\n}\"\"\"\nconn.request('POST', '/api/v1/testsuites', payload, headers)\nres = conn.getresponse()\ndata = res.read()\npayload = \"\"\"{\n \"TestSuite_id\": 1,\n \"Name\": \"Not selected project\"\n}\"\"\"\nconn.request('POST', '/api/v1/testcases', payload, headers)\nres = conn.getresponse()\ndata = res.read()\npayload = \"\"\"{\n \"TestSuite_id\": 1,\n \"Name\": \"Create project\"\n}\"\"\"\nconn.request('POST', '/api/v1/testcases', payload, headers)\nres = conn.getresponse()\ndata = res.read()\npayload = \"\"\"{\n \"TestSuite_id\": 1,\n \"Name\": \"Create project without name\"\n}\"\"\"\nconn.request('POST', '/api/v1/testcases', payload, headers)\nres = conn.getresponse()\ndata = res.read()\npayload = \"\"\"{\n \"TestSuite_id\": 1,\n \"Name\": \"Check if overview contains project\"\n}\"\"\"\nconn.request('POST', '/api/v1/testcases', payload, headers)\nres = conn.getresponse()\ndata = res.read()\npayload = \"\"\"{\n \"TestSuite_id\": 1,\n \"Name\": \"Edit project\"\n}\"\"\"\nconn.request('POST', '/api/v1/testcases', payload, headers)\nres = conn.getresponse()\ndata = res.read()\npayload = \"\"\"{\n \"TestSuite_id\": 2,\n \"Name\": \"Create project\"\n}\"\"\"\nconn.request('POST', '/api/v1/testcases', payload, headers)\nres = conn.getresponse()\ndata = res.read()\npayload = \"\"\"{\n \"TestSuite_id\": 2,\n \"Name\": \"Create requirement\"\n}\"\"\"\nconn.request('POST', '/api/v1/testcases', 
payload, headers)\nres = conn.getresponse()\ndata = res.read()\npayload = \"\"\"{\n \"TestSuite_id\": 2,\n \"Name\": \"Create requirement without name\"\n}\"\"\"\nconn.request('POST', '/api/v1/testcases', payload, headers)\nres = conn.getresponse()\ndata = res.read()\npayload = \"\"\"{\n \"TestSuite_id\": 2,\n \"Name\": \"Overview contains requirement\"\n}\"\"\"\nconn.request('POST', '/api/v1/testcases', payload, headers)\nres = conn.getresponse()\ndata = res.read()\npayload = \"\"\"{\n \"TestSuite_id\": 2,\n \"Name\": \"Edit requirement\"\n}\"\"\"\nconn.request('POST', '/api/v1/testcases', payload, headers)\nres = conn.getresponse()\ndata = res.read()\npayload = \"\"\"{\n \"TestSuite_id\": 2,\n \"Name\": \"Cover requirement\"\n}\"\"\"\nconn.request('POST', '/api/v1/testcases', payload, headers)\nres = conn.getresponse()\ndata = res.read()\npayload = \"\"\"{\n \"Name\": \"Create and edit TestSuites and TestCase\"\n}\"\"\"\nconn.request('POST', '/api/v1/testsuites', payload, headers)\nres = conn.getresponse()\ndata = res.read()\npayload = \"\"\"{\n \"TestSuite_id\": 3,\n \"Name\": \"Create test suite\"\n}\"\"\"\nconn.request('POST', '/api/v1/testcases', payload, headers)\nres = conn.getresponse()\ndata = res.read()\npayload = \"\"\"{\n \"TestSuite_id\": 3,\n \"Name\": \"Create test suite without name\"\n}\"\"\"\nconn.request('POST', '/api/v1/testcases', payload, headers)\nres = conn.getresponse()\ndata = res.read()\npayload = \"\"\"{\n \"TestSuite_id\": 3,\n \"Name\": \"Check if overview contains suite\"\n}\"\"\"\nconn.request('POST', '/api/v1/testcases', payload, headers)\nres = conn.getresponse()\ndata = res.read()\npayload = \"\"\"{\n \"TestSuite_id\": 3,\n \"Name\": \"Edit test suite\"\n}\"\"\"\nconn.request('POST', '/api/v1/testcases', payload, headers)\nres = conn.getresponse()\ndata = res.read()\npayload = \"\"\"{\n \"TestSuite_id\": 3,\n \"Name\": \"Create test case without details\"\n}\"\"\"\nconn.request('POST', '/api/v1/testcases', payload, headers)\nres 
= conn.getresponse()\ndata = res.read()\npayload = \"\"\"{\n \"TestSuite_id\": 3,\n \"Name\": \"Create test case with details\"\n}\"\"\"\nconn.request('POST', '/api/v1/testcases', payload, headers)\nres = conn.getresponse()\ndata = res.read()\npayload = \"\"\"{\n \"TestSuite_id\": 3,\n \"Name\": \"Create test case without name\"\n}\"\"\"\nconn.request('POST', '/api/v1/testcases', payload, headers)\nres = conn.getresponse()\ndata = res.read()\npayload = \"\"\"{\n \"TestSuite_id\": 3,\n \"Name\": \"Check if overview contains case\"\n}\"\"\"\nconn.request('POST', '/api/v1/testcases', payload, headers)\nres = conn.getresponse()\ndata = res.read()\npayload = \"\"\"{\n \"TestSuite_id\": 3,\n \"Name\": \"Edit test case\"\n}\"\"\"\nconn.request('POST', '/api/v1/testcases', payload, headers)\nres = conn.getresponse()\ndata = res.read()\npayload = \"\"\"{\n \"Name\": \"Create test set and run\"\n}\"\"\"\nconn.request('POST', '/api/v1/testsuites', payload, headers)\nres = conn.getresponse()\ndata = res.read()\npayload = \"\"\"{\n \"TestSuite_id\": 4,\n \"Name\": \"Create project\"\n}\"\"\"\nconn.request('POST', '/api/v1/testcases', payload, headers)\nres = conn.getresponse()\ndata = res.read()\npayload = \"\"\"{\n \"TestSuite_id\": 4,\n \"Name\": \"Create set\"\n}\"\"\"\nconn.request('POST', '/api/v1/testcases', payload, headers)\nres = conn.getresponse()\ndata = res.read()\npayload = \"\"\"{\n \"TestSuite_id\": 4,\n \"Name\": \"Overview contains set\"\n}\"\"\"\nconn.request('POST', '/api/v1/testcases', payload, headers)\nres = conn.getresponse()\ndata = res.read()\npayload = \"\"\"{\n \"TestSuite_id\": 4,\n \"Name\": \"Create set without name\"\n}\"\"\"\nconn.request('POST', '/api/v1/testcases', payload, headers)\nres = conn.getresponse()\ndata = res.read()\npayload = \"\"\"{\n \"TestSuite_id\": 4,\n \"Name\": \"Create set without tests\"\n}\"\"\"\nconn.request('POST', '/api/v1/testcases', payload, headers)\nres = conn.getresponse()\ndata = res.read()\npayload = \"\"\"{\n 
\"TestSuite_id\": 4,\n \"Name\": \"Edit test set\"\n}\"\"\"\nconn.request('POST', '/api/v1/testcases', payload, headers)\nres = conn.getresponse()\ndata = res.read()\npayload = \"\"\"{\n \"TestSuite_id\": 4,\n \"Name\": \"Create test run\"\n}\"\"\"\nconn.request('POST', '/api/v1/testcases', payload, headers)\nres = conn.getresponse()\ndata = res.read()\npayload = \"\"\"{\n \"TestSuite_id\": 4,\n \"Name\": \"Overview contains run\"\n}\"\"\"\nconn.request('POST', '/api/v1/testcases', payload, headers)\nres = conn.getresponse()\ndata = res.read()\npayload = \"\"\"{\n \"TestSuite_id\": 4,\n \"Name\": \"Execute contains tests\"\n}\"\"\"\nconn.request('POST', '/api/v1/testcases', payload, headers)\nres = conn.getresponse()\ndata = res.read()\npayload = \"\"\"{\n \"Name\": \"Registration and log test\"\n}\"\"\"\nconn.request('POST', '/api/v1/testsuites', payload, headers)\nres = conn.getresponse()\ndata = res.read()\npayload = \"\"\"{\n \"TestSuite_id\": 5,\n \"Name\": \"Redirect to login page\"\n}\"\"\"\nconn.request('POST', '/api/v1/testcases', payload, headers)\nres = conn.getresponse()\ndata = res.read()\npayload = \"\"\"{\n \"TestSuite_id\": 5,\n \"Name\": \"Registration\"\n}\"\"\"\nconn.request('POST', '/api/v1/testcases', payload, headers)\nres = conn.getresponse()\ndata = res.read()\npayload = \"\"\"{\n \"TestSuite_id\": 5,\n \"Name\": \"Registrate same user\"\n}\"\"\"\nconn.request('POST', '/api/v1/testcases', payload, headers)\nres = conn.getresponse()\ndata = res.read()\npayload = \"\"\"{\n \"TestSuite_id\": 5,\n \"Name\": \"Log and logout\"\n}\"\"\"\nconn.request('POST', '/api/v1/testcases', payload, headers)\n",
"step-5": "# Basic script which send some request via rest api to the test-management-tool.\n# Be sure you setup host and api_token variable\n\nimport http.client\n\nhost = \"localhost:8000\"\napi_token = \"fuukp8LhdxxwoVdtJu5K8LQtpTods8ddLMq66wSUFXGsqJKpmJAa1YyqkHN3\"\n\n# Connection\nconn = http.client.HTTPConnection(host)\n\n# Create a header of http request\nheaders = {\n 'authorization': \"Bearer \" + api_token,\n 'content-type': \"application/json\",\n 'cache-control': \"no-cache\",\n 'postman-token': \"44709a5c-ca4a-bbce-4b24-f0632a29bde4\"\n }\n\n################################################\npayload = \"{\\n \\\"Name\\\": \\\"Create and edit project\\\"\\n}\"\nconn.request(\"POST\", \"/api/v1/testsuites\", payload, headers)\n###\n\nres = conn.getresponse()\ndata = res.read()\n\npayload = \"{\\n \\\"Name\\\": \\\"Create and edit requirement\\\"\\n}\"\nconn.request(\"POST\", \"/api/v1/testsuites\", payload, headers)\n\nres = conn.getresponse()\ndata = res.read()\n\npayload = \"{\\n \\\"TestSuite_id\\\": 1,\\n \\\"Name\\\": \\\"Not selected project\\\"\\n}\"\nconn.request(\"POST\", \"/api/v1/testcases\", payload, headers)\n\nres = conn.getresponse()\ndata = res.read()\n\npayload = \"{\\n \\\"TestSuite_id\\\": 1,\\n \\\"Name\\\": \\\"Create project\\\"\\n}\"\nconn.request(\"POST\", \"/api/v1/testcases\", payload, headers)\n\nres = conn.getresponse()\ndata = res.read()\n\npayload = \"{\\n \\\"TestSuite_id\\\": 1,\\n \\\"Name\\\": \\\"Create project without name\\\"\\n}\"\nconn.request(\"POST\", \"/api/v1/testcases\", payload, headers)\n\nres = conn.getresponse()\ndata = res.read()\n\npayload = \"{\\n \\\"TestSuite_id\\\": 1,\\n \\\"Name\\\": \\\"Check if overview contains project\\\"\\n}\"\nconn.request(\"POST\", \"/api/v1/testcases\", payload, headers)\n\nres = conn.getresponse()\ndata = res.read()\n\npayload = \"{\\n \\\"TestSuite_id\\\": 1,\\n \\\"Name\\\": \\\"Edit project\\\"\\n}\"\nconn.request(\"POST\", \"/api/v1/testcases\", payload, headers)\n\nres 
= conn.getresponse()\ndata = res.read()\n\n################################################\n\n###\n\npayload = \"{\\n \\\"TestSuite_id\\\": 2,\\n \\\"Name\\\": \\\"Create project\\\"\\n}\"\nconn.request(\"POST\", \"/api/v1/testcases\", payload, headers)\n\nres = conn.getresponse()\ndata = res.read()\n\npayload = \"{\\n \\\"TestSuite_id\\\": 2,\\n \\\"Name\\\": \\\"Create requirement\\\"\\n}\"\nconn.request(\"POST\", \"/api/v1/testcases\", payload, headers)\n\nres = conn.getresponse()\ndata = res.read()\n\npayload = \"{\\n \\\"TestSuite_id\\\": 2,\\n \\\"Name\\\": \\\"Create requirement without name\\\"\\n}\"\nconn.request(\"POST\", \"/api/v1/testcases\", payload, headers)\n\nres = conn.getresponse()\ndata = res.read()\n\npayload = \"{\\n \\\"TestSuite_id\\\": 2,\\n \\\"Name\\\": \\\"Overview contains requirement\\\"\\n}\"\nconn.request(\"POST\", \"/api/v1/testcases\", payload, headers)\n\nres = conn.getresponse()\ndata = res.read()\n\npayload = \"{\\n \\\"TestSuite_id\\\": 2,\\n \\\"Name\\\": \\\"Edit requirement\\\"\\n}\"\nconn.request(\"POST\", \"/api/v1/testcases\", payload, headers)\n\nres = conn.getresponse()\ndata = res.read()\n\npayload = \"{\\n \\\"TestSuite_id\\\": 2,\\n \\\"Name\\\": \\\"Cover requirement\\\"\\n}\"\nconn.request(\"POST\", \"/api/v1/testcases\", payload, headers)\n\nres = conn.getresponse()\ndata = res.read()\n\n################################################\npayload = \"{\\n \\\"Name\\\": \\\"Create and edit TestSuites and TestCase\\\"\\n}\"\nconn.request(\"POST\", \"/api/v1/testsuites\", payload, headers)\n###\n\nres = conn.getresponse()\ndata = res.read()\n\npayload = \"{\\n \\\"TestSuite_id\\\": 3,\\n \\\"Name\\\": \\\"Create test suite\\\"\\n}\"\nconn.request(\"POST\", \"/api/v1/testcases\", payload, headers)\n\nres = conn.getresponse()\ndata = res.read()\n\npayload = \"{\\n \\\"TestSuite_id\\\": 3,\\n \\\"Name\\\": \\\"Create test suite without name\\\"\\n}\"\nconn.request(\"POST\", \"/api/v1/testcases\", payload, headers)\n\nres 
= conn.getresponse()\ndata = res.read()\n\npayload = \"{\\n \\\"TestSuite_id\\\": 3,\\n \\\"Name\\\": \\\"Check if overview contains suite\\\"\\n}\"\nconn.request(\"POST\", \"/api/v1/testcases\", payload, headers)\n\nres = conn.getresponse()\ndata = res.read()\n\npayload = \"{\\n \\\"TestSuite_id\\\": 3,\\n \\\"Name\\\": \\\"Edit test suite\\\"\\n}\"\nconn.request(\"POST\", \"/api/v1/testcases\", payload, headers)\n\nres = conn.getresponse()\ndata = res.read()\n\npayload = \"{\\n \\\"TestSuite_id\\\": 3,\\n \\\"Name\\\": \\\"Create test case without details\\\"\\n}\"\nconn.request(\"POST\", \"/api/v1/testcases\", payload, headers)\n\nres = conn.getresponse()\ndata = res.read()\n\npayload = \"{\\n \\\"TestSuite_id\\\": 3,\\n \\\"Name\\\": \\\"Create test case with details\\\"\\n}\"\nconn.request(\"POST\", \"/api/v1/testcases\", payload, headers)\n\nres = conn.getresponse()\ndata = res.read()\n\npayload = \"{\\n \\\"TestSuite_id\\\": 3,\\n \\\"Name\\\": \\\"Create test case without name\\\"\\n}\"\nconn.request(\"POST\", \"/api/v1/testcases\", payload, headers)\n\nres = conn.getresponse()\ndata = res.read()\n\npayload = \"{\\n \\\"TestSuite_id\\\": 3,\\n \\\"Name\\\": \\\"Check if overview contains case\\\"\\n}\"\nconn.request(\"POST\", \"/api/v1/testcases\", payload, headers)\n\nres = conn.getresponse()\ndata = res.read()\n\npayload = \"{\\n \\\"TestSuite_id\\\": 3,\\n \\\"Name\\\": \\\"Edit test case\\\"\\n}\"\nconn.request(\"POST\", \"/api/v1/testcases\", payload, headers)\n\nres = conn.getresponse()\ndata = res.read()\n\n################################################\npayload = \"{\\n \\\"Name\\\": \\\"Create test set and run\\\"\\n}\"\nconn.request(\"POST\", \"/api/v1/testsuites\", payload, headers)\n###\n\nres = conn.getresponse()\ndata = res.read()\n\npayload = \"{\\n \\\"TestSuite_id\\\": 4,\\n \\\"Name\\\": \\\"Create project\\\"\\n}\"\nconn.request(\"POST\", \"/api/v1/testcases\", payload, headers)\n\nres = conn.getresponse()\ndata = res.read()\n\npayload 
= \"{\\n \\\"TestSuite_id\\\": 4,\\n \\\"Name\\\": \\\"Create set\\\"\\n}\"\nconn.request(\"POST\", \"/api/v1/testcases\", payload, headers)\n\nres = conn.getresponse()\ndata = res.read()\n\npayload = \"{\\n \\\"TestSuite_id\\\": 4,\\n \\\"Name\\\": \\\"Overview contains set\\\"\\n}\"\nconn.request(\"POST\", \"/api/v1/testcases\", payload, headers)\n\nres = conn.getresponse()\ndata = res.read()\n\npayload = \"{\\n \\\"TestSuite_id\\\": 4,\\n \\\"Name\\\": \\\"Create set without name\\\"\\n}\"\nconn.request(\"POST\", \"/api/v1/testcases\", payload, headers)\n\nres = conn.getresponse()\ndata = res.read()\n\npayload = \"{\\n \\\"TestSuite_id\\\": 4,\\n \\\"Name\\\": \\\"Create set without tests\\\"\\n}\"\nconn.request(\"POST\", \"/api/v1/testcases\", payload, headers)\n\nres = conn.getresponse()\ndata = res.read()\n\npayload = \"{\\n \\\"TestSuite_id\\\": 4,\\n \\\"Name\\\": \\\"Edit test set\\\"\\n}\"\nconn.request(\"POST\", \"/api/v1/testcases\", payload, headers)\n\nres = conn.getresponse()\ndata = res.read()\n\npayload = \"{\\n \\\"TestSuite_id\\\": 4,\\n \\\"Name\\\": \\\"Create test run\\\"\\n}\"\nconn.request(\"POST\", \"/api/v1/testcases\", payload, headers)\n\nres = conn.getresponse()\ndata = res.read()\n\npayload = \"{\\n \\\"TestSuite_id\\\": 4,\\n \\\"Name\\\": \\\"Overview contains run\\\"\\n}\"\nconn.request(\"POST\", \"/api/v1/testcases\", payload, headers)\n\nres = conn.getresponse()\ndata = res.read()\n\npayload = \"{\\n \\\"TestSuite_id\\\": 4,\\n \\\"Name\\\": \\\"Execute contains tests\\\"\\n}\"\nconn.request(\"POST\", \"/api/v1/testcases\", payload, headers)\n\nres = conn.getresponse()\ndata = res.read()\n\n\n################################################\npayload = \"{\\n \\\"Name\\\": \\\"Registration and log test\\\"\\n}\"\nconn.request(\"POST\", \"/api/v1/testsuites\", payload, headers)\n###\n\nres = conn.getresponse()\ndata = res.read()\n\npayload = \"{\\n \\\"TestSuite_id\\\": 5,\\n \\\"Name\\\": \\\"Redirect to login 
page\\\"\\n}\"\nconn.request(\"POST\", \"/api/v1/testcases\", payload, headers)\n\nres = conn.getresponse()\ndata = res.read()\n\npayload = \"{\\n \\\"TestSuite_id\\\": 5,\\n \\\"Name\\\": \\\"Registration\\\"\\n}\"\nconn.request(\"POST\", \"/api/v1/testcases\", payload, headers)\n\nres = conn.getresponse()\ndata = res.read()\n\npayload = \"{\\n \\\"TestSuite_id\\\": 5,\\n \\\"Name\\\": \\\"Registrate same user\\\"\\n}\"\nconn.request(\"POST\", \"/api/v1/testcases\", payload, headers)\n\nres = conn.getresponse()\ndata = res.read()\n\npayload = \"{\\n \\\"TestSuite_id\\\": 5,\\n \\\"Name\\\": \\\"Log and logout\\\"\\n}\"\nconn.request(\"POST\", \"/api/v1/testcases\", payload, headers)\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
app_name = 'blogs'
urlpatterns = [path('', views.index, name='index'), re_path(
'^blogs/(?P<blog_id>\\d+)/$', views.blog, name='blog'), path(
'new_blog/', views.new_blog, name='new_blog'), re_path(
'^edit_blog/(?P<blog_id>\\d+)/$', views.edit_blog, name='edit_blog')]
<|reserved_special_token_1|>
from . import views
from django.urls import path, re_path
app_name = 'blogs'
urlpatterns = [path('', views.index, name='index'), re_path(
'^blogs/(?P<blog_id>\\d+)/$', views.blog, name='blog'), path(
'new_blog/', views.new_blog, name='new_blog'), re_path(
'^edit_blog/(?P<blog_id>\\d+)/$', views.edit_blog, name='edit_blog')]
<|reserved_special_token_1|>
from . import views
from django.urls import path, re_path
app_name = "blogs"
urlpatterns = [
path('', views.index, name='index'),
re_path(r'^blogs/(?P<blog_id>\d+)/$', views.blog, name='blog'),
path('new_blog/', views.new_blog, name='new_blog'),
re_path(r'^edit_blog/(?P<blog_id>\d+)/$', views.edit_blog, name='edit_blog'),
]
|
flexible
|
{
"blob_id": "d73491d6673abdabad85176c5f75a191995c806d",
"index": 1260,
"step-1": "<mask token>\n",
"step-2": "<mask token>\napp_name = 'blogs'\nurlpatterns = [path('', views.index, name='index'), re_path(\n '^blogs/(?P<blog_id>\\\\d+)/$', views.blog, name='blog'), path(\n 'new_blog/', views.new_blog, name='new_blog'), re_path(\n '^edit_blog/(?P<blog_id>\\\\d+)/$', views.edit_blog, name='edit_blog')]\n",
"step-3": "from . import views\nfrom django.urls import path, re_path\napp_name = 'blogs'\nurlpatterns = [path('', views.index, name='index'), re_path(\n '^blogs/(?P<blog_id>\\\\d+)/$', views.blog, name='blog'), path(\n 'new_blog/', views.new_blog, name='new_blog'), re_path(\n '^edit_blog/(?P<blog_id>\\\\d+)/$', views.edit_blog, name='edit_blog')]\n",
"step-4": "from . import views\nfrom django.urls import path, re_path\n\napp_name = \"blogs\"\n\nurlpatterns = [\npath('', views.index, name='index'),\nre_path(r'^blogs/(?P<blog_id>\\d+)/$', views.blog, name='blog'),\npath('new_blog/', views.new_blog, name='new_blog'),\nre_path(r'^edit_blog/(?P<blog_id>\\d+)/$', views.edit_blog, name='edit_blog'),\n]\n\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
from django.conf.urls import url
from . import views
from .HouseView import CreateHouseView
app_name = 'voronoi'
urlpatterns = [
url(r'^$', views.index, name='index'),
url(r'^search/$', views.search, name='search'),
url(r'^house/create/$', CreateHouseView.as_view(), name='create'),
#url(r'^get_search_results/$', views.get_search_results, name='get_search_results'),
url(r'^get_search_json/$', views.get_search_json, name='get_search_json'),
url(r'^get_search_suggestions/$', views.get_search_suggestions, name='get_search_suggestions'),
# ex: /polls/5/
url(r'^(?P<house_id>[0-9]+)/$', views.detail, name='detail'),
# ex: /polls/5/results/
url(r'^(?P<house_id>[0-9]+)/ratings/$', views.ratings, name='ratings'),
]
|
normal
|
{
"blob_id": "e3ee00efa0e929b87ca33b79dc6a6064b8758d4a",
"index": 2640,
"step-1": "<mask token>\n",
"step-2": "<mask token>\napp_name = 'voronoi'\nurlpatterns = [url('^$', views.index, name='index'), url('^search/$', views\n .search, name='search'), url('^house/create/$', CreateHouseView.as_view\n (), name='create'), url('^get_search_json/$', views.get_search_json,\n name='get_search_json'), url('^get_search_suggestions/$', views.\n get_search_suggestions, name='get_search_suggestions'), url(\n '^(?P<house_id>[0-9]+)/$', views.detail, name='detail'), url(\n '^(?P<house_id>[0-9]+)/ratings/$', views.ratings, name='ratings')]\n",
"step-3": "from django.conf.urls import url\nfrom . import views\nfrom .HouseView import CreateHouseView\napp_name = 'voronoi'\nurlpatterns = [url('^$', views.index, name='index'), url('^search/$', views\n .search, name='search'), url('^house/create/$', CreateHouseView.as_view\n (), name='create'), url('^get_search_json/$', views.get_search_json,\n name='get_search_json'), url('^get_search_suggestions/$', views.\n get_search_suggestions, name='get_search_suggestions'), url(\n '^(?P<house_id>[0-9]+)/$', views.detail, name='detail'), url(\n '^(?P<house_id>[0-9]+)/ratings/$', views.ratings, name='ratings')]\n",
"step-4": "from django.conf.urls import url\n\nfrom . import views\nfrom .HouseView import CreateHouseView\n\napp_name = 'voronoi'\n\nurlpatterns = [\n url(r'^$', views.index, name='index'),\n url(r'^search/$', views.search, name='search'),\n url(r'^house/create/$', CreateHouseView.as_view(), name='create'),\n #url(r'^get_search_results/$', views.get_search_results, name='get_search_results'),\n url(r'^get_search_json/$', views.get_search_json, name='get_search_json'),\n url(r'^get_search_suggestions/$', views.get_search_suggestions, name='get_search_suggestions'),\n\n \n \t# ex: /polls/5/\n url(r'^(?P<house_id>[0-9]+)/$', views.detail, name='detail'),\n # ex: /polls/5/results/\n url(r'^(?P<house_id>[0-9]+)/ratings/$', views.ratings, name='ratings'),\n\n]",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.web_perf.metrics import timeline_based_metric
from telemetry.web_perf.metrics.trace_event_stats import TraceEventStats
from telemetry.web_perf.metrics.trace_event_stats import TraceEventStatsInput
class IndexedDBTimelineMetric(timeline_based_metric.TimelineBasedMetric):
  """Metrics for IndexedDB operations.

  Aggregates the duration of the IndexedDB trace events listed below, all
  recorded in the 'Browser' process, into one named metric per event.
  """

  def __init__(self):
    super(IndexedDBTimelineMetric, self).__init__()
    self._stats = TraceEventStats()

    # (event_name, metric_name, metric_description) for every IndexedDB
    # browser-process trace event this metric aggregates.  All entries share
    # the same category, units and process name, so they are registered in a
    # single loop below.
    event_specs = (
        ('IndexedDBDatabase::GetOperation', 'idb-gets',
         'The duration of all "get" ops in IndexedDB'),
        ('IndexedDBDatabase::PutOperation', 'idb-puts',
         'The duration of all "put" ops in IndexedDB'),
        ('IndexedDBFactoryImpl::Open', 'idb-opens',
         'The duration of all "open" ops in IndexedDB'),
        ('IndexedDBTransaction::Commit', 'idb-transaction-commits',
         'The duration of all "commit" ops of transactions in IndexedDB.'),
        ('IndexedDBFactoryImpl::DeleteDatabase', 'idb-database-deletes',
         'The duration of all "delete" ops of IndexedDB databases.'),
        ('IndexedDBDatabase::OpenCursorOperation', 'idb-cursor-opens',
         'The duration of all "open" ops of IndexedDB cursors.'),
        ('IndexedDBCursor::CursorIterationOperation', 'idb-cursor-iterations',
         'The duration of all "iteration" ops of IndexedDB cursors.'),
    )
    for event_name, metric_name, description in event_specs:
      self._stats.AddInput(TraceEventStatsInput(
          event_category='IndexedDB',
          event_name=event_name,
          metric_name=metric_name,
          metric_description=description,
          units='ms',
          process_name='Browser'))

  def AddResults(self, model, renderer_process, interactions, results):
    """Delegates result collection to the aggregated trace-event stats."""
    self._stats.AddResults(model, renderer_process, interactions, results)
|
normal
|
{
"blob_id": "47f88bc3836490e08f464f71351096b54118420e",
"index": 5297,
"step-1": "<mask token>\n\n\nclass IndexedDBTimelineMetric(timeline_based_metric.TimelineBasedMetric):\n <mask token>\n <mask token>\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass IndexedDBTimelineMetric(timeline_based_metric.TimelineBasedMetric):\n <mask token>\n\n def __init__(self):\n super(IndexedDBTimelineMetric, self).__init__()\n self._stats = TraceEventStats()\n self._stats.AddInput(TraceEventStatsInput(event_category=\n 'IndexedDB', event_name='IndexedDBDatabase::GetOperation',\n metric_name='idb-gets', metric_description=\n 'The duration of all \"get\" ops in IndexedDB', units='ms',\n process_name='Browser'))\n self._stats.AddInput(TraceEventStatsInput(event_category=\n 'IndexedDB', event_name='IndexedDBDatabase::PutOperation',\n metric_name='idb-puts', metric_description=\n 'The duration of all \"put\" ops in IndexedDB', units='ms',\n process_name='Browser'))\n self._stats.AddInput(TraceEventStatsInput(event_category=\n 'IndexedDB', event_name='IndexedDBFactoryImpl::Open',\n metric_name='idb-opens', metric_description=\n 'The duration of all \"open\" ops in IndexedDB', units='ms',\n process_name='Browser'))\n self._stats.AddInput(TraceEventStatsInput(event_category=\n 'IndexedDB', event_name='IndexedDBTransaction::Commit',\n metric_name='idb-transaction-commits', metric_description=\n 'The duration of all \"commit\" ops of ' +\n 'transactions in IndexedDB.', units='ms', process_name='Browser'))\n self._stats.AddInput(TraceEventStatsInput(event_category=\n 'IndexedDB', event_name='IndexedDBFactoryImpl::DeleteDatabase',\n metric_name='idb-database-deletes', metric_description=\n 'The duration of all \"delete\" ops of ' + 'IndexedDB databases.',\n units='ms', process_name='Browser'))\n self._stats.AddInput(TraceEventStatsInput(event_category=\n 'IndexedDB', event_name=\n 'IndexedDBDatabase::OpenCursorOperation', metric_name=\n 'idb-cursor-opens', metric_description=\n 'The duration of all \"open\" ops of ' + 'IndexedDB cursors.',\n units='ms', process_name='Browser'))\n self._stats.AddInput(TraceEventStatsInput(event_category=\n 'IndexedDB', event_name=\n 'IndexedDBCursor::CursorIterationOperation', 
metric_name=\n 'idb-cursor-iterations', metric_description=\n 'The duration of all \"iteration\" ops of ' +\n 'IndexedDB cursors.', units='ms', process_name='Browser'))\n\n def AddResults(self, model, renderer_process, interactions, results):\n self._stats.AddResults(model, renderer_process, interactions, results)\n",
"step-3": "<mask token>\n\n\nclass IndexedDBTimelineMetric(timeline_based_metric.TimelineBasedMetric):\n \"\"\"Metrics for IndexedDB operations.\n \"\"\"\n\n def __init__(self):\n super(IndexedDBTimelineMetric, self).__init__()\n self._stats = TraceEventStats()\n self._stats.AddInput(TraceEventStatsInput(event_category=\n 'IndexedDB', event_name='IndexedDBDatabase::GetOperation',\n metric_name='idb-gets', metric_description=\n 'The duration of all \"get\" ops in IndexedDB', units='ms',\n process_name='Browser'))\n self._stats.AddInput(TraceEventStatsInput(event_category=\n 'IndexedDB', event_name='IndexedDBDatabase::PutOperation',\n metric_name='idb-puts', metric_description=\n 'The duration of all \"put\" ops in IndexedDB', units='ms',\n process_name='Browser'))\n self._stats.AddInput(TraceEventStatsInput(event_category=\n 'IndexedDB', event_name='IndexedDBFactoryImpl::Open',\n metric_name='idb-opens', metric_description=\n 'The duration of all \"open\" ops in IndexedDB', units='ms',\n process_name='Browser'))\n self._stats.AddInput(TraceEventStatsInput(event_category=\n 'IndexedDB', event_name='IndexedDBTransaction::Commit',\n metric_name='idb-transaction-commits', metric_description=\n 'The duration of all \"commit\" ops of ' +\n 'transactions in IndexedDB.', units='ms', process_name='Browser'))\n self._stats.AddInput(TraceEventStatsInput(event_category=\n 'IndexedDB', event_name='IndexedDBFactoryImpl::DeleteDatabase',\n metric_name='idb-database-deletes', metric_description=\n 'The duration of all \"delete\" ops of ' + 'IndexedDB databases.',\n units='ms', process_name='Browser'))\n self._stats.AddInput(TraceEventStatsInput(event_category=\n 'IndexedDB', event_name=\n 'IndexedDBDatabase::OpenCursorOperation', metric_name=\n 'idb-cursor-opens', metric_description=\n 'The duration of all \"open\" ops of ' + 'IndexedDB cursors.',\n units='ms', process_name='Browser'))\n self._stats.AddInput(TraceEventStatsInput(event_category=\n 'IndexedDB', event_name=\n 
'IndexedDBCursor::CursorIterationOperation', metric_name=\n 'idb-cursor-iterations', metric_description=\n 'The duration of all \"iteration\" ops of ' +\n 'IndexedDB cursors.', units='ms', process_name='Browser'))\n\n def AddResults(self, model, renderer_process, interactions, results):\n self._stats.AddResults(model, renderer_process, interactions, results)\n",
"step-4": "from telemetry.web_perf.metrics import timeline_based_metric\nfrom telemetry.web_perf.metrics.trace_event_stats import TraceEventStats\nfrom telemetry.web_perf.metrics.trace_event_stats import TraceEventStatsInput\n\n\nclass IndexedDBTimelineMetric(timeline_based_metric.TimelineBasedMetric):\n \"\"\"Metrics for IndexedDB operations.\n \"\"\"\n\n def __init__(self):\n super(IndexedDBTimelineMetric, self).__init__()\n self._stats = TraceEventStats()\n self._stats.AddInput(TraceEventStatsInput(event_category=\n 'IndexedDB', event_name='IndexedDBDatabase::GetOperation',\n metric_name='idb-gets', metric_description=\n 'The duration of all \"get\" ops in IndexedDB', units='ms',\n process_name='Browser'))\n self._stats.AddInput(TraceEventStatsInput(event_category=\n 'IndexedDB', event_name='IndexedDBDatabase::PutOperation',\n metric_name='idb-puts', metric_description=\n 'The duration of all \"put\" ops in IndexedDB', units='ms',\n process_name='Browser'))\n self._stats.AddInput(TraceEventStatsInput(event_category=\n 'IndexedDB', event_name='IndexedDBFactoryImpl::Open',\n metric_name='idb-opens', metric_description=\n 'The duration of all \"open\" ops in IndexedDB', units='ms',\n process_name='Browser'))\n self._stats.AddInput(TraceEventStatsInput(event_category=\n 'IndexedDB', event_name='IndexedDBTransaction::Commit',\n metric_name='idb-transaction-commits', metric_description=\n 'The duration of all \"commit\" ops of ' +\n 'transactions in IndexedDB.', units='ms', process_name='Browser'))\n self._stats.AddInput(TraceEventStatsInput(event_category=\n 'IndexedDB', event_name='IndexedDBFactoryImpl::DeleteDatabase',\n metric_name='idb-database-deletes', metric_description=\n 'The duration of all \"delete\" ops of ' + 'IndexedDB databases.',\n units='ms', process_name='Browser'))\n self._stats.AddInput(TraceEventStatsInput(event_category=\n 'IndexedDB', event_name=\n 'IndexedDBDatabase::OpenCursorOperation', metric_name=\n 'idb-cursor-opens', 
metric_description=\n 'The duration of all \"open\" ops of ' + 'IndexedDB cursors.',\n units='ms', process_name='Browser'))\n self._stats.AddInput(TraceEventStatsInput(event_category=\n 'IndexedDB', event_name=\n 'IndexedDBCursor::CursorIterationOperation', metric_name=\n 'idb-cursor-iterations', metric_description=\n 'The duration of all \"iteration\" ops of ' +\n 'IndexedDB cursors.', units='ms', process_name='Browser'))\n\n def AddResults(self, model, renderer_process, interactions, results):\n self._stats.AddResults(model, renderer_process, interactions, results)\n",
"step-5": "# Copyright 2015 The Chromium Authors. All rights reserved.\n# Use of this source code is governed by a BSD-style license that can be\n# found in the LICENSE file.\n\n\nfrom telemetry.web_perf.metrics import timeline_based_metric\nfrom telemetry.web_perf.metrics.trace_event_stats import TraceEventStats\nfrom telemetry.web_perf.metrics.trace_event_stats import TraceEventStatsInput\n\n\nclass IndexedDBTimelineMetric(timeline_based_metric.TimelineBasedMetric):\n \"\"\"Metrics for IndexedDB operations.\n \"\"\"\n\n def __init__(self):\n super(IndexedDBTimelineMetric, self).__init__()\n self._stats = TraceEventStats()\n\n self._stats.AddInput(TraceEventStatsInput(\n event_category='IndexedDB',\n event_name='IndexedDBDatabase::GetOperation',\n metric_name='idb-gets',\n metric_description='The duration of all \"get\" ops in IndexedDB',\n units='ms',\n process_name='Browser'))\n\n self._stats.AddInput(TraceEventStatsInput(\n event_category='IndexedDB',\n event_name='IndexedDBDatabase::PutOperation',\n metric_name='idb-puts',\n metric_description='The duration of all \"put\" ops in IndexedDB',\n units='ms',\n process_name='Browser'))\n\n self._stats.AddInput(TraceEventStatsInput(\n event_category='IndexedDB',\n event_name='IndexedDBFactoryImpl::Open',\n metric_name='idb-opens',\n metric_description='The duration of all \"open\" ops in IndexedDB',\n units='ms',\n process_name='Browser'))\n\n self._stats.AddInput(TraceEventStatsInput(\n event_category='IndexedDB',\n event_name='IndexedDBTransaction::Commit',\n metric_name='idb-transaction-commits',\n metric_description=('The duration of all \"commit\" ops of ' +\n 'transactions in IndexedDB.'),\n units='ms',\n process_name='Browser'))\n\n self._stats.AddInput(TraceEventStatsInput(\n event_category='IndexedDB',\n event_name='IndexedDBFactoryImpl::DeleteDatabase',\n metric_name='idb-database-deletes',\n metric_description=('The duration of all \"delete\" ops of ' +\n 'IndexedDB databases.'),\n units='ms',\n 
process_name='Browser'))\n\n self._stats.AddInput(TraceEventStatsInput(\n event_category='IndexedDB',\n event_name='IndexedDBDatabase::OpenCursorOperation',\n metric_name='idb-cursor-opens',\n metric_description=('The duration of all \"open\" ops of ' +\n 'IndexedDB cursors.'),\n units='ms',\n process_name='Browser'))\n\n self._stats.AddInput(TraceEventStatsInput(\n event_category='IndexedDB',\n event_name='IndexedDBCursor::CursorIterationOperation',\n metric_name='idb-cursor-iterations',\n metric_description=('The duration of all \"iteration\" ops of ' +\n 'IndexedDB cursors.'),\n units='ms',\n process_name='Browser'))\n\n def AddResults(self, model, renderer_process, interactions, results):\n self._stats.AddResults(model, renderer_process, interactions, results)\n",
"step-ids": [
1,
3,
4,
5,
6
]
}
|
[
1,
3,
4,
5,
6
] |
####################################
## Readable code versus less code ##
####################################
import threading
from web_server.general_api import general_api as api
logger = api.__get_logger('ConnTimeout.run')
class ConnTimeout(object):
    """Periodic retry helper driven by ``threading.Timer``.

    Every ``timeout`` seconds ``_handler`` fires: it advances ``count``
    (wrapping after ``servers``), re-arms the timer, and calls ``function``
    with ``args`` plus the next server key (``api.MN_RKEY + str(count)``).
    """

    def __init__(self, timeout, function, servers=5, args=None, kwargs=None):
        """Store the schedule; no timer runs until ``start()`` is called.

        ``args``/``kwargs`` used to default to shared mutable lists, which
        leaks state between instances; ``None`` sentinels fix that while
        keeping the call signature backward compatible.
        """
        self.timeout = timeout
        self.timer = None  # threading.Timer, created lazily by _start()
        self.count = 0
        self.f = function
        self.servers = servers
        self.args = [] if args is None else args
        # NOTE(review): kwargs is stored but never used by this class; the
        # original default was a list, preserved here for compatibility.
        self.kwargs = [] if kwargs is None else kwargs
        super(ConnTimeout, self).__init__()

    #def __reduce__(self):
    #    return (self.__class__, (self.name, self.address))

    def start(self):
        """Arm the first timer (returns ``_start()``'s result, i.e. None)."""
        return self._start()

    def _start(self):
        self.timer = threading.Timer(self.timeout, self._handler)
        self.timer.start()

    def is_alive(self):
        """True/False once a timer exists; ``None`` before the first start."""
        return self._is_alive()

    def _is_alive(self):
        if self.timer:
            return self.timer.is_alive()
        else:
            return self.timer

    def _handler(self):
        """Timer callback: advance the server counter, re-arm, connect."""
        # count cycles 1..servers then wraps to 0, matching the original
        # increment-before-use behaviour.
        if self.count < self.servers:
            self.count += 1
        else:
            self.count = 0

        ## recursive timer call: re-arm *before* calling out so a slow or
        ## raising connect attempt does not stop the retry loop.
        self.timer = threading.Timer(self.timeout, self._handler)
        self.timer.start()

        # Build the call args from a copy so self.args is never mutated.
        args = self.args + [api.MN_RKEY + str(self.count)]
        logger.info(" trying to connect to " + api.MN_RKEY + str(self.count))
        self.f(*args)

    def stop(self):
        """Cancel a pending timer.  Returns True iff one was cancelled.

        Guarded against ``self.timer`` being None so calling ``stop()``
        before ``start()`` returns False instead of raising AttributeError.
        """
        if self.timer is not None and self.timer.is_alive():
            self.timer.cancel()
            logger.info("timer killed...")
            return True
        return False
## other approach, didn't like that has to keep the main thread running by force
## using while inside main
## http://code.activestate.com/recipes/496800-event-scheduling-threadingtimer/
"""
import thread
import threading
class Operation(threading._Timer):
def __init__(self, *args, **kwargs):
threading._Timer.__init__(self, *args, **kwargs)
def run(self):
while True:
self.finished.clear()
self.finished.wait(self.interval)
if not self.finished.isSet():
self.function(*self.args, **self.kwargs)
else:
return
self.finished.set()
class Manager(object):
def add_operation(self, operation, interval, args=[], kwargs={}):
self.op = Operation(interval, operation, args, kwargs)
thread.start_new_thread(self.op.run, ())
def cancel(self):
if self.op:
self.op.cancel()
if __name__ == '__main__':
# Print "Hello World!" every 5 seconds
import time
def hello():
print "Hello World!"
timer = Manager()
timer.add_operation(hello, 5)
while True:
time.sleep(.1)
"""
|
normal
|
{
"blob_id": "ed5ba72443b70c84941af3d112e0246cb3ae97d9",
"index": 5337,
"step-1": "<mask token>\n\n\nclass ConnTimeout(object):\n\n def __init__(self, timeout, function, servers=5, args=[], kwargs=[]):\n self.timeout = timeout\n self.timer = None\n self.count = 0\n self.f = function\n self.servers = servers\n self.args = args\n self.kwargs = kwargs\n super(ConnTimeout, self).__init__()\n\n def start(self):\n return self._start()\n <mask token>\n <mask token>\n\n def _is_alive(self):\n if self.timer:\n return self.timer.is_alive()\n else:\n return self.timer\n\n def _handler(self):\n if self.count < self.servers:\n self.count += 1\n else:\n self.count = 0\n self.timer = threading.Timer(self.timeout, self._handler)\n self.timer.start()\n args = self.args[:]\n args.append(api.MN_RKEY + str(self.count))\n logger.info(' trying to connect to ' + api.MN_RKEY + str(self.count))\n self.f(*args)\n del args[:]\n <mask token>\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass ConnTimeout(object):\n\n def __init__(self, timeout, function, servers=5, args=[], kwargs=[]):\n self.timeout = timeout\n self.timer = None\n self.count = 0\n self.f = function\n self.servers = servers\n self.args = args\n self.kwargs = kwargs\n super(ConnTimeout, self).__init__()\n\n def start(self):\n return self._start()\n\n def _start(self):\n self.timer = threading.Timer(self.timeout, self._handler)\n self.timer.start()\n <mask token>\n\n def _is_alive(self):\n if self.timer:\n return self.timer.is_alive()\n else:\n return self.timer\n\n def _handler(self):\n if self.count < self.servers:\n self.count += 1\n else:\n self.count = 0\n self.timer = threading.Timer(self.timeout, self._handler)\n self.timer.start()\n args = self.args[:]\n args.append(api.MN_RKEY + str(self.count))\n logger.info(' trying to connect to ' + api.MN_RKEY + str(self.count))\n self.f(*args)\n del args[:]\n <mask token>\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass ConnTimeout(object):\n\n def __init__(self, timeout, function, servers=5, args=[], kwargs=[]):\n self.timeout = timeout\n self.timer = None\n self.count = 0\n self.f = function\n self.servers = servers\n self.args = args\n self.kwargs = kwargs\n super(ConnTimeout, self).__init__()\n\n def start(self):\n return self._start()\n\n def _start(self):\n self.timer = threading.Timer(self.timeout, self._handler)\n self.timer.start()\n\n def is_alive(self):\n return self._is_alive()\n\n def _is_alive(self):\n if self.timer:\n return self.timer.is_alive()\n else:\n return self.timer\n\n def _handler(self):\n if self.count < self.servers:\n self.count += 1\n else:\n self.count = 0\n self.timer = threading.Timer(self.timeout, self._handler)\n self.timer.start()\n args = self.args[:]\n args.append(api.MN_RKEY + str(self.count))\n logger.info(' trying to connect to ' + api.MN_RKEY + str(self.count))\n self.f(*args)\n del args[:]\n <mask token>\n\n\n<mask token>\n",
"step-4": "<mask token>\n\n\nclass ConnTimeout(object):\n\n def __init__(self, timeout, function, servers=5, args=[], kwargs=[]):\n self.timeout = timeout\n self.timer = None\n self.count = 0\n self.f = function\n self.servers = servers\n self.args = args\n self.kwargs = kwargs\n super(ConnTimeout, self).__init__()\n\n def start(self):\n return self._start()\n\n def _start(self):\n self.timer = threading.Timer(self.timeout, self._handler)\n self.timer.start()\n\n def is_alive(self):\n return self._is_alive()\n\n def _is_alive(self):\n if self.timer:\n return self.timer.is_alive()\n else:\n return self.timer\n\n def _handler(self):\n if self.count < self.servers:\n self.count += 1\n else:\n self.count = 0\n self.timer = threading.Timer(self.timeout, self._handler)\n self.timer.start()\n args = self.args[:]\n args.append(api.MN_RKEY + str(self.count))\n logger.info(' trying to connect to ' + api.MN_RKEY + str(self.count))\n self.f(*args)\n del args[:]\n\n def stop(self):\n if self.timer.is_alive():\n self.timer.cancel()\n logger.info('timer killed...')\n return True\n return False\n\n\n<mask token>\n",
"step-5": "####################################\n## Readable code versus less code ##\n####################################\n\nimport threading\nfrom web_server.general_api import general_api as api\n\nlogger = api.__get_logger('ConnTimeout.run')\n\n\nclass ConnTimeout(object):\n def __init__(self, timeout, function, servers=5, args=[], kwargs=[]):\n self.timeout = timeout\n self.timer = None #threading.Timer(timeout, pickle.loads(function), args)\n self.count = 0\n self.f = function\n self.servers = servers\n self.args = args\n self.kwargs = kwargs\n super(ConnTimeout, self).__init__()\n\n\n #def __reduce__(self):\n # return (self.__class__, (self.name, self.address))\n\n\n def start(self):\n return self._start()\n\n\n def _start(self):\n self.timer = threading.Timer(self.timeout, self._handler)\n self.timer.start()\n\n\n def is_alive(self):\n return self._is_alive()\n\n\n def _is_alive(self):\n if self.timer:\n return self.timer.is_alive()\n else:\n return self.timer\n\n\n def _handler(self):\n if self.count<self.servers:\n self.count+=1\n else:\n self.count=0\n \n ## recursive timer call\n self.timer = threading.Timer(self.timeout, self._handler)\n self.timer.start()\n \n args = self.args[:]\n args.append(api.MN_RKEY+str(self.count))\n logger.info(\" trying to connect to \"+api.MN_RKEY+str(self.count))\n\n self.f(*args)\n del args[:]\n\n\n def stop(self):\n if self.timer.is_alive():\n self.timer.cancel()\n logger.info(\"timer killed...\")\n return True\n return False\n\n\n## other approach, didn't like that has to keep the main thread running by force\n## using while inside main\n## http://code.activestate.com/recipes/496800-event-scheduling-threadingtimer/\n\"\"\"\nimport thread\nimport threading\n\nclass Operation(threading._Timer):\n def __init__(self, *args, **kwargs):\n threading._Timer.__init__(self, *args, **kwargs)\n\n def run(self):\n while True:\n self.finished.clear()\n self.finished.wait(self.interval)\n if not self.finished.isSet():\n 
self.function(*self.args, **self.kwargs)\n else:\n return\n self.finished.set()\n\nclass Manager(object):\n\n def add_operation(self, operation, interval, args=[], kwargs={}):\n self.op = Operation(interval, operation, args, kwargs)\n thread.start_new_thread(self.op.run, ())\n\n def cancel(self):\n if self.op:\n self.op.cancel()\n\nif __name__ == '__main__':\n # Print \"Hello World!\" every 5 seconds\n \n import time\n\n def hello():\n print \"Hello World!\"\n\n timer = Manager()\n timer.add_operation(hello, 5)\n\n while True:\n time.sleep(.1)\n\"\"\"\n",
"step-ids": [
5,
6,
7,
8,
11
]
}
|
[
5,
6,
7,
8,
11
] |
from data_loaders.data_module import ChestDataModule
from utils.visualisation import showInRow
from models import get_model
from transforms.finetuning import ChestTrainTransforms, ChestValTransforms
from models.baseline import BaseLineClassifier
from pytorch_lightning.loggers import WandbLogger
from pytorch_lightning.callbacks import ModelCheckpoint
import torch
import pytorch_lightning as pl
from pytorch_lightning import seed_everything

# Fine-tuning script: trains a ResNet-18 baseline classifier on the
# "chexpert_14" chest X-ray dataset and logs runs to Weights & Biases.

# Seed all RNGs before any data/model construction for reproducibility
# (pairs with deterministic=True on the Trainer below).
seed_everything(12345)

# Data: "chexpert_14" dataset, batches of 32, 2 loader workers, no balanced
# sampling; train/val transforms resize to height 224.
dm = ChestDataModule(["chexpert_14"], batch_size=32, num_workers=2, balanced=False)
dm.train_transforms = ChestTrainTransforms(height=224)
dm.val_transforms = ChestValTransforms(height=224)

# Model: ImageNet-pretrained ResNet-18 wrapped by BaseLineClassifier for
# 14-class multi-label classification; mixup and ct_reg are disabled for
# this baseline run.  b1/b2 are presumably Adam betas (the run name says
# "Adam") — confirm in BaseLineClassifier.  linear=False presumably trains
# the full backbone rather than a linear probe — TODO confirm.
classifier = BaseLineClassifier(get_model("resnet18", pretrained=True),
                                num_classes=14,
                                linear=False,
                                learning_rate=1e-5,
                                b1=0.9,
                                b2=0.999,
                                weight_decay=1e-4,
                                multi_class=True,
                                mixup=False,
                                ct_reg=False)

# Tracking and checkpointing: log to the 'thesis' W&B project and keep the
# checkpoint selected by validation loss.
wandb_logger = WandbLogger(name='baseline-NL-chexpert_14-full-Adam-1e_5',project='thesis')
checkpoint_callback = ModelCheckpoint(monitor='val_loss',
                                      dirpath='logs/baseline/chexpert_14/',
                                      filename='NL-full-Adam-1e_5-{epoch:02d}-{val_loss:.4f}')

# Single-GPU, deterministic run for 20 epochs with 10 sanity validation steps.
trainer = pl.Trainer(gpus=1, deterministic=True,
                     logger=wandb_logger, callbacks=[checkpoint_callback], max_epochs=20, num_sanity_val_steps=10)

# Move the model to GPU when available; the guard keeps the script from
# crashing on CPU-only machines.
if torch.cuda.is_available():
    classifier = classifier.cuda()

trainer.fit(classifier, dm)
|
normal
|
{
"blob_id": "05ca7bbc3285a9e37921c0e514a2e31b05abe051",
"index": 6396,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nseed_everything(12345)\n<mask token>\nif torch.cuda.is_available():\n classifier = classifier.cuda()\ntrainer.fit(classifier, dm)\n",
"step-3": "<mask token>\nseed_everything(12345)\ndm = ChestDataModule(['chexpert_14'], batch_size=32, num_workers=2,\n balanced=False)\ndm.train_transforms = ChestTrainTransforms(height=224)\ndm.val_transforms = ChestValTransforms(height=224)\nclassifier = BaseLineClassifier(get_model('resnet18', pretrained=True),\n num_classes=14, linear=False, learning_rate=1e-05, b1=0.9, b2=0.999,\n weight_decay=0.0001, multi_class=True, mixup=False, ct_reg=False)\nwandb_logger = WandbLogger(name='baseline-NL-chexpert_14-full-Adam-1e_5',\n project='thesis')\ncheckpoint_callback = ModelCheckpoint(monitor='val_loss', dirpath=\n 'logs/baseline/chexpert_14/', filename=\n 'NL-full-Adam-1e_5-{epoch:02d}-{val_loss:.4f}')\ntrainer = pl.Trainer(gpus=1, deterministic=True, logger=wandb_logger,\n callbacks=[checkpoint_callback], max_epochs=20, num_sanity_val_steps=10)\nif torch.cuda.is_available():\n classifier = classifier.cuda()\ntrainer.fit(classifier, dm)\n",
"step-4": "from data_loaders.data_module import ChestDataModule\nfrom utils.visualisation import showInRow\nfrom models import get_model\nfrom transforms.finetuning import ChestTrainTransforms, ChestValTransforms\nfrom models.baseline import BaseLineClassifier\nfrom pytorch_lightning.loggers import WandbLogger\nfrom pytorch_lightning.callbacks import ModelCheckpoint\nimport torch\nimport pytorch_lightning as pl\nfrom pytorch_lightning import seed_everything\nseed_everything(12345)\ndm = ChestDataModule(['chexpert_14'], batch_size=32, num_workers=2,\n balanced=False)\ndm.train_transforms = ChestTrainTransforms(height=224)\ndm.val_transforms = ChestValTransforms(height=224)\nclassifier = BaseLineClassifier(get_model('resnet18', pretrained=True),\n num_classes=14, linear=False, learning_rate=1e-05, b1=0.9, b2=0.999,\n weight_decay=0.0001, multi_class=True, mixup=False, ct_reg=False)\nwandb_logger = WandbLogger(name='baseline-NL-chexpert_14-full-Adam-1e_5',\n project='thesis')\ncheckpoint_callback = ModelCheckpoint(monitor='val_loss', dirpath=\n 'logs/baseline/chexpert_14/', filename=\n 'NL-full-Adam-1e_5-{epoch:02d}-{val_loss:.4f}')\ntrainer = pl.Trainer(gpus=1, deterministic=True, logger=wandb_logger,\n callbacks=[checkpoint_callback], max_epochs=20, num_sanity_val_steps=10)\nif torch.cuda.is_available():\n classifier = classifier.cuda()\ntrainer.fit(classifier, dm)\n",
"step-5": "from data_loaders.data_module import ChestDataModule\nfrom utils.visualisation import showInRow\nfrom models import get_model\n\nfrom transforms.finetuning import ChestTrainTransforms, ChestValTransforms\n\nfrom models.baseline import BaseLineClassifier\n\nfrom pytorch_lightning.loggers import WandbLogger\nfrom pytorch_lightning.callbacks import ModelCheckpoint\n\nimport torch\nimport pytorch_lightning as pl\nfrom pytorch_lightning import seed_everything\nseed_everything(12345)\n\n\ndm = ChestDataModule([\"chexpert_14\"], batch_size=32, num_workers=2, balanced=False)\ndm.train_transforms = ChestTrainTransforms(height=224)\ndm.val_transforms = ChestValTransforms(height=224)\n\nclassifier = BaseLineClassifier(get_model(\"resnet18\", pretrained=True), \n num_classes=14, \n linear=False,\n learning_rate=1e-5,\n b1=0.9,\n b2=0.999,\n weight_decay=1e-4,\n multi_class=True,\n mixup=False,\n ct_reg=False)\n\n\nwandb_logger = WandbLogger(name='baseline-NL-chexpert_14-full-Adam-1e_5',project='thesis')\ncheckpoint_callback = ModelCheckpoint(monitor='val_loss', \n dirpath='logs/baseline/chexpert_14/', \n filename='NL-full-Adam-1e_5-{epoch:02d}-{val_loss:.4f}')\n\ntrainer = pl.Trainer(gpus=1, deterministic=True,\n logger=wandb_logger, callbacks=[checkpoint_callback], max_epochs=20, num_sanity_val_steps=10)\n\nif torch.cuda.is_available():\n classifier = classifier.cuda()\n\ntrainer.fit(classifier, dm)",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
class DataViewsetRegistryTest(TestCase):
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class DataViewsetRegistryTest(TestCase):
def test_register_data_model(self) ->None:
registry = DataViewsetRegistry()
registry.register(FearConditioningData)
self.assertEqual(registry.data_models, [FearConditioningData])
self.assertEqual(registry.urls[0].pattern._route,
'projects/<int:project_pk>/experiments/<int:experiment_pk>/data/fear-conditioning/'
)
self.assertEqual(registry.urls[0].callback, registry.views[
'fear_conditioning_data_list'])
self.assertEqual(registry.urls[0].name, 'fear_conditioning_data_list')
self.assertEqual(registry.urls[1].pattern._route,
'projects/<int:project_pk>/experiments/<int:experiment_pk>/data/fear-conditioning/<int:data_pk>/'
)
self.assertEqual(registry.urls[1].callback, registry.views[
'fear_conditioning_data_detail'])
self.assertEqual(registry.urls[1].name, 'fear_conditioning_data_detail'
)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class ModuleRegistryTest(TestCase):
def test_register_module_create_view(self) ->None:
registry = ModuleRegistry()
registry.register(FearConditioningModule)
self.assertEqual(registry.urls[0].pattern._route,
'projects/<int:project_pk>/experiments/<int:experiment_pk>/modules/fear-conditioning/add/'
)
self.assertEqual(registry.urls[0].callback, registry.views[
'fear_conditioning_create'])
self.assertEqual(registry.urls[0].name, 'fear_conditioning_create')
self.assertEqual(registry.modules, [FearConditioningModule])
class DataViewsetRegistryTest(TestCase):
def test_register_data_model(self) ->None:
registry = DataViewsetRegistry()
registry.register(FearConditioningData)
self.assertEqual(registry.data_models, [FearConditioningData])
self.assertEqual(registry.urls[0].pattern._route,
'projects/<int:project_pk>/experiments/<int:experiment_pk>/data/fear-conditioning/'
)
self.assertEqual(registry.urls[0].callback, registry.views[
'fear_conditioning_data_list'])
self.assertEqual(registry.urls[0].name, 'fear_conditioning_data_list')
self.assertEqual(registry.urls[1].pattern._route,
'projects/<int:project_pk>/experiments/<int:experiment_pk>/data/fear-conditioning/<int:data_pk>/'
)
self.assertEqual(registry.urls[1].callback, registry.views[
'fear_conditioning_data_detail'])
self.assertEqual(registry.urls[1].name, 'fear_conditioning_data_detail'
)
<|reserved_special_token_1|>
from django.test import TestCase
from ..models import FearConditioningData, FearConditioningModule
from ..registry import DataViewsetRegistry, ModuleRegistry
class ModuleRegistryTest(TestCase):
def test_register_module_create_view(self) ->None:
registry = ModuleRegistry()
registry.register(FearConditioningModule)
self.assertEqual(registry.urls[0].pattern._route,
'projects/<int:project_pk>/experiments/<int:experiment_pk>/modules/fear-conditioning/add/'
)
self.assertEqual(registry.urls[0].callback, registry.views[
'fear_conditioning_create'])
self.assertEqual(registry.urls[0].name, 'fear_conditioning_create')
self.assertEqual(registry.modules, [FearConditioningModule])
class DataViewsetRegistryTest(TestCase):
def test_register_data_model(self) ->None:
registry = DataViewsetRegistry()
registry.register(FearConditioningData)
self.assertEqual(registry.data_models, [FearConditioningData])
self.assertEqual(registry.urls[0].pattern._route,
'projects/<int:project_pk>/experiments/<int:experiment_pk>/data/fear-conditioning/'
)
self.assertEqual(registry.urls[0].callback, registry.views[
'fear_conditioning_data_list'])
self.assertEqual(registry.urls[0].name, 'fear_conditioning_data_list')
self.assertEqual(registry.urls[1].pattern._route,
'projects/<int:project_pk>/experiments/<int:experiment_pk>/data/fear-conditioning/<int:data_pk>/'
)
self.assertEqual(registry.urls[1].callback, registry.views[
'fear_conditioning_data_detail'])
self.assertEqual(registry.urls[1].name, 'fear_conditioning_data_detail'
)
<|reserved_special_token_1|>
from django.test import TestCase
from ..models import FearConditioningData, FearConditioningModule
from ..registry import DataViewsetRegistry, ModuleRegistry
class ModuleRegistryTest(TestCase):
    """Registering a module should expose its create view under a named URL."""

    def test_register_module_create_view(self) -> None:
        registry = ModuleRegistry()
        registry.register(FearConditioningModule)

        create_url = registry.urls[0]
        expected_route = (
            "projects/<int:project_pk>/experiments/<int:experiment_pk>/modules/"
            "fear-conditioning/add/"
        )
        self.assertEqual(create_url.pattern._route, expected_route)
        self.assertEqual(
            create_url.callback, registry.views["fear_conditioning_create"]
        )
        self.assertEqual(create_url.name, "fear_conditioning_create")
        self.assertEqual(registry.modules, [FearConditioningModule])
class DataViewsetRegistryTest(TestCase):
    """Registering a data model should expose list and detail URLs."""

    def test_register_data_model(self) -> None:
        registry = DataViewsetRegistry()
        registry.register(FearConditioningData)

        self.assertEqual(registry.data_models, [FearConditioningData])

        list_url, detail_url = registry.urls[0], registry.urls[1]

        # List view
        self.assertEqual(
            list_url.pattern._route,
            "projects/<int:project_pk>/experiments/<int:experiment_pk>/data/"
            "fear-conditioning/",
        )
        self.assertEqual(
            list_url.callback, registry.views["fear_conditioning_data_list"]
        )
        self.assertEqual(list_url.name, "fear_conditioning_data_list")

        # Detail view
        self.assertEqual(
            detail_url.pattern._route,
            "projects/<int:project_pk>/experiments/<int:experiment_pk>/data/"
            "fear-conditioning/<int:data_pk>/",
        )
        self.assertEqual(
            detail_url.callback, registry.views["fear_conditioning_data_detail"]
        )
        self.assertEqual(detail_url.name, "fear_conditioning_data_detail")
|
flexible
|
{
"blob_id": "14cc048f517efd3dad9960f35fff66a78f68fb45",
"index": 8975,
"step-1": "<mask token>\n\n\nclass DataViewsetRegistryTest(TestCase):\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass DataViewsetRegistryTest(TestCase):\n\n def test_register_data_model(self) ->None:\n registry = DataViewsetRegistry()\n registry.register(FearConditioningData)\n self.assertEqual(registry.data_models, [FearConditioningData])\n self.assertEqual(registry.urls[0].pattern._route,\n 'projects/<int:project_pk>/experiments/<int:experiment_pk>/data/fear-conditioning/'\n )\n self.assertEqual(registry.urls[0].callback, registry.views[\n 'fear_conditioning_data_list'])\n self.assertEqual(registry.urls[0].name, 'fear_conditioning_data_list')\n self.assertEqual(registry.urls[1].pattern._route,\n 'projects/<int:project_pk>/experiments/<int:experiment_pk>/data/fear-conditioning/<int:data_pk>/'\n )\n self.assertEqual(registry.urls[1].callback, registry.views[\n 'fear_conditioning_data_detail'])\n self.assertEqual(registry.urls[1].name, 'fear_conditioning_data_detail'\n )\n",
"step-3": "<mask token>\n\n\nclass ModuleRegistryTest(TestCase):\n\n def test_register_module_create_view(self) ->None:\n registry = ModuleRegistry()\n registry.register(FearConditioningModule)\n self.assertEqual(registry.urls[0].pattern._route,\n 'projects/<int:project_pk>/experiments/<int:experiment_pk>/modules/fear-conditioning/add/'\n )\n self.assertEqual(registry.urls[0].callback, registry.views[\n 'fear_conditioning_create'])\n self.assertEqual(registry.urls[0].name, 'fear_conditioning_create')\n self.assertEqual(registry.modules, [FearConditioningModule])\n\n\nclass DataViewsetRegistryTest(TestCase):\n\n def test_register_data_model(self) ->None:\n registry = DataViewsetRegistry()\n registry.register(FearConditioningData)\n self.assertEqual(registry.data_models, [FearConditioningData])\n self.assertEqual(registry.urls[0].pattern._route,\n 'projects/<int:project_pk>/experiments/<int:experiment_pk>/data/fear-conditioning/'\n )\n self.assertEqual(registry.urls[0].callback, registry.views[\n 'fear_conditioning_data_list'])\n self.assertEqual(registry.urls[0].name, 'fear_conditioning_data_list')\n self.assertEqual(registry.urls[1].pattern._route,\n 'projects/<int:project_pk>/experiments/<int:experiment_pk>/data/fear-conditioning/<int:data_pk>/'\n )\n self.assertEqual(registry.urls[1].callback, registry.views[\n 'fear_conditioning_data_detail'])\n self.assertEqual(registry.urls[1].name, 'fear_conditioning_data_detail'\n )\n",
"step-4": "from django.test import TestCase\nfrom ..models import FearConditioningData, FearConditioningModule\nfrom ..registry import DataViewsetRegistry, ModuleRegistry\n\n\nclass ModuleRegistryTest(TestCase):\n\n def test_register_module_create_view(self) ->None:\n registry = ModuleRegistry()\n registry.register(FearConditioningModule)\n self.assertEqual(registry.urls[0].pattern._route,\n 'projects/<int:project_pk>/experiments/<int:experiment_pk>/modules/fear-conditioning/add/'\n )\n self.assertEqual(registry.urls[0].callback, registry.views[\n 'fear_conditioning_create'])\n self.assertEqual(registry.urls[0].name, 'fear_conditioning_create')\n self.assertEqual(registry.modules, [FearConditioningModule])\n\n\nclass DataViewsetRegistryTest(TestCase):\n\n def test_register_data_model(self) ->None:\n registry = DataViewsetRegistry()\n registry.register(FearConditioningData)\n self.assertEqual(registry.data_models, [FearConditioningData])\n self.assertEqual(registry.urls[0].pattern._route,\n 'projects/<int:project_pk>/experiments/<int:experiment_pk>/data/fear-conditioning/'\n )\n self.assertEqual(registry.urls[0].callback, registry.views[\n 'fear_conditioning_data_list'])\n self.assertEqual(registry.urls[0].name, 'fear_conditioning_data_list')\n self.assertEqual(registry.urls[1].pattern._route,\n 'projects/<int:project_pk>/experiments/<int:experiment_pk>/data/fear-conditioning/<int:data_pk>/'\n )\n self.assertEqual(registry.urls[1].callback, registry.views[\n 'fear_conditioning_data_detail'])\n self.assertEqual(registry.urls[1].name, 'fear_conditioning_data_detail'\n )\n",
"step-5": "from django.test import TestCase\n\nfrom ..models import FearConditioningData, FearConditioningModule\nfrom ..registry import DataViewsetRegistry, ModuleRegistry\n\n\nclass ModuleRegistryTest(TestCase):\n def test_register_module_create_view(self) -> None:\n registry = ModuleRegistry()\n\n registry.register(FearConditioningModule)\n\n self.assertEqual(\n registry.urls[0].pattern._route,\n \"projects/<int:project_pk>/experiments/<int:experiment_pk>/modules/\"\n \"fear-conditioning/add/\",\n )\n self.assertEqual(\n registry.urls[0].callback, registry.views[\"fear_conditioning_create\"]\n )\n self.assertEqual(registry.urls[0].name, \"fear_conditioning_create\")\n self.assertEqual(registry.modules, [FearConditioningModule])\n\n\nclass DataViewsetRegistryTest(TestCase):\n def test_register_data_model(self) -> None:\n registry = DataViewsetRegistry()\n\n registry.register(FearConditioningData)\n\n self.assertEqual(registry.data_models, [FearConditioningData])\n\n # List view\n self.assertEqual(\n registry.urls[0].pattern._route,\n \"projects/<int:project_pk>/experiments/<int:experiment_pk>/data/\"\n \"fear-conditioning/\",\n )\n self.assertEqual(\n registry.urls[0].callback, registry.views[\"fear_conditioning_data_list\"]\n )\n self.assertEqual(registry.urls[0].name, \"fear_conditioning_data_list\")\n\n # Detail view\n self.assertEqual(\n registry.urls[1].pattern._route,\n \"projects/<int:project_pk>/experiments/<int:experiment_pk>/data/\"\n \"fear-conditioning/<int:data_pk>/\",\n )\n self.assertEqual(\n registry.urls[1].callback, registry.views[\"fear_conditioning_data_detail\"]\n )\n self.assertEqual(registry.urls[1].name, \"fear_conditioning_data_detail\")\n",
"step-ids": [
1,
2,
4,
5,
6
]
}
|
[
1,
2,
4,
5,
6
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
parser.add_argument('-pred_path', type=str, required=True)
parser.add_argument('-n_list_path', type=str, required=True)
parser.add_argument('-refer_path', type=str, required=True)
<|reserved_special_token_0|>
with open(args.pred_path, 'r') as f:
preds = f.readlines()
with open(args.n_list_path, 'r') as f:
for line in f:
n_list.append(int(line.strip()))
with open(args.refer_path, 'r') as f:
golds = f.readlines()
<|reserved_special_token_0|>
for idx, pred in enumerate(preds):
if idx == sum(n_list[:gold_idx + 1]):
gold_idx += 1
gold = golds[gold_idx].strip()
refs = [[gold.split()]]
pred = [pred.strip().split()]
sent_bleu = bleu.bleu(pred, refs, smooth=True)
print('%s : %s : %f' % (pred, refs, sent_bleu))
f_summary.write(' '.join(pred[0]) + '|||' + str(sent_bleu) + '\n')
f_summary.close()
<|reserved_special_token_1|>
<|reserved_special_token_0|>
parser = argparse.ArgumentParser('Compute sentence bleu.')
parser.add_argument('-pred_path', type=str, required=True)
parser.add_argument('-n_list_path', type=str, required=True)
parser.add_argument('-refer_path', type=str, required=True)
args = parser.parse_args()
n_list = []
with open(args.pred_path, 'r') as f:
preds = f.readlines()
with open(args.n_list_path, 'r') as f:
for line in f:
n_list.append(int(line.strip()))
with open(args.refer_path, 'r') as f:
golds = f.readlines()
f_summary = open(args.pred_path + '.sent-bleu', 'w')
gold_idx = 0
for idx, pred in enumerate(preds):
if idx == sum(n_list[:gold_idx + 1]):
gold_idx += 1
gold = golds[gold_idx].strip()
refs = [[gold.split()]]
pred = [pred.strip().split()]
sent_bleu = bleu.bleu(pred, refs, smooth=True)
print('%s : %s : %f' % (pred, refs, sent_bleu))
f_summary.write(' '.join(pred[0]) + '|||' + str(sent_bleu) + '\n')
f_summary.close()
<|reserved_special_token_1|>
"""Compute smoothed sentence-level BLEU for each prediction line.

-n_list_path holds one integer per reference sentence: the number of
consecutive predictions in -pred_path that are scored against it.
Each score is printed and written to '<pred_path>.sent-bleu' in the
format '<prediction>|||<score>'.
"""
import thumt.utils.bleu as bleu
import argparse

parser = argparse.ArgumentParser('Compute sentence bleu.')
parser.add_argument('-pred_path', type=str, required=True)
parser.add_argument('-n_list_path', type=str, required=True)
parser.add_argument('-refer_path', type=str, required=True)
args = parser.parse_args()

with open(args.pred_path, 'r') as f:
    preds = f.readlines()
# Group sizes: n_list[k] predictions share reference sentence k.
with open(args.n_list_path, 'r') as f:
    n_list = [int(line.strip()) for line in f]
with open(args.refer_path, 'r') as f:
    golds = f.readlines()

# Context manager guarantees the summary file is closed/flushed even if
# bleu.bleu raises part-way through (the original leaked the handle).
with open(args.pred_path + '.sent-bleu', 'w') as f_summary:
    gold_idx = 0
    for idx, pred in enumerate(preds):
        # Advance to the next reference once this group's predictions are done.
        if idx == sum(n_list[:gold_idx + 1]):
            gold_idx += 1
        gold = golds[gold_idx].strip()
        refs = [[gold.split()]]
        pred = [pred.strip().split()]
        sent_bleu = bleu.bleu(pred, refs, smooth=True)
        print('%s : %s : %f' % (pred, refs, sent_bleu))
        f_summary.write(' '.join(pred[0]) + '|||' + str(sent_bleu) + '\n')
<|reserved_special_token_1|>
"""Compute a per-sentence BLEU score for each prediction.

Each reference sentence in -refer_path corresponds to a group of
consecutive predictions in -pred_path; -n_list_path gives the size of
each group, one integer per line. For every prediction the smoothed
sentence BLEU against its reference is printed and appended to
"<pred_path>.sent-bleu" as "<prediction>|||<score>".
"""
import thumt.utils.bleu as bleu
import argparse

parser = argparse.ArgumentParser("Compute sentence bleu.")
parser.add_argument("-pred_path", type=str, required=True)
parser.add_argument("-n_list_path", type=str, required=True)
parser.add_argument("-refer_path", type=str, required=True)

args = parser.parse_args()

with open(args.pred_path, 'r') as f:
    preds = f.readlines()
# n_list[k] is the number of predictions that share reference sentence k.
n_list = []
with open(args.n_list_path, 'r') as f:
    for line in f:
        n_list.append(int(line.strip()))
with open(args.refer_path, 'r') as f:
    golds = f.readlines()

# 'with' guarantees the summary file is closed (and flushed) even if the
# BLEU computation raises part-way through; the original opened it bare.
with open(args.pred_path + ".sent-bleu", 'w') as f_summary:
    gold_idx = 0
    for idx, pred in enumerate(preds):
        # Move to the next reference once this group's predictions are exhausted.
        if idx == sum(n_list[:gold_idx + 1]):
            gold_idx += 1

        gold = golds[gold_idx].strip()  # remove trailing `\n`
        refs = [[gold.split()]]
        pred = [pred.strip().split()]
        sent_bleu = bleu.bleu(pred, refs, smooth=True)
        print("%s : %s : %f" % (pred, refs, sent_bleu))
        f_summary.write(" ".join(pred[0]) + "|||" + str(sent_bleu) + "\n")
|
flexible
|
{
"blob_id": "4437075901751adeaf3df63345e270a9b0090c14",
"index": 1918,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nparser.add_argument('-pred_path', type=str, required=True)\nparser.add_argument('-n_list_path', type=str, required=True)\nparser.add_argument('-refer_path', type=str, required=True)\n<mask token>\nwith open(args.pred_path, 'r') as f:\n preds = f.readlines()\nwith open(args.n_list_path, 'r') as f:\n for line in f:\n n_list.append(int(line.strip()))\nwith open(args.refer_path, 'r') as f:\n golds = f.readlines()\n<mask token>\nfor idx, pred in enumerate(preds):\n if idx == sum(n_list[:gold_idx + 1]):\n gold_idx += 1\n gold = golds[gold_idx].strip()\n refs = [[gold.split()]]\n pred = [pred.strip().split()]\n sent_bleu = bleu.bleu(pred, refs, smooth=True)\n print('%s : %s : %f' % (pred, refs, sent_bleu))\n f_summary.write(' '.join(pred[0]) + '|||' + str(sent_bleu) + '\\n')\nf_summary.close()\n",
"step-3": "<mask token>\nparser = argparse.ArgumentParser('Compute sentence bleu.')\nparser.add_argument('-pred_path', type=str, required=True)\nparser.add_argument('-n_list_path', type=str, required=True)\nparser.add_argument('-refer_path', type=str, required=True)\nargs = parser.parse_args()\nn_list = []\nwith open(args.pred_path, 'r') as f:\n preds = f.readlines()\nwith open(args.n_list_path, 'r') as f:\n for line in f:\n n_list.append(int(line.strip()))\nwith open(args.refer_path, 'r') as f:\n golds = f.readlines()\nf_summary = open(args.pred_path + '.sent-bleu', 'w')\ngold_idx = 0\nfor idx, pred in enumerate(preds):\n if idx == sum(n_list[:gold_idx + 1]):\n gold_idx += 1\n gold = golds[gold_idx].strip()\n refs = [[gold.split()]]\n pred = [pred.strip().split()]\n sent_bleu = bleu.bleu(pred, refs, smooth=True)\n print('%s : %s : %f' % (pred, refs, sent_bleu))\n f_summary.write(' '.join(pred[0]) + '|||' + str(sent_bleu) + '\\n')\nf_summary.close()\n",
"step-4": "import thumt.utils.bleu as bleu\nimport argparse\nparser = argparse.ArgumentParser('Compute sentence bleu.')\nparser.add_argument('-pred_path', type=str, required=True)\nparser.add_argument('-n_list_path', type=str, required=True)\nparser.add_argument('-refer_path', type=str, required=True)\nargs = parser.parse_args()\nn_list = []\nwith open(args.pred_path, 'r') as f:\n preds = f.readlines()\nwith open(args.n_list_path, 'r') as f:\n for line in f:\n n_list.append(int(line.strip()))\nwith open(args.refer_path, 'r') as f:\n golds = f.readlines()\nf_summary = open(args.pred_path + '.sent-bleu', 'w')\ngold_idx = 0\nfor idx, pred in enumerate(preds):\n if idx == sum(n_list[:gold_idx + 1]):\n gold_idx += 1\n gold = golds[gold_idx].strip()\n refs = [[gold.split()]]\n pred = [pred.strip().split()]\n sent_bleu = bleu.bleu(pred, refs, smooth=True)\n print('%s : %s : %f' % (pred, refs, sent_bleu))\n f_summary.write(' '.join(pred[0]) + '|||' + str(sent_bleu) + '\\n')\nf_summary.close()\n",
"step-5": "import thumt.utils.bleu as bleu\nimport argparse\n\nparser = argparse.ArgumentParser(\"Compute sentence bleu.\")\nparser.add_argument(\"-pred_path\", type=str, required=True)\nparser.add_argument(\"-n_list_path\", type=str, required=True)\nparser.add_argument(\"-refer_path\", type=str, required=True)\n\nargs = parser.parse_args()\n\nn_list = []\nwith open(args.pred_path, 'r') as f:\n\tpreds = f.readlines()\nwith open(args.n_list_path, 'r') as f:\n for line in f:\n n_list.append(int(line.strip()))\n\nwith open(args.refer_path, 'r') as f:\n\tgolds = f.readlines()\n\nf_summary = open(args.pred_path + \".sent-bleu\", 'w')\ngold_idx = 0\nfor idx, pred in enumerate(preds):\n #import ipdb; ipdb.set_trace()\n if idx == sum(n_list[:gold_idx + 1]):\n gold_idx += 1\n\n gold = golds[gold_idx].strip()\t# remove `\\n`\n\t#refs = [gold.split()]\n refs = [[gold.split()]]\n pred = [pred.strip().split()]\n #import ipdb; ipdb.set_trace()\n sent_bleu = bleu.bleu(pred, refs, smooth=True)\n print(\"%s : %s : %f\" % (pred, refs, sent_bleu))\n f_summary.write(\" \".join(pred[0]) + \"|||\" + str(sent_bleu) + \"\\n\")\nf_summary.close()\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
# Compute grid scores using the new dataset format
import matplotlib
import os
# Allow the script to run on machines without a display or inside a screen
# session: select the non-interactive 'agg' backend when no usable DISPLAY
# is set. This must happen before pyplot is imported further down, since
# importing pyplot locks in the backend.
display = os.environ.get('DISPLAY')
if display is None or 'localhost' in display:
    matplotlib.use('agg')
import argparse
import numpy as np
import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim
from datasets import train_test_loaders, angular_train_test_loaders, tf_train_test_loaders, load_from_cache
from models import SSPPathIntegrationModel
from datetime import datetime
from tensorboardX import SummaryWriter
import json
from spatial_semantic_pointers.utils import get_heatmap_vectors, ssp_to_loc, ssp_to_loc_v
from spatial_semantic_pointers.plots import plot_predictions, plot_predictions_v
import matplotlib.pyplot as plt
from path_integration_utils import pc_to_loc_v, encoding_func_from_model, pc_gauss_encoding_func, ssp_encoding_func, \
hd_gauss_encoding_func, hex_trig_encoding_func
from ssp_navigation.utils.encodings import get_encoding_function
import grid_scoring.scores as scores
import grid_scoring.utils as utils
# from grid_scoring.run_network import run_and_gather_activations, run_and_gather_localization_activations
from path_integration_utils import encoding_func_from_model, pc_gauss_encoding_func
# Command-line interface.
# NOTE(review): --use-localization, --n-tiles and --n-bins are parsed but
# never referenced later in this script (see the TODO below) — confirm
# whether they are consumed inside get_encoding_function via `args`.
parser = argparse.ArgumentParser('Compute grid scores for a path integration model')

parser.add_argument('--n-samples', type=int, default=5000)
parser.add_argument('--use-localization', action='store_true')
# TODO: use these parameters
parser.add_argument('--dataset', type=str, default='')
parser.add_argument('--model', type=str, default='')
parser.add_argument('--fname-prefix', type=str, default='sac')

# Spatial-encoding selection; the chosen name drives get_encoding_function.
parser.add_argument('--spatial-encoding', type=str, default='ssp',
                    choices=[
                        'ssp', 'hex-ssp', 'periodic-hex-ssp', 'grid-ssp', 'ind-ssp', 'orth-proj-ssp',
                        'rec-ssp', 'rec-hex-ssp', 'rec-ind-ssp', 'sub-toroid-ssp', 'var-sub-toroid-ssp',
                        'random', '2d', '2d-normalized', 'one-hot', 'hex-trig',
                        'trig', 'random-trig', 'random-rotated-trig', 'random-proj', 'legendre',
                        'learned', 'learned-normalized', 'frozen-learned', 'frozen-learned-normalized',
                        'pc-gauss', 'pc-dog', 'tile-coding'
                    ])
# choices=['ssp', '2d', 'frozen-learned', 'pc-gauss', 'pc-dog', 'pc-gauss-softmax', 'hex-trig', 'hex-trig-all-freq'])

# Encoding hyperparameters (each is only meaningful for its matching encoding).
parser.add_argument('--frozen-model', type=str, default='', help='model to use frozen encoding weights from')
parser.add_argument('--pc-gauss-sigma', type=float, default=0.25)
parser.add_argument('--pc-diff-sigma', type=float, default=0.5)
parser.add_argument('--hex-freq-coef', type=float, default=2.5, help='constant to scale frequencies by')
parser.add_argument('--n-tiles', type=int, default=8, help='number of layers for tile coding')
parser.add_argument('--n-bins', type=int, default=8, help='number of bins for tile coding')
parser.add_argument('--ssp-scaling', type=float, default=1.0)
parser.add_argument('--grid-ssp-min', type=float, default=0.25, help='minimum plane wave scale')
parser.add_argument('--grid-ssp-max', type=float, default=2.0, help='maximum plane wave scale')
parser.add_argument('--phi', type=float, default=0.5, help='phi as a fraction of pi for orth-proj-ssp')
parser.add_argument('--n-proj', type=int, default=3, help='projection dimension for sub toroids')
parser.add_argument('--scale-ratio', type=float, default=0, help='ratio between sub toroid scales')
parser.add_argument('--hilbert-points', type=int, default=1, choices=[0, 1, 2, 3],
                    help='pc centers. 0: random uniform. 1: hilbert curve. 2: evenly spaced grid. 3: hex grid')

# Model / training-data parameters.
parser.add_argument('--seed', type=int, default=13)
parser.add_argument('--dropout-p', type=float, default=0.5)
parser.add_argument('--dim', type=int, default=512)
parser.add_argument('--train-split', type=float, default=0.8, help='Training fraction of the train/test split')
parser.add_argument('--allow-cache', action='store_true',
                    help='once the dataset has been generated, it will be saved to a file to be loaded faster')
parser.add_argument('--trajectory-length', type=int, default=100)
parser.add_argument('--minibatch-size', type=int, default=10)
parser.add_argument('--n-image-bins', type=int, default=20)
parser.add_argument('--n-hd-cells', type=int, default=0, help='If non-zero, use linear and angular velocity as well as HD cell output')
parser.add_argument('--sin-cos-ang', type=int, default=1, choices=[0, 1],
                    help='Use the sin and cos of the angular velocity if angular velocities are used')
parser.add_argument('--use-lmu', action='store_true')
parser.add_argument('--lmu-order', type=int, default=6)
parser.add_argument('--no-cache-load', action='store_true', help='do not load from cache')

args = parser.parse_args()
ssp_scaling = args.ssp_scaling

# Seed both frameworks so dataset generation and any random encodings are
# reproducible for a given --seed.
torch.manual_seed(args.seed)
np.random.seed(args.seed)

data = np.load(args.dataset)

# only used for frozen-learned and other custom encoding functions
# encoding_func = None

# Spatial extent of the environment used for the decoding grid.
limit_low = 0  # * args.ssp_scaling
limit_high = 2.2  # * args.ssp_scaling
res = 128  # 256

encoding_func, dim = get_encoding_function(args, limit_low=limit_low, limit_high=limit_high)

xs = np.linspace(limit_low, limit_high, res)
ys = np.linspace(limit_low, limit_high, res)

# FIXME: inefficient but will work for now
# heatmap_vectors[i, j, :] is the encoding of location (xs[i], ys[j]); it is
# used later by ssp_to_loc_v to decode predicted encodings back to 2D points.
heatmap_vectors = np.zeros((len(xs), len(ys), dim))

print("Generating Heatmap Vectors")
for i, x in enumerate(xs):
    for j, y in enumerate(ys):
        heatmap_vectors[i, j, :] = encoding_func(
            x=x, y=y
        )
        # Unit-normalise each vector. NOTE(review): presumably so the
        # similarity used by ssp_to_loc_v is direction-only — confirm.
        heatmap_vectors[i, j, :] /= np.linalg.norm(heatmap_vectors[i, j, :])
print("Heatmap Vector Generation Complete")
n_samples = args.n_samples
rollout_length = args.trajectory_length
batch_size = args.minibatch_size

if args.n_hd_cells > 0:
    # Head-direction variant: the model also predicts HD-cell output, so the
    # output dimension grows by n_hd_cells.
    hd_encoding_func = hd_gauss_encoding_func(dim=args.n_hd_cells, sigma=0.25, use_softmax=False, rng=np.random.RandomState(args.seed))
    if args.sin_cos_ang:
        # 3 inputs when angular velocity is given as (sin, cos) alongside
        # linear velocity. NOTE(review): presumed input layout — confirm
        # against the dataset loaders.
        input_size = 3
    else:
        input_size = 2
    model = SSPPathIntegrationModel(
        input_size=input_size, unroll_length=rollout_length,
        sp_dim=dim + args.n_hd_cells, dropout_p=args.dropout_p, use_lmu=args.use_lmu, order=args.lmu_order
    )
else:
    hd_encoding_func = None
    model = SSPPathIntegrationModel(
        input_size=2, unroll_length=rollout_length,
        sp_dim=dim, dropout_p=args.dropout_p, use_lmu=args.use_lmu, order=args.lmu_order
    )

# model = SSPPathIntegrationModel(unroll_length=rollout_length, sp_dim=dim, dropout_p=args.dropout_p)

# strict=False tolerates checkpoints whose parameter set does not exactly
# match this model definition.
model.load_state_dict(torch.load(args.model), strict=False)
model.eval()
# Encoding-specific component of the dataset cache filename, so caches built
# with different encoder hyperparameters do not collide.
encoding_specific = ''
if 'ssp' in args.spatial_encoding:
    encoding_specific = args.ssp_scaling
elif args.spatial_encoding == 'frozen-learned':
    encoding_specific = args.frozen_model
elif args.spatial_encoding == 'pc-gauss' or args.spatial_encoding == 'pc-gauss-softmax':
    encoding_specific = args.pc_gauss_sigma
elif args.spatial_encoding == 'pc-dog':
    encoding_specific = '{}-{}'.format(args.pc_gauss_sigma, args.pc_diff_sigma)
elif args.spatial_encoding == 'hex-trig':
    encoding_specific = args.hex_freq_coef

# Datasets whose path contains 'tf' (tfrecord-derived) get their own
# cache namespace.
if 'tf' in args.dataset:
    cache_fname = 'dataset_cache/tf_{}_{}_{}_{}_{}_{}.npz'.format(
        args.spatial_encoding, args.dim, args.seed, args.n_samples, args.n_hd_cells, encoding_specific
    )
else:
    cache_fname = 'dataset_cache/{}_{}_{}_{}_{}_{}.npz'.format(
        args.spatial_encoding, args.dim, args.seed, args.n_samples, args.n_hd_cells, encoding_specific
    )
# If the cache file exists (and loading it is not disabled), build the
# loaders from the cached, already-encoded trajectories; otherwise encode
# from scratch and optionally write the cache.
if os.path.exists(cache_fname) and not args.no_cache_load:
    print("Generating Train and Test Loaders from Cache")
    trainloader, testloader = load_from_cache(cache_fname, batch_size=batch_size, n_samples=n_samples)
else:
    print("Generating Train and Test Loaders")

    if 'tf' in args.dataset:
        # tfrecord dataset only supports using the sin and cos of angular velocity
        assert args.sin_cos_ang == 1

        trainloader, testloader = tf_train_test_loaders(
            data,
            n_train_samples=n_samples,
            n_test_samples=n_samples,
            rollout_length=rollout_length,
            batch_size=batch_size,
            encoding=args.spatial_encoding,
            encoding_func=encoding_func,
            encoding_dim=args.dim,
            train_split=args.train_split,
            hd_dim=args.n_hd_cells,
            hd_encoding_func=hd_encoding_func,
            sin_cos_ang=args.sin_cos_ang,
        )
    else:
        if args.n_hd_cells > 0:
            # Loaders that additionally provide angular velocity / HD targets.
            trainloader, testloader = angular_train_test_loaders(
                data,
                n_train_samples=n_samples,
                n_test_samples=n_samples,
                rollout_length=rollout_length,
                batch_size=batch_size,
                encoding=args.spatial_encoding,
                encoding_func=encoding_func,
                encoding_dim=args.dim,
                train_split=args.train_split,
                hd_dim=args.n_hd_cells,
                hd_encoding_func=hd_encoding_func,
                sin_cos_ang=args.sin_cos_ang,
            )
        else:
            trainloader, testloader = train_test_loaders(
                data,
                n_train_samples=n_samples,
                n_test_samples=n_samples,
                rollout_length=rollout_length,
                batch_size=batch_size,
                encoding=args.spatial_encoding,
                encoding_func=encoding_func,
                encoding_dim=args.dim,
                train_split=args.train_split,
            )

    if args.allow_cache:
        if not os.path.exists('dataset_cache'):
            os.makedirs('dataset_cache')
        # Save the encoded trajectories so later runs can skip encoding.
        np.savez(
            cache_fname,
            train_velocity_inputs=trainloader.dataset.velocity_inputs,
            train_ssp_inputs=trainloader.dataset.ssp_inputs,
            train_ssp_outputs=trainloader.dataset.ssp_outputs,
            test_velocity_inputs=testloader.dataset.velocity_inputs,
            test_ssp_inputs=testloader.dataset.ssp_inputs,
            test_ssp_outputs=testloader.dataset.ssp_outputs,
        )

print("Train and Test Loaders Generation Complete")
# Mask parameters for the grid scorer: ten (start, end) pairs with fixed
# start 0.2 and ends swept from 0.4 to 1.0. NOTE(review): presumably these
# are annulus radii — see grid_scoring.scores.GridScorer for semantics.
starts = [0.2] * 10
ends = np.linspace(0.4, 1.0, num=10)
masks_parameters = zip(starts, ends.tolist())

latest_epoch_scorer = scores.GridScorer(
    nbins=args.n_image_bins,
    coords_range=((0, 2.2), (0, 2.2)),  # data_reader.get_coord_range(),
    mask_parameters=masks_parameters,
)

# Output figure names: LSTM or dense activations scored against either the
# decoded ("pred") or ground-truth ("truth") trajectory positions.
fname_lstm_pred = '{}_{}samples_lstm_pred.pdf'.format(args.fname_prefix, args.n_samples)
fname_lstm_truth = '{}_{}samples_lstm_truth.pdf'.format(args.fname_prefix, args.n_samples)
fname_dense_pred = '{}_{}samples_dense_pred.pdf'.format(args.fname_prefix, args.n_samples)
fname_dense_truth = '{}_{}samples_dense_truth.pdf'.format(args.fname_prefix, args.n_samples)
# Run the model over the test set and gather activations.
print("Testing")
with torch.no_grad():
    # Everything is in one batch, so this loop will only happen once
    for i, data in enumerate(testloader):
        velocity_inputs, ssp_inputs, ssp_outputs = data

        ssp_pred, lstm_outputs, dense_outputs = model.forward_activations(velocity_inputs, ssp_inputs)

        # Flattened to (rollout_step * batch) rows: decoded 2D positions plus
        # the matching LSTM and dense-layer activations for every timestep.
        predictions = np.zeros((ssp_pred.shape[0]*ssp_pred.shape[1], 2))
        coords = np.zeros((ssp_pred.shape[0]*ssp_pred.shape[1], 2))
        lstm_activations = np.zeros((ssp_pred.shape[0]*ssp_pred.shape[1], model.lstm_hidden_size))
        dense_activations = np.zeros((ssp_pred.shape[0] * ssp_pred.shape[1], model.linear_hidden_size))

        # ssp_pred is indexed (rollout_step, batch, dim) below.
        assert rollout_length == ssp_pred.shape[0]

        # # For each neuron, contains the average activity at each spatial bin
        # # Computing for both ground truth and predicted location
        # rate_maps_pred = np.zeros((model.lstm_hidden_size, len(xs), len(ys)))
        # rate_maps_truth = np.zeros((model.lstm_hidden_size, len(xs), len(ys)))

        print("Computing predicted locations and true locations")
        # Using all data, one rollout step at a time
        for ri in range(rollout_length):
            # Trim out head-direction info (if it was included) by only
            # looking at the first args.dim components.
            # Computing 'predicted' coordinates: where the agent thinks it is.
            pred = ssp_pred.detach().numpy()[ri, :, :args.dim]
            # pred = pred / pred.sum(axis=1)[:, np.newaxis]
            predictions[ri * ssp_pred.shape[1]:(ri + 1) * ssp_pred.shape[1], :] = ssp_to_loc_v(
                pred,
                heatmap_vectors, xs, ys
            )

            # Computing 'ground truth' coordinates: where the agent should be.
            # Note ssp_outputs is indexed (batch, rollout_step, dim).
            coord = ssp_outputs.detach().numpy()[:, ri, :args.dim]
            # coord = coord / coord.sum(axis=1)[:, np.newaxis]
            coords[ri * ssp_pred.shape[1]:(ri + 1) * ssp_pred.shape[1], :] = ssp_to_loc_v(
                coord,
                heatmap_vectors, xs, ys
            )

            # Reshaping activations and converting to numpy arrays.
            lstm_activations[ri*ssp_pred.shape[1]:(ri+1)*ssp_pred.shape[1], :] = lstm_outputs.detach().numpy()[ri, :, :]
            dense_activations[ri * ssp_pred.shape[1]:(ri + 1) * ssp_pred.shape[1], :] = dense_outputs.detach().numpy()[ri, :, :]

# predictions = predictions / args.ssp_scaling
# coords = coords / args.ssp_scaling

# Sanity check on the range of the decoded coordinates.
print(np.max(predictions))
print(np.min(predictions))
# Compute gridness scores for each of the four combinations of activation
# source (LSTM hidden state vs. dense layer) and binning position (decoded
# 'pred' vs. ground-truth 'truth'). Each call returns four per-unit arrays:
# 60-degree scores, 90-degree scores, and their mask separations.

# LSTM activations binned by decoded (predicted) position.
grid_scores_60_pred, grid_scores_90_pred, grid_scores_60_separation_pred, grid_scores_90_separation_pred = utils.get_scores_and_plot(
    scorer=latest_epoch_scorer,
    data_abs_xy=predictions,  # res['pos_xy'],
    activations=lstm_activations,  # res['bottleneck'],
    directory='output_grid_scores',  # FLAGS.saver_results_directory,
    filename=fname_lstm_pred,
)

# LSTM activations binned by ground-truth position.
grid_scores_60_truth, grid_scores_90_truth, grid_scores_60_separation_truth, grid_scores_90_separation_truth = utils.get_scores_and_plot(
    scorer=latest_epoch_scorer,
    data_abs_xy=coords,  # res['pos_xy'],
    activations=lstm_activations,  # res['bottleneck'],
    directory='output_grid_scores',  # FLAGS.saver_results_directory,
    filename=fname_lstm_truth,
)

# Dense-layer activations binned by decoded (predicted) position.
grid_scores_60_dense_pred, grid_scores_90_dense_pred, grid_scores_60_separation_dense_pred, grid_scores_90_separation_dense_pred = utils.get_scores_and_plot(
    scorer=latest_epoch_scorer,
    data_abs_xy=predictions,  # res['pos_xy'],
    activations=dense_activations,  # res['bottleneck'],
    directory='output_grid_scores',  # FLAGS.saver_results_directory,
    filename=fname_dense_pred,
)

# Dense-layer activations binned by ground-truth position.
grid_scores_60_dense_truth, grid_scores_90_dense_truth, grid_scores_60_separation_dense_truth, grid_scores_90_separation_dense_truth = utils.get_scores_and_plot(
    scorer=latest_epoch_scorer,
    data_abs_xy=coords,  # res['pos_xy'],
    activations=dense_activations,  # res['bottleneck'],
    directory='output_grid_scores',  # FLAGS.saver_results_directory,
    filename=fname_dense_truth,
)

print(grid_scores_60_truth, grid_scores_90_truth, grid_scores_60_separation_truth, grid_scores_90_separation_truth)

# Saving to make grid score values easy to compare for different variations
fname = 'output_grid_scores/{}_{}samples.npz'.format(args.fname_prefix, args.n_samples)
np.savez(
    fname,
    grid_scores_60_pred=grid_scores_60_pred,
    grid_scores_90_pred=grid_scores_90_pred,
    grid_scores_60_separation_pred=grid_scores_60_separation_pred,
    grid_scores_90_separation_pred=grid_scores_90_separation_pred,
    grid_scores_60_truth=grid_scores_60_truth,
    grid_scores_90_truth=grid_scores_90_truth,
    grid_scores_60_separation_truth=grid_scores_60_separation_truth,
    grid_scores_90_separation_truth=grid_scores_90_separation_truth,
    grid_scores_60_dense_pred=grid_scores_60_dense_pred,
    grid_scores_90_dense_pred=grid_scores_90_dense_pred,
    grid_scores_60_separation_dense_pred=grid_scores_60_separation_dense_pred,
    grid_scores_90_separation_dense_pred=grid_scores_90_separation_dense_pred,
    grid_scores_60_dense_truth=grid_scores_60_dense_truth,
    grid_scores_90_dense_truth=grid_scores_90_dense_truth,
    grid_scores_60_separation_dense_truth=grid_scores_60_separation_dense_truth,
    grid_scores_90_separation_dense_truth=grid_scores_90_separation_dense_truth,
)
|
normal
|
{
"blob_id": "f4bc5663ab2b2a6dbb41a2fc3d7ca67100b455a4",
"index": 838,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nif display is None or 'localhost' in display:\n matplotlib.use('agg')\n<mask token>\nparser.add_argument('--n-samples', type=int, default=5000)\nparser.add_argument('--use-localization', action='store_true')\nparser.add_argument('--dataset', type=str, default='')\nparser.add_argument('--model', type=str, default='')\nparser.add_argument('--fname-prefix', type=str, default='sac')\nparser.add_argument('--spatial-encoding', type=str, default='ssp', choices=\n ['ssp', 'hex-ssp', 'periodic-hex-ssp', 'grid-ssp', 'ind-ssp',\n 'orth-proj-ssp', 'rec-ssp', 'rec-hex-ssp', 'rec-ind-ssp',\n 'sub-toroid-ssp', 'var-sub-toroid-ssp', 'random', '2d', '2d-normalized',\n 'one-hot', 'hex-trig', 'trig', 'random-trig', 'random-rotated-trig',\n 'random-proj', 'legendre', 'learned', 'learned-normalized',\n 'frozen-learned', 'frozen-learned-normalized', 'pc-gauss', 'pc-dog',\n 'tile-coding'])\nparser.add_argument('--frozen-model', type=str, default='', help=\n 'model to use frozen encoding weights from')\nparser.add_argument('--pc-gauss-sigma', type=float, default=0.25)\nparser.add_argument('--pc-diff-sigma', type=float, default=0.5)\nparser.add_argument('--hex-freq-coef', type=float, default=2.5, help=\n 'constant to scale frequencies by')\nparser.add_argument('--n-tiles', type=int, default=8, help=\n 'number of layers for tile coding')\nparser.add_argument('--n-bins', type=int, default=8, help=\n 'number of bins for tile coding')\nparser.add_argument('--ssp-scaling', type=float, default=1.0)\nparser.add_argument('--grid-ssp-min', type=float, default=0.25, help=\n 'minimum plane wave scale')\nparser.add_argument('--grid-ssp-max', type=float, default=2.0, help=\n 'maximum plane wave scale')\nparser.add_argument('--phi', type=float, default=0.5, help=\n 'phi as a fraction of pi for orth-proj-ssp')\nparser.add_argument('--n-proj', type=int, default=3, help=\n 'projection dimension for sub toroids')\nparser.add_argument('--scale-ratio', type=float, default=0, help=\n 
'ratio between sub toroid scales')\nparser.add_argument('--hilbert-points', type=int, default=1, choices=[0, 1,\n 2, 3], help=\n 'pc centers. 0: random uniform. 1: hilbert curve. 2: evenly spaced grid. 3: hex grid'\n )\nparser.add_argument('--seed', type=int, default=13)\nparser.add_argument('--dropout-p', type=float, default=0.5)\nparser.add_argument('--dim', type=int, default=512)\nparser.add_argument('--train-split', type=float, default=0.8, help=\n 'Training fraction of the train/test split')\nparser.add_argument('--allow-cache', action='store_true', help=\n 'once the dataset has been generated, it will be saved to a file to be loaded faster'\n )\nparser.add_argument('--trajectory-length', type=int, default=100)\nparser.add_argument('--minibatch-size', type=int, default=10)\nparser.add_argument('--n-image-bins', type=int, default=20)\nparser.add_argument('--n-hd-cells', type=int, default=0, help=\n 'If non-zero, use linear and angular velocity as well as HD cell output')\nparser.add_argument('--sin-cos-ang', type=int, default=1, choices=[0, 1],\n help=\n 'Use the sin and cos of the angular velocity if angular velocities are used'\n )\nparser.add_argument('--use-lmu', action='store_true')\nparser.add_argument('--lmu-order', type=int, default=6)\nparser.add_argument('--no-cache-load', action='store_true', help=\n 'do not load from cache')\n<mask token>\ntorch.manual_seed(args.seed)\nnp.random.seed(args.seed)\n<mask token>\nprint('Generating Heatmap Vectors')\nfor i, x in enumerate(xs):\n for j, y in enumerate(ys):\n heatmap_vectors[i, j, :] = encoding_func(x=x, y=y)\n heatmap_vectors[i, j, :] /= np.linalg.norm(heatmap_vectors[i, j, :])\nprint('Heatmap Vector Generation Complete')\n<mask token>\nif args.n_hd_cells > 0:\n hd_encoding_func = hd_gauss_encoding_func(dim=args.n_hd_cells, sigma=\n 0.25, use_softmax=False, rng=np.random.RandomState(args.seed))\n if args.sin_cos_ang:\n input_size = 3\n else:\n input_size = 2\n model = 
SSPPathIntegrationModel(input_size=input_size, unroll_length=\n rollout_length, sp_dim=dim + args.n_hd_cells, dropout_p=args.\n dropout_p, use_lmu=args.use_lmu, order=args.lmu_order)\nelse:\n hd_encoding_func = None\n model = SSPPathIntegrationModel(input_size=2, unroll_length=\n rollout_length, sp_dim=dim, dropout_p=args.dropout_p, use_lmu=args.\n use_lmu, order=args.lmu_order)\nmodel.load_state_dict(torch.load(args.model), strict=False)\nmodel.eval()\n<mask token>\nif 'ssp' in args.spatial_encoding:\n encoding_specific = args.ssp_scaling\nelif args.spatial_encoding == 'frozen-learned':\n encoding_specific = args.frozen_model\nelif args.spatial_encoding == 'pc-gauss' or args.spatial_encoding == 'pc-gauss-softmax':\n encoding_specific = args.pc_gauss_sigma\nelif args.spatial_encoding == 'pc-dog':\n encoding_specific = '{}-{}'.format(args.pc_gauss_sigma, args.pc_diff_sigma)\nelif args.spatial_encoding == 'hex-trig':\n encoding_specific = args.hex_freq_coef\nif 'tf' in args.dataset:\n cache_fname = 'dataset_cache/tf_{}_{}_{}_{}_{}_{}.npz'.format(args.\n spatial_encoding, args.dim, args.seed, args.n_samples, args.\n n_hd_cells, encoding_specific)\nelse:\n cache_fname = 'dataset_cache/{}_{}_{}_{}_{}_{}.npz'.format(args.\n spatial_encoding, args.dim, args.seed, args.n_samples, args.\n n_hd_cells, encoding_specific)\nif os.path.exists(cache_fname) and not args.no_cache_load:\n print('Generating Train and Test Loaders from Cache')\n trainloader, testloader = load_from_cache(cache_fname, batch_size=\n batch_size, n_samples=n_samples)\nelse:\n print('Generating Train and Test Loaders')\n if 'tf' in args.dataset:\n assert args.sin_cos_ang == 1\n trainloader, testloader = tf_train_test_loaders(data,\n n_train_samples=n_samples, n_test_samples=n_samples,\n rollout_length=rollout_length, batch_size=batch_size, encoding=\n args.spatial_encoding, encoding_func=encoding_func,\n encoding_dim=args.dim, train_split=args.train_split, hd_dim=\n args.n_hd_cells, 
hd_encoding_func=hd_encoding_func, sin_cos_ang\n =args.sin_cos_ang)\n elif args.n_hd_cells > 0:\n trainloader, testloader = angular_train_test_loaders(data,\n n_train_samples=n_samples, n_test_samples=n_samples,\n rollout_length=rollout_length, batch_size=batch_size, encoding=\n args.spatial_encoding, encoding_func=encoding_func,\n encoding_dim=args.dim, train_split=args.train_split, hd_dim=\n args.n_hd_cells, hd_encoding_func=hd_encoding_func, sin_cos_ang\n =args.sin_cos_ang)\n else:\n trainloader, testloader = train_test_loaders(data, n_train_samples=\n n_samples, n_test_samples=n_samples, rollout_length=\n rollout_length, batch_size=batch_size, encoding=args.\n spatial_encoding, encoding_func=encoding_func, encoding_dim=\n args.dim, train_split=args.train_split)\n if args.allow_cache:\n if not os.path.exists('dataset_cache'):\n os.makedirs('dataset_cache')\n np.savez(cache_fname, train_velocity_inputs=trainloader.dataset.\n velocity_inputs, train_ssp_inputs=trainloader.dataset.\n ssp_inputs, train_ssp_outputs=trainloader.dataset.ssp_outputs,\n test_velocity_inputs=testloader.dataset.velocity_inputs,\n test_ssp_inputs=testloader.dataset.ssp_inputs, test_ssp_outputs\n =testloader.dataset.ssp_outputs)\nprint('Train and Test Loaders Generation Complete')\n<mask token>\nprint('Testing')\nwith torch.no_grad():\n for i, data in enumerate(testloader):\n velocity_inputs, ssp_inputs, ssp_outputs = data\n ssp_pred, lstm_outputs, dense_outputs = model.forward_activations(\n velocity_inputs, ssp_inputs)\n predictions = np.zeros((ssp_pred.shape[0] * ssp_pred.shape[1], 2))\n coords = np.zeros((ssp_pred.shape[0] * ssp_pred.shape[1], 2))\n lstm_activations = np.zeros((ssp_pred.shape[0] * ssp_pred.shape[1],\n model.lstm_hidden_size))\n dense_activations = np.zeros((ssp_pred.shape[0] * ssp_pred.shape[1],\n model.linear_hidden_size))\n assert rollout_length == ssp_pred.shape[0]\n print('Computing predicted locations and true locations')\n for ri in range(rollout_length):\n pred = 
ssp_pred.detach().numpy()[ri, :, :args.dim]\n predictions[ri * ssp_pred.shape[1]:(ri + 1) * ssp_pred.shape[1], :\n ] = ssp_to_loc_v(pred, heatmap_vectors, xs, ys)\n coord = ssp_outputs.detach().numpy()[:, ri, :args.dim]\n coords[ri * ssp_pred.shape[1]:(ri + 1) * ssp_pred.shape[1], :\n ] = ssp_to_loc_v(coord, heatmap_vectors, xs, ys)\n lstm_activations[ri * ssp_pred.shape[1]:(ri + 1) * ssp_pred.shape[1], :\n ] = lstm_outputs.detach().numpy()[ri, :, :]\n dense_activations[ri * ssp_pred.shape[1]:(ri + 1) * ssp_pred.shape[\n 1], :] = dense_outputs.detach().numpy()[ri, :, :]\nprint(np.max(predictions))\nprint(np.min(predictions))\n<mask token>\nprint(grid_scores_60_truth, grid_scores_90_truth,\n grid_scores_60_separation_truth, grid_scores_90_separation_truth)\n<mask token>\nnp.savez(fname, grid_scores_60_pred=grid_scores_60_pred,\n grid_scores_90_pred=grid_scores_90_pred, grid_scores_60_separation_pred\n =grid_scores_60_separation_pred, grid_scores_90_separation_pred=\n grid_scores_90_separation_pred, grid_scores_60_truth=\n grid_scores_60_truth, grid_scores_90_truth=grid_scores_90_truth,\n grid_scores_60_separation_truth=grid_scores_60_separation_truth,\n grid_scores_90_separation_truth=grid_scores_90_separation_truth,\n grid_scores_60_dense_pred=grid_scores_60_dense_pred,\n grid_scores_90_dense_pred=grid_scores_90_dense_pred,\n grid_scores_60_separation_dense_pred=\n grid_scores_60_separation_dense_pred,\n grid_scores_90_separation_dense_pred=\n grid_scores_90_separation_dense_pred, grid_scores_60_dense_truth=\n grid_scores_60_dense_truth, grid_scores_90_dense_truth=\n grid_scores_90_dense_truth, grid_scores_60_separation_dense_truth=\n grid_scores_60_separation_dense_truth,\n grid_scores_90_separation_dense_truth=grid_scores_90_separation_dense_truth\n )\n",
"step-3": "<mask token>\ndisplay = os.environ.get('DISPLAY')\nif display is None or 'localhost' in display:\n matplotlib.use('agg')\n<mask token>\nparser = argparse.ArgumentParser(\n 'Compute grid scores for a path integration model')\nparser.add_argument('--n-samples', type=int, default=5000)\nparser.add_argument('--use-localization', action='store_true')\nparser.add_argument('--dataset', type=str, default='')\nparser.add_argument('--model', type=str, default='')\nparser.add_argument('--fname-prefix', type=str, default='sac')\nparser.add_argument('--spatial-encoding', type=str, default='ssp', choices=\n ['ssp', 'hex-ssp', 'periodic-hex-ssp', 'grid-ssp', 'ind-ssp',\n 'orth-proj-ssp', 'rec-ssp', 'rec-hex-ssp', 'rec-ind-ssp',\n 'sub-toroid-ssp', 'var-sub-toroid-ssp', 'random', '2d', '2d-normalized',\n 'one-hot', 'hex-trig', 'trig', 'random-trig', 'random-rotated-trig',\n 'random-proj', 'legendre', 'learned', 'learned-normalized',\n 'frozen-learned', 'frozen-learned-normalized', 'pc-gauss', 'pc-dog',\n 'tile-coding'])\nparser.add_argument('--frozen-model', type=str, default='', help=\n 'model to use frozen encoding weights from')\nparser.add_argument('--pc-gauss-sigma', type=float, default=0.25)\nparser.add_argument('--pc-diff-sigma', type=float, default=0.5)\nparser.add_argument('--hex-freq-coef', type=float, default=2.5, help=\n 'constant to scale frequencies by')\nparser.add_argument('--n-tiles', type=int, default=8, help=\n 'number of layers for tile coding')\nparser.add_argument('--n-bins', type=int, default=8, help=\n 'number of bins for tile coding')\nparser.add_argument('--ssp-scaling', type=float, default=1.0)\nparser.add_argument('--grid-ssp-min', type=float, default=0.25, help=\n 'minimum plane wave scale')\nparser.add_argument('--grid-ssp-max', type=float, default=2.0, help=\n 'maximum plane wave scale')\nparser.add_argument('--phi', type=float, default=0.5, help=\n 'phi as a fraction of pi for orth-proj-ssp')\nparser.add_argument('--n-proj', type=int, 
default=3, help=\n 'projection dimension for sub toroids')\nparser.add_argument('--scale-ratio', type=float, default=0, help=\n 'ratio between sub toroid scales')\nparser.add_argument('--hilbert-points', type=int, default=1, choices=[0, 1,\n 2, 3], help=\n 'pc centers. 0: random uniform. 1: hilbert curve. 2: evenly spaced grid. 3: hex grid'\n )\nparser.add_argument('--seed', type=int, default=13)\nparser.add_argument('--dropout-p', type=float, default=0.5)\nparser.add_argument('--dim', type=int, default=512)\nparser.add_argument('--train-split', type=float, default=0.8, help=\n 'Training fraction of the train/test split')\nparser.add_argument('--allow-cache', action='store_true', help=\n 'once the dataset has been generated, it will be saved to a file to be loaded faster'\n )\nparser.add_argument('--trajectory-length', type=int, default=100)\nparser.add_argument('--minibatch-size', type=int, default=10)\nparser.add_argument('--n-image-bins', type=int, default=20)\nparser.add_argument('--n-hd-cells', type=int, default=0, help=\n 'If non-zero, use linear and angular velocity as well as HD cell output')\nparser.add_argument('--sin-cos-ang', type=int, default=1, choices=[0, 1],\n help=\n 'Use the sin and cos of the angular velocity if angular velocities are used'\n )\nparser.add_argument('--use-lmu', action='store_true')\nparser.add_argument('--lmu-order', type=int, default=6)\nparser.add_argument('--no-cache-load', action='store_true', help=\n 'do not load from cache')\nargs = parser.parse_args()\nssp_scaling = args.ssp_scaling\ntorch.manual_seed(args.seed)\nnp.random.seed(args.seed)\ndata = np.load(args.dataset)\nlimit_low = 0\nlimit_high = 2.2\nres = 128\nencoding_func, dim = get_encoding_function(args, limit_low=limit_low,\n limit_high=limit_high)\nxs = np.linspace(limit_low, limit_high, res)\nys = np.linspace(limit_low, limit_high, res)\nheatmap_vectors = np.zeros((len(xs), len(ys), dim))\nprint('Generating Heatmap Vectors')\nfor i, x in enumerate(xs):\n for j, y 
in enumerate(ys):\n heatmap_vectors[i, j, :] = encoding_func(x=x, y=y)\n heatmap_vectors[i, j, :] /= np.linalg.norm(heatmap_vectors[i, j, :])\nprint('Heatmap Vector Generation Complete')\nn_samples = args.n_samples\nrollout_length = args.trajectory_length\nbatch_size = args.minibatch_size\nif args.n_hd_cells > 0:\n hd_encoding_func = hd_gauss_encoding_func(dim=args.n_hd_cells, sigma=\n 0.25, use_softmax=False, rng=np.random.RandomState(args.seed))\n if args.sin_cos_ang:\n input_size = 3\n else:\n input_size = 2\n model = SSPPathIntegrationModel(input_size=input_size, unroll_length=\n rollout_length, sp_dim=dim + args.n_hd_cells, dropout_p=args.\n dropout_p, use_lmu=args.use_lmu, order=args.lmu_order)\nelse:\n hd_encoding_func = None\n model = SSPPathIntegrationModel(input_size=2, unroll_length=\n rollout_length, sp_dim=dim, dropout_p=args.dropout_p, use_lmu=args.\n use_lmu, order=args.lmu_order)\nmodel.load_state_dict(torch.load(args.model), strict=False)\nmodel.eval()\nencoding_specific = ''\nif 'ssp' in args.spatial_encoding:\n encoding_specific = args.ssp_scaling\nelif args.spatial_encoding == 'frozen-learned':\n encoding_specific = args.frozen_model\nelif args.spatial_encoding == 'pc-gauss' or args.spatial_encoding == 'pc-gauss-softmax':\n encoding_specific = args.pc_gauss_sigma\nelif args.spatial_encoding == 'pc-dog':\n encoding_specific = '{}-{}'.format(args.pc_gauss_sigma, args.pc_diff_sigma)\nelif args.spatial_encoding == 'hex-trig':\n encoding_specific = args.hex_freq_coef\nif 'tf' in args.dataset:\n cache_fname = 'dataset_cache/tf_{}_{}_{}_{}_{}_{}.npz'.format(args.\n spatial_encoding, args.dim, args.seed, args.n_samples, args.\n n_hd_cells, encoding_specific)\nelse:\n cache_fname = 'dataset_cache/{}_{}_{}_{}_{}_{}.npz'.format(args.\n spatial_encoding, args.dim, args.seed, args.n_samples, args.\n n_hd_cells, encoding_specific)\nif os.path.exists(cache_fname) and not args.no_cache_load:\n print('Generating Train and Test Loaders from Cache')\n trainloader, 
testloader = load_from_cache(cache_fname, batch_size=\n batch_size, n_samples=n_samples)\nelse:\n print('Generating Train and Test Loaders')\n if 'tf' in args.dataset:\n assert args.sin_cos_ang == 1\n trainloader, testloader = tf_train_test_loaders(data,\n n_train_samples=n_samples, n_test_samples=n_samples,\n rollout_length=rollout_length, batch_size=batch_size, encoding=\n args.spatial_encoding, encoding_func=encoding_func,\n encoding_dim=args.dim, train_split=args.train_split, hd_dim=\n args.n_hd_cells, hd_encoding_func=hd_encoding_func, sin_cos_ang\n =args.sin_cos_ang)\n elif args.n_hd_cells > 0:\n trainloader, testloader = angular_train_test_loaders(data,\n n_train_samples=n_samples, n_test_samples=n_samples,\n rollout_length=rollout_length, batch_size=batch_size, encoding=\n args.spatial_encoding, encoding_func=encoding_func,\n encoding_dim=args.dim, train_split=args.train_split, hd_dim=\n args.n_hd_cells, hd_encoding_func=hd_encoding_func, sin_cos_ang\n =args.sin_cos_ang)\n else:\n trainloader, testloader = train_test_loaders(data, n_train_samples=\n n_samples, n_test_samples=n_samples, rollout_length=\n rollout_length, batch_size=batch_size, encoding=args.\n spatial_encoding, encoding_func=encoding_func, encoding_dim=\n args.dim, train_split=args.train_split)\n if args.allow_cache:\n if not os.path.exists('dataset_cache'):\n os.makedirs('dataset_cache')\n np.savez(cache_fname, train_velocity_inputs=trainloader.dataset.\n velocity_inputs, train_ssp_inputs=trainloader.dataset.\n ssp_inputs, train_ssp_outputs=trainloader.dataset.ssp_outputs,\n test_velocity_inputs=testloader.dataset.velocity_inputs,\n test_ssp_inputs=testloader.dataset.ssp_inputs, test_ssp_outputs\n =testloader.dataset.ssp_outputs)\nprint('Train and Test Loaders Generation Complete')\nstarts = [0.2] * 10\nends = np.linspace(0.4, 1.0, num=10)\nmasks_parameters = zip(starts, ends.tolist())\nlatest_epoch_scorer = scores.GridScorer(nbins=args.n_image_bins,\n coords_range=((0, 2.2), (0, 2.2)), 
mask_parameters=masks_parameters)\nfname_lstm_pred = '{}_{}samples_lstm_pred.pdf'.format(args.fname_prefix,\n args.n_samples)\nfname_lstm_truth = '{}_{}samples_lstm_truth.pdf'.format(args.fname_prefix,\n args.n_samples)\nfname_dense_pred = '{}_{}samples_dense_pred.pdf'.format(args.fname_prefix,\n args.n_samples)\nfname_dense_truth = '{}_{}samples_dense_truth.pdf'.format(args.fname_prefix,\n args.n_samples)\nprint('Testing')\nwith torch.no_grad():\n for i, data in enumerate(testloader):\n velocity_inputs, ssp_inputs, ssp_outputs = data\n ssp_pred, lstm_outputs, dense_outputs = model.forward_activations(\n velocity_inputs, ssp_inputs)\n predictions = np.zeros((ssp_pred.shape[0] * ssp_pred.shape[1], 2))\n coords = np.zeros((ssp_pred.shape[0] * ssp_pred.shape[1], 2))\n lstm_activations = np.zeros((ssp_pred.shape[0] * ssp_pred.shape[1],\n model.lstm_hidden_size))\n dense_activations = np.zeros((ssp_pred.shape[0] * ssp_pred.shape[1],\n model.linear_hidden_size))\n assert rollout_length == ssp_pred.shape[0]\n print('Computing predicted locations and true locations')\n for ri in range(rollout_length):\n pred = ssp_pred.detach().numpy()[ri, :, :args.dim]\n predictions[ri * ssp_pred.shape[1]:(ri + 1) * ssp_pred.shape[1], :\n ] = ssp_to_loc_v(pred, heatmap_vectors, xs, ys)\n coord = ssp_outputs.detach().numpy()[:, ri, :args.dim]\n coords[ri * ssp_pred.shape[1]:(ri + 1) * ssp_pred.shape[1], :\n ] = ssp_to_loc_v(coord, heatmap_vectors, xs, ys)\n lstm_activations[ri * ssp_pred.shape[1]:(ri + 1) * ssp_pred.shape[1], :\n ] = lstm_outputs.detach().numpy()[ri, :, :]\n dense_activations[ri * ssp_pred.shape[1]:(ri + 1) * ssp_pred.shape[\n 1], :] = dense_outputs.detach().numpy()[ri, :, :]\nprint(np.max(predictions))\nprint(np.min(predictions))\n(grid_scores_60_pred, grid_scores_90_pred, grid_scores_60_separation_pred,\n grid_scores_90_separation_pred) = (utils.get_scores_and_plot(scorer=\n latest_epoch_scorer, data_abs_xy=predictions, activations=\n lstm_activations, 
directory='output_grid_scores', filename=fname_lstm_pred)\n )\n(grid_scores_60_truth, grid_scores_90_truth,\n grid_scores_60_separation_truth, grid_scores_90_separation_truth) = (utils\n .get_scores_and_plot(scorer=latest_epoch_scorer, data_abs_xy=coords,\n activations=lstm_activations, directory='output_grid_scores', filename=\n fname_lstm_truth))\n(grid_scores_60_dense_pred, grid_scores_90_dense_pred,\n grid_scores_60_separation_dense_pred, grid_scores_90_separation_dense_pred\n ) = (utils.get_scores_and_plot(scorer=latest_epoch_scorer, data_abs_xy=\n predictions, activations=dense_activations, directory=\n 'output_grid_scores', filename=fname_dense_pred))\n(grid_scores_60_dense_truth, grid_scores_90_dense_truth,\n grid_scores_60_separation_dense_truth,\n grid_scores_90_separation_dense_truth) = (utils.get_scores_and_plot(\n scorer=latest_epoch_scorer, data_abs_xy=coords, activations=\n dense_activations, directory='output_grid_scores', filename=\n fname_dense_truth))\nprint(grid_scores_60_truth, grid_scores_90_truth,\n grid_scores_60_separation_truth, grid_scores_90_separation_truth)\nfname = 'output_grid_scores/{}_{}samples.npz'.format(args.fname_prefix,\n args.n_samples)\nnp.savez(fname, grid_scores_60_pred=grid_scores_60_pred,\n grid_scores_90_pred=grid_scores_90_pred, grid_scores_60_separation_pred\n =grid_scores_60_separation_pred, grid_scores_90_separation_pred=\n grid_scores_90_separation_pred, grid_scores_60_truth=\n grid_scores_60_truth, grid_scores_90_truth=grid_scores_90_truth,\n grid_scores_60_separation_truth=grid_scores_60_separation_truth,\n grid_scores_90_separation_truth=grid_scores_90_separation_truth,\n grid_scores_60_dense_pred=grid_scores_60_dense_pred,\n grid_scores_90_dense_pred=grid_scores_90_dense_pred,\n grid_scores_60_separation_dense_pred=\n grid_scores_60_separation_dense_pred,\n grid_scores_90_separation_dense_pred=\n grid_scores_90_separation_dense_pred, grid_scores_60_dense_truth=\n grid_scores_60_dense_truth, 
grid_scores_90_dense_truth=\n grid_scores_90_dense_truth, grid_scores_60_separation_dense_truth=\n grid_scores_60_separation_dense_truth,\n grid_scores_90_separation_dense_truth=grid_scores_90_separation_dense_truth\n )\n",
"step-4": "import matplotlib\nimport os\ndisplay = os.environ.get('DISPLAY')\nif display is None or 'localhost' in display:\n matplotlib.use('agg')\nimport argparse\nimport numpy as np\nimport torch\nimport torch.nn as nn\nimport torch.nn.functional as F\nimport torch.optim as optim\nfrom datasets import train_test_loaders, angular_train_test_loaders, tf_train_test_loaders, load_from_cache\nfrom models import SSPPathIntegrationModel\nfrom datetime import datetime\nfrom tensorboardX import SummaryWriter\nimport json\nfrom spatial_semantic_pointers.utils import get_heatmap_vectors, ssp_to_loc, ssp_to_loc_v\nfrom spatial_semantic_pointers.plots import plot_predictions, plot_predictions_v\nimport matplotlib.pyplot as plt\nfrom path_integration_utils import pc_to_loc_v, encoding_func_from_model, pc_gauss_encoding_func, ssp_encoding_func, hd_gauss_encoding_func, hex_trig_encoding_func\nfrom ssp_navigation.utils.encodings import get_encoding_function\nimport grid_scoring.scores as scores\nimport grid_scoring.utils as utils\nfrom path_integration_utils import encoding_func_from_model, pc_gauss_encoding_func\nparser = argparse.ArgumentParser(\n 'Compute grid scores for a path integration model')\nparser.add_argument('--n-samples', type=int, default=5000)\nparser.add_argument('--use-localization', action='store_true')\nparser.add_argument('--dataset', type=str, default='')\nparser.add_argument('--model', type=str, default='')\nparser.add_argument('--fname-prefix', type=str, default='sac')\nparser.add_argument('--spatial-encoding', type=str, default='ssp', choices=\n ['ssp', 'hex-ssp', 'periodic-hex-ssp', 'grid-ssp', 'ind-ssp',\n 'orth-proj-ssp', 'rec-ssp', 'rec-hex-ssp', 'rec-ind-ssp',\n 'sub-toroid-ssp', 'var-sub-toroid-ssp', 'random', '2d', '2d-normalized',\n 'one-hot', 'hex-trig', 'trig', 'random-trig', 'random-rotated-trig',\n 'random-proj', 'legendre', 'learned', 'learned-normalized',\n 'frozen-learned', 'frozen-learned-normalized', 'pc-gauss', 'pc-dog',\n 
'tile-coding'])\nparser.add_argument('--frozen-model', type=str, default='', help=\n 'model to use frozen encoding weights from')\nparser.add_argument('--pc-gauss-sigma', type=float, default=0.25)\nparser.add_argument('--pc-diff-sigma', type=float, default=0.5)\nparser.add_argument('--hex-freq-coef', type=float, default=2.5, help=\n 'constant to scale frequencies by')\nparser.add_argument('--n-tiles', type=int, default=8, help=\n 'number of layers for tile coding')\nparser.add_argument('--n-bins', type=int, default=8, help=\n 'number of bins for tile coding')\nparser.add_argument('--ssp-scaling', type=float, default=1.0)\nparser.add_argument('--grid-ssp-min', type=float, default=0.25, help=\n 'minimum plane wave scale')\nparser.add_argument('--grid-ssp-max', type=float, default=2.0, help=\n 'maximum plane wave scale')\nparser.add_argument('--phi', type=float, default=0.5, help=\n 'phi as a fraction of pi for orth-proj-ssp')\nparser.add_argument('--n-proj', type=int, default=3, help=\n 'projection dimension for sub toroids')\nparser.add_argument('--scale-ratio', type=float, default=0, help=\n 'ratio between sub toroid scales')\nparser.add_argument('--hilbert-points', type=int, default=1, choices=[0, 1,\n 2, 3], help=\n 'pc centers. 0: random uniform. 1: hilbert curve. 2: evenly spaced grid. 
3: hex grid'\n )\nparser.add_argument('--seed', type=int, default=13)\nparser.add_argument('--dropout-p', type=float, default=0.5)\nparser.add_argument('--dim', type=int, default=512)\nparser.add_argument('--train-split', type=float, default=0.8, help=\n 'Training fraction of the train/test split')\nparser.add_argument('--allow-cache', action='store_true', help=\n 'once the dataset has been generated, it will be saved to a file to be loaded faster'\n )\nparser.add_argument('--trajectory-length', type=int, default=100)\nparser.add_argument('--minibatch-size', type=int, default=10)\nparser.add_argument('--n-image-bins', type=int, default=20)\nparser.add_argument('--n-hd-cells', type=int, default=0, help=\n 'If non-zero, use linear and angular velocity as well as HD cell output')\nparser.add_argument('--sin-cos-ang', type=int, default=1, choices=[0, 1],\n help=\n 'Use the sin and cos of the angular velocity if angular velocities are used'\n )\nparser.add_argument('--use-lmu', action='store_true')\nparser.add_argument('--lmu-order', type=int, default=6)\nparser.add_argument('--no-cache-load', action='store_true', help=\n 'do not load from cache')\nargs = parser.parse_args()\nssp_scaling = args.ssp_scaling\ntorch.manual_seed(args.seed)\nnp.random.seed(args.seed)\ndata = np.load(args.dataset)\nlimit_low = 0\nlimit_high = 2.2\nres = 128\nencoding_func, dim = get_encoding_function(args, limit_low=limit_low,\n limit_high=limit_high)\nxs = np.linspace(limit_low, limit_high, res)\nys = np.linspace(limit_low, limit_high, res)\nheatmap_vectors = np.zeros((len(xs), len(ys), dim))\nprint('Generating Heatmap Vectors')\nfor i, x in enumerate(xs):\n for j, y in enumerate(ys):\n heatmap_vectors[i, j, :] = encoding_func(x=x, y=y)\n heatmap_vectors[i, j, :] /= np.linalg.norm(heatmap_vectors[i, j, :])\nprint('Heatmap Vector Generation Complete')\nn_samples = args.n_samples\nrollout_length = args.trajectory_length\nbatch_size = args.minibatch_size\nif args.n_hd_cells > 0:\n 
hd_encoding_func = hd_gauss_encoding_func(dim=args.n_hd_cells, sigma=\n 0.25, use_softmax=False, rng=np.random.RandomState(args.seed))\n if args.sin_cos_ang:\n input_size = 3\n else:\n input_size = 2\n model = SSPPathIntegrationModel(input_size=input_size, unroll_length=\n rollout_length, sp_dim=dim + args.n_hd_cells, dropout_p=args.\n dropout_p, use_lmu=args.use_lmu, order=args.lmu_order)\nelse:\n hd_encoding_func = None\n model = SSPPathIntegrationModel(input_size=2, unroll_length=\n rollout_length, sp_dim=dim, dropout_p=args.dropout_p, use_lmu=args.\n use_lmu, order=args.lmu_order)\nmodel.load_state_dict(torch.load(args.model), strict=False)\nmodel.eval()\nencoding_specific = ''\nif 'ssp' in args.spatial_encoding:\n encoding_specific = args.ssp_scaling\nelif args.spatial_encoding == 'frozen-learned':\n encoding_specific = args.frozen_model\nelif args.spatial_encoding == 'pc-gauss' or args.spatial_encoding == 'pc-gauss-softmax':\n encoding_specific = args.pc_gauss_sigma\nelif args.spatial_encoding == 'pc-dog':\n encoding_specific = '{}-{}'.format(args.pc_gauss_sigma, args.pc_diff_sigma)\nelif args.spatial_encoding == 'hex-trig':\n encoding_specific = args.hex_freq_coef\nif 'tf' in args.dataset:\n cache_fname = 'dataset_cache/tf_{}_{}_{}_{}_{}_{}.npz'.format(args.\n spatial_encoding, args.dim, args.seed, args.n_samples, args.\n n_hd_cells, encoding_specific)\nelse:\n cache_fname = 'dataset_cache/{}_{}_{}_{}_{}_{}.npz'.format(args.\n spatial_encoding, args.dim, args.seed, args.n_samples, args.\n n_hd_cells, encoding_specific)\nif os.path.exists(cache_fname) and not args.no_cache_load:\n print('Generating Train and Test Loaders from Cache')\n trainloader, testloader = load_from_cache(cache_fname, batch_size=\n batch_size, n_samples=n_samples)\nelse:\n print('Generating Train and Test Loaders')\n if 'tf' in args.dataset:\n assert args.sin_cos_ang == 1\n trainloader, testloader = tf_train_test_loaders(data,\n n_train_samples=n_samples, n_test_samples=n_samples,\n 
rollout_length=rollout_length, batch_size=batch_size, encoding=\n args.spatial_encoding, encoding_func=encoding_func,\n encoding_dim=args.dim, train_split=args.train_split, hd_dim=\n args.n_hd_cells, hd_encoding_func=hd_encoding_func, sin_cos_ang\n =args.sin_cos_ang)\n elif args.n_hd_cells > 0:\n trainloader, testloader = angular_train_test_loaders(data,\n n_train_samples=n_samples, n_test_samples=n_samples,\n rollout_length=rollout_length, batch_size=batch_size, encoding=\n args.spatial_encoding, encoding_func=encoding_func,\n encoding_dim=args.dim, train_split=args.train_split, hd_dim=\n args.n_hd_cells, hd_encoding_func=hd_encoding_func, sin_cos_ang\n =args.sin_cos_ang)\n else:\n trainloader, testloader = train_test_loaders(data, n_train_samples=\n n_samples, n_test_samples=n_samples, rollout_length=\n rollout_length, batch_size=batch_size, encoding=args.\n spatial_encoding, encoding_func=encoding_func, encoding_dim=\n args.dim, train_split=args.train_split)\n if args.allow_cache:\n if not os.path.exists('dataset_cache'):\n os.makedirs('dataset_cache')\n np.savez(cache_fname, train_velocity_inputs=trainloader.dataset.\n velocity_inputs, train_ssp_inputs=trainloader.dataset.\n ssp_inputs, train_ssp_outputs=trainloader.dataset.ssp_outputs,\n test_velocity_inputs=testloader.dataset.velocity_inputs,\n test_ssp_inputs=testloader.dataset.ssp_inputs, test_ssp_outputs\n =testloader.dataset.ssp_outputs)\nprint('Train and Test Loaders Generation Complete')\nstarts = [0.2] * 10\nends = np.linspace(0.4, 1.0, num=10)\nmasks_parameters = zip(starts, ends.tolist())\nlatest_epoch_scorer = scores.GridScorer(nbins=args.n_image_bins,\n coords_range=((0, 2.2), (0, 2.2)), mask_parameters=masks_parameters)\nfname_lstm_pred = '{}_{}samples_lstm_pred.pdf'.format(args.fname_prefix,\n args.n_samples)\nfname_lstm_truth = '{}_{}samples_lstm_truth.pdf'.format(args.fname_prefix,\n args.n_samples)\nfname_dense_pred = '{}_{}samples_dense_pred.pdf'.format(args.fname_prefix,\n 
args.n_samples)\nfname_dense_truth = '{}_{}samples_dense_truth.pdf'.format(args.fname_prefix,\n args.n_samples)\nprint('Testing')\nwith torch.no_grad():\n for i, data in enumerate(testloader):\n velocity_inputs, ssp_inputs, ssp_outputs = data\n ssp_pred, lstm_outputs, dense_outputs = model.forward_activations(\n velocity_inputs, ssp_inputs)\n predictions = np.zeros((ssp_pred.shape[0] * ssp_pred.shape[1], 2))\n coords = np.zeros((ssp_pred.shape[0] * ssp_pred.shape[1], 2))\n lstm_activations = np.zeros((ssp_pred.shape[0] * ssp_pred.shape[1],\n model.lstm_hidden_size))\n dense_activations = np.zeros((ssp_pred.shape[0] * ssp_pred.shape[1],\n model.linear_hidden_size))\n assert rollout_length == ssp_pred.shape[0]\n print('Computing predicted locations and true locations')\n for ri in range(rollout_length):\n pred = ssp_pred.detach().numpy()[ri, :, :args.dim]\n predictions[ri * ssp_pred.shape[1]:(ri + 1) * ssp_pred.shape[1], :\n ] = ssp_to_loc_v(pred, heatmap_vectors, xs, ys)\n coord = ssp_outputs.detach().numpy()[:, ri, :args.dim]\n coords[ri * ssp_pred.shape[1]:(ri + 1) * ssp_pred.shape[1], :\n ] = ssp_to_loc_v(coord, heatmap_vectors, xs, ys)\n lstm_activations[ri * ssp_pred.shape[1]:(ri + 1) * ssp_pred.shape[1], :\n ] = lstm_outputs.detach().numpy()[ri, :, :]\n dense_activations[ri * ssp_pred.shape[1]:(ri + 1) * ssp_pred.shape[\n 1], :] = dense_outputs.detach().numpy()[ri, :, :]\nprint(np.max(predictions))\nprint(np.min(predictions))\n(grid_scores_60_pred, grid_scores_90_pred, grid_scores_60_separation_pred,\n grid_scores_90_separation_pred) = (utils.get_scores_and_plot(scorer=\n latest_epoch_scorer, data_abs_xy=predictions, activations=\n lstm_activations, directory='output_grid_scores', filename=fname_lstm_pred)\n )\n(grid_scores_60_truth, grid_scores_90_truth,\n grid_scores_60_separation_truth, grid_scores_90_separation_truth) = (utils\n .get_scores_and_plot(scorer=latest_epoch_scorer, data_abs_xy=coords,\n activations=lstm_activations, 
directory='output_grid_scores', filename=\n fname_lstm_truth))\n(grid_scores_60_dense_pred, grid_scores_90_dense_pred,\n grid_scores_60_separation_dense_pred, grid_scores_90_separation_dense_pred\n ) = (utils.get_scores_and_plot(scorer=latest_epoch_scorer, data_abs_xy=\n predictions, activations=dense_activations, directory=\n 'output_grid_scores', filename=fname_dense_pred))\n(grid_scores_60_dense_truth, grid_scores_90_dense_truth,\n grid_scores_60_separation_dense_truth,\n grid_scores_90_separation_dense_truth) = (utils.get_scores_and_plot(\n scorer=latest_epoch_scorer, data_abs_xy=coords, activations=\n dense_activations, directory='output_grid_scores', filename=\n fname_dense_truth))\nprint(grid_scores_60_truth, grid_scores_90_truth,\n grid_scores_60_separation_truth, grid_scores_90_separation_truth)\nfname = 'output_grid_scores/{}_{}samples.npz'.format(args.fname_prefix,\n args.n_samples)\nnp.savez(fname, grid_scores_60_pred=grid_scores_60_pred,\n grid_scores_90_pred=grid_scores_90_pred, grid_scores_60_separation_pred\n =grid_scores_60_separation_pred, grid_scores_90_separation_pred=\n grid_scores_90_separation_pred, grid_scores_60_truth=\n grid_scores_60_truth, grid_scores_90_truth=grid_scores_90_truth,\n grid_scores_60_separation_truth=grid_scores_60_separation_truth,\n grid_scores_90_separation_truth=grid_scores_90_separation_truth,\n grid_scores_60_dense_pred=grid_scores_60_dense_pred,\n grid_scores_90_dense_pred=grid_scores_90_dense_pred,\n grid_scores_60_separation_dense_pred=\n grid_scores_60_separation_dense_pred,\n grid_scores_90_separation_dense_pred=\n grid_scores_90_separation_dense_pred, grid_scores_60_dense_truth=\n grid_scores_60_dense_truth, grid_scores_90_dense_truth=\n grid_scores_90_dense_truth, grid_scores_60_separation_dense_truth=\n grid_scores_60_separation_dense_truth,\n grid_scores_90_separation_dense_truth=grid_scores_90_separation_dense_truth\n )\n",
"step-5": "# Compute grid scores using the new dataset format\n\nimport matplotlib\nimport os\n# allow code to work on machines without a display or in a screen session\ndisplay = os.environ.get('DISPLAY')\nif display is None or 'localhost' in display:\n matplotlib.use('agg')\n\nimport argparse\nimport numpy as np\nimport torch\nimport torch.nn as nn\nimport torch.nn.functional as F\nimport torch.optim as optim\nfrom datasets import train_test_loaders, angular_train_test_loaders, tf_train_test_loaders, load_from_cache\nfrom models import SSPPathIntegrationModel\nfrom datetime import datetime\nfrom tensorboardX import SummaryWriter\nimport json\nfrom spatial_semantic_pointers.utils import get_heatmap_vectors, ssp_to_loc, ssp_to_loc_v\nfrom spatial_semantic_pointers.plots import plot_predictions, plot_predictions_v\nimport matplotlib.pyplot as plt\nfrom path_integration_utils import pc_to_loc_v, encoding_func_from_model, pc_gauss_encoding_func, ssp_encoding_func, \\\n hd_gauss_encoding_func, hex_trig_encoding_func\nfrom ssp_navigation.utils.encodings import get_encoding_function\n\nimport grid_scoring.scores as scores\nimport grid_scoring.utils as utils\n# from grid_scoring.run_network import run_and_gather_activations, run_and_gather_localization_activations\nfrom path_integration_utils import encoding_func_from_model, pc_gauss_encoding_func\n\n\nparser = argparse.ArgumentParser('Compute grid scores for a path integration model')\nparser.add_argument('--n-samples', type=int, default=5000)\nparser.add_argument('--use-localization', action='store_true')\n# TODO: use these parameters\nparser.add_argument('--dataset', type=str, default='')\nparser.add_argument('--model', type=str, default='')\nparser.add_argument('--fname-prefix', type=str, default='sac')\n\nparser.add_argument('--spatial-encoding', type=str, default='ssp',\n choices=[\n 'ssp', 'hex-ssp', 'periodic-hex-ssp', 'grid-ssp', 'ind-ssp', 'orth-proj-ssp',\n 'rec-ssp', 'rec-hex-ssp', 'rec-ind-ssp', 
'sub-toroid-ssp', 'var-sub-toroid-ssp',\n 'random', '2d', '2d-normalized', 'one-hot', 'hex-trig',\n 'trig', 'random-trig', 'random-rotated-trig', 'random-proj', 'legendre',\n 'learned', 'learned-normalized', 'frozen-learned', 'frozen-learned-normalized',\n 'pc-gauss', 'pc-dog', 'tile-coding'\n ])\n # choices=['ssp', '2d', 'frozen-learned', 'pc-gauss', 'pc-dog', 'pc-gauss-softmax', 'hex-trig', 'hex-trig-all-freq'])\nparser.add_argument('--frozen-model', type=str, default='', help='model to use frozen encoding weights from')\nparser.add_argument('--pc-gauss-sigma', type=float, default=0.25)\nparser.add_argument('--pc-diff-sigma', type=float, default=0.5)\nparser.add_argument('--hex-freq-coef', type=float, default=2.5, help='constant to scale frequencies by')\nparser.add_argument('--n-tiles', type=int, default=8, help='number of layers for tile coding')\nparser.add_argument('--n-bins', type=int, default=8, help='number of bins for tile coding')\nparser.add_argument('--ssp-scaling', type=float, default=1.0)\nparser.add_argument('--grid-ssp-min', type=float, default=0.25, help='minimum plane wave scale')\nparser.add_argument('--grid-ssp-max', type=float, default=2.0, help='maximum plane wave scale')\nparser.add_argument('--phi', type=float, default=0.5, help='phi as a fraction of pi for orth-proj-ssp')\nparser.add_argument('--n-proj', type=int, default=3, help='projection dimension for sub toroids')\nparser.add_argument('--scale-ratio', type=float, default=0, help='ratio between sub toroid scales')\nparser.add_argument('--hilbert-points', type=int, default=1, choices=[0, 1, 2, 3],\n help='pc centers. 0: random uniform. 1: hilbert curve. 2: evenly spaced grid. 
3: hex grid')\n\nparser.add_argument('--seed', type=int, default=13)\nparser.add_argument('--dropout-p', type=float, default=0.5)\nparser.add_argument('--dim', type=int, default=512)\nparser.add_argument('--train-split', type=float, default=0.8, help='Training fraction of the train/test split')\nparser.add_argument('--allow-cache', action='store_true',\n help='once the dataset has been generated, it will be saved to a file to be loaded faster')\n\nparser.add_argument('--trajectory-length', type=int, default=100)\nparser.add_argument('--minibatch-size', type=int, default=10)\n\nparser.add_argument('--n-image-bins', type=int, default=20)\n\nparser.add_argument('--n-hd-cells', type=int, default=0, help='If non-zero, use linear and angular velocity as well as HD cell output')\nparser.add_argument('--sin-cos-ang', type=int, default=1, choices=[0, 1],\n help='Use the sin and cos of the angular velocity if angular velocities are used')\nparser.add_argument('--use-lmu', action='store_true')\nparser.add_argument('--lmu-order', type=int, default=6)\n\nparser.add_argument('--no-cache-load', action='store_true', help='do not load from cache')\n\nargs = parser.parse_args()\n\nssp_scaling = args.ssp_scaling\n\ntorch.manual_seed(args.seed)\nnp.random.seed(args.seed)\n\ndata = np.load(args.dataset)\n\n# only used for frozen-learned and other custom encoding functions\n# encoding_func = None\n\nlimit_low = 0 #* args.ssp_scaling\nlimit_high = 2.2 #* args.ssp_scaling\nres = 128 #256\n\nencoding_func, dim = get_encoding_function(args, limit_low=limit_low, limit_high=limit_high)\n\nxs = np.linspace(limit_low, limit_high, res)\nys = np.linspace(limit_low, limit_high, res)\n\n# FIXME: inefficient but will work for now\nheatmap_vectors = np.zeros((len(xs), len(ys), dim))\n\nprint(\"Generating Heatmap Vectors\")\n\nfor i, x in enumerate(xs):\n for j, y in enumerate(ys):\n heatmap_vectors[i, j, :] = encoding_func(\n # batch dim\n # np.array(\n # [[x, y]]\n # )\n # no batch dim\n # 
np.array(\n # [x, y]\n # )\n # new signature\n x=x, y=y\n )\n\n heatmap_vectors[i, j, :] /= np.linalg.norm(heatmap_vectors[i, j, :])\n\nprint(\"Heatmap Vector Generation Complete\")\n\nn_samples = args.n_samples\nrollout_length = args.trajectory_length\nbatch_size = args.minibatch_size\n\n\nif args.n_hd_cells > 0:\n hd_encoding_func = hd_gauss_encoding_func(dim=args.n_hd_cells, sigma=0.25, use_softmax=False, rng=np.random.RandomState(args.seed))\n if args.sin_cos_ang:\n input_size = 3\n else:\n input_size = 2\n model = SSPPathIntegrationModel(\n input_size=input_size, unroll_length=rollout_length,\n sp_dim=dim + args.n_hd_cells, dropout_p=args.dropout_p, use_lmu=args.use_lmu, order=args.lmu_order\n )\nelse:\n hd_encoding_func = None\n model = SSPPathIntegrationModel(\n input_size=2, unroll_length=rollout_length,\n sp_dim=dim, dropout_p=args.dropout_p, use_lmu=args.use_lmu, order=args.lmu_order\n )\n\n\n# model = SSPPathIntegrationModel(unroll_length=rollout_length, sp_dim=dim, dropout_p=args.dropout_p)\n\nmodel.load_state_dict(torch.load(args.model), strict=False)\n\nmodel.eval()\n\n# encoding specific cache string\nencoding_specific = ''\nif 'ssp' in args.spatial_encoding:\n encoding_specific = args.ssp_scaling\nelif args.spatial_encoding == 'frozen-learned':\n encoding_specific = args.frozen_model\nelif args.spatial_encoding == 'pc-gauss' or args.spatial_encoding == 'pc-gauss-softmax':\n encoding_specific = args.pc_gauss_sigma\nelif args.spatial_encoding == 'pc-dog':\n encoding_specific = '{}-{}'.format(args.pc_gauss_sigma, args.pc_diff_sigma)\nelif args.spatial_encoding == 'hex-trig':\n encoding_specific = args.hex_freq_coef\n\nif 'tf' in args.dataset:\n cache_fname = 'dataset_cache/tf_{}_{}_{}_{}_{}_{}.npz'.format(\n args.spatial_encoding, args.dim, args.seed, args.n_samples, args.n_hd_cells, encoding_specific\n )\nelse:\n cache_fname = 'dataset_cache/{}_{}_{}_{}_{}_{}.npz'.format(\n args.spatial_encoding, args.dim, args.seed, args.n_samples, args.n_hd_cells, 
encoding_specific\n )\n\n# if the file exists, load it from cache\nif os.path.exists(cache_fname) and not args.no_cache_load:\n print(\"Generating Train and Test Loaders from Cache\")\n trainloader, testloader = load_from_cache(cache_fname, batch_size=batch_size, n_samples=n_samples)\nelse:\n print(\"Generating Train and Test Loaders\")\n\n if 'tf' in args.dataset:\n # tfrecord dataset only supports using the sin and cos of angular velocity\n assert args.sin_cos_ang == 1\n\n trainloader, testloader = tf_train_test_loaders(\n data,\n n_train_samples=n_samples,\n n_test_samples=n_samples,\n rollout_length=rollout_length,\n batch_size=batch_size,\n encoding=args.spatial_encoding,\n encoding_func=encoding_func,\n encoding_dim=args.dim,\n train_split=args.train_split,\n hd_dim=args.n_hd_cells,\n hd_encoding_func=hd_encoding_func,\n sin_cos_ang=args.sin_cos_ang,\n )\n\n else:\n\n if args.n_hd_cells > 0:\n trainloader, testloader = angular_train_test_loaders(\n data,\n n_train_samples=n_samples,\n n_test_samples=n_samples,\n rollout_length=rollout_length,\n batch_size=batch_size,\n encoding=args.spatial_encoding,\n encoding_func=encoding_func,\n encoding_dim=args.dim,\n train_split=args.train_split,\n hd_dim=args.n_hd_cells,\n hd_encoding_func=hd_encoding_func,\n sin_cos_ang=args.sin_cos_ang,\n )\n else:\n trainloader, testloader = train_test_loaders(\n data,\n n_train_samples=n_samples,\n n_test_samples=n_samples,\n rollout_length=rollout_length,\n batch_size=batch_size,\n encoding=args.spatial_encoding,\n encoding_func=encoding_func,\n encoding_dim=args.dim,\n train_split=args.train_split,\n )\n\n if args.allow_cache:\n\n if not os.path.exists('dataset_cache'):\n os.makedirs('dataset_cache')\n\n np.savez(\n cache_fname,\n train_velocity_inputs=trainloader.dataset.velocity_inputs,\n train_ssp_inputs=trainloader.dataset.ssp_inputs,\n train_ssp_outputs=trainloader.dataset.ssp_outputs,\n test_velocity_inputs=testloader.dataset.velocity_inputs,\n 
test_ssp_inputs=testloader.dataset.ssp_inputs,\n test_ssp_outputs=testloader.dataset.ssp_outputs,\n )\n\nprint(\"Train and Test Loaders Generation Complete\")\n\nstarts = [0.2] * 10\nends = np.linspace(0.4, 1.0, num=10)\nmasks_parameters = zip(starts, ends.tolist())\nlatest_epoch_scorer = scores.GridScorer(\n nbins=args.n_image_bins,\n coords_range=((0, 2.2), (0, 2.2)), # data_reader.get_coord_range(),\n mask_parameters=masks_parameters,\n)\n\n\nfname_lstm_pred = '{}_{}samples_lstm_pred.pdf'.format(args.fname_prefix, args.n_samples)\nfname_lstm_truth = '{}_{}samples_lstm_truth.pdf'.format(args.fname_prefix, args.n_samples)\nfname_dense_pred = '{}_{}samples_dense_pred.pdf'.format(args.fname_prefix, args.n_samples)\nfname_dense_truth = '{}_{}samples_dense_truth.pdf'.format(args.fname_prefix, args.n_samples)\n\n# Run and gather activations\n\nprint(\"Testing\")\nwith torch.no_grad():\n # Everything is in one batch, so this loop will only happen once\n for i, data in enumerate(testloader):\n velocity_inputs, ssp_inputs, ssp_outputs = data\n\n ssp_pred, lstm_outputs, dense_outputs = model.forward_activations(velocity_inputs, ssp_inputs)\n\n predictions = np.zeros((ssp_pred.shape[0]*ssp_pred.shape[1], 2))\n coords = np.zeros((ssp_pred.shape[0]*ssp_pred.shape[1], 2))\n lstm_activations = np.zeros((ssp_pred.shape[0]*ssp_pred.shape[1], model.lstm_hidden_size))\n dense_activations = np.zeros((ssp_pred.shape[0] * ssp_pred.shape[1], model.linear_hidden_size))\n\n assert rollout_length == ssp_pred.shape[0]\n\n # # For each neuron, contains the average activity at each spatial bin\n # # Computing for both ground truth and predicted location\n # rate_maps_pred = np.zeros((model.lstm_hidden_size, len(xs), len(ys)))\n # rate_maps_truth = np.zeros((model.lstm_hidden_size, len(xs), len(ys)))\n\n print(\"Computing predicted locations and true locations\")\n # Using all data, one chunk at a time\n for ri in range(rollout_length):\n\n # trim out head direction info if that was included 
by only looking up to args.encoding_dim\n\n # computing 'predicted' coordinates, where the agent thinks it is\n pred = ssp_pred.detach().numpy()[ri, :, :args.dim]\n # pred = pred / pred.sum(axis=1)[:, np.newaxis]\n predictions[ri * ssp_pred.shape[1]:(ri + 1) * ssp_pred.shape[1], :] = ssp_to_loc_v(\n pred,\n heatmap_vectors, xs, ys\n )\n\n # computing 'ground truth' coordinates, where the agent should be\n coord = ssp_outputs.detach().numpy()[:, ri, :args.dim]\n # coord = coord / coord.sum(axis=1)[:, np.newaxis]\n coords[ri * ssp_pred.shape[1]:(ri + 1) * ssp_pred.shape[1], :] = ssp_to_loc_v(\n coord,\n heatmap_vectors, xs, ys\n )\n\n # reshaping activations and converting to numpy array\n lstm_activations[ri*ssp_pred.shape[1]:(ri+1)*ssp_pred.shape[1], :] = lstm_outputs.detach().numpy()[ri, :, :]\n dense_activations[ri * ssp_pred.shape[1]:(ri + 1) * ssp_pred.shape[1], :] = dense_outputs.detach().numpy()[ri, :, :]\n\n# predictions = predictions / args.ssp_scaling\n# coords = coords / args.ssp_scaling\n\nprint(np.max(predictions))\nprint(np.min(predictions))\n\ngrid_scores_60_pred, grid_scores_90_pred, grid_scores_60_separation_pred, grid_scores_90_separation_pred = utils.get_scores_and_plot(\n scorer=latest_epoch_scorer,\n data_abs_xy=predictions, #res['pos_xy'],\n activations=lstm_activations, #res['bottleneck'],\n directory='output_grid_scores', #FLAGS.saver_results_directory,\n filename=fname_lstm_pred,\n)\n\ngrid_scores_60_truth, grid_scores_90_truth, grid_scores_60_separation_truth, grid_scores_90_separation_truth = utils.get_scores_and_plot(\n scorer=latest_epoch_scorer,\n data_abs_xy=coords, #res['pos_xy'],\n activations=lstm_activations, #res['bottleneck'],\n directory='output_grid_scores', #FLAGS.saver_results_directory,\n filename=fname_lstm_truth,\n)\n\ngrid_scores_60_dense_pred, grid_scores_90_dense_pred, grid_scores_60_separation_dense_pred, grid_scores_90_separation_dense_pred = utils.get_scores_and_plot(\n scorer=latest_epoch_scorer,\n 
data_abs_xy=predictions, #res['pos_xy'],\n activations=dense_activations, #res['bottleneck'],\n directory='output_grid_scores', #FLAGS.saver_results_directory,\n filename=fname_dense_pred,\n)\n\ngrid_scores_60_dense_truth, grid_scores_90_dense_truth, grid_scores_60_separation_dense_truth, grid_scores_90_separation_dense_truth = utils.get_scores_and_plot(\n scorer=latest_epoch_scorer,\n data_abs_xy=coords, #res['pos_xy'],\n activations=dense_activations, #res['bottleneck'],\n directory='output_grid_scores', #FLAGS.saver_results_directory,\n filename=fname_dense_truth,\n)\n\n\nprint(grid_scores_60_truth, grid_scores_90_truth, grid_scores_60_separation_truth, grid_scores_90_separation_truth)\n\n# Saving to make grid score values easy to compare for different variations\nfname = 'output_grid_scores/{}_{}samples.npz'.format(args.fname_prefix, args.n_samples)\nnp.savez(\n fname,\n grid_scores_60_pred=grid_scores_60_pred,\n grid_scores_90_pred=grid_scores_90_pred,\n grid_scores_60_separation_pred=grid_scores_60_separation_pred,\n grid_scores_90_separation_pred=grid_scores_90_separation_pred,\n grid_scores_60_truth=grid_scores_60_truth,\n grid_scores_90_truth=grid_scores_90_truth,\n grid_scores_60_separation_truth=grid_scores_60_separation_truth,\n grid_scores_90_separation_truth=grid_scores_90_separation_truth,\n\n grid_scores_60_dense_pred=grid_scores_60_dense_pred,\n grid_scores_90_dense_pred=grid_scores_90_dense_pred,\n grid_scores_60_separation_dense_pred=grid_scores_60_separation_dense_pred,\n grid_scores_90_separation_dense_pred=grid_scores_90_separation_dense_pred,\n grid_scores_60_dense_truth=grid_scores_60_dense_truth,\n grid_scores_90_dense_truth=grid_scores_90_dense_truth,\n grid_scores_60_separation_dense_truth=grid_scores_60_separation_dense_truth,\n grid_scores_90_separation_dense_truth=grid_scores_90_separation_dense_truth,\n)\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
import math
def upsample1(d, p):
    """Additive barrier: grow the radius d by p (valid p: 1..10)."""
    assert 1 <= p <= 10
    return p + d
def upsample2(d, p):
    """Multiplicative barrier: scale the radius d by p (valid p: 2 or 3)."""
    assert 2 <= p <= 3
    return p * d
def downsample(d, p):
    """Gathering barrier: shrink the radius to ceil(d / p) (valid p: 2..10)."""
    assert 2 <= p <= 10
    shrunk = d / p
    return math.ceil(shrunk)
# Lethal radius before any barrier has been applied.
lethal_radius = 1

# Sample barrier parameters (z, p) — kept for reference; not read below.
config = [(1, 6),
          (2, 3),
          (3, 3),
          (2, 3),
          (2, 3),
          (3, 7)]

# Dispatch table: barrier type z -> radius transformation.
_barriers = {1: upsample1, 2: upsample2, 3: downsample}

# First stdin line: number of barriers; each following line: "z p".
for _ in range(int(input())):
    z, p = map(int, input().strip().split())
    if z in _barriers:
        lethal_radius = _barriers[z](lethal_radius, p)
print(lethal_radius)
|
normal
|
{
"blob_id": "cb6f68c8b8a6cead1d9fcd25fa2a4e60f7a8fb28",
"index": 9746,
"step-1": "<mask token>\n\n\ndef upsample1(d, p):\n assert 1 <= p <= 10\n return d + p\n\n\ndef upsample2(d, p):\n assert 2 <= p <= 3\n return d * p\n\n\ndef downsample(d, p):\n assert 2 <= p <= 10\n return math.ceil(d / p)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef upsample1(d, p):\n assert 1 <= p <= 10\n return d + p\n\n\ndef upsample2(d, p):\n assert 2 <= p <= 3\n return d * p\n\n\ndef downsample(d, p):\n assert 2 <= p <= 10\n return math.ceil(d / p)\n\n\n<mask token>\nfor i in range(int(input())):\n z, p = list(map(int, input().strip().split()))\n if z == 1:\n lethal_radius = upsample1(lethal_radius, p)\n if z == 2:\n lethal_radius = upsample2(lethal_radius, p)\n if z == 3:\n lethal_radius = downsample(lethal_radius, p)\nprint(lethal_radius)\n",
"step-3": "<mask token>\n\n\ndef upsample1(d, p):\n assert 1 <= p <= 10\n return d + p\n\n\ndef upsample2(d, p):\n assert 2 <= p <= 3\n return d * p\n\n\ndef downsample(d, p):\n assert 2 <= p <= 10\n return math.ceil(d / p)\n\n\nlethal_radius = 1\nconfig = [(1, 6), (2, 3), (3, 3), (2, 3), (2, 3), (3, 7)]\nfor i in range(int(input())):\n z, p = list(map(int, input().strip().split()))\n if z == 1:\n lethal_radius = upsample1(lethal_radius, p)\n if z == 2:\n lethal_radius = upsample2(lethal_radius, p)\n if z == 3:\n lethal_radius = downsample(lethal_radius, p)\nprint(lethal_radius)\n",
"step-4": "import math\n\n\ndef upsample1(d, p):\n assert 1 <= p <= 10\n return d + p\n\n\ndef upsample2(d, p):\n assert 2 <= p <= 3\n return d * p\n\n\ndef downsample(d, p):\n assert 2 <= p <= 10\n return math.ceil(d / p)\n\n\nlethal_radius = 1\nconfig = [(1, 6), (2, 3), (3, 3), (2, 3), (2, 3), (3, 7)]\nfor i in range(int(input())):\n z, p = list(map(int, input().strip().split()))\n if z == 1:\n lethal_radius = upsample1(lethal_radius, p)\n if z == 2:\n lethal_radius = upsample2(lethal_radius, p)\n if z == 3:\n lethal_radius = downsample(lethal_radius, p)\nprint(lethal_radius)\n",
"step-5": "import math\n\n\ndef upsample1(d, p):\n # 普通结界\n assert 1 <= p <= 10\n return d + p\n\n\ndef upsample2(d, p):\n # 倍增结界\n assert 2 <= p <= 3\n return d * p\n\n\ndef downsample(d, p):\n # 聚集结界\n assert 2 <= p <= 10\n return math.ceil(d / p)\n\n\n# 初始化杀伤力范围\nlethal_radius = 1\n\n# 结界参数(z, p)\nconfig = [(1, 6),\n (2, 3),\n (3, 3),\n (2, 3),\n (2, 3),\n (3, 7)]\n\nfor i in range(int(input())):\n z, p = list(map(int, input().strip().split()))\n if z == 1:\n lethal_radius = upsample1(lethal_radius, p)\n if z == 2:\n lethal_radius = upsample2(lethal_radius, p)\n if z == 3:\n lethal_radius = downsample(lethal_radius, p)\nprint(lethal_radius)\n\n\n\n",
"step-ids": [
3,
4,
5,
6,
7
]
}
|
[
3,
4,
5,
6,
7
] |
import tensorflow as tf
from tensorflow.keras import Model
from tensorflow.keras.layers import Dense, Flatten, Conv2D, BatchNormalization, LeakyReLU, Reshape, Conv2DTranspose
import tensorflow_hub as hub
from collections import Counter
import numpy as np
import sys
sys.path.append('../data')
from imageio import imwrite
import os
import argparse
from preprocessing import *
# this time, katherine is here T_TTTT

# Killing optional CPU driver warnings (set before TensorFlow starts logging)
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2'

# NOTE(review): tf.test.is_gpu_available() is deprecated in recent TF releases
# in favour of tf.config.list_physical_devices('GPU') — confirm the pinned TF
# version before modernizing.
gpu_available = tf.test.is_gpu_available()
print("GPU Available: ", gpu_available)

# Module-level per-label running score, mutated by DeepFont.total_accuracy:
# +1 each time that label is predicted correctly, -1 otherwise.
performance_dict = {}

# Command-line interface. NOTE(review): the 'DCGAN' description and several
# flags (img-dir, z-dim, beta1, num-gen-updates, log-every, save-every) look
# inherited from a GAN template; only mode, out-dir, restore-checkpoint,
# num-epochs, and device are actually consumed in this file.
parser = argparse.ArgumentParser(description='DCGAN')
parser.add_argument('--img-dir', type=str, default='./data/celebA',
                    help='Data where training images live')
parser.add_argument('--out-dir', type=str, default='./output',
                    help='Data where sampled output images will be written')
parser.add_argument('--mode', type=str, default='train',
                    help='Can be "train" or "test"')
parser.add_argument('--restore-checkpoint', action='store_true',
                    help='Use this flag if you want to resuming training from a previously-saved checkpoint')
parser.add_argument('--z-dim', type=int, default=100,
                    help='Dimensionality of the latent space')
parser.add_argument('--batch-size', type=int, default=128,
                    help='Sizes of image batches fed through the network')
parser.add_argument('--num-data-threads', type=int, default=2,
                    help='Number of threads to use when loading & pre-processing training images')
parser.add_argument('--num-epochs', type=int, default=10,
                    help='Number of passes through the training data to make before stopping')
parser.add_argument('--learn-rate', type=float, default=0.0002,
                    help='Learning rate for Adam optimizer')
parser.add_argument('--beta1', type=float, default=0.5,
                    help='"beta1" parameter for Adam optimizer')
parser.add_argument('--num-gen-updates', type=int, default=2,
                    help='Number of generator updates per discriminator update')
parser.add_argument('--log-every', type=int, default=7,
                    help='Print losses after every [this many] training iterations')
parser.add_argument('--save-every', type=int, default=500,
                    help='Save the state of the network after every [this many] training iterations')
parser.add_argument('--device', type=str, default='GPU:0' if gpu_available else 'CPU:0',
                    help='specific the device of computation eg. CPU:0, GPU:0, GPU:1, GPU:2, ... ')

args = parser.parse_args()
class DeepFont(tf.keras.Model):
    """CNN font classifier over 96x96 single-channel crops, predicting one of
    150 font classes.

    The first two conv blocks are marked trainable=False and named
    ('conv_layer1'/'conv_layer2') so that weights loaded by name stay frozen
    while the rest of the network is trained.
    """

    def __init__(self):
        super(DeepFont, self).__init__()
        # Fixed batch size assumed by the train/test driver loops in this file.
        self.batch_size = 128
        self.model = tf.keras.Sequential()
        # Input arrives flat; reshape to a single-channel 96x96 image.
        self.model.add(tf.keras.layers.Reshape((96, 96, 1)))
        # Frozen feature-extraction blocks (weights restored by layer name).
        self.model.add(tf.keras.layers.Conv2D(trainable=False, filters=64, strides=(2,2), kernel_size=(3,3), padding='same', name='conv_layer1', input_shape=(96, 96,1)))
        self.model.add(tf.keras.layers.BatchNormalization())
        self.model.add(tf.keras.layers.MaxPooling2D(pool_size=(2,2), strides=None, padding='same'))
        self.model.add(tf.keras.layers.Conv2D(trainable=False, filters=128, strides=(1,1), kernel_size=(3,3), padding='same', name='conv_layer2'))
        self.model.add(tf.keras.layers.BatchNormalization())
        self.model.add(tf.keras.layers.MaxPooling2D(pool_size=(2,2), strides=None, padding='same'))
        # Trainable classification head.
        self.model.add(tf.keras.layers.Conv2D(256, kernel_size=(3,3), strides=(1,1), padding='same'))
        self.model.add(tf.keras.layers.Conv2D(256, kernel_size=(3,3), strides=(1,1), padding='same'))
        self.model.add(tf.keras.layers.Conv2D(256, kernel_size=(3,3), strides=(1,1), padding='same'))
        self.model.add(tf.keras.layers.Flatten())
        self.model.add(tf.keras.layers.Dense(512, activation='relu'))
        self.model.add(tf.keras.layers.Dense(512, activation='relu'))
        # Softmax over the 150 candidate font classes.
        self.model.add(tf.keras.layers.Dense(150, activation='softmax'))
        self.optimizer = tf.keras.optimizers.Adam(learning_rate = 0.01)

    def call(self, inputs):
        """Run the network on a batch of preprocessed 96x96 images.

        Returns the per-image class-probability distributions (batch x 150).
        """
        return self.model(inputs)

    def loss_function(self, probs, labels):
        """Return the mean sparse categorical cross-entropy of the batch.

        probs: model output distributions; labels: integer class labels.
        """
        loss = tf.keras.losses.sparse_categorical_crossentropy(labels, probs)
        return tf.reduce_mean(loss)

    def total_accuracy(self, probs, labels):
        """Fraction of the batch whose TOP-1 prediction matches its label.

        NOTE(review): despite the 'top_five' variable names, the slice below
        keeps only the single highest-probability class per image, so this
        computes top-1 accuracy. Also mutates the module-level
        performance_dict (+1 on a hit, -1 on a miss, per label) as a side
        effect.
        """
        acc = 0
        # argsort is ascending, so the last column holds the argmax class index
        top_five = np.argsort(probs, axis = 1)
        top_five = np.array(top_five).reshape((self.batch_size, 150))
        top_five = top_five[:, -1:]  # keep only the top-1 prediction per image
        for i in range (len(labels)):
            if labels[i] not in performance_dict:
                performance_dict[labels[i]] = 0
            if labels[i] in top_five[i]:
                acc += 1
                performance_dict[labels[i]] += 1
            else:
                performance_dict[labels[i]] -= 1
        return (acc / float(self.batch_size))

    def get_top_five(self, predictions):
        """Return the names of the five most likely font families for an image.

        predictions holds one probability row per crop of a single image;
        summing across crops aggregates the votes before ranking.

        NOTE(review): 'json' is not imported at the top of this file — it is
        presumably re-exported by 'from preprocessing import *'; verify.
        """
        predictions = np.sum(predictions, axis = 0)  # (150,) summed crop votes
        top_five = np.argsort(predictions, axis = 0)
        top_five = np.array(top_five)
        top_five = top_five[-5:]  # ascending order: last five are the best five
        with open('150_fonts_backwards.json') as json_file:
            font_subset = json.load(json_file)
        top_five_fonts = []
        for num in top_five:
            top_five_fonts.append(font_subset[str(num)])
        return top_five_fonts
def train(model, train_inputs, train_labels):
    """Run one training pass over the data in fixed-size batches.

    Args:
        model: DeepFont instance (supplies call, loss_function, optimizer,
            trainable_variables, and batch_size).
        train_inputs: preprocessed 96x96 training images.
        train_labels: integer class labels aligned with train_inputs.

    Returns:
        None. Prints the loss every 1000 batches and the epoch average.
    """
    batch_count = len(train_inputs) // model.batch_size
    loss_total = 0
    for batch_idx in range(batch_count):
        start = batch_idx * model.batch_size
        stop = start + model.batch_size
        with tf.GradientTape() as tape:
            probs = model.call(train_inputs[start:stop])
            batch_loss = model.loss_function(probs, train_labels[start:stop])
        loss_total += batch_loss
        if batch_idx % 1000 == 0:
            print("---Batch", batch_idx, " Loss: ", batch_loss)
        grads = tape.gradient(batch_loss, model.trainable_variables)
        model.optimizer.apply_gradients(zip(grads, model.trainable_variables))
    print("****AVERAGE LOSS: ", loss_total / float(batch_count))
def test(model, test_inputs, test_labels):
    """Evaluate model accuracy over the test set, one batch at a time.

    Args:
        model: DeepFont instance (supplies call, total_accuracy, batch_size).
        test_inputs: preprocessed 96x96 test images.
        test_labels: integer class labels aligned with test_inputs.

    Returns:
        Mean per-batch accuracy across all full batches; prints the accuracy
        of every 100th batch as a progress signal.
    """
    batch_count = len(test_inputs) // model.batch_size
    running_acc = 0
    for batch_idx in range(batch_count):
        start = batch_idx * model.batch_size
        stop = start + model.batch_size
        inputs_np = np.array(test_inputs[start:stop])
        labels_np = np.array(test_labels[start:stop])
        probs = model.call(inputs_np)  # predictions for this batch
        batch_acc = model.total_accuracy(probs, labels_np)
        if batch_idx % 100 == 0:
            print("batch accuracy", batch_acc)
        running_acc += batch_acc
    return running_acc / float(batch_count)
def test_single_img(model, image_path):
    """Predict and print the model's top-5 font families for one image file.

    The image is altered and resized to 96x96, then split into multiple crops
    (helpers come from preprocessing); predictions are aggregated over crops.
    """
    altered = alter_image(image_path)
    resized = resize_image(altered, 96)
    patches = [patch for patch in generate_crop(resized, 96, 10)]
    predictions = model.call(patches)  # one probability row per crop
    print(predictions.shape)
    print(model.get_top_five(predictions))
## --------------------------------------------------------------------------------------
def main():
    """Entry point: build DeepFont, manage checkpoints, then dispatch on args.mode."""
    model = DeepFont()
    model.load_weights('weights_leaky_relu.h5', by_name=True)

    # Checkpoint bookkeeping: keep the three most recent snapshots.
    checkpoint_dir = './checkpoints_df'
    checkpoint_prefix = os.path.join(checkpoint_dir, "ckpt")  # retained for parity; unused below
    checkpoint = tf.train.Checkpoint(model=model)
    manager = tf.train.CheckpointManager(checkpoint, checkpoint_dir, max_to_keep=3)

    # Make sure the sample-output directory exists.
    if not os.path.exists(args.out_dir):
        os.makedirs(args.out_dir)

    # Any evaluation mode (or the explicit flag) resumes from the latest checkpoint.
    needs_restore = (args.restore_checkpoint or args.mode == 'test'
                     or args.mode == 'single_img')
    if needs_restore:
        print("Running test mode...")
        checkpoint.restore(manager.latest_checkpoint)

    try:
        with tf.device('/device:' + args.device):
            if args.mode == 'train':
                train_inputs, train_labels = get_train_df(
                    './shuffled_train_inputs.hdf5', './shuffled_train_labels.hdf5')
                for epoch in range(0, args.num_epochs):
                    print('========================== EPOCH %d ==========================' % epoch)
                    train(model, train_inputs, train_labels)
                    # Snapshot the weights once per completed epoch.
                    print("**** SAVING CHECKPOINT AT END OF EPOCH ****")
                    manager.save()
            if args.mode == 'test':
                test_inputs, test_labels = get_test_df(
                    "./combined_test_inputs.hdf5", "./combined_test_labels.hdf5")
                print("--test accuracy--", test(model, test_inputs, test_labels))
            if args.mode == "single_img":
                test_single_img(model, './0.png')
    except RuntimeError as e:
        print(e)
# Script entry guard: run main() only when executed directly, not on import.
if __name__ == '__main__':
    main()
|
normal
|
{
"blob_id": "919239391c6f74d0d8627d3b851beb374eb11d25",
"index": 4785,
"step-1": "<mask token>\n\n\nclass DeepFont(tf.keras.Model):\n\n def __init__(self):\n super(DeepFont, self).__init__()\n self.batch_size = 128\n self.model = tf.keras.Sequential()\n self.model.add(tf.keras.layers.Reshape((96, 96, 1)))\n self.model.add(tf.keras.layers.Conv2D(trainable=False, filters=64,\n strides=(2, 2), kernel_size=(3, 3), padding='same', name=\n 'conv_layer1', input_shape=(96, 96, 1)))\n self.model.add(tf.keras.layers.BatchNormalization())\n self.model.add(tf.keras.layers.MaxPooling2D(pool_size=(2, 2),\n strides=None, padding='same'))\n self.model.add(tf.keras.layers.Conv2D(trainable=False, filters=128,\n strides=(1, 1), kernel_size=(3, 3), padding='same', name=\n 'conv_layer2'))\n self.model.add(tf.keras.layers.BatchNormalization())\n self.model.add(tf.keras.layers.MaxPooling2D(pool_size=(2, 2),\n strides=None, padding='same'))\n self.model.add(tf.keras.layers.Conv2D(256, kernel_size=(3, 3),\n strides=(1, 1), padding='same'))\n self.model.add(tf.keras.layers.Conv2D(256, kernel_size=(3, 3),\n strides=(1, 1), padding='same'))\n self.model.add(tf.keras.layers.Conv2D(256, kernel_size=(3, 3),\n strides=(1, 1), padding='same'))\n self.model.add(tf.keras.layers.Flatten())\n self.model.add(tf.keras.layers.Dense(512, activation='relu'))\n self.model.add(tf.keras.layers.Dense(512, activation='relu'))\n self.model.add(tf.keras.layers.Dense(150, activation='softmax'))\n self.optimizer = tf.keras.optimizers.Adam(learning_rate=0.01)\n\n def call(self, inputs):\n \"\"\" input: batch of preprocessed 96x96 images\n\t\t\toutput: probabilities for each batch image and its classification distribution\n\n\t\t\tRuns the model on a batch of inputs.\n\t\t\"\"\"\n return self.model(inputs)\n\n def loss_function(self, probs, labels):\n \"\"\" input: probs - probabilities generated by the model\n\t\t\t\t labels - true labels for every imag\n\t\t\toutput: return loss of the batch being processed\n\n\t\t\tUses sparse categorical crossentropy loss.\n\t\t\"\"\"\n loss = 
tf.keras.losses.sparse_categorical_crossentropy(labels, probs)\n return tf.reduce_mean(loss)\n\n def total_accuracy(self, probs, labels):\n \"\"\" input: probs - batch of probs (batch size x 150)\n\t\t\t\t\t labels - batch of true labels for images(batch size x 150)\n\t\t\toutput: the accuracy of the model (+1 if correct label) over a batch\n\t\t\"\"\"\n acc = 0\n top_five = np.argsort(probs, axis=1)\n top_five = np.array(top_five).reshape((self.batch_size, 150))\n top_five = top_five[:, -1:]\n for i in range(len(labels)):\n if labels[i] not in performance_dict:\n performance_dict[labels[i]] = 0\n if labels[i] in top_five[i]:\n acc += 1\n performance_dict[labels[i]] += 1\n else:\n performance_dict[labels[i]] -= 1\n return acc / float(self.batch_size)\n\n def get_top_five(self, predictions):\n \"\"\" input: predictions - prbs generated by the model\n\t\t\toutput: array of top 5 font families that the model thinks the image belongs to\n\n\t\t\tRuns the model on a batch of inputs.\n\t\t\"\"\"\n predictions = np.sum(predictions, axis=0)\n top_five = np.argsort(predictions, axis=0)\n top_five = np.array(top_five)\n top_five = top_five[-5:]\n with open('150_fonts_backwards.json') as json_file:\n font_subset = json.load(json_file)\n top_five_fonts = []\n for num in top_five:\n top_five_fonts.append(font_subset[str(num)])\n return top_five_fonts\n\n\ndef train(model, train_inputs, train_labels):\n \"\"\" input: train_inputs - batch of training images\n\t\t\t train_labels - batch of training labels\n\t\toutput: none\n\n\t\tTrains the model for a certain number of batches.\n\t\"\"\"\n average_loss = 0\n num_batches = len(train_inputs) // model.batch_size\n for i in range(num_batches):\n with tf.GradientTape() as tape:\n temp_inputs = train_inputs[i * model.batch_size:(i + 1) * model\n .batch_size]\n temp_train_labels = train_labels[i * model.batch_size:(i + 1) *\n model.batch_size]\n predictions = model.call(temp_inputs)\n loss = model.loss_function(predictions, 
temp_train_labels)\n average_loss += loss\n if i % 1000 == 0:\n print('---Batch', i, ' Loss: ', loss)\n gradients = tape.gradient(loss, model.trainable_variables)\n model.optimizer.apply_gradients(zip(gradients, model.\n trainable_variables))\n print('****AVERAGE LOSS: ', average_loss / float(num_batches))\n\n\ndef test(model, test_inputs, test_labels):\n \"\"\" input: test_inputs - batch of testing images\n\t\t\t test_labels - batch of testing labels\n\t\toutput: accuracy across the entire set of batches\n\n\t\tTests the training inputs against the model's prediction of what font class it thinks each training image\n\t\tbelongs to.\n\t\"\"\"\n num_batches = len(test_inputs) // model.batch_size\n acc = 0\n for i in range(num_batches):\n batch_inputs = test_inputs[i * model.batch_size:(i + 1) * model.\n batch_size]\n batch_labels = test_labels[i * model.batch_size:(i + 1) * model.\n batch_size]\n batch_inputs = np.array(batch_inputs)\n batch_labels = np.array(batch_labels)\n predictions = model.call(batch_inputs)\n batch_accuracy = model.total_accuracy(predictions, batch_labels)\n if i % 100 == 0:\n print('batch accuracy', batch_accuracy)\n acc += batch_accuracy\n average_accuracy = acc / float(num_batches)\n return average_accuracy\n\n\ndef test_single_img(model, image_path):\n \"\"\" input: image_path - the image path of whatever image file you would like to test\n\t\toutput: none\n\n\t\tPrints the top 5 fonts the model predicts for a particular image.\n\t\"\"\"\n crops = []\n image = alter_image(image_path)\n image = resize_image(image, 96)\n cropped_images = generate_crop(image, 96, 10)\n for c in cropped_images:\n crops.append(c)\n predictions = model.call(crops)\n print(predictions.shape)\n top_5 = model.get_top_five(predictions)\n print(top_5)\n\n\ndef main():\n model = DeepFont()\n model.load_weights('weights_leaky_relu.h5', by_name=True)\n checkpoint_dir = './checkpoints_df'\n checkpoint_prefix = os.path.join(checkpoint_dir, 'ckpt')\n checkpoint = 
tf.train.Checkpoint(model=model)\n manager = tf.train.CheckpointManager(checkpoint, checkpoint_dir,\n max_to_keep=3)\n if not os.path.exists(args.out_dir):\n os.makedirs(args.out_dir)\n if (args.restore_checkpoint or args.mode == 'test' or args.mode ==\n 'single_img'):\n print('Running test mode...')\n checkpoint.restore(manager.latest_checkpoint)\n try:\n with tf.device('/device:' + args.device):\n if args.mode == 'train':\n train_inputs, train_labels = get_train_df(\n './shuffled_train_inputs.hdf5',\n './shuffled_train_labels.hdf5')\n for epoch in range(0, args.num_epochs):\n print(\n '========================== EPOCH %d =========================='\n % epoch)\n train(model, train_inputs, train_labels)\n print('**** SAVING CHECKPOINT AT END OF EPOCH ****')\n manager.save()\n if args.mode == 'test':\n test_inputs, test_labels = get_test_df(\n './combined_test_inputs.hdf5',\n './combined_test_labels.hdf5')\n print('--test accuracy--', test(model, test_inputs,\n test_labels))\n if args.mode == 'single_img':\n test_single_img(model, './0.png')\n except RuntimeError as e:\n print(e)\n\n\n<mask token>\n",
"step-2": "<mask token>\nsys.path.append('../data')\n<mask token>\nprint('GPU Available: ', gpu_available)\n<mask token>\nparser.add_argument('--img-dir', type=str, default='./data/celebA', help=\n 'Data where training images live')\nparser.add_argument('--out-dir', type=str, default='./output', help=\n 'Data where sampled output images will be written')\nparser.add_argument('--mode', type=str, default='train', help=\n 'Can be \"train\" or \"test\"')\nparser.add_argument('--restore-checkpoint', action='store_true', help=\n 'Use this flag if you want to resuming training from a previously-saved checkpoint'\n )\nparser.add_argument('--z-dim', type=int, default=100, help=\n 'Dimensionality of the latent space')\nparser.add_argument('--batch-size', type=int, default=128, help=\n 'Sizes of image batches fed through the network')\nparser.add_argument('--num-data-threads', type=int, default=2, help=\n 'Number of threads to use when loading & pre-processing training images')\nparser.add_argument('--num-epochs', type=int, default=10, help=\n 'Number of passes through the training data to make before stopping')\nparser.add_argument('--learn-rate', type=float, default=0.0002, help=\n 'Learning rate for Adam optimizer')\nparser.add_argument('--beta1', type=float, default=0.5, help=\n '\"beta1\" parameter for Adam optimizer')\nparser.add_argument('--num-gen-updates', type=int, default=2, help=\n 'Number of generator updates per discriminator update')\nparser.add_argument('--log-every', type=int, default=7, help=\n 'Print losses after every [this many] training iterations')\nparser.add_argument('--save-every', type=int, default=500, help=\n 'Save the state of the network after every [this many] training iterations'\n )\nparser.add_argument('--device', type=str, default='GPU:0' if gpu_available else\n 'CPU:0', help=\n 'specific the device of computation eg. CPU:0, GPU:0, GPU:1, GPU:2, ... 
')\n<mask token>\n\n\nclass DeepFont(tf.keras.Model):\n\n def __init__(self):\n super(DeepFont, self).__init__()\n self.batch_size = 128\n self.model = tf.keras.Sequential()\n self.model.add(tf.keras.layers.Reshape((96, 96, 1)))\n self.model.add(tf.keras.layers.Conv2D(trainable=False, filters=64,\n strides=(2, 2), kernel_size=(3, 3), padding='same', name=\n 'conv_layer1', input_shape=(96, 96, 1)))\n self.model.add(tf.keras.layers.BatchNormalization())\n self.model.add(tf.keras.layers.MaxPooling2D(pool_size=(2, 2),\n strides=None, padding='same'))\n self.model.add(tf.keras.layers.Conv2D(trainable=False, filters=128,\n strides=(1, 1), kernel_size=(3, 3), padding='same', name=\n 'conv_layer2'))\n self.model.add(tf.keras.layers.BatchNormalization())\n self.model.add(tf.keras.layers.MaxPooling2D(pool_size=(2, 2),\n strides=None, padding='same'))\n self.model.add(tf.keras.layers.Conv2D(256, kernel_size=(3, 3),\n strides=(1, 1), padding='same'))\n self.model.add(tf.keras.layers.Conv2D(256, kernel_size=(3, 3),\n strides=(1, 1), padding='same'))\n self.model.add(tf.keras.layers.Conv2D(256, kernel_size=(3, 3),\n strides=(1, 1), padding='same'))\n self.model.add(tf.keras.layers.Flatten())\n self.model.add(tf.keras.layers.Dense(512, activation='relu'))\n self.model.add(tf.keras.layers.Dense(512, activation='relu'))\n self.model.add(tf.keras.layers.Dense(150, activation='softmax'))\n self.optimizer = tf.keras.optimizers.Adam(learning_rate=0.01)\n\n def call(self, inputs):\n \"\"\" input: batch of preprocessed 96x96 images\n\t\t\toutput: probabilities for each batch image and its classification distribution\n\n\t\t\tRuns the model on a batch of inputs.\n\t\t\"\"\"\n return self.model(inputs)\n\n def loss_function(self, probs, labels):\n \"\"\" input: probs - probabilities generated by the model\n\t\t\t\t labels - true labels for every imag\n\t\t\toutput: return loss of the batch being processed\n\n\t\t\tUses sparse categorical crossentropy loss.\n\t\t\"\"\"\n loss = 
tf.keras.losses.sparse_categorical_crossentropy(labels, probs)\n return tf.reduce_mean(loss)\n\n def total_accuracy(self, probs, labels):\n \"\"\" input: probs - batch of probs (batch size x 150)\n\t\t\t\t\t labels - batch of true labels for images(batch size x 150)\n\t\t\toutput: the accuracy of the model (+1 if correct label) over a batch\n\t\t\"\"\"\n acc = 0\n top_five = np.argsort(probs, axis=1)\n top_five = np.array(top_five).reshape((self.batch_size, 150))\n top_five = top_five[:, -1:]\n for i in range(len(labels)):\n if labels[i] not in performance_dict:\n performance_dict[labels[i]] = 0\n if labels[i] in top_five[i]:\n acc += 1\n performance_dict[labels[i]] += 1\n else:\n performance_dict[labels[i]] -= 1\n return acc / float(self.batch_size)\n\n def get_top_five(self, predictions):\n \"\"\" input: predictions - prbs generated by the model\n\t\t\toutput: array of top 5 font families that the model thinks the image belongs to\n\n\t\t\tRuns the model on a batch of inputs.\n\t\t\"\"\"\n predictions = np.sum(predictions, axis=0)\n top_five = np.argsort(predictions, axis=0)\n top_five = np.array(top_five)\n top_five = top_five[-5:]\n with open('150_fonts_backwards.json') as json_file:\n font_subset = json.load(json_file)\n top_five_fonts = []\n for num in top_five:\n top_five_fonts.append(font_subset[str(num)])\n return top_five_fonts\n\n\ndef train(model, train_inputs, train_labels):\n \"\"\" input: train_inputs - batch of training images\n\t\t\t train_labels - batch of training labels\n\t\toutput: none\n\n\t\tTrains the model for a certain number of batches.\n\t\"\"\"\n average_loss = 0\n num_batches = len(train_inputs) // model.batch_size\n for i in range(num_batches):\n with tf.GradientTape() as tape:\n temp_inputs = train_inputs[i * model.batch_size:(i + 1) * model\n .batch_size]\n temp_train_labels = train_labels[i * model.batch_size:(i + 1) *\n model.batch_size]\n predictions = model.call(temp_inputs)\n loss = model.loss_function(predictions, 
temp_train_labels)\n average_loss += loss\n if i % 1000 == 0:\n print('---Batch', i, ' Loss: ', loss)\n gradients = tape.gradient(loss, model.trainable_variables)\n model.optimizer.apply_gradients(zip(gradients, model.\n trainable_variables))\n print('****AVERAGE LOSS: ', average_loss / float(num_batches))\n\n\ndef test(model, test_inputs, test_labels):\n \"\"\" input: test_inputs - batch of testing images\n\t\t\t test_labels - batch of testing labels\n\t\toutput: accuracy across the entire set of batches\n\n\t\tTests the training inputs against the model's prediction of what font class it thinks each training image\n\t\tbelongs to.\n\t\"\"\"\n num_batches = len(test_inputs) // model.batch_size\n acc = 0\n for i in range(num_batches):\n batch_inputs = test_inputs[i * model.batch_size:(i + 1) * model.\n batch_size]\n batch_labels = test_labels[i * model.batch_size:(i + 1) * model.\n batch_size]\n batch_inputs = np.array(batch_inputs)\n batch_labels = np.array(batch_labels)\n predictions = model.call(batch_inputs)\n batch_accuracy = model.total_accuracy(predictions, batch_labels)\n if i % 100 == 0:\n print('batch accuracy', batch_accuracy)\n acc += batch_accuracy\n average_accuracy = acc / float(num_batches)\n return average_accuracy\n\n\ndef test_single_img(model, image_path):\n \"\"\" input: image_path - the image path of whatever image file you would like to test\n\t\toutput: none\n\n\t\tPrints the top 5 fonts the model predicts for a particular image.\n\t\"\"\"\n crops = []\n image = alter_image(image_path)\n image = resize_image(image, 96)\n cropped_images = generate_crop(image, 96, 10)\n for c in cropped_images:\n crops.append(c)\n predictions = model.call(crops)\n print(predictions.shape)\n top_5 = model.get_top_five(predictions)\n print(top_5)\n\n\ndef main():\n model = DeepFont()\n model.load_weights('weights_leaky_relu.h5', by_name=True)\n checkpoint_dir = './checkpoints_df'\n checkpoint_prefix = os.path.join(checkpoint_dir, 'ckpt')\n checkpoint = 
tf.train.Checkpoint(model=model)\n manager = tf.train.CheckpointManager(checkpoint, checkpoint_dir,\n max_to_keep=3)\n if not os.path.exists(args.out_dir):\n os.makedirs(args.out_dir)\n if (args.restore_checkpoint or args.mode == 'test' or args.mode ==\n 'single_img'):\n print('Running test mode...')\n checkpoint.restore(manager.latest_checkpoint)\n try:\n with tf.device('/device:' + args.device):\n if args.mode == 'train':\n train_inputs, train_labels = get_train_df(\n './shuffled_train_inputs.hdf5',\n './shuffled_train_labels.hdf5')\n for epoch in range(0, args.num_epochs):\n print(\n '========================== EPOCH %d =========================='\n % epoch)\n train(model, train_inputs, train_labels)\n print('**** SAVING CHECKPOINT AT END OF EPOCH ****')\n manager.save()\n if args.mode == 'test':\n test_inputs, test_labels = get_test_df(\n './combined_test_inputs.hdf5',\n './combined_test_labels.hdf5')\n print('--test accuracy--', test(model, test_inputs,\n test_labels))\n if args.mode == 'single_img':\n test_single_img(model, './0.png')\n except RuntimeError as e:\n print(e)\n\n\nif __name__ == '__main__':\n main()\n",
"step-3": "<mask token>\nsys.path.append('../data')\n<mask token>\nos.environ['TF_CPP_MIN_LOG_LEVEL'] = '2'\ngpu_available = tf.test.is_gpu_available()\nprint('GPU Available: ', gpu_available)\nperformance_dict = {}\nparser = argparse.ArgumentParser(description='DCGAN')\nparser.add_argument('--img-dir', type=str, default='./data/celebA', help=\n 'Data where training images live')\nparser.add_argument('--out-dir', type=str, default='./output', help=\n 'Data where sampled output images will be written')\nparser.add_argument('--mode', type=str, default='train', help=\n 'Can be \"train\" or \"test\"')\nparser.add_argument('--restore-checkpoint', action='store_true', help=\n 'Use this flag if you want to resuming training from a previously-saved checkpoint'\n )\nparser.add_argument('--z-dim', type=int, default=100, help=\n 'Dimensionality of the latent space')\nparser.add_argument('--batch-size', type=int, default=128, help=\n 'Sizes of image batches fed through the network')\nparser.add_argument('--num-data-threads', type=int, default=2, help=\n 'Number of threads to use when loading & pre-processing training images')\nparser.add_argument('--num-epochs', type=int, default=10, help=\n 'Number of passes through the training data to make before stopping')\nparser.add_argument('--learn-rate', type=float, default=0.0002, help=\n 'Learning rate for Adam optimizer')\nparser.add_argument('--beta1', type=float, default=0.5, help=\n '\"beta1\" parameter for Adam optimizer')\nparser.add_argument('--num-gen-updates', type=int, default=2, help=\n 'Number of generator updates per discriminator update')\nparser.add_argument('--log-every', type=int, default=7, help=\n 'Print losses after every [this many] training iterations')\nparser.add_argument('--save-every', type=int, default=500, help=\n 'Save the state of the network after every [this many] training iterations'\n )\nparser.add_argument('--device', type=str, default='GPU:0' if gpu_available else\n 'CPU:0', help=\n 'specific the 
device of computation eg. CPU:0, GPU:0, GPU:1, GPU:2, ... ')\nargs = parser.parse_args()\n\n\nclass DeepFont(tf.keras.Model):\n\n def __init__(self):\n super(DeepFont, self).__init__()\n self.batch_size = 128\n self.model = tf.keras.Sequential()\n self.model.add(tf.keras.layers.Reshape((96, 96, 1)))\n self.model.add(tf.keras.layers.Conv2D(trainable=False, filters=64,\n strides=(2, 2), kernel_size=(3, 3), padding='same', name=\n 'conv_layer1', input_shape=(96, 96, 1)))\n self.model.add(tf.keras.layers.BatchNormalization())\n self.model.add(tf.keras.layers.MaxPooling2D(pool_size=(2, 2),\n strides=None, padding='same'))\n self.model.add(tf.keras.layers.Conv2D(trainable=False, filters=128,\n strides=(1, 1), kernel_size=(3, 3), padding='same', name=\n 'conv_layer2'))\n self.model.add(tf.keras.layers.BatchNormalization())\n self.model.add(tf.keras.layers.MaxPooling2D(pool_size=(2, 2),\n strides=None, padding='same'))\n self.model.add(tf.keras.layers.Conv2D(256, kernel_size=(3, 3),\n strides=(1, 1), padding='same'))\n self.model.add(tf.keras.layers.Conv2D(256, kernel_size=(3, 3),\n strides=(1, 1), padding='same'))\n self.model.add(tf.keras.layers.Conv2D(256, kernel_size=(3, 3),\n strides=(1, 1), padding='same'))\n self.model.add(tf.keras.layers.Flatten())\n self.model.add(tf.keras.layers.Dense(512, activation='relu'))\n self.model.add(tf.keras.layers.Dense(512, activation='relu'))\n self.model.add(tf.keras.layers.Dense(150, activation='softmax'))\n self.optimizer = tf.keras.optimizers.Adam(learning_rate=0.01)\n\n def call(self, inputs):\n \"\"\" input: batch of preprocessed 96x96 images\n\t\t\toutput: probabilities for each batch image and its classification distribution\n\n\t\t\tRuns the model on a batch of inputs.\n\t\t\"\"\"\n return self.model(inputs)\n\n def loss_function(self, probs, labels):\n \"\"\" input: probs - probabilities generated by the model\n\t\t\t\t labels - true labels for every imag\n\t\t\toutput: return loss of the batch being processed\n\n\t\t\tUses 
sparse categorical crossentropy loss.\n\t\t\"\"\"\n loss = tf.keras.losses.sparse_categorical_crossentropy(labels, probs)\n return tf.reduce_mean(loss)\n\n def total_accuracy(self, probs, labels):\n \"\"\" input: probs - batch of probs (batch size x 150)\n\t\t\t\t\t labels - batch of true labels for images(batch size x 150)\n\t\t\toutput: the accuracy of the model (+1 if correct label) over a batch\n\t\t\"\"\"\n acc = 0\n top_five = np.argsort(probs, axis=1)\n top_five = np.array(top_five).reshape((self.batch_size, 150))\n top_five = top_five[:, -1:]\n for i in range(len(labels)):\n if labels[i] not in performance_dict:\n performance_dict[labels[i]] = 0\n if labels[i] in top_five[i]:\n acc += 1\n performance_dict[labels[i]] += 1\n else:\n performance_dict[labels[i]] -= 1\n return acc / float(self.batch_size)\n\n def get_top_five(self, predictions):\n \"\"\" input: predictions - prbs generated by the model\n\t\t\toutput: array of top 5 font families that the model thinks the image belongs to\n\n\t\t\tRuns the model on a batch of inputs.\n\t\t\"\"\"\n predictions = np.sum(predictions, axis=0)\n top_five = np.argsort(predictions, axis=0)\n top_five = np.array(top_five)\n top_five = top_five[-5:]\n with open('150_fonts_backwards.json') as json_file:\n font_subset = json.load(json_file)\n top_five_fonts = []\n for num in top_five:\n top_five_fonts.append(font_subset[str(num)])\n return top_five_fonts\n\n\ndef train(model, train_inputs, train_labels):\n \"\"\" input: train_inputs - batch of training images\n\t\t\t train_labels - batch of training labels\n\t\toutput: none\n\n\t\tTrains the model for a certain number of batches.\n\t\"\"\"\n average_loss = 0\n num_batches = len(train_inputs) // model.batch_size\n for i in range(num_batches):\n with tf.GradientTape() as tape:\n temp_inputs = train_inputs[i * model.batch_size:(i + 1) * model\n .batch_size]\n temp_train_labels = train_labels[i * model.batch_size:(i + 1) *\n model.batch_size]\n predictions = 
model.call(temp_inputs)\n loss = model.loss_function(predictions, temp_train_labels)\n average_loss += loss\n if i % 1000 == 0:\n print('---Batch', i, ' Loss: ', loss)\n gradients = tape.gradient(loss, model.trainable_variables)\n model.optimizer.apply_gradients(zip(gradients, model.\n trainable_variables))\n print('****AVERAGE LOSS: ', average_loss / float(num_batches))\n\n\ndef test(model, test_inputs, test_labels):\n \"\"\" input: test_inputs - batch of testing images\n\t\t\t test_labels - batch of testing labels\n\t\toutput: accuracy across the entire set of batches\n\n\t\tTests the training inputs against the model's prediction of what font class it thinks each training image\n\t\tbelongs to.\n\t\"\"\"\n num_batches = len(test_inputs) // model.batch_size\n acc = 0\n for i in range(num_batches):\n batch_inputs = test_inputs[i * model.batch_size:(i + 1) * model.\n batch_size]\n batch_labels = test_labels[i * model.batch_size:(i + 1) * model.\n batch_size]\n batch_inputs = np.array(batch_inputs)\n batch_labels = np.array(batch_labels)\n predictions = model.call(batch_inputs)\n batch_accuracy = model.total_accuracy(predictions, batch_labels)\n if i % 100 == 0:\n print('batch accuracy', batch_accuracy)\n acc += batch_accuracy\n average_accuracy = acc / float(num_batches)\n return average_accuracy\n\n\ndef test_single_img(model, image_path):\n \"\"\" input: image_path - the image path of whatever image file you would like to test\n\t\toutput: none\n\n\t\tPrints the top 5 fonts the model predicts for a particular image.\n\t\"\"\"\n crops = []\n image = alter_image(image_path)\n image = resize_image(image, 96)\n cropped_images = generate_crop(image, 96, 10)\n for c in cropped_images:\n crops.append(c)\n predictions = model.call(crops)\n print(predictions.shape)\n top_5 = model.get_top_five(predictions)\n print(top_5)\n\n\ndef main():\n model = DeepFont()\n model.load_weights('weights_leaky_relu.h5', by_name=True)\n checkpoint_dir = './checkpoints_df'\n 
checkpoint_prefix = os.path.join(checkpoint_dir, 'ckpt')\n checkpoint = tf.train.Checkpoint(model=model)\n manager = tf.train.CheckpointManager(checkpoint, checkpoint_dir,\n max_to_keep=3)\n if not os.path.exists(args.out_dir):\n os.makedirs(args.out_dir)\n if (args.restore_checkpoint or args.mode == 'test' or args.mode ==\n 'single_img'):\n print('Running test mode...')\n checkpoint.restore(manager.latest_checkpoint)\n try:\n with tf.device('/device:' + args.device):\n if args.mode == 'train':\n train_inputs, train_labels = get_train_df(\n './shuffled_train_inputs.hdf5',\n './shuffled_train_labels.hdf5')\n for epoch in range(0, args.num_epochs):\n print(\n '========================== EPOCH %d =========================='\n % epoch)\n train(model, train_inputs, train_labels)\n print('**** SAVING CHECKPOINT AT END OF EPOCH ****')\n manager.save()\n if args.mode == 'test':\n test_inputs, test_labels = get_test_df(\n './combined_test_inputs.hdf5',\n './combined_test_labels.hdf5')\n print('--test accuracy--', test(model, test_inputs,\n test_labels))\n if args.mode == 'single_img':\n test_single_img(model, './0.png')\n except RuntimeError as e:\n print(e)\n\n\nif __name__ == '__main__':\n main()\n",
"step-4": "import tensorflow as tf\nfrom tensorflow.keras import Model\nfrom tensorflow.keras.layers import Dense, Flatten, Conv2D, BatchNormalization, LeakyReLU, Reshape, Conv2DTranspose\nimport tensorflow_hub as hub\nfrom collections import Counter\nimport numpy as np\nimport sys\nsys.path.append('../data')\nfrom imageio import imwrite\nimport os\nimport argparse\nfrom preprocessing import *\nos.environ['TF_CPP_MIN_LOG_LEVEL'] = '2'\ngpu_available = tf.test.is_gpu_available()\nprint('GPU Available: ', gpu_available)\nperformance_dict = {}\nparser = argparse.ArgumentParser(description='DCGAN')\nparser.add_argument('--img-dir', type=str, default='./data/celebA', help=\n 'Data where training images live')\nparser.add_argument('--out-dir', type=str, default='./output', help=\n 'Data where sampled output images will be written')\nparser.add_argument('--mode', type=str, default='train', help=\n 'Can be \"train\" or \"test\"')\nparser.add_argument('--restore-checkpoint', action='store_true', help=\n 'Use this flag if you want to resuming training from a previously-saved checkpoint'\n )\nparser.add_argument('--z-dim', type=int, default=100, help=\n 'Dimensionality of the latent space')\nparser.add_argument('--batch-size', type=int, default=128, help=\n 'Sizes of image batches fed through the network')\nparser.add_argument('--num-data-threads', type=int, default=2, help=\n 'Number of threads to use when loading & pre-processing training images')\nparser.add_argument('--num-epochs', type=int, default=10, help=\n 'Number of passes through the training data to make before stopping')\nparser.add_argument('--learn-rate', type=float, default=0.0002, help=\n 'Learning rate for Adam optimizer')\nparser.add_argument('--beta1', type=float, default=0.5, help=\n '\"beta1\" parameter for Adam optimizer')\nparser.add_argument('--num-gen-updates', type=int, default=2, help=\n 'Number of generator updates per discriminator update')\nparser.add_argument('--log-every', type=int, default=7, 
help=\n 'Print losses after every [this many] training iterations')\nparser.add_argument('--save-every', type=int, default=500, help=\n 'Save the state of the network after every [this many] training iterations'\n )\nparser.add_argument('--device', type=str, default='GPU:0' if gpu_available else\n 'CPU:0', help=\n 'specific the device of computation eg. CPU:0, GPU:0, GPU:1, GPU:2, ... ')\nargs = parser.parse_args()\n\n\nclass DeepFont(tf.keras.Model):\n\n def __init__(self):\n super(DeepFont, self).__init__()\n self.batch_size = 128\n self.model = tf.keras.Sequential()\n self.model.add(tf.keras.layers.Reshape((96, 96, 1)))\n self.model.add(tf.keras.layers.Conv2D(trainable=False, filters=64,\n strides=(2, 2), kernel_size=(3, 3), padding='same', name=\n 'conv_layer1', input_shape=(96, 96, 1)))\n self.model.add(tf.keras.layers.BatchNormalization())\n self.model.add(tf.keras.layers.MaxPooling2D(pool_size=(2, 2),\n strides=None, padding='same'))\n self.model.add(tf.keras.layers.Conv2D(trainable=False, filters=128,\n strides=(1, 1), kernel_size=(3, 3), padding='same', name=\n 'conv_layer2'))\n self.model.add(tf.keras.layers.BatchNormalization())\n self.model.add(tf.keras.layers.MaxPooling2D(pool_size=(2, 2),\n strides=None, padding='same'))\n self.model.add(tf.keras.layers.Conv2D(256, kernel_size=(3, 3),\n strides=(1, 1), padding='same'))\n self.model.add(tf.keras.layers.Conv2D(256, kernel_size=(3, 3),\n strides=(1, 1), padding='same'))\n self.model.add(tf.keras.layers.Conv2D(256, kernel_size=(3, 3),\n strides=(1, 1), padding='same'))\n self.model.add(tf.keras.layers.Flatten())\n self.model.add(tf.keras.layers.Dense(512, activation='relu'))\n self.model.add(tf.keras.layers.Dense(512, activation='relu'))\n self.model.add(tf.keras.layers.Dense(150, activation='softmax'))\n self.optimizer = tf.keras.optimizers.Adam(learning_rate=0.01)\n\n def call(self, inputs):\n \"\"\" input: batch of preprocessed 96x96 images\n\t\t\toutput: probabilities for each batch image and its 
classification distribution\n\n\t\t\tRuns the model on a batch of inputs.\n\t\t\"\"\"\n return self.model(inputs)\n\n def loss_function(self, probs, labels):\n \"\"\" input: probs - probabilities generated by the model\n\t\t\t\t labels - true labels for every imag\n\t\t\toutput: return loss of the batch being processed\n\n\t\t\tUses sparse categorical crossentropy loss.\n\t\t\"\"\"\n loss = tf.keras.losses.sparse_categorical_crossentropy(labels, probs)\n return tf.reduce_mean(loss)\n\n def total_accuracy(self, probs, labels):\n \"\"\" input: probs - batch of probs (batch size x 150)\n\t\t\t\t\t labels - batch of true labels for images(batch size x 150)\n\t\t\toutput: the accuracy of the model (+1 if correct label) over a batch\n\t\t\"\"\"\n acc = 0\n top_five = np.argsort(probs, axis=1)\n top_five = np.array(top_five).reshape((self.batch_size, 150))\n top_five = top_five[:, -1:]\n for i in range(len(labels)):\n if labels[i] not in performance_dict:\n performance_dict[labels[i]] = 0\n if labels[i] in top_five[i]:\n acc += 1\n performance_dict[labels[i]] += 1\n else:\n performance_dict[labels[i]] -= 1\n return acc / float(self.batch_size)\n\n def get_top_five(self, predictions):\n \"\"\" input: predictions - prbs generated by the model\n\t\t\toutput: array of top 5 font families that the model thinks the image belongs to\n\n\t\t\tRuns the model on a batch of inputs.\n\t\t\"\"\"\n predictions = np.sum(predictions, axis=0)\n top_five = np.argsort(predictions, axis=0)\n top_five = np.array(top_five)\n top_five = top_five[-5:]\n with open('150_fonts_backwards.json') as json_file:\n font_subset = json.load(json_file)\n top_five_fonts = []\n for num in top_five:\n top_five_fonts.append(font_subset[str(num)])\n return top_five_fonts\n\n\ndef train(model, train_inputs, train_labels):\n \"\"\" input: train_inputs - batch of training images\n\t\t\t train_labels - batch of training labels\n\t\toutput: none\n\n\t\tTrains the model for a certain number of batches.\n\t\"\"\"\n 
average_loss = 0\n num_batches = len(train_inputs) // model.batch_size\n for i in range(num_batches):\n with tf.GradientTape() as tape:\n temp_inputs = train_inputs[i * model.batch_size:(i + 1) * model\n .batch_size]\n temp_train_labels = train_labels[i * model.batch_size:(i + 1) *\n model.batch_size]\n predictions = model.call(temp_inputs)\n loss = model.loss_function(predictions, temp_train_labels)\n average_loss += loss\n if i % 1000 == 0:\n print('---Batch', i, ' Loss: ', loss)\n gradients = tape.gradient(loss, model.trainable_variables)\n model.optimizer.apply_gradients(zip(gradients, model.\n trainable_variables))\n print('****AVERAGE LOSS: ', average_loss / float(num_batches))\n\n\ndef test(model, test_inputs, test_labels):\n \"\"\" input: test_inputs - batch of testing images\n\t\t\t test_labels - batch of testing labels\n\t\toutput: accuracy across the entire set of batches\n\n\t\tTests the training inputs against the model's prediction of what font class it thinks each training image\n\t\tbelongs to.\n\t\"\"\"\n num_batches = len(test_inputs) // model.batch_size\n acc = 0\n for i in range(num_batches):\n batch_inputs = test_inputs[i * model.batch_size:(i + 1) * model.\n batch_size]\n batch_labels = test_labels[i * model.batch_size:(i + 1) * model.\n batch_size]\n batch_inputs = np.array(batch_inputs)\n batch_labels = np.array(batch_labels)\n predictions = model.call(batch_inputs)\n batch_accuracy = model.total_accuracy(predictions, batch_labels)\n if i % 100 == 0:\n print('batch accuracy', batch_accuracy)\n acc += batch_accuracy\n average_accuracy = acc / float(num_batches)\n return average_accuracy\n\n\ndef test_single_img(model, image_path):\n \"\"\" input: image_path - the image path of whatever image file you would like to test\n\t\toutput: none\n\n\t\tPrints the top 5 fonts the model predicts for a particular image.\n\t\"\"\"\n crops = []\n image = alter_image(image_path)\n image = resize_image(image, 96)\n cropped_images = generate_crop(image, 96, 
10)\n for c in cropped_images:\n crops.append(c)\n predictions = model.call(crops)\n print(predictions.shape)\n top_5 = model.get_top_five(predictions)\n print(top_5)\n\n\ndef main():\n model = DeepFont()\n model.load_weights('weights_leaky_relu.h5', by_name=True)\n checkpoint_dir = './checkpoints_df'\n checkpoint_prefix = os.path.join(checkpoint_dir, 'ckpt')\n checkpoint = tf.train.Checkpoint(model=model)\n manager = tf.train.CheckpointManager(checkpoint, checkpoint_dir,\n max_to_keep=3)\n if not os.path.exists(args.out_dir):\n os.makedirs(args.out_dir)\n if (args.restore_checkpoint or args.mode == 'test' or args.mode ==\n 'single_img'):\n print('Running test mode...')\n checkpoint.restore(manager.latest_checkpoint)\n try:\n with tf.device('/device:' + args.device):\n if args.mode == 'train':\n train_inputs, train_labels = get_train_df(\n './shuffled_train_inputs.hdf5',\n './shuffled_train_labels.hdf5')\n for epoch in range(0, args.num_epochs):\n print(\n '========================== EPOCH %d =========================='\n % epoch)\n train(model, train_inputs, train_labels)\n print('**** SAVING CHECKPOINT AT END OF EPOCH ****')\n manager.save()\n if args.mode == 'test':\n test_inputs, test_labels = get_test_df(\n './combined_test_inputs.hdf5',\n './combined_test_labels.hdf5')\n print('--test accuracy--', test(model, test_inputs,\n test_labels))\n if args.mode == 'single_img':\n test_single_img(model, './0.png')\n except RuntimeError as e:\n print(e)\n\n\nif __name__ == '__main__':\n main()\n",
"step-5": "import tensorflow as tf\nfrom tensorflow.keras import Model\nfrom tensorflow.keras.layers import Dense, Flatten, Conv2D, BatchNormalization, LeakyReLU, Reshape, Conv2DTranspose\nimport tensorflow_hub as hub\nfrom collections import Counter\nimport numpy as np\n\nimport sys\nsys.path.append('../data')\n\nfrom imageio import imwrite\nimport os\nimport argparse\nfrom preprocessing import *\n\n# this time, katherine is here T_TTTT\n\n\n# Killing optional CPU driver warnings\nos.environ['TF_CPP_MIN_LOG_LEVEL'] = '2'\n\ngpu_available = tf.test.is_gpu_available()\nprint(\"GPU Available: \", gpu_available)\n\n\nperformance_dict = {}\n\n\nparser = argparse.ArgumentParser(description='DCGAN')\n\nparser.add_argument('--img-dir', type=str, default='./data/celebA',\n\t\t\t\t\thelp='Data where training images live')\n\nparser.add_argument('--out-dir', type=str, default='./output',\n\t\t\t\t\thelp='Data where sampled output images will be written')\n\nparser.add_argument('--mode', type=str, default='train',\n\t\t\t\t\thelp='Can be \"train\" or \"test\"')\n\nparser.add_argument('--restore-checkpoint', action='store_true',\n\t\t\t\t\thelp='Use this flag if you want to resuming training from a previously-saved checkpoint')\n\nparser.add_argument('--z-dim', type=int, default=100,\n\t\t\t\t\thelp='Dimensionality of the latent space')\n\nparser.add_argument('--batch-size', type=int, default=128,\n\t\t\t\t\thelp='Sizes of image batches fed through the network')\n\nparser.add_argument('--num-data-threads', type=int, default=2,\n\t\t\t\t\thelp='Number of threads to use when loading & pre-processing training images')\n\nparser.add_argument('--num-epochs', type=int, default=10,\n\t\t\t\t\thelp='Number of passes through the training data to make before stopping')\n\nparser.add_argument('--learn-rate', type=float, default=0.0002,\n\t\t\t\t\thelp='Learning rate for Adam optimizer')\n\nparser.add_argument('--beta1', type=float, default=0.5,\n\t\t\t\t\thelp='\"beta1\" parameter for 
Adam optimizer')\n\nparser.add_argument('--num-gen-updates', type=int, default=2,\n\t\t\t\t\thelp='Number of generator updates per discriminator update')\n\nparser.add_argument('--log-every', type=int, default=7,\n\t\t\t\t\thelp='Print losses after every [this many] training iterations')\n\nparser.add_argument('--save-every', type=int, default=500,\n\t\t\t\t\thelp='Save the state of the network after every [this many] training iterations')\n\nparser.add_argument('--device', type=str, default='GPU:0' if gpu_available else 'CPU:0',\n\t\t\t\t\thelp='specific the device of computation eg. CPU:0, GPU:0, GPU:1, GPU:2, ... ')\n\nargs = parser.parse_args()\n\n\n\nclass DeepFont(tf.keras.Model):\n\tdef __init__(self):\n\t\tsuper(DeepFont, self).__init__()\n\t\tself.batch_size = 128\n\t\tself.model = tf.keras.Sequential()\n\t\tself.model.add(tf.keras.layers.Reshape((96, 96, 1)))\n\t\tself.model.add(tf.keras.layers.Conv2D(trainable=False, filters=64, strides=(2,2), kernel_size=(3,3), padding='same', name='conv_layer1', input_shape=(96, 96,1)))\n\t\tself.model.add(tf.keras.layers.BatchNormalization())\n\t\tself.model.add(tf.keras.layers.MaxPooling2D(pool_size=(2,2), strides=None, padding='same'))\n\n\t\tself.model.add(tf.keras.layers.Conv2D(trainable=False, filters=128, strides=(1,1), kernel_size=(3,3), padding='same', name='conv_layer2'))\n\t\tself.model.add(tf.keras.layers.BatchNormalization())\n\t\tself.model.add(tf.keras.layers.MaxPooling2D(pool_size=(2,2), strides=None, padding='same'))\n\n\t\tself.model.add(tf.keras.layers.Conv2D(256, kernel_size=(3,3), strides=(1,1), padding='same'))\n\t\tself.model.add(tf.keras.layers.Conv2D(256, kernel_size=(3,3), strides=(1,1), padding='same'))\n\t\tself.model.add(tf.keras.layers.Conv2D(256, kernel_size=(3,3), strides=(1,1), padding='same'))\n\n\t\tself.model.add(tf.keras.layers.Flatten())\n\t\tself.model.add(tf.keras.layers.Dense(512, activation='relu'))\n\t\tself.model.add(tf.keras.layers.Dense(512, 
activation='relu'))\n\t\tself.model.add(tf.keras.layers.Dense(150, activation='softmax'))\n\n\t\tself.optimizer = tf.keras.optimizers.Adam(learning_rate = 0.01)\n\n\tdef call(self, inputs):\n\t\t\"\"\" input: batch of preprocessed 96x96 images\n\t\t\toutput: probabilities for each batch image and its classification distribution\n\n\t\t\tRuns the model on a batch of inputs.\n\t\t\"\"\"\n\t\treturn self.model(inputs)\n\n\tdef loss_function(self, probs, labels):\n\t\t\"\"\" input: probs - probabilities generated by the model\n\t\t\t\t labels - true labels for every imag\n\t\t\toutput: return loss of the batch being processed\n\n\t\t\tUses sparse categorical crossentropy loss.\n\t\t\"\"\"\n\t\tloss = tf.keras.losses.sparse_categorical_crossentropy(labels, probs)\n\t\treturn tf.reduce_mean(loss)\n\n\tdef total_accuracy(self, probs, labels):\n\t\t\"\"\" input: probs - batch of probs (batch size x 150)\n\t\t\t\t\t labels - batch of true labels for images(batch size x 150)\n\t\t\toutput: the accuracy of the model (+1 if correct label) over a batch\n\t\t\"\"\"\n\t\tacc = 0\n\n\t\ttop_five = np.argsort(probs, axis = 1) # 256 x 150\n\t\ttop_five = np.array(top_five).reshape((self.batch_size, 150))\n\t\ttop_five = top_five[:, -1:] # 5 x 150\n\n\t\tfor i in range (len(labels)):\n\t\t\tif labels[i] not in performance_dict:\n\t\t\t\tperformance_dict[labels[i]] = 0\n\n\t\t\tif labels[i] in top_five[i]:\n\t\t\t\tacc += 1\n\t\t\t\tperformance_dict[labels[i]] += 1\n\t\t\telse:\n\t\t\t\tperformance_dict[labels[i]] -= 1\n\n\t\treturn (acc / float(self.batch_size))\n\n\tdef get_top_five(self, predictions):\n\t\t\"\"\" input: predictions - prbs generated by the model\n\t\t\toutput: array of top 5 font families that the model thinks the image belongs to\n\n\t\t\tRuns the model on a batch of inputs.\n\t\t\"\"\"\n\t\tpredictions = np.sum(predictions, axis = 0) # sums the columns of the logits shape is (150,)\n\n\t\ttop_five = np.argsort(predictions, axis = 0)\n\t\ttop_five = 
np.array(top_five)\n\t\ttop_five = top_five[-5:]\n\n\t\twith open('150_fonts_backwards.json') as json_file:\n\t\t\tfont_subset = json.load(json_file)\n\n\t\ttop_five_fonts = []\n\t\tfor num in top_five:\n\t\t\ttop_five_fonts.append(font_subset[str(num)])\n\t\treturn top_five_fonts\n\ndef train(model, train_inputs, train_labels):\n\t\"\"\" input: train_inputs - batch of training images\n\t\t\t train_labels - batch of training labels\n\t\toutput: none\n\n\t\tTrains the model for a certain number of batches.\n\t\"\"\"\n\taverage_loss = 0\n\tnum_batches = len(train_inputs)//model.batch_size\n\tfor i in range(num_batches):\n\t\twith tf.GradientTape() as tape:\n\t\t\ttemp_inputs = train_inputs[i*model.batch_size:(i+1)*model.batch_size]\n\t\t\ttemp_train_labels = train_labels[i*model.batch_size:(i+1)*model.batch_size]\n\n\t\t\tpredictions = model.call(temp_inputs)\n\t\t\tloss = model.loss_function(predictions, temp_train_labels)\n\t\t\taverage_loss += loss\n\t\t\tif i % 1000 == 0:\n\t\t\t\tprint(\"---Batch\", i, \" Loss: \", loss)\n\t\tgradients = tape.gradient(loss, model.trainable_variables)\n\t\tmodel.optimizer.apply_gradients(zip(gradients, model.trainable_variables))\n\tprint(\"****AVERAGE LOSS: \", average_loss / float(num_batches))\n\n\ndef test(model, test_inputs, test_labels):\n\t\"\"\" input: test_inputs - batch of testing images\n\t\t\t test_labels - batch of testing labels\n\t\toutput: accuracy across the entire set of batches\n\n\t\tTests the training inputs against the model's prediction of what font class it thinks each training image\n\t\tbelongs to.\n\t\"\"\"\n\tnum_batches = len(test_inputs) // (model.batch_size)\n\n\n\tacc = 0\n\tfor i in range(num_batches):\n\t\tbatch_inputs = test_inputs[i * model.batch_size: (i+1) * model.batch_size]\n\t\tbatch_labels = test_labels[i * model.batch_size: (i+1) * model.batch_size]\n\n\t\tbatch_inputs = np.array(batch_inputs)\n\t\tbatch_labels = np.array(batch_labels)\n\n\t\tpredictions = model.call(batch_inputs) # 
prediction for a single image\n\n\t\tbatch_accuracy = model.total_accuracy(predictions, batch_labels)\n\n\t\tif i % 100 == 0:\n\t\t\tprint(\"batch accuracy\", batch_accuracy)\n\t\tacc += batch_accuracy\n\n\taverage_accuracy = acc / float(num_batches)\n\n\treturn average_accuracy\n\ndef test_single_img(model, image_path):\n\t\"\"\" input: image_path - the image path of whatever image file you would like to test\n\t\toutput: none\n\n\t\tPrints the top 5 fonts the model predicts for a particular image.\n\t\"\"\"\n\tcrops = []\n\n\timage = alter_image(image_path)\n\timage = resize_image(image, 96)\n\tcropped_images = generate_crop(image, 96, 10)\n\n\tfor c in cropped_images:\n\t\tcrops.append(c)\n\n\tpredictions = model.call(crops) # prediction for a single image\n\tprint(predictions.shape)\n\ttop_5 = model.get_top_five(predictions)\n\tprint(top_5)\n\n## --------------------------------------------------------------------------------------\n\ndef main():\n\n\tmodel = DeepFont()\n\tmodel.load_weights('weights_leaky_relu.h5', by_name=True)\n\n\t# For saving/loading models\n\tcheckpoint_dir = './checkpoints_df'\n\tcheckpoint_prefix = os.path.join(checkpoint_dir, \"ckpt\")\n\tcheckpoint = tf.train.Checkpoint(model = model)\n\tmanager = tf.train.CheckpointManager(checkpoint, checkpoint_dir, max_to_keep=3)\n\t# Ensure the output directory exists\n\tif not os.path.exists(args.out_dir):\n\t\tos.makedirs(args.out_dir)\n\n\tif args.restore_checkpoint or args.mode == 'test' or args.mode == 'single_img':\n\t\t# restores the lates checkpoint using from the manager\n\t\tprint(\"Running test mode...\")\n\t\tcheckpoint.restore(manager.latest_checkpoint)\n\n\ttry:\n\t\t# Specify an invalid GPU device\n\t\twith tf.device('/device:' + args.device):\n\t\t\tif args.mode == 'train':\n\t\t\t\ttrain_inputs, train_labels = get_train_df('./shuffled_train_inputs.hdf5', './shuffled_train_labels.hdf5')\n\t\t\t\tfor epoch in range(0, args.num_epochs):\n\t\t\t\t\tprint('========================== 
EPOCH %d ==========================' % epoch)\n\t\t\t\t\ttrain(model, train_inputs, train_labels)\n\t\t\t\t\t# Save at the end of the epoch, too\n\t\t\t\t\tprint(\"**** SAVING CHECKPOINT AT END OF EPOCH ****\")\n\t\t\t\t\tmanager.save()\n\t\t\tif args.mode == 'test':\n\t\t\t\ttest_inputs, test_labels = get_test_df(\"./combined_test_inputs.hdf5\", \"./combined_test_labels.hdf5\")\n\t\t\t\tprint(\"--test accuracy--\", test(model, test_inputs, test_labels))\n\t\t\tif args.mode == \"single_img\":\n\t\t\t\ttest_single_img(model, './0.png')\n\texcept RuntimeError as e:\n\t\tprint(e)\n\nif __name__ == '__main__':\n main()\n",
"step-ids": [
10,
11,
12,
13,
14
]
}
|
[
10,
11,
12,
13,
14
] |
'''
@Description:
@Version: 1.0
@Autor: Henggao
@Date: 2020-02-20 16:17:05
@LastEditors: Henggao
@LastEditTime: 2020-02-20 16:32:45
'''
name = "henggao"
def change():
name = "Brill"
print(name)
print(locals())
print(globals())
change()
print(name)
|
normal
|
{
"blob_id": "6c7162a9bd81d618abda204c24031c5a5acc61b4",
"index": 7967,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef change():\n name = 'Brill'\n print(name)\n print(locals())\n print(globals())\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef change():\n name = 'Brill'\n print(name)\n print(locals())\n print(globals())\n\n\nchange()\nprint(name)\n",
"step-4": "<mask token>\nname = 'henggao'\n\n\ndef change():\n name = 'Brill'\n print(name)\n print(locals())\n print(globals())\n\n\nchange()\nprint(name)\n",
"step-5": "'''\n@Description: \n@Version: 1.0\n@Autor: Henggao\n@Date: 2020-02-20 16:17:05\n@LastEditors: Henggao\n@LastEditTime: 2020-02-20 16:32:45\n'''\nname = \"henggao\"\ndef change():\n name = \"Brill\"\n print(name)\n print(locals())\n print(globals())\n \nchange() \n\nprint(name)",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
#! /usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.test import TestCase
from django.core.urlresolvers import reverse
from google_product_feeder.feed import CSVMerchantFeed, MERCHANT_FEED_COLUMNS
CSV_HEADINGS = ','.join(MERCHANT_FEED_COLUMNS) + '\r\n'
class AttrNameFakeModel(object):
# A fake model that returns the attribute name upon attribute access.
def __getattr__(self, name):
return name
class EmptyFakeModel(object):
# A fake model with no attributes.
def __getattr__(self, name):
raise AttributeError
class UppercaseBrandFeed(CSVMerchantFeed):
def get_brand(self, obj):
return obj.brand.upper()
class CSVMerchantFeedTest(TestCase):
def test_csv_empty(self):
feed = CSVMerchantFeed([])
output = feed.get_content()
self.assertEquals(output, CSV_HEADINGS)
def test_csv(self):
feed = CSVMerchantFeed([AttrNameFakeModel()])
output = feed.get_content()
self.assertEquals(output, CSV_HEADINGS * 2)
def test_csv_missing_attribute(self):
feed = CSVMerchantFeed([EmptyFakeModel()])
output = feed.get_content()
empty_data_row = ',' * (len(MERCHANT_FEED_COLUMNS) - 1) + '\r\n'
self.assertEquals(output, CSV_HEADINGS + empty_data_row)
def test_csv_with_get_method(self):
feed = UppercaseBrandFeed([AttrNameFakeModel()])
output = feed.get_content()
data_row = CSV_HEADINGS.replace('brand', 'BRAND')
self.assertEquals(output, CSV_HEADINGS + data_row)
class CSVFeedViewTest(TestCase):
def test_view_empty(self):
url = reverse('google_feed')
response = self.client.get(url)
self.assertEquals(response.content, CSV_HEADINGS)
def test_has_correct_headers(self):
# content-type is 'text/csv', content-disposition is 'attachment',
# filename is 'google.csv'
url = reverse('google_feed')
response = self.client.get(url)
self.assertEqual(response['Content-Type'],
'text/csv')
self.assertEqual(response['Content-Disposition'],
'attachment; filename="google.csv"')
|
normal
|
{
"blob_id": "924fd89a835528fa28e1226912a2e4be9c4e1d5d",
"index": 152,
"step-1": "<mask token>\n\n\nclass UppercaseBrandFeed(CSVMerchantFeed):\n\n def get_brand(self, obj):\n return obj.brand.upper()\n\n\nclass CSVMerchantFeedTest(TestCase):\n\n def test_csv_empty(self):\n feed = CSVMerchantFeed([])\n output = feed.get_content()\n self.assertEquals(output, CSV_HEADINGS)\n\n def test_csv(self):\n feed = CSVMerchantFeed([AttrNameFakeModel()])\n output = feed.get_content()\n self.assertEquals(output, CSV_HEADINGS * 2)\n\n def test_csv_missing_attribute(self):\n feed = CSVMerchantFeed([EmptyFakeModel()])\n output = feed.get_content()\n empty_data_row = ',' * (len(MERCHANT_FEED_COLUMNS) - 1) + '\\r\\n'\n self.assertEquals(output, CSV_HEADINGS + empty_data_row)\n\n def test_csv_with_get_method(self):\n feed = UppercaseBrandFeed([AttrNameFakeModel()])\n output = feed.get_content()\n data_row = CSV_HEADINGS.replace('brand', 'BRAND')\n self.assertEquals(output, CSV_HEADINGS + data_row)\n\n\nclass CSVFeedViewTest(TestCase):\n\n def test_view_empty(self):\n url = reverse('google_feed')\n response = self.client.get(url)\n self.assertEquals(response.content, CSV_HEADINGS)\n\n def test_has_correct_headers(self):\n url = reverse('google_feed')\n response = self.client.get(url)\n self.assertEqual(response['Content-Type'], 'text/csv')\n self.assertEqual(response['Content-Disposition'],\n 'attachment; filename=\"google.csv\"')\n",
"step-2": "<mask token>\n\n\nclass AttrNameFakeModel(object):\n <mask token>\n\n\nclass EmptyFakeModel(object):\n\n def __getattr__(self, name):\n raise AttributeError\n\n\nclass UppercaseBrandFeed(CSVMerchantFeed):\n\n def get_brand(self, obj):\n return obj.brand.upper()\n\n\nclass CSVMerchantFeedTest(TestCase):\n\n def test_csv_empty(self):\n feed = CSVMerchantFeed([])\n output = feed.get_content()\n self.assertEquals(output, CSV_HEADINGS)\n\n def test_csv(self):\n feed = CSVMerchantFeed([AttrNameFakeModel()])\n output = feed.get_content()\n self.assertEquals(output, CSV_HEADINGS * 2)\n\n def test_csv_missing_attribute(self):\n feed = CSVMerchantFeed([EmptyFakeModel()])\n output = feed.get_content()\n empty_data_row = ',' * (len(MERCHANT_FEED_COLUMNS) - 1) + '\\r\\n'\n self.assertEquals(output, CSV_HEADINGS + empty_data_row)\n\n def test_csv_with_get_method(self):\n feed = UppercaseBrandFeed([AttrNameFakeModel()])\n output = feed.get_content()\n data_row = CSV_HEADINGS.replace('brand', 'BRAND')\n self.assertEquals(output, CSV_HEADINGS + data_row)\n\n\nclass CSVFeedViewTest(TestCase):\n\n def test_view_empty(self):\n url = reverse('google_feed')\n response = self.client.get(url)\n self.assertEquals(response.content, CSV_HEADINGS)\n\n def test_has_correct_headers(self):\n url = reverse('google_feed')\n response = self.client.get(url)\n self.assertEqual(response['Content-Type'], 'text/csv')\n self.assertEqual(response['Content-Disposition'],\n 'attachment; filename=\"google.csv\"')\n",
"step-3": "<mask token>\n\n\nclass AttrNameFakeModel(object):\n\n def __getattr__(self, name):\n return name\n\n\nclass EmptyFakeModel(object):\n\n def __getattr__(self, name):\n raise AttributeError\n\n\nclass UppercaseBrandFeed(CSVMerchantFeed):\n\n def get_brand(self, obj):\n return obj.brand.upper()\n\n\nclass CSVMerchantFeedTest(TestCase):\n\n def test_csv_empty(self):\n feed = CSVMerchantFeed([])\n output = feed.get_content()\n self.assertEquals(output, CSV_HEADINGS)\n\n def test_csv(self):\n feed = CSVMerchantFeed([AttrNameFakeModel()])\n output = feed.get_content()\n self.assertEquals(output, CSV_HEADINGS * 2)\n\n def test_csv_missing_attribute(self):\n feed = CSVMerchantFeed([EmptyFakeModel()])\n output = feed.get_content()\n empty_data_row = ',' * (len(MERCHANT_FEED_COLUMNS) - 1) + '\\r\\n'\n self.assertEquals(output, CSV_HEADINGS + empty_data_row)\n\n def test_csv_with_get_method(self):\n feed = UppercaseBrandFeed([AttrNameFakeModel()])\n output = feed.get_content()\n data_row = CSV_HEADINGS.replace('brand', 'BRAND')\n self.assertEquals(output, CSV_HEADINGS + data_row)\n\n\nclass CSVFeedViewTest(TestCase):\n\n def test_view_empty(self):\n url = reverse('google_feed')\n response = self.client.get(url)\n self.assertEquals(response.content, CSV_HEADINGS)\n\n def test_has_correct_headers(self):\n url = reverse('google_feed')\n response = self.client.get(url)\n self.assertEqual(response['Content-Type'], 'text/csv')\n self.assertEqual(response['Content-Disposition'],\n 'attachment; filename=\"google.csv\"')\n",
"step-4": "from __future__ import unicode_literals\nfrom django.test import TestCase\nfrom django.core.urlresolvers import reverse\nfrom google_product_feeder.feed import CSVMerchantFeed, MERCHANT_FEED_COLUMNS\nCSV_HEADINGS = ','.join(MERCHANT_FEED_COLUMNS) + '\\r\\n'\n\n\nclass AttrNameFakeModel(object):\n\n def __getattr__(self, name):\n return name\n\n\nclass EmptyFakeModel(object):\n\n def __getattr__(self, name):\n raise AttributeError\n\n\nclass UppercaseBrandFeed(CSVMerchantFeed):\n\n def get_brand(self, obj):\n return obj.brand.upper()\n\n\nclass CSVMerchantFeedTest(TestCase):\n\n def test_csv_empty(self):\n feed = CSVMerchantFeed([])\n output = feed.get_content()\n self.assertEquals(output, CSV_HEADINGS)\n\n def test_csv(self):\n feed = CSVMerchantFeed([AttrNameFakeModel()])\n output = feed.get_content()\n self.assertEquals(output, CSV_HEADINGS * 2)\n\n def test_csv_missing_attribute(self):\n feed = CSVMerchantFeed([EmptyFakeModel()])\n output = feed.get_content()\n empty_data_row = ',' * (len(MERCHANT_FEED_COLUMNS) - 1) + '\\r\\n'\n self.assertEquals(output, CSV_HEADINGS + empty_data_row)\n\n def test_csv_with_get_method(self):\n feed = UppercaseBrandFeed([AttrNameFakeModel()])\n output = feed.get_content()\n data_row = CSV_HEADINGS.replace('brand', 'BRAND')\n self.assertEquals(output, CSV_HEADINGS + data_row)\n\n\nclass CSVFeedViewTest(TestCase):\n\n def test_view_empty(self):\n url = reverse('google_feed')\n response = self.client.get(url)\n self.assertEquals(response.content, CSV_HEADINGS)\n\n def test_has_correct_headers(self):\n url = reverse('google_feed')\n response = self.client.get(url)\n self.assertEqual(response['Content-Type'], 'text/csv')\n self.assertEqual(response['Content-Disposition'],\n 'attachment; filename=\"google.csv\"')\n",
"step-5": "#! /usr/bin/env python\n# -*- coding: utf-8 -*-\nfrom __future__ import unicode_literals\n\nfrom django.test import TestCase\nfrom django.core.urlresolvers import reverse\n\nfrom google_product_feeder.feed import CSVMerchantFeed, MERCHANT_FEED_COLUMNS\n\n\nCSV_HEADINGS = ','.join(MERCHANT_FEED_COLUMNS) + '\\r\\n'\n\n\nclass AttrNameFakeModel(object):\n # A fake model that returns the attribute name upon attribute access.\n def __getattr__(self, name):\n return name\n\n\nclass EmptyFakeModel(object):\n # A fake model with no attributes.\n def __getattr__(self, name):\n raise AttributeError\n\n\nclass UppercaseBrandFeed(CSVMerchantFeed):\n def get_brand(self, obj):\n return obj.brand.upper()\n\n\nclass CSVMerchantFeedTest(TestCase):\n\n def test_csv_empty(self):\n feed = CSVMerchantFeed([])\n output = feed.get_content()\n self.assertEquals(output, CSV_HEADINGS)\n\n def test_csv(self):\n feed = CSVMerchantFeed([AttrNameFakeModel()])\n output = feed.get_content()\n self.assertEquals(output, CSV_HEADINGS * 2)\n\n def test_csv_missing_attribute(self):\n feed = CSVMerchantFeed([EmptyFakeModel()])\n output = feed.get_content()\n empty_data_row = ',' * (len(MERCHANT_FEED_COLUMNS) - 1) + '\\r\\n'\n self.assertEquals(output, CSV_HEADINGS + empty_data_row)\n\n def test_csv_with_get_method(self):\n feed = UppercaseBrandFeed([AttrNameFakeModel()])\n output = feed.get_content()\n data_row = CSV_HEADINGS.replace('brand', 'BRAND')\n self.assertEquals(output, CSV_HEADINGS + data_row)\n\n\nclass CSVFeedViewTest(TestCase):\n\n def test_view_empty(self):\n url = reverse('google_feed')\n response = self.client.get(url)\n self.assertEquals(response.content, CSV_HEADINGS)\n\n def test_has_correct_headers(self):\n # content-type is 'text/csv', content-disposition is 'attachment',\n # filename is 'google.csv'\n url = reverse('google_feed')\n response = self.client.get(url)\n self.assertEqual(response['Content-Type'],\n 'text/csv')\n 
self.assertEqual(response['Content-Disposition'],\n 'attachment; filename=\"google.csv\"')\n",
"step-ids": [
10,
13,
14,
16,
17
]
}
|
[
10,
13,
14,
16,
17
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
class Solution:
<|reserved_special_token_0|>
<|reserved_special_token_1|>
class Solution:
def sumSubarrayMins(self, A: List[int]) ->int:
stack = []
prev = [None] * len(A)
for i in range(len(A)):
while stack and A[stack[-1]] >= A[i]:
stack.pop()
prev[i] = stack[-1] if stack else -1
stack.append(i)
stack = []
nex = [None] * len(A)
for i in range(len(A) - 1, -1, -1):
while stack and A[stack[-1]] > A[i]:
stack.pop()
nex[i] = stack[-1] if stack else len(A)
stack.append(i)
return sum((i - prev[i]) * (nex[i] - i) * A[i] for i in range(len(A))
) % (10 ** 9 + 7)
|
flexible
|
{
"blob_id": "97029ac9f05037bf9304dacf86c35f5534d887c4",
"index": 8303,
"step-1": "<mask token>\n",
"step-2": "class Solution:\n <mask token>\n",
"step-3": "class Solution:\n\n def sumSubarrayMins(self, A: List[int]) ->int:\n stack = []\n prev = [None] * len(A)\n for i in range(len(A)):\n while stack and A[stack[-1]] >= A[i]:\n stack.pop()\n prev[i] = stack[-1] if stack else -1\n stack.append(i)\n stack = []\n nex = [None] * len(A)\n for i in range(len(A) - 1, -1, -1):\n while stack and A[stack[-1]] > A[i]:\n stack.pop()\n nex[i] = stack[-1] if stack else len(A)\n stack.append(i)\n return sum((i - prev[i]) * (nex[i] - i) * A[i] for i in range(len(A))\n ) % (10 ** 9 + 7)\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
# -*- coding: utf-8 -*-
"""
Created on Mon Jul 29 20:33:32 2013
@author: ste
"""
#Convert input file for graph from adjacency list version, where each line is
#vertex adjacent adjacent adjacent ...
#to edge representation where each line is
#tail head
edges=[]
with open("/Users/ste/Desktop/Ste/Python/AlgorithmsCourse/KargerMinCut.txt") as v_list_file:
for line in v_list_file:
node=map(int, line.split())
for adjacent in node[1:]:
edges.append([node[0], adjacent])
with open("/Users/ste/Desktop/Ste/C++/Programs/AlgorithmCourse/GraphSearch/KargerMinCut(edges).txt", "w+") as outfile:
for edge in edges:
outfile.write(str(edge[0])+' '+str(edge[1])+'\n')
|
normal
|
{
"blob_id": "1b7b94a0331e2462f83f4f77bcfaefbeefdf24f4",
"index": 3754,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nwith open('/Users/ste/Desktop/Ste/Python/AlgorithmsCourse/KargerMinCut.txt'\n ) as v_list_file:\n for line in v_list_file:\n node = map(int, line.split())\n for adjacent in node[1:]:\n edges.append([node[0], adjacent])\nwith open(\n '/Users/ste/Desktop/Ste/C++/Programs/AlgorithmCourse/GraphSearch/KargerMinCut(edges).txt'\n , 'w+') as outfile:\n for edge in edges:\n outfile.write(str(edge[0]) + ' ' + str(edge[1]) + '\\n')\n",
"step-3": "<mask token>\nedges = []\nwith open('/Users/ste/Desktop/Ste/Python/AlgorithmsCourse/KargerMinCut.txt'\n ) as v_list_file:\n for line in v_list_file:\n node = map(int, line.split())\n for adjacent in node[1:]:\n edges.append([node[0], adjacent])\nwith open(\n '/Users/ste/Desktop/Ste/C++/Programs/AlgorithmCourse/GraphSearch/KargerMinCut(edges).txt'\n , 'w+') as outfile:\n for edge in edges:\n outfile.write(str(edge[0]) + ' ' + str(edge[1]) + '\\n')\n",
"step-4": "# -*- coding: utf-8 -*-\n\"\"\"\nCreated on Mon Jul 29 20:33:32 2013\n\n@author: ste\n\"\"\"\n\n#Convert input file for graph from adjacency list version, where each line is\n#vertex adjacent adjacent adjacent ...\n#to edge representation where each line is\n#tail head\n\nedges=[]\nwith open(\"/Users/ste/Desktop/Ste/Python/AlgorithmsCourse/KargerMinCut.txt\") as v_list_file:\n for line in v_list_file:\n node=map(int, line.split())\n for adjacent in node[1:]:\n edges.append([node[0], adjacent])\n\nwith open(\"/Users/ste/Desktop/Ste/C++/Programs/AlgorithmCourse/GraphSearch/KargerMinCut(edges).txt\", \"w+\") as outfile:\n for edge in edges:\n outfile.write(str(edge[0])+' '+str(edge[1])+'\\n')\n ",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
#recapitulare polimorfism
class Caine:
def sunet(self):
print("ham ham")
class Pisica:
def sunet(self):
print("miau")
def asculta_sunet(tipul_animalului):# astapta obiect tipul animalului
tipul_animalului.sunet()#
CaineObj=Caine()#dau obiect
PisicaObj=Pisica()
asculta_sunet(CaineObj)
asculta_sunet(PisicaObj)
|
normal
|
{
"blob_id": "594fdec916520014faff80dd06c7a5553320664d",
"index": 4746,
"step-1": "class Caine:\n <mask token>\n\n\nclass Pisica:\n\n def sunet(self):\n print('miau')\n\n\n<mask token>\n",
"step-2": "class Caine:\n\n def sunet(self):\n print('ham ham')\n\n\nclass Pisica:\n\n def sunet(self):\n print('miau')\n\n\ndef asculta_sunet(tipul_animalului):\n tipul_animalului.sunet()\n\n\n<mask token>\n",
"step-3": "class Caine:\n\n def sunet(self):\n print('ham ham')\n\n\nclass Pisica:\n\n def sunet(self):\n print('miau')\n\n\ndef asculta_sunet(tipul_animalului):\n tipul_animalului.sunet()\n\n\n<mask token>\nasculta_sunet(CaineObj)\nasculta_sunet(PisicaObj)\n",
"step-4": "class Caine:\n\n def sunet(self):\n print('ham ham')\n\n\nclass Pisica:\n\n def sunet(self):\n print('miau')\n\n\ndef asculta_sunet(tipul_animalului):\n tipul_animalului.sunet()\n\n\nCaineObj = Caine()\nPisicaObj = Pisica()\nasculta_sunet(CaineObj)\nasculta_sunet(PisicaObj)\n",
"step-5": "#recapitulare polimorfism\r\nclass Caine:\r\n def sunet(self):\r\n print(\"ham ham\")\r\nclass Pisica:\r\n def sunet(self):\r\n print(\"miau\")\r\ndef asculta_sunet(tipul_animalului):# astapta obiect tipul animalului\r\n tipul_animalului.sunet()#\r\nCaineObj=Caine()#dau obiect\r\nPisicaObj=Pisica()\r\n\r\nasculta_sunet(CaineObj)\r\nasculta_sunet(PisicaObj)\r\n\r\n\r\n\r\n",
"step-ids": [
3,
5,
6,
7,
8
]
}
|
[
3,
5,
6,
7,
8
] |
from rest_framework import serializers
from api.models.Phones import Phones
class PhoneSerializer(serializers.ModelSerializer):
class Meta:
model = Phones
fields = (
'id', 'number', 'area_code', 'country_code'
)
|
normal
|
{
"blob_id": "e3ba6395a8d7272fc7e5a8be37e6b0b18c355e14",
"index": 9272,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass PhoneSerializer(serializers.ModelSerializer):\n\n\n class Meta:\n model = Phones\n fields = 'id', 'number', 'area_code', 'country_code'\n",
"step-3": "from rest_framework import serializers\nfrom api.models.Phones import Phones\n\n\nclass PhoneSerializer(serializers.ModelSerializer):\n\n\n class Meta:\n model = Phones\n fields = 'id', 'number', 'area_code', 'country_code'\n",
"step-4": "from rest_framework import serializers\n\nfrom api.models.Phones import Phones\n\n\nclass PhoneSerializer(serializers.ModelSerializer):\n class Meta:\n model = Phones\n fields = (\n 'id', 'number', 'area_code', 'country_code'\n )\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
#! /usr/bin/env python
import ldac
from numpy import *
import shearprofile as sp
import sys
import os, subprocess
import pylab
if len(sys.argv) != 6:
sys.stderr.write("wrong number of arguments!\n")
sys.exit(1)
catfile= sys.argv[1]
clusterz=float(sys.argv[2])
center= map(float,sys.argv[3].split(','))
pixscale=float(sys.argv[4]) # arcsec / pix
clustername=sys.argv[5]
catalog= ldac.openObjectFile(catfile)
r, E = sp.calcTangentialShear(catalog, center, pixscale)
beta=sp.beta(catalog["Z_BEST"],clusterz, calcAverage = False)
kappacut = sp.calcWLViolationCut(r, beta, sigma_v = 1300)
radiuscut = r > 60 #arcseconds
largeradiuscut = r < 500
zcut = logical_and(catalog['Z_BEST'] > 1.2*clusterz, catalog['Z_BEST'] < 1.2)
cleancut = logical_and(kappacut, logical_and(radiuscut, logical_and(largeradiuscut,zcut)))
cleancat = catalog.filter(cleancut)
samples = sp.simpleBootstrap(cleancat, clusterz, pixscale, center, beta[cleancut])
r500x = float(subprocess.Popen("grep %s /nfs/slac/g/ki/ki05/anja/SUBARU/clusters.r500x.dat | awk '{print $2}'" % clustername, stdout=subprocess.PIPE, shell=True).communicate()[0])
mass = lambda sigma2,r500x: 2*sigma2*r500x/4.3e-09
masses = [ mass(sigma2, r500x) for sigma2 in samples]
confidenceRegion = sp.ConfidenceRegion(masses)
filebase,ext=os.path.splitext(catfile)
#output = open(filebase+"_profile.dat", 'w')
#for i in xrange(len(r_as)):
# output.write("%f %f %f %f %f\n" % (r_as[i], E[i], Err[i], B[i], Berr[i]))
#output.close()
#veldisp = sqrt( confidenceRegion[0][0] * 4.3e-09 / (3*r500x) )
#veldisperr = (veldisp / 2) * ((confidenceRegion[1][0]+confidenceRegion[2][0])/confidenceRegion[0][0])
#
output = open(filebase+"_sisfit.ml.dat", 'w')
samples = array(samples)
samples[samples < 0] = 0.
veldispersions = sqrt(samples)
veldisp_confidenceregion = sp.ConfidenceRegion(veldispersions[veldispersions > 0])
output = open(filebase+"_sisfit.sigma.dat", 'w')
output.write("M500: %e -%e %e\n" % (confidenceRegion[0][0], confidenceRegion[1][0], confidenceRegion[2][0]))
output.write("sigma: %e -%e %e\n" % (veldisp_confidenceregion[0][0], veldisp_confidenceregion[1][0], veldisp_confidenceregion[2][0]))
output.close()
#sys.stderr.write("sigma: %e -%e %e\n" % (veldisp_confidenceregion[0][0], veldisp_confidenceregion[1][0], veldisp_confidenceregion[2][0]))
#
#pylab.hist(veldispersions[veldispersions > 0], bins=50)
#pylab.show()
#
print '%s %e -%e %e' % (clustername, confidenceRegion[0][0], confidenceRegion[1][0], confidenceRegion[2][0])
|
normal
|
{
"blob_id": "f19d8aa2104240cc93a0146f1b14c635e7cd3a41",
"index": 268,
"step-1": "#! /usr/bin/env python\n\nimport ldac\nfrom numpy import *\nimport shearprofile as sp\nimport sys\nimport os, subprocess\n\nimport pylab\n\n\nif len(sys.argv) != 6:\n sys.stderr.write(\"wrong number of arguments!\\n\")\n sys.exit(1)\ncatfile= sys.argv[1]\nclusterz=float(sys.argv[2])\ncenter= map(float,sys.argv[3].split(','))\npixscale=float(sys.argv[4]) # arcsec / pix\nclustername=sys.argv[5]\n\n\ncatalog= ldac.openObjectFile(catfile)\n\nr, E = sp.calcTangentialShear(catalog, center, pixscale)\n\nbeta=sp.beta(catalog[\"Z_BEST\"],clusterz, calcAverage = False)\n\nkappacut = sp.calcWLViolationCut(r, beta, sigma_v = 1300)\nradiuscut = r > 60 #arcseconds\nlargeradiuscut = r < 500\nzcut = logical_and(catalog['Z_BEST'] > 1.2*clusterz, catalog['Z_BEST'] < 1.2)\n\n\ncleancut = logical_and(kappacut, logical_and(radiuscut, logical_and(largeradiuscut,zcut)))\n\ncleancat = catalog.filter(cleancut)\n\nsamples = sp.simpleBootstrap(cleancat, clusterz, pixscale, center, beta[cleancut])\n\n\nr500x = float(subprocess.Popen(\"grep %s /nfs/slac/g/ki/ki05/anja/SUBARU/clusters.r500x.dat | awk '{print $2}'\" % clustername, stdout=subprocess.PIPE, shell=True).communicate()[0])\n\nmass = lambda sigma2,r500x: 2*sigma2*r500x/4.3e-09\n\nmasses = [ mass(sigma2, r500x) for sigma2 in samples]\n\nconfidenceRegion = sp.ConfidenceRegion(masses)\n\nfilebase,ext=os.path.splitext(catfile)\n\n#output = open(filebase+\"_profile.dat\", 'w')\n#for i in xrange(len(r_as)):\n# output.write(\"%f %f %f %f %f\\n\" % (r_as[i], E[i], Err[i], B[i], Berr[i]))\n#output.close()\n\n#veldisp = sqrt( confidenceRegion[0][0] * 4.3e-09 / (3*r500x) )\n#veldisperr = (veldisp / 2) * ((confidenceRegion[1][0]+confidenceRegion[2][0])/confidenceRegion[0][0])\n#\n\noutput = open(filebase+\"_sisfit.ml.dat\", 'w')\n\nsamples = array(samples)\nsamples[samples < 0] = 0.\n\n\nveldispersions = sqrt(samples)\n\n\nveldisp_confidenceregion = sp.ConfidenceRegion(veldispersions[veldispersions > 0])\n\noutput = 
open(filebase+\"_sisfit.sigma.dat\", 'w')\n\n\n\noutput.write(\"M500: %e -%e %e\\n\" % (confidenceRegion[0][0], confidenceRegion[1][0], confidenceRegion[2][0]))\noutput.write(\"sigma: %e -%e %e\\n\" % (veldisp_confidenceregion[0][0], veldisp_confidenceregion[1][0], veldisp_confidenceregion[2][0]))\noutput.close()\n\n#sys.stderr.write(\"sigma: %e -%e %e\\n\" % (veldisp_confidenceregion[0][0], veldisp_confidenceregion[1][0], veldisp_confidenceregion[2][0]))\n#\n#pylab.hist(veldispersions[veldispersions > 0], bins=50)\n#pylab.show()\n#\nprint '%s %e -%e %e' % (clustername, confidenceRegion[0][0], confidenceRegion[1][0], confidenceRegion[2][0])\n\n\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def main():
simulation = Simulation(particle_count=50, dt=0.016, box_width=250)
FluidRenderer(simulation.box_width, 800, simulation)
arcade.run()
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def main():
simulation = Simulation(particle_count=50, dt=0.016, box_width=250)
FluidRenderer(simulation.box_width, 800, simulation)
arcade.run()
if __name__ == '__main__':
main()
<|reserved_special_token_1|>
from simulating_blobs_of_fluid.simulation import Simulation
from simulating_blobs_of_fluid.fluid_renderer import FluidRenderer
import arcade
def main():
simulation = Simulation(particle_count=50, dt=0.016, box_width=250)
FluidRenderer(simulation.box_width, 800, simulation)
arcade.run()
if __name__ == '__main__':
main()
<|reserved_special_token_1|>
from simulating_blobs_of_fluid.simulation import Simulation
from simulating_blobs_of_fluid.fluid_renderer import FluidRenderer
import arcade
def main():
simulation = Simulation(particle_count=50, dt=0.016, box_width=250)
FluidRenderer(simulation.box_width, 800, simulation)
arcade.run()
if __name__ == "__main__":
main()
|
flexible
|
{
"blob_id": "83733e707a1be131335c4980cdf4beed365eb530",
"index": 6011,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef main():\n simulation = Simulation(particle_count=50, dt=0.016, box_width=250)\n FluidRenderer(simulation.box_width, 800, simulation)\n arcade.run()\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef main():\n simulation = Simulation(particle_count=50, dt=0.016, box_width=250)\n FluidRenderer(simulation.box_width, 800, simulation)\n arcade.run()\n\n\nif __name__ == '__main__':\n main()\n",
"step-4": "from simulating_blobs_of_fluid.simulation import Simulation\nfrom simulating_blobs_of_fluid.fluid_renderer import FluidRenderer\nimport arcade\n\n\ndef main():\n simulation = Simulation(particle_count=50, dt=0.016, box_width=250)\n FluidRenderer(simulation.box_width, 800, simulation)\n arcade.run()\n\n\nif __name__ == '__main__':\n main()\n",
"step-5": "from simulating_blobs_of_fluid.simulation import Simulation\nfrom simulating_blobs_of_fluid.fluid_renderer import FluidRenderer\n\nimport arcade\n\n\ndef main():\n simulation = Simulation(particle_count=50, dt=0.016, box_width=250)\n FluidRenderer(simulation.box_width, 800, simulation)\n\n arcade.run()\n\n\nif __name__ == \"__main__\":\n main()\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
def unit_circle_points(num_samples):
a = 2 * pi / num_samples
return [vec2(cos(a * i), sin(a * i)) for i in range(num_samples)]
def calculate_circle_deviation(spline):
ideal_d = 1.0
center_x = 0.0
center_y = 0.0
deviation = 0.0
for p in spline.control_points:
deviation += sqrt((p.x - center_x) ** 2 + (p.y - center_y) ** 2)
deviation /= len(spline.control_points)
deviation -= ideal_d
return deviation
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def unit_circle_points(num_samples):
a = 2 * pi / num_samples
return [vec2(cos(a * i), sin(a * i)) for i in range(num_samples)]
def calculate_circle_deviation(spline):
ideal_d = 1.0
center_x = 0.0
center_y = 0.0
deviation = 0.0
for p in spline.control_points:
deviation += sqrt((p.x - center_x) ** 2 + (p.y - center_y) ** 2)
deviation /= len(spline.control_points)
deviation -= ideal_d
return deviation
<|reserved_special_token_0|>
p.set_color('blue')
<|reserved_special_token_0|>
sc.set_resolution(900)
sc.add_element(s)
sc.add_element(p)
<|reserved_special_token_0|>
p_circle.set_color('blue')
<|reserved_special_token_0|>
print('The error is: ' + str(error))
sc.write_image()
sc.show()
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def unit_circle_points(num_samples):
a = 2 * pi / num_samples
return [vec2(cos(a * i), sin(a * i)) for i in range(num_samples)]
def calculate_circle_deviation(spline):
ideal_d = 1.0
center_x = 0.0
center_y = 0.0
deviation = 0.0
for p in spline.control_points:
deviation += sqrt((p.x - center_x) ** 2 + (p.y - center_y) ** 2)
deviation /= len(spline.control_points)
deviation -= ideal_d
return deviation
pts = [vec2(0, 2.5), vec2(-1, 1), vec2(1, -1), vec2(0, -2.5), vec2(-1, -1),
vec2(1, 1)]
s = spline.interpolate_cubic_periodic(pts)
p = s.get_polyline_from_control_points()
p.set_color('blue')
sc = scene_2d.scene()
sc.set_resolution(900)
sc.add_element(s)
sc.add_element(p)
n = 100
circle_pts = unit_circle_points(n)
circle = spline.interpolate_cubic_periodic(circle_pts)
p_circle = circle.get_polyline_from_control_points()
p_circle.set_color('blue')
error = calculate_circle_deviation(circle)
print('The error is: ' + str(error))
sc.write_image()
sc.show()
<|reserved_special_token_1|>
from cagd.polyline import polyline
from cagd.spline import spline, knots
from cagd.vec import vec2
import cagd.scene_2d as scene_2d
from math import sin, cos, pi, sqrt
def unit_circle_points(num_samples):
a = 2 * pi / num_samples
return [vec2(cos(a * i), sin(a * i)) for i in range(num_samples)]
def calculate_circle_deviation(spline):
ideal_d = 1.0
center_x = 0.0
center_y = 0.0
deviation = 0.0
for p in spline.control_points:
deviation += sqrt((p.x - center_x) ** 2 + (p.y - center_y) ** 2)
deviation /= len(spline.control_points)
deviation -= ideal_d
return deviation
pts = [vec2(0, 2.5), vec2(-1, 1), vec2(1, -1), vec2(0, -2.5), vec2(-1, -1),
vec2(1, 1)]
s = spline.interpolate_cubic_periodic(pts)
p = s.get_polyline_from_control_points()
p.set_color('blue')
sc = scene_2d.scene()
sc.set_resolution(900)
sc.add_element(s)
sc.add_element(p)
n = 100
circle_pts = unit_circle_points(n)
circle = spline.interpolate_cubic_periodic(circle_pts)
p_circle = circle.get_polyline_from_control_points()
p_circle.set_color('blue')
error = calculate_circle_deviation(circle)
print('The error is: ' + str(error))
sc.write_image()
sc.show()
<|reserved_special_token_1|>
#!/usr/bin/python
from cagd.polyline import polyline
from cagd.spline import spline, knots
from cagd.vec import vec2
import cagd.scene_2d as scene_2d
from math import sin,cos,pi, sqrt
#returns a list of num_samples points that are uniformly distributed on the unit circle
def unit_circle_points(num_samples):
a = 2*pi/num_samples
return [vec2(cos(a*i), sin(a*i)) for i in range(num_samples)]
#calculates the deviation between the given spline and a unit circle
#the Manhattan Metrics is chosen
def calculate_circle_deviation(spline):
ideal_d = 1.0
center_x = 0.0
center_y = 0.0
deviation = 0.0
for p in spline.control_points:
deviation += sqrt((p.x - center_x)**2 + (p.y - center_y)**2)
deviation /= len(spline.control_points)
deviation -= ideal_d
return deviation
#interpolate 6 points with a periodic spline to create the number "8"
pts = [vec2( 0, 2.5), vec2(-1, 1), vec2( 1,-1), vec2( 0,-2.5), vec2(-1,-1), vec2(1,1)]
s = spline.interpolate_cubic_periodic(pts)
p = s.get_polyline_from_control_points()
p.set_color("blue")
sc = scene_2d.scene()
sc.set_resolution(900)
sc.add_element(s)
sc.add_element(p)
#generate a spline that approximates the unit circle
n = 100
circle_pts = unit_circle_points(n)
circle = spline.interpolate_cubic_periodic(circle_pts)
p_circle = circle.get_polyline_from_control_points()
#sc.add_element(circle)
#sc.add_element(p_circle)
p_circle.set_color("blue")
error = calculate_circle_deviation(circle)
print("The error is: " + str(error))
sc.write_image()
sc.show()
|
flexible
|
{
"blob_id": "35e61add90b5c12f94d5f8071f00d98316461dd6",
"index": 8497,
"step-1": "<mask token>\n\n\ndef unit_circle_points(num_samples):\n a = 2 * pi / num_samples\n return [vec2(cos(a * i), sin(a * i)) for i in range(num_samples)]\n\n\ndef calculate_circle_deviation(spline):\n ideal_d = 1.0\n center_x = 0.0\n center_y = 0.0\n deviation = 0.0\n for p in spline.control_points:\n deviation += sqrt((p.x - center_x) ** 2 + (p.y - center_y) ** 2)\n deviation /= len(spline.control_points)\n deviation -= ideal_d\n return deviation\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef unit_circle_points(num_samples):\n a = 2 * pi / num_samples\n return [vec2(cos(a * i), sin(a * i)) for i in range(num_samples)]\n\n\ndef calculate_circle_deviation(spline):\n ideal_d = 1.0\n center_x = 0.0\n center_y = 0.0\n deviation = 0.0\n for p in spline.control_points:\n deviation += sqrt((p.x - center_x) ** 2 + (p.y - center_y) ** 2)\n deviation /= len(spline.control_points)\n deviation -= ideal_d\n return deviation\n\n\n<mask token>\np.set_color('blue')\n<mask token>\nsc.set_resolution(900)\nsc.add_element(s)\nsc.add_element(p)\n<mask token>\np_circle.set_color('blue')\n<mask token>\nprint('The error is: ' + str(error))\nsc.write_image()\nsc.show()\n",
"step-3": "<mask token>\n\n\ndef unit_circle_points(num_samples):\n a = 2 * pi / num_samples\n return [vec2(cos(a * i), sin(a * i)) for i in range(num_samples)]\n\n\ndef calculate_circle_deviation(spline):\n ideal_d = 1.0\n center_x = 0.0\n center_y = 0.0\n deviation = 0.0\n for p in spline.control_points:\n deviation += sqrt((p.x - center_x) ** 2 + (p.y - center_y) ** 2)\n deviation /= len(spline.control_points)\n deviation -= ideal_d\n return deviation\n\n\npts = [vec2(0, 2.5), vec2(-1, 1), vec2(1, -1), vec2(0, -2.5), vec2(-1, -1),\n vec2(1, 1)]\ns = spline.interpolate_cubic_periodic(pts)\np = s.get_polyline_from_control_points()\np.set_color('blue')\nsc = scene_2d.scene()\nsc.set_resolution(900)\nsc.add_element(s)\nsc.add_element(p)\nn = 100\ncircle_pts = unit_circle_points(n)\ncircle = spline.interpolate_cubic_periodic(circle_pts)\np_circle = circle.get_polyline_from_control_points()\np_circle.set_color('blue')\nerror = calculate_circle_deviation(circle)\nprint('The error is: ' + str(error))\nsc.write_image()\nsc.show()\n",
"step-4": "from cagd.polyline import polyline\nfrom cagd.spline import spline, knots\nfrom cagd.vec import vec2\nimport cagd.scene_2d as scene_2d\nfrom math import sin, cos, pi, sqrt\n\n\ndef unit_circle_points(num_samples):\n a = 2 * pi / num_samples\n return [vec2(cos(a * i), sin(a * i)) for i in range(num_samples)]\n\n\ndef calculate_circle_deviation(spline):\n ideal_d = 1.0\n center_x = 0.0\n center_y = 0.0\n deviation = 0.0\n for p in spline.control_points:\n deviation += sqrt((p.x - center_x) ** 2 + (p.y - center_y) ** 2)\n deviation /= len(spline.control_points)\n deviation -= ideal_d\n return deviation\n\n\npts = [vec2(0, 2.5), vec2(-1, 1), vec2(1, -1), vec2(0, -2.5), vec2(-1, -1),\n vec2(1, 1)]\ns = spline.interpolate_cubic_periodic(pts)\np = s.get_polyline_from_control_points()\np.set_color('blue')\nsc = scene_2d.scene()\nsc.set_resolution(900)\nsc.add_element(s)\nsc.add_element(p)\nn = 100\ncircle_pts = unit_circle_points(n)\ncircle = spline.interpolate_cubic_periodic(circle_pts)\np_circle = circle.get_polyline_from_control_points()\np_circle.set_color('blue')\nerror = calculate_circle_deviation(circle)\nprint('The error is: ' + str(error))\nsc.write_image()\nsc.show()\n",
"step-5": "#!/usr/bin/python\n\nfrom cagd.polyline import polyline\nfrom cagd.spline import spline, knots\nfrom cagd.vec import vec2\nimport cagd.scene_2d as scene_2d\nfrom math import sin,cos,pi, sqrt\n\n#returns a list of num_samples points that are uniformly distributed on the unit circle\ndef unit_circle_points(num_samples):\n a = 2*pi/num_samples\n return [vec2(cos(a*i), sin(a*i)) for i in range(num_samples)]\n\n#calculates the deviation between the given spline and a unit circle\n#the Manhattan Metrics is chosen\ndef calculate_circle_deviation(spline):\n ideal_d = 1.0\n center_x = 0.0\n center_y = 0.0\n deviation = 0.0\n for p in spline.control_points:\n deviation += sqrt((p.x - center_x)**2 + (p.y - center_y)**2)\n deviation /= len(spline.control_points)\n deviation -= ideal_d\n return deviation\n\n\n#interpolate 6 points with a periodic spline to create the number \"8\"\npts = [vec2( 0, 2.5), vec2(-1, 1), vec2( 1,-1), vec2( 0,-2.5), vec2(-1,-1), vec2(1,1)]\ns = spline.interpolate_cubic_periodic(pts)\np = s.get_polyline_from_control_points()\np.set_color(\"blue\")\nsc = scene_2d.scene()\nsc.set_resolution(900)\nsc.add_element(s)\nsc.add_element(p)\n\n#generate a spline that approximates the unit circle\nn = 100\ncircle_pts = unit_circle_points(n)\ncircle = spline.interpolate_cubic_periodic(circle_pts)\np_circle = circle.get_polyline_from_control_points()\n#sc.add_element(circle)\n#sc.add_element(p_circle)\np_circle.set_color(\"blue\")\nerror = calculate_circle_deviation(circle)\nprint(\"The error is: \" + str(error))\n\nsc.write_image()\nsc.show()\n",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
import h5py
import sys
f = h5py.File(sys.argv[1], 'r+')
try:
del f['optimizer_weights']
except:
print "done"
f.close()
|
normal
|
{
"blob_id": "3458e1efdc492a08d8272469aa9e3f0ca72c7ba3",
"index": 9146,
"step-1": "import h5py\nimport sys\nf = h5py.File(sys.argv[1], 'r+')\ntry:\n\tdel f['optimizer_weights']\nexcept:\n\tprint \"done\"\nf.close()",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
"""
Read a real number. If it is positive print it's square root, if it's not print the square of it.
"""
import math
print('Insert a number')
num1 = float(input())
if num1 > 0:
print(f'The square root of {num1} is {math.sqrt(num1)}')
else:
print(f'The square of {num1} is {num1**2}')
|
normal
|
{
"blob_id": "a68d682ba6d441b9d7fb69ec1ee318a0ef65ed40",
"index": 3146,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nprint('Insert a number')\n<mask token>\nif num1 > 0:\n print(f'The square root of {num1} is {math.sqrt(num1)}')\nelse:\n print(f'The square of {num1} is {num1 ** 2}')\n",
"step-3": "<mask token>\nprint('Insert a number')\nnum1 = float(input())\nif num1 > 0:\n print(f'The square root of {num1} is {math.sqrt(num1)}')\nelse:\n print(f'The square of {num1} is {num1 ** 2}')\n",
"step-4": "<mask token>\nimport math\nprint('Insert a number')\nnum1 = float(input())\nif num1 > 0:\n print(f'The square root of {num1} is {math.sqrt(num1)}')\nelse:\n print(f'The square of {num1} is {num1 ** 2}')\n",
"step-5": "\"\"\"\n\nRead a real number. If it is positive print it's square root, if it's not print the square of it.\n\n\"\"\"\nimport math\n\nprint('Insert a number')\nnum1 = float(input())\n\nif num1 > 0:\n print(f'The square root of {num1} is {math.sqrt(num1)}')\nelse:\n print(f'The square of {num1} is {num1**2}')\n\n\n\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
class GCI:
def banner():
print('[---- OSINT By FajarTheGGman ----]\n')
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class GCI:
def banner():
print('[---- OSINT By FajarTheGGman ----]\n')
def main():
user = str(input('[!] Input Name Victim ? '))
init = url.PoolManager()
a = init.request('GET', 'https://facebook.com/' + user)
b = init.request('GET', 'https://instagram.com/' + user)
c = init.request('GET', 'https://twitter.com/' + user)
if a.status == 200:
print('[+] ' + user + ' => Found In Facebook')
else:
print('[-] ' + user + ' => NotFound in Facebook')
if b.status == 200:
print('[+] ' + user + ' => Found In Instagram')
else:
print('[-] ' + user + ' => NotFound in Instagram')
if b.status == 200:
print('[+] ' + user + ' => Found In Twitter')
else:
print('[-] ' + user + ' => NotFound in Twitter')
<|reserved_special_token_0|>
x.banner()
x.main()
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class GCI:
def banner():
print('[---- OSINT By FajarTheGGman ----]\n')
def main():
user = str(input('[!] Input Name Victim ? '))
init = url.PoolManager()
a = init.request('GET', 'https://facebook.com/' + user)
b = init.request('GET', 'https://instagram.com/' + user)
c = init.request('GET', 'https://twitter.com/' + user)
if a.status == 200:
print('[+] ' + user + ' => Found In Facebook')
else:
print('[-] ' + user + ' => NotFound in Facebook')
if b.status == 200:
print('[+] ' + user + ' => Found In Instagram')
else:
print('[-] ' + user + ' => NotFound in Instagram')
if b.status == 200:
print('[+] ' + user + ' => Found In Twitter')
else:
print('[-] ' + user + ' => NotFound in Twitter')
x = GCI
x.banner()
x.main()
<|reserved_special_token_1|>
import urllib3 as url
class GCI:
def banner():
print('[---- OSINT By FajarTheGGman ----]\n')
def main():
user = str(input('[!] Input Name Victim ? '))
init = url.PoolManager()
a = init.request('GET', 'https://facebook.com/' + user)
b = init.request('GET', 'https://instagram.com/' + user)
c = init.request('GET', 'https://twitter.com/' + user)
if a.status == 200:
print('[+] ' + user + ' => Found In Facebook')
else:
print('[-] ' + user + ' => NotFound in Facebook')
if b.status == 200:
print('[+] ' + user + ' => Found In Instagram')
else:
print('[-] ' + user + ' => NotFound in Instagram')
if b.status == 200:
print('[+] ' + user + ' => Found In Twitter')
else:
print('[-] ' + user + ' => NotFound in Twitter')
x = GCI
x.banner()
x.main()
<|reserved_special_token_1|>
# OSINT By FajarTheGGman For Google Code-in 2019©
import urllib3 as url
class GCI:
def banner():
print("[---- OSINT By FajarTheGGman ----]\n")
def main():
user = str(input("[!] Input Name Victim ? "))
init = url.PoolManager()
a = init.request("GET", "https://facebook.com/" + user)
b = init.request("GET", "https://instagram.com/" + user)
c = init.request("GET", "https://twitter.com/" + user)
if a.status == 200:
print("[+] " + user + " => Found In Facebook")
else:
print("[-] " + user + " => NotFound in Facebook")
if b.status == 200:
print("[+] " + user + " => Found In Instagram")
else:
print("[-] " + user + " => NotFound in Instagram")
if b.status == 200:
print("[+] " + user + " => Found In Twitter")
else:
print("[-] " + user + " => NotFound in Twitter")
x = GCI
x.banner()
x.main()
|
flexible
|
{
"blob_id": "6c8180d24110045348d9c2041c0cca26fa9ea2d2",
"index": 4318,
"step-1": "<mask token>\n\n\nclass GCI:\n\n def banner():\n print('[---- OSINT By FajarTheGGman ----]\\n')\n <mask token>\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass GCI:\n\n def banner():\n print('[---- OSINT By FajarTheGGman ----]\\n')\n\n def main():\n user = str(input('[!] Input Name Victim ? '))\n init = url.PoolManager()\n a = init.request('GET', 'https://facebook.com/' + user)\n b = init.request('GET', 'https://instagram.com/' + user)\n c = init.request('GET', 'https://twitter.com/' + user)\n if a.status == 200:\n print('[+] ' + user + ' => Found In Facebook')\n else:\n print('[-] ' + user + ' => NotFound in Facebook')\n if b.status == 200:\n print('[+] ' + user + ' => Found In Instagram')\n else:\n print('[-] ' + user + ' => NotFound in Instagram')\n if b.status == 200:\n print('[+] ' + user + ' => Found In Twitter')\n else:\n print('[-] ' + user + ' => NotFound in Twitter')\n\n\n<mask token>\nx.banner()\nx.main()\n",
"step-3": "<mask token>\n\n\nclass GCI:\n\n def banner():\n print('[---- OSINT By FajarTheGGman ----]\\n')\n\n def main():\n user = str(input('[!] Input Name Victim ? '))\n init = url.PoolManager()\n a = init.request('GET', 'https://facebook.com/' + user)\n b = init.request('GET', 'https://instagram.com/' + user)\n c = init.request('GET', 'https://twitter.com/' + user)\n if a.status == 200:\n print('[+] ' + user + ' => Found In Facebook')\n else:\n print('[-] ' + user + ' => NotFound in Facebook')\n if b.status == 200:\n print('[+] ' + user + ' => Found In Instagram')\n else:\n print('[-] ' + user + ' => NotFound in Instagram')\n if b.status == 200:\n print('[+] ' + user + ' => Found In Twitter')\n else:\n print('[-] ' + user + ' => NotFound in Twitter')\n\n\nx = GCI\nx.banner()\nx.main()\n",
"step-4": "import urllib3 as url\n\n\nclass GCI:\n\n def banner():\n print('[---- OSINT By FajarTheGGman ----]\\n')\n\n def main():\n user = str(input('[!] Input Name Victim ? '))\n init = url.PoolManager()\n a = init.request('GET', 'https://facebook.com/' + user)\n b = init.request('GET', 'https://instagram.com/' + user)\n c = init.request('GET', 'https://twitter.com/' + user)\n if a.status == 200:\n print('[+] ' + user + ' => Found In Facebook')\n else:\n print('[-] ' + user + ' => NotFound in Facebook')\n if b.status == 200:\n print('[+] ' + user + ' => Found In Instagram')\n else:\n print('[-] ' + user + ' => NotFound in Instagram')\n if b.status == 200:\n print('[+] ' + user + ' => Found In Twitter')\n else:\n print('[-] ' + user + ' => NotFound in Twitter')\n\n\nx = GCI\nx.banner()\nx.main()\n",
"step-5": "# OSINT By FajarTheGGman For Google Code-in 2019©\r\n\r\nimport urllib3 as url\r\n\r\nclass GCI:\r\n\tdef banner():\r\n\t\tprint(\"[---- OSINT By FajarTheGGman ----]\\n\")\r\n\r\n\tdef main():\r\n\t\tuser = str(input(\"[!] Input Name Victim ? \"))\r\n\t\tinit = url.PoolManager()\r\n\t\ta = init.request(\"GET\", \"https://facebook.com/\" + user)\r\n\t\tb = init.request(\"GET\", \"https://instagram.com/\" + user)\r\n\t\tc = init.request(\"GET\", \"https://twitter.com/\" + user)\r\n\t\tif a.status == 200:\r\n\t\t\tprint(\"[+] \" + user + \" => Found In Facebook\")\r\n\t\telse:\r\n\t\t\tprint(\"[-] \" + user + \" => NotFound in Facebook\")\r\n\r\n\t\tif b.status == 200:\r\n\t\t\tprint(\"[+] \" + user + \" => Found In Instagram\")\r\n\t\telse:\r\n\t\t\tprint(\"[-] \" + user + \" => NotFound in Instagram\")\r\n\r\n\t\tif b.status == 200:\r\n\t\t\tprint(\"[+] \" + user + \" => Found In Twitter\")\r\n\t\telse:\r\n\t\t\tprint(\"[-] \" + user + \" => NotFound in Twitter\")\r\n\r\nx = GCI\r\nx.banner()\r\nx.main()",
"step-ids": [
2,
4,
5,
6,
7
]
}
|
[
2,
4,
5,
6,
7
] |
# -*- coding: utf-8 -*-
"""
@Author: xiezizhe
@Date: 5/7/2020 下午8:52
"""
from typing import List
class KMP:
def partial(self, pattern):
""" Calculate partial match table: String -> [Int]"""
ret = [0]
for i in range(1, len(pattern)):
j = ret[i - 1]
while j > 0 and pattern[j] != pattern[i]:
j = ret[j - 1]
ret.append(j + 1 if pattern[j] == pattern[i] else j)
return ret
def search(self, T, P):
"""
KMP search main algorithm: String -> String -> [Int]
Return all the matching position of pattern string P in T
"""
partial, j = self.partial(P), 0
for i in range(len(T)):
while j > 0 and T[i] != P[j]:
j = partial[j - 1]
if T[i] == P[j]: j += 1
if j == len(P):
return i - (j - 1)
return -1
class Trie:
def __init__(self):
self.dicts = dict()
def add(self, word):
node = self.dicts
for w in word:
if w not in node:
node[w] = dict()
node = node[w]
def search(self, word):
node = self.dicts
for w in word:
if w not in node:
return False
node = node[w]
return True
class Solution:
# def minimumLengthEncoding(self, words: List[str]) -> int:
# kmp = KMP()
# ret = 0
# texts = ''
# words.sort(key=lambda w: len(w), reverse=True)
# for word in words:
# idx = kmp.search(texts, word)
# if idx == -1:
# ret += len(word)
# if len(texts) == 0:
# texts = word + "#"
# else:
# texts = texts + word + '#'
# ret += 1
#
# # print(texts)
# for word in words:
# if word not in texts:
# print(word)
# return len(texts)
def minimumLengthEncoding(self, words: List[str]) -> int:
trie = Trie()
ret = 0
words.sort(key=lambda w: len(w), reverse=True)
for word in words:
if trie.search(word[::-1]):
continue
trie.add(word[::-1])
ret += len(word) + 1
return ret
if __name__ == "__main__":
s = Solution()
assert s.minimumLengthEncoding(["time", "me", "bell"]) == 10
assert s.minimumLengthEncoding(
["ojtnj", "uuydcho", "dgsyp", "dwxycpx", "dpmvc", "dvfhmb", "flrxjjx", "fwhdhvn", "rgsakp", "aiconf", "nzacpk",
"sbxnaj", "shway", "rgrmz", "rysudo", "bzkioce", "mqxkzvu", "wyebk", "tymoaz", "mlmbg", "djbmek", "qfnme",
"khkiyae", "tjdaxry", "sqtcwz", "ehnsai", "jhncvrm", "cxkzgrx", "pummt", "hzrpfcn", "lkyqit", "phpqdxw",
"vangm", "wcjdgw", "pxesvtn", "mnqory", "bdrzvh", "brtzmo", "chqgf", "bipyxm", "meoikg", "ysyckk", "ojayeiq",
"zrfbsb", "yhuotea", "crfbhq", "tllycn", "qxnzihf", "avyawpz", "bwsjym", "myjozc", "lbdksm", "mctlt",
"dszowuw", "syshm", "xrvhhkn", "kgrcwfv", "dwlajlf", "yviuk", "xegjj", "spiczl", "vfvomi", "mgcujy", "dqmzb",
"isrisgt", "vdrtuah", "vsyth", "eoclef", "poccek", "cgafrlu", "crbhpgk", "sromv", "xmvbca", "gobra", "ygvlq",
"pjvhe", "tfweiso", "cskuohg", "eyalone", "pobkak", "nzpxn", "lbcrws", "uhtfe", "eorth", "showvu", "hxsmb",
"jrggose", "izifkb", "oqwyf", "mozmzj", "ijwle", "ggtqqqv", "geevzj", "meota", "ifsse", "kdtofm", "swydhvf",
"tzjhqap", "wqwwd", "jlinnov", "lmxkgeg", "stbot", "xrsfn", "etoyctk", "rygagm", "vcnrf", "zkdge", "emqtscp",
"newqcyy", "nnuus", "exwsxbd", "zstvl", "lbkko", "kygkyqq", "oggji", "xytbjo", "mfbahk", "ggoks", "lmqewkl",
"qexhyqe", "ogaogio", "nzvbav", "mdole", "qvyks", "gkupfu", "dgmpn", "ngrdrj", "iitqvk", "ipuiqb", "ugxfea",
"ialkmv", "hmgnx", "aoyoj", "fvzhjil", "butrbp", "dwhxnes", "etkdwg", "cjkghz", "tovkq", "mmxhv", "jgcsn",
"hmictal", "zxmnek", "pcoeg", "ntyqmlq", "hfubhtg", "ydjbv", "xnwlqto", "hatgi", "bsaczd", "pokwk", "arxlula",
"zjtqlk", "ocfxup", "nsnqjc", "xdcsopi", "iqxyxp", "xfmtpvm", "bqtgcf", "wboycn", "aoeda", "uowqdgj", "rzzzx",
"liucs", "ejzxz", "qmlehsh", "igrbmon", "dpmkbon", "pmayh", "nujdwdw", "awdgo", "ijgkzk", "inhee", "jzdtv",
"adhauh", "grtmbp", "qndbvw", "zprrw", "mpqieq", "jzmzeuu", "fcvftqs", "qxzxqy", "lidguzz", "eazwd", "zjhfsz",
"zsnzefh", "mnckfg", "zjgtq", "ckyxlif", "fznfo", "jegnof", "lzwyzb", "ozivfio", "igkclsa", "bebzn", "bitsggm",
"lrnwin", "hjnnzr", "idvoirn", "dgile", "vfngh", "xbmur", "rqaftt", "wjwwwxs", "btreou", "gjsycg", "pvsiylz",
"ccxzgdf", "excrrrr", "fiesr", "jdioj", "uzwsc", "odrlcoy", "hcsit", "ptwfprh", "sbqry", "kffvy", "ejeawbp",
"omvcc", "iqgxqlt", "edsuu", "xnbue", "qfbcx", "fzlmbkl", "wrrcueb", "mmqispp", "nknilwd", "dewuhju",
"hmdqlxy", "vjxgg", "lkuexo", "dzvfscm", "voulbs", "uevoqgq", "kmhwu", "oglzllg", "torhihn", "fhuqzc",
"mmcfhb", "woyayma", "uznsvre", "mmxed", "aoskwg", "xrosbm", "hpyrgh", "tghwbwh", "hcwzn", "iepeftj", "judij",
"kudbk", "jonpv", "lywck", "rxelz", "bgifz", "mehbxq", "fmqnz", "sqrmzj", "iqqjzex", "qioliz", "kjizbf",
"lgdcffc", "pfgmcr", "trdabul", "vlqjdnc", "jjvbxe", "fqlayw", "ilbhtyq", "saawulw", "gxysrb", "kighql",
"eceapr", "kztbcww", "jedkoy", "dxpcaga", "ndacphe", "rcoit", "ywgcnxg", "klipfup", "bddws", "jwyof", "lrfwgo",
"bediwuf", "ujakh", "ppima", "xzhwvm", "guzmsqt", "ffbliq", "adjmynm", "akabzn", "inmykju", "vlcjyv",
"orquepg", "tufrk", "vqpjymm", "lvuab", "qzxav", "ekcmu", "uqtuhie", "kfvtgf", "nklwjo", "ujxlfpl", "zobfpq",
"eignijd", "ythctg", "artllm", "wodhh", "tzpwszq", "njdqegg", "hzrqib", "zvoxtfd", "htboem", "axjuix", "bvmvm",
"jbnum", "bxdth", "atejt", "gqsqtnk", "fykrjbp", "ldyhonr", "wcuoj", "upphc", "agydg", "cjmwk", "rhxbqh",
"tpgozdd", "qyqoy", "zjqutw", "qoohqny", "nsiacwz", "xupin", "criuvs", "eswjeft", "pdmevn", "zvogq", "lrrvo",
"qhfqqpw", "ktudfg", "ijvmi", "neyjjdx", "rllpi", "vllvaa", "esebtu", "jyhcrh", "otgmr", "oudvyxj", "pmszy",
"opeed", "gicni", "mnuzn", "mjbfpod", "sqwgxu", "dwniwz", "wmbmmv", "lyafuy", "zmvlz", "kopxzuh", "urcbbiy",
"guhco", "nerjm", "lpdxc", "hxmjzz", "hynagc", "iyxeczi", "bdfxmoz", "yybnpqd", "jvgnb", "oquqem", "fmclmz",
"dmkhf", "zxbjpp", "qpxgcir", "iecvjm", "gtkne", "lgtqrbc", "gilbn", "mcxsg", "ncwbhn", "wkriiq", "zhsir",
"ptkkmw", "jcbpkrm", "vbefo", "vmbcd", "vqffj", "fhqzjt", "nryuh", "vmclav", "cjyggm", "sanev", "rrdocz",
"zqdexbs", "jrxstt", "pyhcesj", "aagghyr", "cyemjrb", "aliohf", "qaslg", "pnyjzxz", "pehnvi", "suhuw",
"twopabr", "sapqoc", "mckrh", "nzlgrxt", "aqpobnu", "pirbjgb", "plzlj", "raylxpu", "gyasfrh", "urjfxux",
"xjbwau", "iupknn", "vhxnc", "dnbjop", "vrxhwmd", "vjsmkh", "rfmqids", "smaiwt", "vkyfo", "bjqyxc", "rbbbp",
"dlkzg", "dwvdwu", "prulzh", "bavge", "ehhrz", "xxjqk", "pxopmp", "okmkmb", "slcznpp", "nvqlb", "jalrk",
"parwlcd", "anbxo", "oqcxyzo", "fjhrdjh", "pgvnwfe", "yfjyvh", "quvszjm", "xyiig", "xtncqv", "svsix", "jvpdnh",
"owuiv", "bsrugtt", "rmvggws", "lmdql", "kvmvd", "xrpmaw", "ssnxyb", "oworq", "rmmpuya", "rijpih", "aelazka",
"kncksqx", "yvtdiy", "epato", "pbbamj", "fejsw", "zgsru", "ekwrre", "zqben", "vugxi", "fvcsdp", "rujcews",
"asqxya", "worjlsd", "xggakg", "kzfpot", "haqon", "ypqxzz", "mmkzwt", "bdhif", "exzhv", "srnklzh", "hlrunb",
"dwfyke", "fvgbtdm", "aeutp", "czhefx", "tegfw", "jkxpsb", "gxkfkw", "exvntd", "gvuti", "jdmly", "owaqhw",
"fopuxzv", "edrvil", "biszwgv", "vgckzd", "fqdxn", "qktdf", "hpgwrk", "gpxiips", "vxnlab", "yylxz", "hsuscch",
"bhivaf", "wzrwtc", "ebplv", "yzxykou", "mxlssom", "evghv", "hksleg", "shybau", "zeyqa", "tljqka", "axfkec",
"fatdj", "janlkcc", "sjorbra", "jplge", "oazzot", "qbgtncn", "ozlil", "stohadq", "rvpuwn", "oqwpl", "byftgi",
"ubuusl", "fkogr", "bybdyhj", "vinyuzs", "ivsqvz", "vmnae", "gckxw", "rozbe", "glvxwj", "rcgicu", "xmvbd",
"itycsry", "llmwrs", "fuqth", "styrrwl", "wsseuln", "xwflcli", "muxgz", "ypmbboh", "rpmvnep", "wjvvnv",
"arjnw", "toauwc", "ltjxqrl", "basffd", "clxozwd", "glmrv", "iejgfj", "cvkoj", "wotjf", "mqucec", "xalgemc",
"hgimkh", "golvfq", "fuqpmak", "mhpcp", "pxoibt", "ledqa", "guzbyr", "ztvbeka", "racdp", "krsngra", "aaiknz",
"bhoobyc", "xibbe", "yohepxk", "eclevs", "ldliwcm", "qatvlk", "eiypbw", "vxvtwa", "nkdwsej", "ftmyvp",
"gpthye", "gazwoi", "zzgipon", "cithg", "wpabujl", "jhezlnb", "vqqaxfg", "kvpbk", "vggjemp", "owylv",
"lgwtfpg", "jjqvfm", "xbhga", "tulvfv", "sefuo", "hbysv", "ozopepd", "awyrifd", "pnudwx", "vreje", "zhpgw",
"qygbf", "tvbrvy", "zzmcw", "cznee", "deuzxt", "qfppjvi", "ilkps", "ydwhg", "krwkxzu", "mnsidg", "rkxyyr",
"ajkqz", "xtmom", "vqocor", "fympcl", "yyleyzy", "jjvzhrn", "kpmxvuz", "txoeqlx", "lhhmn", "chzgpf", "ncnjxle",
"ihxrg", "feqixq", "lkfhcar", "hfnsh", "bifczy", "umknat", "yrhgkh", "mgpcu", "qotukst", "yqlmfq", "ttcdp",
"xnjjzm", "cukbr", "hjhjb", "iikfcsr", "nsqbnnz", "dauygf", "cmydq", "lfnhqnl", "ppqgs", "hscbfug", "ohzisud",
"opspdkv", "aauxbop", "wpkhzo", "sxbsgu", "tajrv", "ololy", "mxmus", "vizvxv", "osaqz", "rxygkn", "mrzqlf",
"zrriyxb", "ufroe", "bajozg", "atpsu", "uhgauzu", "tffdw", "mdjulde", "rbrmy", "jhkqvwl", "gzsultq", "nkbfi",
"xtvwh", "dryzcv", "emaxuk", "zucvutb", "jdduyk", "bjdin", "loicuq", "qhjjb", "rgfjbq", "mphnk", "lxvceyx",
"zeoxb", "fxhnxu", "qpbipe", "ophwp", "wiioer", "quchwj", "pouxunw", "bloxgg", "xbsma", "dtwew", "xstorn",
"qfrfkz", "gxusbsn", "dhnxd", "mhstbs", "hekbtu", "wvrrjw", "yeiwd", "patplsx", "qmyiyi", "mowboj", "iskyd",
"bqhjj", "povppk", "vthpwx", "uuydaw", "rduxvez", "vmcww", "ylruvph", "ymqosp", "wzcvohg", "lhepwta", "bckhc",
"oiyyt", "wqzfv", "uduec", "lkkbtzl", "prvpbo", "jrwstii", "ijztoo", "qwwth", "vqzqiun", "krnjp", "zyanpiw",
"ojhjhvg", "lohmb", "thqtf", "reptzv", "zgkyq", "lhkvy", "cmjwl", "fmilgpw", "jrfawz", "vrtzd", "ezgfl",
"plzng", "zidzso", "civavlg", "vtwopu", "ljhckxo", "nuydt", "qembl", "fiwrre", "gfrgi", "gzegiq", "mltlqo",
"pcett", "snbsc", "msibcqn", "beacrhz", "vsycjt", "gjqji", "smcegol", "zregkp", "smcazoj", "dziqad", "jpuwp",
"hnlztac", "vduitco", "wyencad", "bkdnnqo", "cabzyg", "mgpcwr", "fxgvkxt", "wlkcrdd", "bhmhsy", "gqcctjc",
"atafpt", "vdzhmcg", "ighxj", "gfqpale", "fohbrtj", "mfpsgt", "tarjocf", "gyycb", "qvqfryl", "jpwowwc",
"jcgcg", "gmrjze", "nfptxq", "hmjhxge", "ieelj", "suvkgr", "nwjxe", "tkepqm", "extnpmq", "rxzdvf", "relzaa",
"hfhgaq", "lmihlz", "pacocq", "dclxr", "oknoem", "pbpnnd", "nleerfl", "tvytymc", "aamfnl", "ufdnq", "bxyzvyh",
"vksvout", "lohxhf", "sskgn", "aawbv", "hrvhx", "wvoqf", "vxkvh", "oqany", "bcmyd", "epdddqn", "zrlej",
"bchaf", "hmftii", "mefcrz", "wbxvc", "ewwnldf", "cqecxgh", "cnwvdmk", "vetrw", "zmogwov", "lshlzpe", "lijay",
"tcdqg", "xavqixd", "yjkhtsl", "myjvow", "cgthhd", "taaii", "iuuegk", "lcypmle", "wesrit", "tybco", "nhxysw",
"awkrj", "jcmqa", "porvo", "nrypriu", "vznnevp", "hzklwi", "vapuxh", "wyfkn", "albemu", "ttfdbl", "dbqrjv",
"cxals", "qzitwf", "ysunur", "llsefy", "cghfzji", "jboaa", "emhlkw", "khhmgha", "twlxgjz", "pyujor", "ozcax",
"fetvovo", "mdhrrd", "qdhdne", "fiuvw", "ebyxh", "ldaothh", "vwyjf", "yjyljlu", "ivroqg", "qvpeyec", "eemsdra",
"wavgeqk", "bjejrqg", "mdjimoz", "fgopy", "lgwodr", "cunvszh", "wiver", "ghmog", "jzgfyk", "vxlbx", "kvgbtn",
"cunorte", "mtesdc", "zdzmqu", "pigik", "smruadg", "czjxlt", "kukgaok", "tsldpqq", "luomo", "ezbcvdc",
"tfetwes", "uopzf", "wsvezkw", "wrnlvbx", "bpqungd", "jqnnof", "rqhiomi", "voulqb", "ouspxn", "chngpz",
"fbogfcv", "nqhunxo", "rydbke", "ewduo", "suqqwup", "oxzfxj", "kuwfwm", "euiics", "mvftoau", "vstfbm",
"vnmtoo", "muicf", "bjbskxb", "knbomlf", "enrbtfk", "hnaqe", "vxzsr", "gkqma", "qygmn", "ztkybmb", "injggpk",
"enqrgdk", "rkgoct", "tgaiu", "dnknoxk", "iwuou", "oxanccl", "xestej", "ekrqq", "xbwhz", "jkdvxfh", "oybaay",
"afyhci", "papffjq", "bdppssw", "qwyvjx", "xmnnosl", "kvqzjl", "wcwii", "ygfvt", "tpabbht", "kjmaq", "duschjz",
"gguiof", "wgfhve", "joqmfjq", "smqfd", "ynlovlz", "sgrzum", "bobmux", "dcppi", "isdjrwl", "lbevb", "efqsirq",
"hlgfql", "enmemlb", "dbmfk", "ibfpzm", "rtdnooq", "yicdq", "xadul", "dxibxzi", "yyxnj", "jhsdzxw", "thltbi",
"kwhreyi", "hrocoa", "fnaalbd", "vnwona", "nnonm", "naqaf", "xgzzies", "uhruynk", "kgadfx", "hyohzbd", "hnajx",
"yipzh", "ezdxaet", "xbzppoz", "rwnewxz", "hlcbkmb", "znyhu", "zsqtpkr", "gmyxr", "rphyvo", "bgjuz", "nulpv",
"eejfoso", "xmwcnes", "xxxxnpe", "jezkk", "idfsxrw", "qgzjtf", "arpzpo", "hxsanlt", "emvotcb", "sknzhvg",
"icitca", "ivhdln", "sqilerz", "ndigw", "bcsre", "mibbep", "zsczom", "cgghjbb", "fkylfgt", "bvzofs", "mefsng",
"bispbza", "tsosgy", "xopalrw", "wserf", "jbmlz", "xidxny", "ffmpjos", "vddwxmd", "netnsg", "kgevsp", "pguuv",
"cwisp", "slxiyb", "dmwaguc", "jobwusu", "uytcqrv", "hzhsy", "zrlsdd", "xhxah", "rxzij", "zwdgy", "ygmvkz",
"drkzbo", "qpsal", "tpxvl", "lfmfl", "sayjvlh", "rdamym", "ycuzd", "zkycu", "hdesec", "unequk", "lpkdid",
"vorxls", "admsdop", "rqnvkyg", "krnqqtb", "rxfms", "xfthd", "pxjbk", "gpslrg", "rwziwef", "usxgqvz", "baxxye",
"ocrkkrw", "lrlgsp", "ceyctg", "rniml", "vavug", "jgircl", "jrpnmsa", "rywvlfg", "prxnys", "fkzmknn", "ooelc",
"btvfs", "yqepuvw", "tmmmb", "qmpzexb", "zjckjvd", "aieytbb", "oafqq", "szrcyh", "czrxgae", "ifkte", "hfgajox",
"pwpnkqq", "yqphogn", "xuwthrd", "mpcmy", "qitdoa", "avlzfrh", "ywpip", "dgeki", "fgbnx", "tyofu", "xziqzj",
"qxzvqz", "vtsqk", "ipkld", "yfhim", "ebaegdc", "ubhrh", "ldejv", "mtflwy", "ocpyj", "yopgqs", "fkjxxd",
"njnnwr", "nylkeb", "taymdqv", "ekpznq", "cbzobmg", "bucdds", "qjozu", "uvpghor", "obhnu", "ljkxbg", "uqrxjtf",
"xwbxiw", "oxsmcg", "spchdd", "pcuitj", "faidq", "tybmy", "uygiyp", "qloizj", "cafgmy", "smetd", "kwcwb",
"tdabxf", "fpmrc", "lfjujn", "vvmvex", "mnsgdc", "enjlgsw", "ohwcg", "kxjdaup", "rotjarp", "aovdoq", "oviwq",
"qwaxs", "bmazco", "plcljsv", "yytjhl", "vgwjm", "drnue", "vqjgf", "uqlsfy", "bmqmfp", "lkauwna", "ozmqce",
"heunaxr", "zaffbj", "arbek", "qjnllw", "fdkhlz", "wgmbwh", "yceqag", "ltjjq", "yurggfw", "puaafsl", "tjiqkyt",
"yuzub", "ytmrfq", "ommmu", "ipknn", "iubnuab", "dzthvc", "zjbzpew", "dcooev", "pjydqcf", "zuojlzy", "zwjyfc",
"spmac", "dfkbnz", "fzriie", "asusog", "hdodx", "drjpo", "ddyif", "chabv", "ebvkwrr", "burdjl", "jjddi",
"dljzkye", "samyg", "zwgxcq", "xtratwo", "qfopz", "xvlaw", "laage", "btdium", "vzlnzt", "kmvbzkq", "kctobsx",
"kazbelu", "yxdwrk", "eslvjc", "nhsdmvs", "zuxqcc", "hqtxovn", "zrbdai", "fgjxs", "txecvio", "kjxlq", "dkuxss",
"mkbevn", "pzmdqc", "ihyia", "atsub", "twytus", "nzooxj", "qwuoly", "fdoigo", "zukhlh", "mugeaxt", "qqsfyls",
"qqtql", "wrvphcx", "nzjfhx", "uequtk", "fxuto", "qnast", "nveys", "ltbrcth", "toctdib", "fbpnh", "umxfgn",
"zvjuta", "yeron", "qzvswqk", "gbctr", "ryryz", "zieknd", "zcsna", "jrhak", "zfxqsj", "urlba", "lbozqf",
"yfcjaa", "hazgy", "gmmfzyz", "zjvkyc", "rvfdcf", "daitab", "hcxqgum", "qwakp", "ltbsjwo", "pqqtygx",
"upxcxao", "qylot", "lmxqc", "dwzcd", "tjccm", "mqcpap", "wgxqtr", "ivycvxy", "wdykg", "snvqka", "jxtvtsb",
"jnyowsq", "iwfuoig", "cuoixhu", "fzwalg", "djhrar", "sjmahk", "dyusf", "wrxqvdi", "ftytlor", "jsjbv",
"vjbebg", "agvsn", "vvmpgm", "gsgjopk", "vbqvhy", "afopf", "zybfuz", "aqsgc", "ytrjsvn", "wlhdfr", "vdhvl",
"jrlvr", "cscxwf", "yhgbew", "wupbl", "ssuhyvv", "bhcirzk", "oykwk", "ijbto", "qsnpgw", "otwzage", "ytqzh",
"rgwow", "bvhgkwh", "fvawxie", "fllxw", "gfcqf", "scoqb", "qubrq", "gdxjtp", "ahrpck", "awnlgi", "cmehsyp",
"dwmytpy", "firyeq", "oohwhr", "caelk", "mqemvs", "qflkzi", "tfpibll", "ybhzd", "ctsxri", "yurocj", "dnlnl",
"ydmdva", "xkaotl", "xovax", "ypynrqp", "kwfzw", "fbgsmrc", "tutime", "rcugul", "cvewno", "typhbpa", "wazew",
"flzfs", "wxxbza", "ogjfkl", "vjlebet", "imbubm", "xinyncy", "dqmxfy", "buhagzh", "jjadpos", "gejyz", "gxshqk",
"wkwrs", "dqeriqo", "dmixr", "bysjih", "aoloq", "ddwhsxs", "nteqv", "cqagf", "ditsrn", "wfxgl", "jwjqb",
"rvkxj", "rxapr", "yrlkip", "npquasb", "nvezlr", "gmhchcx", "lodfihi", "dheypxa", "plzjykh", "qopsthg",
"zsnes", "raongg", "zrpnac", "tzmtltj", "jsecdn", "rzudh", "hkcyic", "xsxmw", "reeuwpn", "grkwrag", "gvzzbsq",
"lrfta", "aqyvbkj", "ytgfu", "wcmvd", "olnvfi", "hhgmhb", "kojmepr", "wpohl", "szhgg", "hymiblu", "lkwjr",
"zulqpz", "sdcqjo", "olgsgez", "lxkpqci", "yxcgn", "gmvex", "fskpppe", "utzto", "axncvp", "lcyahba", "ydeae",
"zvzar", "ghfkkqv", "ryrpg", "gucpbq", "reofjz", "cdnoo", "dchhh", "byiwd", "cqbhok", "ksfnoa", "xsmmlr",
"qyvdfqh", "dzshj", "bpifnzh", "uxmoml", "jdxvojf", "ihfll", "vwesfof", "zynnpb", "fwzra", "rxlgww", "vkmjd",
"hcjgzt", "mkapfl", "ffjqlf", "wulaebc", "gurramv", "tufkzai", "bxprqek", "nkohv", "abgfwyl", "slslg",
"wirsnh", "pykvuh", "fdrwk", "gtmgsxe", "dxsaab", "lqiryty", "aoezg", "tzhugcg", "uoarf", "dwhsv", "rjiuoi",
"ycgcdnf", "rtfmwz", "amkjc", "woogtdi", "deprx", "ucknu", "womfm", "xdeev", "qapxpuu", "ngulnk", "fgtxyf",
"hnyabid", "cilmy", "wrsewtf", "luvtmo", "wftuh", "ifoeeqp", "dtfdhhl", "rwnburg", "fohkkul", "frqqi",
"gsrcyc", "teuync", "dvpvak", "daqjki", "kksscp", "somsde", "tyfvck", "ftfekl", "ahncv", "yvosm", "qgllvg",
"ylfwv", "jenqns", "lqovrnm", "iyger", "nfvtsv", "bknxmqj", "pfzybdr", "hqjol", "chlpk", "etgrtqa", "msuxdx",
"vnoatf", "ypdzomn", "vsshmg", "rfkipq", "jvpbiz", "vbskd", "edsoixj", "uowim", "hqtsj", "inbsxal", "ookrv",
"ipotdnk", "kmazqd", "jpfghb", "gvmnnpv", "juvwa", "xtkvzw", "ejqcl", "ebgcnt", "ztuyu", "dlzthw", "zzipe",
"iaxwdxy", "htynwkc", "lefbq", "pizfr", "vttrsv", "oagak", "eqlrom", "vttefg", "dsrmk", "oekbe", "cvugzk",
"diwvz", "gxmfob", "vjowzm", "mjpop", "uznhz", "kqvjwug", "wjqvxfg", "jbpwezu", "wsckdx", "slqfomn", "omuxk",
"zlgblso", "kvitoq", "dmafq", "djxmzk", "pjqfegq", "yjrttas", "siakcx", "iutiqk", "nwfdj", "gbgtazk", "cpqtf",
"panmlr", "aqubhsg", "iwdim", "nqetym", "mwazh", "thyhy", "ydtxan", "xfoin", "lsosc", "esznfa", "xgdisi",
"flvbzh", "mpltx", "iwjpsqp", "udfycf", "rntmc", "ltflwu", "wkgbaw", "bcuzt", "hejxuhb", "lguohe", "klnhb",
"mjump", "avcwrol", "yrcqlc", "ihxul", "avajh", "gtpauet", "iemzk", "rfdub", "gqnbk", "cfcmg", "iobyh",
"iruuapf", "tyifwt", "sbdtp", "mngcpmb", "oaqpolm", "mmimmh", "gxknadi", "bmxhuu", "ulyoa", "keidy", "vsnfk",
"cnnnfty", "pkajm", "ddgeecb", "prxidqd", "wmenvhd", "akjcqo", "tnekfef", "ipvsi", "pzjwq", "wmmct", "erdjnuf",
"vgeaqs", "nlbdx", "dpvbe", "dgeqz", "aiguzh", "akawppx", "tykrjcs", "gvavo", "hkyle", "yhedx", "xzqcg",
"gzdxt", "csssbk", "tmekrmv", "lfsgo", "iizahz", "aszfd", "aybqnsl", "vadwxsl", "ulmiii", "xaxdugp", "sfnnsbg",
"dkyruh", "qhpqu", "amesjd", "evjuki", "vtqjw", "aoabp", "qnsuhe", "bplbx", "fdqok", "ozkhgib", "cggwzys",
"nbknjay", "ooambw", "evmvegf", "htdlxik", "kahcume", "bojpn", "bhipie", "hdyjslw", "pbkkq", "qwszl",
"fgkbzsd", "hejdx", "vmcfhgx", "puzlmmm", "meffil", "boakbiz", "eczot", "fvkkit", "jebfx", "umvkjg", "uikgs",
"rycgpf", "rfmfgmy", "nveho", "bgywqen", "gepfma", "vquyq", "wcercbw", "wbpjkxc", "rqloeda", "omclokx",
"hvotwp", "tvqfxxu", "qrtghk", "hggme", "arnmfnt", "cxprj", "rspdt", "hlgfq", "dmqel", "pcerxk", "ptqjc",
"wzreko", "kahks", "xjnzo", "xzzye", "xbdeu", "koiwkv", "jlwkkjr", "xzdixoc", "xeedvrm", "mrtnhqi", "jaeann",
"mvubp", "olklqf", "retbgcj", "qxxlhh", "cqyyoy", "ngwikg", "qijte", "sjzck", "zkmkx", "ongtzf", "tanow",
"smgntvq", "urfgt", "xwcroa", "kadcpd", "cxhgo", "walku", "kvvcsyt", "elwmuxk", "bfphtm", "vzeumuq", "sknvev",
"vbsnfd", "grmbg", "vjahwt", "dmcbmn", "smubz", "jobbfcv", "ujlkm", "lcthh", "bauuqdu", "kjgzgtq", "gicjz",
"nugbax", "kbnjfiu", "sqfpein", "obbgfww", "ykggxjx", "irnmog", "xniuv", "rqiwycq", "hzlgyu", "yjtrttv",
"satym", "dgqhlkk", "rghal", "tbekx", "kkwmo", "eahwhks", "bpvmbur", "sqtgkj", "khboz", "enefr", "vkzqvt",
"wfruavu", "ninomu", "ypktaoa", "mlpmoit", "fxyhjfp", "fgnpp", "txieja", "dprnj", "bgyrp", "zsqwqrw", "stqzki",
"kwiayb", "ulbsn", "aetje", "vwzbb", "tedwyqs", "cymiruy", "jigpoqx", "ypuqsc", "weletu", "gvibea", "chhuldm",
"baylv", "wdhovo", "imfqu", "meodnsk", "jhlckqw", "jolyfh", "jsfkrhr", "tnbfzvs", "egcfht", "qnzmyr", "owtrqu",
"oqaqu", "xftys", "goxfftm", "sgbnp", "bhfvaz", "gospa", "jwzlvwk", "lqncoqd", "xxizglc", "bwffm", "mhpggzr",
"kdaoewx", "anviou", "mqiij", "wkskpn", "enougdh", "vldnn", "gbfgz", "ejmbh", "qsdrvsx", "mrvbz", "cqlufpf",
"kbgjlu", "njgna", "admrmk", "pwwsc", "gxkot", "pdjwh", "ejwxt", "bpaxufv", "iwjzs", "xxfsg", "vuhgh",
"srytgb", "yesvlux", "tggnch", "cgnbb", "fbzbx", "aomoqf", "zkrvrjg", "ueaoz", "dppacnl", "ewovhxz", "kbvee",
"ixeeb", "gwgoqm", "hlwlxe", "fpmkrk", "wzjsr", "ispwe", "garofu", "jcmpec", "tggeo", "yzdeo", "axpmln",
"zhnlhck", "duyqcn", "tpqwqi", "jvmaj", "bisgoy", "mpwmurb", "olqla", "ecapwan", "kcpxn", "xcapin", "ooctk",
"sgqql", "vcyyjxf", "ejyom", "jsgtha", "logxnjg", "nypadhj", "dprmk", "cqkuzb", "gratv", "tgkjgu", "fttcafm",
"tpryi", "ubbhw", "uwcuyn", "zkgohs", "snfesz", "ifrex", "tkbfz", "fvvkp", "otjiq", "lgomjjv", "ertracf",
"bregu", "kkbizb", "hyhvn", "zjcnxfl", "mceskuj", "lmupdq", "zdzqzgo", "yorppew", "fpwtjd", "dxvyzt", "bbnnu",
"pkycae", "ucvapn", "dijmkb", "nvwwpr", "bufkw", "zhono", "vayxf", "hlfwkev", "klkvkj", "yzgpwg", "lcbqr",
"tkkfi", "pcgljx", "bhduxu", "rgfipts", "hkjbrr", "fobvy", "wqmqhxo", "yjgvypg", "ehgoizl", "ipiibzh",
"aqxbxtx", "lrtin", "fyyuypr", "pyrocgm", "kwqbg", "ukccw", "wgsbpvx", "pcoivrv", "okhxaba", "bbuaibf",
"ccvfm", "phpst", "yxtqiz", "cdfbo", "sijfljn", "gdlhn", "bqmbced", "tiejf", "aurqer", "olmyd", "prctay",
"lwflhi", "bbehvta", "oxoda", "lklyc", "rzedhp", "kairil", "envan", "wdcwfk", "xoroddb", "womrlr", "ruxebe",
"jnpywrd", "wrifvz", "zkewcd", "vllfrn", "uvdvjh", "bglpya", "vzokkbw", "apaoqt", "xpjizn", "xoajmd", "xapjwc",
"jcknwg", "bjpreep", "ffkua", "ukcbah", "bugvkrf", "cbmmfs", "cwaczhl", "nsqaj", "sjeikg", "fayqif", "slowoh",
"xjpvkpa", "ynunjle", "bqavt", "nkpqudr", "neikvd", "yuqlzg", "pdxbtrb", "cashlog", "iqiqy", "smjmxv",
"zbtpbr", "zzamzcv", "jmakg", "txfswc", "pkaym", "swlde", "utann", "mqgpjne", "pslfvek", "nbiqhb", "bzsianu",
"wnxgbi", "ahkeeiz", "dqdfjg", "bptdg", "pwita", "uqyflq", "txabjn", "yznjmve", "mukcqqf", "cxonbf", "ixuewjm",
"pzlcat", "eikeeo", "scwsoa", "uaeyw", "oeorff", "gbqgd", "qboqiv", "hiulpb", "dbbdm", "qvdxx", "aypxbcn",
"ykjwdbg", "pvfxn", "shrqyz", "zaxtu", "pfefgww", "jwifrw", "zxuud", "kpkwhlj", "lwptgd", "zpdmvsw", "takeb",
"ynehl", "kixtod", "fyrgm", "qirzmr", "shyvec", "xjgzt", "bwfvht", "wyehh", "renzc", "nnibax", "slhfng",
"yjtecc", "lghvbzf", "qroxvun", "mlsed", "rrudho", "cyffhh", "tjlxahp", "xmaepzk", "jvdzh", "bbvegrw", "cebcz",
"odjpeam", "guerph", "tgmphgo", "ohtkqq", "jcxojz", "haeheae", "erydxni", "hatjxx", "kwmgkjw", "wmezvy",
"hsuuvfi", "ineek", "grkxmhb", "alxkt", "rmspxdg"]) == 13956
assert s.minimumLengthEncoding(["me", "time"]) == 5
assert s.minimumLengthEncoding(
["yiyqbv", "njqvawn", "wnlovvp", "vogum", "jpolc", "zleec", "sxdrww", "rbowr", "xsjorra", "kwjsx", "vornum",
"echku", "kuizegn", "rhuvv", "eemkh", "yshht", "pbixoa", "cmbxvtr", "iupia", "nmcbq", "mgrjsx", "ejvniwt",
"svhsel", "kazenhf", "fevpm", "xcwqfgw", "ozikzc", "mywnmqt", "taorwjm", "gcshacq", "fgtasq", "qexygw",
"ljmbari", "zfjudos", "rgxuzy", "kmzryaf", "exjfd", "mcqnebz", "ptoim", "zglfi", "fhneaz", "rexgc", "lhplwyr",
"dthdp", "jizetec", "obyzg", "rqupa", "yphttge", "wdcdn", "wdomtr", "hchbd", "ytyra", "upytftl", "swbbi",
"qpcybv", "dcoxspd", "dftkf", "nwjfmj", "ojbwy", "zofuy", "adqkt", "kpcply", "aeukw", "fqblb", "xurrbpo",
"veioa", "puzvl", "bnzvlax", "tjzsdcw", "jarqr", "orxjbg", "ilrqdri", "syjuoyi", "htoqdco", "gwslw", "dpqyf",
"jnkhv", "fpqhpr", "baewnvc", "caunsf", "qhbpe", "wlckl", "lmoroqe", "ddlak", "qipwbfp", "cefqs", "surczp",
"jtmfuro", "ezhqau", "dlsco", "hywoqh", "lnifq", "hvfmu", "cqjdkok", "tggdact", "rwuowdk", "attnl", "lwhyq",
"mqtsc", "bmwajiy", "nyohug", "vvfpt", "lbyazu", "sarwago", "iccztck", "ugsxcw", "rpwza", "yofmlll", "ulhdzhg",
"lbaqk", "bwxxwc", "dmsbawg", "tjloy", "imbrkul", "xguke", "shlkuq", "lizjcdu", "kmvykl", "ilqxxjm", "rtbvvqt",
"qisec", "zobzr", "thwntt", "afpifh", "uwiiovy", "hgsyecl", "pdgnm", "mqyesch", "suexztu", "msguuwu", "yrykkv",
"xtoommc", "muteu", "bamml", "kkhlb", "jfrnx", "wpytor", "zzogpt", "yryxxt", "hzqofjd", "ehtildc", "ptclf",
"nyltvd", "nrret", "qqqqt", "uuxunf", "jajxt", "lzdvlc", "gpdtjug", "hjsso", "jairua", "qarxuey", "rpwwjwv",
"cjqypep", "tuzgcs", "oytqxb", "rgfmud", "stnwn", "tzzaop", "jpuopzg", "qeywd", "spnstrg", "dfwgntg", "yjyqk",
"ioowc", "duqfg", "gmqxe", "xhlbby", "liurjk", "vdujfm", "xxyyn", "omapgc", "koemzbz", "ziiyako", "pjmhfrv",
"bshtfgj", "ihjvt", "pnipuw", "fajiuj", "rdvcqzd", "mgknns", "ouwkm", "ejnklwc", "osepl", "gplpyvs", "paxrddg",
"gsjlpd", "lgnmgl", "yifeeer", "hhnwlol", "fcmxs", "ilinwgm", "udhfdtq", "ceefc", "xweqx", "jfelwod",
"rtywfjo", "kzwrgqx", "fcjriov", "fzytqv", "zcpcddo", "scpyzow", "kbzegu", "gclwr", "gmiwlp", "rtpka",
"yiywuyy", "qceot", "dtrgn", "ntwbu", "fxobd", "zmxwza", "qcksyz", "wgbtmm", "pzorve", "hztydc", "jqlay",
"ijdkbk", "uzjrps", "gfzibk", "gsxqj", "kgjrkdd", "smdeuk", "iwizewp", "owjie", "kcdccu", "ifltqr", "zrdfbm",
"pznbcsk", "mtkpi", "cpasir", "flrxrm", "uxcxnv", "htlfcp", "ltukxfr", "ftbbha", "jhgjgyz", "qjreroc",
"vcvtbid", "nrhlq", "gtkpot", "gyplqqg", "lnorig", "fixhufv", "ugcug", "ndfug", "wuorhe", "owocnkw", "rcnbf",
"ioiiiui", "kakwtne", "svxtt", "wdrxogm", "ibrxs", "bddqi", "jeguac", "hlftdw", "nutgfjw", "krrzvf", "amxuloc",
"deozdoe", "ovsvk", "sfqsl", "slgiw", "jbjujag", "mhiru", "uqksech", "davosw", "nlueljv", "rhtvdu", "ivdpdqa",
"qnbenpq", "dtapqq", "hwwfpxl", "oyrfosn", "goxgmgo", "tbvutl", "cbbbcm", "iiugpk", "hinkem", "vvaitk",
"pskyf", "hdnekg", "nqhfn", "dqbozx", "zcwpko", "kafyu", "jfegubk", "nofqzsk", "ujmxxg", "akwzemu", "yvhxb",
"qqlwofi", "hmoecj", "qwgtlc", "jepvygq", "uzggm", "fztiews", "lvndvf", "vulax", "znqudh", "whgqi", "noguo",
"vewkx", "uruvgf", "ubohmba", "aulzi", "flvfdlq", "yspfie", "wugif", "qndyiwa", "keihmct", "rggvn", "ojjmuoh",
"sbbcl", "cdivmoz", "vkusmp", "mfddp", "kgohwvp", "rjbbxw", "vsgptj", "hbyjoz", "gufrv", "orxiv", "fxcqfw",
"okppik", "qlouw", "lkryigo", "qccvc", "ixcnodg", "wlfilts", "ahqtevp", "kkbuha", "oehaez", "rzczib", "vxobk",
"wmetvjs", "xfjgeq", "eadzl", "aeqdvch", "czojfq", "hxshidl", "ofswsj", "iwbqcmg", "schhwtt", "ltyth", "wiccu",
"akill", "zaaji", "qepvfa", "mpvrkeu", "dcpenm", "wdhlk", "llqbby", "lronwkr", "rwtguo", "ofnvs", "lxdnwzf",
"dctmilf", "zhckjd", "hajsuac", "wpylhy", "zhipvm", "ihikr", "zzwjgvr", "gdglrn", "skhow", "tlqtjl", "uypli",
"evdva", "civide", "iroihm", "lvuzid", "vexat", "ngmvrz", "szdhbt", "ggrbz", "bsmovlt", "kguomvl", "onzvx",
"nobgxw", "tqxemc", "vbiyx", "fpzpf", "ogtvf", "yuthri", "xszbn", "xcuhj", "nosnpbp", "mowsxg", "tfalyy",
"kxombgm", "cukrz", "krmseq", "velzh", "kmufxj", "nvxlkq", "ualvras", "wytoucy", "qicqyym", "pbeujtv",
"haojnbm", "xnfffpe", "wvoiald", "rlyvf", "sxamoxw", "ztqnmp", "biiavx", "lnjnzs", "arqdjdy", "pkrgokc",
"qxswouj", "dgqah", "mnhzo", "ggilb", "qscrd", "ggvkimw", "qlxjys", "wximi", "aqlhio", "iavtvy", "grkqf",
"dwrtut", "uozutfc", "fogxpdb", "ydtntlq", "vnmpmwp", "gtxhwq", "mlpihx", "yfpjlz", "hdvcquq", "nunny",
"wklasgp", "wxduo", "topsqf", "tngcpzc", "mcrut", "pdnsmt", "kavaok", "seiqsqa", "bhgkiyt", "mawvhtp",
"domcnrm", "fgusghc", "wdaufwz", "tzpuks", "kisndyz", "fwyieu", "wtdum", "ytxhl", "yhzkmuv", "nppnqe", "ccvhj",
"dautnyq", "hkaliab", "kngan", "ebmhiop", "vsdkcef", "nmpcnd", "vxvnl", "cwcgu", "zsuneh", "qjgcmd", "awvba",
"rzbisxo", "oilqrj", "neiazlm", "hlyrl", "tmiht", "lwqxxv", "gyblrw", "gnnjkb", "lrxiln", "xlwlseh", "npfwcvp",
"yjcdhw", "rzndd", "orlhmip", "gatuojh", "osotgvv", "owksz", "kcocizf", "izlev", "smigns", "wtxfwo", "knwizte",
"mqjojzp", "lkezye", "xqldbu", "cvbpyl", "aoipbz", "asrupt", "bdwkesh", "jpaykm", "pksbg", "gdbsibd", "lfxpwk",
"rmnfph", "yzxwke", "xjwyusv", "yetar", "sytdz", "pnystzi", "yntcqo", "egoorl", "aydxu", "rfdrfhe", "flzkos",
"mmjgev", "fbjwmvi", "jeouc", "lcmkri", "aggsb", "aaeazai", "amyxpey", "onxqpg", "qrjpxq", "zanea", "niwsgtv",
"nsqja", "utgskd", "hlcum", "frygtl", "xjmqetz", "upqddd", "vxzdstm", "hcmtera", "ejstou", "xkcguf", "bokigdk",
"vurnv", "zsgrje", "nbxlf", "tpilcx", "lvepux", "xacdtp", "amdgx", "ubbvnx", "xmvznh", "tlprri", "sthkn",
"xhoad", "deotaxo", "pqzppmw", "xlcpx", "qwzrpyp", "lujabeb", "heskwyy", "mzzaaur", "vnestcs", "rryphdl",
"ibdiabi", "eoiyt", "znflx", "clougix", "zzadxw", "lrrgtf", "lsdoakf", "yxfmqx", "qhnrry", "ktcdmv", "veygqu",
"btjlo", "fcspsc", "gozoazm", "xcsqgz", "aazae", "nkuvask", "mzdgjq", "sihqdhy", "zadrwzw", "gzcyuea",
"lpgccic", "fqtfuzw", "bjoqpkc", "oydpkxc", "sugnnu", "hyvygf", "axkxo", "rsmzb", "dlhqmac", "gbqby", "npqkj",
"odbtb", "bdsib", "zyasxv", "ifxqcc", "lmnjwhr", "ibuyu", "uzhle", "ccpwhjr", "vhrojnz", "fkzfz", "fyesm",
"dnvipvm", "jbbqn", "qdkgl", "xkvvgq", "dphugaf", "soxbfun", "rbgokx", "biveiz", "vbaqtn", "qapydgf", "llldu",
"ottjpzu", "fwjuc", "cawio", "gbkwe", "rrnnxer", "luviy", "zsalse", "ckwdeox", "ozhqocm", "vtozfwz", "jztole",
"ydqei", "bfugz", "psawjp", "dzlyrwp", "izuyrne", "rbwcfr", "vdvte", "usjbqs", "zzovkxr", "frfkwk", "mmtmdd",
"sntka", "wachbzo", "rmzvj", "scbngo", "eqiuiwi", "qfakk", "cckcmt", "owhzow", "rejdlw", "iprsqdq", "twwaldw",
"mfilzyk", "jygvx", "iewbo", "irhko", "zpazqhn", "ndqbg", "ayzxqdz", "zvpbh", "maapq", "pzitrfm", "qsgsurv",
"viwcfff", "wpgenms", "tjmvu", "czuemc", "infxoo", "avhbw", "nugkqx", "xubakjp", "ndask", "utaqq", "njhuxq",
"sdvuex", "tfmxqp", "bydovjo", "bizxjsp", "zoozxyv", "jegei", "gkpqobw", "psumbtg", "gkgoh", "sgcbpql",
"xxkhy", "kdorkr", "hcomj", "ulrpyv", "rhplil", "tyyochd", "xhzul", "srdjmns", "kgukye", "yepvs", "xnobsjb",
"umxmtub", "wvqasr", "igftpzw", "exhecn", "rreee", "jpxuvxh", "jriqf", "akexunb", "ekvdsoe", "ytzvj",
"vfrlyae", "pmfai", "biouzle", "xkbce", "clzyi", "xhjoso", "wmxkxb", "dqzzig", "ydtby", "gskwj", "wlkwbz",
"zepvllz", "zsgqp", "blntawk", "eynmil", "bdqyp", "wgtnqbc", "rrgaq", "gtafuzo", "qdiko", "kkcsdo", "zwqhs",
"kugzbmf", "wtvvs", "kqsdx", "mxsuxiz", "pgbgjfe", "vodfr", "qbvwu", "vfwbhgw", "ayojye", "kolzfqg", "xnbecj",
"akbcnf", "uutrn", "upmesa", "marqej", "bbucee", "bazqbau", "qikgsyf", "oeayzn", "uilxnzr", "vpnxknl",
"btgtxgh", "vjaav", "zaxtzah", "msweps", "awduwld", "gzaep", "ngvgc", "qpoqdgn", "kimndg", "qilmmpw",
"oafhlyp", "nyelgvw", "onymk", "feycbc", "dhcrx", "siqpfly", "tyvycmf", "huctqp", "uscjrp", "bbptd", "msdmu",
"xlxhye", "xnyzcox", "kyskda", "injdkmp", "jiwus", "spjylwd", "eqcrnt", "snfiu", "jvwvge", "yfeaw", "mmdnsjj",
"suzdw", "xiupf", "rjwjhng", "tqvasy", "rmibpa", "zuqax", "prpndnp", "efryqe", "pwuqfy", "wpqlfs", "aeswq",
"cxkeiue", "jydxzfi", "tzfvwp", "zzgtw", "mupiusx", "sojavt", "dxmsgq", "migjiyj", "kixjk", "ywwvcpl",
"khzcuo", "oykhx", "fochin", "foxbfkc", "sizjg", "wrjcvr", "ceadd", "tvfqgxq", "whzhche", "dcoeti", "mpilfib",
"cphie", "ucpnjm", "ajltvx", "kpizym", "vevfsrs", "jznrri", "yvhxomr", "cbcnk", "yuwuhu", "jywuzed", "kqakusq",
"jrnzgfo", "mjimzz", "mfjybnd", "ntqyq", "junxxck", "myvqajv", "kvuqs", "obfxw", "jwuba", "vnrvzvy", "aeric",
"vtgda", "nkrocpt", "ahitg", "dzxtr", "zswwc", "yhxap", "fdhiwr", "cpxtqv", "izbmo", "zyioo", "vysnoe",
"ouuyvj", "cumdhzn", "dbsmph", "cktjem", "vbmxy", "utgfyhc", "rqdeorp", "btnlmd", "chxwlt", "nsghoqi",
"egycsm", "wkanat", "lzjyf", "donyx", "cchqsa", "xozzz", "yzmnf", "jfzuh", "dpcpg", "hlahz", "vobopk",
"lssfeli", "ccttzi", "glzgqpv", "oyqzug", "qqhkrr", "euwotv", "hwbmtz", "hiylhly", "bppzne", "yetyyvs",
"cnbwcby", "hzblk", "pfjmxt", "dsxvt", "vvkju", "zjrfr", "gdbhb", "udoad", "nbhpzfm", "iwetbym", "atmly",
"tnxli", "myegb", "hiwqsk", "btrajk", "nhrmwn", "ftmbecv", "xopht", "eiikqy", "qizanwa", "cwxiatf", "jshjva",
"llrtkn", "zhivu", "lmwiu", "oaeaqz", "oxotfub", "jnkafm", "juhrmq", "mqzbtw", "puiaxty", "dnahvoj", "gaxhz",
"xfnay", "iqmlnlq", "xudhcg", "izpkz", "tqttmt", "bwnbs", "fdufd", "vhzyymh", "zhqtxr", "evbcrv", "xvnma",
"dgcwy", "cwxzlbz", "oodiol", "teyim", "kqqfjub", "ftsqzi", "arfztkr", "oqlujx", "rpkkdov", "ptoff", "ivxaxr",
"nxeept", "cacpl", "tehir", "spvggl", "qfzxkn", "bhwkukx", "fkdpuq", "xdrngre", "fnfplq", "dzbrl", "ufgxu",
"sciec", "fgdydvw", "nmpaqxi", "ydsvfv", "natjz", "lruyvzf", "xznznxp", "mhfrh", "kddsk", "uwatn", "uklzs",
"lnuta", "ryizc", "cvwko", "tnzpk", "ywpiv", "vbvcagq", "pzolw", "nmyfhg", "cshkofj", "ksptw", "kqejh",
"zgzjqzo", "mxzrw", "enabosq", "vmubgc", "sfzcj", "hewvk", "ewhrq", "oifnsmi", "izdnvu", "cshgtk", "mqotuhd",
"gnqgj", "rxailbm", "iyhxvtu", "ncjzklq", "zjmnoc", "awqwos", "ugujppc", "spbvfwl", "gntsvo", "euksu",
"qnvneph", "crhmf", "brktmf", "mvgmr", "yzcskrp", "tihawec", "edqmxpn", "fxyymlr", "dzfkucm", "prldz",
"gplrlhz", "bohwr", "bhebbk", "mmecj", "segydd", "ptslsb", "pyhgw", "cwmrq", "mjfhflh", "xhuid", "npxmb",
"izilq", "dczhqh", "tgfnxtb", "zrylvo", "lctxrar", "ylhrbii", "rfxedv", "llvhzjq", "bjocv", "wbnex", "cnohnf",
"xahrl", "rouvwyc", "hbhovgv", "dhucp", "ncmff", "ncsskg", "gsjbyin", "lroxscf", "whfaenl", "vsfultg",
"floxkpy", "captoai", "qwolyex", "ggaypn", "wzunypd", "pjixeu", "gxnjkoc", "pqiqhn", "xakjmgz", "vqizkx",
"gdzcxr", "kyxwdd", "pgxmazn", "qeuwf", "bduknm", "tcrcn", "nehgee", "wktbcgu", "jwqltdt", "wczkai", "drkqs",
"qhdqnn", "oobxirc", "lbunv", "ifscr", "xnfpbrw", "yrrdbax", "fbocs", "tewne", "iobixe", "zgosas", "yhesn",
"xlqwd", "pfcen", "slsjffx", "ilwatrc", "mhsmgp", "iteghl", "aqhufdl", "kxgpqcu", "ryrcgp", "azidf", "smlnl",
"rocxvbt", "iutfc", "loapgbr", "musulp", "dqcnj", "tpgbkfh", "wvskii", "itkfopo", "kytyb", "rzahbu", "aewptd",
"ohergbb", "cadxh", "aphwelj", "huooyzn", "gtttia", "izeyhcr", "cfvxz", "aitaxyp", "vypqost", "ebfnmif",
"kgiucm", "zryyu", "oxgnbpt", "frpwo", "ouqvodl", "pdaazh", "gxwmf", "dozxsjm", "yndpsik", "zcwvu", "mihug",
"jgodklw", "ysklw", "cfxqv", "yqvtz", "rctnp", "xjywa", "kpqyw", "hhtegzt", "rnwbeoi", "uyxqum", "jahcwbe",
"jzjns", "ovwoaz", "oqmsrua", "natbejl", "deffv", "okgbr", "paqhy", "jkafhte", "lifsknp", "afmskh", "oemdro",
"oxuwov", "qtyxa", "hkpfsm", "ulaubn", "tciurw", "myohwlo", "okuiejb", "ormoqsb", "gmipz", "hterzir", "ekxzre",
"xkevge", "ihenf", "nnhzv", "eocjmx", "upzal", "oounfko", "myhbwub", "fwipva", "pkzzvpd", "nrupm", "vluzq",
"fxkoyho", "atzktr", "aomrp", "qwpser", "ejagmb", "cfigelm", "bvanb", "cgcgabo", "hmjvlqt", "hxxocf", "ftqaud",
"htuipy", "bhwmcn", "tgyvaqe", "lvuwh", "yiabzs", "rzzavu", "fiubm", "uuqsb", "riyakuf", "psscffd", "kvckzr",
"fktmnf", "ivzqexi", "nhxzm", "kffjmb", "vdzxv", "esago", "bfikw", "gaiuxmz", "volokcm", "jypcs", "psibvs",
"hxaxklf", "lmqwgy", "spnbimo", "mtihak", "xikoiy", "rmmtv", "phaqgxj", "zcuwkhk", "emodbyb", "ztahsya",
"ieiqm", "lfoquh", "emznnq", "pnhlgut", "pgvads", "cqsjx", "lxnjei", "zpque", "rdjbiyb", "sxedpu", "potnqva",
"iirkn", "rjmnrxd", "ksgcd", "waeymnh", "tizdz", "kproa", "wpttygd", "lvyze", "peewvgm", "fwtyzbw", "zitkk",
"gfgqr", "udgvlz", "swqspo", "ohhvyq", "kgyuau", "hcerp", "pdomlm", "twabkk", "zfsea", "epiwp", "xgycjpt",
"jtkdh", "mxmdm", "rtkzm", "qkacy", "nuvdiq", "agctak", "hypgyh", "ewtjp", "paysolw", "bcutebe", "xelxyb",
"gzdvrth", "vpzfv", "cxrkt", "admiyzi", "lqlmn", "zbjpbg", "tlvdnli", "zetnox", "ylcsobo", "balajod", "igoume",
"sxcgw", "sbkkafk", "fmndnnw", "incsa", "jyupkg", "uhvvc", "rswnbth", "nvprfj", "figqf", "znyidqi", "aijper",
"euidr", "dftxkze", "vnppi", "splwifc", "fprgafl", "ixzaz", "mrhqtne", "dtkjsy", "dsmqrgy", "xfscz", "cymvmpu",
"vptkfdx", "zrgrjq", "mqvwsur", "hdtlw", "ugdpwun", "cvxitc", "vytvqg", "pmtpfz", "nfdtdt", "umvwjuc", "jouxc",
"qpypri", "pdhqp", "lmise", "wlsvcfg", "aqdkzcb", "qlrmrfz", "pbgoyi", "xmsskoh", "jjdye", "xvsdmq", "ymjeipy",
"igjyv", "uiojvmc", "uckoww", "grlnyeg", "hpglp", "omnnyy", "iiliir", "cnucbcx", "pcxvs", "hipad", "xmiltkj",
"oorwi", "qgoxjj", "jnmviqs", "wpleqn", "tudxw", "pcogem", "hgewaf", "niwfexy", "vcttgcb", "anjgovq",
"epgmscd", "mdtru", "xvapv", "rydjik", "kopppcr", "mjbsmu", "unxoakz", "ldpsw", "frksjr", "vyxxg", "yyydri",
"szidq", "qvbtd", "qratl", "xwfov", "bzhqyxl", "fskrtf", "pcpzmnv", "xuxwx", "vzbevnb", "ebaqz", "dbpuek",
"ooqwj", "gaimp", "coelqh", "bwuceq", "oxpfjt", "zrqyc", "rwllk", "pqunv", "ufbnn", "tbnjoz", "kkqmrxu",
"qyyrm", "hislf", "wyuck", "ubpre", "pdioi", "aryhv", "vdcxv", "rkgmaag", "czlzokw", "gtxuduz", "grpijx",
"qzrar", "qhues", "rmznt", "sxxmved", "onjzuwl", "atbjhip", "nrardl", "alrocy", "cfkip", "ihtbf", "pqdgm",
"hmokun", "dpghac", "otwml", "mnbzwa", "ehetlt", "rchvq", "lwjgywn", "lzdmjo", "nvhohdp", "tmshcpc", "gavjv",
"ycnkv", "uynzh", "bvpnfjq", "lfbem", "qberui", "vrmmhx", "wpbqtfq", "jujpx", "dujgkof", "hrpbso", "zhcdt",
"iybngyb", "rgeruza", "nesyxr", "cihgfe", "hjgskb", "zspxeqm", "inzrgyd", "crkjq", "iooshwp", "muvvj", "wakis",
"rowibwa", "qikwypf", "aportho", "pubcgx", "vqoqpfi", "rnpbri", "ussjv", "looor", "xkzvdv", "tstegg",
"zgiiokw", "rwvyaun", "mqqla", "asnqp", "nghuryl", "hlvhn", "ecuotnu", "judvbu", "xgvuw", "oeckn", "hdhttsg",
"hcyhu", "klbyjc", "tnrmqnc", "mjojxhi", "kvdet", "vbmevim", "oglrzs", "afbscdi", "zxrffti", "firzgmz",
"oenim", "wgpua", "asiep", "kyteq", "wpeneca", "qixmeoq", "zaofon", "csxxtr", "cpwmnl", "feylas", "idjuo",
"mrtpvta", "jjvmjy", "mnljocc", "lnvjleq", "oognud", "rbyneq", "rhvomm", "fldrkpk", "znvrp", "myswmz", "jiloe",
"juivjmo", "ylhbyzl", "ndmabkt", "sgdvlq", "pmnddmi", "utpuj", "kfisv", "nxfeell", "mxhgqd", "ccvdsdg",
"emtybo", "zmkylbt", "mmrpi", "dkwlgq", "iwlappb", "uimsrnu", "mkxaxmi", "tcvll", "njggal", "kmqud", "evgzlh",
"oaxizbp", "jiuej", "xknlp", "cyksydh", "gbixmz", "vtouyk", "sxjpkio", "qhubt", "kflvnb", "sjdfggl", "bxozyj",
"xekbh", "wtmcb", "xtapfco", "rnornl", "ursdpki", "waonim", "eibfyed", "zniinaz", "uyfohq", "qcaxlt",
"koyaapa", "pjuvbsi", "ecpdl", "ifaqwm", "yyumzc", "gvfngfp", "lttul", "flyza", "uasdlme", "oklhb", "wulkzzv",
"ziwsxo", "jqcxiu", "qdzrwgm", "zjdwy", "uumns", "emlnp", "irnrqp", "gqkza", "oynpcz", "yxyea", "zpamf",
"gyehxbv", "nplkhcc", "rxeekyo", "kecgp", "gseju", "nkisxqf", "vlyud", "fxxihhm", "yjgtml", "fehwpdi",
"wclnvyy", "lriwrc", "ikparv", "volfh", "ysphh", "szrvrv", "rqlmz", "jyqut", "fyftsj", "uvwfip", "rngwgm",
"mjwaz", "roehjki", "ploxokr", "yjbalp", "fspkq", "yfxrb", "kzulvk", "ordxp", "vdrrt", "wdiojwd", "ridzl",
"niykdvu", "whyycmn", "riwcma", "bkhgkrb", "nsine", "emgtgf", "zoymw", "ljtvhzb", "kfyfdma", "piygxdl",
"onfwgdf", "fwmkm", "vqbljay", "icife", "bxfli", "yeygr", "qenhgm", "mtxuckj", "kdcyx", "kwqhfcn", "ywkfy",
"prbpw", "pheyc", "kmnds", "cacqs", "kvekiqy", "bfvfhdy", "gxulp", "skmcra", "exomt", "lcxue", "mnvvday",
"rsddl", "gooegc", "udght", "doymnin", "ccdap", "wuive", "dyyln", "rynust", "luxabyg", "kdkkyyw", "vawqfsy",
"rmeswm", "rcxzyv", "clpowz", "pdntqm", "tvjkkmz", "iiclw", "nhudzen", "cybhu", "crwtw", "enypnh", "ygekg",
"hrjwqt", "peissge", "wangcy", "rbpoik", "raqulbf", "gyisnsj", "rgbqn", "lgvuzb", "djicf", "epnuu", "nsapc",
"voatgh", "yorfehc", "jxfttat", "wyuivb", "bwopl", "odwdsh", "anchkv", "sepvew", "qoxxmae", "bpvqnj", "sngfo",
"buoazou", "zhijssa", "janng", "uvdbd", "yfvkqo", "lcjii", "mvacvrz", "xztiar", "lpbtrqa", "ukbpdx", "okaqpgr",
"idgqlj", "ewglgo", "ruymhi", "pcidw", "bvuqj", "npzch", "yppyan", "oiguirj", "iijvwqj", "jvbwjys", "yjtunfc",
"iaikra", "oduhdgk", "ivixur", "ibcgai", "djzvcbx", "lmtsul", "lgnwzol", "wursq", "xsxbqwq", "jqvwnc",
"dcwwvtb", "vwybnr", "bughwjl", "rnelxb", "hmacv", "ufgdygl", "aabuat", "oynwask", "gnfjjf", "zipbq", "zxstn",
"jdrbprf", "jmkvny", "rblpql", "vykdj", "qaakyqw", "osbhddb", "avgldyy", "kvpoa", "fnqcliu", "zzlninw",
"drsal", "omswys", "hwqcpct", "ecraq", "fvhsbjq", "raauy", "pfmoz", "vvqvcm", "tbjqjun", "jcfbegq", "otiwup",
"axvvce", "dhpdnx", "pennr", "hvvmvzv", "binezl", "ygdmcuo", "ypwnqn", "aloxdv", "ucieh", "kovbtag", "rgfpaww",
"fpbftg", "spjowfr", "zridoy", "blwbbf", "evwlxi", "itbcz", "hgixuo", "qmoqmjb", "tkeeis", "pjiaq", "rbpje",
"ledoui", "ubecht", "mphdd", "uzswsbb", "ntsybr", "qmnijyp", "pqwawe", "ltytill", "dpnxy", "pkxqcol", "ayrdi",
"mycnd", "knotsn", "zvcrjl", "qwroblg", "vtrktey", "dzilezi", "wzkxg", "varqc", "xlpttyc", "xxqhnl", "jpxywa",
"kjdsh", "hdseebw", "bxqbp", "flazqce", "xrtab", "rupsfq", "asswer", "rhqof", "hjzdv", "addsgax", "cuahzjj",
"xwdilr", "osqgg", "pfhwv", "rqorah", "ggdlnv", "truvaoj", "jzuldwf", "mjddj", "vixtn", "eslxoaj", "cmoypm",
"jvvzs", "oqgxcc", "tptls", "wwgwbj", "tysuhg", "xbnqb", "iogjvg", "fbxdmr", "zdvsmx", "hiuja", "watrt",
"kjawab", "entxk", "jmnkaox", "zznsox", "asmzc", "soblvp", "quyxjw", "udrdc", "hyylvvw", "gzfwxuv", "jjqmjw",
"faegxbl", "lqjcg", "bzmruq", "bykuh", "miwhd", "ykgtwhk", "oyobzwi", "oltwpua", "ctulabr", "dwandd", "vhuhox",
"vtlknw", "ywvln", "qemqdeg", "akezvx", "kjmjpv", "vwuftx", "kreaxnj", "fvfop", "cxabs", "jfacbje", "eecnz",
"cmblit", "gfvpoq", "whywnh", "pghvx", "ohgkmf", "xxtiwd", "nkojni", "dlcicnp", "bwyvyyd", "gifup", "vgjfr",
"hhteifi", "kjhffq", "pawqaxl", "yozro", "slxluvd", "amqcquy", "vnnxkr", "wgdur", "rvawiu", "thcwnc", "cddut",
"vnrtrv", "fnfio", "nhvxe", "rfdqmj", "ucblh", "ccbnt", "lxckaoy", "fnwcbx", "gmdbiwt", "ypvwjy", "cbjazk",
"qmujnm", "nsqot", "lhcqt", "ijxcts", "nujrms", "itxel", "ghukr", "qpwitlr", "gcafqrn", "lcoho", "lfzab",
"vwhgceb", "vgsgy", "jrtgo", "ryxlz", "deoyq", "ybenly", "lyysca", "sodvazo", "hbnnoz", "ovgvda", "elwtjx",
"soydmn", "trdsi", "mwwjwo", "vupwj", "dszpcv", "kkhjdj", "ewmyo", "nmpeq", "oepldcq", "xttrgu", "wbcbxi",
"jakzk", "peukyw", "fvcqv", "xklwuu", "hsmva", "kslmkq", "azllbig", "stnzih", "wfyud", "ihauy", "cfxmj",
"pdyogwv", "dcqdpa", "xhusy", "jfpmpmm", "odeiiw", "ozyaer", "uykzvma", "tuaznxj", "kdnbdki", "syrnsem",
"fdysz", "hhrpo", "fglzfi", "vgcqzqm", "qhsjr", "bvboe", "dpfwpvg", "mvvry", "itnnr", "lgykbe", "pscow",
"mkrgeqv", "czffv", "apteht", "jeqixsx", "ksmbe", "zamivv", "vvmyo", "cwwoce", "sppubxc", "qaich", "nmbxr",
"tfkwfxi", "iakhezl", "fxujis", "fkwffe", "antaylq", "mmfgstq", "zxaacy", "zlswx", "pbqxil", "eupck",
"qzcxpbe", "rjalbzr", "wioagbq", "kreec", "zsdcuft", "rrdzb", "ocdlvq", "oxiroo", "zcxsqh", "wbrsi", "fqike",
"oskzupi", "thvof", "dicbyst", "iojwe", "hyfizq", "yoknhww", "nupiyyn", "ievah", "slcgmxg", "cnecpa", "lcwsoj",
"hnqsc", "ghipbi", "exobr", "nwpnq", "dmhbj", "amdbmwl", "xfbzovs", "puizvu", "yvsus", "ykysqg", "bgqdv",
"zgqbr", "zkjpkej", "crkot", "zciymk", "tleogn", "sayrmz", "elwma", "zugjva", "uifwsmw", "wstrg", "xbotd",
"hinsg", "qpgyoyp", "xzfocdy", "mbvuepb", "dtphufk", "cyapnt", "yyehhad", "ohdrd", "mlibm", "qzdfil",
"rdwszqx", "bzcbmyn", "uarjlg", "mtwpqmx", "nmagl", "cepniel", "tylvaa", "melhd", "jygeneg", "fdglfy",
"xcpciu", "ayrel", "bxceshv", "kspyg", "iclkaz", "ykbzt", "nrnkzo", "kxkto", "fabzszn", "edalls", "nilmh",
"wwawgnn", "gymbtx", "mzipa", "ajevx", "qppisv", "otqhsf", "ippxak", "bixnqd", "uqitwo", "soxcug", "loiscd",
"wqrjk", "rqntoa", "fzpxlp", "tuaob", "pyqqms", "krbzmmj", "aijqpfg", "nstqrbu", "wmtiahz", "joplby", "jyszxq",
"jnxtyhe", "lbvfv"]) == 14011
|
normal
|
{
"blob_id": "57de9a46dfbf33b117c2dfbb534a5020e019d520",
"index": 8513,
"step-1": "<mask token>\n\n\nclass Trie:\n\n def __init__(self):\n self.dicts = dict()\n\n def add(self, word):\n node = self.dicts\n for w in word:\n if w not in node:\n node[w] = dict()\n node = node[w]\n <mask token>\n\n\nclass Solution:\n\n def minimumLengthEncoding(self, words: List[str]) ->int:\n trie = Trie()\n ret = 0\n words.sort(key=lambda w: len(w), reverse=True)\n for word in words:\n if trie.search(word[::-1]):\n continue\n trie.add(word[::-1])\n ret += len(word) + 1\n return ret\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass KMP:\n <mask token>\n <mask token>\n\n\nclass Trie:\n\n def __init__(self):\n self.dicts = dict()\n\n def add(self, word):\n node = self.dicts\n for w in word:\n if w not in node:\n node[w] = dict()\n node = node[w]\n\n def search(self, word):\n node = self.dicts\n for w in word:\n if w not in node:\n return False\n node = node[w]\n return True\n\n\nclass Solution:\n\n def minimumLengthEncoding(self, words: List[str]) ->int:\n trie = Trie()\n ret = 0\n words.sort(key=lambda w: len(w), reverse=True)\n for word in words:\n if trie.search(word[::-1]):\n continue\n trie.add(word[::-1])\n ret += len(word) + 1\n return ret\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass KMP:\n\n def partial(self, pattern):\n \"\"\" Calculate partial match table: String -> [Int]\"\"\"\n ret = [0]\n for i in range(1, len(pattern)):\n j = ret[i - 1]\n while j > 0 and pattern[j] != pattern[i]:\n j = ret[j - 1]\n ret.append(j + 1 if pattern[j] == pattern[i] else j)\n return ret\n <mask token>\n\n\nclass Trie:\n\n def __init__(self):\n self.dicts = dict()\n\n def add(self, word):\n node = self.dicts\n for w in word:\n if w not in node:\n node[w] = dict()\n node = node[w]\n\n def search(self, word):\n node = self.dicts\n for w in word:\n if w not in node:\n return False\n node = node[w]\n return True\n\n\nclass Solution:\n\n def minimumLengthEncoding(self, words: List[str]) ->int:\n trie = Trie()\n ret = 0\n words.sort(key=lambda w: len(w), reverse=True)\n for word in words:\n if trie.search(word[::-1]):\n continue\n trie.add(word[::-1])\n ret += len(word) + 1\n return ret\n\n\n<mask token>\n",
"step-4": "<mask token>\n\n\nclass KMP:\n\n def partial(self, pattern):\n \"\"\" Calculate partial match table: String -> [Int]\"\"\"\n ret = [0]\n for i in range(1, len(pattern)):\n j = ret[i - 1]\n while j > 0 and pattern[j] != pattern[i]:\n j = ret[j - 1]\n ret.append(j + 1 if pattern[j] == pattern[i] else j)\n return ret\n\n def search(self, T, P):\n \"\"\"\n KMP search main algorithm: String -> String -> [Int]\n Return all the matching position of pattern string P in T\n \"\"\"\n partial, j = self.partial(P), 0\n for i in range(len(T)):\n while j > 0 and T[i] != P[j]:\n j = partial[j - 1]\n if T[i] == P[j]:\n j += 1\n if j == len(P):\n return i - (j - 1)\n return -1\n\n\nclass Trie:\n\n def __init__(self):\n self.dicts = dict()\n\n def add(self, word):\n node = self.dicts\n for w in word:\n if w not in node:\n node[w] = dict()\n node = node[w]\n\n def search(self, word):\n node = self.dicts\n for w in word:\n if w not in node:\n return False\n node = node[w]\n return True\n\n\nclass Solution:\n\n def minimumLengthEncoding(self, words: List[str]) ->int:\n trie = Trie()\n ret = 0\n words.sort(key=lambda w: len(w), reverse=True)\n for word in words:\n if trie.search(word[::-1]):\n continue\n trie.add(word[::-1])\n ret += len(word) + 1\n return ret\n\n\nif __name__ == '__main__':\n s = Solution()\n assert s.minimumLengthEncoding(['time', 'me', 'bell']) == 10\n assert s.minimumLengthEncoding(['ojtnj', 'uuydcho', 'dgsyp', 'dwxycpx',\n 'dpmvc', 'dvfhmb', 'flrxjjx', 'fwhdhvn', 'rgsakp', 'aiconf',\n 'nzacpk', 'sbxnaj', 'shway', 'rgrmz', 'rysudo', 'bzkioce',\n 'mqxkzvu', 'wyebk', 'tymoaz', 'mlmbg', 'djbmek', 'qfnme', 'khkiyae',\n 'tjdaxry', 'sqtcwz', 'ehnsai', 'jhncvrm', 'cxkzgrx', 'pummt',\n 'hzrpfcn', 'lkyqit', 'phpqdxw', 'vangm', 'wcjdgw', 'pxesvtn',\n 'mnqory', 'bdrzvh', 'brtzmo', 'chqgf', 'bipyxm', 'meoikg', 'ysyckk',\n 'ojayeiq', 'zrfbsb', 'yhuotea', 'crfbhq', 'tllycn', 'qxnzihf',\n 'avyawpz', 'bwsjym', 'myjozc', 'lbdksm', 'mctlt', 'dszowuw',\n 'syshm', 'xrvhhkn', 
'kgrcwfv', 'dwlajlf', 'yviuk', 'xegjj',\n 'spiczl', 'vfvomi', 'mgcujy', 'dqmzb', 'isrisgt', 'vdrtuah',\n 'vsyth', 'eoclef', 'poccek', 'cgafrlu', 'crbhpgk', 'sromv',\n 'xmvbca', 'gobra', 'ygvlq', 'pjvhe', 'tfweiso', 'cskuohg',\n 'eyalone', 'pobkak', 'nzpxn', 'lbcrws', 'uhtfe', 'eorth', 'showvu',\n 'hxsmb', 'jrggose', 'izifkb', 'oqwyf', 'mozmzj', 'ijwle', 'ggtqqqv',\n 'geevzj', 'meota', 'ifsse', 'kdtofm', 'swydhvf', 'tzjhqap', 'wqwwd',\n 'jlinnov', 'lmxkgeg', 'stbot', 'xrsfn', 'etoyctk', 'rygagm',\n 'vcnrf', 'zkdge', 'emqtscp', 'newqcyy', 'nnuus', 'exwsxbd', 'zstvl',\n 'lbkko', 'kygkyqq', 'oggji', 'xytbjo', 'mfbahk', 'ggoks', 'lmqewkl',\n 'qexhyqe', 'ogaogio', 'nzvbav', 'mdole', 'qvyks', 'gkupfu', 'dgmpn',\n 'ngrdrj', 'iitqvk', 'ipuiqb', 'ugxfea', 'ialkmv', 'hmgnx', 'aoyoj',\n 'fvzhjil', 'butrbp', 'dwhxnes', 'etkdwg', 'cjkghz', 'tovkq',\n 'mmxhv', 'jgcsn', 'hmictal', 'zxmnek', 'pcoeg', 'ntyqmlq',\n 'hfubhtg', 'ydjbv', 'xnwlqto', 'hatgi', 'bsaczd', 'pokwk',\n 'arxlula', 'zjtqlk', 'ocfxup', 'nsnqjc', 'xdcsopi', 'iqxyxp',\n 'xfmtpvm', 'bqtgcf', 'wboycn', 'aoeda', 'uowqdgj', 'rzzzx', 'liucs',\n 'ejzxz', 'qmlehsh', 'igrbmon', 'dpmkbon', 'pmayh', 'nujdwdw',\n 'awdgo', 'ijgkzk', 'inhee', 'jzdtv', 'adhauh', 'grtmbp', 'qndbvw',\n 'zprrw', 'mpqieq', 'jzmzeuu', 'fcvftqs', 'qxzxqy', 'lidguzz',\n 'eazwd', 'zjhfsz', 'zsnzefh', 'mnckfg', 'zjgtq', 'ckyxlif', 'fznfo',\n 'jegnof', 'lzwyzb', 'ozivfio', 'igkclsa', 'bebzn', 'bitsggm',\n 'lrnwin', 'hjnnzr', 'idvoirn', 'dgile', 'vfngh', 'xbmur', 'rqaftt',\n 'wjwwwxs', 'btreou', 'gjsycg', 'pvsiylz', 'ccxzgdf', 'excrrrr',\n 'fiesr', 'jdioj', 'uzwsc', 'odrlcoy', 'hcsit', 'ptwfprh', 'sbqry',\n 'kffvy', 'ejeawbp', 'omvcc', 'iqgxqlt', 'edsuu', 'xnbue', 'qfbcx',\n 'fzlmbkl', 'wrrcueb', 'mmqispp', 'nknilwd', 'dewuhju', 'hmdqlxy',\n 'vjxgg', 'lkuexo', 'dzvfscm', 'voulbs', 'uevoqgq', 'kmhwu',\n 'oglzllg', 'torhihn', 'fhuqzc', 'mmcfhb', 'woyayma', 'uznsvre',\n 'mmxed', 'aoskwg', 'xrosbm', 'hpyrgh', 'tghwbwh', 'hcwzn',\n 'iepeftj', 'judij', 'kudbk', 
'jonpv', 'lywck', 'rxelz', 'bgifz',\n 'mehbxq', 'fmqnz', 'sqrmzj', 'iqqjzex', 'qioliz', 'kjizbf',\n 'lgdcffc', 'pfgmcr', 'trdabul', 'vlqjdnc', 'jjvbxe', 'fqlayw',\n 'ilbhtyq', 'saawulw', 'gxysrb', 'kighql', 'eceapr', 'kztbcww',\n 'jedkoy', 'dxpcaga', 'ndacphe', 'rcoit', 'ywgcnxg', 'klipfup',\n 'bddws', 'jwyof', 'lrfwgo', 'bediwuf', 'ujakh', 'ppima', 'xzhwvm',\n 'guzmsqt', 'ffbliq', 'adjmynm', 'akabzn', 'inmykju', 'vlcjyv',\n 'orquepg', 'tufrk', 'vqpjymm', 'lvuab', 'qzxav', 'ekcmu', 'uqtuhie',\n 'kfvtgf', 'nklwjo', 'ujxlfpl', 'zobfpq', 'eignijd', 'ythctg',\n 'artllm', 'wodhh', 'tzpwszq', 'njdqegg', 'hzrqib', 'zvoxtfd',\n 'htboem', 'axjuix', 'bvmvm', 'jbnum', 'bxdth', 'atejt', 'gqsqtnk',\n 'fykrjbp', 'ldyhonr', 'wcuoj', 'upphc', 'agydg', 'cjmwk', 'rhxbqh',\n 'tpgozdd', 'qyqoy', 'zjqutw', 'qoohqny', 'nsiacwz', 'xupin',\n 'criuvs', 'eswjeft', 'pdmevn', 'zvogq', 'lrrvo', 'qhfqqpw',\n 'ktudfg', 'ijvmi', 'neyjjdx', 'rllpi', 'vllvaa', 'esebtu', 'jyhcrh',\n 'otgmr', 'oudvyxj', 'pmszy', 'opeed', 'gicni', 'mnuzn', 'mjbfpod',\n 'sqwgxu', 'dwniwz', 'wmbmmv', 'lyafuy', 'zmvlz', 'kopxzuh',\n 'urcbbiy', 'guhco', 'nerjm', 'lpdxc', 'hxmjzz', 'hynagc', 'iyxeczi',\n 'bdfxmoz', 'yybnpqd', 'jvgnb', 'oquqem', 'fmclmz', 'dmkhf',\n 'zxbjpp', 'qpxgcir', 'iecvjm', 'gtkne', 'lgtqrbc', 'gilbn', 'mcxsg',\n 'ncwbhn', 'wkriiq', 'zhsir', 'ptkkmw', 'jcbpkrm', 'vbefo', 'vmbcd',\n 'vqffj', 'fhqzjt', 'nryuh', 'vmclav', 'cjyggm', 'sanev', 'rrdocz',\n 'zqdexbs', 'jrxstt', 'pyhcesj', 'aagghyr', 'cyemjrb', 'aliohf',\n 'qaslg', 'pnyjzxz', 'pehnvi', 'suhuw', 'twopabr', 'sapqoc', 'mckrh',\n 'nzlgrxt', 'aqpobnu', 'pirbjgb', 'plzlj', 'raylxpu', 'gyasfrh',\n 'urjfxux', 'xjbwau', 'iupknn', 'vhxnc', 'dnbjop', 'vrxhwmd',\n 'vjsmkh', 'rfmqids', 'smaiwt', 'vkyfo', 'bjqyxc', 'rbbbp', 'dlkzg',\n 'dwvdwu', 'prulzh', 'bavge', 'ehhrz', 'xxjqk', 'pxopmp', 'okmkmb',\n 'slcznpp', 'nvqlb', 'jalrk', 'parwlcd', 'anbxo', 'oqcxyzo',\n 'fjhrdjh', 'pgvnwfe', 'yfjyvh', 'quvszjm', 'xyiig', 'xtncqv',\n 'svsix', 'jvpdnh', 'owuiv', 
'bsrugtt', 'rmvggws', 'lmdql', 'kvmvd',\n 'xrpmaw', 'ssnxyb', 'oworq', 'rmmpuya', 'rijpih', 'aelazka',\n 'kncksqx', 'yvtdiy', 'epato', 'pbbamj', 'fejsw', 'zgsru', 'ekwrre',\n 'zqben', 'vugxi', 'fvcsdp', 'rujcews', 'asqxya', 'worjlsd',\n 'xggakg', 'kzfpot', 'haqon', 'ypqxzz', 'mmkzwt', 'bdhif', 'exzhv',\n 'srnklzh', 'hlrunb', 'dwfyke', 'fvgbtdm', 'aeutp', 'czhefx',\n 'tegfw', 'jkxpsb', 'gxkfkw', 'exvntd', 'gvuti', 'jdmly', 'owaqhw',\n 'fopuxzv', 'edrvil', 'biszwgv', 'vgckzd', 'fqdxn', 'qktdf',\n 'hpgwrk', 'gpxiips', 'vxnlab', 'yylxz', 'hsuscch', 'bhivaf',\n 'wzrwtc', 'ebplv', 'yzxykou', 'mxlssom', 'evghv', 'hksleg',\n 'shybau', 'zeyqa', 'tljqka', 'axfkec', 'fatdj', 'janlkcc',\n 'sjorbra', 'jplge', 'oazzot', 'qbgtncn', 'ozlil', 'stohadq',\n 'rvpuwn', 'oqwpl', 'byftgi', 'ubuusl', 'fkogr', 'bybdyhj',\n 'vinyuzs', 'ivsqvz', 'vmnae', 'gckxw', 'rozbe', 'glvxwj', 'rcgicu',\n 'xmvbd', 'itycsry', 'llmwrs', 'fuqth', 'styrrwl', 'wsseuln',\n 'xwflcli', 'muxgz', 'ypmbboh', 'rpmvnep', 'wjvvnv', 'arjnw',\n 'toauwc', 'ltjxqrl', 'basffd', 'clxozwd', 'glmrv', 'iejgfj',\n 'cvkoj', 'wotjf', 'mqucec', 'xalgemc', 'hgimkh', 'golvfq',\n 'fuqpmak', 'mhpcp', 'pxoibt', 'ledqa', 'guzbyr', 'ztvbeka', 'racdp',\n 'krsngra', 'aaiknz', 'bhoobyc', 'xibbe', 'yohepxk', 'eclevs',\n 'ldliwcm', 'qatvlk', 'eiypbw', 'vxvtwa', 'nkdwsej', 'ftmyvp',\n 'gpthye', 'gazwoi', 'zzgipon', 'cithg', 'wpabujl', 'jhezlnb',\n 'vqqaxfg', 'kvpbk', 'vggjemp', 'owylv', 'lgwtfpg', 'jjqvfm',\n 'xbhga', 'tulvfv', 'sefuo', 'hbysv', 'ozopepd', 'awyrifd', 'pnudwx',\n 'vreje', 'zhpgw', 'qygbf', 'tvbrvy', 'zzmcw', 'cznee', 'deuzxt',\n 'qfppjvi', 'ilkps', 'ydwhg', 'krwkxzu', 'mnsidg', 'rkxyyr', 'ajkqz',\n 'xtmom', 'vqocor', 'fympcl', 'yyleyzy', 'jjvzhrn', 'kpmxvuz',\n 'txoeqlx', 'lhhmn', 'chzgpf', 'ncnjxle', 'ihxrg', 'feqixq',\n 'lkfhcar', 'hfnsh', 'bifczy', 'umknat', 'yrhgkh', 'mgpcu',\n 'qotukst', 'yqlmfq', 'ttcdp', 'xnjjzm', 'cukbr', 'hjhjb', 'iikfcsr',\n 'nsqbnnz', 'dauygf', 'cmydq', 'lfnhqnl', 'ppqgs', 'hscbfug',\n 'ohzisud', 
'opspdkv', 'aauxbop', 'wpkhzo', 'sxbsgu', 'tajrv',\n 'ololy', 'mxmus', 'vizvxv', 'osaqz', 'rxygkn', 'mrzqlf', 'zrriyxb',\n 'ufroe', 'bajozg', 'atpsu', 'uhgauzu', 'tffdw', 'mdjulde', 'rbrmy',\n 'jhkqvwl', 'gzsultq', 'nkbfi', 'xtvwh', 'dryzcv', 'emaxuk',\n 'zucvutb', 'jdduyk', 'bjdin', 'loicuq', 'qhjjb', 'rgfjbq', 'mphnk',\n 'lxvceyx', 'zeoxb', 'fxhnxu', 'qpbipe', 'ophwp', 'wiioer', 'quchwj',\n 'pouxunw', 'bloxgg', 'xbsma', 'dtwew', 'xstorn', 'qfrfkz',\n 'gxusbsn', 'dhnxd', 'mhstbs', 'hekbtu', 'wvrrjw', 'yeiwd',\n 'patplsx', 'qmyiyi', 'mowboj', 'iskyd', 'bqhjj', 'povppk', 'vthpwx',\n 'uuydaw', 'rduxvez', 'vmcww', 'ylruvph', 'ymqosp', 'wzcvohg',\n 'lhepwta', 'bckhc', 'oiyyt', 'wqzfv', 'uduec', 'lkkbtzl', 'prvpbo',\n 'jrwstii', 'ijztoo', 'qwwth', 'vqzqiun', 'krnjp', 'zyanpiw',\n 'ojhjhvg', 'lohmb', 'thqtf', 'reptzv', 'zgkyq', 'lhkvy', 'cmjwl',\n 'fmilgpw', 'jrfawz', 'vrtzd', 'ezgfl', 'plzng', 'zidzso', 'civavlg',\n 'vtwopu', 'ljhckxo', 'nuydt', 'qembl', 'fiwrre', 'gfrgi', 'gzegiq',\n 'mltlqo', 'pcett', 'snbsc', 'msibcqn', 'beacrhz', 'vsycjt', 'gjqji',\n 'smcegol', 'zregkp', 'smcazoj', 'dziqad', 'jpuwp', 'hnlztac',\n 'vduitco', 'wyencad', 'bkdnnqo', 'cabzyg', 'mgpcwr', 'fxgvkxt',\n 'wlkcrdd', 'bhmhsy', 'gqcctjc', 'atafpt', 'vdzhmcg', 'ighxj',\n 'gfqpale', 'fohbrtj', 'mfpsgt', 'tarjocf', 'gyycb', 'qvqfryl',\n 'jpwowwc', 'jcgcg', 'gmrjze', 'nfptxq', 'hmjhxge', 'ieelj',\n 'suvkgr', 'nwjxe', 'tkepqm', 'extnpmq', 'rxzdvf', 'relzaa',\n 'hfhgaq', 'lmihlz', 'pacocq', 'dclxr', 'oknoem', 'pbpnnd',\n 'nleerfl', 'tvytymc', 'aamfnl', 'ufdnq', 'bxyzvyh', 'vksvout',\n 'lohxhf', 'sskgn', 'aawbv', 'hrvhx', 'wvoqf', 'vxkvh', 'oqany',\n 'bcmyd', 'epdddqn', 'zrlej', 'bchaf', 'hmftii', 'mefcrz', 'wbxvc',\n 'ewwnldf', 'cqecxgh', 'cnwvdmk', 'vetrw', 'zmogwov', 'lshlzpe',\n 'lijay', 'tcdqg', 'xavqixd', 'yjkhtsl', 'myjvow', 'cgthhd', 'taaii',\n 'iuuegk', 'lcypmle', 'wesrit', 'tybco', 'nhxysw', 'awkrj', 'jcmqa',\n 'porvo', 'nrypriu', 'vznnevp', 'hzklwi', 'vapuxh', 'wyfkn',\n 'albemu', 'ttfdbl', 
'dbqrjv', 'cxals', 'qzitwf', 'ysunur', 'llsefy',\n 'cghfzji', 'jboaa', 'emhlkw', 'khhmgha', 'twlxgjz', 'pyujor',\n 'ozcax', 'fetvovo', 'mdhrrd', 'qdhdne', 'fiuvw', 'ebyxh', 'ldaothh',\n 'vwyjf', 'yjyljlu', 'ivroqg', 'qvpeyec', 'eemsdra', 'wavgeqk',\n 'bjejrqg', 'mdjimoz', 'fgopy', 'lgwodr', 'cunvszh', 'wiver',\n 'ghmog', 'jzgfyk', 'vxlbx', 'kvgbtn', 'cunorte', 'mtesdc', 'zdzmqu',\n 'pigik', 'smruadg', 'czjxlt', 'kukgaok', 'tsldpqq', 'luomo',\n 'ezbcvdc', 'tfetwes', 'uopzf', 'wsvezkw', 'wrnlvbx', 'bpqungd',\n 'jqnnof', 'rqhiomi', 'voulqb', 'ouspxn', 'chngpz', 'fbogfcv',\n 'nqhunxo', 'rydbke', 'ewduo', 'suqqwup', 'oxzfxj', 'kuwfwm',\n 'euiics', 'mvftoau', 'vstfbm', 'vnmtoo', 'muicf', 'bjbskxb',\n 'knbomlf', 'enrbtfk', 'hnaqe', 'vxzsr', 'gkqma', 'qygmn', 'ztkybmb',\n 'injggpk', 'enqrgdk', 'rkgoct', 'tgaiu', 'dnknoxk', 'iwuou',\n 'oxanccl', 'xestej', 'ekrqq', 'xbwhz', 'jkdvxfh', 'oybaay',\n 'afyhci', 'papffjq', 'bdppssw', 'qwyvjx', 'xmnnosl', 'kvqzjl',\n 'wcwii', 'ygfvt', 'tpabbht', 'kjmaq', 'duschjz', 'gguiof', 'wgfhve',\n 'joqmfjq', 'smqfd', 'ynlovlz', 'sgrzum', 'bobmux', 'dcppi',\n 'isdjrwl', 'lbevb', 'efqsirq', 'hlgfql', 'enmemlb', 'dbmfk',\n 'ibfpzm', 'rtdnooq', 'yicdq', 'xadul', 'dxibxzi', 'yyxnj',\n 'jhsdzxw', 'thltbi', 'kwhreyi', 'hrocoa', 'fnaalbd', 'vnwona',\n 'nnonm', 'naqaf', 'xgzzies', 'uhruynk', 'kgadfx', 'hyohzbd',\n 'hnajx', 'yipzh', 'ezdxaet', 'xbzppoz', 'rwnewxz', 'hlcbkmb',\n 'znyhu', 'zsqtpkr', 'gmyxr', 'rphyvo', 'bgjuz', 'nulpv', 'eejfoso',\n 'xmwcnes', 'xxxxnpe', 'jezkk', 'idfsxrw', 'qgzjtf', 'arpzpo',\n 'hxsanlt', 'emvotcb', 'sknzhvg', 'icitca', 'ivhdln', 'sqilerz',\n 'ndigw', 'bcsre', 'mibbep', 'zsczom', 'cgghjbb', 'fkylfgt',\n 'bvzofs', 'mefsng', 'bispbza', 'tsosgy', 'xopalrw', 'wserf',\n 'jbmlz', 'xidxny', 'ffmpjos', 'vddwxmd', 'netnsg', 'kgevsp',\n 'pguuv', 'cwisp', 'slxiyb', 'dmwaguc', 'jobwusu', 'uytcqrv',\n 'hzhsy', 'zrlsdd', 'xhxah', 'rxzij', 'zwdgy', 'ygmvkz', 'drkzbo',\n 'qpsal', 'tpxvl', 'lfmfl', 'sayjvlh', 'rdamym', 'ycuzd', 
'zkycu',\n 'hdesec', 'unequk', 'lpkdid', 'vorxls', 'admsdop', 'rqnvkyg',\n 'krnqqtb', 'rxfms', 'xfthd', 'pxjbk', 'gpslrg', 'rwziwef',\n 'usxgqvz', 'baxxye', 'ocrkkrw', 'lrlgsp', 'ceyctg', 'rniml',\n 'vavug', 'jgircl', 'jrpnmsa', 'rywvlfg', 'prxnys', 'fkzmknn',\n 'ooelc', 'btvfs', 'yqepuvw', 'tmmmb', 'qmpzexb', 'zjckjvd',\n 'aieytbb', 'oafqq', 'szrcyh', 'czrxgae', 'ifkte', 'hfgajox',\n 'pwpnkqq', 'yqphogn', 'xuwthrd', 'mpcmy', 'qitdoa', 'avlzfrh',\n 'ywpip', 'dgeki', 'fgbnx', 'tyofu', 'xziqzj', 'qxzvqz', 'vtsqk',\n 'ipkld', 'yfhim', 'ebaegdc', 'ubhrh', 'ldejv', 'mtflwy', 'ocpyj',\n 'yopgqs', 'fkjxxd', 'njnnwr', 'nylkeb', 'taymdqv', 'ekpznq',\n 'cbzobmg', 'bucdds', 'qjozu', 'uvpghor', 'obhnu', 'ljkxbg',\n 'uqrxjtf', 'xwbxiw', 'oxsmcg', 'spchdd', 'pcuitj', 'faidq', 'tybmy',\n 'uygiyp', 'qloizj', 'cafgmy', 'smetd', 'kwcwb', 'tdabxf', 'fpmrc',\n 'lfjujn', 'vvmvex', 'mnsgdc', 'enjlgsw', 'ohwcg', 'kxjdaup',\n 'rotjarp', 'aovdoq', 'oviwq', 'qwaxs', 'bmazco', 'plcljsv',\n 'yytjhl', 'vgwjm', 'drnue', 'vqjgf', 'uqlsfy', 'bmqmfp', 'lkauwna',\n 'ozmqce', 'heunaxr', 'zaffbj', 'arbek', 'qjnllw', 'fdkhlz',\n 'wgmbwh', 'yceqag', 'ltjjq', 'yurggfw', 'puaafsl', 'tjiqkyt',\n 'yuzub', 'ytmrfq', 'ommmu', 'ipknn', 'iubnuab', 'dzthvc', 'zjbzpew',\n 'dcooev', 'pjydqcf', 'zuojlzy', 'zwjyfc', 'spmac', 'dfkbnz',\n 'fzriie', 'asusog', 'hdodx', 'drjpo', 'ddyif', 'chabv', 'ebvkwrr',\n 'burdjl', 'jjddi', 'dljzkye', 'samyg', 'zwgxcq', 'xtratwo', 'qfopz',\n 'xvlaw', 'laage', 'btdium', 'vzlnzt', 'kmvbzkq', 'kctobsx',\n 'kazbelu', 'yxdwrk', 'eslvjc', 'nhsdmvs', 'zuxqcc', 'hqtxovn',\n 'zrbdai', 'fgjxs', 'txecvio', 'kjxlq', 'dkuxss', 'mkbevn', 'pzmdqc',\n 'ihyia', 'atsub', 'twytus', 'nzooxj', 'qwuoly', 'fdoigo', 'zukhlh',\n 'mugeaxt', 'qqsfyls', 'qqtql', 'wrvphcx', 'nzjfhx', 'uequtk',\n 'fxuto', 'qnast', 'nveys', 'ltbrcth', 'toctdib', 'fbpnh', 'umxfgn',\n 'zvjuta', 'yeron', 'qzvswqk', 'gbctr', 'ryryz', 'zieknd', 'zcsna',\n 'jrhak', 'zfxqsj', 'urlba', 'lbozqf', 'yfcjaa', 'hazgy', 'gmmfzyz',\n 'zjvkyc', 
'rvfdcf', 'daitab', 'hcxqgum', 'qwakp', 'ltbsjwo',\n 'pqqtygx', 'upxcxao', 'qylot', 'lmxqc', 'dwzcd', 'tjccm', 'mqcpap',\n 'wgxqtr', 'ivycvxy', 'wdykg', 'snvqka', 'jxtvtsb', 'jnyowsq',\n 'iwfuoig', 'cuoixhu', 'fzwalg', 'djhrar', 'sjmahk', 'dyusf',\n 'wrxqvdi', 'ftytlor', 'jsjbv', 'vjbebg', 'agvsn', 'vvmpgm',\n 'gsgjopk', 'vbqvhy', 'afopf', 'zybfuz', 'aqsgc', 'ytrjsvn',\n 'wlhdfr', 'vdhvl', 'jrlvr', 'cscxwf', 'yhgbew', 'wupbl', 'ssuhyvv',\n 'bhcirzk', 'oykwk', 'ijbto', 'qsnpgw', 'otwzage', 'ytqzh', 'rgwow',\n 'bvhgkwh', 'fvawxie', 'fllxw', 'gfcqf', 'scoqb', 'qubrq', 'gdxjtp',\n 'ahrpck', 'awnlgi', 'cmehsyp', 'dwmytpy', 'firyeq', 'oohwhr',\n 'caelk', 'mqemvs', 'qflkzi', 'tfpibll', 'ybhzd', 'ctsxri', 'yurocj',\n 'dnlnl', 'ydmdva', 'xkaotl', 'xovax', 'ypynrqp', 'kwfzw', 'fbgsmrc',\n 'tutime', 'rcugul', 'cvewno', 'typhbpa', 'wazew', 'flzfs', 'wxxbza',\n 'ogjfkl', 'vjlebet', 'imbubm', 'xinyncy', 'dqmxfy', 'buhagzh',\n 'jjadpos', 'gejyz', 'gxshqk', 'wkwrs', 'dqeriqo', 'dmixr', 'bysjih',\n 'aoloq', 'ddwhsxs', 'nteqv', 'cqagf', 'ditsrn', 'wfxgl', 'jwjqb',\n 'rvkxj', 'rxapr', 'yrlkip', 'npquasb', 'nvezlr', 'gmhchcx',\n 'lodfihi', 'dheypxa', 'plzjykh', 'qopsthg', 'zsnes', 'raongg',\n 'zrpnac', 'tzmtltj', 'jsecdn', 'rzudh', 'hkcyic', 'xsxmw',\n 'reeuwpn', 'grkwrag', 'gvzzbsq', 'lrfta', 'aqyvbkj', 'ytgfu',\n 'wcmvd', 'olnvfi', 'hhgmhb', 'kojmepr', 'wpohl', 'szhgg', 'hymiblu',\n 'lkwjr', 'zulqpz', 'sdcqjo', 'olgsgez', 'lxkpqci', 'yxcgn', 'gmvex',\n 'fskpppe', 'utzto', 'axncvp', 'lcyahba', 'ydeae', 'zvzar',\n 'ghfkkqv', 'ryrpg', 'gucpbq', 'reofjz', 'cdnoo', 'dchhh', 'byiwd',\n 'cqbhok', 'ksfnoa', 'xsmmlr', 'qyvdfqh', 'dzshj', 'bpifnzh',\n 'uxmoml', 'jdxvojf', 'ihfll', 'vwesfof', 'zynnpb', 'fwzra',\n 'rxlgww', 'vkmjd', 'hcjgzt', 'mkapfl', 'ffjqlf', 'wulaebc',\n 'gurramv', 'tufkzai', 'bxprqek', 'nkohv', 'abgfwyl', 'slslg',\n 'wirsnh', 'pykvuh', 'fdrwk', 'gtmgsxe', 'dxsaab', 'lqiryty',\n 'aoezg', 'tzhugcg', 'uoarf', 'dwhsv', 'rjiuoi', 'ycgcdnf', 'rtfmwz',\n 'amkjc', 'woogtdi', 
'deprx', 'ucknu', 'womfm', 'xdeev', 'qapxpuu',\n 'ngulnk', 'fgtxyf', 'hnyabid', 'cilmy', 'wrsewtf', 'luvtmo',\n 'wftuh', 'ifoeeqp', 'dtfdhhl', 'rwnburg', 'fohkkul', 'frqqi',\n 'gsrcyc', 'teuync', 'dvpvak', 'daqjki', 'kksscp', 'somsde',\n 'tyfvck', 'ftfekl', 'ahncv', 'yvosm', 'qgllvg', 'ylfwv', 'jenqns',\n 'lqovrnm', 'iyger', 'nfvtsv', 'bknxmqj', 'pfzybdr', 'hqjol',\n 'chlpk', 'etgrtqa', 'msuxdx', 'vnoatf', 'ypdzomn', 'vsshmg',\n 'rfkipq', 'jvpbiz', 'vbskd', 'edsoixj', 'uowim', 'hqtsj', 'inbsxal',\n 'ookrv', 'ipotdnk', 'kmazqd', 'jpfghb', 'gvmnnpv', 'juvwa',\n 'xtkvzw', 'ejqcl', 'ebgcnt', 'ztuyu', 'dlzthw', 'zzipe', 'iaxwdxy',\n 'htynwkc', 'lefbq', 'pizfr', 'vttrsv', 'oagak', 'eqlrom', 'vttefg',\n 'dsrmk', 'oekbe', 'cvugzk', 'diwvz', 'gxmfob', 'vjowzm', 'mjpop',\n 'uznhz', 'kqvjwug', 'wjqvxfg', 'jbpwezu', 'wsckdx', 'slqfomn',\n 'omuxk', 'zlgblso', 'kvitoq', 'dmafq', 'djxmzk', 'pjqfegq',\n 'yjrttas', 'siakcx', 'iutiqk', 'nwfdj', 'gbgtazk', 'cpqtf',\n 'panmlr', 'aqubhsg', 'iwdim', 'nqetym', 'mwazh', 'thyhy', 'ydtxan',\n 'xfoin', 'lsosc', 'esznfa', 'xgdisi', 'flvbzh', 'mpltx', 'iwjpsqp',\n 'udfycf', 'rntmc', 'ltflwu', 'wkgbaw', 'bcuzt', 'hejxuhb', 'lguohe',\n 'klnhb', 'mjump', 'avcwrol', 'yrcqlc', 'ihxul', 'avajh', 'gtpauet',\n 'iemzk', 'rfdub', 'gqnbk', 'cfcmg', 'iobyh', 'iruuapf', 'tyifwt',\n 'sbdtp', 'mngcpmb', 'oaqpolm', 'mmimmh', 'gxknadi', 'bmxhuu',\n 'ulyoa', 'keidy', 'vsnfk', 'cnnnfty', 'pkajm', 'ddgeecb', 'prxidqd',\n 'wmenvhd', 'akjcqo', 'tnekfef', 'ipvsi', 'pzjwq', 'wmmct',\n 'erdjnuf', 'vgeaqs', 'nlbdx', 'dpvbe', 'dgeqz', 'aiguzh', 'akawppx',\n 'tykrjcs', 'gvavo', 'hkyle', 'yhedx', 'xzqcg', 'gzdxt', 'csssbk',\n 'tmekrmv', 'lfsgo', 'iizahz', 'aszfd', 'aybqnsl', 'vadwxsl',\n 'ulmiii', 'xaxdugp', 'sfnnsbg', 'dkyruh', 'qhpqu', 'amesjd',\n 'evjuki', 'vtqjw', 'aoabp', 'qnsuhe', 'bplbx', 'fdqok', 'ozkhgib',\n 'cggwzys', 'nbknjay', 'ooambw', 'evmvegf', 'htdlxik', 'kahcume',\n 'bojpn', 'bhipie', 'hdyjslw', 'pbkkq', 'qwszl', 'fgkbzsd', 'hejdx',\n 'vmcfhgx', 
'puzlmmm', 'meffil', 'boakbiz', 'eczot', 'fvkkit',\n 'jebfx', 'umvkjg', 'uikgs', 'rycgpf', 'rfmfgmy', 'nveho', 'bgywqen',\n 'gepfma', 'vquyq', 'wcercbw', 'wbpjkxc', 'rqloeda', 'omclokx',\n 'hvotwp', 'tvqfxxu', 'qrtghk', 'hggme', 'arnmfnt', 'cxprj', 'rspdt',\n 'hlgfq', 'dmqel', 'pcerxk', 'ptqjc', 'wzreko', 'kahks', 'xjnzo',\n 'xzzye', 'xbdeu', 'koiwkv', 'jlwkkjr', 'xzdixoc', 'xeedvrm',\n 'mrtnhqi', 'jaeann', 'mvubp', 'olklqf', 'retbgcj', 'qxxlhh',\n 'cqyyoy', 'ngwikg', 'qijte', 'sjzck', 'zkmkx', 'ongtzf', 'tanow',\n 'smgntvq', 'urfgt', 'xwcroa', 'kadcpd', 'cxhgo', 'walku', 'kvvcsyt',\n 'elwmuxk', 'bfphtm', 'vzeumuq', 'sknvev', 'vbsnfd', 'grmbg',\n 'vjahwt', 'dmcbmn', 'smubz', 'jobbfcv', 'ujlkm', 'lcthh', 'bauuqdu',\n 'kjgzgtq', 'gicjz', 'nugbax', 'kbnjfiu', 'sqfpein', 'obbgfww',\n 'ykggxjx', 'irnmog', 'xniuv', 'rqiwycq', 'hzlgyu', 'yjtrttv',\n 'satym', 'dgqhlkk', 'rghal', 'tbekx', 'kkwmo', 'eahwhks', 'bpvmbur',\n 'sqtgkj', 'khboz', 'enefr', 'vkzqvt', 'wfruavu', 'ninomu',\n 'ypktaoa', 'mlpmoit', 'fxyhjfp', 'fgnpp', 'txieja', 'dprnj',\n 'bgyrp', 'zsqwqrw', 'stqzki', 'kwiayb', 'ulbsn', 'aetje', 'vwzbb',\n 'tedwyqs', 'cymiruy', 'jigpoqx', 'ypuqsc', 'weletu', 'gvibea',\n 'chhuldm', 'baylv', 'wdhovo', 'imfqu', 'meodnsk', 'jhlckqw',\n 'jolyfh', 'jsfkrhr', 'tnbfzvs', 'egcfht', 'qnzmyr', 'owtrqu',\n 'oqaqu', 'xftys', 'goxfftm', 'sgbnp', 'bhfvaz', 'gospa', 'jwzlvwk',\n 'lqncoqd', 'xxizglc', 'bwffm', 'mhpggzr', 'kdaoewx', 'anviou',\n 'mqiij', 'wkskpn', 'enougdh', 'vldnn', 'gbfgz', 'ejmbh', 'qsdrvsx',\n 'mrvbz', 'cqlufpf', 'kbgjlu', 'njgna', 'admrmk', 'pwwsc', 'gxkot',\n 'pdjwh', 'ejwxt', 'bpaxufv', 'iwjzs', 'xxfsg', 'vuhgh', 'srytgb',\n 'yesvlux', 'tggnch', 'cgnbb', 'fbzbx', 'aomoqf', 'zkrvrjg', 'ueaoz',\n 'dppacnl', 'ewovhxz', 'kbvee', 'ixeeb', 'gwgoqm', 'hlwlxe',\n 'fpmkrk', 'wzjsr', 'ispwe', 'garofu', 'jcmpec', 'tggeo', 'yzdeo',\n 'axpmln', 'zhnlhck', 'duyqcn', 'tpqwqi', 'jvmaj', 'bisgoy',\n 'mpwmurb', 'olqla', 'ecapwan', 'kcpxn', 'xcapin', 'ooctk', 'sgqql',\n 'vcyyjxf', 
'ejyom', 'jsgtha', 'logxnjg', 'nypadhj', 'dprmk',\n 'cqkuzb', 'gratv', 'tgkjgu', 'fttcafm', 'tpryi', 'ubbhw', 'uwcuyn',\n 'zkgohs', 'snfesz', 'ifrex', 'tkbfz', 'fvvkp', 'otjiq', 'lgomjjv',\n 'ertracf', 'bregu', 'kkbizb', 'hyhvn', 'zjcnxfl', 'mceskuj',\n 'lmupdq', 'zdzqzgo', 'yorppew', 'fpwtjd', 'dxvyzt', 'bbnnu',\n 'pkycae', 'ucvapn', 'dijmkb', 'nvwwpr', 'bufkw', 'zhono', 'vayxf',\n 'hlfwkev', 'klkvkj', 'yzgpwg', 'lcbqr', 'tkkfi', 'pcgljx', 'bhduxu',\n 'rgfipts', 'hkjbrr', 'fobvy', 'wqmqhxo', 'yjgvypg', 'ehgoizl',\n 'ipiibzh', 'aqxbxtx', 'lrtin', 'fyyuypr', 'pyrocgm', 'kwqbg',\n 'ukccw', 'wgsbpvx', 'pcoivrv', 'okhxaba', 'bbuaibf', 'ccvfm',\n 'phpst', 'yxtqiz', 'cdfbo', 'sijfljn', 'gdlhn', 'bqmbced', 'tiejf',\n 'aurqer', 'olmyd', 'prctay', 'lwflhi', 'bbehvta', 'oxoda', 'lklyc',\n 'rzedhp', 'kairil', 'envan', 'wdcwfk', 'xoroddb', 'womrlr',\n 'ruxebe', 'jnpywrd', 'wrifvz', 'zkewcd', 'vllfrn', 'uvdvjh',\n 'bglpya', 'vzokkbw', 'apaoqt', 'xpjizn', 'xoajmd', 'xapjwc',\n 'jcknwg', 'bjpreep', 'ffkua', 'ukcbah', 'bugvkrf', 'cbmmfs',\n 'cwaczhl', 'nsqaj', 'sjeikg', 'fayqif', 'slowoh', 'xjpvkpa',\n 'ynunjle', 'bqavt', 'nkpqudr', 'neikvd', 'yuqlzg', 'pdxbtrb',\n 'cashlog', 'iqiqy', 'smjmxv', 'zbtpbr', 'zzamzcv', 'jmakg',\n 'txfswc', 'pkaym', 'swlde', 'utann', 'mqgpjne', 'pslfvek', 'nbiqhb',\n 'bzsianu', 'wnxgbi', 'ahkeeiz', 'dqdfjg', 'bptdg', 'pwita',\n 'uqyflq', 'txabjn', 'yznjmve', 'mukcqqf', 'cxonbf', 'ixuewjm',\n 'pzlcat', 'eikeeo', 'scwsoa', 'uaeyw', 'oeorff', 'gbqgd', 'qboqiv',\n 'hiulpb', 'dbbdm', 'qvdxx', 'aypxbcn', 'ykjwdbg', 'pvfxn', 'shrqyz',\n 'zaxtu', 'pfefgww', 'jwifrw', 'zxuud', 'kpkwhlj', 'lwptgd',\n 'zpdmvsw', 'takeb', 'ynehl', 'kixtod', 'fyrgm', 'qirzmr', 'shyvec',\n 'xjgzt', 'bwfvht', 'wyehh', 'renzc', 'nnibax', 'slhfng', 'yjtecc',\n 'lghvbzf', 'qroxvun', 'mlsed', 'rrudho', 'cyffhh', 'tjlxahp',\n 'xmaepzk', 'jvdzh', 'bbvegrw', 'cebcz', 'odjpeam', 'guerph',\n 'tgmphgo', 'ohtkqq', 'jcxojz', 'haeheae', 'erydxni', 'hatjxx',\n 'kwmgkjw', 'wmezvy', 'hsuuvfi', 
'ineek', 'grkxmhb', 'alxkt', 'rmspxdg']\n ) == 13956\n assert s.minimumLengthEncoding(['me', 'time']) == 5\n assert s.minimumLengthEncoding(['yiyqbv', 'njqvawn', 'wnlovvp', 'vogum',\n 'jpolc', 'zleec', 'sxdrww', 'rbowr', 'xsjorra', 'kwjsx', 'vornum',\n 'echku', 'kuizegn', 'rhuvv', 'eemkh', 'yshht', 'pbixoa', 'cmbxvtr',\n 'iupia', 'nmcbq', 'mgrjsx', 'ejvniwt', 'svhsel', 'kazenhf', 'fevpm',\n 'xcwqfgw', 'ozikzc', 'mywnmqt', 'taorwjm', 'gcshacq', 'fgtasq',\n 'qexygw', 'ljmbari', 'zfjudos', 'rgxuzy', 'kmzryaf', 'exjfd',\n 'mcqnebz', 'ptoim', 'zglfi', 'fhneaz', 'rexgc', 'lhplwyr', 'dthdp',\n 'jizetec', 'obyzg', 'rqupa', 'yphttge', 'wdcdn', 'wdomtr', 'hchbd',\n 'ytyra', 'upytftl', 'swbbi', 'qpcybv', 'dcoxspd', 'dftkf', 'nwjfmj',\n 'ojbwy', 'zofuy', 'adqkt', 'kpcply', 'aeukw', 'fqblb', 'xurrbpo',\n 'veioa', 'puzvl', 'bnzvlax', 'tjzsdcw', 'jarqr', 'orxjbg',\n 'ilrqdri', 'syjuoyi', 'htoqdco', 'gwslw', 'dpqyf', 'jnkhv',\n 'fpqhpr', 'baewnvc', 'caunsf', 'qhbpe', 'wlckl', 'lmoroqe', 'ddlak',\n 'qipwbfp', 'cefqs', 'surczp', 'jtmfuro', 'ezhqau', 'dlsco',\n 'hywoqh', 'lnifq', 'hvfmu', 'cqjdkok', 'tggdact', 'rwuowdk',\n 'attnl', 'lwhyq', 'mqtsc', 'bmwajiy', 'nyohug', 'vvfpt', 'lbyazu',\n 'sarwago', 'iccztck', 'ugsxcw', 'rpwza', 'yofmlll', 'ulhdzhg',\n 'lbaqk', 'bwxxwc', 'dmsbawg', 'tjloy', 'imbrkul', 'xguke', 'shlkuq',\n 'lizjcdu', 'kmvykl', 'ilqxxjm', 'rtbvvqt', 'qisec', 'zobzr',\n 'thwntt', 'afpifh', 'uwiiovy', 'hgsyecl', 'pdgnm', 'mqyesch',\n 'suexztu', 'msguuwu', 'yrykkv', 'xtoommc', 'muteu', 'bamml',\n 'kkhlb', 'jfrnx', 'wpytor', 'zzogpt', 'yryxxt', 'hzqofjd',\n 'ehtildc', 'ptclf', 'nyltvd', 'nrret', 'qqqqt', 'uuxunf', 'jajxt',\n 'lzdvlc', 'gpdtjug', 'hjsso', 'jairua', 'qarxuey', 'rpwwjwv',\n 'cjqypep', 'tuzgcs', 'oytqxb', 'rgfmud', 'stnwn', 'tzzaop',\n 'jpuopzg', 'qeywd', 'spnstrg', 'dfwgntg', 'yjyqk', 'ioowc', 'duqfg',\n 'gmqxe', 'xhlbby', 'liurjk', 'vdujfm', 'xxyyn', 'omapgc', 'koemzbz',\n 'ziiyako', 'pjmhfrv', 'bshtfgj', 'ihjvt', 'pnipuw', 'fajiuj',\n 'rdvcqzd', 'mgknns', 
'ouwkm', 'ejnklwc', 'osepl', 'gplpyvs',\n 'paxrddg', 'gsjlpd', 'lgnmgl', 'yifeeer', 'hhnwlol', 'fcmxs',\n 'ilinwgm', 'udhfdtq', 'ceefc', 'xweqx', 'jfelwod', 'rtywfjo',\n 'kzwrgqx', 'fcjriov', 'fzytqv', 'zcpcddo', 'scpyzow', 'kbzegu',\n 'gclwr', 'gmiwlp', 'rtpka', 'yiywuyy', 'qceot', 'dtrgn', 'ntwbu',\n 'fxobd', 'zmxwza', 'qcksyz', 'wgbtmm', 'pzorve', 'hztydc', 'jqlay',\n 'ijdkbk', 'uzjrps', 'gfzibk', 'gsxqj', 'kgjrkdd', 'smdeuk',\n 'iwizewp', 'owjie', 'kcdccu', 'ifltqr', 'zrdfbm', 'pznbcsk',\n 'mtkpi', 'cpasir', 'flrxrm', 'uxcxnv', 'htlfcp', 'ltukxfr',\n 'ftbbha', 'jhgjgyz', 'qjreroc', 'vcvtbid', 'nrhlq', 'gtkpot',\n 'gyplqqg', 'lnorig', 'fixhufv', 'ugcug', 'ndfug', 'wuorhe',\n 'owocnkw', 'rcnbf', 'ioiiiui', 'kakwtne', 'svxtt', 'wdrxogm',\n 'ibrxs', 'bddqi', 'jeguac', 'hlftdw', 'nutgfjw', 'krrzvf',\n 'amxuloc', 'deozdoe', 'ovsvk', 'sfqsl', 'slgiw', 'jbjujag', 'mhiru',\n 'uqksech', 'davosw', 'nlueljv', 'rhtvdu', 'ivdpdqa', 'qnbenpq',\n 'dtapqq', 'hwwfpxl', 'oyrfosn', 'goxgmgo', 'tbvutl', 'cbbbcm',\n 'iiugpk', 'hinkem', 'vvaitk', 'pskyf', 'hdnekg', 'nqhfn', 'dqbozx',\n 'zcwpko', 'kafyu', 'jfegubk', 'nofqzsk', 'ujmxxg', 'akwzemu',\n 'yvhxb', 'qqlwofi', 'hmoecj', 'qwgtlc', 'jepvygq', 'uzggm',\n 'fztiews', 'lvndvf', 'vulax', 'znqudh', 'whgqi', 'noguo', 'vewkx',\n 'uruvgf', 'ubohmba', 'aulzi', 'flvfdlq', 'yspfie', 'wugif',\n 'qndyiwa', 'keihmct', 'rggvn', 'ojjmuoh', 'sbbcl', 'cdivmoz',\n 'vkusmp', 'mfddp', 'kgohwvp', 'rjbbxw', 'vsgptj', 'hbyjoz', 'gufrv',\n 'orxiv', 'fxcqfw', 'okppik', 'qlouw', 'lkryigo', 'qccvc', 'ixcnodg',\n 'wlfilts', 'ahqtevp', 'kkbuha', 'oehaez', 'rzczib', 'vxobk',\n 'wmetvjs', 'xfjgeq', 'eadzl', 'aeqdvch', 'czojfq', 'hxshidl',\n 'ofswsj', 'iwbqcmg', 'schhwtt', 'ltyth', 'wiccu', 'akill', 'zaaji',\n 'qepvfa', 'mpvrkeu', 'dcpenm', 'wdhlk', 'llqbby', 'lronwkr',\n 'rwtguo', 'ofnvs', 'lxdnwzf', 'dctmilf', 'zhckjd', 'hajsuac',\n 'wpylhy', 'zhipvm', 'ihikr', 'zzwjgvr', 'gdglrn', 'skhow', 'tlqtjl',\n 'uypli', 'evdva', 'civide', 'iroihm', 'lvuzid', 'vexat', 
'ngmvrz',\n 'szdhbt', 'ggrbz', 'bsmovlt', 'kguomvl', 'onzvx', 'nobgxw',\n 'tqxemc', 'vbiyx', 'fpzpf', 'ogtvf', 'yuthri', 'xszbn', 'xcuhj',\n 'nosnpbp', 'mowsxg', 'tfalyy', 'kxombgm', 'cukrz', 'krmseq',\n 'velzh', 'kmufxj', 'nvxlkq', 'ualvras', 'wytoucy', 'qicqyym',\n 'pbeujtv', 'haojnbm', 'xnfffpe', 'wvoiald', 'rlyvf', 'sxamoxw',\n 'ztqnmp', 'biiavx', 'lnjnzs', 'arqdjdy', 'pkrgokc', 'qxswouj',\n 'dgqah', 'mnhzo', 'ggilb', 'qscrd', 'ggvkimw', 'qlxjys', 'wximi',\n 'aqlhio', 'iavtvy', 'grkqf', 'dwrtut', 'uozutfc', 'fogxpdb',\n 'ydtntlq', 'vnmpmwp', 'gtxhwq', 'mlpihx', 'yfpjlz', 'hdvcquq',\n 'nunny', 'wklasgp', 'wxduo', 'topsqf', 'tngcpzc', 'mcrut', 'pdnsmt',\n 'kavaok', 'seiqsqa', 'bhgkiyt', 'mawvhtp', 'domcnrm', 'fgusghc',\n 'wdaufwz', 'tzpuks', 'kisndyz', 'fwyieu', 'wtdum', 'ytxhl',\n 'yhzkmuv', 'nppnqe', 'ccvhj', 'dautnyq', 'hkaliab', 'kngan',\n 'ebmhiop', 'vsdkcef', 'nmpcnd', 'vxvnl', 'cwcgu', 'zsuneh',\n 'qjgcmd', 'awvba', 'rzbisxo', 'oilqrj', 'neiazlm', 'hlyrl', 'tmiht',\n 'lwqxxv', 'gyblrw', 'gnnjkb', 'lrxiln', 'xlwlseh', 'npfwcvp',\n 'yjcdhw', 'rzndd', 'orlhmip', 'gatuojh', 'osotgvv', 'owksz',\n 'kcocizf', 'izlev', 'smigns', 'wtxfwo', 'knwizte', 'mqjojzp',\n 'lkezye', 'xqldbu', 'cvbpyl', 'aoipbz', 'asrupt', 'bdwkesh',\n 'jpaykm', 'pksbg', 'gdbsibd', 'lfxpwk', 'rmnfph', 'yzxwke',\n 'xjwyusv', 'yetar', 'sytdz', 'pnystzi', 'yntcqo', 'egoorl', 'aydxu',\n 'rfdrfhe', 'flzkos', 'mmjgev', 'fbjwmvi', 'jeouc', 'lcmkri',\n 'aggsb', 'aaeazai', 'amyxpey', 'onxqpg', 'qrjpxq', 'zanea',\n 'niwsgtv', 'nsqja', 'utgskd', 'hlcum', 'frygtl', 'xjmqetz',\n 'upqddd', 'vxzdstm', 'hcmtera', 'ejstou', 'xkcguf', 'bokigdk',\n 'vurnv', 'zsgrje', 'nbxlf', 'tpilcx', 'lvepux', 'xacdtp', 'amdgx',\n 'ubbvnx', 'xmvznh', 'tlprri', 'sthkn', 'xhoad', 'deotaxo',\n 'pqzppmw', 'xlcpx', 'qwzrpyp', 'lujabeb', 'heskwyy', 'mzzaaur',\n 'vnestcs', 'rryphdl', 'ibdiabi', 'eoiyt', 'znflx', 'clougix',\n 'zzadxw', 'lrrgtf', 'lsdoakf', 'yxfmqx', 'qhnrry', 'ktcdmv',\n 'veygqu', 'btjlo', 'fcspsc', 'gozoazm', 
'xcsqgz', 'aazae',\n 'nkuvask', 'mzdgjq', 'sihqdhy', 'zadrwzw', 'gzcyuea', 'lpgccic',\n 'fqtfuzw', 'bjoqpkc', 'oydpkxc', 'sugnnu', 'hyvygf', 'axkxo',\n 'rsmzb', 'dlhqmac', 'gbqby', 'npqkj', 'odbtb', 'bdsib', 'zyasxv',\n 'ifxqcc', 'lmnjwhr', 'ibuyu', 'uzhle', 'ccpwhjr', 'vhrojnz',\n 'fkzfz', 'fyesm', 'dnvipvm', 'jbbqn', 'qdkgl', 'xkvvgq', 'dphugaf',\n 'soxbfun', 'rbgokx', 'biveiz', 'vbaqtn', 'qapydgf', 'llldu',\n 'ottjpzu', 'fwjuc', 'cawio', 'gbkwe', 'rrnnxer', 'luviy', 'zsalse',\n 'ckwdeox', 'ozhqocm', 'vtozfwz', 'jztole', 'ydqei', 'bfugz',\n 'psawjp', 'dzlyrwp', 'izuyrne', 'rbwcfr', 'vdvte', 'usjbqs',\n 'zzovkxr', 'frfkwk', 'mmtmdd', 'sntka', 'wachbzo', 'rmzvj',\n 'scbngo', 'eqiuiwi', 'qfakk', 'cckcmt', 'owhzow', 'rejdlw',\n 'iprsqdq', 'twwaldw', 'mfilzyk', 'jygvx', 'iewbo', 'irhko',\n 'zpazqhn', 'ndqbg', 'ayzxqdz', 'zvpbh', 'maapq', 'pzitrfm',\n 'qsgsurv', 'viwcfff', 'wpgenms', 'tjmvu', 'czuemc', 'infxoo',\n 'avhbw', 'nugkqx', 'xubakjp', 'ndask', 'utaqq', 'njhuxq', 'sdvuex',\n 'tfmxqp', 'bydovjo', 'bizxjsp', 'zoozxyv', 'jegei', 'gkpqobw',\n 'psumbtg', 'gkgoh', 'sgcbpql', 'xxkhy', 'kdorkr', 'hcomj', 'ulrpyv',\n 'rhplil', 'tyyochd', 'xhzul', 'srdjmns', 'kgukye', 'yepvs',\n 'xnobsjb', 'umxmtub', 'wvqasr', 'igftpzw', 'exhecn', 'rreee',\n 'jpxuvxh', 'jriqf', 'akexunb', 'ekvdsoe', 'ytzvj', 'vfrlyae',\n 'pmfai', 'biouzle', 'xkbce', 'clzyi', 'xhjoso', 'wmxkxb', 'dqzzig',\n 'ydtby', 'gskwj', 'wlkwbz', 'zepvllz', 'zsgqp', 'blntawk', 'eynmil',\n 'bdqyp', 'wgtnqbc', 'rrgaq', 'gtafuzo', 'qdiko', 'kkcsdo', 'zwqhs',\n 'kugzbmf', 'wtvvs', 'kqsdx', 'mxsuxiz', 'pgbgjfe', 'vodfr', 'qbvwu',\n 'vfwbhgw', 'ayojye', 'kolzfqg', 'xnbecj', 'akbcnf', 'uutrn',\n 'upmesa', 'marqej', 'bbucee', 'bazqbau', 'qikgsyf', 'oeayzn',\n 'uilxnzr', 'vpnxknl', 'btgtxgh', 'vjaav', 'zaxtzah', 'msweps',\n 'awduwld', 'gzaep', 'ngvgc', 'qpoqdgn', 'kimndg', 'qilmmpw',\n 'oafhlyp', 'nyelgvw', 'onymk', 'feycbc', 'dhcrx', 'siqpfly',\n 'tyvycmf', 'huctqp', 'uscjrp', 'bbptd', 'msdmu', 'xlxhye',\n 'xnyzcox', 
'kyskda', 'injdkmp', 'jiwus', 'spjylwd', 'eqcrnt',\n 'snfiu', 'jvwvge', 'yfeaw', 'mmdnsjj', 'suzdw', 'xiupf', 'rjwjhng',\n 'tqvasy', 'rmibpa', 'zuqax', 'prpndnp', 'efryqe', 'pwuqfy',\n 'wpqlfs', 'aeswq', 'cxkeiue', 'jydxzfi', 'tzfvwp', 'zzgtw',\n 'mupiusx', 'sojavt', 'dxmsgq', 'migjiyj', 'kixjk', 'ywwvcpl',\n 'khzcuo', 'oykhx', 'fochin', 'foxbfkc', 'sizjg', 'wrjcvr', 'ceadd',\n 'tvfqgxq', 'whzhche', 'dcoeti', 'mpilfib', 'cphie', 'ucpnjm',\n 'ajltvx', 'kpizym', 'vevfsrs', 'jznrri', 'yvhxomr', 'cbcnk',\n 'yuwuhu', 'jywuzed', 'kqakusq', 'jrnzgfo', 'mjimzz', 'mfjybnd',\n 'ntqyq', 'junxxck', 'myvqajv', 'kvuqs', 'obfxw', 'jwuba', 'vnrvzvy',\n 'aeric', 'vtgda', 'nkrocpt', 'ahitg', 'dzxtr', 'zswwc', 'yhxap',\n 'fdhiwr', 'cpxtqv', 'izbmo', 'zyioo', 'vysnoe', 'ouuyvj', 'cumdhzn',\n 'dbsmph', 'cktjem', 'vbmxy', 'utgfyhc', 'rqdeorp', 'btnlmd',\n 'chxwlt', 'nsghoqi', 'egycsm', 'wkanat', 'lzjyf', 'donyx', 'cchqsa',\n 'xozzz', 'yzmnf', 'jfzuh', 'dpcpg', 'hlahz', 'vobopk', 'lssfeli',\n 'ccttzi', 'glzgqpv', 'oyqzug', 'qqhkrr', 'euwotv', 'hwbmtz',\n 'hiylhly', 'bppzne', 'yetyyvs', 'cnbwcby', 'hzblk', 'pfjmxt',\n 'dsxvt', 'vvkju', 'zjrfr', 'gdbhb', 'udoad', 'nbhpzfm', 'iwetbym',\n 'atmly', 'tnxli', 'myegb', 'hiwqsk', 'btrajk', 'nhrmwn', 'ftmbecv',\n 'xopht', 'eiikqy', 'qizanwa', 'cwxiatf', 'jshjva', 'llrtkn',\n 'zhivu', 'lmwiu', 'oaeaqz', 'oxotfub', 'jnkafm', 'juhrmq', 'mqzbtw',\n 'puiaxty', 'dnahvoj', 'gaxhz', 'xfnay', 'iqmlnlq', 'xudhcg',\n 'izpkz', 'tqttmt', 'bwnbs', 'fdufd', 'vhzyymh', 'zhqtxr', 'evbcrv',\n 'xvnma', 'dgcwy', 'cwxzlbz', 'oodiol', 'teyim', 'kqqfjub', 'ftsqzi',\n 'arfztkr', 'oqlujx', 'rpkkdov', 'ptoff', 'ivxaxr', 'nxeept',\n 'cacpl', 'tehir', 'spvggl', 'qfzxkn', 'bhwkukx', 'fkdpuq',\n 'xdrngre', 'fnfplq', 'dzbrl', 'ufgxu', 'sciec', 'fgdydvw',\n 'nmpaqxi', 'ydsvfv', 'natjz', 'lruyvzf', 'xznznxp', 'mhfrh',\n 'kddsk', 'uwatn', 'uklzs', 'lnuta', 'ryizc', 'cvwko', 'tnzpk',\n 'ywpiv', 'vbvcagq', 'pzolw', 'nmyfhg', 'cshkofj', 'ksptw', 'kqejh',\n 'zgzjqzo', 'mxzrw', 
'enabosq', 'vmubgc', 'sfzcj', 'hewvk', 'ewhrq',\n 'oifnsmi', 'izdnvu', 'cshgtk', 'mqotuhd', 'gnqgj', 'rxailbm',\n 'iyhxvtu', 'ncjzklq', 'zjmnoc', 'awqwos', 'ugujppc', 'spbvfwl',\n 'gntsvo', 'euksu', 'qnvneph', 'crhmf', 'brktmf', 'mvgmr', 'yzcskrp',\n 'tihawec', 'edqmxpn', 'fxyymlr', 'dzfkucm', 'prldz', 'gplrlhz',\n 'bohwr', 'bhebbk', 'mmecj', 'segydd', 'ptslsb', 'pyhgw', 'cwmrq',\n 'mjfhflh', 'xhuid', 'npxmb', 'izilq', 'dczhqh', 'tgfnxtb', 'zrylvo',\n 'lctxrar', 'ylhrbii', 'rfxedv', 'llvhzjq', 'bjocv', 'wbnex',\n 'cnohnf', 'xahrl', 'rouvwyc', 'hbhovgv', 'dhucp', 'ncmff', 'ncsskg',\n 'gsjbyin', 'lroxscf', 'whfaenl', 'vsfultg', 'floxkpy', 'captoai',\n 'qwolyex', 'ggaypn', 'wzunypd', 'pjixeu', 'gxnjkoc', 'pqiqhn',\n 'xakjmgz', 'vqizkx', 'gdzcxr', 'kyxwdd', 'pgxmazn', 'qeuwf',\n 'bduknm', 'tcrcn', 'nehgee', 'wktbcgu', 'jwqltdt', 'wczkai',\n 'drkqs', 'qhdqnn', 'oobxirc', 'lbunv', 'ifscr', 'xnfpbrw',\n 'yrrdbax', 'fbocs', 'tewne', 'iobixe', 'zgosas', 'yhesn', 'xlqwd',\n 'pfcen', 'slsjffx', 'ilwatrc', 'mhsmgp', 'iteghl', 'aqhufdl',\n 'kxgpqcu', 'ryrcgp', 'azidf', 'smlnl', 'rocxvbt', 'iutfc',\n 'loapgbr', 'musulp', 'dqcnj', 'tpgbkfh', 'wvskii', 'itkfopo',\n 'kytyb', 'rzahbu', 'aewptd', 'ohergbb', 'cadxh', 'aphwelj',\n 'huooyzn', 'gtttia', 'izeyhcr', 'cfvxz', 'aitaxyp', 'vypqost',\n 'ebfnmif', 'kgiucm', 'zryyu', 'oxgnbpt', 'frpwo', 'ouqvodl',\n 'pdaazh', 'gxwmf', 'dozxsjm', 'yndpsik', 'zcwvu', 'mihug',\n 'jgodklw', 'ysklw', 'cfxqv', 'yqvtz', 'rctnp', 'xjywa', 'kpqyw',\n 'hhtegzt', 'rnwbeoi', 'uyxqum', 'jahcwbe', 'jzjns', 'ovwoaz',\n 'oqmsrua', 'natbejl', 'deffv', 'okgbr', 'paqhy', 'jkafhte',\n 'lifsknp', 'afmskh', 'oemdro', 'oxuwov', 'qtyxa', 'hkpfsm',\n 'ulaubn', 'tciurw', 'myohwlo', 'okuiejb', 'ormoqsb', 'gmipz',\n 'hterzir', 'ekxzre', 'xkevge', 'ihenf', 'nnhzv', 'eocjmx', 'upzal',\n 'oounfko', 'myhbwub', 'fwipva', 'pkzzvpd', 'nrupm', 'vluzq',\n 'fxkoyho', 'atzktr', 'aomrp', 'qwpser', 'ejagmb', 'cfigelm',\n 'bvanb', 'cgcgabo', 'hmjvlqt', 'hxxocf', 'ftqaud', 'htuipy',\n 
'bhwmcn', 'tgyvaqe', 'lvuwh', 'yiabzs', 'rzzavu', 'fiubm', 'uuqsb',\n 'riyakuf', 'psscffd', 'kvckzr', 'fktmnf', 'ivzqexi', 'nhxzm',\n 'kffjmb', 'vdzxv', 'esago', 'bfikw', 'gaiuxmz', 'volokcm', 'jypcs',\n 'psibvs', 'hxaxklf', 'lmqwgy', 'spnbimo', 'mtihak', 'xikoiy',\n 'rmmtv', 'phaqgxj', 'zcuwkhk', 'emodbyb', 'ztahsya', 'ieiqm',\n 'lfoquh', 'emznnq', 'pnhlgut', 'pgvads', 'cqsjx', 'lxnjei', 'zpque',\n 'rdjbiyb', 'sxedpu', 'potnqva', 'iirkn', 'rjmnrxd', 'ksgcd',\n 'waeymnh', 'tizdz', 'kproa', 'wpttygd', 'lvyze', 'peewvgm',\n 'fwtyzbw', 'zitkk', 'gfgqr', 'udgvlz', 'swqspo', 'ohhvyq', 'kgyuau',\n 'hcerp', 'pdomlm', 'twabkk', 'zfsea', 'epiwp', 'xgycjpt', 'jtkdh',\n 'mxmdm', 'rtkzm', 'qkacy', 'nuvdiq', 'agctak', 'hypgyh', 'ewtjp',\n 'paysolw', 'bcutebe', 'xelxyb', 'gzdvrth', 'vpzfv', 'cxrkt',\n 'admiyzi', 'lqlmn', 'zbjpbg', 'tlvdnli', 'zetnox', 'ylcsobo',\n 'balajod', 'igoume', 'sxcgw', 'sbkkafk', 'fmndnnw', 'incsa',\n 'jyupkg', 'uhvvc', 'rswnbth', 'nvprfj', 'figqf', 'znyidqi',\n 'aijper', 'euidr', 'dftxkze', 'vnppi', 'splwifc', 'fprgafl',\n 'ixzaz', 'mrhqtne', 'dtkjsy', 'dsmqrgy', 'xfscz', 'cymvmpu',\n 'vptkfdx', 'zrgrjq', 'mqvwsur', 'hdtlw', 'ugdpwun', 'cvxitc',\n 'vytvqg', 'pmtpfz', 'nfdtdt', 'umvwjuc', 'jouxc', 'qpypri', 'pdhqp',\n 'lmise', 'wlsvcfg', 'aqdkzcb', 'qlrmrfz', 'pbgoyi', 'xmsskoh',\n 'jjdye', 'xvsdmq', 'ymjeipy', 'igjyv', 'uiojvmc', 'uckoww',\n 'grlnyeg', 'hpglp', 'omnnyy', 'iiliir', 'cnucbcx', 'pcxvs', 'hipad',\n 'xmiltkj', 'oorwi', 'qgoxjj', 'jnmviqs', 'wpleqn', 'tudxw',\n 'pcogem', 'hgewaf', 'niwfexy', 'vcttgcb', 'anjgovq', 'epgmscd',\n 'mdtru', 'xvapv', 'rydjik', 'kopppcr', 'mjbsmu', 'unxoakz', 'ldpsw',\n 'frksjr', 'vyxxg', 'yyydri', 'szidq', 'qvbtd', 'qratl', 'xwfov',\n 'bzhqyxl', 'fskrtf', 'pcpzmnv', 'xuxwx', 'vzbevnb', 'ebaqz',\n 'dbpuek', 'ooqwj', 'gaimp', 'coelqh', 'bwuceq', 'oxpfjt', 'zrqyc',\n 'rwllk', 'pqunv', 'ufbnn', 'tbnjoz', 'kkqmrxu', 'qyyrm', 'hislf',\n 'wyuck', 'ubpre', 'pdioi', 'aryhv', 'vdcxv', 'rkgmaag', 'czlzokw',\n 'gtxuduz', 
'grpijx', 'qzrar', 'qhues', 'rmznt', 'sxxmved',\n 'onjzuwl', 'atbjhip', 'nrardl', 'alrocy', 'cfkip', 'ihtbf', 'pqdgm',\n 'hmokun', 'dpghac', 'otwml', 'mnbzwa', 'ehetlt', 'rchvq', 'lwjgywn',\n 'lzdmjo', 'nvhohdp', 'tmshcpc', 'gavjv', 'ycnkv', 'uynzh',\n 'bvpnfjq', 'lfbem', 'qberui', 'vrmmhx', 'wpbqtfq', 'jujpx',\n 'dujgkof', 'hrpbso', 'zhcdt', 'iybngyb', 'rgeruza', 'nesyxr',\n 'cihgfe', 'hjgskb', 'zspxeqm', 'inzrgyd', 'crkjq', 'iooshwp',\n 'muvvj', 'wakis', 'rowibwa', 'qikwypf', 'aportho', 'pubcgx',\n 'vqoqpfi', 'rnpbri', 'ussjv', 'looor', 'xkzvdv', 'tstegg',\n 'zgiiokw', 'rwvyaun', 'mqqla', 'asnqp', 'nghuryl', 'hlvhn',\n 'ecuotnu', 'judvbu', 'xgvuw', 'oeckn', 'hdhttsg', 'hcyhu', 'klbyjc',\n 'tnrmqnc', 'mjojxhi', 'kvdet', 'vbmevim', 'oglrzs', 'afbscdi',\n 'zxrffti', 'firzgmz', 'oenim', 'wgpua', 'asiep', 'kyteq', 'wpeneca',\n 'qixmeoq', 'zaofon', 'csxxtr', 'cpwmnl', 'feylas', 'idjuo',\n 'mrtpvta', 'jjvmjy', 'mnljocc', 'lnvjleq', 'oognud', 'rbyneq',\n 'rhvomm', 'fldrkpk', 'znvrp', 'myswmz', 'jiloe', 'juivjmo',\n 'ylhbyzl', 'ndmabkt', 'sgdvlq', 'pmnddmi', 'utpuj', 'kfisv',\n 'nxfeell', 'mxhgqd', 'ccvdsdg', 'emtybo', 'zmkylbt', 'mmrpi',\n 'dkwlgq', 'iwlappb', 'uimsrnu', 'mkxaxmi', 'tcvll', 'njggal',\n 'kmqud', 'evgzlh', 'oaxizbp', 'jiuej', 'xknlp', 'cyksydh', 'gbixmz',\n 'vtouyk', 'sxjpkio', 'qhubt', 'kflvnb', 'sjdfggl', 'bxozyj',\n 'xekbh', 'wtmcb', 'xtapfco', 'rnornl', 'ursdpki', 'waonim',\n 'eibfyed', 'zniinaz', 'uyfohq', 'qcaxlt', 'koyaapa', 'pjuvbsi',\n 'ecpdl', 'ifaqwm', 'yyumzc', 'gvfngfp', 'lttul', 'flyza', 'uasdlme',\n 'oklhb', 'wulkzzv', 'ziwsxo', 'jqcxiu', 'qdzrwgm', 'zjdwy', 'uumns',\n 'emlnp', 'irnrqp', 'gqkza', 'oynpcz', 'yxyea', 'zpamf', 'gyehxbv',\n 'nplkhcc', 'rxeekyo', 'kecgp', 'gseju', 'nkisxqf', 'vlyud',\n 'fxxihhm', 'yjgtml', 'fehwpdi', 'wclnvyy', 'lriwrc', 'ikparv',\n 'volfh', 'ysphh', 'szrvrv', 'rqlmz', 'jyqut', 'fyftsj', 'uvwfip',\n 'rngwgm', 'mjwaz', 'roehjki', 'ploxokr', 'yjbalp', 'fspkq', 'yfxrb',\n 'kzulvk', 'ordxp', 'vdrrt', 'wdiojwd', 
'ridzl', 'niykdvu',\n 'whyycmn', 'riwcma', 'bkhgkrb', 'nsine', 'emgtgf', 'zoymw',\n 'ljtvhzb', 'kfyfdma', 'piygxdl', 'onfwgdf', 'fwmkm', 'vqbljay',\n 'icife', 'bxfli', 'yeygr', 'qenhgm', 'mtxuckj', 'kdcyx', 'kwqhfcn',\n 'ywkfy', 'prbpw', 'pheyc', 'kmnds', 'cacqs', 'kvekiqy', 'bfvfhdy',\n 'gxulp', 'skmcra', 'exomt', 'lcxue', 'mnvvday', 'rsddl', 'gooegc',\n 'udght', 'doymnin', 'ccdap', 'wuive', 'dyyln', 'rynust', 'luxabyg',\n 'kdkkyyw', 'vawqfsy', 'rmeswm', 'rcxzyv', 'clpowz', 'pdntqm',\n 'tvjkkmz', 'iiclw', 'nhudzen', 'cybhu', 'crwtw', 'enypnh', 'ygekg',\n 'hrjwqt', 'peissge', 'wangcy', 'rbpoik', 'raqulbf', 'gyisnsj',\n 'rgbqn', 'lgvuzb', 'djicf', 'epnuu', 'nsapc', 'voatgh', 'yorfehc',\n 'jxfttat', 'wyuivb', 'bwopl', 'odwdsh', 'anchkv', 'sepvew',\n 'qoxxmae', 'bpvqnj', 'sngfo', 'buoazou', 'zhijssa', 'janng',\n 'uvdbd', 'yfvkqo', 'lcjii', 'mvacvrz', 'xztiar', 'lpbtrqa',\n 'ukbpdx', 'okaqpgr', 'idgqlj', 'ewglgo', 'ruymhi', 'pcidw', 'bvuqj',\n 'npzch', 'yppyan', 'oiguirj', 'iijvwqj', 'jvbwjys', 'yjtunfc',\n 'iaikra', 'oduhdgk', 'ivixur', 'ibcgai', 'djzvcbx', 'lmtsul',\n 'lgnwzol', 'wursq', 'xsxbqwq', 'jqvwnc', 'dcwwvtb', 'vwybnr',\n 'bughwjl', 'rnelxb', 'hmacv', 'ufgdygl', 'aabuat', 'oynwask',\n 'gnfjjf', 'zipbq', 'zxstn', 'jdrbprf', 'jmkvny', 'rblpql', 'vykdj',\n 'qaakyqw', 'osbhddb', 'avgldyy', 'kvpoa', 'fnqcliu', 'zzlninw',\n 'drsal', 'omswys', 'hwqcpct', 'ecraq', 'fvhsbjq', 'raauy', 'pfmoz',\n 'vvqvcm', 'tbjqjun', 'jcfbegq', 'otiwup', 'axvvce', 'dhpdnx',\n 'pennr', 'hvvmvzv', 'binezl', 'ygdmcuo', 'ypwnqn', 'aloxdv',\n 'ucieh', 'kovbtag', 'rgfpaww', 'fpbftg', 'spjowfr', 'zridoy',\n 'blwbbf', 'evwlxi', 'itbcz', 'hgixuo', 'qmoqmjb', 'tkeeis', 'pjiaq',\n 'rbpje', 'ledoui', 'ubecht', 'mphdd', 'uzswsbb', 'ntsybr',\n 'qmnijyp', 'pqwawe', 'ltytill', 'dpnxy', 'pkxqcol', 'ayrdi',\n 'mycnd', 'knotsn', 'zvcrjl', 'qwroblg', 'vtrktey', 'dzilezi',\n 'wzkxg', 'varqc', 'xlpttyc', 'xxqhnl', 'jpxywa', 'kjdsh', 'hdseebw',\n 'bxqbp', 'flazqce', 'xrtab', 'rupsfq', 'asswer', 'rhqof', 
'hjzdv',\n 'addsgax', 'cuahzjj', 'xwdilr', 'osqgg', 'pfhwv', 'rqorah',\n 'ggdlnv', 'truvaoj', 'jzuldwf', 'mjddj', 'vixtn', 'eslxoaj',\n 'cmoypm', 'jvvzs', 'oqgxcc', 'tptls', 'wwgwbj', 'tysuhg', 'xbnqb',\n 'iogjvg', 'fbxdmr', 'zdvsmx', 'hiuja', 'watrt', 'kjawab', 'entxk',\n 'jmnkaox', 'zznsox', 'asmzc', 'soblvp', 'quyxjw', 'udrdc',\n 'hyylvvw', 'gzfwxuv', 'jjqmjw', 'faegxbl', 'lqjcg', 'bzmruq',\n 'bykuh', 'miwhd', 'ykgtwhk', 'oyobzwi', 'oltwpua', 'ctulabr',\n 'dwandd', 'vhuhox', 'vtlknw', 'ywvln', 'qemqdeg', 'akezvx',\n 'kjmjpv', 'vwuftx', 'kreaxnj', 'fvfop', 'cxabs', 'jfacbje', 'eecnz',\n 'cmblit', 'gfvpoq', 'whywnh', 'pghvx', 'ohgkmf', 'xxtiwd', 'nkojni',\n 'dlcicnp', 'bwyvyyd', 'gifup', 'vgjfr', 'hhteifi', 'kjhffq',\n 'pawqaxl', 'yozro', 'slxluvd', 'amqcquy', 'vnnxkr', 'wgdur',\n 'rvawiu', 'thcwnc', 'cddut', 'vnrtrv', 'fnfio', 'nhvxe', 'rfdqmj',\n 'ucblh', 'ccbnt', 'lxckaoy', 'fnwcbx', 'gmdbiwt', 'ypvwjy',\n 'cbjazk', 'qmujnm', 'nsqot', 'lhcqt', 'ijxcts', 'nujrms', 'itxel',\n 'ghukr', 'qpwitlr', 'gcafqrn', 'lcoho', 'lfzab', 'vwhgceb', 'vgsgy',\n 'jrtgo', 'ryxlz', 'deoyq', 'ybenly', 'lyysca', 'sodvazo', 'hbnnoz',\n 'ovgvda', 'elwtjx', 'soydmn', 'trdsi', 'mwwjwo', 'vupwj', 'dszpcv',\n 'kkhjdj', 'ewmyo', 'nmpeq', 'oepldcq', 'xttrgu', 'wbcbxi', 'jakzk',\n 'peukyw', 'fvcqv', 'xklwuu', 'hsmva', 'kslmkq', 'azllbig', 'stnzih',\n 'wfyud', 'ihauy', 'cfxmj', 'pdyogwv', 'dcqdpa', 'xhusy', 'jfpmpmm',\n 'odeiiw', 'ozyaer', 'uykzvma', 'tuaznxj', 'kdnbdki', 'syrnsem',\n 'fdysz', 'hhrpo', 'fglzfi', 'vgcqzqm', 'qhsjr', 'bvboe', 'dpfwpvg',\n 'mvvry', 'itnnr', 'lgykbe', 'pscow', 'mkrgeqv', 'czffv', 'apteht',\n 'jeqixsx', 'ksmbe', 'zamivv', 'vvmyo', 'cwwoce', 'sppubxc', 'qaich',\n 'nmbxr', 'tfkwfxi', 'iakhezl', 'fxujis', 'fkwffe', 'antaylq',\n 'mmfgstq', 'zxaacy', 'zlswx', 'pbqxil', 'eupck', 'qzcxpbe',\n 'rjalbzr', 'wioagbq', 'kreec', 'zsdcuft', 'rrdzb', 'ocdlvq',\n 'oxiroo', 'zcxsqh', 'wbrsi', 'fqike', 'oskzupi', 'thvof', 'dicbyst',\n 'iojwe', 'hyfizq', 'yoknhww', 'nupiyyn', 
'ievah', 'slcgmxg',\n 'cnecpa', 'lcwsoj', 'hnqsc', 'ghipbi', 'exobr', 'nwpnq', 'dmhbj',\n 'amdbmwl', 'xfbzovs', 'puizvu', 'yvsus', 'ykysqg', 'bgqdv', 'zgqbr',\n 'zkjpkej', 'crkot', 'zciymk', 'tleogn', 'sayrmz', 'elwma', 'zugjva',\n 'uifwsmw', 'wstrg', 'xbotd', 'hinsg', 'qpgyoyp', 'xzfocdy',\n 'mbvuepb', 'dtphufk', 'cyapnt', 'yyehhad', 'ohdrd', 'mlibm',\n 'qzdfil', 'rdwszqx', 'bzcbmyn', 'uarjlg', 'mtwpqmx', 'nmagl',\n 'cepniel', 'tylvaa', 'melhd', 'jygeneg', 'fdglfy', 'xcpciu',\n 'ayrel', 'bxceshv', 'kspyg', 'iclkaz', 'ykbzt', 'nrnkzo', 'kxkto',\n 'fabzszn', 'edalls', 'nilmh', 'wwawgnn', 'gymbtx', 'mzipa', 'ajevx',\n 'qppisv', 'otqhsf', 'ippxak', 'bixnqd', 'uqitwo', 'soxcug',\n 'loiscd', 'wqrjk', 'rqntoa', 'fzpxlp', 'tuaob', 'pyqqms', 'krbzmmj',\n 'aijqpfg', 'nstqrbu', 'wmtiahz', 'joplby', 'jyszxq', 'jnxtyhe',\n 'lbvfv']) == 14011\n",
"step-5": "# -*- coding: utf-8 -*-\n\n\"\"\"\n@Author: xiezizhe\n@Date: 5/7/2020 下午8:52\n\"\"\"\n\nfrom typing import List\n\n\nclass KMP:\n def partial(self, pattern):\n \"\"\" Calculate partial match table: String -> [Int]\"\"\"\n ret = [0]\n\n for i in range(1, len(pattern)):\n j = ret[i - 1]\n while j > 0 and pattern[j] != pattern[i]:\n j = ret[j - 1]\n ret.append(j + 1 if pattern[j] == pattern[i] else j)\n return ret\n\n def search(self, T, P):\n \"\"\"\n KMP search main algorithm: String -> String -> [Int]\n Return all the matching position of pattern string P in T\n \"\"\"\n partial, j = self.partial(P), 0\n\n for i in range(len(T)):\n while j > 0 and T[i] != P[j]:\n j = partial[j - 1]\n if T[i] == P[j]: j += 1\n if j == len(P):\n return i - (j - 1)\n\n return -1\n\n\nclass Trie:\n\n def __init__(self):\n self.dicts = dict()\n\n def add(self, word):\n node = self.dicts\n\n for w in word:\n if w not in node:\n node[w] = dict()\n node = node[w]\n\n def search(self, word):\n node = self.dicts\n for w in word:\n if w not in node:\n return False\n node = node[w]\n return True\n\n\nclass Solution:\n # def minimumLengthEncoding(self, words: List[str]) -> int:\n # kmp = KMP()\n # ret = 0\n # texts = ''\n # words.sort(key=lambda w: len(w), reverse=True)\n # for word in words:\n # idx = kmp.search(texts, word)\n # if idx == -1:\n # ret += len(word)\n # if len(texts) == 0:\n # texts = word + \"#\"\n # else:\n # texts = texts + word + '#'\n # ret += 1\n #\n # # print(texts)\n # for word in words:\n # if word not in texts:\n # print(word)\n # return len(texts)\n\n def minimumLengthEncoding(self, words: List[str]) -> int:\n trie = Trie()\n ret = 0\n words.sort(key=lambda w: len(w), reverse=True)\n for word in words:\n if trie.search(word[::-1]):\n continue\n trie.add(word[::-1])\n ret += len(word) + 1\n\n return ret\n\n\nif __name__ == \"__main__\":\n s = Solution()\n assert s.minimumLengthEncoding([\"time\", \"me\", \"bell\"]) == 10\n assert s.minimumLengthEncoding(\n 
[\"ojtnj\", \"uuydcho\", \"dgsyp\", \"dwxycpx\", \"dpmvc\", \"dvfhmb\", \"flrxjjx\", \"fwhdhvn\", \"rgsakp\", \"aiconf\", \"nzacpk\",\n \"sbxnaj\", \"shway\", \"rgrmz\", \"rysudo\", \"bzkioce\", \"mqxkzvu\", \"wyebk\", \"tymoaz\", \"mlmbg\", \"djbmek\", \"qfnme\",\n \"khkiyae\", \"tjdaxry\", \"sqtcwz\", \"ehnsai\", \"jhncvrm\", \"cxkzgrx\", \"pummt\", \"hzrpfcn\", \"lkyqit\", \"phpqdxw\",\n \"vangm\", \"wcjdgw\", \"pxesvtn\", \"mnqory\", \"bdrzvh\", \"brtzmo\", \"chqgf\", \"bipyxm\", \"meoikg\", \"ysyckk\", \"ojayeiq\",\n \"zrfbsb\", \"yhuotea\", \"crfbhq\", \"tllycn\", \"qxnzihf\", \"avyawpz\", \"bwsjym\", \"myjozc\", \"lbdksm\", \"mctlt\",\n \"dszowuw\", \"syshm\", \"xrvhhkn\", \"kgrcwfv\", \"dwlajlf\", \"yviuk\", \"xegjj\", \"spiczl\", \"vfvomi\", \"mgcujy\", \"dqmzb\",\n \"isrisgt\", \"vdrtuah\", \"vsyth\", \"eoclef\", \"poccek\", \"cgafrlu\", \"crbhpgk\", \"sromv\", \"xmvbca\", \"gobra\", \"ygvlq\",\n \"pjvhe\", \"tfweiso\", \"cskuohg\", \"eyalone\", \"pobkak\", \"nzpxn\", \"lbcrws\", \"uhtfe\", \"eorth\", \"showvu\", \"hxsmb\",\n \"jrggose\", \"izifkb\", \"oqwyf\", \"mozmzj\", \"ijwle\", \"ggtqqqv\", \"geevzj\", \"meota\", \"ifsse\", \"kdtofm\", \"swydhvf\",\n \"tzjhqap\", \"wqwwd\", \"jlinnov\", \"lmxkgeg\", \"stbot\", \"xrsfn\", \"etoyctk\", \"rygagm\", \"vcnrf\", \"zkdge\", \"emqtscp\",\n \"newqcyy\", \"nnuus\", \"exwsxbd\", \"zstvl\", \"lbkko\", \"kygkyqq\", \"oggji\", \"xytbjo\", \"mfbahk\", \"ggoks\", \"lmqewkl\",\n \"qexhyqe\", \"ogaogio\", \"nzvbav\", \"mdole\", \"qvyks\", \"gkupfu\", \"dgmpn\", \"ngrdrj\", \"iitqvk\", \"ipuiqb\", \"ugxfea\",\n \"ialkmv\", \"hmgnx\", \"aoyoj\", \"fvzhjil\", \"butrbp\", \"dwhxnes\", \"etkdwg\", \"cjkghz\", \"tovkq\", \"mmxhv\", \"jgcsn\",\n \"hmictal\", \"zxmnek\", \"pcoeg\", \"ntyqmlq\", \"hfubhtg\", \"ydjbv\", \"xnwlqto\", \"hatgi\", \"bsaczd\", \"pokwk\", \"arxlula\",\n \"zjtqlk\", \"ocfxup\", \"nsnqjc\", \"xdcsopi\", \"iqxyxp\", \"xfmtpvm\", \"bqtgcf\", \"wboycn\", \"aoeda\", \"uowqdgj\", \"rzzzx\",\n \"liucs\", 
\"ejzxz\", \"qmlehsh\", \"igrbmon\", \"dpmkbon\", \"pmayh\", \"nujdwdw\", \"awdgo\", \"ijgkzk\", \"inhee\", \"jzdtv\",\n \"adhauh\", \"grtmbp\", \"qndbvw\", \"zprrw\", \"mpqieq\", \"jzmzeuu\", \"fcvftqs\", \"qxzxqy\", \"lidguzz\", \"eazwd\", \"zjhfsz\",\n \"zsnzefh\", \"mnckfg\", \"zjgtq\", \"ckyxlif\", \"fznfo\", \"jegnof\", \"lzwyzb\", \"ozivfio\", \"igkclsa\", \"bebzn\", \"bitsggm\",\n \"lrnwin\", \"hjnnzr\", \"idvoirn\", \"dgile\", \"vfngh\", \"xbmur\", \"rqaftt\", \"wjwwwxs\", \"btreou\", \"gjsycg\", \"pvsiylz\",\n \"ccxzgdf\", \"excrrrr\", \"fiesr\", \"jdioj\", \"uzwsc\", \"odrlcoy\", \"hcsit\", \"ptwfprh\", \"sbqry\", \"kffvy\", \"ejeawbp\",\n \"omvcc\", \"iqgxqlt\", \"edsuu\", \"xnbue\", \"qfbcx\", \"fzlmbkl\", \"wrrcueb\", \"mmqispp\", \"nknilwd\", \"dewuhju\",\n \"hmdqlxy\", \"vjxgg\", \"lkuexo\", \"dzvfscm\", \"voulbs\", \"uevoqgq\", \"kmhwu\", \"oglzllg\", \"torhihn\", \"fhuqzc\",\n \"mmcfhb\", \"woyayma\", \"uznsvre\", \"mmxed\", \"aoskwg\", \"xrosbm\", \"hpyrgh\", \"tghwbwh\", \"hcwzn\", \"iepeftj\", \"judij\",\n \"kudbk\", \"jonpv\", \"lywck\", \"rxelz\", \"bgifz\", \"mehbxq\", \"fmqnz\", \"sqrmzj\", \"iqqjzex\", \"qioliz\", \"kjizbf\",\n \"lgdcffc\", \"pfgmcr\", \"trdabul\", \"vlqjdnc\", \"jjvbxe\", \"fqlayw\", \"ilbhtyq\", \"saawulw\", \"gxysrb\", \"kighql\",\n \"eceapr\", \"kztbcww\", \"jedkoy\", \"dxpcaga\", \"ndacphe\", \"rcoit\", \"ywgcnxg\", \"klipfup\", \"bddws\", \"jwyof\", \"lrfwgo\",\n \"bediwuf\", \"ujakh\", \"ppima\", \"xzhwvm\", \"guzmsqt\", \"ffbliq\", \"adjmynm\", \"akabzn\", \"inmykju\", \"vlcjyv\",\n \"orquepg\", \"tufrk\", \"vqpjymm\", \"lvuab\", \"qzxav\", \"ekcmu\", \"uqtuhie\", \"kfvtgf\", \"nklwjo\", \"ujxlfpl\", \"zobfpq\",\n \"eignijd\", \"ythctg\", \"artllm\", \"wodhh\", \"tzpwszq\", \"njdqegg\", \"hzrqib\", \"zvoxtfd\", \"htboem\", \"axjuix\", \"bvmvm\",\n \"jbnum\", \"bxdth\", \"atejt\", \"gqsqtnk\", \"fykrjbp\", \"ldyhonr\", \"wcuoj\", \"upphc\", \"agydg\", \"cjmwk\", \"rhxbqh\",\n \"tpgozdd\", \"qyqoy\", \"zjqutw\", 
\"qoohqny\", \"nsiacwz\", \"xupin\", \"criuvs\", \"eswjeft\", \"pdmevn\", \"zvogq\", \"lrrvo\",\n \"qhfqqpw\", \"ktudfg\", \"ijvmi\", \"neyjjdx\", \"rllpi\", \"vllvaa\", \"esebtu\", \"jyhcrh\", \"otgmr\", \"oudvyxj\", \"pmszy\",\n \"opeed\", \"gicni\", \"mnuzn\", \"mjbfpod\", \"sqwgxu\", \"dwniwz\", \"wmbmmv\", \"lyafuy\", \"zmvlz\", \"kopxzuh\", \"urcbbiy\",\n \"guhco\", \"nerjm\", \"lpdxc\", \"hxmjzz\", \"hynagc\", \"iyxeczi\", \"bdfxmoz\", \"yybnpqd\", \"jvgnb\", \"oquqem\", \"fmclmz\",\n \"dmkhf\", \"zxbjpp\", \"qpxgcir\", \"iecvjm\", \"gtkne\", \"lgtqrbc\", \"gilbn\", \"mcxsg\", \"ncwbhn\", \"wkriiq\", \"zhsir\",\n \"ptkkmw\", \"jcbpkrm\", \"vbefo\", \"vmbcd\", \"vqffj\", \"fhqzjt\", \"nryuh\", \"vmclav\", \"cjyggm\", \"sanev\", \"rrdocz\",\n \"zqdexbs\", \"jrxstt\", \"pyhcesj\", \"aagghyr\", \"cyemjrb\", \"aliohf\", \"qaslg\", \"pnyjzxz\", \"pehnvi\", \"suhuw\",\n \"twopabr\", \"sapqoc\", \"mckrh\", \"nzlgrxt\", \"aqpobnu\", \"pirbjgb\", \"plzlj\", \"raylxpu\", \"gyasfrh\", \"urjfxux\",\n \"xjbwau\", \"iupknn\", \"vhxnc\", \"dnbjop\", \"vrxhwmd\", \"vjsmkh\", \"rfmqids\", \"smaiwt\", \"vkyfo\", \"bjqyxc\", \"rbbbp\",\n \"dlkzg\", \"dwvdwu\", \"prulzh\", \"bavge\", \"ehhrz\", \"xxjqk\", \"pxopmp\", \"okmkmb\", \"slcznpp\", \"nvqlb\", \"jalrk\",\n \"parwlcd\", \"anbxo\", \"oqcxyzo\", \"fjhrdjh\", \"pgvnwfe\", \"yfjyvh\", \"quvszjm\", \"xyiig\", \"xtncqv\", \"svsix\", \"jvpdnh\",\n \"owuiv\", \"bsrugtt\", \"rmvggws\", \"lmdql\", \"kvmvd\", \"xrpmaw\", \"ssnxyb\", \"oworq\", \"rmmpuya\", \"rijpih\", \"aelazka\",\n \"kncksqx\", \"yvtdiy\", \"epato\", \"pbbamj\", \"fejsw\", \"zgsru\", \"ekwrre\", \"zqben\", \"vugxi\", \"fvcsdp\", \"rujcews\",\n \"asqxya\", \"worjlsd\", \"xggakg\", \"kzfpot\", \"haqon\", \"ypqxzz\", \"mmkzwt\", \"bdhif\", \"exzhv\", \"srnklzh\", \"hlrunb\",\n \"dwfyke\", \"fvgbtdm\", \"aeutp\", \"czhefx\", \"tegfw\", \"jkxpsb\", \"gxkfkw\", \"exvntd\", \"gvuti\", \"jdmly\", \"owaqhw\",\n \"fopuxzv\", \"edrvil\", \"biszwgv\", \"vgckzd\", \"fqdxn\", 
\"qktdf\", \"hpgwrk\", \"gpxiips\", \"vxnlab\", \"yylxz\", \"hsuscch\",\n \"bhivaf\", \"wzrwtc\", \"ebplv\", \"yzxykou\", \"mxlssom\", \"evghv\", \"hksleg\", \"shybau\", \"zeyqa\", \"tljqka\", \"axfkec\",\n \"fatdj\", \"janlkcc\", \"sjorbra\", \"jplge\", \"oazzot\", \"qbgtncn\", \"ozlil\", \"stohadq\", \"rvpuwn\", \"oqwpl\", \"byftgi\",\n \"ubuusl\", \"fkogr\", \"bybdyhj\", \"vinyuzs\", \"ivsqvz\", \"vmnae\", \"gckxw\", \"rozbe\", \"glvxwj\", \"rcgicu\", \"xmvbd\",\n \"itycsry\", \"llmwrs\", \"fuqth\", \"styrrwl\", \"wsseuln\", \"xwflcli\", \"muxgz\", \"ypmbboh\", \"rpmvnep\", \"wjvvnv\",\n \"arjnw\", \"toauwc\", \"ltjxqrl\", \"basffd\", \"clxozwd\", \"glmrv\", \"iejgfj\", \"cvkoj\", \"wotjf\", \"mqucec\", \"xalgemc\",\n \"hgimkh\", \"golvfq\", \"fuqpmak\", \"mhpcp\", \"pxoibt\", \"ledqa\", \"guzbyr\", \"ztvbeka\", \"racdp\", \"krsngra\", \"aaiknz\",\n \"bhoobyc\", \"xibbe\", \"yohepxk\", \"eclevs\", \"ldliwcm\", \"qatvlk\", \"eiypbw\", \"vxvtwa\", \"nkdwsej\", \"ftmyvp\",\n \"gpthye\", \"gazwoi\", \"zzgipon\", \"cithg\", \"wpabujl\", \"jhezlnb\", \"vqqaxfg\", \"kvpbk\", \"vggjemp\", \"owylv\",\n \"lgwtfpg\", \"jjqvfm\", \"xbhga\", \"tulvfv\", \"sefuo\", \"hbysv\", \"ozopepd\", \"awyrifd\", \"pnudwx\", \"vreje\", \"zhpgw\",\n \"qygbf\", \"tvbrvy\", \"zzmcw\", \"cznee\", \"deuzxt\", \"qfppjvi\", \"ilkps\", \"ydwhg\", \"krwkxzu\", \"mnsidg\", \"rkxyyr\",\n \"ajkqz\", \"xtmom\", \"vqocor\", \"fympcl\", \"yyleyzy\", \"jjvzhrn\", \"kpmxvuz\", \"txoeqlx\", \"lhhmn\", \"chzgpf\", \"ncnjxle\",\n \"ihxrg\", \"feqixq\", \"lkfhcar\", \"hfnsh\", \"bifczy\", \"umknat\", \"yrhgkh\", \"mgpcu\", \"qotukst\", \"yqlmfq\", \"ttcdp\",\n \"xnjjzm\", \"cukbr\", \"hjhjb\", \"iikfcsr\", \"nsqbnnz\", \"dauygf\", \"cmydq\", \"lfnhqnl\", \"ppqgs\", \"hscbfug\", \"ohzisud\",\n \"opspdkv\", \"aauxbop\", \"wpkhzo\", \"sxbsgu\", \"tajrv\", \"ololy\", \"mxmus\", \"vizvxv\", \"osaqz\", \"rxygkn\", \"mrzqlf\",\n \"zrriyxb\", \"ufroe\", \"bajozg\", \"atpsu\", \"uhgauzu\", \"tffdw\", \"mdjulde\", 
\"rbrmy\", \"jhkqvwl\", \"gzsultq\", \"nkbfi\",\n \"xtvwh\", \"dryzcv\", \"emaxuk\", \"zucvutb\", \"jdduyk\", \"bjdin\", \"loicuq\", \"qhjjb\", \"rgfjbq\", \"mphnk\", \"lxvceyx\",\n \"zeoxb\", \"fxhnxu\", \"qpbipe\", \"ophwp\", \"wiioer\", \"quchwj\", \"pouxunw\", \"bloxgg\", \"xbsma\", \"dtwew\", \"xstorn\",\n \"qfrfkz\", \"gxusbsn\", \"dhnxd\", \"mhstbs\", \"hekbtu\", \"wvrrjw\", \"yeiwd\", \"patplsx\", \"qmyiyi\", \"mowboj\", \"iskyd\",\n \"bqhjj\", \"povppk\", \"vthpwx\", \"uuydaw\", \"rduxvez\", \"vmcww\", \"ylruvph\", \"ymqosp\", \"wzcvohg\", \"lhepwta\", \"bckhc\",\n \"oiyyt\", \"wqzfv\", \"uduec\", \"lkkbtzl\", \"prvpbo\", \"jrwstii\", \"ijztoo\", \"qwwth\", \"vqzqiun\", \"krnjp\", \"zyanpiw\",\n \"ojhjhvg\", \"lohmb\", \"thqtf\", \"reptzv\", \"zgkyq\", \"lhkvy\", \"cmjwl\", \"fmilgpw\", \"jrfawz\", \"vrtzd\", \"ezgfl\",\n \"plzng\", \"zidzso\", \"civavlg\", \"vtwopu\", \"ljhckxo\", \"nuydt\", \"qembl\", \"fiwrre\", \"gfrgi\", \"gzegiq\", \"mltlqo\",\n \"pcett\", \"snbsc\", \"msibcqn\", \"beacrhz\", \"vsycjt\", \"gjqji\", \"smcegol\", \"zregkp\", \"smcazoj\", \"dziqad\", \"jpuwp\",\n \"hnlztac\", \"vduitco\", \"wyencad\", \"bkdnnqo\", \"cabzyg\", \"mgpcwr\", \"fxgvkxt\", \"wlkcrdd\", \"bhmhsy\", \"gqcctjc\",\n \"atafpt\", \"vdzhmcg\", \"ighxj\", \"gfqpale\", \"fohbrtj\", \"mfpsgt\", \"tarjocf\", \"gyycb\", \"qvqfryl\", \"jpwowwc\",\n \"jcgcg\", \"gmrjze\", \"nfptxq\", \"hmjhxge\", \"ieelj\", \"suvkgr\", \"nwjxe\", \"tkepqm\", \"extnpmq\", \"rxzdvf\", \"relzaa\",\n \"hfhgaq\", \"lmihlz\", \"pacocq\", \"dclxr\", \"oknoem\", \"pbpnnd\", \"nleerfl\", \"tvytymc\", \"aamfnl\", \"ufdnq\", \"bxyzvyh\",\n \"vksvout\", \"lohxhf\", \"sskgn\", \"aawbv\", \"hrvhx\", \"wvoqf\", \"vxkvh\", \"oqany\", \"bcmyd\", \"epdddqn\", \"zrlej\",\n \"bchaf\", \"hmftii\", \"mefcrz\", \"wbxvc\", \"ewwnldf\", \"cqecxgh\", \"cnwvdmk\", \"vetrw\", \"zmogwov\", \"lshlzpe\", \"lijay\",\n \"tcdqg\", \"xavqixd\", \"yjkhtsl\", \"myjvow\", \"cgthhd\", \"taaii\", \"iuuegk\", \"lcypmle\", 
\"wesrit\", \"tybco\", \"nhxysw\",\n \"awkrj\", \"jcmqa\", \"porvo\", \"nrypriu\", \"vznnevp\", \"hzklwi\", \"vapuxh\", \"wyfkn\", \"albemu\", \"ttfdbl\", \"dbqrjv\",\n \"cxals\", \"qzitwf\", \"ysunur\", \"llsefy\", \"cghfzji\", \"jboaa\", \"emhlkw\", \"khhmgha\", \"twlxgjz\", \"pyujor\", \"ozcax\",\n \"fetvovo\", \"mdhrrd\", \"qdhdne\", \"fiuvw\", \"ebyxh\", \"ldaothh\", \"vwyjf\", \"yjyljlu\", \"ivroqg\", \"qvpeyec\", \"eemsdra\",\n \"wavgeqk\", \"bjejrqg\", \"mdjimoz\", \"fgopy\", \"lgwodr\", \"cunvszh\", \"wiver\", \"ghmog\", \"jzgfyk\", \"vxlbx\", \"kvgbtn\",\n \"cunorte\", \"mtesdc\", \"zdzmqu\", \"pigik\", \"smruadg\", \"czjxlt\", \"kukgaok\", \"tsldpqq\", \"luomo\", \"ezbcvdc\",\n \"tfetwes\", \"uopzf\", \"wsvezkw\", \"wrnlvbx\", \"bpqungd\", \"jqnnof\", \"rqhiomi\", \"voulqb\", \"ouspxn\", \"chngpz\",\n \"fbogfcv\", \"nqhunxo\", \"rydbke\", \"ewduo\", \"suqqwup\", \"oxzfxj\", \"kuwfwm\", \"euiics\", \"mvftoau\", \"vstfbm\",\n \"vnmtoo\", \"muicf\", \"bjbskxb\", \"knbomlf\", \"enrbtfk\", \"hnaqe\", \"vxzsr\", \"gkqma\", \"qygmn\", \"ztkybmb\", \"injggpk\",\n \"enqrgdk\", \"rkgoct\", \"tgaiu\", \"dnknoxk\", \"iwuou\", \"oxanccl\", \"xestej\", \"ekrqq\", \"xbwhz\", \"jkdvxfh\", \"oybaay\",\n \"afyhci\", \"papffjq\", \"bdppssw\", \"qwyvjx\", \"xmnnosl\", \"kvqzjl\", \"wcwii\", \"ygfvt\", \"tpabbht\", \"kjmaq\", \"duschjz\",\n \"gguiof\", \"wgfhve\", \"joqmfjq\", \"smqfd\", \"ynlovlz\", \"sgrzum\", \"bobmux\", \"dcppi\", \"isdjrwl\", \"lbevb\", \"efqsirq\",\n \"hlgfql\", \"enmemlb\", \"dbmfk\", \"ibfpzm\", \"rtdnooq\", \"yicdq\", \"xadul\", \"dxibxzi\", \"yyxnj\", \"jhsdzxw\", \"thltbi\",\n \"kwhreyi\", \"hrocoa\", \"fnaalbd\", \"vnwona\", \"nnonm\", \"naqaf\", \"xgzzies\", \"uhruynk\", \"kgadfx\", \"hyohzbd\", \"hnajx\",\n \"yipzh\", \"ezdxaet\", \"xbzppoz\", \"rwnewxz\", \"hlcbkmb\", \"znyhu\", \"zsqtpkr\", \"gmyxr\", \"rphyvo\", \"bgjuz\", \"nulpv\",\n \"eejfoso\", \"xmwcnes\", \"xxxxnpe\", \"jezkk\", \"idfsxrw\", \"qgzjtf\", \"arpzpo\", \"hxsanlt\", 
\"emvotcb\", \"sknzhvg\",\n \"icitca\", \"ivhdln\", \"sqilerz\", \"ndigw\", \"bcsre\", \"mibbep\", \"zsczom\", \"cgghjbb\", \"fkylfgt\", \"bvzofs\", \"mefsng\",\n \"bispbza\", \"tsosgy\", \"xopalrw\", \"wserf\", \"jbmlz\", \"xidxny\", \"ffmpjos\", \"vddwxmd\", \"netnsg\", \"kgevsp\", \"pguuv\",\n \"cwisp\", \"slxiyb\", \"dmwaguc\", \"jobwusu\", \"uytcqrv\", \"hzhsy\", \"zrlsdd\", \"xhxah\", \"rxzij\", \"zwdgy\", \"ygmvkz\",\n \"drkzbo\", \"qpsal\", \"tpxvl\", \"lfmfl\", \"sayjvlh\", \"rdamym\", \"ycuzd\", \"zkycu\", \"hdesec\", \"unequk\", \"lpkdid\",\n \"vorxls\", \"admsdop\", \"rqnvkyg\", \"krnqqtb\", \"rxfms\", \"xfthd\", \"pxjbk\", \"gpslrg\", \"rwziwef\", \"usxgqvz\", \"baxxye\",\n \"ocrkkrw\", \"lrlgsp\", \"ceyctg\", \"rniml\", \"vavug\", \"jgircl\", \"jrpnmsa\", \"rywvlfg\", \"prxnys\", \"fkzmknn\", \"ooelc\",\n \"btvfs\", \"yqepuvw\", \"tmmmb\", \"qmpzexb\", \"zjckjvd\", \"aieytbb\", \"oafqq\", \"szrcyh\", \"czrxgae\", \"ifkte\", \"hfgajox\",\n \"pwpnkqq\", \"yqphogn\", \"xuwthrd\", \"mpcmy\", \"qitdoa\", \"avlzfrh\", \"ywpip\", \"dgeki\", \"fgbnx\", \"tyofu\", \"xziqzj\",\n \"qxzvqz\", \"vtsqk\", \"ipkld\", \"yfhim\", \"ebaegdc\", \"ubhrh\", \"ldejv\", \"mtflwy\", \"ocpyj\", \"yopgqs\", \"fkjxxd\",\n \"njnnwr\", \"nylkeb\", \"taymdqv\", \"ekpznq\", \"cbzobmg\", \"bucdds\", \"qjozu\", \"uvpghor\", \"obhnu\", \"ljkxbg\", \"uqrxjtf\",\n \"xwbxiw\", \"oxsmcg\", \"spchdd\", \"pcuitj\", \"faidq\", \"tybmy\", \"uygiyp\", \"qloizj\", \"cafgmy\", \"smetd\", \"kwcwb\",\n \"tdabxf\", \"fpmrc\", \"lfjujn\", \"vvmvex\", \"mnsgdc\", \"enjlgsw\", \"ohwcg\", \"kxjdaup\", \"rotjarp\", \"aovdoq\", \"oviwq\",\n \"qwaxs\", \"bmazco\", \"plcljsv\", \"yytjhl\", \"vgwjm\", \"drnue\", \"vqjgf\", \"uqlsfy\", \"bmqmfp\", \"lkauwna\", \"ozmqce\",\n \"heunaxr\", \"zaffbj\", \"arbek\", \"qjnllw\", \"fdkhlz\", \"wgmbwh\", \"yceqag\", \"ltjjq\", \"yurggfw\", \"puaafsl\", \"tjiqkyt\",\n \"yuzub\", \"ytmrfq\", \"ommmu\", \"ipknn\", \"iubnuab\", \"dzthvc\", \"zjbzpew\", \"dcooev\", 
\"pjydqcf\", \"zuojlzy\", \"zwjyfc\",\n \"spmac\", \"dfkbnz\", \"fzriie\", \"asusog\", \"hdodx\", \"drjpo\", \"ddyif\", \"chabv\", \"ebvkwrr\", \"burdjl\", \"jjddi\",\n \"dljzkye\", \"samyg\", \"zwgxcq\", \"xtratwo\", \"qfopz\", \"xvlaw\", \"laage\", \"btdium\", \"vzlnzt\", \"kmvbzkq\", \"kctobsx\",\n \"kazbelu\", \"yxdwrk\", \"eslvjc\", \"nhsdmvs\", \"zuxqcc\", \"hqtxovn\", \"zrbdai\", \"fgjxs\", \"txecvio\", \"kjxlq\", \"dkuxss\",\n \"mkbevn\", \"pzmdqc\", \"ihyia\", \"atsub\", \"twytus\", \"nzooxj\", \"qwuoly\", \"fdoigo\", \"zukhlh\", \"mugeaxt\", \"qqsfyls\",\n \"qqtql\", \"wrvphcx\", \"nzjfhx\", \"uequtk\", \"fxuto\", \"qnast\", \"nveys\", \"ltbrcth\", \"toctdib\", \"fbpnh\", \"umxfgn\",\n \"zvjuta\", \"yeron\", \"qzvswqk\", \"gbctr\", \"ryryz\", \"zieknd\", \"zcsna\", \"jrhak\", \"zfxqsj\", \"urlba\", \"lbozqf\",\n \"yfcjaa\", \"hazgy\", \"gmmfzyz\", \"zjvkyc\", \"rvfdcf\", \"daitab\", \"hcxqgum\", \"qwakp\", \"ltbsjwo\", \"pqqtygx\",\n \"upxcxao\", \"qylot\", \"lmxqc\", \"dwzcd\", \"tjccm\", \"mqcpap\", \"wgxqtr\", \"ivycvxy\", \"wdykg\", \"snvqka\", \"jxtvtsb\",\n \"jnyowsq\", \"iwfuoig\", \"cuoixhu\", \"fzwalg\", \"djhrar\", \"sjmahk\", \"dyusf\", \"wrxqvdi\", \"ftytlor\", \"jsjbv\",\n \"vjbebg\", \"agvsn\", \"vvmpgm\", \"gsgjopk\", \"vbqvhy\", \"afopf\", \"zybfuz\", \"aqsgc\", \"ytrjsvn\", \"wlhdfr\", \"vdhvl\",\n \"jrlvr\", \"cscxwf\", \"yhgbew\", \"wupbl\", \"ssuhyvv\", \"bhcirzk\", \"oykwk\", \"ijbto\", \"qsnpgw\", \"otwzage\", \"ytqzh\",\n \"rgwow\", \"bvhgkwh\", \"fvawxie\", \"fllxw\", \"gfcqf\", \"scoqb\", \"qubrq\", \"gdxjtp\", \"ahrpck\", \"awnlgi\", \"cmehsyp\",\n \"dwmytpy\", \"firyeq\", \"oohwhr\", \"caelk\", \"mqemvs\", \"qflkzi\", \"tfpibll\", \"ybhzd\", \"ctsxri\", \"yurocj\", \"dnlnl\",\n \"ydmdva\", \"xkaotl\", \"xovax\", \"ypynrqp\", \"kwfzw\", \"fbgsmrc\", \"tutime\", \"rcugul\", \"cvewno\", \"typhbpa\", \"wazew\",\n \"flzfs\", \"wxxbza\", \"ogjfkl\", \"vjlebet\", \"imbubm\", \"xinyncy\", \"dqmxfy\", \"buhagzh\", \"jjadpos\", \"gejyz\", 
\"gxshqk\",\n \"wkwrs\", \"dqeriqo\", \"dmixr\", \"bysjih\", \"aoloq\", \"ddwhsxs\", \"nteqv\", \"cqagf\", \"ditsrn\", \"wfxgl\", \"jwjqb\",\n \"rvkxj\", \"rxapr\", \"yrlkip\", \"npquasb\", \"nvezlr\", \"gmhchcx\", \"lodfihi\", \"dheypxa\", \"plzjykh\", \"qopsthg\",\n \"zsnes\", \"raongg\", \"zrpnac\", \"tzmtltj\", \"jsecdn\", \"rzudh\", \"hkcyic\", \"xsxmw\", \"reeuwpn\", \"grkwrag\", \"gvzzbsq\",\n \"lrfta\", \"aqyvbkj\", \"ytgfu\", \"wcmvd\", \"olnvfi\", \"hhgmhb\", \"kojmepr\", \"wpohl\", \"szhgg\", \"hymiblu\", \"lkwjr\",\n \"zulqpz\", \"sdcqjo\", \"olgsgez\", \"lxkpqci\", \"yxcgn\", \"gmvex\", \"fskpppe\", \"utzto\", \"axncvp\", \"lcyahba\", \"ydeae\",\n \"zvzar\", \"ghfkkqv\", \"ryrpg\", \"gucpbq\", \"reofjz\", \"cdnoo\", \"dchhh\", \"byiwd\", \"cqbhok\", \"ksfnoa\", \"xsmmlr\",\n \"qyvdfqh\", \"dzshj\", \"bpifnzh\", \"uxmoml\", \"jdxvojf\", \"ihfll\", \"vwesfof\", \"zynnpb\", \"fwzra\", \"rxlgww\", \"vkmjd\",\n \"hcjgzt\", \"mkapfl\", \"ffjqlf\", \"wulaebc\", \"gurramv\", \"tufkzai\", \"bxprqek\", \"nkohv\", \"abgfwyl\", \"slslg\",\n \"wirsnh\", \"pykvuh\", \"fdrwk\", \"gtmgsxe\", \"dxsaab\", \"lqiryty\", \"aoezg\", \"tzhugcg\", \"uoarf\", \"dwhsv\", \"rjiuoi\",\n \"ycgcdnf\", \"rtfmwz\", \"amkjc\", \"woogtdi\", \"deprx\", \"ucknu\", \"womfm\", \"xdeev\", \"qapxpuu\", \"ngulnk\", \"fgtxyf\",\n \"hnyabid\", \"cilmy\", \"wrsewtf\", \"luvtmo\", \"wftuh\", \"ifoeeqp\", \"dtfdhhl\", \"rwnburg\", \"fohkkul\", \"frqqi\",\n \"gsrcyc\", \"teuync\", \"dvpvak\", \"daqjki\", \"kksscp\", \"somsde\", \"tyfvck\", \"ftfekl\", \"ahncv\", \"yvosm\", \"qgllvg\",\n \"ylfwv\", \"jenqns\", \"lqovrnm\", \"iyger\", \"nfvtsv\", \"bknxmqj\", \"pfzybdr\", \"hqjol\", \"chlpk\", \"etgrtqa\", \"msuxdx\",\n \"vnoatf\", \"ypdzomn\", \"vsshmg\", \"rfkipq\", \"jvpbiz\", \"vbskd\", \"edsoixj\", \"uowim\", \"hqtsj\", \"inbsxal\", \"ookrv\",\n \"ipotdnk\", \"kmazqd\", \"jpfghb\", \"gvmnnpv\", \"juvwa\", \"xtkvzw\", \"ejqcl\", \"ebgcnt\", \"ztuyu\", \"dlzthw\", \"zzipe\",\n \"iaxwdxy\", 
\"htynwkc\", \"lefbq\", \"pizfr\", \"vttrsv\", \"oagak\", \"eqlrom\", \"vttefg\", \"dsrmk\", \"oekbe\", \"cvugzk\",\n \"diwvz\", \"gxmfob\", \"vjowzm\", \"mjpop\", \"uznhz\", \"kqvjwug\", \"wjqvxfg\", \"jbpwezu\", \"wsckdx\", \"slqfomn\", \"omuxk\",\n \"zlgblso\", \"kvitoq\", \"dmafq\", \"djxmzk\", \"pjqfegq\", \"yjrttas\", \"siakcx\", \"iutiqk\", \"nwfdj\", \"gbgtazk\", \"cpqtf\",\n \"panmlr\", \"aqubhsg\", \"iwdim\", \"nqetym\", \"mwazh\", \"thyhy\", \"ydtxan\", \"xfoin\", \"lsosc\", \"esznfa\", \"xgdisi\",\n \"flvbzh\", \"mpltx\", \"iwjpsqp\", \"udfycf\", \"rntmc\", \"ltflwu\", \"wkgbaw\", \"bcuzt\", \"hejxuhb\", \"lguohe\", \"klnhb\",\n \"mjump\", \"avcwrol\", \"yrcqlc\", \"ihxul\", \"avajh\", \"gtpauet\", \"iemzk\", \"rfdub\", \"gqnbk\", \"cfcmg\", \"iobyh\",\n \"iruuapf\", \"tyifwt\", \"sbdtp\", \"mngcpmb\", \"oaqpolm\", \"mmimmh\", \"gxknadi\", \"bmxhuu\", \"ulyoa\", \"keidy\", \"vsnfk\",\n \"cnnnfty\", \"pkajm\", \"ddgeecb\", \"prxidqd\", \"wmenvhd\", \"akjcqo\", \"tnekfef\", \"ipvsi\", \"pzjwq\", \"wmmct\", \"erdjnuf\",\n \"vgeaqs\", \"nlbdx\", \"dpvbe\", \"dgeqz\", \"aiguzh\", \"akawppx\", \"tykrjcs\", \"gvavo\", \"hkyle\", \"yhedx\", \"xzqcg\",\n \"gzdxt\", \"csssbk\", \"tmekrmv\", \"lfsgo\", \"iizahz\", \"aszfd\", \"aybqnsl\", \"vadwxsl\", \"ulmiii\", \"xaxdugp\", \"sfnnsbg\",\n \"dkyruh\", \"qhpqu\", \"amesjd\", \"evjuki\", \"vtqjw\", \"aoabp\", \"qnsuhe\", \"bplbx\", \"fdqok\", \"ozkhgib\", \"cggwzys\",\n \"nbknjay\", \"ooambw\", \"evmvegf\", \"htdlxik\", \"kahcume\", \"bojpn\", \"bhipie\", \"hdyjslw\", \"pbkkq\", \"qwszl\",\n \"fgkbzsd\", \"hejdx\", \"vmcfhgx\", \"puzlmmm\", \"meffil\", \"boakbiz\", \"eczot\", \"fvkkit\", \"jebfx\", \"umvkjg\", \"uikgs\",\n \"rycgpf\", \"rfmfgmy\", \"nveho\", \"bgywqen\", \"gepfma\", \"vquyq\", \"wcercbw\", \"wbpjkxc\", \"rqloeda\", \"omclokx\",\n \"hvotwp\", \"tvqfxxu\", \"qrtghk\", \"hggme\", \"arnmfnt\", \"cxprj\", \"rspdt\", \"hlgfq\", \"dmqel\", \"pcerxk\", \"ptqjc\",\n \"wzreko\", \"kahks\", \"xjnzo\", 
\"xzzye\", \"xbdeu\", \"koiwkv\", \"jlwkkjr\", \"xzdixoc\", \"xeedvrm\", \"mrtnhqi\", \"jaeann\",\n \"mvubp\", \"olklqf\", \"retbgcj\", \"qxxlhh\", \"cqyyoy\", \"ngwikg\", \"qijte\", \"sjzck\", \"zkmkx\", \"ongtzf\", \"tanow\",\n \"smgntvq\", \"urfgt\", \"xwcroa\", \"kadcpd\", \"cxhgo\", \"walku\", \"kvvcsyt\", \"elwmuxk\", \"bfphtm\", \"vzeumuq\", \"sknvev\",\n \"vbsnfd\", \"grmbg\", \"vjahwt\", \"dmcbmn\", \"smubz\", \"jobbfcv\", \"ujlkm\", \"lcthh\", \"bauuqdu\", \"kjgzgtq\", \"gicjz\",\n \"nugbax\", \"kbnjfiu\", \"sqfpein\", \"obbgfww\", \"ykggxjx\", \"irnmog\", \"xniuv\", \"rqiwycq\", \"hzlgyu\", \"yjtrttv\",\n \"satym\", \"dgqhlkk\", \"rghal\", \"tbekx\", \"kkwmo\", \"eahwhks\", \"bpvmbur\", \"sqtgkj\", \"khboz\", \"enefr\", \"vkzqvt\",\n \"wfruavu\", \"ninomu\", \"ypktaoa\", \"mlpmoit\", \"fxyhjfp\", \"fgnpp\", \"txieja\", \"dprnj\", \"bgyrp\", \"zsqwqrw\", \"stqzki\",\n \"kwiayb\", \"ulbsn\", \"aetje\", \"vwzbb\", \"tedwyqs\", \"cymiruy\", \"jigpoqx\", \"ypuqsc\", \"weletu\", \"gvibea\", \"chhuldm\",\n \"baylv\", \"wdhovo\", \"imfqu\", \"meodnsk\", \"jhlckqw\", \"jolyfh\", \"jsfkrhr\", \"tnbfzvs\", \"egcfht\", \"qnzmyr\", \"owtrqu\",\n \"oqaqu\", \"xftys\", \"goxfftm\", \"sgbnp\", \"bhfvaz\", \"gospa\", \"jwzlvwk\", \"lqncoqd\", \"xxizglc\", \"bwffm\", \"mhpggzr\",\n \"kdaoewx\", \"anviou\", \"mqiij\", \"wkskpn\", \"enougdh\", \"vldnn\", \"gbfgz\", \"ejmbh\", \"qsdrvsx\", \"mrvbz\", \"cqlufpf\",\n \"kbgjlu\", \"njgna\", \"admrmk\", \"pwwsc\", \"gxkot\", \"pdjwh\", \"ejwxt\", \"bpaxufv\", \"iwjzs\", \"xxfsg\", \"vuhgh\",\n \"srytgb\", \"yesvlux\", \"tggnch\", \"cgnbb\", \"fbzbx\", \"aomoqf\", \"zkrvrjg\", \"ueaoz\", \"dppacnl\", \"ewovhxz\", \"kbvee\",\n \"ixeeb\", \"gwgoqm\", \"hlwlxe\", \"fpmkrk\", \"wzjsr\", \"ispwe\", \"garofu\", \"jcmpec\", \"tggeo\", \"yzdeo\", \"axpmln\",\n \"zhnlhck\", \"duyqcn\", \"tpqwqi\", \"jvmaj\", \"bisgoy\", \"mpwmurb\", \"olqla\", \"ecapwan\", \"kcpxn\", \"xcapin\", \"ooctk\",\n \"sgqql\", \"vcyyjxf\", \"ejyom\", \"jsgtha\", 
\"logxnjg\", \"nypadhj\", \"dprmk\", \"cqkuzb\", \"gratv\", \"tgkjgu\", \"fttcafm\",\n \"tpryi\", \"ubbhw\", \"uwcuyn\", \"zkgohs\", \"snfesz\", \"ifrex\", \"tkbfz\", \"fvvkp\", \"otjiq\", \"lgomjjv\", \"ertracf\",\n \"bregu\", \"kkbizb\", \"hyhvn\", \"zjcnxfl\", \"mceskuj\", \"lmupdq\", \"zdzqzgo\", \"yorppew\", \"fpwtjd\", \"dxvyzt\", \"bbnnu\",\n \"pkycae\", \"ucvapn\", \"dijmkb\", \"nvwwpr\", \"bufkw\", \"zhono\", \"vayxf\", \"hlfwkev\", \"klkvkj\", \"yzgpwg\", \"lcbqr\",\n \"tkkfi\", \"pcgljx\", \"bhduxu\", \"rgfipts\", \"hkjbrr\", \"fobvy\", \"wqmqhxo\", \"yjgvypg\", \"ehgoizl\", \"ipiibzh\",\n \"aqxbxtx\", \"lrtin\", \"fyyuypr\", \"pyrocgm\", \"kwqbg\", \"ukccw\", \"wgsbpvx\", \"pcoivrv\", \"okhxaba\", \"bbuaibf\",\n \"ccvfm\", \"phpst\", \"yxtqiz\", \"cdfbo\", \"sijfljn\", \"gdlhn\", \"bqmbced\", \"tiejf\", \"aurqer\", \"olmyd\", \"prctay\",\n \"lwflhi\", \"bbehvta\", \"oxoda\", \"lklyc\", \"rzedhp\", \"kairil\", \"envan\", \"wdcwfk\", \"xoroddb\", \"womrlr\", \"ruxebe\",\n \"jnpywrd\", \"wrifvz\", \"zkewcd\", \"vllfrn\", \"uvdvjh\", \"bglpya\", \"vzokkbw\", \"apaoqt\", \"xpjizn\", \"xoajmd\", \"xapjwc\",\n \"jcknwg\", \"bjpreep\", \"ffkua\", \"ukcbah\", \"bugvkrf\", \"cbmmfs\", \"cwaczhl\", \"nsqaj\", \"sjeikg\", \"fayqif\", \"slowoh\",\n \"xjpvkpa\", \"ynunjle\", \"bqavt\", \"nkpqudr\", \"neikvd\", \"yuqlzg\", \"pdxbtrb\", \"cashlog\", \"iqiqy\", \"smjmxv\",\n \"zbtpbr\", \"zzamzcv\", \"jmakg\", \"txfswc\", \"pkaym\", \"swlde\", \"utann\", \"mqgpjne\", \"pslfvek\", \"nbiqhb\", \"bzsianu\",\n \"wnxgbi\", \"ahkeeiz\", \"dqdfjg\", \"bptdg\", \"pwita\", \"uqyflq\", \"txabjn\", \"yznjmve\", \"mukcqqf\", \"cxonbf\", \"ixuewjm\",\n \"pzlcat\", \"eikeeo\", \"scwsoa\", \"uaeyw\", \"oeorff\", \"gbqgd\", \"qboqiv\", \"hiulpb\", \"dbbdm\", \"qvdxx\", \"aypxbcn\",\n \"ykjwdbg\", \"pvfxn\", \"shrqyz\", \"zaxtu\", \"pfefgww\", \"jwifrw\", \"zxuud\", \"kpkwhlj\", \"lwptgd\", \"zpdmvsw\", \"takeb\",\n \"ynehl\", \"kixtod\", \"fyrgm\", \"qirzmr\", \"shyvec\", \"xjgzt\", 
\"bwfvht\", \"wyehh\", \"renzc\", \"nnibax\", \"slhfng\",\n \"yjtecc\", \"lghvbzf\", \"qroxvun\", \"mlsed\", \"rrudho\", \"cyffhh\", \"tjlxahp\", \"xmaepzk\", \"jvdzh\", \"bbvegrw\", \"cebcz\",\n \"odjpeam\", \"guerph\", \"tgmphgo\", \"ohtkqq\", \"jcxojz\", \"haeheae\", \"erydxni\", \"hatjxx\", \"kwmgkjw\", \"wmezvy\",\n \"hsuuvfi\", \"ineek\", \"grkxmhb\", \"alxkt\", \"rmspxdg\"]) == 13956\n assert s.minimumLengthEncoding([\"me\", \"time\"]) == 5\n assert s.minimumLengthEncoding(\n [\"yiyqbv\", \"njqvawn\", \"wnlovvp\", \"vogum\", \"jpolc\", \"zleec\", \"sxdrww\", \"rbowr\", \"xsjorra\", \"kwjsx\", \"vornum\",\n \"echku\", \"kuizegn\", \"rhuvv\", \"eemkh\", \"yshht\", \"pbixoa\", \"cmbxvtr\", \"iupia\", \"nmcbq\", \"mgrjsx\", \"ejvniwt\",\n \"svhsel\", \"kazenhf\", \"fevpm\", \"xcwqfgw\", \"ozikzc\", \"mywnmqt\", \"taorwjm\", \"gcshacq\", \"fgtasq\", \"qexygw\",\n \"ljmbari\", \"zfjudos\", \"rgxuzy\", \"kmzryaf\", \"exjfd\", \"mcqnebz\", \"ptoim\", \"zglfi\", \"fhneaz\", \"rexgc\", \"lhplwyr\",\n \"dthdp\", \"jizetec\", \"obyzg\", \"rqupa\", \"yphttge\", \"wdcdn\", \"wdomtr\", \"hchbd\", \"ytyra\", \"upytftl\", \"swbbi\",\n \"qpcybv\", \"dcoxspd\", \"dftkf\", \"nwjfmj\", \"ojbwy\", \"zofuy\", \"adqkt\", \"kpcply\", \"aeukw\", \"fqblb\", \"xurrbpo\",\n \"veioa\", \"puzvl\", \"bnzvlax\", \"tjzsdcw\", \"jarqr\", \"orxjbg\", \"ilrqdri\", \"syjuoyi\", \"htoqdco\", \"gwslw\", \"dpqyf\",\n \"jnkhv\", \"fpqhpr\", \"baewnvc\", \"caunsf\", \"qhbpe\", \"wlckl\", \"lmoroqe\", \"ddlak\", \"qipwbfp\", \"cefqs\", \"surczp\",\n \"jtmfuro\", \"ezhqau\", \"dlsco\", \"hywoqh\", \"lnifq\", \"hvfmu\", \"cqjdkok\", \"tggdact\", \"rwuowdk\", \"attnl\", \"lwhyq\",\n \"mqtsc\", \"bmwajiy\", \"nyohug\", \"vvfpt\", \"lbyazu\", \"sarwago\", \"iccztck\", \"ugsxcw\", \"rpwza\", \"yofmlll\", \"ulhdzhg\",\n \"lbaqk\", \"bwxxwc\", \"dmsbawg\", \"tjloy\", \"imbrkul\", \"xguke\", \"shlkuq\", \"lizjcdu\", \"kmvykl\", \"ilqxxjm\", \"rtbvvqt\",\n \"qisec\", \"zobzr\", \"thwntt\", \"afpifh\", 
\"uwiiovy\", \"hgsyecl\", \"pdgnm\", \"mqyesch\", \"suexztu\", \"msguuwu\", \"yrykkv\",\n \"xtoommc\", \"muteu\", \"bamml\", \"kkhlb\", \"jfrnx\", \"wpytor\", \"zzogpt\", \"yryxxt\", \"hzqofjd\", \"ehtildc\", \"ptclf\",\n \"nyltvd\", \"nrret\", \"qqqqt\", \"uuxunf\", \"jajxt\", \"lzdvlc\", \"gpdtjug\", \"hjsso\", \"jairua\", \"qarxuey\", \"rpwwjwv\",\n \"cjqypep\", \"tuzgcs\", \"oytqxb\", \"rgfmud\", \"stnwn\", \"tzzaop\", \"jpuopzg\", \"qeywd\", \"spnstrg\", \"dfwgntg\", \"yjyqk\",\n \"ioowc\", \"duqfg\", \"gmqxe\", \"xhlbby\", \"liurjk\", \"vdujfm\", \"xxyyn\", \"omapgc\", \"koemzbz\", \"ziiyako\", \"pjmhfrv\",\n \"bshtfgj\", \"ihjvt\", \"pnipuw\", \"fajiuj\", \"rdvcqzd\", \"mgknns\", \"ouwkm\", \"ejnklwc\", \"osepl\", \"gplpyvs\", \"paxrddg\",\n \"gsjlpd\", \"lgnmgl\", \"yifeeer\", \"hhnwlol\", \"fcmxs\", \"ilinwgm\", \"udhfdtq\", \"ceefc\", \"xweqx\", \"jfelwod\",\n \"rtywfjo\", \"kzwrgqx\", \"fcjriov\", \"fzytqv\", \"zcpcddo\", \"scpyzow\", \"kbzegu\", \"gclwr\", \"gmiwlp\", \"rtpka\",\n \"yiywuyy\", \"qceot\", \"dtrgn\", \"ntwbu\", \"fxobd\", \"zmxwza\", \"qcksyz\", \"wgbtmm\", \"pzorve\", \"hztydc\", \"jqlay\",\n \"ijdkbk\", \"uzjrps\", \"gfzibk\", \"gsxqj\", \"kgjrkdd\", \"smdeuk\", \"iwizewp\", \"owjie\", \"kcdccu\", \"ifltqr\", \"zrdfbm\",\n \"pznbcsk\", \"mtkpi\", \"cpasir\", \"flrxrm\", \"uxcxnv\", \"htlfcp\", \"ltukxfr\", \"ftbbha\", \"jhgjgyz\", \"qjreroc\",\n \"vcvtbid\", \"nrhlq\", \"gtkpot\", \"gyplqqg\", \"lnorig\", \"fixhufv\", \"ugcug\", \"ndfug\", \"wuorhe\", \"owocnkw\", \"rcnbf\",\n \"ioiiiui\", \"kakwtne\", \"svxtt\", \"wdrxogm\", \"ibrxs\", \"bddqi\", \"jeguac\", \"hlftdw\", \"nutgfjw\", \"krrzvf\", \"amxuloc\",\n \"deozdoe\", \"ovsvk\", \"sfqsl\", \"slgiw\", \"jbjujag\", \"mhiru\", \"uqksech\", \"davosw\", \"nlueljv\", \"rhtvdu\", \"ivdpdqa\",\n \"qnbenpq\", \"dtapqq\", \"hwwfpxl\", \"oyrfosn\", \"goxgmgo\", \"tbvutl\", \"cbbbcm\", \"iiugpk\", \"hinkem\", \"vvaitk\",\n \"pskyf\", \"hdnekg\", \"nqhfn\", \"dqbozx\", \"zcwpko\", \"kafyu\", 
\"jfegubk\", \"nofqzsk\", \"ujmxxg\", \"akwzemu\", \"yvhxb\",\n \"qqlwofi\", \"hmoecj\", \"qwgtlc\", \"jepvygq\", \"uzggm\", \"fztiews\", \"lvndvf\", \"vulax\", \"znqudh\", \"whgqi\", \"noguo\",\n \"vewkx\", \"uruvgf\", \"ubohmba\", \"aulzi\", \"flvfdlq\", \"yspfie\", \"wugif\", \"qndyiwa\", \"keihmct\", \"rggvn\", \"ojjmuoh\",\n \"sbbcl\", \"cdivmoz\", \"vkusmp\", \"mfddp\", \"kgohwvp\", \"rjbbxw\", \"vsgptj\", \"hbyjoz\", \"gufrv\", \"orxiv\", \"fxcqfw\",\n \"okppik\", \"qlouw\", \"lkryigo\", \"qccvc\", \"ixcnodg\", \"wlfilts\", \"ahqtevp\", \"kkbuha\", \"oehaez\", \"rzczib\", \"vxobk\",\n \"wmetvjs\", \"xfjgeq\", \"eadzl\", \"aeqdvch\", \"czojfq\", \"hxshidl\", \"ofswsj\", \"iwbqcmg\", \"schhwtt\", \"ltyth\", \"wiccu\",\n \"akill\", \"zaaji\", \"qepvfa\", \"mpvrkeu\", \"dcpenm\", \"wdhlk\", \"llqbby\", \"lronwkr\", \"rwtguo\", \"ofnvs\", \"lxdnwzf\",\n \"dctmilf\", \"zhckjd\", \"hajsuac\", \"wpylhy\", \"zhipvm\", \"ihikr\", \"zzwjgvr\", \"gdglrn\", \"skhow\", \"tlqtjl\", \"uypli\",\n \"evdva\", \"civide\", \"iroihm\", \"lvuzid\", \"vexat\", \"ngmvrz\", \"szdhbt\", \"ggrbz\", \"bsmovlt\", \"kguomvl\", \"onzvx\",\n \"nobgxw\", \"tqxemc\", \"vbiyx\", \"fpzpf\", \"ogtvf\", \"yuthri\", \"xszbn\", \"xcuhj\", \"nosnpbp\", \"mowsxg\", \"tfalyy\",\n \"kxombgm\", \"cukrz\", \"krmseq\", \"velzh\", \"kmufxj\", \"nvxlkq\", \"ualvras\", \"wytoucy\", \"qicqyym\", \"pbeujtv\",\n \"haojnbm\", \"xnfffpe\", \"wvoiald\", \"rlyvf\", \"sxamoxw\", \"ztqnmp\", \"biiavx\", \"lnjnzs\", \"arqdjdy\", \"pkrgokc\",\n \"qxswouj\", \"dgqah\", \"mnhzo\", \"ggilb\", \"qscrd\", \"ggvkimw\", \"qlxjys\", \"wximi\", \"aqlhio\", \"iavtvy\", \"grkqf\",\n \"dwrtut\", \"uozutfc\", \"fogxpdb\", \"ydtntlq\", \"vnmpmwp\", \"gtxhwq\", \"mlpihx\", \"yfpjlz\", \"hdvcquq\", \"nunny\",\n \"wklasgp\", \"wxduo\", \"topsqf\", \"tngcpzc\", \"mcrut\", \"pdnsmt\", \"kavaok\", \"seiqsqa\", \"bhgkiyt\", \"mawvhtp\",\n \"domcnrm\", \"fgusghc\", \"wdaufwz\", \"tzpuks\", \"kisndyz\", \"fwyieu\", \"wtdum\", \"ytxhl\", 
\"yhzkmuv\", \"nppnqe\", \"ccvhj\",\n \"dautnyq\", \"hkaliab\", \"kngan\", \"ebmhiop\", \"vsdkcef\", \"nmpcnd\", \"vxvnl\", \"cwcgu\", \"zsuneh\", \"qjgcmd\", \"awvba\",\n \"rzbisxo\", \"oilqrj\", \"neiazlm\", \"hlyrl\", \"tmiht\", \"lwqxxv\", \"gyblrw\", \"gnnjkb\", \"lrxiln\", \"xlwlseh\", \"npfwcvp\",\n \"yjcdhw\", \"rzndd\", \"orlhmip\", \"gatuojh\", \"osotgvv\", \"owksz\", \"kcocizf\", \"izlev\", \"smigns\", \"wtxfwo\", \"knwizte\",\n \"mqjojzp\", \"lkezye\", \"xqldbu\", \"cvbpyl\", \"aoipbz\", \"asrupt\", \"bdwkesh\", \"jpaykm\", \"pksbg\", \"gdbsibd\", \"lfxpwk\",\n \"rmnfph\", \"yzxwke\", \"xjwyusv\", \"yetar\", \"sytdz\", \"pnystzi\", \"yntcqo\", \"egoorl\", \"aydxu\", \"rfdrfhe\", \"flzkos\",\n \"mmjgev\", \"fbjwmvi\", \"jeouc\", \"lcmkri\", \"aggsb\", \"aaeazai\", \"amyxpey\", \"onxqpg\", \"qrjpxq\", \"zanea\", \"niwsgtv\",\n \"nsqja\", \"utgskd\", \"hlcum\", \"frygtl\", \"xjmqetz\", \"upqddd\", \"vxzdstm\", \"hcmtera\", \"ejstou\", \"xkcguf\", \"bokigdk\",\n \"vurnv\", \"zsgrje\", \"nbxlf\", \"tpilcx\", \"lvepux\", \"xacdtp\", \"amdgx\", \"ubbvnx\", \"xmvznh\", \"tlprri\", \"sthkn\",\n \"xhoad\", \"deotaxo\", \"pqzppmw\", \"xlcpx\", \"qwzrpyp\", \"lujabeb\", \"heskwyy\", \"mzzaaur\", \"vnestcs\", \"rryphdl\",\n \"ibdiabi\", \"eoiyt\", \"znflx\", \"clougix\", \"zzadxw\", \"lrrgtf\", \"lsdoakf\", \"yxfmqx\", \"qhnrry\", \"ktcdmv\", \"veygqu\",\n \"btjlo\", \"fcspsc\", \"gozoazm\", \"xcsqgz\", \"aazae\", \"nkuvask\", \"mzdgjq\", \"sihqdhy\", \"zadrwzw\", \"gzcyuea\",\n \"lpgccic\", \"fqtfuzw\", \"bjoqpkc\", \"oydpkxc\", \"sugnnu\", \"hyvygf\", \"axkxo\", \"rsmzb\", \"dlhqmac\", \"gbqby\", \"npqkj\",\n \"odbtb\", \"bdsib\", \"zyasxv\", \"ifxqcc\", \"lmnjwhr\", \"ibuyu\", \"uzhle\", \"ccpwhjr\", \"vhrojnz\", \"fkzfz\", \"fyesm\",\n \"dnvipvm\", \"jbbqn\", \"qdkgl\", \"xkvvgq\", \"dphugaf\", \"soxbfun\", \"rbgokx\", \"biveiz\", \"vbaqtn\", \"qapydgf\", \"llldu\",\n \"ottjpzu\", \"fwjuc\", \"cawio\", \"gbkwe\", \"rrnnxer\", \"luviy\", \"zsalse\", \"ckwdeox\", 
\"ozhqocm\", \"vtozfwz\", \"jztole\",\n \"ydqei\", \"bfugz\", \"psawjp\", \"dzlyrwp\", \"izuyrne\", \"rbwcfr\", \"vdvte\", \"usjbqs\", \"zzovkxr\", \"frfkwk\", \"mmtmdd\",\n \"sntka\", \"wachbzo\", \"rmzvj\", \"scbngo\", \"eqiuiwi\", \"qfakk\", \"cckcmt\", \"owhzow\", \"rejdlw\", \"iprsqdq\", \"twwaldw\",\n \"mfilzyk\", \"jygvx\", \"iewbo\", \"irhko\", \"zpazqhn\", \"ndqbg\", \"ayzxqdz\", \"zvpbh\", \"maapq\", \"pzitrfm\", \"qsgsurv\",\n \"viwcfff\", \"wpgenms\", \"tjmvu\", \"czuemc\", \"infxoo\", \"avhbw\", \"nugkqx\", \"xubakjp\", \"ndask\", \"utaqq\", \"njhuxq\",\n \"sdvuex\", \"tfmxqp\", \"bydovjo\", \"bizxjsp\", \"zoozxyv\", \"jegei\", \"gkpqobw\", \"psumbtg\", \"gkgoh\", \"sgcbpql\",\n \"xxkhy\", \"kdorkr\", \"hcomj\", \"ulrpyv\", \"rhplil\", \"tyyochd\", \"xhzul\", \"srdjmns\", \"kgukye\", \"yepvs\", \"xnobsjb\",\n \"umxmtub\", \"wvqasr\", \"igftpzw\", \"exhecn\", \"rreee\", \"jpxuvxh\", \"jriqf\", \"akexunb\", \"ekvdsoe\", \"ytzvj\",\n \"vfrlyae\", \"pmfai\", \"biouzle\", \"xkbce\", \"clzyi\", \"xhjoso\", \"wmxkxb\", \"dqzzig\", \"ydtby\", \"gskwj\", \"wlkwbz\",\n \"zepvllz\", \"zsgqp\", \"blntawk\", \"eynmil\", \"bdqyp\", \"wgtnqbc\", \"rrgaq\", \"gtafuzo\", \"qdiko\", \"kkcsdo\", \"zwqhs\",\n \"kugzbmf\", \"wtvvs\", \"kqsdx\", \"mxsuxiz\", \"pgbgjfe\", \"vodfr\", \"qbvwu\", \"vfwbhgw\", \"ayojye\", \"kolzfqg\", \"xnbecj\",\n \"akbcnf\", \"uutrn\", \"upmesa\", \"marqej\", \"bbucee\", \"bazqbau\", \"qikgsyf\", \"oeayzn\", \"uilxnzr\", \"vpnxknl\",\n \"btgtxgh\", \"vjaav\", \"zaxtzah\", \"msweps\", \"awduwld\", \"gzaep\", \"ngvgc\", \"qpoqdgn\", \"kimndg\", \"qilmmpw\",\n \"oafhlyp\", \"nyelgvw\", \"onymk\", \"feycbc\", \"dhcrx\", \"siqpfly\", \"tyvycmf\", \"huctqp\", \"uscjrp\", \"bbptd\", \"msdmu\",\n \"xlxhye\", \"xnyzcox\", \"kyskda\", \"injdkmp\", \"jiwus\", \"spjylwd\", \"eqcrnt\", \"snfiu\", \"jvwvge\", \"yfeaw\", \"mmdnsjj\",\n \"suzdw\", \"xiupf\", \"rjwjhng\", \"tqvasy\", \"rmibpa\", \"zuqax\", \"prpndnp\", \"efryqe\", \"pwuqfy\", \"wpqlfs\", 
\"aeswq\",\n \"cxkeiue\", \"jydxzfi\", \"tzfvwp\", \"zzgtw\", \"mupiusx\", \"sojavt\", \"dxmsgq\", \"migjiyj\", \"kixjk\", \"ywwvcpl\",\n \"khzcuo\", \"oykhx\", \"fochin\", \"foxbfkc\", \"sizjg\", \"wrjcvr\", \"ceadd\", \"tvfqgxq\", \"whzhche\", \"dcoeti\", \"mpilfib\",\n \"cphie\", \"ucpnjm\", \"ajltvx\", \"kpizym\", \"vevfsrs\", \"jznrri\", \"yvhxomr\", \"cbcnk\", \"yuwuhu\", \"jywuzed\", \"kqakusq\",\n \"jrnzgfo\", \"mjimzz\", \"mfjybnd\", \"ntqyq\", \"junxxck\", \"myvqajv\", \"kvuqs\", \"obfxw\", \"jwuba\", \"vnrvzvy\", \"aeric\",\n \"vtgda\", \"nkrocpt\", \"ahitg\", \"dzxtr\", \"zswwc\", \"yhxap\", \"fdhiwr\", \"cpxtqv\", \"izbmo\", \"zyioo\", \"vysnoe\",\n \"ouuyvj\", \"cumdhzn\", \"dbsmph\", \"cktjem\", \"vbmxy\", \"utgfyhc\", \"rqdeorp\", \"btnlmd\", \"chxwlt\", \"nsghoqi\",\n \"egycsm\", \"wkanat\", \"lzjyf\", \"donyx\", \"cchqsa\", \"xozzz\", \"yzmnf\", \"jfzuh\", \"dpcpg\", \"hlahz\", \"vobopk\",\n \"lssfeli\", \"ccttzi\", \"glzgqpv\", \"oyqzug\", \"qqhkrr\", \"euwotv\", \"hwbmtz\", \"hiylhly\", \"bppzne\", \"yetyyvs\",\n \"cnbwcby\", \"hzblk\", \"pfjmxt\", \"dsxvt\", \"vvkju\", \"zjrfr\", \"gdbhb\", \"udoad\", \"nbhpzfm\", \"iwetbym\", \"atmly\",\n \"tnxli\", \"myegb\", \"hiwqsk\", \"btrajk\", \"nhrmwn\", \"ftmbecv\", \"xopht\", \"eiikqy\", \"qizanwa\", \"cwxiatf\", \"jshjva\",\n \"llrtkn\", \"zhivu\", \"lmwiu\", \"oaeaqz\", \"oxotfub\", \"jnkafm\", \"juhrmq\", \"mqzbtw\", \"puiaxty\", \"dnahvoj\", \"gaxhz\",\n \"xfnay\", \"iqmlnlq\", \"xudhcg\", \"izpkz\", \"tqttmt\", \"bwnbs\", \"fdufd\", \"vhzyymh\", \"zhqtxr\", \"evbcrv\", \"xvnma\",\n \"dgcwy\", \"cwxzlbz\", \"oodiol\", \"teyim\", \"kqqfjub\", \"ftsqzi\", \"arfztkr\", \"oqlujx\", \"rpkkdov\", \"ptoff\", \"ivxaxr\",\n \"nxeept\", \"cacpl\", \"tehir\", \"spvggl\", \"qfzxkn\", \"bhwkukx\", \"fkdpuq\", \"xdrngre\", \"fnfplq\", \"dzbrl\", \"ufgxu\",\n \"sciec\", \"fgdydvw\", \"nmpaqxi\", \"ydsvfv\", \"natjz\", \"lruyvzf\", \"xznznxp\", \"mhfrh\", \"kddsk\", \"uwatn\", \"uklzs\",\n \"lnuta\", \"ryizc\", 
\"cvwko\", \"tnzpk\", \"ywpiv\", \"vbvcagq\", \"pzolw\", \"nmyfhg\", \"cshkofj\", \"ksptw\", \"kqejh\",\n \"zgzjqzo\", \"mxzrw\", \"enabosq\", \"vmubgc\", \"sfzcj\", \"hewvk\", \"ewhrq\", \"oifnsmi\", \"izdnvu\", \"cshgtk\", \"mqotuhd\",\n \"gnqgj\", \"rxailbm\", \"iyhxvtu\", \"ncjzklq\", \"zjmnoc\", \"awqwos\", \"ugujppc\", \"spbvfwl\", \"gntsvo\", \"euksu\",\n \"qnvneph\", \"crhmf\", \"brktmf\", \"mvgmr\", \"yzcskrp\", \"tihawec\", \"edqmxpn\", \"fxyymlr\", \"dzfkucm\", \"prldz\",\n \"gplrlhz\", \"bohwr\", \"bhebbk\", \"mmecj\", \"segydd\", \"ptslsb\", \"pyhgw\", \"cwmrq\", \"mjfhflh\", \"xhuid\", \"npxmb\",\n \"izilq\", \"dczhqh\", \"tgfnxtb\", \"zrylvo\", \"lctxrar\", \"ylhrbii\", \"rfxedv\", \"llvhzjq\", \"bjocv\", \"wbnex\", \"cnohnf\",\n \"xahrl\", \"rouvwyc\", \"hbhovgv\", \"dhucp\", \"ncmff\", \"ncsskg\", \"gsjbyin\", \"lroxscf\", \"whfaenl\", \"vsfultg\",\n \"floxkpy\", \"captoai\", \"qwolyex\", \"ggaypn\", \"wzunypd\", \"pjixeu\", \"gxnjkoc\", \"pqiqhn\", \"xakjmgz\", \"vqizkx\",\n \"gdzcxr\", \"kyxwdd\", \"pgxmazn\", \"qeuwf\", \"bduknm\", \"tcrcn\", \"nehgee\", \"wktbcgu\", \"jwqltdt\", \"wczkai\", \"drkqs\",\n \"qhdqnn\", \"oobxirc\", \"lbunv\", \"ifscr\", \"xnfpbrw\", \"yrrdbax\", \"fbocs\", \"tewne\", \"iobixe\", \"zgosas\", \"yhesn\",\n \"xlqwd\", \"pfcen\", \"slsjffx\", \"ilwatrc\", \"mhsmgp\", \"iteghl\", \"aqhufdl\", \"kxgpqcu\", \"ryrcgp\", \"azidf\", \"smlnl\",\n \"rocxvbt\", \"iutfc\", \"loapgbr\", \"musulp\", \"dqcnj\", \"tpgbkfh\", \"wvskii\", \"itkfopo\", \"kytyb\", \"rzahbu\", \"aewptd\",\n \"ohergbb\", \"cadxh\", \"aphwelj\", \"huooyzn\", \"gtttia\", \"izeyhcr\", \"cfvxz\", \"aitaxyp\", \"vypqost\", \"ebfnmif\",\n \"kgiucm\", \"zryyu\", \"oxgnbpt\", \"frpwo\", \"ouqvodl\", \"pdaazh\", \"gxwmf\", \"dozxsjm\", \"yndpsik\", \"zcwvu\", \"mihug\",\n \"jgodklw\", \"ysklw\", \"cfxqv\", \"yqvtz\", \"rctnp\", \"xjywa\", \"kpqyw\", \"hhtegzt\", \"rnwbeoi\", \"uyxqum\", \"jahcwbe\",\n \"jzjns\", \"ovwoaz\", \"oqmsrua\", \"natbejl\", \"deffv\", 
\"okgbr\", \"paqhy\", \"jkafhte\", \"lifsknp\", \"afmskh\", \"oemdro\",\n \"oxuwov\", \"qtyxa\", \"hkpfsm\", \"ulaubn\", \"tciurw\", \"myohwlo\", \"okuiejb\", \"ormoqsb\", \"gmipz\", \"hterzir\", \"ekxzre\",\n \"xkevge\", \"ihenf\", \"nnhzv\", \"eocjmx\", \"upzal\", \"oounfko\", \"myhbwub\", \"fwipva\", \"pkzzvpd\", \"nrupm\", \"vluzq\",\n \"fxkoyho\", \"atzktr\", \"aomrp\", \"qwpser\", \"ejagmb\", \"cfigelm\", \"bvanb\", \"cgcgabo\", \"hmjvlqt\", \"hxxocf\", \"ftqaud\",\n \"htuipy\", \"bhwmcn\", \"tgyvaqe\", \"lvuwh\", \"yiabzs\", \"rzzavu\", \"fiubm\", \"uuqsb\", \"riyakuf\", \"psscffd\", \"kvckzr\",\n \"fktmnf\", \"ivzqexi\", \"nhxzm\", \"kffjmb\", \"vdzxv\", \"esago\", \"bfikw\", \"gaiuxmz\", \"volokcm\", \"jypcs\", \"psibvs\",\n \"hxaxklf\", \"lmqwgy\", \"spnbimo\", \"mtihak\", \"xikoiy\", \"rmmtv\", \"phaqgxj\", \"zcuwkhk\", \"emodbyb\", \"ztahsya\",\n \"ieiqm\", \"lfoquh\", \"emznnq\", \"pnhlgut\", \"pgvads\", \"cqsjx\", \"lxnjei\", \"zpque\", \"rdjbiyb\", \"sxedpu\", \"potnqva\",\n \"iirkn\", \"rjmnrxd\", \"ksgcd\", \"waeymnh\", \"tizdz\", \"kproa\", \"wpttygd\", \"lvyze\", \"peewvgm\", \"fwtyzbw\", \"zitkk\",\n \"gfgqr\", \"udgvlz\", \"swqspo\", \"ohhvyq\", \"kgyuau\", \"hcerp\", \"pdomlm\", \"twabkk\", \"zfsea\", \"epiwp\", \"xgycjpt\",\n \"jtkdh\", \"mxmdm\", \"rtkzm\", \"qkacy\", \"nuvdiq\", \"agctak\", \"hypgyh\", \"ewtjp\", \"paysolw\", \"bcutebe\", \"xelxyb\",\n \"gzdvrth\", \"vpzfv\", \"cxrkt\", \"admiyzi\", \"lqlmn\", \"zbjpbg\", \"tlvdnli\", \"zetnox\", \"ylcsobo\", \"balajod\", \"igoume\",\n \"sxcgw\", \"sbkkafk\", \"fmndnnw\", \"incsa\", \"jyupkg\", \"uhvvc\", \"rswnbth\", \"nvprfj\", \"figqf\", \"znyidqi\", \"aijper\",\n \"euidr\", \"dftxkze\", \"vnppi\", \"splwifc\", \"fprgafl\", \"ixzaz\", \"mrhqtne\", \"dtkjsy\", \"dsmqrgy\", \"xfscz\", \"cymvmpu\",\n \"vptkfdx\", \"zrgrjq\", \"mqvwsur\", \"hdtlw\", \"ugdpwun\", \"cvxitc\", \"vytvqg\", \"pmtpfz\", \"nfdtdt\", \"umvwjuc\", \"jouxc\",\n \"qpypri\", \"pdhqp\", \"lmise\", \"wlsvcfg\", 
\"aqdkzcb\", \"qlrmrfz\", \"pbgoyi\", \"xmsskoh\", \"jjdye\", \"xvsdmq\", \"ymjeipy\",\n \"igjyv\", \"uiojvmc\", \"uckoww\", \"grlnyeg\", \"hpglp\", \"omnnyy\", \"iiliir\", \"cnucbcx\", \"pcxvs\", \"hipad\", \"xmiltkj\",\n \"oorwi\", \"qgoxjj\", \"jnmviqs\", \"wpleqn\", \"tudxw\", \"pcogem\", \"hgewaf\", \"niwfexy\", \"vcttgcb\", \"anjgovq\",\n \"epgmscd\", \"mdtru\", \"xvapv\", \"rydjik\", \"kopppcr\", \"mjbsmu\", \"unxoakz\", \"ldpsw\", \"frksjr\", \"vyxxg\", \"yyydri\",\n \"szidq\", \"qvbtd\", \"qratl\", \"xwfov\", \"bzhqyxl\", \"fskrtf\", \"pcpzmnv\", \"xuxwx\", \"vzbevnb\", \"ebaqz\", \"dbpuek\",\n \"ooqwj\", \"gaimp\", \"coelqh\", \"bwuceq\", \"oxpfjt\", \"zrqyc\", \"rwllk\", \"pqunv\", \"ufbnn\", \"tbnjoz\", \"kkqmrxu\",\n \"qyyrm\", \"hislf\", \"wyuck\", \"ubpre\", \"pdioi\", \"aryhv\", \"vdcxv\", \"rkgmaag\", \"czlzokw\", \"gtxuduz\", \"grpijx\",\n \"qzrar\", \"qhues\", \"rmznt\", \"sxxmved\", \"onjzuwl\", \"atbjhip\", \"nrardl\", \"alrocy\", \"cfkip\", \"ihtbf\", \"pqdgm\",\n \"hmokun\", \"dpghac\", \"otwml\", \"mnbzwa\", \"ehetlt\", \"rchvq\", \"lwjgywn\", \"lzdmjo\", \"nvhohdp\", \"tmshcpc\", \"gavjv\",\n \"ycnkv\", \"uynzh\", \"bvpnfjq\", \"lfbem\", \"qberui\", \"vrmmhx\", \"wpbqtfq\", \"jujpx\", \"dujgkof\", \"hrpbso\", \"zhcdt\",\n \"iybngyb\", \"rgeruza\", \"nesyxr\", \"cihgfe\", \"hjgskb\", \"zspxeqm\", \"inzrgyd\", \"crkjq\", \"iooshwp\", \"muvvj\", \"wakis\",\n \"rowibwa\", \"qikwypf\", \"aportho\", \"pubcgx\", \"vqoqpfi\", \"rnpbri\", \"ussjv\", \"looor\", \"xkzvdv\", \"tstegg\",\n \"zgiiokw\", \"rwvyaun\", \"mqqla\", \"asnqp\", \"nghuryl\", \"hlvhn\", \"ecuotnu\", \"judvbu\", \"xgvuw\", \"oeckn\", \"hdhttsg\",\n \"hcyhu\", \"klbyjc\", \"tnrmqnc\", \"mjojxhi\", \"kvdet\", \"vbmevim\", \"oglrzs\", \"afbscdi\", \"zxrffti\", \"firzgmz\",\n \"oenim\", \"wgpua\", \"asiep\", \"kyteq\", \"wpeneca\", \"qixmeoq\", \"zaofon\", \"csxxtr\", \"cpwmnl\", \"feylas\", \"idjuo\",\n \"mrtpvta\", \"jjvmjy\", \"mnljocc\", \"lnvjleq\", \"oognud\", \"rbyneq\", 
\"rhvomm\", \"fldrkpk\", \"znvrp\", \"myswmz\", \"jiloe\",\n \"juivjmo\", \"ylhbyzl\", \"ndmabkt\", \"sgdvlq\", \"pmnddmi\", \"utpuj\", \"kfisv\", \"nxfeell\", \"mxhgqd\", \"ccvdsdg\",\n \"emtybo\", \"zmkylbt\", \"mmrpi\", \"dkwlgq\", \"iwlappb\", \"uimsrnu\", \"mkxaxmi\", \"tcvll\", \"njggal\", \"kmqud\", \"evgzlh\",\n \"oaxizbp\", \"jiuej\", \"xknlp\", \"cyksydh\", \"gbixmz\", \"vtouyk\", \"sxjpkio\", \"qhubt\", \"kflvnb\", \"sjdfggl\", \"bxozyj\",\n \"xekbh\", \"wtmcb\", \"xtapfco\", \"rnornl\", \"ursdpki\", \"waonim\", \"eibfyed\", \"zniinaz\", \"uyfohq\", \"qcaxlt\",\n \"koyaapa\", \"pjuvbsi\", \"ecpdl\", \"ifaqwm\", \"yyumzc\", \"gvfngfp\", \"lttul\", \"flyza\", \"uasdlme\", \"oklhb\", \"wulkzzv\",\n \"ziwsxo\", \"jqcxiu\", \"qdzrwgm\", \"zjdwy\", \"uumns\", \"emlnp\", \"irnrqp\", \"gqkza\", \"oynpcz\", \"yxyea\", \"zpamf\",\n \"gyehxbv\", \"nplkhcc\", \"rxeekyo\", \"kecgp\", \"gseju\", \"nkisxqf\", \"vlyud\", \"fxxihhm\", \"yjgtml\", \"fehwpdi\",\n \"wclnvyy\", \"lriwrc\", \"ikparv\", \"volfh\", \"ysphh\", \"szrvrv\", \"rqlmz\", \"jyqut\", \"fyftsj\", \"uvwfip\", \"rngwgm\",\n \"mjwaz\", \"roehjki\", \"ploxokr\", \"yjbalp\", \"fspkq\", \"yfxrb\", \"kzulvk\", \"ordxp\", \"vdrrt\", \"wdiojwd\", \"ridzl\",\n \"niykdvu\", \"whyycmn\", \"riwcma\", \"bkhgkrb\", \"nsine\", \"emgtgf\", \"zoymw\", \"ljtvhzb\", \"kfyfdma\", \"piygxdl\",\n \"onfwgdf\", \"fwmkm\", \"vqbljay\", \"icife\", \"bxfli\", \"yeygr\", \"qenhgm\", \"mtxuckj\", \"kdcyx\", \"kwqhfcn\", \"ywkfy\",\n \"prbpw\", \"pheyc\", \"kmnds\", \"cacqs\", \"kvekiqy\", \"bfvfhdy\", \"gxulp\", \"skmcra\", \"exomt\", \"lcxue\", \"mnvvday\",\n \"rsddl\", \"gooegc\", \"udght\", \"doymnin\", \"ccdap\", \"wuive\", \"dyyln\", \"rynust\", \"luxabyg\", \"kdkkyyw\", \"vawqfsy\",\n \"rmeswm\", \"rcxzyv\", \"clpowz\", \"pdntqm\", \"tvjkkmz\", \"iiclw\", \"nhudzen\", \"cybhu\", \"crwtw\", \"enypnh\", \"ygekg\",\n \"hrjwqt\", \"peissge\", \"wangcy\", \"rbpoik\", \"raqulbf\", \"gyisnsj\", \"rgbqn\", \"lgvuzb\", \"djicf\", 
\"epnuu\", \"nsapc\",\n \"voatgh\", \"yorfehc\", \"jxfttat\", \"wyuivb\", \"bwopl\", \"odwdsh\", \"anchkv\", \"sepvew\", \"qoxxmae\", \"bpvqnj\", \"sngfo\",\n \"buoazou\", \"zhijssa\", \"janng\", \"uvdbd\", \"yfvkqo\", \"lcjii\", \"mvacvrz\", \"xztiar\", \"lpbtrqa\", \"ukbpdx\", \"okaqpgr\",\n \"idgqlj\", \"ewglgo\", \"ruymhi\", \"pcidw\", \"bvuqj\", \"npzch\", \"yppyan\", \"oiguirj\", \"iijvwqj\", \"jvbwjys\", \"yjtunfc\",\n \"iaikra\", \"oduhdgk\", \"ivixur\", \"ibcgai\", \"djzvcbx\", \"lmtsul\", \"lgnwzol\", \"wursq\", \"xsxbqwq\", \"jqvwnc\",\n \"dcwwvtb\", \"vwybnr\", \"bughwjl\", \"rnelxb\", \"hmacv\", \"ufgdygl\", \"aabuat\", \"oynwask\", \"gnfjjf\", \"zipbq\", \"zxstn\",\n \"jdrbprf\", \"jmkvny\", \"rblpql\", \"vykdj\", \"qaakyqw\", \"osbhddb\", \"avgldyy\", \"kvpoa\", \"fnqcliu\", \"zzlninw\",\n \"drsal\", \"omswys\", \"hwqcpct\", \"ecraq\", \"fvhsbjq\", \"raauy\", \"pfmoz\", \"vvqvcm\", \"tbjqjun\", \"jcfbegq\", \"otiwup\",\n \"axvvce\", \"dhpdnx\", \"pennr\", \"hvvmvzv\", \"binezl\", \"ygdmcuo\", \"ypwnqn\", \"aloxdv\", \"ucieh\", \"kovbtag\", \"rgfpaww\",\n \"fpbftg\", \"spjowfr\", \"zridoy\", \"blwbbf\", \"evwlxi\", \"itbcz\", \"hgixuo\", \"qmoqmjb\", \"tkeeis\", \"pjiaq\", \"rbpje\",\n \"ledoui\", \"ubecht\", \"mphdd\", \"uzswsbb\", \"ntsybr\", \"qmnijyp\", \"pqwawe\", \"ltytill\", \"dpnxy\", \"pkxqcol\", \"ayrdi\",\n \"mycnd\", \"knotsn\", \"zvcrjl\", \"qwroblg\", \"vtrktey\", \"dzilezi\", \"wzkxg\", \"varqc\", \"xlpttyc\", \"xxqhnl\", \"jpxywa\",\n \"kjdsh\", \"hdseebw\", \"bxqbp\", \"flazqce\", \"xrtab\", \"rupsfq\", \"asswer\", \"rhqof\", \"hjzdv\", \"addsgax\", \"cuahzjj\",\n \"xwdilr\", \"osqgg\", \"pfhwv\", \"rqorah\", \"ggdlnv\", \"truvaoj\", \"jzuldwf\", \"mjddj\", \"vixtn\", \"eslxoaj\", \"cmoypm\",\n \"jvvzs\", \"oqgxcc\", \"tptls\", \"wwgwbj\", \"tysuhg\", \"xbnqb\", \"iogjvg\", \"fbxdmr\", \"zdvsmx\", \"hiuja\", \"watrt\",\n \"kjawab\", \"entxk\", \"jmnkaox\", \"zznsox\", \"asmzc\", \"soblvp\", \"quyxjw\", \"udrdc\", \"hyylvvw\", 
\"gzfwxuv\", \"jjqmjw\",\n \"faegxbl\", \"lqjcg\", \"bzmruq\", \"bykuh\", \"miwhd\", \"ykgtwhk\", \"oyobzwi\", \"oltwpua\", \"ctulabr\", \"dwandd\", \"vhuhox\",\n \"vtlknw\", \"ywvln\", \"qemqdeg\", \"akezvx\", \"kjmjpv\", \"vwuftx\", \"kreaxnj\", \"fvfop\", \"cxabs\", \"jfacbje\", \"eecnz\",\n \"cmblit\", \"gfvpoq\", \"whywnh\", \"pghvx\", \"ohgkmf\", \"xxtiwd\", \"nkojni\", \"dlcicnp\", \"bwyvyyd\", \"gifup\", \"vgjfr\",\n \"hhteifi\", \"kjhffq\", \"pawqaxl\", \"yozro\", \"slxluvd\", \"amqcquy\", \"vnnxkr\", \"wgdur\", \"rvawiu\", \"thcwnc\", \"cddut\",\n \"vnrtrv\", \"fnfio\", \"nhvxe\", \"rfdqmj\", \"ucblh\", \"ccbnt\", \"lxckaoy\", \"fnwcbx\", \"gmdbiwt\", \"ypvwjy\", \"cbjazk\",\n \"qmujnm\", \"nsqot\", \"lhcqt\", \"ijxcts\", \"nujrms\", \"itxel\", \"ghukr\", \"qpwitlr\", \"gcafqrn\", \"lcoho\", \"lfzab\",\n \"vwhgceb\", \"vgsgy\", \"jrtgo\", \"ryxlz\", \"deoyq\", \"ybenly\", \"lyysca\", \"sodvazo\", \"hbnnoz\", \"ovgvda\", \"elwtjx\",\n \"soydmn\", \"trdsi\", \"mwwjwo\", \"vupwj\", \"dszpcv\", \"kkhjdj\", \"ewmyo\", \"nmpeq\", \"oepldcq\", \"xttrgu\", \"wbcbxi\",\n \"jakzk\", \"peukyw\", \"fvcqv\", \"xklwuu\", \"hsmva\", \"kslmkq\", \"azllbig\", \"stnzih\", \"wfyud\", \"ihauy\", \"cfxmj\",\n \"pdyogwv\", \"dcqdpa\", \"xhusy\", \"jfpmpmm\", \"odeiiw\", \"ozyaer\", \"uykzvma\", \"tuaznxj\", \"kdnbdki\", \"syrnsem\",\n \"fdysz\", \"hhrpo\", \"fglzfi\", \"vgcqzqm\", \"qhsjr\", \"bvboe\", \"dpfwpvg\", \"mvvry\", \"itnnr\", \"lgykbe\", \"pscow\",\n \"mkrgeqv\", \"czffv\", \"apteht\", \"jeqixsx\", \"ksmbe\", \"zamivv\", \"vvmyo\", \"cwwoce\", \"sppubxc\", \"qaich\", \"nmbxr\",\n \"tfkwfxi\", \"iakhezl\", \"fxujis\", \"fkwffe\", \"antaylq\", \"mmfgstq\", \"zxaacy\", \"zlswx\", \"pbqxil\", \"eupck\",\n \"qzcxpbe\", \"rjalbzr\", \"wioagbq\", \"kreec\", \"zsdcuft\", \"rrdzb\", \"ocdlvq\", \"oxiroo\", \"zcxsqh\", \"wbrsi\", \"fqike\",\n \"oskzupi\", \"thvof\", \"dicbyst\", \"iojwe\", \"hyfizq\", \"yoknhww\", \"nupiyyn\", \"ievah\", \"slcgmxg\", \"cnecpa\", \"lcwsoj\",\n 
\"hnqsc\", \"ghipbi\", \"exobr\", \"nwpnq\", \"dmhbj\", \"amdbmwl\", \"xfbzovs\", \"puizvu\", \"yvsus\", \"ykysqg\", \"bgqdv\",\n \"zgqbr\", \"zkjpkej\", \"crkot\", \"zciymk\", \"tleogn\", \"sayrmz\", \"elwma\", \"zugjva\", \"uifwsmw\", \"wstrg\", \"xbotd\",\n \"hinsg\", \"qpgyoyp\", \"xzfocdy\", \"mbvuepb\", \"dtphufk\", \"cyapnt\", \"yyehhad\", \"ohdrd\", \"mlibm\", \"qzdfil\",\n \"rdwszqx\", \"bzcbmyn\", \"uarjlg\", \"mtwpqmx\", \"nmagl\", \"cepniel\", \"tylvaa\", \"melhd\", \"jygeneg\", \"fdglfy\",\n \"xcpciu\", \"ayrel\", \"bxceshv\", \"kspyg\", \"iclkaz\", \"ykbzt\", \"nrnkzo\", \"kxkto\", \"fabzszn\", \"edalls\", \"nilmh\",\n \"wwawgnn\", \"gymbtx\", \"mzipa\", \"ajevx\", \"qppisv\", \"otqhsf\", \"ippxak\", \"bixnqd\", \"uqitwo\", \"soxcug\", \"loiscd\",\n \"wqrjk\", \"rqntoa\", \"fzpxlp\", \"tuaob\", \"pyqqms\", \"krbzmmj\", \"aijqpfg\", \"nstqrbu\", \"wmtiahz\", \"joplby\", \"jyszxq\",\n \"jnxtyhe\", \"lbvfv\"]) == 14011\n",
"step-ids": [
5,
7,
8,
10,
12
]
}
|
[
5,
7,
8,
10,
12
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
print(titanic.head())
<|reserved_special_token_0|>
x['age'].fillna(x['age'].mean(), inplace=True)
x.fillna('UNKNOWN', inplace=True)
<|reserved_special_token_0|>
dtc.fit(x_train, y_train)
print(dtc.score(x_test, y_test))
<|reserved_special_token_0|>
dtc.fit(x_train_fs, y_train)
<|reserved_special_token_0|>
print(dtc.score(x_test_fs, y_test))
<|reserved_special_token_1|>
<|reserved_special_token_0|>
titanic = pd.read_csv(
'http://biostat.mc.vanderbilt.edu/wiki/pub/Main/DataSets/titanic.txt')
print(titanic.head())
x = titanic.drop(['row.names', 'name', 'survived'], axis=1)
y = titanic['survived']
x['age'].fillna(x['age'].mean(), inplace=True)
x.fillna('UNKNOWN', inplace=True)
<|reserved_special_token_0|>
x_train, x_test, y_train, y_test = train_test_split(x, y, test_size=0.25,
random_state=33)
<|reserved_special_token_0|>
vec = DictVectorizer()
x_train = vec.fit_transform(x_train.to_dict(orient='record'))
x_test = vec.transform(x_test.to_dict(orient='record'))
<|reserved_special_token_0|>
dtc = DecisionTreeClassifier(criterion='entropy')
dtc.fit(x_train, y_train)
print(dtc.score(x_test, y_test))
<|reserved_special_token_0|>
fs = feature_selection.SelectPercentile(feature_selection.chi2, percentile=20)
x_train_fs = fs.fit_transform(x_train, y_train)
dtc.fit(x_train_fs, y_train)
x_test_fs = fs.transform(x_test)
print(dtc.score(x_test_fs, y_test))
<|reserved_special_token_1|>
import pandas as pd
titanic = pd.read_csv(
'http://biostat.mc.vanderbilt.edu/wiki/pub/Main/DataSets/titanic.txt')
print(titanic.head())
x = titanic.drop(['row.names', 'name', 'survived'], axis=1)
y = titanic['survived']
x['age'].fillna(x['age'].mean(), inplace=True)
x.fillna('UNKNOWN', inplace=True)
from sklearn.cross_validation import train_test_split
x_train, x_test, y_train, y_test = train_test_split(x, y, test_size=0.25,
random_state=33)
from sklearn.feature_extraction import DictVectorizer
vec = DictVectorizer()
x_train = vec.fit_transform(x_train.to_dict(orient='record'))
x_test = vec.transform(x_test.to_dict(orient='record'))
from sklearn.tree import DecisionTreeClassifier
dtc = DecisionTreeClassifier(criterion='entropy')
dtc.fit(x_train, y_train)
print(dtc.score(x_test, y_test))
from sklearn import feature_selection
fs = feature_selection.SelectPercentile(feature_selection.chi2, percentile=20)
x_train_fs = fs.fit_transform(x_train, y_train)
dtc.fit(x_train_fs, y_train)
x_test_fs = fs.transform(x_test)
print(dtc.score(x_test_fs, y_test))
<|reserved_special_token_1|>
# Train and evaluate a decision-tree classifier on the Titanic survival
# dataset, then repeat after chi-squared feature selection.

# obtain the dataset
import pandas as pd

# NOTE(review): this Vanderbilt URL has been unreliable for years; if the
# download fails, mirror the file locally and point read_csv at the copy.
titanic = pd.read_csv('http://biostat.mc.vanderbilt.edu/wiki/pub/Main/DataSets/titanic.txt')
#titanic.info()
print(titanic.head())

# preprocessing: drop the identifier columns and the label from the features
x = titanic.drop(['row.names', 'name', 'survived'], axis=1)
y = titanic['survived']

# impute missing ages with the column mean; every other gap becomes a
# sentinel string so DictVectorizer can one-hot encode it
x['age'].fillna(x['age'].mean(), inplace=True)
x.fillna('UNKNOWN', inplace=True)

# split into 75% train / 25% test
# fix: sklearn.cross_validation was removed in scikit-learn 0.20;
# train_test_split now lives in sklearn.model_selection
from sklearn.model_selection import train_test_split
x_train, x_test, y_train, y_test = train_test_split(x, y, test_size=0.25,
                                                    random_state=33)

# feature extraction: one-hot encode categorical columns via DictVectorizer
from sklearn.feature_extraction import DictVectorizer
vec = DictVectorizer()
# fix: the to_dict orientation is spelled 'records'; the abbreviated
# 'record' raises ValueError on modern pandas
x_train = vec.fit_transform(x_train.to_dict(orient='records'))
x_test = vec.transform(x_test.to_dict(orient='records'))
#print(len(vec.feature_names_))

# fit a decision tree on all features and report test-set accuracy
from sklearn.tree import DecisionTreeClassifier
dtc = DecisionTreeClassifier(criterion='entropy')
dtc.fit(x_train, y_train)
#y_predict = dtc.predict(x_test)
print(dtc.score(x_test, y_test))

# keep only the top 20% of features by chi-squared score and re-evaluate
from sklearn import feature_selection
fs = feature_selection.SelectPercentile(feature_selection.chi2, percentile=20)
x_train_fs = fs.fit_transform(x_train, y_train)
dtc.fit(x_train_fs, y_train)
x_test_fs = fs.transform(x_test)
print(dtc.score(x_test_fs, y_test))
|
flexible
|
{
"blob_id": "f1475d651c3b52611657a9767ad62796b55d8711",
"index": 3676,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nprint(titanic.head())\n<mask token>\nx['age'].fillna(x['age'].mean(), inplace=True)\nx.fillna('UNKNOWN', inplace=True)\n<mask token>\ndtc.fit(x_train, y_train)\nprint(dtc.score(x_test, y_test))\n<mask token>\ndtc.fit(x_train_fs, y_train)\n<mask token>\nprint(dtc.score(x_test_fs, y_test))\n",
"step-3": "<mask token>\ntitanic = pd.read_csv(\n 'http://biostat.mc.vanderbilt.edu/wiki/pub/Main/DataSets/titanic.txt')\nprint(titanic.head())\nx = titanic.drop(['row.names', 'name', 'survived'], axis=1)\ny = titanic['survived']\nx['age'].fillna(x['age'].mean(), inplace=True)\nx.fillna('UNKNOWN', inplace=True)\n<mask token>\nx_train, x_test, y_train, y_test = train_test_split(x, y, test_size=0.25,\n random_state=33)\n<mask token>\nvec = DictVectorizer()\nx_train = vec.fit_transform(x_train.to_dict(orient='record'))\nx_test = vec.transform(x_test.to_dict(orient='record'))\n<mask token>\ndtc = DecisionTreeClassifier(criterion='entropy')\ndtc.fit(x_train, y_train)\nprint(dtc.score(x_test, y_test))\n<mask token>\nfs = feature_selection.SelectPercentile(feature_selection.chi2, percentile=20)\nx_train_fs = fs.fit_transform(x_train, y_train)\ndtc.fit(x_train_fs, y_train)\nx_test_fs = fs.transform(x_test)\nprint(dtc.score(x_test_fs, y_test))\n",
"step-4": "import pandas as pd\ntitanic = pd.read_csv(\n 'http://biostat.mc.vanderbilt.edu/wiki/pub/Main/DataSets/titanic.txt')\nprint(titanic.head())\nx = titanic.drop(['row.names', 'name', 'survived'], axis=1)\ny = titanic['survived']\nx['age'].fillna(x['age'].mean(), inplace=True)\nx.fillna('UNKNOWN', inplace=True)\nfrom sklearn.cross_validation import train_test_split\nx_train, x_test, y_train, y_test = train_test_split(x, y, test_size=0.25,\n random_state=33)\nfrom sklearn.feature_extraction import DictVectorizer\nvec = DictVectorizer()\nx_train = vec.fit_transform(x_train.to_dict(orient='record'))\nx_test = vec.transform(x_test.to_dict(orient='record'))\nfrom sklearn.tree import DecisionTreeClassifier\ndtc = DecisionTreeClassifier(criterion='entropy')\ndtc.fit(x_train, y_train)\nprint(dtc.score(x_test, y_test))\nfrom sklearn import feature_selection\nfs = feature_selection.SelectPercentile(feature_selection.chi2, percentile=20)\nx_train_fs = fs.fit_transform(x_train, y_train)\ndtc.fit(x_train_fs, y_train)\nx_test_fs = fs.transform(x_test)\nprint(dtc.score(x_test_fs, y_test))\n",
"step-5": "# obtain the dataset\r\nimport pandas as pd\r\n\r\ntitanic = pd.read_csv('http://biostat.mc.vanderbilt.edu/wiki/pub/Main/DataSets/titanic.txt')\r\n#titanic.info()\r\nprint(titanic.head())\r\n\r\n\r\n# preprocessing\r\nx = titanic.drop(['row.names', 'name', 'survived'], axis=1)\r\ny = titanic['survived']\r\n\r\nx['age'].fillna(x['age'].mean(), inplace = True) # add data for age feature\r\nx.fillna('UNKNOWN', inplace=True)\r\n\r\n# split\r\nfrom sklearn.cross_validation import train_test_split\r\nx_train, x_test, y_train, y_test = train_test_split(x, y, test_size=0.25, random_state=33)\r\n\r\n\r\n#feature extraction\r\nfrom sklearn.feature_extraction import DictVectorizer\r\nvec = DictVectorizer()\r\nx_train = vec.fit_transform(x_train.to_dict(orient='record'))\r\nx_test = vec.transform(x_test.to_dict(orient='record'))\r\n#print(len(vec.feature_names_))\r\n\r\n# import decision tree model\r\nfrom sklearn.tree import DecisionTreeClassifier\r\ndtc = DecisionTreeClassifier(criterion='entropy')\r\ndtc.fit(x_train, y_train)\r\n#y_predict = dtc.predict(x_test)\r\nprint(dtc.score(x_test, y_test))\r\n\r\nfrom sklearn import feature_selection\r\nfs = feature_selection.SelectPercentile(feature_selection.chi2, percentile=20)\r\nx_train_fs = fs.fit_transform(x_train, y_train)\r\ndtc.fit(x_train_fs, y_train)\r\nx_test_fs = fs.transform(x_test)\r\nprint(dtc.score(x_test_fs, y_test))",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
class System(ORMBase):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def add_component(self, component):
for system_component in self.system_components:
if system_component.component is component:
system_component.count += 1
return
SystemComponent(system=self, component=component, count=1)
<|reserved_special_token_0|>
def __str__(self):
linesep = '\n '
components = [f'{linesep}{repr(component)}' for _, component in
self.components]
return f"{self.name}:{''.join(components)}"
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class System(ORMBase):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
@property
def components(self):
for system_component in self.system_components:
for _ in range(system_component.count):
yield system_component.count, system_component.component
def add_component(self, component):
for system_component in self.system_components:
if system_component.component is component:
system_component.count += 1
return
SystemComponent(system=self, component=component, count=1)
<|reserved_special_token_0|>
def __str__(self):
linesep = '\n '
components = [f'{linesep}{repr(component)}' for _, component in
self.components]
return f"{self.name}:{''.join(components)}"
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class System(ORMBase):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
@property
def components(self):
for system_component in self.system_components:
for _ in range(system_component.count):
yield system_component.count, system_component.component
def add_component(self, component):
for system_component in self.system_components:
if system_component.component is component:
system_component.count += 1
return
SystemComponent(system=self, component=component, count=1)
def __repr__(self):
components = [f'{count}x{repr(component)}' for count, component in
self.components]
return (
f'<System(name={self.name}, {components}, unique_id={self.unique_id})>'
)
def __str__(self):
linesep = '\n '
components = [f'{linesep}{repr(component)}' for _, component in
self.components]
return f"{self.name}:{''.join(components)}"
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class System(ORMBase):
__tablename__ = 'System'
unique_id = Column(Integer, primary_key=True)
name = Column(String)
user_roles = relationship('UserSystemRole')
system_components = relationship('SystemComponent')
@property
def components(self):
for system_component in self.system_components:
for _ in range(system_component.count):
yield system_component.count, system_component.component
def add_component(self, component):
for system_component in self.system_components:
if system_component.component is component:
system_component.count += 1
return
SystemComponent(system=self, component=component, count=1)
def __repr__(self):
components = [f'{count}x{repr(component)}' for count, component in
self.components]
return (
f'<System(name={self.name}, {components}, unique_id={self.unique_id})>'
)
def __str__(self):
linesep = '\n '
components = [f'{linesep}{repr(component)}' for _, component in
self.components]
return f"{self.name}:{''.join(components)}"
<|reserved_special_token_1|>
# Libraries
from sqlalchemy import Column, ForeignKey, Integer, String
from sqlalchemy.ext.associationproxy import association_proxy
from sqlalchemy.orm import relationship
# Taskobra
from taskobra.orm.base import ORMBase
from taskobra.orm.relationships import SystemComponent
class System(ORMBase):
    """ORM entity for a system: a named collection of components plus the
    user roles attached to it.

    Counts are stored on the SystemComponent association rows; the
    ``components`` property expands them back into individual units.
    """

    __tablename__ = "System"

    unique_id = Column(Integer, primary_key=True)
    name = Column(String)
    user_roles = relationship("UserSystemRole")
    system_components = relationship("SystemComponent")

    @property
    def components(self):
        """Yield ``(count, component)`` once per unit owned — a component
        held with count 3 is yielded three times."""
        for entry in self.system_components:
            for _ in range(entry.count):
                yield entry.count, entry.component

    def add_component(self, component):
        """Add one unit of *component*, bumping the existing association's
        count when the same component object is already attached."""
        for entry in self.system_components:
            if entry.component is component:
                entry.count += 1
                return
        # Not present yet: create a fresh association row with count 1.
        SystemComponent(system=self, component=component, count=1)

    def __repr__(self):
        parts = ["{}x{}".format(count, repr(component))
                 for count, component in self.components]
        return f"<System(name={self.name}, {parts}, unique_id={self.unique_id})>"

    def __str__(self):
        linesep = "\n    "
        rendered = [linesep + repr(component)
                    for _, component in self.components]
        return f"{self.name}:{''.join(rendered)}"
|
flexible
|
{
"blob_id": "2fc2fd6631cee5f3737dadaac1a115c045af0986",
"index": 5058,
"step-1": "<mask token>\n\n\nclass System(ORMBase):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def add_component(self, component):\n for system_component in self.system_components:\n if system_component.component is component:\n system_component.count += 1\n return\n SystemComponent(system=self, component=component, count=1)\n <mask token>\n\n def __str__(self):\n linesep = '\\n '\n components = [f'{linesep}{repr(component)}' for _, component in\n self.components]\n return f\"{self.name}:{''.join(components)}\"\n",
"step-2": "<mask token>\n\n\nclass System(ORMBase):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n @property\n def components(self):\n for system_component in self.system_components:\n for _ in range(system_component.count):\n yield system_component.count, system_component.component\n\n def add_component(self, component):\n for system_component in self.system_components:\n if system_component.component is component:\n system_component.count += 1\n return\n SystemComponent(system=self, component=component, count=1)\n <mask token>\n\n def __str__(self):\n linesep = '\\n '\n components = [f'{linesep}{repr(component)}' for _, component in\n self.components]\n return f\"{self.name}:{''.join(components)}\"\n",
"step-3": "<mask token>\n\n\nclass System(ORMBase):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n @property\n def components(self):\n for system_component in self.system_components:\n for _ in range(system_component.count):\n yield system_component.count, system_component.component\n\n def add_component(self, component):\n for system_component in self.system_components:\n if system_component.component is component:\n system_component.count += 1\n return\n SystemComponent(system=self, component=component, count=1)\n\n def __repr__(self):\n components = [f'{count}x{repr(component)}' for count, component in\n self.components]\n return (\n f'<System(name={self.name}, {components}, unique_id={self.unique_id})>'\n )\n\n def __str__(self):\n linesep = '\\n '\n components = [f'{linesep}{repr(component)}' for _, component in\n self.components]\n return f\"{self.name}:{''.join(components)}\"\n",
"step-4": "<mask token>\n\n\nclass System(ORMBase):\n __tablename__ = 'System'\n unique_id = Column(Integer, primary_key=True)\n name = Column(String)\n user_roles = relationship('UserSystemRole')\n system_components = relationship('SystemComponent')\n\n @property\n def components(self):\n for system_component in self.system_components:\n for _ in range(system_component.count):\n yield system_component.count, system_component.component\n\n def add_component(self, component):\n for system_component in self.system_components:\n if system_component.component is component:\n system_component.count += 1\n return\n SystemComponent(system=self, component=component, count=1)\n\n def __repr__(self):\n components = [f'{count}x{repr(component)}' for count, component in\n self.components]\n return (\n f'<System(name={self.name}, {components}, unique_id={self.unique_id})>'\n )\n\n def __str__(self):\n linesep = '\\n '\n components = [f'{linesep}{repr(component)}' for _, component in\n self.components]\n return f\"{self.name}:{''.join(components)}\"\n",
"step-5": "# Libraries\nfrom sqlalchemy import Column, ForeignKey, Integer, String\nfrom sqlalchemy.ext.associationproxy import association_proxy\nfrom sqlalchemy.orm import relationship\n# Taskobra\nfrom taskobra.orm.base import ORMBase\nfrom taskobra.orm.relationships import SystemComponent\n\n\nclass System(ORMBase):\n __tablename__ = \"System\"\n unique_id = Column(Integer, primary_key=True)\n name = Column(String)\n user_roles = relationship(\"UserSystemRole\")\n system_components = relationship(\"SystemComponent\")\n\n @property\n def components(self):\n for system_component in self.system_components:\n for _ in range(system_component.count):\n yield system_component.count, system_component.component\n\n def add_component(self, component):\n for system_component in self.system_components:\n if system_component.component is component:\n system_component.count += 1\n return\n SystemComponent(system=self, component=component, count=1)\n\n def __repr__(self):\n components = [\n f\"{count}x{repr(component)}\"\n for count, component in self.components\n ]\n return f\"<System(name={self.name}, {components}, unique_id={self.unique_id})>\"\n\n def __str__(self):\n linesep = \"\\n \"\n components = [\n f\"{linesep}{repr(component)}\"\n for _, component in self.components\n ]\n return f\"{self.name}:{''.join(components)}\"\n",
"step-ids": [
3,
4,
5,
6,
8
]
}
|
[
3,
4,
5,
6,
8
] |
import tests.functions as functions

if __name__ == "__main__":
    # functions.validate_all_redirects("linked.data.gov.au-vocabularies.json")

    # Vocabulary slugs recently added to linked.data.gov.au; for each one,
    # locate its RewriteRule in the Apache conf and print the public URI
    # alongside the redirect target.
    new = [
        "anzsrc-for", "anzsrc-seo", "ausplots-cv",
        "australian-phone-area-codes", "care", "corveg-cv", "nrm",
        "reg-roles", "reg-statuses", "address-type",
        "australian-states-and-territories", "bc-labels",
        "data-access-rights", "dataciteroles", "depth-reference",
        "geo-commodities", "geoadminfeatures", "geofeatures",
        "geological-observation-instrument",
        "geological-observation-method", "geological-observation-type",
        "geological-sites", "geometry-roles", "georesource-report",
        "gsq-alias", "gsq-dataset-theme", "gsq-roles",
        "gsq-sample-facility", "iso639-1",
        "iso-19157-data-quality-dimension",
        "iso-iec-25012-data-quality-dimension", "nsw-quality-dimension",
        "party-identifier-type", "qg-agent", "qg-file-types",
        "qg-security-classifications", "qg-sites", "qld-data-licenses",
        "iso19115-1/RoleCode", "minerals", "nslvoc",
        "observation-detail-type", "organisation-activity-status",
        "organisation-name-types", "organisation-type",
        "party-relationship", "queensland-crs",
        "qld-resource-permit-status", "qld-resource-permit",
        "qld-utm-zones", "geou", "iso11179-6/RolesAndResponsibilities",
        "qesd-qkd", "qesd-uom", "qld-obsprop", "report-detail-type",
        "report-status", "resource-project-lifecycle", "resource-types",
        "result-type", "sample-detail-type", "sample-location-status",
        "sample-location-types", "sample-material",
        "sample-preparation-methods", "sample-relationship", "sample-type",
        "seismic-dimensionality", "site-detail-type", "site-relationships",
        "site-status", "supermodel/terms", "survey-detail-type",
        "survey-method", "survey-relationship-type", "survey-status",
        "survey-type", "telephone-type", "tk-labels", "trs"
    ]

    # fix: the conf file handle was opened and never closed; read it under
    # a context manager so it is released deterministically
    with open("../conf/linked.data.gov.au-vocabularies.conf") as conf:
        lines = conf.readlines()

    for n in new:
        # Only the first matching rule per slug is reported.
        for line in lines:
            if n in line:
                # A RewriteRule line looks like:
                #   RewriteRule ^<pattern>$ <target> [R=...]
                pattern, match = line.split("$", 1)
                print(pattern.strip().replace("RewriteRule ^", "https://linked.data.gov.au/"), " -- ", match.split("[R")[0].replace('"', '').strip())
                break
|
normal
|
{
"blob_id": "4a620957b2cd1e5945d98e49a5eae5d5592ef5a2",
"index": 3911,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nif __name__ == '__main__':\n conf = open('../conf/linked.data.gov.au-vocabularies.conf')\n new = ['anzsrc-for', 'anzsrc-seo', 'ausplots-cv',\n 'australian-phone-area-codes', 'care', 'corveg-cv', 'nrm',\n 'reg-roles', 'reg-statuses', 'address-type',\n 'australian-states-and-territories', 'bc-labels',\n 'data-access-rights', 'dataciteroles', 'depth-reference',\n 'geo-commodities', 'geoadminfeatures', 'geofeatures',\n 'geological-observation-instrument',\n 'geological-observation-method', 'geological-observation-type',\n 'geological-sites', 'geometry-roles', 'georesource-report',\n 'gsq-alias', 'gsq-dataset-theme', 'gsq-roles',\n 'gsq-sample-facility', 'iso639-1',\n 'iso-19157-data-quality-dimension',\n 'iso-iec-25012-data-quality-dimension', 'nsw-quality-dimension',\n 'party-identifier-type', 'qg-agent', 'qg-file-types',\n 'qg-security-classifications', 'qg-sites', 'qld-data-licenses',\n 'iso19115-1/RoleCode', 'minerals', 'nslvoc',\n 'observation-detail-type', 'organisation-activity-status',\n 'organisation-name-types', 'organisation-type',\n 'party-relationship', 'queensland-crs',\n 'qld-resource-permit-status', 'qld-resource-permit',\n 'qld-utm-zones', 'geou', 'iso11179-6/RolesAndResponsibilities',\n 'qesd-qkd', 'qesd-uom', 'qld-obsprop', 'report-detail-type',\n 'report-status', 'resource-project-lifecycle', 'resource-types',\n 'result-type', 'sample-detail-type', 'sample-location-status',\n 'sample-location-types', 'sample-material',\n 'sample-preparation-methods', 'sample-relationship', 'sample-type',\n 'seismic-dimensionality', 'site-detail-type', 'site-relationships',\n 'site-status', 'supermodel/terms', 'survey-detail-type',\n 'survey-method', 'survey-relationship-type', 'survey-status',\n 'survey-type', 'telephone-type', 'tk-labels', 'trs']\n lines = conf.readlines()\n for n in new:\n for line in lines:\n if n in line:\n pattern, match = line.split('$', 1)\n print(pattern.strip().replace('RewriteRule ^',\n 
'https://linked.data.gov.au/'), ' -- ', match.split(\n '[R')[0].replace('\"', '').strip())\n break\n",
"step-3": "import tests.functions as functions\nif __name__ == '__main__':\n conf = open('../conf/linked.data.gov.au-vocabularies.conf')\n new = ['anzsrc-for', 'anzsrc-seo', 'ausplots-cv',\n 'australian-phone-area-codes', 'care', 'corveg-cv', 'nrm',\n 'reg-roles', 'reg-statuses', 'address-type',\n 'australian-states-and-territories', 'bc-labels',\n 'data-access-rights', 'dataciteroles', 'depth-reference',\n 'geo-commodities', 'geoadminfeatures', 'geofeatures',\n 'geological-observation-instrument',\n 'geological-observation-method', 'geological-observation-type',\n 'geological-sites', 'geometry-roles', 'georesource-report',\n 'gsq-alias', 'gsq-dataset-theme', 'gsq-roles',\n 'gsq-sample-facility', 'iso639-1',\n 'iso-19157-data-quality-dimension',\n 'iso-iec-25012-data-quality-dimension', 'nsw-quality-dimension',\n 'party-identifier-type', 'qg-agent', 'qg-file-types',\n 'qg-security-classifications', 'qg-sites', 'qld-data-licenses',\n 'iso19115-1/RoleCode', 'minerals', 'nslvoc',\n 'observation-detail-type', 'organisation-activity-status',\n 'organisation-name-types', 'organisation-type',\n 'party-relationship', 'queensland-crs',\n 'qld-resource-permit-status', 'qld-resource-permit',\n 'qld-utm-zones', 'geou', 'iso11179-6/RolesAndResponsibilities',\n 'qesd-qkd', 'qesd-uom', 'qld-obsprop', 'report-detail-type',\n 'report-status', 'resource-project-lifecycle', 'resource-types',\n 'result-type', 'sample-detail-type', 'sample-location-status',\n 'sample-location-types', 'sample-material',\n 'sample-preparation-methods', 'sample-relationship', 'sample-type',\n 'seismic-dimensionality', 'site-detail-type', 'site-relationships',\n 'site-status', 'supermodel/terms', 'survey-detail-type',\n 'survey-method', 'survey-relationship-type', 'survey-status',\n 'survey-type', 'telephone-type', 'tk-labels', 'trs']\n lines = conf.readlines()\n for n in new:\n for line in lines:\n if n in line:\n pattern, match = line.split('$', 1)\n print(pattern.strip().replace('RewriteRule ^',\n 
'https://linked.data.gov.au/'), ' -- ', match.split(\n '[R')[0].replace('\"', '').strip())\n break\n",
"step-4": "import tests.functions as functions\n\nif __name__ == \"__main__\":\n # functions.validate_all_redirects(\"linked.data.gov.au-vocabularies.json\")\n\n conf = open(\"../conf/linked.data.gov.au-vocabularies.conf\")\n new = [\n \"anzsrc-for\",\n \"anzsrc-seo\",\n \"ausplots-cv\",\n \"australian-phone-area-codes\",\n \"care\",\n \"corveg-cv\",\n \"nrm\",\n \"reg-roles\",\n \"reg-statuses\",\n \"address-type\",\n \"australian-states-and-territories\",\n \"bc-labels\",\n \"data-access-rights\",\n \"dataciteroles\",\n \"depth-reference\",\n \"geo-commodities\",\n \"geoadminfeatures\",\n \"geofeatures\",\n \"geological-observation-instrument\",\n \"geological-observation-method\",\n \"geological-observation-type\",\n \"geological-sites\",\n \"geometry-roles\",\n \"georesource-report\",\n \"gsq-alias\",\n \"gsq-dataset-theme\",\n \"gsq-roles\",\n \"gsq-sample-facility\",\n \"iso639-1\",\n \"iso-19157-data-quality-dimension\",\n \"iso-iec-25012-data-quality-dimension\",\n \"nsw-quality-dimension\",\n \"party-identifier-type\",\n \"qg-agent\",\n \"qg-file-types\",\n \"qg-security-classifications\",\n \"qg-sites\",\n \"qld-data-licenses\",\n \"iso19115-1/RoleCode\",\n \"minerals\",\n \"nslvoc\",\n \"observation-detail-type\",\n \"organisation-activity-status\",\n \"organisation-name-types\",\n \"organisation-type\",\n \"party-relationship\",\n \"queensland-crs\",\n \"qld-resource-permit-status\",\n \"qld-resource-permit\",\n \"qld-utm-zones\",\n \"geou\",\n \"iso11179-6/RolesAndResponsibilities\",\n \"qesd-qkd\",\n \"qesd-uom\",\n \"qld-obsprop\",\n \"report-detail-type\",\n \"report-status\",\n \"resource-project-lifecycle\",\n \"resource-types\",\n \"result-type\",\n \"sample-detail-type\",\n \"sample-location-status\",\n \"sample-location-types\",\n \"sample-material\",\n \"sample-preparation-methods\",\n \"sample-relationship\",\n \"sample-type\",\n \"seismic-dimensionality\",\n \"site-detail-type\",\n \"site-relationships\",\n \"site-status\",\n 
\"supermodel/terms\",\n \"survey-detail-type\",\n \"survey-method\",\n \"survey-relationship-type\",\n \"survey-status\",\n \"survey-type\",\n \"telephone-type\",\n \"tk-labels\",\n \"trs\"\n ]\n lines = conf.readlines()\n\n for n in new:\n for line in lines:\n if n in line:\n pattern, match = line.split(\"$\", 1)\n print(pattern.strip().replace(\"RewriteRule ^\", \"https://linked.data.gov.au/\"), \" -- \", match.split(\"[R\")[0].replace('\"', '').strip())\n break",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
from utils import to_device
from utils import build_dictionary,my_collate
from DataGenerator import DataGenerator
from torch.utils.data import DataLoader
from torch import optim
import torch.nn as nn
from ADSentimentModel import ADSentimentModel
import torch
def train(token2id, train_data, lr, batch_size, epochs, model):
    """Train the model's discriminator with binary cross-entropy loss.

    Args:
        token2id: vocabulary mapping consumed by DataGenerator.
        train_data: raw training examples.
        lr: Adam learning rate.
        batch_size: minibatch size for the DataLoader.
        epochs: number of full passes over the training data.
        model: model exposing a ``.discriminator`` sub-module and a
            ``(x, x_len) -> prediction`` forward — presumably an
            ADSentimentModel; confirm against callers.
    """
    dataset = DataGenerator(token2id, train_data)
    dataloader = DataLoader(dataset, batch_size=batch_size, collate_fn=my_collate)
    model = to_device(model)

    # Only the discriminator's parameters are optimized here.
    model_optimizer = optim.Adam(model.discriminator.parameters(), lr=lr)
    criterion = nn.BCELoss()

    # fix: range(1, epochs) ran only epochs - 1 passes; include the final epoch
    for epoch in range(1, epochs + 1):
        print("Epoch {}".format(epoch))
        print("*" * 80)

        running_loss = 0
        for i, data in enumerate(dataloader):
            data = to_device(data)
            x, x_len, y, _ = data
            predict = model(x, x_len)
            loss = criterion(predict.squeeze(1), y)

            model_optimizer.zero_grad()
            loss.backward()
            model_optimizer.step()

            running_loss += loss.item()

            # Report the accumulated loss every 10 batches.
            if i % 10 == 0 and i != 0:
                print("Average batch loss: {}".format(running_loss / 10))
                running_loss = 0
# fix: the guard was misspelled "__mian__", so it could never match when the
# module was run as a script
if __name__ == "__main__":
    pass
|
normal
|
{
"blob_id": "d0364b7cad29c639af9df5c78e810144ffd6ce2e",
"index": 2415,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef train(token2id, train_data, lr, batch_size, epochs, model):\n dataset = DataGenerator(token2id, train_data)\n dataloader = DataLoader(dataset, batch_size=batch_size, collate_fn=\n my_collate)\n model = to_device(model)\n model_optimizer = optim.Adam(model.discriminator.parameters(), lr=lr)\n criterion = nn.BCELoss()\n for epoch in range(1, epochs):\n print('Epoch {}'.format(epoch))\n print('*' * 80)\n running_loss = 0\n for i, data in enumerate(dataloader):\n data = to_device(data)\n x, x_len, y, _ = data\n predict = model(x, x_len)\n loss = criterion(predict.squeeze(1), y)\n model_optimizer.zero_grad()\n loss.backward()\n model_optimizer.step()\n running_loss += loss.item()\n if i % 10 == 0 and i != 0:\n print('Average batch loss: {}'.format(running_loss / 10))\n running_loss = 0\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef train(token2id, train_data, lr, batch_size, epochs, model):\n dataset = DataGenerator(token2id, train_data)\n dataloader = DataLoader(dataset, batch_size=batch_size, collate_fn=\n my_collate)\n model = to_device(model)\n model_optimizer = optim.Adam(model.discriminator.parameters(), lr=lr)\n criterion = nn.BCELoss()\n for epoch in range(1, epochs):\n print('Epoch {}'.format(epoch))\n print('*' * 80)\n running_loss = 0\n for i, data in enumerate(dataloader):\n data = to_device(data)\n x, x_len, y, _ = data\n predict = model(x, x_len)\n loss = criterion(predict.squeeze(1), y)\n model_optimizer.zero_grad()\n loss.backward()\n model_optimizer.step()\n running_loss += loss.item()\n if i % 10 == 0 and i != 0:\n print('Average batch loss: {}'.format(running_loss / 10))\n running_loss = 0\n\n\nif __name__ == '__mian__':\n pass\n",
"step-4": "from utils import to_device\nfrom utils import build_dictionary, my_collate\nfrom DataGenerator import DataGenerator\nfrom torch.utils.data import DataLoader\nfrom torch import optim\nimport torch.nn as nn\nfrom ADSentimentModel import ADSentimentModel\nimport torch\n\n\ndef train(token2id, train_data, lr, batch_size, epochs, model):\n dataset = DataGenerator(token2id, train_data)\n dataloader = DataLoader(dataset, batch_size=batch_size, collate_fn=\n my_collate)\n model = to_device(model)\n model_optimizer = optim.Adam(model.discriminator.parameters(), lr=lr)\n criterion = nn.BCELoss()\n for epoch in range(1, epochs):\n print('Epoch {}'.format(epoch))\n print('*' * 80)\n running_loss = 0\n for i, data in enumerate(dataloader):\n data = to_device(data)\n x, x_len, y, _ = data\n predict = model(x, x_len)\n loss = criterion(predict.squeeze(1), y)\n model_optimizer.zero_grad()\n loss.backward()\n model_optimizer.step()\n running_loss += loss.item()\n if i % 10 == 0 and i != 0:\n print('Average batch loss: {}'.format(running_loss / 10))\n running_loss = 0\n\n\nif __name__ == '__mian__':\n pass\n",
"step-5": "from utils import to_device\nfrom utils import build_dictionary,my_collate\nfrom DataGenerator import DataGenerator\nfrom torch.utils.data import DataLoader\nfrom torch import optim\nimport torch.nn as nn\nfrom ADSentimentModel import ADSentimentModel\nimport torch\n\ndef train(token2id, train_data, lr, batch_size, epochs,model):\n\n dataset = DataGenerator(token2id, train_data)\n dataloader = DataLoader(dataset,batch_size=batch_size,collate_fn=my_collate)\n model = to_device(model)\n\n model_optimizer = optim.Adam(model.discriminator.parameters(),lr=lr)\n criterion = nn.BCELoss()\n\n for epoch in range(1,epochs):\n print(\"Epoch {}\".format(epoch))\n print(\"*\"*80)\n\n running_loss = 0\n for i,data in enumerate(dataloader):\n data = to_device(data)\n x,x_len,y,_ = data\n predict = model(x,x_len)\n loss = criterion(predict.squeeze(1),y)\n\n model_optimizer.zero_grad()\n loss.backward()\n model_optimizer.step()\n\n running_loss += loss.item()\n\n if i%10 == 0 and i != 0 :\n print(\"Average batch loss: {}\".format(running_loss/10))\n running_loss = 0\n\nif __name__ == \"__mian__\":\n pass\n\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
# Kai Joseph
# Loop Practice
# Since I worked on my own, I did not have to complete all 25 challenges (with Ms. Healey's permission). I completed a total of 14 challenges.
import sys
import random
''' 1.
Write a for loop that will print out all the integers from 0-4 in ascending order.
'''
if sys.argv[1] == '1':
    # Count upward directly with range's default start/step.
    for value in range(0, 5):
        print(value)

''' 2.
Write a for loop that will print out all the integers from 0-4 in descending order.
'''
if sys.argv[1] == '2':
    # Count downward with a negative step instead of computing 4 - x.
    for value in range(4, -1, -1):
        print(value)
''' 3.
Write a for loop that will print out all the integers from 5-15 in descending order.
'''
if sys.argv[1] == '3':
    # 15 down to 5 inclusive, using a negative range step.
    for value in range(15, 4, -1):
        print(value)

''' 4.
Write a for loop that will print out all the integers from -5 to 5 in ascending order.
'''
if sys.argv[1] == '4':
    # range handles negative bounds directly; no offset arithmetic needed.
    for value in range(-5, 6):
        print(value)
''' 5.
Write two for loops that will both print out odd numbers from 25 to 49. The loops themselves must be different, but they will have the same output.
'''
if sys.argv[1] == '5':
    # First variant: step by 2 starting from an odd number.
    for odd in range(25, 50, 2):
        print(odd)
    # Second variant: filter the full range with a modulus test.
    for value in range(25, 50):
        if value % 2 == 1:
            print(value)

''' 6.
Write a for loop that prints out the squares of the numbers from 1 to 10. ie 1, 4, 9, 16, ... 100
'''
if sys.argv[1] == '6':
    for base in range(1, 11):
        print(base * base)
''' 8.
A number starts at 4 and increases by one every day after the day it was created. Write a loop and use the variable days (int) that will print out how many days it will take for number to reach 57.
'''
if sys.argv[1] == '8':
    # Walk the number up from 4 to 57, reporting the remaining gap each day.
    number = 4
    while number <= 57:
        print(number)
        days = 57 - number
        print("Days remaining to reach 57:", days)
        number += 1
''' 9.
A girl in your class has jellybeans in a jar. The number of jellybeans is stored in int beans. Every day she shares one jellybean with every student in the class, and she herself takes two. The number of students in the class is held in variable students (int). Write a loop that determines how many days it will take for her to run out of jellybeans. You can store the result in variable numDays (int).
'''
if sys.argv[1] == '9':
    # Keep prompting until both entries parse as integers.
    while True:
        students = input("Number of students (excluding the girl): ")
        jellybeans = input("Number of jelly beans: ")
        try:
            students = int(students)
            jellybeans = int(jellybeans)
        except ValueError:
            print("Please enter an integer for jelly beans and students.")
        else:
            break
    # Each day she hands out one bean per classmate plus two for herself.
    days = 0
    while jellybeans > 0:
        jellybeans -= students + 2
        days += 1
    print(days)
''' 17.
Write a loop that will print out the decimal equivalents of 1/2, 1/3, 1/4, 1/5, 1/6, ... 1/20. The output for each iteration should look like:
"1/2 = .5" "1/3 = .666666666667" etc.
'''
if sys.argv[1] == '17':
    # Print each unit fraction 1/2 .. 1/20 with its decimal value.
    for x in range(2, 21):
        num = 1 / x
        print("1/" + str(x), "=", str(num))

''' 18.
Write a loop that determines the sum of all the numbers from 1-100, as well as the average. Store the sum in variable total (int) and the average in variable avg (float).
'''
if sys.argv[1] == '18':
    total = 0
    for x in range(1, 101):
        total = total + x
    print("Total: " + str(total))
    # fix: divide by the count of numbers (100) instead of relying on the
    # loop variable x leaking out of the for loop — x only happens to be
    # 100 here, and the dependency breaks silently if the range changes
    avg = total / 100
    print("Average: " + str(avg))
''' 19.
A friend tells you that PI can be computed with the following equation:
PI = 4 * (1-1/3+1/5-1/7+1/9-1/11+1/13-1/15...)
Write a loop that will calculate this output for n-iterations of the pattern (n being an int), that could help you determine if your friend is right or wrong. Are they right or wrong?
'''
if sys.argv[1] == '19':
    it = int(input("Enter the number of iterations: "))
    # Leibniz series: alternate adding and subtracting odd unit fractions.
    num = 0
    for denom in range(1, it * 2, 2):
        # Denominators 3, 7, 11, ... carry a minus sign; 1, 5, 9, ... a plus.
        if (denom - 3) % 4 == 0:
            num -= 1 / denom
        else:
            num += 1 / denom
    print(str(4 * num))
''' 22.
Write a loop which prints the numbers 1 to 110, 11 numbers per line. The program shall print "Coza" in place of the numbers which are multiples of 3, "Loza" for multiples of 5, "Woza" for multiples of 7, "CozaLoza" for multiples of 3 and 5, and so on. Sample output:
1 2 Coza 4 Loza Coza Woza 8 Coza Loza 11
Coza 13 Woza CozaLoza 16 17 Coza 19 Loza CozaWoza 22
23 Coza Loza 26 Coza Woza 29 CozaLoza 31 32 Coza
......
'''
if sys.argv[1] == '22':
    # Arrange 1..110 into 10 rows of 11 numbers each.
    # fix: the original ten-branch elif chain is just integer division —
    # number n belongs in row (n - 1) // 11
    rows = [[] for _ in range(10)]
    for value in range(1, 111):
        rows[(value - 1) // 11].append(value)
    # Replace multiples of 3/5/7 with Coza/Loza/Woza; a number divisible by
    # several of them gets the words concatenated (e.g. 15 -> "CozaLoza").
    # fix: dropped the redundant int() casts — the cells are already ints
    for row in rows:
        for col, value in enumerate(row):
            word = ""
            if value % 3 == 0:
                word += "Coza"
            if value % 5 == 0:
                word += "Loza"
            if value % 7 == 0:
                word += "Woza"
            if word:
                row[col] = word
    for row in rows:
        print(*row)
''' 23.
Write code that will print out a times-table for practice and reference. It should look like this:
* | 1 2 3 4 5 6 7 8 9
-------------------------------
1 | 1 2 3 4 5 6 7 8 9
2 | 2 4 6 8 10 12 14 16 18
3 | 3 6 9 12 15 18 21 24 27
4 | 4 8 12 16 20 24 28 32 36
5 | 5 10 15 20 25 30 35 40 45
6 | 6 12 18 24 30 36 42 48 54
7 | 7 14 21 28 35 42 49 56 63
8 | 8 16 24 32 40 48 56 64 72
9 | 9 18 27 36 45 54 63 72 81
'''
if sys.argv[1] == '23':
    # NOTE(review): the sample output shows a header row and separator line,
    # but this code (like the original) prints only the 9x9 products --
    # confirm whether the full table decoration is required.
    # Removed the unused `numbers` list, the `y = x` aliasing and the
    # redundant int() casts from the original.
    factors = [1, 2, 3, 4, 5, 6, 7, 8, 9]
    for r in factors:
        for c in factors:
            print(r * c, end=" ")
        print("")
''' 25.
Write code that will extract each digit from an int stored in variable number, in the reverse order. For example, if the int is 15423, the output shall be "3 2 4 5 1", with a space separating the digits.
'''
if sys.argv[1] == '25':
    number = str(input("Enter the number that you wish to reverse: "))
    # Reverse with a slice instead of the manual back-to-front index loop.
    for digit in number[::-1]:
        print(digit, end=" ")
    print("")
|
normal
|
{
"blob_id": "eda8bde048f3d4c4af4bd1c296e4cc02b92eaa17",
"index": 4727,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nif sys.argv[1] == '1':\n for x in range(5):\n print(str(x))\n<mask token>\nif sys.argv[1] == '2':\n for x in range(5):\n print(str(4 - x))\n<mask token>\nif sys.argv[1] == '3':\n for x in range(11):\n print(str(15 - x))\n<mask token>\nif sys.argv[1] == '4':\n for x in range(11):\n print(str(-5 + x))\n<mask token>\nif sys.argv[1] == '5':\n for x in range(25, 50):\n if x % 2 != 0:\n print(x)\n for x in range(26):\n if x % 2 == 0:\n print(str(25 + x))\n<mask token>\nif sys.argv[1] == '6':\n for x in range(1, 11):\n print(str(x ** 2))\n<mask token>\nif sys.argv[1] == '8':\n for x in range(4, 58):\n print(x)\n days = 57 - x\n print('Days remaining to reach 57:', str(days))\n<mask token>\nif sys.argv[1] == '9':\n while True:\n students = input('Number of students (excluding the girl): ')\n jellybeans = input('Number of jelly beans: ')\n try:\n students = int(students)\n jellybeans = int(jellybeans)\n break\n except ValueError:\n print('Please enter an integer for jelly beans and students.')\n days = 0\n while jellybeans > 0:\n jellybeans = jellybeans - students - 2\n days = days + 1\n print(days)\n<mask token>\nif sys.argv[1] == '17':\n for x in range(2, 21):\n num = 1 / x\n print('1/' + str(x), '=', str(num))\n<mask token>\nif sys.argv[1] == '18':\n total = 0\n for x in range(1, 101):\n total = total + x\n print('Total: ' + str(total))\n avg = total / x\n print('Average: ' + str(avg))\n<mask token>\nif sys.argv[1] == '19':\n it = int(input('Enter the number of iterations: '))\n num = 0\n for x in range(1, it * 2):\n if x % 2 != 0:\n if (x - 3) % 4 == 0:\n num = num - 1 / x\n else:\n num = num + 1 / x\n print(str(4 * num))\n<mask token>\nif sys.argv[1] == '22':\n numbers = []\n for x in range(10):\n numbers.append([])\n for x in range(1, 111):\n if x < 12:\n numbers[0].append(x)\n elif x < 23:\n numbers[1].append(x)\n elif x < 34:\n numbers[2].append(x)\n elif x < 45:\n numbers[3].append(x)\n elif x < 56:\n numbers[4].append(x)\n elif x < 67:\n 
numbers[5].append(x)\n elif x < 78:\n numbers[6].append(x)\n elif x < 89:\n numbers[7].append(x)\n elif x < 100:\n numbers[8].append(x)\n elif x < 111:\n numbers[9].append(x)\n for x in range(len(numbers)):\n for y in range(11):\n word = ''\n tampered = False\n if int(numbers[x][y]) % 3 == 0:\n word = word + 'Coza'\n tampered = True\n if int(numbers[x][y]) % 5 == 0:\n word = word + 'Loza'\n tampered = True\n if int(numbers[x][y]) % 7 == 0:\n word = word + 'Woza'\n tampered = True\n if tampered:\n numbers[x][y] = word\n for x in range(len(numbers)):\n print(*numbers[x])\n<mask token>\nif sys.argv[1] == '23':\n x = [1, 2, 3, 4, 5, 6, 7, 8, 9]\n y = x\n numbers = []\n for r in range(len(x)):\n for z in range(len(y)):\n print(int(x[r]) * int(y[z]), end=' ')\n print('')\n<mask token>\nif sys.argv[1] == '25':\n number = input('Enter the number that you wish to reverse: ')\n number = str(number)\n n = []\n for x in range(len(number)):\n n.append(number[len(number) - 1 - x])\n for x in range(len(n)):\n print(n[x], end=' ')\n print('')\n",
"step-3": "import sys\nimport random\n<mask token>\nif sys.argv[1] == '1':\n for x in range(5):\n print(str(x))\n<mask token>\nif sys.argv[1] == '2':\n for x in range(5):\n print(str(4 - x))\n<mask token>\nif sys.argv[1] == '3':\n for x in range(11):\n print(str(15 - x))\n<mask token>\nif sys.argv[1] == '4':\n for x in range(11):\n print(str(-5 + x))\n<mask token>\nif sys.argv[1] == '5':\n for x in range(25, 50):\n if x % 2 != 0:\n print(x)\n for x in range(26):\n if x % 2 == 0:\n print(str(25 + x))\n<mask token>\nif sys.argv[1] == '6':\n for x in range(1, 11):\n print(str(x ** 2))\n<mask token>\nif sys.argv[1] == '8':\n for x in range(4, 58):\n print(x)\n days = 57 - x\n print('Days remaining to reach 57:', str(days))\n<mask token>\nif sys.argv[1] == '9':\n while True:\n students = input('Number of students (excluding the girl): ')\n jellybeans = input('Number of jelly beans: ')\n try:\n students = int(students)\n jellybeans = int(jellybeans)\n break\n except ValueError:\n print('Please enter an integer for jelly beans and students.')\n days = 0\n while jellybeans > 0:\n jellybeans = jellybeans - students - 2\n days = days + 1\n print(days)\n<mask token>\nif sys.argv[1] == '17':\n for x in range(2, 21):\n num = 1 / x\n print('1/' + str(x), '=', str(num))\n<mask token>\nif sys.argv[1] == '18':\n total = 0\n for x in range(1, 101):\n total = total + x\n print('Total: ' + str(total))\n avg = total / x\n print('Average: ' + str(avg))\n<mask token>\nif sys.argv[1] == '19':\n it = int(input('Enter the number of iterations: '))\n num = 0\n for x in range(1, it * 2):\n if x % 2 != 0:\n if (x - 3) % 4 == 0:\n num = num - 1 / x\n else:\n num = num + 1 / x\n print(str(4 * num))\n<mask token>\nif sys.argv[1] == '22':\n numbers = []\n for x in range(10):\n numbers.append([])\n for x in range(1, 111):\n if x < 12:\n numbers[0].append(x)\n elif x < 23:\n numbers[1].append(x)\n elif x < 34:\n numbers[2].append(x)\n elif x < 45:\n numbers[3].append(x)\n elif x < 56:\n 
numbers[4].append(x)\n elif x < 67:\n numbers[5].append(x)\n elif x < 78:\n numbers[6].append(x)\n elif x < 89:\n numbers[7].append(x)\n elif x < 100:\n numbers[8].append(x)\n elif x < 111:\n numbers[9].append(x)\n for x in range(len(numbers)):\n for y in range(11):\n word = ''\n tampered = False\n if int(numbers[x][y]) % 3 == 0:\n word = word + 'Coza'\n tampered = True\n if int(numbers[x][y]) % 5 == 0:\n word = word + 'Loza'\n tampered = True\n if int(numbers[x][y]) % 7 == 0:\n word = word + 'Woza'\n tampered = True\n if tampered:\n numbers[x][y] = word\n for x in range(len(numbers)):\n print(*numbers[x])\n<mask token>\nif sys.argv[1] == '23':\n x = [1, 2, 3, 4, 5, 6, 7, 8, 9]\n y = x\n numbers = []\n for r in range(len(x)):\n for z in range(len(y)):\n print(int(x[r]) * int(y[z]), end=' ')\n print('')\n<mask token>\nif sys.argv[1] == '25':\n number = input('Enter the number that you wish to reverse: ')\n number = str(number)\n n = []\n for x in range(len(number)):\n n.append(number[len(number) - 1 - x])\n for x in range(len(n)):\n print(n[x], end=' ')\n print('')\n",
"step-4": "# Kai Joseph\n# Loop Practice\n# Since I worked on my own, I did not have to complete all 25 challenges (with Ms. Healey's permission). I completed a total of 14 challenges.\n\n\nimport sys\nimport random\n\n\n''' 1. \n Write a for loop that will print out all the integers from 0-4 in ascending order. \n'''\n\nif sys.argv[1] == '1':\n\n\tfor x in range(5):\n\n\t\tprint(str(x))\n\n\n''' 2. \n Write a for loop that will print out all the integers from 0-4 in descending order.\n'''\n\nif sys.argv[1] == '2':\n\n\tfor x in range(5):\n\n\t\tprint(str(4-x))\n\n\n\n''' 3. \n Write a for loop that will print out all the integers from 5-15 in descending order.\n'''\n\nif sys.argv[1] == '3':\n\n\tfor x in range(11):\n\n\t\tprint(str(15-x))\n\n\n\n''' 4. \n Write a for loop that will print out all the integers from -5 to 5 in ascending order.\n'''\n\nif sys.argv[1] == '4':\n\n\tfor x in range(11):\n\n\t\tprint(str(-5+x))\n\n\n\n\n''' 5. \n Write two for loops that will both print out odd numbers from 25 to 49. The loops themselves must be different, but they will have the same output.\n'''\n\nif sys.argv[1] == '5':\n\n\tfor x in range(25,50):\n\n\t\tif x%2 != 0:\n\n\t\t\tprint(x)\n\n\tfor x in range(26):\n\n\t\tif x%2 == 0:\n\n\t\t\tprint(str(25+x))\n\n\n\n''' 6. \n Write a for loop that prints out the squares of the numbers from 1 to 10. ie 1, 4, 9, 16, ... 100\n'''\n\nif sys.argv[1] == '6':\n\n\tfor x in range(1,11):\n\n\t\tprint(str(x**2))\n\n\n\n''' 8. \n A number starts at 4 and increases by one every day after the day it was created. Write a loop and use the variable days (int) that will print out how many days it will take for number to reach 57. \n'''\n\nif sys.argv[1] == '8':\n\n\tfor x in range(4,58):\n\n\t\tprint(x)\n\n\t\tdays = 57-x\n\n\t\tprint(\"Days remaining to reach 57:\",str(days))\n\n\n\n''' 9. \n A girl in your class has jellybeans in a jar. The number of jellybeans is stored in int beans. 
Every day she shares one jellybean with every student in the class, and she herself takes two. The number of students in the class is held in variable students (int). Write a loop that determines how many days it will take for her to run out of jellybeans. You can store the result in variable numDays (int).\n'''\n\nif sys.argv[1] == '9':\n\n\twhile True:\n\n\t\tstudents = input(\"Number of students (excluding the girl): \")\n\n\t\tjellybeans = input(\"Number of jelly beans: \")\n\n\t\ttry:\n\n\t\t\tstudents = int(students)\n\n\t\t\tjellybeans = int(jellybeans)\n\n\t\t\tbreak\n\n\t\texcept ValueError:\n\n\t\t\tprint(\"Please enter an integer for jelly beans and students.\")\n\n\tdays = 0\n\n\twhile jellybeans > 0:\n\n\t\tjellybeans = jellybeans - students - 2\n\n\t\tdays = days + 1\n\n\n\tprint(days)\n\n\n\n\n\n''' 17. \n Write a loop that will print out the decimal equivalents of 1/2, 1/3, 1/4, 1/5, 1/6, ... 1/20. The output for each iteration should look like:\n \"1/2 = .5\" \"1/3 = .666666666667\" etc.\n'''\n\n\nif sys.argv[1] == '17':\n\n\tfor x in range(2,21):\n\n\t\tnum = 1/x\n\n\t\tprint(\"1/\"+str(x),\"=\",str(num))\n\n\n\n\n''' 18. \n Write a loop that determines the sum of all the numbers from 1-100, as well as the average. Store the sum in variable total (int) and the average in variable avg (float).\n'''\n\nif sys.argv[1] == '18':\n\n\ttotal = 0\n\n\tfor x in range(1,101):\n\n\t\ttotal = total+x\n\n\tprint(\"Total: \"+str(total))\n\n\tavg = total/x\n\n\tprint(\"Average: \" + str(avg))\n\n\n\n\n''' 19. \n A friend tells you that PI can be computed with the following equation:\n PI = 4 * (1-1/3+1/5-1/7+1/9-1/11+1/13-1/15...)\n Write a loop that will calculate this output for n-iterations of the pattern (n being an int), that could help you determine if your friend is right or wrong. 
Are they right or wrong?\n'''\n\nif sys.argv[1] == '19':\n\n\tit = int(input(\"Enter the number of iterations: \"))\n\n\tnum = 0\n\n\tfor x in range(1,it*2):\n\n\t\tif x%2 != 0:\n\n\t\t\tif (x-3)%4 == 0:\n\n\t\t\t\tnum = num - (1/x)\n\n\t\t\telse:\n\n\t\t\t\tnum = num + (1/x)\n\n\n\tprint(str(4*num))\n\n\n\n''' 22. \n Write a loop which prints the numbers 1 to 110, 11 numbers per line. The program shall print \"Coza\" in place of the numbers which are multiples of 3, \"Loza\" for multiples of 5, \"Woza\" for multiples of 7, \"CozaLoza\" for multiples of 3 and 5, and so on. Sample output:\n 1 2 Coza 4 Loza Coza Woza 8 Coza Loza 11 \n Coza 13 Woza CozaLoza 16 17 Coza 19 Loza CozaWoza 22 \n 23 Coza Loza 26 Coza Woza 29 CozaLoza 31 32 Coza\n ......\n'''\n\nif sys.argv[1] == '22':\n\n\tnumbers = []\n\n\tfor x in range(10):\n\n\t\tnumbers.append([])\n\n\tfor x in range(1,111):\n\n\t\tif x < 12:\n\n\t\t\tnumbers[0].append(x)\n\n\t\telif x < 23:\n\n\t\t\tnumbers[1].append(x)\n\n\t\telif x < 34:\n\n\t\t\tnumbers[2].append(x)\n\n\t\telif x < 45:\n\n\t\t\tnumbers[3].append(x)\n\n\t\telif x < 56:\n\n\t\t\tnumbers[4].append(x)\n\n\t\telif x < 67:\n\n\t\t\tnumbers[5].append(x)\n\n\t\telif x < 78:\n\n\t\t\tnumbers[6].append(x)\n\n\t\telif x < 89:\n\n\t\t\tnumbers[7].append(x)\n\n\t\telif x < 100:\n\n\t\t\tnumbers[8].append(x)\n\n\t\telif x < 111:\n\n\t\t\tnumbers[9].append(x)\n\n\n\tfor x in range(len(numbers)):\n\n\t\tfor y in range(11):\n\n\t\t\tword = \"\"\n\n\t\t\ttampered = False\n\n\t\t\tif int(numbers[x][y])%3 == 0:\n\n\t\t\t\tword = word + \"Coza\"\n\n\t\t\t\ttampered = True\n\n\t\t\tif int(numbers[x][y])%5 == 0:\n\n\t\t\t\tword = word + \"Loza\"\n\n\t\t\t\ttampered = True\n\n\t\t\tif int(numbers[x][y])%7 == 0:\n\n\t\t\t\tword = word + \"Woza\"\n\n\t\t\t\ttampered = True\n\n\t\t\tif tampered:\n\n\t\t\t\tnumbers[x][y] = word\n\n\tfor x in range(len(numbers)):\n\n\t\tprint(*numbers[x])\n\n\n\n''' 23.\n Write code that will print out a times-table for practice and reference. 
It should look like this:\n * | 1 2 3 4 5 6 7 8 9\n -------------------------------\n 1 | 1 2 3 4 5 6 7 8 9\n 2 | 2 4 6 8 10 12 14 16 18\n 3 | 3 6 9 12 15 18 21 24 27\n 4 | 4 8 12 16 20 24 28 32 36\n 5 | 5 10 15 20 25 30 35 40 45\n 6 | 6 12 18 24 30 36 42 48 54\n 7 | 7 14 21 28 35 42 49 56 63\n 8 | 8 16 24 32 40 48 56 64 72\n 9 | 9 18 27 36 45 54 63 72 81\n'''\n\n\nif sys.argv[1] == '23':\n\n\tx = [1,2,3,4,5,6,7,8,9]\n\n\ty = x\n\n\tnumbers = []\n\n\tfor r in range(len(x)):\n\n\t\tfor z in range(len(y)):\n\n\t\t\tprint((int(x[r])*int(y[z])),end=\" \")\n\n\t\tprint(\"\")\n\n\n\n''' 25. \n Write code that will extract each digit from an int stored in variable number, in the reverse order. For example, if the int is 15423, the output shall be \"3 2 4 5 1\", with a space separating the digits. \n'''\n\nif sys.argv[1] == '25':\n\n\tnumber = input(\"Enter the number that you wish to reverse: \")\n\n\tnumber = str(number)\n\n\tn = []\n\n\tfor x in range(len(number)):\n\n\t\tn.append(number[len(number)-1-x])\n\n\tfor x in range(len(n)):\n\n\t\tprint(n[x],end=\" \")\n\n\tprint(\"\")\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Migration(migrations.Migration):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Migration(migrations.Migration):
dependencies = [('details', '0002_auto_20210310_1421')]
operations = [migrations.AlterModelOptions(name='detail', options={
'get_latest_by': 'created', 'ordering': ['created']})]
<|reserved_special_token_1|>
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [('details', '0002_auto_20210310_1421')]
operations = [migrations.AlterModelOptions(name='detail', options={
'get_latest_by': 'created', 'ordering': ['created']})]
<|reserved_special_token_1|>
# Generated by Django 3.1.7 on 2021-03-28 01:03
from django.db import migrations


class Migration(migrations.Migration):
    """Alter the Detail model's Meta options: ordering and get_latest_by."""

    dependencies = [('details', '0002_auto_20210310_1421')]

    operations = [
        migrations.AlterModelOptions(
            name='detail',
            options={'ordering': ['created'], 'get_latest_by': 'created'},
        ),
    ]
|
flexible
|
{
"blob_id": "cdaceb2d8804e08f0b35b9b65f2d06695efad002",
"index": 6470,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass Migration(migrations.Migration):\n <mask token>\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass Migration(migrations.Migration):\n dependencies = [('details', '0002_auto_20210310_1421')]\n operations = [migrations.AlterModelOptions(name='detail', options={\n 'get_latest_by': 'created', 'ordering': ['created']})]\n",
"step-4": "from django.db import migrations\n\n\nclass Migration(migrations.Migration):\n dependencies = [('details', '0002_auto_20210310_1421')]\n operations = [migrations.AlterModelOptions(name='detail', options={\n 'get_latest_by': 'created', 'ordering': ['created']})]\n",
"step-5": "# Generated by Django 3.1.7 on 2021-03-28 01:03\n\nfrom django.db import migrations\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('details', '0002_auto_20210310_1421'),\n ]\n\n operations = [\n migrations.AlterModelOptions(\n name='detail',\n options={'get_latest_by': 'created', 'ordering': ['created']},\n ),\n ]\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
# Generated by Django 3.1.3 on 2020-11-19 06:19
from django.db import migrations, models


class Migration(migrations.Migration):
    """Rename dg.sn to id, add two CharFields, and map dg to table 'dg_gen'."""

    dependencies = [('myems', '0004_auto_20201118_1446')]

    operations = [
        migrations.RenameField(model_name='dg', old_name='sn', new_name='id'),
        migrations.AddField(
            model_name='dg',
            name='code_ean13',
            field=models.CharField(default=0, max_length=50),
            preserve_default=False,
        ),
        migrations.AddField(
            model_name='dg',
            name='commercial_designation_in_english',
            field=models.CharField(default=0, max_length=100),
            preserve_default=False,
        ),
        migrations.AlterModelTable(name='dg', table='dg_gen'),
    ]
|
normal
|
{
"blob_id": "11d96a8a400afb0861b92d8900e003826614c99a",
"index": 7502,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass Migration(migrations.Migration):\n <mask token>\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass Migration(migrations.Migration):\n dependencies = [('myems', '0004_auto_20201118_1446')]\n operations = [migrations.RenameField(model_name='dg', old_name='sn',\n new_name='id'), migrations.AddField(model_name='dg', name=\n 'code_ean13', field=models.CharField(default=0, max_length=50),\n preserve_default=False), migrations.AddField(model_name='dg', name=\n 'commercial_designation_in_english', field=models.CharField(default\n =0, max_length=100), preserve_default=False), migrations.\n AlterModelTable(name='dg', table='dg_gen')]\n",
"step-4": "from django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n dependencies = [('myems', '0004_auto_20201118_1446')]\n operations = [migrations.RenameField(model_name='dg', old_name='sn',\n new_name='id'), migrations.AddField(model_name='dg', name=\n 'code_ean13', field=models.CharField(default=0, max_length=50),\n preserve_default=False), migrations.AddField(model_name='dg', name=\n 'commercial_designation_in_english', field=models.CharField(default\n =0, max_length=100), preserve_default=False), migrations.\n AlterModelTable(name='dg', table='dg_gen')]\n",
"step-5": "# Generated by Django 3.1.3 on 2020-11-19 06:19\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('myems', '0004_auto_20201118_1446'),\n ]\n\n operations = [\n migrations.RenameField(\n model_name='dg',\n old_name='sn',\n new_name='id',\n ),\n migrations.AddField(\n model_name='dg',\n name='code_ean13',\n field=models.CharField(default=0, max_length=50),\n preserve_default=False,\n ),\n migrations.AddField(\n model_name='dg',\n name='commercial_designation_in_english',\n field=models.CharField(default=0, max_length=100),\n preserve_default=False,\n ),\n migrations.AlterModelTable(\n name='dg',\n table='dg_gen',\n ),\n ]\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
# Read one salary per weekday, then report the total, the average,
# the maximum, and the day names on which the max/min salary occurred.
zi = ["L", "Ma", "Mi", "J", "Vi", "S", "D"]
V = []
for day in zi:
    x = input("dati salariul de: {} ".format(day))
    V.append(int(x))
print("Salariul in fiecare zi: {}".format(V))
# Compute the aggregates once; the original recomputed sum()/max()/min()
# inside the loops (O(n^2) for max/min).
total = sum(V)
highest = max(V)
lowest = min(V)
print(total)
print(round(total / 7, 2))
print(highest)
vMax = [day for day, sal in zip(zi, V) if sal == highest]
print(vMax)
vMin = [day for day, sal in zip(zi, V) if sal == lowest]
print(vMin)
|
normal
|
{
"blob_id": "6c91114e0c32628b64734000c82354105032b2fd",
"index": 7954,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nfor i in range(0, len(zi)):\n x = input('dati salariul de: {} '.format(zi[i]))\n V.append(int(x))\nprint('Salariul in fiecare zi: {}'.format(V))\nprint(sum(V))\nprint(round(sum(V) / 7, 2))\nprint(max(V))\n<mask token>\nfor i in range(0, len(zi)):\n if V[i] == max(V):\n vMax.append(zi[i])\nprint(vMax)\nfor i in range(0, len(zi)):\n if V[i] == min(V):\n vMin.append(zi[i])\nprint(vMin)\n",
"step-3": "zi = ['L', 'Ma', 'Mi', 'J', 'Vi', 'S', 'D']\nV = []\nfor i in range(0, len(zi)):\n x = input('dati salariul de: {} '.format(zi[i]))\n V.append(int(x))\nprint('Salariul in fiecare zi: {}'.format(V))\nprint(sum(V))\nprint(round(sum(V) / 7, 2))\nprint(max(V))\nvMax = []\nvMin = []\nfor i in range(0, len(zi)):\n if V[i] == max(V):\n vMax.append(zi[i])\nprint(vMax)\nfor i in range(0, len(zi)):\n if V[i] == min(V):\n vMin.append(zi[i])\nprint(vMin)\n",
"step-4": "zi=[\"L\",\"Ma\",\"Mi\",\"J\",\"Vi\",\"S\",\"D\"]\r\nV=[]\r\nfor i in range(0,len(zi)):\r\n x=input(\"dati salariul de: {} \".format(zi[i]))\r\n V.append(int(x))\r\nprint(\"Salariul in fiecare zi: {}\".format(V))\r\nprint(sum(V))\r\nprint(round(sum(V)/7,2))\r\nprint(max(V))\r\nvMax=[]\r\nvMin=[]\r\nfor i in range(0,len(zi)):\r\n if V[i]==max(V):\r\n vMax.append(zi[i])\r\nprint(vMax)\r\nfor i in range(0,len(zi)):\r\n if V[i]==min(V):\r\n vMin.append(zi[i])\r\nprint(vMin)\r\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
for i in cctv['구분']:
gu_list.append(gu_dict_num[i])
<|reserved_special_token_0|>
cctv.drop(['구분'], axis=1, inplace=True)
<|reserved_special_token_0|>
print(new_data.info())
new_data.to_csv('./dataset/train_add_cctv.csv', header=True, index=False)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
train_data = pd.read_csv('./dataset/train_park_daycare.csv')
cctv = pd.read_csv('./dataset/cctv_origin.csv', encoding='EUC-KR')
cctv = cctv.iloc[1:, :2]
gu_dict_num = {'용산구': 0, '양천구': 1, '강동구': 2, '관악구': 3, '노원구': 4, '영등포': 5,
'영등포구': 5, '마포구': 6, '서초구': 7, '성동구': 8, '금천구': 9, '도봉구': 10, '동작구': 11,
'강서구': 12, '동대문': 13, '동대문구': 13, '강북구': 14, '서대문': 15, '서대문구': 15,
'광진구': 16, '구로구': 17, '성북구': 18, '강남구': 19, '종로구': 20, '중구': 21, '중랑구':
22, '송파구': 23, '은평구': 24}
gu_list = []
for i in cctv['구분']:
gu_list.append(gu_dict_num[i])
cctv['gu'] = gu_list
cctv.drop(['구분'], axis=1, inplace=True)
cctv = cctv.rename(columns={'총계': 'cctv_num'})
cctv['cctv_num'] = cctv['cctv_num'].apply(lambda x: ''.join(x.split(',')))
cctv['cctv_num'] = pd.to_numeric(cctv['cctv_num'])
new_data = pd.merge(train_data, cctv, on='gu', how='left')
print(new_data.info())
new_data.to_csv('./dataset/train_add_cctv.csv', header=True, index=False)
<|reserved_special_token_1|>
import pandas as pd
train_data = pd.read_csv('./dataset/train_park_daycare.csv')
cctv = pd.read_csv('./dataset/cctv_origin.csv', encoding='EUC-KR')
cctv = cctv.iloc[1:, :2]
gu_dict_num = {'용산구': 0, '양천구': 1, '강동구': 2, '관악구': 3, '노원구': 4, '영등포': 5,
'영등포구': 5, '마포구': 6, '서초구': 7, '성동구': 8, '금천구': 9, '도봉구': 10, '동작구': 11,
'강서구': 12, '동대문': 13, '동대문구': 13, '강북구': 14, '서대문': 15, '서대문구': 15,
'광진구': 16, '구로구': 17, '성북구': 18, '강남구': 19, '종로구': 20, '중구': 21, '중랑구':
22, '송파구': 23, '은평구': 24}
gu_list = []
for i in cctv['구분']:
gu_list.append(gu_dict_num[i])
cctv['gu'] = gu_list
cctv.drop(['구분'], axis=1, inplace=True)
cctv = cctv.rename(columns={'총계': 'cctv_num'})
cctv['cctv_num'] = cctv['cctv_num'].apply(lambda x: ''.join(x.split(',')))
cctv['cctv_num'] = pd.to_numeric(cctv['cctv_num'])
new_data = pd.merge(train_data, cctv, on='gu', how='left')
print(new_data.info())
new_data.to_csv('./dataset/train_add_cctv.csv', header=True, index=False)
<|reserved_special_token_1|>
import pandas as pd

# Load the training data and the raw CCTV counts (the latter is EUC-KR encoded).
train_data = pd.read_csv('./dataset/train_park_daycare.csv')
cctv = pd.read_csv("./dataset/cctv_origin.csv", encoding="EUC-KR")

## Preprocessing
# Drop the first (summary) row and keep only the first two columns.
cctv = cctv.iloc[1:, :2]

# Map district (gu) names to numeric ids.
gu_dict_num = {'용산구': 0, '양천구': 1, '강동구': 2, '관악구': 3, '노원구': 4, '영등포': 5, '영등포구': 5, '마포구': 6, '서초구': 7, '성동구': 8, '금천구': 9, '도봉구': 10, '동작구': 11, '강서구': 12, '동대문': 13, '동대문구': 13, '강북구': 14, '서대문': 15, '서대문구': 15, '광진구': 16, '구로구': 17, '성북구': 18, '강남구': 19, '종로구': 20, '중구': 21, '중랑구': 22, '송파구': 23, '은평구': 24}
cctv['gu'] = [gu_dict_num[name] for name in cctv['구분']]
cctv.drop(['구분'], axis=1, inplace=True)

# Rename the total-count column.
cctv = cctv.rename(columns={'총계': 'cctv_num'})

# Strip the thousands separators and convert the counts to numbers.
cctv['cctv_num'] = cctv['cctv_num'].apply(lambda v: "".join(v.split(',')))
cctv['cctv_num'] = pd.to_numeric(cctv['cctv_num'])

# Left-join the CCTV counts onto the training data by district id.
new_data = pd.merge(train_data, cctv, on='gu', how='left')

print(new_data.info())

# Persist the merged dataset.
new_data.to_csv("./dataset/train_add_cctv.csv", header=True, index=False)
|
flexible
|
{
"blob_id": "ea2e9399a8384600d8457a9de3f263db44dc883d",
"index": 752,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nfor i in cctv['구분']:\n gu_list.append(gu_dict_num[i])\n<mask token>\ncctv.drop(['구분'], axis=1, inplace=True)\n<mask token>\nprint(new_data.info())\nnew_data.to_csv('./dataset/train_add_cctv.csv', header=True, index=False)\n",
"step-3": "<mask token>\ntrain_data = pd.read_csv('./dataset/train_park_daycare.csv')\ncctv = pd.read_csv('./dataset/cctv_origin.csv', encoding='EUC-KR')\ncctv = cctv.iloc[1:, :2]\ngu_dict_num = {'용산구': 0, '양천구': 1, '강동구': 2, '관악구': 3, '노원구': 4, '영등포': 5,\n '영등포구': 5, '마포구': 6, '서초구': 7, '성동구': 8, '금천구': 9, '도봉구': 10, '동작구': 11,\n '강서구': 12, '동대문': 13, '동대문구': 13, '강북구': 14, '서대문': 15, '서대문구': 15,\n '광진구': 16, '구로구': 17, '성북구': 18, '강남구': 19, '종로구': 20, '중구': 21, '중랑구':\n 22, '송파구': 23, '은평구': 24}\ngu_list = []\nfor i in cctv['구분']:\n gu_list.append(gu_dict_num[i])\ncctv['gu'] = gu_list\ncctv.drop(['구분'], axis=1, inplace=True)\ncctv = cctv.rename(columns={'총계': 'cctv_num'})\ncctv['cctv_num'] = cctv['cctv_num'].apply(lambda x: ''.join(x.split(',')))\ncctv['cctv_num'] = pd.to_numeric(cctv['cctv_num'])\nnew_data = pd.merge(train_data, cctv, on='gu', how='left')\nprint(new_data.info())\nnew_data.to_csv('./dataset/train_add_cctv.csv', header=True, index=False)\n",
"step-4": "import pandas as pd\ntrain_data = pd.read_csv('./dataset/train_park_daycare.csv')\ncctv = pd.read_csv('./dataset/cctv_origin.csv', encoding='EUC-KR')\ncctv = cctv.iloc[1:, :2]\ngu_dict_num = {'용산구': 0, '양천구': 1, '강동구': 2, '관악구': 3, '노원구': 4, '영등포': 5,\n '영등포구': 5, '마포구': 6, '서초구': 7, '성동구': 8, '금천구': 9, '도봉구': 10, '동작구': 11,\n '강서구': 12, '동대문': 13, '동대문구': 13, '강북구': 14, '서대문': 15, '서대문구': 15,\n '광진구': 16, '구로구': 17, '성북구': 18, '강남구': 19, '종로구': 20, '중구': 21, '중랑구':\n 22, '송파구': 23, '은평구': 24}\ngu_list = []\nfor i in cctv['구분']:\n gu_list.append(gu_dict_num[i])\ncctv['gu'] = gu_list\ncctv.drop(['구분'], axis=1, inplace=True)\ncctv = cctv.rename(columns={'총계': 'cctv_num'})\ncctv['cctv_num'] = cctv['cctv_num'].apply(lambda x: ''.join(x.split(',')))\ncctv['cctv_num'] = pd.to_numeric(cctv['cctv_num'])\nnew_data = pd.merge(train_data, cctv, on='gu', how='left')\nprint(new_data.info())\nnew_data.to_csv('./dataset/train_add_cctv.csv', header=True, index=False)\n",
"step-5": "import pandas as pd\n\n# 데이터 로드\ntrain_data = pd.read_csv('./dataset/train_park_daycare.csv')\ncctv = pd.read_csv(\"./dataset/cctv_origin.csv\", encoding=\"EUC-KR\")\n\n## 데이터 전처리\n# 데이터 추출\ncctv = cctv.iloc[1:, :2]\n\n# 구 매핑\ngu_dict_num = {'용산구': 0, '양천구': 1, '강동구': 2, '관악구': 3, '노원구': 4, '영등포': 5, '영등포구': 5, '마포구': 6, '서초구': 7, '성동구': 8, '금천구': 9, '도봉구': 10, '동작구': 11, '강서구': 12, '동대문': 13, '동대문구': 13, '강북구': 14, '서대문': 15, '서대문구': 15, '광진구': 16, '구로구': 17, '성북구': 18, '강남구': 19, '종로구': 20, '중구': 21, '중랑구': 22, '송파구': 23, '은평구': 24}\ngu_list = []\nfor i in cctv['구분']:\n gu_list.append(gu_dict_num[i])\ncctv['gu'] = gu_list\ncctv.drop(['구분'], axis=1, inplace=True)\n\n# 컬럼 이름 변경\ncctv = cctv.rename(columns={'총계': 'cctv_num'})\n\n# 데이터 타입 변경\ncctv['cctv_num'] = cctv['cctv_num'].apply(lambda x: \"\".join(x.split(',')))\ncctv['cctv_num'] = pd.to_numeric(cctv['cctv_num'])\n\n# 조인\nnew_data = pd.merge(train_data, cctv, on='gu', how='left')\n\nprint(new_data.info())\n# 저장\nnew_data.to_csv(\"./dataset/train_add_cctv.csv\", header=True, index=False)\n\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import json
import sys
import time
import netifaces
import requests
_GET_ADDR_MAX_ITERATION = 50
_POST_CALLBACK_MAX_ITERATION =50
_RETRY_INTERVAL = 5
def _process_error(message):
sys.stderr.write(message)
sys.stderr.write('\n')
sys.exit(1)
def _parse_kernel_cmdline():
    """Parse the Linux kernel command line into a name -> value dict.

    Tokens without '=' map to an empty string (str.partition yields '').
    """
    with open('/proc/cmdline', 'rt') as f:
        cmdline = f.read()
    # partition('=') -> (name, sep, value); [::2] keeps (name, value).
    return dict(token.partition('=')[::2] for token in cmdline.split())
def _get_interface_ip(mac_addr):
    """Return the first IPv4 address of the interface with link-layer
    address *mac_addr*, or None if no interface with that MAC has one.
    """
    for iface in netifaces.interfaces():
        addrs = netifaces.ifaddresses(iface)
        mac_matches = any(link.get('addr') == mac_addr
                          for link in addrs.get(netifaces.AF_LINK, []))
        if mac_matches:
            inet = addrs.get(netifaces.AF_INET)
            if inet:
                # NOTE: return first address, ironic API does not
                # support multiple
                return inet[0].get('addr')
            # MAC matched but no IPv4 address here -- keep scanning the
            # remaining interfaces (mirrors the original inner `break`).
    return None
def main():
    """Script informs Ironic that bootstrap loading is done.

    There are three mandatory parameters in kernel command line.
    Ironic prepares these two:
    'ironic_api_url' - URL of Ironic API service,
    'deployment_id' - UUID of the node in Ironic.
    Passed from PXE boot loader:
    'BOOTIF' - MAC address of the boot interface.
    """
    kernel_params = _parse_kernel_cmdline()
    api_url = kernel_params.get('ironic_api_url')
    deployment_id = kernel_params.get('deployment_id')
    inspect = kernel_params.get('inspect')
    # TODO(aarefiev): change ssh driver
    ironic_driver = kernel_params.get('callback-driver-name', 'ansible_ssh')

    if inspect and api_url is None:
        _process_error('Ironic ansible callback: Mandatory parameter '
                       '"ironic_api_url" is missing.')

    if api_url is None or deployment_id is None:
        _process_error('Mandatory parameter ("ironic_api_url" or '
                       '"deployment_id") is missing.')

    boot_mac = kernel_params.get('BOOTIF')
    if boot_mac is None:
        _process_error('Cannot define boot interface, "BOOTIF" parameter is '
                       'missing.')

    # There is a difference in syntax in BOOTIF variable between pxe and ipxe
    # boot with Ironic. For pxe boot the leading `01-' denotes the device type
    # (Ethernet) and is not a part of the MAC address.
    if boot_mac.startswith('01-'):
        boot_mac = boot_mac[3:].replace('-', ':')

    # Poll until the boot interface has an IP address (DHCP may be slow).
    for n in range(_GET_ADDR_MAX_ITERATION):
        boot_ip = _get_interface_ip(boot_mac)
        if boot_ip is not None:
            break
        time.sleep(_RETRY_INTERVAL)
    else:
        _process_error('Cannot find IP address of boot interface.')

    data = {"callback_url": "ssh://" + boot_ip}

    if inspect:
        # BUG FIX: the original assignment was missing its closing
        # parenthesis, which made this module fail to parse at all.
        passthru = ('%(api-url)s/v1/drivers/%(driver)s/vendor_passthru'
                    '/inspect' % {'api-url': api_url,
                                  'driver': ironic_driver})
    else:
        passthru = ('%(api-url)s/v1/nodes/%(deployment_id)s/vendor_passthru'
                    '/heartbeat' % {'api-url': api_url,
                                    'deployment_id': deployment_id})

    # POST the callback URL, retrying on connection errors or any status
    # other than 202 Accepted; give up (and exit) after the last attempt.
    for attempt in range(_POST_CALLBACK_MAX_ITERATION):
        try:
            resp = requests.post(passthru, data=json.dumps(data),
                                 headers={'Content-Type': 'application/json',
                                          'Accept': 'application/json'})
        except Exception as e:
            error = str(e)
        else:
            if resp.status_code != 202:
                error = ('Wrong status code %d returned from Ironic API' %
                         resp.status_code)
            else:
                break

        if attempt == (_POST_CALLBACK_MAX_ITERATION - 1):
            _process_error(error)

        time.sleep(_RETRY_INTERVAL)


if __name__ == '__main__':
    sys.exit(main())
|
normal
|
{
"blob_id": "8dab85622a29bc40f8ad6150f9e6f284853aeaf8",
"index": 4235,
"step-1": "#!/usr/bin/python\n# -*- coding: utf-8 -*-\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\"); you may\n# not use this file except in compliance with the License. You may obtain\n# a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the\n# License for the specific language governing permissions and limitations\n# under the License.\n\nimport json\nimport sys\nimport time\n\nimport netifaces\nimport requests\n\n\n_GET_ADDR_MAX_ITERATION = 50\n_POST_CALLBACK_MAX_ITERATION =50\n_RETRY_INTERVAL = 5\n\n\ndef _process_error(message):\n sys.stderr.write(message)\n sys.stderr.write('\\n')\n sys.exit(1)\n\n\ndef _parse_kernel_cmdline():\n \"\"\"Parse linux kernel command line\"\"\"\n with open('/proc/cmdline', 'rt') as f:\n cmdline = f.read()\n parameters = {}\n for p in cmdline.split():\n name, _, value = p.partition('=')\n parameters[name] = value\n return parameters\n\ndef _get_interface_ip(mac_addr):\n \"\"\"\"Get IP address of interface by mac.\"\"\"\n interfaces = netifaces.interfaces()\n for iface in interfaces:\n addresses = netifaces.ifaddresses(iface)\n link_addresses = addresses.get(netifaces.AF_LINK, [])\n for link_addr in link_addresses:\n if link_addr.get('addr') == mac_addr:\n ip_addresses = addresses.get(netifaces.AF_INET)\n if ip_addresses:\n # NOTE: return first address, ironic API does not\n # support multiple\n return ip_addresses[0].get('addr')\n else:\n break\n\ndef main():\n \"\"\"Script informs Ironic that bootstrap loading is done.\n\n There are three mandatory parameters in kernel command line.\n Ironic prepares these two:\n 'ironic_api_url' - URL of Ironic API service,\n 'deployment_id' - UUID of the node in Ironic.\n Passed from PXE boot loader:\n 'BOOTIF' - MAC address of 
the boot interface.\n \"\"\"\n kernel_params = _parse_kernel_cmdline()\n api_url = kernel_params.get('ironic_api_url')\n deployment_id = kernel_params.get('deployment_id')\n inspect = kernel_params.get('inspect')\n # TODO(aarefiev): change ssh driver\n ironic_driver = kernel_params.get('callback-driver-name', 'ansible_ssh')\n if inspect and api_url is None:\n _process_error('Ironic ansible callback: Mandatory parameter '\n '\"ironic_api_url\" is missing.')\n if api_url is None or deployment_id is None:\n _process_error('Mandatory parameter (\"ironic_api_url\" or '\n '\"deployment_id\") is missing.')\n\n boot_mac = kernel_params.get('BOOTIF')\n if boot_mac is None:\n _process_error('Cannot define boot interface, \"BOOTIF\" parameter is '\n 'missing.')\n\n # There is a difference in syntax in BOOTIF variable between pxe and ipxe\n # boot with Ironic. For pxe boot the the leading `01-' denotes the device type\n # (Ethernet) and is not a part of the MAC address\n if boot_mac.startswith('01-'):\n boot_mac = boot_mac[3:].replace('-', ':')\n\n for n in range(_GET_ADDR_MAX_ITERATION):\n boot_ip = _get_interface_ip(boot_mac)\n if boot_ip is not None:\n break\n time.sleep(_RETRY_INTERVAL)\n else:\n _process_error('Cannot find IP address of boot interface.')\n\n data = {\"callback_url\": \"ssh://\" + boot_ip}\n\n if inspect:\n passthru = ('%(api-url)s/v1/drivers/%(driver)s/vendor_passthru'\n '/inspect' % {'api-url': api_url,\n 'driver': ironic_driver}\n else:\n passthru = '%(api-url)s/v1/nodes/%(deployment_id)s/vendor_passthru' \\\n '/heartbeat' % {'api-url': api_url,\n 'deployment_id': deployment_id}\n\n for attempt in range(_POST_CALLBACK_MAX_ITERATION):\n try:\n resp = requests.post(passthru, data=json.dumps(data),\n headers={'Content-Type': 'application/json',\n 'Accept': 'application/json'})\n except Exception as e:\n error = str(e)\n else:\n if resp.status_code != 202:\n error= ('Wrong status code %d returned from Ironic API' %\n resp.status_code)\n else:\n break\n\n if 
attempt == (_POST_CALLBACK_MAX_ITERATION - 1):\n _process_error(error)\n\n time.sleep(_RETRY_INTERVAL)\n\n\nif __name__ == '__main__':\n sys.exit(main())\n\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
# -*- coding: utf-8 -*-
import os
from flask import Flask, request,render_template,url_for
from flask_uploads import UploadSet, configure_uploads, IMAGES, patch_request_class
import sys
sys.path.insert(1, 'script')
from backend import model
import io
from PIL import Image
import base64
import numpy as np
app = Flask(__name__)
# Uploaded photos are stored under ./images (resolved to an absolute path).
app.config['UPLOADED_PHOTOS_DEST'] = os.path.realpath('images')
# Upload set named 'photos', restricted to image file extensions.
photos = UploadSet('photos', IMAGES)
configure_uploads(app, photos)
# flask_uploads helper: caps the request size (default limit, no size given).
patch_request_class(app)
@app.route('/', methods=['GET', 'POST'])
def upload_file():
    """Serve the upload form; on a photo POST, run the model on the upload.

    Renders display.html with the uploaded image URL, the predicted label,
    and the model's intermediate images encoded as base64 JPEG strings.
    """
    # Guard clause: anything other than a photo POST gets the upload form.
    if request.method != 'POST' or 'photo' not in request.files:
        return render_template('index.html')

    saved_name = photos.save(request.files['photo'])
    image_url = photos.url(saved_name)
    stages, labels, prediction = model(image_url)

    encoded = []
    for stage in stages:
        # Scale the float image back to 0-255 and serialize it as JPEG.
        pil_img = Image.fromarray((stage * 255).astype(np.uint8))
        buffer = io.BytesIO()
        pil_img.save(buffer, 'jpeg', quality=100)
        encoded.append(base64.b64encode(buffer.getvalue()).decode('ascii'))

    return render_template('display.html', image=image_url,
                           label=prediction, results=zip(encoded, labels))
# Start the Flask development server (blocks until the process exits).
app.run(threaded=False)
# NOTE(review): this line only executes after the server shuts down, and
# calling render_template outside a request context raises — looks like
# leftover dead code; confirm and remove.
render_template('index.html')
|
normal
|
{
"blob_id": "93d0d73d56b04bba505265958fccff229f5eaf49",
"index": 872,
"step-1": "<mask token>\n\n\n@app.route('/', methods=['GET', 'POST'])\ndef upload_file():\n if request.method == 'POST' and 'photo' in request.files:\n filename = photos.save(request.files['photo'])\n file_url = photos.url(filename)\n path, label, element = model(file_url)\n result = []\n for el in path:\n img = Image.fromarray((el * 255).astype(np.uint8))\n file_object = io.BytesIO()\n img.save(file_object, 'jpeg', quality=100)\n figdata_jgp = base64.b64encode(file_object.getvalue())\n result.append(figdata_jgp.decode('ascii'))\n return render_template('display.html', image=file_url, label=\n element, results=zip(result, label))\n return render_template('index.html')\n\n\n<mask token>\n",
"step-2": "<mask token>\nsys.path.insert(1, 'script')\n<mask token>\nconfigure_uploads(app, photos)\npatch_request_class(app)\n\n\n@app.route('/', methods=['GET', 'POST'])\ndef upload_file():\n if request.method == 'POST' and 'photo' in request.files:\n filename = photos.save(request.files['photo'])\n file_url = photos.url(filename)\n path, label, element = model(file_url)\n result = []\n for el in path:\n img = Image.fromarray((el * 255).astype(np.uint8))\n file_object = io.BytesIO()\n img.save(file_object, 'jpeg', quality=100)\n figdata_jgp = base64.b64encode(file_object.getvalue())\n result.append(figdata_jgp.decode('ascii'))\n return render_template('display.html', image=file_url, label=\n element, results=zip(result, label))\n return render_template('index.html')\n\n\napp.run(threaded=False)\nrender_template('index.html')\n",
"step-3": "<mask token>\nsys.path.insert(1, 'script')\n<mask token>\napp = Flask(__name__)\napp.config['UPLOADED_PHOTOS_DEST'] = os.path.realpath('images')\nphotos = UploadSet('photos', IMAGES)\nconfigure_uploads(app, photos)\npatch_request_class(app)\n\n\n@app.route('/', methods=['GET', 'POST'])\ndef upload_file():\n if request.method == 'POST' and 'photo' in request.files:\n filename = photos.save(request.files['photo'])\n file_url = photos.url(filename)\n path, label, element = model(file_url)\n result = []\n for el in path:\n img = Image.fromarray((el * 255).astype(np.uint8))\n file_object = io.BytesIO()\n img.save(file_object, 'jpeg', quality=100)\n figdata_jgp = base64.b64encode(file_object.getvalue())\n result.append(figdata_jgp.decode('ascii'))\n return render_template('display.html', image=file_url, label=\n element, results=zip(result, label))\n return render_template('index.html')\n\n\napp.run(threaded=False)\nrender_template('index.html')\n",
"step-4": "import os\nfrom flask import Flask, request, render_template, url_for\nfrom flask_uploads import UploadSet, configure_uploads, IMAGES, patch_request_class\nimport sys\nsys.path.insert(1, 'script')\nfrom backend import model\nimport io\nfrom PIL import Image\nimport base64\nimport numpy as np\napp = Flask(__name__)\napp.config['UPLOADED_PHOTOS_DEST'] = os.path.realpath('images')\nphotos = UploadSet('photos', IMAGES)\nconfigure_uploads(app, photos)\npatch_request_class(app)\n\n\n@app.route('/', methods=['GET', 'POST'])\ndef upload_file():\n if request.method == 'POST' and 'photo' in request.files:\n filename = photos.save(request.files['photo'])\n file_url = photos.url(filename)\n path, label, element = model(file_url)\n result = []\n for el in path:\n img = Image.fromarray((el * 255).astype(np.uint8))\n file_object = io.BytesIO()\n img.save(file_object, 'jpeg', quality=100)\n figdata_jgp = base64.b64encode(file_object.getvalue())\n result.append(figdata_jgp.decode('ascii'))\n return render_template('display.html', image=file_url, label=\n element, results=zip(result, label))\n return render_template('index.html')\n\n\napp.run(threaded=False)\nrender_template('index.html')\n",
"step-5": "\n# -*- coding: utf-8 -*-\nimport os\nfrom flask import Flask, request,render_template,url_for\nfrom flask_uploads import UploadSet, configure_uploads, IMAGES, patch_request_class\nimport sys\nsys.path.insert(1, 'script')\nfrom backend import model\nimport io\nfrom PIL import Image\nimport base64\nimport numpy as np\n\n\n\n\napp = Flask(__name__)\napp.config['UPLOADED_PHOTOS_DEST'] = os.path.realpath('images')\n\n\n\nphotos = UploadSet('photos', IMAGES)\nconfigure_uploads(app, photos)\npatch_request_class(app) \n\n@app.route('/', methods=['GET', 'POST'])\ndef upload_file():\n if request.method == 'POST' and 'photo' in request.files:\n filename = photos.save(request.files['photo'])\n file_url = photos.url(filename)\n path,label,element = model(file_url)\n result = []\n for el in path :\n img = Image.fromarray((el * 255).astype(np.uint8))\n file_object = io.BytesIO()\n img.save(file_object, 'jpeg',quality=100)\n figdata_jgp = base64.b64encode(file_object.getvalue())\n result.append(figdata_jgp.decode('ascii'))\n return render_template('display.html',image = file_url,label = element, results=zip(result,label))\n return render_template('index.html')\n\n\napp.run(threaded=False)\nrender_template('index.html')\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
from RestClient4py.client import RestClient
from API_Wrap import util
import os
import json
# Kakao issues four per-app keys; only the REST API key is used for the
# Authorization header below.
kakao_native_app_key, kakao_rest_api_key, kakao_javascript_key, kakao_admin_key = util.kakao_auth()
client = RestClient()
# Kakao REST APIs authenticate with a 'KakaoAK <rest-api-key>' header.
client.set_header("Authorization", "KakaoAK {}".format(kakao_rest_api_key))
client.set_header("Accept", "*/*")
"""
https://developers.kakao.com/docs/restapi/translation
"""
# Language codes the Kakao translation API accepts, shared by the
# src_lang and target_lang validations (was duplicated inline twice).
_SUPPORTED_LANGS = ("kr", "en", "jp", "cn", "vi", "id", "ar", "bn", "de",
                    "es", "fr", "hi", "it", "ms", "nl", "pt", "ru", "th",
                    "tr")


def _lang_help(param_name):
    """Build the error message listing the supported language codes.

    The original message concatenated all table rows with no separators,
    producing one unreadable run-on line; join with newlines instead.
    """
    lines = [
        "[ERROR] %s parameter should be one of below language codes" % param_name,
        "--------------------------------------------------------------",
        "Number | Language Code | Language",
        "1 | kr | 한국어",
        "2 | en | 영어",
        "3 | jp | 일본어",
        "4 | cn | 중국어",
        "5 | vi | 베트남어",
        "6 | id | 인도네시아어",
        "7 | ar | 아랍어",
        "8 | bn | 뱅갈어",
        "9 | de | 독일어",
        "10 | es | 스페인어",
        "11 | fr | 프랑스어",
        "12 | hi | 힌디어",
        "13 | it | 이탈리아어",
        "14 | ms | 말레이시아어",
        "15 | nl | 네덜란드어",
        "16 | pt | 포르투갈어",
        "17 | ru | 러시아어",
        "18 | th | 태국어",
        "19 | tr | 터키어",
    ]
    return "\n".join(lines)


def translation(query, src_lang, target_lang):
    """Translate *query* via Kakao's translation REST API.

    Args:
        query: text to translate, at most 5,000 characters.
        src_lang / target_lang: language codes from the set listed at
            https://developers.kakao.com/docs/restapi/translation.

    Returns:
        The response of ``client.post`` against the translation endpoint.

    Raises:
        AttributeError: on any invalid parameter (type, length, or an
            unsupported language code) — kept for backward compatibility
            with the original implementation.
    """
    if not isinstance(query, str):
        raise AttributeError("[ERROR] query parameter should be string type")
    if len(query) > 5000:
        raise AttributeError("[ERROR] Maximum length of query parameter "
                             "should be same or less than 5,000 chars")

    if not isinstance(src_lang, str):
        raise AttributeError("[ERROR] src_lang parameter should be string type")
    if src_lang not in _SUPPORTED_LANGS:
        raise AttributeError(_lang_help("src_lang"))

    if not isinstance(target_lang, str):
        raise AttributeError("[ERROR] target_lang parameter should be string type")
    if target_lang not in _SUPPORTED_LANGS:
        raise AttributeError(_lang_help("target_lang"))

    postData = {
        "query": query,
        "src_lang": src_lang,
        "target_lang": target_lang
    }
    return client.post("https://kapi.kakao.com/v1/translation/translate", data=postData)
|
normal
|
{
"blob_id": "7f58179efecd5a0d691a5c6d83b808f2cd2fcba3",
"index": 5332,
"step-1": "<mask token>\n\n\ndef translation(query, src_lang, target_lang):\n if type(query) != str:\n raise AttributeError('[ERROR] query parameter should be string type')\n elif len(query) > 5000:\n raise AttributeError(\n '[ERROR] Maximum length of query parameter should be same or less than 5,000 chars'\n )\n if type(src_lang) != str:\n raise AttributeError('[ERROR] src_lang parameter should be string type'\n )\n elif src_lang not in ['kr', 'en', 'jp', 'cn', 'vi', 'id', 'ar', 'bn',\n 'de', 'es', 'fr', 'hi', 'it', 'ms', 'nl', 'pt', 'ru', 'th', 'tr']:\n raise AttributeError(\n '[ERROR] src_lang parameter should be one of below language codes--------------------------------------------------------------Number | Language Code | Language1 | kr | 한국어2 | en | 영어3 | jp | 일본어4 | cn | 중국어5 | vi | 베트남어6 | id | 인도네시아어7 | ar | 아랍어8 | bn | 뱅갈어9 | de | 독일어10 | es | 스페인어11 | fr | 프랑스어12 | hi | 힌디어13 | it | 이탈리아어14 | ms | 말레이시아어15 | nl | 네덜란드어16 | pt | 포르투갈어17 | ru | 러시아어18 | th | 태국어19 | tr | 터키어'\n )\n if type(target_lang) != str:\n raise AttributeError(\n '[ERROR] target_lang parameter should be string type')\n elif target_lang not in ['kr', 'en', 'jp', 'cn', 'vi', 'id', 'ar', 'bn',\n 'de', 'es', 'fr', 'hi', 'it', 'ms', 'nl', 'pt', 'ru', 'th', 'tr']:\n raise AttributeError(\n '[ERROR] target_lang parameter should be one of below language codes--------------------------------------------------------------Number | Language Code | Language1 | kr | 한국어2 | en | 영어3 | jp | 일본어4 | cn | 중국어5 | vi | 베트남어6 | id | 인도네시아어7 | ar | 아랍어8 | bn | 뱅갈어9 | de | 독일어10 | es | 스페인어11 | fr | 프랑스어12 | hi | 힌디어13 | it | 이탈리아어14 | ms | 말레이시아어15 | nl | 네덜란드어16 | pt | 포르투갈어17 | ru | 러시아어18 | th | 태국어19 | tr | 터키어'\n )\n postData = {'query': query, 'src_lang': src_lang, 'target_lang':\n target_lang}\n return client.post('https://kapi.kakao.com/v1/translation/translate',\n data=postData)\n",
"step-2": "<mask token>\nclient.set_header('Authorization', 'KakaoAK {}'.format(kakao_rest_api_key))\nclient.set_header('Accept', '*/*')\n<mask token>\n\n\ndef translation(query, src_lang, target_lang):\n if type(query) != str:\n raise AttributeError('[ERROR] query parameter should be string type')\n elif len(query) > 5000:\n raise AttributeError(\n '[ERROR] Maximum length of query parameter should be same or less than 5,000 chars'\n )\n if type(src_lang) != str:\n raise AttributeError('[ERROR] src_lang parameter should be string type'\n )\n elif src_lang not in ['kr', 'en', 'jp', 'cn', 'vi', 'id', 'ar', 'bn',\n 'de', 'es', 'fr', 'hi', 'it', 'ms', 'nl', 'pt', 'ru', 'th', 'tr']:\n raise AttributeError(\n '[ERROR] src_lang parameter should be one of below language codes--------------------------------------------------------------Number | Language Code | Language1 | kr | 한국어2 | en | 영어3 | jp | 일본어4 | cn | 중국어5 | vi | 베트남어6 | id | 인도네시아어7 | ar | 아랍어8 | bn | 뱅갈어9 | de | 독일어10 | es | 스페인어11 | fr | 프랑스어12 | hi | 힌디어13 | it | 이탈리아어14 | ms | 말레이시아어15 | nl | 네덜란드어16 | pt | 포르투갈어17 | ru | 러시아어18 | th | 태국어19 | tr | 터키어'\n )\n if type(target_lang) != str:\n raise AttributeError(\n '[ERROR] target_lang parameter should be string type')\n elif target_lang not in ['kr', 'en', 'jp', 'cn', 'vi', 'id', 'ar', 'bn',\n 'de', 'es', 'fr', 'hi', 'it', 'ms', 'nl', 'pt', 'ru', 'th', 'tr']:\n raise AttributeError(\n '[ERROR] target_lang parameter should be one of below language codes--------------------------------------------------------------Number | Language Code | Language1 | kr | 한국어2 | en | 영어3 | jp | 일본어4 | cn | 중국어5 | vi | 베트남어6 | id | 인도네시아어7 | ar | 아랍어8 | bn | 뱅갈어9 | de | 독일어10 | es | 스페인어11 | fr | 프랑스어12 | hi | 힌디어13 | it | 이탈리아어14 | ms | 말레이시아어15 | nl | 네덜란드어16 | pt | 포르투갈어17 | ru | 러시아어18 | th | 태국어19 | tr | 터키어'\n )\n postData = {'query': query, 'src_lang': src_lang, 'target_lang':\n target_lang}\n return client.post('https://kapi.kakao.com/v1/translation/translate',\n 
data=postData)\n",
"step-3": "<mask token>\n(kakao_native_app_key, kakao_rest_api_key, kakao_javascript_key,\n kakao_admin_key) = util.kakao_auth()\nclient = RestClient()\nclient.set_header('Authorization', 'KakaoAK {}'.format(kakao_rest_api_key))\nclient.set_header('Accept', '*/*')\n<mask token>\n\n\ndef translation(query, src_lang, target_lang):\n if type(query) != str:\n raise AttributeError('[ERROR] query parameter should be string type')\n elif len(query) > 5000:\n raise AttributeError(\n '[ERROR] Maximum length of query parameter should be same or less than 5,000 chars'\n )\n if type(src_lang) != str:\n raise AttributeError('[ERROR] src_lang parameter should be string type'\n )\n elif src_lang not in ['kr', 'en', 'jp', 'cn', 'vi', 'id', 'ar', 'bn',\n 'de', 'es', 'fr', 'hi', 'it', 'ms', 'nl', 'pt', 'ru', 'th', 'tr']:\n raise AttributeError(\n '[ERROR] src_lang parameter should be one of below language codes--------------------------------------------------------------Number | Language Code | Language1 | kr | 한국어2 | en | 영어3 | jp | 일본어4 | cn | 중국어5 | vi | 베트남어6 | id | 인도네시아어7 | ar | 아랍어8 | bn | 뱅갈어9 | de | 독일어10 | es | 스페인어11 | fr | 프랑스어12 | hi | 힌디어13 | it | 이탈리아어14 | ms | 말레이시아어15 | nl | 네덜란드어16 | pt | 포르투갈어17 | ru | 러시아어18 | th | 태국어19 | tr | 터키어'\n )\n if type(target_lang) != str:\n raise AttributeError(\n '[ERROR] target_lang parameter should be string type')\n elif target_lang not in ['kr', 'en', 'jp', 'cn', 'vi', 'id', 'ar', 'bn',\n 'de', 'es', 'fr', 'hi', 'it', 'ms', 'nl', 'pt', 'ru', 'th', 'tr']:\n raise AttributeError(\n '[ERROR] target_lang parameter should be one of below language codes--------------------------------------------------------------Number | Language Code | Language1 | kr | 한국어2 | en | 영어3 | jp | 일본어4 | cn | 중국어5 | vi | 베트남어6 | id | 인도네시아어7 | ar | 아랍어8 | bn | 뱅갈어9 | de | 독일어10 | es | 스페인어11 | fr | 프랑스어12 | hi | 힌디어13 | it | 이탈리아어14 | ms | 말레이시아어15 | nl | 네덜란드어16 | pt | 포르투갈어17 | ru | 러시아어18 | th | 태국어19 | tr | 터키어'\n )\n postData = {'query': query, 
'src_lang': src_lang, 'target_lang':\n target_lang}\n return client.post('https://kapi.kakao.com/v1/translation/translate',\n data=postData)\n",
"step-4": "from RestClient4py.client import RestClient\nfrom API_Wrap import util\nimport os\nimport json\n(kakao_native_app_key, kakao_rest_api_key, kakao_javascript_key,\n kakao_admin_key) = util.kakao_auth()\nclient = RestClient()\nclient.set_header('Authorization', 'KakaoAK {}'.format(kakao_rest_api_key))\nclient.set_header('Accept', '*/*')\n<mask token>\n\n\ndef translation(query, src_lang, target_lang):\n if type(query) != str:\n raise AttributeError('[ERROR] query parameter should be string type')\n elif len(query) > 5000:\n raise AttributeError(\n '[ERROR] Maximum length of query parameter should be same or less than 5,000 chars'\n )\n if type(src_lang) != str:\n raise AttributeError('[ERROR] src_lang parameter should be string type'\n )\n elif src_lang not in ['kr', 'en', 'jp', 'cn', 'vi', 'id', 'ar', 'bn',\n 'de', 'es', 'fr', 'hi', 'it', 'ms', 'nl', 'pt', 'ru', 'th', 'tr']:\n raise AttributeError(\n '[ERROR] src_lang parameter should be one of below language codes--------------------------------------------------------------Number | Language Code | Language1 | kr | 한국어2 | en | 영어3 | jp | 일본어4 | cn | 중국어5 | vi | 베트남어6 | id | 인도네시아어7 | ar | 아랍어8 | bn | 뱅갈어9 | de | 독일어10 | es | 스페인어11 | fr | 프랑스어12 | hi | 힌디어13 | it | 이탈리아어14 | ms | 말레이시아어15 | nl | 네덜란드어16 | pt | 포르투갈어17 | ru | 러시아어18 | th | 태국어19 | tr | 터키어'\n )\n if type(target_lang) != str:\n raise AttributeError(\n '[ERROR] target_lang parameter should be string type')\n elif target_lang not in ['kr', 'en', 'jp', 'cn', 'vi', 'id', 'ar', 'bn',\n 'de', 'es', 'fr', 'hi', 'it', 'ms', 'nl', 'pt', 'ru', 'th', 'tr']:\n raise AttributeError(\n '[ERROR] target_lang parameter should be one of below language codes--------------------------------------------------------------Number | Language Code | Language1 | kr | 한국어2 | en | 영어3 | jp | 일본어4 | cn | 중국어5 | vi | 베트남어6 | id | 인도네시아어7 | ar | 아랍어8 | bn | 뱅갈어9 | de | 독일어10 | es | 스페인어11 | fr | 프랑스어12 | hi | 힌디어13 | it | 이탈리아어14 | ms | 말레이시아어15 | nl | 네덜란드어16 | pt | 
포르투갈어17 | ru | 러시아어18 | th | 태국어19 | tr | 터키어'\n )\n postData = {'query': query, 'src_lang': src_lang, 'target_lang':\n target_lang}\n return client.post('https://kapi.kakao.com/v1/translation/translate',\n data=postData)\n",
"step-5": "from RestClient4py.client import RestClient\nfrom API_Wrap import util\nimport os\nimport json\n\n\nkakao_native_app_key, kakao_rest_api_key, kakao_javascript_key, kakao_admin_key = util.kakao_auth()\nclient = RestClient()\nclient.set_header(\"Authorization\", \"KakaoAK {}\".format(kakao_rest_api_key))\nclient.set_header(\"Accept\", \"*/*\")\n\n\"\"\"\n https://developers.kakao.com/docs/restapi/translation\n\"\"\"\ndef translation(query, src_lang, target_lang):\n if type(query) != str:\n raise AttributeError(\"[ERROR] query parameter should be string type\")\n elif len(query) > 5000:\n raise AttributeError(\"[ERROR] Maximum length of query parameter should be same or less than 5,000 chars\")\n\n if type(src_lang) != str:\n raise AttributeError(\"[ERROR] src_lang parameter should be string type\")\n elif src_lang not in [\"kr\", \"en\", \"jp\", \"cn\", \"vi\", \"id\", \"ar\", \"bn\", \"de\", \"es\", \"fr\", \"hi\", \"it\", \"ms\", \"nl\",\n \"pt\", \"ru\", \"th\", \"tr\"]:\n raise AttributeError(\"[ERROR] src_lang parameter should be one of below language codes\"\n \"--------------------------------------------------------------\"\n \"Number | Language Code | Language\"\n \"1 | kr | 한국어\"\n \"2 | en | 영어\"\n \"3 | jp | 일본어\"\n \"4 | cn | 중국어\"\n \"5 | vi | 베트남어\"\n \"6 | id | 인도네시아어\"\n \"7 | ar | 아랍어\"\n \"8 | bn | 뱅갈어\"\n \"9 | de | 독일어\"\n \"10 | es | 스페인어\"\n \"11 | fr | 프랑스어\"\n \"12 | hi | 힌디어\"\n \"13 | it | 이탈리아어\"\n \"14 | ms | 말레이시아어\"\n \"15 | nl | 네덜란드어\"\n \"16 | pt | 포르투갈어\"\n \"17 | ru | 러시아어\"\n \"18 | th | 태국어\"\n \"19 | tr | 터키어\")\n\n if type(target_lang) != str:\n raise AttributeError(\"[ERROR] target_lang parameter should be string type\")\n elif target_lang not in [\"kr\", \"en\", \"jp\", \"cn\", \"vi\", \"id\", \"ar\", \"bn\", \"de\", \"es\", \"fr\", \"hi\", \"it\", \"ms\", \"nl\",\n \"pt\", \"ru\", \"th\", \"tr\"]:\n raise AttributeError(\"[ERROR] target_lang parameter should be one of below language codes\"\n 
\"--------------------------------------------------------------\"\n \"Number | Language Code | Language\"\n \"1 | kr | 한국어\"\n \"2 | en | 영어\"\n \"3 | jp | 일본어\"\n \"4 | cn | 중국어\"\n \"5 | vi | 베트남어\"\n \"6 | id | 인도네시아어\"\n \"7 | ar | 아랍어\"\n \"8 | bn | 뱅갈어\"\n \"9 | de | 독일어\"\n \"10 | es | 스페인어\"\n \"11 | fr | 프랑스어\"\n \"12 | hi | 힌디어\"\n \"13 | it | 이탈리아어\"\n \"14 | ms | 말레이시아어\"\n \"15 | nl | 네덜란드어\"\n \"16 | pt | 포르투갈어\"\n \"17 | ru | 러시아어\"\n \"18 | th | 태국어\"\n \"19 | tr | 터키어\")\n\n postData = {\n \"query\": query,\n \"src_lang\": src_lang,\n \"target_lang\": target_lang\n }\n\n return client.post(\"https://kapi.kakao.com/v1/translation/translate\", data=postData)",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
while True:
_, frame = video_capture.read()
frame = cv2.medianBlur(frame, 3)
frame = cv2.filter2D(frame, -1, MASK)
_, frame = cv2.threshold(frame, 10, 255, cv2.THRESH_BINARY_INV)
streamer.update_frame(frame)
if not streamer.is_streaming:
streamer.start_streaming()
<|reserved_special_token_1|>
<|reserved_special_token_0|>
MASK = np.array([[0, 1, 0], [1, -4, 1], [0, 1, 0]])
port = 3030
require_login = False
streamer = Streamer(port, require_login)
video_capture = cv2.VideoCapture('http://149.43.156.105/mjpg/video.mjpg')
while True:
_, frame = video_capture.read()
frame = cv2.medianBlur(frame, 3)
frame = cv2.filter2D(frame, -1, MASK)
_, frame = cv2.threshold(frame, 10, 255, cv2.THRESH_BINARY_INV)
streamer.update_frame(frame)
if not streamer.is_streaming:
streamer.start_streaming()
<|reserved_special_token_1|>
from flask_opencv_streamer.streamer import Streamer
import cv2
import numpy as np
MASK = np.array([[0, 1, 0], [1, -4, 1], [0, 1, 0]])
port = 3030
require_login = False
streamer = Streamer(port, require_login)
video_capture = cv2.VideoCapture('http://149.43.156.105/mjpg/video.mjpg')
while True:
_, frame = video_capture.read()
frame = cv2.medianBlur(frame, 3)
frame = cv2.filter2D(frame, -1, MASK)
_, frame = cv2.threshold(frame, 10, 255, cv2.THRESH_BINARY_INV)
streamer.update_frame(frame)
if not streamer.is_streaming:
streamer.start_streaming()
<|reserved_special_token_1|>
from flask_opencv_streamer.streamer import Streamer
import cv2
import numpy as np

# 4-neighbour discrete Laplacian kernel; filter2D with it emphasizes edges.
MASK = np.array([
    [0, 1, 0],
    [1, -4, 1],
    [0, 1, 0]
])

# Serve the processed stream on this port, without authentication.
port = 3030
require_login = False
streamer = Streamer(port, require_login)

# Public MJPEG camera feed used as the video source.
video_capture = cv2.VideoCapture('http://149.43.156.105/mjpg/video.mjpg')

while True:
    _, frame = video_capture.read()
    # Denoise, edge-detect, then inverse-threshold to a black-on-white sketch.
    frame = cv2.medianBlur(frame, 3)
    frame = cv2.filter2D(frame, -1, MASK)
    _, frame = cv2.threshold(frame, 10, 255, cv2.THRESH_BINARY_INV)
    streamer.update_frame(frame)
    if not streamer.is_streaming:
        streamer.start_streaming()
    # was in the example, but it seems to work without it
    # cv2.waitKey(30)
|
flexible
|
{
"blob_id": "a19b4928c9423dae6c60f39dbc5af0673b433c8e",
"index": 3551,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nwhile True:\n _, frame = video_capture.read()\n frame = cv2.medianBlur(frame, 3)\n frame = cv2.filter2D(frame, -1, MASK)\n _, frame = cv2.threshold(frame, 10, 255, cv2.THRESH_BINARY_INV)\n streamer.update_frame(frame)\n if not streamer.is_streaming:\n streamer.start_streaming()\n",
"step-3": "<mask token>\nMASK = np.array([[0, 1, 0], [1, -4, 1], [0, 1, 0]])\nport = 3030\nrequire_login = False\nstreamer = Streamer(port, require_login)\nvideo_capture = cv2.VideoCapture('http://149.43.156.105/mjpg/video.mjpg')\nwhile True:\n _, frame = video_capture.read()\n frame = cv2.medianBlur(frame, 3)\n frame = cv2.filter2D(frame, -1, MASK)\n _, frame = cv2.threshold(frame, 10, 255, cv2.THRESH_BINARY_INV)\n streamer.update_frame(frame)\n if not streamer.is_streaming:\n streamer.start_streaming()\n",
"step-4": "from flask_opencv_streamer.streamer import Streamer\nimport cv2\nimport numpy as np\nMASK = np.array([[0, 1, 0], [1, -4, 1], [0, 1, 0]])\nport = 3030\nrequire_login = False\nstreamer = Streamer(port, require_login)\nvideo_capture = cv2.VideoCapture('http://149.43.156.105/mjpg/video.mjpg')\nwhile True:\n _, frame = video_capture.read()\n frame = cv2.medianBlur(frame, 3)\n frame = cv2.filter2D(frame, -1, MASK)\n _, frame = cv2.threshold(frame, 10, 255, cv2.THRESH_BINARY_INV)\n streamer.update_frame(frame)\n if not streamer.is_streaming:\n streamer.start_streaming()\n",
"step-5": "from flask_opencv_streamer.streamer import Streamer\r\nimport cv2\r\nimport numpy as np\r\n\r\nMASK = np.array([\r\n [0, 1, 0],\r\n [1, -4, 1],\r\n [0, 1, 0]\r\n])\r\n\r\nport = 3030\r\nrequire_login = False\r\nstreamer = Streamer(port, require_login)\r\n\r\nvideo_capture = cv2.VideoCapture('http://149.43.156.105/mjpg/video.mjpg')\r\n\r\nwhile True:\r\n _, frame = video_capture.read()\r\n\r\n frame = cv2.medianBlur(frame, 3)\r\n frame = cv2.filter2D(frame, -1, MASK)\r\n _, frame = cv2.threshold(frame, 10, 255, cv2.THRESH_BINARY_INV)\r\n streamer.update_frame(frame)\r\n\r\n if not streamer.is_streaming:\r\n streamer.start_streaming()\r\n # было в примере, но вроде и без этого работает\r\n # cv2.waitKey(30)",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
for line in ratings_dat:
arr = line.split('::')
new_line = '\t'.join(arr)
ratings_csv.write(new_line)
ratings_dat.close()
ratings_csv.close()
<|reserved_special_token_1|>
ratings_dat = open('../data/movielens-1m/users.dat', 'r')
ratings_csv = open('../data/movielens-1m/users.txt', 'w')
for line in ratings_dat:
arr = line.split('::')
new_line = '\t'.join(arr)
ratings_csv.write(new_line)
ratings_dat.close()
ratings_csv.close()
<|reserved_special_token_1|>
#!/usr/bin/env python
# script :: creating a datamodel that fits mahout from ratings.dat
#
# Rewrites MovieLens-1M users.dat ('::'-separated) as a tab-separated file.

# Context managers guarantee both files are closed even if a read/write
# fails part-way through (the original leaked handles on error).
with open('../data/movielens-1m/users.dat', 'r') as ratings_dat, \
        open('../data/movielens-1m/users.txt', 'w') as ratings_csv:
    for line in ratings_dat:
        # '::' -> tab; the trailing newline survives the split/join.
        ratings_csv.write('\t'.join(line.split('::')))
|
flexible
|
{
"blob_id": "2dd59681a0dcb5d3f1143385100c09c7783babf4",
"index": 76,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nfor line in ratings_dat:\n arr = line.split('::')\n new_line = '\\t'.join(arr)\n ratings_csv.write(new_line)\nratings_dat.close()\nratings_csv.close()\n",
"step-3": "ratings_dat = open('../data/movielens-1m/users.dat', 'r')\nratings_csv = open('../data/movielens-1m/users.txt', 'w')\nfor line in ratings_dat:\n arr = line.split('::')\n new_line = '\\t'.join(arr)\n ratings_csv.write(new_line)\nratings_dat.close()\nratings_csv.close()\n",
"step-4": "#!/usr/bin/env python\n# script :: creating a datamodel that fits mahout from ratings.dat\n\n\n\nratings_dat = open('../data/movielens-1m/users.dat', 'r')\nratings_csv = open('../data/movielens-1m/users.txt', 'w')\n\nfor line in ratings_dat:\n\tarr = line.split('::')\n\tnew_line = '\\t'.join(arr)\n\n\tratings_csv.write(new_line)\n\nratings_dat.close()\nratings_csv.close()\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Migration(migrations.Migration):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Migration(migrations.Migration):
dependencies = [('api', '0002_auto_20170308_1949')]
operations = [migrations.AlterField(model_name='deck', name=
'description', field=models.TextField(default=''))]
<|reserved_special_token_1|>
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [('api', '0002_auto_20170308_1949')]
operations = [migrations.AlterField(model_name='deck', name=
'description', field=models.TextField(default=''))]
<|reserved_special_token_1|>
# -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-03-26 16:51
from __future__ import unicode_literals

from django.db import migrations, models


class Migration(migrations.Migration):
    """Redefine Deck.description as a TextField with an empty-string default."""

    dependencies = [
        ('api', '0002_auto_20170308_1949'),
    ]

    operations = [
        migrations.AlterField(
            model_name='deck',
            name='description',
            field=models.TextField(default=''),
        ),
    ]
|
flexible
|
{
"blob_id": "bf3b529f8f06619c94d2dfca283df086466af4ea",
"index": 5027,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass Migration(migrations.Migration):\n <mask token>\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass Migration(migrations.Migration):\n dependencies = [('api', '0002_auto_20170308_1949')]\n operations = [migrations.AlterField(model_name='deck', name=\n 'description', field=models.TextField(default=''))]\n",
"step-4": "from __future__ import unicode_literals\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n dependencies = [('api', '0002_auto_20170308_1949')]\n operations = [migrations.AlterField(model_name='deck', name=\n 'description', field=models.TextField(default=''))]\n",
"step-5": "# -*- coding: utf-8 -*-\n# Generated by Django 1.10.5 on 2017-03-26 16:51\nfrom __future__ import unicode_literals\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('api', '0002_auto_20170308_1949'),\n ]\n\n operations = [\n migrations.AlterField(\n model_name='deck',\n name='description',\n field=models.TextField(default=''),\n ),\n ]\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
import webbrowser
import time
total = 3
count = 0
while count < total:
webbrowser.open('https://www.youtube.com/watch?v=GoSBNNgf_Vc')
time.sleep(5 * 60 * 60)
count += 1
|
normal
|
{
"blob_id": "e11a04cad967ae377449aab8b12bfde23e403335",
"index": 8391,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nwhile count < total:\n webbrowser.open('https://www.youtube.com/watch?v=GoSBNNgf_Vc')\n time.sleep(5 * 60 * 60)\n count += 1\n",
"step-3": "<mask token>\ntotal = 3\ncount = 0\nwhile count < total:\n webbrowser.open('https://www.youtube.com/watch?v=GoSBNNgf_Vc')\n time.sleep(5 * 60 * 60)\n count += 1\n",
"step-4": "import webbrowser\nimport time\ntotal = 3\ncount = 0\nwhile count < total:\n webbrowser.open('https://www.youtube.com/watch?v=GoSBNNgf_Vc')\n time.sleep(5 * 60 * 60)\n count += 1\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
def task5(arr):
for row in arr:
moneyGranted[int(row[1]) - 2015][int(row[3]) - 1] += int(row[4])
moneyRequested[int(row[1]) - 2015][int(row[3]) - 1] += int(row[5])
for i in range(6):
for j in range(5):
if moneyRequested[i][j] == 0:
print(i + 2015, ',', category[j], ':', '0.0%')
else:
perFull[i][j] = round(moneyGranted[i][j] / moneyRequested[i
][j] * 100, 2)
print(i + 2015, ',', category[j], ':', perFull[i][j], '%')
for i in range(6):
graphTitle = 'Percentage fulfilled for each category in ' + str(i +
2015)
plt.title(graphTitle)
plt.bar(category, perFull[i])
plt.show()
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def task5(arr):
for row in arr:
moneyGranted[int(row[1]) - 2015][int(row[3]) - 1] += int(row[4])
moneyRequested[int(row[1]) - 2015][int(row[3]) - 1] += int(row[5])
for i in range(6):
for j in range(5):
if moneyRequested[i][j] == 0:
print(i + 2015, ',', category[j], ':', '0.0%')
else:
perFull[i][j] = round(moneyGranted[i][j] / moneyRequested[i
][j] * 100, 2)
print(i + 2015, ',', category[j], ':', perFull[i][j], '%')
for i in range(6):
graphTitle = 'Percentage fulfilled for each category in ' + str(i +
2015)
plt.title(graphTitle)
plt.bar(category, perFull[i])
plt.show()
with open('CEL_HistoricalGrantInformation_2014-7Oct2020_CSV.csv', newline=''
) as csvfile:
reader = csv.DictReader(csvfile)
for row in reader:
arr = np.append(arr, np.array([[row['organization_id'], int(row[
'year_id']), row['process_id'], int(row['area_id']), int(row[
'awarded_id']), int(row['requested_id'])]]), axis=0)
task5(arr)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
category = ['Ecological Well-being', 'Health & Human Services',
'Arts & Culture', 'Community Building', 'Environment']
arr = np.empty((0, 6), str)
moneyGranted = [([0] * 5) for _ in range(6)]
moneyRequested = [([0] * 5) for _ in range(6)]
perFull = [([0] * 5) for _ in range(6)]
def task5(arr):
for row in arr:
moneyGranted[int(row[1]) - 2015][int(row[3]) - 1] += int(row[4])
moneyRequested[int(row[1]) - 2015][int(row[3]) - 1] += int(row[5])
for i in range(6):
for j in range(5):
if moneyRequested[i][j] == 0:
print(i + 2015, ',', category[j], ':', '0.0%')
else:
perFull[i][j] = round(moneyGranted[i][j] / moneyRequested[i
][j] * 100, 2)
print(i + 2015, ',', category[j], ':', perFull[i][j], '%')
for i in range(6):
graphTitle = 'Percentage fulfilled for each category in ' + str(i +
2015)
plt.title(graphTitle)
plt.bar(category, perFull[i])
plt.show()
with open('CEL_HistoricalGrantInformation_2014-7Oct2020_CSV.csv', newline=''
) as csvfile:
reader = csv.DictReader(csvfile)
for row in reader:
arr = np.append(arr, np.array([[row['organization_id'], int(row[
'year_id']), row['process_id'], int(row['area_id']), int(row[
'awarded_id']), int(row['requested_id'])]]), axis=0)
task5(arr)
<|reserved_special_token_1|>
import numpy as np
import matplotlib.pyplot as plt
import csv
category = ['Ecological Well-being', 'Health & Human Services',
'Arts & Culture', 'Community Building', 'Environment']
arr = np.empty((0, 6), str)
moneyGranted = [([0] * 5) for _ in range(6)]
moneyRequested = [([0] * 5) for _ in range(6)]
perFull = [([0] * 5) for _ in range(6)]
def task5(arr):
for row in arr:
moneyGranted[int(row[1]) - 2015][int(row[3]) - 1] += int(row[4])
moneyRequested[int(row[1]) - 2015][int(row[3]) - 1] += int(row[5])
for i in range(6):
for j in range(5):
if moneyRequested[i][j] == 0:
print(i + 2015, ',', category[j], ':', '0.0%')
else:
perFull[i][j] = round(moneyGranted[i][j] / moneyRequested[i
][j] * 100, 2)
print(i + 2015, ',', category[j], ':', perFull[i][j], '%')
for i in range(6):
graphTitle = 'Percentage fulfilled for each category in ' + str(i +
2015)
plt.title(graphTitle)
plt.bar(category, perFull[i])
plt.show()
with open('CEL_HistoricalGrantInformation_2014-7Oct2020_CSV.csv', newline=''
) as csvfile:
reader = csv.DictReader(csvfile)
for row in reader:
arr = np.append(arr, np.array([[row['organization_id'], int(row[
'year_id']), row['process_id'], int(row['area_id']), int(row[
'awarded_id']), int(row['requested_id'])]]), axis=0)
task5(arr)
<|reserved_special_token_1|>
import numpy as np
import matplotlib.pyplot as plt
import csv
category = ["Ecological Well-being", "Health & Human Services", "Arts & Culture", "Community Building", "Environment"]
arr = np.empty((0, 6), str)
moneyGranted = [[0]*5 for _ in range(6)]
moneyRequested = [[0]*5 for _ in range(6)]
perFull = [[0]*5 for _ in range(6)]
def task5(arr): # function definition; be sure to add your task number after 'task'
# Write your code here
for row in arr:
moneyGranted[int(row[1])-2015][int(row[3])-1] += int(row[4])
moneyRequested[int(row[1])-2015][int(row[3])-1] += int(row[5])
for i in range(6):
for j in range(5):
if moneyRequested[i][j] == 0:
print(i+2015,",",category[j],":", "0.0%")
else:
perFull[i][j] = round((moneyGranted[i][j] / moneyRequested[i][j])*100, 2)
print(i+2015,",",category[j],":", perFull[i][j],"%")
for i in range(6):
graphTitle = "Percentage fulfilled for each category in " + str(i+2015)
plt.title(graphTitle)
plt.bar(category, perFull[i])
plt.show()
with open('CEL_HistoricalGrantInformation_2014-7Oct2020_CSV.csv', newline='') as csvfile: # reading the csv file
reader = csv.DictReader(csvfile)
for row in reader:
arr = np.append(arr, np.array([[row['organization_id'], int(row['year_id']), row['process_id'],
int(row['area_id']), int(row['awarded_id']), int(row['requested_id'])]]), axis=0)
#print(arr)
task5(arr)
|
flexible
|
{
"blob_id": "e7b2e716fbcaf761e119003000bf1b16af57a2b7",
"index": 7009,
"step-1": "<mask token>\n\n\ndef task5(arr):\n for row in arr:\n moneyGranted[int(row[1]) - 2015][int(row[3]) - 1] += int(row[4])\n moneyRequested[int(row[1]) - 2015][int(row[3]) - 1] += int(row[5])\n for i in range(6):\n for j in range(5):\n if moneyRequested[i][j] == 0:\n print(i + 2015, ',', category[j], ':', '0.0%')\n else:\n perFull[i][j] = round(moneyGranted[i][j] / moneyRequested[i\n ][j] * 100, 2)\n print(i + 2015, ',', category[j], ':', perFull[i][j], '%')\n for i in range(6):\n graphTitle = 'Percentage fulfilled for each category in ' + str(i +\n 2015)\n plt.title(graphTitle)\n plt.bar(category, perFull[i])\n plt.show()\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef task5(arr):\n for row in arr:\n moneyGranted[int(row[1]) - 2015][int(row[3]) - 1] += int(row[4])\n moneyRequested[int(row[1]) - 2015][int(row[3]) - 1] += int(row[5])\n for i in range(6):\n for j in range(5):\n if moneyRequested[i][j] == 0:\n print(i + 2015, ',', category[j], ':', '0.0%')\n else:\n perFull[i][j] = round(moneyGranted[i][j] / moneyRequested[i\n ][j] * 100, 2)\n print(i + 2015, ',', category[j], ':', perFull[i][j], '%')\n for i in range(6):\n graphTitle = 'Percentage fulfilled for each category in ' + str(i +\n 2015)\n plt.title(graphTitle)\n plt.bar(category, perFull[i])\n plt.show()\n\n\nwith open('CEL_HistoricalGrantInformation_2014-7Oct2020_CSV.csv', newline=''\n ) as csvfile:\n reader = csv.DictReader(csvfile)\n for row in reader:\n arr = np.append(arr, np.array([[row['organization_id'], int(row[\n 'year_id']), row['process_id'], int(row['area_id']), int(row[\n 'awarded_id']), int(row['requested_id'])]]), axis=0)\ntask5(arr)\n",
"step-3": "<mask token>\ncategory = ['Ecological Well-being', 'Health & Human Services',\n 'Arts & Culture', 'Community Building', 'Environment']\narr = np.empty((0, 6), str)\nmoneyGranted = [([0] * 5) for _ in range(6)]\nmoneyRequested = [([0] * 5) for _ in range(6)]\nperFull = [([0] * 5) for _ in range(6)]\n\n\ndef task5(arr):\n for row in arr:\n moneyGranted[int(row[1]) - 2015][int(row[3]) - 1] += int(row[4])\n moneyRequested[int(row[1]) - 2015][int(row[3]) - 1] += int(row[5])\n for i in range(6):\n for j in range(5):\n if moneyRequested[i][j] == 0:\n print(i + 2015, ',', category[j], ':', '0.0%')\n else:\n perFull[i][j] = round(moneyGranted[i][j] / moneyRequested[i\n ][j] * 100, 2)\n print(i + 2015, ',', category[j], ':', perFull[i][j], '%')\n for i in range(6):\n graphTitle = 'Percentage fulfilled for each category in ' + str(i +\n 2015)\n plt.title(graphTitle)\n plt.bar(category, perFull[i])\n plt.show()\n\n\nwith open('CEL_HistoricalGrantInformation_2014-7Oct2020_CSV.csv', newline=''\n ) as csvfile:\n reader = csv.DictReader(csvfile)\n for row in reader:\n arr = np.append(arr, np.array([[row['organization_id'], int(row[\n 'year_id']), row['process_id'], int(row['area_id']), int(row[\n 'awarded_id']), int(row['requested_id'])]]), axis=0)\ntask5(arr)\n",
"step-4": "import numpy as np\nimport matplotlib.pyplot as plt\nimport csv\ncategory = ['Ecological Well-being', 'Health & Human Services',\n 'Arts & Culture', 'Community Building', 'Environment']\narr = np.empty((0, 6), str)\nmoneyGranted = [([0] * 5) for _ in range(6)]\nmoneyRequested = [([0] * 5) for _ in range(6)]\nperFull = [([0] * 5) for _ in range(6)]\n\n\ndef task5(arr):\n for row in arr:\n moneyGranted[int(row[1]) - 2015][int(row[3]) - 1] += int(row[4])\n moneyRequested[int(row[1]) - 2015][int(row[3]) - 1] += int(row[5])\n for i in range(6):\n for j in range(5):\n if moneyRequested[i][j] == 0:\n print(i + 2015, ',', category[j], ':', '0.0%')\n else:\n perFull[i][j] = round(moneyGranted[i][j] / moneyRequested[i\n ][j] * 100, 2)\n print(i + 2015, ',', category[j], ':', perFull[i][j], '%')\n for i in range(6):\n graphTitle = 'Percentage fulfilled for each category in ' + str(i +\n 2015)\n plt.title(graphTitle)\n plt.bar(category, perFull[i])\n plt.show()\n\n\nwith open('CEL_HistoricalGrantInformation_2014-7Oct2020_CSV.csv', newline=''\n ) as csvfile:\n reader = csv.DictReader(csvfile)\n for row in reader:\n arr = np.append(arr, np.array([[row['organization_id'], int(row[\n 'year_id']), row['process_id'], int(row['area_id']), int(row[\n 'awarded_id']), int(row['requested_id'])]]), axis=0)\ntask5(arr)\n",
"step-5": "import numpy as np\r\nimport matplotlib.pyplot as plt\r\nimport csv\r\n\r\ncategory = [\"Ecological Well-being\", \"Health & Human Services\", \"Arts & Culture\", \"Community Building\", \"Environment\"]\r\narr = np.empty((0, 6), str)\r\nmoneyGranted = [[0]*5 for _ in range(6)]\r\nmoneyRequested = [[0]*5 for _ in range(6)]\r\nperFull = [[0]*5 for _ in range(6)]\r\n\r\n\r\ndef task5(arr): # function definition; be sure to add your task number after 'task'\r\n # Write your code here\r\n\r\n for row in arr:\r\n moneyGranted[int(row[1])-2015][int(row[3])-1] += int(row[4])\r\n moneyRequested[int(row[1])-2015][int(row[3])-1] += int(row[5]) \r\n \r\n for i in range(6):\r\n for j in range(5):\r\n if moneyRequested[i][j] == 0:\r\n print(i+2015,\",\",category[j],\":\", \"0.0%\")\r\n else:\r\n perFull[i][j] = round((moneyGranted[i][j] / moneyRequested[i][j])*100, 2)\r\n print(i+2015,\",\",category[j],\":\", perFull[i][j],\"%\")\r\n for i in range(6):\r\n graphTitle = \"Percentage fulfilled for each category in \" + str(i+2015) \r\n plt.title(graphTitle) \r\n plt.bar(category, perFull[i]) \r\n plt.show() \r\n\r\n \r\n\r\nwith open('CEL_HistoricalGrantInformation_2014-7Oct2020_CSV.csv', newline='') as csvfile: # reading the csv file\r\n reader = csv.DictReader(csvfile)\r\n for row in reader:\r\n arr = np.append(arr, np.array([[row['organization_id'], int(row['year_id']), row['process_id'],\r\n int(row['area_id']), int(row['awarded_id']), int(row['requested_id'])]]), axis=0)\r\n\r\n #print(arr)\r\n\r\ntask5(arr)\r\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
<|reserved_special_token_0|>
class GameObject(pygame.sprite.Sprite):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
class Food(gameobject.GameObject):
def __init__(self, x, y, surface, time=random.randint(0, 50)):
super(Food, self).__init__(x, y, surface)
self.dead = False
self.SIZE = gameobject.GameObject.SIZE
self.image = pygame.Surface((2 * self.SIZE, 2 * self.SIZE), flags=
SRCALPHA)
self.image.convert()
self.rect = pygame.draw.circle(self.image, pygame.Color('blue'), (
self.SIZE, self.SIZE), self.SIZE / 2 + 2)
self.rect.midtop = x, y
def update(self):
pass
def collide(self, main, other):
if not other == self and not self.dead:
self.dead = True
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class GameObject(pygame.sprite.Sprite):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def getDistance(self, other):
return abs(self.x - other.x) + abs(self.y - other.y)
def collide(self, main, other):
pass
<|reserved_special_token_0|>
class Food(gameobject.GameObject):
def __init__(self, x, y, surface, time=random.randint(0, 50)):
super(Food, self).__init__(x, y, surface)
self.dead = False
self.SIZE = gameobject.GameObject.SIZE
self.image = pygame.Surface((2 * self.SIZE, 2 * self.SIZE), flags=
SRCALPHA)
self.image.convert()
self.rect = pygame.draw.circle(self.image, pygame.Color('blue'), (
self.SIZE, self.SIZE), self.SIZE / 2 + 2)
self.rect.midtop = x, y
def update(self):
pass
def collide(self, main, other):
if not other == self and not self.dead:
self.dead = True
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class GameObject(pygame.sprite.Sprite):
<|reserved_special_token_0|>
def __init__(self, x, y, surface):
super(GameObject, self).__init__()
self.x = x
self.y = y
self.surface = surface
def getDistance(self, other):
return abs(self.x - other.x) + abs(self.y - other.y)
def collide(self, main, other):
pass
<|reserved_special_token_0|>
class Food(gameobject.GameObject):
def __init__(self, x, y, surface, time=random.randint(0, 50)):
super(Food, self).__init__(x, y, surface)
self.dead = False
self.SIZE = gameobject.GameObject.SIZE
self.image = pygame.Surface((2 * self.SIZE, 2 * self.SIZE), flags=
SRCALPHA)
self.image.convert()
self.rect = pygame.draw.circle(self.image, pygame.Color('blue'), (
self.SIZE, self.SIZE), self.SIZE / 2 + 2)
self.rect.midtop = x, y
def update(self):
pass
def collide(self, main, other):
if not other == self and not self.dead:
self.dead = True
<|reserved_special_token_1|>
import pygame
import random
from pygame.locals import *
import pygame
from pygame.locals import *
class GameObject(pygame.sprite.Sprite):
SIZE = 8
def __init__(self, x, y, surface):
super(GameObject, self).__init__()
self.x = x
self.y = y
self.surface = surface
def getDistance(self, other):
return abs(self.x - other.x) + abs(self.y - other.y)
def collide(self, main, other):
pass
import gameobject
class Food(gameobject.GameObject):
def __init__(self, x, y, surface, time=random.randint(0, 50)):
super(Food, self).__init__(x, y, surface)
self.dead = False
self.SIZE = gameobject.GameObject.SIZE
self.image = pygame.Surface((2 * self.SIZE, 2 * self.SIZE), flags=
SRCALPHA)
self.image.convert()
self.rect = pygame.draw.circle(self.image, pygame.Color('blue'), (
self.SIZE, self.SIZE), self.SIZE / 2 + 2)
self.rect.midtop = x, y
def update(self):
pass
def collide(self, main, other):
if not other == self and not self.dead:
self.dead = True
<|reserved_special_token_1|>
import pygame
import random
from pygame.locals import *
import pygame
from pygame.locals import *
class GameObject(pygame.sprite.Sprite):
SIZE = 8
def __init__(self, x, y, surface):
super(GameObject, self).__init__()
self.x = x
self.y = y
self.surface = surface
def getDistance(self, other):
return abs(self.x-other.x) + abs(self.y - other.y)
def collide(self, main, other):
pass
import gameobject
class Food(gameobject.GameObject):
def __init__(self, x, y, surface, time = random.randint(0, 50)):
super(Food, self).__init__(x,y,surface)
self.dead = False
self.SIZE = gameobject.GameObject.SIZE
self.image = pygame.Surface((2*self.SIZE, 2*self.SIZE),
flags = SRCALPHA)
self.image.convert()
self.rect = pygame.draw.circle(self.image,
pygame.Color("blue"),
(self.SIZE,self.SIZE), self.SIZE/2+2)
self.rect.midtop = (x,y)
def update(self):
pass
# self.rect.midtop = (self.x, self.y)
def collide(self, main, other):
if not other == self and not self.dead:
self.dead = True
|
flexible
|
{
"blob_id": "c589ce4ba2ae60d14787a8939146f6140fff1f01",
"index": 7914,
"step-1": "<mask token>\n\n\nclass GameObject(pygame.sprite.Sprite):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n\n<mask token>\n\n\nclass Food(gameobject.GameObject):\n\n def __init__(self, x, y, surface, time=random.randint(0, 50)):\n super(Food, self).__init__(x, y, surface)\n self.dead = False\n self.SIZE = gameobject.GameObject.SIZE\n self.image = pygame.Surface((2 * self.SIZE, 2 * self.SIZE), flags=\n SRCALPHA)\n self.image.convert()\n self.rect = pygame.draw.circle(self.image, pygame.Color('blue'), (\n self.SIZE, self.SIZE), self.SIZE / 2 + 2)\n self.rect.midtop = x, y\n\n def update(self):\n pass\n\n def collide(self, main, other):\n if not other == self and not self.dead:\n self.dead = True\n",
"step-2": "<mask token>\n\n\nclass GameObject(pygame.sprite.Sprite):\n <mask token>\n <mask token>\n\n def getDistance(self, other):\n return abs(self.x - other.x) + abs(self.y - other.y)\n\n def collide(self, main, other):\n pass\n\n\n<mask token>\n\n\nclass Food(gameobject.GameObject):\n\n def __init__(self, x, y, surface, time=random.randint(0, 50)):\n super(Food, self).__init__(x, y, surface)\n self.dead = False\n self.SIZE = gameobject.GameObject.SIZE\n self.image = pygame.Surface((2 * self.SIZE, 2 * self.SIZE), flags=\n SRCALPHA)\n self.image.convert()\n self.rect = pygame.draw.circle(self.image, pygame.Color('blue'), (\n self.SIZE, self.SIZE), self.SIZE / 2 + 2)\n self.rect.midtop = x, y\n\n def update(self):\n pass\n\n def collide(self, main, other):\n if not other == self and not self.dead:\n self.dead = True\n",
"step-3": "<mask token>\n\n\nclass GameObject(pygame.sprite.Sprite):\n <mask token>\n\n def __init__(self, x, y, surface):\n super(GameObject, self).__init__()\n self.x = x\n self.y = y\n self.surface = surface\n\n def getDistance(self, other):\n return abs(self.x - other.x) + abs(self.y - other.y)\n\n def collide(self, main, other):\n pass\n\n\n<mask token>\n\n\nclass Food(gameobject.GameObject):\n\n def __init__(self, x, y, surface, time=random.randint(0, 50)):\n super(Food, self).__init__(x, y, surface)\n self.dead = False\n self.SIZE = gameobject.GameObject.SIZE\n self.image = pygame.Surface((2 * self.SIZE, 2 * self.SIZE), flags=\n SRCALPHA)\n self.image.convert()\n self.rect = pygame.draw.circle(self.image, pygame.Color('blue'), (\n self.SIZE, self.SIZE), self.SIZE / 2 + 2)\n self.rect.midtop = x, y\n\n def update(self):\n pass\n\n def collide(self, main, other):\n if not other == self and not self.dead:\n self.dead = True\n",
"step-4": "import pygame\nimport random\nfrom pygame.locals import *\nimport pygame\nfrom pygame.locals import *\n\n\nclass GameObject(pygame.sprite.Sprite):\n SIZE = 8\n\n def __init__(self, x, y, surface):\n super(GameObject, self).__init__()\n self.x = x\n self.y = y\n self.surface = surface\n\n def getDistance(self, other):\n return abs(self.x - other.x) + abs(self.y - other.y)\n\n def collide(self, main, other):\n pass\n\n\nimport gameobject\n\n\nclass Food(gameobject.GameObject):\n\n def __init__(self, x, y, surface, time=random.randint(0, 50)):\n super(Food, self).__init__(x, y, surface)\n self.dead = False\n self.SIZE = gameobject.GameObject.SIZE\n self.image = pygame.Surface((2 * self.SIZE, 2 * self.SIZE), flags=\n SRCALPHA)\n self.image.convert()\n self.rect = pygame.draw.circle(self.image, pygame.Color('blue'), (\n self.SIZE, self.SIZE), self.SIZE / 2 + 2)\n self.rect.midtop = x, y\n\n def update(self):\n pass\n\n def collide(self, main, other):\n if not other == self and not self.dead:\n self.dead = True\n",
"step-5": "import pygame\nimport random\n \nfrom pygame.locals import *\nimport pygame\n \nfrom pygame.locals import *\n \nclass GameObject(pygame.sprite.Sprite):\n SIZE = 8\n def __init__(self, x, y, surface):\n super(GameObject, self).__init__()\n self.x = x\n self.y = y\n self.surface = surface\n \n \n def getDistance(self, other):\n return abs(self.x-other.x) + abs(self.y - other.y)\n \n def collide(self, main, other): \n pass\nimport gameobject\n\n \nclass Food(gameobject.GameObject):\n \n def __init__(self, x, y, surface, time = random.randint(0, 50)):\n super(Food, self).__init__(x,y,surface)\n self.dead = False\n self.SIZE = gameobject.GameObject.SIZE\n self.image = pygame.Surface((2*self.SIZE, 2*self.SIZE),\n flags = SRCALPHA)\n self.image.convert()\n \n self.rect = pygame.draw.circle(self.image,\n pygame.Color(\"blue\"),\n (self.SIZE,self.SIZE), self.SIZE/2+2)\n \n \n self.rect.midtop = (x,y)\n \n def update(self):\n pass\n # self.rect.midtop = (self.x, self.y)\n \n def collide(self, main, other):\n if not other == self and not self.dead: \n self.dead = True\n",
"step-ids": [
5,
7,
8,
10,
11
]
}
|
[
5,
7,
8,
10,
11
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
if USE_MEMMAP:
Xmm = np.memmap('X.mmap', dtype=X.dtype, mode='w+', shape=X.shape)
ymm = np.memmap('y.mmap', dtype=y.dtype, mode='w+', shape=y.shape)
np.copyto(Xmm, X)
np.copyto(ymm, y)
del data
del X
del y
X = Xmm
y = ymm
<|reserved_special_token_0|>
model.load_model('xgb-model.bin')
<|reserved_special_token_0|>
print(cm)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
USE_MEMMAP = True
data = pd.read_csv('dataset.csv').as_matrix()
X = data[:, 0:-1]
y = data[:, -1]
if USE_MEMMAP:
Xmm = np.memmap('X.mmap', dtype=X.dtype, mode='w+', shape=X.shape)
ymm = np.memmap('y.mmap', dtype=y.dtype, mode='w+', shape=y.shape)
np.copyto(Xmm, X)
np.copyto(ymm, y)
del data
del X
del y
X = Xmm
y = ymm
d = xgb.DMatrix(X, label=y)
model = xgb.Booster({'nthread': 1})
model.load_model('xgb-model.bin')
cm = confusion_matrix(y, model.predict(d) > 0.5)
print(cm)
<|reserved_special_token_1|>
import numpy as np
import pandas as pd
import xgboost as xgb
from sklearn.metrics import confusion_matrix
USE_MEMMAP = True
data = pd.read_csv('dataset.csv').as_matrix()
X = data[:, 0:-1]
y = data[:, -1]
if USE_MEMMAP:
Xmm = np.memmap('X.mmap', dtype=X.dtype, mode='w+', shape=X.shape)
ymm = np.memmap('y.mmap', dtype=y.dtype, mode='w+', shape=y.shape)
np.copyto(Xmm, X)
np.copyto(ymm, y)
del data
del X
del y
X = Xmm
y = ymm
d = xgb.DMatrix(X, label=y)
model = xgb.Booster({'nthread': 1})
model.load_model('xgb-model.bin')
cm = confusion_matrix(y, model.predict(d) > 0.5)
print(cm)
<|reserved_special_token_1|>
import numpy as np
import pandas as pd
import xgboost as xgb
from sklearn.metrics import confusion_matrix
USE_MEMMAP = True
data = pd.read_csv( 'dataset.csv' ).as_matrix()
X = data[ :, 0:-1 ]
y = data[ :, -1 ]
if USE_MEMMAP:
Xmm = np.memmap( 'X.mmap', dtype=X.dtype, mode='w+', shape=X.shape )
ymm = np.memmap( 'y.mmap', dtype=y.dtype, mode='w+', shape=y.shape )
np.copyto( Xmm, X )
np.copyto( ymm, y )
del( data )
del( X )
del( y )
X = Xmm
y = ymm
d = xgb.DMatrix( X, label=y )
model = xgb.Booster({'nthread':1})
model.load_model('xgb-model.bin')
cm = confusion_matrix(y, model.predict(d) > 0.5)
print(cm)
|
flexible
|
{
"blob_id": "e2682a5cab95914e7567431cb04c3fb542eda3bf",
"index": 4353,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nif USE_MEMMAP:\n Xmm = np.memmap('X.mmap', dtype=X.dtype, mode='w+', shape=X.shape)\n ymm = np.memmap('y.mmap', dtype=y.dtype, mode='w+', shape=y.shape)\n np.copyto(Xmm, X)\n np.copyto(ymm, y)\n del data\n del X\n del y\n X = Xmm\n y = ymm\n<mask token>\nmodel.load_model('xgb-model.bin')\n<mask token>\nprint(cm)\n",
"step-3": "<mask token>\nUSE_MEMMAP = True\ndata = pd.read_csv('dataset.csv').as_matrix()\nX = data[:, 0:-1]\ny = data[:, -1]\nif USE_MEMMAP:\n Xmm = np.memmap('X.mmap', dtype=X.dtype, mode='w+', shape=X.shape)\n ymm = np.memmap('y.mmap', dtype=y.dtype, mode='w+', shape=y.shape)\n np.copyto(Xmm, X)\n np.copyto(ymm, y)\n del data\n del X\n del y\n X = Xmm\n y = ymm\nd = xgb.DMatrix(X, label=y)\nmodel = xgb.Booster({'nthread': 1})\nmodel.load_model('xgb-model.bin')\ncm = confusion_matrix(y, model.predict(d) > 0.5)\nprint(cm)\n",
"step-4": "import numpy as np\nimport pandas as pd\nimport xgboost as xgb\nfrom sklearn.metrics import confusion_matrix\nUSE_MEMMAP = True\ndata = pd.read_csv('dataset.csv').as_matrix()\nX = data[:, 0:-1]\ny = data[:, -1]\nif USE_MEMMAP:\n Xmm = np.memmap('X.mmap', dtype=X.dtype, mode='w+', shape=X.shape)\n ymm = np.memmap('y.mmap', dtype=y.dtype, mode='w+', shape=y.shape)\n np.copyto(Xmm, X)\n np.copyto(ymm, y)\n del data\n del X\n del y\n X = Xmm\n y = ymm\nd = xgb.DMatrix(X, label=y)\nmodel = xgb.Booster({'nthread': 1})\nmodel.load_model('xgb-model.bin')\ncm = confusion_matrix(y, model.predict(d) > 0.5)\nprint(cm)\n",
"step-5": "import numpy as np\nimport pandas as pd\nimport xgboost as xgb\n\nfrom sklearn.metrics import confusion_matrix\n\n\nUSE_MEMMAP = True\n\n\ndata = pd.read_csv( 'dataset.csv' ).as_matrix()\n\nX = data[ :, 0:-1 ]\ny = data[ :, -1 ]\n\nif USE_MEMMAP:\n\tXmm = np.memmap( 'X.mmap', dtype=X.dtype, mode='w+', shape=X.shape )\n\tymm = np.memmap( 'y.mmap', dtype=y.dtype, mode='w+', shape=y.shape )\n\tnp.copyto( Xmm, X )\n\tnp.copyto( ymm, y )\n\tdel( data )\n\tdel( X )\n\tdel( y )\n\tX = Xmm\n\ty = ymm\n\nd = xgb.DMatrix( X, label=y )\n\nmodel = xgb.Booster({'nthread':1})\nmodel.load_model('xgb-model.bin')\ncm = confusion_matrix(y, model.predict(d) > 0.5)\nprint(cm)\n\n\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
def sampling(args):
"""Reparameterization trick by sampling fr an isotropic unit Gaussian.
# Arguments
args (tensor): mean and log of variance of Q(z|X)
# Returns
z (tensor): sampled latent vector
"""
z_mean, z_log_var = args
batch = K.shape(z_mean)[0]
dim = K.int_shape(z_mean)[1]
epsilon = K.random_normal(shape=(batch, dim))
return z_mean + K.exp(0.5 * z_log_var) * epsilon
<|reserved_special_token_0|>
def construct_vae(image_size, kernel_size, latent_dim):
input_shape = image_size[0], image_size[1], 1
inputs = Input(shape=input_shape, name='encoder_input')
x = inputs
x = Conv2D(filters=16, kernel_size=kernel_size, activation='relu',
strides=1, padding='same')(x)
x = Conv2D(filters=32, kernel_size=kernel_size, activation='relu',
strides=2, padding='same')(x)
x = Conv2D(filters=64, kernel_size=kernel_size, activation='relu',
strides=1, padding='same')(x)
shape = K.int_shape(x)
x = Flatten()(x)
x = Dense(16, activation='relu')(x)
z_mean = Dense(latent_dim, name='z_mean')(x)
z_log_var = Dense(latent_dim, name='z_log_var')(x)
z = Lambda(sampling, output_shape=(latent_dim,), name='z')([z_mean,
z_log_var])
encoder = Model(inputs, [z_mean, z_log_var, z], name='encoder')
encoder.summary()
plot_model(encoder, to_file='vae_cnn_encoder.png', show_shapes=True)
latent_inputs = Input(shape=(latent_dim,), name='z_sampling')
x = Dense(shape[1] * shape[2] * shape[3], activation='relu')(latent_inputs)
x = Reshape((shape[1], shape[2], shape[3]))(x)
x = Conv2DTranspose(filters=64, kernel_size=kernel_size, activation=
'relu', strides=1, padding='same')(x)
x = Conv2DTranspose(filters=32, kernel_size=kernel_size, activation=
'relu', strides=2, padding='same')(x)
x = Conv2DTranspose(filters=16, kernel_size=kernel_size, activation=
'relu', strides=1, padding='same')(x)
outputs = Conv2DTranspose(filters=1, kernel_size=kernel_size,
activation='sigmoid', padding='same', name='decoder_output')(x)
decoder = Model(latent_inputs, outputs, name='decoder')
decoder.summary()
plot_model(decoder, to_file='vae_cnn_decoder.png', show_shapes=True)
outputs = decoder(encoder(inputs)[2])
vae = Model(inputs, outputs, name='vae')
reconstruction_loss = binary_crossentropy(K.flatten(inputs), K.flatten(
outputs))
reconstruction_loss *= image_size[0] * image_size[1]
kl_loss = 1 + z_log_var - K.square(z_mean) - K.exp(z_log_var)
kl_loss = K.sum(kl_loss, axis=-1)
kl_loss *= -0.5
vae_loss = K.mean(reconstruction_loss + kl_loss)
vae.add_loss(vae_loss)
vae.compile(optimizer='rmsprop')
vae.summary()
plot_model(vae, to_file='vae_cnn.png', show_shapes=True)
return vae, encoder, decoder
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def sampling(args):
"""Reparameterization trick by sampling fr an isotropic unit Gaussian.
# Arguments
args (tensor): mean and log of variance of Q(z|X)
# Returns
z (tensor): sampled latent vector
"""
z_mean, z_log_var = args
batch = K.shape(z_mean)[0]
dim = K.int_shape(z_mean)[1]
epsilon = K.random_normal(shape=(batch, dim))
return z_mean + K.exp(0.5 * z_log_var) * epsilon
def process_data(data_path):
data = np.load(data_path)
X_train, X_test = train_test_split(data, test_size=0.05, random_state=42)
print('Shape train/test:', X_train.shape, X_test.shape)
image_size = X_train.shape[1], X_train.shape[2]
data = np.reshape(data, [-1, image_size[0], image_size[1], 1])
X_train = np.reshape(X_train, [-1, image_size[0], image_size[1], 1])
X_test = np.reshape(X_test, [-1, image_size[0], image_size[1], 1])
data = data.astype('float32') / 255
X_train = X_train.astype('float32') / 255
X_test = X_test.astype('float32') / 255
return data, X_train, X_test, image_size
def construct_vae(image_size, kernel_size, latent_dim):
input_shape = image_size[0], image_size[1], 1
inputs = Input(shape=input_shape, name='encoder_input')
x = inputs
x = Conv2D(filters=16, kernel_size=kernel_size, activation='relu',
strides=1, padding='same')(x)
x = Conv2D(filters=32, kernel_size=kernel_size, activation='relu',
strides=2, padding='same')(x)
x = Conv2D(filters=64, kernel_size=kernel_size, activation='relu',
strides=1, padding='same')(x)
shape = K.int_shape(x)
x = Flatten()(x)
x = Dense(16, activation='relu')(x)
z_mean = Dense(latent_dim, name='z_mean')(x)
z_log_var = Dense(latent_dim, name='z_log_var')(x)
z = Lambda(sampling, output_shape=(latent_dim,), name='z')([z_mean,
z_log_var])
encoder = Model(inputs, [z_mean, z_log_var, z], name='encoder')
encoder.summary()
plot_model(encoder, to_file='vae_cnn_encoder.png', show_shapes=True)
latent_inputs = Input(shape=(latent_dim,), name='z_sampling')
x = Dense(shape[1] * shape[2] * shape[3], activation='relu')(latent_inputs)
x = Reshape((shape[1], shape[2], shape[3]))(x)
x = Conv2DTranspose(filters=64, kernel_size=kernel_size, activation=
'relu', strides=1, padding='same')(x)
x = Conv2DTranspose(filters=32, kernel_size=kernel_size, activation=
'relu', strides=2, padding='same')(x)
x = Conv2DTranspose(filters=16, kernel_size=kernel_size, activation=
'relu', strides=1, padding='same')(x)
outputs = Conv2DTranspose(filters=1, kernel_size=kernel_size,
activation='sigmoid', padding='same', name='decoder_output')(x)
decoder = Model(latent_inputs, outputs, name='decoder')
decoder.summary()
plot_model(decoder, to_file='vae_cnn_decoder.png', show_shapes=True)
outputs = decoder(encoder(inputs)[2])
vae = Model(inputs, outputs, name='vae')
reconstruction_loss = binary_crossentropy(K.flatten(inputs), K.flatten(
outputs))
reconstruction_loss *= image_size[0] * image_size[1]
kl_loss = 1 + z_log_var - K.square(z_mean) - K.exp(z_log_var)
kl_loss = K.sum(kl_loss, axis=-1)
kl_loss *= -0.5
vae_loss = K.mean(reconstruction_loss + kl_loss)
vae.add_loss(vae_loss)
vae.compile(optimizer='rmsprop')
vae.summary()
plot_model(vae, to_file='vae_cnn.png', show_shapes=True)
return vae, encoder, decoder
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
K.clear_session()
np.random.seed(237)
def sampling(args):
    """Reparameterization trick: draw z ~ N(z_mean, exp(z_log_var)).

    # Arguments
        args (tensor): mean and log of variance of Q(z|X)
    # Returns
        z (tensor): sampled latent vector
    """
    mean, log_var = args
    # K.random_normal defaults to mean=0, std=1 (isotropic unit Gaussian).
    eps = K.random_normal(shape=(K.shape(mean)[0], K.int_shape(mean)[1]))
    return mean + K.exp(0.5 * log_var) * eps
def process_data(data_path):
    """Load frames from *data_path*, split 95/5 train/test, scale to [0, 1].

    Returns (data, X_train, X_test, image_size) where each array is float32
    with shape (n, height, width, 1) and image_size is (height, width).
    """
    raw = np.load(data_path)
    X_train, X_test = train_test_split(raw, test_size=0.05, random_state=42)
    print('Shape train/test:', X_train.shape, X_test.shape)
    image_size = X_train.shape[1], X_train.shape[2]

    def _prepare(arr):
        # Add a single channel axis and normalize 8-bit pixels to [0, 1].
        shaped = np.reshape(arr, [-1, image_size[0], image_size[1], 1])
        return shaped.astype('float32') / 255

    return _prepare(raw), _prepare(X_train), _prepare(X_test), image_size
def construct_vae(image_size, kernel_size, latent_dim):
    """Build a convolutional VAE for single-channel (h, w, 1) images.

    Returns (vae, encoder, decoder); the VAE is compiled with rmsprop and
    has the combined reconstruction + KL loss attached via add_loss().
    """
    input_shape = (image_size[0], image_size[1], 1)

    # --- Encoder: three conv layers, then dense heads for mean / log-var ---
    inputs = Input(shape=input_shape, name='encoder_input')
    h = inputs
    for n_filters, stride in ((16, 1), (32, 2), (64, 1)):
        h = Conv2D(filters=n_filters, kernel_size=kernel_size,
                   activation='relu', strides=stride, padding='same')(h)
    # Remember the conv output shape so the decoder can mirror it exactly.
    conv_shape = K.int_shape(h)
    h = Flatten()(h)
    h = Dense(16, activation='relu')(h)
    z_mean = Dense(latent_dim, name='z_mean')(h)
    z_log_var = Dense(latent_dim, name='z_log_var')(h)
    # Reparameterization trick keeps the sampling step differentiable.
    z = Lambda(sampling, output_shape=(latent_dim,), name='z')(
        [z_mean, z_log_var])
    encoder = Model(inputs, [z_mean, z_log_var, z], name='encoder')
    encoder.summary()
    plot_model(encoder, to_file='vae_cnn_encoder.png', show_shapes=True)

    # --- Decoder: mirror of the encoder using transposed convolutions ---
    latent_inputs = Input(shape=(latent_dim,), name='z_sampling')
    h = Dense(conv_shape[1] * conv_shape[2] * conv_shape[3],
              activation='relu')(latent_inputs)
    h = Reshape((conv_shape[1], conv_shape[2], conv_shape[3]))(h)
    for n_filters, stride in ((64, 1), (32, 2), (16, 1)):
        h = Conv2DTranspose(filters=n_filters, kernel_size=kernel_size,
                            activation='relu', strides=stride,
                            padding='same')(h)
    outputs = Conv2DTranspose(filters=1, kernel_size=kernel_size,
                              activation='sigmoid', padding='same',
                              name='decoder_output')(h)
    decoder = Model(latent_inputs, outputs, name='decoder')
    decoder.summary()
    plot_model(decoder, to_file='vae_cnn_decoder.png', show_shapes=True)

    # --- Full VAE: encoder output index 2 is the sampled latent z ---
    outputs = decoder(encoder(inputs)[2])
    vae = Model(inputs, outputs, name='vae')

    # Loss = per-pixel binary cross-entropy + KL divergence to N(0, I).
    reconstruction_loss = binary_crossentropy(K.flatten(inputs),
                                              K.flatten(outputs))
    reconstruction_loss *= image_size[0] * image_size[1]
    kl_loss = 1 + z_log_var - K.square(z_mean) - K.exp(z_log_var)
    kl_loss = K.sum(kl_loss, axis=-1)
    kl_loss *= -0.5
    vae_loss = K.mean(reconstruction_loss + kl_loss)
    vae.add_loss(vae_loss)
    vae.compile(optimizer='rmsprop')
    vae.summary()
    plot_model(vae, to_file='vae_cnn.png', show_shapes=True)
    return vae, encoder, decoder
if __name__ == '__main__':
    # Toggle between training a new model and loading saved weights.
    is_train = False
    data_file = '../data/out/moment_frames_5.npy'
    data, X_train, X_test, im_size = process_data(data_file)
    # Hyperparameters for the convolutional VAE.
    kernel_size = 3, 3
    latent_dim = 128
    batch_size = 128
    epochs = 10
    vae, encoder, decoder = construct_vae(im_size, kernel_size, latent_dim)
    if is_train:
        # Targets are None because the VAE loss was attached via add_loss().
        history = vae.fit(X_train, epochs=epochs, batch_size=batch_size,
            validation_data=(X_test, None), verbose=2)
        vae.save_weights('vae_cnn.h5')
        # Plot training vs. validation loss curves and persist the figure.
        plt.plot(history.history['loss'])
        plt.plot(history.history['val_loss'])
        plt.title('model loss')
        plt.ylabel('loss')
        plt.xlabel('epoch')
        plt.legend(['train', 'test'], loc='upper left')
        plt.savefig('vae_train.jpeg')
        plt.show()
    else:
        vae.load_weights('vae_cnn.h5')
        # Project the full dataset to the latent space; output 0 is z_mean.
        encoded_data = encoder.predict(data, batch_size=batch_size)
        pd.DataFrame(encoded_data[0]).to_csv('latest_rep_cnn.csv', index=None)
        print('Completed.')
<|reserved_special_token_1|>
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from keras.layers import Dense, Input
from keras.layers import Conv2D, Flatten, Lambda
from keras.layers import Reshape, Conv2DTranspose
from keras.models import Model
from keras.losses import mse, binary_crossentropy
from keras.utils import plot_model
from keras import backend as K
from sklearn.model_selection import train_test_split
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
# Reset any prior Keras graph/session state and fix NumPy's RNG seed so runs
# are reproducible.
K.clear_session()
np.random.seed(237)
def sampling(args):
    """Reparameterization trick: draw z ~ N(z_mean, exp(z_log_var)).

    # Arguments
        args (tensor): mean and log of variance of Q(z|X)
    # Returns
        z (tensor): sampled latent vector
    """
    mean, log_var = args
    # K.random_normal defaults to mean=0, std=1 (isotropic unit Gaussian).
    eps = K.random_normal(shape=(K.shape(mean)[0], K.int_shape(mean)[1]))
    return mean + K.exp(0.5 * log_var) * eps
def process_data(data_path):
    """Load frames from *data_path*, split 95/5 train/test, scale to [0, 1].

    Returns (data, X_train, X_test, image_size) where each array is float32
    with shape (n, height, width, 1) and image_size is (height, width).
    """
    raw = np.load(data_path)
    X_train, X_test = train_test_split(raw, test_size=0.05, random_state=42)
    print('Shape train/test:', X_train.shape, X_test.shape)
    image_size = X_train.shape[1], X_train.shape[2]

    def _prepare(arr):
        # Add a single channel axis and normalize 8-bit pixels to [0, 1].
        shaped = np.reshape(arr, [-1, image_size[0], image_size[1], 1])
        return shaped.astype('float32') / 255

    return _prepare(raw), _prepare(X_train), _prepare(X_test), image_size
def construct_vae(image_size, kernel_size, latent_dim):
    """Build a convolutional VAE for single-channel (h, w, 1) images.

    Returns (vae, encoder, decoder); the VAE is compiled with rmsprop and
    has the combined reconstruction + KL loss attached via add_loss().
    """
    input_shape = (image_size[0], image_size[1], 1)

    # --- Encoder: three conv layers, then dense heads for mean / log-var ---
    inputs = Input(shape=input_shape, name='encoder_input')
    h = inputs
    for n_filters, stride in ((16, 1), (32, 2), (64, 1)):
        h = Conv2D(filters=n_filters, kernel_size=kernel_size,
                   activation='relu', strides=stride, padding='same')(h)
    # Remember the conv output shape so the decoder can mirror it exactly.
    conv_shape = K.int_shape(h)
    h = Flatten()(h)
    h = Dense(16, activation='relu')(h)
    z_mean = Dense(latent_dim, name='z_mean')(h)
    z_log_var = Dense(latent_dim, name='z_log_var')(h)
    # Reparameterization trick keeps the sampling step differentiable.
    z = Lambda(sampling, output_shape=(latent_dim,), name='z')(
        [z_mean, z_log_var])
    encoder = Model(inputs, [z_mean, z_log_var, z], name='encoder')
    encoder.summary()
    plot_model(encoder, to_file='vae_cnn_encoder.png', show_shapes=True)

    # --- Decoder: mirror of the encoder using transposed convolutions ---
    latent_inputs = Input(shape=(latent_dim,), name='z_sampling')
    h = Dense(conv_shape[1] * conv_shape[2] * conv_shape[3],
              activation='relu')(latent_inputs)
    h = Reshape((conv_shape[1], conv_shape[2], conv_shape[3]))(h)
    for n_filters, stride in ((64, 1), (32, 2), (16, 1)):
        h = Conv2DTranspose(filters=n_filters, kernel_size=kernel_size,
                            activation='relu', strides=stride,
                            padding='same')(h)
    outputs = Conv2DTranspose(filters=1, kernel_size=kernel_size,
                              activation='sigmoid', padding='same',
                              name='decoder_output')(h)
    decoder = Model(latent_inputs, outputs, name='decoder')
    decoder.summary()
    plot_model(decoder, to_file='vae_cnn_decoder.png', show_shapes=True)

    # --- Full VAE: encoder output index 2 is the sampled latent z ---
    outputs = decoder(encoder(inputs)[2])
    vae = Model(inputs, outputs, name='vae')

    # Loss = per-pixel binary cross-entropy + KL divergence to N(0, I).
    reconstruction_loss = binary_crossentropy(K.flatten(inputs),
                                              K.flatten(outputs))
    reconstruction_loss *= image_size[0] * image_size[1]
    kl_loss = 1 + z_log_var - K.square(z_mean) - K.exp(z_log_var)
    kl_loss = K.sum(kl_loss, axis=-1)
    kl_loss *= -0.5
    vae_loss = K.mean(reconstruction_loss + kl_loss)
    vae.add_loss(vae_loss)
    vae.compile(optimizer='rmsprop')
    vae.summary()
    plot_model(vae, to_file='vae_cnn.png', show_shapes=True)
    return vae, encoder, decoder
if __name__ == '__main__':
    # Toggle between training a new model and loading saved weights.
    is_train = False
    data_file = '../data/out/moment_frames_5.npy'
    data, X_train, X_test, im_size = process_data(data_file)
    # Hyperparameters for the convolutional VAE.
    kernel_size = 3, 3
    latent_dim = 128
    batch_size = 128
    epochs = 10
    vae, encoder, decoder = construct_vae(im_size, kernel_size, latent_dim)
    if is_train:
        # Targets are None because the VAE loss was attached via add_loss().
        history = vae.fit(X_train, epochs=epochs, batch_size=batch_size,
            validation_data=(X_test, None), verbose=2)
        vae.save_weights('vae_cnn.h5')
        # Plot training vs. validation loss curves and persist the figure.
        plt.plot(history.history['loss'])
        plt.plot(history.history['val_loss'])
        plt.title('model loss')
        plt.ylabel('loss')
        plt.xlabel('epoch')
        plt.legend(['train', 'test'], loc='upper left')
        plt.savefig('vae_train.jpeg')
        plt.show()
    else:
        vae.load_weights('vae_cnn.h5')
        # Project the full dataset to the latent space; output 0 is z_mean.
        encoded_data = encoder.predict(data, batch_size=batch_size)
        pd.DataFrame(encoded_data[0]).to_csv('latest_rep_cnn.csv', index=None)
        print('Completed.')
<|reserved_special_token_1|>
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from keras.layers import Dense, Input
from keras.layers import Conv2D, Flatten, Lambda
from keras.layers import Reshape, Conv2DTranspose
from keras.models import Model
from keras.losses import mse, binary_crossentropy
from keras.utils import plot_model
from keras import backend as K
from sklearn.model_selection import train_test_split
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
# Reset any prior Keras graph/session state and fix NumPy's RNG seed so runs
# are reproducible.
K.clear_session()
np.random.seed(237)
# reparameterization trick
# instead of sampling from Q(z|X), sample eps = N(0,I)
# then z = z_mean + sqrt(var)*eps
def sampling(args):
    """Reparameterization trick: draw z ~ N(z_mean, exp(z_log_var)).

    # Arguments
        args (tensor): mean and log of variance of Q(z|X)
    # Returns
        z (tensor): sampled latent vector
    """
    mean, log_var = args
    # K.random_normal defaults to mean=0, std=1 (isotropic unit Gaussian).
    eps = K.random_normal(shape=(K.shape(mean)[0], K.int_shape(mean)[1]))
    return mean + K.exp(0.5 * log_var) * eps
def process_data(data_path):
    """Load frames from *data_path*, split 95/5 train/test, scale to [0, 1].

    Returns (data, X_train, X_test, image_size) where each array is float32
    with shape (n, height, width, 1) and image_size is (height, width).
    """
    raw = np.load(data_path)
    X_train, X_test = train_test_split(raw, test_size=0.05, random_state=42)
    print('Shape train/test:', X_train.shape, X_test.shape)
    image_size = X_train.shape[1], X_train.shape[2]

    def _prepare(arr):
        # Add a single channel axis and normalize 8-bit pixels to [0, 1].
        shaped = np.reshape(arr, [-1, image_size[0], image_size[1], 1])
        return shaped.astype('float32') / 255

    return _prepare(raw), _prepare(X_train), _prepare(X_test), image_size
def construct_vae(image_size, kernel_size, latent_dim):
    """Build a convolutional VAE for single-channel (h, w, 1) images.

    Returns (vae, encoder, decoder); the VAE is compiled with rmsprop and
    has the combined reconstruction + KL loss attached via add_loss().
    """
    input_shape = (image_size[0], image_size[1], 1)

    # --- Encoder: three conv layers, then dense heads for mean / log-var ---
    inputs = Input(shape=input_shape, name='encoder_input')
    h = inputs
    for n_filters, stride in ((16, 1), (32, 2), (64, 1)):
        h = Conv2D(filters=n_filters, kernel_size=kernel_size,
                   activation='relu', strides=stride, padding='same')(h)
    # Remember the conv output shape so the decoder can mirror it exactly.
    conv_shape = K.int_shape(h)
    h = Flatten()(h)
    h = Dense(16, activation='relu')(h)
    z_mean = Dense(latent_dim, name='z_mean')(h)
    z_log_var = Dense(latent_dim, name='z_log_var')(h)
    # Reparameterization trick keeps the sampling step differentiable.
    z = Lambda(sampling, output_shape=(latent_dim,), name='z')(
        [z_mean, z_log_var])
    encoder = Model(inputs, [z_mean, z_log_var, z], name='encoder')
    encoder.summary()
    plot_model(encoder, to_file='vae_cnn_encoder.png', show_shapes=True)

    # --- Decoder: mirror of the encoder using transposed convolutions ---
    latent_inputs = Input(shape=(latent_dim,), name='z_sampling')
    h = Dense(conv_shape[1] * conv_shape[2] * conv_shape[3],
              activation='relu')(latent_inputs)
    h = Reshape((conv_shape[1], conv_shape[2], conv_shape[3]))(h)
    for n_filters, stride in ((64, 1), (32, 2), (16, 1)):
        h = Conv2DTranspose(filters=n_filters, kernel_size=kernel_size,
                            activation='relu', strides=stride,
                            padding='same')(h)
    outputs = Conv2DTranspose(filters=1, kernel_size=kernel_size,
                              activation='sigmoid', padding='same',
                              name='decoder_output')(h)
    decoder = Model(latent_inputs, outputs, name='decoder')
    decoder.summary()
    plot_model(decoder, to_file='vae_cnn_decoder.png', show_shapes=True)

    # --- Full VAE: encoder output index 2 is the sampled latent z ---
    outputs = decoder(encoder(inputs)[2])
    vae = Model(inputs, outputs, name='vae')

    # Loss = per-pixel binary cross-entropy + KL divergence to N(0, I).
    reconstruction_loss = binary_crossentropy(K.flatten(inputs),
                                              K.flatten(outputs))
    reconstruction_loss *= image_size[0] * image_size[1]
    kl_loss = 1 + z_log_var - K.square(z_mean) - K.exp(z_log_var)
    kl_loss = K.sum(kl_loss, axis=-1)
    kl_loss *= -0.5
    vae_loss = K.mean(reconstruction_loss + kl_loss)
    vae.add_loss(vae_loss)
    vae.compile(optimizer='rmsprop')
    vae.summary()
    plot_model(vae, to_file='vae_cnn.png', show_shapes=True)
    return vae, encoder, decoder
if __name__ == '__main__':

    # Toggle between training a new model and loading saved weights.
    is_train = False
    data_file = '../data/out/moment_frames_5.npy'
    data, X_train, X_test, im_size = process_data(data_file)

    # Hyperparameters for the convolutional VAE.
    kernel_size = (3, 3)
    latent_dim = 128
    batch_size = 128
    epochs = 10

    vae, encoder, decoder = construct_vae(im_size, kernel_size, latent_dim)

    if is_train:
        # Targets are None because the VAE loss was attached via add_loss().
        history = vae.fit(X_train,
                          epochs=epochs,
                          batch_size=batch_size,
                          validation_data=(X_test, None),
                          verbose=2)
        vae.save_weights('vae_cnn.h5')

        # summarize history for loss (train vs. validation curves)
        plt.plot(history.history['loss'])
        plt.plot(history.history['val_loss'])
        plt.title('model loss')
        plt.ylabel('loss')
        plt.xlabel('epoch')
        plt.legend(['train', 'test'], loc='upper left')
        plt.savefig('vae_train.jpeg')
        plt.show()

    else:
        vae.load_weights('vae_cnn.h5')

        # Transform to latent representation; encoder output 0 is z_mean.
        encoded_data = encoder.predict(data, batch_size=batch_size)

        pd.DataFrame(encoded_data[0]).to_csv('latest_rep_cnn.csv', index=None)

        print('Completed.')
|
flexible
|
{
"blob_id": "88343b9c5cac3510e8cea75ac5b11f517ddc164b",
"index": 5943,
"step-1": "<mask token>\n\n\ndef sampling(args):\n \"\"\"Reparameterization trick by sampling fr an isotropic unit Gaussian.\n # Arguments\n args (tensor): mean and log of variance of Q(z|X)\n # Returns\n z (tensor): sampled latent vector\n \"\"\"\n z_mean, z_log_var = args\n batch = K.shape(z_mean)[0]\n dim = K.int_shape(z_mean)[1]\n epsilon = K.random_normal(shape=(batch, dim))\n return z_mean + K.exp(0.5 * z_log_var) * epsilon\n\n\n<mask token>\n\n\ndef construct_vae(image_size, kernel_size, latent_dim):\n input_shape = image_size[0], image_size[1], 1\n inputs = Input(shape=input_shape, name='encoder_input')\n x = inputs\n x = Conv2D(filters=16, kernel_size=kernel_size, activation='relu',\n strides=1, padding='same')(x)\n x = Conv2D(filters=32, kernel_size=kernel_size, activation='relu',\n strides=2, padding='same')(x)\n x = Conv2D(filters=64, kernel_size=kernel_size, activation='relu',\n strides=1, padding='same')(x)\n shape = K.int_shape(x)\n x = Flatten()(x)\n x = Dense(16, activation='relu')(x)\n z_mean = Dense(latent_dim, name='z_mean')(x)\n z_log_var = Dense(latent_dim, name='z_log_var')(x)\n z = Lambda(sampling, output_shape=(latent_dim,), name='z')([z_mean,\n z_log_var])\n encoder = Model(inputs, [z_mean, z_log_var, z], name='encoder')\n encoder.summary()\n plot_model(encoder, to_file='vae_cnn_encoder.png', show_shapes=True)\n latent_inputs = Input(shape=(latent_dim,), name='z_sampling')\n x = Dense(shape[1] * shape[2] * shape[3], activation='relu')(latent_inputs)\n x = Reshape((shape[1], shape[2], shape[3]))(x)\n x = Conv2DTranspose(filters=64, kernel_size=kernel_size, activation=\n 'relu', strides=1, padding='same')(x)\n x = Conv2DTranspose(filters=32, kernel_size=kernel_size, activation=\n 'relu', strides=2, padding='same')(x)\n x = Conv2DTranspose(filters=16, kernel_size=kernel_size, activation=\n 'relu', strides=1, padding='same')(x)\n outputs = Conv2DTranspose(filters=1, kernel_size=kernel_size,\n activation='sigmoid', padding='same', 
name='decoder_output')(x)\n decoder = Model(latent_inputs, outputs, name='decoder')\n decoder.summary()\n plot_model(decoder, to_file='vae_cnn_decoder.png', show_shapes=True)\n outputs = decoder(encoder(inputs)[2])\n vae = Model(inputs, outputs, name='vae')\n reconstruction_loss = binary_crossentropy(K.flatten(inputs), K.flatten(\n outputs))\n reconstruction_loss *= image_size[0] * image_size[1]\n kl_loss = 1 + z_log_var - K.square(z_mean) - K.exp(z_log_var)\n kl_loss = K.sum(kl_loss, axis=-1)\n kl_loss *= -0.5\n vae_loss = K.mean(reconstruction_loss + kl_loss)\n vae.add_loss(vae_loss)\n vae.compile(optimizer='rmsprop')\n vae.summary()\n plot_model(vae, to_file='vae_cnn.png', show_shapes=True)\n return vae, encoder, decoder\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef sampling(args):\n \"\"\"Reparameterization trick by sampling fr an isotropic unit Gaussian.\n # Arguments\n args (tensor): mean and log of variance of Q(z|X)\n # Returns\n z (tensor): sampled latent vector\n \"\"\"\n z_mean, z_log_var = args\n batch = K.shape(z_mean)[0]\n dim = K.int_shape(z_mean)[1]\n epsilon = K.random_normal(shape=(batch, dim))\n return z_mean + K.exp(0.5 * z_log_var) * epsilon\n\n\ndef process_data(data_path):\n data = np.load(data_path)\n X_train, X_test = train_test_split(data, test_size=0.05, random_state=42)\n print('Shape train/test:', X_train.shape, X_test.shape)\n image_size = X_train.shape[1], X_train.shape[2]\n data = np.reshape(data, [-1, image_size[0], image_size[1], 1])\n X_train = np.reshape(X_train, [-1, image_size[0], image_size[1], 1])\n X_test = np.reshape(X_test, [-1, image_size[0], image_size[1], 1])\n data = data.astype('float32') / 255\n X_train = X_train.astype('float32') / 255\n X_test = X_test.astype('float32') / 255\n return data, X_train, X_test, image_size\n\n\ndef construct_vae(image_size, kernel_size, latent_dim):\n input_shape = image_size[0], image_size[1], 1\n inputs = Input(shape=input_shape, name='encoder_input')\n x = inputs\n x = Conv2D(filters=16, kernel_size=kernel_size, activation='relu',\n strides=1, padding='same')(x)\n x = Conv2D(filters=32, kernel_size=kernel_size, activation='relu',\n strides=2, padding='same')(x)\n x = Conv2D(filters=64, kernel_size=kernel_size, activation='relu',\n strides=1, padding='same')(x)\n shape = K.int_shape(x)\n x = Flatten()(x)\n x = Dense(16, activation='relu')(x)\n z_mean = Dense(latent_dim, name='z_mean')(x)\n z_log_var = Dense(latent_dim, name='z_log_var')(x)\n z = Lambda(sampling, output_shape=(latent_dim,), name='z')([z_mean,\n z_log_var])\n encoder = Model(inputs, [z_mean, z_log_var, z], name='encoder')\n encoder.summary()\n plot_model(encoder, to_file='vae_cnn_encoder.png', show_shapes=True)\n latent_inputs = 
Input(shape=(latent_dim,), name='z_sampling')\n x = Dense(shape[1] * shape[2] * shape[3], activation='relu')(latent_inputs)\n x = Reshape((shape[1], shape[2], shape[3]))(x)\n x = Conv2DTranspose(filters=64, kernel_size=kernel_size, activation=\n 'relu', strides=1, padding='same')(x)\n x = Conv2DTranspose(filters=32, kernel_size=kernel_size, activation=\n 'relu', strides=2, padding='same')(x)\n x = Conv2DTranspose(filters=16, kernel_size=kernel_size, activation=\n 'relu', strides=1, padding='same')(x)\n outputs = Conv2DTranspose(filters=1, kernel_size=kernel_size,\n activation='sigmoid', padding='same', name='decoder_output')(x)\n decoder = Model(latent_inputs, outputs, name='decoder')\n decoder.summary()\n plot_model(decoder, to_file='vae_cnn_decoder.png', show_shapes=True)\n outputs = decoder(encoder(inputs)[2])\n vae = Model(inputs, outputs, name='vae')\n reconstruction_loss = binary_crossentropy(K.flatten(inputs), K.flatten(\n outputs))\n reconstruction_loss *= image_size[0] * image_size[1]\n kl_loss = 1 + z_log_var - K.square(z_mean) - K.exp(z_log_var)\n kl_loss = K.sum(kl_loss, axis=-1)\n kl_loss *= -0.5\n vae_loss = K.mean(reconstruction_loss + kl_loss)\n vae.add_loss(vae_loss)\n vae.compile(optimizer='rmsprop')\n vae.summary()\n plot_model(vae, to_file='vae_cnn.png', show_shapes=True)\n return vae, encoder, decoder\n\n\n<mask token>\n",
"step-3": "<mask token>\nK.clear_session()\nnp.random.seed(237)\n\n\ndef sampling(args):\n \"\"\"Reparameterization trick by sampling fr an isotropic unit Gaussian.\n # Arguments\n args (tensor): mean and log of variance of Q(z|X)\n # Returns\n z (tensor): sampled latent vector\n \"\"\"\n z_mean, z_log_var = args\n batch = K.shape(z_mean)[0]\n dim = K.int_shape(z_mean)[1]\n epsilon = K.random_normal(shape=(batch, dim))\n return z_mean + K.exp(0.5 * z_log_var) * epsilon\n\n\ndef process_data(data_path):\n data = np.load(data_path)\n X_train, X_test = train_test_split(data, test_size=0.05, random_state=42)\n print('Shape train/test:', X_train.shape, X_test.shape)\n image_size = X_train.shape[1], X_train.shape[2]\n data = np.reshape(data, [-1, image_size[0], image_size[1], 1])\n X_train = np.reshape(X_train, [-1, image_size[0], image_size[1], 1])\n X_test = np.reshape(X_test, [-1, image_size[0], image_size[1], 1])\n data = data.astype('float32') / 255\n X_train = X_train.astype('float32') / 255\n X_test = X_test.astype('float32') / 255\n return data, X_train, X_test, image_size\n\n\ndef construct_vae(image_size, kernel_size, latent_dim):\n input_shape = image_size[0], image_size[1], 1\n inputs = Input(shape=input_shape, name='encoder_input')\n x = inputs\n x = Conv2D(filters=16, kernel_size=kernel_size, activation='relu',\n strides=1, padding='same')(x)\n x = Conv2D(filters=32, kernel_size=kernel_size, activation='relu',\n strides=2, padding='same')(x)\n x = Conv2D(filters=64, kernel_size=kernel_size, activation='relu',\n strides=1, padding='same')(x)\n shape = K.int_shape(x)\n x = Flatten()(x)\n x = Dense(16, activation='relu')(x)\n z_mean = Dense(latent_dim, name='z_mean')(x)\n z_log_var = Dense(latent_dim, name='z_log_var')(x)\n z = Lambda(sampling, output_shape=(latent_dim,), name='z')([z_mean,\n z_log_var])\n encoder = Model(inputs, [z_mean, z_log_var, z], name='encoder')\n encoder.summary()\n plot_model(encoder, to_file='vae_cnn_encoder.png', show_shapes=True)\n 
latent_inputs = Input(shape=(latent_dim,), name='z_sampling')\n x = Dense(shape[1] * shape[2] * shape[3], activation='relu')(latent_inputs)\n x = Reshape((shape[1], shape[2], shape[3]))(x)\n x = Conv2DTranspose(filters=64, kernel_size=kernel_size, activation=\n 'relu', strides=1, padding='same')(x)\n x = Conv2DTranspose(filters=32, kernel_size=kernel_size, activation=\n 'relu', strides=2, padding='same')(x)\n x = Conv2DTranspose(filters=16, kernel_size=kernel_size, activation=\n 'relu', strides=1, padding='same')(x)\n outputs = Conv2DTranspose(filters=1, kernel_size=kernel_size,\n activation='sigmoid', padding='same', name='decoder_output')(x)\n decoder = Model(latent_inputs, outputs, name='decoder')\n decoder.summary()\n plot_model(decoder, to_file='vae_cnn_decoder.png', show_shapes=True)\n outputs = decoder(encoder(inputs)[2])\n vae = Model(inputs, outputs, name='vae')\n reconstruction_loss = binary_crossentropy(K.flatten(inputs), K.flatten(\n outputs))\n reconstruction_loss *= image_size[0] * image_size[1]\n kl_loss = 1 + z_log_var - K.square(z_mean) - K.exp(z_log_var)\n kl_loss = K.sum(kl_loss, axis=-1)\n kl_loss *= -0.5\n vae_loss = K.mean(reconstruction_loss + kl_loss)\n vae.add_loss(vae_loss)\n vae.compile(optimizer='rmsprop')\n vae.summary()\n plot_model(vae, to_file='vae_cnn.png', show_shapes=True)\n return vae, encoder, decoder\n\n\nif __name__ == '__main__':\n is_train = False\n data_file = '../data/out/moment_frames_5.npy'\n data, X_train, X_test, im_size = process_data(data_file)\n kernel_size = 3, 3\n latent_dim = 128\n batch_size = 128\n epochs = 10\n vae, encoder, decoder = construct_vae(im_size, kernel_size, latent_dim)\n if is_train:\n history = vae.fit(X_train, epochs=epochs, batch_size=batch_size,\n validation_data=(X_test, None), verbose=2)\n vae.save_weights('vae_cnn.h5')\n plt.plot(history.history['loss'])\n plt.plot(history.history['val_loss'])\n plt.title('model loss')\n plt.ylabel('loss')\n plt.xlabel('epoch')\n plt.legend(['train', 
'test'], loc='upper left')\n plt.savefig('vae_train.jpeg')\n plt.show()\n else:\n vae.load_weights('vae_cnn.h5')\n encoded_data = encoder.predict(data, batch_size=batch_size)\n pd.DataFrame(encoded_data[0]).to_csv('latest_rep_cnn.csv', index=None)\n print('Completed.')\n",
"step-4": "from __future__ import absolute_import\nfrom __future__ import division\nfrom __future__ import print_function\nfrom keras.layers import Dense, Input\nfrom keras.layers import Conv2D, Flatten, Lambda\nfrom keras.layers import Reshape, Conv2DTranspose\nfrom keras.models import Model\nfrom keras.losses import mse, binary_crossentropy\nfrom keras.utils import plot_model\nfrom keras import backend as K\nfrom sklearn.model_selection import train_test_split\nimport pandas as pd\nimport numpy as np\nimport matplotlib.pyplot as plt\nK.clear_session()\nnp.random.seed(237)\n\n\ndef sampling(args):\n \"\"\"Reparameterization trick by sampling fr an isotropic unit Gaussian.\n # Arguments\n args (tensor): mean and log of variance of Q(z|X)\n # Returns\n z (tensor): sampled latent vector\n \"\"\"\n z_mean, z_log_var = args\n batch = K.shape(z_mean)[0]\n dim = K.int_shape(z_mean)[1]\n epsilon = K.random_normal(shape=(batch, dim))\n return z_mean + K.exp(0.5 * z_log_var) * epsilon\n\n\ndef process_data(data_path):\n data = np.load(data_path)\n X_train, X_test = train_test_split(data, test_size=0.05, random_state=42)\n print('Shape train/test:', X_train.shape, X_test.shape)\n image_size = X_train.shape[1], X_train.shape[2]\n data = np.reshape(data, [-1, image_size[0], image_size[1], 1])\n X_train = np.reshape(X_train, [-1, image_size[0], image_size[1], 1])\n X_test = np.reshape(X_test, [-1, image_size[0], image_size[1], 1])\n data = data.astype('float32') / 255\n X_train = X_train.astype('float32') / 255\n X_test = X_test.astype('float32') / 255\n return data, X_train, X_test, image_size\n\n\ndef construct_vae(image_size, kernel_size, latent_dim):\n input_shape = image_size[0], image_size[1], 1\n inputs = Input(shape=input_shape, name='encoder_input')\n x = inputs\n x = Conv2D(filters=16, kernel_size=kernel_size, activation='relu',\n strides=1, padding='same')(x)\n x = Conv2D(filters=32, kernel_size=kernel_size, activation='relu',\n strides=2, padding='same')(x)\n x = 
Conv2D(filters=64, kernel_size=kernel_size, activation='relu',\n strides=1, padding='same')(x)\n shape = K.int_shape(x)\n x = Flatten()(x)\n x = Dense(16, activation='relu')(x)\n z_mean = Dense(latent_dim, name='z_mean')(x)\n z_log_var = Dense(latent_dim, name='z_log_var')(x)\n z = Lambda(sampling, output_shape=(latent_dim,), name='z')([z_mean,\n z_log_var])\n encoder = Model(inputs, [z_mean, z_log_var, z], name='encoder')\n encoder.summary()\n plot_model(encoder, to_file='vae_cnn_encoder.png', show_shapes=True)\n latent_inputs = Input(shape=(latent_dim,), name='z_sampling')\n x = Dense(shape[1] * shape[2] * shape[3], activation='relu')(latent_inputs)\n x = Reshape((shape[1], shape[2], shape[3]))(x)\n x = Conv2DTranspose(filters=64, kernel_size=kernel_size, activation=\n 'relu', strides=1, padding='same')(x)\n x = Conv2DTranspose(filters=32, kernel_size=kernel_size, activation=\n 'relu', strides=2, padding='same')(x)\n x = Conv2DTranspose(filters=16, kernel_size=kernel_size, activation=\n 'relu', strides=1, padding='same')(x)\n outputs = Conv2DTranspose(filters=1, kernel_size=kernel_size,\n activation='sigmoid', padding='same', name='decoder_output')(x)\n decoder = Model(latent_inputs, outputs, name='decoder')\n decoder.summary()\n plot_model(decoder, to_file='vae_cnn_decoder.png', show_shapes=True)\n outputs = decoder(encoder(inputs)[2])\n vae = Model(inputs, outputs, name='vae')\n reconstruction_loss = binary_crossentropy(K.flatten(inputs), K.flatten(\n outputs))\n reconstruction_loss *= image_size[0] * image_size[1]\n kl_loss = 1 + z_log_var - K.square(z_mean) - K.exp(z_log_var)\n kl_loss = K.sum(kl_loss, axis=-1)\n kl_loss *= -0.5\n vae_loss = K.mean(reconstruction_loss + kl_loss)\n vae.add_loss(vae_loss)\n vae.compile(optimizer='rmsprop')\n vae.summary()\n plot_model(vae, to_file='vae_cnn.png', show_shapes=True)\n return vae, encoder, decoder\n\n\nif __name__ == '__main__':\n is_train = False\n data_file = '../data/out/moment_frames_5.npy'\n data, X_train, 
X_test, im_size = process_data(data_file)\n kernel_size = 3, 3\n latent_dim = 128\n batch_size = 128\n epochs = 10\n vae, encoder, decoder = construct_vae(im_size, kernel_size, latent_dim)\n if is_train:\n history = vae.fit(X_train, epochs=epochs, batch_size=batch_size,\n validation_data=(X_test, None), verbose=2)\n vae.save_weights('vae_cnn.h5')\n plt.plot(history.history['loss'])\n plt.plot(history.history['val_loss'])\n plt.title('model loss')\n plt.ylabel('loss')\n plt.xlabel('epoch')\n plt.legend(['train', 'test'], loc='upper left')\n plt.savefig('vae_train.jpeg')\n plt.show()\n else:\n vae.load_weights('vae_cnn.h5')\n encoded_data = encoder.predict(data, batch_size=batch_size)\n pd.DataFrame(encoded_data[0]).to_csv('latest_rep_cnn.csv', index=None)\n print('Completed.')\n",
"step-5": "from __future__ import absolute_import\nfrom __future__ import division\nfrom __future__ import print_function\n\nfrom keras.layers import Dense, Input\nfrom keras.layers import Conv2D, Flatten, Lambda\nfrom keras.layers import Reshape, Conv2DTranspose\nfrom keras.models import Model\nfrom keras.losses import mse, binary_crossentropy\nfrom keras.utils import plot_model\nfrom keras import backend as K\n\nfrom sklearn.model_selection import train_test_split\n\nimport pandas as pd\nimport numpy as np\nimport matplotlib.pyplot as plt\n\nK.clear_session()\n\nnp.random.seed(237)\n\n\n# reparameterization trick\n# instead of sampling from Q(z|X), sample eps = N(0,I)\n# then z = z_mean + sqrt(var)*eps\ndef sampling(args):\n \"\"\"Reparameterization trick by sampling fr an isotropic unit Gaussian.\n # Arguments\n args (tensor): mean and log of variance of Q(z|X)\n # Returns\n z (tensor): sampled latent vector\n \"\"\"\n\n z_mean, z_log_var = args\n batch = K.shape(z_mean)[0]\n dim = K.int_shape(z_mean)[1]\n # by default, random_normal has mean=0 and std=1.0\n epsilon = K.random_normal(shape=(batch, dim))\n return z_mean + K.exp(0.5 * z_log_var) * epsilon\n\n\ndef process_data(data_path):\n\n data = np.load(data_path)\n\n X_train, X_test = train_test_split(data, test_size=0.05, random_state=42)\n print('Shape train/test:', X_train.shape, X_test.shape)\n\n image_size = X_train.shape[1], X_train.shape[2]\n\n data = np.reshape(data, [-1, image_size[0], image_size[1], 1])\n X_train = np.reshape(X_train, [-1, image_size[0], image_size[1], 1])\n X_test = np.reshape(X_test, [-1, image_size[0], image_size[1], 1])\n\n data = data.astype('float32') / 255\n X_train = X_train.astype('float32') / 255\n X_test = X_test.astype('float32') / 255\n\n return data, X_train, X_test, image_size\n\n\ndef construct_vae(image_size, kernel_size, latent_dim):\n # network parameters\n input_shape = (image_size[0], image_size[1], 1)\n\n # VAE model = encoder + decoder\n # build encoder 
model\n inputs = Input(shape=input_shape, name='encoder_input')\n x = inputs\n x = Conv2D(filters=16, kernel_size=kernel_size, activation='relu', strides=1, padding='same')(x)\n x = Conv2D(filters=32, kernel_size=kernel_size, activation='relu', strides=2, padding='same')(x)\n x = Conv2D(filters=64, kernel_size=kernel_size, activation='relu', strides=1, padding='same')(x)\n\n # shape info needed to build decoder model\n shape = K.int_shape(x)\n\n # generate latent vector Q(z|X)\n x = Flatten()(x)\n x = Dense(16, activation='relu')(x)\n z_mean = Dense(latent_dim, name='z_mean')(x)\n z_log_var = Dense(latent_dim, name='z_log_var')(x)\n\n # use reparameterization trick to push the sampling out as input\n # note that \"output_shape\" isn't necessary with the TensorFlow backend\n z = Lambda(sampling, output_shape=(latent_dim,), name='z')([z_mean, z_log_var])\n\n # instantiate encoder model\n encoder = Model(inputs, [z_mean, z_log_var, z], name='encoder')\n encoder.summary()\n plot_model(encoder, to_file='vae_cnn_encoder.png', show_shapes=True)\n\n # build decoder model\n latent_inputs = Input(shape=(latent_dim,), name='z_sampling')\n x = Dense(shape[1] * shape[2] * shape[3], activation='relu')(latent_inputs)\n x = Reshape((shape[1], shape[2], shape[3]))(x)\n\n x = Conv2DTranspose(filters=64, kernel_size=kernel_size, activation='relu', strides=1, padding='same')(x)\n x = Conv2DTranspose(filters=32, kernel_size=kernel_size, activation='relu', strides=2, padding='same')(x)\n x = Conv2DTranspose(filters=16, kernel_size=kernel_size, activation='relu', strides=1, padding='same')(x)\n\n outputs = Conv2DTranspose(filters=1,\n kernel_size=kernel_size,\n activation='sigmoid',\n padding='same',\n name='decoder_output')(x)\n\n # instantiate decoder model\n decoder = Model(latent_inputs, outputs, name='decoder')\n decoder.summary()\n plot_model(decoder, to_file='vae_cnn_decoder.png', show_shapes=True)\n\n # instantiate VAE model\n outputs = decoder(encoder(inputs)[2])\n vae = 
Model(inputs, outputs, name='vae')\n\n # VAE loss = mse_loss or xent_loss + kl_loss\n reconstruction_loss = binary_crossentropy(K.flatten(inputs), K.flatten(outputs))\n\n reconstruction_loss *= image_size[0] * image_size[1]\n kl_loss = 1 + z_log_var - K.square(z_mean) - K.exp(z_log_var)\n kl_loss = K.sum(kl_loss, axis=-1)\n kl_loss *= -0.5\n vae_loss = K.mean(reconstruction_loss + kl_loss)\n vae.add_loss(vae_loss)\n vae.compile(optimizer='rmsprop')\n vae.summary()\n plot_model(vae, to_file='vae_cnn.png', show_shapes=True)\n\n return vae, encoder, decoder\n\n\nif __name__ == '__main__':\n\n is_train = False\n data_file = '../data/out/moment_frames_5.npy'\n data, X_train, X_test, im_size = process_data(data_file)\n\n kernel_size = (3, 3)\n latent_dim = 128\n batch_size = 128\n epochs = 10\n\n vae, encoder, decoder = construct_vae(im_size, kernel_size, latent_dim)\n\n if is_train:\n history = vae.fit(X_train,\n epochs=epochs,\n batch_size=batch_size,\n validation_data=(X_test, None),\n verbose=2)\n vae.save_weights('vae_cnn.h5')\n\n # summarize history for loss\n plt.plot(history.history['loss'])\n plt.plot(history.history['val_loss'])\n plt.title('model loss')\n plt.ylabel('loss')\n plt.xlabel('epoch')\n plt.legend(['train', 'test'], loc='upper left')\n plt.savefig('vae_train.jpeg')\n plt.show()\n\n else:\n vae.load_weights('vae_cnn.h5')\n\n # Transform to latent representation\n encoded_data = encoder.predict(data, batch_size=batch_size)\n\n pd.DataFrame(encoded_data[0]).to_csv('latest_rep_cnn.csv', index=None)\n\n print('Completed.')\n",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
# Based on https://dev.to/jemaloqiu/design-pattern-in-python-2-observer-j4
class AbstractObservable():
"""
Abstract Observable
"""
def __init__(self):
self.__observers = []
def add_observer(self, observer):
self.__observers.append(observer)
def remove_observer(self, observer):
self.__observers.remove(observer)
def notify_observers(self, arg=0):
for o in self.__observers:
o.update(self, arg)
class AbstractObserver():
"""
Abstract Observer - Abstract device
"""
def __init__(self):
pass
def update(self):
pass
#
class MonitorTruck(AbstractObservable):
"""
Concrete Observable class
"""
def __init__(self, name):
super().__init__()
self.name = name
self.__physical_properties = {"temperature": 0.0, "humidity": 0.0}
def set_value(self, measure_key, val):
if measure_key in self.__physical_properties:
self.__physical_properties[measure_key] = val
self.notify_observers()
else:
print(f"Parameter type {measure_key} not supported.")
def get_value(self, measure_key):
return self.__physical_properties.get(measure_key)
class Thermometer(AbstractObserver):
"""
Concrete Observer - Thermometer
"""
def __init__(self):
super().__init__()
def update(self, tt, obj):
if tt.__class__ == MonitorTruck:
temperature = tt.get_value("temperature")
if temperature > 37.8:
print(f"WARNING - Temperature too high: {temperature}" )
elif temperature < 36.0:
print(f"WARNING - Temperature too slow: {temperature}")
else:
print(f"INFO - Temperature normal: {temperature}")
else:
pass
class HumidityMeter(AbstractObserver):
"""
Concrete Observer - humidity meter
"""
def __init__(self):
super().__init__()
def update(self, tt, obj):
if tt.__class__ == MonitorTruck:
humidity_value = tt.get_value("humidity")
if humidity_value > 60:
print(f"WARNING - humidity too high: {humidity_value}" )
elif humidity_value < 40:
print(f"WARNING - humidity too high: {humidity_value}" )
else:
print(f"INFO - humidity normal: {humidity_value}")
else:
pass
import time
if __name__ == "__main__":
tuck = MonitorTruck("John")
thermometer = Thermometer()
humidity = HumidityMeter()
## now kick off the simulation
for i in range(0, 15):
time.sleep(1.5)
print("====== Time step {} =======".format(i+1))
# At rount #3: thermometer is added for monitoring temperature
# At rount #5: humidity is added for monitoring the humidity level
# At rount #10: thermometer is removed
if i == 3:
tuck.add_observer(thermometer)
elif i == 5:
tuck.add_observer(humidity)
elif i == 10:
tuck.remove_observer(thermometer)
# simulating the physical parameters
if i%3 ==0:
tuck.set_value("temperature", 35.5 + 0.5*i)
elif i%3 == 1:
tuck.set_value("humidity", 30 + 3*i)
|
normal
|
{
"blob_id": "3b3f423cfb08413a4135646ea4d3d6dcb5d0cc10",
"index": 662,
"step-1": "<mask token>\n\n\nclass MonitorTruck(AbstractObservable):\n \"\"\"\n Concrete Observable class\n \"\"\"\n\n def __init__(self, name):\n super().__init__()\n self.name = name\n self.__physical_properties = {'temperature': 0.0, 'humidity': 0.0}\n\n def set_value(self, measure_key, val):\n if measure_key in self.__physical_properties:\n self.__physical_properties[measure_key] = val\n self.notify_observers()\n else:\n print(f'Parameter type {measure_key} not supported.')\n\n def get_value(self, measure_key):\n return self.__physical_properties.get(measure_key)\n\n\nclass Thermometer(AbstractObserver):\n \"\"\"\n Concrete Observer - Thermometer\n \"\"\"\n\n def __init__(self):\n super().__init__()\n\n def update(self, tt, obj):\n if tt.__class__ == MonitorTruck:\n temperature = tt.get_value('temperature')\n if temperature > 37.8:\n print(f'WARNING - Temperature too high: {temperature}')\n elif temperature < 36.0:\n print(f'WARNING - Temperature too slow: {temperature}')\n else:\n print(f'INFO - Temperature normal: {temperature}')\n else:\n pass\n\n\nclass HumidityMeter(AbstractObserver):\n \"\"\"\n Concrete Observer - humidity meter\n \"\"\"\n\n def __init__(self):\n super().__init__()\n\n def update(self, tt, obj):\n if tt.__class__ == MonitorTruck:\n humidity_value = tt.get_value('humidity')\n if humidity_value > 60:\n print(f'WARNING - humidity too high: {humidity_value}')\n elif humidity_value < 40:\n print(f'WARNING - humidity too high: {humidity_value}')\n else:\n print(f'INFO - humidity normal: {humidity_value}')\n else:\n pass\n\n\n<mask token>\n",
"step-2": "class AbstractObservable:\n <mask token>\n\n def __init__(self):\n self.__observers = []\n <mask token>\n\n def remove_observer(self, observer):\n self.__observers.remove(observer)\n\n def notify_observers(self, arg=0):\n for o in self.__observers:\n o.update(self, arg)\n\n\nclass AbstractObserver:\n \"\"\"\n Abstract Observer - Abstract device\n \"\"\"\n\n def __init__(self):\n pass\n\n def update(self):\n pass\n\n\nclass MonitorTruck(AbstractObservable):\n \"\"\"\n Concrete Observable class\n \"\"\"\n\n def __init__(self, name):\n super().__init__()\n self.name = name\n self.__physical_properties = {'temperature': 0.0, 'humidity': 0.0}\n\n def set_value(self, measure_key, val):\n if measure_key in self.__physical_properties:\n self.__physical_properties[measure_key] = val\n self.notify_observers()\n else:\n print(f'Parameter type {measure_key} not supported.')\n\n def get_value(self, measure_key):\n return self.__physical_properties.get(measure_key)\n\n\nclass Thermometer(AbstractObserver):\n \"\"\"\n Concrete Observer - Thermometer\n \"\"\"\n\n def __init__(self):\n super().__init__()\n\n def update(self, tt, obj):\n if tt.__class__ == MonitorTruck:\n temperature = tt.get_value('temperature')\n if temperature > 37.8:\n print(f'WARNING - Temperature too high: {temperature}')\n elif temperature < 36.0:\n print(f'WARNING - Temperature too slow: {temperature}')\n else:\n print(f'INFO - Temperature normal: {temperature}')\n else:\n pass\n\n\nclass HumidityMeter(AbstractObserver):\n \"\"\"\n Concrete Observer - humidity meter\n \"\"\"\n\n def __init__(self):\n super().__init__()\n\n def update(self, tt, obj):\n if tt.__class__ == MonitorTruck:\n humidity_value = tt.get_value('humidity')\n if humidity_value > 60:\n print(f'WARNING - humidity too high: {humidity_value}')\n elif humidity_value < 40:\n print(f'WARNING - humidity too high: {humidity_value}')\n else:\n print(f'INFO - humidity normal: {humidity_value}')\n else:\n pass\n\n\n<mask token>\n",
"step-3": "class AbstractObservable:\n \"\"\"\n Abstract Observable \n \"\"\"\n\n def __init__(self):\n self.__observers = []\n\n def add_observer(self, observer):\n self.__observers.append(observer)\n\n def remove_observer(self, observer):\n self.__observers.remove(observer)\n\n def notify_observers(self, arg=0):\n for o in self.__observers:\n o.update(self, arg)\n\n\nclass AbstractObserver:\n \"\"\"\n Abstract Observer - Abstract device\n \"\"\"\n\n def __init__(self):\n pass\n\n def update(self):\n pass\n\n\nclass MonitorTruck(AbstractObservable):\n \"\"\"\n Concrete Observable class\n \"\"\"\n\n def __init__(self, name):\n super().__init__()\n self.name = name\n self.__physical_properties = {'temperature': 0.0, 'humidity': 0.0}\n\n def set_value(self, measure_key, val):\n if measure_key in self.__physical_properties:\n self.__physical_properties[measure_key] = val\n self.notify_observers()\n else:\n print(f'Parameter type {measure_key} not supported.')\n\n def get_value(self, measure_key):\n return self.__physical_properties.get(measure_key)\n\n\nclass Thermometer(AbstractObserver):\n \"\"\"\n Concrete Observer - Thermometer\n \"\"\"\n\n def __init__(self):\n super().__init__()\n\n def update(self, tt, obj):\n if tt.__class__ == MonitorTruck:\n temperature = tt.get_value('temperature')\n if temperature > 37.8:\n print(f'WARNING - Temperature too high: {temperature}')\n elif temperature < 36.0:\n print(f'WARNING - Temperature too slow: {temperature}')\n else:\n print(f'INFO - Temperature normal: {temperature}')\n else:\n pass\n\n\nclass HumidityMeter(AbstractObserver):\n \"\"\"\n Concrete Observer - humidity meter\n \"\"\"\n\n def __init__(self):\n super().__init__()\n\n def update(self, tt, obj):\n if tt.__class__ == MonitorTruck:\n humidity_value = tt.get_value('humidity')\n if humidity_value > 60:\n print(f'WARNING - humidity too high: {humidity_value}')\n elif humidity_value < 40:\n print(f'WARNING - humidity too high: {humidity_value}')\n else:\n 
print(f'INFO - humidity normal: {humidity_value}')\n else:\n pass\n\n\n<mask token>\n",
"step-4": "class AbstractObservable:\n \"\"\"\n Abstract Observable \n \"\"\"\n\n def __init__(self):\n self.__observers = []\n\n def add_observer(self, observer):\n self.__observers.append(observer)\n\n def remove_observer(self, observer):\n self.__observers.remove(observer)\n\n def notify_observers(self, arg=0):\n for o in self.__observers:\n o.update(self, arg)\n\n\nclass AbstractObserver:\n \"\"\"\n Abstract Observer - Abstract device\n \"\"\"\n\n def __init__(self):\n pass\n\n def update(self):\n pass\n\n\nclass MonitorTruck(AbstractObservable):\n \"\"\"\n Concrete Observable class\n \"\"\"\n\n def __init__(self, name):\n super().__init__()\n self.name = name\n self.__physical_properties = {'temperature': 0.0, 'humidity': 0.0}\n\n def set_value(self, measure_key, val):\n if measure_key in self.__physical_properties:\n self.__physical_properties[measure_key] = val\n self.notify_observers()\n else:\n print(f'Parameter type {measure_key} not supported.')\n\n def get_value(self, measure_key):\n return self.__physical_properties.get(measure_key)\n\n\nclass Thermometer(AbstractObserver):\n \"\"\"\n Concrete Observer - Thermometer\n \"\"\"\n\n def __init__(self):\n super().__init__()\n\n def update(self, tt, obj):\n if tt.__class__ == MonitorTruck:\n temperature = tt.get_value('temperature')\n if temperature > 37.8:\n print(f'WARNING - Temperature too high: {temperature}')\n elif temperature < 36.0:\n print(f'WARNING - Temperature too slow: {temperature}')\n else:\n print(f'INFO - Temperature normal: {temperature}')\n else:\n pass\n\n\nclass HumidityMeter(AbstractObserver):\n \"\"\"\n Concrete Observer - humidity meter\n \"\"\"\n\n def __init__(self):\n super().__init__()\n\n def update(self, tt, obj):\n if tt.__class__ == MonitorTruck:\n humidity_value = tt.get_value('humidity')\n if humidity_value > 60:\n print(f'WARNING - humidity too high: {humidity_value}')\n elif humidity_value < 40:\n print(f'WARNING - humidity too high: {humidity_value}')\n else:\n 
print(f'INFO - humidity normal: {humidity_value}')\n else:\n pass\n\n\nimport time\nif __name__ == '__main__':\n tuck = MonitorTruck('John')\n thermometer = Thermometer()\n humidity = HumidityMeter()\n for i in range(0, 15):\n time.sleep(1.5)\n print('====== Time step {} ======='.format(i + 1))\n if i == 3:\n tuck.add_observer(thermometer)\n elif i == 5:\n tuck.add_observer(humidity)\n elif i == 10:\n tuck.remove_observer(thermometer)\n if i % 3 == 0:\n tuck.set_value('temperature', 35.5 + 0.5 * i)\n elif i % 3 == 1:\n tuck.set_value('humidity', 30 + 3 * i)\n",
"step-5": "# Based on https://dev.to/jemaloqiu/design-pattern-in-python-2-observer-j4\n\nclass AbstractObservable():\n \"\"\"\n Abstract Observable \n \"\"\"\n\n def __init__(self):\n self.__observers = []\n\n def add_observer(self, observer):\n self.__observers.append(observer)\n\n def remove_observer(self, observer):\n self.__observers.remove(observer)\n\n def notify_observers(self, arg=0):\n for o in self.__observers:\n o.update(self, arg)\n\n\nclass AbstractObserver():\n \"\"\"\n Abstract Observer - Abstract device\n \"\"\"\n\n def __init__(self):\n pass\n\n def update(self): \n pass\n\n#\nclass MonitorTruck(AbstractObservable):\n \"\"\"\n Concrete Observable class\n \"\"\"\n\n def __init__(self, name):\n super().__init__() \n self.name = name\n self.__physical_properties = {\"temperature\": 0.0, \"humidity\": 0.0}\n\n def set_value(self, measure_key, val):\n if measure_key in self.__physical_properties:\n self.__physical_properties[measure_key] = val\n self.notify_observers()\n else:\n print(f\"Parameter type {measure_key} not supported.\")\n\n def get_value(self, measure_key):\n return self.__physical_properties.get(measure_key)\n\nclass Thermometer(AbstractObserver): \n \"\"\"\n Concrete Observer - Thermometer\n \"\"\"\n\n def __init__(self):\n super().__init__()\n\n\n def update(self, tt, obj):\n if tt.__class__ == MonitorTruck:\n temperature = tt.get_value(\"temperature\")\n if temperature > 37.8:\n print(f\"WARNING - Temperature too high: {temperature}\" )\n elif temperature < 36.0:\n print(f\"WARNING - Temperature too slow: {temperature}\")\n else:\n print(f\"INFO - Temperature normal: {temperature}\")\n\n else:\n pass\n\nclass HumidityMeter(AbstractObserver): \n \"\"\"\n Concrete Observer - humidity meter\n \"\"\"\n\n def __init__(self):\n super().__init__()\n\n def update(self, tt, obj):\n if tt.__class__ == MonitorTruck:\n humidity_value = tt.get_value(\"humidity\")\n if humidity_value > 60:\n print(f\"WARNING - humidity too high: {humidity_value}\" 
)\n elif humidity_value < 40:\n print(f\"WARNING - humidity too high: {humidity_value}\" )\n else:\n print(f\"INFO - humidity normal: {humidity_value}\")\n\n else:\n pass\n\nimport time\n\nif __name__ == \"__main__\":\n tuck = MonitorTruck(\"John\")\n thermometer = Thermometer()\n humidity = HumidityMeter()\n\n\n ## now kick off the simulation \n for i in range(0, 15):\n\n time.sleep(1.5)\n print(\"====== Time step {} =======\".format(i+1))\n\n # At rount #3: thermometer is added for monitoring temperature\n # At rount #5: humidity is added for monitoring the humidity level\n # At rount #10: thermometer is removed\n\n if i == 3:\n tuck.add_observer(thermometer) \n elif i == 5: \n tuck.add_observer(humidity) \n elif i == 10:\n tuck.remove_observer(thermometer)\n\n # simulating the physical parameters\n if i%3 ==0:\n tuck.set_value(\"temperature\", 35.5 + 0.5*i)\n elif i%3 == 1:\n tuck.set_value(\"humidity\", 30 + 3*i)\n ",
"step-ids": [
13,
21,
23,
25,
26
]
}
|
[
13,
21,
23,
25,
26
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def seq(ctn, array, l):
if sorted(check) in array:
return
for i in range(n):
l += 1
check.append(arr[i])
seq(ctn + 1, array, l)
check.pop()
print('l :', l, ' i :', i)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def seq(ctn, array, l):
if sorted(check) in array:
return
for i in range(n):
l += 1
check.append(arr[i])
seq(ctn + 1, array, l)
check.pop()
print('l :', l, ' i :', i)
seq(0, [], 1)
<|reserved_special_token_1|>
n, m = list(map(int, input().split()))
arr = [i for i in range(1, n + 1)]
check = []
def seq(ctn, array, l):
if sorted(check) in array:
return
for i in range(n):
l += 1
check.append(arr[i])
seq(ctn + 1, array, l)
check.pop()
print('l :', l, ' i :', i)
seq(0, [], 1)
<|reserved_special_token_1|>
# 15650번 수열 2번째
n, m = list(map(int, input().split()))
arr = [i for i in range(1,n+1)]
check = []
def seq(ctn, array, l):
if sorted(check) in array:
return
# if ctn == m:
# # l+=1
# # print('ctn :',ctn,' check :',sorted(check))
# array.append(sorted(check))
# for k in range(m):
# print(check[k], end = ' ')
# print()
# return
for i in range(n):
l += 1
check.append(arr[i])
seq(ctn+1, array, l)
check.pop()
print('l :',l,' i :',i)
seq(0,[], 1)
|
flexible
|
{
"blob_id": "dc5d56d65417dd8061a018a2f07132b03e2d616e",
"index": 5127,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef seq(ctn, array, l):\n if sorted(check) in array:\n return\n for i in range(n):\n l += 1\n check.append(arr[i])\n seq(ctn + 1, array, l)\n check.pop()\n print('l :', l, ' i :', i)\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef seq(ctn, array, l):\n if sorted(check) in array:\n return\n for i in range(n):\n l += 1\n check.append(arr[i])\n seq(ctn + 1, array, l)\n check.pop()\n print('l :', l, ' i :', i)\n\n\nseq(0, [], 1)\n",
"step-4": "n, m = list(map(int, input().split()))\narr = [i for i in range(1, n + 1)]\ncheck = []\n\n\ndef seq(ctn, array, l):\n if sorted(check) in array:\n return\n for i in range(n):\n l += 1\n check.append(arr[i])\n seq(ctn + 1, array, l)\n check.pop()\n print('l :', l, ' i :', i)\n\n\nseq(0, [], 1)\n",
"step-5": "# 15650번 수열 2번째\n\nn, m = list(map(int, input().split()))\n\narr = [i for i in range(1,n+1)]\ncheck = []\n\ndef seq(ctn, array, l):\n if sorted(check) in array:\n return\n # if ctn == m:\n # # l+=1\n # # print('ctn :',ctn,' check :',sorted(check))\n # array.append(sorted(check))\n # for k in range(m):\n # print(check[k], end = ' ')\n # print()\n # return\n\n for i in range(n):\n l += 1\n check.append(arr[i])\n seq(ctn+1, array, l)\n check.pop()\n print('l :',l,' i :',i)\n\n\nseq(0,[], 1)",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
"""
Example 1:
Input: J = "aA", S = "aAAbbbb"
Output: 3
Example 2:
Input: J = "z", S = "ZZ"
Output: 0
Note:
S and J will consist of letters and have length at most 50.
The characters in J are distinct.
查找J中的每个字符在 S 出现的次数的总和。
改进:
J有可能有重复的数。
测试数据:
https://leetcode.com/problems/jewels-and-stones/description/
"""
c.. Solution o..
___ numJewelsInStones J, S
"""
:type J: str
:type S: str
:rtype: int
"""
S_dict = {i:S.c..(i) ___ i __ s..(S)}
r_ s..((S_dict.get(i, 0) ___ i __ J))
|
normal
|
{
"blob_id": "8a04447f12a9cb6ba31a21d43629d887a0d1f411",
"index": 3097,
"step-1": "\"\"\"\nExample 1:\n\nInput: J = \"aA\", S = \"aAAbbbb\"\nOutput: 3\nExample 2:\n\nInput: J = \"z\", S = \"ZZ\"\nOutput: 0\nNote:\n\nS and J will consist of letters and have length at most 50.\nThe characters in J are distinct.\n\n查找J中的每个字符在 S 出现的次数的总和。\n\n改进:\nJ有可能有重复的数。\n\n测试数据:\nhttps://leetcode.com/problems/jewels-and-stones/description/\n\n\"\"\"\n\nc.. Solution o..\n ___ numJewelsInStones J, S\n \"\"\"\n :type J: str\n :type S: str\n :rtype: int\n \"\"\"\n S_dict = {i:S.c..(i) ___ i __ s..(S)}\n \n r_ s..((S_dict.get(i, 0) ___ i __ J))\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
np.random.seed(0)
<|reserved_special_token_0|>
z < 3
z[z < 3]
<|reserved_special_token_0|>
a + b
a + 30
<|reserved_special_token_0|>
print(a)
a.shape()
a.ndim()
a[0, 2]
a[0, :]
a[:, 1]
np.min(a)
np.zeros(5)
np.zeros_like([[10, 10], [1, 1]])
np.ones(3, 2)
np.full((2, 2), 100)
np.full_like((2, 2), 10, dtype=np.int)
np.random.rand(2, 4)
np.random.randint(10)
np.random.randint(5, 10, size=(2, 2))
<|reserved_special_token_0|>
np.cos(a)
np.arange(10)
<|reserved_special_token_0|>
np.vstack([v1, v2, v1])
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
z = np.linspace(2, 10, 5)
np.random.seed(0)
z1 = np.random.randint(10, size=6)
z = np.array([1, 2, 3, 4, 5])
z < 3
z[z < 3]
a = np.array([1, 2, 3, 4, 5])
b = np.array([6, 7, 8, 9, 10])
a + b
a + 30
a = np.array([[1, 2, 3], [4, 5, 6]])
print(a)
a.shape()
a.ndim()
a[0, 2]
a[0, :]
a[:, 1]
np.min(a)
np.zeros(5)
np.zeros_like([[10, 10], [1, 1]])
np.ones(3, 2)
np.full((2, 2), 100)
np.full_like((2, 2), 10, dtype=np.int)
np.random.rand(2, 4)
np.random.randint(10)
np.random.randint(5, 10, size=(2, 2))
a = [np.pi, -np.pi, 0]
np.cos(a)
np.arange(10)
v1 = np.array([1, 2, 3])
v2 = np.array([4, 5, 6])
np.vstack([v1, v2, v1])
a = np.array([1, 2, 3, 4, 5, 6, 7, 8, 9])
filedata = np.genfromtxt('name.txt', delimiter=',')
filedata = filedata.astype('type')
a = np.arange(7, dtype='f')
x = np.arange(0, 10, 2)
y = np.arange(5)
m = np.vstack([x, y])
xy = np.hstack([x, y])
<|reserved_special_token_1|>
import numpy as np
z = np.linspace(2, 10, 5)
np.random.seed(0)
z1 = np.random.randint(10, size=6)
z = np.array([1, 2, 3, 4, 5])
z < 3
z[z < 3]
a = np.array([1, 2, 3, 4, 5])
b = np.array([6, 7, 8, 9, 10])
a + b
a + 30
a = np.array([[1, 2, 3], [4, 5, 6]])
print(a)
a.shape()
a.ndim()
a[0, 2]
a[0, :]
a[:, 1]
np.min(a)
np.zeros(5)
np.zeros_like([[10, 10], [1, 1]])
np.ones(3, 2)
np.full((2, 2), 100)
np.full_like((2, 2), 10, dtype=np.int)
np.random.rand(2, 4)
np.random.randint(10)
np.random.randint(5, 10, size=(2, 2))
a = [np.pi, -np.pi, 0]
np.cos(a)
np.arange(10)
v1 = np.array([1, 2, 3])
v2 = np.array([4, 5, 6])
np.vstack([v1, v2, v1])
a = np.array([1, 2, 3, 4, 5, 6, 7, 8, 9])
filedata = np.genfromtxt('name.txt', delimiter=',')
filedata = filedata.astype('type')
a = np.arange(7, dtype='f')
x = np.arange(0, 10, 2)
y = np.arange(5)
m = np.vstack([x, y])
xy = np.hstack([x, y])
<|reserved_special_token_1|>
import numpy as np
z = np.linspace(2,10,5) #from 2 to 10, with 5 elements
# OUT: array( [ 2. , 4. , 6. , 8. , 10. ] )
np.random.seed(0)
z1 = np.random.randint(10, size = 6)
# OUT: array( [5, 0, 3, 3, 7, 9] )
z = np.array([1,2,3,4,5])
z < 3
# OUT: array([T,T,F,F,F])
z[z<3]
# OUT: array([1,2])
a = np.array([1,2,3,4,5])
b = np.array([6,7,8,9,10])
a + b # - * /
# OUT: array([7,9,11,13,15])
a + 30 # - * /
# OUT: array([31,32,33,34,35])
a = np.array([[1,2,3],[4,5,6]])
print(a)
# OUT: [[1 2 3]
# [4 5 6]]
a.shape()
# OUT: (2,3)
a.ndim()
# OUT: 2
a[0,2]
# OUT: 3
a[0,:]
# array([1,2,3])
a[:,1]
# array([2,4])
np.min(a) #or MAX|SUM
# OUT: 1
np.zeros(5)
# OUT: array([0.,0.,0.,0.,0.])
np.zeros_like([[10,10],[1,1]])
# OUT: [[0,0],[0,0]]
np.ones(3,2)
# OUT: array([[1,1],
# [1,1],
# [1,1]])
np.full((2,2),100)
# OUT: array([[100,100],
# [100,100]])
np.full_like((2,2), 10, dtype = np.int)
# OUT: [[10,10][10,10]]
np.random.rand(2,4)
#OUT: array([[x,x,x,x],
# [x,x,x,x]])
np.random.randint(10)
#OUT: x # random from 0 to 10 (non include)
np.random.randint(5,10, size=(2,2)) #from 5 to 10(non include)
#OUT: array([[x,x],
# [x,x]])
a = [np.pi,-np.pi,0]
np.cos(a)
#OUT: [-1,-1,1]
np.arange(10)
#OUT: [0,1,...,9]
v1 = np.array([1,2,3])
v2 = np.array([4,5,6])
np.vstack([v1,v2,v1])
#1 2 3
#4 5 6
#1 2 3
a = np.array([1,2,3,4,5,6,7,8,9])
#a[[1,2,8]]
#OUT: 2,3,9
filedata = np.genfromtxt("name.txt", delimiter = ",")
# ?
filedata = filedata.astype("type") #!
# filedata[filedata > 50]
# ((filedata > 50) & (filedata < 100))
# bool Boolean (True or False) stored as a bit
# inti Platform integer (normally either int32 or int64)
# int8 Byte (-128 to 127)
# int16 Integer (-32768 to 32767)
# int32 Integer (-2 ** 31 to 2 ** 31 -1)
# int64 Integer (-2 ** 63 to 2 ** 63 -1)
# uint8 Unsigned integer (0 to 255)
# uint16 Unsigned integer (0 to 65535)
# uint32 Unsigned integer (0 to 2 ** 32 - 1)
# uint64 Unsigned integer (0 to 2 ** 64 - 1)
# float16 Half precision float: sign bit, 5 bits exponent, 10 bits mantissa
# float32 Single precision float: sign bit, 8 bits exponent, 23 bits mantissa
# float64 Double precision float: sign bit, 11 bits exponent, 52 bits mantissa
a = np.arange(7, dtype='f')
# Integer i
# Unsigned integer u
# Single precision float f
# Double precision float d
# Boolean b
# Complex D
# String S
# Unicode U
# Void V
x = np.arange(0,10,2) # x=([0,2,4,6,8])
y = np.arange(5) # y=([0,1,2,3,4])
m = np.vstack([x,y]) # m=([[0,2,4,6,8],
# [0,1,2,3,4]])
xy = np.hstack([x,y]) # xy =([0,2,4,6,8,0,1,2,3,4])
|
flexible
|
{
"blob_id": "be5147efda879165107378527ebf44890c03be75",
"index": 6679,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nnp.random.seed(0)\n<mask token>\nz < 3\nz[z < 3]\n<mask token>\na + b\na + 30\n<mask token>\nprint(a)\na.shape()\na.ndim()\na[0, 2]\na[0, :]\na[:, 1]\nnp.min(a)\nnp.zeros(5)\nnp.zeros_like([[10, 10], [1, 1]])\nnp.ones(3, 2)\nnp.full((2, 2), 100)\nnp.full_like((2, 2), 10, dtype=np.int)\nnp.random.rand(2, 4)\nnp.random.randint(10)\nnp.random.randint(5, 10, size=(2, 2))\n<mask token>\nnp.cos(a)\nnp.arange(10)\n<mask token>\nnp.vstack([v1, v2, v1])\n<mask token>\n",
"step-3": "<mask token>\nz = np.linspace(2, 10, 5)\nnp.random.seed(0)\nz1 = np.random.randint(10, size=6)\nz = np.array([1, 2, 3, 4, 5])\nz < 3\nz[z < 3]\na = np.array([1, 2, 3, 4, 5])\nb = np.array([6, 7, 8, 9, 10])\na + b\na + 30\na = np.array([[1, 2, 3], [4, 5, 6]])\nprint(a)\na.shape()\na.ndim()\na[0, 2]\na[0, :]\na[:, 1]\nnp.min(a)\nnp.zeros(5)\nnp.zeros_like([[10, 10], [1, 1]])\nnp.ones(3, 2)\nnp.full((2, 2), 100)\nnp.full_like((2, 2), 10, dtype=np.int)\nnp.random.rand(2, 4)\nnp.random.randint(10)\nnp.random.randint(5, 10, size=(2, 2))\na = [np.pi, -np.pi, 0]\nnp.cos(a)\nnp.arange(10)\nv1 = np.array([1, 2, 3])\nv2 = np.array([4, 5, 6])\nnp.vstack([v1, v2, v1])\na = np.array([1, 2, 3, 4, 5, 6, 7, 8, 9])\nfiledata = np.genfromtxt('name.txt', delimiter=',')\nfiledata = filedata.astype('type')\na = np.arange(7, dtype='f')\nx = np.arange(0, 10, 2)\ny = np.arange(5)\nm = np.vstack([x, y])\nxy = np.hstack([x, y])\n",
"step-4": "import numpy as np\nz = np.linspace(2, 10, 5)\nnp.random.seed(0)\nz1 = np.random.randint(10, size=6)\nz = np.array([1, 2, 3, 4, 5])\nz < 3\nz[z < 3]\na = np.array([1, 2, 3, 4, 5])\nb = np.array([6, 7, 8, 9, 10])\na + b\na + 30\na = np.array([[1, 2, 3], [4, 5, 6]])\nprint(a)\na.shape()\na.ndim()\na[0, 2]\na[0, :]\na[:, 1]\nnp.min(a)\nnp.zeros(5)\nnp.zeros_like([[10, 10], [1, 1]])\nnp.ones(3, 2)\nnp.full((2, 2), 100)\nnp.full_like((2, 2), 10, dtype=np.int)\nnp.random.rand(2, 4)\nnp.random.randint(10)\nnp.random.randint(5, 10, size=(2, 2))\na = [np.pi, -np.pi, 0]\nnp.cos(a)\nnp.arange(10)\nv1 = np.array([1, 2, 3])\nv2 = np.array([4, 5, 6])\nnp.vstack([v1, v2, v1])\na = np.array([1, 2, 3, 4, 5, 6, 7, 8, 9])\nfiledata = np.genfromtxt('name.txt', delimiter=',')\nfiledata = filedata.astype('type')\na = np.arange(7, dtype='f')\nx = np.arange(0, 10, 2)\ny = np.arange(5)\nm = np.vstack([x, y])\nxy = np.hstack([x, y])\n",
"step-5": "import numpy as np\n\n\nz = np.linspace(2,10,5) #from 2 to 10, with 5 elements\n# OUT: array( [ 2. , 4. , 6. , 8. , 10. ] )\n\nnp.random.seed(0)\nz1 = np.random.randint(10, size = 6)\n# OUT: array( [5, 0, 3, 3, 7, 9] )\n\nz = np.array([1,2,3,4,5])\nz < 3\n# OUT: array([T,T,F,F,F])\nz[z<3]\n# OUT: array([1,2])\n\na = np.array([1,2,3,4,5])\nb = np.array([6,7,8,9,10])\n\na + b # - * /\n# OUT: array([7,9,11,13,15])\na + 30 # - * /\n# OUT: array([31,32,33,34,35])\n\na = np.array([[1,2,3],[4,5,6]])\nprint(a)\n# OUT: [[1 2 3]\n# [4 5 6]]\na.shape()\n# OUT: (2,3)\na.ndim()\n# OUT: 2\na[0,2]\n# OUT: 3\na[0,:]\n# array([1,2,3])\na[:,1]\n# array([2,4])\n\nnp.min(a) #or MAX|SUM\n# OUT: 1\n\n\n\nnp.zeros(5)\n# OUT: array([0.,0.,0.,0.,0.])\nnp.zeros_like([[10,10],[1,1]])\n# OUT: [[0,0],[0,0]]\nnp.ones(3,2)\n# OUT: array([[1,1],\n#\t [1,1],\n#\t [1,1]])\nnp.full((2,2),100)\n# OUT: array([[100,100],\n#\t [100,100]])\nnp.full_like((2,2), 10, dtype = np.int)\n# OUT: [[10,10][10,10]]\n\n\nnp.random.rand(2,4)\n#OUT: array([[x,x,x,x],\n#\t [x,x,x,x]])\n\nnp.random.randint(10) \n#OUT: x # random from 0 to 10 (non include)\n\nnp.random.randint(5,10, size=(2,2)) #from 5 to 10(non include)\n#OUT: array([[x,x],\n#\t [x,x]])\n\n\na = [np.pi,-np.pi,0]\nnp.cos(a) \n#OUT: [-1,-1,1]\n\n\nnp.arange(10)\n#OUT: [0,1,...,9]\n\n\nv1 = np.array([1,2,3])\nv2 = np.array([4,5,6])\n\nnp.vstack([v1,v2,v1])\n\n#1 2 3\n#4 5 6\n#1 2 3\n\n\n\na = np.array([1,2,3,4,5,6,7,8,9])\n#a[[1,2,8]]\n#OUT: 2,3,9\n\n\nfiledata = np.genfromtxt(\"name.txt\", delimiter = \",\")\n# ?\nfiledata = filedata.astype(\"type\") #!\n# filedata[filedata > 50] \n# ((filedata > 50) & (filedata < 100))\n\n\n\n\n# bool Boolean (True or False) stored as a bit\n# inti Platform integer (normally either int32 or int64)\n# int8 Byte (-128 to 127)\n# int16 Integer (-32768 to 32767)\n# int32 Integer (-2 ** 31 to 2 ** 31 -1)\n# int64 Integer (-2 ** 63 to 2 ** 63 -1)\n# uint8 Unsigned integer (0 to 255)\n# uint16 Unsigned integer (0 to 
65535)\n# uint32 Unsigned integer (0 to 2 ** 32 - 1)\n# uint64 Unsigned integer (0 to 2 ** 64 - 1)\n# float16 Half precision float: sign bit, 5 bits exponent, 10 bits mantissa\n# float32 Single precision float: sign bit, 8 bits exponent, 23 bits mantissa\n# float64 Double precision float: sign bit, 11 bits exponent, 52 bits mantissa\n\n\na = np.arange(7, dtype='f')\n# Integer i\n# Unsigned integer u\n# Single precision float f\n# Double precision float d\n# Boolean b\n# Complex D\n# String S\n# Unicode U\n# Void V\n\n\n\nx = np.arange(0,10,2) # x=([0,2,4,6,8])\ny = np.arange(5) # y=([0,1,2,3,4])\nm = np.vstack([x,y]) # m=([[0,2,4,6,8],\n # [0,1,2,3,4]])\nxy = np.hstack([x,y]) # xy =([0,2,4,6,8,0,1,2,3,4])",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
class Related(models.Model):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
class AbstractModel(models.Model):
bases = ProxyGenericRelation(Base, content_type_field='content_type',
object_id_field='content_id')
class Meta:
abstract = True
class ConcreteModel(AbstractModel):
pass
class Proxy(Related):
def some_method(self):
return True
class Meta:
proxy = True
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Related(models.Model):
bases = ProxyGenericRelation(Base, content_type_field='content_type',
object_id_field='content_id')
content = models.CharField(max_length=255)
class AbstractModel(models.Model):
bases = ProxyGenericRelation(Base, content_type_field='content_type',
object_id_field='content_id')
class Meta:
abstract = True
class ConcreteModel(AbstractModel):
pass
class Proxy(Related):
def some_method(self):
return True
class Meta:
proxy = True
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Base(models.Model):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
class Related(models.Model):
bases = ProxyGenericRelation(Base, content_type_field='content_type',
object_id_field='content_id')
content = models.CharField(max_length=255)
class AbstractModel(models.Model):
bases = ProxyGenericRelation(Base, content_type_field='content_type',
object_id_field='content_id')
class Meta:
abstract = True
class ConcreteModel(AbstractModel):
pass
class Proxy(Related):
def some_method(self):
return True
class Meta:
proxy = True
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Base(models.Model):
content_type = models.ForeignKey(ContentType)
content_id = models.PositiveIntegerField()
obj = ProxyGenericForeignKey('content_type', 'content_id')
class Related(models.Model):
bases = ProxyGenericRelation(Base, content_type_field='content_type',
object_id_field='content_id')
content = models.CharField(max_length=255)
class AbstractModel(models.Model):
bases = ProxyGenericRelation(Base, content_type_field='content_type',
object_id_field='content_id')
class Meta:
abstract = True
class ConcreteModel(AbstractModel):
pass
class Proxy(Related):
def some_method(self):
return True
class Meta:
proxy = True
<|reserved_special_token_1|>
from django.db import models
from django.contrib.contenttypes.models import ContentType
from widgy.generic import ProxyGenericForeignKey, ProxyGenericRelation
from django.contrib.contenttypes.generic import GenericForeignKey, GenericRelation
class Base(models.Model):
content_type = models.ForeignKey(ContentType)
content_id = models.PositiveIntegerField()
obj = ProxyGenericForeignKey('content_type', 'content_id')
class Related(models.Model):
bases = ProxyGenericRelation(Base,
content_type_field='content_type',
object_id_field='content_id')
content = models.CharField(max_length=255)
class AbstractModel(models.Model):
bases = ProxyGenericRelation(Base,
content_type_field='content_type',
object_id_field='content_id')
class Meta:
abstract = True
class ConcreteModel(AbstractModel):
pass
class Proxy(Related):
    """Proxy of ``Related``: same table, adds Python-level behavior only."""

    def some_method(self):
        # Marker method distinguishing the proxy from plain Related.
        return True

    class Meta:
        # proxy = True: no new table; reuses Related's storage.
        proxy = True
|
flexible
|
{
"blob_id": "c70df1fab0db6f71d22a23836b11d66879879656",
"index": 6336,
"step-1": "<mask token>\n\n\nclass Related(models.Model):\n <mask token>\n <mask token>\n\n\nclass AbstractModel(models.Model):\n bases = ProxyGenericRelation(Base, content_type_field='content_type',\n object_id_field='content_id')\n\n\n class Meta:\n abstract = True\n\n\nclass ConcreteModel(AbstractModel):\n pass\n\n\nclass Proxy(Related):\n\n def some_method(self):\n return True\n\n\n class Meta:\n proxy = True\n",
"step-2": "<mask token>\n\n\nclass Related(models.Model):\n bases = ProxyGenericRelation(Base, content_type_field='content_type',\n object_id_field='content_id')\n content = models.CharField(max_length=255)\n\n\nclass AbstractModel(models.Model):\n bases = ProxyGenericRelation(Base, content_type_field='content_type',\n object_id_field='content_id')\n\n\n class Meta:\n abstract = True\n\n\nclass ConcreteModel(AbstractModel):\n pass\n\n\nclass Proxy(Related):\n\n def some_method(self):\n return True\n\n\n class Meta:\n proxy = True\n",
"step-3": "<mask token>\n\n\nclass Base(models.Model):\n <mask token>\n <mask token>\n <mask token>\n\n\nclass Related(models.Model):\n bases = ProxyGenericRelation(Base, content_type_field='content_type',\n object_id_field='content_id')\n content = models.CharField(max_length=255)\n\n\nclass AbstractModel(models.Model):\n bases = ProxyGenericRelation(Base, content_type_field='content_type',\n object_id_field='content_id')\n\n\n class Meta:\n abstract = True\n\n\nclass ConcreteModel(AbstractModel):\n pass\n\n\nclass Proxy(Related):\n\n def some_method(self):\n return True\n\n\n class Meta:\n proxy = True\n",
"step-4": "<mask token>\n\n\nclass Base(models.Model):\n content_type = models.ForeignKey(ContentType)\n content_id = models.PositiveIntegerField()\n obj = ProxyGenericForeignKey('content_type', 'content_id')\n\n\nclass Related(models.Model):\n bases = ProxyGenericRelation(Base, content_type_field='content_type',\n object_id_field='content_id')\n content = models.CharField(max_length=255)\n\n\nclass AbstractModel(models.Model):\n bases = ProxyGenericRelation(Base, content_type_field='content_type',\n object_id_field='content_id')\n\n\n class Meta:\n abstract = True\n\n\nclass ConcreteModel(AbstractModel):\n pass\n\n\nclass Proxy(Related):\n\n def some_method(self):\n return True\n\n\n class Meta:\n proxy = True\n",
"step-5": "from django.db import models\nfrom django.contrib.contenttypes.models import ContentType\n\nfrom widgy.generic import ProxyGenericForeignKey, ProxyGenericRelation\nfrom django.contrib.contenttypes.generic import GenericForeignKey, GenericRelation\n\n\nclass Base(models.Model):\n content_type = models.ForeignKey(ContentType)\n content_id = models.PositiveIntegerField()\n obj = ProxyGenericForeignKey('content_type', 'content_id')\n\n\nclass Related(models.Model):\n bases = ProxyGenericRelation(Base,\n content_type_field='content_type',\n object_id_field='content_id')\n\n content = models.CharField(max_length=255)\n\n\nclass AbstractModel(models.Model):\n bases = ProxyGenericRelation(Base,\n content_type_field='content_type',\n object_id_field='content_id')\n class Meta:\n abstract = True\n\nclass ConcreteModel(AbstractModel):\n pass\n\nclass Proxy(Related):\n def some_method(self):\n return True\n\n class Meta:\n proxy = True\n",
"step-ids": [
6,
7,
8,
9,
11
]
}
|
[
6,
7,
8,
9,
11
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def parse_args(argv=None):
    """Parse command-line arguments for the baseline Mask R-CNN script.

    :param argv: Optional list of argument strings; defaults to ``sys.argv``.
        (Added with a default so existing callers are unaffected and the
        function is testable without patching ``sys.argv``.)
    :return: Populated :class:`argparse.Namespace`.
    """
    parser = argparse.ArgumentParser(description='baseline Mask R-CNN')
    parser.add_argument('--dataset', required=True, metavar=
        '/path/to/dataset/', help='Directory of the dataset')
    parser.add_argument('--continue_train', type=str, required=False,
        default='None', metavar='/path/to/latest/weights.h5', help=
        'Path to latest training weights .h5 file')
    parser.add_argument('--weight', required=False, metavar=
        '/path/to/pretrained/weight.h5', help='Path to trained weight')
    parser.add_argument('--image', required=False, metavar=
        '/path/to/testing/image/directory', help=
        'Path to testing image directory')
    # Fixed copy-paste bug: --video previously claimed to be an image directory.
    parser.add_argument('--video', required=False, metavar=
        '/path/to/testing/video/directory', help=
        'Path to testing video directory')
    return parser.parse_args(argv)
<|reserved_special_token_1|>
import argparse
def parse_args(argv=None):
    """Parse command-line arguments for the baseline Mask R-CNN script.

    :param argv: Optional list of argument strings; defaults to ``sys.argv``.
        (Added with a default so existing callers are unaffected and the
        function is testable without patching ``sys.argv``.)
    :return: Populated :class:`argparse.Namespace`.
    """
    parser = argparse.ArgumentParser(description='baseline Mask R-CNN')
    parser.add_argument('--dataset', required=True, metavar=
        '/path/to/dataset/', help='Directory of the dataset')
    parser.add_argument('--continue_train', type=str, required=False,
        default='None', metavar='/path/to/latest/weights.h5', help=
        'Path to latest training weights .h5 file')
    parser.add_argument('--weight', required=False, metavar=
        '/path/to/pretrained/weight.h5', help='Path to trained weight')
    parser.add_argument('--image', required=False, metavar=
        '/path/to/testing/image/directory', help=
        'Path to testing image directory')
    # Fixed copy-paste bug: --video previously claimed to be an image directory.
    parser.add_argument('--video', required=False, metavar=
        '/path/to/testing/video/directory', help=
        'Path to testing video directory')
    return parser.parse_args(argv)
<|reserved_special_token_1|>
import argparse
def parse_args(argv=None):
    """Parse command-line arguments for the baseline Mask R-CNN script.

    :param argv: Optional list of argument strings; defaults to ``sys.argv``.
        (Added with a default so existing callers are unaffected and the
        function is testable without patching ``sys.argv``.)
    :return: Populated :class:`argparse.Namespace`.
    """
    parser = argparse.ArgumentParser(description='baseline Mask R-CNN')
    parser.add_argument('--dataset', required=True,
                        metavar="/path/to/dataset/",
                        help='Directory of the dataset')
    parser.add_argument('--continue_train', type=str, required=False, default='None',
                        metavar="/path/to/latest/weights.h5", help="Path to latest training weights .h5 file")
    parser.add_argument('--weight', required=False,
                        metavar='/path/to/pretrained/weight.h5', help="Path to trained weight")
    parser.add_argument('--image', required=False,
                        metavar='/path/to/testing/image/directory', help="Path to testing image directory")
    # Fixed copy-paste bug: --video previously claimed to be an image directory.
    parser.add_argument('--video', required=False,
                        metavar='/path/to/testing/video/directory', help="Path to testing video directory")
    return parser.parse_args(argv)
|
flexible
|
{
"blob_id": "b6527a09f346ee1b7dd446a0ff21995a995481a8",
"index": 6640,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef parse_args():\n \"\"\"\n Parse command-line arguments to train and evaluate a multimodal network for activity recognition on MM-Fit.\n :return: Populated namespace.\n \"\"\"\n parser = argparse.ArgumentParser(description='baseline Mask R-CNN')\n parser.add_argument('--dataset', required=True, metavar=\n '/path/to/dataset/', help='Directory of the dataset')\n parser.add_argument('--continue_train', type=str, required=False,\n default='None', metavar='/path/to/latest/weights.h5', help=\n 'Path to lastest training weights .h5 file')\n parser.add_argument('--weight', required=False, metavar=\n '/path/to/pretrained/weight.h5', help='Path to trained weight')\n parser.add_argument('--image', required=False, metavar=\n '/path/to/testing/image/directory', help=\n 'Path to testing image directory')\n parser.add_argument('--video', required=False, metavar=\n '/path/to/testing/image/directory', help=\n 'Path to testing image directory')\n return parser.parse_args()\n",
"step-3": "import argparse\n\n\ndef parse_args():\n \"\"\"\n Parse command-line arguments to train and evaluate a multimodal network for activity recognition on MM-Fit.\n :return: Populated namespace.\n \"\"\"\n parser = argparse.ArgumentParser(description='baseline Mask R-CNN')\n parser.add_argument('--dataset', required=True, metavar=\n '/path/to/dataset/', help='Directory of the dataset')\n parser.add_argument('--continue_train', type=str, required=False,\n default='None', metavar='/path/to/latest/weights.h5', help=\n 'Path to lastest training weights .h5 file')\n parser.add_argument('--weight', required=False, metavar=\n '/path/to/pretrained/weight.h5', help='Path to trained weight')\n parser.add_argument('--image', required=False, metavar=\n '/path/to/testing/image/directory', help=\n 'Path to testing image directory')\n parser.add_argument('--video', required=False, metavar=\n '/path/to/testing/image/directory', help=\n 'Path to testing image directory')\n return parser.parse_args()\n",
"step-4": "import argparse\n\n\ndef parse_args():\n \"\"\"\n Parse command-line arguments to train and evaluate a multimodal network for activity recognition on MM-Fit.\n :return: Populated namespace.\n \"\"\"\n parser = argparse.ArgumentParser(description='baseline Mask R-CNN')\n parser.add_argument('--dataset', required=True,\n metavar=\"/path/to/dataset/\",\n help='Directory of the dataset')\n parser.add_argument('--continue_train', type=str, required=False, default='None',\n metavar=\"/path/to/latest/weights.h5\", help=\"Path to lastest training weights .h5 file\")\n parser.add_argument('--weight', required=False,\n metavar='/path/to/pretrained/weight.h5', help=\"Path to trained weight\")\n parser.add_argument('--image', required=False,\n metavar='/path/to/testing/image/directory', help=\"Path to testing image directory\")\n parser.add_argument('--video', required=False,\n metavar='/path/to/testing/image/directory', help=\"Path to testing image directory\")\n return parser.parse_args()\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Migration(migrations.Migration):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Migration(migrations.Migration):
    """Add the ``my_resume`` choice field to the ``user`` model."""

    dependencies = [('user', '0001_initial')]
    # NOTE(review): default=True on a CharField looks wrong — a string
    # default (e.g. '') is expected here; confirm the intended value.
    operations = [migrations.AddField(model_name='user', name='my_resume',
        field=models.CharField(choices=[('', ''), ('삼성전자', '삼성전자')],
        default=True, max_length=80))]
<|reserved_special_token_1|>
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add the ``my_resume`` choice field to the ``user`` model."""

    dependencies = [('user', '0001_initial')]
    # NOTE(review): default=True on a CharField looks wrong — a string
    # default (e.g. '') is expected here; confirm the intended value.
    operations = [migrations.AddField(model_name='user', name='my_resume',
        field=models.CharField(choices=[('', ''), ('삼성전자', '삼성전자')],
        default=True, max_length=80))]
<|reserved_special_token_1|>
# Generated by Django 2.2.5 on 2019-10-28 08:45
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add the ``my_resume`` choice field to the ``user`` model."""
    dependencies = [
        ('user', '0001_initial'),
    ]
    # NOTE(review): default=True on a CharField looks wrong — a string
    # default (e.g. '') is expected here; confirm the intended value.
    operations = [
        migrations.AddField(
            model_name='user',
            name='my_resume',
            field=models.CharField(choices=[('', ''), ('삼성전자', '삼성전자')], default=True, max_length=80),
        ),
    ]
|
flexible
|
{
"blob_id": "32c28c7a1e1572744387b509fc6a448554ed565e",
"index": 3445,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass Migration(migrations.Migration):\n <mask token>\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass Migration(migrations.Migration):\n dependencies = [('user', '0001_initial')]\n operations = [migrations.AddField(model_name='user', name='my_resume',\n field=models.CharField(choices=[('', ''), ('삼성전자', '삼성전자')],\n default=True, max_length=80))]\n",
"step-4": "from django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n dependencies = [('user', '0001_initial')]\n operations = [migrations.AddField(model_name='user', name='my_resume',\n field=models.CharField(choices=[('', ''), ('삼성전자', '삼성전자')],\n default=True, max_length=80))]\n",
"step-5": "# Generated by Django 2.2.5 on 2019-10-28 08:45\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('user', '0001_initial'),\n ]\n\n operations = [\n migrations.AddField(\n model_name='user',\n name='my_resume',\n field=models.CharField(choices=[('', ''), ('삼성전자', '삼성전자')], default=True, max_length=80),\n ),\n ]\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
class Solution:

    def eventualSafeNodes(self, graph: List[List[int]]) ->List[int]:
        """Return, in ascending order, every node from which all paths
        eventually reach a terminal (no-outgoing-edge) node.

        graph[v] lists the direct successors of node v.
        """
        safe_cache = {}   # node -> memoized safety verdict
        on_stack = set()  # nodes entered by the DFS (cycle detection)

        def is_safe(v):
            # Terminal nodes are safe by definition.
            if not graph[v]:
                return True
            if v in safe_cache:
                return safe_cache[v]
            # Re-entering a node already on the DFS path means a cycle.
            if v in on_stack:
                return False
            on_stack.add(v)
            for nxt in graph[v]:
                if is_safe(nxt) == False:
                    safe_cache[v] = False
                    return False
            safe_cache[v] = True
            return True

        return [v for v in range(len(graph)) if is_safe(v)]
|
normal
|
{
"blob_id": "b815f72e2cad351fd9411361a0e7cc75d39ae826",
"index": 9270,
"step-1": "<mask token>\n",
"step-2": "class Solution:\n <mask token>\n",
"step-3": "class Solution:\n\n def eventualSafeNodes(self, graph: List[List[int]]) ->List[int]:\n res = []\n d = {}\n\n def dfs(node):\n if graph[node] == []:\n return True\n if node in d:\n return d[node]\n if node in visit:\n return False\n visit.add(node)\n for nei in graph[node]:\n if dfs(nei) == False:\n d[node] = False\n return False\n d[node] = True\n return True\n visit = set()\n for i in range(len(graph)):\n if dfs(i):\n res.append(i)\n return res\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
# Read each person's accusations. Variables n, villagers, peoples and
# susList are defined earlier (outside this fragment).
for i in range(n):
    # Person i+1's accusation set; the first input token (a count) is dropped.
    peeps = set(list(map(int, input().split()))[1:])
    villagers[i + 1] = villagers.get(i + 1, set())
    for p in peeps:
        # Note: this condition is loop-invariant — a self-accuser is marked
        # suspicious on the first iteration and none of their votes count.
        if i + 1 in peeps:
            susList.add(i + 1)
            break
        villagers[p] = villagers.get(p, set()) | {i + 1}
    peoples.append(peeps)
<|reserved_special_token_0|>
# Propagate suspicion: anyone who voted for a confirmed suspect becomes a
# suspect too, and their recorded votes are cleared.
while queue:
    s = queue.pop()
    queue.extend(list(villagers[s]))
    susList |= set(villagers[s])
    villagers[s] = set()
# Discredit every vote cast by a suspect.
for s in susList:
    for p in peoples[s - 1]:
        try:
            villagers[p].remove(s)
        except:
            # NOTE(review): bare except — presumably only KeyError (vote
            # already removed / unknown target) is expected here; confirm.
            pass
# Print one 0/1 line per villager in id order; presumably 1 marks a villager
# with enough credible accusations to be an imposter — confirm against the
# problem statement.
for k, v in sorted(villagers.items(), key=lambda x: x[0]):
    if imp - len(susList) >= (n - len(susList)) // 2:
        print(0)
    elif k in susList:
        print(0)
    elif len(v) >= imp - len(susList):
        print(1)
    else:
        print(0)
<|reserved_special_token_1|>
# Imposter-vote puzzle: read n accusation lists, mark self-accusers as
# confirmed imposters, propagate suspicion, then print a 0/1 verdict line
# per villager.
n, imp = map(int, input().split())
# villagers[v] -> set of people who voted for (accused) v
villagers = {}
# peoples[i] -> accusation set of person i+1
peoples = []
# Confirmed imposters (self-accusers plus propagation below).
susList = set()
for i in range(n):
    # First input token is a count; drop it.
    peeps = set(list(map(int, input().split()))[1:])
    villagers[i + 1] = villagers.get(i + 1, set())
    # Hoisted out of the vote loop (it was loop-invariant): a self-accuser
    # is a confirmed imposter and none of their votes are recorded.
    if i + 1 in peeps:
        susList.add(i + 1)
    else:
        for p in peeps:
            villagers[p] = villagers.get(p, set()) | {i + 1}
    peoples.append(peeps)
# Propagate: anyone who voted for a confirmed imposter is one too.
queue = [s for s in susList]
while queue:
    s = queue.pop()
    queue.extend(list(villagers[s]))
    susList |= set(villagers[s])
    villagers[s] = set()
# Discredit every vote cast by an imposter.
for s in susList:
    for p in peoples[s - 1]:
        try:
            villagers[p].remove(s)
        except KeyError:  # narrowed from a bare except: only a missing
            pass          # key/element is expected here
# One 0/1 verdict line per villager in id order.
for k, v in sorted(villagers.items(), key=lambda x: x[0]):
    if imp - len(susList) >= (n - len(susList)) // 2:
        print(0)
    elif k in susList:
        print(0)
    elif len(v) >= imp - len(susList):
        print(1)
    else:
        print(0)
<|reserved_special_token_1|>
# Imposter-vote puzzle: read n accusation lists, mark self-accusers as
# confirmed imposters, propagate suspicion, then print a 0/1 verdict line
# per villager.
n, imp = map(int, input().split())
# villagers[v] -> set of people who voted for (accused) v
villagers = {}
# peoples[i] -> accusation set of person i+1
peoples = []
# Confirmed imposters (self-accusers plus propagation below).
susList = set()
for i in range(n):
    # First input token is a count; drop it.
    peeps = set(list(map(int, input().split()))[1:])
    # Initialize the vote set for this villager.
    villagers[i + 1] = villagers.get(i + 1, set())
    # Hoisted out of the vote loop (it was loop-invariant): a self-accuser
    # is a confirmed imposter and none of their votes are recorded.
    if i + 1 in peeps:
        susList.add(i + 1)
    else:
        for p in peeps:
            villagers[p] = villagers.get(p, set()) | {i + 1}
    peoples.append(peeps)

# Confirmed imposters: everyone that voted for one is an imposter too.
queue = [s for s in susList]
while queue:
    s = queue.pop()
    queue.extend(list(villagers[s]))
    susList |= set(villagers[s])
    villagers[s] = set()

# Discredit all imposter votes.
for s in susList:
    for p in peoples[s - 1]:
        try:
            villagers[p].remove(s)
        except KeyError:  # narrowed from a bare except: only a missing
            pass          # key/element is expected here

# One 0/1 verdict line per villager in id order.
for k, v in sorted(villagers.items(), key=lambda x: x[0]):
    if imp - len(susList) >= (n - len(susList)) // 2:
        print(0)
    elif k in susList:
        print(0)
    elif len(v) >= imp - len(susList):
        print(1)
    else:
        print(0)
|
flexible
|
{
"blob_id": "3eca3066a6c6484257ca17164d35654812a87b80",
"index": 6636,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nfor i in range(n):\n peeps = set(list(map(int, input().split()))[1:])\n villagers[i + 1] = villagers.get(i + 1, set())\n for p in peeps:\n if i + 1 in peeps:\n susList.add(i + 1)\n break\n villagers[p] = villagers.get(p, set()) | {i + 1}\n peoples.append(peeps)\n<mask token>\nwhile queue:\n s = queue.pop()\n queue.extend(list(villagers[s]))\n susList |= set(villagers[s])\n villagers[s] = set()\nfor s in susList:\n for p in peoples[s - 1]:\n try:\n villagers[p].remove(s)\n except:\n pass\nfor k, v in sorted(villagers.items(), key=lambda x: x[0]):\n if imp - len(susList) >= (n - len(susList)) // 2:\n print(0)\n elif k in susList:\n print(0)\n elif len(v) >= imp - len(susList):\n print(1)\n else:\n print(0)\n",
"step-3": "n, imp = list(map(int, input().split()))\nvillagers = {}\npeoples = []\nsusList = set()\nfor i in range(n):\n peeps = set(list(map(int, input().split()))[1:])\n villagers[i + 1] = villagers.get(i + 1, set())\n for p in peeps:\n if i + 1 in peeps:\n susList.add(i + 1)\n break\n villagers[p] = villagers.get(p, set()) | {i + 1}\n peoples.append(peeps)\nqueue = [s for s in susList]\nwhile queue:\n s = queue.pop()\n queue.extend(list(villagers[s]))\n susList |= set(villagers[s])\n villagers[s] = set()\nfor s in susList:\n for p in peoples[s - 1]:\n try:\n villagers[p].remove(s)\n except:\n pass\nfor k, v in sorted(villagers.items(), key=lambda x: x[0]):\n if imp - len(susList) >= (n - len(susList)) // 2:\n print(0)\n elif k in susList:\n print(0)\n elif len(v) >= imp - len(susList):\n print(1)\n else:\n print(0)\n",
"step-4": "n, imp = list(map(int, input().split()))\nvillagers = {}\npeoples = []\nsusList = set()\nfor i in range(n):\n peeps = set(list(map(int, input().split()))[1:])\n # Initialize the set\n villagers[i+1] = villagers.get(i+1, set())\n for p in peeps:\n if i+1 in peeps:\n susList.add(i+1)\n break\n villagers[p] = villagers.get(p, set()) | {i+1}\n peoples.append(peeps)\n\n# Confirmed imposters\nqueue = [s for s in susList]\nwhile queue:\n # Everyone that voted for them is an imposter\n s = queue.pop()\n queue.extend(list(villagers[s]))\n susList |= set(villagers[s])\n villagers[s] = set()\n\n# Discredit all imposter votes\nfor s in susList:\n for p in peoples[s-1]:\n try:\n villagers[p].remove(s)\n except:\n pass\n\n\n\nfor k, v in sorted(villagers.items(), key=lambda x: x[0]):\n if imp - len(susList) >= (n- len(susList)) // 2:\n print(0)\n elif k in susList:\n print(0)\n elif len(v) >= imp - len(susList):\n print(1)\n else:\n print(0)\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.