index int64 0 1,000k | blob_id stringlengths 40 40 | code stringlengths 7 10.4M |
|---|---|---|
983,900 | 38e585906b30a5be0cfe8eb5935ca5957d7dddba | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from app import db
from app.models import User
import hashlib
# Initialize the database schema and seed a default admin account.
db.create_all()
# create admin user
# NOTE(review): MD5 is not a safe password hash; prefer a salted KDF
# (e.g. werkzeug.security.generate_password_hash) if User supports it.
m = hashlib.md5()
m.update('123456'.encode('utf-8'))  # hashlib requires bytes on Python 3
pwd = m.hexdigest()
admin = User('admin', pwd)
db.session.add(admin)
db.session.commit()
983,901 | dfc695bfda79e7bb0318da8553fd94046b5b107c | import numpy as np
import matplotlib.pyplot as plt
import math
# Vibration in a one-dimensional system
# System parameters: spring constant k, mass m, driving-force amplitude c.
k=1
m=1
c=1
# Right-hand-side vectors for the 40-mass chain:
# b1 drives only the first mass, b2 drives both end masses.
b1=np.zeros((40,1))
b1[0][0]=c
b2=np.zeros((40,1))
b2[0][0]=c
b2[39][0]=c
def f(w, n=40, k=1, m=1):
    """Build the n x n tridiagonal system matrix for driving frequency w.

    Interior masses get the diagonal d = 2k - m*w**2; the two end masses
    have only one neighbouring spring and therefore get d - k.  The
    off-diagonals carry the coupling -k.  Defaults reproduce the original
    hard-coded 40-mass, k = m = 1 system.
    """
    d = 2 * k - m * (w ** 2)
    a = np.zeros((n, n))
    for i in range(n):
        if i == 0:
            a[i][i] = d - k
            a[i][i + 1] = -k
        elif i == n - 1:
            a[i][i - 1] = -k
            a[i][i] = d - k
        else:
            a[i][i - 1] = -k
            a[i][i] = d
            a[i][i + 1] = -k
    return a
def _plot_response(rhs, title):
    """Solve the 40-mass system for w = 1, 2, 3 against rhs and plot the
    resulting amplitude of each mass (shared body of homework1/homework2)."""
    x = np.arange(0, 40)
    for w in (1, 2, 3):
        amplitudes = np.linalg.solve(f(w), rhs)
        plt.plot(x, [amplitudes[i][0] for i in range(40)], label="w=%d" % w)
    plt.legend(loc="upper right")
    plt.xlabel("i")
    plt.ylabel("a_i")
    plt.title(title)
    plt.xlim(0, 40)
    plt.show()

def homework1():
    """Amplitude response when the driving force is applied to one end (b1)."""
    _plot_response(b1, "Vibration of a one-dimensional system with force applied to one end")

def homework2():
    """Amplitude response when the driving force is applied to both ends (b2)."""
    _plot_response(b2, "Vibration of a one-dimensional system with force applied to both ends")
# Run both experiments when the script is executed (each opens a plot window).
homework1()
homework2()
# w=1 일경우
# 한쪽 끝에서만 힘을 가할 경우 (3n+2) 번쨰 물체들은 진동하지 않고 (0,1),(6,7),(12,13),....번째 물체들과
# (3,4),(9,10),(15,16),...번째 물체들은 진폭은 서로 같으나 방향이 서로 다른 진동을 한다.
# 양쪽 끝에서만 힘을 가할경우 3n번 째 물체들은 진동하지 않고 1,2,7,8,13,14,..번 째 물체들과
#4,5,10,11,16,17,....번 째 물체들은 진폭은 서로 같으나 방향이 서로 다른 진동을 한다.
# w= 2일경우
# 한쪽 끝에서만 힘을 가할경우 힘을 가하는 원천에서 멀어지면 멀어질수록 진폭이 감소한다.
# 양 끝에서 힘을 가할경우 중앙에서 진동이 상쇄되어 중앙의 진폭은 0이고 중앙에서 멀어질수록 상쇄되는 양이 줄어들어 진폭이 커진다.
# w=3 일경우
# 한 쪽 끝에서만 힘을 가할경우 n=0과 n=1번째 물체만 진동하고 나머지 물체는 진동하지 않는다.
# 양 쪽 끝에서 힘을 가할경우 n=0 n=1 n=38 n=39 번째 물체만 진동하고 나머지 물체는 진동하지 않는다.
|
983,902 | 54a7ba10bef5b8005e0d9f9d1a7701f79bf32d65 | # Generated by Django 2.0.2 on 2018-05-13 22:52
import datetime
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated schema migration for the 'tienda' app.

    Renames FK-style fields to conventional names (factura ->
    talonario_factura, id_producto -> producto, id_cab_venta -> venta)
    and adds invoice number, date and IVA-total columns to VentaCabecera.
    """

    dependencies = [
        ('tienda', '0011_auto_20180513_0004'),
    ]
    operations = [
        migrations.RenameField(
            model_name='ventacabecera',
            old_name='factura',
            new_name='talonario_factura',
        ),
        migrations.RenameField(
            model_name='ventadetalle',
            old_name='id_producto',
            new_name='producto',
        ),
        migrations.RenameField(
            model_name='ventadetalle',
            old_name='id_cab_venta',
            new_name='venta',
        ),
        migrations.AddField(
            model_name='ventacabecera',
            name='fecha',
            field=models.DateField(default=datetime.date.today),
        ),
        migrations.AddField(
            model_name='ventacabecera',
            name='nro_factura',
            field=models.CharField(default='', max_length=150),
        ),
        migrations.AddField(
            model_name='ventacabecera',
            name='total_iva',
            field=models.IntegerField(default=0),
        ),
        migrations.AddField(
            model_name='ventacabecera',
            name='total_iva_10',
            field=models.IntegerField(default=0),
        ),
        migrations.AddField(
            model_name='ventacabecera',
            name='total_iva_5',
            field=models.IntegerField(default=0),
        ),
    ]
|
983,903 | ce8a6b98d6a097e611b5b1059c11aad249f159fd | from os import listdir
from os.path import isfile, join
from operator import itemgetter, attrgetter
# Collect the names of regular files (not directories) in the current directory.
onlyfiles = []
for entry in listdir("."):
    if isfile(join(".", entry)):
        onlyfiles.append(entry)
print(onlyfiles)
983,904 | d036a9ca3adc576529ec0a1a3410974fd3932af9 | from statistics import mean
from xmlrpc.server import SimpleXMLRPCServer
from xmlrpc.server import SimpleXMLRPCRequestHandler
import threading
import xmlrpc.client
class RequestHandler(SimpleXMLRPCRequestHandler):
    """Restrict XML-RPC requests to the conventional /RPC2 path."""
    rpc_paths = ('/RPC2',)
class ServerThread(threading.Thread):
    """Run a SimpleXMLRPCServer on localhost:<port> in its own thread."""
    def __init__(self, port):
        threading.Thread.__init__(self)
        # Bind immediately so functions can be registered before start().
        self.localServer = SimpleXMLRPCServer(("localhost", port), requestHandler=RequestHandler)
        self.localServer.register_introspection_functions()
    def run(self):
        # Blocks this thread until shutdown() is called on the server.
        self.localServer.serve_forever()
# Three servers: 'main' fronts the API; s1/s2 host subsets of the operations.
main = ServerThread(8000)
s1 = ServerThread(8001)
s2 = ServerThread(8002)
# S1-functions
class S1Funcs:
    """Arithmetic operations exposed by server S1 over XML-RPC."""

    def mul(self, x, y):
        """Return the product of x and y."""
        return x * y

    def divide(self, x, y):
        """Return x / y.

        Raises ZeroDivisionError (marshalled to the client as an XML-RPC
        fault) when y is zero.  The original returned the exception *class*,
        which is not marshallable and produced an opaque server error.
        """
        if y != 0:
            return x / y
        raise ZeroDivisionError("division by zero")
# Expose S1Funcs on :8001 and the builtin pow on :8002, then start both.
s1.localServer.register_instance(S1Funcs())
s1.start()
s2.localServer.register_function(pow)
s2.start()
# Client proxies from Main to S1 and S2
c1 = xmlrpc.client.ServerProxy('http://localhost:8001')
c2 = xmlrpc.client.ServerProxy('http://localhost:8002')
# Main-server functions.
class MainFuncs:
    """Operations served on :8000; mul/divide/pow are delegated to S1/S2."""
    def add(self, x, y):
        return x + y
    def sub(self, x, y):
        return x - y
    # Pass on to S1-proxy
    def divide(self, x, y):
        return c1.divide(x, y)
    def mul(self, x, y):
        return c1.mul(x, y)
    # Pass on to S2-proxy
    def pow(self, x, y):
        return c2.pow(x, y)
# Register the facade and start the main server; the non-daemon server
# threads keep the process alive until interrupted.
main.localServer.register_instance(MainFuncs())
main.start()
|
983,905 | 24dcbcc98c1d61121c44de7490455b26024dc4c6 | # -*- coding: utf-8 -*-
"""
Created on Mon Dec 4 13:15:21 2017
@author: Pulu
"""
import numpy as np
import copy
import tkinter as tk
import tkinter.filedialog as tkf
import tkinter.scrolledtext as tks
import os
def read_file(file_path,pic_row,pic_column):
    """Parse a text file of pictures into a list of flattened +/-1 row vectors.

    Each input line is one picture row; a '1' character marks an active
    pixel (+1), every other position stays -1.
    NOTE(review): the separator test below is `pic_row == row-1`, i.e. a new
    picture starts after pic_row+1 lines -- this assumes one extra
    (blank/separator) line per picture in the file format; confirm against
    the data files.
    """
    pic = np.matrix(np.full(pic_row*pic_column,-1))
    pic = pic.reshape([pic_row,pic_column])
    pics = [copy.deepcopy(pic)]
    row = 0
    pic_num = 0
    training_file = open(file_path,"r")
    for i in training_file.readlines():
        for column in range(len(i)):
            if(i[column] == "1"):
                pics[pic_num][row,column] = 1
        row += 1
        if(pic_row == row-1):
            row = 0
            pics.append(copy.deepcopy(pic))
            # flatten the finished picture into a 1 x (rows*cols) vector
            pics[pic_num] = pics[pic_num].ravel()
            pic_num += 1
    pics[len(pics)-1] = pics[len(pics)-1].ravel()
    return pics
def hopfield_memory(pics,pic_row,pic_column):
    """Build the Hopfield weight matrix and per-neuron thresholds from the
    stored patterns (sum of outer products, zeroed self-connections via the
    identity correction, scaled by the pattern length)."""
    size = pic_row * pic_column
    outer_sum = np.matrix(np.zeros((size, size)))
    for pattern in pics:
        outer_sum = outer_sum + pattern.getT() * pattern
    scale = len(pics[0].getT())
    w = outer_sum * (1.0 / scale) - np.identity(size) * (len(pics) / scale)
    t = w.sum(axis=1)
    return w, t
def hopfield_test(picture_memory,picture_testing,weights,theta):
    """Recall each test picture and report which stored picture it reached.

    Returns a list of [test_index, memory_index] pairs.  Neurons are
    updated asynchronously (one element at a time) for at most 1000 sweeps
    per picture.
    NOTE(review): if a test picture never converges to a stored one, no
    pair is recorded for it, and the initial [0, 0] entry is only
    overwritten when picture 0 itself is recalled.
    """
    recall_result = [[0,0]]
    for pic_num in range(len(picture_testing)):
        x = copy.deepcopy(picture_testing[pic_num])
        recall = False
        training_times = 0
        while((not recall) and not (training_times > 1000)):
            training_times += 1
            # asynchronous update: each element follows the sign of its net input
            for xj in range(x.size):
                if((weights[xj]*x.getT()-theta[xj])>0):
                    x[0,xj] = 1
                elif((weights[xj]*x.getT()-theta[xj])<0):
                    x[0,xj] = -1
                else:
                    pass
            # check whether the state now matches one of the stored pictures
            for mem in range(len(picture_memory)):
                if(np.any((x-picture_memory[mem]))):
                    pass
                else:
                    if(pic_num==0):
                        recall_result[0] = [pic_num,mem]
                    else:
                        recall_result.append([pic_num,mem])
                    recall = True
                    break
    #print(picture_memory[recall_pic].reshape([pic_rows,pic_columns]),recall_pic)
    #print(picture_testing[pic_num].reshape([pic_rows,pic_columns]))
    #print(pic_num,recall_pic)
    return recall_result
# Default picture dimensions; training() reads the actual values from the
# row/column text boxes (its locals shadow these module-level names).
pic_rows = 13
pic_columns = 9
# --- Tk window and widgets ---
win = tk.Tk()
win.title("Hopfield")
win.geometry("800x800")
win.resizable(False,False)
path_f1 = tk.StringVar()  # training-pattern file path
path_f2 = tk.StringVar()  # test-pattern file path
path_label1 = tk.Label(win,textvariable = path_f1)
path_label2 = tk.Label(win,textvariable = path_f2)
row_label = tk.Label(win,text="row")
column_label = tk.Label(win,text="column")
row_text = tk.Text(win,height=1,width=10)
column_text = tk.Text(win,height=1,width=10)
def open_file():
    """Prompt for the training file, then the test file, storing both paths."""
    openf = tkf.askopenfilename(filetypes = (("Template files", "*.txt"),("HTML files", "*.html;*.htm"),("All files", "*.*") ))
    if(openf):
        path_f1.set(os.path.abspath(openf))
    openf = tkf.askopenfilename(filetypes = (("Template files", "*.txt"),("HTML files", "*.html;*.htm"),("All files", "*.*") ))
    if(openf):
        path_f2.set(os.path.abspath(openf))
# NOTE: rebinding 'open_file' to the Button shadows the handler defined
# above; the Button's command already holds a reference to the function.
open_file = tk.Button(win,text="open file",command = open_file)
def training():
    """Read the GUI inputs, train the Hopfield net, run recall on the test
    file and render both pictures of each recalled pair into the scroll box."""
    # local values read from the text boxes shadow the module-level defaults
    pic_rows = int(row_text.get("1.0","end"))
    pic_columns = int(column_text.get("1.0","end"))
    picture_memory = read_file(path_f1.get(),pic_rows,pic_columns)
    picture_testing = read_file(path_f2.get(),pic_rows,pic_columns)
    weights,theta = hopfield_memory(picture_memory,pic_rows,pic_columns)
    recall_result = hopfield_test(picture_memory,picture_testing,weights,theta)
    scroll.delete("1.0","end")
    for j in recall_result:
        output = ""
        output += "pic : " + str(j[0]) + "\n"
        # draw the test picture: filled square for +1, empty for -1
        for i in range(picture_testing[j[0]][0].size):
            if(picture_testing[j[0]][0,i]==1):
                output += "■"
            else:
                output += "□"
            if((i+1)%pic_columns==0):
                output += "\n"
        output += "\n"
        # draw the stored picture it was recalled as
        for i in range(picture_memory[j[1]][0].size):
            if(picture_memory[j[1]][0,i]==1):
                output += "■"
            else:
                output += "□"
            if((i+1)%pic_columns==0):
                output += "\n"
        output += "\n\n\n\n\n"
        scroll.insert("insert",output)
btn_train = tk.Button(win,text="Start",command = training)
scroll = tks.ScrolledText(win,height=100)
# lay the widgets out top-to-bottom and start the Tk event loop
path_label1.pack()
path_label2.pack()
open_file.pack()
btn_train.pack()
row_label.pack()
row_text.pack()
column_label.pack()
column_text.pack()
scroll.pack()
win.mainloop()
983,906 | a08b4b314941e1ddb17a6240a886c4692bc6ca4b | import pandas as pd
import json
from sklearn import linear_model
# Car brands the model knows about; the list index is the menu choice.
brands = [
    "Citroen",
    "Ford",
    "Chevrolet",
    "Honda",
    "Hyundai",
    "Mitsubishi",
    "Peugeot",
    "Renault",
    "Toyota",
    "VolksWagen"
]
# Column names for the per-brand rows in freg.json.
features_name = [
    "potencia",
    "idade",
    "preco"
]
# Loaded once at import time; freg.json is assumed to map each brand name
# to a list of [potencia, idade, preco] rows -- TODO confirm schema.
with open('freg.json') as car_info:
    data = json.load(car_info)
if __name__ == '__main__':
    # Interactive loop: pick a brand, fit price ~ potencia + idade on that
    # brand's rows, then predict the price for the entered configuration.
    new_rec = True
    while new_rec:
        pre_brand = -1
        pre_pot = -1
        pre_price = -1
        pre_year = -1
        for index, brand in enumerate(brands):
            print(f'[{index}] {brand}')
        print()
        while pre_brand < 0 or pre_brand >= len(brands):
            pre_brand = int(input('Marca: '))
        data_aux = data[brands[pre_brand]]
        df = pd.DataFrame(data_aux)
        df.columns = features_name
        # print(df)
        # NOTE(review): the `normalize` keyword was removed from
        # LinearRegression in scikit-learn 1.2; this call requires an
        # older sklearn release.
        reg = linear_model.LinearRegression(fit_intercept=True, normalize=False)
        reg.fit(df[['potencia', 'idade']], df.preco)
        while pre_pot < 1:
            pre_pot = float(input('Cilindradas: '))
        while pre_year < 0:
            pre_year = int(input('Tempo do carro (anos): '))
        ### coeficiente de cada feature na equacao de regressao
        # print(reg.coef_)
        ### constante da equacao
        # print(reg.intercept_)
        resp = reg.predict([[pre_pot, pre_year]])
        ### Comparar todos
        # print(reg.score(df[['potencia', 'idade']], df.preco))
        # pred = reg.predict(df[['potencia', 'idade']])
        # compare = pd.DataFrame({'Real': df.preco, 'Estimado': pred.flatten()})
        # print(compare)
        print("\nO preço estimado para essa configuração é R$ %.2f\n" % (resp[0]))
        answ = ""
        while answ != "S" and answ != "s" and answ != "N" and answ != "n":
            answ = input('Deseja realizar uma nova consulta? (S/n) ')
        if answ == "S" or answ == "s":
            new_rec = True
        else:
            new_rec = False
|
983,907 | f0bb1aa3dc2c045065dfd4121484b4c5ed9b0c2d | import requests
import re
import os
import os.path
import time
import vk_api
from functools import reduce
from math import ceil
from random import randint
from time import sleep
def remove_chars(value, chars_to_remove):
    """Return value with every occurrence of each entry in chars_to_remove
    stripped out (applied left to right)."""
    return reduce(lambda acc, piece: acc.replace(piece, ''), chars_to_remove, value)
def captcha_handler(captcha_inp):
    """Interactively ask the user to solve a VK captcha and retry the call."""
    captcha_out = input('Enter captcha %s: ' % captcha_inp.get_url())
    return captcha_inp.try_again(captcha_out)
def exception(ex):
    """Print the error and terminate the script (exit() raises SystemExit)."""
    print(ex)
    exit()
def set_up(vk_session, owner_id, root_folder):
    """Query the owner's albums, create one local folder per album, and
    return (api_client, values) where values holds album ids/names/paths."""
    vk = vk_session.get_api()
    values = dict()
    try:
        values['albums'] = vk.photos.getAlbums(owner_id=owner_id)['items']
        if not os.path.exists(root_folder):
            os.makedirs(root_folder)
    except Exception as ex:
        exception(ex)
    # 'wall' and 'profile' are service albums not returned by getAlbums
    special_albums = [{"title": 'wall', "id": 'wall'}, {"title": 'profile', "id": 'profile'}]
    try:
        # 'saved' is only accessible for one's own page; probe for access
        vk.photos.get(owner_id=owner_id, album_id='saved')
        special_albums = special_albums + [{"title": 'saved', "id": 'saved'}]
    except:
        pass
    values['albums'] = values['albums'] + special_albums
    print('\nall albums:\n' + '\n'.join(['{} - id: {}'.format(album['title'], album['id']) for album in values['albums']]) + '\n')
    values['album_ids'] = [album['id'] for album in values['albums']]
    # strip characters Windows forbids in folder names
    values['album_names'] = [remove_chars(album['title'], '\/:*?"<>|') for album in values['albums']]
    values['album_paths'] = ['{}{}\\'.format(root_folder, album) for album in values['album_names']]
    try:
        for item in values['album_paths']:
            if not os.path.exists(item):
                os.makedirs(item)
    except Exception as ex:
        exception(ex)
    return vk, values
def getPhotos(vk, offset, owner_id, album_id):
    """Fetch one 1000-photo page of an album and return the URL of the
    largest available size of each photo."""
    response = vk.photos.get(
        owner_id=owner_id,
        offset=offset,
        count=1000,
        album_id=album_id
    )
    urls = []
    for item in response['items']:
        # the last entry in 'sizes' is the largest variant
        urls.append(item['sizes'][-1]['url'])
    return urls
def save_photo(folder, photos, iterations_range):
    """Download every URL in photos into folder.

    Files are named with time.time(); NOTE(review): sub-second collisions
    would silently hit the 'file already exists' branch.  The
    iterations_range parameter is unused but kept for call compatibility.
    """
    print('\nfolder: {}\n'.format(folder))
    for photo_url in photos:
        try:
            photo_name = '{}.jpg'.format(time.time())
            request = requests.get(photo_url)
            if request.status_code == 200:
                file_path = '{}{}'.format(folder, photo_name)
                try:
                    if os.path.exists(file_path):
                        print('file already exists')
                    else:
                        with open(file_path, 'wb') as fd:
                            fd.write(request.content)
                        print('{} : {}'.format(photo_url, 'saved'))
                except Exception as ex:
                    print(ex)
            else:
                print('{} : connection error'.format(photo_url))
        except Exception as ex:
            print(ex)
if __name__ == '__main__':
    # --- user configuration ---
    username = '' #TYPE LOGIN HERE
    password = '' #TYPE PASSWORD HERE
    owner_id = int('') #TYPE PAGE ID HERE
    root_folder = 'C:\\folder_name\\' #TYPE SAVE DIRECTORY HERE (PARENT FOLDER MUST EXIST)
    vk_session = vk_api.VkApi(
        username,
        password,
        captcha_handler=captcha_handler
    )
    try:
        vk_session.auth()
        vk, settings = set_up(vk_session, owner_id, root_folder)
        # albums are paged 1000 photos at a time (API maximum per request)
        for album_id, folder in zip(settings['album_ids'], settings['album_paths']):
            count = vk.photos.get(owner_id=owner_id, album_id=album_id)['count']
            iterations = ceil(int(count) / 1000)
            iterations_range = range(0, iterations)
            photos = [getPhotos(vk, i * 1000, owner_id, album_id)
                      for i in iterations_range]
            # flatten the list of pages into one flat list of URLs
            photos = reduce(lambda d, el: d.extend(el) or d, photos, [])
            save_photo(folder, photos, iterations_range)
    except Exception as ex:
        exception(ex)
983,908 | ddf5dbe2080e7ee447e4a07c24ebd94b9689e3c6 | import numpy as np
from activity_server.models import DataRecord, activity_table, activity_table_json
from datetime import datetime, timedelta
def recognize_last_activities(uuid, algorithm, feature_set, start_ts, end_ts):
    """Average the per-activity probability vectors of a user's records.

    Args:
        uuid: id of the user whose DataRecords are examined.
        algorithm: 'svm' or 'dt'.
        feature_set: 'standard' or 'enhanced'.
        start_ts / end_ts: window bounds in epoch *milliseconds*.

    Returns a dict with the averaged probability vector, the first record's
    timestamp and the activity recognized for the first record.
    Raises Exception when no record falls inside the window.
    """
    # incoming timestamps are milliseconds; fromtimestamp wants seconds
    start_datetime = datetime.fromtimestamp(start_ts / 1e3)
    end_datetime = datetime.fromtimestamp(end_ts / 1e3)
    records = DataRecord.objects.filter(user_id=uuid)\
        .filter(date_time__gt=start_datetime)\
        .filter(date_time__lt=end_datetime)
    if not records:
        raise Exception('No record found')
    total = len(records)
    avg_prob = [0, 0, 0, 0, 0, 0, 0, 0]
    current_activity = "none"
    # range() replaces the Python-2-only xrange() used previously
    for j, record in enumerate(records):
        prob = get_probability_for_data_record(record, feature_set, algorithm)
        if j == 0:
            # activity table is 1-indexed, argmax is 0-indexed
            current_activity = activity_table_json.get(np.argmax(prob) + 1)
        for i in range(len(prob)):
            avg_prob[i] += prob[i] / total
    return {"vector": avg_prob, "time": records[0].date_time, "current_activity": current_activity}
def get_probability_for_data_record(record, feature_set, algorithm):
    """Return the probability vector stored on the record's activity for the
    given (algorithm, feature_set) pair; raises Exception otherwise."""
    attr_by_key = {
        ('svm', 'standard'): 'svm',
        ('svm', 'enhanced'): 'svm_ech',
        ('dt', 'standard'): 'dt',
        ('dt', 'enhanced'): 'dt_ech',
    }
    try:
        attr_name = attr_by_key[(algorithm, feature_set)]
    except KeyError:
        raise Exception("Bad request")
    return getattr(record.activity, attr_name)
983,909 | 753cf2491eead44ab086ec3095a1eb8ca72a27ec | import sys
import pickle
# Remove the encoding stored under the name given on the command line.
# Fixes: the original opened the pickle in text append mode ("a+"), which
# both decodes binary data as text and leaves the read cursor at EOF, so
# pickle.load() could never succeed; it also shadowed the builtin 'dict'
# and closed the wrong (already re-bound) file handle.
name = sys.argv[1]
with open("./true_encodings.csv", "rb") as f:
    encodings = pickle.load(f)
if name in encodings:
    del encodings[name]
    with open("./true_encodings.csv", "wb") as f:
        pickle.dump(encodings, f)
else:
    print("No such name in the database")
|
983,910 | 57e669bb3f0ea7eb309274d6ed3e95aa09bba172 | import datetime
import unittest
from dummy_tax_calculator import DummyTaxCalculator
from vehicle import Vehicle
class TaxCalculatorDieselFuelTest(unittest.TestCase):
    """First-year VED (road tax) bands for diesel vehicles registered 2019."""

    def setUp(self):
        self.tax_calculator = DummyTaxCalculator()
        # Fixed: datetime.datetime.date is an unbound instance method, so
        # datetime.datetime.date(2019, 1, 1) raises TypeError; the date
        # constructor is datetime.date(year, month, day).
        self.FIRST_OF_JAN_2019 = datetime.date(2019, 1, 1)

    def test_first_years_tax_for__diesel_0_grams_co2(self):
        vehicle = Vehicle(0, "DIESEL", self.FIRST_OF_JAN_2019, 20000)
        self.assertEqual(0, self.tax_calculator.calculate_tax(vehicle))

    def test_first_years_tax_for__diesel_1_to_50_grams_co2(self):
        vehicle = Vehicle(50, "DIESEL", self.FIRST_OF_JAN_2019, 20000)
        self.assertEqual(25, self.tax_calculator.calculate_tax(vehicle))

    def test_first_years_tax_for__diesel_51_to_75_grams_co2(self):
        vehicle = Vehicle(75, "DIESEL", self.FIRST_OF_JAN_2019, 20000)
        self.assertEqual(105, self.tax_calculator.calculate_tax(vehicle))

    def test_first_years_tax_for__diesel_76_to_90_grams_co2(self):
        vehicle = Vehicle(90, "DIESEL", self.FIRST_OF_JAN_2019, 20000)
        self.assertEqual(125, self.tax_calculator.calculate_tax(vehicle))

    def test_first_years_tax_for__diesel_91_to_100_grams_co2(self):
        vehicle = Vehicle(100, "DIESEL", self.FIRST_OF_JAN_2019, 20000)
        self.assertEqual(145, self.tax_calculator.calculate_tax(vehicle))

    def test_first_years_tax_for__diesel_101_to_110_grams_co2(self):
        vehicle = Vehicle(110, "DIESEL", self.FIRST_OF_JAN_2019, 20000)
        self.assertEqual(165, self.tax_calculator.calculate_tax(vehicle))

    def test_first_years_tax_for__diesel_111_to_130_grams_co2(self):
        vehicle = Vehicle(130, "DIESEL", self.FIRST_OF_JAN_2019, 20000)
        self.assertEqual(205, self.tax_calculator.calculate_tax(vehicle))

    def test_first_years_tax_for__diesel_131_to_150_grams_co2(self):
        vehicle = Vehicle(150, "DIESEL", self.FIRST_OF_JAN_2019, 20000)
        self.assertEqual(515, self.tax_calculator.calculate_tax(vehicle))

    def test_first_years_tax_for__diesel_151_to_170_grams_co2(self):
        vehicle = Vehicle(170, "DIESEL", self.FIRST_OF_JAN_2019, 20000)
        self.assertEqual(830, self.tax_calculator.calculate_tax(vehicle))

    def test_first_years_tax_for__diesel_171_to_190_grams_co2(self):
        # Fixed: the CO2 value and expected tax were garbled (1240 g / 820).
        # The 171-190 g diesel band is 1240 per the GOV.UK 2019 first-year
        # rate table, consistent with the surrounding bands here.
        vehicle = Vehicle(190, "DIESEL", self.FIRST_OF_JAN_2019, 20000)
        self.assertEqual(1240, self.tax_calculator.calculate_tax(vehicle))

    def test_first_years_tax_for__diesel_191_to_225_grams_co2(self):
        vehicle = Vehicle(225, "DIESEL", self.FIRST_OF_JAN_2019, 20000)
        self.assertEqual(1760, self.tax_calculator.calculate_tax(vehicle))

    def test_first_years_tax_for__diesel_226_to_255_grams_co2(self):
        vehicle = Vehicle(255, "DIESEL", self.FIRST_OF_JAN_2019, 20000)
        self.assertEqual(2070, self.tax_calculator.calculate_tax(vehicle))

    def test_first_years_tax_for__diesel_over_225_grams_co2(self):
        vehicle = Vehicle(256, "DIESEL", self.FIRST_OF_JAN_2019, 20000)
        self.assertEqual(2070, self.tax_calculator.calculate_tax(vehicle))
if __name__ == '__main__':
unittest.main()
|
983,911 | 72b9d25a8582c60aeac9cf41bb19f8c2fd41a44b | MAX_STATION_TIME = 15 # Max minutes for train to spend in one station
STATION_CODES = {
'Třinec centrum, nádr.': 1313142001,
'Ostrava, Stodolní': 372825009,
'Žilina, žel. st.': 508812004,
'Ostrava, hl.n.': 372825008,
'Přerov, nádr.': 2370298003,
'Návsí (Jablunkov), nádr.': 1558071000,
'Liptovský Mikuláš, žel. st.': 1763018002,
'Hulín, nádr.': 2370298002,
'Zábřeh na Moravě, nádr.': 372825004,
'Košice, žel. st.': 1763018007,
'Bystřice (Třinec), nádr.': 1313142002,
'Margecany, žel. st.': 2317717000,
'Praha, hl.n.': 372825000,
'Čadca, žel.st.': 508812003,
'Štrba, žel. st.': 1763018003,
'Český Těšín, nádraží': 508812001,
'Kysak (pri meste Prešov), žel. st.': 1763018006,
'Olomouc, hl.n.': 372825005,
'Poprad, Tatry': 1763018004,
'Otrokovice, žel. st.': 2370298001,
'Staré Město, [UH], nádraží': 2370298000,
'Ostrava, Svinov': 372825007,
'Hranice na M., nádr.': 372825006,
'Vrútky, žel. st.': 1763018000,
'Spišská Nová Ves, žel. st.': 1763018005,
'Havířov, nádr.': 372825010,
'Ružomberok, žel. st.': 1763018001,
'Pardubice, hl. nádraží': 372825002,
'Česká Třebová, nádr.': 1313142000
}
CODE_STATIONS = {
1763018006: 'Kysak (pri meste Prešov), žel. st.',
1763018003: 'Štrba, žel. st.',
1763018002: 'Liptovský Mikuláš, žel. st.',
372825000: 'Praha, hl.n.',
372825006: 'Hranice na M., nádr.',
508812003: 'Čadca, žel.st.',
508812004: 'Žilina, žel. st.',
2370298001: 'Otrokovice, žel. st.',
372825002: 'Pardubice, hl. nádraží',
2370298003: 'Přerov, nádr.',
1763018004: 'Poprad, Tatry',
372825009: 'Ostrava, Stodolní',
1558071000: 'Návsí (Jablunkov), nádr.',
372825004: 'Zábřeh na Moravě, nádr.',
1313142001: 'Třinec centrum, nádr.',
372825008: 'Ostrava, hl.n.',
372825007: 'Ostrava, Svinov',
1313142000: 'Česká Třebová, nádr.',
1313142002: 'Bystřice (Třinec), nádr.',
2370298002: 'Hulín, nádr.',
1763018005: 'Spišská Nová Ves, žel. st.',
372825005: 'Olomouc, hl.n.',
1763018000: 'Vrútky, žel. st.',
508812001: 'Český Těšín, nádraží',
1763018001: 'Ružomberok, žel. st.',
2370298000: 'Staré Město, [UH], nádraží',
2317717000: 'Margecany, žel. st.',
1763018007: 'Košice, žel. st.',
372825010: 'Havířov, nádr.'
}
# Route number -> index into ALL_ROUTES below.
ROUTES_DICT = {
    '1003': 0,
    '1011': 1,
    '1021': 2,
    '1008': 3,
    '1012': 4,
    # Fixed: was 4, which pointed route 1020 at the 1012 station list;
    # the ALL_ROUTES comments show 1020 as the sixth route (index 5).
    '1020': 5
}
# Ordered station-code lists, one per route (see ROUTES_DICT for lookup).
ALL_ROUTES = [
    # 1003 (Prague -> Kosice)
    [
        372825000,
        372825002,
        1313142000,
        372825004,
        372825005,
        372825006,
        372825007,
        372825010,
        508812001,
        1313142001,
        508812003,
        508812004,
        1763018000,
        1763018001,
        1763018002,
        1763018003,
        1763018004,
        1763018005,
        1763018006,
        1763018007
    ],
    # 1011 (Prague -> Kosice)
    [
        372825000,
        372825002,
        1313142000,
        372825004,
        372825005,
        372825006,
        372825007,
        372825010,
        508812001,
        1313142001,
        508812003,
        508812004,
        1763018000,
        1763018001,
        1763018002,
        1763018003,
        1763018004,
        1763018005,
        1763018006,
        1763018007
    ],
    # 1021 (Prague -> Kosice)
    [
        372825000,
        372825002,
        372825004,
        372825005,
        372825006,
        372825007,
        372825010,
        508812001,
        1313142001,
        508812003,
        508812004,
        1763018000,
        1763018001,
        1763018002,
        1763018003,
        1763018004,
        1763018005,
        2317717000,
        1763018006,
        1763018007
    ],
    # 1008 (Kosice -> Prague)
    [
        1763018007,
        1763018006,
        1763018005,
        1763018004,
        1763018002,
        1763018001,
        1763018000,
        508812004,
        508812003,
        1558071000,
        1313142001,
        508812001,
        372825010,
        372825009,
        372825006,
        372825005,
        372825004,
        1313142000,
        372825002,
        372825000
    ],
    # 1012 (Kosice -> Prague)
    # Fixed: Pardubice (372825002) was listed twice in a row.
    [
        1763018007,
        1763018006,
        1763018005,
        1763018004,
        1763018003,
        1763018002,
        1763018001,
        1763018000,
        508812004,
        508812003,
        1558071000,
        1313142001,
        508812001,
        372825010,
        372825009,
        372825006,
        372825005,
        372825004,
        1313142000,
        372825002,
        372825000
    ],
    # 1020 (Kosice -> Prague)
    [
        1763018007,
        1763018006,
        2317717000,
        1763018005,
        1763018004,
        1763018003,
        1763018002,
        1763018001,
        1763018000,
        508812004,
        508812003,
        1313142001,
        508812001,
        372825010,
        372825008,
        372825005,
        372825004,
        372825002,
        372825000
    ],
    # TODO (Prague -> Navsi, Navsi -> Prague, Bratislava...)
]
|
983,912 | cf117618e19b12d300a46c17bed5b7d42376c848 | from django.conf.urls import patterns, include, url
from django.contrib import admin
from reports.views import *
admin.autodiscover()
# Per-user report endpoints; user_id is captured from the URL path.
# NOTE(review): patterns() was deprecated in Django 1.8 and removed in
# 1.10 -- this module targets an older Django release.
urlpatterns = patterns('',
    url(r'^user/full/(?P<user_id>.+)/', report_for_user_full, name='report_for_user_full'),
    url(r'^user/physical/(?P<user_id>.+)/', report_for_user_exercise, name='report_for_user_exercise'),
    url(r'^user/social/(?P<user_id>.+)/', report_for_user_social, name='report_for_user_social'),
    url(r'^user/health/(?P<user_id>.+)/', report_for_user_health, name='report_for_user_health'),
    url(r'^user/nutrition/(?P<user_id>.+)/', report_for_user_nutrition, name='report_for_user_nutrition'),
    url(r'^user/genomics/(?P<user_id>.+)/', report_for_user_genomics, name='report_for_user_genomics'),
)
983,913 | 949d40544749a58d74d9cb46a6625a616b619024 | from typing import List
from connection_pool import get_connection
from models.ingredient import Ingredient
import pytz
import datetime
import RecipeDatabase as rdb
class Recipe:
    """A user's recipe; persistence is delegated to RecipeDatabase (rdb)."""

    def __init__(self, name: str, owner_id: int, timestamp: float = None, _id: int = None):
        self.id = _id
        self.name = name
        self.owner_id = owner_id
        self.timestamp = timestamp

    def __repr__(self) -> str:
        # Fixed: previously labeled the object "User(...)".
        return f"Recipe({self.name!r}, {self.owner_id!r}, {self.timestamp!r}, {self.id!r})"

    def save(self):
        """Insert this recipe, stamping it with the current UTC time and
        storing the generated primary key on self.id."""
        with get_connection() as connection:
            current_datetime_utc = datetime.datetime.now(tz=pytz.utc)
            self.timestamp = current_datetime_utc.timestamp()
            new_recipe_id = rdb.create_recipe(connection, self.name, self.owner_id, self.timestamp)
            self.id = new_recipe_id

    def add_ingredient(self, name: str, quantity: float, unit: str, group: str):
        """Attach and persist one ingredient belonging to this recipe."""
        Ingredient(self.id, name, quantity, unit, group).save()

    def log_recipe_on_grocery(self):
        """Record that this recipe was added to a grocery list right now (UTC)."""
        with get_connection() as connection:
            current_datetime_utc = datetime.datetime.now(tz=pytz.utc)
            timestamp = current_datetime_utc.timestamp()
            rdb.log_recipe_4_grocery(connection, self.id, timestamp)

    @classmethod
    def all(cls, user_id: int) -> List["Recipe"]:
        """Return every recipe owned by user_id."""
        with get_connection() as connection:
            recipes = rdb.get_recipes(connection, user_id)
            # DB rows are (id, name, owner_id, timestamp)
            return [cls(recipe[1], recipe[2], recipe[3], recipe[0]) for recipe in recipes]

    @classmethod
    def get(cls, recipe_id: int) -> "Recipe":
        """Fetch a single recipe by primary key."""
        with get_connection() as connection:
            recipe = rdb.get_recipe(connection, recipe_id)
            return cls(recipe[1], recipe[2], recipe[3], recipe[0])
|
983,914 | f02e06f449d2c1ed53734cdaa3f610e23bb5e217 | from abc import abstractmethod
from copy import deepcopy
from core.NormDist import NormDist
class Individual():
    """Base class for GA individuals: holds a genotype, applies mutation,
    and declares the abstract fitness/phenotype hooks subclasses implement."""

    def __init__(self, mutation_rate, genotype=None):
        self.fitness = 0
        self.exp_val = 0
        self.gene_count = 0
        self.mutation_rate = mutation_rate
        if genotype:
            # Offspring path: adopt the supplied genes, mutating each once.
            self.genotype = genotype
            for gene in self.genotype:
                gene.mutate(self.mutation_rate)
        else:
            self.random_genotype()
        for _ in self.genotype:
            self.gene_count += 1
        self.generate_phenotype()

    def __repr__(self):
        return "{Fitness:%f exp:%f gen:%s}\n" % (self.fitness, self.exp_val, "".join(str(x) for x in self.genotype))

    def get_child_gene(self, gene_index):
        """Return a deep copy of one gene, safe to hand to offspring."""
        return deepcopy(self.genotype[gene_index])

    def mutate(self):
        """Mutate every gene in place.  range() replaces Py2-only xrange()."""
        for i in range(self.gene_count):
            self.genotype[i].mutate(self.mutation_rate)

    def compare(self, other):
        """Return the mean per-gene difference between self and other."""
        diff = 0
        for i in range(len(self.genotype)):
            diff += self.genotype[i].compare(other.genotype[i])
        diff /= len(self.genotype)
        return diff

    @abstractmethod
    def random_genotype(self):
        """Populate self.genotype with random genes (subclass hook)."""
        raise NotImplementedError

    @abstractmethod
    def calculate_fitness(self):
        """Compute and store self.fitness (subclass hook)."""
        raise NotImplementedError

    @abstractmethod
    def generate_phenotype(self):
        """Derive the phenotype from self.genotype (subclass hook)."""
        pass

    @abstractmethod
    def phenotype_str(self):
        """Human-readable phenotype representation (subclass hook)."""
        pass
983,915 | 1305ba6ab438598ca24230864a9edcca3b13752d | '''
https://leetcode.com/problems/linked-list-cycle_ii/
Given a linked list, return the node where the cycle begins. If there is no cycle, return null.
To represent a cycle in the given linked list, we use an integer pos which represents the position (0-indexed) in the linked list where tail connects to. If pos is -1, then there is no cycle in the linked list.
Note: Do not modify the linked list.
Example 1:
Input: head = [3,2,0,-4], pos = 1
Output: node(2)
Explanation: There is a cycle in the linked list, where tail connects to the second node.
3 -> 2 -> 0 -> -4
^ <- <- /
Example 2:
Input: head = [1,2], pos = 0
Output: node(1)
Explanation: There is a cycle in the linked list, where tail connects to the first node.
1 -> 2
^ <- /
'''
import sys
sys.path.append("../linked_list_cycle/")
from SLL_cycle import SLL_cycle
class Solution(object):
    '''
    Detect the first node of a cycle in a singly linked list with Floyd's
    tortoise-and-hare algorithm: O(1) memory, O(n) time.

    Why restarting one pointer from head finds the cycle start:
    let the cycle start be x nodes from head and let the cycle contain m
    nodes.  When the slow pointer reaches the cycle start, the fast one is
    k = x mod m nodes ahead of it inside the cycle.  Moving at speeds 1 and
    2, they then meet m - k nodes further on, i.e. k nodes *before* the
    cycle start (measured along the cycle).  Since x = l*m + k for some
    l >= 0, walking x single steps from that meeting point lands exactly on
    the cycle start.  So advancing one pointer from head and one from the
    meeting point, one step each, makes them meet at the cycle's first node.
    '''
    def detectCycle(self, head):
        """
        :type head: ListNode
        :rtype: ListNode | None -- first node of the cycle, or None

        Rewritten with explicit None checks instead of catching
        AttributeError, and without the unreachable trailing return.
        """
        slow = fast = head
        # Phase 1: advance at 1x and 2x until they meet (cycle found)
        # or fast runs off the end (no cycle).
        while fast is not None and fast.next is not None:
            slow = slow.next
            fast = fast.next.next
            if slow is fast:
                # Phase 2: restart one pointer from head; they meet at the
                # start of the cycle (see class docstring).
                slow = head
                while slow is not fast:
                    slow = slow.next
                    fast = fast.next
                return slow
        return None
if __name__ == '__main__':
s = Solution()
from SLL_cycle import SLL_cycle
s1 = SLL_cycle.build([1,2,3,4,5], 2)
assert s1.head.val == 1
assert s1.tail.val == 5
assert s1.tail.next.val == 3
assert s.detectCycle(s1.head) == s1.tail.next
s2 = SLL_cycle.build([1], 0)
assert s2.head.val == 1
assert s2.tail.val == 1
assert s2.tail.next.val == 1
assert s.detectCycle(s2.head) == s2.tail.next
s3 = SLL_cycle.build([1,2,3,4,5,6], 1)
assert s3.head.val == 1
assert s3.tail.val == 6
assert s3.tail.next.val == 2
assert s.detectCycle(s3.head) == s3.tail.next
# same as above, but cyclic link wont be established as pos > size
s3 = SLL_cycle.build([1,2,3,4,5,6], 100)
assert s3.head.val == 1
assert s3.tail.val == 6
assert s3.tail.next == None
assert s.detectCycle(s3.head) == None
s4 = SLL_cycle.build([1,2,3,4,5,6])
assert s4.head.val == 1
assert s4.tail.val == 6
assert s4.tail.next == None
assert s.detectCycle(s4.head) == None
s5 = SLL_cycle.build([3,2,0,-4], 1)
assert s5.head.val == 3
assert s5.tail.val == -4
assert s5.tail.next.val == 2
assert s.detectCycle(s5.head) == s5.tail.next
s6 = SLL_cycle.build([1,2], 0)
assert s6.head.val == 1
assert s6.tail.val == 2
assert s6.tail.next.val == 1
assert s.detectCycle(s6.head) == s6.tail.next
s7 = SLL_cycle.build([1])
assert s7.head.val == 1
assert s7.tail.val == 1
assert s7.tail.next == None
assert s.detectCycle(s7.head) == None
s8 = SLL_cycle.build(range(1, 27), 16)
assert s8.head.val == 1
assert s8.tail.val == 26
assert s8.tail.next.val == 17
assert s.detectCycle(s8.head) == s8.tail.next
|
983,916 | 185af91dcc439bc0b8e852948fe94038de30e455 | import json
import pdb
def preprocess_studies(folder_name):
    """Split research.json into metadata and quote files under folder_name.

    folder_name must end with a path separator -- output paths are built by
    plain string concatenation below.
    """
    with open('./research.json') as research:
        studies = json.load(research)
    #add ID and export 'quotes' to another dictionary
    study_quotes = {}
    for i in range(len(studies)):
        study = studies[i]
        study['id'] = i
        if 'quote' in study:
            study_quotes[i] = {'quote': study['quote']}
            del study['quote']
    with open('../src/assets/data/study_metadata_names.json') as metadata_file:
        metadata = json.load(metadata_file)
    # the metadata file maps label -> code; invert to code -> label lookup
    availabilities = invert_dict(metadata['availability'])
    study_types = invert_dict(metadata['types'])
    for study in studies:
        study['availability'] = availabilities[study['availability']]
        study['type'] = study_types[study['type']]
        # process_url(study)
        study['tags'] = study['tags'].split(",")
        for i in range(len(study['tags'])):
            study['tags'][i] = study['tags'][i].lower()
    with open(folder_name+"studies_metadata.json", 'w') as outfile:
        json.dump(studies, outfile)
    with open(folder_name+"studies_text.json", 'w') as outfile:
        json.dump(study_quotes, outfile)
    #invert these dictionaries for easier usage
    # availabilities = invert_dict(availabilities)
    # study_types = invert_dict(study_types)
    # pdb.set_trace()
    # with open(folder_name+"study_availability.json", 'w') as outfile:
    #     json.dump(availabilities, outfile)
    # with open(folder_name+"study_types.json", 'w') as outfile:
    #     json.dump(study_types, outfile)
def invert_dict(d):
    """Return a new dict with the keys and values of *d* swapped.

    Values must be hashable; duplicate values collapse to the last key.
    """
    return {value: key for key, value in d.items()}
def process_url(study):
    """Replace a recognized NCBI 'url' field on *study* (mutated in
    place) with the bare article identifier.

    PMC article links are stored under 'ncbi_article'; PubMed links
    under 'pubmed'. Records without a url, or with an unrecognized url,
    are left untouched.
    """
    if 'url' not in study:
        return
    url = study['url']
    article_id = url.split('/')[-1]
    if "https://www.ncbi.nlm.nih.gov/pmc/articles" in url:
        study['ncbi_article'] = article_id
        del study['url']
    if 'https://www.ncbi.nlm.nih.gov/pubmed/' in url:
        study['pubmed'] = article_id
        del study['url']
def get_possibilites(studies, key):
    """Map each distinct value of study[key] across *studies* to a small
    integer id, numbered in order of first appearance."""
    possibilities = {}
    for record in studies:
        # setdefault assigns the current size only for unseen values
        possibilities.setdefault(record[key], len(possibilities))
    return possibilities
983,917 | 597971e790850b756bc668c2f8c3fbb136f97254 | from flask import Flask, render_template, request, redirect, session
import random
app = Flask(__name__)
app.secret_key = 'ThisIsSecret'
@app.route('/', methods = ['POST', 'GET'])
def index():
    """Start a round: store a fresh secret number (0-100 inclusive) in
    the session and render the guessing page. (Python 2 syntax: bare
    print statements.)"""
    # print random.randrange(0,101)
    session['num'] = random.randrange(0,101)
    print session['num']
    return render_template("index.html", result = "Guess now!")
@app.route('/guesser', methods = ['POST'])
def guessing():
    """Handle a guess submission and tell the player higher/lower/won.

    Bug fix: request.form values are strings while session['num'] is an
    int. The original compared them directly, which in Python 2 orders
    all ints before all strings (every guess compared 'too high') and in
    Python 3 raises TypeError. The guess is now converted to int first,
    and the verdict is passed to the template instead of only being
    printed to the server console.
    """
    secret = session['num']
    try:
        guess = int(request.form['guess'])
    except ValueError:
        # Non-numeric input: re-prompt without judging the guess.
        return render_template("index.html", result = "Guess now!")
    if guess > secret:
        feedback = "Lower"
    elif guess < secret:
        feedback = "Higher"
    else:
        feedback = "You got it!"
    return render_template("index.html", result = feedback)
@app.route('/high')
def high():
    # Placeholder route: renders the page with no feedback message.
    return render_template("index.html")
@app.route('/low')
def low():
    # Tell the player to guess lower. The previous unused local
    # 'result' ("Lower!") was dead code; only the template argument
    # reaches the page.
    return render_template('index.html', result = "Lower")
@app.route('/nice')
def nice():
    # Success page. Removed the unused local 'result' that duplicated
    # the string already passed to the template.
    return render_template("index.html", result = "You got it!")
app.run(debug=True)
|
983,918 | 166855ddab5407961a0996be4449345a9f634b55 | print("----------cetak 1 s/d 20----------")
# Print the numbers 1..20, one per line.
angka = 20
for no in range(1, angka + 1):
    print("angka", no)

print("----------cetak bilangan genap 1 s/d 20----------")

# Print only the even numbers in 1..20 by stepping two at a time.
bil = 20
for no in range(2, bil + 1, 2):
    print("bilangan genap", no)
983,919 | 3ce4a97500d0e39bd8b97a5cfcd143cef8d8f1dd | n = int(input())
a = list(map(int, input().split()))
res = 0
while True:
change = 0
for i in range(n - 1):
if a[i] == i + 1:
a[i], a[i + 1] = a[i + 1], a[i]
res += 1
change = 1
for i in range(n - 1, 1, -1):
if a[i] == i + 1:
a[i], a[i - 1] = a[i - 1], a[i]
res += 1
change = 1
if change == 0:
break
print(res) |
983,920 | e008d74977482cd3abaa95f6f34853a1e50dae7b | # -*- coding: utf-8 -*-
"""
Created on Fri Jul 13 09:24:51 2018
@author: nick.wardle
"""
import debugger as de
import gameData as gD
import renderers
#import transformers as tfs
import controllers as ctrls
# == HANDLING ALL PLAYER COMMANDS =========================================
# modules that take inputs from player, or other modules, and update the models
def doCommand(cmd, obj, jun, via, uiData):
    """Top-level dispatcher for one parsed player input.

    cmd/obj/jun/via are the parser's id strings (any may be None);
    uiData holds the UI feedback strings. The prefix of the command id
    (text before '-') selects the handler: 'm' = movement, a key of
    gD.gameDB['uiCmds'] = UI command, a key of gD.gameDB['actionCmds'] =
    object action. A lone object with no command is sent to useObject()
    so the player gets help for that object. Returns False when there is
    nothing useful to do.
    """
    legalInputs = gD.LEGALINPUTS
    if cmd != None:
        # we only need to identify the TYPE of cmd
        # so we can action the correct function next
        cmd_spl = cmd.split("-")
        cmd_ky = cmd_spl[0]
        my_cmd = gD.INPUT_VARS['THIS_CMD']['user-input']
        de.bug(1, "my_cmd is", my_cmd)
        de.bug(1, "move cmds for this loc are", gD.LOCDATA['moveCmds'])
        if cmd_ky == "m": # MOVEMENT command
            moveDesc = False
            moveDest = False
            # scan this location's movement commands for one whose alias
            # matches the raw user input
            for m in gD.LOCDATA['moveCmds']:
                for h, i in gD.gameDB['moveCommandsDB'][m].items():
                    for j in i['cmds']:
                        if my_cmd == j:
                            moveDesc = i['goDesc']
                            if 'destId' in i:
                                moveDest = i['destId']
                            else:
                                de.bug("NOTICE: This cmd doesn't change our location")
            # show moveDesc feedback for moveCmd
            ctrls.printText(moveDesc, "move")
            # if associated locID for moveCmd - ctrls.changeLoc
            ctrls.changeLoc(moveDest)
        elif cmd_ky in gD.gameDB['uiCmds'].keys(): # UI command
            # send to uiActions to handle the UI command
            uiActions(cmd, obj, jun, via, legalInputs, uiData)
        elif cmd_ky in gD.gameDB['actionCmds'].keys(): # ACTION command
            de.bug(2, "locDATA", gD.LOCDATA)
            # send the cmd and the obj to useObject for more detailed handling
            useObject(cmd, obj, jun, via)
        else: # Command not known
            de.bug("Error (doCommand): The command", cmd, "is not handled yet")
    elif obj != None: # empty cmd but we have a singleton obj
        # send to useObject anyway to give Player object help feedback
        useObject(cmd, obj, jun, via)
    else: # Too many params are None to do anything useful
        return False
def uiActions(cmd, obj, jun, via, inps, uiData): # generic UI cmd handler
    """Handle a UI-category command.

    'inv'/'inventory' render the character inventory; general commands
    print their message from *uiData*; anything else is reported through
    the debugger. Other parameters are accepted for signature parity
    with the dispatcher but are not used here.
    """
    # Resetting values
    my_cmd = None
    # check for singleton object with no command (show help if it is)
    if cmd != None:
        # consolidate cmd reference word
        my_cmd = gD.INPUT_VARS['THIS_CMD']['user-input']
        # render the appropriate user feedback message for the cmd
        if my_cmd in gD.gameDB['uiCmds']['playerCmds']:
            if my_cmd == "inv" or my_cmd == "inventory":
                renderers.render_charInventory()
        elif my_cmd in gD.gameDB['uiCmds']['generalCmds']:
            # Just print out the message for the UI command
            ctrls.printText(uiData[my_cmd], my_cmd)
        else:
            de.bug("Error (doCommand): command '", my_cmd, "' not found in uiCmds")
def useObject(cmd, obj, jun, via): # generic Object handler
    """Resolve an action command against an object and optional 'via'
    object (e.g. "open box with key").

    Reads the fully-parsed input from gD.INPUT_VARS, validates the
    command/object/via references, then walks the command categories in
    order: explore, examine, search, get, put, open/unlock, lock/close
    and use. All side effects go through ctrls (world/inventory/object
    state, text output) and renderers (player feedback). A valid object
    given without a command renders that object's help and returns
    False.
    """
    # E.G. cmd: generalCmds-0 | obj: o-7 | jun: conJuncts-2 | via: o-11
    ######### GOT TO HERE ###############
    # then finally
    # need to go through anything that REworks out any of
    # these now global references and make then use the
    # GLOBALS instead. For example: do_command()
    # THEN FINALLY need to make the "what am I" function
    # that "is this object at the location" can call
    # and tidy up that whole useObject function A LOT
    """
    INPUT_VARS are now: {
    'THIS_CMD': {'user-input': 'open', 'ref-id': ['intCmds-0']},
    'THIS_OBJ': {'user-input': 'box', 'ref-id': ['ob0002'], 'obj-loc': ['z0001']},
    'THIS_JUN': {'user-input': 'with', 'ref-id': ['conJuncts-0']},
    'THIS_VIA': {'user-input': 'key', 'ref-id': [], 'obj-loc': []}
    }
    """
    # Resetting values
    # obs_list = gD.LOCDATA['locObjects']
    obj_cmds = []
    cmd_ref = None
    obj_ref = None
    via_ref = None
    jun_ref = None
    obj_id = None
    this_obj = None
    this_via = None
    # SETUP SCRIPT VARS from globals
    if cmd:
        cmd_ref = gD.INPUT_VARS['THIS_CMD']['user-input']
    if jun:
        jun_ref = gD.INPUT_VARS['THIS_JUN']['user-input']
    if via:
        via_ref = gD.INPUT_VARS['THIS_VIA']['user-input']
        # Detect invalid VIA
        if gD.INPUT_VARS['THIS_VIA']['ref-id']:
            via_id = gD.INPUT_VARS['THIS_VIA']['ref-id'][0]
            this_via = gD.gameDB['objectsDB'][via_id]
        else:
            de.bug(1, "INVALID VIA", via_ref)
    if obj:
        obj_ref = gD.INPUT_VARS['THIS_OBJ']['user-input']
        # Detect invalid obj
        if gD.INPUT_VARS['THIS_OBJ']['ref-id']:
            obj_id = gD.INPUT_VARS['THIS_OBJ']['ref-id'][0]
            obj_desc = gD.gameDB['objectsDB'][obj_id]['desc']
            obj_locdesc = gD.gameDB['objectsDB'][obj_id]['location']
            this_obj = gD.gameDB['objectsDB'][obj_id]
            # Get all Object Commands
            obj_cmds = ctrls.get_ObjectCommands(this_obj)
            # User referenced a VALID object WITHOUT putting
            # an action command - So give them help
            if cmd_ref == None:
                renderers.render_objectHelp(obj_cmds, this_obj['name'])
                return False # exit this function
        else:
            de.bug(1, "INVALID OBJ", obj_ref)
    ############## COMMANDS THAT REQUIRE NO OBJECT ################
    if cmd_ref in gD.gameDB['actionCmds']['exploreCmds'] and obj == None:
        ### == generic explore COMMANDS: look / search etc ====
        # give user feedback on their command
        ctrls.printText(None, cmd_ref)
        return True # exit this function, we're done here
    if via_ref:
        ### == navigation COMMANDS w/ VIA e.g. 'go in', 'get in' =========
        if cmd_ref in ('get', 'go', 'walk'):
            de.bug(3, "We have a VIA movement type of command!", cmd_ref, jun, via)
            #TODO: Handle changing location with a cmd, jun, via input
            ######### NOT COMPLETE NEED RENDER TEXT TO HANDLE THIS ##
            # Needs to handle changing location using the via
            ########################################################
    ############## COMMANDS THAT NEED AN OBJECT ################
    if obj_ref:
        # if obj_ref != None, but gD.INPUT_VARS['THIS_OBJ']['ref-id'] == None:
        ## This means that the command is invalid for the object
        # so throw that error "You can't X the Y"
        if obj_id == None:
            de.bug(1, "INVALID obj", obj_ref, ". You can't", cmd_ref, "this object")
        ### We are no longer checking if object at location before this
        # So GET for example, needs to check if INPUT_VARS['THIS_OBJ']['obj-loc'] == gD.CURRENTLOC
        # And put needs to check obj in INV
        # And any other cmd that requires obj to be local needs to CHECK
        ### == specific explore COMMANDS: look in / under etc ====
        if cmd_ref in gD.gameDB['actionCmds']['exploreCmds']:
            # check object access state
            ob_access = ctrls.get_ObjectState(this_obj, 'access')
            de.bug(4, "ob_access is:", ob_access)
            # GET the contained objects
            ids, descs, t = ctrls.get_ObjectContents(obj_id)
            ## check if object permissions prevent action
            if ob_access == "unlocked":
                # ADD the object to the world local objects
                ctrls.update_WorldState(obj_id, False, 'add')
                de.bug(1, "contained objects", descs)
                # full or empty container?
                if len(descs) > 0:
                    # feedback to player what they have discovered
                    ctrls.printText([descs, this_obj['name']], "contained by")
                else:
                    ctrls.printText(this_obj['name'], 'container empty')
            elif ob_access == "locked":
                # feedback to player about the req object
                renderers.render_objectActions(this_obj, cmd_ref, "locked_by")
                # does player have the req object?
                this_via_obj = gD.gameDB['objectsDB'][this_obj['permissions']['locked_by']]
                if ctrls.get_InventorySlot(this_via_obj) == False:
                    ctrls.printText(this_via_obj['name'], 'not in inv')
        ### == examine COMMANDS: look at, examine etc. =============
        elif cmd_ref in gD.gameDB['actionCmds']['examineCmds']:
            d = [this_obj['desc'],ctrls.get_ObjectState(this_obj, s='access')]
            ### FIX THIS #################
            # Combine these two printText renders into one
            # show object description
            ctrls.printText(d, 'look at')
            # bit of a hack this... add obj name to end of obj_cmds
            # for renderer to .pop() off afterwards
            obj_cmds.append(this_obj['name'])
            ctrls.printText(obj_cmds, 'examine')
        ### == search COMMANDS: look for, where etc ===========
        elif cmd_ref in gD.gameDB['actionCmds']['searchCmds']:
            # show both obj desc and loc together
            ctrls.printText([obj_desc, obj_locdesc], cmd_ref)
        ### == get, put, use, int COMMANDS =====================
        # check legal action for the object
        elif cmd_ref in obj_cmds:
            ### == get command add object to inventory ============
            # check all get aliases
            for i in gD.gameDB['actionCmds']['getCmds']:
                if cmd_ref == i:
                    # GET the contained objects
                    ids, descs, t = ctrls.get_ObjectContents(obj_id)
                    de.bug(4, "these contained objs", ids, "this containment type", t, "this_obj", this_obj)
                    # add obj to inv & ctrls.update_WorldState
                    if ctrls.update_Inventory(obj_id, "add") != False:
                        # render feedback to player
                        renderers.render_objectActions(this_obj, cmd_ref, "get-take")
                        # render the player inventory
                        renderers.render_charInventory()
                        # update child object state & get parent container
                        p = ctrls.update_ObjectState(obj_id, this_obj, cmd_ref)
                        # update parent container state using returned "p"
                        if p != None:
                            ctrls.update_ObjectState(obj_id, this_obj, 'un_contain', p)
                        # finally REMOVE the contained items from the world
                        # because they are now in player inventory
                        ctrls.update_WorldState(ids, t, 'remove')
                    else:
                        # trying to add obj ALREADY in inv
                        ctrls.printText(this_obj['name'], 'already in inv')
            ### == put command remove object from inventory =========
            #TODO: Need a more complex "put ... in... " version where this_obj
            # gets added to a new parent_container, not to local Objects
            # see 'get' above for handling parents vs local world objects
            # check all put aliases
            for i in gD.gameDB['actionCmds']['putCmds']:
                if cmd_ref == i:
                    # remove obj from inv
                    if ctrls.update_Inventory(obj_id, "remove") != False:
                        # is there a VIA object for the action?
                        if via != None: # put something IN somewhere (VIA)
                            # update parent container (via) state
                            ctrls.update_ObjectState(obj_id, this_obj, 'add', via_id)
                        else: # simple put/drop command
                            # render feedback to player
                            renderers.render_objectActions(this_obj, cmd_ref, "put-leave")
                            # ADD the object to the world local objects
                            ctrls.update_WorldState(obj_id, False, 'add')
                        # render the player inventory
                        renderers.render_charInventory()
                    else:
                        # trying to remove obj not in inv
                        ctrls.printText(this_obj['name'], 'not in inv')
            ### == open/unlock command do object custom action =============
            if cmd_ref in ("open", "unlock"):
                # check object access state
                ob_access = ctrls.get_ObjectState(this_obj, s='access')
                de.bug(4, "ob_access is:", ob_access)
                if ob_access == 'locked':
                    # check if object permissions prevent action
                    can_open = ctrls.get_ObjectPermissions(this_obj)
                    de.bug(4, "lock perms are", can_open)
                    if can_open in ("ok", "unlocked", "has-req-obj"): # obj not locked
                        # render feedback to player
                        renderers.render_objectActions(this_obj, cmd_ref, can_open, this_obj['permissions']['locked_by'])
                        # update object state
                        ctrls.update_ObjectState(obj_id, this_obj, cmd_ref)
                    else:
                        # feedback access state of object to player
                        renderers.render_objectActions(this_obj, cmd_ref, can_open)
                        # player does not have the req object
                        this_via = gD.gameDB['objectsDB'][obj_id]['permissions']['locked_by']
                        if ctrls.get_InventorySlot(this_via) == False:
                            ctrls.printText(gD.gameDB['objectsDB'][this_via]['name'], 'not in inv')
                else:
                    # not locked => can open: update object state
                    ctrls.update_ObjectState(obj_id, this_obj, cmd_ref)
            ### == close/lock command do object custom action =============
            elif cmd_ref in ("lock", "close"):
                # check object access state
                ob_access = ctrls.get_ObjectState(this_obj, s='access')
                de.bug(4, "ob_access is:", ob_access)
                if ob_access == 'unlocked':
                    # check if object permissions prevent action
                    can_close = ctrls.get_ObjectPermissions(this_obj)
                    de.bug(4, "lock perms are", can_close)
                    if can_close == "has-req-obj":
                        # render feedback to player
                        renderers.render_objectActions(this_obj, cmd_ref, can_close, this_obj['permissions']['unlocked_by'])
                        # update object state
                        ctrls.update_ObjectState(obj_id, this_obj, cmd_ref)
                    else:
                        # render object state feedback to player
                        renderers.render_objectActions(this_obj, cmd_ref, can_close)
                        # player does not have the req object
                        this_via = gD.gameDB['objectsDB'][this_obj['permissions']['unlocked_by']]
                        if ctrls.get_InventorySlot(this_via) == False:
                            ctrls.printText(this_via['name'], 'not in inv')
                else:
                    # feedback to player object already locked
                    ctrls.printText(this_obj['name'], 'already locked')
            ### == use command do object custom action =============
            elif cmd_ref == "use":
                # check used obj is in player inv
                if ctrls.get_InventorySlot(this_obj) != False:
                    # no target, singleton "use"
                    if via != None:
                        #TODO: The USE command and results of it
                        ## INCOMPLETE . JUST ALL OF THIS!!
                        # check object STATE
                        ctrls.update_ObjectState(obj_id, this_obj, cmd_ref)
                        de.bug("use key on -", via)
                        #use key on box
                        # renderers.render_objectActions(o, cmd, cmd)
                        # check correct req obj for obj
                        # do something now the obj is used
                        # for example a box display stuff inside
                    else:
                        # use key - "use key on what?"
                        # render feedback to player
                        renderers.render_objectActions(this_obj, cmd_ref, cmd_ref)
                else:
                    # trying to use obj not in inv
                    ctrls.printText(this_obj['name'], 'not in inv')
        else:
            # must be an illegal command for this object
            # feedback 'you can't do that to this object'
            t = "illegal"
            renderers.render_objectActions(this_obj, cmd_ref, t)
    # IF ALL ELSE FAILS cmd is a singleton, show correct actionHelp feedback
    if cmd != None and obj == None and via == None:
        renderers.render_actionHelp(cmd_ref)
|
983,921 | 090556426ee5756045ee129a87b8d432e5da37c4 | import datetime
from django.contrib.auth.models import User
import pytest
from account.forms import ProfileForm
def make_users(number):
    """Create *number* active, non-staff test users and return all users.

    Users are named test-user0 .. test-user(number-1), share the same
    fixture data and the password 'P@ssw0rd!', and are saved to the
    database. Returns the full User queryset.
    """
    for idx in range(number):
        account = User(
            username=f'test-user{idx}',
            first_name='First Name',
            last_name='Last Name',
            email='test@testers.com',
            is_active=True,
            is_staff=False,
            date_joined=datetime.datetime.now(),
        )
        account.set_password('P@ssw0rd!')
        account.save()
    return User.objects.all()
@pytest.mark.django_db
def test_profile_form_is_valid():
    """A completely filled-in form with matching passwords validates."""
    owner = make_users(1)[0]
    payload = {
        'email': 'test@testers.com',
        'username': 'test-user0',
        'first_name': 'First Name',
        'last_name': 'Last Name',
        'password': 'P@ssw0rd!',
        'confirm_password': 'P@ssw0rd!',
    }
    assert ProfileForm(user=owner, data=payload).is_valid()
@pytest.mark.django_db
def test_profile_form_is_not_valid_due_to_invalid_username():
    """The form rejects a username containing illegal characters."""
    user = make_users(1)[0]
    form = ProfileForm(user=user, data={
        'email': 'test@testers.com',
        'username': 'test!user',
        'first_name': 'First Name',
        'last_name': 'Last Name',
    })
    assert not form.is_valid()
    # list(...) is required: comparing a list to a dict keys VIEW is
    # always False in Python 3, so the original assertion could never
    # pass there.
    assert list(form.errors.keys()) == ['username']
@pytest.mark.django_db
def test_profile_form_is_not_valid_due_to_username_taken():
    """The form rejects a username already owned by another user."""
    users = make_users(2)
    user2 = users[1]
    form = ProfileForm(user=user2, data={
        'email': 'test@testers.com',
        'username': 'test-user0',
        'first_name': 'First Name',
        'last_name': 'Last Name',
    })
    assert not form.is_valid()
    # list(...) is required: list == dict keys view is always False in
    # Python 3, so the original assertion could never pass there.
    assert list(form.errors.keys()) == ['username']
@pytest.mark.django_db
def test_profile_form_is_valid_and_change_username():
    """Switching to a new, unused username keeps the form valid."""
    owner = make_users(1)[0]
    payload = {
        'email': 'test@testers.com',
        'username': 'test-user2',
        'first_name': 'First Name',
        'last_name': 'Last Name',
    }
    assert ProfileForm(user=owner, data=payload).is_valid()
@pytest.mark.django_db
def test_profile_form_is_not_valid_due_to_bad_password():
    """A too-weak password is rejected with a 'password' error."""
    user = make_users(1)[0]
    form = ProfileForm(user=user, data={
        'email': 'test@testers.com',
        'username': 'test-user2',
        'first_name': 'First Name',
        'last_name': 'Last Name',
        'password': 'pass',
        'confirm_password': 'pass',
    })
    assert not form.is_valid()
    # list(...) is required: list == dict keys view is always False in
    # Python 3, so the original assertion could never pass there.
    assert list(form.errors.keys()) == ['password']
@pytest.mark.django_db
def test_profile_form_is_not_valid_due_to_passwords_dont_match():
    """Mismatched password/confirmation yields a form-wide error."""
    user = make_users(1)[0]
    form = ProfileForm(user=user, data={
        'email': 'test@testers.com',
        'username': 'test-user2',
        'first_name': 'First Name',
        'last_name': 'Last Name',
        'password': 'P@ssw0rd!',
        'confirm_password': 'pass',
    })
    assert not form.is_valid()
    # list(...) is required: list == dict keys view is always False in
    # Python 3, so the original assertion could never pass there.
    assert list(form.errors.keys()) == ['__all__']
@pytest.mark.django_db
def test_profile_form_saves_correctly():
    """Saving a valid form persists the new username."""
    owner = make_users(1)[0]
    payload = {
        'email': 'test@testers.com',
        'username': 'test-user1',
        'first_name': 'First Name',
        'last_name': 'Last Name',
        'password': 'P@ssw0rd!',
        'confirm_password': 'P@ssw0rd!',
    }
    form = ProfileForm(user=owner, data=payload)
    assert form.is_valid()
    form.save()
    assert User.objects.all()[0].username == 'test-user1'
@pytest.mark.django_db
def test_profile_form_saves_correctly_with_password():
    """Saving a valid form that includes a password change persists the
    new username.

    Fix: the original data omitted the password fields even though the
    test name promises a password, making it a duplicate of the
    no-password test. The fields are supplied here so the password path
    is actually exercised.
    """
    user = make_users(1)[0]
    form = ProfileForm(user=user, data={
        'email': 'test@testers.com',
        'username': 'test-user1',
        'first_name': 'First Name',
        'last_name': 'Last Name',
        'password': 'P@ssw0rd!',
        'confirm_password': 'P@ssw0rd!',
    })
    assert form.is_valid()
    form.save()
    assert User.objects.all()[0].username == 'test-user1'
|
983,922 | d0ffe9d35ce06d59508e3aafd339b6e2f5147949 | # ___ string_rev st.
# st. _ st..sp..
# st..re..
# r_ ' '.j.. st.
#
# print ? i.. "Insert some strings: " |
983,923 | eb2c1995cf3ac733b7751c8a84946620be4d9813 | # This class is dessigned to compile a list of arguments into commands
# based on the allowed_commands dictionaries it gets when initialized
# hopefully this will be highly flexible / reusable with future development
class Argument_Compiler():
    """Translates a raw argument token list into structured command
    records, driven by the allowed_commands specification supplied at
    construction time. Intended to stay flexible / reusable."""

    def __init__(self, arg_list, allowed_commands):
        """Compile *arg_list* immediately against *allowed_commands*.

        arg_list: raw argument tokens (strings).
        allowed_commands: list of dicts, each holding an 'operator'
            string and a 'type' ('String_Chain' or 'Int_List').
        The compiled result is available via get_commands().
        """
        self.arg_list = arg_list
        self.allowed_commands = allowed_commands
        # translated commands accumulate here
        self.command_list = []
        # flat list of recognized operator tokens, for quick matching
        self.actual_commands = [spec['operator'] for spec in self.allowed_commands]
        # compile on construction
        self.compile_args()

    def get_string_chain(self, index):
        """Join the tokens from *index* up to the next command (or the
        end of the list) into one space-separated, stripped string."""
        collected = []
        for token in self.arg_list[index:]:
            if self.is_command(token):
                break
            collected.append(token)
        return ' '.join(collected).strip()

    def get_num_list(self, index):
        """Collect digit-only tokens from *index* up to the next command
        (or end of list) as a list of ints; other tokens are skipped."""
        numbers = []
        for token in self.arg_list[index:]:
            if self.is_command(token):
                break
            if token.isdigit():
                numbers.append(int(token))
        return numbers

    def get_command_type(self, arg):
        """Return the declared type of operator *arg*, or None when it
        is not an allowed command."""
        for spec in self.allowed_commands:
            if spec['operator'] == arg:
                return spec['type']
        return None

    def is_command(self, s_string):
        """True when *s_string* is one of the allowed operators."""
        return s_string in self.actual_commands

    def get_commands(self):
        """Return the compiled command records."""
        return self.command_list

    def reset_command_list(self):
        """Discard any previously compiled commands."""
        self.command_list = []

    def no_commands(self):
        """True when nothing has been compiled."""
        return len(self.command_list) == 0

    def is_command_present(self, s_string):
        """True when *s_string* is a valid operator that appears in the
        compiled command list."""
        if not self.is_command(s_string):
            return False
        return any(record['command'] == s_string for record in self.command_list)

    def compile_args(self):
        """(Re)build command_list from arg_list.

        Each recognized operator becomes
        {'command': op, 'type': declared type, 'item': payload} where the
        payload is the following string chain or int list. An operator
        followed immediately by another operator (empty payload region)
        is dropped; an operator at the very end of the list is kept with
        an empty payload. Returns the number of compiled commands.
        """
        self.reset_command_list()
        total = len(self.arg_list)
        for idx, token in enumerate(self.arg_list):
            if not self.is_command(token):
                continue
            cmd_type = self.get_command_type(token)
            payload = ''
            dropped = False
            if cmd_type == 'String_Chain' and idx + 1 < total:
                payload = self.get_string_chain(idx + 1)
                dropped = len(payload) == 0
            elif cmd_type == 'Int_List' and idx + 1 < total:
                payload = self.get_num_list(idx + 1)
                dropped = len(payload) == 0
            if not dropped:
                self.command_list.append({'command': token, 'type': cmd_type, 'item': payload})
        return len(self.command_list)
|
983,924 | c2375dc7821035aed00e2be2927beec10c02fc76 | // Author: CLAY2333 @ https://github.com/CLAY2333/CLAYleetcode
class Solution:
def numJewelsInStones(self, J: str, S: str) -> int:
nums=0
for i in S:
if i in J:
nums+=1
return nums
|
983,925 | 0cc060d580e4a88617bdeeb7f2a314641ed80291 | # -*- coding:utf-8 -*-
# /usr/bin/env python
"""
Author:zhengpanone
Email:zhengpanone@hotmail.com
date:2019/11/21 11:06
"""
# import lib
from wtforms import StringField, IntegerField
from wtforms.validators import DataRequired
from pm_cms.validators.base import BaseForm
from pm_cms.libs.enums import IsOutSourceEnum, SeqPlatformEnum
class PoolingForm(BaseForm):
    """Validation form for a pooling-sheet submission.

    Fields:
        pooling_name: required pooling sheet name.
        is_outsource: outsourcing flag, validated against IsOutSourceEnum
            (per the message text: 1 = outsourced, 0 = in-house).
        seq_platform: sequencing platform code, validated against
            SeqPlatformEnum (per the message text: Novaseq=1, Miseq=2,
            Xten=3).

    NOTE(review): DataRequired on an IntegerField treats 0 as missing
    data, so is_outsource=0 may be rejected before the enum check runs;
    InputRequired is usually what is wanted here -- confirm.
    """
    pooling_name = StringField(validators=[DataRequired(message="pooling单不能为空")])
    is_outsource = IntegerField(validators=[DataRequired(message="外包为1, 不外包为0")])
    seq_platform = IntegerField(validators=[DataRequired(message="测序平台 Novaseq为1,Miseq为2,Xten为3")])
    def validate_is_outsource(self, value):
        # Reject values outside IsOutSourceEnum; the enum constructor's
        # ValueError is re-raised so the caller sees the failure.
        try:
            IsOutSourceEnum(value.data)
        except ValueError as e:
            raise e
        self.is_outsource.data = value.data
    def validate_seq_platform(self, value):
        # Same pattern for the sequencing-platform enum.
        try:
            SeqPlatformEnum(value.data)
        except ValueError as e:
            raise e
        self.seq_platform.data = value.data
|
983,926 | 800d60718b95bf2503502e90652958e427c30319 | import logging, sys, argparse
def str2bool(v):
    """argparse-friendly boolean parser (from StackOverflow).

    Accepts common yes/no spellings case-insensitively and raises
    argparse.ArgumentTypeError for anything else.
    """
    lowered = v.lower()
    if lowered in ('yes', 'true', 't', 'y', '1'):
        return True
    if lowered in ('no', 'false', 'f', 'n', '0'):
        return False
    raise argparse.ArgumentTypeError('Boolean value expected.')
def get_entity(tag_seq, char_seq):
    """Extract DISEASE, SYMPTOM and BODY entities from a tagged
    character sequence. Returns the three entity lists in that order."""
    disease, symptom, body = (
        get_typed_entity(tag_seq, char_seq, label)
        for label in ('DISEASE', 'SYMPTOM', 'BODY')
    )
    return disease, symptom, body
def get_typed_entity(tag_seq, char_seq, entity_type):
    """Extract the entities of *entity_type* from a BIO-tagged sequence.

    tag_seq and char_seq are parallel sequences of tags
    ('B-<type>' / 'I-<type>' / other) and characters; each maximal
    B-...I-...I-... run for the requested type is concatenated into one
    entity string. I- tags with no preceding B- are ignored.

    Rewritten to track the entity under construction with an explicit
    ``ent = None`` sentinel instead of probing ``locals()``, which was
    fragile (any other local named ``ent`` would corrupt the logic) and
    silently dropped a trailing entity when the two sequences differed
    in length.

    Returns a list of entity strings in order of appearance.
    """
    b_tag = 'B-' + entity_type
    i_tag = 'I-' + entity_type
    entities = []
    ent = None  # entity currently being assembled, if any
    for char, tag in zip(char_seq, tag_seq):
        if tag == b_tag:
            # a new entity starts; flush any previous one first
            if ent is not None:
                entities.append(ent)
            ent = char
        elif tag == i_tag:
            # continuation without a preceding B- is ignored
            if ent is not None:
                ent += char
        else:
            # any outside tag ends the current entity
            if ent is not None:
                entities.append(ent)
                ent = None
    if ent is not None:
        entities.append(ent)
    return entities
def get_logger(filename):
    """Return a DEBUG-level logger and mirror log output to *filename*.

    NOTE(review): the FileHandler is attached to the ROOT logger (and
    basicConfig installs a stream handler), so every call adds another
    handler -- repeated calls will duplicate log lines. The returned
    logger is the shared 'logger' instance rather than a per-file one;
    confirm this is intended before reusing.
    """
    logger = logging.getLogger('logger')
    logger.setLevel(logging.DEBUG)
    logging.basicConfig(format='%(message)s', level=logging.DEBUG)
    handler = logging.FileHandler(filename)
    handler.setLevel(logging.DEBUG)
    handler.setFormatter(logging.Formatter('%(asctime)s:%(levelname)s: %(message)s'))
    logging.getLogger().addHandler(handler)
    return logger
|
983,927 | 768e0e035886d4f95717c2b35c9babfe078032fc | # -*- coding: cp1254 -*-
from header_game_menus import *
from module_constants import *
####################################################################################################################
# (menu-id, menu-flags, menu_text, mesh-name, [<operations>], [<options>]),
#
# Each game menu is a tuple that contains the following fields:
#
# 1) Game-menu id (string): used for referencing game-menus in other files.
# The prefix menu_ is automatically added before each game-menu-id
#
# 2) Game-menu flags (int). See header_game_menus.py for a list of available flags.
# You can also specify menu text color here, with the menu_text_color macro
# 3) Game-menu text (string).
# 4) mesh-name (string). Not currently used. Must be the string "none"
# 5) Operations block (list). A list of operations. See header_operations.py for reference.
# The operations block is executed when the game menu is activated.
# 6) List of Menu options (List).
# Each menu-option record is a tuple containing the following fields:
# 6.1) Menu-option-id (string) used for referencing game-menus in other files.
# The prefix mno_ is automatically added before each menu-option.
# 6.2) Conditions block (list). This must be a valid operation block. See header_operations.py for reference.
# The conditions are executed for each menu option to decide whether the option will be shown to the player or not.
# 6.3) Menu-option text (string).
# 6.4) Consequences block (list). This must be a valid operation block. See header_operations.py for reference.
# The consequences are executed for the menu option that has been selected by the player.
#
#
# Note: The first Menu is the initial character creation menu.
####################################################################################################################
game_menus = [
("start_game_0",menu_text_color(0xFF000000)|mnf_disable_all_keys,
"Welcome, Soldier, to Mount and Blade Stories: Age of Napoleon.^Before beginning the game you must create your character and choose your preferred difficulty.",
"none",
[
(change_screen_quit),
(eq,1,0),
(assign,"$g_pres_started_from_mission",0),
(try_begin),
(neq,"$g_has_been_first_started",1),
# (try_begin),
# (gt,"$g_next_presentation",-1),
# (start_presentation, "$g_next_presentation"),
# (else_try),
(assign,"$g_has_been_first_started",1),
(assign,"$g_sp_money_gained",0),
(assign,"$g_sp_allies_lost",0),
(assign,"$g_sp_companions_lost",0),
(assign,"$g_sp_enemies_killed",0),
(assign,"$g_sp_personal_kills",0),
# Beaver - added below for custom battles - NEEDS TWEAKING
# BRITAIN
(troop_set_slot,"trp_british_infantry_ai", slot_troop_initial_morale, 3000),
(troop_set_slot,"trp_british_infantry2_ai", slot_troop_initial_morale, 3000),
(troop_set_slot,"trp_british_highlander_ai", slot_troop_initial_morale, 3000),
(troop_set_slot,"trp_british_foot_guard_ai", slot_troop_initial_morale, 5000),
(troop_set_slot,"trp_british_rifle_ai", slot_troop_initial_morale, 3000),
(troop_set_slot,"trp_british_dragoon_ai", slot_troop_initial_morale, 5000),
#(troop_set_slot,"trp_british_dragoon2_ai", slot_troop_initial_morale, 5000),
# FRANCE
(troop_set_slot,"trp_french_infantry_ai", slot_troop_initial_morale, 3000),
(troop_set_slot,"trp_french_infantry2_ai", slot_troop_initial_morale, 3000),
(troop_set_slot,"trp_french_infantry_vistula_ai", slot_troop_initial_morale, 3000),
(troop_set_slot,"trp_french_voltigeur_ai", slot_troop_initial_morale, 3000),
(troop_set_slot,"trp_french_old_guard_ai", slot_troop_initial_morale, 5000),
(troop_set_slot,"trp_french_hussar_ai", slot_troop_initial_morale, 5000),
(troop_set_slot,"trp_french_lancer_ai", slot_troop_initial_morale, 5000),
(troop_set_slot,"trp_french_dragoon_ai", slot_troop_initial_morale, 5000),
(troop_set_slot,"trp_french_cuirassier_ai", slot_troop_initial_morale, 5000),
(troop_set_slot,"trp_french_carabineer_ai", slot_troop_initial_morale, 5000),
# PRUSSIA
(troop_set_slot,"trp_prussian_infantry_ai", slot_troop_initial_morale, 3000),
(troop_set_slot,"trp_prussian_infantry_kurmark_ai", slot_troop_initial_morale, 3000),
(troop_set_slot,"trp_prussian_infantry_15_ai", slot_troop_initial_morale, 5000),
(troop_set_slot,"trp_prussian_infantry_rifle_ai", slot_troop_initial_morale, 3000),
(troop_set_slot,"trp_prussian_dragoon_ai", slot_troop_initial_morale, 5000),
#(troop_set_slot,"trp_prussian_ulany_ai", slot_troop_initial_morale, 5000),
(troop_set_slot,"trp_prussian_landwehr_cav_ai", slot_troop_initial_morale, 4000),
# RUSSIA
(troop_set_slot,"trp_russian_opol_ai", slot_troop_initial_morale, 2000),
(troop_set_slot,"trp_russian_infantry_ai", slot_troop_initial_morale, 3000),
(troop_set_slot,"trp_russian_grenadier_ai", slot_troop_initial_morale, 4000),
(troop_set_slot,"trp_russian_foot_guard_ai", slot_troop_initial_morale, 5000),
(troop_set_slot,"trp_russian_infantry_rifle_ai", slot_troop_initial_morale, 3000),
(troop_set_slot,"trp_russian_hussar_ai", slot_troop_initial_morale, 5000),
(troop_set_slot,"trp_russian_cossack_ai", slot_troop_initial_morale, 5000),
(troop_set_slot,"trp_russian_dragoon_ai", slot_troop_initial_morale, 5000),
(troop_set_slot,"trp_russian_horse_guard_ai", slot_troop_initial_morale, 5000),
(try_for_range,":value",0,20),
(troop_set_slot,"trp_custom_battle_dummy",":value",0),
(try_end),
(try_for_range,":value",20,40),
(troop_set_slot,"trp_custom_battle_dummy",":value",20),
(try_end),
(try_for_range,":value",40,60),
(troop_set_slot,"trp_custom_battle_dummy",":value",0),
(try_end),
(try_for_range,":value",60,80),
(troop_set_slot,"trp_custom_battle_dummy",":value",0),
(try_end),
(try_for_range,":value",80,100),
(troop_set_slot,"trp_custom_battle_dummy",":value",0),
(try_end),
(try_for_range,":value",100,120),
(troop_set_slot,"trp_custom_battle_dummy",":value",0),
(try_end),
#faction banners
(faction_set_slot, "fac_britain", slot_faction_banner, "mesh_banner_kingdom_f"),
(faction_set_slot, "fac_france", slot_faction_banner, "mesh_banner_kingdom_b"),
(faction_set_slot, "fac_prussia", slot_faction_banner, "mesh_banner_kingdom_c"),
(faction_set_slot, "fac_russia", slot_faction_banner, "mesh_banner_kingdom_a"),
(faction_set_slot, "fac_austria", slot_faction_banner, "mesh_banner_kingdom_d"),
#(faction_set_slot, "fac_kingdom_6", slot_faction_banner, "mesh_banner_kingdom_e"),
(troop_set_type,"trp_player",0),
(assign,"$character_gender",tf_male), # Assign to male at all times to fit the stories.. Im sorry ladies =( xxx vince.
(set_show_messages, 0),
# Uniform
(troop_add_item, "trp_player","itm_french_inf_shako_84_officer"),
(troop_add_item, "trp_player","itm_french_84e_body_officer"),
(troop_add_item, "trp_player","itm_french_voltigeur_officer_pants"),
# Weapons
(try_begin),
(eq, debug_mode, 1),
(troop_add_item, "trp_player","itm_grenade",0),
(troop_add_item, "trp_player","itm_grenade",0),
#(troop_add_item, "trp_player","itm_french_officer_pistol"),
###
(troop_add_item, "trp_player","itm_sniper_rifle"),
(troop_add_item, "trp_player","itm_explosive_bullets"),
###
(troop_add_item, "trp_player","itm_french_inf_off_sabre"),
#(troop_add_item, "trp_player","itm_pistol_ammo"),
#(troop_add_item, "trp_player","itm_heavy_horse_dragon"),
(else_try),
(troop_add_item, "trp_player","itm_french_charleville"),
(troop_add_item, "trp_player","itm_bullets"),
(troop_add_item, "trp_player","itm_french_briquet"),
(end_try),
(assign, "$g_player_troop", "trp_player"),
(troop_raise_skill, "$g_player_troop", skl_athletics, 2),
(troop_raise_skill, "$g_player_troop", skl_riding, 3),
(troop_raise_skill, "$g_player_troop", skl_power_strike, 1),
(troop_raise_skill, "$g_player_troop", skl_weapon_master, 4),
(troop_raise_skill, "$g_player_troop", skl_ironflesh, 7),
(assign,"$g_finished_missions",0),
(assign,"$g_finished_sub_missions",0),
# Give the player some random companions.
(assign,":found_companions",0),
(assign,":loop_end",1000),
(try_for_range, ":companion_number", 0, ":loop_end"),
(store_random_in_range,":random_companion",companions_begin, companions_end),
(neg|troop_slot_eq,":random_companion", slot_troop_occupation, slto_player_companion),
(troop_set_slot,":random_companion", slot_troop_occupation, slto_player_companion),
(val_add,":found_companions",1),
(eq,":found_companions",10),
(assign,":loop_end",0),
(eq,":companion_number",":companion_number"), # remove warning
(try_end),
(troop_add_gold, "trp_player", 5000),
(troop_equip_items, "trp_player"),
(try_end),
],
[
("start_easy",[],"Easy",
[
(options_set_damage_to_player, 0), # = 261 # (options_set_damage_to_player, <value>), #0 = 1/4, 1 = 1/2, 2 = 1/1
(options_set_damage_to_friends, 0), # = 263 # (options_set_damage_to_friends, <value>), #0 = 1/2, 1 = 3/4, 2 = 1/1
(options_set_combat_ai, 2), # = 265 # (options_set_combat_ai, <value>), #0 = good, 1 = average, 2 = poor
(options_set_campaign_ai, 2), # = 267 # (options_set_campaign_ai, <value>), #0 = good, 1 = average, 2 = poor
(options_set_combat_speed, 0), # = 269 # (options_set_combat_speed, <value>), #0 = slowest, 1 = slower, 2 = normal, 3 = faster, 4 = fastest
(assign,"$g_global_morale_modifier",12), #Player troops' morale x1.2
(start_presentation, "prsnt_singleplayer_campain_map"),
#(jump_to_menu, "mnu_start_game_1"),
#(assign,"$g_finished_missions",2),
]
),
("start_normal",[],"Normal",
[
(options_set_damage_to_player, 1), # = 261 # (options_set_damage_to_player, <value>), #0 = 1/4, 1 = 1/2, 2 = 1/1
(options_set_damage_to_friends, 1), # = 263 # (options_set_damage_to_friends, <value>), #0 = 1/2, 1 = 3/4, 2 = 1/1
(options_set_combat_ai, 0), # = 265 # (options_set_combat_ai, <value>), #0 = good, 1 = average, 2 = poor
(options_set_campaign_ai, 1), # = 267 # (options_set_campaign_ai, <value>), #0 = good, 1 = average, 2 = poor
(options_set_combat_speed, 2), # = 269 # (options_set_combat_speed, <value>), #0 = slowest, 1 = slower, 2 = normal, 3 = faster, 4 = fastest
(assign,"$g_global_morale_modifier",10), #Player troops' morale x1.0
(start_presentation, "prsnt_singleplayer_campain_map"),
#(jump_to_menu, "mnu_start_game_1"),
]
),
("start_hard",[],"Hard",
[
(options_set_damage_to_player, 2), # = 261 # (options_set_damage_to_player, <value>), #0 = 1/4, 1 = 1/2, 2 = 1/1
(options_set_damage_to_friends, 2), # = 263 # (options_set_damage_to_friends, <value>), #0 = 1/2, 1 = 3/4, 2 = 1/1
(options_set_combat_ai, 0), # = 265 # (options_set_combat_ai, <value>), #0 = good, 1 = average, 2 = poor
(options_set_campaign_ai, 0), # = 267 # (options_set_campaign_ai, <value>), #0 = good, 1 = average, 2 = poor
(options_set_combat_speed, 4), # = 269 # (options_set_combat_speed, <value>), #0 = slowest, 1 = slower, 2 = normal, 3 = faster, 4 = fastest
(assign,"$g_global_morale_modifier",8), #Player troops' morale x0.8
(start_presentation, "prsnt_singleplayer_campain_map"),
#(jump_to_menu, "mnu_start_game_1"),
]
),
("go_back",[],"Go back",
[
(change_screen_quit),
]),
("debug",[],"Debug",
[
(assign,"$character_gender",tf_male),
(jump_to_menu, "mnu_debug"),
]),
]
),
("start_phase_2", mnf_disable_all_keys,
"Start Phase 2",
"none",
[],
[
("map", [], "Map",
[
(change_screen_map),
]),
]),
("start_game_3", mnf_disable_all_keys,
"Start Game 3",
"none",
[
(change_screen_quit),
(eq,1,0),
],
[
]),
(
"tutorial",mnf_disable_all_keys,
"Good Afternoon, Sir. Beautiful weather today, sir. Would you care for a cup of tea?",
"none",
[
(try_begin),
(eq, "$g_tutorial_entered", 1),
(change_screen_quit),
(else_try),
(set_passage_menu, "mnu_tutorial"),
(assign, "$g_tutorial_entered", 1),
(play_sound,"snd_tutorial_voice_start_1"),
(try_end),
],
[
("continue",[],"Yes, please.",
[
(stop_all_sounds,1),
(modify_visitors_at_site,"scn_tutorial"),
(reset_visitors, 0),
(set_player_troop, "trp_player"),
(assign, "$g_player_troop", "trp_player"),
(troop_raise_attribute, "$g_player_troop", ca_strength, 14),
(troop_raise_attribute, "$g_player_troop", ca_agility, 14),
(troop_raise_skill, "$g_player_troop", skl_athletics, 3),
(troop_raise_skill, "$g_player_troop", skl_riding, 6),
(troop_raise_skill, "$g_player_troop", skl_power_strike, 3),
(troop_raise_skill, "$g_player_troop", skl_weapon_master, 4),
(troop_raise_skill, "$g_player_troop", skl_ironflesh, 3),
(troop_raise_proficiency_linear, "$g_player_troop", wpt_one_handed_weapon, 80),
(troop_raise_proficiency_linear, "$g_player_troop", wpt_polearm, 130),
(troop_raise_proficiency_linear, "$g_player_troop", wpt_crossbow, 150),
(troop_clear_inventory, "$g_player_troop"),
(troop_add_item, "$g_player_troop","itm_british_infantry_ranker",0),
(troop_add_item, "$g_player_troop","itm_french_voltigeur_officer_pants",0),
(troop_add_item, "$g_player_troop","itm_33_stovepipe",0),
#(troop_add_item, "$g_player_troop","itm_ramrod",0),
#(troop_add_item, "$g_player_troop","itm_rockets",0),
(troop_equip_items, "$g_player_troop"),
(set_visitor,0,"trp_player"),
(set_jump_mission,"mt_tutorial"),
(jump_to_scene,"scn_tutorial"),
(change_screen_mission),
]),
("go_back_dot",
[],
"No, thanks.",
[
(stop_all_sounds,1),
(change_screen_quit),
]),
]
),
("reports", 0,
"Reports",
"none",
[],
[
("resume_travelling",[],"Resume travelling.",
[
(change_screen_return),
]),
]),
("camp", mnf_scale_picture,
"Camp",
"none",
[
],
[
("camp_wait_here", [], "Rest.",
[
(rest_for_hours_interactive, 24 * 365, 5, 1),
(change_screen_return),
]),
("resume_travelling",[], "Dismantle camp.",
[
(change_screen_return),
]),
]),
(
"custom_battle_scene",menu_text_color(0xFF000000)|mnf_disable_all_keys,
"(NO_TRANS)",
"none",
[],
[
("quick_battle_scene_1",[],"{!}quick_battle_scene_1",
[
# (set_jump_mission,"mt_ai_training"),
(jump_to_scene,"scn_quick_battle_scene_1"),(change_screen_mission)
]
),
("quick_battle_scene_2",[],"{!}quick_battle_scene_2",
[
# (set_jump_mission,"mt_ai_training"),
(jump_to_scene,"scn_quick_battle_scene_2"),(change_screen_mission)
]
),
("quick_battle_scene_3",[],"{!}quick_battle_scene_3",
[
# (set_jump_mission,"mt_ai_training"),
(jump_to_scene,"scn_quick_battle_scene_3"),(change_screen_mission)
]
),
("quick_battle_scene_4",[],"{!}quick_battle_scene_4",
[
# (set_jump_mission,"mt_ai_training"),
(jump_to_scene,"scn_quick_battle_scene_4"),(change_screen_mission)
]
),
# ("quick_battle_scene_5",[],"{!}quick_battle_scene_5",
# [
##(set_jump_mission,"mt_ai_training"),
# (jump_to_scene,"scn_quick_battle_scene_5"),(change_screen_mission)
# ]
# ),
("go_back",[],"{!}Go back",
[(change_screen_quit),
]
),
]
),
#depreciated
(
"custom_battle_end",mnf_disable_all_keys,
"The battle is over. {s1} Your side killed {reg5} enemies and lost {reg6} troops over the battle. You personally slew {reg7} men in the fighting.",
"none",
[(music_set_situation, 0),
(assign, reg5, "$g_custom_battle_team2_death_count"),
(assign, reg6, "$g_custom_battle_team1_death_count"),
(get_player_agent_kill_count, ":kill_count"),
(get_player_agent_kill_count, ":wound_count", 1),
(store_add, reg7, ":kill_count", ":wound_count"),
(try_begin),
(eq, "$g_battle_result", 1),
(str_store_string, s1, "str_battle_won"),
(else_try),
(str_store_string, s1, "str_battle_lost"),
(try_end),
],
[
("continue",[],"Continue.",
[(change_screen_quit),
]
),
]
),
(
"town_trade",0,
"You head towards the camp's trading place.",
"none",
[],
[
("trade_with_arms_merchant",[],
"Trade with the arms merchant.",
[
(change_screen_trade, "trp_camp_weaponsmith"),
]),
("trade_with_armor_merchant",[],
"Trade with the tailor.",
[
(change_screen_trade, "trp_camp_armorer"),
]),
("trade_with_horse_merchant",[],
"Trade with the horse merchant.",
[
(change_screen_trade, "trp_camp_horse_merchant"),
]),
("back_to_town_menu",[],"Head back.",
[
(start_presentation,"prsnt_singleplayer_camp_screen"),
]),
]
),
("debug",menu_text_color(0xFF000000)|mnf_disable_all_keys,
"Choose a scene",
"none",
[],
[
("scene1",[],"Vienna Bridge (sp_vienna)",
[
(assign,"$g_global_morale_modifier",10),
(modify_visitors_at_site,"scn_sp_vienna"),
(reset_visitors, 0),
(set_player_troop, "trp_player"),
(set_visitor,0,"trp_player"),
(set_jump_mission,"mt_sp_campaign_vienna"),
(jump_to_scene,"scn_sp_vienna"),
(change_screen_mission),
]
),
("scene2",[],"Austerlitz part 1 (sp_sokolniz)",
[
(assign,"$g_global_morale_modifier",10),
(modify_visitors_at_site,"scn_sp_sokolniz"),
(reset_visitors, 0),
(set_player_troop, "trp_player"),
(set_visitor,0,"trp_player"),
(set_jump_mission,"mt_sp_campaign_austerlitz_1"),
(jump_to_scene,"scn_sp_sokolniz"),
(change_screen_mission),
]
),
("scene3",[],"Austerlitz part 2 (sp_auster)",
[
(assign,"$g_global_morale_modifier",10),
(modify_visitors_at_site,"scn_sp_auster"),
(reset_visitors, 0),
(set_player_troop, "trp_player"),
(set_visitor,0,"trp_player"),
#(set_jump_mission,"mt_sp_campaign_austerlitz_2"),
(jump_to_scene,"scn_sp_auster"),
(change_screen_mission),
]
),
("scene4",[],"Austerlitz part 3 (sp_sokolniz2)",
[
(assign,"$g_global_morale_modifier",10),
(modify_visitors_at_site,"scn_sp_sokolniz2"),
(reset_visitors, 0),
(set_player_troop, "trp_player"),
(set_visitor,0,"trp_player"),
#(set_jump_mission,"mt_sp_campaign_austerlitz_3"),
(jump_to_scene,"scn_sp_sokolniz2"),
(change_screen_mission),
]
),
("scene5",[],"Dresden part 1 (sp_dresden1)",
[
(assign,"$g_global_morale_modifier",10),
(modify_visitors_at_site,"scn_sp_dresden1"),
(reset_visitors, 0),
(set_player_troop, "trp_player"),
(set_visitor,0,"trp_player"),
(set_jump_mission,"mt_sp_campaign_dresden_1"),
(jump_to_scene,"scn_sp_dresden1"),
(change_screen_mission),
]
),
("scene6",[],"Dresden part 2 (sp_dresden2)",
[
(assign,"$g_global_morale_modifier",10),
(modify_visitors_at_site,"scn_sp_dresden2"),
(reset_visitors, 0),
(set_player_troop, "trp_player"),
(set_visitor,0,"trp_player"),
(set_jump_mission,"mt_sp_campaign_dresden_2"),
(jump_to_scene,"scn_sp_dresden2"),
(change_screen_mission),
]
),
("scene7",[],"New Scene 1 (NON-PLAYABLE)",
[
(assign,"$g_global_morale_modifier",10),
(modify_visitors_at_site,"scn_sp_scene_1"),
(reset_visitors, 0),
(set_player_troop, "trp_player"),
(set_visitor,0,"trp_player"),
#(set_jump_mission,"mt_sp_campaign_dresden_2"),
(jump_to_scene,"scn_sp_scene_1"),
(change_screen_mission),
]
),
("scene8",[],"New Scene 2 (NON-PLAYABLE)",
[
(assign,"$g_global_morale_modifier",10),
(modify_visitors_at_site,"scn_sp_scene_2"),
(reset_visitors, 0),
(set_player_troop, "trp_player"),
(set_visitor,0,"trp_player"),
#(set_jump_mission,"mt_sp_campaign_dresden_2"),
(jump_to_scene,"scn_sp_scene_2"),
(change_screen_mission),
]
),
("scene9",[],"New Scene 3 (NON-PLAYABLE)",
[
(assign,"$g_global_morale_modifier",10),
(modify_visitors_at_site,"scn_sp_scene_3"),
(reset_visitors, 0),
(set_player_troop, "trp_player"),
(set_visitor,0,"trp_player"),
#(set_jump_mission,"mt_sp_campaign_dresden_2"),
(jump_to_scene,"scn_sp_scene_3"),
(change_screen_mission),
]
),
("scene10",[],"New Scene 4 (NON-PLAYABLE)",
[
(assign,"$g_global_morale_modifier",10),
(modify_visitors_at_site,"scn_sp_scene_4"),
(reset_visitors, 0),
(set_player_troop, "trp_player"),
(set_visitor,0,"trp_player"),
#(set_jump_mission,"mt_sp_campaign_dresden_2"),
(jump_to_scene,"scn_sp_scene_4"),
(change_screen_mission),
]
),
("go_back",[],"Go back",
[
(jump_to_menu, "mnu_start_game_0"),
]),
]
),
(
"run_mission_dummy",mnf_disable_all_keys,
"debug screen",
"none",
[
(try_begin),
(eq,"$g_started_mission",1),
(assign,"$g_started_mission",0),
(assign,"$g_global_morale_modifier",10),
(try_begin),
(eq,"$g_finished_missions",0), # Vienna
(assign,"$g_global_morale_modifier",10),
(modify_visitors_at_site,"scn_sp_vienna"),
(reset_visitors, 0),
(set_player_troop, "trp_player"),
(set_visitor,0,"trp_player"),
(set_jump_mission,"mt_sp_campaign_vienna"),
(jump_to_scene,"scn_sp_vienna"),
(change_screen_mission),
(else_try),
(eq,"$g_finished_missions",1), # Austerlitz
(modify_visitors_at_site,"scn_sp_sokolniz"),
(reset_visitors, 0),
(set_player_troop, "trp_player"),
(set_visitor,0,"trp_player"),
(set_jump_mission,"mt_sp_campaign_austerlitz_1"),
#(jump_to_menu, "mnu_custom_battle_end"),
(jump_to_scene,"scn_sp_sokolniz"),
(change_screen_mission),
# (presentation_set_duration,0),
(else_try),
(eq,"$g_finished_missions",2), # Drezden
(try_begin),
(eq,"$g_finished_sub_missions",0), # part 1
(modify_visitors_at_site,"scn_sp_dresden1"),
(reset_visitors, 0),
(set_player_troop, "trp_player"),
(set_visitor,0,"trp_player"),
(set_jump_mission,"mt_sp_campaign_dresden_1"),
(jump_to_scene,"scn_sp_dresden1"),
(change_screen_mission),
#(presentation_set_duration,0),
(else_try),
(eq,"$g_finished_sub_missions",1), # part 2
(modify_visitors_at_site,"scn_sp_dresden2"),
(reset_visitors, 0),
(set_player_troop, "trp_player"),
(set_visitor,0,"trp_player"),
(set_jump_mission,"mt_sp_campaign_dresden_2"),
(jump_to_scene,"scn_sp_dresden2"),
(change_screen_mission),
(try_end),
(else_try),
#(val_add,"$g_finished_missions",1),
#(val_add,"$g_finished_sub_missions",1),
#(jump_to_menu, "mnu_custom_battle_end"),
(start_presentation, "prsnt_singleplayer_mission_results"),
(try_end),
(try_end),
],
[
("continue",[],"Continue",
[
(start_presentation, "prsnt_singleplayer_mission_results"),
]),
("debug",[],"Debug menu",
[
(assign,"$character_gender",tf_male),
(jump_to_menu, "mnu_debug"),
]),
]
),
(
"run_companion_dummy",mnf_disable_all_keys,
"debug screen",
"none",
[
(try_begin),
(neq,"$g_started_companion",0),
(assign,":this_comp","$g_started_companion"),
(assign,"$g_started_companion",0),
(change_screen_equip_other, ":this_comp"),
(else_try),
#(val_add,"$g_finished_missions",1),
#(val_add,"$g_finished_sub_missions",1),
#(jump_to_menu, "mnu_custom_battle_end"),
#(start_presentation, "prsnt_singleplayer_mission_results"),
(try_end),
],
[
("continue",[],"Continue",
[
(start_presentation, "prsnt_singleplayer_companion_equipment_select"),
]),
("debug",[],"Debug menu",
[
(assign,"$character_gender",tf_male),
(jump_to_menu, "mnu_debug"),
]),
]
),
(
"visit_camp_dummy",mnf_disable_all_keys,
"debug screen",
"none",
[
(try_begin),
(neq,"$g_started_camp",0),
(assign,":this_scn","$g_started_camp"),
(assign,"$g_started_camp",0),
(modify_visitors_at_site,":this_scn"),
(reset_visitors, 0),
(set_player_troop, "trp_player"),
(set_visitor,0,"trp_player"),
(set_jump_mission,"mt_camp_1"),
(jump_to_scene,":this_scn"),
(change_screen_mission),
(else_try),
#(val_add,"$g_finished_missions",1),
#(val_add,"$g_finished_sub_missions",1),
#(jump_to_menu, "mnu_custom_battle_end"),
#(start_presentation, "prsnt_singleplayer_mission_results"),
(try_end),
],
[
("continue",[],"Continue",
[
(start_presentation, "prsnt_singleplayer_camp_screen"),
]),
("debug",[],"Debug menu",
[
(assign,"$character_gender",tf_male),
(jump_to_menu, "mnu_debug"),
]),
]
),
(
"go_briefing_dummy",mnf_disable_all_keys,
"debug screen",
"none",
[
],
[
("continue",[],"Continue",
[
(start_presentation, "prsnt_singleplayer_memoir_screen"),
]),
("debug",[],"Debug menu",
[
(assign,"$character_gender",tf_male),
(jump_to_menu, "mnu_debug"),
]),
]
),
(
"quit_dummy",mnf_disable_all_keys,
"debug screen",
"none",
[
(change_screen_quit),
],
[
("continue",[],"Continue",
[
(start_presentation, "prsnt_singleplayer_camp_screen"),
]),
("debug",[],"Debug menu",
[
(assign,"$character_gender",tf_male),
(jump_to_menu, "mnu_debug"),
]),
]
),
# (change_screen_quit),
]
|
983,928 | d67e36fdfc8c18aae137089d94217af13ad25600 | # Generated by Django 2.2.8 on 2020-01-21 06:22
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated schema migration for the User app.

    Relaxes the three role flags (Is_Candidate, Is_Organization,
    Is_University) so they may be NULL in the database.
    """

    dependencies = [
        # Must run after the app's initial schema migration.
        ('User', '0001_initial'),
    ]

    operations = [
        # Each AlterField redefines the flag as a nullable BooleanField.
        migrations.AlterField(
            model_name='user',
            name='Is_Candidate',
            field=models.BooleanField(null=True),
        ),
        migrations.AlterField(
            model_name='user',
            name='Is_Organization',
            field=models.BooleanField(null=True),
        ),
        migrations.AlterField(
            model_name='user',
            name='Is_University',
            field=models.BooleanField(null=True),
        ),
    ]
|
983,929 | fa06849511df8cb6afacf39997590723df0cd08a | import socket
import os
# Simple TCP client: fetches a timestamp string from a server and sets the
# local clock with `sudo date -s`.
IP = input("IP adresi girin:")
PORT = 142
print("IP = ", IP)
print("PORT = ", PORT)

try:
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    print("Sunucu ile bağlanılıyor.")
    s.connect((IP, PORT))
except socket.error as err:
    # BUG FIX: the original fell through after a failed connection and then
    # crashed on the closed socket; exit cleanly instead.  It also printed
    # the exception *class* rather than the actual error.
    print("Hata!", err)
    s.close()
    raise SystemExit(1)

# Handshake: consume the server's greeting, acknowledge, then read the
# timestamp payload.
veri = s.recv(1024)
s.send('True'.encode())
veri = s.recv(1024)
mesaj = veri.decode()
print("Gelen Mesaj:", mesaj)
s.close()

# Expected message format: "YYYY-MM-DD HH:MM:SS,UTC_OFFSET".
z = str(mesaj).split(",")
UTC = z[1]
z = z[0].split(" ")
zaman = z[1]
z = z[0].split("-")
# BUG FIX: this variable was originally named `date`, but every use below
# referred to `tarih`, raising a NameError at runtime.
tarih = str(z[2]) + '/' + str(z[1]) + '/' + str(z[0])
print("UTC = ", UTC)
print("Saat = ", zaman)
print("Tarih = ", tarih)
komut = 'sudo date -s ' + '"' + str(tarih) + ' ' + str(zaman) + '"'
print('Komut: ', komut)
os.system(komut)
print("Saat Değiştirildi.")
|
983,930 | 5735fd354fcedeceb88c8016334a686f3e83405e | """
sam.py: SAM file parser
"""
from __future__ import print_function
import unittest
from python.functest.utils.sam import (
sam_format,
parse_sam_line
)
from python.functest.utils.setup import (
ftest_setup,
ftest_teardown,
ftest_module_setup
)
from python.functest.utils.defaults import GOLDEN_DIR
def setUpModule():
    """Print this module's docstring as an announcement, then satisfy
    module-level test requirements."""
    # Identify the test module in the run output.
    print(__doc__)
    # Environment/prerequisite setup shared by every test in the module.
    ftest_module_setup()
class UtilitiesTestCases(unittest.TestCase):
    """Tests for the SAM parsing utilities (sam_format / parse_sam_line)."""

    def setUp(self):
        # Shared per-test fixture setup provided by the functest utils.
        ftest_setup(self)

    def tearDown(self):
        ftest_teardown(self)

    def test_sam_parser_success(self):
        """
        human_100.sam should have 100 entries in it, and parsing should throw no exceptions.
        """
        # self.cleanup = False
        sam_file = '%s/../human_100.sam' % GOLDEN_DIR
        count = 0
        with open(sam_file, 'r') as sam:
            for line in sam:
                # Skip tags (SAM header lines begin with '@').
                if line[0] == '@':
                    continue
                alignment = parse_sam_line(line)
                # Verify that the type conversions are all correct.
                # NOTE(review): the name->type map is rebuilt for every line;
                # it could be hoisted out of the loop.
                types = {}
                for entry in sam_format():
                    types[entry['name']] = entry['type']
                for field in alignment:
                    self.assertIs(type(alignment[field]), types[field])
                count = count + 1
        self.assertEqual(count, 100)

    def test_sam_parser_throw(self):
        """
        Parsing a non-SAM file should throw an exception.
        """
        # self.cleanup = False
        some_file = '%s/fake_results' % GOLDEN_DIR
        try:
            with open(some_file, 'r') as something:
                for line in something:
                    parse_sam_line(line)
        # Any exception type counts as a pass here, hence the broad catch.
        # pylint: disable=broad-except
        except Exception:
            pass
        else:
            self.fail('Exception should have been called when parsing a non-SAM file.')
# Allow running this test module directly; verbosity=2 lists each test name.
if __name__ == '__main__':
    unittest.main(verbosity=2)
|
983,931 | 41022f000174110a67cb954f32c44b91f3120ee3 | # -*- coding: utf-8 -*-
"""
Created on Wed Jan 10 10:22:57 2018
@author: KEEL
"""
from tweepy import *
import urllib
import urllib.request
import sys
import datetime
import re
import cv2
import numpy as np
import tensorflow as tf
# Number of VTuber classes the classifier distinguishes.
NUM_CLASSES = 24
# Input images are IMG_SIZE x IMG_SIZE, 3 channels.
IMG_SIZE = 28
IMG_PIXELS = IMG_SIZE*IMG_SIZE*3
def inference(images_placeholder, keep_prob):
    """Build the CNN classification graph.

    Args:
        images_placeholder: placeholder of flattened RGB images,
            shape (batch, IMG_PIXELS).
        keep_prob: placeholder for the dropout keep probability.

    Returns:
        y_conv: softmax probabilities over the NUM_CLASSES classes.

    NOTE(review): the explicit variable names (W_conv1, ...) should stay
    stable -- presumably the checkpoint under ./Model is keyed on them.
    """
    # conv1: 5x5, 3 -> 32 channels, followed by 2x2 max pooling.
    W_conv1 = tf.Variable(tf.truncated_normal([5, 5, 3, 32], stddev=0.1), name='W_conv1')
    b_conv1 = tf.Variable(tf.constant(0.1, shape=[32]), name='b_conv1')
    x_image = tf.reshape(images_placeholder, [-1, IMG_SIZE, IMG_SIZE, 3])
    h_conv1 = tf.nn.relu(tf.nn.conv2d(x_image, W_conv1, strides=[1, 1, 1, 1], padding='SAME') + b_conv1)
    h_pool1 = tf.nn.max_pool(h_conv1, ksize=[1, 2, 2, 1], strides=[1, 2, 2, 1], padding='SAME')
    # (a duplicated, unused reshape of the input that sat here was removed)
    # conv2: 5x5, 32 -> 64 channels, followed by 2x2 max pooling.
    W_conv2 = tf.Variable(tf.truncated_normal([5, 5, 32, 64], stddev=0.1), name='W_conv2')
    b_conv2 = tf.Variable(tf.constant(0.1, shape=[64]), name='b_conv2')
    h_conv2 = tf.nn.relu(tf.nn.conv2d(h_pool1, W_conv2, strides=[1, 1, 1, 1], padding='SAME') + b_conv2)
    h_pool2 = tf.nn.max_pool(h_conv2, ksize=[1, 2, 2, 1], strides=[1, 2, 2, 1], padding='SAME')
    # fc1: flatten the 7x7x64 pooled maps into 1024 units, with dropout.
    W_fc1 = tf.Variable(tf.truncated_normal([7 * 7 * 64, 1024], stddev=0.1), name='W_fc1')
    b_fc1 = tf.Variable(tf.constant(0.1, shape=[1024]), name='b_fc1')
    h_pool2_flat = tf.reshape(h_pool2, [-1, 7*7*64])
    h_fc1 = tf.nn.relu(tf.matmul(h_pool2_flat, W_fc1) + b_fc1)
    h_fc1_drop = tf.nn.dropout(h_fc1, keep_prob)
    # fc2: project to class scores and apply softmax.
    W_fc2 = tf.Variable(tf.truncated_normal([1024, NUM_CLASSES], stddev=0.1), name='W_fc2')
    b_fc2 = tf.Variable(tf.constant(0.1, shape=[NUM_CLASSES]), name='b_fc2')
    y_conv = tf.nn.softmax(tf.matmul(h_fc1_drop, W_fc2) + b_fc2)
    return y_conv
# Path to the OpenCV LBP cascade used for anime face detection.
xml_path = "./lbpcascade_animeface.xml"
# Directory where annotated face images are written.
out_path = "./face/"
def faceDetect(img_path):
    """Detect anime faces in the image at ``img_path``.

    Writes one annotated copy of the image per detected face into
    ``out_path`` and returns the (x, y, width, height) row held in
    ``face_points[0]`` after the selection loop below; returns None
    (implicitly) when no face was found.
    """
    classifier = cv2.CascadeClassifier(xml_path)
    img_count = 1
    face_imgs = []
    #for img_path in img_list:
    org_img = cv2.imread(img_path, cv2.IMREAD_COLOR)
    gray_img = cv2.imread(img_path, cv2.IMREAD_GRAYSCALE)
    face_points = classifier.detectMultiScale(gray_img, \
        scaleFactor=1.2, minNeighbors=2, minSize=(1,1))
    for points in face_points:
        x, y, width, height = points
        # Crop the detected face and scale it to the classifier input size.
        dst_img = org_img[y:y+height, x:x+width]
        dst_img = cv2.resize(dst_img, (IMG_SIZE,IMG_SIZE))
        face_imgs.append(dst_img)
        # Draw the bounding box on the original image and save a copy.
        face_img = cv2.rectangle(org_img, (x,y), (x+width,y+height), (0, 0, 0), 2)
        new_img_name = out_path + str(img_count) + 'face.jpg'
        cv2.imwrite(new_img_name, face_img)
        # Keep the "largest" detection in row 0.
        # NOTE(review): this compares the new face's *width* to row 0's
        # *height* (index 3) -- confirm whether index 2 (width) was intended.
        if width > face_points[0,3]:
            face_points[0] = points
        img_count += 1
        print(img_count)
    if img_count != 1:
        # At least one face was found; return the selected row.
        return face_points[0]
def get_oauth():
    """Build an authenticated tweepy OAuth handler from the module-level
    ``lines`` credentials (consumer key/secret, access key/secret)."""
    key, secret, token, token_secret = lines[0], lines[1], lines[2], lines[3]
    handler = OAuthHandler(key, secret)
    handler.set_access_token(token, token_secret)
    return handler
class StreamListener(StreamListener):
    """Stream listener that classifies faces in images replied to the bot
    account and tweets back the predicted VTuber name."""

    # Called every time a tweet arrives on the stream.
    def on_status(self, status):
        if status.in_reply_to_screen_name == 'jdatmtjp':
            print('replyed')
            if 'media' in status.entities:
                print('The reply has media')
                # (the original also computed a URL-stripped copy of the
                # tweet text and an RGB conversion of the frame; both were
                # unused and have been removed)
                medias = status.entities['media']
                m = medias[0]
                media_url = m['media_url']
                print(media_url)
                # Save the attached image under a time-stamped filename.
                now = datetime.datetime.now()
                time = now.strftime("%H%M%S")
                filename = '{}.jpg'.format(time)
                try:
                    urllib.request.urlretrieve(media_url, filename)
                except IOError:
                    print("保存に失敗しました")
                frame = cv2.imread(filename)
                # Face detection.
                dets = faceDetect(filename)
                if not isinstance(dets, type(None)):
                    x, y, width, height = dets
                    image = frame[y:y+height, x:x+width]
                    cv2.rectangle(frame, (x, y), (x+width, y+height), (0, 0, 0), 4)
                    cv2.imwrite(filename, frame)
                    # Scale the face crop to the classifier input and run it.
                    image = cv2.resize(image.copy(), (28, 28))
                    ximage = []
                    ximage.append(image.flatten().astype(np.float32)/255.0)
                    ximage = np.asarray(ximage)
                    print(ximage.shape)
                    result = logits.eval(session=sess, feed_dict={images_placeholder: ximage, keep_prob: 1.0})
                    pred = np.argmax(result)
                    rate = np.amax(result)*100
                    print(pred)
                    VTuber_Name = ['キズナアイ','ミライアカリ','バーチャルのじゃロリ狐娘Youtuberおじさん','電脳少女シロ',
                                   '輝夜 月','カフェ野ゾンビ子','虚拟DD','富士 葵','藤崎由愛','Hoonie','ぜったい天使くるみちゃん',
                                   'ミディ','ミアル','もちひよこ','モスコミュール','アリシア・ソリッド(諸々の事情からこの表記で)','ニーツ',
                                   'バーチャル美少女YouTuberねむ','のらきゃっと','雷電カスカ','スズキセシル','届木ウカ','トキノソラ','馬越健太郎',]
                    # NOTE(review): '%%の確率で' is never %-formatted, so the
                    # tweet contains a literal '%%'; confirm whether a single
                    # '%' was intended before changing it.
                    message = '.@'+status.author.screen_name+' '+'%5.3f'%rate+'%%の確率で'+'%s'%VTuber_Name[pred]
                else:
                    image = frame
                    cv2.imwrite("original.jpg", image)
                    print("no face")
                    message = '.@'+status.author.screen_name+' This image has no face.'
                try:
                    # Reply with the annotated image attached.
                    api.update_with_media(filename, status=message, in_reply_to_status_id=status.id)
                # BUG FIX: the original 'except(TweepError, e):' evaluated the
                # undefined name 'e' when an exception occurred, raising
                # NameError instead of handling the API error.
                except TweepError as e:
                    print("error response code: " + str(e.response.status))
                    print("error message: " + str(e.response.reason))
# Twitter API login credentials, read from config.txt
# (one value per line: consumer key, consumer secret, access key, access secret).
f = open('config.txt')
data = f.read()
f.close()
lines = data.split('\n')
# Build the classification graph and restore the trained weights.
images_placeholder = tf.placeholder("float", shape=(None, IMG_PIXELS))
keep_prob = tf.placeholder("float")
logits = inference(images_placeholder, keep_prob)
sess = tf.InteractiveSession()
sess.run(tf.global_variables_initializer())
saver = tf.train.Saver()
# NOTE(review): the Saver above is immediately replaced by the one returned
# from import_meta_graph -- the first assignment looks redundant; confirm.
saver = tf.train.import_meta_graph('./Model/model.ckpt.meta')
saver.restore(sess, "./Model/model.ckpt")
# Prepare and start streaming.
auth = get_oauth()
api = API(auth)
stream = Stream(auth, StreamListener(), secure=True)
print("Start Streaming!")
stream.userstream()
983,932 | 8b16e81ab02fc677400e4f8eacd598aa75e6d71c | import nltk
import glob
import sys
import argparse
# Split one summary file into sentences and write each sentence to its own
# numbered file under tempdir/.
parser = argparse.ArgumentParser()
parser.add_argument('file_n')
args = parser.parse_args()
n = args.file_n

# Read the whole file at once (the original accumulated it line-by-line
# with '+=', which is quadratic; the unused 'count' and 'file_try' locals
# were also dropped).
name = "/mnt/c/Users/Yogesh Kushwah/Desktop/major/summary_files/" + n
with open(name, encoding="latin-1") as f:
    text_is = f.read()

sent_text = nltk.sent_tokenize(text_is)
length_is = len(sent_text)
print(length_is)
# Files are numbered from 1, matching the original range(0, n)/(i+1) scheme.
for i, sentence in enumerate(sent_text, start=1):
    filename = "/mnt/c/Users/Yogesh Kushwah/Desktop/major/tempdir/%d.txt" % i
    with open(filename, "w") as f1:
        f1.write(sentence)
|
983,933 | e0660df6b5a461044dc7460ca3abaeaa5a8cdf88 | #!/usr/bin/env python3.3.1
# MAIN
class GenerateMetrics:
    """Collect per-testcase metrics from regression run directories and
    write them into Regr_Suite_Runs_Comparison_Data.csv."""

    @staticmethod
    def determine_testcases(command_line_args):
        """Resolve the list of testcase directories from the CLI arguments.

        The directory layout is selected by Search_Key.Order.default in
        config.json:
          "1" - megatest layout with the duplicate directory (descend 2 levels)
          "2" - megatest layout without the duplicate directory (descend 1 level)
          "3" - the arguments themselves are testcase directories
          "4" - date-folder layout: descend 3 levels under '*runs' directories
        """
        import json
        import os
        # 'with' guarantees the config file is closed even if json.load raises.
        with open(r'/nfs/pdx/home/tjstickx/work/genCSV/config.json', 'r') as f:
            json_data = json.load(f)
        # Default value for the search-order key from the json config.
        dir_structure = json_data['Search_Key']["Order"]["default"]
        print("number of arguments received:", (len(command_line_args) - 1))
        print(command_line_args)
        test_cases_list = []
        print("### Finding the required testcases ###")
        # command_line_args[0] is always the script name; slicing is clearer
        # and safer than the original per-element identity ('is not') check.
        arguments = command_line_args[1:]
        if dir_structure == "1":
            for argument in arguments:
                first_level = [os.path.join(argument, name) for name in os.listdir(argument)
                               if os.path.isdir(os.path.join(argument, name))]
                for dir_name in first_level:
                    test_cases_list.extend(
                        os.path.join(dir_name, name) for name in os.listdir(dir_name)
                        if os.path.isdir(os.path.join(dir_name, name)))
        elif dir_structure == "2":
            for argument in arguments:
                test_cases_list.extend(
                    os.path.join(argument, name) for name in os.listdir(argument)
                    if os.path.isdir(os.path.join(argument, name)))
        elif dir_structure == "3":
            test_cases_list.extend(arguments)
        elif dir_structure == "4":
            for argument in arguments:
                run_dirs = [os.path.join(argument, name) for name in os.listdir(argument)
                            if os.path.isdir(os.path.join(argument, name))
                            and os.path.join(argument, name).endswith("runs")]
                for run_dir in run_dirs:
                    second_level = [os.path.join(run_dir, name) for name in os.listdir(run_dir)
                                    if os.path.isdir(os.path.join(run_dir, name))]
                    for second_dir in second_level:
                        test_cases_list.extend(
                            os.path.join(second_dir, name) for name in os.listdir(second_dir)
                            if os.path.isdir(os.path.join(second_dir, name)))
        print("Found %s testcases" % len(test_cases_list))
        print("Testcases:", test_cases_list)
        GenerateMetrics.get_metrics(test_cases_list)

    @staticmethod
    def get_metrics(test_cases_list):
        """Gather tool-specific metrics for every testcase and stream them
        into the comparison CSV (header written once, then one row each)."""
        from FindFiles import findFiles
        from GetCadenceMetrics import GetCadenceMetrics
        from GetSynopsysMetrics import GetSynopsysMetrics
        csv_written = False
        for test_case in test_cases_list:
            print("### Searching:", test_case)
            # Infer the EDA tool from the path; synopsys is the default.
            tool = "cadence" if "cadence" in test_case else "synopsys"
            list_of_files = findFiles.search_dir(test_case, tool)
            # BUG FIX: compare strings with '==' -- the original used 'is',
            # which tests identity and only worked by accident of CPython
            # string interning.
            if tool == "cadence":
                temp_metric_collections = GetCadenceMetrics.get_cadence_metrics(list_of_files, test_case, tool)
            else:
                temp_metric_collections = GetSynopsysMetrics.get_synopsys_metrics(list_of_files, test_case, tool)
            csv_written = GenerateMetrics.generate_csv(temp_metric_collections, test_case, csv_written)

    @staticmethod
    def generate_csv(temp_metric_collections, test_case, csv_written):
        """Write one row of metric values for ``test_case`` into the CSV.

        ``temp_metric_collections`` is an iterable of collections of
        (name, value) pairs.  When ``csv_written`` is False the file is
        (re)created with a header row of metric names first; otherwise the
        values row is appended.

        Returns True once the CSV file exists.
        """
        import csv
        names, values = [], []
        print("temp_metric_collection found: ")
        for temp_metric_collection in temp_metric_collections:
            for metric in tuple(temp_metric_collection):
                print(metric)
                names.append(metric[0])
                values.append(metric[1])
        if csv_written is False:
            # First testcase: create (or overwrite, via 'wt') the file with a
            # header row.  'with' closes the file; the original's explicit
            # close() inside the with-block was redundant and was removed.
            with open(r'Regr_Suite_Runs_Comparison_Data.csv', 'wt') as my_file:
                writer = csv.writer(my_file, lineterminator='\n')
                writer.writerow(names)
                writer.writerow(values)
            print('Regr_Suite_Runs_Comparison_Data.csv created with %s' % test_case)
            csv_written = True
        else:
            # Subsequent testcases: append their values only.
            with open(r'Regr_Suite_Runs_Comparison_Data.csv', 'a') as my_file:
                writer = csv.writer(my_file, lineterminator='\n')
                writer.writerow(values)
            print('Regr_Suite_Runs_Comparison_Data.csv appended with %s' % test_case)
        return csv_written
import csv
import json
import sys
import os
import re
from FindFiles import findFiles
from GetCadenceMetrics import GetCadenceMetrics
from GetSynopsysMetrics import GetSynopsysMetrics
# Script entry point: forward the raw command-line arguments to the metric
# generator.  (Fixed: removed the duplicate "import sys" -- sys is already
# imported in the import block above.)
sys_args = sys.argv
GenerateMetrics.determine_testcases(sys_args)
|
983,934 | 2dbc95965470012111ba41129925d716a08dd1f5 | # Author: Bojan G. Kalicanin
# Date: 15-Dec-2016
# Raindrops Game settings
class Settings(object):
    """Container for all configurable values of the Raindrops game."""

    def __init__(self):
        """Initialize the game's static settings."""
        # Screen settings.
        self.screen_width = 800
        self.screen_height = 600
        self.bg_color = (255, 255, 255)  # white background
        # Raindrop settings.
        self.raindrop_speed_factor = 0.75
        self.new_grid = True  # request creation of a fresh raindrop grid
983,935 | 831584a6d78ff56f8f293b32e7ea416c200226ce | # -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function, unicode_literals
# The MIT License
# Copyright (c) 2017 - 2022 Tammo Ippen, tammo.ippen@posteo.de
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the 'Software'), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import os
try:
from PIL import Image
except ImportError:
raise Exception('Need to have PIL / pillow installed for this example.')
try:
import numpy as np
except ImportError:
raise Exception('Need to have numpy installed for this example.')
from plotille import Canvas, Figure, hist, hsl
from plotille.data import circle
# Directory containing this script (used to locate the bundled logo image).
current_dir = os.path.dirname(os.path.abspath(__file__))
# Sample data: 10k draws from a standard normal distribution.
X = np.random.normal(size=10000)
# Shared sub-plot dimensions (in character cells) and padding string.
width = 12
height = 10
spacer = ' '
def extend_plot_lines(lines):
    """Pad plot lines in place with trailing spacers so columns align.

    The first two (header) lines get a wide pad, the plot body a narrow
    one; the final two (axis/label) lines are left untouched.
    """
    wide = spacer * 20
    narrow = spacer * 7
    lines[0] += wide
    lines[1] += wide
    for i in range(2, len(lines) - 2):
        lines[i] += narrow
    return lines
def int_formatter(val, chars, delta, left):
    """Format *val* as an integer in a *chars*-wide field.

    *left* selects left alignment; *delta* is accepted only for
    compatibility with plotille's label-formatter protocol and is unused.
    """
    align = '<' if left else '>'
    return '{:{}{}}'.format(int(val), align, chars)
def logo():
    """Render the plotille logo image as braille dots on a bare Canvas."""
    # Canvas on its own can draw an image using dots.
    img = Image.open(current_dir + '/../imgs/logo.png')
    img = img.convert('L')        # greyscale
    img = img.resize((270, 120))  # 2x4 dots per braille cell -> 135x30 canvas
    cvs = Canvas(135, 30, background=hsl(0, 0, 0.8), mode='rgb')
    cvs.braille_image(img.getdata(), inverse=True, color=hsl(0, 0.5, 0.4))
    # Indent every output line so the logo is centred-ish in the terminal.
    indent = ' ' * 6
    print(indent + cvs.plot().replace(os.linesep, os.linesep + indent))
def histogram():
    """Return the output lines of a Figure histogram of the sample data X."""
    fig = Figure()
    fig.width = width
    fig.height = height
    fig.color_mode = 'rgb'
    fig.register_label_formatter(float, int_formatter)  # integer tick labels
    fig.histogram(X, lc=hsl(17, 1, 0.8))
    lines = extend_plot_lines(fig.show().split(os.linesep))
    return lines
def crappyhist():
    """Return the output lines of the simple terminal `hist` of X."""
    lines = hist(X, bins=12, width=12, lc=hsl(285, 1, 0.74), color_mode='rgb').split(os.linesep)
    lines[1] += spacer  # pad so it lines up with the neighbouring figures
    return lines
def plot():
    """Return the lines of a figure demonstrating scatter/plot/text/axvline."""
    fig = Figure()
    fig.width = width
    fig.height = height
    fig.set_y_limits(-2, 2)
    fig.color_mode = 'rgb'
    fig.register_label_formatter(float, int_formatter)  # integer tick labels
    # Random scatter, a straight line, a sine curve, a text label and a
    # vertical marker line, each with its own colour.
    x1 = np.random.normal(size=10)
    fig.scatter(list(range(len(x1))), x1, lc=hsl(122, 0.55, 0.43), marker='o')
    fig.plot([0, 9], [2, 0], lc=hsl(237, 1, 0.75), marker='x')
    x2 = np.linspace(0, 9, 20)
    fig.plot(x2, 0.25 * np.sin(x2) - 1, lc=hsl(70, 1, 0.5))
    fig.text([5], [1], ['Hi'], lc=hsl(0, 0, 0.7))
    fig.axvline(1, lc=hsl(0, 1, 0.5))
    lines = extend_plot_lines(fig.show().split(os.linesep))
    return lines
def heat():
    """Return the lines of a figure with a circle outline over a heatmap."""
    fig = Figure()
    fig.width = width
    fig.height = height
    fig.set_y_limits(-2, 2)
    fig.set_x_limits(-2, 2)
    fig.color_mode = 'rgb'
    fig.origin = False
    fig.register_label_formatter(float, int_formatter)
    # Circle outline drawn on top of the heatmap.
    xy = circle(0, 0, 1.5)
    fig.plot(xy[0], xy[1])
    # Build a width x height intensity grid; None cells stay transparent.
    img = []
    for _ in range(height):
        img += [[None] * width]
    # Hand-placed intensities forming a small blob around the centre.
    img[int(height / 2)][int(width / 2)] = 1
    img[int(height / 2) - 2][int(width / 2) - 1] = 0.8
    img[int(height / 2) - 2][int(width / 2)] = 0.7
    img[int(height / 2) - 1][int(width / 2) - 1] = 0.2
    img[int(height / 2) ][int(width / 2) - 1] = 0.2  # noqa: E202
    img[int(height / 2) + 1][int(width / 2) - 1] = 0.3
    img[int(height / 2) - 1][int(width / 2) + 1] = 0.4
    img[int(height / 2) ][int(width / 2) + 1] = 0.8  # noqa: E202
    img[int(height / 2) + 1][int(width / 2) + 1] = 0.7
    img[int(height / 2) - 1][int(width / 2)] = 0.7
    img[int(height / 2) + 1][int(width / 2)] = 0.8
    # img[int(height / 2)-1][int(width / 2)] = 1
    # img[int(height / 2)][int(width / 2)] = 1
    fig.imgshow(img, cmap='magma')
    lines = extend_plot_lines(fig.show().split(os.linesep))
    return lines
def main():
    """Print the logo, then the four example figures side by side."""
    print('\n\n')
    logo()
    print()
    # zip the per-figure line lists so the figures print as columns.
    for lines in zip(histogram(), plot(), heat(), crappyhist()):
        print(' '.join(lines))
    print('\n\n')


if __name__ == '__main__':
    main()
|
983,936 | c9ece8e4d7f28711b0da8ad2e4e1c29e451f810b | def pig_it(text):
A = text.split()
b = ''
for i in A:
if i in ',.!?':
b += i
else:
c = i[1:]+i[0]+'ay'
b += c + ' '
return b, len(text), len(b)
def pig_it2(text):
    """Variant of pig_it that splits on single spaces.

    Returns a tuple of the converted string and the literal marker 'ads'.
    """
    pieces = []
    for token in text.split(' '):
        if token in '?.,!':
            # Punctuation (and empty tokens from doubled spaces): unchanged.
            pieces.append(token)
        else:
            pieces.append(token[1:] + token[0] + 'ay' + ' ')
    return ''.join(pieces), 'ads'
# Demo: convert a sample sentence and print the (result, marker) tuple.
s = 'Pig latin is cool !'
print(pig_it2(s))
983,937 | 591454bc13b146782948429fac375a9a136eb025 | #!/usr/bin/env python
"""
A script for generating time series plots.
Examples:
# plot all salt time series from saturn02
makeTimeSeriesPlots -s 2012-5-1 -e 2012-5-10 -o tmp/images */data/stations/saturn02/saturn02.*.salt/*.nc
# plot all salt time series from saturn02, show image do not store
makeTimeSeriesPlots -s 2012-5-1 -e 2012-5-10 */data/stations/saturn02/saturn02.*.salt/*.nc
Tuomas Karna 2013-01-07
"""
import os
import datetime
import sys
from optparse import OptionParser
import numpy as np
from crane import plt
from crane.utility import parseTimeStr, createDirectory, saveFigure
from crane.data import dirTreeManager as dtm
from crane.data import stationCollection
from crane.data import dataContainer
from crane.data import statistics
from crane.files import csvStationFile
from crane.plotting import plot
from crane.plotting import plotBase
from crane.plotting import timeSeriesPlot
#-------------------------------------------------------------------------
# Functions
#-------------------------------------------------------------------------
def uniqueList(seq):
    """Return a list with all duplicates removed. Preserves ordering."""
    # Fixed the malformed docstring quoting ("..."""), which was two
    # adjacent string literals rather than a triple-quoted docstring.
    # set.add() returns None, so "not seen.add(x)" is always True and is
    # evaluated (recording x) only when x has not been seen yet.
    seen = set()
    return [x for x in seq if x not in seen and not seen.add(x)]
def generateTSPlot(startTime, endTime, dcs, imgDir=None, ylim={},
                   obsTag='obs', **kwargs):
    """Plots time series stack plot.

    Parameters:
        startTime, endTime -- datetime bounds of the plotted period
        dcs     -- list of dataContainer objects to plot
        imgDir  -- if given, save the figure there; otherwise show it
        ylim    -- dict mapping variable name -> [ymin, ymax]
        obsTag  -- tag identifying observation data in the collection
        kwargs  -- optional 'title', 'xaxis_tight', 'filename'
    """
    doMurphyScore = True
    # a collection for all the data
    dataDir = 'data'  # NOTE(review): appears unused in this function
    sc = stationCollection.StationCollection(startTime, endTime, obsTag)
    # add data from the files
    for dc in dcs:
        if dc.data.shape[1] > 1:  # more than one component
            for i in range(dc.data.shape[1]):
                # add each component as separate time series
                dci = dc.extractField(i)
                # change variable to component name: 'hvel' -> 'u','v'
                dci.setMetaData('variable', dci.fieldNames[0])
                sc.addSample(dci)
        else:
            sc.addSample(dc)
    # plot
    # master container to hold all the axes, x axis will be shared
    canvas = plotBase.stackPlotBase(rightPad=1.25)
    modelColors = plot.makeColorsForModels(sc)
    comKeys = sc.getComparableKeys(requireObs=False, requireMod=False)
    # Replace missing msldepth entries with '0' so sorting below works.
    for c in comKeys:
        if c[0][2] is None:
            print c
            c[0] = (c[0][0], c[0][1], '0')
    # fancy sort: sort by 'entry' tuple with msldepth converted to float
    comKeys.sort(key=lambda tup: (tup[0][0], tup[0][1], float(tup[0][2])))
    for entry, obsKey, modKeys in comKeys:
        station, var, msldepth = entry
        if obsKey:
            o = sc.getSample(**obsKey)
            if len(o.time) < 3:
                print 'observation time series too short:', obsKey
                continue
        else:
            o = None
        title = ' '.join(
            (plot.VARS.get(var, var),
             '[' + plot.UNITS.get(var, '-') + ']'))
        # Show the year as x-label only if the range stays within one year.
        xlabel = startTime.year if startTime.year == endTime.year else 'Date'
        dia = timeSeriesPlot.timeSeriesPlotDC2(
            title=title,
            ylabel=station.upper() + ' ' + msldepth,
            ylim=ylim.get(var, None),
            xlabel=xlabel)
        canvas.addPlot(dia, tag=plot.VARS.get(var, var) + station + msldepth)
        if obsKey:
            dia.addSample(o, color='r', label=obsKey['tag'])
        for modKey in modKeys:
            m = sc.getSample(**modKey)
            l = modKey['tag'].split('-')[0]
            if doMurphyScore and obsKey:
                # Align model to observation times before scoring.
                o2, m2 = o.alignTimes(m)
                murphy = statistics.murphySkillScore(
                    o2.data.ravel(), m2.data.ravel())
                l += ' MS={ms:.2f}'.format(ms=murphy)
            dia.addSample(m, color=modelColors[modKey['tag']], label=l)
        dia.showLegend()
    if 'title' in kwargs:
        canvas.addTitle(kwargs.pop('title'))
    if 'xaxis_tight' in kwargs and kwargs['xaxis_tight'] == True:
        # Shrink the shared x axis to the union of all plotted data ranges.
        xLim = [canvas.axGrid[0].dataLim.xmin, canvas.axGrid[0].dataLim.xmax]
        for ax in canvas.axGrid[1:]:
            a, b = ax.dataLim.xmin, ax.dataLim.xmax
            xLim = [min(xLim[0], a), max(xLim[1], b)]
        canvas.axGrid[0].set_xlim(xLim)
    if imgDir:
        # ----- Save file -----
        # Build a descriptive file name: ts_<vars>_<stations>_<start>_<end>.
        sT = str(sc.startTime.date())
        eT = str(sc.endTime.date())
        imgDir = createDirectory(imgDir)
        varList = uniqueList([tup[0][1] for tup in comKeys])
        #depSet = list(depSet)
        tagStr = '-'.join(uniqueList([dc.getMetaData('tag') for dc in dcs]))
        stationList = []
        for dc in dcs:
            meta = dc.getMetaData()
            entry = meta.get('location')
            if 'msldepth' in meta:
                entry += '-' + meta['msldepth']
            stationList.append(entry)
        stationStr = '-'.join(uniqueList(stationList))
        varStr = '-'.join(varList)
        fname = '_'.join(['ts', varStr, stationStr, sT, eT])
        fn = kwargs.get('filename', fname)
        saveFigure(imgDir, fn, 'png', verbose=True, bbox_tight=True)
        plt.close()
    else:
        # ----- show plot -----
        plt.show()
#-------------------------------------------------------------------------
# Main routine
#-------------------------------------------------------------------------
def makeTSPlotForStationFile(runTags, stationFile, startTime, endTime,
                             imgDir=None, ylim={}, singleFile=None):
    """Load data for every (runTag, station) pair and generate the plot.

    Parameters:
        runTags     -- list of model run tags to fetch data for
        stationFile -- csv file defining station coordinates and variables
        startTime, endTime -- datetime bounds
        imgDir      -- output directory (None -> show interactively)
        ylim        -- per-variable y-axis limits
        singleFile  -- optional extra netCDF file to include in the plot
    """
    # load data
    dcs = []
    stationsToExtract = csvStationFile.csvStationFileWithDepth()
    stationsToExtract.readFromFile(stationFile)
    for runTag in runTags:
        for key in stationsToExtract.getTuples():
            loc, x, y, z, zType, var = key
            try:
                # Data type depends on the variable/location: sediment
                # ('sil*'), plume metrics, or plain time series.
                if var[:3] == 'sil':
                    dc = dtm.getDataContainer(
                        tag=runTag,
                        dataType='sil',
                        location=loc,
                        variable=var,
                        startTime=startTime,
                        endTime=endTime)
                elif loc == 'plume':
                    dc = dtm.getDataContainer(
                        tag=runTag,
                        dataType='plumemetrics',
                        location=loc,
                        variable=var,
                        startTime=startTime,
                        endTime=endTime)
                else:
                    # Encode depth in cm as the msldepth identifier.
                    msldepth = str(int(round(abs(z) * 100)))
                    dc = dtm.getDataContainer(
                        tag=runTag,
                        dataType='timeseries',
                        location=loc,
                        variable=var,
                        msldepth=msldepth,
                        startTime=startTime)
                    # endTime=endTime)
                    # NOTE(review): endTime is deliberately(?) not passed for
                    # the timeseries case -- confirm against dtm behaviour.
                dcs.append(dc)
            except Exception as e:
                # Best-effort loading: report and continue with other keys.
                print 'reading failed'
                print e
    if singleFile:
        dcs.append(dataContainer.dataContainer.loadFromNetCDF(singleFile))
    if dcs:
        generateTSPlot(startTime, endTime, dcs, imgDir, ylim)
#-------------------------------------------------------------------------
# Parse commandline arguments
#-------------------------------------------------------------------------
def parseCommandLine():
    """Parse command-line options and dispatch to makeTSPlotForStationFile."""
    usage = (
        'Usage: %prog -s [start date YYYY-MM-DD] -e [end date YYYY-MM-DD] -o [path] -t [stationFile] runTag1 runTag2 ...\n')
    parser = OptionParser(usage=usage)
    parser.add_option(
        '-s',
        '--start',
        action='store',
        type='string',
        dest='startStr',
        help='Date to start processing')
    parser.add_option(
        '-e',
        '--end',
        action='store',
        type='string',
        dest='endStr',
        help='Date to end processing')
    parser.add_option(
        '-o',
        '--imageDirectory',
        action='store',
        type='string',
        dest='imgDir',
        help='(optional) directory where generated images are stored.\
 If not specified, shows the image instead.')
    parser.add_option(
        '-y',
        '--ylim',
        action='store',
        type='string',
        dest='ylimStr',
        help='Custom limits for y axis, a string like salt:0:30,temp:4:12')
    parser.add_option(
        '-t',
        '--csvStationFile',
        action='store', type='string',
        dest='csvStationFile',
        help='file that defines station coordinates and\
 variables for time series to plot')
    parser.add_option(
        '-f',
        '--file',
        action='store',
        type='string',
        dest='singleFile',
        help='plot a single file')
    (options, args) = parser.parse_args()
    # Positional arguments are the run tags.
    runTags = args
    startStr = options.startStr
    endStr = options.endStr
    imgDir = options.imgDir
    ylimStr = options.ylimStr
    csvStationFile = options.csvStationFile
    singleFile = options.singleFile
    # Validate required arguments, printing help before erroring out.
    if not runTags:
        parser.print_help()
        parser.error('RunTags are missing')
    if not csvStationFile:
        parser.print_help()
        parser.error('csvStationFile must be given')
    if startStr is None:
        parser.print_help()
        parser.error('Start date undefined')
    if endStr is None:
        parser.print_help()
        parser.error('End date undefined')
    startTime = parseTimeStr(startStr)
    endTime = parseTimeStr(endStr)
    # Parse "var:min:max,..." into {var: [min, max]}.
    ylim = {}
    if ylimStr:
        for entry in ylimStr.split(','):
            var, vmin, vmax = entry.split(':')
            ylim[var] = [float(vmin), float(vmax)]
    print 'Parsed options:'
    print ' - time range:', str(startTime), '->', str(endTime)
    if imgDir:
        print ' - output dir', imgDir
    else:
        print ' - show image'
    if ylim:
        print ' - using y limits', ylim
    if csvStationFile:
        print ' - stations read from', csvStationFile
    makeTSPlotForStationFile(runTags, csvStationFile, startTime, endTime,
                             imgDir, ylim, singleFile)
# Script entry point.
if __name__ == '__main__':
    parseCommandLine()
|
983,938 | a64765196d7fdad145983c8f70982ed225c8542e | ##
# \namespace cross3d.abstract.clip
#
# \remarks The AbstractClip class provides a base implementation of a
# cross-application interface to a clip.
#
# \author willc
# \author Blur Studio
# \date 10/15/15
#
import cross3d
from cross3d import ClipPortion, TrackPortion, abstractmethod
class AbstractClip(object):
    """The AbstractClip class provides a base implementation of a
    cross-application interface to a clip within a layer mixer.

    Attributes:
        clip: The native accessor for the Clip object.
        track: The Track instance for the Clip's parent Track.
        numWeights: The number of weights in the clip's weight curve
            (relevant only when clip is in a layer track)
        globStart: The global frame value for the start point of the Clip
        globEnd: The global frame value for the end point of the Clip
        filename: The filename of the bip file used by the Clip.
        scale: The Clip's scale. Modifying the scale will cause the Clip to
            scale on the right edge. The left edge will not move.

    Note:
        Properties returning None here are abstract placeholders intended
        to be overridden by application-specific subclasses.
    """

    def __init__(self, track, clip):
        super(AbstractClip, self).__init__()
        self._track = track  # parent Track instance
        self._clip = clip    # native application clip object

    @property
    def clip(self):
        """The native accessor for the Clip object"""
        return self._clip

    @clip.setter
    def clip(self, value):
        # Replace the wrapped native clip object.
        self._clip = value

    @property
    def duration(self):
        """The number of frames in the mixer that the clip occupies after
        scaling"""
        return (self.globEnd - self.globStart)

    @property
    def filename(self):
        """ The filename of the file used by the Clip. """
        return None

    @property
    def globStart(self):
        """ The global frame value for the start point of the Clip """
        return None

    @property
    def globEnd(self):
        """ The global frame value for the end point of the Clip """
        return None

    @property
    def numWeights(self):
        """The number of weights in the clip's weight curve
        (relevant only when clip is in a layer track)"""
        return None

    @property
    def sourceEnd(self):
        """End frame of the source file's used range (abstract)."""
        return None

    @property
    def sourceLength(self):
        """The length, in frames, of the input file."""
        return (self.sourceEnd - self.sourceStart)

    @property
    def sourceStart(self):
        """Start frame of the source file's used range (abstract)."""
        return None

    @property
    def scale(self):
        """The Clip's scale factor (abstract)."""
        return None

    @property
    def track(self):
        """The Track instance for the Clip's parent Track."""
        return self._track

    @track.setter
    def track(self, value):
        # Re-parent the clip onto a different Track wrapper.
        self._track = value

    @property
    def trimEnd(self):
        """Frames trimmed from the end of the clip (from the native clip)."""
        return float(self.clip.trimEnd)

    @property
    def trimmedLength(self):
        """The number of frames in the used area of the clip."""
        return (self.sourceLength - (self.trimEnd + self.trimStart))

    @property
    def trimStart(self):
        """Frames trimmed from the start of the clip (from the native clip)."""
        return float(self.clip.trimStart)

    @abstractmethod
    def analyzeWeights(self, occludedPortions):
        """Determines which portions of the Clip are used, and which portions of
        the Clip will occlude Tracks below.

        Args:
            occludedPortions(list): A list of `TrackPortion` instances
                for every portion of the Clip that will be occluded
                by Tracks above it.

        Returns:
            tuple: A tuple containing a list of `ClipPortion`
                instances for every used portion of the Clip, and a
                list of `TrackPortion` instances for every portion of
                the Clip that will occlude tracks below it.
        """
        return None

    @abstractmethod
    def getWeightTime(self, index):
        """Retrieves the global frame number the weight at the specified index
        is placed at.

        Args:
            index(int): Index of desired weight to retrieve a time
                for.

        Returns:
            float: Global frame number for the position of the
                weight.

        Raises:
            IndexError
        """
        return None

    @abstractmethod
    def getWeightValue(self, index):
        """Retrieves the value of the weight at the specified index.

        Args:
            index(int): Index of desired weight to retrieve a value
                for.

        Returns:
            float: Value of the weight at the index specified.

        Raises:
            IndexError
        """
        return None

    @abstractmethod
    def iterWeights(self):
        """Returns a generator that yields tuples of the time and value for all
        weights in the Track.

        Returns:
            generator: Generator that yields tuples of
                ((float)time, (float)value) for weights on the
                track.
        """
        yield None

    @abstractmethod
    def weights(self):
        """Returns a list of tuples of the time and value for all weights on the
        Clip.

        Returns:
            list: List of tuples for every weight on the Clip in
                the form ((float)time, (float)value).
        """
        return None


################################################################################
# register the symbol so cross3d.Clip resolves to this abstract implementation
# unless an application-specific override was registered first
cross3d.registerSymbol('Clip', AbstractClip, ifNotFound=True)
|
983,939 | caf6a93460b1b6d923497877100da5cd39c4cf6f | from django.contrib.auth.models import User
from django.contrib.contenttypes.fields import GenericForeignKey, GenericRelation
from django.contrib.contenttypes.models import ContentType
from django.db import models
from django.utils import timezone
from tinymce.models import HTMLField
class Comment(models.Model):
    """A user comment attachable to any model instance via a generic relation.

    Comments can themselves receive comments (the `comments` generic
    relation), which allows threaded replies.
    """
    # Author; deleting the user cascades to their comments.
    user = models.ForeignKey(User, on_delete=models.CASCADE)
    # Creation timestamp (defaults to "now" but may be overridden).
    date_time = models.DateTimeField(default=timezone.now)
    # Comment body; rich text edited through TinyMCE.
    text = HTMLField(null=False, blank=False)
    # Generic foreign key to the commented-on object.
    content_type = models.ForeignKey(ContentType, on_delete=models.CASCADE)
    object_id = models.PositiveIntegerField()
    content_object = GenericForeignKey('content_type', 'object_id')
    # Replies to this comment.
    comments = GenericRelation('Comment')

    def __str__(self):
        # NOTE(review): returns the raw HTML body.
        return self.text
|
983,940 | cf93b97a32613c187716373e2141ba3499d092a4 | import PIL, os, glob, fnmatch, numpy
from numpy import *
from PIL import Image
# Root directory of the face / non-face image sets.
baseDir = "images/"
data = []          # one finished feature row (CSV string) per processed image
data_str = ""      # accumulator for the current image's feature row
att_str = []       # attribute strings -- appears unused in this module
# Eye centre coordinates within the 19x19 images; presumably fixed for this
# training set -- TODO confirm.
left_eye = (5,5)
right_eye = (14,5)
sub = ""           # current sub-directory being walked
cur_file = ""      # current file name being processed
eye_template = {}  # pixel template of the eye regions, built by getTemplate()
def extract_local_regions(Image):
    """Cut the image into quadrant/strip/centre regions and append each
    region's std/mean statistics to the global feature row.

    NOTE(review): the parameter name `Image` shadows the imported PIL.Image
    module inside this function; consider renaming it to `img`.
    Returns None -- results are accumulated via the data_str global.
    """
    global att_str
    left =0
    upper =0
    right =0
    lower =0
    # Midpoints split the image into four quadrants.
    right = (Image.size[0] /2)
    lower = (Image.size[1]/2)
    boxes = []
    boxes.append(cut_box(left, upper, right, lower,Image))  # left quad
    boxes.append(cut_box(right + 1, upper, Image.size[0], lower,Image))  # right quad
    boxes.append(cut_box(left, lower + 1, right,Image.size[1],Image))  # low left quad
    boxes.append(cut_box(right +1, lower +1, Image.size[0], Image.size[1],Image ))  # low right quad
    boxes.append(cut_box(9,0,10,Image.size[1], Image))  # centre vertical strip
    boxes.append(cut_box(0,9,Image.size[0],10, Image))  # centre horizontal strip
    boxes.append(cut_box(4,4,13,13,Image))  # centre box
    add_val_strings(boxes)
    get_mean_dif(boxes)
def cut_box(lft, up, rght, lwr, Image):
    """Return the sub-image bounded by the box (lft, up, rght, lwr)."""
    # Force the pixel data to be loaded before cropping.
    Image.load()
    return Image.crop((lft, up, rght, lwr))
def add_val_strings(boxes):
    """Append the std and mean of each region's pixels to the global row."""
    global data_str
    for region in boxes:
        pixels = region.getdata()
        data_str += str(std(pixels)) + "," + str(mean(pixels)) + ","
def get_mean_dif(boxes):
    """Append the absolute difference between the mean intensities of the
    first two regions (top-left and top-right quadrants) to the global row."""
    global data_str
    global att_str
    m0 = mean(boxes[0].getdata())
    m1 = mean(boxes[1].getdata())
    data_str = data_str + str(max(m0, m1) - min(m0, m1)) + ","
def is_face():
    """Return True unless the current sub-directory path contains 'non-face'."""
    return "non-face" not in sub
def add_class(directory):
    """Append the class label to the global feature row: 0 for images from a
    'non-face' directory, 1 otherwise."""
    global data_str
    label = ",0" if "non-face" in directory else ",1"
    data_str = data_str + label
def eye_regions(image):
    """Sample the left and right eye regions of *image* into the global
    eye template."""
    print "Eye regions in " + sub + " image : " + str(cur_file)
    matrix = create_matrix(image)
    get_eye(matrix, left_eye, image)
    get_eye(matrix, right_eye, image)
def get_eye(matrix, center, image):
    """Copy pixel values within a small disc around *center* into the global
    eye_template.

    Args:
        matrix: 19x19 nested-list pixel matrix from create_matrix().
        center: (row, col) coordinates of the eye centre.
        image: unused; kept for interface compatibility with callers.
    """
    global eye_template
    radius = 5
    for i in range(19):
        for j in range(19):
            a = i - center[0]
            b = j - center[1]
            # NOTE(review): this compares the *squared* distance against the
            # radius itself (not radius**2), so the effective radius is
            # sqrt(5) ~ 2.24 pixels -- confirm this is intended.
            if a * a + b * b <= radius:
                eye_template[(i, j)] = matrix[i][j]
    # Fixed: removed the dead computation of the region mean ('avge'), whose
    # result was never used, and the redundant intermediate dict.
def create_matrix(image):
    """Return the image's pixel data as a 19x19 nested-list matrix.

    Pixels are taken from image.getdata() in row-major order.
    """
    pixData = image.getdata()
    # Fixed: use range instead of the Python-2-only xrange so the helper is
    # portable (range works identically here under both Python 2 and 3);
    # also removed a stray semicolon.
    matrix = [[0 for _ in range(19)] for _ in range(19)]
    idx = 0
    for i in range(19):
        for j in range(19):
            matrix[i][j] = pixData[idx]
            idx += 1
    return matrix
def getTemplate():
    """Build the global eye template from a known-good training face image."""
    print "Doing template"
    # NOTE(review): the file handle is never closed here; Image.open() would
    # also accept the path directly.
    fp = open("images" +'/train/face/'+'face02428.pgm', "r")
    img = Image.open(fp)
    eye_regions(img)
def assess_template(image):
    """Append the mean absolute difference between the stored eye template
    and the corresponding pixels of *image* to the global feature row."""
    global data_str
    global att_str
    total = 0.0
    count = 0.0
    for pos, expected in eye_template.items():
        total += abs(expected - image.getpixel(pos))
        count += 1
    data_str = data_str + str(total / count)
def process_images(imageDir):
    """Walk *imageDir*, extract a feature row for every .pgm image found and
    append the finished row to the global `data` list."""
    global data_str
    for subdir, dirs, files in os.walk(imageDir):
        for file in files:
            # if fnmatch.fnmatch(file,'face02428TESTIMAGE.pgm'):
            if fnmatch.fnmatch(file,'*.pgm'):
                fp = open(subdir +'/'+file, "r")
                # print subdir
                # NOTE(review): these assign *local* names, not the
                # module-level globals `sub` / `cur_file` -- a `global`
                # statement appears to be missing here.
                sub = subdir
                cur_file = file
                img = Image.open(fp)
                pixVals = list(img.getdata())
                ex = img.getextrema()
                # extract_local_regions appends to data_str and returns None.
                regions = extract_local_regions(img)
                # Global min/max/std/mean features.
                data_str = data_str + str(ex[0] )+ ","
                data_str = data_str + str(ex[1] )+ ","
                data_str = data_str + str(std(img.getdata()) )+ ","
                data_str = data_str + str(mean(img.getdata()) )+ ","
                assess_template(img)
                add_class(subdir)
                data.append(data_str)
                data_str = ""
                fp.close()
def write_out(imageDir):
    """Write the accumulated feature rows to an ARFF file named after
    *imageDir* (e.g. 'imageRecogFeaturestrain.arff')."""
    fname = "imageRecogFeatures" + str(imageDir) + ".arff"
    outfile = open(fname, "w")
    outfile.write("@RELATION images\n\n")
    # Attribute declarations must match the order in which features are
    # appended by extract_local_regions / process_images.
    outfile.write("""@ATTRIBUTE left_quad_std NUMERIC
@ATTRIBUTE left_quad_mean NUMERIC
@ATTRIBUTE right_quad_std NUMERIC
@ATTRIBUTE right_quad_mean NUMERIC
@ATTRIBUTE low_left_quad_std NUMERIC
@ATTRIBUTE low_left_quad_mean NUMERIC
@ATTRIBUTE low_right_quad_std NUMERIC
@ATTRIBUTE low_right_quad_mean NUMERIC
@ATTRIBUTE cent_vert_strip_std NUMERIC
@ATTRIBUTE cent_vert_strip_mean NUMERIC
@ATTRIBUTE cent_hor_strip_std NUMERIC
@ATTRIBUTE cent_hor_strip_mean NUMERIC
@ATTRIBUTE cent_box_std NUMERIC
@ATTRIBUTE cent_box_mean NUMERIC
@ATTRIBUTE left_right_top_mean_dif NUMERIC
@ATTRIBUTE global_min NUMERIC
@ATTRIBUTE global_max NUMERIC
@ATTRIBUTE global_std NUMERIC
@ATTRIBUTE global_mean NUMERIC
@ATTRIBUTE eye_dif_val NUMERIC
@ATTRIBUTE class {0,1}\n""")
    outfile.write("\n@DATA\n")
    for x in data:
        s = str(x) + str("\n")
        outfile.write(s)
    outfile.close()
# Build the eye template, then generate ARFF feature files for the training
# and test image sets.
getTemplate()
process_images(baseDir + "train")
write_out("train")
data =[];  # reset accumulated rows before processing the test set
process_images(baseDir + "test")
write_out("test")
|
983,941 | cb13195db87d2c92285b5047bd4cdf43381428b3 | # Add new file
print ( "Inside child branch")
|
983,942 | bb9818582f6b6356fa105f85b3ead085c3c55141 | import requests
from flask import Flask, render_template, request
# Current pagination offset; module-level state shared across all requests.
offset= 0
app = Flask(__name__)
@app.route("/", methods=['GET'])
def home():
    """Fetch a page of data from the backend API and render it.

    The 'offset' query parameter ("Next"/"Back") moves the pagination
    window forward/backward by 20 entries; any other value resets it to 0.
    """
    # api-endpoint
    URL = "http://localhost:8080/"
    offset_status = request.args.get('offset')
    # NOTE(review): this module-level mutable offset is shared by all
    # clients and is not thread-safe; "Back" can also drive it negative.
    global offset
    if offset_status == "Next" :
        offset = offset + 20
    elif offset_status == "Back":
        offset = offset - 20
    else:
        offset = 0
    # defining a params dict for the parameters to be sent to the API
    PARAMS = {'off':offset}
    print(PARAMS)
    # sending get request and saving the response as response object
    r = requests.get(url = URL, params=PARAMS )
    # extracting data in json format
    data = r.json()
    #print(data)
    return render_template('index.html', data=data )
if __name__ == "__main__":
    # Fixed: this guard previously executed `pass`, so running the module
    # directly did nothing. Start Flask's development server instead.
    app.run()
|
983,943 | a16004991cf5239206cdfdfefdc5d19e893e4d29 | """In a given list of elements, all elements are equal except the one.Write a code to find the odd man out (Stray
number) """
element_list = list(map(int, input().split()))
element_dict = {}
count = 0
for i in element_list:
if i in element_dict.keys():
element_dict[i] += 1
else:
element_dict[i] = 1
for key, value in element_dict.items():
if value == 1:
print(key)
|
983,944 | 1876e6ca9813f1f9301445e446b25bef1efc2265 | # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
"""
Dataset definition class for FreeROI GUI system.
"""
import re
import os
import sys
import nibabel as nib
import numpy as np
from nibabel.spatialimages import ImageFileError
from nibabel.gifti import giftiio as g_io
import gzip
from PyQt4.QtCore import *
from PyQt4.QtGui import *
from froi.algorithm import meshtool as mshtool
from froi.algorithm import array2qimage as aq
from ..io.surf_io import read_data
from labelconfig import LabelConfig
class DoStack(QObject):
    """A LIFO stack that emits `stack_changed` on every push and pop.

    Backs the undo/redo functionality.
    """
    stack_changed = pyqtSignal()

    def __init__(self):
        super(QObject, self).__init__()
        self._stack = []

    def push(self, v):
        """Push a value and notify listeners."""
        self._stack.append(v)
        self.stack_changed.emit()

    def pop(self):
        """Pop and return the top value, notifying listeners."""
        top = self._stack.pop()
        self.stack_changed.emit()
        return top

    def clear(self):
        """Drop all stored values (no signal is emitted)."""
        self._stack = []

    def stack_not_empty(self):
        """Return True if the stack holds at least one value."""
        return len(self._stack) > 0
class VolumeDataset(object):
"""Base dataset in FreeROI GUI system."""
def __init__(self, source, label_config_center, name=None, header=None,
view_min=None, view_max=None, alpha=255, colormap='gray',
cross_pos=None):
"""
Create a dataset from an NiftiImage which has following
atributes:
Parameters
----------
source : Nifti file path or 3D/4D numpy array
Nifti dataset, specified either as a filename (single file 3D/4D
image) or a 3D/4D numpy array. When source is a numpy array,
parameter header is required.
label_config : label configuration
name : name of the volume
volume name.
header : nifti1 header structure
Nifti header structure.
view_min : scalar or None
The lower limitation of data value which can be seen.
view_max : scalar
The upper limitation of data value which can be seen.
alpha: int number (0 - 255)
alpha channel value, 0 indicates transparency. Default is 255.
colormap : string
The string can represents the colormap used for corresponding
values, it can be 'gray', 'red2yellow', 'blue2green', 'ranbow'...
cross_pos : a list containing [x, y, z]
Default is None
Returns
-------
VolumeDataset
"""
if isinstance(source, np.ndarray):
self._data = np.rot90(source)
if name == None:
self._name = 'new_image'
else:
self._name = str(name)
if not isinstance(header, nib.nifti1.Nifti1Header):
raise ValueError("Parameter header must be specified!")
elif header.get_data_shape() == source.shape:
self._header = header
self._img = None
else:
raise ValueError("Data dimension does not match.")
else:
self._img = nib.load(source)
self._header = self._img.get_header()
basename = os.path.basename(source.strip('/'))
self._name = re.sub(r'(.*)\.nii(\.gz)?', r'\1', basename)
self.save_mem_load()
# For convenience, define a shift variable
self._y_shift = self.get_data_shape()[1] - 1
if view_min == None:
self._view_min = self._data.min()
else:
self._view_min = view_min
if view_max == None:
self._view_max = self._data.max()
else:
self._view_max = view_max
self._alpha = alpha
self._colormap = colormap
self._rgba_list = range(self.get_data_shape()[2])
# bool status for the item
self._visible = True
if len(self.get_data_shape()) == 3:
self._4d = False
else:
self._4d = True
self._time_point = 0
# temporal variant for OrthView
self._cross_pos = cross_pos
# define a dictionary
self.label_config_center = label_config_center
self.label_config_center.single_roi_view_update.connect(self.update_single_roi)
# undo redo stacks
self.undo_stack = DoStack()
self.redo_stack = DoStack()
self.update_rgba()
if self._cross_pos:
self.update_orth_rgba()
def save_mem_load(self):
"""Load data around current time-point."""
if len(self.get_data_shape())==4 and self._img:
data = np.zeros(self.get_data_shape())
self._data = np.rot90(data)
self._loaded_time_list = [0]
self._data[..., 0] = np.rot90(self._img.dataobj[..., 0])
else:
self._loaded_time_list = [0]
data = self._img.get_data(caching='unchanged')
self._data = np.rot90(data)
def get_data_shape(self):
"""Get shape of data."""
return self._header.get_data_shape()
def _rendering_factory(self):
"""Return a rendering factory according to the display setting."""
def shadow(array):
if not isinstance(self._colormap, LabelConfig):
colormap = str(self._colormap)
else:
colormap = self._colormap.get_colormap()
try:
current_roi = self.label_config_center.get_drawing_value()
except ValueError:
current_roi = None
return aq.array2qrgba(array, self._alpha, colormap,
normalize=(self._view_min, self._view_max),
roi=current_roi)
return shadow
def update_single_roi(self):
"""Update the view with single ROI colormap."""
if self._colormap == 'single ROI':
self.update_rgba()
if self._cross_pos:
self.update_orth_rgba()
self.label_config_center.single_roi_view_update_for_model.emit()
def update_rgba(self, index=None):
"""Create a range of qrgba array for display."""
# return a rendering factory
f = self._rendering_factory()
if index == None:
if self.is_4d():
layer_list = [self._data[..., i, self._time_point] for i in
range(self.get_data_shape()[2])]
else:
layer_list = [self._data[..., i] for i in
range(self.get_data_shape()[2])]
self._rgba_list = map(f, layer_list)
else:
if self.is_4d():
self._rgba_list[index] = f(self._data[..., index,
self._time_point])
else:
self._rgba_list[index] = f(self._data[..., index])
def set_cross_pos(self, cross_pos):
""" Update RGBA data in sagital, axial and coronal directions."""
if self._cross_pos:
if not cross_pos[0] == self._cross_pos[0]:
self._cross_pos[0] = cross_pos[0]
self.update_sagital_rgba()
if not cross_pos[1] == self._cross_pos[1]:
self._cross_pos[1] = cross_pos[1]
self.update_coronal_rgba()
if not cross_pos[2] == self._cross_pos[2]:
self._cross_pos[2] = cross_pos[2]
self.update_axial_rgba()
else:
self._cross_pos = cross_pos
self.update_sagital_rgba()
self.update_coronal_rgba()
self.update_axial_rgba()
def update_orth_rgba(self):
"""Update the disply in orth view."""
self.update_sagital_rgba()
self.update_coronal_rgba()
self.update_axial_rgba()
def update_sagital_rgba(self):
"""Update the sagital disply in orth view."""
f = self._rendering_factory()
idx = self._cross_pos[0]
if self.is_4d():
self._sagital_rgba = f(np.rot90(self._data[:, idx, :,
self._time_point]))
else:
self._sagital_rgba = f(np.rot90(self._data[:, idx, :]))
def update_axial_rgba(self):
"""Update the axial disply in orth view."""
f = self._rendering_factory()
idx = self._cross_pos[2]
if self.is_4d():
self._axial_rgba = f(self._data[:, :, idx, self._time_point])
else:
self._axial_rgba = f(self._data[:, :, idx])
def update_coronal_rgba(self):
"""Update the coronal disply in orth view."""
f = self._rendering_factory()
idx = self._y_shift - self._cross_pos[1]
if self.is_4d():
self._coronal_rgba = f(np.rot90(self._data[idx, :, :,
self._time_point]))
else:
self._coronal_rgba = f(np.rot90(self._data[idx, :, :]))
def set_alpha(self, alpha):
"""Set alpha value."""
if isinstance(alpha, int):
if alpha <= 255 and alpha >= 0:
if self._alpha != alpha:
self._alpha = alpha
self.update_rgba()
if self._cross_pos:
self.update_orth_rgba()
else:
raise ValueError("Value must be an integer between 0 and 255.")
def get_alpha(self):
"""Get alpha value."""
return self._alpha
def set_time_point(self, tpoint):
"""Set time point."""
if self.is_4d():
if isinstance(tpoint, int):
if tpoint >= 0 and tpoint < self.get_data_shape()[3]:
if self._img:
if not tpoint in self._loaded_time_list:
self._data[..., tpoint] = \
np.rot90(self._img.dataobj[..., tpoint])
self._loaded_time_list.append(tpoint)
self._time_point = tpoint
self.undo_stack.clear()
self.redo_stack.clear()
self.update_rgba()
if self._cross_pos:
self.update_orth_rgba()
else:
raise ValueError("Value must be an integer.")
def get_time_point(self):
"""Get time point."""
return self._time_point
def set_view_min(self, view_min):
"""Set lower limition of display range."""
try:
view_min = float(view_min)
self._view_min = view_min
self.update_rgba()
if self._cross_pos:
self.update_orth_rgba()
except ValueError:
print "view_min must be a number."
    def get_view_min(self):
        """Return the lower bound of the display range."""
        return self._view_min
def set_view_max(self, view_max):
"""Set upper limition of display range."""
try:
view_max = float(view_max)
self._view_max = view_max
self.update_rgba()
if self._cross_pos:
self.update_orth_rgba()
except ValueError:
print"view_max must be a number."
    def get_view_max(self):
        """Return the upper bound of the display range."""
        return self._view_max
    def set_name(self, name):
        """Set the item's name (coerced to str)."""
        self._name = str(name)
    def get_name(self):
        """Return the item's name."""
        return self._name
    def set_colormap(self, map_name):
        """Set the item's colormap and refresh all rendered views."""
        self._colormap = map_name
        self.update_rgba()
        if self._cross_pos:
            self.update_orth_rgba()
    def get_colormap(self):
        """Return the item's colormap name."""
        return self._colormap
def set_visible(self, status):
"""Set visibility of the volume."""
if isinstance(status, bool):
if status:
self._visible = True
else:
self._visible = False
else:
raise ValueError("Input must a bool.")
    def is_4d(self):
        """Return True if the data includes a time dimension."""
        return self._4d
    def is_visible(self):
        """Return the current visibility status."""
        return self._visible
    def get_rgba(self, index):
        """Return the rgba array for the layer at `index`."""
        return self._rgba_list[index]
    def get_sagital_rgba(self):
        """Return the cached sagital RGBA slice.

        Returns False while the buffer is still empty (i.e. it has not
        been rendered yet); the `.tolist()` truthiness test is how an
        empty array is detected.
        """
        if self._sagital_rgba.tolist():
            return self._sagital_rgba
        else:
            return False
    def get_axial_rgba(self):
        """Return the cached axial RGBA slice, or False if not rendered."""
        if self._axial_rgba.tolist():
            return self._axial_rgba
        else:
            return False
    def get_coronal_rgba(self):
        """Return the cached coronal RGBA slice, or False if not rendered."""
        if self._coronal_rgba.tolist():
            return self._coronal_rgba
        else:
            return False
def set_voxel(self, x, y, z, value, ignore=True):
"""Set value of the voxel whose coordinate is (x, y, z)."""
try:
if isinstance(y, list):
y_trans = [self._y_shift - item for item in y]
# check coordinate validation
coord_list = [(x[i], y_trans[i], z[i]) for i in range(len(x))]
coord_list = [c for c in coord_list if c[0]>=0 and
c[0]<self.get_data_shape()[0] and
c[1]>=0 and
c[1]<self.get_data_shape()[1] and
c[2]>=0 and
c[2]<self.get_data_shape()[2]]
x = [c[0] for c in coord_list]
y_trans = [c[1] for c in coord_list]
z = [c[2] for c in coord_list]
if self.is_4d():
orig_data = self._data[y_trans, x, z, self._time_point]
else:
orig_data = self._data[y_trans, x, z]
if np.any(orig_data != 0) and not ignore:
force = QMessageBox.question(None, "Replace?",
"Would you like to replace the original values?",
QMessageBox.Yes,
QMessageBox.No)
if force == QMessageBox.No:
return
if self.is_4d():
self.undo_stack.push((x, y, z, self._data[y_trans, x, z,
self._time_point]))
self._data[y_trans, x, z, self._time_point] = value
else:
self.undo_stack.push((x, y, z, self._data[y_trans, x, z]))
self._data[y_trans, x, z] = value
try:
for z_ in range(min(z), max(z)+1):
self.update_rgba(z_)
except TypeError:
self.update_rgba(z)
if self._cross_pos:
self.update_orth_rgba()
except:
raise
print "Input coordinates are invalid."
def save2nifti(self, file_path):
"""Save to a nifti file."""
#Define nifti1 datatype codes
NIFTI_TYPE_UINT8 = 2 # unsigned char
NIFTI_TYPE_INT16 = 4 # signed short
NIFTI_TYPE_INT32 = 8 # signed int.
NIFTI_TYPE_FLOAT32 = 16 # 32 bit float.
NIFTI_TYPE_COMPLEX64 = 32 # 64 bit complex = 2 32 bit floats
NIFTI_TYPE_FLOAT64 = 64 # 64 bit float = double.
NIFTI_TYPE_RGB24 = 128 # 3 8 bit bytes.
NIFTI_TYPE_INT8 = 256 # signed char.
NIFTI_TYPE_UINT16 = 512 # unsigned short.
NIFTI_TYPE_UINT32 = 768 # unsigned int.
NIFTI_TYPE_INT64 = 1024 #signed long long.
NIFTI_TYPE_UINT64 = 1280 # unsigned long long.
NIFTI_TYPE_FLOAT128 = 1536 # 128 bit float = long double.
NIFTI_TYPE_COMPLEX128 = 1792 #128 bit complex = 2 64 bit floats.
NIFTI_TYPE_COMPLEX256 = 2048 # 256 bit complex = 2 128 bit floats
NIFTI_TYPE_RGBA32 = 2304 # 4 8 bit bytes.
#Detect the data type of the input data.
data_type = {
np.uint8: NIFTI_TYPE_UINT8,
np.uint16: NIFTI_TYPE_UINT16,
np.uint32: NIFTI_TYPE_UINT32,
np.float32: NIFTI_TYPE_FLOAT32,
np.int16: NIFTI_TYPE_INT16,
np.int32: NIFTI_TYPE_INT32,
np.int8: NIFTI_TYPE_INT8
}
if sys.maxint > 2 ** 32: # The platform is 64 bit
data_type[np.float128] = NIFTI_TYPE_FLOAT128
data_type[np.float64] = NIFTI_TYPE_FLOAT64
data_type[np.int64] = NIFTI_TYPE_INT64
data_type[np.uint64] = NIFTI_TYPE_UINT64
data_type[np.complex64] = NIFTI_TYPE_COMPLEX64
data_type[np.complex128] = NIFTI_TYPE_COMPLEX128
data_type[np.complex256] = NIFTI_TYPE_COMPLEX256
data = np.rot90(self._data, 3)
if data_type.has_key(data.dtype.type):
self._header['datatype'] = data_type[data.dtype.type]
self._header['cal_max'] = data.max()
self._header['cal_min'] = 0
image = nib.nifti1.Nifti1Image(data, None, self._header)
nib.nifti1.save(image, file_path)
    def get_label_config(self):
        """Return the label config object.

        NOTE(review): this returns `label_config_center` while
        `set_label` assigns `self.label_config` — confirm the two
        attributes are intentionally distinct.
        """
        return self.label_config_center
    def undo_stack_not_empty(self):
        """Return True if there are undoable steps."""
        return self.undo_stack.stack_not_empty()
    def redo_stack_not_empty(self):
        """Return True if there are redoable steps."""
        return self.redo_stack.stack_not_empty()
    def undo(self):
        """Resume to the last step.

        Pops the most recent edit and re-applies the saved values via
        `set_voxel`.  That call pushes the *current* values onto the
        undo stack as a side effect; they are immediately popped and
        moved onto the redo stack so the step can be redone.

        Returns the z coordinate(s) of the restored voxels, or None if
        the undo stack is empty.
        """
        if self.undo_stack:
            voxel_set = self.undo_stack.pop()
            self.set_voxel(*voxel_set, ignore=True)
            self.redo_stack.push(self.undo_stack.pop())
            return voxel_set[2]
        return None
    def redo(self):
        """Forward to the next step.

        Re-applies a previously undone edit; `set_voxel` pushes the
        overwritten values back onto the undo stack, so the step can be
        undone again.  Returns the z coordinate(s), or None if the redo
        stack is empty.
        """
        if self.redo_stack:
            voxel_set = self.redo_stack.pop()
            self.set_voxel(*voxel_set, ignore=True)
            return voxel_set[2]
        return None
    def connect_undo(self, slot):
        """Connect the undo stack's change signal to `slot`."""
        self.undo_stack.stack_changed.connect(slot)
    def connect_redo(self, slot):
        """Connect the redo stack's change signal to `slot`."""
        self.redo_stack.stack_changed.connect(slot)
    def get_header(self):
        """Return the (NIfTI) header of the data."""
        return self._header
    def get_value(self, xyz, time_course=False):
        """Return the value at GUI coordinate `xyz` (x, y, z).

        The y coordinate is flipped through `self._y_shift` to map GUI
        rows onto array rows.  For 4D data, returns the value at the
        current time point by default; with `time_course=True`, returns
        the whole time series at that voxel (loading the raw data from
        file when the volume is file-backed).
        """
        if not time_course:
            if self.is_4d():
                return self._data[self._y_shift - xyz[1],
                                  xyz[0], xyz[2], self._time_point]
            else:
                return self._data[self._y_shift - xyz[1], xyz[0], xyz[2]]
        else:
            if self.is_4d() and self._img:
                # File-backed: pull the complete raw series, rotated
                # into display orientation to match self._data.
                data = self.get_raw_data()
                data = np.rot90(data)
                return data[self._y_shift - xyz[1], xyz[0], xyz[2], :]
            elif self.is_4d():
                return self._data[self._y_shift - xyz[1], xyz[0], xyz[2], :]
            else:
                return self._data[self._y_shift - xyz[1], xyz[0], xyz[2]]
def get_lthr_data(self):
"""Return whole data which low-thresholded."""
# FIXME one time point or whole data
temp = self._data.copy()
temp[temp < self._view_min] = 0
return temp
def get_lthr_raw_data(self):
"""
Return the low threshold of the raw data.
"""
temp = self._data.copy()
temp[temp < self._view_min] = 0
return np.rot90(temp, 3)
def get_raw_data(self):
"""Return the raw data."""
if self._img and self.is_4d():
temp = self._img.get_data(caching='unchanged')
temp = np.rot90(temp)
for tp in self._loaded_time_list:
temp[..., tp] = self._data[..., tp]
else:
temp = self._data.copy()
return np.rot90(temp, 3)
    def get_value_label(self, value):
        """Return the label string associated with `value`."""
        return self.label_config.get_index_label(value)
    def set_label(self, label_config):
        """Attach the given label configuration to this volume."""
        self.label_config = label_config
    def is_label_global(self):
        """Return True if the attached label config is global."""
        return self.label_config.is_global
    def get_roi_coords(self, roi):
        """Return the GUI coordinates of all voxels equal to `roi`.

        The nonzero() result is in array order (row, col, z); the
        returned tuple is (x, y, z) with the row index flipped back
        through `self._y_shift` into GUI y coordinates.
        """
        if self.is_4d():
            data = self._data[..., self._time_point]
        else:
            data = self._data
        coord = (data==roi).nonzero()
        #return (data==roi).nonzero()
        return (coord[1], self._y_shift - coord[0], coord[2])
def get_coord_val(self, x, y, z):
"""Return value based on the given x,y,z cordinate."""
if self.is_4d():
#return self._data[y, x, z, self._time_point]
return self._data[self._y_shift - y, x, z, self._time_point]
else:
#return self._data[y, x, z]
return self._data[self._y_shift - y, x, z]
    def duplicate(self):
        """Return a deep copy of this volume as a new VolumeDataset.

        The copy is built from the raw (un-rotated) data and inherits
        the current label config, header, display range, alpha and
        colormap; its name gets a '_duplicate' suffix.
        """
        dup_img = VolumeDataset(source=self.get_raw_data(),
                                label_config_center=self.get_label_config(),
                                name=self.get_name()+'_duplicate',
                                header=self.get_header(),
                                view_min=self.get_view_min(),
                                view_max=self.get_view_max(),
                                alpha=self.get_alpha(),
                                colormap=self.get_colormap())
        return dup_img
class SurfaceDataset(object):
    """Container for surface object in FreeROI GUI system.
    Attributes
    ----------
    surf_path: string
        Absolute path of surf file
    x: 1d array
        x coordinates of vertices
    y: 1d array
        y coordinates of vertices
    z: 1d array
        z coordinates of vertices
    coords: 2d array of shape [n_vertices, 3]
        The vertices coordinates
    faces: 2d array
        The faces ie. the triangles
    nn: 2d array
        Normalized surface normals for vertices
    """
    def __init__(self, surf_path, offset=None):
        """Surface
        Parameters
        ----------
        surf_path: absolute surf file path
        offset: float | None
            If 0.0, the surface will be offset such that medial wall
            is aligned with the origin. If None, no offset will be
            applied. If != 0.0, an additional offset will be used.
        """
        if not os.path.exists(surf_path):
            # Previously this printed and `return`ed, leaving a
            # half-initialized object that failed later with
            # AttributeError; fail fast instead.
            raise IOError('surf file does not exist!')
        self.surf_path = surf_path
        (self.surf_dir, self.name) = os.path.split(surf_path)
        name_split = self.name.split('.')
        self.suffix = name_split[-1]
        # FreeSurfer files are named <hemi>.<type>; GIFTI files are
        # named <subject>.<hemi>.gii.
        if self.suffix in ('pial', 'inflated', 'white'):
            self.hemi = name_split[0]
        elif self.suffix == 'gii':
            self.hemi = name_split[1]
        else:
            raise ImageFileError('This file format-{} is not supported at present.'.format(self.suffix))
        self.offset = offset
        # load geometry
        self.load_geometry()
    def load_geometry(self):
        """Load surface geometry (coords, faces) and vertex normals."""
        if self.suffix in ('pial', 'inflated', 'white'):
            self.coords, self.faces = nib.freesurfer.read_geometry(self.surf_path)
        elif self.suffix == 'gii':
            gii_data = g_io.read(self.surf_path).darrays
            self.coords, self.faces = gii_data[0].data, gii_data[1].data
        else:
            raise ImageFileError('This file format-{} is not supported at present.'.format(self.suffix))
        if self.offset is not None:
            # Shift along x so the medial wall lines up with the origin
            # (plus any extra requested offset).
            if self.hemi in ('lh', 'L'):
                self.coords[:, 0] -= (np.max(self.coords[:, 0]) + self.offset)
            else:
                self.coords[:, 0] -= (np.min(self.coords[:, 0]) + self.offset)
        self.nn = mshtool.compute_normals(self.coords, self.faces)
    def get_bin_curv(self):
        """
        load and get binarized curvature (gyrus' curvature<0, sulcus's curvature>0)
        :return:
            binarized curvature (1 for gyri, 0 for sulci), or None if no
            curvature file exists next to the surface file
        """
        curv_name = '{}.curv'.format(self.hemi)
        curv_path = os.path.join(self.surf_dir, curv_name)
        if not os.path.exists(curv_path):
            return None
        bin_curv = nib.freesurfer.read_morph_data(curv_path) <= 0
        # np.int was removed in NumPy >= 1.24; the builtin int is the
        # documented replacement.
        bin_curv = bin_curv.astype(int)
        return bin_curv
    def find_1_ring_neighbor(self):
        """Build self.one_ring_neighbor: for each vertex, the set of
        vertices sharing a face with it (the vertex itself excluded)."""
        n_vtx = self.coords.shape[0]
        self.one_ring_neighbor = [set() for i in range(n_vtx)]
        for face in self.faces:
            for v_id in face:
                self.one_ring_neighbor[v_id].update(set(face))
        for v_id in range(n_vtx):
            self.one_ring_neighbor[v_id].remove(v_id)
    def save_geometry(self):
        """Save geometry information back to self.surf_path."""
        nib.freesurfer.write_geometry(self.surf_path,
                                      self.coords, self.faces)
    def get_vertices_num(self):
        """Get vertices number of the surface."""
        return self.coords.shape[0]
    def get_coords(self):
        return self.coords
    def get_faces(self):
        return self.faces
    def get_nn(self):
        return self.nn
    @property
    def x(self):
        return self.coords[:, 0]
    @property
    def y(self):
        return self.coords[:, 1]
    @property
    def z(self):
        return self.coords[:, 2]
    def apply_xfm(self, mtx):
        """Apply an affine transformation matrix to the x, y, z vectors.

        `mtx` is a 4x4 affine; coords are promoted to homogeneous
        coordinates, transformed, and the first three columns kept.
        """
        # BUG FIX: `[:, 3]` selected only the homogeneous column,
        # collapsing coords to a 1-D array of ones; `[:, :3]` keeps the
        # transformed x, y, z columns.
        self.coords = np.dot(np.c_[self.coords, np.ones(len(self.coords))],
                             mtx.T)[:, :3]
class ScalarData(object):
    """Container for Scalar data in Surface syetem.
    A container for thickness, curv, sig, and label dataset.
    """
    def __init__(self, name, data, vmin=None, vmax=None, colormap=None):
        """Initialization.

        Parameters
        ----------
        name : str
        data : ndarray
            1D (reshaped to a column) or 2D array of per-vertex values.
        vmin, vmax : number | None
            Display range; default to the data min/max.  (Previously a
            truthiness check silently discarded an explicit 0.0 and any
            int value; now any non-None number is honored.)
        colormap : str | None
            Defaults to 'red2yellow'.
        """
        self.name = name
        if data.ndim == 1:
            self.data = data.reshape((data.shape[0], 1))
        elif data.ndim == 2:
            self.data = data
        else:
            raise ValueError("The data stored by ScalarData must be 2D")
        if vmin is not None:
            self.vmin = float(vmin)
        else:
            self.vmin = np.min(data)
        if vmax is not None:
            self.vmax = float(vmax)
        else:
            self.vmax = np.max(data)
        if colormap is None:
            self.colormap = 'red2yellow'
        else:
            self.colormap = colormap
        self.visible = True
        self.alpha = 1.0
        self.colorbar = True
    def get_data(self):
        return self.data
    def get_name(self):
        return self.name
    def get_min(self):
        return self.vmin
    def get_max(self):
        return self.vmax
    def get_colormap(self):
        return self.colormap
    def get_alpha(self):
        return self.alpha
    def is_visible(self):
        return self.visible
    def is_colorbar(self):
        return self.colorbar
    def set_name(self, name):
        self.name = name
    def set_min(self, vmin):
        """Set the display minimum; non-numeric input is reported and
        ignored."""
        try:
            self.vmin = float(vmin)
        except (TypeError, ValueError):
            print("vmin must be a number.")
    def set_max(self, vmax):
        """Set the display maximum; non-numeric input is reported and
        ignored."""
        try:
            self.vmax = float(vmax)
        except (TypeError, ValueError):
            print("vmax must be a number.")
    def set_colormap(self, colormap_name):
        self.colormap = colormap_name
    def set_visible(self, status):
        if isinstance(status, bool):
            self.visible = status
        else:
            raise ValueError("Input must a bool.")
    def set_colorbar(self, status):
        if isinstance(status, bool):
            self.colorbar = status
        else:
            raise ValueError("Input must a bool.")
    def set_alpha(self, alpha):
        """Set opacity; `alpha` is a number in [0, 1]."""
        if 0 <= alpha <= 1:
            if self.alpha != alpha:
                self.alpha = alpha
        else:
            # Message fixed: the accepted range is a *number* in
            # [0, 1], not an integer.
            raise ValueError("Value must be a number between 0 and 1.")
    def save2nifti(self, file_path):
        """Save the scalar data to a NIfTI-1 file at `file_path`.

        1D data is written with shape (n, 1, 1); multi-column data adds
        the columns as a fourth dimension.
        """
        # Define nifti1 datatype codes
        NIFTI_TYPE_UINT8 = 2  # unsigned char
        NIFTI_TYPE_INT16 = 4  # signed short
        NIFTI_TYPE_INT32 = 8  # signed int.
        NIFTI_TYPE_FLOAT32 = 16  # 32 bit float.
        NIFTI_TYPE_COMPLEX64 = 32  # 64 bit complex = 2 32 bit floats
        NIFTI_TYPE_FLOAT64 = 64  # 64 bit float = double.
        NIFTI_TYPE_RGB24 = 128  # 3 8 bit bytes.
        NIFTI_TYPE_INT8 = 256  # signed char.
        NIFTI_TYPE_UINT16 = 512  # unsigned short.
        NIFTI_TYPE_UINT32 = 768  # unsigned int.
        NIFTI_TYPE_INT64 = 1024  # signed long long.
        NIFTI_TYPE_UINT64 = 1280  # unsigned long long.
        NIFTI_TYPE_FLOAT128 = 1536  # 128 bit float = long double.
        NIFTI_TYPE_COMPLEX128 = 1792  # 128 bit complex = 2 64 bit floats.
        NIFTI_TYPE_COMPLEX256 = 2048  # 256 bit complex = 2 128 bit floats
        NIFTI_TYPE_RGBA32 = 2304  # 4 8 bit bytes.
        # Detect the data type of the input data.
        data_type = {
            np.uint8: NIFTI_TYPE_UINT8,
            np.uint16: NIFTI_TYPE_UINT16,
            np.uint32: NIFTI_TYPE_UINT32,
            np.float32: NIFTI_TYPE_FLOAT32,
            np.int16: NIFTI_TYPE_INT16,
            np.int32: NIFTI_TYPE_INT32,
            np.int8: NIFTI_TYPE_INT8
        }
        # sys.maxsize (not the Python-2-only sys.maxint) exists on both
        # Python 2.6+ and 3.
        if sys.maxsize > 2 ** 32:  # The platform is 64 bit
            data_type[np.float128] = NIFTI_TYPE_FLOAT128
            data_type[np.float64] = NIFTI_TYPE_FLOAT64
            data_type[np.int64] = NIFTI_TYPE_INT64
            data_type[np.uint64] = NIFTI_TYPE_UINT64
            data_type[np.complex64] = NIFTI_TYPE_COMPLEX64
            data_type[np.complex128] = NIFTI_TYPE_COMPLEX128
            data_type[np.complex256] = NIFTI_TYPE_COMPLEX256
        header = nib.Nifti1Header()
        if self.data.shape[1] == 1:
            new_shape = (self.data.shape[0], 1, 1)
        else:
            new_shape = (self.data.shape[0], 1, 1, self.data.shape[1])
        data = self.data.reshape(new_shape)
        if data.dtype.type in data_type:
            header['datatype'] = data_type[data.dtype.type]
        header['cal_max'] = data.max()
        header['cal_min'] = data.min()
        image = nib.Nifti1Image(data, None, header)
        nib.nifti1.save(image, file_path)
    def save2label(self, file_path):
        """Save the indices of all non-zero vertices as an ASCII label
        file with a vertex-count header."""
        X = np.where(self.data[:, 0] != 0)[0]
        header = str("the number of vertex: " + str(len(X)))
        np.savetxt(file_path, X, fmt='%d',
                   header=header, comments="# ascii, label vertexes\n")
class Hemisphere(object):
    """Hemisphere: container for surface data and scalar data."""
    def __init__(self, surf_path, offset=None):
        """Hemisphere
        Parameters
        ----------
        surf_path: absolute surf file path
        offset: float | None
            If 0.0, the surface will be offset such that medial wall
            is aligned with the origin. If None, no offset will be
            applied. If != 0.0, an additional offset will be used.
        """
        # self.surf = SurfaceDataset(surf_path, offset)
        surf_type = 'white'
        self.surf = {}
        self.bin_curv = None
        self.overlay_list = []
        self.alpha = 1.0
        self.colormap = "gray"
        self.visible = True
        # NOTE(review): the `offset` parameter is ignored here and a
        # hard-coded 1.0 is passed instead — confirm this is intended.
        self.add_surfs(surf_path, surf_type, offset=1.0)
        self.name = self.surf[surf_type].name
    def _add_surface(self, surf_path, surf_type, offset=None):
        """Add surface data and cache its binarized curvature."""
        self.surf[surf_type] = SurfaceDataset(surf_path, offset)
        self.bin_curv = self.surf[surf_type].get_bin_curv()
    def del_surfs(self, surf_type):
        """Delete the surface data of the given type, if present."""
        try:
            self.surf[surf_type]
        except KeyError:
            print("The surface data is not exist!")
        else:
            del self.surf[surf_type]
    def add_surfs(self, surf_path, surf_type, offset=None):
        """Add surf data unless that type was already loaded."""
        try:
            self.surf[surf_type]
        except KeyError:
            self._add_surface(surf_path, surf_type, offset)
        else:
            print("The surface data is already exist!")
    def update_surfs(self, surf_path, surf_type, offset=None):
        """Update surf data of an existing type; do nothing for an
        unknown type (a confirmation dialog is planned there)."""
        try:
            self.surf[surf_type]
        except KeyError:
            pass
            # Here should be a dialog for confirm, whether adding data or not
        else:
            self._add_surface(surf_path, surf_type, offset)
    def load_overlay(self, source, surf_type, vmin=None, vmax=None, colormap=None):
        """Load scalar data as an overlay.

        `source` may be an ndarray (named 'new_overlay') or a file path
        read through `read_data` with the surface's vertex count.
        """
        if isinstance(source, np.ndarray):
            name = 'new_overlay'
            data = source
        else:
            name = os.path.basename(source).split('.')[0]
            data = read_data(source, self.surf[surf_type].get_vertices_num())
        self.overlay_list.append(ScalarData(name, data,
                                            vmin=vmin, vmax=vmax,
                                            colormap=colormap))
    def overlay_up(self, idx):
        """Move the `idx` overlay layer up (towards the top)."""
        if not self.is_top_layer(idx):
            self.overlay_list[idx], self.overlay_list[idx+1] = \
                self.overlay_list[idx+1], self.overlay_list[idx]
    def overlay_down(self, idx):
        """Move the `idx` overlay layer down (towards the bottom)."""
        if not self.is_bottom_layer(idx):
            self.overlay_list[idx], self.overlay_list[idx-1] = \
                self.overlay_list[idx-1], self.overlay_list[idx]
    def is_top_layer(self, idx):
        """Return True/False for a valid index; report and return None
        for an invalid one."""
        if isinstance(idx, int) and 0 <= idx < len(self.overlay_list):
            if len(self.overlay_list)-1 == idx:
                return True
            else:
                return False
        else:
            print('Invalid input!')
    def is_bottom_layer(self, idx):
        """Return True/False for a valid index; report and return None
        for an invalid one."""
        if isinstance(idx, int) and 0 <= idx < len(self.overlay_list):
            if idx == 0:
                return True
            else:
                return False
        else:
            print('Invalid input!')
    def overlay_count(self):
        """Return the number of loaded overlays."""
        return len(self.overlay_list)
    def get_alpha(self):
        return self.alpha
    def get_colormap(self):
        return self.colormap
    def is_visible(self):
        return self.visible
    def set_alpha(self, alpha):
        """Set opacity; `alpha` is a number in [0, 1]."""
        if 0 <= alpha <= 1:
            if self.alpha != alpha:
                self.alpha = alpha
        else:
            # Message fixed: the accepted range is a *number* in
            # [0, 1], not an integer.
            raise ValueError("Value must be a number between 0 and 1.")
    def set_colormap(self, colormap_name):
        self.colormap = colormap_name
    def set_visible(self, status):
        if isinstance(status, bool):
            self.visible = status
        else:
            raise ValueError("Input must a bool.")
    def get_name(self):
        return self.name
    def get_rgba(self, ol):
        """
        Return a RGBA array according to scalar_data, alpha and colormap.
        :param ol:
            The element in self.overlay_list.
        :return: array
        """
        data = ol.get_data()
        data = np.mean(data, 1)
        data = data.reshape((data.shape[0],))
        return aq.array2qrgba(data, ol.get_alpha()*255, ol.get_colormap(),
                              (ol.get_min(), ol.get_max()))  # The scalar_data's alpha is belong to [0, 1].
    def get_composite_rgb(self):
        """Blend all visible overlays over the curvature background."""
        # start_render_index = self._get_start_render_index()
        start_render_index = 0
        # get rgba arrays according to each overlay
        rgba_list = []
        for ol in self.overlay_list[start_render_index:]:
            if ol.is_visible():
                rgba_list.append(self.get_rgba(ol))
        # automatically add the background array
        if self.bin_curv is not None:
            background = aq.array2qrgba(self.bin_curv, 255.0, 'gray', (-1, 1.5))
        else:
            background = np.ones((self.surf['white'].get_vertices_num(), 4)) * 127.5
        rgba_list.insert(0, background)
        return aq.qcomposition(rgba_list)
    def _get_start_render_index(self):
        """
        If an overlay's opacity is 1.0(i.e. completely opaque) and need to cover a whole
        hemisphere, other overlays that below it are no need to be rendered.
        :return: int
            The index that the render starts at.
        """
        for ol in self.overlay_list[-1::-1]:
            # FIXME There may be even no 'label' in a label's name, so we need use other method to recognize a label.
            if "label" not in ol.get_name() and ol.get_alpha() == 1. and ol.is_visible()\
                    and ol.get_min() <= np.min(ol.get_data()):
                return self.overlay_list.index(ol)
        # 0 means that the render will start with the bottom overlay.
        return 0
|
983,945 | a556284ac6af94c72a1b1e9d5d36907e7e595972 | from collections import Counter, defaultdict
def mix(s1=None, s2=None):
    """Compare the lowercase-letter frequencies of two strings.

    For each lowercase letter appearing more than once in either
    string, the larger count wins; the result lists segments like
    '1:aaa' (s1 won), '2:bb' (s2 won) or '=:cc' (tie), sorted by
    decreasing count, joined with '/'.
    """
    s1 = str(s1)
    s2 = str(s2)
    tallied_s1 = sanitize_chars(Counter(s1).most_common())
    tallied_s2 = sanitize_chars(Counter(s2).most_common())
    # Easier to get data from a dict rather than a list of tuples.
    tallied_s1_dict = {k: v for k, v in tallied_s1}
    tallied_s2_dict = {k: v for k, v in tallied_s2}
    max_values = find_max(tallied_s1_dict, tallied_s2_dict)
    # Swap key with value
    transposed_max_values = transpose_dict(max_values)
    return display_result(sort(transposed_max_values))
def sanitize_chars(tallied_strings):
    """Keep only (char, count) pairs for lowercase chars with count > 1."""
    kept = []
    for char, count in tallied_strings:
        if char.islower() and count > 1:
            kept.append((char, count))
    return kept
def find_max(s1_dict=None, s2_dict=None):
    """Merge two char->count dicts, keeping the larger count per char.

    Result keys are prefixed '1:' / '2:' for the winning source, or
    '=:' when both counts are equal; chars present in only one dict
    keep their own count and source prefix.
    """
    # Mutable default arguments ({}) are shared across calls; default
    # to fresh empty dicts instead.
    s1_dict = {} if s1_dict is None else s1_dict
    s2_dict = {} if s2_dict is None else s2_dict
    s1_key_set = set(s1_dict)
    s2_key_set = set(s2_dict)
    set_intersect = s1_key_set.intersection(s2_key_set)
    set_difference = s1_key_set.symmetric_difference(s2_key_set)
    set_result = {}
    for k in set_intersect:
        if s1_dict.get(k) > s2_dict.get(k):
            set_result['1:'+k] = s1_dict.get(k)
        elif s1_dict.get(k) < s2_dict.get(k):
            set_result['2:'+k] = s2_dict.get(k)
        else:
            set_result['=:'+k] = s1_dict.get(k)
    for k in set_difference:
        if s1_dict.get(k):
            set_result['1:'+k] = s1_dict.get(k)
        else:
            set_result['2:'+k] = s2_dict.get(k)
    return set_result
def transpose_dict(transposee):
    """Invert a mapping, grouping keys by their (hashable) value.

    Returns a defaultdict(list) mapping each value to the list of keys
    that carried it, in the input's iteration order.
    """
    # The original created `defaultdict()` with no factory and then
    # initialized lists by hand; giving the factory does that work.
    grouped = defaultdict(list)
    for key, value in transposee.items():
        grouped[value].append(key)
    return grouped
def sort(transposed_sortee):
    """Order counts descending; within one count, order tags by their
    character (index 2) and then by their source prefix (index 0),
    relying on sorted()'s stability."""
    ordered = {}
    for count, tags in sorted(transposed_sortee.items(),
                              key=lambda kv: kv[0], reverse=True):
        by_char = sorted(tags, key=lambda tag: tag[2])
        ordered[count] = sorted(by_char, key=lambda tag: tag[0])
    return ordered
def display_result(sorted_result):
    """Render each tag as its 2-char prefix plus the character repeated
    `count` times, joining all segments with '/'."""
    segments = []
    for count, tags in sorted_result.items():
        for tag in tags:
            segments.append(tag[:2] + tag[-1] * count)
    return '/'.join(segments)
|
983,946 | db3587107f3b29482d840f3c455b260081defd54 | '''A simple blockchain implementation.
Inspired by https://medium.com/crypto-currently/lets-build-the-tiniest-blockchain-e70965a248b'''
from __future__ import print_function
import hashlib
import datetime
class Block:
    '''A single block in the chain: index, timestamp, payload, and the
    hash link to its predecessor.'''
    def __init__(self, index, timestamp, data, previous_hash):
        self.index = index
        self.timestamp = timestamp
        self.data = data
        self.previous_hash = previous_hash
        self.hash = self.hash_block()
    def hash_block(self):
        '''Return the SHA-256 hex digest over the concatenated string
        forms of index, timestamp, data and previous hash.'''
        payload = "{}{}{}{}".format(self.index, self.timestamp,
                                    self.data, self.previous_hash)
        return hashlib.sha256(payload.encode('utf-8')).hexdigest()
def create_genesis_block():
    '''Return block #0, the fixed root of the chain.'''
    return Block(index=0,
                 timestamp=datetime.datetime.now(),
                 data="Genesis Block",
                 previous_hash="0")
def next_block(previous_block):
    '''Build the successor of *previous_block*, linked via its hash.'''
    new_index = previous_block.index + 1
    return Block(new_index,
                 datetime.datetime.now(),
                 "I'm block {}".format(new_index),
                 previous_block.hash)
def create_block_chain(num_of_blocks):
    '''Grow a chain of *num_of_blocks* blocks after the genesis block,
    announcing each addition on stdout.'''
    chain = [create_genesis_block()]
    for _ in range(num_of_blocks):
        block = next_block(chain[-1])
        chain.append(block)
        print("Block #{} was added to the blockchain".format(block.index))
        print("Hash: {}\n".format(block.hash))
create_block_chain(10)
|
983,947 | 1fb3e0360709c89437b73d3cc7f55078437a6627 | import pytest
@pytest.fixture
def input_value():
    """Fixture supplying the shared test input number (39)."""
    # The original bound the value to a local named `input`, shadowing
    # the builtin; return the constant directly instead.
    return 39
|
983,948 | 152f59e544cfdf23305f51cf841ed0d0beb52b23 | from django.contrib.auth.models import AbstractBaseUser, BaseUserManager
from django.core.exceptions import ObjectDoesNotExist
from django.db import models
# Create your models here.
from ecommerce import settings
class CustomAccountManager(BaseUserManager):
    """Manager for CustomUser: creates regular and super users with
    email as the primary identifier."""
    # parameters are the required fields
    def create_user(self, email, username, first_name, last_name, password=None):
        """Create and persist a regular user.

        Raises ValueError when email or username is missing; the
        password is hashed via set_password before saving.
        """
        if not email:
            raise ValueError("Users must have an email address")
        if not username:
            raise ValueError("Users must have a username")
        # creation of new user
        user = self.model(
            email=self.normalize_email(email),
            username=username,
            first_name=first_name,
            last_name=last_name
        )
        user.set_password(password)
        user.save(using=self._db)
        return user
    def create_superuser(self, email, username, first_name, last_name, password=None):
        """Create a user and promote it to admin/staff/superuser.

        NOTE(review): the email is normalized here and again inside
        create_user — harmless double normalization; confirm.
        """
        user = self.create_user(
            email=self.normalize_email(email),
            username=username,
            password=password,
            first_name=first_name,
            last_name=last_name,
        )
        user.is_admin = True
        user.is_staff = True
        user.is_superuser = True
        user.save(using=self._db)
        return user
class CustomUser(AbstractBaseUser):
    """Custom user model authenticated by email instead of username."""
    # default fields
    email = models.EmailField(verbose_name="email", max_length=70, unique=True)
    username = models.CharField(max_length=30, unique=True)
    image = models.ImageField(upload_to='profile_image', default='/None/default_picture.png')
    date_joined = models.DateTimeField(verbose_name='date joined', auto_now_add=True)
    last_login = models.DateTimeField(verbose_name='last login', auto_now=True)
    is_admin = models.BooleanField(default=False)
    is_active = models.BooleanField(default=True)
    is_staff = models.BooleanField(default=False)
    is_superuser = models.BooleanField(default=False)
    # personalized fieds
    first_name = models.CharField(max_length=30, default=None)
    last_name = models.CharField(max_length=30, default=None)
    is_vendor = models.BooleanField(default=False)
    # Authenticate by email; these extra fields are prompted for by
    # createsuperuser in addition to email/password.
    USERNAME_FIELD = 'email'
    REQUIRED_FIELDS = ['username', 'first_name', 'last_name']  # required field other than email
    objects = CustomAccountManager()
    def __str__(self):
        """Readable identity used in the admin and shell."""
        return self.username + ", " + self.email
    def has_perm(self, perm, obj=None):
        # Admins are granted every object permission.
        return self.is_admin
    def has_module_perms(self, app_label):
        # All users may see every app's module in the admin.
        return True
|
983,949 | 2dfe503f2c06e4ba2eaea9cd8926fcedc1d8a120 | class Solution(object):
def findDuplicate(self, nums):
x = y = 0
while True:
x = nums[x]
y = nums[nums[y]]
if x == y:
break
y = 0
while x != y:
x = nums[x]
y = nums[y]
return x |
983,950 | ecbcd986f74cab6bbb906fd2fba1e756dd1a4378 | import hashlib
import io
from PIL import Image
from loguru import logger
from me_models import DbConnect, Queue, Src_metadata
from utils import config, file_md5sum
PARSED_EXIF = 'parsed_exif'  # key PIL uses in Image.info for pre-parsed EXIF
cfg = config()
logger.add("update_metadata.log", rotation="100 MB")  # rotate log at 100 MB
DbConnect()  # open the database connection the Queue model queries use
def main():
    """Update metadata for every queued photo lacking an md5sum."""
    # TODO: Set state here if desired
    for photo in Queue.objects(md5sum=None):
        update_metadata(photo)
# noinspection PyMethodMayBeStatic
def get_bytes(path):
    """Read and return the entire file at *path* as bytes."""
    with open(path, "rb") as handle:
        return handle.read()
def update_metadata(photo):
    """Compute checksums and basic EXIF metadata for one queue entry.

    Mutates and saves *photo*: sets md5sum always; for readable images
    also sets image_md5 (digest of decoded pixels) and src_metadata.
    """
    logger.info(f"Getting metadata from: {photo.src_path}")
    if photo.size > 1e9:
        # Too large to load into memory: checksum the file streaming
        # and skip image decoding entirely.
        photo.md5sum = file_md5sum(photo.src_path)
        photo.save()
        return
    photo_b = get_bytes(photo.src_path)
    photo.md5sum = hashlib.md5(photo_b).hexdigest()
    try:
        im = Image.open(io.BytesIO(photo_b))
        if PARSED_EXIF in im.info:
            src_metadata = Src_metadata()
            # Standard EXIF tag IDs: 0x010F=Make, 0x0110=Model,
            # 0x9003=DateTimeOriginal.
            src_metadata.cameraMake = im.info[PARSED_EXIF].get(0x010f, "")
            src_metadata.cameraModel = im.info[PARSED_EXIF].get(0x0110, "")
            src_metadata.creationTime = im.info[PARSED_EXIF].get(0x9003, "")
            src_metadata.width = im.width
            src_metadata.height = im.height
            photo.src_metadata = src_metadata
        # Digest of the decoded pixel data, independent of container
        # bytes/metadata.
        photo.image_md5 = hashlib.md5(im.tobytes()).hexdigest()
    except OSError:
        # Not an image PIL can open; keep only the file checksum.
        pass
    photo.save()
if __name__ == "__main__":
main()
|
983,951 | 8285f239a88ac46a64002effc194ec7ed3684a31 | import abc
import datetime
import queue
import time
import glog as log
from voysis.audio.audio import AudioFile
from voysis.audio.audio import PCM_SIGNED_INT
from voysis.device.device import Device
class FileDevice(Device):
    """Device that replays audio from a file, pacing chunks in
    (near) real time through an internal queue.

    Audio format fields are taken from the file's wav header when one
    is present; otherwise they are left as None for subclasses to fill.
    """
    def __init__(self, audio_file=None, **kwargs):
        Device.__init__(self, **kwargs)
        # Minimum delay between emitted chunks, in seconds.
        self.time_between_chunks = kwargs.get('time_between_chunks', 0.08)
        self._queue = queue.Queue()
        # Epoch start so the very first chunk is never delayed.
        self._last_chunk_time = datetime.datetime.utcfromtimestamp(0)
        self.wakeword_detected = False
        self.audio_file = AudioFile(audio_file)
        if self.audio_file.header is not None:
            self.encoding = self.audio_file.header.encoding
            self.sample_rate = self.audio_file.header.sample_rate
            self.bits_per_sample = self.audio_file.header.bits_per_sample
            self.channels = self.audio_file.header.channels
            self.big_endian = self.audio_file.header.big_endian
        else:
            self.encoding = None
            self.sample_rate = None
            self.bits_per_sample = None
            self.channels = None
            self.big_endian = None
    def stream(self, client, recording_stopper):
        """Stream the whole file to `client` and return its query."""
        self.start_recording()
        recording_stopper.started()
        query = client.stream_audio(self.generate_frames(), notification_handler=recording_stopper.stop_recording,
                                    audio_type=self.audio_type())
        recording_stopper.stop_recording(None)
        return query
    def stream_with_wakeword(self, client, recording_stopper, wakeword_detector):
        """Run wakeword detection over the file, then stream the
        remaining queued audio to `client`."""
        self.start_recording()
        recording_stopper.started()
        self.wakeword_detected = wakeword_detector.stream_audio(self.generate_frames())
        if self.wakeword_detected:
            print("Wakeword detected.")
        else:
            print("No wakeword detected.")
        query = client.stream_audio(self.generate_frames(), notification_handler=recording_stopper.stop_recording,
                                    audio_type=self.audio_type())
        recording_stopper.stop_recording(None)
        return query
    def test_wakeword(self, recording_stopper, wakeword_detector):
        """Return the wakeword indices the detector finds in the file."""
        self.start_recording()
        recording_stopper.started()
        wakeword_indices = wakeword_detector.test_wakeword(self.generate_frames())
        recording_stopper.stop_recording(None)
        return wakeword_indices
    def start_recording(self):
        """Log the audio format and (re)fill the chunk queue from the file."""
        log.info(
            'Sending %s channels at %sHz, %s bits per sample using encoding %s',
            self.channels, self.sample_rate, self.bits_per_sample, self.encoding
        )
        self._queue.queue.clear()
        self.audio_to_frames()
    def stop_recording(self):
        """Discard any queued, not-yet-streamed chunks."""
        self._queue.queue.clear()
    def is_recording(self):
        """Return True while queued chunks remain to be streamed."""
        return not (self._queue.empty())
    def generate_frames(self):
        """Yield queued chunks, sleeping so consecutive chunks are at
        least `time_between_chunks` seconds apart (real-time pacing)."""
        while not self._queue.empty():
            data = self._queue.get_nowait()
            if data:
                now = datetime.datetime.utcnow()
                seconds_since_last = (now - self._last_chunk_time).total_seconds()
                if seconds_since_last < self.time_between_chunks:
                    time.sleep(self.time_between_chunks - seconds_since_last)
                self._last_chunk_time = now
                yield data
    def audio_to_frames(self):
        """Read the whole file into the queue in chunk_size pieces."""
        while True:
            data = self.audio_file.read(self.chunk_size)
            if not data:
                break
            self._queue.put(data)
    @abc.abstractmethod
    def audio_type(self):
        """Return the MIME type describing the streamed audio."""
        pass
class RawFileDevice(FileDevice):
    """
    File device for sending raw samples from a file to the service. If the
    file being sent is a valid wav file, the audio details will be read from
    the wav header (the wav header will be stripped and not sent to the
    server). If the file is raw samples, the audio format details will be
    taken from the command line or, if not provided, default to
    16KHz, little-endian, signed integer, 1 channel PCM.
    """
    def __init__(self, audio_file=None, **kwargs):
        super().__init__(audio_file, **kwargs)
        # No wav header: fall back to caller-supplied format details or
        # the documented defaults above.
        if self.audio_file.header is None:
            self.encoding = kwargs.get('encoding', PCM_SIGNED_INT)
            self.sample_rate = kwargs.get('sample_rate', 16000)
            # Signed-int PCM is assumed 16-bit; any other encoding 32-bit.
            self.bits_per_sample = 16 if self.encoding == PCM_SIGNED_INT else 32
            self.channels = 1
            self.big_endian = kwargs.get('big_endian', False)
    def audio_type(self):
        """Return an audio/pcm MIME type encoding all format details."""
        return f'audio/pcm;encoding={self.encoding};bits={self.bits_per_sample};rate={self.sample_rate}' \
               f';channels={self.channels};big-endian={self.big_endian}'
class WavFileDevice(FileDevice):
    """
    File device for sending a wav file to the server. The full wav header
    will be sent to the server and the MIME type "audio/wav" will be used.
    The server is expected to parse the header to infer the audio encoding
    details.
    This device implementation will raise an error if a valid wav header
    cannot be parsed.
    """
    def __init__(self, audio_file=None, **kwargs):
        # AudioFile consumes the header while parsing; remember the
        # starting position and seek back so the header bytes are still
        # streamed to the server.
        start_pos = audio_file.tell()
        super().__init__(audio_file, **kwargs)
        if self.audio_file.header is None:
            raise ValueError('File does not contain a valid wav header.')
        audio_file.seek(start_pos)
    def audio_type(self):
        """Return the MIME type; format details live in the wav header."""
        return 'audio/wav'
|
983,952 | 10312e323b22981b08a20516daf90de087d1c524 | import numpy as np
from phonopy.units import VaspToTHz,AMU,EV
from phonopy.harmonic.force_constants import similarity_transformation
from numba import jit,njit,prange
def get_dq_dynmat_q(phonon,q):
    """Return the dynamical-matrix derivative dD/dq at wave vector q.

    Delegates to phonopy's GroupVelocity helper.  The text below is the
    retired finite-difference implementation, kept for reference:

    dq_scale = 1e-5 # perturbation # Older version
    latt = phonon.primitive.cell
    Reciprocal_latt = np.linalg.inv(latt).T # recprocal lattice
    dm = phonon.get_dynamical_matrix_at_q(q)
    Ns,Ns1 = np.shape(dm)
    ddm_q = np.zeros([3,Ns,Ns1],dtype=np.complex128)
    q_abs = np.matmul(q,Reciprocal_latt) # abs coord
    for i in range(3):
        dqc = np.zeros(3)
        dqc[i] = dq_scale
        dq = np.dot(latt,dqc)
        qp = q + dq
        qm = q - dq
        dmp = phonon.get_dynamical_matrix_at_q(qp)
        dmm = phonon.get_dynamical_matrix_at_q(qm)
        ddm_q[i,:,:] = (dmp-dmm)/dq_scale/2.
    return ddm_q
    """
    # Reuse phonopy's internal central-difference machinery instead of the
    # hand-rolled version above.
    groupvel = phonon._group_velocity
    return groupvel._get_dD(q)
def get_dq_dynmat_Gamma(phonon):
    """Real-space derivative dD/dq of the dynamical matrix at the Gamma point.

    Built directly from the force constants and the smallest interatomic
    vectors, since a central difference vanishes at q=0 (D(q) = D(-q)).
    Returns a complex array stacking the x, y, z derivative matrices.
    """
    fc=phonon.get_force_constants()
    # NOTE(review): Nat is the *supercell* atom count, but only the
    # primitive-cell rows/columns (via _p2s_map) are populated below, so the
    # matrices may be oversized — TODO confirm against phonopy conventions.
    Nat= phonon._dynamical_matrix._scell.get_number_of_atoms()
    scell = phonon.get_supercell()
    Cell_vec = phonon.get_supercell().get_cell()
    mass = scell.get_masses()
    R = phonon.get_supercell().get_positions()
    _p2s_map = phonon._dynamical_matrix._p2s_map
    _s2p_map = phonon._dynamical_matrix._s2p_map
    multiplicity = phonon._dynamical_matrix._multiplicity
    vecs = phonon._dynamical_matrix._smallest_vectors
    dxDymat = np.zeros((Nat*3,Nat*3))
    dyDymat = np.zeros((Nat*3,Nat*3))
    dzDymat = np.zeros((Nat*3,Nat*3))
    for i,s_i in enumerate(_p2s_map):
        for j,s_j in enumerate(_p2s_map):
            sqrt_mm = np.sqrt(mass[i] * mass[j])
            dx_local = np.zeros((3,3))
            dy_local = np.zeros((3,3))
            dz_local = np.zeros((3,3))
            # Sum over all supercell images k that map onto primitive atom j.
            for k in range(Nat):
                if s_j == _s2p_map[k]:
                    multi = multiplicity[k][i]
                    for l in range(multi):
                        vec = vecs[k][i][l] # dimensionless
                        ri_rj = np.matmul(vec,Cell_vec) # with units.
                        # Dym matrix eV/A2/AMU, [Freq]^2
                        dx_local += fc[s_i, k] * ri_rj[0]/ sqrt_mm # eV/A/AMU
                        dy_local += fc[s_i, k] * ri_rj[1]/ sqrt_mm
                        dz_local += fc[s_i, k] * ri_rj[2]/ sqrt_mm
            dxDymat[(i*3):(i*3+3), (j*3):(j*3+3)] += dx_local
            dyDymat[(i*3):(i*3+3), (j*3):(j*3+3)] += dy_local
            dzDymat[(i*3):(i*3+3), (j*3):(j*3+3)] += dz_local
    # Promote to complex so the Gamma result matches get_dq_dynmat_q's dtype.
    ddm_dq = np.array([dxDymat,dyDymat,dzDymat])+0j
    return ddm_dq
def symmetrize_gv(phonon, q, gv):
    """Symmetrize a group-velocity-like quantity over the little group of q.

    Collects the reciprocal-space operations that leave q invariant (modulo
    a reciprocal-lattice vector), rotates *gv* with each of them in
    Cartesian coordinates and returns the average.
    """
    symm = phonon.get_symmetry()  # phonopy Symmetry object
    primitive = phonon.get_primitive()
    reciprocal_lattice_inv = primitive.get_cell()
    reciprocal_lattice = np.linalg.inv(reciprocal_lattice_inv)
    rotations = []
    for r in symm.get_reciprocal_operations():
        q_in_BZ = q - np.rint(q)
        diff = q_in_BZ - np.dot(r, q_in_BZ)
        # Fix: compare |diff| against the tolerance.  Without np.abs any
        # rotation producing *negative* difference components passed the
        # check, admitting operations that do not leave q invariant.
        if (np.abs(diff) < symm.get_symmetry_tolerance()).all():
            rotations.append(r)
    gv_sym = np.zeros_like(gv)
    for r in rotations:
        r_cart = similarity_transformation(reciprocal_lattice, r)
        gv_sym += np.dot(r_cart, gv.T).T
    return gv_sym / len(rotations)
@njit
def get_Vmat_modePair_q(ddm_q,eig_s,eig_r, ws, wr, factor):# Donadio-style velocity operators.
    # Velocity-operator matrix element between modes s and r:
    # <e_s| dD/dq_i |e_r> / (2*sqrt(|ws*wr|)) for each Cartesian i.
    Ns = len(eig_s) #Natoms*3, length of the eigenvector
    # np.ascontiguousarray keeps numba's dot on the fast contiguous path.
    eig_s_conj = np.ascontiguousarray(np.conj(eig_s))
    eig_r_ = np.ascontiguousarray(eig_r)
    V_sr = np.zeros(3,dtype=np.complex128)
    for i in range(3):
        ddm_q_i = np.ascontiguousarray(ddm_q[i])
        V_sr[i]=np.dot(eig_s_conj,np.dot(ddm_q_i,eig_r_))/2/np.sqrt(np.abs(ws*wr))
    # eV/A/AMU = eV/A2/AMU * A = 2pi*factor*A
    # factor**2 converts the eV/A/AMU units into (angular) THz*A.
    V_sr = V_sr*factor**2*2*np.pi # ATHz
    return V_sr
@njit
def delta_lorentz(x, width):
    """Un-normalized Lorentzian of half-width *width* evaluated at *x*."""
    denominator = x * x + width * width
    return width / denominator
@njit
def double_lorentz(w1, w2, width1, width2):
    """Lorentzian overlap of two modes with combined broadening."""
    total_width = width1 + width2
    detuning = w1 - w2
    return total_width / (detuning * detuning + total_width * total_width)
@njit
def delta_square(x, width):
    """Boxcar delta: 1.0 inside |x| < width, 0.0 outside."""
    return 1.0 if np.abs(x) < width else 0.0
@njit(parallel=True)
def calc_Diff(freqs,eigvecs,ddm_q,LineWidth=1e-4,factor=VaspToTHz):
    # Scalar Allen-Feldman diffusivity per mode at one q-point.
    # freqs: mode frequencies (THz); column m of eigvecs belongs to freqs[m];
    # ddm_q: (3, Ns, Ns) derivative of the dynamical matrix.
    # NOTE(review): unit bookkeeping follows the inline comments below — the
    # final scaling by A2m**2*THz2Hz is the A^2*THz -> SI step; TODO confirm.
    A2m = 1e-10
    THz2Hz = 1e12
    Ns = len(freqs)
    #SV_rs = np.zeros(3,dtype=np.complex128)
    V_sr = np.zeros(3,dtype=np.complex128)
    V_rs = np.zeros(3,dtype=np.complex128)
    Vmat = np.zeros((Ns,Ns,3),dtype=np.complex128)
    # compute Vmat (upper triangle only; the lower is the conjugate)
    for s in prange(Ns):
        for r in range(s+1,Ns):
            ws = freqs[s]*2*np.pi
            eig_s = eigvecs.T[s]
            wr = freqs[r]*2*np.pi
            eig_r = eigvecs.T[r]
            V_sr = get_Vmat_modePair_q(ddm_q,eig_s,eig_r,ws,wr,factor)
            #V_sr = symmetrize_gv(phonon,q,V_sr) # symmetrize
            #V_rs = get_Vmat_modePair_q(ddm_q,eig_r,eig_s,ws,wr,factor) # anti-hermitians
            Vmat[s,r,:] = V_sr
            Vmat[r,s,:] = np.conj(V_sr)
    Diffusivity = np.zeros(Ns)
    for s in prange(Ns):
        Diff_s = 0.0
        ws = freqs[s]*2*np.pi
        for r in range(Ns):
            wr = freqs[r]*2*np.pi
            # Lorentzian broadening replaces the delta(ws - wr) resonance.
            tau_sr = delta_lorentz(ws-wr,LineWidth) # THz^-1
            #SV_sr = np.zeros(3,dtype=np.complex128)
            Diff_s += np.dot(Vmat[s,r,:],Vmat[r,s,:]).real*tau_sr*np.pi/3 #A^2THz^2*THz-1 = A^2THz
        Diffusivity[s] = Diff_s*(A2m**2*THz2Hz) #A^2THz^4/THz^2*THz-1 = A^2THz
    return Diffusivity,Vmat
@njit(parallel=True)
def calc_Diff_tensor(freqs,eigvecs,ddm_q,LineWidth=1e-4,factor=VaspToTHz):
    # Tensor (3x3) variant of calc_Diff: per-mode Allen-Feldman diffusivity
    # resolved into Cartesian components instead of the 1/3 trace.
    A2m = 1e-10
    THz2Hz = 1e12
    Ns = len(freqs)
    #SV_rs = np.zeros(3,dtype=np.complex128)
    V_sr = np.zeros(3,dtype=np.complex128)
    V_rs = np.zeros(3,dtype=np.complex128)
    Vmat = np.zeros((Ns,Ns,3),dtype=np.complex128)
    # compute Vmat (upper triangle only; the lower is the conjugate)
    for s in prange(Ns):
        for r in range(s+1,Ns):
            ws = freqs[s]*2*np.pi
            eig_s = eigvecs.T[s]
            wr = freqs[r]*2*np.pi
            eig_r = eigvecs.T[r]
            V_sr = get_Vmat_modePair_q(ddm_q,eig_s,eig_r,ws,wr,factor)
            #V_sr = symmetrize_gv(phonon,q,V_sr) # symmetrize
            #V_rs = get_Vmat_modePair_q(ddm_q,eig_r,eig_s,ws,wr,factor) # anti-hermitians
            Vmat[s,r,:] = V_sr
            Vmat[r,s,:] = np.conj(V_sr)
    Diffusivity = np.zeros((Ns,3,3))
    for s in prange(Ns):
        Diff_s = np.zeros((3,3))
        ws = freqs[s]*2*np.pi
        for r in range(Ns):
            wr = freqs[r]*2*np.pi
            tau_sr = delta_lorentz(ws-wr,LineWidth) # THz^-1
            #SV_sr = np.zeros(3,dtype=np.complex128)
            # Outer product of velocity components gives the ij tensor entry.
            for i in range(3):
                for j in range(3):
                    Diff_s[i,j]+= np.real(Vmat[s,r,i]*Vmat[r,s,j])*tau_sr*np.pi*(A2m**2*THz2Hz)
        Diffusivity[s] = Diff_s #A^2THz^4/THz^2*THz-1 = A^2THz
    return Diffusivity,Vmat
def AF_diffusivity_q(phonon, q, LineWidth=1e-4, factor=VaspToTHz, if_tensor=False):
    """Compute Allen-Feldman mode diffusivities at wave vector q.

    Diagonalizes the dynamical matrix, builds dD/dq (real-space sum at
    Gamma, phonopy group-velocity machinery elsewhere) and evaluates the
    per-mode diffusivity — scalar or 3x3 tensor depending on *if_tensor* —
    then symmetrizes each mode's result over the little group of q.

    Fix: the symmetrization loop called ``enumerte`` (misspelled
    ``enumerate``), which raised NameError on every invocation.
    """
    dm = phonon.get_dynamical_matrix_at_q(q)
    eigvals, eigvecs = np.linalg.eigh(dm)
    eigvals = eigvals.real
    # Imaginary modes come out as negative frequencies via the sign trick.
    freqs = np.sqrt(np.abs(eigvals)) * np.sign(eigvals) * factor
    if np.linalg.norm(q) < 1e-6:
        # Central differences are zero at Gamma because D(q) = D(-q), so the
        # derivative is built from the force constants directly.
        ddm_q = get_dq_dynmat_Gamma(phonon)
    else:
        ddms = get_dq_dynmat_q(phonon, q)
        ddm_q = ddms[1:, :, :]
    if if_tensor:
        Diffusivity, Vmat = calc_Diff_tensor(freqs, eigvecs, ddm_q, LineWidth, factor)
    else:
        Diffusivity, Vmat = calc_Diff(freqs, eigvecs, ddm_q, LineWidth, factor)
    # NOTE(review): symmetrize_gv appears written for array-valued modes;
    # the scalar (if_tensor=False) path hands it a 0-d value — TODO confirm.
    for n, diff_mode in enumerate(Diffusivity):
        diff_n = symmetrize_gv(phonon, q, diff_mode)  # symmetrize tensor
        Diffusivity[n] = diff_n
    return Diffusivity, Vmat
|
983,953 | 02fabb8197273d50ecf466b5f0d32c4ab0e33de9 | from django.views.generic import ListView, DetailView
from bookmark.models import Bookmark
class BookmarkLV(ListView):
    """List view over Bookmark records, mirroring Django's BaseListView.get()
    including the ``allow_empty`` guard."""
    model = Bookmark

    def get(self, request, *args, **kwargs):
        # Imported locally so the fix is self-contained: the original body
        # referenced Http404 and the translation alias ``_`` without any
        # import, raising NameError whenever the empty-list branch ran.
        from django.http import Http404
        from django.utils.translation import gettext as _

        self.object_list = self.get_queryset()
        allow_empty = self.get_allow_empty()
        if not allow_empty:
            # When pagination is enabled and object_list is a queryset,
            # it's better to do a cheap query than to load the unpaginated
            # queryset in memory.
            if self.get_paginate_by(self.object_list) is not None and hasattr(self.object_list, 'exists'):
                is_empty = not self.object_list.exists()
            else:
                is_empty = not self.object_list
            if is_empty:
                raise Http404(_('Empty list and “%(class_name)s.allow_empty” is False.') % {
                    'class_name': self.__class__.__name__,
                })
        context = self.get_context_data()
        return self.render_to_response(context)
class BookmarkDV(DetailView):
    """Detail view for a single Bookmark (default template and context)."""
    model = Bookmark
|
983,954 | b8597786828502f86ab0c8a990ce94d32a857b86 | # -*- coding: latin-1 -*-
__author__ = 'kosttek'
from logmetric import SequenceMatcher
import re
import datetime
from databaseSchema import Tag, RawLog, CompressedLog
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
import databaseSchema
class CompressLog():
    '''Groups Android logcat lines by tag and compresses near-duplicate
    messages into templates: [tag][log] -> compressed log records.'''
    # Minimum SequenceMatcher ratio for two logs to be merged into one
    # compressed template.
    compare_ration = 0.5
    def __init__(self):
        self.taglist=list()
    def add(self,rawlog):
        # Parse one raw logcat line; merge it into an existing compressed
        # template of the same tag, or start a new one.
        [tag_str,log,date] = self.parseLog(rawlog)
        #if rawlog is not log but kind of --------begining of /dev/logsystem
        if tag_str == None:
            return
        #raw logs alre without numbers
        #log = self.removeNumsAfterEqualityChar(log)
        log = self.removeNumsBiggerThanTwoDigits(log)
        log = log.replace(",", " ")
        #find tag
        tag_obj = self.checkIfTagExistAndReturn(tag_str)
        if tag_obj == None:
            tag_obj = self.createNewTag(tag_str)
        # check logs for same
        for key_compressedlog in tag_obj.compressedlogs:
            key = key_compressedlog.clogname
            seq = SequenceMatcher(key,log)
            if seq.ratio() >= CompressLog.compare_ration:
                isSame = key_compressedlog.compareAndAddDifferences(seq.different_words)
                if isSame :
                    # Mask the differing words so the template matches both.
                    key_compressedlog.clogname = self.replaceDiffWordsWithSigns(log,seq.different_words,"@@@")
                    key_compressedlog.rawlogs.append(RawLog(log=log,date=date))
                    return # end of comparing
        # if not found same create new log
        new_clog = CompressedLog(log)
        new_clog.rawlogs.append(RawLog(log=log,date=date))
        tag_obj.compressedlogs.append(new_clog)
    # Returns the Tag object for *tag*, or None if it does not exist yet.
    def checkIfTagExistAndReturn(self,tag):
        for tag_obj in self.taglist:
            if tag_obj.tagname == tag:
                return tag_obj
        return None
    def createNewTag(self,tag):
        # Register and return a fresh Tag record.
        tag_obj = Tag(tag)
        self.taglist.append(tag_obj)
        return tag_obj
    def replaceDiffWordsWithSigns(self,log,diffset,sign):
        # Replace the words at the indices in *diffset* with *sign*.
        splitedlog = log.split(" ")
        for index in diffset:
            splitedlog[index] = sign
        return " ".join(splitedlog)
    def parseLog(self,log_):
        '''
        Split one logcat line into [tag, message, datetime], e.g.:
        03-28 15:43:19.225 W/ActivityManager( 341): Unable to start service Intent { cmp=com.aware/.Applications }: not found
        Returns [None, None, None] for non-log lines (e.g. buffer headers).
        '''
        log_words_tag = log_.split(":",2)
        #remorve begining buffers token
        if len(log_words_tag) == 1:
            return [None,None,None]
        log = log_words_tag[2][log_words_tag[2].index(')')+2:].rstrip('\n').rstrip('\r')
        sec_and_tag = log_words_tag[2].split(' ',1);
        tag_temp = sec_and_tag[1]
        tag = tag_temp[:tag_temp.index('(')].rstrip(" ")
        date = self.getDate(log_words_tag[0],log_words_tag[1],sec_and_tag[0])
        #decode to utf-8
        tag_out = tag.decode('utf-8')
        log_out = log.decode('utf-8')
        return [tag_out, log_out, date]
    # Logcat lines carry no year, so one is assumed for all parsed dates.
    year = 2014
    def getDate(self,date_hour, minutes, sec_milisec):
        '''
        Build a datetime from the logcat timestamp pieces, e.g.
        03-28 15:43:19.225  ->  ['03-28 15', '43', '19.225']
        '''
        year = CompressLog.year
        [month_day,hour]=date_hour.split(" ")
        [month,day]=month_day.split("-")
        [sec,milisec]=sec_milisec.split(".")
        # NOTE(review): the millisecond field is passed straight into the
        # microsecond argument of datetime ("225" ms -> 225 us, not
        # 225000 us) — TODO confirm whether that offset matters downstream.
        return datetime.datetime(year,int(month),int(day),int(hour),int(minutes),int(sec),int(milisec))
    def removeNumsAfterEqualityChar(self,log):
        '''replace every number witch start with '=' sign by '='(secound argument) or '|' all brackets {} with numbers in it separated by commma ',' '''
        result = re.sub("=(\d+)|=\{([0-9, ])+\}","=",log)
        return result
    def removeNumsBiggerThanTwoDigits(self, log):
        # Strip multi-digit numbers and numeric {...}/=[...] payloads so
        # variable values do not defeat template matching.
        result = re.sub("(\d\d+)|\{([0-9, ])+\}|=\[([0-9, ])+\]","",log)
        return result
    def parse_logs_to_database(self, log_file_path, database_file_path):
        # Compress every line of *log_file_path* and persist the resulting
        # tag/compressed-log tree into an SQLite database via SQLAlchemy.
        log_file = open(log_file_path)
        for log_line in log_file:
            self.add(log_line)
        engine = create_engine('sqlite:///'+database_file_path, echo=True)
        Session = sessionmaker(bind=engine)
        session = Session()
        databaseSchema.Base.metadata.create_all(engine)
        for tag in self.taglist:
            session.add(tag)
        session.commit()
if __name__ == "__main__":
    # Ad-hoc smoke test (Python 2): compress the first 10k logcat lines,
    # dump the resulting templates, then persist them to a test database.
    compres = CompressLog()
    f = open("../logs/logcatlogs",'r')
    for i in range(0,10000):
        line = f.readline()
        compres.add(line)
    for tag in compres.taglist:
        print tag
        for clog in tag.compressedlogs:
            print " ",len(clog.rawlogs), clog.diffwords, clog.clogname
    engine = create_engine('sqlite:///testdb/testlogs.db', echo=True)
    Session = sessionmaker(bind=engine)
    session = Session()
    databaseSchema.Base.metadata.create_all(engine)
    for tag in compres.taglist:
        session.add(tag)
    session.commit()
983,955 | 2daa69b1b21a009280a6f7d06b26043151d39a02 | from tkinter import *
import webbrowser
def open_graven_channel():
    """Open the BANG! rules page (French Wikipedia) in the default browser."""
    rules_url = "https://fr.wikipedia.org/wiki/Bang!_(jeu_de_cartes)"
    webbrowser.open_new(rules_url)
#Creer une premiere fenetre
window = Tk()

#personnaliser cette fenetre
window.title("BANG!")
window.geometry("720x480")
window.minsize(480, 360)
# Fix: the file contained unresolved git merge-conflict markers
# (<<<<<<< HEAD / ======= / >>>>>>>), which are a SyntaxError.  The branch
# kept here is the one the rest of the script depends on — it creates
# `frame` and uses the '#F09316' palette referenced below.
# NOTE(review): hard-coded absolute path; fails on any other machine.
window.iconbitmap("c:/Users/Paul/Desktop/YDAYS/Algo&Jeu de société/BANG!/ui/logo.ico")
window.config(background='#F09316')

#creer la frame
frame = Frame(window, bg='#F09316')

#ajouter un premier texte
label_title = Label(frame, text="Bienvenue sur le jeu BANG !", font=("Western Bang Bang", 40), bg='#F09316', fg='white')
label_title.pack()

#ajouter un second texte
label_subtitle = Label(frame, text="Créé par Gaëtan Roux & Paul Vigneron", font=("Western Bang Bang", 25), bg='#F09316', fg='white')
label_subtitle.pack()

#ajouter un premier bouton
rules_button = Button(frame, text="Voir les règles du jeu", font=("Western Bang Bang", 20), bg='white', fg='#F09316', command=open_graven_channel)
rules_button.pack(pady=25, fill=X)

#création image
width = 300
height = 300
# NOTE(review): tkinter's PhotoImage cannot read JPEG; this needs a
# GIF/PNG asset or Pillow's ImageTk — left pending an asset change.
image = PhotoImage(file="C:/Users/Paul/Desktop/YDAYS/Algo&Jeu de société/BANG!/ui/photo.jpg")
# Fix: the leading '#' was missing from the hex colour, which tkinter rejects.
canvas = Canvas(window, width=width, height=height, bg='#F09316')
canvas.create_image(width/2, height/2, image=image)
#canvas.pack(expand=YES)

#ajouter
frame.pack(expand=YES)

#afficher
window.mainloop()
983,956 | d8254cfd40b27933e2b3686f3855e769bf7a7c9e | # 在一个火车旅行很受欢迎的国度,你提前一年计划了一些火车旅行。在接下来的一年里,你要旅行的日子将以一个名为 days 的数组给出。每一项是一个从 1 到 365 的整数。
#
# 火车票有三种不同的销售方式:
#
# 一张为期一天的通行证售价为 costs[0] 美元;
# 一张为期七天的通行证售价为 costs[1] 美元;
# 一张为期三十天的通行证售价为 costs[2] 美元。
# 通行证允许数天无限制的旅行。 例如,如果我们在第 2 天获得一张为期 7 天的通行证,那么我们可以连着旅行 7 天:第 2 天、第 3 天、第 4 天、第 5 天、第 6 天、第 7 天和第 8 天。
#
# 返回你想要完成在给定的列表 days 中列出的每一天的旅行所需要的最低消费。
#
#
#
# 示例 1:
#
# 输入:days = [1,4,6,7,8,20], costs = [2,7,15]
# 输出:11
# 解释:
# 例如,这里有一种购买通行证的方法,可以让你完成你的旅行计划:
# 在第 1 天,你花了 costs[0] = $2 买了一张为期 1 天的通行证,它将在第 1 天生效。
# 在第 3 天,你花了 costs[1] = $7 买了一张为期 7 天的通行证,它将在第 3, 4, ..., 9 天生效。
# 在第 20 天,你花了 costs[0] = $2 买了一张为期 1 天的通行证,它将在第 20 天生效。
# 你总共花了 $11,并完成了你计划的每一天旅行。
# 示例 2:
#
# 输入:days = [1,2,3,4,5,6,7,8,9,10,30,31], costs = [2,7,15]
# 输出:17
# 解释:
# 例如,这里有一种购买通行证的方法,可以让你完成你的旅行计划:
# 在第 1 天,你花了 costs[2] = $15 买了一张为期 30 天的通行证,它将在第 1, 2, ..., 30 天生效。
# 在第 31 天,你花了 costs[0] = $2 买了一张为期 1 天的通行证,它将在第 31 天生效。
# 你总共花了 $17,并完成了你计划的每一天旅行。
#
#
# 提示:
#
# 1 <= days.length <= 365
# 1 <= days[i] <= 365
# days 按顺序严格递增
# costs.length == 3
# 1 <= costs[i] <= 1000
#
# 来源:力扣(LeetCode)
# 链接:https://leetcode-cn.com/problems/minimum-cost-for-tickets
# 著作权归领扣网络所有。商业转载请联系官方授权,非商业转载请注明出处。
from typing import List
class Solution:
    def mincostTickets(self, days: List[int], costs: List[int]) -> int:
        """Minimum cost to cover every travel day with 1/7/30-day passes.

        days  -- strictly increasing travel days in [1, 365]
        costs -- [1-day, 7-day, 30-day] pass prices
        Classic DP over calendar days: dp[i] is the cheapest cover for all
        travel days <= i.
        """
        # Set membership is O(1); the original `i in days` scanned the list
        # on every calendar day (O(n) per day).
        travel_days = set(days)
        n = days[-1]
        dp = [0] * (n + 1)
        # Start at day 1; dp[0] = 0 is the correct base (also avoids the
        # original's dp[-1] wrap-around read at i == 0).
        for i in range(1, n + 1):
            if i not in travel_days:
                dp[i] = dp[i - 1]       # no travel: cost carries over
            elif i < 7:
                dp[i] = min(dp[i - 1] + costs[0], costs[1], costs[2])
            elif i < 30:
                dp[i] = min(dp[i - 1] + costs[0], dp[i - 7] + costs[1], costs[2])
            else:
                dp[i] = min(dp[i - 1] + costs[0], dp[i - 7] + costs[1], dp[i - 30] + costs[2])
        return dp[n]
if __name__ == '__main__':
    # Smoke test: LeetCode example 1 — expected output 11.
    s = Solution()
    print(s.mincostTickets([1, 4, 6, 7, 8, 20], [2, 7, 15]))
|
983,957 | 7c0c55fa1f7fcd620d5eb5293131aaa642bc7884 | from django.apps import AppConfig
class RunlistsConfig(AppConfig):
    """Django app configuration for the ``runlists`` application."""
    name = 'runlists'
|
983,958 | 8b35d7492ffa43dfbfa9e49ba74f4ac9ae059da0 |
from read import read
from pre_processing import pre_process
from apr_rg import rule_generator
from apr_cb_m1 import classifier_builder_m1
from apr_cb_m1 import is_satisfy
from functools import cmp_to_key
import time
import random
def sort_dict(val):
    """Return the rules in *val* as a list ordered by their cond_set keys.

    Rules are compared key-by-key over the attribute indices of
    ``cond_set``; ties deliberately report "greater", matching the
    original comparator's behavior.
    """
    def _compare(first, second):
        left_keys = list(first.cond_set.keys())
        right_keys = list(second.cond_set.keys())
        for i in range(len(left_keys)):
            if left_keys[i] > right_keys[i]:
                return 1
            if left_keys[i] < right_keys[i]:
                return -1
        return 1
    return sorted(val, key=cmp_to_key(_compare))
# calculate the error rate of the classifier on the dataset
def get_error_rate(classifier, dataset):
    """Fraction of cases misclassified by the default class.

    A case counts as an error only when no rule fires (is_satisfy returned
    exactly False for the last rule tried) and the default class differs
    from the case's true label.  ``== True`` / ``== False`` comparisons are
    kept on purpose: is_satisfy may return a third (non-bool) value that
    must match neither branch.
    """
    total = len(dataset)
    mistakes = 0
    for case in dataset:
        verdict = False
        for rule in classifier.rule_list:
            verdict = is_satisfy(case, rule)
            if verdict == True:
                break
        if verdict == False:
            if classifier.default_class != case[-1]:
                mistakes += 1
    return mistakes / total
def acc(apr, test):
    """Classification accuracy of classifier *apr* on dataset *test*.

    Each case is labelled by the first rule whose entire cond_set matches;
    cases matched by no rule fall back to the default class.  The last
    column of each case is the true label.
    """
    predictions = []
    actual = [row[-1] for row in test]
    correct = 0
    for i in range(len(test)):
        unmatched = True
        for rule in apr.rule_list:
            applies = True
            for attr in rule.cond_set:
                if test[i][attr] != rule.cond_set[attr]:
                    applies = False
                    break
            if applies:
                predictions.append(rule.class_label)
                if predictions[-1] == actual[i]:
                    correct += 1
                unmatched = False
                break
        if unmatched:
            predictions.append(apr.default_class)
            if predictions[-1] == actual[i]:
                correct += 1
    return correct / len(test)
# 10-fold cross-validations on apr
def cross_validate_m1_without_prune(data_path, scheme_path, class_first=False, minsup=0.1, minconf=0.6):
    """10-fold cross-validation of the M1 classifier builder (no pruning).

    data_path / scheme_path -- dataset and schema files consumed by read();
    class_first             -- rotate the class attribute from the first
                               column to the last before preprocessing;
    minsup / minconf        -- thresholds forwarded to the rule generator.

    Fix: the original accumulated ``apr_rg_runtime`` into the fold total
    twice per fold, inflating the reported average RG time by 2x.  It also
    shadowed the builtin ``max``; renamed to ``max_len``.
    """
    data, attributes, value_type = read(data_path, scheme_path)
    if class_first:
        # Move the class column (and its metadata) from front to back.
        for i in range(len(data)):
            a = data[i].pop(0)
            data[i].append(a)
        a = attributes.pop(0)
        attributes.append(a)
        b = value_type.pop(0)
        value_type.append(b)
    random.shuffle(data)
    dataset = pre_process(data, attributes, value_type)
    block_size = int(len(dataset) / 10)
    split_point = [k * block_size for k in range(0, 10)]
    split_point.append(len(dataset))
    apr_rg_total_runtime = 0
    apr_cb_total_runtime = 0
    total_car_number = 0
    total_classifier_rule_num = 0
    error_total_rate = 0
    acc_total = 0
    for k in range(len(split_point) - 1):
        print("\nRound %d:" % k)
        training_dataset = dataset[:split_point[k]] + dataset[split_point[k+1]:]
        test_dataset = dataset[split_point[k]:split_point[k+1]]
        start_time = time.time()
        cars = rule_generator(training_dataset, minsup, minconf)
        end_time = time.time()
        apr_rg_runtime = end_time - start_time
        apr_rg_total_runtime += apr_rg_runtime  # counted exactly once per fold
        # Bucket the CARs by antecedent length, then sort each bucket by its
        # attribute indices so the classifier builder sees a stable order.
        arr = list(cars.rules_list)
        max_len = -1
        for i in range(len(arr)):
            if len(arr[i].cond_set) > max_len:
                max_len = len(arr[i].cond_set)
        T = [[] for i in range(max_len)]
        for i in range(len(arr)):
            T[len(arr[i].cond_set) - 1].append(arr[i])
        u = []
        for i in range(len(T)):
            T[i] = sort_dict(T[i])
            for j in T[i]:
                u.append(j)
        start_time = time.time()
        classifier = classifier_builder_m1(cars, training_dataset, minsup, len(training_dataset), u)
        end_time = time.time()
        # NOTE(review): the per-fold builder time is divided by 10 here AND
        # the total is divided by 10 again when averaging — TODO confirm
        # which is intended; original behavior preserved.
        apr_cb_runtime = (end_time - start_time) / 10
        apr_cb_total_runtime += apr_cb_runtime
        classifier.print()
        res = acc(classifier, test_dataset)
        acc_total += res
        error_rate = get_error_rate(classifier, test_dataset)
        error_total_rate += error_rate
        total_car_number += len(cars.rules)
        total_classifier_rule_num += len(classifier.rule_list)
        print("accuracy:", (res * 100))
        print("No. of CARs : ", len(cars.rules_list))
        print("apr-RG's run time : s", apr_rg_runtime)
        print("apr-CB run time : s", apr_cb_runtime)
        print("No. of rules in classifier of apr: ", len(classifier.rule_list))
    print("\n Average APR's accuracy :", (acc_total / 10 * 100))
    print("Average No. of CARs : ", (total_car_number / 10))
    print("Average apr-RG's run time : ", (apr_rg_total_runtime / 10))
    print("Average apr-CB run time : ", (apr_cb_total_runtime / 10))
    print("Average No. of rules in classifier of apr: ", (total_classifier_rule_num / 10))
if __name__ == "__main__":
    # using the relative path, all data sets are stored in datasets directory
    test_data_path = 'Dataset/ASD.data'
    test_scheme_path = 'Dataset/ASD.names'
    # just choose one mode to experiment by removing one line comment and running
    min_support=0.2
    min_conf=0.7
    is_class_first=False  # set True when the class label is the first column
    cross_validate_m1_without_prune(test_data_path, test_scheme_path,is_class_first,min_support,min_conf)
|
983,959 | 59c9070a257c46bcfcbdba0898f4816cbc2ad430 | import frappe
import json
import frappe.utils
from frappe import _
# from erpnext.selling.doctype.customer.customer import get_customer_outstanding
@frappe.whitelist(allow_guest=True)
def get_customer_credit_limit_with_oustanding(so):
    # Return 1 when the sales order fits within the customer's remaining
    # credit (credit limit minus outstanding amount), 0 otherwise.
    # NOTE(review): get_customer_outstanding's import (above) is commented
    # out, so calling this raises NameError as-is — TODO confirm/restore.
    sales_order=frappe.get_doc("Sales Order",so)
    cust=frappe.get_doc("Customer",sales_order.customer)
    credit_limit= cust.credit_limit
    name=cust.name
    company=sales_order.company
    outstanding_amount = get_customer_outstanding(name, company)
    print "Outstangiing Amount",outstanding_amount
    print"outstanding is", get_customer_outstanding(name, company)
    print "Credit Limit is",credit_limit
    available_amount=credit_limit-outstanding_amount
    print "available_amount",available_amount
    if sales_order.grand_total>available_amount:
        # Order exceeds available credit.
        print "Outstanding"
        return 0
    else:
        print "No Outstanding"
        return 1
@frappe.whitelist()
def create_sal_slip(doc):
    """
    Creates salary slip for selected employees if already not created
    """
    # NOTE(review): the slip-creation body below is entirely commented out,
    # so this currently only resolves the employee list (with debug prints)
    # and returns None — TODO confirm before relying on it.
    doc1=json.loads(doc)
    print "doc is ",doc
    print "***********************", doc1.get("company")
    pp=frappe.get_doc("Process Payroll",doc1.get('name'))
    print "----------------",pp
    emp_list=pp.get_emp_list()
    # emp_list = []
    print "empppppppppppppppppppppppppppp", emp_list
    ss_list = []
    for emp in emp_list:
        employee=frappe.get_doc("Employee",emp[0])
        print "Emp$$$$$$$$$$$$$$$$$$$$$$$$",emp[0]
        # if employee.esi_ip_number:
        # print "ESI IP",employee.esi_ip_number
        # if not frappe.db.sql("""select name from `tabSalary Slip`
        # where docstatus!= 2 and employee = %s and month = %s and fiscal_year = %s and company = %s
        # """, (emp[0], doc1.get('month'), doc1.get('fiscal_year'), doc1.get('company')):
        # ss = frappe.get_doc({
        # "doctype": "Salary Slip",
        # "fiscal_year": doc.fiscal_year,
        # "employee": emp[0],
        # "month": doc.month,
        # "company": doc.get("company"),
        # "esi_ip_number":employee.esi_ip_number,
        # "pan":employee.pan
        # # "epfo_pf_account_number":emp[0].epfo_pf_account_number,
        # # "esi_ip_number":emp[0].esi_ip_number,
        # # "pan":e[0].pan
        # })
        # # print "employee",emp[0].employee_name
        # ss.insert()
        # ss_list.append(ss.name)
    # return doc.create_log(ss_list)
def customer_validation(doc,method):
    # Customer doctype hook: forbid Distributer-role users from creating
    # Distributer/Super Stockist customers, and auto-create a matching
    # Company for every new Distributer customer.
    roles=frappe.get_roles(frappe.session.user)
    if "Distributer" in roles:
        if doc.customer_group=="Distributer" or doc.customer_group=="Super Stockist":
            frappe.throw(_("You can not create a Distributor or Super Stockist"))
    if doc.customer_group=="Distributer":
        company_check=frappe.db.get_value("Company",{"company_name":doc.customer_name},"company_name")
        if not company_check:
            # Mirror the customer as a Company; abbreviation is the first
            # five characters of the customer name.
            company=frappe.new_doc("Company")
            company.company_name=doc.customer_name
            company.abbr=doc.customer_name[0:5]
            company.default_currency="INR"
            company.save()
def delivery_note_submit(doc,method):
    # Delivery Note on_submit hook: when delivering to a Distributer
    # customer, mirror the shipped items as a Material Receipt stock entry
    # in the distributor's own company.
    customer=frappe.get_doc("Customer",doc.customer)
    if customer.customer_group=="Distributer":
        se=frappe.new_doc("Stock Entry")
        se.purpose="Material Receipt"
        se.posting_date=frappe.utils.nowdate()
        se.posting_time=frappe.utils.nowtime()
        se.company=customer.customer_name
        # se.from_warehouse="Finished Goods"+ " - " + customer.customer_name[5]
        # se.from_warehouse = "Stores - GIPL"
        # NOTE(review): from_warehouse is never assigned (both candidates
        # are commented out), so this prints an unset value — TODO confirm.
        print "Warehouse is",se.from_warehouse
        for raw in doc.get("items"):
            se_items = se.append('items', {})
            se_items.item_code=raw.item_code
            se_items.qty=raw.qty
            se_items.uom=raw.stock_uom
            # Target warehouse/cost center derive from the 5-char company abbr.
            se_items.t_warehouse="Finished Goods" + " " + "-" + " " + doc.customer_name[0:5]
            se_items.cost_center="Main" + " " + "-" + " " + doc.customer_name[0:5]
        print ("se is",se)
        se.save()
        se.submit()
# def leaveapplication_submit(doc,method):
# roles=frappe.get_roles(frappe.session.user)
# if "HR Manager" in roles:
# index=1
# try:
# index==1
# except LeaveApproverIdentityError:
# pass
|
983,960 | 707b4f06b272c66143f3e52888a794fd1c4e8a8e | import pyautogui as pg
import time
# NOTE(review): this script auto-types an abusive message into whatever
# window has focus — clearly a prank/spam script; do not run unattended.
time.sleep(3)  # grace period to focus the target input field
strr = 'youre a piece of shit you fucking shit ass dick'
for i in range(0,10):
    # One word per keystroke burst, each followed by Enter; 10 passes total.
    for msg in strr.split(" "):
        pg.typewrite(msg + '\n')
983,961 | f0a9efdefdb1424a8542a8c075c27122cdbd4fc0 | import logging
import pathlib
import random
import re
import sys
import time
from concurrent.futures import ThreadPoolExecutor, as_completed
from http.client import responses
from ipaddress import AddressValueError, IPv4Address, IPv4Network, ip_address
import coloredlogs
import dns.resolver
import requests
import verboselogs
import whois
from tqdm import tqdm
from core.feeds import (CFLARE_IPS, DNSBL_LISTS, DOM_LISTS, IP_BLOCKS,
IP_LISTS, SPAMHAUS_DOM, SPAMHAUS_IP)
logger = verboselogs.VerboseLogger(__name__)
logger.setLevel(logging.INFO)
coloredlogs.install(
level=None,
logger=logger,
fmt="%(message)s",
level_styles={
"notice": {"color": "black", "bright": True},
"warning": {"color": "yellow"},
"success": {"color": "green", "bold": True},
"error": {"color": "red"},
},
)
class Helpers:
    """Stateless utilities: regex factories, randomized request headers and
    a (legacy) streaming file downloader."""

    # ---[ Regex Parser ]---
    @staticmethod
    def regex(_type):
        """Return a compiled pattern for one of: ip_addr, ip_net, domain,
        email, url.

        Exits with a message on an unknown ``_type`` or an uncompilable
        pattern.  Fix: the original only caught ``re.error``, so an unknown
        key escaped as an unhandled KeyError traceback.
        """
        # ref: http://data.iana.org/TLD/tlds-alpha-by-domain.txt
        dir_path = pathlib.Path(__file__).parent
        with open(dir_path / "tlds.txt", "r") as f:
            tlds = f.read()
        # NOTE(review): the raw file contents are interpolated into the
        # domain pattern; this assumes tlds.txt is already a regex
        # alternation (e.g. "com|net|org") — TODO confirm.
        patterns = dict(
            ip_addr=r"(^(\d{1,3}\.){0,3}\d{1,3}$)",
            ip_net=r"(^(\d{1,3}\.){0,3}\d{1,3}/\d{1,2}$)",
            domain=r"([A-Za-z0-9]+(?:[\-|\.][A-Za-z0-9]+)*(?:\[\.\]|\.)(?:{}))".format(tlds),
            email=r"(^[a-zA-Z0-9_.+-]+@[a-zA-Z0-9-]+\.[a-zA-Z0-9-.]{2,5}$)",
            url=r"(http[s]?:\/\/(?:[a-zA-Z]|[0-9]|[$-_@.&+]|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+)",
        )
        try:
            compiled = re.compile(patterns[_type])
        except (KeyError, re.error):
            print("[!] Invalid input specified.")
            sys.exit(0)
        return compiled

    # ---[ Common User-Agents ]---
    @staticmethod
    def headers():
        """Return a requests-style headers dict with a random User-Agent."""
        ua_list = [
            "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/42.0.2311.135 "
            "Safari/537.36 Edge/12.246",
            "Mozilla/5.0 (Windows NT 10.0; WOW64; rv:40.0) Gecko/20100101 Firefox/43.0",
            "Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/63.0.3239.84 "
            "Safari/537.36",
            "Mozilla/5.0 (X11; Linux i686; rv:30.0) Gecko/20100101 Firefox/42.0",
            "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_2) AppleWebKit/537.36 (KHTML, like Gecko) "
            "Chrome/40.0.2214.38 Safari/537.36",
            "Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.0)",
        ]
        use_headers = {"user-agent": random.choice(ua_list)}
        return use_headers

    # ---[ File Downloader NO LONGER USED ]---
    @staticmethod
    def download_file(url):
        """Stream *url* into a local file named after its last path segment,
        with a byte-accurate progress bar.

        Fixes vs. the original:
        * ``requests.get(url, local_file, stream=True)`` passed the file
          name as the ``params`` argument — dropped.
        * the tqdm bar both wrapped the chunk iterator and called
          ``update(len(data))``, double-counting progress; bytes are now
          counted exactly once.
        * the content-length header is only read on a 200 response, so
          error responses without it no longer raise KeyError.
        """
        local_file = url.split("/")[-1]
        try:
            resp = requests.get(url, stream=True)
            if resp.status_code == 403:
                logger.info(responses[403])
                sys.exit()
            elif resp.status_code == 200:
                size = int(resp.headers["content-length"])
                with tqdm(total=size, unit="B", unit_scale=True, unit_divisor=1024) as pbar:
                    with open(local_file, "wb") as f:
                        for data in resp.iter_content(chunk_size=1024):
                            f.write(data)
                            pbar.update(len(data))
            else:
                logger.info((resp.status_code, responses[resp.status_code]))
                sys.exit()
        except requests.exceptions.Timeout:
            logger.notice(f"[timeout] {url}")
        except requests.exceptions.HTTPError as err:
            logger.error(f"[error] {err}")
        except requests.exceptions.ConnectionError as err:
            logger.error(f"[error] {err}")
        except requests.exceptions.RequestException as err:
            logger.critical(f"[critical] {err}")
# ---[ Helper objects ]---
# Module-level singleton plus the pre-compiled patterns shared by the CLI.
helpers = Helpers()
IP = helpers.regex(_type="ip_addr")      # single IPv4 address
NET = helpers.regex(_type="ip_net")      # IPv4 network in CIDR form
DOMAIN = helpers.regex(_type="domain")   # bare domain (supports [.] defanging)
URL = helpers.regex(_type="url")         # http/https URL
EMAIL = helpers.regex(_type="email")     # e-mail address
class Workers:
    """Runs one query (IP address or domain) against DNSBLs, HTTP-hosted
    blacklists, whois and Team Cymru's malware-hash service."""
    def __init__(self, QRY):
        # QRY: the IP address or domain under investigation.
        self.query = QRY
        self.DNSBL_MATCHES = 0   # hits across DNS-based blacklists
        self.BL_MATCHES = 0      # hits across HTTP-hosted blacklists
    # ---[ Query DNSBL Lists ]---
    def dnsbl_query(self, blacklist):
        """Look up the query against one DNSBL zone and count a match when
        the answer is one of the documented listing return codes."""
        host = str("".join(self.query))
        # Return Codes
        codes = [
            "127.0.0.2",
            "127.0.0.3",
            "127.0.0.4",
            "127.0.0.5",
            "127.0.0.6",
            "127.0.0.7",
            "127.0.0.9",
            "127.0.1.4",
            "127.0.1.5",
            "127.0.1.6",
            "127.0.0.10",
            "127.0.0.11",
            "127.0.0.39",
            "127.0.1.103",
            "127.0.1.104",
            "127.0.1.105",
            "127.0.1.106",
        ]
        try:
            resolver = dns.resolver.Resolver()
            resolver.timeout = 3
            resolver.lifetime = 3
            qry = ""
            if helpers.regex(_type="ip_addr").findall(self.query):
                # NOTE(review): str.strip removes a *character set*, not a
                # suffix; it works here only because digits are not in the
                # stripped set — removesuffix would be the robust form.
                qry = ip_address(host).reverse_pointer.strip(".in-addr.arpa") + "." + blacklist
            elif helpers.regex(_type="domain").findall(self.query):
                qry = ".".join(str(host).split(".")) + "." + blacklist
            # NOTE(review): resolver.query is deprecated in dnspython 2.x in
            # favor of resolver.resolve — TODO confirm pinned version.
            answer = resolver.query(qry, "A")
            if any(str(answer[0]) in s for s in codes):
                logger.info(f"\u2716 {self.query} --> {blacklist}")
                self.DNSBL_MATCHES += 1
        except (dns.resolver.NXDOMAIN, dns.resolver.Timeout, dns.resolver.NoNameservers, dns.resolver.NoAnswer):
            # NXDOMAIN simply means "not listed"; timeouts are ignored.
            pass
    def dnsbl_mapper(self):
        # Fan the DNSBL lookups out over a thread pool (network-bound).
        with ThreadPoolExecutor(max_workers=50) as executor:
            dnsbl_map = {executor.submit(self.dnsbl_query, url): url for url in DNSBL_LISTS}
            for future in as_completed(dnsbl_map):
                future.result()
    def spamhaus_ipbl_worker(self):
        # Single-zone convenience wrappers around dnsbl_query.
        self.dnsbl_query(SPAMHAUS_IP)
    def spamhaus_dbl_worker(self):
        self.dnsbl_query(SPAMHAUS_DOM)
    # ---[ Query Blacklists ]---
    def bl_mapper(self, query_type, list_type, list_name):
        """Run *query_type* over every URL in *list_type* concurrently and
        report when the query matched none of them."""
        with ThreadPoolExecutor(max_workers=50) as executor:
            mapper = {executor.submit(query_type, url): url for url in list_type}
            for future in as_completed(mapper):
                future.result()
        if self.BL_MATCHES == 0:
            logger.info(f"[-] {self.query} is not listed in active {list_name}")
    def blacklist_worker(self, blacklist):
        # Fetch one HTTP-hosted blacklist and regex-scan it for the query.
        try:
            req = requests.get(blacklist, headers=helpers.headers(), timeout=3)
            req.encoding = "utf-8"
            match = re.findall(self.query, req.text)
            if match:
                logger.warning(f"\u2716 {self.query} --> {blacklist}")
                self.BL_MATCHES += 1
        except AddressValueError as err:
            logger.error(f"[error] {err}")
        except requests.exceptions.Timeout:
            logger.notice(f"[timeout] {blacklist}")
        except requests.exceptions.HTTPError as err:
            logger.error(f"[error] {err}")
        except requests.exceptions.ConnectionError:
            logger.error(f"[error] Problem connecting to {blacklist}")
        except requests.exceptions.RequestException as err:
            logger.critical(f"[critical] {err}")
    def blacklist_query(self, blacklist):
        self.blacklist_worker(blacklist)
    def blacklist_dbl_worker(self):
        # Domain-, IP- and netblock-list entry points share bl_mapper.
        self.bl_mapper(query_type=self.blacklist_query, list_type=DOM_LISTS, list_name="Domain Blacklists")
    def blacklist_ipbl_worker(self):
        self.bl_mapper(query_type=self.blacklist_query, list_type=IP_LISTS, list_name="IP Blacklists")
    # ----[ IP BLOCKS SECTION ]---
    def blacklist_ipblock_query(self, blacklist):
        self.blacklist_worker(blacklist)
    def blacklist_netblock_worker(self):
        self.bl_mapper(query_type=self.blacklist_ipblock_query, list_type=IP_BLOCKS, list_name="NetBlock Blacklists")
    # ----[ WHOIS SECTION ]---
    def whois_query(self, QRY):
        """Resolve *QRY*, flag Cloudflare-fronted IPs and print registrar
        details from whois (handling list-or-scalar fields)."""
        try:
            dns_resp = list(dns.resolver.query(QRY, "A"))
        except dns.resolver.NXDOMAIN:
            print(f"[-] Domain {QRY} does not appear to be registered domain.\n")
            time.sleep(1) # prevents [WinError 10054]
        else:
            print(f"IP Address: {dns_resp[0]}")
            # Check if cloudflare ip
            if self.cflare_results(dns_resp[0]):
                logger.info("Cloudflare IP: Yes")
            else:
                logger.info("Cloudflare IP: No")
            w = whois.whois(QRY)
            if w.registered:
                print("Registered to:", w.registered)
            print("Registrar:", w.registrar)
            print("Organization:", w.org)
            # whois fields may be a single value or a list; normalize display.
            if isinstance(w.updated_date, list):
                print("Updated:", ", ".join(str(x) for x in w.updated_date))
            else:
                print("Updated:", w.updated_date)
            if isinstance(w.creation_date, list):
                print("Created:", ", ".join(str(x) for x in w.creation_date))
            else:
                print("Created:", w.creation_date)
            if isinstance(w.expiration_date, list):
                print("Expires:", ", ".join(str(x) for x in w.expiration_date))
            else:
                print("Expires:", w.expiration_date)
            if isinstance(w.emails, list):
                print("Email Address:", ", ".join(x for x in w.emails))
            else:
                print("Email Address:", w.emails)
    # ----[ CLOUDFLARE CHECK SECTION ]---
    @staticmethod
    def chk_cflare_list(QRY):
        # Yields True for every Cloudflare network that contains QRY.
        for net in CFLARE_IPS:
            if IPv4Address(QRY) in IPv4Network(net):
                yield True
    def cflare_results(self, QRY):
        # First truthy hit (or None when the generator is empty).
        for ip in self.chk_cflare_list(QRY):
            return ip
    @staticmethod
    def tc_query(qry):
        """Check a file hash against Team Cymru's malware hash registry;
        any A-record answer means the hash is known-malicious."""
        cymru = f"{qry}.malware.hash.cymru.com"
        try:
            resolver = dns.resolver.Resolver()
            resolver.timeout = 1
            resolver.lifetime = 1
            answer = resolver.query(cymru, "A")
        except (dns.resolver.NXDOMAIN, dns.resolver.Timeout, dns.resolver.NoNameservers, dns.resolver.NoAnswer):
            pass
        else:
            if answer:
                logger.error("\u2718 malware.hash.cymru.com: MALICIOUS")
|
983,962 | 214751b97d8449bce2fb1dfa44c488d09d2d9835 | array=int(input())
# Greedy: sort ascending, take each element whose value is at least the
# running sum of everything taken so far, and count how many can be taken.
brry=[int(s) for s in input().split()]
brry.sort()
s=0    # running sum of selected elements
xv=0   # number of elements selected
for i in range(len(brry)):
    if brry[i]>=s:
        xv+=1
        s=s+brry[i]
print(xv)
|
983,963 | 3c31f432d30842a4a746d17a627bd1575617d1b3 | # tuple
# Demo of list vs. tuple mutability: dados1 is a (mutable) list, dados2 a
# tuple with the same contents.
dados1 = ['Fabio', 'Classo', 'fabio.classo@email.com', 'Developer']
dados2 = ('Fabio', 'Classo', 'fabio.classo@email.com', 'Developer')
print(dados1[0])
print(dados1[1])
print(dados1[2])
print(dados1[3])
print(dados2[:2])
# Works: dados1 is a list, so item assignment is allowed.
dados1[3] = 'Programador'
print(dados1)
# The commented lines below would raise TypeError: tuples are immutable.
# dados2[3] = 'Programador'
# print(dados2)
senhas = ('njubcupbruvbpur', 'udcuaoncnuncq')# tupla
# Indexing a tuple yields the element itself (here a str), not a tuple.
senhas2 = senhas[0]
print(type(senhas2))
983,964 | bae6d378d862d3d3d7d3da6f34e01b165e06d09f | # -*- coding: utf-8 -*-
from ..protocol.senderkeydistributionmessage import SenderKeyDistributionMessage
from ..invalidkeyidexception import InvalidKeyIdException
from ..invalidkeyexception import InvalidKeyException
from ..util.keyhelper import KeyHelper
class GroupSessionBuilder:
    """Builds and processes group (sender-key) sessions backed by a store."""

    def __init__(self, senderKeyStore):
        # Store providing loadSenderKey / storeSenderKey.
        self.senderKeyStore = senderKeyStore

    def process(self, senderKeyName, senderKeyDistributionMessage):
        """
        Install an incoming sender-key distribution message into the store.

        :type senderKeyName: SenderKeyName
        :type senderKeyDistributionMessage: SenderKeyDistributionMessage
        """
        record = self.senderKeyStore.loadSenderKey(senderKeyName)
        record.addSenderKeyState(
            senderKeyDistributionMessage.getId(),
            senderKeyDistributionMessage.getIteration(),
            senderKeyDistributionMessage.getChainKey(),
            senderKeyDistributionMessage.getSignatureKey(),
        )
        self.senderKeyStore.storeSenderKey(senderKeyName, record)

    def create(self, senderKeyName):
        """
        Return a distribution message for the local sender key,
        generating and persisting a fresh key state first if none exists.

        :type senderKeyName: SenderKeyName
        """
        try:
            record = self.senderKeyStore.loadSenderKey(senderKeyName)
            if record.isEmpty():
                # No state yet: create one and persist it before reading back.
                record.setSenderKeyState(
                    KeyHelper.generateSenderKeyId(),
                    0,
                    KeyHelper.generateSenderKey(),
                    KeyHelper.generateSenderSigningKey(),
                )
                self.senderKeyStore.storeSenderKey(senderKeyName, record)
            state = record.getSenderKeyState()
            return SenderKeyDistributionMessage(
                state.getKeyId(),
                state.getSenderChainKey().getIteration(),
                state.getSenderChainKey().getSeed(),
                state.getSigningKeyPublic(),
            )
        except (InvalidKeyException, InvalidKeyIdException) as e:
            # Key-material errors here indicate a programming bug, not input.
            raise AssertionError(e)
|
983,965 | 35ae098c94ee19f0560c9f28753b67895706d75f | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Fri Oct 11 11:54:51 2019
@author: jose
"""
import funciones as fc
from funciones import funcion1
# Call the imported function through the module alias and the direct name.
print("Este programa usa las funciones del mundulo funciones")
fc.funcion1()
funcion1()
fc.funcion1()

# Classic floating-point demo: 0.1 + 0.1 + 0.1 is not exactly 0.3 in
# binary floating point, so both comparisons below take the else branch.
numero1 = 0.1 + 0.1 + 0.1
numero2 = 0.3
print("Son iguales" if numero1 == numero2 else "No son iguales")
print("Son iguales" if numero1 == 0.3 else "No son iguales")
|
983,966 | 850b07c85c534dd0c017bcbe06e629f9e2984f0b | # Dictionary to store closed, structured tours
# of board size 6*6, 6*8, 8*, 8*10, 10*10, 10*12
structures = {}
structures['6x6'] = [
[[4,5], [5,6], [4,7], [4,7], [5,7], [6,7]],
[[3,4], [3,6], [0,3], [3,5], [0,6], [0,7]],
[[2,5], [1,4], [0,1], [3,4], [2,5], [1,6]],
[[2,5], [5,6], [4,7], [0,2], [1,5], [0,6]],
[[3,4], [1,4], [4,7], [4,7], [0,2], [1,7]],
[[2,3], [1,3], [1,0], [3,0], [0,2], [0,1]]
]
structures['6x8'] = [
[[4,5], [4,6], [4,7], [4,7], [4,6], [4,6], [5,7], [6,7]],
[[3,4], [3,6], [6,0], [0,4], [0,3], [0,3], [0,5], [0,7]],
[[2,5], [1,4], [0,5], [2,5], [2,5], [0,3], [2,5], [1,6]],
[[2,5], [2,6], [6,7], [0,6], [6,7], [5,6], [5,7], [1,6]],
[[3,4], [1,4], [3,7], [1,4], [1,3], [1,4], [2,7], [1,7]],
[[2,3], [1,2], [0,2], [0,2], [2,3], [0,3], [1,2], [0,1]]
]
structures['8x8'] = [
[[4,5], [5,6], [5,7], [4,7], [5,7], [4,7], [5,7], [6,7]],
[[3,5], [3,6], [0,3], [3,6], [3,6], [0,3], [5,6], [0,7]],
[[2,5], [1,6], [1,7], [1,5], [4,5], [1,3], [2,5], [1,6]],
[[2,3], [1,5], [2,5], [2,4], [6,7], [2,4], [0,5], [1,6]],
[[2,5], [1,6], [3,7], [4,5], [1,6], [0,1], [2,5], [0,1]],
[[3,5], [5,6], [1,4], [1,2], [4,5], [0,5], [2,5], [1,6]],
[[2,4], [1,4], [4,7], [2,7], [0,1], [4,7], [0,7], [1,7]],
[[2,3], [1,3], [0,1], [0,3], [0,3], [1,3], [1,2], [0,1]]
]
structures['8x10'] = [
[[4,5], [4,6], [4,7], [4,7], [4,7], [6,7], [4,7], [4,7], [5,7], [6,7]],
[[3,4], [3,6], [0,3], [0,3], [0,3], [0,3], [0,3], [3,4], [0,5], [0,6]],
[[2,4], [1,6], [0,6], [6,7], [2,4], [4,6], [5,7], [5,6], [2,5], [0,1]],
[[2,5], [3,5], [0,5], [4,6], [3,5], [5,7], [0,4], [0,7], [2,5], [1,7]],
[[2,5], [2,6], [2,7], [3,5], [2,4], [0,3], [2,7], [1,3], [0,1], [1,6]],
[[3,5], [1,6], [1,2], [1,6], [3,5], [1,7], [0,1], [4,5], [5,7], [1,6]],
[[2,4], [1,4], [4,7], [3,7], [1,4], [4,7], [3,7], [4,7], [2,7], [0,7]],
[[2,3], [1,3], [0,2], [0,3], [0,3], [1,3], [0,3], [0,3], [1,2], [0,1]]
]
structures['10x10'] = [
[[4,5], [4,6], [5,7], [4,6], [4,7], [6,7], [5,7], [4,7], [5,7], [6,7]],
[[3,4], [4,6], [0,3], [0,3], [3,5], [0,3], [0,3], [3,4], [5,6], [6,0]],
[[2,4], [1,6], [0,2], [0,1], [2,4], [6,7], [5,6], [1,6], [2,5], [0,1]],
[[2,4], [5,6], [0,6], [3,5], [4,5], [1,4], [0,4], [2,6], [2,5], [1,6]],
[[2,5], [4,5], [0,7], [4,5], [2,6], [2,5], [0,2], [0,1], [0,6], [1,6]],
[[2,3], [2,6], [1,7], [0,5], [1,7], [0,1], [2,6], [6,7], [2,6], [1,7]],
[[3,5], [1,6], [1,3], [2,7], [1,5], [3,7], [1,7], [2,3], [2,5], [6,7]],
[[2,5], [3,6], [5,6], [3,7], [1,3], [2,7], [2,6], [2,3], [5,6], [6,7]],
[[2,4], [1,3], [4,7], [3,4], [4,7], [1,7], [4,7], [3,4], [2,7], [1,7]],
[[2,3], [1,2], [0,3], [1,3], [0,3], [0,2], [0,3], [2,3], [0,2], [0,1]]
]
structures['10x12'] = [
[[4,5], [4,6], [6,7], [4,7], [5,6], [4,7], [5,7], [4,7], [4,6], [4,7], [5,6], [6,7]],
[[3,5], [3,6], [0,7], [0,3], [3,6], [0,3], [6,7], [0,3], [5,6], [0,3], [0,5], [0,6]],
[[2,3], [1,2], [6,7], [2,6], [3,4], [1,7], [5,6], [1,2], [5,7], [2,4], [2,5], [1,6]],
[[2,3], [1,6], [4,5], [2,3], [4,7], [2,7], [0,3], [2,7], [5,7], [1,6], [2,5], [0,1]],
[[4,5], [2,6], [2,3], [3,5], [0,4], [2,3], [0,3], [1,7], [4,6], [1,7], [2,5], [1,6]],
[[2,4], [4,5], [0,7], [1,6], [4,6], [3,6], [0,7], [3,6], [2,5], [1,6], [0,7], [1,6]],
[[2,3], [1,6], [0,7], [0,5], [1,3], [4,6], [0,7], [2,4], [3,7], [4,5], [2,5], [1,6]],
[[3,5], [5,6], [1,2], [2,7], [2,3], [5,6], [2,3], [0,5], [2,7], [0,1], [2,5], [0,6]],
[[2,4], [3,4], [4,7], [4,7], [1,2], [4,7], [3,4], [4,7], [4,7], [4,7], [1,2], [1,7]],
[[2,3], [1,3], [0,1], [0,3], [0,2], [0,3], [1,3], [0,3], [0,1], [0,3], [0,2], [0,1]]
] |
983,967 | 7db06cc72535e6f1a0a22099d852d6946aec019a | from .plot_cg import *
|
983,968 | 1ccdccddae79a3ee8848a25f52a590513c07c7ee |
import sys
import re
import os
import commands
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.cm as cm
from scipy.interpolate import interp1d
from VG_ConstPulse_IDSAT import IDSAT
from I_V_curves import hist_IDS_VGS, IDS_VGS_stress, IDS_VGS, IDSAT_vs_row
from VG_ConstPulse_horizontal_hist import IDSAT_horizontal_hist
from VG_ConstPulse_separation import IDSAT_separation
from Charge_Pumping import Charge_Pumping, Charge_Pumping_compare
from MLC_IDSAT import *
from Marcov_Chain import *
from mpl_toolkits.mplot3d import Axes3D
import scipy.linalg as sp_lin
def main():
"""
Istep = 0.5
Imax = 160.0
Ith = np.array([90.0, 70.0, 50.0, 30.0])
Tpulse = np.array([0.002, 0.01, 0.04, 0.2])
Imin = 0.0
N_state_total = int(round((Imax - Imin)/Istep) + 1)
k_inv = 15.489
b0 = 21207.8
#d0 = 80000.0
#r = -1.5
#generator = death_birth_generator_matrix(1/k_inv, b0, d0, r, N_state_total, Istep)
generator = Testing_death_birth_generator_matrix(1/k_inv, b0, 4000.0, N_state_total, Istep)
Prior = Gaussian_function(np.arange(N_state_total), IDSAT_to_state(126.641, Imax=160.0, Istep=Istep), 7.039/Istep)
relaxation = Gaussian_function(np.arange(N_state_total), IDSAT_to_state(1.35, Imax=160.0, Istep=Istep), 0.95/Istep)
for cycle in np.arange(4):
N_state_transient = int(round((Imax - Ith[cycle])/Istep) + 1)
T_mat, P_mat = transition_matrix(generator, Tpulse[cycle], N_state_total, N_state_transient)
Final_mat = np.linalg.matrix_power(P_mat, 1024)
Prior = np.matmul(Prior, Final_mat)
Prior_mean = 0
Prior_std = 0
for i in np.arange(N_state_total):
Prior_mean += (Imax - Istep*i) * Prior[i]
for i in np.arange(N_state_total):
Prior_std += (Imax - Istep*i - Prior_mean)**2 * Prior[i]
Prior_std = np.sqrt(Prior_std)
print('level ', cycle+1, 'mean and std')
print('before relax', Prior_mean, Prior_std)
convolve_relax = np.convolve(Prior, relaxation)
convolve_relax = convolve_relax[-N_state_total:]
#relax_mean = Imax - Istep * np.average(np.arange(N_state_total), weights = convolve_relax)
#print(relax_mean)
relax_mean = 0
relax_std = 0
for i in np.arange(N_state_total):
relax_mean += (Imax - Istep*i) * convolve_relax[i]
for i in np.arange(N_state_total):
relax_std += (Imax - Istep*i - relax_mean)**2 * convolve_relax[i]
relax_std = np.sqrt(relax_std)
print('after relax', relax_mean, relax_std)
plt.figure(cycle+1)
plt.plot(np.arange(Imax, Imin-1e-4, -Istep), Prior, 'b')
plt.plot(np.arange(Imax, Imin-1e-4, -Istep), convolve_relax[-N_state_total:], 'r')
plt.grid()
plt.show()
"""
#for row_start in np.arange(0, 128):
# MLC_IDSAT_algorithm_rv1(14, 33, 16, 2, 'ULVT', 1.8, 2.0, 128, [row_start], [71, 55, 46, 35], [0.002, 0.01, 0.04, 0.2], 4, [], '', ['../Data/chip14/MLC_programming_Chip14_Col33_2msPULSE_VG1p8_VD2p0_VAsource_VBdrain_01', '../Data/chip14/MLC_programming_Chip14_Col33_10msPULSE_VG1p8_VD2p0_VAsource_VBdrain_02', '../Data/chip14/MLC_programming_Chip14_Col33_40msPULSE_VG1p8_VD2p0_VAsource_VBdrain_03', '../Data/chip14/MLC_programming_Chip14_Col33_200msPULSE_VG1p8_VD2p0_VAsource_VBdrain_04'], '../Plots/chip14/', 'VG1p8_VD2p0', '_cycle01020304_row'+str(row_start).zfill(3), Imin=12, Imax=136)
Marcov_Chain_MLE(14, 33, 16, 2, 'ULVT', 1.8, 2.0, 128, range(0, 128), [71, 55, 46, 35], [0.002, 0.01, 0.04, 0.2], 4, ['../Data/chip14/MLC_programming_Chip14_Col33_2msPULSE_VG1p8_VD2p0_VAsource_VBdrain_01', '../Data/chip14/MLC_programming_Chip14_Col33_10msPULSE_VG1p8_VD2p0_VAsource_VBdrain_02', '../Data/chip14/MLC_programming_Chip14_Col33_40msPULSE_VG1p8_VD2p0_VAsource_VBdrain_03', '../Data/chip14/MLC_programming_Chip14_Col33_200msPULSE_VG1p8_VD2p0_VAsource_VBdrain_04'], '../Plots/chip14/', 'VG1p8_VD2p0', '', 160.0, [90.0, 70.0, 50.0, 30.0], 0.0, 0.5)
plt.show()
#MLC_IDSAT_algorithm_rv1(14, 33, 16, 2, 'ULVT', 1.8, 2.0, 128, range(0, 128), [71, 55, 46, 35], [0.002, 0.01, 0.04, 0.2], 4, [], '', ['../Data/chip14/MLC_programming_Chip14_Col33_2msPULSE_VG1p8_VD2p0_VAsource_VBdrain_01', '../Data/chip14/MLC_programming_Chip14_Col33_10msPULSE_VG1p8_VD2p0_VAsource_VBdrain_02', '../Data/chip14/MLC_programming_Chip14_Col33_40msPULSE_VG1p8_VD2p0_VAsource_VBdrain_03', '../Data/chip14/MLC_programming_Chip14_Col33_200msPULSE_VG1p8_VD2p0_VAsource_VBdrain_04'], '../Plots/chip14/', 'VG1p8_VD2p0', '_cycle01020304_all')
#IDS_VGS(14, 33, 16, 2, 'ULVT', 128, ['../Data/chip14/Fresh_Chip14_Col33_Ids_Vgs_VAsource_VBdrain', '../Data/chip14/MLC_Chip14_Col33_2msPULSE_VG1p8_VD2p0_Ids_Vgs_VAsource_VBdrain_01', '../Data/chip14/MLC_Chip14_Col33_10msPULSE_VG1p8_VD2p0_Ids_Vgs_VAsource_VBdrain_02', '../Data/chip14/MLC_Chip14_Col33_40msPULSE_VG1p8_VD2p0_Ids_Vgs_VAsource_VBdrain_03', '../Data/chip14/MLC_Chip14_Col33_200msPULSE_VG1p8_VD2p0_Ids_Vgs_VAsource_VBdrain_04'], ['b', 'y', 'r', 'k', 'g'], '../Plots/chip14/', 'Fresh_vs_MLC01020304_VG1p8_VD2p0_IDS-VGS_VaS-VbD_', range(0, 128), 'Fresh vs MLC-1-2-3-4 (VG=1.8, VD=2.0)\nMLC-{1, 2, 3, 4}: {2ms, 10ms, 40ms, 200ms} WL pulses, IDSAT threshold = {90, 70, 50, 30}uA, forward' , 150, ['fresh', 'MLC-01', 'MLC-02', 'MLC-03', 'MLC-04'])
#IDS_VGS(14, 33, 16, 2, 'ULVT', 128, ['../Data/chip14/Fresh_Chip14_Col33_Ids_Vgs_VAdrain_VBsource', '../Data/chip14/MLC_Chip14_Col33_2msPULSE_VG1p8_VD2p0_Ids_Vgs_VAdrain_VBsource_01', '../Data/chip14/MLC_Chip14_Col33_10msPULSE_VG1p8_VD2p0_Ids_Vgs_VAdrain_VBsource_02', '../Data/chip14/MLC_Chip14_Col33_40msPULSE_VG1p8_VD2p0_Ids_Vgs_VAdrain_VBsource_03', '../Data/chip14/MLC_Chip14_Col33_200msPULSE_VG1p8_VD2p0_Ids_Vgs_VAdrain_VBsource_04'], ['b', 'y', 'r', 'k', 'g'], '../Plots/chip14/', 'Fresh_vs_MLC01020304_VG1p8_VD2p0_IDS-VGS_VaD-VbS_', range(0, 128), 'Fresh vs MLC-1-2-3-4 (VG=1.8, VD=2.0)\nMLC-{1, 2, 3, 4}: {2ms, 10ms, 40ms, 200ms} WL pulses, IDSAT threshold = {90, 70, 50, 30}uA, reversed', 150, ['fresh', 'MLC-01', 'MLC-02', 'MLC-03', 'MLC-04'])
#hist_IDS_VGS(0, 14, 33, 16, 2, 'ULVT', 128, ['../Data/chip14/Fresh_Chip14_Col33_Ids_Vgs_VAdrain_VBsource', '../Data/chip14/MLC_Chip14_Col33_2msPULSE_VG1p8_VD2p0_Ids_Vgs_VAdrain_VBsource_01', '../Data/chip14/MLC_Chip14_Col33_10msPULSE_VG1p8_VD2p0_Ids_Vgs_VAdrain_VBsource_02', '../Data/chip14/MLC_Chip14_Col33_40msPULSE_VG1p8_VD2p0_Ids_Vgs_VAdrain_VBsource_03', '../Data/chip14/MLC_Chip14_Col33_200msPULSE_VG1p8_VD2p0_Ids_Vgs_VAdrain_VBsource_04'], ['b', 'y', 'r', 'k', 'g'], '../Plots/chip14/', 'Hist-IDSAT_MLC-rv1-01020304_reverse-read_', range(0, 128), 'MLC programming {2ms, 10ms, 40ms, 200ms} pulses, VGS=1.8, VDS=2.0 for level=1-2-3-4\nhistogram of read-IDSAT (VGS=VDS=0.8V)', 0, 150, 0, 150, 1000)
#
#t_label = []
#for t in np.arange(0, 0.002*(71) + 0.0001, 0.002):
# t_label.append(str(t))
#
##MLC_IDSAT_algorithm_rv1(14, 33, 16, 2, 'ULVT', 1.8, 2.0, 128, [21], [71], [0.002], 1, np.arange(0, 0.002*(71)+0.0001, 0.002), t_label, ['../Data/chip14/MLC_programming_Chip14_Col33_2msPULSE_VG1p8_VD2p0_VAsource_VBdrain_01'], '../Plots/chip14/', 'VG1p8_VD2p0', '_rv1_cycle01_row-21', Imin=82, Imax=142)
#for row_start in np.arange(0, 128):
# MLC_IDSAT_algorithm_rv1(14, 33, 16, 2, 'ULVT', 1.8, 2.0, 128, [row_start], [71], [0.002], 1, np.arange(0, 0.002*(71)+0.0001, 0.002), t_label, ['../Data/chip14/MLC_programming_Chip14_Col33_2msPULSE_VG1p8_VD2p0_VAsource_VBdrain_01'], '../Plots/chip14/', 'VG1p8_VD2p0', '_rv1_cycle01_row_'+str(row_start).zfill(3), Imin=80, Imax=142)
#MLC_IDSAT_algorithm_rv1(14, 33, 16, 2, 'ULVT', 1.8, 2.0, 128, range(0, 128), [71], [0.002], 1, np.arange(0, 0.002*(71)+0.0001, 0.002), t_label, ['../Data/chip14/MLC_programming_Chip14_Col33_2msPULSE_VG1p8_VD2p0_VAsource_VBdrain_01'], '../Plots/chip14/', 'VG1p8_VD2p0', '_rv1_cycle01', Imin=80, Imax=142)
#MLC_IDSAT_characterization(11, 30, 16, 2, 'ULVT', 1.8, 1.7, 32, range(0, 16) , [40], [0.01], 1, [0, 0.05, 0.1, 0.15, 0.2, 0.25, 0.3, 0.35, 0.4, 0.5], ['0', '0.05', '0.1', '0.15', '0.2', '0.25', '0.3', '0.35', '0.4', 'recover'], ['../Data/chip11/Chip11_Col30_HCI_40x10ms_stress_VG_ConstPulse_VAsource_VBdrain_01'], '../Plots/chip11/', 'VG1p8_VD1p7', '_cycle01', 50, 160, 1)
#MLC_IDSAT_characterization(11, 30, 16, 2, 'ULVT', 1.8, 2.0, 32, range(16, 32), [40], [0.01], 1, [0, 0.05, 0.1, 0.15, 0.2, 0.25, 0.3, 0.35, 0.4, 0.5], ['0', '0.05', '0.1', '0.15', '0.2', '0.25', '0.3', '0.35', '0.4', 'recover'], ['../Data/chip11/Chip11_Col30_HCI_40x10ms_stress_VG_ConstPulse_VAsource_VBdrain_01'], '../Plots/chip11/', 'VG1p8_VD2p0', '_cycle01', 20, 160, 1)
#MLC_IDSAT_characterization(11, 30, 16, 2, 'ULVT', 1.8, 1.7, 32, range(0, 16) , [40, 80], [0.01, 0.01], 2, [0, 0.2, 0.4, 0.8, 1.0, 1.2, 1.4, 1.6, 1.7], ['0', '0.2', '0.4', '0.4', '0.6', '0.8', '1.0', '1.2', 'recover'], ['../Data/chip11/Chip11_Col30_HCI_40x10ms_stress_VG_ConstPulse_VAsource_VBdrain_01', '../Data/chip11/Chip11_Col30_HCI_80x10ms_stress_VG_ConstPulse_VAsource_VBdrain_02'], '../Plots/chip11/', 'VG1p8_VD1p7', '_cycle0102', 50, 160, 1)
#MLC_IDSAT_characterization(11, 30, 16, 2, 'ULVT', 1.8, 2.0, 32, range(16, 32), [40, 80], [0.01, 0.01], 2, [0, 0.2, 0.4, 0.8, 1.0, 1.2, 1.4, 1.6, 1.7], ['0', '0.2', '0.4', '0.4', '0.6', '0.8', '1.0', '1.2', 'recover'], ['../Data/chip11/Chip11_Col30_HCI_40x10ms_stress_VG_ConstPulse_VAsource_VBdrain_01', '../Data/chip11/Chip11_Col30_HCI_80x10ms_stress_VG_ConstPulse_VAsource_VBdrain_02'], '../Plots/chip11/', 'VG1p8_VD2p0', '_cycle0102', 20, 160, 1)
#MLC_IDSAT_characterization(11, 30, 16, 2, 'ULVT', 1.8, 1.7, 32, range(0, 16) , [40, 80, 240], [0.01, 0.01, 0.01], 3, [0, 0.4, 0.8, 1.6, 2.0, 2.6, 3.2, 3.8, 4.4], ['0', '0.4', '0.4', '1.2', '1.2', '1.8', '2.4', '3.0', '3.6'], ['../Data/chip11/Chip11_Col30_HCI_40x10ms_stress_VG_ConstPulse_VAsource_VBdrain_01', '../Data/chip11/Chip11_Col30_HCI_80x10ms_stress_VG_ConstPulse_VAsource_VBdrain_02', '../Data/chip11/Chip11_Col30_HCI_240x10ms_stress_VG_ConstPulse_VAsource_VBdrain_03'], '../Plots/chip11/', 'VG1p8_VD1p7', '_cycle010203', 50, 160, 1)
#MLC_IDSAT_characterization(11, 30, 16, 2, 'ULVT', 1.8, 2.0, 32, range(16, 32), [40, 80, 240], [0.01, 0.01, 0.01], 3, [0, 0.4, 0.8, 1.6, 2.0, 2.6, 3.2, 3.8, 4.4], ['0', '0.4', '0.4', '1.2', '1.2', '1.8', '2.4', '3.0', '3.6'], ['../Data/chip11/Chip11_Col30_HCI_40x10ms_stress_VG_ConstPulse_VAsource_VBdrain_01', '../Data/chip11/Chip11_Col30_HCI_80x10ms_stress_VG_ConstPulse_VAsource_VBdrain_02', '../Data/chip11/Chip11_Col30_HCI_240x10ms_stress_VG_ConstPulse_VAsource_VBdrain_03'], '../Plots/chip11/', 'VG1p8_VD2p0', '_cycle010203', 20, 160, 1)
#MLC_IDSAT_characterization(11, 30, 16, 2, 'ULVT', 1.8, 1.7, 32, range(0, 16) , [40, 80, 240, 180], [0.01, 0.01, 0.01, 0.04], 4, [0, 0.4, 0.8, 1.6, 2.0, 2.6, 3.2, 3.8, 4.4, 4.8, 12], ['0', '0.4', '0.4', '1.2', '1.2', '1.8', '2.4', '3.0', '3.6', '3.6', '10.8'], ['../Data/chip11/Chip11_Col30_HCI_40x10ms_stress_VG_ConstPulse_VAsource_VBdrain_01', '../Data/chip11/Chip11_Col30_HCI_80x10ms_stress_VG_ConstPulse_VAsource_VBdrain_02', '../Data/chip11/Chip11_Col30_HCI_240x10ms_stress_VG_ConstPulse_VAsource_VBdrain_03', '../Data/chip11/Chip11_Col30_HCI_180x40ms_stress_VG_ConstPulse_VAsource_VBdrain_04'], '../Plots/chip11/', 'VG1p8_VD1p7', '_cycle01020304', 40, 160, 1)
#MLC_IDSAT_characterization(11, 30, 16, 2, 'ULVT', 1.8, 2.0, 32, range(16, 32), [40, 80, 240, 180], [0.01, 0.01, 0.01, 0.04], 4, [0, 0.4, 0.8, 1.6, 2.0, 2.6, 3.2, 3.8, 4.4, 4.8, 12], ['0', '0.4', '0.4', '1.2', '1.2', '1.8', '2.4', '3.0', '3.6', '3.6', '10.8'], ['../Data/chip11/Chip11_Col30_HCI_40x10ms_stress_VG_ConstPulse_VAsource_VBdrain_01', '../Data/chip11/Chip11_Col30_HCI_80x10ms_stress_VG_ConstPulse_VAsource_VBdrain_02', '../Data/chip11/Chip11_Col30_HCI_240x10ms_stress_VG_ConstPulse_VAsource_VBdrain_03', '../Data/chip11/Chip11_Col30_HCI_180x40ms_stress_VG_ConstPulse_VAsource_VBdrain_04'], '../Plots/chip11/', 'VG1p8_VD2p0', '_cycle01020304', 10, 160, 1)
#MLC_IDSAT_characterization(11, 33, 16, 2, 'ULVT', 1.5, 1.7, 128, range(0, 32) , [40], [0.01], 1, [0, 0.05, 0.1, 0.15, 0.2, 0.25, 0.3, 0.35, 0.4, 0.5], ['0', '0.05', '0.1', '0.15', '0.2', '0.25', '0.3', '0.35', '0.4', 'recover'], ['../Data/chip11/Chip11_Col33_HCI_40x10ms_stress_VG_ConstPulse_VAsource_VBdrain_01'], '../Plots/chip11/', 'VG1p5_VD1p7', '_cycle01', 50, 160, 1)
#MLC_IDSAT_characterization(11, 33, 16, 2, 'ULVT', 1.8, 1.7, 128, range(32, 64) , [40], [0.01], 1, [0, 0.05, 0.1, 0.15, 0.2, 0.25, 0.3, 0.35, 0.4, 0.5], ['0', '0.05', '0.1', '0.15', '0.2', '0.25', '0.3', '0.35', '0.4', 'recover'], ['../Data/chip11/Chip11_Col33_HCI_40x10ms_stress_VG_ConstPulse_VAsource_VBdrain_01'], '../Plots/chip11/', 'VG1p8_VD1p7', '_cycle01', 50, 160, 1)
#MLC_IDSAT_characterization(11, 33, 16, 2, 'ULVT', 1.5, 2.0, 128, range(64, 96) , [40], [0.01], 1, [0, 0.05, 0.1, 0.15, 0.2, 0.25, 0.3, 0.35, 0.4, 0.5], ['0', '0.05', '0.1', '0.15', '0.2', '0.25', '0.3', '0.35', '0.4', 'recover'], ['../Data/chip11/Chip11_Col33_HCI_40x10ms_stress_VG_ConstPulse_VAsource_VBdrain_01'], '../Plots/chip11/', 'VG1p5_VD2p0', '_cycle01', 20, 160, 1)
#MLC_IDSAT_characterization(11, 33, 16, 2, 'ULVT', 1.8, 2.0, 128, range(96, 128), [40], [0.01], 1, [0, 0.05, 0.1, 0.15, 0.2, 0.25, 0.3, 0.35, 0.4, 0.5], ['0', '0.05', '0.1', '0.15', '0.2', '0.25', '0.3', '0.35', '0.4', 'recover'], ['../Data/chip11/Chip11_Col33_HCI_40x10ms_stress_VG_ConstPulse_VAsource_VBdrain_01'], '../Plots/chip11/', 'VG1p8_VD2p0', '_cycle01', 20, 160, 1)
#MLC_IDSAT_characterization(11, 33, 16, 2, 'ULVT', 1.5, 1.7, 128, range(0, 32) , [40, 20], [0.01, 0.04], 2, [0, 0.2, 0.4, 0.8, 1.0, 1.2, 1.4, 1.6, 1.7], ['0', '0.2', '0.4', '0.4', '0.6', '0.8', '1.0', '1.2', 'recover'], ['../Data/chip11/Chip11_Col33_HCI_40x10ms_stress_VG_ConstPulse_VAsource_VBdrain_01', '../Data/chip11/Chip11_Col33_HCI_20x40ms_stress_VG_ConstPulse_VAsource_VBdrain_02'], '../Plots/chip11/', 'VG1p5_VD1p7', '_cycle0102', 50, 160, 1)
#MLC_IDSAT_characterization(11, 33, 16, 2, 'ULVT', 1.8, 1.7, 128, range(32, 64) , [40, 20], [0.01, 0.04], 2, [0, 0.2, 0.4, 0.8, 1.0, 1.2, 1.4, 1.6, 1.7], ['0', '0.2', '0.4', '0.4', '0.6', '0.8', '1.0', '1.2', 'recover'], ['../Data/chip11/Chip11_Col33_HCI_40x10ms_stress_VG_ConstPulse_VAsource_VBdrain_01', '../Data/chip11/Chip11_Col33_HCI_20x40ms_stress_VG_ConstPulse_VAsource_VBdrain_02'], '../Plots/chip11/', 'VG1p8_VD1p7', '_cycle0102', 50, 160, 1)
#MLC_IDSAT_characterization(11, 33, 16, 2, 'ULVT', 1.5, 2.0, 128, range(64, 96) , [40, 20], [0.01, 0.04], 2, [0, 0.2, 0.4, 0.8, 1.0, 1.2, 1.4, 1.6, 1.7], ['0', '0.2', '0.4', '0.4', '0.6', '0.8', '1.0', '1.2', 'recover'], ['../Data/chip11/Chip11_Col33_HCI_40x10ms_stress_VG_ConstPulse_VAsource_VBdrain_01', '../Data/chip11/Chip11_Col33_HCI_20x40ms_stress_VG_ConstPulse_VAsource_VBdrain_02'], '../Plots/chip11/', 'VG1p5_VD2p0', '_cycle0102', 20, 160, 1)
#MLC_IDSAT_characterization(11, 33, 16, 2, 'ULVT', 1.8, 2.0, 128, range(96, 128), [40, 20], [0.01, 0.04], 2, [0, 0.2, 0.4, 0.8, 1.0, 1.2, 1.4, 1.6, 1.7], ['0', '0.2', '0.4', '0.4', '0.6', '0.8', '1.0', '1.2', 'recover'], ['../Data/chip11/Chip11_Col33_HCI_40x10ms_stress_VG_ConstPulse_VAsource_VBdrain_01', '../Data/chip11/Chip11_Col33_HCI_20x40ms_stress_VG_ConstPulse_VAsource_VBdrain_02'], '../Plots/chip11/', 'VG1p8_VD2p0', '_cycle0102', 20, 160, 1)
#MLC_IDSAT_characterization(11, 33, 16, 2, 'ULVT', 1.5, 1.7, 128, range(0, 32) , [40, 20, 12], [0.01, 0.04, 0.2], 3, [0, 0.4, 0.8, 1.6, 2.0, 2.6, 3.2, 3.8, 4.4], ['0', '0.4', '0.4', '1.2', '1.2', '1.8', '2.4', '3.0', '3.6'], ['../Data/chip11/Chip11_Col33_HCI_40x10ms_stress_VG_ConstPulse_VAsource_VBdrain_01', '../Data/chip11/Chip11_Col33_HCI_20x40ms_stress_VG_ConstPulse_VAsource_VBdrain_02', '../Data/chip11/Chip11_Col33_HCI_12x200ms_stress_VG_ConstPulse_VAsource_VBdrain_03'], '../Plots/chip11/', 'VG1p5_VD1p7', '_cycle010203', 50, 160, 1)
#MLC_IDSAT_characterization(11, 33, 16, 2, 'ULVT', 1.8, 1.7, 128, range(32, 64) , [40, 20, 12], [0.01, 0.04, 0.2], 3, [0, 0.4, 0.8, 1.6, 2.0, 2.6, 3.2, 3.8, 4.4], ['0', '0.4', '0.4', '1.2', '1.2', '1.8', '2.4', '3.0', '3.6'], ['../Data/chip11/Chip11_Col33_HCI_40x10ms_stress_VG_ConstPulse_VAsource_VBdrain_01', '../Data/chip11/Chip11_Col33_HCI_20x40ms_stress_VG_ConstPulse_VAsource_VBdrain_02', '../Data/chip11/Chip11_Col33_HCI_12x200ms_stress_VG_ConstPulse_VAsource_VBdrain_03'], '../Plots/chip11/', 'VG1p8_VD1p7', '_cycle010203', 50, 160, 1)
#MLC_IDSAT_characterization(11, 33, 16, 2, 'ULVT', 1.5, 2.0, 128, range(64, 96) , [40, 20, 12], [0.01, 0.04, 0.2], 3, [0, 0.4, 0.8, 1.6, 2.0, 2.6, 3.2, 3.8, 4.4], ['0', '0.4', '0.4', '1.2', '1.2', '1.8', '2.4', '3.0', '3.6'], ['../Data/chip11/Chip11_Col33_HCI_40x10ms_stress_VG_ConstPulse_VAsource_VBdrain_01', '../Data/chip11/Chip11_Col33_HCI_20x40ms_stress_VG_ConstPulse_VAsource_VBdrain_02', '../Data/chip11/Chip11_Col33_HCI_12x200ms_stress_VG_ConstPulse_VAsource_VBdrain_03'], '../Plots/chip11/', 'VG1p5_VD2p0', '_cycle010203', 20, 160, 1)
#MLC_IDSAT_characterization(11, 33, 16, 2, 'ULVT', 1.8, 2.0, 128, range(96, 128), [40, 20, 12], [0.01, 0.04, 0.2], 3, [0, 0.4, 0.8, 1.6, 2.0, 2.6, 3.2, 3.8, 4.4], ['0', '0.4', '0.4', '1.2', '1.2', '1.8', '2.4', '3.0', '3.6'], ['../Data/chip11/Chip11_Col33_HCI_40x10ms_stress_VG_ConstPulse_VAsource_VBdrain_01', '../Data/chip11/Chip11_Col33_HCI_20x40ms_stress_VG_ConstPulse_VAsource_VBdrain_02', '../Data/chip11/Chip11_Col33_HCI_12x200ms_stress_VG_ConstPulse_VAsource_VBdrain_03'], '../Plots/chip11/', 'VG1p8_VD2p0', '_cycle010203', 20, 160, 1)
#MLC_IDSAT_characterization(11, 33, 16, 2, 'ULVT', 1.5, 1.7, 128, range(0, 32) , [40, 20, 12, 36], [0.01, 0.04, 0.2, 0.2], 4, [0, 0.4, 0.8, 1.6, 2.0, 2.6, 3.2, 3.8, 4.4, 4.8, 12], ['0', '0.4', '0.4', '1.2', '1.2', '1.8', '2.4', '3.0', '3.6', '3.6', '10.8'], ['../Data/chip11/Chip11_Col33_HCI_40x10ms_stress_VG_ConstPulse_VAsource_VBdrain_01', '../Data/chip11/Chip11_Col33_HCI_20x40ms_stress_VG_ConstPulse_VAsource_VBdrain_02', '../Data/chip11/Chip11_Col33_HCI_12x200ms_stress_VG_ConstPulse_VAsource_VBdrain_03', '../Data/chip11/Chip11_Col33_HCI_36x200ms_stress_VG_ConstPulse_VAsource_VBdrain_04'], '../Plots/chip11/', 'VG1p5_VD1p7', '_cycle01020304', 40, 160, 1)
#MLC_IDSAT_characterization(11, 33, 16, 2, 'ULVT', 1.8, 1.7, 128, range(32, 64) , [40, 20, 12, 36], [0.01, 0.04, 0.2, 0.2], 4, [0, 0.4, 0.8, 1.6, 2.0, 2.6, 3.2, 3.8, 4.4, 4.8, 12], ['0', '0.4', '0.4', '1.2', '1.2', '1.8', '2.4', '3.0', '3.6', '3.6', '10.8'], ['../Data/chip11/Chip11_Col33_HCI_40x10ms_stress_VG_ConstPulse_VAsource_VBdrain_01', '../Data/chip11/Chip11_Col33_HCI_20x40ms_stress_VG_ConstPulse_VAsource_VBdrain_02', '../Data/chip11/Chip11_Col33_HCI_12x200ms_stress_VG_ConstPulse_VAsource_VBdrain_03', '../Data/chip11/Chip11_Col33_HCI_36x200ms_stress_VG_ConstPulse_VAsource_VBdrain_04'], '../Plots/chip11/', 'VG1p8_VD1p7', '_cycle01020304', 40, 160, 1)
#MLC_IDSAT_characterization(11, 33, 16, 2, 'ULVT', 1.5, 2.0, 128, range(64, 96) , [40, 20, 12, 36], [0.01, 0.04, 0.2, 0.2], 4, [0, 0.4, 0.8, 1.6, 2.0, 2.6, 3.2, 3.8, 4.4, 4.8, 12], ['0', '0.4', '0.4', '1.2', '1.2', '1.8', '2.4', '3.0', '3.6', '3.6', '10.8'], ['../Data/chip11/Chip11_Col33_HCI_40x10ms_stress_VG_ConstPulse_VAsource_VBdrain_01', '../Data/chip11/Chip11_Col33_HCI_20x40ms_stress_VG_ConstPulse_VAsource_VBdrain_02', '../Data/chip11/Chip11_Col33_HCI_12x200ms_stress_VG_ConstPulse_VAsource_VBdrain_03', '../Data/chip11/Chip11_Col33_HCI_36x200ms_stress_VG_ConstPulse_VAsource_VBdrain_04'], '../Plots/chip11/', 'VG1p5_VD2p0', '_cycle01020304', 10, 160, 1)
#MLC_IDSAT_characterization(11, 33, 16, 2, 'ULVT', 1.8, 2.0, 128, range(96, 128), [40, 20, 12, 36], [0.01, 0.04, 0.2, 0.2], 4, [0, 0.4, 0.8, 1.6, 2.0, 2.6, 3.2, 3.8, 4.4, 4.8, 12], ['0', '0.4', '0.4', '1.2', '1.2', '1.8', '2.4', '3.0', '3.6', '3.6', '10.8'], ['../Data/chip11/Chip11_Col33_HCI_40x10ms_stress_VG_ConstPulse_VAsource_VBdrain_01', '../Data/chip11/Chip11_Col33_HCI_20x40ms_stress_VG_ConstPulse_VAsource_VBdrain_02', '../Data/chip11/Chip11_Col33_HCI_12x200ms_stress_VG_ConstPulse_VAsource_VBdrain_03', '../Data/chip11/Chip11_Col33_HCI_36x200ms_stress_VG_ConstPulse_VAsource_VBdrain_04'], '../Plots/chip11/', 'VG1p8_VD2p0', '_cycle01020304', 10, 160, 1)
#MLC_IDSAT_characterization(11, 18, 36, 2, 'ULVT', 1.8, 2.0, 32, range(0, 16) , [40], [0.01], 1, [0, 0.05, 0.1, 0.15, 0.2, 0.25, 0.3, 0.35, 0.4, 0.5], ['0', '0.05', '0.1', '0.15', '0.2', '0.25', '0.3', '0.35', '0.4', 'recover'], ['../Data/chip11/Chip11_Col18_HCI_40x10ms_stress_VG_ConstPulse_VAsource_VBdrain_01'], '../Plots/chip11/', 'VG1p8_VD2p0', '_cycle01', 50, 125, 1)
#MLC_IDSAT_characterization(11, 18, 36, 2, 'ULVT', 1.8, 2.4, 32, range(16, 32), [40], [0.01], 1, [0, 0.05, 0.1, 0.15, 0.2, 0.25, 0.3, 0.35, 0.4, 0.5], ['0', '0.05', '0.1', '0.15', '0.2', '0.25', '0.3', '0.35', '0.4', 'recover'], ['../Data/chip11/Chip11_Col18_HCI_40x10ms_stress_VG_ConstPulse_VAsource_VBdrain_01'], '../Plots/chip11/', 'VG1p8_VD2p4', '_cycle01', 20, 125, 1)
#MLC_IDSAT_characterization(11, 18, 36, 2, 'ULVT', 1.8, 2.0, 32, range(0, 16) , [40, 80], [0.01, 0.01], 2, [0, 0.2, 0.4, 0.8, 1.0, 1.2, 1.4, 1.6, 1.7], ['0', '0.2', '0.4', '0.4', '0.6', '0.8', '1.0', '1.2', 'recover'], ['../Data/chip11/Chip11_Col18_HCI_40x10ms_stress_VG_ConstPulse_VAsource_VBdrain_01', '../Data/chip11/Chip11_Col18_HCI_80x10ms_stress_VG_ConstPulse_VAsource_VBdrain_02'], '../Plots/chip11/', 'VG1p8_VD2p0', '_cycle0102', 50, 125, 1)
#MLC_IDSAT_characterization(11, 18, 36, 2, 'ULVT', 1.8, 2.4, 32, range(16, 32), [40, 80], [0.01, 0.01], 2, [0, 0.2, 0.4, 0.8, 1.0, 1.2, 1.4, 1.6, 1.7], ['0', '0.2', '0.4', '0.4', '0.6', '0.8', '1.0', '1.2', 'recover'], ['../Data/chip11/Chip11_Col18_HCI_40x10ms_stress_VG_ConstPulse_VAsource_VBdrain_01', '../Data/chip11/Chip11_Col18_HCI_80x10ms_stress_VG_ConstPulse_VAsource_VBdrain_02'], '../Plots/chip11/', 'VG1p8_VD2p4', '_cycle0102', 20, 125, 1)
#MLC_IDSAT_characterization(11, 18, 36, 2, 'ULVT', 1.8, 2.0, 32, range(0, 16) , [40, 80, 240], [0.01, 0.01, 0.01], 3, [0, 0.4, 0.8, 1.6, 2.0, 2.6, 3.2, 3.8, 4.4], ['0', '0.4', '0.4', '1.2', '1.2', '1.8', '2.4', '3.0', '3.6'], ['../Data/chip11/Chip11_Col18_HCI_40x10ms_stress_VG_ConstPulse_VAsource_VBdrain_01', '../Data/chip11/Chip11_Col18_HCI_80x10ms_stress_VG_ConstPulse_VAsource_VBdrain_02', '../Data/chip11/Chip11_Col18_HCI_240x10ms_stress_VG_ConstPulse_VAsource_VBdrain_03'], '../Plots/chip11/', 'VG1p8_VD2p0', '_cycle010203', 50, 125, 1)
#MLC_IDSAT_characterization(11, 18, 36, 2, 'ULVT', 1.8, 2.4, 32, range(16, 32), [40, 80, 240], [0.01, 0.01, 0.01], 3, [0, 0.4, 0.8, 1.6, 2.0, 2.6, 3.2, 3.8, 4.4], ['0', '0.4', '0.4', '1.2', '1.2', '1.8', '2.4', '3.0', '3.6'], ['../Data/chip11/Chip11_Col18_HCI_40x10ms_stress_VG_ConstPulse_VAsource_VBdrain_01', '../Data/chip11/Chip11_Col18_HCI_80x10ms_stress_VG_ConstPulse_VAsource_VBdrain_02', '../Data/chip11/Chip11_Col18_HCI_240x10ms_stress_VG_ConstPulse_VAsource_VBdrain_03'], '../Plots/chip11/', 'VG1p8_VD2p4', '_cycle010203', 20, 125, 1)
#MLC_IDSAT_characterization(11, 18, 36, 2, 'ULVT', 1.8, 2.0, 32, range(0, 16) , [40, 80, 240, 180], [0.01, 0.01, 0.01, 0.04], 4, [0, 0.4, 0.8, 1.6, 2.0, 2.6, 3.2, 3.8, 4.4, 4.8, 12], ['0', '0.4', '0.4', '1.2', '1.2', '1.8', '2.4', '3.0', '3.6', '3.6', '10.8'], ['../Data/chip11/Chip11_Col18_HCI_40x10ms_stress_VG_ConstPulse_VAsource_VBdrain_01', '../Data/chip11/Chip11_Col18_HCI_80x10ms_stress_VG_ConstPulse_VAsource_VBdrain_02', '../Data/chip11/Chip11_Col18_HCI_240x10ms_stress_VG_ConstPulse_VAsource_VBdrain_03', '../Data/chip11/Chip11_Col18_HCI_180x40ms_stress_VG_ConstPulse_VAsource_VBdrain_04'], '../Plots/chip11/', 'VG1p8_VD2p0', '_cycle01020304', 40, 125, 1)
#MLC_IDSAT_characterization(11, 18, 36, 2, 'ULVT', 1.8, 2.4, 32, range(16, 32), [40, 80, 240, 180], [0.01, 0.01, 0.01, 0.04], 4, [0, 0.4, 0.8, 1.6, 2.0, 2.6, 3.2, 3.8, 4.4, 4.8, 12], ['0', '0.4', '0.4', '1.2', '1.2', '1.8', '2.4', '3.0', '3.6', '3.6', '10.8'], ['../Data/chip11/Chip11_Col18_HCI_40x10ms_stress_VG_ConstPulse_VAsource_VBdrain_01', '../Data/chip11/Chip11_Col18_HCI_80x10ms_stress_VG_ConstPulse_VAsource_VBdrain_02', '../Data/chip11/Chip11_Col18_HCI_240x10ms_stress_VG_ConstPulse_VAsource_VBdrain_03', '../Data/chip11/Chip11_Col18_HCI_180x40ms_stress_VG_ConstPulse_VAsource_VBdrain_04'], '../Plots/chip11/', 'VG1p8_VD2p4', '_cycle01020304', 10, 125, 1)
#MLC_IDSAT_characterization(11, 24, 20, 2, 'ULVT', 1.8, 1.8, 32, range(0, 16) , [40], [0.01], 1, [0, 0.05, 0.1, 0.15, 0.2, 0.25, 0.3, 0.35, 0.4, 0.5], ['0', '0.05', '0.1', '0.15', '0.2', '0.25', '0.3', '0.35', '0.4', 'recover'], ['../Data/chip11/Chip11_Col24_HCI_40x10ms_stress_VG_ConstPulse_VAsource_VBdrain_01'], '../Plots/chip11/', 'VG1p8_VD1p8', '_cycle01', 50, 150, 1)
#MLC_IDSAT_characterization(11, 24, 20, 2, 'ULVT', 1.8, 2.2, 32, range(16, 32), [40], [0.01], 1, [0, 0.05, 0.1, 0.15, 0.2, 0.25, 0.3, 0.35, 0.4, 0.5], ['0', '0.05', '0.1', '0.15', '0.2', '0.25', '0.3', '0.35', '0.4', 'recover'], ['../Data/chip11/Chip11_Col24_HCI_40x10ms_stress_VG_ConstPulse_VAsource_VBdrain_01'], '../Plots/chip11/', 'VG1p8_VD2p2', '_cycle01', 15, 150, 1)
#MLC_IDSAT_characterization(11, 24, 20, 2, 'ULVT', 1.8, 1.8, 32, range(0, 16) , [40, 80], [0.01, 0.01], 2, [0, 0.2, 0.4, 0.8, 1.0, 1.2, 1.4, 1.6, 1.7], ['0', '0.2', '0.4', '0.4', '0.6', '0.8', '1.0', '1.2', 'recover'], ['../Data/chip11/Chip11_Col24_HCI_40x10ms_stress_VG_ConstPulse_VAsource_VBdrain_01', '../Data/chip11/Chip11_Col24_HCI_80x10ms_stress_VG_ConstPulse_VAsource_VBdrain_02'], '../Plots/chip11/', 'VG1p8_VD1p8', '_cycle0102', 50, 150, 1)
#MLC_IDSAT_characterization(11, 24, 20, 2, 'ULVT', 1.8, 2.2, 32, range(16, 32), [40, 80], [0.01, 0.01], 2, [0, 0.2, 0.4, 0.8, 1.0, 1.2, 1.4, 1.6, 1.7], ['0', '0.2', '0.4', '0.4', '0.6', '0.8', '1.0', '1.2', 'recover'], ['../Data/chip11/Chip11_Col24_HCI_40x10ms_stress_VG_ConstPulse_VAsource_VBdrain_01', '../Data/chip11/Chip11_Col24_HCI_80x10ms_stress_VG_ConstPulse_VAsource_VBdrain_02'], '../Plots/chip11/', 'VG1p8_VD2p2', '_cycle0102', 15, 150, 1)
#MLC_IDSAT_characterization(11, 24, 20, 2, 'ULVT', 1.8, 1.8, 32, range(0, 16) , [40, 80, 240], [0.01, 0.01, 0.01], 3, [0, 0.4, 0.8, 1.6, 2.0, 2.6, 3.2, 3.8, 4.4], ['0', '0.4', '0.4', '1.2', '1.2', '1.8', '2.4', '3.0', '3.6'], ['../Data/chip11/Chip11_Col24_HCI_40x10ms_stress_VG_ConstPulse_VAsource_VBdrain_01', '../Data/chip11/Chip11_Col24_HCI_80x10ms_stress_VG_ConstPulse_VAsource_VBdrain_02', '../Data/chip11/Chip11_Col24_HCI_240x10ms_stress_VG_ConstPulse_VAsource_VBdrain_03'], '../Plots/chip11/', 'VG1p8_VD1p8', '_cycle010203', 50, 150, 1)
#MLC_IDSAT_characterization(11, 24, 20, 2, 'ULVT', 1.8, 2.2, 32, range(16, 32), [40, 80, 240], [0.01, 0.01, 0.01], 3, [0, 0.4, 0.8, 1.6, 2.0, 2.6, 3.2, 3.8, 4.4], ['0', '0.4', '0.4', '1.2', '1.2', '1.8', '2.4', '3.0', '3.6'], ['../Data/chip11/Chip11_Col24_HCI_40x10ms_stress_VG_ConstPulse_VAsource_VBdrain_01', '../Data/chip11/Chip11_Col24_HCI_80x10ms_stress_VG_ConstPulse_VAsource_VBdrain_02', '../Data/chip11/Chip11_Col24_HCI_240x10ms_stress_VG_ConstPulse_VAsource_VBdrain_03'], '../Plots/chip11/', 'VG1p8_VD2p2', '_cycle010203', 15, 150, 1)
#MLC_IDSAT_characterization(11, 24, 20, 2, 'ULVT', 1.8, 1.8, 32, range(0, 16) , [40, 80, 240, 180], [0.01, 0.01, 0.01, 0.04], 4, [0, 0.4, 0.8, 1.6, 2.0, 2.6, 3.2, 3.8, 4.4, 4.8, 12], ['0', '0.4', '0.4', '1.2', '1.2', '1.8', '2.4', '3.0', '3.6', '3.6', '10.8'], ['../Data/chip11/Chip11_Col24_HCI_40x10ms_stress_VG_ConstPulse_VAsource_VBdrain_01', '../Data/chip11/Chip11_Col24_HCI_80x10ms_stress_VG_ConstPulse_VAsource_VBdrain_02', '../Data/chip11/Chip11_Col24_HCI_240x10ms_stress_VG_ConstPulse_VAsource_VBdrain_03', '../Data/chip11/Chip11_Col24_HCI_180x40ms_stress_VG_ConstPulse_VAsource_VBdrain_04'], '../Plots/chip11/', 'VG1p8_VD1p8', '_cycle01020304', 40, 150, 1)
#MLC_IDSAT_characterization(11, 24, 20, 2, 'ULVT', 1.8, 2.2, 32, range(16, 32), [40, 80, 240, 180], [0.01, 0.01, 0.01, 0.04], 4, [0, 0.4, 0.8, 1.6, 2.0, 2.6, 3.2, 3.8, 4.4, 4.8, 12], ['0', '0.4', '0.4', '1.2', '1.2', '1.8', '2.4', '3.0', '3.6', '3.6', '10.8'], ['../Data/chip11/Chip11_Col24_HCI_40x10ms_stress_VG_ConstPulse_VAsource_VBdrain_01', '../Data/chip11/Chip11_Col24_HCI_80x10ms_stress_VG_ConstPulse_VAsource_VBdrain_02', '../Data/chip11/Chip11_Col24_HCI_240x10ms_stress_VG_ConstPulse_VAsource_VBdrain_03', '../Data/chip11/Chip11_Col24_HCI_180x40ms_stress_VG_ConstPulse_VAsource_VBdrain_04'], '../Plots/chip11/', 'VG1p8_VD2p2', '_cycle01020304', 5, 150, 1)
#MLC_IDSAT_characterization(11, 27, 20, 2, 'ULVT', 1.5, 1.8, 128, range(0, 32) , [40], [0.01], 1, [0, 0.05, 0.1, 0.15, 0.2, 0.25, 0.3, 0.35, 0.4, 0.5], ['0', '0.05', '0.1', '0.15', '0.2', '0.25', '0.3', '0.35', '0.4', 'recover'], ['../Data/chip11/Chip11_Col27_HCI_40x10ms_stress_VG_ConstPulse_VAsource_VBdrain_01'], '../Plots/chip11/', 'VG1p5_VD1p8', '_cycle01', 50, 150, 1)
#MLC_IDSAT_characterization(11, 27, 20, 2, 'ULVT', 1.8, 1.8, 128, range(32, 64) , [40], [0.01], 1, [0, 0.05, 0.1, 0.15, 0.2, 0.25, 0.3, 0.35, 0.4, 0.5], ['0', '0.05', '0.1', '0.15', '0.2', '0.25', '0.3', '0.35', '0.4', 'recover'], ['../Data/chip11/Chip11_Col27_HCI_40x10ms_stress_VG_ConstPulse_VAsource_VBdrain_01'], '../Plots/chip11/', 'VG1p8_VD1p8', '_cycle01', 50, 150, 1)
#MLC_IDSAT_characterization(11, 27, 20, 2, 'ULVT', 1.5, 2.2, 128, range(64, 96) , [40], [0.01], 1, [0, 0.05, 0.1, 0.15, 0.2, 0.25, 0.3, 0.35, 0.4, 0.5], ['0', '0.05', '0.1', '0.15', '0.2', '0.25', '0.3', '0.35', '0.4', 'recover'], ['../Data/chip11/Chip11_Col27_HCI_40x10ms_stress_VG_ConstPulse_VAsource_VBdrain_01'], '../Plots/chip11/', 'VG1p5_VD2p2', '_cycle01', 15, 150, 1)
#MLC_IDSAT_characterization(11, 27, 20, 2, 'ULVT', 1.8, 2.2, 128, range(96, 128), [40], [0.01], 1, [0, 0.05, 0.1, 0.15, 0.2, 0.25, 0.3, 0.35, 0.4, 0.5], ['0', '0.05', '0.1', '0.15', '0.2', '0.25', '0.3', '0.35', '0.4', 'recover'], ['../Data/chip11/Chip11_Col27_HCI_40x10ms_stress_VG_ConstPulse_VAsource_VBdrain_01'], '../Plots/chip11/', 'VG1p8_VD2p2', '_cycle01', 15, 150, 1)
#MLC_IDSAT_characterization(11, 27, 20, 2, 'ULVT', 1.5, 1.8, 128, range(0, 32) , [40, 20], [0.01, 0.04], 2, [0, 0.2, 0.4, 0.8, 1.0, 1.2, 1.4, 1.6, 1.7], ['0', '0.2', '0.4', '0.4', '0.6', '0.8', '1.0', '1.2', 'recover'], ['../Data/chip11/Chip11_Col27_HCI_40x10ms_stress_VG_ConstPulse_VAsource_VBdrain_01', '../Data/chip11/Chip11_Col27_HCI_20x40ms_stress_VG_ConstPulse_VAsource_VBdrain_02'], '../Plots/chip11/', 'VG1p5_VD1p8', '_cycle0102', 50, 150, 1)
#MLC_IDSAT_characterization(11, 27, 20, 2, 'ULVT', 1.8, 1.8, 128, range(32, 64) , [40, 20], [0.01, 0.04], 2, [0, 0.2, 0.4, 0.8, 1.0, 1.2, 1.4, 1.6, 1.7], ['0', '0.2', '0.4', '0.4', '0.6', '0.8', '1.0', '1.2', 'recover'], ['../Data/chip11/Chip11_Col27_HCI_40x10ms_stress_VG_ConstPulse_VAsource_VBdrain_01', '../Data/chip11/Chip11_Col27_HCI_20x40ms_stress_VG_ConstPulse_VAsource_VBdrain_02'], '../Plots/chip11/', 'VG1p8_VD1p8', '_cycle0102', 50, 150, 1)
#MLC_IDSAT_characterization(11, 27, 20, 2, 'ULVT', 1.5, 2.2, 128, range(64, 96) , [40, 20], [0.01, 0.04], 2, [0, 0.2, 0.4, 0.8, 1.0, 1.2, 1.4, 1.6, 1.7], ['0', '0.2', '0.4', '0.4', '0.6', '0.8', '1.0', '1.2', 'recover'], ['../Data/chip11/Chip11_Col27_HCI_40x10ms_stress_VG_ConstPulse_VAsource_VBdrain_01', '../Data/chip11/Chip11_Col27_HCI_20x40ms_stress_VG_ConstPulse_VAsource_VBdrain_02'], '../Plots/chip11/', 'VG1p5_VD2p2', '_cycle0102', 15, 150, 1)
#MLC_IDSAT_characterization(11, 27, 20, 2, 'ULVT', 1.8, 2.2, 128, range(96, 128), [40, 20], [0.01, 0.04], 2, [0, 0.2, 0.4, 0.8, 1.0, 1.2, 1.4, 1.6, 1.7], ['0', '0.2', '0.4', '0.4', '0.6', '0.8', '1.0', '1.2', 'recover'], ['../Data/chip11/Chip11_Col27_HCI_40x10ms_stress_VG_ConstPulse_VAsource_VBdrain_01', '../Data/chip11/Chip11_Col27_HCI_20x40ms_stress_VG_ConstPulse_VAsource_VBdrain_02'], '../Plots/chip11/', 'VG1p8_VD2p2', '_cycle0102', 15, 150, 1)
#MLC_IDSAT_characterization(11, 27, 20, 2, 'ULVT', 1.5, 1.8, 128, range(0, 32) , [40, 20, 12], [0.01, 0.04, 0.2], 3, [0, 0.4, 0.8, 1.6, 2.0, 2.6, 3.2, 3.8, 4.4], ['0', '0.4', '0.4', '1.2', '1.2', '1.8', '2.4', '3.0', '3.6'], ['../Data/chip11/Chip11_Col27_HCI_40x10ms_stress_VG_ConstPulse_VAsource_VBdrain_01', '../Data/chip11/Chip11_Col27_HCI_20x40ms_stress_VG_ConstPulse_VAsource_VBdrain_02', '../Data/chip11/Chip11_Col27_HCI_12x200ms_stress_VG_ConstPulse_VAsource_VBdrain_03'], '../Plots/chip11/', 'VG1p5_VD1p8', '_cycle010203', 50, 150, 1)
#MLC_IDSAT_characterization(11, 27, 20, 2, 'ULVT', 1.8, 1.8, 128, range(32, 64) , [40, 20, 12], [0.01, 0.04, 0.2], 3, [0, 0.4, 0.8, 1.6, 2.0, 2.6, 3.2, 3.8, 4.4], ['0', '0.4', '0.4', '1.2', '1.2', '1.8', '2.4', '3.0', '3.6'], ['../Data/chip11/Chip11_Col27_HCI_40x10ms_stress_VG_ConstPulse_VAsource_VBdrain_01', '../Data/chip11/Chip11_Col27_HCI_20x40ms_stress_VG_ConstPulse_VAsource_VBdrain_02', '../Data/chip11/Chip11_Col27_HCI_12x200ms_stress_VG_ConstPulse_VAsource_VBdrain_03'], '../Plots/chip11/', 'VG1p8_VD1p8', '_cycle010203', 50, 150, 1)
#MLC_IDSAT_characterization(11, 27, 20, 2, 'ULVT', 1.5, 2.2, 128, range(64, 96) , [40, 20, 12], [0.01, 0.04, 0.2], 3, [0, 0.4, 0.8, 1.6, 2.0, 2.6, 3.2, 3.8, 4.4], ['0', '0.4', '0.4', '1.2', '1.2', '1.8', '2.4', '3.0', '3.6'], ['../Data/chip11/Chip11_Col27_HCI_40x10ms_stress_VG_ConstPulse_VAsource_VBdrain_01', '../Data/chip11/Chip11_Col27_HCI_20x40ms_stress_VG_ConstPulse_VAsource_VBdrain_02', '../Data/chip11/Chip11_Col27_HCI_12x200ms_stress_VG_ConstPulse_VAsource_VBdrain_03'], '../Plots/chip11/', 'VG1p5_VD2p2', '_cycle010203', 15, 150, 1)
#MLC_IDSAT_characterization(11, 27, 20, 2, 'ULVT', 1.8, 2.2, 128, range(96, 128), [40, 20, 12], [0.01, 0.04, 0.2], 3, [0, 0.4, 0.8, 1.6, 2.0, 2.6, 3.2, 3.8, 4.4], ['0', '0.4', '0.4', '1.2', '1.2', '1.8', '2.4', '3.0', '3.6'], ['../Data/chip11/Chip11_Col27_HCI_40x10ms_stress_VG_ConstPulse_VAsource_VBdrain_01', '../Data/chip11/Chip11_Col27_HCI_20x40ms_stress_VG_ConstPulse_VAsource_VBdrain_02', '../Data/chip11/Chip11_Col27_HCI_12x200ms_stress_VG_ConstPulse_VAsource_VBdrain_03'], '../Plots/chip11/', 'VG1p8_VD2p2', '_cycle010203', 15, 150, 1)
#MLC_IDSAT_characterization(11, 27, 20, 2, 'ULVT', 1.5, 1.8, 128, range(0, 32) , [40, 20, 12, 36], [0.01, 0.04, 0.2, 0.2], 4, [0, 0.4, 0.8, 1.6, 2.0, 2.6, 3.2, 3.8, 4.4, 4.8, 12], ['0', '0.4', '0.4', '1.2', '1.2', '1.8', '2.4', '3.0', '3.6', '3.6', '10.8'], ['../Data/chip11/Chip11_Col27_HCI_40x10ms_stress_VG_ConstPulse_VAsource_VBdrain_01', '../Data/chip11/Chip11_Col27_HCI_20x40ms_stress_VG_ConstPulse_VAsource_VBdrain_02', '../Data/chip11/Chip11_Col27_HCI_12x200ms_stress_VG_ConstPulse_VAsource_VBdrain_03', '../Data/chip11/Chip11_Col27_HCI_36x200ms_stress_VG_ConstPulse_VAsource_VBdrain_04'], '../Plots/chip11/', 'VG1p5_VD1p8', '_cycle01020304', 40, 150, 1)
#MLC_IDSAT_characterization(11, 27, 20, 2, 'ULVT', 1.8, 1.8, 128, range(32, 64) , [40, 20, 12, 36], [0.01, 0.04, 0.2, 0.2], 4, [0, 0.4, 0.8, 1.6, 2.0, 2.6, 3.2, 3.8, 4.4, 4.8, 12], ['0', '0.4', '0.4', '1.2', '1.2', '1.8', '2.4', '3.0', '3.6', '3.6', '10.8'], ['../Data/chip11/Chip11_Col27_HCI_40x10ms_stress_VG_ConstPulse_VAsource_VBdrain_01', '../Data/chip11/Chip11_Col27_HCI_20x40ms_stress_VG_ConstPulse_VAsource_VBdrain_02', '../Data/chip11/Chip11_Col27_HCI_12x200ms_stress_VG_ConstPulse_VAsource_VBdrain_03', '../Data/chip11/Chip11_Col27_HCI_36x200ms_stress_VG_ConstPulse_VAsource_VBdrain_04'], '../Plots/chip11/', 'VG1p8_VD1p8', '_cycle01020304', 20, 150, 1)
#MLC_IDSAT_characterization(11, 27, 20, 2, 'ULVT', 1.5, 2.2, 128, range(64, 96) , [40, 20, 12, 36], [0.01, 0.04, 0.2, 0.2], 4, [0, 0.4, 0.8, 1.6, 2.0, 2.6, 3.2, 3.8, 4.4, 4.8, 12], ['0', '0.4', '0.4', '1.2', '1.2', '1.8', '2.4', '3.0', '3.6', '3.6', '10.8'], ['../Data/chip11/Chip11_Col27_HCI_40x10ms_stress_VG_ConstPulse_VAsource_VBdrain_01', '../Data/chip11/Chip11_Col27_HCI_20x40ms_stress_VG_ConstPulse_VAsource_VBdrain_02', '../Data/chip11/Chip11_Col27_HCI_12x200ms_stress_VG_ConstPulse_VAsource_VBdrain_03', '../Data/chip11/Chip11_Col27_HCI_36x200ms_stress_VG_ConstPulse_VAsource_VBdrain_04'], '../Plots/chip11/', 'VG1p5_VD2p2', '_cycle01020304', 5, 150, 1)
#MLC_IDSAT_characterization(11, 27, 20, 2, 'ULVT', 1.8, 2.2, 128, range(96, 128), [40, 20, 12, 36], [0.01, 0.04, 0.2, 0.2], 4, [0, 0.4, 0.8, 1.6, 2.0, 2.6, 3.2, 3.8, 4.4, 4.8, 12], ['0', '0.4', '0.4', '1.2', '1.2', '1.8', '2.4', '3.0', '3.6', '3.6', '10.8'], ['../Data/chip11/Chip11_Col27_HCI_40x10ms_stress_VG_ConstPulse_VAsource_VBdrain_01', '../Data/chip11/Chip11_Col27_HCI_20x40ms_stress_VG_ConstPulse_VAsource_VBdrain_02', '../Data/chip11/Chip11_Col27_HCI_12x200ms_stress_VG_ConstPulse_VAsource_VBdrain_03', '../Data/chip11/Chip11_Col27_HCI_36x200ms_stress_VG_ConstPulse_VAsource_VBdrain_04'], '../Plots/chip11/', 'VG1p8_VD2p2', '_cycle01020304', 5, 150, 1)
#MLC_IDSAT_characterization(11, 28, 20, 2, 'LVT', 1.5, 1.8, 128, range(0, 32) , [40], [0.01], 1, [0, 0.05, 0.1, 0.15, 0.2, 0.25, 0.3, 0.35, 0.4, 0.5], ['0', '0.05', '0.1', '0.15', '0.2', '0.25', '0.3', '0.35', '0.4', 'recover'], ['../Data/chip11/Chip11_Col28_HCI_40x10ms_stress_VG_ConstPulse_VAsource_VBdrain_01'], '../Plots/chip11/', 'VG1p5_VD1p8', '_cycle01', 50, 150, 1)
#MLC_IDSAT_characterization(11, 28, 20, 2, 'LVT', 1.8, 1.8, 128, range(32, 64) , [40], [0.01], 1, [0, 0.05, 0.1, 0.15, 0.2, 0.25, 0.3, 0.35, 0.4, 0.5], ['0', '0.05', '0.1', '0.15', '0.2', '0.25', '0.3', '0.35', '0.4', 'recover'], ['../Data/chip11/Chip11_Col28_HCI_40x10ms_stress_VG_ConstPulse_VAsource_VBdrain_01'], '../Plots/chip11/', 'VG1p8_VD1p8', '_cycle01', 50, 150, 1)
#MLC_IDSAT_characterization(11, 28, 20, 2, 'LVT', 1.5, 2.2, 128, range(64, 96) , [40], [0.01], 1, [0, 0.05, 0.1, 0.15, 0.2, 0.25, 0.3, 0.35, 0.4, 0.5], ['0', '0.05', '0.1', '0.15', '0.2', '0.25', '0.3', '0.35', '0.4', 'recover'], ['../Data/chip11/Chip11_Col28_HCI_40x10ms_stress_VG_ConstPulse_VAsource_VBdrain_01'], '../Plots/chip11/', 'VG1p5_VD2p2', '_cycle01', 15, 150, 1)
#MLC_IDSAT_characterization(11, 28, 20, 2, 'LVT', 1.8, 2.2, 128, range(96, 128), [40], [0.01], 1, [0, 0.05, 0.1, 0.15, 0.2, 0.25, 0.3, 0.35, 0.4, 0.5], ['0', '0.05', '0.1', '0.15', '0.2', '0.25', '0.3', '0.35', '0.4', 'recover'], ['../Data/chip11/Chip11_Col28_HCI_40x10ms_stress_VG_ConstPulse_VAsource_VBdrain_01'], '../Plots/chip11/', 'VG1p8_VD2p2', '_cycle01', 15, 150, 1)
#MLC_IDSAT_characterization(11, 28, 20, 2, 'LVT', 1.5, 1.8, 128, range(0, 32) , [40, 20], [0.01, 0.04], 2, [0, 0.2, 0.4, 0.8, 1.0, 1.2, 1.4, 1.6, 1.7], ['0', '0.2', '0.4', '0.4', '0.6', '0.8', '1.0', '1.2', 'recover'], ['../Data/chip11/Chip11_Col28_HCI_40x10ms_stress_VG_ConstPulse_VAsource_VBdrain_01', '../Data/chip11/Chip11_Col28_HCI_20x40ms_stress_VG_ConstPulse_VAsource_VBdrain_02'], '../Plots/chip11/', 'VG1p5_VD1p8', '_cycle0102', 50, 150, 1)
#MLC_IDSAT_characterization(11, 28, 20, 2, 'LVT', 1.8, 1.8, 128, range(32, 64) , [40, 20], [0.01, 0.04], 2, [0, 0.2, 0.4, 0.8, 1.0, 1.2, 1.4, 1.6, 1.7], ['0', '0.2', '0.4', '0.4', '0.6', '0.8', '1.0', '1.2', 'recover'], ['../Data/chip11/Chip11_Col28_HCI_40x10ms_stress_VG_ConstPulse_VAsource_VBdrain_01', '../Data/chip11/Chip11_Col28_HCI_20x40ms_stress_VG_ConstPulse_VAsource_VBdrain_02'], '../Plots/chip11/', 'VG1p8_VD1p8', '_cycle0102', 50, 150, 1)
#MLC_IDSAT_characterization(11, 28, 20, 2, 'LVT', 1.5, 2.2, 128, range(64, 96) , [40, 20], [0.01, 0.04], 2, [0, 0.2, 0.4, 0.8, 1.0, 1.2, 1.4, 1.6, 1.7], ['0', '0.2', '0.4', '0.4', '0.6', '0.8', '1.0', '1.2', 'recover'], ['../Data/chip11/Chip11_Col28_HCI_40x10ms_stress_VG_ConstPulse_VAsource_VBdrain_01', '../Data/chip11/Chip11_Col28_HCI_20x40ms_stress_VG_ConstPulse_VAsource_VBdrain_02'], '../Plots/chip11/', 'VG1p5_VD2p2', '_cycle0102', 15, 150, 1)
#MLC_IDSAT_characterization(11, 28, 20, 2, 'LVT', 1.8, 2.2, 128, range(96, 128), [40, 20], [0.01, 0.04], 2, [0, 0.2, 0.4, 0.8, 1.0, 1.2, 1.4, 1.6, 1.7], ['0', '0.2', '0.4', '0.4', '0.6', '0.8', '1.0', '1.2', 'recover'], ['../Data/chip11/Chip11_Col28_HCI_40x10ms_stress_VG_ConstPulse_VAsource_VBdrain_01', '../Data/chip11/Chip11_Col28_HCI_20x40ms_stress_VG_ConstPulse_VAsource_VBdrain_02'], '../Plots/chip11/', 'VG1p8_VD2p2', '_cycle0102', 15, 150, 1)
#MLC_IDSAT_characterization(11, 28, 20, 2, 'LVT', 1.5, 1.8, 128, range(0, 32) , [40, 20, 12], [0.01, 0.04, 0.2], 3, [0, 0.4, 0.8, 1.6, 2.0, 2.6, 3.2, 3.8, 4.4], ['0', '0.4', '0.4', '1.2', '1.2', '1.8', '2.4', '3.0', '3.6'], ['../Data/chip11/Chip11_Col28_HCI_40x10ms_stress_VG_ConstPulse_VAsource_VBdrain_01', '../Data/chip11/Chip11_Col28_HCI_20x40ms_stress_VG_ConstPulse_VAsource_VBdrain_02', '../Data/chip11/Chip11_Col28_HCI_12x200ms_stress_VG_ConstPulse_VAsource_VBdrain_03'], '../Plots/chip11/', 'VG1p5_VD1p8', '_cycle010203', 50, 150, 1)
#MLC_IDSAT_characterization(11, 28, 20, 2, 'LVT', 1.8, 1.8, 128, range(32, 64) , [40, 20, 12], [0.01, 0.04, 0.2], 3, [0, 0.4, 0.8, 1.6, 2.0, 2.6, 3.2, 3.8, 4.4], ['0', '0.4', '0.4', '1.2', '1.2', '1.8', '2.4', '3.0', '3.6'], ['../Data/chip11/Chip11_Col28_HCI_40x10ms_stress_VG_ConstPulse_VAsource_VBdrain_01', '../Data/chip11/Chip11_Col28_HCI_20x40ms_stress_VG_ConstPulse_VAsource_VBdrain_02', '../Data/chip11/Chip11_Col28_HCI_12x200ms_stress_VG_ConstPulse_VAsource_VBdrain_03'], '../Plots/chip11/', 'VG1p8_VD1p8', '_cycle010203', 50, 150, 1)
#MLC_IDSAT_characterization(11, 28, 20, 2, 'LVT', 1.5, 2.2, 128, range(64, 96) , [40, 20, 12], [0.01, 0.04, 0.2], 3, [0, 0.4, 0.8, 1.6, 2.0, 2.6, 3.2, 3.8, 4.4], ['0', '0.4', '0.4', '1.2', '1.2', '1.8', '2.4', '3.0', '3.6'], ['../Data/chip11/Chip11_Col28_HCI_40x10ms_stress_VG_ConstPulse_VAsource_VBdrain_01', '../Data/chip11/Chip11_Col28_HCI_20x40ms_stress_VG_ConstPulse_VAsource_VBdrain_02', '../Data/chip11/Chip11_Col28_HCI_12x200ms_stress_VG_ConstPulse_VAsource_VBdrain_03'], '../Plots/chip11/', 'VG1p5_VD2p2', '_cycle010203', 15, 150, 1)
#MLC_IDSAT_characterization(11, 28, 20, 2, 'LVT', 1.8, 2.2, 128, range(96, 128), [40, 20, 12], [0.01, 0.04, 0.2], 3, [0, 0.4, 0.8, 1.6, 2.0, 2.6, 3.2, 3.8, 4.4], ['0', '0.4', '0.4', '1.2', '1.2', '1.8', '2.4', '3.0', '3.6'], ['../Data/chip11/Chip11_Col28_HCI_40x10ms_stress_VG_ConstPulse_VAsource_VBdrain_01', '../Data/chip11/Chip11_Col28_HCI_20x40ms_stress_VG_ConstPulse_VAsource_VBdrain_02', '../Data/chip11/Chip11_Col28_HCI_12x200ms_stress_VG_ConstPulse_VAsource_VBdrain_03'], '../Plots/chip11/', 'VG1p8_VD2p2', '_cycle010203', 15, 150, 1)
#MLC_IDSAT_characterization(11, 28, 20, 2, 'LVT', 1.5, 1.8, 128, range(0, 32) , [40, 20, 12, 36], [0.01, 0.04, 0.2, 0.2], 4, [0, 0.4, 0.8, 1.6, 2.0, 2.6, 3.2, 3.8, 4.4, 4.8, 12], ['0', '0.4', '0.4', '1.2', '1.2', '1.8', '2.4', '3.0', '3.6', '3.6', '10.8'], ['../Data/chip11/Chip11_Col28_HCI_40x10ms_stress_VG_ConstPulse_VAsource_VBdrain_01', '../Data/chip11/Chip11_Col28_HCI_20x40ms_stress_VG_ConstPulse_VAsource_VBdrain_02', '../Data/chip11/Chip11_Col28_HCI_12x200ms_stress_VG_ConstPulse_VAsource_VBdrain_03', '../Data/chip11/Chip11_Col28_HCI_36x200ms_stress_VG_ConstPulse_VAsource_VBdrain_04'], '../Plots/chip11/', 'VG1p5_VD1p8', '_cycle01020304', 40, 150, 1)
#MLC_IDSAT_characterization(11, 28, 20, 2, 'LVT', 1.8, 1.8, 128, range(32, 64) , [40, 20, 12, 36], [0.01, 0.04, 0.2, 0.2], 4, [0, 0.4, 0.8, 1.6, 2.0, 2.6, 3.2, 3.8, 4.4, 4.8, 12], ['0', '0.4', '0.4', '1.2', '1.2', '1.8', '2.4', '3.0', '3.6', '3.6', '10.8'], ['../Data/chip11/Chip11_Col28_HCI_40x10ms_stress_VG_ConstPulse_VAsource_VBdrain_01', '../Data/chip11/Chip11_Col28_HCI_20x40ms_stress_VG_ConstPulse_VAsource_VBdrain_02', '../Data/chip11/Chip11_Col28_HCI_12x200ms_stress_VG_ConstPulse_VAsource_VBdrain_03', '../Data/chip11/Chip11_Col28_HCI_36x200ms_stress_VG_ConstPulse_VAsource_VBdrain_04'], '../Plots/chip11/', 'VG1p8_VD1p8', '_cycle01020304', 20, 150, 1)
#MLC_IDSAT_characterization(11, 28, 20, 2, 'LVT', 1.5, 2.2, 128, range(64, 96) , [40, 20, 12, 36], [0.01, 0.04, 0.2, 0.2], 4, [0, 0.4, 0.8, 1.6, 2.0, 2.6, 3.2, 3.8, 4.4, 4.8, 12], ['0', '0.4', '0.4', '1.2', '1.2', '1.8', '2.4', '3.0', '3.6', '3.6', '10.8'], ['../Data/chip11/Chip11_Col28_HCI_40x10ms_stress_VG_ConstPulse_VAsource_VBdrain_01', '../Data/chip11/Chip11_Col28_HCI_20x40ms_stress_VG_ConstPulse_VAsource_VBdrain_02', '../Data/chip11/Chip11_Col28_HCI_12x200ms_stress_VG_ConstPulse_VAsource_VBdrain_03', '../Data/chip11/Chip11_Col28_HCI_36x200ms_stress_VG_ConstPulse_VAsource_VBdrain_04'], '../Plots/chip11/', 'VG1p5_VD2p2', '_cycle01020304', 5, 150, 1)
#MLC_IDSAT_characterization(11, 28, 20, 2, 'LVT', 1.8, 2.2, 128, range(96, 128), [40, 20, 12, 36], [0.01, 0.04, 0.2, 0.2], 4, [0, 0.4, 0.8, 1.6, 2.0, 2.6, 3.2, 3.8, 4.4, 4.8, 12], ['0', '0.4', '0.4', '1.2', '1.2', '1.8', '2.4', '3.0', '3.6', '3.6', '10.8'], ['../Data/chip11/Chip11_Col28_HCI_40x10ms_stress_VG_ConstPulse_VAsource_VBdrain_01', '../Data/chip11/Chip11_Col28_HCI_20x40ms_stress_VG_ConstPulse_VAsource_VBdrain_02', '../Data/chip11/Chip11_Col28_HCI_12x200ms_stress_VG_ConstPulse_VAsource_VBdrain_03', '../Data/chip11/Chip11_Col28_HCI_36x200ms_stress_VG_ConstPulse_VAsource_VBdrain_04'], '../Plots/chip11/', 'VG1p8_VD2p2', '_cycle01020304', 5, 150, 1)
# Per-column device parameters, one entry per test column:
#   (L, Nfin, VT_flavor, Nrow, Imax)
# Ordered Nfin = 1 first then Nfin = 2; within each fin count by L
# (36, 20, 16), then Nrow (32, 128), then VT flavor (ULVT, LVT, SVT).
# NOTE(review): Imax appears to be a per-column current ceiling used by the
# plotting calls below — confirm against IDS_VGS.
col_list = [
    # Nfin = 1
    (36, 1, 'ULVT', 32,  60),  (36, 1, 'LVT', 32,  50),  (36, 1, 'SVT', 32,  45),
    (36, 1, 'ULVT', 128, 60),  (36, 1, 'LVT', 128, 50),  (36, 1, 'SVT', 128, 45),
    (20, 1, 'ULVT', 32,  75),  (20, 1, 'LVT', 32,  60),  (20, 1, 'SVT', 32,  50),
    (20, 1, 'ULVT', 128, 75),  (20, 1, 'LVT', 128, 60),  (20, 1, 'SVT', 128, 50),
    (16, 1, 'ULVT', 32,  80),  (16, 1, 'LVT', 32,  65),  (16, 1, 'SVT', 32,  60),
    (16, 1, 'ULVT', 128, 80),  (16, 1, 'LVT', 128, 65),  (16, 1, 'SVT', 128, 60),
    # Nfin = 2
    (36, 2, 'ULVT', 32,  115), (36, 2, 'LVT', 32,  95),  (36, 2, 'SVT', 32,  85),
    (36, 2, 'ULVT', 128, 115), (36, 2, 'LVT', 128, 95),  (36, 2, 'SVT', 128, 85),
    (20, 2, 'ULVT', 32,  135), (20, 2, 'LVT', 32,  115), (20, 2, 'SVT', 32,  100),
    (20, 2, 'ULVT', 128, 135), (20, 2, 'LVT', 128, 120), (20, 2, 'SVT', 128, 100),
    (16, 2, 'ULVT', 32,  150), (16, 2, 'LVT', 32,  125), (16, 2, 'SVT', 32,  115),
    (16, 2, 'ULVT', 128, 150), (16, 2, 'LVT', 128, 125), (16, 2, 'SVT', 128, 115),
]
#MLC_IDSAT_algorithm_rv1(11, 21, 36, 2, 'ULVT', 1.8, 2.4, 128, range(0, 128), [135+20], [0.2], 1, np.arange(0, 0.01*16+0.0001, 0.01), '', ['../Data/chip11/MLC_programming_Chip11_Col21_2msPULSE_VG1p8_VD2p4_VAsource_VBdrain_01'], '../Plots/chip11/', 'VG1p8_VD2p4', '_rv1_cycle01_EfficientPython')
#MLC_IDSAT_algorithm_rv1(12, 21, 36, 2, 'ULVT', '0.9-1.2-1.5-1.8', 2.4, 128, range(0, 128), [59+16, 72+40, 80+31, 68+23], [0.2, 0.2, 0.2, 0.2], 4, [0, 15, 15.1, 37.5, 37.6, 59.8, 59.9, 78.1], ['0', '15', '', '37.4', '', '59.6', '', '77.8'], ['../Data/chip12/MLC_programming_Chip12_Col21_200msPULSE_VG0p9_VD2p4_VAsource_VBdrain_01', '../Data/chip12/MLC_programming_Chip12_Col21_200msPULSE_VG1p2_VD2p4_VAsource_VBdrain_02', '../Data/chip12/MLC_programming_Chip12_Col21_200msPULSE_VG1p5_VD2p4_VAsource_VBdrain_03', '../Data/chip12/MLC_programming_Chip12_Col21_200msPULSE_VG1p8_VD2p4_VAsource_VBdrain_04'], '../Plots/chip12/', 'VG-0p9-1p2-1p5-1p8_VD2p4', '_rv1_cycle01020304')
# Six slightly-overlapping (start_fraction, end_fraction) windows covering
# the interval [0, 1].
# NOTE(review): these look like time-axis segments for the commented-out
# per-segment plotting loops below — confirm before reuse.
t_ratio_lst = [
    (0,     0.17),
    (0.16,  0.34),
    (0.33,  0.505),
    (0.495, 0.67),
    (0.66,  0.84),
    (0.83,  1),
]
#t_label = []
#for t in np.arange(0, 0.2*(59+16) + 0.0001, 0.2):
# t_label.append(str(t))
#MLC_IDSAT_algorithm_rv1(12, 21, 36, 2, 'ULVT', 0.9, 2.4, 128, range(0, 128), [59+16], [0.2], 1, np.arange(0, 0.2*(59+16)+0.0001, 0.2), t_label, ['../Data/chip12/MLC_programming_Chip12_Col21_200msPULSE_VG0p9_VD2p4_VAsource_VBdrain_01'], '../Plots/chip12/', 'VG0p9_VD2p4', '_rv1_cycle01')
#for row_start in np.arange(0, 128, 8):
# MLC_IDSAT_algorithm_rv1(12, 21, 36, 2, 'ULVT', 0.9, 2.4, 128, range(row_start, row_start+8), [59+16], [0.2], 1, np.arange(0, 0.2*(59+16)+0.0001, 0.2), t_label, ['../Data/chip12/MLC_programming_Chip12_Col21_200msPULSE_VG0p9_VD2p4_VAsource_VBdrain_01'], '../Plots/chip12/', 'VG0p9_VD2p4', '_rv1_cycle01_row'+str(row_start)+'_to_'+str(row_start+7))
# segment=0
# for t_ratio in t_ratio_lst:
# MLC_IDSAT_algorithm_rv1(12, 21, 36, 2, 'ULVT', 0.9, 2.4, 128, range(row_start, row_start+8), [59+16], [0.2], 1, np.arange(0, 0.2*(59+16)+0.0001, 0.2), t_label, ['../Data/chip12/MLC_programming_Chip12_Col21_200msPULSE_VG0p9_VD2p4_VAsource_VBdrain_01'], '../Plots/chip12/', 'VG0p9_VD2p4', '_rv1_cycle01_row'+str(row_start)+'_to_'+str(row_start+7)+'_'+str(segment), [t_ratio[0]*0.2*(59+16), t_ratio[1]*0.2*(59+16)])
# segment += 1
#t_label = []
#for t in np.arange(0, 0.2*(72+40) + 0.0001, 0.2):
# t_label.append(str(0.2*(59+16) + t))
#MLC_IDSAT_algorithm_rv1(12, 21, 36, 2, 'ULVT', 1.2, 2.4, 128, range(0, 128), [72+40], [0.2], 1, np.arange(0, 0.2*(72+40)+0.0001, 0.2), t_label, ['../Data/chip12/MLC_programming_Chip12_Col21_200msPULSE_VG1p2_VD2p4_VAsource_VBdrain_02'], '../Plots/chip12/', 'VG1p2_VD2p4', '_rv1_cycle02')
#for row_start in np.arange(0, 128, 8):
# MLC_IDSAT_algorithm_rv1(12, 21, 36, 2, 'ULVT', 1.2, 2.4, 128, range(row_start, row_start+8), [72+40], [0.2], 1, np.arange(0, 0.2*(72+40)+0.0001, 0.2), t_label, ['../Data/chip12/MLC_programming_Chip12_Col21_200msPULSE_VG1p2_VD2p4_VAsource_VBdrain_02'], '../Plots/chip12/', 'VG1p2_VD2p4', '_rv1_cycle02_row'+str(row_start)+'_to_'+str(row_start+7))
# segment=0
# for t_ratio in t_ratio_lst:
# MLC_IDSAT_algorithm_rv1(12, 21, 36, 2, 'ULVT', 1.2, 2.4, 128, range(row_start, row_start+8), [72+40], [0.2], 1, np.arange(0, 0.2*(72+40)+0.0001, 0.2), t_label, ['../Data/chip12/MLC_programming_Chip12_Col21_200msPULSE_VG1p2_VD2p4_VAsource_VBdrain_02'], '../Plots/chip12/', 'VG1p2_VD2p4', '_rv1_cycle02_row'+str(row_start)+'_to_'+str(row_start+7)+'_'+str(segment), [t_ratio[0]*0.2*(72+40), t_ratio[1]*0.2*(72+40)])
# segment += 1
#t_label = []
#for t in np.arange(0, 0.2*(80+31) + 0.0001, 0.2):
# t_label.append(str(0.2*(59+16) + 0.2*(72+40) + t))
##MLC_IDSAT_algorithm_rv1(12, 21, 36, 2, 'ULVT', 1.5, 2.4, 128, range(0, 128), [80+31], [0.2], 1, np.arange(0, 0.2*(80+31)+0.0001, 0.2), t_label, ['../Data/chip12/MLC_programming_Chip12_Col21_200msPULSE_VG1p5_VD2p4_VAsource_VBdrain_03'], '../Plots/chip12/', 'VG1p5_VD2p4', '_rv1_cycle03')
#for row_start in np.arange(0, 128, 8):
# MLC_IDSAT_algorithm_rv1(12, 21, 36, 2, 'ULVT', 1.5, 2.4, 128, range(row_start, row_start+8), [80+31], [0.2], 1, np.arange(0, 0.2*(80+31)+0.0001, 0.2), t_label, ['../Data/chip12/MLC_programming_Chip12_Col21_200msPULSE_VG1p5_VD2p4_VAsource_VBdrain_03'], '../Plots/chip12/', 'VG1p5_VD2p4', '_rv1_cycle03_row'+str(row_start)+'_to_'+str(row_start+7))
# segment=0
# for t_ratio in t_ratio_lst:
# MLC_IDSAT_algorithm_rv1(12, 21, 36, 2, 'ULVT', 1.5, 2.4, 128, range(row_start, row_start+8), [80+31], [0.2], 1, np.arange(0, 0.2*(80+31)+0.0001, 0.2), t_label, ['../Data/chip12/MLC_programming_Chip12_Col21_200msPULSE_VG1p5_VD2p4_VAsource_VBdrain_03'], '../Plots/chip12/', 'VG1p5_VD2p4', '_rv1_cycle03_row'+str(row_start)+'_to_'+str(row_start+7)+'_'+str(segment), [t_ratio[0]*0.2*(80+31), t_ratio[1]*0.2*(80+31)])
# segment += 1
#t_label = []
#for t in np.arange(0, 0.2*(68+23) + 0.0001, 0.2):
# t_label.append(str(0.2*(59+16) + 0.2*(72+40) + 0.2*(80+31) + t))
#MLC_IDSAT_algorithm_rv1(12, 21, 36, 2, 'ULVT', 1.8, 2.4, 128, range(0, 128), [68+23], [0.2], 1, np.arange(0, 0.2*(68+23)+0.0001, 0.2), t_label, ['../Data/chip12/MLC_programming_Chip12_Col21_200msPULSE_VG1p8_VD2p4_VAsource_VBdrain_04'], '../Plots/chip12/', 'VG1p8_VD2p4', '_rv1_cycle04')
#for row_start in np.arange(0, 128, 8):
# MLC_IDSAT_algorithm_rv1(12, 21, 36, 2, 'ULVT', 1.8, 2.4, 128, range(row_start, row_start+8), [68+23], [0.2], 1, np.arange(0, 0.2*(68+23)+0.0001, 0.2), t_label, ['../Data/chip12/MLC_programming_Chip12_Col21_200msPULSE_VG1p8_VD2p4_VAsource_VBdrain_04'], '../Plots/chip12/', 'VG1p8_VD2p4', '_rv1_cycle04_row'+str(row_start)+'_to_'+str(row_start+7))
# segment=0
# for t_ratio in t_ratio_lst:
# MLC_IDSAT_algorithm_rv1(12, 21, 36, 2, 'ULVT', 1.8, 2.4, 128, range(row_start, row_start+8), [68+23], [0.2], 1, np.arange(0, 0.2*(68+23)+0.0001, 0.2), t_label, ['../Data/chip12/MLC_programming_Chip12_Col21_200msPULSE_VG1p8_VD2p4_VAsource_VBdrain_04'], '../Plots/chip12/', 'VG1p8_VD2p4', '_rv1_cycle04_row'+str(row_start)+'_to_'+str(row_start+7)+'_'+str(segment), [t_ratio[0]*0.2*(68+23), t_ratio[1]*0.2*(68+23)])
# segment += 1
#MLC_IDSAT_characterization(10, 18, 36, 2, 'ULVT', 1.8, 2.0, 32, range(0, 16) , [40, 20, 12], [0.01, 0.04, 0.2], 3, [0, 0.4, 0.8, 1.6, 2.0, 2.6, 3.2, 3.8, 4.4], ['0', '0.4', '0.4', '1.2', '1.2', '1.8', '2.4', '3.0', '3.6'], ['../Data/chip10/Chip10_Col18_HCI_40x10ms_stress_VG_ConstPulse_VAsource_VBdrain_01', '../Data/chip10/Chip10_Col18_HCI_20x40ms_stress_VG_ConstPulse_VAsource_VBdrain_02', '../Data/chip10/Chip10_Col18_HCI_12x200ms_stress_VG_ConstPulse_VAsource_VBdrain_03'], '../Plots/chip10/', 'VG1p8_VD2p0', '_cycle010203', 38, 112)
#MLC_IDSAT_characterization(10, 18, 36, 2, 'ULVT', 1.8, 2.4, 32, range(16, 32), [40, 20, 12], [0.01, 0.04, 0.2], 3, [0, 0.4, 0.8, 1.6, 2.0, 2.6, 3.2, 3.8, 4.4], ['0', '0.4', '0.4', '1.2', '1.2', '1.8', '2.4', '3.0', '3.6'], ['../Data/chip10/Chip10_Col18_HCI_40x10ms_stress_VG_ConstPulse_VAsource_VBdrain_01', '../Data/chip10/Chip10_Col18_HCI_20x40ms_stress_VG_ConstPulse_VAsource_VBdrain_02', '../Data/chip10/Chip10_Col18_HCI_12x200ms_stress_VG_ConstPulse_VAsource_VBdrain_03'], '../Plots/chip10/', 'VG1p8_VD2p4', '_cycle010203', 16, 110)
#MLC_IDSAT_characterization(10, 24, 20, 2, 'ULVT', 1.8, 1.8, 32, range(0, 16) , [40, 20, 12], [0.01, 0.04, 0.2], 3, [0, 0.4, 0.8, 1.6, 2.0, 2.6, 3.2, 3.8, 4.4], ['0', '0.4', '0.4', '1.2', '1.2', '1.8', '2.4', '3.0', '3.6'], ['../Data/chip10/Chip10_Col24_HCI_40x10ms_stress_VG_ConstPulse_VAsource_VBdrain_01', '../Data/chip10/Chip10_Col24_HCI_20x40ms_stress_VG_ConstPulse_VAsource_VBdrain_02', '../Data/chip10/Chip10_Col24_HCI_12x200ms_stress_VG_ConstPulse_VAsource_VBdrain_03'], '../Plots/chip10/', 'VG1p8_VD1p8', '_cycle010203', 44, 133)
#MLC_IDSAT_characterization(10, 24, 20, 2, 'ULVT', 1.8, 2.2, 32, range(16, 32), [40, 20, 12], [0.01, 0.04, 0.2], 3, [0, 0.4, 0.8, 1.6, 2.0, 2.6, 3.2, 3.8, 4.4], ['0', '0.4', '0.4', '1.2', '1.2', '1.8', '2.4', '3.0', '3.6'], ['../Data/chip10/Chip10_Col24_HCI_40x10ms_stress_VG_ConstPulse_VAsource_VBdrain_01', '../Data/chip10/Chip10_Col24_HCI_20x40ms_stress_VG_ConstPulse_VAsource_VBdrain_02', '../Data/chip10/Chip10_Col24_HCI_12x200ms_stress_VG_ConstPulse_VAsource_VBdrain_03'], '../Plots/chip10/', 'VG1p8_VD2p2', '_cycle010203', 14, 133)
#MLC_IDSAT_characterization(10, 30, 16, 2, 'ULVT', 1.8, 1.7, 32, range(0, 16) , [40, 20, 12], [0.01, 0.04, 0.2], 3, [0, 0.4, 0.8, 1.6, 2.0, 2.6, 3.2, 3.8, 4.4], ['0', '0.4', '0.4', '1.2', '1.2', '1.8', '2.4', '3.0', '3.6'], ['../Data/chip10/Chip10_Col30_HCI_40x10ms_stress_VG_ConstPulse_VAsource_VBdrain_01', '../Data/chip10/Chip10_Col30_HCI_20x40ms_stress_VG_ConstPulse_VAsource_VBdrain_02', '../Data/chip10/Chip10_Col30_HCI_12x200ms_stress_VG_ConstPulse_VAsource_VBdrain_03'], '../Plots/chip10/', 'VG1p8_VD1p7', '_cycle010203', 50, 135)
#MLC_IDSAT_characterization(10, 30, 16, 2, 'ULVT', 1.8, 2.0, 32, range(16, 32), [40, 20, 12], [0.01, 0.04, 0.2], 3, [0, 0.4, 0.8, 1.6, 2.0, 2.6, 3.2, 3.8, 4.4], ['0', '0.4', '0.4', '1.2', '1.2', '1.8', '2.4', '3.0', '3.6'], ['../Data/chip10/Chip10_Col30_HCI_40x10ms_stress_VG_ConstPulse_VAsource_VBdrain_01', '../Data/chip10/Chip10_Col30_HCI_20x40ms_stress_VG_ConstPulse_VAsource_VBdrain_02', '../Data/chip10/Chip10_Col30_HCI_12x200ms_stress_VG_ConstPulse_VAsource_VBdrain_03'], '../Plots/chip10/', 'VG1p8_VD2p0', '_cycle010203', 20, 140)
#MLC_IDSAT_characterization(10, 18, 36, 2, 'ULVT', 1.8, 2.0, 32, range(0, 16) , [40], [0.01], 1, [0, 0.05, 0.1, 0.15, 0.2, 0.25, 0.3, 0.35, 0.4, 0.5], ['0', '0.05', '0.1', '0.15', '0.2', '0.25', '0.3', '0.35', '0.4', 'recover'], ['../Data/chip10/Chip10_Col18_HCI_40x10ms_stress_VG_ConstPulse_VAsource_VBdrain_01'], '../Plots/chip10/', 'VG1p8_VD2p0', '_cycle01', 38, 112)
#MLC_IDSAT_characterization(10, 18, 36, 2, 'ULVT', 1.8, 2.4, 32, range(16, 32), [40], [0.01], 1, [0, 0.05, 0.1, 0.15, 0.2, 0.25, 0.3, 0.35, 0.4, 0.5], ['0', '0.05', '0.1', '0.15', '0.2', '0.25', '0.3', '0.35', '0.4', 'recover'], ['../Data/chip10/Chip10_Col18_HCI_40x10ms_stress_VG_ConstPulse_VAsource_VBdrain_01'], '../Plots/chip10/', 'VG1p8_VD2p4', '_cycle01', 16, 110)
#MLC_IDSAT_characterization(10, 24, 20, 2, 'ULVT', 1.8, 1.8, 32, range(0, 16) , [40], [0.01], 1, [0, 0.05, 0.1, 0.15, 0.2, 0.25, 0.3, 0.35, 0.4, 0.5], ['0', '0.05', '0.1', '0.15', '0.2', '0.25', '0.3', '0.35', '0.4', 'recover'], ['../Data/chip10/Chip10_Col24_HCI_40x10ms_stress_VG_ConstPulse_VAsource_VBdrain_01'], '../Plots/chip10/', 'VG1p8_VD1p8', '_cycle01', 44, 133)
#MLC_IDSAT_characterization(10, 24, 20, 2, 'ULVT', 1.8, 2.2, 32, range(16, 32), [40], [0.01], 1, [0, 0.05, 0.1, 0.15, 0.2, 0.25, 0.3, 0.35, 0.4, 0.5], ['0', '0.05', '0.1', '0.15', '0.2', '0.25', '0.3', '0.35', '0.4', 'recover'], ['../Data/chip10/Chip10_Col24_HCI_40x10ms_stress_VG_ConstPulse_VAsource_VBdrain_01'], '../Plots/chip10/', 'VG1p8_VD2p2', '_cycle01', 14, 133)
#MLC_IDSAT_characterization(10, 30, 16, 2, 'ULVT', 1.8, 1.7, 32, range(0, 16) , [40], [0.01], 1, [0, 0.05, 0.1, 0.15, 0.2, 0.25, 0.3, 0.35, 0.4, 0.5], ['0', '0.05', '0.1', '0.15', '0.2', '0.25', '0.3', '0.35', '0.4', 'recover'], ['../Data/chip10/Chip10_Col30_HCI_40x10ms_stress_VG_ConstPulse_VAsource_VBdrain_01'], '../Plots/chip10/', 'VG1p8_VD1p7', '_cycle01', 50, 135)
#MLC_IDSAT_characterization(10, 30, 16, 2, 'ULVT', 1.8, 2.0, 32, range(16, 32), [40], [0.01], 1, [0, 0.05, 0.1, 0.15, 0.2, 0.25, 0.3, 0.35, 0.4, 0.5], ['0', '0.05', '0.1', '0.15', '0.2', '0.25', '0.3', '0.35', '0.4', 'recover'], ['../Data/chip10/Chip10_Col30_HCI_40x10ms_stress_VG_ConstPulse_VAsource_VBdrain_01'], '../Plots/chip10/', 'VG1p8_VD2p0', '_cycle01', 20, 140)
#MLC_IDSAT_characterization(10, 18, 36, 2, 'ULVT', 1.8, 2.0, 32, range(0, 16) , [40, 20], [0.01, 0.04], 2, [0, 0.2, 0.4, 0.8, 1.0, 1.2, 1.4, 1.6, 1.7], ['0', '0.2', '0.4', '0.4', '0.6', '0.8', '1.0', '1.2', 'recover'], ['../Data/chip10/Chip10_Col18_HCI_40x10ms_stress_VG_ConstPulse_VAsource_VBdrain_01', '../Data/chip10/Chip10_Col18_HCI_20x40ms_stress_VG_ConstPulse_VAsource_VBdrain_02'], '../Plots/chip10/', 'VG1p8_VD2p0', '_cycle0102', 38, 112)
#MLC_IDSAT_characterization(10, 18, 36, 2, 'ULVT', 1.8, 2.4, 32, range(16, 32), [40, 20], [0.01, 0.04], 2, [0, 0.2, 0.4, 0.8, 1.0, 1.2, 1.4, 1.6, 1.7], ['0', '0.2', '0.4', '0.4', '0.6', '0.8', '1.0', '1.2', 'recover'], ['../Data/chip10/Chip10_Col18_HCI_40x10ms_stress_VG_ConstPulse_VAsource_VBdrain_01', '../Data/chip10/Chip10_Col18_HCI_20x40ms_stress_VG_ConstPulse_VAsource_VBdrain_02'], '../Plots/chip10/', 'VG1p8_VD2p4', '_cycle0102', 16, 110)
#
#MLC_IDSAT_characterization(10, 24, 20, 2, 'ULVT', 1.8, 1.8, 32, range(0, 16) , [40, 20], [0.01, 0.04], 2, [0, 0.2, 0.4, 0.8, 1.0, 1.2, 1.4, 1.6, 1.7], ['0', '0.2', '0.4', '0.4', '0.6', '0.8', '1.0', '1.2', 'recover'], ['../Data/chip10/Chip10_Col24_HCI_40x10ms_stress_VG_ConstPulse_VAsource_VBdrain_01', '../Data/chip10/Chip10_Col24_HCI_20x40ms_stress_VG_ConstPulse_VAsource_VBdrain_02'], '../Plots/chip10/', 'VG1p8_VD1p8', '_cycle0102', 44, 133)
#MLC_IDSAT_characterization(10, 24, 20, 2, 'ULVT', 1.8, 2.2, 32, range(16, 32), [40, 20], [0.01, 0.04], 2, [0, 0.2, 0.4, 0.8, 1.0, 1.2, 1.4, 1.6, 1.7], ['0', '0.2', '0.4', '0.4', '0.6', '0.8', '1.0', '1.2', 'recover'], ['../Data/chip10/Chip10_Col24_HCI_40x10ms_stress_VG_ConstPulse_VAsource_VBdrain_01', '../Data/chip10/Chip10_Col24_HCI_20x40ms_stress_VG_ConstPulse_VAsource_VBdrain_02'], '../Plots/chip10/', 'VG1p8_VD2p2', '_cycle0102', 14, 133)
#
#MLC_IDSAT_characterization(10, 30, 16, 2, 'ULVT', 1.8, 1.7, 32, range(0, 16) , [40, 20], [0.01, 0.04], 2, [0, 0.2, 0.4, 0.8, 1.0, 1.2, 1.4, 1.6, 1.7], ['0', '0.2', '0.4', '0.4', '0.6', '0.8', '1.0', '1.2', 'recover'], ['../Data/chip10/Chip10_Col30_HCI_40x10ms_stress_VG_ConstPulse_VAsource_VBdrain_01', '../Data/chip10/Chip10_Col30_HCI_20x40ms_stress_VG_ConstPulse_VAsource_VBdrain_02'], '../Plots/chip10/', 'VG1p8_VD1p7', '_cycle0102', 50, 135)
#MLC_IDSAT_characterization(10, 30, 16, 2, 'ULVT', 1.8, 2.0, 32, range(16, 32), [40, 20], [0.01, 0.04], 2, [0, 0.2, 0.4, 0.8, 1.0, 1.2, 1.4, 1.6, 1.7], ['0', '0.2', '0.4', '0.4', '0.6', '0.8', '1.0', '1.2', 'recover'], ['../Data/chip10/Chip10_Col30_HCI_40x10ms_stress_VG_ConstPulse_VAsource_VBdrain_01', '../Data/chip10/Chip10_Col30_HCI_20x40ms_stress_VG_ConstPulse_VAsource_VBdrain_02'], '../Plots/chip10/', 'VG1p8_VD2p0', '_cycle0102', 20, 140)
# for col in range(36):
# IDS_VGS(10, col, col_list[col][0], col_list[col][1], col_list[col][2], col_list[col][3], ['../Data/chip10/Fresh_Chip10_Col'+str(col).zfill(2)+'_Ids_Vgs_VAsource_VBdrain'], ['b'], '../Plots/chip10/', 'Fresh_Ids-Vgs_VaS-VbD_', range(0, col_list[col][3]), 'Fresh IDS-VGS, forward' , col_list[col][4])
# IDS_VGS(10, col, col_list[col][0], col_list[col][1], col_list[col][2], col_list[col][3], ['../Data/chip10/Fresh_Chip10_Col'+str(col).zfill(2)+'_Ids_Vgs_VAdrain_VBsource'], ['b'], '../Plots/chip10/', 'Fresh_Ids-Vgs_VaD-VbS_', range(0, col_list[col][3]), 'Fresh IDS-VGS, reversed', col_list[col][4])
# IDS_VGS(11, 21, 36, 2, 'ULVT', 128, ['../Data/chip11/Fresh_Chip11_Col21_Ids_Vgs_VAsource_VBdrain', '../Data/chip11/MLC_Chip11_Col21_2msPULSE_VG1p8_VD2p4_Ids_Vgs_VAsource_VBdrain_01'], ['b', 'y'], '../Plots/chip11/', 'Fresh_vs_MLC01_VG1p8_VD2p4_IDS-VGS_VaS-VbD_', range(0, 128), 'Fresh vs MLC-01 (VG=1.8, VD=2.4)\nMLC-01: 2ms WL pulse, IDSAT threshold = 80uA, forward' , 135, ['fresh', 'MLC-01'])
# IDS_VGS(11, 21, 36, 2, 'ULVT', 128, ['../Data/chip11/Fresh_Chip11_Col21_Ids_Vgs_VAdrain_VBsource', '../Data/chip11/MLC_Chip11_Col21_2msPULSE_VG1p8_VD2p4_Ids_Vgs_VAdrain_VBsource_01'], ['b', 'y'], '../Plots/chip11/', 'Fresh_vs_MLC01_VG1p8_VD2p4_IDS-VGS_VaD-VbS_', range(0, 128), 'Fresh vs MLC-01 (VG=1.8, VD=2.4)\nMLC-01: 2ms WL pulse, IDSAT threshold = 80uA, reversed', 135, ['fresh', 'MLC-01'])
#IDS_VGS(11, 21, 36, 2, 'ULVT', 128, ['../Data/chip11/Fresh_Chip11_Col21_Ids_Vgs_VAsource_VBdrain', '../Data/chip11/MLC_Chip11_Col21_2msPULSE_VG1p8_VD2p4_Ids_Vgs_VAsource_VBdrain_01', '../Data/chip11/MLC_Chip11_Col21_10msPULSE_VG1p8_VD2p4_Ids_Vgs_VAsource_VBdrain_02', '../Data/chip11/MLC_Chip11_Col21_40msPULSE_VG1p8_VD2p4_Ids_Vgs_VAsource_VBdrain_03'], ['b', 'y', 'r', 'k'], '../Plots/chip11/', 'Fresh_vs_MLC010203_VG1p8_VD2p4_IDS-VGS_VaS-VbD_', range(0, 128), 'Fresh vs MLC-1-2-3 (VG=1.8, VD=2.4)\nMLC-{1, 2, 3}: {2ms, 10ms, 40ms} WL pulse, IDSAT threshold = {80, 60, 40}uA, forward' , 135, ['fresh', 'MLC-01', 'MLC-02', 'MLC-03'])
#IDS_VGS(11, 21, 36, 2, 'ULVT', 128, ['../Data/chip11/Fresh_Chip11_Col21_Ids_Vgs_VAdrain_VBsource', '../Data/chip11/MLC_Chip11_Col21_2msPULSE_VG1p8_VD2p4_Ids_Vgs_VAdrain_VBsource_01', '../Data/chip11/MLC_Chip11_Col21_10msPULSE_VG1p8_VD2p4_Ids_Vgs_VAdrain_VBsource_02', '../Data/chip11/MLC_Chip11_Col21_40msPULSE_VG1p8_VD2p4_Ids_Vgs_VAdrain_VBsource_03'], ['b', 'y', 'r', 'k'], '../Plots/chip11/', 'Fresh_vs_MLC010203_VG1p8_VD2p4_IDS-VGS_VaD-VbS_', range(0, 128), 'Fresh vs MLC-1-2-3 (VG=1.8, VD=2.4)\nMLC-{1, 2, 3}: {2ms, 10ms, 40ms} WL pulse, IDSAT threshold = {80, 60, 40}uA, reversed', 135, ['fresh', 'MLC-01', 'MLC-02', 'MLC-03'])
#hist_IDS_VGS(0, 11, 21, 36, 2, 'ULVT', 128, ['../Data/chip11/Fresh_Chip11_Col21_Ids_Vgs_VAdrain_VBsource', '../Data/chip11/MLC_Chip11_Col21_2msPULSE_VG1p8_VD2p4_Ids_Vgs_VAdrain_VBsource_01', '../Data/chip11/MLC_Chip11_Col21_10msPULSE_VG1p8_VD2p4_Ids_Vgs_VAdrain_VBsource_02', '../Data/chip11/MLC_Chip11_Col21_40msPULSE_VG1p8_VD2p4_Ids_Vgs_VAdrain_VBsource_03', '../Data/chip11/MLC_Chip11_Col21_200msPULSE_VG1p8_VD2p4_Ids_Vgs_VAdrain_VBsource_04'], ['b', 'y', 'r', 'k', 'g'], '../Plots/chip11/', 'Hist-IDSAT_MLC-rv1-01020304_reverse-read_', range(0, 128), 'MLC programming (VGS=1.8, VDS=2.4), histogram of read-IDSAT (VGS=VDS=0.8V)', 0, 136, 0, 136, 1000)
#IDS_VGS(14, 21, 36, 2, 'ULVT', 128, ['../Data/chip14/Fresh_Chip14_Col21_Ids_Vgs_VAsource_VBdrain', '../Data/chip14/MLC_Chip14_Col21_40msPULSE_VG1p8_VD2p4_Ids_Vgs_VAsource_VBdrain_01', '../Data/chip14/MLC_Chip14_Col21_40msPULSE_VG1p8_VD2p4_Ids_Vgs_VAsource_VBdrain_02', '../Data/chip14/MLC_Chip14_Col21_40msPULSE_VG1p8_VD2p4_Ids_Vgs_VAsource_VBdrain_03', '../Data/chip14/MLC_Chip14_Col21_40msPULSE_VG1p8_VD2p4_Ids_Vgs_VAsource_VBdrain_04'], ['b', 'y', 'r', 'k', 'g'], '../Plots/chip14/', 'Fresh_vs_MLC01020304_VG1p8_VD2p4_40msPULSE_IDS-VGS_VaS-VbD_', range(0, 128), 'Fresh vs MLC-1-2-3-4 (VG=1.8, VD=2.4)\nMLC-{1, 2, 3, 4}: all using 40ms WL pulses, IDSAT threshold = {80, 60, 40, 20}uA, forward' , 130, ['fresh', 'MLC-01', 'MLC-02', 'MLC-03', 'MLC-04'])
#IDS_VGS(14, 21, 36, 2, 'ULVT', 128, ['../Data/chip14/Fresh_Chip14_Col21_Ids_Vgs_VAdrain_VBsource', '../Data/chip14/MLC_Chip14_Col21_40msPULSE_VG1p8_VD2p4_Ids_Vgs_VAdrain_VBsource_01', '../Data/chip14/MLC_Chip14_Col21_40msPULSE_VG1p8_VD2p4_Ids_Vgs_VAdrain_VBsource_02', '../Data/chip14/MLC_Chip14_Col21_40msPULSE_VG1p8_VD2p4_Ids_Vgs_VAdrain_VBsource_03', '../Data/chip14/MLC_Chip14_Col21_40msPULSE_VG1p8_VD2p4_Ids_Vgs_VAdrain_VBsource_04'], ['b', 'y', 'r', 'k', 'g'], '../Plots/chip14/', 'Fresh_vs_MLC01020304_VG1p8_VD2p4_40msPULSE_IDS-VGS_VaD-VbS_', range(0, 128), 'Fresh vs MLC-1-2-3-4 (VG=1.8, VD=2.4)\nMLC-{1, 2, 3, 4}: all using 40ms WL pulses, IDSAT threshold = {80, 60, 40, 20}uA, reversed', 130, ['fresh', 'MLC-01', 'MLC-02', 'MLC-03', 'MLC-04'])
#
#hist_IDS_VGS(0, 14, 21, 36, 2, 'ULVT', 128, ['../Data/chip14/Fresh_Chip14_Col21_Ids_Vgs_VAdrain_VBsource', '../Data/chip14/MLC_Chip14_Col21_40msPULSE_VG1p8_VD2p4_Ids_Vgs_VAdrain_VBsource_01', '../Data/chip14/MLC_Chip14_Col21_40msPULSE_VG1p8_VD2p4_Ids_Vgs_VAdrain_VBsource_02', '../Data/chip14/MLC_Chip14_Col21_40msPULSE_VG1p8_VD2p4_Ids_Vgs_VAdrain_VBsource_03', '../Data/chip14/MLC_Chip14_Col21_40msPULSE_VG1p8_VD2p4_Ids_Vgs_VAdrain_VBsource_04'], ['b', 'y', 'r', 'k', 'g'], '../Plots/chip14/', 'Hist-IDSAT_MLC-rv1-01020304_reverse-read_', range(0, 128), 'MLC programming always using 40ms pulses, VGS=1.8, VDS=2.4 for level=1-2-3-4\nhistogram of read-IDSAT (VGS=VDS=0.8V)', 0, 130, 0, 130, 1000)
#IDS_VGS(10, 21, 36, 2, 'ULVT', 128, ['../Data/chip10/Fresh_Chip10_Col21_Ids_Vgs_VAsource_VBdrain', '../Data/chip10/MLC_Chip10_Col21_10msPULSE_VG1p8_VD2p4_Ids_Vgs_VAsource_VBdrain_01', '../Data/chip10/MLC_Chip10_Col21_40msPULSE_VG1p8_VD2p4_Ids_Vgs_VAsource_VBdrain_02', '../Data/chip10/MLC_Chip10_Col21_200msPULSE_VG1p8_VD2p4_Ids_Vgs_VAsource_VBdrain_03'], ['b', 'y', 'r', 'k'], '../Plots/chip10/', 'Fresh_vs_MLC010203_VG1p8_VD2p4_IDS-VGS_VaS-VbD_', range(0, 128), 'Fresh vs 2-bit/4-level MLC programming (VG=1.8, VD=2.4)\nMLC-{1, 2, 3} use {10ms, 40ms, 200ms} WL pulses, IDSAT threshold = {80uA, 60uA, 40uA}, forward read' , col_list[21][4], ['fresh', 'MLC-1', 'MLC-2', 'MLC-3'])
#IDS_VGS(10, 21, 36, 2, 'ULVT', 128, ['../Data/chip10/Fresh_Chip10_Col21_Ids_Vgs_VAdrain_VBsource', '../Data/chip10/MLC_Chip10_Col21_10msPULSE_VG1p8_VD2p4_Ids_Vgs_VAdrain_VBsource_01', '../Data/chip10/MLC_Chip10_Col21_40msPULSE_VG1p8_VD2p4_Ids_Vgs_VAdrain_VBsource_02', '../Data/chip10/MLC_Chip10_Col21_200msPULSE_VG1p8_VD2p4_Ids_Vgs_VAdrain_VBsource_03'], ['b', 'y', 'r', 'k'], '../Plots/chip10/', 'Fresh_vs_MLC010203_VG1p8_VD2p4_IDS-VGS_VaD-VbS_', range(0, 128), 'Fresh vs 2-bit/4-level MLC programming (VG=1.8, VD=2.4)\nMLC-{1, 2, 3} use {10ms, 40ms, 200ms} WL pulses, IDSAT threshold = {80uA, 60uA, 40uA}, reversed read', col_list[21][4], ['fresh', 'MLC-1', 'MLC-2', 'MLC-3'])
#IDSAT_vs_row(10, 21, 36, 2, 'ULVT', 128, ['../Data/chip10/Fresh_Chip10_Col21_Ids_Vgs_VAsource_VBdrain', '../Data/chip10/MLC_Chip10_Col21_10msPULSE_VG1p8_VD2p4_Ids_Vgs_VAsource_VBdrain_01', '../Data/chip10/MLC_Chip10_Col21_40msPULSE_VG1p8_VD2p4_Ids_Vgs_VAsource_VBdrain_02', '../Data/chip10/MLC_Chip10_Col21_200msPULSE_VG1p8_VD2p4_Ids_Vgs_VAsource_VBdrain_03'], ['b', 'y', 'r', 'k'], '../Plots/chip10/', 'IDSAT_vs_row_Fresh_vs_MLC010203_VG1p8_VD2p4_VaS-VbD_', range(0, 128), 'Fresh vs 2-bit/4-level MLC programming (VG=1.8, VD=2.4)\nMLC-{1, 2, 3} use {10ms, 40ms, 200ms} WL pulses, IDSAT threshold = {80uA, 60uA, 40uA}, forward read' , col_list[21][4], ['fresh', 'MLC-1', 'MLC-2', 'MLC-3'])
#IDSAT_vs_row(10, 21, 36, 2, 'ULVT', 128, ['../Data/chip10/Fresh_Chip10_Col21_Ids_Vgs_VAdrain_VBsource', '../Data/chip10/MLC_Chip10_Col21_10msPULSE_VG1p8_VD2p4_Ids_Vgs_VAdrain_VBsource_01', '../Data/chip10/MLC_Chip10_Col21_40msPULSE_VG1p8_VD2p4_Ids_Vgs_VAdrain_VBsource_02', '../Data/chip10/MLC_Chip10_Col21_200msPULSE_VG1p8_VD2p4_Ids_Vgs_VAdrain_VBsource_03'], ['b', 'y', 'r', 'k'], '../Plots/chip10/', 'IDSAT_vs_row_Fresh_vs_MLC010203_VG1p8_VD2p4_VaD-VbS_', range(0, 128), 'Fresh vs 2-bit/4-level MLC programming (VG=1.8, VD=2.4)\nMLC-{1, 2, 3} use {10ms, 40ms, 200ms} WL pulses, IDSAT threshold = {80uA, 60uA, 40uA}, reversed read', col_list[21][4], ['fresh', 'MLC-1', 'MLC-2', 'MLC-3'])
#IDS_VGS(10, 18, 36, 2, 'ULVT', 32, ['../Data/chip10/Fresh_Chip10_Col18_Ids_Vgs_VAsource_VBdrain', '../Data/chip10/Chip10_Col18_HCI_40x10ms_Ids_Vgs_VAsource_VBdrain_01', '../Data/chip10/Chip10_Col18_HCI_20x40ms_Ids_Vgs_VAsource_VBdrain_02', '../Data/chip10/Chip10_Col18_HCI_12x200ms_Ids_Vgs_VAsource_VBdrain_03'], ['b', 'y', 'r', 'k'], '../Plots/chip10/', 'Fresh_vs_HCIstress010203_VG1p8_VD2p0_IDS-VGS_VaS-VbD_', range(0, 16) , 'Fresh vs Stress (VG=1.8, VD=2.0)\ncycle01: 40x10ms, cycle02: 20x40ms, cycle03: 12x200ms, forward' , col_list[18][4], ['fresh', 'cycle01', 'cycle02', 'cycle03'])
#IDS_VGS(10, 18, 36, 2, 'ULVT', 32, ['../Data/chip10/Fresh_Chip10_Col18_Ids_Vgs_VAdrain_VBsource', '../Data/chip10/Chip10_Col18_HCI_40x10ms_Ids_Vgs_VAdrain_VBsource_01', '../Data/chip10/Chip10_Col18_HCI_20x40ms_Ids_Vgs_VAdrain_VBsource_02', '../Data/chip10/Chip10_Col18_HCI_12x200ms_Ids_Vgs_VAdrain_VBsource_03'], ['b', 'y', 'r', 'k'], '../Plots/chip10/', 'Fresh_vs_HCIstress010203_VG1p8_VD2p0_IDS-VGS_VaD-VbS_', range(0, 16) , 'Fresh vs Stress (VG=1.8, VD=2.0)\ncycle01: 40x10ms, cycle02: 20x40ms, cycle03: 12x200ms, reversed', col_list[18][4], ['fresh', 'cycle01', 'cycle02', 'cycle03'])
#IDS_VGS(10, 18, 36, 2, 'ULVT', 32, ['../Data/chip10/Fresh_Chip10_Col18_Ids_Vgs_VAsource_VBdrain', '../Data/chip10/Chip10_Col18_HCI_40x10ms_Ids_Vgs_VAsource_VBdrain_01', '../Data/chip10/Chip10_Col18_HCI_20x40ms_Ids_Vgs_VAsource_VBdrain_02', '../Data/chip10/Chip10_Col18_HCI_12x200ms_Ids_Vgs_VAsource_VBdrain_03'], ['b', 'y', 'r', 'k'], '../Plots/chip10/', 'Fresh_vs_HCIstress010203_VG1p8_VD2p4_IDS-VGS_VaS-VbD_', range(16, 32), 'Fresh vs Stress (VG=1.8, VD=2.4)\ncycle01: 40x10ms, cycle02: 20x40ms, cycle03: 12x200ms, forward' , col_list[18][4], ['fresh', 'cycle01', 'cycle02', 'cycle03'])
#IDS_VGS(10, 18, 36, 2, 'ULVT', 32, ['../Data/chip10/Fresh_Chip10_Col18_Ids_Vgs_VAdrain_VBsource', '../Data/chip10/Chip10_Col18_HCI_40x10ms_Ids_Vgs_VAdrain_VBsource_01', '../Data/chip10/Chip10_Col18_HCI_20x40ms_Ids_Vgs_VAdrain_VBsource_02', '../Data/chip10/Chip10_Col18_HCI_12x200ms_Ids_Vgs_VAdrain_VBsource_03'], ['b', 'y', 'r', 'k'], '../Plots/chip10/', 'Fresh_vs_HCIstress010203_VG1p8_VD2p4_IDS-VGS_VaD-VbS_', range(16, 32), 'Fresh vs Stress (VG=1.8, VD=2.4)\ncycle01: 40x10ms, cycle02: 20x40ms, cycle03: 12x200ms, reversed', col_list[18][4], ['fresh', 'cycle01', 'cycle02', 'cycle03'])
#IDS_VGS(10, 24, 20, 2, 'ULVT', 32, ['../Data/chip10/Fresh_Chip10_Col24_Ids_Vgs_VAsource_VBdrain', '../Data/chip10/Chip10_Col24_HCI_40x10ms_Ids_Vgs_VAsource_VBdrain_01', '../Data/chip10/Chip10_Col24_HCI_20x40ms_Ids_Vgs_VAsource_VBdrain_02', '../Data/chip10/Chip10_Col24_HCI_12x200ms_Ids_Vgs_VAsource_VBdrain_03'], ['b', 'y', 'r', 'k'], '../Plots/chip10/', 'Fresh_vs_HCIstress010203_VG1p8_VD1p8_IDS-VGS_VaS-VbD_', range(0, 16) , 'Fresh vs Stress (VG=1.8, VD=1.8)\ncycle01: 40x10ms, cycle02: 20x40ms, cycle03: 12x200ms, forward' , col_list[24][4], ['fresh', 'cycle01', 'cycle02', 'cycle03'])
#IDS_VGS(10, 24, 20, 2, 'ULVT', 32, ['../Data/chip10/Fresh_Chip10_Col24_Ids_Vgs_VAdrain_VBsource', '../Data/chip10/Chip10_Col24_HCI_40x10ms_Ids_Vgs_VAdrain_VBsource_01', '../Data/chip10/Chip10_Col24_HCI_20x40ms_Ids_Vgs_VAdrain_VBsource_02', '../Data/chip10/Chip10_Col24_HCI_12x200ms_Ids_Vgs_VAdrain_VBsource_03'], ['b', 'y', 'r', 'k'], '../Plots/chip10/', 'Fresh_vs_HCIstress010203_VG1p8_VD1p8_IDS-VGS_VaD-VbS_', range(0, 16) , 'Fresh vs Stress (VG=1.8, VD=1.8)\ncycle01: 40x10ms, cycle02: 20x40ms, cycle03: 12x200ms, reversed', col_list[24][4], ['fresh', 'cycle01', 'cycle02', 'cycle03'])
#IDS_VGS(10, 24, 20, 2, 'ULVT', 32, ['../Data/chip10/Fresh_Chip10_Col24_Ids_Vgs_VAsource_VBdrain', '../Data/chip10/Chip10_Col24_HCI_40x10ms_Ids_Vgs_VAsource_VBdrain_01', '../Data/chip10/Chip10_Col24_HCI_20x40ms_Ids_Vgs_VAsource_VBdrain_02', '../Data/chip10/Chip10_Col24_HCI_12x200ms_Ids_Vgs_VAsource_VBdrain_03'], ['b', 'y', 'r', 'k'], '../Plots/chip10/', 'Fresh_vs_HCIstress010203_VG1p8_VD2p2_IDS-VGS_VaS-VbD_', range(16, 32), 'Fresh vs Stress (VG=1.8, VD=2.2)\ncycle01: 40x10ms, cycle02: 20x40ms, cycle03: 12x200ms, forward' , col_list[24][4], ['fresh', 'cycle01', 'cycle02', 'cycle03'])
#IDS_VGS(10, 24, 20, 2, 'ULVT', 32, ['../Data/chip10/Fresh_Chip10_Col24_Ids_Vgs_VAdrain_VBsource', '../Data/chip10/Chip10_Col24_HCI_40x10ms_Ids_Vgs_VAdrain_VBsource_01', '../Data/chip10/Chip10_Col24_HCI_20x40ms_Ids_Vgs_VAdrain_VBsource_02', '../Data/chip10/Chip10_Col24_HCI_12x200ms_Ids_Vgs_VAdrain_VBsource_03'], ['b', 'y', 'r', 'k'], '../Plots/chip10/', 'Fresh_vs_HCIstress010203_VG1p8_VD2p2_IDS-VGS_VaD-VbS_', range(16, 32), 'Fresh vs Stress (VG=1.8, VD=2.2)\ncycle01: 40x10ms, cycle02: 20x40ms, cycle03: 12x200ms, reversed', col_list[24][4], ['fresh', 'cycle01', 'cycle02', 'cycle03'])
#IDS_VGS(10, 30, 16, 2, 'ULVT', 32, ['../Data/chip10/Fresh_Chip10_Col30_Ids_Vgs_VAsource_VBdrain', '../Data/chip10/Chip10_Col30_HCI_40x10ms_Ids_Vgs_VAsource_VBdrain_01', '../Data/chip10/Chip10_Col30_HCI_20x40ms_Ids_Vgs_VAsource_VBdrain_02', '../Data/chip10/Chip10_Col30_HCI_12x200ms_Ids_Vgs_VAsource_VBdrain_03'], ['b', 'y', 'r', 'k'], '../Plots/chip10/', 'Fresh_vs_HCIstress010203_VG1p8_VD1p7_IDS-VGS_VaS-VbD_', range(0, 16) , 'Fresh vs Stress (VG=1.8, VD=1.7)\ncycle01: 40x10ms, cycle02: 20x40ms, cycle03: 12x200ms, forward' , col_list[30][4], ['fresh', 'cycle01', 'cycle02', 'cycle03'])
#IDS_VGS(10, 30, 16, 2, 'ULVT', 32, ['../Data/chip10/Fresh_Chip10_Col30_Ids_Vgs_VAdrain_VBsource', '../Data/chip10/Chip10_Col30_HCI_40x10ms_Ids_Vgs_VAdrain_VBsource_01', '../Data/chip10/Chip10_Col30_HCI_20x40ms_Ids_Vgs_VAdrain_VBsource_02', '../Data/chip10/Chip10_Col30_HCI_12x200ms_Ids_Vgs_VAdrain_VBsource_03'], ['b', 'y', 'r', 'k'], '../Plots/chip10/', 'Fresh_vs_HCIstress010203_VG1p8_VD1p7_IDS-VGS_VaD-VbS_', range(0, 16) , 'Fresh vs Stress (VG=1.8, VD=1.7)\ncycle01: 40x10ms, cycle02: 20x40ms, cycle03: 12x200ms, reversed', col_list[30][4], ['fresh', 'cycle01', 'cycle02', 'cycle03'])
#IDS_VGS(10, 30, 16, 2, 'ULVT', 32, ['../Data/chip10/Fresh_Chip10_Col30_Ids_Vgs_VAsource_VBdrain', '../Data/chip10/Chip10_Col30_HCI_40x10ms_Ids_Vgs_VAsource_VBdrain_01', '../Data/chip10/Chip10_Col30_HCI_20x40ms_Ids_Vgs_VAsource_VBdrain_02', '../Data/chip10/Chip10_Col30_HCI_12x200ms_Ids_Vgs_VAsource_VBdrain_03'], ['b', 'y', 'r', 'k'], '../Plots/chip10/', 'Fresh_vs_HCIstress010203_VG1p8_VD2p0_IDS-VGS_VaS-VbD_', range(16, 32), 'Fresh vs Stress (VG=1.8, VD=2.0)\ncycle01: 40x10ms, cycle02: 20x40ms, cycle03: 12x200ms, forward' , col_list[30][4], ['fresh', 'cycle01', 'cycle02', 'cycle03'])
#IDS_VGS(10, 30, 16, 2, 'ULVT', 32, ['../Data/chip10/Fresh_Chip10_Col30_Ids_Vgs_VAdrain_VBsource', '../Data/chip10/Chip10_Col30_HCI_40x10ms_Ids_Vgs_VAdrain_VBsource_01', '../Data/chip10/Chip10_Col30_HCI_20x40ms_Ids_Vgs_VAdrain_VBsource_02', '../Data/chip10/Chip10_Col30_HCI_12x200ms_Ids_Vgs_VAdrain_VBsource_03'], ['b', 'y', 'r', 'k'], '../Plots/chip10/', 'Fresh_vs_HCIstress010203_VG1p8_VD2p0_IDS-VGS_VaD-VbS_', range(16, 32), 'Fresh vs Stress (VG=1.8, VD=2.0)\ncycle01: 40x10ms, cycle02: 20x40ms, cycle03: 12x200ms, reversed', col_list[30][4], ['fresh', 'cycle01', 'cycle02', 'cycle03'])
#Charge_Pumping_compare([9, 9], ['Fresh', 'VG=1.8, VD=2.0, 1x1.2sec'], 21, 36, 2, 'ULVT', 128, ['../Data/chip09/Fresh_Chip09_Col21_60Pumping_SweepVSVBVD_VSS_WL_0_VDD_WL_1p6_ELTM', '../Data/chip09/HCIstress01_1x1p2s_Chip09_Col21_60Pumping_SweepVSVBVD_VSS_WL_0_VDD_WL_1p6_ELTM'], '../Plots/chip09/', 'Fresh_vs_HCIstress01_VG1p8_VD2p0_1p2s_Charge_Pumping_VSS_WL_0_VDD_WL_1p6', 'Fresh vs Stress (VG=1.8, VD=2.0, 1x1.2s), col[21], 5MHz Charge Pumping\n', -1.6, 0, 0.1, 17, 0, 1.6, 60, [2, 2], [[5000000, 1000], [5000000, 1000]], 0, [1, 1], '1kHz', [[0], [0]])
#Charge_Pumping(8, 21, 36, 2, 'ULVT', 128, '../Data/chip09/Fresh_Chip09_Col21_60Pumping_SweepVSVBVD_VSS_WL_0_VDD_WL_1p6_ELTM', '../Plots/chip09/', 'Fresh_Charge_Pumping_VSS_WL_0_VDD_WL_1p6', '', -1.6, 0, 0.1, 17, 0, 1.6, 60, 2, [5000000, 1000], ['5MHz', '1kHz'], 0, [0, 1], 1, [0], 0)
# entry point: main() is defined earlier in this file
if __name__ == '__main__':
    main()
|
983,969 | b6ed30af5bf23a2e33647bd5a1fc45ccf22454ca | # -*- coding: utf-8 -*-
from openerp import tools
from openerp import models,fields,api
from openerp.tools.translate import _
class is_mrp_production_workcenter_line(models.Model):
    """Read-only reporting model backed by a SQL view that joins work-center
    lines (mrp_production_workcenter_line) with their manufacturing orders
    (mrp_production), excluding cancelled orders."""
    _name='is.mrp.production.workcenter.line'
    _order='workcenter_id,is_ordre'
    # no physical table: init() below creates the backing SQL view instead
    _auto = False
    name = fields.Many2one('mrp.production', 'Ordre de fabrication')
    product_id = fields.Many2one('product.product', 'Article')
    state = fields.Char('Etat')
    mpwl_id = fields.Many2one('mrp.production.workcenter.line', 'Ordre de travail')
    workcenter_id = fields.Many2one('mrp.workcenter', 'Poste de travail')
    sequence = fields.Integer('Sequence')
    hour = fields.Float("Nombre d'Heures")
    cycle = fields.Float("Nombre de cycles")
    is_ordre = fields.Integer(' Ordre ')
    is_date_tri = fields.Datetime('Date tri')
    is_date_planning = fields.Datetime('Date planning')
    def init(self, cr):
        # (re)create the view at module install/update; id is the
        # work-center line id so records map 1:1 to mpwl rows
        tools.drop_view_if_exists(cr, 'is_mrp_production_workcenter_line')
        cr.execute("""
            CREATE OR REPLACE view is_mrp_production_workcenter_line AS (
                select
                    mpwl.id as id,
                    mp.id as name,
                    mp.product_id as product_id,
                    mp.state as state,
                    mpwl.id as mpwl_id,
                    mpwl.workcenter_id as workcenter_id,
                    mpwl.sequence as sequence,
                    mpwl.hour as hour,
                    mpwl.cycle as cycle,
                    mpwl.is_ordre as is_ordre,
                    mpwl.is_date_tri as is_date_tri,
                    mpwl.is_date_planning as is_date_planning
                from mrp_production_workcenter_line mpwl inner join mrp_production mp on mpwl.production_id=mp.id
                where mp.state<>'cancel'
                order by mpwl.workcenter_id, mpwl.is_ordre
            )
        """)
|
983,970 | 0b8b44fcaae4dfd72b08618f144253b715b3d1ed | from PIL import Image, ImageDraw, ImageFont
from math import sqrt, sin, cos, atan2, pi
from random import randint, random, sample
from collections import defaultdict
import numpy as np
import sys
import cv2
# named map regions per diagram: region name -> ((x1, y1), (x2, y2))
# pixel bounding boxes (top-left, bottom-right) on the diagram image
locations = {
    'outpost_ground': {
        'back flag': ((1500, 300), (1800, 600)),
        'front flag': ((1530, 1000), (1780, 1270)),
        'kill box': ((1550, 700), (1760, 1050)),
        'merchant': ((1220, 280), (1400, 610)),
        'mechant lower stairs': ((1200, 630), (1410, 850)),
        'merchant mid': ((1220, 860), (1400, 980)),
        'merchant upper stairs': ((1230, 1015), (1415, 1225)),
        'postern': ((1890, 280), (2070, 610)),
        'postern lower stairs': ((1875, 630), (2100, 850)),
        'postern mid': ((1880, 860), (2080, 980)),
        'postern upper stairs': ((1875, 1015), (2060, 1225)),
        'inside the door': ((1480, 1290), (1835, 1530)),
        'outside the door': ((1480, 1580), (1840, 1700)),
        'left of the door': ((1370, 1580), (1515, 1680)),
        'right of the door': ((1800, 1580), (1905, 1680)),
        'left porch': ((1160, 1580), (1370, 1680)),
        'right porch': ((1905, 1580), (2140, 1680)),
        'merchant stairs': ((950, 370), (1100, 500)),
        'postern stairs': ((2190, 370), (2340, 500)),
        'front porch': ((1170, 1575), (2140, 1890)),
        'edge of the porch': ((1340, 1890), (1900, 2150))
    }
}
fps = 60
delay = 10          # frame jitter applied to cast timers
instant = fps // 2  # duration (frames) of a briefly-shown effect
flash = fps // 4    # duration (frames) of a detonation flash
text = round(0.9 * fps)  # duration (frames) a call label stays on screen
# diagram dimensions with density 300 in PDF to PNG conversion
width = 3300
height = 2550
# THESE VALUES ARE MERE GUESSES
meter = 5 # how many pixels should a meter be
# pick a pixels-per-meter scale from the diagram filename, if one was given
if len(sys.argv) > 1:
    if 'corner' in sys.argv[1]: # corner tower diagrams
        meter = 10
    elif 'inner' in sys.argv[1]: # inner keep diagrams
        meter = 7
    elif 'outpost' in sys.argv[1]: # outer keep or outpost
        meter = 12
    elif 'door' in sys.argv[1]: # outer keep outer door
        meter = 12
    elif 'resource' in sys.argv[1]:
        meter = 12
    else:
        print('Unknown diagram')
        quit()
margin = round(0.1 * min(width, height))  # bounce margin for the lead
full = 2 * pi
total = 30 # total duration in seconds
final = total * fps  # total frame count
clock = 0            # global frame counter, advanced by Raid.step
noise = 3            # per-frame positional jitter in pixels
aim = 5 * meter      # aiming error for ground-targeted casts
print(f'Animating {final} frames')
colors = { 'crown': (218, 204, 31, 255), # yellow
           'healer': (31, 194, 219, 255), # blue
           'support': (5, 117, 18, 255), # green
           'offensive': (52, 235, 232, 255), # cyan
           'dd': (209, 13, 173, 255), # purple
           'streak': (0, 0, 255, 255),
           'rapids': (196, 49, 6, 100),
           'purge': (245, 158, 66, 100),
           'rr': (209, 170, 13, 200), # yellow
           'heal': (69, 245, 66, 100), # green
           'label': (255, 255, 255, 255)}
INACTIVE = -1
# frame number -> set of visual-effect tuples to draw on that frame
active = defaultdict(set)
# NOTE(review): macOS-specific font path; fails elsewhere -- confirm target OS
fnt = ImageFont.truetype("/Library/Fonts/Arial Unicode.ttf", 40)
def sprite(x, y, s = 2):
    """Return the four vertices of a player marker (an arrow-like polygon)
    centred at (x, y), s meters long (2 by default)."""
    size = s * meter
    half = size // 2
    return [(x + size, y), (x, y - half), (x + size // 3, y), (x, y + half)]
def dist(x1, y1, x2, y2):
    """Euclidean distance between (x1, y1) and (x2, y2)."""
    return sqrt((x2 - x1) ** 2 + (y2 - y1) ** 2)
def uniform(low, high):
    """Random float drawn uniformly from [low, high) (like random.uniform)."""
    return low + (high - low) * random()
def rotate(p, c, a):
    """Rotate point p around center c by angle a (radians, y axis inverted)
    and return the result rounded to integer pixel coordinates."""
    (cx, cy) = c
    (x, y) = p
    # polar form of p relative to c (radius computed inline, same as dist())
    radius = sqrt((x - cx) ** 2 + (y - cy) ** 2)
    angle = a + atan2(y - cy, x - cx)
    return (round(cx + radius * cos(angle)), round(cy - radius * sin(angle)))
def circle(p, r, c, canvas):
    """Draw an unfilled circle of radius r centred at p, outline colour c.

    Fix: removed the unused local ``d = r // 2`` left over in the original.
    """
    (x, y) = p
    # axis-aligned bounding box of the circle for PIL's ellipse()
    bb = [x - r, y - r, x + r, y + r]
    canvas.ellipse(bb, outline = c, width = 3)
def beam(caster, target, color, canvas):
    """Draw a straight beam between the caster's and target's positions."""
    endpoints = (caster.x, caster.y, target.x, target.y)
    canvas.line(endpoints, fill = color, width = 3)
def visualize(specs, canvas):
    """Render one queued effect tuple onto the canvas.

    specs is ('circ', caster, radius, color[, location_key]),
    ('beam', caster, target, color) or ('label', text); the tag is
    always the first element of the tuple.
    """
    tag = specs[0]
    if tag == 'circ':
        caster = specs[1]
        if len(specs) == 4:
            center = (caster.x, caster.y)
        else:
            # 5-tuple: draw at a stored ground-target location instead
            center = caster.locations[specs[4]]
        circle(center, specs[2], specs[3], canvas)
    elif tag == 'beam':
        beam(specs[1], specs[2], specs[3], canvas)
    elif tag == 'label':
        canvas.text((50, 50), specs[1], font = fnt, fill = colors['label'])
# skills
def charging(time, caster, party):
    """Charging Maneuver: give 8 s of rapids to party members in range and
    briefly draw the skill radius."""
    radius = 20 * meter
    in_range = [m for m in party if dist(caster.x, caster.y, m.x, m.y) < radius]
    for m in in_range:
        m.apply('rapids', 8)
    for frame in range(time, time + instant):
        active[frame].add(('circ', caster, radius, colors['rapids']))
def purge(time, caster, raid):
    """Purge: briefly draw the purge radius around the caster."""
    reach = 18 * meter
    for frame in range(time, time + instant):
        active[frame].add(('circ', caster, reach, colors['purge']))
def radiant(time, caster, party):
    """Radiant Regeneration: beam up to 3 randomly-sampled party members
    within radius of the caster.

    Fix: the inner loop variable was ``members`` but the body used the stale
    ``member`` left over from the candidate-collection loop above, so every
    beam targeted the same (last in-range) player instead of the sampled ones.
    """
    radius = 28 * meter
    candidates = list()
    for member in party:
        if dist(caster.x, caster.y, member.x, member.y) < radius:
            candidates.append(member)
    for t in range(time, time + instant):
        for target in sample(candidates, min(3, len(candidates))):
            active[t].add(('beam', caster, target, colors['rr']))
def streak(time, caster, target):
    """Briefly draw a beam from the caster back to the streak origin point."""
    for frame in range(time, time + instant):
        active[frame].add(('beam', caster, target, colors['streak']))
def illustrious(time, caster, party):
    """Illustrious Healing: a 12-second ground circle drawn at the caster's
    stored 'illustrious' target location."""
    duration = 12 * fps # a 12-second ground effect
    for frame in range(time, time + duration):
        active[frame].add(('circ', caster, 8 * meter, colors['heal'], 'illustrious'))
def proxy(time, caster, party):
    """Proximity Detonation: small circle while armed (8 s), flashing
    large during the final detonation window."""
    end = time + 8 * fps # an 8-second timer
    for frame in range(time, end):
        detonating = frame > end - flash
        size = 8 * meter if detonating else 2 * meter
        active[frame].add(('circ', caster, size, (199, 6, 199, 100)))
def shake(x, m, dec = False):
    """Jitter x by up to +/-m: real-valued when dec is True, integer otherwise."""
    if not dec:
        return x + randint(-m, m)
    direction = 1 if random() < 0.5 else -1
    return x + direction * m * random()
class Point:
    """Minimal 2-D coordinate holder."""
    def __init__(self, x, y):
        self.x = x
        self.y = y
class Player:
    """One raid member: position, heading, timed effects and skill timers.

    Fixes relative to the original:
    * bottom-edge bounce used ``uniform(pi / 4, 3 * pi / 3)`` -- a typo for
      ``3 * pi / 4``; the other three edges all reflect into a symmetric
      quarter-circle range and the bottom edge must aim upward too.
    * the 'streak' effect was set to ``5 + randint(delay, delay)`` (a
      degenerate randint, 15 frames ~ 0.25 s) despite the comment saying
      "5 seconds"; it now uses ``5 * fps`` with the same +/-delay jitter
      every other timer in this class uses.
    * rotation timers for skills not on rotation were initialized to
      ``fps * INACTIVE + jitter`` (about -60) and then decremented forever
      by cast(); they are now initialized to INACTIVE directly.
    """
    def __init__(self, r, x, y, a, p = INACTIVE, cm = INACTIVE, rr = INACTIVE, bounce = False):
        self.x = x
        self.y = y
        self.dx = 0
        self.dy = 0
        self.a = a               # heading angle in radians
        self.bounce = bounce     # only the crown lead bounces off the margins
        self.speed = 2 # meters per second base speed while running (not sprinting)
        self.role = r
        self.effects = { 'rapids': 0 }  # effect name -> remaining frames
        # rotation periods in seconds; INACTIVE = skill not on rotation
        self.every = { purge: p,
                       charging: cm,
                       radiant: rr }
        # frame countdowns until the next cast (jittered by +/-delay frames)
        self.timers = { spell: fps * seconds + randint(-delay, delay)
                               if seconds != INACTIVE else INACTIVE
                        for (spell, seconds) in self.every.items() }
        self.locations = dict()  # named ground-target points for casts

    def apply(self, effect, duration):
        """Apply a timed effect for *duration* seconds (stored as frames)."""
        self.effects[effect] = duration * fps

    def process(self, call, lead):
        """React to a raid call ('heal' or 'proxy') by scheduling one-shot casts."""
        if self.role == 'healer' and call == 'heal':
            # aim the ground heal at the lead, with human aiming error
            lx = shake(lead.x, aim)
            ly = shake(lead.y, aim)
            d = dist(self.x, self.y, lx, ly)
            ix = shake(self.x, aim)
            iy = shake(self.y, aim)
            if d < 28 * meter: # can reach the lead
                ix = lx
                iy = ly
            self.locations['illustrious'] = (ix, iy)
            self.timers[illustrious] = randint(delay, 2 * delay)
        elif call == 'proxy':
            if self.role == 'dd':
                self.timers[proxy] = randint(delay, 2 * delay)
            elif self.role == 'crown':
                self.timers[proxy] = randint(0, delay)
            elif self.role == 'offensive':
                # 5 seconds after proxies are called
                self.effects['streak'] = 5 * fps + randint(-delay, delay)

    def move(self, lead):
        """Advance one frame: the crown wanders (bouncing off margins),
        everyone else mirrors the lead's movement."""
        if self.role == 'crown':
            v = self.speed if self.effects['rapids'] == 0 else round(1.3 * self.speed)
            self.dx = round(v * cos(self.a))
            self.dy = -round(v * sin(self.a)) # inverted y axis
            self.x += shake(self.dx, noise)
            self.y += shake(self.dy, noise)
            if self.bounce:
                # turn back toward the interior when crossing a margin
                if self.x < margin:
                    self.a = uniform(-pi / 4, pi / 4)
                elif self.x > width - margin:
                    self.a = uniform(3 * pi / 4, 5 * pi / 4)
                if self.y < margin:
                    self.a = uniform(-3 * pi / 4, -pi / 4)
                elif self.y > height - margin:
                    self.a = uniform(pi / 4, 3 * pi / 4)
                if self.a < 0:
                    self.a += full
                elif self.a > full:
                    self.a -= full
        else:
            self.dx = lead.dx
            self.dy = lead.dy
            origin = None
            if self.effects.get('streak', INACTIVE) == 0:
                # streak fires this frame: dash forward and remember the origin
                origin = Point(self.x, self.y)
                l = 15 * meter # streaks are 15 meters
                self.dx += round(l * cos(self.a))
                self.dy -= round(l * sin(self.a))
            self.x += shake(self.dx, noise)
            self.y += shake(self.dy, noise)
            if origin is not None:
                streak(clock, self, origin)
            self.a = lead.a
        for effect in self.effects: # decrease effect durations
            if self.effects[effect] >= 0:
                self.effects[effect] -= 1
        return

    def cast(self, time, canvas, raid):
        """Fire any skill whose countdown reached zero; reschedule rotations,
        deactivate one-shot (call-driven) skills."""
        for spell in self.timers:
            if self.timers[spell] == INACTIVE:
                continue # inactive
            elif self.timers[spell] == 0:
                spell(time, self, raid)
                if spell in self.every: # rotation, not a call
                    self.timers[spell] = self.every[spell] * fps
                else:
                    self.timers[spell] = INACTIVE # only when called
            else:
                self.timers[spell] -= 1 # reduce timer to next cast

    def draw(self, canvas):
        # marker polygon rotated to the current heading, coloured by role
        canvas.polygon([rotate(p, (self.x, self.y), self.a) for p in sprite(self.x, self.y)],
                       fill = colors[self.role])
# raid-call schedule: (second, call name) pairs, keyed by frame number
calls = {
    second * fps: call
    for (second, call) in [(1, 'heal'), (2, 'proxy'), (4, 'heal'), (12, 'proxy'),
                           (14, 'heal'), (17, 'heal'), (22, 'proxy'), (25, 'heal')]
}
class Raid:
    """A 16-player raid group: one 'crown' lead plus followers scattered
    around it, stepped one frame at a time."""
    def __init__(self, x, y, a, total = 30):
        self.timer = total * fps
        spread = 6 * meter  # initial scatter of members around the lead
        self.lead = Player('crown', x, y, a, bounce = True, rr = 10)
        # rr/p/cm are rotation periods in seconds for each member's skills
        self.party = [ self.lead,
                       Player('healer', shake(x, spread), shake(y, spread), a, rr = 3),
                       Player('healer', shake(x, spread), shake(y, spread), a, rr = 4),
                       Player('healer', shake(x, spread), shake(y, spread), a, rr = 5),
                       Player('healer', shake(x, spread), shake(y, spread), a, rr = 6),
                       Player('healer', shake(x, spread), shake(y, spread), a, rr = 7),
                       Player('dd', shake(x, spread), shake(y, spread), a, rr = 10),
                       Player('dd', shake(x, spread), shake(y, spread), a, rr = 11),
                       Player('dd', shake(x, spread), shake(y, spread), a, rr = 12),
                       Player('dd', shake(x, spread), shake(y, spread), a, rr = 11),
                       Player('dd', shake(x, spread), shake(y, spread), a, rr = 10),
                       Player('support', shake(x, spread), shake(y, spread), a, p = 4, rr = 8), # purger
                       Player('support', shake(x, spread), shake(y, spread), a, p = 4, rr = 9), # purger
                       Player('offensive', shake(x, spread), shake(y, spread), a, p = 6, rr = 10), # streaker
                       Player('offensive', shake(x, spread), shake(y, spread), a, p = 6, rr = 8), # streaker
                       Player('support', shake(x, spread), shake(y, spread), a, cm = 8) ] # stam support
    def step(self, canvas):
        """Advance one frame: move, dispatch scheduled calls, cast, draw.
        Returns True while more frames remain."""
        global clock
        for member in self.party:
            member.move(self.lead)
        if clock in calls:
            # a raid call fires this frame: show its label and notify members
            call = calls[clock]
            for time in range(clock, clock + text):
                active[time].add(('label', call))
            for member in self.party:
                member.process(call, self.lead)
        for member in self.party:
            member.cast(clock, canvas, self.party)
        for visual in active[clock]:
            visualize(visual, canvas)
        del active[clock]
        for member in self.party:
            member.draw(canvas)
        clock += 1
        if clock % fps == 0:
            # progress indicator: one dot per simulated second
            sys.stdout.write('.')
            sys.stdout.flush()
        return clock < final
# background image: plain black canvas unless a diagram path was given
img = None
if len(sys.argv) == 1:
    width = 800
    height = 800
    dim = (width, height)
    img = Image.new('RGBA', dim, color = 'black')
else:
    img = Image.open(sys.argv[1])
    width, height = img.size
    dim = (width, height)
# encode the animation as H.264 mp4 at the simulation frame rate
fourcc = cv2.VideoWriter_fourcc(*'avc1')
video = cv2.VideoWriter('simulation.mp4', fourcc, fps, dim)
raid = Raid(width // 2, height // 2, uniform(0, full))
while True:
    frame = img.copy()  # draw each frame on a fresh copy of the background
    if raid.step(ImageDraw.Draw(frame)):
        video.write(cv2.cvtColor(np.array(frame), cv2.COLOR_RGBA2BGR))
    else:
        break
print('\nReleasing video')
video.release()
|
983,971 | 29e101e8fd2c543018833de816a5a39568181fdf | # a=str(input())
# print(ord(a))
# print the Unicode code point of the entered character;
# input() already returns str, so the original str() wrapper was redundant
print(ord(input()))
|
983,972 | f6d0b8ecc00c8999b4ed9fff8e7de851d786416a | from django.urls import path
from snippets import views
from .views import *
# URL routes for the snippets app.
# Fix: the EMPLOYEE section reused 'post_student/' and
# 'delete_student_by_pk/...' verbatim; Django resolves the first matching
# pattern, so PostEmployeeListView and DeleteEmployeeListView were
# unreachable. They now get employee-specific routes.
urlpatterns = [
    # SNIPPET
    path('get_snippets/',GetSnippetListView.as_view()),
    path('post_snippets/',PostSnippetListView.as_view()),
    path('get_snippets_by_pk/<int:pk>/',GetSnippetDetailView.as_view()),
    # NOTE(review): 'snipptes' is misspelled but kept -- clients may already
    # depend on this exact URL
    path('put_snipptes_by_pk/<int:pk>/',PutSnippetDetailView.as_view()),
    path('delete_snippets_by_pk/<int:pk>/',DeleteSnippetView.as_view()),
    # STUDENT
    path('get_student/',GetStudentListView.as_view()),
    path('post_student/',PostStudentListView.as_view()),
    path('get_student_by_pk/<int:pk>/',GetStudentDetailView.as_view()),
    path('put_student_by_pk/<int:pk>/',PutStudentlistView.as_view()),
    path('delete_student_by_pk/<int:pk>/',DeleteStudentListView.as_view()),
    # EMPLOYEE
    path('get_employee/',GetEmployeeListView.as_view()),
    path('post_employee/',PostEmployeeListView.as_view()),
    path('get_employee_by_pk/<int:pk>/',RetrieveEmployeeListView.as_view()),
    path('put_employee_by_pk/<int:pk>/',PutEmployeeListView.as_view()),
    path('delete_employee_by_pk/<int:pk>/',DeleteEmployeeListView.as_view()),
]
|
983,973 | 00750846b5fb7ef7bd53dc47c65e83e4200fd5e2 | import sys
print("enter enter any message")
mess = sys.stdin.readline()
print("Your message is:")
sys.stdout.write(mess)
sys.stderr.write("error Message") |
983,974 | bfe63580e6ec7a7935c5dedd02a193f942bbb43a | # Ran using Python -version: 3.6.4. Full Stack trace in the README.md
# This results in a VM session with the windows 2008 platform (reports as Windows 7). The below stacktrace error is returned in the terminal:
# - Max Dobeck
from selenium import webdriver
from sauceclient import SauceClient
import os
# Sauce Labs credentials are taken from the environment
username = os.environ.get('SAUCE_USERNAME')
access_key = os.environ.get('SAUCE_ACCESS_KEY')
sauce_client = SauceClient(username, access_key)
# W3C-style capabilities: Chrome 65 on Windows 10
# NOTE(review): the header comment mentions Windows 2008, but these caps
# request Windows 10 -- confirm which platform is intended
desired_caps = {
    'platformName': "Windows 10",
    'browserName': "chrome",
    'browserVersion': "65",
#    'sauce:seleniumVersion': "3.8.0",
    'goog:chromeOptions':{"w3c": "true"}
}
driver = webdriver.Remote(command_executor="https://%s:%s@ondemand.saucelabs.com/wd/hub" % (username, access_key), desired_capabilities=desired_caps)
driver.implicitly_wait(30)
driver.maximize_window()
driver.get("https://www.google.com")
# mark the Sauce Labs job as passed, then end the session
sauce_client.jobs.update_job(driver.session_id, passed=True)
driver.quit()
|
983,975 | 293ec3acceca10222183372da17f83e4edc26486 | # 方法一:暴力法(超时)
class Solution:
    def countSmaller(self, nums: list) -> list:
        """Brute force: for each element, count strictly smaller elements to
        its right by scanning the remaining suffix. O(n^2) time."""
        counts = []
        for i, value in enumerate(nums):
            counts.append(sum(1 for later in nums[i + 1:] if later < value))
        return counts
# 方法二:二分查找+插入排序
class Solution2:
    def countSmaller(self, nums: list) -> list:
        """Scan right-to-left, keeping already-seen values in a sorted list;
        bisect_left gives the count of seen values strictly smaller than the
        current one. O(n^2) worst case due to list insertion."""
        import bisect
        seen = []
        result = [0] * len(nums)
        for idx in reversed(range(len(nums))):
            result[idx] = bisect.bisect_left(seen, nums[idx])
            bisect.insort_left(seen, nums[idx])
        return result
# 方法三:归并排序算逆序数
class Solution3:
    def countSmaller(self, nums: list) -> list:
        """Merge sort over (original_index, value) pairs; while merging,
        every right-half element placed before a left-half element is a
        smaller element to its right. O(n log n)."""
        if not nums:
            return []
        self.result = [0] * len(nums)
        self.merge_sort([(i, v) for i, v in enumerate(nums)])
        return self.result

    def merge_sort(self, pairs):
        """Sort (index, value) pairs by value, accumulating counts into
        self.result as right-half elements overtake left-half ones."""
        if not pairs:
            return []
        if len(pairs) == 1:
            return pairs
        half = len(pairs) // 2
        left = self.merge_sort(pairs[:half])
        right = self.merge_sort(pairs[half:])
        merged = []
        li = ri = 0
        moved = 0  # right-half elements already merged ahead of the left half
        while li < len(left) and ri < len(right):
            if left[li][1] <= right[ri][1]:
                # every right element merged so far was smaller and to the right
                self.result[left[li][0]] += moved
                merged.append(left[li])
                li += 1
            else:
                merged.append(right[ri])
                ri += 1
                moved += 1
        while li < len(left):
            self.result[left[li][0]] += moved
            merged.append(left[li])
            li += 1
        merged.extend(right[ri:])
        return merged
# 方法四:排序树
class TreeNode:
    """BST node that remembers how many nodes live in its left subtree."""

    def __init__(self, val):
        self.val = val
        self.left = None
        self.right = None
        # Number of nodes inserted into this node's left subtree.
        self.count = 0
class Solution4:
    """Counting BST: insert values right-to-left, counting smaller nodes.

    BUG FIX: the original annotated ``countSmaller`` with ``List[int]``
    although ``typing.List`` is never imported in this file, so merely
    defining the class raised NameError.  Plain ``list`` (as used by the
    sibling solutions) fixes that without changing the interface.
    """

    def countSmaller(self, nums: list) -> list:
        """Return, for each index, the number of smaller elements to its
        right, by inserting values right-to-left into a counting BST."""
        if not nums:
            return []
        res = [0] * len(nums)
        root = None
        for i in reversed(range(len(nums))):
            root = self.insert(root, nums[i], res, i)
        return res

    def insert(self, root, val, res, index):
        """Insert ``val``; accumulate into ``res[index]`` how many
        already-inserted (right-side) values are smaller than ``val``."""
        if root is None:
            root = TreeNode(val)
        elif val <= root.val:
            # New value goes left: one more node in root's left subtree.
            root.left = self.insert(root.left, val, res, index)
            root.count += 1
        else:
            # Root and its entire left subtree are smaller than val.
            res[index] += root.count + 1
            root.right = self.insert(root.right, val, res, index)
        return root
print(Solution3().countSmaller([4, 2, 1, 2, 5, 5, 3])) |
983,976 | 74137b820a2a6ed2df700142fc7ee736e294f2ab | #!/usr/bin/env python
# coding=utf-8
import socket
import urllib
import urllib2
import json
class HttpMethod(object):
    """Thin wrapper around urllib2 exposing POST/PUT/GET/DELETE helpers.

    All methods are static; network errors are caught and reported on
    stdout rather than raised (best-effort semantics kept from the
    original Python-2 code).
    """
    def __init__(self):
        pass
    @staticmethod
    def post(url=None, params=None, timeout=50):
        """POST ``params`` (url-encoded) to ``url``; plain GET when empty.

        Returns (body, True) on HTTP 200, (body_or_error, False)
        otherwise.  The global socket timeout is temporarily overridden
        and restored in ``finally``.
        """
        old_timeout = socket.getdefaulttimeout()
        socket.setdefaulttimeout(timeout)
        try:
            # POST when a payload is supplied, plain GET otherwise.
            if params:
                request = urllib2.Request(url, urllib.urlencode(params))
            else:
                request = urllib2.Request(url)
            # request.add_header('Accept-Language', 'zh-cn')
            # BUG FIX: the MIME type was reversed as 'json/application';
            # the correct value is 'application/json'.
            # NOTE(review): the body is url-encoded, not JSON -- confirm
            # what the receiving service actually expects.
            request.add_header('Content-Type', 'application/json')
            response = urllib2.urlopen(request)
            content = response.read()
            if response.code == 200:
                return content, True
            return content, False
        except Exception as ex:
            print ("Post 方法调用异常:%s" % ex)
            return str(ex), False
        finally:
            # 'response' only exists if urlopen succeeded.
            if 'response' in dir():
                response.close()
            socket.setdefaulttimeout(old_timeout)
    @staticmethod
    def put(url=None, params=None, url_encode=True):
        """PUT to ``url``.

        ``url_encode`` controls whether ``params`` is url-encoded first;
        when False, ``params`` must already be a string payload.
        Returns the response body, or None on error.
        """
        try:
            if url_encode:
                data = urllib.urlencode(params)
            else:
                data = params
            req = urllib2.Request(url, data)
            req.get_method = lambda: 'PUT'  # urllib2 has no native PUT
            ret = urllib2.urlopen(req).read()
            return ret
        except Exception as ex:
            print("Put方法调用异常:%s" % ex)
    @staticmethod
    def get(url):
        """GET ``url`` and return the response body (None on error)."""
        try:
            req = urllib2.Request(url)
            ret = urllib2.urlopen(req).read()
            return ret
        except Exception as ex:
            print("Get方法调用异常:%s" % ex)
    @staticmethod
    def delete(url=None, params=None):
        """DELETE with a url-encoded body; returns body or None on error."""
        try:
            data = urllib.urlencode(params)
            req = urllib2.Request(url, data)
            req.get_method = lambda: 'DELETE'  # urllib2 has no native DELETE
            ret = urllib2.urlopen(req).read()
            return ret
        except Exception as ex:
            print("Delete 方法调用异常:%s" % ex)
if __name__ == '__main__':
    # Smoke test: fetch a page and dump it (Python 2 ``print`` statement).
    res = HttpMethod().get('http://www.baidu.com')
    print res
|
983,977 | 797c4243678f928f40182805244115636ab679a7 | import matplotlib.pyplot as plt
from statsmodels.tsa.holtwinters import ExponentialSmoothing, SimpleExpSmoothing, Holt
import warnings
import numpy as np
from util import mean_absolute_percentage_error, mean_squared_error, get_data
warnings.filterwarnings("ignore")
# Define the parameters
# Load the error series, hold out the last 30 samples for testing, and
# fit additive-trend (Holt) exponential smoothing on the rest.
df = get_data()
df = df.set_index('Sample')
y = df['error']
train = y.iloc[:-30]
test = y.iloc[-30:]
# Fixed smoothing parameters (alpha=0.1, trend beta=0.2), not optimized.
fit1 = ExponentialSmoothing(train, trend='add').fit(smoothing_level=0.1, smoothing_slope = 0.2)
print(fit1.summary())
plt.title("Graph starting from 350k tightening")
plt.xlabel("Sampled Tightening (1 sample = 500 Tightening)")
plt.ylabel("Number of NOKs")
# Forecast the held-out horizon and overlay train/test/prediction curves.
test_predictions = fit1.forecast(30)
train.plot(legend=True,label='TRAIN')
test.plot(legend=True,label='TEST',figsize=(12,8))
test_predictions.plot(legend=True,label='PREDICTION')
# Error metrics on the hold-out window.
mse = mean_squared_error(test,test_predictions)
mape = mean_absolute_percentage_error(test,test_predictions)
print('Mape: ', mape)
# NOTE(review): "100 - mape" as accuracy assumes MAPE is expressed in
# percent -- confirm the helper's units.
print('Accuracy is :', 100 - mape, '%')
print('The Mean Squared Error of our forecasts is {}'.format(mse))
print('The Root Mean Squared Error of our forecasts is {}'.format(round(np.sqrt(mse), 2)))
plt.show()
|
983,978 | 7543a08c1b14edf8469cd54f85fe87d223c66fb8 |
# coding: utf-8
# If you are not using the `Assignments` tab on the course JupyterHub server to read this notebook, read [Activating the assignments tab](https://github.com/lcdm-uiuc/info490-sp17/blob/master/help/act_assign_tab.md).
#
# A few things you should keep in mind when working on assignments:
#
# 1. Make sure you fill in any place that says `YOUR CODE HERE`. Do **not** write your answer in anywhere else other than where it says `YOUR CODE HERE`. Anything you write anywhere else will be removed or overwritten by the autograder.
#
# 2. Before you submit your assignment, make sure everything runs as expected. Go to menubar, select _Kernel_, and restart the kernel and run all cells (_Restart & Run all_).
#
# 3. Do not change the title (i.e. file name) of this notebook.
#
# 4. Make sure that you save your work (in the menubar, select _File_ → _Save and CheckPoint_)
#
# 5. You are allowed to submit an assignment multiple times, but only the most recent submission will be graded.
# ## Problem 12.2. MapReduce.
#
# In this problem, we will use Hadoop Streaming to execute a MapReduce code written in Python.
# In[31]:
import os
from nose.tools import assert_equal, assert_true
# We will use the [airline on-time performance data](http://stat-computing.org/dataexpo/2009/), but before proceeding, recall that the data set is encoded in `latin-1`. However, the Python 3 interpreter expects the standard input and output to be in `utf-8` encoding. Thus, we have to explicitly state that the Python interpreter should use `latin-1` for all IO operations, which we can do by setting the Python environment variable `PYTHONIOENCODING` equal to `latin-1`. We can set the environment variables of the current IPython kernel by modifying the `os.environ` dictionary.
# In[32]:
os.environ['PYTHONIOENCODING'] = 'latin-1'
# Let's use the shell to check if the variable is set correctly. If you are not familiar with the following syntax (i.e., Python variable = ! shell command), [this notebook](https://github.com/UI-DataScience/info490-fa15/blob/master/Week4/assignment/unix_ipython.ipynb) from the previous semester might be useful.
# In[33]:
python_io_encoding = get_ipython().getoutput('echo $PYTHONIOENCODING')
assert_equal(python_io_encoding.s, 'latin-1')
# ## Mapper
#
# Write a Python script that
# - Reads data from `STDIN`,
# - Skips the first line (The first line of `2001.csv` is the header that has the column titles.)
# - Outputs to `STDOUT` the `Origin` and `AirTime` columns separated with a tab.
# In[34]:
get_ipython().run_cell_magic('writefile', 'mapper.py', '#!/usr/bin/env python3\n\nimport sys\n#read data from stdin\nwith sys.stdin as fin:\n # skip header\n next(fin) \n with sys.stdout as fout:\n #Output origin and airtime with tab delimiter\n for line in fin: \n line = line.strip()\n words = line.split(\',\')\n fout.write("{0}\\t{1}\\n".format(words[16], words[13]))')
# We need make the file executable.
# In[35]:
get_ipython().system(' chmod u+x mapper.py')
# Before testing the mapper code on the entire data set, let's first create a small file and test our code on this small data set.
# In[36]:
get_ipython().system(' head -n 50 $HOME/data/2001.csv > 2001.csv.head')
map_out_head = get_ipython().getoutput('./mapper.py < 2001.csv.head')
print('\n'.join(map_out_head))
# In[37]:
assert_equal(
map_out_head,
['BWI\t60','BWI\t64','BWI\t80','BWI\t66','BWI\t62','BWI\t61',
'BWI\t61','BWI\t60','BWI\t52','BWI\t62','BWI\t62','BWI\t55',
'BWI\t60','BWI\t61','BWI\t63','PHL\t53','PHL\t54','PHL\t55',
'PHL\t53','PHL\t50','PHL\tNA','PHL\t57','PHL\t48','PHL\t56',
'PHL\t55','PHL\t55','PHL\t55','PHL\t55','PHL\t49','PHL\t75',
'PHL\t49','PHL\t50','PHL\t49','PHL\tNA','PHL\t46','PHL\tNA',
'PHL\t51','PHL\t53','PHL\t52','PHL\t52','PHL\t54','PHL\t56',
'PHL\t55','PHL\t51','PHL\t49','PHL\t49','CLT\t82','CLT\t82',
'CLT\t78']
)
# ## Reducer
#
# Write a Python script that
#
# - Reads key-value pairs from `STDIN`,
# - Computes the minimum and maximum air time for flights, with respect to each origin airport,
# - Outputs to `STDOUT` the airports and the minimum and maximum air time for flights at each airport, separated with tabs.
#
# For example,
#
# ```shell
# $ ./mapper.py < 2001.csv.head | sort -n -k 1 | ./reducer.py
# ```
#
# should give
#
# ```
# BWI 52 80
# CLT 78 82
# PHL 46 75
# ```
# In[38]:
get_ipython().run_cell_magic('writefile', 'reducer.py', '#!/usr/bin/env python3\n\nimport sys\n#Read key value pairs from stdin\nwith sys.stdin as fin:\n with sys.stdout as fout:\n current_word = None\n current_min = None\n current_max = None\n word = None\n for line in fin:\n \n word = line.split(\'\\t\')[0]\n airt = line.split(\'\\t\')[1]\n if airt != "NA\\n":\n airt = int(airt)\n #Compute min and max airtimes\n if current_word == None:\n current_min = current_max = airt\n current_word = word\n elif word == current_word:\n current_min = min(current_min, airt)\n current_max = max(current_max, airt)\n else:\n fout.write(\'%s\\t%d\\t%d\\n\' % (current_word, current_min, current_max))\n current_min = current_max = airt\n current_word = word\n #output to stdout the max/min for flights in that airport\n else:\n if current_word == word:\n fout.write(\'%s\\t%d\\t%d\\n\' % (current_word, current_min, current_max))')
# In[39]:
get_ipython().system(' chmod u+x reducer.py')
# In[40]:
red_head_out = get_ipython().getoutput('./mapper.py < 2001.csv.head | sort -n -k 1 | ./reducer.py')
print('\n'.join(red_head_out))
# In[41]:
assert_equal(red_head_out, ['BWI\t52\t80','CLT\t78\t82','PHL\t46\t75'])
# If the previous tests on the smaller data set were successful, we can run the mapreduce on the entire data set.
# In[42]:
mapred_out = get_ipython().getoutput('./mapper.py < $HOME/data/2001.csv | sort -n -k 1 | ./reducer.py')
print('\n'.join(mapred_out[:10]))
# In[43]:
assert_equal(len(mapred_out), 231)
assert_equal(mapred_out[:5], ['ABE\t16\t180', 'ABI\t28\t85', 'ABQ\t15\t264', 'ACT\t19\t81', 'ACY\t33\t33'])
assert_equal(mapred_out[-5:], ['TYS\t11\t177', 'VPS\t28\t123', 'WRG\t5\t38', 'XNA\t33\t195', 'YAK\t28\t72'])
# ## HDFS: Reset
#
# We will do some cleaning up before we run Hadoop streaming. Let's first stop the [namenode and datanodes](https://hadoop.apache.org/docs/r1.2.1/hdfs_design.html).
# In[44]:
get_ipython().system(' $HADOOP_PREFIX/sbin/stop-dfs.sh')
get_ipython().system(' $HADOOP_PREFIX/sbin/stop-yarn.sh')
# If there are any temporary files created during the previous Hadoop operation, we want to clean them up.
# In[45]:
get_ipython().system(' rm -rf /tmp/*')
# We will simply [format the namenode](https://wiki.apache.org/hadoop/GettingStartedWithHadoop#Formatting_the_Namenode) and delete all files in our HDFS. Note that our HDFS is in an ephemeral Docker container, so all data will be lost anyway when the Docker container is shut down.
# In[46]:
get_ipython().system(' echo "Y" | $HADOOP_PREFIX/bin/hdfs namenode -format 2> /dev/null')
# After formatting the namenode, we restart the namenode and datanodes.
# In[47]:
get_ipython().system('$HADOOP_PREFIX/etc/hadoop/hadoop-env.sh')
get_ipython().system('$HADOOP_PREFIX/sbin/start-dfs.sh')
get_ipython().system('$HADOOP_PREFIX/sbin/start-yarn.sh')
# Sometimes when the namenode is restarted, it enteres Safe Mode, not allowing any changes to the file system. We do want to make changes, so we manually leave Safe Mode.
# In[48]:
get_ipython().system(' $HADOOP_PREFIX/bin/hdfs dfsadmin -safemode leave')
# ## HDFS: Create directory
#
# - Create a new directory in HDFS at `/user/data_scientist`.
# In[49]:
get_ipython().system('$HADOOP_PREFIX/bin/hdfs dfs -mkdir -p /user/data_scientist')
#Create new directory in data_scientist
# In[50]:
ls_user = get_ipython().getoutput('$HADOOP_PREFIX/bin/hdfs dfs -ls /user/')
print('\n'.join(ls_user))
# In[51]:
assert_true('/user/data_scientist' in ls_user.s)
# - Create a new directory in HDFS at `/user/data_scientist/wc/in`
# In[52]:
# Create a new directory in HDFS at `/user/data_scientist/wc/in`
get_ipython().system('$HADOOP_PREFIX/bin/hdfs dfs -mkdir -p /user/data_scientist/wc/in')
# In[53]:
ls_wc = get_ipython().getoutput('$HADOOP_PREFIX/bin/hdfs dfs -ls wc')
print('\n'.join(ls_wc))
# In[54]:
assert_true('wc/in' in ls_wc.s)
# ## HDFS: Copy
#
# - Copy `/home/data_scientist/data/2001.csv` from local file system into our new HDFS directory `wc/in`.
# In[55]:
# Copy `/home/data_scientist/data/2001.csv` from local file system into our new HDFS directory `wc/in`.
get_ipython().system('$HADOOP_PREFIX/bin/hdfs dfs -put /home/data_scientist/data/2001.csv /user/data_scientist/wc/in/2001.csv')
# In[56]:
ls_wc_in = get_ipython().getoutput('$HADOOP_PREFIX/bin/hdfs dfs -ls wc/in')
print('\n'.join(ls_wc_in))
# In[57]:
assert_true('wc/in/2001.csv' in ls_wc_in.s)
# ## Python Hadoop Streaming
#
# - Run `mapper.py` and `reducer.py` via Hadoop Streaming.
# - Use `/usr/local/hadoop/share/hadoop/tools/lib/hadoop-streaming-2.7.2.jar`.
# - We need to pass the `PYTHONIOENCODING` environment variable to our Hadoop streaming task. To find out how to set `PYTHONIOENCODING` to `latin-1` in a Hadoop streaming task, use the `--help` and `-info` options.
# In[58]:
get_ipython().run_cell_magic('bash', '', '$HADOOP_PREFIX/bin/hdfs dfs -rm -r -f wc/out\n\n#This kept crashing the server everytime I ran it. I think tooo many people are trying to run it \n$HADOOP_PREFIX/bin/hadoop jar /usr/local/hadoop/share/hadoop/tools/lib/hadoop-streaming-2.7.2.jar \\\n -files mapper.py,reducer.py \\\n -input wc/in \\\n -output wc/out -mapper mapper.py -reducer reducer.py \\\n -cmdenv PYTHONIOENCODING=latin-1')
# In[ ]:
ls_wc_out = get_ipython().getoutput('$HADOOP_PREFIX/bin/hdfs dfs -ls wc/out')
print('\n'.join(ls_wc_out))
# In[ ]:
assert_true('wc/out/_SUCCESS' in ls_wc_out.s)
assert_true('wc/out/part-00000' in ls_wc_out.s)
# In[ ]:
stream_out = get_ipython().getoutput('$HADOOP_PREFIX/bin/hdfs dfs -cat wc/out/part-00000')
print('\n'.join(stream_out[:10]))
# In[ ]:
assert_equal(mapred_out, stream_out)
# ## Cleanup
# In[ ]:
get_ipython().system(' $HADOOP_PREFIX/bin/hdfs dfs -rm -r -f -skipTrash wc/out')
|
983,979 | 3f4dd63555464dbfa2e8e50ab09ce14da3550f7b | #! /usr/bin/python
'''sample script for implementing the help command'''
# -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-
# print "importing svn.commands.help module"
import exceptions
import scm
import svn
# -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-
command_name = 'help'
class HelpCommand( scm.Command ):
    """Implements ``svn help``: global usage or a subcommand's help."""

    def __init__( self ):
        super( HelpCommand, self ).__init__( command_name )

    def process( self, parent_parsers, args ):
        """Dispatch a help request.

        With no options, print top-level usage (argparse exits after
        ``-h``).  Otherwise delegate to the named subcommand's
        ``show_help``, raising RuntimeError for unknown commands.
        """
        if len( args.options ) == 0:
            ignore = parent_parsers[ 'cmdline' ].parse_args( [ '-h' ] )
        # FIX: dict.has_key() is deprecated -> use ``in``; the old
        # ``raise Class, msg`` comma form is Python-2-only -> call form.
        elif args.options[0] not in svn.supported_cmdlist:
            raise exceptions.RuntimeError( "command not supported: " + args.options[0] )
        else:
            svn.supported_cmdlist[ args.options[0] ].show_help( parent_parsers, args )

    def show_help( self, parent_parsers, args ):
        """Help for the help command itself: intentionally nothing."""
        pass
# -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-
svn.scm_obj.register_command( command_name, HelpCommand() )
|
983,980 | 29d0b14a5f3963abe543a297e5ba14055f8cc0ed | import sys, requests, json, os, time, urllib
from multiprocessing.dummy import Pool as ThreadPool
if len(sys.argv) < 3:
print 'Usage: python puppeteer.py config output'
sys.exit()
# read config
fns = sys.argv[1].split(',')
for fn in fns:
if not os.path.isfile(fn):
print when(), 'error [', fn, '] not exists'
sys.exit()
configs = {}
for fn in fns:
fin = open(fn)
for line in fin:
j = json.loads(line.strip())
if 'url' in j and 'selector' in j:
configs[j['url']] = j
else:
print when(), 'error [', fn, '] config:', line.strip()
fin.close()
configs = configs.values()
configs.sort(key=lambda x:x['url'])
# ensure output dir
ts = int(time.time())
fn = sys.argv[2] + os.sep + time.strftime("%Y-%m-%d", time.localtime(ts))
if not os.path.isdir(fn):
os.makedirs(fn)
# print datetime
def when():
    """Return the current local time as 'YYYY-MM-DD HH:MM:SS'."""
    now = time.localtime(time.time())
    return time.strftime("%Y-%m-%d %H:%M:%S", now)
# get json from url, retry=3
def getJson(url):
for i in range(1,-1,-1):
try:
r = requests.get(url, timeout=30)
return json.loads(r.content)
except Exception, e:
if i == 0:
print when(), e
return {}
# get docid from norm url
def getDocid(url):
    """Normalize ``url`` via the norm service, then fetch its doc record.

    Returns the JSON dict from the id-lookup endpoint, or None when the
    norm service yields no result (implicit fall-through).
    """
    normurl = 'http://172.31.20.133:10001/norm?url=' + urllib.quote(url)
    r = getJson(normurl)
    if 'result' in r and len(r['result']) > 0:
        # The first normalized candidate is itself a ready-to-fetch URL.
        url = r['result'][0]
        return getJson(url)
for config in configs:
url = urllib.quote(config['url'])
selector = urllib.quote(config['selector'])
waitfor = config['waitfor'] if 'waitfor' in config else 0
host = url[url.find('//')+2:]
if host.startswith('www.'):
host = host[4:]
print ''
print when(), '>>> process host [', host, ']'
rect_output = fn + os.sep + '.'.join(['rect', host, str(ts)])
score_output = fn + os.sep + '.'.join(['score', host, str(ts)])
try:
ts_start = time.time()
# get url rect and write to file rect.host.ts
r = getJson('http://172.24.22.73:8888/parse?url=%s&selector=%s&waitfor=%d' % (url,selector,waitfor))
if 'links' in r and len(r['links']) > 0:
r['ts'] = ts
fout = open(rect_output, 'w')
fout.write(json.dumps(r))
fout.close()
print when(), 'write to file [', rect_output, ']'
else:
print when(), 'error', r
continue
# get max/min y and area
links = {}
for link in r['links']:
ln = link['url']
if ln in links:
links[ln]['y'] = min(links[ln]['y'], link['rect'][1])
links[ln]['area'] += link['rect'][2]*link['rect'][3]
else:
links[ln] = {'url':ln,'y':link['rect'][1],'area':link['rect'][2]*link['rect'][3]}
links = links.values()
y = [float(link['y']) for link in links]
area = [float(link['area']) for link in links]
ymin,ymax,amin,amax = min(y),max(y),min(area),max(area)
# scoring
result = {}
for link in links:
scores = {'y':1.0-(link['y']-ymin)/(ymax-ymin), 'area':(link['area']-amin)/(amax-amin)}
avg = sum(scores.values()) / len(scores)
result[link['url']] = (avg, scores)
print when(), 'complete scoring for [', len(result), '] links'
# find docids
urls = result.keys()
urls = ['http://172.31.4.8:6001/id/find?token=d01bbc072c2e7376801d9aa0eb89f95a&url='+urllib.quote(x) for x in urls]
pool = ThreadPool(10)
docs = pool.map(getDocid, urls)
pool.close()
pool.join()
url2docid = {}
for doc in docs:
if 'result' in doc:
url2docid[doc['result']['url']] = doc['result']['_id']
print when(), 'complete querying docids for [', len(urls), '] links'
# write to file score.host.ts
result = result.items()
result.sort(key=lambda x:x[1], reverse=True)
links = []
for k,v in result:
docid = url2docid[k] if k in url2docid else None
data = {'url':k,'docid':docid,'score':v[0],'explain':v[1]}
links.append(data)
fout = open(score_output,'w')
fout.write(json.dumps({'code':0,'url':url,'links':links,'ts':ts}))
fout.close()
print when(), 'write to file [', score_output, "]"
ts_end = time.time()
print when(), 'time:', ts_end-ts_start, 'sec'
except Exception, e:
print when(), e
|
983,981 | 39ac22d291f1766438e0925f55185229df97149c | ## Copyright (c) 2007 Nathan R. Yergler, Creative Commons
## Permission is hereby granted, free of charge, to any person obtaining
## a copy of this software and associated documentation files (the "Software"),
## to deal in the Software without restriction, including without limitation
## the rights to use, copy, modify, merge, publish, distribute, sublicense,
## and/or sell copies of the Software, and to permit persons to whom the
## Software is furnished to do so, subject to the following conditions:
## The above copyright notice and this permission notice shall be included in
## all copies or substantial portions of the Software.
## THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
## IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
## FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
## AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
## LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
## FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
## DEALINGS IN THE SOFTWARE.
from setuptools import setup, find_packages
setup(
name = "cc.licenserdf",
version = "0.2.44",
packages = find_packages('.'),
namespace_packages = ['cc',],
include_package_data = True,
dependency_links = [
'https://github.com/creativecommons/cc.i18n/tarball/master#egg=cc.i18n',
'https://github.com/creativecommons/cc.licenserdf/tarball/master#egg=cc.licenserdf',
],
# scripts and dependencies
install_requires = [
'setuptools',
'cc.i18n',
'rdflib<3.0',
'rdfadict',
'Babel>0.99',
'argparse',
'zope.i18n',
'python-gettext<2.0',
'nose',
],
entry_points = {
'console_scripts': [
'merge = cc.licenserdf.tools.merge:cli',
'make_schema = cc.licenserdf.tools.make_schema:cli',
'add_license = cc.licenserdf.tools.license:add_cli',
'add_all = cc.licenserdf.tools.license:add_all_cli',
'jurisdiction = cc.licenserdf.tools.jurisdiction:cli',
'license = cc.licenserdf.tools.license:cli',
'translate_rdf = cc.licenserdf.tools.translate_rdf:cli',
'gen_i18n_titles = cc.licenserdf.tools.gen_i18n_titles:cli']},
# author metadata
author = 'Nathan R. Yergler',
author_email = 'nathan@creativecommons.org',
description = 'Tool scripts for manipulating the license RDF files.',
license = 'MIT',
url = 'http://creativecommons.org',
zip_safe = False,
)
|
983,982 | f7143e58657c09f88db9fbc86344d1668e2a9a01 | msg = """Pay attention to
everything
I say"""
#if we want to cancel a newline
#in a message like the one above
#we can use '\'
words = msg.split("\n")
print(words) |
983,983 | 1e30449d4dcb859465b0c2415c5c1911ec2f997b | from PyInstaller.utils.hooks import collect_submodules, collect_data_files
# PyInstaller hook for TensorFlow: bundle every tensorflow submodule,
# since many are imported dynamically and invisible to static analysis.
hiddenimports = collect_submodules('tensorflow')
# Ship the package's data files too, including .py files, so runtime
# introspection keeps working inside the frozen application.
datas = collect_data_files('tensorflow', subdir=None, include_py_files=True)
|
983,984 | ac961c50ab0e02abc12b4daad97f45cda4b6b668 | import json
from .exceptions import *
class _Enum:
def __init__(self, values):
for item in values.items():
self.__dict__[item[0]] = item[1]
class JSONable:
    """Base class for objects that (de)serialize to/from JSON.

    Construction accepts either a dict of attributes or a JSON string.
    Subclasses override ``_to_object`` to turn nested values into typed
    objects, and ``_get_sendable_keys`` / ``_get_ignored_keys`` to
    control serialization.
    """

    def __init__(self, params):
        """Populate attributes from ``params`` (dict or JSON string),
        converting nested values via ``_to_object``."""
        if self._is_json(params):
            params = json.loads(params)
        self.__dict__.update(params)
        for key, value in self.__dict__.items():
            if type(value) is list:
                # Convert each list element individually.
                new_value = []
                for v in value:
                    o = self._to_object(key, v)
                    new_value.append(o if o is not None else v)
                value = new_value
            else:
                o = self._to_object(key, value)
                if o is not None:
                    value = o
            self.__dict__[key] = value

    def _is_json(self, params):
        """Return True when ``params`` is a parseable JSON string.

        BUG FIX: ``json.loads`` raises ValueError (JSONDecodeError) for
        a string that is not valid JSON; the original caught only
        TypeError, so any malformed string crashed construction/update.
        """
        try:
            json_object = json.loads(params)
        except (TypeError, ValueError) as error:
            return False
        return True

    def _to_object(self, name, value):
        """Hook for subclasses: return a typed object for attribute
        ``name``, or None to keep the raw value."""
        return None

    def _update_old_attribute(self, key, value, old_value):
        # New attribute: try typed conversion; existing JSONable:
        # update it in place; anything else: overwrite.
        if old_value is None:
            new_value = self._to_object(key, value)
            return value if new_value is None else new_value
        elif isinstance(old_value, JSONable):
            return old_value.update_attributes(value)
        else:
            return value

    def update_attributes(self, params):
        """Merge ``params`` (dict or JSON string) into this object and
        return self; lists are merged element-wise, growing as needed."""
        if self._is_json(params):
            params = json.loads(params)
        for key, value in params.items():
            old_value = self.__dict__[key] if key in self.__dict__ else None
            if type(value) is list:
                if old_value is None:
                    old_value = []
                for i in range(len(value)):
                    if len(old_value) - 1 < i:
                        old_value.append(None)
                    old_value[i] = self._update_old_attribute(key, value[i], old_value[i])
            else:
                old_value = self._update_old_attribute(key, value, old_value)
            self.__dict__[key] = old_value
        return self

    def _get_ignored_keys(self):
        """Keys never serialized (server-managed timestamps)."""
        keys = (
            'created_at',
            'updated_at'
        )
        return keys

    def _get_sendable_keys(self):
        """Whitelist of keys to serialize; None means 'all attributes'."""
        return None

    def to_dict(self):
        """Serialize to a plain dict, recursing into nested JSONables
        and skipping ignored keys and None values."""
        content = {}
        if self._get_sendable_keys() is None:
            keys = self.__dict__.keys()
        else:
            keys = self._get_sendable_keys()
        for key in keys:
            if key not in self._get_ignored_keys():
                value = self.__dict__.get(key)
                if value is not None:
                    if type(value) is list:
                        content[key] = []
                        for v in value:
                            content[key].append(v.to_dict() if isinstance(v, JSONable) else v)
                    else:
                        content[key] = value.to_dict() if isinstance(value, JSONable) else value
        return content

    def to_json(self):
        """Serialize to a JSON string with sorted keys."""
        return json.dumps(self.to_dict(), sort_keys=True)
class Attachment(JSONable):
def __init__(self, params):
self.source = params['source']
self.content_type = params.get('content_type', 'application/octet-stream')
self.filename = params.get('filename')
self.size = params.get('size')
self.guid = None
class SecurityProfile(JSONable):
TimeUnit = _Enum({
'HOURS': 'hours',
'DAYS': 'days',
'WEEKS': 'weeks',
'MONTHS': 'months'
})
LongTimeUnit = _Enum({
'HOURS': 'hours',
'DAYS': 'days',
'WEEKS': 'weeks',
'MONTHS': 'months',
'YEARS': 'years',
})
def _to_object(self, name, value):
if name not in ('id', 'name', 'description', 'created_at', 'updated_at'):
return Value(value)
else:
return None
class Value(JSONable):
pass
class EnterpriseSettings(JSONable):
def _to_object(self, name, value):
if name == 'extension_filter':
return ExtensionFilter(value)
else:
return None
class ExtensionFilter(JSONable):
pass
class UserSettings(JSONable):
def _to_object(self, name, value):
if name == 'secure_link':
return PersonnalSecureLink(value)
else:
return None
class PersonnalSecureLink(JSONable):
pass
class Contactable(JSONable):
    """JSONable with a ``contact_methods`` list of ContactMethod objects."""

    def _to_object(self, name, value):
        """Wrap ``contact_methods`` entries as ContactMethod objects."""
        if name == 'contact_methods':
            return ContactMethod(value)
        else:
            return None

    def update_attributes(self, params):
        """Merge ``params``; contact methods whose id is absent from the
        incoming list are dropped before the base-class merge."""
        if self._is_json(params):
            params = json.loads(params)
        if 'contact_methods' in params:
            # Collect the ids present in the incoming payload.
            contacts_id = []
            for contact in params['contact_methods']:
                for key, value in contact.items():
                    if key == 'id':
                        contacts_id.append(value)
            # BUG FIX: iterate over a copy -- removing from the list
            # being iterated skips the element after each removal.
            for contact in list(self.__dict__['contact_methods']):
                if hasattr(contact, 'id') and contact.id not in contacts_id:
                    self.__dict__['contact_methods'].remove(contact)
        return JSONable.update_attributes(self, params)
class ContactMethod(JSONable):
def __init__(self, params):
JSONable.__init__(self, params)
self._destroy = None
DestinationType = _Enum({
'HOME': 'home_phone',
'CELL': 'cell_phone',
'OFFICE': 'office_phone',
'OTHER': 'other_phone'
})
def _get_sendable_keys(self):
keys = (
'destination',
'destination_type',
'id',
'_destroy'
)
return keys
class Favorite(Contactable):
def __init__(self, email=None, params=None):
self.first_name = None
self.last_name = None
self.email = email
self.company_name = None
self.always_promote = None
self.contact_methods = []
if params is not None:
JSONable.__init__(self, params)
def prepare_to_destroy_contact(self, contact_method_ids):
for contact in self.contact_methods:
if contact.id in contact_method_ids:
contact._destroy = True
return self
def _get_ignored_keys(self):
keys = JSONable._get_ignored_keys(self)
keys += ('id',)
return keys
def to_json(self):
favorite = {'favorite': self.to_dict()}
return json.dumps(favorite, sort_keys=True)
class Participant(JSONable):
def __init__(self, email=None, params=None):
self.first_name = None
self.last_name = None
self.email = email
self.privileged = None
if params is not None:
JSONable.__init__(self, params)
if not hasattr(self, 'guest_options'):
self.guest_options = GuestOptions()
def _to_object(self, name, value):
if name == 'guest_options':
return GuestOptions(value)
else:
return None
def to_dict(self):
content = JSONable.to_dict(self)
options = self.guest_options.to_dict()
content.update(options)
return content
def _get_sendable_keys(self):
keys = (
'first_name',
'last_name',
'email',
'privileged'
)
return keys
def prepare_to_destroy_contact(self, contact_method_ids):
for contact in self.guest_options.contact_methods:
if contact.id in contact_method_ids:
contact._destroy = '1'
return self
def to_json(self):
participant = {'participant': self.to_dict()}
return json.dumps(participant, sort_keys=True)
class GuestOptions(Contactable):
def __init__(self, params=None):
self.contact_methods = []
self.company_name = None
self.locked = None
if params is not None:
JSONable.__init__(self, params)
def _get_ignored_keys(self):
keys = JSONable._get_ignored_keys(self)
keys += ('bounced_email', 'failed_login_attempts', 'verified',)
return keys
class Safebox(JSONable):
def __init__(self, user_email=None, notification_language="en", params=None):
self.participants = []
self.subject = None
self.message = None
self.attachments = []
self.security_profile_id = None
self.user_email = user_email
self.notification_language = notification_language
self.email_notification_enabled = None
if params is not None:
JSONable.__init__(self, params)
if not hasattr(self, 'security_options'):
self.security_options = SecurityOptions()
def _to_object(self, name, value):
if name == 'security_options':
return SecurityOptions(value)
elif name == 'download_activity':
return DownloadActivity(value)
elif name == 'messages':
return Message(value)
elif name == 'participants':
return Participant(params=value)
elif name == 'event_history':
return EventHistory(value)
else:
return None
def _get_sendable_keys(self):
keys = (
'guid',
'subject',
'message',
'security_profile_id',
'public_encryption_key',
'notification_language',
'user_email',
'email_notification_enabled'
)
return keys
def update_attributes(self, params):
if self._is_json(params):
params = json.loads(params)
if 'is_creation' in params:
params['security_options'] = {}
for key in self._get_security_options_keys():
if key in params.keys():
params['security_options'][key] = params[key]
del params[key]
del params['is_creation']
return JSONable.update_attributes(self, params)
def set_expiration_values(self, date_time):
if hasattr(self, 'status'):
raise SendSecureException(0, 'Cannot change the expiration of a committed safebox, please see the method addTime to extend the lifetime of the safebox', '')
self.security_options.expiration_date = date_time.strftime('%Y-%m-%d')
self.security_options.expiration_time = date_time.strftime('%H:%M:%S')
self.security_options.expiration_time_zone = date_time.strftime('%z')
def _get_security_options_keys(self):
keys = (
'security_code_length',
'allowed_login_attempts',
'allow_remember_me',
'allow_sms',
'allow_voice',
'allow_email',
'reply_enabled',
'group_replies',
'code_time_limit',
'encrypt_message',
'two_factor_required',
'auto_extend_value',
'auto_extend_unit',
'retention_period_type',
'retention_period_value',
'retention_period_unit',
'allow_manual_delete',
'allow_manual_close'
)
return keys
def to_dict(self):
content = JSONable.to_dict(self)
content['recipients'] = []
for item in self.participants:
if not hasattr(item, 'type') or item.type == "guest":
content['recipients'].append(item.to_dict())
self._append_document_ids_to_dict(content)
content.update(self.security_options.to_dict())
return content
def _append_document_ids_to_dict(self, content):
content['document_ids'] = []
if hasattr(self, 'attachments'):
for item in self.attachments:
content['document_ids'].append(item.guid)
def _temporary_document(self, file_size):
if hasattr(self, 'public_encryption_key'):
return { "temporary_document": { "document_file_size": file_size },
"multipart": False,
"public_encryption_key": self.public_encryption_key
}
return { "temporary_document": { "document_file_size": file_size },
"multipart": False
}
def to_json(self):
safebox = {'safebox': self.to_dict()}
return json.dumps(safebox, sort_keys=True)
class DownloadActivity(JSONable):
def _to_object(self, name, value):
return DownloadActivityDetail(value)
class DownloadActivityDetail(JSONable):
def _to_object(self, name, value):
if name == 'documents':
return Document(value)
else:
return None
class Document(JSONable):
pass
class EventHistory(JSONable):
pass
class Message(JSONable):
def _to_object(self, name, value):
if name == 'documents':
return Document(value)
else:
return None
class SecurityOptions(JSONable):
    """Security settings of a safebox: reply behaviour, retention,
    encryption and expiration."""

    def __init__(self, params=None):
        # All options default to None ("unset") until provided.
        self.reply_enabled = None
        self.group_replies = None
        self.retention_period_type = None
        self.retention_period_value = None
        self.retention_period_unit = None
        self.encrypt_message = None
        self.double_encryption = None
        self.expiration_value = None
        self.expiration_unit = None
        self.expiration_date = None
        self.expiration_time = None
        self.expiration_time_zone = None
        if params is not None:
            JSONable.__init__(self, params)

    def _get_sendable_keys(self):
        """Attribute names serialized when sending these options to the API."""
        keys = (
            'reply_enabled',
            'group_replies',
            'retention_period_type',
            'retention_period_value',
            'retention_period_unit',
            'encrypt_message',
            'double_encryption',
            'expiration_value',
            'expiration_unit',
            'expiration_date',
            'expiration_time',
            'expiration_time_zone'
        )
        return keys
class Reply(JSONable):
    """A reply posted to an existing safebox."""

    def __init__(self, params=None):
        self.message = None
        self.consent = None  # Optional
        self.attachments = []
        self.document_ids = []  # guids referenced by the reply
        if params is not None:
            JSONable.__init__(self, params)

    def _get_sendable_keys(self):
        """Attribute names serialized when sending the reply to the API."""
        keys = (
            'message',
            'consent',
            'document_ids'
        )
        return keys

    def to_json(self):
        """JSON-encode the reply under a top-level 'safebox' key."""
        return json.dumps({'safebox' : self.to_dict()}, sort_keys=True)
class ConsentMessage(JSONable):
    """A single localized consent message (plain JSONable container)."""
    pass
class ConsentMessageGroup(JSONable):
    """A group of consent messages; 'consent_messages' entries become
    ConsentMessage objects."""
    def _to_object(self, name, value):
        if name == 'consent_messages':
            return ConsentMessage(value)
        else:
            return None
983,985 | acd5ccc6ca1363b8cbbe84142a149e6e067bbb68 |
def sanitize_xml(data):
    """Remove every element attribute not listed in IGNORE_XML_ATTRIBUTE from
    the XML document *data* and return the cleaned text.

    The input is deep-copied before parsing, so the caller's data is untouched.
    """
    tree = fromstring(to_bytes(deepcopy(data), errors='surrogate_then_replace'))
    # iter() replaces getiterator(), which was deprecated and removed in
    # Python 3.9's ElementTree.
    for element in tree.iter():
        attribute = element.attrib
        if attribute:
            # Iterate over a snapshot of the keys: popping from the dict while
            # iterating it directly raises RuntimeError on Python 3.
            for key in list(attribute):
                if key not in IGNORE_XML_ATTRIBUTE:
                    attribute.pop(key)
    return to_text(tostring(tree), errors='surrogate_then_replace').strip()
|
983,986 | a86c32a7a7591d720ff3e9a472420d54ede5cfe7 | from EventCommon import *
from EventAlg.traffic_flow_matrix import Traffic_Flow
class EventMod(Interface):
    """Traffic-event detection process.

    Consumes tracked participant frames from the data bus, feeds them into the
    Traffic_Flow statistics engine, and publishes B3/B4/B5 event payloads on
    the 'event_data' channel.
    """

    def __init__(self):
        self.s_Parampath = os.path.join(get_cfg_path(), 'modParam/event_Param.xml')
        Interface.__init__(self, self.s_Parampath)

    def usr_process(self, data_bus_server):
        """Main worker loop (overrides Interface.usr_process)."""
        # State object: receives the system state from the interface layer and
        # reports this process's status back to the supervisor.
        sys_state = UsrSysState(self.q_sys_state, self.q_model_state, self.log)
        # One subscriber per subscribed channel; each list entry is a message
        # stream for the corresponding channel.
        list_sub = data_bus_server.get_subscriber(self.list_channel)
        self.log.info("proEventDetect start")
        stStationParam = lidarParam(os.path.join(get_cfg_path(), 'modParam/read_Param.xml'))
        stStationParam.updateParam()
        stEventParam = structEventAlgParam(os.path.join(get_cfg_path(), 'modParam/event_Param.xml'))
        stEventParam.updateParam()
        stEventModParam = structEventModParam(os.path.join(get_cfg_path(), 'modParam/event_Param.xml'))
        stEventModParam.updateParam()
        traffic_flow = None
        try:
            traffic_flow = Traffic_Flow(os.path.join(get_cfg_path(), 'virtual_config/'),
                                        stStationParam.getStationCount(), stEventParam)
            self.log.info("Traffic_Flow init Successful!")
        except Exception:
            traffic_flow = None
            self.log.error("Traffic_Flow init Failed!\nexcept:\n{}".format(traceback.format_exc()))
        # One timer per statistics channel (B0..B5).
        listEventTime = []
        for i in range(6):
            listEventTime.append(MyTime())
            listEventTime[i].start()
        tTimeStamp = 0
        nDeviceId = 0
        thisTime = math.floor(time.time())
        thisTime = thisTime - (thisTime % 60)  # align to the minute boundary
        thisTimeStamp = getTimeStamp(thisTime)
        while sys_state.get_sys_state() and traffic_flow is not None:
            if sys_state.get_param_update():
                stEventParam.updateParam()
                print('stEventParam.updateParam()')
                # BUGFIX: updateParam must be called on the instance; calling it
                # on the class passed stEventParam as `self` and never updated
                # this traffic_flow object.
                traffic_flow.updateParam(stEventParam)
            try:
                list_msg = list_sub[0].parse_response(False, 0.1)
                channel = str(list_msg[1], encoding="utf-8")
                dictData = json.loads(list_msg[2])
                # 44 tracker columns per participant, plus 2 extra slots
                # (timestamp at index 44; index 45 reserved).
                npParticipant = parseNdarrayofByte(dictData['e1FrameParticipant']).reshape((-1, 44))
                trackersToEvent = np.zeros((npParticipant.shape[0], 46))
                trackersToEvent[:, 0:44] = npParticipant[:, 0:44]
                strTimeStamp = dictData['globalTimeStamp']
                listDate = strTimeStamp[0].split(' ')
                strYear, strMon, strDay = listDate[0].split('-')
                # BUGFIX: the time-of-day lives in listDate[1]; the original
                # split listDate[0] (the date part) by ':', which always raised
                # and silently dropped every frame via the except below.
                strHours, strMin, strSec, strMSec = listDate[1].split(':')
                ftime = datetime.datetime(int(strYear), int(strMon), int(strDay),
                                          int(strHours), int(strMin), int(strSec), int(strMSec))
                tTimeStamp = time.mktime(ftime.timetuple()) + int(strMSec) / 1000
                trackersToEvent[:, 44] = tTimeStamp
                nDeviceId = dictData['deviceId']
            except Exception:
                # malformed or missing message: wait for the next one
                continue
            B0, B1, B2, B3, B4, B5 = [], [], [], [], [], []
            try:
                # Feed trackers into the statistics engine; B1 and B4 share the
                # 1/60 Hz cadence. Guard against B4 == 0 (ZeroDivisionError).
                if stEventModParam.B4 != 0 and \
                        math.floor(time.time()) % stEventModParam.B4 == 0 and \
                        listEventTime[4].getTime() > 1:  # 1/60Hz
                    traffic_flow.use(trackersToEvent, True)
                else:
                    traffic_flow.use(trackersToEvent, False)
                if not stStationParam.getHighSpeed():
                    if listEventTime[0].getTime() >= stEventModParam.B0 * 1000:  # 10Hz
                        B0 = traffic_flow.get_B0()
                        listEventTime[0].restart()
                    if listEventTime[1].getTime() >= stEventModParam.B1 * 1000:  # 1/60Hz
                        B1 = traffic_flow.get_B1()
                        listEventTime[1].restart()
                    # if listEventTime[2].getTime() >= stEventModParam.B2 * 1000:  # 10Hz
                    #     B2 = traffic_flow.get_B2()
                    #     listEventTime[2].restart()
                else:
                    if listEventTime[3].getTime() >= stEventModParam.B3 * 1000:  # 1Hz
                        B3 = traffic_flow.get_B3()
                        listEventTime[3].restart()
                    if stEventModParam.B4 != 0 and \
                            math.floor(time.time()) % stEventModParam.B4 == 0 and \
                            listEventTime[4].getTime() > 1:  # 1/60Hz
                        B4 = traffic_flow.get_B4()
                        listEventTime[4].restart()
                    B5 = traffic_flow.get_B5()
            except Exception:
                self.log.error("Event detect alg Error !")
                self.log.error("except! Call stack:\n{}".format(traceback.format_exc()))
            # Package whatever statistics fired this cycle and publish them.
            json_B3, json_B4, json_B5 = {}, {}, {}
            if len(B3) != 0:
                json_B3 = getHttpB3Data(B3, nDeviceId)
            if len(B4) != 0:
                json_B4, thisTimeStamp = getHttpB4Data(B4, nDeviceId, thisTimeStamp)
            if len(B5) != 0 and B5[4] != 0:
                json_B5 = getHttpB5Data(B5, nDeviceId, strTimeStamp)
            dictEventData = {'b3Data': json_B3,
                             'b4Data': json_B4,
                             'b5Data': json_B5}
            jsonEventData = json.dumps(dictEventData)
            pubData = {'channel': 'event_data', 'data': jsonEventData}
            self.q_pub_data.put(pubData)
        self.log.error('usr_process {} exit...'.format(self.log.name))
if __name__ == '__main__':
    # Run the event-detection process standalone and block until it finishes.
    eventMod = EventMod()
    eventMod.join()
    eventMod.log.error('event exit...')
    sys.exit(0)
|
983,987 | 7b24558e8f90819a6d20102a4ff101fb9da00b43 | import subprocess
import logging
import re
# Pulls (ttl, time) pairs out of the individual "icmp_seq=... ttl=... time=..."
# reply lines of ping's output.
RE_TTL_AND_TIME = re.compile(r"icmp_seq=\d+ ttl=(\d+) time=([\d+\.]+)")
# Pulls (transmitted, received, total_time_ms) out of the summary line.
RE_STATISTICS = re.compile(r"(\d+) packets transmitted, (\d+) received,.+?time (\d+)")


class Ping:
    """Thin wrapper around the system ``ping`` binary that parses its output
    into a result dictionary."""

    def __init__(self):
        pass

    def __call__(self, parameters, _ping_command=None):
        """Ping ``parameters['ip']`` and return the parsed statistics.

        *_ping_command* may override the command runner (useful for tests).
        """
        runner = _ping_command if _ping_command else self.ping_command
        stdout = runner(parameters["ip"])
        ttls, rtts = self.get_ttl_and_time(stdout)
        transmitted, received, scan_time = self.get_scan_statistics(stdout)
        return {
            "packet_transmitted": transmitted,
            "packet_received": received,
            "scan_time": scan_time,
            "ttl": ttls,
            "time": rtts,
        }

    def ping_command(self, ip: str):
        """Run ``ping -c 3 <ip>`` and return its decoded stdout."""
        completed = subprocess.run(["ping", "-c", "3", ip], stdout=subprocess.PIPE)
        return str(completed.stdout, "utf8")

    def get_ttl_and_time(self, stdout):
        """Return parallel lists: TTLs (ints) and round-trip times (floats)."""
        matches = RE_TTL_AND_TIME.findall(stdout)
        ttls = [int(m[0]) for m in matches]
        rtts = [float(m[1]) for m in matches]
        return ttls, rtts

    def get_scan_statistics(self, sdtout):
        """Return (transmitted, received, total_time_ms) from the summary line."""
        match = RE_STATISTICS.search(sdtout)
        return tuple(int(group) for group in match.groups())
|
983,988 | 33888578a6bb0958e3af86d9c309b312db535595 | # -*- coding: utf-8 -*-
import scrapy
from scrapy.linkextractors import LinkExtractor
from scrapy.spiders import CrawlSpider, Rule
from scrapy_redis.spiders import RedisCrawlSpider
class QiushiSpider(RedisCrawlSpider):
    """Distributed crawler for qiushibaike.com jokes.

    Start URLs are pushed into the Redis list named by ``redis_key`` instead
    of a static ``start_urls`` attribute (scrapy-redis convention).
    """
    name = 'qiushi'
    allowed_domains = ['qiushibaike.com']
    # start_urls = ['https://www.qiushibaike.com/text/']
    redis_key = 'qiushi:start_urls'
    # Follow both article detail pages and paginated listing pages; both are
    # handed to parse_item.
    rules = (
        Rule(LinkExtractor(allow=r'/article/\d+'), callback='parse_item', follow=True),
        Rule(LinkExtractor(allow=r'/text/page/\d+/'), callback='parse_item', follow=True),
    )

    def parse_item(self, response):
        """Yield {'name', 'content'} scraped from a page.

        NOTE(review): ``extract_first()`` returns None when the XPath matches
        nothing, so ``.strip()`` would raise on pages without the expected
        markup — confirm all followed pages carry these elements.
        """
        content = response.xpath('//div[@class="content"]/text()').extract_first().strip()
        name = response.xpath('//h2/text()').extract_first().strip()
        yield {
            'name': name,
            'content': content
        }
|
983,989 | e37a4eac6bf08da17199c7d19cdaa4432b191d86 | from common.exceptions import APIException
from common.functions import get_config
from common.constants import REQUEST_TIMEOUT
from functools import reduce
import logging
import requests
import operator
from requests.packages.urllib3.exceptions import InsecureRequestWarning
requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
logger = logging.getLogger(__name__)
class RestClient(object):
    """Stateless HTTP helper built on ``requests``.

    All methods are static. Successful (2xx) responses are returned as-is;
    anything else is logged and raised as ``APIException``.

    NOTE(review): every request is sent with ``verify=False`` (TLS certificate
    checking disabled) — confirm this is intended for the target environment.
    """

    @staticmethod
    def getFromDict(dataDict, path_list):
        """
        :param dataDict: dictionary to parse.
        :param path_list: path from root to the desired node.
        :return: return value at the end of the path_list.
        """
        return reduce(operator.getitem, path_list, dataDict)

    @staticmethod
    def get_url(ip, port, endpoint):
        """
        :param ip: ip.
        :param port: port.
        :param endpoint: endpoint.
        :return: http:// url.
        """
        return "http://{}:{}/{}".format(ip, port, endpoint)

    @staticmethod
    def get_https_url(ip, port, endpoint):
        """
        :param ip: ip.
        :param port: port.
        :param endpoint: endpoint.
        :return: https:// url.
        """
        return "https://{}:{}/{}".format(ip, port, endpoint)

    @staticmethod
    def result_success(result):
        """
        Check if HTTP result code is in the 2xx range.
        :param result: status code.
        :return: Boolean True or False.
        """
        return 200 <= result < 300

    @staticmethod
    def _check_response(resp, url):
        """Return *resp* if its status is 2xx, otherwise log and raise.

        :raises APIException: carrying the status message and response text.
        """
        if RestClient.result_success(resp.status_code):
            return resp
        err_msg = 'ERROR, received {} code during API call {}'.format(resp.status_code, url)
        logger.error(err_msg)
        raise APIException(err_msg, resp.text)

    @staticmethod
    def post_form(url, headers, payload):
        """
        Post form data, encoded as application/x-www-form-urlencoded.
        :param url: url to make a post request.
        :param headers: request headers (mutated: Content-Type is set).
        :param payload: data.
        :return: response object.
        """
        headers['Content-Type'] = 'application/x-www-form-urlencoded'
        return RestClient.make_post_request(url, headers=headers, data=payload)

    @staticmethod
    def post_JSON(url, headers, payload):
        """
        Post data as JSON.
        :param url: url to make a post request.
        :param headers: request headers (mutated: Content-Type is set).
        :param payload: data.
        :return: response object.
        """
        headers['Content-Type'] = 'application/json'
        return RestClient.make_post_request(url,
                                            headers=headers,
                                            json_data=payload)

    @staticmethod
    def make_post_request(url, headers=None, json_data=None, data=None):
        """
        Make a POST request to a given url with either JSON or raw form data.
        :param url: url to make a post request.
        :param headers: request headers.
        :param json_data: json post request data.
        :param data: form post request data.
        :return: response object.
        :raises APIException: on non-2xx status.
        """
        logger.info("Inside: make_post_request")
        logger.debug("make_post_request: parameters - {}, {}, {}, {}".format(url, headers, json_data, data))
        timeout = get_config(REQUEST_TIMEOUT, "timeout")
        if not headers:
            headers = {}
        if json_data:
            resp = requests.post(url, verify=False, headers=headers, json=json_data, timeout=timeout)
        else:
            # BUGFIX: previously `resp` stayed unbound when both payloads were
            # falsy, raising UnboundLocalError; an empty-body POST is sent now.
            resp = requests.post(url, verify=False, headers=headers, data=data, timeout=timeout)
        logger.debug('received status : {}'.format(resp.status_code))
        logger.debug('received text : {}'.format(resp.text))
        logger.info("Exit: make_post_request")
        return RestClient._check_response(resp, url)

    @staticmethod
    def make_get_request(url, headers=None):
        """
        Make a GET request to a given url.
        :param url: url to make a get request.
        :param headers: request headers.
        :return: response object.
        :raises APIException: on non-2xx status.
        """
        logger.info("Inside: make_get_request")
        logger.debug("make_get_request: parameters - {}, {}".format(url, headers))
        timeout = get_config(REQUEST_TIMEOUT, "timeout")
        if not headers:
            headers = {}
        resp = requests.get(url, verify=False, headers=headers, timeout=timeout)
        logger.debug('received status : {}'.format(resp.status_code))
        logger.debug('received text : {}'.format(resp.text))
        logger.info("Exit: make_get_request")
        return RestClient._check_response(resp, url)

    @staticmethod
    def make_delete_request(url, headers=None):
        """
        Make a DELETE request to a given url.
        :param url: url to make a delete request.
        :param headers: request headers.
        :return: response object.
        :raises APIException: on non-2xx status.
        """
        logger.info("Inside: make_delete_request")
        logger.debug("make_delete_request: parameters - {}, {}".format(url, headers))
        timeout = get_config(REQUEST_TIMEOUT, "timeout")
        if not headers:
            headers = {}
        resp = requests.delete(url, verify=False, headers=headers, timeout=timeout)
        logger.debug('received status : {}'.format(resp.status_code))
        logger.debug('received text : {}'.format(resp.text))
        logger.info('Exit: make_delete_request')
        return RestClient._check_response(resp, url)
|
983,990 | df80f73b0b63e0e9515839aa78014626f6920bee | #!/usr/bin/python3
from math import sqrt,fabs,exp
import matplotlib as plt
import matplotlib.pyplot as pyplot
from itertools import chain,count
class Handle():
    """Binomial-tree pricer for (optionally barrier) call/put options, with
    European or American exercise, following the course notes' model."""

    def __init__(self, time, steps, interest, volatility, current_stock_price, target, \
            barrier_top, barrier_bottom, call=True, american=False):
        self.steps = steps
        self.timestep = time/steps
        self.endtime = time
        self.discount = interest
        self.volatility = volatility
        self.p = (interest - volatility ** 2 / 2) / (2 * volatility) * sqrt(self.timestep) + 0.5 # course notes (1.32)
        # size of one log-return step per tree level
        self.coststep = volatility * sqrt(self.timestep)
        self.strike = target
        self.current = current_stock_price
        self.top = barrier_top        # up-and-out barrier
        self.bottom = barrier_bottom  # down-and-out barrier
        self.call = call
        self.american = american
        self.Vdict = {} #stores inbetween V values for optimalisation
        self.single_step_discount = exp(-interest*self.timestep)

    @property
    def european(self):
        """True when the option is European (not American)."""
        return not self.american

    @property
    def put(self):
        """True when the option is a put (not a call)."""
        return not self.call

    def V(self, x_index=0, step_index=0):
        """
        A function that generates the value of an option,
        x_index: the current value of x = x_index * self.coststep
        step_index: The current step, time=step_index*self.timestep
        """
        logreturn = x_index * self.coststep
        value = self.current * exp(logreturn)
        if not(self.bottom < value < self.top):
            #outside of barriers, options becomes worthless
            return 0
        elif step_index == self.steps:
            # expiry: the option is worth its intrinsic value
            return self.exchange_value(value)
        elif (x_index, step_index) in self.Vdict:
            return self.Vdict[x_index, step_index]
        else:
            # discounted expectation over the up/down branches
            ret = self.single_step_discount * \
                (self.V(x_index + 1, step_index + 1) * self.p + \
                self.V(x_index - 1, step_index + 1) * (1 - self.p))
            if self.american:
                #option can be exchanged immediatly
                ret = max(ret,self.exchange_value(value))
            self.Vdict[x_index, step_index] = ret
            return float(ret)

    def reset_V(self):
        """Clear the memoized option values (required after parameter changes)."""
        self.Vdict = {}

    def exchange_value(self,stockvalue):
        """
        This function returns the value of the option if it would be exchanged immedialty,
        if the stock value is equal to stockvalue
        """
        if self.call:
            return max(0, stockvalue - self.strike)
        else:
            return max(0, self.strike - stockvalue)

    def change_strike(self, newtarget):
        """Set a new strike price and invalidate the memo cache."""
        self.strike = newtarget
        self.reset_V()

    def swap_cp(self):
        """Toggle between call and put.

        NOTE(review): does not call reset_V(), so a previously filled memo
        would go stale — confirm callers always reprice via change_strike.
        """
        self.call = not self.call

    def swap_ae(self):
        """Toggle between American and European exercise.

        NOTE(review): does not call reset_V() either — same caveat as swap_cp.
        """
        self.american = not self.american

    def draw_tree(self,target):
        """Print the option-value tree for *target* as an ASCII diamond."""
        self.change_strike(target)
        for i in range(-self.steps,self.steps+1):
            print(" "*abs(i) + " ".join("{:+06.2f}".format(self.V(-i,s)) \
                for s in range(abs(i),self.steps+1,2)))

    def get_price(self,target):
        """Price of the option at the root node for strike *target*."""
        self.change_strike(target)
        return self.V(0,0)
def get_number(text,default=0,type_=float):
    """Prompt until the user enters a value parseable by *type_*.

    A truthy *default* is shown in the prompt and returned on invalid input;
    otherwise the user is re-prompted.

    NOTE(review): a falsy default (e.g. 0) is treated as "no default" and can
    never be auto-returned — confirm that is intended.
    """
    if default:
        showtext = "{} ({}): ".format(text,default)
    else:
        showtext = "{}: ".format(text)
    while True:
        try:
            # type_('') and other bad inputs raise ValueError
            return type_(input(showtext))
        except ValueError:
            if default:
                print("The number you provided was invalid")
                print("The default value of {} will be used".format(default))
                return default
            else:
                print("The number you provided was invalid")
def get_index(text,letters):
    """Prompt until the user's first character (case-insensitive) is one of
    *letters*; return its index within *letters*."""
    showtext = "{} ({}): ".format(text, "/".join(letters))
    while True:
        answer = input(showtext)
        if not answer:
            print("The answer you provided was invalid")
            continue
        first = answer[0].lower()
        if first in letters:
            return letters.index(first)
        else:
            print("The answer you provided was invalid")
def get_bool(text,letters):
    """True when the user picks the first of the offered letters."""
    return get_index(text, letters) == 0
def defaultlegend():
    """Yield 'function 0', 'function 1', ... forever (fallback legend labels)."""
    i = 0
    while True:
        yield "function {}".format(i)
        i += 1
def pltfunc(functions,start,end,steps,filename,legends=...):
    """Plot each function in *functions* over [start, end) with *steps* samples
    and save the figure to *filename*.

    legends: ``...`` (default) auto-numbers the curves via defaultlegend();
    ``None`` suppresses the legend; otherwise an iterable of labels.
    """
    colors = "rgbcmyk"  # cycled per curve
    xrange = [i*(end-start)/steps+start for i in range(steps)]
    # Interleave (xs, ys, color) triples, the variadic form pyplot.plot expects.
    plargs = chain(*((xrange,[func(x) for x in xrange],colors[i%7]) for i,func in enumerate(functions)))
    pyplot.plot(*plargs)
    pyplot.xlabel(r"strike price")
    pyplot.ylabel(r"option price")
    if legends==...:
        legends = defaultlegend()
    if legends is not None:
        pyplot.legend(legends)
    pyplot.savefig(filename)
    pyplot.close()
def get_pricing_function(volatility,call=True,american=False):
    """Return a strike -> price function for the standard setup used in the
    plots: T=1, 20 steps, r=0.05, S0=1, barriers at 1.5 / 0.5."""
    handle = Handle(1, 20, 0.05, volatility, 1, 1, 1.5, 0.5, call, american)
    return handle.get_price
def plot_pricing_function(filename,volatility,call=True,american=False):
    """Plot price-vs-strike (strike in [0, 2), 100 samples) for one volatility
    and save it to *filename* without a legend."""
    f = get_pricing_function(volatility,call,american)
    pltfunc((f,), 0, 2, 100,filename,None)
def plot_pricing_functions(filename, volatility_dict, call=True, american=False):
    """Plot price-vs-strike curves for every volatility in *volatility_dict*
    (name -> volatility) on one figure, using the names as the legend."""
    f = (get_pricing_function(v, call, american) for v in volatility_dict.values())
    pltfunc(f, 0, 2, 100, filename,volatility_dict.keys())
def ex_a():
    """Interactive mode: read all model parameters, print the up-move
    probability and the option price, then draw the value tree."""
    h = Handle(get_number("time",1),
               get_number("steps",20,int),
               get_number("interest",0.05),
               get_number("volatility",0.3),
               get_number("current stock price",1),
               get_number("strike price",1),
               # barriers default to +/- infinity, i.e. effectively no barrier
               get_number("barrier: up and out",float("inf")),
               get_number("barrier: down and out", float("-inf")),
               get_bool("call/put","cp"),
               get_bool("american/european", "ae"))
    print("the change to go up is: ",h.p)
    print("the price of an option is:",h.V())
    h.draw_tree(h.strike)
def ex_b():
    """Batch mode: generate per-situation and combined price-vs-strike plots
    for every combination of exercise style and call/put."""
    situations = {"standard":0.3,"relaxed":0.1,"volatile":1}
    for ae in ("european","american"):
        for call_put in ("call","put"):
            for name,vol in situations.items():
                filename = "options_{}_{}_{}".format(ae,call_put,name)
                plot_pricing_function(filename,vol,call_put=="call",ae[0]=="a")
            # one combined figure per (exercise, call/put) pair
            plot_pricing_functions("options_{}_{}".format(ae,call_put),situations,call_put=="call",ae[0]=="a")
if __name__=="__main__":
print("""Welcome to my solution to the asignment of path integrals in quantum mechanics
If you select \'a\', The program will let you plug in a set of parameters and
calculate the correct option price. If you select \'b\', It will generate a
bunch of graphs representing the different situations. These are saved in the
same folder as this program runs in. The bonus question is included in both
the parameter program and the graphs.
Inputting parameters work as follows:
if you get something like: option (x/y): you have to type x or y
if you get something like: value (1.5): you have to give a numerical value
If you give an invalid value or leave it empty, the default value between the
parentheses will be used. This also supports 'inf' and '-inf'""")
if get_bool("assigment","ab"):
ex_a()
else:
ex_b()
|
983,991 | 4812db57c7d61db7d4247c698f3d7b90afa68b40 | '''main module'''
import os
import board
from people import Player
from ticker import Ticker
import manage
# Board dimensions: M rows, N columns.
M, N = 32, 400
# Player spawn position; presumably [column, row] near the bottom of the
# board — TODO confirm against Player/Board.
POSITION_PLAYER = [8, M-7]
# Fire-and-forget background music (requires ALSA's aplay; '&' detaches it).
os.system('aplay -q mario-theme.wav&')
def main():
    '''Run the game loop: update all objects each iteration, then redraw.'''
    game_board = board.Board(M, N)
    mario_player = Player(POSITION_PLAYER)
    # Two tickers: a fast (0.1 s) and a slow (0.4 s) update cadence.
    tickers = []
    tickers.append(Ticker(0.1))
    tickers.append(Ticker(0.4))
    while True:
        manage.manage_objects(mario_player, game_board, tickers)
        game_board.render(mario_player)

if __name__ == '__main__':
    main()
|
983,992 | c2ae84a1c7f0eed0e92409743c4afc3802ecb8fb | # -*- coding: utf-8; -*-
#
# This file is part of Superdesk.
#
# Copyright 2013, 2021 Sourcefabric z.u. and contributors.
#
# For the full copyright and license information, please see the
# AUTHORS and LICENSE files distributed with this source code, or
# at https://www.sourcefabric.org/superdesk/license
from typing import Dict, Any
from superdesk.publish import register_transmitter_file_provider, TransmitterFileEntry
from superdesk.publish.transmitters.ftp import FTPPublishService
def get_event_planning_files_for_transmission(
    transmitter_name: str, item: Dict[str, Any]
) -> Dict[str, TransmitterFileEntry]:
    """Map media ids to TransmitterFileEntry for an Event/Planning item.

    Returns an empty dict for non-Event/Planning items, and for the FTP
    transmitter (Event/Planning files are not sent over FTP).
    """
    if item.get("type") not in ("event", "planning"):
        # This provider only applies to Events/Planning items
        return {}
    if transmitter_name == FTPPublishService.NAME:
        # Sending Event/Planning files via FTP is currently unsupported
        return {}

    resource = "events_files" if item["type"] == "event" else "planning_files"
    entries: Dict[str, TransmitterFileEntry] = {}
    for file in item.get("files") or []:
        entries[file["media"]] = TransmitterFileEntry(
            media=file["media"],
            mimetype=file["mimetype"],
            resource=resource,
        )
    return entries
register_transmitter_file_provider(get_event_planning_files_for_transmission)
|
983,993 | 4b7871a38fde8b6db1202d937596252574aa0802 | # comment creating
# comment posting
# upvote/downvote - arrayList - using count to get the amount
# hiding comments
# input is when we are called by the main program
# input is the event db, the most recent one
# output is nothing
# input is when we are called by the main process
# input is postID
# output is the comment
class CommentsService():
    """A single comment attached to a post, tracking its vote and visibility
    state.

    NOTE(review): the previous vote methods referenced an ``upvoteCount``
    attribute that was never initialized (AttributeError on first use), a
    fresh upvote on a neutral comment changed nothing, and the downvote path
    set the *upvote* flag. Toggle semantics are implemented below — confirm
    against the product spec.
    """

    def __init__(self, author, text, date, postID):  # initializing the comment
        self.author = author        # author of the comment
        self.text = text            # text contained in the comment
        self.date = date            # date the comment was posted
        self.postID = postID        # post ID of the post the comment is parented to
        self.upvoteCount = 0        # net vote score (upvotes minus downvotes)
        self.isUpvoted = False      # True while the comment is upvoted
        self.isDownvoted = False    # True while the comment is downvoted
        self.isHidden = False       # True once the comment has been hidden
        # TODO add a list of users who have upvoted and downvoted the comment

    def upvoteComment(self, postID, userID):
        """Toggle an upvote: a second upvote removes it; upvoting a downvoted
        comment swings the score by +2."""
        if self.isUpvoted:
            self.upvoteCount -= 1   # pressing upvote again removes the upvote
            self.isUpvoted = False
        elif self.isDownvoted:
            self.upvoteCount += 2   # remove the downvote, add the upvote
            self.isDownvoted = False
            self.isUpvoted = True
        else:
            self.upvoteCount += 1   # fresh upvote
            self.isUpvoted = True

    def downvoteComment(self, postID, userID):
        """Toggle a downvote: a second downvote removes it; downvoting an
        upvoted comment swings the score by -2."""
        if self.isDownvoted:
            self.upvoteCount += 1   # pressing downvote again removes it
            self.isDownvoted = False
        elif self.isUpvoted:
            self.upvoteCount -= 2   # remove the upvote, add the downvote
            self.isUpvoted = False
            self.isDownvoted = True
        else:
            self.upvoteCount -= 1   # fresh downvote
            self.isDownvoted = True

    def hideComment(self, postID):
        """Mark the comment as hidden."""
        self.isHidden = True
|
983,994 | efa946d8c1f514be3ee29b682d0960c000b972b7 | from django.views.generic import ListView
from . import views
from django.urls import path
from .models import Post
# Blog routes: a paginated index (newest first) and individual post pages.
urlpatterns = [
    path('', ListView.as_view(
        queryset = Post.objects.all().order_by('-date'),
        template_name = 'Blogpost/blog.html',
        context_object_name = 'Posts',
        paginate_by = 10)
    , name='Blogpost'),
    # path('<int:id>/', views.PostDetailView.as_view(),name='post'),
    path('<int:pk>/', views.post,name='post'),
]
983,995 | 2f10a9d2daacc7ee9d1f69b2a11af2c0fe48a5d2 | import pygame
import random
print("Alex Binns 07185952")
# Define some colors
black = (0, 0, 0)
white = (255, 255, 255)
yellow = (255, 255, 0)
red = (255, 0, 0)
# Initialize pygame
pygame.init()
# Set the height and width of the screen
size = [1000, 1000]
screen = pygame.display.set_mode(size)
# Set the screen background
screen.fill(white)
# Set title of screen
pygame.display.set_caption("Random Maze")
# Create a 2 dimensional array. A two dimensional
# array in our implementation is simply a list of lists.
# Each cell corresponds to a 5 pixel x 5 pixel area of the screen surface.
maze_grid = []
for x_coordinate in range(200):
# Add an empty array that will hold each cell in this row
maze_grid.append([])
for y_coordinate in range(200):
maze_grid[x_coordinate].append(0) # Append a cell
# code to be implemented
def generate_maze():
    """Draw the maze border into the global ``maze_grid`` (with an entrance at
    the top-left corner) and recursively subdivide the interior."""
    # draw the left outside wall
    left_wall = maze_grid[0]
    for i in range(len(left_wall)):
        left_wall[i] = 1
    # draw the right outside wall
    right_wall = maze_grid[(len(maze_grid) - 1)]
    for i in range(len(right_wall)):
        right_wall[i] = 1
    # draw the top and bottom outside walls
    for i in range(len(maze_grid)):
        maze_grid[i][0] = 1
        maze_grid[i][(len(maze_grid) - 1)] = 1
    # draw the door at the top left (2x2 opening)
    maze_grid[0][0] = 0
    maze_grid[0][1] = 0
    maze_grid[1][0] = 0
    maze_grid[1][1] = 0
    # call function to draw the rest of the maze; True => place the cheese
    add_walls(maze_grid, ((0, 0), (len(maze_grid) - 1, len(maze_grid) - 1)), True)
def add_walls(maze_grid, room_coordinates, cheese_flag):
    """Recursively divide the room given by ((low_x, low_y), (high_x, high_y))
    with one vertical and one horizontal wall, leaving a cross-shaped doorway
    at their intersection.

    Exactly one leaf room on the cheese path receives the cheese (cell
    value 2) when *cheese_flag* is True.  This refactor collapses the three
    duplicated recursive-call blocks of the original into one loop; the random
    draw order differs slightly, so a seeded run yields a different (but
    equally valid) maze.
    """
    (low_x, low_y), (high_x, high_y) = room_coordinates
    # Base case: the room is too small to divide any further.
    if (high_y - low_y) < 5 or (high_x - low_x) < 5:
        if cheese_flag:
            # drop the cheese in the middle of this final room
            maze_grid[(high_x + low_x) // 2][(high_y + low_y) // 2] = 2
        return
    # Pick the dividing walls, at least 2 cells away from the room borders.
    vertical_line = random.randint(low_x + 2, high_x - 2)
    horizontal_line = random.randint(low_y + 2, high_y - 2)
    for i in range(low_x, high_x + 1):
        maze_grid[i][horizontal_line] = 1
    for i in range(low_y, high_y + 1):
        maze_grid[vertical_line][i] = 1
    # Punch a cross-shaped opening where the two walls meet.
    for dx, dy in ((0, 0), (1, 0), (-1, 0), (0, 1), (0, -1)):
        maze_grid[vertical_line + dx][horizontal_line + dy] = 0
    # Recurse into the four quadrants (same order as before); at most one of
    # the last three carries the cheese onward — the top-left never does.
    quadrants = [
        ((low_x, low_y), (vertical_line, horizontal_line)),
        ((vertical_line, low_y), (high_x, horizontal_line)),
        ((low_x, horizontal_line), (vertical_line, high_y)),
        ((vertical_line, horizontal_line), (high_x, high_y)),
    ]
    cheese_quadrant = random.randint(1, 3) if cheese_flag else 0
    for idx, quadrant in enumerate(quadrants):
        add_walls(maze_grid, quadrant, cheese_flag and idx == cheese_quadrant)
def display_maze():
    """Render ``maze_grid`` onto the pygame surface: 0 -> white (passage),
    2 -> yellow (cheese), anything else -> black (wall); one 5x5 px rect per
    cell."""
    for row_to_draw in range(len(maze_grid)):
        for column_to_draw in range(len(maze_grid[row_to_draw])):
            if maze_grid[row_to_draw][column_to_draw] == 0:
                cell_colour = white
            elif maze_grid[row_to_draw][column_to_draw] == 2:
                cell_colour = yellow
            else:
                cell_colour = black
            # column index maps to screen x, row index to screen y
            pygame.draw.rect(screen, cell_colour, ((column_to_draw * 5), (row_to_draw * 5), 5, 5), 0)
# Loop until the user clicks the close button.
done = False

# Used to manage how fast the screen updates
clock = pygame.time.Clock()

######################################
# -------- Main Program Loop -----------
while done is False:
    for event in pygame.event.get():  # User did something
        if event.type == pygame.QUIT:  # If user clicked close
            done = True  # Flag that we are done so we exit this loop
        if event.type == pygame.KEYDOWN:  # If user wants to perform an action
            # 'm' generates and draws a fresh random maze
            if event.key == pygame.K_m:
                generate_maze()
                display_maze()

    # Limit to 20 frames per second
    clock.tick(20)

    # Go ahead and update the screen with what we've drawn.
    pygame.display.flip()

# If you forget this line, the program will 'hang' on exit.
pygame.quit()
|
983,996 | bef8777498fed4a552c9779a446b66703b276dbc | alpha=list(map(chr, range(97,123)))
string1=open('2.txt','r')
for char in string1.read():
if char in alpha:
print char
string1.close()
|
983,997 | 26a2fe7ecd5e7dd78b11d65eddd8db961f824e87 | import datetime
import random
import pymysql
from aiautomation.log.abstract_log import AbstractLog
from aiautomation.log.simple_log import SimpleLog
class AiciResultLog(SimpleLog):
    """Writes test results to the database.

    Uses a single shared connection; a database connection pool could be
    considered instead.
    """
    def __init__(self, config=None):
        # A config object is mandatory: it supplies the agent DB credentials.
        SimpleLog.__init__(self, config)
        if config is None:
            raise AttributeError("AiciResultLog请注入config对象")
        self._host = config.aiautomation.agent.db_host
        self._port = config.aiautomation.agent.db_port
        self._user = config.aiautomation.agent.db_user
        self._pwd = config.aiautomation.agent.db_pwd
        self._dbname = config.aiautomation.agent.db_dbname
        # DictCursor so fetches return dicts keyed by column name.
        self._conn = pymysql.connect(host=self._host, port=self._port, user=self._user, passwd=self._pwd,
                                     db=self._dbname, charset='utf8mb4', cursorclass=pymysql.cursors.DictCursor)
def disconnect(self):
    """Close the database connection if one is open (idempotent for None)."""
    conn = self._conn
    if conn is not None:
        conn.close()
def safe(self, parameter):
    """Stringify *parameter* and escape it for embedding in SQL text.

    NOTE(review): ``pymysql.escape_string`` was removed from the top-level
    namespace in PyMySQL 1.0 (now ``pymysql.converters.escape_string``) —
    confirm the pinned PyMySQL version supports this call.
    """
    return pymysql.escape_string(str(parameter))
def add_case_oper_log(self, **kwargs):
    """Insert one step-level operation record into ``case_oper_log``.

    Expected kwargs: oper_name, log_content, value, operator, expect_value,
    case_id, case_exec_id, component_id, result. A non-success *result* is
    recorded with oper_type ERROR instead of STEP.
    """
    if kwargs['component_id'] is None:
        kwargs['component_id'] = 0
    oper_type = "STEP"
    if AbstractLog.SUCCESS_STATUS != kwargs['result']:
        oper_type = "ERROR"
    # BUGFIX: escape the fully formatted "name:content" value. Previously the
    # bare "%s:%s" template was escaped and *then* formatted, so the caller's
    # values were interpolated unescaped into the SQL string.
    oper_name = self.safe("%s:%s" % (kwargs['oper_name'], kwargs['log_content']))
    sql = "insert into case_oper_log (oper_type, oper_name, value, operator, expect_value, oper_time, case_id, case_exec_id, node_id, component_id) " \
          "values ('%s','%s','%s','%s','%s',now(), %s, %s, %s, %s )" \
          % (oper_type, oper_name, self.safe(kwargs['value']),
             self.safe(kwargs['operator']), self.safe(kwargs['expect_value']), self.safe(kwargs['case_id']),
             self.safe(kwargs['case_exec_id']), '0', self.safe(kwargs['component_id']))
    with self._conn.cursor() as cursor:
        self.log.debug("查询案例执行步骤说明: %s" % sql)
        cursor.execute(sql)
        self._conn.commit()
def start_case_exec_log(self, **kwargs):
    """Ensure a ``case_def`` row exists for the current test case, then create
    a ``case_exec_log`` row for this execution (or reset an existing one).

    Expected kwargs: case_id, case_exec_id, exec_batch_id, machine_id,
    env_id, product_id. On insert, the new auto-increment id is stored in
    ``self.testcase.case_exec_id``.
    """
    with self._conn.cursor() as cursor:
        # Upsert the case definition so the exec log has a parent row.
        sql = "select case_id as 'id' from case_def where case_id = %d" % self.testcase.case_id
        cursor.execute(sql)
        result = cursor.fetchone()
        if result is None:
            sql = "INSERT INTO case_def VALUES ('%s', '%s', '', 'UI', 'SINGLE', '0', null, null, null, null, '1', '%s', '%s', '0')" \
                  % (self.testcase.case_id, self.testcase.case_name, self.testplan.product_id, self.testcase.module_id)
            cursor.execute(sql)
            self._conn.commit()
        else:
            sql = "update case_def set case_name='%s' where case_id = %d" % (self.testcase.case_name, self.testcase.case_id)
            cursor.execute(sql)
            self._conn.commit()
        count = 0
        if kwargs['case_exec_id'] is not None:
            cursor.execute("select count(*) as 'count' from case_exec_log where case_exec_id = '%s'" % kwargs['case_exec_id'])
            count = cursor.fetchone()['count']
        # Insert a new case_exec_log row, or update the existing one.
        if count == 0:
            sql = "INSERT INTO case_exec_log VALUES (null, '0', '%s', 'SINGLE', '0', '%s', '%s', '%s', " \
                  "'%s', null, null, null, '%s', null, " \
                  "now(), null, '%s', '%s', '%s', null)" \
                  % (self.safe(kwargs['case_id']),
                     self.safe(self.testplan.plan_id), self.safe(self.testplan.plan_batch_id),
                     self.safe(kwargs['exec_batch_id']),
                     AbstractLog.START_STATUS, "", self.safe(kwargs['machine_id']),
                     self.safe(kwargs['env_id']), self.safe(kwargs['product_id']))
            self.log.debug("生成新的case执行记录: %s" % sql)
            cursor.execute(sql)
            self._conn.commit()
            # Fetch the auto-increment id just created and remember it.
            sql = "SELECT LAST_INSERT_ID() as 'id'"
            cursor.execute(sql)
            case_exec_id = cursor.fetchone()['id']
            # NOTE(review): ``self.debug`` (not ``self.log.debug``) — confirm
            # SimpleLog actually exposes a debug() method.
            self.debug("获取新的Case_exec_id为:%s" % case_exec_id)
            self.testcase.case_exec_id = case_exec_id
        else:
            self.log.warn("已经存在case_exec_id=%s的案例执行日志,进行更新" % kwargs['case_exec_id'])
            sql = "UPDATE case_exec_log set case_id = '%s'" \
                  ",status='%s', start_time=now(), end_time=NULL, fail_reason='' where case_exec_id = '%s' " \
                  % (self.safe(kwargs['case_id']), AbstractLog.START_STATUS,
                     self.safe(kwargs['case_exec_id']))
            cursor.execute(sql)
            # NOTE(review): the UPDATEs below carry no WHERE clause and would
            # touch every row of case_exec_log — confirm a
            # "where case_exec_id=..." filter isn't missing.
            if kwargs['exec_batch_id'] and kwargs['exec_batch_id'] !="" and kwargs['exec_batch_id'] != 0:
                sql = "update case_exec_log set exec_batch_id='%s'" % self.safe(kwargs['exec_batch_id'])
                cursor.execute(sql)
            if kwargs['machine_id'] and kwargs['machine_id'] != "":
                sql = "update case_exec_log set machine='%s'" % self.safe(kwargs['machine_id'])
                cursor.execute(sql)
            if kwargs['env_id'] and kwargs['env_id'] != "":
                sql = "update case_exec_log set env='%s'" % self.safe(kwargs['env_id'])
                cursor.execute(sql)
            if kwargs['product_id'] and kwargs['product_id'] != "":
                sql = "update case_exec_log set product_id='%s'" % self.safe(kwargs['product_id'])
                cursor.execute(sql)
            # Remove operation steps left over from the previous run.
            self.log.warn("删除case_oper_log的原有操作")
            sql = "delete from case_oper_log where case_exec_id=%s and case_id=%s" % (kwargs['case_exec_id'], kwargs['case_id'])
            self.log.debug("删除原有的case操作记录:%s" % sql)
            cursor.execute(sql)
        self._conn.commit()
def update_case_exec_log(self, case_exec_id, status, fail_reason=''):
    """Close out a case execution record: final status, end time and failure reason.

    :param case_exec_id: primary key of the case_exec_log row to finish
    :param status: final status code (coerced to int before formatting)
    :param fail_reason: optional failure description, escaped via self.safe()
    """
    reason = self.safe(fail_reason)
    stmt = '''update case_exec_log set status = %d, end_time = now(), ''' \
           '''fail_reason = '%s' where case_exec_id = %d''' % (int(status), reason, case_exec_id)
    with self._conn.cursor() as cursor:
        cursor.execute(stmt)
    self._conn.commit()
def start_plan_exec_log(self):
    """Insert a new plan_exec_log row for the current test-plan run.

    When neither plan_batch_id nor exec_batch_id has been assigned yet,
    both are generated first from a timestamp plus a random 3-digit suffix.
    """
    plan = self.testplan
    with self._conn.cursor() as cursor:
        if plan.plan_batch_id is None and plan.exec_batch_id is None:
            stamp = datetime.datetime.now().strftime("%Y%m%d%H%M%S")
            plan.plan_batch_id = "%s_%s%d" % (plan.plan_id, stamp, random.randint(100, 999))
            plan.exec_batch_id = "%s%d" % (stamp, random.randint(100, 999))
        row = (plan.plan_batch_id, plan.plan_id, plan.plan_name,
               plan.exec_batch_id, plan.machine_id, plan.env_id,
               plan.product_id)
        sql = ("INSERT INTO plan_exec_log VALUES (null, '%s', '%s', '%s', '%s', '%s', "
               "now(), NULL , '1', '%s', '%s', '1', null)") % row
        cursor.execute(sql)
        self._conn.commit()
def update_plan_exc_log(self, status):
    """Set the final status and end time on this run's plan_exec_log row.

    NOTE(review): method name spelled "exc" (not "exec") is kept as-is
    because callers elsewhere use it.
    """
    template = "update plan_exec_log set status = '%s', end_time = now() where exec_batch_id = '%s' and plan_batch_id = '%s'"
    sql = template % (status, self.testplan.exec_batch_id, self.testplan.plan_batch_id)
    self.log.debug("更新计划数据: %s" % sql)
    with self._conn.cursor() as cursor:
        cursor.execute(sql)
        self._conn.commit()
def step_log_before(self, oper_type, oper_name, oper_id, parent_oper_id=None, *key, **kwargs):
    """Record the start of a step; component steps also get a case_oper_log row."""
    SimpleLog.step_log_before(self, oper_type, oper_name, oper_id, parent_oper_id, *key, **kwargs)
    # Only component steps are persisted to the DB at start time.
    if oper_type != AbstractLog.STEP_TYPE_COMPONENT:
        return
    fields = dict(
        oper_type=AbstractLog.STEP_TYPE_STEP,
        oper_name=oper_name,
        component_id=oper_id,
        value="",
        operator="",
        expect_value="",
        case_id=self.testcase.case_id,
        case_exec_id=self.testcase.case_exec_id,
        node_id="",
        result=AbstractLog.START_STATUS,
        log_content="开始执行",
    )
    self.add_case_oper_log(**fields)
def step_log_after(self, oper_type, oper_name, oper_id, oper_time, result, log_content, parent_oper_id=None, *key,
                   **kwargs):
    """Record the end of a step in both the simple log and the case_oper_log table."""
    SimpleLog.step_log_after(self, oper_type, oper_name, oper_id, oper_time, result, log_content, parent_oper_id,
                             *key, **kwargs)
    row = dict(
        oper_type=oper_type,
        oper_name=oper_name,
        component_id=oper_id,
        value="",
        operator="",
        expect_value="",
        case_id=self.testcase.case_id,
        case_exec_id=self.testcase.case_exec_id,
        node_id="",
        result=result,
        log_content=log_content,
    )
    self.add_case_oper_log(**row)
def case_log_before(self, *key, **kwargs):
    """Record case start in the simple log, then open a case_exec_log row."""
    SimpleLog.case_log_before(self, *key, **kwargs)
    case, plan = self.testcase, self.testplan
    self.start_case_exec_log(
        case_exec_id=case.case_exec_id,
        case_id=case.case_id,
        module_id=case.module_id,
        exec_batch_id=plan.exec_batch_id,
        machine_id=plan.machine_id,
        env_id=plan.env_id,
        product_id=plan.product_id,
    )
def case_log_after(self, result, run_time, log_content, *key, **kwargs):
    """Finish case logging: delegate to SimpleLog, then persist the outcome."""
    SimpleLog.case_log_after(self, result, run_time, log_content, *key, **kwargs)
    exec_id = self.testcase.case_exec_id
    self.update_case_exec_log(exec_id, result, log_content)
def plan_log_before(self, *key, **kwargs):
    """Hook run before plan execution: base logging, then open a plan_exec_log row."""
    SimpleLog.plan_log_before(self, *key, **kwargs)
    self.start_plan_exec_log()
def plan_log_after(self, *key, **kwargs):
    # Hook run after plan execution: base logging, mark this run's
    # plan_exec_log row as successful, then release the DB connection.
    SimpleLog.plan_log_after(self, *key, **kwargs)
    self.update_plan_exc_log(AbstractLog.SUCCESS_STATUS)
    # NOTE(review): this calls disconnect(), but the method defined just
    # below is named disconnection() -- confirm disconnect() exists (e.g.
    # on a base class); otherwise this line raises AttributeError.
    self.disconnect()
def disconnection(self):
    """Close the underlying DB connection if one is open.

    Improvement: reset self._conn to None after closing so repeated calls
    are harmless no-ops instead of attempting to close an already-closed
    connection (which raises on most DB drivers).
    """
    if self._conn is not None:
        self._conn.close()
        self._conn = None
|
983,998 | d9b79061305842e1443f1ea3a15ca51756bc5841 | # mynumber.py
# 此示例示意 运算符重载
class mynumber:
    """Toy numeric wrapper demonstrating operator overloading.

    Fix: the __init__ parameter was misspelled ``vaule``; renamed to
    ``value`` (all in-file callers pass it positionally).
    """

    def __init__(self, value):
        # Store the wrapped number.
        self.data = value

    def __repr__(self):
        # Repr string kept byte-identical to the original example.
        return "mynum(%d)" % self.data

    def __add__(self, other):
        # Debug trace kept from the original (shows when + dispatches here).
        print("__add__被调用")
        # Addition returns a new wrapper; operands are not mutated.
        return mynumber(self.data + other.data)

    def __sub__(self, other):
        return mynumber(self.data - other.data)
# Demo: exercise the overloaded operators.
n1 = mynumber(100)
n2 = mynumber(200)
print(n1)
print(n2)
# n3 = n1 + n2
n3 = n1.__add__(n2) # explicit dunder call; equivalent to n1 + n2
n4 = n1 - n2
print(n1, '-', n2, '=', n4)
print(n1, '+', n2, '=', n3) |
983,999 | 10eae2f92dc31908ae321d134e675f938e6b6eeb | # -*- coding:utf-8 -*-
# import socket
# sk = socket.socket()
# ip_port = ('127.0.0.1',9998)
# sk.bind(ip_port)
# sk.listen(5) # 等待时候后面可以阻塞几个
# while True:
# conn,address = sk.accept()
# conn.send('hello.')
# flag = True
# while flag:
# data = conn.recv(1024)
# print data
# print type(data)
# if data == 'exit':
# flag == False
# elif data.__contains__('weather'):
# conn.send('Soudesune, totemo atsui desu.')
# elif data.__contains__('mood'):
# conn.send('Positive views always counts.')
# elif data.__contains__('life'):
# conn.send('As for life, how can you avoid mentioning Python?!')
# elif data.__contains__('python'):
# conn.send('Python is great.')
# else:
# conn.send('I do not see that.')
# conn.close()
import SocketServer # Py3中改为小写socketserver
class MyServer(SocketServer.BaseRequestHandler):
    """Keyword-matching chat handler: replies canned lines for known topics.

    Bug fixes:
    - ``flag == False`` was a comparison, not an assignment, so sending
      'exit' never terminated the receive loop; now ``flag = False``.
    - ``except Exception,e`` is Python-2-only syntax; ``except Exception as e``
      works on both Python 2.6+ and Python 3.
    - ``data.__contains__(x)`` replaced with the idiomatic ``x in data``.
    """

    def handle(self):
        # self.request is the connected client socket.
        conn = self.request
        conn.send('Hello')
        flag = True
        while flag:
            try:
                data = conn.recv(1024)
                print(data)
                if data == 'exit':  # https://stackoverflow.com/questions/33003498/typeerror-a-bytes-like-object-is-required-not-str
                    flag = False  # was `flag == False`: a no-op comparison
                elif 'weather' in data:
                    conn.send('Soudesune, totemo atsui desu.')
                elif 'mood' in data:
                    conn.send('Positive views always counts.')
                elif 'life' in data:
                    conn.send('As for life, how can you avoid mentioning Python?!')
                elif 'python' in data:
                    conn.send('Python is great.')
                else:
                    conn.send('I do not see that.')
            except Exception as e:
                print('连接出故障了:{}'.format(e))
                break
        conn.close()
if __name__ == '__main__':
    # Threaded TCP server: one handler thread per client connection.
    address = ('127.0.0.1', 9019)
    server = SocketServer.ThreadingTCPServer(address, MyServer)
    server.serve_forever()
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.