blob_id stringlengths 40 40 | directory_id stringlengths 40 40 | path stringlengths 2 616 | content_id stringlengths 40 40 | detected_licenses listlengths 0 69 | license_type stringclasses 2
values | repo_name stringlengths 5 118 | snapshot_id stringlengths 40 40 | revision_id stringlengths 40 40 | branch_name stringlengths 4 63 | visit_date timestamp[us] | revision_date timestamp[us] | committer_date timestamp[us] | github_id int64 2.91k 686M ⌀ | star_events_count int64 0 209k | fork_events_count int64 0 110k | gha_license_id stringclasses 23
values | gha_event_created_at timestamp[us] | gha_created_at timestamp[us] | gha_language stringclasses 220
values | src_encoding stringclasses 30
values | language stringclasses 1
value | is_vendor bool 2
classes | is_generated bool 2
classes | length_bytes int64 2 10.3M | extension stringclasses 257
values | content stringlengths 2 10.3M | authors listlengths 1 1 | author_id stringlengths 0 212 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
f5e30c5375c055d52d0340950e14a4ffaa6c09c9 | 3edbc107cc2b1cfe9ec068e930e9b118c455b709 | /client.py | 6032be1781e26af80b54780224d02c65d87616f3 | [] | no_license | uciriello/OPCUA_test | 03c959a738b4f0657be8c163f3509d5776b0c2ad | 1059306da0281cdaf1b134ed160a30395b1bdb5d | refs/heads/master | 2023-04-16T10:06:19.814705 | 2021-04-23T10:59:52 | 2021-04-23T10:59:52 | 352,331,171 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 947 | py | from opcua import Client
import paho.mqtt.client as mqtt
import time
import json
# Bridge script: polls two OPC UA nodes (temperature, pressure) and forwards
# the readings as JSON telemetry to a ThingsBoard MQTT broker every 2 seconds.
url = "opc.tcp://192.168.1.17:4840"
client = Client(url)
client.connect()
print("OPC UA Client connected")
# MQTT connection settings (ThingsBoard demo server)
iot_hub = "demo.thingsboard.io"
port = 1883
username = "whohDlx7wkEXXuoWim6Y"  # presumably the ThingsBoard device access token — confirm
password = ""
topic = "v1/devices/me/telemetry"
iot_hub_client = mqtt.Client()
iot_hub_client.username_pw_set(username, password)
iot_hub_client.connect(iot_hub, port)
print("Connected to IOT hub")
data = dict()
while True:
    try:
        # node ids ns=2;i=2 / ns=2;i=3 — presumably temperature and pressure
        # in that order; confirm against the server's address space
        temp = client.get_node("ns=2;i=2")
        press = client.get_node("ns=2;i=3")
        temperature = temp.get_value()
        pressure = press.get_value()
        print(temperature, pressure)
        data["temperature"] = int(temperature)
        data["pressure"] = int(pressure)
        data_out = json.dumps(data)
        iot_hub_client.publish(topic, data_out, 0)  # QoS 0 (fire-and-forget)
        time.sleep(2)
    except Exception as e:
        print(e)  # best-effort loop: log the error and keep polling
print(e) | [
"umberto.ciriello@quinck.io"
] | umberto.ciriello@quinck.io |
567f50449d6a6f8df81b333983100dbcf6dce40e | f2de4ecbecc1a71a8aa81b3d9f6c00fc9ca6a3a5 | /lab7/3d-plots.py | 7e63f888d6e7652cf94c30775c6a32d1fdfc8993 | [] | no_license | ou-sbselab/se4cps-labs | 18e424f71ac8840a61557a741c5d3393ee53c790 | 39e54a763bea1ecc8f35be1d406915668b222e04 | refs/heads/master | 2022-02-27T18:12:07.724211 | 2019-11-04T04:03:00 | 2019-11-04T04:03:00 | 106,855,892 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,854 | py | # https://matplotlib.org/mpl_toolkits/mplot3d/tutorial.html
import numpy as np
import matplotlib as mpl
mpl.use('Agg') # necessary since we're headless
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import axes3d
# Demo script: renders three 3D plot types (line, wireframe, triangular
# surface) to PNG files under plots/, pausing for user input between steps.
mpl.rcParams['legend.fontsize'] = 10
# --- [1/3] parametric 3D line plot ---
fig = plt.figure()
# fix: Figure.gca(projection=...) was removed in matplotlib 3.6;
# add_subplot(111, projection='3d') (already used below) works on old and new versions
ax = fig.add_subplot(111, projection='3d')
theta = np.linspace(-4 * np.pi, 4 * np.pi, 100)
z = np.linspace(-2, 2, 100)
r = z**2 + 1
x = r * np.sin(theta)
y = r * np.cos(theta)
ax.plot(x, y, z, label='parametric curve')
ax.legend()
plt.savefig('plots/3d-line-plot.png')
print("3d-line-plot.png generated!")
input('[1/3] Press enter to continue to the next step')
# --- [2/3] wireframe plot ---
fig = plt.figure()
ax = fig.add_subplot(111, projection='3d')
# Grab some test data.
X, Y, Z = axes3d.get_test_data(0.05)
# Plot a basic wireframe.
ax.plot_wireframe(X, Y, Z, rstride=10, cstride=10)
plt.savefig('plots/3d-wireframe-plot.png')
print("3d-wireframe-plot.png generated!")
input('[2/3] Press enter to continue to the next step')
# --- [3/3] triangular surface ("pringle") plot ---
n_radii = 8
n_angles = 36
# Make radii and angles spaces (radius r=0 omitted to eliminate duplication).
radii = np.linspace(0.125, 1.0, n_radii)
angles = np.linspace(0, 2*np.pi, n_angles, endpoint=False)
# Repeat all angles for each radius.
angles = np.repeat(angles[..., np.newaxis], n_radii, axis=1)
# Convert polar (radii, angles) coords to cartesian (x, y) coords.
# (0, 0) is manually added at this stage, so there will be no duplicate
# points in the (x, y) plane.
x = np.append(0, (radii*np.cos(angles)).flatten())
y = np.append(0, (radii*np.sin(angles)).flatten())
# Compute z to make the pringle surface.
z = np.sin(-x*y)
fig = plt.figure()
ax = fig.add_subplot(111, projection='3d')
ax.plot_trisurf(x, y, z, linewidth=0.2, antialiased=True)
plt.savefig('plots/3d-triangular-plot.png')
print("3d-triangular-plot.png generated!")
print('[3/3] Done.')
| [
"fredericks@oakland.edu"
] | fredericks@oakland.edu |
89ce81b5b9529fa6b97b6590d4c19cdc081f99da | 6f568d165d6cbf2295cc826dd1933e6e7792fd27 | /Genetyka_Obj_v1.py | 36819506a45b2156304c71613a80f93b5b3ff419 | [] | no_license | majkowskir/algen | 4de6a7fc5c2dc5cbf21b0d0e74f8889737f34794 | a41f61242020471ba9efc5241fbd696128a6b62a | refs/heads/master | 2020-09-17T04:20:05.590416 | 2019-12-01T08:49:52 | 2019-12-01T08:49:52 | 223,986,720 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,419 | py | # Wyznaczanie maksimum funkcji algorytmem ewolucyjnym
# Radosław Majkowski 233256
# Mateusz Witomski 233270
import time, math, matplotlib
import matplotlib.pyplot as plt
import numpy as np
import random as rand
from collections import Counter
import tkinter as tk
from tkinter import *
# the fitness function is entered per the lab assignment requirements
# note 1: the function's domain excludes zero
# note 2: roulette-wheel selection does not allow negative objective values; we know the
# approximate values over the interval of interest, so negatives are clipped later
iteracja = 0 # starting value; loop terminates when iteracja = Gen
# Initial program parameters: number of generations (Gen), number of variables (k), interval (Xmin, Xmax), precision (d)
Gen = 50
# pop_size - population size; preferably an even number
pop_size = 300
# probabilities: crossover (Pc) and mutation (Pm)
Pc = 0.25
Pm = 0.01
# The objective function is a function of one variable (k); k is not used in this version of the program
k = 1
# interval over which the function is examined (Xmin to Xmax)
# note: the assignment involves only positive values, no need to shift the interval
Xmin = 0.5
Xmax = 2.5
# precision: 3 places after the decimal point
d = 3
# compute how many values must be binary-encoded: mi
mi = ((Xmax-Xmin)*10**d)+1
# ...and how many bits are needed to encode that many values: m
# uses the base-2 logarithm and the ceiling function (nearest integer >= the given value)
# note: the function has a single variable (k=1); the program does not support multiple variables!
m = math.ceil(math.log(mi,2))
print("Dla zadanej dokładności i przedziału niezbędne jest zakodowanie minimum %s wartości, użyjemy do tego %s bitów." % (mi, m))
def funkcja(argument):
    """Fitness function from the lab assignment: (e^x * sin(10*pi*x) + 1) / x.

    Returns the function value, or 0 (after printing a warning) when the value
    cannot be computed — e.g. argument == 0 (outside the domain) or overflow.
    """
    try:
        y = (math.exp(argument) * math.sin(10*math.pi*argument)+1)/argument
        #y = math.sin(argument)
    except (ZeroDivisionError, OverflowError, ValueError):
        # fix: was a bare `except:`, which also swallows KeyboardInterrupt/SystemExit
        print("UWAGA! Błąd obliczania wartości funkcji dla argumentu x = %s" % argument)
        y = 0
    return y
class Osobnik:
    """A single individual: a binary chromosome of m bits.

    Relies on module-level globals: m (chromosome length), pop_size.
    """
    def __init__(self):
        # seed the first population's chromosomes with random 0/1 bits according to the
        # computed bit count; duplicates between chromosomes are not avoided
        v_chromosom = np.random.choice(a=[0, 1], size=m)
        self.chromosom = v_chromosom
    def mutacja(self):
        # flip a randomly chosen bit and return the flipped value
        # NOTE(review): the flipped bit is returned but never written back into
        # self.chromosom, so the individual itself is left unmutated — confirm intent
        pozycja_mutacji = rand.randint(0, m-1)
        mutant = abs(self.chromosom[pozycja_mutacji]-1)
        #return (mutant, pozycja_mutacji)
        return (mutant)
    def kopiowanie(self):
        # plain copy operator: returns (a reference to) this individual's chromosome
        return self.chromosom
    def crossover(self, pula):
        # single-point crossover with a randomly chosen partner from population `pula`
        punkt_krzyzowania = rand.randint(1, m-2) # crossover point: cut at earliest after the first bit, at latest before the last
        rodzic_2 = rand.randint(0, pop_size-1)
        parent_2 = pula.stado[rodzic_2].chromosom
        dziecko_1 = np.concatenate((self.chromosom[:punkt_krzyzowania], parent_2[punkt_krzyzowania:]), axis=None)
        dziecko_2 = np.concatenate((parent_2[:punkt_krzyzowania], self.chromosom[punkt_krzyzowania:]), axis=None)
        # this individual becomes child 1; the partner is left unchanged (see commented line)
        self.chromosom = dziecko_1
        #pula.stado[rodzic_2].chromosom = dziecko_2
        #return (self.chromosom, parent_2, punkt_krzyzowania, dziecko_1, dziecko_2) # list for two returned values
        return (dziecko_1, dziecko_2)
class Populacja:
    """Population of Osobnik individuals for the evolutionary algorithm.

    Relies on module-level globals: pop_size, m, Xmin, Xmax, d, Pc, Pm and the
    fitness function `funkcja`.
    """
    def __init__(self, liczebnosc_stada):
        # create `liczebnosc_stada` randomly initialised individuals
        self.stado = []
        for i in range(liczebnosc_stada):
            self.stado.append(Osobnik())
    def ewaluacja(self):
        """Decode every chromosome to an x in (Xmin, Xmax) and return [x, fitness] pairs."""
        wartosci_f = []  # fitness values for the current generation
        for i in range(pop_size):
            my_lst = self.stado[i].chromosom
            str1 = "".join(map(str, my_lst))  # join the bit list into a binary string
            dekodowanie2dec = int(str1, 2)  # decode the binary string to a decimal number
            # map the chromosome to an x value in the (Xmin, Xmax) interval
            argument = ((Xmax-Xmin)*dekodowanie2dec)/((2**m)-1)+Xmin
            wartosc = funkcja(argument)  # function value at point x
            if wartosc < 0:
                wartosc = 0  # roulette selection requires non-negative values; clip negatives
            wartosci_f.append([argument, wartosc])
        return (wartosci_f)
    def ruletka(self):
        """Roulette-wheel selection; overwrites the population with the mating pool.

        Returns F, the total fitness of the evaluated population.
        """
        ewal = self.ewaluacja()
        F = [sum(i) for i in zip(*ewal)]  # column-wise sums of the [x, fitness] pairs
        F = F[1]  # total fitness over the whole herd
        Ps = []  # selection probability per individual (larger fitness -> larger wheel slice)
        try:
            for x in ewal:
                Ps.append(x[1]/F)
        except ZeroDivisionError:
            # fix: was a bare `except:`; the only expected failure is F == 0
            print("sometink went wronk: np. suma prawdopodobieństw wynosi zero?!?")
        sektor = 0
        kolo_ruletki = []
        for i in range(pop_size):
            sektor = sektor + Ps[i]
            kolo_ruletki.append(sektor)  # cumulative boundary value of each wheel sector
        pula_rodzicielska = []
        for i in range(pop_size):
            losowa = rand.random()  # draw a number from (0,1), once per individual
            sektor = 0  # start from sector zero
            for j in range(len(kolo_ruletki)):  # find which wheel sector the draw landed in
                if losowa > kolo_ruletki[j]:
                    sektor = sektor+1
            pula_rodzicielska.append(self.stado[sektor].chromosom)  # add the selected parent
        # overwrite the herd with the mating pool
        for i in (range(len(pula_rodzicielska))):
            self.stado[i].chromosom = pula_rodzicielska[i]
        return (F)
    def operacje(self, pula):
        """Apply genetic operators (copy / crossover / mutation) across the population."""
        potomstwo = []
        # fix: was `len(pula)` — pula is a Populacja (see the call site), which has
        # no __len__; the individual list is pula.stado
        stado = iter(range(len(pula.stado)))
        for i in stado:
            operacja = rand.random()
            if operacja > (Pc+Pm):
                potomstwo.append(self.stado[i].kopiowanie())
            elif operacja > Pm:
                potomstwo.append(self.stado[i].crossover(pula))
                # fix: `next stado` was a SyntaxError; presumably meant to skip the
                # next index after pairing — confirm intended semantics
                next(stado, None)
            else:
                potomstwo.append(self.stado[i].mutacja())
                next(stado, None)  # fix: same SyntaxError as above
        return ()
Stado_Alfa = Populacja(pop_size) # create herd A
# print the individuals of herd A
for i in range(pop_size):
    print(Stado_Alfa.stado[i].chromosom)
epoki = iter(range(Gen))
for i in epoki:
    # NOTE(review): ruletka() is invoked twice per generation here (once with the
    # result discarded, once inside the print), so selection runs twice — confirm intent
    Stado_Alfa.ewaluacja()
    Stado_Alfa.ruletka()
    print(round(Stado_Alfa.ruletka()/pop_size,d))  # average fitness of the herd
    Stado_Alfa.operacje(Stado_Alfa)
# print("Po x iteracjach")
# for i in range(pop_size):
#     print(Stado_Alfa.stado[i].chromosom)
#print(*Stado_Alfa.ewaluacja(), sep="\n")
#print(*wartosci_funkcji_do_wykresu, sep="\n") | [
"majkowski.r@gmail.com"
] | majkowski.r@gmail.com |
dc3f8642f838624c60bc4195205fd88e718f31b6 | ce143f71c756494cdf04d298e3eacf05c2f3b710 | /module/src/sensors_test.py | 31eee3dc5a6c0d7f2ddd942478bb09352ae4bb9b | [] | no_license | nuft/PolyPot | ef5aa9e845a6e1c87b404867d1e8f9ecca5f110d | cdb1749f22c7a0ff9596d417416dbbdc158023b6 | refs/heads/master | 2021-03-27T15:22:50.975282 | 2017-12-25T16:08:15 | 2017-12-25T16:08:15 | 109,604,142 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,616 | py | import machine
import struct
def sensor_pwr(enable):
    """Switch the sensor power rail via the active-low enable pin (GPIO 25)."""
    sensor_en_n = machine.Pin(25, machine.Pin.OUT)
    # active-low line: drive 0 to enable, 1 to disable
    sensor_en_n.value(0 if enable else 1)
# --- analog sensor inputs (ADC1 channels; pin numbers suggest an ESP32 — confirm) ---
pin_moist = machine.Pin(35, machine.Pin.IN)
adc_moist = machine.ADC(pin_moist) # ADC1 CH 7
adc_moist.atten(machine.ADC.ATTN_11DB)  # 11 dB attenuation for a wider input voltage range
adc_moist.read()
pin_battery = machine.Pin(32, machine.Pin.IN)
adc_battery = machine.ADC(pin_battery) # ADC1 CH 4
pin_luminosity = machine.Pin(33, machine.Pin.IN)
adc_luminosity = machine.ADC(pin_luminosity) # ADC1 CH 5
adc_luminosity.read()
# --- I2C bus setup and scan ---
i2c = machine.I2C(scl=machine.Pin(16), sda=machine.Pin(4), freq=100000)
i2c.scan()
# temperature sensor
# device at I2C address 72 (0x48); exact part unknown from here — confirm
i2c.writeto_mem(72, 0x01, b'\x00') # write control register
temp = i2c.readfrom_mem(72, 0x00, 2)  # read the 2-byte temperature register
temp = struct.unpack('>h', temp) # MSByte first -> 16 bit big endian
(temp, ) = temp # upack tuple
temp = temp / 256  # scale raw register value (1/256 degC per LSB assumed — confirm with datasheet)
# TOF distance sensor
import machine
import sensors
SENSOR_ENn = machine.Pin(25, machine.Pin.OUT)
SENSOR_ENn.value(0)  # active-low: power the sensor rail
TOF_RESETn = machine.Pin(17, machine.Pin.OUT)
TOF_RESETn.value(1) # enable TOF sensor
i2c = machine.I2C(scl=machine.Pin(16), sda=machine.Pin(4), freq=100000)
i2c.scan()
TOF_ADDR = 0x29  # TOF sensor I2C address (driven as a VL6180X below)
# raw register pokes; reg 0x000 is presumably the identification register — confirm with datasheet
i2c.readfrom_mem(TOF_ADDR, 0x000, 1, addrsize=16)
i2c.writeto_mem(TOF_ADDR, 0x0011, b'\x10', addrsize=16)
def rd(reg):
    """Read one byte from 16-bit register address `reg` of the TOF sensor; return it as an int."""
    raw = i2c.readfrom_mem(TOF_ADDR, reg, 1, addrsize=16)
    (value,) = struct.unpack('B', raw)
    return value
def wr(reg, data):
    """Write the single byte `data` to 16-bit register address `reg` of the TOF sensor."""
    payload = struct.pack('B', data)
    i2c.writeto_mem(TOF_ADDR, reg, payload, addrsize=16)
# driver-based access to the same TOF sensor through the project's sensors module
import sensors
tof = sensors.VL6180X(i2c, TOF_RESETn)
tof.start()
tof.distance()
| [
"michael.spieler@epfl.ch"
] | michael.spieler@epfl.ch |
0276f6f16c5bc0edafe9e68bb7e04d35798c0d7f | bb5108506c1eb9b8ab81f572f97fd07e0c02e27d | /TrajectoryReconstructionFull.py | 073a045b09e1a773b0282f253e68a0aef5ac1d66 | [] | no_license | anjliu/VehicleEmissionModelling | a81d9ec6c028ae8cdca609ecb7257903f3223663 | ead744de1e327507cf8ef09186c7090d7d1686a9 | refs/heads/master | 2020-03-22T18:47:38.353549 | 2018-09-19T22:41:05 | 2018-09-19T22:41:05 | 140,483,122 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 31,727 | py | # -*- coding: utf-8 -*-
"""
Created on Wed Jul 11 12:00:35 2018
@author: Anjie
"""
import pandas as pd
import numpy as np
import json
import matplotlib.pyplot as plt
from time import time
import toolSpace as ts
start_t=time()  # wall-clock start, for the runtime printout at the end
#temp
inters=['Pinebush']
direcs=['NB']
# intersections, directions
#inters=['Dunbar','Bishop','Sheldon','Pinebush','Hwy401']
#direcs=['NB','SB']
## start template for output file
## (similar to fzp files so that the VissimOutputToMovesInput module can be used)
#sec=[]
#no=[]
#link=[]
#speed=[]
#accel=[]
#vehtype=[]
traj=pd.DataFrame(columns=['sec','no','link','lane','dist','pos','speed','accel','vehtype','sim'])
#import simulation data
lsas=ts.listFiles('lsa')
mers=ts.listFiles('mer')
rsrs=ts.listFiles('rsr')
fzps=ts.listFiles('fzp')
# runSpecs
# NOTE(review): json.load(open(...)) leaves the file handle unclosed; consider `with open(...)`
runSpecs=json.load(open('runSpecs.json'))
periods=runSpecs['periods']
# modelSpecs
modelSpecs=json.load(open('modelSpecs-MajorOnly.json'))
# parameters for trajectory reconstruction
dt=1 # time step [s]
adj=.1 # parameter adjustment step for curve fitting
stoplag=1 # time between vehicle detection at stopbar and vehicle coming to a complete stop
startlag=1 # time between vehicle starting to accelerate and vehicle no longer being detected at stopped position
stoptime=1 # minimum time of detection for a vehicle that stopped at the detector position
t_tol=1 # acceptable difference between detected time and fitted curve time at the advance position
# original deceleration curve parameters from literature
k3o=0.005
k4o=0.154
k5o=0.493
# acceleration curve parameters from literature
beta0=3.369
beta1=0.072
# car-following parameters: time interval and effective vehicle length
leffs=pd.Series([5.5,13,11.5,13.5],index=[100,150,200,210]) # effective length [m], keyed by vehicle type code
hDis=2 # maximum headway for queue discharge
pstopcruise=.75 # if a vehicle stops, its speed is at most this fraction of its cruise speed
#%% filter data, reconstruct trajectories, and append to trajectory records
for s in range(len(lsas)): # for each simulation
# import vissim data
lsa=ts.import_lsa(lsas[s])
mer,merDetail=ts.import_mer(mers[s])
rsr=ts.import_rsr(rsrs[s])
fzp=ts.import_fzp(fzps[s])
merEn=mer.loc[mer['t(Entry)']>0] # divide detector data into dectector on and detector off data
merEx=mer.loc[mer['t(Exit)']>0]
tg=lsa.loc[lsa['newState'].str.contains('green')] # filter signal timing to green times
for p in periods: # for each period (hour)
rsrp=ts.filterbyperiod(rsr,p)
fzpp=ts.filterbyperiod(fzp,p)
inter_index=1 # for naming linkID
for i in inters: # for each intersection
tgi=tg.loc[tg['SC']==modelSpecs[i]['SignalController']] # filter to signal controller
direc_index=1
for d in direcs: # for each direction
# Dunbar northbound and Hwy401 southbound are out of scope (end intersections)
if not ((i=='Dunbar' and d=='NB') or (i=='Hwy401' and d=='SB')):
time_id0=time()
f=ts.startFig('Lead Trajectories of '+str(p)+str(i)+str(d),'Time','Position',4)
# segment model specs
segSpecs=modelSpecs[i][d]
# links in the segment
intlink=segSpecs['links']['intersection']['thru']
approachlinks=segSpecs['links']['approach']['thru']
links=approachlinks+intlink
# link lengths
lengthMap=ts.getLengthMap(links)
# simulated trajectories from vehicle records
fzppid=fzpp.loc[fzpp['link'].isin(links)] # vehicle records for the intersection and approach
## find absolute position of vehicle with end of last major intersection as reference
fzpx=0 #cumulative distance from reference
# main segment positions
for l in approachlinks:
fzppid.loc[fzppid['link']==l,'abspos']=fzppid['pos']+fzpx
fzpx +=lengthMap[l]
# intersection positions
fzppid.loc[fzppid['link']==intlink,'abspos']=fzppid['pos']+fzpx
# plot trajectories vehicle by vehicle
fzp_vehicles=fzppid['no'].unique() # all vehicles numbers
for n in fzp_vehicles:
veh=fzppid.loc[fzp['no']==n]
plt.plot(veh['sec'],veh['abspos'],'grey')
# filter data to segment
tgid=tgi.loc[lsa['SG']==segSpecs['StartingSignalGroup']] # filter signal timing to signal group
tgid=tgid.reset_index(drop=True) # reset index for filtering
tgidp=ts.filterbyperiod(tgid,p) # filter signal timing to greens starting this period
if p!='evening2':
tgidp=tgidp.append(tgid.loc[tgidp.index[-1]+1]) # add on the next green start after the last green start in this period to close the last bin
tgidp=tgidp['tsim'] # only the timing data is needed
if p=='evening2': # for last period, there is no next green after the period (simulation stopped)
tgidp=tgidp.append(pd.Series(merEx['t(Exit)'].iloc[-1]),ignore_index=True)
tgidp=tgidp.reset_index(drop=True) # reset index for iterating through cycles
rsrpid=rsrp.loc[rsrp['No.']==segSpecs['TravelTimeMeas']] # filter travel time measurements
# get cruising speed from travel times
ttcruise=np.percentile(rsrpid['Trav'],5) # use 5th percentile of travel times
segLength=sum(lengthMap) # length of segment
v0=segLength/ttcruise
# get detector numbers
detn={}
segDetSpecs=segSpecs['detectors']
detn['adv']=segDetSpecs['advance']
detn['stop']=segDetSpecs['stopbar']
detn['end']=segDetSpecs['end']
xdet={} # get positions of detectors
for sd in detn:
xdet[sd]=int(merDetail.loc[merDetail[3]==detn[sd][0],9].iloc[0])
# for stopbar detector
posOffset=sum(lengthMap.loc[pd.Series(approachlinks).iloc[0:-1]]) # offset distance is length of upstream links
xdet['stop']=xdet['stop']+posOffset
# for advance detector
detlink=merDetail.loc[merDetail[3]==detn['adv'][0],5].iloc[0] # link detector is on
detlinkindex=links.index(detlink)
posOffset=sum(lengthMap.loc[pd.Series(approachlinks).iloc[0:detlinkindex]]) # offset distance is length of upstream links up to link of detector
xdet['adv']=xdet['adv']+posOffset
# end of intersection
xdet['end']=xdet['stop']+float(lengthMap[intlink]) #add on intersection span
# iterate through cycles in this period
cycles=range(len(tgidp)-1) # the added on green start is not counted - just for closing the bin
for c in cycles:
no=[]
link=[]
speed=[]
accel=[]
vehtype=[]
lanes=len(detn['stop'])
for l in range(lanes):
# detectors for this lane
detectorStop=detn['stop'][l]
# fine vehicles detected in this lane during this cycle at the stopbar
merExid_stopbar=merEx.loc[merEx['Measurem.']==detectorStop] # stopbar exit detections
merExid_stopbar_cycle=merExid_stopbar.loc[(merExid_stopbar['t(Exit)']>=tgidp[c])&(merExid_stopbar['t(Exit)']<=tgidp[c+1])]
vehicles=merExid_stopbar_cycle['VehNo'].unique()
if len(vehicles)>0:
# filter detections to vehicles
# (so that adv detections before the cycle starts are included as long as the vehicle passes the stopbar during the cycle.)
merEnc=merEn.loc[merEn['VehNo'].isin(vehicles)]
merExc=merEx.loc[merEx['VehNo'].isin(vehicles)]
# data for stopbar detection of this lane
detectionStopEn=merEnc.loc[merEnc['Measurem.']==detectorStop]
detectionStopEx=merExc.loc[merExc['Measurem.']==detectorStop]
# data for advance detection of all lanes
detectionAdvEn=merEnc.loc[merEnc['Measurem.'].isin(detn['adv'])]
detectionAdvEx=merExc.loc[merExc['Measurem.'].isin(detn['adv'])]
# data for end of intersection detection of all lanes
detectionEndEn=merEnc.loc[merEnc['Measurem.'].isin(detn['end'])]
detectionEndEx=merExc.loc[merExc['Measurem.'].isin(detn['end'])]
# compose detector data to combine entry and exit times into one data entry for each vehicle
stopt1=[]
stopt2=[]
advt1=[]
advt2=[]
endt1=[]
endt2=[]
vehn=[]
vtype=[]
for n in vehicles:
# compile detector data
vehn.append(n)
vtype.append(detectionStopEn.loc[detectionStopEn['VehNo']==n,'Vehicle type'].iloc[0])
stopt1n=detectionStopEn.loc[detectionStopEn['VehNo']==n,'t(Entry)'].iloc[0]
stopt2n=detectionStopEx.loc[detectionStopEx['VehNo']==n,'t(Exit)'].iloc[0]
advt1n=detectionAdvEn.loc[detectionAdvEn['VehNo']==n,'t(Entry)'].iloc[0]
advt2n=detectionAdvEx.loc[detectionAdvEx['VehNo']==n,'t(Exit)'].iloc[0]
stopt1.append(stopt1n)
stopt2.append(stopt2n)
advt1.append(advt1n)
advt2.append(advt2n)
plt.plot([stopt1n,stopt2n],[xdet['stop']]*2,'orange')
plt.plot([advt1n,advt2n],[xdet['adv']]*2,'orange')
# dataframe of detector data
detection=pd.DataFrame({'vehn':vehn,'advt1':advt1,'advt2':advt2,'stopt1':stopt1,'stopt2':stopt2,'vtype':vtype})
detection=detection.sort_values(by='stopt1') # sort by time of reaching stopbar
detection=detection.reset_index(drop=True)
leadTraj={} # trajectory of lead vehicle
leaddetection=detection.iloc[0]
stopbarWait=leaddetection['stopt2']-leaddetection['stopt1']
# check if there was a queue - how long the first vehicle stopped at the stopbar
if stopbarWait>stoptime: # if the vehicle had stopped, construct deceleration and acceleration curves for the lead vehicle
tdelta=9999 # time difference for fitting curve to detector data (to be minimized)
# deceleration curve
# start with original values
k3=k3o
k4=k4o
k5=k5o
while tdelta>t_tol: # time difference between advance detection and extrapolated cruise trajectory at advance
v=[v0] # speed profile
dc=[0] # deceleration profile
xdec=[0] # positions
tdec=[0] # time
while v[-1]>0: # until speed has reached zero,
dc.append(-k3*v[-1]**2+k4*v[-1]+k5) # using deceleration curve from literature
v.append(v[-1]-(dc[-1]+dc[-2])/2*dt) # get speed for each time step
xdec.append(xdec[-1]+v[-1]*dt+(dc[-1]+dc[-2])/4*dt**2) # append to positions
tdec.append(tdec[-1]+dt) # append to times
# shift deceleration trajectory to match end of deceleration with beginning of stopped trajectory
tdeci=pd.Series(tdec)+leaddetection['stopt1']-tdec[-1]+stoplag
xdeci=pd.Series(xdec)+xdet['stop']-xdec[-1]
tcruise=(xdeci[0]-xdet['adv'])/v[0] # time spent cruising after advance detection before deceleration
textra=tdeci[0]-tcruise # extrapolated time of reaching advance detector from beginning of deceleration
tdelta=leaddetection['advt1']-textra # difference between advance detection time and extrapolated time
if xdeci[0]<xdet['adv']: # if deceleration started before advance detection
tdelta=leaddetection['advt1']-tdeci[abs(xdeci-xdet['adv']).idxmin()]
if tdelta>0: # if curve is earlier than detection
k3=k3-k3o*adj
k4=k4+k4o*adj
k5=k5+k5o*adj
else: # if curve is later than detection
k3=k3+k3o*adj
k4=k4-k4o*adj
k5=k5-k5o*adj
vdec=v
adec=dc
# stopped curve
tStopEnd=leaddetection['stopt2']
# acceleration curve
v=[0] # start at zero speed
tdelta=9999 # to be minimized
a=[0] # acceleration starts at 0
xacc=[0]
tacc=[0]
while v[-1]<v0:
a.append(beta0-beta1*v[-1])
v.append(v[-1]+(a[-1]+a[-2])/2*dt)
xacc.append(xacc[-1]+v[-1]*dt+(dc[-1]+dc[-2])/4*dt**2)
tacc.append(tacc[-1]+dt)
# shift acceleration curve to start at the end of the stopped curve
tacc+=tStopEnd-startlag
xacc=[xx+xdet['stop'] for xx in xacc]
aacc=a
vacc=v
# compile curves into lead trajectory
leadTraj['t']=list(tdeci)+list(tacc)
leadTraj['x']=list(xdeci)+list(xacc)
leadTraj['a']=list(adec)+list(aacc)
leadTraj['v']=list(vdec)+list(vacc)
leadTraj['vehn']=detection.loc[0,'vehn']
leadTraj['vtype']=detection.loc[0,'vtype']
### following trajectories
tstop=detection.loc[0,'stopt1'] # time preceding vehicle stops at stopbar
tgo=detection.loc[0,'stopt2'] # time preceding vehicle leaves from stopbar
xstop=xdet['stop']
traj={0:{'x':leadTraj['x'],'t':leadTraj['t'],'v':leadTraj['v'],'a':leadTraj['a'],'vehn':leadTraj['vehn'],'vtype':leadTraj['vtype']}}
for j in detection[1:].index:
traj[j]={'vehn':detection.loc[j,'vehn'],'vtype':detection.loc[j,'vtype']} # fill in vehicle number and type first
vapproach=(xdet['stop']-xdet['adv'])/(detection.loc[j,'stopt1']-detection.loc[j,'advt1']) # average speed approaching the intersection
h=detection.loc[j,'stopt1']-detection.loc[j-1,'stopt1'] # headway wrt preceding vehicle at stopbar
leff=leffs[detection.loc[j-1,'vtype']] # get effective length of preceding vehicle based on vehicle type
# find T (time interval) for Newell's model - time lag of trajectories
t2=detection.loc[j,'stopt2']
xlead=xdet['stop']+leff # effective position of preceding vehicle corresponding the following vehicle at the stopbar
index=abs(np.array(traj[j-1]['x'])-xlead).argmin() # find point in lead closest to stopbar
t1=traj[j-1]['t'][index] # time when preceding vehicle is at this position
if xlead>traj[j-1]['x'][-1]: # if lead acceleration curve ends before effective lead position,
t1=traj[j-1]['t'][-1]+(xdet['stop']+leff-traj[j-1]['x'][-1])/v0 # then extrapolate using cruise speed
tlag=t2-t1 # time interval for Newell's model
if vapproach<pstopcruise*v0: #check if vehicle stopped in queue
# offset end of stopped curve of preceding vehicle to get end of stopped curve for current following vehicle
tgo+=tlag
xstop-=leff
# shift deceleration curve spatially so that it ends where the stopped curve starts
xdeci=pd.Series(xdec).add(xstop-xdec[-1])
# shift deceleration curve temporally
if xdeci[0]>xdet['adv']: # if deceleration curve starts after the advance location
tdeci=pd.Series(tdec).add(detection.loc[j,'advt1']+(xdeci[0]-xdet['adv'])/v0) # match extrapolated upstream position to advance detector
# tdeci=pd.Series(detection.loc[j,'advt1']).append(tdeci)
# xdeci=pd.Series(xdet['adv']).append(xdeci)
else: # if deceleration curve starts before the advance location
tdelta=tdec[abs(np.array(xdeci-xdet['adv'])).argmin()]-tdec[0] # find the point closest to the advance detector
tdeci=pd.Series(tdec).add(detection.loc[j,'advt1']-tdelta) # shift deceleration curve to match this point to advance detector
#acceleration curve
v=[0] # speed vector
a=[0] # acceleration vector
xacc=[0] # position vector
tacc=[0] # time vector
tdelta=9999 # to be minimized
while v[-1]<v0: # until speed reaches cruise speed
a.append(beta0-beta1*v[-1]) # accleration of this time step, from literature
v.append(v[-1]+(a[-1]+a[-2])/2*dt) # append speed for each time step
xacc.append(xacc[-1]+v[-1]*dt+(a[-1]+dc[-2])/4*dt**2) # append to position vector
tacc.append(tacc[-1]+dt) # append to time vector
vacc=v
aacc=a
tacc+=tgo-startlag # shift start of acceleration to end of stopped curve, account for start lag
xacc=[xx+xstop for xx in xacc]
# put together decelerationg and acceleration curves
traj[j]['t']=list(tdeci)+list(tacc)
traj[j]['x']=list(xdeci)+list(xacc)
traj[j]['v']=list(vdec)+list(vacc)
traj[j]['a']=list(adec)+list(aacc)
elif h<hDis: #didn't stop but followed closely
# values at start of trajectory
t0=detection.loc[j,'advt1']
traj[j]={'t':[t0],'x':[xdet['adv']],'a':[0],'v':[v0],'vehn':detection.loc[j,'vehn'],'vtype':detection.loc[j,'vtype']}
for k in range(1,len(traj[j-1]['t'])): # offset point by point
# build following trajectory as time-space offset of preceding trajectory (Newell's model)
tfol=traj[j-1]['t'][k]+tlag # time offset
xfol=traj[j-1]['x'][k]-leff # position offset
traj[j]['x'].append(xfol)
tfolcruise=(traj[j]['x'][k]-traj[j]['x'][k-1])/v0+traj[j]['t'][k-1]
if tfolcruise>tfol: # if cruising gets the vehicle to the follow position later than the time offset, then assume cruising instead
tfol=tfolcruise
traj[j]['t'].append(tfol)
# calculate speed and acceleration per point increment
traj[j]['v'].append(traj[j]['x'][-1]-traj[j]['x'][-2]) # speed as change in position
traj[j]['a'].append(traj[j]['v'][-1]-traj[j]['v'][-2]) # acceleration as change in speed
else: #free flow vehicles
# straight trajectories from advance detector to stop detector
traj[j]={'t':[t0],'x':[xdet['adv']],'a':[0],'v':[v0],'vehn':detection.loc[j,'vehn'],'vtype':detection.loc[j,'vtype']}
ti=detection.loc[j,'advt1']
tf=detection.loc[j,'stopt1']
tflow=tf-ti
xi=xdet['adv']
xf=xdet['stop']
xflow=xf-xi
vflow=xflow/tflow
traj[j]['t']=np.arange(ti,ti+int(tflow),1)
points=len(traj[j]['t'])
traj[j]['x']=np.linspace(xi,xf,points)
traj[j]['v']=[v0]*points
traj[j]['a']=[0]*points
plt.plot(traj[j]['t'],traj[j]['x'],'g--')
else: # if first vehicle didn't stop, then trajectory is assumed to be straight from advance to stopbar
# lead trajectory
ti=detection.loc[0,'advt1']
tf=detection.loc[0,'stopt1']
tflow=tf-ti
xi=xdet['adv']
xf=xdet['stop']
xflow=xf-xi
vflow=xflow/tflow
leadTraj['t']=np.arange(ti,ti+int(tflow),1)
points=len(leadTraj['t'])
leadTraj['x']=np.linspace(xi,xf,points)
leadTraj['v']=[v0]*points
leadTraj['a']=[0]*points
leadTraj['vehn']=detection.loc[0,'vehn']
leadTraj['vtype']=detection.loc[0,'vtype']
traj={0:{'t':leadTraj['t'],'x':leadTraj['x'],'a':leadTraj['a'],'v':leadTraj['v'],'vehn':leadTraj['vehn'],'vtype':leadTraj['vtype']}}
for j in detection.index[1:]:
traj[j]={'vehn':detection.loc[j,'vehn'],'vtype':detection.loc[j,'vtype']} # fill in vehicle number and type first
ti=detection.loc[j,'advt1']
tf=detection.loc[j,'stopt1']
tflow=tf-ti
xi=xdet['adv']
xf=xdet['stop']
xflow=xf-xi
vflow=xflow/tflow
traj[j]['t']=np.arange(ti,ti+int(tflow),1)
points=len(traj[j]['t'])
traj[j]['x']=np.linspace(xi,xf,points)
traj[j]['v']=[v0]*points
traj[j]['a']=[0]*points
# plot lead trajectories
plt.plot(leadTraj['t'],leadTraj['x'],'r--')
# complete reconstructed trajectories for lane and add to cycle collection
for j in traj:
# filling for before the reconstructed trajectory (constant speed)
xfill=traj[j]['x'][0]
tfill=xfill/v0
fillpoints=int(tfill) # number points, i.e. seconds, to fill in before the trajectory
vfillList1=[v0]*fillpoints # array of speeds to fill in
afillList1=[0]*fillpoints # array of accelerations to fill in
# filling for after the reconstructed trajectory (constant speed)
xfill=xdet['end']-traj[j]['x'][-1]
tfill=xfill/v0
fillpoints=int(tfill) # number points, i.e. seconds, to fill in before the trajectory
vfillList2=[v0]*fillpoints # array of speeds to fill in
afillList2=[0]*fillpoints # array of accelerations to fill in
totalpoints=len(vfillList1)+len(traj[j]['v'])+len(vfillList2) # number of points in whole trajectory
# append to table of trajectory records for the cycle
speed.extend(vfillList1+traj[j]['v']+vfillList2) # join trajectories before, during, and after reconstructed portion
accel.extend(afillList1+traj[j]['v']+afillList2) # join trajectories before, during, and after reconstructed portion
no.append(traj[j]['vehn'])
vehtype.extend([traj[j]['vtype']]*totalpoints)
# linkID=c+1+s*100+inter_index*10000+direc_index*1000
# link.extend([linkID]*)
time_id=time()-time_id0
print('Processed '+str(p)+' '+i+' '+d+' in %.fs' %time_id)
f.savefig('TrajReconstruction-TestPlots/'+str(p)+str(i)+str(d)+'.png', bbox_inches='tight')
direc_index+=1
inter_index+=1
print('runtime = ' +str(time()-start_t))
| [
"anjieliu.al@gmail.com"
] | anjieliu.al@gmail.com |
cd1b616721d53514d80440788a48f49edc7432fc | 6189f34eff2831e3e727cd7c5e43bc5b591adffc | /WebMirror/management/rss_parser_funcs/feed_parse_extractMayomtlHomeBlog.py | b24bf15613f06fa3f5fec756e2f050bb98d368d9 | [
"BSD-3-Clause"
] | permissive | fake-name/ReadableWebProxy | 24603660b204a9e7965cfdd4a942ff62d7711e27 | ca2e086818433abc08c014dd06bfd22d4985ea2a | master | 2023-09-04T03:54:50.043051 | 2023-08-26T16:08:46 | 2023-08-26T16:08:46 | 39,611,770 | 207 | 20 | BSD-3-Clause | 2023-09-11T15:48:15 | 2015-07-24T04:30:43 | Python | UTF-8 | Python | false | false | 938 | py | def extractMayomtlHomeBlog(item):
    '''
    Parser for 'mayomtl.home.blog'

    Maps RSS item tags to series names. Returns a release message for a
    recognised tag, None for items without chapter/volume info or previews,
    and False when no tag matches.
    '''
    # split the post title into volume / chapter / fragment / postfix parts
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    # skip items with no extractable numbering, and preview posts
    if not (chp or vol) or "preview" in item['title'].lower():
        return None
    # (tag to match, series name to emit, translation type)
    tagmap = [
        ('My Lover Was Stolen, And I Was Kicked Out Of The Hero’s Party, But I Awakened To The EX Skill “Fixed Damage” And Became Invincible. Now, Let’s Begin Some Revenge',
         'My Lover Was Stolen, And I Was Kicked Out Of The Hero’s Party, But I Awakened To The EX Skill “Fixed Damage” And Became Invincible. Now, Let’s Begin Some Revenge', 'translated'),
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tagname, name, tl_type in tagmap:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
"something@fake-url.com"
] | something@fake-url.com |
ebf9e16d0dc1998f35d44ba5017f92cdab150035 | d77af24d09dc00a4b7d3e0bdc14b7d2727c96495 | /RouToolPa/Tools/AsseblyQC/PurgeDups.py | 68fc6d159a6e577e289bfe832a2558a7f6313423 | [] | no_license | mahajrod/RouToolPa | 14ee0f7fce78c53e8639e770caa6ffb0dfd82fce | 9b0cd0f0817a23cd3f37b3a55f83ce2d8abc71d8 | refs/heads/master | 2023-08-19T19:15:49.876175 | 2023-08-12T12:27:39 | 2023-08-12T12:27:39 | 181,844,151 | 0 | 3 | null | null | null | null | UTF-8 | Python | false | false | 5,532 | py | #!/usr/bin/env python
from pathlib import Path
import pandas as pd
import numpy as np
from RouToolPa.Routines import MathRoutines
from RouToolPa.Tools.Abstract import Tool
class PurgeDups(Tool):
def __init__(self, path="", max_threads=4):
Tool.__init__(self, "augustus", path=path, max_threads=max_threads)
def convert_coverage_file_to_bed(self, input_file, output_prefix):
length_dict = {}
coverage_dict = {}
mean_coverage_dict = {}
median_coverage_dict = {}
with self.metaopen(input_file, "r", buffering=100000000) as in_fd, \
self.metaopen(output_prefix + ".bed", "w", buffering=100000000) as out_fd:
scaffold, length = in_fd.readline()[1:].split()
length_dict[scaffold] = int(length)
coverage_dict[scaffold] = {}
for line in in_fd:
if line[0] == ">":
scaffold, length = line[1:].split()
length_dict[scaffold] = int(length)
coverage_dict[scaffold] = {}
continue
#print(line)
value_list = list(map(int, line.strip().split()))
value_list[0] -= 1 # convert to zero-based and half open coordinates
out_fd.write("{0}\t{1}\n".format(scaffold, "\t".join(map(str, value_list))))
#print(value_list)
if value_list[-1] not in coverage_dict[scaffold]:
coverage_dict[scaffold][value_list[-1]] = value_list[1] - value_list[0]
else:
coverage_dict[scaffold][value_list[-1]] += value_list[1] - value_list[0]
for scaffold in coverage_dict:
median_coverage_dict[scaffold] = MathRoutines.median_from_dict(coverage_dict[scaffold])
mean_coverage_dict[scaffold] = MathRoutines.mean_from_dict(coverage_dict[scaffold])
stat_df = pd.DataFrame.from_dict(length_dict, columns=["length", ], orient='index').sort_values(by=["length"], ascending=False)
stat_df.index.name = "scaffold"
stat_df["mean_cov"] = pd.Series(mean_coverage_dict)
stat_df["median_cov"] = pd.Series(median_coverage_dict)
stat_df.to_csv(output_prefix + ".stat", sep="\t", header=False, index=True)
stat_df[["length"]].to_csv(output_prefix + ".len", sep="\t", header=False, index=True)
return stat_df
def add_lengths_to_dups_bed(self, input_file, length_file, output_file):
if isinstance(length_file, (str, Path)):
length_df = pd.read_csv(length_file, sep="\t", header=None, index_col=0, names=["scaffold", "length"])
else:
length_df = length_file
dups_bed_df = pd.read_csv(input_file, sep="\t", header=None, index_col=0, names=["scaffold", "start", "end", "type", "overlapping_scaffold"])
dups_bed_df["overlap_len"] = dups_bed_df["end"] - dups_bed_df["start"]
dups_bed_df["scaffold_len"] = length_df["length"]
dups_bed_df["overlapping_scaffold_len"] = dups_bed_df["overlapping_scaffold"].apply(lambda s: length_df.loc[s, "length"])
dups_bed_df["overlap_faction"] = dups_bed_df["overlap_len"] / dups_bed_df["scaffold_len"]
dups_bed_df["overlap_faction_overlapping_scaffold"] = dups_bed_df["overlap_len"] / dups_bed_df["overlapping_scaffold_len"]
def count_fraction(df):
scaffold_len = df["scaffold_len"].iloc[0]
sorted_df = df[["start", "end"]].sort_values(by=["start", "end"])
fraction_df = [list(sorted_df.iloc[0])]
for row in sorted_df.itertuples(index=False):
if row[0] <= fraction_df[-1][1]:
if row[1] > fraction_df[-1][1]:
fraction_df[-1][1] = row[1]
else:
fraction_df.append(list(row))
fraction_df = pd.DataFrame(fraction_df, columns=["start", "end"])
fraction_df["fraction"] = (fraction_df["end"] - fraction_df["start"]) / scaffold_len
return sum(fraction_df["fraction"])
haplo_fraction_df = dups_bed_df[["start", "end", "scaffold_len"]].groupby(by='scaffold').apply(count_fraction)
dups_bed_df["cumulative_overlap_fraction"] = haplo_fraction_df
with open(output_file, "w") as out_fd:
out_fd.write("#{0}\n".format("\t".join(["scaffold", "start", "end", "type", "overlapping_scaffold",
"overlap_len", "scaffold_len", "overlapping_scaffold_len",
"overlap_faction", "overlap_faction_overlapping_scaffold",
"cumulative_overlap_fraction"])))
dups_bed_df.to_csv(out_fd, sep="\t", header=False, index=True, na_rep=".")
#print(haplo_fraction_df)
return dups_bed_df
"""
def count_contig_fraction_in_haplotype(self, input_file_with_len, output_file):
if isinstance(input_file_with_len, (str, Path)):
haplo_df = pd.read_csv(input_file_with_len, sep="\t", header=None, index_col=0,
names=["scaffold", "start", "end", "type", "overlapping_scaffold",
"overlap_len", "scaffold_len", "overlapping_scaffold_len",
"overlap_faction,%", "overlap_faction_overlapping_scaffold,%"])
else:
haplo_df = input_file_with_len
print(haplo_df)
"""
| [
"mahajrod@gmail.com"
] | mahajrod@gmail.com |
b7fb815163f10caed661bab7ac841a405f0475fe | f82be98c5db626a735c92098f00d010af4220bc0 | /Python/Itertool/combinations.py | 0cc92f0a38da9198ea30a66823592f8c8c7ee61d | [] | no_license | DgrinderHZ/Hackerrank | 99acebd521019db2f97bf68dbc2b4e284ff44bb5 | cde695394a83af40f4d4f95002655800042304c2 | refs/heads/master | 2023-02-09T21:02:31.533087 | 2023-01-31T16:53:32 | 2023-01-31T16:53:32 | 172,239,966 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 250 | py | # Enter your code here. Read input from STDIN. Print output to STDOUT
from itertools import combinations
s , k = input().split()
for i in range(int(k)):
perm = list(combinations(sorted(s), i+1))
for item in perm:
print("".join(item))
| [
"noreply@github.com"
] | DgrinderHZ.noreply@github.com |
e69668b7fe317b961e4f110bbcad61f23894e6e4 | 1fe958f942e5f0bf34705f2d2cb6f79df05e99da | /amipy/subcmd/help.py | f9c40516322aa099818b14512e52763244753319 | [
"MIT"
] | permissive | ChenghuaMi/Amipy | 5a91eeb16926b5827e31720c4d553d93a811d127 | 254ef5cf1b34f9671ec8eecc3ee61633644b3ff8 | refs/heads/master | 2022-09-02T21:19:45.651123 | 2020-05-29T06:15:19 | 2020-05-29T06:15:19 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,102 | py |
from amipy.BaseClass import SpiderClientCommand
class SCommand(SpiderClientCommand):
def parse(self,cmdnae,args,spiders):
prompt = '''All commands of the Spider-Client:
\r\n* Usage:
\r\n <command> [spider name]
\r\n* Available commands:
\r\n show spiders show all running spiders and their conditions.
\r\n list list a general situation of all spiders.
\r\n echo echo a running spider and its attributes.
\r\n pause pause a running spider by a give name.
\r\n stop stop a running/paused spider by a give name.
\r\n close close a spider by a give name.
\r\n restart restart a stopped spider by a give name.
\r\n resume resume a paused spider by a give name.
\r\n quit quit the Spider-Client.
\r\n help show all the available commands usage.
'''
return prompt
| [
"opensrc@126.com"
] | opensrc@126.com |
c387e022354c89f96838c5697b5b229da319c648 | 8afb5afd38548c631f6f9536846039ef6cb297b9 | /MY_REPOS/JAMSTACK-TEMPLATES/_EVENMOAR/USERS/GREGBER/argos/examples/with-django/screenshot/urls.py | 95701a96abd2542e4163a96a56675fda05ead35c | [
"MIT"
] | permissive | bgoonz/UsefulResourceRepo2.0 | d87588ffd668bb498f7787b896cc7b20d83ce0ad | 2cb4b45dd14a230aa0e800042e893f8dfb23beda | refs/heads/master | 2023-03-17T01:22:05.254751 | 2022-08-11T03:18:22 | 2022-08-11T03:18:22 | 382,628,698 | 10 | 12 | MIT | 2022-10-10T14:13:54 | 2021-07-03T13:58:52 | null | UTF-8 | Python | false | false | 108 | py | from django.conf.urls import url
from . import views
urlpatterns = [url(r"^$", views.index, name="index")]
| [
"bryan.guner@gmail.com"
] | bryan.guner@gmail.com |
7e6fecd2800ad54f231f600117e44305c20454f4 | f7a4cbd50e391af66b76d58994b50b6baaeb2255 | /Problem 58/problem_58.py | 9b7b3514d9d06a47faa3f59c480a07689dec3953 | [] | no_license | orralacm/LeetCode | 3754df2a220fbfa900185c5b3823d73613e462b3 | 869d539105da5af8452fa67f63bd6e408c1bf72a | refs/heads/main | 2023-08-07T19:14:44.067421 | 2021-09-16T04:48:40 | 2021-09-16T04:48:40 | 380,321,737 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 370 | py |
s = "Hello World"
word = ""
if (s[-1] == " ") :
word = ""
print(0)
else :
for i in range (1, len(s) + 1) :
print(f"value of i: {i}")
r = (-1) * i
print(f"value of r: {r}")
if (s[r] == " ") :
break
word += s[r]
print(f"word is: {word}")
l = len(word)
print(f"lenght of last word is: {l}")
| [
"orrala@live.com.mx"
] | orrala@live.com.mx |
44b27378b4ea4a56ee40c98ad8fa798406f849ae | 63b79c404d83e4980891c488f4d9592558ecda35 | /assets/src/ba_data/python/bastd/game/kingofthehill.py | 1e8b361031e049a49962e8a2f2e17f3990011e82 | [
"MIT"
] | permissive | kakekakeka/ballistica | 56e8879cd5b4b990e5e05da3dfd300d7cbb45446 | 3ffeff8ce401a00128363ff08b406471092adaa9 | refs/heads/master | 2022-11-14T08:11:57.160782 | 2020-07-01T05:43:13 | 2020-07-01T05:49:44 | 276,755,445 | 2 | 0 | MIT | 2020-07-02T22:18:37 | 2020-07-02T22:18:36 | null | UTF-8 | Python | false | false | 11,065 | py | # Copyright (c) 2011-2020 Eric Froemling
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# -----------------------------------------------------------------------------
"""Defines the King of the Hill game."""
# ba_meta require api 6
# (see https://ballistica.net/wiki/meta-tag-system)
from __future__ import annotations
import weakref
from enum import Enum
from typing import TYPE_CHECKING
import ba
from bastd.actor.flag import Flag
from bastd.actor.playerspaz import PlayerSpaz
from bastd.actor.scoreboard import Scoreboard
from bastd.gameutils import SharedObjects
if TYPE_CHECKING:
from weakref import ReferenceType
from typing import Any, Type, List, Dict, Optional, Sequence, Union
class FlagState(Enum):
"""States our single flag can be in."""
NEW = 0
UNCONTESTED = 1
CONTESTED = 2
HELD = 3
class Player(ba.Player['Team']):
"""Our player type for this game."""
def __init__(self) -> None:
self.time_at_flag = 0
class Team(ba.Team[Player]):
"""Our team type for this game."""
def __init__(self, time_remaining: int) -> None:
self.time_remaining = time_remaining
# ba_meta export game
class KingOfTheHillGame(ba.TeamGameActivity[Player, Team]):
"""Game where a team wins by holding a 'hill' for a set amount of time."""
name = 'King of the Hill'
description = 'Secure the flag for a set length of time.'
available_settings = [
ba.IntSetting(
'Hold Time',
min_value=10,
default=30,
increment=10,
),
ba.IntChoiceSetting(
'Time Limit',
choices=[
('None', 0),
('1 Minute', 60),
('2 Minutes', 120),
('5 Minutes', 300),
('10 Minutes', 600),
('20 Minutes', 1200),
],
default=0,
),
ba.FloatChoiceSetting(
'Respawn Times',
choices=[
('Shorter', 0.25),
('Short', 0.5),
('Normal', 1.0),
('Long', 2.0),
('Longer', 4.0),
],
default=1.0,
),
]
scoreconfig = ba.ScoreConfig(label='Time Held')
@classmethod
def supports_session_type(cls, sessiontype: Type[ba.Session]) -> bool:
return issubclass(sessiontype, ba.MultiTeamSession)
@classmethod
def get_supported_maps(cls, sessiontype: Type[ba.Session]) -> List[str]:
return ba.getmaps('king_of_the_hill')
def __init__(self, settings: dict):
super().__init__(settings)
shared = SharedObjects.get()
self._scoreboard = Scoreboard()
self._swipsound = ba.getsound('swip')
self._tick_sound = ba.getsound('tick')
self._countdownsounds = {
10: ba.getsound('announceTen'),
9: ba.getsound('announceNine'),
8: ba.getsound('announceEight'),
7: ba.getsound('announceSeven'),
6: ba.getsound('announceSix'),
5: ba.getsound('announceFive'),
4: ba.getsound('announceFour'),
3: ba.getsound('announceThree'),
2: ba.getsound('announceTwo'),
1: ba.getsound('announceOne')
}
self._flag_pos: Optional[Sequence[float]] = None
self._flag_state: Optional[FlagState] = None
self._flag: Optional[Flag] = None
self._flag_light: Optional[ba.Node] = None
self._scoring_team: Optional[ReferenceType[Team]] = None
self._hold_time = int(settings['Hold Time'])
self._time_limit = float(settings['Time Limit'])
self._flag_region_material = ba.Material()
self._flag_region_material.add_actions(
conditions=('they_have_material', shared.player_material),
actions=(
('modify_part_collision', 'collide', True),
('modify_part_collision', 'physical', False),
('call', 'at_connect',
ba.Call(self._handle_player_flag_region_collide, True)),
('call', 'at_disconnect',
ba.Call(self._handle_player_flag_region_collide, False)),
))
# Base class overrides.
self.default_music = ba.MusicType.SCARY
def get_instance_description(self) -> Union[str, Sequence]:
return 'Secure the flag for ${ARG1} seconds.', self._hold_time
def get_instance_description_short(self) -> Union[str, Sequence]:
return 'secure the flag for ${ARG1} seconds', self._hold_time
def create_team(self, sessionteam: ba.SessionTeam) -> Team:
return Team(time_remaining=self._hold_time)
def on_begin(self) -> None:
super().on_begin()
shared = SharedObjects.get()
self.setup_standard_time_limit(self._time_limit)
self.setup_standard_powerup_drops()
self._flag_pos = self.map.get_flag_position(None)
ba.timer(1.0, self._tick, repeat=True)
self._flag_state = FlagState.NEW
Flag.project_stand(self._flag_pos)
self._flag = Flag(position=self._flag_pos,
touchable=False,
color=(1, 1, 1))
self._flag_light = ba.newnode('light',
attrs={
'position': self._flag_pos,
'intensity': 0.2,
'height_attenuated': False,
'radius': 0.4,
'color': (0.2, 0.2, 0.2)
})
# Flag region.
flagmats = [self._flag_region_material, shared.region_material]
ba.newnode('region',
attrs={
'position': self._flag_pos,
'scale': (1.8, 1.8, 1.8),
'type': 'sphere',
'materials': flagmats
})
self._update_flag_state()
def _tick(self) -> None:
self._update_flag_state()
# Give holding players points.
for player in self.players:
if player.time_at_flag > 0:
self.stats.player_scored(player,
3,
screenmessage=False,
display=False)
if self._scoring_team is None:
scoring_team = None
else:
scoring_team = self._scoring_team()
if scoring_team:
if scoring_team.time_remaining > 0:
ba.playsound(self._tick_sound)
scoring_team.time_remaining = max(0,
scoring_team.time_remaining - 1)
self._update_scoreboard()
if scoring_team.time_remaining > 0:
assert self._flag is not None
self._flag.set_score_text(str(scoring_team.time_remaining))
# Announce numbers we have sounds for.
numsound = self._countdownsounds.get(scoring_team.time_remaining)
if numsound is not None:
ba.playsound(numsound)
# winner
if scoring_team.time_remaining <= 0:
self.end_game()
def end_game(self) -> None:
results = ba.GameResults()
for team in self.teams:
results.set_team_score(team, self._hold_time - team.time_remaining)
self.end(results=results, announce_delay=0)
def _update_flag_state(self) -> None:
holding_teams = set(player.team for player in self.players
if player.time_at_flag)
prev_state = self._flag_state
assert self._flag_light
assert self._flag is not None
assert self._flag.node
if len(holding_teams) > 1:
self._flag_state = FlagState.CONTESTED
self._scoring_team = None
self._flag_light.color = (0.6, 0.6, 0.1)
self._flag.node.color = (1.0, 1.0, 0.4)
elif len(holding_teams) == 1:
holding_team = list(holding_teams)[0]
self._flag_state = FlagState.HELD
self._scoring_team = weakref.ref(holding_team)
self._flag_light.color = ba.normalized_color(holding_team.color)
self._flag.node.color = holding_team.color
else:
self._flag_state = FlagState.UNCONTESTED
self._scoring_team = None
self._flag_light.color = (0.2, 0.2, 0.2)
self._flag.node.color = (1, 1, 1)
if self._flag_state != prev_state:
ba.playsound(self._swipsound)
def _handle_player_flag_region_collide(self, colliding: bool) -> None:
try:
player = ba.getcollision().opposingnode.getdelegate(
PlayerSpaz, True).getplayer(Player, True)
except ba.NotFoundError:
return
# Different parts of us can collide so a single value isn't enough
# also don't count it if we're dead (flying heads shouldn't be able to
# win the game :-)
if colliding and player.is_alive():
player.time_at_flag += 1
else:
player.time_at_flag = max(0, player.time_at_flag - 1)
self._update_flag_state()
def _update_scoreboard(self) -> None:
for team in self.teams:
self._scoreboard.set_team_value(team,
team.time_remaining,
self._hold_time,
countdown=True)
def handlemessage(self, msg: Any) -> Any:
if isinstance(msg, ba.PlayerDiedMessage):
super().handlemessage(msg) # Augment default.
# No longer can count as time_at_flag once dead.
player = msg.getplayer(Player)
player.time_at_flag = 0
self._update_flag_state()
self.respawn_player(player)
| [
"ericfroemling@gmail.com"
] | ericfroemling@gmail.com |
60d12a66b66ecb932b0a33d1d9d6a287d4675859 | 24b08ee8856a791f61b5943179beb39e74a2f1ac | /coordinate_conversion.py | c310e7540e7ab4abab36340b3f8ff2345633b8da | [] | no_license | jgibson2/HaploCluster | 39266d5f56cdde9ce6a1a39cba2fa5328eaa0f55 | eb65399ea421a3238304990b73508f595e82e7f1 | refs/heads/master | 2021-01-23T12:15:48.353657 | 2017-08-09T15:11:36 | 2017-08-09T15:11:36 | 95,608,669 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 231 | py | import sys
with open(sys.argv[1], 'r') as f:
for line in f:
line = line.strip()
if line[0] == '#':
print(line)
else:
data = line.split('\t')
data[1] = str(int(sys.argv[2]) + int(data[1]))
print('\t'.join(data))
| [
"johngibson@wustl.edu"
] | johngibson@wustl.edu |
a31e0c49cbba5e45a39a6fef1c35198454eac3b3 | 44a7473404d37a3f5c73cbcdf88be55564e580bb | /121_word-ladder-ii/word-ladder-ii.py | 07104867d5e6cc1bf312c512b69bfb50106d1741 | [] | no_license | frankobe/lintcode | dbc10befc8055c55b2ca9716aa3dfa238b58aaa9 | db131f968944b8140f07a8e5765fea55c72da6ba | refs/heads/master | 2021-06-07T18:32:12.338879 | 2016-11-22T09:05:37 | 2016-11-22T09:05:37 | 20,018,761 | 3 | 2 | null | null | null | null | UTF-8 | Python | false | false | 1,264 | py | # coding:utf-8
'''
@Copyright:LintCode
@Author: frankobe
@Problem: http://www.lintcode.com/problem/word-ladder-ii
@Language: Python
@Datetime: 15-09-10 00:37
'''
from string import ascii_lowercase
from collections import defaultdict
class Solution:
# @param start, a string
# @param end, a string
# @param dict, a set of string
# @return a list of lists of string
def findLadders(self, start, end, dict):
dict.add(end)
level = {start}
size = len(start)
parents = defaultdict(set)
while level and end not in parents:
next_level = defaultdict(set)
for node in level:
for char in ascii_lowercase:
for i in range(size):
n = node[:i]+char+node[i+1:]
if n in dict and n not in parents:
next_level[n].add(node)
level = next_level
parents.update(next_level)
res = [[end]]
while res and res[0][0] != start:
res = [[p]+r for r in res for p in parents[r[0]]]
return res
| [
"mua08p@gmail.com"
] | mua08p@gmail.com |
f3d0b596efd1fe69fa027e5270f9228cab68fa12 | fc4be31912202754c8f3fd5808f50c58a509e1b5 | /servicios/migrations/0001_initial.py | f49f0c54b48b679f076e01351bcc291ca73f1778 | [] | no_license | GermanMoran/Tienda-django | 8af03b332738dcb47233c22dbc618e6d930c5a00 | 82e24bb26f74cc027c5a9609f75e37ead0f12fdd | refs/heads/main | 2023-07-06T21:11:08.044476 | 2021-08-06T21:07:25 | 2021-08-06T21:07:25 | 393,171,299 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 891 | py | # Generated by Django 3.2.4 on 2021-07-01 19:32
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='servicios',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('titulo', models.CharField(max_length=50)),
('contenido', models.CharField(max_length=50)),
('imagen', models.ImageField(upload_to='')),
('created', models.DateTimeField(auto_now_add=True)),
('updated', models.DateTimeField(auto_now_add=True)),
],
options={
'verbose_name': 'servicio',
'verbose_name_plural': 'servicios',
},
),
]
| [
"homerogpm@gmail.com"
] | homerogpm@gmail.com |
18dec0e6479ce44069f5db5b04caaca1f3fc6b65 | 6c26658439bf988dd59d2f88dbf17cc6d0bfbe5f | /dining_philosopher(deadlock).py | c9562325d0042b9d0b599ee31e1fb2ad917dd2cc | [] | no_license | muks97/dining-philosopher | d5f3a29d30402b9e81e2df9ae58ad7dd4b1153c3 | 8bae028bb34cc4eaf02f4f3c9a8ac8224630c04f | refs/heads/master | 2022-04-19T21:49:57.354522 | 2020-04-19T10:33:20 | 2020-04-19T10:33:20 | 256,971,865 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,295 | py | """
Date: 04/19/2020
Class: CS5541
Assignment: Dining Philosophers
Author: Mukesh Viswanathan
"""
import sys
import threading
import time
class Semaphore(object):
def __init__(self, initial):
self.lock = threading.Condition(threading.Lock())
self.value = initial
def up(self):
with self.lock:
self.value += 1
self.lock.notify()
def down(self):
with self.lock:
while self.value == 0:
self.lock.wait()
self.value -= 1
class ChopStick(object):
def __init__(self, number):
self.number = number # chop stick ID
self.user = -1 # keep track of philosopher using it
self.lock = threading.Condition(threading.Lock())
self.taken = False
def take(self, user): # used for synchronization
with self.lock:
while self.taken == True:
self.lock.wait()
self.user = user
self.taken = True
sys.stdout.write("p[%s] took c[%s]\n" % (user, self.number))
self.lock.notifyAll()
def drop(self, user): # used for synchronization
with self.lock:
while self.taken == False:
self.lock.wait()
self.user = -1
self.taken = False
sys.stdout.write("p[%s] dropped c[%s]\n" % (user, self.number))
self.lock.notifyAll()
class Philosopher (threading.Thread):
def __init__(self, number, left, right, butler):
threading.Thread.__init__(self)
self.number = number # philosopher number
self.left = left
self.right = right
self.butler = butler
def run(self):
for i in range(20):
self.butler.down() # start service by butler
time.sleep(0.1) # think
self.left.take(self.number) # pickup left chopstick
time.sleep(0.1) # (yield makes deadlock more likely)
failed =self.right.take(self.number) # pickup right chopstick
if(not failed):
print("waiting")
time.sleep(0)
#self.number=(self.number+1)
time.sleep(0.1)
for i in range(0,2):
if self.number==i:
self.right.drop(i) # drop right chopstick
self.left.drop(i) # drop left chopstick
self.butler.up()
# end service by butler
sys.stdout.write("p[%s] finished thinking and eating\n" % self.number)
print(" Longer wait time...Process is in Deadlock")
def main():
# number of philosophers / chop sticks
n = 5
# butler for deadlock avoidance (n-1 available)
butler = Semaphore(n-1)
# list of chopsticks
c = [ChopStick(i) for i in range(n)]
#k= [Philosopher(i, c[i], c[(i+1)%n], butler) for i in range(n)]
# list of philsophers
p = [Philosopher(i, c[i], c[(i+1)%n], butler) for i in range(n)]
print("phase 2")
print("------------Deadlock Vulnerability----------------")
for i in range(n):
p[i].start()
if __name__ == "__main__":
main() | [
"noreply@github.com"
] | muks97.noreply@github.com |
761f351aabe14cde3bd1b7c3d625b2c8bd592c4b | dba522d0d9f1677672af03c81a0118565158c659 | /Gen/ChannelService.py | 423c7570a96b50561420ca5f4321bcbb471f7c70 | [] | no_license | fossabot/LineAlpha-Full-Ver | c6fefbf0d1d69b744c2913e0e1fd51ade5f931d5 | cabe9ab158d358ddb92195855ff07c7d483c6c20 | refs/heads/master | 2022-12-18T07:44:02.743358 | 2020-09-14T12:31:57 | 2020-09-14T12:31:57 | 295,410,125 | 0 | 0 | null | 2020-09-14T12:31:45 | 2020-09-14T12:31:44 | null | UTF-8 | Python | false | true | 127,570 | py | #
# Autogenerated by Thrift Compiler (0.9.3)
#
# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
#
# options string: py
#
from thrift.Thrift import TType, TMessageType, TException, TApplicationException
import logging
from ttypes import *
from thrift.Thrift import TProcessor
from thrift.transport import TTransport
from thrift.protocol import TBinaryProtocol, TProtocol
try:
from thrift.protocol import fastbinary
except:
fastbinary = None
class Iface:
def approveChannelAndIssueChannelToken(self, channelId):
"""
Parameters:
- channelId
"""
pass
def approveChannelAndIssueRequestToken(self, channelId, otpId):
"""
Parameters:
- channelId
- otpId
"""
pass
def fetchNotificationItems(self, localRev):
"""
Parameters:
- localRev
"""
pass
def getApprovedChannels(self, lastSynced, locale):
"""
Parameters:
- lastSynced
- locale
"""
pass
def getChannelInfo(self, channelId, locale):
"""
Parameters:
- channelId
- locale
"""
pass
def getChannelNotificationSetting(self, channelId, locale):
"""
Parameters:
- channelId
- locale
"""
pass
def getChannelNotificationSettings(self, locale):
"""
Parameters:
- locale
"""
pass
def getChannels(self, lastSynced, locale):
"""
Parameters:
- lastSynced
- locale
"""
pass
def getDomains(self, lastSynced):
"""
Parameters:
- lastSynced
"""
pass
def getFriendChannelMatrices(self, channelIds):
"""
Parameters:
- channelIds
"""
pass
def getNotificationBadgeCount(self, localRev):
"""
Parameters:
- localRev
"""
pass
def issueChannelToken(self, channelId):
"""
Parameters:
- channelId
"""
pass
def issueRequestToken(self, channelId, otpId):
"""
Parameters:
- channelId
- otpId
"""
pass
def issueRequestTokenWithAuthScheme(self, channelId, otpId, authScheme, returnUrl):
"""
Parameters:
- channelId
- otpId
- authScheme
- returnUrl
"""
pass
def reserveCoinUse(self, request, locale):
"""
Parameters:
- request
- locale
"""
pass
def revokeChannel(self, channelId):
"""
Parameters:
- channelId
"""
pass
def syncChannelData(self, lastSynced, locale):
"""
Parameters:
- lastSynced
- locale
"""
pass
def updateChannelNotificationSetting(self, setting):
"""
Parameters:
- setting
"""
pass
class Client(Iface):
def __init__(self, iprot, oprot=None):
self._iprot = self._oprot = iprot
if oprot is not None:
self._oprot = oprot
self._seqid = 0
def approveChannelAndIssueChannelToken(self, channelId):
"""
Parameters:
- channelId
"""
self.send_approveChannelAndIssueChannelToken(channelId)
return self.recv_approveChannelAndIssueChannelToken()
def send_approveChannelAndIssueChannelToken(self, channelId):
self._oprot.writeMessageBegin('approveChannelAndIssueChannelToken', TMessageType.CALL, self._seqid)
args = approveChannelAndIssueChannelToken_args()
args.channelId = channelId
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_approveChannelAndIssueChannelToken(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = approveChannelAndIssueChannelToken_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
if result.e is not None:
raise result.e
raise TApplicationException(TApplicationException.MISSING_RESULT, "approveChannelAndIssueChannelToken failed: unknown result")
def approveChannelAndIssueRequestToken(self, channelId, otpId):
"""
Parameters:
- channelId
- otpId
"""
self.send_approveChannelAndIssueRequestToken(channelId, otpId)
return self.recv_approveChannelAndIssueRequestToken()
def send_approveChannelAndIssueRequestToken(self, channelId, otpId):
self._oprot.writeMessageBegin('approveChannelAndIssueRequestToken', TMessageType.CALL, self._seqid)
args = approveChannelAndIssueRequestToken_args()
args.channelId = channelId
args.otpId = otpId
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_approveChannelAndIssueRequestToken(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = approveChannelAndIssueRequestToken_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
if result.e is not None:
raise result.e
raise TApplicationException(TApplicationException.MISSING_RESULT, "approveChannelAndIssueRequestToken failed: unknown result")
def fetchNotificationItems(self, localRev):
"""
Parameters:
- localRev
"""
self.send_fetchNotificationItems(localRev)
return self.recv_fetchNotificationItems()
def send_fetchNotificationItems(self, localRev):
self._oprot.writeMessageBegin('fetchNotificationItems', TMessageType.CALL, self._seqid)
args = fetchNotificationItems_args()
args.localRev = localRev
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_fetchNotificationItems(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = fetchNotificationItems_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
if result.e is not None:
raise result.e
raise TApplicationException(TApplicationException.MISSING_RESULT, "fetchNotificationItems failed: unknown result")
def getApprovedChannels(self, lastSynced, locale):
"""
Parameters:
- lastSynced
- locale
"""
self.send_getApprovedChannels(lastSynced, locale)
return self.recv_getApprovedChannels()
def send_getApprovedChannels(self, lastSynced, locale):
self._oprot.writeMessageBegin('getApprovedChannels', TMessageType.CALL, self._seqid)
args = getApprovedChannels_args()
args.lastSynced = lastSynced
args.locale = locale
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_getApprovedChannels(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = getApprovedChannels_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
if result.e is not None:
raise result.e
raise TApplicationException(TApplicationException.MISSING_RESULT, "getApprovedChannels failed: unknown result")
def getChannelInfo(self, channelId, locale):
"""
Parameters:
- channelId
- locale
"""
self.send_getChannelInfo(channelId, locale)
return self.recv_getChannelInfo()
def send_getChannelInfo(self, channelId, locale):
self._oprot.writeMessageBegin('getChannelInfo', TMessageType.CALL, self._seqid)
args = getChannelInfo_args()
args.channelId = channelId
args.locale = locale
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_getChannelInfo(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = getChannelInfo_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
if result.e is not None:
raise result.e
raise TApplicationException(TApplicationException.MISSING_RESULT, "getChannelInfo failed: unknown result")
def getChannelNotificationSetting(self, channelId, locale):
"""
Parameters:
- channelId
- locale
"""
self.send_getChannelNotificationSetting(channelId, locale)
return self.recv_getChannelNotificationSetting()
def send_getChannelNotificationSetting(self, channelId, locale):
self._oprot.writeMessageBegin('getChannelNotificationSetting', TMessageType.CALL, self._seqid)
args = getChannelNotificationSetting_args()
args.channelId = channelId
args.locale = locale
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_getChannelNotificationSetting(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = getChannelNotificationSetting_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
if result.e is not None:
raise result.e
raise TApplicationException(TApplicationException.MISSING_RESULT, "getChannelNotificationSetting failed: unknown result")
  def getChannelNotificationSettings(self, locale):
    """
    Blocking RPC: fetch notification settings for all channels.

    Parameters:
     - locale
    """
    self.send_getChannelNotificationSettings(locale)
    return self.recv_getChannelNotificationSettings()

  def send_getChannelNotificationSettings(self, locale):
    # Serialize and flush the request frame.
    self._oprot.writeMessageBegin('getChannelNotificationSettings', TMessageType.CALL, self._seqid)
    args = getChannelNotificationSettings_args()
    args.locale = locale
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()

  def recv_getChannelNotificationSettings(self):
    iprot = self._iprot
    (fname, mtype, rseqid) = iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      # Protocol-level failure from the server.
      x = TApplicationException()
      x.read(iprot)
      iprot.readMessageEnd()
      raise x
    result = getChannelNotificationSettings_result()
    result.read(iprot)
    iprot.readMessageEnd()
    if result.success is not None:
      return result.success
    if result.e is not None:
      # Declared service exception.
      raise result.e
    # Reply carried neither a value nor an exception.
    raise TApplicationException(TApplicationException.MISSING_RESULT, "getChannelNotificationSettings failed: unknown result")
  def getChannels(self, lastSynced, locale):
    """
    Blocking RPC: fetch channels changed since `lastSynced`.

    Parameters:
     - lastSynced
     - locale
    """
    self.send_getChannels(lastSynced, locale)
    return self.recv_getChannels()

  def send_getChannels(self, lastSynced, locale):
    # Serialize and flush the request frame.
    self._oprot.writeMessageBegin('getChannels', TMessageType.CALL, self._seqid)
    args = getChannels_args()
    args.lastSynced = lastSynced
    args.locale = locale
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()

  def recv_getChannels(self):
    iprot = self._iprot
    (fname, mtype, rseqid) = iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      # Protocol-level failure from the server.
      x = TApplicationException()
      x.read(iprot)
      iprot.readMessageEnd()
      raise x
    result = getChannels_result()
    result.read(iprot)
    iprot.readMessageEnd()
    if result.success is not None:
      return result.success
    if result.e is not None:
      # Declared service exception.
      raise result.e
    # Reply carried neither a value nor an exception.
    raise TApplicationException(TApplicationException.MISSING_RESULT, "getChannels failed: unknown result")
  def getDomains(self, lastSynced):
    """
    Blocking RPC: fetch domains changed since `lastSynced`.

    Parameters:
     - lastSynced
    """
    self.send_getDomains(lastSynced)
    return self.recv_getDomains()

  def send_getDomains(self, lastSynced):
    # Serialize and flush the request frame.
    self._oprot.writeMessageBegin('getDomains', TMessageType.CALL, self._seqid)
    args = getDomains_args()
    args.lastSynced = lastSynced
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()

  def recv_getDomains(self):
    iprot = self._iprot
    (fname, mtype, rseqid) = iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      # Protocol-level failure from the server.
      x = TApplicationException()
      x.read(iprot)
      iprot.readMessageEnd()
      raise x
    result = getDomains_result()
    result.read(iprot)
    iprot.readMessageEnd()
    if result.success is not None:
      return result.success
    if result.e is not None:
      # Declared service exception.
      raise result.e
    # Reply carried neither a value nor an exception.
    raise TApplicationException(TApplicationException.MISSING_RESULT, "getDomains failed: unknown result")
  def getFriendChannelMatrices(self, channelIds):
    """
    Blocking RPC: fetch friend/channel matrices for the given channel ids.

    Parameters:
     - channelIds
    """
    self.send_getFriendChannelMatrices(channelIds)
    return self.recv_getFriendChannelMatrices()

  def send_getFriendChannelMatrices(self, channelIds):
    # Serialize and flush the request frame.
    self._oprot.writeMessageBegin('getFriendChannelMatrices', TMessageType.CALL, self._seqid)
    args = getFriendChannelMatrices_args()
    args.channelIds = channelIds
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()

  def recv_getFriendChannelMatrices(self):
    iprot = self._iprot
    (fname, mtype, rseqid) = iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      # Protocol-level failure from the server.
      x = TApplicationException()
      x.read(iprot)
      iprot.readMessageEnd()
      raise x
    result = getFriendChannelMatrices_result()
    result.read(iprot)
    iprot.readMessageEnd()
    if result.success is not None:
      return result.success
    if result.e is not None:
      # Declared service exception.
      raise result.e
    # Reply carried neither a value nor an exception.
    raise TApplicationException(TApplicationException.MISSING_RESULT, "getFriendChannelMatrices failed: unknown result")
  def getNotificationBadgeCount(self, localRev):
    """
    Blocking RPC: fetch the notification badge count since `localRev`.

    Parameters:
     - localRev
    """
    self.send_getNotificationBadgeCount(localRev)
    return self.recv_getNotificationBadgeCount()

  def send_getNotificationBadgeCount(self, localRev):
    # Serialize and flush the request frame.
    self._oprot.writeMessageBegin('getNotificationBadgeCount', TMessageType.CALL, self._seqid)
    args = getNotificationBadgeCount_args()
    args.localRev = localRev
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()

  def recv_getNotificationBadgeCount(self):
    iprot = self._iprot
    (fname, mtype, rseqid) = iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      # Protocol-level failure from the server.
      x = TApplicationException()
      x.read(iprot)
      iprot.readMessageEnd()
      raise x
    result = getNotificationBadgeCount_result()
    result.read(iprot)
    iprot.readMessageEnd()
    if result.success is not None:
      return result.success
    if result.e is not None:
      # Declared service exception.
      raise result.e
    # Reply carried neither a value nor an exception.
    raise TApplicationException(TApplicationException.MISSING_RESULT, "getNotificationBadgeCount failed: unknown result")
  def issueChannelToken(self, channelId):
    """
    Blocking RPC: issue a channel token for the given channel.

    Parameters:
     - channelId
    """
    self.send_issueChannelToken(channelId)
    return self.recv_issueChannelToken()

  def send_issueChannelToken(self, channelId):
    # Serialize and flush the request frame.
    self._oprot.writeMessageBegin('issueChannelToken', TMessageType.CALL, self._seqid)
    args = issueChannelToken_args()
    args.channelId = channelId
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()

  def recv_issueChannelToken(self):
    iprot = self._iprot
    (fname, mtype, rseqid) = iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      # Protocol-level failure from the server.
      x = TApplicationException()
      x.read(iprot)
      iprot.readMessageEnd()
      raise x
    result = issueChannelToken_result()
    result.read(iprot)
    iprot.readMessageEnd()
    if result.success is not None:
      return result.success
    if result.e is not None:
      # Declared service exception.
      raise result.e
    # Reply carried neither a value nor an exception.
    raise TApplicationException(TApplicationException.MISSING_RESULT, "issueChannelToken failed: unknown result")
  def issueRequestToken(self, channelId, otpId):
    """
    Blocking RPC: issue a request token for the given channel/OTP pair.

    Parameters:
     - channelId
     - otpId
    """
    self.send_issueRequestToken(channelId, otpId)
    return self.recv_issueRequestToken()

  def send_issueRequestToken(self, channelId, otpId):
    # Serialize and flush the request frame.
    self._oprot.writeMessageBegin('issueRequestToken', TMessageType.CALL, self._seqid)
    args = issueRequestToken_args()
    args.channelId = channelId
    args.otpId = otpId
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()

  def recv_issueRequestToken(self):
    iprot = self._iprot
    (fname, mtype, rseqid) = iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      # Protocol-level failure from the server.
      x = TApplicationException()
      x.read(iprot)
      iprot.readMessageEnd()
      raise x
    result = issueRequestToken_result()
    result.read(iprot)
    iprot.readMessageEnd()
    if result.success is not None:
      return result.success
    if result.e is not None:
      # Declared service exception.
      raise result.e
    # Reply carried neither a value nor an exception.
    raise TApplicationException(TApplicationException.MISSING_RESULT, "issueRequestToken failed: unknown result")
  def issueRequestTokenWithAuthScheme(self, channelId, otpId, authScheme, returnUrl):
    """
    Blocking RPC: issue a request token using an explicit auth scheme.

    Parameters:
     - channelId
     - otpId
     - authScheme
     - returnUrl
    """
    self.send_issueRequestTokenWithAuthScheme(channelId, otpId, authScheme, returnUrl)
    return self.recv_issueRequestTokenWithAuthScheme()

  def send_issueRequestTokenWithAuthScheme(self, channelId, otpId, authScheme, returnUrl):
    # Serialize and flush the request frame.
    self._oprot.writeMessageBegin('issueRequestTokenWithAuthScheme', TMessageType.CALL, self._seqid)
    args = issueRequestTokenWithAuthScheme_args()
    args.channelId = channelId
    args.otpId = otpId
    args.authScheme = authScheme
    args.returnUrl = returnUrl
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()

  def recv_issueRequestTokenWithAuthScheme(self):
    iprot = self._iprot
    (fname, mtype, rseqid) = iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      # Protocol-level failure from the server.
      x = TApplicationException()
      x.read(iprot)
      iprot.readMessageEnd()
      raise x
    result = issueRequestTokenWithAuthScheme_result()
    result.read(iprot)
    iprot.readMessageEnd()
    if result.success is not None:
      return result.success
    if result.e is not None:
      # Declared service exception.
      raise result.e
    # Reply carried neither a value nor an exception.
    raise TApplicationException(TApplicationException.MISSING_RESULT, "issueRequestTokenWithAuthScheme failed: unknown result")
  def reserveCoinUse(self, request, locale):
    """
    Blocking RPC: reserve a coin-use transaction described by `request`.

    Parameters:
     - request
     - locale
    """
    self.send_reserveCoinUse(request, locale)
    return self.recv_reserveCoinUse()

  def send_reserveCoinUse(self, request, locale):
    # Serialize and flush the request frame.
    self._oprot.writeMessageBegin('reserveCoinUse', TMessageType.CALL, self._seqid)
    args = reserveCoinUse_args()
    args.request = request
    args.locale = locale
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()

  def recv_reserveCoinUse(self):
    iprot = self._iprot
    (fname, mtype, rseqid) = iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      # Protocol-level failure from the server.
      x = TApplicationException()
      x.read(iprot)
      iprot.readMessageEnd()
      raise x
    result = reserveCoinUse_result()
    result.read(iprot)
    iprot.readMessageEnd()
    if result.success is not None:
      return result.success
    if result.e is not None:
      # Declared service exception.
      raise result.e
    # Reply carried neither a value nor an exception.
    raise TApplicationException(TApplicationException.MISSING_RESULT, "reserveCoinUse failed: unknown result")
  def revokeChannel(self, channelId):
    """
    Blocking void RPC: revoke approval for the given channel.

    Parameters:
     - channelId
    """
    self.send_revokeChannel(channelId)
    self.recv_revokeChannel()

  def send_revokeChannel(self, channelId):
    # Serialize and flush the request frame.
    self._oprot.writeMessageBegin('revokeChannel', TMessageType.CALL, self._seqid)
    args = revokeChannel_args()
    args.channelId = channelId
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()

  def recv_revokeChannel(self):
    iprot = self._iprot
    (fname, mtype, rseqid) = iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      # Protocol-level failure from the server.
      x = TApplicationException()
      x.read(iprot)
      iprot.readMessageEnd()
      raise x
    result = revokeChannel_result()
    result.read(iprot)
    iprot.readMessageEnd()
    if result.e is not None:
      # Declared service exception.
      raise result.e
    # Void method: nothing to return on success.
    return
  def syncChannelData(self, lastSynced, locale):
    """
    Blocking RPC: synchronize channel data changed since `lastSynced`.

    Parameters:
     - lastSynced
     - locale
    """
    self.send_syncChannelData(lastSynced, locale)
    return self.recv_syncChannelData()

  def send_syncChannelData(self, lastSynced, locale):
    # Serialize and flush the request frame.
    self._oprot.writeMessageBegin('syncChannelData', TMessageType.CALL, self._seqid)
    args = syncChannelData_args()
    args.lastSynced = lastSynced
    args.locale = locale
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()

  def recv_syncChannelData(self):
    iprot = self._iprot
    (fname, mtype, rseqid) = iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      # Protocol-level failure from the server.
      x = TApplicationException()
      x.read(iprot)
      iprot.readMessageEnd()
      raise x
    result = syncChannelData_result()
    result.read(iprot)
    iprot.readMessageEnd()
    if result.success is not None:
      return result.success
    if result.e is not None:
      # Declared service exception.
      raise result.e
    # Reply carried neither a value nor an exception.
    raise TApplicationException(TApplicationException.MISSING_RESULT, "syncChannelData failed: unknown result")
  def updateChannelNotificationSetting(self, setting):
    """
    Blocking void RPC: update one channel's notification setting.

    Parameters:
     - setting
    """
    self.send_updateChannelNotificationSetting(setting)
    self.recv_updateChannelNotificationSetting()

  def send_updateChannelNotificationSetting(self, setting):
    # Serialize and flush the request frame.
    self._oprot.writeMessageBegin('updateChannelNotificationSetting', TMessageType.CALL, self._seqid)
    args = updateChannelNotificationSetting_args()
    args.setting = setting
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()

  def recv_updateChannelNotificationSetting(self):
    iprot = self._iprot
    (fname, mtype, rseqid) = iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      # Protocol-level failure from the server.
      x = TApplicationException()
      x.read(iprot)
      iprot.readMessageEnd()
      raise x
    result = updateChannelNotificationSetting_result()
    result.read(iprot)
    iprot.readMessageEnd()
    if result.e is not None:
      # Declared service exception.
      raise result.e
    # Void method: nothing to return on success.
    return
class Processor(Iface, TProcessor):
  """
  Server-side dispatcher for the Channel service.

  `process` reads one incoming call frame, looks the method name up in
  `_processMap`, and delegates to the matching `process_<name>` method.
  Every `process_<name>` method follows the same generated pattern:
  decode the args struct, invoke the user-supplied handler, then encode
  either a REPLY (normal result or declared ChannelException) or an
  EXCEPTION frame (unexpected handler failure, logged and reported as
  INTERNAL_ERROR). Transport errors and interpreter shutdown signals
  are re-raised untouched.
  """
  def __init__(self, handler):
    # handler implements the service interface; _processMap routes method
    # names from the wire to the unbound process_* functions below.
    self._handler = handler
    self._processMap = {}
    self._processMap["approveChannelAndIssueChannelToken"] = Processor.process_approveChannelAndIssueChannelToken
    self._processMap["approveChannelAndIssueRequestToken"] = Processor.process_approveChannelAndIssueRequestToken
    self._processMap["fetchNotificationItems"] = Processor.process_fetchNotificationItems
    self._processMap["getApprovedChannels"] = Processor.process_getApprovedChannels
    self._processMap["getChannelInfo"] = Processor.process_getChannelInfo
    self._processMap["getChannelNotificationSetting"] = Processor.process_getChannelNotificationSetting
    self._processMap["getChannelNotificationSettings"] = Processor.process_getChannelNotificationSettings
    self._processMap["getChannels"] = Processor.process_getChannels
    self._processMap["getDomains"] = Processor.process_getDomains
    self._processMap["getFriendChannelMatrices"] = Processor.process_getFriendChannelMatrices
    self._processMap["getNotificationBadgeCount"] = Processor.process_getNotificationBadgeCount
    self._processMap["issueChannelToken"] = Processor.process_issueChannelToken
    self._processMap["issueRequestToken"] = Processor.process_issueRequestToken
    self._processMap["issueRequestTokenWithAuthScheme"] = Processor.process_issueRequestTokenWithAuthScheme
    self._processMap["reserveCoinUse"] = Processor.process_reserveCoinUse
    self._processMap["revokeChannel"] = Processor.process_revokeChannel
    self._processMap["syncChannelData"] = Processor.process_syncChannelData
    self._processMap["updateChannelNotificationSetting"] = Processor.process_updateChannelNotificationSetting
  def process(self, iprot, oprot):
    # Handle exactly one incoming call frame.
    (name, type, seqid) = iprot.readMessageBegin()
    if name not in self._processMap:
      # Unknown method: drain the args payload, then answer with an
      # UNKNOWN_METHOD application exception so the client can fail fast.
      iprot.skip(TType.STRUCT)
      iprot.readMessageEnd()
      x = TApplicationException(TApplicationException.UNKNOWN_METHOD, 'Unknown function %s' % (name))
      oprot.writeMessageBegin(name, TMessageType.EXCEPTION, seqid)
      x.write(oprot)
      oprot.writeMessageEnd()
      oprot.trans.flush()
      return
    else:
      self._processMap[name](self, seqid, iprot, oprot)
    return True
  # The process_* methods below all follow the common pattern described
  # in the class docstring; only the args/result structs and the handler
  # call differ.
  def process_approveChannelAndIssueChannelToken(self, seqid, iprot, oprot):
    args = approveChannelAndIssueChannelToken_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = approveChannelAndIssueChannelToken_result()
    try:
      result.success = self._handler.approveChannelAndIssueChannelToken(args.channelId)
      msg_type = TMessageType.REPLY
    except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
      raise
    except ChannelException as e:
      msg_type = TMessageType.REPLY
      result.e = e
    except Exception as ex:
      msg_type = TMessageType.EXCEPTION
      logging.exception(ex)
      result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
    oprot.writeMessageBegin("approveChannelAndIssueChannelToken", msg_type, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
  def process_approveChannelAndIssueRequestToken(self, seqid, iprot, oprot):
    args = approveChannelAndIssueRequestToken_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = approveChannelAndIssueRequestToken_result()
    try:
      result.success = self._handler.approveChannelAndIssueRequestToken(args.channelId, args.otpId)
      msg_type = TMessageType.REPLY
    except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
      raise
    except ChannelException as e:
      msg_type = TMessageType.REPLY
      result.e = e
    except Exception as ex:
      msg_type = TMessageType.EXCEPTION
      logging.exception(ex)
      result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
    oprot.writeMessageBegin("approveChannelAndIssueRequestToken", msg_type, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
  def process_fetchNotificationItems(self, seqid, iprot, oprot):
    args = fetchNotificationItems_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = fetchNotificationItems_result()
    try:
      result.success = self._handler.fetchNotificationItems(args.localRev)
      msg_type = TMessageType.REPLY
    except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
      raise
    except ChannelException as e:
      msg_type = TMessageType.REPLY
      result.e = e
    except Exception as ex:
      msg_type = TMessageType.EXCEPTION
      logging.exception(ex)
      result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
    oprot.writeMessageBegin("fetchNotificationItems", msg_type, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
  def process_getApprovedChannels(self, seqid, iprot, oprot):
    args = getApprovedChannels_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = getApprovedChannels_result()
    try:
      result.success = self._handler.getApprovedChannels(args.lastSynced, args.locale)
      msg_type = TMessageType.REPLY
    except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
      raise
    except ChannelException as e:
      msg_type = TMessageType.REPLY
      result.e = e
    except Exception as ex:
      msg_type = TMessageType.EXCEPTION
      logging.exception(ex)
      result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
    oprot.writeMessageBegin("getApprovedChannels", msg_type, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
  def process_getChannelInfo(self, seqid, iprot, oprot):
    args = getChannelInfo_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = getChannelInfo_result()
    try:
      result.success = self._handler.getChannelInfo(args.channelId, args.locale)
      msg_type = TMessageType.REPLY
    except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
      raise
    except ChannelException as e:
      msg_type = TMessageType.REPLY
      result.e = e
    except Exception as ex:
      msg_type = TMessageType.EXCEPTION
      logging.exception(ex)
      result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
    oprot.writeMessageBegin("getChannelInfo", msg_type, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
  def process_getChannelNotificationSetting(self, seqid, iprot, oprot):
    args = getChannelNotificationSetting_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = getChannelNotificationSetting_result()
    try:
      result.success = self._handler.getChannelNotificationSetting(args.channelId, args.locale)
      msg_type = TMessageType.REPLY
    except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
      raise
    except ChannelException as e:
      msg_type = TMessageType.REPLY
      result.e = e
    except Exception as ex:
      msg_type = TMessageType.EXCEPTION
      logging.exception(ex)
      result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
    oprot.writeMessageBegin("getChannelNotificationSetting", msg_type, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
  def process_getChannelNotificationSettings(self, seqid, iprot, oprot):
    args = getChannelNotificationSettings_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = getChannelNotificationSettings_result()
    try:
      result.success = self._handler.getChannelNotificationSettings(args.locale)
      msg_type = TMessageType.REPLY
    except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
      raise
    except ChannelException as e:
      msg_type = TMessageType.REPLY
      result.e = e
    except Exception as ex:
      msg_type = TMessageType.EXCEPTION
      logging.exception(ex)
      result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
    oprot.writeMessageBegin("getChannelNotificationSettings", msg_type, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
  def process_getChannels(self, seqid, iprot, oprot):
    args = getChannels_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = getChannels_result()
    try:
      result.success = self._handler.getChannels(args.lastSynced, args.locale)
      msg_type = TMessageType.REPLY
    except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
      raise
    except ChannelException as e:
      msg_type = TMessageType.REPLY
      result.e = e
    except Exception as ex:
      msg_type = TMessageType.EXCEPTION
      logging.exception(ex)
      result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
    oprot.writeMessageBegin("getChannels", msg_type, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
  def process_getDomains(self, seqid, iprot, oprot):
    args = getDomains_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = getDomains_result()
    try:
      result.success = self._handler.getDomains(args.lastSynced)
      msg_type = TMessageType.REPLY
    except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
      raise
    except ChannelException as e:
      msg_type = TMessageType.REPLY
      result.e = e
    except Exception as ex:
      msg_type = TMessageType.EXCEPTION
      logging.exception(ex)
      result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
    oprot.writeMessageBegin("getDomains", msg_type, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
  def process_getFriendChannelMatrices(self, seqid, iprot, oprot):
    args = getFriendChannelMatrices_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = getFriendChannelMatrices_result()
    try:
      result.success = self._handler.getFriendChannelMatrices(args.channelIds)
      msg_type = TMessageType.REPLY
    except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
      raise
    except ChannelException as e:
      msg_type = TMessageType.REPLY
      result.e = e
    except Exception as ex:
      msg_type = TMessageType.EXCEPTION
      logging.exception(ex)
      result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
    oprot.writeMessageBegin("getFriendChannelMatrices", msg_type, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
  def process_getNotificationBadgeCount(self, seqid, iprot, oprot):
    args = getNotificationBadgeCount_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = getNotificationBadgeCount_result()
    try:
      result.success = self._handler.getNotificationBadgeCount(args.localRev)
      msg_type = TMessageType.REPLY
    except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
      raise
    except ChannelException as e:
      msg_type = TMessageType.REPLY
      result.e = e
    except Exception as ex:
      msg_type = TMessageType.EXCEPTION
      logging.exception(ex)
      result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
    oprot.writeMessageBegin("getNotificationBadgeCount", msg_type, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
  def process_issueChannelToken(self, seqid, iprot, oprot):
    args = issueChannelToken_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = issueChannelToken_result()
    try:
      result.success = self._handler.issueChannelToken(args.channelId)
      msg_type = TMessageType.REPLY
    except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
      raise
    except ChannelException as e:
      msg_type = TMessageType.REPLY
      result.e = e
    except Exception as ex:
      msg_type = TMessageType.EXCEPTION
      logging.exception(ex)
      result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
    oprot.writeMessageBegin("issueChannelToken", msg_type, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
  def process_issueRequestToken(self, seqid, iprot, oprot):
    args = issueRequestToken_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = issueRequestToken_result()
    try:
      result.success = self._handler.issueRequestToken(args.channelId, args.otpId)
      msg_type = TMessageType.REPLY
    except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
      raise
    except ChannelException as e:
      msg_type = TMessageType.REPLY
      result.e = e
    except Exception as ex:
      msg_type = TMessageType.EXCEPTION
      logging.exception(ex)
      result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
    oprot.writeMessageBegin("issueRequestToken", msg_type, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
  def process_issueRequestTokenWithAuthScheme(self, seqid, iprot, oprot):
    args = issueRequestTokenWithAuthScheme_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = issueRequestTokenWithAuthScheme_result()
    try:
      result.success = self._handler.issueRequestTokenWithAuthScheme(args.channelId, args.otpId, args.authScheme, args.returnUrl)
      msg_type = TMessageType.REPLY
    except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
      raise
    except ChannelException as e:
      msg_type = TMessageType.REPLY
      result.e = e
    except Exception as ex:
      msg_type = TMessageType.EXCEPTION
      logging.exception(ex)
      result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
    oprot.writeMessageBegin("issueRequestTokenWithAuthScheme", msg_type, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
  def process_reserveCoinUse(self, seqid, iprot, oprot):
    args = reserveCoinUse_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = reserveCoinUse_result()
    try:
      result.success = self._handler.reserveCoinUse(args.request, args.locale)
      msg_type = TMessageType.REPLY
    except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
      raise
    except ChannelException as e:
      msg_type = TMessageType.REPLY
      result.e = e
    except Exception as ex:
      msg_type = TMessageType.EXCEPTION
      logging.exception(ex)
      result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
    oprot.writeMessageBegin("reserveCoinUse", msg_type, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
  def process_revokeChannel(self, seqid, iprot, oprot):
    # Void method: the handler call produces no result.success.
    args = revokeChannel_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = revokeChannel_result()
    try:
      self._handler.revokeChannel(args.channelId)
      msg_type = TMessageType.REPLY
    except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
      raise
    except ChannelException as e:
      msg_type = TMessageType.REPLY
      result.e = e
    except Exception as ex:
      msg_type = TMessageType.EXCEPTION
      logging.exception(ex)
      result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
    oprot.writeMessageBegin("revokeChannel", msg_type, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
  def process_syncChannelData(self, seqid, iprot, oprot):
    args = syncChannelData_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = syncChannelData_result()
    try:
      result.success = self._handler.syncChannelData(args.lastSynced, args.locale)
      msg_type = TMessageType.REPLY
    except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
      raise
    except ChannelException as e:
      msg_type = TMessageType.REPLY
      result.e = e
    except Exception as ex:
      msg_type = TMessageType.EXCEPTION
      logging.exception(ex)
      result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
    oprot.writeMessageBegin("syncChannelData", msg_type, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
  def process_updateChannelNotificationSetting(self, seqid, iprot, oprot):
    # Void method: the handler call produces no result.success.
    args = updateChannelNotificationSetting_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = updateChannelNotificationSetting_result()
    try:
      self._handler.updateChannelNotificationSetting(args.setting)
      msg_type = TMessageType.REPLY
    except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
      raise
    except ChannelException as e:
      msg_type = TMessageType.REPLY
      result.e = e
    except Exception as ex:
      msg_type = TMessageType.EXCEPTION
      logging.exception(ex)
      result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
    oprot.writeMessageBegin("updateChannelNotificationSetting", msg_type, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
# HELPER FUNCTIONS AND STRUCTURES
class approveChannelAndIssueChannelToken_args:
  """
  Argument struct for ChannelService.approveChannelAndIssueChannelToken.

  Attributes:
   - channelId: identifier of the channel being approved (field id 1)
  """

  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'channelId', None, None, ), # 1
  )

  def __init__(self, channelId=None,):
    self.channelId = channelId

  def read(self, iprot):
    # Fast path: decode via the C extension when the accelerated binary
    # protocol and a C-readable transport are in use.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Slow path: generic field-by-field decode; unknown fields are skipped
    # for forward compatibility.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.channelId = iprot.readString()
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    # Fast path mirrors read(); otherwise emit fields one by one, omitting
    # unset (None) fields per Thrift convention.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('approveChannelAndIssueChannelToken_args')
    if self.channelId is not None:
      oprot.writeFieldBegin('channelId', TType.STRING, 1)
      oprot.writeString(self.channelId)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __hash__(self):
    value = 17
    value = (value * 31) ^ hash(self.channelId)
    return value

  def __repr__(self):
    # items() (not the Python-2-only iteritems()) keeps this repr working
    # under both Python 2 and Python 3; behavior is identical on Python 2.
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class approveChannelAndIssueChannelToken_result:
  """
  Result struct for ChannelService.approveChannelAndIssueChannelToken.

  Attributes:
   - success: ChannelToken returned on success (field id 0)
   - e: declared ChannelException raised by the server (field id 1)
  """

  thrift_spec = (
    (0, TType.STRUCT, 'success', (ChannelToken, ChannelToken.thrift_spec), None, ), # 0
    (1, TType.STRUCT, 'e', (ChannelException, ChannelException.thrift_spec), None, ), # 1
  )

  def __init__(self, success=None, e=None,):
    self.success = success
    self.e = e

  def read(self, iprot):
    # Fast path: decode via the C extension when available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Slow path: generic field-by-field decode; unknown fields are skipped.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.STRUCT:
          self.success = ChannelToken()
          self.success.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.e = ChannelException()
          self.e.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    # Unset (None) fields are omitted; at most one of success/e is set.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('approveChannelAndIssueChannelToken_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.STRUCT, 0)
      self.success.write(oprot)
      oprot.writeFieldEnd()
    if self.e is not None:
      oprot.writeFieldBegin('e', TType.STRUCT, 1)
      self.e.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __hash__(self):
    value = 17
    value = (value * 31) ^ hash(self.success)
    value = (value * 31) ^ hash(self.e)
    return value

  def __repr__(self):
    # items() (not the Python-2-only iteritems()) keeps this repr working
    # under both Python 2 and Python 3; behavior is identical on Python 2.
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class approveChannelAndIssueRequestToken_args:
  """
  Argument struct for ChannelService.approveChannelAndIssueRequestToken.

  Attributes:
   - channelId: identifier of the channel being approved (field id 1)
   - otpId: one-time-password identifier for the request (field id 2)
  """

  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'channelId', None, None, ), # 1
    (2, TType.STRING, 'otpId', None, None, ), # 2
  )

  def __init__(self, channelId=None, otpId=None,):
    self.channelId = channelId
    self.otpId = otpId

  def read(self, iprot):
    # Fast path: decode via the C extension when available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Slow path: generic field-by-field decode; unknown fields are skipped.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.channelId = iprot.readString()
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRING:
          self.otpId = iprot.readString()
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    # Unset (None) fields are omitted per Thrift convention.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('approveChannelAndIssueRequestToken_args')
    if self.channelId is not None:
      oprot.writeFieldBegin('channelId', TType.STRING, 1)
      oprot.writeString(self.channelId)
      oprot.writeFieldEnd()
    if self.otpId is not None:
      oprot.writeFieldBegin('otpId', TType.STRING, 2)
      oprot.writeString(self.otpId)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __hash__(self):
    value = 17
    value = (value * 31) ^ hash(self.channelId)
    value = (value * 31) ^ hash(self.otpId)
    return value

  def __repr__(self):
    # items() (not the Python-2-only iteritems()) keeps this repr working
    # under both Python 2 and Python 3; behavior is identical on Python 2.
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
# Thrift-generated result struct for approveChannelAndIssueRequestToken:
# `success` (field 0) carries the string return value, `e` (field 1) the
# service-declared ChannelException.  Auto-generated -- do not edit by hand.
class approveChannelAndIssueRequestToken_result:
  """
  Attributes:
   - success
   - e
  """
  # Slot layout: (field id, wire type, field name, type args, default).
  thrift_spec = (
    (0, TType.STRING, 'success', None, None, ), # 0
    (1, TType.STRUCT, 'e', (ChannelException, ChannelException.thrift_spec), None, ), # 1
  )
  def __init__(self, success=None, e=None,):
    self.success = success
    self.e = e
  def read(self, iprot):
    # Fast path: decode via the C fastbinary extension when supported.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Generic decode: walk fields until STOP, skipping unknown fields.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.STRING:
          self.success = iprot.readString()
        else:
          iprot.skip(ftype)
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.e = ChannelException()
          self.e.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    # Fast path: encode via the C fastbinary extension when available.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    # Generic encode: only fields that are set (not None) are written.
    oprot.writeStructBegin('approveChannelAndIssueRequestToken_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.STRING, 0)
      oprot.writeString(self.success)
      oprot.writeFieldEnd()
    if self.e is not None:
      oprot.writeFieldBegin('e', TType.STRUCT, 1)
      self.e.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    # No required fields declared for this struct.
    return
  def __hash__(self):
    # Java-style 31x rolling hash over all field values.
    value = 17
    value = (value * 31) ^ hash(self.success)
    value = (value * 31) ^ hash(self.e)
    return value
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
# Thrift-generated request struct for the fetchNotificationItems RPC call;
# `localRev` is sent as field id 2 (I64).  Auto-generated -- do not edit.
class fetchNotificationItems_args:
  """
  Attributes:
   - localRev
  """
  # Slot layout: (field id, wire type, field name, type args, default);
  # slots 0-1 are unused.
  thrift_spec = (
    None, # 0
    None, # 1
    (2, TType.I64, 'localRev', None, None, ), # 2
  )
  def __init__(self, localRev=None,):
    self.localRev = localRev
  def read(self, iprot):
    # Fast path: decode via the C fastbinary extension when supported.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Generic decode: walk fields until STOP, skipping unknown fields.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 2:
        if ftype == TType.I64:
          self.localRev = iprot.readI64()
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    # Fast path: encode via the C fastbinary extension when available.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    # Generic encode: only fields that are set (not None) are written.
    oprot.writeStructBegin('fetchNotificationItems_args')
    if self.localRev is not None:
      oprot.writeFieldBegin('localRev', TType.I64, 2)
      oprot.writeI64(self.localRev)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    # No required fields declared for this struct.
    return
  def __hash__(self):
    # Java-style 31x rolling hash over all field values.
    value = 17
    value = (value * 31) ^ hash(self.localRev)
    return value
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
# Thrift-generated result struct for fetchNotificationItems: `success`
# (field 0) carries a NotificationFetchResult, `e` (field 1) the declared
# ChannelException.  Auto-generated -- do not edit by hand.
class fetchNotificationItems_result:
  """
  Attributes:
   - success
   - e
  """
  # Slot layout: (field id, wire type, field name, type args, default).
  thrift_spec = (
    (0, TType.STRUCT, 'success', (NotificationFetchResult, NotificationFetchResult.thrift_spec), None, ), # 0
    (1, TType.STRUCT, 'e', (ChannelException, ChannelException.thrift_spec), None, ), # 1
  )
  def __init__(self, success=None, e=None,):
    self.success = success
    self.e = e
  def read(self, iprot):
    # Fast path: decode via the C fastbinary extension when supported.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Generic decode: walk fields until STOP, skipping unknown fields.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.STRUCT:
          self.success = NotificationFetchResult()
          self.success.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.e = ChannelException()
          self.e.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    # Fast path: encode via the C fastbinary extension when available.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    # Generic encode: only fields that are set (not None) are written.
    oprot.writeStructBegin('fetchNotificationItems_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.STRUCT, 0)
      self.success.write(oprot)
      oprot.writeFieldEnd()
    if self.e is not None:
      oprot.writeFieldBegin('e', TType.STRUCT, 1)
      self.e.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    # No required fields declared for this struct.
    return
  def __hash__(self):
    # Java-style 31x rolling hash over all field values.
    value = 17
    value = (value * 31) ^ hash(self.success)
    value = (value * 31) ^ hash(self.e)
    return value
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
# Thrift-generated request struct for the getApprovedChannels RPC call;
# `lastSynced` is field 2 (I64) and `locale` field 3 (STRING).
# Auto-generated -- do not edit by hand.
class getApprovedChannels_args:
  """
  Attributes:
   - lastSynced
   - locale
  """
  # Slot layout: (field id, wire type, field name, type args, default);
  # slots 0-1 are unused.
  thrift_spec = (
    None, # 0
    None, # 1
    (2, TType.I64, 'lastSynced', None, None, ), # 2
    (3, TType.STRING, 'locale', None, None, ), # 3
  )
  def __init__(self, lastSynced=None, locale=None,):
    self.lastSynced = lastSynced
    self.locale = locale
  def read(self, iprot):
    # Fast path: decode via the C fastbinary extension when supported.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Generic decode: walk fields until STOP, skipping unknown fields.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 2:
        if ftype == TType.I64:
          self.lastSynced = iprot.readI64()
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.STRING:
          self.locale = iprot.readString()
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    # Fast path: encode via the C fastbinary extension when available.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    # Generic encode: only fields that are set (not None) are written.
    oprot.writeStructBegin('getApprovedChannels_args')
    if self.lastSynced is not None:
      oprot.writeFieldBegin('lastSynced', TType.I64, 2)
      oprot.writeI64(self.lastSynced)
      oprot.writeFieldEnd()
    if self.locale is not None:
      oprot.writeFieldBegin('locale', TType.STRING, 3)
      oprot.writeString(self.locale)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    # No required fields declared for this struct.
    return
  def __hash__(self):
    # Java-style 31x rolling hash over all field values.
    value = 17
    value = (value * 31) ^ hash(self.lastSynced)
    value = (value * 31) ^ hash(self.locale)
    return value
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
# Thrift-generated result struct for getApprovedChannels: `success`
# (field 0) carries an ApprovedChannelInfos, `e` (field 1) the declared
# ChannelException.  Auto-generated -- do not edit by hand.
class getApprovedChannels_result:
  """
  Attributes:
   - success
   - e
  """
  # Slot layout: (field id, wire type, field name, type args, default).
  thrift_spec = (
    (0, TType.STRUCT, 'success', (ApprovedChannelInfos, ApprovedChannelInfos.thrift_spec), None, ), # 0
    (1, TType.STRUCT, 'e', (ChannelException, ChannelException.thrift_spec), None, ), # 1
  )
  def __init__(self, success=None, e=None,):
    self.success = success
    self.e = e
  def read(self, iprot):
    # Fast path: decode via the C fastbinary extension when supported.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Generic decode: walk fields until STOP, skipping unknown fields.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.STRUCT:
          self.success = ApprovedChannelInfos()
          self.success.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.e = ChannelException()
          self.e.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    # Fast path: encode via the C fastbinary extension when available.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    # Generic encode: only fields that are set (not None) are written.
    oprot.writeStructBegin('getApprovedChannels_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.STRUCT, 0)
      self.success.write(oprot)
      oprot.writeFieldEnd()
    if self.e is not None:
      oprot.writeFieldBegin('e', TType.STRUCT, 1)
      self.e.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    # No required fields declared for this struct.
    return
  def __hash__(self):
    # Java-style 31x rolling hash over all field values.
    value = 17
    value = (value * 31) ^ hash(self.success)
    value = (value * 31) ^ hash(self.e)
    return value
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
# Thrift-generated request struct for the getChannelInfo RPC call;
# `channelId` is field 2 and `locale` field 3 (both STRING).
# Auto-generated -- do not edit by hand.
class getChannelInfo_args:
  """
  Attributes:
   - channelId
   - locale
  """
  # Slot layout: (field id, wire type, field name, type args, default);
  # slots 0-1 are unused.
  thrift_spec = (
    None, # 0
    None, # 1
    (2, TType.STRING, 'channelId', None, None, ), # 2
    (3, TType.STRING, 'locale', None, None, ), # 3
  )
  def __init__(self, channelId=None, locale=None,):
    self.channelId = channelId
    self.locale = locale
  def read(self, iprot):
    # Fast path: decode via the C fastbinary extension when supported.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Generic decode: walk fields until STOP, skipping unknown fields.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 2:
        if ftype == TType.STRING:
          self.channelId = iprot.readString()
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.STRING:
          self.locale = iprot.readString()
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    # Fast path: encode via the C fastbinary extension when available.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    # Generic encode: only fields that are set (not None) are written.
    oprot.writeStructBegin('getChannelInfo_args')
    if self.channelId is not None:
      oprot.writeFieldBegin('channelId', TType.STRING, 2)
      oprot.writeString(self.channelId)
      oprot.writeFieldEnd()
    if self.locale is not None:
      oprot.writeFieldBegin('locale', TType.STRING, 3)
      oprot.writeString(self.locale)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    # No required fields declared for this struct.
    return
  def __hash__(self):
    # Java-style 31x rolling hash over all field values.
    value = 17
    value = (value * 31) ^ hash(self.channelId)
    value = (value * 31) ^ hash(self.locale)
    return value
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
# Thrift-generated result struct for getChannelInfo: `success` (field 0)
# carries a ChannelInfo, `e` (field 1) the declared ChannelException.
# Auto-generated -- do not edit by hand.
class getChannelInfo_result:
  """
  Attributes:
   - success
   - e
  """
  # Slot layout: (field id, wire type, field name, type args, default).
  thrift_spec = (
    (0, TType.STRUCT, 'success', (ChannelInfo, ChannelInfo.thrift_spec), None, ), # 0
    (1, TType.STRUCT, 'e', (ChannelException, ChannelException.thrift_spec), None, ), # 1
  )
  def __init__(self, success=None, e=None,):
    self.success = success
    self.e = e
  def read(self, iprot):
    # Fast path: decode via the C fastbinary extension when supported.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Generic decode: walk fields until STOP, skipping unknown fields.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.STRUCT:
          self.success = ChannelInfo()
          self.success.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.e = ChannelException()
          self.e.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    # Fast path: encode via the C fastbinary extension when available.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    # Generic encode: only fields that are set (not None) are written.
    oprot.writeStructBegin('getChannelInfo_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.STRUCT, 0)
      self.success.write(oprot)
      oprot.writeFieldEnd()
    if self.e is not None:
      oprot.writeFieldBegin('e', TType.STRUCT, 1)
      self.e.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    # No required fields declared for this struct.
    return
  def __hash__(self):
    # Java-style 31x rolling hash over all field values.
    value = 17
    value = (value * 31) ^ hash(self.success)
    value = (value * 31) ^ hash(self.e)
    return value
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
# Thrift-generated request struct for the getChannelNotificationSetting RPC
# call; `channelId` is field 1 and `locale` field 2 (both STRING).
# Auto-generated -- do not edit by hand.
class getChannelNotificationSetting_args:
  """
  Attributes:
   - channelId
   - locale
  """
  # Slot layout: (field id, wire type, field name, type args, default);
  # slot 0 is unused.
  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'channelId', None, None, ), # 1
    (2, TType.STRING, 'locale', None, None, ), # 2
  )
  def __init__(self, channelId=None, locale=None,):
    self.channelId = channelId
    self.locale = locale
  def read(self, iprot):
    # Fast path: decode via the C fastbinary extension when supported.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Generic decode: walk fields until STOP, skipping unknown fields.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.channelId = iprot.readString()
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRING:
          self.locale = iprot.readString()
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    # Fast path: encode via the C fastbinary extension when available.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    # Generic encode: only fields that are set (not None) are written.
    oprot.writeStructBegin('getChannelNotificationSetting_args')
    if self.channelId is not None:
      oprot.writeFieldBegin('channelId', TType.STRING, 1)
      oprot.writeString(self.channelId)
      oprot.writeFieldEnd()
    if self.locale is not None:
      oprot.writeFieldBegin('locale', TType.STRING, 2)
      oprot.writeString(self.locale)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    # No required fields declared for this struct.
    return
  def __hash__(self):
    # Java-style 31x rolling hash over all field values.
    value = 17
    value = (value * 31) ^ hash(self.channelId)
    value = (value * 31) ^ hash(self.locale)
    return value
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
# Thrift-generated result struct for getChannelNotificationSetting:
# `success` (field 0) carries a ChannelNotificationSetting, `e` (field 1)
# the declared ChannelException.  Auto-generated -- do not edit by hand.
class getChannelNotificationSetting_result:
  """
  Attributes:
   - success
   - e
  """
  # Slot layout: (field id, wire type, field name, type args, default).
  thrift_spec = (
    (0, TType.STRUCT, 'success', (ChannelNotificationSetting, ChannelNotificationSetting.thrift_spec), None, ), # 0
    (1, TType.STRUCT, 'e', (ChannelException, ChannelException.thrift_spec), None, ), # 1
  )
  def __init__(self, success=None, e=None,):
    self.success = success
    self.e = e
  def read(self, iprot):
    # Fast path: decode via the C fastbinary extension when supported.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Generic decode: walk fields until STOP, skipping unknown fields.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.STRUCT:
          self.success = ChannelNotificationSetting()
          self.success.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.e = ChannelException()
          self.e.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    # Fast path: encode via the C fastbinary extension when available.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    # Generic encode: only fields that are set (not None) are written.
    oprot.writeStructBegin('getChannelNotificationSetting_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.STRUCT, 0)
      self.success.write(oprot)
      oprot.writeFieldEnd()
    if self.e is not None:
      oprot.writeFieldBegin('e', TType.STRUCT, 1)
      self.e.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    # No required fields declared for this struct.
    return
  def __hash__(self):
    # Java-style 31x rolling hash over all field values.
    value = 17
    value = (value * 31) ^ hash(self.success)
    value = (value * 31) ^ hash(self.e)
    return value
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
# Thrift-generated request struct for the getChannelNotificationSettings RPC
# call; `locale` is field 1 (STRING).  Auto-generated -- do not edit by hand.
class getChannelNotificationSettings_args:
  """
  Attributes:
   - locale
  """
  # Slot layout: (field id, wire type, field name, type args, default);
  # slot 0 is unused.
  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'locale', None, None, ), # 1
  )
  def __init__(self, locale=None,):
    self.locale = locale
  def read(self, iprot):
    # Fast path: decode via the C fastbinary extension when supported.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Generic decode: walk fields until STOP, skipping unknown fields.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.locale = iprot.readString()
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    # Fast path: encode via the C fastbinary extension when available.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    # Generic encode: only fields that are set (not None) are written.
    oprot.writeStructBegin('getChannelNotificationSettings_args')
    if self.locale is not None:
      oprot.writeFieldBegin('locale', TType.STRING, 1)
      oprot.writeString(self.locale)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    # No required fields declared for this struct.
    return
  def __hash__(self):
    # Java-style 31x rolling hash over all field values.
    value = 17
    value = (value * 31) ^ hash(self.locale)
    return value
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
# Thrift-generated result struct for getChannelNotificationSettings:
# `success` (field 0) carries a list of ChannelNotificationSetting structs,
# `e` (field 1) the declared ChannelException.  The `_etype*/_size*/_i*`
# temporaries are generator-assigned names.  Auto-generated -- do not edit.
class getChannelNotificationSettings_result:
  """
  Attributes:
   - success
   - e
  """
  # Slot layout: (field id, wire type, field name, type args, default).
  thrift_spec = (
    (0, TType.LIST, 'success', (TType.STRUCT,(ChannelNotificationSetting, ChannelNotificationSetting.thrift_spec)), None, ), # 0
    (1, TType.STRUCT, 'e', (ChannelException, ChannelException.thrift_spec), None, ), # 1
  )
  def __init__(self, success=None, e=None,):
    self.success = success
    self.e = e
  def read(self, iprot):
    # Fast path: decode via the C fastbinary extension when supported.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Generic decode: walk fields until STOP, skipping unknown fields.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.LIST:
          # Element count comes from the list header; each element is a
          # ChannelNotificationSetting decoded in place.
          self.success = []
          (_etype638, _size635) = iprot.readListBegin()
          for _i639 in xrange(_size635):
            _elem640 = ChannelNotificationSetting()
            _elem640.read(iprot)
            self.success.append(_elem640)
          iprot.readListEnd()
        else:
          iprot.skip(ftype)
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.e = ChannelException()
          self.e.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    # Fast path: encode via the C fastbinary extension when available.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    # Generic encode: only fields that are set (not None) are written.
    oprot.writeStructBegin('getChannelNotificationSettings_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.LIST, 0)
      oprot.writeListBegin(TType.STRUCT, len(self.success))
      for iter641 in self.success:
        iter641.write(oprot)
      oprot.writeListEnd()
      oprot.writeFieldEnd()
    if self.e is not None:
      oprot.writeFieldBegin('e', TType.STRUCT, 1)
      self.e.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    # No required fields declared for this struct.
    return
  def __hash__(self):
    # Java-style 31x rolling hash over all field values.
    # NOTE(review): hashing self.success (a list) would raise TypeError if
    # ever called with a populated result -- inherent to this generator.
    value = 17
    value = (value * 31) ^ hash(self.success)
    value = (value * 31) ^ hash(self.e)
    return value
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
# Thrift-generated request struct for the getChannels RPC call;
# `lastSynced` is field 2 (I64) and `locale` field 3 (STRING).
# Auto-generated -- do not edit by hand.
class getChannels_args:
  """
  Attributes:
   - lastSynced
   - locale
  """
  # Slot layout: (field id, wire type, field name, type args, default);
  # slots 0-1 are unused.
  thrift_spec = (
    None, # 0
    None, # 1
    (2, TType.I64, 'lastSynced', None, None, ), # 2
    (3, TType.STRING, 'locale', None, None, ), # 3
  )
  def __init__(self, lastSynced=None, locale=None,):
    self.lastSynced = lastSynced
    self.locale = locale
  def read(self, iprot):
    # Fast path: decode via the C fastbinary extension when supported.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Generic decode: walk fields until STOP, skipping unknown fields.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 2:
        if ftype == TType.I64:
          self.lastSynced = iprot.readI64()
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.STRING:
          self.locale = iprot.readString()
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    # Fast path: encode via the C fastbinary extension when available.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    # Generic encode: only fields that are set (not None) are written.
    oprot.writeStructBegin('getChannels_args')
    if self.lastSynced is not None:
      oprot.writeFieldBegin('lastSynced', TType.I64, 2)
      oprot.writeI64(self.lastSynced)
      oprot.writeFieldEnd()
    if self.locale is not None:
      oprot.writeFieldBegin('locale', TType.STRING, 3)
      oprot.writeString(self.locale)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    # No required fields declared for this struct.
    return
  def __hash__(self):
    # Java-style 31x rolling hash over all field values.
    value = 17
    value = (value * 31) ^ hash(self.lastSynced)
    value = (value * 31) ^ hash(self.locale)
    return value
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
# Thrift-generated result struct for getChannels: `success` (field 0)
# carries a ChannelInfos, `e` (field 1) the declared ChannelException.
# Auto-generated -- do not edit by hand.
class getChannels_result:
  """
  Attributes:
   - success
   - e
  """
  # Slot layout: (field id, wire type, field name, type args, default).
  thrift_spec = (
    (0, TType.STRUCT, 'success', (ChannelInfos, ChannelInfos.thrift_spec), None, ), # 0
    (1, TType.STRUCT, 'e', (ChannelException, ChannelException.thrift_spec), None, ), # 1
  )
  def __init__(self, success=None, e=None,):
    self.success = success
    self.e = e
  def read(self, iprot):
    # Fast path: decode via the C fastbinary extension when supported.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Generic decode: walk fields until STOP, skipping unknown fields.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.STRUCT:
          self.success = ChannelInfos()
          self.success.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.e = ChannelException()
          self.e.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    # Fast path: encode via the C fastbinary extension when available.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    # Generic encode: only fields that are set (not None) are written.
    oprot.writeStructBegin('getChannels_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.STRUCT, 0)
      self.success.write(oprot)
      oprot.writeFieldEnd()
    if self.e is not None:
      oprot.writeFieldBegin('e', TType.STRUCT, 1)
      self.e.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    # No required fields declared for this struct.
    return
  def __hash__(self):
    # Java-style 31x rolling hash over all field values.
    value = 17
    value = (value * 31) ^ hash(self.success)
    value = (value * 31) ^ hash(self.e)
    return value
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
# Thrift-generated request struct for the getDomains RPC call;
# `lastSynced` is field 2 (I64).  Auto-generated -- do not edit by hand.
class getDomains_args:
  """
  Attributes:
   - lastSynced
  """
  # Slot layout: (field id, wire type, field name, type args, default);
  # slots 0-1 are unused.
  thrift_spec = (
    None, # 0
    None, # 1
    (2, TType.I64, 'lastSynced', None, None, ), # 2
  )
  def __init__(self, lastSynced=None,):
    self.lastSynced = lastSynced
  def read(self, iprot):
    # Fast path: decode via the C fastbinary extension when supported.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Generic decode: walk fields until STOP, skipping unknown fields.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 2:
        if ftype == TType.I64:
          self.lastSynced = iprot.readI64()
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    # Fast path: encode via the C fastbinary extension when available.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    # Generic encode: only fields that are set (not None) are written.
    oprot.writeStructBegin('getDomains_args')
    if self.lastSynced is not None:
      oprot.writeFieldBegin('lastSynced', TType.I64, 2)
      oprot.writeI64(self.lastSynced)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    # No required fields declared for this struct.
    return
  def __hash__(self):
    # Java-style 31x rolling hash over all field values.
    value = 17
    value = (value * 31) ^ hash(self.lastSynced)
    return value
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
# Thrift-generated result struct for getDomains: `success` (field 0)
# carries a ChannelDomains, `e` (field 1) the declared ChannelException.
# Auto-generated -- do not edit by hand.
class getDomains_result:
  """
  Attributes:
   - success
   - e
  """
  # Slot layout: (field id, wire type, field name, type args, default).
  thrift_spec = (
    (0, TType.STRUCT, 'success', (ChannelDomains, ChannelDomains.thrift_spec), None, ), # 0
    (1, TType.STRUCT, 'e', (ChannelException, ChannelException.thrift_spec), None, ), # 1
  )
  def __init__(self, success=None, e=None,):
    self.success = success
    self.e = e
  def read(self, iprot):
    # Fast path: decode via the C fastbinary extension when supported.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Generic decode: walk fields until STOP, skipping unknown fields.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.STRUCT:
          self.success = ChannelDomains()
          self.success.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.e = ChannelException()
          self.e.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    # Fast path: encode via the C fastbinary extension when available.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    # Generic encode: only fields that are set (not None) are written.
    oprot.writeStructBegin('getDomains_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.STRUCT, 0)
      self.success.write(oprot)
      oprot.writeFieldEnd()
    if self.e is not None:
      oprot.writeFieldBegin('e', TType.STRUCT, 1)
      self.e.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    # No required fields declared for this struct.
    return
  def __hash__(self):
    # Java-style 31x rolling hash over all field values.
    value = 17
    value = (value * 31) ^ hash(self.success)
    value = (value * 31) ^ hash(self.e)
    return value
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
# Thrift-generated request struct for the getFriendChannelMatrices RPC call;
# `channelIds` is field 1, a LIST of STRING.  The `_etype*/_size*/_i*`
# temporaries are generator-assigned names.  Auto-generated -- do not edit.
class getFriendChannelMatrices_args:
  """
  Attributes:
   - channelIds
  """
  # Slot layout: (field id, wire type, field name, type args, default);
  # slot 0 is unused.
  thrift_spec = (
    None, # 0
    (1, TType.LIST, 'channelIds', (TType.STRING,None), None, ), # 1
  )
  def __init__(self, channelIds=None,):
    self.channelIds = channelIds
  def read(self, iprot):
    # Fast path: decode via the C fastbinary extension when supported.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Generic decode: walk fields until STOP, skipping unknown fields.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.LIST:
          # Element count comes from the list header; each element is a
          # string read in sequence.
          self.channelIds = []
          (_etype645, _size642) = iprot.readListBegin()
          for _i646 in xrange(_size642):
            _elem647 = iprot.readString()
            self.channelIds.append(_elem647)
          iprot.readListEnd()
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    # Fast path: encode via the C fastbinary extension when available.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    # Generic encode: only fields that are set (not None) are written.
    oprot.writeStructBegin('getFriendChannelMatrices_args')
    if self.channelIds is not None:
      oprot.writeFieldBegin('channelIds', TType.LIST, 1)
      oprot.writeListBegin(TType.STRING, len(self.channelIds))
      for iter648 in self.channelIds:
        oprot.writeString(iter648)
      oprot.writeListEnd()
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    # No required fields declared for this struct.
    return
  def __hash__(self):
    # Java-style 31x rolling hash over all field values.
    # NOTE(review): hashing self.channelIds (a list) would raise TypeError
    # if ever called with a populated value -- inherent to this generator.
    value = 17
    value = (value * 31) ^ hash(self.channelIds)
    return value
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class getFriendChannelMatrices_result:
    """Thrift-generated result struct for getFriendChannelMatrices.

    Attributes:
      - success: FriendChannelMatricesResponse returned on success
      - e: ChannelException raised by the server, if any
    """
    thrift_spec = (
        (0, TType.STRUCT, 'success', (FriendChannelMatricesResponse, FriendChannelMatricesResponse.thrift_spec), None, ),  # 0
        (1, TType.STRUCT, 'e', (ChannelException, ChannelException.thrift_spec), None, ),  # 1
    )

    def __init__(self, success=None, e=None,):
        self.success = success
        self.e = e

    def read(self, iprot):
        """Deserialize this struct from the protocol *iprot*."""
        # Fast path: C-accelerated binary decoding when available.
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.STRUCT:
                    self.success = FriendChannelMatricesResponse()
                    self.success.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 1:
                if ftype == TType.STRUCT:
                    self.e = ChannelException()
                    self.e.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip for forward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to the protocol *oprot*."""
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('getFriendChannelMatrices_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.STRUCT, 0)
            self.success.write(oprot)
            oprot.writeFieldEnd()
        if self.e is not None:
            oprot.writeFieldBegin('e', TType.STRUCT, 1)
            self.e.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __hash__(self):
        value = 17
        value = (value * 31) ^ hash(self.success)
        value = (value * 31) ^ hash(self.e)
        return value

    def __repr__(self):
        # .items() (not the Py2-only .iteritems()) so repr works on Python 3 too.
        L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class getNotificationBadgeCount_args:
    """Thrift-generated argument struct for getNotificationBadgeCount.

    Attributes:
      - localRev: client's last-seen revision number (i64), field id 2
    """
    thrift_spec = (
        None,  # 0
        None,  # 1
        (2, TType.I64, 'localRev', None, None, ),  # 2
    )

    def __init__(self, localRev=None,):
        self.localRev = localRev

    def read(self, iprot):
        """Deserialize this struct from the protocol *iprot*."""
        # Fast path: C-accelerated binary decoding when available.
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 2:
                if ftype == TType.I64:
                    self.localRev = iprot.readI64()
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip for forward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to the protocol *oprot*."""
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('getNotificationBadgeCount_args')
        if self.localRev is not None:
            oprot.writeFieldBegin('localRev', TType.I64, 2)
            oprot.writeI64(self.localRev)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __hash__(self):
        value = 17
        value = (value * 31) ^ hash(self.localRev)
        return value

    def __repr__(self):
        # .items() (not the Py2-only .iteritems()) so repr works on Python 3 too.
        L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class getNotificationBadgeCount_result:
    """Thrift-generated result struct for getNotificationBadgeCount.

    Attributes:
      - success: badge count (i32) returned on success
      - e: ChannelException raised by the server, if any
    """
    thrift_spec = (
        (0, TType.I32, 'success', None, None, ),  # 0
        (1, TType.STRUCT, 'e', (ChannelException, ChannelException.thrift_spec), None, ),  # 1
    )

    def __init__(self, success=None, e=None,):
        self.success = success
        self.e = e

    def read(self, iprot):
        """Deserialize this struct from the protocol *iprot*."""
        # Fast path: C-accelerated binary decoding when available.
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.I32:
                    self.success = iprot.readI32()
                else:
                    iprot.skip(ftype)
            elif fid == 1:
                if ftype == TType.STRUCT:
                    self.e = ChannelException()
                    self.e.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip for forward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to the protocol *oprot*."""
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('getNotificationBadgeCount_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.I32, 0)
            oprot.writeI32(self.success)
            oprot.writeFieldEnd()
        if self.e is not None:
            oprot.writeFieldBegin('e', TType.STRUCT, 1)
            self.e.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __hash__(self):
        value = 17
        value = (value * 31) ^ hash(self.success)
        value = (value * 31) ^ hash(self.e)
        return value

    def __repr__(self):
        # .items() (not the Py2-only .iteritems()) so repr works on Python 3 too.
        L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class issueChannelToken_args:
    """Thrift-generated argument struct for issueChannelToken.

    Attributes:
      - channelId: identifier of the channel to issue a token for
    """
    thrift_spec = (
        None,  # 0
        (1, TType.STRING, 'channelId', None, None, ),  # 1
    )

    def __init__(self, channelId=None,):
        self.channelId = channelId

    def read(self, iprot):
        """Deserialize this struct from the protocol *iprot*."""
        # Fast path: C-accelerated binary decoding when available.
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    self.channelId = iprot.readString()
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip for forward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to the protocol *oprot*."""
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('issueChannelToken_args')
        if self.channelId is not None:
            oprot.writeFieldBegin('channelId', TType.STRING, 1)
            oprot.writeString(self.channelId)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __hash__(self):
        value = 17
        value = (value * 31) ^ hash(self.channelId)
        return value

    def __repr__(self):
        # .items() (not the Py2-only .iteritems()) so repr works on Python 3 too.
        L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class issueChannelToken_result:
    """Thrift-generated result struct for issueChannelToken.

    Attributes:
      - success: ChannelToken returned on success
      - e: ChannelException raised by the server, if any
    """
    thrift_spec = (
        (0, TType.STRUCT, 'success', (ChannelToken, ChannelToken.thrift_spec), None, ),  # 0
        (1, TType.STRUCT, 'e', (ChannelException, ChannelException.thrift_spec), None, ),  # 1
    )

    def __init__(self, success=None, e=None,):
        self.success = success
        self.e = e

    def read(self, iprot):
        """Deserialize this struct from the protocol *iprot*."""
        # Fast path: C-accelerated binary decoding when available.
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.STRUCT:
                    self.success = ChannelToken()
                    self.success.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 1:
                if ftype == TType.STRUCT:
                    self.e = ChannelException()
                    self.e.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip for forward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to the protocol *oprot*."""
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('issueChannelToken_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.STRUCT, 0)
            self.success.write(oprot)
            oprot.writeFieldEnd()
        if self.e is not None:
            oprot.writeFieldBegin('e', TType.STRUCT, 1)
            self.e.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __hash__(self):
        value = 17
        value = (value * 31) ^ hash(self.success)
        value = (value * 31) ^ hash(self.e)
        return value

    def __repr__(self):
        # .items() (not the Py2-only .iteritems()) so repr works on Python 3 too.
        L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class issueRequestToken_args:
    """Thrift-generated argument struct for issueRequestToken.

    Attributes:
      - channelId: identifier of the target channel
      - otpId: one-time-password identifier
    """
    thrift_spec = (
        None,  # 0
        (1, TType.STRING, 'channelId', None, None, ),  # 1
        (2, TType.STRING, 'otpId', None, None, ),  # 2
    )

    def __init__(self, channelId=None, otpId=None,):
        self.channelId = channelId
        self.otpId = otpId

    def read(self, iprot):
        """Deserialize this struct from the protocol *iprot*."""
        # Fast path: C-accelerated binary decoding when available.
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    self.channelId = iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRING:
                    self.otpId = iprot.readString()
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip for forward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to the protocol *oprot*."""
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('issueRequestToken_args')
        if self.channelId is not None:
            oprot.writeFieldBegin('channelId', TType.STRING, 1)
            oprot.writeString(self.channelId)
            oprot.writeFieldEnd()
        if self.otpId is not None:
            oprot.writeFieldBegin('otpId', TType.STRING, 2)
            oprot.writeString(self.otpId)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __hash__(self):
        value = 17
        value = (value * 31) ^ hash(self.channelId)
        value = (value * 31) ^ hash(self.otpId)
        return value

    def __repr__(self):
        # .items() (not the Py2-only .iteritems()) so repr works on Python 3 too.
        L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class issueRequestToken_result:
    """Thrift-generated result struct for issueRequestToken.

    Attributes:
      - success: request token string returned on success
      - e: ChannelException raised by the server, if any
    """
    thrift_spec = (
        (0, TType.STRING, 'success', None, None, ),  # 0
        (1, TType.STRUCT, 'e', (ChannelException, ChannelException.thrift_spec), None, ),  # 1
    )

    def __init__(self, success=None, e=None,):
        self.success = success
        self.e = e

    def read(self, iprot):
        """Deserialize this struct from the protocol *iprot*."""
        # Fast path: C-accelerated binary decoding when available.
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.STRING:
                    self.success = iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 1:
                if ftype == TType.STRUCT:
                    self.e = ChannelException()
                    self.e.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip for forward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to the protocol *oprot*."""
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('issueRequestToken_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.STRING, 0)
            oprot.writeString(self.success)
            oprot.writeFieldEnd()
        if self.e is not None:
            oprot.writeFieldBegin('e', TType.STRUCT, 1)
            self.e.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __hash__(self):
        value = 17
        value = (value * 31) ^ hash(self.success)
        value = (value * 31) ^ hash(self.e)
        return value

    def __repr__(self):
        # .items() (not the Py2-only .iteritems()) so repr works on Python 3 too.
        L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class issueRequestTokenWithAuthScheme_args:
    """Thrift-generated argument struct for issueRequestTokenWithAuthScheme.

    Attributes:
      - channelId: identifier of the target channel
      - otpId: one-time-password identifier
      - authScheme: list of accepted auth-scheme name strings
      - returnUrl: URL to redirect to after authentication
    """
    thrift_spec = (
        None,  # 0
        (1, TType.STRING, 'channelId', None, None, ),  # 1
        (2, TType.STRING, 'otpId', None, None, ),  # 2
        (3, TType.LIST, 'authScheme', (TType.STRING, None), None, ),  # 3
        (4, TType.STRING, 'returnUrl', None, None, ),  # 4
    )

    def __init__(self, channelId=None, otpId=None, authScheme=None, returnUrl=None,):
        self.channelId = channelId
        self.otpId = otpId
        self.authScheme = authScheme
        self.returnUrl = returnUrl

    def read(self, iprot):
        """Deserialize this struct from the protocol *iprot*."""
        # Fast path: C-accelerated binary decoding when available.
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    self.channelId = iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRING:
                    self.otpId = iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                if ftype == TType.LIST:
                    self.authScheme = []
                    (_etype652, _size649) = iprot.readListBegin()
                    # range() instead of the Py2-only xrange() for Py2/Py3 compatibility.
                    for _i653 in range(_size649):
                        _elem654 = iprot.readString()
                        self.authScheme.append(_elem654)
                    iprot.readListEnd()
                else:
                    iprot.skip(ftype)
            elif fid == 4:
                if ftype == TType.STRING:
                    self.returnUrl = iprot.readString()
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip for forward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to the protocol *oprot*."""
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('issueRequestTokenWithAuthScheme_args')
        if self.channelId is not None:
            oprot.writeFieldBegin('channelId', TType.STRING, 1)
            oprot.writeString(self.channelId)
            oprot.writeFieldEnd()
        if self.otpId is not None:
            oprot.writeFieldBegin('otpId', TType.STRING, 2)
            oprot.writeString(self.otpId)
            oprot.writeFieldEnd()
        if self.authScheme is not None:
            oprot.writeFieldBegin('authScheme', TType.LIST, 3)
            oprot.writeListBegin(TType.STRING, len(self.authScheme))
            for iter655 in self.authScheme:
                oprot.writeString(iter655)
            oprot.writeListEnd()
            oprot.writeFieldEnd()
        if self.returnUrl is not None:
            oprot.writeFieldBegin('returnUrl', TType.STRING, 4)
            oprot.writeString(self.returnUrl)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __hash__(self):
        value = 17
        value = (value * 31) ^ hash(self.channelId)
        value = (value * 31) ^ hash(self.otpId)
        value = (value * 31) ^ hash(self.authScheme)
        value = (value * 31) ^ hash(self.returnUrl)
        return value

    def __repr__(self):
        # .items() (not the Py2-only .iteritems()) so repr works on Python 3 too.
        L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class issueRequestTokenWithAuthScheme_result:
    """Thrift-generated result struct for issueRequestTokenWithAuthScheme.

    Attributes:
      - success: RequestTokenResponse returned on success
      - e: ChannelException raised by the server, if any
    """
    thrift_spec = (
        (0, TType.STRUCT, 'success', (RequestTokenResponse, RequestTokenResponse.thrift_spec), None, ),  # 0
        (1, TType.STRUCT, 'e', (ChannelException, ChannelException.thrift_spec), None, ),  # 1
    )

    def __init__(self, success=None, e=None,):
        self.success = success
        self.e = e

    def read(self, iprot):
        """Deserialize this struct from the protocol *iprot*."""
        # Fast path: C-accelerated binary decoding when available.
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.STRUCT:
                    self.success = RequestTokenResponse()
                    self.success.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 1:
                if ftype == TType.STRUCT:
                    self.e = ChannelException()
                    self.e.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip for forward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to the protocol *oprot*."""
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('issueRequestTokenWithAuthScheme_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.STRUCT, 0)
            self.success.write(oprot)
            oprot.writeFieldEnd()
        if self.e is not None:
            oprot.writeFieldBegin('e', TType.STRUCT, 1)
            self.e.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __hash__(self):
        value = 17
        value = (value * 31) ^ hash(self.success)
        value = (value * 31) ^ hash(self.e)
        return value

    def __repr__(self):
        # .items() (not the Py2-only .iteritems()) so repr works on Python 3 too.
        L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class reserveCoinUse_args:
    """Thrift-generated argument struct for reserveCoinUse.

    Attributes:
      - request: CoinUseReservation describing the coin use to reserve
      - locale: locale string for localized responses
    """
    thrift_spec = (
        None,  # 0
        None,  # 1
        (2, TType.STRUCT, 'request', (CoinUseReservation, CoinUseReservation.thrift_spec), None, ),  # 2
        (3, TType.STRING, 'locale', None, None, ),  # 3
    )

    def __init__(self, request=None, locale=None,):
        self.request = request
        self.locale = locale

    def read(self, iprot):
        """Deserialize this struct from the protocol *iprot*."""
        # Fast path: C-accelerated binary decoding when available.
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 2:
                if ftype == TType.STRUCT:
                    self.request = CoinUseReservation()
                    self.request.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                if ftype == TType.STRING:
                    self.locale = iprot.readString()
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip for forward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to the protocol *oprot*."""
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('reserveCoinUse_args')
        if self.request is not None:
            oprot.writeFieldBegin('request', TType.STRUCT, 2)
            self.request.write(oprot)
            oprot.writeFieldEnd()
        if self.locale is not None:
            oprot.writeFieldBegin('locale', TType.STRING, 3)
            oprot.writeString(self.locale)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __hash__(self):
        value = 17
        value = (value * 31) ^ hash(self.request)
        value = (value * 31) ^ hash(self.locale)
        return value

    def __repr__(self):
        # .items() (not the Py2-only .iteritems()) so repr works on Python 3 too.
        L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class reserveCoinUse_result:
    """Thrift-generated result struct for reserveCoinUse.

    Attributes:
      - success: reservation identifier string returned on success
      - e: ChannelException raised by the server, if any
    """
    thrift_spec = (
        (0, TType.STRING, 'success', None, None, ),  # 0
        (1, TType.STRUCT, 'e', (ChannelException, ChannelException.thrift_spec), None, ),  # 1
    )

    def __init__(self, success=None, e=None,):
        self.success = success
        self.e = e

    def read(self, iprot):
        """Deserialize this struct from the protocol *iprot*."""
        # Fast path: C-accelerated binary decoding when available.
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.STRING:
                    self.success = iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 1:
                if ftype == TType.STRUCT:
                    self.e = ChannelException()
                    self.e.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip for forward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to the protocol *oprot*."""
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('reserveCoinUse_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.STRING, 0)
            oprot.writeString(self.success)
            oprot.writeFieldEnd()
        if self.e is not None:
            oprot.writeFieldBegin('e', TType.STRUCT, 1)
            self.e.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __hash__(self):
        value = 17
        value = (value * 31) ^ hash(self.success)
        value = (value * 31) ^ hash(self.e)
        return value

    def __repr__(self):
        # .items() (not the Py2-only .iteritems()) so repr works on Python 3 too.
        L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class revokeChannel_args:
    """Thrift-generated argument struct for revokeChannel.

    Attributes:
      - channelId: identifier of the channel to revoke
    """
    thrift_spec = (
        None,  # 0
        (1, TType.STRING, 'channelId', None, None, ),  # 1
    )

    def __init__(self, channelId=None,):
        self.channelId = channelId

    def read(self, iprot):
        """Deserialize this struct from the protocol *iprot*."""
        # Fast path: C-accelerated binary decoding when available.
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    self.channelId = iprot.readString()
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip for forward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to the protocol *oprot*."""
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('revokeChannel_args')
        if self.channelId is not None:
            oprot.writeFieldBegin('channelId', TType.STRING, 1)
            oprot.writeString(self.channelId)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __hash__(self):
        value = 17
        value = (value * 31) ^ hash(self.channelId)
        return value

    def __repr__(self):
        # .items() (not the Py2-only .iteritems()) so repr works on Python 3 too.
        L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class revokeChannel_result:
    """Thrift-generated result struct for revokeChannel (void return).

    Attributes:
      - e: ChannelException raised by the server, if any
    """
    thrift_spec = (
        None,  # 0
        (1, TType.STRUCT, 'e', (ChannelException, ChannelException.thrift_spec), None, ),  # 1
    )

    def __init__(self, e=None,):
        self.e = e

    def read(self, iprot):
        """Deserialize this struct from the protocol *iprot*."""
        # Fast path: C-accelerated binary decoding when available.
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.e = ChannelException()
                    self.e.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip for forward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to the protocol *oprot*."""
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('revokeChannel_result')
        if self.e is not None:
            oprot.writeFieldBegin('e', TType.STRUCT, 1)
            self.e.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __hash__(self):
        value = 17
        value = (value * 31) ^ hash(self.e)
        return value

    def __repr__(self):
        # .items() (not the Py2-only .iteritems()) so repr works on Python 3 too.
        L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class syncChannelData_args:
    """Thrift-generated argument struct for syncChannelData.

    Attributes:
      - lastSynced: client's last sync timestamp/revision (i64)
      - locale: locale string for localized responses
    """
    thrift_spec = (
        None,  # 0
        None,  # 1
        (2, TType.I64, 'lastSynced', None, None, ),  # 2
        (3, TType.STRING, 'locale', None, None, ),  # 3
    )

    def __init__(self, lastSynced=None, locale=None,):
        self.lastSynced = lastSynced
        self.locale = locale

    def read(self, iprot):
        """Deserialize this struct from the protocol *iprot*."""
        # Fast path: C-accelerated binary decoding when available.
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 2:
                if ftype == TType.I64:
                    self.lastSynced = iprot.readI64()
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                if ftype == TType.STRING:
                    self.locale = iprot.readString()
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip for forward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to the protocol *oprot*."""
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('syncChannelData_args')
        if self.lastSynced is not None:
            oprot.writeFieldBegin('lastSynced', TType.I64, 2)
            oprot.writeI64(self.lastSynced)
            oprot.writeFieldEnd()
        if self.locale is not None:
            oprot.writeFieldBegin('locale', TType.STRING, 3)
            oprot.writeString(self.locale)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __hash__(self):
        value = 17
        value = (value * 31) ^ hash(self.lastSynced)
        value = (value * 31) ^ hash(self.locale)
        return value

    def __repr__(self):
        # .items() (not the Py2-only .iteritems()) so repr works on Python 3 too.
        L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class syncChannelData_result:
    """Thrift-generated result struct for syncChannelData.

    Attributes:
      - success: ChannelSyncDatas returned on success
      - e: ChannelException raised by the server, if any
    """
    thrift_spec = (
        (0, TType.STRUCT, 'success', (ChannelSyncDatas, ChannelSyncDatas.thrift_spec), None, ),  # 0
        (1, TType.STRUCT, 'e', (ChannelException, ChannelException.thrift_spec), None, ),  # 1
    )

    def __init__(self, success=None, e=None,):
        self.success = success
        self.e = e

    def read(self, iprot):
        """Deserialize this struct from the protocol *iprot*."""
        # Fast path: C-accelerated binary decoding when available.
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.STRUCT:
                    self.success = ChannelSyncDatas()
                    self.success.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 1:
                if ftype == TType.STRUCT:
                    self.e = ChannelException()
                    self.e.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip for forward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to the protocol *oprot*."""
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('syncChannelData_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.STRUCT, 0)
            self.success.write(oprot)
            oprot.writeFieldEnd()
        if self.e is not None:
            oprot.writeFieldBegin('e', TType.STRUCT, 1)
            self.e.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __hash__(self):
        value = 17
        value = (value * 31) ^ hash(self.success)
        value = (value * 31) ^ hash(self.e)
        return value

    def __repr__(self):
        # .items() (not the Py2-only .iteritems()) so repr works on Python 3 too.
        L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class updateChannelNotificationSetting_args:
    """Thrift-generated argument struct for updateChannelNotificationSetting.

    Attributes:
      - setting: list of ChannelNotificationSetting structs to apply
    """
    thrift_spec = (
        None,  # 0
        (1, TType.LIST, 'setting', (TType.STRUCT, (ChannelNotificationSetting, ChannelNotificationSetting.thrift_spec)), None, ),  # 1
    )

    def __init__(self, setting=None,):
        self.setting = setting

    def read(self, iprot):
        """Deserialize this struct from the protocol *iprot*."""
        # Fast path: C-accelerated binary decoding when available.
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.LIST:
                    self.setting = []
                    (_etype659, _size656) = iprot.readListBegin()
                    # range() instead of the Py2-only xrange() for Py2/Py3 compatibility.
                    for _i660 in range(_size656):
                        _elem661 = ChannelNotificationSetting()
                        _elem661.read(iprot)
                        self.setting.append(_elem661)
                    iprot.readListEnd()
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip for forward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to the protocol *oprot*."""
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('updateChannelNotificationSetting_args')
        if self.setting is not None:
            oprot.writeFieldBegin('setting', TType.LIST, 1)
            oprot.writeListBegin(TType.STRUCT, len(self.setting))
            for iter662 in self.setting:
                iter662.write(oprot)
            oprot.writeListEnd()
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __hash__(self):
        value = 17
        value = (value * 31) ^ hash(self.setting)
        return value

    def __repr__(self):
        # .items() (not the Py2-only .iteritems()) so repr works on Python 3 too.
        L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class updateChannelNotificationSetting_result:
    """
    Thrift-generated result struct for updateChannelNotificationSetting.

    Attributes:
     - e: ChannelException raised by the server, if any (thrift field id 1)
    """

    # Field spec consumed by fastbinary: index 0 unused, field 1 is the
    # exception struct.
    thrift_spec = (
        None,  # 0
        (1, TType.STRUCT, 'e', (ChannelException, ChannelException.thrift_spec), None, ),  # 1
    )

    def __init__(self, e=None,):
        self.e = e

    def read(self, iprot):
        """Deserialize this struct from the given input protocol."""
        # Fast path: C-accelerated binary decoding when the protocol supports it.
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        # Slow path: generic decoding; unknown fields are skipped.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.e = ChannelException()
                    self.e.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to the given output protocol."""
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('updateChannelNotificationSetting_result')
        if self.e is not None:
            oprot.writeFieldBegin('e', TType.STRUCT, 1)
            self.e.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # No required-field constraints to enforce.
        return

    def __hash__(self):
        value = 17
        value = (value * 31) ^ hash(self.e)
        return value

    def __repr__(self):
        # Debug representation listing every attribute (Python 2: dict.iteritems).
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.iteritems()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
"noreply@github.com"
] | fossabot.noreply@github.com |
f3ee6326209c8a3c3ee9f316fb892d28b080fdd9 | 48b2e6dad7d22e835e27d69a9c376c431cc230b0 | /democracy/migrations/0032_add_language_code_to_comment.py | 53a933fa7e6267eca20be53d6def03e820e8c435 | [
"MIT"
] | permissive | suutari-ai/kerrokantasi | 41d506d8cec0972ec7aeda201e5d8b291aac7d95 | 1fa09d68dbdc7567579c3596f9f2996085970d89 | refs/heads/master | 2020-12-30T15:42:21.899849 | 2017-04-28T15:45:00 | 2017-04-28T15:45:00 | 91,167,457 | 0 | 0 | null | 2017-05-13T10:46:11 | 2017-05-13T10:46:10 | null | UTF-8 | Python | false | false | 1,046 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.9.10 on 2016-12-16 15:03
from __future__ import unicode_literals
from django.db import migrations, models
from democracy.models import SectionComment
def forwards_func(apps, schema_editor):
    # Back-fill the new language_code field: re-run language detection on
    # every existing comment and persist the result.
    # NOTE(review): uses the live SectionComment model rather than the
    # historical one from `apps` -- relies on _detect_lang existing at
    # migration time.
    for comment in SectionComment.objects.all():
        comment._detect_lang()
        comment.save()
def backwards_func(apps, schema_editor):
    # Reversing is a no-op: the AddField operation itself drops the column.
    pass
class Migration(migrations.Migration):
    # Combined schema + data migration: adjusts SectionImage ordering options,
    # adds SectionComment.language_code, then back-fills it for existing rows.

    dependencies = [
        ('democracy', '0031_remove_untranslated_fields'),
    ]

    operations = [
        migrations.AlterModelOptions(
            name='sectionimage',
            options={'ordering': ('ordering', 'translations__title'), 'verbose_name': 'section image', 'verbose_name_plural': 'section images'},
        ),
        migrations.AddField(
            model_name='sectioncomment',
            name='language_code',
            field=models.CharField(blank=True, max_length=15, verbose_name='language code'),
        ),
        # Data step: populate language_code on rows that existed before this
        # migration.
        migrations.RunPython(forwards_func, backwards_func),
    ]
| [
"stephane.weiss@anders.fi"
] | stephane.weiss@anders.fi |
4ec30332211936d3429ef77f04126a097bcb5486 | 4c5130ee572a03dad00b0385029d38a84ae05780 | /Exercise-1/RANSAC.py | f3324ac1eab9ff8a35ff75674a07a834582aa864 | [] | no_license | Olcina/RoboND-Perception-Project | 84de966598704d062e47c47ce681cc0c88dbc86b | 42b61d2905c40d203c12ca5cf3eb5b54db7a5162 | refs/heads/master | 2021-07-16T07:59:07.349944 | 2017-10-07T11:17:02 | 2017-10-07T11:17:02 | 105,163,693 | 0 | 0 | null | 2017-09-28T15:10:54 | 2017-09-28T15:10:53 | null | UTF-8 | Python | false | false | 2,611 | py | # Import PCL module
import pcl
# Load Point Cloud file
# Point-cloud segmentation pipeline: downsample -> crop -> RANSAC plane fit
# (table) -> extract objects -> statistical outlier removal.
cloud = pcl.load_XYZRGB('tabletop.pcd')

# --- Voxel grid downsampling ---
vox = cloud.make_voxel_grid_filter()
# Leaf (voxel) edge length in meters; smaller keeps more detail.
LEAF_SIZE = 0.01
vox.set_leaf_size(LEAF_SIZE, LEAF_SIZE, LEAF_SIZE)
cloud_filtered = vox.filter()
#filename = 'voxel_downsampled.pcd'
#pcl.save(cloud_filtered, filename)

# --- Pass-through crop along z: keep only the table-height band ---
passthrough = cloud_filtered.make_passthrough_filter()
filter_axis = 'z'
passthrough.set_filter_field_name(filter_axis)
axis_min = 0.6
axis_max = 1.1
passthrough.set_filter_limits(axis_min, axis_max)
cloud_filtered = passthrough.filter()
filename = 'pass_through_filtered.pcd'
#pcl.save(cloud_filtered, filename)

# --- RANSAC plane segmentation: the dominant plane is the table top ---
seg = cloud_filtered.make_segmenter()
seg.set_model_type(pcl.SACMODEL_PLANE)
seg.set_method_type(pcl.SAC_RANSAC)
# Max point-to-plane distance (m) to count as an inlier.
max_distance = .01
seg.set_distance_threshold(max_distance)
inliers, coefficients = seg.segment()

# Inliers = table plane; outliers = objects sitting on the table.
extracted_inliers = cloud_filtered.extract(inliers, negative=False)
filename = 'extracted_inliers.pcd'
# NOTE(review): extracted_inliers is never saved -- the filename above is
# immediately overwritten and only the outliers are written to disk.
extracted_outliers = cloud_filtered.extract(inliers, negative=True)
filename = 'extracted_outliers.pcd'
pcl.save(extracted_outliers, filename)

# --- Statistical outlier removal on the cropped cloud ---
# NOTE(review): this filters cloud_filtered (table + objects), not
# extracted_outliers -- confirm that is the intended input.
outlier_filter = cloud_filtered.make_statistical_outlier_filter()
# Number of neighbors considered per point.
outlier_filter.set_mean_k(50)
# Scale factor: points farther than mean + x*stddev are removed.
x = 1.0
outlier_filter.set_std_dev_mul_thresh(x)
cloud_filtered = outlier_filter.filter()
filename = 'tabletop_objects.pcd'
pcl.save(cloud_filtered,filename)
| [
"karlos.olcina@gmail.com"
] | karlos.olcina@gmail.com |
a8b1a81814ec1eb946712044c5d82b17b319c4fb | e55b298ecff0e205710f5be9e938bd1762b2ad28 | /bin/bin.py | 05a91f0f8281d2a74afac2c0f5688f0209c7029d | [] | no_license | NullOnSpace/bili-lib | dffc94bf7bd4d23f7f5694e004f0bf91269cd0ae | 1d2e7b302647ae404c5ad9b674d4cbdb568ba077 | refs/heads/master | 2020-06-29T10:09:33.475053 | 2019-08-04T15:35:27 | 2019-08-04T15:35:27 | 200,507,844 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 171 | py | import sys
import os
# Make the project root importable so `core` resolves when this script is
# executed directly from bin/.
BASE_DIR = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
sys.path.append(BASE_DIR)
from core.main import main
# Echo the CLI arguments (debug aid), then hand off to the real entry point.
print(sys.argv)
main()
| [
"portuguese_d029@163.com"
] | portuguese_d029@163.com |
b110a7876636f36d982392b5382b185a91822e69 | 5d967f233e26fddc778e94a66617a76dd877a616 | /azext_script/compilers/az/handlers/SqlServer.py | 835cfaa48d023c1fc7be3700f6779d835968f265 | [
"MIT"
] | permissive | yorek/azure-script | 8756052cf9bae34416c63ff9f087d7c203a51e11 | d9da1b7d46c71415e38a6efe5b1c8d45b02b3704 | refs/heads/master | 2020-03-28T10:17:18.330896 | 2019-03-27T16:47:19 | 2019-03-27T16:47:19 | 148,097,555 | 1 | 1 | MIT | 2018-10-18T19:01:06 | 2018-09-10T04:07:37 | Python | UTF-8 | Python | false | false | 1,628 | py | from .Generic import GenericHandler
class SqlServerHandler(GenericHandler):
    """Translate `sql server` script commands into `az sql server` CLI calls."""

    # The AZ object this handler will manage
    azure_object = "sql server"

    def execute(self):
        """Build the az command, validating create-specific parameters first."""
        if (self.action == "create"):
            # use the value set by the "group" command
            # to fill the "resource-group" parameter, required by "az sql server" command
            self.add_context_parameter("resource-group", "group")

            # as above, take location set in the script and
            # use it as parameter
            self.add_context_parameter("location", "location")

            # check that required parameters are actually provided
            self.set_required_parameter("admin-user")
            self.set_required_parameter("admin-password")

        cmd = super(SqlServerHandler, self).execute()

        # push object name into script context so later commands
        # (e.g. firewall rules) can reference this server
        self.save_to_context()

        return cmd
class SqlServerFirewallRuleHandler(GenericHandler):
    """Translate firewall-rule commands into `az sql server firewall-rule` calls."""

    azure_object = "sql server firewall-rule"

    def execute(self):
        """Build the az command, pulling server/group from the script context."""
        if (self.action == "create"):
            # add parameters from context: the enclosing resource group and
            # the sql server created earlier in the script
            self.add_context_parameter("resource-group", "group")
            self.add_context_parameter("server", "sql server")

            # check that required parameters are actually provided
            self.set_required_parameter("start-ip-address")
            self.set_required_parameter("end-ip-address")

        cmd = super(SqlServerFirewallRuleHandler, self).execute()

        # push object name into script context
        self.save_to_context()

        return cmd
| [
"damauri@microsoft.com"
] | damauri@microsoft.com |
3a867c97d04bc12c43529626104a44e5cde357d0 | 5982a9c9c9cb682ec9732f9eeb438b62c61f2e99 | /Problem_131/my_solution.py | 0957503ab542faeb851bc44ae52794dc24263800 | [] | no_license | chenshanghao/LeetCode_learning | 6fdf98473be8f2240dd86d5586bbd1bbb95d6b0c | acf2395f3b946054009d4543f2a13e83402323d3 | refs/heads/master | 2021-10-23T05:23:01.970535 | 2019-03-15T05:08:54 | 2019-03-15T05:08:54 | 114,688,902 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 787 | py | class Solution(object):
def partition(self, s):
"""
:type s: str
:rtype: List[List[str]]
"""
self.result = []
if len(s) == 0: return self.result
self.backtrack([],0,s)
return self.result
def IsPalindrome(self, string):
i, j = 0, len(string)-1
while(i<=j):
if string[i]!= string[j]:
return False
i+=1
j-=1
return True
def backtrack(self, temp, start, s):
if start >= len(s):
self.result.append(temp[:])
for i in range(start,len(s)):
if self.IsPalindrome(s[start:i+1]):
temp.append(s[start:i+1])
self.backtrack(temp, i+1, s)
temp.pop() | [
"21551021@zju.edu.cn"
] | 21551021@zju.edu.cn |
6264d32ea5a91e831bec9657167223a53ee6f3ec | b70a5715139b62398980f5e82fcf2d398880155c | /TNS/ArmRopRegisterControl.py | 6368280efd22bdb389248771f23b60a7111c86cd | [] | no_license | amilarajans/ghidra_scripts | 2219fb3568c2ec8e7be59e670285dedcc1bcd7cc | d1d187a05d259d0b6ada952dab707a4b69c6359c | refs/heads/main | 2023-05-02T14:07:01.806371 | 2021-05-23T12:59:23 | 2021-05-23T12:59:23 | 370,053,650 | 4 | 0 | null | null | null | null | UTF-8 | Python | false | false | 897 | py | # Find ARM ROP gadgets that give control of registers by popping them off the stack.
#@author fuzzywalls
#@category TNS.ARM
#@menupath TNS.Arm Rop.Register Control
from utils import armrop, utils
# Abort unless the loaded binary targets an ARM processor.
utils.allowed_processors(currentProgram, 'ARM')

# Ask the analyst which register the gadget must load from the stack.
reg = askChoice('Source Register',
                'What register do you want to control?',
                ['Any', 'r0', 'r1', 'r2', 'r3', 'r4', 'r5', 'r6', 'r7', 'r8',
                 'r9', 'r10', 'r11', 'r12'],
                'Any')

if reg == 'Any':
    print 'Searching for gadgets that give control of any register.'
    # Capture group matches any register r0-r12 in the operand list.
    reg = '.*(r[01]?\d).*'
else:
    print 'Searching for gadgets that give control of %s.' % reg
    reg = '.*' + reg + ' .*'

# Match "ldmia sp!, {...reg...}" -- pops the chosen register off the stack.
reg_control = armrop.ArmInstruction('ldmia', 'sp!', reg)

arm_rop = armrop.ArmRop(currentProgram)
control = arm_rop.find_instructions([reg_control], controllable_calls=False)
control.pretty_print()
| [
"amilarajans@gmail.com"
] | amilarajans@gmail.com |
b6c172d55c5d5eaf72f119d31a4f42b3720eb1e9 | e133f9a2158fe6fe1d7d49d197d9eb918d36d2e0 | /src/GPT2_model_position_fixed.py | 5da7faccd064e1ebd1c779bdda0f60dbe52e6f78 | [
"Apache-2.0"
] | permissive | fraware/MindSpore-GPT2 | 0c8a2916e4521ed02a499d3094dbb7edeab52855 | 0ee3b5c327ccbbb306899c3f3f5d258c259c348e | refs/heads/master | 2023-09-04T06:57:51.124954 | 2021-10-07T20:07:28 | 2021-10-07T20:07:28 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 29,838 | py | import math
import copy
import numpy as np
import mindspore.common.dtype as mstype
import mindspore.nn as nn
import mindspore.ops.functional as F
from mindspore.ops import operations as P
from mindspore.common.tensor import Tensor
from mindspore.common.parameter import Parameter
from .weight_init import normal_weight, weight_variable, zero_weight
class GPT2Config:
    """
    Configuration for `GPT2Model`.

    Args:
        batch_size (int): Batch size of input dataset. Default: 512.
        seq_length (int): Length of input sequence. Default: 1024.
        vocab_size (int): The shape of each embedding vector. Default: 50257.
        d_model (int): Size of the bert encoder layers. Default: 768.
        num_hidden_layers (int): Number of hidden layers in the GPT2Transformer decoder block. Default: 12.
        num_attention_heads (int): Number of attention heads in the GPT2Transformer decoder block. Default: 12.
        intermediate_size (int): Size of intermediate layer in the GPT2Transformer decoder block. Default: 3072.
        hidden_act (str): Activation function used in the GPT2Transformer decoder block. Default: "gelu".
        hidden_dropout (float): The dropout probability for GPT2Output. Default: 0.1.
        attention_dropout (float): The dropout probability for MaskedMultiHeadAttention. Default: 0.1.
        max_position_embeddings (int): Maximum length of sequences used in this model. Default: 1024.
        initializer_range (float): Initialization value of TruncatedNormal. Default: 0.02.
        input_mask_from_dataset (bool): Specifies whether to use the input mask that loaded from dataset. Default: True.
        dtype (:class:`mindspore.dtype`): Data type of the input. Default: mstype.float32.
        compute_type (:class:`mindspore.dtype`): Compute type in GPT2Transformer. Default: mstype.float32.

    Note:
        seq_length is expected not to exceed max_position_embeddings, since
        the fixed position table is built with max_position_embeddings rows.
    """
    def __init__(self,
                 batch_size=512,
                 seq_length=1024,
                 vocab_size=50257,
                 d_model=768,
                 num_hidden_layers=12,
                 num_attention_heads=12,
                 intermediate_size=3072,
                 hidden_act="gelu",
                 hidden_dropout=0.1,
                 attention_dropout=0.1,
                 max_position_embeddings=1024,
                 initializer_range=0.02,
                 input_mask_from_dataset=True,
                 dtype=mstype.float32,
                 compute_type=mstype.float32):
        # Plain value holder: every constructor argument is stored verbatim.
        self.batch_size = batch_size
        self.seq_length = seq_length
        self.vocab_size = vocab_size
        self.d_model = d_model
        self.num_hidden_layers = num_hidden_layers
        self.num_attention_heads = num_attention_heads
        self.intermediate_size = intermediate_size
        self.hidden_act = hidden_act
        self.hidden_dropout = hidden_dropout
        self.attention_dropout = attention_dropout
        self.max_position_embeddings = max_position_embeddings
        self.initializer_range = initializer_range
        self.input_mask_from_dataset = input_mask_from_dataset
        self.dtype = dtype
        self.compute_type = compute_type
class EmbeddingLookup(nn.Cell):
    """
    A embeddings lookup table with a fixed dictionary and size.

    Args:
        vocab_size (int): Size of the dictionary of embeddings.
        embedding_dim (int): The size of each embedding vector.
        use_one_hot_embeddings (bool): Specifies whether to use one hot encoding form. Default: False.
    """
    def __init__(self,
                 vocab_size,
                 embedding_dim,
                 use_one_hot_embeddings=False):
        super(EmbeddingLookup, self).__init__()
        self.vocab_size = vocab_size
        self.embedding_dim = embedding_dim
        self.use_one_hot_embeddings = use_one_hot_embeddings
        # Learnable table of shape [vocab_size, embedding_dim].
        self.embedding_table = Parameter(normal_weight([vocab_size, embedding_dim], embedding_dim), name='embedding_table')
        self.expand = P.ExpandDims()
        self.shape_flat = (-1, )
        self.gather = P.GatherV2()  # gather(params, indices, axis): axis=0 selects rows (like index_select)
        self.one_hot = P.OneHot()
        self.on_value = Tensor(1.0, mstype.float32)
        self.off_value = Tensor(0.0, mstype.float32)
        self.array_mul = P.MatMul()
        self.reshape = P.Reshape()
        self.shape = P.Shape()

    def construct(self, input_ids):
        """Map token ids to embeddings; returns (embeddings, embedding_table)."""
        input_shape = self.shape(input_ids)  # [batch_size, seq_length]
        flat_ids = self.reshape(input_ids, self.shape_flat)  # [batch_size * seq_length]
        if self.use_one_hot_embeddings:
            # One-hot path: the same lookup expressed as a matrix product.
            one_hot_ids = self.one_hot(flat_ids, self.vocab_size, self.on_value, self.off_value)
            output_for_reshape = self.array_mul(one_hot_ids, self.embedding_table)
        else:
            output_for_reshape = self.gather(self.embedding_table, flat_ids, 0)  # [batch_size * seq_length, embedding_dim]
        out_shape = input_shape + (self.embedding_dim, )
        output = self.reshape(output_for_reshape, out_shape)  # [batch_size, seq_length, embedding_dim]
        return output, self.embedding_table
def position_encoding(length,
                      d_model,
                      min_timescale=1,
                      max_timescale=1e4):
    """
    Build the fixed sinusoidal position-encoding table.

    Each position maps to d_model/2 sine values followed by d_model/2
    cosine values, with geometrically spaced timescales between
    min_timescale and max_timescale.

    Args:
        length (int): Number of positions (rows) to generate.
        d_model (int): Hidden size; expected to be even.
        min_timescale (float): Shortest timescale. Default: 1.
        max_timescale (float): Longest timescale. Default: 10000.

    Returns:
        numpy.ndarray of shape (length, d_model).
    """
    half = d_model // 2
    steps = np.arange(length, dtype=np.float32)
    # Geometric progression of inverse timescales across the half dimension.
    log_step = np.log(max_timescale / min_timescale) / (half - 1)
    inv_scales = min_timescale * np.exp(np.arange(half, dtype=np.float32) * -log_step)
    # Outer product: one angle per (position, timescale) pair.
    angles = np.expand_dims(steps, 1) * np.expand_dims(inv_scales, 0)
    return np.concatenate([np.sin(angles), np.cos(angles)], axis=1)
class EmbeddingPostprocessor(nn.Cell):
    """
    Postprocessors apply positional embeddings to word embeddings.

    The position table is built once from fixed sinusoids (not trained).

    Args:
        embedding_dim (int): The size of each embedding vector.
        max_position_embeddings (int): Maximum length of sequences used in this model. Default: 1024.
        dropout_prob (float): The dropout probability. Default: 0.1.
    """
    def __init__(self,
                 embedding_dim,
                 max_position_embeddings=1024,
                 dropout_prob=0.1):
        super(EmbeddingPostprocessor, self).__init__()
        self.add = P.TensorAdd()
        self.dropout = nn.Dropout(1 - dropout_prob, dtype=mstype.float32)
        self.use_dropout = dropout_prob > 0
        self.expand_dims = P.ExpandDims()
        # Fixed (non-trainable) sinusoidal position table, sliced per input length.
        self.position_embedding_table = Tensor(position_encoding(max_position_embeddings, embedding_dim), mstype.float32)
        self.shape = P.Shape()

    def construct(self, word_embeddings):
        """Add (broadcast) position encodings to word embeddings, then dropout."""
        input_shape = self.shape(word_embeddings)  # [batch_size, seq_length, embedding_dim]
        input_len = input_shape[1]  # seq_length
        # Take the first seq_length rows of the table.
        position_embeddings = self.position_embedding_table[0:input_len:1, ::]
        # Add a leading batch axis so the add broadcasts over the batch.
        position_embeddings = self.expand_dims(position_embeddings, 0)
        output = self.add(word_embeddings, position_embeddings)
        if self.use_dropout:
            output = self.dropout(output)
        return output
class CastWrapper(nn.Cell):
    """
    Cast wrapper: casts its input to a fixed destination dtype.

    Args:
        dst_type (:class:`mindspore.dtype`): Target dtype. Default: mstype.float32.
    """
    def __init__(self,
                 dst_type=mstype.float32):
        super(CastWrapper, self).__init__()
        self.cast = P.Cast()
        self.dst_type = dst_type

    def construct(self, x):
        # Return x converted to dst_type.
        return self.cast(x, self.dst_type)
class LayerNorm(nn.Cell):
    """
    Do layer norm over the last dimension.

    The input is cast to float32 so normalization statistics are computed in
    full precision, then cast back to the input dtype.

    Args:
        in_channels (int): In channels number of layer norm
    """
    def __init__(self,
                 in_channels=None):
        super(LayerNorm, self).__init__()
        self.layer_norm = nn.LayerNorm((in_channels, ))
        self.cast = P.Cast()
        self.get_dtype = P.DType()

    def construct(self, input_tensor):
        output = self.cast(input_tensor, mstype.float32)
        output = self.layer_norm(output)
        # Restore the caller's dtype (e.g. when running in float16).
        output = self.cast(output, self.get_dtype(input_tensor))
        return output
class ResidualConnection(nn.Cell):
    """
    Add residual to output: out = dropout(hidden) + input.

    Args:
        dropout_prob (float): Dropout rate applied to the sublayer output.

    Returns:
        Tensor, with the same shape of hidden_tensor
    """
    def __init__(self,
                 dropout_prob=0.1):
        super(ResidualConnection, self).__init__()
        self.add = P.TensorAdd()
        self.dropout = nn.Dropout(1 - dropout_prob)
        self.use_dropout = dropout_prob > 0

    def construct(self, hidden_tensor, input_tensor):
        # hidden_tensor is the output of sublayer; input_tensor is the skip path.
        output = hidden_tensor
        if self.use_dropout:
            output = self.dropout(output)
        output = self.add(output, input_tensor)
        return output
class Conv1D(nn.Cell):
    """
    1D-convolutional layer as defined by Radford et al. for OpenAI GPT (and also used in GPT-2).
    Basically works like a linear layer but the weights are transposed:
    output = input @ weight + bias, with weight of shape [nx, nf].

    Args:
        nx (int): The number of input features.
        nf (int): The number of output features.
    """
    def __init__(self,
                 nx,
                 nf):
        super(Conv1D, self).__init__()
        self.nx = nx
        self.nf = nf
        self.weight = Parameter(normal_weight([nx, nf], nf), name='projection_weight')
        self.bias = Parameter(zero_weight(nf), name='projection_bias')
        self.matmul = P.MatMul()
        self.add = P.TensorAdd()

    def construct(self, input_tensor):  # [batch_size * seq_length, nx]
        output_tensor = self.matmul(input_tensor, self.weight)  # [batch_size * seq_length, nf]
        output_tensor = self.add(output_tensor, self.bias)  # [batch_size * seq_length, nf]
        return output_tensor
class MaskedSelfAttention(nn.Cell):
    """
    Apply masked multi-head attention.

    Args:
        batch_size (int): Batch size of input datasets. Default: 512.
        d_model (int): Size of last dim of input tensor. Default: 768.
        seq_length (int): Length of input tensor sequence. Default: 1024.
        num_attention_heads (int): Number of attention heads. Default: 12.
        dim_per_head (int): Size of each attention head. Default: 64.
        has_attention_mask (bool): Specifies whether to use attention mask. Default: True.
        do_return_2d_tensor (bool): If True, output is [batch*seq, d_model],
            otherwise [batch, seq, d_model]. Default: True.
        attention_dropout (float): The dropout probability for attention probs. Default: 0.0.
        compute_type (:class:`mindspore.dtype`): Compute type. Default: mstype.float32.

    Returns:
        Tensor, with the shape [batch_size * seq_length, d_model] (2d mode).
    """
    def __init__(self,
                 batch_size=512,
                 d_model=768,
                 seq_length=1024,
                 num_attention_heads=12,
                 dim_per_head=64,
                 has_attention_mask=True,
                 do_return_2d_tensor=True,
                 attention_dropout=0.0,
                 compute_type=mstype.float32):
        super(MaskedSelfAttention, self).__init__()

        self.batch_size = batch_size
        self.d_model = d_model
        self.seq_length = seq_length
        self.num_heads = num_attention_heads
        self.dim_per_head = dim_per_head
        self.has_attention_mask = has_attention_mask
        assert has_attention_mask

        # 1/sqrt(d_k) attention scale.
        self.scale = Tensor([1.0 / math.sqrt(float(self.dim_per_head))], dtype=compute_type)
        # Large negative value added at masked positions before softmax.
        self.mask_data = Tensor([-10000.0, ], dtype=compute_type)
        self.split_head_shape = (self.batch_size, self.seq_length, self.num_heads, self.dim_per_head)

        # Fused q/k/v projection and output projection (GPT-2 style Conv1D).
        self.c_attn = Conv1D(d_model, d_model*3)
        self.c_proj = Conv1D(d_model, d_model)

        self.split_for_qkv = P.Split(1, 3)  # P.Split(axis, output_num)
        # self.shape = P.Shape()
        self.reshape = P.Reshape()
        self.transpose = P.Transpose()
        self.trans_shape = (0, 2, 1, 3)
        self.matmul_trans_b = P.BatchMatMul(transpose_b=True)
        self.matmul = P.BatchMatMul()
        self.multiply = P.Mul()

        if self.has_attention_mask:
            self.expand_dims = P.ExpandDims()
            self.sub = P.Sub()
            self.add = P.TensorAdd()
            self.cast = P.Cast()
            self.get_dtype = P.DType()

        if do_return_2d_tensor:
            self.shape_return = (batch_size * seq_length, d_model)
        else:
            self.shape_return = (batch_size, seq_length, d_model)

        self.softmax = nn.Softmax()
        self.softmax_cast = P.Cast()
        self.dropout = nn.Dropout(1 - attention_dropout)
        self.use_attention_dropout = attention_dropout > 0

    def construct(self, input_tensor, attention_mask):  # input_tensor [batch_size * seq_length, d_model]
        # Fused projection then split into query / key / value.
        input_tensor = self.c_attn(input_tensor)  # [batch_size * seq_length, d_model*3]
        input_tensor = self.split_for_qkv(input_tensor)
        query = input_tensor[0]  # [batch_size * seq_length, d_model]
        key = input_tensor[1]
        value = input_tensor[2]

        # split head
        query = self.reshape(query, self.split_head_shape)
        query = self.transpose(query, self.trans_shape)  # [batch_size, num_heads, seq_len, dim_per_head]

        key = self.reshape(key, self.split_head_shape)
        key = self.transpose(key, self.trans_shape)  # [batch_size, num_heads, seq_len, dim_per_head]

        value = self.reshape(value, self.split_head_shape)
        value = self.transpose(value, self.trans_shape)  # [batch_size, num_heads, seq_len, dim_per_head]

        # attention and mask
        attention_scores = self.matmul_trans_b(query, key)  # [batch_size, num_heads, seq_len, seq_len]
        attention_scores = self.multiply(attention_scores, self.scale)

        if self.has_attention_mask:
            attention_mask = self.expand_dims(attention_mask, 1)  # [batch_size, 1, seq_length, seq_length]
            # (1 - mask) * -10000: masked positions get a large negative bias.
            multiply_out = self.sub(self.cast(F.tuple_to_array((1.0,)), self.get_dtype(attention_scores)),
                                    self.cast(attention_mask, self.get_dtype(attention_scores)))
            adder = self.multiply(multiply_out, self.mask_data)
            attention_scores = self.add(adder, attention_scores)

        # Softmax in float32, then cast back to the key dtype.
        attention_scores = self.softmax_cast(attention_scores, mstype.float32)
        attention_probs = self.softmax(attention_scores)  # [batch_size, num_heads, seq_len, seq_len]
        attention_probs = self.softmax_cast(attention_probs, self.get_dtype(key))
        if self.use_attention_dropout:
            attention_probs = self.dropout(attention_probs)

        outputs = self.matmul(attention_probs, value)  # [batch_size, num_heads, seq_len, dim_per_head]

        # merge heads
        outputs = self.transpose(outputs, self.trans_shape)  # [batch_size, seq_len, num_heads, dim_per_head]
        outputs = self.reshape(outputs, self.shape_return)  # default True: [batch_size * seq_len, d_model]

        # project
        outputs = self.c_proj(outputs)
        return outputs
class FeedForward(nn.Cell):
    """
    Apply two-layer feed forward with pre-LayerNorm and a residual connection.

    Args:
        in_channels (int): Size of the input layer. Default: 786.
        out_channels (int): Size of the output layers. Default: 768.
        hidden_size (int): Size of the hidden layer. Default: 3072.
        hidden_dropout (float): The dropout probability for hidden outputs. Default: 0.1.
    """
    def __init__(self,
                 in_channels=786,
                 out_channels=768,
                 hidden_size=3072,
                 hidden_dropout=0.1):
        super(FeedForward, self).__init__()

        self.c_fc = Conv1D(in_channels, hidden_size)
        self.c_proj = Conv1D(hidden_size, out_channels)

        self.layernorm = LayerNorm(in_channels=in_channels)
        self.residual_connect = ResidualConnection(dropout_prob=hidden_dropout)
        # NOTE(review): self.gelu is constructed but never applied in
        # construct() -- confirm whether the activation was meant to be
        # called between c_fc and c_proj.
        self.gelu = nn.GELU()
        self.dropout = nn.Dropout(1 - hidden_dropout)
        self.use_dropout = hidden_dropout > 0
        self.reshape = P.Reshape()

    def construct(self, input_tensor):  # input_tensor shape [batch_size * seq_len, d_model]
        # LayerNorm (pre-norm)
        output = self.layernorm(input_tensor)
        # Feed Forward
        output = self.c_fc(output)  # [batch_size * seq_len, hidden_size]
        if self.use_dropout:
            output = self.dropout(output)
        output = self.c_proj(output)  # [batch_size * seq_len, d_model]
        # Add (residual)
        output = self.residual_connect(output, input_tensor)  # [batch_size * seq_len, d_model]
        return output
class MaskedMultiHeadAttention(nn.Cell):
    """
    Pre-LayerNorm masked self-attention sublayer with residual connection.

    Args:
        batch_size (int): Batch size of input dataset. Default: 512.
        seq_length (int): Length of input sequence. Default: 2014.
        d_model (int): Hidden size. Default: 768.
        num_attention_heads (int): Number of attention heads. Default: 12.
        attention_dropout (float): Dropout on attention probs. Default: 0.02.
        hidden_dropout (float): Dropout on the sublayer output. Default: 0.1.
        has_attention_mask (bool): Whether to apply the attention mask. Default: True.
        compute_type (:class:`mindspore.dtype`): Compute type. Default: mstype.float32.
    """
    def __init__(self,
                 batch_size=512,
                 seq_length=2014,
                 d_model=768,
                 num_attention_heads=12,
                 attention_dropout=0.02,
                 hidden_dropout=0.1,
                 has_attention_mask=True,
                 compute_type=mstype.float32
                 ):
        super(MaskedMultiHeadAttention, self).__init__()
        if d_model % num_attention_heads != 0:
            raise ValueError("The hidden size (%d) is not a multiple of the number "
                             "of attention heads (%d)" % (d_model, num_attention_heads))
        self.dim_per_head = int(d_model / num_attention_heads)  # e.g. 768/12 = 64

        self.masked_self_attention = MaskedSelfAttention(
            batch_size=batch_size,
            d_model=d_model,
            seq_length=seq_length,
            num_attention_heads=num_attention_heads,
            dim_per_head=self.dim_per_head,
            has_attention_mask=has_attention_mask,
            do_return_2d_tensor=True,
            attention_dropout=attention_dropout,
            compute_type=compute_type
        )

        self.layer_norm = LayerNorm(in_channels=d_model)
        self.residual_connection = ResidualConnection(dropout_prob=hidden_dropout)
        # NOTE(review): reshape/new_shape are unused in construct().
        self.reshape = P.Reshape()
        self.new_shape = (-1, d_model)

    def construct(self, input_tensor, attention_mask):  # input tensor shape [batch_size * seq_length, d_model]
        # layernorm (pre-norm)
        output_tensor = self.layer_norm(input_tensor)
        # masked multi-head attention
        attention_output = self.masked_self_attention(output_tensor, attention_mask)  # [batch_size * seq_length, d_model]
        # residual connection
        output = self.residual_connection(attention_output, input_tensor)  # [batch_size * seq_length, d_model]
        return output
class DecoderBlock(nn.Cell):
    """
    Decoder block used in GPT2: masked attention sublayer + feed-forward sublayer.

    Args:
        batch_size (int): Batch size of input dataset. Default: 512.
        seq_length (int): Length of input sequence. Default: 1024.
        d_model (int): Size of the GPT2 decoder layers. Default: 768.
        num_attention_heads (int): Number of attention heads. Default: 12.
        intermediate_size (int): Size of intermediate layer. Default: 3072.
        attention_dropout (float): The dropout probability for MaskedMultiHeadAttention. Default: 0.02.
        hidden_dropout (float): The dropout probability for hidden outputs. Default: 0.1.
        has_attention_mask (bool): Specifies whether to use attention mask. Default: True.
        compute_type (:class:`mindspore.dtype`): Compute type in attention. Default: mstype.float32.
    """
    def __init__(self,
                 batch_size=512,
                 seq_length=1024,
                 d_model=768,
                 num_attention_heads=12,
                 intermediate_size=3072,
                 attention_dropout=0.02,
                 hidden_dropout=0.1,
                 has_attention_mask=True,
                 compute_type=mstype.float32
                 ):
        super(DecoderBlock, self).__init__()
        if d_model % num_attention_heads != 0:
            raise ValueError("The hidden size (%d) is not a multiple of the number "
                             "of attention heads (%d)" % (d_model, num_attention_heads))
        self.dim_per_head = int(d_model / num_attention_heads)  # e.g. 64

        self.masked_multi_head_attention = MaskedMultiHeadAttention(
            batch_size=batch_size,
            seq_length=seq_length,
            d_model=d_model,
            num_attention_heads=num_attention_heads,
            attention_dropout=attention_dropout,
            hidden_dropout=hidden_dropout,
            has_attention_mask=has_attention_mask,
            compute_type=compute_type
        )
        self.feedforward = FeedForward(
            in_channels=d_model,
            out_channels=d_model,
            hidden_size=intermediate_size,
            hidden_dropout=hidden_dropout
        )

        self.reshape = P.Reshape()
        self.new_shape = (-1, d_model)

    def construct(self, input_tensor, attention_mask):  # input tensor shape [batch_size, seq_length, d_model]
        # Flatten batch and sequence dims for the 2D sublayers.
        input_tensor = self.reshape(input_tensor, self.new_shape)  # [batch_size * seq_length, d_model]
        # masked multi head attention with ln, res
        attention_output = self.masked_multi_head_attention(input_tensor, attention_mask)
        # feed forward with ln, res
        output = self.feedforward(attention_output)  # [batch_size * seq_length, d_model]
        return output
class GPT2Transformer(nn.Cell):
    """
    Multi-layer GPT2 transformer: a stack of identical DecoderBlocks.

    Args:
        batch_size (int): Batch size of input dataset. Default: 512.
        d_model (int): Size of the decoder layers. Default: 768.
        seq_length (int): Length of input sequence. Default: 1024.
        num_hidden_layers (int): Number of hidden layers in decoder cells. Default: 12.
        num_attention_heads (int): Number of attention heads in decoder cells. Default: 12.
        intermediate_size (int): Size of intermediate layer in decoder cells. Default: 3072.
        has_attention_mask (bool): Specifies whether to use attention mask. Default: True.
        attention_dropout (float): The dropout probability for MaskedMultiHeadAttention. Default: 0.1.
        hidden_dropout (float): The dropout probability for hidden outputs. Default: 0.1.
        compute_type (:class:`mindspore.dtype`): Compute type. Default: mstype.float32.
    """
    def __init__(self,
                 batch_size=512,
                 d_model=768,
                 seq_length=1024,
                 num_hidden_layers=12,
                 num_attention_heads=12,
                 intermediate_size=3072,
                 has_attention_mask=True,
                 attention_dropout=0.1,
                 hidden_dropout=0.1,
                 compute_type=mstype.float32):
        super(GPT2Transformer, self).__init__()
        # Build num_hidden_layers identical decoder blocks.
        layers = []
        for _ in range(num_hidden_layers):
            layer = DecoderBlock(batch_size=batch_size,
                                 seq_length=seq_length,
                                 d_model=d_model,
                                 num_attention_heads=num_attention_heads,
                                 intermediate_size=intermediate_size,
                                 attention_dropout=attention_dropout,
                                 hidden_dropout=hidden_dropout,
                                 has_attention_mask=has_attention_mask,
                                 compute_type=compute_type)
            layers.append(layer)
        self.layers = nn.CellList(layers)

        self.reshape = P.Reshape()
        self.new_shape = (-1, d_model)
        self.out_shape = (batch_size, seq_length, d_model)

    def construct(self, input_tensor, attention_mask):
        """Run the input through every decoder block in sequence."""
        prev_output = self.reshape(input_tensor, self.new_shape)  # [batch*seq, d_model]
        for layer_module in self.layers:
            layer_output = layer_module(prev_output, attention_mask)
            prev_output = layer_output
        # Restore the 3D [batch, seq, d_model] layout for the caller.
        output = self.reshape(prev_output, self.out_shape)
        return output
class CreateAttentionMaskFromInputMask(nn.Cell):
"""
Create attention mask according to input mask.
Args:
config (Class): Configuration for GPT2Model.
"""
def __init__(self, config):
super(CreateAttentionMaskFromInputMask, self).__init__()
self.input_mask_from_dataset = config.input_mask_from_dataset
self.input_mask = None
assert self.input_mask_from_dataset
self.cast = P.Cast()
self.shape = P.Shape()
self.reshape = P.Reshape()
self.matmul = P.BatchMatMul()
self.multiply = P.Mul()
# mask future positions
ones = np.ones(shape=(config.batch_size, config.seq_length, config.seq_length))
self.lower_triangle_mask = Tensor(np.tril(ones), dtype=mstype.float32)
def construct(self, input_mask, mask_future=True):
"""
Construct network.
Args:
input_mask (Tensor): Tensor mask vectors with shape [batch_size, seq_len].
mask_future (bool): Whether mask future (for decoder training). Default: True.
Returns:
attention_mask (Tensor): shape [batch_size, seq_len, seq_len].
"""
input_shape = self.shape(input_mask)
shape_right = (input_shape[0], 1, input_shape[1]) # [batch_size, 1, seq_len]
shape_left = input_shape + (1,) # [batch_size, seq_len, 1]
input_mask = self.cast(input_mask, mstype.float32)
mask_left = self.reshape(input_mask, shape_left)
mask_right = self.reshape(input_mask, shape_right)
attention_mask = self.matmul(mask_left, mask_right) # [batch_szie, seq_len, seq_len]
if mask_future:
attention_mask = self.multiply(attention_mask, self.lower_triangle_mask)
return attention_mask
class GPT2Model(nn.Cell):
"""
Decoder Representations from Transformers.
Args:
config (Class): Configuration for GPT2Model.
is_training (bool): True for training mode. False for eval mode. ######### training要写在这里吗?
use_one_hot_embeddings (bool): Specifies whether to use one hot encoding form. Default: False.
"""
def __init__(self,
config,
is_training,
use_one_hot_embeddings=False):
super(GPT2Model, self).__init__()
config = copy.deepcopy(config)
self.is_training = is_training
if not is_training:
config.hidden_dropout = 0.0
config.attention_dropout = 0.0
self.input_mask_from_dataset = config.input_mask_from_dataset
self.batch_size = config.batch_size
self.seq_length = config.seq_length
self.d_model = config.d_model
self.num_hidden_layers = config.num_hidden_layers
self.embedding_dim = config.d_model
self.last_idx = self.num_hidden_layers - 1
self.gpt2_embedding_lookup = EmbeddingLookup(
vocab_size=config.vocab_size,
embedding_dim=self.embedding_dim,
use_one_hot_embeddings=use_one_hot_embeddings
)
self.gpt2_embedding_postprocess = EmbeddingPostprocessor(
embedding_dim=self.embedding_dim,
max_position_embeddings=config.max_position_embeddings,
dropout_prob=config.hidden_dropout
)
self.gpt2_decoder = GPT2Transformer(
batch_size=self.batch_size,
d_model=self.d_model,
seq_length=self.seq_length,
num_hidden_layers=self.num_hidden_layers,
num_attention_heads=config.num_attention_heads,
intermediate_size=config.intermediate_size,
has_attention_mask=True,
attention_dropout=config.attention_dropout,
hidden_dropout=config.hidden_dropout,
compute_type=config.compute_type
)
self.cast_compute_type = CastWrapper(dst_type=config.compute_type)
self.layer_norm = LayerNorm(in_channels=self.d_model)
self.dropout = nn.Dropout(1 - config.hidden_dropout)
self._create_attention_mask_from_input_mask = CreateAttentionMaskFromInputMask(config)
self.reshape = P.Reshape()
self.new_shape = (-1, self.d_model)
def construct(self, input_ids, input_mask):
"""
Construct network.
Args:
input_ids (Tensor): input sentences with shape [batch_size, seq_len].
input_mask (Tensor): input sentences padding mask with shape [batch_size, seq_len],
where 0 indicates padding position.
Returns:
decoder_output (Tensor): shape[batch_size, seq_len, d_model].
embedding_tables (Tensor): word embeddings with shape [vocab_size, d_model]
"""
# Embedding
word_embeddings, embedding_tables = self.gpt2_embedding_lookup(input_ids)
embedding_output = self.gpt2_embedding_postprocess(word_embeddings)
embedding_output = self.dropout(embedding_output)
# Attention mask with shape [batch_size, seq_len, seq_len]
attention_mask = self._create_attention_mask_from_input_mask(input_mask, True)
# GPT2 decoder
decoder_output = self.gpt2_decoder(
self.cast_compute_type(embedding_output),
self.cast_compute_type(attention_mask)
)
# LayerNorm
decoder_output = self.reshape(decoder_output, self.new_shape)
decoder_output = self.layer_norm(decoder_output)
decoder_output = self.reshape(decoder_output, (self.batch_size, self.seq_length, self.d_model))
return decoder_output, embedding_tables | [
"1159941086@qq.com"
] | 1159941086@qq.com |
4a94299bc3fecdb286feb53e805f7a93bbd246a8 | b020e4e5e9c35ef838aae2c57454ae66a3a3de05 | /txt_to_csv_exponencial.py | d281d794f7b44c0ca3fd8fd876272287455e88a9 | [] | no_license | gabriela-motta/ADSD_Escalonador | f9d2412038f4a74e8d2b2a93390f7d95b949e8b0 | badce6ae976504957ef866cd6cf4c3ecbceea531 | refs/heads/master | 2021-08-31T16:23:47.698852 | 2017-12-22T02:21:41 | 2017-12-22T02:21:41 | 113,713,439 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 609 | py | #coding: utf-8
txt = open('resultados_exponencial.txt', 'r')
csv = open('resultados.csv', 'w')
csv.write('Distribuicao de chegada,Parametros,Valor medio servico,Duracao da simulacao,Quantidade de Requisicoes recebidas,Quantidade de Requisicoes atendidas,Tempo medio de atendimento,Quantidade media de elementos em espera')
csv.write('\n')
csvLine = ''
mBloco = 7
bloco = 0
for line in txt:
if (bloco <= mBloco):
bloco += 1
parametro = line.split(': ')[1].rstrip('\n')
csvLine += parametro + ','
else:
bloco = 0
csv.write(csvLine[:-1])
csv.write('\n')
csvLine = ''
txt.close()
csv.close()
| [
"gabmla19@gmail.com"
] | gabmla19@gmail.com |
816b0a8889ea30c7dc4db8128946889439a95770 | 04bd9bf85586ec5450970b3e5d27d3f7730fc68e | /day2/session2/doc_ex.py | 0c3cb6f2ad392ddc502f032e7e982a3a3c3592f5 | [] | no_license | abdulmuneer/mindtree_python_training | 299cead8dc5e367f67a7c5e984dc976da84d90f2 | 32dc49beb6f64092646596ffab0071f5f33f764d | refs/heads/master | 2021-01-10T14:44:33.475715 | 2016-02-29T09:39:34 | 2016-02-29T09:39:34 | 52,187,845 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 179 | py | def square(x):
"""Squares x.
>>> square(2)
4
>>> square(-2)
4
"""
return x * x
if __name__ == '__main__':
import doctest
doctest.testmod()
| [
"abdulmuneer@gmail.com"
] | abdulmuneer@gmail.com |
1105051585796a40a6299fa70741f3bd3e91a768 | 401cf50e260558158c3756a90ff62783917190d5 | /Python Intermedio/Clase 03/PI0307 - Orden alfabetico.py | 1481e82e8f7d1d49147d567f41154eefceb07d6e | [] | no_license | RUN-tic/Soluciones-HackerRanks-RUN-2019 | 958ac03c9ec913f2cb10c04731d7707df4329d03 | 1cbe555b90f6826e751c2515e0f11647e87e4e4a | refs/heads/master | 2020-08-02T03:57:01.383093 | 2019-10-12T05:08:38 | 2019-10-12T05:08:38 | 211,226,371 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 74 | py | secuencia=input().split('-')
secuencia.sort()
print('-'.join(secuencia)) | [
"noreply@github.com"
] | RUN-tic.noreply@github.com |
e97d900e1e7624fde472f0927a9acdd56581b60c | de24f83a5e3768a2638ebcf13cbe717e75740168 | /moodledata/vpl_data/59/usersdata/216/26475/submittedfiles/testes.py | 54f602251a4e6970b2390dc096dbe706d12bd81d | [] | no_license | rafaelperazzo/programacao-web | 95643423a35c44613b0f64bed05bd34780fe2436 | 170dd5440afb9ee68a973f3de13a99aa4c735d79 | refs/heads/master | 2021-01-12T14:06:25.773146 | 2017-12-22T16:05:45 | 2017-12-22T16:05:45 | 69,566,344 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 154 | py | # -*- coding: utf-8 -*-
from __future__ import division
#COMECE AQUI ABAIXO
a= float(input('digite a'))
b= float(input('digite b'))
c=(a+b)/2
print (c) | [
"rafael.mota@ufca.edu.br"
] | rafael.mota@ufca.edu.br |
09821126447c737fe9801397d57db7ff78a18cb0 | 71879afb36391879f781fa54f2ca51edb93dc143 | /menu/menu.py | 762a6e1f3f6d5344227dcb90d7986bb60d8cbf22 | [] | no_license | nklein79/square | 89304d98e046604db9ec686d5031704c1e0baed8 | f6873301075df1c38e3efcd94d5974f1fa9d40f5 | refs/heads/master | 2020-06-06T17:48:37.479104 | 2013-06-11T22:20:14 | 2013-06-11T22:20:14 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,985 | py | import pygame
from pygame.locals import *
class Menu(pygame.sprite.Sprite):
""" Game menu
"""
def __init__(self):
pygame.sprite.Sprite.__init__(self)
self.menuItems = []
def display(self, screen):
for menuItem in self.menuItems:
menuItem.display(screen)
class MainMenu(Menu):
""" Main menu
"""
def __init__(self):
super(MainMenu, self).__init__()
self.postion = (0,0)
controlsAction = lambda: 1+1
controlsMenuItem = MenuItem("Controls", controlsAction)
quitAction = lambda: pygame.event.post(pygame.event.Event(QUIT))
quitMenuItem = MenuItem("Quit", quitAction)
self.menuItems.append(controlsMenuItem)
self.menuItems.append(quitMenuItem)
def display(self, screen):
index = 0
for menuItem in self.menuItems:
position = (screen.get_rect().center[0], screen.get_rect().center[1] + (index * 50))
menuItem.display(screen, position)
index += 1
class MenuItem(Menu):
""" Game menu item
"""
def __init__(self, text, action):
super(MenuItem, self).__init__()
self.width = 200
self.height = 40
self.menuFnt = pygame.font.SysFont('Arial', 32, True)
self.action = action
self.image = pygame.Surface((self.width,self.height), pygame.SRCALPHA)
self.image.fill((0,0,0,0))
# Make our top-left corner the passed-in location.
self.rect = self.image.get_rect()
self.rect.topleft = (0,0)
self.image.blit(self.menuFnt.render(text, True, (255,255,255)), (0,0))
def select(self):
self.action()
def update(self, current_time, entities=None):
pass
def display(self, screen, position=(0,0)):
self.rect.topleft = position
screen.blit(self.image, self.rect.topleft)
| [
"nklein79@gmail.com"
] | nklein79@gmail.com |
c9ab9dd3c0306f507f0a05f4140edbbcc0cb0e61 | 5bd6ace4e636a3299dbdbb6c578790873995356c | /BST.py | beee4fd60b5e41229f668cdcc25f2eba3a8f9b62 | [] | no_license | BlindWiz-1/pythonDataStruct | d2e53d0c615352bbc2761239a5fe57fb48eca9b2 | a054fe6f4f11e242d3812befd6ddb796ad8c6829 | refs/heads/main | 2023-04-18T18:42:29.695209 | 2021-04-28T09:42:17 | 2021-04-28T09:42:17 | 355,300,546 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,331 | py | #-------------------------------------------------------------------------------
# Name: module1
# Purpose:
#
# Author: Denado Rabeli
#
# Created: 08/05/2020
# Copyright: (c) denad 2020
# Licence: <your licence>
#-------------------------------------------------------------------------------
class BST:
#This is the initializiaton of each BST object .
def __init__(self,value):
self.value=value
self.left_child=None
self.right_child=None
#Insertion function which insert new nodes depending on their value .
def insert_node(self,value):
if self.value>=value and self.left_child: # if left exists recall function
self.left_child.insert_node(value)
elif self.value>=value: # if left doesn't exist enter it directly
self.left_child=BST(value)
elif self.value<value and self.right_child: #same as above
self.right_child.insert_node(value)
else:
self.right_child=BST(value)#same as if there is no left
#Print them in preorder traversal .
def preOrder(self):
print(self.value, end=' ')
if self.left_child:
self.left_child.preOrder()
if self.right_child:
self.right_child.preOrder()
#prints them in postOrder traversal .
def postOrder(self):
if self.left_child:
self.left_child.postOrder()
if self.right_child:
self.right_child.postOrder()
print(self.value, end=' ')
#Print the value of the smallest node .
def findMin(self):
#Go left to search for minimum
if self.left_child!=None:
return self.left_child.findMin()
else:
print(self.value)
#Print value of the biggest node .
def findMax(self):
#Go right to check for maximum
if self.right_child!=None:
return self.right_child.findMax()
else:
print(self.value)
#Find if the node searched by user is present or not and return 1 if yes and
#0 otherwise .
def findNode(self,data):
#if value found return 1
if self.value==data:
return 1
#if smaller go left , also checking if next child is empty
elif self.left_child and self.value>data:
return self.left_child.findNode(data)
#if it is bigger than self value go right , same as above
elif self.right_child and self.value<data:
return self.right_child.findNode(data)
#if not found return 0
else:
return 0
#Entering input and dividing it .
n=input()
a=list(map(int,n.split()))
#setting the root so that the insertion function can work .
b=BST(a[0])
#To insert each node value .
for i in range(1,len(a)):
b.insert_node(a[i])
#To print them in pre-order traversal .
print("PRE - ORDER")
b.preOrder()
#To print them in post-order traversal .
print()
print("POST-ORDER")
b.postOrder()
#Printing the max and min of our functions .
print()
print("Min is :")
b.findMin()
print("Max is :")
b.findMax()
#Checking for a certain value if found or not
print("Was it found YES/1 NO/0 ?")
print(b.findNode(50))
| [
"noreply@github.com"
] | BlindWiz-1.noreply@github.com |
6b67e2feb73937333f17ba4161d07053f4f1e2e8 | be8fe594fae8a4fb66c0cbc1f0e8462891eabb1b | /examsystemapp/models/semester.py | 2953a79ad27db3d271b43bd88f590c80bdf9a64c | [] | no_license | ITfyMe/ExamSytemPython | a30cccc1ba4ef832666b23109a772209fcbcea8c | da7506ae9607d69c97744bdc08ac1113fc86237a | refs/heads/master | 2023-06-12T11:48:20.514142 | 2021-07-12T13:16:06 | 2021-07-12T13:16:06 | 369,427,947 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 516 | py | """
Created By : <Auto generated code>
Created On :
Reviewed By :
Reviewed On :
Version :
"""
from examsystemapp.models.base_model import BaseModel
from examsystemapp.utils.helpers.general_helper import Validation
class SemesterModel(BaseModel):
def __init__(self, semesterid=None,name=None,code=None):
self.semesterid = semesterid
self.name = name
self.code = code
def get_id(self):
return None
def is_valid(self, type, event_type=None):
return Validation()
| [
"nikesh.kedlaya@gmail.com"
] | nikesh.kedlaya@gmail.com |
ecdbc102e9f8ef980acd46a217b272a871a16550 | 1efeed0fa970b05801a29ccfdc90c52bb571dd02 | /venv/bin/pip3.7 | eb97d48a42293fd28c8d07856e8ff04e1a0b9bce | [] | no_license | williamwang0/MusicGen | 2e7fe5d9b2d35d1406b8951a86a5eac6d704571e | b6411505d1fd29e13ca93e3975f3de106ad4a7d0 | refs/heads/master | 2020-07-08T15:48:33.840412 | 2020-05-27T17:30:38 | 2020-05-27T17:30:38 | 203,717,161 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 416 | 7 | #!/home/albertczhang/Desktop/Projects/Music-Gen/venv/bin/python
# EASY-INSTALL-ENTRY-SCRIPT: 'pip==19.0.3','console_scripts','pip3.7'
__requires__ = 'pip==19.0.3'
import re
import sys
from pkg_resources import load_entry_point
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(
load_entry_point('pip==19.0.3', 'console_scripts', 'pip3.7')()
)
| [
"albertczhang@berkeley.edu"
] | albertczhang@berkeley.edu |
c2738f3b5e44022bc4b88be8c35c14dbc07e2b1a | 2f4a96852bb95b6e56fa095503a26cc44be70134 | /capreolus/benchmark/robust04.py | a8aaf26c3d3e95fad6b16540bd9418f433cb6090 | [
"Apache-2.0"
] | permissive | bpiwowar/capreolus-xpm | bbf73cfc1641f3f06d34da84fd3a754aa3d46ae5 | 5374eb48df96b54d51365fc32441ae50a3e634c2 | refs/heads/master | 2020-12-26T05:02:08.628008 | 2020-01-31T10:01:53 | 2020-01-31T10:01:53 | 237,392,330 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,534 | py | import json
import os
from capreolus.collection import COLLECTIONS
from capreolus.benchmark import Benchmark
from capreolus.utils.loginit import get_logger
logger = get_logger(__name__) # pylint: disable=invalid-name
@Benchmark.register
class Robust04Benchmark(Benchmark):
""" Benchmark using title queries with Robust2004 folds from [1] (Table 1) with the same dev (validation) and test folds as in [2]. That is, fold "sn" is the split whose test fold contains the query ids in fold n from [1].
[1] Samuel Huston and W. Bruce Croft. Parameters learned in the comparison of retrieval models using term dependencies. Technical Report (2014).
[2] Sean MacAvaney, Andrew Yates, Arman Cohan, and Nazli Goharian. CEDR: Contextualized Embeddings for Document Ranking. SIGIR 2019.
"""
name = "robust04.title"
query_type = "title"
@staticmethod
def config():
fold = "s1"
searcher = "bm25"
collection = "robust04"
rundocsonly = True # use only docs from the searcher as pos/neg training instances (i.e., not all qrels)
return locals().copy() # ignored by sacred
def build(self):
self.folds = json.load(open(os.path.join(self.collection.basepath, "rob04_cedr_folds.json"), "rt"))
self.create_and_store_train_and_pred_pairs(self.folds)
@Benchmark.register
class DemoRobust04Benchmark(Benchmark):
""" Benchmark using title queries with Robust2004 folds and pipeline defaults corresponding to those used in [1].
See the WSDM20 runbook for config options to use with each reranker.
[1] Wei Yang, Kuang Lu, Peilin Yang, and Jimmy Lin. Critically Examining the "Neural Hype": Weak Baselines and the Additivity of Effectiveness Gains from Neural Ranking Models. SIGIR 2019.
"""
name = "robust04.title.wsdm20demo"
query_type = "title"
@staticmethod
def config():
fold = "s1"
searcher = "bm25staticrob04yang19"
collection = "robust04"
rundocsonly = True # use only docs from the searcher as pos/neg training instances (i.e., not all qrels)
maxqlen = 4
maxdoclen = 800
niters = 50
batch = 32
lr = 0.001
softmaxloss = False
stemmer = "none"
indexstops = False
return locals().copy() # ignored by sacred
def build(self):
self.folds = json.load(open(os.path.join(self.collection.basepath, "rob04_yang19_folds.json"), "rt"))
self.create_and_store_train_and_pred_pairs(self.folds)
| [
"andrew.yates@gmail.com"
] | andrew.yates@gmail.com |
22f771ab7258c173e2caef7ef32d1acc4e09a772 | 9a1929f07dcb965eb82300c131b2141a2e92985a | /4_youtubeDownloader.py | 664fab729e2cddd740a0689c57966bbaa8f73d87 | [] | no_license | DevBaki/Python_Examples_Basic | 3224fa180406939304deb0e9c36035758362f230 | 34de719207fcf6eb27b17af4a206b5ad91e72b92 | refs/heads/master | 2020-12-23T21:21:29.812338 | 2020-01-30T19:50:43 | 2020-01-30T19:50:43 | 237,278,371 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 876 | py |
import webbrowser
import requests
from bs4 import BeautifulSoup
input_func = None
try:
input_func = input('Enter the song to be played: ')
except NameError:
input_func = input('Enter the song to be played: ')
query = input_func.replace(' ', '+')
# search for the best similar matching video
url = 'https://www.youtube.com/results?search_query=' + query
source_code = requests.get(url, timeout=15)
plain_text = source_code.text
soup = BeautifulSoup(plain_text, "html.parser")
# fetches the url of the video
songs = soup.findAll('div', {'class': 'yt-lockup-video'})
song = songs[0].contents[0].contents[0].contents[0]
# link = song['href']
# webbrowser.open('https://www.youtube.com' + link)
try:
link = song['href']
webbrowser.open('https://www.youtube.com' + link)
except KeyError:
print("Can't find any song,check your network or try a new word")
| [
"55077651+baki1234@users.noreply.github.com"
] | 55077651+baki1234@users.noreply.github.com |
170cb2075e5d9d3f2cd67842ad23f55fc31c3f0b | ec0e7d6b3a56c3b259e9867c2c983dd09ba594a4 | /seq_calcs/apps.py | d1b7cb7add38bb7eaad47bda46e9371f56b9f64e | [] | no_license | mttmartin/biocalcs | a6897bdcd6e4b0be598819eb34dea32ac32013e5 | 749dd30182bcfd5af59aeffbab5c13038862b173 | refs/heads/master | 2020-03-22T21:04:24.877895 | 2018-07-12T04:03:16 | 2018-07-12T04:03:16 | 140,654,356 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 92 | py | from django.apps import AppConfig
class SeqCalcsConfig(AppConfig):
name = 'seq_calcs'
| [
"matthew@mttmartin.com"
] | matthew@mttmartin.com |
452c6c65ec584a7c24c62109ee13e4ef38269410 | 59d18dc2b539d46e32c07b3c5b5cbeb16beda4d1 | /lineTopo.py | 5ac871dc58592cddac495427c6c47aea7e9cf9be | [
"Apache-2.0"
] | permissive | Aliced3645/DataCenterMarketing | 9cbb0e429a8053af180172f5da69cb37a99c49be | 67bc485e73cf538498a89b28465afb822717affb | refs/heads/master | 2016-08-04T22:23:23.952571 | 2013-05-07T02:50:47 | 2013-05-07T02:50:47 | 8,640,330 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 786 | py | from mininet.topo import Topo
class MyTopo( Topo ):
"Line topology"
def __init__(self):
Topo.__init__(self)
h1 = self.addHost('h1');
h2 = self.addHost('h2');
h3 = self.addHost('h3');
h4 = self.addHost('h4');
h5 = self.addHost('h5');
s1 = self.addSwitch('s1');
s2 = self.addSwitch('s2');
s3 = self.addSwitch('s3');
s4 = self.addSwitch('s4');
s5 = self.addSwitch('s5');
self.addLink(s1,s2);
self.addLink(s2,s3);
self.addLink(s3,s4);
self.addLink(s4,s5);
self.addLink(s1,h1);
self.addLink(s2,h2);
self.addLink(s3,h3);
self.addLink(s4,h4);
self.addLink(s5,h5);
topos = { 'linetopo': ( lambda: MyTopo() ) }
| [
"shu@shu-GE70-0NC.(none)"
] | shu@shu-GE70-0NC.(none) |
bf8fe310ff7bfc2dd2cc76bfa8e8f1c9e5ffd8c9 | acb1ba80cea4bc87fa86b9e782e85592f7ff7209 | /Text.py | 397c5f61a6e6bcc4969d51a93813aded371d47a7 | [] | no_license | seanmcquaid/Basic_War | 85bcf628ba48ea72dde96d4c7c92e94a6c8f5c23 | cc0b9d007dc8567c14fff6cb0115dd5f27c974f9 | refs/heads/main | 2021-06-19T00:56:10.596599 | 2019-01-11T04:46:52 | 2019-01-11T04:46:52 | 159,394,722 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,819 | py | import pygame.font
class Text(object):
def __init__(self,screen):
self.width = 125
self.screen = screen
self.height = 125
self.player_points = 0
self.comp_points = 0
self.screen_rect = self.screen.get_rect()
self.text_color = (0,0,0)
self.rect = pygame.Rect(0,0,self.width,self.height)
self.rect.center = self.screen_rect.center
def title(self):
self.font = pygame.font.Font(None, 45)
self.title_message = self.font.render("BASIC WAR!", True, self.text_color)
self.title_message_rect = self.title_message.get_rect()
self.title_message_rect.center = self.rect.center
def instructions(self):
self.xy1 = (100,100)
self.font = pygame.font.Font(None, 28)
self.instruct_message = self.font.render("Press Spacebar to Continue to Each Round", True, self.text_color)
def player_point_inc(self):
self.player_points += 2
def comp_point_inc(self):
self.comp_points += 2
def player_point_counter(self):
self.xy2 = (0,0)
self.font = pygame.font.Font(None, 25)
self.player_count_message = self.font.render("PLAYER POINT COUNTER: %d" %self.player_points, True, self.text_color)
def comp_point_counter(self):
self.xy3 = (300,0)
self.font = pygame.font.Font(None, 25)
self.comp_count_message = self.font.render("COMPUTER POINT COUNTER: %d" %self.comp_points, True, self.text_color)
def deck_counter(self,deck_length):
self.xy4 = (225,150)
self.font = pygame.font.Font(None, 28)
self.deck_message = self.font.render("CARDS LEFT: %d" %deck_length, True, self.text_color)
def winner_of_round(self,round_winner):
self.round_winner = round_winner
self.xy5 = (205,300)
self.font = pygame.font.Font(None, 23)
self.winner_message1 = self.font.render("ROUND WINNER: %s" %self.round_winner, True, self.text_color)
def winner_of_game(self, game_winner):
self.game_winner = game_winner
self.xy6 = (90,500)
self.font = pygame.font.Font(None, 23)
self.winner_message2 = self.font.render("GAME WINNER: %s" %self.game_winner, True, self.text_color)
def draw_title_text(self):
self.screen.blit(self.title_message,self.title_message_rect)
def draw_instructions(self):
self.screen.blit(self.instruct_message,self.xy1)
def draw_counters(self):
self.screen.blit(self.player_count_message,self.xy2)
self.screen.blit(self.comp_count_message,self.xy3)
self.screen.blit(self.deck_message,self.xy4)
def draw_round_winner_player(self):
self.screen.blit(self.winner_message1,self.xy5)
def draw_game_winner_player(self):
self.screen.blit(self.winner_message2,self.xy6)
| [
"sean.mcquaid2@verizon.net"
] | sean.mcquaid2@verizon.net |
71ef9adfe5c91ed30c62535a9b158318af0ebf75 | 476d65f4d560d7793916843f794b09c8542be729 | /boto3_fixtures/dynamodb.py | 6b7d05a727c1d99cabf31b3b2bf49641f33878c4 | [
"MIT"
] | permissive | awichmann-mintel/boto3-fixtures | ffae235cab732020bc6bf5351593b4ba8f58e396 | 9a1c3aa8bfea14e9b828f214928ad96170c929ad | refs/heads/master | 2022-09-10T04:56:08.158554 | 2020-05-26T20:00:41 | 2020-05-26T20:00:41 | 267,139,300 | 0 | 0 | MIT | 2020-05-26T19:58:57 | 2020-05-26T19:58:56 | null | UTF-8 | Python | false | false | 2,046 | py | """
Example Usage
```python
@pytest.fixture(scope="session")
def dynamodb_tables():
return [
{
"AttributeDefinitions": [
{"AttributeName": "uri", "AttributeType": "S"},
{"AttributeName": "timestamp", "AttributeType": "S"},
],
"TableName": "my-dbd-table",
"KeySchema": [
{"AttributeName": "uri", "KeyType": "HASH"},
{"AttributeName": "timestamp", "KeyType": "RANGE"},
],
}
]
@pytest.fixture(scope="class")
def dynamodb(localstack, dynamodb_tables):
with boto3_fixtures.setup_dynamodb(dynamodb_tables) as tables:
yield tables
```
"""
import backoff
import boto3
from botocore.exceptions import ClientError
import boto3_fixtures.contrib.boto3
from boto3_fixtures import utils
@backoff.on_exception(backoff.expo, ClientError, max_time=30)
def create_table(config):
config.update({"BillingMode": "PAY_PER_REQUEST"})
return utils.call(boto3.client("dynamodb").create_table, **config)
def create_tables(dynamodb_tables):
client = boto3_fixtures.contrib.boto3.client("dynamodb")
for table in dynamodb_tables:
assert create_table(table)
for table in dynamodb_tables:
name = table["TableName"]
client.get_waiter("table_exists").wait(
TableName=name, WaiterConfig={"Delay": 1, "MaxAttempts": 30}
)
assert utils.call(client.describe_table, TableName=name)
return [t["TableName"] for t in dynamodb_tables]
@backoff.on_exception(backoff.expo, ClientError, max_tries=3)
def destroy_table(config):
client = boto3_fixtures.contrib.boto3.client("dynamodb")
return utils.call(client.delete_table, TableName=config["TableName"])
def destroy_tables(dynamodb_tables):
boto3_fixtures.contrib.boto3.client("dynamodb")
for table in dynamodb_tables:
destroy_table(table)
def setup(tables):
create_tables(tables)
return {"tables": tables}
def teardown(tables):
destroy_tables(tables)
| [
"noreply@github.com"
] | awichmann-mintel.noreply@github.com |
dc763b74c1dc61594084c2e1bd2444d4edaf96d4 | 9c0f691393abbeb5754e1624e0c48dfcdf857352 | /2017/Helpers/day_02.py | ce9e1ba0627d0318c61d59f26c208b83c5be9430 | [] | no_license | seligman/aoc | d0aac62eda3e6adc3c96229ca859bd2274398187 | 9de27ff2e13100770a3afa4595b15565d45bb6bc | refs/heads/master | 2023-04-02T16:45:19.032567 | 2023-03-22T15:05:33 | 2023-03-22T15:05:33 | 230,493,583 | 17 | 10 | null | null | null | null | UTF-8 | Python | false | false | 1,393 | py | #!/usr/bin/env python3
import itertools
DAY_NUM = 2
DAY_DESC = 'Day 2: Corruption Checksum'
def calc(log, values):
values = [[int(y) for y in x.replace('\t', ' ').split(' ')] for x in values]
ret = 0
ret2 = 0
for row in values:
a, b = min(row), max(row)
ret += b - a
for a, b in itertools.combinations(row, 2):
if b > a:
a, b = b, a
if a % b == 0:
ret2 += a // b
log("Second form: " + str(ret2))
return ret
def test(log):
values = [
"5 1 9 5",
"7 5 3",
"2 4 6 8",
]
if calc(log, values) == 18:
return True
else:
return False
def run(log, values):
log(calc(log, values))
if __name__ == "__main__":
import sys, os
def find_input_file():
for fn in sys.argv[1:] + ["input.txt", f"day_{DAY_NUM:0d}_input.txt", f"day_{DAY_NUM:02d}_input.txt"]:
for dn in [[], ["Puzzles"], ["..", "Puzzles"]]:
cur = os.path.join(*(dn + [fn]))
if os.path.isfile(cur): return cur
fn = find_input_file()
if fn is None: print("Unable to find input file!\nSpecify filename on command line"); exit(1)
print(f"Using '{fn}' as input file:")
with open(fn) as f: values = [x.strip("\r\n") for x in f.readlines()]
print(f"Running day {DAY_DESC}:")
run(print, values)
| [
"scott.seligman@gmail.com"
] | scott.seligman@gmail.com |
35555a72fc74f91bcef1f29d8338b82e6d1291fc | 033ee4c3cae70aa9277cc5f4728bebc95f93b56c | /alumnos/AleFCortes/tareas/min.py | 7dc8e9242857f2c9925d97fe2caa8fe905e92b7e | [] | no_license | Shotzo/mate-computacional | f780da74cef62f80fa813144131e91f341957fc8 | 1a94ba61bd378f49759f1a7656259839a951eb05 | refs/heads/master | 2021-01-15T22:46:57.565947 | 2015-05-19T04:59:57 | 2015-05-19T04:59:57 | 29,537,312 | 3 | 0 | null | 2015-01-20T15:44:32 | 2015-01-20T15:44:32 | null | UTF-8 | Python | false | false | 65 | py | def numMin(n):
minimo = ((2**(n-1))-1)*(-1)
return minimo | [
"sabakunosofia@gmail.com"
] | sabakunosofia@gmail.com |
422d7e7ab6f55020fe5127386f0938f38842a446 | 6afb187283a40debbf093f1b9419254001872dbe | /models/bi_lstm_cnn_tuning/no_meta_extra_drop_128/var.py | e3154dd6fb8258267bca795049a85647f647f256 | [] | no_license | bwxu/FactCheckerModels | ae1aa90e6249aaca66d27eb61e81093bd19f9b93 | 2fc26d8208dbf64a0ecef9ccabe505aa68db1cc8 | refs/heads/master | 2021-09-22T16:02:14.765782 | 2018-09-11T17:54:30 | 2018-09-11T17:54:30 | 109,511,371 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,522 | py | # Location of data files
USE_WORD2VEC = False
WORD2VEC_BIN_PATH = "data/GoogleNews-vectors-negative300.bin"
GLOVE_VECTOR_PATH = "data/glove.840B.300d.txt"
TRAINING_DATA_PATH = "data/train.tsv"
VALIDATION_DATA_PATH = "data/valid.tsv"
TEST_DATA_PATH = "data/test.tsv"
# CNN, LSTM, BI_LSTM or BI_LSTM_CNN
MODEL_TYPE = "BI_LSTM_CNN"
HIDDEN_LAYER_SIZE = 100
LSTM_OUT_DIM = 128
LSTM_DROPOUT = 0.4
# Arguments for preparing sentences, labels, and embedding matrix
LABEL_MAPPING = {"pants-fire": 0,
"false": 1,
"barely-true": 2,
"half-true": 3,
"mostly-true": 4,
"true": 5}
MAX_NUM_WORDS = 20000
MAX_SEQUENCE_LENGTH = 67
EMBEDDING_DIM = 300
# Parameters for model construction
TRAIN_EMBEDDINGS = True
FILTER_SIZE_LIST = [2, 3, 4]
NUM_FILTERS = [128, 128, 128]
DROPOUT_PROB = 0.2
# "MAX" or "AVG" or "MAXOUT"
POOLING = "AVG"
# Training parameters
NUM_EPOCHS = 10
BATCH_SIZE = 64
# Parameters for saving the trained model
FOLDER_NAME = "models/bi_lstm_cnn_tuning/no_meta_extra_drop_128"
# FILE_NAME = "new-epoch-{epoch:02d}-val_loss-{val_loss:.4f}.hdf5"
FILE_NAME = '_lowest_val_loss.hdf5'
USE_SUBJECTS = False
NUM_SUBJECTS = 30
SUBJECT_MAPPING = {}
USE_PARTY = False
NUM_PARTIES = 10
PARTY_MAPPING = {}
USE_CREDIT = False
NUM_CREDIT_TYPES = 5
CREDIT_MAPPING = {"barely-true": 0,
"false": 1,
"half-true": 2,
"mostly-true": 3,
"pants-fire": 4}
NUM_MODELS = 10
| [
"bwxu@mit.edu"
] | bwxu@mit.edu |
527bdd34ceb14da0731203d697a17be4ec88b8bc | cf5aa9620a35b14a498683e0a7d494f878090baa | /urlreq.py | decf7d0f48e362f692ed1d6061fd4f3262884a8b | [
"MIT"
] | permissive | Khantanjil/Porto-Meteorologia-Populacao | 2cdba8f53ab6447d3d589ab0134a96823f4a4269 | f62e880593d175bfb0253052a90bad941cb120ac | refs/heads/master | 2022-04-11T18:43:21.635501 | 2020-03-24T12:11:15 | 2020-03-24T12:11:15 | 249,057,236 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 432 | py | import requests
from bs4 import BeautifulSoup
r = requests.get("https://www.otempo.pt/porto.html", {"User-agent": "XY"})
content = r.content
soup = BeautifulSoup(content, "html.parser")
all = soup.find_all("span", {"class": "m_table_weather_day_max_temp"})
today_temp = all[0].find_all("span")[0].text
today = ''.join(e for e in today_temp if e.isalnum())
f = open("todaysTemMax.txt", "w+")
f.write(today)
f.close()
# print(all)
| [
"tanjilkhan.sh@gmail.com"
] | tanjilkhan.sh@gmail.com |
6d675585c621f5d06ee0a556dee58b526428200c | c27c6817eebb2fb8e1f64d68a4ea1ef3493dd03f | /venv/Scripts/easy_install-script.py | dadd31e5de5f9b80141aefee8ef3691f93d180cf | [] | no_license | Sweissekarte/pythonClass | ba525901cfed144417a7cb807e3ac4892ee7c211 | e0f0c18d9d22d2be617b7315fad6c822b5fab828 | refs/heads/master | 2020-07-23T05:00:55.687914 | 2019-09-20T19:56:50 | 2019-09-20T19:56:50 | 207,452,700 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 470 | py | #!C:\Users\LENOVO-USER\PycharmProjects\Edureka-Certification\venv\Scripts\python.exe
# EASY-INSTALL-ENTRY-SCRIPT: 'setuptools==40.8.0','console_scripts','easy_install'
__requires__ = 'setuptools==40.8.0'
import re
import sys
from pkg_resources import load_entry_point
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(
load_entry_point('setuptools==40.8.0', 'console_scripts', 'easy_install')()
)
| [
"marc@805stats.com"
] | marc@805stats.com |
985338ef85155a6b5020fd60f77c499c9ad8365f | 818b0bbc10492412a28d2fdc2b593fa19a2e477e | /9 Class.py | e0e0ea56cf01d554a180541859717cca4706b162 | [] | no_license | GalaChen/Python-Codeacademy | 07f0e090c5c76a768fe65a8a5620d27dede65ed2 | 4bc1828d791ce512cb804e2480d9b5b2ae382267 | refs/heads/master | 2021-09-10T23:03:40.110438 | 2018-04-03T20:54:38 | 2018-04-03T20:54:38 | 110,886,567 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,238 | py | ###########################################################
################## Class ##################
###########################################################
#python中的类(class)应该理解为用于传递参数的一个用户自己定义的模块
#class中包含数据与函数
#将参数传递给class之后,即可应用class包含的函数进行操作
def __init__(self):
#class总是开始于init,且init至少有一个参数,也就是self
#self传递进去表示函数自身已被建立
#init可以理解为“开始”
#self.name等code可以理解为为class传递新的参数
class Animal(object):
"""Makes cute animals."""
is_alive = True
def __init__(self, name, age):
self.name = name
self.age = age
# Add your method here!
def description(self):
print self.name
print self.age
hippo = Animal('ppx',32)
hippo.description()
#class中创建函数及使用函数
class Animal(object):
"""Makes cute animals."""
is_alive = True
def __init__(self, name, age):
self.name = name
self.age = age
zebra = Animal("Jeffrey", 2)
giraffe = Animal("Bruce", 1)
panda = Animal("Chad", 7)
print zebra.name, zebra.age, zebra.is_alive
print giraffe.name, giraffe.age, giraffe.is_alive
print panda.name, panda.age, panda.is_alive
#class中有全局变量和局部变量的区别
hippo = Animal("Jake", 12)
cat = Animal("Boots", 3)
print hippo.is_alive
hippo.is_alive = False
print hippo.is_alive
print cat.is_alive
#全局变量和局部变量可随时个性化更改
class Customer(object):
"""Produces objects that represent customers."""
def __init__(self, customer_id):
self.customer_id = customer_id
def display_cart(self):
print "I'm a string that stands in for the contents of your shopping cart!"
class ReturningCustomer(Customer):
"""For customers of the repeat variety."""
def display_order_history(self):
print "I'm a string that stands in for your order history!"
monty_python = ReturningCustomer("ID: 12345")
monty_python.display_cart()
monty_python.display_order_history()
#class可“继承”其它class
class Employee(object):
def __init__(self, name):
self.name = name
def greet(self, other):
print "Hello, %s" % other.name
class CEO(Employee):
def greet(self, other):
print "Get back to work, %s!" % other.name
ceo = CEO("Emily")
emp = Employee("Steve")
emp.greet(ceo)
# Hello, Emily
ceo.greet(emp)
# Get back to work, Steve!
###
#inheritance可以在父子和子父之间相互进行
class Employee(object):
"""Models real-life employees!"""
def __init__(self, employee_name):
self.employee_name = employee_name
def calculate_wage(self, hours):
self.hours = hours
return hours * 20.00
# Add your code below!
class PartTimeEmployee(Employee):
def calculate_wage(self,hours):
self.hours = hours
return 12 * hours
def full_time_wage(self,hours):
return super(PartTimeEmployee,self).calculate_wage(hours)
milton = PartTimeEmployee('Milton')
print milton.full_time_wage(10)
#通过super函数能够使class实现自身调用。注意传递的参数
############################################
############ Class 综合举例 ############
############################################
class Triangle(object):
number_of_sides = 3
def __init__(self, angle1, angle2, angle3):
self.angle1 = angle1
self.angle2 = angle2
self.angle3 = angle3
def check_angles(self):
s = self.angle1+self.angle2+self.angle3
if s == 180:
return True
else:
return False
my_triangle = Triangle(90,30,60)
print my_triangle.number_of_sides
print my_triangle.check_angles()
class Equilateral(Triangle):
angle = 60
def __init__(self):
self.angle1 = self.angle
self.angle2 = self.angle
self.angle3 = self.angle
#############################################
############ Class 综合举例2 ############
#############################################
class Car(object):
condition = "new"
def __init__(self, model, color, mpg):
self.model = model
self.color = color
self.mpg = mpg
def display_car(self):
return "This is a %s %s with %s MPG" %(self.color,self.model,self.mpg)
def drive_car(self):
self.condition = "used"
return self.condition
class ElectricCar(Car):
def __init__(self,battery_type):
self.battery_type = battery_type
def drive_car(self):
self.condition = 'like new'
return self.condition
my_car = ElectricCar( "battery type" )
print my_car.condition
print my_car.drive_car()
print my_car.condition
#############################################
############ Class 综合举例3 ############
#############################################
class Point3D(object):
def __init__(self,x,y,z):
self.x = x
self.y = y
self.z = z
def __repr__(self):
return "(%d, %d, %d)" % (self.x, self.y, self.z)
my_point = Point3D(1,2,3)
print my_point
#repr可以直接返回想要的数据表达方式,而无需在print后面去表达 | [
"noreply@github.com"
] | GalaChen.noreply@github.com |
5c539b791b19c0dcaefe1cb2e23620945a896160 | 0e15a7a808e2e4c20035a766c34157eae52b75fb | /rango/migrations/0003_category_slug.py | fae989c9eab94de5708179a13df74e10e08f9fd3 | [] | no_license | 2083417w/tangowithdjango | c3d6feac71938111338d0983abc5cb2d22a599d7 | bb3fe45a8ea0ec9defe7b9fc50b723cbee4133fd | refs/heads/master | 2020-05-18T01:03:43.904179 | 2015-03-08T22:22:25 | 2015-03-08T22:22:25 | 28,946,463 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 444 | py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('rango', '0002_auto_20150125_1606'),
]
operations = [
migrations.AddField(
model_name='category',
name='slug',
field=models.SlugField(default='', unique=True),
preserve_default=False,
),
]
| [
"2083417w@student.gla.ac.uk"
] | 2083417w@student.gla.ac.uk |
6015c71d15294c4e5332fac46c344a18bee9eddd | de24f83a5e3768a2638ebcf13cbe717e75740168 | /moodledata/vpl_data/216/usersdata/354/113785/submittedfiles/av2_p3_civil.py | e7ed6d32a539f56ce9f534bf54484c4e125a9e91 | [] | no_license | rafaelperazzo/programacao-web | 95643423a35c44613b0f64bed05bd34780fe2436 | 170dd5440afb9ee68a973f3de13a99aa4c735d79 | refs/heads/master | 2021-01-12T14:06:25.773146 | 2017-12-22T16:05:45 | 2017-12-22T16:05:45 | 69,566,344 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 546 | py | # -*- coding: utf-8 -*-
import numpy as np
ordem=int(input('digite a dimensao n da matriz: '))
x=int(input('digite a linha do numero: '))
y=int(input('digite a coluna do numero: '))
matriz=np.zeros((ordem,ordem))
for i in range(0,ordem,1):
for j in range(0,ordem,1):
matriz[i,j]=int(input('digite os valores da matriz: '))
#LINHA
i=x
soma=0
for j in range(0,ordem,1):
if j!=y:
soma=soma+matriz[i,j]
#COLUNA
j=y
soma1=0
for i in range(0,ordem,1):
if i!=x:
soma1=soma1+matriz[i,j]
peso=soma+soma1
print(peso) | [
"rafael.mota@ufca.edu.br"
] | rafael.mota@ufca.edu.br |
2bbc1196c0025f188016b75fc7993a434729f616 | 6af81c1e3853255f064ce58e848b34211decdd23 | /test/top/api/rest/HotelsSearchRequest.py | f79b816b5737e298bbab342a8a4f1a00cb4bc2be | [] | no_license | dacy413/TBAutoTool | d472445f54f0841f2cd461d48ec6181ae2182d92 | ca7da4638d38dd58e38c680ee03aaccf575bce7b | refs/heads/master | 2016-09-06T16:13:01.633177 | 2015-02-01T00:04:50 | 2015-02-01T00:04:50 | 29,625,228 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 438 | py | '''
Created by auto_sdk on 2015-01-20 12:36:26
'''
from top.api.base import RestApi
class HotelsSearchRequest(RestApi):
def __init__(self,domain='gw.api.taobao.com',port=80):
RestApi.__init__(self,domain, port)
self.city = None
self.country = None
self.district = None
self.domestic = None
self.name = None
self.page_no = None
self.province = None
def getapiname(self):
return 'taobao.hotels.search'
| [
"1656505353@qq.com"
] | 1656505353@qq.com |
6bbdc3cae916404efdb6a54c9d8d4d4bf00b72fb | 98110c880d51d776e76637af527c4cf682f51b62 | /subapps/v1.py | faf21e204dfc1b4e640a6c601c6df92c0a885d9e | [
"MIT"
] | permissive | regulad/easy-gTTS-API | 34813509f3c968870c54ed87ca7902b4da0a2a5c | 443f92a7762449bcbddf8926ce178a2037edf093 | refs/heads/master | 2023-05-15T06:05:42.087499 | 2021-06-09T19:53:31 | 2021-06-09T19:53:31 | 372,965,778 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,246 | py | import json
from aiohttp import web, ClientSession
from asyncgTTS import (
AsyncGTTSSession, ServiceAccount, TextSynthesizeRequestBody, SynthesisInput, VoiceSelectionParams, AudioConfig,
AudioEncoding
)
subapp = web.Application()
routes = web.RouteTableDef()
with open("config/SERVICE_ACCOUNT.JSON") as service_account_json:
service_account_dict = json.load(service_account_json)
subapp["clientsession"] = ClientSession()
subapp["serviceaccount"] = ServiceAccount.from_service_account_dict(service_account_dict)
subapp["ttsclientsession"] = AsyncGTTSSession.from_service_account(
subapp["serviceaccount"], client_session=subapp["clientsession"],
endpoint="https://texttospeech.googleapis.com/v1/",
)
@routes.get("/synthesize")
async def tts_route(request: web.Request):
text = request.query.get("text")
lang = request.query.get("lang", "en-US")
voice = request.query.get("voice", "en-US-Wavenet-D")
encoding = request.query.get("encoding", "MP3")
if text is None:
raise web.HTTPBadRequest(reason="No input text was provided.")
synthesis_input = SynthesisInput(text)
synthesis_voice = VoiceSelectionParams(lang, voice)
try:
synthesis_encoding = AudioConfig(AudioEncoding(encoding.upper()))
except ValueError:
raise web.HTTPBadRequest(reason=f"{encoding} is not a valid encoding.")
text_synthesize_request_body = TextSynthesizeRequestBody(
synthesis_input, voice_input=synthesis_voice, audio_config_input=synthesis_encoding
)
audio_bytes = await subapp["ttsclientsession"].synthesize(text_synthesize_request_body)
stream_response = web.StreamResponse()
stream_response.content_type = f"audio/{encoding.lower()}"
await stream_response.prepare(request)
await stream_response.write(audio_bytes)
await stream_response.write_eof()
return stream_response
@routes.get("/voices")
async def tts_voices(request: web.Request):
language_code = request.query.get("languageCode")
response_json = await subapp["ttsclientsession"].get_voices(language_code=language_code)
return web.json_response(response_json)
subapp.add_routes(routes)
def setup(app: web.Application):
app.add_subapp("/v1/", subapp)
| [
"52430642+regulad@users.noreply.github.com"
] | 52430642+regulad@users.noreply.github.com |
70c76db1ec07449c468c62369074bb65be67d7f4 | 7920ac571217d627aad1ed8fa0b87ef1436cdb28 | /casepro/cases/migrations/0006_auto_20150508_0912.py | ba0c4ea6984a0959cd1e04d511e9dab37ee86a50 | [
"BSD-3-Clause"
] | permissive | rapidpro/casepro | 34777e5373822d41ff2e5f3995f86d009c2d1e7c | 66177c00b06b2bd6e6cad2b648feb8f28f592add | refs/heads/main | 2023-07-20T00:16:09.616516 | 2023-07-06T21:46:31 | 2023-07-06T21:46:31 | 32,147,348 | 23 | 30 | BSD-3-Clause | 2023-07-19T07:44:59 | 2015-03-13T09:31:47 | Python | UTF-8 | Python | false | false | 1,662 | py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [("cases", "0005_auto_20150424_1427")]
operations = [
migrations.CreateModel(
name="CaseEvent",
fields=[
("id", models.AutoField(verbose_name="ID", serialize=False, auto_created=True, primary_key=True)),
("event", models.CharField(max_length=1, choices=[("R", "Contact replied")])),
("created_on", models.DateTimeField(db_index=True)),
],
),
migrations.AlterField(
model_name="case",
name="opened_on",
field=models.DateTimeField(help_text="When this case was opened", auto_now_add=True, db_index=True),
),
migrations.AlterField(
model_name="caseaction", name="created_on", field=models.DateTimeField(auto_now_add=True, db_index=True)
),
migrations.AlterField(
model_name="messageaction",
name="action",
field=models.CharField(
max_length=1,
choices=[
("F", "Flag"),
("N", "Un-flag"),
("L", "Label"),
("U", "Remove Label"),
("A", "Archive"),
("R", "Restore"),
],
),
),
migrations.AddField(
model_name="caseevent",
name="case",
field=models.ForeignKey(related_name="events", to="cases.Case", on_delete=models.PROTECT),
),
]
| [
"rowanseymour@gmail.com"
] | rowanseymour@gmail.com |
e70e3aaac0a7048470ec3afc71c95a532aa29f5a | 5204c5fc94466e84e5cfd0f5d003c2bd81c928e2 | /musixmatch/api.py | d45e487eec5507404341bca906fe9d5e77209747 | [] | no_license | marmikshah/Mood-Classification | 3e43b378761748199c5f56b4fa683767da27a187 | 30f32f5d0d89c37ddd6296451826a0df7431a4b8 | refs/heads/master | 2021-10-25T15:57:46.696869 | 2019-04-05T02:15:47 | 2019-04-05T02:15:47 | 177,504,943 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,575 | py | import json
import requests
# A simple class to store attributes of a single track.
class Track(object):
def __init__(self, name, track_id, artist):
# Name of the track (Can be used to display on the front end)
self.name = name
# ID of the track (Will be used to get the Lyrics)
self.id = track_id
# Name of the artist
self.artist = artist
def addLyrics(self, lyrics):
# Variable storing lyrics of the current track.
self.lyrics = lyrics
def label(self, mood) :
# This is to store the final label (Happy/Sad) after classification is complete.
self.mood = mood
class Musix(object):
def __init__(self, country="us", apikey="acf266ecb81687ee6f567e6fe9d0ca06"):
# Country variable will be used for getting charts.
self.country = country
# Developer registration.
self.apikey = apikey
# Changing the country of search
def change_country(self, country):
self.country = country
# Function to retrive lyrics of top k songs in a country.
# Step 1 : Get the Songs/Tracks. Store their name and ID.
# Step 2 : Get lyrics for each track using it's ID.
def get_top_lyrics(self, k):
# Get the songs.
tracks = self.get_top_songs()
# Get the Lyrics for first 'k' objects in the list.
for track in tracks[:k]:
track.addLyrics(self.get_lyrics(track.id))
return tracks;
# This function will get the top 'songs' of the desired country.
def get_top_songs(self):
# The URL for getting charts.
url = "https://api.musixmatch.com/ws/1.1/chart.tracks.get"
# Params for the Request
querystring = {"format": "json", "callback": "callback",
"country": self.country, "apikey": self.apikey,"page_size":100,}
headers = {'Accept': "text/plain", }
# Perform Request and get the JSON response.
response = requests.request("GET", url, headers=headers, params=querystring).json()
# Iterate over the track list and extract the track names and track id's
songs = []
for track in response['message']['body']['track_list']:
# Get the name
name = track['track']['track_name']
# Get the ID
track_id = track['track']['track_id']
# Get the artist name
artist = track['track']['artist_name']
# Create a Track Object
trackObj = Track(name, track_id, artist)
# Save the track object.
songs.append(trackObj)
return songs
# Function to request for lyrics of the given track_id
def get_lyrics(self, track_id):
# URL to get the Lyrics
url = "https://api.musixmatch.com/ws/1.1/track.lyrics.get"
# Params for the reqest.
querystring = {"format": "json", "callback": "callback",
"track_id": track_id, "apikey": self.apikey}
headers = {'Accept': "application/json", }
# Perform the Request
response = requests.request("GET", url, headers=headers, params=querystring).json()
# Extract and return the lyrics.
# The reason I am using 'replace' function is because Musixmatch appends a string to the lyrics.
# We need to remove this because it will cause a problem in the classification.
return response['message']['body']['lyrics']['lyrics_body'].replace("******* This Lyrics is NOT for Commercial use *******", "")
| [
"marmikshah@icloud.com"
] | marmikshah@icloud.com |
05cd6f081aaaff3e3da677689d1dfab5b16c2bc3 | 114ad0c49ba0ca0f5d8b95151da45d54a7f05a04 | /zion/urls.py | 682b6af9853493bf4c7a20985f1b29cf2beb7d57 | [] | no_license | gray-adeyi/zion | b10cbd09588b1fdea86fff38be79c4dde1d5043c | c0d1ac64db978f8277932a82a15fa1f3f2b188f6 | refs/heads/master | 2023-01-10T00:50:17.121785 | 2020-11-11T19:24:37 | 2020-11-11T19:24:37 | 312,034,548 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,003 | py | """zion URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/3.1/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path, include
from django.conf import settings
from django.conf.urls.static import static
urlpatterns = [
path('admin/', admin.site.urls),
path('', include('core.urls')),
]
if settings.DEBUG == True:
media_url = static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
urlpatterns += media_url
| [
"adeyigbenga005@gmail.com"
] | adeyigbenga005@gmail.com |
b412bdfd03e31ab7ae6e0ca299ce46fedafa32e3 | 4f9acc09a8106806b6b2192db4987a09ee82ede4 | /HOME/.offlineimap.py.gpg | 9dc4679c8c94020022e2b2c24c566c36e2c8d68b | [
"MIT"
] | permissive | dvogt23/dotfiles | dc8523ceef01105e12d2f1d3a8203b032289e284 | 32eb94b09da10e301c9af7d2544ba18a7c153842 | refs/heads/master | 2023-07-27T22:00:26.252318 | 2023-07-13T19:38:00 | 2023-07-13T19:38:00 | 57,355,476 | 8 | 5 | null | 2018-02-27T11:55:23 | 2016-04-29T04:51:47 | Vim script | UTF-8 | Python | false | false | 178 | gpg | #! /usr/bin/env python2
from subprocess import check_output
def get_pass(account):
return check_output("gpg -dq ~/.offlineimap." + account + ".gpg", shell=True).strip("\n")
| [
"divogt@vogt.dev"
] | divogt@vogt.dev |
b05b99ad3d27425f8ea97ab5b00b4902d4f6a9b4 | 4a290201df52b0e0b5705a07ecb2a242796b8658 | /Assignment4.py | 892be0355f54282768fd5eb1e5999738d4816287 | [] | no_license | afcarl/Numerical-analysis | a7b9b7a9a83b7f663430421e5485c3164c43459a | 3587960cec1f299853839ff51ea7873acfa66473 | refs/heads/master | 2020-03-14T13:40:58.239725 | 2015-05-23T08:33:33 | 2015-05-23T08:33:33 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,195 | py | # -*- coding: utf-8 -*-
"""
Created on Wed Apr 22 19:53:52 2015
@author: Johan
"""
from __future__ import division
from scipy import *
from pylab import *
import numpy as np
from matplotlib.widgets import Slider, Button, RadioButtons
from scipy.linalg import solve
from matplotlib.pyplot import *
from s1002 import *
close('all')
#---------------------------------------------------------------------------------------------
#Cubic splines
def cubespline(xint,yint):# should return mx4 matrix with coefficiants
h=xint[1]-xint[0]
a = len(xint)-1
coeff = zeros((a,4))
#Adding d to the matrix
for i in range(a):
coeff[i][3] = yint[i]
#To calculate sigma we need to solve diagonal 141 matrix
A=diag((a+1)*[4]) + diag((a)*[1],-1) + diag((a)*[1],1)
A[0][0]=1
A[0][1]=0
A[a][a]=1
A[a][a-1]=0
b = [0]
for i in range(1,a):
b+= [(6/h**2)*(yint[i+1] - 2*yint[i] + yint[i-1])]
b+=[0]
#Solve fo the sigmas
sigma = solve(A,b)
for i in range(a):
#Adding ai to the matrix
coeff[i][0] = (1./(6.*h))*(sigma[i+1]-sigma[i])
#Adding b to out matrix
coeff[i][1] = sigma[i]/2.
#Adding c to the matrix
coeff[i][2]=(yint[i+1]-yint[i])/h - ((h/6.)*(2.*sigma[i]+sigma[i+1]))
return coeff
def cubsplineval(coeff,xint,xval):
yplot =[]
for k in range(len(xval)):
#Argmax gives false on all when k is the last element(6 in this case)
if k==len(xval)-1:
i = argmax(xint>=xval[k])-1
else:
i = argmax(xint>xval[k]) -1
tmp= (coeff[i][0]*(xval[k]-xint[i])**3 + coeff[i][1]*(xval[k]-xint[i])**2 + coeff[i][2]*(xval[k]-xint[i])**1 + coeff[i][3])
yplot += [tmp]
return yplot
x = array([0,1,2,3,4,5,6])
y = array([1,3,-2,0,1,0,1])
coeff = cubespline(x,y)
xplot = linspace(0,6,100)
yplot=cubsplineval(coeff,x,xplot)
#Plots the data
#plot(x,y,'o')
#plot(xplot,yplot)
#--------------------------------------------------------------------------------------------------
eps = .0000000000000000000001
def Bsplbasis(t,v,dt):
#Evaluate the piecewise cubic B-spline curve at intervals of dt
#knots are t(i) and de-boor points v(i)
#% IMPORTANT:
#(1) there should be 4 more knots t(i) than points v(i).
#(2) This function can be used to evaluate the B-spline curve over [t_3, t_{m-3}],
# where m is the number of knots. (Note the function is not
# well-defined outside this interval.)
m = len(t) #number of knots
i = 4 #index of first knot
q=[]
for u in arange(t[3],t[m-3]+dt,dt):
# check if u value has moved to the next knot interval
# include small tolerance on knots to avoid round-off error in comparisons.
while (u>(t[i]+eps)):
i+=1
# Now evaluate the spline at u using the deBoor algorithm.
# Start with the relevant control points.
# w used here to simplify indices.
w = i-4
qq = zeros(len(v))
for j in arange(1,5,1):
qq[j-1]=v[w+j-1]
for j in arange(1,4,1):
for k in arange(1,4-j+1,1):
qq[k-1] = ((t[w + k + 4-1] - u)/(t[w + k + 4-1] - t[w + k + j-1])) * qq[k-1] + ((u - t[w + k + j-1])/(t[w + k + 4-1] - t[w + k + j-1])) * qq[k+1-1]
#Create vector of points on the B-spline curve.
q.append(qq[0])
return q
#------------------------------------------------------------------------------
#Slider plot
#fig, ax = plt.subplots()
#plt.subplots_adjust(left=0.25, bottom=0.52)
#
#plt.plot(x,y,'o')
#
#init = 2
#min = -15.
#max = 15.
#
#x = array([0,0,0,0,1,2,3,4,5,6,6,6,6]) #Same x values but with extra data on the side
#deBoor = array([0,0,0,0,0,0,0,0,0]) #Initial deBoor points
#
#y=Bsplbasis(x,deBoor,(x[len(x)-1]-x[0])/99.)
#plt.plot(xplot,yplot)
#l, = plt.plot(xplot,y, lw=2, color='red')
#plt.axis([0, 6, min, max])
#
##Adds axis and sliders
#axcolor = 'lightgoldenrodyellow'
#v0ax = plt.axes([0.25, 0.05, 0.65, 0.03], axisbg=axcolor)
#v1ax = plt.axes([0.25, 0.1, 0.65, 0.03], axisbg=axcolor)
#v2ax = plt.axes([0.25, 0.15, 0.65, 0.03], axisbg=axcolor)
#v3ax = plt.axes([0.25, 0.2, 0.65, 0.03], axisbg=axcolor)
#v4ax = plt.axes([0.25, 0.25, 0.65, 0.03], axisbg=axcolor)
#v5ax = plt.axes([0.25, 0.3, 0.65, 0.03], axisbg=axcolor)
#v6ax = plt.axes([0.25, 0.35, 0.65, 0.03], axisbg=axcolor)
#v7ax = plt.axes([0.25, 0.4, 0.65, 0.03], axisbg=axcolor)
#v8ax = plt.axes([0.25, 0.45, 0.65, 0.03], axisbg=axcolor)
#
#sv0 = Slider(v0ax, 'v0', min, max, valinit=init)
#sv1 = Slider(v1ax, 'v1', min, max, valinit=init)
#sv2 = Slider(v2ax, 'v2', min, max, valinit=init)
#sv3 = Slider(v3ax, 'v3', min, max, valinit=init)
#sv4 = Slider(v4ax, 'v4', min, max, valinit=init)
#sv5 = Slider(v5ax, 'v5', min, max, valinit=init)
#sv6 = Slider(v6ax, 'v6', min, max, valinit=init)
#sv7 = Slider(v7ax, 'v7', min, max, valinit=init)
#sv8 = Slider(v8ax, 'v8', min, max, valinit=init)
#
##Adds function that updates the values v0-v8 when you change the slider
#def update(val):
# v0=sv0.val
# v1=sv1.val
# v2=sv2.val
# v3=sv3.val
# v4=sv4.val
# v5=sv5.val
# v6=sv6.val
# v7=sv7.val
# v8=sv8.val
# deBoor=[v0,v1,v2,v3,v4,v5,v6,v7,v8]
#
# l.set_ydata(Bsplbasis(x,deBoor,(x[len(x)-1]-x[0])/99.))
# fig.canvas.draw_idle()
#
#sv0.on_changed(update)
#sv1.on_changed(update)
#sv2.on_changed(update)
#sv3.on_changed(update)
#sv4.on_changed(update)
#sv5.on_changed(update)
#sv6.on_changed(update)
#sv7.on_changed(update)
#sv8.on_changed(update)
#
##Adds a reset button
#resetax = plt.axes([0.8, 0.005, 0.1, 0.04])
#button = Button(resetax, 'Reset', color=axcolor, hovercolor='0.975')
#def reset(event):
# sv0.reset()
# sv1.reset()
# sv2.reset()
# sv3.reset()
# sv4.reset()
# sv5.reset()
# sv6.reset()
# sv7.reset()
# sv8.reset()
#button.on_clicked(reset)
#
##Adds a save values button
#saveax = plt.axes([0.25, 0.005, 0.1, 0.04])
#savebutton = Button(saveax,'Save', color=axcolor, hovercolor='0.975')
#def save(event):
# v0=sv0.val
# v1=sv1.val
# v2=sv2.val
# v3=sv3.val
# v4=sv4.val
# v5=sv5.val
# v6=sv6.val
# v7=sv7.val
# v8=sv8.val
# deBoor=[v0,v1,v2,v3,v4,v5,v6,v7,v8]
# print(deBoor)
#savebutton.on_clicked(save)
#
##Changes the coulor of the line
#rax = plt.axes([0.025, 0.5, 0.15, 0.15], axisbg=axcolor)
#radio = RadioButtons(rax, ('red', 'blue', 'green'), active=0)
#def colorfunc(label):
# l.set_color(label)
# fig.canvas.draw_idle()
#radio.on_clicked(colorfunc)
#
#plt.show()
#-------------------------------------------------------------------------------------
#Task 3 - train data
#wheel = []
#mm = [i for i in range(-70,60+1)]
#
#for i in range(len(mm)):
# wheel += [(-1.)*s1002(mm[i])] #Here I should not have to use the -1 to det it looking right?
#
#coeff = cubespline(mm,wheel)
#xplot = linspace(mm[0],mm[len(mm)-1],100)
#yplot=cubsplineval(coeff,mm,xplot)
#
##Plots the data
##plot(mm,wheel,'o')
#
#plot(xplot,yplot)
| [
"johan.tillmar@gmail.com"
] | johan.tillmar@gmail.com |
9364a7ea7ee0ec5d468df263e0c925c4f78ff3a7 | 9afbb6993450d1e0c3bae68e86844bd06d4419ee | /global_var/g11.py | 32a8a7f1014ea83d6ac59211fac7c41d0e675748 | [] | no_license | Jigar710/Python_Programs | 6f331caac30878655d4cca4ad97d4214c0262088 | 714a6306487eb6712f32ccb51b6a2407a81873fa | refs/heads/main | 2023-02-25T12:24:44.874199 | 2021-01-28T15:43:24 | 2021-01-28T15:43:24 | 332,869,164 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 53 | py | def m1():
global a
a = 10
print(a)
m1()
print(a) | [
"jigar.shekhat.777@gmail.com"
] | jigar.shekhat.777@gmail.com |
61cbca6ae98bfe29817aceebbeaa860f7fd52ced | 3d02b79ce53a83fc0086decb2e933c767c3d55bc | /IPython/parallel/controller/scheduler.py | 00ba2f0560ea955ebb8b3f3da811083b6c61616d | [
"BSD-3-Clause"
] | permissive | minrk/ipython-py3k | 47f8a65ac9767599568c94f01540364b8593a35d | 094195eaf203e88c58f71e834bf0153842ac852c | refs/heads/master | 2023-06-08T10:51:37.423469 | 2011-07-02T01:57:42 | 2011-07-02T01:57:42 | 1,989,947 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 26,626 | py | """The Python scheduler for rich scheduling.
The Pure ZMQ scheduler does not allow routing schemes other than LRU,
nor does it check msg_id DAG dependencies. For those, a slightly slower
Python Scheduler exists.
Authors:
* Min RK
"""
#-----------------------------------------------------------------------------
# Copyright (C) 2010-2011 The IPython Development Team
#
# Distributed under the terms of the BSD License. The full license is in
# the file COPYING, distributed as part of this software.
#-----------------------------------------------------------------------------
#----------------------------------------------------------------------
# Imports
#----------------------------------------------------------------------
import logging
import sys
from datetime import datetime, timedelta
from random import randint, random
from types import FunctionType
try:
import numpy
except ImportError:
numpy = None
import zmq
from zmq.eventloop import ioloop, zmqstream
# local imports
from IPython.external.decorator import decorator
from IPython.config.application import Application
from IPython.config.loader import Config
from IPython.utils.traitlets import Instance, Dict, List, Set, Int, Enum
from IPython.parallel import error
from IPython.parallel.factory import SessionFactory
from IPython.parallel.util import connect_logger, local_logger
from .dependency import Dependency
@decorator
def logged(f,self,*args,**kwargs):
    """Decorator: log every call to the wrapped method at DEBUG level.

    Uses IPython's external ``decorator`` helper, which passes the wrapped
    function as the first argument, followed by the original call arguments
    (``self`` plus whatever the method received).
    """
    self.log.debug("scheduler::%s(*%s,**%s)", f.__name__, args, kwargs)
    return f(self,*args, **kwargs)
#----------------------------------------------------------------------
# Chooser functions
#----------------------------------------------------------------------
def plainrandom(loads):
    """Pick an engine index uniformly at random; load values are ignored."""
    return randint(0, len(loads) - 1)
def lru(loads):
    """Always pick the front of the line.

    The content of `loads` is ignored.  Assuming `loads` is kept in LRU
    order (oldest first), index 0 is the least-recently-used engine.
    """
    return 0
def twobin(loads):
    """Draw two indices uniformly at random and return the smaller one.

    The load values themselves are ignored; `loads` is assumed to be kept
    in LRU order (oldest first), so the smaller index is the LRU of the two.
    """
    last = len(loads) - 1
    first_pick = randint(0, last)
    second_pick = randint(0, last)
    return min(first_pick, second_pick)
def weighted(loads):
    """Pick two indices at random, weighted by inverse load.

    Return the index of the less loaded of the two draws.
    """
    # a load near 0 gets roughly a million times the weight of load 1
    weights = 1./(1e-6+numpy.array(loads))
    sums = weights.cumsum()
    total = sums[-1]
    # two independent weighted draws: each picks the first cumulative-sum
    # bin that reaches the random threshold
    idx = int(numpy.searchsorted(sums, random()*total, side='left'))
    idy = int(numpy.searchsorted(sums, random()*total, side='left'))
    # larger weight means smaller load
    if weights[idy] > weights[idx]:
        return idy
    return idx
def leastload(loads):
    """Return the index of the lowest load (first occurrence on ties).

    If `loads` has LRU ordering, a tie therefore resolves to the LRU of
    the engines sharing the lowest load.
    """
    return min(range(len(loads)), key=loads.__getitem__)
#---------------------------------------------------------------------
# Classes
#---------------------------------------------------------------------
# store empty default dependency:
# an always-satisfied Dependency (no prerequisite msg_ids), shared as the
# default for tasks submitted without `after`/`follow` constraints
MET = Dependency([])
class TaskScheduler(SessionFactory):
"""Python TaskScheduler object.
This is the simplest object that supports msg_id based
DAG dependencies. *Only* task msg_ids are checked, not
msg_ids of jobs submitted via the MUX queue.
"""
hwm = Int(0, config=True, shortname='hwm',
help="""specify the High Water Mark (HWM) for the downstream
socket in the Task scheduler. This is the maximum number
of allowed outstanding tasks on each engine."""
)
scheme_name = Enum(('leastload', 'pure', 'lru', 'plainrandom', 'weighted', 'twobin'),
'leastload', config=True, shortname='scheme', allow_none=False,
help="""select the task scheduler scheme [default: Python LRU]
Options are: 'pure', 'lru', 'plainrandom', 'weighted', 'twobin','leastload'"""
)
def _scheme_name_changed(self, old, new):
self.log.debug("Using scheme %r"%new)
self.scheme = globals()[new]
    # input arguments:
    scheme = Instance(FunctionType) # function for determining the destination
    def _scheme_default(self):
        """Dynamic trait default for `scheme`: the least-load chooser."""
        return leastload
client_stream = Instance(zmqstream.ZMQStream) # client-facing stream
engine_stream = Instance(zmqstream.ZMQStream) # engine-facing stream
notifier_stream = Instance(zmqstream.ZMQStream) # hub-facing sub stream
mon_stream = Instance(zmqstream.ZMQStream) # hub-facing pub stream
# internals:
graph = Dict() # dict by msg_id of [ msg_ids that depend on key ]
retries = Dict() # dict by msg_id of retries remaining (non-neg ints)
# waiting = List() # list of msg_ids ready to run, but haven't due to HWM
depending = Dict() # dict by msg_id of (msg_id, raw_msg, after, follow)
pending = Dict() # dict by engine_uuid of submitted tasks
completed = Dict() # dict by engine_uuid of completed tasks
failed = Dict() # dict by engine_uuid of failed tasks
destinations = Dict() # dict by msg_id of engine_uuids where jobs ran (reverse of completed+failed)
clients = Dict() # dict by msg_id for who submitted the task
targets = List() # list of target IDENTs
loads = List() # list of engine loads
# full = Set() # set of IDENTs that have HWM outstanding tasks
all_completed = Set() # set of all completed tasks
all_failed = Set() # set of all failed tasks
all_done = Set() # set of all finished tasks=union(completed,failed)
all_ids = Set() # set of all submitted task IDs
blacklist = Dict() # dict by msg_id of locations where a job has encountered UnmetDependency
auditor = Instance('zmq.eventloop.ioloop.PeriodicCallback')
def start(self):
self.engine_stream.on_recv(self.dispatch_result, copy=False)
self._notification_handlers = dict(
registration_notification = self._register_engine,
unregistration_notification = self._unregister_engine
)
self.notifier_stream.on_recv(self.dispatch_notification)
self.auditor = ioloop.PeriodicCallback(self.audit_timeouts, 2e3, self.loop) # 1 Hz
self.auditor.start()
self.log.info("Scheduler started [%s]"%self.scheme_name)
def resume_receiving(self):
    """Resume accepting jobs."""
    # re-attach the submission callback to the client-facing stream
    self.client_stream.on_recv(self.dispatch_submission, copy=False)
def stop_receiving(self):
    """Stop accepting jobs while there are no engines.
    Leave them in the ZMQ queue."""
    # detaching the recv callback leaves submissions buffered in ZMQ
    self.client_stream.on_recv(None)
#-----------------------------------------------------------------------
# [Un]Registration Handling
#-----------------------------------------------------------------------
def dispatch_notification(self, msg):
    """Dispatch register/unregister events from the Hub's notifier stream."""
    # split ZMQ routing identities from the message body
    try:
        idents,msg = self.session.feed_identities(msg)
    except ValueError:
        self.log.warn("task::Invalid Message: %r"%msg)
        return
    # verify/unpack the session message
    try:
        msg = self.session.unpack_message(msg)
    except ValueError:
        self.log.warn("task::Unauthorized message from: %r"%idents)
        return
    msg_type = msg['msg_type']
    # look up the handler installed in start()
    handler = self._notification_handlers.get(msg_type, None)
    if handler is None:
        self.log.error("Unhandled message type: %r"%msg_type)
    else:
        try:
            # handlers take the engine's queue identity as a str
            handler(str(msg['content']['queue']))
        except KeyError:
            self.log.error("task::Invalid notification msg: %r"%msg)
def _register_engine(self, uid):
    """New engine with ident `uid` became available."""
    # head of the line: new engine starts with zero load, first in LRU order
    self.targets.insert(0,uid)
    self.loads.insert(0,0)
    # initialize per-engine bookkeeping sets
    self.completed[uid] = set()
    self.failed[uid] = set()
    self.pending[uid] = {}
    if len(self.targets) == 1:
        # first engine after a drought: start accepting client submissions again
        self.resume_receiving()
    # rescan the graph: capacity changed, some waiting jobs may now be runnable
    self.update_graph(None)
def _unregister_engine(self, uid):
    """Existing engine with ident `uid` became unavailable."""
    if len(self.targets) == 1:
        # this was our only engine; stop taking submissions
        self.stop_receiving()
    # handle any potentially finished tasks:
    self.engine_stream.flush()
    # don't pop destinations, because they might be used later
    # map(self.destinations.pop, self.completed.pop(uid))
    # map(self.destinations.pop, self.failed.pop(uid))
    # prevent this engine from receiving work
    idx = self.targets.index(uid)
    self.targets.pop(idx)
    self.loads.pop(idx)
    # wait 5 seconds before cleaning up pending jobs, since the results might
    # still be incoming
    if self.pending[uid]:
        # handle_stranded_tasks will scrub completed/failed for this uid
        dc = ioloop.DelayedCallback(lambda : self.handle_stranded_tasks(uid), 5000, self.loop)
        dc.start()
    else:
        # nothing pending: scrub the per-engine sets immediately
        self.completed.pop(uid)
        self.failed.pop(uid)
def handle_stranded_tasks(self, engine):
    """Deal with jobs resident in an engine that died.

    Builds a fake error ('apply_reply' with EngineError) for every task still
    pending on `engine` and feeds it through dispatch_result as if the engine
    had replied, then scrubs the per-engine bookkeeping.
    """
    lost = self.pending[engine]
    for msg_id in list(lost.keys()):
        if msg_id not in self.pending[engine]:
            # prevent double-handling of messages
            continue
        # first element of the saved args tuple is the raw submission
        raw_msg = lost[msg_id][0]
        idents,msg = self.session.feed_identities(raw_msg, copy=False)
        parent = self.session.unpack(msg[1].bytes)
        # route the fake reply as [engine, client]
        idents = [engine, idents[0]]
        # build fake error reply
        try:
            raise error.EngineError("Engine %r died while running task %r"%(engine, msg_id))
        except:
            content = error.wrap_exception()
        msg = self.session.msg('apply_reply', content, parent=parent, subheader={'status':'error'})
        raw_reply = list(map(zmq.Message, self.session.serialize(msg, ident=idents)))
        # and dispatch it
        self.dispatch_result(raw_reply)
    # finally scrub completed/failed lists
    self.completed.pop(engine)
    self.failed.pop(engine)
#-----------------------------------------------------------------------
# Job Submission
#-----------------------------------------------------------------------
def dispatch_submission(self, raw_msg):
    """Dispatch job submission to appropriate handlers.

    Parses targets/retries/time- and location-dependencies from the header,
    validates the dependencies, and either runs the task immediately or
    parks it in self.depending until its dependencies are met.
    """
    # ensure targets up to date:
    self.notifier_stream.flush()
    try:
        idents, msg = self.session.feed_identities(raw_msg, copy=False)
        msg = self.session.unpack_message(msg, content=False, copy=False)
    except Exception:
        self.log.error("task::Invaid task msg: %r"%raw_msg, exc_info=True)
        return
    # send to monitor
    self.mon_stream.send_multipart(['intask']+raw_msg, copy=False)
    header = msg['header']
    msg_id = header['msg_id']
    self.all_ids.add(msg_id)
    # targets: explicit engine restriction, empty set means "any engine"
    targets = set(header.get('targets', []))
    retries = header.get('retries', 0)
    self.retries[msg_id] = retries
    # time dependencies
    after = header.get('after', None)
    if after:
        after = Dependency(after)
        if after.all:
            # drop already-finished ids so the remaining set is what's pending
            if after.success:
                after = after.difference(self.all_completed)
            if after.failure:
                after = after.difference(self.all_failed)
        if after.check(self.all_completed, self.all_failed):
            # recast as empty set, if `after` already met,
            # to prevent unnecessary set comparisons
            after = MET
    else:
        after = MET
    # location dependencies
    follow = Dependency(header.get('follow', []))
    # turn timeouts into datetime objects:
    timeout = header.get('timeout', None)
    if timeout:
        timeout = datetime.now() + timedelta(0,timeout,0)
    # canonical saved-args order: [raw_msg, targets, after, follow, timeout]
    args = [raw_msg, targets, after, follow, timeout]
    # validate and reduce dependencies:
    for dep in after,follow:
        if not dep: # empty dependency
            continue
        # check valid: depending on self, or on an unknown id, is invalid
        if msg_id in dep or dep.difference(self.all_ids):
            self.depending[msg_id] = args
            return self.fail_unreachable(msg_id, error.InvalidDependency)
        # check if unreachable:
        if dep.unreachable(self.all_completed, self.all_failed):
            self.depending[msg_id] = args
            return self.fail_unreachable(msg_id)
    if after.check(self.all_completed, self.all_failed):
        # time deps already met, try to run
        if not self.maybe_run(msg_id, *args):
            # can't run yet
            if msg_id not in self.all_failed:
                # could have failed as unreachable
                self.save_unmet(msg_id, *args)
    else:
        self.save_unmet(msg_id, *args)
def audit_timeouts(self):
    """Audit all waiting tasks for expired timeouts.

    Iterates a snapshot of self.depending and fails any task whose
    timeout (a datetime, or None for "no timeout") has passed.
    """
    now = datetime.now()
    for msg_id in list(self.depending.keys()):
        # must recheck, in case one failure cascaded to another:
        if msg_id in self.depending:
            # unpack in the stored order [raw_msg, targets, after, follow, timeout]
            # (previous code swapped the targets/after names, which was
            # harmless only because just `timeout` is used here)
            raw_msg, targets, after, follow, timeout = self.depending[msg_id]
            if timeout and timeout < now:
                self.fail_unreachable(msg_id, error.TaskTimeout)
def fail_unreachable(self, msg_id, why=error.ImpossibleDependency):
    """a task has become unreachable, send a reply with an ImpossibleDependency
    error."""
    if msg_id not in self.depending:
        self.log.error("msg %r already failed!", msg_id)
        return
    # saved order: [raw_msg, targets, after, follow, timeout]
    raw_msg,targets,after,follow,timeout = self.depending.pop(msg_id)
    # unlink this task from the dependency graph
    for mid in follow.union(after):
        if mid in self.graph:
            self.graph[mid].remove(msg_id)
    # FIXME: unpacking a message I've already unpacked, but didn't save:
    idents,msg = self.session.feed_identities(raw_msg, copy=False)
    header = self.session.unpack(msg[1].bytes)
    # raise/catch so wrap_exception() can capture a real traceback
    try:
        raise why()
    except:
        content = error.wrap_exception()
    # mark as failed/done before relaying the error reply
    self.all_done.add(msg_id)
    self.all_failed.add(msg_id)
    msg = self.session.send(self.client_stream, 'apply_reply', content,
                            parent=header, ident=idents)
    self.session.send(self.mon_stream, msg, ident=['outtask']+idents)
    # this failure may cascade to dependents
    self.update_graph(msg_id, success=False)
def maybe_run(self, msg_id, raw_msg, targets, after, follow, timeout):
    """check location dependencies, and run if they are met.

    Returns True if the task was submitted, False if it cannot run yet
    (or was failed as unreachable via fail_unreachable).
    """
    blacklist = self.blacklist.setdefault(msg_id, set())
    if follow or targets or blacklist or self.hwm:
        # we need a can_run filter
        def can_run(idx):
            # check hwm: engine at the high-water mark takes no more work
            if self.hwm and self.loads[idx] == self.hwm:
                return False
            target = self.targets[idx]
            # check blacklist: engines that already raised UnmetDependency
            if target in blacklist:
                return False
            # check targets
            if targets and target not in targets:
                return False
            # check follow
            return follow.check(self.completed[target], self.failed[target])
        indices = list(filter(can_run, list(range(len(self.targets)))))
        if not indices:
            # couldn't run
            if follow.all:
                # check follow for impossibility: an `all` follow-dep whose
                # finished members ran on more than one engine can never be met
                dests = set()
                relevant = set()
                if follow.success:
                    relevant = self.all_completed
                if follow.failure:
                    relevant = relevant.union(self.all_failed)
                for m in follow.intersection(relevant):
                    dests.add(self.destinations[m])
                if len(dests) > 1:
                    self.depending[msg_id] = (raw_msg, targets, after, follow, timeout)
                    self.fail_unreachable(msg_id)
                    return False
            if targets:
                # check blacklist+targets for impossibility
                targets.difference_update(blacklist)
                if not targets or not targets.intersection(self.targets):
                    self.depending[msg_id] = (raw_msg, targets, after, follow, timeout)
                    self.fail_unreachable(msg_id)
                    return False
            return False
    else:
        # no restrictions at all: let the scheme pick from every engine
        indices = None
    self.submit_task(msg_id, raw_msg, targets, follow, timeout, indices)
    return True
def save_unmet(self, msg_id, raw_msg, targets, after, follow, timeout):
    """Save a message for later submission when its dependencies are met."""
    self.depending[msg_id] = [raw_msg, targets, after, follow, timeout]
    # Register msg_id under each dependency that hasn't finished yet, so
    # update_graph can find it once those dependencies complete.
    unfinished = after.union(follow).difference(self.all_done)
    for dep_id in unfinished:
        self.graph.setdefault(dep_id, set()).add(msg_id)
def submit_task(self, msg_id, raw_msg, targets, follow, timeout, indices=None):
    """Submit a task to any of a subset of our targets.

    `indices` restricts the choice to those positions in self.targets;
    None means any engine. The scheme function picks by load.
    """
    if indices:
        loads = [self.loads[i] for i in indices]
    else:
        loads = self.loads
    # scheme returns an index into the (possibly filtered) loads list
    idx = self.scheme(loads)
    if indices:
        # map back to a position in the full targets list
        idx = indices[idx]
    target = self.targets[idx]
    # send job to the engine
    self.engine_stream.send(target, flags=zmq.SNDMORE, copy=False)
    self.engine_stream.send_multipart(raw_msg, copy=False)
    # update load
    self.add_job(idx)
    # `after` is recorded as MET: time deps were satisfied at submit time
    self.pending[target][msg_id] = (raw_msg, targets, MET, follow, timeout)
    # notify Hub
    content = dict(msg_id=msg_id, engine_id=target)
    self.session.send(self.mon_stream, 'task_destination', content=content,
                      ident=['tracktask',self.session.session])
#-----------------------------------------------------------------------
# Result Handling
#-----------------------------------------------------------------------
def dispatch_result(self, raw_msg):
    """dispatch method for result replies

    Updates the engine's load, then either relays the result to the client
    (possibly after decrementing a retry budget) or treats it as an unmet
    dependency for resubmission.
    """
    try:
        idents,msg = self.session.feed_identities(raw_msg, copy=False)
        msg = self.session.unpack_message(msg, content=False, copy=False)
        engine = idents[0]
        try:
            idx = self.targets.index(engine)
        except ValueError:
            pass # skip load-update for dead engines
        else:
            self.finish_job(idx)
    except Exception:
        self.log.error("task::Invaid result: %r", raw_msg, exc_info=True)
        return
    header = msg['header']
    parent = msg['parent_header']
    if header.get('dependencies_met', True):
        success = (header['status'] == 'ok')
        msg_id = parent['msg_id']
        retries = self.retries[msg_id]
        if not success and retries > 0:
            # failed, but retries remain: treat like an unmet dependency
            self.retries[msg_id] = retries - 1
            self.handle_unmet_dependency(idents, parent)
        else:
            del self.retries[msg_id]
            # relay to client and update graph
            self.handle_result(idents, parent, raw_msg, success)
            # send to Hub monitor
            self.mon_stream.send_multipart(['outtask']+raw_msg, copy=False)
    else:
        self.handle_unmet_dependency(idents, parent)
def handle_result(self, idents, parent, raw_msg, success=True):
    """handle a real task result, either success or failure"""
    # first, relay result to client
    engine = idents[0]
    client = idents[1]
    # swap_ids for XREP-XREP mirror: reply routes back through the client id
    raw_msg[:2] = [client,engine]
    self.client_stream.send_multipart(raw_msg, copy=False)
    # now, update our data structures
    msg_id = parent['msg_id']
    # the task is done: its blacklist entry is no longer needed
    self.blacklist.pop(msg_id, None)
    self.pending[engine].pop(msg_id)
    if success:
        self.completed[engine].add(msg_id)
        self.all_completed.add(msg_id)
    else:
        self.failed[engine].add(msg_id)
        self.all_failed.add(msg_id)
    self.all_done.add(msg_id)
    # remember where it ran, for follow-dependencies of later tasks
    self.destinations[msg_id] = engine
    self.update_graph(msg_id, success)
def handle_unmet_dependency(self, idents, parent):
    """handle an unmet dependency

    The engine refused (or failed) the task; blacklist it there and try to
    run the task somewhere else, or park/fail it.
    """
    engine = idents[0]
    msg_id = parent['msg_id']
    if msg_id not in self.blacklist:
        self.blacklist[msg_id] = set()
    self.blacklist[msg_id].add(engine)
    # args is the saved tuple (raw, targets, after, follow, timeout)
    args = self.pending[engine].pop(msg_id)
    raw,targets,after,follow,timeout = args
    if self.blacklist[msg_id] == targets:
        # every allowed engine has now refused this task
        self.depending[msg_id] = args
        self.fail_unreachable(msg_id)
    elif not self.maybe_run(msg_id, *args):
        # resubmit failed
        if msg_id not in self.all_failed:
            # put it back in our dependency tree
            self.save_unmet(msg_id, *args)
    if self.hwm:
        try:
            idx = self.targets.index(engine)
        except ValueError:
            pass # skip load-update for dead engines
        else:
            # engine just dropped below the high-water mark: rescan everything
            if self.loads[idx] == self.hwm-1:
                self.update_graph(None)
def update_graph(self, dep_id=None, success=True):
    """dep_id just finished. Update our dependency
    graph and submit any jobs that just became runable.
    Called with dep_id=None to update entire graph for hwm, but without finishing
    a task.
    """
    # update any jobs that depended on the dependency
    jobs = self.graph.pop(dep_id, [])
    # recheck *all* jobs if
    # a) we have HWM and an engine just become no longer full
    # or b) dep_id was given as None
    if dep_id is None or self.hwm and any( [ load==self.hwm-1 for load in self.loads ]):
        jobs = list(self.depending.keys())
    for msg_id in jobs:
        # saved order: [raw_msg, targets, after, follow, timeout]
        raw_msg, targets, after, follow, timeout = self.depending[msg_id]
        if after.unreachable(self.all_completed, self.all_failed)\
            or follow.unreachable(self.all_completed, self.all_failed):
            self.fail_unreachable(msg_id)
        elif after.check(self.all_completed, self.all_failed): # time deps met, maybe run
            # pass MET for `after` since it is known satisfied here
            if self.maybe_run(msg_id, raw_msg, targets, MET, follow, timeout):
                self.depending.pop(msg_id)
                # scrub this job from the graph entries of its other deps
                for mid in follow.union(after):
                    if mid in self.graph:
                        self.graph[mid].remove(msg_id)
#----------------------------------------------------------------------
# methods to be overridden by subclasses
#----------------------------------------------------------------------
def add_job(self, idx):
    """Called after self.targets[idx] just got the job with header.
    Override with subclasses. The default ordering is simple LRU.
    The default loads are the number of outstanding jobs."""
    self.loads[idx] += 1
    # Rotate that engine to the back of both parallel lists (LRU order).
    self.targets.append(self.targets.pop(idx))
    self.loads.append(self.loads.pop(idx))
def finish_job(self, idx):
    """Called after self.targets[idx] just finished a job.
    Override with subclasses."""
    # One fewer outstanding task on that engine.
    self.loads[idx] = self.loads[idx] - 1
def launch_scheduler(in_addr, out_addr, mon_addr, not_addr, config=None,
                     logname='root', log_url=None, loglevel=logging.DEBUG,
                     identity=b'task', in_thread=False):
    """Create, wire up, and start a TaskScheduler.

    in_addr/out_addr: client- and engine-facing XREP bind addresses.
    mon_addr: Hub monitor PUB connect address; not_addr: Hub notification
    SUB connect address. If in_thread, reuse the parent's Context/IOLoop
    and do not start the loop here.
    """
    ZMQStream = zmqstream.ZMQStream
    if config:
        # unwrap dict back into Config
        config = Config(config)
    if in_thread:
        # use instance() to get the same Context/Loop as our parent
        ctx = zmq.Context.instance()
        loop = ioloop.IOLoop.instance()
    else:
        # in a process, don't use instance()
        # for safety with multiprocessing
        ctx = zmq.Context()
        loop = ioloop.IOLoop()
    # client-facing stream
    ins = ZMQStream(ctx.socket(zmq.XREP),loop)
    ins.setsockopt(zmq.IDENTITY, identity)
    ins.bind(in_addr)
    # engine-facing stream
    outs = ZMQStream(ctx.socket(zmq.XREP),loop)
    outs.setsockopt(zmq.IDENTITY, identity)
    outs.bind(out_addr)
    # hub-facing pub stream (monitor)
    mons = zmqstream.ZMQStream(ctx.socket(zmq.PUB),loop)
    mons.connect(mon_addr)
    # hub-facing sub stream (notifications), subscribed to everything
    nots = zmqstream.ZMQStream(ctx.socket(zmq.SUB),loop)
    nots.setsockopt(zmq.SUBSCRIBE, b'')
    nots.connect(not_addr)
    # setup logging.
    if in_thread:
        log = Application.instance().log
    else:
        if log_url:
            log = connect_logger(logname, ctx, log_url, root="scheduler", loglevel=loglevel)
        else:
            log = local_logger(logname, loglevel)
    scheduler = TaskScheduler(client_stream=ins, engine_stream=outs,
                              mon_stream=mons, notifier_stream=nots,
                              loop=loop, log=log,
                              config=config)
    scheduler.start()
    if not in_thread:
        try:
            loop.start()
        except KeyboardInterrupt:
            print ("interrupted, exiting...", file=sys.__stderr__)
| [
"takowl@gmail.com"
] | takowl@gmail.com |
ff6087e86a41b4915477217ada61e8481a1fdf35 | f13a8904c59a205fd77e569bc1c19cf8f3e5fed8 | /mini_window.py | adae202b23bbfaf4db0e3bd66647d59bacff896d | [] | no_license | liberize/rhythmbox-doubanfm-plugin | 603a9721f1e856a500c3d6353813313fc115d95d | 8908fca35b8feffd226bbfaf37d6118dfac671f6 | refs/heads/master | 2021-01-10T01:00:38.333927 | 2015-12-15T08:07:19 | 2015-12-15T08:07:19 | 10,237,455 | 8 | 5 | null | null | null | null | UTF-8 | Python | false | false | 11,691 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (C) 2013 liberize <liberize@gmail.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import os, urllib, thread
from gi.repository import Gtk, Gdk, GdkPixbuf, GObject, Pango, RB, GLib
from doubanfm_keys import *
WINDOW_FILE = 'mini_window.glade'
class MiniWindow(GObject.Object):
    """Compact player window for the Rhythmbox DoubanFM plugin.

    Mirrors the shell player's state (current song, position, volume,
    play/pause) into a small Glade-built window and exposes favorite/skip/
    delete/share actions. Written for Python 2 (uses `thread` and
    `urllib.urlopen`).
    """
    __gtype_name__ = 'MiniWindow'
    object = GObject.property(type=GObject.Object)

    def __init__(self, plugin):
        # references into the hosting plugin / Rhythmbox shell
        self.plugin = plugin
        self.shell = plugin.shell
        self.source = plugin.source
        self.player = plugin.player
        self.current_song = None
        self.keep_above = False
        # build widgets from the Glade file and wire up signal handlers
        self.ui = Gtk.Builder()
        self.ui.add_from_file(PLUGIN_DIR + WINDOW_FILE)
        self.ui.connect_signals({
            'on_fav_song': self.on_fav_song,
            'on_skip_song': self.on_skip_song,
            'on_del_song': self.on_del_song,
            'on_go_home': self.on_go_home,
            'on_volume_changed': self.on_volume_changed,
            'on_change_play_time': self.on_change_play_time,
            'on_play_time_button_press': self.on_play_time_button_press,
            'on_play_time_button_release': self.on_play_time_button_release,
            'on_cover_button_press': self.on_cover_button_press,
            'on_settings': self.on_settings,
            'on_album': self.on_album,
            'on_report': self.on_report,
            'on_show_menu': self.on_show_menu,
            'on_keep_above': self.on_keep_above,
            'on_quit': self.on_quit,
            'on_pause': self.on_pause,
            'on_recommend_song': self.on_recommend_song,
            'on_share_sina': self.on_share_sina,
            'on_share_renren': self.on_share_renren,
            'on_share_kaixin001': self.on_share_kaixin001,
            'on_share_twitter': self.on_share_twitter,
            'on_share_fanfou': self.on_share_fanfou,
            'on_copy_permalink': self.on_copy_permalink
        })
        self.mini_window = self.ui.get_object('mini_window')
        self.mini_window.connect('destroy', self.on_destroy)
        self.cover_image = self.ui.get_object('cover_image')
        # bold, 12pt title label
        self.song_title_label = self.ui.get_object('song_title_label')
        font_description = self.song_title_label.get_pango_context().get_font_description()
        font_description.set_size(1024 * 12)
        font_description.set_weight(Pango.Weight.BOLD)
        self.song_title_label.override_font(font_description)
        self.song_info_label = self.ui.get_object('song_info_label')
        self.fav_button = self.ui.get_object('fav_button')
        self.del_button = self.ui.get_object('del_button')
        self.skip_button = self.ui.get_object('skip_button')
        self.pause_button = self.ui.get_object('pause_button')
        self.keep_above_button = self.ui.get_object('keep_above_button')
        self.volume_button = self.ui.get_object('volume_button')
        self.play_time_scale = self.ui.get_object('play_time_scale')
        self.play_time_scale.connect('format-value', self.on_format_value)
        # True while the user is dragging the seek scale
        self.button_down = False
        self.popup_menu = self.ui.get_object('more_menu')
        self.report_menuitem = self.ui.get_object('report_menuitem')
        self.album_menuitem = self.ui.get_object('album_menuitem')
        self.share_menuitem = self.ui.get_object('share_menuitem')
        self.channels_menu = self.ui.get_object('channels_menu')
        self.plugin.build_submenu(self.channels_menu)
        # pre-built icons swapped onto buttons as state changes
        self.button_images = {
            'fav': Gtk.Image.new_from_icon_name('emblem-favorite', Gtk.IconSize.BUTTON),
            'nofav': Gtk.Image.new_from_icon_name('bookmark-new', Gtk.IconSize.BUTTON),
            'pause': Gtk.Image.new_from_stock('gtk-media-pause', Gtk.IconSize.BUTTON),
            'play': Gtk.Image.new_from_stock('gtk-media-play', Gtk.IconSize.BUTTON),
            'down': Gtk.Image.new_from_stock('gtk-go-down', Gtk.IconSize.BUTTON),
            'above': Gtk.Image.new_from_stock('gtk-goto-top', Gtk.IconSize.BUTTON)
        }
        # widgets that are only meaningful while a song is playing
        self.sensitive_widgets = [
            self.fav_button,
            self.del_button,
            self.skip_button,
            self.report_menuitem,
            self.album_menuitem,
            self.share_menuitem
        ]
        # per-service share-URL templates, filled via % with quoted values
        self.share_templates = {
            'kaixin001': "http://www.kaixin001.com/repaste/bshare.php?rurl=%s&rcontent=&rtitle=%s",
            'renren': "http://www.connect.renren.com/share/sharer?title=%s&url=%s",
            'sina': "http://v.t.sina.com.cn/share/share.php?appkey=3015934887&url=%s&title=%s&source=&sourceUrl=&content=utf-8&pic=%s",
            'twitter': "http://twitter.com/share?text=%s&url=%s",
            'fanfou': "http://fanfou.com/sharer?u=%s&t=%s&d=&s=bm",
            'douban': "http://shuo.douban.com/!service/share?name=%s&href=%s&image=%s&text=&desc=(%s)&apikey=0ace3f74eb3bd5d8206abe5ec1b38188&target_type=rec&target_action=0&object_kind=3043&object_id=%s"
        }

    # NOTE(review): method name 'set_visibile' (sic) kept -- callers use it.
    def set_visibile(self, visible):
        """Swap between the mini window and the full Rhythmbox window."""
        self.set_handle_signals(visible)
        if visible:
            self.initialize()
            self.mini_window.show_all()
            self.shell.props.window.hide()
        else:
            self.plugin.initialize()
            self.shell.props.window.show_all()
            self.mini_window.hide()

    def initialize(self):
        """Sync the window with the player's current state (or start playing)."""
        entry = self.player.get_playing_entry()
        if entry == None:
            self.player.set_playing_source(self.source)
            self.player.do_next()
        else:
            # replay the change signals by hand to populate the widgets
            self.on_playing_song_changed(self.player, entry)
            self.on_elapsed_changed(self.player, self.player.get_playing_time()[1])
            self.on_playing_changed(self.player, self.player.get_playing()[1])
        self.volume_button.set_value(self.player.get_volume()[1])

    def set_handle_signals(self, handle):
        """Connect or disconnect this window's player-signal handlers."""
        if handle:
            self.handlers = [
                self.player.connect('playing-song-changed', self.on_playing_song_changed),
                self.player.connect('elapsed-changed', self.on_elapsed_changed),
                self.player.connect('playing-changed', self.on_playing_changed)
            ]
        else:
            for handler in self.handlers:
                self.player.disconnect(handler)
        # the plugin's own handlers are the mirror image of ours
        self.plugin.set_handle_signals(not handle)

    def set_sensitive(self, sensitive):
        # enable/disable the song-dependent widgets as a group
        for widget in self.sensitive_widgets:
            widget.set_sensitive(sensitive)

    def on_destroy(self, *e):
        # closing the mini window returns to the main window
        self.set_visibile(False)

    def on_quit(self, *e):
        self.shell.quit()

    def on_fav_song(self, *e):
        """Toggle the 'like' state of the current song."""
        if self.current_song.like:
            self.fav_button.set_image(self.button_images['nofav'])
            self.source.unfav_song(self.current_song)
        else:
            self.fav_button.set_image(self.button_images['fav'])
            self.source.fav_song(self.current_song)

    def on_skip_song(self, *e):
        self.source.skip_song(self.current_song)

    def on_del_song(self, *e):
        self.source.del_song(self.current_song)

    def on_go_home(self, *e):
        self.set_visibile(False)

    def on_volume_changed(self, *e):
        self.player.set_volume(self.volume_button.get_value())

    def on_format_value(self, scale, value):
        """Format the seek scale value as 'elapsed/total'."""
        if self.current_song != None:
            pos_in_secs = int(value * self.current_song.length)
            return '%02d:%02d/%s ' % (pos_in_secs / 60, pos_in_secs % 60,
                                      self.formatted_song_length)
        return ''

    def on_play_time_button_press(self, *e):
        self.button_down = True

    def on_play_time_button_release(self, *e):
        self.button_down = False

    def on_change_play_time(self, *e):
        # only seek once the user is no longer dragging the scale
        if not self.button_down:
            self.play_pos = self.play_time_scale.get_value()
            pos_in_secs = int(self.current_song.length * self.play_pos)
            self.player.set_playing_time(pos_in_secs)

    def on_playing_song_changed(self, player, entry):
        """Refresh title/info/cover/fav state for the new entry."""
        if entry != None:
            title = entry.get_string(RB.RhythmDBPropType.TITLE)
            self.current_song = self.source.get_song_by_title(title)
            self.formatted_song_length = '%02d:%02d' % (self.current_song.length / 60,
                                                        self.current_song.length % 60)
            self.song_title_str = ('%s - %s' % (self.current_song.title, self.current_song.artist)
                                   ).encode('utf-8')
            self.song_info_str = ('< %s > %s' % (self.current_song.albumtitle, self.current_song.public_time)
                                  ).encode('utf-8')
            self.song_url = self.current_song.get_uri()
            self.mini_window.set_title(self.song_title_str + ' - Rhythmbox 豆瓣FM')
            self.song_title_label.set_label(self.song_title_str)
            self.song_info_label.set_label(self.song_info_str)
            self.fav_button.set_image(self.button_images['fav'] if self.current_song.like else
                                      self.button_images['nofav'])
            # fetch the cover off the main thread; GLib.idle_add applies it
            thread.start_new_thread(self.update_cover_image, ())
        else:
            self.source.new_playlist()

    def on_elapsed_changed(self, player, elapsed):
        """Track playback position; report the song played when it ends."""
        if self.current_song != None:
            if not self.button_down:
                self.play_pos = float(elapsed) / float(self.current_song.length)
                self.play_time_scale.set_value(self.play_pos)
            if elapsed == self.current_song.length:
                self.source.played_song(self.current_song)

    def on_pause(self, *e):
        self.player.playpause(True)

    def on_playing_changed(self, player, playing):
        # swap play/pause icon and toggle song-dependent widgets
        self.pause_button.set_image(self.button_images['pause'] if playing else
                                    self.button_images['play'])
        self.set_sensitive(playing)

    def update_cover_image(self):
        # runs on a worker thread; hands the raw bytes back to the GTK loop
        url = self.current_song.picture
        GLib.idle_add(self.update_cover_image_cb, urllib.urlopen(url).read())

    def update_cover_image_cb(self, data):
        # runs on the GTK main loop: decode the bytes and show the pixbuf
        loader = GdkPixbuf.PixbufLoader()
        loader.write(data)
        loader.close()
        self.cover_image.set_from_pixbuf(loader.get_pixbuf())

    def on_cover_button_press(self, widget, event):
        # double-click on the cover opens the album page
        if event.type == Gdk.EventType._2BUTTON_PRESS and event.button == Gdk.BUTTON_PRIMARY:
            self.on_album()

    def on_settings(self, *e):
        os.popen(' '.join(['xdg-open', 'http://douban.fm/mine']))

    def on_album(self, *e):
        url = "http://music.douban.com/subject/%s/" % self.current_song.aid
        os.popen(' '.join(['xdg-open', url]))

    def on_report(self, *e):
        url = ("http://music.douban.com/subject/%s/report?song_id=%s" %
               (self.current_song.aid, self.current_song.sid))
        os.popen(' '.join(['xdg-open', url]))

    def on_keep_above(self, *e):
        """Toggle the window's always-on-top state."""
        self.keep_above = not self.keep_above
        self.mini_window.set_keep_above(self.keep_above)
        self.keep_above_button.set_image(self.button_images['down'] if self.keep_above else
                                         self.button_images['above'])

    def on_show_menu(self, widget, event):
        self.popup_menu.popup(None, None, None, None, event.button, event.time)
        return True

    def on_share_sina(self, *e):
        url = self.share_templates['sina'] % tuple(map(urllib.quote_plus,
            [self.song_url, self.song_title_str, self.current_song.picture]))
        os.popen(' '.join(['xdg-open', '"%s"' % url]))

    def on_share_kaixin001(self, *e):
        url = self.share_templates['kaixin001'] % tuple(map(urllib.quote_plus,
            [self.song_url, self.song_title_str]))
        os.popen(' '.join(['xdg-open', '"%s"' % url]))

    def on_share_renren(self, *e):
        url = self.share_templates['renren'] % tuple(map(urllib.quote_plus,
            [self.song_title_str, self.song_url]))
        os.popen(' '.join(['xdg-open', '"%s"' % url]))

    def on_share_twitter(self, *e):
        url = self.share_templates['twitter'] % tuple(map(urllib.quote_plus,
            [self.song_title_str, self.song_url]))
        os.popen(' '.join(['xdg-open', '"%s"' % url]))

    def on_share_fanfou(self, *e):
        url = self.share_templates['fanfou'] % tuple(map(urllib.quote_plus,
            [self.song_url, self.song_title_str]))
        os.popen(' '.join(['xdg-open', '"%s"' % url]))

    def on_recommend_song(self, *e):
        url = self.share_templates['douban'] % tuple(map(urllib.quote_plus, [
            self.current_song.title.encode('utf8'),
            self.song_url,
            self.current_song.picture,
            "Rhythmbox DoubanFM Plugin",
            self.current_song.sid
        ]))
        os.popen(' '.join(['xdg-open', '"%s"' % url]))

    def on_copy_permalink(self, *e):
        clipboard = Gtk.Clipboard.get(Gdk.SELECTION_CLIPBOARD)
        clipboard.set_text(self.song_url, -1)
| [
"liberize@gmail.com"
] | liberize@gmail.com |
c48c7baf0c80e74dc9eae1476c92c21ad7e92be9 | 3130399e365f4e3fb5d2877ed9575ea184e1a23c | /160-拼接遍历法-Intersection of Two Linked Lists.py | 8f077193845e02be76bc50d643c4e46f0adb7076 | [] | no_license | johnzan0743/My_Leet_Code_Note | af4b3e8ccf8aa05a80e9caddd273a74eed510c57 | 7351d3447cedefcb93552082f1137fd4c9ddd233 | refs/heads/master | 2022-12-02T20:50:14.740450 | 2020-08-15T05:21:51 | 2020-08-15T05:21:51 | 274,098,210 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 497 | py | # Definition for singly-linked list.
# class ListNode:
# def __init__(self, x):
# self.val = x
# self.next = None
class Solution:
    # String annotations: ListNode is only defined by the judge at runtime
    # (in this file it exists only in the comment above), so evaluating the
    # bare name at class-definition time would raise NameError.
    def getIntersectionNode(self, headA: "ListNode", headB: "ListNode") -> "ListNode":
        """Return the node where two singly linked lists intersect, or None.

        Two-pointer technique: each pointer walks its own list and then the
        other, so both traverse len(A) + len(B) steps and meet either at the
        intersection node or at None when the lists are disjoint.
        O(m + n) time, O(1) extra space.
        """
        p1 = headA
        p2 = headB
        while p1 is not p2:
            # When a pointer falls off the end of its list, restart it at the
            # other list's head; None is the shared terminal for the no-
            # intersection case.
            p1 = p1.next if p1 else headB
            p2 = p2.next if p2 else headA
        return p1
"65598659+johnzan0743@users.noreply.github.com"
] | 65598659+johnzan0743@users.noreply.github.com |
b453aed2c254c9389e6d16e6972bda279a7aa2b9 | cf3891c6122d21584bb6d7ad81c41e26755c1083 | /tests/gmprocess/subcommands/import_test.py | e189a580a0d4d0d4d0c4ed40a44128a0147e9ff5 | [
"Unlicense",
"LicenseRef-scancode-public-domain",
"LicenseRef-scancode-warranty-disclaimer"
] | permissive | mmoschetti-usgs/groundmotion-processing | 5cb6412eebe258dd3d30e085d68affc20553d744 | 944667e90b5a0a01f7017a676f60e2958b1eb902 | refs/heads/master | 2022-11-03T04:32:00.353837 | 2022-10-19T17:57:16 | 2022-10-19T18:37:23 | 186,485,732 | 0 | 0 | NOASSERTION | 2019-05-13T19:51:34 | 2019-05-13T19:51:34 | null | UTF-8 | Python | false | false | 2,014 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import io
import shutil
import pathlib
from gmprocess.utils import constants
def test_import(script_runner):
    """End-to-end test of the ``gmrecords import`` subcommand.

    Creates a throwaway project profile, then imports event data from three
    input forms (a CESMD zip file, a tarball of CWB data, and a plain
    directory), asserting on the files each run produces. Cleanup always
    runs via the finally-block.
    """
    # Resolve paths before the try-block so the finally-clause can always
    # reference them (previously they were bound inside the try, which could
    # raise NameError in cleanup if an early step failed).
    cdir = constants.CONFIG_PATH_TEST
    ddir = constants.TEST_DATA_DIR / "demo"
    idir = constants.TEST_DATA_DIR / "import"
    try:
        # Need to create a profile first; feed the interactive prompts via stdin.
        setup_inputs = io.StringIO(
            f"test\n{str(cdir)}\n{str(ddir)}\nname\ntest@email.com\n"
        )
        ret = script_runner.run("gmrecords", "projects", "-c", stdin=setup_inputs)
        setup_inputs.close()
        assert ret.success

        # Test CESMD zip file
        zfile = idir / "cesmd_test.zip"
        ret = script_runner.run(
            "gmrecords", "import", "-e", "nn00725272", "-p", str(zfile)
        )
        print("*** stdout ***")
        print(ret.stdout)
        print("*** stderr ***")
        print(ret.stderr)
        assert ret.success
        raw_dir = ddir / "nn00725272" / "raw"
        assert raw_dir.is_dir()
        # raw_dir is already a pathlib.Path; no extra Path() wrap needed
        dst_files = list(raw_dir.glob("*"))
        assert len(dst_files) == 23

        # Test tar file of CWB data
        tfile = idir / "test.tar.zip"
        ret = script_runner.run(
            "gmrecords", "import", "-e", "us6000e2mt", "-p", str(tfile)
        )
        assert ret.success
        raw_dir = ddir / "us6000e2mt" / "raw"
        assert raw_dir.is_dir()
        dst_dats = list(raw_dir.glob("*.dat"))
        assert len(dst_dats) == 19

        # Test directory of files
        dpath = idir / "dir"
        ret = script_runner.run(
            "gmrecords", "import", "-e", "us6000e2mt", "-p", str(dpath)
        )
        assert ret.success
    finally:
        # The previous `except Exception as ex: raise ex` was a no-op re-raise;
        # try/finally alone provides the same cleanup guarantee.
        shutil.rmtree(str(constants.CONFIG_PATH_TEST), ignore_errors=True)
        # Remove created event directories
        for eid in ("us6000e2mt", "nn00725272"):
            shutil.rmtree(str(ddir / eid), ignore_errors=True)
if __name__ == "__main__":
    # NOTE(review): test_import requires the pytest `script_runner` fixture;
    # calling it here with no argument raises TypeError -- confirm whether
    # this direct-run entry point is still intended.
    test_import()
| [
"emthompson@usgs.gov"
] | emthompson@usgs.gov |
2ce7421fd93492b1f95e87ef2bab3cf0e5734ab9 | 4679450204dd8c6b4f8969f147a0bf1c9541cc29 | /src/starlib.py | ea0a81f2250b20424ebfa432e5513a51d252e8ef | [] | no_license | SergioGomis/project-API-scraping | 7eba3271a22fdef5f25d6f4967e6e41d710a929c | 5fabb6d8043c6641e7eb2d7efb8090ea3d7b1f29 | refs/heads/master | 2020-12-26T22:51:34.309558 | 2020-02-06T19:17:53 | 2020-02-06T19:17:53 | 237,673,209 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,362 | py | import pandas as pd
import numpy as np
import os
import base64
from sendgrid import SendGridAPIClient
from sendgrid.helpers.mail import (
Mail, Attachment, FileContent, FileName,
FileType, Disposition, ContentId)
from dotenv import load_dotenv
load_dotenv()
import re
from fpdf import FPDF
import requests
ver = FPDF()
def showStats():
    """Print summary stats (store count, northern-hemisphere and US shares)
    for ./input/starbucks_updt.csv. Output strings are in Spanish."""
    ds = pd.read_csv("./input/starbucks_updt.csv")
    # print(ds.describe())
    print('==== STATS ====')
    print('- El dataset contiene '+str(len(ds))+' tiendas')
    # Latitude > 0 means northern hemisphere; int() truncates the percentage
    print('- De las cuales el '+str(int(len(ds[ds['Latitude']>0])*100/len(ds)))+'% están ubicadas en el hemisferio norte.')
    print('- Y están en Estados Unidos el '+str(int(len(ds[ds['Pais']=='United States'])*100/len(ds)))+'% del total.')
    # Top 10 countries
    print('\n')
def possibleResults(ciudad):
    """Print city names that partially match *ciudad*.

    Progressively shortens the query from the right until some City value
    in the dataset contains it (case-insensitive), then prints the
    candidate city names.
    """
    ds = pd.read_csv("./input/starbucks_updt.csv")
    busqueda = ciudad[:-1]
    alternativas = ds[ds['City'].str.contains(busqueda, case=False, na=False)].City.unique()
    # keep trimming the search string until a match appears (or 1 char left)
    while not alternativas.any() and len(busqueda) > 1:
        busqueda = busqueda[:-1]
        alternativas = ds[ds['City'].str.contains(busqueda, case=False, na=False)].City.unique()
    print('Ciudad no encontrada. Prueba con estas posibles coincidencias:')
    for elem in alternativas:
        print('- ',elem)
def storesByCity(ciudad):
    """Return the dataset rows for stores in *ciudad* (case-insensitive).

    If the city name exists in several countries, prompt the user on stdin
    to pick one and restrict the result to that country.
    """
    ds = pd.read_csv("./input/starbucks_updt.csv")
    salida = ds[ds['City'].str.lower()==ciudad.lower()]
    if not salida.empty:
        # number of distinct countries sharing this city name
        paises = salida.groupby(['Pais'])['Pais'].unique().shape[0]
        if paises > 1:
            print('Hay varios posibles países para esta ciudad:')
            lista = [a for a in ds[ds['City'].str.lower()==ciudad.lower()].groupby(['Pais'])['Pais'].unique().keys()]
            for a in range(len(lista)):
                print('['+str(a)+'] '+lista[a])
            # loop until the user enters a valid index
            while True:
                try:
                    pais_elegido = int(input('Introduce el numero del cual quieres los datos: '))
                    if(pais_elegido in range(paises)):
                        break
                except:
                    print('Valor no válido')
            salida = ds[(ds['City'].str.lower()==ciudad.lower()) & (ds['Pais']==lista[pais_elegido])]
    # empty DataFrame when the city is not found at all
    return salida
| [
"sgomis@sonneil.com"
] | sgomis@sonneil.com |
a83e272646dedbc78131b6e3764b1eef14e2d74e | 2585eae43d14414b300718ffd1265d1923e96239 | /typeidea-master/config/admin.py | a9e81229280abefb9d41a1aa4cb50840214fc5d6 | [] | no_license | Knight9zz/typeidea | 2854e19007cc20aff57dbdc68a1f759d7c59ee4e | f7b8f01c43faa4ff642b33c09e43a7fc1e8f8155 | refs/heads/master | 2022-04-12T05:50:23.317770 | 2020-03-26T09:57:13 | 2020-03-26T09:57:13 | 248,477,336 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 511 | py | from django.contrib import admin
from .models import Link, SideBar
from .base_admin import BaseOwnerAdmin
# Register your models here.
@admin.register(Link)
class LinkAdmin(BaseOwnerAdmin):
list_display = ('title', 'href', 'status', 'weight',
'created_time')
fields = ('title', 'href', 'status', 'weight')
@admin.register(SideBar)
class SideBarAdmin(BaseOwnerAdmin):
list_display = ('title', 'display_type', 'content', 'created_time')
fields = ('title', 'display_type', 'content')
| [
"62378703+Knight9zz@users.noreply.github.com"
] | 62378703+Knight9zz@users.noreply.github.com |
89302cc74ca6ac2bdca46b282f61fee632281c3a | ad02587a87ec19658d6a53bcf2a2f5e92149e7f4 | /django-stubs/core/serializers/__init__.pyi | fcc124753a89fb1b8460527fcb732507dc4e7f9c | [
"BSD-3-Clause"
] | permissive | Naddiseo/django-stubs | 32a944617aea5b0e2dc3b8ad4dfd191b9ca6198b | cff5ab463c911283a9c43a26a38cb7bd4deebbd5 | refs/heads/master | 2020-04-18T05:01:40.832084 | 2019-01-22T17:13:31 | 2019-01-22T17:13:31 | 167,261,510 | 0 | 0 | BSD-3-Clause | 2019-01-23T22:06:15 | 2019-01-23T22:06:15 | null | UTF-8 | Python | false | false | 1,396 | pyi | from collections import OrderedDict
from typing import Any, Callable, Dict, Iterator, List, Optional, Tuple, Type, Union
from django.apps.config import AppConfig
from django.core.serializers.base import Serializer, Deserializer
from django.db.models.base import Model
from django.db.models.query import QuerySet
BUILTIN_SERIALIZERS: Any
class BadSerializer:
internal_use_only: bool = ...
exception: ModuleNotFoundError = ...
def __init__(self, exception: ImportError) -> None: ...
def __call__(self, *args: Any, **kwargs: Any) -> Any: ...
def register_serializer(format: str, serializer_module: str, serializers: Optional[Dict[str, Any]] = ...) -> None: ...
def unregister_serializer(format: str) -> None: ...
def get_serializer(format: str) -> Union[Type[Serializer], BadSerializer]: ...
def get_serializer_formats() -> List[str]: ...
def get_public_serializer_formats() -> List[str]: ...
def get_deserializer(format: str) -> Union[Callable, Type[Deserializer]]: ...
def serialize(
format: str, queryset: Union[Iterator[Any], List[Model], QuerySet], **options: Any
) -> Optional[Union[List[OrderedDict], bytes, str]]: ...
def deserialize(format: str, stream_or_string: Any, **options: Any) -> Union[Iterator[Any], Deserializer]: ...
def sort_dependencies(
app_list: Union[List[Tuple[AppConfig, None]], List[Tuple[str, List[Type[Model]]]]]
) -> List[Type[Model]]: ...
| [
"maxim.kurnikov@gmail.com"
] | maxim.kurnikov@gmail.com |
77866253d8655e0c944d336355f10a33f32d3878 | d2c6f923f4889c1306b3a5a95fedf87a69869925 | /105.py | dc92f1ca37e0ef16530cce9d58fb1794b3391459 | [] | no_license | Sakthiumamaheshwari97/92 | fc10b3f025d6f01bfaf93cedc743103be6a1aac3 | d74dbd3b4be272e2e1a028ea78f96ebf39b79e81 | refs/heads/master | 2020-06-14T15:01:16.781020 | 2019-07-04T15:04:29 | 2019-07-04T15:04:29 | 195,034,768 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 45 | py | uma,mahesh=input().split()
print(uma+mahesh)
| [
"noreply@github.com"
] | Sakthiumamaheshwari97.noreply@github.com |
c0f765c97c7e40f0e5fc8aece7c0932f237a6bb1 | 2df43755a51ed81c430f5a70f1a310b985f01012 | /tensorflow_lite_support/custom_ops/python/sentencepiece_tokenizer_test.py | 1e28484f703ee63036dce8401c7684539538834b | [
"Apache-2.0"
] | permissive | msgpo/tflite-support | 726b962f03e48a9e2f414f736916fa186ce96118 | 4affc99e6e841da63dedbaa3d60bff2b331b23f7 | refs/heads/master | 2022-12-01T11:07:28.606072 | 2020-08-15T04:00:43 | 2020-08-15T04:01:39 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,211 | py | # Copyright 2020 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
# Lint as: python3
"""Tests for sentencepiece_tokenizer."""
import time
from absl import flags
import numpy as np
import tensorflow.compat.v2 as tf # pylint: disable=g-direct-tensorflow-import
import tensorflow_text
from tensorflow.lite.python import interpreter as interpreter_wrapper # pylint: disable=g-direct-tensorflow-import
from third_party.tensorflow.python.platform import resource_loader
from tensorflow_lite_support.custom_ops.python import sentencepiece_tokenizer
FLAGS = flags.FLAGS
SENTENCEPIECE_MODEL_FILE = (
"../kernel/sentencepiece/testdata/sentencepiece.model")
def _GetSentencepieceModel():
model_filename = resource_loader.get_path_to_datafile(
SENTENCEPIECE_MODEL_FILE)
with open(model_filename, "rb") as file:
model = file.read()
return model
class SentencepieceTokenizerTest(tf.test.TestCase):
def setUp(self):
super(SentencepieceTokenizerTest, self).setUp()
self.sentencepiece_model = _GetSentencepieceModel()
def test_tftext_sentencepiece_tokenizer(self):
"""Check that the new tokenizer produces the same result that the tftext one."""
tftext_sp = tensorflow_text.SentencepieceTokenizer(self.sentencepiece_model)
opt_sp = sentencepiece_tokenizer.SentencepieceTokenizer(
self.sentencepiece_model)
input_text = [
u" ", u"to be or not to be", u"ignored by length text1",
u"ignored by length text2"
]
tftext_tokenized = tftext_sp.tokenize(input_text)
opt_tokenized = opt_sp.tokenize(input_text)
self.assertAllEqual(tftext_tokenized, opt_tokenized)
def test_tftext_sentencepiece_tokenizer_bos_eos(self):
"""Check that the new tokenizer produces the same result that the tftext one with bos and eos."""
tftext_sp = tensorflow_text.SentencepieceTokenizer(
self.sentencepiece_model, add_bos=True, add_eos=True)
opt_sp = sentencepiece_tokenizer.SentencepieceTokenizer(
self.sentencepiece_model, add_bos=True, add_eos=True)
input_text = [
u" ", u"to be or not to be", u"ignored by length text1",
u"ignored by length text2"
]
tftext_tokenized = tftext_sp.tokenize(input_text)
opt_tokenized = opt_sp.tokenize(input_text)
self.assertAllEqual(tftext_tokenized, opt_tokenized)
def test_tflite_opt_sentence_tokenizer(self):
"""Check that can convert a Keras model to TFLite and it produces the same result for tokenization."""
class TokenizerLayer(tf.keras.layers.Layer):
def __init__(self, sentencepiece_model, **kwargs):
super(TokenizerLayer, self).__init__(**kwargs)
self.sp = sentencepiece_tokenizer.SentencepieceTokenizer(
sentencepiece_model)
def call(self, input_tensor, **kwargs):
return self.sp.tokenize(input_tensor).flat_values
model = tf.keras.models.Sequential(
[TokenizerLayer(self.sentencepiece_model)])
input_data = np.array([[
u" ", u"to be or not to be", u"ignored by length text1",
u"ignored by length text2"
]])
tf_result = model.predict(input_data)
converter = tf.lite.TFLiteConverter.from_keras_model(model)
supported_ops = [tf.lite.OpsSet.TFLITE_BUILTINS]
converter.target_spec.supported_ops = supported_ops
converter.allow_custom_ops = True
tflite_model = converter.convert()
interpreter = interpreter_wrapper.InterpreterWithCustomOps(
model_content=tflite_model,
custom_op_registerers=["TFLite_SentencepieceTokenizerRegisterer"])
interpreter.allocate_tensors()
input_details = interpreter.get_input_details()
interpreter.set_tensor(input_details[0]["index"], input_data)
interpreter.invoke()
output_details = interpreter.get_output_details()
expected_result = [
13, 36, 83, 131, 13, 36, 4, 3127, 152, 130, 30, 2424, 168, 1644, 1524,
4, 3127, 152, 130, 30, 2424, 168, 1644, 636
]
self.assertAllEqual(tf_result, expected_result)
self.assertAllEqual(
interpreter.get_tensor(output_details[0]["index"]), expected_result)
def test_tflite_opt_sentence_tokenizer_vocab_size(self):
"""Check that can convert a Keras model to TFLite and it produces the same result for vocabulary size."""
class TokenizerLayer(tf.keras.layers.Layer):
def __init__(self, sentencepiece_model, **kwargs):
super(TokenizerLayer, self).__init__(**kwargs)
self.sp = sentencepiece_tokenizer.SentencepieceTokenizer(
sentencepiece_model)
def call(self, input_tensor, **kwargs):
return self.sp.vocab_size()
model = tf.keras.models.Sequential(
[TokenizerLayer(self.sentencepiece_model)])
input_data = np.array([[""]])
tf_result = model.predict(input_data)
converter = tf.lite.TFLiteConverter.from_keras_model(model)
supported_ops = [tf.lite.OpsSet.TFLITE_BUILTINS]
converter.target_spec.supported_ops = supported_ops
converter.allow_custom_ops = True
tflite_model = converter.convert()
interpreter = interpreter_wrapper.InterpreterWithCustomOps(
model_content=tflite_model,
custom_op_registerers=["TFLite_SentencepieceTokenizerRegisterer"])
interpreter.allocate_tensors()
input_details = interpreter.get_input_details()
interpreter.set_tensor(input_details[0]["index"], input_data)
interpreter.invoke()
output_details = interpreter.get_output_details()
expected_result = 4000
self.assertEqual(tf_result, expected_result)
self.assertAllEqual(
interpreter.get_tensor(output_details[0]["index"]), expected_result)
class SentencepieceTokenizerBenchmark(tf.test.Benchmark):
def benchmarkTokenizer(self):
sp_model = _GetSentencepieceModel()
test_text = [
"This week we celebrate the casts and creatives who have come together"
" to bring us our favorite.",
"More Stacks products demonstrated commitment to excellent support.",
"Test, test, test."
]
tftext_sp = tensorflow_text.SentencepieceTokenizer(sp_model)
opt_sp = sentencepiece_tokenizer.SentencepieceTokenizer(sp_model)
iter_number = 1000
start = time.time()
for _ in range(iter_number):
_ = opt_sp.tokenize(test_text)
self.report_benchmark(
iters=iter_number, wall_time=time.time() - start, name="opt")
start = time.time()
for _ in range(iter_number):
_ = tftext_sp.tokenize(test_text)
self.report_benchmark(
iters=iter_number, wall_time=time.time() - start, name="tf.text")
if __name__ == "__main__":
tf.test.main()
| [
"tflite-support-github-robot@google.com"
] | tflite-support-github-robot@google.com |
7442cc095982c595c26f2dc4f1297cb96e53d1b1 | c5f58af61e3577ded52acda210f4f664651b598c | /template/mmdetection/tools/inference.py | 1c3be13bc08f24a5ff7a2139b02780c446855c27 | [
"Apache-2.0",
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | hojihun5516/object-detection-level2-cv-02 | 0a4ee5cea9a77ef5d43fb61a4b37fe3a87cb0eac | bc8a08286935b31b8e7e597c4b1ca2cbbaeb9109 | refs/heads/master | 2023-08-31T09:50:59.150971 | 2021-10-16T15:00:19 | 2021-10-16T15:00:19 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,490 | py | import argparse
import os
import os.path as osp
import time
import warnings
import mmcv
import torch
from mmcv import Config, DictAction
from mmcv.cnn import fuse_conv_bn
from mmcv.parallel import MMDataParallel, MMDistributedDataParallel
from mmcv.runner import get_dist_info, init_dist, load_checkpoint, wrap_fp16_model
from mmdet.apis import multi_gpu_test, single_gpu_test
from mmdet.datasets import build_dataloader, build_dataset, replace_ImageToTensor
from mmdet.models import build_detector
import pandas as pd
from pandas import DataFrame
import numpy as np
from pycocotools.coco import COCO
def parse_args():
parser = argparse.ArgumentParser(description="MMDet test (and eval) a model")
# config 파일 경로 (학습 시킬 때 사용했던 config 파일, work_dir에도 복사되어있음)
parser.add_argument("config", help="test config file path")
# checkpoint가 저장되어있는 work_dir 경로
parser.add_argument("--work_dir", help="the directory to save the file containing evaluation metrics")
# 사용할 checkpoint epoch
parser.add_argument("--epoch", default="latest", help="Checkpoint file's epoch")
parser.add_argument("--show_score_thr", type=float, default=0.05, help="score threshold (default: 0.05)")
args = parser.parse_args()
return args
def make_csv(output, cfg):
# submission 양식에 맞게 output 후처리
prediction_strings = []
file_names = []
coco = COCO(cfg.data.test.ann_file)
img_ids = coco.getImgIds()
class_num = len(cfg.data.test.classes)
for i, out in enumerate(output):
prediction_string = ""
image_info = coco.loadImgs(coco.getImgIds(imgIds=i))[0]
for j in range(class_num):
for o in out[j]:
prediction_string += (
str(j)
+ " "
+ str(o[4])
+ " "
+ str(o[0])
+ " "
+ str(o[1])
+ " "
+ str(o[2])
+ " "
+ str(o[3])
+ " "
)
prediction_strings.append(prediction_string)
file_names.append(image_info["file_name"])
submission = pd.DataFrame()
submission["PredictionString"] = prediction_strings
submission["image_id"] = file_names
submission.to_csv(os.path.join(cfg.work_dir, "submission.csv"), index=None)
print(f"submission.csv is saved in {cfg.work_dir}")
def main():
args = parse_args()
cfg = Config.fromfile(args.config)
if args.work_dir:
cfg.work_dir = args.work_dir
cfg.data.test.test_mode = True
dataset = build_dataset(cfg.data.test)
data_loader = build_dataloader(
dataset,
samples_per_gpu=cfg.data.samples_per_gpu,
workers_per_gpu=cfg.data.workers_per_gpu,
dist=False,
shuffle=False,
)
checkpoint_path = os.path.join(cfg.work_dir, f"{args.epoch}.pth")
# build detector
cfg.model.train_cfg = None
model = build_detector(cfg.model, test_cfg=cfg.get("test_cfg"))
# ckpt load
checkpoint = load_checkpoint(model, checkpoint_path, map_location="cpu")
model.CLASSES = dataset.CLASSES
model = MMDataParallel(model.cuda(), device_ids=[0])
# cal ouput
output = single_gpu_test(model, data_loader, show_score_thr=args.show_score_thr)
make_csv(output, cfg)
if __name__ == "__main__":
main()
| [
"hanbin@kakao.com"
] | hanbin@kakao.com |
5b163ac46101b63506884ebc00d7a547febbfee1 | f772150e9f54af30619d31e258fa8e6eb05d89e8 | /oxime/apps.py | 9ad90a83b84f509f27d403f163c729fd077b096a | [] | no_license | nirala161/oxime | 0cf5c26747e98b13b78357b6c2f9633d75b0551f | 59c022c6fa32a095ad2ba5041efc2ed0010f5f46 | refs/heads/master | 2023-04-12T09:38:02.128469 | 2021-05-16T19:42:57 | 2021-05-16T19:42:57 | 367,967,666 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 142 | py | from django.apps import AppConfig
class OximeConfig(AppConfig):
default_auto_field = 'django.db.models.BigAutoField'
name = 'oxime'
| [
"raushankumar18@8e86bfb0e784"
] | raushankumar18@8e86bfb0e784 |
bf8ede38667fa3b596a358983dba90b959017275 | babb3e2f03966dd1d29e76c3a2b2d7efcaaef65b | /old/ddlc.py | bd776db0913e1c9d0230e6b09a10b9351081da62 | [] | no_license | WaveTheSwallow06/OperationDingo | 403c5bf47a1e180ff9ed6c1c23a9f326c06f4a40 | 589adc547382d3cddf623e6e77aadbb7d2f4ad84 | refs/heads/master | 2020-11-30T07:21:39.100162 | 2019-12-27T00:47:59 | 2019-12-27T00:47:59 | 230,345,888 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 892 | py | import pygame
def main():
pygame.init()
logo = pygame.image.load("PreAlpha.png")
pygame.display.set_icon(logo)
pygame.display.set_caption("Operation Dingo")
screen = pygame.display.set_mode((1778,876))
pygame.mixer.music.load("despacito.mid")
pygame.mixer.music.play()
title = pygame.image.load("Title.png")
place = pygame.image.load("PLACEHOLDER.png")
screen.blit(title, (0,-3))
running = True
while running:
pygame.display.update()
for event in pygame.event.get():
if event.type == pygame.QUIT:
running = False
if event.type == pygame.KEYDOWN:
if event.key == pygame.K_RETURN:
screen.blit(place, (5, 0))
if event.key == pygame.K_d:
pygame.mixer.music.load("despacito.mid")
pygame.mixer.music.play()
if event.key == pygame.K_n:
pygame.mixer.music.load("mm2wood.mid")
pygame.mixer.music.play()
if __name__=="__main__":
main() | [
"noreply@github.com"
] | WaveTheSwallow06.noreply@github.com |
cadf81dfd122a304037404dce6324b82d7433f76 | ea6b69535972cc738c3c0b580c6b184414c9823a | /Scripts/django-admin.py | 1d12ab2f430018857f7d8809b074ea36d1c302d8 | [
"MIT"
] | permissive | spinho010/ControleEstoque | ab10e359c3bd76a5f7f177e974fc3a4697a84af7 | b59a65ba42c4324475a5660aa0ccfe586aed55ec | refs/heads/main | 2023-07-14T16:26:58.668528 | 2021-08-28T12:48:45 | 2021-08-28T12:48:45 | 400,294,680 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 710 | py | #!c:\users\lucas\documents\repositorios\controle_estoque\scripts\python.exe
# When the django-admin.py deprecation ends, remove this script.
import warnings
from django.core import management
try:
from django.utils.deprecation import RemovedInDjango40Warning
except ImportError:
raise ImportError(
'django-admin.py was deprecated in Django 3.1 and removed in Django '
'4.0. Please manually remove this script from your virtual environment '
'and use django-admin instead.'
)
if __name__ == "__main__":
warnings.warn(
'django-admin.py is deprecated in favor of django-admin.',
RemovedInDjango40Warning,
)
management.execute_from_command_line()
| [
"78925489+spinho010@users.noreply.github.com"
] | 78925489+spinho010@users.noreply.github.com |
139979e1e89b8f2d7bad76539da81612cb1afb63 | 2053bf5708e2d1d07dcecfea3a74cee03c89759d | /7_django_models/coffeehouse/stores/urls.py | ef1ec59cdc057c569afa1b7c53d7559ddad95383 | [] | no_license | alimp5/beginningdjango | 59bcdf5f81cfa08187a8517706bc3c4f63c1f4bb | 69dbaaced5d057a1f5a44ff2f3e43fe45bde4f10 | refs/heads/master | 2023-04-28T02:50:04.865078 | 2021-04-22T02:01:14 | 2021-04-22T02:01:14 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 408 | py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.conf.urls import include, url
from . import views
urlpatterns = [
url(r'^$',views.index,name="index"),
url(r'^feedback/$',views.feedback,name="feedback"),
url(r'^(?P<store_id>\d+)/$',views.detail,name="detail"),
url(r'^(?P<store_id>\d+)/about/',include('coffeehouse.about.urls',namespace="stores_about")),
]
| [
"daniel@webforefront.com"
] | daniel@webforefront.com |
efc6f54940bc267b212e91d14b32cb91e6a5316f | 7ab5c962b909fa21cf67e4e07241836b7fe4498d | /A2/stonehenge.py | c7d6b6b21e0261e035ea0869e1fbedc3cdb3b918 | [] | no_license | nikita-sh/CSC148 | d82bed327d4dab8489d3ea5755e785a77128bfce | e37e145bf6fa00850ba38b489334c57eeee71953 | refs/heads/master | 2021-04-26T23:21:46.407382 | 2018-03-23T13:44:43 | 2018-03-23T13:44:43 | 123,979,508 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 15,900 | py | class StonehengeGame(Game):
"""
Class to model Stonehenge Game.
===Attributes===
p1_turn: whether or not it is currently Player 1's turn.
current_state: current state of the game
instructions: instructions for a game of Stonehenge
"""
p1_turn: bool
current_state: 'StonehengeState'
instructions: str
def __init__(self, p1_starts: bool) -> None:
"""
Initialize this Game, using p1_starts to find who the first player is.
~Doctests omitted due to use of input~
"""
self.p1_turn = p1_starts
# retrieving size of board through input, dont put this into state class
# since we return new states when a move is made and cant answer input
# when it is constructed
size = int(input("Enter the size of the game board:"))
# creating state
self.current_state = StonehengeState(self.p1_turn, size)
# setting instructions
self.instructions = "Stonehenge is played on a hexagonal grid formed " \
"by removing the corners of a triangular grid. \n" \
"Boards can have various sizes based on their " \
"side-length (the number of cells in the grid \n" \
"along the bottom), but are always formed in a " \
"similar manner: For side-length n, the first \n" \
"row has 2 cells, and each row after it has 1 " \
"additional cell up until there's a row with \n" \
"n+1 cells, after which the last row has only " \
"n cells in it. Players take turns claiming \n" \
"cells (abbreviated with a capital letter). " \
"When a player captures at least half of the \n" \
"cells in a ley-line (lines extending from " \
"each third of the hexagon to the other side),\n" \
" then the player captures that ley-line. The " \
"first player to capture at least half of the \n" \
"ley-lines is the winner. A ley-line, once " \
"claimed, cannot be taken by the other player."
def get_instructions(self) -> str:
"""
Return the instructions for this Game.
~Doctests omitted due to use of input~
"""
return self.instructions
def is_over(self, state: 'StonehengeState') -> bool:
"""
Return whether or not this game is over at state.
~Doctests omitted due to use of input~
"""
count1, count2 = 0, 0
# counting ley line markers to evaluate games ending condition
for i in state.ley_lines:
if i[0] == 1:
count1 += 1
elif i[0] == 2:
count2 += 1
# determining if counts are at least half of an even length of ley lines
if len(state.ley_lines) % 2 == 0:
if count1 >= (len(state.ley_lines) / 2):
return True
elif count2 >= (len(state.ley_lines) / 2):
return True
return False
# determining if counts are at least half of an odd length of ley lines
else:
if count1 >= (len(state.ley_lines) // 2) + 1:
return True
elif count2 >= (len(state.ley_lines) // 2) + 1:
return True
return False
def is_winner(self, player: str) -> bool:
"""
Return whether player has won the game.
Precondition: player is 'p1' or 'p2'.
~Doctests omitted due to use of input~
"""
# assures that game is over before evaluating winner
if not self.is_over(self.current_state):
return False
# if game is over and current player is player, the other player has won
elif self.current_state.curr_player == player:
return False
return True
def str_to_move(self, string: str) -> Any:
"""
Return the move that string represents. If string is not a move,
return some invalid move.
~Doctests omitted due to use of input~
"""
assert isinstance(string, str) and string.upper() in POSS_VAL, \
"Move must be a letter of the alphabet"
return string
class StonehengeState(GameState):
"""
Game state for game of Stonehenge at a certain time.
===Attributes===
curr_player: player whose turn it currently is
size: size of the game board
board: current cells of the game board
ley_lines: ley line values of current state
"""
curr_player: str
size: int
board: List[List[str]]
ley_lines: List[List[str]]
def __init__(self, is_p1_turn: bool, size: int) -> None:
"""
Initialize this game state and set the current player based on
is_p1_turn.
>>> s = StonehengeState(True, 3)
>>> s.board
[['A', 'B'], ['C', 'D', 'E'], ['F', 'G', 'H', 'I'], ['J', 'K', 'L']]
"""
super().__init__(is_p1_turn)
assert 5 >= size > 0, "Board size must be a positive integer and no " \
"greater than 5."
self.curr_player = "p2"
if is_p1_turn:
self.curr_player = "p1"
self.size = size
# creating cells in Stonehenge board
self.board = create_stonehenge_board(self.size, POSS_VAL)
# creating ley lines
self.ley_lines = get_ley_lines(self.board, self.size)
def __str__(self) -> str:
"""
Return a string representation of the current state of the game.
~Doctests omitted due to use of \n when representing string~
"""
# copy of ley lines and board since we will be removing them after they
# are added, logic is similar to getting ley lines
ley_copy = copy.deepcopy(self.ley_lines)
board_copy = gather_list(copy.deepcopy(self.board))
# separating ley lines into horizontal, left diagonal and right diagonal
index = int(len(self.ley_lines) / 3)
horiz = ley_copy[:index]
left = ley_copy[index:2 * index]
right = ley_copy[2 * index:3 * index]
spacing = (len(self.board) - 2) * 2
# adding first two ley lines and their markers
ret_str = ""
ret_str += "\n{}{} {}".format(" "*(spacing+6), left[0][0], left[1][0])
ret_str += "\n{}/ /".format(" "*(spacing+5))
# removing added vals, we will continue to do this to be able to access
# them from the same index
left = left[2:]
n = 2
while n <= self.size + 1:
# creating newline and adding correct spacing and horizontal ley
# line marker
ret_str += "\n{}{} -".format(" " * spacing, horiz[0][0])
horiz = horiz[1:]
spacing -= 2
for _i in range(n):
if n == self.size + 1:
ret_str += " {} -".format(board_copy[0]) if _i != n-1 \
else " {} ".format(board_copy[0])
board_copy = board_copy[1:]
# if we are on the last cell of the row, we add a ley line
# marker and remove it from the ley lines
elif _i == n - 1 and n != self.size + 2:
ret_str += " {} {}".format(board_copy[0], left[0][0])
left = left[1:]
board_copy = board_copy[1:]
else:
ret_str += " {} -".format(board_copy[0])
board_copy = board_copy[1:]
n += 1
# adding slashes
if n == self.size + 2:
ret_str += "\n{} \\".format(" "*(spacing+5))
ret_str += " {}".format("/ \\ "*(n-2))
elif n > 2:
ret_str += "\n{}{}".format(" "*(spacing+5), "/ \\ "*(n-1))
ret_str += "/"
else:
ret_str += "\n{}{}".format(" "*(spacing+5), "/ \\ "*(n-1))
# adding last row of cells, slashes and ley line markers after size+1
# row
spacing = 2
ret_str += "\n{}{} - ".format(" "*spacing, horiz[0][0])
for _j in range(self.size):
if _j == self.size - 1:
ret_str += "{} {}".format(board_copy[0], right[0][0])
right = right[1:]
else:
ret_str += "{} - ".format(board_copy[0])
board_copy = board_copy[1:]
ret_str += "\n{}{}".format(" "*(spacing+5), "\\ "*self.size)
ret_str += "\n{}".format(" "*(spacing+6))
for _k in range(len(right)):
ret_str += "{} ".format(right[-1][0])
right = right[:-1]
return ret_str
def get_possible_moves(self) -> list:
"""
Return all possible moves that can be applied to this state.
>>> s = StonehengeState(True, 1)
>>> s.get_possible_moves()
['A', 'B', 'C']
>>> s = StonehengeState(True, 2)
>>> s.get_possible_moves()
['A', 'B', 'C', 'D', 'E', 'F', 'G']
"""
# performing same count as in StonehengeGame.is_over() to return an
# empty list if the game is over
count1, count2 = 0, 0
for i in self.ley_lines:
if i[0] == 1:
count1 += 1
elif i[0] == 2:
count2 += 1
if len(self.ley_lines) % 2 == 0:
if count1 >= (len(self.ley_lines) / 2):
return []
elif count2 >= (len(self.ley_lines) / 2):
return []
moves = []
# returning available cells if game isnt over
for cell in gather_list(self.board):
if cell in POSS_VAL:
moves.append(cell)
return moves
else:
if count1 >= (len(self.ley_lines) // 2) + 1:
return []
elif count2 >= (len(self.ley_lines) // 2) + 1:
return []
# returning available cells if game isnt over
moves = []
for cell in gather_list(self.board):
if cell in POSS_VAL:
moves.append(cell)
return moves
def get_current_player_name(self) -> str:
"""
Return 'p1' if the current player is Player 1, and 'p2' if the current
player is Player 2.
>>> s = StonehengeState(True, 2)
>>> s.get_current_player_name()
'p1'
>>> s = StonehengeState(False, 2)
>>> s.get_current_player_name()
'p2'
"""
if self.p1_turn:
return 'p1'
return 'p2'
def make_move(self, move: Any) -> 'StonehengeState':
"""
Return the GameState that results from applying move to this GameState.
>>> s = StonehengeState(True, 2)
>>> s.get_current_player_name()
'p1'
>>> s2 = s.make_move("A")
>>> s2.get_current_player_name()
'p2'
>>> s2.board
[[1, 'B'], ['C', 'D', 'E'], ['F', 'G']]
"""
if self.curr_player == "p1":
# creating new state and copying current board
new_state = StonehengeState(False, self.size)
new_state.board = copy.deepcopy(self.board)
# changing cells that have been taken by player
update_board(new_state.board, move, "p1")
# updating ley lines
new_state.ley_lines = update_ley(self.ley_lines, move, True)
return new_state
# creating new state and copying current board
new_state = StonehengeState(True, self.size)
new_state.board = copy.deepcopy(self.board)
# changing cells that have been taken by player
update_board(new_state.board, move, "p2")
# updating ley lines
new_state.ley_lines = update_ley(self.ley_lines, move, False)
return new_state
def is_valid_move(self, move: Any) -> bool:
"""
Return whether move is a valid move for this GameState.
>>> s = StonehengeState(True, 2)
>>> s.is_valid_move("A")
True
>>> s.is_valid_move("Z")
False
"""
return move in self.get_possible_moves()
def __repr__(self) -> Any:
"""
Return a representation of this state (which can be used for
equality testing).
~Doctests omitted due to use of \n to represent attributes~
"""
board = ""
ley_lines = ""
# adding new line for each row of board
for i in self.board:
board += "{}\n".format(i)
# addding new line for each row of ley lines
for j in self.ley_lines:
ley_lines += "{}\n".format(j)
# returning all attributes of StonehengeState
return "Board:\n{}\nLey-Lines:\n{}\n" \
"Size:\n{}\n\nCurrent Player:\n{}".format(board, ley_lines,
self.size,
self.curr_player)
def rough_outcome(self) -> float:
"""
Return an estimate in interval [LOSE, WIN] of best outcome the current
player can guarantee from state self.
>>> s = StonehengeState(True, 1)
>>> s.rough_outcome()
1
"""
# checking if rough_outcome is called on a game that is already done
if not self.get_possible_moves():
count1, count2 = 0, 0
for l in self.ley_lines:
if l[0] == 1:
count1 += 1
elif l[0] == 2:
count2 += 1
if len(self.ley_lines) % 2 == 0:
if count1 >= len(self.ley_lines) / 2:
return 1 if self.get_current_player_name() == "p1" else -1
elif count2 >= len(self.ley_lines) / 2:
return 1 if self.get_current_player_name() == "p2" else -1
else:
if count1 >= (len(self.ley_lines) // 2) + 1:
return 1 if self.get_current_player_name() == "p1" else -1
elif count2 >= (len(self.ley_lines) // 2) + 1:
return 1 if self.get_current_player_name() == "p2" else -1
# creating states for next possible move and next possible move after
# that
nstates = [self.make_move(x) for x in self.get_possible_moves()]
ostates = []
for state in nstates:
ostates.extend([state.make_move(y)
for y in state.get_possible_moves()])
# checking if any of the first possible moves lead to an end state
for state in nstates:
if not state.get_possible_moves():
if self.get_current_player_name() == "p1":
return 1 if state.get_current_player_name() != "p1" else -1
return 1 if state.get_current_player_name() != "p2" else -1
# checking if any of the next possible moves lead to an end state
for ostate in ostates:
if not ostate.get_possible_moves():
if self.get_current_player_name() == "p1":
return 1 if ostate.get_current_player_name() != "p1" else -1
return 1 if ostate.get_current_player_name() != "p2" else -1
# returning a number in range in (WIN, LOSS) if none of the conditions
# is satisfied
count1, count2 = 0, 0
for l in self.ley_lines:
if l[0] == 1:
count1 += 1
elif l[0] == 2:
count2 += 1
return min(count1, count2) / max(count1, count2)
| [
"noreply@github.com"
] | nikita-sh.noreply@github.com |
080fc995154d64b17349f13421fb4a7a7a32c41e | 59eafe2d75f04d6c32ab18c1a617418488f25c6e | /task_buffet/file_lock.py | f7cfaa1927f5efaaccaa2cde0bcf13a449bc8c54 | [] | no_license | jclevesque/task_buffet | 9804e37adcb590c1886feec48c469a0dc4d47704 | defa24fed9750da02a12a6f45dedd1e92366a295 | refs/heads/master | 2020-12-09T14:26:08.834754 | 2020-03-10T20:46:37 | 2020-03-10T20:46:37 | 55,301,924 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,405 | py | # Copyright (C) 2015 Julien-Charles Levesque
# Based on pylockfile by openstack: https://github.com/openstack/pylockfile
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import os
import sys
import time
import socket
import time
import urllib
import uuid
import warnings
import functools
class LockError(Exception):
    """Base class for errors arising from attempts to acquire the lock."""
class LockTimeout(LockError):
    """Raised when lock creation fails within a user-defined period of time."""
class AlreadyLocked(LockError):
    """Raised when some other thread/process is already locking the file."""
class LockFailed(LockError):
    """Raised when lock file creation fails for some other reason."""
class UnlockError(Exception):
    """Base class for errors arising from attempts to release the lock."""
class NotLocked(UnlockError):
    """Raised when an attempt is made to unlock an unlocked file."""
class NotMyLock(UnlockError):
    """Raised when an attempt is made to unlock a file someone else locked."""
def locked(path, timeout=None):
    """Decorator which serialises calls to the decorated function via a file lock.

    Arguments:
    - path: path for lockfile.
    - timeout (optional): Timeout for acquiring lock.

    Usage:
        @locked('/var/run/myname', timeout=0)
        def myname(...):
            ...
    """
    def decor(func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            # BUGFIX: the original instantiated ``FileLock``, a name that is
            # neither defined nor imported anywhere in this module; ``Locker``
            # is the lock implementation this module actually provides.
            lock = Locker(path, timeout=timeout)
            lock.acquire()
            try:
                return func(*args, **kwargs)
            finally:
                # Always release, even if ``func`` raised.
                lock.release()
        return wrapper
    return decor
class Locker:
    """Advisory inter-process file lock built on the atomicity of link(2).

    Acquiring the lock creates a per-process unique file and hard-links it
    to ``<path>.lock``; because ``os.link`` fails atomically when the
    target already exists, at most one process can hold the lock.

    >>> lock = Locker('somefile')
    >>> with lock:
    ...     pass
    """

    # NOTE: the original class defined ``__init__`` twice; the first,
    # minimal definition was dead code shadowed by the second and has
    # been removed.

    def __init__(self, path, timeout=None):
        """Prepare (but do not acquire) a lock protecting *path*.

        :param path: file to protect; the lock file is ``path + ".lock"``.
        :param timeout: default timeout in seconds for :meth:`acquire`;
            ``None`` waits forever, ``0`` (or negative) fails immediately
            when the lock is contended.
        """
        self.path = path
        self.lock_file = os.path.abspath(path) + ".lock"
        self.hostname = socket.gethostname()
        self.pid = os.getpid()
        dirname = os.path.dirname(self.lock_file)
        # unique name is mostly about the current process, but must
        # also contain the path -- otherwise, two adjacent locked
        # files conflict (one file gets locked, creating lock-file and
        # unique file, the other one gets locked, creating lock-file
        # and overwriting the already existing lock-file, then one
        # gets unlocked, deleting both lock-file and unique file,
        # finally the last lock errors out upon releasing.
        self.unique_name = os.path.join(dirname,
                                        "%s.%s.%s" % (self.hostname,
                                                      self.pid,
                                                      uuid.uuid1().hex))
        self.timeout = timeout

    def __enter__(self):
        """Context manager support: acquire on entry."""
        self.acquire()
        return self

    def __exit__(self, *_exc):
        """Context manager support: release on exit."""
        self.release()

    def __repr__(self):
        return "<%s: %r>" % (self.__class__.__name__, self.path)

    def acquire(self, timeout=None):
        """Take the lock, waiting up to *timeout* seconds.

        :param timeout: overrides the instance default for this call.
        :raises LockFailed: the unique helper file could not be created.
        :raises LockTimeout: ``timeout > 0`` elapsed while waiting.
        :raises AlreadyLocked: ``timeout <= 0`` and the lock is held.
        """
        try:
            open(self.unique_name, "wb").close()
        except IOError:
            raise LockFailed("failed to create %s" % self.unique_name)
        # BUGFIX: the original computed ``timeout is not None and timeout
        # or self.timeout``, which silently replaced an explicit 0 (fail
        # immediately) with the instance default.
        if timeout is None:
            timeout = self.timeout
        end_time = time.time()
        if timeout is not None and timeout > 0:
            end_time += timeout
        while True:
            # Try and create a hard link to it.
            try:
                os.link(self.unique_name, self.lock_file)
            except OSError:
                # Link creation failed.  Maybe we've double-locked?
                if os.stat(self.unique_name).st_nlink == 2:
                    # The original link plus the one I created == 2: we
                    # already hold the lock.  We're good to go.
                    return
                if timeout is not None and time.time() > end_time:
                    os.unlink(self.unique_name)
                    if timeout > 0:
                        raise LockTimeout("Timeout waiting to acquire"
                                          " lock for %s" %
                                          self.path)
                    raise AlreadyLocked("%s is already locked" %
                                        self.path)
                time.sleep(timeout / 10 if timeout else 0.1)
            else:
                # Link creation succeeded.  We're good to go.
                return

    def release(self):
        """Drop the lock.

        :raises NotLocked: the lock file does not exist.
        :raises NotMyLock: the lock is held by someone else.
        """
        if not self.is_locked():
            raise NotLocked("%s is not locked" % self.path)
        elif not os.path.exists(self.unique_name):
            raise NotMyLock("%s is locked, but not by me" % self.path)
        os.unlink(self.unique_name)
        os.unlink(self.lock_file)

    def is_locked(self):
        """Return True if *any* process holds the lock."""
        return os.path.exists(self.lock_file)

    def i_am_locking(self):
        """Return True if this instance holds the lock."""
        return (self.is_locked() and
                os.path.exists(self.unique_name) and
                os.stat(self.unique_name).st_nlink == 2)

    def break_lock(self):
        """Forcibly remove the lock file, whoever owns it."""
        if os.path.exists(self.lock_file):
            os.unlink(self.lock_file)

    def __del__(self):
        # Best-effort cleanup: release the lock if we still hold it.
        if self.i_am_locking():
            self.release()
| [
"levesque.jc@gmail.com"
] | levesque.jc@gmail.com |
0ffb1a2ee81207f529a86af9c5969f5b359151d8 | 92866897ac8b95067960f312aa92a4d02c7c81df | /environments/oc-p5/database.py | 93f99ef3da8506942db150a6ad42cd3bace69117 | [] | no_license | DenisLamalis/cours-python | 63fec725c038a50fd52f428152dbc1e0671dba53 | 1fc92b125969a2771633d6e8508138986163b6e7 | refs/heads/master | 2023-02-03T19:59:34.345181 | 2020-12-15T09:57:42 | 2020-12-15T09:57:42 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,841 | py | import mysql.connector
from config import *
from mysql.connector import errorcode
from tables import Tables
class Database:
    """Helper managing the MySQL 'PureBeurre' database: connection,
    schema creation and the initial Nutriscore data load.

    Connection settings come from the star-imported ``config`` module
    (HOST, USER, PASSWORD); table DDL comes from ``tables.Tables``.
    """

    def __init__(self):
        """Store connection settings; no connection is opened yet."""
        self.host = HOST
        self.user = USER
        self.password = PASSWORD
        self.db_name = 'PureBeurre'
        self.tables = Tables()
        # Live connection / cursor, populated by connection().
        # BUGFIX: the original assigned the live connection to
        # ``self.connection``, shadowing the method of the same name and
        # breaking any later call to ``self.connection()``.
        self.conn = None
        self.mycursor = None

    def connection(self):
        """Open a connection to the database and return a cursor (or None)."""
        try:
            self.conn = mysql.connector.connect(
                host=self.host,
                user=self.user,
                password=self.password,
                database=self.db_name)
            self.mycursor = self.conn.cursor()
            if (self.conn.is_connected()):
                print(f"REUSSITE : Connection à la base {self.db_name} effectuée.")
                return self.mycursor
        except mysql.connector.Error as error:
            print("ECHEC : impossible de me connecter, erreur : {}".format(error))

    def db_create(self):
        """Create the database itself (utf8); exits the process on failure."""
        mycursor = self.connection()
        try:
            mycursor.execute("CREATE DATABASE {} DEFAULT CHARACTER SET 'utf8'".format(self.db_name))
            print(f"REUSSITE : création de la base {self.db_name} effectuée.")
        except mysql.connector.Error as err:
            print("ECHEC : impossible de créer la base, erreur : {}".format(err))
            exit(1)

    def tables_create(self):
        """Create every table declared in ``tables.Tables.TABLES``."""
        mycursor = self.connection()
        for table_name in self.tables.TABLES:
            table_description = self.tables.TABLES[table_name]
            try:
                mycursor.execute(table_description)
                print("REUSSITE : la création de la table {} est effectuée.\n".format(table_name), end='')
            except mysql.connector.Error as err:
                # BUGFIX: the original formatted the undefined name
                # ``error`` here (the handler bound ``err``), raising a
                # NameError instead of reporting the MySQL error.
                print("ECHEC : impossible de créer la table, erreur : {}".format(err))

    def load_nutriscore(self):
        """Insert the five Nutriscore grades (A-E) into ``nutriscore``."""
        self.connection()
        try:
            add_nutriscore = ("INSERT INTO nutriscore (nut_id, nut_type) VALUES (%s,%s)")
            # One row per grade, ids 1..5 mapping to A..E.
            for values in ((1, 'A'), (2, 'B'), (3, 'C'), (4, 'D'), (5, 'E')):
                self.mycursor.execute(add_nutriscore, values)
            self.conn.commit()
            print("Les différents Nutriscore ont été chargés dans la base.")
        except mysql.connector.Error as error:
            print("Erreur lors du chargement : {}".format(error))
# Manual entry point: instantiating Database only reads the settings;
# call db_create()/tables_create()/load_nutriscore() explicitly to build
# and seed the schema.
if __name__ == "__main__":
    database = Database()
| [
"erischon@gmail.com"
] | erischon@gmail.com |
416ec842b2b7a239aeb5b2e16f638d426da827e1 | 15686c79bca2d8ab6dbc3980636d5d29b8925475 | /ally/Order/tests/OrderManual.py | 62640ea35c26e90d836ddea42f5dac4e3ba89026 | [
"MIT"
] | permissive | brigittee/PyAlly | 00b3eb376c9670f9ac5bed8b1f19b1dcd606378f | 55e437adf20e93319268fa81ea533b50424df9fc | refs/heads/master | 2022-12-14T17:11:18.299300 | 2020-09-17T00:11:55 | 2020-09-17T00:12:27 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,709 | py | # MIT License
#
# Copyright (c) 2020 Brett Graves
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import xml.etree.ElementTree as ET
import unittest
from ..classes import *
from ..order import Order
class TestOrderManual(unittest.TestCase):
    """Manual-construction tests for Order: side, quantity and time in force."""

    def _check_side(self, raw, member, msg):
        # Both the lowercase string alias and the enum member itself must
        # normalise to the same Side value on the constructed order.
        for spec in (raw, member):
            self.assertEqual(Order(buysell=spec).buysell, member, msg)

    def test_buysell_buy(self):
        self._check_side('buy', Side.Buy, "Should be Side.Buy")

    def test_buysell_sell(self):
        self._check_side('sell', Side.Sell, "Should be Side.Sell")

    def test_buysell_sellshort(self):
        self._check_side('sellshort', Side.SellShort, "Should be Side.SellShort")

    def test_buysell_buycover(self):
        self._check_side('buycover', Side.BuyCover, "Should be Side.BuyCover")

    def test_quantity(self):
        # int, float (truncated) and numeric string all coerce to int 10.
        for raw in (10, 10.5, "10"):
            self.assertEqual(Order(qty=raw).quantity, 10, "Should be 10")

    def test_price_market(self):
        # NOTE: despite the name, this exercises the time-in-force field.
        for spec in ('onclose', TimeInForce.OnClose):
            self.assertEqual(Order(time=spec).time, TimeInForce.OnClose,
                             "Should be TimeInForce.OnClose")
| [
"alienbrett648@gmail.com"
] | alienbrett648@gmail.com |
57e1b5631a33f14b833a77b81a08c4285a8e5380 | 3abc7ebbe1f122ba1fb8e02a4e193d348918b60b | /shop_back/shop_back/settings.py | 0cc637c8426a4d9539146b20e4607408bc49871b | [] | no_license | agarysova/wd-spring | a571173fded00f033cb3fa5efe35d1b3b1fd35b4 | 182b3692280d48244b548b4a5b4d19bc666b1d47 | refs/heads/master | 2021-06-13T22:46:26.558084 | 2020-04-26T07:38:18 | 2020-04-26T07:38:18 | 254,459,403 | 0 | 0 | null | 2020-04-09T19:22:15 | 2020-04-09T19:22:15 | null | UTF-8 | Python | false | false | 4,662 | py | """
Django settings for shop_back project.
Generated by 'django-admin startproject' using Django 3.0.5.
For more information on this file, see
https://docs.djangoproject.com/en/3.0/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.0/ref/settings/
"""
import os
from django.conf import settings
import datetime
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))

# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.0/howto/deployment/checklist/

# SECURITY WARNING: keep the secret key used in production secret!
# NOTE(review): this key is committed to source control and DEBUG is on --
# both must be changed before any production deployment.
SECRET_KEY = 'r567_%776m5-hr!oe2v8z_wrwr9a$d(3g-*@r%0(i6#a18yu9g'

# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True

ALLOWED_HOSTS = []

# Application definition

INSTALLED_APPS = [
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    # Third-party: DRF, JWT authentication and CORS support.
    'rest_framework',
    'rest_framework_jwt',
    'corsheaders',
    # Local apps.
    'shop_back',
    'api',
]

MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    # CorsMiddleware is placed before CommonMiddleware.
    'corsheaders.middleware.CorsMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]

# NOTE(review): accepts cross-origin requests from any host (dev setting).
CORS_ORIGIN_ALLOW_ALL = True

ROOT_URLCONF = 'shop_back.urls'

TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]

WSGI_APPLICATION = 'shop_back.wsgi.application'

# Database
# https://docs.djangoproject.com/en/3.0/ref/settings/#databases

DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
    }
}

# Password validation
# https://docs.djangoproject.com/en/3.0/ref/settings/#auth-password-validators

AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]

# Internationalization
# https://docs.djangoproject.com/en/3.0/topics/i18n/

LANGUAGE_CODE = 'en-us'

TIME_ZONE = 'UTC'

USE_I18N = True

USE_L10N = True

USE_TZ = True

# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.0/howto/static-files/

STATIC_URL = '/static/'

REST_FRAMEWORK = {
    # 'DEFAULT_PERMISSION_CLASSES': (
    #     'rest_framework.permissions.IsAuthenticated',
    # ),
    # JWT first, then session/basic authentication as fallbacks.
    'DEFAULT_AUTHENTICATION_CLASSES': (
        'rest_framework_jwt.authentication.JSONWebTokenAuthentication',
        'rest_framework.authentication.SessionAuthentication',
        'rest_framework.authentication.BasicAuthentication',
    ),
}

# django-rest-framework-jwt configuration: tokens are signed with the
# project SECRET_KEY using HS256 and expire after 2 days.
JWT_AUTH = {
    'JWT_ENCODE_HANDLER':
    'rest_framework_jwt.utils.jwt_encode_handler',

    'JWT_DECODE_HANDLER':
    'rest_framework_jwt.utils.jwt_decode_handler',

    'JWT_PAYLOAD_HANDLER':
    'rest_framework_jwt.utils.jwt_payload_handler',

    'JWT_PAYLOAD_GET_USER_ID_HANDLER':
    'rest_framework_jwt.utils.jwt_get_user_id_from_payload_handler',

    'JWT_RESPONSE_PAYLOAD_HANDLER':
    'rest_framework_jwt.utils.jwt_response_payload_handler',

    'JWT_SECRET_KEY': settings.SECRET_KEY,
    'JWT_GET_USER_SECRET_KEY': None,
    'JWT_PUBLIC_KEY': None,
    'JWT_PRIVATE_KEY': None,
    'JWT_ALGORITHM': 'HS256',
    'JWT_VERIFY': True,
    'JWT_VERIFY_EXPIRATION': True,
    'JWT_LEEWAY': 0,
    'JWT_EXPIRATION_DELTA': datetime.timedelta(days=2),
    'JWT_AUDIENCE': None,
    'JWT_ISSUER': None,

    'JWT_ALLOW_REFRESH': False,
    'JWT_REFRESH_EXPIRATION_DELTA': datetime.timedelta(days=7),

    'JWT_AUTH_HEADER_PREFIX': 'JWT',
    'JWT_AUTH_COOKIE': None,
} | [
"togzhanagarys@icloud.com"
] | togzhanagarys@icloud.com |
55ce520c6f1a9740d847fbad7865df869d52089d | d88c36280914df79f81f061ebb0b39697435f7d0 | /Newsapp/views.py | 2ff11582298080cf25797353bf3dc056fa5c08b9 | [] | no_license | ashutosh164/News-app | 91e16d62b15fad4810f8b117af5ba312c8fe1f50 | f4dc941ca521a2119ae5084fdf5a0b784727dee7 | refs/heads/master | 2023-04-07T09:04:49.926349 | 2021-04-10T09:20:52 | 2021-04-10T09:20:52 | 356,531,938 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,354 | py | from django.shortcuts import render
from newsapi import NewsApiClient
# Create your views here.
def index(request):
    """Home page: top headlines from the Al Jazeera English source."""
    newsapi = NewsApiClient(api_key="ff61f6a2a5e24b92b6dbc8065a98b350")
    articles = newsapi.get_top_headlines(sources='al-jazeera-english')['articles']
    # Split the article dicts into parallel lists, then zip them back up
    # into (title, description, image) triples for the template.
    titles = [article['title'] for article in articles]
    summaries = [article['description'] for article in articles]
    images = [article['urlToImage'] for article in articles]
    return render(request, 'index.html', context={'mylist': zip(titles, summaries, images)})
def bbc(request):
    """Top headlines from the BBC News source."""
    newsapi = NewsApiClient(api_key="ff61f6a2a5e24b92b6dbc8065a98b350")
    articles = newsapi.get_top_headlines(sources='bbc-news')['articles']
    # Parallel lists zipped into (title, description, image) triples.
    titles = [article['title'] for article in articles]
    summaries = [article['description'] for article in articles]
    images = [article['urlToImage'] for article in articles]
    return render(request, 'bbc.html', context={'mylist': zip(titles, summaries, images)})
def times(request):
    """Top headlines from the Times of India source."""
    newsapi = NewsApiClient(api_key="ff61f6a2a5e24b92b6dbc8065a98b350")
    articles = newsapi.get_top_headlines(sources='the-times-of-india')['articles']
    # Parallel lists zipped into (title, description, image) triples.
    titles = [article['title'] for article in articles]
    summaries = [article['description'] for article in articles]
    images = [article['urlToImage'] for article in articles]
    return render(request, 'times.html', context={'mylist': zip(titles, summaries, images)})
def espn(request):
    """Top headlines from the ESPN Cricinfo source."""
    newsapi = NewsApiClient(api_key="ff61f6a2a5e24b92b6dbc8065a98b350")
    articles = newsapi.get_top_headlines(sources='espn-cric-info')['articles']
    # Parallel lists zipped into (title, description, image) triples.
    titles = [article['title'] for article in articles]
    summaries = [article['description'] for article in articles]
    images = [article['urlToImage'] for article in articles]
    return render(request, 'espn.html', context={'mylist': zip(titles, summaries, images)})
def yent(request):
    """Top headlines from the Ynet source."""
    newsapi = NewsApiClient(api_key="ff61f6a2a5e24b92b6dbc8065a98b350")
    articles = newsapi.get_top_headlines(sources='ynet')['articles']
    # Parallel lists zipped into (title, description, image) triples.
    titles = [article['title'] for article in articles]
    summaries = [article['description'] for article in articles]
    images = [article['urlToImage'] for article in articles]
    return render(request, 'ynet.html', context={'mylist': zip(titles, summaries, images)})
def lenta(request):
    """Top headlines from the Lenta source."""
    newsapi = NewsApiClient(api_key="ff61f6a2a5e24b92b6dbc8065a98b350")
    topheadline = newsapi.get_top_headlines(sources='lenta')
    articles = topheadline['articles']
    desc = []
    news = []
    img = []
    for i in range(len(articles)):
        myarticles = articles[i]
        news.append(myarticles['title'])
        desc.append(myarticles['description'])
        img.append(myarticles['urlToImage'])
    # BUGFIX: the original called ``zip((news, desc, img))`` -- zipping a
    # single 3-tuple -- so the template received 1-tuples containing the
    # whole lists instead of (title, description, image) triples.
    mylist = zip(news, desc, img)
return render(request,'lenta.html',context={'mylist':mylist}) | [
"ashutoshpradhan164@gmail.com"
] | ashutoshpradhan164@gmail.com |
dd121328da958577a0671baf5dbb8cbeb3a5c377 | 5c61990fc1a79f389111a3e449c1fadf65fc1b8c | /portnet_api/models/contract.py | 19c2fe0d6032fe1e126ae861a70ebe56c2177e77 | [] | no_license | brahim94/portnet | 3befb64009fd014b74e01151cc429a613d3d2f11 | f1120ce4806ba2fd7e26132ca918d1ce8b9ad32c | refs/heads/master | 2023-04-14T07:17:40.956207 | 2021-04-27T16:37:48 | 2021-04-27T16:37:48 | 356,211,308 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 34,894 | py | # -*- coding: utf-8 -*-
import json
import logging
import requests
import base64
import datetime
import time
from openerp import api, fields, models, _
from dateutil.relativedelta import relativedelta
from openerp.exceptions import ValidationError
_logger = logging.getLogger(__name__)
try:
import xmltodict
except:
_logger.debug('xmltodict libraries not available. Please install "xmltodict"\
python package by "pip install xmltodict".')
class ResContract(models.Model):
_inherit = 'res.contract'
    @api.model
    def create_subscription(self, values):
        """API endpoint: create a package subscription from a PortNet payload.

        :param values: dict with mandatory keys ``name``, ``template_id``
            (package code), ``state`` ('A' -> pending, 'D' -> draft),
            ``date_start``, ``date``, ``partner_categ_id`` (category code),
            ``partner_id`` (partner code) and ``date_create``; optional
            ``add_balance``.
        :return: ``{'success': <record id>}`` on success, otherwise
            ``{'faultCode': 0, 'faultString': <reason>}``.
        """
        subscription_id = False
        if values:
            ### Validate Data
            if not values.get('name'):
                return {'faultCode': 0, 'faultString': 'N° Souscriptions is required.'}
            if not values.get('template_id'):
                return {'faultCode': 0, 'faultString': 'Package is required.'}
            if not values.get('state'):
                return {'faultCode': 0, 'faultString': 'State is required.'}
            if not values.get('date_start'):
                return {'faultCode': 0, 'faultString': 'Date début is required.'}
            if not values.get('date'):
                return {'faultCode': 0, 'faultString': 'Date fin is required.'}
            if not values.get('partner_categ_id'):
                return {'faultCode': 0, 'faultString': "Rôle de l'opérateur is required."}
            if not values.get('partner_id'):
                return {'faultCode': 0, 'faultString': 'Opérateur is required.'}
            if not values.get('date_create'):
                return {'faultCode': 0, 'faultString': 'Create Date is required.'}
            ### Find Data From DB
            # The template is a package contract flagged is_template=True.
            template_id = self.search([('name', '=', values['template_id']), ('type_contract', '=', 'package'), ('is_template', '=', True)], limit=1)
            if not template_id:
                return {'faultCode': 0, 'faultString': 'template_id doesn’t exist in Odoo db'}
            partner_categ_id = self.env['res.partner.category'].search([('code', '=', values['partner_categ_id'])], limit=1)
            if not partner_categ_id:
                return {'faultCode': 0, 'faultString': "partner_categ_id doesn’t exist in Odoo db"}
            # The partner must be a customer belonging to the resolved category.
            partner_id = self.env['res.partner'].search([('code', '=', values['partner_id']), ('categ_id', '=', partner_categ_id.id), ('customer', '=', True)], limit=1)
            if not partner_id:
                return {'faultCode': 0, 'faultString': 'partner_id doesn’t exist in Odoo db'}
            ### A = pending
            ### D = draft
            state = False
            if values['state'] == 'A':
                state = 'pending'
            elif values['state'] == 'D':
                state = 'draft'
            else:
                return {'faultCode': 0, 'faultString': 'state doesn’t exist in Odoo db'}
            date_start = str(values['date_start']).strip()
            date = str(values['date']).strip()
            date_create = str(values['date_create']).strip()
            # Next invoice falls one billing period after the start date.
            next_invoice_date = fields.Date.from_string(date_start) + relativedelta(months=template_id.periodicity_id.nb_months)
            subscription_id = self.with_context(default_type_contract='package', default_is_template=False).create({
                ### API Fields
                'name': values['name'],
                'template_id': template_id.id if template_id else False,
                'date_start': date_start,
                'date': date,
                'add_balance': values.get('add_balance') or 0,
                'partner_categ_id': partner_categ_id.id if partner_categ_id else False,
                'partner_id': partner_id.id if partner_id else False,
                'date_create_portnet': date_create,
                'state': state,
                ### Default Package Fields
                'product_id': template_id.product_id.id,
                'product_category_id': template_id.product_category_id.id,
                'periodicity_id': template_id.periodicity_id.id,
                'tacite': template_id.tacite,
                'currency_id': template_id.currency_id.id,
                'amount': template_id.amount,
                'transaction_no': template_id.transaction_no,
                'first_invoice_date': date_start,
                'next_invoice_date': next_invoice_date,
                'anticipated_invoice_date': next_invoice_date,
            })
            # Apply the template's onchange defaults, then leave an audit trail.
            subscription_id.onchange_template_id()
            subscription_id.message_post(body=_("Record created by API Services"))
        if subscription_id:
            return {'success': subscription_id.id}
        else:
            return {'faultCode': 0, 'faultString': 'Something went wrong!'}
@api.model
def update_subscription(self, values):
subscription_id = False
if values:
### Validate Data
if not values.get('name'):
return {'faultCode': 0, 'faultString': 'N° Souscriptions is required.'}
if not values.get('comment'):
return {'faultCode': 0, 'faultString': 'Comment is required.'}
if not values.get('date_write'):
return {'faultCode': 0, 'faultString': 'Update Date is required.'}
### Find Data From DB
subscription_id = self.search([('name', '=', values['name']), ('type_contract', '=', 'package'), ('is_template', '=', False)], limit=1)
if not subscription_id:
return {'faultCode': 0, 'faultString': 'Subscription doesn’t exist in Odoo db'}
vals = {}
if values.get('date_start'):
vals.update({'date_start': str(values['date_start']).strip()})
if values.get('date'):
vals.update({'date': str(values['date']).strip()})
if values.get('add_balance'):
vals.update({'add_balance': values['add_balance'] or 0})
t = time.strptime(str(values['date_write']).strip(), "%Y-%m-%dT%H:%M:%S")
date_write = datetime.datetime(*tuple(t)[:7])
# date_write = str(values['date_write']).strip()
vals.update({'date_write_portnet': date_write})
if values.get('state'):
if values['state'] == 'A':
vals.update({'state': 'pending'})
elif values['state'] == 'S':
vals.update({'state': 'suspend'})
elif values['state'] == 'E':
vals.update({'state': 'expire'})
elif values['state'] == 'C':
vals.update({'state': 'closed'})
# vals.update({'description_package': values.get('comment')})
subscription_id.write(vals)
subscription_id.message_post(body=_("Record updated by API Services"))
subscription_id.message_post(body=_(values.get('comment').strip()))
if subscription_id:
return {'success': subscription_id.id}
else:
return {'faultCode': 0, 'faultString': 'Something went wrong!'}
def master_tag_start(self, tag):
data = "<%s xmlns=\"http://www.portnet.ma/nouvelleTarification\">" % tag
return data
def sub_tag_start(self, tag):
data = "<%s>" % tag
return data
def tag_end(self, tag):
data = "</%s>" % tag
return data
def new_line(self):
return '\n'
def get_tab(self):
return ''.ljust(4)
def get_tranches_lines(self, line_ids):
lines = ''
for line in line_ids:
lines += ''.join([
self.sub_tag_start('tranches'),
self.sub_tag_start('de'), (line.tranche_de_no or ''), self.tag_end('de'), self.new_line(), self.get_tab(),
self.sub_tag_start('a'), (line.tranche_a_no or ''), self.tag_end('a'), self.new_line(), self.get_tab(),
self.sub_tag_start('frais'), (line.frais_de_services or ''), self.tag_end('frais'), self.new_line(), self.get_tab(),
self.tag_end('tranches'),
])
return lines
    @api.multi
    def action_sync_GU(self):
        """Push this subscription's validity dates to the PortNet GU service.

        Builds the ``updateSouscription`` XML payload, POSTs it with HTTP
        basic auth taken from the current company, raises a ValidationError
        carrying the remote message/description/GUID on any non-200 answer,
        and stamps the sync dates on success.
        """
        company_id = self.env.user.company_id
        url = (("%s/crm/nouvelleTarification/updateSouscription") % (company_id.ip_address))
        headers = {
            # HTTP basic auth: "user:password" base64-encoded; credentials
            # are configured on the company record.
            'authorization': "Basic %s" % (base64.b64encode(("%s:%s" % (company_id.user_id, company_id.password)).encode())).decode(),
            'content-type': "application/xml",
        }
        # Assemble the XML body with the small tag helpers defined above.
        payload = ''.join([
            self.master_tag_start('souscription'), self.new_line(), self.get_tab(),
            self.sub_tag_start('identifiant'), (self.name or ''), self.tag_end('identifiant'), self.new_line(), self.get_tab(),
            self.sub_tag_start('debutValidite'), (fields.Datetime.from_string(self.date_start).strftime("%Y-%m-%dT%H:%M:%S")), self.tag_end('debutValidite'), self.new_line(), self.get_tab(),
            self.sub_tag_start('finValidite'), (fields.Datetime.from_string(self.date).strftime("%Y-%m-%dT%H:%M:%S")), self.tag_end('finValidite'), self.new_line(), self.get_tab(),
            self.sub_tag_start('dateModification'), (fields.Datetime.from_string(fields.Datetime.now()).strftime("%Y-%m-%dT%H:%M:%S")), self.tag_end('dateModification'), self.new_line(), self.get_tab(),
            self.sub_tag_start('motif'), (str(self.description_package) or ''), self.tag_end('motif'), self.new_line(), self.get_tab(),
            self.tag_end('souscription'),
        ])
        response = requests.request("POST", url, headers=headers, data=payload)
        # NOTE(review): parsed unconditionally, so a non-XML body raises even
        # on HTTP 200; the same parse is redundantly recomputed below.
        res = json.loads(json.dumps(xmltodict.parse(response.text, process_namespaces=True)))
        if response.status_code != 200:
            message = ''
            description = ''
            guid = ''
            res = json.loads(json.dumps(xmltodict.parse(response.text, process_namespaces=True)))
            # Keys are namespace-expanded by process_namespaces=True.
            if res and res.get('http://www.portnet.ma/nouvelleTarification:reponse') and res.get('http://www.portnet.ma/nouvelleTarification:reponse').get('http://www.portnet.ma/nouvelleTarification:description'):
                message = res['http://www.portnet.ma/nouvelleTarification:reponse']['http://www.portnet.ma/nouvelleTarification:message']
                description = res['http://www.portnet.ma/nouvelleTarification:reponse']['http://www.portnet.ma/nouvelleTarification:description']
                guid = res['http://www.portnet.ma/nouvelleTarification:reponse']['http://www.portnet.ma/nouvelleTarification:guid']
            _logger.warning("\nERROR MESSAGE: \n\n %s \n\n" % str(response.text))
            raise ValidationError("%s \n\n %s \nGUID: %s" % (message, description, guid))
        self.write({'date_write_portnet': fields.Datetime.now(), 'date_sync_portnet': fields.Datetime.now()})
        return True
    @api.multi
    def action_suspend(self):
        """Suspend this subscription on the PortNet GU service.

        POSTs a ``suspendSouscription`` XML payload (statut SUSPENDU plus
        the suspension timestamp and motive) with HTTP basic auth from the
        current company; raises a ValidationError with the remote
        message/description/GUID on any non-200 answer, then marks the
        record suspended and stamps the sync date.
        """
        company_id = self.env.user.company_id
        url = (("%s/crm/nouvelleTarification/suspendSouscription") % (company_id.ip_address))
        headers = {
            # HTTP basic auth: "user:password" base64-encoded; credentials
            # are configured on the company record.
            'authorization': "Basic %s" % (base64.b64encode(("%s:%s" % (company_id.user_id, company_id.password)).encode())).decode(),
            'content-type': "application/xml",
        }
        # Assemble the XML body with the small tag helpers defined above.
        payload = ''.join([
            self.master_tag_start('souscription'), self.new_line(), self.get_tab(),
            self.sub_tag_start('identifiant'), (self.name or ''), self.tag_end('identifiant'), self.new_line(), self.get_tab(),
            self.sub_tag_start('statut'), ('SUSPENDU'), self.tag_end('statut'), self.new_line(), self.get_tab(),
            self.sub_tag_start('dateSuspension'), (fields.Datetime.from_string(fields.Datetime.now()).strftime("%Y-%m-%dT%H:%M:%S")), self.tag_end('dateSuspension'), self.new_line(), self.get_tab(),
            self.sub_tag_start('motif'), (str(self.description_package) or ''), self.tag_end('motif'), self.new_line(), self.get_tab(),
            self.tag_end('souscription'),
        ])
        response = requests.request("POST", url, headers=headers, data=payload)
        # NOTE(review): parsed unconditionally, so a non-XML body raises even
        # on HTTP 200; the same parse is redundantly recomputed below.
        res = json.loads(json.dumps(xmltodict.parse(response.text, process_namespaces=True)))
        if response.status_code != 200:
            message = ''
            description = ''
            guid = ''
            res = json.loads(json.dumps(xmltodict.parse(response.text, process_namespaces=True)))
            # Keys are namespace-expanded by process_namespaces=True.
            if res and res.get('http://www.portnet.ma/nouvelleTarification:reponse') and res.get('http://www.portnet.ma/nouvelleTarification:reponse').get('http://www.portnet.ma/nouvelleTarification:description'):
                message = res['http://www.portnet.ma/nouvelleTarification:reponse']['http://www.portnet.ma/nouvelleTarification:message']
                description = res['http://www.portnet.ma/nouvelleTarification:reponse']['http://www.portnet.ma/nouvelleTarification:description']
                guid = res['http://www.portnet.ma/nouvelleTarification:reponse']['http://www.portnet.ma/nouvelleTarification:guid']
            _logger.warning("\nERROR MESSAGE: \n\n %s \n\n" % str(response.text))
            raise ValidationError("%s \n\n %s \nGUID: %s" % (message, description, guid))
        self.write({'date_sync_portnet': fields.Datetime.now(), 'state': 'suspend'})
        return True
@api.model
def create_package(self, values):
    """Create a package (template contract) from API-supplied values.

    Validates the mandatory keys in ``values``, resolves the related
    records (currency, product category, product, partner category,
    periodicity), then creates the package with
    ``type_contract='package'`` and ``is_template=True`` (which also
    triggers ``create_package_export`` via ``create``).

    :param values: dict of package attributes sent by the API caller.
    :return: ``{'success': <id>}`` on success, otherwise
             ``{'faultCode': 0, 'faultString': <reason>}``.
    """
    package_id = False
    if values:
        vals = {}
        Currency = self.env['res.currency']
        Product = self.env['product.product']
        ProductCategory = self.env['product.category']
        PartnerCategory = self.env['res.partner.category']
        Periodicity = self.env['res.periodicity']
        ### Validate Data
        if not values.get('name'):
            return {'faultCode': 0, 'faultString': 'Code package is required.'}
        if not values.get('partner_categ_id'):
            return {'faultCode': 0, 'faultString': "Rôle de l'opérateur is required."}
        if not values.get('active_package'):
            return {'faultCode': 0, 'faultString': 'Active Package status is required.'}
        if not values.get('criteria_factures'):
            return {'faultCode': 0, 'faultString': 'Critére de facturation is required.'}
        if not values.get('parameter_decompte'):
            return {'faultCode': 0, 'faultString': 'Paramétre de décompte is required.'}
        if not values.get('type_paiment'):
            return {'faultCode': 0, 'faultString': 'Type paiement is required.'}
        if values.get('transaction_no') and values['transaction_no'] == 'transaction_limit' and not values.get('transaction_no_limit'):
            return {'faultCode': 0, 'faultString': 'Nombre de transactions is required.'}
        if not values.get('periodicity_id'):
            return {'faultCode': 0, 'faultString': 'Périodicité is required.'}
        if not values.get('debut_validate'):
            return {'faultCode': 0, 'faultString': 'Debut de validité is required.'}
        if not values.get('validate_package'):
            return {'faultCode': 0, 'faultString': 'Validité du package is required.'}
        # NOTE: 'tacite' is optional (its required-field check was disabled),
        # so it is read with .get() below, as update_package already does.
        if not values.get('type_service'):
            return {'faultCode': 0, 'faultString': 'Type de frais is required.'}
        if not values.get('date_create'):
            return {'faultCode': 0, 'faultString': 'Create Date is required.'}
        ### Find Data From DB
        currency_id = Currency.search([('name', '=', 'MAD')], limit=1)
        if not currency_id:
            return {'faultCode': 0, 'faultString': 'Currency doesn’t exist in Odoo db'}
        product_category_id = self.env.ref('product.product_category_all')
        if not product_category_id:
            product_category_id = ProductCategory.search([('name', '=', 'All')], limit=1)
            if not product_category_id:
                return {'faultCode': 0, 'faultString': 'Product Category doesn’t exist in Odoo db'}
        product_id = Product.search([('name', '=', values['name'])], limit=1)
        if not product_id:
            # Create the service product on the fly when it does not exist yet.
            product_id = Product.with_context(default_type='service', default_is_package=True, default_category_id=product_category_id.id).create({
                'name': values['name'],
            })
        partner_categ_id = PartnerCategory.search([('code', '=', values['partner_categ_id'])], limit=1)
        if not partner_categ_id:
            return {'faultCode': 0, 'faultString': "partner_categ_id doesn’t exist in Odoo db"}
        # Map the French periodicity label to its length in months.
        month = 0
        if values['periodicity_id'] == 'Mensuel':
            month = 1
        elif values['periodicity_id'] == 'Trimestriel':
            month = 3
        elif values['periodicity_id'] == 'Semestriel':
            month = 6
        elif values['periodicity_id'] == 'Annuel':
            month = 12
        periodicity_id = Periodicity.search([('nb_months', '=', month)], limit=1)
        if not periodicity_id:
            return {'faultCode': 0, 'faultString': 'periodicity_id doesn’t exist in Odoo db'}
        # Translate the two French labels into the selection-field keys.
        criteria_factures = False
        if values['criteria_factures'] == "Titre d'importation":
            criteria_factures = 'enable'
        elif values['criteria_factures'] == "Escale":
            criteria_factures = 'disable'
        parameter_decompte = False
        if values['parameter_decompte'] == "Envoi pour domiciliation":
            parameter_decompte = 'enable'
        elif values['parameter_decompte'] == "Envoi du manifeste":
            parameter_decompte = 'disable'
        if values['type_service'] == 'fix' and not values.get('service_fee'):
            return {'faultCode': 0, 'faultString': 'service_fee is mandatory.'}
        elif values['type_service'] == 'tranches' and not values.get('type_service_line_ids'):
            return {'faultCode': 0, 'faultString': 'service_lines is mandatory.'}
        if values.get('type_service_line_ids'):
            # One2many create commands: (0, 0, vals) per tranche line.
            service_lines = []
            for line in values['type_service_line_ids']:
                service_lines.append((0, 0, {'tranche_de_no': line[0], 'tranche_a_no': line[1], 'frais_de_services': line[2]}))
            vals.update({'type_service_line_ids': service_lines})
        date_create = str(values['date_create']).strip()
        if values.get('transaction_no'):
            vals.update({'transaction_no': values['transaction_no']})
        vals.update({
            'name': values['name'],
            'currency_id': currency_id.id,
            'product_category_id': product_category_id.id,
            'product_id': product_id.id,
            'partner_categ_id': partner_categ_id.id,
            'active_package': values['active_package'],
            'criteria_factures': criteria_factures,
            'parameter_decompte': parameter_decompte,
            'type_paiment': values['type_paiment'],
            'transaction_no_limit': values.get('transaction_no_limit'),
            'amount': values.get('amount'),
            'periodicity_id': periodicity_id.id,
            'debut_validate': values['debut_validate'],
            'validate_package': values['validate_package'],
            # BUGFIX: use .get() — 'tacite' is optional (its validation is
            # disabled above) and direct indexing raised KeyError whenever the
            # caller omitted it; update_package already reads it with .get().
            'tacite': values.get('tacite'),
            'type_service': values['type_service'],
            'service_fee': values.get('service_fee'),
            'description_package': values.get('description_package'),
            'date_create_portnet': date_create,
        })
        package_id = self.with_context(default_type_contract='package', default_is_template=True).create(vals)
        package_id.message_post(body=_("Record created by API Services"))
    if package_id:
        return {'success': package_id.id}
    else:
        return {'faultCode': 0, 'faultString': 'Something went wrong!'}
@api.model
def update_package(self, values):
    """Update an existing package (template contract) from API-supplied values.

    Looks up the package by its code (``values['name']``), applies only the
    keys present in ``values``, logs the caller's comment in the chatter and
    stamps ``date_write_portnet``.

    :param values: dict of package attributes; 'name', 'comment' and
        'date_write' are mandatory, everything else is optional.
    :return: ``{'success': <id>}`` on success, otherwise
             ``{'faultCode': 0, 'faultString': <reason>}``.
    """
    package_id = False
    if values:
        vals = {}
        PartnerCategory = self.env['res.partner.category']
        Periodicity = self.env['res.periodicity']
        ### Validate Data
        if not values.get('name'):
            return {'faultCode': 0, 'faultString': 'Code package is required.'}
        if not values.get('comment'):
            return {'faultCode': 0, 'faultString': 'Comment is required.'}
        if not values.get('date_write'):
            return {'faultCode': 0, 'faultString': 'Update Date is required.'}
        # The package to update is identified by its code among templates.
        package_id = self.search([('name', '=', values['name']), ('type_contract', '=', 'package'), ('is_template', '=', True)], limit=1)
        if not package_id:
            return {'faultCode': 0, 'faultString': 'Package doesn’t exist in Odoo db'}
        ### Find Data From DB
        if values.get('partner_categ_id'):
            partner_categ_id = PartnerCategory.search([('code', '=', values['partner_categ_id'])], limit=1)
            if not partner_categ_id:
                return {'faultCode': 0, 'faultString': "partner_categ_id doesn’t exist in Odoo db"}
            vals.update({'partner_categ_id': partner_categ_id.id})
        if values.get('active_package'):
            vals.update({'active_package': values['active_package']})
        # Translate the two French labels into the selection-field keys.
        if values.get('criteria_factures'):
            if values['criteria_factures'] == "Titre d'importation":
                vals.update({'criteria_factures': 'enable'})
            elif values['criteria_factures'] == "Escale":
                vals.update({'criteria_factures': 'disable'})
        if values.get('parameter_decompte'):
            if values['parameter_decompte'] == "Envoi pour domiciliation":
                vals.update({'parameter_decompte': 'enable'})
            elif values['parameter_decompte'] == "Envoi du manifeste":
                vals.update({'parameter_decompte': 'disable'})
        if values.get('type_paiment'):
            vals.update({'type_paiment': values['type_paiment']})
        if values.get('transaction_no'):
            vals.update({'transaction_no': values['transaction_no']})
        if values.get('transaction_no_limit'):
            vals.update({'transaction_no_limit': values['transaction_no_limit']})
        if values.get('amount'):
            vals.update({'amount': values['amount']})
        if values.get('periodicity_id'):
            # Map the French periodicity label to its length in months.
            month = 0
            if values['periodicity_id'] == 'Mensuel':
                month = 1
            elif values['periodicity_id'] == 'Trimestriel':
                month = 3
            elif values['periodicity_id'] == 'Semestriel':
                month = 6
            elif values['periodicity_id'] == 'Annuel':
                month = 12
            periodicity_id = Periodicity.search([('nb_months', '=', month)], limit=1)
            if not periodicity_id:
                return {'faultCode': 0, 'faultString': 'periodicity_id doesn’t exist in Odoo db'}
            vals.update({'periodicity_id': periodicity_id.id})
        if values.get('debut_validate'):
            vals.update({'debut_validate': values['debut_validate']})
        if values.get('validate_package'):
            vals.update({'validate_package': values['validate_package']})
        if values.get('type_service'):
            # 'fix' requires a flat fee, 'tranches' requires tranche lines.
            if values['type_service'] == 'fix' and not values.get('service_fee'):
                return {'faultCode': 0, 'faultString': 'service_fee is mandatory.'}
            elif values['type_service'] == 'tranches' and not values.get('type_service_line_ids'):
                return {'faultCode': 0, 'faultString': 'service_lines is mandatory.'}
            vals.update({'type_service': values['type_service']})
        if values.get('service_fee'):
            vals.update({'service_fee': values['service_fee']})
        if values.get('type_service_line_ids'):
            # Replace all existing tranche lines with the new ones.
            service_lines = []
            for line in values['type_service_line_ids']:
                service_lines.append((0, 0, {'tranche_de_no': line[0], 'tranche_a_no': line[1], 'frais_de_services': line[2]}))
            package_id.type_service_line_ids.unlink()
            vals.update({'type_service_line_ids': service_lines})
        if values.get('description_package'):
            vals.update({'description_package': values['description_package']})
        date_write = str(values['date_write']).strip()
        vals.update({'date_write_portnet': date_write, 'tacite': values.get('tacite')})
        package_id.write(vals)
        # Trace the API update (and the caller's comment) in the chatter.
        package_id.message_post(body=_("Record updated by API Services"))
        package_id.message_post(body=_((values['comment']).strip()))
    if package_id:
        return {'success': package_id.id}
    else:
        return {'faultCode': 0, 'faultString': 'Something went wrong!'}
@api.multi
def create_package_export(self):
    """Export this package to the PortNet tarification web service.

    Two HTTP calls are made against the company-configured endpoint:

    1. GET ``identifiantPackage`` to obtain a new package code for this
       operator role / payment type.
    2. POST ``createPackage`` with an XML payload describing the package.

    On success the record is renamed to the returned package code and the
    sync timestamps are updated; on any non-200 response a
    :class:`ValidationError` is raised with the service's message,
    description and GUID.

    :return: ``True`` on success.
    :raises ValidationError: when either web-service call fails.
    """
    package_code = False
    contract_id = self
    company_id = self.env.user.company_id
    url = (("%s/crm/nouvelleTarification/createPackage") % (company_id.ip_address))
    code_url = ("%s/crm/nouvelleTarification/identifiantPackage?roleOperateur=%s&typePaiement=%s") % (company_id.ip_address, str(contract_id.partner_categ_id.code), str(contract_id.type_paiment))
    # HTTP Basic auth built from the company's PortNet credentials.
    headers = {
        'authorization': "Basic %s" % (base64.b64encode(("%s:%s" % (company_id.user_id, company_id.password)).encode())).decode(),
        'content-type': "application/xml",
    }
    ### Get Package Code
    response_code = requests.request("GET", code_url, headers=headers)
    # xmltodict -> json round-trip normalises the XML into plain dicts;
    # process_namespaces=True expands tags to 'namespace:tag' keys.
    res_code = json.loads(json.dumps(xmltodict.parse(response_code.text, process_namespaces=True)))
    result_sub_code = json.loads(json.dumps(xmltodict.parse(response_code.text, process_namespaces=True)))
    if response_code.status_code == 200:
        if result_sub_code and result_sub_code.get('http://www.portnet.ma/nouvelleTarification:reponse') and result_sub_code.get('http://www.portnet.ma/nouvelleTarification:reponse').get('http://www.portnet.ma/nouvelleTarification:message'):
            package_code = result_sub_code['http://www.portnet.ma/nouvelleTarification:reponse']['http://www.portnet.ma/nouvelleTarification:message']
    else:
        # Surface the service's error details to the user.
        message_code = ''
        description_code = ''
        guid_code = ''
        if result_sub_code and result_sub_code.get('http://www.portnet.ma/nouvelleTarification:reponse') and result_sub_code.get('http://www.portnet.ma/nouvelleTarification:reponse').get('http://www.portnet.ma/nouvelleTarification:description'):
            message_code = result_sub_code['http://www.portnet.ma/nouvelleTarification:reponse']['http://www.portnet.ma/nouvelleTarification:message']
            description_code = result_sub_code['http://www.portnet.ma/nouvelleTarification:reponse']['http://www.portnet.ma/nouvelleTarification:description']
            guid_code = result_sub_code['http://www.portnet.ma/nouvelleTarification:reponse']['http://www.portnet.ma/nouvelleTarification:guid']
        _logger.warning("\nERROR MESSAGE: \n\n %s \n\n" % str(response_code.text))
        raise ValidationError("%s \n\n %s \nGUID: %s" % (message_code, description_code, guid_code))
    ### Create Package After getting package number
    # Build the <package> XML payload with the record's helper tag builders.
    payload = ''.join([
        contract_id.master_tag_start('package'), contract_id.new_line(), contract_id.get_tab(),
        contract_id.sub_tag_start('code'), (package_code), contract_id.tag_end('code'), contract_id.new_line(), contract_id.get_tab(),
        contract_id.sub_tag_start('statut'), ('Actif'), contract_id.tag_end('statut'), contract_id.new_line(), contract_id.get_tab(),
        contract_id.sub_tag_start('roleOperateur'), (contract_id.partner_categ_id.code or ''), contract_id.tag_end('roleOperateur'), contract_id.new_line(), contract_id.get_tab(),
        contract_id.sub_tag_start('critereFacturation'), str(dict(self._fields['criteria_factures'].selection).get(contract_id.criteria_factures)), contract_id.tag_end('critereFacturation'), contract_id.new_line(), contract_id.get_tab(),
        contract_id.sub_tag_start('parametreDecompte'), str(dict(self._fields['parameter_decompte'].selection).get(contract_id.parameter_decompte)), contract_id.tag_end('parametreDecompte'), contract_id.new_line(), contract_id.get_tab(),
        contract_id.sub_tag_start('reconduction'), ('1' if contract_id.tacite else ''), contract_id.tag_end('reconduction'), contract_id.new_line(), contract_id.get_tab(),
        contract_id.sub_tag_start('typePaiement'), (contract_id.type_paiment or ''), contract_id.tag_end('typePaiement'), contract_id.new_line(), contract_id.get_tab(),
        contract_id.sub_tag_start('transactionAutorisee'), (contract_id.transaction_no or ''), contract_id.tag_end('transactionAutorisee'), contract_id.new_line(), contract_id.get_tab(),
        contract_id.sub_tag_start('nbreTransactions'), (str(contract_id.transaction_no_limit) or ''), contract_id.tag_end('nbreTransactions'), contract_id.new_line(), contract_id.get_tab(),
        contract_id.sub_tag_start('abonnementBase'), (str(contract_id.amount) or ''), contract_id.tag_end('abonnementBase'), contract_id.new_line(), contract_id.get_tab(),
        contract_id.sub_tag_start('periodicite'), (contract_id.periodicity_id.name or ''), contract_id.tag_end('periodicite'), contract_id.new_line(), contract_id.get_tab(),
        contract_id.sub_tag_start('validite'), (contract_id.validate_package or ''), contract_id.tag_end('validite'), contract_id.new_line(), contract_id.get_tab(),
        contract_id.sub_tag_start('debutValidite'), (contract_id.debut_validate or ''), contract_id.tag_end('debutValidite'), contract_id.new_line(), contract_id.get_tab(),
        contract_id.sub_tag_start('typeService'), (contract_id.type_service or ''), contract_id.tag_end('typeService'), contract_id.new_line(), contract_id.get_tab(),
        contract_id.sub_tag_start('fraisService'), (str(contract_id.service_fee) or ''), contract_id.tag_end('fraisService'), contract_id.new_line(), contract_id.get_tab(),
        (contract_id.get_tranches_lines(contract_id.type_service_line_ids)),
        contract_id.sub_tag_start('dateCreation'), (fields.Datetime.from_string(contract_id.date_create_portnet or fields.Datetime.now()).strftime("%Y-%m-%dT%H:%M:%S")), contract_id.tag_end('dateCreation'), contract_id.new_line(), contract_id.get_tab(),
        contract_id.sub_tag_start('description'), (contract_id.description_package or ''), contract_id.tag_end('description'), contract_id.new_line(), contract_id.get_tab(),
        contract_id.tag_end('package'),
    ])
    response = requests.request("POST", url, headers=headers, data=payload)
    res = json.loads(json.dumps(xmltodict.parse(response.text, process_namespaces=True)))
    if response.status_code != 200:
        message = ''
        description = ''
        guid = ''
        res = json.loads(json.dumps(xmltodict.parse(response.text, process_namespaces=True)))
        if res and res.get('http://www.portnet.ma/nouvelleTarification:reponse') and res.get('http://www.portnet.ma/nouvelleTarification:reponse').get('http://www.portnet.ma/nouvelleTarification:description'):
            message = res['http://www.portnet.ma/nouvelleTarification:reponse']['http://www.portnet.ma/nouvelleTarification:message']
            description = res['http://www.portnet.ma/nouvelleTarification:reponse']['http://www.portnet.ma/nouvelleTarification:description']
            guid = res['http://www.portnet.ma/nouvelleTarification:reponse']['http://www.portnet.ma/nouvelleTarification:guid']
        _logger.warning("\nERROR MESSAGE: \n\n %s \n\n" % str(response.text))
        raise ValidationError("%s \n\n %s \nGUID: %s" % (message, description, guid))
    else:
        # Adopt the code assigned by PortNet and stamp the sync dates.
        contract_id.write({'name': package_code, 'date_create_portnet': fields.Datetime.now(), 'date_sync_portnet': fields.Datetime.now()})
    return True
@api.model
def create(self, values):
res = super(ResContract, self).create(values)
if self._context.get('default_is_template') and self._context['default_is_template'] == True and self._context.get('default_type_contract') and self._context['default_type_contract'] == 'package':
res.create_package_export()
return res | [
"brahim-ayad@hotmail.com"
] | brahim-ayad@hotmail.com |
7a04d9f11c765c7934f32b3e2ac19461c220203f | 48d72e4def0f17cdd1ab28d34eebfdfa2fa2f296 | /app/__init__.py | dc231bfd1603f2f496870f701620345c7c0fc69e | [] | no_license | caiyingyi/news_web | 12f5e340978efe8477f82de701de52959c248f08 | 15c4473937c5c29784c0ae72e9a0106f9988ae42 | refs/heads/master | 2020-03-11T17:08:47.376780 | 2018-05-07T15:26:54 | 2018-05-07T15:26:54 | 130,138,793 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 916 | py | # -*- coding:utf-8 -*-
from flask import Flask, render_template
from flask_bootstrap import Bootstrap
from config import config
from flask_moment import Moment
from flask_mongoengine import MongoEngine
import datetime
# Flask extension singletons, created unbound here and attached to the
# application inside create_app() (application-factory pattern).
bootstrap = Bootstrap()
moment = Moment()
mongo_db = MongoEngine()
# Custom Jinja2 filter: render a Unix timestamp as a short date.
def transform_timestamp(timestamp):
    """Format a POSIX *timestamp* as a 'YYYY-MM-DD' string (local time)."""
    moment_of = datetime.datetime.fromtimestamp(timestamp)
    return moment_of.strftime("%Y-%m-%d")
def create_app(config_name):
    """Application factory: build and configure a Flask app.

    :param config_name: key into the ``config`` mapping selecting the
        configuration class (e.g. development / production).
    :return: the fully initialised :class:`~flask.Flask` instance.
    """
    app = Flask(__name__)
    app.config.from_object(config[config_name])
    config[config_name].init_app(app)
    # Bind the unbound extension singletons to this application instance.
    bootstrap.init_app(app)
    moment.init_app(app)
    mongo_db.init_app(app)
    from .main import main as main_blueprint
    app.register_blueprint(main_blueprint)
    # Register the custom Jinja2 filter for templates.
    env = app.jinja_env
    env.filters['transform_timestamp'] = transform_timestamp
    return app
| [
"caiyingyi902@163.com"
] | caiyingyi902@163.com |
5812623a6b231e2bf8b445f6ffa642fcb04887cc | 53fab060fa262e5d5026e0807d93c75fb81e67b9 | /backup/user_328/ch37_2020_03_25_19_56_06_004731.py | 24a51c11e2535e75fe75b9bbbcb27294953173b6 | [] | no_license | gabriellaec/desoft-analise-exercicios | b77c6999424c5ce7e44086a12589a0ad43d6adca | 01940ab0897aa6005764fc220b900e4d6161d36b | refs/heads/main | 2023-01-31T17:19:42.050628 | 2020-12-16T05:21:31 | 2020-12-16T05:21:31 | 306,735,108 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 184 | py | a = (input("Digite aqui sua senha: "))
while True:
if a != "desisto":
a= (input("Digite aqui sua senha: "))
else:
print ("Você acertou a senha!")
break | [
"you@example.com"
] | you@example.com |
3e92a3d30c749a7757e2ed2bf5079a1f324cd7a1 | 2f900b24394f6e128b2f5af78cb531d2b83f4055 | /f-jp.py | bb683941f1de6fc712e01ec85c60477acccb22f8 | [] | no_license | Maggieyu0204/HH-ATPase | 41350676afae56d3045aad9829785a98f8a32ae8 | feda910c526a17fce43a02c1e9818e2b8e5ecdb8 | refs/heads/master | 2023-02-25T22:10:06.426926 | 2021-01-20T15:06:14 | 2021-01-20T15:06:14 | 331,308,446 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,954 | py | import numpy as np
from scipy.integrate import odeint
import matplotlib.pyplot as plt
import scipy.signal as signal
from mpl_toolkits.mplot3d import Axes3D
def HHmodel2(points, t, para):
    """Right-hand side of the coupled membrane / Na,K-pump ODE system.

    Designed to be passed to :func:`scipy.integrate.odeint`.  The state
    couples a Hodgkin-Huxley membrane (v, gating n/m/h) with intracellular
    Na+/K+ concentrations and a four-state pump cycle (A, B, C, D).
    Relies on module-level constants (RTF, Nae, Ke, ADP, ATP, Pi, the Kd*
    dissociation constants, delta, sigma_pump, gl/gNa/gK, El, Cm, charge,
    NA, R) defined elsewhere in this file.

    :param points: state vector [v, Nai, Ki, n, m, h, A, B, C, D].
    :param t: time (unused directly; required by the odeint signature).
    :param para: [I, k1p, k1n, k2p, k2n, k3p, k3n, k4p, k4n] — stimulus
        current and the forward/backward rate constants of the pump cycle.
    :return: numpy array of the ten time derivatives, in state order.
    """
    [I, k1p, k1n, k2p, k2n, k3p, k3n, k4p, k4n] = para # I(\muA/cm^2)
    [v, Nai, Ki, n, m, h, A, B, C, D] = points
    # Nernst potentials from the current intracellular concentrations.
    EK = RTF * np.log(Ke / Ki) # mV
    ENa = RTF * np.log(Nae / Nai) # mV
    # Hodgkin-Huxley voltage-dependent gating rates.
    alphan = 0.01 * (v + 50) / (1 - np.exp(-(v + 50) / 10))
    betan = 0.125 * np.exp(-(v + 60) / 80)
    alpham = 0.1 * (v + 35) / (1 - np.exp(-(v + 35) / 10))
    betam = 4 * np.exp(-(v + 60) / 18)
    alphah = 0.07 * np.exp(-(v + 60) / 20)
    betah = 1 / (np.exp(-(v + 30) / 10) + 1)
    # Dimensionless ion/ATP occupancies (concentration over dissociation
    # constant); the voltage-dependent sites use delta as the fraction of
    # the membrane field sensed — TODO confirm against the source model.
    Nae1a = Nae / (KdNae0 * np.exp((1 + delta) * v / RTF))
    Nae2a = Nae / KdNae
    Nai1a = Nai / (KdNai0 * np.exp(delta * v / RTF))
    Nai2a = Nai / KdNai
    Kea = Ke / KdKe
    Kia = Ki / KdKi
    ATPa = ATP / KdATP
    # Effective forward (p) and backward (n) rates of the 4-state pump cycle.
    alpha1p = k1p * Nai1a * (Nai2a ** 2) / (Nai1a * (Nai2a ** 2) + (1 + Nai2a) ** 2 + (1 + Kia) ** 2 - 1)
    alpha2p = k2p
    alpha3p = k3p * Kea ** 2 / (Nae1a * (Nae2a ** 2) + (1 + Nae2a) ** 2 + (1 + Kea) ** 2 - 1)
    alpha4p = k4p * ATPa / (1 + ATPa)
    alpha1n = k1n * ADP
    alpha2n = k2n * Nae1a * (Nae2a ** 2) / (Nae1a * (Nae2a ** 2) + (1 + Nae2a) ** 2 + (1 + Kea) ** 2 - 1)
    alpha3n = k3n * Pi * (10 ** (-4)) / (1 + ATPa)
    alpha4n = k4n * Kia ** 2 / (Nai1a * (Nai2a ** 2) + (1 + Nai2a) ** 2 + (1 + Kia) ** 2 - 1)
    # Net pump cycling flux through the B<->C transition.
    Jp = alpha2p * B - alpha2n * C
    # Membrane equation: leak + Na + K currents, pump current, stimulus.
    dvdt = (gl * (El - v) + gNa * m ** 3 * h * (ENa - v) + gK * n ** 4 * (
            EK - v) - sigma_pump * charge * Jp * (10 ** 17) + I) / Cm
    # Concentration balances: pump transport plus channel fluxes (the
    # 10**x factors are unit conversions — TODO confirm the derivation).
    dNaidt = -10 * sigma_pump * Jp * (10 ** 22) / (R * NA) - 2 * gNa * (m ** 3) * h * (v - ENa) * (10 ** 5) / (
            charge * NA * R)
    dKidt = 10 * sigma_pump * Jp * (10 ** 22) / (R * NA) - 2 * gK * (n ** 4) * (v - EK) * (10 ** 5) / (charge * NA * R)
    # First-order gating kinetics.
    dndt = alphan * (1 - n) - betan * n
    dmdt = alpham * (1 - m) - betam * m
    dhdt = alphah * (1 - h) - betah * h
    # Pump state occupancies (A -> B -> C -> D -> A cycle, both directions).
    dAdt = -alpha1p * A + alpha1n * B - alpha4n * A + alpha4p * D
    dBdt = -alpha2p * B + alpha2n * C - alpha1n * B + alpha1p * A
    dCdt = -alpha3p * C + alpha3n * D - alpha2n * C + alpha2p * B
    dDdt = -alpha4p * D + alpha4n * A - alpha3n * D + alpha3p * C
    ODE = np.array([dvdt, dNaidt, dKidt, dndt, dmdt, dhdt, dAdt, dBdt, dCdt, dDdt])
    return ODE
# Model constants used by HHmodel2 and find_increase.
G = 12 #G/RT — dimensionless free energy of ATP hydrolysis
# concentration(mMol/L)
Nae = 140 # 437
Ke = 5 # 8.46 # 24
ADP = 0.05
Pi = 0.8 # 4.2
ATP = np.exp(G) * ADP * Pi
# physical constants
charge = 1.6 * 10 ** (-19)  # elementary charge (C)
NA = 6.02 * 10 ** (23)  # Avogadro's number
RTF = 25.8  # RT/F (mV)
# axon radius (A) — presumably angstroms; confirm the unit
R = 10000
# V(mV)
El = -49.4 # -49.4
# conductances g(ms/cm^2)
gl = 0.3
gNa = 120
gK = 36
# membrane capacitance C(muF/cm^2)
Cm = 1
# Fitted pump parameters loaded from disk (first 17 entries of the file).
popt = np.loadtxt('parameter_new.txt')[0:17]
[k1p, k1n, k2p, k2n, k3p, k3n, k4p, k4n, KdNai0, KdNae0, KdNai, KdNae, KdKi, KdKe, KdATP, delta,
 sigma_pump] = popt.tolist()
def find_increase(I, turn, k1p, k1n, k2p, k2n, k3p, k3n, k4p, k4n):
    """Integrate the model and classify the resulting behaviour.

    All pump rate constants are scaled by *turn*, the system is integrated
    for 300 time units, and the trajectory after the initial transient
    (first 10000 samples) is analysed.

    :param I: stimulus current passed through to HHmodel2.
    :param turn: multiplicative scaling applied to every pump rate.
    :return: ``None`` when there is no sustained spiking (fewer than 4
        voltage extrema, too-small amplitude, or spiking dies out early);
        ``[f, J]`` (spike frequency and accumulated pump flux) when Nai is
        stationary; ``[0, 1]`` / ``[0, -1]`` when Nai drifts up / down.
    """
    k1p = turn * k1p
    k1n = turn * k1n
    k2p = turn * k2p
    k2n = turn * k2n
    k3p = turn * k3p
    k3n = turn * k3n
    k4p = turn * k4p
    k4n = turn * k4n
    # [v, Nai, Ki, n, m, h, A, B, C, D]
    initial = [-60, 15, 140, 0.5, 0, 1, 1, 0, 0, 0]
    dt = 0.01
    time = np.arange(0, 300, dt)
    results = odeint(HHmodel2, initial, time, args=([I, k1p, k1n, k2p, k2n, k3p, k3n, k4p, k4n],))
    # Discard the first 10000 samples (100 time units) as transient.
    V1 = np.array(results[:, 0])[10000:]
    NA = np.array(results[:, 1])[10000:]
    # Local extrema of the membrane potential and of Nai.
    Vmax = signal.argrelextrema(V1, np.greater)
    Vmin = signal.argrelextrema(V1, np.less)
    Namax = signal.argrelextrema(NA, np.greater)
    # No sustained spiking: too few extrema, amplitude below 30 mV, or the
    # last minimum occurs too early in the window.
    if len(Vmax[0]) < 4 or len(Vmin[0]) < 4 or V1[Vmax[0][-1]] - V1[Vmin[0][-1]] < 30 or Vmin[0][-1] < 17000:
        return None
    else:
        if abs(NA[Namax[0][-1]] - NA[Namax[0][0]]) < 0.1:
            # Nai is (approximately) periodic: compute the spike frequency
            # from the last inter-minimum interval and accumulate the pump
            # flux along the trajectory.
            f = 100000 / (Vmin[0][-1] - Vmin[0][-2])
            J = 0
            B = np.array(results[:, 7])[10000:]
            C = np.array(results[:, 8])[10000:]
            for i in range(len(V1)):
                Nae1a = Nae / (KdNae0 * np.exp((1 + delta) * V1[i] / (RTF)))
                Nae2a = Nae / KdNae
                Kea = Ke / KdKe
                alpha2p = k2p
                alpha2n = k2n * Nae1a * (Nae2a ** 2) / (Nae1a * (Nae2a ** 2) + (1 + Nae2a) ** 2 + (1 + Kea) ** 2 - 1)
                Jp = (alpha2p * B[i] - alpha2n * C[i]) / 20
                J += Jp
            return [f, J]
        else:
            # Nai drifts: report only its direction.
            if NA[-1] - NA[0] > 0:
                return [0, 1]
            else:
                return [0, -1]
# Sweep the stimulus current; for each I, search increasing pump scalings
# (turn) until find_increase reports sustained periodic spiking, and record
# the resulting frequency f and pump flux J (0, 0 when none is found).
I = np.arange(30, 200, 1)
f = []
J = []
for i in I:
    find = False
    for turn in np.arange(1, 61, 0.5):
        a = find_increase(i, turn, k1p, k1n, k2p, k2n, k3p, k3n, k4p, k4n)
        if a != None:
            if a[0] != 0:
                # Periodic solution found: keep the first workable 'turn'.
                f.append(a[0])
                J.append(a[1])
                print(i, turn, a)
                find = True
                break
    if find == False:
        f.append(0)
        J.append(0)
        print(i, [0, 0])
| [
"ysm@SMdeMacBook-Pro.local"
] | ysm@SMdeMacBook-Pro.local |
165e7dc760f0dca474059f542342f73228ee2ee4 | 7aebfaec6957ad67523f1d8851856af88fb997a6 | /catkin_ws/build/robotiq/robotiq_modbus_rtu/catkin_generated/pkg.develspace.context.pc.py | 2fea7493bdaa8d6fad68cdd3e90a1c93c073d9a2 | [] | no_license | k-makihara/ROS | 918e79e521999085ab628b6bf27ec28a51a8ab87 | 45b60e0488a5ff1e3d8f1ca09bfd191dbf8c0508 | refs/heads/master | 2023-01-28T06:00:55.943392 | 2020-11-26T05:27:16 | 2020-11-26T05:27:16 | 316,127,707 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 384 | py | # generated from catkin/cmake/template/pkg.context.pc.in
# Auto-generated catkin devel-space package context for robotiq_modbus_rtu;
# values were substituted from the pkg.context.pc.in template at build time.
CATKIN_PACKAGE_PREFIX = ""
PROJECT_PKG_CONFIG_INCLUDE_DIRS = "".split(';') if "" != "" else []  # no exported include dirs
PROJECT_CATKIN_DEPENDS = "rospy".replace(';', ' ')  # run dependencies, space-separated
PKG_CONFIG_LIBRARIES_WITH_PREFIX = "".split(';') if "" != "" else []  # no exported libraries
PROJECT_NAME = "robotiq_modbus_rtu"
PROJECT_SPACE_DIR = "/home/mslab/catkin_ws/devel"
PROJECT_VERSION = "1.0.0"
| [
"makihara@ms.esys.tsukuba.ac.jp"
] | makihara@ms.esys.tsukuba.ac.jp |
1a32485f4e0b1fc540078bd49427b8f148e99615 | 8f9bb71fdf79d857363f93cb3e5f1fd77d49249c | /lesson02Arrays/OddOccurrencesInArray/solution.py | c7cc93eff20256aca9e54b3ddc28f9f7a422583a | [
"MIT"
] | permissive | wandesky/codility | 22047c6be5f9bb494675104a58a29de53012f155 | c13d91ca828c37a7673d529743b4d678910eb611 | refs/heads/master | 2020-03-24T11:58:26.702171 | 2018-08-06T14:38:07 | 2018-08-06T14:38:07 | 142,700,072 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,005 | py | # you can write to stdout for debugging purposes, e.g.
# print("this is a debug message")
def solution(A):
    """Return the value that occurs an odd number of times in A.

    Codility OddOccurrencesInArray: A is a list of integers in which every
    value appears an even number of times except exactly one element.
    XOR-ing all elements cancels the paired values and leaves the unpaired
    one, in O(n) time and O(1) extra space (the original sorted a copy of
    the array and scanned adjacent pairs, printing debug output).

    Returns 0 for an empty array and A[0] for a single element, preserving
    the original guards.
    """
    unpaired = 0
    for value in A:
        unpaired ^= value
    return unpaired
| [
"bwandesky@gmail.com"
] | bwandesky@gmail.com |
bf814021ff4d629a5429b06dfb6b3ece4e5d5908 | eea993e5460e7f9a98ee192ce72ddb06bf9d4799 | /machineLearning/venv/bin/gifmaker.py | 05a02ecbfc40bee6d78983b916c8c1c2774f0e80 | [] | no_license | drmartens/tenderDemo | 72983129057f225d1c04cf7cda4f237a481ad2d9 | 033d819d5522a24e28c71b26c9e8b50d2d81cc2d | refs/heads/master | 2021-05-12T04:41:57.164841 | 2018-01-11T23:25:15 | 2018-01-11T23:25:15 | 117,165,336 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 686 | py | #!/Users/DocMartens/Documents/ideo/tensorBot/venv/bin/python3.6
#
# The Python Imaging Library
# $Id$
#
# convert sequence format to GIF animation
#
# history:
# 97-01-03 fl created
#
# Copyright (c) Secret Labs AB 1997. All rights reserved.
# Copyright (c) Fredrik Lundh 1997.
#
# See the README file for information on usage and redistribution.
#
from __future__ import print_function
from PIL import Image
if __name__ == "__main__":
    import sys
    # Expect exactly two positional arguments: input sequence, output GIF.
    if len(sys.argv) < 3:
        print("GIFMAKER -- create GIF animations")
        print("Usage: gifmaker infile outfile")
        sys.exit(1)
    im = Image.open(sys.argv[1])
    # save_all=True writes every frame of the sequence, producing the animation.
    im.save(sys.argv[2], save_all=True)
| [
"martd371@newschool.edu"
] | martd371@newschool.edu |
34dae5b511667fb682d512f68ccc45fcbf454285 | a5f0d40a7f3beae80bf47053792d80621b2705e9 | /D_A_T/UDI/read_smx_sheet/templates/testing_script_02.py | 94fc576c8635f80b791cb334d73cdb8b4c3502b9 | [] | no_license | marcEssam48/DataProfiling_TestingAutomation_Tool | db47131054636fd2c4426921c845f6010d1dee38 | bd3b8cc2611585275c99c08e3692500cbaf20963 | refs/heads/master | 2023-05-28T06:48:43.754772 | 2021-06-07T11:57:53 | 2021-06-07T11:57:53 | 372,188,067 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,549 | py | from D_A_T.UDI.read_smx_sheet.app_Lib import functions as funcs
from D_A_T.UDI.read_smx_sheet.Logging_Decorator import Logging_decorator
@Logging_decorator
def source_testing_script(cf, source_output_path, source_name, Table_mapping, Core_tables):
    """Generate a SQL testing script for one source's CORE table mappings.

    For every CORE-layer table mapping of *source_name*, writes a query that
    anti-joins the mapping's input view against the core table on the core
    table's primary-key columns, returning input rows that are missing from
    the target (b.process_name is null).

    :param cf: configuration object (INPUT_VIEW_DB, core_table names).
    :param source_output_path: directory where the .sql file is written.
    :param source_name: source system to filter Table_mapping on.
    :param Table_mapping: DataFrame of the SMX 'Table mapping' sheet.
    :param Core_tables: DataFrame of the SMX 'Core tables' sheet.
    """
    file_name = funcs.get_file_name(__file__)
    f = funcs.WriteFile(source_output_path, file_name, "sql")
    ########################################
    # TARGET_TABLE_NAME Table mapping done
    # TABLE_MAPPING_NAME Table mapping done
    # SOURCE_TABLE Table mapping done
    # TARGET_COLUMN_NAME Column mapping done
    # NATURAL_KEY STG tables done
    # PHYSICAL_NAME BKEY done
    # KEY_DOMAIN_ID_VALUE BKEY done
    #
    # select a.*
    # from input_view a
    # left join base_table b
    # on key_join
    # where b.process_name is null
    ########################################
    # Anti-join template filled per table mapping below.
    script = """select a.*\nfrom {input_view} a\nleft join {base_table} b\n\ton {key_join}\nwhere b.process_name is null;"""
    # 1- get all CORE-layer mappings for this source
    process_type = "TXF"
    Table_mapping_df = Table_mapping[(Table_mapping['Source'] == source_name)
                                     & (Table_mapping['Layer'] == 'CORE')][['Target table name', 'Mapping name', 'Layer']]
    Table_mapping_df = Table_mapping_df.sort_values(['Target table name', 'Mapping name'])
    for Table_mapping_df_index, Table_mapping_df_row in Table_mapping_df.iterrows():
        layer = Table_mapping_df_row['Layer']
        TARGET_TABLE_NAME = Table_mapping_df_row['Target table name']
        TABLE_MAPPING_NAME = Table_mapping_df_row['Mapping name']
        # Input-view and core-table fully qualified names.
        inp_view = cf.INPUT_VIEW_DB + "." + process_type + "_" + layer + "_" + TABLE_MAPPING_NAME + "_IN"
        core_table = cf.core_table + "." + TARGET_TABLE_NAME
        # Primary-key columns of the target core table drive the join.
        key_columns = Core_tables[(Core_tables['Table name'] == TARGET_TABLE_NAME) &
                                  (Core_tables['PK'] == "Y")]['Column name'].tolist()
        complete_on_clause = ""
        for index, i in enumerate(key_columns):
            on_clause = " a.{}=b.{} "
            if index == 0:
                and_ = ""
            else:
                and_ = "\n\tand"
            complete_on_clause = complete_on_clause + and_ + on_clause.format(i, i)
        script_ = script.format(input_view=inp_view, base_table=core_table, key_join=complete_on_clause)
        f.write(script_.strip()+"\n\n")
    f.close()
| [
"marcessam48_625@fci.helwan.edu.eg"
] | marcessam48_625@fci.helwan.edu.eg |
5f93f5ba5709b9d176ed6b582674e10ba7ffe927 | 6d45310b34763d6455b1a193e2b832fd57ec92c4 | /Class1_Python3/example_00130_show_types_quiz.py | 22e60f8be9551ce3319bae7c66a3e5f4e969ce11 | [] | no_license | katharinameislitzer/smartninjacourse | cf1bb53b5171622c4e6a2908a08d4ba369ebfc12 | 6923d4c983647f57ea1241047d8583e841083cd3 | refs/heads/master | 2023-05-14T14:50:48.861437 | 2019-12-21T17:42:22 | 2019-12-21T17:42:22 | 212,634,382 | 0 | 0 | null | 2023-05-02T18:37:35 | 2019-10-03T17:06:59 | HTML | UTF-8 | Python | false | false | 509 | py | print(str(1))
print(type(1)) # <class 'int'> (original comment wrongly said str)
eingabe_1 = "1"
eingabe_2 = "2"
print(3 + 4) # 7 — int addition
print(eingabe_1 + eingabe_2) # string concatenation: "12"
print(int(eingabe_1) + int(eingabe_2)) # 3
print(float(eingabe_1) + float(eingabe_2)) # 3.0
resultat_1 = int(eingabe_1) + int(eingabe_2) # 3
resultat_2 = float(eingabe_1) + float(eingabe_2) # 3.0
print(resultat_1, resultat_2) # 3 3.0
print(resultat_1 == resultat_2) # True — equal in value, not the same object
print(resultat_1 is resultat_2) # False — int vs float, different objects
print(1 is 1) # True (CPython caches small ints; 'is' on literals is discouraged)
| [
"katharina.meislitzer@otago.at"
] | katharina.meislitzer@otago.at |
a356adb736988a633389a711031008f53ac4c626 | a76d9d1f8d8c2d5c5762df8091c384ae39a0c840 | /Exp_5/ML5.py | 662232258738992648b2b6c8cf5f2646d9f3fc5c | [] | no_license | technoindianjr/Machine-Learning-Lab---6CS4-22 | 4adb1091991bdc2e3e7da7970e088076ccf738fa | d44ac428a96ff3a167e894a063067f1e66239976 | refs/heads/main | 2023-02-25T13:20:46.324083 | 2021-02-01T06:58:03 | 2021-02-01T06:58:03 | 333,660,237 | 0 | 2 | null | null | null | null | UTF-8 | Python | false | false | 3,945 | py | # -*- coding: utf-8 -*-
"""
@author: aaditya025
"""
import csv
import random
import math
def loadcsv(filename):
    """Load a numeric CSV file into a list of rows of floats.

    :param filename: path to a CSV file whose every cell parses as float.
    :return: list of rows, each a list of floats.
    """
    # Context manager closes the file deterministically (the original left
    # the handle open until garbage collection); newline="" is the mode the
    # csv module documents for reader input.
    with open(filename, "r", newline="") as handle:
        dataset = [[float(cell) for cell in row] for row in csv.reader(handle)]
    return dataset
def splitdataset(dataset, splitratio):
    """Randomly partition *dataset* into [training set, test set].

    The training set receives int(len(dataset) * splitratio) rows drawn at
    random without replacement; the remaining rows form the test set.
    """
    target = int(len(dataset) * splitratio)
    remaining = list(dataset)
    training = []
    for _ in range(target):
        pick = random.randrange(len(remaining))
        training.append(remaining.pop(pick))
    return [training, remaining]
def separatebyclass(dataset):
    """Group instances by their class label (the last element of each row).

    :return: dict mapping each class value (e.g. 0 or 1) to the list of
        instances carrying that label, in original order.
    """
    grouped = {}
    for instance in dataset:
        label = instance[-1]
        grouped.setdefault(label, []).append(instance)
    return grouped
def mean(numbers):
    """Arithmetic mean of a non-empty sequence of numbers."""
    count = float(len(numbers))
    return sum(numbers) / count
def stdev(numbers):
    """Sample standard deviation (Bessel-corrected, n-1 denominator).

    Requires at least two values; a single value divides by zero.
    """
    avg = mean(numbers)
    total_sq = sum((v - avg) ** 2 for v in numbers)
    return math.sqrt(total_sq / float(len(numbers) - 1))
def summarize(dataset):
    """Per-attribute (mean, stdev) pairs for a list of rows.

    zip(*dataset) transposes rows into columns; the final entry is removed
    because the last column holds the class label, not an attribute.
    """
    stats = [(mean(column), stdev(column)) for column in zip(*dataset)]
    stats.pop()  # drop the label column's statistics
    return stats
def summarizebyclass(dataset):
    """Map each class label to its per-attribute (mean, stdev) summaries.

    This is the Gaussian naive-Bayes "model": one (mean, stdev) pair per
    attribute, estimated separately for every class.
    """
    grouped = separatebyclass(dataset)
    return {label: summarize(rows) for label, rows in grouped.items()}
def calculateprobability(x, mean, stdev):
    """Gaussian (normal) probability density of x for the given mean/stdev."""
    exponent = math.exp(-(math.pow(x - mean, 2) / (2 * math.pow(stdev, 2))))
    scale = 1 / (math.sqrt(2 * math.pi) * stdev)
    return scale * exponent
def calculateclassprobabilities(summaries, inputvector):
    """Naive-Bayes likelihood of inputvector under each class model.

    summaries maps class label -> list of per-attribute (mean, stdev);
    the likelihood is the product of per-attribute Gaussian densities.
    """
    probs = {}
    for label, attr_stats in summaries.items():
        likelihood = 1
        for idx, (mu, sigma) in enumerate(attr_stats):
            # Multiply in the density of this attribute's observed value.
            likelihood *= calculateprobability(inputvector[idx], mu, sigma)
        probs[label] = likelihood
    return probs
def predict(summaries, inputvector):
    """Return the class label whose model gives inputvector the highest likelihood."""
    probs = calculateclassprobabilities(summaries, inputvector)
    best_label, best_prob = None, -1
    for label, p in probs.items():
        # Strict > keeps the first label seen in case of exact ties.
        if best_label is None or p > best_prob:
            best_prob = p
            best_label = label
    return best_label
def getpredictions(summaries, testset):
    """Classify every row of testset, returning the labels in order."""
    return [predict(summaries, row) for row in testset]
def getaccuracy(testset, predictions):
    """Percentage of rows whose actual label (last column) matches the prediction."""
    matched = 0
    for idx in range(len(testset)):
        if testset[idx][-1] == predictions[idx]:
            matched += 1
    return (matched / float(len(testset))) * 100.0
def main():
    """Train a Gaussian naive-Bayes model on naivedata.csv and report accuracy."""
    filename = 'naivedata.csv'
    splitratio = 0.67
    dataset = loadcsv(filename)
    trainingset, testset = splitdataset(dataset, splitratio)
    print('Split {0} rows into train={1} and test={2} rows'.format(len(dataset), len(trainingset), len(testset)))
    # Build per-class (mean, stdev) summaries from the training split.
    summaries = summarizebyclass(trainingset)
    # Classify the held-out rows and measure how often the label matches.
    predictions = getpredictions(summaries, testset)
    accuracy = getaccuracy(testset, predictions)
    print('Accuracy of the classifier is : {0}%'.format(accuracy))
main() | [
"aaditya025@gmail.com"
] | aaditya025@gmail.com |
e551d79d4076e2962ead5bb1ae1c2baadfab55b5 | 8fa3dd685570864f941336de396e459d5cee4222 | /setup.py | 1d8d624473005a5b79b2a01dbbb96f27714982a5 | [] | no_license | sethschori/emailr | c56f2e4fdaebc7281c816dbaf6b66cb815c54f9f | 2d24a206f05d8f993bf6845db1de96678ed33026 | refs/heads/master | 2021-06-30T23:48:00.997937 | 2018-08-15T02:14:41 | 2018-08-15T02:14:41 | 96,313,350 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 188 | py | __author__ = 'Seth Schori'
from setuptools import setup
# Packaging metadata for the emailr Flask package.
setup(
    name='emailr',
    packages=['emailr'],
    include_package_data=True,  # ship non-Python files listed in MANIFEST.in
    install_requires=[
        'flask',
    ],
)
"sethschori@users.noreply.github.com"
] | sethschori@users.noreply.github.com |
0db3feac1f527e1525c754b36fdede32dbd4ff7f | ddbf6b134fa6440832c65429a1d419cd4165b5de | /poetry_workspace_plugin/console/commands/new.py | ddcda3b5b77c9e822029510f4665c817026f3fd5 | [
"MIT"
] | permissive | linooohon/poetry-workspace-plugin | f5a79b385195f5ca2e0e19eef72b5595772832d3 | 2d52640fe5fe2dad0c3c10ecec0d752a028fcc8d | refs/heads/main | 2023-09-03T20:52:27.722203 | 2021-10-24T15:42:12 | 2021-10-24T15:42:12 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 830 | py | from pathlib import Path
from poetry.console.commands.new import NewCommand
from poetry_workspace_plugin.helpers import get_workspaces_table
class WorkspaceNewCommand(NewCommand):
    """`poetry workspace new`: scaffold a project and register it as a workspace."""

    name = "workspace new"
    description = "Creates a new Python project at <path>, tracked as a workspace in the current project."

    def handle(self) -> int:
        """Create the project, then record it in the workspaces table.

        Returns 0 on success, 1 if the workspace name is already registered.
        """
        target = Path(self.argument("path"))
        # The --name option wins; otherwise fall back to the directory name.
        name = self.option("name") or target.name
        pyproject = self.poetry.file.read()
        workspaces = get_workspaces_table(pyproject)
        if name in workspaces:
            self.line(f"<fg=red>Workspace already registered with name <options=bold>{name}</></>")
            return 1
        # Delegate the actual project scaffolding to the stock `new` command.
        super().handle()
        workspaces[name] = str(target)
        self.poetry.file.write(pyproject)
        return 0
| [
"jack.je.smith@gmail.com"
] | jack.je.smith@gmail.com |
e32832429f721a159d8953b0ee70408f86ba61a5 | 48e56f24a3162d3a0657d9eae3f253903c6cb701 | /有效的字母异位词.py | 0054eb48ffc4ecd9431b1d648763eefc7870534a | [
"MIT"
] | permissive | 15586969295/LeetCode | 376fdfd125d28f6ea9c7b7f9b30abbd239578998 | 140cf6dea774a0297d72710e1210b3e1c9710319 | refs/heads/master | 2020-04-02T16:30:49.620886 | 2018-10-29T08:53:07 | 2018-10-29T08:53:07 | 154,615,629 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 390 | py | # Given two strings s and t , write a function to determine if t is an anagram of s.
# (Translated) Given two strings s and t, write a function to determine whether t is an anagram of s.
class Solution(object):
    def isAnagram(self, s, t):
        """Return True if t is an anagram of s (same letters, same counts).

        :type s: str
        :type t: str
        :rtype: bool

        Fix: the original wrapped each string in "".join(...), which is a
        no-op on strings; sorting the inputs directly is equivalent.
        """
        return sorted(s) == sorted(t)
| [
"42865648+15586969295@users.noreply.github.com"
] | 42865648+15586969295@users.noreply.github.com |
552a63ae90a6ce0d7576b7cc7ae6088f813e86ea | 78aa4ca975b00d21361fae11c1d7e0ca0d7e3b07 | /tests/test_config.py | ec1c9d59778af0cac55277ad15714e5cd1f64bff | [] | no_license | sakhuja/cookie_lover | 9d11f07c0de9b020960591e7b02f3d88655fbc65 | 8469d6d232c542db173ce952a71a7be54a495bb9 | refs/heads/master | 2021-01-22T05:00:56.255234 | 2014-05-21T00:34:53 | 2014-05-21T00:34:53 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 546 | py | # -*- coding: utf-8 -*-
from cookie_flaskApp.app import create_app
from cookie_flaskApp.settings import ProdConfig, DevConfig
def test_production_config():
    """Production config must disable every debug facility."""
    config = create_app(ProdConfig).config
    assert config['ENV'] == 'prod'
    assert config['DEBUG'] is False
    assert config['DEBUG_TB_ENABLED'] is False
    assert config['ASSETS_DEBUG'] is False
def test_dev_config():
    """Development config must enable debugging and asset rebuilding."""
    config = create_app(DevConfig).config
    assert config['ENV'] == 'dev'
    assert config['DEBUG'] is True
    assert config['ASSETS_DEBUG'] is True
"asakhuja@paypal.com"
] | asakhuja@paypal.com |
f3d1de23c937418d9d66cee322518ae815b1b97d | 942ee5e8d54e8ebe9c5c841fbfdd1da652946944 | /1001-1500/1354.Construct Target Array With Multiple Sums.2.py | 937d34cc801eb03e13440f0ce5a28b99a2374341 | [] | no_license | kaiwensun/leetcode | 0129c174457f32887fbca078fb448adce46dd89d | 6b607f4aae3a4603e61f2e2b7480fdfba1d9b947 | refs/heads/master | 2023-08-31T07:30:50.459062 | 2023-08-27T07:59:16 | 2023-08-27T07:59:16 | 57,526,914 | 69 | 9 | null | 2023-08-20T06:34:41 | 2016-05-01T05:37:29 | Python | UTF-8 | Python | false | false | 640 | py | import heapq
class Solution(object):
    def isPossible(self, target):
        """LeetCode 1354: can `target` be built from all-ones by repeatedly
        replacing one element with the sum of the whole array?

        Works backwards with a max-heap: the current largest element must
        have been produced by a sum step, so undo that step (using modular
        arithmetic to batch repeated undo steps against the same runner-up).

        :type target: List[int]
        :rtype: bool
        """
        total = sum(target)
        heap = [-v for v in target]   # negate for a max-heap via heapq
        heapq.heapify(heap)
        while heap[0] != -1:
            largest = -heapq.heappop(heap)
            runner_up = -heap[0] if heap else 0
            previous = largest * 2 - total
            step = largest - previous
            # Equal top two values (with largest > 1) or a zero step can
            # never be unwound back to all ones.
            if largest == runner_up or step == 0:
                return False
            # Batch the undo: shrink `largest` until it drops to or below
            # the runner-up, in one modular jump.
            previous = runner_up + (largest - runner_up) % -step
            if previous < 1:
                return False
            total -= (largest - previous)
            heapq.heappush(heap, -previous)
        return True
| [
"noreply@github.com"
] | kaiwensun.noreply@github.com |
db6ad3596c5cd2741f04704f68abcc10970200d6 | cfa9162568f78170821ac4e394cb34eed28cc1ef | /PyScripts/getopt_example.py | 5ebd0d7c5167f1b831bdf4a081f9e9fe00dc863c | [] | no_license | Allenhe123/MyPython2.0 | f239a698ae0c4a205a465f3d12d5d03b50f4fa4a | 52605ee7265fc64f9e8b78930a3d6af71d7b66f0 | refs/heads/master | 2021-05-09T23:37:38.795642 | 2018-01-24T17:19:15 | 2018-01-24T17:19:15 | 118,797,082 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 934 | py | # -*- coding: UTF-8 -*-
import sys
import getopt
# Echo the script name and every command-line argument (Python 2 syntax).
print "script name: ", sys.argv[0]
print 'param number: ', len(sys.argv) - 1
for i in range(1, len(sys.argv)):
    print "param ", i, sys.argv[i]
#################
# getopt.getopt returns (options, args): options is a list of (flag, value)
# tuples, args is the list of leftover positional arguments.
# Inputs: the argument list, the short-option spec string, the long-option list.
# A colon (:) after a short option letter means that option requires a value;
# without a colon the option takes no value.
# An equals sign (=) after a long option name means the option requires a value
# when supplied; otherwise it takes no value.
# python getopt_example.py -a -b -c ccc -o ooo -v --output=output --verbose --version=version --file file param1 param2 param3
try:
    options, args = getopt.getopt(sys.argv[1:], 'abc:o:v', ['output=', 'verbose', 'version=', 'file='])
    print 'options: ', options
    print 'args: ', args
except getopt.GetoptError as err:
    print 'ERROR:', err
    sys.exit(1)
"931317255@qq.com"
] | 931317255@qq.com |
a89ea6c1e3c3fd93f0aafd5c58a3a7438c09e1ea | 2285ce1440325b47c8a81d4d2f3c24a2e309ab94 | /Hw5_PRF/rocchio_vsm.py | ea37c4875319c647edf5a94cf9d74a8149a134e3 | [] | no_license | Lillian0114/information-retrieval | 0c7116652032209db94261d2424a4dc0c3dc0f1e | c3aa47937e948f8d230c111f0551e75a861f5e43 | refs/heads/main | 2023-03-22T03:40:10.280138 | 2021-03-07T15:59:50 | 2021-03-07T15:59:50 | 345,375,472 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,833 | py | import os
import math
import copy
import re
import time
import nltk
from tqdm import tqdm
from nltk.corpus import stopwords
from nltk.stem import SnowballStemmer
import numpy as np
from sklearn.metrics.pairwise import cosine_similarity
snowball_stemmer = SnowballStemmer('english')
stop_words = set(stopwords.words('english'))
root_path = 'D:/Python_test/IR/Hw5_PRF/ir_hw5_data/'
class RocchioAlgo:
    """Vector-space retrieval with Rocchio pseudo-relevance feedback.

    Builds TF-IDF vectors for a document collection and a query set, expands
    each query with terms from its pseudo-(non-)relevant documents, ranks
    documents by cosine similarity, and keeps the top `rank_amount` results
    per query for writing to disk.
    """
    def __init__(self,rank_amount):
        # rank_amount: how many ranked document names to keep per query.
        self.stop_words = set(stopwords.words('english'))
        self.doc_name = []
        self.query_name = []
        # self.document = {}
        self.document = []
        # self.query = {}
        self.query = []
        self.rank_amount = rank_amount
        self.docidf = {}
        self.querryidf = {}
        self.ans = []
    def readfile(self):
        """Tokenize, stem and count every document and query under root_path.

        Fills self.document / self.query with per-item term dicts (documents
        get log-normalized TF) and accumulates document frequencies in
        self.docidf / self.querryidf.
        """
        global root_path
        #read doc
        with open(root_path + 'doc_list.txt', 'r') as L:
            for filename in tqdm(L):
                filename = filename.strip('\n')
                path = root_path + 'docs/'
                file = path+filename+".txt"
                self.doc_name.append(filename)
                with open(file, 'r') as f:
                    listOfWords = []
                    # NOTE(review): each iteration overwrites listOfWords, so
                    # only the file's final line is tokenized -- confirm that
                    # document files are single-line.
                    for lines in f.readlines():
                        listOfWords = nltk.word_tokenize(lines)
                    f.close()
                doc_dict = {}
                for w in listOfWords:
                    w = snowball_stemmer.stem(w) # part of speech stem
                    if w not in self.stop_words:
                        doc_dict[w] = doc_dict.get(w, 0.) + 1.0 # TF
                for wc in doc_dict:
                    # IDF preprocessing (document frequency count)
                    self.docidf[wc] = self.docidf.get(wc, 0.0)+1.0
                    # TF ---- Log Normalization
                    doc_dict[wc] = 1 + math.log(doc_dict.get(wc, 0.0), 2)
                self.document.append(doc_dict)
        #read query
        with open(root_path + 'query_list.txt', 'r') as Q:
            for queryfilename in tqdm(Q):
                queryfilename = queryfilename.strip('\n')
                path = root_path + 'queries/'
                file = path+queryfilename+".txt"
                self.query_name.append(queryfilename)
                with open(file, 'r') as f:
                    listOfWords = []
                    for lines in f.readlines():
                        listOfWords = nltk.word_tokenize(lines)
                    f.close()
                query_dict = {}
                for w in listOfWords:
                    w = snowball_stemmer.stem(w)
                    if w not in self.stop_words:
                        query_dict[w] = (query_dict.get(w, 0.) + 1.0) # TF
                for wc in query_dict:
                    # IDF preprocessing (query-side document frequency count)
                    self.querryidf[wc] = self.querryidf.get(wc, 0.0)+1.0
                self.query.append(query_dict)
        print('read file down')
    def pseudo_read(self,pseudo_docname):
        """Parse a previously written ranking file.

        Returns one list of document names per query, skipping the CSV
        header line; the trailing pops strip the newline artifacts and the
        leading query name from each split row.
        """
        pseudo_list = []
        with open(pseudo_docname, 'r') as pseudo_file:
            for line in pseudo_file:
                if line == 'Query,RetrievedDocuments\n':
                    continue
                pseudo_name = re.split(',| |\n', line)
                pseudo_name.pop()
                pseudo_name.pop()
                pseudo_name.pop(0)
                pseudo_list.append(pseudo_name)
        return pseudo_list
    def pseudo_relevant_doc(self,first):
        """Merge the term counts of each query's pseudo-relevant documents.

        `first` is a list (one entry per query) of document-name lists;
        returns one combined term->weight dict per query.
        """
        relevant_doc_word = []
        for d_list in first:
            total_voc = {}
            for d_name in d_list:
                temp = copy.deepcopy(self.document[self.doc_name.index(d_name)])
                for temp_word in temp:
                    if temp_word not in total_voc:
                        total_voc[temp_word] = temp[temp_word]
                    else:
                        total_voc[temp_word] += temp[temp_word]
            relevant_doc_word.append(total_voc) # query k relevant doc all word
        return relevant_doc_word
    def pseudo_non_relevant_doc(self,first):
        """Same merge as pseudo_relevant_doc, for pseudo-NON-relevant documents."""
        nonrelevant_doc_word = []
        for d_list in first:
            total_voc = {}
            for d_name in d_list:
                temp = copy.deepcopy(self.document[self.doc_name.index(d_name)])
                for temp_word in temp:
                    if temp_word not in total_voc:
                        total_voc[temp_word] = temp[temp_word]
                    else:
                        total_voc[temp_word] += temp[temp_word]
            nonrelevant_doc_word.append(total_voc)
        return nonrelevant_doc_word
    def rocchioalgor(self, alpha, beta, gamma, relevant_doc, file_num, non_rel_doc, non_file_num):
        """Run the full pipeline: TF-IDF weighting then Rocchio + ranking.

        alpha/beta/gamma are the Rocchio weights for the original query,
        the relevant centroid and the non-relevant centroid respectively;
        file_num / non_file_num are the corresponding document counts.
        Stores the per-query rankings in self.ans.
        """
        Doc_tfidf, Q_tfidf, rel_tfidf, non_rel_tfidf = self.tf_idf_with_rel(relevant_doc, non_rel_doc)
        print('tfidf down')
        start = time.time()
        self.ans = self.ROCCHIO(Doc_tfidf, Q_tfidf, rel_tfidf, non_rel_tfidf, alpha, beta, gamma, file_num, non_file_num)
        print('rocchio down')
        print(time.time() - start)
    def tf_idf_with_rel(self, relevant_doc, non_rel_doc):
        """Convert raw term weights to TF-IDF for docs, queries and feedback sets.

        NOTE(review): every `range(0, len(x)-1)` below skips the final
        element, so the last vocabulary key of each dict is never weighted --
        this looks like an off-by-one; confirm before relying on the scores.
        """
        N = len(self.doc_name)+1
        Doc_idf = self.docidf.copy()
        x = list(Doc_idf.keys())
        for i in range(0,len(Doc_idf)-1):
            Doc_idf[x[i]] = math.log10(N/(Doc_idf[x[i]]+1)) #idf
        Doc_tfidf = copy.deepcopy(self.document)
        for j in range(0,len(Doc_tfidf)-1):
            x = list(self.document[j].keys())
            for i in range(0,len(x)-1):
                Doc_tfidf[j][x[i]] = Doc_tfidf[j][x[i]]*Doc_idf[x[i]] #tf*idf
        Q_tfidf = copy.deepcopy(self.query)
        for j in range(0, len(Q_tfidf)-1):
            x = list(self.query[j].keys())
            for i in range(0, len(x)-1):
                if(x[i] in Doc_idf):
                    Q_tfidf[j][x[i]] = self.query[j][x[i]]*Doc_idf[x[i]] #tf*idf
                else:
                    Q_tfidf[j][x[i]] = 0
        rel_tfidf = copy.deepcopy(relevant_doc)
        for j in range(0,len(rel_tfidf)-1):
            x = list(relevant_doc[j].keys())
            for i in range(0,len(x)-1):
                if(x[i] in Doc_idf):
                    rel_tfidf[j][x[i]] = relevant_doc[j][x[i]]*Doc_idf[x[i]] #tf*idf
                else:
                    rel_tfidf[j][x[i]] = 0
        non_rel_tfidf = copy.deepcopy(non_rel_doc)
        # NOTE(review): this loop bounds on len(rel_tfidf), not
        # len(non_rel_tfidf) -- confirm the two lists are the same length.
        for j in range(0,len(rel_tfidf)-1):
            x = list(non_rel_doc[j].keys())
            for i in range(0,len(x)-1):
                if(x[i] in Doc_idf):
                    non_rel_tfidf[j][x[i]] = non_rel_doc[j][x[i]]*Doc_idf[x[i]] #tf*idf
                else:
                    non_rel_tfidf[j][x[i]] = 0
        return Doc_tfidf, Q_tfidf, rel_tfidf, non_rel_tfidf
    def ROCCHIO(self, Doc_tfidf, Q_tfidf, rel_tfidf, non_rel_tfidf, alpha, beta, gamma, file_num, non_file_num):
        """Rocchio query expansion + cosine ranking.

        For each query: scale the original vector by alpha, add the
        beta-weighted relevant centroid (the gamma/non-relevant term is
        currently disabled), then rank all documents by cosine similarity
        and keep the top self.rank_amount names.
        """
        Ans_T = []
        for q, old_que_dic in enumerate(Q_tfidf):
            # Progress logging every 50 queries.
            if q % 50 == 0:
                print(time.strftime("%D,%H:%M:%S"))
                print(q)
            Sim = []
            que_dic = copy.deepcopy(old_que_dic) # new_q
            for w in que_dic:
                que_dic[w] *= alpha
            for rel in rel_tfidf[q]:
                if rel in que_dic:
                    que_dic[rel] += beta * rel_tfidf[q][rel] / file_num
                else:
                    que_dic[rel] = beta * rel_tfidf[q][rel] / file_num
            # Negative (gamma) feedback intentionally disabled:
            # for nonrel in non_rel_tfidf[q]:
            #     if nonrel in que_dic:
            #         que_dic[nonrel] += gamma * non_rel_tfidf[q][nonrel] / non_file_num
            #     else:
            #         que_dic[nonrel] = gamma * non_rel_tfidf[q][nonrel] / non_file_num
            #cosine_similarity
            for doc_dic in Doc_tfidf:
                a, b = 0, 0
                for que_voc in que_dic:
                    if que_dic[que_voc] == 0:
                        continue
                    if que_voc in doc_dic:
                        a += que_dic[que_voc] * doc_dic[que_voc] # dot-product numerator
                        b += pow(que_dic[que_voc], 2) # query norm (denominator part 1)
                c = sum(pow(doc_dic[doc_voc], 2) for doc_voc in doc_dic) # document norm (denominator part 2)
                Sim.append(a / (math.sqrt(b)*math.sqrt(c)))
            # arrayQuery = np.array(que_dic)
            # arrayDoc = np.array(Doc_tfidf)
            # Sim.append( cosine_similarity([arrayQuery], [arrayDoc]) )
            Sim_sort = sorted(Sim, reverse=True)
            Ans = []
            for i in range(0, self.rank_amount):
                Ans.append(self.doc_name[Sim.index(Sim_sort[i])] )
            Ans_T.append(Ans)
        return Ans_T
    def writeAns(self, file_name):
        """Write self.ans as '<query>,<doc> <doc> ...' lines to <file_name>.txt."""
        with open(str(file_name) + '.txt', 'w') as file:
            file.write("Query,RetrievedDocuments\n")
            for i in range(0, len(self.query_name)):
                file.write(str(self.query_name[i]) + ',')
                for num, j in enumerate(self.ans[i]):
                    if num < self.rank_amount:
                        file.write(str(j) + ' ')
                    else:
                        break
                file.write('\n')
# Driver: index the corpus/queries, load BM25 pseudo-(non-)relevant rankings,
# expand each query with its top-10 pseudo-relevant documents (alpha=1,
# beta=0.2, gamma=0 so negative feedback is unused), and write the results.
rocchio_cal = RocchioAlgo(5001) # rank_amount
rocchio_cal.readfile()
pseudodoc = rocchio_cal.pseudo_read('D:/Python_test/IR/bm25_relevant_10_0.07.txt')
pseudonondoc = rocchio_cal.pseudo_read('D:/Python_test/IR/bm25_non_relevant_2.txt')
new_que = rocchio_cal.pseudo_relevant_doc(pseudodoc)
non_rel = rocchio_cal.pseudo_non_relevant_doc(pseudonondoc)
rocchio_cal.rocchioalgor(1, 0.2, 0, new_que, 10, non_rel, 0) #relevant doc count is 10
rocchio_cal.writeAns('rocchio_final')
"32055946+Lillian0114@users.noreply.github.com"
] | 32055946+Lillian0114@users.noreply.github.com |
f08c32dd69383683c0c68f47aae43fbbfd90ddc0 | 518803820a50699997b8288cf56e0cfdfe6d1b1f | /dot.py | 19ee35f5bffb1c6a8fc8f3059faba241714de5c1 | [] | no_license | giri711dharan/python | fe5e2013989a1485a2338cf527d37fe9093cde70 | f9a9e16636ff73753f2e9099ba1e33de9392a12c | refs/heads/master | 2021-04-06T03:20:46.048372 | 2019-08-14T10:21:47 | 2019-08-14T10:21:47 | 124,636,986 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 34 | py | m=str(input(""))
print(m,end=".")  # echo the line read above, terminated with "." instead of a newline
| [
"noreply@github.com"
] | giri711dharan.noreply@github.com |
4487da882f8609274ab7b6944f8c882c2b813ceb | 6693024778d4f3f3530abedd6a898363b09b69fd | /mysite/polls/migrations/0011_auto_20191103_2027.py | e675992a331ee99e00bf5f40e33be37f275411a3 | [] | no_license | mhRumi/Varsity-Circle-Kaliakoir-Society | d72fc98c48ed6011504aeabeb19b1f23611279a9 | bd56d1c1f3e9798fb47730566a2b2d915192464f | refs/heads/master | 2020-08-28T17:25:56.743667 | 2020-01-27T22:04:46 | 2020-01-27T22:04:46 | 217,769,037 | 6 | 0 | null | null | null | null | UTF-8 | Python | false | false | 870 | py | # Generated by Django 2.2.6 on 2019-11-03 20:27
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated Django migration: replace UploadPicture with a per-user Profile."""
    # Must run after the swappable user model and polls migration 0010.
    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('polls', '0010_auto_20191101_0757'),
    ]
    operations = [
        # Profile: one row per user, holding an avatar image.
        migrations.CreateModel(
            name='Profile',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('image', models.ImageField(default='user.png', upload_to='media')),
                ('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
        ),
        # The stand-alone picture model is superseded by Profile.
        migrations.DeleteModel(
            name='UploadPicture',
        ),
    ]
| [
"mh913350@gmail.com"
] | mh913350@gmail.com |
8acd7f0b84d63e7f93c370fddda0a52423c34f22 | 6a99547f767b942e2b51b79da0f23a990f3105d3 | /zyugyourobokon/build/navigation/catkin_generated/pkg.develspace.context.pc.py | 402bf6c8fe243528f6f26e4d058a75479241185b | [] | no_license | andokeisuke/NHK2020 | 588a1c0070bacaa98f10229252b40eb34c647345 | d6cb3f0c192141e9d87f4faaf7d1d4537ede4a3e | refs/heads/master | 2020-07-11T20:19:29.539799 | 2019-11-22T18:48:04 | 2019-11-22T18:48:04 | 204,635,311 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 375 | py | # generated from catkin/cmake/template/pkg.context.pc.in
# NOTE: auto-generated by catkin from pkg.context.pc.in -- do not edit by hand.
CATKIN_PACKAGE_PREFIX = ""
PROJECT_PKG_CONFIG_INCLUDE_DIRS = "".split(';') if "" != "" else []
PROJECT_CATKIN_DEPENDS = "".replace(';', ' ')
PKG_CONFIG_LIBRARIES_WITH_PREFIX = "".split(';') if "" != "" else []
PROJECT_NAME = "navigation"
PROJECT_SPACE_DIR = "/home/ando/zyugyourobokon/devel"
PROJECT_VERSION = "0.0.0"
| [
"kakipi.moai@gmail.com"
] | kakipi.moai@gmail.com |
3e4ce8d3fddb9541c77472aec59f38933c1ce84f | a56abbe4b201d5e44456f87fa66229283a2ec3ed | /Snake.py | baea6bd1397883a62b9f0d7dafd8c5f9909e3521 | [] | no_license | mqadimzadeh/Course-Projects | 1331b38730cbafede65dfb57ecb78194ca1df78d | f67d55f91749954728d7ca495873a94b7cbc0e5e | refs/heads/master | 2022-11-18T22:27:51.428635 | 2020-07-17T23:03:12 | 2020-07-17T23:03:12 | 280,531,774 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 36,141 | py | #
# Viper: A competative variant of Snake
#
# Goal: Be the last snake standing in each round.
#
# Scoring: Each time a snake crashes all of the snakes that have not yet
# crashed are awarded 1 point.
#
# Game End: The game ends at the end of the round in which one or more players
# reach 10 points. The player with the highest score is the winner.
# Ties can occur.
#
# Future Work: Right now the AI identifies the direction it should head
# by looking for the longest distance to a collision. But it
# doesn't check to see if it is possible to turn from its
# current heading to the heading that has the longest distance
# without colliding with anything. It would be better if the
# AI identified its direction based on the longest distance to
# a collision that can actually be reached based on its turning
# radius (which is based on its speed).
#
from SimpleGraphics import *
from math import sin, cos, tan, atan2, pi, sqrt, fabs, ceil, floor
from random import randrange
from time import time
from functools import partial, reduce
MAX_SCORE = 10 # Score a player must reach for the game to end
COUNTDOWN_DURATION = 1 # Seconds of countdown between rounds
FRAME_RATE = 30 # Target frames per second to maintain
BOUNDARY = [0, 0, 799, 0, 799, 599, 0, 599, 0, 0] # Closed polyline of points
                                                  # forming the screen edges
###############################################################################
##
## Functions to determine whether or not two line segments intersect (and
## where they intersect). The only function that needs to be called directly
## by a student is doIntersect. The other functions are called by the
## provided code.
##
## Adapted from
## https://www.geeksforgeeks.org/check-if-two-given-line-segments-intersect/
##
###############################################################################
#
# Do line segments (ax, ay, bx, by) and (cx, cy, dx, dy) intersect?
#
# Parameters:
# (ax, ay, bx, by): The end points of the first line segment
# (cx, cy, dx, dy): The end points of the second line segemnt
#
# Returns:
# True if the line segments intersect, False otherwise.
#
def doIntersect(ax, ay, bx, by, cx, cy, dx, dy):
    """Report whether segment (ax, ay)-(bx, by) intersects (cx, cy)-(dx, dy).

    Thin wrapper around doIntersectPos that discards the intersection point.
    """
    hit, _x, _y = doIntersectPos(ax, ay, bx, by, cx, cy, dx, dy)
    return hit
#
# Determine if a point lies on a line segment. The three points passed to
# this function must be co-linear.
#
# Parameters:
# (px, py): One end point of the line segment
# (qx, qy): The point to check
# (rx, ry): The other end of the line segment
#
# Returns:
# True if (qx, qy) lines on line segment (px, py, rx, ry). False otherwise.
#
def onSegment(px, py, qx, qy, rx, ry):
    """Return True if (qx, qy) lies on segment (px, py)-(rx, ry).

    The caller guarantees the three points are colinear, so a bounding-box
    containment test suffices.

    Fix: the previous condition compared q against both endpoints with <=
    and >= simultaneously (qx <= px and qx >= px, etc.), which only ever
    accepted q == p == r; min/max bounds restore the documented behavior.
    """
    return (min(px, rx) <= qx <= max(px, rx) and
            min(py, ry) <= qy <= max(py, ry))
#
# Do line segments (ax, ay, bx, by) and (cx, cy, dx, dy) intersect? What is
# their intersection point?
#
# Parameters:
# (ax, ay, bx, by): The end points of the first line segment
# (cx, cy, dx, dy): The end points of the second line segemnt
#
# Returns:
# True and the intersection point if the line segments intersect. Otherwise
# (False, 0, 0) is returned.
#
def doIntersectPos(ax, ay, bx, by, cx, cy, dx, dy):
    """Test segment (ax, ay)-(bx, by) against segment (cx, cy)-(dx, dy).

    Returns (True, x, y) with an intersection point when the segments cross
    or touch, and (False, 0, 0) otherwise.
    """
    # Cheap rejection: bounding boxes that do not overlap cannot intersect.
    if ax < cx and ax < dx and bx < cx and bx < dx:
        return (False, 0, 0)
    if ax > cx and ax > dx and bx > cx and bx > dx:
        return (False, 0, 0)
    if ay < cy and ay < dy and by < cy and by < dy:
        return (False, 0, 0)
    if ay > cy and ay > dy and by > cy and by > dy:
        return (False, 0, 0)
    # Orientation of each ordered point triple (0 = colinear within a 1e-10
    # tolerance, 1 and 2 are the two turn directions); inlined for speed.
    turn = (by - ay) * (cx - bx) - (bx - ax) * (cy - by)
    o1 = 1 if turn >= 1e-10 else (2 if turn <= -1e-10 else 0)
    turn = (by - ay) * (dx - bx) - (bx - ax) * (dy - by)
    o2 = 1 if turn >= 1e-10 else (2 if turn <= -1e-10 else 0)
    turn = (dy - cy) * (ax - dx) - (dx - cx) * (ay - dy)
    o3 = 1 if turn >= 1e-10 else (2 if turn <= -1e-10 else 0)
    turn = (dy - cy) * (bx - dx) - (dx - cx) * (by - dy)
    o4 = 1 if turn >= 1e-10 else (2 if turn <= -1e-10 else 0)
    # Proper crossing: each segment's endpoints straddle the other segment.
    if o1 != o2 and o3 != o4:
        if ax == bx:
            # First segment is vertical; evaluate the second's line at x = ax.
            slope_cd = (dy - cy) / (dx - cx)
            icept_cd = cy - slope_cd * cx
            return (True, ax, slope_cd * ax + icept_cd)
        if cx == dx:
            # Second segment is vertical; evaluate the first's line at x = cx.
            slope_ab = (by - ay) / (bx - ax)
            icept_ab = ay - slope_ab * ax
            return (True, cx, slope_ab * cx + icept_ab)
        # Neither segment is vertical, so both slopes are finite.
        slope_ab = (by - ay) / (bx - ax)
        icept_ab = ay - slope_ab * ax
        slope_cd = (dy - cy) / (dx - cx)
        icept_cd = cy - slope_cd * cx
        if slope_cd + slope_ab == 0 or icept_ab == 0:
            # Opposite slopes or a zero intercept: solve for y first to
            # avoid the degenerate form of the general solution below.
            y = -(slope_ab * icept_cd + slope_cd * icept_ab) / (slope_cd - slope_ab)
            x = (y - icept_ab) / slope_ab
            return (True, x, y)
        # General case: intersect the two line equations.
        x = (icept_cd - icept_ab) / (slope_ab - slope_cd)
        y = slope_ab * x + icept_ab
        return (True, x, y)
    # Colinear special cases: an endpoint of one segment lies on the other.
    if o1 == 0 and onSegment(ax, ay, cx, cy, bx, by):
        return (True, cx, cy)
    if o2 == 0 and onSegment(ax, ay, dx, dy, bx, by):
        return (True, dx, dy)
    if o3 == 0 and onSegment(cx, cy, ax, ay, dx, dy):
        return (True, ax, ay)
    if o4 == 0 and onSegment(cx, cy, bx, by, dx, dy):
        return (True, bx, by)
    return (False, 0, 0)  # No intersection
#
# Do line segments (ax, ay, bx, by) and (cx, cy, dx, dy) intersect? What is
# their intersection point? How far is (ax, ay) from the intersection point?
#
# This function largely duplicates doIntersectPos, but this version takes the
# second segment as a tuple to make it possible to map it over a list of such
# segments. It only returns the distance to the intersection point (in
# addition to whether or not there was an intersection and its location) so
# that the minimum distance can be found with the min function. This function
# doesn't call doIntersectPos to improve its performance.
#
# Parameters:
# (ax, ay, bx, by): The end points of the first line segment
# (seg): The end points of the second line segemnt as a tuple
#
# Returns:
# The square of the distance from (ax, ay) to the intersection point (or
# 1e12 if they do not intersect)
# True and the intersection point if the line segments intersect. Otherwise
# (False, 0, 0) is returned.
#
def doIntersectDistPos(ax, ay, bx, by, seg):
    """Like doIntersectPos, but the second segment arrives as a tuple and the
    squared distance from (ax, ay) to the hit point is returned first.

    Returns (dist_squared, True, x, y) on a hit and (1e12, False, 0, 0) on a
    miss; the large sentinel distance lets callers take min() over results.
    """
    cx, cy, dx, dy = seg
    # Cheap rejection: bounding boxes that do not overlap cannot intersect.
    if ax < cx and ax < dx and bx < cx and bx < dx:
        return (1e12, False, 0, 0)
    if ax > cx and ax > dx and bx > cx and bx > dx:
        return (1e12, False, 0, 0)
    if ay < cy and ay < dy and by < cy and by < dy:
        return (1e12, False, 0, 0)
    if ay > cy and ay > dy and by > cy and by > dy:
        return (1e12, False, 0, 0)
    # Orientation of each ordered point triple (0 = colinear within a 1e-10
    # tolerance, 1 and 2 are the two turn directions); inlined for speed.
    turn = (by - ay) * (cx - bx) - (bx - ax) * (cy - by)
    o1 = 1 if turn >= 1e-10 else (2 if turn <= -1e-10 else 0)
    turn = (by - ay) * (dx - bx) - (bx - ax) * (dy - by)
    o2 = 1 if turn >= 1e-10 else (2 if turn <= -1e-10 else 0)
    turn = (dy - cy) * (ax - dx) - (dx - cx) * (ay - dy)
    o3 = 1 if turn >= 1e-10 else (2 if turn <= -1e-10 else 0)
    turn = (dy - cy) * (bx - dx) - (dx - cx) * (by - dy)
    o4 = 1 if turn >= 1e-10 else (2 if turn <= -1e-10 else 0)
    # Proper crossing: each segment's endpoints straddle the other segment.
    if o1 != o2 and o3 != o4:
        if ax == bx:
            # First segment is vertical; evaluate the second's line at x = ax.
            slope_cd = (dy - cy) / (dx - cx)
            icept_cd = cy - slope_cd * cx
            x, y = ax, slope_cd * ax + icept_cd
            return (dist2(ax, ay, x, y), True, x, y)
        if cx == dx:
            # Second segment is vertical; evaluate the first's line at x = cx.
            slope_ab = (by - ay) / (bx - ax)
            icept_ab = ay - slope_ab * ax
            x, y = cx, slope_ab * cx + icept_ab
            return (dist2(ax, ay, x, y), True, x, y)
        # Neither segment is vertical, so both slopes are finite.
        slope_ab = (by - ay) / (bx - ax)
        icept_ab = ay - slope_ab * ax
        slope_cd = (dy - cy) / (dx - cx)
        icept_cd = cy - slope_cd * cx
        if slope_cd + slope_ab == 0 or icept_ab == 0:
            # Opposite slopes or a zero intercept: solve for y first.
            y = -(slope_ab * icept_cd + slope_cd * icept_ab) / (slope_cd - slope_ab)
            x = (y - icept_ab) / slope_ab
            return (dist2(ax, ay, x, y), True, x, y)
        # General case: intersect the two line equations.
        x = (icept_cd - icept_ab) / (slope_ab - slope_cd)
        y = slope_ab * x + icept_ab
        return (dist2(ax, ay, x, y), True, x, y)
    # Colinear special cases: an endpoint of one segment lies on the other.
    if o1 == 0 and onSegment(ax, ay, cx, cy, bx, by):
        return (dist2(ax, ay, cx, cy), True, cx, cy)
    if o2 == 0 and onSegment(ax, ay, dx, dy, bx, by):
        return (dist2(ax, ay, dx, dy), True, dx, dy)
    if o3 == 0 and onSegment(cx, cy, ax, ay, dx, dy):
        return (0, True, ax, ay)
    if o4 == 0 and onSegment(cx, cy, bx, by, dx, dy):
        return (dist2(ax, ay, bx, by), True, bx, by)
    return (1e12, False, 0, 0)  # No intersection
#
# Do line segments (ax, ay, bx, by) and (cx, cy, dx, dy) intersect?
#
# This function largely duplicates doIntersectPos, but this version takes the
# second segment as a tuple to make it possible to map it over a list of such
# segments. It only returns whether or not there was an intersection, not it's
# position, so that a list of Booleans (that can have an and or or operation
# applied to them) is the result of such a map. This function doesn't call
# doIntersectPos to improve its performance.
#
# Parameters:
# (ax, ay, bx, by): The end points of the first line segment
# seg: The end points of the second line segemnt (as a tuple)
#
# Returns: True and the intersection point if the line segments intersect.
# Otherwise (False, 0, 0) is returned.
#
def doIntersectTuple(ax, ay, bx, by, seg):
    """Return True iff segment (ax, ay)-(bx, by) intersects the segment in seg.

    seg packs the second segment's endpoints as (cx, cy, dx, dy) so this
    function can be mapped over a list of such tuples (see fastCollides).

    Fix: the proper-crossing branch previously computed slopes and
    intercepts that were discarded before each `return True`; since only a
    yes/no answer is needed (and that branch is never entered when either
    divisor could be zero), the dead computations are removed.
    """
    cx, cy, dx, dy = seg
    # Cheap rejection: bounding boxes that do not overlap cannot intersect.
    if ax < cx and ax < dx and bx < cx and bx < dx:
        return False
    if ax > cx and ax > dx and bx > cx and bx > dx:
        return False
    if ay < cy and ay < dy and by < cy and by < dy:
        return False
    if ay > cy and ay > dy and by > cy and by > dy:
        return False
    # Orientation of each ordered point triple (0 = colinear within a 1e-10
    # tolerance, 1 and 2 are the two turn directions); inlined for speed.
    val = (by - ay) * (cx - bx) - (bx - ax) * (cy - by)
    o1 = 1 if val >= 1e-10 else (2 if val <= -1e-10 else 0)
    val = (by - ay) * (dx - bx) - (bx - ax) * (dy - by)
    o2 = 1 if val >= 1e-10 else (2 if val <= -1e-10 else 0)
    val = (dy - cy) * (ax - dx) - (dx - cx) * (ay - dy)
    o3 = 1 if val >= 1e-10 else (2 if val <= -1e-10 else 0)
    val = (dy - cy) * (bx - dx) - (dx - cx) * (by - dy)
    o4 = 1 if val >= 1e-10 else (2 if val <= -1e-10 else 0)
    # Proper crossing: each segment's endpoints straddle the other segment.
    if o1 != o2 and o3 != o4:
        return True
    # Colinear special cases: an endpoint of one segment lies on the other.
    if o1 == 0 and onSegment(ax, ay, cx, cy, bx, by):
        return True
    if o2 == 0 and onSegment(ax, ay, dx, dy, bx, by):
        return True
    if o3 == 0 and onSegment(cx, cy, ax, ay, dx, dy):
        return True
    if o4 == 0 and onSegment(cx, cy, bx, by, dx, dy):
        return True
    return False  # No intersection
###############################################################################
##
## End of code for determining whether or not two line segments intersect.
##
###############################################################################
###############################################################################
##
## Other functions used by the provided code.
##
###############################################################################
#
# Compute and return the distance between two points.
#
# Parameters:
# (x1, y1): The first point
# (x2, y2): The second point
#
# Returns:
# The distance from (x1, y1) to (x2, y2)
#
def dist(x1, y1, x2, y2):
    """Return the Euclidean distance between points (x1, y1) and (x2, y2)."""
    dx = x2 - x1
    dy = y2 - y1
    return sqrt(dx * dx + dy * dy)
#
# Compute and return the square of the distance between two points. (This
# function can be used when we just need to know if one distance is
# larger than another without knowing what the actual distances are. It
# is more efficient than using dist because the square root doesn't need
# to be computed).
#
# Parameters:
# (x1, y1): The first point
# (x2, y2): The second point
#
# Returns:
# The distance from (x1, y1) to (x2, y2)
#
def dist2(x1, y1, x2, y2):
    """Return the squared distance between (x1, y1) and (x2, y2).

    Avoids the square root when only relative ordering of distances
    is needed.
    """
    dx = x2 - x1
    dy = y2 - y1
    return dx * dx + dy * dy
#
# Does a line segment collide with any of the line segments represented by a
# list of points? Functional constructs are used in an effort to maximize
# the performance of this function.
#
# Parameters:
# (ax, ay), (bx, by): A line segment
# segments: A list of points [x0, y0, x1, y1, x2, y2, ... , xn, yn]
#
# Returns:
# True if (ax, ay), (bx, by) intersects with any of the line segments
# represented by adjacent points in the list. False otherwise.
#
def fastCollides(ax, ay, bx, by, segments):
    """Report whether segment (ax, ay)-(bx, by) hits any segment in a point list.

    Parameters:
      (ax, ay), (bx, by): endpoints of the query segment
      segments: flat list [x0, y0, x1, y1, ..., xn, yn]; each adjacent
                pair of points forms one segment to test against

    Returns:
      True if the query segment intersects any listed segment, else False.
    """
    it = iter(segments)
    it2 = iter(segments)
    if len(segments) >= 2:
        # Advance the second iterator by one point so its quadruples cover
        # the odd-offset (overlapping) segment endpoints.
        next(it2)
        next(it2)
    endpts = list(zip(it, it, it, it)) + list(zip(it2, it2, it2, it2))
    # any() stops at the first intersection, unlike the original
    # reduce(lambda x, y: x or y, ...), which evaluated every candidate.
    return any(map(partial(doIntersectTuple, ax, ay, bx, by), endpts))
#
# Find and return the location of the closest collision to (ax, ay) between the
# line segment (ax, ay), (bx, by) and any segment in a list of list of list of
# points. Functional constructs are used in an effort to maximize the
# performance of this function.
#
# Parameters:
# (ax, ay), (bx, by): A line segment
# segments: A list of lists, each of which is a list of points [x0, y0, x1,
# y1, x2, y2, ... , xn, yn]
#
# Returns:
# True if (ax, ay), (bx, by) intersects with any of the line segments
# represented by adjacent points in the lists. False otherwise.
# The (x, y) location of the intersection point
#
def closestCollision(ax, ay, bx, by, seg_lists):
    """Find the collision closest to (ax, ay) along segment (ax, ay)-(bx, by).

    Parameters:
      (ax, ay), (bx, by): the query segment
      seg_lists: a list of flat point lists [x0, y0, x1, y1, ...]; each
                 adjacent pair of points in each list forms a segment

    Returns:
      (True, x, y) with the intersection point, or (False, 0, 0) when
      nothing is hit.  NOTE(review): raises ValueError via min() if
      seg_lists is empty or contains only empty lists -- confirm callers
      always pass the BOUNDARY list.
    """
    full = []
    for segs in seg_lists:
        # Two staggered iterators so that both the even- and odd-offset
        # segment endpoint quadruples are generated.
        it = iter(segs)
        it2 = iter(segs)
        if len(segs) >= 2:
            next(it2)
            next(it2)
        endpts = list(zip(it, it, it, it)) + list(zip(it2, it2, it2, it2))
        full += map(partial(doIntersectDistPos, ax, ay, bx, by), endpts)
    # min() compares the result tuples lexicographically; presumably
    # doIntersectDistPos returns (distance_key, hit_flag, x, y) so the
    # nearest hit sorts first -- TODO confirm against its definition.
    mn = min(full)
    if mn[1] == False:
        return (False, 0, 0)
    else:
        return (True, mn[2], mn[3])
#
# Display the countdown used before the game starts and between rounds. This
# function doesn't return until the countdown is complete.
#
# Parameters:
# duration: The duration of the countdown in seconds
# background: The background image to display during the countdown
#
# Returns:
# None
#
def countdown(duration, background):
    """Show the pre-round countdown for *duration* seconds, then return.

    Parameters:
      duration: countdown length in seconds
      background: image drawn behind the countdown text
    """
    deadline = time() + duration
    setColor("black")
    while not closed() and time() < deadline:
        # Repaint the backdrop, then overlay the remaining whole seconds.
        clear()
        drawImage(background, 0, 0)
        setFont("Arial", 30)
        text(getWidth() / 2, getHeight() / 2, ceil(deadline - time()), "c")
        setFont("Arial", 16)
        text(getWidth() / 2, getHeight() / 2 - 50, "Game starting in...", "c")
        update()
#
# Get the number of AI players for the user.
#
# Parameters:
# background: The background image to display when the number of AI players
# is being selected.
#
# Returns:
# The number of players selected by the user
#
def getAICount(background):
    """Show the title screen and return the number of AI snakes (1-3) chosen.

    The user can either click one of the three buttons or press the
    "1", "2" or "3" key.  Returns 0 only if the window is closed first.
    """
    drawImage(background, 0, 0)
    setOutline("black")
    setFill(154, 165, 171)
    # One 50x50 button per possible AI count.
    for slot in range(3):
        rect(275 + slot * 100, 375, 50, 50)
    setColor("black")
    setFont("Arial", 30)
    text(getWidth() / 2, 150, "Python Viper")
    for slot in range(3):
        text(300 + slot * 100, 400, str(slot + 1))
    setFont("Arial", 15)
    setColor("black")
    text(getWidth() / 2, 350, "Select the number of AI snakes to begin the game.")
    setFont("Arial", 12)
    text(414, 565, "Note that slower machines may struggle to achieve")
    text(414, 585, "a reasonable framerate with more than one AI snake.")
    chosen = 0
    # Poll until the window closes or a selection is made by click or key.
    while not closed() and chosen == 0:
        if leftButtonPressed():
            mx, my = mousePos()
            for slot in range(3):
                left = 275 + slot * 100
                if left <= mx <= left + 50 and 375 <= my <= 425:
                    chosen = slot + 1
        pressed = getKeys()
        for digit in ("1", "2", "3"):
            if digit in pressed:
                chosen = int(digit)
    return chosen
#
# Load all of the images used by the game.
#
# Parameters:
# (None)
#
# Returns:
# background: The plain background image used during gameplay
# snake: The image of a snake used for AI selection, countdown and the
# gameover screen if the human wins.
# gameover: A list of images (the snake with animated eyes) used for
# the gameover screen if the player human loses.
#
def loadImages():
    """Load the game's sprite sheet and slice it into the needed images.

    Returns:
      background: plain in-game background image
      snake: snake image used for AI selection, countdown, and the
             gameover screen when the human wins
      gameover: list of six animation frames for the human-loss screen
    """
    # Load the sprite sheet.  Report a friendly message and quit if it
    # cannot be loaded.
    # NOTE(review): the path below is absolute and machine-specific; it
    # should probably be relative to this file, per the error text.
    try:
        allImages = loadImage("/Users/mahsa/Documents/CPSC/CPSC 217/Assignments/Assignment3/ViperImages.gif")
    except Exception:
        # Narrowed from a bare "except:" so KeyboardInterrupt and
        # SystemExit still propagate normally.
        print("An error was encountered while trying to load the images from")
        print("ViperImages.gif. Please ensure that it resides in the same")
        print("folder / directory as your program.")
        close()
        quit()
    # Extract the plain background image (top 800x600 band of the sheet)
    background = tk.PhotoImage()
    background.tk.call(background, 'copy', allImages, '-from', 0, 0, 800, 600, '-to', 0, 0)
    # Extract the snake image (second 800x600 band)
    snake = tk.PhotoImage()
    snake.tk.call(snake, 'copy', allImages, '-from', 0, 600, 800, 600+600, '-to', 0, 0)
    # Extract the six gameover animation frames (remaining bands)
    gameover = []
    for i in range(6):
        temp = tk.PhotoImage()
        temp.tk.call(temp, 'copy', allImages, '-from', 0, 1200+600*i, 800, 1200+600*(i+1), '-to', 0, 0)
        gameover.append(temp)
    # Return all of the images
    return background, snake, gameover
###############################################################################
##
## Insert the functions that you need to write in parts 3 and 5 here.
##
##############################################################################
def movingsnake(p1_queue, max_length):
    """Trim the tail of a snake so its total polyline length <= max_length.

    Parameters:
      p1_queue: flat list [x0, y0, x1, y1, ...] of the snake's points,
                oldest point first; modified in place
      max_length: maximum allowed total length of the snake

    Returns:
      The (possibly trimmed) queue, for convenience.
    """
    def _segment(i):
        # Length of the segment starting at flat offset i.
        return sqrt((p1_queue[i] - p1_queue[i + 2]) ** 2 +
                    (p1_queue[i + 1] - p1_queue[i + 3]) ** 2)

    # Compute the total length once, then subtract each removed segment,
    # instead of rescanning the whole queue after every removal (the
    # original recomputation made trimming O(n^2)).
    total_length = 0
    for i in range(0, len(p1_queue) - 3, 2):
        total_length += _segment(i)
    while total_length > max_length:
        total_length -= _segment(0)
        # Drop the oldest point (one x and one y value).
        del p1_queue[:2]
    return p1_queue
def collisiontest(p1_x, p1_y, p2_x, p2_y, q_list):
    """Return True if segment (p1_x, p1_y)-(p2_x, p2_y) crosses any segment in q_list.

    Parameters:
      (p1_x, p1_y), (p2_x, p2_y): the query segment
      q_list: flat point list [x0, y0, x1, y1, ...]; adjacent points form
              the segments that are tested

    Returns:
      True on any intersection, False otherwise.
    """
    # any() short-circuits on the first intersection, matching the
    # original's early return, and drops the misspelled temporary.
    return any(
        doIntersect(p1_x, p1_y, p2_x, p2_y,
                    q_list[i], q_list[i + 1], q_list[i + 2], q_list[i + 3])
        for i in range(0, len(q_list) - 3, 2)
    )
###############################################################################
##
## End of function insertion point.
##
###############################################################################
# Play the game
def main():
    """Run the Python Viper game: setup, rounds of play, and the game-over screen.

    Drives the human snake from keyboard input, steers the AI snakes,
    tracks scores across rounds, limits the frame rate, and finally shows
    the game-over screen.  Relies on the SimpleGraphics-style drawing
    functions and the module-level constants FRAME_RATE,
    COUNTDOWN_DURATION, MAX_SCORE and BOUNDARY.
    """
    # Only redraw the screen when specifically requested to do so
    setAutoUpdate(False)
    counter = 0 # Frame counter
    speed = 100 # Snake speeds in pixels per second
    max_length = 100 # Current maximum length for the snakes
    time_since_increase = 0 # How much time has elapsed since the last time the
    # speed was increased and the snakes were lengthened?
    # Create the player snake. Randomly position the player in the upper left
    # corner of the screen and point them toward the middle of the screen.
    p1_x = randrange(5, getWidth() // 4 - 1)
    p1_y = randrange(5, getHeight() // 4 - 1)
    p1_heading = atan2(getHeight() / 2 - p1_y, getWidth() / 2 - p1_x)
    p1_lost = False # Has the player lost?
    p1_plost = False # Previous frame's lost value
    p1_queue = [] # x1, y1, x2, y2, ..., xn, yn
    p1_score = 0 # The player's score
    # Load all of the images used by the game
    background, snake, gameover = loadImages()
    # Get the number of AI players from the user
    num_ai = getAICount(snake)
    # Set up each list so that it is populated with 3 values, then truncate the
    # number of values in the list to the number of AI players selected for the
    # game.
    if num_ai > 0:
        e_queues = [[randrange(3 * getWidth() // 4 + 1, getWidth() - 1), \
                     randrange(3 * getHeight() // 4 + 1, getHeight() - 1)], \
                    [randrange(3 * getWidth() // 4 + 1, getWidth() - 1),
                     randrange(5, 1 * getHeight() // 4 - 1)], \
                    [randrange(5, 1 * getWidth() // 4 - 1),
                     randrange(3 * getHeight() // 4 + 1, getHeight() - 1)]][:num_ai]
        e_lengths = [0, 0, 0][:num_ai]
        e_scores = [0, 0, 0][:num_ai]
        e_names = ["R2-D2", "Roomba", "Lt. Cmdr. Data"][:num_ai]
        e_colors = ["blue3", "black", "goldenrod2"][:num_ai]
        e_lost = [False, False, False][:num_ai]
        e_plost = [False, False, False][:num_ai]
        # Compute each AI snake's initial heading
        e_headings = []
        for i in range(len(e_queues)):
            e_headings.append(atan2(getHeight() / 2 - e_queues[i][1], \
                                    getWidth() / 2 - e_queues[i][0]))
    # Get ready to play!
    countdown(COUNTDOWN_DURATION, snake)
    state = "playing"
    max_score = 0
    # Make the snakes wider so they are easier to see
    setWidth(3)
    reset_time = 0
    # Set up initial values for the frame rate timing
    start = time()
    elapsed = 1/FRAME_RATE
    # While the game has not been closed.
    while not closed() and not (max_score >= MAX_SCORE and state == "next_round" and time() > reset_time):
        if state == "next_round" and time() > reset_time:
            # Reset the maximum length and speed
            speed = 100 # snake speeds in pixels per second
            max_length = 100 # current maximum length for the snakes
            # Set the player up to play again
            p1_x = randrange(5, getWidth() // 4 - 1)
            p1_y = randrange(5, getHeight() // 4 - 1)
            p1_heading = atan2(getHeight() / 2 - p1_y, getWidth() / 2 - p1_x)
            p1_lost = False
            p1_plost = False # Previous frame's lost value
            p1_queue = []
            # Set the AI players up to play again
            e_queues = [[randrange(3 * getWidth() // 4 + 1, getWidth() - 1), \
                         randrange(3 * getHeight() // 4 + 1, getHeight() - 1)], \
                        [randrange(3 * getWidth() // 4 + 1, getWidth() - 1),
                         randrange(5, 1 * getHeight() // 4 - 1)], \
                        [randrange(5, 1 * getWidth() // 4 - 1),
                         randrange(3 * getHeight() // 4 + 1, getHeight() - 1)]][:num_ai]
            e_lengths = [0, 0, 0][:num_ai]
            e_lost = [False, False, False][:num_ai]
            e_plost = [False, False, False][:num_ai]
            # Compute each AI snake's initial heading
            e_headings = []
            for i in range(len(e_queues)):
                e_headings.append(atan2(getHeight() / 2 - e_queues[i][1], \
                                        getWidth() / 2 - e_queues[i][0]))
            # Prepare for the next round
            countdown(COUNTDOWN_DURATION, snake)
            state = "playing"
            # Reset the timer
            start = time()
            elapsed = 1/FRAME_RATE
        clear()
        drawImage(background, 0, 0)
        # Draw the player snake if it consists of at least one line segment
        if p1_lost == True:
            setColor("red")
        else:
            setColor("chartreuse2")
        ellipse(p1_x - 2, p1_y - 2, 5, 5)
        if 'p1_queue' in locals() and len(p1_queue) >= 4:
            line(p1_queue)
        # Draw the enemy snakes
        for i in range(len(e_queues)):
            if len(e_queues[i]) >= 4:
                if e_lost[i] == True:
                    setColor("red")
                else:
                    setColor(e_colors[i])
                line(e_queues[i])
                ellipse(e_queues[i][-2] - 2, e_queues[i][-1] - 2, 5, 5)
        # Read input
        keys = getHeldKeys()
        # Update the display values
        setFont("Arial", 10)
        setColor("Black")
        text(5, 530, "Speed: " + str(round(speed,1)), "w")
        text(5, 545, "Max Length: " + str(max_length), "w")
        text(5, 560, "Frame rate: " + str(round(1 / elapsed,2)), "w")
        # Respond to the input and update the player's position.  Left/Right
        # rotate the heading at half a revolution per second.
        if "Left" in keys:
            p1_heading = p1_heading - pi * elapsed
        if "Right" in keys:
            p1_heading = p1_heading + pi * elapsed
        # Part 1: advance the player's head along its heading
        if p1_lost==False:
            p1_x=p1_x+cos(p1_heading)*speed*elapsed
            p1_y=p1_y+sin(p1_heading)*speed*elapsed
        # Part 2: record the new head position in the point queue
        p1_queue.append(p1_x)
        p1_queue.append(p1_y)
        # Part 3-1: trim the snake (in place) to the current maximum length
        movingsnake(p1_queue, max_length)
        # Part 4: colliding with walls (hard-coded 800x600 play field)
        if p1_queue[-1] > 599 or p1_queue[-1] < 0 or p1_queue[-2] < 0 or p1_queue[-2] > 799:
            p1_lost=True
        # Part 5: colliding with yourself (skip the segments adjacent to
        # the head, which always touch it)
        if len(p1_queue) > 7 and p1_lost==False:
            p1_lost=collisiontest(p1_queue[-2], p1_queue[-1], p1_queue[-4], p1_queue[-3], p1_queue[:-6])
        # Part 6: colliding with other snakes
        # NOTE(review): this loop variable shadows the 'snake' image loaded
        # above; the game-over code later does frames = [snake], which would
        # then get a point list instead of an image -- verify.
        if len(p1_queue) > 7:
            for snake in e_queues:
                if p1_lost==False:
                    p1_lost=collisiontest(p1_queue[-2], p1_queue[-1], p1_queue[-4], p1_queue[-3], snake)
        # Respond to the input and update the AI's position if they haven't lost
        for i in range(len(e_queues)):
            if e_lost[i] == False:
                # Avoid colliding with ourselves due to overlap between the current
                # segment touching the end of the previous one
                most_e_queue = e_queues[i][:-2]
                # Need the other two snakes so that we can check if we collided with
                # them. Construct a list of the other snakes, and add to their heads to
                # make cut-offs harder.
                others = list(e_queues)
                for j in range(len(e_queues)):
                    ox = others[j][-2] + cos(e_headings[j]) * speed * 0.6
                    oy = others[j][-1] + sin(e_headings[j]) * speed * 0.6
                    others[j] = others[j] + [ox, oy]
                others.pop(i)
                # Extend the player's queue to make it harder for the player to cut the
                # AI off
                if 'p1_queue' in locals() and len(p1_queue) > 0:
                    extended_p1_queue = p1_queue + [p1_queue[-2] + cos(p1_heading) * speed * 0.6, p1_queue[-1] + sin(p1_heading) * speed * 0.6]
                else:
                    extended_p1_queue = []
                # Sweep rays through a 1.2*pi arc around the current heading and
                # steer toward the ray whose first obstacle is farthest away.
                angle = e_headings[i] - 0.6 * pi
                found = False
                while angle <= e_headings[i] + 0.6 * pi:
                    (hits, x, y) = closestCollision(e_queues[i][-2], e_queues[i][-1], e_queues[i][-2] + cos(angle) * 10000, e_queues[i][-1] + sin(angle) * 10000, [extended_p1_queue, most_e_queue, BOUNDARY] + others)
                    if hits == False:
                        # This should never happen because we should always at least
                        # hit the boundary.
                        # NOTE(review): raising a string is a TypeError in Python 3;
                        # this should raise an Exception instance.
                        print(e_queues[i][-2], e_queues[i][-1], e_queues[i][-2] + cos(angle) * 2000, e_queues[i][-1] + sin(angle) * 2000)
                        raise("hits is False when that shouldn't be possible")
                    if hits and found == False:
                        best_x = x
                        best_y = y
                        best_angle = angle
                        found = True
                    elif hits and found:
                        if dist2(e_queues[i][-2], e_queues[i][-1], x, y) > dist2(e_queues[i][-2], e_queues[i][-1], best_x, best_y):
                            best_x = x
                            best_y = y
                            best_angle = angle
                    angle += 0.05 * pi
                old_heading = e_headings[i]
                # Normalize the turn so the snake rotates the short way around.
                if e_headings[i] < -pi / 2 and best_angle > pi / 2:
                    best_angle -= 2 * pi
                if e_headings[i] > pi / 2 and best_angle < -pi / 2:
                    best_angle += 2 * pi
                if e_headings[i] < best_angle:
                    if best_angle - e_headings[i] < pi * elapsed:
                        e_headings[i] = best_angle
                    else:
                        e_headings[i] = e_headings[i] + pi * elapsed
                elif e_headings[i] > best_angle:
                    if e_headings[i] - best_angle < pi * elapsed:
                        e_headings[i] = best_angle
                    else:
                        e_headings[i] = e_headings[i] - pi * elapsed
                e_headings[i] %= 2*pi
                ex = e_queues[i][-2] + cos(e_headings[i]) * speed * elapsed
                ey = e_queues[i][-1] + sin(e_headings[i]) * speed * elapsed
                # Determine if the AI has crashed into any AI (including itself)
                for j in range(len(e_queues)):
                    if i == j:
                        if fastCollides(e_queues[i][-2], e_queues[i][-1], ex, ey, e_queues[j][:-2]):
                            e_lost[i] = True
                    else:
                        if fastCollides(e_queues[i][-2], e_queues[i][-1], ex, ey, e_queues[j]):
                            e_lost[i] = True
                # Determine if the AI has crashed into the player
                if 'p1_queue' in locals():
                    if fastCollides(e_queues[i][-2], e_queues[i][-1], ex, ey, p1_queue):
                        e_lost[i] = True
                # Determine if the AI has crashed into a wall
                if fastCollides(e_queues[i][-2], e_queues[i][-1], ex, ey, BOUNDARY):
                    e_lost[i] = True
                # Add the latest segment to the snake and truncate it to the correct
                # length
                e_queues[i].append(ex)
                e_queues[i].append(ey)
                e_lengths[i] += dist(e_queues[i][-4], e_queues[i][-3], e_queues[i][-2], e_queues[i][-1])
                while e_lengths[i] > max_length:
                    e_lengths[i] -= dist(e_queues[i][0], e_queues[i][1], e_queues[i][2], e_queues[i][3])
                    # Is this faster than popping two elements from the front of the list?
                    e_queues[i] = e_queues[i][2:]
        # Increase the speeds and lengths of the snakes
        time_since_increase += elapsed
        if time_since_increase > 0.1:
            time_since_increase -= 0.1
            speed += 0.1
            max_length += 2
        # If the player's lost status changed during this frame
        if p1_lost != p1_plost:
            # Give a point to every AI that is still alive
            for j in range(len(e_queues)):
                if e_lost[j] == False:
                    e_scores[j] += 1
            # Update the maximum score
            max_score = max([p1_score] + e_scores)
        # If any of the enemy's lost status changed during this frame
        for i in range(len(e_queues)):
            if e_lost[i] != e_plost[i]:
                # Give a point to the player if they are still alive
                if p1_lost == False:
                    p1_score += 1
                # Give a point to every AI that is still alive
                for j in range(len(e_queues)):
                    if e_lost[j] == False:
                        e_scores[j] += 1
                # Update the maximum score
                max_score = max([p1_score] + e_scores)
        # Display the scores
        setColor("chartreuse2")
        setFont("Arial", 15)
        text(10, getHeight() - 20, "Human: " + str(p1_score), "w")
        for j in range(len(e_scores)):
            setColor(e_colors[j])
            text((j + 1) * getWidth() / (len(e_scores) + 1), getHeight() - 20, e_names[j] + ": " + str(e_scores[j]), "w")
        # Determine if the game has been won by counting the number of players
        # that have not lost
        if state == "playing":
            winner_count = 0
            if p1_lost == False:
                winner_count += 1
                winner = "Human"
            for i in range(len(e_lost)):
                if e_lost[i] == False:
                    winner_count += 1
                    winner = e_names[i]
            if winner_count <= 1:
                state = "next_round"
                reset_time = time() + 3
        # Update the previous lost status to match the current lost status for the
        # player and all of the AIs
        p1_plost = p1_lost
        for i in range(len(e_queues)):
            e_plost[i] = e_lost[i]
        # Count the frame
        counter += 1
        if winner_count == 0 and state == "next_round":
            setFont("Arial", 30)
            setColor("black")
            text(getWidth() / 2, getHeight() / 2, "This Round Ended in a Draw")
        elif winner_count == 1 and state == "next_round":
            setFont("Arial", 30)
            setColor("black")
            text(getWidth() / 2, getHeight() / 2, "This Round was Won by " + winner)
        # Update the screen
        update()
        # Delay so that the current frame took 1/FRAME_RATE of a second
        current = time()
        elapsed = current - start
        while elapsed < 1 / FRAME_RATE:
            current = time()
            elapsed = current - start
        # Record the start time for the next frame
        start = current
    if not closed():
        # Find the winner of the game
        winner = ""
        if p1_score == max_score:
            winner = "Human"
        # Determine who the winners are
        for i in range(len(e_scores)):
            if e_scores[i] == max_score:
                if winner == "":
                    winner = e_names[i]
                else:
                    winner += " and " + e_names[i]
        # Determine which set of frames to use, depending on whether the human is
        # one of the game's winners
        if "Human" not in winner:
            frames = gameover
        else:
            frames = [snake]
        # Display the gameover message, animating the background image at 5 frames
        # per second
        start_time = time()
        i = 0
        while not closed():
            clear()
            drawImage(frames[i], 0, 0)
            setFont("Arial", 30)
            text(getWidth() / 2, getHeight() / 2 - 50, "Game Over!")
            text(getWidth() / 2, getHeight() / 2 + 50, "The game was won by")
            text(getWidth() / 2, getHeight() / 2 + 100, winner)
            update()
            if time() > start_time + 0.2:
                i = i + 1
                start_time = time()
                if i >= len(frames):
                    i = 0
# Run the game only when executed as a script, not when imported.
if __name__ == "__main__":
    main()
| [
"noreply@github.com"
] | mqadimzadeh.noreply@github.com |
e9b4ab7f5edede978a7fa80965eeb84c1febf1af | 277031e7d0f63c904dc1946b24d352c78665065c | /backend/chat/urls.py | d052ac0cd12dc8516640d17fdf435e2b7699a41e | [] | no_license | Reqiver/Dzen | 1ee255615d417a677a7e05b006716cf28acaee29 | 1c4a5530e37be38257c9c6b5108012e5a9468c1b | refs/heads/master | 2022-12-03T21:02:25.357574 | 2020-01-14T08:21:09 | 2020-01-14T08:21:09 | 220,040,365 | 0 | 0 | null | 2022-11-22T04:18:13 | 2019-11-06T16:22:55 | JavaScript | UTF-8 | Python | false | false | 190 | py | from django.urls import include, path
from .views import index, room
urlpatterns = [
path('chat', index, name='index'),
path('chat/<str:room_name>/', room, name='room'),
]
| [
"reqiver@gmail.com"
] | reqiver@gmail.com |
1e1180eb72895f4e82375475acf44d574daa64a0 | 8a221c8edcd1b2b87f73e7a9d929d1bf6a5a39ec | /exception2.py | cced5aee80df39f86cb9075ae3e0bc5a7c2703e1 | [] | no_license | xoxloviwan/Stepik1 | c48af8769478b8676a3bcb73bb6855bb83bc426a | 8b84222fbeb19e3fc83f5c76a45829d7c90d2760 | refs/heads/master | 2020-03-28T11:04:43.745780 | 2018-09-12T20:54:30 | 2018-09-12T20:54:30 | 147,878,797 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 303 | py | class NonPositiveError(ArithmeticError):
pass
class PositiveList(list):
def append(self, x):
if x > 0:
super(PositiveList, self).append(x)
else:
raise NonPositiveError()
# Demo: appending positive values succeeds and prints the growing list.
x = PositiveList()
x.append(1)
print(x)
x.append(3)
print(x)
# This append raises NonPositiveError, so the final print never runs.
x.append(-3)
print(x)
| [
"43066471+xoxloviwan@users.noreply.github.com"
] | 43066471+xoxloviwan@users.noreply.github.com |
1a716b8abca83a189ef6938b8233ab476d6e2f62 | 6a11a4f42b572bfba9bd4b631e68e3b35724e2e5 | /app/stringMatching.py | 77396ef299548bdd3cdfe532949245cfde6e5db8 | [] | no_license | AdityaPutraS/Tubes-3-Stima-Backend | 11d507fdfeb06db89d0eaadd29fa0070094ed791 | aa79e24eb571538034432baddcd0185efa5c930e | refs/heads/master | 2020-05-15T19:20:11.768324 | 2019-04-22T03:40:52 | 2019-04-22T03:40:52 | 182,452,655 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,862 | py | import json
def simpan(data, nama):
    """Serialize *data* as JSON to the file '<nama>.json'."""
    # 'with' guarantees the handle is closed even if dump() raises; the
    # original passed open() inline and leaked the file object.
    with open(nama + '.json', 'w') as f:
        json.dump(data, f)
def buka(nama):
    """Load and return the JSON content of the file '<nama>.json'."""
    # 'with' closes the handle promptly instead of leaking it until GC.
    with open(nama + '.json', 'r') as f:
        return json.load(f)
# Pre-loaded JSON data used by the matcher: Indonesian stopword list,
# thesaurus (synonym dictionary), question bank, and matching answers.
stopWord = buka('app/static/stopwords-id')
sinonim = buka('app/static/sinonim')
soal = buka('app/static/soal')
jawaban = buka('app/static/jawaban')
#Menghitung border function dari string s
def computeFail(s):
    """Return the KMP failure (border) table for string s.

    fail[i] is the length of the longest proper prefix of s[:i+1] that
    is also a suffix of it.  Returns [] for the empty string (the
    original raised IndexError on fail[0] = 0 in that case).
    """
    m = len(s)
    fail = [0] * m
    k = 0   # length of the current matched border
    i = 1
    while i < m:
        if s[i] == s[k]:
            # Extend the previous border by one character.
            k += 1
            fail[i] = k
            i += 1
        elif k > 0:
            # Fall back to the next shorter border and retry.
            k = fail[k - 1]
        else:
            fail[i] = 0
            i += 1
    return fail
#Mencocokan pattern di text (apakah pattern ada di text)
#Merupakkan exact match
#Algoritma sesuai slide matkul Stima 2019 dengan modifikasi
#agar bisa mengembalikan panjang kata terpanjang yang mirip
#O(m+n)
#Return : [index, maxLen kata yang mirip]
def kmpITB(text, pattern):
    """Knuth-Morris-Pratt exact search of *pattern* inside *text*.

    Returns:
        [i, len(pattern)] where i is the start index of the first full
        match, or [-1, maxLen] where maxLen is the length of the longest
        partial (pattern-prefix) match observed during the scan.
    """
    # Compute the border (failure) function of the pattern.
    n = len(text)
    m = len(pattern)
    fail = computeFail(pattern)
    i, j = 0, 0
    maxLen = 0
    while(i < n):
        if(pattern[j] == text[i]):
            if(j == m-1):
                return [i-m+1, j+1] # Found: full pattern matched
            i += 1
            j += 1
        else:
            # On a mismatch, record the longest partial match so far
            # before falling back along the failure function.
            if(maxLen < j):
                maxLen = j
            if (j > 0):
                j = fail[j-1]
            else:
                i += 1
    return [-1, maxLen] # Not found
#Menghitung last occurence untuk boyer-moore dari string s
def computeLast(s):
    """Return the Boyer-Moore last-occurrence table for s over 7-bit ASCII.

    last[c] is the highest index in s whose character has code c, or -1
    when that character does not occur in s.
    """
    last = [-1] * 128
    for idx, ch in enumerate(s):
        last[ord(ch)] = idx
    return last
#Mencocokan pattern di text
#Merupakan exact match
#Algoritma sesuai slide matkul Stima 2019 dengan modifikasi
#agar bisa mengembalikan panjang kata terpanjang yang mirip
#O(nm + A), A = banyak alphabet
#Return : [index, maxLen kata yang mirip]
def bmITB(text, pattern):
    """Boyer-Moore exact search of *pattern* inside *text*.

    Returns:
        [i, len(pattern)] on the first match (i = start index), or
        [-1, maxLen] where maxLen is the longest run of consecutive
        matching characters seen.  When the pattern is longer than the
        text, the two arguments are deliberately swapped and the search
        is retried.
    """
    last = computeLast(pattern)
    n, m = len(text), len(pattern)
    i = m-1
    if(i > n-1):
        # Pattern longer than text: search the shorter string inside the
        # longer one instead (argument swap).
        return bmITB(pattern, text)
    j = m-1
    maxLen = 0
    go = True
    temp = 0  # length of the current run of matching characters
    while(go):
        if(pattern[j] == text[i]):
            temp += 1
            if(j == 0):
                return [i, m]
            else:
                i -= 1
                j -= 1
        else:
            if(maxLen < temp):
                maxLen = temp
            temp = 0
            # Bad-character (last-occurrence) shift.
            lo = last[ord(text[i])]
            i = i + m - min(j, lo+1)
            j = m-1
            if(i > n-1):
                go = False
    return [-1, maxLen]
def hapusStopWord(s):
    """Return *s* with every word that appears in the stopWord list removed.

    Words are the ' '-separated tokens of s; surviving tokens are joined
    back with single spaces.
    """
    # A set membership test is O(1) per word, replacing the original's
    # repeated list.remove() scans; the unused 'mark' list is dropped.
    stop_set = set(stopWord)
    return ' '.join(w for w in s.split(' ') if w not in stop_set)
def hitungCocok(text, pattern):
    """Score how well *pattern* matches *text* (case-insensitive), in [0, 1].

    A full KMP hit of text inside pattern scores 1.  Otherwise each word
    of text is compared (via KMP and Boyer-Moore) against every synonym
    of every word of pattern; the best partial-match lengths are
    accumulated and normalised by len(text), capped at 1.
    """
    text = text.lower()
    pattern = pattern.lower()
    if(kmpITB(pattern, text)[0] > -1):
        return 1
    else:
        totalCocok = 0
        # Match word by word (allowed per the assignment spec).
        for tS in text.split(' '):
            tS = tS.strip()
            if(tS != ''):
                maxCocok = 0
                for p in pattern.split(' '):
                    for s in getSinonim(p):
                        hasKMP = kmpITB(s, tS)
                        hasBM = bmITB(s, tS)
                        # NOTE(review): both branches below select hasKMP, so
                        # hasBM's result is never used -- this looks like a
                        # bug (one branch presumably should pick hasBM).
                        has = hasKMP
                        if(hasKMP[1] > hasBM[1]):
                            has = hasKMP
                        if(maxCocok < has[1]):
                            maxCocok = has[1]
                        # Stop at the first exact synonym hit for this word.
                        if(has[0] > -1):
                            break
                # +1 bonus for any word that matched at all, plus the
                # length of its best partial match.
                if(maxCocok != 0):
                    totalCocok += 1
                totalCocok += maxCocok
        return min(totalCocok/len(text), 1)
def getSinonim(kata):
    """Return a set containing *kata* and its synonyms from the 'sinonim' dict."""
    hasil = {kata}
    if(kata in sinonim):
        if('sinonim' in sinonim[kata]):
            hasil = hasil.union(set(sinonim[kata]['sinonim']))
    # NOTE(review): kmpITB returns a list, which never compares equal to
    # -1, so the condition below is always False and the recursive
    # expansion is dead code.  Even if it ran, set.union() returns a new
    # set and its result is discarded here.  Probable intent:
    # kmpITB(...)[0] == -1 and hasil |= getSinonim(h) -- but enabling the
    # recursion without a visited set could loop forever; verify first.
    for h in hasil:
        if(kmpITB(kata, h) == -1 and kmpITB(h, kata) == -1):
            hasil.union(getSinonim(h))
    return hasil
def getJawaban(pertanyaan):
    """Return up to three candidate answers for the question string.

    Each candidate is a dict {'jawaban': answer, 'kemiripan': score},
    sorted by descending score.  A perfect score (1) returns just that
    one answer; an empty post-stopword question returns [].
    """
    query = hapusStopWord(pertanyaan.lower().strip())
    if not query:
        return []
    candidates = []
    for idx, question in enumerate(soal):
        cleaned = hapusStopWord(question.lower().strip())
        if not cleaned:
            continue
        score = hitungCocok(cleaned, query)
        if score >= 0.5:
            candidates.append({'jawaban': jawaban[idx], 'kemiripan': score})
            # A perfect match cannot be beaten; stop scanning.
            if score == 1:
                break
    candidates.sort(key=lambda c: c['kemiripan'], reverse=True)
    if candidates and candidates[0]['kemiripan'] == 1:
        return candidates[:1]
    return candidates[:3]
"adityaputra159@gmail.com"
] | adityaputra159@gmail.com |
04552c3c7c5c5ba2b8346d0847535c221920dcf0 | f9362ba32a2a1529c8bda78bfc726868add83ebf | /app.py | 615128c716eea59d70a8544d1215fb84351e2325 | [] | no_license | snairharikrishnan/Pneumonia-prediction | 77ca7ccc21abcca211fa2d58a158bc6909763625 | 700f03ad75c3a485ea5c1432b3d1011ce56aa17b | refs/heads/main | 2023-03-03T09:24:22.225033 | 2021-02-10T20:07:52 | 2021-02-10T20:07:52 | 337,839,704 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,381 | py | from flask import Flask,request
from tensorflow.keras.models import load_model
from tensorflow.keras.preprocessing import image
from tensorflow.keras.applications.vgg16 import preprocess_input
import numpy as np
import os
from flasgger import Swagger
# Flask application with a Swagger (flasgger) UI for the prediction API.
app=Flask(__name__)
Swagger(app)
def model_predict(file_path):
    """Run the pneumonia classifier on the image at *file_path*.

    Returns the index of the highest-probability class: per the caller,
    1 means pneumonia and 0 means no pneumonia.
    """
    # Load the Keras model once and cache it on the function; reloading
    # it from disk on every request was the dominant per-call cost.
    if not hasattr(model_predict, "_model"):
        model_predict._model = load_model('pneumonia_model.h5')
    model = model_predict._model
    # Preprocess exactly as the VGG16 pipeline expects: 224x224 image,
    # batch dimension added, channel-mean subtraction.
    img = image.load_img(file_path, target_size=(224, 224))
    img = image.img_to_array(img)
    img = np.expand_dims(img, axis=0)
    img = preprocess_input(img)
    pred = model.predict(img)
    return pred.argmax()
@app.route('/predict',methods=['POST'])
def upload():
    """Pneumonia Prediction
    This is using docstrings for specifications.
    ---
    parameters:
      - name: file
        in: formData
        type: file
        required: true
        description: Upload The Chest X-Ray
    responses:
        200:
            description: The output values
    """
    f = request.files['file']
    # Save the uploaded image before predicting; the original only built
    # the path and assumed a file with that name already existed there.
    # os.path.join with separate components is portable, unlike the
    # original hard-coded 'chest_xray\\val' Windows separator.
    upload_dir = os.path.join(os.getcwd(), 'chest_xray', 'val')
    os.makedirs(upload_dir, exist_ok=True)
    file_path = os.path.join(upload_dir, f.filename)
    f.save(file_path)
    pred = model_predict(file_path)
    if pred == 1:
        return "The person has pneumonia"
    return "The person doesn't have pneumonia"
@app.route('/')
def home():
    """Landing endpoint; confirms the service is running."""
    greeting = "Welcome"
    return greeting
# Start the development server only when run directly (not when imported
# by a WSGI server).
if __name__=="__main__":
    app.run(debug=False)
"noreply@github.com"
] | snairharikrishnan.noreply@github.com |
bcb2d3d5b2956afcdde5f3be634d6e0742748d87 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p02233/s835389106.py | 94c01a979ff6a34046d86dab98703089af7bc21b | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 140 | py | fib = [0 for i in range(45)]
fib[0] = 1
fib[1] = 1
for i in range(2, 45):
fib[i] = fib[i - 1] + fib[i - 2]
n = int(input())
print(fib[n])
| [
"66529651+Aastha2104@users.noreply.github.com"
] | 66529651+Aastha2104@users.noreply.github.com |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.