text string | size int64 | token_count int64 |
|---|---|---|
"""
Salts RD Lite shared module
Copyright (C) 2016 creits -2- tknorris
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import xbmcaddon
import xbmcplugin
import xbmcgui
import xbmc
import xbmcvfs
import urllib
import urlparse
import sys
import os
import re
import json
import time
import CustomProgressDialog
# Module-level add-on singleton and convenience aliases used throughout this file.
addon = xbmcaddon.Addon()
get_setting = addon.getSetting  # read an add-on setting (returns a string)
show_settings = addon.openSettings  # open this add-on's settings dialog
sleep = xbmc.sleep  # sleep in milliseconds without blocking Kodi's GUI thread
_log = xbmc.log
def execute_jsonrpc(command):
    """Send a JSON-RPC request to Kodi and return the decoded response.

    `command` may be a ready-made JSON string or any object that
    json.dumps can serialize.
    """
    payload = command if isinstance(command, basestring) else json.dumps(command)
    raw = xbmc.executeJSONRPC(payload)
    return json.loads(raw)
def get_path():
    """Return the add-on install directory as a unicode string."""
    raw = addon.getAddonInfo('path')
    return raw.decode('utf-8')
def get_profile():
    """Return the add-on profile (userdata) directory as unicode."""
    raw = addon.getAddonInfo('profile')
    return raw.decode('utf-8')
def translate_path(path):
    """Resolve a special:// path to a real filesystem path (unicode)."""
    resolved = xbmc.translatePath(path)
    return resolved.decode('utf-8')
def set_setting(id, value):
    """Persist an add-on setting; non-string values are stringified first."""
    if not isinstance(value, basestring):
        value = str(value)
    addon.setSetting(id, value)
def accumulate_setting(setting, addend=1):
    """Add `addend` to an integer-valued setting (an empty setting counts as 0)."""
    stored = get_setting(setting)
    current = int(stored) if stored else 0
    set_setting(setting, current + addend)
def get_version():
    """Return the add-on version string from addon.xml."""
    return addon.getAddonInfo('version')
def get_id():
    """Return the add-on id (e.g. plugin.video.example)."""
    return addon.getAddonInfo('id')
def get_name():
    """Return the human-readable add-on name."""
    return addon.getAddonInfo('name')
def has_addon(addon_id):
    """Return True when the given add-on id is installed in Kodi."""
    condition = 'System.HasAddon(%s)' % (addon_id)
    return xbmc.getCondVisibility(condition) == 1
def get_kodi_version():
    """Parse Kodi's build version and return it as a class with attributes.

    The return value is the KodiVersion *class itself* (not an instance);
    its class attributes are: version (raw string), major, minor (ints),
    tag, revision (unicode) and tag_version (int). str(KodiVersion) renders
    all fields via the metaclass below.
    """
    class MetaClass(type):
        # Metaclass so that str() on the class prints the parsed fields.
        def __str__(self):
            return '|%s| -> |%s|%s|%s|%s|%s|' % (self.version, self.major, self.minor, self.tag, self.tag_version, self.revision)
    class KodiVersion(object):
        __metaclass__ = MetaClass  # Python 2 metaclass hook
        # All the parsing below executes once, at class-creation time.
        version = xbmc.getInfoLabel('System.BuildVersion').decode('utf-8')
        match = re.search('([0-9]+)\.([0-9]+)', version)
        if match: major, minor = match.groups()
        match = re.search('-([a-zA-Z]+)([0-9]*)', version)
        if match: tag, tag_version = match.groups()
        match = re.search('\w+:(\w+-\w+)', version)
        if match: revision = match.group(1)
        # The bare excepts double as defaults: if a regex above did not
        # match, the name is unbound and the NameError lands here.
        try: major = int(major)
        except: major = 0
        try: minor = int(minor)
        except: minor = 0
        try: revision = revision.decode('utf-8')
        except: revision = u''
        try: tag = tag.decode('utf-8')
        except: tag = u''
        try: tag_version = int(tag_version)
        except: tag_version = 0
    return KodiVersion
def get_plugin_url(queries):
    """Build a plugin:// URL for this add-on from a dict of query params."""
    try:
        query = urllib.urlencode(queries)
    except UnicodeEncodeError:
        # urlencode rejects non-ascii unicode values: utf-8 encode them
        # in place, then retry.
        for key in queries:
            if isinstance(queries[key], unicode):
                queries[key] = queries[key].encode('utf-8')
        query = urllib.urlencode(queries)
    return sys.argv[0] + '?' + query
def end_of_directory(cache_to_disc=True):
    """Tell Kodi the directory listing for this plugin handle is complete."""
    handle = int(sys.argv[1])
    xbmcplugin.endOfDirectory(handle, cacheToDisc=cache_to_disc)
def set_content(content):
    """Declare the content type (movies, episodes, ...) of this listing."""
    handle = int(sys.argv[1])
    xbmcplugin.setContent(handle, content)
def create_item(queries, label, thumb='', fanart='', is_folder=None, is_playable=None, total_items=0, menu_items=None, replace_menu=False):
    """Create a ListItem for `queries` and append it to the directory."""
    if not thumb:
        thumb = os.path.join(get_path(), 'icon.png')
    list_item = xbmcgui.ListItem(label, iconImage=thumb, thumbnailImage=thumb)
    add_item(queries, list_item, fanart, is_folder, is_playable, total_items, menu_items, replace_menu)
def add_item(queries, list_item, fanart='', is_folder=None, is_playable=None, total_items=0, menu_items=None, replace_menu=False):
    """Attach artwork/properties to `list_item` and add it to the directory.

    When `queries` is already a URL string it is used as-is; otherwise a
    plugin URL is built from the dict.
    """
    if not fanart:
        fanart = os.path.join(get_path(), 'fanart.jpg')
    if menu_items is None:
        menu_items = []
    # Folder-ness defaults to the opposite of playability.
    if is_folder is None:
        is_folder = not is_playable
    if is_playable is None:
        playable = 'false' if is_folder else 'true'
    else:
        playable = 'true' if is_playable else 'false'
    liz_url = queries if isinstance(queries, basestring) else get_plugin_url(queries)
    if not list_item.getProperty('fanart_image'):
        list_item.setProperty('fanart_image', fanart)
    list_item.setInfo('video', {'title': list_item.getLabel()})
    list_item.setProperty('isPlayable', playable)
    list_item.addContextMenuItems(menu_items, replaceItems=replace_menu)
    xbmcplugin.addDirectoryItem(int(sys.argv[1]), liz_url, list_item, isFolder=is_folder, totalItems=total_items)
def parse_query(query):
    """Parse a plugin query string into a dict, defaulting mode to 'main'.

    Single-valued parameters become plain strings; repeated parameters
    become lists of strings.
    """
    parsed = {'mode': 'main'}
    raw = query[1:] if query.startswith('?') else query
    for key, values in urlparse.parse_qs(raw).items():
        parsed[key] = values[0] if len(values) == 1 else values
    return parsed
def notify(header=None, msg='', duration=2000, sound=None, icon_path=None):
    """Show a toast notification, falling back to the builtin on old Kodi."""
    if header is None:
        header = get_name()
    if sound is None:
        # Respect the user's "mute notifications" setting.
        sound = get_setting('mute_notifications') == 'false'
    if icon_path is None:
        icon_path = os.path.join(get_path(), 'icon.png')
    try:
        xbmcgui.Dialog().notification(header, msg, icon_path, duration, sound)
    except:
        builtin = "XBMC.Notification(%s,%s, %s, %s)" % (header, msg, duration, icon_path)
        xbmc.executebuiltin(builtin)
def close_all():
    """Close every open Kodi dialog."""
    xbmc.executebuiltin('Dialog.Close(all)')
def get_current_view():
    """Return the focused control id of the current window as a string."""
    window_id = xbmcgui.getCurrentWindowId()
    window = xbmcgui.Window(window_id)
    return str(window.getFocusId())
def set_view(content, set_view=False, set_sort=False):
    """Apply the user's preferred view mode and sort methods for `content`."""
    # Set content type so the library offers more views and info.
    if content:
        set_content(content)
    if set_view:
        view = get_setting('%s_view' % (content))
        if view and view != '0':
            _log('Setting View to %s (%s)' % (view, content), xbmc.LOGDEBUG)
            xbmc.executebuiltin('Container.SetViewMode(%s)' % (view))
    # Register the sort methods - probably we don't need all of them.
    if set_sort:
        handle = int(sys.argv[1])
        sort_methods = (
            xbmcplugin.SORT_METHOD_UNSORTED,
            xbmcplugin.SORT_METHOD_VIDEO_SORT_TITLE_IGNORE_THE,
            xbmcplugin.SORT_METHOD_VIDEO_YEAR,
            xbmcplugin.SORT_METHOD_MPAA_RATING,
            xbmcplugin.SORT_METHOD_DATE,
            xbmcplugin.SORT_METHOD_VIDEO_RUNTIME,
            xbmcplugin.SORT_METHOD_GENRE,
        )
        for method in sort_methods:
            xbmcplugin.addSortMethod(handle=handle, sortMethod=method)
def refresh_container():
    """Ask Kodi to re-run the current container's plugin path."""
    xbmc.executebuiltin("XBMC.Container.Refresh")
def update_container(url):
    """Navigate the current container to `url` (replaces history entry)."""
    xbmc.executebuiltin('Container.Update(%s)' % (url))
def get_keyboard(heading, default=''):
    """Prompt with the on-screen keyboard; return the text or None if cancelled."""
    keyboard = xbmc.Keyboard()
    keyboard.setHeading(heading)
    if default:
        keyboard.setDefault(default)
    keyboard.doModal()
    return keyboard.getText() if keyboard.isConfirmed() else None
class Translations(object):
    """Maps internal string ids to localized strings via the add-on catalog."""
    def __init__(self, strings):
        self.strings = strings

    def i18n(self, string_id):
        """Return the utf-8 encoded localized string, or the id on failure."""
        try:
            localized = addon.getLocalizedString(self.strings[string_id])
            return localized.encode('utf-8', 'ignore')
        except Exception as e:
            xbmc.log('%s: Failed String Lookup: %s (%s)' % (get_name(), string_id, e), xbmc.LOGWARNING)
            return string_id
class WorkingDialog(object):
    """Context manager that shows a busy spinner while work is in progress.

    Uses xbmcgui.DialogBusy when the Kodi build provides it; otherwise
    falls back to activating/closing the legacy busy window.
    """
    wd = None

    def __init__(self):
        try:
            self.wd = xbmcgui.DialogBusy()
            self.wd.create()
            self.update(0)
        except:
            # Older Kodi without DialogBusy: use the busy window instead.
            xbmc.executebuiltin('ActivateWindow(busydialog)')

    def __enter__(self):
        return self

    def __exit__(self, type, value, traceback):
        if self.wd is None:
            xbmc.executebuiltin('Dialog.Close(busydialog)')
        else:
            self.wd.close()

    def is_canceled(self):
        """True when the user cancelled the dialog (fallback never cancels)."""
        return self.wd.iscanceled() if self.wd is not None else False

    def update(self, percent):
        """Update the spinner's progress (no-op in fallback mode)."""
        if self.wd is not None:
            self.wd.update(percent)
class ProgressDialog(object):
    """Context manager around Kodi progress dialogs.

    With background=True a DialogProgressBG is used (single message line);
    otherwise a foreground DialogProgress (or the custom one if a progress
    dialog window is already visible). When `timer` is non-zero the dialog
    is created lazily by update() only after `timer` seconds have elapsed.
    """
    pd = None
    def __init__(self, heading, line1='', line2='', line3='', background=False, active=True, timer=0):
        self.begin = time.time()
        self.timer = timer
        self.background = background
        self.heading = heading
        # With a timer we defer creation until update() decides it's due.
        if active and not timer:
            self.pd = self.__create_dialog(line1, line2, line3)
            self.pd.update(0)
    def __create_dialog(self, line1, line2, line3):
        if self.background:
            pd = xbmcgui.DialogProgressBG()
            # Background dialogs take one message string, so concatenate.
            msg = line1 + line2 + line3
            pd.create(self.heading, msg)
        else:
            if xbmc.getCondVisibility('Window.IsVisible(progressdialog)'):
                # A progress dialog window is already showing; use the custom
                # dialog so the two do not fight over the same window.
                pd = CustomProgressDialog.ProgressDialog()
            else:
                pd = xbmcgui.DialogProgress()
            pd.create(self.heading, line1, line2, line3)
        return pd
    def __enter__(self):
        return self
    def __exit__(self, type, value, traceback):
        if self.pd is not None:
            self.pd.close()
    def is_canceled(self):
        # Background dialogs have no cancel button, so always False there.
        if self.pd is not None and not self.background:
            return self.pd.iscanceled()
        else:
            return False
    def update(self, percent, line1='', line2='', line3=''):
        # Lazily create the dialog once the timer threshold has passed.
        if self.pd is None and self.timer and (time.time() - self.begin) >= self.timer:
            self.pd = self.__create_dialog(line1, line2, line3)
        if self.pd is not None:
            if self.background:
                msg = line1 + line2 + line3
                self.pd.update(percent, self.heading, msg)
            else:
                self.pd.update(percent, line1, line2, line3)
class CountdownDialog(object):
    """Progress dialog that counts down while repeatedly polling a callable.

    start() calls `func` immediately and then once per `interval` seconds
    until it returns a truthy result, the countdown expires, or the user
    cancels; the dialog's bar shows the remaining time as a percentage.
    """
    __INTERVALS = 5  # progress-bar updates per polling interval
    pd = None
    def __init__(self, heading, line1='', line2='', line3='', active=True, countdown=60, interval=5):
        self.heading = heading
        self.countdown = countdown
        self.interval = interval
        self.line3 = line3
        if active:
            if xbmc.getCondVisibility('Window.IsVisible(progressdialog)'):
                # Another progress dialog is already visible; use the custom one.
                pd = CustomProgressDialog.ProgressDialog()
            else:
                pd = xbmcgui.DialogProgress()
            # Default third line shows the expiry countdown.
            if not self.line3: line3 = 'Expires in: %s seconds' % (countdown)
            pd.create(self.heading, line1, line2, line3)
            pd.update(100)
            self.pd = pd
    def __enter__(self):
        return self
    def __exit__(self, type, value, traceback):
        if self.pd is not None:
            self.pd.close()
    def start(self, func, args=None, kwargs=None):
        """Poll func(*args, **kwargs) until truthy, cancel, or expiry.

        Returns func's first truthy result, or None on cancel/timeout.
        """
        if args is None: args = []
        if kwargs is None: kwargs = {}
        result = func(*args, **kwargs)
        if result:
            return result
        start = time.time()
        expires = time_left = int(self.countdown)
        interval = self.interval
        while time_left > 0:
            for _ in range(CountdownDialog.__INTERVALS):
                sleep(interval * 1000 / CountdownDialog.__INTERVALS)
                if self.is_canceled(): return
                time_left = expires - int(time.time() - start)
                if time_left < 0: time_left = 0
                progress = time_left * 100 / expires
                # NOTE(review): when a custom line3 was supplied this passes
                # an empty string, which may blank that line on update --
                # confirm intended.
                line3 = 'Expires in: %s seconds' % (time_left) if not self.line3 else ''
                self.update(progress, line3=line3)
            result = func(*args, **kwargs)
            if result:
                return result
    def is_canceled(self):
        if self.pd is None:
            return False
        else:
            return self.pd.iscanceled()
    def update(self, percent, line1='', line2='', line3=''):
        if self.pd is not None:
            self.pd.update(percent, line1, line2, line3)
| 12,578 | 4,087 |
# coding=utf-8
from random import randint
import pygame, math
from character import *
class AICharacter(Character):
    """Computer-controlled mob: 'slime', 'snail', 'fly' or 'fish'.

    `properties` is (mobName, leftLimit, rightLimit); a limit of -1 means
    unbounded on that side. Position/velocity state (x, y, Vx, Vy) and
    collision attributes (onGround, currentPlatform, lowestPlatform, ...)
    come from the Character base class.
    """
    def __init__(self, x, y, Vx, Vy, properties=('slime', -1, -1)):
        # Properties should be a tuple of the form (STRING mobName, INT leftLimit,
        # INT rightLimit) where leftLimit and rightLimit can be -1 to remove the limit
        self.mobType = properties[0]
        self.limit = [properties[1], properties[2]]
        # Call base class implementation
        Character.__init__(self, x, y, Vx, Vy)
        # Slimes are randomly blue or green; the colour only selects sprites.
        self.colour = 'Blue'
        if self.mobType == 'slime' and randint(0, 1) == 0:
            self.colour = 'Green'
        # Load images
        # slime
        self.slimeDL = pygame.image.load('enemies'+os.sep+'slime'+os.sep+'slime' + self.colour +'_squashed.png').convert_alpha()
        self.slimeDR = pygame.image.load('enemies'+os.sep+'slime'+os.sep+'slime' + self.colour + '_squashedR.png').convert_alpha()
        self.slimeL = pygame.image.load('enemies'+os.sep+'slime'+os.sep+'slime' + self.colour + '_walk.png').convert_alpha()
        self.slimeR = pygame.image.load('enemies'+os.sep+'slime'+os.sep+'slime' + self.colour + '_walkR.png').convert_alpha()
        # fly
        self.flyDL = pygame.image.load('enemies'+os.sep+'fly'+os.sep+'fly_dead.png').convert_alpha()
        self.flyDR = pygame.image.load('enemies'+os.sep+'fly'+os.sep+'fly_dead_r.png').convert_alpha()
        self.flyL = pygame.image.load('enemies'+os.sep+'fly'+os.sep+'fly_fly.png').convert_alpha()
        self.flyR = pygame.image.load('enemies'+os.sep+'fly'+os.sep+'fly_fly_r.png').convert_alpha()
        # fish
        self.fishDL = pygame.image.load('enemies'+os.sep+'other'+os.sep+'fishGreen_dead.png').convert_alpha()
        self.fishDR = pygame.image.load('enemies'+os.sep+'other'+os.sep+'fishGreen_dead_r.png').convert_alpha()
        self.fishL = pygame.image.load('enemies'+os.sep+'other'+os.sep+'fishGreen_swim.png').convert_alpha()
        self.fishR = pygame.image.load('enemies'+os.sep+'other'+os.sep+'fishGreen_swim_r.png').convert_alpha()
        # snail
        self.snailL1 = pygame.image.load('enemies'+os.sep+'other'+os.sep+'snailWalk1.png').convert_alpha()
        self.snailL2 = pygame.image.load('enemies'+os.sep+'other'+os.sep+'snailWalk2.png').convert_alpha()
        self.snailR1 = pygame.image.load('enemies'+os.sep+'other'+os.sep+'snailWalk1R.png').convert_alpha()
        self.snailR2 = pygame.image.load('enemies'+os.sep+'other'+os.sep+'snailWalk2R.png').convert_alpha()
        self.snailDL = pygame.image.load('enemies'+os.sep+'other'+os.sep+'snailShell.png').convert_alpha()
        self.snailDR = pygame.image.load('enemies'+os.sep+'other'+os.sep+'snailShellR.png').convert_alpha()
        # General image slots, filled in lazily on the first draw() call.
        self.imageL1, self.imageL2, self.imageR1, self.imageR2, self.imageDL, self.imageDR = [None] * 6
        self.deadWidth, self.deadHeight = [None] * 2
        # Other control variables
        self.originalHeight = y
        self.alive = True
        self.health = 1
        self.gravity = 1
        self.runSpeed = abs(self.Vx)
        self.currentStep = 0
        self.takenAction = False
        self.updateFrequency = 2  # higher value -> AI re-decides more often
    # -----------------------------------------------------------------------------------------------------------------
    @staticmethod
    def distance(p0, p1):
        """Euclidean distance between two (x, y) points."""
        return math.sqrt((p0[0] - p1[0]) ** 2 + (p0[1] - p1[1]) ** 2)
    # -----------------------------------------------------------------------------------------------------------------
    def updateAI(self, platforms, mainChar, blocks):
        """Advance one frame of motion and run the per-type AI rules."""
        # Increment position by velocity
        self.x += self.Vx
        self.y += self.Vy
        # Determine facing for draw(): 1 = right, 0 = left
        if self.Vx > 0:
            self.direction = 1
        elif self.Vx < 0:
            self.direction = 0
        # Check if character is still alive
        if self.health <= 0:
            self.alive = False
        # Set a terminal velocity
        if self.Vy >= platforms[0].height:
            self.Vy = platforms[0].height - 5
        # Dispose of the mob once it has fallen below the lowest platform.
        if not self.onGround and self.Vy >= platforms[0].height - 15 and self.y > platforms[self.lowestPlatform][1]:
            self.dispose()
        # Apply gravity unless grounded; live flies hover and live fish swim,
        # so gravity only affects them once dead.
        if self.onGround:
            self.Vy = 0
        elif ((self.mobType == 'fly' and not self.alive) or self.mobType != 'fly') and (self.mobType != 'fish' or
              (self.mobType == 'fish' and not self.alive)):
            self.Vy += self.gravity
        # Keep character within its patrol limits (-1 disables a side).
        if self.limit[0] != -1 and self.x <= self.limit[0]:
            self.x += self.runSpeed
            self.Vx = abs(self.Vx)
        if self.limit[1] != -1 and self.x >= self.limit[1]:
            self.x -= self.runSpeed
            self.Vx = -abs(self.Vx)
        # Switch to a dead state if close to an explosion
        explosionRadius = 400
        for block in blocks:
            distanceFromBlock = self.distance((self.x + 0.5 * self.width, self.y + 0.5 * self.height),
                                              (block.x + 0.5 * block.width, block.y + 0.5 * block.height))
            if block.disabled and block.willExplode and block.explosionStep == 1 and \
                    distanceFromBlock < explosionRadius:
                self.health = 0
        # Prevent walkers from pacing off the ends of the lowest platform.
        if self.mobType == 'slime' or self.mobType == 'snail':
            testXLeft = self.x - 25
            testXRight = self.x + 25 + self.width
            lowestPlatLeft = platforms[self.lowestPlatform][0]
            lowestPlatRight = platforms[self.lowestPlatform][2]
            onLowestPlatform = self.currentPlatform == self.lowestPlatform
            if onLowestPlatform and testXLeft <= lowestPlatLeft and self.Vx < 0:
                self.x += self.runSpeed
                self.Vx *= -1
            elif onLowestPlatform and testXRight >= lowestPlatRight and self.Vx > 0:
                self.x -= self.runSpeed
                self.Vx *= -1
        # Simple AI: walkers occasionally recompute their patrol limits.
        # BUGFIX: the original condition was
        #   self.mobType == 'slime' or self.mobType == 'snail' and randint(...) == 0
        # `and` binds tighter than `or`, so the slime branch ran every frame,
        # bypassing the randint gate that every other mob type uses.
        if self.mobType in ('slime', 'snail') and randint(0, 10 - self.updateFrequency) == 0:
            platformsBelowSelf = []
            currentPlatformHeight = platforms[self.currentPlatform][1]
            limitBackup = [self.limit[0], self.limit[1]]
            # Default: patrol only the current platform's span.
            self.limit[0] = platforms[self.currentPlatform][0] + 5
            self.limit[1] = platforms[self.currentPlatform][2] - 40
            safePlatformDropLeft, safePlatformDropRight = False, False
            for i in range(0, len(platforms)):
                if platforms[i][1] > currentPlatformHeight:
                    platformsBelowSelf.append(platforms[i])
            # Allow walking off an edge only when a platform below would
            # catch the mob on that side.
            for platform in platformsBelowSelf:
                if platform[0] < platforms[self.currentPlatform][0] < platform[2]:
                    safePlatformDropLeft = True
                if platform[0] < platforms[self.currentPlatform][2] and platform[2] > platforms[self.currentPlatform][
                        2]:
                    safePlatformDropRight = True
            if safePlatformDropLeft:
                self.limit[0] = limitBackup[0]
            if safePlatformDropRight:
                self.limit[1] = limitBackup[1]
        elif self.mobType == 'fly' and self.alive and randint(0, 10 - self.updateFrequency) == 0:
            # Flies bounce between the left world edge and the first platform
            # they are about to collide with at their altitude.
            self.limit[0] = platforms[0][0]
            for i in range(0, len(platforms)):
                if self.x + self.width + 5 >= platforms[i][0] and self.x <= platforms[i][2] and \
                        platforms[i][1] <= self.y <= platforms[i][3]:
                    self.limit[1] = platforms[i][0]
                    self.Vx *= -1
                    self.x -= self.runSpeed
    # -----------------------------------------------------------------------------------------------------------------
    def update(self, platforms, ev, movableObjects, blocks, aiCharacters, mainChar, pool, surface, FPS, torches=None):
        """Per-frame entry point: collide, run AI, then draw."""
        # Collide with other objects
        Character.collide(self, platforms, blocks, aiCharacters, pool, torches)
        # Update motion and AI actions
        self.updateAI(platforms, mainChar, blocks)
        # Draw correct character
        self.draw(surface, FPS)
    # -----------------------------------------------------------------------------------------------------------------
    def draw(self, surface, fps=60):
        """Blit the sprite matching mob type, facing, walk frame and alive state."""
        # Return immediately if mob is invisible
        if not self.visible:
            return
        # Lazily bind the generic image slots for this mob type (once).
        if self.mobType == 'slime' and not self.imageL1:
            self.imageL1 = self.imageL2 = self.slimeL
            self.imageR1 = self.imageR2 = self.slimeR
            self.imageDL = self.slimeDL
            self.imageDR = self.slimeDR
        elif self.mobType == 'fly' and not self.imageL1:
            self.imageL1 = self.imageL2 = self.flyL
            self.imageR1 = self.imageR2 = self.flyR
            self.imageDL = self.flyDL
            self.imageDR = self.flyDR
        elif self.mobType == 'fish' and not self.imageL1:
            self.imageL1 = self.fishL
            self.imageL2 = self.fishL
            self.imageR1 = self.fishR
            self.imageR2 = self.fishR
            self.imageDL = self.fishDL
            self.imageDR = self.fishDR
        elif self.mobType == 'snail' and not self.imageL1:
            self.imageL1 = self.snailL1
            self.imageL2 = self.snailL2
            self.imageR1 = self.snailR1
            self.imageR2 = self.snailR2
            self.imageDL = self.snailDL
            self.imageDR = self.snailDR
        # Get image widths and heights
        self.width = pygame.Surface.get_width(self.imageL1)
        self.height = pygame.Surface.get_height(self.imageL1)
        self.deadWidth = pygame.Surface.get_width(self.imageDL)
        self.deadHeight = pygame.Surface.get_height(self.imageDL)
        # Increment the walking/moving frame.
        # NOTE: with footstepRarity == 1 the modulo is always 0, so the
        # frame toggles every draw call.
        footstepRarity = 1
        if pygame.time.get_ticks() % footstepRarity == 0:
            self.walkFrame += 1
        if self.walkFrame > 1:
            self.walkFrame = 0
        if self.direction == 1 and self.alive and self.walkFrame == 0:
            surface.blit(self.imageR1, (self.x, self.y))
        elif self.direction == 0 and self.alive and self.walkFrame == 0:
            surface.blit(self.imageL1, (self.x, self.y))
        elif self.direction == 1 and self.alive and self.walkFrame == 1:
            surface.blit(self.imageR2, (self.x, self.y))
        elif self.direction == 0 and self.alive and self.walkFrame == 1:
            surface.blit(self.imageL2, (self.x, self.y))
        elif self.direction == 1 and not self.alive:
            surface.blit(self.imageDR, (self.x, self.y))
        elif self.direction == 0 and not self.alive:
            surface.blit(self.imageDL, (self.x, self.y))
        # Recalculate the image width and height, and stop horizontal motion if the AI char is dead
        if not self.alive:
            self.width = self.deadWidth
            self.height = self.deadHeight
            self.Vx = 0
# -----------------------------------------------------------------------------------------------------------------
| 11,554 | 3,702 |
import psutil
from .base import BaseMetric
class CpuLoad(BaseMetric):
    """Metric that samples overall and per-CPU load percentages via psutil."""

    def __init__(self):
        pass

    def measure(self):
        """Return a dict with the aggregate and per-CPU utilisation."""
        overall = psutil.cpu_percent()
        per_cpu = psutil.cpu_percent(percpu=True)
        return {
            'cpu_load_average': overall,
            'cpu_load_threads': per_cpu,
        }

    def get_type(self):
        """Metric category identifier."""
        return 'cpu'
| 360 | 115 |
#@ Sets : used to store multiple items in a single variable.
#@ Sets are unordered and unindexed, and duplicates are not allowed.
#@ (The set itself is mutable; its elements must be hashable/immutable.)
#@ Use the curly brackets -> {} to create a set
#@ set elements can be of any hashable data type
#@ Examples
myset = {'apple', 'banana', 'orange'} # Creating a set
print (myset) # check the items inside the set
type(myset) # check with type function
set1 = {True, False, True, True} # duplicates (the extra Trues) collapse to one
#@ create set with set()
theset = set ((1,2, 'ab', True, 'tableau'))
print(type(theset))
#@ create a set using casting with set() function.
a = [1,2,3] # create a list
b = set(a) # cast the list to set with set() function
print (b) # check the value of the recently created set
type(b) # check the data type
#@ duplicates are not allowed
thisset = {'apple', 'banana', 'orange', 'apple'} # creating the set
print(thisset) # printing the set to check if there are duplicate elements
print (len(thisset)) # get the length of set or total number of elements
set2 = {'abc', 123, True, 1, 'male'} # creating a set: sets allow mixed data types
# Looping through set
#@ for loop # using for loop to print each item in the set
for x in set2:
    print (x)
#@ check if set contains a specific element
print('male' in set2) # check if the set contains a certain element
#@ we can add a new item to the set using the add() method
set2.add('python') # adding new item in set with the add() method
print(set2)
# add set with update() method
set2.update(thisset) # add the elements of another set with update() method
print(set2)
# we can use any iterable object e.g. tuple, list, dict with update() to update a set
thisset = {"apple", "banana", "cherry"}
mylist = ["kiwi", "orange"]
thisset.update(mylist)
print(thisset)
#@ Remove an item from a set: by using remove() and discard()
thisset = {'banana', 'apple', 'orange'} # create a set
thisset.remove('banana') # remove the item from set using remove method (raises KeyError if absent)
print (thisset) # print to check if banana is still there in the set
thisset.add('banana') # again add banana to the set
print (thisset) # check the items in the set
thisset.discard('banana') # remove the element with discard() method (no error if absent)
print(thisset) # check the items in the set
x = thisset.pop() # remove an arbitrary item using pop() method (sets are unordered, so not "the last")
print(x) # check the element just removed
thisset.clear() # clear() method empties the set
del thisset # del keyword will delete the set completely
#@ Join sets
#@ we can use the union() method that returns a new set containing all items from both sets
#@ update() method will insert all the items from one set into another
a = {'a', 'b', 'c'} # creating a new set
b = {1,2,3} # creating a new set
c = a.union(b) # The union() method returns a new set with all items from both the sets
print(c) # Print the newly created set
a.update(b) # the update() method inserts the items in set b into set a
print (a) # get the elements in set a
#@ Keep only the duplicates : using intersection_update()
x = {1,2,3,4} # create a new set
y = {2,5,6,7} # create a new set
x.intersection_update(y) # using intersection_update() method to keep only common elements
print (x) # get the elements
#@ intersection(): method will return a new set that only contains the items that are present in both sets
x = {1,2,3,4} # create a new set
y = {2,5,6,7} # create a new set
z = x.intersection(y) # using intersection method to create a new set of only the common elements
print (z) # print the new set
#@ Keep all but not the duplicates
x.symmetric_difference_update(y) # keep all the items in both sets except the common elements
print (x)
#@ create a new set
x = {1,2,3,4} # create a new set
y = {2,5,6,7} # create a new set
z = x.symmetric_difference(y) # return set that contains all items from both sets except items that are present in both
print (z)
| 6,157 | 1,347 |
# Circuit Playground Express Hot Potato
#
# Author: Carter Nelson
# MIT License (https://opensource.org/licenses/MIT)
import time
import random
import math
import board
from analogio import AnalogIn
from adafruit_circuitplayground.express import cpx
# This brings in the song to play
import melody
number_of_notes = len(melody.melody)
SHAKE_THRESHOLD = 30
def get_total_accel():
    """Return the magnitude of acceleration, averaged over 10 samples."""
    x_sum = 0
    y_sum = 0
    z_sum = 0
    for _ in range(10):
        x, y, z = cpx.acceleration
        x_sum += x
        y_sum += y
        z_sum += z
        time.sleep(0.001)
    x_avg = x_sum / 10
    y_avg = y_sum / 10
    z_avg = z_sum / 10
    return math.sqrt(x_avg * x_avg + y_avg * y_avg + z_avg * z_avg)
# Seed the random function with noise from floating analog pins
a4 = AnalogIn(board.A4)
a5 = AnalogIn(board.A5)
a6 = AnalogIn(board.A6)
a7 = AnalogIn(board.A7)
seed = a4.value
seed += a5.value
seed += a6.value
seed += a7.value
random.seed(seed)
# Set the NeoPixels all red
cpx.pixels.fill(0xFF0000)
# Loop forever
while True:
    # Wait for shaking
    while get_total_accel() < SHAKE_THRESHOLD:
        pass # do nothing
    # Game length: at least one full pass through the melody
    game_length = random.randint(number_of_notes, 6*number_of_notes)
    # Game play with melody
    note_to_play = 0
    for game_step in range(game_length):
        # Add some flare using the NeoPixels: one random pixel, random colour
        cpx.pixels.fill(0)
        cpx.pixels[random.randint(0,9)] = ( random.randint(0,255),
                                            random.randint(0,255),
                                            random.randint(0,255) )
        # Play the note (tempo entry is notes-per-second style divisor)
        note_duration = 1 / melody.tempo[note_to_play]
        note = melody.melody[note_to_play]
        # Cap the frequency at 3500 Hz; 0 means a rest
        note = note if note <= 3500 else 3500
        if note == 0:
            time.sleep(note_duration)
        else:
            cpx.play_tone(note, note_duration)
        # Increment and check the note counter (wrap back to the start)
        note_to_play += 1
        note_to_play = note_to_play if note_to_play < number_of_notes else 0
    #
    # GAME OVER
    #
    # Set the NeoPixels all red
    cpx.pixels.fill(0xFF0000)
    # Delay a bit so can't just reset with a shake
    time.sleep(2)
from typing import TYPE_CHECKING
from optuna.pruners._base import BasePruner
from optuna.pruners._hyperband import HyperbandPruner
from optuna.pruners._median import MedianPruner
from optuna.pruners._nop import NopPruner
from optuna.pruners._patient import PatientPruner
from optuna.pruners._percentile import PercentilePruner
from optuna.pruners._successive_halving import SuccessiveHalvingPruner
from optuna.pruners._threshold import ThresholdPruner
if TYPE_CHECKING:
from optuna.study import Study
from optuna.trial import FrozenTrial
__all__ = [
"BasePruner",
"HyperbandPruner",
"MedianPruner",
"NopPruner",
"PatientPruner",
"PercentilePruner",
"SuccessiveHalvingPruner",
"ThresholdPruner",
]
def _filter_study(study: "Study", trial: "FrozenTrial") -> "Study":
    if not isinstance(study.pruner, HyperbandPruner):
        return study
    # Hyperband partitions trials into brackets; restrict the study to the
    # bracket this trial belongs to so pruning only compares peers.
    pruner: HyperbandPruner = study.pruner
    bracket_id = pruner._get_bracket_id(study, trial)
    return pruner._create_bracket_study(study, bracket_id)
| 1,107 | 406 |
import numpy as np
import matplotlib.pyplot as plt
from matplotlib.font_manager import FontProperties
from collections import OrderedDict
import pandas as pd
from evaluator.evaluator_helpers import Categories, Sub_categories, Metrics
class Table(object):
    """Aggregates per-dataset evaluation metrics and renders summary tables.

    Result lists are flat sequences of 8-value metric groups in this order
    (built in add_entry): indices 0-7 static scenes (Type I), 8-15 linear
    (Type II), 16-23 forced non-linear (Type III), 24-31 non-linear
    (Type IV), 32-39 overall. sub_results holds the Type III sub-categories:
    0-7 LF, 8-15 CA, 16-23 Grp, 24-31 others.
    """
    def __init__(self, arg=None):
        super(Table, self).__init__()
        self.entries = {}
        self.sub_entries = {}
        self.arg = arg
        self.results = {}       # model name -> flat main-category metric list
        self.sub_results = {}   # model name -> flat sub-category metric list
        self.collision_test = {}  # model name -> collision test result
    def add_collision_entry(self, name, result):
        """Store the collision-test result for model `name`."""
        self.collision_test[name] = result
    def add_entry(self, name, results):
        """Accumulate per-dataset metrics for model `name` and store flat lists.

        `results` maps dataset -> (metrics, categories, sub_categories);
        all datasets are summed before averaging into the flat lists.
        """
        final_results = []
        sub_final_results = []
        ## Overall metrics ADE, FDE, ColI, ColII, Topk_ade, Topk_fde, NLL
        table_metrics = Metrics(*([0]*8))
        ## Metrics for the 4 types of trajectories and interactions
        table_categories = Categories(*[Metrics(*([0]*8)) for i in range(1,5)])
        table_sub_categories = Sub_categories(*[Metrics(*([0]*8)) for i in range(1,5)])
        for dataset, (metrics, categories, sub_categories) in results.items():
            ## Overall
            table_metrics += metrics
            ## Main Types
            table_categories.static_scenes += categories.static_scenes
            table_categories.linear_scenes += categories.linear_scenes
            table_categories.forced_non_linear_scenes += categories.forced_non_linear_scenes
            table_categories.non_linear_scenes += categories.non_linear_scenes
            ## Sub Types
            table_sub_categories.lf += sub_categories.lf
            table_sub_categories.ca += sub_categories.ca
            table_sub_categories.grp += sub_categories.grp
            table_sub_categories.others += sub_categories.others
        final_results += table_categories.static_scenes.avg_vals_to_list()
        final_results += table_categories.linear_scenes.avg_vals_to_list()
        final_results += table_categories.forced_non_linear_scenes.avg_vals_to_list()
        final_results += table_categories.non_linear_scenes.avg_vals_to_list()
        final_results += table_metrics.avg_vals_to_list()
        sub_final_results += table_sub_categories.lf.avg_vals_to_list()
        sub_final_results += table_sub_categories.ca.avg_vals_to_list()
        sub_final_results += table_sub_categories.grp.avg_vals_to_list()
        sub_final_results += table_sub_categories.others.avg_vals_to_list()
        self.results[name] = final_results
        self.sub_results[name] = sub_final_results
        return final_results, sub_final_results
    def add_result(self, name, final_results, sub_final_results):
        """Store pre-computed flat result lists for model `name`."""
        self.results[name] = final_results
        self.sub_results[name] = sub_final_results
    def render_mpl_table(self, data, col_width=3.0, row_height=0.625, font_size=14,
                         header_color='#40466e', row_colors=['#f1f1f2', 'w'], edge_color='w',
                         bbox=[0, 0, 1, 1], header_columns=0,
                         ax=None, **kwargs):
        """Render DataFrame `data` as a matplotlib table on `ax` (created if None)."""
        if ax is None:
            size = (np.array(data.shape[::-1]) + np.array([0, 1])) * np.array([col_width, row_height])
            fig, ax = plt.subplots(figsize=size)
            ax.axis('off')
        mpl_table = ax.table(cellText=data.values, bbox=bbox, colLabels=data.columns, cellLoc='center', **kwargs)
        # Bold the header row and the first two columns.
        for (row, col), cell in mpl_table.get_celld().items():
            if (row == 0) or (col == 1) or (col == 0):
                cell.set_text_props(fontproperties=FontProperties(weight='bold'))
        mpl_table.auto_set_font_size(False)
        mpl_table.set_fontsize(font_size)
        return ax
    def print_table(self):
        """Render the overall and per-type tables and save them to Results.png."""
        fig = plt.figure(figsize=(20, 20))
        # ------------------------------------------ TABLES -------------------------------------------
        # Overall Table #
        ax1 = fig.add_subplot(311)
        ax1.axis('tight')
        ax1.axis('off')
        df = pd.DataFrame(columns=['', 'Model', 'No.', 'ADE', 'FDE', 'Col I', 'Col II', 'Top3 ADE', 'Top3 FDE', 'NLL', 'Col_test'])
        it = 0
        len_name = 10  # model names are truncated to this many characters
        # Indices 32-39 are the overall metrics (see class docstring).
        for key in self.results:
            df.loc[it] = ['Overall'] + [key[:len_name]] + [self.results[key][index].__format__('.2f') for index in range(32, 40)] + [self.collision_test[key]]
            it += 1
        ax1 = self.render_mpl_table(df, header_columns=0, col_width=2.0, bbox=[0, 0.9, 1, 0.1*len(self.results)], ax=ax1)
        ax2 = fig.add_subplot(312)
        ax2.axis('tight')
        ax2.axis('off')
        # Per-type table: header rows are re-inserted between sections.
        df = pd.DataFrame(columns=['Type', 'Sub-Type', 'Model', 'No.', 'ADE', 'FDE', 'Col I', 'Col II', 'Top3 ADE', 'Top3 FDE', 'NLL'])
        type_list = [['I', ''], ['II', ''], ['III', ''], ['III', 'LF'], ['III', 'CA'], ['III', 'Grp'], ['III', 'Oth'], ['IV', '']]
        it = 0
        ##Type I
        for key in self.results:
            df.loc[it] = type_list[0] + [key[:len_name]] + [self.results[key][index].__format__('.2f') for index in range(8)]
            it += 1
        df.loc[it] = ['Type', 'Sub-Type', 'Model', 'No.', 'ADE', 'FDE', 'Col I', 'Col II', 'Top3 ADE', 'Top3 FDE', 'NLL']
        it += 1
        ##Type II
        for key in self.results:
            df.loc[it] = type_list[1] + [key[:len_name]] + [self.results[key][index].__format__('.2f') for index in range(8, 16)]
            it += 1
        df.loc[it] = ['Type', 'Sub-Type', 'Model', 'No.', 'ADE', 'FDE', 'Col I', 'Col II', 'Top3 ADE', 'Top3 FDE', 'NLL']
        it += 1
        ##Type III
        for key in self.results:
            df.loc[it] = type_list[2] + [key[:len_name]] + [self.results[key][index].__format__('.2f') for index in range(16, 24)]
            it += 1
        df.loc[it] = ['Type', 'Sub-Type', 'Model', 'No.', 'ADE', 'FDE', 'Col I', 'Col II', 'Top3 ADE', 'Top3 FDE', 'NLL']
        it += 1
        ##Type III: LF
        for key in self.results:
            df.loc[it] = type_list[3] + [key[:len_name]] + [self.sub_results[key][index].__format__('.2f') for index in range(8)]
            it += 1
        df.loc[it] = ['Type', 'Sub-Type', 'Model', 'No.', 'ADE', 'FDE', 'Col I', 'Col II', 'Top3 ADE', 'Top3 FDE', 'NLL']
        it += 1
        ##Type III: CA
        for key in self.results:
            df.loc[it] = type_list[4] + [key[:len_name]] + [self.sub_results[key][index].__format__('.2f') for index in range(8, 16)]
            it += 1
        df.loc[it] = ['Type', 'Sub-Type', 'Model', 'No.', 'ADE', 'FDE', 'Col I', 'Col II', 'Top3 ADE', 'Top3 FDE', 'NLL']
        it += 1
        ##Type III: Grp
        for key in self.results:
            df.loc[it] = type_list[5] + [key[:len_name]] + [self.sub_results[key][index].__format__('.2f') for index in range(16, 24)]
            it += 1
        df.loc[it] = ['Type', 'Sub-Type', 'Model', 'No.', 'ADE', 'FDE', 'Col I', 'Col II', 'Top3 ADE', 'Top3 FDE', 'NLL']
        it += 1
        ##Type III: Others
        for key in self.results:
            df.loc[it] = type_list[6] + [key[:len_name]] + [self.sub_results[key][index].__format__('.2f') for index in range(24, 32)]
            it += 1
        df.loc[it] = ['Type', 'Sub-Type', 'Model', 'No.', 'ADE', 'FDE', 'Col I', 'Col II', 'Top3 ADE', 'Top3 FDE', 'NLL']
        it += 1
        ##Type IV
        for key in self.results:
            df.loc[it] = type_list[7] + [key[:len_name]] + [self.results[key][index].__format__('.2f') for index in range(24, 32)]
            it += 1
        ax2 = self.render_mpl_table(df, header_columns=0, col_width=2.0, bbox=[0, -1.6, 1, 0.6*len(self.results)], ax=ax2)
        fig.savefig('Results.png')
| 7,728 | 2,754 |
"""A benchmark to be run externally.
Executes a program that might make heavy use of Result/Option types
in one of two ways: classically, with exceptions, or using result types.
The program checks several data stores (in memory to minimize interference
from slow IO &c.) in order for a key. If it finds it, it gets the value,
adds something to it, and then overwrites the value.
"""
import sys
import typing as t
from timeit import timeit
from safetywrap import Some, Nothing, Ok, Err, Option, Result
T = t.TypeVar("T")
class ClassicalDataStore:
    """An in-memory key-value store that signals failure with exceptions."""

    def __init__(self, values: dict = None) -> None:
        # Fall back to a fresh dict when no (or an empty) mapping is given.
        self._values = values if values else {}

    def connect(self, fail: bool = False) -> "ClassicalDataStore":
        """'Connect' to the store."""
        if not fail:
            return self
        raise RuntimeError("Failed to connect")

    def get(self, key: str) -> t.Any:
        """Return a value from the store (None when the key is absent)."""
        return self._values.get(key)

    def insert(self, key: str, val: T, overwrite: bool = False) -> T:
        """Insert the value and return it."""
        already_present = key in self._values
        if already_present and not overwrite:
            raise KeyError("Key already exists")
        self._values[key] = val
        return val
class MonadicDataStore:
    """Using the monadic types."""

    def __init__(self, values: dict = None) -> None:
        # Fall back to a fresh dict when no (or an empty) mapping is given.
        self._values = values if values else {}

    def connect(self, fail: bool = False) -> Result["MonadicDataStore", str]:
        """'Connect' to the store, reporting failure as an Err."""
        return Err("failed to connect") if fail else Ok(self)

    def get(self, key: str) -> Option[t.Any]:
        """Return Some(value) when the key exists, Nothing otherwise."""
        if key not in self._values:
            return Nothing()
        return Some(self._values[key])

    def insert(
        self, key: str, val: T, overwrite: bool = False
    ) -> Result[T, str]:
        """Insert the value and return it, wrapped in a Result."""
        if not overwrite and key in self._values:
            return Err("Key already exists")
        self._values[key] = val
        return Ok(val)
class Classical:
    """Run the program in the classical way."""

    def __init__(self) -> None:
        self._stores = {
            0: ClassicalDataStore(),
            1: ClassicalDataStore(),
            2: ClassicalDataStore(),
            3: ClassicalDataStore({"you": "me"}),
        }

    def run(self) -> None:
        """Run the program."""
        for candidate in self._stores.values():
            try:
                store = candidate.connect()
            except RuntimeError:
                continue
            val = store.get("you")
            if val is None:
                continue
            new_val = val + "et"
            try:
                inserted = store.insert("you", new_val)
            except KeyError:
                # The key already exists; retry with overwrite enabled.
                inserted = store.insert("you", new_val, overwrite=True)
            assert inserted == "meet"
            break
        else:
            raise RuntimeError("Could not get value anywhere.")
class Monadic:
    """Use the monadic types."""

    def __init__(self) -> None:
        self._stores = {
            0: MonadicDataStore(),
            1: MonadicDataStore(),
            2: MonadicDataStore(),
            3: MonadicDataStore({"you": "me"}),
        }

    def run(self) -> None:
        """Run the program."""
        for unconnected in self._stores.values():
            maybe_store = unconnected.connect()
            if maybe_store.is_err():
                continue
            store = maybe_store.unwrap()
            # Fetch, transform, then write back, falling back to an
            # overwriting insert when the key already exists.
            outcome = (
                store.get("you")
                .ok_or("no such val")
                .map(lambda val: str(val + "et"))
                .and_then(
                    lambda val: store.insert("you", val).or_else(
                        lambda _: store.insert("you", val, overwrite=True)
                    )
                )
            )
            if outcome.is_ok():
                assert outcome.unwrap() == "meet"
                break
        else:
            raise RuntimeError("Could not get value anywhere")
if __name__ == "__main__":
    # Usage: python <script> {classical|monadic} [timeit]
    # Fail with a clear message instead of a bare IndexError when the
    # required method argument is missing.
    if len(sys.argv) < 2:
        raise RuntimeError(
            "Usage: {} {{classical|monadic}} [timeit]".format(sys.argv[0])
        )
    to_run = sys.argv[1].lower()
    switch: t.Dict[str, t.Callable[[], None]] = {
        "classical": lambda: Classical().run(),
        "monadic": lambda: Monadic().run(),
    }
    if to_run not in switch:
        raise RuntimeError("No such method: {}".format(to_run))
    if len(sys.argv) > 2 and sys.argv[2] == "timeit":
        # Run internal timings and print average seconds per single run.
        NUMBER = 1_000_000
        taken = timeit("switch[to_run]()", globals=globals(), number=NUMBER)
        print(taken / NUMBER)
    else:
        switch[to_run]()
| 4,667 | 1,295 |
from django.contrib import admin
from .models import Package, PackageBuild
class PackageBuildInline(admin.TabularInline):
    """Read-only inline listing a package's builds on the Package admin page."""
    model = PackageBuild
    # Builds are created elsewhere; admins may only inspect them.
    readonly_fields = ('package', 'github_run_id', 'version')
    extra = 0  # do not render blank "add another" rows
    can_delete = False
class PackageAdmin(admin.ModelAdmin):
    """Admin page for Package; identity fields are view-only and builds are
    shown inline."""
    readonly_fields = ('name', 'repository', 'token')
    inlines = [PackageBuildInline]
class PackageBuildAdmin(admin.ModelAdmin):
    """Standalone admin page for PackageBuild; all fields are view-only."""
    readonly_fields = ('package', 'github_run_id', 'version')
# Expose both models in the Django admin site with their custom ModelAdmins.
admin.site.register(Package, PackageAdmin)
admin.site.register(PackageBuild, PackageBuildAdmin)
| 583 | 170 |
import json
class ValidationResult(object):
    """Accumulates warnings and errors found while validating one class."""

    ERROR = 1
    WARNING = 2

    def __init__(self, namespace, classname):
        super(ValidationResult, self).__init__()
        self.warnings = []
        self.errors = []
        self.namespace = namespace
        self.classname = classname

    def add_error(self, warning):
        """File the issue under warnings or errors according to its level.

        Falsy issues and issues with an unrecognized level are ignored.
        """
        if not warning:
            return
        if warning.level == ValidationResult.WARNING:
            self.warnings.append(warning)
        elif warning.level == ValidationResult.ERROR:
            self.errors.append(warning)

    def to_json(self):
        """Serialize this result as a JSON string."""
        return json.dumps(self.to_dict())

    def to_dict(self):
        """Return a plain-dict form suitable for JSON serialization."""
        return {
            'warnings': [w.to_dict() for w in self.warnings],
            'errors': [e.to_dict() for e in self.errors],
            'namespace': self.namespace,
            'classname': self.classname,
        }

    def __len__(self):
        """Total number of collected issues (warnings plus errors)."""
        return len(self.warnings) + len(self.errors)
class ValidationWarning(object):
    """A single validation issue tied to one line of source text."""

    def __init__(self, level, string, line, line_num):
        super(ValidationWarning, self).__init__()
        self.level = level          # ValidationResult.ERROR or .WARNING
        self.string = string        # human-readable description
        self.line_num = line_num    # line number of the offending line
        self.line_text = line       # text of the offending line

    def to_dict(self):
        """Return a plain-dict form suitable for JSON serialization."""
        level_name = (
            "Error" if self.level == ValidationResult.ERROR else "Warning"
        )
        return {
            'level': level_name,
            'string': self.string,
            'line': self.line_text,
            'num': self.line_num,
        }

    def to_json(self):
        """Serialize this warning as a JSON string."""
        return json.dumps(self.to_dict())
| 1,775 | 497 |
"""
A convenience script to playback random demonstrations from
a set of demonstrations stored in a hdf5 file.
Example:
$ python playback_demonstrations_from_hdf5.py --folder ../models/assets/demonstrations/SawyerPickPlace/
"""
import os
import h5py
import argparse
import random
import numpy as np
import robosuite
from robosuite.utils.mjcf_utils import postprocess_model_xml
if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "--folder",
        type=str,
        default=os.path.join(
            robosuite.models.assets_root, "demonstrations/SawyerNutAssembly"
        ),
    )
    args = parser.parse_args()

    demo_path = args.folder
    hdf5_path = os.path.join(demo_path, "demo.hdf5")
    # Use a context manager so the file is always closed; the original
    # trailing f.close() was unreachable behind the infinite loop below.
    with h5py.File(hdf5_path, "r") as f:
        env_name = f["data"].attrs["env"]

        env = robosuite.make(
            env_name,
            has_renderer=True,
            ignore_done=True,
            use_camera_obs=False,
            gripper_visualization=True,
            reward_shaping=True,
            control_freq=100,
        )

        # list of all demonstrations episodes
        demos = list(f["data"].keys())

        while True:
            print("Playing back random episode... (press ESC to quit)")

            # select an episode randomly
            ep = random.choice(demos)

            # read the model xml, using the metadata stored in the attribute
            # for this episode
            model_file = f["data/{}".format(ep)].attrs["model_file"]
            model_path = os.path.join(demo_path, "models", model_file)
            with open(model_path, "r") as model_f:
                model_xml = model_f.read()

            env.reset()
            xml = postprocess_model_xml(model_xml)
            env.reset_from_xml_string(xml)
            env.viewer.set_camera(0)

            # load the flattened mujoco states; Dataset.value was removed in
            # h5py 3.x, so read the full array with [()] instead
            states = f["data/{}/states".format(ep)][()]

            # force the sequence of internal mujoco states one by one
            for state in states:
                env.sim.set_state_from_flattened(state)
                env.sim.forward()
                env.render()
| 2,049 | 665 |
from theseus.registry import Registry
from .metric_template import Metric
# Shared registry instance for metric implementations, namespaced 'METRIC'.
METRIC_REGISTRY = Registry('METRIC')
| 113 | 38 |
import merch_calendar
from datetime import date, timedelta
import calendar
class MerchWeek:
    """One week of the merchandising calendar, anchored at its start date."""

    def __init__(self, year, month, week):
        # start_of_week raises for invalid year/month/week combinations; let
        # that propagate (the previous try/except merely re-raised it).
        self._date = merch_calendar.start_of_week(year, month, week)

    @classmethod
    def from_date(cls, date):
        """Build the MerchWeek containing the given calendar date.

        NOTE(review): the parameter name shadows datetime.date; kept as-is
        so keyword callers (from_date(date=...)) are not broken.
        """
        year = merch_calendar.year(date)
        month = merch_calendar.month(date)
        week = merch_calendar.week_of_month(date)
        return cls(year, month, week)

    @classmethod
    def from_year_week(cls, year, week):
        """Build a MerchWeek from a year and a 1-based week-of-year number.

        Raises ValueError when the week number is outside the year's range.
        """
        if week < 1 or week > merch_calendar.weeks_in_year(year):
            raise ValueError('Week number out of range')
        return cls(year, 1, week)

    @property
    def year(self):
        """Merch-calendar year containing this week."""
        return merch_calendar.year(self._date)

    @property
    def month(self):
        """Merch-calendar month number containing this week."""
        return merch_calendar.month(self._date)

    @property
    def week_of_month(self):
        """Week number within the merch-calendar month."""
        return merch_calendar.week_of_month(self._date)

    @property
    def week_of_year(self):
        """Week number within the merch-calendar year."""
        return merch_calendar.week_of_year(self._date)

    @property
    def month_abbr(self):
        """Abbreviated month name for the week's month (via Julian mapping)."""
        return calendar.month_abbr[merch_calendar.to_julian_month(self.month)]

    @property
    def month_name(self):
        """Full month name for the week's month (via Julian mapping)."""
        return calendar.month_name[merch_calendar.to_julian_month(self.month)]

    @property
    def start_date(self):
        """First day of the week."""
        return self._date

    @property
    def end_date(self):
        """Last day of the week (six days after the start)."""
        return self._date + timedelta(6)

    def __repr__(self):
        return "<MerchWeek: " + str(self.year) + " WK" + "{n:02d}".format(
            n=self.week_of_year) + ">"
| 1,562 | 518 |
"""
Module to provide tests related to the MD007 rule.
"""
from test.markdown_scanner import MarkdownScanner
import pytest
@pytest.mark.rules
def test_md007_bad_configuration_indent():
    """
    Test to verify that a configuration error is thrown when supplying the
    indent value with a string that is not an integer.
    """
    # Arrange
    scanner = MarkdownScanner()
    arguments = [
        "--set",
        "plugins.md007.indent=bad",
        "--strict-config",
        "scan",
        "test/resources/rules/md007/good_list_indentation.md",
    ]
    expected_error = (
        "BadPluginError encountered while configuring plugins:\n"
        "The value for property 'plugins.md007.indent' must be of type 'int'."
    )
    # Act
    results = scanner.invoke_main(arguments=arguments)
    # Assert: bad configuration -> exit code 1, error text only on stderr.
    results.assert_results("", expected_error, 1)
@pytest.mark.rules
def test_md007_bad_configuration_start_indented():
    """
    Test to verify that a configuration error is thrown when supplying the
    start_indented value with a value that is not a boolean.
    """
    # Arrange
    scanner = MarkdownScanner()
    arguments = [
        "--set",
        "plugins.md007.start_indented=bad",
        "--strict-config",
        "scan",
        "test/resources/rules/md007/good_list_indentation.md",
    ]
    expected_error = (
        "BadPluginError encountered while configuring plugins:\n"
        "The value for property 'plugins.md007.start_indented' must be of type 'bool'."
    )
    # Act
    results = scanner.invoke_main(arguments=arguments)
    # Assert: bad configuration -> exit code 1, error text only on stderr.
    results.assert_results("", expected_error, 1)
@pytest.mark.rules
def test_md007_bad_configuration_indent_bad():
    """
    Test to verify that a configuration error is thrown when supplying the
    indent value with a string that is not a valid integer.
    """
    # Arrange
    scanner = MarkdownScanner()
    arguments = [
        "--set",
        "plugins.md007.indent=$#5",
        "--strict-config",
        "scan",
        "test/resources/rules/md007/good_list_indentation.md",
    ]
    expected_error = (
        "BadPluginError encountered while configuring plugins:\n"
        "The value for property 'plugins.md007.indent' is not valid: Allowable values are between 2 and 4."
    )
    # Act
    results = scanner.invoke_main(arguments=arguments)
    # Assert: out-of-range value -> exit code 1, error text only on stderr.
    results.assert_results("", expected_error, 1)
@pytest.mark.rules
def test_md007_good_list_indentation_x():
    """
    Test to make sure this rule does not trigger with a document that
    only has the required spaces after the list item.
    """
    # Arrange
    scanner = MarkdownScanner()
    arguments = [
        "scan",
        "test/resources/rules/md007/good_list_indentation.md",
    ]
    # Act
    results = scanner.invoke_main(arguments=arguments)
    # Assert: a clean document produces no output and exit code 0.
    results.assert_results("", "", 0)
@pytest.mark.rules
def test_md007_bad_list_indentation_level_0():
    """
    Test to make sure this rule does trigger with a document that
    has the extra spaces after the level 1 list item.
    """
    # Arrange
    scanner = MarkdownScanner()
    source_path = "test/resources/rules/md007/bad_list_indentation_level_0.md"
    expected_output = (
        source_path
        + ":3:2: MD007: Unordered list indentation "
        + "[Expected: 0, Actual=1] (ul-indent)"
    )
    # Act
    results = scanner.invoke_main(arguments=["scan", source_path])
    # Assert: one violation -> exit code 1, diagnostic on stdout only.
    results.assert_results(expected_output, "", 1)
@pytest.mark.rules
def test_md007_bad_list_indentation_level_1():
"""
Test to make sure this rule does trigger with a document that
has the extra spaces after the level 2 list item.
"""
# Arrange
scanner = MarkdownScanner()
supplied_arguments = [
"scan",
"test/resources/rules/md007/bad_list_indentation_level_1.md",
]
expected_return_code = 1
expected_output = (
"test/resources/rules/md007/bad_list_indentation_level_1.md:4:4: "
+ "MD007: Unordered list indentation "
+ "[Expected: 2, Actual=3] (ul-indent)"
)
expected_error = ""
# Act
execute_results = scanner.invoke_main(arguments=supplied_arguments)
# Assert
execute_results.assert_results(
expected_output, expected_error, expected_return_code
)
@pytest.mark.rules
def test_md007_bad_list_indentation_level_2():
"""
Test to make sure this rule does trigger with a document that
has the extra spaces after the level 3 list item.
"""
# Arrange
scanner = MarkdownScanner()
supplied_arguments = [
"scan",
"test/resources/rules/md007/bad_list_indentation_level_2.md",
]
expected_return_code = 1
expected_output = (
"test/resources/rules/md007/bad_list_indentation_level_2.md:5:6: "
+ "MD007: Unordered list indentation "
+ "[Expected: 4, Actual=5] (ul-indent)"
)
expected_error = ""
# Act
execute_results = scanner.invoke_main(arguments=supplied_arguments)
# Assert
execute_results.assert_results(
expected_output, expected_error, expected_return_code
)
@pytest.mark.rules
def test_md007_good_list_indentation_in_block_quote():
"""
Test to make sure this rule does not trigger with a document that
only has the required spaces after the list item, but in a block quote.
"""
# Arrange
scanner = MarkdownScanner()
supplied_arguments = [
"scan",
"test/resources/rules/md007/good_list_indentation_in_block_quote.md",
]
expected_return_code = 0
expected_output = ""
expected_error = ""
# Act
execute_results = scanner.invoke_main(arguments=supplied_arguments)
# Assert
execute_results.assert_results(
expected_output, expected_error, expected_return_code
)
@pytest.mark.rules
def test_md007_good_list_indentation_in_double_block_quote():
    """
    Test to make sure this rule does not trigger with a document that
    only has the required spaces after the list item, but in a double block quote.
    """
    # Arrange
    scanner = MarkdownScanner()
    supplied_arguments = [
        "scan",
        "test/resources/rules/md007/good_list_indentation_in_double_block_quote.md",
    ]
    expected_return_code = 0
    expected_output = ""
    expected_error = ""
    # Act
    execute_results = scanner.invoke_main(arguments=supplied_arguments)
    # Assert
    execute_results.assert_results(
        expected_output, expected_error, expected_return_code
    )
@pytest.mark.rules
def test_md007_good_unordered_list_in_ordered_list():
"""
Test to make sure this rule does not trigger with a document that
only has the required spaces after the list item, but in an ordered list.
"""
# Arrange
scanner = MarkdownScanner()
supplied_arguments = [
"--disable-rules",
"md030",
"scan",
"test/resources/rules/md007/good_unordered_list_in_ordered_list.md",
]
expected_return_code = 0
expected_output = ""
expected_error = ""
# Act
execute_results = scanner.invoke_main(arguments=supplied_arguments)
# Assert
execute_results.assert_results(
expected_output, expected_error, expected_return_code
)
@pytest.mark.rules
def test_md007_bad_unordered_list_in_ordered_list():
"""
Test to make sure this rule does trigger with a document that has
an unordered list starting with extra spaces inside of an ordered list.
"""
# Arrange
scanner = MarkdownScanner()
supplied_arguments = [
"--disable-rules",
"md030",
"scan",
"test/resources/rules/md007/bad_unordered_list_in_ordered_list.md",
]
expected_return_code = 1
expected_output = (
"test/resources/rules/md007/bad_unordered_list_in_ordered_list.md:2:6: "
+ "MD007: Unordered list indentation "
+ "[Expected: 5, Actual=6] (ul-indent)"
)
expected_error = ""
# Act
execute_results = scanner.invoke_main(arguments=supplied_arguments)
# Assert
execute_results.assert_results(
expected_output, expected_error, expected_return_code
)
@pytest.mark.rules
def test_md007_bad_level_1_unordered_list_in_ordered_list():
"""
Test to make sure this rule does trigger with a document that has
two nested unordered lists, the inner one starting with extra spaces,
inside of an ordered list.
"""
# Arrange
scanner = MarkdownScanner()
supplied_arguments = [
"--disable-rules",
"md030",
"scan",
"test/resources/rules/md007/bad_level_1_unordered_list_in_ordered_list.md",
]
expected_return_code = 1
expected_output = (
"test/resources/rules/md007/bad_level_1_unordered_list_in_ordered_list.md:3:8: "
+ "MD007: Unordered list indentation "
+ "[Expected: 7, Actual=8] (ul-indent)"
)
expected_error = ""
# Act
execute_results = scanner.invoke_main(arguments=supplied_arguments)
# Assert
execute_results.assert_results(
expected_output, expected_error, expected_return_code
)
@pytest.mark.rules
def test_md007_good_unordered_list_in_double_ordered_list():
"""
Test to make sure this rule does not trigger with a document that has
two nested ordered lists with a good unordered list with them that
does not have extra spaces.
"""
# Arrange
scanner = MarkdownScanner()
supplied_arguments = [
"scan",
"test/resources/rules/md007/good_unordered_list_in_double_ordered_list.md",
]
expected_return_code = 0
expected_output = ""
expected_error = ""
# Act
execute_results = scanner.invoke_main(arguments=supplied_arguments)
# Assert
execute_results.assert_results(
expected_output, expected_error, expected_return_code
)
@pytest.mark.rules
def test_md007_bad_unordered_list_in_double_ordered_list():
"""
Test to make sure this rule does trigger with a document that has
two nested ordered lists with a bad unordered list with them that
does have extra spaces.
"""
# Arrange
scanner = MarkdownScanner()
supplied_arguments = [
"scan",
"test/resources/rules/md007/bad_unordered_list_in_double_ordered_list.md",
]
expected_return_code = 1
expected_output = (
"test/resources/rules/md007/bad_unordered_list_in_double_ordered_list.md:3:8: "
+ "MD007: Unordered list indentation "
+ "[Expected: 7, Actual=8] (ul-indent)"
)
expected_error = ""
# Act
execute_results = scanner.invoke_main(arguments=supplied_arguments)
# Assert
execute_results.assert_results(
expected_output, expected_error, expected_return_code
)
@pytest.mark.rules
def test_md007_good_unordered_ordered_unordere_ordered_unordered():
"""
Test to make sure this rule does not trigger with a document that has
nested ordered lists and unordered lists, with no extra spaces.
"""
# Arrange
scanner = MarkdownScanner()
supplied_arguments = [
"scan",
"test/resources/rules/md007/good_unordered_ordered_unordere_ordered_unordered.md",
]
expected_return_code = 0
expected_output = ""
expected_error = ""
# Act
execute_results = scanner.invoke_main(arguments=supplied_arguments)
# Assert
execute_results.assert_results(
expected_output, expected_error, expected_return_code
)
@pytest.mark.rules
def test_md007_bad_unordered_bad_ordered_unordered_ordered_unordered():
"""
Test to make sure this rule does trigger with a document that has
nested ordered lists and unordered lists, with extra spaces.
"""
# Arrange
scanner = MarkdownScanner()
supplied_arguments = [
"scan",
"test/resources/rules/md007/bad_unordered_bad_ordered_unordered_ordered_unordered.md",
]
expected_return_code = 1
expected_output = (
"test/resources/rules/md007/bad_unordered_bad_ordered_unordered_ordered_unordered.md:1:2: "
+ "MD007: Unordered list indentation "
+ "[Expected: 0, Actual=1] (ul-indent)"
)
expected_error = ""
# Act
execute_results = scanner.invoke_main(arguments=supplied_arguments)
# Assert
execute_results.assert_results(
expected_output, expected_error, expected_return_code
)
@pytest.mark.rules
def test_md007_bad_unordered_ordered_unordered_bad_ordered_unordered():
"""
Test to make sure this rule does trigger with a document that has
nested ordered lists and unordered lists, with extra spaces.
"""
# Arrange
scanner = MarkdownScanner()
supplied_arguments = [
"scan",
"test/resources/rules/md007/bad_unordered_ordered_unordered_bad_ordered_unordered.md",
]
expected_return_code = 1
expected_output = (
"test/resources/rules/md007/bad_unordered_ordered_unordered_bad_ordered_unordered.md:3:7: "
+ "MD007: Unordered list indentation "
+ "[Expected: 6, Actual=7] (ul-indent)"
)
expected_error = ""
# Act
execute_results = scanner.invoke_main(arguments=supplied_arguments)
# Assert
execute_results.assert_results(
expected_output, expected_error, expected_return_code
)
@pytest.mark.rules
def test_md007_bad_unordered_ordered_unordered_ordered_unordered_bad():
"""
Test to make sure this rule does trigger with a document that has
nested ordered lists and unordered lists, with extra spaces.
"""
# Arrange
scanner = MarkdownScanner()
supplied_arguments = [
"scan",
"test/resources/rules/md007/bad_unordered_ordered_unordered_ordered_unordered_bad.md",
]
expected_return_code = 1
expected_output = (
"test/resources/rules/md007/bad_unordered_ordered_unordered_ordered_unordered_bad.md:5:12: "
+ "MD007: Unordered list indentation "
+ "[Expected: 11, Actual=12] (ul-indent)"
)
expected_error = ""
# Act
execute_results = scanner.invoke_main(arguments=supplied_arguments)
# Assert
execute_results.assert_results(
expected_output, expected_error, expected_return_code
)
@pytest.mark.rules
def test_md007_bad_list_indentation_in_block_quote_level_0():
"""
Test to make sure this rule does trigger with a document that has
nested unordered lists within a block quote, with extra spaces.
"""
# Arrange
scanner = MarkdownScanner()
supplied_arguments = [
"scan",
"test/resources/rules/md007/bad_list_indentation_in_block_quote_level_0.md",
]
expected_return_code = 1
expected_output = (
"test/resources/rules/md007/bad_list_indentation_in_block_quote_level_0.md:3:4: "
+ "MD007: Unordered list indentation "
+ "[Expected: 0, Actual=1] (ul-indent)\n"
+ "test/resources/rules/md007/bad_list_indentation_in_block_quote_level_0.md:4:6: "
+ "MD007: Unordered list indentation "
+ "[Expected: 2, Actual=3] (ul-indent)\n"
+ "test/resources/rules/md007/bad_list_indentation_in_block_quote_level_0.md:5:8: "
+ "MD007: Unordered list indentation "
+ "[Expected: 4, Actual=5] (ul-indent)"
)
expected_error = ""
# Act
execute_results = scanner.invoke_main(arguments=supplied_arguments)
# Assert
execute_results.assert_results(
expected_output, expected_error, expected_return_code
)
@pytest.mark.rules
def test_md007_bad_list_in_block_quote_after_text():
"""
Test to make sure this rule does trigger with a document that has
a bad nested unordered lists after a text block.
"""
# Arrange
scanner = MarkdownScanner()
supplied_arguments = [
"scan",
"test/resources/rules/md007/bad_list_in_block_quote_after_text.md",
]
expected_return_code = 1
expected_output = (
"test/resources/rules/md007/bad_list_in_block_quote_after_text.md:4:6: "
+ "MD007: Unordered list indentation "
+ "[Expected: 2, Actual=3] (ul-indent)"
)
expected_error = ""
# Act
execute_results = scanner.invoke_main(arguments=supplied_arguments)
# Assert
execute_results.assert_results(
expected_output, expected_error, expected_return_code
)
@pytest.mark.rules
def test_md007_bad_list_in_block_quote_after_atx_heading():
"""
Test to make sure this rule does trigger with a document that has
a bad nested unordered lists after an Atx Heading.
"""
# Arrange
scanner = MarkdownScanner()
supplied_arguments = [
"scan",
"test/resources/rules/md007/bad_list_in_block_quote_after_atx_heading.md",
]
expected_return_code = 1
expected_output = (
"test/resources/rules/md007/bad_list_in_block_quote_after_atx_heading.md:4:6: "
+ "MD007: Unordered list indentation "
+ "[Expected: 2, Actual=3] (ul-indent)"
)
expected_error = ""
# Act
execute_results = scanner.invoke_main(arguments=supplied_arguments)
# Assert
execute_results.assert_results(
expected_output, expected_error, expected_return_code
)
@pytest.mark.rules
def test_md007_bad_list_in_block_quote_after_thematic_break():
"""
Test to make sure this rule does trigger with a document that has
a bad nested unordered lists after a thematic break.
"""
# Arrange
scanner = MarkdownScanner()
supplied_arguments = [
"scan",
"test/resources/rules/md007/bad_list_in_block_quote_after_thematic_break.md",
]
expected_return_code = 1
expected_output = (
"test/resources/rules/md007/bad_list_in_block_quote_after_thematic_break.md:6:6: "
+ "MD007: Unordered list indentation "
+ "[Expected: 2, Actual=3] (ul-indent)"
)
expected_error = ""
# Act
execute_results = scanner.invoke_main(arguments=supplied_arguments)
# Assert
execute_results.assert_results(
expected_output, expected_error, expected_return_code
)
@pytest.mark.rules
def test_md007_bad_list_in_block_quote_after_setext_heading():
"""
Test to make sure this rule does trigger with a document that has
a bad nested unordered lists after a SetExt Heading.
"""
# Arrange
scanner = MarkdownScanner()
supplied_arguments = [
"scan",
"test/resources/rules/md007/bad_list_in_block_quote_after_setext_heading.md",
]
expected_return_code = 1
expected_output = (
"test/resources/rules/md007/bad_list_in_block_quote_after_setext_heading.md:5:6: "
+ "MD007: Unordered list indentation "
+ "[Expected: 2, Actual=3] (ul-indent)"
)
expected_error = ""
# Act
execute_results = scanner.invoke_main(arguments=supplied_arguments)
# Assert
execute_results.assert_results(
expected_output, expected_error, expected_return_code
)
@pytest.mark.rules
def test_md007_bad_list_in_block_quote_after_html_block():
"""
Test to make sure this rule does trigger with a document that has
a bad nested unordered lists after a HTML block.
"""
# Arrange
scanner = MarkdownScanner()
supplied_arguments = [
"scan",
"test/resources/rules/md007/bad_list_in_block_quote_after_html_block.md",
]
expected_return_code = 1
expected_output = (
"test/resources/rules/md007/bad_list_in_block_quote_after_html_block.md:6:6: "
+ "MD007: Unordered list indentation "
+ "[Expected: 2, Actual=3] (ul-indent)"
)
expected_error = ""
# Act
execute_results = scanner.invoke_main(arguments=supplied_arguments)
# Assert
execute_results.assert_results(
expected_output, expected_error, expected_return_code
)
@pytest.mark.rules
def test_md007_bad_list_in_block_quote_after_fenced_block():
"""
Test to make sure this rule does trigger with a document that has
a bad nested unordered lists after a fenced code block.
"""
# Arrange
scanner = MarkdownScanner()
supplied_arguments = [
"scan",
"test/resources/rules/md007/bad_list_in_block_quote_after_fenced_block.md",
]
expected_return_code = 1
expected_output = (
"test/resources/rules/md007/bad_list_in_block_quote_after_fenced_block.md:6:6: "
+ "MD007: Unordered list indentation "
+ "[Expected: 2, Actual=3] (ul-indent)"
)
expected_error = ""
# Act
execute_results = scanner.invoke_main(arguments=supplied_arguments)
# Assert
execute_results.assert_results(
expected_output, expected_error, expected_return_code
)
@pytest.mark.rules
def test_md007_bad_list_in_block_quote_after_indented_block():
"""
Test to make sure this rule does trigger with a document that has
a bad nested unordered lists after an indented code block.
"""
# Arrange
scanner = MarkdownScanner()
supplied_arguments = [
"scan",
"test/resources/rules/md007/bad_list_in_block_quote_after_indented_block.md",
]
expected_return_code = 1
expected_output = (
"test/resources/rules/md007/bad_list_in_block_quote_after_indented_block.md:4:6: "
+ "MD007: Unordered list indentation "
+ "[Expected: 2, Actual=3] (ul-indent)"
)
expected_error = ""
# Act
execute_results = scanner.invoke_main(arguments=supplied_arguments)
# Assert
execute_results.assert_results(
expected_output, expected_error, expected_return_code
)
@pytest.mark.rules
def test_md007_bad_list_in_block_quote_after_link_reference_definition():
"""
Test to make sure this rule does trigger with a document that has
a bad nested unordered lists after a link reference definition.
"""
# Arrange
scanner = MarkdownScanner()
supplied_arguments = [
"scan",
"test/resources/rules/md007/bad_list_in_block_quote_after_link_reference_definition.md",
]
expected_return_code = 1
expected_output = (
"test/resources/rules/md007/bad_list_in_block_quote_after_link_reference_definition.md:4:6: "
+ "MD007: Unordered list indentation "
+ "[Expected: 2, Actual=3] (ul-indent)"
)
expected_error = ""
# Act
execute_results = scanner.invoke_main(arguments=supplied_arguments)
# Assert
execute_results.assert_results(
expected_output, expected_error, expected_return_code
)
@pytest.mark.rules
def test_md007_bad_list_in_block_quote_after_other_list():
"""
Test to make sure this rule does trigger with a document that has
a bad nested unordered lists after another list
"""
# Arrange
scanner = MarkdownScanner()
supplied_arguments = [
"scan",
"test/resources/rules/md007/bad_list_in_block_quote_after_other_list.md",
]
expected_return_code = 1
expected_output = (
"test/resources/rules/md007/bad_list_in_block_quote_after_other_list.md:4:6: "
+ "MD007: Unordered list indentation "
+ "[Expected: 2, Actual=3] (ul-indent)"
)
expected_error = ""
# Act
execute_results = scanner.invoke_main(arguments=supplied_arguments)
# Assert
execute_results.assert_results(
expected_output, expected_error, expected_return_code
)
@pytest.mark.rules
def test_md007_good_unordered_list_elements():
"""
Test to make sure this rule does not trigger with a document that has
many nested unordered lists, each one properly indented.
"""
# Arrange
scanner = MarkdownScanner()
supplied_arguments = [
"scan",
"test/resources/rules/md007/good_unordered_list_elements.md",
]
expected_return_code = 0
expected_output = ""
expected_error = ""
# Act
execute_results = scanner.invoke_main(arguments=supplied_arguments)
# Assert
execute_results.assert_results(
expected_output, expected_error, expected_return_code
)
@pytest.mark.rules
def test_md007_good_list_indentation_by_four():
"""
Test to make sure this rule does not trigger with a document that has
each list indented by 4, but configuration to support it.
"""
# Arrange
scanner = MarkdownScanner()
supplied_arguments = [
"--set",
"plugins.md007.indent=$#4",
"scan",
"test/resources/rules/md007/good_list_indentation_by_four.md",
]
expected_return_code = 0
expected_output = ""
expected_error = ""
# Act
execute_results = scanner.invoke_main(arguments=supplied_arguments)
# Assert
execute_results.assert_results(
expected_output, expected_error, expected_return_code
)
@pytest.mark.rules
def test_md007_good_list_indentation_with_start():
"""
Test to make sure this rule does not trigger with a document that has
the level 1 list indented, due to configuration.
"""
# Arrange
scanner = MarkdownScanner()
supplied_arguments = [
"--set",
"plugins.md007.start_indented=$!True",
"scan",
"test/resources/rules/md007/good_list_indentation_with_start.md",
]
expected_return_code = 0
expected_output = ""
expected_error = ""
# Act
execute_results = scanner.invoke_main(arguments=supplied_arguments)
# Assert
execute_results.assert_results(
expected_output, expected_error, expected_return_code
)
| 26,352 | 8,547 |
import copy
from mock import patch, Mock
import urllib
from django.core.files import File
from django.core.exceptions import PermissionDenied
from django.shortcuts import get_object_or_404
from django.urls import reverse
from ticketing.models import (
TicketingEvents,
BrownPaperSettings,
EventbriteSettings,
Purchaser,
SyncStatus,
Transaction
)
from tests.factories.ticketing_factories import (
BrownPaperSettingsFactory,
EventbriteSettingsFactory,
PurchaserFactory,
TicketItemFactory,
TicketingEventsFactory,
)
import nose.tools as nt
from django.contrib.auth.models import User
from django.test import TestCase
from django.test.client import RequestFactory
from django.test import Client
from ticketing.views import (
transactions
)
from tests.factories.gbe_factories import (
ProfileFactory,
UserFactory,
)
from tests.functions.gbe_functions import (
assert_alert_exists,
grant_privilege,
login_as,
)
from gbetext import (
eventbrite_error,
import_transaction_message,
no_settings_error,
sync_off_instructions,
)
from tests.contexts import PurchasedTicketContext
from tests.ticketing.eb_order_list import order_dict
import eventbrite
class TestTransactions(TestCase):
    '''Tests for transactions view'''

    def setUp(self):
        # Fresh request factory/client for every test, plus a profile that
        # has been granted the privilege the transactions view requires.
        self.factory = RequestFactory()
        self.client = Client()
        self.privileged_user = ProfileFactory()
        grant_privilege(self.privileged_user, 'Ticketing - Transactions')
        self.url = reverse('transactions', urlconf='ticketing.urls')

    @patch('eventbrite.Eventbrite.get', autospec=True)
    def test_transactions_sync_ticket_missing(self, m_eventbrite):
        # Syncing an Eventbrite order that references a ticket id we never
        # created should surface a 'danger' alert, record an error
        # SyncStatus, and still log a success status with 0 imports.
        TicketingEvents.objects.all().delete()
        BrownPaperSettings.objects.all().delete()
        EventbriteSettings.objects.all().delete()
        SyncStatus.objects.all().delete()
        BrownPaperSettingsFactory()
        EventbriteSettingsFactory()
        event = TicketingEventsFactory(event_id="1", source=2)
        m_eventbrite.return_value = order_dict
        login_as(self.privileged_user, self)
        response = self.client.post(self.url, data={'Sync': 'Sync'})
        assert_alert_exists(response,
                            'danger',
                            'Error',
                            "Ticket Item for id 3255985 does not exist")
        error_status = SyncStatus.objects.filter(is_success=False).first()
        success_status = SyncStatus.objects.filter(is_success=True).first()
        self.assertEqual(error_status.error_msg,
                         "Ticket Item for id 3255985 does not exist")
        self.assertEqual(success_status.import_type,
                         "EB Transaction")
        self.assertEqual(success_status.import_number,
                         0)

    @patch('eventbrite.Eventbrite.get', autospec=True)
    def test_transactions_sync_eb_only(self, m_eventbrite):
        # BPT sync disabled, EB enabled: one EB transaction imports and the
        # success SyncStatus reflects it.
        TicketingEvents.objects.all().delete()
        BrownPaperSettings.objects.all().delete()
        EventbriteSettings.objects.all().delete()
        SyncStatus.objects.all().delete()
        BrownPaperSettingsFactory(active_sync=False)
        EventbriteSettingsFactory()
        event = TicketingEventsFactory(event_id="1", source=2)
        ticket = TicketItemFactory(ticketing_event=event, ticket_id='3255985')
        limbo, created = User.objects.get_or_create(username='limbo')
        m_eventbrite.return_value = order_dict
        login_as(self.privileged_user, self)
        response = self.client.post(self.url, data={'Sync': 'Sync'})
        assert_alert_exists(response,
                            'success',
                            'Success',
                            "%s Transactions imported: %d -- Eventbrite" % (
                                import_transaction_message,
                                1))
        success_status = SyncStatus.objects.filter(is_success=True).first()
        self.assertEqual(success_status.import_type,
                         "EB Transaction")
        self.assertEqual(success_status.import_number,
                         1)

    @patch('eventbrite.Eventbrite.get', autospec=True)
    def test_transactions_sync_eb_match_prior_purchaser(self, m_eventbrite):
        # A purchaser previously parked on the 'limbo' user gets re-matched
        # to the real user whose email matches.
        TicketingEvents.objects.all().delete()
        BrownPaperSettings.objects.all().delete()
        EventbriteSettings.objects.all().delete()
        BrownPaperSettingsFactory(active_sync=False)
        EventbriteSettingsFactory()
        event = TicketingEventsFactory(event_id="1", source=2)
        ticket = TicketItemFactory(ticketing_event=event, ticket_id='3255985')
        limbo, created = User.objects.get_or_create(username='limbo')
        purchaser = PurchaserFactory(matched_to_user=limbo)
        user = UserFactory(email=purchaser.email)
        m_eventbrite.return_value = order_dict
        login_as(self.privileged_user, self)
        response = self.client.post(self.url, data={'Sync': 'Sync'})
        print(response.content)
        test_purchaser = Purchaser.objects.get(pk=purchaser.pk)
        self.assertEqual(test_purchaser.matched_to_user, user)

    @patch('eventbrite.Eventbrite.get', autospec=True)
    def test_transactions_sync_eb_pagination(self, m_eventbrite):
        # First EB page reports has_more_items with a continuation token;
        # the view must fetch the follow-up page before reporting success.
        TicketingEvents.objects.all().delete()
        BrownPaperSettings.objects.all().delete()
        EventbriteSettings.objects.all().delete()
        BrownPaperSettingsFactory()
        EventbriteSettingsFactory()
        event = TicketingEventsFactory(event_id="1", source=2)
        ticket = TicketItemFactory(ticketing_event=event, ticket_id='3255985')
        limbo, created = User.objects.get_or_create(username='limbo')
        continue_order_page = copy.deepcopy(order_dict)
        continue_order_page['pagination']['has_more_items'] = True
        continue_order_page['pagination']['continuation'] = "eyJwYWdlIjogMn0"
        m_eventbrite.side_effect = [continue_order_page,
                                    order_dict]
        login_as(self.privileged_user, self)
        response = self.client.post(self.url, data={'Sync': 'Sync'})
        assert_alert_exists(response,
                            'success',
                            'Success',
                            "%s Transactions imported: %d -- Eventbrite" % (
                                import_transaction_message,
                                1))

    @patch('eventbrite.Eventbrite.get', autospec=True)
    def test_transactions_sync_eb_w_purchaser(self, m_eventbrite):
        # Order email matches an existing Purchaser record.
        TicketingEvents.objects.all().delete()
        BrownPaperSettings.objects.all().delete()
        EventbriteSettings.objects.all().delete()
        BrownPaperSettingsFactory()
        EventbriteSettingsFactory()
        event = TicketingEventsFactory(event_id="1", source=2)
        ticket = TicketItemFactory(ticketing_event=event, ticket_id='3255985')
        purchaser = PurchaserFactory()
        # NOTE: order_dict is mutated in place here (no deepcopy); later
        # tests relying on the pristine fixture could be affected.
        known_buyer_order = order_dict
        known_buyer_order['attendees'][0]["profile"]["email"] = purchaser.email
        m_eventbrite.return_value = known_buyer_order
        login_as(self.privileged_user, self)
        response = self.client.post(self.url, data={'Sync': 'Sync'})
        assert_alert_exists(response,
                            'success',
                            'Success',
                            "%s Transactions imported: %d -- Eventbrite" % (
                                import_transaction_message,
                                1))

    @patch('eventbrite.Eventbrite.get', autospec=True)
    def test_transactions_sync_eb_w_user(self, m_eventbrite):
        # Order email matches a profile's purchase_email.
        TicketingEvents.objects.all().delete()
        BrownPaperSettings.objects.all().delete()
        EventbriteSettings.objects.all().delete()
        BrownPaperSettingsFactory()
        EventbriteSettingsFactory()
        event = TicketingEventsFactory(event_id="1", source=2)
        ticket = TicketItemFactory(ticketing_event=event, ticket_id='3255985')
        profile = ProfileFactory()
        known_buyer_order = order_dict
        known_buyer_order[
            'attendees'][0]["profile"]["email"] = profile.purchase_email
        m_eventbrite.return_value = known_buyer_order
        login_as(self.privileged_user, self)
        response = self.client.post(self.url, data={'Sync': 'Sync'})
        assert_alert_exists(response,
                            'success',
                            'Success',
                            "%s Transactions imported: %d -- Eventbrite" % (
                                import_transaction_message,
                                1))

    @patch('eventbrite.Eventbrite.get', autospec=True)
    def test_transactions_sync_eb_bad_auth_token(self, m_eventbrite):
        # EB API returns a 401 payload; the view surfaces it as an error
        # alert built from the eventbrite_error template.
        TicketingEvents.objects.all().delete()
        BrownPaperSettings.objects.all().delete()
        EventbriteSettings.objects.all().delete()
        BrownPaperSettingsFactory()
        EventbriteSettingsFactory()
        event = TicketingEventsFactory(event_id="1", source=2)
        ticket = TicketItemFactory(ticketing_event=event, ticket_id='3255985')
        m_eventbrite.side_effect = [{
            "status_code": 401,
            "error_description": "The OAuth token you provided was invalid.",
            "error": "NOT_AUTH"}]
        login_as(self.privileged_user, self)
        response = self.client.post(self.url, data={'Sync': 'Sync'})
        assert_alert_exists(response, 'danger', 'Error', eventbrite_error % (
            401,
            "The OAuth token you provided was invalid."))

    @nt.raises(PermissionDenied)
    def test_user_is_not_ticketing(self):
        # The user does not have the right privileges. Send PermissionDenied
        user = ProfileFactory.create().user_object
        request = self.factory.get(
            reverse('transactions', urlconf='ticketing.urls'),
        )
        request.user = user
        response = transactions(request)

    def test_transactions_w_privilege(self):
        # Default (transaction-centric) view shows purchaser/ticket details
        # without the "- Vendor"/"- Act" event annotations.
        context = PurchasedTicketContext()
        login_as(self.privileged_user, self)
        response = self.client.get(self.url)
        self.assertContains(response, context.transaction.purchaser.email)
        self.assertContains(response, context.profile.display_name)
        self.assertContains(response, context.transaction.ticket_item.title)
        self.assertNotContains(response, "- Vendor")
        self.assertNotContains(response, "- Act")

    def test_transactions_w_privilege_userview_editpriv(self):
        # User-centric view for a Registrar: act-submission tickets are
        # annotated "- Act" and an admin_profile link is rendered.
        context = PurchasedTicketContext()
        context.transaction.ticket_item.ticketing_event.act_submission_event = True
        context.transaction.ticket_item.ticketing_event.save()
        grant_privilege(self.privileged_user, 'Registrar')
        login_as(self.privileged_user, self)
        response = self.client.get(self.url + "?format=user")
        self.assertContains(response, context.profile.user_object.email)
        self.assertContains(response, context.profile.display_name)
        self.assertContains(response,
                            "%s - Act" % context.transaction.ticket_item.title)
        self.assertNotContains(response, "- Vendor")
        self.assertContains(response, reverse(
            'admin_profile',
            urlconf="gbe.urls",
            args=[context.profile.resourceitem_id]))

    def test_transactions_empty(self):
        # View renders (200) even when there are no events or settings.
        TicketingEvents.objects.all().delete()
        BrownPaperSettings.objects.all().delete()
        login_as(self.privileged_user, self)
        response = self.client.get(self.url)
        nt.assert_equal(response.status_code, 200)

    def test_transactions_old_conf_limbo_purchase(self):
        # Filtering by a past conference shows only that conference's
        # transactions (limbo-matched purchaser, vendor ticket) and hides
        # the current conference's data.
        limbo, created = User.objects.get_or_create(username='limbo')
        old_context = PurchasedTicketContext()
        old_context.conference.status = "past"
        old_context.conference.save()
        old_context.transaction.purchaser.matched_to_user = limbo
        old_context.transaction.purchaser.save()
        old_ticket = old_context.transaction.ticket_item
        old_ticket.ticketing_event.vendor_submission_event = True
        old_ticket.ticketing_event.save()
        context = PurchasedTicketContext()
        login_as(self.privileged_user, self)
        response = self.client.get("%s?conference=%s" % (
            self.url,
            old_context.conference.conference_slug))
        self.assertContains(response, old_context.transaction.purchaser.email)
        self.assertContains(response, "%s, %s" % (
            old_context.transaction.purchaser.last_name,
            old_context.transaction.purchaser.first_name))
        self.assertContains(
            response,
            "%s - Vendor" % old_context.transaction.ticket_item.title)
        self.assertNotContains(response, "- Act")
        self.assertNotContains(response, context.transaction.purchaser.email)
        self.assertNotContains(response, context.profile.display_name)
        self.assertNotContains(response, context.transaction.ticket_item.title)

    def test_transactions_old_conf_limbo_purchase_user_view(self):
        # Same past-conference filter in user-centric format: a
        # limbo-matched purchaser renders as "N/A<br>(last, first)".
        limbo, created = User.objects.get_or_create(username='limbo')
        old_context = PurchasedTicketContext()
        old_context.conference.status = "past"
        old_context.conference.save()
        old_context.transaction.purchaser.matched_to_user = limbo
        old_context.transaction.purchaser.save()
        context = PurchasedTicketContext()
        login_as(self.privileged_user, self)
        response = self.client.get("%s?format=user&conference=%s" % (
            self.url,
            old_context.conference.conference_slug))
        self.assertContains(response, old_context.transaction.purchaser.email)
        self.assertContains(response, "N/A<br>(%s, %s)" % (
            old_context.transaction.purchaser.last_name,
            old_context.transaction.purchaser.first_name))
        self.assertContains(response,
                            old_context.transaction.ticket_item.title)
        self.assertNotContains(response, "- Vendor")
        self.assertNotContains(response, "- Act")
        self.assertNotContains(response, context.transaction.purchaser.email)
        self.assertNotContains(response, context.profile.display_name)
        self.assertNotContains(response, context.transaction.ticket_item.title)

    @patch('urllib.request.urlopen', autospec=True)
    def test_transactions_sync_bpt_only(self, m_urlopen):
        # Brown Paper Tickets sync path: the mocked XML order list becomes
        # a Transaction with the expected purchaser details, while the
        # missing EB settings produce a no_settings_error alert.
        TicketingEvents.objects.all().delete()
        BrownPaperSettings.objects.all().delete()
        BrownPaperSettingsFactory()
        event = TicketingEventsFactory(event_id="1")
        ticket = TicketItemFactory(
            ticketing_event=event,
            ticket_id='%s-%s' % (event.event_id, '3255985'))
        limbo, created = User.objects.get_or_create(username='limbo')
        a = Mock()
        order_filename = open("tests/ticketing/orderlist.xml", 'r')
        a.read.side_effect = [File(order_filename).read()]
        m_urlopen.return_value = a
        login_as(self.privileged_user, self)
        response = self.client.post(self.url, data={'Sync': 'Sync'})
        nt.assert_equal(response.status_code, 200)
        transaction = get_object_or_404(
            Transaction,
            reference='A12345678')
        nt.assert_equal(str(transaction.order_date),
                        "2014-08-15 19:26:56")
        nt.assert_equal(transaction.shipping_method, 'Will Call')
        nt.assert_equal(transaction.order_notes, 'None')
        nt.assert_equal(transaction.payment_source, 'Brown Paper Tickets')
        nt.assert_equal(transaction.purchaser.email, 'test@tickets.com')
        nt.assert_equal(transaction.purchaser.phone, '111-222-3333')
        nt.assert_equal(transaction.purchaser.matched_to_user, limbo)
        nt.assert_equal(transaction.purchaser.first_name, 'John')
        nt.assert_equal(transaction.purchaser.last_name, 'Smith')
        assert_alert_exists(response,
                            'success',
                            'Success',
                            "%s Transactions imported: %s - BPT" % (
                                import_transaction_message,
                                "1"))
        assert_alert_exists(response, 'danger', 'Error', no_settings_error)

    def test_transactions_sync_no_sources_on(self):
        # Both sync sources disabled: BPT reports 0 imports, EB reports
        # that sync is switched off.
        TicketingEvents.objects.all().delete()
        BrownPaperSettings.objects.all().delete()
        EventbriteSettings.objects.all().delete()
        BrownPaperSettingsFactory(active_sync=False)
        EventbriteSettingsFactory(active_sync=False)
        login_as(self.privileged_user, self)
        response = self.client.post(self.url, data={'Sync': 'Sync'})
        assert_alert_exists(response,
                            'success',
                            'Success',
                            "%s Transactions imported: %s - BPT" % (
                                import_transaction_message,
                                "0"))
        assert_alert_exists(response,
                            'success',
                            'Success',
                            sync_off_instructions % "Eventbrite")

    def test_transactions_sync_both_on_no_events(self):
        # EB sync enabled but no events configured: both sources report
        # zero imported transactions.
        TicketingEvents.objects.all().delete()
        BrownPaperSettings.objects.all().delete()
        EventbriteSettings.objects.all().delete()
        BrownPaperSettingsFactory(active_sync=False)
        EventbriteSettingsFactory()
        login_as(self.privileged_user, self)
        response = self.client.post(self.url, data={'Sync': 'Sync'})
        assert_alert_exists(response,
                            'success',
                            'Success',
                            "%s Transactions imported: %s - BPT" % (
                                import_transaction_message,
                                "0"))
        assert_alert_exists(response,
                            'success',
                            'Success',
                            "%s Transactions imported: %d -- Eventbrite" % (
                                import_transaction_message,
                                0))
| 18,168 | 5,240 |
# Copyright (c) 2012-2013, Mark Peek <mark@peek.org>
# All rights reserved.
#
# See LICENSE file for full license.
from aws import Action as BaseAction
from aws import BaseARN
# Human-readable service name and the IAM action prefix used by every
# Action/ARN constructed in this module.
service_name = 'AWS XRay'
prefix = 'xray'
class Action(BaseAction):
    """An IAM policy action belonging to the AWS X-Ray service.

    The 'xray' service prefix is applied automatically, so callers only
    supply the bare action name (e.g. 'GetTraceSummaries').
    """

    def __init__(self, action=None):
        super(Action, self).__init__(prefix, action)
class ARN(BaseARN):
    """An Amazon Resource Name scoped to the AWS X-Ray service.

    Resource, region, and account all default to empty strings, matching
    wildcard-style ARNs.
    """

    def __init__(self, resource='', region='', account=''):
        super(ARN, self).__init__(service=prefix, resource=resource,
                                  region=region, account=account)
# One module-level constant per X-Ray API action, so policies can reference
# them as e.g. xray.GetTraceSummaries.
BatchGetTraces = Action('BatchGetTraces')
CreateGroup = Action('CreateGroup')
CreateSamplingRule = Action('CreateSamplingRule')
DeleteGroup = Action('DeleteGroup')
DeleteSamplingRule = Action('DeleteSamplingRule')
GetEncryptionConfig = Action('GetEncryptionConfig')
GetGroup = Action('GetGroup')
GetGroups = Action('GetGroups')
GetSamplingRules = Action('GetSamplingRules')
GetSamplingStatisticSummaries = Action('GetSamplingStatisticSummaries')
GetSamplingTargets = Action('GetSamplingTargets')
GetServiceGraph = Action('GetServiceGraph')
GetTimeSeriesServiceStatistics = Action('GetTimeSeriesServiceStatistics')
GetTraceGraph = Action('GetTraceGraph')
GetTraceSummaries = Action('GetTraceSummaries')
PutEncryptionConfig = Action('PutEncryptionConfig')
PutTelemetryRecords = Action('PutTelemetryRecords')
PutTraceSegments = Action('PutTraceSegments')
UpdateGroup = Action('UpdateGroup')
UpdateSamplingRule = Action('UpdateSamplingRule')
| 1,518 | 448 |
"""The setup script."""
from setuptools import setup, find_packages
import versioneer
# Package metadata (name, description, etc.) is presumably supplied via
# setup.cfg — TODO confirm; versioneer derives the version from VCS tags.
setup(
    version=versioneer.get_version(),
    cmdclass=versioneer.get_cmdclass(),
    packages=find_packages(),
    include_package_data=True
)
| 235 | 73 |
import acme_notifications.init

# WSGI entry point: servers (gunicorn/uwsgi/mod_wsgi) look up the module
# attribute named `application`.
application = acme_notifications.init.app
| 74 | 23 |
from __future__ import print_function
import os
import shutil
import subprocess
import logging
import pyhhi.build.common.ver as ver
import pyhhi.build.common.bldtools as bldtools
from pyhhi.build.common.system import SystemInfo
class BjamBuilder(object):
    """The BjamBuilder class supports building a new bjam executable.

    The bjam/b2 engine sources are located inside a Boost source tree and
    compiled with the platform's native build script (build.bat/build.sh).
    """

    def __init__(self, sys_info, top_dir, bb_version):
        """Locate the bjam engine sources inside the Boost tree at top_dir.

        sys_info   -- SystemInfo describing the host platform.
        top_dir    -- root directory of the Boost source tree.
        bb_version -- Boost.Build version tuple; must not be None.
        """
        self._logger = logging.getLogger(__name__)
        bjam_src_tree_list = []
        self._sys_info = sys_info
        self._bjam_src_dir = None
        self._top_dir = top_dir
        self._bb_version = bb_version
        self._toolset = None
        self._tmp_dirs = []
        if self._sys_info.is_windows():
            self._bjam_names = ('b2.exe', 'bjam.exe')
        else:
            self._bjam_names = ('b2', 'bjam')
        if sys_info.is_windows():
            build_script = 'build.bat'
        else:
            build_script = 'build.sh'
        # the bjam source is supposed to come from the boost source tree.
        assert bb_version is not None
        boost_tools_dir = os.path.join(self._top_dir, 'tools')
        # Candidate engine-source locations; the layout moved around across
        # Boost releases, so all known spots are probed in order.
        bjam_src_tree_list.append(os.path.join(boost_tools_dir, 'build', 'src', 'engine'))
        bjam_src_tree_list.append(os.path.join(boost_tools_dir, 'build', 'v2', 'engine'))
        bjam_src_tree_list.append(os.path.join(boost_tools_dir, 'build', 'v2', 'engine', 'src'))
        bjam_src_tree_list.append(os.path.join(boost_tools_dir, 'jam', 'src'))
        for d in bjam_src_tree_list:
            # check for the build script to figure out which source location holds the bjam source files.
            if os.path.exists(os.path.join(d, build_script)):
                self._bjam_src_dir = d
                break
        if self._bjam_src_dir is not None:
            # create a new toolset suitable to build bjam on this platform.
            self._toolset = bldtools.BjamToolset(sys_info, bb_version)

    def build(self, target_arch='x86_64'):
        """Builds the b2 executable from source and returns the full path to the executable.

        Raises Exception if the build script fails or no executable is
        produced.
        """
        assert self._bjam_src_dir is not None
        if self._sys_info.is_windows() and (ver.version_compare(self._bb_version, (1, 66, 0)) >= 0):
            # Boost >= 1.66 always builds a 32 bit b2 on Windows.
            target_arch = 'x86'
        # create a new list of temporary directories to be removed after the bjam executable has been installed.
        self._tmp_dirs = []
        bjam_bin_dir = os.path.join(self._bjam_src_dir, self._get_bjam_bin_dir_folder(target_arch))
        self._tmp_dirs.append(bjam_bin_dir)
        b2_prog_path = os.path.join(bjam_bin_dir, self._bjam_names[0])
        bjam_prog_path = os.path.join(bjam_bin_dir, self._bjam_names[1])
        bootstrap_dir = os.path.join(self._bjam_src_dir, 'bootstrap')
        self._tmp_dirs.append(bootstrap_dir)
        if os.path.exists(bootstrap_dir):
            # in case a previous build failed to remove the temporary files, remove bootstrap completely.
            shutil.rmtree(bootstrap_dir)
        cur_dir = os.getcwd()
        os.chdir(self._bjam_src_dir)
        print("========================================================")
        print("Start building bjam in", self._bjam_src_dir, "...")
        print("========================================================")
        build_script_args = []
        if self._sys_info.is_windows():
            build_script = os.path.join(self._bjam_src_dir, 'build.bat')
            build_script_args.append(build_script)
            bjam_toolset_arg = self._toolset.get_bjam_toolset(build_script_format=True)
            build_script_args.append(bjam_toolset_arg)
            if target_arch == 'x86_64':
                # build.bat builds a 32 bit b2 executable by default but we prefer a native b2.
                if bjam_toolset_arg in ['vc141', 'vc14']:
                    build_script_args.append('amd64')
                else:
                    build_script_args.append('x86_amd64')
        else:
            build_script = os.path.join(self._bjam_src_dir, 'build.sh')
            build_script_args.append(build_script)
        retv = subprocess.call(build_script_args)
        if retv != 0:
            raise Exception("Building bjam failed. Please contact technical support.")
        # restore the previous current working directory
        os.chdir(cur_dir)
        if os.path.exists(b2_prog_path):
            return b2_prog_path
        elif os.path.exists(bjam_prog_path):
            return bjam_prog_path
        # BUG FIX: the original ended with `assert False; return None`,
        # which silently returned None under `python -O`. Fail loudly.
        raise Exception("bjam build succeeded but no bjam executable was found in " + bjam_bin_dir)

    def remove_tmp_files(self):
        """Removes all temporary files created by the bjam build script."""
        for d in self._tmp_dirs:
            if os.path.exists(d):
                try:
                    shutil.rmtree(d)
                except OSError as exc:
                    # BUG FIX: this previously caught WindowsError, a name
                    # that does not exist on non-Windows Python and raised
                    # NameError there. On Windows, WindowsError is an alias
                    # of OSError, so catching OSError covers both.
                    print("WARNING: ignoring spurious error ["
                          + str(getattr(exc, 'winerror', exc.errno)) + "]: "
                          + str(exc.strerror) + " raised by shutil.rmtree().")
                    if os.path.exists(d):
                        file_list = os.listdir(d)
                        if file_list:
                            print("The directory '" + d + "' is not empty for unknown reason: ", file_list)
        self._tmp_dirs = []

    def _get_bjam_bin_dir_folder(self, target_arch='x86_64'):
        """Returns the platform-specific folder name the bjam build script
        drops its binaries into (e.g. 'bin.linuxx86_64')."""
        if self._sys_info.is_windows():
            bin_dir = 'bin.nt' + target_arch
        elif self._sys_info.is_linux():
            bin_dir = 'bin.linux' + target_arch
        elif self._sys_info.is_macosx():
            bin_dir = 'bin.macosx' + target_arch
        else:
            # BUG FIX: formerly `assert False` followed by a read of an
            # unbound local under `python -O`; raise a real error instead.
            raise Exception("Unsupported platform: cannot determine the bjam bin directory.")
        return bin_dir
class BjamLauncher(object):
    """Runs an existing bjam/b2 executable and helps pick a sensible
    parallel job count."""

    def __init__(self, sys_info=None, verbosity=1):
        self._logger = logging.getLogger(__name__)
        # Fall back to probing the current host when no SystemInfo is given.
        self._sys_info = SystemInfo() if sys_info is None else sys_info
        self._verbosity_level = verbosity

    def get_optimal_number_bjam_jobs(self):
        """Returns the optimal number of bjam jobs."""
        jobs = self._sys_info.get_number_processors()
        # Honor an explicit upper bound from the environment, if any.
        limit = os.environ.get('BJAM_MAX_JOBS')
        if limit is not None:
            jobs = min(jobs, int(limit, 10))
        assert jobs >= 1
        return jobs

    def launch(self, argv):
        """Launch a bjam build and block until it terminates."""
        if self._verbosity_level > 0:
            # assemble the bjam command line for logging purposes
            print("Launching: " + ' '.join(argv))
        retv = subprocess.call(argv)
        if retv < 0:
            self._logger.debug("child was terminated by signal: %d", -retv)
        else:
            self._logger.debug("child returned: %d", retv)
        return retv
| 6,839 | 2,204 |
# coding: utf-8
# In[ ]:
from __future__ import division
get_ipython().magic(u'matplotlib inline')
import numpy as np
import matplotlib.pyplot as plt
import math
import multivarlinreg
import rmse
# In[ ]:
#Linear regression
red_train = np.loadtxt('redwine_training.txt')
red_test = np.loadtxt('redwine_testing.txt')
red_train_data = red_train[:, :11]
red_train_score = red_train[:, 11]
red_test_data = red_test[:, :11]
red_test_score = red_test[:, 11]
#red_train.shape
# In[ ]:
"""
def multivarlinreg(data, ground_truth):
#data = full_data[:, :-1]
X = np.hstack((data, np.repeat(1, data.shape[0]).reshape(-1, 1)))
X_T_X = np.dot(X.T, X)
# if full-rank matrix or positive definite matrix:
#check if it invertible
if np.linalg.det(X_T_X) != 0:
inverse = np.linalg.inv(X_T_X)
w = np.dot(np.dot(inverse, X.T), ground_truth) #w0 at the last column
#print w
return w
else:
print "use other method"
"""
# In[ ]:
#only contains the first feature (fixed acidity)
train_fixed_acidity = red_train_data[:, 0].reshape(-1, 1)
train_w_acidity = multivarlinreg.multivarlinreg(train_fixed_acidity, red_train_score)
train_w_acidity
#the propotion of acidity is not very high; bias is very large for it???
#actually we can not use it to predivt the wine's quality very well
#array([0.05035934, 5.2057261 ])
# In[ ]:
#physiochemical
w_all = multivarlinreg.multivarlinreg(red_train_data, red_train_score)
w_all.shape
np.set_printoptions(suppress=True)
w_all
#positive relate negative relation
#the first weight for acidity is changed
#Some features play important roles in wine's quality. Some features are negatively related.
# In[ ]:
"""#Exercise 3 (Evaluating Linear Regression).
def rmse(predicted_value, ground_truth):
diff = ground_truth - predicted_value
diff_square = np.dot(diff, diff)
#rmse = np.sqrt(np.divide(diff_square, ground_truth.shape[0]))
rmse = np.sqrt(diff_square/ground_truth.shape[0])
return rmse
"""
# In[ ]:
#1-dimensional input variables using the training set
#first feature for the test set
test_fixed_acidity = red_test_data[:, 0].reshape(-1, 1)
test_X_acidity = np.hstack((test_fixed_acidity, np.repeat(1, test_fixed_acidity.shape[0]).reshape(-1, 1)))
predicted_score_acidity = np.dot(test_X_acidity, train_w_acidity.T)
#predicted_score_acidity = predicted_value(train_fixed_acidity, test_fixed_acidity, red_test_score)
rmse.rmse(predicted_score_acidity, red_test_score)
#0.7860892754162216
# In[ ]:
#full 11-dimensional input variables
test_X = np.hstack((red_test_data, np.repeat(1, red_test_data.shape[0]).reshape(-1, 1)))
predicted_score = np.dot(test_X, w_all.T)
rmse.rmse(predicted_score, red_test_score)
#0.644717277241364
| 2,773 | 1,120 |
from django.db import models
class Question(models.Model):
    # A poll question and the timestamp it was published.
    question_text = models.CharField(max_length=200)
    pub_date = models.DateTimeField('date published')

    def __str__(self):
        # Added for consistency: every other model in this module defines
        # __str__, which makes instances readable in the admin and shell.
        return self.question_text
class Choice(models.Model):
    # One selectable answer for a Question, with its running vote tally.
    question = models.ForeignKey(Question, on_delete=models.CASCADE)
    choice_text = models.CharField(max_length=200)
    votes = models.IntegerField(default=0)

    def __str__(self):
        # Added for consistency: every other model in this module defines
        # __str__, which makes instances readable in the admin and shell.
        return self.choice_text
class User(models.Model):
    # Lightweight app-specific profile; NOT django.contrib.auth.models.User.
    # lastname = models.CharField(max_length=200)
    # firstname = models.CharField(max_length=200)
    email = models.CharField(max_length=200)
    pseudo = models.CharField(max_length=200)
    age = models.IntegerField(default=18)
    city = models.CharField(max_length=200)
    situation = models.CharField(max_length=200)
    status = models.CharField(max_length=200)

    def __str__(self):
        return self.pseudo
class Organization(models.Model):
    # An organization that provides aids (see Aid.organization).
    name = models.CharField(max_length=200)
    status = models.CharField(max_length=200)

    def __str__(self):
        return self.name
class Category(models.Model):
    # Classification label for aids (see Aid.category).
    label = models.CharField(max_length=200)

    def __str__(self):
        return self.label
class Type(models.Model):
    # Kind-of-aid label (see Aid.type).
    label = models.CharField(max_length=200)

    def __str__(self):
        return self.label
class Aid(models.Model):
    # A benefit/aid offered by an Organization, classified by Category and Type.
    title = models.CharField(max_length=200)
    description = models.CharField(max_length=1500)
    organization = models.ForeignKey(Organization, on_delete=models.CASCADE)
    category = models.ForeignKey(Category, on_delete=models.CASCADE)
    # NOTE(review): field name shadows the `type` builtin as an attribute;
    # renaming would require a migration, so it is left as-is.
    type = models.ForeignKey(Type, on_delete=models.CASCADE)

    def __str__(self):
        return self.title
class Criteria(models.Model):
    # A label/value eligibility condition attached to a specific Aid.
    label = models.CharField(max_length=200)
    value = models.CharField(max_length=200)
    aid = models.ForeignKey(Aid, on_delete=models.CASCADE)
    # op = models.ForeignKey(Question, on_delete=models.CASCADE)

    def __str__(self):
        return self.label
| 1,915 | 640 |
# from .utils import (
# IMAGENET_MEAN, IMAGENET_STD, CollateDetection, CollateTracking,
# get_default_transforms, get_default_detection_transforms, get_default_tracking_transforms
# )
# from .coco import COCODataset
# from .voc import VOCDataset
# from .crowdhuman import CrowdHumanDataset
# from .mot import MOTTrackingSequence, MOTTrackingDataset
# from .kitti import KITTITrackingSequence, KITTITrackingDataset
# from .detection_for_tracking import DetectionForTracking
# from .inference import InferenceDataset
# from .builder import build_dataset, build_dataloader
# __all__ = [
# "IMAGENET_MEAN", "IMAGENET_STD", "CollateDetection", "CollateTracking",
# "get_default_transforms", "get_default_detection_transforms", "get_default_tracking_transforms",
# "COCODataset", "VOCDataset", "CrowdHumanDataset", "DetectionForTracking",
# "MOTTrackingSequence", "MOTTrackingDataset", "KITTITrackingSequence", "KITTITrackingDataset",
# "InferenceDataset",
# "build_dataset", "build_dataloader"
# ]
| 1,027 | 368 |
# -*- coding: utf-8 -*-
# Copyright 2010 British Broadcasting Corporation and Kamaelia Contributors(1)
#
# (1) Kamaelia Contributors are listed in the AUTHORS file and at
# http://www.kamaelia.org/AUTHORS - please extend this file,
# not this notice.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from Axon.Component import component
class Translator(component):
    """Formats raw IRC events arriving on per-event-type inboxes into
    human-readable strings sent to "outbox"."""

    # BUG FIX: main() polls 'privmsg', 'channel', 'nonPrivmsg', 'notice',
    # 'ERR' and 'RPL', but only 'inbox'/'control' were declared. Axon can
    # only deliver to declared inboxes, so the missing boxes are added.
    Inboxes = {"inbox": " standard inbox",
               "control": "shutdown messages",
               "privmsg": "PRIVMSG events",
               "channel": "channel events",
               "nonPrivmsg": "miscellaneous non-PRIVMSG events",
               "notice": "NOTICE events",
               "ERR": "error replies",
               "RPL": "numeric replies"}
    Outboxes = {"outbox": "",
                "signal": ""}

    def __init__(self, nick):
        super(Translator, self).__init__()
        # Own nick, kept for formatting decisions by subclasses/extensions.
        self.nick = nick

    def main(self):
        # Poll each event inbox, format its message, and forward the result.
        while 1:
            if not self.anyReady():
                self.pause()
            yield 1
            if self.dataReady('privmsg'):
                formatted = self.formatPrivmsg(self.recv('privmsg'))
                self.send(formatted)
            if self.dataReady('channel'):
                formatted = self.formatChannelMsg(self.recv('channel'))
                self.send(formatted)
            if self.dataReady('nonPrivmsg'):
                # BUG FIX: this branch previously recv'd from 'channel',
                # consuming the wrong box and never draining 'nonPrivmsg'.
                formatted = self.formatMisc(self.recv('nonPrivmsg'))
                self.send(formatted)
            if self.dataReady('notice'):
                formatted = self.formatNotice(self.recv('notice'))
                self.send(formatted)
            if self.dataReady('ERR'):
                formatted = self.formatError(self.recv('ERR'))
                self.send(formatted)
            if self.dataReady('RPL'):
                formatted = self.formatNumReply(self.recv('RPL'))
                self.send(formatted)

    def formatPrivmsg(self, msg):
        """Render a (tag, sender, recipient, body) PRIVMSG tuple as text."""
        temp, sender, recipient, body = msg
        # NOTE(review): the ACTION branch looks inconsistent — if body is a
        # string, body[0] == 'ACTION' can never be true; if it is a list,
        # body.find() would fail. The exact shape of `body` cannot be
        # confirmed from here, so the original logic is kept; verify against
        # the IRC client's privmsg format.
        if body[0] == 'ACTION':
            send = "*** %s %s" % (sender, body[body.find('ACTION') + 7])
        else:
            send = "%s: %s" % (sender, body)
        return send

    def formatChannelMsg(self, msg):
        # Pass-through stubs below: subclasses may override to customize.
        return msg

    def formatMisc(self, msg):
        return msg

    def formatNotice(self, msg):
        return msg

    def formatError(self, msg):
        return msg

    def formatNumReply(self, msg):
        return msg
if __name__ == '__main__':
    # Demo wiring: console input -> IRC client -> Translator -> console.
    from Kamaelia.Util.Console import ConsoleReader, ConsoleEchoer
    from Kamaelia.Chassis.Graphline import Graphline
    from Kamaelia.Chassis.Pipeline import Pipeline
    from Prefab import ComplexIRCClientPrefab

    # Only the privmsg path is linked in this demo; the formatter's other
    # inboxes are left unconnected.
    client = Graphline(
        prefab = ComplexIRCClientPrefab(host="irc.freenode.net", nick="kamaeliabot", defaultChannel="#kamtest"),
        formatter = Translator("kamaeliabot"),
        linkages = {("prefab", "outbox") : ("formatter", "privmsg")}
    )
    Pipeline(ConsoleReader(), client, ConsoleEchoer()).run()
| 3,260 | 961 |
#!/usr/bin/python
import numpy as np
from hybmc.models.StochasticProcess import StochasticProcess
class DeterministicModel(StochasticProcess):
    """A zero-factor model in which all discounting and FX-style asset
    values are fully determined by the input yield curves.

    size()/factors() are 0 and evolve() is a no-op: there is no stochastic
    state, so every quantity is read straight off the curves.
    """

    # Python constructor
    def __init__(self,
                 domAlias,       # name of our domestic (numeraire) currency
                 domCurve,       # domestic (discounting) yield curve
                 forAliases,     # list of foreign currencies (all relative to dom currency)
                 forAssetSpots,  # list of foreign asset initial values
                 forCurves ):    # list of foreign (discounting) yield curves
        #
        self.domAlias = domAlias
        self.domCurve = domCurve
        self.forAliases = forAliases
        self.forAssetSpots = forAssetSpots
        self.forCurves = forCurves
        #
        # we need to know the model index for a given alias
        if self.forAliases is not None:
            self.index = { self.forAliases[k] : k for k in range(len(self.forAliases)) }
        else:
            self.index = None

    def size(self):
        # No state variables: the model is purely deterministic.
        return 0

    def factors(self):
        # No Brownian factors either.
        return 0

    def initialValues(self):
        return np.array([])

    def evolve(self, t0, X0, dt, dW, X1):
        # there is nothing to be done
        return

    # the short rate over an integration time period
    # this is required for drift calculation in multi-asset and hybrid models
    def shortRateOverPeriod(self, t0, dt, X0, X1):
        B_d = self.domCurve.discount(t0) / self.domCurve.discount(t0 + dt)  # deterministic drift part for r_d
        return np.log(B_d) / dt

    # bond volatility is used in hybrid model vol adjuster
    def zeroBondVolatility(self, t, T):
        # we wrap scalar bond volatility into array to allow
        # for generalisation to multi-factor models
        return np.array([ ])

    def zeroBondVolatilityPrime(self, t, T):
        # we wrap scalar bond volatility into array to allow
        # for generalisation to multi-factor models
        return np.array([ ])

    # interface for payoff calculation

    def numeraire(self, t, X):
        # Domestic bank account grown at the curve's forward rates.
        return 1.0 / self.domCurve.discount(t)

    def asset(self, t, X, alias):
        # Forward FX-style asset: spot carried by the curves' discount ratio.
        k = self.index[alias]  # this should throw an exception if alias is unknown
        return self.forAssetSpots[k] * self.forCurves[k].discount(t) / self.domCurve.discount(t)

    def zeroBond(self, t, T, X, alias):
        # Zero-coupon bond P(t,T) in the domestic or a foreign curve.
        if alias is None or alias==self.domAlias:
            return self.domCurve.discount(T) / self.domCurve.discount(t)
        k = self.index[alias]  # this should throw an exception if alias is unknown
        return self.forCurves[k].discount(T) / self.forCurves[k].discount(t)

    def path(self):
        return self.Path(self)

    # for actual payoffs we need a path object
    class Path():
        """Thin adapter exposing the payoff-facing interface; simply
        forwards to the owning model with no stochastic state."""

        # Python constructor
        def __init__(self, model):
            self.model = model

        # the numeraire in the domestic currency used for discounting future payoffs
        def numeraire(self, t):
            # we may add numeraire adjuster here...
            return self.model.numeraire(t,None)

        # a domestic/foreign currency zero coupon bond
        def zeroBond(self, t, T, alias):
            # we may add zcb adjuster here
            return self.model.zeroBond(t, T, None, alias)

        # an asset price for a given currency alias
        def asset(self, t, alias):
            # we may add asset adjuster here
            return self.model.asset(t, None, alias)

        # there are no components to keep track of
        def stateAliases(self):
            return []

        def factorAliases(self):
            return []
# some easy to use functions
def DcfModel(curve):
    """Convenience wrapper: a single-currency, discounting-only model
    built from one yield curve (no foreign currencies or assets)."""
    return DeterministicModel(domAlias=None,
                              domCurve=curve,
                              forAliases=None,
                              forAssetSpots=None,
                              forCurves=None)
| 3,840 | 1,136 |
from __future__ import print_function
from __future__ import division
from Notation import SpaceGroupsDict as spgs
# NOTE(review): two identical spacegroups() lookup tables are instantiated
# here; SpGr and notation_dictionary appear to be redundant aliases —
# confirm before consolidating.
SpGr = spgs.spacegroups()
notation_dictionary = spgs.spacegroups()
import PeakFinding2 as pfnd #custom library to handle the functions behind Find_Peaks
import UniversalLoader2 as uvll #custom library to handle the functions behind UniversalLoader
import requests
import numpy as np
def Load_Profile(path, get_metadata=False):
    """
    Loads an image and extracts the relevant metadata for the image based in file type
    Inputs:
        path : string, contains the location of the file on the local machine
        get_metadata : bool, placeholder kept for interface compatibility
                       (NOTE(review): currently never read in this function)
    Outputs:
        image_data : np.array, the array of values in the image
        calibration : dictionary, the essential metadata to convert from pixels to two_theta space
    Raises:
        ValueError for unsupported file extensions.
    """
    import os  # local import: only needed for the extension split

    valid_filetypes = {".csv": uvll.csv_extract,
                       ".txt": uvll.txt_extract}
    # BUGFIX: the old code sliced path[-4:], which only works for 4-character
    # extensions; splitext handles any extension length (and no extension).
    file_type = os.path.splitext(path)[1].lower()
    # Check that the provided file is a supported file type
    if file_type in valid_filetypes:
        # Call the appropriate extraction function
        profile, scale = valid_filetypes[file_type](path)
    else:
        raise ValueError("Unsupported file type: please use a {}".format(
            sorted(valid_filetypes)))
    return profile, scale
def Find_Peaks(profile, scale, **kwargs):
    """
    Pulls out the peaks from a radial profile
    Inputs:
        profile : np.array-like, intensity profile of the diffraction pattern
        scale : np.array, d-spacing value of each profile sample
        max_numpeaks : int kwarg, upper bound on the number of peaks kept (default 75)
        dspace_range : [lo, hi] kwarg, d-spacing window considered (default [0.5, 6])
        filter_size : int kwarg, voting filter size (defaults to len/50, min 3)
    Outputs:
        peak_locs : dictionary, contains d_spacings and input_vector arrays of
                    peak locations found in the profile
        peaks_h : handle returned by pfnd.plot_peaks
    """
    max_numpeaks = kwargs.get('max_numpeaks', 75)
    scale_range = kwargs.get('dspace_range', [0.5, 6])
    # boolean mask selecting the d-spacing window of interest
    squished_scale = [scale_range[0] < x < scale_range[1] for x in scale]
    print(squished_scale)
    filter_size_default = max(int(scale[squished_scale].shape[0] / 50), 3)
    print(filter_size_default)
    kwargs['filter_size'] = kwargs.get('filter_size', filter_size_default)
    print('filter size')
    print(kwargs['filter_size'])
    # find the location of the peaks in pixel space
    peaks = pfnd.vote_peaks(profile[squished_scale], **kwargs)
    peaks_d = scale[squished_scale][peaks > 0]
    thresh = 0
    orig_length = len(peaks_d)
    if len(peaks_d) > max_numpeaks:
        print(len(peaks_d))
        # BUGFIX: the literals used to be joined with '+' BEFORE .format(),
        # so the {} placeholder was never substituted.
        print(("WARNING: {} peaks were detected,"
               " some of the peaks will be trimmed."
               "\nFor best results. Please check calibration or run manual peak detection.").format(len(peaks_d)))
        # raise the vote threshold until at most max_numpeaks survive
        srt_peaks = np.sort(peaks[peaks > 0])
        thresh = srt_peaks[len(peaks_d) - max_numpeaks]
        if len(scale[squished_scale][peaks > thresh]) == 0 and thresh > 0:
            thresh -= 1
        peaks_d = scale[squished_scale][peaks > thresh]
        print(len(peaks_d))
        print(thresh)
        print(srt_peaks)
        if len(peaks_d) == orig_length:
            print("WARNING: reduction based on votes unsuccessful. try other parameters")
        elif len(peaks_d) > max_numpeaks:
            print("WARNING: partial reduction to {} peaks.".format(len(peaks_d)))
    peak_locs = {"d_spacing": scale[squished_scale][peaks > thresh],
                 # map d-spacings onto the fixed-length model input vector
                 "vec": [int(round((x - .5) * 164)) - 1 for x in peaks_d]}
    # Display the data
    peaks_h = pfnd.plot_peaks(profile[squished_scale], scale[squished_scale], peaks, thresh, **kwargs)
    if len(peak_locs['vec']) <= 4:
        # BUGFIX: same broken '+'-before-format pattern as above
        print(("WARNING: only {} peaks were detected,"
               " this is lower than the recommended 4+ peaks needed"
               "\nFor best results. Please check calibration.").format(len(peaks_d)))
    return peak_locs, peaks_h
def find_name_in_dict(name, dict):
    """Reverse lookup: return the key in *dict* whose value equals *name*.

    Returns False when no value matches.  NOTE(review): key 0 is falsy, so
    callers must test the result with ``is not False`` rather than truthiness.
    """
    for ind, nm in dict.items():
        if nm == name:
            return ind  # stop at the first hit instead of scanning the rest
    return False
def Send_For_Classification(peak_locations, chem_vec, mode, crystal_family, user_info, URL, prediction_per_level, fam=None):
    """
    Input:
        peak_locations : dictionary, contains two_theta, d_spacings, and input_vector arrays
                         peaks locations found in the profile
        chem_vec : chemistry descriptor forwarded verbatim to the server
        mode : classifier mode string forwarded to the server
        crystal_family : optional family name to pin the family-level prediction
        user_info : dictionary, contains user profile information for tracking
                    and security purposes
        URL : base URL of the prediction service
        prediction_per_level : [n_family, n_genus, n_species] prediction counts
        fam : unused legacy argument kept for interface compatibility
    Outputs:
        payload : dictionary, contains classification statistics and predictions
    Calls:
        URL: POST, sends peak locations to the server for classification
    """
    int_to_fam = {0: "triclinic",
                  1: "monoclinic",
                  2: "orthorhombic",
                  3: "tetragonal",
                  4: "trigonal",
                  5: "hexagonal",
                  6: "cubic"}
    payload = {'peaks': peak_locations['vec'],
               'chemistry': chem_vec,
               'level': "Family",
               'mode': mode,
               'number': 0}
    payload['prediction_per_level'] = prediction_per_level
    skip_family = False
    # reproduce the gen 1 ability to specify the family to look in. Use this if the family prediction seems suspect.
    if crystal_family:
        print(" setting the family to search in is old functionality that is no longer needed for most predictions")
        number = find_name_in_dict(crystal_family, int_to_fam)
        # BUGFIX: 'triclinic' maps to key 0, which is falsy; the old
        # 'if number:' test silently rejected it as unrecognized.
        if number is not False:
            payload['family'] = crystal_family
            payload['family_1'] = crystal_family
            payload['fam_confidence_1'] = float("nan")
            payload['number'] = number + 1
            skip_family = True
            payload = Classify_Family(payload, user_info, URL, 1, 1)
            # pad the remaining prediction slots with NaN so downstream
            # consumers always see a fixed-shape payload
            for k in range(1, prediction_per_level[0]):
                payload['family_' + str(1 + k)] = float("nan")
                payload['fam_confidence_' + str(1 + k)] = float("nan")
                for l in range(0, prediction_per_level[1]):
                    num_l = (k) * prediction_per_level[1] + l + 1
                    payload['genus_' + str(num_l)] = float("nan")
                    payload['gen_confidence_' + str(num_l)] = float("nan")
                    for m in range(0, prediction_per_level[2]):
                        num_m = (num_l - 1) * prediction_per_level[2] + m + 1
                        payload['species_' + str(num_m)] = float("nan")
                        payload['spec_confidence_' + str(num_m)] = float("nan")
        else:
            print("family name not recognized, ignoring input.")
    if not skip_family:
        # BUGFIX: the old code POSTed the same payload twice (once for the
        # printed text, once for the JSON); issue a single request instead.
        response = requests.post(URL + "predict", json=payload)
        print(response.text)
        family = response.json()
        print(family['votes'])
        fam_votes = family['votes']
        pred = []
        fam_confidence = confidence(fam_votes)
        # walk the vote vector from strongest to weakest family
        for k in range(prediction_per_level[0]):
            pred.append(np.argmax(fam_votes))
            payload['family'] = int_to_fam[pred[k]]
            payload['family_' + str(k + 1)] = int_to_fam[pred[k]]
            payload['fam_confidence_' + str(k + 1)] = fam_confidence[pred[k]]
            payload['number'] = int(pred[k]) + 1
            w = fam_confidence[pred[k]]
            payload = Classify_Family(payload, user_info, URL, w, k + 1)
            # knock out this family so the next argmax finds the runner-up
            fam_votes[pred[k]] = -float("inf")
    return payload
def confidence(array):
    """Softmax-normalize a vote vector into pseudo-probabilities.

    Uses the max-shift trick (exp(x - max(x)) / sum(...)), which is
    mathematically identical to exp(x)/sum(exp(x)) but cannot overflow
    np.exp for large vote totals.
    """
    np_array = np.array(array, dtype=float)
    if np_array.size == 0:
        return np_array  # nothing to normalize
    shifted = np.exp(np_array - np.max(np_array))
    return shifted / np.sum(shifted)
def Classify_Family(payload, user_info, URL, weight, pred_number):
    """Request genus-level predictions for the family currently in *payload*.

    Mutates and returns *payload*: fills genus_<n>/gen_confidence_<n> for the
    top prediction_per_level[1] genera, recursing into species level via
    Classify_Genus.  *weight* is the parent family confidence and scales the
    genus confidences; *pred_number* is the 1-based family slot.
    """
    payload['level'] = "Genera"
    # Once the family is known, predicts the genus
    print("----")
    print(payload)
    genus = requests.post(URL + "predict", json=payload, timeout=30).json()
    print("---genus---")
    genera_votes = genus['votes']
    genera_con = confidence(genera_votes)
    pred = []
    genera_pred = []
    for k in range(payload['prediction_per_level'][1]):
        pred.append(int(np.argmax(genera_votes)))
        # flatten (family slot, genus rank) into a single 1-based slot index
        g_pred_num = (pred_number - 1) * payload['prediction_per_level'][1] + k + 1
        # map the local class index onto the global genus numbering for this
        # family (edges holds the first genus id of each family)
        genera_pred.append(pred[k] + notation_dictionary.edges["genus"][payload['family']][0])
        payload['genus_' + str(g_pred_num)] = genera_pred[k]
        payload['gen_confidence_' + str(g_pred_num)] = genera_con[pred[k]] * weight
        payload['number'] = genera_pred[k]
        w = genera_con[pred[k]] * weight
        payload = Classify_Genus(payload, user_info, URL, w, g_pred_num)
        # suppress this genus so the next argmax returns the runner-up
        genera_votes[pred[k]] = - float("inf")
    return payload
def Classify_Genus(payload, user_info, URL, weight, pred_number):
    """Request species-level predictions for genus slot *pred_number*.

    Mutates and returns *payload*: fills species_<n>, spec_confidence_<n> and
    hall_<n> for the top prediction_per_level[2] species.  *weight* is the
    accumulated family*genus confidence used to scale species confidences.
    """
    # species prediction 1
    print("---species ---")
    payload['level'] = "Species"
    species = requests.post(URL + "predict", json=payload, timeout=30).json()
    # Formatting the response to be saved more easily
    species_votes = species['votes']
    spec_confidence = confidence(species_votes)
    pred = []
    species_pred = []
    for k in range(payload['prediction_per_level'][2]):
        pred.append(int(np.argmax(species_votes)))
        # map the local class index onto the global species numbering for the
        # predicted genus (edges holds the first species id of each genus)
        species_pred.append(pred[k] + notation_dictionary.edges["species"][payload['genus_' + str(pred_number)]][0])
        # flatten (genus slot, species rank) into a single 1-based slot index
        num = (pred_number - 1) * payload['prediction_per_level'][2] + k + 1
        payload["species_" + str(num)] = species_pred[k]
        payload["spec_confidence_" + str(num)] = spec_confidence[pred[k]] * weight
        payload["hall_" + str(num)] = SpGr.sgs_to_group[str(species_pred[k])]
        # suppress this species so the next argmax returns the runner-up
        species_votes[pred[k]] = -float("inf")
    return payload
| 11,926 | 3,797 |
from __future__ import absolute_import
import os
import signal
def check_leaked_workers(arbiter):
    """Assert that none of *arbiter*'s worker processes are still running.

    Sends SIGKILL to each recorded worker pid: a dead worker raises OSError
    (the expected outcome), while a surviving pid is recorded as leaked and
    reported via AssertionError.  Note the probe itself kills any leaked
    worker as a side effect.
    """
    leaked = []
    for worker in arbiter.workers.values():
        try:
            os.kill(worker.pid, signal.SIGKILL)  # raises OSError when already dead
        except OSError:
            continue  # good, worker dead
        leaked.append(worker.pid)
    if leaked:
        raise AssertionError("leaked workers: " + repr(leaked))
| 465 | 143 |
import nextcord
from nextcord import SlashOption
from nextcord.interactions import Interaction
client = nextcord.Client()  # global client used to register the slash commands below
@client.slash_command(guild_ids=[...])  # Limits the guildes -- NOTE(review): [...] is a literal Ellipsis placeholder; fill in real guild ids
async def choose_a_number(
    interaction: Interaction,
    number: str = SlashOption(
        name="picker",
        description="The number you want",
        # choices maps the label shown to the user to the value the callback receives
        choices={"1": 1, "2": 2, "3": 3},
    ),
):
    """Reply with the number the user picked from the slash-command choices."""
    await interaction.response.send_message(f"You chose {number}!")
@client.slash_command(guild_ids=[...])  # limits the guilds with this command -- NOTE(review): [...] is a literal Ellipsis placeholder; fill in real guild ids
async def hi(
    interaction: Interaction,
    member: nextcord.Member = SlashOption(name="user", description="the user to say hi to"),
):
    """Greet the chosen member on behalf of the invoking user."""
    await interaction.response.send_message(f"{interaction.user} just said hi to {member.mention}")
client.run("TOKEN")  # NOTE(review): "TOKEN" is a placeholder; supply the real bot token (ideally from an env var)
| 802 | 252 |
name = "pyvcdr"  # package name constant
| 16 | 10 |
import usb.core
import usb.util
import sys
# got these using the command lsusb -vv
VENDOR_ID = 0x1AB1
PRODUCT_ID = 0x0E11
DATA_SIZE = 1  # NOTE(review): never used in this script
device = usb.core.find(idVendor=VENDOR_ID, idProduct=PRODUCT_ID)
#@@@#print(device.is_kernel_driver_active(0))
# was it found?
if device is None:
    raise ValueError('USB Device not found')
try:
    # set the active configuration. With no arguments, the first
    # configuration will be the active one
    device.set_configuration()
except usb.core.USBError as e:
    raise Exception("failed to set configuration\n %s" % e)
cfg = device.get_active_configuration()  # NOTE(review): immediately shadowed by the loop variable below
# print the configuration value of every configuration the device exposes
for cfg in device:
    sys.stdout.write(str(cfg.bConfigurationValue) + '\n')
#@@@#device.read(0x81, 255, 1000000)
| 729 | 275 |
import pyamg
import numpy as np
import time
from prettytable import PrettyTable
A = pyamg.gallery.poisson((500, 500), format='csr')  # 2D Poisson problem on 500x500 grid
ml = pyamg.ruge_stuben_solver(A)  # classical (Ruge-Stuben) AMG hierarchy
b = np.random.rand(A.shape[0])  # pick a random right hand side
# Benchmark: solve Ax=b to a tolerance of 1e-10, averaged over each loop count.
loops = [100]
times = []
for loop in loops:
    t0 = time.time()
    for i in range(loop):
        y = ml.solve(b, tol=1e-10)
    t1 = time.time()
    times.append((t1 - t0) / loop)  # average seconds per solve
tb = PrettyTable()
tb.field_names = [""] + ["{} loops".format(loop) for loop in loops]
# BUGFIX: the comprehension variable used to be named 'time', shadowing the
# time module; renamed to avoid the latent NameError trap.
tb.add_row(["ruge stuben"] + ["{:0.4f} ms".format(elapsed * 1000) for elapsed in times])
print(tb)
import logging
import re
from queue import SimpleQueue
from telegram.bot import Bot as Telegram_Bot
from telegram.ext import Updater, CommandHandler, Filters
class TelegramHandler(object):
    """Bridge between Telegram chat commands and MQTT hand-off queues.

    /sub, /unsub and /pub commands from allowed users are validated and
    pushed onto SimpleQueues for the MQTT side to consume;
    publish_to_telegram forwards MQTT messages back to every subscribed
    Telegram user, honouring the '+' and '#' topic wildcards.
    """

    def __init__(self, bot_token, allowed_telegram_user_ids):
        """Create the bot, register command handlers and set up the queues.

        bot_token : Telegram bot API token.
        allowed_telegram_user_ids : user ids allowed to issue commands;
            everyone else is rejected by the per-command Filters.user filter.
        """
        self.logger = logging.getLogger("telegram2mqtt.bot")
        self.telegram_bot = Telegram_Bot(bot_token)
        # self.telegram_bot.get_me() # For debugging purposes
        self.allowed_telegram_user_ids = allowed_telegram_user_ids
        self.updater = Updater(bot=self.telegram_bot, use_context=True)
        # topic filter string -> set of subscribed chat ids
        self.topics_to_uid = {}
        self.sub_queue = SimpleQueue()
        self.unsub_queue = SimpleQueue()
        self.pub_queue = SimpleQueue()
        # Register Handlers
        self.updater.dispatcher.add_handler(
            CommandHandler(
                "sub",
                self.sub_handler,
                filters=Filters.user(self.allowed_telegram_user_ids),
            )
        )
        self.updater.dispatcher.add_handler(
            CommandHandler(
                "unsub",
                self.unsub_handler,
                filters=Filters.user(self.allowed_telegram_user_ids),
            )
        )
        self.updater.dispatcher.add_handler(
            CommandHandler(
                "pub",
                self.pub_handler,
                filters=Filters.user(self.allowed_telegram_user_ids),
            )
        )
        self.logger.info("Telegram-Handler is initialized.")

    def __call__(self):
        """Announce availability to all allowed users and start polling."""
        self.logger.info("Telegram-Handler started.")
        for uid in self.allowed_telegram_user_ids:
            self.telegram_bot.send_message(uid, "Telegram2MQTT Bot is online.")
        self.updater.start_polling()

    def stop(self):
        """Announce shutdown to all allowed users and stop polling."""
        for uid in self.allowed_telegram_user_ids:
            self.telegram_bot.send_message(uid, "Telegram2MQTT Bot is offline.")
        self.logger.info("Telegram-Handler stopped.")
        self.updater.stop()

    def sub_handler(self, update, context):
        """
        For subscriptions, two wildcard characters are supported:
        - A '#' character represents a complete sub-tree of the hierarchy
          and thus must be the last character in a subscription topic string, such as SENSOR/#.
          This will match any topic starting with SENSOR/, such as SENSOR/1/TEMP and SENSOR/2/HUMIDITY.
        - A '+' character represents a single level of the hierarchy and is used between delimiters.
          For example, SENSOR/+/TEMP will match SENSOR/1/TEMP and SENSOR/2/TEMP.
        """
        self.logger.debug(f"Sub Handler received args: '{context.args}'")
        # Validate context.args
        topic = context.args[0]
        if topic.count("#") >= 2:
            self.logger.warning(
                f"Invalid topic '{topic}' for subscription: Multiple '#' character used."
            )
            context.bot.send_message(
                chat_id=update.effective_chat.id,
                text=f"Invalid topic '{topic}' for subscription: Multiple '#' character used.",
            )
            return
        if "#" in topic and not topic.endswith("#"):
            self.logger.warning(
                f"Invalid topic '{topic}' for subscription: '#' not used as last character."
            )
            context.bot.send_message(
                chat_id=update.effective_chat.id,
                text=f"Invalid topic '{topic}' for subscription: '#' not used as last character.",
            )
            return
        if topic not in self.topics_to_uid:
            # first subscriber for this filter: tell the MQTT side to subscribe
            self.logger.info(f"Subscribe to topic '{topic}'")
            self.topics_to_uid[topic] = set()
            self.sub_queue.put(topic)
        if update.effective_chat.id not in self.topics_to_uid[topic]:
            self.topics_to_uid[topic].add(update.effective_chat.id)
            context.bot.send_message(
                chat_id=update.effective_chat.id,
                text=f"Sub on topic '{topic}' received.",
            )
        else:
            context.bot.send_message(
                chat_id=update.effective_chat.id,
                text=f"It seems that you already subcribed to topic '{topic}'.",
            )

    def unsub_handler(self, update, context):
        """Remove the user's subscription; unsubscribe MQTT when unused."""
        self.logger.debug(f"Unsub Handler received args: '{context.args}'")
        # Validate context.args
        topic = context.args[0]
        if (
            topic not in self.topics_to_uid
            or update.effective_chat.id not in self.topics_to_uid[topic]
        ):
            context.bot.send_message(
                chat_id=update.effective_chat.id,
                text=f"It seems that you aren't subcribed on topic '{topic}'.",
            )
            return
        self.topics_to_uid[topic].remove(update.effective_chat.id)
        context.bot.send_message(
            chat_id=update.effective_chat.id, text=f"Unsubcribed on topic '{topic}'."
        )
        if len(self.topics_to_uid[topic]) == 0:
            # last subscriber left: tell the MQTT side to unsubscribe
            self.logger.info(f"Unsubscribe from topic '{topic}'")
            self.unsub_queue.put(topic)
            del self.topics_to_uid[topic]

    def pub_handler(self, update, context):
        """Queue a message for MQTT publication; wildcards are rejected."""
        topic, message = context.args[0], " ".join(context.args[1:])
        self.logger.info(
            f"Pub Handler received pub on topic '{topic}'. Message: '{message}'"
        )
        # Validate topic
        if "#" in topic or "+" in topic:
            self.logger.warning(
                f"Pub Handler received topic with wildcard-character: '{topic}'."
            )
            context.bot.send_message(
                chat_id=update.effective_chat.id,
                text=f"Invalid input: wildcard-character in topic '{topic}' detected.",
            )
            return
        # Validate msg, should empty messages be valid?
        self.pub_queue.put((topic, message))
        context.bot.send_message(
            chat_id=update.effective_chat.id,
            text=f"Pub on topic '{topic}'.\nMessage: {message}",
        )

    def publish_to_telegram(self, topic, message):
        """Forward an MQTT message to every user whose subscription matches
        *topic*, honouring the '+' (single level) and '#' (sub-tree) wildcards."""
        self.logger.debug(f"Message from MQTT. Topic: '{topic}' Message: '{message}'")

        # 1. Search for known subs to topics which match the mqtt messages topic.
        # BUGFIX: the old pattern ('+' -> \w+, '#' -> (\w|\/)+, no end anchor)
        # let '+' match across level separators / beyond the end of the filter
        # and missed level names with characters outside \w. Build an anchored
        # pattern with properly escaped literal levels instead.
        def topic_filter_to_regex(topic_filter):
            parts = []
            for level in topic_filter.split("/"):
                if level == "+":
                    parts.append(r"[^/]+")  # exactly one hierarchy level
                elif level == "#":
                    parts.append(r".+")  # the remainder of the sub-tree
                else:
                    parts.append(re.escape(level))
            return re.compile("/".join(parts) + r"\Z")

        matched_topics = [
            t for t in self.topics_to_uid if topic_filter_to_regex(t).match(topic)
        ]
        # BUGFIX: used the root logger instead of the instance logger
        self.logger.debug(f"Matched topics for topic '{topic}': {matched_topics}")
        # 2. Collect all users which subbed to any of these topics, store as set to deduplicate
        recipients = set()
        for t in matched_topics:
            recipients.update(self.topics_to_uid[t])
        if len(recipients) == 0:
            self.logger.error(
                f"Couldn't publish message '{message}' to topic '{topic}', no matching user id found."
            )
            return
        # 3. Forward message
        self.logger.info("Forwarding mqtt-message to telegram.")
        telegram_msg = f"Received message on topic '{topic}':\n{message}"
        for uid in recipients:
            self.telegram_bot.send_message(uid, telegram_msg)
| 7,467 | 2,159 |
# Copyright 2021 The casbin Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
import sys
import os
import pycasbin
import test_enforcer
import test_model
import test_config
def suite():
    """Aggregate the enforcer, model and config test modules into one suite."""
    # top level directory cached on loader instance
    loader = unittest.TestLoader()
    all_tests = unittest.TestSuite()
    for module in (test_enforcer, test_model, test_config):
        all_tests.addTest(loader.loadTestsFromModule(module))
    return all_tests
if __name__ == '__main__':
    # Run the aggregated suite and propagate failure through the exit status
    # so CI systems notice. (The old 'wasSuccessful() == False' comparison
    # was an unidiomatic equality test against a boolean.)
    runner = unittest.TextTestRunner(verbosity=2)
    result = runner.run(suite())
    sys.exit(0 if result.wasSuccessful() else 1)
| 1,273 | 395 |
import functools
import warnings
def call_on_import(func):
    """Decorator that runs *func* once at import (decoration) time.

    BUGFIX: the original returned None, so the decorated name was clobbered.
    Returning *func* keeps the function usable after decoration.
    """
    func()
    return func
def get_package_file_path(from_package, relative_path):
    """Use source of a python package to locate and cache the address of a file."""
    # imported lazily so the module loads even without setuptools installed
    import pkg_resources
    return pkg_resources.resource_filename(from_package, relative_path)
def deprecate_kwargs(deprecated_kwargs=None):
    """Decorator factory: warn when any keyword in *deprecated_kwargs* is used.

    BUGFIX: the wrapper used to demand a first positional argument
    (self_or_cls), which broke plain functions called with keywords only;
    a transparent *args/**kwargs signature works for functions, methods
    and classmethods alike.
    """
    if deprecated_kwargs is None:
        deprecated_kwargs = set()

    def decorate_deprecate_kwargs(func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            _deprecate_kwargs(kwargs, deprecated_kwargs)
            return func(*args, **kwargs)
        return wrapper

    return decorate_deprecate_kwargs
def _deprecate_kwargs(kwargs, deprecated_kwargs):
added_args = []
for kwarg in kwargs:
if kwarg in deprecated_kwargs:
added_args.append(kwarg)
if len(added_args) > 1:
message = (
"Keyword arguments `{dep_args}` are deprecated and will be removed in the "
"next minor release of the package. Please update your code accordingly"
)
else:
message = (
"Keyword argument `{dep_args}` is deprecated and will be removed in the "
"next minor release of the package. Please update your code accordingly"
)
if added_args:
warnings.warn(
message.format(dep_args=", ".join(added_args)),
DeprecationWarning,
3,
)
| 1,574 | 442 |
#!/usr/local/bin/python3
decimal = input("Please enter a real number: ")
# locate the decimal point, if there is one
index = 0
while index < len(decimal) and decimal[index] != ".":
    index += 1
# BUGFIX: the original compared index against the hard-coded constant 10,
# so only 10-character inputs without a '.' were split correctly.
if index == len(decimal):
    intpart = decimal[:]  # no '.' found: the whole string is the integer part
    fracpart = ""
else:
    intpart = decimal[:index]
    fracpart = decimal[index + 1:]
# convert the integer part by repeated division by 2
if intpart == "":
    binint = "0"
else:
    binint = ""
    intpart = int(intpart)
    while intpart != 0:
        if intpart % 2 == 0:
            binint = "0" + binint
        else:
            binint = "1" + binint
        intpart //= 2
# convert the fractional part by repeated doubling
if fracpart == "":
    fracint = "0"
else:
    fracint = ""
    # interpret the digits after the point as a fraction in [0, 1)
    fracpart = float(fracpart) / 10 ** len(fracpart)
    while fracpart != 0:
        if fracpart * 2 >= 1:
            fracint += "1"
        else:
            fracint += "0"
        fracpart = fracpart * 2 - int(fracpart * 2)
print(decimal, "is equivalent to", binint + "." + fracint)
| 889 | 381 |
# Copyright 2019 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
cos test cast
"""
import os
import pytest
from base import TestBase
from nose.plugins.attrib import attr
from test_run.cos_run import cos_run
class TestCos(TestBase):
    """Test suite for the akg cos operator (float16 and float32 cases)."""

    def setup(self):
        """Register the cos test cases; called before each test."""
        case_name = "test_akg_cos_001"
        case_path = os.getcwd()
        self.params_init(case_name, case_path)
        self.caseresult = True
        self._log.info("========================{0} Setup case=================".format(self.casename))
        # testflag, opfuncname, testRunArgs (shape, dtype), dimArgs.
        # NOTE(review): a large catalogue of bigger shapes (1024x1024 up to
        # 32x16x512x512 and the 3125/1563/31250/15625 variants, both dtypes)
        # was present here as commented-out rows; pruned for clarity, see VCS
        # history to restore individual cases.
        self.testarg = [
            ("cos_f16_8_16", cos_run, ((8, 16), "float16")),
            ("cos_f32_8_16", cos_run, ((8, 16), "float32")),
        ]
        return

    @pytest.mark.rpc_mini
    @pytest.mark.level0
    @pytest.mark.env_onecard
    @pytest.mark.platform_x86_ascend_training
    def test_run(self):
        """
        Run the registered cases on the target platform.
        :return:
        """
        self.common_run(self.testarg)

    def teardown(self):
        """
        clean environment
        :return:
        """
        self._log.info("============= {0} Teardown============".format(self.casename))
        return
| 4,003 | 1,895 |
# Copyright 2008-2018 Univa Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# pylint: disable=not-callable,multiple-statements,no-member,no-self-use
from sqlalchemy.orm.exc import NoResultFound
from tortuga.db.networkDevicesDbHandler import NetworkDevicesDbHandler
from tortuga.db.tortugaDbObjectHandler import TortugaDbObjectHandler
from tortuga.exceptions.nicAlreadyExists import NicAlreadyExists
from tortuga.exceptions.nicNotFound import NicNotFound
from .models.nic import Nic
class NicsDbHandler(TortugaDbObjectHandler):
    """
    This class handles nics table.
    """

    def __init__(self):
        super().__init__()
        # helper used to lazily create network device rows on insert
        self._networkDevicesDbHandler = NetworkDevicesDbHandler()

    def getNic(self, session, mac):
        """
        Return the Nic row whose MAC address equals *mac*.

        Raises NicNotFound when no row matches.
        This method should be named 'getNicByMAC()'
        """
        self._logger.debug(
            'Retrieving NIC with MAC address [%s]' % (mac))
        try:
            return session.query(Nic).filter(Nic.mac == mac).one()
        except NoResultFound:
            raise NicNotFound(
                'NIC with MAC address [%s] not found.' % (mac))

    def getNicById(self, session, _id):
        """
        Return the Nic row with primary key *_id*; raises NicNotFound.
        """
        self._logger.debug('Retrieving NIC ID [%s]' % _id)
        dbNic = session.query(Nic).get(_id)
        if not dbNic:
            raise NicNotFound('NIC ID [%s] not found.' % (_id))
        return dbNic

    def addNic(self, session, nic):
        """
        Insert nic into the db.

        Raises NicAlreadyExists when a NIC with the same (non-empty) MAC is
        already present.  Returns the new, not-yet-committed Nic ORM object.
        """
        if nic.getMac():
            self._logger.debug('Inserting NIC [%s]' % (nic))
            try:
                # duplicate check: getNic raising NicNotFound is the OK path
                self.getNic(session, nic.getMac())
                raise NicAlreadyExists('NIC [%s] already exists' % (nic))
            except NicNotFound:
                # OK.
                pass
        dbNic = Nic(
            mac=nic.getMac(),
            nodeId=nic.getNodeId(),
            networkId=nic.getNetworkId(),
            ip=nic.getIp(),
            boot=nic.getBoot())
        # create the network device row on demand and link it to the NIC
        dbNic.networkdevice = \
            self._networkDevicesDbHandler.createNetworkDeviceIfNotExists(
                session, nic.getNetworkDevice().getName())
        return dbNic
| 2,716 | 814 |
import pytest
import titration.utils.devices.board_mock as board_mock
import titration.utils.devices.ph_probe_mock as ph_probe_mock
def test_ph_create():
    """A pH probe mock can be constructed with the mock SCL/SDA pins."""
    ph = ph_probe_mock.pH_Probe(board_mock.SCL, board_mock.SDA)
    assert ph is not None
def test_ph_create_null():
    """Construction also succeeds when both pins are None."""
    ph = ph_probe_mock.pH_Probe(None, None)
    assert ph is not None
def test_ph_voltage():
    """A freshly created probe reads 0 volts."""
    ph = ph_probe_mock.pH_Probe(board_mock.SCL, board_mock.SDA)
    assert ph.voltage() == 0
def test_ph_voltage_null():
    """A probe created with None pins still reads 0 volts."""
    ph = ph_probe_mock.pH_Probe(None, None)
    assert ph.voltage() == 0
def test_ph_voltage_set():
    """mock_set_voltage drives the value returned by voltage()."""
    ph = ph_probe_mock.pH_Probe(board_mock.SCL, board_mock.SDA)
    ph.mock_set_voltage(3.0)
    assert ph.voltage() == 3.0
def test_ph_set_gain():
    """set_gain accepts every documented gain and rejects out-of-range values."""
    ph = ph_probe_mock.pH_Probe(board_mock.SCL, board_mock.SDA)
    # the valid ADS-style gain settings supported by the mock
    gain_options = [2 / 3, 1, 2, 4, 8, 16]
    for gain in gain_options:
        ph.set_gain(gain)
        assert ph.get_gain() == gain
    # values outside the supported set must raise
    with pytest.raises(ValueError):
        ph.set_gain(0)
    with pytest.raises(ValueError):
        ph.set_gain(32)
| 1,068 | 468 |
from string import ascii_lowercase
class TwoRotationCypher:
    """Caesar-style cypher over the lowercase alphabet split into two rotors.

    The alphabet is cut after *firstSize* letters; each segment is rotated
    independently by its own offset.  Spaces pass through unchanged.
    """

    def encrypt(self, firstSize, firstRotate, secondRotate, message):
        """Return *message* with each letter rotated inside its segment."""
        first_segment = ascii_lowercase[:firstSize]
        second_base = ord('a') + firstSize  # code point of the second segment's first letter
        second_len = 26 - firstSize
        out = []
        for ch in message:
            if ch == ' ':
                out.append(ch)
            elif ch in first_segment:
                shifted = (ord(ch) - ord('a') + firstRotate) % firstSize
                out.append(chr(ord('a') + shifted))
            else:
                shifted = (ord(ch) - second_base + secondRotate) % second_len
                out.append(chr(second_base + shifted))
        return ''.join(out)
| 516 | 174 |
import numpy as np
from class_neural import GenericAutoencoder
from class_clustering_reduction import ReductionCluster
from class_subspace_reduction import ReductionSubspace
try:
import umap
except Exception as es:
print(f"UMAP unavailable. {es}")
from sklearn.decomposition import TruncatedSVD
from sklearn.manifold import LocallyLinearEmbedding
import logging
logging.basicConfig(format="%(asctime)s - %(message)s", datefmt="%d-%b-%y %H:%M:%S")
logging.getLogger(__name__).setLevel(logging.INFO)
class CoRe:
    """
    The main CoRe class.

    Recursively re-embeds the input through a shrinking series of dimensions
    (each step divides the previous dimension by tau) using the configured
    embedding algorithm, keeping one fitted encoder per step.
    """

    def __init__(
        self,
        tau=2,
        verbose=True,
        embedding_algorithm="CoRe-small",
        store_intermediary=False,
    ):
        # tau: shrink factor between consecutive embedding dimensions
        self.verbose = verbose
        self.store_intermediary = store_intermediary
        self.intermediary_representations = []
        self.k = tau
        self.rep_scores = []  # complexity score of each intermediate embedding
        self.embedding_algorithm = embedding_algorithm
        # '-direct' variants always re-embed the ORIGINAL data at every step
        # instead of the previous step's output
        if "-direct" in self.embedding_algorithm:
            self.direct_projection = True
        else:
            self.direct_projection = False

    def dimension_series(self, max_y):
        """Build the decreasing dimension schedule max_y/k, max_y/k^2, ..., k."""
        dim_series = []
        cdim = max_y
        while cdim > self.k:
            temp_dim = int(cdim / self.k)
            if temp_dim > self.k:
                dim_series.append(temp_dim)
                cdim = temp_dim
            else:
                break
        dim_series.append(self.k)  # always finish at the target dimension k
        self.dim_series = dim_series
        logging.info("Initialized dimension series: {}".format(self.dim_series))

    def measure_complexity(self, matrix):
        """Score a representation by the spread of its normalized row norms."""
        norms = np.sqrt(np.einsum("ij,ij->i", matrix, matrix)) / matrix.shape[1]
        mnorm = (norms - np.mean(norms)) / (np.max(norms) - np.min(norms))
        mnorm = np.std(mnorm)
        return mnorm

    def fit(self, dataframe):
        """Fit one encoder per scheduled dimension, chaining the embeddings."""
        dimension_y = min(dataframe.shape[1], dataframe.shape[0])
        self.dimension_series(dimension_y)
        encoders = []
        intermediary_representations = [dataframe]
        for dim in self.dim_series:
            if self.verbose:
                logging.info(f"Re-embedding into {dim} dimensions.")
            # NOTE(review): the first two branches are 'if'/'if' (not elif), so
            # a "CoRe-large" encoder would be overwritten if the algorithm name
            # somehow contained both markers; also, if no branch matches,
            # 'encoder' is unbound and the line below raises NameError.
            if "CoRe-large" in self.embedding_algorithm:
                encoder = GenericAutoencoder(
                    n_components=dim, verbose=self.verbose, nn_type="large"
                )
            if "CoRe-small" in self.embedding_algorithm:
                encoder = GenericAutoencoder(
                    n_components=dim, verbose=self.verbose, nn_type="mini"
                )
            elif "UMAP" in self.embedding_algorithm:
                encoder = umap.UMAP(n_components=dim)
            elif "RandomSubspace" in self.embedding_algorithm:
                encoder = ReductionSubspace(n_components=dim)
            elif "SparseRandom" in self.embedding_algorithm:
                # NOTE(review): SparseRandomProjection is not imported in this
                # module -- this branch raises NameError if taken.
                encoder = SparseRandomProjection(n_components=dim)
            elif "NMF" in self.embedding_algorithm:
                # NOTE(review): NMF is not imported either -- same problem.
                encoder = NMF(n_components=dim)
            elif "Cluster-mean" in self.embedding_algorithm:
                encoder = ReductionCluster(n_dim=dim, aggregation="mean")
            elif "Cluster-median" in self.embedding_algorithm:
                encoder = ReductionCluster(n_dim=dim, aggregation="median")
            elif "Cluster-max" in self.embedding_algorithm:
                encoder = ReductionCluster(n_dim=dim, aggregation="max")
            elif "LLE" in self.embedding_algorithm:
                encoder = LocallyLinearEmbedding(n_components=dim)
            elif "SVD" in self.embedding_algorithm:
                encoder = TruncatedSVD(n_components=dim)
            # encode the initial representation
            if self.direct_projection:
                encoded_representation = encoder.fit_transform(
                    intermediary_representations[0]
                )
            # encode current representation
            else:
                encoded_representation = encoder.fit_transform(
                    intermediary_representations[-1]
                )
            self.rep_scores.append(self.measure_complexity(encoded_representation))
            encoders.append(encoder)
            intermediary_representations.append(encoded_representation)
        if self.store_intermediary:
            self.intermediary_representations = intermediary_representations
        self.encoder_space = encoders

    def transform(self, dataframe, keep_intermediary=True):
        """Push new data through the fitted encoder chain.

        Returns all intermediate representations (including the input) when
        keep_intermediary is True, otherwise only the final embedding.
        """
        current_df = dataframe
        if self.verbose:
            logging.info("Encoding new data.")
        if keep_intermediary:
            intermediary_representations = [dataframe]
        for encoder in self.encoder_space:
            tmp_df = encoder.transform(current_df)
            if keep_intermediary:
                intermediary_representations.append(tmp_df)
            # direct mode keeps feeding the original data to each encoder
            if self.direct_projection:
                current_df = dataframe
            else:
                current_df = tmp_df
        if self.verbose:
            logging.info("Encoding obtained.")
        if keep_intermediary:
            return intermediary_representations
        else:
            return current_df

    def fit_transform(self, dataframe, keep_intermediary=False):
        """Convenience wrapper: fit on *dataframe*, then transform it."""
        self.fit(dataframe)
        return self.transform(dataframe, keep_intermediary)
# Smoke test: fit a small CoRe pipeline on random data and report how many
# intermediary representations transform() produces.
if __name__ == "__main__":
    import numpy as np

    # 100 samples x 100 features of uniform noise; the content is irrelevant.
    X = np.random.random((100, 100))
    core_instance = CoRe(
        verbose=False, embedding_algorithm="CoRe-small", store_intermediary=False
    )
    core_instance.fit(X)
    # keep_intermediary=True returns [input, stage1, stage2, ...]
    intermediary = core_instance.transform(X, keep_intermediary=True)
    print(len(intermediary))
| 5,675 | 1,653 |
from __future__ import absolute_import
from autograd.core import (primitive, Node, VSpace, register_node, vspace,
register_vspace, SparseObject)
from builtins import zip
from future.utils import iteritems
from functools import partial
import autograd.numpy as np
class SequenceNode(Node):
    """Autograd box for list/tuple values; syntax is routed through primitives."""
    __slots__ = []
    def __getitem__(self, idx): return sequence_take(self, idx)
    def __len__(self): return len(self.value)
    # seq + other / other + seq go through differentiable extend primitives;
    # *other is splatted, so `other` must itself be iterable.
    def __add__(self, other): return sequence_extend_right(self, *other)
    def __radd__(self, other): return sequence_extend_left(self, *other)
# Trace tuples and lists through SequenceNode.
register_node(SequenceNode, tuple)
register_node(SequenceNode, list)
@primitive
def sequence_take(A, idx):
    """Differentiable A[idx] for sequences."""
    return A[idx]
def grad_sequence_take(g, ans, vs, gvs, A, idx):
    # Scatter the incoming gradient back into a zero sequence at idx.
    return sequence_untake(g, idx, vs)
sequence_take.defvjp(grad_sequence_take)
@primitive
def sequence_extend_right(seq, *elts):
    """Differentiable seq + (elts...), preserving the sequence type."""
    return seq + type(seq)(elts)
def grad_sequence_extend_right(argnum, g, ans, vs, gvs, args, kwargs):
    # Gradient for the original sequence (argnum 0) is the leading slice of g;
    # each appended element receives its own trailing position.
    seq, elts = args[0], args[1:]
    return g[:len(seq)] if argnum == 0 else g[len(seq) + argnum - 1]
# NOTE(review): assigned to .vjp directly rather than via defvjp — presumably
# the multi-argnum registration path of this autograd version; confirm.
sequence_extend_right.vjp = grad_sequence_extend_right
@primitive
def sequence_extend_left(seq, *elts):
    """Differentiable (elts...) + seq, preserving the sequence type."""
    return type(seq)(elts) + seq
def grad_sequence_extend_left(argnum, g, ans, vs, gvs, args, kwargs):
    # Mirror of extend_right: the original sequence's gradient is the tail
    # slice of g; prepended element i gets g[i].
    seq, elts = args[0], args[1:]
    return g[len(elts):] if argnum == 0 else g[argnum - 1]
sequence_extend_left.vjp = grad_sequence_extend_left
@primitive
def sequence_untake(x, idx, vs):
    """Adjoint of sequence_take: embed gradient *x* at position *idx* of a
    sequence living in vector space *vs*, as a lazy SparseObject."""
    if isinstance(idx, int):
        # Single position: add x into that slot's vector space.
        accum = lambda result: vs.shape[idx].mut_add(result, x)
    else:
        # Slice: add elementwise across the selected slots.
        accum = lambda result: [elt_vs.mut_add(a, b)
                                for elt_vs, a, b in zip(vs.shape[idx], result, x)]
    def mut_add(A):
        # Copy before mutating so the caller's sequence is not modified.
        result = list(A)
        result[idx] = accum(result[idx])
        return vs.sequence_type(result)
    return SparseObject(vs, mut_add)
# Gradient of untake w.r.t. x is simply taking the same position again.
sequence_untake.defvjp(lambda g, ans, vs, gvs, x, idx, _: sequence_take(g, idx))
# idx and vs are non-differentiable arguments.
sequence_untake.defvjp_is_zero(argnums=(1, 2))
@primitive
def make_sequence(sequence_type, *args):
    """Differentiable constructor: pack *args into sequence_type."""
    return sequence_type(args)
# Gradient for element i (argnum >= 1) is g[i - 1]; argnum 0 is the
# (non-differentiable) sequence type itself.
make_sequence.vjp = lambda argnum, g, sequence_type, *args: g[argnum - 1]
make_tuple = partial(make_sequence, tuple)
make_list = partial(make_sequence, list)
class SequenceVSpace(VSpace):
    """Vector space for list/tuple values: elementwise over per-item spaces."""
    def __init__(self, value):
        # One sub-vector-space per element; total size is their sum.
        self.shape = [vspace(x) for x in value]
        self.size = sum(s.size for s in self.shape)
        self.sequence_type = type(value)
        assert self.sequence_type in (tuple, list)
    def zeros(self):
        return self.sequence_type(x.zeros() for x in self.shape)
    def mut_add(self, xs, ys):
        # Elementwise addition, delegated to each element's own space.
        return self.sequence_type(vs.mut_add(x, y)
                                  for vs, x, y in zip(self.shape, xs, ys))
    def flatten(self, value, covector=False):
        # Concatenate each element's flat form; an empty sequence flattens
        # to a zero-length vector.
        if self.shape:
            return np.concatenate(
                [vs.flatten(v, covector) for vs, v in zip(self.shape, value)])
        else:
            return np.zeros((0,))
    def unflatten(self, value, covector=False):
        # Inverse of flatten: carve the flat vector back into per-element
        # chunks of each sub-space's size, in order.
        result = []
        start = 0
        for vs in self.shape:
            N = vs.size
            result.append(vs.unflatten(value[start:start + N], covector))
            start += N
        return self.sequence_type(result)
# Lists and tuples both use this vector space.
register_vspace(SequenceVSpace, list)
register_vspace(SequenceVSpace, tuple)
class DictNode(Node):
    """Autograd box for dict values; item access goes through dict_take."""
    __slots__ = []
    def __getitem__(self, idx): return dict_take(self, idx)
    def __len__(self): return len(self.value)
    def __iter__(self): return self.value.__iter__()
    # Python-2-style iteration API: items/keys/values materialize lists so
    # the node behaves like a plain dict to callers; the iter* variants are
    # lazy and keep element access differentiable via self[k].
    def items(self): return list(self.iteritems())
    def keys(self): return list(self.iterkeys())
    def values(self): return list(self.itervalues())
    def iteritems(self): return ((k, self[k]) for k in self)
    def iterkeys(self): return iter(self)
    def itervalues(self): return (self[k] for k in self)
register_node(DictNode, dict)
@primitive
def dict_take(A, idx):
    """Differentiable A[idx] for dicts."""
    return A[idx]
def grad_dict_take(g, ans, vs, gvs, A, idx):
    # Scatter g back into a zero dict at key idx.
    return dict_untake(g, idx, vs)
dict_take.defvjp(grad_dict_take)
@primitive
def dict_untake(x, idx, vs):
    """Adjoint of dict_take: lazily add *x* at key *idx* within space *vs*."""
    def mut_add(A):
        # NOTE(review): mutates A in place (unlike the sequence variant,
        # which copies first) — presumably safe because callers pass scratch
        # accumulators; confirm.
        A[idx] = vs.shape[idx].mut_add(A[idx], x)
        return A
    return SparseObject(vs, mut_add)
dict_untake.defvjp(lambda g, ans, vs, gvs, x, idx, _: dict_take(g, idx))
# idx and vs are non-differentiable arguments.
dict_untake.defvjp_is_zero(argnums=(1, 2))
def make_dict(pairs):
    """Differentiable dict constructor from an iterable of (key, value) pairs."""
    keys, vals = zip(*pairs)
    # Keys and values go through make_list so the values stay differentiable.
    return _make_dict(make_list(*keys), make_list(*vals))
@primitive
def _make_dict(keys, vals):
    return dict(zip(keys, vals))
# Gradient w.r.t. vals (argnum=1): pick each key's gradient back out of the
# result dict, in key order. Keys are non-differentiable.
_make_dict.defvjp(lambda g, ans, vs, gvs, keys, vals: make_list(*[g[key] for key in keys]),
                  argnum=1)
class DictVSpace(VSpace):
    """Vector space of dicts whose values are themselves vector-space members."""
    def __init__(self, value):
        self.shape = {key: vspace(val) for key, val in iteritems(value)}
        self.size = sum(sub.size for sub in self.shape.values())
    def zeros(self):
        return {key: sub.zeros() for key, sub in iteritems(self.shape)}
    def mut_add(self, xs, ys):
        return {key: sub.mut_add(xs[key], ys[key])
                for key, sub in iteritems(self.shape)}
    def flatten(self, value, covector=False):
        # Sort by key so flatten/unflatten agree on element order.
        if self.shape:
            pieces = [sub.flatten(value[key], covector)
                      for key, sub in sorted(iteritems(self.shape))]
            return np.concatenate(pieces)
        else:
            return np.zeros((0,))
    def unflatten(self, value, covector=False):
        # Inverse of flatten: consume per-key chunks in sorted key order.
        out = {}
        offset = 0
        for key, sub in sorted(iteritems(self.shape)):
            out[key] = sub.unflatten(value[offset:offset + sub.size], covector)
            offset += sub.size
        return out
register_vspace(DictVSpace, dict)
| 5,632 | 1,959 |
'''
- Leetcode problem: 314
- Difficulty: Medium
- Brief problem description:
Given a binary tree, return the vertical order traversal of its nodes' values. (ie, from top to bottom, column by column).
If two nodes are in the same row and column, the order should be from left to right.
Examples 1:
Input: [3,9,20,null,null,15,7]
3
/\
/ \
9 20
/\
/ \
15 7
Output:
[
[9],
[3,15],
[20],
[7]
]
Examples 2:
Input: [3,9,8,4,0,1,7]
3
/\
/ \
9 8
/\ /\
/ \/ \
4 01 7
Output:
[
[4],
[9],
[3,0,1],
[8],
[7]
]
Examples 3:
Input: [3,9,8,4,0,1,7,null,null,null,2,5] (0's right child is 2 and 1's left child is 5)
3
/\
/ \
9 8
/\ /\
/ \/ \
4 01 7
/\
/ \
5 2
Output:
[
[4],
[9,5],
[3,0,1],
[8,2],
[7]
]
- Solution Summary:
- Used Resources:
--- Bo Zhou
'''
import collections
# Definition for a binary tree node.
# class TreeNode:
# def __init__(self, val=0, left=None, right=None):
# self.val = val
# self.left = left
# self.right = right
class Solution:
    def verticalOrder(self, root: TreeNode) -> List[List[int]]:
        """BFS the tree, bucketing node values by column index.

        BFS order already yields top-to-bottom (and left-to-right within a
        row/column cell); sorting the column keys gives left-to-right output.
        """
        columns = collections.defaultdict(list)
        queue = collections.deque([(root, 0)])
        while queue:
            node, col = queue.popleft()
            if node:
                columns[col].append(node.val)
                queue.append((node.left, col - 1))
                queue.append((node.right, col + 1))
        return [columns[col] for col in sorted(columns)]
| 1,771 | 708 |
import unittest
from pterradactyl.commands.lookup import LookupCommand
from mock import patch
import os
import argparse
import pytest
class TestLookupCommands(unittest.TestCase):
    """Exercises LookupCommand argument registration, execution and exits."""
    def setUp(self) -> None:
        # Paths resolve against the current working directory, so these
        # tests must be run from the repository root.
        self.base_path = os.path.dirname(os.path.abspath(__file__))
        self.config = os.path.join(os.getcwd(), 'tests/resources/config/pterra.yaml')
        self.facts = os.path.join(os.getcwd(), 'tests/resources/config/facts.yaml')
        self.facts_invalid = os.path.join(os.getcwd(), 'tests/resources/config/facts_invalid.yaml')
        self.parser = self.create_parser()
    def create_parser(self):
        # Fresh parser per test; LookupCommand is expected to add its own
        # --backend/--facts/--set options on construction.
        parser = argparse.ArgumentParser()
        parser.add_argument('--test', '-t')
        return parser
    def test_parser_args(self):
        # Happy path: options registered by LookupCommand parse correctly
        # and execute() completes.
        with patch('os.getcwd') as cwd_mock:
            # NOTE(review): getcwd is mocked to the config *file* path, not a
            # directory — presumably LookupCommand only uses it as a string
            # prefix; confirm.
            cwd_mock.return_value = self.config
            self.lc = LookupCommand(config=self.config, parser=self.parser)
            parsed = self.parser.parse_args((['--backend', 'yaml', '--facts', self.facts, '--set', 'foo=bar', '--set', 'foo1=bar1']))
            self.assertEqual(parsed.backend, 'yaml')
            self.assertEqual(parsed.set, ['foo=bar', 'foo1=bar1'])
            self.assertEqual(parsed.facts, self.facts)
            self.lc.execute(parsed, ['hierarchy'])
    def test_lookup_should_exit_with_invalid_yaml_file(self):
        # An invalid facts YAML file should terminate with SystemExit(42).
        with patch('os.getcwd') as cwd_mock_exception:
            with pytest.raises(SystemExit) as pytest_wrapped_e:
                cwd_mock_exception.return_value = self.config
                self.lc = LookupCommand(config=self.config, parser=self.parser)
                parsed = self.parser.parse_args((['--backend', 'yaml', '--facts', self.facts_invalid, '--set', 'foo=bar', '--set', 'foo1=bar1']))
                self.lc.execute(parsed, ['hierarchy'])
        assert pytest_wrapped_e.type == SystemExit
        assert pytest_wrapped_e.value.code == 42
    def test_lookup_invalid_set_facts(self):
        # A --set entry without '=' ("foo:bar") should also exit with 42.
        with patch('os.getcwd') as cwd_mock_exception:
            with pytest.raises(SystemExit) as pytest_wrapped_e:
                cwd_mock_exception.return_value = self.config
                self.lc = LookupCommand(config=self.config, parser=self.parser)
                parsed = self.parser.parse_args((['--backend', 'yaml', '--facts', self.facts, '--set', 'foo=bar', '--set', 'foo:bar']))
                self.lc.execute(parsed, ['hierarchy'])
        assert pytest_wrapped_e.type == SystemExit
        assert pytest_wrapped_e.value.code == 42
| 2,548 | 825 |
class Node:
    """Singly linked node holding one value."""
    def __init__(self, data):
        self.data = data
        self.next = None  # next node toward the bottom of the stack


class Stack():
    """LIFO stack backed by a singly linked list of Node objects."""
    def __init__(self):
        # Top of the stack; None when empty.
        self.head = None

    def push(self, data):
        """Push *data* onto the top of the stack."""
        new_node = Node(data)
        new_node.next = self.head
        self.head = new_node

    def pop(self):
        """Remove and return the top element; None if the stack is empty.

        Fixes two defects: popping an empty stack raised AttributeError,
        and the popped value was discarded instead of returned.
        """
        cur_node = self.head
        if cur_node is None:
            return None
        self.head = cur_node.next
        return cur_node.data

    def get_stack(self):
        """Print every element from top to bottom, one per line."""
        cur_node = self.head
        while cur_node:
            print(cur_node.data)
            cur_node = cur_node.next

    def peak(self):
        """Print and return the top element without removing it.

        Fixed to return None on an empty stack instead of raising
        AttributeError.
        """
        top_node = self.head
        if top_node is None:
            return None
        print(top_node.data)
        return top_node.data

    def size(self, node):
        """Return the number of nodes from *node* down to the tail.

        Callers pass the stack head: s.size(s.head).
        """
        if node is None:
            return 0
        return 1 + self.size(node.next)

    def is_empty(self):
        """Print "Yes" if the stack is empty, else "No"."""
        if self.head is None:
            print("Yes")
        else:
            print("No")
# Demo: push five items, pop the top two, then print what remains.
s = Stack()
s.push("A")
s.push("B")
s.push("C")
s.push("D")
s.push("E")
s.pop()  # removes "E"
s.pop()  # removes "D"
s.get_stack()  # prints C, B, A (top to bottom)
| 926 | 336 |
# don't use 3.9, currently using 3.7
import mysecrets
import re
import email
import logging
import slackhandler
import my_parser
from exchangelib import Credentials, Account, DELEGATE, Configuration, FaultTolerance, Message
from imapclient import IMAPClient
from collections import deque
class emailhandler:
    """Polls a mailbox (EWS or IMAP) for ticket and on-call emails and fans
    the interesting ones out to slack and SMS-over-email notifications.

    Keeps a bounded deque of recently seen ticket numbers so the same
    ticket is not announced twice.
    """
    def __init__(self, count=10, protocol='EWS'):
        # Ring buffer of the most recent ticket numbers already announced.
        self.last_ten_tickets = deque([], maxlen=count)
        self.protocol = protocol
        self.credentials = Credentials(mysecrets.username, mysecrets.password)
        # Prefer the persisted on-call number; fall back to the configured one.
        self.on_call = _get_on_call_number_from_file(mysecrets.oncalltxt_location)
        self.permanent_numbers = mysecrets.permanent_numbers
        if not self.on_call:
            self.on_call = mysecrets.on_call
        self.config = Configuration(server=mysecrets.host,
                                    credentials=self.credentials,
                                    retry_policy=FaultTolerance())
        self.account = Account(primary_smtp_address=mysecrets.email_address,
                               config=self.config,
                               credentials=self.credentials,
                               autodiscover=False,
                               access_type=DELEGATE)
        self.phonebook = {}
        # with open('phonebook.csv') as file:
        #     csv_file = csv.DictReader(file)
        #     for row in csv_file:
        #         self.phonebook[row['Username']] = row['Phone_Number']

    def add_ticket_num(self, ticket):
        """Record *ticket* if unseen; return it, or None if already known."""
        # Idiomatic membership test instead of calling __contains__ directly.
        if ticket not in self.last_ten_tickets:
            self.last_ten_tickets.append(ticket)
            return ticket

    # Needs to be of email type and not exchangelib message
    def process_emails(self, emails):
        """Dispatch each parsed email: on-call updates, on-call queries,
        or priority-1/2 ticket notifications."""
        if isinstance(emails, list):
            for mail in emails:
                # Update on call logic
                on_call_phone_num = _on_call_update_email(mail)
                if on_call_phone_num:
                    self.on_call = on_call_phone_num
                    _update_on_call_file(on_call_phone_num)
                    logging.debug("emailhandler.py :: On call number has beeen updated to " + on_call_phone_num)
                    slackhandler.notifyOnCallUpdate(on_call_phone_num)
                # Who is on call request
                elif _on_call_request_email(mail):
                    logging.debug("emailhandler.py :: on call request notification" +
                                  " being sent to slackhandler")
                    slackhandler.notify_inform_who_is_on_call(self.on_call)
                # Priority 1 or 2 logic
                else:
                    num_pri_tuple = _get_ticket_num(str(mail['Subject']))
                    if num_pri_tuple:
                        ticket_num = self.add_ticket_num(num_pri_tuple[0])
                        if ticket_num:
                            # This block is reached if it's a new ticket to the bot
                            if num_pri_tuple[1] == 1:
                                logging.debug("emailhandler.py :: sending message to slackhandler.notify priority 1")
                                slackhandler.notifyP1(mail)
                                # P1: text the on-call plus every permanent number.
                                self.notify_on_call(mail, self.on_call)
                                for num in self.permanent_numbers:
                                    if num != self.on_call:
                                        self.notify_on_call(mail, num)
                            elif num_pri_tuple[1] == 2:
                                logging.debug("emailhandler.py :: sending message to slackhandler.notify priority 2")
                                slackhandler.notifyP2(mail)
                            else:
                                # Fix: logging.ERROR is an int level constant;
                                # calling it raised TypeError at runtime.
                                logging.error("Invalid block reached in process_emails")

    # returns array of new emails
    def get_emails(self):
        """Fetch unread messages, mark them read, and return them as
        email.message objects."""
        if self.protocol == 'IMAP':
            with IMAPClient(host=mysecrets.host) as client:
                # init IMAP connection
                client.login(mysecrets.username, mysecrets.password)
                client.select_folder('Inbox')
                # returns uids of emails
                messages = client.search(['UNSEEN'])
                # returns emails in a dictionary format
                email_dict = client.fetch(messages, ['RFC822'])
                client.add_flags(messages, '\\SEEN')
                # close out imap connection
                client.shutdown()
            emails = []
            # convert emails from dict format to email format
            for mail in email_dict.values():
                emails.append(email.message_from_string(mail[b'RFC822'].decode("UTF-8")))
            return emails
        if self.protocol == 'EWS':
            # get unread emails
            unread = self.account.inbox.filter(is_read=False)
            logging.debug("emailhandler.py get_emails()::" + str(unread.count()))
            emails = []
            # convert from exchangelib.items.message.Message object to email object
            for mail in unread:
                try:
                    emails.append(_convert_from_exchange_email(mail))
                    logging.debug("emailhandler.py get_emails unread email found :: " + str(mail.subject))
                    # mark as read
                    mail.is_read = True
                    mail.save(update_fields=['is_read'])
                except Exception:
                    # Narrowed from a bare except: conversion/save failures
                    # are logged and the message skipped.
                    logging.error("emailhandler.py:: ERROR in reading email. Not email?")
            return emails

    # Sets the flag on all email to seen
    def read_all_emails(self):
        """Mark every unread message in the inbox as read."""
        if self.protocol == 'IMAP':
            with IMAPClient(host=mysecrets.host) as client:
                client.login(mysecrets.username, mysecrets.password)
                client.select_folder('Inbox')
                messages = client.search(['UNSEEN'])
                client.add_flags(messages, '\\SEEN')
                client.shutdown()
        if self.protocol == 'EWS':
            # get unread emails
            unread = self.account.inbox.filter(is_read=False)
            for mail in unread:
                logging.debug('emailhandler.py:: Unread email found in read_all_emails: ' + str(mail.subject))
                mail.is_read = True
                # todo: save is returning a massive string - check documentation
                mail.save(update_fields=['is_read'])

    # mail needs to be of email type and not exchangelib message
    def notify_on_call(self, mail, phone_number):
        """Text *phone_number* a summary of *mail* via the carrier's
        email-to-SMS gateway."""
        on_call_email_to_sms = phone_number + "@vtext.com"
        logging.debug("emailhandler.py :: Entering Notify_on_Call" +
                      "\n - Subject = " +
                      str(mail["Subject"]) +
                      "\n to_recipients = " +
                      on_call_email_to_sms)
        if phone_number:
            body_string = (mail["Subject"] +
                           "\n" +
                           "Center ID: " +
                           my_parser.get_cid(mail) +
                           "\n"
                           "Summary: " +
                           my_parser.get_summary(mail))
            message_to_send = Message(
                account=self.account,
                subject='',
                body=body_string,
                to_recipients=[on_call_email_to_sms]
            )
            try:
                message_to_send.send()
                logging.debug("emailhandler.py :: email sent to " + str(on_call_email_to_sms))
            except Exception:
                # Narrowed from a bare except; a failed text is logged only.
                logging.error("emailhandler.py :: FAILED TO SEND ON CALL TEXT")
        else:
            logging.debug("emailhandler.py :: Unable to send on call text, on_call is empty")
def _get_ticket_num(subj):
    """Extract (ticket_number, priority) from a high-priority subject line.

    Subjects look like:
        Incident# 12345 is a Priority 1 ticket and has been assigned to your team
    Returns None when the subject does not match the configured pattern.
    """
    pattern = re.compile(mysecrets.ticket_regex_string)
    if pattern.search(subj):
        digits = re.findall(r"\d+", subj)
        return int(digits[0]), int(digits[1])
    return None
def _convert_from_exchange_email(mail):
return email.message_from_string(mail.mime_content.decode("UTF-8"))
def _on_call_update_email(mail):
if isinstance(mail, email.message.Message):
if str(mail['Subject']).upper() == "UPDATE ON-CALL":
# first payload seems to be body - This could change depending where and how it's sent
# Should be consistent throughout enterprise
logging.debug("emailhandler.py :: On-Call Update Found")
phone_num_groups = ''
for p in mail.get_payload():
phone_num_groups = re.match(r"^\d{10}", p.get_payload())
if phone_num_groups:
return phone_num_groups.group(0)
def _update_on_call_file(phone_number):
try:
with open(mysecrets.oncalltxt_location, 'w') as file_obj:
file_obj.write(phone_number)
except IOError as e:
logging.error("emailhandler.py :: IO error recieved while trying to update oncall.txt")
except:
logging.error("emailhandler.py :: Unexpected occured trying to update oncall.txt")
def _get_on_call_number_from_file(oncall_file):
phone_number = ''
try:
with open(oncall_file, 'r') as file_obj:
phone_number = file_obj.readline(10)
except IOError as e:
logging.error("emailhandler.py :: IO error recieved while trying to read oncall.txt")
except:
logging.error("emailhandler.py :: Unexpected occured trying to read oncall.txt")
return phone_number
def _on_call_request_email(mail):
if isinstance(mail, email.message.Message):
if str(mail['Subject']).upper() == "SLACKBOT WHO IS ON CALL":
logging.debug("emailhandler.py :: _on_call_request_email found!")
return True
return False
| 10,059 | 2,760 |
# Import-time banner confirming the module loaded.
print( "you have successfully imported the jet latitude calculation subroutines" )
import numpy as np
def getzonalmeanonplev(xdatin, plev):
    """Zonal-mean *xdatin* and select the level nearest *plev* (in hPa).

    xdatin is expected to have dims (plev, lat, lon) with the plev
    coordinate stored in Pa.
    """
    zonal_mean = xdatin.mean(dim='lon')
    # The coordinate is stored in Pa while the argument arrives in hPa.
    return zonal_mean.sel(plev=plev * 100., method='nearest')
def calcjetlat( uzm, minlat, maxlat):
    # -----calculate the latitude of a maximum between latitude bounds
    # input: uzm = zonal-mean wind, data array(nlat) — assumed xarray
    #        DataArray with a 'lat' coordinate; confirm
    #        minlat = minimum latitude over which to search for the max
    #        maxlat = maximum latitude over which to search for the max
    # output: jlatv = the jet latitude
    #         jspeedv = the fitted wind speed at that latitude
    #         (comment previously said "jmaxv"; the code returns jspeedv)
    # the jet is the maximum of the quadratic fit to the grid point maximum
    # and the two adjacent grid points (as Kidston and Gerber 2010)
    # NaN's are skipped
    lats = uzm.sel(lat = slice(minlat,maxlat)).coords['lat']
    imax = uzm.sel(lat = slice(minlat,maxlat)).argmax(dim='lat', skipna=True, keep_attrs=True)
    # A maximum on either edge of the search window is not a local maximum.
    if (imax == 0) or (imax == len(lats)-1):
        jlatv = np.nan
        jspeedv = np.nan
        print( "!!!! no local maximum found in calcjetlat" )
    else:
        # Quadratic fit through the peak grid point and its two neighbours.
        lat4fit = lats.isel(lat=slice(int(imax)-1,int(imax)+2))
        u4fit = uzm.sel(lat = slice(minlat,maxlat)).isel(lat=slice(int(imax)-1,int(imax)+2))
        coefs = np.polyfit(lat4fit,u4fit,2)
        # Vertex of the fitted parabola: latitude of the interpolated maximum.
        jlatv = -1.*coefs[1]/(2.*coefs[0])
        jspeedv = coefs[2] + coefs[1]*jlatv + coefs[0]*jlatv**2
    return jlatv, jspeedv
| 1,698 | 632 |
from statuscheck.services.bases._statuspageio import BaseStatusPageAPI
class ServiceAPI(BaseStatusPageAPI):
    """Status-page client configuration for Sentry (statuspage.io backend)."""
    name = "Sentry"
    # statuspage.io page identifier for Sentry's hosted status page
    domain_id = "t687h3m0nh65"
    status_url = "https://status.sentry.io"
    service_url = "https://sentry.io/"
| 244 | 89 |
import FWCore.ParameterSet.Config as cms
process = cms.Process("DQM")
process.load("DQM.HLTEvF.HLTMonitor_cff")
process.load("DQMServices.Core.DQM_cfg")
process.load("DQMServices.Components.DQMEnvironment_cfi")
# Process at most 100 events from the input files.
process.maxEvents = cms.untracked.PSet(
    input = cms.untracked.int32(100)
)
process.source = cms.Source("PoolSource",
fileNames = cms.untracked.vstring(
### TTbar 3
'/store/relval/CMSSW_3_0_0_pre3/RelValQCD_Pt_80_120/GEN-SIM-DIGI-RAW-HLTDEBUG/STARTUP_30X_v1/0004/1C51253D-D4CC-DD11-89EC-001D09F23A6B.root',
'/store/relval/CMSSW_3_0_0_pre3/RelValQCD_Pt_80_120/GEN-SIM-DIGI-RAW-HLTDEBUG/STARTUP_30X_v1/0004/26FB4A24-D2CC-DD11-845E-0019B9F72F97.root',
'/store/relval/CMSSW_3_0_0_pre3/RelValQCD_Pt_80_120/GEN-SIM-DIGI-RAW-HLTDEBUG/STARTUP_30X_v1/0004/30EE04C0-CACC-DD11-96CF-000423D99658.root',
'/store/relval/CMSSW_3_0_0_pre3/RelValQCD_Pt_80_120/GEN-SIM-DIGI-RAW-HLTDEBUG/STARTUP_30X_v1/0004/362BA44E-D2CC-DD11-8BAB-001617DBD5B2.root',
'/store/relval/CMSSW_3_0_0_pre3/RelValQCD_Pt_80_120/GEN-SIM-DIGI-RAW-HLTDEBUG/STARTUP_30X_v1/0004/6459EFC9-D7CC-DD11-8D80-001617DBCF6A.root',
'/store/relval/CMSSW_3_0_0_pre3/RelValQCD_Pt_80_120/GEN-SIM-DIGI-RAW-HLTDEBUG/STARTUP_30X_v1/0004/6AD9D6F6-D3CC-DD11-909A-001617C3B79A.root',
'/store/relval/CMSSW_3_0_0_pre3/RelValQCD_Pt_80_120/GEN-SIM-DIGI-RAW-HLTDEBUG/STARTUP_30X_v1/0004/7890DAB6-CDCC-DD11-968C-000423DD2F34.root',
'/store/relval/CMSSW_3_0_0_pre3/RelValQCD_Pt_80_120/GEN-SIM-DIGI-RAW-HLTDEBUG/STARTUP_30X_v1/0004/ACCC88FA-CFCC-DD11-AE2D-001D09F24637.root',
'/store/relval/CMSSW_3_0_0_pre3/RelValQCD_Pt_80_120/GEN-SIM-DIGI-RAW-HLTDEBUG/STARTUP_30X_v1/0004/ACFCB5C4-BCCC-DD11-832F-001D09F253C0.root',
'/store/relval/CMSSW_3_0_0_pre3/RelValQCD_Pt_80_120/GEN-SIM-DIGI-RAW-HLTDEBUG/STARTUP_30X_v1/0004/B2A7BEBE-CCCC-DD11-824A-000423D98E54.root',
'/store/relval/CMSSW_3_0_0_pre3/RelValQCD_Pt_80_120/GEN-SIM-DIGI-RAW-HLTDEBUG/STARTUP_30X_v1/0004/C2BBDEF8-CBCC-DD11-A8A0-001617DBD5AC.root',
'/store/relval/CMSSW_3_0_0_pre3/RelValQCD_Pt_80_120/GEN-SIM-DIGI-RAW-HLTDEBUG/STARTUP_30X_v1/0004/C65BB6E9-59CD-DD11-95F1-0030487A1990.root',
'/store/relval/CMSSW_3_0_0_pre3/RelValQCD_Pt_80_120/GEN-SIM-DIGI-RAW-HLTDEBUG/STARTUP_30X_v1/0004/D2BB240F-D3CC-DD11-9921-001617C3B5E4.root',
'/store/relval/CMSSW_3_0_0_pre3/RelValQCD_Pt_80_120/GEN-SIM-DIGI-RAW-HLTDEBUG/STARTUP_30X_v1/0004/DA78BADE-BCCC-DD11-AE39-0030487BC68E.root',
'/store/relval/CMSSW_3_0_0_pre3/RelValQCD_Pt_80_120/GEN-SIM-DIGI-RAW-HLTDEBUG/STARTUP_30X_v1/0004/DAA706E0-BDCC-DD11-AD9E-001D09F2432B.root',
'/store/relval/CMSSW_3_0_0_pre3/RelValQCD_Pt_80_120/GEN-SIM-DIGI-RAW-HLTDEBUG/STARTUP_30X_v1/0004/DC28FB7D-D5CC-DD11-A689-001D09F231C9.root',
'/store/relval/CMSSW_3_0_0_pre3/RelValQCD_Pt_80_120/GEN-SIM-DIGI-RAW-HLTDEBUG/STARTUP_30X_v1/0004/E02F1FE1-D4CC-DD11-AF0C-001D09F231C9.root',
'/store/relval/CMSSW_3_0_0_pre3/RelValQCD_Pt_80_120/GEN-SIM-DIGI-RAW-HLTDEBUG/STARTUP_30X_v1/0004/EAB4B18D-CFCC-DD11-B952-0030487C608C.root',
'/store/relval/CMSSW_3_0_0_pre3/RelValQCD_Pt_80_120/GEN-SIM-DIGI-RAW-HLTDEBUG/STARTUP_30X_v1/0004/FAE75F4C-C0CC-DD11-97B1-001617C3B706.root',
'/store/relval/CMSSW_3_0_0_pre3/RelValQCD_Pt_80_120/GEN-SIM-DIGI-RAW-HLTDEBUG/STARTUP_30X_v1/0004/FC1FE58F-CCCC-DD11-9198-001617DBCF90.root'
)
)
# Route log output to three destinations with per-destination thresholds:
# detailedInfo (INFO+), critical (ERROR+), and stdout (WARNING+, but with
# the WARNING count limited to 0, i.e. suppressed).
process.MessageLogger = cms.Service("MessageLogger",
    detailedInfo = cms.untracked.PSet(
        threshold = cms.untracked.string('INFO')
    ),
    critical = cms.untracked.PSet(
        threshold = cms.untracked.string('ERROR')
    ),
    debugModules = cms.untracked.vstring('*'),
    cout = cms.untracked.PSet(
        threshold = cms.untracked.string('WARNING'),
        WARNING = cms.untracked.PSet(
            limit = cms.untracked.int32(0)
        ),
        noLineBreaks = cms.untracked.bool(True)
    ),
    destinations = cms.untracked.vstring('detailedInfo',
        'critical',
        'cout')
)
# Only the DQM saver runs as an end path; the HLT monitor modules come from
# the imported HLTMonitor_cff configuration.
process.p = cms.EndPath(process.dqmSaver)
process.DQMStore.verbose = 0
# Empty collector host: run standalone, do not ship histograms upstream.
process.DQM.collectorHost = ''
process.dqmSaver.convention = 'Online'
process.dqmSaver.saveByRun = 1
process.dqmSaver.saveAtJobEnd = True
| 4,145 | 2,507 |
import praw, sys, os

# Bot identity reported to the reddit API.
user_agent = "Tournament Reminder by /u/0ffkilter"
# Config.txt: second line holds the bot account password.
config_file = open('%s%s' %(os.getcwd(), '/Config.txt'), 'r')
config = config_file.read().split('\n')
config_file.close()
reddit = praw.Reddit(user_agent = user_agent)
# NOTE(review): praw's login() was removed in praw 4+ (OAuth only); this
# script targets the legacy praw 3.x API.
reddit.login('stunfiskhelperbot', config[1])
# Participants.txt lists one reddit username per line.
part_file = open('%s%s' %(os.getcwd(), '/Participants.txt'), 'r')
parts = part_file.read().split('\n')
part_file.close()
message = ("Hello! You are receiving this message because you are "
    "signed up for /r/stunfisk's Bucket O' Mons Tournament! If you "
    "did not sign up for the tournament, that means that /u/0ffkilter "
    "typed in someone's name wrong. You should probably let him know. \n\n"
    "In Any case, this is a reminder that Round 1 of the Tournament is out! \n\n"
    "The Theme is 'power pokes', and the post can be found "
    "[here](http://www.reddit.com/r/stunfisk/comments/2cejgl/tournament_bucket_o_mons_round_1_announcement/)\n\n "
    "You have until Tuesday, August 5th 12:00 PST to complete your match! \n\n"
    "Additional rules and regulations can be found on the aforementioned post. \n\n"
    "Send Questions or comments to /u/0ffkilter!")
subject = "Reminder for Bucket O' Mons Tournament!"
# NOTE(review): this overrides the file contents so only one user is
# messaged — presumably a one-off resend/test; remove to message everyone.
parts = ['bigyeIIowtaxi']
for participant in parts:
    try:
        reddit.send_message(participant, subject, message)
        print('Sent -> %s' %participant)
    except:
        # NOTE(review): bare except hides auth/rate-limit errors; consider
        # catching praw's exceptions explicitly.
        print('Failed -> %s' %participant)
| 1,488 | 526 |
# Number-guessing game: pick a random number in 1..1000, print range, prime
# and parity hints, and give the player four guesses per round.
import random

while True:
    # Random asymmetric padding so the printed range does not reveal the number.
    random_list = random.sample((3, 4, 5), 2)
    random_number = random.randint(1, 1000)
    start_range = random_number - random_list[0]
    end_range = random_number + random_list[1]
    print(f'Range Hint: The Number is between {start_range} & {end_range}')
    # prime Hint
    # Fix: 1 is not prime, but the trial-division loop below never runs for
    # random_number == 1, so the original reported 1 as prime.
    prime = random_number > 1
    for x in range(2, int(random_number)):
        if (int(random_number) % x == 0):
            prime = False
            break  # first divisor settles it; no need to keep scanning
    if prime:
        print('Prime Number Hint: TRUE')
    else:
        print('Prime Number Hint: FALSE')
    # odd-even hint
    if random_number % 2 == 0:
        print('ODD or EVEN Hint: Even')
    else:
        print('ODD or EVEN Hint: Odd')
    chanses = 4
    while chanses > 0:
        # NOTE(review): int() raises ValueError on non-numeric input; the
        # original behaved the same way, so the crash is preserved here.
        user_input = int(input('Enter Your Guessing:-'))
        if user_input == random_number:
            print(f'You Guessed Right, Number is {random_number}')
            break
        else:
            print('Try Again! Read Hints Carefully\n')
            if user_input > random_number:
                print('The Number is Less then your Input Number\n')
            else:
                print('The Number is More then your Input Number\n')
            chanses = chanses - 1
            print('')
    # scoring system (also runs after a win; a win on the last chance still
    # prints 'You Tried Hard' — kept as-is to preserve behavior)
    if chanses == 0:
        print('You Loose')
    elif chanses == 1:
        print('You Tried Hard')
    elif chanses >= 2:
        print('Well Done! You Genius Guy')
    ask = input('Want to Play Again: Play(1) or Exit(2)--')
    if ask == '2':
        break
    else:
        print('Starting game Again...... \n')
| 1,699 | 597 |
# Mirror a 4x4 board horizontally: row r of the result is row r of the
# original with its columns read right-to-left.
map = [[2, 0, 0, 0], [0, 0, 0, 4], [8, 0, 0, 0], [4, 0, 0, 2]]
ma = []
for c in range(4):
    ma.append([map[c][r] for r in reversed(range(4))])
print(ma)
#!/usr/bin/env python
# coding=utf-8
# platform_node.py
# https://www.cnblogs.com/hongten/p/hongten_python_platform.html
import platform
'''
python中,platform模块给我们提供了很多方法去获取操作系统的信息
如:
import platform
platform.platform() #获取操作系统名称及版本号,'Linux-3.13.0-46-generic-i686-with-Deepin-2014.2-trusty'
platform.version() #获取操作系统版本号,'#76-Ubuntu SMP Thu Feb 26 18:52:49 UTC 2015'
platform.architecture() #获取操作系统的位数,('32bit', 'ELF')
platform.machine() #计算机类型,'i686'
platform.node() #计算机的网络名称,'XF654'
platform.processor() #计算机处理器信息,''i686'
platform.uname() #包含上面所有的信息汇总,('Linux', 'XF654', '3.13.0-46-generic', '#76-Ubuntu SMP Thu Feb 26 18:52:49 UTC 2015', 'i686', 'i686')
还可以获得计算机中python的一些信息:
platform.python_build()
platform.python_compiler()
platform.python_branch()
platform.python_implementation()
platform.python_revision()
platform.python_version()
platform.python_version_tuple()
'''
# global config
# Whether the demo functions print the labeled (verbose) variants.
SHOW_LOG = True
def get_platform():
    '''Return the OS name and version string (platform.platform()).'''
    return platform.platform()
def get_version():
    '''Return the OS release version string.'''
    return platform.version()
def get_architecture():
    '''Return the interpreter's bit architecture, e.g. ('64bit', 'ELF').'''
    return platform.architecture()
def get_machine():
    '''Return the machine type, e.g. 'x86_64'.'''
    return platform.machine()
def get_node():
    '''Return the computer's network (host) name.'''
    return platform.node()
def get_processor():
    '''Return the (possibly empty) processor description.'''
    return platform.processor()
def get_system():
    '''Return the OS type, e.g. 'Linux' or 'Windows'.'''
    return platform.system()
def get_uname():
    '''Return the combined uname result with all of the above.'''
    return platform.uname()
def get_python_build():
    '''Return the Python build number and date as strings.'''
    return platform.python_build()
def get_python_compiler():
    '''Return a string identifying the compiler used for compiling Python.'''
    return platform.python_compiler()
def get_python_branch():
    '''Return a string identifying the Python implementation SCM branch.'''
    return platform.python_branch()
def get_python_implementation():
    '''Return the Python implementation: 'CPython', 'IronPython', 'Jython' or 'PyPy'.'''
    return platform.python_implementation()
def get_python_version():
    '''Return the Python version as 'major.minor.patchlevel'.'''
    return platform.python_version()
def get_python_revision():
    '''Return a string identifying the Python implementation SCM revision.'''
    return platform.python_revision()
def get_python_version_tuple():
    '''Return the Python version as a (major, minor, patchlevel) tuple of strings.'''
    return platform.python_version_tuple()
def show_os_all_info():
    '''Print all OS info values with explanatory labels (labels in Chinese).'''
    print('获取操作系统名称及版本号 : [{}]'.format(get_platform()))
    print('获取操作系统版本号 : [{}]'.format(get_version()))
    print('获取操作系统的位数 : [{}]'.format(get_architecture()))
    print('计算机类型 : [{}]'.format(get_machine()))
    print('计算机的网络名称 : [{}]'.format(get_node()))
    print('计算机处理器信息 : [{}]'.format(get_processor()))
    print('获取操作系统类型 : [{}]'.format(get_system()))
    print('汇总信息 : [{}]'.format(get_uname()))
def show_os_info():
    '''Print the raw OS info values, one per line, without labels.'''
    for getter in (get_platform, get_version, get_architecture, get_machine,
                   get_node, get_processor, get_system, get_uname):
        print(getter())
def show_python_all_info():
    '''Print all Python build/version details with explanatory labels.'''
    print('The Python build number and date as strings : [{}]'.format(
        get_python_build()))
    print('Returns a string identifying the compiler used for compiling Python : [{}]'.format(
        get_python_compiler()))
    print('Returns a string identifying the Python implementation SCM branch : [{}]'.format(
        get_python_branch()))
    print('Returns a string identifying the Python implementation : [{}]'.format(
        get_python_implementation()))
    print('The version of Python : [{}]'.format(get_python_version()))
    print('Python implementation SCM revision : [{}]'.format(
        get_python_revision()))
    print('Python version as tuple : [{}]'.format(get_python_version_tuple()))
def show_python_info():
    '''Print the raw Python build/version values, one per line.'''
    for getter in (get_python_build, get_python_compiler, get_python_branch,
                   get_python_implementation, get_python_version,
                   get_python_revision, get_python_version_tuple):
        print(getter())
def test():
    '''Demo driver: print OS info, a separator line, then Python info.

    SHOW_LOG selects the labeled (verbose) variants.
    '''
    print('操作系统信息:')
    if SHOW_LOG:
        show_os_all_info()
    else:
        show_os_info()
    print('#' * 50)
    print('计算机中的python信息:')
    if SHOW_LOG:
        show_python_all_info()
    else:
        show_python_info()
def init():
    '''Reset module configuration; currently just forces verbose output.'''
    global SHOW_LOG
    SHOW_LOG = True
def main():
    '''Entry point: initialize flags and run the demo.'''
    init()
    test()
if __name__ == '__main__':
    main()
| 4,876 | 1,778 |
import sys
import os
import numpy as np
# Root directory that all benchmark result files live under.
RESULT_DIR = 'results/'
# Index-structure / experiment subdirectories to scan.
PREFIX = ['ART', 'btree', 'prefixbtree', 'hot', 'microbench/cpr_latency', 'SuRF']
# Query types each experiment is split into.
TYPE = ['point', 'range']
DATASETS = ['email', 'url', 'wiki']
# Metric prefixes that result filenames may start with.
VAR = ['cpr','x','height', 'fpr', 'lat', 'insertlat', 'lookuplat', 'mem', 'stats']
# Output CSV for the build-time breakdown experiment.
BT_OUTFILE = "../results/microbench/build_time_breakdown/bt_breakdown.csv"
def generate_result_single(dirpath, filename):
    """Average repeated benchmark runs in dirpath/filename and write the
    per-row means to dirpath/final_<filename>.

    The input file holds several runs of the same table; each run is a block
    of comma-separated lines terminated by a line containing only '-'.
    """
    in_path = dirpath + filename
    out_path = dirpath + 'final_' + filename
    with open(in_path, 'r') as fh:
        print("Generate result for " + in_path)
        lines = fh.readlines()
    # Rows per run = number of lines before the first '-' separator.
    rows_per_run = 0
    for line in lines:
        if line.strip('\n') == '-':
            break
        rows_per_run += 1
    grouped = [[] for _ in range(rows_per_run)]
    row_idx = 0
    for line in lines:
        line = line.strip(',\n')
        if line == '-':
            row_idx = 0  # next run starts over at row 0
            continue
        grouped[row_idx].append(np.array([float(v) for v in line.split(',')]))
        row_idx += 1
    # Mean across runs (axis 1), keeping the row/column layout.
    averaged = np.mean(np.asarray(grouped), axis=1)
    # Output results to file
    with open(out_path, 'w') as out:
        for row in averaged:
            out.write(','.join(str(col) for col in row) + '\n')
def microtree():
    '''Walk every (index-structure, query-type) result directory and average
    any file whose name starts with <metric>_<dataset>.'''
    for pre in PREFIX:
        for t in TYPE:
            # cpr_latency results are not split by query type.
            # NOTE(review): for that prefix cur_dir is identical on both
            # iterations of t, so its files are processed twice; confirm
            # whether that is intended.
            if pre == 'microbench/cpr_latency':
                cur_dir = RESULT_DIR + pre + '/'
            else:
                cur_dir = RESULT_DIR + pre + '/' + t + '/'
            for v in VAR:
                for d in DATASETS:
                    file_prefix = v + '_' + d
                    files = os.listdir(cur_dir)
                    for f in files:
                        if f.startswith(file_prefix):
                            generate_result_single(cur_dir, f)
def buildtime():
    '''Parse "key = value" timing lines from the local file 'bt' and write a
    CSV with one (symbol-select, encode, build-dict) triple per run.'''
    ss_time = []
    encode_time = []
    build_dict_time = []
    with open("bt") as f:
        lines = f.readlines()
        for line in lines:
            wl = line.split("=")
            key = wl[0].strip()
            if (key == "Symbol Select time"):
                ss_time.append(wl[1].strip())
            if (key == "Code Assign(Hu-Tucker) time"):
                encode_time.append(wl[1].strip())
            if (key == "Build Dictionary time"):
                build_dict_time.append(wl[1].strip())
    with open(BT_OUTFILE, 'w') as f:
        # NOTE(review): assumes the three lists end up the same length; a run
        # missing one key would shift columns or raise IndexError.
        for i in range(0, len(ss_time)):
            f.write(ss_time[i] + "," + encode_time[i] + "," + build_dict_time[i]+"\n")
def triearray():
    # Placeholder for the trie-array ("ta") experiment; not implemented yet.
    pass
if __name__ == "__main__":
    # Dispatch on the experiment type given as the first CLI argument.
    if len(sys.argv) < 2:
        print(sys.argv)
        print("Did not generate any results, return")
        exit(0)
    exp_type = sys.argv[1]
    if exp_type == "micro-tree":
        microtree()
    elif exp_type == "bt":
        buildtime()
    elif exp_type == "ta":
        triearray()
    else:
        # Fixed typo in the user-facing message: "Unkown" -> "Unknown".
        print("Unknown experiment type")
# -*- coding: utf-8 -*-
# Generated by Django 1.9.7 on 2016-08-09 22:02
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated initial migration (Django 1.9): creates five simple
    # content models, each consisting only of an auto "id" plus a free-form
    # "texto" body; the options set the Spanish plural names shown in admin.

    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='Actualizacion',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('texto', models.TextField()),
            ],
            options={
                'verbose_name_plural': 'Actualizaci\xf3n',
            },
        ),
        migrations.CreateModel(
            name='Alcance',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('texto', models.TextField()),
            ],
            options={
                'verbose_name_plural': 'Alcance',
            },
        ),
        migrations.CreateModel(
            name='InfoGeneral',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('texto', models.TextField()),
            ],
            options={
                'verbose_name_plural': 'Texto Sistema de la Asociaci\xf3n de Productores',
            },
        ),
        migrations.CreateModel(
            name='Objetivo',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('texto', models.TextField()),
            ],
            options={
                'verbose_name_plural': 'Objetivo',
            },
        ),
        migrations.CreateModel(
            name='SistemaInfo',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('texto', models.TextField()),
            ],
            options={
                'verbose_name_plural': 'Texto Sistema de Informaci\xf3n',
            },
        ),
    ]
| 2,139 | 589 |
# -*- coding: utf-8 -*-
# @Author: djvolz
# @Date: 2016-11-14 17:03:11
# @Last Modified by: djvolz
# @Last Modified time: 2016-11-15 01:13:28
import time
import json # import json library to parse messages
import boto3 # import boto library that handles queuing functions
import lightSequence as ls
class Controller:
    """Polls an SQS queue for Alexa light commands and runs light sequences."""

    def __init__(self):
        # Until a message arrives there is nothing to perform.
        self._action = 'undefined'
        # Connect to the queue that the Alexa skill publishes commands to.
        self._queue = boto3.resource('sqs').get_queue_by_name(
            QueueName='talkWithCarlLights')

    def lightsFactory(self, type):
        """Instantiate and run the light sequence registered for *type*."""
        sequence = ls.LightSequence.factory(type)
        if not sequence:
            return
        print("TURNING LIGHTS ON LIKE WE AIN'T GOT NO POWER BILL")
        sequence.run()

    def processMessages(self):
        """Drain pending queue messages, remembering the last action seen."""
        pending = self._queue.receive_messages(MessageAttributeNames=['Author'])
        for message in pending:
            requested = json.loads(message.body)['request']['action']
            if requested:
                self._action = requested
            # Acknowledge so SQS does not redeliver the message.
            message.delete()

    def run(self):
        """Poll forever: fetch commands, play the sequence, sleep a second."""
        while True:
            self.processMessages()
            self.lightsFactory(self._action)
            time.sleep(1)
| 1,647 | 469 |
#!/usr/bin/env python3
from __future__ import print_function
from copy import copy
from collections import OrderedDict
from abc import ABCMeta, abstractmethod, abstractproperty
from textboard.ansi import ANSI
# Default total line budget for a TextBoard.
LOG_BOARD_DEFAULT_LINES_COUNT = 20
# Default line budget for a single BoardSector (excluding any title line).
LOG_BOARD_SECTOR_DEFAULT_LINES_COUNT = 4
def _validate_id_property(cls, _id):
if not isinstance(_id, bytes) and not isinstance(_id, str):
raise TypeError("{cls} id should be either a string or bytes".format(cls=cls))
def _is_brd_obj(self, obj):
    """Raise TypeError when *obj* is not a BoardObject instance."""
    if isinstance(obj, BoardObject):
        return
    raise TypeError("Given object is not a board object")
class BoardObject(object, metaclass=ABCMeta):
    """BoardObject - The abstract class of a board object.

    Bug fix: the original assigned ``__metaclass__ = ABCMeta`` inside the
    class body, which is the Python 2 spelling and has no effect on
    Python 3 (this file targets Python 3), so the abstract members were
    never enforced.  The metaclass is now passed with ``metaclass=``.
    All concrete subclasses in this module (BoardLine, BoardSector,
    TextBoard) already implement every abstract member.
    """

    def __init__(self):
        pass

    @abstractproperty
    def id(self):
        """Identifier used to look the object up in its container."""
        pass

    @abstractproperty
    def lines_count(self):
        """Number of screen lines the object currently occupies."""
        pass

    @abstractproperty
    def max_lines_count(self):
        """Maximum number of screen lines the object may occupy."""
        pass

    @abstractmethod
    def draw(self):
        """Render the object to the terminal."""
        pass
class BoardLine(BoardObject):
    """A single drawable line composed of ordered, named text fields.

    Fields are registered through the overridden ``__setattr__``: assigning
    a LineField to an attribute records it in ``_fields`` (an OrderedDict),
    and assigning a str/bytes to an existing field updates that field's
    text instead of replacing the attribute.
    """

    class LineField(object):
        """A named, optionally fixed-width, optionally styled text cell."""

        class Delegate(object):
            """The abstract class of a LineField delegate. The delegate
            is used to manipulate the field on different events of the field
            """
            # NOTE(review): __metaclass__ is the Python 2 spelling and has no
            # effect on Python 3, so on_text_change is not actually enforced.
            __metaclass__ = ABCMeta

            @abstractmethod
            def on_text_change(self, field):
                """on_text_change(self, field)
                Manipulate the given field on its text change.
                field - The field being delegated.
                """
                pass

        def __init__(self, field_id, size=None, text="", style=None, delegate=None):
            """LineField(self, field_id, size=None, text="", style=None)
            Creates a BoardLine's field
            field_id - The ID to access the field inside the line
            size - The size of the field in the line (default: None - Unlimited)
            text - The text of the field (default: empty)
            style - The text style of the field (Default: None - the terminal's current style)
            delegate - The field delegate.
            """
            _validate_id_property(self.__class__, field_id)
            self._id = field_id
            self._size = size
            self._text = text
            self._style = style
            # EmptyFieldDelegate (defined later in this module) is a no-op default.
            self._delegate = delegate if delegate is not None else EmptyFieldDelegate()

        @property
        def id(self):
            """The id property of the LineField"""
            return self._id

        @property
        def size(self):
            """The size property of the LineField
            Returns "" (not None) when unsized so it can be used directly
            inside a str.format width specifier in build().
            """
            return self._size if self._size != None else ""

        @property
        def text(self):
            """The text property of the LineField"""
            return self._text

        @property
        def style(self):
            """The style property of the LineField"""
            return self._style

        @property
        def delegate(self):
            """The delegate property of the LineField"""
            return self._delegate

        @text.setter
        def text(self, val):
            """The text property's setter of the LineField
            val - The new text to set (bytes are decoded as UTF-8)
            """
            if isinstance(val, bytes):
                val = val.decode("utf-8")
            self._text = val
            # Give the delegate a chance to react to (and mutate) the field.
            self._delegate.on_text_change(self)

        @style.setter
        def style(self, val):
            """The style property's setter of the LineField
            val - The new style to set
            """
            self._style = val

        @delegate.setter
        def delegate(self, val):
            """The delegate property's setter of the LineField
            val - The new delegate to set
            """
            self._delegate = val

        def build(self):
            """build(self)
            Build the string of the LineField: pad/limit to `size`, strip
            newlines, then apply the style's format if one is set.
            """
            text = "{text:{size}}".format(text=self.text, size=self.size).replace("\n", '')
            if self.style is not None:
                text = self.style.format(text)
            return text

        @classmethod
        def create_from(cls, field):
            """create_from(cls, field) -> LineField
            Create a new line field instance from the given line field.
            field - The line field to duplicate
            NOTE(review): this passes field.size (the property), which is ""
            rather than None for unsized fields — confirm that is intended.
            """
            return cls(field.id, field.size, field.text, field.style, field.delegate)

    def __init__(self, line_id=None):
        """BoardLine(line_id=None)
        Creates a board line
        line_id - The ID for the line, used for accessing it from the contatining board object,
        if none given, line is inaccessable (default: None)
        """
        super(BoardLine, self).__init__()
        if line_id is not None:
            _validate_id_property(self.__class__, line_id)
        self._id = line_id
        # Subclasses generated via __rshift__ inherit a populated _fields
        # in their class dict; in that case re-copy it per instance.
        if not hasattr(self, "_fields"):
            self._fields = OrderedDict()
        else:
            self._copy_fields(self._fields)

    def _copy_fields(self, fields_to_copy):
        """_copy_fields(self, fields_to_copy)
        Copy the given fields dictionary into this line's fields dictionary
        fields_to_copy - The fields to copy into this line's field dictionary
        """
        fields = [field for field in fields_to_copy.values()]
        self._fields = OrderedDict()
        for field in fields:
            self.add(field.id, field.size, field.text, field.style, field.delegate)

    @property
    def id(self):
        """The id property of the BoardLine (falls back to the object id)"""
        return self._id if self._id is not None else id(self)

    @property
    def lines_count(self):
        """The lines count property of the BoardLine (always a single line)"""
        return 1

    @property
    def max_lines_count(self):
        """The max lines count property of the BoardLine"""
        return self.lines_count

    def add(self, field_id, size=None, text="", style=None, delegate=None):
        """add(self, field_id, size=None, text="") -> self
        Add a field to the board line
        field_id - The ID to access the field inside the line
        size - The size of the field in the line (default: None - Unlimited)
        text - The text of the field (default: empty)
        style - The text style of the field (Default: None - the terminal's current style)
        delegate - The delegate of the field
        """
        field = BoardLine.LineField(field_id, size=size, text=text, style=style, delegate=delegate)
        # __setattr__ also registers the field into self._fields.
        setattr(self, field_id, field)
        return self

    def get(self, field_id):
        """get(self, field_id) -> LineField
        Get the requested field from this line
        field_id - The ID of the field to get
        """
        return self._fields[field_id]

    def remove(self, field_id):
        """remove(self, field_id) -> LineField
        Remove a field from the line by its ID, the removed field is returned
        field_id - The ID of the field to remove
        """
        ret_val = self._fields.pop(field_id)
        delattr(self, field_id)
        return ret_val

    def _build(self):
        """_build(self) -> str
        Returns the string value of this line (fields concatenated in order)
        """
        line_txt = ""
        for field in self._fields.values():
            line_txt += field.build()
        return line_txt

    def draw(self):
        """draw(self)
        Draw the line to the screen
        """
        print(self._build())

    def __setattr__(self, name, value):
        # Assigning str/bytes to an existing LineField updates its text;
        # assigning a LineField registers it in _fields as well as setting
        # the attribute.  The AttributeError guard covers assignments made
        # before _fields exists (during __init__).
        if hasattr(self, name):
            attr_to_set = getattr(self, name)
            if isinstance(value, str) or isinstance(value, bytes):
                if isinstance(attr_to_set, BoardLine.LineField):
                    attr_to_set.text = value
                    return
            elif isinstance(value, BoardLine.LineField):
                if not isinstance(attr_to_set, BoardLine.LineField):
                    raise ValueError("Can not override BoardLine property with field '{field_name}'".format(field_name=name))
        try:
            if isinstance(value, BoardLine.LineField):
                self._fields[value.id] = value
            super(BoardLine, self).__setattr__(name, value)
        except AttributeError:
            pass
        # NOTE(review): on the success path this repeats the assignment made
        # inside the try block — harmless but redundant.
        super(BoardLine, self).__setattr__(name, value)

    def __rshift__(self, cls_name):
        """__rshift__(self, cls_name) -> custom BoardLine subclass
        Dynamically creating a new custom subclass of BoardLine. the new class will have the same
        fields as the one of this BoardLine instance and their current text will be set as the initial text
        value for the newly created class fields.
        cls_name - The name of the newly created subclass
        """
        return type(cls_name, (self.__class__, ), self.__dict__)

    @classmethod
    def create(cls, line_id=None, **fields):
        """create(cls, line_id=None, **fields)
        Create a new line instance with the given id. the fields of
        the created line will be filled from the passed fields.
        line_id - The id of the line to create
        **fields - The fields to set.
        """
        line = cls(line_id)
        for field_name, field_val in fields.items():
            if hasattr(line, field_name):
                setattr(line, field_name, field_val)
            else:
                raise ValueError("Field '{field}' does not exist in line.".format(field=field_name))
        return line

    def duplicate(self, line_id=None):
        """duplicate(self, line_id=None) -> BoardLine
        Duplicates the instance of the given line, with a new chosen/random id.
        line_id - The id of the line to create, if none is given, a random id will be selected.
        """
        return self.create(line_id, **self._fields)

    @classmethod
    def create_from(cls, line):
        """create_from(cls, line) -> BoardLine
        Create a new line instance from the given line.
        line - The line to duplicate
        """
        new_line = cls(line._id)
        new_line._copy_fields(line._fields)
        return new_line
class BoardSector(BoardObject):
    """A titled, size-limited group of BoardLines inside a TextBoard."""

    def __init__(self, sector_id, max_lines_count=LOG_BOARD_SECTOR_DEFAULT_LINES_COUNT, title_line=None, draw_empty=True):
        """BoardSector(self, sector_id, name, max_lines_count, title_line, draw_empty)
        Creates a board sector
        sector_id - The ID of the sector, used for accessing it from the containing board object
        max_lines_count - The maximum lines count to set for this sector (Not including the title line)
        title_line - A title line to display when drawing the sector, if none is given, no title will be drawn,
        otherwise a title will be drawn using the given BoardLine and both lines_count and max_lines_count will increase by one.
        (default: None)
        draw_empty - A boolean that indicates wether or not the empty lines of the sector should be drawn (default: True)
        """
        super(BoardSector, self).__init__()
        _validate_id_property(self.__class__, sector_id)
        self._id = sector_id
        # Subclasses generated via __rshift__ may carry pre-populated class
        # attributes; copy them per instance in that case.
        if not hasattr(self, "_lines"):
            self._lines = OrderedDict()
        else:
            self._copy_lines(self._lines)
        self._max_lines_count = max_lines_count
        if not hasattr(self, "_title") or title_line is not None:
            self._title = title_line
        elif hasattr(self, "_title") and self._title != None:
            # Inherited class-level title: duplicate it so instances don't share.
            self._title = BoardLine.create_from(self._title)
        self._draw_empty = draw_empty

    def _copy_lines(self, lines_to_copy):
        """_copy_lines(self, lines_to_copy)
        Copy the given lines dictionary into this sector's lines dictionary
        lines_to_copy - The lines to copy into this sector's field dictionary
        """
        lines = [line.__class__.create_from(line) for line in lines_to_copy.values()]
        self._lines = OrderedDict()
        self.add(*lines)

    @property
    def id(self):
        """The id property of the BoardSector"""
        return self._id

    @property
    def lines(self):
        """The lines property of the BoardSector"""
        return self._lines

    @property
    def lines_count(self):
        """The lines count property of the BoardSector (title counts as one)"""
        return len(self.lines) + int(self._has_title)

    @property
    def max_lines_count(self):
        """The max lines count property of the BoardSector (title counts as one)"""
        return self._max_lines_count + int(self._has_title)

    @property
    def title(self):
        """The title property of the BoardSector"""
        return self._title

    @property
    def _has_title(self):
        """The _has_title property of the BoardSector
        indicates wether this sector has a title or not
        """
        return self._title != None

    @property
    def draw_empty(self):
        """The draw_empty property of the BoardSector
        indicates wether the empty lines should be drawn or not
        """
        return self._draw_empty

    def add(self, *lines):
        """add(self, *lines) -> self
        Add line(s) to this sector
        *lines - The line(s) to add to the sector
        """
        for line in lines:
            if self.lines_count >= self.max_lines_count:
                raise OverflowError("Board sector '{sector}' has reached the maximum lines count of {max_cnt}".format(sector=self.id,
                                                                                                                      max_cnt=self.max_lines_count))
            if line.id in self._lines:
                raise ValueError("Board sector '{sector}' already contains a line with the ID '{line.id}'".format(sector=self.id, line=line))
            self._lines[line.id] = line
            # Anonymous lines fall back to id(self) (an int) — those are
            # only reachable through get(), not as attributes.
            if not isinstance(line.id, int):
                setattr(self, line.id, line)
        return self

    def get(self, line_id):
        """get(self, line_id) -> BoardLine
        Get a line from the sector by its ID
        line_id - The ID of the line to get
        """
        return self._lines[line_id]

    def remove(self, *lines_ids):
        """remove(self, *lines_ids)
        Remove line(s) from the sector by ID(s)
        *lines_ids - The ID(s) of the line(s) to remove
        """
        for line_id in lines_ids:
            self._lines.pop(line_id)

    def clear(self):
        """clear(self)
        Clear all of the lines in this sector
        """
        self.lines.clear()

    def draw(self):
        """draw(self)
        Draw the sector to the screen
        """
        if self._has_title:
            self.title.draw()
        for line in self.lines.values():
            line.draw()
        if self.draw_empty:
            # Pad with blank lines so the sector always occupies its budget.
            print("\n"*(self.max_lines_count-self.lines_count), end="")

    def __setattr__(self, name, value):
        # NOTE(review): when `name` is a registered line id this first sets
        # the attribute to the stored line and then immediately overwrites it
        # with `value` below — the first assignment appears redundant.
        try:
            if name in self._lines:
                super(BoardSector, self).__setattr__(name, self._lines[name])
        except AttributeError:
            pass
        super(BoardSector, self).__setattr__(name, value)

    def __rshift__(self, cls_name):
        """__rshift__(self, cls_name) -> custom BoardSector subclass
        Dynamically creating a new custom subclass of BoardSector. the new class will have the same
        lines and title as the one of this BoardSector instance and their current data will be set as the initial data
        value for the newly created class lines and title.
        cls_name - The name of the newly created subclass
        """
        return type(cls_name, (self.__class__, ), self.__dict__)
class TextBoard(BoardObject):
    """The top-level board: an ordered collection of drawable board objects.

    Bug fix: remove() previously iterated an undefined name ``r_ids`` and
    popped from a nonexistent ``self._lines``, raising NameError on any
    call; it now iterates ``obj_ids`` and pops from ``self._board``.
    """

    def __init__(self, id=None, max_lines_count=LOG_BOARD_DEFAULT_LINES_COUNT):
        super(TextBoard, self).__init__()
        self._id = id
        self._max_lines_count = max_lines_count
        self._board = OrderedDict()

    @property
    def id(self):
        """The id property of the TextBoard (falls back to the object id)"""
        return self._id if self._id is not None else id(self)

    @property
    def lines_count(self):
        """The lines count property of the TextBoard.
        Sums the children's max_lines_count, i.e. the reserved budget.
        """
        count = 0
        for brd_obj in self._board.values():
            count += brd_obj.max_lines_count
        return count

    @property
    def max_lines_count(self):
        """The max lines count property of the TextBoard"""
        return self._max_lines_count

    @max_lines_count.setter
    def max_lines_count(self, val):
        """The max lines count property setter of the TextBoard
        val - The max lines count to set"""
        self._max_lines_count = val

    def add(self, *brd_objects):
        """add(self, *brd_objects) -> self
        Add board object(s) to this board
        *brd_objects - The board object(s) to add
        """
        for brd_object in brd_objects:
            if self.lines_count + brd_object.max_lines_count > self.max_lines_count:
                raise OverflowError("Failed to add board object: The board has reached the maximum lines count of {max_cnt}".format(max_cnt=self.max_lines_count))
            self._board[brd_object.id] = brd_object
        return self

    def get(self, obj_id):
        """get(self, obj_id) -> BoardObject
        Get a board object from this board
        obj_id - The ID of the board object to get
        """
        return self._board[obj_id]

    def remove(self, *obj_ids):
        """remove(self, *obj_ids)
        Remove board object(s) from this board
        *obj_ids - The ID(s) of the board object(s) to remove
        """
        for obj_id in obj_ids:
            self._board.pop(obj_id)

    def clear(self):
        """clear(self)
        Clear all of the board objects in this board
        """
        self._board.clear()

    def draw(self, clear_screen=False):
        """draw(self, clear_screen=False)
        Draw the board to the screen
        clear_screen - indicates wether the screen should be cleared
        first or not. in any case the drawing of the board will start
        from the first line and the previously drawn board will be
        erased. (Default: False)
        """
        if clear_screen:
            ANSI.scrn_reset()
        else:
            ANSI.cur_set()
            self._erase_printed_board()
        for obj in self._board.values():
            obj.draw()

    def _erase_printed_board(self):
        """_erase_printed_board(self)
        Erase the printed board from the screen.
        """
        ANSI.cur_save()
        for i in range(self.max_lines_count):
            ANSI.ln_clear()
            ANSI.cur_next_ln()
        ANSI.cur_restore()

    def __getattr__(self, name):
        # NOTE(review): returns None (instead of raising AttributeError) for
        # unknown attributes — kept as-is since callers may rely on it.
        if name in self._board:
            return self._board[name]

    def __del__(self):
        # Leave the cursor below the board area on teardown.
        ANSI.cur_down(self.max_lines_count)
def redraws(board, clear_screen=True):
    """Decorator factory: redraw *board* after every call of the wrapped function.

    board - The board to redraw
    clear_screen - Forwarded to board.draw() on each redraw (default: True)
    """
    def decorator(func):
        def wrapped(*args, **kwargs):
            result = func(*args, **kwargs)
            # Refresh the display once the wrapped call has finished.
            board.draw(clear_screen)
            return result
        return wrapped
    return decorator
# Convenience alias for the nested delegate interface.
LineFieldDelegate = BoardLine.LineField.Delegate


class EmptyFieldDelegate(LineFieldDelegate):
    """EmptyFieldDelegate - An empty LineField delegate"""

    def on_text_change(self, field):
        # Intentionally a no-op: this is the default delegate for fields
        # created without an explicit one.
        pass


"""PlainTextLine - The most simple BoardLine one could wish.
This line has only field named text.
"""
# Built by instantiating a line with a single "text" field and using the
# >> operator to mint a reusable subclass from that instance.
PlainTextLine = BoardLine().add("text") >> "PlainTextLine"
class ProcessSector(BoardSector):
    """A BoardSector that tails a subprocess's piped output line by line."""

    def __init__(self, sector_id, max_lines_count=LOG_BOARD_SECTOR_DEFAULT_LINES_COUNT, title_line=None,
                 draw_empty=True, line_cls=PlainTextLine, line_handler=None, **line_fields):
        """__init__(self, sector_id, max_lines_count, title_line, draw_empty, line_cls, line_handler, **line_fields)
        Creates a ProcessSector which is a subclass of BoardSector
        This is a special sector class dedicated to work with subprocess.Popen that was created with the flags:
        stdout=PIPE and optionally stderr=STDOUT (Both values from the subprocess module)
        sector_id - The ID of the sector, used for accessing it from the containing board object
        max_lines_count - The maximum lines count to set for this sector (Not including the title line)
        title_line - A title line to display when drawing the sector, if none is given, no title will be drawn,
        otherwise a title will be drawn using the given BoardLine and both lines_count and max_lines_count will increase by one.
        (default: None)
        draw_empty - A boolean that indicates wether or not the empty lines of the sector should be drawn (default: True)
        line_cls - The BoardLine class to use for drawing the tracked process (default: PlainTextLine)
        line_handler - A callable that receives a line and manipulates it as desired
        **line_fields - kwargs to format the created Boardlines. The values should be either a callable that returns a string or a string.
        """
        super(ProcessSector, self).__init__(sector_id, max_lines_count=max_lines_count, title_line=title_line, draw_empty=draw_empty)
        # The line class must expose a "text" field for the process output.
        if not hasattr(line_cls, "text"):
            raise ValueError("ProcessSector BoardLine must have a text field.")
        self._line_cls = line_cls
        self._line_fields = line_fields
        self._line_handler = line_handler

    def update_from_file(self, file):
        """update_from_file(self, file) -> bool
        Update the sector with output from the given file object, returning true as long as there is data to read.
        file - The file object to read the lines from
        * NOTE: file must supply a readline method.
        """
        line = file.readline()
        if not line: return False
        brd_line = self._line_cls()
        brd_line.text = line
        # Fill the extra fields: literal strings are used as-is, callables
        # are invoked for a fresh value per line.
        for field, field_val_getter in self._line_fields.items():
            brd_line.get(field).text = field_val_getter if not callable(field_val_getter) else field_val_getter()
        if self._line_handler is not None: self._line_handler(brd_line)
        # Scroll: drop the oldest line once the sector budget is reached.
        if self.lines_count >= self.max_lines_count:
            self.lines.popitem(False)
        self.add(brd_line)
        return True
from .adobe_sign_api import AdobeSignAPI
| 41 | 15 |
# ••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••
# Copyright (c) 2018, S.J.M. Steffann. This software is licensed under the BSD
# 3-Clause License. Please see the LICENSE file in the project root directory.
# ••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••
import sys
import requests
from django.utils.translation import gettext_lazy as _
from uwsgi_tasks import RetryTaskException, task
from generic.utils import TokenAuth, print_error, print_message, print_warning, retry_get
@task(retry_count=5, retry_timeout=300)
def remove_from_trillian(pk):
    """Background task: delete an analysed InstanceRun from its Trillian host.

    Retries (up to 5 times, 300s apart) by raising RetryTaskException; any
    unexpected error is logged and also converted into a retry.
    """
    from measurements.models import InstanceRun

    try:
        # Try to find the InstanceRun multiple times, in case of a race condition
        run = retry_get(InstanceRun.objects.exclude(analysed=None), pk=pk)
        if not run.analysed:
            print_warning(_("InstanceRun {pk} has not yet been analysed").format(pk=pk))
            return

        if not run.trillian_url:
            # Already cleaned up
            return

        print_message(_("Deleting InstanceRun {run.pk} ({run.url}) from {run.trillian.name}").format(run=run))

        response = requests.request(
            method='DELETE',
            url=run.trillian_url,
            auth=TokenAuth(run.trillian.token),
            timeout=(5, 15),
        )

        # NOTE(review): stray debug print of the raw response object — left
        # in place to preserve behaviour; consider print_message instead.
        print(response)

        if response.status_code not in [204, 404]:
            # 204 = deleted, 404 = doesn't exist anymore
            print_error(
                _("{run.trillian.name} didn't accept our request ({response.status_code}), retrying later").format(
                    run=run,
                    response=response
                )
            )
            raise RetryTaskException

        # Clearing trillian_url marks the run as cleaned up (see guard above).
        run.trillian_url = ''
        run.save()

        print_message(_("Trillian {run.trillian.name} deleted completed InstanceRun {run.pk}").format(run=run))

    except RetryTaskException:
        raise

    except InstanceRun.DoesNotExist:
        print_warning(_("InstanceRun {pk} does not exist anymore").format(pk=pk))
        return

    except Exception as ex:
        print_error(_('{name} on line {line}: {msg}').format(
            name=type(ex).__name__,
            line=sys.exc_info()[-1].tb_lineno,
            msg=ex
        ))
        raise RetryTaskException
| 2,350 | 833 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
## Project:
## Contact: Oliver Watts - owatts@staffmail.ed.ac.uk
import sys
import os
import glob
import os
import fileinput
from argparse import ArgumentParser
import shelve
import h5py
import numpy
import numpy as np
from train_halfphone import get_data_dump_name
from synth_halfphone import Synthesiser
# import pylab
from keras.layers import Dense, Activation
from keras.models import Sequential
from keras.callbacks import EarlyStopping
def train_autoencoder(data):
    """Fit a dense autoencoder (1000-1000-60-1000-n) on *data* (rows = samples).

    The 60-unit bottleneck is linear (no activation); training uses MSE loss,
    the Adam optimiser and early stopping on validation loss.
    """
    _, dim = data.shape
    net = Sequential()
    net.add(Dense(units=1000, input_dim=dim))
    net.add(Activation('relu'))
    net.add(Dense(units=1000))
    net.add(Activation('relu'))
    # Linear bottleneck layer.
    net.add(Dense(units=60))
    net.add(Dense(units=1000))
    net.add(Activation('relu'))
    net.add(Dense(units=dim))
    net.compile(loss='mean_squared_error', optimizer='adam')
    stopper = EarlyStopping(monitor='val_loss', patience=5, verbose=1, mode='min')
    net.fit(data, data, epochs=30, batch_size=64, callbacks=[stopper],
            validation_split=0.10, shuffle=True)
    return net
def get_prediction_errors(data, autoencoder):
    """Per-sample mean squared reconstruction error of *autoencoder* on *data*."""
    reconstruction = autoencoder.predict(data)
    return np.mean((reconstruction - data) ** 2, axis=1)
def main_work(config, synth):
    """Experimental join-cost analysis over the unit database.

    NOTE(review): this function is exploratory and exits early via
    sys.exit() below; the code after that call is unreachable and
    references names never defined here (`autoencoder`, `positive_scores`,
    `pylab` — pylab's import is commented out at the top of the file).
    """
    print
    print get_data_dump_name(config)
    join_data_dump = get_data_dump_name(config, joindata=True)
    # Load the per-unit join features (start and end of each half-phone unit).
    f = h5py.File(join_data_dump, "r")
    start_join_feats = f["start_join_feats"][:,:]
    end_join_feats = f["end_join_feats"][:,:]
    f.close()
    '''
    print synth.join_weight_vector.shape
    s = start_join_feats[100].reshape((5,87))
    e = end_join_feats[99].reshape((5,87))
    es = np.vstack([e,s])
    print e.shape
    pylab.plot(es)
    pylab.show()
    sys.exit('wewev')
    '''
    # print synth.unit_index
    ## Work out the minimal linguistic cues which should hold to consider a join:-
    # lc_inventory = {}
    # for ll_l_c_r_rr in synth.train_unit_names:
    #     (ll,l,c,r,rr) = ll_l_c_r_rr.split('/')
    #     lc_inventory[(l,c.split('_')[0])] = 0
    # lc_inventory = lc_inventory.keys()
    ### build index of units by l phone only (c phones are already handled by synth.unit_index)
    print 'build l index...'
    l_index = {}
    seen = {}
    for ll_l_c_r_rr in synth.train_unit_names:
        (ll,l,c,r,rr) = ll_l_c_r_rr.split('/')
        if c.endswith('_L'):
            # assert '%s/%s'%(l,c) not in l_index
            # De-duplicate on the (left, centre) pair before extending.
            if '%s/%s'%(l,c) not in seen:
                seen['%s/%s'%(l,c)] = 0
                if l not in l_index:
                    l_index[l] = []
                l_index[l].extend(synth.unit_index['%s/%s'%(l,c)])
    print '...done'
    lengths = [(len(v), k) for (k,v) in l_index.items()]
    # for k,v in l_index.items():
    #     print k, len(v)
    lengths.sort()
    print lengths[-10:]
    # sys.exit('evwevwsrb222')
    # transitions = []  ## list of lists
    # Shelve keeps the (large) per-unit transition lists on disk.
    transitions = shelve.open('/tmp/transitions2')
    nlinks = 0
    for (i,name) in enumerate(synth.train_unit_names):
        if i % 100 == 0:
            print '%s of %s'%(i, synth.number_of_units)
        (ll,l,c,r,rr) = name.split('/')
        if c.endswith('_L'):
            # Left half-phone: the only successor is its own right half.
            next_unit = c.replace('_L','_R')
            # transitions.append(synth.unit_index[next_unit])
            transitions_from_here = synth.unit_index[next_unit]
        else:
            ## find all following units compatible with it:
            this_current = c.split('_')[0]
            this_right = r
            transitions_from_here = l_index.get(this_current, []) + synth.unit_index.get(this_right + '_L', [])
            # for (l,c) in lc_inventory:
            #     if this_current == l or this_right == c:
            #         transitions_from_here.extend(synth.unit_index['%s/%s_L'%(l,c)])
        # transitions.append(transitions_from_here)
        transitions[str(i)] = numpy.array(transitions_from_here, dtype=int)
        nlinks += len(transitions_from_here)
    # print transitions
    print transitions['101']
    # nlinks = sum([len(sublist) for sublist in transitions])
    print nlinks
    print synth.number_of_units
    # NOTE(review): deliberate early exit — everything below is unreachable.
    sys.exit('dvwsv0000000')
    negative_scores = []
    for i in xrange(synth.number_of_units):
        if i % 100 == 0:
            print i
        to_index = transitions[str(i)]
        ## Remove natural transitions:-
        to_index = to_index[to_index!=i+1]
        from_ixx = np.ones(to_index.shape, dtype=int) * i
        features = np.hstack([end_join_feats[from_ixx,:], start_join_feats[to_index,:]])
        # NOTE(review): `autoencoder` is not defined in this scope.
        negative_scores.append(get_prediction_errors(features, autoencoder))
    negative_scores = np.concatenate(negative_scores)
    # pylab.subplot(211)
    # NOTE(review): `positive_scores` and `pylab` are not defined here.
    pylab.hist(positive_scores, bins=30, alpha=0.6, color='g', normed=1)
    # pylab.subplot(212)
    pylab.hist(negative_scores, bins=30, alpha=0.6, color='b', normed=1)
    pylab.show()
    transitions.close()
    ### 23 blizzard utts:  53909 / 1408
    # for i in xrange(synth.number_of_units-1):
    #     j = i + 1
    #     distance = synth.get_natural_distance(i,j,order=2)
    #     print distance
if __name__=="__main__":
    #################################################
    # ======== process command line ==========
    a = ArgumentParser()
    a.add_argument('-c', dest='config_fname', required=True)
    opts = a.parse_args()
    # Execute the config file into a plain dict (Python 2 execfile).
    config = {}
    execfile(opts.config_fname, config)
    del config['__builtins__']
    print config
    from active_learning_join import JoinDatabaseForActiveLearning
    with JoinDatabaseForActiveLearning(opts.config_fname) as o:
        ### A) initial pool
        #### look at which candidates are really considered:-
        ## o.analyse_candidate_types()
        #### find k best joins for each unit:-
        #### o.initialise_join_table_with_knn(100)
        ### was this one:--
        # o.initialise_join_table_with_heldout_data()
        # o.run_al_session(initial=True)
        o.exhaustive_check_experiment('/tmp/cand.npy')
        # o.random_walk(1800, '/tmp/random_walk.wav')
        print o
    # sys.exit('wvwrvwrv----------')
    # synth = Synthesiser(opts.config_fname)
    # main_work(config, synth)
import typing
import chainable_iterator
# Generic key/value type variables for the dict-walking helpers below.
K = typing.TypeVar('K')
V = typing.TypeVar('V')
# A path: the sequence of keys leading to a value in a nested mapping.
P = typing.List[K]
# Predicate called as predicate(path, key, value) -> keep this entry?
Predicate = typing.Callable[[P, K, V], bool]
# Work-list node: a path prefix plus the sub-mapping found at that path.
_Node = typing.NamedTuple('_Node', (('path', P), ('data', typing.Mapping)))
def match_all(_kp: P, _k: K, _v: V) -> bool:
    """Predicate that accepts every (path, key, value) entry."""
    return True
def _decompose_dict(predicate: Predicate, the_dict: typing.Mapping[K, V]) \
        -> typing.Generator[typing.Tuple[P, V], None, None]:
    """Yield (path, value) for every entry of *the_dict* accepted by *predicate*.

    Walks nested dicts breadth-first via a ChainableIterator work list.
    Note the careful path handling: the key is appended in place, but
    ``path = path[:-1]`` rebinds to a *copy*, so lists already yielded (or
    queued with a sub-dict) keep their final key.
    """
    root = chainable_iterator.ChainableIterator((_Node([], the_dict),))
    for node in root:
        path = node.path
        for k, v in node.data.items():
            path.append(k)
            if isinstance(v, dict):
                # Queue the sub-dict for a later pass with the extended path.
                root.chain((_Node(path, v),))
            if predicate(path, k, v):
                yield path, v
            path = path[:-1]
def _recompose_dict(items: typing.Iterator[typing.Tuple[P, V]]) -> typing.Mapping[K, V]:
    """Rebuild a nested dict from (path, value) pairs (inverse of _decompose_dict).

    NOTE(review): existing entries are never overwritten — setdefault keeps
    the first value seen at a path; confirm this dedupe behaviour is intended.
    """
    rebuilt = {}
    for path, value in items:
        node = rebuilt
        final = len(path) - 1
        for depth, key in enumerate(path):
            # Intermediate keys get (or reuse) a sub-dict; the last key gets the value.
            node = node.setdefault(key, value if depth == final else {})
    return rebuilt
def filter_dict(predicate: Predicate, the_dict: typing.Mapping[K, V]) -> typing.Mapping[K, V]:
    """Return a nested copy of *the_dict* keeping only entries accepted by *predicate*."""
    accepted = _decompose_dict(predicate, the_dict)
    return _recompose_dict(accepted)
| 1,268 | 431 |
import argparse
import os
import re
from utils import interpret
parser = argparse.ArgumentParser()
# "interpret" runs the program normally; "debug" selects the debug mode.
parser.add_argument("-m", "--mode", choices=["interpret", "debug"], required=True, help="Mode to interpret your file")
parser.add_argument("-f", "--file", required=True, help="File to interpret, pass in either file name (in the current working directory) or a file path")
# Parsed once at import time; the functions below read these globals.
args = parser.parse_args()
def processor(file_name):
    """Load, pre-process and interpret a .marble source file.

    file_name - a file in the current working directory (path_parser() is
    expected to have chdir'd there already); a falsy name aborts silently.
    """
    if not file_name:
        return
    try:
        if not file_name.endswith(".marble"):
            print(f"error: invalid file extension '{file_name[file_name.index('.'):]}'\nvalid extension: '*.marble'")
            return
        with open(f"./{file_name}", "r", encoding="utf8") as source:
            source_lines = source.readlines()
        cleaned = []
        for line_number, content in enumerate(source_lines, start=1):
            # Cut the line at the first double quote, then keep only the
            # characters of the marble instruction alphabet.
            content = re.sub("[^qweruiopasdjk!:<>\+-]", "", re.match("^[^\"]*", content).group())
            if len(content) == 1 and "\n" in content or not content:
                continue
            cleaned.append((line_number, content))
        interpret(cleaned, args.mode)
    except FileNotFoundError:
        print(f"error: file '{file_name}' not found in {os.getcwd()}")
    except ValueError:
        print(f"error: '{file_name}' is not recognized as a file")
    except KeyboardInterrupt:
        print("error: proccess has been interrupted")
def path_parser():
    """Validate args.file, chdir into its directory and return the file name.

    Returns None (after printing an error) when the path contains invalid
    characters or a component directory does not exist.
    """
    path = args.file.split("\\") if "\\" in args.file else args.file.split("/")
    if not path[-1]:
        # Trailing separator ("dir/"): drop the empty final component.
        path.pop()
    file_name = path[-1]
    # Bug fix: the original list was missing commas after ')' and ';', which
    # concatenated adjacent entries into the substrings ")," and ";'" — lone
    # ')' ',' ';' and "'" characters therefore slipped through validation.
    # Duplicated '"' and '\\' entries were also removed.
    invalid_characters = ['"', '~', '!', '@', '#', '$', '%', '^', '&', '*', '(', ')', ',', '+', '{', '}', '\\', '|', '<', '>', '?', '`', '=', '[', ']', ';', "'", '/']
    for entry in path:
        if any(char in entry for char in invalid_characters):
            print(f"error: invalid syntax '{args.file}'")
            return
    if len(path) != 1:
        # Walk into the directory part component by component.
        for entry in path[:-1]:
            try:
                os.chdir(entry)
            except FileNotFoundError:
                print(f"error: directory '{entry}' doesn't exist in {os.getcwd()}")
                return
            except OSError:
                print(f"error: invalid syntax '{args.file}'")
                return
    return file_name
if __name__ == "__main__":
    # Resolve/validate the path first, then interpret the resulting file.
    processor(path_parser())
| 2,549 | 771 |
import json
from uuid import UUID
from pathlib import Path
from typing import List, Dict, Any, Iterable
class SafeEncoder(json.JSONEncoder):
    """JSON encoder that additionally accepts UUID values (emitted as hex)."""

    def default(self, obj):
        # Represent UUIDs by their 32-character hex form; anything else
        # falls through to the base implementation (which raises TypeError).
        if isinstance(obj, UUID):
            return obj.hex
        return super().default(obj)
def convert_to_uuid_str(
    dict_args: Dict[str, Any], uuid_arg_names: Iterable[str]
) -> Dict[str, Any]:
    """Stringify the values stored under *uuid_arg_names* in *dict_args*.

    Keys that are missing or hold a falsy value (e.g. ``None``) are left
    untouched.  The dict is mutated in place and also returned.
    """
    for key in uuid_arg_names:
        value = dict_args.get(key)
        if value:
            dict_args[key] = str(value)
    return dict_args
| 598 | 198 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
import os
import subprocess
def make_snapshot(filename, page=0):
    # Convert one page of a PDF to a JPEG using ImageMagick's `convert`.
    # Skips the conversion when the JPEG already exists; any stderr output
    # from `convert` is echoed and appended to the module-level LOGGER file.
    # (Python 2 script: print statements, LOGGER opened via `file()` in main.)
    output = filename.replace('.pdf', '.jpg')
    if os.path.exists(output):
        return None
    # os.unlink(output)
    print filename
    args = ['convert', '%s[%i]' % (filename, page), output]
    stderr = subprocess.Popen(args, stderr=subprocess.PIPE).stderr.read().strip()
    if stderr:
        print stderr
        LOGGER.write('\n%s\n%s\n%s\n' % (filename, ' '.join(args), stderr))
def find_pdf(dirname):
    # Recursively walk `dirname` and snapshot the first page of every .pdf.
    # Unreadable directories are skipped silently (returns None either way).
    try:
        contents = os.listdir(dirname)
    except OSError:
        return None
    for filename in contents:
        cwd = os.path.join(dirname, filename)
        if os.path.isdir(cwd):
            # Recurse into subdirectories; the return value is intentionally ignored.
            find_pdf(cwd)
        else:
            if filename.endswith('.pdf'):
                make_snapshot(cwd)
if __name__ == '__main__':
    # Log file named after the script itself (Python 2 `file` builtin).
    LOGGER = file('%s.log' % os.sys.argv[0], 'a')
    if len(os.sys.argv) == 1:
        # No arguments: scan the current working directory recursively.
        find_pdf(os.getcwd())
    else:
        # Arguments may be individual PDF files or directories to scan.
        for arg in os.sys.argv[1:]:
            if arg.endswith('.pdf'):
                make_snapshot(arg)
            else:
                find_pdf(arg)
    LOGGER.close()
| 1,165 | 401 |
import csv
import json
import os
import argparse
parser = argparse.ArgumentParser()
parser.add_argument("--dzikir_type", dest="dzikir_type", type=str, help="Please Add Al Matsurat Type")
args = parser.parse_args()

# Default to the "sugro" (short) collection when no type is given.
# Idiom fix: `is None`, not `== None`.
dzikir_type = args.dzikir_type
if dzikir_type is None:
    dzikir_type = "sugro"

__dir__ = os.path.dirname(__file__)

# NOTE(review): "kubo.json" looks like a typo for "kubro.json" — confirm
# against the actual data directory before renaming.
# Context managers fix the original's leaked file handles.
if dzikir_type == "sugro":
    with open(f"{__dir__}/data/sugro.json") as arabic_file:
        dzikirData = json.load(arabic_file)
else:
    with open(f"{__dir__}/data/kubo.json") as arabic_file:
        dzikirData = json.load(arabic_file)

# Merge the Latin transliteration rows into the matching dzikir entries:
# each entry consumes as many CSV rows as it has `contents` items.
with open(f"{__dir__}/data/latin_al-matsurat_sugro.csv", mode="r") as csv_file:
    csv_reader = csv.DictReader(csv_file)
    currentId = 0
    contentsLength = 0
    latins = []
    for row in csv_reader:
        if contentsLength == 0:
            contentsLength = len(dzikirData[currentId]['contents'])
        latins.append(row["Latin Text"])
        if len(latins) == contentsLength:
            dzikirData[currentId]['latins'] = latins
            latins = []
            contentsLength = 0
            currentId += 1

if dzikir_type == "sugro":
    # BUG FIX: the original serialized with str(...).replace("'", '"'), which
    # corrupts any text containing an apostrophe and emits Python literals
    # (True/None) rather than JSON.  Write real JSON instead.
    with open(f"{__dir__}/data/sugro.json", "w") as out_file:
        json.dump(dzikirData, out_file, ensure_ascii=False)
"""A pandas.DataFrame subclass for Prometheus range vector responses."""
from pandas import DataFrame
from pandas._typing import Axes, Dtype
from typing import Optional, Sequence
class MetricRangeDataFrame(DataFrame):
    """Subclass to format and represent Prometheus query response as pandas.DataFrame.

    Assumes response is either a json or sequence of jsons.
    This class should be used specifically to instantiate a query response,
    where the query response has several timestamp values per series.
    That is, a range vector is expected.
    If the data is an instant vector, use MetricSnapshotDataFrame instead.

    Some argument descriptions in this docstring were copied from pandas.core.frame.DataFrame.

    :param data: (list|json) A single metric (json with keys "metric" and "values"/"value")
        or list of such metrics received from Prometheus as a response to query
    :param index: (pandas.Index|array-like) Index to use for resulting dataframe. Will default to
        pandas.RangeIndex if no indexing information part of input data and no index provided.
    :param columns: (pandas.Index|array-like) Column labels to use for resulting dataframe. Will
        default to list of labels + "timestamp" + "value" if not provided.
    :param dtype: (dtype) default None. Data type to force. Only a single dtype is allowed. If None, infer.
    :param copy: (bool) default False. Copy data from inputs. Only affects DataFrame / 2d ndarray input.

    Example Usage:
      .. code-block:: python

          prom = PrometheusConnect()
          metric_data = prom.get_current_metric_value(metric_name='up', label_config=my_label_config)
          metric_df = MetricRangeDataFrame(metric_data)
          metric_df.head()
          '''
          +------------+----------+--------------+-----------------+-------+
          | timestamp  | __name__ | cluster      | label_2         | value |
          +============+==========+==============+=================+=======+
          | 1577836800 | __up__   | cluster_id_0 | label_2_value_2 | 0     |
          +------------+----------+--------------+-----------------+-------+
          | 1577836801 | __up__   | cluster_id_1 | label_2_value_3 | 1     |
          +------------+----------+--------------+-----------------+-------+
          '''
    """
    def __init__(
        self,
        data=None,
        index: Optional[Axes] = None,
        columns: Optional[Axes] = None,
        dtype: Optional[Dtype] = None,
        copy: bool = False,
    ):
        """Flatten the Prometheus range-vector json(s) into rows, then build the DataFrame.

        Each series contributes one row per (timestamp, value) pair, merged
        with that series' label set.  Raises TypeError for instant vectors.
        """
        if data is not None:
            # if just a single json instead of list/set/other sequence of jsons,
            # treat as list with single entry
            if not isinstance(data, Sequence):
                data = [data]
            row_data = []
            for v in data:
                # An instant vector carries "value" (singular) — reject it.
                if "value" in v:
                    raise TypeError(
                        "data must be a range vector. Expected range vector, got instant vector"
                    )
                # t is a [timestamp, value] pair; labels come from "metric".
                for t in v["values"]:
                    row_data.append({**v["metric"], "timestamp": t[0], "value": t[1]})
        # init df normally now
        # NOTE(review): if data is None, row_data is unbound and the call
        # below raises NameError — confirm whether data=None is supported.
        super(MetricRangeDataFrame, self).__init__(
            data=row_data, index=index, columns=columns, dtype=dtype, copy=copy
        )
        self.set_index(["timestamp"], inplace=True)
| 3,642 | 926 |
import sys
sys.path.append('/home/jwalker/dynamics/python/atmos-tools')
sys.path.append('/home/jwalker/dynamics/python/atmos-read')
import os
import xarray as xray
import pydap
import numpy as np
import collections
import atmos as atm
# OpenDAP endpoint for the OSU PRISM monthly climate dataset.
url = 'http://iridl.ldeo.columbia.edu/SOURCES/.OSU/.PRISM/.monthly/dods'
# Open lazily via xarray's pydap engine; decode_times=False keeps the raw numeric time axis.
ds = xray.open_dataset(url, decode_times=False, engine='pydap')
# Same dataset through the raw pydap client, presumably for comparison/debugging.
ds2 = pydap.client.open_url(url)
# EMD approximation module (based on auction algorithm)
# memory complexity: O(n)
# time complexity: O(n^2 * iter)
# author: Minghua Liu
# input:
# p1, p2: [#batch, #points, 3]
# where p1 is the predicted point cloud and p2 is the ground truth point cloud
# two point clouds should have same size and be normalized to [0, 1]
# #points should be a multiple of 1024
# #batch should be no greater than 512
# eps is a parameter which balances the error rate and the speed of convergence
# iters is the number of iteration
# we only calculate gradient for p1
# Output:
# dist: [#batch, #points], sqrt(dist) -> L2 distance
# assignment: [#batch, #points], index of the matched point in the ground truth point cloud
# the result is an approximation and the assignment is not guranteed to be a bijection
import torch
from torch import nn
from..functions.emd import emd_function
class emdModule(nn.Module):
    """Thin ``nn.Module`` wrapper around the auction-based EMD approximation.

    Delegates directly to ``emd_function``; see the module header comment for
    the expected input shapes and the meaning of ``eps``/``iters``.
    """

    def __init__(self):
        super().__init__()

    def forward(self, p1, p2, eps, iters):
        # All of the work happens inside the autograd function.
        return emd_function(p1, p2, eps, iters)
| 1,076 | 345 |
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
# Odoo addon manifest for the Website Google Map module.
{
    'name': 'Website Google Map',
    'category': 'Website',
    'summary': '',
    'version': '1.0',
    'description': """
Odoo Website Google Map
==========================
""",
    # Needs partner geolocation plus the website partner layer.
    'depends': ['base_geolocalize', 'website_partner'],
    'data': [
        'views/google_map_templates.xml',
    ],
    'installable': True,
}
| 437 | 150 |
from torch import nn
import torch
class EmbeddingOutput(nn.Module):
    """Frozen output projection mapping hidden vectors to vocabulary logits.

    The weight is stored as ``(n_vocab, emb_size)`` and transposed in
    ``forward``.  It is frozen (``requires_grad=False``) because, per the
    original author's note, letting gradients flow through this projection
    made learning unstable.

    :param n_vocab: vocabulary size (number of output logits)
    :param emb_size: dimensionality of the incoming activations
    :param weight: optional ``(n_vocab, emb_size)`` tensor to use as the
        projection matrix; a random one is created when omitted
    """

    def __init__(self, n_vocab, emb_size, weight=None):
        super().__init__()
        self.n_vocab, self.emb_size = n_vocab, emb_size
        # BUG FIX: `weight == None` performs elementwise comparison on a
        # Tensor; identity check `is None` is the correct presence test.
        if weight is None:
            weight = torch.randn(n_vocab, emb_size)
        # Freeze via the Parameter constructor instead of mutating afterwards.
        self.weight = nn.Parameter(weight, requires_grad=False)

    def forward(self, input):
        # Transpose the matrix for the matmul; .data keeps this out of autograd.
        return input.matmul(self.weight.data.t())

    def __repr__(self):
        return self.__class__.__name__ + \
            f'(n_vocab={self.n_vocab}, emb_size={self.emb_size})'
| 710 | 239 |
'''
@description: "Spend-and-save" (满减送) promotion API client.
@date: 2020/2/13 16:28:26
@author: Ren Qiukai (任秋锴)
@version: 1.0
'''
from .base import base
from .product import product
from .store import store
class discount(base):
    """Client for the "spend-and-save" (满减送) promotion management API."""

    def __init__(self, token):
        super().__init__(token)

    def list(self,
             state=None,
             name=None,
             productCodes=None,
             companyId=None,
             storeId=None,
             pageNum=1, pageSize=1000):
        """Query promotion campaigns with optional filters.

        BUG FIX: the original guarded every optional filter with ``if state:``
        (copy-paste), so name/productCodes/companyId/storeId were attached
        only when `state` happened to be truthy.  Each filter now checks its
        own value.
        """
        api_name = "manager/discount/list"
        data = {
            "pageNum": pageNum,
            "pageSize": pageSize,
        }
        if state:
            data["state"] = state
        if name:
            data["name"] = name
        if productCodes:
            data["productCodes"] = productCodes
        if companyId:
            data["companyId"] = companyId
        if storeId:
            data["storeId"] = storeId
        return self.request(api_name, data)

    def create(self, data):
        """Create a promotion campaign; *data* follows create_example()."""
        api_name = "manager/discount/add"
        response = self.request(api_name, data, method="POST")
        return response

    def createykj(self, storeCodes, productCodes,
                  price, beginTime, endTime, name):
        """Bulk-import a fixed-price ("一口价", type=4) campaign.

        :param storeCodes: store codes the campaign applies to
        :param productCodes: product codes included in the campaign
        :param price: the fixed price applied to every product
        :param beginTime: start date (YYYY-MM-DD), expanded to 00:00:00
        :param endTime: end date (YYYY-MM-DD), expanded to 23:59:59
        :param name: campaign display name
        :return: API response, or None when a product code cannot be found
        """
        beginTime = f"{beginTime} 00:00:00"
        endTime = f"{endTime} 23:59:59"
        # Main request payload.
        data = {
            "activeGoods": 2,
            "beginTime": beginTime,
            "endTime": endTime,
            "expressFree": 1,
            "id": "",
            "name": name,
            "discountType": 2,
            "type": 4,
            "unit": 2,
            "time": [beginTime, endTime],
            "productList": [],
            "rules": [],
            "storeList": [],
        }
        # Single pricing rule carrying the fixed price.
        rule = {
            "id": "",
            "usePrice": 1,
            "price": price,
            "discount": None,
            "giftId": None,
            "giftIds": [],
            "giftRule": 1,
            "giftLimit": None,
        }
        data["rules"].append(rule)
        # Resolve store codes to (store id, parent id) via the store service.
        s = store(self.token)
        store_map = {}
        for store_data in s.getStoreList():
            store_id = store_data.get("store_id")
            store_code = store_data.get("store_code")
            store_parent_id = store_data.get("store_parent_id")
            store_map[store_code] = [store_id, store_parent_id]
        for store_code in storeCodes:
            store_id, parentId = store_map[store_code]
            data["storeList"].append(
                {"id": store_id, "type": 0, "parentId": parentId})
        # Resolve product codes to product ids.
        p = product(self.token)
        for code in productCodes:
            product_data = p.list(productCode=code)
            if not product_data:
                print(f"{code}\t商品查找不到。")
                return None
            product_data = product_data[0]
            productId = product_data.get("id")
            product_request_payload = {
                "productId": productId,
                "specType": 1,
                "specIds": [],
                "price": None,
                "beginTime": None,
                "endTime": None,
            }
            data["productList"].append(product_request_payload)
        response = self.create(data)
        print(productCodes, response)
        return response

    def createmz(self, storeCodes, productCodes,
                 rules, beginTime, endTime, name):
        """Bulk-import a spend-threshold discount ("满折", type=2) campaign.

        :param storeCodes: store list payload (passed through verbatim)
        :param productCodes: product codes included in the campaign
        :param rules: iterable of (usePrice, discount) threshold pairs
        :param beginTime: start date (YYYY-MM-DD), expanded to 00:00:00
        :param endTime: end date (YYYY-MM-DD), expanded to 23:59:59
        :param name: campaign display name
        :return: API response, or None when a product code cannot be found
        """
        beginTime = f"{beginTime} 00:00:00"
        endTime = f"{endTime} 23:59:59"
        # Main request payload.
        data = {
            "activeGoods": 2,
            "beginTime": beginTime,
            "endTime": endTime,
            "expressFree": 1,
            "id": "",
            "name": name,
            "discountType": 2,
            "type": 2,
            "unit": 2,
            "time": [beginTime, endTime],
            "productList": [],
            "rules": [],
            "storeList": [],
        }
        # One rule per (threshold, discount) pair.
        for row in rules:
            usePrice, discount_num = row
            rule = {
                "id": "",
                "usePrice": usePrice,
                "price": None,
                "discount": discount_num,
                "giftId": None,
                "giftIds": [],
                "giftRule": 1,
                "giftLimit": None,
            }
            data["rules"].append(rule)
        # Store list is used as-is here (unlike createykj, no code lookup).
        data["storeList"] = storeCodes
        # Resolve product codes to product ids.
        p = product(self.token)
        for code in productCodes:
            product_data = p.list(productCode=code)
            if not product_data:
                print(f"{code}\t商品查找不到。")
                return None
            product_data = product_data[0]
            productId = product_data.get("id")
            product_request_payload = {
                "productId": productId,
                "specType": 1,
                "specIds": [],
                "price": None,
                "beginTime": None,
                "endTime": None,
            }
            data["productList"].append(product_request_payload)
        # print(data)
        response = self.create(data)
        print(productCodes, name, rules, response)
        return response

    def create_example(self):
        """Reference payload documenting every field the add endpoint accepts."""
        api_name = "manager/discount/add"
        data = {
            # Store list; type: 0 = store, 1 = branch company.
            "storeList": [
                {"id": 3, "type": 1, "parentId": 0},
                {"id": 11, "type": 0, "parentId": 3},
                {"id": 26, "type": 0, "parentId": 3},
                {"id": 29, "type": 0, "parentId": 3},
                {"id": 27002, "type": 0, "parentId": 3}
            ],
            # Campaign scope: 1 = whole store, 2 = selected products.
            "activeGoods": 2,
            # Start / end timestamps.
            "beginTime": "2020-02-15 00:00:00",
            "endTime": "2020-02-22 23:59:59",
            # Free shipping: 0 = no, 1 = yes.
            "expressFree": 1,
            "id": "",
            # Campaign name.
            "name": "整单-一般打折",
            # Product list with spec info.
            "productList": [
                {"productId": 15641, "specIds": [], "specType":1}
            ],
            # Discount type: 1 = order-level, 2 = product-level.
            "discountType": 2,
            # Rule type: 1 = cash off, 2 = percent off, 3 = free gift,
            # 4 = fixed price, 5 = Nth item discounted.
            "type": 2,
            # Unit: 1 = yuan, 2 = item count.
            "unit": 2,
            # Discount rules.
            "rules": [
                {
                    # Claim limit.
                    "giftLimit": "",
                    # Claim rule: 1 = per item, 2 = per yuan.
                    "giftRule": 1,
                    # Rule id.
                    "id": "",
                    # Percent-off value.
                    "discount": 9,
                    "isOpenGift": False,
                    # Gift ids.
                    "giftIds": [],
                    # Cash-off value.
                    "price":"",
                    # Spend threshold.
                    "usePrice":"0"
                }
            ],
            "time": ["2020-02-15 00:00:00", "2020-02-22 23:59:59"]
        }
        return self.request(api_name, data, method="POST")

    def read(self, _id):
        """Fetch a single campaign by id."""
        api_name = "manager/discount/info"
        data = {
            "discountId": _id,
        }
        response = self.request(api_name, data)
        # print(response)
        return response

    def update(self, data):
        """Update an existing campaign with a full payload."""
        api_name = "manager/discount/update"
        # print(data)
        response = self.request(api_name, data, method="POST")
        return response

    def copy(self, _id, name=None, beginTime=None, endTime=None):
        """Duplicate campaign *_id*, optionally overriding name/times.

        Ids are blanked (campaign and rule ids) so the server creates fresh
        records instead of updating the originals.
        """
        response = self.read(_id)
        data = response.get("data")
        rules = data.get("rules")
        data["id"] = ""
        if name:
            data["name"] = name
        if beginTime:
            data["beginTime"] = beginTime
        if endTime:
            data["endTime"] = endTime
        if rules:
            for idx, rule in enumerate(data["rules"]):
                data["rules"][idx]["id"] = ""
        response = self.create(data)
        # print(data, response)
        return response

    def delete(self, _id):
        """Delete (operate=0) a campaign by id."""
        api_name = "manager/discount/delete_stop"
        data = {
            "id": _id,
            "operate": 0
        }
        response = self.request(api_name, data, method="POST")
        # print(response)
        return response
| 8,216 | 2,722 |
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
import time
# WARNING(security): plaintext credentials committed to source — move these
# to environment variables or a secrets store.
usernameStr = 'user_teamj'
passwordStr = 'aad@123'
def login(quesno,diff):
    """Log into vjudge.net with the module-level team account, open the
    contest row selected by *diff*, then open problem number *quesno*.

    :param quesno: 1-based problem row inside the contest problem table
    :param diff: difficulty selector; values 1 and 2 are swapped below to map
        the caller's scale onto the contest list's row order
    """
    # diff = 0
    # # difficulty variable
    # # 0 -> hard
    # # 1-> normal
    # # 2 -> easy
    # quesno = 5
    # # question number
    # diff=2-diff
    # Swap 1 <-> 2; 0 (and any other value) passes through unchanged.
    if diff == 1 :
        diff = 2
    elif diff == 2:
        diff = 1
    # NOTE(review): webdriver.Chrome(path) and find_element_by_xpath were
    # removed in Selenium 4 — this function requires Selenium 3.x.
    driverLocation = './chromedriver' #if windows
    browser = webdriver.Chrome(driverLocation)
    browser.get(('https://vjudge.net/contest'))
    # fill in username and hit the next button
    loginbutton = browser.find_element_by_xpath('//*[@id="navbarResponsive"]/ul/li[8]/a')
    loginbutton.click()
    time.sleep(1)
    username = browser.find_element_by_xpath('//*[@id="login-username"]')
    username.send_keys(usernameStr)
    # nextButton = browser.find_element_by_id('identifierNext')
    # nextButton.click()
    # wait for transition then continue to fill items
    password = browser.find_element_by_xpath('//*[@id="login-password"]')
    password.send_keys(passwordStr)
    # password = WebDriverWait(browser, 10).until(
    # EC.presence_of_element_located((By.NAME, "password")))
    # password.send_keys(passwordStr)
    signInButton = browser.find_element_by_xpath('//*[@id="btn-login"]')
    signInButton.click()
    time.sleep(4)
    # +1 presumably skips a header/offset row in the contest table — confirm.
    path = '//*[@id="listContest"]/tbody/tr[' + str(diff +1) + ']/td[3]/div/a'
    contest = browser.find_element_by_xpath(path)
    contest.click()
    qpath = '//*[@id="contest-problems"]/tbody/tr[' + str(quesno) + ']/td[4]/a'
    question = browser.find_element_by_xpath(qpath)
    question.click()
| 1,793 | 614 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
import blanc_basic_assets.fields
import django.db.models.deletion
class Migration(migrations.Migration):
    """Add an optional hero image (asset foreign key) to the Page model."""

    dependencies = [
        ('assets', '0001_initial'),
        ('pages', '0001_initial'),
    ]
    operations = [
        migrations.AddField(
            model_name='page',
            name='hero_image',
            # SET_NULL keeps existing pages alive if the referenced asset is deleted.
            field=blanc_basic_assets.fields.AssetForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='assets.Image'),
        ),
    ]
| 589 | 190 |
# -*- coding: utf-8 -*-
""" Metrics Service
The metrics service manages:
* Connectivity to the Prometheus Server
* Creating default summary views in Prometheus
* Publishing `Request` metrics
"""
import datetime
import logging
from http.server import ThreadingHTTPServer
from brewtils.models import Request
from brewtils.stoppable_thread import StoppableThread
from prometheus_client import Gauge, Counter, Summary
from prometheus_client.exposition import MetricsHandler
from prometheus_client.registry import REGISTRY
import beer_garden.db.api as db
class PrometheusServer(StoppableThread):
    """Wraps a ThreadingHTTPServer to serve Prometheus metrics"""
    def __init__(self, host, port):
        # host/port for the metrics HTTP endpoint.
        self.logger = logging.getLogger(__name__)
        self.display_name = "Prometheus Server"
        self._host = host
        self._port = port
        # Basically prometheus_client.exposition.start_http_server
        # (inlined so the server object is owned by this thread and can be
        # shut down cleanly from stop()).
        metrics_handler = MetricsHandler.factory(REGISTRY)
        self.httpd = ThreadingHTTPServer((host, port), metrics_handler)
        super(PrometheusServer, self).__init__(
            logger=self.logger, name="PrometheusServer"
        )
    def run(self):
        """Seed gauge values from the DB, then serve metrics until stopped."""
        self.logger.debug("Initializing metric counts")
        initialize_counts()
        self.logger.info(f"Starting {self.display_name} on {self._host}:{self._port}")
        # Blocks until shutdown() is called from stop().
        self.httpd.serve_forever()
        self.logger.info(f"{self.display_name} is stopped")
    def stop(self):
        """Unblock serve_forever(); safe to call from another thread."""
        self.httpd.shutdown()
# Summaries: per-command completion latency, labelled down to final status.
plugin_command_latency = Summary(
    "bg_plugin_command_latency_seconds",
    "Total time taken for a command to complete in seconds.",
    ["system", "instance_name", "system_version", "command", "status"],
)
# Counters: monotonically increasing request tallies.
completed_request_counter = Counter(
    "bg_completed_requests_total",
    "Number of completed requests.",
    ["system", "instance_name", "system_version", "command", "status"],
)
request_counter_total = Counter(
    "bg_requests_total",
    "Number of requests.",
    ["system", "instance_name", "system_version", "command"],
)
# Gauges: current request counts by lifecycle state (CREATED / IN_PROGRESS).
queued_request_gauge = Gauge(
    "bg_queued_requests",
    "Number of requests waiting to be processed.",
    ["system", "instance_name", "system_version"],
)
in_progress_request_gauge = Gauge(
    "bg_in_progress_requests",
    "Number of requests IN_PROGRESS",
    ["system", "instance_name", "system_version"],
)
def request_latency(start_time):
    """Return the seconds elapsed (float) from *start_time* to now (naive UTC)."""
    elapsed = datetime.datetime.utcnow() - start_time
    return elapsed.total_seconds()
def initialize_counts():
    """Prime the queued/in-progress gauges from requests already in the DB.

    Run once at startup so gauge values survive a restart of this process.
    """
    requests = db.query(
        Request, filter_params={"status__in": ["CREATED", "IN_PROGRESS"]}
    )
    for request in requests:
        label_args = {
            "system": request.system,
            "system_version": request.system_version,
            "instance_name": request.instance_name,
        }
        if request.status == "CREATED":
            queued_request_gauge.labels(**label_args).inc()
        elif request.status == "IN_PROGRESS":
            in_progress_request_gauge.labels(**label_args).inc()
def request_created(request):
    """Update metrics for a newly created Request (call after the DB save)."""
    base_labels = dict(
        system=request.system,
        system_version=request.system_version,
        instance_name=request.instance_name,
    )
    # One more request waiting to be picked up...
    queued_request_gauge.labels(**base_labels).inc()
    # ...and one more request seen overall, broken out by command.
    request_counter_total.labels(command=request.command, **base_labels).inc()
def request_started(request):
    """Update metrics when a Request moves to IN_PROGRESS.

    This call should happen after the save to the database.
    """
    labels = dict(
        system=request.system,
        system_version=request.system_version,
        instance_name=request.instance_name,
    )
    queued_request_gauge.labels(**labels).dec()       # no longer queued
    in_progress_request_gauge.labels(**labels).inc()  # now being worked
def request_completed(request):
    """Update metrics when a Request finishes.

    This call should happen after the save to the database.
    """
    labels = dict(
        system=request.system,
        system_version=request.system_version,
        instance_name=request.instance_name,
    )
    in_progress_request_gauge.labels(**labels).dec()

    elapsed = request_latency(request.created_at)
    # Completion metrics carry two extra labels.
    labels["command"] = request.command
    labels["status"] = request.status
    completed_request_counter.labels(**labels).inc()
    plugin_command_latency.labels(**labels).observe(elapsed)
| 4,538 | 1,370 |
import argparse
from .helpers import yaml_parser
from .mailer import Mailer
from .utils import csv_to_recipients
from .exceptions import TemplateNotFoundException
from .exceptions import TemplateAndCSVNotMatchException
def main():
    """CLI entry point: create an email template or send templated emails.

    Modes (``--mode``):
      * ``create_template`` -- register a template from name/subject/txt/html.
      * ``send_email`` -- read a YAML config and, for each spec, send its
        template to the recipients listed in its CSV file.

    Only the ``aws`` service is implemented; any other value is a no-op.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument("--config", help="yaml file as mail configuration")
    parser.add_argument("--service", help="mail service", default="aws")
    parser.add_argument("--mode", help="package app mode", default="send_email")
    parser.add_argument("--template-name", help="name of the template to be made")
    parser.add_argument(
        "--subject",
        help="subject of the email",
        default="Congrats! Welcome to HMIF Mentoring program",
    )
    parser.add_argument("--txt", help="txt template content of the email")
    parser.add_argument("--html", help="html template content of the email")
    args = parser.parse_args()
    mode = args.mode
    service = args.service
    # Consistency fix: the original assigned `service` but then compared
    # args.service directly, leaving the local unused.
    if service == "aws":
        if mode == "create_template":
            mailer = Mailer()
            mailer.create_template(args.template_name, args.subject, args.html, args.txt)
        elif mode == "send_email":
            config_data = yaml_parser(args.config)
            for items in config_data:
                sender_name = items["sender"]["name"]
                sender_email = items["sender"]["email"]
                mailer = Mailer(sender_name, sender_email)
                for spec in items["spec"]:
                    template_name = spec["template"]
                    template_data = spec["recipient_data"]
                    try:
                        mailer.check_template_exist(template_name)
                        mailer.check_template_match(template_name, template_data)
                        mail_recipients = csv_to_recipients(template_data)
                        mailer.send_mail(mail_recipients, template_name)
                    except (TemplateNotFoundException,
                            TemplateAndCSVNotMatchException) as e:
                        # Known template problems: report and continue with
                        # the next spec.
                        print(e)
                    except Exception as e:
                        # Best-effort batch send: log and keep going.
                        print(e)
| 2,368 | 616 |
import copy
from bson import ObjectId
from aquascope.webserver.data_access.db.upload import Upload, DEFAULT_UPLOAD_PROJECTION
from aquascope.webserver.data_access.db.util import project_dict
# Fixture uploads covering every lifecycle state the webserver knows about:
# initialized -> uploaded -> processing -> finished, plus a failed one.
_DUMMY_UPLOADS = [
    {
        '_id': ObjectId('000000000000000000001000'),
        'filename': 'dummy0',
        'state': 'initialized',
        'tags': []
    },
    {
        '_id': ObjectId('000000000000000000001001'),
        'filename': 'dummy1',
        'state': 'uploaded',
        'tags': ['dummy_tag_1', 'dummy_tag_2', 'with_broken_records_field'],
        'broken_records': [],
        'broken_record_count': 0
    },
    {
        '_id': ObjectId('000000000000000000001002'),
        'filename': 'dummy2',
        'state': 'processing',
        'image_count': 20,
        'duplicate_image_count': 0,
        'duplicate_filenames': [],
        'tags': ['dummy_tag_1']
    },
    {
        '_id': ObjectId('000000000000000000001003'),
        'filename': 'dummy3',
        'state': 'finished',
        'image_count': 10,
        'duplicate_image_count': 2,
        'duplicate_filenames': [
            'img1.jpg',
            'img2.jpg'
        ],
        'tags': ['sth']
    },
    {
        '_id': ObjectId('000000000000000000001004'),
        'filename': 'dummy4',
        'state': 'failed',
        'tags': []
    }
]
# Same uploads narrowed to DEFAULT_UPLOAD_PROJECTION fields; deepcopy so the
# projection cannot mutate the raw dicts above.
DUMMY_UPLOADS_WITH_DEFAULT_PROJECTION = [
    Upload(project_dict(copy.deepcopy(upload), DEFAULT_UPLOAD_PROJECTION)) for upload in _DUMMY_UPLOADS
]
# Full, unprojected Upload objects.
DUMMY_UPLOADS = [Upload(upload) for upload in _DUMMY_UPLOADS]
| 1,540 | 628 |
import os
import sys
from shutil import rmtree
from setuptools import find_packages, setup, Command
# Package meta-data.
NAME = "soepy"
DESCRIPTION = (
    "soepy is an open-source Python package for the simulation and estimation of a "
    "dynamic model of human capital accumulation tailored to the German Socio-Economic"
    " Panel (SOEP)."
)
URL = "http://soepy.readthedocs.io"
EMAIL = "bilieva@diw.de"
AUTHOR = "Boryana Ilieva"
# What packages are required for this module to be executed?
REQUIRED = ["numpy", "flake8", "pytest", "pandas", "oyaml"]
here = os.path.abspath(os.path.dirname(__file__))
about = {}
# Load __version__ by exec-ing the version file, avoiding an import of the
# package (and its dependencies) at setup time.
with open(os.path.join(here, NAME, "__version__.py")) as f:
    exec(f.read(), about)
class PublishCommand(Command):
    """Support setup.py publish."""
    description = "Build and publish the package."
    user_options = []
    @staticmethod
    def status(s):
        """Prints things in bold."""
        print("\033[1m{0}\033[0m".format(s))
    def initialize_options(self):
        # Required by the Command interface; nothing to initialize.
        pass
    def finalize_options(self):
        # Required by the Command interface; nothing to finalize.
        pass
    def run(self):
        """Clean, build sdist+wheel, upload to PyPI via twine, then exit."""
        try:
            self.status("Removing previous builds…")
            rmtree(os.path.join(here, "dist"))
        except FileNotFoundError:
            # No previous build directory — nothing to clean.
            pass
        self.status("Building Source and Wheel (universal) distribution…")
        # NOTE(review): os.system return codes are ignored, so a failed build
        # still proceeds to upload — consider checking them.
        os.system("{0} setup.py sdist bdist_wheel --universal".format(sys.executable))
        self.status("Uploading the package to PyPi via Twine…")
        os.system("twine upload dist/*")
        sys.exit()
# Standard setuptools invocation; `python setup.py publish` maps to
# PublishCommand defined above.
setup(
    name=NAME,
    version=about["__version__"],
    description=DESCRIPTION,
    author=AUTHOR,
    author_email=EMAIL,
    url=URL,
    packages=find_packages(exclude=("tests",)),
    install_requires=REQUIRED,
    license="MIT",
    include_package_data=True,
    cmdclass={"publish": PublishCommand},
)
| 1,855 | 612 |
class IndexMinPQ:  # refer to https://www.cnblogs.com/nullzx/p/6624731.html
    """Indexed min-priority queue backed by a 1-based binary heap.

    Supports indices k in [0, max].  ``pq`` maps heap position -> index,
    ``qp`` is its inverse (qp[pq[i]] == pq[qp[i]] == i), and ``keys[k]``
    holds the priority of index k.
    """

    def __init__(self, max):  # how about resize?
        self.pq = [None] * (max + 1)    # heap position -> index
        self.qp = [None] * (max + 1)    # index -> heap position (inverse of pq)
        self.keys = [None] * (max + 1)  # keys[i] = priority of i
        self.N = 0
        self.max = max

    def isEmpty(self):
        return self.N == 0

    def size(self):
        return self.N

    def contains(self, k):
        assert k >= 0 and k <= self.max
        # `is not None` (not `!= None`): priorities may define custom __eq__.
        return self.qp[k] is not None

    def insert(self, k: int, item):
        """Insert index *k* with priority *item*."""
        self.N += 1
        self.pq[self.N] = k
        self.qp[k] = self.N
        self.keys[k] = item
        self.swim(self.N)

    def change(self, k: int, item):
        """Change the priority of index *k*.

        BUG FIX: the original only called swim(), so *increasing* a key left
        the heap order violated.  The element may need to move either way,
        so we swim first and then sink from its (possibly new) position.
        """
        idx = self.qp[k]
        self.keys[k] = item
        self.swim(idx)
        self.sink(self.qp[k])

    def min(self):
        """Return (without removing) the smallest priority."""
        min = self.pq[1]
        return self.keys[min]

    def delMin(self):
        """Remove and return the index holding the smallest priority."""
        min = self.pq[1]
        self.exch(1, self.N)
        self.N -= 1
        self.sink(1)
        # Clear the removed slot so contains() reports False.
        self.pq[self.N + 1] = None
        self.qp[min] = None
        self.keys[min] = None
        return min

    def swim(self, k):
        # Bubble position k up while it beats its parent.
        while k > 1 and self.less(k, k // 2):
            self.exch(k // 2, k)
            k = k // 2

    def sink(self, k):
        # Push position k down while a child beats it.
        while 2 * k <= self.N:
            j = 2 * k
            if j < self.N and self.less(j + 1, j):
                j += 1  # pick the smaller child
            if self.less(j, k):
                self.exch(k, j)
            else:
                break
            k = j

    def exch(self, i, j):
        # Swap heap positions i and j, keeping qp consistent with pq.
        self.pq[i], self.pq[j] = self.pq[j], self.pq[i]
        self.qp[self.pq[i]] = i
        self.qp[self.pq[j]] = j

    def less(self, i, j):
        # Compare the priorities stored at heap positions i and j.
        return self.keys[self.pq[i]] < self.keys[self.pq[j]]
| 1,833 | 728 |
from django.db import models
# Create your models here.
class Office(models.Model):
    """A provider office with simple integer ratings."""
    name = models.CharField(max_length=200)
    # staff_rating / forms_language: integer scores, default 0; the scale is
    # not defined here — confirm with the application layer.
    staff_rating = models.IntegerField(default=0)
    forms_language = models.IntegerField(default=0)
class Provider(models.Model):
    """A healthcare provider optionally attached to an Office."""
    name = models.CharField(max_length=200)
    zip_code = models.CharField(max_length=5)
    provider_type = models.CharField(max_length=200)
    # NOTE(review): naming this FK `office_id` makes Django create a DB
    # column `office_id_id`; plain `office` would be conventional.
    office_id = models.ForeignKey(
        Office,
        models.SET_NULL,  # keep the provider if its office is deleted
        blank=True,
        null=True,
    )
class Permission(models.Model):
    """Login credential with an access level."""
    email = models.EmailField(max_length=250)
    # WARNING(security): stores the password as a plain CharField — hash it
    # (e.g. via Django's auth framework) instead of persisting plaintext.
    password = models.CharField(max_length=200)
    access_level = models.IntegerField(default=0)
# Import required packages
import os
import time
import cv2
import imutils
import numpy as np
import pytesseract
from autocorrect import Speller
from PIL import Image
def VideoOn():
    """Preview the default webcam until 'q' is pressed, then save one frame
    to Data\\OCR_Data\\CaptureImage.jpg and release the camera."""
    video = cv2.VideoCapture(0, cv2.CAP_DSHOW)
    while True:
        # check returns true if python can actually read and frame is ndim numpy array
        check, frame = video.read()
        cv2.imshow('Capturing...', frame)
        key = cv2.waitKey(1)
        if key == ord('q'):
            # Grab one more frame and persist it as the capture.
            check, frame = video.read()
            a = cv2.imwrite("Data\\OCR_Data\\CaptureImage.jpg", frame)
            break
    video.release()
    cv2.destroyAllWindows()
def TesseractSetup():
    """Point pytesseract at the locally installed Tesseract-OCR binary."""
    tesseract_binary = 'C:\\Program Files (x86)\\Tesseract-OCR\\tesseract.exe'
    pytesseract.pytesseract.tesseract_cmd = tesseract_binary
def CropBorder():
    """Load the sample quote image, normalize it to RGB at 300 dpi, and
    return it as an OpenCV (BGR ndarray) image.

    NOTE(review): despite the name, no border cropping happens — the
    border_crop call is commented out below.  Also note the un-escaped
    backslashes in the path literals (e.g. ``\\O``, ``\\S``) rely on those
    sequences not being recognized escapes.
    """
    # return cropped image from which text needs to be extracted
    im = Image.open("Data\OCR_Data\SteveJobsQuote.jpg")
    # im = Image.open("./Assets/quote-luck-is-when-skill-meets-opportunity-vinnie-paz-80-71-88.jpg")
    if im.mode != 'RGB':
        im = im.convert('RGB')
    im.save("Data\OCR_Data\SteveJobsQuote.jpg", dpi=(300, 300))
    # return border_crop("CaptureImage.jpg")
    return cv2.imread("Data\OCR_Data\SteveJobsQuote.jpg")
def ExtractImageData(img):
    """Run Tesseract OSD on *img* and return ``(script_name, rotation_degrees)``.

    Returns ``("", 0)`` when OSD fails (e.g. too little text detected).
    """
    # cv2.imshow("img", img)
    # cv2.waitKey(0)
    # Upscale first: OSD is unreliable on small images.
    img = cv2.resize(img, None, fx=2, fy=2, interpolation=cv2.INTER_CUBIC)
    # cv2.imshow("img", img)
    # cv2.waitKey(0)
    try:
        data = pytesseract.image_to_osd(img).split()
    except Exception:
        # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
        # are no longer swallowed.
        return "", 0
    # NOTE(review): these fixed offsets into the whitespace-split OSD output
    # assume image_to_osd's exact field layout — fragile; confirm.
    language = data[-4]   # value of the "Script:" field
    rotation = data[-9]   # value of the "Rotate:" field (string)
    print(rotation)
    print(data)
    return language, rotation
def PreprocessingImage(img, rotation):
    """Rotate, upscale, blur, threshold and dilate *img*, then find the text
    block contours.

    :param img: BGR image (OpenCV ndarray)
    :param rotation: rotation in degrees (string or int) from OSD; applied
        negatively to undo the detected skew
    :return: (copy of the processed image, detected contours)

    NOTE(review): the interleaved imshow/waitKey(0) calls block until a key
    is pressed — debugging aids that make this unusable headless; confirm
    they should remain.
    """
    cv2.imshow("img", img)
    cv2.waitKey(0)
    # apply rotation
    rotated = imutils.rotate(img, angle=-(int(rotation)))
    cv2.imshow("img", rotated)
    cv2.waitKey(0)
    # Resize the image to a given scale
    img = cv2.resize(rotated, None, fx=2, fy=2, interpolation=cv2.INTER_CUBIC)
    cv2.imshow("img", img)
    cv2.waitKey(0)
    # Blur using GaussianBlur method
    img = cv2.GaussianBlur(img, (5, 5), 0)
    cv2.imshow("img", img)
    cv2.waitKey(0)
    # Convert the image to gray scale
    gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
    # cv2.imshow("img", gray)
    # cv2.waitKey(0)
    # Apply threshhold
    thresh1 = cv2.threshold(gray, 0, 255, cv2.THRESH_TOZERO)[1]
    cv2.imshow("img", thresh1)
    cv2.waitKey(0)
    # Specify structure shape and kernel size.
    # Kernel size increases or decreases the area
    # of the rectangle to be detected.
    # A smaller value like (10, 10) will detect
    # each word instead of a sentence.
    rect_kernel = cv2.getStructuringElement(cv2.MORPH_RECT, (20, 20))
    # Appplying dilation on the threshold image
    dilation = cv2.dilate(thresh1, rect_kernel, iterations=1)
    cv2.imshow("img", dilation)
    cv2.waitKey(0)
    # Finding contours
    contours, hierarchy = cv2.findContours(
        dilation, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_NONE)
    # Creating a copy of image
    im2 = img.copy()
    return im2, contours
def CreateFileToPrintTo():
    """Create (or truncate) the OCR output text file."""
    # "w" already truncates, so writing "" is unnecessary; the context
    # manager guarantees the handle is closed (the original leaked it until
    # the explicit close and used "w+" without reading).
    with open("Data\\OCR_Data\\recognized.txt", "w"):
        pass
def FindContour(im2, contours, language):
    """OCR each contour's bounding box and accumulate the extracted text.

    :param im2: processed image copy from PreprocessingImage
    :param contours: contours delimiting candidate text blocks
    :param language: script name from ExtractImageData; 'latin' selects the
        English Tesseract model
    :return: (accumulated text, open append-mode file handle)

    NOTE(review): the open handle is returned for AppendResultToFile to
    close; on the exception path this returns ("", "") — a *string* instead
    of a handle — and leaks the file object.  Confirm callers tolerate that.
    """
    # Looping through the identified contours
    # Then rectangular part is cropped and passed on
    # to pytesseract for extracting text from it
    # Extracted text is then written into the text file
    result = ""
    file = open("Data\\OCR_Data\\recognized.txt", "a", encoding='utf-8')
    for cnt in contours:
        x, y, w, h = cv2.boundingRect(cnt)
        # Drawing a rectangle on copied image
        rect = cv2.rectangle(im2, (x, y), (x + w, y + h), (0, 255, 0), 2)
        # Cropping the text block for giving input to OCR
        cropped = im2[y:y + h, x:x + w]
        try:
            # Apply OCR on the cropped image
            if language.lower() == 'latin':
                text = pytesseract.image_to_string(cropped, lang="eng")
            else:
                text = pytesseract.image_to_string(cropped)
        except:
            return "",""
        # Storing the text
        result += (text + "\n")
    return result, file
def AppendResultToFile(result, file):
    """Spell-correct *result* and write it to the already-open *file* handle,
    then close the handle."""
    spell = Speller(only_replacements=True)
    # NOTE(review): this strips ALL spaces before spell-checking, fusing
    # words together — confirm that is intended.
    result = result.replace(" ", "")
    var = spell(result)
    file.write(var)
    # BUG FIX: the original had `file.close` without parentheses, which
    # referenced the method but never called it, so the handle leaked.
    file.close()
def launch():
    """Run the OCR pipeline end to end: setup, load, analyze, OCR, write out."""
    # VideoOn()
    TesseractSetup()
    img = CropBorder()
    language, rotation = ExtractImageData(img)
    im2, contours = PreprocessingImage(img, rotation)
    CreateFileToPrintTo()
    result, file = FindContour(im2, contours, language)
    AppendResultToFile(result, file)
    # NOTE(review): VideoOn() above is commented out, so CaptureImage.jpg may
    # not exist and this remove can raise FileNotFoundError — confirm.
    os.remove('Data\\OCR_Data\\CaptureImage.jpg')
# Read the current salary (in BRL), apply a 15% raise, and report the result.
sal = float(input('Digite seu salario: R$'))
aum = sal * 0.15
print('Parabens, você ganhou um aumento de 15%')
print('Seu novo salario é de R${}'.format(sal + aum))
from __future__ import absolute_import
import sys
import pytest
from django.test import RequestFactory
from django_cas_ng import backends
@pytest.mark.django_db
def test_backend_authentication_creating_a_user(monkeypatch, django_user_model):
    """
    Test the case where CAS authentication is creating a new user.
    """
    factory = RequestFactory()
    request = factory.get('/login/')
    request.session = {}
    def mock_verify(ticket, service):
        # Mimic a successful CAS verification: (username, attributes, pgtiou).
        return 'test@example.com', {'ticket': ticket, 'service': service}, None
    # we mock out the verify method so that we can bypass the external http
    # calls needed for real authentication since we are testing the logic
    # around authentication.
    monkeypatch.setattr('cas.CASClientV2.verify_ticket', mock_verify)
    # sanity check
    assert not django_user_model.objects.filter(
        username='test@example.com',
    ).exists()
    backend = backends.CASBackend()
    user = backend.authenticate(
        request, ticket='fake-ticket', service='fake-service',
    )
    assert user is not None
    assert user.username == 'test@example.com'
    # The backend should have created the user as a side effect.
    assert django_user_model.objects.filter(
        username='test@example.com',
    ).exists()
@pytest.mark.django_db
def test_backend_authentication_do_not_create_user(monkeypatch, django_user_model, settings):
    """
    Test that with CAS_CREATE_USER disabled, authentication does not create a user.

    CONSISTENCY FIX: added the ``django_db`` marker — this test queries
    ``django_user_model.objects`` just like its siblings, which carry it.
    (Docstring corrected too: it was copy-pasted from the creating-user test.)
    """
    factory = RequestFactory()
    request = factory.get('/login/')
    request.session = {}
    def mock_verify(ticket, service):
        # Mimic a successful CAS verification: (username, attributes, pgtiou).
        return 'test@example.com', {'ticket': ticket, 'service': service}, None
    # we mock out the verify method so that we can bypass the external http
    # calls needed for real authentication since we are testing the logic
    # around authentication.
    monkeypatch.setattr('cas.CASClientV2.verify_ticket', mock_verify)
    # sanity check
    assert not django_user_model.objects.filter(
        username='test@example.com',
    ).exists()
    settings.CAS_CREATE_USER = False
    backend = backends.CASBackend()
    user = backend.authenticate(
        request, ticket='fake-ticket', service='fake-service',
    )
    assert user is None
    assert not django_user_model.objects.filter(
        username='test@example.com',
    ).exists()
@pytest.mark.django_db
def test_backend_for_existing_user(monkeypatch, django_user_model):
    """
    Test the case where CAS authenticates an existing user.
    """
    request = RequestFactory().get('/login/')
    request.session = {}

    def fake_verify(ticket, service):
        return 'test@example.com', {'ticket': ticket, 'service': service}, None

    # Bypass the external HTTP call to the CAS server.
    monkeypatch.setattr('cas.CASClientV2.verify_ticket', fake_verify)

    existing_user = django_user_model.objects.create_user('test@example.com', '')

    authenticated = backends.CASBackend().authenticate(
        request, ticket='fake-ticket', service='fake-service',
    )
    assert authenticated is not None
    assert authenticated.username == 'test@example.com'
    assert authenticated == existing_user
@pytest.mark.django_db
def test_backend_for_existing_user_without_request(monkeypatch, django_user_model):
    """
    Test the case where CAS authenticates an existing user, but request argument is None.

    Bug fix: this function previously reused the name
    ``test_backend_for_existing_user``, shadowing the earlier test of the
    same name so pytest silently collected only one of the two.
    """
    def mock_verify(ticket, service):
        return 'test@example.com', {'ticket': ticket, 'service': service}, None

    # we mock out the verify method so that we can bypass the external http
    # calls needed for real authentication since we are testing the logic
    # around authentication.
    monkeypatch.setattr('cas.CASClientV2.verify_ticket', mock_verify)

    existing_user = django_user_model.objects.create_user('test@example.com', '')
    backend = backends.CASBackend()
    user = backend.authenticate(
        None, ticket='fake-ticket', service='fake-service',
    )
    assert user is not None
    assert user.username == 'test@example.com'
    assert user == existing_user
@pytest.mark.django_db
def test_backend_for_failed_auth(monkeypatch, django_user_model):
    """
    Test CAS authentication failure.
    """
    request = RequestFactory().get('/login/')
    request.session = {}

    def fake_verify(ticket, service):
        # A failed verification yields no username and no attributes.
        return None, {}, None

    # Bypass the external HTTP call to the CAS server.
    monkeypatch.setattr('cas.CASClientV2.verify_ticket', fake_verify)

    assert not django_user_model.objects.filter(
        username='test@example.com',
    ).exists()

    user = backends.CASBackend().authenticate(
        request, ticket='fake-ticket', service='fake-service',
    )
    assert user is None
    # A failed authentication must not have created the user either.
    assert not django_user_model.objects.filter(
        username='test@example.com',
    ).exists()
@pytest.mark.django_db
def test_backend_user_can_authenticate(monkeypatch, django_user_model):
    """
    Test that overriding user_can_authenticate() to return False vetoes an
    otherwise successful CAS authentication.
    """
    factory = RequestFactory()
    request = factory.get('/login/')
    request.session = {}
    def mock_verify(ticket, service):
        return 'test@example.com', {'ticket': ticket, 'service': service}, None
    # we mock out the verify method so that we can bypass the external http
    # calls needed for real authentication since we are testing the logic
    # around authentication.
    monkeypatch.setattr('cas.CASClientV2.verify_ticket', mock_verify)
    # Baseline: the default backend authenticates successfully.
    user = backends.CASBackend().authenticate(
        request, ticket='fake-ticket', service='fake-service',
    )
    assert user is not None
    class AllowNoneBackend(backends.CASBackend):
        def user_can_authenticate(self, user):
            return False
    # Same ticket, but the subclass rejects every user.
    user = AllowNoneBackend().authenticate(
        request, ticket='fake-ticket', service='fake-service',
    )
    assert user is None
@pytest.mark.django_db
def test_backend_does_not_apply_attributes_by_default(monkeypatch):
    """
    Test to make sure attributes returned from the provider are not assigned to
    the User model by default.
    """
    request = RequestFactory().get('/login/')
    request.session = {}

    def fake_verify(ticket, service):
        return 'test@example.com', {'is_staff': 'True', 'is_superuser': 'False'}, None

    monkeypatch.setattr('cas.CASClientV2.verify_ticket', fake_verify)

    user = backends.CASBackend().authenticate(
        request, ticket='fake-ticket', service='fake-service',
    )
    assert user is not None
    # Without CAS_APPLY_ATTRIBUTES_TO_USER, 'is_staff' must stay untouched.
    assert not user.is_staff
@pytest.mark.django_db
def test_backend_applies_attributes_when_set(monkeypatch, settings):
    """
    If CAS_APPLY_ATTRIBUTES_TO_USER is set, make sure the attributes returned
    with the ticket are added to the User model.
    """
    settings.CAS_APPLY_ATTRIBUTES_TO_USER = True

    request = RequestFactory().get('/login/')
    request.session = {}

    def fake_verify(ticket, service):
        return 'test@example.com', {'is_staff': 'True', 'is_superuser': 'False'}, None

    monkeypatch.setattr('cas.CASClientV2.verify_ticket', fake_verify)

    user = backends.CASBackend().authenticate(
        request, ticket='fake-ticket', service='fake-service',
    )
    assert user is not None
    assert user.is_staff
@pytest.mark.django_db
def test_boolean_attributes_applied_as_booleans(monkeypatch, settings):
    """
    Boolean CAS attributes arrive as the strings 'True'/'False'; when applied
    to the User model they must become real booleans, not truthy strings.
    """
    factory = RequestFactory()
    request = factory.get('/login/')
    request.session = {}
    def mock_verify(ticket, service):
        return 'test@example.com', {'is_staff': 'True', 'is_superuser': 'False'}, None
    monkeypatch.setattr('cas.CASClientV2.verify_ticket', mock_verify)
    settings.CAS_APPLY_ATTRIBUTES_TO_USER = True
    backend = backends.CASBackend()
    user = backend.authenticate(request, ticket='fake-ticket',
                                service='fake-service')
    assert user is not None
    # The string 'False' would be truthy — identity checks catch that.
    assert user.is_superuser is False
    assert user.is_staff is True
| 8,289 | 2,474 |
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """South schema migration: adds the ``ServerAuthentication`` model and
    the ``Server.method`` integer column."""

    def forwards(self, orm):
        """Apply the migration."""
        # Adding model 'ServerAuthentication'
        db.create_table(u'core_serverauthentication', (
            ('server', self.gf('django.db.models.fields.related.OneToOneField')(to=orm['core.Server'], unique=True, primary_key=True)),
            ('password', self.gf('django.db.models.fields.TextField')(blank=True)),
        ))
        db.send_create_signal(u'core', ['ServerAuthentication'])

        # Adding field 'Server.method'
        # keep_default=False: the default (2) fills existing rows once but is
        # not stored as a database-level column default.
        db.add_column(u'core_server', 'method',
                      self.gf('django.db.models.fields.IntegerField')(default=2),
                      keep_default=False)

    def backwards(self, orm):
        """Revert the migration."""
        # Deleting model 'ServerAuthentication'
        db.delete_table(u'core_serverauthentication')

        # Deleting field 'Server.method'
        db.delete_column(u'core_server', 'method')

    # Frozen ORM snapshot used by South while this migration runs; do not
    # edit by hand — it must mirror the models at the time of writing.
    models = {
        u'core.application': {
            'Meta': {'unique_together': "(('department', 'name'),)", 'object_name': 'Application'},
            'department': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'applications'", 'to': u"orm['core.Department']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '128'})
        },
        u'core.department': {
            'Meta': {'object_name': 'Department'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '128'})
        },
        u'core.environment': {
            'Meta': {'unique_together': "(('application', 'name'),)", 'object_name': 'Environment'},
            'application': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'environments'", 'to': u"orm['core.Application']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_production': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '128'})
        },
        u'core.server': {
            'Meta': {'ordering': "['name']", 'unique_together': "(('environment', 'name'),)", 'object_name': 'Server'},
            'environment': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'servers'", 'to': u"orm['core.Environment']"}),
            'host': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'method': ('django.db.models.fields.IntegerField', [], {'default': '2'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'port': ('django.db.models.fields.IntegerField', [], {'default': '22'}),
            'roles': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'servers'", 'symmetrical': 'False', 'to': u"orm['core.ServerRole']"}),
            'user': ('django.db.models.fields.CharField', [], {'max_length': '128'})
        },
        u'core.serverauthentication': {
            'Meta': {'object_name': 'ServerAuthentication'},
            'password': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'server': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['core.Server']", 'unique': 'True', 'primary_key': 'True'})
        },
        u'core.serverrole': {
            'Meta': {'unique_together': "(('department', 'name'),)", 'object_name': 'ServerRole'},
            'department': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'serverroles'", 'to': u"orm['core.Department']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '32'})
        }
    }

    complete_apps = ['core']
from pandas import DataFrame
import mysql.connector as mc
from properties import user, password, host, port, database, data_path
# Export SoTorrent answer edits (text + code, plus their root versions) to a
# tab-separated CSV.  Connection settings come from properties.py.
cnx = mc.connect(user=user, password=password, host=host, port=port, database=database)
cursor = cnx.cursor()

try:
    # Find answer (with code block which has changed at least once) ids
    sql1 = """CREATE OR REPLACE VIEW sotorrent18_09.edits AS
SELECT DISTINCT PH.Id,PH.Comment
FROM sotorrent18_09.posts AS P
INNER JOIN sotorrent18_09.posthistory AS PH ON PH.PostId=P.Id
INNER JOIN sotorrent18_09.postblockversion AS PBV ON PBV.PostHistoryId=PH.Id
WHERE P.PostTypeId=2 AND PBV.PostBlockTypeId=2 AND PBV.PredSimilarity<1 AND PH.Comment IS NOT NULL;
"""
    cursor.execute(sql1)

    # Find text content from mentioned ids
    sql2 = """CREATE OR REPLACE VIEW sotorrent18_09.text AS
SELECT PostHistoryId AS Id,PredPostHistoryId,Content AS Text
FROM sotorrent18_09.postblockversion
WHERE PostBlockTypeId=1 AND LocalId=1 AND PostHistoryId IN (SELECT Id FROM sotorrent18_09.edits);
"""
    cursor.execute(sql2)

    # Find code content from mentioned ids
    sql3 = """CREATE OR REPLACE VIEW sotorrent18_09.code AS
SELECT PostHistoryId AS Id,PredPostHistoryId,Content AS Code
FROM sotorrent18_09.postblockversion
WHERE PostBlockTypeId=2 AND LocalId=2 AND PostHistoryId IN (SELECT Id FROM sotorrent18_09.edits);
"""
    cursor.execute(sql3)

    # Create a view (fin_edits) with all previous data (id,predid,comment,text,code)
    sql4 = """CREATE OR REPLACE VIEW sotorrent18_09.fin_edits AS
SELECT E.Id,T.PredPostHistoryId,E.Comment,T.Text,C.Code
FROM sotorrent18_09.edits AS E
INNER JOIN sotorrent18_09.text AS T ON T.Id=E.Id
INNER JOIN sotorrent18_09.code AS C ON C.Id=E.Id
ORDER BY E.Id;
"""
    cursor.execute(sql4)

    # Find text content from original answer ids
    sql5 = """CREATE OR REPLACE VIEW sotorrent18_09.roots1 AS
SELECT PostHistoryId AS Id,Content AS Text
FROM sotorrent18_09.postblockversion
WHERE PostBlockTypeId=1 AND LocalId=1 AND PostHistoryId IN (SELECT PredPostHistoryId FROM sotorrent18_09.fin_edits) AND PostHistoryId NOT IN (SELECT Id FROM sotorrent18_09.fin_edits);
"""
    cursor.execute(sql5)

    # Find code content from original answer ids
    sql6 = """CREATE OR REPLACE VIEW sotorrent18_09.roots2 AS
SELECT PostHistoryId AS Id,Content AS Code
FROM sotorrent18_09.postblockversion
WHERE PostBlockTypeId=2 AND LocalId=2 AND PostHistoryId IN (SELECT PredPostHistoryId FROM sotorrent18_09.fin_edits) AND PostHistoryId NOT IN (SELECT Id FROM sotorrent18_09.fin_edits);
"""
    cursor.execute(sql6)

    # Create a view (fin_roots) with data (id,text,code) from original answers
    sql7 = """CREATE OR REPLACE VIEW sotorrent18_09.fin_roots AS
SELECT R1.Id,NULL,NULL,Text,Code
FROM sotorrent18_09.roots1 AS R1
INNER JOIN sotorrent18_09.roots2 AS R2 ON R1.Id=R2.Id
ORDER BY R1.Id;
"""
    cursor.execute(sql7)

    # Union of edited versions and their roots, ordered by post-history id.
    sql8 = """(SELECT * FROM sotorrent18_09.fin_edits)
UNION ALL
(SELECT * FROM sotorrent18_09.fin_roots)
ORDER BY Id;
"""
    cursor.execute(sql8)

    df = DataFrame(cursor.fetchall())
    df.columns = cursor.column_names
    df.to_csv(data_path + 'answer_posts_with_edits.csv', sep='\t', encoding='utf-8')
finally:
    # Bug fix: the cursor and connection were never closed.
    cursor.close()
    cnx.close()
| 3,160 | 1,230 |
import argparse
def scan(conf, args):
    """Handler for the ``scan`` sub-command (not yet implemented)."""
    pass


def setup_subcmd(subparsers):
    """Register the ``scan`` sub-command on *subparsers*.

    Bug fix: ``srcdir`` is a *directory* path, but the original declared it
    as ``argparse.FileType('r')``, which tries to ``open()`` the argument as
    a regular file and fails for any directory.  The path is now stored as a
    plain string for the handler to traverse.
    """
    scan_parser = subparsers.add_parser('scan', help='scan qcow2 image dir')
    scan_parser.add_argument('srcdir', nargs=1)
    scan_parser.set_defaults(handler=scan)
| 278 | 99 |
from twisted.internet import defer
from twisted.trial import unittest
from scrapy.http import Request
from scrapy.http import HtmlResponse
from scrapy.utils.python import equal_attributes
from scrapy.contrib_exp.crawlspider.reqext import SgmlRequestExtractor
from scrapy.contrib_exp.crawlspider.reqgen import RequestGenerator
from scrapy.contrib_exp.crawlspider.reqproc import Canonicalize
from scrapy.contrib_exp.crawlspider.reqproc import FilterDomain
from scrapy.contrib_exp.crawlspider.reqproc import FilterUrl
from scrapy.contrib_exp.crawlspider.reqproc import FilterDupes
class RequestGeneratorTest(unittest.TestCase):
    """Tests for RequestGenerator: extracting requests from an HTML response
    and running them through request processors."""

    def setUp(self):
        """Build a small HTML fixture plus the requests its four links map to."""
        url = 'http://example.org/somepage/index.html'
        # NOTE(review): the first <title> is never closed ("<title>Page
        # title<title>") — presumably deliberate fixture noise; confirm.
        html = """<html><head><title>Page title<title>
        <body><p><a href="item/12.html">Item 12</a></p>
        <p><a href="/about.html">About us</a></p>
        <img src="/logo.png" alt="Company logo (not a link)" />
        <p><a href="../othercat.html">Other category</a></p>
        <p><a href="/" /></p></body></html>"""
        self.response = HtmlResponse(url, body=html)
        self.deferred = defer.Deferred()
        # Expected requests: one per <a> link, resolved against `url`.
        self.requests = [
            Request('http://example.org/somepage/item/12.html',
                    meta={'link_text': 'Item 12'}),
            Request('http://example.org/about.html',
                    meta={'link_text': 'About us'}),
            Request('http://example.org/othercat.html',
                    meta={'link_text': 'Other category'}),
            Request('http://example.org/',
                    meta={'link_text': ''}),
        ]

    def _equal_requests_list(self, list1, list2):
        """Return True if both iterables hold pairwise URL-equal requests."""
        list1 = list(list1)
        list2 = list(list2)
        if not len(list1) == len(list2):
            return False
        # Only the URL is compared; meta and other attributes are ignored.
        for (req1, req2) in zip(list1, list2):
            if not equal_attributes(req1, req2, ['url']):
                return False
        return True

    def test_basic(self):
        """With no extractors, no requests are generated."""
        reqgen = RequestGenerator([], [], callback=self.deferred)
        # returns generator
        requests = reqgen.generate_requests(self.response)
        self.failUnlessEqual(list(requests), [])

    def test_request_extractor(self):
        """The SGML extractor yields one request per link in the fixture."""
        extractors = [
            SgmlRequestExtractor()
        ]
        # extract all requests
        reqgen = RequestGenerator(extractors, [], callback=self.deferred)
        requests = reqgen.generate_requests(self.response)
        self.failUnless(self._equal_requests_list(requests, self.requests))

    def test_request_processor(self):
        """Processors can pass through, deny by domain, or filter by URL."""
        extractors = [
            SgmlRequestExtractor()
        ]
        # Canonicalize + dedupe alone leaves all four requests intact.
        processors = [
            Canonicalize(),
            FilterDupes(),
        ]
        reqgen = RequestGenerator(extractors, processors, callback=self.deferred)
        requests = reqgen.generate_requests(self.response)
        self.failUnless(self._equal_requests_list(requests, self.requests))
        # filter domain
        processors = [
            Canonicalize(),
            FilterDupes(),
            FilterDomain(deny='example.org'),
        ]
        reqgen = RequestGenerator(extractors, processors, callback=self.deferred)
        requests = reqgen.generate_requests(self.response)
        self.failUnlessEqual(list(requests), [])
        # filter url
        processors = [
            Canonicalize(),
            FilterDupes(),
            FilterUrl(deny=(r'about', r'othercat')),
        ]
        reqgen = RequestGenerator(extractors, processors, callback=self.deferred)
        requests = reqgen.generate_requests(self.response)
        self.failUnless(self._equal_requests_list(requests, [
            Request('http://example.org/somepage/item/12.html',
                    meta={'link_text': 'Item 12'}),
            Request('http://example.org/',
                    meta={'link_text': ''}),
        ]))
        processors = [
            Canonicalize(),
            FilterDupes(),
            FilterUrl(allow=r'/somepage/'),
        ]
        reqgen = RequestGenerator(extractors, processors, callback=self.deferred)
        requests = reqgen.generate_requests(self.response)
        self.failUnless(self._equal_requests_list(requests, [
            Request('http://example.org/somepage/item/12.html',
                    meta={'link_text': 'Item 12'}),
        ]))
| 4,395 | 1,276 |
import tensorflow as tf
import json
from sklearn.model_selection import KFold
import jsonpickle
import numpy as np
from minmax.game_model import GameState
def load_data(file_name):
    """Load (state, action, reward, next_state) transitions from *file_name*.

    The file holds a jsonpickle-encoded sequence of 4-tuples; states are
    converted to integer ids via ``GameState.get_state_id``.  Returns four
    parallel numpy arrays: state ids, actions, rewards, next-state ids.
    """
    with open(file_name, "r") as fh:
        transitions = jsonpickle.decode(json.load(fh))

    states, actions, rewards, next_states = [], [], [], []
    for state, action, reward, next_state in transitions:
        states.append(GameState.get_state_id(state))
        actions.append(int(action))
        rewards.append(reward)
        next_states.append(GameState.get_state_id(next_state))

    return (np.stack(states), np.stack(actions),
            np.stack(rewards), np.stack(next_states))
def q_learning_model():
    """Build a tabular Q-learning setup.

    Returns ``(q_function, train_model)``: *q_function* maps a state id to a
    vector of NUM_ACTIONS Q-values (one-hot lookup into a bias-free Dense
    layer, i.e. a learned Q-table); *train_model* consumes
    (state, action, reward, next_state) batches and minimises the squared
    TD error via ``add_loss``.
    """
    # NOTE(review): presumably 12^4 board encodings times 2 — confirm
    # against GameState.get_state_id.
    NUM_STATES = 12*12*12*12*2
    NUM_ACTIONS = 18
    GAMMA = 0.99  # discount factor used in the TD target below

    # Q-table realised as one_hot(state) @ W with W: (NUM_STATES, NUM_ACTIONS).
    model_in = tf.keras.layers.Input(shape=(1,), dtype=tf.int32)
    tmp = tf.one_hot(model_in, NUM_STATES)
    tmp = tf.keras.layers.Dense(NUM_ACTIONS, use_bias=False)(tmp)
    model_out = tf.squeeze(tmp, axis=1)
    q_function = tf.keras.Model(model_in, model_out)

    state = tf.keras.layers.Input(shape=(1,), dtype=tf.int32, name="State")
    action = tf.keras.layers.Input(shape=(1,), dtype=tf.int32, name="Action")
    reward = tf.keras.layers.Input(shape=(1,), name="Reward")
    next_state = tf.keras.layers.Input(
        shape=(1,), dtype=tf.int32, name="Next_State")

    # TD target: r + gamma * max_a' Q(s', a').
    td_target = reward + GAMMA * tf.reduce_max(q_function(next_state), axis=-1)
    predictions = tf.gather(q_function(state), action, axis=-1)
    train_model = tf.keras.Model(
        inputs=[state, action, reward, next_state],
        outputs=[predictions, td_target]
    )
    # to date it still feels as if tf.stop_gradient is a horrible
    # hack similar to DDQL to stabelize the algorithm
    td_error = 0.5 * tf.abs(tf.stop_gradient(td_target) - predictions) ** 2
    train_model.add_loss(td_error, [state, action, reward, next_state])

    # Metric: how often the greedy action matches the recorded action.
    predicted_action = tf.argmax(q_function(state), axis=-1)
    correct_predictions = tf.keras.metrics.categorical_accuracy(
        action, predicted_action)
    train_model.add_metric(correct_predictions,
                           name="Matched_Actions", aggregation="mean")
    return q_function, train_model
def policy_gradient():
    """Build a behavioural-cloning style policy model.

    Returns ``(policy_fn, train_model)``: *policy_fn* maps a state id to a
    softmax distribution over NUM_ACTIONS; *train_model* minimises the
    cross-entropy between that distribution and the recorded action.
    """
    NUM_STATES = 12*12*12*12*2  # NOTE(review): declared but never used here
    NUM_ACTIONS = 18
    GAMMA = 0.99                # NOTE(review): declared but never used here

    # NOTE(review): unlike q_learning_model, the raw int32 state id is fed
    # straight into Dense without one-hot encoding — looks like a bug
    # (the id would act as a single scalar feature); confirm.
    model_in = tf.keras.layers.Input(shape=(1,), dtype=tf.int32)
    tmp = model_in
    tmp = tf.keras.layers.Dense(NUM_ACTIONS, activation="softmax")(tmp)
    model_out = 1.0 * tmp
    policy_fn = tf.keras.Model(model_in, model_out)

    state = tf.keras.layers.Input(shape=(1,), dtype=tf.int32, name="State")
    action = tf.keras.layers.Input(shape=(1,), dtype=tf.int32, name="Action")
    # NOTE(review): reward/next_state inputs are created but not wired into
    # train_model (inputs are [state, action] only) — confirm intent.
    reward = tf.keras.layers.Input(shape=(1,), name="Reward")
    next_state = tf.keras.layers.Input(
        shape=(1,), dtype=tf.int32, name="Next_State")

    # Target is the one-hot encoding of the recorded action.
    target = tf.one_hot(action, NUM_ACTIONS)
    target = tf.squeeze(target, axis=1)
    predictions = policy_fn(state)
    train_model = tf.keras.Model(
        inputs=[state, action], outputs=[predictions, target])
    error = tf.keras.losses.categorical_crossentropy(target, predictions)
    train_model.add_loss(error, [state, action])

    # Metric: how often the most likely action matches the recorded action.
    most_likely_action = tf.argmax(policy_fn(state), axis=-1)
    correct_predictions = tf.keras.metrics.categorical_accuracy(
        action, most_likely_action)
    train_model.add_metric(correct_predictions,
                           name="Matched_Actions", aggregation="mean")
    return policy_fn, train_model
if __name__ == "__main__":
states, actions, rewards, next_states = load_data("data.json")
indices = np.arange(len(states))
# # Q Learning
# # ===========
# q_function, train_q = q_learning_model()
# # training the q-learning agent
# train_q.compile(optimizer="sgd")
# train_q.fit([states, actions, rewards, next_states])
# # using the learned model
# q_values = q_function(states).numpy()
# best_actions = np.argmax(q_values, axis=-1)
# # Policy Gradient
# # ================
# policy_fn, train_policy = policy_gradient()
# # training the policy gradient
# train_policy.compile(optimizer="sgd")
# train_policy.fit([states, actions, rewards, next_states])
# # use the learned model
# action_props = policy_fn(states).numpy()
# cum_prop = np.cumsum(action_props, axis=-1)
# rng = np.random.rand(len(action_props))[..., np.newaxis]
# best_actions = np.argmax(rng <= cum_prop, axis=-1)
q_scores = list()
policy_scores = list()
for train_idx, test_idx in KFold(shuffle=True).split(indices):
train_data = [
states[train_idx, ...],
actions[train_idx, ...],
rewards[train_idx, ...],
next_states[train_idx, ...],
]
test_data = [
states[test_idx, ...],
actions[test_idx, ...],
rewards[test_idx, ...],
next_states[test_idx, ...],
]
q_function, train_q = q_learning_model()
train_q.compile(optimizer="sgd")
train_q.fit(train_data)
_, score = train_q.evaluate(test_data)
q_scores.append(score)
policy_fn, train_policy = q_learning_model()
train_policy.compile(optimizer="sgd")
train_policy.fit(train_data)
_, score = train_policy.evaluate(test_data)
policy_scores.append(score)
q_scores = np.array(q_scores)
print(f"Q-Learning Accuracy: M={np.mean(q_scores):.2f} "
f"(SD={np.std(q_scores):.2f})")
policy_scores = np.array(policy_scores)
print(f"Policy-Iteration Accuracy: M={np.mean(policy_scores):.2f} "
f"(SD={np.std(policy_scores):.2f})")
| 5,891 | 2,094 |
from pytest import mark
from blockapi.api.tronscan import TronscanAPI
from blockapi.test_init import test_addresses
class TestTronscanAPI:
    """VCR-backed tests for the Tronscan block explorer API wrapper."""

    ADDRESS = test_addresses['TRX'][0]

    @mark.vcr()
    def test_get_balance(self):
        api = TronscanAPI(address=self.ADDRESS)
        balances = api.get_balance()
        # The recorded cassette holds 45 assets; TRX itself has this amount.
        trx_amount = next(b["amount"] for b in balances if b["symbol"] == "TRX")
        assert trx_amount == 0.588285
        assert len(balances) == 45
| 447 | 157 |
from .resnet_fpn import ResNetFPN_8_2, ResNetFPN_16_4
def build_backbone(config):
    """Instantiate the backbone network described by *config*.

    :param config: dict with keys ``backbone_type``, ``resolution`` and
        ``resnetfpn`` (the sub-config forwarded to the backbone).
    :returns: a ResNet-FPN backbone instance.
    :raises ValueError: if the backbone type or resolution is unsupported.
    """
    if config['backbone_type'] == 'ResNetFPN':
        if config['resolution'] == (8, 2):
            return ResNetFPN_8_2(config['resnetfpn'])
        if config['resolution'] == (16, 4):
            return ResNetFPN_16_4(config['resnetfpn'])
        # Bug fix: an unsupported resolution previously fell through and
        # silently returned None.
        raise ValueError(
            f"LOFTR.RESOLUTION {config['resolution']} not supported.")
    else:
        raise ValueError(f"LOFTR.BACKBONE_TYPE {config['backbone_type']} not supported.")
| 429 | 160 |
# Mapping from variant kanji glyphs (itaiji / older traditional forms) to
# their standard modern forms.  Keys are the variants, values the
# replacements.
itaizi_list = {
    "亞":"亜",
    "啞":"唖",
    "瘂":"唖",
    "惡":"悪",
    "蘆":"芦",
    "鰺":"鯵",
    "壓":"圧",
    "葊":"庵",
    "菴":"庵",
    "桉":"案",
    "圍":"囲",
    "爲":"為",
    "醫":"医",
    "毓":"育",
    "弌":"一",
    "壹":"壱",
    "稻":"稲",
    "囙":"因",
    "婣":"姻",
    "飮":"飲",
    "滛":"淫",
    "婬":"淫",
    "隱":"隠",
    "阴":"陰",
    "隂":"陰",
    "夘":"卯",
    "丒":"丑",
    "噓":"嘘",
    "鬱":"欝",
    "廄":"厩",
    "廏":"厩",
    "廐":"厩",
    # NOTE(review): this entry maps "睿" to itself (the key and value look
    # identical) — possibly a compatibility-codepoint pair or a typo; confirm.
    "睿":"睿",
    "營":"営",
    "暎":"映",
    "曵":"曳",
    "榮":"栄",
    "頴":"穎",
    "偀":"英",
    "衞":"衛",
    "咏":"詠",
    "驛":"駅",
    "圎":"円",
    "圓":"円",
    "椻":"堰",
    "焰":"焔",
    "烟":"煙",
    "齋":"斎"
    # 40
}
from .errors import (ApiError, ApiClientError)
from .mixins import ApiClientMixin
from .models.public import (
AvailableBooks
, Ticker
, OrderBook
, Trade
)
class Client(ApiClientMixin):
    """Minimal Bitso REST API v3 client.

    Public endpoints need no credentials; private endpoints require *key*
    and *secret* and are sent with ``private=True`` so that
    ``ApiClientMixin._request_url`` can sign them.
    """

    def __init__(self, key=None, secret=None):
        self.base_url = 'https://bitso.com/api/v3'
        self.key = key
        self._secret = secret

    # public api
    def available_books(self):
        """List the order books available on the exchange."""
        url = '%s/available_books/' % self.base_url
        resp = self._request_url(url, 'GET')
        return AvailableBooks(resp)

    def ticker(self, book):
        """Return trading information for *book* (e.g. ``'btc_mxn'``)."""
        url = '%s/ticker/' % self.base_url
        parameters = {'book': book}
        resp = self._request_url(url, 'GET', params=parameters)
        return Ticker(resp['payload'])

    def order_book(self, book, aggregate=True):
        """Return open orders for *book*; *aggregate* groups them by price."""
        url = '%s/order_book/' % self.base_url
        parameters = {'book': book, 'aggregate': aggregate}
        resp = self._request_url(url, 'GET', params=parameters)
        return OrderBook(resp['payload'])

    def trades(self, book, **kwargs):
        """Return recent trades for *book*.

        Optional kwargs: ``marker``, ``limit`` (defaults to 100), ``sort``.
        """
        url = '%s/trades/' % self.base_url
        parameters = {'book': book}
        if 'marker' in kwargs:
            parameters['marker'] = kwargs['marker']
        parameters['limit'] = kwargs.get('limit', 100)
        if 'sort' in kwargs:
            parameters['sort'] = kwargs['sort']
        resp = self._request_url(url, 'GET', params=parameters)
        return [Trade(o) for o in resp['payload']]

    # private api
    def account_status(self):
        """Return the account's verification/limits status."""
        url = '%s/account_status/' % self.base_url
        resp = self._request_url(url, 'GET', private=True)
        # NOTE(review): AccountStatus is not among this module's visible
        # imports — confirm it is imported elsewhere or add the import.
        return AccountStatus(resp['payload'])

    def balance(self):
        """Return the account balances."""
        url = '%s/balance/' % self.base_url
        resp = self._request_url(url, 'GET', private=True)
        # NOTE(review): Balance is not among the visible imports — confirm.
        return Balance(resp['payload'])

    def fees(self):
        """Return the account's fee schedule."""
        url = '%s/fees/' % self.base_url
        resp = self._request_url(url, 'GET', private=True)
        # NOTE(review): Fees is not among the visible imports — confirm.
        return Fees(resp['payload'])

    def ledger(self, operation='', marker=None, limit=25, sort='desc'):
        """Return ledger entries, optionally filtered by *operation*."""
        _operations = ['', 'trades', 'fees', 'fundings', 'withdrawals']
        # Bug fix: the original combined these checks with `and`, so an
        # invalid string operation was never rejected client-side.
        if not isinstance(operation, str) or operation not in _operations:
            raise ApiClientError({'message': 'invalid operation'})
        url = '%s/ledger/%s' % (self.base_url, operation)
        parameters = {}
        if marker:
            parameters['marker'] = marker
        if limit:
            parameters['limit'] = limit
        if sort:
            parameters['sort'] = sort
        resp = self._request_url(url, 'GET', params=parameters, private=True)
        # Bug fix: the original comprehension was `LedgerEntry(o) for entry
        # in ...`, raising NameError on any non-empty payload.
        # NOTE(review): LedgerEntry is not among the visible imports — confirm.
        return [LedgerEntry(entry) for entry in resp['payload']]

    def withdrawals(self):
        raise NotImplementedError

    def fundings(self):
        raise NotImplementedError

    def user_trades(self, tids=[], book=None, marker=None, limit=25, sort='desc'):
        raise NotImplementedError

    def order_trades(self, oid):
        raise NotImplementedError

    def open_orders(self, book=None):
        raise NotImplementedError

    def lookup_orders(self, oids):
        raise NotImplementedError

    def cancel_orders(self, oids):
        """Cancel one or more open orders by id (str or list of str)."""
        if isinstance(oids, str):
            oids = [oids]
        url = '%s/orders/%s/' % (self.base_url, '-'.join(oids))
        resp = self._request_url(url, 'DELETE', private=True)
        return resp['payload']

    def place_order(self, book, side, type, **kwargs):
        """Place a buy/sell order on *book*.

        Exactly one of ``major``/``minor`` must be given; ``price`` is only
        valid for limit orders.
        """
        _sides = ['buy', 'sell']
        _types = ['market', 'limit']
        # Bug fix: the original combined these checks with `and`, so invalid
        # string values (empty book, bad side/type) were never rejected.
        if not isinstance(book, str) or not len(book):
            raise ApiClientError({'message': 'book not specified'})
        if not isinstance(side, str) or side not in _sides:
            raise ApiClientError({'message': 'side not specified'})
        if not isinstance(type, str) or type not in _types:
            raise ApiClientError({'message': 'type not specified'})
        major_given = bool(str(kwargs.get('major', '')).strip())
        minor_given = bool(str(kwargs.get('minor', '')).strip())
        # Bug fix: the original only rejected "neither given"; per the error
        # message, giving both is equally invalid.
        if major_given == minor_given:
            raise ApiClientError({'message': 'an order must be specified in terms of major or minor, never both'})
        # Bug fix: kwargs.get('price') defaulted to None and str(None) is
        # the truthy 'None', so every market order WITHOUT a price was
        # rejected here.
        if str(kwargs.get('price', '')).strip() and type != 'limit':
            raise ApiClientError({'message': 'price for use only with limit orders'})
        url = '%s/orders/' % self.base_url
        parameters = {'book': book, 'type': type, 'side': side}
        for key in ('major', 'minor', 'price'):
            if key in kwargs:
                parameters[key] = kwargs.get(key)
        resp = self._request_url(url, 'POST', params=parameters, private=True)
        return resp['payload']

    def funding_destination(self, fund_currency):
        raise NotImplementedError

    def btc_withdrawal(self, amount, address):
        raise NotImplementedError

    def eth_withdrawal(self, amount, address):
        raise NotImplementedError

    def spei_withdrawal(self):
        raise NotImplementedError
| 5,298 | 1,574 |
# This Python file uses the following encoding: utf-8
from PyQt5 import QtCore, QtGui, QtWidgets
from view.productsview import ProductsView
if __name__ == "__main__":
import sys
app = QtWidgets.QApplication(sys.argv)
pv = ProductsView()
#pv.pte_product_value.installEventFilter(self)
sys.exit(app.exec_())
| 328 | 110 |
from site import USER_BASE
from ipaddress import AddressValueError, IPv4Address
from pathlib import Path
from random import choice
from termcolor import cprint
from interutils import clear
class InvalidProxyFormatError(Exception):
    """Raised when a proxy string does not match the '<ipv4>:<port>' form."""
    pass
def is_ip_address(ip_addr: str) -> bool:
    """Return True if *ip_addr* parses as a dotted-quad IPv4 address."""
    try:
        IPv4Address(ip_addr)
    except AddressValueError:
        return False
    return True


def is_proxy_format(pip: str) -> bool:
    """Check that the given proxy is a string and has a valid address + port."""
    if not isinstance(pip, str):
        return False
    parts = pip.strip().split(':')
    if len(parts) != 2:
        return False
    host, port_text = parts
    try:
        port_number = int(port_text)
    except ValueError:
        return False
    if not 0 <= port_number <= 65535:
        return False
    return is_ip_address(host)
| 880 | 268 |
"""
DES3-specific mechanism implementations.
"""
import logging
from ctypes import c_void_p, cast, pointer, sizeof, POINTER
from . import Mechanism
from ..attributes import to_byte_array
from ..conversions import from_bytestring
from ..cryptoki import CK_ULONG, CK_BYTE, CK_BYTE_PTR, CK_DES_CTR_PARAMS, \
CK_KEY_DERIVATION_STRING_DATA, CK_DES_CBC_ENCRYPT_DATA_PARAMS
LOG = logging.getLogger(__name__)
class DES3CTRMechanism(Mechanism):
    """
    DES3 CTR Mechanism param conversion.

    Required params: ``cb`` (8-byte counter block) and ``ulCounterBits``
    (number of counter bits within the block).
    """
    REQUIRED_PARAMS = ['cb', 'ulCounterBits']

    def to_c_mech(self):
        """
        Convert extra parameters to ctypes, then build out the mechanism.

        :return: :class:`~pycryptoki.cryptoki.CK_MECHANISM`
        """
        super(DES3CTRMechanism, self).to_c_mech()
        ctr_params = CK_DES_CTR_PARAMS()
        # The counter block is a fixed 8-byte array (DES block size).
        ctr_params.cb = (CK_BYTE * 8)(*self.params['cb'])
        ctr_params.ulCounterBits = CK_ULONG(self.params['ulCounterBits'])
        self.mech.pParameter = cast(pointer(ctr_params), c_void_p)
        self.mech.usParameterLen = CK_ULONG(sizeof(ctr_params))
        return self.mech
class DES3ECBEncryptDataMechanism(Mechanism):
    """
    DES3 mechanism for deriving keys from encrypted data.

    Required param: ``data`` (bytes to encrypt during derivation).
    """
    REQUIRED_PARAMS = ['data']

    def to_c_mech(self):
        """
        Convert extra parameters to ctypes, then build out the mechanism.

        :return: :class:`~pycryptoki.cryptoki.CK_MECHANISM`
        """
        super(DES3ECBEncryptDataMechanism, self).to_c_mech()
        # from https://www.cryptsoft.com/pkcs11doc/v220
        # /group__SEC__12__14__2__MECHANISM__PARAMETERS.html
        # CKM_DES3_ECB_ENCRYPT_DATA
        # Note: data should same or > size of key in multiples of 8.
        params = CK_KEY_DERIVATION_STRING_DATA()
        pdata, data_len = to_byte_array(from_bytestring(self.params['data']))
        pdata = cast(pdata, CK_BYTE_PTR)
        params.pData = pdata
        params.ulLen = CK_ULONG(data_len.value)
        self.mech.pParameter = cast(pointer(params), c_void_p)
        self.mech.usParameterLen = CK_ULONG(sizeof(params))
        return self.mech
class DES3CBCEncryptDataMechanism(Mechanism):
    """
    DES3 CBC mechanism for deriving keys from encrypted data.

    Required params: ``iv`` (8-byte initialisation vector) and ``data``
    (bytes to encrypt during derivation).
    """
    REQUIRED_PARAMS = ['iv', 'data']

    def to_c_mech(self):
        """
        Convert extra parameters to ctypes, then build out the mechanism.

        :return: :class:`~pycryptoki.cryptoki.CK_MECHANISM`
        """
        super(DES3CBCEncryptDataMechanism, self).to_c_mech()
        # from https://www.cryptsoft.com/pkcs11doc/v220
        # /group__SEC__12__14__2__MECHANISM__PARAMETERS.html
        # CKM_DES3_CBC_ENCRYPT_DATA
        # Note: data should same or > size of key in multiples of 8.
        params = CK_DES_CBC_ENCRYPT_DATA_PARAMS()
        pdata, data_len = to_byte_array(from_bytestring(self.params['data']))
        pdata = cast(pdata, CK_BYTE_PTR)
        # Note: IV should always be a length of 8.
        params.iv = (CK_BYTE * 8)(*self.params['iv'])
        params.pData = pdata
        params.length = CK_ULONG(data_len.value)
        self.mech.pParameter = cast(pointer(params), c_void_p)
        self.mech.usParameterLen = CK_ULONG(sizeof(params))
        return self.mech
"""
Copyright (c) 2005-2009 Ben Bangert, James Gardner, Philip Jenvey
and contributors.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
3. The name of the author or contributors may not be used to endorse or
promote products derived from this software without specific prior
written permission.
THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
SUCH DAMAGE.
-------------------------------------------------------------------------------
ALL TEMPLATES GENERATED ARE COVERED UNDER THE FOLLOWING LICENSE:
Copyright (c) 2005-2009 Ben Bangert, James Gardner, Philip Jenvey
and contributors.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following condition is
met:
The name of the author or contributors may not be used to endorse or
promote products derived from this software without specific prior
written permission.
THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
SUCH DAMAGE.
This code is a direct rip-off of the pylons jsonification decorator
"""
from decorator import decorator
import json
import webob
import warnings
import logging
log = logging.getLogger(__name__)
def jsonify(func, self, req):
    """Decorate an action so its return value is serialised as JSON.

    Calls ``func(self, req)`` and wraps whatever it returns in a
    ``webob.Response`` with a content type of 'application/json'.

    Taken from pylons and modified.
    """
    payload = func(self, req)
    # Bare top-level JSON arrays can be read cross-site by old browsers,
    # so warn loudly when an action returns one.
    if isinstance(payload, (list, tuple)):
        msg = "JSON responses with Array envelopes are susceptible to " \
              "cross-site data leak attacks, see " \
              "http://pylonshq.com/warnings/JSONArray"
        warnings.warn(msg, Warning, 2)
        log.warning(msg)
    log.debug("Returning JSON wrapped action output")
    response = webob.Response(request=req, content_type="application/json")
    response.body = json.dumps(payload)
    return response
# Wrap via the decorator module so the original signature is preserved.
jsonify = decorator(jsonify)
| 3,850 | 1,266 |
""" mail app views"""
from django.http import Http404
from django.http import HttpResponse
from mail.models import Receiver, Mail, generate_mail_conent
def mail_demo_view(request, mail_id, receiver_id):
    """Render the final mail demo for one (mail, receiver) pair.

    Looks up the Mail and Receiver by primary key and returns the
    generated mail content; raises Http404 when either is missing.
    """
    # Initialise both names up front: in the original code a falsy
    # mail_id/receiver_id skipped the lookup entirely and left the name
    # unbound, raising UnboundLocalError at the final check.
    mail = None
    receiver = None
    if mail_id:
        try:
            mail = Mail.objects.get(pk=mail_id)
        except Mail.DoesNotExist:
            mail = None
    if receiver_id:
        try:
            receiver = Receiver.objects.get(pk=receiver_id)
        except Receiver.DoesNotExist:
            receiver = None
    if mail and receiver:
        return HttpResponse(generate_mail_conent(mail, receiver))
    raise Http404("mail or receiver not exist")
| 685 | 203 |
import tkinter as tk
from tkinter.filedialog import askopenfilename
from tkinter import scrolledtext
from PIL import ImageTk,Image
import execute
def updateText(text):
    """Append *text* to the read-only log widget.

    The widget is briefly made editable, written to, then locked again
    so the user cannot type into it.
    """
    text_widget.configure(state=tk.NORMAL)
    text_widget.insert(tk.INSERT, text)
    text_widget.configure(state=tk.DISABLED)
def getFile():
    """Prompt for an input image, validate it, and show a preview.

    Stores the selected path in the module-global ``filename`` and
    packs a scaled preview into the ``untreatedImage`` label.
    """
    global filename
    # askopenfilename returns "" (or an empty tuple on some platforms)
    # when the dialog is cancelled; normalise to "" so later
    # `filename == ""` checks keep working.
    filename = askopenfilename() or ""
    # The original tested `".png" in filename`, which matches anywhere
    # in the path (e.g. a directory named "x.png"); check the suffix.
    if not filename.lower().endswith((".png", ".jpg")):
        updateText("Not a valid image, please use a jpg or png.\n")
    else:
        updateText("Valid image found at {file} \n".format(file=str(filename)))
        img = Image.open(filename)
        # Scale to ~300 px wide, preserving the aspect ratio.
        scale = 300 / img.size[0]
        img = img.resize((299, int(scale * img.size[1])), Image.ANTIALIAS)
        img = ImageTk.PhotoImage(img)
        untreatedImage.configure(image=img)
        untreatedImage.image = img  # keep a reference so Tk doesn't GC it
        untreatedImage.pack(side=tk.LEFT)
def executeProgram():
    """Validate the threshold entry and run image processing.

    On success sets ``processedFlag``/``processedImage`` and shows a
    scaled preview of the result in the ``procImage`` label.
    """
    if filename == "":
        updateText("Please select an image first.\n")
    else:
        global processedFlag
        global processedImage
        thresholdValue = thresholdEntry.get()
        # The original checked `type(int(v)) is not int`, which is always
        # False — and int(v) itself raised an unhandled ValueError for
        # non-numeric input before the check could run.  Parse safely.
        try:
            threshold = int(thresholdValue)
        except ValueError:
            updateText("Value is not an integer.\n")
            raise ValueError("Value is not an integer.")
        if threshold > 255 or threshold < 0:
            updateText("Value of threshold should be 0-255.\n")
            raise ValueError("Value of threshold should be 0-255.")
        processedFlag = True
        img = execute.processImage(filename, threshold)
        processedImage = img
        img = Image.fromarray(img)
        # Scale to ~300 px wide, preserving the aspect ratio.
        scale = 300 / img.size[0]
        img = img.resize((299, int(scale * img.size[1])), Image.ANTIALIAS)
        img = ImageTk.PhotoImage(img)
        procImage.configure(image=img)
        procImage.image = img  # keep a reference so Tk doesn't GC it
        procImage.pack(side=tk.RIGHT)
        updateText("Processed image has been generated.\n")
def showImage():
    """Open the processed image at full size in its own window.

    While the viewer is open the main-window action buttons are
    disabled; closing the viewer re-enables them.
    """

    def destroyWindow():
        # Close the viewer and give the main-window buttons back.
        nwin.destroy()
        showImageButton["state"] = tk.NORMAL
        okButton["state"] = tk.NORMAL
        downloadButton["state"] = tk.NORMAL

    if filename == "":
        updateText("Please select an image first.\n")
    elif not processedFlag:
        updateText("Please process an image first.\n")
    else:
        global processedImage
        nwin = tk.Toplevel()
        nwin.title("Processed Image")
        pil_img = Image.fromarray(processedImage)
        tk_img = ImageTk.PhotoImage(pil_img)
        nwinCanvas = tk.Canvas(nwin, width=pil_img.size[0], height=pil_img.size[1])
        nwinCanvas.pack(expand=tk.YES, fill=tk.BOTH)
        # Lock the main-window buttons while the viewer is open.
        for button in (showImageButton, okButton, downloadButton):
            button["state"] = tk.DISABLED
        nwinCanvas.create_image(1, 1, image=tk_img, anchor=tk.NW)
        nwin.resizable(True, True)
        nwin.protocol("WM_DELETE_WINDOW", destroyWindow)
        nwin.mainloop()
# --- application state -------------------------------------------------
filename = ""           # path of the currently selected input image
processedFlag = False   # True once execute.processImage has produced output

# --- main window -------------------------------------------------------
root = tk.Tk()
root.title("Handwriting to LaTeX")
root.geometry("600x600")
root.resizable(False, False)
canvas = tk.Canvas(root, height=624, width = 600, bg="#e6e6e6")
canvas.pack()

# Middle band holds the before/after image previews.
imageFrame = tk.Frame(root)
imageFrame.place(relwidth=1, relheight=0.6, rely=0.1)
# Top band holds the action buttons.
frame = tk.Frame(root)
frame.place(relwidth=1, relheight=0.1)

# Button icons (loaded from the UI directory next to the script).
uploadImage = tk.PhotoImage(file="./UI/upload.png")
okImage = tk.PhotoImage(file="./UI/ok.png")
magnifyImage = tk.PhotoImage(file="./UI/magnify.png")
downloadImage = tk.PhotoImage(file="./UI/download.png")

# NOTE(review): the "download" icon is bound to getFile (file selection)
# and updateButton has the "upload" icon but no command at all — the
# icon/command pairing looks swapped or unfinished; confirm intent.
downloadButton = tk.Button(frame, bg = "#ff8080", command=getFile, image=downloadImage, relief="flat", width=150, compound="left")
okButton = tk.Button(frame, bg = "#91FF80", command=executeProgram, image=okImage, relief="flat", width=150, compound="left")
showImageButton = tk.Button(frame, bg = "#73FFFB", command=showImage, image=magnifyImage, relief="flat", width=150, compound="left")
updateButton = tk.Button(frame, bg = "#FFFF99", image=uploadImage, relief="flat", width=150, compound="left")
downloadButton.place(relx = 0, rely = 0)
okButton.place(relx = 0.50, rely = 0)
showImageButton.place(relx = 0.25, rely=0)
updateButton.place(relx=0.75, rely=0)

# Bottom band: read-only status log.
textFrame = tk.Frame(root)
textFrame.place(relwidth=1, relheight=0.3, rely=0.7)
text_widget = tk.Text(textFrame, width=100, height=9, padx=3, pady=3)
text_widget.pack(side=tk.LEFT)
text_widget.insert(tk.INSERT, "Waiting for image input...\n")
text_widget["state"] = tk.DISABLED

# Labels that will hold the processed / untreated image previews.
procImage = tk.Label(imageFrame, width=298, height=372, pady = 1, padx=1)
untreatedImage = tk.Label(imageFrame, width=298, height=372, pady = 1, padx=1)
# Entry for the 0-255 threshold value read by executeProgram.
thresholdEntry = tk.Entry(imageFrame, bg="#91FF80")
thresholdEntry.place(relx=0.96, rely=0, width=25, height=20)
root.mainloop() | 4,772 | 1,632 |
from pygame.math import Vector2
from game.base.base import BasicObject
class AbstractModule(BasicObject):
    """Abstract module / gadget / attachment mounted on a spaceship."""

    def __init__(self, display, space_pos, space_size, space_head):
        super().__init__(display)
        # Size of the carrier spaceship; kept for subclass placement math.
        self._space_size = space_size
        # Module position derived from the ship's position and heading.
        self._pos = self._calc_position(space_pos, space_head)
        # Whether the module is currently switched on.
        self._started = False

    def started(self):
        """Status: switched on / off."""
        return self._started

    def start(self):
        """Switch the module on."""
        self._started = True

    def stop(self):
        """Switch the module off."""
        self._started = False

    @property
    def position(self):
        """A copy of the module's current position (safe to mutate)."""
        return Vector2(self._pos)

    def _calc_position(self, space_pos, space_head):
        """Compute the position relative to the spaceship (subclass hook)."""
        pass

    def update(self, space_pos, space_head):
        """Update location and state (subclass hook)."""
        pass

    def draw(self):
        """Render the module (subclass hook)."""
        pass
| 1,067 | 347 |
"""
Air Source Heat Pump Base Configuration
---------------------------------------
Contains Air Source Heat Pump Base configuration info for the community
data yaml file, and other set-up requirements
"""
from aaem.components import definitions
from pandas import DataFrame
# Key under which this component appears in the community data yaml file.
COMPONENT_NAME = "air source heat pumps base"

# Sentinel values used in the data files.
IMPORT = "IMPORT"
UNKNOWN = "UNKNOWN"

# Order in which the configuration keys are written to the yaml file.
order = [
    'enabled',
    'lifetime',
    'start year',
    'btu/hrs',
    'cost per btu/hrs',
    'o&m per year',
    'data',
    'performance data'
]

# Expected type for each configuration key (validated on load).
structure = {
    COMPONENT_NAME: {
        'enabled': bool, # is this component enabled <bool>
        'lifetime': int, # number years <int>
        'start year': int, # start year <int>
        'btu/hrs': float,
        'cost per btu/hrs': float,
        'o&m per year': float,
        'data': DataFrame,
        'performance data': {
            'COP': list,
            'Temperature': list,
            'Percent of Total Capacity': list,
        }
    }
}

# Human-readable comment emitted alongside each key in the yaml file.
comments = {
    'enabled': definitions.ENABLED,
    'lifetime': definitions.LIFETIME,
    'start year': definitions.START_YEAR_WITH_TYPE,
    'btu/hrs': '[float] per ASHP unit [btu/hrs]',
    'cost per btu/hrs': '[float] cost per btu/hrs [$/(btu/hrs)]',
    'o&m per year':'[float] operations and maintenance costs per year [$/year]',
    'data':
        "[DataFrame] Yearly climate data including 'Peak Month % of total', 'Capacity Factor', 'Minimum Temp', Avg. Temp(monthly), and % heating load (monthly)",
    'performance data':
        "[dict] contains lists of equal length for keys 'Temperature', 'COP' (Cofficient of performance), and 'Percent of Total Capacity'"
}

## list of prerequisites for module
prereq_comps = []
| 1,681 | 557 |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
tkRAD - tkinter Rapid Application Development library
(c) 2013+ Raphaël SEBAN <motus@laposte.net>
This program is free software: you can redistribute it and/or
modify it under the terms of the GNU General Public License as
published by the Free Software Foundation, either version 3 of the
License, or (at your option) any later version.
This program is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program.
If not, see: http://www.gnu.org/licenses/
"""
# lib imports
from ..core import struct_dict as SD
from . import rad_xml_attribute as XA
class RADXMLAttributesDict (SD.StructDict):
    r"""
    Convenience StructDict subclass whose items are RADXMLAttribute
    objects, read and written through their get_value/set_value methods.
    """

    def __init__ (self, *args, **kw):
        r"""
        Class constructor: configures StructDict for RADXMLAttribute
        items.
        """
        # delegate generic dict initialisation to StructDict
        super().__init__(*args, **kw)
        # items stored in this dict are RADXMLAttribute instances
        self.item_type = XA.RADXMLAttribute
        # StructDict accesses item values through these method names
        self.item_value_getter = "get_value"
        self.item_value_setter = "set_value"
| 1,481 | 452 |
import numpy as np
from math import sqrt
import time
from rpl.tools.geometry.surface_io import open_geom
from bsp_tree_poly import BSP_Tree
import logging
# NOTE: this is a Python 2 script (bare `print` statements).

## Get rid of any existing handlers (e.g. if running interactively in ipython)
logging.getLogger().handlers = []
## Main logging goes to a file
logging.basicConfig(filename="test_bench.log",
                    filemode="w",
                    format="%(levelname)s %(asctime)s %(message)s",
                    level=logging.DEBUG)
## Also log INFO and above to the console
console = logging.StreamHandler()
console.setLevel(logging.INFO)
console.setFormatter(logging.Formatter("%(asctime)s %(message)s"))
logging.getLogger("").addHandler(console)

# Load the STL geometry and convert coordinates from mm to metres.
start_load = time.time()
surf = open_geom("h170_engine_comp_ign_diesel_prt.stl")
x = surf["x"] / 1000.0
y = surf["y"] / 1000.0
z = surf["z"] / 1000.0
# Print the bounding box of the model.
print "X", np.min(x), np.max(x)
print "Y", np.min(y), np.max(y)
print "Z", np.min(z), np.max(z)

# Build the BSP tree over the triangle mesh.
nodes = np.vstack((x, y, z)).T
b_tree = BSP_Tree(nodes, surf["tris"], min_tris=200, min_dim=0.5)
b_tree.generate_tree()

# Cast a test line segment through the model and collect intersections.
start_pt = np.array([-0.25, 0.5, -1.5])
end_pt = np.array([-0.05, 0.5, 0.5])
vector = (end_pt - start_pt)
print vector
unit_vector = vector / sqrt(np.dot(vector, vector))
print unit_vector
i_points = b_tree.get_line_intersection_all(start_pt, end_pt)
print i_points
print b_tree.get_line_intersection(start_pt, end_pt)

# Convert intersection distances back into 3-D points for display.
points = []
points.append(start_pt)
points.append(end_pt)
for i_p in i_points:
    d = i_p[1]  # distance along the segment — presumably i_p is (tri, dist); confirm
    points.append(start_pt + unit_vector * d)
points = np.array(points)
print points

#view_model(surf)
# Visualise the mesh (wireframe) plus the intersection points in mayavi.
from mayavi import mlab
tris = surf["tris"]
scene = mlab.figure(size=(800, 800))
scene.scene.background = (0.0, 0.0, 0.0)
s = mlab.triangular_mesh(x, y, z, tris, color=(1, 1, 1), representation="wireframe", line_width=1.5, opacity=0.1)
mlab.axes(s)
q = mlab.points3d(points[:, 0],
                  points[:, 1],
                  points[:, 2],
                  color=(1.0, 0.0, 0.0),
                  scale_mode="none", scale_factor=0.01,
                  mode='cube', opacity=1.0)
scene.scene.disable_render = False
mlab.show() | 2,256 | 945 |
import enum
from sqlalchemy import Integer, String, Column, Enum, UniqueConstraint
from sqlalchemy.sql import exists, and_
from sqlalchemy_utils import ArrowType
from sqlalchemy.exc import IntegrityError
from .db import Base, session
import arrow
from .settings import settings
from .utils import get_scheduled_date
from .helpers import print_log
class State(enum.Enum):
    """Lifecycle state of a cached calendar event."""

    waiting = 0   # not yet actioned
    complete = 1  # action has been performed
    removed = 2   # event disappeared from the calendar

    def __str__(self) -> str:
        # Show just the member name (e.g. "waiting"), not "State.waiting".
        return f"{self.name}"
class Action(enum.Enum):
    """Thermostat action an event can request."""

    home = 0
    away = 1
    eco = 2
    heat = 3

    def __str__(self) -> str:
        # Show just the member name (e.g. "home"), not "Action.home".
        return f"{self.name}"
class Structure(Base):
    """Describes a Google structure (a home/building in the Nest API)."""

    __tablename__ = "structures"

    id = Column(Integer, primary_key=True, autoincrement=True, nullable=False)
    name = Column(String, nullable=False)           # Google-assigned name
    custom_name = Column(String, nullable=True)     # user-facing override

    def __repr__(self) -> str:
        return "<Structure(custom_name={})>".format(self.custom_name)
class Thermostat(Base):
    """Describes a Nest thermostat device."""

    __tablename__ = "thermostats"

    id = Column(Integer, primary_key=True, autoincrement=True, nullable=False)
    name = Column(String, nullable=False)             # device resource name
    label = Column(String, nullable=True)             # user-facing label
    structure_name = Column(String, nullable=False)   # owning structure

    def __repr__(self) -> str:
        return "<Thermostat(label={})>".format(self.label)
class Event(Base):
    """Describes a single calendar event stored in the local cache."""

    __tablename__ = "events"
    __table_args__ = (
        # The same gcal event may recur; uniqueness is per occurrence.
        UniqueConstraint(
            "event_id", "scheduled_date", "calendar_id", name="event_id__date__cal__uc"
        ),
        {"sqlite_autoincrement": True},
    )

    id = Column(Integer, primary_key=True, autoincrement=True, nullable=False)
    event_id = Column(String, nullable=False)   # Google Calendar event id
    name = Column(String, nullable=True)        # raw event summary text
    action = Column(Enum(Action), nullable=False)
    calendar_id = Column(String, default="primary")
    parent_event_id = Column(Integer, nullable=True)
    state = Column(Enum(State), nullable=False, default=State.waiting)
    scheduled_date = Column(ArrowType, nullable=True)
    actioned_date = Column(ArrowType, nullable=True)
    description = Column(String, nullable=True)
    structure_name = Column(String, default="", nullable=False)
    structure_id = Column(String, default="", nullable=False)

    def __str__(self):
        return f"<Event {self.action}/{self.state}/{self.scheduled_date}>"

    def __repr__(self):
        return str(self)

    @staticmethod
    def waiting():
        """Return all waiting events"""
        return session.query(Event).filter(Event.state == State.waiting).all()

    @staticmethod
    def exists(event_id, scheduled_date, state=State.waiting):
        """Returns True if the event_id exists, False otherwise"""
        return session.query(
            exists().where(
                and_(
                    Event.event_id == event_id,
                    Event.scheduled_date == scheduled_date,
                    Event.state == state,
                )
            )
        ).scalar()

    @staticmethod
    def create_from_gcal(gcal_event, commit=True):
        """Build (and optionally persist) an Event from a gcal event dict.

        The event summary is expected to look like "<prefix>: <action>"
        or "<prefix>: <action>: <description>".  Duplicate inserts are
        silently ignored (the unique constraint triggers a rollback).
        """
        e = Event(
            name=gcal_event["summary"], event_id=gcal_event["id"], state=State.waiting
        )
        e.actioned_date = None
        parts = e.name.split(":")
        if len(parts) == 2:
            e.action = Action[parts[1].strip()]
        elif len(parts) == 3:
            e.action = Action[parts[1].strip()]
            e.description = parts[2].strip()
        else:
            print_log(f'WARNING: Cannot parse event name: "{e.name}"')
        if "date" in gcal_event["start"]:
            # The user has an "all day" event in gcal; pick the configured
            # default time for the action.
            # BUG FIX: the original compared `e.action.value == Action.home`
            # (an int against an enum member), which is always False, so
            # "home" events always received the *away* default time.
            # Comparing the members directly also stays safe (False) when
            # name parsing failed and e.action was never set.
            default_time = (
                settings.calendar.default_home_time
                if e.action == Action.home
                else settings.calendar.default_away_time
            )
            e.scheduled_date = arrow.get(
                gcal_event["start"]["date"]
                + " "
                + default_time
                + " "
                + settings.calendar.timezone,
                "YYYY-MM-DD H:mm ZZZ",
            )
        else:
            # NOTE: 'dateTime' includes the timezone
            e.scheduled_date = get_scheduled_date(gcal_event)
        if commit:
            try:
                session.add(e)
                session.commit()
            except IntegrityError:
                # Duplicate (event_id, scheduled_date, calendar_id) — keep
                # the existing row and discard this insert.
                session.rollback()
        return e

    @staticmethod
    def events_missing(gcal_event_list):
        """Return cached waiting events that no longer match Google's list.

        An event is "missing" if its cached scheduled_date differs from
        the one Google now reports, or if its id is absent from the list
        entirely.  An empty list means everything waiting is missing.
        """
        result = []
        # If there are no events returned, return all waiting events.
        if not gcal_event_list:
            return session.query(Event).filter(Event.state == State.waiting).all()
        for gcal_event in gcal_event_list:
            scheduled_date = get_scheduled_date(gcal_event)
            events = (
                session.query(Event)
                .filter(
                    and_(
                        Event.event_id == gcal_event["id"], Event.state == State.waiting
                    )
                )
                .all()
            )
            if not events:
                continue
            # Same id but a different time means the event was moved.
            result += [x for x in events if x.scheduled_date != scheduled_date]
        # Ensure that future events cached in the DB show up in the list from google
        gcal_ids = [x["id"] for x in gcal_event_list]
        removed_events = (
            session.query(Event).filter(
                and_(Event.state == State.waiting, Event.event_id.notin_(gcal_ids))
            )
        ).all()
        result += removed_events
        return result

    def mark_event_missing(self):
        """Mark this event as removed from the calendar and persist."""
        self.state = State.removed
        session.add(self)
        session.commit()

    def mark_event_done(self):
        """Mark this event as actioned/complete and persist."""
        self.state = State.complete
        session.add(self)
        session.commit()

    def commit(self):
        """Persist this event; roll back and re-raise on a duplicate."""
        try:
            session.add(self)
            session.commit()
            return True
        except IntegrityError:
            session.rollback()
            raise
| 6,080 | 1,779 |