text string | size int64 | token_count int64 |
|---|---|---|
from abc import ABC
from typing import List, Type
from reamber.base.lists.notes.NoteList import NoteList
class BMSNoteList(NoteList, ABC):
    """Abstract list of BMS notes; extends NoteList with sample access."""

    def data(self) -> List[Type]:
        # Placeholder: concrete subclasses supply the actual note data.
        pass

    def samples(self) -> List[float]:
        """Return the 'sample' attribute of every note in this list."""
        return self.attribute('sample')
| 261 | 81 |
import os
import sys
import re
import json
import logging
import traceback

###from pylons import request, response, session, app_globals, tmpl_context, url as c, config
from pylons import request, response, session, app_globals, tmpl_context as c, config, url
from pylons.controllers.util import abort, redirect
from pylons.decorators import jsonify
from authkit.authorize.pylons_adaptors import authorize,authorized
import sqlalchemy as sa
from sqlalchemy.orm.attributes import manager_of_class as manager

from config import Config
from cyberweb.lib.base import BaseController, render
from cyberweb.lib import auth, helpers as h
from cyberweb import model
from cyberweb.model import meta, JobState, Job, Message, Group, \
    GroupDefinition, User, Service, ServiceName, Account, \
    Resource, Protocol
log = logging.getLogger(__name__)

# Resolve the password-encryption callable named in the ini file as
# "module.path:function".  Falls back to a no-op when it cannot be loaded.
myclass, myfunc = config.get('authkit.form.authenticate.user.encrypt', ':').split(':')
mysecret = config.get('authkit.form.authenticate.user.encrypt.secret', '')
try:
    # importlib avoids exec() on a config-derived string, and the narrowed
    # except no longer swallows KeyboardInterrupt/SystemExit.
    import importlib
    encrypt = getattr(importlib.import_module(myclass), myfunc)
except Exception:
    log.error('No encrypt function is being used for passwords!(%s.%s)', myclass, myfunc)
    # No-op fallback: passwords are stored unencrypted.
    encrypt = lambda x, y: x
class GsicredsController(BaseController):
@authorize(auth.is_valid_user)
def __before__(self):
    """Run before every action; @authorize rejects non-logged-in users.
    Nothing else to do here."""
    pass
def index(self):
    """Entry action: require a logged-in user, then delegate to gsicreds()."""
    c.user = session['user']
    user_id = session.get('user_id')
    if not user_id:
        # Same exception type as before, now with a diagnostic message.
        raise Exception('No user_id in session; user is not logged in')
    return self.gsicreds()
def gsicreds(self):
    """Render the main GSI-credentials page for the logged-in user."""
    user = session['user']
    c.user = user
    c.results = "action: gsicreds"
    c.status = ""
    c.errmessage = ""
    # Make sure the user's GSI directory exists and expose its path.
    c.gsidir = self._get_gsi_dir()
    shortname = config.get('project.shortname', 'CyberWeb')
    c.title = shortname + ' User Page for: ' + session.get('user', 'you')
    return render('/authentication/gsicreds/gsicreds.mako')
def _get_gsi_dir(self):
    """Return (creating it if needed) the per-user GSI directory.

    The base path is populated from development.ini / production.ini
    (cw.cwproj_dir).  Currently local-only; a remote path would need a
    transfer mechanism.
    """
    user = session['user']
    gsidir = config.get('cw.cwproj_dir', '.') + '/' + user + '/' + 'gsi'
    if not os.path.isdir(gsidir):
        try:
            os.makedirs(gsidir)
        except OSError:
            # Surface creation failures instead of hiding them at debug level.
            log.error('Cannot create directory for user %s (%s)' % (user, gsidir))
        else:
            log.debug('Directory created for user %s (%s)' % (user, gsidir))
    return gsidir
###############################################################
# OLD CODE
###############################################################
def old_index(self):
    """DEPRECATED original index action; superseded by index()/gsicreds().

    Collects the user's password-less (authkey-backed) accounts, one per
    hostname, but the result is no longer passed to a template -- the
    action simply redirects to gsicreds.
    """
    user_id = session.get('user_id')
    c.user = session['user']
    if not user_id:
        raise Exception
    # User Info
    user = meta.Session.query(User).filter(User.id == session.get('user_id')).one()
    accounts = meta.Session.query(Account).filter(sa.and_(Account.authkey_id != None , Account.user_id == session.get('user_id')));
    dataString = []
    accountHost = {}
    for account in accounts:
        # Record only the first qualifying account per hostname.
        if accountHost.get(account.resource.hostname, True):
            accountDict = {}
            accountDict['name'] = account.name
            accountDict['hostname'] = account.resource.hostname
            dataString.append(accountDict)
            accountHost[account.resource.hostname] = False
    ## c.passwordLessAccount = dataString
    meta.Session.close()
    c.status = "index"
    c.results = ""
    #return render('/authentication/gsicreds/gsicreds.mako')
    redirect(url(controller='gsicreds', action='gsicreds'))
def gsicreds_info(self):
    """Render the credential-info page, ensuring the user's GSI dir exists.

    NOTE(review): the directory checked/created here lives under
    cw.cwuser_loc, while _get_gsi_dir() uses cw.cwproj_dir -- confirm
    which location is intended.
    """
    c.user = session['user']
    c.results = "action: gsicreds_info"
    c.status = ""
    c.errmessage = ""
    c.userdir = config.get('cw.cwuser_loc', '.')
    c.gsidir = c.userdir + '/' + session['user'] + '/gsi'
    if not os.path.isdir(c.gsidir):
        try:
            os.makedirs(c.gsidir)
        except Exception:
            log.debug('Cannot create directory for user %s (%s)' % (c.user, c.gsidir))
        else:
            log.debug('Directory created for user %s (%s)' % (c.user, c.gsidir))
    c.title = config.get('project.shortname', 'CyberWeb') + ' User Page for: ' + session.get('user', 'you')
    c.gsidump = ''
    return render('/authentication/gsicreds/gsicreds_info.mako')
def gsicreds_create(self):
    """Render the create-credential form page.

    (Removed a duplicate c.user assignment present in the original.)
    """
    c.user = session['user']
    c.results = "action: gsicreds_create"
    c.status = ""
    c.errmessage = ""
    c.userdir = config.get('cw.cwuser_loc', '.')
    c.gsidir = c.userdir + '/' + session['user'] + '/gsi'
    c.title = config.get('project.shortname', 'CyberWeb') + ' User Page for: ' + session.get('user', 'you')
    return render('/authentication/gsicreds/gsicreds_create.mako')
def gsicreds_del(self):
    """Render the delete-credential form page.

    (Removed a duplicate c.user assignment present in the original.)
    """
    c.user = session['user']
    c.results = "action: gsicreds_del"
    c.status = ""
    c.errmessage = ""
    c.userdir = config.get('cw.cwuser_loc', '.')
    c.gsidir = c.userdir + '/' + session['user'] + '/gsi'
    c.title = config.get('project.shortname', 'CyberWeb') + ' User Page for: ' + session.get('user', 'you')
    c.request_params = ''
    return render('/authentication/gsicreds/gsicreds_del.mako')
def gsicreds_del_action(self):
    """Handle the credential-deletion POST; echoes submitted param names."""
    c.user = session['user']
    c.results = "action: gsicreds_del_action"
    # Echo the submitted parameter names back to the template
    # (replaces a manual append loop).
    c.request_params = list(request.params.keys())
    c.status = ""
    c.errmessage = ""
    c.userdir = config.get('cw.cwuser_loc', '.')
    c.gsidir = c.userdir + '/' + session['user'] + '/gsi'
    c.title = config.get('project.shortname', 'CyberWeb') + ' User Page for: ' + session.get('user', 'you')
    return render('/authentication/gsicreds/gsicreds_del.mako')
def gsicreds_renew(self):
    """Render the renew-credential page.

    (Removed a duplicate c.user assignment present in the original.)
    """
    c.user = session['user']
    c.results = "action: gsicreds_renew"
    c.status = ""
    c.errmessage = ""
    c.userdir = config.get('cw.cwuser_loc', '.')
    c.gsidir = c.userdir + '/' + session['user'] + '/gsi'
    c.title = config.get('project.shortname', 'CyberWeb') + ' User Page for: ' + session.get('user', 'you')
    return render('/authentication/gsicreds/gsicreds_renew.mako')
def gsicreds_stat(self):
    """Render the credential-status page.

    BUG FIX: c.results previously said "action: gsicreds_del" (copy-paste
    from gsicreds_del); also removed a duplicate c.user assignment.
    """
    c.user = session['user']
    c.results = "action: gsicreds_stat"
    c.status = ""
    c.errmessage = ""
    c.userdir = config.get('cw.cwuser_loc', '.')
    c.gsidir = c.userdir + '/' + session['user'] + '/gsi'
    c.title = config.get('project.shortname', 'CyberWeb') + ' User Page for: ' + session.get('user', 'you')
    return render('/authentication/gsicreds/gsicreds_stat.mako')
def gsicreds_upload(self):
    """Render the upload-credential page.

    (Removed a duplicate c.user assignment present in the original.)
    """
    c.user = session['user']
    c.results = "action: gsicreds_upload"
    c.status = ""
    c.errmessage = ""
    c.userdir = config.get('cw.cwuser_loc', '.')
    c.gsidir = c.userdir + '/' + session['user'] + '/gsi'
    c.title = config.get('project.shortname', 'CyberWeb') + ' User Page for: ' + session.get('user', 'you')
    return render('/authentication/gsicreds/gsicreds_upload.mako')
#######################################################
#this function is called from gsicreds_create.mako which
#is invoked when myproxy_logon.mako is rendered
def myproxy_logon_action(self):
    """Fetch a GSI proxy via myproxy-logon, driven interactively with pexpect.

    Called from gsicreds_create.mako.  Validates the MyProxy form fields,
    builds the myproxy-logon command line, answers its passphrase prompt,
    classifies the tool's output against known response strings, and
    renders gsicreds.mako with the outcome in c.status / c.results.
    """
    import pexpect
    # --- user data and paths --------------------------------------------
    c.user = session['user']
    userdir = config.get('cw.cwuser_loc', '.')
    gsidir = userdir + '/' + session['user'] + '/gsi'
    try:
        if not os.path.isdir(gsidir):
            os.makedirs(gsidir)
        else:
            log.error('DirCreate exists for %s' % gsidir)
    except OSError:
        log.error('DirCreate FAIL for %s' % gsidir)
    else:
        log.info("DirCreate PASS for %s " % gsidir)
    # --- validate form data ---------------------------------------------
    log.info( "MyProxyLogon: validating GSI credential ")
    c.errmessage = ''
    errflag = 0
    if request.params.get('myproxy_username'):
        c.mp_username = request.params.get('myproxy_username')
    else:
        errstr = "MyProxy Error: username required."
        c.errmessage = c.errmessage + errstr
        log.debug( errstr )
        errflag = 1
    if request.params.get('myproxy_password'):
        c.mp_password = request.params.get('myproxy_password')
    else:
        errstr = "MyProxy Error: password required."
        c.errmessage = c.errmessage + errstr
        log.debug( errstr )
        errflag = 1
    if request.params.get('myproxy_hostname'):
        c.mp_hostname = request.params.get('myproxy_hostname')
    else:
        errstr = "MyProxy Error: hostname required."
        c.errmessage = c.errmessage + errstr
        log.debug( errstr )
        errflag = 1
    if request.params.get('myproxy_port'):
        c.mp_port = request.params.get('myproxy_port')
    else:
        errstr = "MyProxy Error: port required."
        c.errmessage = c.errmessage + errstr
        log.debug( errstr )
        errflag = 1
    if request.params.get('myproxy_lifetime'):
        c.mp_lifetime = request.params.get('myproxy_lifetime')
    else:
        # BUG FIX: the default was the int 8760, which broke the string
        # concatenation when the command line is assembled below.
        c.mp_lifetime = '8760'
    if errflag:
        c.myproxy_cmd = ""
        return render('/authentication/gsicreds/gsicreds_create.mako')
    # --- build the myproxy-logon command --------------------------------
    # -d associates the user DN with the proxy; -n avoids a passphrase on
    # the long-term proxy so CyberWeb can renew automatically.  The command
    # is run through pexpect so we can answer the prompt interactively.
    myproxy_bin = "/usr/local/globus-5.0.2/bin/myproxy-logon"  # ubuntu, fall 2010 updates
    myproxy_cmd = myproxy_bin + " -T "
    myproxy_cmd = myproxy_cmd + " -l " + c.mp_username
    myproxy_cmd = myproxy_cmd + " -t " + c.mp_lifetime
    myproxy_cmd = myproxy_cmd + " -p " + c.mp_port
    myproxy_cmd = myproxy_cmd + " -s " + c.mp_hostname
    userdir = config.get('cw.cwuser_loc', '.')
    # Only one hard-coded credential per user for now; supporting several
    # would need DN-derived or randomised output file names.
    c.gsi_outfile = userdir + '/' + c.user + "/gsi/x509proxy_" + c.user
    myproxy_cmd = myproxy_cmd + " -o " + c.gsi_outfile
    # --- pexpect prompt/response strings (treat as constants) -----------
    iostr1 = 'Enter MyProxy pass phrase:'
    iostr2 = ("A credential has been received for user: %s in %s " % (c.user, c.gsi_outfile))
    iostr3 = ('Trust roots have been installed in /home/carny/.globus/certificates/.')
    errstr1 = 'Failed to receive credentials.'
    errstr2 = 'ERROR from myproxy-server (' + c.mp_hostname + '):'
    errstr3 = 'PAM authentication failed: Permission denied'
    errstr4 = 'unknown myproxy username: ' + c.mp_username
    errstr5 = 'No credentials for renewal authorization.'
    errstr6 = 'Unable to perform password negotiation with server.'
    errstr7 = "Unable to respond to server's authentication challenge."
    errstr8 = 'Error entering passphrase.'
    errstr9 = 'Passphrase must be at least 6 characters long.'
    errstr10 = 'Unknown host "' + c.mp_hostname + '"'
    errstr11 = 'Error trying to run myproxy-logon command.'
    # --- run the external command interactively -------------------------
    c.myproxy_cmd = myproxy_cmd
    child = pexpect.spawn( myproxy_cmd )
    # BUG FIX: the old log line used a malformed format ('... time: %').
    log.debug('MyProxyLogon: (1) Running command: %s' % myproxy_cmd)
    c.status = 'fail1'
    i = -1  # BUG FIX: keep i bound even if child.expect() raises below
    try:
        i = child.expect([iostr1, errstr11, errstr10,
                          pexpect.TIMEOUT, pexpect.EOF])
        log.debug('MyProxyLogon: (1)child.after:: [' + str(child.after) + ']')
    except Exception:
        log.debug('MyProxyLogon: EXCEPTION:: pexpect.spawn(1):: unknown error with call.')
        log.debug('MyProxyLogon: (2)child.before:: [' + str(child.before) + ']')
        log.debug('MyProxyLogon: (2)child:: [' + str(child) + ']')
        log.debug('MyProxyLogon: (2)child.after:: [' + str(child.after) + ']')
        c.results = 'MyProxy Logon: Unknown Error. Please try again or contact web administrator.'
        c.status = 'fail2 i=' + str(i)
        return render('/authentication/gsicreds/gsicreds.mako')
    log.debug( 'MyProxyLogon: pexpect connection ok, condition = '+ str(i) +', iostr1::' + iostr1)
    c.status = 'myproxy connection ok'
    if i == 0:
        # Got the passphrase prompt: send the password, classify the reply.
        log.debug('MyProxyLogin: status i='+str(i)+':: child.sendline:: sending passphrase: %s' % c.mp_password)
        c.status = 'Sending Password'
        try:
            child.sendline(c.mp_password)
            j = child.expect([pexpect.TIMEOUT, iostr2, iostr3, errstr1, errstr8, errstr9,
                              pexpect.EOF], timeout=50)
            log.debug('MyProxyLogin: [j= %s]:: send pwd child:: [%s]' % (j, str(child)))
            c.status = 'gsi credential generated'
            c.results = 'A GSI credential has been received for user ' + c.user + '.'
            c.mp_dn = 'DN info'  # NOTE(review): placeholder -- real DN is never extracted
            # Persist a small info file describing the proxy next to it.
            outstr = 'login: ' + c.mp_username + '\n'
            outstr = outstr + 'hostname: ' + c.mp_hostname + '\n'
            outstr = outstr + 'dn: ' + c.mp_dn + '\n'
            fname = ('%s/%s/gsi/x509proxy_%s_info' % (userdir, c.user, c.user ) )
            log.debug ("Writing info file for %s GSI proxy to file: %s" % (c.mp_username, fname))
            try:
                fout = open( fname, 'w')
                try:
                    fout.write( outstr )
                finally:
                    fout.close()
            except Exception as e:
                errstr = ("Problem writing info file for %s GSI proxy to file: %s" % (c.mp_username, fname))
                log.debug (errstr)
                log.debug ("File Open/Write Exception: %s " % e)
                cla, exc, trbk = sys.exc_info()
                excName = cla.__name__
                try:
                    excArgs = exc.__dict__["args"]
                except KeyError:
                    excArgs = "<no args>"
                # BUG FIX: traceback was used here without being imported
                # (now imported at the top of the module).
                excTb = traceback.format_tb(trbk, 5)
                log.debug ( "[ExcName: %s] [excArgs: %s] [excTb%s]" % (excName, excArgs, excTb))
                c.results = errstr
                return render('/authentication/gsicreds/gsicreds.mako')
        except Exception as e:
            log.debug('MyProxyLogin: EXCEPTION:: pexpect.spawn(2):: child.expect failed: exception= %s ' % e)
            c.status = 'password send exception'
            log.debug('MyProxyLogin exception: %s', c.status)
            return render('/authentication/gsicreds/gsicreds.mako')
        log.debug('MyProxyLogin: STATUS: '+ str(j) )
        # j indexes the expect list above:
        # 0 TIMEOUT, 1 iostr2, 2 iostr3, 3 errstr1, 4 errstr8, 5 errstr9, 6 EOF
        if j == 1:  # credential received -- success
            log.debug("MyProxyLogin: SUCCESS!"+iostr2)
            c.status = 'fail i= ' + str(i) + ', j= ' + str(j)
        elif j == 3:  # failed to receive credentials
            log.debug('MyProxyLogin: err[j='+str(j)+']:: bad input: \n '+ errstr8)
            c.status = 'fail i= ' + str(i) + ', j= ' + str(j)
        elif j == 4:  # error entering passphrase
            log.debug('MyProxyLogin: err[j='+str(j)+']:: bad input: \n '+ errstr9)
            c.status = 'fail i= ' + str(i) + ', j= ' + str(j)
        elif j == 6:  # EOF -- tool exited, possibly asking for password again
            log.debug('MyProxyLogin: err[j=6], asking for c.mp_password again')
            c.status = 'fail i= ' + str(i) + ', j= ' + str(j)
        elif j == 2:  # trust roots installed but no credential -- dig deeper
            c.status = 'fail i= ' + str(i) + ', j= ' + str(j)
            log.debug('MyProxyLogin: err[j= '+str(j)+'] somethings wrong.')
            k = child.expect([errstr2, errstr3, errstr4, errstr5, errstr6, errstr7])
            if k == 2:
                log.debug('MyProxyLogin: err[k= '+str(k)+']:: bad mp_username.\n '+ errstr1+ '\n '+ errstr4)
            elif k == 3:
                log.debug('MyProxyLogin: err[k= '+str(k)+']:: bad c.mp_password.\n '+errstr1+'\n '+errstr5+'\n '+errstr6+'\n '+errstr7)
            else:
                c.status = 'fail i= ' + str(i) + ', j= ' + str(j)
                log.debug('MyProxyLogin: err[k= '+str(k)+']:: unknown, k='+ str(k))
        else:  # TIMEOUT or passphrase-too-short
            c.status = 'fail i= ' + str(i)
            log.debug('MyProxyLogin: err[j= '+str(j)+']:: unkown password/user problem')
    elif i == 1:  # could not run myproxy-logon at all
        c.results = '<p>MyProxyLogin Err[i='+str(i)+']::' + errstr10
        log.debug( c.results )
        c.status = 'fail i= ' + str(i)
    else:
        # unknown host / TIMEOUT / EOF -- possibly a bad port number too.
        log.debug('MyProxyLogin: child:: [' + str(child) + "]")
        c.status = 'Communication failure: response = ' + str(i)
        log.debug('MyProxyLogin: err[i= '+str(i)+']: unknown system/host issue')
    return render('/authentication/gsicreds/gsicreds.mako')
def grid_proxy_info(self):
    """Render the grid-proxy-info page for the logged-in user."""
    c.results = ""
    c.user = session['user']
    # TODO: list credentials from the user's directory (or the DB once it
    # exists) instead of rendering an empty result.
    return render('/authentication/gsicreds/grid_proxy_info.mako')
def grid_proxy_info_action(self):
    """Run grid-proxy-info on the user's proxy file and show its output."""
    c.user = session['user']
    # BUG FIX: c.errmessage was appended to below without initialisation.
    c.errmessage = ''
    if request.params.get('myproxy_username'):
        c.mp_username = request.params.get('myproxy_username')
    else:
        errstr = "MyProxy Error: username required."
        c.errmessage = c.errmessage + errstr
        log.debug( errstr )
    userdir = config.get('cw.cwuser_loc', '.')
    c.userdir = userdir
    c.gsi_outfile = userdir + '/' + c.user + "/gsi/x509proxy_" + c.user
    try:
        globus_dir = '/usr/local/globus-5.0.2/bin'
        cmd = globus_dir + '/grid-proxy-info' + ' -f ' + c.gsi_outfile
        c.general = 'CMD: ' + cmd
        # popen4 merges stdout/stderr so the template sees everything the
        # tool printed.
        fi, foe = os.popen4( cmd, mode='t' )
        results = foe.readlines()
        c.results = results
        fi.close(); foe.close()
    except Exception:
        errstr = ("There are no GSI Credentials for grid user ID: %s" % c.mp_username)
        log.debug (errstr)
        c.results = errstr
        return render('/authentication/gsicreds/gsicreds.mako')
    return render('/authentication/gsicreds/grid_proxy_info.mako')
#############################################
# Manage CyberWeb Services
#############################################
def services(self):
    """Build the data for the account services page.

    Collects every ServiceName, maps each active Resource to the services
    it hosts, and hand-assembles a JSON-like string
    (c.resourceServiceJson) describing each resource, its availability in
    the current session, and its services.
    """
    # Gather the list of services
    c.services = meta.Session.query(ServiceName).distinct().order_by(ServiceName.name)
    c.resources = {}
    for resource in meta.Session.query(Resource).filter(Resource.active == 1).distinct().order_by(Resource.name):
        c.resources[resource.name] = {}
    # Gather the list of services on each resource
    for i in meta.Session.query(Service).distinct():
        if i.resource:
            c.resources.setdefault(i.resource.name, {})[str(i.service_name)] = i.id
    # NOTE(review): JSON built by string concatenation; json.dumps over a
    # list of dicts would be safer against quoting/escaping issues.
    dataString = '['
    resources = meta.Session.query(Resource).filter(Resource.active == 1).distinct().order_by(Resource.name);
    for resource in resources:
        try:
            service = [service for service in meta.Session.query(Service).filter(Service.resource_id == resource.id)];
            if len(service) > 0:
                dataString += '{'
                dataString += '"Resource Id":"%s",' % resource.id
                dataString += '"Resource Name":"%s",' % resource.name
                # Available when the name appears as a key or value of the
                # session's available_resources mapping.
                if session.get('available_resources', {}).has_key(resource.name) or resource.name in session.get('available_resources', {}).values():
                    dataString += '"isResourceAvailable":"true",'
                else:
                    dataString += '"isResourceAvailable":"false",'
                dataString += '"Services":['
                for serviceId in service:
                    dataString += '{'
                    protocol = meta.Session.query(Protocol).filter(Protocol.id == serviceId.protocol_id).first();
                    dataString += '"protocol":"%s",' % protocol.name
                    servicename = [servicename for servicename in meta.Session.query(ServiceName).filter(ServiceName.id == serviceId.servicename_id).all()];
                    for serviceNameId in servicename:
                        try:
                            dataString += '"serviceName":"%s",' % serviceNameId.name
                            #servicetype = meta.Session.query(ServiceType).filter(ServiceType.id == serviceNameId.service_type_id).first();
                            dataString += '"serviceType":"%s"' % serviceNameId.service_type.name
                        except:
                            # Missing service type: emit an empty value.
                            dataString += '"serviceType":""'
                    dataString += '},'
                # Drop the trailing comma inside the Services array.
                dataString = dataString[0:len(dataString)-1];
                dataString += ']'
                dataString += '},'
        except:
            print "Unexpected error:", sys.exc_info()[0]
            raise
    if len(dataString) > 1:
        # Drop the trailing comma after the last resource object.
        dataString = dataString[0:len(dataString)-1];
    dataString += ']'
    c.resourceServiceJson = dataString
    meta.Session.close()
    return render('/account/services.mako')
| 25,785 | 8,096 |
import flair
import numpy as np
import spacy
import tensorflow_hub as hub
import torch
from flair.data import Sentence
from flair.models import SequenceTagger
from nltk.tokenize.treebank import TreebankWordDetokenizer
from sklearn.metrics.pairwise import cosine_similarity
from string import punctuation
from transformers import AutoTokenizer, GPT2LMHeadModel, MT5ForConditionalGeneration, T5ForConditionalGeneration
from .config import DEVICES
class ModelPool:
    """Lazily-loaded pool of NLP models and tokenizers.

    Every property loads its underlying model on first access and caches
    it as a private attribute, so each heavyweight load happens at most
    once per pool instance.
    """

    # Maps a public encoder-decoder name to the property that serves it.
    ENCODER_DECODER2MODEL_TOKENIZER = {
        't5-base': 't5_base',
        't5-large': 't5_large',
        't5-v1_1-base': 't5_v1_1_base',
        'mt5-base': 'mt5_base',
    }

    def encoder_decoder2model_token(self, encoder_decoder):
        """Return the (model, tokenizer) pair registered for *encoder_decoder*.

        Raises KeyError for names not in ENCODER_DECODER2MODEL_TOKENIZER.
        """
        return getattr(self, self.ENCODER_DECODER2MODEL_TOKENIZER[encoder_decoder])

    @property
    def flair_pos_tagger(self):
        """Flair 'upos-fast' POS tagger, placed on DEVICES[1]."""
        if not hasattr(self, '_flair_pos_tagger'):
            # Flair reads its target device from a module-level global.
            flair.device = torch.device(DEVICES[1])
            self._flair_pos_tagger = SequenceTagger.load('upos-fast')
        return self._flair_pos_tagger

    @property
    def gpt2(self):
        """(GPT-2 LM-head model, fast tokenizer) pair."""
        if not hasattr(self, '_gpt2_model'):
            self._gpt2_model = GPT2LMHeadModel.from_pretrained('gpt2')
        if not hasattr(self, '_gpt2_tokenizer'):
            self._gpt2_tokenizer = AutoTokenizer.from_pretrained('gpt2', use_fast=True)
        return self._gpt2_model, self._gpt2_tokenizer

    @property
    def mt5_base(self):
        """(google/mt5-base model, fast tokenizer) pair."""
        if not hasattr(self, '_mt5_base_model'):
            self._mt5_base_model = MT5ForConditionalGeneration.from_pretrained('google/mt5-base')
        if not hasattr(self, '_mt5_base_tokenizer'):
            self._mt5_base_tokenizer = AutoTokenizer.from_pretrained('google/mt5-base', use_fast=True)
        return self._mt5_base_model, self._mt5_base_tokenizer

    @property
    def spacy_model(self):
        """spaCy 'en_core_web_sm' pipeline."""
        if not hasattr(self, '_spacy_model'):
            self._spacy_model = spacy.load('en_core_web_sm')
        return self._spacy_model

    @property
    def t5_base(self):
        """(t5-base model, fast tokenizer) pair."""
        if not hasattr(self, '_t5_base_model'):
            self._t5_base_model = T5ForConditionalGeneration.from_pretrained('t5-base')
        if not hasattr(self, '_t5_base_tokenizer'):
            self._t5_base_tokenizer = AutoTokenizer.from_pretrained('t5-base', use_fast=True)
        return self._t5_base_model, self._t5_base_tokenizer

    @property
    def t5_large(self):
        """(t5-large model, fast tokenizer) pair."""
        if not hasattr(self, '_t5_large_model'):
            self._t5_large_model = T5ForConditionalGeneration.from_pretrained('t5-large')
        if not hasattr(self, '_t5_large_tokenizer'):
            self._t5_large_tokenizer = AutoTokenizer.from_pretrained('t5-large', use_fast=True)
        return self._t5_large_model, self._t5_large_tokenizer

    @property
    def t5_v1_1_base(self):
        """(google/t5-v1_1-base model, fast tokenizer) pair."""
        if not hasattr(self, '_t5_v1_1_base_model'):
            self._t5_v1_1_base_model = T5ForConditionalGeneration.from_pretrained('google/t5-v1_1-base')
        if not hasattr(self, '_t5_v1_1_base_tokenizer'):
            self._t5_v1_1_base_tokenizer = AutoTokenizer.from_pretrained('google/t5-v1_1-base', use_fast=True)
        return self._t5_v1_1_base_model, self._t5_v1_1_base_tokenizer

    @property
    def treebank_word_detokenizer(self):
        """NLTK Treebank word detokenizer."""
        if not hasattr(self, '_treebank_word_detokenizer'):
            self._treebank_word_detokenizer = TreebankWordDetokenizer()
        return self._treebank_word_detokenizer

    @property
    def use(self):
        """Universal Sentence Encoder v4 loaded from TF-Hub."""
        if not hasattr(self, '_use'):
            self._use = hub.load('https://tfhub.dev/google/universal-sentence-encoder/4')
        return self._use
# Shared module-level pool so heavyweight models load at most once per process.
model_pool = ModelPool()
def tokenize(text):
    """Split *text* into token strings with the shared spaCy model."""
    return [tok.text for tok in model_pool.spacy_model(text)]
def detokenize(tokens):
    """Join *tokens* back into a string with the Treebank detokenizer."""
    detok = model_pool.treebank_word_detokenizer
    return detok.detokenize(tokens)
def is_continuous(sequence):
    """True iff *sequence* is a non-empty run of consecutive integers."""
    if not sequence:
        return False
    return all(nxt == cur + 1 for cur, nxt in zip(sequence, sequence[1:]))
def is_punctuation(c):
    """True iff *c* is a single ASCII punctuation character."""
    if len(c) != 1:
        return False
    return c in punctuation
def is_one_word(text):
    """True iff *text* tokenizes to exactly one token."""
    tokens = tokenize(text)
    return len(tokens) == 1
def get_use_sim(text1, text2):
    """Cosine similarity between the USE embeddings of the two texts."""
    embeddings = model_pool.use([text1, text2]).numpy()
    first, second = embeddings
    score = cosine_similarity(first[np.newaxis, ...], second[np.newaxis, ...])[0, 0]
    return score.item()
def get_lcs_len(words1, words2):
    """Length of the longest common subsequence of two word lists."""
    n, m = len(words1), len(words2)
    table = np.zeros((n + 1, m + 1), dtype=int)
    for i, w1 in enumerate(words1, start=1):
        for j, w2 in enumerate(words2, start=1):
            if w1 == w2:
                table[i, j] = table[i - 1, j - 1] + 1
            else:
                table[i, j] = max(table[i - 1, j], table[i, j - 1])
    return table[n, m].item()
def get_num_word_pert(words1, words2):
    """Number of perturbed words between two word lists (case-insensitive)."""
    lower1 = [w.lower() for w in words1]
    lower2 = [w.lower() for w in words2]
    return max(len(lower1), len(lower2)) - get_lcs_len(lower1, lower2)
def get_pos_list(words):
    """Return one POS tag per entry of *words*, preserving the tokenization.

    The lambda tokenizer forces flair to reuse *words* verbatim instead of
    re-tokenizing the detokenized string.
    """
    sentence = Sentence(detokenize(words), use_tokenizer=lambda text: words)
    model_pool.flair_pos_tagger.predict(sentence)
    # _value is flair's internal label string; index 0 is the top label.
    return [token.annotation_layers['pos'][0]._value for token in sentence.tokens]
| 5,305 | 1,962 |
# Insert Interval
class Solution:
    """LeetCode 57 -- Insert Interval."""

    def insert(self, intervals, newInterval):
        """Merge newInterval into a sorted, non-overlapping interval list.

        Returns a new sorted, non-overlapping list; input is unchanged.
        """
        merged = []
        lo, hi = newInterval
        for idx, (start, end) in enumerate(intervals):
            if end < lo:
                # Entirely left of the (possibly grown) new interval.
                merged.append(intervals[idx])
            elif hi < start:
                # First interval strictly to the right: finish early.
                return merged + [[lo, hi]] + intervals[idx:]
            else:
                # Overlap: absorb this interval into the new one.
                lo = min(lo, start)
                hi = max(hi, end)
        merged.append([lo, hi])
        return merged
if __name__ == "__main__":
sol = Solution()
intervals = [[1,3],[6,9]]
newInterval = [2,5]
intervals = [[1,2],[3,5],[6,7],[8,10],[12,16]]
newInterval = [4,18]
print(sol.insert(intervals, newInterval))
| 765 | 249 |
# encoding: UTF-8
""""
基于布林带的交易策略
观察周期:1min
策略周期:5min
策略逻辑:
1. 信号:突破上轨、下轨
2. 过滤:均线多头、空头排列
3. 出场:分级止盈;固定止损
"""
import talib
import numpy as np
from cyvn.trader.vtObject import VtBarData
from cyvn.trader.vtConstant import EMPTY_STRING
from cyvn.trader.app.ctaStrategy.ctaTemplate import CtaTemplate, BarGenerator, ArrayManager
from cyvn.trader.vtConstant import *
########################################################################
class BollingerBotStrategy01(CtaTemplate):
"""基于布林通道的交易策略"""
className = 'BollingerBotStrategy01'
author = 'Y.Raul'
# 策略参数
bollWindow = 28 # 通道窗口数
entryDevUp = 4 # 开仓偏差
entryDevDown = 3.2
# exitDev = 1.2 # 平仓偏差
# trailingPrcnt = 0.4
# 移动止损百分比
maWindow = 10 # 过滤用均线窗口
initDays = 10 # 初始化数据所用的天数
fixedSize = 1 # 每次交易的数量
# 策略变量
bollMid = 0 # 布林带中轨
bollStd = 0 # 布林带宽度
entryUp = 0 # 开仓上轨
# exitUp = 0 # 平仓上轨
entryDown = 0 #开仓下轨
# exitDown = 0 #平仓下轨
dispacedLen = 0 #均线平移长度
maFilter = 0 # 均线过滤
maFilter1 = 0 # 上一期均线
# 分级出场设置
trailingStart1 = 20
trailingStart2 = 30
exitOnTrailingStop1 = 5 # Trailing Stop 距离
exitOnTrailingStop2 = 10 # Trailing Stop 距离
exitOnLossStop = 20 # Loss Stop 距离
# 价格相关变量
intraTradeHigh = 0 # 持仓期内的最高点
intraTradeLow = 0 # 持仓期内的最低点
avgEntryPrice = 0
minDiff = 1
trailingExit = 0 #
stopExit = 0 # 空头止损
# longEntry = 0 # 多头开仓
# shortEntry = 0
# 信号相关变量
buySig = False
shortSig = False
sellSig = False
coverSig = False
# entrusted = False #是否已有委托
orderList = [] # 保存委托代码的列表
# 参数列表,保存了参数的名称
paramList = ['name',
'className',
'author',
'vtSymbol',
'bollWindow',
'entryDevUp',
'entryDevDown',
'trailingStart1',
'trailingStart2',
'exitOnTrailingStop1',
'exitOnTrailingStop2',
'maWindow',
'initDays',
'fixedSize']
# 变量列表,保存了变量的名称
varList = ['inited',
'trading',
'pos',
'buySig',
'shortSig',
'sellSig',
'coverSig',
'entryUp',
'entryDown',
'trailingExit',
'stopExit',
'intraTradeHigh',
'intraTradeLow',
'avgEntryPrice']
# 同步列表
syncList = ['pos',
'intraTradeHigh',
'intraTradeLow']
#----------------------------------------------------------------------
def __init__(self, ctaEngine, setting):
    """Constructor: wire up the bar generator and indicator buffers."""
    super(BollingerBotStrategy01, self).__init__(ctaEngine, setting)
    # Aggregate 1-min bars into 5-min bars; onFiveBar produces the signals.
    self.bm = BarGenerator(self.onBar, 5, self.onFiveBar)
    # Rolling window of the last 30 bars for indicator calculations.
    self.am = ArrayManager(30)
    self.orderList = []
    self.entryPriceList = []
#----------------------------------------------------------------------
def onInit(self):
    """Initialise the strategy (CtaTemplate callback)."""
    self.writeCtaLog('%s策略初始化' %self.name)
    # Load historical bars and replay them to warm up the indicators.
    initData = self.loadBar(self.initDays)
    for bar in initData:
        self.onBar(bar)
    self.putEvent()
#----------------------------------------------------------------------
def onStart(self):
    """Start the strategy (CtaTemplate callback)."""
    self.writeCtaLog('%s策略启动' %self.name)
    self.putEvent()
#----------------------------------------------------------------------
def onStop(self):
    """Stop the strategy (CtaTemplate callback)."""
    self.writeCtaLog('%s策略停止' %self.name)
    self.putEvent()
#----------------------------------------------------------------------
def onTick(self, tick):
    """Handle a tick push: feed it to the 5-min bar generator."""
    self.bm.updateTick(tick)
#----------------------------------------------------------------------
def onBar(self, bar):
    """Handle a 1-minute bar push.

    Forwards the bar to the 5-minute aggregator (signal generation happens
    in onFiveBar) and then acts on any pending entry/exit signals.  Exit
    orders use the trailing-stop price unless the close has already
    crossed the hard stop.  Backtest bars carry the bar's *start* time.

    (Removed large blocks of commented-out logging/dead code, and fixed an
    inconsistency where the short branch stored its order ids via self.res
    while every other branch used a local.)
    """
    self.bm.updateBar(bar)
    self.date = bar.date
    self.time = bar.time

    if self.buySig:
        res = self.buy(bar.close, self.fixedSize, True)
        # Keep only the order-id part of "gateway.orderid" strings.
        self.orderList.extend([x.split('.')[1] for x in res])
        self.buySig = False
        self.saveSyncData()

    if self.shortSig:
        # BUG FIX: was assigned to self.res; a local is all that is needed.
        res = self.short(bar.close, self.fixedSize, True)
        self.orderList.extend([x.split('.')[1] for x in res])
        self.shortSig = False
        self.saveSyncData()

    if self.sellSig:
        # Close a long: use the trailing-stop price unless the close has
        # already fallen through it.
        if bar.close > self.stopExit:
            price = self.trailingExit
        else:
            price = bar.close
        res = self.sell(price, abs(self.pos), True)
        self.sellSig = False
        self.saveSyncData()

    if self.coverSig:
        # Close a short: mirror image of the sell branch above.
        if bar.close < self.stopExit:
            price = self.trailingExit
        else:
            price = bar.close
        res = self.cover(price, abs(self.pos), True)
        self.coverSig = False
        self.saveSyncData()

    self.putEvent()
#----------------------------------------------------------------------
    def onFiveBar(self, bar):
        """Handle a new 5-minute bar: compute indicators and raise entry/exit signals.

        Entries use a displaced-SMA Bollinger band breakout gated by a
        two-point SMA trend filter; exits use two-level trailing stops plus a
        fixed loss stop (set in onOrder when the entry fills).
        """
        # Keep the bar buffer up to date; wait until enough history is cached.
        self.am.updateBar(bar)
        if not self.am.inited:
            return
        # Cancel all outstanding orders (limit and stop) before re-evaluating.
        self.cancelAll()
        # Displaced Bollinger mid (attribute name `dispacedLen` kept as-is),
        # upper/lower entry bands, and the two most recent trend-filter SMAs.
        self.bollMid = self.am.sma(self.bollWindow,True)[-1 * (self.dispacedLen + 1)]
        self.bollStd = self.am.std(self.bollWindow)
        self.entryUp = round(self.bollMid + self.bollStd * self.entryDevUp)
        self.entryDown = round(self.bollMid - self.bollStd * self.entryDevDown)
        maArray = self.am.sma(self.maWindow, True)
        self.maFilter = round(maArray[-1])
        self.maFilter1 = round(maArray[-2])
        # No open position: reset per-trade tracking state and look for entries.
        if self.pos == 0:
            self.intraTradeHigh = bar.high
            self.intraTradeLow = bar.low
            self.entryPriceList = []
            self.orderList =[]
            self.avgEntryPrice = 0
            if bar.close > self.maFilter and self.maFilter > self.maFilter1:
                # Bullish MA filter passed: go long on an upper-band breakout.
                if bar.close >= self.entryUp:
                    self.buySig = True
            if bar.close < self.maFilter and self.maFilter < self.maFilter1:
                # Bearish MA filter passed: go short on a lower-band breakdown.
                if bar.close <= self.entryDown:
                    self.shortSig = True
        # Position open: track intra-trade extremes and evaluate stops.
        else:
            self.intraTradeHigh = max(self.intraTradeHigh, bar.high)
            self.intraTradeLow = min(self.intraTradeLow, bar.low)
            if self.pos > 0:
                # Level-2 trailing stop: armed once profit reaches
                # trailingStart2 ticks; exit on a pullback of
                # exitOnTrailingStop2 ticks from the trade high.
                if self.intraTradeHigh >= self.avgEntryPrice + self.trailingStart2 * self.minDiff:
                    if (bar.close <= self.intraTradeHigh - self.exitOnTrailingStop2 * self.minDiff):
                        self.trailingExit = self.intraTradeHigh - self.exitOnTrailingStop2 * self.minDiff
                        self.sellSig = True
                # Level-1 trailing stop: armed at the smaller trailingStart1 profit.
                elif self.intraTradeHigh >= self.avgEntryPrice + self.trailingStart1 * self.minDiff:
                    if (bar.close <= self.intraTradeHigh - self.exitOnTrailingStop1 * self.minDiff):
                        self.trailingExit = self.intraTradeHigh - self.exitOnTrailingStop1 * self.minDiff
                        self.sellSig = True
                # Fixed loss stop (stopExit == 0 means "not set yet").
                elif self.stopExit != 0:
                    if (bar.close <= self.stopExit):
                        self.sellSig = True
            elif self.pos < 0:
                # Mirror of the long-side stop logic for short positions.
                if self.intraTradeLow <= self.avgEntryPrice - self.trailingStart2 * self.minDiff:
                    if (bar.close >= self.intraTradeLow + self.exitOnTrailingStop2 * self.minDiff):
                        self.trailingExit = self.intraTradeLow + self.exitOnTrailingStop2 * self.minDiff
                        self.coverSig = True
                elif self.intraTradeLow <= self.avgEntryPrice - self.trailingStart1 * self.minDiff:
                    if (bar.close >= self.intraTradeLow + self.exitOnTrailingStop1 * self.minDiff):
                        self.trailingExit = self.intraTradeLow + self.exitOnTrailingStop1 * self.minDiff
                        self.coverSig = True
                elif self.stopExit != 0:
                    if (bar.close >= self.stopExit):
                        self.coverSig = True
        # Persist sync state and emit a status-update event.
        self.saveSyncData()
        self.putEvent()
#----------------------------------------------------------------------
    def onOrder(self, order):
        """Handle an order-status push (must be implemented by the strategy subclass).

        When an opening order is fully filled, record its price and refresh
        the average entry price and the fixed loss-stop level.
        """
        # CTA engine order directions for reference:
        # CTAORDER_BUY = open long, CTAORDER_SELL = close long,
        # CTAORDER_SHORT = open short, CTAORDER_COVER = close short.
        if order.direction == DIRECTION_LONG and order.offset == OFFSET_OPEN:
            if order.totalVolume == order.tradedVolume:
                # Fully filled long entry: update the entry-price list and average.
                self.entryPriceList.append(order.price)
                self.avgEntryPrice = sum(self.entryPriceList) / len(self.entryPriceList)
                self.stopExit = self.avgEntryPrice - self.exitOnLossStop * self.minDiff  # fixed loss-stop price
        elif order.direction == DIRECTION_SHORT and order.offset == OFFSET_OPEN:
            if order.totalVolume == order.tradedVolume:
                # Fully filled short entry: update the entry-price list and average.
                self.entryPriceList.append(order.price)
                self.avgEntryPrice = sum(self.entryPriceList) / len(self.entryPriceList)
                self.stopExit = self.avgEntryPrice + self.exitOnLossStop * self.minDiff  # fixed loss-stop price
        self.putEvent()
#----------------------------------------------------------------------
    def onTrade(self, trade):
        """Handle a trade (fill) push; currently only emits a status-update event."""
        # NOTE(review): `data` is captured but never used — looks like leftover
        # scaffolding from removed logging.
        data = trade.__dict__
        self.putEvent()
#----------------------------------------------------------------------
    def onStopOrder(self, so):
        """Handle a stop-order push; currently only emits a status-update event."""
        # NOTE(review): `data` is captured but never used, same as onTrade.
        data = so.__dict__
        self.putEvent()
if __name__ == "__main__":
    # Backtest driver: run the Bollinger strategy over minute bars and
    # export the per-trade results to CSV.
    from cyvn.trader.app.ctaStrategy.ctaBacktesting import BacktestingEngine, OptimizationSetting, MINUTE_DB_NAME
    dbName = MINUTE_DB_NAME
    symbol = 'rb88'
    # Create the backtesting engine.
    engine = BacktestingEngine()
    # Point the engine at the historical data.
    engine.setBacktestingMode(engine.BAR_MODE)    # bar-by-bar backtest mode
    engine.setDatabase(dbName, symbol)            # historical database / symbol
    engine.setStartDate('20130101',10)            # start date (second arg: warm-up days)
    engine.setEndDate('20171231')
    # Cost and contract parameters.
    engine.setSlippage(0)         # slippage in price units
    engine.setRate(1.1 / 10000)   # commission rate (1.1 per 10000)
    engine.setSize(10)            # contract multiplier
    engine.setPriceTick(1)        # minimum price tick
    engine.setCapital(10000)      # starting capital
    # Load the strategy class from the current package.
    # NOTE(review): a relative import only works when this file is run as a
    # module (python -m pkg.module); as a plain script it raises ImportError.
    from .strategyBollingerBot01 import BollingerBotStrategy01
    # Empty dict -> use the strategy's default parameters.
    d = {}
    engine.initStrategy(BollingerBotStrategy01, d)
    # Run the backtest.
    engine.runBacktesting()
    d = engine.calculateBacktestingResult()
    # Mirror the engine's log lines into <className>_backtest.log.
    import logging
    logger = logging.getLogger("backtest")
    fh = logging.FileHandler('./{0}_backtest.log'.format(engine.strategy.className))
    logger.setLevel(logging.INFO)
    logger.addHandler(fh)
    for log in engine.logList:
        logger.info(log)
    # Flatten the per-trade result objects into parallel columns.
    result = d['resultList']
    entryDate = []
    entryPrice = []
    exitDate = []
    exitPrice = []
    volume = []
    pnl = []
    for trade in result:
        dic = trade.__dict__
        entryDate.append(dic['entryDt'])
        entryPrice.append(dic['entryPrice'])
        exitDate.append(dic['exitDt'])
        exitPrice.append(dic['exitPrice'])
        volume.append(dic['volume'])
        pnl.append(dic['pnl'])
    # Export the trade list as <className>_result.csv.
    import pandas as pd
    data = {'entryDate': entryDate, 'entryPrice': entryPrice, 'exitDate':exitDate, 'exitPrice':exitPrice, 'volume':volume, 'pnl':pnl}
    df = pd.DataFrame(data)
    df.to_csv('./{0}_result.csv'.format(engine.strategy.className), index=False)
# -*- coding: utf-8 -*-
import os
import re
from selenium import webdriver
from xvfbwrapper import Xvfb
from cabu.exceptions import DriverException
from cabu.utils.headers import Headers
from selenium.webdriver.common.desired_capabilities import DesiredCapabilities
from selenium import webdriver
try:
from urllib.parse import urlsplit
except ImportError: # pragma: no cover
from urlparse import urlsplit # flake8: noqa
def load_vdisplay(config):
    """Start a virtual X display (Xvfb) when running headless.

    Args:
        config (dict): The configuration loaded previously in Cabu.

    Returns:
        A started Xvfb wrapper instance when ``config['HEADLESS']`` is
        truthy, otherwise None.
    """
    if not config['HEADLESS']:
        return None
    virtual_display = Xvfb(
        width=config['DRIVER_WINDOWS_WIDTH'],
        height=config['DRIVER_WINDOWS_HEIGHT']
    )
    virtual_display.start()
    return virtual_display
def unload_vdisplay(vdisplay):
    """Shutdown given Xvfb instance.

    Args:
        vdisplay (XvfbWrapper): The running virtual X server.

    Note:
        No None-guard here: callers must only pass a started display
        (load_vdisplay returns None when not headless).
    """
    vdisplay.stop()
def load_driver(config, vdisplay=None):
    """Initialize the webdriver selected in the given configuration.

    Args:
        config (dict): The configuration loaded previously in Cabu.
        vdisplay: Optional Xvfb wrapper, forwarded to DriverException so the
            virtual display can be torn down on failure.

    Returns:
        webdriver (selenium.webdriver): A configured webdriver instance, or
        None when no driver name is configured.
    """
    driver_name = config['DRIVER_NAME']
    if driver_name == 'Firefox':
        driver = load_firefox(config)
    elif driver_name == 'Chrome':
        driver = load_chrome(config)
    elif driver_name == 'PhantomJS':
        driver = load_phantomjs(config)
    elif not driver_name:
        # Explicitly disabled (None / empty string): no browser needed.
        return None
    else:
        raise DriverException(vdisplay, 'Driver unrecognized.')

    # Common post-configuration shared by every driver type.
    driver.set_page_load_timeout(config['DRIVER_PAGE_TIMEOUT'])
    driver.set_window_size(config['DRIVER_WINDOWS_WIDTH'], config['DRIVER_WINDOWS_HEIGHT'])
    return driver
def unload_driver(driver):
    """Shutdown given webdriver instance.

    Args:
        driver (selenium.webdriver): The running webdriver.

    Note:
        quit() closes every window and ends the driver process, unlike
        close(), which only closes the current window.
    """
    driver.quit()
def load_firefox(config):
    """Start Firefox webdriver with the given configuration.

    Args:
        config (dict): The configuration loaded previously in Cabu.

    Returns:
        webdriver (selenium.webdriver): An instance of Firefox webdriver.
    """
    binary = None
    profile = webdriver.FirefoxProfile()
    if os.environ.get('HTTPS_PROXY') or os.environ.get('HTTP_PROXY'):
        proxy_address = os.environ.get('HTTPS_PROXY', os.environ.get('HTTP_PROXY'))
        # Firefox expects the bare hostname in `network.proxy.http` and an
        # integer in `network.proxy.http_port`. The previous code set the
        # full URL (scheme included) as the host and the port as a string.
        # Assumes the proxy env var is URL-formed (scheme://host:port) —
        # scheme-less values were broken before this change too.
        proxy_parts = urlsplit(proxy_address)
        profile.set_preference('network.proxy.type', 1)
        profile.set_preference('network.proxy.http', proxy_parts.hostname)
        profile.set_preference('network.proxy.http_port', proxy_parts.port)
        profile.update_preferences()
    if 'HEADERS' in config and config['HEADERS']:
        # Inject custom request headers into the profile.
        profile = Headers(config).set_headers(profile)
    if config['DRIVER_BINARY_PATH']:
        # Deferred import: only needed when a custom binary path is set.
        from selenium.webdriver.firefox.firefox_binary import FirefoxBinary
        binary = FirefoxBinary(config['DRIVER_BINARY_PATH'])
    return webdriver.Firefox(firefox_binary=binary, firefox_profile=profile)
def load_chrome(config):
    """Start Chrome webdriver with the given configuration.

    Args:
        config (dict): The configuration loaded previously in Cabu.
            Currently unused; kept so every loader shares the same signature
            (see load_driver's dispatch).

    Returns:
        webdriver (selenium.webdriver): An instance of Chrome webdriver.

    Note:
        Unlike load_firefox/load_phantomjs, no proxy or header support is
        wired up here.
    """
    return webdriver.Chrome()
def load_phantomjs(config):
    """Start PhantomJS webdriver with the given configuration.

    Args:
        config (dict): The configuration loaded previously in Cabu.

    Returns:
        webdriver (selenium.webdriver): An instance of phantomJS webdriver.
    """
    dcap = dict(DesiredCapabilities.PHANTOMJS)
    service_args = [
        '--ignore-ssl-errors=true',
        '--ssl-protocol=any',
        '--web-security=false'
    ]
    if os.environ.get('HTTPS_PROXY') or os.environ.get('HTTP_PROXY'):
        proxy_address = os.environ.get('HTTPS_PROXY', os.environ.get('HTTP_PROXY'))
        # The previous regex only matched `http://` URLs and crashed with an
        # AttributeError (`.group` on None) for `https://` proxies even
        # though HTTPS_PROXY is checked first. urlsplit handles any scheme;
        # for scheme-less values fall back to the raw address.
        proxy_netloc = urlsplit(proxy_address).netloc or proxy_address
        service_args.append('--proxy=%s' % proxy_netloc)
        service_args.append('--proxy-type=http')
    if 'HEADERS' in config and config['HEADERS']:
        # Inject custom request headers into the capabilities.
        dcap = Headers(config).set_headers(dcap)
    return webdriver.PhantomJS(
        desired_capabilities=dcap,
        service_args=service_args,
        service_log_path=os.path.devnull
    )
| 4,577 | 1,392 |
import pygame as pg

# Main window, frame clock and shared animation state for the game loop.
display = pg.display.set_mode((900, 600))
clock = pg.time.Clock()
jump = 10
step = 0
img = 0

# Eight-frame walk cycles, one list per facing direction (l1..l8 / r1..r8).
left = [pg.image.load('images/character/l%d.png' % frame) for frame in range(1, 9)]
right = [pg.image.load('images/character/r%d.png' % frame) for frame in range(1, 9)]

# Standing and jumping poses.
left_stand = pg.image.load('images/character/l.png')
right_stand = pg.image.load('images/character/r.png')
left_jump = pg.image.load('images/character/4.png')
right_jump = pg.image.load('images/character/3.png')

# Health-bar sprites h1..h6 (h1 is always drawn first, the others are
# overlaid depending on remaining health — see player()) and the HUD panel.
h1 = pg.image.load('images/character/h1.png')
h2 = pg.image.load('images/character/h2.png')
h3 = pg.image.load('images/character/h3.png')
h4 = pg.image.load('images/character/h4.png')
h5 = pg.image.load('images/character/h5.png')
h6 = pg.image.load('images/character/h6.png')
p = pg.image.load('images/character/p.png')
class fire(object):
    # Projectile sprites: `b` for the player's bullets, `eb` for enemy bullets.
    b = pg.image.load('images/character/b.png')
    eb = pg.image.load('images/enemy/eb.png')
    def __init__(self,x,y,facing):
        # `facing` signs the horizontal velocity — presumably +1 for right
        # and -1 for left; confirm against the caller.
        self.x = x
        self.y = y
        self.facing = facing
        self.vel = 17*facing # Bullets velocity (pixels per frame, signed by facing)
    def draw(self,display):
        # Draw a player bullet at its current position.
        display.blit(self.b,(self.x,self.y))
    def draw2(self,display):
        # Draw an enemy bullet at its current position.
        display.blit(self.eb,(self.x,self.y))
def player(display,xin,yin,change,lm,rm,rs,ls,jump,jump_h,neg,bullet,health):
    """Advance and draw the player for one frame.

    Args:
        display: Target pygame surface.
        xin, yin: Current player position.
        change: Horizontal speed in pixels per frame.
        lm, rm: Moving-left / moving-right flags.
        rs, ls: Standing-right / standing-left flags.
        jump: True while a jump is in progress.
        jump_h: Jump counter, 10 down to -11 (positive = rising).
        neg: Sign of the vertical offset (+1 rising, -1 falling).
        bullet: Iterable of fire objects to draw.
        health: Remaining health, displayed in 2-point segments (1-10).

    Returns:
        Updated (xin, yin, jump_h, neg, jump) tuple.
    """
    global step
    delta = 0
    if lm:
        delta = -change
        step += 1
    elif rm:
        delta = change
        step += 1
    if jump:
        if jump_h >= -10:
            # Parabolic arc: rise while jump_h is positive, fall while negative.
            # (Removed the dead `l_jump`/`r_jump` locals that were never read.)
            neg = 1 if jump_h >= 0 else -1
            yin -= (jump_h**2)*0.30*neg
            jump_h -= 1
        else:
            # Jump finished: reset the counter for the next jump.
            jump = False
            jump_h = 10
    xin += delta
    # Each walk frame is shown for 3 ticks; 8 frames * 3 = 24-tick cycle.
    img = (step//3)
    if step+1 >= 24:
        step = 0
    if jump:
        if rs or rm:
            display.blit(right_jump, [xin, yin])
        elif ls or lm:
            display.blit(left_jump, [xin, yin])
    else:
        if lm:
            display.blit(left[img], [xin, yin])
        elif rm:
            display.blit(right[img], [xin, yin])
        elif rs:
            display.blit(right_stand, [xin, yin])
        elif ls:
            display.blit(left_stand, [xin, yin])
    display.blit(p, [50, 40])
    for shot in bullet:
        shot.draw(display)
    # Health bar: draw the base h1 image, then overlay the sprite matching
    # the remaining health (2 health points per overlay step).
    display.blit(h1, [75, 50])
    health_overlays = {10: h6, 9: h6, 8: h5, 7: h5, 6: h4,
                       5: h4, 4: h3, 3: h3, 2: h2, 1: h2}
    overlay = health_overlays.get(health)
    if overlay is not None:
        display.blit(overlay, [75, 50])
    return xin, yin, jump_h, neg, jump
| 3,256 | 1,254 |
#!/usr/bin/env python3
import picamera
import file_utils
import os
class PiCam:
    """
    Uses Raspberry Pi camera.
    http://picamera.readthedocs.org/en/release-1.9/api.html
    """
    def __init__(self):
        # Acquire the camera; PiCamera is an exclusive hardware resource,
        # so only one open instance may exist at a time (see __del__).
        self.camera = picamera.PiCamera()
        """ PiCamera properties default values
        camera.sharpness = 0
        camera.contrast = 0
        camera.brightness = 50
        camera.saturation = 0
        camera.ISO = 0
        camera.video_stabilization = False
        camera.exposure_compensation = 0
        camera.exposure_mode = 'auto'
        camera.meter_mode = 'average'
        camera.awb_mode = 'auto'
        camera.image_effect = 'none'
        camera.color_effects = None
        camera.rotation = 0
        camera.hflip = False
        camera.vflip = False
        camera.crop = (0.0, 0.0, 1.0, 1.0)
        """
        # Flip both axes — presumably the camera module is mounted inverted
        # in this rig; confirm against the hardware before changing.
        self.camera.hflip = True
        self.camera.vflip = True
    def __del__(self):
        """ class destructor.
        Close camera to avoid error picamera.exc.PiCameraMMALError
        Camera component couldn't be enabled: Out of resources (other than memory)
        http://stackoverflow.com/questions/27468543/
        picamera-cannot-be-initialized-as-a-class-member-when-the-script-is-run-from-com
        https://github.com/waveform80/picamera/issues/35

        NOTE(review): __del__ timing is interpreter-dependent; an explicit
        close() or context manager would release the camera deterministically.
        """
        self.camera.close()
    def take_picture(self, camera, dir_name, base_name):
        """ Use arguments for dependency injection.
        This way unit tests can call with a mock camera.

        Args:
            camera: object exposing capture(path) (e.g. self.camera).
            dir_name: directory the image file is written into.
            base_name: base file name; a timestamp is appended by
                file_utils.FileUtils.filename_with_timestamp.
        """
        file_name_no_dir = file_utils.FileUtils.filename_with_timestamp(base_name)
        image_name = os.path.join(dir_name, file_name_no_dir)
        camera.capture(image_name)
| 1,721 | 561 |
# Composable config (`_base_` inheritance): FlowNetS model trained on the
# FlyingChairs 384x448 dataset with the "s_long" schedule and the default
# runtime settings. This file only combines the listed base configs.
_base_ = [
    '../_base_/models/flownets.py',
    '../_base_/datasets/flyingchairs_384x448.py',
    '../_base_/schedules/schedule_s_long.py', '../_base_/default_runtime.py'
]
from CalibTracker.SiStripCommon.shallowTree_test_template import *
# Test configuration for ShallowRechitClustersProducer: run the producer on
# matched strip rechits and dump only its products into a ShallowTree.
# (`process` and `cms` come from the star-imported test template.)
process.TFileService.fileName = 'test_shallowRechitClustersProducer.root'
process.load('RecoTracker.TrackProducer.TrackRefitters_cff')
process.load('CalibTracker.SiStripCommon.ShallowRechitClustersProducer_cfi')
process.testTree = cms.EDAnalyzer(
    "ShallowTree",
    outputCommands = cms.untracked.vstring(
        'drop *',
        # Keep only the shallowRechitClusters products in the output tree.
        'keep *_shallowRechitClusters_*_*',
        )
    )
process.p = cms.Path(
    process.siStripMatchedRecHits*
    process.shallowRechitClusters*
    process.testTree
    )
from typing import Sequence
import numpy as np
import h5py
from easistrain.EDD.io import (
create_info_group,
peak_dataset_data,
save_fit_data,
)
from easistrain.EDD.utils import fit_detector_data, run_from_cli
def fitEDD(
    fileRead: str,
    fileSave: str,
    sample: str,
    dataset: str,
    scanNumber: int,
    nameHorizontalDetector: str,
    nameVerticalDetector: str,
    positioners: Sequence[str],
    numberOfBoxes: int,
    nbPeaksInBoxes: Sequence[int],
    rangeFitHD: Sequence[int],
    rangeFitVD: Sequence[int],
):
    """Fit the peaks of one energy-dispersive diffraction scan and save the
    raw patterns, fit results and per-peak position data to an HDF5 file.

    Args:
        fileRead: Path of the HDF5 file holding the raw scan data.
        fileSave: Path of the HDF5 file results are appended to.
        sample, dataset, scanNumber: Identify the
            ``{sample}_{dataset}_{scanNumber}.1`` group to process.
        nameHorizontalDetector, nameVerticalDetector: Names of the two
            detector datasets under the scan's ``measurement`` group.
        positioners: Positioner names to copy into the results; only the
            first 6 are used as angle columns.
        numberOfBoxes: Number of fit windows (recorded in the info group).
        nbPeaksInBoxes: Expected number of peaks in each fit window.
        rangeFitHD, rangeFitVD: Flattened (min, max) channel pairs per
            window for the horizontal / vertical detector.
    """
    print(f"Fitting scan n.{scanNumber}")
    with h5py.File(fileRead, "r") as h5Read:  ## Read the h5 file of raw data
        scan_meas = h5Read.get(
            f"{sample}_{dataset}_{scanNumber}.1/measurement",
            default=None,
        )
        # Bail out early when either detector pattern is missing.
        if (
            not isinstance(scan_meas, h5py.Group)
            or nameHorizontalDetector not in scan_meas
            or nameVerticalDetector not in scan_meas
        ):
            print("No pattern was saved in this scan")
            return
        h5Save = h5py.File(fileSave, "a")  ## create/append h5 file to save in
        scanGroup = h5Save.create_group(
            f"{sample}_{dataset}_{scanNumber}.1"
        )  ## create the group of the scan wich will contatin all the results of a scan
        positionersGroup = scanGroup.create_group(
            "positioners"
        )  ## positioners subgroup in scan group
        patternHorizontalDetector = h5Read[
            f"{sample}_{dataset}_{scanNumber}.1/measurement/{nameHorizontalDetector}"
        ][
            ()
        ]  ## pattern of horizontal detector
        patternVerticalDetector = h5Read[
            f"{sample}_{dataset}_{scanNumber}.1/measurement/{nameVerticalDetector}"
        ][
            ()
        ]  ## pattern of vertical detector
        # 2-D data means one spectrum per scan point; 1-D means a single point.
        twoD_detector_data = (
            np.ndim(patternHorizontalDetector) == 2
            or np.ndim(patternVerticalDetector) == 2
        )
        nDetectorPoints = len(patternHorizontalDetector) if twoD_detector_data else 1
        # Columns 0-5 hold up to six positioner angles per scan point.
        positionAngles = np.zeros((nDetectorPoints, 6), "float64")
        for i, positioner in enumerate(positioners):
            pos_data = h5Read[
                f"{sample}_{dataset}_{scanNumber}.1/instrument/positioners/{positioner}"
            ][()]
            positionersGroup.create_dataset(
                positioner,
                dtype="float64",
                data=pos_data,
            )  ## saving all the requested positioners
            if i < 6:
                positionAngles[:, i] = pos_data
            else:
                print("Too many positioners given ! Only 6 are handled for now.")
        rawDataLevel1_1 = scanGroup.create_group(
            "rawData" + "_" + str(dataset) + "_" + str(scanNumber)
        )  ## rawData subgroup in scan group
        fitGroup = scanGroup.create_group("fit")  ## fit subgroup in scan group
        tthPositionsGroup = scanGroup.create_group(
            "tthPositionsGroup"
        )  ## two theta positions subgroup in scan group
        rawDataLevel1_1.create_dataset(
            "horizontalDetector", dtype="float64", data=patternHorizontalDetector
        )  ## save raw data of the horizontal detector
        rawDataLevel1_1.create_dataset(
            "verticalDetector", dtype="float64", data=patternVerticalDetector
        )  ## save raw data of the vertical detector
        for k in range(nDetectorPoints):
            # Per-point accumulators for the fit parameters of every box.
            fitParams = {"horizontal": np.array(()), "vertical": np.array(())}
            uncertaintyFitParams = {
                "horizontal": np.array(()),
                "vertical": np.array(()),
            }
            pointInScan = fitGroup.create_group(
                f"{str(k).zfill(4)}"
            )  ## create a group of each pattern (point of the scan)
            fitParamsGroup = pointInScan.create_group(
                "fitParams"
            )  ## fit results group for the two detector
            for i, nb_peaks in enumerate(nbPeaksInBoxes):
                fitLine = pointInScan.create_group(
                    f"fitLine_{str(i).zfill(4)}"
                )  ## create group for each range of peak(s)
                for detector in ["horizontal", "vertical"]:
                    fit_min, fit_max = (
                        (rangeFitHD[2 * i], rangeFitHD[2 * i + 1])
                        if detector == "horizontal"
                        else (rangeFitVD[2 * i], rangeFitVD[2 * i + 1])
                    )  # To be improved
                    pattern = (
                        patternHorizontalDetector
                        if detector == "horizontal"
                        else patternVerticalDetector
                    )  # To be improved
                    channels = np.arange(fit_min, fit_max)
                    # NOTE(review): this 2-D indexing assumes per-point
                    # spectra; the 1-D (single point) case would need
                    # pattern[fit_min:fit_max] — confirm with real data.
                    raw_data = pattern[k, fit_min:fit_max]
                    assert isinstance(raw_data, np.ndarray)
                    (
                        background,
                        fitted_data,
                        boxFitParams,
                        uncertaintyBoxFitParams,
                    ) = fit_detector_data(
                        channels=channels,
                        raw_data=raw_data,
                        nb_peaks=nb_peaks,
                        boxCounter=i,
                        scanNumber=scanNumber,
                        detectorName=detector,
                    )
                    save_fit_data(
                        fitLine, detector, channels, raw_data, background, fitted_data
                    )
                    # Accumulate fit parameters of this box
                    fitParams[detector] = np.append(fitParams[detector], boxFitParams)
                    uncertaintyFitParams[detector] = np.append(
                        uncertaintyFitParams[detector], uncertaintyBoxFitParams
                    )
            # End of fitting procedure
            # Reshape flat accumulators: 6 fit parameters per peak,
            # 5 uncertainty values per peak.
            savedFitParamsHD = np.reshape(
                fitParams["horizontal"], (int(np.size(fitParams["horizontal"]) / 6), 6)
            )
            fitParamsGroup.create_dataset(
                "fitParamsHD",
                dtype="float64",
                data=savedFitParamsHD,
            )  ## save parameters of the fit of HD
            savedUncertaintyFitParamsHD = np.reshape(
                uncertaintyFitParams["horizontal"],
                (int(np.size(uncertaintyFitParams["horizontal"]) / 5), 5),
            )
            fitParamsGroup.create_dataset(
                "uncertaintyFitParamsHD",
                dtype="float64",
                data=savedUncertaintyFitParamsHD,
            )  ## save uncertainty on the parameters of the fit of HD
            savedFitParamsVD = np.reshape(
                fitParams["vertical"], (int(np.size(fitParams["vertical"]) / 6), 6)
            )
            fitParamsGroup.create_dataset(
                "fitParamsVD",
                dtype="float64",
                data=savedFitParamsVD,
            )  ## save parameters of the fit of VD
            savedUncertaintyFitParamsVD = np.reshape(
                uncertaintyFitParams["vertical"],
                (int(np.size(uncertaintyFitParams["vertical"]) / 5), 5),
            )
            fitParamsGroup.create_dataset(
                "uncertaintyFitParamsVD",
                dtype="float64",
                data=savedUncertaintyFitParamsVD,
            )  ## save uncertainty on the parameters of the fit of VD
            # Interleave rows per scan point: even row = horizontal detector
            # (delta = -90), odd row = vertical detector (delta = 0).
            for peakNumber in range(np.sum(nbPeaksInBoxes)):
                if f"peak_{str(peakNumber).zfill(4)}" not in tthPositionsGroup.keys():
                    peakDataset = tthPositionsGroup.create_dataset(
                        f"peak_{str(peakNumber).zfill(4)}",
                        dtype="float64",
                        data=np.zeros((2 * nDetectorPoints, 13), "float64"),
                    )  ## create a dataset for each peak in tthPositionGroup
                    uncertaintyPeakDataset = tthPositionsGroup.create_dataset(
                        f"uncertaintyPeak_{str(peakNumber).zfill(4)}",
                        dtype="float64",
                        data=np.zeros((2 * nDetectorPoints, 13), "float64"),
                    )  ## create a dataset for uncertainty for each peak in tthPositionGroup
                else:
                    peakDataset = tthPositionsGroup[f"peak_{str(peakNumber).zfill(4)}"]
                    assert isinstance(peakDataset, h5py.Dataset)
                    uncertaintyPeakDataset = tthPositionsGroup[
                        f"uncertaintyPeak_{str(peakNumber).zfill(4)}"
                    ]
                    assert isinstance(uncertaintyPeakDataset, h5py.Dataset)
                peakDataset[2 * k] = peak_dataset_data(
                    positionAngles, savedFitParamsHD[peakNumber], -90, k
                )
                peakDataset[2 * k + 1] = peak_dataset_data(
                    positionAngles, savedFitParamsVD[peakNumber], 0, k
                )
                uncertaintyPeakDataset[2 * k] = peak_dataset_data(
                    positionAngles, savedUncertaintyFitParamsHD[peakNumber], -90, k
                )
                uncertaintyPeakDataset[2 * k + 1] = peak_dataset_data(
                    positionAngles, savedUncertaintyFitParamsVD[peakNumber], 0, k
                )
        if "infoPeak" not in tthPositionsGroup.keys():
            tthPositionsGroup.create_dataset(
                "infoPeak",
                dtype=h5py.string_dtype(encoding="utf-8"),
                data=f"{positioners}, delta, theta, position in channel, Intenstity, FWHM, shape factor, goodness factor",
            )  ## create info about dataset saved for each peak in tthPositionGroup
        create_info_group(
            scanGroup,
            fileRead,
            fileSave,
            sample,
            dataset,
            scanNumber,
            nameHorizontalDetector,
            nameVerticalDetector,
            numberOfBoxes,
            nbPeaksInBoxes,
            rangeFitHD,
            rangeFitVD,
            positioners,
        )
        h5Save.close()
    return
def fitEDD_with_scan_number_parse(**config):
    """Wrapper function to allow scanNumber to be a list or a slice.

    Accepted forms for ``scanNumber``: a single int, a list of values
    (handled recursively), a string "start:stop" slice (stop exclusive),
    or a plain numeric string.
    """
    n_scan_arg = config.pop("scanNumber")
    if isinstance(n_scan_arg, int):
        fitEDD(**config, scanNumber=n_scan_arg)
        return
    if isinstance(n_scan_arg, list):
        for scan in n_scan_arg:
            # Recurse so each list element may itself be an int or a string.
            fitEDD_with_scan_number_parse(**config, scanNumber=scan)
        return
    if isinstance(n_scan_arg, str):
        if ":" in n_scan_arg:
            start_scan, stop_scan = n_scan_arg.split(":")
            for scan in range(int(start_scan), int(stop_scan)):
                fitEDD(**config, scanNumber=scan)
        else:
            fitEDD(**config, scanNumber=int(n_scan_arg))
        return
    raise ValueError(f"Unrecognized value for scanNumber: {n_scan_arg}")
if __name__ == "__main__":
    # Dispatch command-line configuration to the scan-number-aware wrapper.
    run_from_cli(fitEDD_with_scan_number_parse)
| 10,468 | 3,032 |
"""Exercise 16 (Learn Python The Hard Way): overwrite a file with three
user-supplied lines.

Ported from Python 2 to Python 3: `print` statements became `print()` calls
and `raw_input` became `input` — the original syntax no longer runs on any
supported interpreter.
"""
from sys import argv

script, filename = argv

print("We are going to erase %r." % filename)
print("If you don't want that, hit CTRL-C (^C).")
print("If you do want that, hit RETURN.")

input("?")

print("Opening the file...")
target = open(filename, 'w')

# open(..., 'w') already truncates; the explicit call mirrors the lesson.
print("Truncating the file. Goodbye!")
target.truncate()

print("Now I'm going to ask you for three lines.")

line1 = input("line 1: ")
line2 = input("line 2: ")
line3 = input("line 3: ")

print("I'm going to write these to the file.")

# Write each line followed by an explicit newline.
target.write(line1)
target.write("\n")
target.write(line2)
target.write("\n")
target.write(line3)
target.write("\n")

print("And finally, we close it.")
target.close()
# sarahs-mac:learn-python-the-hard-way SarahS$ python ex16.py test.txt
# We are going to erase 'test.txt'.
# If you don't want that, hit CTRL-C (^C).
# If you do want that, hit RETURN.
# ?^CTraceback (most recent call last):
# File "ex16.py", line 9, in <module>
# raw_input("?")
# KeyboardInterrupt
# sarahs-mac:learn-python-the-hard-way SarahS$ python ex16.py test.txt
# We are going to erase 'test.txt'.
# If you don't want that, hit CTRL-C (^C).
# If you do want that, hit RETURN.
# ?
# Opening the file...
# Truncating the file. Goodbye!
# Now I'm going to ask you for three lives.
# line 1: Mary had a little lamb
# line 2: Its fleece was white as snow
# line 3: It was also tasty
# I'm going to write these to the file.
# Traceback (most recent call last):
# File "ex16.py", line 29, in <module>
# target.write(line3)
# NameError: name 'line3' is not defined #accidentally wrote line2 again on line 21. i fixed it and ran it again though
# sarahs-mac:learn-python-the-hard-way SarahS$ python ex16.py test.txt
# We are going to erase 'test.txt'.
# If you don't want that, hit CTRL-C (^C).
# If you do want that, hit RETURN.
# ?
# Opening the file...
# Truncating the file. Goodbye!
# Now I'm going to ask you for three lines.
# line 1: Mary had a little lamb
# line 2: Its fleece was white as snow
# line 3: It was also tasty
# I'm going to write these to the file.
# And finally, we close it. | 2,082 | 765 |
# Generated by Django 2.0.6 on 2018-11-01 14:26
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add the `verify_code` integer column to the user model."""

    dependencies = [
        ('user', '0001_initial'),
    ]
    operations = [
        migrations.AddField(
            model_name='user',
            name='verify_code',
            field=models.IntegerField(default=0),
            # default=0 only backfills existing rows during this migration;
            # preserve_default=False drops it from the field afterwards.
            preserve_default=False,
        ),
    ]
| 408 | 134 |
from django.test import TestCase
from filler.plain_classes.teams_data import TeamsData
class TestTeamsData(TestCase):
    """Each required TeamsData argument must reject None with an AssertionError."""

    def test_participants_none(self):
        self.assertRaises(
            AssertionError,
            TeamsData,
            participants=None, actions=['Action'], dates=['Date'],
        )

    def test_actions_none(self):
        self.assertRaises(
            AssertionError,
            TeamsData,
            participants=['Some'], actions=None, dates=['Date'],
        )

    def test_dates_none(self):
        self.assertRaises(
            AssertionError,
            TeamsData,
            participants=['Some'], actions=['Action'], dates=None,
        )
| 599 | 181 |
from dungeon_model import Monsters, Players
import re
import math
def initiative_sort(init_order):
    """Sort combatants by initiative, highest first, in place.

    Each entry is a sequence whose first element is the initiative value.
    The sort is stable, so combatants with equal initiative keep their
    original relative order — matching the previous insertion sort, which
    only shifted on a strict comparison.

    Args:
        init_order: Mutable list of (initiative, ...) entries.

    Returns:
        The same list object, sorted in descending initiative order.
    """
    # list.sort is a stable O(n log n) replacement for the hand-rolled
    # O(n^2) insertion sort; the leftover debug prints are removed.
    init_order.sort(key=lambda entry: entry[0], reverse=True)
    return init_order
def instantiate_player(player_info, game_id):
    """Build a Players model instance (type 'pla') from a character dict.

    Args:
        player_info (dict): Must provide 'name' and 'init' keys.
        game_id: Identifier of the game the character belongs to.

    Returns:
        Players: The new model instance. NOTE(review): despite the old
        docstring's claim, nothing here adds it to the DB — the caller is
        presumably responsible for session.add/commit.
    """
    # Removed the no-op `game_id = game_id` self-assignment and the debug print.
    character = player_info
    new_character = Players(name=character['name'].title(),
                            game_id=game_id,
                            initiative_mod=character['init'],
                            type='pla')
    return new_character
def instantiate_monster(monst_info):
    """Build a Monsters model instance (type 'mon') from a stat dict.

    The initiative modifier is derived from dexterity with the usual
    (dex - 10) / 2 formula, truncated toward zero to preserve the previous
    math.trunc behavior (for dex 7 this yields -1, where floor division
    would give -2).

    Args:
        monst_info (dict): Monster stats keyed by 'type', 'size', 'ac',
            'hp', 'dice_num', 'dice_type', 'bonus', 'speed', 'burrow',
            'swim', 'fly', 'hover' and the six ability scores
            ('str', 'dex', 'con', 'wis', 'cha', 'int').

    Returns:
        Monsters: The new (not yet persisted) model instance.
    """
    initiative_mod = math.trunc((monst_info['dex'] - 10) / 2)
    # Pass values straight from the dict instead of binding them to locals —
    # the old locals named `str` and `int` shadowed the builtins.
    # (`str=`/`int=` as keyword arguments are the model's field names.)
    monster = Monsters(species=monst_info['type'],
                       size=monst_info['size'],
                       total_hp=monst_info['hp'],
                       ac=monst_info['ac'],
                       hit_dice_num=monst_info['dice_num'],
                       hit_dice_type=monst_info['dice_type'],
                       bonus=monst_info['bonus'],
                       initiative_mod=initiative_mod,
                       speed=monst_info['speed'],
                       burrow=monst_info['burrow'],
                       swim=monst_info['swim'],
                       fly=monst_info['fly'],
                       hover=monst_info['hover'],
                       str=monst_info['str'],
                       dex=monst_info['dex'],
                       con=monst_info['con'],
                       wis=monst_info['wis'],
                       cha=monst_info['cha'],
                       int=monst_info['int'],
                       type='mon')
    return monster
| 2,784 | 866 |
def infoGAN_encoder(params, is_training):
    """Return an encoder closure mapping images x to a 2*latent_size vector."""
    is_training = tf.constant(is_training, dtype=tf.bool)

    def encoder(x):
        with tf.variable_scope('model/encoder', ['x'], reuse=tf.AUTO_REUSE):
            # two strided, spectrally-normalised convolutions
            h = lrelu(conv2d(x, 64, 4, 4, 2, 2, name='conv1', use_sn=True))
            h = conv2d(h, 128, 4, 4, 2, 2, name='conv2', use_sn=True)
            h = batch_norm(h, is_training=is_training, scope='b_norm1')
            h = tf.layers.dropout(h, rate=params['dropout_rate'], training=is_training)
            h = lrelu(h)
            # flatten, then a fully connected head
            h = tf.reshape(h, [params['batch_size'], -1])
            h = linear(h, 1024, scope="ln1", use_sn=True)
            h = batch_norm(h, is_training=is_training, scope='b_norm2')
            h = tf.layers.dropout(h, rate=params['dropout_rate'], training=is_training)
            h = lrelu(h)
            # final projection to 2 * latent_size units
            return linear(h, 2 * params['latent_size'], scope="ln_output", use_sn=True)

    return encoder
def infoGAN_decoder(params, is_training):
    """Return a decoder closure mapping latent codes z to images in [-0.5, 0.5]."""
    is_training = tf.constant(is_training, dtype=tf.bool)

    def decoder(z):
        with tf.variable_scope('model/decoder', ['z'], reuse=tf.AUTO_REUSE):
            # project the latent code up through two fully connected layers
            h = tf.nn.relu(batch_norm(linear(z, 1024, 'ln2'),
                                      is_training=is_training, scope='b_norm3'))
            h = linear(h, 128 * (params['width'] // 4) * (params['height'] // 4), scope='ln3')
            h = tf.nn.relu(batch_norm(h, is_training=is_training, scope='b_norm4'))
            h = tf.layers.dropout(h, rate=params['dropout_rate'], training=is_training)
            # reshape to a quarter-resolution feature map, then upsample twice
            h = tf.reshape(h, [params['batch_size'], params['width'] // 4, params['height'] // 4, 128])
            h = deconv2d(h, [params['batch_size'], params['width'] // 2, params['height'] // 2, 64],
                         4, 4, 2, 2, name='conv3')
            h = tf.nn.relu(batch_norm(h, is_training=is_training, scope='b_norm5'))
            h = tf.layers.dropout(h, rate=params['dropout_rate'], training=is_training)
            h = tf.nn.sigmoid(deconv2d(h, [params['batch_size'], params['width'], params['height'], params['n_channels']],
                                       4, 4, 2, 2, name='conv4'))
            # shift sigmoid output from [0, 1] to [-0.5, 0.5]
            return h - 0.5

    return decoder  # -*- coding: utf-8 -*-
"""
Created on Mon Jun 7 12:57:10 2021
@author: horvat
"""
| 2,306 | 861 |
# administrative username and password for development
ADMIN_USERNAME = 'admin'
ADMIN_PASSWORD = 'password'
ADMIN_TYPE = 'admin'
# for production, read the credentials from the environment instead, e.g.:
# ADMIN_USERNAME = environ.get('ADMIN_USERNAME')
# ADMIN_PASSWORD = environ.get('ADMIN_PASSWORD')
# ADMIN_TYPE = environ.get('ADMIN_TYPE')
| 288 | 97 |
from agents.agent import Agent
from models.actor_critic_mlp import ActorCriticMLP
import numpy as np
import torch
import torch.optim as optim
from utils import plot_grad_flow
class A2C(Agent):
    """Advantage Actor-Critic agent.

    A shared network (ActorCriticMLP) returns an action distribution and a
    state-value estimate.  During an episode act() caches log-probs and
    values and collect_experience() caches rewards; optimize_network() then
    performs one Monte-Carlo policy-gradient update over the whole episode.
    """
    def __init__(
        self,
        state_size,
        action_size,
        hidden_size,
        memory,
        lr,
        gamma,
        device,
        use_norm,
        **kwargs
    ):
        super(A2C, self).__init__(state_size, action_size, hidden_size, memory)
        self.device = device
        self.net = ActorCriticMLP(state_size, action_size, hidden_size, memory).to(
            self.device
        )
        self.optimiser = optim.Adam(self.net.parameters(), lr=lr)
        self.gamma = gamma  # discount factor for returns
        # per-episode buffers, cleared by reset()
        self.log_probs = []
        self.values = []
        self.rewards = []
        self.use_norm = use_norm  # whether to standardise returns
    def _compute_returns(self):
        """Compute discounted returns G_t for the stored episode rewards."""
        R = 0
        returns = []
        for step in reversed(range(len(self.rewards))):
            R = self.rewards[step] + self.gamma * R
            returns.insert(0, R)
        returns = np.array(returns)
        if self.use_norm:
            # standardise; the division is skipped when std == 0 (e.g. a
            # single-step episode) to avoid dividing by zero
            returns -= returns.mean()
            if returns.std() > 0.0:
                returns /= returns.std()
        return returns
    def optimize_network(self):
        """Run one actor-critic update over the stored episode; return the loss."""
        returns = self._compute_returns()
        returns = torch.from_numpy(returns).float().to(self.device)
        values = torch.cat(self.values).squeeze(1)
        log_probs = torch.cat(self.log_probs)
        # advantage estimate; detached in the policy term so the actor loss
        # does not backpropagate through the critic
        delta = returns - values
        policy_loss = -torch.sum(log_probs * delta.detach())
        value_function_loss = 0.5 * torch.sum(delta ** 2)
        loss = policy_loss + value_function_loss
        self.optimiser.zero_grad()
        loss.backward()
        # plot_grad_flow(self.net.named_parameters())
        self.optimiser.step()
        return loss.detach().item()
    def reset(self):
        """Clear the episode buffers and reset the network's internal state."""
        self.values = []
        self.log_probs = []
        self.rewards = []
        self.net.reset()
    def act(self, state):
        """Sample an action and cache its log-prob and the value estimate."""
        dist, value = self.net(state)
        action = dist.sample()
        log_prob = dist.log_prob(action)
        self.log_probs.append(log_prob)
        self.values.append(value)
        return action.detach().item()
    def collect_experience(self, state, action, reward, next_state, done):
        """Record the reward; the other transition fields are unused by A2C."""
        self.rewards.append(reward)
| 2,344 | 743 |
from common import google_cloud
class GANTrainParameters():
    """Hyper-parameters for GAN training.

    The batch size shrinks to 1 when not running on Google Cloud, so the
    code stays runnable on a local machine.
    """
    def __init__(self):
        self.num_epochs = 2000
        # large batches only on the cloud; locally fall back to 1
        self.batch_size = 10000 if google_cloud else 1
        self.num_steps = 1
        self.lr_d = 0.01  # discriminator learning rate
        self.lr_g = 0.001  # generator learning rate


training_param = GANTrainParameters()
| 329 | 124 |
#!/usr/bin/env python
# --------------------------------------------------------
# Tensorflow Faster R-CNN
# Licensed under The MIT License [see LICENSE for details]
# Written by Xinlei Chen, based on code from Ross Girshick
# --------------------------------------------------------
"""
Demo script showing detections in sample images.
See README.md for installation instructions before running.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import _init_paths
from model.config import cfg
from model.test import im_detect
from model.nms_wrapper import nms
from utils.timer import Timer
import tensorflow as tf
import matplotlib
matplotlib.use('Agg')
from matplotlib import pyplot as plt
import numpy as np
import os, cv2
import argparse
from nets.vgg16 import vgg16
from nets.resnet_v1 import resnetv1
import glob
import os
import datetime
import pickle
import itertools
# Class labels in network output order; index 0 is the RPN background class.
CLASSES = ('__background__', # always index 0
           'normal',
           'polyp','dyed-lifted-polyp','dyed-resection-margin')
# Checkpoint file name for each supported backbone.
NETS = {'vgg16': ('vgg16_faster_rcnn_iter_70000.ckpt',),'res101': ('res101_faster_rcnn_iter_5000.ckpt',)}
# Training split(s) used for each dataset name.
DATASETS= {'pascal_voc': ('voc_2007_trainval',),'pascal_voc_0712': ('voc_2007_trainval+voc_2012_trainval',),'medico_2018':('medico_2018_trainval',)}
def plot_confusion_matrix(cm, classes,
                          normalize=True,
                          title='Confusion matrix',
                          cmap=plt.cm.Reds):
    """Render the confusion matrix ``cm`` onto the current pyplot figure.

    cm: 2-D array; rows are true classes, columns predicted classes.
    classes: tick labels in the same order as cm's rows/columns.
    normalize: when True each row is scaled to sum to 1.
    """
    if normalize:
        # row-normalise: each row becomes the distribution over predictions
        # for one true class
        cm = cm.astype('float') / cm.sum(axis=1)[:, np.newaxis]
    plt.imshow(cm, interpolation='nearest', cmap=cmap)
    plt.title(title)
    plt.colorbar()
    tick_marks = np.arange(len(classes))
    plt.xticks(tick_marks, classes, rotation=45)
    plt.yticks(tick_marks, classes)
    # FIX: both branches used to be '.2f', making the conditional dead and
    # printing raw counts with two decimals.  '.0f' shows counts as integers
    # while still accepting the float-typed matrix built by the caller.
    fmt = '.2f' if normalize else '.0f'
    thresh = cm.max() / 2.
    for i, j in itertools.product(range(cm.shape[0]), range(cm.shape[1])):
        plt.text(j, i, format(cm[i, j], fmt),
                 horizontalalignment="center",
                 color="white" if cm[i, j] > thresh else "black")
    plt.tight_layout()
    plt.ylabel('True label')
    plt.xlabel('Predicted label\n')
def class_max_conf(dets, thresh=0.5):
    """Return the highest detection confidence in ``dets``.

    dets: array whose last column holds confidence scores.
    Returns 0.0 when no score reaches ``thresh`` (including empty input).
    """
    confidences = dets[:, -1]
    if not np.any(confidences >= thresh):
        return 0.0
    return confidences.max()
def demo(log_out, sess, net, image_name, gt, cfs_mat, INP_DIR, CONF_THRESH):
    """Classify one image and record the result.

    Runs the detector on ``image_name``, keeps the class whose best NMS-kept
    detection has the highest confidence (>= CONF_THRESH), increments
    ``cfs_mat[gt][predicted]`` and appends a CSV row to ``log_out``.
    Returns True if the prediction matches the ground-truth label ``gt``.
    """
    # Load the input image
    im = cv2.imread(image_name)
    # Detect all object classes and regress object bounds
    timer = Timer()
    timer.tic()
    scores, boxes = im_detect(sess, net, im)
    timer.toc()
    # Visualize detections for each class
    NMS_THRESH = 0.3
    # default prediction when nothing clears the confidence threshold
    res_cls = CLASSES[1]
    res_conf = 0.0
    for cls_ind, cls in enumerate(CLASSES[2:]):
        cls_ind += 1 # because we skipped background
        # NOTE(review): enumerate starts at CLASSES[2] but cls_ind starts at 1,
        # so each class is paired with the score/box columns of the previous
        # class ('polyp' reads column 1, which belongs to 'normal').  Confirm
        # whether CLASSES[1:] (or cls_ind += 2) was intended.
        cls_boxes = boxes[:, 4*cls_ind:4*(cls_ind + 1)]
        cls_scores = scores[:, cls_ind]
        dets = np.hstack((cls_boxes,
                          cls_scores[:, np.newaxis])).astype(np.float32)
        keep = nms(dets, NMS_THRESH)
        dets = dets[keep, :]
        tmp = class_max_conf(dets, CONF_THRESH)
        if (tmp > res_conf):
            res_conf = tmp
            res_cls = cls
    cfs_mat[gt][res_cls] += 1
    correct = (gt == res_cls)
    # image id relative to the input directory
    img_id = image_name.replace(INP_DIR, '')
    log_out.write(img_id+','+str(correct)+','+gt+','+res_cls+','+'{:3f},{:3f}'.format(res_conf,timer.total_time)+'\n')
    return correct
def parse_args():
    """Parse the command-line arguments for the demo script."""
    arg_parser = argparse.ArgumentParser(description='Tensorflow Faster R-CNN demo')
    arg_parser.add_argument('--net', dest='demo_net',
                            help='Network to use [vgg16 res101]',
                            choices=NETS.keys(), default='res101')
    arg_parser.add_argument('--dataset', dest='dataset',
                            help='Trained dataset [pascal_voc pascal_voc_0712]',
                            choices=DATASETS.keys(), default='medico_2018')
    arg_parser.add_argument('--inpdir', dest='inpdir')      # image directory
    arg_parser.add_argument('--testlist', dest='testlist')  # "<image> <label>" lines
    arg_parser.add_argument('--conf', dest='conf', default='0.9')
    arg_parser.add_argument('--outdir', dest='outdir', default='result')
    return arg_parser.parse_args()
if __name__ == '__main__':
    # Evaluate a trained Faster R-CNN classifier over a test list and write a
    # per-image CSV log plus confusion-matrix artifacts (pickle, text, PNGs).
    cfg.TEST.HAS_RPN = True  # Use RPN for proposals
    #CREATE TIME STAMP ID
    time_stamp = str(datetime.datetime.now())
    #INPUT AND OUTPUT DIRECTORY
    args = parse_args()
    INPUT_DIR = args.inpdir
    OUTPUT_DIR = os.path.join('cls_result',args.outdir+'_'+time_stamp+'/')
    OUTPUT_LOG = OUTPUT_DIR + 'log_'+time_stamp+'.csv'
    TEST_LIST = args.testlist
    #SAVE LOG FILE
    print('Save log to = '+ OUTPUT_LOG)
    if not os.path.exists(os.path.dirname(OUTPUT_LOG)):
        os.makedirs(os.path.dirname(OUTPUT_LOG))
    flog = open(OUTPUT_LOG,"w")
    flog.write('id,correct,gt_cls,predict_cls,conf,time\n')
    #CONFIDENT THRESH
    CONF_THRESH = float(args.conf)
    demonet = args.demo_net
    dataset = args.dataset
    tfmodel = os.path.join('output', demonet, DATASETS[dataset][0], 'default',
                           NETS[demonet][0])
    if not os.path.isfile(tfmodel + '.meta'):
        raise IOError(('{:s} not found.\nDid you download the proper networks from '
                       'our server and place them properly?').format(tfmodel + '.meta'))
    # set config
    tfconfig = tf.ConfigProto(allow_soft_placement=True)
    tfconfig.gpu_options.allow_growth=True
    # init session
    sess = tf.Session(config=tfconfig)
    # load network
    if demonet == 'vgg16':
        net = vgg16()
    elif demonet == 'res101':
        net = resnetv1(num_layers=101)
    else:
        raise NotImplementedError
    # 5 = background + the four classes in CLASSES
    net.create_architecture("TEST", 5,
                            tag='default', anchor_scales=[4, 8, 16, 32])
    saver = tf.train.Saver()
    saver.restore(sess, tfmodel)
    print('Loaded network {:s}'.format(tfmodel))
    fi = open(TEST_LIST)
    lines = fi.readlines()
    print('Total input imgs = '+str(len(lines)))
    num_of_test = len(lines)
    # confusion matrix as a nested dict: cfs_mat[true][predicted] -> count
    cfs_mat = {}
    for i_class in CLASSES[1:]:
        cfs_mat[i_class] = {}
        for j_class in CLASSES[1:]:
            cfs_mat[i_class][j_class] = 0
    for i,line in enumerate(lines):
        # print('~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~')
        # print('Demo for data/demo/{}'.format(im_name))
        im_name, gt = line.strip().split(' ')
        im_name = os.path.join(INPUT_DIR,im_name)
        if (i%10 == 0):
            print(str(i) + '/' + str(num_of_test))
        # Periodically print an intermediate confusion-matrix snapshot.
        # BUG FIX: the last-iteration test used len(im_name)-1 (the length of
        # the file-path string) instead of len(lines)-1.
        if (i%100 == 0 and i>0) or (i == len(lines)-1):
            c = '{:25s}'.format('')
            for i_class in CLASSES[1:]:
                c+= '{:25s}'.format(i_class)
            print(c)
            for i_class in CLASSES[1:]:
                c = '{:25s}'.format(i_class)
                for j_class in CLASSES[1:]:
                    c+= '{:25s}'.format(str(cfs_mat[i_class][j_class]))
                print(c+'\n')
            print('-------------------')
        crr = demo(flog, sess, net, im_name, gt, cfs_mat, INPUT_DIR, CONF_THRESH)
    flog.close()
    #SAVE cvs_mat
    fo = open(OUTPUT_DIR+'confusion_matrix.pickle',"wb")
    pickle.dump((CLASSES,cfs_mat),fo)
    fo.close()
    #PRINT result
    fo = open(OUTPUT_DIR+'confusion_matrix.txt',"w")
    print('--------FINAL RESULT-----------')
    print('Total = ' + str(num_of_test))
    print('Confusion matrix: ')
    c = '{:25s}'.format('')
    for i_class in CLASSES[1:]:
        c+= '{:25s}'.format(i_class)
    print(c)
    fo.write(c + '\n')
    for i_class in CLASSES[1:]:
        c = '{:25s}'.format(i_class)
        for j_class in CLASSES[1:]:
            c+= '{:25s}'.format(str(cfs_mat[i_class][j_class]))
        print(c+'\n')
        fo.write(c + '\n')
    fo.close()
    #SAVE RES IMG
    n_cls = len(CLASSES[1:])
    cm = np.zeros((n_cls,n_cls))
    for i,i_class in enumerate(CLASSES[1:]):
        for j,j_class in enumerate(CLASSES[1:]):
            cm[i][j] = int(cfs_mat[i_class][j_class])
    plt.figure()
    plot_confusion_matrix(cm,CLASSES[1:], title = 'Confusion matrix normalized')
    plt.tight_layout()
    plt.savefig(OUTPUT_DIR+'confusion_matrix_normalized.png', dpi = 600)
    print('Confusion matrix normalize saved!')
    plt.figure()
    plot_confusion_matrix(cm,CLASSES[1:],normalize=False)
    plt.tight_layout()
    plt.savefig(OUTPUT_DIR+'confusion_matrix.png', dpi = 600)
    print('Confusion matrix saved!')
| 8,672 | 3,212 |
# pylint:disable=too-many-lines
import os
import time
from faker import Faker
from unittest.mock import patch
import pytest
from hestia.internal_services import InternalServices
from rest_framework import status
import conf
import stores
from api.experiments import queries
from api.experiments.serializers import (
BookmarkedExperimentSerializer,
ExperimentChartViewSerializer,
ExperimentDeclarationsSerializer,
ExperimentDetailSerializer,
ExperimentJobDetailSerializer,
ExperimentJobSerializer,
ExperimentJobStatusSerializer,
ExperimentLastMetricSerializer,
ExperimentMetricSerializer,
ExperimentSerializer,
ExperimentStatusSerializer
)
from api.utils.views.protected import ProtectedView
from constants.urls import API_V1, WS_V1
from db.models.bookmarks import Bookmark
from db.models.experiment_groups import GroupTypes
from db.models.experiment_jobs import ExperimentJob, ExperimentJobStatus
from db.models.experiments import (
Experiment,
ExperimentChartView,
ExperimentMetric,
ExperimentStatus
)
from db.redis.ephemeral_tokens import RedisEphemeralTokens
from db.redis.group_check import GroupChecks
from db.redis.heartbeat import RedisHeartBeat
from db.redis.ttl import RedisTTL
from factories.factory_build_jobs import BuildJobFactory
from factories.factory_experiment_groups import ExperimentGroupFactory
from factories.factory_experiments import (
ExperimentChartViewFactory,
ExperimentFactory,
ExperimentJobFactory,
ExperimentJobStatusFactory,
ExperimentMetricFactory,
ExperimentStatusFactory
)
from factories.factory_jobs import JobFactory
from factories.factory_projects import ProjectFactory
from factories.fixtures import (
exec_experiment_outputs_refs_parsed_content,
exec_experiment_resources_parsed_content,
exec_experiment_spec_parsed_content,
exec_experiment_spec_parsed_regression_artifact_refs,
)
from lifecycles.experiments import ExperimentLifeCycle
from lifecycles.jobs import JobLifeCycle
from options.registry.archives import ARCHIVES_ROOT_ARTIFACTS
from options.registry.scheduler import SCHEDULER_GLOBAL_COUNTDOWN
from schemas import ExperimentSpecification
from tests.base.clients import EphemeralClient
from tests.base.views import BaseEntityCodeReferenceViewTest, BaseFilesViewTest, BaseViewTest
@pytest.mark.experiments_mark
class TestProjectExperimentListViewV1(BaseViewTest):
    """Tests for the project-level experiment list endpoint (GET/POST).

    Covers listing (filtering, sorting, pagination, bookmarks) and creation
    (ttl, is_managed, specs, groups/selections, build jobs).
    """
    serializer_class = BookmarkedExperimentSerializer
    model_class = Experiment
    factory_class = ExperimentFactory
    num_objects = 3
    HAS_AUTH = True
    DISABLE_EXECUTOR = False
    def setUp(self):
        """Create a project owned by the authed user, another user's project,
        and num_objects experiments in the first project."""
        super().setUp()
        self.project = ProjectFactory(user=self.auth_client.user)
        self.other_project = ProjectFactory()
        self.url = '/{}/{}/{}/experiments/'.format(API_V1,
                                                   self.project.user.username,
                                                   self.project.name)
        self.other_url = '/{}/{}/{}/experiments/'.format(API_V1,
                                                         self.other_project.user.username,
                                                         self.other_project.name)
        self.objects = [self.factory_class(project=self.project) for _ in range(self.num_objects)]
        # one object that does not belong to the filter
        self.factory_class()
        self.queryset = self.model_class.objects.filter(project=self.project)
        self.other_object = self.factory_class(project=self.other_project)
        self.queryset = self.queryset.order_by('-updated_at')
    def test_get(self):
        """Listing returns the project's experiments; group/independent filters work."""
        resp = self.auth_client.get(self.url)
        assert resp.status_code == status.HTTP_200_OK
        assert resp.data['next'] is None
        assert resp.data['count'] == len(self.objects)
        data = resp.data['results']
        assert len(data) == self.queryset.count()
        assert data == self.serializer_class(self.queryset, many=True).data
        # Test other
        resp = self.auth_client.get(self.other_url)
        assert resp.status_code == status.HTTP_200_OK
        independent_count = self.queryset.count()
        # Create group to test independent filter
        with patch('scheduler.tasks.experiment_groups.'
                   'experiments_group_create.apply_async') as mock_fct:
            group = ExperimentGroupFactory(project=self.project)
        assert mock_fct.call_count == 1
        [self.factory_class(project=self.project, experiment_group=group) for _ in range(2)]  # noqa
        all_experiment_count = self.queryset.all().count()
        assert all_experiment_count == independent_count + group.experiments.count()
        # Getting all experiments
        resp = self.auth_client.get(self.url)
        assert resp.status_code == status.HTTP_200_OK
        assert resp.data['count'] == all_experiment_count
        # Getting only independent experiments
        resp = self.auth_client.get(self.url + '?independent=true')
        assert resp.status_code == status.HTTP_200_OK
        assert resp.data['count'] == independent_count
        # Through query
        resp = self.auth_client.get(self.url + '?query=independent:true')
        assert resp.status_code == status.HTTP_200_OK
        assert resp.data['count'] == independent_count
        # Getting only group experiments
        resp = self.auth_client.get(self.url + '?group={}'.format(group.id))
        assert resp.status_code == status.HTTP_200_OK
        assert resp.data['count'] == group.experiments.count()
        # Filtering for independent and group experiments should raise
        resp = self.auth_client.get(self.url + '?independent=true&group={}'.format(group.id))
        assert resp.status_code == status.HTTP_400_BAD_REQUEST
    def test_get_with_bookmarked_objects(self):
        """Only the requesting user's bookmarks mark results as bookmarked."""
        # Other user bookmark
        Bookmark.objects.create(
            user=self.other_project.user,
            content_object=self.objects[0])
        resp = self.auth_client.get(self.url)
        assert resp.status_code == status.HTTP_200_OK
        self.assertEqual(len([1 for obj in resp.data['results'] if obj['bookmarked'] is True]), 0)
        # Authenticated user bookmark
        Bookmark.objects.create(
            user=self.auth_client.user,
            content_object=self.objects[0])
        resp = self.auth_client.get(self.url)
        assert resp.status_code == status.HTTP_200_OK
        assert len([1 for obj in resp.data['results'] if obj['bookmarked'] is True]) == 1
    def test_pagination(self):
        """limit= paginates and the `next` link returns the remainder."""
        limit = self.num_objects - 1
        resp = self.auth_client.get("{}?limit={}".format(self.url, limit))
        assert resp.status_code == status.HTTP_200_OK
        next_page = resp.data.get('next')
        assert next_page is not None
        assert resp.data['count'] == self.queryset.count()
        data = resp.data['results']
        assert len(data) == limit
        assert data == self.serializer_class(self.queryset[:limit], many=True).data
        resp = self.auth_client.get(next_page)
        assert resp.status_code == status.HTTP_200_OK
        assert resp.data['next'] is None
        data = resp.data['results']
        assert len(data) == 1
        assert data == self.serializer_class(self.queryset[limit:], many=True).data
    def test_get_order(self):
        """sort= orders results by the given (possibly multiple/desc) fields."""
        resp = self.auth_client.get(self.url + '?sort=created_at,updated_at')
        assert resp.status_code == status.HTTP_200_OK
        assert resp.data['next'] is None
        assert resp.data['count'] == len(self.objects)
        data = resp.data['results']
        assert len(data) == self.queryset.count()
        assert data != self.serializer_class(self.queryset, many=True).data
        assert data == self.serializer_class(self.queryset.order_by('created_at', 'updated_at'),
                                             many=True).data
        resp = self.auth_client.get(self.url + '?sort=-started_at')
        assert resp.status_code == status.HTTP_200_OK
        assert resp.data['next'] is None
        assert resp.data['count'] == len(self.objects)
        data = resp.data['results']
        assert len(data) == self.queryset.count()
        assert data == self.serializer_class(self.queryset.order_by('-started_at'),
                                             many=True).data
    def test_get_order_pagination(self):
        """Sorting and pagination compose."""
        queryset = self.queryset.order_by('created_at', 'updated_at')
        limit = self.num_objects - 1
        resp = self.auth_client.get("{}?limit={}&{}".format(self.url,
                                                            limit,
                                                            'sort=created_at,updated_at'))
        assert resp.status_code == status.HTTP_200_OK
        next_page = resp.data.get('next')
        assert next_page is not None
        assert resp.data['count'] == queryset.count()
        data = resp.data['results']
        assert len(data) == limit
        assert data == self.serializer_class(queryset[:limit], many=True).data
        resp = self.auth_client.get(next_page)
        assert resp.status_code == status.HTTP_200_OK
        assert resp.data['next'] is None
        data = resp.data['results']
        assert len(data) == 1
        assert data == self.serializer_class(queryset[limit:], many=True).data
    @pytest.mark.filterwarnings('ignore::RuntimeWarning')
    def test_get_filter(self):  # pylint:disable=too-many-statements
        """query= filtering by dates, status, id, name, params, metrics, tags."""
        # Wrong filter raises
        resp = self.auth_client.get(self.url + '?query=created_at<2010-01-01')
        assert resp.status_code == status.HTTP_400_BAD_REQUEST
        resp = self.auth_client.get(self.url + '?query=created_at:<2010-01-01')
        assert resp.status_code == status.HTTP_200_OK
        assert resp.data['next'] is None
        assert resp.data['count'] == 0
        resp = self.auth_client.get(self.url +
                                    '?query=created_at:>=2010-01-01,status:Finished')
        assert resp.status_code == status.HTTP_200_OK
        assert resp.data['next'] is None
        assert resp.data['count'] == 0
        resp = self.auth_client.get(self.url +
                                    '?query=created_at:>=2010-01-01,status:created|running')
        assert resp.status_code == status.HTTP_200_OK
        assert resp.data['next'] is None
        assert resp.data['count'] == len(self.objects)
        data = resp.data['results']
        assert len(data) == self.queryset.count()
        assert data == self.serializer_class(self.queryset, many=True).data
        # Id
        resp = self.auth_client.get(self.url +
                                    '?query=id:{}|{}'.format(self.objects[0].id,
                                                             self.objects[1].id))
        assert resp.status_code == status.HTTP_200_OK
        assert resp.data['next'] is None
        assert resp.data['count'] == 2
        # Name
        self.objects[0].name = 'exp_foo'
        self.objects[0].save()
        resp = self.auth_client.get(self.url +
                                    '?query=name:exp_foo')
        assert resp.status_code == status.HTTP_200_OK
        assert resp.data['next'] is None
        assert resp.data['count'] == 1
        # Name Regex
        resp = self.auth_client.get(self.url +
                                    '?query=name:%foo')
        assert resp.status_code == status.HTTP_200_OK
        assert resp.data['next'] is None
        assert resp.data['count'] == 1
        resp = self.auth_client.get(self.url +
                                    '?query=project.name:{}'.format(self.project.name))
        assert resp.data['next'] is None
        assert resp.data['count'] == len(self.objects)
        # Set metrics
        optimizers = ['sgd', 'sgd', 'adam']
        tags = [['tag1'], ['tag1', 'tag2'], ['tag2']]
        losses = [0.1, 0.2, 0.9]
        for i, obj in enumerate(self.objects[:3]):
            ExperimentMetricFactory(experiment=obj, values={'loss': losses[i]})
            obj.params = {'optimizer': optimizers[i]}
            obj.tags = tags[i]
            obj.save()
        resp = self.auth_client.get(
            self.url + '?query=created_at:>=2010-01-01,'
                       'params.optimizer:sgd,'
                       'metric.loss:>=0.2,'
                       'tags:tag1')
        assert resp.status_code == status.HTTP_200_OK
        assert resp.data['next'] is None
        assert resp.data['count'] == 1
        # Test that metrics works as well
        resp = self.auth_client.get(
            self.url + '?query=created_at:>=2010-01-01,'
                       'params.optimizer:sgd,'
                       'metrics.loss:>=0.2,'
                       'tags:tag1')
        assert resp.status_code == status.HTTP_200_OK
        assert resp.data['next'] is None
        assert resp.data['count'] == 1
        resp = self.auth_client.get(
            self.url + '?query=created_at:>=2010-01-01,'
                       'params.optimizer:sgd|adam,'
                       'metric.loss:>=0.2,'
                       'tags:tag1|tag2')
        assert resp.status_code == status.HTTP_200_OK
        assert resp.data['next'] is None
        assert resp.data['count'] == 2
        # Order by metrics
        resp = self.auth_client.get(self.url + '?sort=-metric.loss')
        assert resp.status_code == status.HTTP_200_OK
        assert resp.data['next'] is None
        assert resp.data['count'] == len(self.objects)
        data = resp.data['results']
        assert len(data) == self.queryset.count()
        assert data == [self.serializer_class(obj).data for obj in reversed(self.objects)]
        resp = self.auth_client.get(self.url + '?sort=metric.loss')
        assert resp.status_code == status.HTTP_200_OK
        assert resp.data['next'] is None
        assert resp.data['count'] == len(self.objects)
        data = resp.data['results']
        assert len(data) == self.queryset.count()
        assert data == [self.serializer_class(obj).data for obj in self.objects]
        # Order by metrics
        resp = self.auth_client.get(self.url + '?sort=-metrics.loss')
        assert resp.status_code == status.HTTP_200_OK
        assert resp.data['next'] is None
        assert resp.data['count'] == len(self.objects)
        data = resp.data['results']
        assert len(data) == self.queryset.count()
        assert data == [self.serializer_class(obj).data for obj in reversed(self.objects)]
        resp = self.auth_client.get(self.url + '?sort=metrics.loss')
        assert resp.status_code == status.HTTP_200_OK
        assert resp.data['next'] is None
        assert resp.data['count'] == len(self.objects)
        data = resp.data['results']
        assert len(data) == self.queryset.count()
        assert data == [self.serializer_class(obj).data for obj in self.objects]
    def test_get_filter_pagination(self):
        """Filtering and pagination compose."""
        limit = self.num_objects - 1
        resp = self.auth_client.get("{}?limit={}&{}".format(
            self.url,
            limit,
            '?query=created_at:>=2010-01-01,status:created|running'))
        assert resp.status_code == status.HTTP_200_OK
        next_page = resp.data.get('next')
        assert next_page is not None
        assert resp.data['count'] == self.queryset.count()
        data = resp.data['results']
        assert len(data) == limit
        assert data == self.serializer_class(self.queryset[:limit], many=True).data
        resp = self.auth_client.get(next_page)
        assert resp.status_code == status.HTTP_200_OK
        assert resp.data['next'] is None
        data = resp.data['results']
        assert len(data) == 1
        assert data == self.serializer_class(self.queryset[limit:], many=True).data
    def test_create_ttl(self):
        """ttl defaults to the scheduler countdown, accepts ints, rejects junk."""
        data = {'is_managed': False}
        resp = self.auth_client.post(self.url, data)
        assert resp.status_code == status.HTTP_201_CREATED
        xp = Experiment.objects.last()
        assert RedisTTL.get_for_experiment(xp.id) == conf.get(SCHEDULER_GLOBAL_COUNTDOWN)
        data = {'ttl': 10, 'is_managed': False}
        resp = self.auth_client.post(self.url, data)
        assert resp.status_code == status.HTTP_201_CREATED
        xp = Experiment.objects.last()
        assert RedisTTL.get_for_experiment(xp.id) == 10
        data = {'ttl': 'foo', 'is_managed': False}
        resp = self.auth_client.post(self.url, data)
        assert resp.status_code == status.HTTP_400_BAD_REQUEST
    def test_create_is_managed(self):
        """Unmanaged experiments are created as-is, with optional run_env."""
        data = {'is_managed': False}
        resp = self.auth_client.post(self.url, data)
        assert resp.status_code == status.HTTP_201_CREATED
        xp = Experiment.objects.last()
        assert xp.is_managed is False
        assert xp.run_env is None
        data = {'is_managed': False, 'run_env': {'foo': 'bar'}}
        resp = self.auth_client.post(self.url, data)
        assert resp.status_code == status.HTTP_201_CREATED
        xp = Experiment.objects.last()
        assert xp.is_managed is False
        assert xp.run_env == {'foo': 'bar'}
    def test_create_with_invalid_config(self):
        """A non-parsable spec content is rejected."""
        data = {'content': 'bar'}
        resp = self.auth_client.post(self.url, data)
        assert resp.status_code == status.HTTP_400_BAD_REQUEST
    def test_create(self):
        """A valid spec creates an experiment; other users' projects are denied."""
        resp = self.auth_client.post(self.url)
        assert resp.status_code == status.HTTP_400_BAD_REQUEST
        data = {'content': exec_experiment_spec_parsed_regression_artifact_refs.raw_data}
        resp = self.auth_client.post(self.url, data)
        assert resp.status_code == status.HTTP_201_CREATED
        assert self.queryset.count() == self.num_objects + 1
        # Test other
        resp = self.auth_client.post(self.other_url, data)
        assert resp.status_code in (status.HTTP_401_UNAUTHORIZED, status.HTTP_403_FORBIDDEN)
    def test_create_with_runner(self):
        """Creating a managed experiment schedules exactly one build task."""
        resp = self.auth_client.post(self.url)
        assert resp.status_code == status.HTTP_400_BAD_REQUEST
        data = {'content': exec_experiment_spec_parsed_content.raw_data}
        with patch('scheduler.tasks.experiments.experiments_build.apply_async') as mock_fct:
            resp = self.auth_client.post(self.url, data)
        assert resp.status_code == status.HTTP_201_CREATED
        assert mock_fct.call_count == 1
        assert self.queryset.count() == self.num_objects + 1
        # Test other
        resp = self.auth_client.post(self.other_url, data)
        assert resp.status_code in (status.HTTP_401_UNAUTHORIZED, status.HTTP_403_FORBIDDEN)
    def test_create_with_outputs_refs(self):
        """A spec with job output refs requires the referenced job to exist."""
        data = {'content': exec_experiment_outputs_refs_parsed_content.raw_data}
        resp = self.auth_client.post(self.url, data)
        # No job refs
        assert resp.status_code == status.HTTP_400_BAD_REQUEST
        # Creating the job should pass
        JobFactory(project=self.project, name='foo')  # noqa
        with patch('scheduler.tasks.experiments.experiments_build.apply_async') as mock_fct:
            resp = self.auth_client.post(self.url, data)
        assert resp.status_code == status.HTTP_201_CREATED
        assert mock_fct.call_count == 1
        assert self.queryset.count() == self.num_objects + 1
        experiment = self.queryset.order_by('created_at').last()
        assert experiment.outputs_refs is not None
        assert len(experiment.outputs_refs_jobs) == 1
        assert experiment.outputs_refs_experiments is None
        assert len(experiment.outputs_jobs) == 1
        assert experiment.outputs_experiments is None
    def test_create_without_config_passes_if_no_spec_validation_requested(self):
        """Unmanaged experiments need no spec content."""
        data = {'is_managed': False}
        resp = self.auth_client.post(self.url, data)
        assert resp.status_code == status.HTTP_201_CREATED
        assert self.queryset.count() == self.num_objects + 1
        last_object = self.model_class.objects.last()
        assert last_object.project == self.project
        assert last_object.content is None
    def test_create_with_params(self):
        """Params posted at creation are stored on the experiment."""
        data = {
            'is_managed': False,
            'params': {
                'lr': 0.1,
                'dropout': 0.5
            }
        }
        resp = self.auth_client.post(self.url, data)
        assert resp.status_code == status.HTTP_201_CREATED
        assert self.queryset.count() == self.num_objects + 1
        last_object = self.model_class.objects.last()
        assert last_object.project == self.project
        assert last_object.content is None
        assert last_object.params == {
            'lr': 0.1,
            'dropout': 0.5
        }
    def test_create_in_group(self):
        """Experiments may only be created in groups of the same project."""
        # Create in wrong group raises
        group = ExperimentGroupFactory()
        assert group.experiments.count() == 0
        data = {
            'is_managed': False,
            'params': {
                'lr': 0.1,
                'dropout': 0.5
            },
            'experiment_group': group.id
        }
        resp = self.auth_client.post(self.url, data)
        assert resp.status_code == status.HTTP_400_BAD_REQUEST
        # Create in correct group passes
        group = ExperimentGroupFactory(project=self.project)
        assert group.experiments.count() == 0
        data = {
            'is_managed': False,
            'params': {
                'lr': 0.1,
                'dropout': 0.5
            },
            'experiment_group': group.id
        }
        resp = self.auth_client.post(self.url, data)
        assert resp.status_code == status.HTTP_201_CREATED
        assert group.experiments.count() == 1
    def test_create_in_selection(self):
        """Same project rule applies to selection-type groups."""
        # Create in wrong selection raises
        group = ExperimentGroupFactory(group_type=GroupTypes.SELECTION, content=None)
        assert group.experiments.count() == 0
        data = {
            'params': {
                'lr': 0.1,
                'dropout': 0.5
            },
            'experiment_group': group.id
        }
        resp = self.auth_client.post(self.url, data)
        assert resp.status_code == status.HTTP_400_BAD_REQUEST
        # Create in correct group passes
        group = ExperimentGroupFactory(project=self.project,
                                       group_type=GroupTypes.SELECTION,
                                       content=None)
        assert group.experiments.count() == 0
        assert group.selection_experiments.count() == 0
        data = {
            'is_managed': False,
            'params': {
                'lr': 0.1,
                'dropout': 0.5
            },
            'experiment_group': group.id
        }
        resp = self.auth_client.post(self.url, data)
        assert resp.status_code == status.HTTP_201_CREATED
        assert group.selection_experiments.count() == 1
    def test_create_with_build(self):
        """A build_job id posted at creation is linked to the experiment."""
        # Test create with build
        build = BuildJobFactory()
        data = {'build_job': build.id, 'is_managed': False}
        resp = self.auth_client.post(self.url, data)
        assert resp.status_code == status.HTTP_201_CREATED
        last_object = self.model_class.objects.last()
        assert last_object.build_job == build
@pytest.mark.experiments_mark
class TestProjectExperimentLastMetricListViewV1(BaseViewTest):
    """Tests for the ?metrics / ?params / ?all projections of the experiment list."""
    metrics_serializer_class = ExperimentLastMetricSerializer
    params_serializer_class = ExperimentDeclarationsSerializer
    model_class = Experiment
    factory_class = ExperimentFactory
    num_objects = 3
    HAS_AUTH = True
    def setUp(self):
        """Create a project with num_objects experiments, each with one metric."""
        super().setUp()
        self.project = ProjectFactory(user=self.auth_client.user)
        self.url = '/{}/{}/{}/experiments/'.format(API_V1,
                                                   self.project.user.username,
                                                   self.project.name)
        self.objects = [self.factory_class(project=self.project,
                                           params={'param1': i, 'param2': i * 2})
                        for i in range(self.num_objects)]
        # Create Metrics
        for obj in self.objects:
            ExperimentMetricFactory(experiment=obj)
        self.queryset = self.model_class.objects.filter(project=self.project)
        self.queryset = self.queryset.order_by('-updated_at')
    def test_get_metrics(self):
        """?metrics=true serializes with the last-metric serializer."""
        resp = self.auth_client.get(self.url + '?metrics=true')
        assert resp.status_code == status.HTTP_200_OK
        assert resp.data['count'] == self.queryset.count()
        assert resp.data['results'] == self.metrics_serializer_class(
            self.queryset, many=True).data
    def test_get_params(self):
        """?params=true serializes with the declarations serializer."""
        resp = self.auth_client.get(self.url + '?params=true')
        assert resp.status_code == status.HTTP_200_OK
        assert resp.data['count'] == self.queryset.count()
        assert resp.data['results'] == self.params_serializer_class(
            self.queryset, many=True).data
    def test_get_all(self):
        """?all=true disables pagination and returns every experiment."""
        Experiment.objects.bulk_create([
            Experiment(project=self.project, user=self.auth_client.user)
            for _ in range(30)
        ])
        resp = self.auth_client.get(self.url)
        assert resp.status_code == status.HTTP_200_OK
        assert resp.data['count'] == self.queryset.count()
        assert len(resp.data['results']) < self.queryset.count()
        resp = self.auth_client.get(self.url + '?all=true')
        assert resp.status_code == status.HTTP_200_OK
        assert resp.data['count'] == self.queryset.count()
        assert len(resp.data['results']) == self.queryset.count()
@pytest.mark.experiments_mark
class TestExperimentGroupExperimentListViewV1(BaseViewTest):
    """Tests listing experiments filtered by their experiment group.

    The list endpoint is the project experiments URL with a `group` query
    parameter; only experiments belonging to the group must be returned.
    """
    serializer_class = BookmarkedExperimentSerializer
    model_class = Experiment
    factory_class = ExperimentFactory
    num_objects = 3
    HAS_AUTH = True

    def setUp(self):
        super().setUp()
        self.project = ProjectFactory()
        self.experiment_group = ExperimentGroupFactory(project=self.project)
        self.objects = [self.factory_class(project=self.project,
                                           experiment_group=self.experiment_group)
                        for _ in range(self.num_objects)]
        self.url = '/{}/{}/{}/experiments?group={}'.format(
            API_V1,
            self.experiment_group.project.user,
            self.experiment_group.project.name,
            self.experiment_group.id)
        # one object that does not belong to the filter
        self.factory_class(project=self.experiment_group.project)
        self.queryset = self.model_class.objects.filter(experiment_group=self.experiment_group)
        self.queryset = self.queryset.order_by('-updated_at')

    def test_get(self):
        """The group filter returns only the group's experiments."""
        resp = self.auth_client.get(self.url)
        assert resp.status_code == status.HTTP_200_OK
        assert resp.data['next'] is None
        assert resp.data['count'] == self.queryset.count()
        data = resp.data['results']
        assert len(data) == self.queryset.count()
        assert data == self.serializer_class(self.queryset, many=True).data

    def test_pagination(self):
        """A limit below the total paginates; the next page holds the rest."""
        limit = self.num_objects - 1
        resp = self.auth_client.get("{}&limit={}".format(self.url, limit))
        assert resp.status_code == status.HTTP_200_OK
        next_page = resp.data.get('next')
        assert next_page is not None
        assert resp.data['count'] == self.queryset.count()
        data = resp.data['results']
        assert len(data) == limit
        assert data == self.serializer_class(self.queryset[:limit], many=True).data
        resp = self.auth_client.get(next_page)
        assert resp.status_code == status.HTTP_200_OK
        assert resp.data['next'] is None
        data = resp.data['results']
        assert len(data) == 1
        assert data == self.serializer_class(self.queryset[limit:], many=True).data

    def test_pagination_all(self):
        """`all=true` bypasses pagination and returns the full result set.

        Fix: this test used to be a byte-for-byte duplicate of
        test_pagination and never exercised the `all` query parameter it
        was named after (see test_get_all in the project list tests for
        the expected `all=true` behavior).
        """
        limit = self.num_objects - 1
        resp = self.auth_client.get("{}&limit={}&all=true".format(self.url, limit))
        assert resp.status_code == status.HTTP_200_OK
        assert resp.data['count'] == self.queryset.count()
        data = resp.data['results']
        # Despite the limit, every experiment in the group is returned.
        assert len(data) == self.queryset.count()
        assert data == self.serializer_class(self.queryset, many=True).data

    def test_get_order(self):
        """`sort` overrides the default -updated_at ordering."""
        resp = self.auth_client.get(self.url + '&sort=created_at,updated_at')
        assert resp.status_code == status.HTTP_200_OK
        assert resp.data['next'] is None
        assert resp.data['count'] == len(self.objects)
        data = resp.data['results']
        assert len(data) == self.queryset.count()
        assert data != self.serializer_class(self.queryset, many=True).data
        assert data == self.serializer_class(self.queryset.order_by('created_at', 'updated_at'),
                                             many=True).data

    def test_get_order_pagination(self):
        """Explicit ordering is preserved across pages."""
        queryset = self.queryset.order_by('created_at', 'updated_at')
        limit = self.num_objects - 1
        resp = self.auth_client.get("{}&limit={}&{}".format(self.url,
                                                            limit,
                                                            'sort=created_at,updated_at'))
        assert resp.status_code == status.HTTP_200_OK
        next_page = resp.data.get('next')
        assert next_page is not None
        assert resp.data['count'] == queryset.count()
        data = resp.data['results']
        assert len(data) == limit
        assert data == self.serializer_class(queryset[:limit], many=True).data
        resp = self.auth_client.get(next_page)
        assert resp.status_code == status.HTTP_200_OK
        assert resp.data['next'] is None
        data = resp.data['results']
        assert len(data) == 1
        assert data == self.serializer_class(queryset[limit:], many=True).data
@pytest.mark.experiments_mark
class TestExperimentSelectionListViewV1(BaseViewTest):
    """Tests listing experiments through a selection-type experiment group."""
    serializer_class = BookmarkedExperimentSerializer
    model_class = Experiment
    factory_class = ExperimentFactory
    num_objects = 3
    HAS_AUTH = True

    def setUp(self):
        super().setUp()
        self.project = ProjectFactory()
        # A selection group has no content; members are attached explicitly.
        self.experiment_group = ExperimentGroupFactory(
            project=self.project, content=None, group_type=GroupTypes.SELECTION)
        self.objects = [self.factory_class(project=self.project)
                        for _ in range(self.num_objects)]
        self.experiment_group.selection_experiments.set(self.objects)
        self.url = '/{}/{}/{}/experiments?group={}'.format(
            API_V1,
            self.experiment_group.project.user,
            self.experiment_group.project.name,
            self.experiment_group.id)
        # An extra experiment outside the selection must not be listed.
        self.factory_class(project=self.experiment_group.project)
        self.queryset = self.experiment_group.selection_experiments.all().order_by('-updated_at')

    def test_get(self):
        """The group filter returns exactly the selection members."""
        response = self.auth_client.get(self.url)
        assert response.status_code == status.HTTP_200_OK
        assert response.data['next'] is None
        assert response.data['count'] == self.queryset.count()
        results = response.data['results']
        assert len(results) == self.queryset.count()
        assert results == self.serializer_class(self.queryset, many=True).data

    def test_pagination(self):
        """A limit below the total paginates; the next page holds the remainder."""
        limit = self.num_objects - 1
        response = self.auth_client.get('{}&limit={}'.format(self.url, limit))
        assert response.status_code == status.HTTP_200_OK
        next_page = response.data.get('next')
        assert next_page is not None
        assert response.data['count'] == self.queryset.count()
        results = response.data['results']
        assert len(results) == limit
        assert results == self.serializer_class(self.queryset[:limit], many=True).data
        # Follow the next link: the final page holds the single remaining object.
        response = self.auth_client.get(next_page)
        assert response.status_code == status.HTTP_200_OK
        assert response.data['next'] is None
        results = response.data['results']
        assert len(results) == 1
        assert results == self.serializer_class(self.queryset[limit:], many=True).data

    def test_get_order(self):
        """`sort` overrides the default -updated_at ordering."""
        response = self.auth_client.get(self.url + '&sort=created_at,updated_at')
        assert response.status_code == status.HTTP_200_OK
        assert response.data['next'] is None
        assert response.data['count'] == len(self.objects)
        results = response.data['results']
        assert len(results) == self.queryset.count()
        assert results != self.serializer_class(self.queryset, many=True).data
        ordered = self.queryset.order_by('created_at', 'updated_at')
        assert results == self.serializer_class(ordered, many=True).data

    def test_get_order_pagination(self):
        """Explicit ordering is preserved across pages."""
        ordered = self.queryset.order_by('created_at', 'updated_at')
        limit = self.num_objects - 1
        response = self.auth_client.get(
            '{}&limit={}&{}'.format(self.url, limit, 'sort=created_at,updated_at'))
        assert response.status_code == status.HTTP_200_OK
        next_page = response.data.get('next')
        assert next_page is not None
        assert response.data['count'] == ordered.count()
        results = response.data['results']
        assert len(results) == limit
        assert results == self.serializer_class(ordered[:limit], many=True).data
        response = self.auth_client.get(next_page)
        assert response.status_code == status.HTTP_200_OK
        assert response.data['next'] is None
        results = response.data['results']
        assert len(results) == 1
        assert results == self.serializer_class(ordered[limit:], many=True).data
@pytest.mark.experiments_mark
class TestRunnerExperimentGroupExperimentListViewV1(BaseViewTest):
    """Lists a group's experiments when the runner/executor actually create them.

    Unlike the factory-based tests, the experiment group here is created from
    an hptuning grid-search spec, so the scheduler entry points are patched
    and their invocation is asserted during setUp.
    """
    serializer_class = BookmarkedExperimentSerializer
    model_class = Experiment
    factory_class = ExperimentFactory
    num_objects = 3
    HAS_AUTH = True
    # Keep executor and runner enabled: group creation must go through the
    # real hp-search / build scheduling code paths (mocked below).
    DISABLE_EXECUTOR = False
    DISABLE_RUNNER = False
    def setUp(self):
        super().setUp()
        # Grid-search group spec with a 3-point matrix (linspace '1.:3.:3'),
        # expected to yield num_objects experiments.
        content = """---
    version: 1
    kind: group
    hptuning:
      matrix:
        lr:
          linspace: '1.:3.:3'
    run:
      cmd: python -u model.py --lr={{ lr }}
    """
        self.project = ProjectFactory()
        # Patch group checks and the scheduler tasks so that creating the
        # group triggers (but does not actually run) hp search and builds.
        with patch.object(GroupChecks, 'is_checked') as mock_is_check:
            with patch('hpsearch.tasks.grid.hp_grid_search_start.retry') as start_fct:
                with patch('scheduler.tasks.experiments.'
                           'experiments_build.apply_async') as build_fct:
                    mock_is_check.return_value = False
                    self.experiment_group = ExperimentGroupFactory(
                        project=self.project,
                        content=content)
                    assert start_fct.call_count == 1
                    assert build_fct.call_count == 1
        assert self.experiment_group.specification.matrix_space == 3
        self.url = '/{}/{}/{}/experiments?group={}'.format(
            API_V1,
            self.experiment_group.project.user,
            self.experiment_group.project.name,
            self.experiment_group.id)
        # one object that does not belong to the filter
        self.factory_class(project=self.project)
        self.queryset = self.model_class.objects.filter(experiment_group=self.experiment_group)
        self.queryset = self.queryset.order_by('-updated_at')
    def test_get(self):
        """The group filter lists the runner-created experiments."""
        resp = self.auth_client.get(self.url)
        assert resp.status_code == status.HTTP_200_OK
        assert resp.data['next'] is None
        assert resp.data['count'] == self.queryset.count()
        data = resp.data['results']
        assert len(data) == self.queryset.count()
        assert data == self.serializer_class(self.queryset, many=True).data
    def test_pagination(self):
        """A limit below the total paginates; the next page holds the rest."""
        limit = self.num_objects - 1
        resp = self.auth_client.get("{}&limit={}".format(self.url, limit))
        assert resp.status_code == status.HTTP_200_OK
        next_page = resp.data.get('next')
        assert next_page is not None
        assert resp.data['count'] == self.queryset.count()
        data = resp.data['results']
        assert len(data) == limit
        assert data == self.serializer_class(self.queryset[:limit], many=True).data
        resp = self.auth_client.get(next_page)
        assert resp.status_code == status.HTTP_200_OK
        assert resp.data['next'] is None
        data = resp.data['results']
        assert len(data) == 1
        assert data == self.serializer_class(self.queryset[limit:], many=True).data
    def test_get_order(self):
        """`sort` overrides the default -updated_at ordering."""
        resp = self.auth_client.get(self.url + '&sort=created_at,updated_at')
        assert resp.status_code == status.HTTP_200_OK
        assert resp.data['next'] is None
        assert resp.data['count'] == self.num_objects
        data = resp.data['results']
        assert len(data) == self.queryset.count()
        assert data != self.serializer_class(self.queryset, many=True).data
        assert data == self.serializer_class(self.queryset.order_by('created_at', 'updated_at'),
                                             many=True).data
    def test_get_order_pagination(self):
        """Explicit ordering is preserved across pages."""
        queryset = self.queryset.order_by('created_at', 'updated_at')
        limit = self.num_objects - 1
        resp = self.auth_client.get("{}&limit={}&{}".format(self.url,
                                                            limit,
                                                            'sort=created_at,updated_at'))
        assert resp.status_code == status.HTTP_200_OK
        next_page = resp.data.get('next')
        assert next_page is not None
        assert resp.data['count'] == queryset.count()
        data = resp.data['results']
        assert len(data) == limit
        assert data == self.serializer_class(queryset[:limit], many=True).data
        resp = self.auth_client.get(next_page)
        assert resp.status_code == status.HTTP_200_OK
        assert resp.data['next'] is None
        data = resp.data['results']
        assert len(data) == 1
        assert data == self.serializer_class(queryset[limit:], many=True).data
@pytest.mark.experiments_mark
class TestExperimentDetailViewV1(BaseViewTest):
    """Detail endpoint tests: GET, PATCH, DELETE, archive and restore."""
    serializer_class = ExperimentDetailSerializer
    model_class = Experiment
    factory_class = ExperimentFactory
    HAS_AUTH = True
    # Keep runner/executor enabled so scheduling hooks fire (mocked below).
    DISABLE_RUNNER = False
    DISABLE_EXECUTOR = False
    def setUp(self):
        """Create one experiment with two jobs; mock the dockerizer start."""
        super().setUp()
        project = ProjectFactory(user=self.auth_client.user)
        # Creating the experiment must trigger exactly one dockerizer start.
        with patch('scheduler.dockerizer_scheduler.start_dockerizer') as spawner_mock_start:
            self.object = self.factory_class(project=project)
            assert spawner_mock_start.call_count == 1
        self.url = '/{}/{}/{}/experiments/{}/'.format(API_V1,
                                                      project.user.username,
                                                      project.name,
                                                      self.object.id)
        self.queryset = self.model_class.objects.all()
        # Create related fields (two jobs attached to the experiment)
        for _ in range(2):
            ExperimentJobFactory(experiment=self.object)
        # Annotated row used by the detail serializer for the expected payload.
        self.object_query = queries.experiments_details.get(id=self.object.id)
    def test_get(self):
        """GET returns the detail serialization, including the job count."""
        resp = self.auth_client.get(self.url)
        assert resp.status_code == status.HTTP_200_OK
        self.object.refresh_from_db()
        assert resp.data == self.serializer_class(self.object_query).data
        assert resp.data['num_jobs'] == 2
    def test_get_with_resource_reg_90(self):
        # Regression test for issue #90:
        # getting an experiment failed when resources were specified
        # without a framework in the environment section.
        spec_content = """---
    version: 1
    kind: experiment
    environment:
      node_selector:
        foo: bar
      tolerations:
        - key: "key"
          operator: "Equal"
          value: "value"
          effect: "NoSchedule"
      affinity:
        foo: bar
      resources:
        gpu:
          requests: 1
          limits: 1
        tpu:
          requests: 1
          limits: 1
    build:
      image: my_image
    run:
      cmd: video_prediction_train --model=DNA --num_masks=1
    """
        spec_parsed_content = ExperimentSpecification.read(spec_content)
        project = ProjectFactory(user=self.auth_client.user)
        exp = self.factory_class(project=project, content=spec_parsed_content.raw_data)
        url = '/{}/{}/{}/experiments/{}/'.format(API_V1,
                                                 project.user.username,
                                                 project.name,
                                                 exp.id)
        resp = self.auth_client.get(url)
        assert resp.status_code == status.HTTP_200_OK
        exp_query = queries.experiments_details.get(id=exp.id)
        assert resp.data == self.serializer_class(exp_query).data
    def test_patch_exp(self):  # pylint:disable=too-many-statements
        """PATCH updates description, is_managed, original experiment, tags,
        params and name; the attached jobs are never affected."""
        new_description = 'updated_xp_name'
        data = {'description': new_description}
        assert self.object.description != data['description']
        resp = self.auth_client.patch(self.url, data=data)
        assert resp.status_code == status.HTTP_200_OK
        new_object = self.model_class.objects.get(id=self.object.id)
        assert new_object.user == self.object.user
        assert new_object.description != self.object.description
        assert new_object.description == new_description
        assert new_object.jobs.count() == 2
        # patch is_managed: set to False
        data = {'is_managed': False}
        assert self.object.is_managed is True
        resp = self.auth_client.patch(self.url, data=data)
        assert resp.status_code == status.HTTP_200_OK
        new_object = self.model_class.objects.get(id=self.object.id)
        assert new_object.jobs.count() == 2
        assert new_object.is_managed is False
        # patch is_managed: None resets it to True (presumably the model default)
        data = {'is_managed': None}
        assert new_object.is_managed is False
        resp = self.auth_client.patch(self.url, data=data)
        assert resp.status_code == status.HTTP_200_OK
        new_object = self.model_class.objects.get(id=self.object.id)
        assert new_object.jobs.count() == 2
        assert new_object.is_managed is True
        # patch is_managed: explicit False/True round trip
        data = {'is_managed': False}
        assert new_object.is_managed is True
        resp = self.auth_client.patch(self.url, data=data)
        assert resp.status_code == status.HTTP_200_OK
        new_object = self.model_class.objects.get(id=self.object.id)
        assert new_object.jobs.count() == 2
        assert new_object.is_managed is False
        data = {'is_managed': True}
        resp = self.auth_client.patch(self.url, data=data)
        assert resp.status_code == status.HTTP_200_OK
        new_object = self.model_class.objects.get(id=self.object.id)
        assert new_object.jobs.count() == 2
        assert new_object.is_managed is True
        # Update original experiment (marks this experiment as a clone)
        assert new_object.is_clone is False
        new_experiment = ExperimentFactory()
        data = {'original_experiment': new_experiment.id}
        resp = self.auth_client.patch(self.url, data=data)
        assert resp.status_code == status.HTTP_200_OK
        new_object = self.model_class.objects.get(id=self.object.id)
        assert new_object.user == self.object.user
        assert new_object.description == new_description
        assert new_object.jobs.count() == 2
        assert new_object.is_clone is True
        assert new_object.original_experiment == new_experiment
        # Update tags: replaced by default, merged when merge=True
        assert new_object.tags == ['fixtures']
        data = {'tags': ['foo', 'bar']}
        resp = self.auth_client.patch(self.url, data=data)
        assert resp.status_code == status.HTTP_200_OK
        new_object = self.model_class.objects.get(id=self.object.id)
        assert sorted(new_object.tags) == sorted(['foo', 'bar'])
        data = {'tags': ['foo_new', 'bar_new'], 'merge': False}
        resp = self.auth_client.patch(self.url, data=data)
        assert resp.status_code == status.HTTP_200_OK
        new_object = self.model_class.objects.get(id=self.object.id)
        assert sorted(new_object.tags) == sorted(['foo_new', 'bar_new'])
        data = {'tags': ['foo', 'bar'], 'merge': True}
        resp = self.auth_client.patch(self.url, data=data)
        assert resp.status_code == status.HTTP_200_OK
        new_object = self.model_class.objects.get(id=self.object.id)
        assert sorted(new_object.tags) == sorted(['foo_new', 'bar_new', 'foo', 'bar'])
        # Update params: replaced by default, merged when merge=True
        assert new_object.params is None
        data = {'params': {'foo': 'bar'}}
        resp = self.auth_client.patch(self.url, data=data)
        assert resp.status_code == status.HTTP_200_OK
        new_object = self.model_class.objects.get(id=self.object.id)
        assert new_object.params == {'foo': 'bar'}
        data = {'params': {'foo_new': 'bar_new'}, 'merge': False}
        resp = self.auth_client.patch(self.url, data=data)
        assert resp.status_code == status.HTTP_200_OK
        new_object = self.model_class.objects.get(id=self.object.id)
        assert new_object.params == {'foo_new': 'bar_new'}
        data = {'params': {'foo': 'bar'}, 'merge': True}
        resp = self.auth_client.patch(self.url, data=data)
        assert resp.status_code == status.HTTP_200_OK
        new_object = self.model_class.objects.get(id=self.object.id)
        assert new_object.params == {'foo_new': 'bar_new', 'foo': 'bar'}
        # Update name
        data = {'name': 'new_name'}
        assert new_object.name is None
        resp = self.auth_client.patch(self.url, data=data)
        assert resp.status_code == status.HTTP_200_OK
        new_object = self.model_class.objects.get(id=self.object.id)
        assert new_object.name == data['name']
    def test_delete_from_created_status_archives_and_schedules_stop(self):
        """Deleting a created experiment stops it and removes it and its jobs."""
        assert self.model_class.objects.count() == 1
        assert ExperimentJob.objects.count() == 2
        with patch('scheduler.experiment_scheduler.stop_experiment') as spawner_mock_stop:
            resp = self.auth_client.delete(self.url)
            assert spawner_mock_stop.call_count == 1
        assert resp.status_code == status.HTTP_204_NO_CONTENT
        # Deleted
        assert self.model_class.objects.count() == 0
        assert self.model_class.all.count() == 0
        assert ExperimentJob.objects.count() == 0
    def test_delete_from_running_status_archives_and_schedules_stop(self):
        """Deleting a running experiment stops it and removes it and its jobs."""
        self.object.set_status(ExperimentLifeCycle.RUNNING)
        assert self.model_class.objects.count() == 1
        assert ExperimentJob.objects.count() == 2
        with patch('scheduler.experiment_scheduler.stop_experiment') as spawner_mock_stop:
            resp = self.auth_client.delete(self.url)
            assert spawner_mock_stop.call_count == 1
        assert resp.status_code == status.HTTP_204_NO_CONTENT
        # Deleted
        assert self.model_class.objects.count() == 0
        assert self.model_class.all.count() == 0
        assert ExperimentJob.objects.count() == 0
    def test_delete_archives_and_schedules_deletion(self):
        """DELETE archives the experiment and schedules the real deletion task."""
        self.object.set_status(ExperimentLifeCycle.RUNNING)
        assert self.model_class.objects.count() == 1
        assert ExperimentJob.objects.count() == 2
        with patch('scheduler.tasks.experiments.'
                   'experiments_schedule_deletion.apply_async') as spawner_mock_stop:
            resp = self.auth_client.delete(self.url)
            assert spawner_mock_stop.call_count == 1
        assert resp.status_code == status.HTTP_204_NO_CONTENT
        # Patched: gone from the default manager but still in `all`.
        assert self.model_class.objects.count() == 0
        assert self.model_class.all.count() == 1
        assert ExperimentJob.objects.count() == 2
    def test_archive_schedule_deletion(self):
        """Archiving schedules a deletion task while the record stays visible."""
        self.object.set_status(ExperimentLifeCycle.RUNNING)
        assert self.model_class.objects.count() == 1
        assert ExperimentJob.objects.count() == 2
        with patch('scheduler.tasks.experiments.'
                   'experiments_schedule_deletion.apply_async') as spawner_mock_stop:
            resp = self.auth_client.post(self.url + 'archive/')
        assert resp.status_code == status.HTTP_200_OK
        assert spawner_mock_stop.call_count == 1
        assert self.model_class.objects.count() == 1
        assert self.model_class.all.count() == 1
    def test_archive_schedule_archives_and_schedules_stop(self):
        """Archiving schedules a stop and hides the experiment from `objects`."""
        self.object.set_status(ExperimentLifeCycle.RUNNING)
        assert self.model_class.objects.count() == 1
        assert ExperimentJob.objects.count() == 2
        with patch('scheduler.tasks.experiments.'
                   'experiments_stop.apply_async') as spawner_mock_stop:
            resp = self.auth_client.post(self.url + 'archive/')
        assert resp.status_code == status.HTTP_200_OK
        assert spawner_mock_stop.call_count == 1
        assert self.model_class.objects.count() == 0
        assert self.model_class.all.count() == 1
        assert ExperimentJob.objects.count() == 2
    def test_restore(self):
        """Restore brings an archived experiment back into the default manager."""
        self.object.archive()
        assert self.model_class.objects.count() == 0
        assert self.model_class.all.count() == 1
        resp = self.auth_client.post(self.url + 'restore/')
        assert resp.status_code == status.HTTP_200_OK
        assert self.model_class.objects.count() == 1
        assert self.model_class.all.count() == 1
        assert ExperimentJob.objects.count() == 2
@pytest.mark.experiments_mark
class TestExperimentCodeReferenceViewV1(BaseEntityCodeReferenceViewTest):
    """Code-reference endpoint tests specialized for experiments."""
    entity_factory_class = ExperimentFactory

    def get_url(self):
        """Build the coderef URL for the experiment under test."""
        return '/{}/{}/{}/experiments/{}/coderef/'.format(
            API_V1, self.project.user.username, self.project.name, self.obj.id)
@pytest.mark.experiments_mark
class TestExperimentStatusListViewV1(BaseViewTest):
    """Tests listing and creating experiment statuses (auth + internal clients)."""
    serializer_class = ExperimentStatusSerializer
    model_class = ExperimentStatus
    factory_class = ExperimentStatusFactory
    num_objects = 3
    HAS_AUTH = True
    HAS_INTERNAL = True
    INTERNAL_SERVICE = InternalServices.SIDECAR

    def setUp(self):
        super().setUp()
        # Silence status side effects and build scheduling while the fixture
        # experiment is created.
        with patch.object(Experiment, 'set_status') as _:
            with patch('scheduler.tasks.experiments.experiments_build.apply_async') as _:  # noqa
                project = ProjectFactory(user=self.auth_client.user)
                self.experiment = ExperimentFactory(project=project)
        self.url = '/{}/{}/{}/experiments/{}/statuses/'.format(
            API_V1, project.user.username, project.name, self.experiment.id)
        # One status per lifecycle choice, created outside the patches.
        self.objects = [self.factory_class(experiment=self.experiment,
                                           status=ExperimentLifeCycle.CHOICES[index][0])
                        for index in range(self.num_objects)]
        self.queryset = self.model_class.objects.all().order_by('created_at')

    def test_get(self):
        """Both the auth client and the internal (sidecar) client may list statuses."""
        response = self.auth_client.get(self.url)
        assert response.status_code == status.HTTP_200_OK
        response = self.internal_client.get(self.url)
        assert response.status_code == status.HTTP_200_OK
        assert response.data['next'] is None
        assert response.data['count'] == len(self.objects)
        results = response.data['results']
        assert len(results) == self.queryset.count()
        assert results == self.serializer_class(self.queryset, many=True).data

    def test_pagination(self):
        """A limit below the total paginates; the next page holds the remainder."""
        limit = self.num_objects - 1
        response = self.auth_client.get('{}?limit={}'.format(self.url, limit))
        assert response.status_code == status.HTTP_200_OK
        next_page = response.data.get('next')
        assert next_page is not None
        assert response.data['count'] == self.queryset.count()
        results = response.data['results']
        assert len(results) == limit
        assert results == self.serializer_class(self.queryset[:limit], many=True).data
        response = self.auth_client.get(next_page)
        assert response.status_code == status.HTTP_200_OK
        assert response.data['next'] is None
        results = response.data['results']
        assert len(results) == 1
        assert results == self.serializer_class(self.queryset[limit:], many=True).data

    def test_create(self):
        """POST without a status defaults to CREATED; explicit fields persist."""
        response = self.auth_client.post(self.url, {})
        assert response.status_code == status.HTTP_201_CREATED
        assert self.model_class.objects.count() == self.num_objects + 1
        assert self.model_class.objects.last().status == ExperimentLifeCycle.CREATED
        payload = {'status': ExperimentLifeCycle.RUNNING}
        response = self.auth_client.post(self.url, payload)
        assert response.status_code == status.HTTP_201_CREATED
        assert self.model_class.objects.count() == self.num_objects + 2
        created = self.model_class.objects.last()
        assert created.experiment == self.experiment
        assert created.status == payload['status']
        # Message and traceback are stored alongside the status.
        payload = {'status': ExperimentLifeCycle.FAILED,
                   'message': 'message1',
                   'traceback': 'traceback1'}
        response = self.auth_client.post(self.url, payload)
        assert response.status_code == status.HTTP_201_CREATED
        assert self.model_class.objects.count() == self.num_objects + 3
        created = self.model_class.objects.last()
        assert created.experiment == self.experiment
        assert created.message == payload['message']
        assert created.traceback == payload['traceback']
        # The internal client may create statuses as well.
        response = self.internal_client.post(self.url, {})
        assert response.status_code == status.HTTP_201_CREATED
        assert self.model_class.objects.count() == self.num_objects + 4
@pytest.mark.experiments_mark
class TestExperimentMetricListViewV1(BaseViewTest):
    """Tests listing and creating experiment metrics (single and bulk)."""
    serializer_class = ExperimentMetricSerializer
    model_class = ExperimentMetric
    factory_class = ExperimentMetricFactory
    num_objects = 3
    HAS_AUTH = True
    HAS_INTERNAL = True

    def setUp(self):
        super().setUp()
        project = ProjectFactory(user=self.auth_client.user)
        self.experiment = ExperimentFactory(project=project)
        self.url = '/{}/{}/{}/experiments/{}/metrics/'.format(
            API_V1, project.user.username, project.name, self.experiment.id)
        self.objects = [self.factory_class(experiment=self.experiment,
                                           values={'accuracy': index / 10})
                        for index in range(self.num_objects)]
        self.queryset = self.model_class.objects.all().order_by('created_at')

    def test_get(self):
        """Metrics are listed oldest first."""
        response = self.auth_client.get(self.url)
        assert response.status_code == status.HTTP_200_OK
        assert response.data['next'] is None
        assert response.data['count'] == len(self.objects)
        results = response.data['results']
        assert len(results) == self.queryset.count()
        assert results == self.serializer_class(self.queryset, many=True).data

    def test_pagination(self):
        """A limit below the total paginates; the next page holds the remainder."""
        limit = self.num_objects - 1
        response = self.auth_client.get('{}?limit={}'.format(self.url, limit))
        assert response.status_code == status.HTTP_200_OK
        next_page = response.data.get('next')
        assert next_page is not None
        assert response.data['count'] == self.queryset.count()
        results = response.data['results']
        assert len(results) == limit
        assert results == self.serializer_class(self.queryset[:limit], many=True).data
        response = self.auth_client.get(next_page)
        assert response.status_code == status.HTTP_200_OK
        assert response.data['next'] is None
        results = response.data['results']
        assert len(results) == 1
        assert results == self.serializer_class(self.queryset[limit:], many=True).data

    def test_create(self):
        """An empty payload is rejected; a values dict creates one metric."""
        response = self.auth_client.post(self.url, {})
        assert response.status_code == status.HTTP_400_BAD_REQUEST
        payload = {'values': {'precision': 0.9}}
        response = self.auth_client.post(self.url, payload)
        assert response.status_code == status.HTTP_201_CREATED
        assert self.model_class.objects.count() == self.num_objects + 1
        created = self.model_class.objects.last()
        assert created.experiment == self.experiment
        assert created.values == payload['values']

    def test_create_many(self):
        """A list payload creates one metric per entry."""
        response = self.auth_client.post(self.url, {})
        assert response.status_code == status.HTTP_400_BAD_REQUEST
        payload = [
            {'values': {'precision': 0.9}},
            {'values': {'precision': 0.95}},
            {'values': {'precision': 0.99}},
        ]
        response = self.auth_client.post(self.url, payload)
        assert response.status_code == status.HTTP_201_CREATED
        assert self.model_class.objects.count() == self.num_objects + 3
        created = self.model_class.objects.last()
        assert created.experiment == self.experiment
        assert created.values == payload[-1]['values']
        # Posting metrics schedules exactly one set-metrics task.
        with patch('scheduler.tasks.experiments.experiments_set_metrics.apply_async') as mock_fct:
            response = self.auth_client.post(self.url, payload)
        assert response.status_code == status.HTTP_201_CREATED
        assert mock_fct.call_count == 1

    def test_create_internal(self):
        """The internal client follows the same create rules."""
        response = self.internal_client.post(self.url, {})
        assert response.status_code == status.HTTP_400_BAD_REQUEST
        payload = {'values': {'precision': 0.9}}
        response = self.internal_client.post(self.url, payload)
        assert response.status_code == status.HTTP_201_CREATED
        assert self.model_class.objects.count() == self.num_objects + 1
        created = self.model_class.objects.last()
        assert created.experiment == self.experiment
        assert created.values == payload['values']
@pytest.mark.experiments_mark
class TestExperimentStatusDetailViewV1(BaseViewTest):
    """Statuses are read-only: GET works, PATCH and DELETE are rejected."""
    serializer_class = ExperimentStatusSerializer
    model_class = ExperimentStatus
    factory_class = ExperimentStatusFactory
    HAS_AUTH = True

    def setUp(self):
        super().setUp()
        # Keep fixture creation from triggering status side effects or builds.
        with patch.object(Experiment, 'set_status') as _:  # noqa
            with patch('scheduler.tasks.experiments.experiments_build.apply_async') as _:  # noqa
                self.experiment = ExperimentFactory()
        self.object = self.factory_class(experiment=self.experiment)
        self.url = '/{}/{}/{}/experiments/{}/statuses/{}/'.format(
            API_V1,
            self.experiment.project.user.username,
            self.experiment.project.name,
            self.experiment.id,
            self.object.uuid.hex)
        self.queryset = self.model_class.objects.all()

    def test_get(self):
        """GET returns the serialized status."""
        response = self.auth_client.get(self.url)
        assert response.status_code == status.HTTP_200_OK
        assert response.data == self.serializer_class(self.object).data

    def test_patch(self):
        """Statuses cannot be modified through the API."""
        payload = {'status': ExperimentLifeCycle.SUCCEEDED}
        response = self.auth_client.patch(self.url, data=payload)
        assert response.status_code == status.HTTP_405_METHOD_NOT_ALLOWED

    def test_delete(self):
        """Statuses cannot be deleted through the API."""
        assert self.model_class.objects.count() == 1
        response = self.auth_client.delete(self.url)
        assert response.status_code == status.HTTP_405_METHOD_NOT_ALLOWED
        assert self.model_class.objects.count() == 1
@pytest.mark.experiments_mark
class TestExperimentJobListViewV1(BaseViewTest):
    """Tests listing and creating jobs under an experiment."""
    serializer_class = ExperimentJobSerializer
    model_class = ExperimentJob
    factory_class = ExperimentJobFactory
    num_objects = 3
    HAS_AUTH = True

    def setUp(self):
        super().setUp()
        project = ProjectFactory(user=self.auth_client.user)
        self.experiment = ExperimentFactory(project=project)
        self.url = '/{}/{}/{}/experiments/{}/jobs/'.format(
            API_V1, project.user.username, project.name, self.experiment.id)
        self.objects = [self.factory_class(experiment=self.experiment)
                        for _ in range(self.num_objects)]
        self.queryset = self.model_class.objects.all().order_by('-updated_at')

    def test_get(self):
        """Jobs are listed most recently updated first."""
        response = self.auth_client.get(self.url)
        assert response.status_code == status.HTTP_200_OK
        assert response.data['next'] is None
        assert response.data['count'] == len(self.objects)
        results = response.data['results']
        assert len(results) == self.queryset.count()
        assert results == self.serializer_class(self.queryset, many=True).data

    def test_pagination(self):
        """A limit below the total paginates; the next page holds the remainder."""
        limit = self.num_objects - 1
        response = self.auth_client.get('{}?limit={}'.format(self.url, limit))
        assert response.status_code == status.HTTP_200_OK
        next_page = response.data.get('next')
        assert next_page is not None
        assert response.data['count'] == self.queryset.count()
        results = response.data['results']
        assert len(results) == limit
        assert results == self.serializer_class(self.queryset[:limit], many=True).data
        response = self.auth_client.get(next_page)
        assert response.status_code == status.HTTP_200_OK
        assert response.data['next'] is None
        results = response.data['results']
        assert len(results) == 1
        assert results == self.serializer_class(self.queryset[limit:], many=True).data

    def test_create(self):
        """POSTing a definition creates a job attached to the experiment."""
        payload = {'definition': {'key': 'my new kob k8s'}}
        response = self.auth_client.post(self.url, payload)
        assert response.status_code == status.HTTP_201_CREATED
        assert self.model_class.objects.count() == self.num_objects + 1
        created = self.model_class.objects.last()
        assert created.experiment == self.experiment
        assert created.definition == payload['definition']
@pytest.mark.experiments_mark
class TestExperimentJobDetailViewV1(BaseViewTest):
    """Tests retrieving, patching and deleting a single experiment job."""
    serializer_class = ExperimentJobDetailSerializer
    model_class = ExperimentJob
    factory_class = ExperimentJobFactory
    HAS_AUTH = True

    def setUp(self):
        super().setUp()
        project = ProjectFactory(user=self.auth_client.user)
        self.experiment = ExperimentFactory(project=project)
        self.object = self.factory_class(experiment=self.experiment)
        self.url = '/{}/{}/{}/experiments/{}/jobs/{}/'.format(
            API_V1, project.user.username, project.name,
            self.experiment.id, self.object.id)
        self.queryset = self.model_class.objects.filter(experiment=self.experiment)

    def test_get(self):
        """GET returns the serialized job detail."""
        response = self.auth_client.get(self.url)
        assert response.status_code == status.HTTP_200_OK
        assert response.data == self.serializer_class(self.object).data

    def test_patch(self):
        """PATCH replaces the job definition."""
        payload = {'definition': {'new_key': 'new_value'}}
        response = self.auth_client.patch(self.url, data=payload)
        assert response.status_code == status.HTTP_200_OK
        refreshed = self.model_class.objects.get(id=self.object.id)
        assert refreshed.experiment == self.object.experiment
        assert refreshed.definition != self.object.definition
        assert refreshed.definition == payload['definition']

    def test_cannot_path_experiment(self):
        """Patching the owning experiment is silently ignored (read-only field)."""
        payload = {'experiment': ExperimentFactory().id}
        response = self.auth_client.patch(self.url, data=payload)
        assert response.status_code == status.HTTP_200_OK
        refreshed = self.model_class.objects.get(id=self.object.id)
        assert refreshed.experiment == self.object.experiment

    def test_delete(self):
        """DELETE removes the job."""
        assert self.model_class.objects.count() == 1
        response = self.auth_client.delete(self.url)
        assert response.status_code == status.HTTP_204_NO_CONTENT
        assert self.model_class.objects.count() == 0
@pytest.mark.experiments_mark
class TestExperimentJobStatusListViewV1(BaseViewTest):
    """Tests listing, paginating, and creating statuses for one experiment job."""
    serializer_class = ExperimentJobStatusSerializer
    model_class = ExperimentJobStatus
    factory_class = ExperimentJobStatusFactory
    num_objects = 3
    HAS_AUTH = True
    def setUp(self):
        """Create a job and `num_objects` statuses, one per lifecycle choice."""
        super().setUp()
        # Patch the build task and status side effects so that creating the
        # fixtures does not enqueue work or write extra status rows.
        with patch('scheduler.tasks.experiments.experiments_build.apply_async') as _: # noqa
            with patch.object(ExperimentJob, 'set_status') as _: # noqa
                project = ProjectFactory(user=self.auth_client.user)
                experiment = ExperimentFactory(project=project)
                self.experiment_job = ExperimentJobFactory(experiment=experiment)
        self.url = '/{}/{}/{}/experiments/{}/jobs/{}/statuses/'.format(
            API_V1,
            project.user.username,
            project.name,
            experiment.id,
            self.experiment_job.id)
        # One status object per distinct JobLifeCycle choice.
        self.objects = [self.factory_class(job=self.experiment_job,
                                           status=JobLifeCycle.CHOICES[i][0])
                        for i in range(self.num_objects)]
        self.queryset = self.model_class.objects.filter(job=self.experiment_job)
        self.queryset = self.queryset.order_by('created_at')
    def test_get(self):
        """A single page returns every status serialized in creation order."""
        resp = self.auth_client.get(self.url)
        assert resp.status_code == status.HTTP_200_OK
        assert resp.data['next'] is None
        assert resp.data['count'] == len(self.objects)
        data = resp.data['results']
        assert len(data) == self.queryset.count()
        assert data == self.serializer_class(self.queryset, many=True).data
    def test_pagination(self):
        """`limit` yields a full first page plus a one-item second page."""
        limit = self.num_objects - 1
        resp = self.auth_client.get("{}?limit={}".format(self.url, limit))
        assert resp.status_code == status.HTTP_200_OK
        next_page = resp.data.get('next')
        assert next_page is not None
        assert resp.data['count'] == self.queryset.count()
        data = resp.data['results']
        assert len(data) == limit
        assert data == self.serializer_class(self.queryset[:limit], many=True).data
        resp = self.auth_client.get(next_page)
        assert resp.status_code == status.HTTP_200_OK
        assert resp.data['next'] is None
        data = resp.data['results']
        assert len(data) == 1
        assert data == self.serializer_class(self.queryset[limit:], many=True).data
    def test_create(self):
        """POST without a status defaults to CREATED; an explicit status is stored."""
        data = {}
        resp = self.auth_client.post(self.url, data)
        assert resp.status_code == status.HTTP_201_CREATED
        assert self.model_class.objects.count() == self.num_objects + 1
        last_object = self.model_class.objects.last()
        assert last_object.status == JobLifeCycle.CREATED
        data = {'status': JobLifeCycle.SUCCEEDED}
        resp = self.auth_client.post(self.url, data)
        assert resp.status_code == status.HTTP_201_CREATED
        assert self.model_class.objects.count() == self.num_objects + 2
        last_object = self.model_class.objects.last()
        assert last_object.job == self.experiment_job
        assert last_object.status == data['status']
@pytest.mark.experiments_mark
class TestExperimentJobStatusDetailViewV1(BaseViewTest):
    """Tests get/patch (and the forbidden delete) on a single job status."""
    serializer_class = ExperimentJobStatusSerializer
    model_class = ExperimentJobStatus
    factory_class = ExperimentJobStatusFactory
    HAS_AUTH = True
    def setUp(self):
        super().setUp()
        # Patch scheduling/status side effects during fixture creation.
        with patch('scheduler.tasks.experiments.experiments_build.apply_async') as _: # noqa
            with patch.object(ExperimentJob, 'set_status') as _: # noqa
                project = ProjectFactory(user=self.auth_client.user)
                experiment = ExperimentFactory(project=project)
                self.experiment_job = ExperimentJobFactory(experiment=experiment)
                self.object = self.factory_class(job=self.experiment_job)
        # Status detail is addressed by the status object's uuid hex.
        self.url = '/{}/{}/{}/experiments/{}/jobs/{}/statuses/{}'.format(
            API_V1,
            project.user.username,
            project.name,
            experiment.id,
            self.experiment_job.id,
            self.object.uuid.hex)
        self.queryset = self.model_class.objects.filter(job=self.experiment_job)
    def test_get(self):
        """GET returns the serialized status."""
        resp = self.auth_client.get(self.url)
        assert resp.status_code == status.HTTP_200_OK
        assert resp.data == self.serializer_class(self.object).data
    def test_patch(self):
        """PATCH updates `details` (and `message`) without touching the cached object."""
        data = {'details': {'message': 'bla', 'reason': 'some reason'}}
        resp = self.auth_client.patch(self.url, data=data)
        assert resp.status_code == status.HTTP_200_OK
        # The in-memory fixture is stale; only the DB row changed.
        assert self.object.details == {}
        new_object = self.model_class.objects.get(id=self.object.id)
        assert new_object.details == {'message': 'bla', 'reason': 'some reason'}
        data = {'message': 'new reason', 'details': {'message': 'bla2', 'reason': 'some reason3'}}
        resp = self.auth_client.patch(self.url, data=data)
        assert resp.status_code == status.HTTP_200_OK
        new_object = self.model_class.objects.get(id=self.object.id)
        assert new_object.message == 'new reason'
        assert new_object.details == {'message': 'bla2', 'reason': 'some reason3'}
    def test_delete(self):
        """DELETE is not allowed on statuses; the row must survive."""
        assert self.model_class.objects.count() == 1
        resp = self.auth_client.delete(self.url)
        assert resp.status_code == status.HTTP_405_METHOD_NOT_ALLOWED
        assert self.model_class.objects.count() == 1
@pytest.mark.experiments_mark
class TestExperimentJobLogsViewV1(BaseViewTest):
    """Tests fetching and stream-redirecting the logs of one experiment job."""
    num_log_lines = 10
    HAS_AUTH = True
    def setUp(self):
        super().setUp()
        project = ProjectFactory(user=self.auth_client.user)
        self.experiment = ExperimentFactory(
            project=project,
            content=exec_experiment_resources_parsed_content.raw_data)
        self.experiment_job = ExperimentJobFactory(experiment=self.experiment)
        self.logs = []
        # REST endpoint serving the stored log file.
        self.url = '/{}/{}/{}/experiments/{}/jobs/{}/logs'.format(
            API_V1,
            project.user.username,
            project.name,
            self.experiment.id,
            self.experiment_job.id)
        # Endpoint that should redirect to the internal streaming service.
        self.stream_url = '/{}/{}/{}/experiments/{}/jobs/{}/logs/stream'.format(
            API_V1,
            project.user.username,
            project.name,
            self.experiment.id,
            self.experiment_job.id)
        # Websocket target the stream endpoint is expected to redirect to.
        self.ws_url = '/{}/{}/{}/experiments/{}/jobs/{}/logs'.format(
            WS_V1,
            project.user.username,
            project.name,
            self.experiment.id,
            self.experiment_job.id)
    def create_logs(self, temp):
        """Write `num_log_lines` fake sentences into the job's temp or final log file."""
        log_path = stores.get_experiment_job_logs_path(
            experiment_job_name=self.experiment_job.unique_name,
            temp=temp)
        stores.create_experiment_job_logs_path(experiment_job_name=self.experiment_job.unique_name,
                                               temp=temp)
        fake = Faker()
        self.logs = []
        for _ in range(self.num_log_lines):
            self.logs.append(fake.sentence())
        with open(log_path, 'w') as file:
            for line in self.logs:
                file.write(line)
                file.write('\n')
    def test_get_done_experiment(self):
        """A finished experiment serves logs from the final (non-temp) file only."""
        self.experiment.set_status(ExperimentLifeCycle.SUCCEEDED)
        self.assertTrue(self.experiment.is_done)
        # No logs
        resp = self.auth_client.get(self.url)
        assert resp.status_code == status.HTTP_404_NOT_FOUND
        # Check that it does not return the temp file
        self.create_logs(temp=True)
        resp = self.auth_client.get(self.url)
        assert resp.status_code == status.HTTP_404_NOT_FOUND
        # Check returns the correct file
        self.create_logs(temp=False)
        resp = self.auth_client.get(self.url)
        assert resp.status_code == status.HTTP_200_OK
        data = [i for i in resp._iterator] # pylint:disable=protected-access
        data = [d for d in data[0].decode('utf-8').split('\n') if d]
        assert len(data) == len(self.logs)
        assert data == self.logs
    @patch('api.experiments.views.process_experiment_job_logs')
    def test_get_non_done_experiment(self, _):
        """A running experiment serves logs from the temp file only."""
        self.assertFalse(self.experiment.is_done)
        # No logs
        resp = self.auth_client.get(self.url)
        assert resp.status_code == status.HTTP_404_NOT_FOUND
        # Check that it does not return the non-temp file
        self.create_logs(temp=False)
        resp = self.auth_client.get(self.url)
        assert resp.status_code == status.HTTP_404_NOT_FOUND
        # Check returns the correct file
        self.create_logs(temp=True)
        resp = self.auth_client.get(self.url)
        assert resp.status_code == status.HTTP_200_OK
        data = [i for i in resp._iterator] # pylint:disable=protected-access
        data = [d for d in data[0].decode('utf-8').split('\n') if d]
        assert len(data) == len(self.logs)
        assert data == self.logs
    def test_stream_redirects_to_internal_service(self):
        """The stream endpoint answers 200 with an NGINX internal-redirect header."""
        response = self.auth_client.get(self.stream_url)
        self.assertEqual(response.status_code, 200)
        self.assertTrue(ProtectedView.NGINX_REDIRECT_HEADER in response)
        self.assertEqual(response[ProtectedView.NGINX_REDIRECT_HEADER], self.ws_url)
@pytest.mark.experiments_mark
class TestRestartExperimentViewV1(BaseViewTest):
    """Tests that restarting an experiment clones it with the restart strategy."""
    serializer_class = ExperimentSerializer
    model_class = Experiment
    factory_class = ExperimentFactory
    HAS_AUTH = True
    DISABLE_RUNNER = False
    DISABLE_EXECUTOR = False
    def setUp(self):
        super().setUp()
        project = ProjectFactory(user=self.auth_client.user)
        self.object = self.factory_class(project=project)
        self.url = '/{}/{}/{}/experiments/{}/restart'.format(
            API_V1,
            project.user.username,
            project.name,
            self.object.id)
        self.queryset = self.model_class.objects.all()
    def test_restart(self):
        """POST creates a restart clone and schedules exactly one build."""
        data = {}
        assert self.queryset.count() == 1
        with patch('scheduler.tasks.experiments.experiments_build.apply_async') as mock_fct:
            resp = self.auth_client.post(self.url, data)
            assert resp.status_code == status.HTTP_201_CREATED
            assert mock_fct.call_count == 1
            assert self.queryset.count() == 2
        last_experiment = self.queryset.last()
        # The new experiment is a clone flagged as restart (not copy/resume).
        assert last_experiment.is_clone is True
        assert last_experiment.is_restart is True
        assert last_experiment.is_copy is False
        assert last_experiment.is_resume is False
        assert last_experiment.original_experiment == self.object
        assert last_experiment.original_unique_name == self.object.unique_name
    def test_restart_patch_config(self):
        """A declared `params` override is applied to the clone only."""
        data = {'content': "{'params': {'lr': 0.1}}"}
        assert self.queryset.first().params is None
        with patch('scheduler.tasks.experiments.experiments_build.apply_async') as mock_fct:
            resp = self.auth_client.post(self.url, data)
            assert resp.status_code == status.HTTP_201_CREATED
            assert mock_fct.call_count == 1
            assert self.queryset.count() == 2
        # Original keeps its params; the clone picks up the override.
        assert self.queryset.first().params is None
        assert self.queryset.last().params == {'lr': 0.1}
        last_experiment = self.queryset.last()
        assert last_experiment.is_clone is True
        assert last_experiment.is_restart is True
        assert last_experiment.is_copy is False
        assert last_experiment.is_resume is False
        assert last_experiment.original_experiment == self.object
        assert last_experiment.original_unique_name == self.object.unique_name
    def test_restart_patch_wrong_config_raises(self):
        """An invalid config override is rejected and nothing is scheduled."""
        data = {'content': "{'lr': 0.1}"}
        assert self.queryset.first().params is None
        with patch('scheduler.tasks.experiments.experiments_build.apply_async') as mock_fct:
            resp = self.auth_client.post(self.url, data)
            assert resp.status_code == status.HTTP_400_BAD_REQUEST
            assert mock_fct.call_count == 0
            assert self.queryset.count() == 1
@pytest.mark.experiments_mark
class TestResumeExperimentViewV1(BaseViewTest):
    """Tests that resuming an experiment clones it with the resume strategy."""
    serializer_class = ExperimentSerializer
    model_class = Experiment
    factory_class = ExperimentFactory
    HAS_AUTH = True

    def setUp(self):
        super().setUp()
        owner_project = ProjectFactory(user=self.auth_client.user)
        self.object = self.factory_class(project=owner_project)
        self.url = '/{}/{}/{}/experiments/{}/resume'.format(
            API_V1,
            owner_project.user.username,
            owner_project.name,
            self.object.id)
        self.queryset = self.model_class.objects.all()

    def _assert_resume_clone(self, experiment):
        # A resumed run must be a clone flagged as resume (not restart/copy).
        assert experiment.is_clone is True
        assert experiment.is_restart is False
        assert experiment.is_copy is False
        assert experiment.is_resume is True
        assert experiment.original_experiment == self.object
        assert experiment.original_unique_name == self.object.unique_name

    def test_resume(self):
        """POST creates a resume clone and schedules exactly one build."""
        assert self.queryset.count() == 1
        with patch('scheduler.tasks.experiments.experiments_build.apply_async') as build_task:
            resp = self.auth_client.post(self.url, {})
            assert resp.status_code == status.HTTP_201_CREATED
            assert build_task.call_count == 1
        assert self.queryset.count() == 2
        self._assert_resume_clone(self.queryset.last())

    def test_resume_patch_config(self):
        """A declared `params` override lands on the clone, not the original."""
        payload = {'content': "{'params': {'lr': 0.1}}"}
        assert self.queryset.first().params is None
        with patch('scheduler.tasks.experiments.experiments_build.apply_async') as build_task:
            resp = self.auth_client.post(self.url, payload)
            assert resp.status_code == status.HTTP_201_CREATED
            assert build_task.call_count == 1
        assert self.queryset.count() == 2
        assert self.queryset.first().params is None
        assert self.queryset.last().params == {'lr': 0.1}
        self._assert_resume_clone(self.queryset.last())

    def test_resume_patch_wrong_config_raises(self):
        """An invalid config override is rejected and nothing is scheduled."""
        payload = {'content': "{'lr': 0.1}"}
        assert self.queryset.first().params is None
        with patch('scheduler.tasks.experiments.experiments_build.apply_async') as build_task:
            resp = self.auth_client.post(self.url, payload)
            assert resp.status_code == status.HTTP_400_BAD_REQUEST
            assert build_task.call_count == 0
        assert self.queryset.count() == 1
@pytest.mark.experiments_mark
class TestCopyExperimentViewV1(BaseViewTest):
    """Tests that copying an experiment clones it with the copy strategy.

    Note: the tests were previously named `test_resume*` — a copy-paste from
    the resume test class — although they exercise the copy endpoint; they
    are renamed `test_copy*` to match what they verify.
    """
    serializer_class = ExperimentSerializer
    model_class = Experiment
    factory_class = ExperimentFactory
    HAS_AUTH = True
    DISABLE_RUNNER = False
    DISABLE_EXECUTOR = False

    def setUp(self):
        super().setUp()
        project = ProjectFactory(user=self.auth_client.user)
        self.object = self.factory_class(project=project)
        self.url = '/{}/{}/{}/experiments/{}/copy'.format(
            API_V1,
            project.user.username,
            project.name,
            self.object.id)
        self.queryset = self.model_class.objects.all()

    def test_copy(self):
        """POST creates a copy clone and schedules exactly one build."""
        data = {}
        assert self.queryset.count() == 1
        with patch('scheduler.tasks.experiments.experiments_build.apply_async') as mock_fct:
            resp = self.auth_client.post(self.url, data)
            assert resp.status_code == status.HTTP_201_CREATED
            assert mock_fct.call_count == 1
            assert self.queryset.count() == 2
        last_experiment = self.queryset.last()
        # The new experiment is a clone flagged as copy (not restart/resume).
        assert last_experiment.is_clone is True
        assert last_experiment.is_restart is False
        assert last_experiment.is_copy is True
        assert last_experiment.is_resume is False
        assert last_experiment.original_experiment == self.object
        assert last_experiment.original_unique_name == self.object.unique_name

    def test_copy_patch_config(self):
        """A declared `params` override lands on the copy, not the original."""
        data = {'content': "{'params': {'lr': 0.1}}"}
        assert self.queryset.first().params is None
        with patch('scheduler.tasks.experiments.experiments_build.apply_async') as mock_fct:
            resp = self.auth_client.post(self.url, data)
            assert resp.status_code == status.HTTP_201_CREATED
            assert mock_fct.call_count == 1
            assert self.queryset.count() == 2
        assert self.queryset.first().params is None
        assert self.queryset.last().params == {'lr': 0.1}
        last_experiment = self.queryset.last()
        assert last_experiment.is_clone is True
        assert last_experiment.is_restart is False
        assert last_experiment.is_copy is True
        assert last_experiment.is_resume is False
        assert last_experiment.original_experiment == self.object
        assert last_experiment.original_unique_name == self.object.unique_name

    def test_copy_patch_wrong_config_raises(self):
        """An invalid config override is rejected and nothing is scheduled."""
        data = {'content': "{'lr': 0.1}"}
        assert self.queryset.first().params is None
        with patch('scheduler.tasks.experiments.experiments_build.apply_async') as mock_fct:
            resp = self.auth_client.post(self.url, data)
            assert resp.status_code == status.HTTP_400_BAD_REQUEST
            assert mock_fct.call_count == 0
            assert self.queryset.count() == 1
@pytest.mark.experiments_mark
class TestStopExperimentViewV1(BaseViewTest):
    """Exercises the single-experiment stop endpoint."""
    model_class = Experiment
    factory_class = ExperimentFactory
    HAS_AUTH = True

    def setUp(self):
        super().setUp()
        owner_project = ProjectFactory(user=self.auth_client.user)
        self.object = self.factory_class(project=owner_project)
        self.url = '/{}/{}/{}/experiments/{}/stop'.format(
            API_V1,
            owner_project.user.username,
            owner_project.name,
            self.object.id)
        self.queryset = self.model_class.objects.all()

    def test_stop(self):
        """Stopping schedules exactly one stop task and deletes nothing."""
        assert self.queryset.count() == 1
        with patch('scheduler.tasks.experiments.experiments_stop.apply_async') as stop_task:
            resp = self.auth_client.post(self.url, {})
            assert stop_task.call_count == 1
        assert resp.status_code == status.HTTP_200_OK
        assert self.queryset.count() == 1
@pytest.mark.experiments_mark
class TestStopExperimentManyViewV1(BaseViewTest):
    """Exercises the bulk experiment stop endpoint."""
    model_class = Experiment
    factory_class = ExperimentFactory
    HAS_AUTH = True

    def setUp(self):
        super().setUp()
        owner_project = ProjectFactory(user=self.auth_client.user)
        self.objects = [self.factory_class(project=owner_project) for _ in range(3)]
        self.url = '/{}/{}/{}/experiments/stop'.format(
            API_V1,
            owner_project.user.username,
            owner_project.name)
        self.queryset = self.model_class.objects.all()

    def test_stop_many(self):
        """No ids schedules nothing; explicit ids schedule one task per experiment."""
        assert self.queryset.count() == 3
        with patch('scheduler.tasks.experiments.experiments_stop.apply_async') as stop_task:
            resp = self.auth_client.post(self.url, {})
            assert resp.status_code == status.HTTP_200_OK
        assert stop_task.call_count == 0
        ids_payload = {'ids': [obj.id for obj in self.objects]}
        with patch('scheduler.tasks.experiments.experiments_stop.apply_async') as stop_task:
            resp = self.auth_client.post(self.url, ids_payload)
            assert resp.status_code == status.HTTP_200_OK
        assert stop_task.call_count == 3
        assert self.queryset.count() == 3
@pytest.mark.experiments_mark
class TestDeleteExperimentManyViewV1(BaseViewTest):
    """Exercises the bulk experiment delete endpoint."""
    model_class = Experiment
    factory_class = ExperimentFactory
    HAS_AUTH = True

    def setUp(self):
        super().setUp()
        owner_project = ProjectFactory(user=self.auth_client.user)
        self.objects = [self.factory_class(project=owner_project) for _ in range(3)]
        self.url = '/{}/{}/{}/experiments/delete'.format(
            API_V1,
            owner_project.user.username,
            owner_project.name)
        self.queryset = self.model_class.objects.all()

    def test_delete_many(self):
        """No ids deletes nothing; explicit ids remove every listed experiment."""
        assert self.queryset.count() == 3
        resp = self.auth_client.delete(self.url, {})
        assert resp.status_code == status.HTTP_200_OK
        assert self.queryset.count() == 3
        ids_payload = {'ids': [obj.id for obj in self.objects]}
        resp = self.auth_client.delete(self.url, ids_payload)
        assert resp.status_code == status.HTTP_200_OK
        assert self.queryset.count() == 0
@pytest.mark.experiments_mark
class TestExperimentLogsViewV1(BaseViewTest):
    """Tests fetching, posting, and stream-redirecting experiment-level logs."""
    num_log_lines = 10
    HAS_AUTH = True
    def setUp(self):
        super().setUp()
        project = ProjectFactory(user=self.auth_client.user)
        self.experiment = ExperimentFactory(project=project)
        self.logs = []
        # REST endpoint serving (and accepting) the stored log file.
        self.url = '/{}/{}/{}/experiments/{}/logs'.format(
            API_V1,
            project.user.username,
            project.name,
            self.experiment.id)
        # Endpoint that should redirect to the internal streaming service.
        self.stream_url = '/{}/{}/{}/experiments/{}/logs/stream'.format(
            API_V1,
            project.user.username,
            project.name,
            self.experiment.id)
        # Websocket target the stream endpoint is expected to redirect to.
        self.ws_url = '/{}/{}/{}/experiments/{}/logs'.format(
            WS_V1,
            project.user.username,
            project.name,
            self.experiment.id)
    def create_logs(self, temp):
        """Write `num_log_lines` fake sentences into the temp or final log file."""
        log_path = stores.get_experiment_logs_path(
            experiment_name=self.experiment.unique_name,
            temp=temp)
        stores.create_experiment_logs_path(experiment_name=self.experiment.unique_name, temp=temp)
        fake = Faker()
        self.logs = []
        for _ in range(self.num_log_lines):
            self.logs.append(fake.sentence())
        with open(log_path, 'w') as file:
            for line in self.logs:
                file.write(line)
                file.write('\n')
    def test_get_done_experiment(self):
        """A finished experiment serves logs from the final (non-temp) file only."""
        self.experiment.set_status(ExperimentLifeCycle.SUCCEEDED)
        self.assertTrue(self.experiment.is_done)
        # No logs
        resp = self.auth_client.get(self.url)
        assert resp.status_code == status.HTTP_404_NOT_FOUND
        # Check that it does not return the temp file
        self.create_logs(temp=True)
        resp = self.auth_client.get(self.url)
        assert resp.status_code == status.HTTP_404_NOT_FOUND
        # Check returns the correct file
        self.create_logs(temp=False)
        resp = self.auth_client.get(self.url)
        assert resp.status_code == status.HTTP_200_OK
        data = [i for i in resp._iterator] # pylint:disable=protected-access
        data = [d for d in data[0].decode('utf-8').split('\n') if d]
        assert len(data) == len(self.logs)
        assert data == self.logs
    @patch('api.experiments.views.process_logs')
    def test_get_non_done_experiment(self, _):
        """A running experiment serves logs from the temp file only."""
        self.assertFalse(self.experiment.is_done)
        # No logs
        resp = self.auth_client.get(self.url)
        assert resp.status_code == status.HTTP_404_NOT_FOUND
        # Check that it does not return the non-temp file
        self.create_logs(temp=False)
        resp = self.auth_client.get(self.url)
        assert resp.status_code == status.HTTP_404_NOT_FOUND
        # Check returns the correct file
        self.create_logs(temp=True)
        resp = self.auth_client.get(self.url)
        assert resp.status_code == status.HTTP_200_OK
        data = [i for i in resp._iterator] # pylint:disable=protected-access
        data = [d for d in data[0].decode('utf-8').split('\n') if d]
        assert len(data) == len(self.logs)
        assert data == self.logs
    def test_post_logs(self):
        """Posting a string or a list of lines dispatches one handler task each."""
        resp = self.auth_client.post(self.url)
        assert resp.status_code == status.HTTP_400_BAD_REQUEST
        data = 'logs here'
        with patch('logs_handlers.tasks.logs_handle_experiment_job.apply_async') as mock_fct:
            resp = self.auth_client.post(self.url, data)
        assert resp.status_code == status.HTTP_200_OK
        assert mock_fct.call_count == 1
        data = ['logs here', 'dfg dfg']
        with patch('logs_handlers.tasks.logs_handle_experiment_job.apply_async') as mock_fct:
            resp = self.auth_client.post(self.url, data)
        assert resp.status_code == status.HTTP_200_OK
        assert mock_fct.call_count == 1
    def test_stream_redirects_to_internal_service(self):
        """The stream endpoint answers 200 with an NGINX internal-redirect header."""
        response = self.auth_client.get(self.stream_url)
        self.assertEqual(response.status_code, 200)
        self.assertTrue(ProtectedView.NGINX_REDIRECT_HEADER in response)
        self.assertEqual(response[ProtectedView.NGINX_REDIRECT_HEADER], self.ws_url)
@pytest.mark.experiments_mark
class TestExperimentOutputsTreeViewV1(BaseFilesViewTest):
    """Browses the outputs directory tree of an experiment."""
    num_log_lines = 10
    HAS_AUTH = True

    def setUp(self):
        super().setUp()
        owner_project = ProjectFactory(user=self.auth_client.user)
        experiment = ExperimentFactory(project=owner_project)
        self.url = '/{}/{}/{}/experiments/{}/outputs/tree'.format(
            API_V1,
            owner_project.user.username,
            owner_project.name,
            experiment.id)
        outputs_path = stores.get_experiment_outputs_path(
            persistence=experiment.persistence_outputs,
            experiment_name=experiment.unique_name,
            original_name=experiment.original_unique_name,
            cloning_strategy=experiment.cloning_strategy)
        stores.create_experiment_outputs_path(
            persistence=experiment.persistence_outputs,
            experiment_name=experiment.unique_name)
        self.create_paths(path=outputs_path, url=self.url)

    def test_get(self):
        """Top-level and both second-level listings match the created fixtures."""
        expectations = (
            (self.url, self.top_level),
            (self.url_second_level, self.second_level),
            (self.url_second_level2, self.second_level),
        )
        for target_url, expected in expectations:
            resp = self.auth_client.get(target_url)
            assert resp.status_code == status.HTTP_200_OK
            self.assert_same_content(resp.data['files'], expected['files'])
            self.assert_same_content(resp.data['dirs'], expected['dirs'])
@pytest.mark.experiments_mark
class TestExperimentOutputsFilesViewV1(BaseFilesViewTest):
    """Downloads individual files from an experiment's outputs."""
    num_log_lines = 10
    HAS_AUTH = True

    def setUp(self):
        super().setUp()
        owner_project = ProjectFactory(user=self.auth_client.user)
        experiment = ExperimentFactory(project=owner_project)
        self.url = '/{}/{}/{}/experiments/{}/outputs/files'.format(
            API_V1,
            owner_project.user.username,
            owner_project.name,
            experiment.id)
        outputs_path = stores.get_experiment_outputs_path(
            persistence=experiment.persistence_outputs,
            experiment_name=experiment.unique_name,
            original_name=experiment.original_unique_name,
            cloning_strategy=experiment.cloning_strategy)
        stores.create_experiment_outputs_path(
            persistence=experiment.persistence_outputs,
            experiment_name=experiment.unique_name)
        self.create_paths(path=outputs_path, url=self.url)

    def test_get(self):
        """Every fixture file (both levels) streams back with its exact content."""
        for collection in (self.top_level_files, self.second_level_files):
            for file_content in collection:
                resp = self.auth_client.get(self.url + '?path={}'.format(file_content['file']))
                assert resp.status_code == status.HTTP_200_OK
                chunks = [i for i in resp._iterator]  # pylint:disable=protected-access
                assert chunks[0].decode('utf-8') == file_content['data']
@pytest.mark.experiments_mark
class DownloadExperimentOutputsViewTest(BaseViewTest):
    """Tests downloading an experiment's outputs as an archived tarball."""
    model_class = Experiment
    factory_class = ExperimentFactory
    HAS_AUTH = True
    HAS_INTERNAL = True
    def setUp(self):
        super().setUp()
        self.project = ProjectFactory(user=self.auth_client.user)
        self.experiment = self.factory_class(project=self.project)
        self.download_url = '/{}/{}/{}/experiments/{}/outputs/download'.format(
            API_V1,
            self.project.user.username,
            self.project.name,
            self.experiment.id)
        self.experiment_outputs_path = stores.get_experiment_outputs_path(
            persistence=self.experiment.persistence_outputs,
            experiment_name=self.experiment.unique_name)
        self.url = self.download_url
    def create_tmp_outputs(self):
        """Create the outputs directory and four empty files inside it."""
        stores.create_experiment_outputs_path(
            persistence=self.experiment.persistence_outputs,
            experiment_name=self.experiment.unique_name)
        for i in range(4):
            open('{}/{}'.format(self.experiment_outputs_path, i), '+w')
    def test_redirects_nginx_to_file(self):
        """Download responds 200 with an NGINX redirect header to the archive path."""
        self.create_tmp_outputs()
        # Assert that the experiment outputs path exists before downloading.
        self.assertTrue(os.path.exists(self.experiment_outputs_path))
        response = self.auth_client.get(self.download_url)
        self.assertEqual(response.status_code, 200)
        self.assertTrue(ProtectedView.NGINX_REDIRECT_HEADER in response)
        self.assertEqual(response[ProtectedView.NGINX_REDIRECT_HEADER],
                         '{}/{}.tar.gz'.format(conf.get(ARCHIVES_ROOT_ARTIFACTS),
                                               self.experiment.unique_name.replace('.', '_')))
@pytest.mark.experiments_mark
class TestExperimentEphemeralTokenViewV1(BaseViewTest):
    """Tests exchanging an ephemeral token for a real user token.

    The exchange is only allowed while the experiment is scheduled/starting/
    running; tokens are single-use and expire with their TTL.
    """
    HAS_AUTH = False
    factory_class = ExperimentFactory
    def setUp(self):
        super().setUp()
        self.auth_user = self.auth_client.user
        self.project = ProjectFactory(user=self.auth_client.user)
        self.experiment = self.factory_class(project=self.project)
        # Second experiment used to verify tokens are scoped per experiment.
        self.other_experiment = self.factory_class(project=self.project)
        self.url = '/{}/{}/{}/experiments/{}/ephemeraltoken'.format(
            API_V1,
            self.project.user.username,
            self.project.name,
            self.experiment.id)
        self.other_url = '/{}/{}/{}/experiments/{}/ephemeraltoken'.format(
            API_V1,
            self.project.user.username,
            self.project.name,
            self.other_experiment.id)
    @staticmethod
    def create_ephemeral_token(experiment, **kwargs):
        """Generate a Redis ephemeral token scoped to the given experiment."""
        scope = RedisEphemeralTokens.get_scope(user=experiment.user.id,
                                               model='experiment',
                                               object_id=experiment.id)
        return RedisEphemeralTokens.generate(scope=scope, **kwargs)
    def test_is_forbidden_for_non_running_or_scheduled_experiment(self):
        ephemeral_token = self.create_ephemeral_token(self.experiment)
        token = RedisEphemeralTokens.create_header_token(ephemeral_token)
        ephemeral_client = EphemeralClient(token=token)
        resp = ephemeral_client.post(self.url)
        assert resp.status_code == status.HTTP_403_FORBIDDEN
        # Token state is cleared after the attempt.
        self.assertEqual(ephemeral_token.get_state(), None)
    def test_using_other_experiment_token(self):
        # A token scoped to another experiment must not unlock this one.
        ephemeral_token = self.create_ephemeral_token(self.other_experiment)
        token = RedisEphemeralTokens.create_header_token(ephemeral_token)
        ephemeral_client = EphemeralClient(token=token)
        resp = ephemeral_client.post(self.url)
        assert resp.status_code == status.HTTP_403_FORBIDDEN
        self.assertEqual(ephemeral_token.get_state(), None)
    def test_using_timed_out_experiment_token(self):
        self.experiment.set_status(status=JobLifeCycle.RUNNING)
        ephemeral_token = self.create_ephemeral_token(self.experiment, ttl=1)
        token = RedisEphemeralTokens.create_header_token(ephemeral_token)
        ephemeral_client = EphemeralClient(token=token)
        # Let the 1-second TTL elapse so the token expires.
        time.sleep(1.1)
        resp = ephemeral_client.post(self.url)
        assert resp.status_code == status.HTTP_401_UNAUTHORIZED
        self.assertEqual(ephemeral_token.get_state(), None)
    def test_using_used_experiment_token(self):
        self.experiment.set_status(status=JobLifeCycle.RUNNING)
        ephemeral_token = self.create_ephemeral_token(self.experiment)
        token = RedisEphemeralTokens.create_header_token(ephemeral_token)
        # Clearing simulates a token that was already consumed.
        ephemeral_token.clear()
        ephemeral_client = EphemeralClient(token=token)
        resp = ephemeral_client.post(self.url)
        assert resp.status_code == status.HTTP_401_UNAUTHORIZED
        self.assertEqual(ephemeral_token.get_state(), None)
    def test_using_scheduled_experiment_token(self):
        self.experiment.set_status(status=ExperimentLifeCycle.SCHEDULED)
        ephemeral_token = self.create_ephemeral_token(self.experiment)
        token = RedisEphemeralTokens.create_header_token(ephemeral_token)
        ephemeral_client = EphemeralClient(token=token)
        resp = ephemeral_client.post(self.url)
        assert resp.status_code == status.HTTP_200_OK
        assert resp.data == {'token': self.experiment.user.tokens.last().key}
        self.assertEqual(ephemeral_token.get_state(), None)
    def test_using_starting_experiment_token(self):
        self.experiment.set_status(status=ExperimentLifeCycle.STARTING)
        ephemeral_token = self.create_ephemeral_token(self.experiment)
        token = RedisEphemeralTokens.create_header_token(ephemeral_token)
        ephemeral_client = EphemeralClient(token=token)
        resp = ephemeral_client.post(self.url)
        assert resp.status_code == status.HTTP_200_OK
        assert resp.data == {'token': self.experiment.user.tokens.last().key}
        self.assertEqual(ephemeral_token.get_state(), None)
    def test_using_running_experiment_token(self):
        self.experiment.set_status(status=ExperimentLifeCycle.RUNNING)
        ephemeral_token = self.create_ephemeral_token(self.experiment)
        token = RedisEphemeralTokens.create_header_token(ephemeral_token)
        ephemeral_client = EphemeralClient(token=token)
        resp = ephemeral_client.post(self.url)
        assert resp.status_code == status.HTTP_200_OK
        assert resp.data == {'token': self.experiment.user.tokens.last().key}
        self.assertEqual(ephemeral_token.get_state(), None)
@pytest.mark.experiments_mark
class TestExperimentChartViewListViewV1(BaseViewTest):
    """Tests listing, paginating, and creating experiment chart views."""
    serializer_class = ExperimentChartViewSerializer
    model_class = ExperimentChartView
    factory_class = ExperimentChartViewFactory
    num_objects = 3
    HAS_AUTH = True
    def setUp(self):
        """Create `num_objects` chart views for one experiment."""
        super().setUp()
        project = ProjectFactory(user=self.auth_client.user)
        self.experiment = ExperimentFactory(project=project)
        self.url = '/{}/{}/{}/experiments/{}/chartviews/'.format(API_V1,
                                                                 project.user.username,
                                                                 project.name,
                                                                 self.experiment.id)
        self.objects = [self.factory_class(experiment=self.experiment, name='view{}'.format(i))
                        for i in range(self.num_objects)]
        self.queryset = self.model_class.objects.all()
        self.queryset = self.queryset.order_by('created_at')
    def test_get(self):
        """A single page returns every chart view in creation order."""
        resp = self.auth_client.get(self.url)
        assert resp.status_code == status.HTTP_200_OK
        assert resp.data['next'] is None
        assert resp.data['count'] == len(self.objects)
        data = resp.data['results']
        assert len(data) == self.queryset.count()
        assert data == self.serializer_class(self.queryset, many=True).data
    def test_pagination(self):
        """`limit` yields a full first page plus a one-item second page."""
        limit = self.num_objects - 1
        resp = self.auth_client.get("{}?limit={}".format(self.url, limit))
        assert resp.status_code == status.HTTP_200_OK
        next_page = resp.data.get('next')
        assert next_page is not None
        assert resp.data['count'] == self.queryset.count()
        data = resp.data['results']
        assert len(data) == limit
        assert data == self.serializer_class(self.queryset[:limit], many=True).data
        resp = self.auth_client.get(next_page)
        assert resp.status_code == status.HTTP_200_OK
        assert resp.data['next'] is None
        data = resp.data['results']
        assert len(data) == 1
        assert data == self.serializer_class(self.queryset[limit:], many=True).data
    def test_create(self):
        """POST requires `charts`; a valid payload creates one chart view."""
        data = {}
        resp = self.auth_client.post(self.url, data)
        assert resp.status_code == status.HTTP_400_BAD_REQUEST
        data = {'charts': [{'id': '1'}, {'id': '2'}]}
        resp = self.auth_client.post(self.url, data)
        assert resp.status_code == status.HTTP_201_CREATED
        assert self.model_class.objects.count() == self.num_objects + 1
        last_object = self.model_class.objects.last()
        assert last_object.experiment == self.experiment
        assert last_object.charts == data['charts']
@pytest.mark.experiments_mark
class TestExperimentChartViewDetailViewV1(BaseViewTest):
    """Tests get/patch/delete on a single experiment chart view."""
    serializer_class = ExperimentChartViewSerializer
    model_class = ExperimentChartView
    factory_class = ExperimentChartViewFactory
    HAS_AUTH = True

    def setUp(self):
        super().setUp()
        self.project = ProjectFactory(user=self.auth_client.user)
        self.experiment = ExperimentFactory(project=self.project)
        self.object = self.factory_class(experiment=self.experiment)
        self.url = '/{}/{}/{}/experiments/{}/chartviews/{}/'.format(
            API_V1,
            self.experiment.project.user.username,
            self.experiment.project.name,
            self.experiment.id,
            self.object.id)
        self.queryset = self.model_class.objects.all()

    def test_get(self):
        """GET returns the serialized chart view."""
        resp = self.auth_client.get(self.url)
        assert resp.status_code == status.HTTP_200_OK
        expected = self.serializer_class(self.object).data
        assert resp.data == expected

    def test_patch(self):
        """PATCH replaces the charts list and echoes it back."""
        payload = {'charts': [{'uuid': 'id22'}, {'uuid': 'id23'}, {'uuid': 'id24'}, {'uuid': 'id25'}]}
        resp = self.auth_client.patch(self.url, data=payload)
        assert resp.status_code == status.HTTP_200_OK
        assert resp.data['charts'] == payload['charts']

    def test_delete(self):
        """DELETE removes the chart view."""
        assert self.model_class.objects.count() == 1
        resp = self.auth_client.delete(self.url)
        assert resp.status_code == status.HTTP_204_NO_CONTENT
        assert self.model_class.objects.count() == 0
@pytest.mark.experiments_mark
class TestExperimentHeartBeatViewV1(BaseViewTest):
    """Heartbeat endpoint tests: a POST must flip the experiment to 'alive', API v1."""
    HAS_AUTH = True
    HAS_INTERNAL = True
    INTERNAL_SERVICE = InternalServices.SIDECAR

    def setUp(self):
        super().setUp()
        project = ProjectFactory(user=self.auth_client.user)
        self.experiment = ExperimentFactory(project=project)
        self.url = (
            f'/{API_V1}/{project.user.username}/{project.name}'
            f'/experiments/{self.experiment.id}/_heartbeat'
        )

    def _post_heartbeat(self, client):
        # The experiment starts out not alive; a successful POST marks it alive.
        self.assertEqual(RedisHeartBeat.experiment_is_alive(self.experiment.id), False)
        response = client.post(self.url)
        assert response.status_code == status.HTTP_200_OK
        self.assertEqual(RedisHeartBeat.experiment_is_alive(self.experiment.id), True)

    def test_post_experiment_heartbeat(self):
        self._post_heartbeat(self.auth_client)

    def test_post_internal_experiment_heartbeat(self):
        self._post_heartbeat(self.internal_client)
@pytest.mark.experiments_mark
class TestExperimentJobReconcileViewV1(BaseViewTest):
    """Reconcile endpoint tests for experiment jobs, API v1.

    A reconcile POST triggers the k8s status-reconcile task unless the job
    already reached a terminal status (then the task must NOT be scheduled).
    """
    HAS_AUTH = True
    HAS_INTERNAL = True
    INTERNAL_SERVICE = InternalServices.SIDECAR

    def setUp(self):
        super().setUp()
        project = ProjectFactory(user=self.auth_client.user)
        self.experiment = ExperimentFactory(project=project)
        self.object = ExperimentJobFactory(experiment=self.experiment)
        self.url = '/{}/{}/{}/experiments/{}/jobs/{}/_reconcile/'.format(
            API_V1,
            project.user.username,
            project.name,
            self.experiment.id,
            self.object.uuid.hex)

    def _reconcile(self, client):
        # Non-terminal job: reconciling must schedule the async task exactly once.
        with patch('k8s_events_handlers.tasks.'
                   'k8s_events_reconcile_experiment_job_statuses.apply_async') as mock_fct:
            resp = client.post(self.url, data={'status': 'succeeded'})
        assert resp.status_code == status.HTTP_200_OK
        assert mock_fct.call_count == 1

    def _reconcile_done(self, client):
        # Job already in a terminal state: no reconcile task should be scheduled.
        ExperimentJobStatusFactory(job=self.object, status='failed')
        with patch('k8s_events_handlers.tasks.'
                   'k8s_events_reconcile_experiment_job_statuses.apply_async') as mock_fct:
            resp = client.post(self.url, data={'status': 'succeeded'})
        assert mock_fct.call_count == 0
        assert resp.status_code == status.HTTP_200_OK

    def test_reconcile(self):
        self._reconcile(self.auth_client)

    def test_reconcile_done(self):
        # BUG FIX: previously called self._reconcile, so the terminal-state
        # path (_reconcile_done) was never exercised.
        self._reconcile_done(self.auth_client)

    def test_reconcile_internal(self):
        self._reconcile(self.internal_client)

    def test_reconcile_done_internal(self):
        # BUG FIX: same as test_reconcile_done — was calling self._reconcile.
        self._reconcile_done(self.internal_client)
del BaseEntityCodeReferenceViewTest
| 107,563 | 33,015 |
import pygame
import speech_recognition as sr
from time import sleep
import events
import objects as obj_types
from settings import SPEECH_CRED_FILE
from speech_helpers import correct_text, either_side, get_after, get_position, get_positions, get_size, is_in_objects, process_relative, select_obj_type
# A variable listing currently-supported commands
COMMANDS = {"create", "save", "add", "insert", "delete", "remove", "goodbye", "exit", "quit", "new", "open", "move", "relocate", "here", "there", "rename", "export", "right", "left", "up", "down", "resize"}
# Some functions to abstract out the event creation process.
def create(text):
    """Create an object in the room.

    ``text`` is the corrected recognizer output (presumably a sequence of
    words — confirm against correct_text).  The spoken position, size and
    an optional label ("called <name>") are parsed out of it.  If no known
    object type is mentioned, no event is posted and the command is a no-op.
    """
    # Parameters
    location = get_position(text)
    size = get_size(text)
    if "called" in text:
        # Everything after the word "called" becomes the object's label.
        called = " ".join(get_after("called", text))
    else:
        called = None
    # Object types
    obj = is_in_objects(text)
    if obj is not None:
        # Look up the visual attributes of this object type and post a
        # "create" design event for the UI loop to act on.
        obj_type = obj_types.obj_types[obj]
        pygame.event.post(
            pygame.event.Event(events.design_type,
                               method="create",
                               shape=obj_type["shape"],
                               location=location,
                               color=obj_type["color"],
                               size=size,
                               outline=obj_type["outline"],
                               obj_type=obj,
                               text=called,
                               text_color=obj_type["text_color"]))
def delete(text):
    """Delete the object at the spoken position from the room."""
    pygame.event.post(
        pygame.event.Event(
            events.design_type,
            method="delete",
            location=get_position(text),
            obj_type=select_obj_type(text),
        )
    )
def move(text):
    """Move an object in the room.

    The first spoken position selects the object to move.  The destination
    is taken from a relative phrase when one is present (process_relative),
    otherwise from the second spoken position.
    """
    # Parameters
    locations = get_positions(text, 2)  # assumes two positions are returned -- TODO confirm
    location = locations[0]
    # Check for relative positioning, then move on to explicit positioning
    to_location = process_relative(text)
    if to_location is None:
        to_location = locations[1]
    obj_type = select_obj_type(text)
    # Post event
    evt = pygame.event.Event(events.design_type,
                             method="move",
                             location=location,
                             to_location=to_location,
                             obj_type=obj_type)
    pygame.event.post(evt)
def rename(text):
    """Rename the object at the spoken position.

    The new name is whatever follows the first of the markers "to", "as"
    or "2" ("2" covers the recognizer mishearing "to" as the digit).
    """
    location = get_position(text)
    called = None
    for marker in ("to", "as", "2"):
        if marker in text:
            called = " ".join(get_after(marker, text))
            break
    obj_type = select_obj_type(text)
    evt = pygame.event.Event(events.design_type, method="rename", location=location, obj_type=obj_type, text=called)
    pygame.event.post(evt)
def resize(text):
    """Resize the object at the spoken position to the spoken size."""
    where = get_position(text)
    new_size = get_size(text)
    kind = select_obj_type(text)
    pygame.event.post(
        pygame.event.Event(events.design_type,
                           method="resize",
                           location=where,
                           obj_type=kind,
                           size=new_size))
# Process individual voice commands.
def process_command(text, roomGrid):
    """Process voice commands. Returns False if program should quit.

    Keywords are matched by containment in priority order: program control
    (quit/open/new/save/export) first, then completion of a pending
    positional command, then the object commands (create/move/rename/
    resize/delete).  Only the first matching branch fires.
    """
    text = correct_text(text)
    # Program controls
    if "quit" in text or "exit" in text or "close" in text or "goodbye" in text:
        pygame.event.post(pygame.event.Event(pygame.QUIT))
        return False
    elif "open" in text:
        pygame.event.post(events.file_open)
    elif "new" in text and ("design" in text or "room" in text or "file" in text or "project" in text):
        pygame.event.post(events.file_new)
    elif "save" in text:
        pygame.event.post(pygame.event.Event(events.file_type, method="save", change_name=("as" in text)))
    elif "export" in text:
        pygame.event.post(events.file_export)
    # If finishing up a previous command
    # ("cheer" is a common mis-recognition of "here"/"there")
    elif ("here" in text or "there" in text or "cheer" in text) and len(roomGrid.waitFunction) > 0:
        location = get_position(text)
        pygame.event.post(pygame.event.Event(events.ui_type, method="finish_waiting", location=location))
    # Creating things
    elif "add" in text or "create" in text:
        create(text)
    # Moving things
    # fruit is a keyword because Google thinks "fruit" and "cocktail" go together real nice...
    elif "move" in text or "relocate" in text or "fruit" in text:
        move(text)
    # Renaming things
    elif "rename" in text:
        rename(text)
    # Resizing things
    elif "resize" in text:
        resize(text)
    # Deleting things
    elif "remove" in text or "delete" in text:
        delete(text)
    # Always tell the UI we finished handling this utterance.
    pygame.event.post(events.done_listening_event)
    return True
# Listen for voice commands.
def listen(roomGrid):
    """Microphone loop: capture audio, transcribe it with Google Cloud
    Speech, and dispatch recognized commands via process_command().

    Runs until the room dies, a quit command is processed, or the
    microphone cannot be opened.
    """
    with open(SPEECH_CRED_FILE) as f:
        GOOGLE_CLOUD_SPEECH_CREDENTIALS = f.read()
    # Bias the recognizer toward our command vocabulary and object names.
    context_list = list(COMMANDS.union(obj_types.possible))
    r = sr.Recognizer()
    try:
        with sr.Microphone() as source:
            r.adjust_for_ambient_noise(source, duration=2)
            while True:
                if roomGrid.dead:
                    break
                audio = r.listen(source, phrase_time_limit=6)
                try:
                    pygame.event.post(events.capture_space_event)
                    text = r.recognize_google_cloud(audio,
                                                    language="en-us",
                                                    credentials_json=GOOGLE_CLOUD_SPEECH_CREDENTIALS,
                                                    preferred_phrases=context_list)
                    # BUG FIX: ``res`` used to be referenced after the inner
                    # except swallowed a process_command() failure, raising
                    # NameError (masked by the bare except below).  Default to
                    # True so a failed command never exits the loop by accident.
                    res = True
                    try:
                        res = process_command(text, roomGrid)
                    except Exception:
                        print("There was an error processing and executing the command.")
                        pygame.event.post(events.error_listening_event)
                    if not res:
                        break
                except sr.UnknownValueError:
                    # Speech was unintelligible; let the UI show an error state.
                    pygame.event.post(events.error_listening_event)
                except Exception:
                    # Narrowed from a bare except; typically a network/API failure.
                    print("Could not request results from Google Cloud Speech service.")
                    pygame.event.post(pygame.event.Event(events.error_type, error = "Speech recognition error."))
    except OSError:
        pygame.event.post(pygame.event.Event(events.error_type, error = "Could not connect to a microphone."))
| 6,705 | 1,951 |
class OacensusError(Exception):
    """Base class for all exceptions raised by this package."""
    pass
class UserFeedback(OacensusError):
    """
    An exception which was caused by user input or a runtime error and which
    should be presented nicely (as a message, not a traceback).
    """
class ConfigFileFormatProblem(UserFeedback):
    """
    A problem with the format or contents of config files.
    """
    pass
class APIError(UserFeedback):
    """
    An exception raised by (or while talking to) a remote API.
    """
    pass
| 403 | 112 |
import gin
from colosseum.loops import human_loop
from colosseum.mdps import EpisodicMDP
from colosseum.mdps.river_swim.river_swim import RiverSwimMDP
@gin.configurable
class RiverSwimEpisodic(EpisodicMDP, RiverSwimMDP):
    """Episodic variant of the RiverSwim MDP (gin-configurable)."""

    @property
    def _graph_layout(self):
        # Each node's own label doubles as its plotting coordinates.
        layout = {}
        for node in self.G:
            layout[node] = tuple(node)
        return layout
if __name__ == "__main__":
    # Manual smoke test: build a small stochastic episodic RiverSwim and let
    # a human drive it interactively.
    mdp = RiverSwimEpisodic(
        seed=42,
        randomize_actions=False,
        size=15,
        lazy=0.01,
        random_action_p=0.1,
        make_reward_stochastic=True,
    )
    # random_loop(mdp, 50, verbose=True)
    human_loop(mdp)
| 600 | 235 |
# How to add a test:
# Copy this file
# Rename TestTemplate to TestWhatever in line 9
# Rename machine path and config file in lines 11 and 14
from mpfmc.tests.MpfMcTestCase import MpfMcTestCase
class TestTemplate(MpfMcTestCase):
    """Skeleton test case: copy, rename, and point at your own machine files."""

    # Paths the MPF-MC framework asks for, kept as class constants so a
    # copied test only needs to edit these two values.
    MACHINE_PATH = 'tests/machine_files/test_template'
    CONFIG_FILE = 'test_template.yaml'

    def get_machine_path(self):
        return self.MACHINE_PATH

    def get_config_file(self):
        return self.CONFIG_FILE

    def test_something(self):
        pass
| 428 | 139 |
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: proto/availability-msgs.proto
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1dproto/availability-msgs.proto\x12\x05proto\"#\n\x10SiteAvailableReq\x12\x0f\n\x07site_id\x18\x01 \x01(\x03\"-\n\x10SiteAvailableRes\x12\x19\n\x04site\x18\x01 \x01(\x0b\x32\x0b.proto.Site\"\x13\n\x11SitesAvailableReq\"?\n\x11SitesAvailableRes\x12*\n\tresponses\x18\x01 \x03(\x0b\x32\x17.proto.SiteAvailableRes\"*\n\x04Site\x12\x0f\n\x07site_id\x18\x01 \x01(\x03\x12\x11\n\tavailable\x18\x02 \x01(\x08\x42\tZ\x07.;protob\x06proto3')
_SITEAVAILABLEREQ = DESCRIPTOR.message_types_by_name['SiteAvailableReq']
_SITEAVAILABLERES = DESCRIPTOR.message_types_by_name['SiteAvailableRes']
_SITESAVAILABLEREQ = DESCRIPTOR.message_types_by_name['SitesAvailableReq']
_SITESAVAILABLERES = DESCRIPTOR.message_types_by_name['SitesAvailableRes']
_SITE = DESCRIPTOR.message_types_by_name['Site']
SiteAvailableReq = _reflection.GeneratedProtocolMessageType('SiteAvailableReq', (_message.Message,), {
'DESCRIPTOR' : _SITEAVAILABLEREQ,
'__module__' : 'proto.availability_msgs_pb2'
# @@protoc_insertion_point(class_scope:proto.SiteAvailableReq)
})
_sym_db.RegisterMessage(SiteAvailableReq)
SiteAvailableRes = _reflection.GeneratedProtocolMessageType('SiteAvailableRes', (_message.Message,), {
'DESCRIPTOR' : _SITEAVAILABLERES,
'__module__' : 'proto.availability_msgs_pb2'
# @@protoc_insertion_point(class_scope:proto.SiteAvailableRes)
})
_sym_db.RegisterMessage(SiteAvailableRes)
SitesAvailableReq = _reflection.GeneratedProtocolMessageType('SitesAvailableReq', (_message.Message,), {
'DESCRIPTOR' : _SITESAVAILABLEREQ,
'__module__' : 'proto.availability_msgs_pb2'
# @@protoc_insertion_point(class_scope:proto.SitesAvailableReq)
})
_sym_db.RegisterMessage(SitesAvailableReq)
SitesAvailableRes = _reflection.GeneratedProtocolMessageType('SitesAvailableRes', (_message.Message,), {
'DESCRIPTOR' : _SITESAVAILABLERES,
'__module__' : 'proto.availability_msgs_pb2'
# @@protoc_insertion_point(class_scope:proto.SitesAvailableRes)
})
_sym_db.RegisterMessage(SitesAvailableRes)
Site = _reflection.GeneratedProtocolMessageType('Site', (_message.Message,), {
'DESCRIPTOR' : _SITE,
'__module__' : 'proto.availability_msgs_pb2'
# @@protoc_insertion_point(class_scope:proto.Site)
})
_sym_db.RegisterMessage(Site)
if _descriptor._USE_C_DESCRIPTORS == False:
DESCRIPTOR._options = None
DESCRIPTOR._serialized_options = b'Z\007.;proto'
_SITEAVAILABLEREQ._serialized_start=40
_SITEAVAILABLEREQ._serialized_end=75
_SITEAVAILABLERES._serialized_start=77
_SITEAVAILABLERES._serialized_end=122
_SITESAVAILABLEREQ._serialized_start=124
_SITESAVAILABLEREQ._serialized_end=143
_SITESAVAILABLERES._serialized_start=145
_SITESAVAILABLERES._serialized_end=208
_SITE._serialized_start=210
_SITE._serialized_end=252
# @@protoc_insertion_point(module_scope)
| 3,366 | 1,374 |
import numpy as np
import pandas as pd
import datetime
from okokyst_metadata import surveys_lookup_table
import os
import re
import glob
import gsw
from okokyst_tools import pressure_to_depth
encoding = "ISO-8859-1"
__author__ = 'Elizaveta Protsenko'
__email__ = 'Elizaveta.Protsenko@niva.no'
__created__ = datetime.datetime(2020, 9, 23)
__version__ = "1.0"
__status__ = "Development"
def to_rename_columns(df,old_name, new_name):
    """Return *df* with column *old_name* renamed to *new_name*, or *df*
    unchanged when that column is absent."""
    if old_name not in df.columns:
        return df
    return df.rename(columns={old_name: new_name})
def modify_df(df,onedrive,filename):
    #print ("modify_df")
    '''
    Convert column names to the format used in the further processing steps,
    convert oxygen from mg/l to ml/l, reformat the Date/Time columns, drop
    all-empty columns and round values to 4 decimals.

    Side effect: writes NoOxygenData.txt (containing *filename*) into the
    *onedrive* folder when the 'OptOx' column has fewer than 5 distinct
    values, i.e. the oxygen sensor most likely produced no real data.
    '''
    # df = to_rename_columns(df, 'Press', "Depth")
    # (df.columns)
    df = to_rename_columns(df, 'Depth(u)', "Depth")
    df = to_rename_columns(df, 'Sal.', 'Salinity')
    df = to_rename_columns(df, 'T(FTU)', 'FTU')
    df = to_rename_columns(df, 'T (FTU)', 'FTU')
    df = to_rename_columns(df, 'OpOx %', 'OptOx')
    df = to_rename_columns(df, 'Ox %', 'OptOx')
    df = to_rename_columns(df, 'mg/l', 'OxMgL')
    df = to_rename_columns(df, 'Opt', 'OptOx')
    df = to_rename_columns(df, 'Opmg/l', 'OxMgL')
    df = to_rename_columns(df, 'Opml/l', 'OxMlL')
    # recalculate Oxygen into Ml/l
    convert_dict = {
        'Press': float
    }
    df = df.astype(convert_dict)
    #print ("press to float")
    if 'OxMgL' in df.columns:
        print ('recalculate to ml/l')
        df = df.astype({'OxMgL': float})
        # 1 ml/l of O2 corresponds to 1.42905 mg/l, hence the division.
        df['OxMgL'] = df.OxMgL.values / 1.42905
        df = to_rename_columns(df, 'OxMgL', 'OxMlL')
    try:
        df['Date'] = pd.to_datetime(df['Date'], format='%d.%m.%Y').dt.strftime('%d.%m.%Y')
    except Exception as e:
        print ('date',e)
    try:
        df['Time'] = pd.to_datetime(df['Time'], format='%H:%M:%S').dt.strftime('%H.%M.%S')
    except Exception as e:
        print ('time', e)
    try:
        df = df.astype({'OxMlL': float})
    except Exception as e:
        print ('float', e)
    try:
        df = df.astype({'OxMgL': float})
    except:
        print ('Probably Oxygen is missing')
    df = df.dropna(how='all', axis=1)
    df = df.round(4)
    # NOTE(review): raises KeyError when 'OptOx' is missing entirely --
    # confirm every input file carries an oxygen column.
    if len(set(df['OptOx'].values)) < 5:
        er=open(f"{onedrive}\\NoOxygenData.txt","w+")
        er.write(filename)
        er.close()
    return df
class processStation(object):
    # One instance processes a single raw CTD export file end to end:
    # read, normalise, split into casts ('Ser'), match each cast to a
    # monitoring station by depth, and write one output file per station.
    def __init__(self, inputpath,onedrive,survey = None):
        """Process *inputpath* and write per-station files under *onedrive*.

        inputpath : path to the raw CTD txt export.
        onedrive  : destination root for the per-station output files.
        survey    : survey/region name; when None it is derived from the path.
        """
        self.input_path = inputpath
        self.base_path = os.path.split(self.input_path)[0]
        name = os.path.split(self.input_path)[1]
        self.onedrive = onedrive
        if survey != None:
            self.survey = survey
        else:
            self.survey = self.get_region_from_path()
        #try:
        #    y = re.findall("[0-9]", str(name))
        #    x = ''.join(y)
        #    print (name,x)
        #    self.correct_survey_date = pd.to_
        #    datetime(x, format='%Y%m%d').strftime('%d.%m.%Y')
        #    print ('correct_survey_date', self.correct_survey_date)#.values
        #except:
        #    y = re.findall("[0-9]{8}", str(name))
        #    x = ''.join(y)
        #    print(name, x)
        #    self.correct_survey_date = pd.to_datetime(x, format='%Y%m%d').strftime('%d.%m.%Y')
        #    print('correct_survey_date', self.correct_survey_date) # .values
        self.non_assigned = []
        self.assigned = []
        # Candidate stations for this survey and their nominal depths.
        self.stations_list = list(surveys_lookup_table[self.survey].keys())
        self.stations_depths = np.array([surveys_lookup_table[self.survey][st]['depth'] for st in self.stations_list])
        self.df_all = self.read_convert_df()
        try:
            self.calc_depth()
        except Exception as e:
            print('Error in reading the dataframe', e)
        try:
            self.df_all = modify_df(self.df_all, self.onedrive,name)
            # NOTE(review): the loop below rebinds ``name`` (the file name)
            # to the cast number -- confirm that is intentional.
            grouped = self.df_all.groupby('Ser')
            for name, group_df in grouped:
                self.match_stations_by_depth(group_df)
        except Exception as e:
            print('Error in reading the dataframe',e)

    def calc_depth(self):
        """Convert the 'Press' column to a 'Depth' column using the latitude
        of the survey's first station (gsw pressure-to-depth conversion)."""
        first_st = list(surveys_lookup_table[self.survey].keys())[0]
        #print ('calc depth')
        latitude = surveys_lookup_table[self.survey][first_st]["station.latitude"]
        depths = []
        for p in self.df_all['Press'].values:
            d = pressure_to_depth(float(p), latitude)
            depths.append(d)
        self.df_all['Depth'] = depths

    def get_region_from_path(self):
        """Infer the survey region from the input path by case-insensitive
        substring match; returns None when no known region matches."""
        regions = {'Leon': 'Sognefjorden', 'Kvitsoy': 'Hardangerfjorden',
                   'Hardangerfjorden': 'Hardangerfjorden', 'Sognefjorden': 'Sognefjorden', 'RMS': 'RMS',
                   'Aquakompetens': 'Aqua kompetanse'}
        for r in regions:
            name_to_check = re.compile(r, re.IGNORECASE)
            find_match = name_to_check.search(self.input_path)
            if find_match:
                return regions[r]

    def read_convert_df(self):
        """Read the raw CTD export, probing an unknown number of header rows
        (1..15) and both decimal separators; returns a DataFrame, or None
        when every attempt fails."""
        print ('\n******************************')
        print ('Reading', self.input_path)
        # read the document and skip undefined number of unneeded rows
        for n in range(1, 16):
            #print('Attempt N', n)
            try:
                df_all = pd.read_csv(self.input_path, skiprows=n, header=n-1,
                                     sep=';', decimal=',', encoding=encoding)
                #print (df_all.head())
                if len(df_all.columns) < 10:
                    #print('short', df_all.columns)
                    try:
                        df_all = pd.read_csv(self.input_path, skiprows=n, header=n,
                                             sep=';', decimal=',', encoding=encoding)
                        #print(df_all.columns)
                        break
                    except Exception as e:
                        #print('Exception 2')
                        pass
                else:
                    break
            except Exception as e:
                #print('Exception 1')
                df_all = None
                # Fall back to '.' as the decimal separator.
                try:
                    df_all = pd.read_csv(self.input_path, skiprows=n, header=n-1,
                                         sep=';', decimal='.')
                    if len(df_all.columns) < 10:
                        #print('short', df_all.columns)
                        try:
                            df_all = pd.read_csv(self.input_path, skiprows=n, header=n,
                                                 sep=';', decimal=',')
                            #print(df_all.columns)
                            df_all.head()
                            break
                        except Exception as e:
                            #print('Exception 4')
                            pass
                except Exception as e:
                    #print('Exception 3')
                    df_all = None
                    try:
                        pass
                        #print ('Successfully read file')
                        #print (df_all.columns)
                    except Exception as e:
                        #print (e)
                        pass
        return df_all

    def match_stations_by_depth(self, group):
        """Assign one cast (*group*) to the station whose nominal depth is
        closest to (but not shallower than) the cast's maximum depth, then
        write the cast to a per-station file; unmatched casts are written
        as 'Cleaning_station'/'Unknown_station' files."""
        # Get number of the cast
        Ser = group['Ser'].values[0]
        print('Processing Cast', Ser)
        self.survey_date = group.Date.values[0]
        max_depth = np.max(group['Depth'].max())
        # find the closest depth in the arr with all stations for this region
        difs = self.stations_depths - max_depth
        print('difs', difs)
        # Keep only stations at least (max_depth - 1) deep.
        difs_pos = list(filter(lambda x : x > -1, difs))
        #print (difs_pos,'filtered difs')
        #sqr_difs = np.sqrt(difs**2)
        min_dif = np.min(difs_pos)
        print('max depth', max_depth,'min difference', min_dif, 'Time', group.Time.values[0])
        self.make_new_base_path()
        if 'Salinity' not in group.columns:
            group = self.calc_salinity(group)
        #if self.survey == 'Hardangerfjorden':
        #    dif_threshold = 50
        #else:
        dif_threshold = 50
        group=group.drop(columns=['Press'])
        columns = group.columns
        if 'OxMgL' in columns:
            columnOrder=['Ser','Meas','Salinity','Conductivity', 'Temp', 'FTU',
                         'OptOx', 'OxMgL', 'Density', 'Depth', 'Date', 'Time']
            #print('max OxMlL') #, group['OxMgL'].max(), group.columns)
        else:
            columnOrder=['Ser','Meas','Salinity','Conductivity', 'Temp', 'FTU',
                         'OptOx', 'OxMlL', 'Density', 'Depth', 'Date', 'Time']
            #print('max OxMlL') #, group['OxMlL'].max(), group.columns)
        group=group.reindex(columns=columnOrder)
        if min_dif < dif_threshold:
            # double check the sign of the difference (if cast went deeper than the station, do no assign)
            nearest_depth_id = np.where(difs == min_dif)[0][0]
            #print ('stations list', self.stations_list)
            self.station_name = self.stations_list[nearest_depth_id]
            self.station_metadata = surveys_lookup_table[self.survey][self.station_name]
            if self.station_name in self.assigned:
                print(self.station_name, 'already assigned stations:', self.assigned)
                print ("duplicate")
                self.station_name = self.station_name + "_duplicate"
            # Save df matched by station
            #self.filename = os.path.join(self.base_path, self.station_name + '.txt')
            self.filename = os.path.join(self.new_base_path, self.station_name + '_temp.txt')
            self.figname = os.path.join(self.new_base_path, self.station_name + '.png')
            print('Assigned station_name', self.station_name)
            ##print('save data to file with ', self.filename, Ser)
            # Quick-look oxygen profile plot saved next to the data file.
            import matplotlib.pyplot as plt
            plt.figure()
            plt.style.use('ggplot')
            plt.title(self.station_name)
            plt.plot(group['OxMlL'],group.Depth)
            plt.ylim(group.Depth.max(),group.Depth.min())
            plt.savefig(self.figname)
            group.to_csv(self.filename, sep=';')
            #Add header and save update file in the new location
            self.assigned.append(self.station_name)
            self.add_metadata_header()
        else:
            print('Was not able to find a matching station name')
            if max_depth < 10:
                print("Probably it is a cleaning station ")
                new_filename = os.path.join(self.new_base_path, 'Cleaning_station' + str(Ser) + '.txt')
            else:
                #print('available station depths', self.stations_depths)
                #filename = self.base_path + r'\\Unknown_station' + str(Ser) + '.txt'
                print('Cast Unknown_station', Ser)
                new_filename = self.new_base_path + r'\\Unknown_station' + str(Ser) + '.txt'
            self.non_assigned.append(new_filename)
            #group.to_csv(filename, index=False, sep=';')
            #print (group['OxMlL'].values.max())
            group.to_csv(new_filename, index=False, sep=';')
        #else:
        #    print ('Date of measurement does not match date in a filename')
        #    print(self.survey_date, self.correct_survey_date, self.survey_date == self.correct_survey_date)
        return

    def calc_salinity(self,group):
        ''' If salinity is not in the list
        calculate if from TSP (practical salinity from conductivity,
        temperature and pressure via gsw.SP_from_C)
        '''
        print( 'calculating_salinity')
        salinity = []
        for n in range(len(group['Cond.'])):
            s = gsw.SP_from_C(group['Cond.'].values[n], group['Temp'].values[n], group['Press'].values[n])
            salinity.append(s)
        group['Salinity'] = salinity
        return group

    def make_new_base_path(self):
        """Create (if needed) and remember the '<date> CTD data' output folder
        under *onedrive*, derived from the current cast's survey date."""
        # datetime.datetime.strptime(
        date_folder = pd.to_datetime(str(self.survey_date), format='%d.%m.%Y').strftime('%Y-%m-%d')
        ##self.new_base_path = os.path.join(onedrive, self.survey, date_folder, date_folder + " CTD data")
        self.new_base_path = os.path.join(self.onedrive, date_folder + " CTD data")
        if not os.path.exists(self.new_base_path):
            os.makedirs(self.new_base_path)

    def add_metadata_header(self):
        """Prepend the station metadata header to the temporary cast file and
        save the result as '<station>.txt'; the temp file is then removed."""
        header = self.station_metadata['station.header']
        #print ('adding metadata header to ', self.station_name,'.txt')
        new_filename = os.path.join(self.new_base_path, self.station_name + '.txt')
        print ('save data to', new_filename)
        # Open initial file, update header, save the new file in One_Drive
        with open(self.filename, 'r') as read_obj, open(new_filename, 'w') as write_obj:
            write_obj.write(header)
            for line in read_obj:
                write_obj.write(line)
        try:
            os.remove(self.filename)
        except Exception as e:
            print(e)
def manual_add_metadata_header(filepath, station_name):
    """Manually prepend the station metadata header to a CTD text file.

    Searches every survey in ``surveys_lookup_table`` for *station_name*,
    writes '<dir>/<station_name>.txt' containing the header followed by the
    original file's contents, then removes the original file (failures to
    remove are only printed, matching processStation.add_metadata_header).

    Raises:
        KeyError: if *station_name* is not found in any survey.  (BUG FIX:
            previously ``header`` stayed unbound and an opaque NameError
            was raised instead.)
    """
    base_path = os.path.split(filepath)[0]
    header = None
    for survey in surveys_lookup_table:
        if station_name in surveys_lookup_table[survey]:
            header = surveys_lookup_table[survey][station_name]['station.header']
            break
    if header is None:
        raise KeyError('Station %r not found in any survey lookup table' % station_name)
    new_filename = os.path.join(base_path, station_name + '.txt')
    # Open initial file, write header + contents into the new file in one pass.
    with open(filepath, 'r') as read_obj, open(new_filename, 'w') as write_obj:
        write_obj.write(header)
        for line in read_obj:
            write_obj.write(line)
    try:
        os.remove(filepath)
    except Exception as e:
        print (e)
    #os.rename(filepath, base_path +f'to_{station_name}.txt')
if __name__ == "__main__":
    # Processing history: each commented call below corresponds to one survey
    # folder that has already been processed; uncomment to reprocess.
    #k_work_dir = r'K:/Avdeling/214-Oseanografi/DATABASER/OKOKYST_2017/'
    #task = "sognefjorden"
    #leon = r"K:\Avdeling\214-Oseanografi\DATABASER\OKOKYST_2017\OKOKYST_NS_Nord_Leon\\"
    def call_process(main_path, foldername):
        """Run processStation over every txt file in <main_path>/<foldername>,
        skipping files with 'OBS' in the name; output goes to the same folder."""
        path = os.path.join(main_path, foldername)
        onedrive = path
        files = glob.glob(path + '\*txt')
        for f in files:
            if 'OBS' not in f:
                processStation(f,onedrive)
    user = 'ELP'
    main_path_RMS = fr"C:\Users\{user}\OneDrive - NIVA\Okokyst_CTD\Norskehavet_Sor\RMS"
    main_path_aqua = fr"C:\Users\{user}\OneDrive - NIVA\Okokyst_CTD\Norskehavet_Sor\Aquakompetens"
    #foldernames = [f for f in os.listdir(main_path) if re.match(r'2021', f)]
    #RMS
    #call_process(main_path_RMS,'06_2021')
    #call_process('04-2021')
    #call_process('06-2021')
    #call_process('07-2021')
    #call_process('08-2021')
    #Aqua kompetanse
    call_process(main_path_aqua,'2021-08')
    # Sognefjorden 2021
    main_path_sognefjorden = fr"C:\Users\{user}\OneDrive - NIVA\Okokyst_CTD\Nordsjoen_Nord\Sognefjorden"
    #foldername = "2021-01-25"
    # Here the automatic assignment did not work, due to bad weather the CTD did not reach the bottom
    #call_process(main_path_sognefjorden, "2021-02-17")
    #manual_add_metadata_header(r"C:\Users\ELP\OneDrive - NIVA\Okokyst_CTD\Nordsjoen_Nord\Sognefjorden\2021-02-17\2021-02-17 CTD data\Unknown_station2.txt", 'VT16')
    #call_process(main_path_sognefjorden, '2021-03-14')
    #call_process(main_path_sognefjorden, '2021-04-18')
    #call_process(main_path_sognefjorden, '2021-05-19')
    #call_process(main_path_sognefjorden, '2021-06-17')
    #call_process(main_path_sognefjorden, '2021-07-14')
    #call_process(main_path_sognefjorden, '2021-08-18')
    main_path_hardangerfjorden = r'C:\Users\ELP\OneDrive - NIVA\Okokyst_CTD\Nordsjoen_Nord\Hardangerfjorden'
    #call_process(main_path_hardangerfjorden,'2021-01-18',survey = 'Hardangerfjorden_old')
    #manual_add_metadata_header(r'C:\Users\ELP\OneDrive - NIVA\Okokyst_CTD\Nordsjoen_Nord\Hardangerfjorden\2021-01-18\2021-01-18 CTD data\Unknown_station3.txt',
    #                           "VT70")
    #call_process(main_path_hardangerfjorden,'2021-02-23',survey = 'Hardangerfjorden_old')
    #call_process(main_path_hardangerfjorden,'2021-03-22-23')#,survey = 'Hardangerfjorden_old'
    #manual_add_metadata_header(r"C:\Users\ELP\OneDrive - NIVA\Okokyst_CTD\Nordsjoen_Nord\Hardangerfjorden\2021-03-22-23\2021-03-22 CTD data\Unknown_station4.txt",
    #                           'VR49')
    call_process(main_path_hardangerfjorden, "2021-04-20-21")
    #call_process(main_path_hardangerfjorden, '2021-05-18-20')
    #call_process(main_path_hardangerfjorden, '2021-06')
    #call_process(main_path_hardangerfjorden, "2021-07")
    #call_process(main_path_hardangerfjorden, '2021-08')
    #Has to be checked, no oxygen! did not work
    ###call_process(main_path_hardangerfjorden, "2021-05-18-20")
    #call_process(main_path_hardangerfjorden, "2021-07")
    print ('\n\n')
    ##for f in foldernames:
    ##    call_process(f)
import pytest
from tartiflette.language.ast import InterfaceTypeExtensionNode
def test_interfacetypeextensionnode__init__():
    """The constructor must store every argument on the matching attribute."""
    node = InterfaceTypeExtensionNode(
        name="interfaceTypeExtensionName",
        directives="interfaceTypeExtensionDirectives",
        fields="interfaceTypeExtensionFields",
        location="interfaceTypeExtensionLocation",
    )
    assert node.name == "interfaceTypeExtensionName"
    assert node.directives == "interfaceTypeExtensionDirectives"
    assert node.fields == "interfaceTypeExtensionFields"
    assert node.location == "interfaceTypeExtensionLocation"
def _interface_type_extension_node(**overrides):
    """Build an InterfaceTypeExtensionNode from default test values,
    overriding any subset of them."""
    kwargs = {
        "name": "interfaceTypeExtensionName",
        "directives": "interfaceTypeExtensionDirectives",
        "fields": "interfaceTypeExtensionFields",
        "location": "interfaceTypeExtensionLocation",
    }
    kwargs.update(overrides)
    return InterfaceTypeExtensionNode(**kwargs)


@pytest.mark.parametrize(
    "interface_type_extension_node,other,expected",
    [
        # Comparison against a non-node object is never equal.
        (_interface_type_extension_node(), Ellipsis, False),
        # Any single differing attribute breaks equality...
        (
            _interface_type_extension_node(),
            _interface_type_extension_node(name="interfaceTypeExtensionNameBis"),
            False,
        ),
        (
            _interface_type_extension_node(),
            _interface_type_extension_node(
                directives="interfaceTypeExtensionDirectivesBis"
            ),
            False,
        ),
        (
            _interface_type_extension_node(),
            _interface_type_extension_node(
                fields="interfaceTypeExtensionFieldsBis"
            ),
            False,
        ),
        (
            _interface_type_extension_node(),
            _interface_type_extension_node(
                location="interfaceTypeExtensionLocationBis"
            ),
            False,
        ),
        # ...while fully identical attributes compare equal.
        (_interface_type_extension_node(), _interface_type_extension_node(), True),
    ],
)
def test_interfacetypeextensionnode__eq__(
    interface_type_extension_node, other, expected
):
    assert (interface_type_extension_node == other) is expected
@pytest.mark.parametrize(
    "interface_type_extension_node,expected",
    [
        (
            InterfaceTypeExtensionNode(
                name="interfaceTypeExtensionName",
                directives="interfaceTypeExtensionDirectives",
                fields="interfaceTypeExtensionFields",
                location="interfaceTypeExtensionLocation",
            ),
            "InterfaceTypeExtensionNode("
            "name='interfaceTypeExtensionName', "
            "directives='interfaceTypeExtensionDirectives', "
            "fields='interfaceTypeExtensionFields', "
            "location='interfaceTypeExtensionLocation')",
        )
    ],
)
def test_interfacetypeextensionnode__repr__(interface_type_extension_node, expected):
    # repr() must spell out every attribute in declaration order.
    assert repr(interface_type_extension_node) == expected
| 5,250 | 1,062 |
'''Given two arrays, write a function to compute their intersection.
'''
class Solution(object):
    def intersect(self, nums1, nums2):
        """
        Return the multiset intersection of two arrays: each element appears
        as many times as it occurs in both inputs (order unspecified).

        :type nums1: List[int]
        :type nums2: List[int]
        :rtype: List[int]

        Improvements over the previous version: runs in
        O(len(nums1) + len(nums2)) instead of O(n*m) repeated membership
        scans, and no longer mutates the caller's lists (the old code
        called list.remove on one of its arguments).
        """
        from collections import Counter
        # Count nums1 once, then walk nums2 consuming matching counts.
        remaining = Counter(nums1)
        result = []
        for value in nums2:
            if remaining[value] > 0:
                remaining[value] -= 1
                result.append(value)
        return result
| 715 | 246 |
from collections import defaultdict
import json
from pandas.core import frame
import torch
import pandas as pd
import os
import pickle as pkl
import numpy as np
import cv2
import h5py
import tqdm
import functools
import lmdb
class EGTEA_GAZE_DATASET(torch.utils.data.Dataset):
    """Action-anticipation dataset over precomputed frame features for EGTEA Gaze+.

    For every annotated action segment, yields the feature vectors of frames
    observed *before* the action starts (ending ``tau_a`` seconds before the
    segment start) together with the upcoming verb / noun / action labels.
    Frame features are read from an LMDB store keyed by
    ``'<video_id>_frame_<10-digit-frame-no>.jpg'``.
    """

    def __init__(self, logger, config, root=None):
        """Build the segment index and open the feature store.

        Parameters
        ----------
        logger : a logging.Logger-like object used for progress messages.
        config : configuration object; fields read here: name, split, fps,
            tau_a, feature, feature_fps, feature_dim, past_frame,
            forward_frame, drop, feat_file, cache.
        root : str, optional
            Dataset root directory; defaults to './data/EG+'.
        """
        super().__init__()
        # Fix: honor a caller-supplied root — the argument used to be
        # silently ignored and the path was always hard-coded.
        self.root = root if root is not None else './data/EG+'
        self.name = config.name
        self.split = config.split
        self.config = config
        self.model_fps = config.fps
        self.tau_a = config.tau_a  # anticipation gap (seconds)
        self.feature = config.feature
        self.feature_fps = config.feature_fps
        self.feature_dim = config.feature_dim
        assert config.name == 'EGTEA_GAZE+'

        # Action taxonomy: each action class maps to a (verb, noun) pair.
        self.class_info = pd.read_csv(os.path.join(self.root,'actions.csv'), names=['action_class','verb_noun_class','text'])
        self.num_action = self.class_info.shape[0]
        self.vn2action = []
        for _, a in self.class_info.iterrows():
            v,n = list(map(int,a.verb_noun_class.split('_')))
            self.vn2action.append([v,n])
        self.num_verb = len(set([a[0] for a in self.vn2action]))
        self.num_noun = len(set([a[1] for a in self.vn2action]))

        # Pick the annotation CSV for the requested split.
        annotation_file = {
            'train1':'training1.csv',
            'train2':'training2.csv',
            'train3':'training3.csv',
            'valid1':'validation1.csv',
            'valid2':'validation2.csv',
            'valid3':'validation3.csv',
        }[config.split]
        annotation_file = os.path.join(self.root,annotation_file)

        assert config.past_frame > 0
        self.data = []
        info = pd.read_csv(annotation_file, header=None, names=['video','start','end','verb','noun','action'])
        for idx,a in info.iterrows():
            video_name = a.video
            start_frame = a.start
            aid = a.action
            vid = a.verb
            nid = a.noun
            segment = {
                'id' : idx,
                'video_id' : video_name,
                'next_verb_class' : vid,
                'next_noun_class' : nid,
                'next_action_class' : aid,
            }
            # Optionally drop segments that start too early to leave a full
            # observation window before the anticipation gap.
            if config.drop and start_frame<=self.tau_a * self.feature_fps:
                continue
            # Feature-frame indices of the observed window, sampled at the
            # model frame rate and ending tau_a seconds before the action
            # starts; reversed so indices run oldest-to-newest.
            frame_index = np.arange(
                start_frame - self.tau_a * self.feature_fps + config.forward_frame * self.feature_fps / self.model_fps,
                start_frame - self.tau_a * self.feature_fps - config.past_frame * self.feature_fps / self.model_fps,
                - self.feature_fps / self.model_fps
            ).astype(int)[::-1]
            assert len(frame_index) == config.past_frame + config.forward_frame
            frame_index[frame_index<1] = 1  # clamp to the first frame
            segment['frame_index'] = frame_index
            self.data.append(segment)
            # debug
            # break
        self.verb_weight, self.noun_weight, self.action_weight = None, None, None

        ##### feature
        assert config.feat_file
        self.f = lmdb.open(config.feat_file, readonly=True, lock=False)

        logger.info('[%s] # Frame: Past %d. Forward %d.' % (
            config.split, config.past_frame,config.forward_frame))
        logger.info('[%s] # segment %d. verb %d. noun %d. action %d.' % (
            config.split, len(self.data), self.num_verb, self.num_noun, self.num_action))

        self.cache = {}
        if config.cache:
            self.make_cache(logger)

    def make_cache(self,logger):
        """Preload every frame feature referenced by the index into memory."""
        logger.info('Cache: Load all feature into memory')
        for segment in self.data:
            for fid in segment['frame_index']:
                key = '%s_frame_%010d.jpg' % (segment['video_id'],fid)
                if key not in self.cache:
                    res = self._read_one_frame_feat(key)
                    self.cache[key] = res
        logger.info('Cache: Finish loading. Cache Size %d' % len(self.cache))

    def _read_one_frame_feat(self,key):
        """Return the float32 feature vector stored under ``key`` in LMDB,
        served from the cache when possible; None when the key is absent."""
        if key in self.cache:
            return self.cache[key]
        with self.f.begin() as e:
            buf = e.get(key.strip().encode('utf-8'))
            if buf is not None:
                res = np.frombuffer(buf,'float32')
            else:
                res = None
        return res

    def _load_feat(self,video_id, frame_ids):
        """Stack features for ``frame_ids`` of ``video_id`` into a
        (num_frames, feature_dim) float tensor.  A missing frame reuses the
        previous frame's feature, or zeros when there is no previous frame."""
        frames = []
        dim = self.feature_dim
        for fid in frame_ids:
            key = '%s_frame_%010d.jpg' % (video_id,fid)
            frame_feat = self._read_one_frame_feat(key)
            if frame_feat is not None:
                frames.append(frame_feat)
            elif len(frames) > 0:
                frames.append(frames[-1])
                # print('Copy frame: %s' % key)
            else:
                frames.append(np.zeros(dim))
                # print('Zero frame: %s' % key)
        return torch.from_numpy(np.stack(frames,0)).float()

    def __len__(self):
        return len(self.data)

    def __getitem__(self,i):
        """Return the observed-window features and target labels of segment i."""
        segment = self.data[i]
        out = {
            'id' : segment['id'],
            'index' : i
        }
        out['next_action_class'] = segment['next_action_class']
        out['next_verb_class'] = segment['next_verb_class']
        out['next_noun_class'] = segment['next_noun_class']
        out['past_frame'] = self._load_feat(
            segment['video_id'],
            segment['frame_index'],
        )
        return out
| 5,609 | 1,748 |
'''
Merge Two Sorted Lists
Asked in:
Microsoft
Yahoo
Amazon
Merge two sorted linked lists and return it as a new list.
The new list should be made by splicing together the nodes of the first two lists, and should also be sorted.
For example, given following linked lists :
5 -> 8 -> 20
4 -> 11 -> 15
The merged list should be :
4 -> 5 -> 8 -> 11 -> 15 -> 20
'''
class Node:
    """Singly linked list node."""

    def __init__(self, data):
        # payload value
        self.data = data
        # reference to the next node; None marks the end of the list
        # (removed the pointless bare `return` — __init__ returns None anyway)
        self.next = None
class Solution:
    # @param A : head node of linked list
    # @param B : head node of linked list
    # @return the head node in the linked list
    def mergeTwoLists(self, h1, h2):
        """Splice two sorted linked lists together (in place) and return
        the head of the merged, sorted list."""
        # Guard clauses: an empty list merges to the other list.
        if h1 is None:
            return h2
        if h2 is None:
            return h1
        # Pick the smaller head as the merged head (ties go to h2,
        # matching a strict '<' comparison).
        if h1.data < h2.data:
            head = h1
            h1 = h1.next
        else:
            head = h2
            h2 = h2.next
        tail = head
        # Repeatedly attach the smaller remaining node.
        while h1 is not None and h2 is not None:
            if h1.data < h2.data:
                tail.next = h1
                h1 = h1.next
            else:
                tail.next = h2
                h2 = h2.next
            tail = tail.next
        # At most one list still has nodes; splice it on wholesale.
        tail.next = h1 if h1 is not None else h2
        return head
| 1,092 | 354 |
v = int(input('Digite um valor: '))

# Count the divisors of v that are strictly smaller than v.
validador = 0
contador = 1
while contador < v:
    if v % contador == 0:
        validador += 1
    contador += 1

# A prime has exactly two divisors (1 and itself), i.e. the only proper
# divisor counted above is 1.  Values below 2 are never prime — the
# original code wrongly reported 1 (and non-positive inputs) as prime.
if validador > 1 or v < 2:
    print(f'Esse número NÃO é primo, pois é divisível por {validador+1} números diferentes ')
else:
    print('Esse número é primo')
"""Testing methods that need Handle server read access"""
import sys
if sys.version_info < (2, 7):
import unittest2 as unittest
else:
import unittest
import requests
import json
import mock
import b2handle
from b2handle.handleclient import EUDATHandleClient
from b2handle.handleexceptions import *
# Load some data that is needed for testing
PATH_RES = b2handle.util.get_neighbour_directory(__file__, 'resources')
RESOURCES_FILE = json.load(open(PATH_RES+'/testvalues_for_integration_tests_IGNORE.json'))
# This file is not public, as it contains valid credentials for server
# write access. However, by providing such a file, you can run the tests.
# A template can be found in resources/testvalues_for_integration_tests_template.json
class EUDATHandleClientReadaccessTestCase(unittest.TestCase):
    """Integration tests that only need read access to a Handle server.

    Handle names, user names and server URLs come from the non-public
    resources file loaded at module level (see the template referenced
    above).
    """

    def __init__(self, *args, **kwargs):
        unittest.TestCase.__init__(self, *args, **kwargs)

        # Read resources from file:
        self.testvalues = RESOURCES_FILE

        # Test values that need to be given by user:
        self.handle = self.testvalues['handle_for_read_tests']
        self.handle_global = self.testvalues['handle_globally_resolvable']
        self.user = self.testvalues['user']

        # Optional:
        self.https_verify = True
        if 'HTTPS_verify' in self.testvalues:
            self.https_verify = self.testvalues['HTTPS_verify']
        self.url = 'http://hdl.handle.net'
        if 'handle_server_url_read' in self.testvalues.keys():
            self.url = self.testvalues['handle_server_url_read']
        self.path_to_api = None
        if 'url_extension_REST_API' in self.testvalues.keys():
            self.path_to_api = self.testvalues['url_extension_REST_API']

        # Others
        prefix = self.handle.split('/')[0]
        self.inexistent_handle = prefix+'/07e1fbf3-2b72-430a-a035-8584d4eada41'
        self.randompassword = 'some_random_password_shrgfgh345345'

    def setUp(self):
        """ For most test, provide a client instance with the user-specified
        handle server url."""
        self.inst = EUDATHandleClient(
            HTTPS_verify=self.https_verify,
            handle_server_url=self.url,
            url_extension_REST_API=self.path_to_api)

        # Before being able to run these tests without write access,
        # the handle that we use for testing must exist. With this code,
        # you can create it. You only need to create it once and leave it
        # on the server, it will not be modified and can be used eternally.
        if False:
            # This should always be false!!! Except for creating the
            # required handle once!
            self.create_required_test_handles()

    def tearDown(self):
        # Nothing to clean up: tests only read from the server.
        # (Removed a duplicated `pass` statement.)
        pass

    def create_required_test_handles(self):
        """One-off helper: create the handle record the read tests rely on.

        Only invoked manually (the setUp guard is `if False`); requires
        write credentials from the resources file.
        """
        # Creating an instance that knows how to write:
        pw = self.testvalues['password']
        inst = EUDATHandleClient.instantiate_with_username_and_password(
            self.testvalues['handle_server_url_write'],
            self.user,
            pw,
            HTTPS_verify=self.https_verify)
        authstring = b2handle.utilhandle.create_authentication_string(self.user, pw)
        headers = {
            'Content-Type': 'application/json',
            'Authorization': 'Basic '+authstring
        }
        list_of_all_entries = [
            {
                "index":100,
                "type":"HS_ADMIN",
                "data":{
                    "format":"admin",
                    "value":{
                        "handle":"21.T14999/B2HANDLE_INTEGRATION_TESTS",
                        "index":300,
                        "permissions":"011111110011"
                    }
                }
            },
            {
                "index":111,
                "type":"TEST1",
                "data":"val1"
            },
            {
                "index":2222,
                "type":"TEST2",
                "data":"val2"
            },
            {
                "index":333,
                "type":"TEST3",
                "data":"val3"
            },
            {
                "index":4,
                "type":"TEST4",
                "data":"val4"
            }
        ]
        testhandle = self.handle
        url = self.testvalues['handle_server_url_write']+self.testvalues['url_extension_REST_API']+testhandle
        veri = self.https_verify
        head = headers
        data = json.dumps({'values':list_of_all_entries})
        resp = requests.put(url, data=data, headers=head, verify=veri)

    # retrieve_handle_record_json

    def test_retrieve_handle_record_json(self):
        """Test reading handle record from server."""
        rec = self.inst.retrieve_handle_record_json(self.handle)
        received_type = rec['values'][2]['type']
        received_value = rec['values'][2]['data']['value']
        # Fix: the failure messages used to name TEST3/val3 although the
        # assertions check TEST1/val1.
        self.assertEqual(received_type, 'TEST1',
            'The type should be "TEST1" but was "%s" (%s).'% (received_type, self.handle))
        self.assertEqual(received_value, 'val1',
            'The value should be "val1" but is "%s" (%s).' % (received_value, self.handle))

    # get_value_from_handle

    def test_get_value_from_handle_normal(self):
        """Test reading existent and inexistent handle value from server."""
        val = self.inst.get_value_from_handle(self.handle, 'TEST1')
        self.assertEqual(val, 'val1',
            'Retrieving "TEST1" from %s should lead to "val1", but it lead to "%s"' % (self.handle,val))

    def test_get_value_from_handle_inexistent_key(self):
        """Test that a missing key in an existing record yields None."""
        val = self.inst.get_value_from_handle(self.handle, 'TEST100')
        self.assertIsNone(val,
            'Retrieving "TEST100" from %s should lead to "None", but it lead to "%s"' % (self.handle,val))

    def test_get_value_from_handle_inexistent_record(self):
        """Test reading handle value from inexistent handle."""
        with self.assertRaises(HandleNotFoundException):
            val = self.inst.get_value_from_handle(self.inexistent_handle, 'anykey')

    # instantiate

    def test_instantiate_with_username_and_wrong_password(self):
        """Test instantiation of client: No exception if password wrong."""
        # Create client instance with username and password
        inst = EUDATHandleClient.instantiate_with_username_and_password(
            self.url,
            self.user,
            self.randompassword,
            HTTPS_verify=self.https_verify)
        self.assertIsInstance(inst, EUDATHandleClient)

    def test_instantiate_with_username_without_index_and_password(self):
        """Test instantiation of client: Exception if username has no index."""
        testusername_without_index = self.user.split(':')[1]

        # Run code to be tested + check exception:
        with self.assertRaises(HandleSyntaxError):
            # Create client instance with username and password
            inst = EUDATHandleClient.instantiate_with_username_and_password(
                self.url,
                testusername_without_index,
                self.randompassword,
                HTTPS_verify=self.https_verify)

    def test_instantiate_with_nonexistent_username_and_password(self):
        """Test instantiation of client: Exception if username does not exist."""
        testusername_inexistent = '100:'+self.inexistent_handle

        # Run code to be tested + check exception:
        with self.assertRaises(HandleNotFoundException):
            # Create client instance with username and password
            inst = EUDATHandleClient.instantiate_with_username_and_password(
                self.url,
                testusername_inexistent,
                self.randompassword,
                HTTPS_verify=self.https_verify)

    def test_instantiate_with_credentials(self):
        """Test instantiation of client: No exception if password wrong."""
        # Test variables
        credentials = b2handle.clientcredentials.PIDClientCredentials(
            handle_server_url=self.url,
            username=self.user,
            password=self.randompassword)

        # Run code to be tested
        # Create instance with credentials
        inst = EUDATHandleClient.instantiate_with_credentials(
            credentials,
            HTTPS_verify=self.https_verify)

        # Check desired outcomes
        self.assertIsInstance(inst, EUDATHandleClient)

    def test_instantiate_with_credentials_inexistentuser(self):
        """Test instantiation of client: Exception if username does not exist."""
        # Test variables
        testusername_inexistent = '100:'+self.inexistent_handle
        credentials = b2handle.clientcredentials.PIDClientCredentials(
            handle_server_url=self.url,
            username=testusername_inexistent,
            password=self.randompassword)

        # Run code to be tested + check exception:
        # Create instance with credentials
        with self.assertRaises(HandleNotFoundException):
            inst = EUDATHandleClient.instantiate_with_credentials(credentials,
                HTTPS_verify=self.https_verify)

        # If the user name has no index, exception is already thrown in credentials creation!
        #self.assertRaises(HandleSyntaxError, b2handle.PIDClientCredentials, 'url', 'prefix/suffix', randompassword)

    def test_instantiate_with_credentials_config_override(self):
        """Test instantiation of client: No exception if password wrong."""
        # Test variables
        # (Removed dead MagicMock/config_from_cred assignments that were
        # immediately overwritten.)
        valuefoo = 'foo/foo/foo/' # passed via credentials
        valuebar = 'bar/bar/bar' # passed directly to constructor
        credentials = b2handle.clientcredentials.PIDClientCredentials(
            handle_server_url=self.url,
            username=self.user,
            password=self.randompassword,
            handleowner=self.user,
            REST_API_url_extension=valuefoo
        )
        self.assertEqual(credentials.get_config()['REST_API_url_extension'],valuefoo,
            'Config: '+str(credentials.get_config()))

        # foo/foo/ from the credentials should be overridden by bar/bar/ which is directly passed
        # Run code to be tested - we expect an exception, as it will try to do a GET on the bogus rest api:
        with self.assertRaises(GenericHandleError):
            inst = EUDATHandleClient.instantiate_with_credentials(
                credentials,
                HTTPS_verify=self.https_verify,
                REST_API_url_extension=valuebar)
            # So this code can only be reached if something went wrong:
            self.assertIsInstance(inst, EUDATHandleClient)
            # Check if bar/bar instead of foo/foo was stored as path!
            serverconn = inst._EUDATHandleClient__handlesystemconnector
            self.assertIn('/bar/', serverconn._HandleSystemConnector__REST_API_url_extension)
            self.assertNotIn('/foo/', serverconn._HandleSystemConnector__REST_API_url_extension)
            # Fix: assertEquals is a deprecated alias of assertEqual.
            self.assertEqual(serverconn._HandleSystemConnector__REST_API_url_extension, valuebar)

    def test_instantiate_with_credentials_config(self):
        """Test instantiation of client: No exception if password wrong."""
        # Test variables
        # (Removed dead MagicMock/config_from_cred assignments that were
        # immediately overwritten.)
        valuefoo = 'foo/foo/foo/'
        credentials = b2handle.clientcredentials.PIDClientCredentials(
            handle_server_url=self.url,
            username=self.user,
            password=self.randompassword,
            handleowner=self.user,
            REST_API_url_extension=valuefoo
        )
        self.assertEqual(credentials.get_config()['REST_API_url_extension'],valuefoo,
            'Config: '+str(credentials.get_config()))

        # foo/foo/ from the credentials should override default api/handles/
        # Run code to be tested - we expect an exception, as it will try to do a GET on the bogus rest api:
        with self.assertRaises(GenericHandleError):
            inst = EUDATHandleClient.instantiate_with_credentials(
                credentials,
                HTTPS_verify=self.https_verify)
            # So this code can only be reached if something went wrong:
            self.assertIsInstance(inst, EUDATHandleClient)
            # Check if foo/foo instead of api/handles was stored as path!
            serverconn = inst._EUDATHandleClient__handlesystemconnector
            self.assertIn('/foo/', serverconn._HandleSystemConnector__REST_API_url_extension)
            # Fix: assertEquals is a deprecated alias of assertEqual.
            self.assertEqual(serverconn._HandleSystemConnector__REST_API_url_extension, valuefoo)

    def test_global_resolve(self):
        """Testing if instantiating with default handle server'works
        and if a handle is correctly retrieved. """
        # Create instance with default server url:
        inst = EUDATHandleClient(HTTPS_verify=self.https_verify)
        rec = inst.retrieve_handle_record_json(self.handle_global)
        self.assertIn('handle', rec,
            'Response lacks "handle".')
        self.assertIn('responseCode', rec,
            'Response lacks "responseCode".')

    def test_instantiate_for_read_access(self):
        """Testing if instantiating with default handle server works
        and if a handle is correctly retrieved. """
        # Create client instance with username and password
        inst = EUDATHandleClient.instantiate_for_read_access(HTTPS_verify=self.https_verify)
        rec = self.inst.retrieve_handle_record_json(self.handle)
        self.assertIsInstance(inst, EUDATHandleClient)
        self.assertIn('handle', rec,
            'Response lacks "handle".')
        self.assertIn('responseCode', rec,
            'Response lacks "responseCode".')
# !/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Filename: models.py
# Project: core
# Author: Brian Cherinka
# Created: Saturday, 12th September 2020 12:55:22 pm
# License: BSD 3-clause "New" or "Revised" License
# Copyright (c) 2020 Brian Cherinka
# Last Modified: Saturday, 12th September 2020 12:55:22 pm
# Modified By: Brian Cherinka
from __future__ import print_function, division, absolute_import
import re
from marshmallow.fields import Field
import six
import orjson
from marshmallow import Schema, fields, post_load
from fuzzy_types.fuzzy import FuzzyList
# core classes
class BaseClass(object):
    """Marker base for generated model classes.

    NOTE(review): ``__new__`` falls through (``pass``) and therefore
    returns ``None``, so instantiating ``BaseClass`` directly yields
    ``None`` rather than an instance — presumably intentional to prevent
    direct use, but confirm before relying on it.
    """

    def __new__(cls, *args, **kwargs):
        # Implicitly returns None: direct instantiation produces no object.
        pass
class BaseSchema(Schema):
    """Shared Schema base: ordered fields, orjson rendering, and
    post-load deserialization into the bound ``_class`` object."""

    _class = None  # the Python class produced when the schema loads data

    class Meta:
        ordered = True
        render_module = orjson

    @post_load
    def make_object(self, data, **kwargs):
        """Turn the validated field dict into an instance of ``_class``."""
        target = self._class
        return target(**data)
class ObjectField(fields.Field):
    ''' custom marshmallow object field

    This is a custom marshmallow Field class used to indicate that an attribute
    should be represented by a custom model object type, rather than a string or integer. It
    contains special methods for custom serialization and deserialization of model datatypes.
    For example, the yaml string representation 'LOG' for a log-linear wavelength will get
    deserialized into an instance Wavelength('LOG'). Custom fields are described at
    https://marshmallow.readthedocs.io/en/3.0/custom_fields.html.
    '''

    def _serialize(self, value, attr, obj, **kwargs):
        # Serialize a model object to its simplest string form, preferring
        # release over name over title; None serializes to the empty string.
        if value is None:
            return ''
        return (value.release if hasattr(value, 'release') else value.name if hasattr(value, 'name')
                else value.title if hasattr(value, 'title') else '')

    def _deserialize(self, value, attr, data, **kwargs):
        # NOTE(review): relies on ``self.default`` and ``self.models`` being
        # set on the field instance; ``models`` is not defined anywhere in
        # this file, so it is presumably attached externally — confirm.
        name = self.default
        assert isinstance(value, six.string_types), f'{value} must be a string'
        # Rebinding ``data`` shadows the parameter of the same name.
        data = self.models.get(name, None)
        # Fall back to the raw string when no model lookup matches.
        return data[value] if data and value in data else value
# main/helper functions
def _get_attr(obj: object, name: str):
''' Get an attribute from a class object
Attempts to retrieve an attribute from a class object
Parameters
----------
obj : object
A class object to access
name : str
The attribute name to access
Returns
-------
a class attribute
'''
if hasattr(obj, name):
return obj.__getattribute__(name)
else:
return None
def create_class(data: dict, mixin: object = None) -> object:
    ''' creates a new datamodel object class

    Constructs a Python class object based on a model "schema" dictionary.
    Converts a model yaml file, 'versions.yaml' into a Python Version class object,
    which is used for instantiating the designated "objects" in the yaml section.

    Parameters
    ----------
    data : dict
        The schema dictionary section of a yaml file
    mixin : object
        A custom model class to mixin with base model

    Returns
    -------
    A new Python class object
    '''
    name = data.get('name', None) or data.get('title', None)

    # define custom repr
    def new_rep(self):
        reprstr = f'<{name}({self._repr_fields})>'
        return reprstr

    # define custom str
    def new_str(self):
        name = (_get_attr(self, 'name') or _get_attr(self, 'title') or
                _get_attr(self, 'release') or '')
        return name

    # get the attributes to add to the repr
    props = data.get('attributes', None) or data.get('properties', None)
    # Fix: default to an empty list so new_init does not raise a NameError
    # when the schema defines no attributes/properties at all.
    added_fields = []
    if props:
        added_fields = [a for a, vals in props.items()
                        if vals.get('add_to_repr', None)]

    # define a new init
    def new_init(self, **kwargs):
        repr_fields = ''
        # loop for attributes
        for key, value in list(kwargs.items()):
            self.__setattr__(key, value)
            # create a repr field string
            if key in added_fields:
                repr_fields += f', {key}={value}'
        # create a string of the repr fields
        name = (_get_attr(self, 'name') or _get_attr(self, 'title') or
                _get_attr(self, 'release') or '')
        self._repr_fields = f'{name}' + repr_fields

    # create the new class and add the new methods
    bases = (mixin, object,) if mixin else (object,)
    obj = type(name, bases, {})
    obj.__init__ = new_init
    obj.__repr__ = new_rep
    obj.__str__ = new_str
    return obj
def parse_kind(value: str) -> tuple:
    ''' parse the kind value into a kind and subkind

    Parses the schema "kind" attribute into a kind and subkind if
    kind contains parentheses, i.e. kind(subkind). For example,
    list(objects) returns kind=list, subkind=objects.

    Parameters
    ----------
    value : str
        The type of field

    Returns
    -------
    A tuple of the field type and any sub-type
    '''
    subkind = re.search(r'\((.+?)\)', value)
    if subkind:
        kind = value.split('(', 1)[0]
        subkind = subkind.group(1)
    else:
        kind = value
    # set default list or tuple subfield to string
    # Fix: only apply the default when no explicit subkind was given;
    # previously 'list(objects)' had its subkind clobbered to 'string',
    # contradicting the docstring above.
    if subkind is None and kind.lower() in ('list', 'tuple'):
        subkind = 'string'
    return kind, subkind
def get_field(value: str, key: str = None) -> Field:
    ''' Get a Marshmallow Fields type

    Looks up the marshmallow field class named by the model schema "kind"
    parameter.  The special value "Objects" maps to the custom ObjectField.

    Parameters
    ----------
    value : str
        The kind of field to retrieve, e.g. string
    key : str
        The name of the attribute for the field

    Returns
    -------
    a marshmallow field class
    '''
    # Prefer a real marshmallow field of that name, as the original does.
    field_cls = getattr(fields, value, None)
    if field_cls is not None:
        return field_cls
    if value == 'Objects':
        return ObjectField(data_key=key)
    raise ValueError(f'Marshmallow Fields does not have {value}')
def create_field(data: dict, key: str = None, required: bool = None,
                 nodefault: bool = None) -> Field:
    ''' creates a marshmallow.fields object

    Parameters
    ----------
    data : dict
        A values dictionary for a given model attribute
    key : str
        The name of the attribute
    required : bool
        If True, sets the field as a required one. Default is False.
    nodefault : bool
        If True, turns off any defaults specified for fields. Default is False.

    Returns
    -------
    A marshmallow field instance to attach to a schema
    '''
    # Normalize the declared kind ("kind" or "type") to Title case,
    # then split off any parenthesized sub-kind.
    raw_kind = data.get('kind', None) or data.get('type', None)
    if raw_kind:
        raw_kind = raw_kind.title()
    kind, subkind = parse_kind(raw_kind)

    # Resolve the marshmallow field class for the outer kind.
    field = get_field(kind)

    # Keyword arguments for the field constructor.
    params = {'required': data.get('required', False) if required is None else required}
    if 'default' in data and not nodefault:
        default_value = data.get('default', None)
        params['missing'] = default_value
        params['default'] = default_value

    # Use the model indicated by use_model as the key, when present.
    key = data.get('use_model', key)

    # Positional arguments carry any sub-field types.
    args = []
    if subkind:
        subfields = [get_field(s.title(), key=key) for s in subkind.split(',')]
        # Lists take a single inner field; tuples take a list of them.
        if kind == 'List':
            assert len(subfields) == 1, 'List can only accept one subfield type.'
            args.extend(subfields)
        elif kind == 'Tuple':
            args.append(subfields)

    return field(*args, **params)
def create_schema(data: dict, mixin: object = None) -> Schema:
    ''' creates a new class for schema validation

    Builds a marshmallow schema class used to validate the creation of new
    Python objects for this model.  From the model "schema" dictionary it
    creates both a Python class representing the model object and a
    companion Schema class used for validation.  See
    https://marshmallow.readthedocs.io/en/3.0/quickstart.html for a guide
    on deserializing data using marshmallow schema validation.

    Parameters
    ----------
    data : dict
        The schema dictionary section of a yaml file
    mixin : object
        A custom model class to mixin with base model

    Returns
    -------
    A marshmallow schema class object
    '''
    name = data.get('name') or data.get('title')

    # One marshmallow field per declared attribute/property.
    attrs = {}
    props = data.get('attributes', None) or data.get('properties', None)
    if props:
        for attr_name, values in props.items():
            attrs[attr_name] = create_field(values, key=attr_name)

    # Build the plain object class and expose it on the schema so that
    # BaseSchema.make_object can deserialize into it.
    class_obj = create_class(data, mixin=mixin)
    attrs['_class'] = class_obj

    # Create the schema class and give the object class access to an
    # instance of it.
    schema_cls = type(name + 'Schema', (BaseSchema,), attrs)
    class_obj._schema = schema_cls()
    return schema_cls
def generate_models(data: dict, make_fuzzy: bool = True, mixin: object = None) -> list:
    ''' Generate a list of datamodel types

    Converts a models yaml file, e.g. manga/versions.yaml, into a list of
    Python instances.  The "schema" section drives creation of a Schema
    class, which then validates and instantiates the entries of the
    "objects" section.

    Parameters
    ----------
    data : dict
        A yaml loaded data structure
    make_fuzzy : bool
        If True, returns a Fuzzy list of models
    mixin : object
        A custom model class to mixin with base model

    Returns
    -------
    A list of instantiated models
    '''
    schema_cls = create_schema(data['schema'], mixin=mixin)
    # Validate and deserialize the model data into Python objects.
    models = schema_cls(many=True).load(data['objects'], many=True)
    if not make_fuzzy:
        return models
    return FuzzyList(models)
| 10,819 | 3,070 |
import re
import html
import pandas as pd
# Pre-compiled pattern used by fixup() to collapse runs of spaces into one.
re1 = re.compile(r' +')
def imdb(fold_id: int, split_size: int):
    """Load the IMDB train/test pickles and carve out cross-validation fold
    ``fold_id`` of width ``split_size``.

    Returns ((train_texts, train_labels), (val_texts, val_labels),
    (test_texts,)) as numpy arrays, with all text passed through fixup().
    Raises ValueError if the train/validation splits overlap.
    """
    df = pd.read_pickle('df_train.pkl')
    df = df.reindex(columns=['sentiment', 'text'])
    df['text'] = df['text'].apply(fixup)

    # Split the data into k-folds: rows [lo, hi) become the validation fold.
    lo, hi = split_size * fold_id, split_size * (fold_id + 1)
    df_val = df[lo:hi]
    df_train = pd.concat((df[0:lo], df[hi:]))

    # Sanity check to make sure there are no common elements between the two splits.
    if set(df_train.index).intersection(set(df_val.index)):
        raise ValueError('There are common training examples in the training and validation splits!')

    df_test = pd.read_pickle('df_test.pkl')
    df_test = df_test.reindex(columns=['review_id', 'text'])
    df_test['text'] = df_test['text'].apply(fixup)

    return (df_train.text.values, df_train.sentiment.values), \
           (df_val.text.values, df_val.sentiment.values), \
           (df_test.text.values,)
# https://github.com/prajjwal1/language-modelling/blob/master/ULMfit.py
def fixup(x):
    """Normalize exported review text: undo HTML-ish escape residue and
    tokenizer artifacts, then unescape entities and collapse space runs."""
    # Order matters: e.g. the literal '\\n' must be handled before the
    # lone-backslash padding at the end.
    replacements = (
        ('#39;', "'"), ('amp;', '&'), ('#146;', "'"), ('nbsp;', ' '),
        ('#36;', '$'), ('\\n', "\n"), ('quot;', "'"), ('<br />', "\n"),
        ('\\"', '"'), ('<unk>', 'u_n'), (' @.@ ', '.'), (' @-@ ', '-'),
        ('\\', ' \\ '),
    )
    for old, new in replacements:
        x = x.replace(old, new)
    # Same pattern as the module-level re1: squeeze repeated spaces.
    return re.sub(r' +', ' ', html.unescape(x))
| 1,458 | 528 |
from .core import Config
def simple():
    """Parse the standard command-line options and return a tuple of
    (Config instance, input stream, parsed options).

    The input stream is the file named by -c/--config when given,
    otherwise stdin.  Exits with a usage error if positional arguments
    are supplied.
    """
    from optparse import OptionParser

    parser = OptionParser(usage="\n %prog\n %prog -c config.yaml")
    parser.add_option('-c', '--config', metavar="FILENAME",
                      help="Configuration file to parse",
                      dest="configfile", default=None, type="string")
    parser.add_option('-n', '--name', metavar="NAME",
                      help="Name of configuration (default `config`), usefull if you have"
                      "several configuration in single binary",
                      dest="name", default="config", type="string")
    parser.add_option('-f', '--filename', metavar="NAME",
                      help="Filename to read",
                      dest="filename", default="config", type="string")
    parser.add_option('-p', '--print',
                      help="Print parsed configuration file",
                      dest="print", default=False, action="store_true")
    options, args = parser.parse_args()

    # Positional arguments are not accepted.
    if args:
        parser.error("No arguments expected")

    cfg = Config(options.name, options.filename)

    # Read the named config file when provided; fall back to stdin.
    if options.configfile:
        stream = open(options.configfile, 'rt', encoding='utf-8')
    else:
        import sys
        stream = sys.stdin
    return cfg, stream, options
| 1,136 | 337 |
from xd.build.core.data.namespace import *
from xd.build.core.data.expr import Expression
from xd.build.core.data.string import String
from xd.build.core.data.list import List
from xd.build.core.data.dict import Dict
from xd.build.core.data.func import Function
from xd.build.core.data.num import *
import unittest
class tests(unittest.case.TestCase):
def setUp(self):
self.ns = Namespace()
def test_set_get_1(self):
self.ns['FOO'] = 'foo'
self.assertEqual(self.ns['FOO'].get(), 'foo')
def test_set_get_2(self):
self.ns['FOO'] = String('foo')
self.assertEqual(self.ns['FOO'].get(), 'foo')
def test_set2_get_1(self):
self.ns['FOO'] = 'foo'
self.ns['FOO'] = 'bar'
self.assertEqual(self.ns['FOO'].get(), 'bar')
def test_set_variable(self):
self.ns['FOO'] = 'foo'
self.ns['BAR'] = 'bar'
self.ns['BAR'] = self.ns['FOO']
self.ns['FOO'] = 'hello world'
self.assertEqual(self.ns['FOO'].get(), 'hello world')
self.assertEqual(self.ns['BAR'].get(), 'hello world')
def test_set_get_bool(self):
self.ns['FOO'] = True
self.assertEqual(self.ns['FOO'].get(), True)
def test_set_get_int(self):
self.ns['FOO'] = 42
self.assertEqual(self.ns['FOO'].get(), 42)
def test_set_get_float(self):
self.ns['FOO'] = 3.14
self.assertEqual(self.ns['FOO'].get(), 3.14)
def test_set_bad_type(self):
self.ns['FOO'] = 'foo'
with self.assertRaises(TypeError):
self.ns['FOO'] = 42
def test_get_keyerror(self):
with self.assertRaises(KeyError):
self.ns['FOO']
def test_get_typeerror(self):
self.ns['FOO'] = String()
self.ns['I'] = 42
self.ns['FOO'] = Expression('I')
with self.assertRaises(TypeError):
self.ns['FOO'].get()
def test_del(self):
self.ns['FOO'] = 'foo'
del self.ns['FOO']
with self.assertRaises(KeyError):
self.ns['FOO']
def test_eval_source_1(self):
self.ns['FOO'] = 'foo'
self.ns['BAR'] = 'bar'
self.assertEqual(self.ns.eval('FOO+BAR'), 'foobar')
def test_eval_source_2(self):
self.ns['FOO'] = 'foo'
with self.assertRaises(NameError):
self.ns.eval('FOO+BAR')
def test_eval_expression_1(self):
self.ns['FOO'] = 'foo'
self.ns['BAR'] = 'bar'
expr = Expression('FOO+BAR')
self.assertEqual(self.ns.eval(expr), 'foobar')
def test_eval_expression_2(self):
self.ns['FOO'] = 'foo'
expr = Expression('FOO+BAR')
with self.assertRaises(NameError):
self.ns.eval(expr)
    # --- Expression eval with extra globals; append/prepend; variable binding ---
    def test_eval_globals(self):
        self.ns['FOO'] = 'foo'
        BAR = 'bar'
        expr = Expression('FOO+BAR')
        # `g` supplies names that are not bound in the namespace itself.
        self.assertEqual(self.ns.eval(expr, g={'BAR': BAR}), 'foobar')
    def test_append_variable(self):
        self.ns['FOO'] = 'foo'
        self.ns['BAR'] = 'bar'
        self.ns['FOO'].append(self.ns['BAR'])
        self.assertEqual(self.ns['FOO'].get(), 'foobar')
    def test_append_to_expr(self):
        self.ns['FOO'] = 'foo'
        self.ns['FOOBAR'] = String(Expression('FOO'))
        # Appending to FOOBAR must not leak back into FOO.
        self.ns['FOOBAR'].append('bar')
        self.assertEqual(self.ns['FOO'].get(), 'foo')
        self.assertEqual(self.ns['FOOBAR'].get(), 'foobar')
    def test_append_expr(self):
        self.ns['FOO'] = 'foo'
        self.ns['BAR'] = 'bar'
        self.ns['FOO'].append(Expression('BAR'))
        self.assertEqual(self.ns['FOO'].get(), 'foobar')
    def test_append_expr_none_1(self):
        # Appending an expression that evaluates to an empty String is a no-op.
        self.ns['FOO'] = 'foo'
        self.ns['BAR'] = String()
        self.ns['FOO'].append(Expression('BAR'))
        self.assertEqual(self.ns['FOO'].get(), 'foo')
    def test_append_expr_none_2(self):
        self.ns['FOO'] = String()
        self.ns['BAR'] = 'bar'
        self.ns['FOO'].append(Expression('BAR'))
        self.assertEqual(self.ns['FOO'].get(), 'bar')
    def test_append_expr_typeerror(self):
        self.ns['FOO'] = String()
        self.ns['BAR'] = 42
        # The append itself succeeds; the type mismatch surfaces at get().
        self.ns['FOO'].append(Expression('BAR'))
        with self.assertRaises(TypeError):
            self.ns['FOO'].get()
    def test_prepend_variable(self):
        self.ns['FOO'] = 'foo'
        self.ns['BAR'] = 'bar'
        self.ns['FOO'].prepend(self.ns['BAR'])
        self.assertEqual(self.ns['FOO'].get(), 'barfoo')
    def test_prepend_expr(self):
        self.ns['FOO'] = 'foo'
        self.ns['BAR'] = 'bar'
        self.ns['FOO'].prepend(Expression('BAR'))
        self.assertEqual(self.ns['FOO'].get(), 'barfoo')
    def test_prepend_expr_none_1(self):
        self.ns['FOO'] = 'foo'
        self.ns['BAR'] = String()
        self.ns['FOO'].prepend(Expression('BAR'))
        self.assertEqual(self.ns['FOO'].get(), 'foo')
    def test_prepend_expr_none_2(self):
        self.ns['FOO'] = String()
        self.ns['BAR'] = 'bar'
        self.ns['FOO'].prepend(Expression('BAR'))
        self.assertEqual(self.ns['FOO'].get(), 'bar')
    def test_prepend_expr_typeerror(self):
        self.ns['FOO'] = String()
        self.ns['BAR'] = 42
        self.ns['FOO'].prepend(Expression('BAR'))
        with self.assertRaises(TypeError):
            self.ns['FOO'].get()
    def test_multibinding(self):
        # Chained assignment: FOO is bound to the plain str 'foo' (unused here).
        FOO = self.ns['FOO'] = 'foo'
        # A namespace variable cannot be bound under a second name.
        with self.assertRaises(MultiBinding):
            self.ns['BAR'] = self.ns['FOO']
    def test_expr_as_init(self):
        FOO = self.ns['FOO'] = 'foo'
        # Initializing from an Expression is allowed (unlike direct rebinding).
        self.ns['BAR'] = Expression('FOO')
        self.assertEqual(self.ns['FOO'].get(), 'foo')
        self.assertEqual(self.ns['BAR'].get(), 'foo')
    def test_init_with_unsupported(self):
        with self.assertRaises(TypeError):
            self.ns['BAR'] = set()
    def test_init_with_other_variable(self):
        self.ns['FOO'] = 'foo'
        # Wrapping in a fresh String keeps BAR tracking FOO's current value.
        FOO = String(self.ns['FOO'])
        self.ns['BAR'] = FOO
        self.ns['FOO'] = 'bar'
        self.assertEqual(self.ns['FOO'].get(), 'bar')
        self.assertEqual(self.ns['BAR'].get(), 'bar')
    # --- String.set_if: conditional assignment driven by an expression ---
    def test_str_set_if_1(self):
        self.ns['FOOBAR'] = 'foo'
        self.ns['BAR'] = 'b'
        # Non-empty condition value -> the assignment takes effect.
        self.ns['FOOBAR'].set_if(Expression('BAR'), 'bar')
        self.assertEqual(self.ns['FOOBAR'].get(), 'bar')
    def test_str_set_if_2(self):
        self.ns['FOOBAR'] = 'foo'
        self.ns['BAR'] = ''
        self.ns['FOOBAR'].set_if(Expression('BAR'), 'bar')
        self.assertEqual(self.ns['FOOBAR'].get(), 'foo')
    def test_str_set_if_3(self):
        self.ns['FOOBAR'] = 'foo'
        self.ns['BAR'] = String()
        self.ns['FOOBAR'].set_if(Expression('BAR'), 'bar')
        self.assertEqual(self.ns['FOOBAR'].get(), 'foo')
    def test_str_set_if_4(self):
        # Condition references an unbound name -> treated as false, no change.
        self.ns['FOOBAR'] = 'foo'
        self.ns['FOOBAR'].set_if(Expression('BAR'), 'bar')
        self.assertEqual(self.ns['FOOBAR'].get(), 'foo')
    def test_str_set_if_5(self):
        # Two true conditions: the last set_if wins.
        self.ns['FOOBAR'] = 'hello world'
        self.ns['FOO'] = 'f'
        self.ns['BAR'] = 'b'
        self.ns['FOOBAR'].set_if(Expression('FOO'), 'foo')
        self.ns['FOOBAR'].set_if(Expression('BAR'), 'bar')
        self.assertEqual(self.ns['FOOBAR'].get(), 'bar')
    def test_str_set_if_6(self):
        self.ns['FOOBAR'] = 'hello world'
        self.ns['FOO'] = 'f'
        self.ns['BAR'] = 'b'
        self.ns['FOOBAR'].set_if(Expression('BAR'), 'bar')
        self.ns['FOOBAR'].set_if(Expression('FOO'), 'foo')
        self.assertEqual(self.ns['FOOBAR'].get(), 'foo')
    def test_str_set_if_7(self):
        # The condition may also be a namespace variable, not only an Expression.
        self.ns['FOOBAR'] = 'foo'
        self.ns['BAR'] = 'b'
        self.ns['FOOBAR'].set_if(self.ns['BAR'], 'bar')
        self.assertEqual(self.ns['FOOBAR'].get(), 'bar')
    def test_str_set_if_8(self):
        # The replacement value may itself be an Expression.
        self.ns['FOOBAR'] = 'hello world'
        self.ns['BAR'] = 'bar'
        self.ns['FOO'] = 'foo'
        self.ns['FOOBAR'].set_if(Expression('BAR'), Expression('FOO'))
        self.assertEqual(self.ns['FOOBAR'].get(), 'foo')
    def test_str_set_if_9(self):
        self.ns['FOOBAR'] = 'hello world'
        self.ns['BAR'] = 'bar'
        self.ns['FOO'] = 'foo'
        self.ns['FOOBAR'].set_if(Expression('BAR'), self.ns['FOO'])
        self.assertEqual(self.ns['FOOBAR'].get(), 'foo')
    def test_str_set_if_typeerror_1(self):
        # Non-string replacement is rejected eagerly, at set_if time.
        self.ns['FOOBAR'] = 'hello world'
        self.ns['BAR'] = True
        with self.assertRaises(TypeError):
            self.ns['FOOBAR'].set_if(Expression('BAR'), 42)
    def test_str_set_if_typeerror_2(self):
        # A mistyped Expression value is only detected lazily, at get().
        self.ns['FOOBAR'] = 'hello world'
        self.ns['BAR'] = True
        self.ns['FOO'] = 42
        self.ns['FOOBAR'].set_if(Expression('BAR'), Expression('FOO'))
        with self.assertRaises(TypeError):
            self.ns['FOOBAR'].get()
    # --- String.append_if: conditional append, same truthiness rules as set_if ---
    def test_str_append_if_1(self):
        self.ns['FOO'] = 'foo'
        self.ns['BAR'] = 'b'
        self.ns['FOO'].append_if(Expression('BAR'), 'bar')
        self.assertEqual(self.ns['FOO'].get(), 'foobar')
    def test_str_append_if_2(self):
        self.ns['FOO'] = 'foo'
        self.ns['BAR'] = ''
        self.ns['FOO'].append_if(Expression('BAR'), 'bar')
        self.assertEqual(self.ns['FOO'].get(), 'foo')
    def test_str_append_if_3(self):
        self.ns['FOO'] = 'foo'
        self.ns['BAR'] = String()
        self.ns['FOO'].append_if(Expression('BAR'), 'bar')
        self.assertEqual(self.ns['FOO'].get(), 'foo')
    def test_str_append_if_4(self):
        # Unbound condition name -> no append.
        self.ns['FOO'] = 'foo'
        self.ns['FOO'].append_if(Expression('BAR'), 'bar')
        self.assertEqual(self.ns['FOO'].get(), 'foo')
    def test_str_append_if_5(self):
        # Variable object used directly as the condition.
        self.ns['FOO'] = 'foo'
        self.ns['BAR'] = 'b'
        BAR = self.ns['BAR']
        self.ns['FOO'].append_if(BAR, 'bar')
        self.assertEqual(self.ns['FOO'].get(), 'foobar')
    def test_str_append_if_6(self):
        self.ns['FOO'] = 'foo'
        self.ns['BAR'] = String()
        BAR = self.ns['BAR']
        self.ns['FOO'].append_if(BAR, 'bar')
        self.assertEqual(self.ns['FOO'].get(), 'foo')
    def test_str_append_if_7(self):
        # Variable object used as the appended value.
        self.ns['FOO'] = 'foo'
        self.ns['B'] = 'b'
        self.ns['BAR'] = 'bar'
        BAR = self.ns['BAR']
        self.ns['FOO'].append_if(Expression('B'), BAR)
        self.assertEqual(self.ns['FOO'].get(), 'foobar')
    def test_str_append_if_8(self):
        self.ns['FOO'] = 'foo'
        self.ns['BAR'] = 'bar'
        BAR = self.ns['BAR']
        self.ns['FOO'].append_if(Expression('B'), BAR)
        self.assertEqual(self.ns['FOO'].get(), 'foo')
    def test_str_append_if_9(self):
        # Multiple append_ifs: only the true-conditioned ones contribute, in order.
        self.ns['FOO'] = 'foo'
        self.ns['X'] = 'x'
        self.ns['Y'] = ''
        self.ns['Z'] = 'z'
        self.ns['FOO'].append_if(Expression('X'), 'xxx')
        self.ns['FOO'].append_if(Expression('Y'), 'yyy')
        self.ns['FOO'].append_if(Expression('Z'), 'zzz')
        self.assertEqual(self.ns['FOO'].get(), 'fooxxxzzz')
    def test_str_append_if_typeerror_1(self):
        self.ns['FOO'] = 'foo'
        self.ns['b'] = True
        with self.assertRaises(TypeError):
            self.ns['FOO'].append_if(Expression('b'), 42)
    def test_str_append_if_typeerror_2(self):
        self.ns['FOO'] = 'foo'
        self.ns['I'] = 42
        self.ns['FOO'].append_if(Expression('I'), Expression('I'))
        with self.assertRaises(TypeError):
            self.ns['FOO'].get()
    # --- String.prepend_if (mirror of append_if) and typed variable repr ---
    def test_str_prepend_if_1(self):
        self.ns['FOO'] = 'foo'
        self.ns['BAR'] = 'b'
        self.ns['FOO'].prepend_if(Expression('BAR'), 'bar')
        self.assertEqual(self.ns['FOO'].get(), 'barfoo')
    def test_str_prepend_if_2(self):
        self.ns['FOO'] = 'foo'
        self.ns['BAR'] = ''
        self.ns['FOO'].prepend_if(Expression('BAR'), 'bar')
        self.assertEqual(self.ns['FOO'].get(), 'foo')
    def test_str_prepend_if_3(self):
        self.ns['FOO'] = 'foo'
        self.ns['BAR'] = String()
        self.ns['FOO'].prepend_if(Expression('BAR'), 'bar')
        self.assertEqual(self.ns['FOO'].get(), 'foo')
    def test_str_prepend_if_4(self):
        self.ns['FOO'] = 'foo'
        self.ns['FOO'].prepend_if(Expression('BAR'), 'bar')
        self.assertEqual(self.ns['FOO'].get(), 'foo')
    def test_str_prepend_if_5(self):
        self.ns['FOO'] = 'foo'
        self.ns['BAR'] = 'b'
        BAR = self.ns['BAR']
        self.ns['FOO'].prepend_if(BAR, 'bar')
        self.assertEqual(self.ns['FOO'].get(), 'barfoo')
    def test_str_prepend_if_6(self):
        self.ns['FOO'] = 'foo'
        self.ns['BAR'] = String()
        BAR = self.ns['BAR']
        self.ns['FOO'].prepend_if(BAR, 'bar')
        self.assertEqual(self.ns['FOO'].get(), 'foo')
    def test_str_prepend_if_7(self):
        self.ns['FOO'] = 'foo'
        self.ns['B'] = 'b'
        self.ns['BAR'] = 'bar'
        BAR = self.ns['BAR']
        self.ns['FOO'].prepend_if(Expression('B'), BAR)
        self.assertEqual(self.ns['FOO'].get(), 'barfoo')
    def test_str_prepend_if_8(self):
        self.ns['FOO'] = 'foo'
        self.ns['BAR'] = 'bar'
        BAR = self.ns['BAR']
        self.ns['FOO'].prepend_if(Expression('B'), BAR)
        self.assertEqual(self.ns['FOO'].get(), 'foo')
    def test_str_prepend_if_9(self):
        # Later prepends end up further to the left.
        self.ns['FOO'] = 'foo'
        self.ns['X'] = 'x'
        self.ns['Y'] = ''
        self.ns['Z'] = 'z'
        self.ns['FOO'].prepend_if(Expression('X'), 'xxx')
        self.ns['FOO'].prepend_if(Expression('Y'), 'yyy')
        self.ns['FOO'].prepend_if(Expression('Z'), 'zzz')
        self.assertEqual(self.ns['FOO'].get(), 'zzzxxxfoo')
    def test_str_prepend_if_typeerror_1(self):
        self.ns['FOO'] = 'foo'
        self.ns['b'] = True
        with self.assertRaises(TypeError):
            self.ns['FOO'].prepend_if(Expression('b'), 42)
    def test_str_prepend_if_typeerror_2(self):
        self.ns['FOO'] = 'foo'
        self.ns['I'] = 42
        self.ns['FOO'].prepend_if(Expression('I'), Expression('I'))
        with self.assertRaises(TypeError):
            self.ns['FOO'].get()
    # str() of a variable shows its wrapper type and bound name.
    def test_str_string(self):
        self.ns['FOO'] = ''
        self.assertEqual(str(self.ns['FOO']), 'String(FOO)')
    def test_str_bool(self):
        self.ns['FOO'] = True
        self.assertEqual(str(self.ns['FOO']), 'Bool(FOO)')
    def test_str_int(self):
        self.ns['FOO'] = 42
        self.assertEqual(str(self.ns['FOO']), 'Int(FOO)')
    def test_str_float(self):
        self.ns['FOO'] = 3.14
        self.assertEqual(str(self.ns['FOO']), 'Float(FOO)')
    # --- List: set_if / prepend_if / append_if with boolean conditions ---
    def test_list_set_if_1(self):
        self.ns['FOOBAR'] = ['foo']
        self.ns['BAR'] = True
        self.ns['FOOBAR'].set_if(Expression('BAR'), ['bar'])
        self.assertEqual(self.ns['FOOBAR'].get(), ['bar'])
    def test_list_set_if_2(self):
        self.ns['FOOBAR'] = ['foo']
        self.ns['BAR'] = False
        self.ns['FOOBAR'].set_if(Expression('BAR'), ['bar'])
        self.assertEqual(self.ns['FOOBAR'].get(), ['foo'])
    def test_list_set_if_3(self):
        # Unbound condition -> no change.
        self.ns['FOOBAR'] = ['foo']
        self.ns['FOOBAR'].set_if(Expression('BAR'), ['bar'])
        self.assertEqual(self.ns['FOOBAR'].get(), ['foo'])
    def test_list_prepend_if_1(self):
        self.ns['FOO'] = ['foo']
        self.ns['BAR'] = True
        self.ns['FOO'].prepend_if(Expression('BAR'), 'bar')
        self.assertEqual(self.ns['FOO'].get(), ['bar', 'foo'])
    def test_list_prepend_if_2(self):
        self.ns['FOO'] = ['foo']
        self.ns['BAR'] = False
        self.ns['FOO'].prepend_if(Expression('BAR'), 'bar')
        self.assertEqual(self.ns['FOO'].get(), ['foo'])
    def test_list_prepend_if_3(self):
        self.ns['FOO'] = ['foo']
        self.ns['FOO'].prepend_if(Expression('BAR'), 'bar')
        self.assertEqual(self.ns['FOO'].get(), ['foo'])
    def test_list_append_if_1(self):
        self.ns['FOO'] = ['foo']
        self.ns['BAR'] = True
        self.ns['FOO'].append_if(Expression('BAR'), 'bar')
        self.assertEqual(self.ns['FOO'].get(), ['foo', 'bar'])
    def test_list_append_if_2(self):
        self.ns['FOO'] = ['foo']
        self.ns['BAR'] = False
        self.ns['FOO'].append_if(Expression('BAR'), 'bar')
        self.assertEqual(self.ns['FOO'].get(), ['foo'])
    def test_list_append_if_3(self):
        self.ns['FOO'] = ['foo']
        self.ns['FOO'].append_if(Expression('BAR'), 'bar')
        self.assertEqual(self.ns['FOO'].get(), ['foo'])
    # --- List: remove / remove_if / extend_if, and invalid element types ---
    def test_list_remove_1(self):
        self.ns['L'] = ['foo', 'bar']
        self.ns['BAR'] = 'bar'
        # Remove by Expression value.
        self.ns['L'].remove(Expression('BAR'))
        self.assertEqual(self.ns['L'].get(), ['foo'])
    def test_list_remove_2(self):
        self.ns['L'] = ['foo', 'bar']
        self.ns['BAR'] = 'bar'
        # Remove by variable object.
        self.ns['L'].remove(self.ns['BAR'])
        self.assertEqual(self.ns['L'].get(), ['foo'])
    def test_list_remove_if_1(self):
        self.ns['L'] = ['foo', 'bar']
        self.ns['BAR'] = True
        self.ns['L'].remove_if(Expression('BAR'), 'bar')
        self.assertEqual(self.ns['L'].get(), ['foo'])
    def test_list_remove_if_2(self):
        self.ns['L'] = ['foo', 'bar']
        self.ns['BAR'] = False
        self.ns['L'].remove_if(Expression('BAR'), 'bar')
        self.assertEqual(self.ns['L'].get(), ['foo', 'bar'])
    def test_list_remove_if_3(self):
        self.ns['L'] = ['foo', 'bar']
        self.ns['L'].remove_if(Expression('BAR'), 'bar')
        self.assertEqual(self.ns['L'].get(), ['foo', 'bar'])
    def test_list_extend_if_1(self):
        self.ns['L'] = ['foo', 'bar']
        self.ns['BAR'] = True
        self.ns['L'].extend_if(Expression('BAR'), ['hello', 'world'])
        self.assertEqual(self.ns['L'].get(), ['foo', 'bar', 'hello', 'world'])
    def test_list_extend_if_2(self):
        self.ns['L'] = ['foo', 'bar']
        self.ns['BAR'] = False
        self.ns['L'].extend_if(Expression('BAR'), ['hello', 'world'])
        self.assertEqual(self.ns['L'].get(), ['foo', 'bar'])
    def test_list_extend_if_3(self):
        self.ns['L'] = ['foo', 'bar']
        self.ns['L'].extend_if(Expression('BAR'), ['hello', 'world'])
        self.assertEqual(self.ns['L'].get(), ['foo', 'bar'])
    def test_list_item_invalid(self):
        # A Function is not a valid list element type; error surfaces at get().
        self.ns['l'] = []
        def foo():
            return 42
        self.ns['f'] = Function(foo)
        self.ns['l'].append(Expression('f'))
        with self.assertRaises(TypeError):
            self.ns['l'].get()
    # --- Dict: update_if, typed item access, and nested-scope resolution ---
    def test_dict_update_if_1(self):
        self.ns['D'] = {'foo': 42}
        self.ns['BAR'] = True
        self.ns['D'].update_if(Expression('BAR'), {'bar': 43})
        self.assertEqual(self.ns['D'].get(), {'foo': 42, 'bar': 43})
    def test_dict_update_if_2(self):
        self.ns['D'] = {'foo': 42}
        self.ns['BAR'] = False
        self.ns['D'].update_if(Expression('BAR'), {'bar': 43})
        self.assertEqual(self.ns['D'].get(), {'foo': 42})
    def test_dict_update_if_3(self):
        self.ns['D'] = {'foo': 42}
        self.ns['D'].update_if(Expression('BAR'), {'bar': 43})
        self.assertEqual(self.ns['D'].get(), {'foo': 42})
    def test_dict_update_if_4(self):
        # The update payload may itself be an Expression yielding a Dict.
        self.ns['D'] = {'foo': 42}
        self.ns['E'] = Dict()
        self.ns['BAR'] = False
        self.ns['D'].update_if(Expression('BAR'), Expression('E'))
        self.assertEqual(self.ns['D'].get(), {'foo': 42})
    def test_dict_item_1(self):
        # Items are wrapped in typed variables (Int/List/Dict) on assignment.
        self.ns['D'] = {}
        self.ns['D']['i'] = 42
        self.assertIsInstance(self.ns['D']['i'], Int)
        self.assertEqual(self.ns['D']['i'].get(), 42)
    def test_dict_item_2(self):
        self.ns['D'] = {}
        self.ns['D']['i'] = 42
        self.ns['D']['i'].set_if(Expression('FOO'), 43)
        self.assertIsInstance(self.ns['D']['i'], Int)
        self.assertEqual(self.ns['D']['i'].get(), 42)
    def test_dict_item_3(self):
        # The condition is re-evaluated lazily: binding FOO afterwards flips it.
        self.ns['D'] = {}
        self.ns['D']['i'] = 42
        self.ns['D']['i'].set_if(Expression('FOO'), 43)
        self.ns['FOO'] = True
        self.assertIsInstance(self.ns['D']['i'], Int)
        self.assertEqual(self.ns['D']['i'].get(), 43)
    def test_dict_item_4(self):
        self.ns['D'] = {}
        self.ns['D']['i'] = [42]
        self.ns['D']['i'].append_if(Expression('FOO'), 43)
        self.ns['FOO'] = True
        self.assertIsInstance(self.ns['D']['i'], List)
        self.assertEqual(self.ns['D']['i'].get(), [42, 43])
    def test_dict_item_5(self):
        self.ns['D'] = {}
        self.ns['D']['i'] = {'foo': 42}
        self.ns['D']['i'].update_if(Expression('FOO'), {'bar': 43})
        self.ns['FOO'] = True
        self.assertIsInstance(self.ns['D']['i'], Dict)
        self.assertEqual(self.ns['D']['i'].get(), {'foo': 42, 'bar': 43})
    def test_dict_item_6(self):
        # update_if overwrites existing keys, like dict.update.
        self.ns['D'] = {}
        self.ns['D']['i'] = {'foo': 42}
        self.ns['D']['i'].update_if(Expression('FOO'), {'foo': 43})
        self.ns['FOO'] = True
        self.assertIsInstance(self.ns['D']['i'], Dict)
        self.assertEqual(self.ns['D']['i'].get(), {'foo': 43})
    def test_dict_item_implicit_expr_1(self):
        # Assigning a namespace variable as an item keeps a live reference.
        self.ns['D'] = {}
        self.ns['d'] = {'foo': 42}
        self.ns['D']['i'] = self.ns['d']
        self.ns['d']['foo'] = 43
        self.assertEqual(self.ns['D'].get()['i'], {'foo': 43})
    def test_dict_item_bad(self):
        self.ns['D'] = {}
        with self.assertRaises(TypeError):
            self.ns['D']['i'] = self.ns
    def test_dict_item_invalid(self):
        self.ns['D'] = {}
        def foo():
            return 42
        self.ns['f'] = Function(foo)
        self.ns['D']['i'] = Expression('f')
        with self.assertRaises(TypeError):
            self.ns['D'].get()
    def test_nested_scope_1(self):
        # Expressions inside a nested Dict resolve against the namespace the
        # outer Dict is eventually bound into.
        D = Dict({'foo': Dict({'bar': 'baah'})})
        D['foo'].set_if(Expression('BAR'),
                        {'bar': String(Expression('hello'))})
        self.ns['D'] = D
        self.ns['hello'] = 'booh'
        self.assertEqual(self.ns['D'].get()['foo']['bar'], 'baah')
        self.ns['BAR'] = True
        self.assertEqual(self.ns['D'].get()['foo']['bar'], 'booh')
    def test_nested_scope_2(self):
        D = Dict({'foo': Dict({'bar': 42})})
        D['foo'].update({'bar': 43})
        self.ns['D'] = D
        self.assertEqual(self.ns['D'].get()['foo']['bar'], 43)
    def test_nested_scope_3(self):
        D = Dict({'foo': Dict({'bar': 42})})
        D['foo'].update_if(Expression('BAR'),
                           {'bar': Float(Expression('pi'))})
        self.ns['D'] = D
        self.ns['BAR'] = True
        self.ns['pi'] = 3.14
        self.assertEqual(self.ns['D'].get()['foo']['bar'], 3.14)
| 22,597 | 8,562 |
from google.appengine.ext import ndb
from protorpc import messages
from google.appengine.ext.ndb import msgprop
from csvmodel import CsvModel
class Stop(CsvModel):
    """Datastore model for one transit stop, loaded from the GTFS-style
    ``stops.txt`` feed file (entities keyed by the ``stop_id`` column)."""
    class LocationType(messages.Enum):
        # Numeric codes mirror the feed's location_type column.
        STOP = 0
        STATION = 1
    class WheelchairBoarding(messages.Enum):
        # Numeric codes mirror the feed's wheelchair_boarding column.
        UNKNOWN = 0
        POSSIBLE = 1
        IMPOSSIBLE = 2
    # CsvModel hooks: source CSV file and the column used as the entity id.
    _csv_file = 'stops.txt'
    _csv_id = 'stop_id'
    stop_code = ndb.StringProperty()
    stop_name = ndb.StringProperty(required=True)
    stop_desc = ndb.TextProperty()
    # Latitude/longitude stored as a single GeoPt instead of two floats.
    stop_latlon = ndb.GeoPtProperty(required=True)
    zone_id = ndb.KeyProperty(kind='Zone')
    stop_url = ndb.StringProperty()
    location_type = msgprop.EnumProperty(LocationType)
    # Key of another Stop entity when this stop belongs to a parent station.
    parent_station = ndb.KeyProperty(kind='Stop')
    stop_timezone = ndb.StringProperty()
    wheelchair_boarding = msgprop.EnumProperty(WheelchairBoarding)
| 869 | 307 |
##***
##class Base:
## def methodBase(self):
## print("In base class")
##class child(Base):
## def methodchild(Base):
## print("In child class")
##c1=child()
##c1.methodBase()
##c1.methodchild()
##***
##class Base:
## def ___init__(self):
## print('base')
##class child(Base):
## pass
##
##
##c1=Base()
class stud:
    """A student record holding a roll number and a name."""

    def __init__(self, r, n):
        """Store roll number *r* and name *n* on the instance."""
        self.rollno = r
        self.name = n

    def Displaystud(self):
        """Print the stored roll number and name, one per line."""
        for label, value in (("enter rollno :", self.rollno),
                             ("name is :", self.name)):
            print(label, value)
class ArtsStud(stud):
    """A student specialised in a particular art form."""

    def __init__(self, t):
        """Create an arts student practising art type *t*.

        The base class is initialised with fixed demo values
        (rollno 101, name 'abc').
        """
        super().__init__(101, 'abc')
        self.typeOfArt = t

    def DisplayArtsStud(self):
        """Print the art type practised by this student."""
        print("enter the type of art:", self.typeOfArt)
# Demo: exercise both the inherited and the subclass display methods.
s1=ArtsStud("dance")
s1.Displaystud()
s1.DisplayArtsStud()
class Animal():
    """Base class describing an animal by name, color and age."""

    def __init__(self, n, c, a):
        """Remember name *n*, color *c* and age *a*."""
        self.name, self.color, self.age = n, c, a

    def DisplayA(self):
        """Print each stored attribute on its own line."""
        print("nameof the animal:", self.name)
        print("color:", self.color)
        print("age:", self.age)
class breed(Animal):
    """An Animal subclass that additionally records a breed name."""

    def __init__(self, t):
        """Create a fixed demo dog ("dog", "black", "4yrs") of breed *t*."""
        super().__init__("dog", "black", "4yrs")
        self.breedname = t

    def DisplayB(self):
        """Print the breed name."""
        print("breed is:", self.breedname)
# Demo: show base-class and subclass display output for a breed instance.
c1=breed("doberman")
c1.DisplayA()
c1.DisplayB()
| 1,314 | 499 |
from ._bounding_box import *
from ._user_IDs import *
from ._user_points import *
| 82 | 30 |
import appdaemon.plugins.hass.hassapi as hass
#
# Listen for presence sensor change state and change alarm control panel state.
#
# Args:
# sensor - home presence 'sensor'
# ha_panel - alarm control panel entity (to arm and disarm).
# constraint - (optional, input_boolen), if turned off - alarm panel will be not armed\disarmed.
#
# Release Notes
#
# Version 1.0:
# Initial Version
class AlarmPanelBySensor(hass.Hass):
    """Arm/disarm an alarm_control_panel based on a presence sensor.

    Config args:
        sensor     - presence sensor entity to watch.
        ha_panel   - alarm_control_panel entity to arm/disarm.
        constraint - optional input_boolean; when off, no arming/disarming.
    """

    def initialize(self):
        """Validate the config and register the state/event listeners."""
        if any(key not in self.args for key in ("sensor", "ha_panel")):
            self.error("Please provide sensor and ha_panel in config!")
            return
        self.listen_state(self.sensor_trigger, self.args['sensor'])
        self.listen_event(self.ha_event, "ha_started")

    def ha_event(self, event_name, data, kwargs):
        """On Home Assistant startup, sync the panel with the sensor state."""
        self.log('Starting up!')
        state = self.get_state(self.args['sensor'])
        self.log('Updating alarm_control_panel state: {}'.format(state))
        if state == "off":
            self.away_mode()

    def sensor_trigger(self, entity, attribute, old, new, kwargs):
        """React to presence-sensor transitions in either direction."""
        self.log("{} turned {}".format(entity, new))
        if (old, new) == ("on", "off"):
            self.away_mode()
        elif (old, new) == ("off", "on"):
            self.return_home_mode()

    def _blocked_by_constraint(self):
        # True when the optional constraint input_boolean vetoes any change.
        return ('constraint' in self.args
                and not self.constrain_input_boolean(self.args['constraint']))

    def away_mode(self):
        """Arm the panel in away mode unless the constraint blocks it."""
        if self._blocked_by_constraint():
            return
        self.call_service("alarm_control_panel/alarm_arm_away", entity_id = self.args['ha_panel'])

    def return_home_mode(self):
        """Disarm the panel unless the constraint blocks it."""
        if self._blocked_by_constraint():
            return
        self.call_service("alarm_control_panel/alarm_disarm", entity_id = self.args['ha_panel'])
from random import randint
import datetime
# Simulate up to `rounds` random draws; a draw of 96-100 means something
# "got through" and stops the run.
lvl = 10
base_rounds = 10
rounds = lvl * base_rounds
print("You have", rounds, "rounds to try to get through.")
got_through = False
for i in range(rounds):
    r = randint(1, 100)
    print(r)
    if r >= 96:
        got_through = True
        break
print("Number of rounds:", i)
# BUG FIX: the old `i == rounds - 1` test misreported a hit that happened on
# the very last round as "Nothing got through"; track the break explicitly.
if got_through:
    # Each round stands for 6 seconds of elapsed time.
    print("It took", str(datetime.timedelta(seconds=i*6)))
else:
    print("Nothing got through")
from ._version import get_versions
from .contexts import cd
from .prompting import error, prompt, status, success
from .unix import cp, ln_s
# Names re-exported as the package's public API.
__all__ = ["prompt", "status", "success", "error", "cp", "cd", "ln_s"]
# Versioneer pattern: resolve the version string, then drop the helper
# so it does not leak into the package namespace.
__version__ = get_versions()["version"]
del get_versions
| 272 | 91 |
"""Test Trotter Hamiltonian methods from `qibo/core/hamiltonians.py`."""
import pytest
import numpy as np
import qibo
from qibo import hamiltonians, K
from qibo.tests.utils import random_state, random_complex, random_hermitian
@pytest.mark.parametrize("nqubits", [3, 4])
@pytest.mark.parametrize("model", ["TFIM", "XXZ", "Y", "MaxCut"])
def test_trotter_hamiltonian_to_dense(backend, nqubits, model):
    """Test that Trotter Hamiltonian dense form agrees with normal Hamiltonian."""
    # `backend` is a fixture that switches the active qibo backend for the test.
    local_ham = getattr(hamiltonians, model)(nqubits, dense=False)
    target_ham = getattr(hamiltonians, model)(nqubits)
    final_ham = local_ham.dense
    K.assert_allclose(final_ham.matrix, target_ham.matrix, atol=1e-15)
def test_trotter_hamiltonian_scalar_mul(nqubits=3):
    """Test multiplication of Trotter Hamiltonian with scalar."""
    local_ham = hamiltonians.TFIM(nqubits, h=1.0, dense=False)
    target_ham = 2 * hamiltonians.TFIM(nqubits, h=1.0)
    # Check both operand orders (scalar * H and H * scalar).
    local_dense = (2 * local_ham).dense
    K.assert_allclose(local_dense.matrix, target_ham.matrix)
    local_ham = hamiltonians.TFIM(nqubits, h=1.0, dense=False)
    local_dense = (local_ham * 2).dense
    K.assert_allclose(local_dense.matrix, target_ham.matrix)
def test_trotter_hamiltonian_scalar_add(nqubits=4):
    """Test addition of Trotter Hamiltonian with scalar."""
    local_ham = hamiltonians.TFIM(nqubits, h=1.0, dense=False)
    target_ham = 2 + hamiltonians.TFIM(nqubits, h=1.0)
    # Check both operand orders (scalar + H and H + scalar).
    local_dense = (2 + local_ham).dense
    K.assert_allclose(local_dense.matrix, target_ham.matrix)
    local_ham = hamiltonians.TFIM(nqubits, h=1.0, dense=False)
    local_dense = (local_ham + 2).dense
    K.assert_allclose(local_dense.matrix, target_ham.matrix)
def test_trotter_hamiltonian_scalar_sub(nqubits=3):
    """Test subtraction of Trotter Hamiltonian with scalar."""
    # Subtraction is not commutative, so both orderings need their own target.
    local_ham = hamiltonians.TFIM(nqubits, h=1.0, dense=False)
    target_ham = 2 - hamiltonians.TFIM(nqubits, h=1.0)
    local_dense = (2 - local_ham).dense
    K.assert_allclose(local_dense.matrix, target_ham.matrix)
    target_ham = hamiltonians.TFIM(nqubits, h=1.0) - 2
    local_ham = hamiltonians.TFIM(nqubits, h=1.0, dense=False)
    local_dense = (local_ham - 2).dense
    K.assert_allclose(local_dense.matrix, target_ham.matrix)
def test_trotter_hamiltonian_operator_add_and_sub(nqubits=3):
    """Test addition and subtraction between Trotter Hamiltonians."""
    local_ham1 = hamiltonians.TFIM(nqubits, h=1.0, dense=False)
    local_ham2 = hamiltonians.TFIM(nqubits, h=0.5, dense=False)
    local_ham = local_ham1 + local_ham2
    # Target is built from the corresponding dense Hamiltonians.
    target_ham = (hamiltonians.TFIM(nqubits, h=1.0) +
                  hamiltonians.TFIM(nqubits, h=0.5))
    dense = local_ham.dense
    K.assert_allclose(dense.matrix, target_ham.matrix)
    local_ham = local_ham1 - local_ham2
    target_ham = (hamiltonians.TFIM(nqubits, h=1.0) -
                  hamiltonians.TFIM(nqubits, h=0.5))
    dense = local_ham.dense
    K.assert_allclose(dense.matrix, target_ham.matrix)
@pytest.mark.parametrize("nqubits,normalize", [(3, False), (4, False)])
def test_trotter_hamiltonian_matmul(nqubits, normalize):
    """Test Trotter Hamiltonian expectation value."""
    local_ham = hamiltonians.TFIM(nqubits, h=1.0, dense=False)
    dense_ham = hamiltonians.TFIM(nqubits, h=1.0)
    # Expectation value on a backend-cast tensor...
    state = K.cast(random_complex((2 ** nqubits,)))
    trotter_ev = local_ham.expectation(state, normalize)
    target_ev = dense_ham.expectation(state, normalize)
    K.assert_allclose(trotter_ev, target_ev)
    # ...and on a raw (uncast) array.
    state = random_complex((2 ** nqubits,))
    trotter_ev = local_ham.expectation(state, normalize)
    target_ev = dense_ham.expectation(state, normalize)
    K.assert_allclose(trotter_ev, target_ev)
    # Matrix application (H @ state) on a VectorState wrapper.
    from qibo.core.states import VectorState
    state = VectorState.from_tensor(state)
    trotter_matmul = local_ham @ state
    target_matmul = dense_ham @ state
    K.assert_allclose(trotter_matmul, target_matmul)
def test_trotter_hamiltonian_three_qubit_term(backend):
    """Test creating ``TrotterHamiltonian`` with three qubit term."""
    from scipy.linalg import expm
    from qibo.core.terms import HamiltonianTerm
    # Random Hermitian terms acting on 3, 2 and 1 qubits respectively.
    m1 = random_hermitian(3)
    m2 = random_hermitian(2)
    m3 = random_hermitian(1)
    terms = [HamiltonianTerm(m1, 0, 1, 2), HamiltonianTerm(m2, 2, 3),
             HamiltonianTerm(m3, 1)]
    ham = hamiltonians.SymbolicHamiltonian()
    ham.terms = terms
    # Test that the `TrotterHamiltonian` dense matrix is correct
    eye = np.eye(2, dtype=m1.dtype)
    # Embed each term into the full 4-qubit space with identity padding.
    mm1 = np.kron(m1, eye)
    mm2 = np.kron(np.kron(eye, eye), m2)
    mm3 = np.kron(np.kron(eye, m3), np.kron(eye, eye))
    target_ham = hamiltonians.Hamiltonian(4, mm1 + mm2 + mm3)
    K.assert_allclose(ham.matrix, target_ham.matrix)
    dt = 1e-2
    initial_state = random_state(4)
    if K.op is not None:
        # Custom-operator backends do not implement multi-qubit Trotter circuits.
        with pytest.raises(NotImplementedError):
            circuit = ham.circuit(dt=dt)
    else:
        circuit = ham.circuit(dt=dt)
        final_state = circuit(np.copy(initial_state))
        # Symmetric (second-order) Trotter splitting over the two term groups.
        u = [expm(-0.5j * dt * (mm1 + mm3)), expm(-0.5j * dt * mm2)]
        target_state = u[1].dot(u[0].dot(initial_state))
        target_state = u[0].dot(u[1].dot(target_state))
        K.assert_allclose(final_state, target_state)
def test_old_trotter_hamiltonian_errors():
    """Check errors when creating the deprecated ``TrotterHamiltonian`` object."""
    # Both the constructor and the old classmethod factory must refuse to run.
    with pytest.raises(NotImplementedError):
        h = hamiltonians.TrotterHamiltonian()
    with pytest.raises(NotImplementedError):
        h = hamiltonians.TrotterHamiltonian.from_symbolic(0, 1)
| 5,566 | 2,143 |
import asyncio
async def sleep(delay):
    """Yield control to the event loop *delay* times (cooperative pause)."""
    for _ in range(delay):
        await asyncio.sleep(0)
| 99 | 35 |
import warnings
import torch.nn as nn
def conv1x1_group(in_planes, out_planes, stride=1, groups=1):
    """
    1x1 convolution with group, without bias
    - Normal 1x1 convolution when groups == 1
    - Grouped 1x1 convolution when groups > 1
    """
    return nn.Conv2d(in_planes, out_planes, kernel_size=1,
                     stride=stride, groups=groups, bias=False)
def conv3x3_group(in_planes, out_planes, stride=1, dilation=1, groups=1):
    """
    3x3 convolution with padding and group, without bias.

    Padding equals dilation, which keeps the spatial size unchanged
    at stride 1 for any dilation rate.
    """
    return nn.Conv2d(in_planes, out_planes, kernel_size=3,
                     stride=stride, padding=dilation,
                     dilation=dilation, groups=groups, bias=False)
def conv7x7_group(in_planes, out_planes, stride=1, groups=1):
    """
    7x7 convolution with padding and group, without bias.

    Intended as a stem/first conv: dilation is fixed at 1 and
    padding at 3 (half the kernel size).
    """
    return nn.Conv2d(in_planes, out_planes, kernel_size=7,
                     stride=stride, padding=3, dilation=1,
                     groups=groups, bias=False)
def norm_layer(planes, use_gn=False):
    """Return a normalization layer over *planes* channels.

    GroupNorm (group count from ``get_group_gn``) when *use_gn* is True,
    otherwise BatchNorm2d.
    """
    if use_gn:
        return nn.GroupNorm(get_group_gn(planes), planes)
    return nn.BatchNorm2d(planes)
class ConvModule(nn.Module):
    """
    This class currently does not used in backbone, only use in necks, heads.
    TODO: combine the conv layer in backbone with this class
    This class support several types of layers:
    1. only conv layer
    2. conv + bn/gn
    3. conv + bn/gn + relu
    4. conv + relu
    5. bn/gn + relu + conv
    """
    def __init__(self,
                 in_channels,
                 out_channels,
                 kernel_size,
                 stride=1,
                 padding=0,
                 dilation=1,
                 groups=1,
                 bias=True,
                 normalize=None,
                 use_gn=False,
                 activation=None,
                 activate_last=True):
        """Build conv (+ optional norm, + optional activation).

        Args:
            normalize: any non-None value enables a norm layer.
            use_gn: use GroupNorm instead of BatchNorm for that layer.
            activation: 'relu' or 'relu6', or None for no activation.
            activate_last: True -> conv-norm-act order; False -> the
                pre-activation order norm-act-conv.
        """
        super(ConvModule, self).__init__()
        self.with_norm = normalize is not None
        self.with_activation = activation is not None
        self.with_bias = bias
        self.activation = activation
        self.activate_last = activate_last
        # A norm layer usually makes the conv bias redundant.
        if self.with_norm and self.with_bias:
            warnings.warn('ConvModule has norm and bias at the same time')
        self.conv = nn.Conv2d(in_channels=in_channels,
                              out_channels=out_channels,
                              kernel_size=kernel_size,
                              stride=stride,
                              padding=padding,
                              dilation=dilation,
                              groups=groups,
                              bias=bias)
        # Mirror the conv's (possibly normalized) hyper-parameters so callers
        # can read them from the module directly.
        self.in_channels = self.conv.in_channels
        self.out_channels = self.conv.out_channels
        self.kernel_size = self.conv.kernel_size
        self.stride = self.conv.stride
        self.padding = self.conv.padding
        self.dilation = self.conv.dilation
        self.groups = self.conv.groups
        if self.with_norm:
            # Pre-activation order normalizes the conv's *input* channels.
            norm_channels = out_channels if self.activate_last else in_channels
            self.norm = norm_layer(norm_channels, use_gn=use_gn)
        if self.with_activation:
            assert activation in ['relu', 'relu6'], \
                'Only ReLU and ReLU6 are supported'
            if self.activation == 'relu':
                self.activate = nn.ReLU(inplace=True)
            elif self.activation == 'relu6':
                self.activate = nn.ReLU6(inplace=True)
    def forward(self, x):
        """Apply the configured layers in conv-first or conv-last order."""
        if self.activate_last:
            x = self.conv(x)
            if self.with_norm:
                x = self.norm(x)
            if self.with_activation:
                x = self.activate(x)
        else:
            # Pre-activation variant: norm and activation run before the conv.
            if self.with_norm:
                x = self.norm(x)
            if self.with_activation:
                x = self.activate(x)
            x = self.conv(x)
        return x
def get_group_gn(planes, dim_per_gp=-1, num_groups=32):
    """
    Get the number of groups used by GroupNorm for *planes* channels.

    Generalized from the previous hard-coded constants: callers may now
    specify either a fixed channels-per-group size (*dim_per_gp* > 0) or a
    fixed group count (*num_groups*). Defaults keep the historic behaviour
    of 32 groups.

    Args:
        planes: number of channels to be normalized.
        dim_per_gp: channels per group (C/G); takes precedence when > 0.
        num_groups: group count (G), used when dim_per_gp <= 0.

    Returns:
        The group count to pass to ``nn.GroupNorm``.
    """
    if dim_per_gp > 0:
        assert planes % dim_per_gp == 0
        return planes // dim_per_gp
    assert num_groups > 0 and planes % num_groups == 0
    return num_groups
class ShuffleLayer(nn.Module):
    """Channel shuffle: interleave channels across groups (ShuffleNet-style)."""

    def __init__(self, groups):
        super(ShuffleLayer, self).__init__()
        self.groups = groups  # number of channel groups to interleave

    def forward(self, x):
        """
        Channel shuffle: [N, C, H, W] -> [N, g, C/g, H, W] ->
        [N, C/g, g, H, W] -> [N, C, H, W]
        """
        N, C, H, W = x.size()
        g = self.groups
        # BUG FIX: `C / g` is float division in Python 3 and view() rejects
        # float sizes; use integer division (C must be divisible by g).
        return x.view(N, g, C // g, H, W).permute(
            0, 2, 1, 3, 4).reshape(N, C, H, W)
class ChannelSplit(nn.Module):
    """Split an NCHW tensor into two halves along the channel dimension."""

    def __init__(self):
        super(ChannelSplit, self).__init__()

    def forward(self, x):
        """Return (first half, second half) of x's channels."""
        # BUG FIX: channels live on dim 1 of an NCHW tensor; the old code
        # derived the split point from x.shape[2] (the height), producing
        # unevenly-sized channel halves.
        half_channel = x.shape[1] // 2
        return x[:, :half_channel, ...], x[:, half_channel:, ...]
class SELayer(nn.Module):
    """
    Squeeze-and-Excitation channel attention.
    Paper: https://arxiv.org/abs/1709.01507
    """

    def __init__(self, channel, reduction=16):
        super(SELayer, self).__init__()
        self.avg_pool = nn.AdaptiveAvgPool2d(1)
        bottleneck = channel // reduction
        self.fc = nn.Sequential(
            nn.Linear(channel, bottleneck),
            nn.ReLU(inplace=True),
            nn.Linear(bottleneck, channel),
            nn.Sigmoid()
        )

    def forward(self, x):
        """Scale each channel of *x* by its learned attention weight."""
        batch, channel = x.size(0), x.size(1)
        # Squeeze: global average pool to one scalar per channel.
        squeezed = self.avg_pool(x).view(batch, channel)
        # Excite: per-channel gate in (0, 1), broadcast over H and W.
        weights = self.fc(squeezed).view(batch, channel, 1, 1)
        return x * weights
| 6,186 | 1,964 |
from sanic import Sanic
from sanic.response import json
from sanic_openapi import doc, swagger_blueprint
from util import authorized
app = Sanic(__name__)
# OpenAPI/Swagger metadata consumed by sanic-openapi's swagger blueprint.
app.config["API_TITLE"] = "My-DataHub-OpenAPI"
app.config["API_VERSION"] = "0.1.0"
app.config["API_DESCRIPTION"] = "An example Swagger from Sanic-OpenAPI"
app.config["API_CONTACT_EMAIL"] = "cagojeiger@naver.com"
app.config["API_TERMS_OF_SERVICE"] = "https://github.com/kangheeyong/PROJECT-datahub-api-server.git"
app.config["API_LICENSE_NAME"] = "MIT LICENSE"
# Serves the interactive Swagger UI (usually under /swagger).
app.blueprint(swagger_blueprint)
class Test_status:
    # Response schema for the /test route: a single string `status` field.
    status = doc.String()
@app.route('/test')
@doc.tag('test')
@doc.summary('test koken')
@doc.description('This is a test route with detail description.')
# Caller must pass an auth token in the `token` request header.
@doc.consumes(doc.String(name='token'), location='header', required=True)
@doc.response(200, Test_status, description='한글도 되나?')
@doc.response(403, Test_status, description='123aaa')
@authorized(token='12')
async def test(request):
    # Only reached when @authorized accepts the token header.
    return json({'status': 'success'})
if __name__ == '__main__':
    app.run(host='0.0.0.0', port=8070)
| 1,073 | 415 |
# -*- coding: utf-8 -*-
import contextlib
import sqlalchemy
import sqlalchemy.orm
from twisted.application.service import Service
from zope.interface.declarations import implementer
from bouser.helpers.plugin_helpers import Dependency, BouserPlugin
from .interfaces import IDataBaseService
__author__ = 'mmalkov'
@implementer(IDataBaseService)
class DataBaseService(Service, BouserPlugin):
    """Twisted service that owns the SQLAlchemy engine and session factory."""
    signal_name = 'bouser.db'
    root = Dependency('bouser')

    def __init__(self, config):
        self.url = config['url']
        self.db = None        # engine, created in startService
        self.session = None   # sessionmaker bound to the engine

    def startService(self):
        """Create the engine and session factory when the service starts."""
        Service.startService(self)
        # pool_recycle guards against stale pooled connections (e.g. MySQL
        # "server has gone away" after the server-side timeout).
        self.db = sqlalchemy.create_engine(self.url, pool_recycle=3600)
        self.session = sqlalchemy.orm.sessionmaker(bind=self.db)

    def stopService(self):
        """Tear down the engine/session factory when the service stops."""
        # BUG FIX: this previously called Service.startService(self) again.
        Service.stopService(self)
        self.db = self.session = None

    def get_session(self):
        """Return a new session; the caller is responsible for closing it."""
        # BUG FIX: the factory attribute is `session`; `self.Session` never existed.
        return self.session()

    @contextlib.contextmanager
    def context_session(self, read_only=False):
        """Session scope: commit on success (rollback when read_only),
        rollback and re-raise on error, always close."""
        session = self.session()
        try:
            yield session
        except:
            # Bare except is deliberate: roll back on *any* error, then re-raise.
            session.rollback()
            raise
        else:
            if read_only:
                session.rollback()
            else:
                session.commit()
        finally:
            session.close()
| 1,323 | 389 |
# -*- coding: utf-8 -*-
from pydub import AudioSegment
import sys
import glob
if __name__ == "__main__":
    # Usage: python concat.py <input_dir> <output_wav>
    args = sys.argv
    folder = glob.glob(args[1] + "/*.wav")
    # PERF FIX: accumulate the segments in memory and export once, instead of
    # re-reading and re-writing the output file after every input file
    # (which was quadratic in total audio length).
    combined = None
    for file in folder:
        soundfile = AudioSegment.from_file(file, "wav")
        combined = soundfile if combined is None else combined + soundfile
        print("connect " + file)
    if combined is not None:
        combined.export(args[2], format="wav")
| 581 | 187 |
import numpy as np
import pandas as pd
from utils import calculate_q
from scipy import stats
def calculate_deg_fold_change(data1_df, data2_df, fc_cutoff=1,
                              alternative='two-sided'):
    """
    Select differentially expressed genes (DEGs) between two expression
    DataFrames or Series using a log2 fold-change threshold.

    Parameters
    ----------
    data1_df : DataFrame or Series
        gene expression data 1 (row: genes, col: samples)
    data2_df : DataFrame or Series
        gene expression data 2 (row: genes, col: samples)
    fc_cutoff : float, optional
        log2 fold-change cutoff. Default is 1.
    alternative : {'greater', 'less', 'two-sided'}, optional
        indicates the way to compare the two data. Default is 'two-sided'.

    Returns
    -------
    gene_arr : ndarray
        differentially expressed genes.
    """
    # For matrices, compare per-gene sample means; Series are compared directly.
    if data1_df.ndim == 2:
        diff_sr = data1_df.mean(axis=1) - data2_df.mean(axis=1)
    else:
        diff_sr = data1_df - data2_df
    if alternative == 'two-sided':
        mask = diff_sr.abs() > fc_cutoff
    elif alternative == 'greater':
        mask = diff_sr > fc_cutoff
    elif alternative == 'less':
        mask = diff_sr < -fc_cutoff
    else:
        raise ValueError("<alternative> must be 'greater', 'less', or 'two-sided'.")
    return diff_sr[mask].index.values
def calculate_deg_t_test(data1_df, data2_df, fdr=0.05,
                         alternative='two-sided'):
    """
    Select differentially expressed genes (DEGs) with Welch's T-test,
    controlling the false discovery rate (FDR).

    Parameters
    ----------
    data1_df : DataFrame
        gene expression data 1 (row: genes, col: samples)
    data2_df : DataFrame
        gene expression data 2 (row: genes, col: samples)
    fdr : float, optional
        acceptable FDR. Default is 0.05.
    alternative : {'greater', 'less', 'two-sided'}, optional
        indicates the way to compare the two data. Default is 'two-sided'.
    Returns
    -------
    gene_arr : ndarray
        differentially expressed genes.
    """
    # Transpose so genes are columns; equal_var=False -> Welch's t-test.
    t_arr, p_arr = stats.ttest_ind(data1_df.T, data2_df.T, equal_var=False)
    # Convert two-sided p-values to one-sided ones when requested: halve
    # them, then flip those whose t statistic points the wrong way.
    if alternative == 'greater':
        p_arr /= 2
        wrong_side = t_arr < 0
        p_arr[wrong_side] = 1 - p_arr[wrong_side]
    elif alternative == 'less':
        p_arr /= 2
        wrong_side = t_arr > 0
        p_arr[wrong_side] = 1 - p_arr[wrong_side]
    elif alternative != 'two-sided':
        raise ValueError("<alternative> must be 'greater', 'less', or 'two-sided'.")
    # calculate_q (project helper) converts p-values to q-values for FDR control.
    return data1_df.index.values[calculate_q(p_arr) <= fdr]
def calculate_deg(data1_df, data2_df, fc_cutoff=1, fdr=0.05,
                  alternative='two-sided',
                  func=np.intersect1d):
    """
    Select differentially expressed genes (DEGs) that satisfy both the
    fold-change criterion and the FDR-controlled T-test criterion.

    Parameters
    ----------
    data1_df : DataFrame
        gene expression data 1 (row: genes, col: samples)
    data2_df : DataFrame
        gene expression data 2 (row: genes, col: samples)
    fc_cutoff : float, optional
        log2 fold-change cutoff. Default is 1.
    fdr : float, optional
        acceptable FDR. Default is 0.05.
    alternative : {'greater', 'less', 'two-sided'}, optional
        indicates the way to compare the two data. Default is 'two-sided'.
    func : callable, optional
        set operation combining the two gene lists. Default is
        np.intersect1d (i.e. genes must pass both tests).
    Returns
    -------
    gene_arr : ndarray
        differentially expressed genes.
    """
    by_fold_change = calculate_deg_fold_change(data1_df, data2_df,
                                               fc_cutoff, alternative)
    by_t_test = calculate_deg_t_test(data1_df, data2_df, fdr, alternative)
    return func(by_fold_change, by_t_test)
if __name__ == '__main__':
    # Smoke test: two random datasets offset by 1.5 should yield many DEGs.
    data1_df = pd.DataFrame(np.random.randn(1000,5))
    data2_df = pd.DataFrame(np.random.randn(1000,5) + 1.5)
    print(len(calculate_deg(data1_df, data2_df)))
| 3,996 | 1,408 |
"""
This is a test that we're using to gather example data from our two example models.
This is passed a list of image names, image numbers, and the vector representing the face in the photo,
and this script takes that and a split of testing vs training data to determine how accurate the model was by simply
checking which labeled vector (from the train data) the test data is closest to, and returning whether that was right or not.
python nearestNeighborTest.py ../../../../Downloads/facenet_lfw_vector.txt training_split.txt testing_split.txt facenet_results.txt
python nearestNeighborTest.py dlib_output_vectors.txt training_split.txt testing_split.txt dlib_results.txt
"""
import sys
import numpy as np
def load_split_file(filename):
    """Read a tab-separated split file into a list of (name, number) tuples.

    Each line is expected to be "<name>\t<number>"; the trailing newline is
    absorbed by int() on the second field.

    :param filename: path of the split file to read
    :return: list of (str, int) tuples, one per line
    """
    # Use a context manager so the file is closed even if parsing raises,
    # and split each line once instead of twice.
    with open(filename, "r") as f:
        lines = f.readlines()
    data = []
    for line in lines:
        fields = line.split("\t")
        data.append((fields[0], int(fields[1])))
    return data
def nearest_neighbor(vector, neighbors):
    """Find the entry in *neighbors* whose vector is closest to *vector*.

    neighbors is a non-empty list of (name, number, vector) tuples; distance
    is the Euclidean norm. Ties keep the earliest entry, matching a linear
    scan with strict improvement. Returns (closest_entry, distance).
    """
    best = min(neighbors, key=lambda entry: np.linalg.norm(vector - entry[2]))
    return best, np.linalg.norm(vector - best[2])
def main(args):
    """Run the nearest-neighbor face-matching evaluation.

    args: [script, vectors_file, training_split, testing_split, output_file].
    Loads per-photo face vectors, uses the training split as labeled data,
    classifies each testing photo by its nearest labeled vector, and writes
    one tab-separated result line per testing photo.
    """
    results_file = open(args[1], "r") # this contains the vectors describing all of the photos
    output_filename = args[4]
    # then go load all of the files
    all_vector_dict = {}
    all_results = []
    lines = results_file.readlines()
    # Each line is "<name>_<number>.<ext> - [v1, v2, ...]".
    lines = [line.split(" - ") for line in lines]
    for result in lines:
        words = result[0].split("_")
        words[-1] = words[-1].split(".")[0] # remove the file type from the number
        number = int(words[-1])
        name = "_".join(words[:-1]) # the rest of the underscore separated things before the number
        vector = np.array([float(x) for x in result[1].replace("[", "").replace("]", "").split(", ")])
        r = (name, number, vector)
        all_results += [r]
        if (name, number) not in all_vector_dict:
            all_vector_dict[(name, number)] = []
        all_vector_dict[(name, number)] += [r] # add it to the list of vectors under that name and number because some photos have multiple faces :P
    results_file.close()
    vector_length = len(all_results[0][2])
    # we assume that at least one of the first two is correct otherwise we'll just fail I guess...
    if len(all_results[1][2]) != vector_length:
        print("ERROR: unknown vector length " + str(vector_length) + " != " + str(len(all_results[1][2])))
        sys.exit(1)
    # now we have the vectors. Now lets load the split
    training_names = load_split_file(args[2])
    testing_names = load_split_file(args[3])
    # now find all of the labeled images so we can loop over them all
    labeled_data = []
    for label in training_names:
        # add the vector to our list of labeled data:
        if label not in all_vector_dict:
            # then we just add a zero vector to it with that name and number
            labeled_data += [(label[0], label[1], np.zeros(vector_length))]
            print("Created zeros vector for " + str(label))
        else:
            for face_in_photo in all_vector_dict[label]:
                labeled_data += [face_in_photo]
    print("amount of labeled data: " + str(len(labeled_data)))
    # then go test it!
    # the output is a list of (name, number, is_result_less_than_.6, nearest_name, nearest_number, is_same_person_bool)
    # which we then output into a text file split by tabs probably.
    output_file = open(output_filename, "w")
    # write everything here!
    # if you uncomment this line then it'll generate the results for ALL images not just the testing data.
    # testing_names += training_names
    # results = []
    # I also save everything to here just in case Matt wants to just edit this code instead of loading the file I guess?
    # there are a couple lines inside the for loop which have to be uncommented to use the results array
    total = 0
    correct = 0
    for testing_name in testing_names:
        # this is a name and number tuple
        # first create a default fake thing if we weren't able to find a face in that photo
        testing_vector = [(testing_name[0], testing_name[1], np.zeros(vector_length))]
        if testing_name in all_vector_dict:
            # print("Found testing vector for " + str(testing_name))
            testing_vector = all_vector_dict[testing_name] # a list of all the photos in the picture with all their faces
        # [(name, number, vector), (name, number, vector)]
        # Among all faces in this photo, keep the one closest to any labeled face.
        nearest = None
        nearest_distance = -1
        for face_vector in testing_vector:
            # print("HERE", testing_vector, face_vector)
            nearest_face, nearest_face_distance = nearest_neighbor(face_vector[2], labeled_data)
            if nearest_face_distance < nearest_distance or nearest_distance == -1:
                # then it's closer, so choose that one
                nearest_distance = nearest_face_distance
                nearest = nearest_face
        # nearest is (name, number, vector)
        # The 0.6 threshold marks a "confident" match; prediction is correct
        # when the nearest labeled face carries the same person name.
        r = (testing_name[0], testing_name[1], nearest_distance < .6, nearest[0], nearest[1], testing_name[0] == nearest[0])
        total += 1
        correct += testing_name[0] == nearest[0]
        # results += [r]
        string_r = [str(x) for x in r]
        o = "\t".join(string_r) + "\n"
        output_file.write(o)
    output_file.close()
    print("Total:", total, "Correct:", correct)
    # if you uncomment things you can now do stuff with results, which is a list of (name, number, is_result_less_than_.6, nearest_name, nearest_number, is_same_person_bool)
    # for each result. Currently we only test the testing_files, you can also uncomment the line above the for loop which then means
    # we generate results for ALL images including training data (which should always be correct since its nearest neighbor is itself)
    # but that may be useful for adding more data to the ontology, we'll figure it up later
if __name__ == "__main__":
    """this loads the attributes file that has the data for all the photos. Pass in the filename of the tab separated file downloaded
    from http://vis-www.cs.umass.edu/lfw/ with the list of all people names and the number of images associated with them"""
    # Exactly four arguments are required; print usage and bail otherwise.
    if len(sys.argv) != 5:
        print(
            """Usage: nearestNeighborTest.py results_filename training_filename testing_filename output_filename"""
        )
        sys.exit(0)  # exit out
main(sys.argv) | 6,326 | 2,037 |
import numpy as np
def save_list_to_file(z_list, z_file):
    """Write the given strings to *z_file* verbatim (no separators added)."""
    with open(z_file, 'w') as handle:
        handle.write("".join(z_list))
def random_split_train_test(train_file, out_train_file, out_test_file, train_percentage=0.8):
    """Shuffle the lines of *train_file* and split them into two files.

    The first train_percentage of the shuffled lines goes to
    *out_train_file*, the remainder to *out_test_file*.
    """
    with open(train_file) as reader:
        samples = reader.readlines()
    np.random.shuffle(samples)
    cut = int(train_percentage * len(samples))
    save_list_to_file(samples[:cut], out_train_file)
    save_list_to_file(samples[cut:], out_test_file)
random_split_train_test("/home/bassel/data/oa_kinetics/lbls/actions_stack_list.txt",
"/home/bassel/data/oa_kinetics/lbls/action_train_stacks_list.txt",
"/home/bassel/data/oa_kinetics/lbls/action_test_stacks_list.txt") | 792 | 304 |
"""
Functions and classes for interacting with the CodeRED data format
"""
from dataclasses import dataclass
from typing import List, Optional, Union
import pandas as pd
from .types import FilenameType
# The required headers for CodeRED
# The required headers for CodeRED, in the exact column order the upload
# template expects (CoderedContact.to_excel_row must mirror this order).
EXCEL_HEADERS = (
    "Command",
    "CustomKey",
    "ContactId",
    "First Name",
    "Last Name",
    "Groups",
    "Tags",
    "HomePhone",
    "WorkPhone",
    "CellPhone",
    "OtherPhone",
    "TextNumber",
    "MobileProvider",
    "HomeEmail",
    "WorkEmail",
    "OtherEmail",
    "StreetAddress",
    "City",
    "State",
    "Zip",
    "Zip4",
    "Preferred Language",
)
# The name of the Worksheet to submit to CodeRED
EXCEL_SHEET_NAME = "5. CodeRed"
@dataclass(frozen=True)
class CoderedContact:
    """A representation of a contact ot be sent to CodeRED"""

    contact_id: Union[str, int]
    first_name: str
    last_name: str
    # Represents the text message the person will get
    groups: str
    # Must be exactly 10 characters
    text_number: str
    # Maybe necessary?
    tags: str = "English"
    preferred_language: str = "English"
    command: Optional[str] = None
    custom_key: Optional[str] = None
    home_phone: Optional[str] = None
    work_phone: Optional[str] = None
    cell_phone: Optional[str] = None
    other_phone: Optional[str] = None
    mobile_provider: Optional[str] = None
    home_email: Optional[str] = None
    work_email: Optional[str] = None
    other_email: Optional[str] = None
    street_address: Optional[str] = None
    city: Optional[str] = None
    state: Optional[str] = None
    zip_code: Optional[str] = None
    zip_code_plus_four: Optional[str] = None

    def to_excel_row(self) -> List[Optional[Union[int, str]]]:
        """
        Serialize this contact as a list whose order mirrors EXCEL_HEADERS.
        """
        # Attribute names listed in the exact CodeRED column order.
        column_order = (
            "command", "custom_key", "contact_id", "first_name", "last_name",
            "groups", "tags", "home_phone", "work_phone", "cell_phone",
            "other_phone", "text_number", "mobile_provider", "home_email",
            "work_email", "other_email", "street_address", "city", "state",
            "zip_code", "zip_code_plus_four", "preferred_language",
        )
        return [getattr(self, attr) for attr in column_order]
def make_df_from_data(contacts: List[CoderedContact]) -> pd.DataFrame:
    """
    Build a DataFrame (one row per contact, EXCEL_HEADERS columns) ready
    for Excel export.

    Args:
        contacts: The contacts to transform into a data frame
    Returns:
        The contacts as a data frame
    """
    rows = [contact.to_excel_row() for contact in contacts]
    return pd.DataFrame.from_records(rows, columns=EXCEL_HEADERS)
def make_excel_file(
    filename: FilenameType, contacts: List[CoderedContact], drop_message_0: bool = True
):
    """
    Write *contacts* to an Excel workbook at *filename*.

    Args:
        filename: The location of the Excel file to create
        contacts: The contacts to transform into an Excel file
        drop_message_0: If True, remove those people assigned to message_0
            (i.e., the control) from the output
    """
    frame = make_df_from_data(contacts)
    if drop_message_0:
        # The control group must not receive the message.
        frame = frame[frame["Groups"] != "message_0"]
    with pd.ExcelWriter(filename) as writer:
        frame.to_excel(writer, index=False, sheet_name=EXCEL_SHEET_NAME)
| 3,596 | 1,085 |
from imagekit.admin import AdminThumbnail
from django.contrib.admin import TabularInline
from core.admin.forms import LimitedInlineFormSet
from core.admin.utils import (
get_change_view_link,
get_changelist_view_link
)
from ..models import PremierProduct
class PremierManufacturerProductsTabularInline(TabularInline):
    """Inline admin listing a manufacturer's relevant PremierProducts.

    Only the first rows allowed by LimitedInlineFormSet are shown
    (verbose_name_plural says "top 10"); an "See All" link leads to the
    full filtered changelist.
    """
    model = PremierProduct
    fk_name = 'manufacturer'
    formset = LimitedInlineFormSet
    extra = 0
    verbose_name_plural = 'products (top 10)'
    # Changelist query parameter used by all_link to filter by manufacturer.
    all_link_query = 'manufacturer__id__exact'
    ordering = (
        'premier_part_number',
    )
    classes = (
        'collapse',
    )
    fields = (
        'all_link',
        'detail_link',
        'premier_part_number',
        'vendor_part_number',
        'description',
        'manufacturer',
        'inventory_ab',
        'cost_cad',
        'primary_image_preview',
        'may_be_relevant_flag',
        'is_relevant',
        'relevancy_warnings',
        'relevancy_errors',
        'relevancy_exception'
    )
    readonly_fields = (
        'relevancy_warnings',
        'relevancy_errors',
        'may_be_relevant_flag',
        'primary_image_preview',
        'all_link',
        'detail_link'
    )

    def get_rel_obj(self, obj):
        # Resolve the related object this inline hangs off (the manufacturer).
        return getattr(obj, self.fk_name)

    def detail_link(self, obj):
        # Link to the product's change view; unsaved rows get no link.
        if not obj.pk:
            return None
        return get_change_view_link(obj, 'Details')
    detail_link.short_description = ''

    def all_link(self, obj):
        # Link to the full changelist filtered to this row's manufacturer.
        if not obj:
            return None
        query = f'{self.all_link_query}={getattr(self.get_rel_obj(obj), "pk")}'
        return get_changelist_view_link(obj._meta.model, 'See All', query)
    all_link.short_description = ''

    primary_image_preview = AdminThumbnail(
        image_field='primary_image_thumbnail'
    )
    primary_image_preview.short_description = 'primary image'

    def may_be_relevant_flag(self, obj):
        # Visual marker when the stored relevance disagrees with the computed one.
        if obj.is_relevant != obj.may_be_relevant:
            return '~'
        else:
            return ''
    may_be_relevant_flag.short_description = ''

    def get_queryset(self, request):
        # Only relevant products, annotated with admin-only data.
        return super().get_queryset(request).filter(
            is_relevant=True
        ).with_admin_data()

    def get_readonly_fields(self, request, obj=None):
        # Non-superusers may not edit the part number.
        readonly_fields = super().get_readonly_fields(request, obj)
        if not request.user.is_superuser:
            readonly_fields += (
                'premier_part_number',
            )
        return readonly_fields
| 2,516 | 794 |
import unittest
from src.cumulator.base import Cumulator
class TestBase(unittest.TestCase):
    """Unit tests for Cumulator.run argument forwarding."""

    def test_run(self):
        cumulator = Cumulator()

        # A callable taking no arguments.
        def constant():
            return 1

        self.assertEqual(1, cumulator.run(constant))

        # A callable with one positional and one keyword argument.
        def multiply(x, y=1):
            return x * y

        # Positional only.
        self.assertEqual(3, cumulator.run(multiply, 3))
        # Positional and keyword together.
        self.assertEqual(6, cumulator.run(multiply, 3, y=2))
if __name__ == '__main__':
    # Allow running this test module directly with `python <file>`.
    unittest.main()
| 629 | 204 |
#!/usr/bin/python3
import argparse
import logging as log
from aiohttp import web
from api.databasemanager import DictionaryDatabaseManager
from api.dictionary import \
entry, \
definition, \
translation, \
configuration
from api.dictionary import \
get_dictionary, \
get_dictionary_xml, \
get_language_list, \
download_dictionary, \
get_inferred_multilingual_dictionary
from api.dictionary.middlewares import \
json_error_handler, \
auto_committer
# Command-line interface for the dictionary service.
parser = argparse.ArgumentParser(description='Dictionary service')
parser.add_argument(
    '-d',
    '--db-file',
    dest='STORAGE',
    required=False,
    default='default')
parser.add_argument('-p', '--port', dest='PORT', type=int, default=8001)
parser.add_argument(
    '-l',
    '--log-file',
    dest='LOG_FILE',
    type=str,
    default='/opt/botjagwar/user_data/dictionary_service.log')
parser.add_argument('--host', dest='HOST', type=str, default='0.0.0.0')
parser.add_argument('--log-level', dest='LOG_LEVEL', type=str, default='debug')
# NOTE(review): argparse `type=bool` makes any non-empty string truthy
# (e.g. --autocommit false still yields True) -- confirm intended usage.
parser.add_argument('--autocommit', dest='autocommit', type=bool, default=True)
parser.add_argument(
    '--commit-every',
    dest='commit_every',
    type=int,
    default=100)
args = parser.parse_args()
# Convenience aliases used throughout module setup below.
WORD_STORAGE = args.STORAGE
HOST = args.HOST
PORT = args.PORT
LOG = args.LOG_FILE
# Resolve the textual --log-level to a numeric logging level.
try:
    LOG_LEVEL = log._nameToLevel[args.LOG_LEVEL.upper()]
except KeyError:
    # Unknown level names fall back to DEBUG (numeric value 10).
    LOG_LEVEL = 10
# Fix: honour the requested --log-level. The original passed level=log.DEBUG,
# leaving LOG_LEVEL computed but unused.
log.basicConfig(filename=LOG, level=LOG_LEVEL)
# Database manager backing all dictionary routes.
dictionary_db_manager = DictionaryDatabaseManager(
    database_file=WORD_STORAGE, db_header='')
routes = web.RouteTableDef()
# Middlewares: JSON-ify unhandled errors, auto-commit per the settings below.
app = web.Application(middlewares=[
    json_error_handler,
    auto_committer,
])
# Shared state consumed by handlers and middlewares via the app mapping.
app['database'] = dictionary_db_manager
app['session_instance'] = dictionary_db_manager.session
app['autocommit'] = args.autocommit
app['commit_every'] = args.commit_every
app['commit_count'] = 0
# --- Language listing and downloads ---
app.router.add_route('GET', '/languages/list', get_language_list)
app.router.add_route('GET', '/languages/list/download', download_dictionary)
# --- Definition CRUD and search ---
app.router.add_route(
    'GET',
    '/definition/{definition_id}',
    definition.get_definition)
app.router.add_route(
    'GET',
    '/definition_words/{definition_id}',
    definition.get_definition_with_words)
app.router.add_route(
    'PUT',
    '/definition/{definition_id}/edit',
    definition.edit_definition)
#app.router.add_route('POST', '/definition/{language}/create', definition.create_definition)
app.router.add_route(
    'DELETE',
    '/definition/{definition_id}/delete',
    definition.delete_definition)
app.router.add_route(
    'POST',
    '/definition/search',
    definition.search_definition)
# --- Whole-dictionary exports ---
# NOTE(review): '/dictionary/...' is wired to get_dictionary_xml while
# '/xml_dictionary/...' is wired to get_dictionary -- possibly swapped; confirm.
app.router.add_route('GET', '/dictionary/{language}', get_dictionary_xml)
app.router.add_route('GET', '/xml_dictionary/{language}', get_dictionary)
app.router.add_route(
    'GET',
    '/dictionary/{source}/{bridge}/{target}',
    get_inferred_multilingual_dictionary)
# --- Entry CRUD ---
app.router.add_route('GET', '/entry/{language}/{word}', entry.get_entry)
app.router.add_route('POST', '/entry/{language}/create', entry.add_entry)
app.router.add_route('POST', '/entry/batch', entry.add_batch)
app.router.add_route('PUT', '/entry/{word_id}/edit', entry.edit_entry)
app.router.add_route('DELETE', '/entry/{word_id}/delete', entry.delete_entry)
# --- Translations ---
app.router.add_route(
    'GET',
    '/translations/{origin}/{target}/{word}',
    translation.get_translation)
app.router.add_route(
    'GET',
    '/translations/{origin}/{word}',
    translation.get_all_translations)
# --- Misc lookups and service control ---
app.router.add_route('GET', '/word/{word_id}', entry.get_word_by_id)
app.router.add_route('GET', '/ping', configuration.pong)
app.router.add_route('POST', '/commit', configuration.do_commit)
app.router.add_route('POST', '/rollback', configuration.do_rollback)
app.router.add_route('PUT', '/configure', configuration.configure_service)
if __name__ == '__main__':
    try:
        app.router.add_routes(routes)
        # NOTE(review): access_log is given the `logging` module itself, not a
        # Logger instance -- aiohttp expects a Logger; confirm this works as
        # intended on the aiohttp version in use.
        web.run_app(app, host=HOST, port=PORT, access_log=log)
    except Exception as exc:
        log.exception(exc)
        log.critical("Error occurred while setting up the server")
    finally:
        # Flush and close the shared DB session on shutdown, even on failure.
        app['session_instance'].flush()
        app['session_instance'].close()
| 4,191 | 1,378 |
from output.models.nist_data.list_pkg.any_uri.schema_instance.nistschema_sv_iv_list_any_uri_enumeration_1_xsd.nistschema_sv_iv_list_any_uri_enumeration_1 import (
NistschemaSvIvListAnyUriEnumeration1,
NistschemaSvIvListAnyUriEnumeration1Type,
)
# Re-export the generated NIST schema bindings as this package's public API.
__all__ = [
    "NistschemaSvIvListAnyUriEnumeration1",
    "NistschemaSvIvListAnyUriEnumeration1Type",
]
| 360 | 150 |
import torch
import numpy as np
import re
import itertools
from textwrap import wrap
import matplotlib.pyplot as plt
def padding_mask(lengths, batch_size, time_size=None):
    """
    Computes a [batch_size, time_size] binary mask which selects all and only the
    non padded values in the input tensor
    :param torch.tensor lengths: a [batch_size] tensor containing the actual length
        (before padding) of every sample in the batch
    :param int batch_size: the number of samples in the batch
    :param int time_size: the length of the padded sequences
    :retype: torch.tensors
    """
    limit = lengths.max() if time_size is None else time_size
    # Position indices broadcast against per-sample lengths: position < length.
    positions = torch.arange(limit, device=lengths.device, dtype=lengths.dtype)
    keep = positions.expand(batch_size, limit) < lengths.unsqueeze(1)
    return keep.to(torch.uint8)
def cat_arange(counts, dtype=torch.int32):
    """
    Concatenate results of multiple arange calls
    E.g.: cat_arange([2,1,3]) = [0, 1, 0, 0, 1, 2]
    Credits: https://stackoverflow.com/a/20033438
    :param torch.tensor counts: a 1D tensor
    :return: equivalent to torch.cat([torch.arange(c) for c in counts])
    """
    # Start from all-ones increments; the first element of every new run must
    # jump back to zero, i.e. carry an increment of (1 - previous_run_length).
    steps = torch.ones(counts.sum(), dtype=dtype, device=counts.device)
    steps[0] = 0
    run_lengths = counts[:-1].type(dtype)
    run_starts = torch.cumsum(run_lengths, dim=0).type(torch.int64)
    steps[run_starts] = 1 - run_lengths
    # A single cumulative sum realizes all the concatenated aranges.
    return torch.cumsum(steps, dim=0)
def repeat_arange(counts, dtype=torch.int32):
    """
    Repeat each element of arange multiple times
    E.g.: repeat_arange([2,1,3]) = [0, 0, 1, 2, 2, 2]
    :param counts: a 1D tensor having the same length of 'tensor'
    :return: equivalent to torch.cat([torch.tensor([v]).expand(n) for v, n in enumerate(counts)])
    """
    # Zero increments everywhere except at each run boundary, where the
    # repeated value bumps by one; cumsum produces the repeated sequence.
    jumps = torch.zeros(counts.sum(), dtype=dtype, device=counts.device)
    boundaries = torch.cumsum(counts[:-1], dim=0).type(torch.int64)
    jumps[boundaries] = 1
    return torch.cumsum(jumps, dim=0)
def select_padded(source, mask):
    """Left-pack the masked elements of a padded batch.

    Gathers the entries of *source* ([batch, time, feature]) selected by the
    binary *mask* ([batch, time]) into a new zero-padded tensor of shape
    [batch, max_selected_length, feature], preserving order.
    """
    batch_size, _, feature_size = source.shape
    lengths = mask.sum(-1)
    packed = source.new_zeros([batch_size, lengths.max(), feature_size])
    # Destination coordinates: row index repeated per kept element, column
    # index restarting from zero for every row.
    rows = repeat_arange(lengths, torch.int64)
    cols = cat_arange(lengths, torch.int64)
    packed[rows, cols] = source[mask]
    return packed
def confusion_matrix_fig(cm, labels, normalize=False):
    """Render a confusion matrix as a matplotlib figure.

    :param cm: square ndarray of counts (rows: true labels, cols: predicted)
    :param labels: class names used for both axes (wrapped at 40 chars)
    :param normalize: if True, rescale each row before plotting
    :return: the matplotlib Figure
    """
    if normalize:
        # NOTE(review): rows are scaled by 10 (not 100) before the int cast --
        # looks like a percentage scaling typo; confirm the intended factor.
        cm = cm.astype('float') * 10 / cm.sum(axis=1)[:, np.newaxis]
        cm = np.nan_to_num(cm, copy=True)
        cm = cm.astype('int')
    fig = plt.figure(figsize=(7, 7), facecolor='w', edgecolor='k')
    ax = fig.add_subplot(1, 1, 1)
    im = ax.imshow(cm, cmap='Oranges')
    classes = ['\n'.join(wrap(l, 40)) for l in labels]
    tick_marks = np.arange(len(classes))
    ax.set_xlabel('Predicted', fontsize=7)
    ax.set_xticks(tick_marks)
    c = ax.set_xticklabels(classes, fontsize=4, rotation=-90, ha='center')
    ax.xaxis.set_label_position('bottom')
    ax.xaxis.tick_bottom()
    ax.set_ylabel('True Label', fontsize=7)
    ax.set_yticks(tick_marks)
    ax.set_yticklabels(classes, fontsize=4, va='center')
    ax.yaxis.set_label_position('left')
    ax.yaxis.tick_left()
    # Annotate every cell; zeros are drawn as '.' to reduce clutter.
    for i, j in itertools.product(range(cm.shape[0]), range(cm.shape[1])):
        ax.text(j, i, format(cm[i, j], 'd') if cm[i, j] != 0 else '.',
                horizontalalignment="center", fontsize=6,
                verticalalignment='center', color="black")
    return fig
| 3,578 | 1,339 |
from faq_module.storage import FAQManager # , FAQConfig, FAQData
# from faq_module.commands import text
# from discord.ext import commands
# import faq_module.text
# import logging
import discord
# import typing
import re
async def faq_on_message(faq_manager: FAQManager, message: discord.Message):
    """Scan a message for {keyword} FAQ references and reply with one embed.

    Keywords are resolved recursively (FAQ phrases may reference other
    keywords); nothing is sent when no field or image was added.
    """
    embed = discord.Embed(title="Info Requested", color=0x00ffff)
    found_keys = set()
    faq_on_recursive(faq_manager, message.content, embed, found_keys, message.guild.id)
    # NOTE(review): discord.Embed.image may be a proxy that is truthy even
    # when unset -- confirm this condition against the discord.py version used.
    if embed.fields or embed.image:
        await message.channel.send(embed=embed)
def faq_on_recursive(faq_manager: FAQManager, message_content: str, embed: discord.Embed, found_keys: set, guild_id: int):
    """Recursively resolve {keyword} references, accumulating embed fields.

    found_keys records keywords already seen.
    NOTE(review): found_keys is not checked before recursing, so mutually
    referencing FAQ phrases could recurse indefinitely -- confirm intent.
    """
    for keyword in get_keywords(message_content):
        if keyword.lower() in faq_manager.get_keywords(guild_id):
            found_keys.add(keyword)
            faq_data = faq_manager.get(guild_id, keyword)
            # Phrases consisting solely of keyword references add no field of
            # their own; they only expand into their referenced entries.
            if not just_keywords(faq_data.phrase):
                # \u200b is a zero width space.
                embed.add_field(name=keyword, value=faq_data.phrase + "\u200b")
            if faq_data.image_url:
                embed.set_image(url=faq_data.image_url)
            faq_on_recursive(faq_manager, faq_data.phrase, embed, found_keys, guild_id)
def get_keywords(input_string: str) -> list:
    """Return the names inside every {...} placeholder, in order of appearance."""
    return re.findall("{(.+?)}", input_string)
def just_keywords(input_string: str) -> bool:
    """Return True when the string consists only of {keyword} references.

    Whitespace between references is ignored; a string with no references
    at all yields False.
    """
    tokens = re.findall("({.+?})", input_string)
    if not tokens:
        return False
    # Compare total placeholder length against the whitespace-stripped input:
    # equal lengths mean nothing but placeholders (and whitespace) is present.
    return len("".join(tokens)) == len("".join(input_string.split()))
| 1,642 | 557 |
import importlib
import os
import pkgutil

from tests.utils import hass_mock, get_instances
import devices as devices_module
from core import Controller
from core import type as type_module
def _import_modules(file_dir, package):
    # Recursively import every module under *file_dir* so that all Controller
    # subclasses get registered as an import side effect.
    # NOTE(review): requires os, pkgutil and importlib to be imported at the
    # top of this file -- they are referenced here but not imported above.
    pkg_dir = os.path.dirname(file_dir)
    for (module_loader, name, ispkg) in pkgutil.iter_modules([pkg_dir]):
        if ispkg:
            # Descend into subpackages via their __init__.py path.
            _import_modules(pkg_dir + "/" + name + "/__init__.py", package + "." + name)
        else:
            importlib.import_module("." + name, package)
def _all_subclasses(cls):
return list(
set(cls.__subclasses__()).union(
[s for c in cls.__subclasses__() for s in _all_subclasses(c)]
)
)
def get_devices():
    # Import every devices module, then instantiate each leaf Controller
    # subclass (classes with no further subclasses of their own).
    _import_modules(devices_module.__file__, devices_module.__package__)
    subclasses = _all_subclasses(Controller)
    devices = [cls_() for cls_ in subclasses if len(cls_.__subclasses__()) == 0]
    return devices
def check_mapping(mapping, all_possible_actions, device):
    """Validate that every value of *mapping* names a known controller action.

    A None mapping is accepted as-is. Raises ValueError when a value is not
    a string or is not one of *all_possible_actions*.
    """
    if mapping is None:
        return
    for trigger, action in mapping.items():
        if type(action) is not str:
            message = (
                "The value from the mapping should be a string, matching "
                + "one of the actions from the controller. "
                + f"The possible actions are: {all_possible_actions}. "
                + f"Device class: {device.__class__.__name__}"
            )
            raise ValueError(message)
        if action not in all_possible_actions:
            message = (
                f"{action} not found in the list of possible action from the controller. "
                + f"The possible actions are: {all_possible_actions}"
            )
            raise ValueError(message)
def test_devices(hass_mock):
    """Check every device's z2m/deconz/zha mappings against its own action set."""
    devices = get_instances(
        devices_module.__file__, devices_module.__package__, Controller
    )
    for device in devices:
        type_actions_mapping = device.get_type_actions_mapping()
        # Devices without a type mapping expose nothing to validate.
        if type_actions_mapping is None:
            continue
        possible_actions = list(type_actions_mapping.keys())
        # Each integration's mapping must only reference known actions.
        mappings = device.get_z2m_actions_mapping()
        check_mapping(mappings, possible_actions, device)
        mappings = device.get_deconz_actions_mapping()
        check_mapping(mappings, possible_actions, device)
        mappings = device.get_zha_actions_mapping()
        check_mapping(mappings, possible_actions, device)
| 2,298 | 660 |
#!/usr/bin/env python
from setuptools import setup, find_packages
# Package metadata and installable helper scripts for the iocfg distribution.
setup(name='iocfg',
      version='0.1',
      description='Configuration for IO modules on Novus IHM',
      author='Thomas Del Grande',
      author_email='tgrande@pd3.com.br',
      packages=find_packages(),
      # Command-line entry points installed verbatim into the bin directory.
      scripts=[
          'scripts/diocfg',
          'scripts/aoutcfg',
          'scripts/aincfg',
          'scripts/config_analog_inputs.sh',
          'scripts/config_analog_outputs.sh',
          'scripts/config_digital_ios.sh',
      ],
      )
| 521 | 168 |
# NOTE(review): exercise-style script -- it assumes a DataFrame named
# `homelessness` is already defined by the surrounding environment (it is
# never loaded here) and will raise NameError if run standalone.
# Import pandas using the alias pd
import pandas as pd
# Print the head of the homelessness data
print(homelessness.head())
# Print the values of homelessness
print(homelessness.values)
# Print the column index of homelessness
print(homelessness.columns)
# Print the row index of homelessness
print(homelessness.index)
# Sort homelessness by individual
homelessness_ind = homelessness.sort_values('individuals')
# Print the top few rows
print(homelessness_ind.head())
# Select the individuals column
individuals = homelessness['individuals']
# Print the head of the result
print(individuals.head())
# Filter for rows where individuals is greater than 10000
ind_gt_10k = homelessness[homelessness['individuals'] > 10000]
# See the result
print(ind_gt_10k)
# Subset for rows in South Atlantic or Mid-Atlantic regions
south_mid_atlantic = homelessness[(homelessness['region'] == 'South Atlantic') | (
    homelessness['region'] == 'Mid-Atlantic')]
# See the result
print(south_mid_atlantic)
# Add total col as sum of individuals and family_members
homelessness['total'] = homelessness['individuals']+homelessness['family_members']
# Add p_individuals col as proportion of individuals
homelessness['p_individuals'] = homelessness['individuals']/homelessness['total']
# See the result
print(homelessness)
# Create indiv_per_10k col as homeless individuals per 10k state pop
homelessness["indiv_per_10k"] = 10000 * \
    ((homelessness['individuals']) / (homelessness['state_pop']))
# Subset rows for indiv_per_10k greater than 20
high_homelessness = homelessness[homelessness['indiv_per_10k'] > 20]
# Sort high_homelessness by descending indiv_per_10k
high_homelessness_srt = high_homelessness.sort_values(
    'indiv_per_10k', ascending=False)
# From high_homelessness_srt, select the state and indiv_per_10k cols
result = high_homelessness_srt[['state', 'indiv_per_10k']]
# See the result
| 1,912 | 655 |
import aiohttp
from time import time
import json
from hashlib import sha512
import hmac
from .fetcher import Fetcher
class BittrexAPI(Fetcher):
    """Minimal async client for the Bittrex v1.1 REST API (balances only)."""

    _URL = 'https://bittrex.com/api/v1.1/'
    # API credentials, set in __init__.
    _KEY = None
    _SECRET = None

    def __init__(self, key, secret):
        """Store API credentials; both must be non-None."""
        if key is None or secret is None:
            raise EnvironmentError("Bittrex key and secret must be specified in configs")
        self._KEY = key
        self._SECRET = secret

    def _signature(self, query):
        # HMAC-SHA512 of the full request URI, hex-encoded.
        # NOTE(review): the digest is upper-cased here -- presumably the
        # server compares case-insensitively; confirm against the API docs.
        message = query
        return hmac.new(
            key=self._SECRET.encode(),
            msg=message.encode(),
            digestmod=sha512
        ).hexdigest().upper()

    async def get_balances(self, loop, symbols, callback=None):
        """Fetch account balances, keeping only currencies in *symbols*.

        Returns a list of (currency, balance) tuples; also passes it to
        *callback* when one is provided.
        """
        async with aiohttp.ClientSession(loop=loop) as session:
            # Unix-time nonce as required by the signed-endpoint scheme.
            nonce = int(time())
            endpoint = self._URL + \
                'account/getbalances?apikey={}&nonce={}'.format(self._KEY, nonce)
            signature = self._signature(endpoint)
            headers = {
                'apisign': signature
            }
            _response = await self._fetch(session=session, url=endpoint, headers=headers)
            # Missing/failed 'result' falls back to an empty list.
            balances = json.loads(_response).get('result', [])
            result = []
            for balance in balances:
                if balance['Currency'] in symbols:
                    result.append(
                        (balance['Currency'],
                         float(balance.get('Balance', 0)))
                    )
            if callback is not None:
                callback(result)
            return result
| 1,599 | 439 |
import time
import pytest
from tango.common.logging import initialize_logging
from tango.common.testing import TangoTestCase
from tango.executors.multicore_executor import MulticoreExecutor
from tango.step_graph import StepGraph
from tango.workspaces import LocalWorkspace
from test_fixtures.package.steps import SleepPrintMaybeFail
class TestMulticoreExecutor(TangoTestCase):
    def setup_method(self):
        """Initialize logging before each test so executor output is captured."""
        super().setup_method()
        initialize_logging()
    def test_simple_execution_in_parallel(self):
        """Two independent 5s steps with parallelism 2 should overlap (<10s)."""
        step_graph = StepGraph(
            {
                "step1": SleepPrintMaybeFail(string="hello", seconds=5, fail=False),
                "step2": SleepPrintMaybeFail(string="hi", seconds=5, fail=False),
            }
        )
        executor = MulticoreExecutor(workspace=LocalWorkspace(self.TEST_DIR), parallelism=2)
        start_time = time.time()
        executor.execute_step_graph(step_graph)
        end_time = time.time()
        time_taken = end_time - start_time
        # Parallel execution must beat the 10s serial total.
        assert time_taken < 10  # TODO: will this be flaky?
        assert len(executor.workspace.step_cache) == 2
    def test_more_processes_ready_than_parallelism(self):
        """Three 5s steps with parallelism 2 need two waves (10-20s total)."""
        step_graph = StepGraph(
            {
                "step1": SleepPrintMaybeFail(string="hello", seconds=5, fail=False),
                "step2": SleepPrintMaybeFail(string="hi", seconds=5, fail=False),
                "step3": SleepPrintMaybeFail(string="howdy", seconds=5, fail=False),
            }
        )
        executor = MulticoreExecutor(workspace=LocalWorkspace(self.TEST_DIR), parallelism=2)
        start_time = time.time()
        executor.execute_step_graph(step_graph)
        end_time = time.time()
        time_taken = end_time - start_time
        assert 10 < time_taken < 20  # TODO: will this be flaky?
        assert len(executor.workspace.step_cache) == 3
    @pytest.mark.parametrize("parallelism", [1, 2, 3])
    def test_failing_step_no_downstream_task(self, parallelism):
        """An isolated failing step must not prevent caching the other two."""
        step_graph = StepGraph.from_params(
            {
                "step1": {
                    "type": "sleep-print-maybe-fail",
                    "string": "string_to_pass_down",
                    "seconds": 0,
                    "fail": False,
                },
                "step2": {
                    "type": "sleep-print-maybe-fail",
                    "string": {"type": "ref", "ref": "step1"},
                    "seconds": 0,
                    "fail": False,
                },
                "step3": {
                    "type": "sleep-print-maybe-fail",
                    "string": "This is going to fail!",
                    "seconds": 0,
                    "fail": True,
                },
            }
        )
        executor = MulticoreExecutor(
            workspace=LocalWorkspace(self.TEST_DIR),
            parallelism=parallelism,
            include_package=["test_fixtures.package.steps"],
        )
        executor.execute_step_graph(step_graph)
        # step1 and step2 succeed and are cached; failing step3 is not.
        assert len(executor.workspace.step_cache) == 2
    @pytest.mark.parametrize("parallelism", [1, 2, 3])
    def test_failing_step_with_downstream_task(self, parallelism):
        """A failure must also skip its dependent step; unrelated steps cache."""
        step_graph = StepGraph.from_params(
            {
                "step1": {
                    "type": "sleep-print-maybe-fail",
                    "string": "string_to_pass_down",
                    "seconds": 0,
                    "fail": True,
                },
                "step2": {
                    "type": "sleep-print-maybe-fail",
                    "string": {"type": "ref", "ref": "step1"},
                    "seconds": 0,
                    "fail": False,
                },
                "step3": {
                    "type": "sleep-print-maybe-fail",
                    "string": "This is going to fail!",
                    "seconds": 0,
                    "fail": False,
                },
            }
        )
        executor = MulticoreExecutor(
            workspace=LocalWorkspace(self.TEST_DIR),
            parallelism=parallelism,
            include_package=["test_fixtures.package.steps"],
        )
        executor.execute_step_graph(step_graph)
        # Only step3 (independent of the failing step1) ends up cached.
        assert len(executor.workspace.step_cache) == 1
    @pytest.mark.parametrize("parallelism", [1, 2, 3])
    def test_failing_step_with_further_downstream_task(self, parallelism):
        """A failure must propagate through a whole dependency chain.

        step1 fails; step2 depends on step1 and step3 depends on step2, so
        nothing at all gets cached.
        """
        step_graph = StepGraph.from_params(
            {
                "step1": {
                    "type": "sleep-print-maybe-fail",
                    "string": "string_to_pass_down",
                    "seconds": 0,
                    "fail": True,
                },
                "step2": {
                    # First link of the chain hanging off the failing step.
                    "type": "sleep-print-maybe-fail",
                    "string": {"type": "ref", "ref": "step1"},
                    "seconds": 0,
                    "fail": False,
                },
                "step3": {
                    # Transitively depends on the failing step via step2.
                    "type": "sleep-print-maybe-fail",
                    "string": {"type": "ref", "ref": "step2"},
                    "seconds": 0,
                    "fail": False,
                },
            }
        )
        executor = MulticoreExecutor(
            workspace=LocalWorkspace(self.TEST_DIR),
            parallelism=parallelism,
            include_package=["test_fixtures.package.steps"],
        )
        executor.execute_step_graph(step_graph)
        # The entire chain is skipped, so the cache stays empty.
        assert len(executor.workspace.step_cache) == 0
    def test_uncacheable_failing_step_no_downstream_task(self):
        """Same as the failing-leaf case, but the failing step is uncacheable.

        step3 both fails and has ``cache_results: False``; the two successful
        steps are still cached.
        """
        step_graph = StepGraph.from_params(
            {
                "step1": {
                    "type": "sleep-print-maybe-fail",
                    "string": "string_to_pass_down",
                    "seconds": 0,
                    "fail": False,
                },
                "step2": {
                    "type": "sleep-print-maybe-fail",
                    "string": {"type": "ref", "ref": "step1"},
                    "seconds": 0,
                    "fail": False,
                },
                "step3": {
                    "type": "sleep-print-maybe-fail",
                    "string": "This is going to fail!",
                    "seconds": 0,
                    "fail": True,
                    # Failing AND uncacheable -- would not be cached either way.
                    "cache_results": False,
                },
            }
        )
        executor = MulticoreExecutor(
            workspace=LocalWorkspace(self.TEST_DIR),
            parallelism=2,
            include_package=["test_fixtures.package.steps"],
        )
        executor.execute_step_graph(step_graph)
        # step1 and step2 succeed and are cached.
        assert len(executor.workspace.step_cache) == 2
    def test_uncacheable_failing_step_with_downstream_task(self):
        """An uncacheable failing step still blocks its dependents.

        step1 fails (and is uncacheable); step2 depends on it and is skipped.
        Only the independent step3 is cached.
        """
        step_graph = StepGraph.from_params(
            {
                "step1": {
                    "type": "sleep-print-maybe-fail",
                    "string": "string_to_pass_down",
                    "seconds": 0,
                    "fail": True,
                    "cache_results": False,
                },
                "step2": {
                    # Skipped because its upstream step1 fails.
                    "type": "sleep-print-maybe-fail",
                    "string": {"type": "ref", "ref": "step1"},
                    "seconds": 0,
                    "fail": False,
                },
                "step3": {
                    "type": "sleep-print-maybe-fail",
                    "string": "This is going to fail!",
                    "seconds": 0,
                    "fail": False,
                },
            }
        )
        executor = MulticoreExecutor(
            workspace=LocalWorkspace(self.TEST_DIR),
            parallelism=2,
            include_package=["test_fixtures.package.steps"],
        )
        executor.execute_step_graph(step_graph)
        # Only step3 runs to completion and is cached.
        assert len(executor.workspace.step_cache) == 1
    @pytest.mark.parametrize("parallelism", [1, 2, 3])
    def test_steps_with_their_own_multiprocessing(self, parallelism):
        """Steps that spawn their own worker processes must still execute and
        cache correctly under the multicore executor at every parallelism level."""
        step_graph = StepGraph.from_params(
            {
                "step1": {"type": "multiprocessing_step", "num_proc": 2},
                "step2": {"type": "multiprocessing_step", "num_proc": 3},
                "step3": {"type": "multiprocessing_step", "num_proc": 1},
            }
        )
        executor = MulticoreExecutor(
            workspace=LocalWorkspace(self.TEST_DIR),
            parallelism=parallelism,
            include_package=["test_fixtures.package.steps"],
        )
        executor.execute_step_graph(step_graph)
        # All three independent steps succeed and are cached.
        assert len(executor.workspace.step_cache) == 3
| 8,509 | 2,438 |
import cv2
import numpy as np
# Haar-cascade frontal-face detector; the XML model file is resolved relative
# to the current working directory and must exist there.
face_classifier=cv2.CascadeClassifier('HaarCascade/haarcascade_frontalface_default.xml')
def face_extractor(img):
    """Return the first detected face region of ``img``, or None.

    Runs the Haar cascade on a grayscale copy of the frame and crops the
    first detection from the original (color) frame.
    """
    gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
    faces = face_classifier.detectMultiScale(gray, 1.3, 5)
    # BUG FIX: the original used ``if faces is ()`` -- an identity comparison
    # against a literal (a SyntaxWarning on Python 3.8+) that is unreliable:
    # detectMultiScale returns an empty tuple OR a numpy array depending on
    # the result, so test emptiness by length instead.
    if len(faces) == 0:
        return None
    # Original looped but returned on the first iteration; take faces[0].
    x, y, w, h = faces[0]
    return img[y:y + h, x:x + w]
# Capture frames from the default webcam and save up to 100 cropped, grayscale
# 200x200 face samples to ./faces/, stopping early when Enter (key 13) is hit.
cap = cv2.VideoCapture(0)
count = 0
while True:
    ret, frame = cap.read()
    # BUG FIX: the original ignored ``ret``; a failed read hands None to
    # cvtColor and crashes. Bail out cleanly instead.
    if not ret:
        print("Failed to read frame")
        break
    # BUG FIX: the original called face_extractor(frame) twice per frame,
    # running the (expensive) cascade detection twice. Detect once and reuse.
    face = face_extractor(frame)
    if face is not None:
        count += 1
        face = cv2.resize(face, (200, 200))
        face = cv2.cvtColor(face, cv2.COLOR_BGR2GRAY)
        file_name_path = 'faces/user' + str(count) + '.jpg'
        cv2.imwrite(file_name_path, face)
        cv2.putText(face, str(count), (50, 50), cv2.FONT_HERSHEY_COMPLEX, 1, (0, 255, 0), 2)
        cv2.imshow('Face Cropper', face)
    else:
        print("Face Not Found")
    if cv2.waitKey(1) == 13 or count == 100:
        break
cap.release()
cv2.destroyAllWindows()
print('Collecting samples complete!!!')
import time
import torch
from torch import nn
from transformers import GPT2Tokenizer, GPT2LMHeadModel, GPT2Config
import lightseq.inference as lsi
from lightseq.training.ops.pytorch.quantization import (
qat_mode,
QuantLinear,
TensorQuantizer,
weight_quant_config,
)
from lightseq.training.ops.pytorch.torch_transformer_layers import (
TransformerDecoderLayer,
)
from export.util import parse_args
def ls_gpt2(model, inputs, generation_method="topk"):
    """Run the LightSeq GPT-2 model and time the call (CUDA-synchronized).

    Returns (results, elapsed_seconds); results is None for an unknown
    generation_method.
    """
    torch.cuda.synchronize()
    begin = time.perf_counter()
    if generation_method in ("topk", "topp"):
        output = model.sample(inputs)
    elif generation_method == "ppl":
        output = model.ppl(inputs)[0]
    else:
        output = None
    torch.cuda.synchronize()
    return output, time.perf_counter() - begin
def compute_hf_ppl(model, inputs):
    """Score ``inputs`` under a HuggingFace LM with a sliding window.

    Slides a ``max_length``-token window over the sequence in steps of
    ``stride`` (equal here, so windows do not overlap) and accumulates the
    negative log-likelihood of each window; context-only positions are
    masked with -100 so the loss ignores them.

    NOTE(review): no exp() is applied at the end, so the returned value is
    the average NLL per token rather than a true perplexity -- kept as-is.
    """
    max_length = 512
    stride = 512
    end_loc = 0
    nlls = []
    for i in range(0, inputs.size(1), stride):
        begin_loc = max(i + stride - max_length, 0)
        end_loc = min(i + stride, inputs.size(1))
        trg_len = end_loc - i  # number of tokens actually scored this window
        input_ids = inputs[:, begin_loc:end_loc].to("cuda:0")
        target_ids = input_ids.clone()
        # -100 is the HF ignore-index: mask everything before the target span.
        target_ids[:, :-trg_len] = -100
        with torch.no_grad():
            outputs = model(input_ids, labels=target_ids)
            # outputs[0] is the mean loss; rescale to a sum over trg_len tokens.
            neg_log_likelihood = outputs[0] * trg_len
        nlls.append(neg_log_likelihood)
    ppl = torch.stack(nlls).sum() / end_loc
    return ppl.cpu().numpy()
def hf_gpt2(model, inputs, tokenizer, generation_method="topk"):
    """Run the HuggingFace GPT-2 model and time the call (CUDA-synchronized).

    Returns (results, elapsed_seconds); results is None for an unknown
    generation_method.
    """
    inputs = inputs.to("cuda:0")
    torch.cuda.synchronize()
    begin = time.perf_counter()
    output = None
    if generation_method in ("topk", "topp"):
        output = model.generate(
            inputs, max_length=50, pad_token_id=tokenizer.eos_token_id
        )
    elif generation_method == "ppl":
        output = compute_hf_ppl(model, inputs)
    torch.cuda.synchronize()
    return output, time.perf_counter() - begin
def ls_generate(model, tokenizer, inputs):
    """Generate with LightSeq, then print the timing and decoded sentences."""
    print("=========lightseq=========")
    print("lightseq generating...")
    generated_ids, elapsed = ls_gpt2(model, inputs)
    decoded = tokenizer.batch_decode(generated_ids, skip_special_tokens=True)
    print(f"lightseq time: {elapsed}s")
    print("lightseq results:")
    for sentence in decoded:
        print(sentence)
def hf_generate(model, tokenizer, inputs):
    """Generate with HuggingFace, then print the timing and decoded sentences."""
    print("=========huggingface=========")
    print("huggingface generating...")
    generated_ids, elapsed = hf_gpt2(model, inputs, tokenizer)
    decoded = tokenizer.batch_decode(generated_ids, skip_special_tokens=True)
    print(f"huggingface time: {elapsed}s")
    print("huggingface results:")
    for sentence in decoded:
        print(sentence)
def ls_ppl(model, tokenizer, inputs):
    """Compute and print perplexity with LightSeq, with timing.

    ``tokenizer`` is unused here but kept for signature parity with hf_ppl.
    """
    print("=========lightseq=========")
    print("lightseq calculating ppl...")
    # FIX: renamed the local result variable -- the original named it
    # ``ls_ppl``, shadowing this function's own name inside its body.
    ppl_value, elapsed = ls_gpt2(model, inputs, "ppl")
    print(f"lightseq time: {elapsed}s")
    print("lightseq results:")
    print(ppl_value)
def hf_ppl(model, tokenizer, inputs):
    """Compute and print perplexity with HuggingFace, with timing."""
    print("=========huggingface=========")
    print("huggingface calculating ppl...")
    # FIX: renamed the local result variable -- the original named it
    # ``hf_ppl``, shadowing this function's own name inside its body.
    ppl_value, elapsed = hf_gpt2(model, inputs, tokenizer, "ppl")
    print(f"huggingface time: {elapsed}s")
    print("huggingface results:")
    print(ppl_value)
def warmup(
    ls_tokenizer, hf_tokenizer, ls_model, hf_model, sentences, generation_method
):
    """Run one throwaway pass so later timings exclude startup/JIT costs."""
    ls_inputs = ls_tokenizer(sentences, return_tensors="pt", padding=True)["input_ids"]
    hf_inputs = hf_tokenizer(sentences, return_tensors="pt", padding=True)["input_ids"]
    if generation_method in ("topk", "topp"):
        ls_generate(ls_model, ls_tokenizer, ls_inputs)
        # hf_generate(hf_model, hf_tokenizer, hf_inputs)
    elif generation_method == "ppl":
        ls_ppl(ls_model, ls_tokenizer, ls_inputs)
        hf_ppl(hf_model, hf_tokenizer, hf_inputs)
class GptEmbedding(nn.Embedding):
    """Token embedding that fake-quantizes its output for QAT."""

    def __init__(self, *args, **kwargs):
        super(GptEmbedding, self).__init__(*args, **kwargs)
        # Quantizer configured with LightSeq's weight-quantization settings;
        # also shared with the tied LM head (see inject_ls_layer).
        self.emb_quant = TensorQuantizer(weight_quant_config)

    def forward(self, input_ids):
        x = super(GptEmbedding, self).forward(input_ids)
        x = self.emb_quant(x)
        return x
def gen_gpt_enc_config(config):
    """Translate a HuggingFace GPT2Config into a LightSeq decoder-layer config.

    The layer is used as a GPT (decoder-only) block: pre-LayerNorm, GELU,
    fp16, 4x FFN expansion, and no cross-attention.
    """
    gpt_enc_config = TransformerDecoderLayer.get_config(
        max_batch_tokens=8192,  # upper bound on tokens per batch
        max_seq_len=config.max_position_embeddings,
        hidden_size=config.hidden_size,
        intermediate_size=4 * config.hidden_size,
        nhead=config.num_attention_heads,
        attn_prob_dropout_ratio=config.attn_pdrop,
        activation_dropout_ratio=config.resid_pdrop,
        hidden_dropout_ratio=config.resid_pdrop,
        pre_layer_norm=True,
        fp16=True,
        local_rank=0,
        nlayer=config.num_hidden_layers,
        activation_fn="gelu",
        has_cross_attn=False,  # decoder-only: no encoder to attend to
    )
    return gpt_enc_config
class LSHFGptEncoderLayer(TransformerDecoderLayer):
    """Adapter letting a LightSeq decoder layer stand in for a HF GPT-2 block."""

    def __init__(self, *args, **kwargs):
        super(LSHFGptEncoderLayer, self).__init__(*args, **kwargs)

    def forward(self, hidden_states, attention_mask=None, *args, **kwargs):
        # HF passes a broadcastable (extra-dim) mask; LightSeq expects a
        # (batch, seq) mask, so squeeze the singleton dims -- or use an
        # all-zeros (nothing masked) tensor when HF passes no mask.
        if attention_mask is not None:
            ls_attention_mask = attention_mask.squeeze()
        else:
            ls_attention_mask = torch.zeros(hidden_states.size()[:2])
        output = super().forward(hidden_states, ls_attention_mask)
        return output
def inject_ls_layer(model, config):
    """Replace HF GPT-2 modules with quantized LightSeq equivalents, in place.

    Swaps the token embedding, every transformer block, and the LM head;
    all swapped modules are switched into quantization-aware-training mode.
    """
    model.transformer.wte = GptEmbedding(config.vocab_size, config.hidden_size)
    model.transformer.wte.apply(qat_mode)
    for i in range(config.num_hidden_layers):
        gpt_enc_config = gen_gpt_enc_config(config)
        model.transformer.h[i] = LSHFGptEncoderLayer(gpt_enc_config).cuda()
        model.transformer.h[i].apply(qat_mode)
    # Tie the quantized LM head to the embedding's weight AND its quantizer,
    # mirroring GPT-2's usual weight tying.
    q_lm_head = QuantLinear(config.n_embd, config.vocab_size, bias=False)
    q_lm_head.weight = model.transformer.wte.weight
    q_lm_head.weight_quant = model.transformer.wte.emb_quant
    model.lm_head = q_lm_head
def main():
    """Compare LightSeq's quantized GPT-2 with HuggingFace on sampling or ppl.

    Loads both models, warms them up once, then runs the requested
    generation method on a fixed batch of identical prompts and prints
    timings plus outputs for each backend.
    """
    args = parse_args()
    # Default any unknown method to top-k sampling.
    if args.generation_method not in ["topk", "topp", "ppl"]:
        args.generation_method = "topk"
    # The quantized torch checkpoint sits next to the lightseq model file,
    # with the extension replaced by ".bin".
    model_name = ".".join(args.model.split(".")[:-1])
    ckpt_path = f"{model_name}.bin"
    print("initializing gpt2 config...")
    config = GPT2Config.from_pretrained("gpt2")
    print("initializing gpt2 tokenizer...")
    ls_tokenizer = GPT2Tokenizer.from_pretrained("gpt2")
    # lightseq use len(tokenizer) as pad_token in default
    ls_tokenizer.add_special_tokens({"pad_token": "[PAD]"})
    print(f"lightseq tokenizer pad token id: {ls_tokenizer.pad_token_id}")
    hf_tokenizer = GPT2Tokenizer.from_pretrained("gpt2")
    # use EOS as PAD for huggingface to avoid warning according to https://huggingface.co/blog/how-to-generate while avoid reshaping the model embedding
    hf_tokenizer.pad_token = hf_tokenizer.eos_token
    print(f"huggingface tokenizer pad token id: {hf_tokenizer.pad_token_id}")
    print("creating huggingface model...")
    hf_model = GPT2LMHeadModel.from_pretrained("gpt2", config=config)
    # Swap in the quantized LightSeq modules before loading the checkpoint.
    inject_ls_layer(hf_model, config)
    state_dict = torch.load(ckpt_path, map_location="cpu")
    hf_model.load_state_dict(state_dict, strict=False)
    hf_model.to("cuda:0")
    hf_model.eval()
    print("creating lightseq model...")
    ls_model = lsi.QuantGpt(args.model, max_batch_size=16)
    # lightseq gpt perplexity supports batch infer with different lengths,
    # but sampling doesn't support
    sentences = [
        "I love you, but you say that",
        "I love you, but you say that",
        "I love you, but you say that",
        "I love you, but you say that",
    ]
    print("====================START warmup====================")
    warmup(
        ls_tokenizer,
        hf_tokenizer,
        ls_model,
        hf_model,
        sentences,
        args.generation_method,
    )
    print("====================END warmup====================")
    print("tokenizing the sentences...")
    ls_inputs = ls_tokenizer(sentences, return_tensors="pt", padding=True)["input_ids"]
    hf_inputs = hf_tokenizer(sentences, return_tensors="pt", padding=True)["input_ids"]
    if args.generation_method == "topk" or args.generation_method == "topp":
        ls_generate(ls_model, ls_tokenizer, ls_inputs)
        # hf_generate(hf_model, hf_tokenizer, hf_inputs)
    elif args.generation_method == "ppl":
        ls_ppl(ls_model, ls_tokenizer, ls_inputs)
        hf_ppl(hf_model, hf_tokenizer, hf_inputs)


if __name__ == "__main__":
    main()
| 8,611 | 3,083 |
# Copyright (C) 2015-2021 Regents of the University of California
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import Any, Optional, Dict
from gunicorn.app.base import BaseApplication # type: ignore
class GunicornApplication(BaseApplication):  # type: ignore
    """
    An entry point to integrate a Gunicorn WSGI server in Python. To start a
    WSGI application with callable `app`, run the following code:

        GunicornApplication(app, options={
            ...
        }).run()

    For more details, see: https://docs.gunicorn.org/en/latest/custom.html
    """
    def __init__(self, app: object, options: Optional[Dict[str, Any]] = None):
        self.options = options or {}
        self.application = app
        super().__init__()

    def init(self, *args: Any) -> None:
        # Required by BaseApplication's interface; all configuration happens
        # in load_config() instead.
        pass

    def load_config(self) -> None:
        # Copy only settings gunicorn recognizes, skipping None values so
        # gunicorn's own defaults remain in effect.
        for key, value in self.options.items():
            if key in self.cfg.settings and value is not None:
                self.cfg.set(key.lower(), value)

    def load(self) -> object:
        """Return the WSGI callable gunicorn should serve."""
        return self.application
def run_app(app: object, options: Optional[Dict[str, Any]] = None) -> None:
    """
    Run a Gunicorn WSGI server.
    """
    server = GunicornApplication(app, options=options)
    server.run()
| 1,748 | 516 |
"""
1. Clarification
2. Possible solutions
- Cheat
- Binary search II
3. Coding
4. Tests
"""
# T=O(n), S=O(1)
class Solution:
    def findMin(self, nums: List[int]) -> int:
        """Return the minimum of ``nums`` by linear scan.

        Correct even when the rotated-sorted-array precondition doesn't hold.

        Raises:
            ValueError: if ``nums`` is empty. (BUG FIX: the original returned
            ``int(-inf)``, which raised NameError -- ``inf`` was never
            imported -- and would raise OverflowError even if it had been.)
        """
        if not nums:
            raise ValueError("findMin() arg is an empty sequence")
        return min(nums)
# T=O(lgn), S=O(1)
class Solution:
    def findMin(self, nums: List[int]) -> int:
        """Return the minimum of a rotated sorted array in O(log n).

        Invariant: the minimum lies within nums[left..right]. When
        nums[mid] < nums[right], the segment mid..right is sorted, so the
        minimum is at mid or to its left; otherwise the rotation point is
        strictly to mid's right.

        Raises:
            ValueError: if ``nums`` is empty. (BUG FIX: the original
            evaluated ``int(-inf)``, which raised NameError/OverflowError.)
        """
        if not nums:
            raise ValueError("findMin() arg is an empty sequence")
        left, right = 0, len(nums) - 1
        while left < right:
            mid = left + (right - left) // 2
            if nums[mid] < nums[right]:
                right = mid
            else:
                left = mid + 1
        return nums[left]
| 623 | 223 |
import time
from unittest import TestCase
import grpc_testing
from grpc import StatusCode
from grpc.framework.foundation import logging_pool
from cowsay_client import CowsayClient
from cowsay_pb2 import DESCRIPTOR as COWSAY_DESCRIPTOR, QuoteRequest, QuoteResponse
from cowsay_pb2_grpc import CowsayStub
# Service descriptor used to look up method descriptors on the fake channels.
target_service = COWSAY_DESCRIPTOR.services_by_name['Cowsay']
class TestCowsayClient(TestCase):
    """Unit tests for CowsayClient driven through grpc_testing fake channels."""

    def setUp(self):
        # One worker is enough: each test drives a single client call, which
        # must run on its own thread because the fake channel blocks until
        # the test script terminates the RPC.
        self._client_execution_thread_pool = logging_pool.pool(1)
        self._fake_time = grpc_testing.strict_fake_time(time.time())
        self._real_time = grpc_testing.strict_real_time()
        self._fake_time_channel = grpc_testing.channel(COWSAY_DESCRIPTOR.services_by_name.values(), self._fake_time)
        self._real_time_channel = grpc_testing.channel(COWSAY_DESCRIPTOR.services_by_name.values(), self._real_time)

    def tearDown(self):
        self._client_execution_thread_pool.shutdown(wait=False)

    def test_get_quote(self):
        arguments = ('cow', 'foo')

        def run(scenario, channel):
            stub = CowsayStub(channel)
            client = CowsayClient(stub)
            return client.get_quote(*scenario)

        # Start the client call in the background; it blocks on the fake
        # channel until we terminate the RPC below.
        f = self._client_execution_thread_pool.submit(run, arguments, self._real_time_channel)
        invocation_metadata, request, rpc = self._real_time_channel.take_unary_unary(
            target_service.methods_by_name['GetQuote'])
        self.assertEqual(QuoteRequest(message='foo', animal=QuoteRequest.COW), request)
        # NOTE(review): presumably CowsayClient attaches the ('z', 'y')
        # metadata pair itself -- confirm against the client implementation.
        self.assertIn(('z', 'y'), invocation_metadata)
        rpc.send_initial_metadata([('abc', 'def')])
        rpc.terminate(QuoteResponse(output='foo2'), [('uvw', 'xyz')], StatusCode.OK, '')
        result = f.result()
        self.assertEqual('foo2', result)
| 1,766 | 586 |
# Build-time settings for the Swagger/OpenAPI server code generator.
SPEC = 'swagger.yaml'  # input API specification file
IMPLEMENTATION = 'flask'  # target server framework
OUTPUT = 'build'  # directory the generated code is written to
FLASK_SERVER_NAME = 'my_flask_server'  # package name for the generated server
| 102 | 47 |
# coding: utf-8
"""
Web API Swagger specification
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: 1.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import os
import sys
import unittest
import warnings
ABSPATH = os.path.abspath(os.path.realpath(os.path.dirname(__file__)) + "/..")
sys.path.append(ABSPATH)
import asposecellscloud
from asposecellscloud.rest import ApiException
from asposecellscloud.apis.cells_api import CellsApi
import AuthUtil
from asposecellscloud.models import Shape
# Process-wide CellsApi singleton, created lazily in setUp() so all tests
# share one authenticated client.
global_api = None
class TestCellsShapesApi(unittest.TestCase):
    """Integration tests for the CellsShapesApi shape endpoints.

    Each test uploads the Book1.xlsx fixture to the remote "PythonTest"
    folder via AuthUtil.Ready and asserts the API call returns code 200.
    """

    def setUp(self):
        # Silence ResourceWarnings from the underlying HTTP client sockets.
        warnings.simplefilter('ignore', ResourceWarning)
        # Authenticate once and reuse the client across every test.
        global global_api
        if global_api is None:
            global_api = asposecellscloud.apis.cells_api.CellsApi(AuthUtil.GetClientId(), AuthUtil.GetClientSecret(), "v3.0", AuthUtil.GetBaseUrl())
        self.api = global_api

    def tearDown(self):
        pass

    def test_cells_shapes_delete_worksheet_shape(self):
        """
        Test case for cells_shapes_delete_worksheet_shape

        Delete a shape in worksheet
        """
        name = 'Book1.xlsx'
        sheet_name = 'Sheet1'
        shapeindex = 0
        folder = "PythonTest"
        # Ready() uploads the workbook fixture to the remote folder.
        result = AuthUtil.Ready(self.api, name, folder)
        self.assertTrue(len(result.uploaded) > 0)
        result = self.api.cells_shapes_delete_worksheet_shape(name, sheet_name, shapeindex, folder=folder)
        self.assertEqual(result.code, 200)
        pass

    def test_cells_shapes_delete_worksheet_shapes(self):
        """
        Test case for cells_shapes_delete_worksheet_shapes

        delete all shapes in worksheet
        """
        name = 'Book1.xlsx'
        sheet_name = 'Sheet1'
        shapeindex = 0  # unused by this endpoint; kept from the generated stub
        folder = "PythonTest"
        result = AuthUtil.Ready(self.api, name, folder)
        self.assertTrue(len(result.uploaded) > 0)
        result = self.api.cells_shapes_delete_worksheet_shapes(name, sheet_name, folder=folder)
        self.assertEqual(result.code, 200)
        pass

    def test_cells_shapes_get_worksheet_shape(self):
        """
        Test case for cells_shapes_get_worksheet_shape

        Get worksheet shape
        """
        name = 'Book1.xlsx'
        sheet_name = 'Sheet1'
        shapeindex = 0
        folder = "PythonTest"
        result = AuthUtil.Ready(self.api, name, folder)
        self.assertTrue(len(result.uploaded) > 0)
        result = self.api.cells_shapes_get_worksheet_shape(name, sheet_name, shapeindex, folder=folder)
        self.assertEqual(result.code, 200)
        pass

    def test_cells_shapes_get_worksheet_shapes(self):
        """
        Test case for cells_shapes_get_worksheet_shapes

        Get worksheet shapes
        """
        name = 'Book1.xlsx'
        sheet_name = 'Sheet1'
        shapeindex = 0  # unused by this endpoint; kept from the generated stub
        folder = "PythonTest"
        result = AuthUtil.Ready(self.api, name, folder)
        self.assertTrue(len(result.uploaded) > 0)
        result = self.api.cells_shapes_get_worksheet_shapes(name, sheet_name, folder=folder)
        self.assertEqual(result.code, 200)
        pass

    def test_cells_shapes_post_worksheet_shape(self):
        """
        Test case for cells_shapes_post_worksheet_shape

        Update a shape in worksheet
        """
        name = 'Book1.xlsx'
        sheet_name = 'Sheet1'
        shapeindex = 0
        dto = Shape()
        dto.lower_right_column = 10
        folder = "PythonTest"
        result = AuthUtil.Ready(self.api, name, folder)
        self.assertTrue(len(result.uploaded) > 0)
        result = self.api.cells_shapes_post_worksheet_shape(name, sheet_name, shapeindex, dto=dto, folder=folder)
        self.assertEqual(result.code, 200)
        pass

    def test_cells_shapes_put_worksheet_shape(self):
        """
        Test case for cells_shapes_put_worksheet_shape

        Add shape in worksheet
        """
        name = 'Book1.xlsx'
        sheet_name = 'Sheet1'
        drawingType = "button"
        upperLeftRow = 1
        upperLeftColumn = 1
        top = 10
        left = 10
        width = 100
        height = 90
        folder = "PythonTest"
        result = AuthUtil.Ready(self.api, name, folder)
        self.assertTrue(len(result.uploaded) > 0)
        result = self.api.cells_shapes_put_worksheet_shape(name, sheet_name, drawing_type=drawingType, upper_left_row=upperLeftRow, upper_left_column=upperLeftColumn, top=top, left=left, width=width, height=height, folder=folder)
        self.assertEqual(result.code, 200)
        pass
# Allow the test module to be run directly.
if __name__ == '__main__':
    unittest.main()
| 4,792 | 1,552 |
from dataclasses import dataclass
from typing import List, Optional

from source.device_manager.database import get_database_connection, release_database_connection
@dataclass
class ScriptInfo:
    # Script metadata without the source text -- cheap to list in bulk.
    id: int  # primary key in the ``scripts`` table
    name: str  # display name
    fileName: str  # original file name of the uploaded script
    user: int  # owning user's id (``userID`` column)
@dataclass
class Script:
    # Full script record, including its source text.
    id: int  # primary key in the ``scripts`` table
    name: str  # display name
    fileName: str  # original file name of the uploaded script
    user: int  # owning user's id (``userID`` column)
    data: str  # the script's source text
def get_user_scripts(user: int) -> List[Script]:
    """Return every script owned by ``user``, including the script source."""
    conn = get_database_connection()
    with conn:
        with conn.cursor() as cursor:
            cursor.execute(
                """
                select scripts.id, scripts.name,scripts.fileName,scripts.userID,scripts.data
                from scripts where userID=%s
                """, [user])
            # Each row maps positionally onto Script's five fields.
            result = [Script(*row) for row in cursor]
    release_database_connection(conn)
    return result
def get_user_scripts_info(user: int) -> List[ScriptInfo]:
    """Return metadata (no source text) for every script owned by ``user``."""
    conn = get_database_connection()
    with conn:
        with conn.cursor() as cursor:
            cursor.execute(
                """
                select scripts.id, scripts.name,scripts.fileName,scripts.userID
                from scripts where userID=%s
                """, [user])
            # Each row maps positionally onto ScriptInfo's four fields.
            result = [ScriptInfo(*row) for row in cursor]
    release_database_connection(conn)
    return result
def get_user_script(script_id: int) -> Optional[Script]:
    """Fetch a single script (with source) by id.

    Returns None when no row matches. (BUG FIX: the original subscripted
    ``fetchone()``'s result unconditionally, so a missing id crashed with
    TypeError instead of signalling "not found".)
    """
    script: Optional[Script] = None
    conn = get_database_connection()
    with conn:
        with conn.cursor() as cursor:
            cursor.execute(
                """
                select scripts.id, scripts.name,scripts.fileName,scripts.userID,scripts.data
                from scripts where id=%s
                """, [script_id])
            result = cursor.fetchone()
            if result is not None:
                script = Script(result[0], result[1], result[2], result[3],
                                result[4])
    release_database_connection(conn)
    return script
def get_user_script_info(script_id: int) -> Optional[ScriptInfo]:
    """Fetch a single script's metadata by id.

    Returns None when no row matches. (BUG FIX: the original subscripted
    ``fetchone()``'s result unconditionally, so a missing id crashed with
    TypeError instead of signalling "not found".)
    """
    script_info: Optional[ScriptInfo] = None
    conn = get_database_connection()
    with conn:
        with conn.cursor() as cursor:
            cursor.execute(
                """
                select scripts.id, scripts.name,scripts.fileName,scripts.userID
                from scripts where id=%s
                """, [script_id])
            result = cursor.fetchone()
            if result is not None:
                script_info = ScriptInfo(result[0], result[1], result[2], result[3])
    release_database_connection(conn)
    return script_info
def create_user_script(name: str, file_name: str, user: int, data: str) -> int:
    """Insert a new script row and return its generated primary key."""
    new_id = -1
    conn = get_database_connection()
    with conn:
        with conn.cursor() as cursor:
            cursor.execute(
                'insert into scripts values (default,%s,%s,%s,%s) returning id',
                [name, file_name, user, data])
            # RETURNING id yields exactly one row with the new key.
            (new_id,) = cursor.fetchone()
    release_database_connection(conn)
    return new_id
def set_user_script_info(script_id: int, name: str, file_name: str,
                         user_id: int):
    """Update a script's metadata (name, file name, owner), leaving its data intact."""
    sql = 'update scripts set name=%s, fileName=%s, userID=%s where id=%s'
    params = [name, file_name, user_id, script_id]
    conn = get_database_connection()
    with conn:
        with conn.cursor() as cursor:
            cursor.execute(sql, params)
    release_database_connection(conn)
def set_user_script(script_id: int, name: str, file_name: str, user_id: int,
                    data: str):
    """Update a script's metadata and its source text in one statement."""
    sql = 'update scripts set name=%s, fileName=%s, userID=%s, data=%s where id=%s'
    params = [name, file_name, user_id, data, script_id]
    conn = get_database_connection()
    with conn:
        with conn.cursor() as cursor:
            cursor.execute(sql, params)
    release_database_connection(conn)
def delete_user_script(script_id: int):
    """Delete the script row with the given id (no-op when absent)."""
    conn = get_database_connection()
    with conn:
        with conn.cursor() as cursor:
            cursor.execute('delete from scripts where id=%s', [script_id])
    release_database_connection(conn)
| 4,088 | 1,149 |
#! /usr/bin/env python3
import sys
import math
import argparse as ap
from json import dumps as jdumps
from random import choices
class LevelNotFoundException(Exception):
    """Raised when a taxonomic level other than 'species' or 'genus' is requested."""
    pass
def checkLevel(taxon, level):
    """Return True when ``taxon`` is annotated exactly at ``level``.

    A species-level taxon carries 's__' but no strain marker 't__'; a
    genus-level taxon carries 'g__' but no species marker 's__'.
    Raises LevelNotFoundException for any other level.
    """
    if level == 'species':
        return 's__' in taxon and 't__' not in taxon
    if level == 'genus':
        return 'g__' in taxon and 's__' not in taxon
    raise LevelNotFoundException()
class Sample:
    """Taxonomic abundance profile for one tool at one level, with diversity metrics.

    ``abunds`` maps taxon name -> abundance (read count or proportion,
    depending on the producing tool).
    """

    def __init__(self, tool, level):
        self.tool = tool
        self.level = level
        self.abunds = {}
        self._total = None  # lazily computed sum of abundances (see total())

    def addLine(self, line):
        """Parse one '<taxon> <abundance>' line, keeping only taxa at our level."""
        taxon, abund = line.split()
        if checkLevel(taxon, self.level):
            self.abunds[taxon] = float(abund)

    @classmethod
    def parseMPA(ctype, tool, mpaFile, level):
        """Build a Sample from an MPA-format profile file.

        NOTE(review): the first parameter of a classmethod is conventionally
        named ``cls``; here it is ``ctype`` and unused (Sample is hard-coded).
        """
        sample = Sample(tool, level)
        with open(mpaFile) as mF:
            for line in mF:
                sample.addLine(line)
        return sample

    def subset(self, n):
        """Return a new Sample of ``n`` reads drawn (with replacement) from this one.

        Returns ``self`` unchanged when n equals the current total -- callers
        rely on that identity to detect the full-depth case.
        """
        if n == self.total():
            return self
        # Cumulative breakpoints over the abundances: read index ``ind``
        # belongs to the i-th taxon iff brkpoints[i] <= ind < brkpoints[i+1].
        brkpoints = [0]
        rmap = {}
        for i, (key, val) in enumerate(self.abunds.items()):
            brkpoints.append(brkpoints[i] + val)
            rmap[i] = key
        i = 0
        outAbunds = {}
        indices = range(int(self.total()))
        # Sorted so the breakpoint pointer only ever advances forward.
        indices = sorted(choices(indices, k=n))
        for ind in indices:
            while ind >= brkpoints[i + 1]:
                i += 1
            key = rmap[i]
            try:
                outAbunds[key] += 1
            except KeyError:
                outAbunds[key] = 1
        outSamp = Sample(self.tool, self.level)
        outSamp.abunds = outAbunds
        return outSamp

    def total(self):
        """Total abundance, cached after the first call."""
        if self._total is None:
            self._total = sum(self.abunds.values())
        return self._total

    def richness(self):
        """Number of distinct taxa observed."""
        return len(self.abunds)

    def shannonIndex(self):
        """Shannon diversity H = -sum(p * ln p); returned as a positive value."""
        H = 0
        for count in self.abunds.values():
            p = count / self.total()
            assert p <= 1
            H += p * math.log(p)
        if H < 0:
            H *= -1
        return H

    def ginisimpson(self):
        """Gini-Simpson index: 1 - sum(p^2)."""
        H = 0
        for count in self.abunds.values():
            p = count / self.total()
            assert p <= 1
            H += p * p
        H = 1 - H
        return H

    def chao1(self):
        """Chao1 richness estimate: observed + singletons^2 / (2 * doubletons)."""
        sings, doubs = 0, 1  # give doubles a pseudocount to avoid div by zero
        for val in self.abunds.values():
            if val == 1:
                sings += 1
            elif val == 2:
                doubs += 1
        est = (sings * sings) / (2 * doubs)
        return self.richness() + est
def getSubsets(N):
    """Return rarefaction depths [1k, 5k, 10k, 100k, 500k, 1M, 10M] truncated at N.

    Depths below N are kept; the first depth >= N is replaced by N itself and
    the list ends there. When N exceeds every depth, N itself is NOT appended
    (quirk of the original, preserved).
    """
    depths = [1000 * k for k in (1, 5, 10, 100, 500, 1000, 10 * 1000)]
    chosen = []
    for depth in depths:
        if depth < N:
            chosen.append(depth)
        else:
            chosen.append(N)
            break
    return chosen
def handleCounts(tool, fname):
    """Compute diversity metrics for a counts-based profile (e.g. kraken).

    Metrics are computed at species and genus level, once per rarefaction
    depth from getSubsets(); the full-depth entry is keyed 'all_reads'.
    Chao1 appears here but not in handleProportions() because it is only
    meaningful for integer counts.
    """
    obj = {
        'species': {
            'richness': {},
            'shannon_index': {},
            'gini-simpson': {},
            'chao1': {}
        },
        'genus': {
            'richness': {},
            'shannon_index': {},
            'gini-simpson': {},
            'chao1': {}
        }
    }
    for level in obj.keys():
        sample = Sample.parseMPA(tool, fname, level)
        for subsetSize in getSubsets(sample.total()):
            subsample = sample.subset(subsetSize)
            key = str(subsetSize)
            # subset() returns the very same object at full depth, so this
            # (identity-based) equality detects the 'all_reads' case.
            if subsample == sample:
                key = 'all_reads'
            obj[level]['shannon_index'][key] = subsample.shannonIndex()
            obj[level]['richness'][key] = subsample.richness()
            obj[level]['gini-simpson'][key] = subsample.ginisimpson()
            obj[level]['chao1'][key] = subsample.chao1()
    return obj
def handleProportions(tool, fname):
    """Compute diversity metrics for a proportions-based profile (e.g. metaphlan2).

    Proportions cannot be rarefied or Chao1-estimated, so only the
    full-depth ('all_reads') richness, Shannon, and Gini-Simpson values
    are produced at species and genus level.
    """
    obj = {
        'species': {
            'richness': {},
            'shannon_index': {},
            'gini-simpson': {}
        },
        'genus': {
            'richness': {},
            'shannon_index': {},
            'gini-simpson': {}
        }
    }
    for level in obj.keys():
        sample = Sample.parseMPA(tool, fname, level)
        key = 'all_reads'
        obj[level]['richness'][key] = sample.richness()
        obj[level]['shannon_index'][key] = sample.shannonIndex()
        obj[level]['gini-simpson'][key] = sample.ginisimpson()
    return obj
def main():
    """Compute diversity metrics for each 'tool,file' pair and print JSON to stdout."""
    args = parseArgs()
    outobj = {}
    for pair in args.mpa_files:
        tool, mpaFile = pair.split(',')
        tool_key = tool.lower()
        if tool_key == 'kraken':
            outobj['kraken'] = handleCounts(tool, mpaFile)
        elif tool_key == 'metaphlan2':
            outobj['metaphlan2'] = handleProportions(tool, mpaFile)
        else:
            sys.stderr.write('tool {} unsupported'.format(tool))
    sys.stdout.write(jdumps(outobj))
def parseArgs():
    """Parse CLI args: one or more positional 'tool_name,mpa_file' pairs."""
    parser = ap.ArgumentParser()
    parser.add_argument('mpa_files', nargs='+',
                        help='pairs of tool_name,mpa_file')
    return parser.parse_args()
# Script entry point.
if __name__ == '__main__':
    main()
| 5,140 | 1,677 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import image_collection.models
class Migration(migrations.Migration):
    """Replace imageslide's single ``link`` field with separate external and
    internal link fields.

    NOTE: RemoveField discards existing ``link`` values -- this migration is
    lossy and not cleanly reversible.
    """

    dependencies = [
        ('image_collection', '0003_auto_20160113_0445'),
    ]

    operations = [
        # Drop the old free-form link in favor of two explicit fields.
        migrations.RemoveField(
            model_name='imageslide',
            name='link',
        ),
        migrations.AddField(
            model_name='imageslide',
            name='external_link',
            field=models.URLField(help_text='E.g. "http://www.example.com/my-page/". Enter absolute URL, that the image should link to.', verbose_name='external link', blank=True),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='imageslide',
            name='internal_link',
            field=image_collection.models.RelativeURLField(help_text='E.g. "/my-page/". Enter slug of internal pager, that the image should link to.', verbose_name='internal link', blank=True),
            preserve_default=True,
        ),
    ]
| 1,061 | 310 |
# Copying Holly Grimm's solution https://github.com/hollygrimm/cs294-homework/blob/master/hw1/bc.py
# Copy and pasting and merging it into a copy of my behavior_cloner.py code.
import argparse
import pickle
import os
import sys
import tensorflow.compat.v1 as tf
import numpy as np
from sklearn.model_selection import train_test_split
import mlflow.tensorflow
import gym
from gym import wrappers
from tqdm import tqdm
#Imports copied from hollygrimm's solution
import logging
from hollygrimm_model import Model
# The following doesn't seem to work with the way Holly Grimm builds her tensorflow model.
# NOTE(review): autolog() is left enabled despite the caveat above; params and
# artifacts are still logged manually below -- confirm before removing.
mlflow.tensorflow.autolog()
def config_logging(log_file):
    """Configure the root logger to write DEBUG messages to ``log_file``.

    Any pre-existing file at ``log_file`` is deleted so each run starts with
    a fresh log. Returns the root logger.
    """
    if os.path.exists(log_file):
        os.remove(log_file)
    root = logging.getLogger()
    root.setLevel(logging.DEBUG)
    handler = logging.FileHandler(log_file)
    handler.setLevel(logging.DEBUG)
    handler.setFormatter(logging.Formatter('%(asctime)s - %(message)s'))
    root.addHandler(handler)
    return root
def create_model(session, obs_samples, num_observations, num_actions, logger, optimizer,
                 learning_rate, restore, checkpoint_dir):
    """Construct the policy Model, restoring a checkpoint or initializing fresh weights."""
    model = Model(obs_samples, num_observations, num_actions, checkpoint_dir, logger,
                  optimizer, learning_rate)
    if not restore:
        logger.info("Created model with fresh parameters")
        session.run(tf.global_variables_initializer())
    else:
        model.load(session)
    return model
def bc(expert_data_filename, env_name, restore, results_dir, max_timesteps=None,
       optimizer='adam', num_epochs=100, learning_rate=.001, batch_size=32, keep_prob=1,
       logger=None):
    """Train a behavior-cloning policy on expert rollouts and evaluate it.

    Args:
        expert_data_filename: pickle file with 'observations' and 'actions'.
        env_name: gym environment id to train/evaluate in.
        restore: when True, load an existing checkpoint instead of fresh weights.
        results_dir: directory for checkpoints, summaries, and the eval video.
        max_timesteps: per-episode step cap (defaults to the env's spec).
        logger: optional logger. BUG FIX: the original read a module-global
            ``logger`` that only exists when the file runs as a script, so
            importing and calling bc() raised NameError; we now fall back to
            this module's logger (which propagates to the configured root).

    Returns:
        (mean, std) of rewards over 10 evaluation episodes.
    """
    if logger is None:
        logger = logging.getLogger(__name__)
    # Reset TF env
    tf.reset_default_graph()
    # Create a gym env.
    env = gym.make(env_name)
    max_steps = max_timesteps or env.spec.max_episode_steps
    with open(expert_data_filename, 'rb') as f:
        data = pickle.loads(f.read())
    obs = np.stack(data['observations'], axis=0)
    actions = np.squeeze(np.stack(data['actions'], axis=0))
    x_train, x_test, y_train, y_test = train_test_split(obs, actions, test_size=0.2)
    num_samples = len(x_train)
    min_val_loss = sys.maxsize
    with tf.Session() as session:
        model = create_model(session, x_train, x_train.shape[1], y_train.shape[1], logger,
                             optimizer, learning_rate, restore, results_dir)
        file_writer = tf.summary.FileWriter(results_dir, session.graph)
        for epoch in tqdm(range(num_epochs)):
            # Shuffle observations and actions in lockstep each epoch.
            perm = np.random.permutation(x_train.shape[0])
            obs_samples = x_train[perm]
            action_samples = y_train[perm]
            loss = 0.
            for k in range(0, obs_samples.shape[0], batch_size):
                batch_loss, training_scalar = model.update(session, obs_samples[k:k + batch_size],
                                                           action_samples[k:k + batch_size],
                                                           keep_prob)
                loss += batch_loss
            file_writer.add_summary(training_scalar, epoch)
            min_val_loss, validation_scalar = validate(model, logger, session, x_test, y_test,
                                                       epoch, batch_size, min_val_loss, results_dir)
            file_writer.add_summary(validation_scalar, epoch)
            # Test the updated model after each epoch of training the DNN.
            new_exp = model.test_run(session, env, max_steps)
            tqdm.write(
                "Epoch %3d; Loss %f; Reward %f; Steps %d" % (epoch, loss / num_samples,
                                                             new_exp['reward'], new_exp['steps']))
        # Write a video of the final gym test results.
        env = wrappers.Monitor(env, results_dir, force=True)
        results = []
        for _ in tqdm(range(10)):
            results.append(model.test_run(session, env, max_steps)['reward'])
        logger.info("Reward mean and std dev with behavior cloning: %f(%f)" % (np.mean(results),
                                                                               np.std(results)))
        mlflow.log_params({"reward_mean": np.mean(results), "reward_std": np.std(results)})
        return np.mean(results), np.std(results)
def validate(model, logger, session, x_test, y_test, num_epoch, batch_size, min_loss, checkpoint_dir):
    """Run one validation pass; checkpoint the model whenever loss improves.

    Returns the (possibly updated) best loss and the validation summary.
    ``batch_size`` and ``checkpoint_dir`` are unused here (batched validation
    was removed; the model saves to its own configured directory).
    """
    loss, validation_scalar = model.validate(session, x_test, y_test)
    # Single full-batch validation: the "average" is just this one loss.
    new_loss = loss
    logger.info("Finished epoch %d, average validation loss = %f" % (num_epoch, new_loss))
    if new_loss < min_loss:  # Only save model if val loss dropped
        model.save(session)
        min_loss = new_loss
    return min_loss, validation_scalar
if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument('expert_run_id', type=str)
    parser.add_argument('--num_epochs', type=int, default=100)
    parser.add_argument('--batch_size', type=int, default=32)
    parser.add_argument("--restore", type=bool, default=False)
    args = parser.parse_args()
    # Record all CLI arguments on the mlflow run.
    for k, v in vars(args).items():
        mlflow.log_param(k, v)
    if not os.path.exists('results'):
        os.makedirs('results')
    log_file = os.path.join(os.getcwd(), 'results', 'train_out.log')
    logger = config_logging(log_file)
    #env_models = [('Ant-v1', 'data/Ant-v1_data_250_rollouts.pkl', 'experts/Ant-v1.pkl', 250),
    #              ('HalfCheetah-v1', 'data/HalfCheetah-v1_data_10_rollouts.pkl', 'experts/HalfCheetah-v1.pkl', 10),
    #              ('Hopper-v1', 'data/Hopper-v1_data_10_rollouts.pkl', 'experts/Hopper-v1.pkl', 10),
    #              ('Humanoid-v1', 'data/Humanoid-v1_data_250_rollouts.pkl', 'experts/Humanoid-v1.pkl', 250),
    #              ('Reacher-v1', 'data/Reacher-v1_data_250_rollouts.pkl', 'experts/Reacher-v1.pkl', 250),
    #              ('Walker2d-v1', 'data/Walker2d-v1_data_10_rollouts.pkl','experts/Walker2d-v1.pkl', 10)
    #              ]
    #for env_name, rollout_data, expert_policy_file, num_rollouts in env_models :
    # ===================================================
    # read in dataset from expert policy rollouts.
    # The expert rollout pickle and env name are fetched from a previous
    # mlflow run identified by expert_run_id.
    mlflow_c = mlflow.tracking.MlflowClient()
    expert_data_file_base = mlflow_c.download_artifacts(args.expert_run_id, "")
    expert_data_file_rel_path = mlflow_c.list_artifacts(args.expert_run_id, "expert_data_file")[
        0].path
    expert_data_filename = expert_data_file_base + "/" + expert_data_file_rel_path
    print("opening {0}".format(expert_data_filename))
    env_name = mlflow_c.get_run(args.expert_run_id).data.params["envname"]
    bc_results_dir = os.path.join(os.getcwd(), 'results', env_name, 'bc')
    bc_reward_mean, bc_reward_std = bc(expert_data_filename, env_name, args.restore, bc_results_dir,
                                       batch_size=args.batch_size, num_epochs=args.num_epochs)
    logger.info('Behavior Cloning mean & std rewards: %f(%f))' %
                (bc_reward_mean, bc_reward_std))
    print("logging 'results' directory to mlflow.")
    mlflow.log_artifacts('results')
    # Commenting out dagger for now.
    #da_results_dir = os.path.join(os.getcwd(), 'results', env_name, 'da')
    #if not os.path.exists(da_results_dir):
    #    os.makedirs(da_results_dir)
    #_,_, da_mean,da_std = dagger(rollout_data, expert_policy_file, env_name, args.restore, da_results_dir, num_rollouts)
    #results.append((env_name, ex_mean, ex_std, bc_mean, bc_std, da_mean, da_std))
    #for env_name, ex_mean, ex_std, bc_mean, bc_std, da_mean, da_std in results :
    #    logger.info('Env: %s, Expert: %f(%f), Behavior Cloning: %f(%f), Dagger: %f(%f)'%
    #                (env_name, ex_mean, ex_std, bc_mean, bc_std, da_mean, da_std))
from __future__ import print_function
import argparse
import torch
import torch.utils.data
from torch import nn, optim
from torch.nn import functional as F
from torchvision import datasets, transforms
from torchvision.utils import save_image
import numpy as np
# Command-line configuration. NOTE: parse_args() runs at import time, so this
# module is meant to be executed as a script, not imported.
parser = argparse.ArgumentParser(description='VAE MNIST Example')
parser.add_argument('--batch-size', type=int, default=128, metavar='N',
                    help='input batch size for training (default: 128)')
parser.add_argument('--epochs', type=int, default=10, metavar='N',
                    help='number of epochs to train (default: 10)')
# NOTE(review): despite the help text, this flag *disables* CUDA.
parser.add_argument('--no-cuda', action='store_true', default=False,
                    help='enables CUDA training')
parser.add_argument('--seed', type=int, default=1, metavar='S',
                    help='random seed (default: 1)')
parser.add_argument('--log-interval', type=int, default=10, metavar='N',
                    help='how many batches to wait before logging training status')
args = parser.parse_args()
args.cuda = not args.no_cuda and torch.cuda.is_available()
# Seed before building any model so weight initialisation is reproducible.
torch.manual_seed(args.seed)
device = torch.device("cuda" if args.cuda else "cpu")
# Worker/pinning options only help when data is fed to a CUDA device.
kwargs = {'num_workers': 1, 'pin_memory': True} if args.cuda else {}
# MNIST loaders; the train=True call downloads the dataset on first run.
train_loader = torch.utils.data.DataLoader(
    datasets.MNIST('./data', train=True, download=True,
                   transform=transforms.ToTensor()),
    batch_size=args.batch_size, shuffle=True, **kwargs)
test_loader = torch.utils.data.DataLoader(
    datasets.MNIST('./data', train=False, transform=transforms.ToTensor()),
    batch_size=args.batch_size, shuffle=False, **kwargs)
# Shorthand for F.normalize; not used in the code visible below.
norm = torch.nn.functional.normalize
class GLU(nn.Module):
    """Gated linear unit variant: a sigmoid gate multiplied by a ReLU branch.

    Note: unlike the canonical GLU, the value branch passes through a ReLU,
    so the output is always non-negative.
    """

    def __init__(self, c1, c2):
        super(GLU, self).__init__()
        self.s = nn.Linear(c1, c2)  # gate branch
        self.g = nn.Linear(c1, c2)  # value branch

    def forward(self, x):
        gate = torch.sigmoid(self.s(x))
        value = torch.relu(self.g(x))
        return gate * value
class Encoder(nn.Module):
    """Maps a flattened 28x28 image to 50 per-oscillator phases in (0, 1)."""

    def __init__(self):
        super(Encoder, self).__init__()
        self.fc1 = nn.Linear(784, 400)
        self.fc2 = nn.Linear(400, 50)

    def forward(self, x):
        hidden = torch.relu(self.fc1(x))
        return torch.sigmoid(self.fc2(hidden))
class Decoder(nn.Module):
    """Reconstructs a flattened 28x28 image from a 100-dim waveform sample."""

    def __init__(self):
        super(Decoder, self).__init__()
        self.fc1 = GLU(100, 400)
        self.fc2 = nn.Linear(400, 784)

    def forward(self, x):
        hidden = self.fc1(x)
        return torch.sigmoid(self.fc2(hidden))
class Key(nn.Module):
    """Maps a 10-dim one-hot class label to 50 oscillator amplitudes in (0, 1)."""

    def __init__(self):
        super(Key, self).__init__()
        self.fc1 = nn.Linear(10, 50)
        self.fc2 = nn.Linear(50, 50)

    def forward(self, x):
        hidden = torch.relu(self.fc1(x))
        return torch.sigmoid(self.fc2(hidden))
class VAE(nn.Module):
    """Oscillator-bank autoencoder.

    The label is mapped to 50 amplitudes, the image to 50 phases; a sum of 50
    sinusoids sampled at the 100 time points ``t`` is perturbed with Gaussian
    noise and decoded back into a flattened image.
    """

    def __init__(self):
        super(VAE, self).__init__()
        self.e = Encoder()
        self.d = Decoder()
        self.amplitude = Key()

    def forward(self, x, c, t):
        flat = x.view(-1, 784)
        batch = flat.shape[0]
        # Per-sample amplitudes (from the label) and phases (from the image),
        # broadcast across the 100 time samples.
        amp = self.amplitude(c).view(batch, 50, 1).repeat(1, 1, 100)
        phase = self.e(flat).view(batch, 50, 1).repeat(1, 1, 100)
        # Evaluate the 50 sinusoids at every time point and sum them.
        waves = torch.sin(2 * np.pi * amp * t + np.pi * phase)
        signal = waves.sum(dim=1).view(batch, 100)
        # Additive Gaussian noise before decoding.
        noisy = signal + torch.randn_like(signal)
        return self.d(noisy), amp, phase
# Instantiate the model and restore pretrained weights from disk.
model = VAE().to(device)
# NOTE(review): raises if 'checkpoints/mnist/fft_400.pt' is missing —
# presumably produced by a separate training script; confirm.
model.load_state_dict(torch.load('checkpoints/mnist/fft_400.pt'))
def test():
    """Generate 64 samples conditioned on digit class 4 and save them.

    Feeds random pixel noise plus a fixed one-hot label through the restored
    model and writes the reconstructions as an 8x8 image grid.
    """
    model.eval()
    with torch.no_grad():
        timesteps = torch.arange(100).type(torch.FloatTensor).to(device)
        labels = torch.zeros(64, 10).to(device)
        labels[:, 4] = 1  # condition every sample on digit "4"
        inputs = torch.rand(64, 1, 28, 28).to(device)
        reconstruction, _, _ = model(inputs, labels, timesteps)
        grid = reconstruction.view(64, 1, 28, 28)
        save_image(grid.cpu(),
                   'images/sample_4.png', nrow=8)


if __name__ == "__main__":
    test()
| 4,397 | 1,658 |
# Author: Gheorghe Postelnicu
from datetime import date
import pandas as pd
from io import BytesIO
from urllib.request import urlopen
class Yahoo(object):
    """Thin client for the (legacy) Yahoo Finance CSV download endpoints."""

    # Field name -> Yahoo query code.
    # Taken from http://www.jarloo.com/yahoo_finance/
    yahoo_query_params = {
        'ticker': 's',
        'average_daily_volume': 'a2',
        'dividend_yield': 'y',
        'dividend_per_share': 'd',
        'earnings_per_share': 'e',
        'est_eps_yr': 'e7',
        'est_eps_next_yr': 'e8',
        'ex_dividend_date': 'q',
        'market_cap': 'j1',
        'price_earnings_ratio': 'r',
        'short_ratio': 's7',
        'volume': 'v',
        '52w_low': 'j',
        '52w_high': 'k'
    }

    def __init__(self, chunk_size=500):
        # Symbols are fetched in chunks of this size to keep URLs short.
        self.chunk_size = chunk_size
        # BUGFIX: use a raw string — '\d' in a plain literal is an invalid
        # escape sequence (SyntaxWarning on modern Python).
        self.market_cap_pattern = r'(\d+[\.]\d+)([MB])'

    @staticmethod
    def _convert_market_cap(str_value):
        """Convert Yahoo market-cap strings like '1.5B' or '300M' to floats.

        Non-string inputs (e.g. NaN parsed by pandas) map to -1.0.
        """
        # BUGFIX: isinstance instead of type comparison (idiomatic, and
        # accepts str subclasses).
        if not isinstance(str_value, str):
            return -1.
        last_char = str_value[-1]
        if last_char in ('B', 'M'):
            base = float(str_value[:-1])
            multiplier = 10. ** 9 if last_char == 'B' else 10. ** 6
            return base * multiplier
        return float(str_value)

    def _fetch_fields(self, symbols, fields):
        """Download the requested quote field codes for ``symbols``.

        Returns a single DataFrame (no header) with one row per symbol,
        concatenated across the per-chunk requests.
        """
        def chunker(symbols_):
            # Yield successive slices of at most self.chunk_size symbols.
            i = 0
            while i < len(symbols_):
                count_chunk = min(self.chunk_size, len(symbols_) - i)
                yield symbols_[i:(i + count_chunk)]
                i += count_chunk

        dfs = []
        for chunk in chunker(symbols):
            request = 'http://download.finance.yahoo.com/d/quotes.csv?s={}&f={}'.format(','.join(chunk), fields)
            raw_dat = urlopen(request).read()
            df = pd.read_csv(BytesIO(raw_dat), header=None)
            dfs.append(df)
        return pd.concat(dfs)

    def batch_snapshot(self, tickers):
        """
        Retrieves financial information for a batch of stock symbols.

        Args:
            tickers (list<str>): list of stock symbols

        Returns:
            pandas.Dataframe: dataframe with one row per symbol.
        """
        ret = self._fetch_fields(tickers, ''.join(Yahoo.yahoo_query_params.values()))
        # Column order matches the field-code order used in the request
        # (dicts preserve insertion order). Materialize the keys view so the
        # assignment is a plain list.
        ret.columns = list(Yahoo.yahoo_query_params.keys())
        for col in ['ex_dividend_date']:
            ret[col] = pd.to_datetime(ret[col])
        ret['market_cap'] = [self._convert_market_cap(mc) for mc in ret.market_cap]
        return ret

    @staticmethod
    def _history_call(ticker, from_date, to_date, params):
        """Issue an ichart history request and parse the CSV response.

        Yahoo's API encodes the month as 0-based, hence ``month - 1``.
        """
        base_url = 'http://ichart.finance.yahoo.com/table.csv'
        params.update({'s': ticker,
                       'a': from_date.month - 1,
                       'b': from_date.day,
                       'c': from_date.year,
                       'd': to_date.month - 1,
                       'e': to_date.day,
                       'f': to_date.year
                       })
        url = '{}?{}'.format(base_url, '&'.join('{}={}'.format(k, params[k]) for k in params))
        raw_dat = urlopen(url).read()
        df = pd.read_csv(BytesIO(raw_dat), parse_dates=[0])
        return df

    def historic_close(self, tickers, from_date=date(2010, 1, 1), to_date=None, join_type='outer'):
        """
        Extracts the adjusted close for a set of tickers.

        Args:
            tickers (list(str)): stock symbol
            from_date (date): start date
            to_date (date): end date; defaults to today at call time
            join_type (str): type of join

        Returns:
            Dataframe indexed by date with one column by stock ticker.
        """
        # BUGFIX: the default used to be `to_date=date.today()`, which is
        # evaluated once at import and goes stale in long-running processes.
        if to_date is None:
            to_date = date.today()

        def fetch_adj_close(ticker, from_date_, to_date_):
            dat = self._single_historic_ohlc(ticker, from_date_, to_date_)
            dat['Date'] = pd.to_datetime(dat.Date, infer_datetime_format=True)
            dat.set_index('Date', inplace=True)
            dat.sort_index(inplace=True)
            ret = dat[['Adj Close']]
            ret.columns = [ticker]
            return ret

        dats = [fetch_adj_close(ticker, from_date_=from_date, to_date_=to_date) for ticker in tickers]
        return dats[0].join(dats[1:], how=join_type)

    def _single_historic_ohlc(self, ticker, from_date=date(2010, 1, 1), to_date=None):
        """Daily OHLC history for a single ticker."""
        if to_date is None:
            to_date = date.today()  # fresh per call, not frozen at import
        return self._history_call(ticker, from_date, to_date, {'g': 'd'})

    def historic_dividends(self, ticker, from_date=date(2010, 1, 1), to_date=None):
        """
        Extracts the dividend payout history for an individual stock.

        Args:
            ticker (str): stock symbol
            from_date (date): start date
            to_date (date): end date; defaults to today at call time

        Returns:
            pandas.DataFrame: dataframe with dates and dividends.
        """
        if to_date is None:
            to_date = date.today()
        return self._history_call(ticker, from_date, to_date, {'g': 'v'})
| 4,760 | 1,574 |
from django.db import migrations, models
class Migration(migrations.Migration):
    """Create the SelfRegistrationInvitation table used for SMS self-registration."""

    dependencies = [
        ('sms', '0001_initial'),
    ]

    operations = [
        migrations.CreateModel(
            name='SelfRegistrationInvitation',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('domain', models.CharField(max_length=126, db_index=True)),
                ('phone_number', models.CharField(max_length=30, db_index=True)),
                # Unique token embedded in the registration link sent by SMS.
                ('token', models.CharField(unique=True, max_length=126, db_index=True)),
                ('app_id', models.CharField(max_length=126, null=True)),
                ('expiration_date', models.DateField()),
                ('created_date', models.DateTimeField()),
                ('odk_url', models.CharField(max_length=126, null=True)),
                ('phone_type', models.CharField(max_length=20, null=True, choices=[('android', 'Android'), ('other', 'Other')])),
                # Set when the invitee completes registration; null until then.
                ('registered_date', models.DateTimeField(null=True)),
            ],
            options={
            },
            bases=(models.Model,),
        ),
    ]
| 1,192 | 334 |
"""
Definition of the :class:`PersonNameTestCase` class.
"""
from dicom_parser.data_elements.person_name import PersonName
from tests.test_data_element import DataElementTestCase
class PersonNameTestCase(DataElementTestCase):
    """
    Tests for the
    :class:`~dicom_parser.data_elements.person_name.PersonName`
    class.
    """

    # Data element class exercised by the shared DataElementTestCase suite.
    TEST_CLASS = PersonName
    # DICOM keyword whose value is read from the sample header fixture.
    SAMPLE_KEY = "PatientName"
| 397 | 127 |
"""Fixtures for models module."""
from physalia.models import Measurement
import numpy
def create_measurement(use_case='login',
                       app_pkg='com.package',
                       duration=2,
                       energy_consumption=30):
    """Build a Measurement populated with fixed fake data."""
    timestamp = 1485634263.096069
    version = '1.0.0'
    device_model = 'Nexus 5X'
    return Measurement(timestamp, use_case, app_pkg, version,
                       device_model, duration, energy_consumption)
def create_random_sample(mean, std,
                         app_pkg='com.package',
                         use_case='login',
                         count=30, seed=1):
    """Create ``count`` measurements with normally distributed energy values."""
    # pylint: disable=too-many-arguments
    if seed is not None:
        numpy.random.seed(seed)
    draws = numpy.random.normal(loc=mean,
                                scale=std,
                                size=count)
    return [create_measurement(energy_consumption=value,
                               app_pkg=app_pkg,
                               use_case=use_case)
            for value in draws]
def create_random_samples(count=30, seed=1):
    """Create two measurement samples with energy means 10.0 and 12.0."""
    if seed is not None:
        numpy.random.seed(seed)
    # Seed once here, then draw both samples from the shared RNG stream.
    first = create_random_sample(10.0, 1.0, count=count, seed=None)
    second = create_random_sample(12.0, 1.0, count=count, seed=None)
    return first, second
| 1,627 | 472 |
import tensorflow as tf
from config import config
from utils.utils import *
import logging
from DL_Models.tf_models.ConvNet import ConvNet
class XCEPTION(ConvNet):
    """
    Xception-style convolutional network.

    Inspired by the Xception paper, which shows that 'extreme' Inception
    convolutions can be expressed as separable convolutions and can beat the
    Inception architecture. The network stacks modules to a given depth; each
    module is a separable convolution followed by batch normalization and a
    ReLU activation.
    """
    def __init__(self, loss, model_number, batch_size, input_shape, output_shape, kernel_size=40, nb_filters=128, verbose=True, epochs=1,
                 use_residual=True, depth=6):
        super(XCEPTION, self).__init__(input_shape=input_shape, output_shape=output_shape, loss=loss, kernel_size=kernel_size,
                                       nb_filters=nb_filters, verbose=verbose, epochs=epochs, batch_size=batch_size,
                                       use_residual=use_residual, depth=depth, model_number=model_number, preprocessing=False)

    def _module(self, input_tensor, current_depth):
        """One Xception module: separable conv -> batch norm -> ReLU."""
        out = tf.keras.layers.SeparableConv1D(filters=self.nb_filters, kernel_size=self.kernel_size,
                                              padding='same', use_bias=False, depth_multiplier=1)(input_tensor)
        out = tf.keras.layers.BatchNormalization()(out)
        out = tf.keras.layers.Activation(activation='relu')(out)
        return out
from __future__ import print_function
from __future__ import absolute_import
from __future__ import division
import scriptcontext as sc
import compas_rhino
from compas_3gs.rhino import SettingsForm
from compas_3gs.rhino import ForceVolMeshObject
from compas_3gs.rhino import FormNetworkObject
__commandname__ = "TGS_settings"


def RunCommand(is_interactive):
    """Open the settings form for the active 3GS scene in Rhino."""
    # Bail out if the plugin state has not been initialised in Rhino's sticky dict.
    if '3GS' not in sc.sticky:
        compas_rhino.display_message('3GS has not been initialised yet.')
        return
    scene = sc.sticky['3GS']['scene']
    SettingsForm.from_scene(scene,
                            object_types=[ForceVolMeshObject, FormNetworkObject],
                            global_settings=['3GS', 'Solvers'])
    scene.update()
    scene.save()
# ==============================================================================
# Main
# ==============================================================================

# Run the command interactively when executed directly (inside Rhino).
if __name__ == '__main__':
    RunCommand(True)
| 909 | 264 |
import setuptools
# Use the README as the long description shown on PyPI.
with open("README.md", "r") as readme_file:
    long_description = readme_file.read()

setuptools.setup(
    name="xdcc",
    version="0.0.3",
    author="Thiago T. P. Silva",
    author_email="thiagoteodoro501@gmail.com",
    description="A simple XDCC downloader written in python3",
    long_description=long_description,
    long_description_content_type="text/markdown",
    url="https://github.com/thiagotps/xdcc",
    packages=setuptools.find_packages(),
    install_requires=['irc'],
    keywords="irc xdcc",
    # Installs an `xdcc` console command pointing at the package main().
    entry_points={"console_scripts": ["xdcc=xdcc.__main__:main"]},
    classifiers=[
        "Programming Language :: Python :: 3",
        "License :: OSI Approved :: MIT License",
        "Operating System :: OS Independent",
    ],
    python_requires='>=3.7',
)
| 786 | 273 |
"""Modified code from https://developers.google.com/optimization/routing/tsp#or-tools """
# Copyright Matthew Mack (c) 2020 under CC-BY 4.0: https://creativecommons.org/licenses/by/4.0/
from __future__ import print_function
import math
from ortools.constraint_solver import routing_enums_pb2
from ortools.constraint_solver import pywrapcp
from PIL import Image, ImageDraw
import os
import time
import copy
from itertools import permutations
# Change these file names to the relevant files.
ORIGINAL_IMAGE = "images/brother-1024-stipple.png"
IMAGE_TSP = "images/brother-1024-stipple.tsp"
# Change the number of points according to the base tsp file you are using.
NUMBER_OF_POINTS = 1024
# Number of segments used by the partitioned 2-opt pass; halved each round.
NUMBER_OF_PARTITIONS = 8
# Vertex where every tour starts and ends.
INITIAL_VERTEX = 0
def create_data_model():
    """Stores the data for the problem.

    Parses IMAGE_TSP: skips the 6 header lines, then reads "index x y" rows
    into integer coordinate pairs.
    """
    nodes = []
    with open(IMAGE_TSP) as tsp_file:
        for _ in range(6):
            next(tsp_file)
        for row in tsp_file:
            _, x, y = row.split()
            nodes.append((int(float(x)), int(float(y))))
    # Single-vehicle routing problem starting at node 0.
    return {'locations': nodes, 'num_vehicles': 1, 'depot': 0}
def compute_euclidean_distance_matrix(locations):
    """Return a dict-of-dicts of pairwise integer Euclidean distances."""
    distances = {}
    for i, (x1, y1) in enumerate(locations):
        row = {}
        for j, (x2, y2) in enumerate(locations):
            # Distances are truncated to ints as required by the solver.
            row[j] = 0 if i == j else int(math.hypot(x1 - x2, y1 - y2))
        distances[i] = row
    return distances
def print_solution(manager, routing, solution):
"""Prints solution on console."""
print('Objective: {}'.format(solution.ObjectiveValue()))
index = routing.Start(0)
plan_output = 'Route:\n'
route_distance = 0
while not routing.IsEnd(index):
plan_output += ' {} ->'.format(manager.IndexToNode(index))
previous_index = index
index = solution.Value(routing.NextVar(index))
route_distance += routing.GetArcCostForVehicle(previous_index, index, 0)
plan_output += ' {}\n'.format(manager.IndexToNode(index))
print(plan_output)
plan_output += 'Objective: {}m\n'.format(route_distance)
def get_routes(solution, routing, manager):
    """Get vehicle routes from a solution and store them in an array.

    ``solution`` is a successor map: solution[v] is the node visited after v.
    """
    routes = []
    for vehicle in range(routing.vehicles()):
        index = routing.Start(vehicle)
        route = [manager.IndexToNode(index)]
        # Follow the successor map once per entry in the solution.
        for _ in range(len(solution)):
            index = solution[index]
            route.append(manager.IndexToNode(index))
        routes.append(route)
    # Single-vehicle problem: only the first route is meaningful.
    return routes[0]
def draw_routes(nodes, path):
    """Takes a set of nodes and a path, and outputs an image of the drawn TSP path"""
    coords = [nodes[int(stop)] for stop in path]
    # Match the canvas size to the original stippled image.
    width, height = Image.open(ORIGINAL_IMAGE).size
    canvas = Image.new("RGBA", (width, height), color='white')
    drawer = ImageDraw.Draw(canvas)
    drawer.line(coords, fill='black', width=1)
    # The .tsp coordinate system is y-up, so flip vertically before saving.
    canvas = canvas.transpose(Image.FLIP_TOP_BOTTOM)
    FINAL_IMAGE = IMAGE_TSP.replace("-stipple.tsp", "-tsp.png")
    canvas.save(FINAL_IMAGE)
    print("TSP solution has been drawn and can be viewed at", FINAL_IMAGE)
def nearest_neighbors_solution(distance_matrix):
    """Greedy nearest-neighbour tour as a successor map {vertex: next_vertex}."""
    visited = {i: False for i in range(NUMBER_OF_POINTS)}
    successors = {i: -1 for i in range(NUMBER_OF_POINTS)}
    current = INITIAL_VERTEX
    while True:
        visited[current] = True
        # Pick the closest not-yet-visited vertex reachable from here.
        best_distance = float("inf")
        best_vertex = -1
        for candidate in distance_matrix[current]:
            if not visited[candidate] and distance_matrix[current][candidate] < best_distance:
                best_distance = distance_matrix[current][candidate]
                best_vertex = candidate
        if best_vertex == -1:
            # Everything visited: close the tour back to the start.
            successors[current] = INITIAL_VERTEX
            break
        successors[current] = best_vertex
        current = best_vertex
    return successors
def two_opt_solution(distance_matrix):
    """Improve the greedy tour with 2-opt, coarsening the partitioning each pass."""
    tour = convert_solution_to_group(nearest_neighbors_solution(distance_matrix))
    # Optimize within progressively larger partitions (e.g. 8 -> 4 -> 2 -> 1).
    partitions = NUMBER_OF_PARTITIONS
    while partitions > 0:
        two_opt(distance_matrix, tour, partitions)
        partitions = int(partitions / 2)
    return convert_group_to_solution(tour)
def two_opt(distance_matrix, group, partitions):
    """In-place partitioned 2-opt: within each of ``partitions`` contiguous
    segments of the tour, repeatedly reverse the sub-path between two edges
    whenever doing so shortens the tour, until no improving move remains."""
    partition_size = int(len(group)/partitions)
    for k in range(partitions):
        while True:
            # Track the best (most negative) length change found this sweep.
            min_change = 0
            min_i = -1
            min_j = -1
            for i in range(1 + (k*partition_size), ((k+1)*partition_size)-2):
                for j in range(i+1, ((k+1)*partition_size)):
                    u = group[i-1]
                    v = group[i]
                    w = group[j]
                    x = group[(j+1) % ((k+1)*partition_size)]
                    # Cost of the current edges (u,v)+(w,x) versus the edges
                    # (u,w)+(v,x) that reversing group[i..j] would create.
                    current_distance = (distance_matrix[u][v] + distance_matrix[w][x])
                    new_distance = (distance_matrix[u][w] + distance_matrix[v][x])
                    change = new_distance - current_distance
                    if change < min_change:
                        min_change = change
                        min_i = i
                        min_j = j
            # NOTE(review): when no improvement is found this still calls
            # swap_edges(group, -1, -1); that reverses the empty slice
            # group[-1:0], a no-op — confirm intentional.
            swap_edges(group, min_i, min_j)
            if min_change == 0:
                break
            print(min_change)
def swap_edges(group, v, w):
    """Reverse, in place, the tour segment from position v through w inclusive."""
    segment = group[v:w+1]
    segment.reverse()
    group[v:w+1] = segment
def convert_group_to_solution(group):
    """Convert an ordered tour list into a successor map {vertex: next_vertex}."""
    solution = {}
    for i in range(len(group)-1):
        solution[group[i]] = group[i+1]
    # NOTE(review): the last vertex maps to NUMBER_OF_POINTS (an out-of-range
    # sentinel), whereas nearest_neighbors_solution closes the tour back to
    # INITIAL_VERTEX — confirm which convention callers rely on.
    solution[group[-1]] = NUMBER_OF_POINTS
    print(solution)
    return solution
def convert_solution_to_group(solution):
    """Flatten a successor map into an ordered tour list starting at INITIAL_VERTEX."""
    tour = []
    vertex = INITIAL_VERTEX
    for _ in range(NUMBER_OF_POINTS):
        tour.append(vertex)
        vertex = solution[vertex]
    return tour
def calculate_group_cost(distance_matrix, group):
    """Total cycle length of the tour, including the closing edge back to start."""
    n = len(group)
    return sum(distance_matrix[group[i]][group[(i + 1) % n]] for i in range(n))
def main():
    """Entry point of the program."""
    starting_moment = time.time()
    # Instantiate the data problem.
    print("Step 1/5: Initialising variables")
    data = create_data_model()
    # Create the routing index manager.
    manager = pywrapcp.RoutingIndexManager(len(data['locations']),
                                           data['num_vehicles'], data['depot'])
    # Create Routing Model.
    routing = pywrapcp.RoutingModel(manager)
    print("Step 2/5: Computing distance matrix")
    distance_matrix = compute_euclidean_distance_matrix(data['locations'])
    def distance_callback(from_index, to_index):
        """Returns the distance between the two nodes."""
        # Convert from routing variable Index to distance matrix NodeIndex.
        from_node = manager.IndexToNode(from_index)
        to_node = manager.IndexToNode(to_index)
        return distance_matrix[from_node][to_node]
    transit_callback_index = routing.RegisterTransitCallback(distance_callback)
    # Define cost of each arc.
    routing.SetArcCostEvaluatorOfAllVehicles(transit_callback_index)
    # Setting first solution heuristic.
    print("Step 3/5: Setting an initial solution")
    search_parameters = pywrapcp.DefaultRoutingSearchParameters()
    search_parameters.first_solution_strategy = (
        routing_enums_pb2.FirstSolutionStrategy.PATH_CHEAPEST_ARC)
    # Solve the problem.
    print("Step 4/5: Solving")
    # NOTE(review): the OR-Tools solver call is disabled; ``solution`` here is
    # the successor-map dict produced by the custom 2-opt pass, NOT an
    # or-tools Assignment, which is why print_solution is also disabled below.
    #solution = routing.SolveWithParameters(search_parameters)
    #solution = nearest_neighbors_solution(distance_matrix)
    solution = two_opt_solution(distance_matrix)
    # Print solution on console.
    if solution:
        #print_solution(manager, routing, solution)
        print("Step 5/5: Drawing the solution")
        routes = get_routes(solution, routing, manager)
        draw_routes(data['locations'], routes)
    else:
        print("A solution couldn't be found :(")
    finishing_moment = time.time()
    print("Total time elapsed during execution: " + str(finishing_moment - starting_moment) + " seconds")
    print("Total distance: " + str(calculate_group_cost(distance_matrix, convert_solution_to_group(solution))))

if __name__ == '__main__':
    main()
import pytest
import payload as pl
from .fixtures import Fixtures
@pytest.fixture()
def billing_schedule(processing_account, customer_account):
    """Monthly subscription schedule for 2019 with a single 39.99 charge."""
    schedule = pl.BillingSchedule.create(
        start_date="2019-01-01",
        end_date="2019-12-31",
        recurring_frequency="monthly",
        type="subscription",
        customer_id=customer_account.id,
        processing_id=processing_account.id,
        charges=pl.BillingCharge(type="option_1", amount=39.99),
    )
    return schedule
class TestBilling(Fixtures):
    """Tests for billing schedule creation and updates."""

    def test_create_billing_schedule(
        self, api_key, billing_schedule, processing_account
    ):
        """A new schedule is linked to its processing account and keeps its charge."""
        assert billing_schedule.processing_id == processing_account.id
        assert billing_schedule.charges[0].amount == 39.99

    def test_update_billing_schedule_frequency(
        self, api_key, billing_schedule, processing_account
    ):
        """Updating the recurring frequency is reflected on the schedule."""
        assert billing_schedule.processing_id == processing_account.id
        assert billing_schedule.charges[0].amount == 39.99
        billing_schedule.update(recurring_frequency="quarterly")
        assert billing_schedule.recurring_frequency == "quarterly"
| 1,162 | 386 |
import numpy as np
from pydrake.all import *
class BasicTrunkPlanner(LeafSystem):
    """
    Implements the simplest possible trunk-model planner, which generates
    desired positions, velocities, and accelerations for the feet, center-of-mass,
    and body frame orientation.
    """
    def __init__(self, frame_ids):
        LeafSystem.__init__(self)
        # Dictionary of geometry frame ids {"trunk": trunk_frame_id, "lf": lf_foot_frame_id, ...}
        self.frame_ids = frame_ids
        # We'll use an abstract output port so we can send all the
        # data we'd like to include in a dictionary format
        self.DeclareAbstractOutputPort(
                "trunk_trajectory",
                lambda: AbstractValue.Make({}),
                self.SetTrunkOutputs)
        # Another output port is used to send geometry data regarding the
        # trunk model to the scene graph for visualization
        fpv = FramePoseVector()
        for frame in self.frame_ids:
            fpv.set_value(frame_ids[frame], RigidTransform())
        self.DeclareAbstractOutputPort(
                "trunk_geometry",
                lambda: AbstractValue.Make(fpv),
                self.SetGeometryOutputs)
        # The output data is a class-level object so we can be sure we're sending
        # the same info to the controller as to the scene graph
        self.output_dict = {}
        self.SimpleStanding() # set initial values to self.output_dict

    def SimpleStanding(self):
        """
        Set output values corresponing to simply
        standing on all four feet.
        """
        # Foot positions (lf/rf = left/right front, lh/rh = left/right hind).
        self.output_dict["p_lf"] = np.array([ 0.175, 0.11, 0.0]) # mini cheetah
        self.output_dict["p_rf"] = np.array([ 0.175,-0.11, 0.0])
        self.output_dict["p_lh"] = np.array([-0.2, 0.11, 0.0])
        self.output_dict["p_rh"] = np.array([-0.2, -0.11, 0.0])
        # Alternative foot placements for a different robot model:
        #self.output_dict["p_lf"] = np.array([ 0.34, 0.19, 0.0]) # anymal
        #self.output_dict["p_rf"] = np.array([ 0.34,-0.19, 0.0])
        #self.output_dict["p_lh"] = np.array([-0.34, 0.19, 0.0])
        #self.output_dict["p_rh"] = np.array([-0.34,-0.19, 0.0])
        # Foot velocities
        self.output_dict["pd_lf"] = np.zeros(3)
        self.output_dict["pd_rf"] = np.zeros(3)
        self.output_dict["pd_lh"] = np.zeros(3)
        self.output_dict["pd_rh"] = np.zeros(3)
        # Foot accelerations
        self.output_dict["pdd_lf"] = np.zeros(3)
        self.output_dict["pdd_rf"] = np.zeros(3)
        self.output_dict["pdd_lh"] = np.zeros(3)
        self.output_dict["pdd_rh"] = np.zeros(3)
        # Foot contact states: [lf,rf,lh,rh], True indicates being in contact.
        self.output_dict["contact_states"] = [True,True,True,True]
        # Foot contact forces, where each row corresponds to a foot [lf,rf,lh,rh].
        self.output_dict["f_cj"] = np.zeros((3,4))
        # Body pose
        self.output_dict["rpy_body"] = np.array([0.0, 0.0, 0.0])
        self.output_dict["p_body"] = np.array([0.0, 0.0, 0.3])
        # Body velocities
        self.output_dict["rpyd_body"] = np.zeros(3)
        self.output_dict["pd_body"] = np.zeros(3)
        # Body accelerations
        self.output_dict["rpydd_body"] = np.zeros(3)
        self.output_dict["pdd_body"] = np.zeros(3)
        # Max control input (accelerations)
        self.output_dict["u2_max"] = 0.0

    def OrientationTest(self, t):
        """
        Given the current time t, generate output values for
        for a simple orientation test.
        """
        # Sinusoidal roll/pitch/yaw reference; velocities and accelerations
        # below are the analytic first and second derivatives.
        self.SimpleStanding()
        self.output_dict["rpy_body"] = np.array([0.0, 0.4*np.sin(t), 0.4*np.cos(t)])
        self.output_dict["rpyd_body"] = np.array([0.0, 0.4*np.cos(t), -0.4*np.sin(t)])
        self.output_dict["rpydd_body"] = np.array([0.0, -0.4*np.sin(t), -0.4*np.cos(t)])

    def RaiseFoot(self, t):
        """
        Modify the simple standing output values to lift one foot
        off the ground.
        """
        self.SimpleStanding()
        # Shift the body weight away from the foot about to be lifted.
        self.output_dict["p_body"] += np.array([-0.1, 0.05, 0.0])
        # After 1 second, break right-front contact and raise that foot.
        if t>1:
            self.output_dict["contact_states"] = [True,False,True,True]
            self.output_dict["p_rf"] += np.array([ 0.0, 0.0, 0.1])

    def EdgeTest(self):
        """
        Move the trunk right to the edge of feasibility, ensuring that
        friction constraints become active (may require a smaller timestep)
        """
        self.SimpleStanding()
        self.output_dict["p_body"] += np.array([-0.1, 0.63, 0.0])

    def SetTrunkOutputs(self, context, output):
        # Rebind output_dict to the port's mutable value, then fill it in
        # place. The active behavior is the orientation test; the commented
        # alternatives exercise other scenarios.
        self.output_dict = output.get_mutable_value()
        #self.SimpleStanding()
        #self.output_dict["p_body"] += np.array([0,0,0.05])
        self.OrientationTest(context.get_time())
        #self.EdgeTest()
        #self.RaiseFoot(context.get_time())

    def SetGeometryOutputs(self, context, output):
        # Publish the trunk and foot frame poses for visualization, derived
        # from the same output_dict used by the controller.
        fpv = output.get_mutable_value()
        fpv.clear()
        X_trunk = RigidTransform()
        X_trunk.set_rotation(RollPitchYaw(self.output_dict["rpy_body"]))
        X_trunk.set_translation(self.output_dict["p_body"])
        fpv.set_value(self.frame_ids["trunk"], X_trunk)
        for foot in ["lf","rf","lh","rh"]:
            X_foot = RigidTransform()
            X_foot.set_translation(self.output_dict["p_%s" % foot])
            fpv.set_value(self.frame_ids[foot],X_foot)
| 5,415 | 1,927 |
import os
import shadercompiler
def spv_folder_to_glsl_folder(spv_folder):
    """Map a compiled-shader (.spv) folder to its GLSL source folder."""
    relative_glsl_root = '../../toy/shader'
    return os.path.join(spv_folder, relative_glsl_root)
def glsl_from_spv(spv):
    """Derive the GLSL source path corresponding to a compiled .spv path."""
    folder, filename = os.path.split(spv)
    stem = filename[:-4]  # drop the '.spv' extension
    return os.path.join(spv_folder_to_glsl_folder(folder), stem + '.glsl')
def reload_pipelines(pipelines):
    """Recompile any outdated shaders and reload every pipeline using them."""
    # Invert the relationship: spv path -> set of pipelines that use it.
    spv_users = {}
    for pipeline in pipelines:
        for spv in pipeline.get_shader_spvs():
            spv_users.setdefault(spv, set()).add(pipeline)
    stale_pipelines = set()
    for spv, users in spv_users.items():
        glsl = glsl_from_spv(spv)
        if shadercompiler.is_shader_outdated(glsl, spv):
            # Abort the whole reload on the first failed compile.
            if not shadercompiler.compile_glsl(glsl, spv):
                print('ERROR reload failed')
                return
            stale_pipelines.update(users)
    for pipeline in stale_pipelines:
        pipeline.reload_shader()
| 1,027 | 374 |
# Generated by Django 2.2.13 on 2021-01-22 17:41
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
class Migration(migrations.Migration):
    """Initial schema for the accounting app: accounts, categories,
    currencies, sub-categories, and history records."""

    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='Account',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=100)),
                ('amount', models.DecimalField(decimal_places=2, max_digits=8)),
                ('icon', models.CharField(max_length=100, null=True)),
                ('created_date', models.DateTimeField(default=django.utils.timezone.now)),
                ('updated_date', models.DateTimeField(default=django.utils.timezone.now)),
            ],
            options={
                'ordering': ['created_date'],
            },
        ),
        migrations.CreateModel(
            name='Category',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=100)),
                ('icon', models.CharField(max_length=100)),
                # Category is either income or expense; labels are localized.
                ('category_type', models.CharField(choices=[('income', '收入'), ('expense', '支出')], default='expense', max_length=100)),
            ],
            options={
                'ordering': ['id'],
            },
        ),
        migrations.CreateModel(
            name='Currency',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=100)),
                ('icon', models.CharField(max_length=100)),
            ],
            options={
                'ordering': ['id'],
            },
        ),
        migrations.CreateModel(
            name='SubCategory',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=100)),
                ('icon', models.CharField(max_length=100)),
                # Parent category; kept (set to NULL) if the parent is deleted.
                ('parent', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='accounting.Category')),
            ],
            options={
                'ordering': ['id'],
            },
        ),
        migrations.CreateModel(
            name='HistoryRecord',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('time_of_occurrence', models.DateTimeField(default=django.utils.timezone.now)),
                ('amount', models.DecimalField(decimal_places=2, max_digits=8)),
                ('comment', models.CharField(blank=True, max_length=500, null=True)),
                ('created_date', models.DateTimeField(default=django.utils.timezone.now)),
                ('updated_date', models.DateTimeField(default=django.utils.timezone.now)),
                ('account', models.ForeignKey(default=1, null=True, on_delete=django.db.models.deletion.SET_NULL, to='accounting.Account')),
                ('category', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='accounting.Category')),
                ('currency', models.ForeignKey(default=1, null=True, on_delete=django.db.models.deletion.SET_NULL, to='accounting.Currency')),
                ('sub_category', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='accounting.SubCategory')),
            ],
            options={
                # Newest records first.
                'ordering': ['-time_of_occurrence'],
            },
        ),
        migrations.AddField(
            model_name='account',
            name='currency',
            field=models.ForeignKey(default=1, null=True, on_delete=django.db.models.deletion.SET_NULL, to='accounting.Currency'),
        ),
    ]
| 4,060 | 1,140 |
import urllib.request
import cv2
import numpy as np
import time
URL = "http://192.168.1.3:8080/shot.jpg"

# Poll the IP Webcam snapshot endpoint and display frames until 'q' is pressed.
while True:
    raw_bytes = urllib.request.urlopen(URL).read()
    frame = cv2.imdecode(np.array(bytearray(raw_bytes), dtype=np.uint8), -1)
    cv2.imshow('IPWebcam', frame)
    if cv2.waitKey(1) == ord("q"):
        break

cv2.destroyAllWindows()
| 373 | 159 |
import sys
import time
import arcpy
import traceback
import GeneralizeDEM
if __name__ == '__main__':
    # SET PARAMETERS HERE
    # --------------------------------------------------------------------
    demdataset = 'X:/Work/Scripts & Tools/MY/DEMGEN/mistral'
    marine = 'X:/Work/Scripts & Tools/MY/DEMGEN/DEMGENEW.gdb/ne_10m_ocean_P'
    output = 'X:/Work/DEMGEN/DEMGENEW.gdb/mistral_gen2'
    outputcellsize = 2000
    minacc1 = 40
    minlen1 = 10
    minacc2 = 20
    minlen2 = 5
    is_widen = True
    widentype = 'Min/Max'
    widendist = 4000
    filtersize = 5
    is_smooth = True
    is_tiled = True
    is_parallel = True
    num_processes = 6
    tilesize = 256
    is_continued = False
    continued_folder = 'X:/Work/DEMGEN/scratch1'
    # --------------------------------------------------------------------
    print('> Initializing GeneralizeDEM script...')
    print('')
    start = int(time.time())
    try:
        # Generalization needs an Advanced (ArcInfo) license.
        if arcpy.CheckProduct("ArcInfo") == "Available":
            GeneralizeDEM.execute(demdataset, marine, output, outputcellsize,
                                  minacc1, minlen1, minacc2, minlen2,
                                  is_widen, widentype, widendist, filtersize,
                                  is_smooth, is_tiled, tilesize, num_processes,
                                  is_parallel, is_continued, continued_folder)
        else:
            msg = 'ArcGIS for Desktop Advanced license not available'
            arcpy.AddError(msg)
    except Exception:
        # sys.exc_type / sys.exc_value were removed in Python 3 and raised
        # AttributeError here, masking the real error; use sys.exc_info().
        exc_type, exc_value, tb = sys.exc_info()
        tbinfo = traceback.format_tb(tb)[0]
        pymsg = ("Traceback Info:\n" + tbinfo + "\nError Info:\n " +
                 str(exc_type) + ": " + str(exc_value) + "\n")
        arcpy.AddError(pymsg)
        print("Processing failed")
    # Report elapsed wall-clock time as h/m/s.
    finish = int(time.time())
    seconds = finish - start
    m, s = divmod(seconds, 60)
    h, m = divmod(m, 60)
    print('')
    print("> Finished in %02d h %02d m %02d s" % (h, m, s))
    print('')
    input("Press Enter to continue...")
| 2,046 | 710 |
#! /usr/bin/python3
# -*- coding: utf-8 -*-
#
# json_python_read.py
#
# Jul/25/2014
#
# ---------------------------------------------------------------------
import sys
sys.path.append ("/var/www/data_base/common/python_common")
#
from file_io import file_to_str_proc
#
# ---------------------------------------------------------------------
# Path of the JSON payload this CGI script serves.
file_json = "/var/tmp/json/cities.json"
#
# Read the whole file into a string (project helper from file_io).
json_str = file_to_str_proc(file_json)
#
# CGI response header. application/json is the registered MIME type
# (the original's "text/json" is nonstandard). A single "\n" plus
# print()'s own newline yields exactly the blank line that terminates
# the header block; the original "\n\n" emitted a stray blank line
# at the start of the body.
print("Content-type: application/json\n")
#
print(json_str)
#
# ---------------------------------------------------------------------
| 559 | 187 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    """Alter ``ReportType.notification_buffer`` to a nullable, optional FloatField."""

    dependencies = [
        ('reports', '0096_auto_20170920_1521'),
    ]

    operations = [
        migrations.AlterField(
            model_name='reporttype',
            name='notification_buffer',
            # help_text is a bytes literal as generated (unicode_literals is
            # active in this file); content is ASCII-only.
            field=models.FloatField(help_text=b'Radius of buffer that use to find intersects authorities', null=True, blank=True),
            preserve_default=True,
        ),
    ]
| 534 | 166 |
from math import radians, cos, sin, asin, sqrt, floor, pow
import math
def haversine_km(lat1, lon1, lat2, lon2, radius=6371.0):
    """Return the great-circle distance between two points given in degrees.

    Uses the haversine formula. ``radius`` defaults to the Earth's mean
    radius in kilometers; pass 3956 to get the result in miles.
    """
    # Convert all coordinates to radians once, up front.
    lon1, lat1, lon2, lat2 = map(radians, (lon1, lat1, lon2, lat2))
    # Haversine formula
    dlon = lon2 - lon1
    dlat = lat2 - lat1
    a = sin(dlat / 2) ** 2 + cos(lat1) * cos(lat2) * sin(dlon / 2) ** 2
    c = 2 * asin(sqrt(a))
    return radius * c


# Same two sample points and printed distance as the original script.
print(haversine_km(11.00461011, 76.95691543, 11.0070471, 76.96110704))
| 513 | 260 |
from slixmpp.exceptions import XMPPError
from ..conf import settings
# Registry of SASL mechanism classes, keyed by their mechanism name.
mechanisms = {}


def sasl_mech():
    """Return a class decorator that registers a Mechanism by its ``name``."""
    def register(mechanism_cls):
        mechanisms[mechanism_cls.name] = mechanism_cls
        return mechanism_cls
    return register
class Mechanism(object):
    """Base class for server-side SASL authentication mechanisms."""

    # Mechanism name advertised to clients; subclasses must override.
    name = None

    def __init__(self, auth):
        # The authentication handler that owns this mechanism instance.
        self.auth = auth

    @staticmethod
    async def available(auth):
        """Whether this mechanism may be offered; the base class always allows it."""
        return True

    @property
    def stream(self):
        # Shortcut to the XML stream of the owning auth handler.
        return self.auth.stream

    @property
    def boundjid(self):
        # JID currently bound on the underlying stream.
        return self.auth.stream.boundjid

    async def challenge(self, data=None):
        """Send a SASL challenge through the auth handler and await the client's reply."""
        return await self.auth._async_challenge(data)

    def process(self, request):
        """Handle the client's authentication request; subclasses must implement."""
        raise NotImplementedError()
class LegacyAuth(Mechanism):
    """XEP-0078 legacy (pre-SASL) jabber:iq:auth authentication."""

    name = 'xep_0078'

    @staticmethod
    async def available(auth):
        # Offered only when explicitly enabled in the configuration.
        return settings.ALLOW_LEGACY_AUTH

    async def process(self, request):
        # Username and resource are both mandatory in a legacy auth query.
        has_required = 'username' in request and 'resource' in request
        if not has_required:
            raise XMPPError('not-acceptable')
        username = request['username']
        password = request.get('password', '')
        if not await self.auth.check_password(username, password):
            raise XMPPError('not-authorized')
        # Bind the authenticated JID from the request fields.
        self.boundjid.user = username
        self.boundjid.resource = request['resource']
@sasl_mech()
class Anonymous(Mechanism):
    """SASL ANONYMOUS: log in without credentials, when the server allows it."""

    name = 'ANONYMOUS'

    @staticmethod
    async def available(auth):
        return bool(settings.ALLOW_ANONYMOUS_LOGIN)

    async def process(self, request):
        # Guard clause: reject outright when anonymous login is disabled.
        if not settings.ALLOW_ANONYMOUS_LOGIN:
            raise XMPPError('not-authorized')
        self.boundjid.user = self.auth.generate_anonymous_user()
@sasl_mech()
class External(Mechanism):
    """SASL EXTERNAL: authenticate out-of-band via the client's TLS certificate."""

    name = 'EXTERNAL'

    @staticmethod
    async def available(auth):
        # Only meaningful when the client presented a certificate at all.
        if not auth.stream.get_client_cert():
            return False
        # TODO: certificate handling is not implemented, so never offer it yet.
        return False

    async def process(self, request):
        pass
@sasl_mech()
class Plain(Mechanism):
    """SASL PLAIN: credentials arrive as ``authzid NUL authcid NUL password``."""

    name = 'PLAIN'

    async def process(self, request):
        # The client may send credentials inline, or wait for an empty
        # challenge before responding.
        if request.xml.text:
            payload = request['value']
        else:
            payload = await self.challenge()
        parts = payload.split(b'\0')
        if len(parts) != 3:
            raise XMPPError('malformed-request')
        authzid, username, password = (part.decode('utf8') for part in parts)
        # Password is verified before the authzid, as in the original flow.
        if not await self.auth.check_password(username, password):
            raise XMPPError('not-authorized')
        # Only an empty authzid or one equal to the authcid is accepted;
        # arbitrary authorization identities are not supported yet.
        authcid = "%s@%s" % (username, self.stream.host)
        if authzid != '' and authzid != authcid:
            raise XMPPError('invalid-authzid')
        self.boundjid.user = username
def get_sasl_by_name(name):
    """Look up a registered SASL mechanism class by name (None if unknown)."""
    return mechanisms.get(name)
async def get_sasl_available(stream):
    """Return the mechanism classes whose ``available`` check passes for *stream*."""
    offered = []
    for mech in mechanisms.values():
        if await mech.available(stream):
            offered.append(mech)
    return offered
| 3,106 | 936 |
from django.http import HttpResponse
def index(request):
    """Echo the request's full path (path plus query string) as the response body."""
    full_path = request.get_full_path()
    return HttpResponse(full_path)
| 108 | 31 |
class Player:
    """Ladder participant: an id, a rating, and win/loss streak bookkeeping."""

    def __init__(self, id, rating=1000, win_count=0, lose_count=0,
                 win_streak=0, best_win_streak=0):
        self.id = id
        self.rating = rating
        self.win_count = win_count
        self.lose_count = lose_count
        self.win_streak = win_streak
        self.best_win_streak = best_win_streak

    def __repr__(self):
        # Added for debuggability; the original class had no repr at all.
        return (f"{type(self).__name__}(id={self.id!r}, rating={self.rating}, "
                f"win_count={self.win_count}, lose_count={self.lose_count})")

    def get_id(self):
        """Return the player's identifier (accessor kept for existing callers)."""
        return self.id

    def get_rating(self):
        """Return the current rating (accessor kept for existing callers)."""
        return self.rating

    def set_rating(self, rating):
        """Replace the player's rating."""
        self.rating = rating

    def plus_win(self):
        """Record a win, extending the current (and possibly best) streak."""
        self.win_count += 1
        self.win_streak += 1
        if self.win_streak > self.best_win_streak:
            self.best_win_streak = self.win_streak

    def plus_lose(self):
        """Record a loss; the current win streak resets to zero."""
        self.lose_count += 1
        self.win_streak = 0
def get_tier(rating):
    """Map a rating to its tier name; non-positive ratings yield None."""
    # Upper bound (inclusive) for each tier below Diamond.
    thresholds = (
        (1149, "Bronze"),
        (1499, "Silver"),
        (1849, "Gold"),
        (2199, "Platinum"),
    )
    if rating <= 0:
        # Mirrors the original: no branch matched, so None fell out implicitly.
        return None
    for upper, tier in thresholds:
        if rating <= upper:
            return tier
    return "Diamond"
| 1,258 | 444 |
"""
Sprite Rotation With A Tank.
Vehicles or tower defense turrets can have parts
that can rotate toward targets.
These parts are usually represented with separate sprites
drawn relative to attachment points on the main body.
Because these sprites are usually asymmetrical,
we have to rotate them around
their attachment points on the main body.
They will look wrong otherwise!
This example allows the player to switch between
two ways of rotating a tank's turret and barrel:
1. correctly, around a point on the tank's body
2. incorrectly, around the center of the barrel.
Artwork from https://kenney.nl
If Python and Arcade are installed, this example can be run from the command line with:
python -m arcade.examples.sprite_rotate_tank
"""
import arcade
import math
TANK_SPEED = 64  # How many pixels per second the tank travels
TANK_TURNING_SPEED = 60  # how many degrees per second the tank spins by.

# This is half the length of the barrel sprite.
# We use this value to ensure the end of the barrel sit in the middle of the tank.
TANK_BARREL_LENGTH_HALF = 15

# Window dimensions (pixels) and title bar text.
SCREEN_WIDTH = 800
SCREEN_HEIGHT = 600
SCREEN_TITLE = "Rotating Tank Example"
class RotatingSprite(arcade.Sprite):
    """Sprite that can revolve around an arbitrary pivot point."""

    def rotate_around_point(self, point, degrees):
        """
        Rotate this sprite about ``point`` by ``degrees``.

        :param point: Pivot the sprite revolves around.
        :param degrees: Amount of rotation applied in this call.
        """
        # Also spin the sprite's own facing. A sprite that should keep a
        # fixed orientation (e.g. a rotating platform) would omit this.
        self.angle += degrees
        # Move the sprite's center along the circle centered on the pivot.
        self.position = arcade.rotate_point(
            self.center_x, self.center_y, point[0], point[1], degrees)
class ExampleWindow(arcade.Window):
    """
    Demo window: drive a tank with WASD and aim its turret with the mouse.

    Pressing P toggles between rotating the barrel correctly (around the
    tank body's center) and incorrectly (around the barrel's own center).
    """

    def __init__(self):
        super().__init__(SCREEN_WIDTH, SCREEN_HEIGHT, SCREEN_TITLE)

        # Set Background to be green.
        self.background_color = arcade.color.GREEN

        # The tank body and barrel sprites, both starting mid-screen.
        self.tank = arcade.Sprite(":resources:images/topdown_tanks/tankBody_dark_outline.png")
        self.tank.position = SCREEN_WIDTH // 2, SCREEN_HEIGHT // 2

        self.barrel = RotatingSprite(":resources:images/topdown_tanks/tankDark_barrel3_outline.png")
        self.barrel.position = SCREEN_WIDTH // 2, SCREEN_HEIGHT // 2 - TANK_BARREL_LENGTH_HALF

        self.tank_direction = 0.0  # If the tank is moving forward or backwards.
        self.tank_turning = 0.0  # If the tank is turning left or right.

        self.mouse_pos = [0, 0]

        self.tank_sprite_list = arcade.SpriteList()
        self.tank_sprite_list.extend([self.tank, self.barrel])

        self._correct = True
        self.correct_text = arcade.Text("Turret Rotation is Correct, Press P to Switch",
                                        SCREEN_WIDTH // 2, SCREEN_HEIGHT - 25,
                                        anchor_x='center')
        self.control_text = arcade.Text("WASD to move tank, Mouse to aim",
                                        SCREEN_WIDTH // 2, 15,
                                        anchor_x='center')

    def on_draw(self):
        # NOTE(fix): the original also called self.background.draw(), but no
        # `background` attribute is ever created (__init__ only sets
        # background_color), so that line raised AttributeError every frame.
        # self.clear() already fills the window with background_color.
        self.clear()
        self.tank_sprite_list.draw()
        self.control_text.draw()
        self.correct_text.draw()

    def on_update(self, delta_time: float):
        self.move_tank(delta_time)

    def move_tank(self, delta_time):
        """
        Perform all calculations about how to move the tank.
        This includes both the body and the barrel.
        """
        # update the angle of the tank's body alone.
        # The barrel will be updated after the body is moved.
        # NOTE(fix): turning is measured in degrees per second, so it must
        # scale by TANK_TURNING_SPEED; the original used TANK_SPEED
        # (pixels per second), leaving TANK_TURNING_SPEED unused.
        self.tank.angle += TANK_TURNING_SPEED * self.tank_turning * delta_time

        # find how much the tank's x and y should change to move forward or back.
        x_dir = (math.cos(self.tank.radians - math.pi / 2)
                 * self.tank_direction * TANK_SPEED * delta_time)
        y_dir = (math.sin(self.tank.radians - math.pi / 2)
                 * self.tank_direction * TANK_SPEED * delta_time)

        # we then move the tank and the barrel since they are connected together.
        self.tank.center_x += x_dir
        self.tank.center_y += y_dir

        self.barrel.center_x += x_dir
        self.barrel.center_y += y_dir

        if self.correct:
            # Rotate the barrel sprite around the center of the tank,
            # not the center of the barrel sprite.
            # we need to add 90 to the angle due to orientation of the barrel texture.
            # we need to remove the barrel's angle as we only want the change in angle.
            angle_change = (arcade.get_angle_degrees(self.tank.center_y, self.tank.center_x,
                                                     self.mouse_pos[1], self.mouse_pos[0])
                            - self.barrel.angle + 90)
            self.barrel.rotate_around_point((self.tank.center_x, self.tank.center_y),
                                            angle_change)
        else:
            # In this situation we only change the angle without changing the
            # position, which is incorrect.
            # we need to add 90 to the angle due to orientation of the barrel texture.
            angle = arcade.get_angle_degrees(self.tank.center_y, self.tank.center_x,
                                             self.mouse_pos[1], self.mouse_pos[0]) + 90
            self.barrel.angle = angle

    def on_key_press(self, symbol: int, modifiers: int):
        if symbol == arcade.key.W:
            self.tank_direction += 1
        elif symbol == arcade.key.S:
            self.tank_direction -= 1
        elif symbol == arcade.key.A:
            self.tank_turning += 1
        elif symbol == arcade.key.D:
            self.tank_turning -= 1
        elif symbol == arcade.key.P:
            self.correct = bool(1 - self.correct)
            self.correct_text.text = f"Turret Rotation is" \
                                     f" {'Correct' if self.correct else 'Incorrect'}," \
                                     f" Press P to Switch"

    def on_key_release(self, symbol: int, modifiers: int):
        if symbol == arcade.key.W:
            self.tank_direction -= 1
        elif symbol == arcade.key.S:
            self.tank_direction += 1
        elif symbol == arcade.key.A:
            self.tank_turning -= 1
        elif symbol == arcade.key.D:
            self.tank_turning += 1

    def on_mouse_motion(self, x: int, y: int, dx: int, dy: int):
        self.mouse_pos = x, y

    @property
    def correct(self):
        """Whether the barrel rotates correctly around the tank body's center."""
        return self._correct

    @correct.setter
    def correct(self, value):
        if value:
            self._correct = True
            # Snap the barrel back onto its attachment circle around the tank,
            # in the direction of the mouse.
            angle = math.radians(arcade.get_angle_degrees(self.tank.center_y, self.tank.center_x,
                                                          self.mouse_pos[1], self.mouse_pos[0]))
            self.barrel.center_x = (self.tank.center_x + math.cos(angle)
                                    * TANK_BARREL_LENGTH_HALF)
            self.barrel.center_y = (self.tank.center_y + math.sin(angle)
                                    * TANK_BARREL_LENGTH_HALF)
        else:
            self._correct = False
            # Incorrect mode pivots the barrel on the tank's center point.
            self.barrel.center_x = self.tank.center_x
            self.barrel.center_y = self.tank.center_y
def main():
    """Create the example window and hand control to the arcade event loop."""
    ExampleWindow().run()


if __name__ == '__main__':
    main()
| 7,625 | 2,340 |
import datetime
import requests
import requests_cache
from rest_framework import status
from rest_framework.decorators import api_view
from rest_framework.response import Response
from api.commonresponses import DOWNSTREAM_ERROR_RESPONSE
from api.modules.github import constants
from api.modules.github.github_response import ContributorResponse, IssueResponse
# Cache GitHub API responses on disk for a week, keeping repeated calls
# fast and well under the API rate limit.
requests_cache.install_cache(expire_after=datetime.timedelta(days=7))
@api_view(['GET'])
def get_contributors(request, project):
    """
    Return list of people contributed
    :param request:
    :param project:
    :return: 503 if github api fails
    :return: 200 successful
    """
    try:
        url = constants.GITHUB_API_GET_CONTRIBUTORS_URL.format(project_name=project)
        api_response = requests.get(url)
        contributors = api_response.json()
        # A 401 means the configured github access token is invalid.
        if api_response.status_code == 401:
            raise Exception("Authentication fails. Invalid github access token.")
        response = []
        for entry in contributors:
            # Only human accounts are reported; bots/organizations are skipped.
            if entry['type'] != 'User':
                continue
            contributor = ContributorResponse(
                username=entry['login'],
                url=entry['html_url'],
                avatar_url=entry['avatar_url'],
                contributions=entry['contributions'],
                repository_name=project,
            )
            response.append(contributor.to_json())
    except Exception:
        # Any upstream failure (network, bad JSON, auth) maps to 503.
        return DOWNSTREAM_ERROR_RESPONSE
    return Response(response)
@api_view(['GET'])
def get_all_contributors(request):
    """
    Return list of people contributed
    :param request:
    :return: 503 if github api fails
    :return: 200 successful
    """
    # Aggregate contribution counts per user across all active repositories.
    response_dict = {}
    for project in constants.ACTIVE_REPOSITORIES:
        try:
            api_response = requests.get(
                constants.GITHUB_API_GET_CONTRIBUTORS_URL.format(project_name=project)
            )
            api_response_json = api_response.json()
            # if authentication fails
            if api_response.status_code == 401:
                raise Exception("Authentication fails. Invalid github access token.")
            for contributor in api_response_json:
                # Only human accounts are counted; bots/organizations are skipped.
                if contributor['type'] != 'User':
                    continue
                result = ContributorResponse(
                    username=contributor['login'],
                    url=contributor['html_url'],
                    avatar_url=contributor['avatar_url'],
                    contributions=contributor['contributions'],
                    # NOTE(review): repository_name is a list here but a plain
                    # string in get_contributors — confirm ContributorResponse
                    # supports both shapes.
                    repository_name=[project],
                )
                # Merge totals for users already seen in another repository.
                if result.username in response_dict.keys():
                    response_dict[result.username]['contributions'] += result.contributions
                    response_dict[result.username]['repository_name'].append(project)
                else:
                    response_dict[result.username] = result.to_json()
        except Exception:
            return DOWNSTREAM_ERROR_RESPONSE
    # Most prolific contributors first.
    response = sorted(response_dict.values(), key=lambda x: x['contributions'], reverse=True)
    return Response(response)
@api_view(['GET'])
def get_issues(request, project):
    """
    Return list of issues
    :param request:
    :param project:
    :return: 404 if the repository does not exist
    :return: 503 if github api fails
    :return: 200 successful
    """
    try:
        api_response = requests.get(constants.GITHUB_API_GET_ISSUES_URL.format(project_name=project))
        api_response_json = api_response.json()
        if api_response.status_code == 404:
            error_message = "Repository does not exist"
            return Response(error_message, status=status.HTTP_404_NOT_FOUND)
        # A 401 means the configured github access token is invalid.
        if api_response.status_code == 401:
            raise Exception("Authentication fails. Invalid github access token.")
        response = []
        for issue in api_response_json:
            # Each label is a dict; only its 'name' field is exposed as a tag.
            # (The original scanned every key of every label dict looking for
            # "name" — direct access is the idiomatic, O(labels) form.)
            tags = [label['name'] for label in issue['labels'] if 'name' in label]
            result = IssueResponse(
                title=issue['title'],
                created_at=issue['created_at'],
                comments=issue['comments'],
                issue_number=issue['number'],
                repository_url=issue['repository_url'],
                labels=tags
            )
            response.append(result.to_json())
    except Exception:
        # Any upstream failure (network, bad JSON, auth) maps to 503.
        return DOWNSTREAM_ERROR_RESPONSE
    return Response(response)
| 4,913 | 1,276 |
import os
# import pygame
from mutagen.mp3 import MP3
import time
from classuiui import MyFrame1
import wx
# import thread
import threading
import multiprocessing
from playsound import playsound
from test001 import MyFrame1
# file="cd&&cd music&&cd&&StarSky.mp3"
# os.system(file)
# pygame.mixer.init()
# audio = MP3("C:\\\\Users\\\\1\\\\Desktop\\\\test_phone\\\\music\\\\StarSky.mp3")
# track = pygame.mixer.music.load("C:\\\\Users\\\\1\\\\Desktop\\\\test_phone\\\\music\\\\StarSky.mp3")
# pygame.mixer.music.set_volume(0.7)
# pygame.mixer.music.play()
# print(audio.info.length)
# time.sleep(int(audio.info.length))
# pygame.mixer.music.stop()
# pygame.mixer.music.pause() #暂停
# pygame.mixer.music.unpause()#取消暂停
# 成功播放音乐,并有暂停,取消暂停功能。
# class CalcFrame(MyFrame1):
class CalcFrame(MyFrame1):
    """Music-player frame: m1 starts playback, m2 stops it, m3 is a stub."""

    def __init__(self, parent):
        MyFrame1.__init__(self, parent)
        # Handle to the background playback process so m2 can stop it later.
        # (The original kept it as a local in m1 and then read `global p`
        # in m2, which raised NameError because no global p ever existed.)
        self.player = None

    def m1(self, event):
        """Start playing the track in a separate process."""
        self.player = multiprocessing.Process(
            target=playsound,
            args=("C:\\\\Users\\\\1\\\\Desktop\\\\test_phone\\\\music\\\\StarSky.mp3",))
        self.player.start()
        # NOTE(fix): the original called p.join() here, which blocked the GUI
        # thread until the whole song finished; playback now runs detached.
        event.Skip()
        return super().m1(event)

    def m2(self, event):
        """Stop playback if a player process is currently running."""
        if self.player is not None and self.player.is_alive():
            self.player.terminate()
        event.Skip()
        return super().m2(event)

    def m3(self, event):
        event.Skip()
        return super().m3(event)
if __name__ == '__main__':
    """
    主函数
    """
    # Application entry point: build the frame and hand control to wx.
    app = wx.App(False)
    frame = CalcFrame(None)
    frame.Show(True)
    # start the applications
    app.MainLoop()
| 1,574 | 607 |
import mmcv
import os
import numpy as np
from mmcv.runner import load_checkpoint
from mmdet.models import build_detector
from mmdet.apis import init_detector, inference_detector
import torch
# device = torch.device("cpu")
# os.environ["CUDA_VISIBLE_DEVICES"]=""
# import pdb; pdb.set_trace()
# Model config and trained weights for the socket-detection Mask R-CNN
# (project-local paths).
config_file = '../configs/fenghuo/mask_rcnn_r50_caffe_fpn_mstrain-poly_1x_sockets.py'
checkpoint_file = '../work_dirs/mask_rcnn_r50_caffe_fpn_mstrain-poly_1x_sockets/epoch_17.pth'
# Build the detector and load the checkpoint weights.
model = init_detector(config_file, checkpoint_file)
# img_dir = '../data/sockets/train/'
# out_dir = 'results/'
# Single test image; inference_detector accepts a path or a loaded array.
img = '00019.jpg'
# img = mmcv.imread(img)
result = inference_detector(model, img)
# Render the detections onto the image and save the visualization.
model.show_result(img, result, out_file='testOut6.jpg')
# model.show_result(img, result, model.CLASSES)
print(result)