index int64 0 1,000k | blob_id stringlengths 40 40 | code stringlengths 7 10.4M |
|---|---|---|
984,900 | a2969c9795560c37863f5ccd46a783736dffdad8 | #!/usr/bin/env python3
from __future__ import print_function
import platform
import sys
# Report which interpreter is running before the demo starts.
print(platform.python_version())
python_version = sys.version_info.major  # major version number (2 or 3)
print("version is %s"%python_version)
def main():
    """Read an array and a rotation count from stdin, then print the array
    before and after rotating it left."""
    arr_len, shift = map(int, input("Enter arrLen and rotation seperated by space : ").strip().split(' '))
    values = [int(tok) for tok in input("enter all elements of list seperated by space : ").strip().split(' ')]
    print("Entered array is : ")
    print(*values, sep=' ')
    rotated = rotationf(arr_len, shift, values)
    print("After rotation : ")
    print(*rotated, sep=' ')
def rotationf(len, n, l):
    """Rotate list *l* left by *n* positions and return the new list.

    Fix: *n* is now reduced modulo the list length, so rotations larger
    than the list (or negative ones) rotate correctly instead of silently
    returning a mis-sliced list.  The parameter names are kept for
    backward compatibility even though `len` shadows the builtin.
    """
    if not l:
        return []
    n %= l.__len__()  # builtin len() is shadowed by the parameter; use the dunder
    return l[n:] + l[:n]
if __name__ == '__main__':
    main()
|
984,901 | 7e8db926f6e3608cca8116734a404903042364c3 | # -*- coding: utf-8 -*-
# Generated by Django 1.11.2 on 2017-06-17 21:58
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated schema migration for the `home` app.

    Replaces HomePage.video with two separate optional fields:
    `mp4` (the video file itself) and `poster` (its preview image).
    """
    dependencies = [
        ('home', '0002_auto_20170609_0544'),
    ]
    operations = [
        migrations.RemoveField(
            model_name='homepage',
            name='video',
        ),
        migrations.AddField(
            model_name='homepage',
            name='mp4',
            field=models.FileField(blank=True, null=True, upload_to='home/'),
        ),
        migrations.AddField(
            model_name='homepage',
            name='poster',
            field=models.ImageField(blank=True, null=True, upload_to='home/'),
        ),
    ]
|
984,902 | 182cd289e6c2b7abd3fb43047f48da36fd1a2e55 | from .tensor import Tensor
from .modules import Module
# 几类优化器的实现
class Optim(object):
    """Base optimizer.

    Backpropagation leaves a gradient on every parameter tensor; calling
    step() walks the wrapped module tree and lets _update_weight apply the
    update rule to each parameter it finds.
    """

    def __init__(self, module, lr):
        self.module = module
        self.lr = lr

    def step(self):
        """Apply one optimization step to every parameter of the module."""
        self._step_module(self.module)

    def _step_module(self, module):
        # Recursively visit Modules, bare Tensors and lists of either,
        # updating every parameter tensor on the way.
        if isinstance(module, Module):
            for layer in module.layer:
                self._update_weight(layer.tensor)
        elif isinstance(module, Tensor):
            self._update_weight(module)
        elif isinstance(module, list):
            for child in module:
                self._step_module(child)

    def _update_weight(self, tensor):
        # Plain gradient descent; subclasses override for fancier rules.
        tensor -= self.lr * tensor.grad
class SGD(Optim):  # stochastic gradient descent with (EMA-style) momentum
    def __init__(self, module, lr, momentum: float=0):
        super(SGD, self).__init__(module, lr)
        self.momentum = momentum
        # Running gradient average; lazily initialised on the first update
        # because parameter shapes are unknown until then.
        self.tensor_dt = None
    def _update_weight(self, tensor):
        # TODO Update the weight of tensor
        # in SGD manner.
        ...
        if self.tensor_dt is None:
            self.tensor_dt = Tensor.from_array(tensor.grad)
        # NOTE(review): this is an exponential-moving-average variant
        # (v = m*v + (1-m)*g), not classic momentum (v = m*v + g).  On the
        # very first call the EMA collapses to the raw gradient, so the
        # initialisation above is effectively a no-op.
        # NOTE(review): the single shared tensor_dt mixes gradients of all
        # parameters when the module has more than one — confirm intended.
        self.tensor_dt = self.momentum*self.tensor_dt + (1-self.momentum)*tensor.grad
        tensor -= self.lr*self.tensor_dt
        # End of todo
class Adam(Optim):
    """Adam optimizer (Kingma & Ba) with bias-corrected moment estimates.

    Fix: the original `_update_weight` body was an unimplemented TODO, so
    Adam silently performed no parameter updates at all.  The standard Adam
    update rule is implemented here while keeping the (module, lr)
    constructor interface and the existing `beta` / `eps` attributes.
    """

    def __init__(self, module, lr):
        super(Adam, self).__init__(module, lr)
        self.beta = [0.99, 0.999]  # decay rates for 1st / 2nd moment estimates
        self.eps = 1e-7            # numerical stabiliser in the denominator
        # Per-parameter optimizer state, keyed by id(tensor); the module
        # (and therefore its tensors) is owned by the optimizer, so ids
        # stay valid for the optimizer's lifetime.
        self._m = {}  # first moments (means)
        self._v = {}  # second moments (uncentred variances)
        self._t = {}  # per-parameter step counters for bias correction

    def _update_weight(self, tensor):
        """Apply one Adam step to a single parameter tensor in place."""
        key = id(tensor)
        t = self._t.get(key, 0) + 1
        self._t[key] = t
        g = tensor.grad
        b1, b2 = self.beta
        m = b1 * self._m.get(key, 0) + (1 - b1) * g
        v = b2 * self._v.get(key, 0) + (1 - b2) * (g * g)
        self._m[key] = m
        self._v[key] = v
        # Bias-corrected estimates (moments start at zero, so early steps
        # would otherwise be biased towards zero).
        m_hat = m / (1 - b1 ** t)
        v_hat = v / (1 - b2 ** t)
        tensor -= self.lr * m_hat / (v_hat ** 0.5 + self.eps)
|
984,903 | 14f85f19b4ea29b2d115e567336d8948733f40b3 | # coding=gb18030
import serial  # pyserial: reads the zigbee receiver on a COM port
import time
import pymysql  # MySQL client driver
log = 0  # counts how many lines have been received so far
s = serial.Serial('com1', 9600, timeout=2)  # open and configure the serial port
db = pymysql.connect("localhost", "root", "jinhao", "zigbees")  # open the database
cursor = db.cursor()
# Read lines from the serial port forever and insert each one into MySQL.
while True:
    localtime = time.asctime(time.localtime(time.time()))  # human-readable local time
    n = s.readline()  # one line from the serial port (bytes)
    log += 1  # one more line received
    data_pre = str(n)  # NOTE(review): str() of bytes yields "b'...'" on Python 3
    data = data_pre[2:]  # strips the leading "b'" of that repr; trailing "'" remains — confirm intended
    local_time = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())  # formatted local time
    print(data)
    # NOTE(review): interpolating raw serial data into SQL is injection-prone;
    # prefer cursor.execute("INSERT ... VALUES(%s)", (data,))
    sql = "INSERT INTO building(BUILDING_ID)VALUES('%s')" % (
        data)  # store into the database
    cursor.execute(sql)
    db.commit()
# Unreachable: the loop above never exits, so these cleanup calls never run.
cursor.close()
db.close()
|
984,904 | ac01abb0a9bae4a0b39160899dd90df1ebdec13c | from typing import List
class Solution:
    @staticmethod
    def dump(v):
        """Debug helper: print a mapping as 'key: value' pairs, skipping 'self'."""
        pairs = [f'{key}: {val}' for key, val in v.items() if key != 'self']
        print(', '.join(pairs))

    def movesToMakeZigzag(self, nums: List[int]) -> int:
        """LeetCode 1144: minimum number of unit decrements so that nums is
        zigzag.  Both parities are simulated greedily on working copies and
        the cheaper scenario wins."""
        if len(nums) <= 1:
            return 0
        arr_a = list(nums)  # scenario A: even indices must be valleys
        arr_b = list(nums)  # scenario B: odd indices must be valleys
        cost_a = 0
        cost_b = 0
        for idx in range(len(nums) - 1):
            if idx % 2 == 0:
                # scenario A: left of the pair must drop below the right
                if arr_a[idx] >= arr_a[idx + 1]:
                    step = arr_a[idx] - arr_a[idx + 1] + 1
                    arr_a[idx] -= step
                    cost_a += step
                # scenario B: right of the pair must drop below the left
                if arr_b[idx] <= arr_b[idx + 1]:
                    step = arr_b[idx + 1] - arr_b[idx] + 1
                    arr_b[idx + 1] -= step
                    cost_b += step
            else:
                if arr_a[idx] <= arr_a[idx + 1]:
                    step = arr_a[idx + 1] - arr_a[idx] + 1
                    arr_a[idx + 1] -= step
                    cost_a += step
                if arr_b[idx] >= arr_b[idx + 1]:
                    step = arr_b[idx] - arr_b[idx + 1] + 1
                    arr_b[idx] -= step
                    cost_b += step
        return min(cost_b, cost_a)
984,905 | 4bb1bb941ac913b2f56437a7460d8090a920c682 | from django.conf.urls import patterns, include, url
from django.views.generic import TemplateView
from django.contrib import admin
admin.autodiscover()
from posts.views import PostArchiveIndexView, PostArchiveMonthView, PostArchiveYearIndex, PostDetailView, PostListView, PostTagListView, sitemaps
from posts.feeds import PostFeed
# URL routes for the blog (Django 1.x `patterns` style; `patterns` and
# string view references were removed in modern Django).
urlpatterns = patterns('',
    url(r'^$', PostListView.as_view(), name='post_list'),
    url(r'^sitemap\.xml$', 'django.contrib.sitemaps.views.sitemap', {'sitemaps': sitemaps}),
    url(r'^atom\.xml$', PostFeed(), name='feed'),
    url(r'^admin/', include(admin.site.urls)),
    url(r'^archive/$', PostArchiveIndexView.as_view(), name='archive_index'),
    url(r'^archive/(?P<year>[0-9]{4})/$', PostArchiveYearIndex.as_view(), name='archive_year'),
    url(r'^archive/(?P<year>[0-9]{4})/(?P<month>[0-9]{,2})/$', PostArchiveMonthView.as_view(), name='archive_month'),
    url(r'^tags/$', PostTagListView.as_view(), {'tag': None}, name='tag_list'),
    url(r'^tag/(?P<tag>[a-zA-Z\-0-9]+)$', PostTagListView.as_view(), name='tag_detail'),
    # Slug catch-all: must stay last so it doesn't shadow the routes above.
    url(r'^(?P<slug>[a-zA-Z\-0-9]+)/$', PostDetailView.as_view(), name='post_detail'),
)
# Custom error handlers rendering static templates.
handler500 = TemplateView.as_view(template_name="500.html")
handler403 = TemplateView.as_view(template_name="403.html")
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
urlpatterns += staticfiles_urlpatterns()
|
984,906 | 13c56f189d43308af1c39aa501e224c35ac67bc6 | #!/usr/bin/python3
import json
import sys
import getopt
import requests
import hashlib
import json
import os
import gnupg
class backmeup():
    """Client for a simple HTTP backup service.

    Talks to three server endpoints: job.php (list backup jobs),
    haveyougot.php (offer file hashes) and submit.php (upload contents).
    Authentication is HTTP basic with (name, key).
    """
    def __init__(self,serverurl,name,key):
        self.endpoint=serverurl
        self.key=key
        self.name=name
        # GPG context rooted in the working directory, used to encrypt uploads.
        self.gpg = gnupg.GPG(gnupghome='.')
    def filebackup(self,runref,jobreference,filepath):
        """Back up a single file: offer its hash, upload it if asked."""
        #print(filepath)
        #this is a two step backup process, first we send the hash
        #depending on the response to that we might send the file
        #the hash request is going to be blocking
        with open(filepath,"rb") as f:
            filehash=self.hashfile(f)
            #print(filehash)
            payload={'jobreference':jobreference,'fileinfo':json.dumps({filepath:filehash})}
            r=requests.post(self.endpoint+'/haveyougot.php',data=payload,auth=(self.name,self.key))
            print(r.text)
            serverrequest=r.json()
            #the server responds to say it has the file with the matching hash, or it needs it
            # NOTE(review): indexing ineed[0] raises IndexError when the server
            # needs nothing; also f is at EOF after hashing (no seek(0) here,
            # unlike folderbackup), so this upload likely sends no data.
            if serverrequest['ineed'][0]==filepath:
                #we check if the server needs the file we just offered
                r=requests.post(self.endpoint+'/submit.php',params={'runref':runref,'fileinfo':json.dumps({filepath:filehash})},files={filehash:f})
                print(runref,r.text)
    def folderbackup(self,runref,jobreference,folderpath):
        """Back up a directory tree, one os.walk directory per request."""
        #we need to walk recursively down a tree of files, checking hashes
        #there is an opportunity here to do multiple requests at a time rather than one at a time
        #but a simplistic call to filebackup should work
        #having built up our array of hashes we send it all to the server
        #that will respond confirming what files it wants
        #and also what files it thinks we may have deleted
        #there may be some opportunity to divide up huge directory trees into multiple requests
        #perhaps a folder at a time
        for root, subdirs, files in os.walk(folderpath):
            print(root)
            filelist={}
            # Hash every file in this directory for the batched offer.
            for filename in files:
                with open(os.path.join(root,filename),"rb") as f:
                    filehash=self.hashfile(f)
                    filelist[os.path.join(root,filename)]=filehash
            payload={'jobreference':jobreference,'fileinfo':json.dumps(filelist)}
            try:
                r=requests.post(self.endpoint+'/haveyougot.php',data=payload,auth=(self.name,self.key))
                serverrequest=r.json()
                # Upload (encrypted) only the files the server asked for.
                for neededfile in serverrequest['ineed']:
                    with open(neededfile,"rb") as f:
                        #rehash the file (it may have changed)
                        filehash=self.hashfile(f)
                        f.seek(0)
                        r=requests.post(self.endpoint+'/submit.php',params={'runref':runref,'jobline':jobreference,'path':root,'fileinfo':json.dumps({neededfile:filehash})},files={filehash:self.encrypt(f)})
                        print(runref,neededfile,r.text)
            except Exception as e:
                # Best-effort: a failed directory is logged and skipped.
                print("timed out probably, we should make a note of it and come back later. %s" % e)
        #now have a second go at anything that failed
        #this should deal with transient timeouts
        #if this should fail then we should do something more about it.
    def encrypt(self,filehandle):
        """Return the ASCII-armored symmetric encryption of the file's contents."""
        #this can return a string, or a file handle
        #return filehandle
        # NOTE(review): the symmetric passphrase is hard-coded — replace
        # with configuration before real use.
        g=self.gpg.encrypt_file(filehandle, recipients=None, symmetric='AES256', passphrase='12345', armor=True)
        return str(g)
    def mysqlbackup(self,username,password,database,host,runref):
        # Placeholder — not implemented yet.
        print("backing up a mysql database")
    def mongodbbackup(self,username,password,database,host):
        # Placeholder — not implemented yet.
        print("backing up a mongodb database")
    def getjobs(self):
        """Fetch the list of backup jobs assigned to this client."""
        print("getting job list from server")
        #get request to endpoint/job should provide us with our order of business
        #the key is important, it tells the backup server who we are
        #the backup server may check our IP address is valid for the key
        r=requests.get(self.endpoint + '/job.php',auth=(self.name,self.key))
        print(r.text)
        return r.json()
    def hashfile(self,afile):
        """Return the SHA-256 hex digest of an open binary file, read in chunks."""
        #md5 would work just fine, however this is a bit more robust
        blocksize=65536
        hasher=hashlib.sha256()
        buf=afile.read(blocksize)
        while len(buf)>0:
            hasher.update(buf)
            buf=afile.read(blocksize)
        return hasher.hexdigest()
def main(argv):
    """Parse command-line options and run all folder backup jobs.

    Usage: backmeup.py -s <backup server URL> -n <server name> -k <backup key>
    """
    serverurl = ''
    key = ''
    name = ''
    try:
        opts, args = getopt.getopt(argv, "s:k:n:", ["server=", "name=", "key="])
    except getopt.GetoptError:
        print('backmeup.py -s <backup server URL> -n <server name> -k <backup key>')
        sys.exit(2)
    for opt, arg in opts:
        if opt in ('-s', '--server'):
            serverurl = arg
        elif opt in ('-k', '--key'):
            key = arg
        elif opt in ('-n', '--name'):
            # BUG FIX: this branch previously re-tested '-k', so the server
            # name given with -n was silently discarded.  The declared long
            # options (--server/--name/--key) are now honoured as well.
            name = arg
    # Create a backup client against that endpoint and fetch its job list.
    backup = backmeup(serverurl, name, key)
    joblist = backup.getjobs()
    # Prioritise individual file backups (currently disabled):
    # for (job, filepath) in joblist['File'].items():
    #     backup.filebackup(joblist['runref'], job, filepath)
    for (job, filepath) in joblist['Folder'].items():
        backup.folderbackup(joblist['runref'], job, filepath)
    # TODO: databases and incremental backups (delta generation).
if __name__ == "__main__":
    # Strip the program name before option parsing.
    main(sys.argv[1:])
|
984,907 | 9987e9604e56c44e3c8e066deecaddebea62eec5 | ST_s_photon_ID_ = [ 1.02619, 1.02181, 1.02215, 1.0292, 1.02855, 1.0189]
ST_s_electron_ID_ = [ 1.00389, 1.00624, 1.00682, 1.00309, 1.15322, 1.00433]
ST_s_electron_Reco_ = [ 1.00092, 1.00116, 1.00123, 1.00024, 1.00076, 1.00091]
ST_s_electron_HLT_ = [ 1.00106, 1.00154, 1.00316, 1.00059, 1.00218, 1.00151]
ST_s_muon_ID_ = [ 1.00139, 1.00036, 1.00002, 1.00207, 1.00128, 1.00566]
ST_s_muon_iso_ = [ 1.00044, 1.00018, 1.00024, 1.00053, 1.0005, 1.00004]
ST_s_muon_HLT_ = [ 1.0013, 1.0002, 1.00012, 1.00096, 1.00047, 1.00054]
ST_s_JEC_ = [ 1.09743, 1.23049, 1.08525, 1.33281, 1, 1]
ST_s_JER_ = [ 1.07704, 1.2382, 1.08525, 1.33396, 1.12764, 1.66595]
ST_tW_photon_ID_ = [ 1.02312, 1.02598, 1.02196, 1.02555, 1.02449, 1.01414]
ST_tW_electron_ID_ = [ 1.0185, 1.0029, 1.04778, 1.00463, 1.00625, 1.00582]
ST_tW_electron_Reco_ = [ 1.00181, 1.00067, 1.00258, 1.0009, 1.00116, 1.00147]
ST_tW_electron_HLT_ = [ 1.0031, 1.00069, 1.00232, 1.00119, 1.00187, 1.00145]
ST_tW_muon_ID_ = [ 1.00069, 1.00167, 1.0008, 1.00141, 1.00164, 1.00134]
ST_tW_muon_iso_ = [ 1.00033, 1.00047, 1.00022, 1.00037, 1.00039, 1.00034]
ST_tW_muon_HLT_ = [ 1.00082, 1.00074, 1.00108, 1.00005, 1.00308, 1.00062]
ST_tW_JEC_ = [ 1.03555, 1.27839, 1.03718, 1.13621, 1, 1]
ST_tW_JER_ = [ 1.00604, 1.27287, 1.03718, 1.04556, 1, 1.27792]
ST_t_photon_ID_ = [ 1.02287, 1.02855, 1.02607, 1.02407, 1.027, 1.02427]
ST_t_electron_ID_ = [ 1.01133, 1.00262, 1.02105, 1.00828, 1.03739, 1.00354]
ST_t_electron_Reco_ = [ 1.00121, 1.00074, 1.00158, 1.00087, 1.00248, 1.0012]
ST_t_electron_HLT_ = [ 1.00118, 1.00083, 1.0014, 1.00117, 1.00214, 1.00093]
ST_t_muon_ID_ = [ 1.00088, 1.0012, 1.001, 1.00115, 1.0012, 1.00101]
ST_t_muon_iso_ = [ 1.00033, 1.00038, 1.00034, 1.00037, 1.00038, 1.00036]
ST_t_muon_HLT_ = [ 1.00042, 1.00034, 1.00115, 1.00043, 1.00076, 1.00049]
ST_t_JEC_ = [ 1.09476, 1.10973, 1.02631, 1.11301, 1, 1.04309]
ST_t_JER_ = [ 1.04664, 1.0661, 1.0439, 1.08202, 1.10266, 1.07135]
ST_tbarW_photon_ID_ = [ 1.02385, 1.02098, 1.02587, 1.02178, 1.02926, 1.00285]
ST_tbarW_electron_ID_ = [ 1.02101, 1.00568, 1.00779, 1.0054, 1.00323, 1]
ST_tbarW_electron_Reco_ = [ 1.00166, 1.00299, 1.00227, 1.00122, 1.00151, 1]
ST_tbarW_electron_HLT_ = [ 1.0017, 1.00167, 1.00223, 1.00125, 1.00036, 1]
ST_tbarW_muon_ID_ = [ 1.00096, 1.00123, 1.00158, 1.001, 1.00056, 1.00054]
ST_tbarW_muon_iso_ = [ 1.00034, 1.00033, 1.00034, 1.00029, 1.00045, 1.00042]
ST_tbarW_muon_HLT_ = [ 1.00061, 1.00042, 1.00065, 1.00059, 1.00312, 1.0002]
ST_tbarW_JEC_ = [ 1.05605, 1.34382, 1.04869, 1.18378, 1, 1]
ST_tbarW_JER_ = [ 1.01769, 1.20415, 1.15839, 1.18378, 1, 1]
ST_tbar_photon_ID_ = [ 1.02554, 1.02365, 1.02334, 1.02797, 1.01923, 1.01935]
ST_tbar_electron_ID_ = [ 1.00355, 1.01425, 1.00377, 1.01398, 1.00771, 1.00311]
ST_tbar_electron_Reco_ = [ 1.00063, 1.00063, 1.00105, 1.00051, 1.00165, 1.00083]
ST_tbar_electron_HLT_ = [ 1.00102, 1.00086, 1.00102, 1.00074, 1.0021, 1.00101]
ST_tbar_muon_ID_ = [ 1.00122, 1.00081, 1.00075, 1.00103, 1.00026, 1.00091]
ST_tbar_muon_iso_ = [ 1.00035, 1.00031, 1.00036, 1.00041, 1.00024, 1.00034]
ST_tbar_muon_HLT_ = [ 1.00048, 1.00028, 1.0007, 1.00037, 1.00121, 1.00052]
ST_tbar_JEC_ = [ 1.09861, 1.14493, 1, 1.05431, 1, 1.02478]
ST_tbar_JER_ = [ 1.0534, 1.15411, 1.08014, 1.10176, 1, 1.10231]
WGJets_photon_ID_ = [ 1.02573, 1.02413, 1.02642, 1.02621, 1.0283, 1.02396]
WGJets_electron_ID_ = [ 1.01705, 1.00809, 1.02123, 1.01208, 1.02238, 1.03055]
WGJets_electron_Reco_ = [ 1.00162, 1.00104, 1.00177, 1.00167, 1.00195, 1.00214]
WGJets_electron_HLT_ = [ 1.0019, 1.00097, 1.00184, 1.00157, 1.00262, 1.00227]
WGJets_muon_ID_ = [ 1.00125, 1.00133, 1.00118, 1.00112, 1.00096, 1.001]
WGJets_muon_iso_ = [ 1.0004, 1.00039, 1.00041, 1.00038, 1.0004, 1.00039]
WGJets_muon_HLT_ = [ 1.00134, 1.00058, 1.00124, 1.00105, 1.00246, 1.00135]
WGJets_JEC_ = [ 1.13353, 1.36398, 1.01633, 1.24542, 1.01603, 1.11854]
WGJets_JER_ = [ 1.07828, 1.22412, 1.00981, 1.11251, 1.05686, 1.04101]
WW_photon_ID_ = [ 1.02149, 1.02318, 1.01988, 1.02668, 1.04611, 1.02659]
WW_electron_ID_ = [ 1.0272, 1.0019, 1.00519, 1.00545, 1.00259, 1.00365]
WW_electron_Reco_ = [ 1.00181, 1.00044, 1.00124, 1.00185, 1.00253, 1.00053]
WW_electron_HLT_ = [ 1.00139, 1.00045, 1.0012, 1.00172, 1.0009, 1.00093]
WW_muon_ID_ = [ 1.0006, 1.00147, 1.00166, 1.00045, 1.00049, 1.00155]
WW_muon_iso_ = [ 1.00033, 1.00053, 1.00048, 1.00039, 1.00031, 1.00051]
WW_muon_HLT_ = [ 1.00097, 1.00073, 1.00091, 1.00159, 1.0023, 1.00049]
WW_JEC_ = [ 1.15787, 1.31049, 1, 1.18749, 1, 1.09765]
WW_JER_ = [ 1.10633, 1.27939, 1.09382, 1.17256, 1, 1.10605]
WZ_photon_ID_ = [ 1.02043, 1.03109, 1.0178, 1.03577, 1.013, 1.01289]
WZ_electron_ID_ = [ 1.00444, 1, 1.00314, 1, 1.01685, 1]
WZ_electron_Reco_ = [ 1.0017, 1, 1.00087, 1, 1.00452, 1]
WZ_electron_HLT_ = [ 1.00126, 1, 1.00059, 1, 1.00289, 1]
WZ_muon_ID_ = [ 1.00097, 1.00166, 1.00065, 1.00062, 1, 1.00218]
WZ_muon_iso_ = [ 1.0004, 1.00056, 1.0005, 1.00054, 1, 1.00081]
WZ_muon_HLT_ = [ 1.00123, 1.00096, 1.00244, 1.00283, 1, 1.00041]
WZ_JEC_ = [ 1.20519, 1.24634, 1, 1.21058, 1, 1]
WZ_JER_ = [ 1.14178, 1.14057, 1.2616, 1.41831, 1, 1]
ZG_photon_ID_ = [ 1.02309, 1.02691, 1.02759, 1.02582, 1.02913, 1.02066]
ZG_electron_ID_ = [ 1.00665, 1.01215, 1.03034, 1.01505, 1.01021, 1.03342]
ZG_electron_Reco_ = [ 1.00156, 1.0011, 1.00227, 1.00163, 1.00329, 1.00228]
ZG_electron_HLT_ = [ 1.00154, 1.00146, 1.00276, 1.00183, 1.00324, 1.0018]
ZG_muon_ID_ = [ 1.00111, 1.00092, 1.00079, 1.00118, 1.00041, 1.00086]
ZG_muon_iso_ = [ 1.00037, 1.00029, 1.00035, 1.00033, 1.00018, 1.00035]
ZG_muon_HLT_ = [ 1.00106, 1.0003, 1.00097, 1.00043, 1.00068, 1.00059]
ZG_JEC_ = [ 1.18661, 1.38383, 1.05342, 1.19531, 1.03141, 1.09075]
ZG_JER_ = [ 1.12594, 1.26956, 1.07035, 1.15242, 1.11084, 1.11706]
ZZ_photon_ID_ = [ 1.00973, 1.01289, 1, 1.03082, 1, 1]
ZZ_electron_ID_ = [ 1.01751, 1.00655, 1, 1, 1, 1]
ZZ_electron_Reco_ = [ 1.00433, 1.00148, 1, 1, 1, 1]
ZZ_electron_HLT_ = [ 1.0059, 1.00213, 1, 1, 1, 1]
ZZ_muon_ID_ = [ 1, 1, 1, 1.00164, 1, 1]
ZZ_muon_iso_ = [ 1, 1, 1, 1.00038, 1, 1]
ZZ_muon_HLT_ = [ 1, 1, 1, 1.00035, 1, 1]
ZZ_JEC_ = [ 1, 1.56408, 1, 1.50432, 1, 1]
ZZ_JER_ = [ 2, 2.02776, 1, 1.50432, 1, 1]
TTG_photon_ID_ = [ 1.02605, 1.0228, 1.02587, 1.02424, 1.0295, 1.02981]
TTG_electron_ID_ = [ 1.01897, 1.01258, 1.01781, 1.01022, 1.01164, 1.01112]
TTG_electron_Reco_ = [ 1.00155, 1.0011, 1.00161, 1.00129, 1.0016, 1.00138]
TTG_electron_HLT_ = [ 1.00158, 1.00125, 1.00175, 1.00136, 1.00245, 1.00163]
TTG_muon_ID_ = [ 1.00116, 1.00156, 1.00105, 1.00143, 1.00088, 1.0009]
TTG_muon_iso_ = [ 1.00038, 1.00042, 1.00037, 1.00043, 1.00036, 1.00036]
TTG_muon_HLT_ = [ 1.00087, 1.0008, 1.00108, 1.0007, 1.00158, 1.00073]
TTG_JEC_ = [ 1.10575, 1.10964, 1.02212, 1.11083, 1.00515, 1.00559]
TTG_JER_ = [ 1.04607, 1.17184, 1.02131, 1.10331, 1.0269, 1.04881]
|
984,908 | b131e2a01d99dd8280afc8706997eec689e7b19b |
possibles = [(x,y,z) for x in xrange(500) for y in xrange(500) for z in xrange(500) if x**2 + y**2 == z**2 and x+y+z == 1000]
answer = reduce(lambda x,y: x*y, possibles[0])
print answer
|
984,909 | 820c81f962b15a750d2578eaf8c1846590a25e76 | #MenuTitle: Storm
# -*- coding: utf-8 -*-
__doc__="""
Storm
"""
import GlyphsApp
from NaNGFGraphikshared import *
from NaNGFNoise import *
from NaNFilter import NaNFilter
class Storm(NaNFilter):
    """Glyphs filter: replaces a glyph's black areas with a storm of square
    components whose sizes are modulated by 2-D simplex noise."""
    # Sampling grid spacing and component size bounds (font units).
    gridsize = 30
    minsize, maxsize = 30, 80
    def setup(self):
        # One master rectangle component; each placement scales an instance of it.
        self.stormcomponent = CreateShapeComponent(self.font, self.maxsize, self.maxsize, "rectangle", "StormShape")
    def drawStorm(self, x,y,layer):
        """Place one scaled storm component at (x, y) on *layer*."""
        freq = 0.005  # noise frequency: lower = smoother size variation
        noiz = snoise2(x*freq, y*freq, 3)
        # Map noise in [-1, 1] to a component size in [minsize, maxsize].
        size = noiseMap( noiz, self.minsize, self.maxsize )
        if size <= 4:
            # Skip components too small to be visible.
            return
        stormcomp = GSComponent(self.stormcomponent)
        # Scale relative to the master component's maxsize edge length.
        scale = (float(1)/self.maxsize)*size
        stormcomp.scale = (scale, scale)
        stormcomp.position = (x,y)
        layer.components.append(stormcomp)
    def processLayer(self, thislayer, params):
        # Stamp components over the black areas, then drop the original outlines.
        operateOnBlackAtInterval(thislayer, self.drawStorm, self.gridsize)
        ClearPaths(thislayer)
# Instantiating runs the filter (NaNFilter drives execution on construction —
# presumably; confirm against NaNFilter's __init__).
Storm()
|
984,910 | 629f6b13052e1d4dfbbdd61440e59ec80f2dfd4a |
# unit test case
import unittest
import collections
import sys
# Parse the GEDCOM file, collecting every NAME and DATE value verbatim
# (including trailing newlines).  NOTE(review): f is never closed — a
# `with open(...)` block would be safer.
f = open("AdamFarid.ged", "r")
nameArr = []
birthDate = []
for line in f:
    names = line.split(" ")
    if "NAME" in line:
        # Tokens after "<level> NAME" form the personal name.
        ans = " ".join(names[2:])
        nameArr.append(ans)
    if "DATE" in line:
        dates = " ".join(names[2:])
        birthDate.append(dates)
# Occurrence counts used by the uniqueness checks below.
checkNames = collections.Counter(nameArr)
checkDates = collections.Counter(birthDate)
def is_unique_name(name):
    """Return True if *name* (a string or list of string fragments) occurs
    exactly once among the parsed GEDCOM names.

    Fix: the None guard now runs *before* "".join — the original joined
    first, so passing None raised TypeError instead of returning False
    (and the `== None` check after the join could never be true).
    """
    if name is None:
        return False
    name = "".join(name)
    if name not in nameArr:
        return False
    # Present at least once; unique iff counted exactly once.
    return checkNames[name] == 1
def is_unique_date(date=None):
    """Return True if *date* (a string or list of string fragments) occurs
    exactly once among the parsed GEDCOM dates."""
    if date is None:
        return False
    joined = "".join(date)
    # Unique means present and counted exactly once.
    return joined in birthDate and checkDates[joined] == 1
class TestStringMethods(unittest.TestCase):
    """Unit tests for the uniqueness helpers, tied to the contents of
    AdamFarid.ged (the fixture names/dates below must exist in that file)."""
    def test_unique(self):
        # A name that appears exactly once.
        self.assertTrue(is_unique_name(['Deysi /Geronimo/\n']))
    def test_1_item(self):
        self.assertTrue(is_unique_name(['Xavier /Diaz/\n']))
    def test_no_item(self):
        # No argument means no date, which can never be unique.
        self.assertFalse(is_unique_date())
    def test_date(self):
        self.assertTrue(is_unique_date(['20 MAR 1997\n']))
    def test_multi_date(self):
        # A date shared by several records is not unique.
        self.assertFalse(is_unique_date(['27 AUG 2000\n']))
if __name__ == '__main__':
    unittest.main()
|
984,911 | 2ca66949858005a655113026421fc9a074593469 | '''
Collection of little pythonic tools. Might need to organize this better in the future.
@author: danielhernandez
'''
import datetime
import string
def addDateTime(s = ""):
    """
    Adds the current date and time at the end of a string.
    Inputs:
        s -> string
    Output:
        S = s_Dyymmdd_HHMM
    """
    date = str(datetime.datetime.now())
    # Keep only the digits of the timestamp.  Fix: the original used
    # string.maketrans('','') plus the two-argument str.translate deletion
    # form, which is Python-2-only and raises on Python 3; this filter is
    # equivalent and works on both.
    date = "".join(ch for ch in date if ch.isdigit())
    return s + '_D' + date[2:8] + '_' + date[8:12]
if __name__ == "__main__":
    # Smoke demo.  Fix: the originals were Python 2 print *statements*,
    # which are syntax errors on Python 3.
    print(addDateTime('Hello'))
    print(addDateTime())
|
984,912 | b9a71becbf1728edca7c6cd703c7e09d09ffeccf | from selenium import webdriver
import time
#------| Code used to login wit user : user and password : password#
def login(driver, user, password):
    """Log into the site through its login modal using Selenium.

    Returns 1 on success, 0 on any failure (element not found, etc.).
    """
    try:
        # Open the login modal.
        element = driver.find_element_by_xpath("//li[@id='login']//a[text()='Login']")
        element.click()
        time.sleep(2)  # crude wait for the modal to appear
        # Fill in the credentials.
        element = driver.find_element_by_xpath("//input[@id='username-modal']")
        element.clear()
        element.send_keys(user)
        element = driver.find_element_by_xpath("//input[@id='password-modal']")
        element.clear()
        element.send_keys(password)
        # Submit the form.
        element = driver.find_element_by_xpath("//button[@onclick='return login()']")
        element.click()
        time.sleep(5)  # crude wait for the login round-trip
        return 1
    except Exception:
        # Fix: was a bare `except:`, which also swallowed SystemExit and
        # KeyboardInterrupt; now only genuine runtime errors map to 0.
        return 0
984,913 | bd0010d35d3476de727f0ed6ffd4ffcf93d435cb | # Script to run shiftx2 on the test set of protein structures
# Can't use batch mode because they may have different pH and temperatures
import nmrstarlib
import collections
from os import system
import pandas as pd
from pathlib import Path
path = Path("/Users/aph516/GitHub/NAPS/")
# Get table which links BMRBs to PDBs
testset_df = pd.read_table(path/"data/testset/testset.txt", header=None,
                           names=["ID","PDB","BMRB","Resolution","Length"])
testset_df.index = testset_df["ID"]
# Get pH and temperature from the BMRB files
dir_starfiles = nmrstarlib.read_files(path/"data/testset/CS-corrected-testset-addPDBresno")
starfiles_list = list(dir_starfiles)
# Maps each star file's "data" id to its (pH, temperature) conditions.
pars = {}
for sf in starfiles_list:
    for k in sf.keys():
        if isinstance(sf[k], collections.OrderedDict):
            if sf[k]["Saveframe_category"] == "sample_conditions":
                #print sf["data"]
                # Set default values
                pH = 6
                temp = 298
                for x in sf[k]["loop_0"][1]:
                    if x["Variable_type"] == "pH" or x["Variable_type"] == "pH*":
                        pH = x["Variable_value"]
                        #print " pH:", pH
                    elif x["Variable_type"] == "temperature":
                        temp = x["Variable_value"]
                        #print " temp:", temp
                pars[sf["data"]] = (pH, temp)
# Run ShiftX2 on each PDB file
# NOTE(review): despite the header comments about shiftx2, this actually
# invokes sparta+ and writes into sparta+_predictions — confirm which
# predictor is intended.  Paths containing spaces would also break this
# unquoted shell command.
for i in testset_df["ID"]:
    print(testset_df.loc[i, "ID"], testset_df.loc[i, "PDB"], testset_df.loc[i, "BMRB"])
    pdbfile = (path/"data/testset/PDB-testset-addHydrogens"/
               (testset_df.loc[i, "ID"]+"_"+testset_df.loc[i, "PDB"]+".pdbH"))
    outfile = (path/"data/testset/sparta+_predictions"/
               (testset_df.loc[i, "ID"]+"_"+testset_df.loc[i, "PDB"]+".cs"))
    system("sparta+ -in %s -out %s" % (pdbfile, outfile))
984,914 | 3390c7f1c4dc46cb37ed1ace3bd34f37105b6ae6 | class Node(object):
def __init__(self, v):
self.val = v
self.next = None
def play(x, p2=False):
x = [int(i) for i in x]
if p2:
x += [i for i in range(10, 1_000_000 + 1)]
s = Node(x[0])
d = {s.val: s}
for i, v in enumerate(x[1:]):
d[v] = Node(v)
d[x[i]].next = d[v]
d[x[-1]].next = d[x[0]]
for i in range(10_000_000 if p2 else 100):
a, b, c = s.next, s.next.next, s.next.next.next
dest = s.val - 1 or max(x)
while dest in [a.val, b.val, c.val]:
dest -= 1
if dest == 0:
dest = max(x)
break
s.next = c.next
c.next = d[dest].next
d[dest].next = a
s = s.next
return d
x = "583976241"  # puzzle input: the initial cup labels
# Part 1: 100 moves on nine cups; answer is the label sequence after cup 1.
d = play(x)
s = d[1]
ans = ""
while s.next.val != 1:
    ans += str(s.next.val)
    s = s.next
print(ans)
# Part 2: a million cups, ten million moves; answer is the product of the
# two cup labels immediately clockwise of cup 1.
d = play(x, True)
print(d[1].next.val * d[1].next.next.val)
|
984,915 | 9f36f1e2e8a2e1fcafb7fa6e5682d1aa182a78f7 | #Solving a 4x4 gridworld using Monte Carlo Every-visit On Policy method (epsilon-greedy).
#The policy pi_(a|s) takes 4 actions equiprobably: Left, Right, Up, Down. Thus pi_(a|s) = 0.25 for all states
#The agent is transferred deterministically, hence p(s',r|s,a) = 1 for all states
import random
import numpy as np
from numpy.random import choice
#GRID:
#-------------
#| 0 1 2 3|
#| 4 5 6 7|
#| 8 9 10 11|
#|12 13 14 15|
#-------------
#With states 0,15 being the terminal states
#provide a reward of -1 to all the transitions.
#provide a reward of -1 to all the transitions.
r = -1
actions = ['<', '>', '^', 'v']
iterations = int(input("Please enter the number of iterations to be made: "))
#Assign Q(s,a) randomly, except the terminal states as they have 0 values for all actions.
#Each row represents state(1-16) with state1 and state16 being terminal states. Each column represents action in the order: left, right, up, down
epsilon = 0.01
Q_sa = np.random.rand(16,4)
Q_sa[0,:] = np.zeros(4)
Q_sa[15,:] = np.zeros(4)
# FIX: the original used [[0.25]*4]*16, which aliases ONE row list sixteen
# times — any in-place mutation of a row would have corrupted every state.
# A comprehension gives each state an independent row.
pi_as = [[0.25] * 4 for _ in range(16)]  # 16x4: P(action | state) for l,r,u,d
pi_as[0] = [0]*4  # These are terminal states. Thus no actions are taken here, and the probabilities are all zeros.
pi_as[15] = [0]*4
# 4x4 display grid of the greedy action glyph per cell (rows built independently).
grid_actions = [['', '', '', ''] for _ in range(4)]
#=======================================================================================================================================
#Generate Trajectories based on pi_(a|s) given state and the action took in that state
def generate_episode(state, action, pi_as):
    """Roll out one episode from (state, action) under policy pi_as.

    Returns the trajectory as a list of [state, action] pairs; the reward
    is -1 on every transition so it is not stored.  Movement is
    deterministic; stepping off the 4x4 grid leaves the agent in place.
    """
    # action -> (state delta, set of off-grid results that mean "hit a wall")
    moves = {
        1: (-1, {3, 7, 11}),        # left: wrapping onto the previous row
        2: (1, {4, 8, 12}),         # right: wrapping onto the next row
        3: (-4, {-3, -2, -1}),      # up: leaving the top edge
        4: (4, {16, 17, 18}),       # down: leaving the bottom edge
    }
    trajectory = []
    while state != 0 and state != 15:
        delta, walls = moves[action]
        nxt = state + delta
        if nxt in walls:
            nxt = state  # bounced off a wall: stay put
        trajectory.append([state, action])
        # Sample the next action from the policy of the *current* state.
        action = choice([1, 2, 3, 4], p=pi_as[state])
        state = nxt
    return trajectory
#=====================================================================================================================================
#Generalised Policy Iteration:
for _ in range(0, iterations):
    # Exploring starts: pick a random non-terminal state and a policy action.
    state = np.random.randint(1,15) #Explore all the state-action pairs with equal probability
    action = choice([1,2,3,4], p=pi_as[state])
    value = 0.0
    j = 0.0  # number of (every-visit) occurrences of the chosen pair so far
    trajectory = generate_episode(state, action, pi_as) #Generate a sample trajectory
    G = 0  # running return; every transition is rewarded -1
    for i in range(1, len(trajectory)):
        G += -1
        if([state, action] == trajectory[i]):
            j += 1.0
            # Incremental mean of the returns observed for this pair.
            # NOTE(review): G here is the return *from the episode start*,
            # not from the visit — confirm this is the intended estimator.
            value += (1.0/j)*(G - value) #Update the value of the state-action pair incrementally
    Q_sa[state, action-1] = value
    # Epsilon-greedy policy improvement for the sampled state only.
    A = np.argmax(Q_sa[state,:])
    probability_actions = [epsilon/len(actions)]*4
    probability_actions[A] = 1 - epsilon + epsilon/len(actions)
    pi_as[state] = probability_actions
#=======================================================================================================================================
#Show visually what actions to be taken in the gridworld
# Terminal states 0 and 15 are left blank; the first branch shifts by one so
# states 1-3 land in row 0, columns 1-3.
for i in range(0, len(pi_as)):
    if i < 3:
        grid_actions[0][i+1] = actions[pi_as[i+1].index(max(pi_as[i+1]))]
    if 4 <= i < 8:
        grid_actions[1][i-4] = actions[pi_as[i].index(max(pi_as[i]))]
    if 8<= i < 12:
        grid_actions[2][i-8] = actions[pi_as[i].index(max(pi_as[i]))]
    if 12<= i < 15:
        grid_actions[3][i-12] = actions[pi_as[i].index(max(pi_as[i]))]
print("The actions to be taken after " + str(iterations) + " iterations are:")
print(np.asarray(grid_actions))
#=========================================================================================================================================
|
984,916 | 9ee3228f924269e0884da81532e00b6d60a8eace | from django.contrib.auth.models import User
from django.db.models import Q
from django.shortcuts import render, redirect, get_object_or_404
from django.views.generic import ListView, DetailView
from django.contrib.auth.decorators import login_required
from django.contrib.auth.mixins import LoginRequiredMixin
from .models import Profile, Relation
from .forms import ProfileForm
@login_required
def my_profile_view(request):
    """Show and edit the logged-in user's own profile."""
    profile = Profile.objects.get(user=request.user)
    # Bound to POST/FILES on submission, otherwise an unbound edit form.
    form = ProfileForm(request.POST or None, request.FILES or None, instance=profile)
    confirm = False  # tells the template whether a save just succeeded
    if request.method == 'POST':
        if form.is_valid():
            form.save()
            confirm = True
    context = {
        'profile': profile,
        'form': form,
        'confirm': confirm,
    }
    return render(request, 'profiles/my_profile.html', context)
@login_required
def invite_received_views(request):
    """List the profiles that sent the current user a friend invitation."""
    profile = Profile.objects.get(user=request.user)
    pending = Relation.objects.invitations_received(profile)
    senders = [invite.sender for invite in pending]
    context = {
        'invites': senders,
        'is_empty': len(senders) == 0,
    }
    return render(request, 'profiles/my_invites.html', context)
@login_required
def accept_invitation(request):
    """Accept a pending friend invitation addressed to the current user."""
    if request.method == 'POST':
        pk = request.POST.get('profile_pk')  # the sender's Profile id
        user = request.user.pk
        s_profile = Profile.objects.get(id=pk)
        r_profile = Profile.objects.get(user_id=user)
        relation = get_object_or_404(Relation, sender=s_profile, receiver=r_profile)
        # Only a still-pending ('send') relation can transition to accepted.
        if relation.status == 'send':
            relation.status = 'accepted'
            relation.save()
    return redirect('profiles:invites')
@login_required
def reject_invitation(request):
    """Reject (delete) a pending invitation sent to the current user."""
    if request.method == 'POST':
        pk = request.POST.get('profile_pk')  # the sender's Profile id
        user = request.user.pk
        # BUG FIX: the receiver profile must be looked up by its *user* id
        # (as accept_invitation does); the old `Profile.objects.get(id=user)`
        # only worked when a profile's pk happened to equal its user's pk.
        r_profile = Profile.objects.get(user_id=user)
        s_profile = Profile.objects.get(id=pk)
        relation = get_object_or_404(Relation, sender=s_profile, receiver=r_profile)
        relation.delete()
    return redirect('profiles:invites')
@login_required
def profile_list_view(request):
    """List every profile visible to the current user."""
    user = request.user
    qs = Profile.objects.get_all_profiles(user)
    context = {
        'profiles': qs
    }
    return render(request, 'profiles/profiles_list.html', context)
@login_required
def invite_profile_list_view(request):
    """List only the profiles the current user may still invite."""
    user = request.user
    qs = Profile.objects.get_all_profiles_to_invite(user)
    context = {
        'profiles': qs
    }
    return render(request, 'profiles/profiles_list.html', context)
class ProfileListView(LoginRequiredMixin, ListView):
    """All other profiles, annotated with who the current user already has a
    relation with (in either direction), for the template's button logic."""
    template_name = 'profiles/profiles_list.html'
    def get_queryset(self):
        return Profile.objects.get_all_profiles(self.request.user)
    def get_context_data(self, **kwargs):
        context = super().get_context_data(**kwargs)
        profile = Profile.objects.get(user=self.request.user)
        # Relations where the current user is the sender / the receiver.
        rel_r = Relation.objects.filter(sender=profile)
        rel_s = Relation.objects.filter(receiver=profile)
        rel_receiver = []  # users the current user has invited
        rel_sender = []    # users who have invited the current user
        for item in rel_r:
            rel_receiver.append(item.receiver.user)
        for item in rel_s:
            rel_sender.append(item.sender.user)
        context["rel_receiver"] = rel_receiver
        context["rel_sender"] = rel_sender
        context['is_empty'] = False
        if len(self.get_queryset()) == 0:
            context['is_empty'] = True
        return context
class ProfileDetail(LoginRequiredMixin, DetailView):
    """Single profile page, plus relation info and the profile's posts."""
    model = Profile
    template_name = 'profiles/detail.html'
    # def get_object(self, **kwargs):
    #     pk = self.kwargs.get('pk')
    #     profile = Profile.objects.get(id=pk)
    #     return profile
    def get_context_data(self, **kwargs):
        context = super().get_context_data(**kwargs)
        profile = Profile.objects.get(user=self.request.user)
        # Same relation annotations as ProfileListView, for the action buttons.
        rel_r = Relation.objects.filter(sender=profile)
        rel_s = Relation.objects.filter(receiver=profile)
        rel_receiver = []
        rel_sender = []
        for item in rel_r:
            rel_receiver.append(item.receiver.user)
        for item in rel_s:
            rel_sender.append(item.sender.user)
        context["rel_receiver"] = rel_receiver
        context["rel_sender"] = rel_sender
        # The viewed profile's posts (not the viewer's).
        context['posts'] = self.get_object().get_all_author_post()
        context['len_post'] = True if len(self.get_object().get_all_author_post()) > 0 else False
        return context
@login_required
def send_invitation(request):
    """POST handler: create a 'send' relation from the requester to the chosen profile."""
    if request.method != 'POST':
        return redirect('profiles:invites')
    sender = Profile.objects.get(user=request.user)
    receiver = Profile.objects.get(user_id=request.POST.get('profile_pk'))
    Relation.objects.create(sender=sender, receiver=receiver, status='send')
    # Bounce back to whatever page the invite button was on.
    return redirect(request.META.get('HTTP_REFERER'))
@login_required
def delete_from_friend(request):
    """POST handler: remove an accepted relation between the requester and a profile."""
    if request.method != 'POST':
        return redirect('profiles:invites')
    me = Profile.objects.get(user=request.user)
    other = Profile.objects.get(id=request.POST.get('profile_id'))
    # The relation may have been created in either direction.
    Relation.objects.get(
        Q(sender=me, receiver=other, status='accepted')
        | Q(sender=other, receiver=me, status='accepted')
    ).delete()
    return redirect(request.META.get('HTTP_REFERER'))
|
984,917 | ce1b55c0815b50c65df06204b739eab275e6c243 | import os
import time
from lambdatrader.constants import M5
# Duration constants in seconds.  A month counts as 30 days and a year as
# 365 days (calendar-naive, matching the seconds()/candlesticks() helpers).
MINUTE_SECONDS = 60
HOUR_SECONDS = 60 * MINUTE_SECONDS
DAY_SECONDS = 24 * HOUR_SECONDS
WEEK_SECONDS = 7 * DAY_SECONDS
MONTH_SECONDS = 30 * DAY_SECONDS
YEAR_SECONDS = 365 * DAY_SECONDS
def pair_from(first_currency, second_currency):
    """Join two currency codes into an underscore-delimited pair name."""
    return '_'.join((first_currency, second_currency))
def pair_first(pair):
    """Return the base (first) currency of an underscore-delimited pair."""
    sep = pair.index('_')  # raises ValueError when no underscore, as before
    return pair[:sep]
def pair_second(pair):
    """Return the quote (second) currency of an underscore-delimited pair."""
    sep = pair.index('_')  # first underscore only, as before
    return pair[sep + 1:]
def get_now_timestamp():
    """Current Unix timestamp in seconds (float)."""
    now = time.time()
    return now
def date_floor(date, period=M5):
    """Round a Unix timestamp down to the start of its candlestick period."""
    ts = int(date)
    return ts - ts % period.seconds()
def date_ceil(date, period=M5):
    """Round a Unix timestamp up to the end of its candlestick period."""
    ts = int(date)
    step = period.seconds()  # assumed side-effect free; called once
    return ts - ts % step + step
def get_one_day_seconds():
    """Number of seconds in one day (86400)."""
    return 60 * 60 * 24
def seconds(years=0, months=0, weeks=0, days=0, hours=0, minutes=0, seconds=0):
    """Convert a mixed duration to a total number of seconds.

    A month counts as 30 days and a year as 365 days (see module constants).
    """
    total = seconds
    total += minutes * MINUTE_SECONDS
    total += hours * HOUR_SECONDS
    total += days * DAY_SECONDS
    total += weeks * WEEK_SECONDS
    total += months * MONTH_SECONDS
    total += years * YEAR_SECONDS
    return total
def candlesticks(years=0, months=0, weeks=0, days=0, hours=0, minutes=0, _seconds=0, period=M5):
    """Number of whole candlesticks of `period` that fit in the given duration."""
    total = seconds(years=years, months=months, weeks=weeks,
                    days=days, hours=hours, minutes=minutes, seconds=_seconds)
    return int(total // period.seconds())
def get_n_day_seconds(n):
    """Number of seconds in `n` days."""
    return n * get_one_day_seconds()
def get_project_directory():
    """Path three directory levels above this module (the project root)."""
    here = os.path.dirname(__file__)
    return os.path.dirname(os.path.dirname(here))
def running_in_docker():
    """True when executing inside a Docker container (marker-file check)."""
    marker = '/.dockerenv'
    return os.path.isfile(marker)
|
984,918 | 9e98c3c99d6ed690edfce6674403a9e6c44ffb3d | from django.http import HttpResponse
import json
from django.db import connections
from datetime import date, datetime,time
import math
def dictfetchall(cursor):
desc = cursor.description
return [
dict(zip([col[0] for col in desc], row))
for row in cursor.fetchall()
]
def json_serial(obj):
    """JSON serializer for objects not serializable by default json code.

    Supports datetime, date and time values via their ISO-8601 form;
    raises TypeError for anything else, as json.dumps expects.
    """
    # The original tuple listed `date` twice; `time` (imported at the top of
    # the file) was clearly the intended third member.
    if isinstance(obj, (datetime, date, time)):
        return obj.isoformat()
    raise TypeError("Type %s not serializable" % type(obj))
def index(request):
    """Return, as JSON, the type-2 activities of place ``pno`` ordered by popularity.

    Expects a GET parameter ``pno``.  NOTE(review): a missing parameter
    raises KeyError (HTTP 500) -- confirm callers always send it.
    """
    pno = request.GET['pno']
    raw = {}
    cursor = connections['klook'].cursor()
    # Parameterized query (%s placeholder + params tuple) -- safe against SQL injection.
    cursor.execute("select activities.ano,atitle1,anum,ascore,aprice,aprice_old,ahour,adate,aurl,ptitle from activities,place,activity_type where activities.pno = %s and activities.pno = place.pno and activities.ano = activity_type.ano and tno = 2 order by anum desc",(pno,))
    raw['list'] = dictfetchall(cursor)
    cursor.close()
    # Dates are not JSON-serializable; convert them to ISO strings up front.
    for item in raw['list']:
        item['adate'] = json_serial(item['adate'])
    response = HttpResponse(json.dumps(raw), content_type="application/json")
    return response
984,919 | 664fabd9ed01de098edd3b9faf81d80081d3be95 | # -*- coding: utf-8 -*-
"""
Created on Fri Aug 4 21:07:45 2017
@author: Renuka L K
This is the code to implement Logistic Regression algorithm from scratch in Python
This code expects the user to give data inputs
"""
import math
def Sigmoid(z):
    """Logistic function: 1 / (1 + e^-z)."""
    return 1.0 / (1.0 + math.exp(-z))
def Hypothesis(theta, X):
    """Model prediction: sigmoid of the dot product theta . X."""
    z = sum(theta[i] * X[i] for i in range(len(theta)))
    return Sigmoid(z)
def Cost_Function_Gradient(X, Y, theta, alpha, j):
    """Scaled partial derivative of the logistic cost w.r.t. theta[j].

    Returns (alpha / m) * sum_i (h(x_i) - y_i) * x_ij, ready to be
    subtracted from theta[j] in a gradient-descent step.
    """
    m = len(Y)
    total = 0
    for i in range(m):
        total += (Hypothesis(theta, X[i]) - Y[i]) * X[i][j]
    return (float(alpha) / float(m)) * total
def Gradient_Descent(X, Y, theta, alpha):
    """One full gradient-descent step; returns the updated theta vector."""
    return [theta[j] - Cost_Function_Gradient(X, Y, theta, alpha, j)
            for j in range(len(theta))]
def Cost_Function(X, Y, theta):
    """Average logistic log-loss of `theta` over the labelled set (X, Y)."""
    m = len(Y)
    log_likelihood = 0
    for i in range(m):
        hi = Hypothesis(theta, X[i])
        # Only one of the two cross-entropy terms is non-zero per 0/1 label.
        if Y[i] == 1:
            log_likelihood += Y[i] * math.log(hi)
        elif Y[i] == 0:
            log_likelihood += (1 - Y[i]) * math.log(1 - hi)
    return (-1 / m) * log_likelihood
def Logistic_Regression(X, Y, theta, alpha, it):
    """Fit logistic-regression weights by running `it` gradient-descent steps."""
    for step in range(it):
        theta = Gradient_Descent(X, Y, theta, alpha)
        if step % 100 == 0:
            # Evaluated every 100 steps; the result is discarded, matching
            # the original (which never printed or stored it either).
            Cost_Function(X, Y, theta)
    return theta
|
984,920 | 2731006c1c1e258ebac1789360fc61903f6a297d | #!/usr/bin/env python
# script to grab mate after subsetting somehow
# Matthew J. Neave 27.7.2017 <matthewjneave1@gmail.com>
# library imports
import sys
import argparse
from Bio.SeqIO.QualityIO import FastqGeneralIterator # requires Biopython
# use argparse to grab command line arguments
parser = argparse.ArgumentParser("get opposite read pair from subsetted file")
parser.add_argument('subset_reads', type = str,
                    nargs = "?", help = "fastq file containing subsetted reads")
parser.add_argument('opposite_reads', type = str,
                    nargs = "?", help = "fastq file containing opposite reads to extract")
parser.add_argument('output', type = str,
                    nargs = "?", help = "name for the output file")
args = parser.parse_args()

output_handle = open(args.output, "w")

# NOTE(review): Python 2 print statements -- this script predates Python 3.
print "Scanning subsetted file and building list of names..."

# First pass: collect the read IDs present in the subsetted FASTQ.
subset_ids = set()
for title, seq, qual in FastqGeneralIterator(open(args.subset_reads)):
    # The read name is the first whitespace-delimited token of the header.
    subset_ids.add(title.split()[0])

print "Processing opposite file"

# Second pass: write out mates whose name matched the subset.
for title, seq, qual in FastqGeneralIterator(open(args.opposite_reads)):
    name = title.split()[0]
    if name in subset_ids:
        # paired reads
        subset_ids.remove(name) # saves a little memory
        output_handle.write("@%s\n%s\n+\n%s\n" % (title, seq, qual))

output_handle.close()
print "done"
|
984,921 | 79126310a0e13af08d00c572d8c2d02bf19e5ac4 | import os
import time
import sqlite3
from pprint import pformat, pprint
from dir_file import GenericFile
from markup import Frame
import global_user_settings as settings
class Loader():
    """Collects Frame objects and loads their datapoints into a .db3 file.

    When `aggregate_duplicate_varnames` is true, raw rows are summed per
    (varname, dt_string) pair into the final table after loading.
    """

    def __init__(self, f, aggregate_duplicate_varnames=False):
        self.queue = []          # Frames pending load
        self.add(f)
        self._aggregate_duplicate_varnames = aggregate_duplicate_varnames

    def add(self, f):
        """Append another Frame to the load queue (Frame instances only)."""
        if isinstance(f, Frame):
            self.queue.append(f)
        else:
            raise NameError("Argument is not a Frame() class instance")

    def get_filename(self):
        """Derive the target .db3 filename from the queued frames' source file.

        All queued frames must come from the same spreadsheet file.
        """
        openfile = None
        filenames = [q.data_area._sheet._file.base for q in self.queue]
        if len(set(filenames)) > 1:
            raise NameError("Frames belong to different files")
        else:
            fn = filenames[0] + ".db3"
            openfile = GenericFile(fn, dir_type="db3",
                                   must_exist=False).fullname
        return(openfile)

    def execute(self):
        """Load every queued frame, then optionally aggregate duplicates."""
        filename = self.get_filename()
        conn, cursor = start_db(filename)
        for frame in self.queue:
            load_frame(frame, conn, self._aggregate_duplicate_varnames)
        #SN_1 Loader.execute() - aggregation added
        if self._aggregate_duplicate_varnames:
            # Rebuild the final table as per-(varname, date) sums of raw rows.
            cursor.execute('''DROP TABLE IF EXISTS %s''' % settings.DB_TABLE)
            line = '''\
CREATE TABLE %s as
SELECT varname, dt_string, sum(value) as value
FROM %s
GROUP BY varname, dt_string
ORDER by 1, 3''' % (settings.DB_TABLE, settings.DB_TABLE_RAW)
            cursor.execute(line)
            conn.commit()
        conn.close()

    #SN_1 Loader Class - properties added
    @property
    def aggregate_duplicate_varnames(self):
        # Read access to the aggregation flag.
        return self._aggregate_duplicate_varnames

    @aggregate_duplicate_varnames.setter
    def aggregate_duplicate_varnames(self, value):
        self._aggregate_duplicate_varnames = value
def create_table(filename):
    """(Re)create the raw and final datapoint tables in database `filename`."""
    conn = sqlite3.connect(filename)
    cur = conn.cursor()
    # Raw landing table, then the final (possibly aggregated) table.
    #SN_1 create final table for loading if _aggregate_duplicate_varnames==False
    for table in (settings.DB_TABLE_RAW, settings.DB_TABLE):
        cur.execute('''DROP TABLE IF EXISTS %s''' % table)
        cur.execute('''CREATE TABLE %s (
            varname VARCHAR(256) NOT NULL,
            dt_string DATE NOT NULL,
            value FLOAT NOT NULL)''' % table)
    conn.commit()
    conn.close()
def start_db(filename):
    """Create/refresh the schema and return an open (connection, cursor) pair."""
    #SN_1 must create tables, even if database file already exists,
    # it may not contain some tables
    create_table(filename)
    conn = sqlite3.connect(filename)
    cur = conn.cursor()
    # Empty both the raw and the final table before loading.
    cur.execute("delete from %s" % settings.DB_TABLE_RAW)
    cur.execute("delete from %s" % settings.DB_TABLE)
    conn.commit()
    return (conn, cur)
def load_frame(frame, conn, aggregate_duplicate_varnames):
    """Insert every (varname, date, value) datapoint of `frame` into the DB.

    Rows go to the raw table when aggregation is enabled (they are summed
    later by Loader.execute), otherwise straight into the final table.
    """
    cursor = conn.cursor()
    print("Inserting data...")
    #SN_1 choose target table using aggregate_duplicate_varnames flag
    target_table_name = settings.DB_TABLE_RAW if aggregate_duplicate_varnames \
        else settings.DB_TABLE
    # The table name cannot be bound as a parameter, but the values can.
    # The original interpolated values into the SQL text, which breaks on
    # quotes in varname/date strings and is an injection hazard.
    line = ("INSERT INTO {0} (varname, dt_string, value) "
            "VALUES (?, ?, ?)").format(target_table_name)
    for vn, dt, x in frame.data:
        print(line, (vn, dt, x))
        cursor.execute(line, (vn, dt, x))
    # Pause to allow sqlite to finish its job
    DELAY = 1
    print('Inserting complete. Pausing for %s sec...' % DELAY)
    time.sleep(DELAY)
# EP_1 view_file default arg changed from table = 'temp_table'
def view_file(fn, table = settings.DB_TABLE):
    """Print every row of `table` in database file `fn` (debug helper).

    NOTE(review): the default is evaluated once at import time; later
    changes to settings.DB_TABLE will not affect it.
    """
    dbf = GenericFile(fn, dir_type='db3').fullname
    conn = sqlite3.connect(dbf)
    c = conn.cursor()
    c.execute('''SELECT * from %s''' % table)
    z = c.fetchall()
    conn.close()
    print ("Filename:", fn)
    print ("Datapoints:", len(z))
    pprint (z)
|
984,922 | d6a0cbc166b59f20aea78c59194cbb3ada6ea827 | # -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>). All Rights Reserved
# d$
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY, without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import xmlrpclib
import socket
import ConfigParser
import optparse
import sys
import thread
import threading
import os
import time
import pickle
import base64
import socket
import random
HOST=''
PORT=
DB=''
USER=''
PASS=''
url ='http://%s:%d/xmlrpc/' % (HOST,PORT)
common_proxy = xmlrpclib.ServerProxy(url+'common')
object_proxy = xmlrpclib.ServerProxy(url+'object')
wizard_proxy = xmlrpclib.ServerProxy(url+'wizard')
#### MODULE LIST TO BE INSTALLED
#### LOGIN IN
uid = common_proxy.login(DB,USER,PASS)
### INSTALL MODULES
from time import sleep
invo_ids = object_proxy.execute(DB,uid,PASS,'account.invoice','search',[('invoice_line.product_id','=', 431)])
read = object_proxy.execute(DB,uid,PASS,'account.invoice','read',invo_ids[0],[])
print "read",read
for i in range(10000):
object_proxy.execute(DB,uid,PASS,'account.invoice','copy',random.randrange(38,43),{'date_invoice':'2012-03-%s'%random.randrange(23,30)})
#~ object_proxy.execute(DB,uid,PASS,'ir.rule','create',rules)
#~
#~ company_id = object_proxy.execute(DB,uid,PASS,'res.company','search',[('name','=',i)])
#~
|
984,923 | 59bf3317f36689ca8263a31fe23919485cf51af2 | import numpy as np
from scipy.interpolate import interp2d
import cv2
import os
from data_helper import Load_Data
def BiInterpn(x, y, img, H, W, C, img_mask=None):
    """Bilinearly sample `img` at float coordinates (x, y) onto an (H, W, C) grid.

    Args:
        x, y: (H, W) float arrays giving, for each output pixel, the source
            coordinate to sample.
        img: source image of shape (img_h, img_w, C).
        H, W, C: output image dimensions.
        img_mask: optional (img_h, img_w) bool validity mask; samples whose
            four neighbours are not all valid are dropped.

    Returns:
        (uint8 image of shape (H, W, C), (H, W) bool mask of filled pixels).
    """
    # `np.int` was removed in NumPy 1.24; plain `int` is the equivalent alias.
    tmp_img = np.zeros((H, W, C), dtype=int)
    # remove all the points that fall outside the source image
    img_h, img_w = img.shape[:2]
    mask = (x < 0) | (x >= img_w - 1) | (y < 0) | (y >= img_h - 1)
    x = x[~mask]
    y = y[~mask]
    x_coor, y_coor = np.meshgrid(range(W), range(H))
    x_coor = x_coor[~mask]
    y_coor = y_coor[~mask]
    # the four neighbouring grid points of each sample
    x_1 = np.floor(x + 1).astype(int)  # floor(x + 1) to avoid ceil(x) == floor(x)
    x_0 = np.floor(x).astype(int)
    y_1 = np.floor(y + 1).astype(int)
    y_0 = np.floor(y).astype(int)
    if img_mask is not None:  # need to exclude coordinates in the empty region
        assert img.shape[:2] == img_mask.shape[:2]
        mask = (~img_mask[y_0, x_0]) | (~img_mask[y_0, x_1]) | (~img_mask[y_1, x_0]) | (~img_mask[y_1, x_1])
        x = x[~mask]
        y = y[~mask]
        x_coor = x_coor[~mask]
        y_coor = y_coor[~mask]
        x_0 = x_0[~mask]
        x_1 = x_1[~mask]
        y_0 = y_0[~mask]
        y_1 = y_1[~mask]
    # bilinear weights of the four neighbouring pixels
    a = (x - x_0) * (y - y_0)
    b = (x_1 - x) * (y - y_0)
    c = (x - x_0) * (y_1 - y)
    d = (x_1 - x) * (y_1 - y)
    tmp_img[y_coor, x_coor, :] = (a[..., None] * img[y_1, x_1, :]) \
        + (b[..., None] * img[y_1, x_0, :]) + (c[..., None] * img[y_0, x_1, :]) + (d[..., None] * img[y_0, x_0, :])
    new_mask = np.zeros((H, W), dtype=bool)
    new_mask[y_coor, x_coor] = True
    return tmp_img.astype('uint8'), new_mask
"""
cylindrical projection by inverse mapping:
First, find out inverse transform to map new coordinate to original image.
This guarantees that each point after transformed can find out its correspoding point on original
image. And it's easy to interpolate using original data on grid.
Args:
img: image to be applied cylindrical projection
focal: focal length of image
Returns:
new_img: transformed image
mask: black region mask on new image
"""
def inverse_cylindrical_projection(img, focal, Interpolate = True):
H, W, Ch = img.shape
## establish inverse coordinate w.r.t original img ##
x_center = float(W-1)/2
y_center = float(H-1)/2
x = np.arange(W, dtype = np.float32) - x_center
y = np.arange(H, dtype = np.float32) - y_center
x = focal * np.tan(x / focal)
r = np.sqrt(x ** 2 + focal ** 2)
y = (y / focal)[:, np.newaxis] @ r[np.newaxis, :]
x += x_center
y += y_center
if not Interpolate :
x = np.round(np.tile(x, H).ravel()).astype(int)
y = np.round(y.ravel()).astype(int)
mask = ((x >= W) | (x < 0)) | ((y >= H) | (y < 0))
tmp_img = np.zeros((H, W, Ch), dtype = np.int)
x_coor, y_coor = np.meshgrid(range(W), range(H))
x_min = np.amin(x_coor.ravel()[~mask])
x_max = np.amax(x_coor.ravel()[~mask])
new_W = x_max - x_min + 1
tmp_img[y_coor.ravel()[~mask], x_coor.ravel()[~mask], :] = img[y[~mask], x[~mask], :]
new_img = np.zeros((H, new_W, Ch), dtype = np.int)
new_img = tmp_img[:, x_min:x_max+1, :]
mask = mask.reshape(H, W)[:, x_min:x_max + 1]
else:
img, mask = BiInterpn(np.tile(x, [H, 1]), y, img, H, W, Ch)
y_coor, x_coor = np.mgrid[range(H), range(W)]
x_min = np.amin(x_coor[mask])
x_max = np.amax(x_coor[mask])
new_W = x_max - x_min + 1
y_min = np.amin(y_coor[mask])
y_max = np.amax(y_coor[mask])
new_H = y_max - y_min + 1
new_img = np.zeros((new_H, new_W, Ch), dtype = np.int)
new_img = img[y_min:y_max + 1, x_min:x_max+1, :]
new_mask = mask[y_min:y_max + 1, x_min:x_max + 1]
return new_img.astype('uint8'), new_mask
### establish projection coordinate###
def cylindrical_projection(img, focal):
    """Forward cylindrical projection: splat each source pixel onto the cylinder.

    Each source pixel is added into the four surrounding destination cells;
    `interp_mask` counts the contributions per cell so overlaps are averaged.
    Returns (float image, bool mask of written pixels).
    """
    H, W, Ch = img.shape
    x_center = float(W - 1) / 2
    y_center = float(H - 1) / 2
    ### first we establish coordinate ###
    x = np.arange(W, dtype = np.float32) - x_center
    y = np.arange(H, dtype = np.float32) - y_center
    r = 1 / np.sqrt(x ** 2 + focal ** 2)
    h = y[:, np.newaxis] @ r[np.newaxis, :]
    x = focal * np.arctan(x / focal)
    y = focal * h
    #x += x_center
    x -= np.amin(x)  # shift so the leftmost projected column is 0
    y += y_center
    #new_img = interpolate(img, np.tile(x, H), y.ravel()).reshape(H, W, Ch).astype(np.uint8)
    new_W = (np.amax(np.ceil(x)) - np.amin(np.floor(x)) + 1).astype(int)
    new_img = np.zeros((H, new_W, Ch), dtype=int)
    new_img_mask = np.zeros((H, new_W), dtype='bool')
    interp_mask = np.zeros((H, new_W), dtype=np.float32)
    # The four (dy, dx) offsets enumerate the neighbouring destination cells.
    gray_code = np.array([[0, 0], [0, 1], [1, 0], [1, 1]])
    for i in range(4):
        new_img[np.floor(y + gray_code[i, 0]).astype(int), np.floor(np.tile(x, (H, 1)) + gray_code[i, 1]).astype(int), :] += img
        new_img_mask[np.floor(y + gray_code[i, 0]).astype(int), np.floor(np.tile(x, (H, 1))+gray_code[i, 1]).astype(int)] = True
        interp_mask[np.floor(y + gray_code[i, 0]).astype(int), np.floor(np.tile(x, (H, 1)) + gray_code[i, 1]).astype(int)] += 1.0
    """
    new_img[np.floor(y).astype(int), np.ceil(np.tile(x, (H, 1))).astype(int), :] += img
    new_img_mask[np.floor(y).astype(int), np.ceil(np.tile(x, (H, 1))).astype(int)] = True
    interp_mask[np.floor(y).astype(int), np.ceil(np.tile(x, (H, 1))).astype(int)] += 1.0
    new_img[np.ceil(y).astype(int), np.floor(np.tile(x, (H, 1))).astype(int), :] += img
    new_img_mask[np.ceil(y).astype(int), np.floor(np.tile(x, (H, 1))).astype(int)] = True
    interp_mask[np.ceil(y).astype(int), np.floor(np.tile(x, (H, 1))).astype(int)] += 1.0
    new_img[np.ceil(y).astype(int), np.ceil(np.tile(x, (H, 1))).astype(int), :] += img
    new_img_mask[np.ceil(y).astype(int), np.ceil(np.tile(x, (H, 1))).astype(int)] = True
    interp_mask[np.ceil(y).astype(int), np.ceil(np.tile(x, (H, 1))).astype(int)] += 1.0
    """
    # Average cells that received multiple contributions.
    interp_mask[interp_mask < 1.0] = 1.0
    new_img = new_img / interp_mask[..., None]
    #interpolate(new_img, interp_mask)
    return new_img, new_img_mask
if __name__ == '__main__':
    # Demo: load the riverside photo set and its focal lengths, project image 1
    # onto a cylinder, write the result and display before/after until ESC.
    imgs, fs = Load_Data('./photos/riverside', './photos/riverside/f.txt', '.JPG')
    img_proj = []
    img_proj_mask = []
    fs *= 8  # NOTE(review): focal scale factor -- presumably unit conversion; confirm
    """
    for i in range(imgs.shape[0]):
        new_img, new_img_mask = cylindrical_projection(imgs[i], fs[i])
        img_proj.append(new_img)
        img_proj_mask.append(new_img_mask)
    """
    img, mask= inverse_cylindrical_projection(imgs[1], fs[1])
    cv2.imshow('old', imgs[1])
    #cv2.imwrite('cy.jpg', (img_proj[1]*img_proj_mask[1][:,:,None]).astype('uint8'))
    # Zero out the invalid (black) border region before saving.
    cv2.imwrite('cy.jpg', (img * mask[:,:,None]).astype('uint8'))
    cv2.imshow('new', (img[:,:,:]).astype('uint8'))
    k = cv2.waitKey(0)
    if k == 27: # wait for ESC key to exit
        cv2.destroyAllWindows()
|
984,924 | 9dd68ace2406cbd33461f9b5d27b918e9fb19cd3 | import sys
from PyQt5.QtWidgets import QApplication, QWidget, QLabel
#from PyQt5.QtGui import QIcon
#from PyQt5.QtCore import pyqtSlot
from SimpleGame.Scene import Scene
from SimpleGame.Sprite import Sprite
from SimpleGame.Block import Block
from SimpleGame.Background import Background
from enum import Enum
import random
class States(Enum):
    """Finite states of a platformer character's movement state machine."""
    FALLING = 0
    WALK = 1
    JUMP = 2
    STAND = 3
class Facing():
    """Direction constants; also used as animation-cycle row indices.

    NOTE(review): unlike States this is a plain class, so Facing.RIGHT is a
    bare int -- code elsewhere compares it against literal 0/1 interchangeably.
    """
    RIGHT = 0
    LEFT = 1
class Camera():
    """Side-scrolling camera: shifts the scene offset to keep a sprite in view."""

    def __init__(self, thisScene):
        # NOTE(review): reads Scene *class* attributes, not the instance size.
        self.viewWidth = Scene.width
        self.viewHeight = Scene.height
        self.scene = thisScene

    def follow(self, sprite):
        """Select the sprite the camera tracks."""
        self.sprite = sprite

    def update(self):
        # Scroll when the tracked sprite nears the view edges, clamping the
        # sprite's world x to the level bounds (300 .. 26*120).
        if self.sprite.drawX < 250:
            if self.sprite.x < 300:
                self.sprite.x = 300
            else:
                self.scene.offsetX -= 6
        if self.sprite.drawX > (350):
            if self.sprite.x > (26*120):
                self.sprite.x = (26*120)
            else:
                self.scene.offsetX += 6
class Ground(Block):
    """A 30-tile strip of ground blocks along the bottom of the level."""

    def __init__(self, thisScene):
        # 30 columns, each the same single-image cell (shared inner list,
        # exactly as the original [[...]] * 30 produced).
        tiles = [["sprites/ground.png"]] * 30
        super().__init__(thisScene, tiles, 120, 40)
        self.x = 0
        self.y = 500

    def update(self, offsetX, offsetY):
        # No extra behaviour; kept for symmetry with the other game objects.
        super().update(offsetX, offsetY)
class Character(Sprite):
    """Base platformer character: FALLING / STAND / WALK / JUMP state machine.

    Subclasses override walkBehavior() and jumpBehavior().
    """

    def __init__(self, thisScene, sprite, x, y):
        self.state = States.FALLING
        self.facing = Facing.RIGHT
        super().__init__(thisScene, sprite, x, y)
        self.stateTimer = 0
        self.dy = 7
        self.setBoundAction(Scene.CONTINUE)

    def update(self, offsetX=0, offsetY=0):
        if self.state == States.FALLING:
            # Land as soon as we touch the ground.
            if self.scene.ground.collidesWith(self):
                self.standBehavior()
        elif self.state == States.STAND or self.state == States.WALK:
            if self.scene.keysDown[Scene.K_SPACE]:
                self.jumpBehavior()
            elif self.scene.keysDown[Scene.K_RIGHT] or self.scene.keysDown[Scene.K_LEFT]:
                self.walkBehavior()
            elif self.state == States.WALK:
                # BUGFIX: this check previously lived in a separate
                # `elif self.state == States.WALK` branch that was
                # unreachable (WALK was already consumed by the branch
                # above), so characters never stopped on key release.
                if self.facing == Facing.RIGHT and self.scene.keysDown[Scene.K_RIGHT] is None:
                    self.standBehavior()
                if self.facing == Facing.LEFT and self.scene.keysDown[Scene.K_LEFT] is None:
                    self.standBehavior()
        elif self.state == States.JUMP:
            # Rise until the jump timer expires, then flip dy and fall.
            self.stateTimer = self.stateTimer - 1
            if self.stateTimer < 1:
                self.dy = self.dy * -1
                self.state = States.FALLING
        super().update(offsetX, offsetY)

    def standBehavior(self):
        """Stop all motion and freeze the animation."""
        self.dy = 0
        self.dx = 0
        self.state = States.STAND
        self.pauseAnimation()

    # override this in your Character
    def jumpBehavior(self):
        pass

    # override this in your Character
    def walkBehavior(self):
        pass
# sean mahady -- sheet 250x100, 50x50 cells
class Sean(Character):
    """Sean: 4 px/frame walker with a 25-frame jump; wraps at the bounds."""

    def __init__(self, thisScene):
        super().__init__(thisScene, "sprites/sean_sheet.png", 250, 100)
        self.x = 75
        self.y = 100
        self.dy = 10
        self.boundAction = Scene.WRAP
        self.loadAnimation(250, 100, 50, 50)  # slice the sheet into cells
        self.generateAnimationCycles()        # one animation cycle per row
        self.setAnimationSpeed(10)            # animation timer interval
        self.playAnimation()                  # start the timer
        self.state = States.FALLING           # spawn mid-air

    def walkBehavior(self):
        """Walk in the direction of the held arrow key (cycle 0=right, 1=left)."""
        if self.scene.keysDown[Scene.K_RIGHT]:
            direction, speed = 0, 4
        elif self.scene.keysDown[Scene.K_LEFT]:
            direction, speed = 1, -4
        else:
            return
        self.facing = direction
        self.setCurrentCycle(direction)
        self.playAnimation()
        self.dx = speed
        self.state = States.WALK

    def jumpBehavior(self):
        """Launch a 25-frame hop rising at -4 px/frame."""
        self.stateTimer = 25
        self.dy = -4
        self.state = States.JUMP
#Ethan's Character
# 125 x 123 - non-animated
# 1600x800 sheet version
# 400 x 200 - animation cells
class CheesePuff(Character):
    """Ethan's character: 8 px/frame walker, 23-frame jump."""

    def __init__(self, thisScene):
        super().__init__(thisScene, "sprites/ethan_sheet.png", 400, 200)
        self.x = 150
        self.y = 150
        #loadAnimation(sheetX, sheetY, cellX, cellY)
        self.loadAnimation(400, 200, 100, 100)
        self.generateAnimationCycles()
        self.setAnimationSpeed(30)
        self.playAnimation()
        self.dx = 8
        self.dy = 9
        self.state = States.FALLING

    def update(self, offsetX, offsetY):
        super().update(offsetX, offsetY)

    def walkBehavior(self):
        # Cycle 0 is the right-facing sheet row, cycle 1 the left-facing row.
        if self.scene.keysDown[Scene.K_RIGHT]:
            self.facing = 0
            self.setCurrentCycle(0)
            self.playAnimation()
            self.dx = 8
            self.state = States.WALK
        elif self.scene.keysDown[Scene.K_LEFT]:
            self.facing = 1
            self.setCurrentCycle(1)
            self.playAnimation()
            self.dx = -8
            self.state = States.WALK

    def jumpBehavior(self):
        # 23 frames of rising at -6 px/frame; Character.update then flips dy.
        self.stateTimer = 23
        self.dy = -6
        self.state = States.JUMP
#Henry's Character
# 75 x 75
#Sheet : 176 x 192 (sliced as 88 x 96 cells below)
class RickAstley(Character):
    """Henry's character: 9 px/frame walker, 20-frame jump, wraps at bounds."""

    def __init__(self, thisScene):
        super().__init__(thisScene, "sprites/henry_sheet.png", 176, 192)
        self.dx = 9
        self.x = 50
        self.y = 50
        self.dy = 9
        #loadAnimation(sheetX, sheetY, cellX, cellY)
        self.loadAnimation(176, 192, 88, 96)
        self.generateAnimationCycles()
        self.setAnimationSpeed(30)
        self.playAnimation()
        self.boundAction = Scene.WRAP
        self.state = States.FALLING

    def walkBehavior(self):
        # Cycle 0 = right-facing row, cycle 1 = left-facing row.
        if self.scene.keysDown[Scene.K_RIGHT]:
            self.facing = 0
            self.setCurrentCycle(0)
            self.playAnimation()
            self.dx = 9
            self.state = States.WALK
        elif self.scene.keysDown[Scene.K_LEFT]:
            self.facing = 1
            self.setCurrentCycle(1)
            self.playAnimation()
            self.dx = -9
            self.state = States.WALK

    def jumpBehavior(self):
        # 20 frames of rising at -9 px/frame.
        self.stateTimer = 20
        self.dy = -9
        self.state = States.JUMP

    def update(self, offsetX, offsetY):
        super().update(offsetX, offsetY)
# Kamille's Character
# 75 x 79
# Sheet: 320 x 128
# Animation Cell: 64 x 64
class Kamille(Character):
    """Kamille's character: 5 px/frame walker, 21-frame jump, wraps at bounds."""

    def __init__(self, thisScene):
        super().__init__(thisScene, "sprites/kamille_sheet.png", 320, 128)
        self.x = 70
        self.y = 70
        # Adds to the dx/dy inherited from Character/Sprite.
        self.dx += 5
        self.dy += 5
        self.boundAction = Scene.WRAP
        self.loadAnimation(320, 128, 64, 64)
        self.generateAnimationCycles()
        self.setAnimationSpeed(30)
        self.playAnimation()
        self.state = States.FALLING

    def update(self, offsetX, offsetY):
        super().update(offsetX, offsetY)

    def walkBehavior(self):
        # Cycle 0 = right-facing row, cycle 1 = left-facing row.
        if self.scene.keysDown[Scene.K_RIGHT]:
            self.facing = 0
            self.setCurrentCycle(0)
            self.playAnimation()
            self.dx = 5
            self.state = States.WALK
        elif self.scene.keysDown[Scene.K_LEFT]:
            self.facing = 1
            self.setCurrentCycle(1)
            self.playAnimation()
            self.dx = -5
            self.state = States.WALK

    def jumpBehavior(self):
        # 21 frames of rising at -5 px/frame.
        self.stateTimer = 21
        self.dy = -5
        self.state=States.JUMP
#Raphael's Character
# 112 x 67
# Sheet: 1232 x 130
# Animation cell: 112x65
class Raphael(Character):
    """Raphael's character (the scene's `main` player): 5 px/frame walker."""

    def __init__(self, thisScene):
        super().__init__(thisScene, "sprites/raphael_sheet.png", 1232, 130)
        self.x = 65
        self.y = 65
        self.dx = 3
        self.dy = 3
        self.boundAction = Scene.WRAP
        self.loadAnimation(1232, 130, 112, 65)
        self.generateAnimationCycles()
        self.setAnimationSpeed(30)
        self.playAnimation()
        self.state = States.FALLING

    def walkBehavior(self):
        # Cycle 0 = right-facing row, cycle 1 = left-facing row.
        if self.scene.keysDown[Scene.K_RIGHT]:
            self.facing = 0
            self.setCurrentCycle(0)
            self.playAnimation()
            self.dx = 5
            self.state = States.WALK
        elif self.scene.keysDown[Scene.K_LEFT]:
            self.facing = 1
            self.setCurrentCycle(1)
            self.playAnimation()
            self.dx = -5
            self.state = States.WALK

    def update(self, offsetX, offsetY):
        super().update(offsetX, offsetY)

    def jumpBehavior(self):
        # 25 frames of rising at -6 px/frame.
        self.stateTimer = 25
        self.dy = -6
        self.state = States.JUMP
# Nelsun's Character
# 112 x 67; Sheet: 1232 x 130; Animation cell: 112x65
class SourCreamAndOnionPringles(Character):
    """Nelsun's character: 10 px/frame walker, 25-frame jump, wraps at bounds.

    The original left walkBehavior/jumpBehavior as TODO comments, so the
    inherited no-op versions ran and the character could never walk or jump.
    They are implemented here following the pattern of the other characters,
    using the dx/dy speeds the original __init__ already set.
    """

    def __init__(self, thisScene):
        super().__init__(thisScene, "sprites/nelsun_sprite.png", 1232, 130)
        self.x += 60
        self.y += 60
        self.boundAction = Scene.WRAP
        self.loadAnimation(1232, 130, 112, 65)
        self.generateAnimationCycles()
        self.setAnimationSpeed(1000)
        self.playAnimation()
        self.dx = 10
        self.dy = 8
        self.state = States.FALLING

    def update(self, offsetX, offsetY):
        super().update(offsetX, offsetY)

    def walkBehavior(self):
        """Walk toward the held arrow key at 10 px/frame (cycle 0=right, 1=left)."""
        if self.scene.keysDown[Scene.K_RIGHT]:
            self.facing = 0
            self.setCurrentCycle(0)
            self.playAnimation()
            self.dx = 10
            self.state = States.WALK
        elif self.scene.keysDown[Scene.K_LEFT]:
            self.facing = 1
            self.setCurrentCycle(1)
            self.playAnimation()
            self.dx = -10
            self.state = States.WALK

    def jumpBehavior(self):
        """25-frame jump rising at -8 px/frame."""
        self.stateTimer = 25
        self.dy = -8
        self.state = States.JUMP
#Sophie's Character
# 75 x 50
# Sheet: 144x64
# cell: 48x32
class Sophie(Character):
    """Sophie's character: 3 px/frame walker using Facing constants directly."""

    def __init__(self, thisScene):
        super().__init__(thisScene, "sprites/sophie_sheet.png", 144, 64)
        self.x += 75
        self.y += 50
        self.dx = 1
        self.boundAction = Scene.WRAP
        self.loadAnimation(144, 64, 48, 32)
        self.generateAnimationCycles()
        self.setAnimationSpeed(30)
        self.playAnimation()
        self.dx = 1
        self.dy = 6
        self.boundAction = Scene.WRAP
        self.state = States.FALLING

    def walkBehavior(self):
        # Facing constants double as animation-cycle indices.
        if self.scene.keysDown[Scene.K_RIGHT]:
            self.facing = Facing.RIGHT
            self.setCurrentCycle(Facing.RIGHT)
            self.playAnimation()
            self.dx = 3
            self.state = States.WALK
        elif self.scene.keysDown[Scene.K_LEFT]:
            self.facing = Facing.LEFT
            self.setCurrentCycle(Facing.LEFT)
            self.playAnimation()
            self.dx = -3
            self.state = States.WALK

    def jumpBehavior(self):
        # BUGFIX: the original set `self.startTimer = 50` (a typo), leaving
        # stateTimer unset so Character.update ended the jump after one frame.
        self.stateTimer = 50
        self.dy = -6
        self.state = States.JUMP

    def update(self, offsetX, offsetY):
        super().update(offsetX, offsetY)
# Qt requires the QApplication to exist before any widgets are constructed,
# hence its creation here before the remaining widget-owning classes are used.
app = QApplication(sys.argv)
class Spaceship(Sprite):
    """Enemy spawner: patrols the top of the screen, dropping a random enemy
    every 60 frames and driving the enemies it owns."""

    def __init__(self, thisScene):
        super().__init__(thisScene, "sprites/spaceship100.png", 100, 100)
        self.x = 300
        self.y = 100
        self.dx = 6
        self.timer = 60
        self.enemies = []

    def checkBounds(self):
        # Bounce between the horizontal screen edges.
        if self.drawX < 0:
            self.dx = 6
        if self.drawX > 550:
            self.dx = -6
        # Spawn on a 60-frame cadence, then update every live enemy.
        self.timer -= 1
        if self.timer < 1:
            self.timer = 60
            self.enemySpawn()
        for enemy in self.enemies:
            enemy.update(self.scene.offsetX, self.scene.offsetY)

    def enemySpawn(self):
        """Drop one enemy of a random kind at the ship's current position."""
        kind = random.randint(0, 2)
        if kind == 0:
            spawned = Enemy(self.scene, self.x, self.y)
        elif kind == 1:
            spawned = GroundEnemy(self.scene, self.x, self.y)
        else:
            spawned = FlyingEnemy(self.scene, self.x, self.y)
        self.enemies.append(spawned)
# Abstract base class - a base class we intend to inherit in another class
class BaseEnemy(Sprite):
    """Common enemy behaviour: fall, die off-screen, re-decide every 120 frames."""

    def __init__(self, thisScene, file, width, height, x, y):
        super().__init__(thisScene, file, width, height)
        self.setBoundAction(Scene.DIE)
        self.x = x
        self.y = y
        self.dy = 3
        self.timer = 120

    def update(self, offsetX, offsetY):
        # Count down to the next AI decision, then move as usual.
        self.timer -= 1
        if self.timer < 1:
            self.makeDecision()
        super().update(offsetX, offsetY)

    def makeDecision(self):
        """Hook for subclasses: decide the next movement."""
        pass
class Enemy(BaseEnemy):
    """Falling egg: simply keeps dropping and resets its decision timer."""

    def __init__(self, thisScene, x, y):
        super().__init__(thisScene, "sprites/egg3.png", 128, 128, x, y)

    def update(self, offsetX, offsetY):
        super().update(offsetX, offsetY)

    def makeDecision(self):
        # Always the same decision: keep falling for another 120 frames.
        self.dy = 3
        self.timer = 120
class GroundEnemy(BaseEnemy):
    """Snake enemy: falls to the ground, then wanders or chases the player."""

    def __init__(self, thisScene, x, y):
        super().__init__(thisScene, "sprites/snek.png", 100, 100, x, y)
        self.state = States.FALLING

    def update(self, offsetX, offsetY):
        super().update(offsetX, offsetY)
        # Stop falling once resting on the ground.
        if self.state == States.FALLING:
            if self.scene.ground.collidesWith(self):
                self.state = States.STAND
                self.dy = 0

    def makeDecision(self):
        # NOTE(review): sets stateTimer, but BaseEnemy.update counts down
        # `timer`, so this enemy only ever decides once -- confirm intent.
        self.stateTimer = 100
        if self.state == States.STAND:
            decision = random.randint(0,1)
            # decision 0: wander horizontally at a random speed/direction
            if decision == 0:
                self.dx = random.randint(-5, 5)
            # if decision 1 run toward character
            if decision ==1:
                movementX = 0
                movementY = 0
                #find out if the main character is to the left of the enemy, if so move toward them - Kamille
                if self.scene.main.x < self.x:
                    movementX = -1
                # find out if the main character is to the right of the enemy, if so move toward them - Raphael
                if self.scene.main.x > self.x:
                    movementX = 1
                # move at random speed
                self.dx = (random.randint(0,5) * movementX)
class FlyingEnemy(BaseEnemy):
    """Bird enemy: drifts randomly or homes in on the player in both axes."""

    def __init__(self, thisScene, x, y):
        super().__init__(thisScene, "sprites/birb.png", 100, 73, x, y)

    def update(self, offsetX, offsetY):
        super().update(offsetX, offsetY)

    def makeDecision(self):
        self.timer = 100
        decision = random.randint(0,1)
        # decision 0: random drift in both axes
        if decision == 0:
            self.dx = random.randint(-5, 5)
            self.dy = random.randint(-5, 5)
        # decision 1: fly after the main character
        if decision ==1:
            movementX = 0
            movementY = 0
            # find out if the main character is to the left of the enemy
            if self.scene.main.x < self.x:
                movementX = -1
            # find out if the main character is to the right of the enemy - Raphael
            if self.scene.main.x > self.x:
                movementX = 1
            # find out if the main character is underneath the enemy (hint check y) - sophie
            if self.scene.main.y < self.y:
                movementY = -1
            # find out if the main character is above of the enemy - Kamille
            if self.scene.main.y > self.y:
                movementY = 1
            # move at random speed
            self.dx = (random.randint(0,5) * movementX)
            self.dy = (random.randint(0,5) * movementY)
class Game(Scene):
    # Main playable scene: a side-scrolling forest world with parallax
    # background layers, several NPC sprites, a camera and an enemy spawner.
    def __init__(self):
        super().__init__(600,600)
        # The world is much wider than the 600x600 window; the camera scrolls it.
        self.changeBoundSize(4096, 600)
        self.offsetX = 20
        self.offsetY = 20
        # Parallax layers: smaller scroll factors (.25 .. 1) move slower,
        # which makes them read as farther away.
        self.bg0 = Background(self, "sprites/parallax-forest-back-trees.png", 1020, 600, .25, 0)
        self.bg1 = Background(self, "sprites/parallax-forest-middle-trees.png", 1020, 600, .5, 0)
        self.bg2 = Background(self, "sprites/parallax-forest-front-trees.png", 1020, 600, .75, 0)
        self.bg3 = Background(self, "sprites/parallax-forest-lights.png", 1020, 600, 1, 0)
        self.ground = Ground(self)
        self.sean = Sean(self)
        #self.SourCreamAndOnionPringles = SourCreamAndOnionPringles(self)
        self.kamille = Kamille(self)
        #self.Rickrolled = RickAstley(self)
        # The player-controlled sprite; the camera follows it.
        self.main = Raphael(self)
        #self.Ethan = CheesePuff(self) #CheesePuff
        #self.CaptainPanini = CaptainPanini(self)
        #self.sophie = Sophie(self)
        self.spaceship = Spaceship(self)
        self.camera = Camera(self)
        self.camera.follow(self.main)

    def updateGame(self):
        # Per-frame update: backgrounds first, then actors, then the camera.
        self.bg0.update(self.offsetX, self.offsetY)
        self.bg1.update(self.offsetX, self.offsetY)
        self.bg2.update(self.offsetX, self.offsetY)
        self.bg3.update(self.offsetX, self.offsetY)
        self.ground.update(self.offsetX, self.offsetY)
        self.sean.update(self.offsetX, self.offsetY)
        self.kamille.update(self.offsetX, self.offsetY)
        #self.Ethan.update(self.offsetX, self.offsetY)
        #self.Rickrolled.update(self.offsetX, self.offsetY)
        self.main.update(self.offsetX, self.offsetY)
        #self.sophie.update(self.offsetX, self.offsetY)
        self.camera.update()
        self.spaceship.update(self.offsetX, self.offsetY)
        # Death check: any spaceship enemy within 50px ends the game.
        for enemy in self.spaceship.enemies:
            if enemy.distanceTo(self.main) < 50:
                print("You died!")
                self.stop()
# Build the game, start its loop, show the window, and block until Qt exits.
myGame = Game()
myGame.start()
myGame.show()
sys.exit(app.exec_())
'''''
####################################
#app = QApplication(sys.argv)
#widget = QWidget()
#textLabel = QLabel(widget)
#textLabel.setText("Hello World!")
#textLabel.move(110,85)
#widget.setGeometry(50,50,320,200) #tk.geometry('320x200+50+50')
#widget.setWindowTitle("PyQt5 Example")
#widget.show()
#sys.exit(app.exec_()) #Tk.mainloop()
from students import *
arr = []
arr.append(Doge())
arr.append(Congrats())
arr.append(Car())
arr.append(Cat())
arr.append(Poyo())
arr.append(what())
for chara in arr:
chara.move()
# Sean Mahady's Sprite
# 75 x 75
# https://opengameart.org/content/cat-fighter-sprite-sheet
# Cat Fighter by DogChicken @ OpenGameArt.org
# Raphael's Sprite
# 112 x 67
# https://opengameart.org/content/dog-walk-sprite-and-bone
# dog-walk-sprite-and-bone by kirard
# Sophie's Sprite
# 75 x 50
# https://opengameart.org/content/rabbit-2
# Rabbit by Aeynit
# HenryWasTaken
# 75 x 75
# https://opengameart.org/content/skeleton-guy-animated
# Disthron @ opengameart.org
# Nelsun's sprite sheet
# 112 x 67
# https://opengameart.org/content/dog-walk-sprite-and-bone
# dog sprite and bone by krirard
# Kamille's sprite sheet
# 75 x 79
# https://opengameart.org/content/deer
# deer sprite by calciumtrice
#Ethan's Sprite sheet
# 125 x 123
#https://opengameart.org/content/astronaut-4
#sprite sheet by gamer805'''''
|
984,925 | e537504c2615ad0fd1cc7203adea61583d00978d | import os
import io
import torch
import PIL.Image
import numpy as np
import scipy.signal
import matplotlib.pyplot as plt
from torchvision.transforms import ToTensor
import iirnet.signal as signal
def plot_response_grid(
    pred_coefs,
    target_coefs=None,
    target_mags=None,
    num_points=512,
    num_filters=5,
    eps=1e-8,
    fs=44100,
):
    """Render a grid comparing predicted and target filter responses.

    One row per filter (up to ``num_filters``): the magnitude response on
    the left and the predicted filter's pole-zero map on the right.
    Exactly one of ``target_coefs`` (SOS coefficients) or ``target_mags``
    (precomputed dB magnitudes) must be supplied.

    Returns the rendered figure as a CHW image tensor (via ``ToTensor``),
    suitable for logging.  Raises ``ValueError`` when no target is given.
    """
    ncols = 2
    nrows = num_filters
    pred_coefs = pred_coefs[:num_filters]
    if target_coefs is not None:
        target = target_coefs[:num_filters]
    elif target_mags is not None:
        target = target_mags[:num_filters]
    else:
        raise ValueError("Must pass either `target_coefs` or `target_mags`.")
    fig, axs = plt.subplots(nrows=nrows, ncols=ncols, figsize=(6, 12))
    axs = axs.reshape(-1)
    for idx, (p, t) in enumerate(zip(pred_coefs, target)):
        mag_idx = idx * 2
        plot_idx = mag_idx + 1
        # sos2zpk can fail on degenerate coefficients; fall back to an empty
        # pole-zero plot instead of aborting the whole grid.  (Narrowed from
        # a bare ``except:``, which also swallowed KeyboardInterrupt.)
        try:
            zeros, poles, k = scipy.signal.sos2zpk(p.squeeze())
        except Exception:
            zeros = []
            poles = []
            k = 0
        w_pred, h_pred = signal.sosfreqz(p, worN=num_points, fs=fs)
        # Fix: honour the ``eps`` parameter (previously a hard-coded 1e-8).
        mag_pred = 20 * np.log10(np.abs(h_pred.squeeze()) + eps)
        if target_coefs is not None:
            w_target, h_target = signal.sosfreqz(t, worN=num_points, fs=fs)
            mag_target = 20 * np.log10(np.abs(h_target.squeeze()) + eps)
        else:
            mag_target = t.squeeze()
        axs[mag_idx].plot(w_pred, mag_target, color="tab:blue", label="target")
        axs[mag_idx].plot(w_pred, mag_pred, color="tab:red", label="pred")
        axs[mag_idx].set_xscale("log")
        # axs[mag_idx].set_ylim([-60, 40])
        axs[mag_idx].grid()
        axs[mag_idx].spines["top"].set_visible(False)
        axs[mag_idx].spines["right"].set_visible(False)
        axs[mag_idx].spines["bottom"].set_visible(False)
        axs[mag_idx].spines["left"].set_visible(False)
        axs[mag_idx].set_ylabel("Amplitude (dB)")
        axs[mag_idx].set_xlabel("Frequency (Hz)")
        # Pole-zero plot: poles as red crosses, zeros as hollow circles.
        for pole in poles:
            axs[plot_idx].scatter(
                np.real(pole),
                np.imag(pole),
                c="tab:red",
                s=10,
                marker="x",
                facecolors="none",
            )
        for zero in zeros:
            axs[plot_idx].scatter(
                np.real(zero),
                np.imag(zero),
                s=10,
                marker="o",
                facecolors="none",
                edgecolors="tab:red",
            )
        # Unit circle for stability reference.  (Dropped the redundant
        # ``circle1`` alias of the original double assignment.)
        unit_circle = plt.Circle((0, 0), 1, color="k", fill=False)
        axs[plot_idx].add_patch(unit_circle)
        axs[plot_idx].set_ylim([-1.5, 1.5])
        axs[plot_idx].set_xlim([-1.5, 1.5])
        axs[plot_idx].grid()
        axs[plot_idx].spines["top"].set_visible(False)
        axs[plot_idx].spines["right"].set_visible(False)
        axs[plot_idx].spines["bottom"].set_visible(False)
        axs[plot_idx].spines["left"].set_visible(False)
        axs[plot_idx].set_aspect("equal")
        axs[plot_idx].set_axisbelow(True)
        axs[plot_idx].set_ylabel("Im")
        axs[plot_idx].set_xlabel("Re")
    plt.tight_layout()
    # Render the figure into an in-memory PNG and convert it to a tensor.
    buf = io.BytesIO()
    plt.savefig(buf, format="png")
    buf.seek(0)
    image = PIL.Image.open(buf)
    image = ToTensor()(image)  # .unsqueeze(0)
    plt.close("all")
    return image
def plot_compare_response(
    pred_coef, target_coef, num_points=512, eps=1e-8, fs=44100, ax=None
):
    """Plot predicted vs. target magnitude and (unwrapped) phase responses.

    Returns the rendered figure as a CHW image tensor.

    NOTE: ``ax`` is accepted for backward compatibility but is currently
    ignored -- a fresh two-row figure is always created and rasterized.
    """
    w_pred, h_pred = signal.sosfreqz(pred_coef, worN=num_points, fs=fs)
    w_target, h_target = signal.sosfreqz(target_coef, worN=num_points, fs=fs)
    fig, ax = plt.subplots(nrows=2, ncols=1, figsize=(8, 8))
    # Fix: honour the ``eps`` parameter (previously a hard-coded 1e-8)
    # used to keep log10 finite at zero magnitude.
    mag_pred = 20 * np.log10(np.abs(h_pred.squeeze()) + eps)
    mag_target = 20 * np.log10(np.abs(h_target.squeeze()) + eps)
    ax[0].plot(w_target, mag_target, color="b", label="target")
    ax[0].plot(w_pred, mag_pred, color="r", label="pred")
    ax[0].set_xscale("log")
    ax[0].set_ylim([-60, 40])
    ax[0].set_ylabel("Amplitude [dB]")
    ax[0].set_xlabel("Frequency [Hz]")
    ax[0].legend()
    ax[0].grid()
    ax[0].spines["top"].set_visible(False)
    ax[0].spines["right"].set_visible(False)
    ax[0].spines["bottom"].set_visible(False)
    ax[0].spines["left"].set_visible(False)
    # Unwrap the phase so it is continuous across the +/- pi boundary.
    ang_pred = np.unwrap(np.angle(h_pred.squeeze()))
    ang_target = np.unwrap(np.angle(h_target.squeeze()))
    ax[1].plot(w_target, ang_target, color="b", label="target")
    ax[1].plot(w_pred, ang_pred, color="r", label="pred")
    ax[1].set_ylabel("Angle (radians)")
    ax[1].set_xlabel("Frequency [Hz]")
    ax[1].set_xscale("log")
    ax[1].grid()
    ax[1].axis("tight")
    ax[1].legend()
    ax[1].spines["top"].set_visible(False)
    ax[1].spines["right"].set_visible(False)
    ax[1].spines["bottom"].set_visible(False)
    ax[1].spines["left"].set_visible(False)
    # Rasterize to an in-memory PNG and convert to a tensor.
    buf = io.BytesIO()
    plt.savefig(buf, format="png")
    buf.seek(0)
    image = PIL.Image.open(buf)
    image = ToTensor()(image)  # .unsqueeze(0)
    plt.close("all")
    return image
def plot_responses(pred_sos, target_dB, filename=None, zero_mean=False):
    """Plot a predicted SOS response against a target dB magnitude curve,
    side by side with the predicted filter's pole-zero map.

    When ``zero_mean`` is set, both curves are shifted to zero mean before
    plotting.  When ``filename`` is given, the figure is saved there.
    """
    mag_idx = 0
    plot_idx = 1
    fig, axs = plt.subplots(nrows=1, ncols=2, figsize=(6, 3))
    # sos2zpk can fail on degenerate coefficients; fall back to an empty
    # pole-zero plot.  (Narrowed from a bare ``except:``.)
    try:
        zeros, poles, k = scipy.signal.sos2zpk(pred_sos.squeeze())
    except Exception:
        zeros = []
        poles = []
        k = 0
    w_pred, h_pred = signal.sosfreqz(pred_sos, worN=target_dB.shape[-1], fs=44100)
    mag_pred = 20 * torch.log10(h_pred.abs() + 1e-8)
    if zero_mean:
        # Compare shapes only: remove each curve's mean level.
        mag_pred = mag_pred - np.mean(mag_pred.squeeze().numpy())
        target_dB = target_dB - np.mean(target_dB.squeeze().numpy())
    axs[mag_idx].plot(w_pred, target_dB, color="tab:blue", label="target")
    axs[mag_idx].plot(w_pred, mag_pred.squeeze(), color="tab:red", label="pred")
    axs[mag_idx].set_xscale("log")
    axs[mag_idx].grid()
    axs[mag_idx].spines["top"].set_visible(False)
    axs[mag_idx].spines["right"].set_visible(False)
    axs[mag_idx].spines["bottom"].set_visible(False)
    axs[mag_idx].spines["left"].set_visible(False)
    axs[mag_idx].set_ylabel("Amplitude (dB)")
    axs[mag_idx].set_xlabel("Frequency (Hz)")
    axs[mag_idx].legend()
    # Pole-zero plot: poles as red crosses, zeros as hollow circles.
    for pole in poles:
        axs[plot_idx].scatter(
            np.real(pole),
            np.imag(pole),
            c="tab:red",
            s=10,
            marker="x",
            facecolors="none",
        )
    for zero in zeros:
        axs[plot_idx].scatter(
            np.real(zero),
            np.imag(zero),
            s=10,
            marker="o",
            facecolors="none",
            edgecolors="tab:red",
        )
    # Unit circle for stability reference.
    unit_circle = plt.Circle((0, 0), 1, color="k", fill=False)
    axs[plot_idx].add_patch(unit_circle)
    axs[plot_idx].set_ylim([-1.5, 1.5])
    axs[plot_idx].set_xlim([-1.5, 1.5])
    axs[plot_idx].grid()
    axs[plot_idx].spines["top"].set_visible(False)
    axs[plot_idx].spines["right"].set_visible(False)
    axs[plot_idx].spines["bottom"].set_visible(False)
    axs[plot_idx].spines["left"].set_visible(False)
    axs[plot_idx].set_aspect("equal")
    axs[plot_idx].set_axisbelow(True)
    axs[plot_idx].set_ylabel("Im")
    axs[plot_idx].set_xlabel("Re")
    plt.tight_layout()
    if filename is not None:
        # Bug fix: save to the requested path -- the previous f-string
        # wrote to a literal placeholder name instead of ``filename``.
        plt.savefig(filename)
    plt.close("all")
|
984,926 | d2527b6aba7186c50fc586cb1b7f7333408a7f15 | from django import forms
from website.models import Propriedade
class InserePropriedadeForm(forms.ModelForm):
    """ModelForm for creating/editing a Propriedade record.

    Field names suggest a soil/fertility analysis workflow; see
    ``website.models.Propriedade`` for field definitions and validation.
    """
    class Meta:
        model = Propriedade
        fields = [
            # Property / plot identification.
            'nome_produtor',
            'data',
            'municipio',
            'lote',
            'area_total',
            'talhao',
            'area_talhao',
            'matricula_lote',
            # Sampling and soil analysis inputs.
            'profundidade_amostras',
            'resultado_analise',
            'text_solo',
            'sist_cultivo',
            'fosforo',
            'potassio',
            'calcio',
            'magnesio',
            'enxofre',
            'aluminio',
            'hal',
            'materia_organica',
            # Phosphorus correction plan.
            'fosforo_atingir',
            'fonte_fosforo',
            'eficiencia_fosforo',
            'valor_fosforo',
            'aplicar_fosforo',
            'custo_fosforo',
            # Potassium correction plan.
            'potassio_atingir',
            'fonte_potassio',
            'valor_potassio',
            'particip_potassio',
            'aplicar_potassio',
            'custo_potassio',
            # Calcium/magnesium (liming) correction plan.
            'calcio_atingir',
            'fonte_calmag',
            'prnt',
            'cao_corretivo',
            'valor_calmag',
            'particip_calc',
            'particip_magnes',
            'aplicar_calmag',
            'custo_calmag'
        ]
|
984,927 | 2facd5458d23d3c3d621aee394b231665b8dbc2c |
# Smoke-test script: emit a fixed sequence of status messages.
for message in ("init", "hello", "check", "final test"):
    print(message)
|
984,928 | 78f238c735e17bcdddfec86a6943b3750d60632c | m = int(input("Informe o valor em metros: "))
conversao = m * 100
print("A conversão de {} metros é {} centimetros.".format(m, conversao))
|
984,929 | a3192017a31049ffb22857aab054fe956a2e7f3e | #!/usr/bin/env python
import base
import vault
import requests
import json
import sys
from termcolor import colored
ENABLED = True
class style:
    # ANSI escape sequences used to embolden terminal output.
    BOLD = '\033[1m'
    END = '\033[0m'
def banner():
    # Announce this module before its results are printed (Python 2 syntax).
    print colored(style.BOLD + '[+] Searching in Shodan' + style.END)
def main(ip):
    """Query the Shodan host API for `ip`.

    Returns the decoded JSON dict on success, or the sentinel
    ``[False, "INVALID_API"]`` when no API key is configured in the vault.
    """
    shodan_api = vault.get_key('shodan_api')
    # Idiom fix: compare against None by identity, not equality.
    if shodan_api is not None:
        endpoint = "https://api.shodan.io/shodan/host/" + str(ip) + "?key=" + shodan_api
        req = requests.get(endpoint)
        return json.loads(req.content)
    else:
        return [False, "INVALID_API"]
def output(data, ip=""):
if type(data) == list and data[1] == "INVALID_API":
print colored(
style.BOLD + '\n[-] Shodan API Key not configured. Skipping Shodan search.\nPlease refer to Shodan API docs.\n' + style.END, 'red')
else:
if 'error' in data.keys():
print 'No information available for that IP.'
else:
asn = ''
print colored(style.BOLD + '\n----------- Per Port Results -----------' + style.END)
if 'data' in data.keys():
for x in data['data']:
print colored(style.BOLD + '\nResponse from Open Port: %s' + style.END, 'green') % (x['port'])
'''if 'title' in x.keys():
print colored(style.BOLD + '[+] Title:\t\t' + style.END, 'green') + str(x['title'])'''
if 'title' in x.keys():
print colored(style.BOLD + '[+] HTML Content:\t' + style.END, 'green') + str(
'Yes (Please inspect Manually on this port)')
if 'http' in x.keys():
print colored(style.BOLD + '[+] HTTP port present:\t' + style.END, 'green')
print '\tTitle: %s' % x['http']['title']
print '\tRobots: %s' % x['http']['robots']
print '\tServer: %s' % x['http']['server']
print '\tComponents: %s' % x['http']['components']
print '\tSitemap: %s' % x['http']['sitemap']
if 'ssh' in x.keys():
print colored(style.BOLD + '[+] HTTP port present:\t' + style.END, 'green')
print '\tType: %s' % x['ssh']['type']
print '\tCipher: %s' % x['ssh']['cipher']
print '\tFingerprint: %s' % x['ssh']['fingerprint']
print '\tMac: %s' % x['ssh']['mac']
print '\tKey: %s' % x['ssh']['key']
if 'ssl' in x.keys():
print '\tSSL Versions: %s' % x['ssl']['versions']
if 'asn' in x.keys():
asn = data['asn']
if 'vulns' in x['opts']:
for y in x['opts'].keys():
print x['opts'][y]
if 'product' in x.keys():
print 'Product: %s' % x['product']
if 'version' in x.keys():
print 'Version: %s' % x['version']
print colored(style.BOLD + '\n----------- Basic Info -----------' + style.END, 'blue')
print 'Open Ports: %s' % data['ports']
print 'Latitude: %s' % data['latitude']
print 'Hostnames: %s' % data['hostnames']
print 'Postal Code: %s' % data['postal_code']
print 'Country Code: %s' % data['country_code']
print 'Organization: %s' % data['org']
if asn != '':
print 'ASN: %s' % asn
if 'vulns' in data.keys():
print colored(style.BOLD + 'Vulnerabilties: %s' + style.END, 'red') % data['vulns']
print ""
if __name__ == "__main__":
try:
ip = sys.argv[1]
banner()
result = main(ip)
if result:
output(result, ip)
except Exception as e:
print e
print "Please provide an IP Address as argument"
|
984,930 | ece41b07d6a1a3f4cc33796a765fa69668f5062d | from django.urls import include, path
from . import views
# URL routes for the user API endpoints of this app.
urlpatterns = [
    # Collection view at the app root.
    path('',
         views.UserListView.as_view(),
         name='user-list'),
    # Detail view keyed by primary key.
    path('<int:pk>',
         views.UserDetailView.as_view(),
         name='customuser-detail'),
    # GetMyUser view -- presumably returns the requesting user's own
    # record; confirm against views.py.
    path('GetMyUser',
         views.GetMyUser.as_view(),
         name='getmyuser'),
    # path('getUser',
    #      views.GetMatchCandidateUser.as_view(),
    #      name='get-user'),
]
|
984,931 | 931a4d91627721c10d3f47ccd7e8c1cb30ce961f | L = int(input())
x = L / 3
V = x ** 3
print(V) |
984,932 | 1a9afc1f95113b7d8dedcf8c5d7ad9b58cd8f19f | from typing import Union
from disnake import ApplicationCommandInteraction
from disnake.ext.commands import (
bot_has_permissions,
BucketType,
Cog,
command,
Context,
guild_only,
max_concurrency,
slash_command,
)
from data import Utils
class Dj(Cog, name="dj.skip"):
    """Cog exposing the "skip" action as both a prefix and a slash command.

    Both entry points validate DJ permissions via the Utils decorators and
    delegate to the shared handle_skip() method.
    """
    def __init__(self, bot):
        self.bot = bot

    @command(
        name="skip",
        aliases=["next"],
        usage="(number of skip(s))",
        description="Skip the music a given number of times!",
    )
    @Utils.check_bot_starting()
    @Utils.check_dj()
    @bot_has_permissions(send_messages=True)
    @max_concurrency(1, per=BucketType.guild)
    async def skip_command(self, ctx: Context, skips: int = 1):
        # Prefix-command entry point; defers to the shared handler.
        await self.handle_skip(ctx, skips)

    @slash_command(
        name="skip",
        description="Skip the music a given number of times!",
    )
    @guild_only()
    @Utils.check_bot_starting()
    @Utils.check_dj()
    @max_concurrency(1, per=BucketType.guild)
    async def skip_slash_command(
        self, inter: ApplicationCommandInteraction, skips: int = 1
    ):
        # Slash-command entry point; defers to the shared handler.
        await self.handle_skip(inter, skips)

    """ METHOD(S) """

    async def handle_skip(
        self, source: Union[Context, ApplicationCommandInteraction], skips: int
    ):
        """Skip the current track `skips` times after validating the player
        state and the author's voice-channel membership."""
        player = self.bot.lavalink.player_manager.get(source.guild.id)

        # Guard: nothing to skip when the player is idle.
        if not player or not player.is_playing:
            if isinstance(source, Context):
                return await source.reply(
                    f"⚠️ - {source.author.mention} - The bot isn't playing!",
                    delete_after=20,
                )
            else:
                return await source.response.send_message(
                    f"⚠️ - {source.author.mention} - The bot isn't playing!",
                    ephemeral=True,
                )
        elif not player.is_connected:
            # We can't disconnect, if we're not connected.
            if isinstance(source, Context):
                return await source.reply(
                    f"⚠️ - {source.author.mention} - The player isn't connected!",
                    delete_after=20,
                )
            else:
                return await source.response.send_message(
                    f"⚠️ - {source.author.mention} - The player isn't connected!",
                    ephemeral=True,
                )
        elif not source.author.voice or (
            player.is_connected
            and source.author.voice.channel.id != int(player.channel_id)
        ):
            # Abuse prevention. Users not in voice channels, or not in the same voice channel as the bot
            # may not disconnect the bot.
            if isinstance(source, Context):
                return await source.reply(
                    f"⚠️ - {source.author.mention} - Please be in the same voice room as the bot to control the music!",
                    delete_after=20,
                )
            else:
                return await source.response.send_message(
                    f"⚠️ - {source.author.mention} - Please be in the same voice room as the bot to control the music!",
                    ephemeral=True,
                )

        # Perform the requested number of skips, then confirm once.
        for _ in range(skips):
            await player.skip()
        if isinstance(source, Context):
            await source.send(
                f"⏭️ - Skipping `{skips}` music{'s' if skips > 1 else ''}!"
            )
        else:
            await source.response.send_message(
                f"⏭️ - Skipping `{skips}` music{'s' if skips > 1 else ''}!"
            )
def setup(bot):
    # Standard disnake extension entry point: register the cog on load.
    bot.add_cog(Dj(bot))
|
984,933 | dfb7a6bc5d820998c3bf50dd10b32d583d8cfd5e | import boto3
import argparse
from get_instances import get_instances
import pprint
# Module-level AWS handles and pretty-printer shared by both entry points.
ec2 = boto3.resource('ec2')
ec2_client = boto3.client('ec2')
pp = pprint.PrettyPrinter(indent=4)
# Instance name filter; set from the CLI in main(). None means "use default".
name = None
def lambda_handler(event, context):
    """Start an EC2 instance and return an Alexa-style speech response.

    The instance is resolved from the module-level ``name`` (set by the CLI
    path in main()); otherwise a hard-coded default id is used.
    """
    instances = get_instances()
    if name:
        # Resolve the instance id from the named entry.
        instance_id = instances[name][0]['instanceId']
    else:
        # Hard-coded fallback instance.
        instance_id = 'i-0e6a7023f20d1bc63'
    existing_instance = ec2.Instance(id=instance_id)
    res = existing_instance.start()
    # NOTE(review): the start() result is immediately discarded by the next
    # assignment -- the spoken text reports the instance map rather than
    # the start outcome. Confirm this overwrite is intentional.
    res = instances
    return {
        "version": "1.0",
        "response": {
            "outputSpeech": {
                "type": "PlainText",
                # "text": "I've created a new instance on your AWS account. "
                "text": str(res.keys())+str(res.values())
            },
            "shouldEndSession": False
        }
    }
def main():
    """CLI wrapper: parse --name, stash it in the module global read by
    lambda_handler(), then invoke the handler and pretty-print the result."""
    parser = argparse.ArgumentParser()
    parser.add_argument('--name', default=None)
    args = parser.parse_args()
    # lambda_handler() reads this global instead of taking a parameter.
    global name
    name = args.name
    res = lambda_handler({}, {})
    pp.pprint(res)
|
984,934 | b372e928fe7777896b866052874da65747134a2e | # Import base library modules - From Bluetooth symbolic link to /base_lib
from base_lib.v1_00_Config_Logger \
import v1_00_Config_Logger
#
# SuperClass.
# ----------------------------------------------------------------------------
class Config_Logger(v1_00_Config_Logger):
    # Thin version-pinning subclass: exposes the v1.00 logger configuration
    # under a stable, unversioned name.
    def __init__(self, control=None, module=None):
        super(Config_Logger, self).__init__(control, module)
|
984,935 | d5ad6d7c22d647be13c4d019c1289512ae3c728a | # OR157.LRU Cache
# 题目描述
# 设计一个数据结构,实现LRU Cache的功能(Least Recently Used – 最近最少使用缓存)。它支持如下2个操作: get 和 put。
# int get(int key) – 如果key已存在,则返回key对应的值value(始终大于0);如果key不存在,则返回-1。
# void put(int key, int value) – 如果key不存在,将value插入;如果key已存在,则使用value替换原先已经存在的值。如果容量达到了限制,LRU Cache需要在插入新元素之前,将最近最少使用的元素删除。
# 请特别注意“使用”的定义:新插入或获取key视为被使用一次;而将已经存在的值替换更新,不算被使用。
# 限制:请在O(1)的时间复杂度内完成上述2个操作。
# 输入描述:
# 第一行读入一个整数n,表示LRU Cache的容量限制。 从第二行开始一直到文件末尾,每1行代表1个操作。
# 如果每行的第1个字符是p,则该字符后面会跟随2个整数,表示put操作的key和value。
# 如果每行的第1个字符是g,则该字符后面会跟随1个整数,表示get操作的key。
# 输出描述:
# 按照输入中get操作出现的顺序,按行输出get操作的返回结果。
# 示例1
# 输入
# 复制
# 2
# p 1 1
# p 2 2
# g 1
# p 2 102
# p 3 3
# g 1
# g 2
# g 3
# 输出
# 复制
# 1
# 1
# -1
# 3
# 说明
# 2 //Cache容量为2
# p 1 1 //put(1, 1)
# p 2 2 //put(2, 2)
# g 1 //get(1), 返回1
# p 2 102 //put(2, 102),更新已存在的key,不算被使用
# p 3 3 //put(3, 3),容量超过限制,将最近最少使用的key=2清除
# g 1 //get(1), 返回1
# g 2 //get(2), 返回-1
# g 3 //get(3), 返回3
class ListNode:
    """Doubly-linked-list node holding one cache entry's key/value pair."""
    def __init__(self, key, value):
        self.key = key
        self.value = value
        self.next = None  # neighbour toward the tail (less recently used)
        self.pre = None   # neighbour toward the head (more recently used)
class LRU_Cache:
    """Fixed-capacity key/value store with least-recently-used eviction.

    A hash map gives O(1) lookup while a doubly linked list (bracketed by
    head/tail sentinels) keeps entries ordered from most- to least-recently
    used.  Per the problem statement, a get() and a fresh insert count as
    "uses", but overwriting an existing key's value does not.
    """

    def __init__(self, cap):
        self.cap = cap
        # Sentinels: real nodes always live strictly between head and tail.
        self.head = ListNode(None, None)
        self.tail = ListNode(None, None)
        self.head.next = self.tail
        self.tail.pre = self.head
        self.hashMap = {}

    def put(self, key, value):
        existing = self.hashMap.get(key)
        if existing is not None:
            # Overwriting does not refresh recency -- no list surgery.
            existing.value = value
            return
        if self.cap == 0:
            return
        if len(self.hashMap) >= self.cap:
            self._evict_oldest()
        self.hashMap[key] = ListNode(key, value)
        self.insert_to_head(key)

    def get(self, key):
        if key not in self.hashMap:
            return -1
        # A successful read counts as a use.
        self.move_to_head(key)
        return self.hashMap[key].value

    def _evict_oldest(self):
        # Drop the node just before the tail sentinel (least recently used).
        victim = self.tail.pre
        victim.pre.next = self.tail
        self.tail.pre = victim.pre
        del self.hashMap[victim.key]

    def move_to_head(self, key):
        # Unlink the node, then reinsert it right after the head sentinel.
        node = self.hashMap[key]
        node.pre.next = node.next
        node.next.pre = node.pre
        self.insert_to_head(key)

    def insert_to_head(self, key):
        node = self.hashMap[key]
        node.pre = self.head
        node.next = self.head.next
        self.head.next.pre = node
        self.head.next = node
if __name__ == '__main__':
    # First input line: cache capacity.
    n = int(input())
    lru = LRU_Cache(n)
    # Remaining lines until EOF: "p <key> <value>" or "g <key>".
    while True:
        try:
            row = input().split(' ')
            op = row[0]
            #print(lru.hashMap)
            if op == 'p':
                lru.put(int(row[1]), int(row[2]))
            else:
                print(lru.get(int(row[1])))
        except:
            # The bare except doubles as the EOF terminator; any malformed
            # line also ends the loop.  NOTE(review): consider narrowing to
            # (EOFError, ValueError, IndexError).
            break
984,936 | 4ed99b6fb20376ce0e98fe6f3c3b5e353ba1c5dd | #-*-coding:utf-8 -*-
from django.contrib import admin
from models import Artigo
# Expose Artigo in the Django admin with the default ModelAdmin options.
admin.site.register(Artigo)
984,937 | e8b89ec89e14f8ea9e35c6c21db86b0c05679e9e | A,B,T= map(int, input().split())
print(T//A*B) |
984,938 | 849a3b347359507f8cb97b1568b40c339c3136d9 | #!/usr/bin/python3
"""0-rectangle
"""
class Rectangle:
    """Empty placeholder for a rectangle; attributes are added in later
    exercises of this project."""
    def __init__(self):
        """Initialize the instance; intentionally stores no state yet."""
        pass
984,939 | a74845d74af389c3c49bb36f6842c1c3e8d79698 | import copy
import sys
# Command categories produced by the parser: @-address, (LABEL), compute.
A_COMMAND = 'A'
L_COMMAND = 'L'
C_COMMAND = 'C'
# Predefined Hack symbols; R0-R15 are added per-parser in set_symbol_table().
SYMBOL_TABLE = {'SP':0,'LCL':1,'ARG':2,'THIS':3,'THAT':4,'SCREEN':16384,'KBD': 24576} #R0-15 added in Parser
# Hack C-instruction bit tables for the jump, dest and comp fields.
JUMP_DICT = {None:'000','JGT':'001','JEQ':'010','JGE':'011','JLT':'100','JNE':'101','JLE':'110','JMP':'111'}
DEST_DICT = {None:'000','M':'001','D':'010','MD':'011','A':'100','AM':'101','AD':'110','AMD':'111'}
COMP_DICT = {'0':'101010','1':'111111','-1':'111010','D':'001100','A':'110000','!D':'001101','!A':'110001','-D':'001111','-A':'110011','D+1':'011111','A+1':'110111','D-1':'001110','A-1':'110010','D+A':'000010','D-A':'010011','A-D':'000111','D&A':'000000','D|A':'010101'}
class Parser(object):
    '''
    Parses a Hack assembly file and stores a list of dictionaries with the
    command type and additional information about each command.
    '''
    def __init__(self, input_file):
        self.input_file = input_file
        self.command_list = self.create_command_list()
        self.parsed_list = []
        # Becomes True after get_parsed_list() runs; results are cached.
        self.parsed = False
        self.symbol_table = self.set_symbol_table()

    def set_symbol_table(self):
        '''
        Sets symbol table from defaults and adds R0-15
        '''
        symbol_table = copy.copy(SYMBOL_TABLE)
        for x in range(16):
            symbol_table['R' + str(x)] = x
        return symbol_table

    def create_command_list(self):
        '''
        Strip white space and comments from file to create a list of all the commands
        '''
        command_list = []
        with open(self.input_file,'r') as input_file:
            for full_line in input_file:
                # Remove comments, spaces and white space
                full_line = full_line.split('//')[0]
                reduced_line = full_line.replace(' ','').replace('\t','').strip()
                # Only add rows with text, covers comment lines and initally blank lines
                if reduced_line != '\n' and reduced_line != '':
                    command_list.append(reduced_line)
        return command_list

    def process_type(self,command):
        '''
        Returns the type of command (A, C, or L) and its associated value
        as a dict with 'type' and 'command' keys.
        '''
        if command[0] == '@':
            command_type = A_COMMAND
            symbol = command[1:]
        elif command[0] == '(' and command[-1] == ')':
            command_type = L_COMMAND
            symbol = command[1:-1]
        else:
            command_type = C_COMMAND
            symbol = self.divide_c_command(command)
        return {'type':command_type,'command':symbol}

    def divide_c_command(self,command):
        '''
        Takes c command and returns dictionary of dest, comp and jump
        '''
        rv = {'dest':None,'comp':None,'jump':None}
        # Full form: dest=comp;jump
        if ';' in command and '=' in command:
            split_by_semi = command.split(';')
            split_by_eq = split_by_semi[0].split('=')
            rv['comp'] = split_by_eq[1]
            rv['jump'] = split_by_semi[1]
            rv['dest'] = split_by_eq[0]
            return rv
        # dest=comp (no jump)
        if ';' not in command:
            split_by_eq = command.split('=')
            rv['dest'] = split_by_eq[0]
            rv['comp'] = split_by_eq[1]
            return rv
        # comp;jump (no dest)
        if '=' not in command:
            split_by_semi = command.split(';')
            rv['jump'] = split_by_semi[1]
            rv['comp'] = split_by_semi[0]
            return rv

    def get_parsed_list(self):
        '''
        Take full list of commands and converts into commands that can be used by the encoder
        '''
        if self.parsed:
            return self.parsed_list
        counter = 0
        for command in self.command_list:
            command_dict = self.process_type(command)
            # Add dict with type and command to list if not L and iterate counter, add L to symbol table
            if command_dict['type'] != L_COMMAND:
                self.parsed_list.append(command_dict)
                counter += 1
            else:
                # A label resolves to the address of the next real instruction.
                self.symbol_table[command_dict['command']] = counter
        self.parsed = True
        return self.parsed_list
class Encoder(object):
    '''
    Encodes parsed command dictionaries into 16-bit Hack machine-code words,
    using the module-level comp/dest/jump tables and a caller-supplied
    symbol table.
    '''
    def __init__(self, symbol_table, parsed_list=None):
        # Bug fix: the original read a module-level `parsed_list` global,
        # raising NameError whenever the class was used outside this script.
        # The attribute is kept (now injectable, defaulting to None) for
        # backward compatibility; nothing in this class reads it.
        self.parsed_list = parsed_list
        self.jump_dict = JUMP_DICT
        self.dest_dict = DEST_DICT
        self.comp_dict = COMP_DICT
        self.symbol_table = symbol_table
        # Next free RAM slot for newly-seen variables (0-15 are reserved).
        self.symbol_table_counter = 16

    def convert_to_machine(self, command_tuple):
        '''
        Dispatch on command type and return the 16-character binary string.
        '''
        if command_tuple['type'] == A_COMMAND:
            return self.get_A_code(command_tuple['command'])
        elif command_tuple['type'] == C_COMMAND:
            return self.get_C_code(command_tuple['command'])
        else:
            # L-commands are resolved by the parser and should never get here.
            print(command_tuple)
            exit(1)

    def get_A_code(self, command):
        '''
        Encode an @-command: numeric literals directly; symbols via the
        table, allocating a fresh RAM address on first sight.
        '''
        try:
            # Keep only the low 15 bits so oversized literals wrap silently.
            return '0' + "{0:015b}".format(int(command))[-15:]
        except ValueError:  # narrowed from a bare except: non-numeric symbol
            if command in self.symbol_table:
                num = int(self.symbol_table[command])
            else:
                num = self.symbol_table_counter
                self.symbol_table[command] = num
                self.symbol_table_counter += 1
            return '0' + "{0:015b}".format(num)[-15:]

    def get_C_code(self, command_dict):
        '''
        Encode a C-command as 111 + a/comp bits + dest bits + jump bits.
        '''
        dest = self.dest_dict[command_dict['dest']]
        jump = self.jump_dict[command_dict['jump']]
        comp = self.get_comp_code(command_dict['comp'])
        return '111' + comp + dest + jump

    def get_comp_code(self, comp):
        # The leading `a` bit selects the M register; M-forms reuse the
        # corresponding A-form row of the comp table.
        if 'M' in comp:
            a = '1'
            comp = comp.replace('M', 'A')
        else:
            a = '0'
        return a + self.comp_dict[comp]
if __name__ == '__main__':
    # Require the input filename as the single CLI argument.
    if len(sys.argv) < 2:
        print("Usage: python assembler.py FILENAME")
        sys.exit(1)
    input_filename = sys.argv[1]
    # Output next to the input, swapping the extension for .hack.
    output_file = input_filename[:input_filename.rfind('.')] + '.hack'
    parser = Parser(input_filename)
    parsed_list = parser.get_parsed_list()
    # After parsing, the symbol table contains all label addresses.
    encoder = Encoder(parser.symbol_table)
    # `with` guarantees the output file is closed even if encoding fails
    # (the original used a manual open/close pair).
    with open(output_file, 'w') as out:
        for command_dict in parsed_list:
            out.write(encoder.convert_to_machine(command_dict) + '\n')
984,940 | 0ee9e0104a1616349d61c7e49c4b03d66747a65e | import requests
import os
from pyquery import PyQuery
from urllib.parse import urlparse, urljoin
import posixpath
import zipfile
USER_AGENT = 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/55.0.2883.98 Safari/537.36 Vivaldi/1.6.689.46'
VIVALDI_COM_URL = 'https://vivaldi.com/download/'
LIBFFMPEG_URL = 'https://github.com/iteufel/nwjs-ffmpeg-prebuilt/releases/latest'
LIBFFMPEG = '/opt/vivaldi/lib/libffmpeg.so'
def http_get(url):
    """GET `url` with the spoofed browser User-Agent and return the response."""
    headers = {
        'User-Agent': USER_AGENT
    }
    # Bug fix: the headers dict was built but never passed, so every request
    # previously went out with requests' default User-Agent.
    res = requests.get(url, headers=headers)
    return res
def make_filename(url, dest_dir):
    """Return `dest_dir` joined with the basename of the URL's path."""
    basename = posixpath.basename(urlparse(url).path)
    return os.path.join(dest_dir, basename)
def download_to(url, dest_file):
    """Download `url` to `dest_file`, printing simple progress messages."""
    r = urlparse(url)
    name = posixpath.basename(r.path)
    print('Downloading {}...'.format(name))
    res = http_get(url)
    # The context manager closes the file; the original also called
    # wb.close() inside the with-block, which was redundant.
    with open(dest_file, 'wb') as wb:
        wb.write(res.content)
    print('done.')
def download_to_dir(url, dest_dir):
    """Download `url` into `dest_dir`, keeping the URL's basename."""
    download_to(url, make_filename(url, dest_dir))
class VivaldiClawler(object):
    """Scrapes vivaldi.com's download page for installer links."""

    def get_vivaldi_com(self):
        """Fetch the download page and return it as a PyQuery document."""
        return PyQuery(http_get(VIVALDI_COM_URL).text)

    def get_download_links(self):
        """Yield every anchor href on the page that points at a download."""
        for anchor in self.get_vivaldi_com()('a').items():
            href = anchor.attr['href']
            if href.find('downloads') > 0:
                yield href

    def get_download_links_for(self, parts):
        """Yield download links containing every substring in `parts`."""
        for link in self.get_download_links():
            if all(link.find(part) > -1 for part in parts):
                yield link

    def get_download_link_for(self, parts):
        """Return the first matching link (StopIteration when none match)."""
        return next(self.get_download_links_for(parts))
class LibFFmpegClawler(object):
    """Scrapes a GitHub releases page for libffmpeg prebuilt downloads."""

    def __init__(self, url):
        self.url = url

    def get_libffmpeg_releases(self):
        """Fetch the releases page and return it as a PyQuery document."""
        return PyQuery(http_get(self.url).text)

    def get_download_links(self):
        """Yield every anchor href on the page that points at a download."""
        for anchor in self.get_libffmpeg_releases()('a').items():
            href = anchor.attr['href']
            if href.find('download') > 0:
                yield href

    def get_download_links_for(self, parts):
        """Yield download links containing every substring in `parts`."""
        for link in self.get_download_links():
            if all(link.find(part) > -1 for part in parts):
                yield link

    def get_download_link_for(self, parts):
        """Return the first matching link resolved to an absolute URL
        (StopIteration when none match)."""
        # Release links are relative; resolve them against the page URL.
        return urljoin(self.url, next(self.get_download_links_for(parts)))
def download_vivaldi(dest_dir):
    """Fetch the latest x86_64 RPM into `dest_dir`.

    Returns the downloaded path, or None when that file already exists
    (i.e. nothing new to install).
    """
    url = VivaldiClawler().get_download_link_for(['x86_64', 'rpm'])
    target = make_filename(url, dest_dir)
    if os.path.isfile(target):
        return None
    download_to(url, target)
    return target
def download_libffmpeg(dest_dir):
    """Fetch the latest linux-x64 libffmpeg zip and extract libffmpeg.so.

    Returns the path of the extracted .so, or None when the zip is already
    present (nothing new to install).
    """
    clawler = LibFFmpegClawler(LIBFFMPEG_URL)
    url = clawler.get_download_link_for(['linux', 'x64'])
    filename = make_filename(url, dest_dir)
    if os.path.isfile(filename):
        return None
    download_to(url, filename)
    # Resource fix: the context manager closes the archive even when
    # extraction fails (the original leaked the handle on error).
    with zipfile.ZipFile(filename) as archive:
        archive.extract('libffmpeg.so', dest_dir)
    return os.path.join(dest_dir, 'libffmpeg.so')
def main():
    """Download any new Vivaldi/libffmpeg builds into ../data and print the
    shell commands needed to install them (requires root)."""
    PWD = os.path.dirname((os.path.abspath(__file__)))
    download_dir = os.path.join(PWD, '..', 'data')
    # Bug fix: the mode must be octal -- the original decimal 755 (0o1363)
    # requested nonsense permission bits for a fresh directory.
    os.makedirs(download_dir, mode=0o755, exist_ok=True)
    vivaldi_file = download_vivaldi(download_dir)
    libffmpeg_file = download_libffmpeg(download_dir)
    commands = []
    if vivaldi_file is not None:
        commands.append('dnf install {src}'.format(src=vivaldi_file))
    if libffmpeg_file is not None:
        commands.append('install {src} {dest}'.format(src=libffmpeg_file, dest=LIBFFMPEG))
    if len(commands) == 0:
        print('Not updated.')
    else:
        print('Update found, run following command:')
        script = '''
#!/bin/sh
sudo -- sh -c '{command}'
'''.strip()
        script = script.format(command='; '.join(commands))
        print(script)


if __name__ == '__main__':
    # Guarded entry point (the original called main() unconditionally,
    # which also ran the downloads on import).
    main()
984,941 | 2af548bbe5a7cb273a85b1f7ed79e3f593801b14 | '''
Created on Dec 1, 2013
@author: KevinVillela
'''
def getBaseURL(startDate):
    """Build the LA Times advanced-search URL for finance articles published
    on `startDate` (a one-day date range), ranked by content profile."""
    date_range = startDate.strftime("%m/%d/%Y-%m/%d/%Y")
    return ("http://www.latimes.com/search/dispatcher.front"
            "?Query=finance&target=adv_article&date=" + date_range
            + "&sortby=contentrankprof")
def sentimentToNumber(sentiment):
    """Map a DatumBox sentiment label to its numeric string code.

    Unknown labels map to the sentinel "999999".
    """
    codes = {"neutral": "0", "negative": "-1", "positive": "1"}
    return codes.get(sentiment, "999999")
def getAPIKey(user_number):
    # Map a numbered account to its DatumBox API key (KeyError for others).
    # NOTE(review): hard-coded credentials in source control are a security
    # risk -- rotate these keys and load them from the environment instead.
    return {
        1 : "a01778a8cafdedfc2e676b8ba0495a19", # API KEY for KVillela
        2 : "6193d654c5eebe977005717702592f26", # API KEY for KevinV
        3 : "aafd2b194cfbe927cb85c4e1fc579141", # API KEY for villka02
        4 : "c15e170662def9c247a0d074f01f0222", # API KEY for MrMAV
    }[user_number]
# Which of the accounts above to use for this run.
USER_NUMBER = 4
API_KEY = getAPIKey(USER_NUMBER)
# Maximum fetch/analysis attempts per article before giving up.
MAX_TRIES = 7
# Field separator for the sentiment output file (sic: "seperator").
seperator = "|"
from DatumBox import DatumBox
from bs4 import BeautifulSoup
import webarticle2text
import urllib
import socket
import threading
from datetime import date, timedelta, datetime
from xgoogle.search import GoogleSearch, SearchError
# Shared DatumBox client used by the worker threads below.
datum_box = DatumBox(API_KEY)
try:
keyword = "investing"
sites = ["http://online.wsj.com/public/page/news-business-us.html", "http://www.bloomberg.com/news/economy/", "http://www.marketwatch.com/", "http://www.rttnews.com/list/us-economic-news.aspx", "http://www.reuters.com/finance", "http://www.usatoday.com/money/", "money.usnews.com", "www.ft.com/home/us", "http://www.cnbc.com/" ]
query = "money.cnn.com"
for site in sites:
query = query + " OR site:" + site
qeury = query + " " + keyword + "daterange:";
gs = GoogleSearch(query);#"investing daterange:2456294-2456294")
gs.results_per_page = 50
results = gs.get_results()
for res in results:
print res.title.encode("utf8")
print res.desc.encode("utf8")
print res.url.encode("utf8")
print
except SearchError, e:
print "Search failed: %s" % e
def getSentimentOfArticle(articleURL, articleNumber, sentimentsFileName, dateToSearch, mutex_writefile):
    # Worker-thread body: fetch the article text, score it via DatumBox, and
    # append "url|date|sentiment" to the shared output file under a lock.
    tries = 0
    sentiment = ""
    print "\tArticle #" + str(articleNumber) + " for date " + dateToSearch.strftime("%m/%d/%Y") + " being analyzed..."
    # Retry up to MAX_TRIES times; only socket timeouts are retried.
    while (tries < MAX_TRIES):
        try:
            sentiment = sentimentToNumber(datum_box.sentiment_analysis(webarticle2text.extractFromURL(articleURL)))
            break;
        except socket.timeout:
            print("\t ^^Article #" + str(articleNumber) + " timed out " + str(tries + 1) + " time(s)...")
            tries = tries + 1
            if ( tries == MAX_TRIES):
                # Give up silently: nothing is written for this article.
                return
    # Serialize appends so concurrent workers don't interleave lines.
    mutex_writefile.acquire()
    sentimentsFile = open(sentimentsFileName, 'a')
    sentimentsFile.write(articleURL + seperator)
    sentimentsFile.write(dateToSearch.strftime("%m/%d/%Y") + seperator)
    sentimentsFile.write(sentiment);
    sentimentsFile.write("\n")
    sentimentsFile.close()
    mutex_writefile.release()
def crawl(dateToSearch, daysToSearch, fileName):
    # Walk backwards day-by-day from `dateToSearch` for `daysToSearch` days,
    # scraping LA Times search results and scoring each article's sentiment
    # on its own worker thread; results are appended to `fileName`.
    'dateToSearch = date(2013, 8, 30)'
    # NOTE(review): this handle is opened but never written here — the worker
    # threads reopen the file themselves; confirm it is needed at all.
    sentimentsFile = open(fileName, "a")
    for daysToGoBack in range(1, daysToSearch + 1):
        print "Searching on date " + dateToSearch.strftime("%m/%d/%Y") + " (day " + str(daysToGoBack) + " of " + str(daysToSearch) + ")"
        url = getBaseURL(dateToSearch)
        try:
            f = urllib.urlopen(url)
        except socket.timeout:
            # NOTE(review): decrementing a for-loop counter has no effect in
            # Python, and dateToSearch is not advanced — on timeout this day
            # is simply skipped; confirm the intended retry behaviour.
            daysToGoBack = daysToGoBack - 1
            continue
        myfile = f.read()
        f.close()
        soup = BeautifulSoup(myfile)
        # Each search hit sits in a <div class="result">.
        mydivs = soup.findAll("div", { "class" : "result" })
        i = 1
        threads = []
        mutex_writefile = threading.Lock()
        for each_div in mydivs:
            articleURL = each_div.find("a", href=True)['href']
            # Relative links need the site prefix.
            if (articleURL[0] == '/'):
                articleURL = "http://www.latimes.com" + articleURL
            thread = threading.Thread(target=getSentimentOfArticle, args=(articleURL, i, fileName, dateToSearch, mutex_writefile))
            thread.start()
            threads.append(thread)
            i = i + 1
        # Wait for all of this day's workers before moving to the previous day.
        for thread in threads:
            thread.join()
        dateToSearch = dateToSearch - timedelta(1)
    sentimentsFile.close()
''' End of crawling '''
#crawl(date(2013, 1, 16), 16, "articlesentiments.psv") |
984,942 | 98fe1916ab86c07e4d4228fae3c6e63a6642e1cf | from flask import Flask
app = Flask(__name__)
@app.route("/")
def hello():
    """Root endpoint: return a plain-text greeting."""
    greeting = "I Made Website With Python + Flask + Linux + Apache2!"
    return greeting
@app.route("/returnsHTML")
def secondEndPoint():
    # Serves a static HTML page (personal notes about sed) as the response body.
    return """
<html>
<body>
<h1>What I learned about sed</h1>
<p><a href="https://www.grymoire.com/Unix/Sed.html">I learned it all here!</a>
<h2>First thing I learned</h2>
<p> You do not have to put quotes in a sed command. But it is better and recommended if quotes (specifically single quotes) in doing sed commands </p>
<h2>Second thing I learned</h2>
<p> & is a special character used to put the string found in the replacement string even though it is not known. It can also be used any number of times in the replacement string </p>
<h2>Third thing I learned</h2>
<p> You can specify which occurence should be edited in multiple ways such as:
1. using "\(" and "\)" to mark the pattern,
2. adding a number after the subtitution command to indicate which pattern to be matched, and
3. combining a number with the global (g) flagto specify the portion to be changed </p>
</body>
</html>
"""
if __name__ == "__main__":
app.run()
|
984,943 | 92874c6849f2d326ca3c0471eb0923cf86652022 | # This file contains the loss calculation function that is specified in the paper
from itertools import product
import tensorflow as tf
import tensorflow.keras as keras
import tensorflow.keras.backend as K
import numpy as np
from tensorflow.keras.losses import KLDivergence
from tensorflow.math import divide_no_nan
from tensorflow.keras.layers import Add
from graph.knowledge_graph import *
tf.compat.v1.enable_eager_execution()
# siftflow
siftflow_labels = ["void", "awning", "balcony", "bird", "boat", "bridge", "building", "bus", \
"car", "cow", "crosswalk", "desert", "door", "fence", "field", \
"grass", "moon", "mountain", "person", "plant", "pole", "river", \
"road", "rock", "sand", "sea", "sidewalk", "sign", "sky", \
"staircase", "streetlight", "sun", "tree", "window"]
# cityscape
cityscape_labels = ['unlabeled', 'road', 'sidewalk', 'building', 'wall', 'fence', 'pole', 'traffic light',\
'traffic sign', 'vegetation', 'terrain', 'sky', 'person', 'rider', 'car', 'truck', 'bus',\
'train', 'motorcycle', 'bicycle']
# camvid
camvid_labels = ['animal','archway','bicyclist','bridge','building','car','pram','child','column',
'fence','drive','lane','text','scooter','others','parking','pedestrian','road','shoulder',
'sidewalk','sign','sky','suv','traffic cone','traffic light','train','tree','truck',
'tunnel', 'vegetation','void','wall']
kgraph = CN_based_KnowledgeGraph(camvid_labels, 0.15, 100, '/content/drive/My Drive/thesis/siftflow_similarity3.txt')
paddings = tf.constant([[1, 1,], [1, 1]])
# This file contains the loss calculation function that is specified in the paper
def dk_distance(y_neighbor, y):
    # Pointwise KL divergence between Bernoulli(y_neighbor) and Bernoulli(y),
    # with +1e-5 smoothing so the logs stay finite at probabilities 0 and 1.
    return (y_neighbor+0.00001) * (K.log(y_neighbor+0.00001)-K.log(y+0.00001)) + (1.00001-y_neighbor) * (K.log(1.00001-y_neighbor) - K.log(1.00001-y))
def get_class_similarity(pred):
    # Look up pairwise class-similarity values from the module-level knowledge
    # graph. `pred` holds index pairs used with gather_nd on the similarity
    # matrix; result is cast back to float32 for downstream arithmetic.
    pred = tf.cast(pred, tf.int64)
    res = tf.gather_nd(kgraph.get_similarity(), pred)
    res = tf.cast(res, tf.float32)
    return res
def recursive_map(pred):
    # Recursively peel off batch/spatial dimensions with map_fn until `pred`
    # is a 2-D tensor of index pairs, then resolve it to similarity values.
    if K.ndim(pred) > 2:
        return K.map_fn(lambda x: recursive_map(x), pred, dtype=tf.float32)
    else:
        return get_class_similarity(pred)
def dk_loss(y_true, y_pred):
    # Neighbourhood-consistency loss: for each pixel, compare its predicted
    # probability against each of its 8 neighbours. Matching ground-truth
    # labels are pushed to agree (KL term c1); differing labels are pushed
    # apart, weighted by knowledge-graph class similarity (hinge term c2).
    # NOTE(review): shape[1] is named img_height and shape[2] img_width, but
    # the slicing below uses img_width on axis 1 and img_height on axis 2 —
    # safe only for square inputs; confirm intended.
    img_height = y_true.shape[1]
    img_width = y_true.shape[2]
    id_y_pred = K.argmax(y_pred, axis=3) # find the prediction label
    pred_y_pred = K.max(y_pred, axis=3) # find the probability of the prediction
    true_y_true = tf.cast(K.argmax(y_true, axis=3), dtype=tf.int32) # find the truth label
    # Accumulator with the same spatial shape as the label map, all zeros.
    s = tf.cast(true_y_true * 0, dtype=tf.float32)
    # Compute neighboring pixel loss contributions
    for i, j in product((-1, 0, 1), repeat=2):
        if i == j == 0: continue
        # Take sliced image (interior pixels, 1-pixel border removed)
        sliced_id_y_pred = id_y_pred[:, 1:-1, 1:-1]
        sliced_y_true = true_y_true[:, 1:-1, 1:-1]
        sliced_y_pred = pred_y_pred[:, 1:-1, 1:-1]
        # Take "shifted" image (same interior, displaced by the offset (i, j))
        displaced_y_true = true_y_true[:, 1 + i:img_width - 1 + i, 1 + j:img_height - 1 + j]
        displaced_y_pred = pred_y_pred[:, 1 + i:img_width - 1 + i, 1 + j:img_height - 1 + j]
        displaced_id_y_pred = id_y_pred[:, 1+i:img_width - 1 + i, 1 + j:img_height - 1 + j]
        # calculate KLDivergence between neighbour and centre confidences
        dk = dk_distance(displaced_y_pred, sliced_y_pred) # KLDivergence(displaced_y_pred, sliced_y_pred)
        diff = sliced_y_true - displaced_y_true
        mask_t = tf.cast(K.equal(0, diff), dtype=tf.float32) # equal mask
        mask_f = tf.cast(K.not_equal(0, diff), dtype=tf.float32) # unequal mask
        # choice 1: same true label -> penalize divergence directly
        c1 = tf.multiply(dk_distance(displaced_y_pred, sliced_y_pred), mask_t)
        # choice 2: different labels -> hinge on similarity-weighted divergence
        stacked_ids = tf.stack([displaced_id_y_pred, sliced_id_y_pred], axis=3)
        simi = K.map_fn(lambda x: recursive_map(x), stacked_ids, dtype=tf.float32)
        c2 = tf.multiply(K.relu(3.0-tf.multiply(dk, simi)), mask_f)
        # concatenate: combine both contributions and pad back to full size
        c = Add()([c1, c2])
        c = tf.expand_dims(c, -1)
        c = tf.image.pad_to_bounding_box(c, 1, 1, img_height, img_width)
        c = tf.squeeze(c, -1)
        s = Add()([s, c])
    # Average over the 8 neighbour directions.
    s = s / 8.0
    return s
984,944 | a3976b4c40dbdb8e7c573efdf4d8bd06d34730b6 | #!/usr/local/bin/python
# -*- coding: utf-8 -*-
#System import
import os
# Django import
from django.core.exceptions import ObjectDoesNotExist
#Billing import
from default_periodic import settings
from probill.nas.models import *
from probill.billing.models import PeriodicLog,Account
from settings import *
def main():
    # Regenerate and push the dhcpd configuration on every active NAS.
    for nas in NasServer.objects.filter(active=True):
        process_nas(nas)
def process_nas(nas):
    # Build the ISC dhcpd config for one NAS, and if it differs from the file
    # on the server, upload it, validate it, and restart dhcpd (rolling back
    # to the previous config on validation failure).
    new_config = ''
    local_subnet = []
    for d_server in nas.dhcpserver_set.all():
        # NOTE(review): plain '=' (not '+=') — only the last dhcp server's
        # header survives this loop; confirm one server per NAS is assumed.
        new_config = configHead(d_server)
        for d_subnet in d_server.dhcpsubnet_set.all():
            new_config += netConfig(d_subnet)
            local_subnet.append(d_subnet.subnet)
    # One fixed-address host entry per account with a known MAC on a local subnet.
    for subnet in local_subnet:
        for account in Account.objects.filter(ip__in=subnet.network).exclude(mac=None):
            new_config += hostConfig(account)
    old_config = nas.open('/usr/local/etc/dhcpd.conf', 'r').read()
    if old_config <> new_config:
        nas.open('/usr/local/etc/dhcpd.conf', 'w').write(new_config)
        if checkConfig(nas):
            if settings.DEBUG:
                PeriodicLog.log('New dhcp config check ok. Restarting dhcpd.')
            stdin, stdout, stderr = nas.exec_command(' '.join([SUDO_PATH, '/usr/local/etc/rc.d/isc-dhcpd restart']))
            print stdout.read()
            print stderr.read()
        else:
            # Validation failed: restore the previous config untouched.
            PeriodicLog.log('New dhcp config check fail!!!! Restor old config.')
            nas.open('/usr/local/etc/dhcpd.conf', 'w').write(old_config)
def checkConfig(nas):
    # Validate the uploaded config with `dhcpd -t`; dhcpd reports problems on
    # stderr containing the phrase 'Configuration file errors'.
    stdin, stdout, stderr = nas.exec_command(' '.join([SUDO_PATH, 'dhcpd -t']))
    test = stderr.read()
    if test.find('Configuration file errors') <> -1:
        return False
    else:
        return True
def hostConfig(account):
    # Render one dhcpd `host` block pinning the account's IP to its MAC.
    if account.login:
        login = account.login
    else:
        # Accounts without a login get a synthetic name:
        # <subscriber-login>-<ip-with-dashes>.
        login = '{}-{}'.format(account.subscriber.login, str(account.ip).replace('.', '-'))
    return """
host %s {
hardware ethernet %s;
fixed-address %s;
}
""" % (login, account.mac, account.ip)
def netConfig (dhcp_subnet):
    # Render one dhcpd `subnet` block (authoritative, 24h max lease) with the
    # subnet's default router.
    return """
subnet %s netmask %s {
authoritative;
max-lease-time 86400;
option routers %s;
}
""" % (dhcp_subnet.subnet.network.network,dhcp_subnet.subnet.network.netmask,dhcp_subnet.default_router)
def configHead(dhcp_server):
    # Render the global dhcpd header: DNS servers, no DDNS, 24h default lease.
    return """
option domain-name-servers %s, %s;
ddns-update-style none;
default-lease-time 86400;
""" % (dhcp_server.dns_first,dhcp_server.dns_second)
def mac2mac(mac):
    """Convert a bare 12-hex-digit MAC ("aabbccddeeff") into the
    colon-separated form ("aa:bb:cc:dd:ee:ff")."""
    pairs = [mac[i] + mac[i + 1] for i in range(0, 11, 2)]
    return ':'.join(pairs)
if __name__=="__main__":
main()
|
984,945 | 7a21008be471e3925187dfc7cdca72eeacb4497b | from django.test import TestCase, Client
from django.urls import reverse
from django.contrib.auth.models import User
from profiles.forms import UserProfileForm
class TestUserViews(TestCase):
    """View tests for the user profile page: access control and template use."""
    def setUp(self):
        # Fresh test client, one registered user, and the URLs/form under test.
        self.client = Client()
        self.user = User.objects.create_user(
            username='testuser',
            email='test@email.com',
            password='testpassword'
        )
        self.profile = reverse("profile")
        self.login = reverse("account_login")
        self.form = UserProfileForm
    def test_profile_login_required(self):
        ''' Test the user needs to be logged in to see the userprofile page '''
        # Anonymous request must not get a plain 200 (expected: redirect to login).
        response = self.client.get(self.profile)
        self.assertNotEqual(response.status_code, 200)
    def test_profile_page_logged_in(self):
        ''' Test the user profile page when logged in '''
        self.client.login(username="testuser", password="testpassword")
        response = self.client.get(self.profile)
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(response, "profiles/profile.html")
        self.assertTemplateUsed(response, "base.html")
|
984,946 | 56f3af60a61979b02b11c095578618c1aa269c55 | def print_name(name):
print('Hello '+name)
print_name('Rajesh')
|
984,947 | 11b860d71608b8e0caed1e06a8db0ed97534864a | import numpy as np
import numpy.random as nprng
import theano
import theano.tensor as T
from theano_utils import floatX
class LogReg:
    """Single dense / logistic-regression layer over a Theano input matrix.

    shape is (n_out, n_in): W is n_out x n_in and is applied as dot(W, inp.T).T,
    so rows of `inp` are samples. The bias term is currently disabled.
    """
    def __init__(self, inp, shape, act=T.nnet.sigmoid):
        self.shape = shape
        print(shape)
        # He-style initialisation scaled by sqrt(2 / n_in).
        self.W = theano.shared(
            value=floatX(nprng.randn(shape[0], shape[1])*np.sqrt(2/shape[1])),
            # value=floatX(nprng.randn(shape[0], shape[1])*np.sqrt(2/(shape[1] + shape[0]))),
            name='W',
            borrow=True
        )
        # Bias deliberately disabled; kept for reference.
        # self.b = theano.shared(
        #     value=floatX(nprng.randn(shape[0])*np.sqrt(2/shape[0])),
        #     name='b',
        #     borrow=True
        # )
        # self.s = T.dot(self.W, inp.T).T + self.b
        self.s = T.dot(self.W, inp.T).T   # pre-activation
        self.a = act(self.s)              # activation output
        # self.params = [self.W, self.b]
        self.params = [self.W]
        self.inp = inp
984,948 | 8cc9ae53f0ee8dbde98e7bc09975822d09f22ac5 | from aiounittest import AsyncTestCase
from robot.collector.shortcut import *
class FlatCollectorTest(AsyncTestCase):
    """Checks that the flat() collector flattens one level of nested
    lists/tuples while preserving element order."""
    async def test_flat(self):
        # Mixed lists and tuples of varying lengths, one nesting level deep.
        item = [
            [0, 1, 2, 3],
            [4, 5],
            (6, 7,),
            [8],
            (9,),
        ]
        expected = list(range(10))
        collector = flat()
        # Collectors return a (context, result) pair; only the result matters here.
        _, result = await collector(None, item)
        self.assertEqual(result, expected)
|
984,949 | 281d1ea7990beea595437e93576c7d169d495e9f | """
This program maps out the config file to be ordered in the form of (r,g,b)
it remaps the original scheme to instead have it so that
each slot in the matrix to be arranged as such:
R
G B
it then takes the settings in rgbvalues.in and writes it into config.in
this loses purpose if other LED's are used.
"""
def use_rgb():
    """Rewrite config.in from RGBvalues.in, reordering each slot's three
    channel values to match the physical wiring order of that slot's LED."""
    # Physical channel order for each of the 16 slots (indices into r/g/b).
    slot_orders = [
        ("r", "g", "b"), ("b", "r", "g"), ("b", "r", "g"), ("g", "b", "r"),
        ("b", "r", "g"), ("g", "b", "r"), ("g", "b", "r"), ("g", "b", "r"),
        ("b", "r", "g"), ("b", "r", "g"), ("b", "r", "g"), ("b", "g", "r"),
        ("b", "r", "g"), ("g", "b", "r"), ("g", "b", "r"), ("g", "b", "r"),
    ]
    light_settings = open("RGBvalues.in", "r")
    config = open("config.in", "w")
    config.write("slot, l1, l2, l3")
    # Skip the header row, then collect each slot's three channel values.
    rows = light_settings.readlines()[1:]
    channels = {"r": [], "g": [], "b": []}
    for row in rows:
        fields = row.rstrip('\n').split(",")
        channels["r"].append(fields[1])
        channels["g"].append(fields[2])
        channels["b"].append(fields[3])
    config.write("\n")
    for slot, (c1, c2, c3) in enumerate(slot_orders):
        config.write("%d, %s, %s, %s \n" % (
            slot, channels[c1][slot], channels[c2][slot], channels[c3][slot]))
    light_settings.close()
    config.close()
|
984,950 | e8ee6f603bf60dd2430a34aefcc78c6207abfcb8 | import logging
logger = logging.getLogger(__name__)
from abc import abstractmethod, ABCMeta
import game
from gcc_utils import deep_unmarshal, lto_to_cons, is_cons, cons_to_list, cons_to_mat
class InterpreterException(Exception):
    """Raised by GCC interpreter implementations on execution errors."""
    pass
class GCCInterface(object):
    """Abstract interface to a GCC virtual machine: calling functions and
    (un)marshalling values between Python and the VM representation."""
    __metaclass__ = ABCMeta
    @abstractmethod
    def call(self, address_or_closure, *args, **kwargs):
        '''Call a function. Put args into a new environment frame, return the top of the data stack after the function returns.
        Args and the return value are automatically marshalled.'''
    @abstractmethod
    def marshal(self, x):
        '''Return an opaque handle representing i, which can be an int or a two-element tuple.
        Shallow, so the elements of the tuple must be marshalled handles.'''
    @abstractmethod
    def unmarshal(self, x):
        '''If x is an opaque handle representing an int or cons, unmarshal (shallowly for cons) and return it.
        Otherwise return the opaque handle unchanged.
        This could cause problems for unmarshall_deep if we had a GCC representing opaque handles as raw tuples, but we don't.
        '''
    def last_call_ticks(self):
        'Return the number of ticks taken to execute the last call'
        # Default: implementations without tick accounting report zero.
        return 0
class GCCWrapper(object):
    """Drives an AI program running on a GCCInterface VM: marshals world
    state into the VM's cons representation, invokes the AI's init/step
    functions, and collects per-call tick statistics."""
    def __init__(self, gcc):
        assert isinstance(gcc, GCCInterface)
        self.gcc = gcc
        self.total_step_ticks = 0   # sum of ticks across all get_move() calls
        self.max_step_ticks = 0     # worst single step so far
        self.moves = 0              # number of get_move() calls
    def initialize(self, world, undocumented):
        # Call the program entry point (address 0); by convention it returns
        # (initial AI state, step closure) used for every subsequent move.
        world_state = self.marshal_world_state(world)
        self.ai_state, self.step_function = self.gcc.call(0, world_state, undocumented, max_ticks=game.MAX_TICKS_INIT)
        self.init_ticks = self.gcc.last_call_ticks()
    def get_move(self, world):
        # One AI step: (new state, move) = step(state, world); updates stats.
        gcc = self.gcc
        world_state = self.marshal_world_state(world)
        self.ai_state, move = gcc.call(self.step_function, self.ai_state, world_state, max_ticks=game.MAX_TICKS)
        ticks = gcc.last_call_ticks()
        self.moves += 1
        self.total_step_ticks += ticks
        if ticks > self.max_step_ticks:
            self.max_step_ticks = ticks
        self.log_ai_state(self.ai_state)
        return move
    @staticmethod
    def log_ai_state(ai_state):
        # Special-case pretty printing for the 'ff' AI, which tags its state
        # with a magic number and carries a numeric field matrix.
        if is_cons(ai_state) and ai_state[0] == 999888777: # password from ff.py
            field = ai_state[1]
            field = [cons_to_mat(row)
                     for row in cons_to_list(field)]
            logger.info('ff field state:')
            for line in field:
                s = ''
                for e in line:
                    s += '{:6}'.format(e)
                logger.info(s)
        else:
            logger.info('ai state: {}'.format(ai_state))
    def get_vm_statistics(self):
        # NOTE(review): the field named 'total' is filled with max_step_ticks,
        # not total_step_ticks — confirm against GccStats' field meaning.
        return game.GccStats(
            init=self.init_ticks,
            avg=1.0 * self.total_step_ticks / self.moves if self.moves else 0,
            total=self.max_step_ticks)
    def marshal_world_state(self, world):
        # Convert to nested lists/tuples/ints, then to the VM cons encoding.
        world_state = self.convert_world_state(world)
        return lto_to_cons(world_state)
    def convert_world_state(self, world):
        '''convert world_state to the list/tuple/int representation'''
        return (self.encode_map(world),
                self.encode_lman(world),
                self.encode_ghosts(world),
                world.remaining_fruit_ticks())
    def encode_map(self, world):
        # Row-major grid of cell codes, with the fruit spawn cell overwritten.
        result = [self.encode_map_row(world, y) for y in range(world.height())]
        if world.fruit_spawn is not None:
            x, y = world.fruit_spawn
            result[y][x] = game.FRUIT
        return result
    def encode_map_row(self, world, y):
        return [world.at(x, y) for x in range(world.width())]
    def encode_lman(self, world):
        # Lambda-Man tuple: (power-pill ticks, position, direction, lives, score).
        lman = world.lambdaman
        return (world.remaining_power_pill_ticks(),
                (lman.x, lman.y),
                lman.direction,
                lman.lives,
                lman.score)
    def encode_ghosts(self, world):
        # One (vitality, position, direction) tuple per ghost.
        return [(ghost.vitality, (ghost.x, ghost.y), ghost.direction)
                for ghost in world.ghosts]
|
984,951 | 733f7137822c35ff69b0d0c877436e8eea9c5684 | from sys import maxsize
from Node import Node
from ChessBoard import ChessBoard
##======================================================================================================================
## Game Implementation
def Check(chessboard):
    """ Check if anyone wins the game.
    @param ChessBoard chessboard: the chessboard this game is played on
    @rtype: int — 0 if the game is over (win, loss, or full board), 1 otherwise
    """
    # Hoisted: WinCheck() was previously re-evaluated up to three times per call.
    result = chessboard.WinCheck()
    board_full = chessboard.MovesLeft() == 0
    # Game over either because the board is full or because someone has won.
    if board_full or result != 0:
        print("*" * 60)
        if result == 1:
            print("\tCongrats you won!!! =D")
        elif result == -1:
            print("\tComputer won, maybe better luck next time... =(")
        elif board_full:  # result == 0 and no spots left: a draw
            print("\tOpps, no more spot on the chessboard... New Game? =)")
        print("*" * 60)
        return 0
    return 1
if __name__ == "__main__":
    # Interactive Tic-Tac-Toe: human (player 1) vs. minimax computer (-1).
    currPlayer = 1
    thisChessboard = ChessBoard()
    print("Welcome to the Tic-Tac-Toe Game!!\n")
    print("How to play: Occupy three adjacent tiles to win the game!!\n" +
          "You can only occupy one tile each turn.")
    while thisChessboard.MovesLeft() > 0:
        print("This is the current board: \n")
        thisChessboard.Draw()
        print("Which tile would you want to play? \n")
        # Get the row and column for the play.
        row = int(input("Row (0, 1 or 2): \n"))
        col = int(input("Column (0, 1 or 2): \n"))
        thisChessboard.MakeMove(row, col, currPlayer)
        # The depth of the decision node tree should be dynamically updated as how many moves are left.
        depth = thisChessboard.MovesLeft()
        thisChessboard.Draw()
        # Check if anyone wins the game.
        if thisChessboard.WinCheck() != 1 and thisChessboard.WinCheck() != -1:
            # Build the minimax tree from the current position and evaluate it.
            decisionNode = Node(depth, currPlayer, thisChessboard)
            decisionNode.MinMax()
            bestChoice = thisChessboard.board
            bestValue = maxsize * -currPlayer
            # Get the best choice and the corresponding value using the Minmax algorithm.
            for i in range(len(decisionNode.children)):
                child = decisionNode.children[i]
                # Since current player is always the human player(currPlayer == 1),
                # we want to get the maximum value.
                if bestValue <= child.value:
                    bestValue = child.value
                    # Move the chessboard as the child node's chessboard if this is the maximum.
                    bestChoice = child.chessboard.board
            thisChessboard.board = bestChoice
            if thisChessboard.MovesLeft() > 0:
                print("The computer has moved")
            # Used for debugging.
            # print(thisChessboard.board)
            # If the computer wins the game.
            if thisChessboard.WinCheck() == -1:
                thisChessboard.Draw()
                print("Opps computer wins the game... Better luck next time!! :(")
                break
        # If the human player wins the game.
        else:
            print("Congrats you win the game!! =D")
            break
    if thisChessboard.MovesLeft() <= 0:
        print("Opps, the tiles are all occupied... try another game! :)")
|
984,952 | cfc9eb5896176b06bbcbdefd6dea681c29d0e2a6 | __author__ = 'zhangxa'
from tornado import gen
from tornado.ioloop import IOLoop
@gen.coroutine
def cor(n,str):
    # Print `str` and `n`, n times, yielding a 1-second sleep between prints.
    # NOTE(review): the second parameter shadows the builtin `str`.
    for i in range(n):
        print(str,n)
        yield gen.sleep(1)
    return str
@gen.coroutine
def main():
    # NOTE(review): the futures returned by cor() are never yielded, so this
    # coroutine just prints the two pending Future objects and returns —
    # confirm whether `yield [a, b]` was intended to await both.
    a = cor(3,"first")
    b = cor(3,"second")
    print(a,b)

IOLoop.instance().run_sync(main)
|
984,953 | fde63862332d28273792b646730156ccf84e1a1d | """A dictionary is a python data structure that matches KEYS with VALUES.
You can look up a value using its key. KEYS must be unique,
but values can be the same."""
"""Example is the English Dictionary. Key = the word
value = the definition."""
# Declare a dictionary with known VALUES.
spanish_english = {
    'hola': "hello",
    'gato': 'cat',
    'mujer': 'women',
}
first_value = spanish_english['hola']

bikes = []   # empty list
users = {}   # empty dictionary
users['Lila'] = 20
# users is now {'Lila': 20}

friends_family = {
    'Amber': 4,
    'Grace': 7,
    'Haona': 16,
}
friends_family2 = {}
friends_family['Amber'] = 16   # assigning to an existing key overwrites it

# Build a dict of name -> [age, school-level] pairs.
dictTest = {}
for person, age, school in (
    ('Haona', 16, "Junior"),
    ('Amber', 4, "preschool"),
    ('Grace', 6, "elementary"),
):
    dictTest[person] = [age]
    dictTest[person].append(school)
print(dictTest)
|
984,954 | 662ef9c33080257fdf685daf71af8eadd4d3dfb6 | from datetime import datetime
from unittest import TestCase
from Budget import Budget
from BudgetManager import BudgetManager
from Period import Period
class TestBudgetManager(TestCase):
    """Unit tests for BudgetManager.account_budget: the budgeted amount is
    prorated per day over the days of the queried Period that fall inside
    the budget's month.

    NOTE(review): `bm = BudgetManager` binds the class itself, not an
    instance, so get_budgets/account_budget are being invoked on the class —
    confirm they are classmethods (or that an instance was intended).
    """
    def test_no_period(self):
        # A None period contributes nothing.
        bm = BudgetManager
        bm.get_budgets(Budget("201703", 31))
        self.assertEqual(bm.account_budget(None), 0)
    def test_20_days_period_in_budget_time(self):
        # 31 units over 31 days -> 1/day; 20 days inside March = 20.
        bm = BudgetManager
        bm.get_budgets(Budget("201703", 31))
        self.assertEqual(bm.account_budget(Period(datetime(2017, 3, 1), datetime(2017, 3, 20))), 20)
    def test_period_before_budget_time(self):
        bm = BudgetManager
        bm.get_budgets(Budget("201703", 31))
        self.assertEqual(bm.account_budget(Period(datetime(2017, 2, 27),datetime(2017, 2, 28))), 0)
    def test_period_after_budget_time(self):
        bm = BudgetManager
        bm.get_budgets(Budget("201703", 31))
        self.assertEqual(bm.account_budget(Period(datetime(2017, 4, 1),datetime(2017, 4, 3))), 0)
    def test_period_overlapping_budget_first_day(self):
        # Only Mar 1-3 of the period are budgeted.
        bm = BudgetManager
        bm.get_budgets(Budget("201703", 31))
        self.assertEqual(bm.account_budget(Period(datetime(2017, 2, 27),datetime(2017, 3, 3))), 3)
    def test_period_overlapping_budget_last_day(self):
        # Only Mar 27-31 of the period are budgeted.
        bm = BudgetManager
        bm.get_budgets(Budget("201703", 31))
        self.assertEqual(bm.account_budget(Period(datetime(2017, 3, 27),datetime(2017, 4, 2))), 5)
    def test_invalid_period(self):
        # A period whose end precedes its start must raise on construction.
        bm = BudgetManager
        bm.get_budgets(Budget("201703", 31))
        self.assertRaises(Exception, Period, datetime(2017, 3, 27), datetime(2017, 3, 21))
    def test_amount_is_100_per_day(self):
        # 3100 over 31 days -> 100/day; 7 days = 700.
        bm = BudgetManager
        bm.get_budgets(Budget("201703", 3100))
        self.assertEqual(bm.account_budget(Period(datetime(2017, 3, 20), datetime(2017, 3, 26))), 700)
    def test_multiple_budgets_with_overlapping_period(self):
        # 2 days of March at 100/day + 3 days of April at 4/day = 212.
        bm = BudgetManager
        bm.get_budgets(Budget("201703", 3100), Budget("201704", 120))
        self.assertEqual(bm.account_budget(Period(datetime(2017, 3, 30), datetime(2017, 4, 3))), 212)
|
984,955 | 7b9e517c7e4598ea0e99ef8ef3ee835cbc176f80 | import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
from qubayes_tools import *
from network_setup import *
def get_probabilities(node):
    ############################################
    # USE THIS FUNCTION TO FIND THE PROBABILITIES FOR AN INDIVIDUAL NODE IN THE NETWORK
    ### INPUT ###
    # node: Node   Node object in network (uses .data, .states, .name)
    ### OUTPUT ###
    # probs: dict  mapping "<name>_<state>" -> marginal probability
    ############################################
    num_total = len(node.data)  # number of observations behind the estimates
    probs = {}
    running_total = 0.0
    for state_label, state_value in node.states.items():
        p = np.count_nonzero(node.data == state_value) / num_total
        running_total += p
        probs[node.name + "_" + state_label] = p
    # Sanity check: the marginals must sum to one (up to rounding).
    assert round(running_total, 3) == 1.
    return probs
def get_conditional_probability(child, *ps):
    ############################################
    ### THIS FUNCTION CALCULATES CONDITIONAL PROBABILITIES FOR CHILD NODE
    ### THAT HAS s_m STATES AND m PARENT NODES EACH WITH s_i STATES WHERE i = 0, ..., m-1
    ### INPUTS ###
    # child  Node
    # *ps    Node(s) or a single list of Nodes (the parents)
    ### OUTPUT ###
    # a dictionary of conditional probabilities keyed
    # "<child>_<state>|<p0>_<state>,<p1>_<state>,..."
    ############################################
    #we might want to add some assert statements checking that all inputs have the same shape
    #also use assert to check that for all p in ps, ps.name is in child.parents, and vice versa
    # Accept either get_conditional_probability(c, p1, p2) or (c, [p1, p2]).
    if type(ps[0]) == list:
        ps = ps[0]
    if type(ps[0]) != Node:
        print("ERROR: This input is not right!")
    keys = generate_cond_keys(child, ps)
    cond_probs = {key: 0 for key in keys} #initialize a dictionary for conditional probabilities
    for key in keys:
        # numer: joint count (child state AND all parent states);
        # tot: count of observations matching all the parent states.
        numer, tot = 0, 0
        n = len(child.data)
        for i in range(n):
            all_ps = True
            for j in range(len(ps)):
                p = ps[j]
                # Parse the j-th parent's state label out of the key and
                # compare against this observation's parent value.
                if p.data[i] != int(p.states[key.split("|")[1].split(",")[j].split("_")[1]]):
                    all_ps = False
                    break
            if all_ps:
                tot += 1
                if child.data[i] == int(child.states[key.split("|")[0].split("_")[1]]):
                    numer += 1
        # NOTE(review): raises ZeroDivisionError if no observation matches
        # the parent configuration — confirm the data guarantees coverage.
        cond_probs.update({key : numer/tot})
    return cond_probs
# example: results from running simple model on simulator:
# {'000': 2783, '001': 1240, '100': 603, '111': 815, '110': 294, '010': 1712, '101': 485, '011': 260}
def get_marginal_0probabilities(state_counts):
    #state_counts: dict, counts for each state from network result (should have 2^n entries)
    #returns: array of length n, marginal probabilities that each qubit is 0,
    #from most significant to least significant qubit
    n = len(next(iter(state_counts)))        # number of qubits per bitstring
    total = sum(state_counts.values())
    marginals = np.zeros(n)
    for bitstring, count in state_counts.items():
        for i, bit in enumerate(bitstring):
            if bit == '0':
                marginals[i] += count
    return marginals / total
def func(**counts):
    # Extract the count for the '001' outcome from keyword-style counts
    # (keys presumably look like "c<bitstring>" — confirm against the caller).
    return counts["c001"]
984,956 | 36cf9137ce62f11f7560f5b102aef1f9c4632d68 | # Copyright Yahoo. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root.
#! /usr/bin/env python3
import pandas as pd
import sys
import json
import numpy as np
import mmh3
import binascii
def compute_hash(url, text):
    """Return a stable 64-bit document id derived from url + caption.

    None values are treated as empty strings. mmh3.hash64 returns a pair of
    signed 64-bit halves; only the first half is used.
    """
    if url is None:
        url = ''
    if text is None:
        text = ''
    total = (url + text).encode("utf-8")
    return mmh3.hash64(total)[0]
def nan_handler(number):
    """Replace NaN with 0; pass any other numeric value through unchanged."""
    return 0 if np.isnan(number) else number
# Script body: join LAION metadata (parquet, argv[1]) with precomputed CLIP
# vectors (npy, argv[2], memory-mapped) and emit one Vespa feed document in
# JSON-lines form per row on stdout.
df = pd.read_parquet(sys.argv[1])
vectors = np.load(sys.argv[2], mmap_mode='r')
for index, row in df.iterrows():
    url = row['url']
    caption = row['caption']
    # Document id: murmur3 hash of url+caption, stable across runs.
    id = compute_hash(url, caption)
    similarity = nan_handler(row['similarity'])
    # Scale [0, 1] similarity to an int8-safe 0-127 range.
    similarity_scaled = min(int(100*similarity), 127)
    doc = {
        "put": "id:laion:image::%i" % id,
        "fields": {
            "url": row['url'],
            "caption": row['caption'],
            "nsfw": row['NSFW'],
            "similarity": similarity_scaled,
            "height": row['height'],
            "width": row['width'],
            "license": row['LICENSE'],
            "vector": {
                "values": vectors[index].astype(np.float32).tolist()
            }
        }
    }
    print(json.dumps(doc))
984,957 | 13d505d558f20b6eb9ae2d3f6307d46fa02552b2 | #-*- coding:utf-8 -*-
#'''
# Created on 19-7-16 下午2:16
#
# @Author: Greg Gao(laygin)
#'''
from .std_vgg16 import StdVGG16
__all__ = ['StdVGG16']
|
984,958 | 2962f48dde414b1f94683c4b8d847713430c0619 | """Solves the maze using A* algorithm"""
#****************************************Imports********************************
#****************************************Classes********************************
class Node(object):
    """A node in the A* search: a grid cell plus its g(n), h(n) and f(n) values."""
    def __init__(self, coords, goal_coords, current_path_length):
        """Stores the cell coordinates and computes all three cost values."""
        self.coords = coords
        self.calculate_gn_value(current_path_length)
        self.calculate_hn_value(goal_coords)
        self.calculate_fn_value()
    def calculate_gn_value(self, current_path_length):
        """g(n): length of the path taken so far if this node is traversed."""
        self.gn_value = current_path_length
    def calculate_hn_value(self, goal_coords):
        """h(n): Manhattan distance from this node to the goal."""
        row_gap = abs(self.coords[0] - goal_coords[0])
        col_gap = abs(self.coords[1] - goal_coords[1])
        self.hn_value = row_gap + col_gap
    def calculate_fn_value(self):
        """f(n) = g(n) + h(n)."""
        self.fn_value = self.gn_value + self.hn_value
    def __eq__(self, other):
        """Nodes compare equal when they refer to the same grid cell."""
        return self.coords == other.coords
#****************************************Global Variables********************************
grid_dims = None #The dimensions of the maze grid
goal_pos = None #The coordinates of the goal node
obstacle_coords = [] #The coordinates of the obstacles on the grid
open_list = [] #A list of the next possible node to move to
closed_list = [] #A list of the already traversed nodes
#****************************************Functions********************************
def solve_maze(grid_dimensions, goal_position, start_position, obstacle_coordinates):
    """Solves the maze with A*; returns the list of coordinates of the path
    found from start_position to goal_position."""
    reset_bot() #Resetting the bot before solving a new maze
    # Publish the maze parameters through the module-level globals the rest
    # of the solver reads.
    global grid_dims
    grid_dims = grid_dimensions
    global goal_pos
    goal_pos = goal_position
    global obstacle_coords
    obstacle_coords = obstacle_coordinates
    #Adding the start position to the closed_list
    closed_list.append(Node(start_position, goal_position, 0))
    #Traversing the maze
    paths = [] #A list of the paths traversed
    paths.append([closed_list[0]]) #Adding a new path
    path = traverse_maze(paths, paths[0]) #Getting the path
    return get_path_coordinates(path)
def reset_bot():
    """Empties the open and closed lists so a new maze can be solved."""
    for node_list in (open_list, closed_list):
        node_list.clear()
def traverse_maze(paths, current_path):
    """Recursively traverses the maze, extending the current path (or
    branching into a new one) until the goal becomes reachable."""
    #Getting the traversible nodes connected to the current node
    connected_nodes = get_connected_nodes(current_path[-1], current_path.__len__())
    #Checking if the goal can be reached from the current node
    if(Node(goal_pos, goal_pos, current_path.__len__()) in connected_nodes) :
        current_path.append(Node(goal_pos, goal_pos, current_path.__len__() + 1))
        closed_list.append(current_path[-1])
        return current_path
    #Adding the connected nodes to the open_list of candidates
    open_list.extend(connected_nodes)
    #Selecting the next node to travel to (lowest f(n), ties by h(n))
    next_node = get_next_node()
    #Removing the next node from open list and adding to closed list
    open_list.remove(next_node)
    closed_list.append(next_node)
    #Checking if the next node belongs to the current path or to a different path
    if(not next_node in connected_nodes) :
        # The best open node is not adjacent to the current path tip: branch.
        # NOTE(review): manage_paths is not defined in this chunk — confirm
        # it exists elsewhere in the file.
        new_path = manage_paths(next_node, paths) #Creating a new path-branch
        paths.append(new_path) #Adding the new path to the list of paths
        return traverse_maze(paths, new_path) #Traversing the new path
    else :
        next_node.gn_value = current_path.__len__() #Updating the node's g(n) value
        current_path.append(next_node) #Adding the node to the current path
        return traverse_maze(paths, current_path)
def get_connected_nodes(node, current_path_len):
    """Returns the traversable neighbours (up, down, left, right) of `node`.

    A neighbour is traversable when it lies on the grid, has not already been
    traversed (closed list), and is not an obstacle.

    The original four near-identical if-blocks (one of which carried a wrong
    "1st column" comment on the last-column check) are collapsed into a
    bounds-checked candidate list; append order (up, down, left, right) and
    behaviour are unchanged.
    """
    connected_nodes = []
    closed_list_coords = get_path_coordinates(closed_list)
    row, col = node.coords
    # Candidate neighbour coordinates, skipping positions off the grid edges.
    candidate_coords = []
    if row != 0:                      # not in the first row -> up
        candidate_coords.append((row - 1, col))
    if row != grid_dims[0] - 1:       # not in the last row -> down
        candidate_coords.append((row + 1, col))
    if col != 0:                      # not in the first column -> left
        candidate_coords.append((row, col - 1))
    if col != grid_dims[1] - 1:       # not in the last column -> right
        candidate_coords.append((row, col + 1))
    for coords in candidate_coords:
        # Skip already-traversed cells and obstacles.
        if coords not in closed_list_coords and coords not in obstacle_coords:
            connected_nodes.append(Node(coords, goal_pos, current_path_len))
    return connected_nodes
def get_node_coordinates(nodes) :
    """Returns a list with the coordinates of each of the given nodes."""
    return [node.coords for node in nodes]
def get_next_node() :
    """From the open_list, selects the next node to travel to.

    Picks the node with the lowest f(n); ties are broken by the lowest h(n).
    Raises when the open list is empty (maze unsolvable from here).
    """
    if len(open_list) == 0:
        raise Exception("No traversible nodes left")
    candidates = get_node_with_lowest_fn(open_list) #Getting the list of nodes having min. f(n) value
    # Break f(n) ties with the lowest h(n) value.
    if len(candidates) > 1:
        return get_node_with_lowest_hn(candidates)
    return candidates[0]
def get_node_with_lowest_fn(nodes) :
    """Returns the list of nodes sharing the lowest f(n) value, in input order.

    Bug fix: the running minimum is now updated whenever a smaller f(n) is
    found.  Previously min_fn stayed frozen at nodes[0].fn_value, so every
    later comparison was made against a stale minimum and the function could
    return nodes that were not actually minimal (e.g. [5, 3, 4] returned the
    node with f(n) == 4).
    """
    next_nodes = [nodes[0]] #The nodes having the lowest f(n) value
    min_fn = nodes[0].fn_value
    for candidate in nodes[1:]:
        if candidate.fn_value < min_fn:
            min_fn = candidate.fn_value  # track the new minimum
            next_nodes = [candidate]
        elif candidate.fn_value == min_fn:
            next_nodes.append(candidate)
    return next_nodes
def get_node_with_lowest_hn(nodes) :
    """Returns the node with the smallest h(n) value (first one wins on ties)."""
    return min(nodes, key=lambda candidate: candidate.hn_value)
def manage_paths(node, paths) :
    """Creates a new path branch that ends in `node`.

    Called when `node` was picked from the open list but is not adjacent to
    the tip of the current path.  Searches the neighbours of `node` for the
    junction — an already-explored node whose stored g(n) matches the
    neighbour's expected g(n) — then copies that path up to the junction,
    appends `node`, fixes its g(n), and returns the new branch.
    Raises when no junction can be found.
    """
    #Getting the nodes neighbouring the given node
    neighbours = get_neighbouring_nodes(node)
    #Creating a new path branch
    new_path = [] #The new path
    path_found = False #Indicates whether the path to which the node belongs has been found
    #Looping through the neighbours
    for neighbour in neighbours :
        for path in paths :
            #Checking whether the path contains the neighbour
            if(neighbour in path) :
                index = path.index(neighbour)
                #Checking if the branch belongs to the current path
                if(path[index].gn_value == neighbour.gn_value) :
                    new_path = path[:index + 1] + [node] #Creating a new path branch
                    new_path[-1].gn_value = new_path.__len__() - 1 #Updating the node's g(n) value
                    path_found = True
                    break
        if(path_found) :
            break
    if(not path_found) :
        raise Exception("No branch junction found")
    #Setting the new path as the current path
    return new_path
def get_neighbouring_nodes(node) :
    """Returns the in-grid, non-obstacle neighbours (up, down, left, right) of node.

    Unlike get_connected_nodes, already-visited nodes are NOT filtered out,
    and each neighbour gets g(n) = node.gn_value - 1 (one step closer to the
    start) — it is used by manage_paths to locate a branch junction.

    Rewritten from four copy-pasted stanzas (one of which carried a wrong
    "1st column" comment on the last-column check) into a single loop over
    the four direction offsets; the order up, down, left, right is preserved.
    """
    connected_nodes = [] #A list of the connected nodes
    row, col = node.coords
    # Candidate offsets in the original order: up, down, left, right.
    for d_row, d_col in ((-1, 0), (1, 0), (0, -1), (0, 1)):
        coords = (row + d_row, col + d_col)
        # Skip coordinates that fall outside the grid.
        if not (0 <= coords[0] < grid_dims[0] and 0 <= coords[1] < grid_dims[1]):
            continue
        # Skip obstacles.
        if coords in obstacle_coords:
            continue
        connected_nodes.append(Node(coords, goal_pos, node.gn_value - 1))
    return connected_nodes
def get_path_coordinates(path) :
    """Returns the coordinates of the nodes in the path."""
    return [node.coords for node in path]
|
984,959 | fb982e38be9856f62c6a77c0ce1b09465bc30f59 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created 24.05.19 09:56
@author: mvb
"""
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Fri Apr 5 09:11:42 2019
@author: mvb
"""
from multiprocessing.connection import Client
import numpy as np
import os
import pickle
import pandas as pd
import matplotlib.pyplot as plt
import seaborn as sns
from simulator.textcommunication import encode_request_msg_to_txt, decode_answer_msg_from_txt
def main():
#___user inputs
# pathmpcsolutiondata = '/home/mvb/0_ETH/01_MasterThesis/Logs_GoKart/LogData/dynamics_newFormat/cuts/20190902/20190902T174135_05/mpc' #path where all the raw, sorted data is that you want to sample and or batch and or split
# pathmpcsolutiondata = '/home/mvb/0_ETH/01_MasterThesis/Logs_GoKart/LogData/dynamics_newFormat/cuts/20190905/20190905T191253_06/mpc' #path where all the raw, sorted data is that you want to sample and or batch and or split
# pathmpcsolutiondata = '/home/mvb/0_ETH/01_MasterThesis/Logs_GoKart/LogData/dynamics_newFormat/cuts/20190909/20190909T174744_07/mpc' #path where all the raw, sorted data is that you want to sample and or batch and or split
# pathmpcsolutiondata = '/home/mvb/0_ETH/01_MasterThesis/Logs_GoKart/LogData/dynamics_newFormat/cuts/20190912/20190912T162356_05/mpc' #path where all the raw, sorted data is that you want to sample and or batch and or split
# pathmpcsolutiondata = '/home/mvb/0_ETH/01_MasterThesis/Logs_GoKart/LogData/dynamics_newFormat/cuts/20190916/20190916T175046_05/mpc' #path where all the raw, sorted data is that you want to sample and or batch and or split
# pathmpcsolutiondata = '/home/mvb/0_ETH/01_MasterThesis/Logs_GoKart/LogData/dynamics_newFormat/cuts/20190921/20190921T124329_10/mpc'
# pathmpcsolutiondata = '/home/mvb/0_ETH/01_MasterThesis/Logs_GoKart/LogData/dynamics_newFormat/cuts/20190923/20190923T161636_03/mpc'
pathmpcsolutiondata = '/home/mvb/0_ETH/01_MasterThesis/Logs_GoKart/LogData/dynamics_newFormat/cuts/20190926/20190926T121623_05/mpc'
mpcsolfiles = []
for r, d, f in os.walk(pathmpcsolutiondata):
for file in f:
if '.csv' in file:
mpcsolfiles.append([os.path.join(r, file),file])
mpcsolfiles.sort()
part = 80
# for file_path, file_name in mpcsolfiles[part:part+1]:
# for file_path, file_name in mpcsolfiles[245:246]:
# for file_path, file_name in mpcsolfiles[60:100]:
solve_times = []
t0 = 0
vx_ref = []
vy_ref = []
vtheta_ref = []
BETA_ref = []
AB_ref = []
TV_ref = []
t_offset = 0.0
t_abs0 = None
# for file_path, file_name in mpcsolfiles[130:150]:
# for file_path, file_name in mpcsolfiles[100:150]:
# for file_path, file_name in mpcsolfiles[220:280:5]:
for file_path, file_name in mpcsolfiles[:]:
if t_abs0 is None:
mpc_sol_data = pd.read_csv(str(mpcsolfiles[0][0]), header=None,
names=["U wheel left", "U wheel right", "U dotS", "U brake", "X U AB", "time",
"X Ux", "X Uy", "X dotPsi", "X X", "X Y", "X Psi", "X w2L", "X w2R",
"X s", "X bTemp"])
t_abs0 = mpc_sol_data['time'][0]
print(f'Loading file {file_name}')
try:
mpc_sol_data = pd.read_csv(str(file_path), header=None,
names=["U wheel left", "U wheel right", "U dotS", "U brake", "X U AB", "time",
"X Ux", "X Uy", "X dotPsi", "X X", "X Y", "X Psi", "X w2L", "X w2R",
"X s", "X bTemp"])
except:
print('Could not open file at', file_path)
raise
# print(mpc_sol_data.head())
# print(type(mpc_sol_data))
#___simulation parameters
# data_time_step = np.round(mpc_sol_data['time'].iloc[1] - mpc_sol_data['time'].iloc[0],3) # [s] Log data sampling time step
# sim_time_increment = data_time_step # [s] time increment used in integration scheme inside simulation server (time step for logged simulation data)
# simTime = np.round(mpc_sol_data['time'].iloc[-1] - mpc_sol_data['time'].iloc[0]) # [s] Total simulation time
# simTime = data_time_step
# initial state [simulationTime, x, y, theta, vx, vy, vrot, beta, accRearAxle, tv]
if t0 != 0:
solve_times.append(mpc_sol_data['time'][0] - t0)
t0 = mpc_sol_data['time'][0]
# AB = (mpc_sol_data['U wheel left'] + mpc_sol_data['U wheel right']) / 2.0
# TV = (mpc_sol_data['U wheel right'] - mpc_sol_data['U wheel left']) / 2.0
# steerCal = mpc_sol_data['X s']
# BETA = -0.63 * np.power(steerCal, 3) + 0.94 * steerCal
#
# U = np.array((mpc_sol_data['time'].values-t_abs0+t_offset,
# BETA.values,
# AB.values,
# TV.values))
#
# Y = np.array((mpc_sol_data['time']-t_abs0+t_offset,
# np.add(mpc_sol_data['X X'], np.cos(mpc_sol_data['X Psi']) * 0.46),
# np.add(mpc_sol_data['X Y'], np.sin(mpc_sol_data['X Psi']) * 0.46),
# mpc_sol_data['X Psi'],
# mpc_sol_data['X Ux'],
# np.add(mpc_sol_data['X Uy'],mpc_sol_data['X dotPsi'][0]*0.46),
# # mpc_sol_data['X Uy']+0.88,
# mpc_sol_data['X dotPsi'])).transpose()
# # ______^^^______
#
# arrow_length = 1
#
# plt.figure(1)
#
# plt.plot(Y[:,1],Y[:,2],'r', linewidth=0.5)
# plt.scatter(Y[0,1],Y[0,2],c='r')
# # plt.plot(X1[:, 1], X1[:, 2], 'b')
# # plt.scatter(X1[0, 1], X1[0, 2], c='b')
# for i in range(len(Y[:,1])):
# plt.arrow(Y[i,1],Y[i,2], arrow_length * np.cos(Y[i, 3]), arrow_length * np.sin(Y[i, 3]),color = 'm', linewidth=0.5, alpha=0.5)
# # for i in range(len(Y[:,1])):
# # plt.arrow(Y[i,1],Y[i,2], arrow_length * np.cos(Y[i, 3]+BETA[i]), arrow_length * np.sin(Y[i, 3]+BETA[i]),color='m')
# # plt.plot(Y[:, 0], Y[:, 1], 'r')
# # plt.plot(Y[:, 0], Y[:, 2], 'r')
# # plt.axis('equal')
# plt.legend(['MPC prediction','Kartsim (RK45)'])
# plt.xlabel('pose x')
# plt.ylabel('pose y')
# plt.axis('equal')
# # plt.title('Euler Integration')
# # plt.hold()
#
# # plt.figure(2)
# # # plt.plot(Y[:,0], Y[:,3], 'r')
# # # plt.plot(X1[:,0],X1[:,3], 'b')
# # # plt.plot( Y[:, -1], 'r')
# # # plt.plot( X1[:, -1], 'b')
# # plt.plot(Y[:, 0], Y[:, 6], 'r')
# # plt.plot(X1[:, 0], X1[:, 6], 'b')
# # plt.plot(Y[:, 0], Y[:, 3], 'r')
# # plt.plot(X1[:, 0], X1[:, 3], 'b')
# #
#
# plt.figure(3)
# plt.plot(Y[:, 0], Y[:, 6], 'g', linewidth=0.5, alpha=0.5)
# plt.plot(Y[:, 0], Y[:, 5], 'r', linewidth=0.5, alpha=0.5)
# plt.plot(Y[:, 0], Y[:, 4], 'b', linewidth=0.5, alpha=0.5)
# plt.scatter(Y[0, 0], Y[0, 6], c='g')
# plt.scatter(Y[0, 0], Y[0, 5], c='r')
# plt.scatter(Y[0, 0], Y[0, 4], c='b')
# # plt.plot(Y[1:, 0], x_dot, 'c')
# # plt.plot(Y[1:, 0], y_dot, 'c')
# # plt.plot(Y[:, 0], np.sqrt(np.square(Y[:, 4]) + np.square(Y[:, 5])), 'k')
# plt.legend(['dottheta', 'vy', 'vx'])
#
#
# # plt.figure(4)
# # plt.plot(Y[:,0], Y_slip_angle, 'r')
# # plt.plot(X1[:,0], X1_slip_angle, 'orange')
# # plt.plot(Y[1:,0], Y_slip_angle_from_pose, 'm')
# # plt.plot(X1[1:,0], X1_slip_angle_from_pose, 'c')
# # plt.title('slip angle [rad]')
# #
# # plt.figure(5)
# # plt.plot(Y[1:, 0]-0.05, vy, 'm')
# # # plt.scatter(Y[1:, 0], vy, c='m')
# # plt.plot(Y[:, 0], Y[:, 5], 'r')
# # # plt.scatter(Y[0, 0], Y[0, 5], c='r')
# # plt.plot(Y[:, 0], Y[:, 4], 'r')
# # # plt.scatter(Y[0, 0], Y[0, 4], c='orange')
# # plt.plot(Y[:-1, 0], vx, 'c')
# # # plt.plot(Y[:, 0], Y[:, 4], 'b')
# # plt.legend(['MPC from pose','MPC output','Kartsim output', 'Kartsim from pose'])
# # plt.xlabel('time [s]')
# # plt.ylabel('U y [m/s]')
#
# plt.figure(6)
# plt.plot(U[0,:], U[1,:],c='m', linewidth=0.5, alpha=0.5)
# plt.plot(U[0,:], U[2,:],c='b', linewidth=0.5, alpha=0.5)
# plt.plot(U[0,:], U[3,:],c='g', linewidth=0.5, alpha=0.5)
# plt.scatter(U[0, 0], U[1, 0], c='m')
# plt.scatter(U[0, 0], U[2, 0], c='b')
# plt.scatter(U[0, 0], U[3, 0], c='g')
# plt.legend(['BETA','AB','TV'])
# plt.grid('on')
#
# vx_ref.append([Y[0, 0], Y[0, 4]])
# vy_ref.append([Y[0, 0], Y[0, 5]])
# vtheta_ref.append([Y[0, 0], Y[0, 6]])
# BETA_ref.append([U[0, 0], U[1, 0]])
# AB_ref.append([U[0, 0], U[2, 0]])
# TV_ref.append([U[0, 0], U[3, 0]])
sns.distplot(solve_times)
plt.title('MPC normal - 0s delay')
print('solve times:', solve_times)
print('avg solve time:', np.average(solve_times))
print('median solve time:', np.median(solve_times))
print('std solve time:', np.std(solve_times))
# vx_ref = np.array(vx_ref)
# vy_ref = np.array(vy_ref)
# vtheta_ref = np.array(vtheta_ref)
# BETA_ref = np.array(BETA_ref)
# AB_ref = np.array(AB_ref)
# TV_ref = np.array(TV_ref)
#
# plt.figure(3)
# plt.plot(vx_ref[:,0], vx_ref[:,1], 'b')
# plt.plot(vy_ref[:,0], vy_ref[:,1], 'r')
# plt.plot(vtheta_ref[:,0], vtheta_ref[:,1], 'g')
# # plt.legend(['vy mpc', 'vy kartsim', 'vy reference'])
#
# plt.figure(6)
# plt.plot(BETA_ref[:,0], BETA_ref[:,1], 'm')
# plt.plot(AB_ref[:,0], AB_ref[:,1], 'b')
# plt.plot(TV_ref[:,0], TV_ref[:,1], 'g')
#
# plt.show()
print("Done.")
def getpreprodata(pathpreprodata):
    """Loads the first preprocessed-data pickle found under the given directory.

    Recursively walks `pathpreprodata`, collects every '.pkl' file, and
    unpickles the first one (sorted order is not guaranteed by os.walk).
    Returns an empty pandas DataFrame when no pickle exists or the file
    cannot be read.

    Fixes: previously `mpc_sol_data` was never initialised, so a directory
    without any '.pkl' file raised NameError at the return; the bare
    `except:` also swallowed every error (including KeyboardInterrupt) and
    is narrowed to I/O and unpickling failures.
    """
    files = []
    for root, _dirs, filenames in os.walk(pathpreprodata):
        for file_name in filenames:
            if '.pkl' in file_name:
                files.append(os.path.join(root, file_name))
    mpc_sol_data = pd.DataFrame()  # fallback when nothing can be loaded
    for filePath in files[0:1]:
        try:
            with open(filePath, 'rb') as f:
                mpc_sol_data = pickle.load(f)
        except (OSError, pickle.UnpicklingError, EOFError):
            print('Could not open file at', filePath)
            mpc_sol_data = pd.DataFrame()
    return mpc_sol_data
if __name__ == '__main__':
main() |
984,960 | 46447f4113a099fac0021077b137c1b2f9ea8dc7 | import uvicorn
from fastapi import FastAPI
from joker.controller import joke_controller
# FastAPI application object; title/description/version feed the generated
# OpenAPI docs (/docs, /redoc).
app = FastAPI(
    title="The Joker API",
    description="Handle (really) funny jokes",
    version="0.1beta"
)
# Mount the joke endpoints under the "jokes" OpenAPI tag.
app.include_router(joke_controller.router, tags=["jokes"])
# Start a local development server (uvicorn defaults) when run directly.
if __name__ == "__main__":
    uvicorn.run(app)
|
984,961 | f45c30beccda61e48aa61bb0acc1825a00995f4a | import svgwrite
from xml.dom import minidom
import math
def distanceBetweenPoints(x1, y1, x2, y2):
    """Returns the Euclidean distance between the points (x1, y1) and (x2, y2)."""
    dx = x1 - x2
    dy = y1 - y2
    return math.sqrt(dx * dx + dy * dy)
class Element:
    """A line segment extracted from an SVG path, classified by `tag` as
    'linea verticale' (vertical), 'linea orizzontale' (horizontal) or
    'linea diagonale' (diagonal).

    (x1, y1)/(x2, y2) are the straightened endpoints computed by adjust().
    neighbour1/neighbour2 and flag1/flag2 record, for each endpoint, the
    closest other element and whether it is close enough to snap onto
    (flags are set by the surrounding script, applied by fix()).
    stroke_width, role and red/green/blue are drawing attributes assigned
    later by the script ('bordo' = border, 'porta' = door, 'interno' = inner).
    """
    def __init__(self, xcoordinates, ycoordinates, tag):
        self.xcoordinates = xcoordinates  # raw x samples from the SVG path
        self.ycoordinates = ycoordinates  # raw y samples from the SVG path
        self.tag = tag  # orientation label (Italian, used as a runtime key)
        self.x1 = self.x2 = self.y1 = self.y2 = 0
        self.neighbour1 = None  # element nearest to endpoint 1
        self.neighbour2 = None  # element nearest to endpoint 2
        self.flag1 = None  # True when endpoint 1 should snap onto neighbour1
        self.flag2 = None  # True when endpoint 2 should snap onto neighbour2
        self.stroke_width = 2
        self.role = None
        self.red = 0
        self.green = 0
        self.blue = 0
        # if self.tag == "linea verticale":
        #     self.x1 = x1
        #     self.x2 = (min(self.xcoordinates) + max(self.xcoordinates)) / 2
        #     self.y1 = y1
        #     self.y2 = max(self.ycoordinates)
        # else:
        #     self.y1 = y1
        #     self.y2 = (min(self.ycoordinates) + max(self.ycoordinates)) / 2
        #     self.x1 = x1
        #     self.x2 = max(self.xcoordinates)
        #
    def adjust(self, xupperbound, xlowerbound, yupperbound, ylowerbound):
        """Straightens the raw path into a clean segment and snaps endpoints
        that lie within 50 units of the drawing's outer bounds.

        NOTE(review): the 50-unit tolerance is a magic constant shared with the
        rest of the script — keep them in sync.
        """
        if self.tag == 'linea verticale':
            # Vertical: x is the midpoint of the sampled x's, y spans the path.
            self.x1 = (min(self.xcoordinates) + max(self.xcoordinates)) / 2
            self.x2 = (min(self.xcoordinates) + max(self.xcoordinates)) / 2
            self.y1 = self.ycoordinates[0]
            self.y2 = self.ycoordinates[-1]#max(self.ycoordinates)
        if self.tag == 'linea orizzontale':
            # Horizontal: y is the midpoint of the sampled y's, x spans the path.
            self.y1 = (min(self.ycoordinates) + max(self.ycoordinates)) / 2
            self.y2 = (min(self.ycoordinates) + max(self.ycoordinates)) / 2
            self.x1 = self.xcoordinates[0]
            self.x2 = self.xcoordinates[-1]#max(self.xcoordinates)
        if self.tag == 'linea diagonale':
            # Diagonal: keep the first and last sampled points as endpoints.
            self.y1 = self.ycoordinates[0]
            self.y2 = self.ycoordinates[-1]
            self.x1 = self.xcoordinates[0]
            self.x2 = self.xcoordinates[-1]
        # Snap endpoints that are within 50 units of the outer bounds.
        if abs(self.x1 - xupperbound) < 50: self.x1 = xupperbound
        if abs(self.x1 - xlowerbound) < 50: self.x1 = xlowerbound
        if abs(self.x2 - xupperbound) < 50: self.x2 = xupperbound
        if abs(self.x2 - xlowerbound) < 50: self.x2 = xlowerbound
        if abs(self.y1 - yupperbound) < 50: self.y1 = yupperbound
        if abs(self.y1 - ylowerbound) < 50: self.y1 = ylowerbound
        if abs(self.y2 - yupperbound) < 50: self.y2 = yupperbound
        if abs(self.y2 - ylowerbound) < 50: self.y2 = ylowerbound
        # Normalise non-diagonal segments so that (x1, y1) <= (x2, y2).
        if self.tag != 'linea diagonale':
            if self.y2 < self.y1:
                tmp = self.y1
                self.y1 = self.y2
                self.y2 = tmp
            if self.x2 < self.x1:
                tmp = self.x1
                self.x1 = self.x2
                self.x2 = tmp
    def fix(self):
        """Closes gaps by snapping each flagged endpoint onto its neighbour.

        For axis-aligned segments a flagged endpoint is moved onto the
        neighbour's coordinate on the varying axis; for diagonals the endpoint
        snaps to whichever of the neighbour's two endpoints is closer.
        """
        if self.tag == 'linea verticale':
            if self.flag1 == True: self.y1 = self.neighbour1.y1
            if self.flag2 == True: self.y2 = self.neighbour2.y1
        if self.tag == 'linea orizzontale':
            if self.flag1 == True: self.x1 = self.neighbour1.x1
            if self.flag2 == True: self.x2 = self.neighbour2.x1
        if self.tag == 'linea diagonale':
            if self.flag1 == True:
                # Snap endpoint 1 to the closer endpoint of neighbour1.
                distance1 = distanceBetweenPoints(self.x1, self.y1, self.neighbour1.x1, self.neighbour1.y1)
                distance2 = distanceBetweenPoints(self.x1, self.y1, self.neighbour1.x2, self.neighbour1.y2)
                if distance1 < distance2:
                    self.x1 = self.neighbour1.x1
                    self.y1 = self.neighbour1.y1
                else:
                    self.x1 = self.neighbour1.x2
                    self.y1 = self.neighbour1.y2
            if self.flag2 == True:
                # Snap endpoint 2 to the closer endpoint of neighbour2.
                distance1 = distanceBetweenPoints(self.x2, self.y2, self.neighbour2.x1, self.neighbour2.y1)
                distance2 = distanceBetweenPoints(self.x2, self.y2, self.neighbour2.x2, self.neighbour2.y2)
                if distance1 < distance2:
                    self.x2 = self.neighbour2.x1
                    self.y2 = self.neighbour2.y1
                else:
                    self.x2 = self.neighbour2.x2
                    self.y2 = self.neighbour2.y2
doc = minidom.parse('Esempio16_prima.svg')
svg_width = doc.getElementsByTagName('svg')[0].getAttribute('width')
svg_height = doc.getElementsByTagName('svg')[0].getAttribute('height')
# print(svg_width, svg_height)
path_strings = [path.getAttribute('d') for path in doc.getElementsByTagName('path')]
#print(path_strings[0])
#doc.unlink()
path_stringsM = []
for i in range(len(path_strings)):
path_stringsM.append(path_strings[i].replace('M', ''))
#path_strings[i].replace('L', '')
#print(path_stringsM)
path_stringsL = []
for i in range(len(path_strings)):
path_stringsL.append(path_stringsM[i].replace('L', ''))
#print(path_stringsL)
#path_strings[i].replace('L', '')
Xcoordinates = []
Ycoordinates = []
coordinates = []
verticalElements = []
horizontalElements = []
diagonalElements = []
elements = []
for i in range(len(path_stringsL)):
coordinates.append(path_stringsL[i].split())
for i in range(len(coordinates)):
for j in range(len(coordinates[i])):
coordinates[i][j] = float(coordinates[i][j])
for i in range(len(coordinates)):
support = []
for j in range(0,len(coordinates[i]),2):
support.append(coordinates[i][j])
Xcoordinates.append(support)
for i in range(len(coordinates)):
support = []
for j in range(1,len(coordinates[i]),2):
support.append(coordinates[i][j])
Ycoordinates.append(support)
# print(coordinates)
# print(Xcoordinates)
# print(Ycoordinates)
Xupperbound = 0
Xlowerbound = 5000
Yupperbound = 0
Ylowerbound = 5000
for i in range (len(Xcoordinates)):
maximum = max(Xcoordinates[i])
if Xupperbound < maximum: Xupperbound = maximum
for i in range (len(Xcoordinates)):
minimum = min(Xcoordinates[i])
if Xlowerbound > minimum: Xlowerbound = minimum
for i in range (len(Ycoordinates)):
maximum = max(Ycoordinates[i])
if Yupperbound < maximum: Yupperbound = maximum
for i in range (len(Ycoordinates)):
minimum = min(Ycoordinates[i])
if Ylowerbound > minimum: Ylowerbound = minimum
# print(Xlowerbound, Xupperbound, Ylowerbound, Yupperbound)
for i in range(len(coordinates)):
x_difference = abs(Xcoordinates[i][-1] - Xcoordinates[i][0])
y_difference = abs(Ycoordinates[i][-1] - Ycoordinates[i][0])
if abs(x_difference - y_difference) < 50:
diagonalElements.append(Element(Xcoordinates[i],Ycoordinates[i], 'linea diagonale'))
else :
if abs(Xcoordinates[i][-1] - Xcoordinates[i][0]) < abs(Ycoordinates[i][-1] - Ycoordinates[i][0]):
#print("elemento numero:", i, " : linea verticale")
verticalElements.append(Element(Xcoordinates[i],Ycoordinates[i], 'linea verticale'))
else:
#print("elemento numero:", i, " : linea orizzontale")
horizontalElements.append(Element(Xcoordinates[i], Ycoordinates[i], 'linea orizzontale'))
print('Ci sono', len(diagonalElements), 'elementi diagonali')
for i in range(len(verticalElements)):
#print(elements[i].tag)
verticalElements[i].adjust(Xupperbound, Xlowerbound, Yupperbound, Ylowerbound)
for i in range(len(horizontalElements)):
#print(elements[i].tag)
horizontalElements[i].adjust(Xupperbound, Xlowerbound, Yupperbound, Ylowerbound)
for i in range(len(diagonalElements)):
# print("elemento diagonale:", i)
# print(diagonalElements[i].xcoordinates)
# print(diagonalElements[i].ycoordinates)
diagonalElements[i].adjust(Xupperbound, Xlowerbound, Yupperbound, Ylowerbound)
# print(diagonalElements[i].x1, diagonalElements[i].y1, diagonalElements[i].x2, diagonalElements[i].y2)
# for i in range(len(horizontalElements)):
# for j in range(len(horizontalElements)):
# if i != j:
# if abs(horizontalElements[i].y1 - horizontalElements[j].y1) <= 10:
# # horizontalElements[i].y1 = horizontalElements[j].y1
# # horizontalElements[i].y2 = horizontalElements[j].y1
# if horizontalElements[i].x1 > horizontalElements[j].x2:
# if abs(horizontalElements[i].x1 - horizontalElements[j].x2) > 50:
# horizontalElements[i].flag1 = False
# else: horizontalElements[i].flag1 = True
# if horizontalElements[i].x2 < horizontalElements[j].x1:
# if abs(horizontalElements[i].x2 - horizontalElements[j].x1) > 50:
# horizontalElements[i].flag2 = False
# else: horizontalElements[i].flag2 = True
#
# for i in range(len(verticalElements)):
# for j in range(len(verticalElements)):
# if i != j:
# if abs(verticalElements[i].x1 - verticalElements[j].x1) <= 10:
# # verticalElements[i].x1 = verticalElements[j].x1
# # verticalElements[i].x2 = verticalElements[j].x1
# if verticalElements[i].y1 > verticalElements[j].y2:
# if abs(verticalElements[i].y1 - verticalElements[j].y2) > 50:
# verticalElements[i].flag1 = False
# else: verticalElements[i].flag1 = True
# if verticalElements[i].y2 < verticalElements[j].y1:
# if abs(verticalElements[i].y2 - verticalElements[j].y1) > 50:
# verticalElements[i].flag2 = False
# else: verticalElements[i].flag2 = True
#cicli per individuare gli spazi vuoti ed assegnare gli eventuali elementi adiacenti
for i in range(len(verticalElements)):
distance = 1000
for j in range(len(horizontalElements)):
if abs(verticalElements[i].y1 - horizontalElements[j].y1) < distance:
distance = abs(verticalElements[i].y1 - horizontalElements[j].y1)
verticalElements[i].neighbour1 = horizontalElements[j]
if distance < 50: verticalElements[i].flag1 = True
else :
verticalElements[i].flag1 = False
horizontal_distance = distance
distance = 1000
for j in range(len(verticalElements)):
if i != j:
if abs(verticalElements[i].x1 - verticalElements[j].x1) <= 30:
# verticalElements[i].x1 = verticalElements[j].x1
# verticalElements[i].x2 = verticalElements[j].x1
if verticalElements[i].y1 > verticalElements[j].y2:
if abs(verticalElements[i].y1 - verticalElements[j].y2) < distance:
distance = abs(verticalElements[i].y1 - verticalElements[j].y2)
if distance < horizontal_distance:
verticalElements[i].neighbour1 = verticalElements[j]
verticalElements[i].x1 = verticalElements[j].x1
verticalElements[i].x2 = verticalElements[j].x2
if distance < horizontal_distance:
if distance < 50: verticalElements[i].flag1 = True
else:
verticalElements[i].flag1 = False
distance = 1000
for j in range(len(horizontalElements)):
if abs(verticalElements[i].y2 - horizontalElements[j].y1) < distance:
distance = abs(verticalElements[i].y2 - horizontalElements[j].y1)
verticalElements[i].neighbour2 = horizontalElements[j]
if distance < 50: verticalElements[i].flag2 = True
else :
verticalElements[i].flag2 = False
horizontal_distance = distance
distance = 1000
for j in range(len(verticalElements)):
if i != j:
if abs(verticalElements[i].x1 - verticalElements[j].x1) <= 30:
# verticalElements[i].x1 = verticalElements[j].x1
# verticalElements[i].x2 = verticalElements[j].x1
if verticalElements[i].y2 < verticalElements[j].y1:
if abs(verticalElements[i].y2 - verticalElements[j].y1) < distance:
distance = abs(verticalElements[i].y2 - verticalElements[j].y1)
if distance < horizontal_distance:
verticalElements[i].neighbour2 = verticalElements[j]
verticalElements[i].x1 = verticalElements[j].x1
verticalElements[i].x2 = verticalElements[j].x2
if distance < horizontal_distance:
if distance < 50:
verticalElements[i].flag2 = True
else:
verticalElements[i].flag2 = False
for i in range(len(horizontalElements)):
distance = 1000
for j in range(len(verticalElements)):
if abs(horizontalElements[i].x1 - verticalElements[j].x1) < distance:
distance = abs(horizontalElements[i].x1 - verticalElements[j].x1)
horizontalElements[i].neighbour1 = verticalElements[j]
if distance < 50: horizontalElements[i].flag1 = True
else :
horizontalElements[i].flag1 = False
vertical_distance = distance
distance = 1000
for j in range(len(horizontalElements)):
if i != j:
if abs(horizontalElements[i].y1 - horizontalElements[j].y1) <= 30:
# horizontalElements[i].y1 = horizontalElements[j].y1
# horizontalElements[i].y2 = horizontalElements[j].y1
if horizontalElements[i].x1 > horizontalElements[j].x2:
if abs(horizontalElements[i].x1 - horizontalElements[j].x2) < distance:
distance = abs(horizontalElements[i].x1 - horizontalElements[j].x2)
if distance < vertical_distance:
horizontalElements[i].neighbour1 = horizontalElements[j]
horizontalElements[i].y1 = horizontalElements[j].y1
horizontalElements[i].y2 = horizontalElements[j].y2
if distance < vertical_distance:
if distance < 50:
horizontalElements[i].flag1 = True
else:
horizontalElements[i].flag1 = False
distance = 1000
for j in range(len(verticalElements)):
if abs(horizontalElements[i].x2 - verticalElements[j].x1) < distance:
distance = abs(horizontalElements[i].x2 - verticalElements[j].x1)
horizontalElements[i].neighbour2 = verticalElements[j]
if distance < 50: horizontalElements[i].flag2 = True
else :
horizontalElements[i].flag2 = False
vertical_distance = distance
for j in range(len(horizontalElements)):
if i != j:
if abs(horizontalElements[i].y1 - horizontalElements[j].y1) <= 30:
# horizontalElements[i].y1 = horizontalElements[j].y1
# horizontalElements[i].y2 = horizontalElements[j].y1
if horizontalElements[i].x2 < horizontalElements[j].x1:
if abs(horizontalElements[i].x2 - horizontalElements[j].x1) < distance:
distance = abs(horizontalElements[i].x2 - horizontalElements[j].x1)
if distance < vertical_distance:
horizontalElements[i].neighbour2 = horizontalElements[j]
horizontalElements[i].y1 = horizontalElements[j].y1
horizontalElements[i].y2 = horizontalElements[j].y2
if distance < vertical_distance:
if distance < 50:
horizontalElements[i].flag2 = True
else:
horizontalElements[i].flag2 = False
for i in range(len(diagonalElements)):
distance = 1000
for j in range(len(horizontalElements)):
if distanceBetweenPoints(diagonalElements[i].x1, diagonalElements[i].y1, horizontalElements[j].x1, horizontalElements[j].y1) < distance:
distance = distanceBetweenPoints(diagonalElements[i].x1, diagonalElements[i].y1, horizontalElements[j].x1, horizontalElements[j].y1)
diagonalElements[i].neighbour1 = horizontalElements[j]
if distanceBetweenPoints(diagonalElements[i].x1, diagonalElements[i].y1, horizontalElements[j].x2, horizontalElements[j].y1) < distance:
distance = distanceBetweenPoints(diagonalElements[i].x1, diagonalElements[i].y1, horizontalElements[j].x2, horizontalElements[j].y1)
diagonalElements[i].neighbour1 = horizontalElements[j]
#horizontal_distance = distance
for j in range(len(verticalElements)):
if distanceBetweenPoints(diagonalElements[i].x1, diagonalElements[i].y1, verticalElements[j].x1, verticalElements[j].y1) < distance:
distance = distanceBetweenPoints(diagonalElements[i].x1, diagonalElements[i].y1, verticalElements[j].x1, verticalElements[j].y1)
diagonalElements[i].neighbour1 = verticalElements[j]
if distanceBetweenPoints(diagonalElements[i].x1, diagonalElements[i].y1, verticalElements[j].x1, verticalElements[j].y2) < distance:
distance = distanceBetweenPoints(diagonalElements[i].x1, diagonalElements[i].y1, verticalElements[j].x1, verticalElements[j].y2)
diagonalElements[i].neighbour1 = verticalElements[j]
if distance < 50:
diagonalElements[i].flag1 = True
else: diagonalElements[i].flag1 = False
distance1 = distance
distance = 1000
for j in range(len(horizontalElements)):
if distanceBetweenPoints(diagonalElements[i].x2, diagonalElements[i].y2, horizontalElements[j].x1, horizontalElements[j].y1) < distance:
distance = distanceBetweenPoints(diagonalElements[i].x2, diagonalElements[i].y2, horizontalElements[j].x1, horizontalElements[j].y1)
diagonalElements[i].neighbour2 = horizontalElements[j]
if distanceBetweenPoints(diagonalElements[i].x2, diagonalElements[i].y2, horizontalElements[j].x2, horizontalElements[j].y1) < distance:
distance = distanceBetweenPoints(diagonalElements[i].x2, diagonalElements[i].y2, horizontalElements[j].x2, horizontalElements[j].y1)
diagonalElements[i].neighbour2 = horizontalElements[j]
#horizontal_distance = distance
for j in range(len(verticalElements)):
if distanceBetweenPoints(diagonalElements[i].x2, diagonalElements[i].y2, verticalElements[j].x1, verticalElements[j].y1) < distance:
distance = distanceBetweenPoints(diagonalElements[i].x2, diagonalElements[i].y2, verticalElements[j].x1, verticalElements[j].y1)
diagonalElements[i].neighbour2 = verticalElements[j]
if distanceBetweenPoints(diagonalElements[i].x2, diagonalElements[i].y2, verticalElements[j].x1, verticalElements[j].y2) < distance:
distance = distanceBetweenPoints(diagonalElements[i].x2, diagonalElements[i].y2, verticalElements[j].x1, verticalElements[j].y2)
diagonalElements[i].neighbour2 = verticalElements[j]
if distance < 50:
diagonalElements[i].flag2 = True
else: diagonalElements[i].flag2 = False
distance2 = distance
if distance1 < distance2:
diagonalElements[i].flag2 = False
else: diagonalElements[i].flag1 = False
#cicli per sistemare gli spazi vuoti
for i in range(len(verticalElements)):
verticalElements[i].fix()
elements.append(verticalElements[i])
for i in range(len(horizontalElements)):
horizontalElements[i].fix()
elements.append(horizontalElements[i])
for i in range(len(diagonalElements)):
print("punto1 elemento diagonale :", i, ":", diagonalElements[i].x1, diagonalElements[i].y1)
print("punto2 elemento diagonale :", i, ":", diagonalElements[i].x2, diagonalElements[i].y2)
diagonalElements[i].fix()
print("Neighbour1 elemento diagonale:", i, ":", diagonalElements[i].flag1, diagonalElements[i].neighbour1.x1, diagonalElements[i].neighbour1.y1, diagonalElements[i].neighbour1.x2, diagonalElements[i].neighbour1.y2 )
print("Neighbour2 elemento diagonale:", i, ":", diagonalElements[i].flag2, diagonalElements[i].neighbour2.x1, diagonalElements[i].neighbour2.y1, diagonalElements[i].neighbour2.x2, diagonalElements[i].neighbour2.y2 )
print("punto1 elemento diagonale dopo fix:", i, ":", diagonalElements[i].x1, diagonalElements[i].y1)
print("punto2 elemento diagonale dopo fix:", i, ":", diagonalElements[i].x2, diagonalElements[i].y2)
elements.append(diagonalElements[i])
#ciclo per individuare il bordo esterno
topElement = horizontalElements[0]
for i in range(len(horizontalElements)):
if horizontalElements[i].y1 < topElement.y1:
topElement = horizontalElements[i]
# print(topElement.y1)
loop = True
current = topElement
next = topElement.neighbour2
visited = []
while loop == True:
current.stroke_width = 10
current.role = 'bordo'
visited.append(current)
if current.neighbour2 not in visited:
next = current.neighbour2
else: next = current.neighbour1
current = next
if current == topElement:
loop = False
for i in range(len(elements)):
if elements[i].role != 'bordo':
if elements[i].tag == 'linea diagonale':
elements[i].role = 'porta'
elements[i].stroke_width = 8
elements[i].red = 50
elements[i].green = 250
elements[i].blue = 50
else:
elements[i].role = 'interno'
elements[i].red = 255
elements[i].green = 0
elements[i].blue = 0
dwg = svgwrite.Drawing('Esempio16_dopo.svg', profile='full')
dwg.viewbox(width= svg_width, height= svg_height)
for i in range(len(elements)):
dwg.add(dwg.line((elements[i].x1, elements[i].y1), (elements[i].x2, elements[i].y2), stroke = svgwrite.rgb(elements[i].red, elements[i].green, elements[i].blue, '%'), stroke_width = elements[i].stroke_width))
dwg.save()
|
984,962 | 002a23f49c418f303aa4f4f41e88cfb0573a538d | import os
def url_user_img(instance, filename):
    """Storage path for a user's profile image: users/<pk>/profile/<name>."""
    encoded_name = filename.encode('utf-8')
    return 'users/{:d}/profile/{}'.format(instance.user.pk, encoded_name)
def url_gallery_img(instance, filename):
    """Storage path for a gallery upload: gallery/users/<pk>/uploads/<name>."""
    encoded_name = filename.encode('utf-8')
    return 'gallery/users/{:d}/uploads/{}'.format(instance.user.pk, encoded_name)
def url_gallery_thumbnail(instance, filename):
    """Storage path for an upload's thumbnail under .../uploads/thumbnails/."""
    encoded_name = filename.encode('utf-8')
    return 'gallery/users/{:d}/uploads/thumbnails/{}'.format(instance.user.pk, encoded_name)
|
984,963 | 82b5e6dbb4b2fce722c43b791c0987bf9093a7ba | #!/usr/bin/env python2
# -*- coding: utf-8 -*-
"""
Created on Mon Jun 25 11:36:41 2018
@author: jorge
"""
import pandas as pd
import numpy as np
from sklearn.preprocessing import MinMaxScaler
class VenuesReader():
    """
    Loads venue (conference) scores from a CSV file and exposes them raw
    or min-max normalized per year.

    CSV columns (no header): kdd_name, url, scopus_name, year, score.
    Year 0 is used internally as a sentinel "x-year": the venue's average
    score, returned when a requested year has no entry.
    """
    def __init__(self, filename, eps):
        # eps keeps normalized scores strictly positive (avoids an exact 0
        # for the year's minimum score).
        self.epsilon = eps
        self.missing_year = 0  # NOTE(review): assigned but never used
        self.filename = filename
        self.associations = dict() ##maps the name of a conference in the network to its name on scopus; required to get the scores
        self.venue_scores = dict() ##raw scores: {scopus_name: {year: score}}
        self.normalized_scores = dict() ##per-year min-max normalized scores; includes the year-0 "x-year" fallback
        self.__readAssociations()
        self.__readScores()
        self.__calculateNormalizedScores()

    def __readAssociations(self):
        """Build the kdd_name -> scopus_name mapping from the CSV."""
        df = pd.read_csv(self.filename, names=['kdd_name', 'url', 'scopus_name', 'year', 'score'])
        assoc = set(zip(list(df['kdd_name'].values), list(df['scopus_name'].values)))
        self.associations = {a[0] : a[1] for a in assoc}

    def __readScores(self):
        """Build {scopus_name: {year: score}} from the CSV."""
        df = pd.read_csv(self.filename, names=['kdd_name', 'url', 'scopus_name', 'year', 'score'])
        df2 = df[['scopus_name', 'year', 'score']] ##ignore the kdd_name and url columns; we only needed them for the associations, already obtained
        df2 = df2.drop_duplicates() ##since some kdd venues map onto the same venue, duplicate scores can be removed
        #self.scores_df = df2
        scores = zip(list(df2['scopus_name']), list(df2['year']), list(df2['score']))
        for s in scores:
            venue, year, score = s
            if venue not in self.venue_scores:
                self.venue_scores[venue] = dict()
            self.venue_scores[venue][year] = score

    def __calculateNormalizedScores(self):
        """
        Min-max normalize the scores per year (offset by self.epsilon).

        Also creates, per venue, an "x-year" entry under year 0 holding the
        venue's average score, used for years with no recorded score;
        the x-year is normalized as well (against all venues' x-years).
        """
        year_scores = {0 : []}  # year -> list of all venues' scores for that year
        for venue in self.venue_scores:
            v_scores = []
            for year in self.venue_scores[venue]:
                v_scores.append(self.venue_scores[venue][year])
                if year not in year_scores:
                    year_scores[year] = []
                year_scores[year].append(self.venue_scores[venue][year])
            # x-year = average of this venue's scores, stored under year 0
            x_year = np.average(np.array(v_scores))
            self.venue_scores[venue][0] = x_year
            year_scores[0].append(x_year)
        ##for standardization
        #year_metrics = {x : (np.average(np.array(year_scores[x])), np.std(np.array(year_scores[x]))) for x in year_scores}
        ##for normalization: year -> (max, min)
        year_metrics = {x: (max(year_scores[x]), min(year_scores[x])) for x in year_scores}
        #print year_metrics
        for venue in self.venue_scores:
            self.normalized_scores[venue] = dict()
            for year in self.venue_scores[venue]:
                #self.standard_scores[venue][year] = round((self.venue_scores[venue][year] - year_metrics[year][0]) / year_metrics[year][1],5)
                #self.normalized_scores[venue][year] = (self.venue_scores[venue][year] - year_metrics[year][1]) / (year_metrics[year][0] - year_metrics[year][1]) + eps
                # epsilon on both sides keeps the result in (0, 1] even when
                # the score equals the year's minimum (or max == min).
                self.normalized_scores[venue][year] = (self.venue_scores[venue][year] - year_metrics[year][1] + self.epsilon) / (year_metrics[year][0] - year_metrics[year][1] + self.epsilon)

    def getVenueName(self, v_name):
        """
        Returns the name of the venue on scopus, or None if unknown.
        """
        if v_name not in self.associations:
            #print "Venue %s does not exist on the associations" % v_name
            return None
        return self.associations[v_name]

    def getVenueScores(self, v_name, normalized=True, scopus_name=False):
        """
        Returns the scores per year of the venue (dict year -> score).
        If scopus_name is True, v_name is assumed to already be a scopus
        venue name; otherwise it is translated via the associations first.
        If normalized, returns the normalized values.
        """
        if not scopus_name:
            v_name = self.getVenueName(v_name)
            if not v_name:
                return None
        #s_df = self.scores_df.loc[self.scores_df['scopus_name'] == v_name]
        #scores = zip(list(s_df['year'].values),list(s_df['score'].values))
        if normalized:
            return self.normalized_scores[v_name]
        return self.venue_scores[v_name]

    def getVenueScoreYear(self, v_name, year, normalized=True, scopus_name=False):
        """
        Returns the score of the venue in a certain year; falls back to the
        venue's year-0 "x-year" (average) when that year has no score.
        If scopus_name is True, v_name is assumed to already be a scopus
        venue name; otherwise it is translated via the associations first.
        If normalized, returns the normalized values.
        """
        if not scopus_name:
            v_name = self.getVenueName(v_name)
            if not v_name:
                return None
        if v_name not in self.venue_scores:
            #print "Venue %s does not have any score" % v_name
            return None
        if year not in self.venue_scores[v_name]:
            #print "Venue %s does not have a score for year %d" % (v_name, year)
            if normalized:
                return self.normalized_scores[v_name][0]
            return self.venue_scores[v_name][0]
        if normalized:
            return self.normalized_scores[v_name][year]
        return self.venue_scores[v_name][year]
        #s_df = self.scores_df.loc[(self.scores_df['scopus_name'] == v_name) & (self.scores_df['year'] == year)]
        #if s_df.empty:
        #print "Either conference %s does not exist, or it does not have a score for year %d" % (v_name, year)
        #    return None
        #return s_df['score'].values[0] ##there should be only one score per conference and year
|
984,964 | 628cda49ce747fde8a84abfe9909c8da463c15ec | import pygame, events
from events import event_maker
deadzone = 0.15
pygame.joystick.init()
# this method is supposed to be called early on in the main method. It will check all available joysticks, hopefully
# initialize them, and return them to main.
def prepare_joysticks():
    """Enumerate every joystick pygame can see, log the event, and return the list."""
    joysticks = []
    for index in range(pygame.joystick.get_count()):
        joysticks.append(pygame.joystick.Joystick(index))
    event_maker.make_entry("trace", 'joysticks', "preparing joysticks", 'controllers')
    print("joysticks: ", joysticks)
    return joysticks
# this method takes a joystick, and returns a properly initialized controller. it automagically determines what type of
# joystick it has been passed. Currently it can distinguish between joysticks that are Xbox One controllers, and
# joysticks that are not Xbox One controllers. The former is treated as an Xbox 360 controller.
# note that the current usage of this method within main does not allow for the use of a keyboard.
def auto_assign(x):
    """Wrap a raw pygame joystick in the matching controller class.

    Xbox One / Xbox 360 pads get their dedicated wrappers; holding SPACE
    selects the keyboard; anything else falls back to the generic wrapper.
    """
    pad_name = x.get_name()
    if 'Xbox One' in pad_name:
        return xbone_gamepad(x)
    if 'Xbox 360' in pad_name or '360' in pad_name:
        return xb360_gamepad(x)
    if pygame.key.get_pressed()[pygame.K_SPACE]:
        return keyboard()
    return other_gamepad(x)
'''
for the xbone controller:
AXES
left stick (x,y) = (0,1)
right stick (x,y) = (4,3)
left trigger = 2
right trigger = -2
BUTTONS
A = 0
B = 1
X = 2
Y = 3
left bumper = 4
right bumper = 5
Select = 6
Start = 7
left stick = 8
right stick = 9
'''
class xbone_gamepad(object):
    """Wrapper around a pygame joystick detected as an Xbox One pad.

    Keeps two frames of input state ("old" and "new") so callers can detect
    press / hold / release transitions. See the axis/button map in the
    module-level comment above.
    """
    def __init__(self, jub):
        # I am unsure why, but I seemed to have named this controller jub
        self.jub = jub
        self.jub.init()
        #print(jub.get_name())
        # Previous-frame snapshot (identical to new_* on the first frame).
        self.sticks = {'LX': self.jub.get_axis(0), 'LY': self.jub.get_axis(1),
                       'RX': self.jub.get_axis(4), 'RY': self.jub.get_axis(3)}
        self.triggers = jub.get_axis(2)
        self.buttons = {'A': jub.get_button(0), 'B': jub.get_button(1),
                        'X': jub.get_button(2), 'Y': jub.get_button(3),
                        'LB': jub.get_button(4), 'RB': jub.get_button(5),
                        'Start': jub.get_button(7), 'Select': jub.get_button(6),
                        'LStick': jub.get_button(8), 'RStick': jub.get_button(9)}
        # Current-frame snapshot.
        self.new_sticks = {'LX': self.jub.get_axis(0), 'LY': self.jub.get_axis(1),
                           'RX': self.jub.get_axis(4), 'RY': self.jub.get_axis(3)}
        self.new_triggers = jub.get_axis(2)
        self.new_buttons = {'A': jub.get_button(0), 'B': jub.get_button(1),
                            'X': jub.get_button(2), 'Y': jub.get_button(3),
                            'LB': jub.get_button(4), 'RB': jub.get_button(5),
                            'Start': jub.get_button(7), 'Select': jub.get_button(6),
                            'LStick': jub.get_button(8), 'RStick': jub.get_button(9)}

    # the way jub works is like this: he gathers all the controller input once per frame and saves it to himself (when
    # first created, he saves that frame's input twice). at the start of each frame, the previous frame's new input is
    # saved to (old) input, and this frame's new input is collected. Thus, jub maintains two frame's worth of input.
    # doing it this way is important because it allows the game to check for when buttons are pressed and held or
    # pressed and then released
    def update(self):
        """Roll the current frame's state into the old slots and re-poll."""
        self.buttons = self.new_buttons
        self.sticks = self.new_sticks
        self.triggers = self.new_triggers
        self.new_buttons = {
            'A': self.jub.get_button(0), 'B': self.jub.get_button(1),
            'X': self.jub.get_button(2), 'Y': self.jub.get_button(3),
            'LB': self.jub.get_button(4), 'RB': self.jub.get_button(5),
            'Start': self.jub.get_button(7), 'Select': self.jub.get_button(6),
            'LStick': self.jub.get_button(8), 'RStick': self.jub.get_button(9)
        }
        self.new_sticks = {'LX': self.jub.get_axis(0), 'LY': self.jub.get_axis(1),
                           'RX': self.jub.get_axis(4), 'RY': self.jub.get_axis(3)}
        # the triggers are special, in that they are not 2 separate axes, but instead the signed difference between
        # both triggers as a single axis. If RT is pressed, axis 2 is
        self.new_triggers = self.jub.get_axis(2)

    # returns the old and new input states for a single button (this allows button mapping at the player level)
    def pull_button(self, button_name):
        return self.buttons[button_name], self.new_buttons[button_name]

    def pull_sticks(self):
        """Return (old, new) stick-state dicts."""
        return self.sticks, self.new_sticks

    def pull_triggers(self):
        """Decode the shared trigger axis into (right_pulled, left_pulled) booleans.

        A pull in either the old or new frame counts as pulled.
        """
        rt, lt, nrt, nlt = False, False, False, False
        if self.triggers > 0.5:
            lt = True
        elif self.triggers < -0.5:
            rt = True
        if self.new_triggers > 0.5:
            nlt = True
        elif self.new_triggers < -0.5:
            nrt = True
        return rt or nrt, lt or nlt

    def pull_face(self, **kwargs):
        """Map the face/system buttons to game actions; values are (old, new) pairs."""
        ret = {'fire': self.pull_button('X'),
               'interact': self.pull_button('Y'),
               'accept': self.pull_button('A'),
               'back': self.pull_button('B'),
               'start': self.pull_button('Start'),
               'select': self.pull_button('Select'),
               'lock_next': self.pull_button('RStick'),
               'lock_prev': self.pull_button('LStick')}
        return ret

    def pull_selectors(self, **kwargs):
        """Map bumpers/triggers to selection actions; 'select' of 9 means no direct pick."""
        ret = {'prev': self.pull_button('LB'),
               'next': self.pull_button('RB'),
               'select': 9,
               'lock_aim': self.pull_triggers()[0],
               'adj_aim': self.pull_triggers()[1]
               }
        return ret

    def pull_movement(self):
        """Return deadzone-filtered movement ('move') and look ('look') vectors."""
        mov_x, mov_y, dir_x, dir_y = 0, 0, 0, 0
        if abs(self.sticks['LX']) > deadzone:
            mov_x = self.sticks['LX']
        if abs(self.sticks['LY']) > deadzone:
            mov_y = self.sticks['LY']
        if abs(self.sticks['RX']) > deadzone:
            dir_x = self.sticks['RX']
        if abs(self.sticks['RY']) > deadzone:
            dir_y = self.sticks['RY']
        #if self.new_sticks["LX"]:
        #print("newStciks: ", self.new_sticks["LX"])
        # hopefully, this will eliminate controller flick
        # commented out for now, as it broke the game. Live with controller flick
        '''if self.new_sticks['LX'] !=0 and\
        self.new_sticks['LX']/abs(self.new_sticks['LX']) != self.sticks['LX']/abs(self.sticks['LX']):
        print("detected flick on LX")
        mov_x = 0
        if self.new_sticks['LY'] !=0 and\
        self.new_sticks['LY'] / abs(self.new_sticks['LY']) != self.sticks['LY'] / abs(self.sticks['LY']):
        print("detected flick on LY")
        mov_y = 0'''
        ret = {'move': (mov_x, mov_y),
               'look': (dir_x, dir_y),
               'lock_look': self.pull_triggers()[0],
               'mod_look': self.pull_triggers()[1]}
        return ret

    def check_status(self):
        """True while Start is held in either of the two tracked frames."""
        if self.new_buttons['Start'] or self.buttons['Start']:
            return True
        else:
            return False
# after extensive testing, i have discovered that xbox 360 controllers are almost exactly the same as xbox one
# controllers, the only real difference that I found is that 360 controllers have the ability to represent both triggers
# being pulled at the same time
'''
for the xb360 controller:
AXES
left stick (x,y) = (0,1)
right stick (x,y) = (4,3)
left trigger = 2
right trigger = -2
BOTH right AND left trigger = axis 2 will read approx -3
BUTTONS
A = 0
B = 1
X = 2
Y = 3
left bumper = 4
right bumper = 5
Select = 6
Start = 7
left stick = 8
right stick = 9
'''
class xb360_gamepad(object):
    """Wrapper around a pygame joystick detected as an Xbox 360 pad.

    NOTE(review): this is a near-verbatim duplicate of xbone_gamepad; only
    pull_movement differs (fixed 0.1 threshold instead of the module-level
    `deadzone`, and the look vector is reduced to its sign). Consider
    refactoring to inherit from xbone_gamepad.
    """
    def __init__(self, jub):
        # I am unsure why, but I seemed to have named this controller jub
        self.jub = jub
        self.jub.init()
        #print(jub.get_name())
        # Previous-frame snapshot (identical to new_* on the first frame).
        self.sticks = {'LX': self.jub.get_axis(0), 'LY': self.jub.get_axis(1),
                       'RX': self.jub.get_axis(4), 'RY': self.jub.get_axis(3)}
        self.triggers = jub.get_axis(2)
        self.buttons = {'A': jub.get_button(0), 'B': jub.get_button(1),
                        'X': jub.get_button(2), 'Y': jub.get_button(3),
                        'LB': jub.get_button(4), 'RB': jub.get_button(5),
                        'Start': jub.get_button(7), 'Select': jub.get_button(6),
                        'LStick': jub.get_button(8), 'RStick': jub.get_button(9)}
        # Current-frame snapshot.
        self.new_sticks = {'LX': self.jub.get_axis(0), 'LY': self.jub.get_axis(1),
                           'RX': self.jub.get_axis(4), 'RY': self.jub.get_axis(3)}
        self.new_triggers = jub.get_axis(2)
        self.new_buttons = {'A': jub.get_button(0), 'B': jub.get_button(1),
                            'X': jub.get_button(2), 'Y': jub.get_button(3),
                            'LB': jub.get_button(4), 'RB': jub.get_button(5),
                            'Start': jub.get_button(7), 'Select': jub.get_button(6),
                            'LStick': jub.get_button(8), 'RStick': jub.get_button(9)}

    # the way jub works is like this: he gathers all the controller input once per frame and saves it to himself (when
    # first created, he saves that frame's input twice). at the start of each frame, the previous frame's new input is
    # saved to (old) input, and this frame's new input is collected. Thus, jub maintains two frame's worth of input.
    # doing it this way is important because it allows the game to check for when buttons are pressed and held or
    # pressed and then released
    def update(self):
        """Roll the current frame's state into the old slots and re-poll."""
        self.buttons = self.new_buttons
        self.sticks = self.new_sticks
        self.triggers = self.new_triggers
        self.new_buttons = {
            'A': self.jub.get_button(0), 'B': self.jub.get_button(1),
            'X': self.jub.get_button(2), 'Y': self.jub.get_button(3),
            'LB': self.jub.get_button(4), 'RB': self.jub.get_button(5),
            'Start': self.jub.get_button(7), 'Select': self.jub.get_button(6),
            'LStick': self.jub.get_button(8), 'RStick': self.jub.get_button(9)
        }
        self.new_sticks = {
            'LX': self.jub.get_axis(0), 'LY': self.jub.get_axis(1),
            'RX': self.jub.get_axis(4), 'RY': self.jub.get_axis(3)
        }
        # the triggers are special, in that they are not 2 separate axes, but instead the signed difference between
        # both triggers as a single axis. If RT is pressed, axis 2 is
        self.new_triggers = self.jub.get_axis(2)

    # returns the old and new input states for a single button (this allows button mapping at the player level)
    def pull_button(self, button_name):
        return self.buttons[button_name], self.new_buttons[button_name]

    def pull_sticks(self):
        """Return (old, new) stick-state dicts."""
        return self.sticks, self.new_sticks

    def pull_triggers(self):
        """Decode the shared trigger axis into (right_pulled, left_pulled) booleans."""
        rt, lt, nrt, nlt = False, False, False, False
        if self.triggers > 0.5:
            lt = True
        elif self.triggers < -0.5:
            rt = True
        if self.new_triggers > 0.5:
            nlt = True
        elif self.new_triggers < -0.5:
            nrt = True
        return rt or nrt, lt or nlt

    def pull_face(self, **kwargs):
        """Map the face/system buttons to game actions; values are (old, new) pairs."""
        ret = {'fire': self.pull_button('X'),
               'interact': self.pull_button('Y'),
               'accept': self.pull_button('A'),
               'back': self.pull_button('B'),
               'start': self.pull_button('Start'),
               'select': self.pull_button('Select'),
               'lock_next': self.pull_button('RStick'),
               'lock_prev': self.pull_button('LStick')}
        return ret

    def pull_selectors(self, **kwargs):
        """Map bumpers/triggers to selection actions; 'select' of 9 means no direct pick."""
        ret = {'prev': self.pull_button('LB'),
               'next': self.pull_button('RB'),
               'select': 9,
               'adj_aim': self.pull_triggers()[1],
               'lock_aim': self.pull_triggers()[0]
               }
        return ret

    def pull_movement(self):
        """Return movement/look vectors.

        Unlike xbone_gamepad, the threshold is a hard-coded 0.1 (not the
        module `deadzone`) and the look direction is reduced to its sign.
        """
        mov_x, mov_y, dir_x, dir_y = 0, 0, 0, 0
        if abs(self.sticks['LX']) > 0.1:
            mov_x = self.sticks['LX']
        if abs(self.sticks['LY']) > 0.1:
            mov_y = self.sticks['LY']
        if abs(self.sticks['RX']) > 0.1:
            dir_x = self.sticks['RX']/abs(self.sticks['RX'])
        if abs(self.sticks['RY']) > 0.1:
            dir_y = self.sticks['RY']/abs(self.sticks['RY'])
        ret = {'move': (mov_x, mov_y),
               'look': (dir_x, dir_y),
               'lock_look': self.pull_triggers()[0],
               'mod_look': self.pull_triggers()[1]}
        return ret

    def check_status(self):
        """True while Start is held in either of the two tracked frames."""
        if self.new_buttons['Start'] or self.buttons['Start']:
            return True
        else:
            return False
class other_gamepad(xb360_gamepad):
    """Fallback wrapper for unrecognized pads (generic/DirectInput layout).

    Differences from the Xbox wrappers: right stick on axes 2/3, two
    separate trigger axes (5, 4) plus digital trigger buttons (6, 7),
    and shifted face-button indices.
    """
    def __init__(self, jub):
        # I am unsure why, but I seemed to have named this controller jub
        self.jub = jub
        self.jub.init()
        #print(jub.get_name())
        # Previous-frame snapshot (identical to new_* on the first frame).
        self.sticks = {
            'LX': self.jub.get_axis(0), 'LY': self.jub.get_axis(1),
            'RX': self.jub.get_axis(2), 'RY': self.jub.get_axis(3)
        }
        self.triggers = \
            jub.get_axis(5), jub.get_axis(4)
        self.trigger_btns = \
            jub.get_button(6), jub.get_button(7)
        self.buttons = {
            'A': jub.get_button(1), 'B': jub.get_button(2),
            'X': jub.get_button(0), 'Y': jub.get_button(3),
            'LB': jub.get_button(4), 'RB': jub.get_button(5),
            'Start': jub.get_button(9), 'Select': jub.get_button(8),
            'LStick': jub.get_button(10), 'RStick': jub.get_button(11)
        }
        # Current-frame snapshot.
        self.new_sticks = {
            'LX': self.jub.get_axis(0), 'LY': self.jub.get_axis(1),
            'RX': self.jub.get_axis(2), 'RY': self.jub.get_axis(3)
        }
        self.new_triggers = \
            jub.get_axis(5), jub.get_axis(4)
        self.new_trigger_btns = \
            jub.get_button(6), jub.get_button(7)
        self.new_buttons = {
            'A': jub.get_button(1), 'B': jub.get_button(2),
            'X': jub.get_button(0), 'Y': jub.get_button(3),
            'LB': jub.get_button(4), 'RB': jub.get_button(5),
            'Start': jub.get_button(9), 'Select': jub.get_button(8),
            'LStick': jub.get_button(10), 'RStick': jub.get_button(11)
        }

    def pull_triggers(self):
        # NOTE(review): unlike the parent, this returns the pair of digital
        # trigger buttons (buttons 6 and 7) rather than derived booleans;
        # the inherited pull_selectors/pull_movement index [0]/[1] into the
        # result -- confirm the button ordering matches (lock_aim, adj_aim).
        rt, lt, nrt, nlt = False, False, False, False
        '''if self.trigger_btns > 0.5:
        lt = True
        elif self.trigger_btns < -0.5:
        rt = True
        if self.new_triggers > 0.5:
        nlt = True
        elif self.new_triggers < -0.5:
        nrt = True'''
        return self.new_trigger_btns

    def update(self):
        """Roll the current frame's state into the old slots and re-poll."""
        self.buttons = self.new_buttons
        self.sticks = self.new_sticks
        self.triggers = self.new_triggers
        self.trigger_btns = self.new_trigger_btns
        self.new_buttons = {
            'A': self.jub.get_button(1), 'B': self.jub.get_button(2),
            'X': self.jub.get_button(0), 'Y': self.jub.get_button(3),
            'LB': self.jub.get_button(4), 'RB': self.jub.get_button(5),
            'Start': self.jub.get_button(9), 'Select': self.jub.get_button(8),
            'LStick': self.jub.get_button(10), 'RStick': self.jub.get_button(11)
        }
        self.new_sticks = {
            'LX': self.jub.get_axis(0), 'LY': self.jub.get_axis(1),
            'RX': self.jub.get_axis(2), 'RY': self.jub.get_axis(3)
        }
        # the triggers are special, in that they are not 2 separate axes, but instead the signed difference between
        # both triggers as a single axis. If RT is pressed, axis 2 is
        self.new_triggers = self.jub.get_axis(5), self.jub.get_axis(4)
        self.new_trigger_btns = \
            self.jub.get_button(6), self.jub.get_button(7)
# super basice keyboard class for keyboard input. like the controllers, it stores two frames of input
class keyboard():
    """Keyboard 'controller' exposing the same pull_* interface as the gamepads.

    Like the pad wrappers it keeps two frames of input (old/new).

    NOTE(review): pull_face lacks the 'lock_next'/'lock_prev' keys the pad
    classes provide, and pull_selectors' entries differ ('lock_feet' instead
    of 'adj_aim') -- callers indexing those would KeyError. Confirm intended.
    """
    def __init__(self):
        # fake joystick object so code expecting .jub.get_id() still works
        self.jub = dummy()
        self.key = pygame.key.get_pressed()
        self.new_key = pygame.key.get_pressed()
        self.shift_held = False
        self.shift_released = True

    def update(self):
        """Roll the current frame's key state into the old slot and re-poll."""
        # print("keyboard update")
        self.key = self.new_key
        self.new_key = pygame.key.get_pressed()

    def check_status(self):
        """True while SPACE is held in either of the two tracked frames."""
        if self.key[pygame.K_SPACE] or self.new_key[pygame.K_SPACE]:
            return True
        else:
            return False

    def pull_key(self, key_name):
        # NOTE(review): dead code -- builds a lookup (every entry maps to
        # K_e) and implicitly returns None; apparently never completed.
        lookup = {'a': pygame.K_e,
                  'b': pygame.K_e,
                  'c': pygame.K_e,
                  'd': pygame.K_e
                  }

    def pull_face(self, **kwargs):
        """Map fixed keys to the face actions; values are (old, new) pairs."""
        ret = {'fire': (self.key[pygame.K_f], self.new_key[pygame.K_f]),
               'interact': (self.key[pygame.K_e], self.new_key[pygame.K_e]),
               'accept': (self.key[pygame.K_SPACE], self.new_key[pygame.K_SPACE]),
               'back': (self.key[pygame.K_1], self.new_key[pygame.K_1]),
               'start': (self.key[pygame.K_SPACE], self.new_key[pygame.K_SPACE]),
               'select': (self.key[pygame.K_TAB], self.new_key[pygame.K_TAB])}
        return ret

    def pull_selectors(self, **kwargs):
        """Q/E cycle selection; number keys 1-9 pick a slot directly (9 = none).

        NOTE(review): 'lock_aim'/'lock_feet' pairs use the OLD key state for
        both tuple elements (probably one should be new_key) -- confirm.
        """
        index = 9
        if self.key[pygame.K_1]:
            index = 0
        elif self.key[pygame.K_2]:
            index = 1
        elif self.key[pygame.K_3]:
            index = 2
        elif self.key[pygame.K_4]:
            index = 3
        elif self.key[pygame.K_5]:
            index = 4
        elif self.key[pygame.K_6]:
            index = 5
        elif self.key[pygame.K_7]:
            index = 6
        elif self.key[pygame.K_8]:
            index = 7
        elif self.key[pygame.K_9]:
            index = 8
        # print("checking old e key", self.key[pygame.K_e])
        # print("checking new e key", self.new_key[pygame.K_e])
        return {'next': (self.key[pygame.K_e], self.new_key[pygame.K_e]),
                'prev': (self.key[pygame.K_q], self.new_key[pygame.K_q]),
                'select': index,
                'lock_aim': (self.key[pygame.K_LSHIFT], self.key[pygame.K_LSHIFT]),
                'lock_feet': (self.key[pygame.K_LCTRL], self.key[pygame.K_LCTRL])}

    def pull_movement(self, **kwargs):
        """WASD -> 'move', arrow keys -> 'look'; each component is -1, 0 or 1."""
        mov_x, mov_y, dir_x, dir_y = 0, 0, 0, 0
        if self.key[pygame.K_a]:
            mov_x = -1
        elif self.key[pygame.K_d]:
            mov_x = 1
        if self.key[pygame.K_w]:
            mov_y = -1
        elif self.key[pygame.K_s]:
            mov_y = 1
        if self.key[pygame.K_LEFT]:
            dir_x = -1
        elif self.key[pygame.K_RIGHT]:
            dir_x = 1
        if self.key[pygame.K_DOWN]:
            dir_y = -1
        elif self.key[pygame.K_UP]:
            dir_y = 1
        ret = {'move': (mov_x, mov_y),
               'look': (dir_x, dir_y)}
        return ret
class dummy():
    """Stand-in joystick for the keyboard controller; only supplies a fake id."""

    # arbitrary sentinel, large enough never to collide with a real joystick id
    _FAKE_ID = 9808797

    def __init__(self):
        self.id = self._FAKE_ID

    def get_id(self):
        """Return the fake joystick id."""
        return self.id
def controller_tester():
    """Initialise pygame, dump capability counts for every attached joystick,
    and return the first one."""
    pygame.init()
    pygame.joystick.init()
    pads = []
    for index in range(pygame.joystick.get_count()):
        pads.append(pygame.joystick.Joystick(index))
    for pad in pads:
        pad.init()
        print("buttons: ", pad.get_numbuttons())
        print("hats: ", pad.get_numhats())
        print("axes: ", pad.get_numaxes())
    return pads[0]
|
984,965 | be66c6e5b5c4c55a95bf0c7c12b9d3c2033d02e6 | import sys
import os
import os.path as osp
import datetime
from MDRSREID.utils.may_make_dirs import may_make_dirs
class ReDirectSTD(object):
    """Modified from Tong Xiao's `Logger` in open-reid.

    Overwrites sys.stdout or sys.stderr so console logs are also written
    to a file.

    Args:
        fpath: log file path (an existing file is deleted)
        console: one of ['stdout', 'stderr']
        immediately_visible: If `False`, the file is opened only once and
            closed after exiting, so a written message may not be immediately
            visible on disk. If `True`, each write opens, appends to, and
            closes the file (visible, but costly with many writes).

    Usage example:
        `ReDirectSTD('stdout.txt', 'stdout', False)`
        `ReDirectSTD('stderr.txt', 'stderr', False)`

    NOTE: File will be deleted if already existing. Log dir and file are
    created lazily -- if no message is written, they will not be created.
    """
    def __init__(self, fpath=None, console='stdout', immediately_visible=False):
        assert console in ['stdout', 'stderr']
        self.console = sys.stdout if console == 'stdout' else sys.stderr
        self.file = fpath
        self.f = None  # lazily opened handle (used when immediately_visible is False)
        self.immediately_visible = immediately_visible
        if fpath is not None:
            # Remove existing log file.
            if osp.exists(fpath):
                os.remove(fpath)
        # Redirect the chosen stream to this object.
        if console == 'stdout':
            sys.stdout = self
        else:
            sys.stderr = self

    def __del__(self):
        self.close()

    def __enter__(self):
        # BUG FIX: previously returned None, so `with ReDirectSTD(...) as log:`
        # bound `log` to None. A context manager should return itself.
        return self

    def __exit__(self, *args):
        self.close()

    def write(self, msg):
        """Write to the real console and mirror the message to the log file."""
        self.console.write(msg)
        if self.file is not None:
            # NOTE(review): `self` is passed as the first positional argument;
            # if may_make_dirs expects only a directory path this is a bug --
            # confirm against MDRSREID.utils.may_make_dirs.
            may_make_dirs(self, os.path.dirname(osp.abspath(self.file)))
            if self.immediately_visible:
                with open(self.file, 'a') as f:
                    f.write(msg)
            else:
                if self.f is None:
                    self.f = open(self.file, 'w')
                self.f.write(msg)
            self.flush()

    def flush(self):
        """Flush the console and the log file (fsync for durability)."""
        self.console.flush()
        if self.f is not None:
            self.f.flush()
            # redundant local `import os` removed -- os is imported at module level
            os.fsync(self.f.fileno())

    def close(self):
        # NOTE(review): this also closes the underlying console stream
        # (sys.stdout / sys.stderr) -- confirm that is intended.
        self.console.close()
        if self.f is not None:
            self.f.close()
def time_str(fmt=None):
    """Current local time as a string; default format like 2020-01-31_23-59-59."""
    chosen = '%Y-%m-%d_%H-%M-%S' if fmt is None else fmt
    return datetime.datetime.today().strftime(chosen)
def array_str(array, fmt='{:.2f}', sep=', ', with_boundary=True):
    """String of a 1-D tuple, list, or numpy array containing digits."""
    pieces = []
    for value in array:
        pieces.append(fmt.format(float(value)))
    body = sep.join(pieces)
    if with_boundary:
        return '[' + body + ']'
    return body
def array_2d_str(array, fmt='{:.2f}', sep=', ', row_sep='\n', with_boundary=True):
    """String of a 2-D tuple, list, or numpy array containing digits."""
    rows = [array_str(row, fmt=fmt, sep=sep, with_boundary=with_boundary)
            for row in array]
    body = row_sep.join(rows)
    return '[' + body + ']' if with_boundary else body
def tight_float_str(x, fmt='{:.4f}'):
    """Format *x* with *fmt*, then trim trailing zeros and a dangling point."""
    text = fmt.format(x)
    text = text.rstrip('0')
    return text.rstrip('.')
def score_str(x):
    """Format a ratio as a right-justified percentage, e.g. ' 50.0%'."""
    return format(x, '5.1%').rjust(6)
def join_str(sequence, sep):
    """Join the pieces with *sep*, skipping empty strings."""
    non_empty = [piece for piece in sequence if piece != '']
    return sep.join(non_empty)
def write_to_file(file, msg, append=True):
    """Write *msg* to *file*; appends by default, overwrites when append=False."""
    mode = 'a' if append else 'w'
    with open(file, mode) as f:
        f.write(msg)
|
984,966 | cf3f54a46322bb1c1860d19dadb128a5e5885681 | import tkinter as tk
import os
import requests
from bs4 import BeautifulSoup
from tkinter.filedialog import askdirectory
def selectPath():
    """Ask the user for a directory and store it in the shared `path` StringVar."""
    chosen = askdirectory()
    path.set(chosen)
def check_null():
    """Validate the two entry fields, then start the download."""
    # Read the current entry values.
    save_path = path.get()
    img_path_a = img_path.get()
    # Reject empty input.
    if save_path.strip() =="" or img_path_a.strip() == "":
        l3.config(text="输入框不能为空")
        return;
    # Reject a save path that is malformed or does not exist.
    elif not os.path.isdir(save_path) or not os.path.exists(save_path):
        l3.config(text="存储路径不正确")
        return;
    # (unimplemented) validation of the URL format
    # elif not os.path.isdir(save_path) or not os.path.exists(save_path):
    #     l3.config(text="the save_path is error")
    #     return;
    else:
        download_img(save_path, img_path_a);
def download_img(sv_path, wb_path):
    """Scrape every <img> on page *wb_path* and save new files under *sv_path*.

    Progress is reported through the module-level `l3` label. Any failure
    replaces the label text with the generic failure message, as before.
    """
    try:
        string = ""
        root = sv_path
        res = requests.get(wb_path)
        soup = BeautifulSoup(res.content, "lxml")
        for pa_web in soup.find_all('img'):
            src = pa_web.get("src")
            # Absolute URLs are used as-is; relative ones are resolved
            # against the page URL.
            if "https:" in src or "http:" in src:
                im_path = src
            else:
                im_path = wb_path + "/" + src
            # print(im_path)
            url = im_path  # image address
            path = root + "/" + url.split("/")[-1]
            if not path.endswith(".jpg"):
                path += ".jpg"
            if not os.path.exists(root):  # create the directory if missing
                os.mkdir(root)
            if not os.path.exists(path):  # download only if not present
                r = requests.get(url)
                # BUG FIX: the original referenced r.raise_for_status without
                # calling it, so HTTP errors were silently ignored.
                r.raise_for_status()
                with open(path, "wb") as f:
                    f.write(r.content)
                string += im_path + "下载成功 \n"
                l3.config(text=string)
            else:
                string += im_path + "文件已经存在 \n"
                l3.config(text=string)
        string += r"下载完成"
        l3.config(text=string)
    except Exception:
        # narrowed from a bare except; still best-effort by design
        l3.config(text="获取失败")
# --- GUI construction (runs at import time) ---
w = tk.Tk()
w.title("网页图片抓取工具")
w.geometry("500x500")
w.resizable(0,0)
# Shared state read by selectPath() and check_null().
path = tk.StringVar()
img_path = tk.StringVar()
l1 = tk.Label(w, text="图片网络路径:")
l1.grid(row=0, column=0)
e1 = tk.Entry(w, textvariable=img_path, width=34)
e1.grid(row=0, column=1, columnspan=2)
l2 = tk.Label(w, text="存储路径:")
l2.grid(row=1, column=0)
# NOTE(review): .grid() returns None, so e2/b1/b2 hold None; harmless here
# because they are never used afterwards.
e2 = tk.Entry(w, textvariable=path).grid(row=1, column=1)
b1 = tk.Button(w, text="路径选择", command=selectPath).grid(row=1, column=2)
b2 = tk.Button(w, text="抓取", command=check_null, width=10).grid(row=2, column=1)
# Status area updated by check_null()/download_img().
l3 = tk.Label(w, text="empty", width=55, height=20, bg="yellow")
l3.grid(row=3, columnspan=3)
w.mainloop()
|
984,967 | 19a296804ee1aab43987e65671b660423e7ee970 | #!/usr/bin/python3
# 1024.py
# Brennan D Baraban <375@holbertonschool.com>
"""Hodor with my Holberton ID 1024 times."""
import requests
from bs4 import BeautifulSoup
# Voting endpoint for the challenge.
php = "http://158.69.76.135/level2.php"
# Spoof a desktop Firefox user agent so requests look like a browser.
user_agent = ("Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:64.0) "
              "Gecko/20100101 Firefox/64.0")
header = {
    "User-Agent": user_agent,
    "referer": php
}
# POST payload; "key" is filled per-request with the form's hidden token.
vote = {
    "id": "375",
    "holdthedoor": "Submit",
    "key": ""
}
if __name__ == "__main__":
    # Cast 1024 votes; each iteration fetches a fresh session and pulls
    # the form's hidden anti-CSRF key before posting.
    for _ in range(1024):
        session = requests.session()
        page = session.get(php, headers=header)
        soup = BeautifulSoup(page.text, "html.parser")
        form = soup.find("form", {"method": "post"})
        hidden_input = form.find("input", {"type": "hidden"})
        vote["key"] = hidden_input["value"]
        session.post(php, headers=header, data=vote)
|
984,968 | d15b1e0ebcaad40d1e63f86516f9ea693408e80b | # Copyright ETH-VAW / Glaciology
#
# Module : Scripts.RadarDataLibrary.RadarData.DataDataShapefileWriter
#
# Created by: yvow
# Created on: 04.05.2015
# Imports
import os
import re
import abc
from RadarDataWriter import RadarDataWriter
class RadarDataShapefileWriter(RadarDataWriter):
    __metaclass__ = abc.ABCMeta
    '''
    Abstract writer that exports a radar line into two shapefiles:
    one with the profile lines and one with the analyzed points.
    '''
    _shapeFileLine = ""
    _shapeFilePoint = ""

    # Field names shared by lines and points
    _FIELD_NAME_PROFILE_ID = "PROFILE"
    _FIELD_NAME_DATE = "DATE_ACQ"
    # Field names for lines
    _FIELD_NAME_LINE_ACQUISITION_TYPE = "ACQ_TYPE"
    _FIELD_NAME_LINE_INSTRUMENT = "INSTRUMENT"
    _FIELD_NAME_LINE_FREQUENCY_FROM = "FREQ_FROM"
    _FIELD_NAME_LINE_FREQUENCY_TO = "FREQ_TO"
    _FIELD_NAME_LINE_SUMMARY = "SUMMARY"
    _FIELD_NAME_LINE_IMAGE_BEDROCK = "BED"
    _FIELD_NAME_LINE_IMAGE_MAP = "MAP"
    _FIELD_NAME_LINE_IMAGE_MIG = "MIG"
    # Field names for points
    _FIELD_NAME_COUNT_RESULTS = "NUM_RES"
    _FIELD_NAME_POINT_Z_BEDROCK_1 = "Z_BED_1"
    _FIELD_NAME_POINT_Z_ICE_SUR_1 = "Z_ICE_1"
    _FIELD_NAME_POINT_THICKNESS_1 = "THICK_1"
    _FIELD_NAME_POINT_QUALITY_1 = "Q_1"
    _FIELD_NAME_POINT_Z_BEDROCK_2 = "Z_BED_2"
    _FIELD_NAME_POINT_Z_ICE_SUR_2 = "Z_ICE_2"
    _FIELD_NAME_POINT_THICKNESS_2 = "THICK_2"
    _FIELD_NAME_POINT_QUALITY_2 = "Q_2"

    def __init__(self, radarLine, shapeFileLine, shapeFilePoint, doAppend = True):
        '''
        radarLine: the radar line to export
        shapeFileLine / shapeFilePoint: target shapefile paths
        doAppend: when False, existing target shapefiles are deleted first
        '''
        super(RadarDataShapefileWriter, self).__init__(radarLine)
        self._shapeFileLine = shapeFileLine
        self._shapeFilePoint = shapeFilePoint
        if doAppend == False:
            if os.path.exists(self._shapeFileLine):
                self._deleteShapefile(self._shapeFileLine)
            if os.path.exists(self._shapeFilePoint):
                self._deleteShapefile(self._shapeFilePoint)
        # Placeholders: appending to a not-yet-existing shapefile is not handled.
        if doAppend == True and os.path.exists(self._shapeFileLine) == False:
            pass
        if doAppend == True and os.path.exists(self._shapeFilePoint) == False:
            pass

    def _deleteShapefile(self, shapefile):
        '''
        Delete the shapefile together with all its sidecar files
        (.shx, .dbf, .prj, .shp.xml, ...).

        BUG FIX: the original used re.search(basename, filename), which
        (a) deleted any file merely *containing* the basename as a substring
        and (b) broke on regex metacharacters in the name. Now only files
        named exactly "<basename>" or starting with "<basename>." are removed.
        '''
        shapefileDirectory = os.path.dirname(shapefile)
        shapefileName = os.path.basename(shapefile)
        shapefileBasename = os.path.splitext(shapefileName)[0]
        for fileInDirectory in os.listdir(shapefileDirectory):
            if fileInDirectory == shapefileBasename or fileInDirectory.startswith(shapefileBasename + "."):
                os.remove(os.path.join(shapefileDirectory, fileInDirectory))

    @abc.abstractmethod
    def writeData(self):
        '''Write the radar line's geometry and attributes to the shapefiles.'''
        return
# Imports used by EsriShapefileWriter
import arcpy
import arcpy.da
from arcpy import env
from arcpy.da import InsertCursor
class EsriShapefileWriter(RadarDataShapefileWriter):
'''
classdocs
'''
#TODO: Include class description.
__SHAPE_GEOMETRY_TYPE_POLYLINE = "POLYLINE"
__SHAPE_GEOMETRY_TYPE_POINT = "POINT"
__spatialReferenceString = "CH1903 LV03"
__spatialReference = None
    def __init__(self, radarLine, shapeFileLine, shapeFilePoint, doAppend = True):
        '''
        Create the writer; when doAppend is False the two shapefiles are
        (re)created from scratch with the expected attribute schema.
        '''
        super(EsriShapefileWriter, self).__init__(radarLine, shapeFileLine, shapeFilePoint, doAppend)
        # CH1903 LV03 is the (legacy) Swiss national coordinate system.
        self.__spatialReference = arcpy.SpatialReference(self.__spatialReferenceString)
        if doAppend == False:
            self.__prepareFeatureClass(self._shapeFileLine, self.__SHAPE_GEOMETRY_TYPE_POLYLINE)
            self.__createAttributesLine()
            self.__prepareFeatureClass(self._shapeFilePoint, self.__SHAPE_GEOMETRY_TYPE_POINT)
            self.__createAttributesPoint()
    def __prepareFeatureClass(self, shapefile, shapefileType):
        '''
        Create an empty feature class of the given geometry type at
        *shapefile*, using the writer's spatial reference.
        '''
        shapeDirectory = os.path.dirname(shapefile)
        env.workspace = shapeDirectory
        shapefileName = os.path.basename(shapefile)
        arcpy.CreateFeatureclass_management(shapeDirectory, shapefileName, shapefileType, "", "", "", self.__spatialReference)
def __createAttributesLine(self):
#TODO: Getting one general function to create attributes for lines and points.
env.workspace = os.path.dirname(self._shapeFileLine)
shapefileName = os.path.basename(self._shapeFileLine)
# Adding the needed fields
arcpy.AddField_management(shapefileName, self._FIELD_NAME_PROFILE_ID , "TEXT", "", "", 20, "", "NULLABLE", "")
arcpy.AddField_management(shapefileName, self._FIELD_NAME_DATE , "DATE", "", "", "", "", "NULLABLE", "")
# ---
arcpy.AddField_management(shapefileName, self._FIELD_NAME_LINE_ACQUISITION_TYPE, "TEXT", "", "", 50, "", "NULLABLE", "")
arcpy.AddField_management(shapefileName, self._FIELD_NAME_LINE_INSTRUMENT , "TEXT", "", "", 50, "", "NULLABLE", "")
arcpy.AddField_management(shapefileName, self._FIELD_NAME_LINE_FREQUENCY_FROM , "DOUBLE", "", "", "", "", "NULLABLE", "")
arcpy.AddField_management(shapefileName, self._FIELD_NAME_LINE_FREQUENCY_TO , "DOUBLE", "", "", "", "", "NULLABLE", "")
# Fields with file information
arcpy.AddField_management(shapefileName, self._FIELD_NAME_LINE_SUMMARY , "TEXT", "", "", 500, "", "NULLABLE", "")
arcpy.AddField_management(shapefileName, self._FIELD_NAME_LINE_IMAGE_BEDROCK, "TEXT", "", "", 500, "", "NULLABLE", "")
arcpy.AddField_management(shapefileName, self._FIELD_NAME_LINE_IMAGE_MAP , "TEXT", "", "", "", 500, "NULLABLE", "")
arcpy.AddField_management(shapefileName, self._FIELD_NAME_LINE_IMAGE_MIG , "TEXT", "", "", "", 500, "NULLABLE", "")
# Removing the default Id field
arcpy.DeleteField_management(shapefileName, ["Id"])
def __createAttributesPoint(self):
    """Add the attribute schema of the radar point shapefile."""
    #TODO: Getting one general function to create attributes for lines and points.
    env.workspace = os.path.dirname(self._shapeFilePoint)
    shapefileName = os.path.basename(self._shapeFilePoint)
    # (field name, field type, field length) triples in insertion order;
    # "" keeps the arcpy default length for the non-text types.
    fieldSpecs = (
        # General information
        (self._FIELD_NAME_PROFILE_ID, "TEXT", 20),
        (self._FIELD_NAME_DATE, "DATE", ""),
        (self._FIELD_NAME_COUNT_RESULTS, "SHORT", ""),
        # Analyzed information, first result
        (self._FIELD_NAME_POINT_Z_BEDROCK_1, "DOUBLE", ""),
        (self._FIELD_NAME_POINT_Z_ICE_SUR_1, "DOUBLE", ""),
        (self._FIELD_NAME_POINT_THICKNESS_1, "DOUBLE", ""),
        (self._FIELD_NAME_POINT_QUALITY_1, "SHORT", ""),
        # Analyzed information, second result
        (self._FIELD_NAME_POINT_Z_BEDROCK_2, "DOUBLE", ""),
        (self._FIELD_NAME_POINT_Z_ICE_SUR_2, "DOUBLE", ""),
        (self._FIELD_NAME_POINT_THICKNESS_2, "DOUBLE", ""),
        (self._FIELD_NAME_POINT_QUALITY_2, "SHORT", ""),
    )
    for fieldName, fieldType, fieldLength in fieldSpecs:
        arcpy.AddField_management(shapefileName, fieldName, fieldType, "", "", fieldLength, "", "NULLABLE", "")
    # Removing the default Id field
    arcpy.DeleteField_management(shapefileName, ["Id"])
def writeData(self):
    """Write the radar line geometry and its per-point records to the shapefiles."""
    self.__writeLines()
    self.__writePoints()
def __writeLines(self):
    """Insert the polyline built from the radar points plus its attributes.

    File-path attributes may be None on the radar line; shapefile text fields
    get an empty string in that case.
    """
    array = arcpy.Array()
    cursor = InsertCursor(self._shapeFileLine, [
        "SHAPE@",
        self._FIELD_NAME_LINE_ACQUISITION_TYPE, self._FIELD_NAME_LINE_INSTRUMENT,
        self._FIELD_NAME_LINE_FREQUENCY_FROM, self._FIELD_NAME_LINE_FREQUENCY_TO,
        self._FIELD_NAME_PROFILE_ID, self._FIELD_NAME_DATE, self._FIELD_NAME_LINE_SUMMARY,
        self._FIELD_NAME_LINE_IMAGE_BEDROCK, self._FIELD_NAME_LINE_IMAGE_MAP, self._FIELD_NAME_LINE_IMAGE_MIG,
    ])
    for radarPoint in self._radarLine.radarPoints:
        array.add(arcpy.Point(radarPoint.xCoordinate, radarPoint.yCoordinate))
    lineGeometry = arcpy.Polyline(array)
    # Setting an empty string in case of possible Null values
    # (identity check with `is None` instead of `== None`).
    summaryFile = self._radarLine.summaryFile if self._radarLine.summaryFile is not None else ""
    bedrockImageFile = self._radarLine.bedrockImageFile if self._radarLine.bedrockImageFile is not None else ""
    mapImageFile = self._radarLine.mapImageFile if self._radarLine.mapImageFile is not None else ""
    migImageFile = self._radarLine.migImageFile if self._radarLine.migImageFile is not None else ""
    cursor.insertRow([
        lineGeometry,
        self._radarLine.acquisitionType, self._radarLine.instrument,
        self._radarLine.frequencyFrom, self._radarLine.frequencyTo,
        self._radarLine.lineId, self._radarLine.date, summaryFile,
        bedrockImageFile, mapImageFile, migImageFile,
    ])
    del cursor
def __writePoints(self):
    """Insert one row per radar point with up to two analysis results.

    Missing results are written as 0.0, matching the field defaults used
    throughout the writer.
    """
    cursor = InsertCursor(self._shapeFilePoint, [
        "SHAPE@",
        self._FIELD_NAME_PROFILE_ID, self._FIELD_NAME_DATE, self._FIELD_NAME_COUNT_RESULTS,
        self._FIELD_NAME_POINT_Z_BEDROCK_1, self._FIELD_NAME_POINT_Z_ICE_SUR_1, self._FIELD_NAME_POINT_THICKNESS_1, self._FIELD_NAME_POINT_QUALITY_1,
        self._FIELD_NAME_POINT_Z_BEDROCK_2, self._FIELD_NAME_POINT_Z_ICE_SUR_2, self._FIELD_NAME_POINT_THICKNESS_2, self._FIELD_NAME_POINT_QUALITY_2
    ])
    for radarPoint in self._radarLine.radarPoints:
        pointGeometry = arcpy.Point(radarPoint.xCoordinate, radarPoint.yCoordinate)
        # [zBed_1, zIce_1, thickness_1, quality_1, zBed_2, zIce_2, thickness_2, quality_2]
        resultValues = [0.0] * 8
        if radarPoint.countResults > 0:
            first = radarPoint.results[0]
            resultValues[0:4] = [first.zBed, first.zIceSurface, first.thickness, first.quality]
        if radarPoint.countResults >= 2:
            second = radarPoint.results[1]
            resultValues[4:8] = [second.zBed, second.zIceSurface, second.thickness, second.quality]
        cursor.insertRow(
            [pointGeometry, self._radarLine.lineId, self._radarLine.date, radarPoint.countResults]
            + resultValues
        )
    del cursor
984,969 | 4c3954893a86cb4634b1608d2020cfcf87f71376 | # Time Complexity : O(N)
# Space Complexity : O(N)
# Did this code successfully run on Leetcode : YES
# Any problem you faced while coding this : NO
"""
# Definition for Employee.
class Employee:
def __init__(self, id: int, importance: int, subordinates: List[int]):
self.id = id
self.importance = importance
self.subordinates = subordinates
"""
#BFS
class Solution:
    def getImportance(self, employees: List['Employee'], id: int) -> int:
        """Return the importance of employee *id* plus all transitive
        subordinates, traversed breadth-first.  Empty input yields 0.
        Time O(N), space O(N)."""
        if not employees:
            return 0
        # id -> Employee lookup table
        by_id = {emp.id: emp for emp in employees}
        total = 0
        pending = deque([id])
        while pending:
            current = by_id[pending.popleft()]
            total += current.importance
            pending.extend(current.subordinates)
        return total
#DFS
class Solution:
    """DFS variant: accumulates the total on the instance while recursing."""
    def __init__(self):
        # Running total built up by dfs(); reset on every getImportance() call.
        self.importance = 0
    def getImportance(self, employees: List['Employee'], id: int) -> int:
        """Return the importance of *id* plus all transitive subordinates."""
        if len(employees) == 0:
            return 0
        # Bug fix: reset the accumulator so a reused Solution instance does
        # not carry over the previous query's total.
        self.importance = 0
        hashmap = {}  # key - id, value - employee obj
        for e in employees:
            hashmap[e.id] = e
        self.dfs(id, hashmap)
        return self.importance
    def dfs(self, id, hashmap):
        """Add *id*'s importance and recurse into its subordinates."""
        emp = hashmap[id]
        self.importance += emp.importance
        for e in emp.subordinates:
            self.dfs(e, hashmap)
|
984,970 | f6d0424db5b202bdf125396f60731d956f9e7986 | # coding: utf-8
from __future__ import absolute_import
from datetime import date, datetime # noqa: F401
from typing import List, Dict # noqa: F401
from swagger_server.models.base_model_ import Model
from swagger_server import util
class Story(Model):
    """NOTE: This class is auto generated by the swagger code generator program.

    Do not edit the class manually.
    """
    def __init__(self, id: str=None, title: str=None, language: str=None, private: bool=None, last_updated: str=None):  # noqa: E501
        """Story - a model defined in Swagger

        :param id: The id of this Story.  # noqa: E501
        :type id: str
        :param title: The title of this Story.  # noqa: E501
        :type title: str
        :param language: The language of this Story.  # noqa: E501
        :type language: str
        :param private: The private of this Story.  # noqa: E501
        :type private: bool
        :param last_updated: The last_updated of this Story.  # noqa: E501
        :type last_updated: str
        """
        # Attribute name -> Python type, used by the base Model for (de)serialization.
        self.swagger_types = {
            'id': str,
            'title': str,
            'language': str,
            'private': bool,
            'last_updated': str
        }
        # Attribute name -> JSON key in the wire format.
        self.attribute_map = {
            'id': 'id',
            'title': 'title',
            'language': 'language',
            'private': 'private',
            'last_updated': 'last_updated'
        }
        self._id = id
        self._title = title
        self._language = language
        self._private = private
        self._last_updated = last_updated

    @classmethod
    def from_dict(cls, dikt) -> 'Story':
        """Returns the dict as a model

        :param dikt: A dict.
        :type: dict
        :return: The Story of this Story.  # noqa: E501
        :rtype: Story
        """
        return util.deserialize_model(dikt, cls)

    @property
    def id(self) -> str:
        """Gets the id of this Story.

        :return: The id of this Story.
        :rtype: str
        """
        return self._id

    @id.setter
    def id(self, id: str):
        """Sets the id of this Story.

        :param id: The id of this Story.
        :type id: str
        """
        self._id = id

    @property
    def title(self) -> str:
        """Gets the title of this Story.

        :return: The title of this Story.
        :rtype: str
        """
        return self._title

    @title.setter
    def title(self, title: str):
        """Sets the title of this Story.

        :param title: The title of this Story.
        :type title: str
        """
        self._title = title

    @property
    def language(self) -> str:
        """Gets the language of this Story.

        :return: The language of this Story.
        :rtype: str
        """
        return self._language

    @language.setter
    def language(self, language: str):
        """Sets the language of this Story.

        :param language: The language of this Story.
        :type language: str
        """
        self._language = language

    @property
    def private(self) -> bool:
        """Gets the private of this Story.

        :return: The private of this Story.
        :rtype: bool
        """
        return self._private

    @private.setter
    def private(self, private: bool):
        """Sets the private of this Story.

        :param private: The private of this Story.
        :type private: bool
        """
        self._private = private

    @property
    def last_updated(self) -> str:
        """Gets the last_updated of this Story.

        Date  # noqa: E501

        :return: The last_updated of this Story.
        :rtype: str
        """
        return self._last_updated

    @last_updated.setter
    def last_updated(self, last_updated: str):
        """Sets the last_updated of this Story.

        Date  # noqa: E501

        :param last_updated: The last_updated of this Story.
        :type last_updated: str
        """
        self._last_updated = last_updated
|
984,971 | b395a12224fe52cfa097f945a5886f6b0dbcc02d | import unittest
from unittest.mock import MagicMock
import unittest.mock
class Airport:
    """A minimal airport that tracks the planes currently on its ground."""

    def __init__(self):
        # Landed planes, in arrival order.
        self.planes = list()

    def land(self, plane):
        """Register *plane* as landed here and notify it via plane.landed(self)."""
        self.planes.append(plane)
        plane.landed(self)
class TestingAirport(unittest.TestCase):
    """Unit tests for Airport using a mocked plane."""
    def test_land(self):
        # land() must both register the plane and call back plane.landed(airport).
        airport = Airport()
        plane = MagicMock()
        airport.land(plane)
        assert airport.planes == [plane]
        plane.landed.assert_called_with(airport)
# Run the test suite when this file is executed directly.
if __name__ == '__main__':
    unittest.main()
|
984,972 | 14d7db11f6b67b3f8007e969cc8e0cf1816c27f1 | from django.shortcuts import render
from django.http import HttpResponse
# Create your views here.
'''
视图函数需要一个参数,类型 应该是HttpResquest
'''
def do_normalmap(request):
    """View for the normalmap URL; logs a marker and returns a plain response."""
    print("In do normalmap")
    return HttpResponse("This is normalmap")
def withparam(request,year,month):
    """Echo the *year* and *month* captured from the URL pattern."""
    return HttpResponse('This is with param{0},{1}'.format(year, month))
def do_app(requset):
    """Sub-route view (response text is Chinese for "this is a sub-route").

    NOTE(review): parameter name 'requset' is a typo for 'request'; kept to
    avoid breaking keyword-based callers.
    """
    return HttpResponse('这是个子路由')
def do_param2(requset,pn):
    """Echo the book number *pn* captured from the URL.

    NOTE(review): 'requset' is a typo for 'request'; kept for compatibility.
    """
    return HttpResponse('This is book{0}'.format(pn))
def extremParam(r, name):
    """Echo the *name* captured from the URL (r is the request object)."""
    return HttpResponse('my name is {0}'.format(name))
|
def solution(phone_number):
    """Mask all but the last four characters of *phone_number* with '*'.

    Numbers of four characters or fewer are returned unchanged
    ('*' * negative_count is the empty string).

    >>> solution("01033334444")
    '*******4444'
    """
    return '*' * (len(phone_number) - 4) + phone_number[-4:]
# Example inputs from the problem statement.
phone_number1 = "01033334444"
phone_number2 = "027778888"
# NOTE(review): the return values are discarded; wrap in print() to see them.
solution(phone_number1)
solution(phone_number2)
# phone_number return
# 01033334444 *******4444
# 027778888 *****8888
|
984,974 | 8e31ad26c3b61d4179dfa7b2897b5aa38e3e44af | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import django.utils.timezone
from django.conf import settings
class Migration(migrations.Migration):
    """Initial schema for the custom User model plus Bookmark and Category.

    Auto-generated migration; do not restructure by hand.
    """

    dependencies = [
        ('auth', '0001_initial'),
    ]

    operations = [
        migrations.CreateModel(
            name='User',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('password', models.CharField(max_length=128, verbose_name='password')),
                ('last_login', models.DateTimeField(default=django.utils.timezone.now, verbose_name='last login')),
                ('is_superuser', models.BooleanField(default=False, help_text='Designates that this user has all permissions without explicitly assigning them.', verbose_name='superuser status')),
                ('username', models.CharField(unique=True, max_length=80)),
                ('fullname', models.CharField(max_length=80, null=True, blank=True)),
                ('email', models.EmailField(max_length=75, unique=True, null=True, blank=True)),
                ('about', models.TextField(null=True, blank=True)),
                ('is_active', models.BooleanField(default=True)),
                ('is_admin', models.BooleanField(default=False)),
                ('is_staff', models.BooleanField(default=False)),
                # The b'...' upload_to values are bytes paths from a Python 2 era
                # codebase; Django accepts them unchanged.
                ('original_photo', models.ImageField(null=True, upload_to=b'user_photos/original/', blank=True)),
                ('small_photo', models.ImageField(null=True, upload_to=b'user_photos/small/', blank=True)),
                ('large_photo', models.ImageField(null=True, upload_to=b'user_photos/large/', blank=True)),
                ('thumbnail', models.ImageField(null=True, upload_to=b'user_photos/thumbnail/', blank=True)),
                ('personal_site', models.URLField(null=True, blank=True)),
                ('groups', models.ManyToManyField(related_query_name='user', related_name='user_set', to='auth.Group', blank=True, help_text='The groups this user belongs to. A user will get all permissions granted to each of his/her group.', verbose_name='groups')),
                ('user_permissions', models.ManyToManyField(related_query_name='user', related_name='user_set', to='auth.Permission', blank=True, help_text='Specific permissions for this user.', verbose_name='user permissions')),
            ],
            options={
                'abstract': False,
            },
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name='Bookmark',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('title', models.CharField(max_length=50)),
                ('description', models.TextField()),
            ],
            options={
            },
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name='Category',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('name', models.CharField(max_length=30)),
                ('user', models.ForeignKey(to=settings.AUTH_USER_MODEL)),
            ],
            options={
            },
            bases=(models.Model,),
        ),
        # Added after Category exists so the FK target is already created.
        migrations.AddField(
            model_name='bookmark',
            name='category',
            field=models.ForeignKey(to='bookmarks.Category'),
            preserve_default=True,
        ),
    ]
|
984,975 | 9f908e66584ef62e6ef9f9d75df7430f403a8c0c | import os
import pytest
# Repository root: three directory levels above this file.
# NOTE(review): '/' splitting assumes POSIX paths — confirm before use on Windows.
ROOT = '/'.join(os.path.dirname(__file__).split('/')[:-3])
# Package to measure coverage for: this module's parent package.
COVER_PACKAGE = '.'.join(__package__.split('.')[:-1])
def suite(*args):
    """Run the project's pytest suite with coverage for COVER_PACKAGE.

    :param args: extra command-line arguments forwarded verbatim to pytest.
    """
    # Build the argument list in one expression instead of appending in a loop.
    pytest.main([ROOT, '--cov={}'.format(COVER_PACKAGE)] + list(args))
|
984,976 | cefbbec787c005b656b0562878248a4342b87c5c | from django.shortcuts import render
from django.contrib.auth.models import User
from django.contrib.auth import authenticate
from rest_framework.decorators import api_view
from rest_framework.response import Response
from rest_framework import status
from owner.models import Player, News, Own, Tournament
from .serializers import PlayerSerializer, NewsSerializer, OwnSerializer, TournamentSerializer
# Create your views here.
@api_view(['POST',])
def delPlayerfromTournament(request):
    """Remove a player from a tournament's roster (best effort).

    Expects 'player_id' and 'tournament_name' in the request body; always
    answers 200 even when the player was not on the roster.
    """
    player_id = request.data.get('player_id')
    tournament_name = request.data.get('tournament_name')
    tnm = Tournament.objects.get(Tnm = tournament_name)
    player = Player.objects.get(pk=player_id)
    try:
        tnm.players.remove(player)
    except Exception:
        # Keep the original best-effort semantics, but no longer swallow
        # SystemExit/KeyboardInterrupt via a bare except.
        pass
    return Response(status = status.HTTP_200_OK)
@api_view(['POST',])
def add_player(request):
    """Validate and persist a new Player from the request body."""
    # The method guard is redundant under @api_view(['POST']); kept as-is.
    if request.method == "POST":
        player_serializer = PlayerSerializer(data = request.data)
        if not player_serializer.is_valid():
            # NOTE(review): debug print; prefer logging.
            print(player_serializer.errors)
            return Response(player_serializer.errors, status = status.HTTP_400_BAD_REQUEST)
        player_serializer.save()
        data = {
            'message': 'Player Added Successfully!',
            'data': player_serializer.data
        }
        return Response(data, status = status.HTTP_200_OK)
@api_view(['GET',])
def get_player(request):
    """Return every player, serialized as a JSON list."""
    # @api_view(['GET']) already rejects other methods, so the method guard
    # and the unused 'data' wrapper of the original were dead code.
    players = Player.objects.all()
    player_serializer = PlayerSerializer(players, many = True)
    return Response(player_serializer.data, status = status.HTTP_200_OK)
@api_view(['POST',])
def del_player(request):
    """Delete the player whose primary key is 'player_id' in the body.

    NOTE(review): raises Player.DoesNotExist (HTTP 500) for unknown ids.
    """
    instance = Player.objects.get(pk=request.data['player_id'])
    instance.delete()
    return Response(status = status.HTTP_200_OK)
@api_view(['POST',])
def add_news(request):
    """Validate and persist a News item from the request body."""
    if request.method == "POST":
        news_serializer = NewsSerializer(data = request.data)
        if not news_serializer.is_valid():
            # NOTE(review): debug print; prefer logging.
            print(news_serializer.errors)
            return Response(news_serializer.errors, status = status.HTTP_400_BAD_REQUEST)
        news_serializer.save()
        data = {
            'message': 'News Added Successfully!',
            'data': news_serializer.data
        }
        return Response(data, status = status.HTTP_200_OK)
@api_view(['GET',])
def get_news(request):
    """Return every news item, serialized as a JSON list."""
    # Dead method guard and unused 'data' wrapper removed (same cleanup as
    # the other get_* views).
    news = News.objects.all()
    news_serializer = NewsSerializer(news, many = True)
    return Response(news_serializer.data, status = status.HTTP_200_OK)
@api_view(['POST',])
def del_news(request):
    """Delete the news item whose primary key is 'id' in the body."""
    # NOTE(review): debug print; prefer logging.
    print(request.data)
    instance = News.objects.get(pk=request.data['id'])
    instance.delete()
    return Response(status = status.HTTP_200_OK)
@api_view(['POST',])
def add_own(request):
    """Validate and persist a new Owner from the request body."""
    if request.method == "POST":
        # NOTE(review): debug prints below; prefer logging.
        print(request.data)
        own_serializer = OwnSerializer(data = request.data)
        if not own_serializer.is_valid():
            print(own_serializer.errors)
            return Response(own_serializer.errors, status = status.HTTP_400_BAD_REQUEST)
        own_serializer.save()
        print(own_serializer.data)
        data = {
            'message': 'Owner Added Successfully!',
            'data': own_serializer.data
        }
        return Response(data, status = status.HTTP_200_OK)
@api_view(['GET',])
def get_own(request):
    """Return every owner, serialized as a JSON list."""
    # Dead method guard and unused 'data' wrapper removed.
    own = Own.objects.all()
    own_serializer = OwnSerializer(own, many = True)
    return Response(own_serializer.data, status = status.HTTP_200_OK)
@api_view(['POST',])
def del_own(request):
    """Delete all owners matching 'owner_id' in the body (no-op if none)."""
    # NOTE(review): debug print; prefer logging.
    print(request.data)
    Own.objects.filter(owner_id=request.data['owner_id']).delete()
    # instance.delete()
    return Response(status = status.HTTP_200_OK)
@api_view(['POST',])
def add_tournament(request):
    """Create a tournament and attach the listed players.

    Expects 'Ttype', 'Tnm', 'Sdate', 'Edate' and a 'list' of player ids.
    (Commented-out serializer scaffolding and debug prints removed.)
    """
    t = Tournament.objects.create(Ttype = request.data['Ttype'], Tnm = request.data['Tnm'], Sdate = request.data['Sdate'], Edate = request.data['Edate'])
    for player_id in request.data['list']:
        # NOTE(review): an unknown id raises Player.DoesNotExist (-> 500);
        # consider validating the list and answering 400 instead.
        t.players.add(Player.objects.get(player_id = player_id))
    data = {
        'message': 'Tournament Added Successfully!',
    }
    return Response(data, status = status.HTTP_200_OK)
@api_view(['GET',])
def get_tournament(request):
    """Return every verified tournament, serialized as a JSON list."""
    # Dead method guard and unused 'data' wrapper removed.
    tournament = Tournament.objects.filter(verified=True)
    tournament_serializer = TournamentSerializer(tournament, many = True)
    return Response(tournament_serializer.data, status = status.HTTP_200_OK)
@api_view(['POST',])
def del_tournament(request):
    """Delete all tournaments whose name matches body field 'id'."""
    # print(request.data)
    # Despite the key name 'id', the filter matches on the Tnm (name) field.
    Tournament.objects.filter(Tnm=request.data['id']).delete()
    # instance.delete()
    return Response(status = status.HTTP_200_OK)
@api_view(['POST',])
def login(request):
    """Authenticate an account by id + password, trying Player then Own.

    Returns {'id', 'message': 'player'|'owner'} on success, 400 otherwise.
    NOTE(review): passwords are compared in plain text against the database —
    consider Django's auth framework with hashed passwords.
    """
    email_id = request.data.get('email_id')
    password = request.data.get('password')
    try:
        user = Player.objects.get(player_id = email_id , password = password)
        return Response({'id': user.pk, 'message': 'player'}, status = status.HTTP_200_OK)
    except Exception:
        # Not a player (or lookup failed) — fall through to the owner lookup.
        pass
    try:
        user = Own.objects.get(owner_id = email_id , password = password)
        return Response({'id': user.pk, 'message': 'owner'}, status = status.HTTP_200_OK)
    except Exception:
        data = {
            'message': 'User Does not exist or Password Incorrect!'
        }
        return Response(data, status = status.HTTP_400_BAD_REQUEST)
# @api_view(['POST',])
# def add_tournament_player(request):
# player_id = request.data['player_id']
# player = Player.objects.get(player_id = player_id)
|
984,977 | 68072761cb1f50f751cb67db990545f14f89cd32 | # Generated by Django 2.2.16 on 2020-09-28 06:12
from django.db import migrations, models
import django.db.models.deletion
import uuid
class Migration(migrations.Migration):
    """Make payment.account nullable and alter the payment uuid default."""

    dependencies = [
        ('front', '0027_auto_20200928_0807'),
    ]

    operations = [
        migrations.AlterField(
            model_name='payment',
            name='account',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='front.Account', verbose_name='Аккаунт'),
        ),
        migrations.AlterField(
            model_name='payment',
            name='uuid',
            # NOTE(review): the default is a *fixed* UUID instance, so every new
            # Payment would get the same primary key.  The model should use the
            # callable uuid.uuid4 instead; fix the model and generate a new
            # migration rather than editing this snapshot.
            field=models.UUIDField(blank=True, default=uuid.UUID('566fd80c-e6a4-436a-b1df-ad3dbcdd2a2b'), primary_key=True, serialize=False, verbose_name='Идентификатор платежа в системе / Ключ идемпотентности'),
        ),
    ]
|
984,978 | bc4192b1cd779f9b5172754c43e07d84a9a38c62 | from player import Player
# from board import Board
import random
class IA(Player):
    """Minimax (alpha-beta) computer player "Berlin" for a 5x5 capture game.

    Phase 0 places pieces (preferring the board edges); phase 1 moves pieces
    and evaluates positions with a score difference plus the f1..f13 feature
    helpers below.
    """
    name = "Berlin"
    gameSize = 5
    no_win = 0                  # consecutive moves without a capture
    color = ["black", "white"]  # indexed by player position
    DEPTH = 1                   # minimax search depth
    turn = 0                    # placement turns played so far
    #declared here to prevent from initialization at each method calls
    corners = ((0,0), (0,4), (4,0), (4,4))
    borders = ((0,1), (0,2), (0,3), (1,0), (1,4), (2,0), (2,4), (3,0), (3,4), (4,1), (4,2), (4,3))
    centers = ((1,1), (1,2), (1,3), (2,1), (2,3), (3,1), (3,2), (3,3))
    horizontalEntrapmentCoords = ((0,0), (0,1), (0,2), (0,3), (0,4), (4,0), (4,1), (4,2), (4,3), (4,4))
    verticalEntrapmentCoords = ((0,0), (1,0), (2,0), (3,0), (4,0), (0,4), (1,4), (2,4), (3,4), (4,4))
    def __init__(self, position, gameSize):
        Player.__init__(self, position, gameSize)
    def play(self, dethToCover, board, step):
        """Dispatch to the placement (step 0) or movement (step 1) strategy."""
        if step == 0:
            a, b = self.playStep0(board)
            return a, b
        elif step == 1:
            a, b, c, d = self.playStep1(self.clone(board))
            return a, b, c, d
    def playOld(self, board, step):
        """Legacy strategy: first legal placement / first legal move found."""
        if(step == 0):
            for i in range(self.gameSize):
                for j in range(self.gameSize):
                    if(self.canPlayHere(board, step, i, j)):
                        return (i, j)
        if(step == 1):
            for i in range(self.gameSize):
                for j in range(self.gameSize):
                    if(self.canPlayHere(board, step, i, j)):
                        if board[i][j] == self.playerColor:
                            if len(self.getRealsMoves(board, i, j)) > 0:
                                print("ici", i, j, self.getRealsMoves(
                                    board, i, j)[0])
                                (c, d) = self.getRealsMoves(board, i, j)[0]
                                return (i, j, c, d)
        return -1
    def playRandom(self, board, step):
        """Fallback strategy: uniformly random legal placement or move."""
        playable = []
        if(step == 0):
            for i in range(self.gameSize):
                for j in range(self.gameSize):
                    if self.canPlayHere(board, step, i, j):
                        playable.append((i, j))
            choix = playable[random.randint(0, len(playable)-1)]
            return choix[0], choix[1]
        if(step == 1):
            origins = self.getMovingPiece(board, self.playerColor)
            origin = origins[random.randint(0, len(origins)-1)]
            destinations = self.getRealsMoves(board, origin[0], origin[1])
            destination = destinations[random.randint(0, len(destinations)-1)]
            print(origin[0], origin[1], destination[0], destination[1])
            return (origin[0], origin[1], destination[0], destination[1])
        return -1
    # Method called on step of initialisation
    # To ensure our pieces are on the sides (better approach or... maybe not haha)
    def playStep0(self, board):
        """Placement phase: prefer edge squares, else place randomly."""
        self.turn += 1
        if self.turn == 3:
            print(board)
        # NOTE(review): 'side' is computed but never used — TODO confirm intent.
        side = random.randint(0, 3)
        for i in range(self.gameSize):
            for j in range(self.gameSize):
                if self.canPlayHere(board, 0, i, 0):
                    return (i, 0)
                if self.canPlayHere(board, 0, 0, j):
                    return (0, j)
                if self.canPlayHere(board, 0, i, 4):
                    return (i, 4)
                if self.canPlayHere(board, 0, 4, j):
                    return (4, j)
        return self.playRandom(board, 0)
    # Method play called on step of moving pieces based on minimax with alpha beta pruning
    def playStep1(self, board):
        """Movement phase: pick the move whose resulting board maximises
        evaluatePosition() at depth DEPTH."""
        bestMove = None
        bestMoveScore = 0
        possibleBoards = []
        moves = []
        piecesCanMove = self.getMovingPiece(board, self.playerColor)
        for piece in piecesCanMove:
            piecesDestinations = self.getRealsMoves(board, piece[0], piece[1])
            for destination in piecesDestinations:
                move = piece[0], piece[1], destination[0], destination[1], self.playerColor
                moves.append(move)
                newBoard = self.clone(board)
                self.doMove(newBoard, move)
                possibleBoards.append(newBoard)
        # NOTE(review): assumes at least one legal move exists (IndexError otherwise).
        bestMove = moves[0]
        bestMoveScore = self.evaluatePosition(
            possibleBoards[0], float('-inf'), float('inf'), self.DEPTH, self.getOpponentColor())
        i = -1
        for aBoard in possibleBoards:
            i += 1
            # NOTE(review): 'i > 1' skips index 1, so the second move is never
            # scored — this looks like it should be 'i > 0'; TODO confirm.
            if i > 1:
                score = self.evaluatePosition(
                    aBoard, float('-inf'), float('inf'), self.DEPTH, self.getOpponentColor())
                if score > bestMoveScore:
                    bestMove = moves[i]
                    bestMoveScore = score
        w, x, y, z, color = bestMove
        return w, x, y, z
    # doMove simulate a movement on the board
    # Its implementation looks like the play method from ia_game_cli.py file with some updates
    def doMove(self, board, move):
        """Apply *move* = (from_r, from_c, to_r, to_c, color) to *board* in place."""
        a, b, c, d, playerColor = move
        if self.isPiece(board, a, b) and (c, d) in self.getRealsMoves(board, a, b):
            board[a][b] = None
            board[c][d] = playerColor
            captured = self.hasCaptured(board, c, d, playerColor)
            if len(captured) > 0:
                self.no_win = 0
                for pos in captured:
                    # NOTE(review): captured squares are set to the *string*
                    # 'None' while moved-from squares get the object None —
                    # TODO confirm this asymmetry is intentional.
                    board[pos[0]][pos[1]] = 'None'
            else:
                self.no_win += 1
        return
    # return the score (beta for this IA, alpha for the other IA) of the game at the depth "depth" with the state of board in "board"
    # playerColor indicate the current player (the one who can do some movement)
    def evaluatePosition(self, board, alpha, beta, depth, playerColor):
        """Alpha-beta minimax: opponent minimises, this player maximises."""
        if depth == 0:
            evaluation = self.evaluate(board)
            return evaluation
        if playerColor == self.getOpponentColor():
            # Minimising node (opponent to move).
            moves = []
            piecesCanMove = self.getMovingPiece(board, playerColor)
            for piece in piecesCanMove:
                destinations = self.getRealsMoves(board, piece[0], piece[1])
                for destination in destinations:
                    move = piece[0], piece[1], destination[0], destination[1], playerColor
                    moves.append(move)
            newBeta = beta
            for move in moves:
                successorBoard = self.clone(board)
                self.doMove(successorBoard, move)
                newBeta = min(newBeta, self.evaluatePosition(
                    successorBoard, alpha, beta, depth-1, self.playerColor))
                if newBeta <= alpha:
                    break
            return newBeta
        else:
            # Maximising node (this player to move).
            moves = []
            piecesCanMove = self.getMovingPiece(board, playerColor)
            for piece in piecesCanMove:
                destinations = self.getRealsMoves(board, piece[0], piece[1])
                for destination in destinations:
                    move = piece[0], piece[1], destination[0], destination[1], playerColor
                    moves.append(move)
            newAlpha = alpha
            for move in moves:
                successorBoard = self.clone(board)
                self.doMove(successorBoard, move)
                newAlpha = max(newAlpha, self.evaluatePosition(
                    successorBoard, alpha, beta, depth - 1, self.getOpponentColor()))
                if beta <= newAlpha:
                    break
            return newAlpha
    # Return a number which indicates how good is the board for this IA
    # We personnally do a simple difference between scores (but this is the method that must be improved, we think)
    def evaluate(self, board):
        """Score = our captures minus the opponent's captures.

        getScore(board, c) counts pieces of colour c that were captured,
        i.e. points scored *against* c — hence the crossed arguments below.
        """
        myScore = self.getScore(board, self.getOpponentColor())
        opponentScore = self.getScore(board, self.playerColor)
        return myScore - opponentScore
    # -----------------------------------------------------------
    # Evaluation features f1...f13                               |
    # -----------------------------------------------------------
    #corner domination
    def f1(self, board):
        """Corner domination in [-1, 1]: (ours - theirs) / 4."""
        opponentColor = self.getOpponentColor()
        # number of our player's pieces
        nbSelf = 0
        # number of opponent pieces
        nbOpponent = 0
        for corner in self.corners:
            if(board[corner[0]][corner[1]] == self.playerColor):
                nbSelf+=1
            elif(board[corner[0]][corner[1]] == opponentColor):
                nbOpponent+=1
        return (nbSelf - nbOpponent)/4
    #border domination
    def f2(self, board):
        """Border domination in [-1, 1]: (ours - theirs) / 12."""
        opponentColor = self.getOpponentColor()
        nbSelf = 0
        nbOpponent = 0
        for border in self.borders:
            if(board[border[0]][border[1]] == self.playerColor):
                nbSelf+=1
            elif(board[border[0]][border[1]] == opponentColor):
                nbOpponent+=1
        return (nbSelf - nbOpponent)/12
    #Horizontal clustering for playerColor (f3 for Berlin's color & f5 for his opponent)
    def f3(self, board):
        return self.numberPiecesAdjacentHoriz(board, self.playerColor)/12
    def f5(self, board):
        return self.numberPiecesAdjacentHoriz(board, self.getOpponentColor())/12
    #Vertical clustering for playerColor (f4 for Berlin's color & f6 for his opponent)
    def f4(self, board):
        return self.numberPiecesAdjacentVert(board, self.playerColor)/12
    def f6(self, board):
        return self.numberPiecesAdjacentVert(board, self.getOpponentColor())/12
    # Horizontal mass dist
    def f7(self, board):
        return abs(self.horizontalCenterMass(board, self.playerColor) - self.horizontalCenterMass(board, self.getOpponentColor()))/4
    # Vertical mass dist
    def f8(self, board):
        return abs(self.verticalCenterMass(board, self.playerColor) - self.verticalCenterMass(board, self.getOpponentColor()))/4
    #horizontal entrapment
    def f9(self, board):
        """Top/bottom row occupation balance in [-1, 1]."""
        opponentColor = self.getOpponentColor()
        nbSelf = 0
        nbOpponent = 0
        for square in self.horizontalEntrapmentCoords:
            if(board[square[0]][square[1]] == self.playerColor):
                nbSelf+=1
            elif(board[square[0]][square[1]] == opponentColor):
                nbOpponent+=1
        return (nbSelf - nbOpponent)/10
    #vertical entrapment
    def f10(self, board):
        """Left/right column occupation balance in [-1, 1]."""
        opponentColor = self.getOpponentColor()
        nbSelf = 0
        nbOpponent = 0
        for square in self.verticalEntrapmentCoords:
            if(board[square[0]][square[1]] == self.playerColor):
                nbSelf+=1
            elif(board[square[0]][square[1]] == opponentColor):
                nbOpponent+=1
        return (nbSelf - nbOpponent)/10
    # Counts number of Berlin's pieces vs. his opponent
    def f11(self, board):
        return (self.countPieces(board, self.playerColor) - self.countPieces(board, self.getOpponentColor()))/11
    #phase two starts
    #how many captures Black (us) will make on the first move in phase one
    def f12(self, board): # board is the board at the start of phase 2
        captured = 0
        # Collect the pieces that can move.
        piecesCanMove = self.getMovingPiece(board, self.playerColor)
        # If there are none (none next to the centre) no capture is possible;
        # otherwise compute the best achievable capture.
        # NOTE(review): '== 0' makes the loop below dead code (it iterates an
        # empty list) — this condition looks inverted; TODO confirm '> 0'.
        if(len(piecesCanMove) == 0):
            for piece in piecesCanMove:
                piecesDestinations = self.getRealsMoves(board, piece[0], piece[1])
                for destination in piecesDestinations:
                    newBoard = self.clone(board)
                    move = piece[0], piece[1], destination[0], destination[1], self.playerColor
                    result = self.doMoveForF12(newBoard, move)
                    if(result > captured):
                        captured = result
        return captured
    #black can start
    #returns 0 if the four squares around the middle square are occupied
    #by White (i.e. Black cannot make the first move), and
    #returns 1 otherwise.
    def f13(self):
        # NOTE(review): this tests a colour *string* for membership in a tuple
        # of coordinate tuples, so it is always False and the method always
        # returns 0 — the board state is never consulted; TODO fix.
        if(self.playerColor in self.centers):
            return 1
        else:
            return 0
    # -----------------------------------------------------------
    # Helpers functions                                          |
    # -----------------------------------------------------------
    def numberPiecesAdjacentHoriz(self, board, playerColor):
        """Count pieces of *playerColor* with an allied row-neighbour.

        NOTE(review): the elif means the i+1 neighbour is only examined when
        i-1 is out of range — TODO confirm both sides should not be checked.
        """
        numberHorizontallyAdjacent = 0
        for i in range(self.gameSize):
            for j in range(self.gameSize):
                if board[i][j] == playerColor:
                    # If the current piece is ours and has an allied neighbour
                    # in the adjacent row.
                    if 0 <= i-1 < self.gameSize:
                        if board[i-1][j] == playerColor:
                            numberHorizontallyAdjacent += 1
                    elif 0 <= i+1 < self.gameSize:
                        if board[i+1][j] == playerColor:
                            numberHorizontallyAdjacent += 1
        return numberHorizontallyAdjacent
    def numberPiecesAdjacentVert(self, board, playerColor):
        """Count pieces of *playerColor* with an allied column-neighbour.

        NOTE(review): same elif caveat as numberPiecesAdjacentHoriz.
        """
        numberVerticallyAdjacent = 0
        for i in range(self.gameSize):
            for j in range(self.gameSize):
                if board[i][j] == playerColor:
                    # If the current piece is ours and has an allied neighbour
                    # in the adjacent column.
                    if 0 <= j-1 < self.gameSize:
                        if board[i][j-1] == playerColor:
                            numberVerticallyAdjacent += 1
                    elif 0 <= j+1 < self.gameSize:
                        if board[i][j+1] == playerColor:
                            numberVerticallyAdjacent += 1
        return numberVerticallyAdjacent
    def horizontalCenterMass(self, board, playerColor):
        """Mean column index of *playerColor*'s pieces.

        NOTE(review): raises ZeroDivisionError when the colour has no pieces.
        """
        totalWeight = 0
        totalX = 0
        for i in range(self.gameSize):
            for j in range(self.gameSize):
                if(board[i][j] == playerColor):
                    totalX += j
                    totalWeight += 1
        return totalX/totalWeight
    def verticalCenterMass(self, board, playerColor):
        """Mean row index of *playerColor*'s pieces (same zero-piece caveat)."""
        totalWeight = 0
        totalY = 0
        for i in range(self.gameSize):
            for j in range(self.gameSize):
                if(board[i][j] == playerColor):
                    totalY += i
                    totalWeight += 1
        return totalY/totalWeight
    def getOpponentColor(self):
        return self.color[(self.position+1) % 2]
    # playerColor is the opponent's colour here
    def getScore(self, board, playerColor):
        """Return 12 minus the remaining pieces of *playerColor*, i.e. how
        many of that colour's pieces have been captured."""
        score = 0
        nbPiecesRestantes = 0
        for i in range(self.gameSize):
            for j in range(self.gameSize):
                if board[i][j] == playerColor:
                    nbPiecesRestantes += 1
        score = 12 - nbPiecesRestantes
        return score
    # count the number of pieces of a given color (playerColor)
    def countPieces(self, board, color):
        nbPieces = 0
        for i in range(self.gameSize):
            for j in range(self.gameSize):
                if board[i][j] == color:
                    nbPieces += 1
        return nbPieces
    #Move piece for f12 function
    def doMoveForF12(self, board, move):
        """Apply *move* and return what hasCaptured() reported.

        NOTE(review): returns a list of captured squares (or None for an
        illegal move) while f12 compares the result to an int with '>' —
        this probably should return len(captured); TODO confirm.
        """
        a, b, c, d, playerColor = move
        if self.isPiece(board, a, b) and (c, d) in self.getRealsMoves(board, a, b):
            board[a][b] = None
            board[c][d] = playerColor
            captured = self.hasCaptured(board, c, d, playerColor)
            return captured
|
984,979 | 701c24a5c1e29f8eedf25c396a08d40b8f4c3874 | from memory_profiler import profile
import numpy as np
import time
import sys
src2index = dict()
index2src = dict()
def get_index(data_file):
    """Build the raw-id <-> dense-index maps for every node in `data_file`.

    Fills the module-level dicts:
      src2index: original node id -> dense index
      index2src: dense index      -> original node id
    """
    nodes = set()
    with open(data_file, 'r', encoding='utf-8') as f:
        for line in f:
            src, dst = line.split()
            nodes.add(int(src))
            nodes.add(int(dst))
    for idx, node in enumerate(nodes):
        src2index[node] = idx
        index2src[idx] = node
def get_nodes_num():
    """Return the number of distinct nodes (valid only after get_index ran)."""
    return len(src2index)
def get_out_degree(data_file):
    """Return the out-degree of every node, indexed by its dense index.

    :param data_file: edge-list file with one "src dst" pair per line
    :return: numpy array where entry k is the out-degree of dense node k
    """
    out_degree = np.zeros(get_nodes_num())
    with open(data_file, 'r', encoding='utf-8') as f:
        for line in f:
            # first column is the source node; count one outgoing edge for it
            out_degree[int(src2index[int(line.split()[0])])] += 1
    return out_degree
def load_data(path):
    """Build the dense column-stochastic transition matrix from an edge list.

    data[i][j] is the probability of stepping from node j to node i
    (1 / out_degree(j) for each edge j -> i).  Dangling nodes (out-degree 0)
    get a uniform column so every column sums to 1.

    NOTE(review): the matrix is dense, O(N^2) memory — only suitable for
    small graphs.
    """
    get_index(path)
    out_degree = get_out_degree(path)
    data = np.zeros(shape=(get_nodes_num(), get_nodes_num()))
    f = open(path, 'r')
    for line in f:
        x, y = line.split()
        # edge x -> y: column x distributes mass uniformly over its out-edges
        data[src2index[int(y)]][src2index[int(x)]] = 1 / out_degree[src2index[int(x)]]
    f.close()
    # smooth dangling nodes to a uniform distribution
    for i in range(len(out_degree)):
        if out_degree[i] == 0:
            data[:, i] = 1.0 / len(out_degree)
    return data
def pagerank(M, num_iterations: int = 100, d: float = 0.85):
    """PageRank power iteration.

    Parameters
    ----------
    M : numpy array
        Column-stochastic matrix where M_i,j is the probability of stepping
        from node j to node i (each column sums to 1).
    num_iterations : int, optional
        Maximum number of iterations, by default 100.
    d : float, optional
        Damping factor, by default 0.85.

    Returns
    -------
    numpy array
        Rank vector v (entries in [0, 1], sums to 1).  Iteration stops early
        once the Euclidean norm of the update drops below 1e-5.
    """
    # FIX: derive the problem size from M itself instead of the module-level
    # get_nodes_num() — the function no longer depends on hidden global state.
    n = M.shape[0]
    v = np.ones(n) / n
    for _ in range(num_iterations):
        v_new = M @ v * d + (1 - d) / n
        delta = np.linalg.norm(v - v_new)
        v = v_new
        if delta < 1e-5:
            break
    return v
# @profile
def get_top():
    """Run PageRank on WikiData.txt and write the top-100 (id, rank) pairs
    to res.txt (one "id rank" pair per line), also echoing them to stdout.
    """
    M = load_data("WikiData.txt")
    print(sys.getsizeof(M))
    v = pagerank(M, 100, 0.85)
    # dense indices of the 100 largest ranks, in descending rank order
    sort_index = v.argsort()[::-1][:100]
    v.sort()
    top_vec = v[::-1][:100]
    top_val = []
    for i in range(100):
        # map the dense index back to the original node id
        top_val.append([index2src[sort_index[i]], top_vec[i]])
    res = open('res.txt', 'w')
    for i in range(100):
        print(top_val[i][0], top_val[i][1])
        res.write(str(top_val[i][0]) + ' ' + str(top_val[i][1]) + '\n')
    res.close()
# @profile()
def main():
    """Time the end-to-end PageRank run and report the elapsed seconds."""
    started = time.perf_counter()
    get_top()
    finished = time.perf_counter()
    print('time cost: ', str(finished - started), 's')
if __name__ == "__main__":
main() |
984,980 | 552ccbf042245d16155ad4fc57a9967dbfa433b0 | import requests as req
import json
from requests.auth import HTTPBasicAuth
import base64
from cryptography.fernet import Fernet
url = "http://localhost:8083/login"
'''
filepath = input("Enter the file name:")
url = url+filepath
response = req.post(url)
#print("File: ",response.json())
print("Response: ",response.text)
'''
def client_proxy(method, filename):
    """Perform a `read` or `write` of `filename` through the distributed
    file system: resolve the file via the directory server, then talk to the
    file server (and, for writes, the lock server) on the client's behalf.

    All requests carry the Kerberos-style `ticket` and every payload is
    encrypted with the module-level `session_key` obtained at login.
    Wire format of each message: [digit-count][length][ciphertext][ticket].
    """
    # Calls the directory server.
    # The filename is encrypted. The ticket is appended with the filename and send.
    directory_service_url = "http://localhost:8080/file/"
    message_encrypted = encrypt_message(filename)
    print("------------")
    print("message encrypted: ",message_encrypted.decode())
    print("------------")
    print("Ticket: ",ticket.decode())
    print("--------------")
    message_to_be_send = str(len(str(len(message_encrypted)))) + str(len(message_encrypted)) + str(message_encrypted.decode()) + str(ticket.decode())
    directory_service_url = directory_service_url+message_to_be_send
    response = req.get(directory_service_url)
    print("Response: ",response.text)
    filepath = response.text
    # The directory server answers with [port][filepath], both encrypted.
    (port_encr,filepath_encr) = get_filepath_port(filepath)
    filepath_decrypted = decrypt_message_from_server(filepath_encr).decode()
    if len(port_encr) > 0:
        port_decrypted = decrypt_message_from_server(port_encr).decode()
        print("----------")
        print("Port: ",port_decrypted)
        print("----------")
        print("Filepath: ",filepath_decrypted)
        print("----------")
    #Check if the method is read or write
    if len(port_encr) > 0:
        if method == "read":
            # Read path: fetch the encrypted file content from the file server.
            file_server_url = "http://localhost:"+port_decrypted+"/filepath/"
            filepath_encrypted = encrypt_message(filepath_decrypted)
            print("------------")
            print("filepath encrypted: ",filepath_encrypted.decode())
            print("------------")
            filepath_to_be_send = str(len(str(len(filepath_encrypted)))) + str(len(filepath_encrypted)) + str(filepath_encrypted.decode()) + str(ticket.decode())
            file_server_url = file_server_url+filepath_to_be_send
            response = req.get(file_server_url)
            print("Response: ",response.text)
            filecontent = response.text
            filecontent_decrypted = decrypt_message_from_server(filecontent).decode()
            print("----------")
            print("File content: ",filecontent_decrypted)
            print("----------")
        else:
            # Write path: check the lock, acquire it, write, then release.
            payload = input("Enter content to be written into file: ")
            lock_server_url = "http://localhost:8082/file/lock/"
            filename_encrypted = encrypt_message(filename)
            print("------------")
            print("filename encrypted: ",filename_encrypted.decode())
            print("------------")
            print("Ticket: ",ticket.decode())
            print("--------------")
            filename_to_be_send = str(len(str(len(filename_encrypted)))) + str(len(filename_encrypted)) + str(filename_encrypted.decode()) + str(ticket.decode())
            lock_server_url = lock_server_url+filename_to_be_send
            response = req.get(lock_server_url)
            print("Response: ",response.text)
            lock = response.text
            lock_decrypted = decrypt_message_from_server(lock).decode()
            print("----------")
            print("Lock: ",lock_decrypted)
            print("----------")
            # The lock server echoes the filename back when the file is free.
            if lock_decrypted == filename:
                # to lock the file
                lock_server_url = "http://localhost:8082/file/lock/"
                filename_encrypted = encrypt_message(filename)
                print("------------")
                print("filename encrypted: ",filename_encrypted.decode())
                print("------------")
                print("Ticket: ",ticket.decode())
                print("--------------")
                filename_to_be_send = str(len(str(len(filename_encrypted)))) + str(len(filename_encrypted)) + str(filename_encrypted.decode()) + str(ticket.decode())
                lock_server_url = lock_server_url+filename_to_be_send
                response = req.post(lock_server_url)
                print("Response: ",response.text)
                tolock = response.text
                tolock_decrypted = decrypt_message_from_server(tolock).decode()
                print("----------")
                print("Lock: ",tolock_decrypted)
                print("----------")
                # write into file
                file_server_url = "http://localhost:"+port_decrypted+"/filepath/"
                filepath_encrypted = encrypt_message(filepath_decrypted)
                print("------------")
                print("filepath encrypted: ",filepath_encrypted.decode())
                print("------------")
                filepath_to_be_send = str(len(str(len(filepath_encrypted)))) + str(len(filepath_encrypted)) + str(filepath_encrypted.decode()) + str(ticket.decode())
                file_server_url = file_server_url+filepath_to_be_send
                # encrypting payload to be written into file
                payload_encrypted = encrypt_message(payload)
                print("------------")
                print("payload encrypted: ",payload_encrypted.decode())
                print("------------")
                print("Ticket: ",ticket.decode())
                print("--------------")
                payload_to_be_send = str(len(str(len(payload_encrypted)))) + str(len(payload_encrypted)) + str(payload_encrypted.decode()) + str(ticket.decode())
                response = req.post(file_server_url,data=payload_to_be_send)
                print("Response: ",response.text)
                filecontent = response.text
                filecontent_decrypted = decrypt_message_from_server(filecontent).decode()
                print("----------")
                print("File write: ",filecontent_decrypted)
                print("----------")
                # to unlock lock
                lock_server_url = "http://localhost:8082/file/unlock/"
                filename_encrypted = encrypt_message(filename)
                print("------------")
                print("filename encrypted: ",filename_encrypted.decode())
                print("------------")
                print("Ticket: ",ticket.decode())
                print("--------------")
                filename_to_be_send = str(len(str(len(filename_encrypted)))) + str(len(filename_encrypted)) + str(filename_encrypted.decode()) + str(ticket.decode())
                lock_server_url = lock_server_url+filename_to_be_send
                response = req.post(lock_server_url)
                print("Response: ",response.text)
                unlock = response.text
                unlock_decrypted = decrypt_message_from_server(unlock).decode()
                print("----------")
                print("UnLock: ",unlock_decrypted)
                print("----------")
            '''
            # to unlock lock
            lock_server_url = "http://localhost:8082/file/unlock/"
            filename_encrypted = encrypt_message(filename)
            print("------------")
            print("filename encrypted: ",filename_encrypted.decode())
            print("------------")
            print("Ticket: ",ticket.decode())
            print("--------------")
            filename_to_be_send = str(len(str(len(filename_encrypted)))) + str(len(filename_encrypted)) + str(filename_encrypted.decode()) + str(ticket.decode())
            lock_server_url = lock_server_url+filename_to_be_send
            response = req.post(lock_server_url)
            print("Response: ",response.text)
            unlock = response.text
            unlock_decrypted = decrypt_message_from_server(unlock).decode()
            print("----------")
            print("UnLock: ",unlock_decrypted)
            print("----------")
            '''
    else:
        msg = method + " is not possible!"
        print("----------")
        print(msg)
        print("----------")
def encrypt_message(message):
    """Encrypt `message` with the session key negotiated at login.

    The 16-byte session key is doubled to 32 bytes and urlsafe-base64
    encoded to form a valid Fernet key.
    """
    fernet_key = base64.urlsafe_b64encode(session_key + session_key)
    return Fernet(fernet_key).encrypt(message.encode())
def decrypt_message_from_server(message):
    """Decrypt a server reply using the shared session key.

    Mirrors encrypt_message: the 16-byte session key is doubled and
    urlsafe-base64 encoded to rebuild the Fernet key.
    """
    fernet_key = base64.urlsafe_b64encode(session_key + session_key)
    return Fernet(fernet_key).decrypt(message.encode())
def get_filepath_port(message):
    """Split a directory-server reply into (encrypted_port, encrypted_filepath).

    Wire format: [digit-count][port-length][port][filepath], where the first
    character says how many digits the port-length field occupies.
    """
    print("----------")
    print("message: ",message)
    print("-----------")
    digit_count = int(message[0])
    port_length = int(message[1:digit_count + 1])
    port_start = digit_count + 1
    port_end = port_start + port_length
    encrypted_port = message[port_start:port_end]
    encrypted_filepath = message[port_end:]
    print("----------")
    print("number of digits: ",digit_count)
    print("-----------")
    print("port length: ",port_length)
    print("----------")
    print("encrypted port: ",encrypted_port)
    print("-----------")
    print("Encrypted filepath:", encrypted_filepath)
    return (encrypted_port,encrypted_filepath)
# HTTP_AUTHORIZATION
# Log in with basic auth, then decrypt the returned token with a key derived
# from the user's password to recover the session key and ticket.
username = input("Enter the user name: ")
password = input("Enter password: ")
response = req.get(url, auth=HTTPBasicAuth(username,password))
print("Response: ",response.text)
if response.text == "Not Authorized!":
    print("Username and password are incorrect!")
else:
    token = response.text
    # Fernet requires a urlsafe-base64 encoding of exactly 32 bytes, so the
    # password is truncated to 32 characters or right-padded with '0's.
    # BUG FIX: the original appended the padded/truncated copy to the FULL
    # password; whenever len(password) >= 32 the derived key was longer than
    # 32 bytes and Fernet rejected it.
    if len(password) >= 32:
        encryption_key_with_password = password[:32]
    else:
        encryption_key_with_password = password + '0' * (32 - len(password))
    encryption_key_with_password_encoded = base64.urlsafe_b64encode(encryption_key_with_password.encode())
    cipher = Fernet(encryption_key_with_password_encoded)
    token_decrypted = cipher.decrypt(token.encode())
    print("Token Received: ",token_decrypted)
    print("----------")
    # Token layout: 16-byte session key followed by the ticket.
    session_key = token_decrypted[:16]
    ticket = token_decrypted[16:]
    print("------------")
    print("Session_key: ",session_key)
    print("------------")
    print("Ticket: ",ticket)
    method = input("Enter method: ")
    filename = input("Enter filename: ")
    client_proxy(method, filename)
|
984,981 | 41544b61fa5f5f4a0f91f31d5a5fd403890455f7 | '''
--- Directions
Write a program that returns a list of all
primes up to a designated max.
--- Example
PrimeCounter(25)
[2, 3, 5, 7, 11, 13, 17, 19, 23]
PrimeCounter(11)
[2, 3, 5, 7, 11]
PrimeCounter(1)
[]
'''
def PrimeCounter(max_val):
    """Return the list of all primes up to and including `max_val`.

    Returns [] when max_val < 2.  Uses trial division against the primes
    found so far, stopping once the divisor's square exceeds the candidate —
    the original scanned the entire prime list for every candidate.
    """
    primes = []
    for candidate in range(2, max_val + 1):
        is_prime = True
        for p in primes:
            if p * p > candidate:
                # no divisor <= sqrt(candidate) found: candidate is prime
                break
            if candidate % p == 0:
                is_prime = False
                break
        if is_prime:
            primes.append(candidate)
    return primes
|
984,982 | d17cdeff04c06e2217540f7386c3ecb822610b8b | #! /usr/bin/env python
# Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" Generate beam search visualization.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import argparse
import json
import os
import shutil
from string import Template
import networkx as nx
import numpy as np
from networkx.readwrite import json_graph
PARSER = argparse.ArgumentParser(
description="Generate beam search visualizations")
PARSER.add_argument(
"-d", "--data", type=str, required=True,
help="path to the beam search data file")
PARSER.add_argument(
"-o", "--output_dir", type=str, required=True,
help="path to the output directory")
PARSER.add_argument(
"-v", "--vocab", type=str, required=False,
help="path to the vocabulary file")
ARGS = PARSER.parse_args()
HTML_TEMPLATE = Template("""
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8">
<title>Beam Search</title>
<link rel="stylesheet" type="text/css" href="tree.css">
<script src="http://d3js.org/d3.v3.min.js"></script>
</head>
<body>
<script>
var treeData = $DATA
</script>
<script src="tree.js"></script>
</body>
</html>""")
def _add_graph_level(graph, level, parent_ids, names, scores):
    """Add one beam-search time step (`level`) of nodes to the graph.

    Node (level, i) represents beam slot i at this step and is connected to
    its parent slot (level - 1, parent_ids[i]) from the previous step.

    NOTE(review): `graph.node[...]` is the networkx < 2.4 attribute API;
    newer networkx releases renamed it to `graph.nodes[...]`.
    """
    for i, parent_id in enumerate(parent_ids):
        new_node = (level, i)
        parent_node = (level - 1, parent_id)
        graph.add_node(new_node)
        graph.node[new_node]["name"] = names[i]
        graph.node[new_node]["score"] = str(scores[i])
        graph.node[new_node]["size"] = 100
        # Add an edge to the parent
        graph.add_edge(parent_node, new_node)
def create_graph(predicted_ids, parent_ids, scores, vocab=None):
    """Build a directed beam-search tree rooted at node (0, 0), named "START".

    predicted_ids, parent_ids and scores are per-step arrays (first axis is
    the time step); token ids are shown verbatim unless a `vocab` list maps
    them to strings.
    """
    def get_node_name(pred):
        # fall back to the raw id when no vocabulary was supplied
        return vocab[pred] if vocab else str(pred)
    seq_length = predicted_ids.shape[0]
    graph = nx.DiGraph()
    for level in range(seq_length):
        names = [get_node_name(pred) for pred in predicted_ids[level]]
        _add_graph_level(graph, level + 1, parent_ids[level], names, scores[level])
    graph.node[(0, 0)]["name"] = "START"
    return graph
def main():
    """Render one HTML beam-search visualization per example in ARGS.data.

    Loads the numpy archive produced during decoding, builds a tree per
    example, serializes it to JSON and writes it into the d3 HTML template.
    """
    beam_data = np.load(ARGS.data)
    # Optionally load vocabulary data
    vocab = None
    if ARGS.vocab:
        with open(ARGS.vocab) as file:
            vocab = file.readlines()
        vocab = [_.strip() for _ in vocab]
        # special tokens appended after the file's entries, matching the
        # id layout used at decode time
        vocab += ["UNK", "SEQUENCE_START", "SEQUENCE_END"]
    if not os.path.exists(ARGS.output_dir):
        os.makedirs(ARGS.output_dir)
    # Copy required files
    shutil.copy2("./bin/tools/beam_search_viz/tree.css", ARGS.output_dir)
    shutil.copy2("./bin/tools/beam_search_viz/tree.js", ARGS.output_dir)
    for idx in range(len(beam_data["predicted_ids"])):
        predicted_ids = beam_data["predicted_ids"][idx]
        parent_ids = beam_data["beam_parent_ids"][idx]
        scores = beam_data["scores"][idx]
        graph = create_graph(
            predicted_ids=predicted_ids,
            parent_ids=parent_ids,
            scores=scores,
            vocab=vocab)
        json_str = json.dumps(
            json_graph.tree_data(graph, (0, 0)),
            ensure_ascii=False)
        html_str = HTML_TEMPLATE.substitute(DATA=json_str)
        # one zero-padded HTML file per example
        output_path = os.path.join(ARGS.output_dir, "{:06d}.html".format(idx))
        with open(output_path, "w") as file:
            file.write(html_str)
        print(output_path)
|
984,983 | 06996c6a3dca6f1838d85ee336564c2906a70736 | import sys
from antlr4 import *
from JeleniepLexer import JeleniepLexer
from JeleniepParser import JeleniepParser
from JeleniepListener import JeleniepListener
def main(argv):
    """Lex, parse and walk the file named in argv[1] with the Jeleniep grammar."""
    input_stream = FileStream(argv[1])
    lexer = JeleniepLexer(input_stream)
    stream = CommonTokenStream(lexer)
    parser = JeleniepParser(stream)
    # `prog` is the grammar's start rule
    tree = parser.prog()
    printer = JeleniepListener(argv[1])
    walker = ParseTreeWalker()
    walker.walk(printer, tree)
if __name__ == '__main__':
main(sys.argv)
|
984,984 | 1b1fd7e5c891e0ebb76af2a107d715f1e92d0292 | #coding=utf-8
# Import the converted model's class
from VOC0712Plus.VOC0712Plus import VOC0712Plus
import tensorflow as tf
from utils.nms_wrapper import nms
from jade import *
from layers.transformed_layer import transformed_image_tf
import argparse
class Refinedet512Model():
    """TensorFlow wrapper around a RefineDet-512 detector loaded from .npy weights."""

    def __init__(self,args):
        # args: argparse namespace with model_path, num_classes, label_map_path
        self.model_path = args.model_path
        self.num_classes = args.num_classes
        self.categories,self.label_map = ReadProTxt(args.label_map_path)
        self.net,self.sess = self.load_model()

    def load_model(self):
        """Build the graph (input -> resize to 512 -> network) and load weights."""
        sess = tf.Session()
        input = tf.placeholder(tf.float32, (None, None, 3), 'input')
        input = transformed_image_tf(input, default_size=512)
        net = VOC0712Plus({'data': input})
        net.load(self.model_path, sess)
        return net, sess

    def get_label_map(self):
        # NOTE(review): self.label_map_path is never assigned in __init__
        # (only args.label_map_path is read), so this method raises
        # AttributeError; it also discards its result — confirm it is unused.
        with open(self.label_map_path,'r') as f:
            results = f.readline()

    def predict(self,img,threshold=0.6):
        """Run detection on an image (path or BGR array).

        Returns five values: (bboxes, label_texts, class_ids, scores, c_dets).
        NOTE(review): the `threshold` parameter is unused — both the score
        cut and the NMS threshold are hard-coded to 0.45.
        """
        if type(img) == str:
            img = cv2.imread(img)
        boxes, scores = self.sess.run(self.net.get_output(), feed_dict={'input:0': img})
        # scale normalized boxes back to pixel coordinates (w, h, w, h)
        scale = ([img.shape[1], img.shape[0],
                  img.shape[1], img.shape[0]])
        boxes = boxes[0]
        scores = scores[0]
        boxes *= scale
        label_text = []
        labels = []
        bboxes_out = []
        scores_out = []
        classes_out = []
        # scale each detection back up to the image
        for j in range(1, self.num_classes+1):
            inds = np.where(scores[:, j] > 0.45)[0]
            c_bboxes = boxes[inds]
            c_scores = scores[inds, j]
            c_dets = np.hstack((c_bboxes, c_scores[:, np.newaxis])).astype(
                np.float32, copy=False)
            keep = nms(c_dets, 0.45, force_cpu=True)
            c_dets = c_dets[keep, :]
            for i in range(len(c_dets)):
                box = [c_dets[i][0], c_dets[i][1], c_dets[i][2], c_dets[i][3]]
                bboxes_out.append(box)
                scores_out.append(c_dets[i][4])
                classes_out.append(j)
        for cls_id in classes_out:
            if cls_id in self.categories:
                class_name = self.categories[cls_id]['name']
                label_text.append(class_name)
        return bboxes_out,label_text,classes_out,scores_out,c_dets
if __name__ == '__main__':
    # Demo entry point: build the model from CLI args and show detections
    # on a sample image.
    parser = argparse.ArgumentParser()
    parser.add_argument('--gpu_id', type=int, default=0)
    parser.add_argument('--model_path', type=str, default="models/Refinedet.npy")
    parser.add_argument('--num_classes', type=int, default=3)
    parser.add_argument('--label_map_path',type=str,default="/home/jade/label_map/hand_gesture_label_map.pbtxt")
    args = parser.parse_args()
    refinedetModel = Refinedet512Model(args)
    image = cv2.imread(
        "/home/jade/Data/HandGesture/done/v1_2018-12-20_14-22-21/JPEGImages/0bfaee80-0450-11e9-a71b-88d7f6413e60.jpg")
    # BUG FIX: predict() returns five values (..., c_dets); unpacking into
    # four raised "too many values to unpack" at runtime.
    bbooxes, label_text, classes, scores, _c_dets = refinedetModel.predict(image)
    CVShowBoxes(image, bbooxes, label_text, classes, scores, waitkey=True)
984,985 | a539f36745ccab2cb88c098c3e8f259f6a750196 | import sys
def is_tidy(n):
    """Return True when n's decimal digits are non-decreasing left to right."""
    digits = [int(ch) for ch in str(n)]
    return all(a <= b for a, b in zip(digits, digits[1:]))
def last_tidy(n):
    """Return the largest tidy number <= n (0 when n <= 0).

    A tidy number has non-decreasing digits.  The original counted down from
    n testing every candidate (O(n) work); this scans the digits right to
    left instead: whenever a digit exceeds its right neighbour, decrement it
    and remember that everything to its right must become 9 — O(digits).
    """
    if n <= 0:
        # matches the old brute-force fallback for non-positive input
        return 0
    digits = list(str(n))
    nine_from = len(digits)  # first position to overwrite with '9'
    for i in range(len(digits) - 1, 0, -1):
        if digits[i - 1] > digits[i]:
            digits[i - 1] = str(int(digits[i - 1]) - 1)
            nine_from = i
    for i in range(nine_from, len(digits)):
        digits[i] = '9'
    return int(''.join(digits))
def parser(filename):
    """Read a Code-Jam-style input file (count, then one number per line)
    and write "Case #k: <answer>" lines to the matching *.out file.
    """
    with open(filename.replace('in','out'), 'w') as o:
        with open(filename) as f:
            cases = int(f.readline())
            print("{} cases".format(cases))
            for i in range(cases):
                n = f.readline().replace('\n','')
                o.write('Case #{}: {}\n'.format(i+1, last_tidy(int(n))))
if __name__ == "__main__":
parser(sys.argv[1])
|
984,986 | 051b665eeae501467103dd83f1637e34d10eba30 | # -*- coding: utf-8 -*-
import os
import pandas as pd
import nltk
from tools import proc_text, split_train_test, get_word_list_from_data, extract_feat_from_data, cal_acc
from nltk.text import TextCollection
from sklearn.naive_bayes import GaussianNB
from wordcloud import WordCloud
import matplotlib.pyplot as plt # 图像展示库
dataset_path = './dataset'
text_filenames = ['0_simplifyweibo.txt', '1_simplifyweibo.txt',
'2_simplifyweibo.txt', '3_simplifyweibo.txt']
# 原始数据的csv文件
output_text_filename = 'raw_weibo_text.csv'
# 清洗好的文本数据文件
output_cln_text_filename = 'clean_weibo_text.csv'
# 处理和清洗文本数据的时间较长,通过设置is_first_run进行配置
# 如果是第一次运行需要对原始文本数据进行处理和清洗,需要设为True
# 如果之前已经处理了文本数据,并已经保存了清洗好的文本数据,设为False即可
is_first_run = False
def read_and_save_to_csv():
    """
    Read the raw text files and save label + text rows into one csv.
    """
    # One DataFrame per input file, collected here before concatenation.
    text_w_label_df_lst = []
    # Iterate over every weibo text file
    for text_filename in text_filenames:
        # Build the full file path
        text_file = os.path.join(dataset_path, text_filename)
        # The label (0, 1, 2 or 3) is the first character of the file name
        label = int(text_filename[0])
        # Read the text file
        with open(text_file, 'r', encoding='utf-8') as f:
            # Split on line breaks (\n, \r, \r\n); returns a list of lines
            lines = f.read().splitlines()
            # One label per line of text
            labels = [label] * len(lines)
            # Series holding the current file's lines
            text_series = pd.Series(lines)
            # Series holding the matching labels
            label_series = pd.Series(labels)
            # concat merges the two Series column-wise into one DataFrame
            text_w_label_df = pd.concat([label_series, text_series], axis=1)
            # Collect every file's DataFrame in the same list
            text_w_label_df_lst.append(text_w_label_df)
    result_df = pd.concat(text_w_label_df_lst, axis=0)
    # Save as csv
    # Name the columns: first is the label, second the text
    result_df.columns = ['label', 'text']
    # Write the combined dataset to disk
    result_df.to_csv(os.path.join(dataset_path, output_text_filename), index=None, encoding='utf-8')
def run_main():
    """
    Main pipeline: clean the text, split train/test, extract tf-idf
    features over the most frequent words, train Naive Bayes and report
    accuracy.
    """
    # 1. Read, process, clean and prepare the data
    if is_first_run:
        print('处理清洗文本数据中...', end=' ')
        # On the first run the raw text must be processed and cleaned:
        # read the raw files and save label + text to csv
        read_and_save_to_csv()
        # Read the processed csv and build the dataset
        text_df = pd.read_csv(os.path.join(dataset_path, output_text_filename),
                              encoding='utf-8')
        # Clean each text entry
        text_df['text'] = text_df['text'].apply(proc_text)
        # Filter out empty strings / blank rows
        text_df = text_df[text_df['text'] != '']
        # Persist the cleaned text; preprocessing is done
        text_df.to_csv(os.path.join(dataset_path, output_cln_text_filename),
                       index=None, encoding='utf-8')
        print('完成,并保存结果。')
    # 2. Split into train and test sets
    print('加载处理好的文本数据')
    clean_text_df = pd.read_csv(os.path.join(dataset_path, output_cln_text_filename),
                                encoding='utf-8')
    # Split train/test:
    # 80% of each sentiment class goes to the training set
    train_text_df, test_text_df = split_train_test(clean_text_df)
    # Show basic class statistics for both sets
    print('训练集中各类的数据个数:', train_text_df.groupby('label').size())
    print('测试集中各类的数据个数:', test_text_df.groupby('label').size())
    # 3. Feature extraction
    # Word-frequency based vocabulary size
    n_common_words = 200
    # Count word frequencies over the training set
    print('统计词频...')
    # All words appearing in the training data
    all_words_in_train = get_word_list_from_data(train_text_df)
    # Frequency distribution
    fdisk = nltk.FreqDist(all_words_in_train)
    # Top-200 words and their counts:
    # this becomes the "common words" feature vocabulary
    common_words_freqs = fdisk.most_common(n_common_words)
    # Set the font, otherwise Chinese characters cannot be rendered
    wc = WordCloud(font_path="./simhei.ttf")
    wc.generate_from_frequencies({word: count for word, count in common_words_freqs})
    # Show the word cloud
    plt.imshow(wc)
    # Hide the axes
    plt.axis('off')
    # Display the figure
    plt.show()
    # Save the word cloud next to this script
    wc.to_file(os.path.join(os.path.dirname(__file__), "wordcloud.jpg"))
    print('出现最多的{}个词是:'.format(n_common_words))
    for word, count in common_words_freqs:
        print('{}: {}次'.format(word, count))
    # Extract features on the training set;
    # the text column is passed as a list
    text_collection = TextCollection(train_text_df['text'].values.tolist())
    # Extract features for train and test samples:
    # _X holds the tf-idf value of each common word per row, _y the sentiment
    print('训练样本提取特征...', end=' ')
    train_X, train_y = extract_feat_from_data(train_text_df, text_collection, common_words_freqs)
    print('完成')
    print()
    print('测试样本提取特征...', end=' ')
    test_X, test_y = extract_feat_from_data(test_text_df, text_collection, common_words_freqs)
    print('完成')
    # 4. Train a Naive Bayes model
    print('训练模型...', end=' ')
    # Gaussian Naive Bayes classifier
    gnb = GaussianNB()
    # Fit on the training features
    gnb.fit(train_X, train_y)
    print('完成')
    print()
    # 5. Predict
    print('测试模型...', end=' ')
    # Predict labels for the test features
    test_pred = gnb.predict(test_X)
    # test_pred : ndarray : array([3., 3., 3., 2., 3., 3., 3., 0., 3., 3., 3., 2., 1. .....])
    print('完成')
    # Report accuracy
    print('准确率:', cal_acc(test_y, test_pred))
    # test_y : ndarray : array([3., 3., 3., 2., 3., 3., 3., 0., 3., 3., 3., 2., 1. .....])
if __name__ == '__main__':
run_main()
|
984,987 | 0c8ff3250c3a441ec815af1b96763e35d8583a27 | n = int(input())
p = list(map(int, input().split()))
minp = 2*10**5
ans = 0
for i in range(n):
if minp >= p[i]:
minp = p[i]
ans += 1
print(ans) |
984,988 | 740a8297ebd8671c5b8a915146592620400ded56 | from django.contrib import admin
from django.urls import path, include
from . import views
# URL routes for the projects app: one listing page per category plus a
# slug-addressed detail page per category.
urlpatterns = [
    path('pythonprojects/', views.pythonprojects, name='pythonprojects'),
    path('webdevprojects/', views.webdev, name='webdev'),
    path('mlprojects/', views.mlprojects, name='mlprojects'),
    path('pythonprojects/<str:slug>', views.pythonposts, name='pythonposts'),
    path('webdevprojects/<str:slug>', views.webdevposts, name='webdevposts'),
    # NOTE(review): route name 'mlprojectspostss' has a doubled trailing "s";
    # confirm no template/reverse() relies on it before renaming.
    path('mlprojects/<str:slug>', views.mlprojectsposts, name='mlprojectspostss'),
]
984,989 | 4b15c0642fbf6c8797c13f0ed2114acf9bc0d19e | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import copy
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from ._enums import *
__all__ = ['ResolverConfigArgs', 'ResolverConfig']
# NOTE: auto-generated by the Pulumi SDK generator — regenerate instead of
# editing by hand (see the header at the top of this file).
@pulumi.input_type
class ResolverConfigArgs:
    def __init__(__self__, *,
                 autodefined_reverse_flag: pulumi.Input['ResolverConfigAutodefinedReverseFlag'],
                 resource_id: pulumi.Input[str]):
        """
        The set of arguments for constructing a ResolverConfig resource.
        :param pulumi.Input['ResolverConfigAutodefinedReverseFlag'] autodefined_reverse_flag: Represents the desired status of AutodefinedReverse. The only supported value on creation is DISABLE. Deletion of this resource will return AutodefinedReverse to its default value (ENABLED).
        :param pulumi.Input[str] resource_id: ResourceId
        """
        pulumi.set(__self__, "autodefined_reverse_flag", autodefined_reverse_flag)
        pulumi.set(__self__, "resource_id", resource_id)

    @property
    @pulumi.getter(name="autodefinedReverseFlag")
    def autodefined_reverse_flag(self) -> pulumi.Input['ResolverConfigAutodefinedReverseFlag']:
        """
        Represents the desired status of AutodefinedReverse. The only supported value on creation is DISABLE. Deletion of this resource will return AutodefinedReverse to its default value (ENABLED).
        """
        return pulumi.get(self, "autodefined_reverse_flag")

    @autodefined_reverse_flag.setter
    def autodefined_reverse_flag(self, value: pulumi.Input['ResolverConfigAutodefinedReverseFlag']):
        pulumi.set(self, "autodefined_reverse_flag", value)

    @property
    @pulumi.getter(name="resourceId")
    def resource_id(self) -> pulumi.Input[str]:
        """
        ResourceId
        """
        return pulumi.get(self, "resource_id")

    @resource_id.setter
    def resource_id(self, value: pulumi.Input[str]):
        pulumi.set(self, "resource_id", value)
# NOTE: auto-generated by the Pulumi SDK generator — regenerate instead of
# editing by hand (see the header at the top of this file).
class ResolverConfig(pulumi.CustomResource):
    @overload
    def __init__(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 autodefined_reverse_flag: Optional[pulumi.Input['ResolverConfigAutodefinedReverseFlag']] = None,
                 resource_id: Optional[pulumi.Input[str]] = None,
                 __props__=None):
        """
        Resource schema for AWS::Route53Resolver::ResolverConfig.

        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input['ResolverConfigAutodefinedReverseFlag'] autodefined_reverse_flag: Represents the desired status of AutodefinedReverse. The only supported value on creation is DISABLE. Deletion of this resource will return AutodefinedReverse to its default value (ENABLED).
        :param pulumi.Input[str] resource_id: ResourceId
        """
        ...
    @overload
    def __init__(__self__,
                 resource_name: str,
                 args: ResolverConfigArgs,
                 opts: Optional[pulumi.ResourceOptions] = None):
        """
        Resource schema for AWS::Route53Resolver::ResolverConfig.

        :param str resource_name: The name of the resource.
        :param ResolverConfigArgs args: The arguments to use to populate this resource's properties.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        ...
    def __init__(__self__, resource_name: str, *args, **kwargs):
        # Dispatch to whichever overload matches the supplied arguments.
        resource_args, opts = _utilities.get_resource_args_opts(ResolverConfigArgs, pulumi.ResourceOptions, *args, **kwargs)
        if resource_args is not None:
            __self__._internal_init(resource_name, opts, **resource_args.__dict__)
        else:
            __self__._internal_init(resource_name, *args, **kwargs)

    def _internal_init(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 autodefined_reverse_flag: Optional[pulumi.Input['ResolverConfigAutodefinedReverseFlag']] = None,
                 resource_id: Optional[pulumi.Input[str]] = None,
                 __props__=None):
        opts = pulumi.ResourceOptions.merge(_utilities.get_resource_opts_defaults(), opts)
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.id is None:
            # Creating a new resource: validate and record the inputs.
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = ResolverConfigArgs.__new__(ResolverConfigArgs)

            if autodefined_reverse_flag is None and not opts.urn:
                raise TypeError("Missing required property 'autodefined_reverse_flag'")
            __props__.__dict__["autodefined_reverse_flag"] = autodefined_reverse_flag
            if resource_id is None and not opts.urn:
                raise TypeError("Missing required property 'resource_id'")
            __props__.__dict__["resource_id"] = resource_id
            # Output-only properties are unknown until the provider responds.
            __props__.__dict__["autodefined_reverse"] = None
            __props__.__dict__["owner_id"] = None
        super(ResolverConfig, __self__).__init__(
            'aws-native:route53resolver:ResolverConfig',
            resource_name,
            __props__,
            opts)

    @staticmethod
    def get(resource_name: str,
            id: pulumi.Input[str],
            opts: Optional[pulumi.ResourceOptions] = None) -> 'ResolverConfig':
        """
        Get an existing ResolverConfig resource's state with the given name, id, and optional extra
        properties used to qualify the lookup.

        :param str resource_name: The unique name of the resulting resource.
        :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))

        __props__ = ResolverConfigArgs.__new__(ResolverConfigArgs)

        # All properties start unknown; the engine fills them from state.
        __props__.__dict__["autodefined_reverse"] = None
        __props__.__dict__["autodefined_reverse_flag"] = None
        __props__.__dict__["owner_id"] = None
        __props__.__dict__["resource_id"] = None
        return ResolverConfig(resource_name, opts=opts, __props__=__props__)

    @property
    @pulumi.getter(name="autodefinedReverse")
    def autodefined_reverse(self) -> pulumi.Output['ResolverConfigAutodefinedReverse']:
        """
        ResolverAutodefinedReverseStatus, possible values are ENABLING, ENABLED, DISABLING AND DISABLED.
        """
        return pulumi.get(self, "autodefined_reverse")

    @property
    @pulumi.getter(name="autodefinedReverseFlag")
    def autodefined_reverse_flag(self) -> pulumi.Output['ResolverConfigAutodefinedReverseFlag']:
        """
        Represents the desired status of AutodefinedReverse. The only supported value on creation is DISABLE. Deletion of this resource will return AutodefinedReverse to its default value (ENABLED).
        """
        return pulumi.get(self, "autodefined_reverse_flag")

    @property
    @pulumi.getter(name="ownerId")
    def owner_id(self) -> pulumi.Output[str]:
        """
        AccountId
        """
        return pulumi.get(self, "owner_id")

    @property
    @pulumi.getter(name="resourceId")
    def resource_id(self) -> pulumi.Output[str]:
        """
        ResourceId
        """
        return pulumi.get(self, "resource_id")
|
984,990 | 32b6bdfbf93d9de828ce3a9997cc4cc2b7089745 | import numpy as np
from django.http import JsonResponse
from django.shortcuts import HttpResponse
def login(request):
    """Debug view: render a small NumPy matrix as the raw HTTP response body."""
    matrix = np.array([[1, 2, 3], [4, 5, 6]])
    return HttpResponse(matrix)
984,991 | b406b7dd407831e76624c8a1a97dd7ec3c4a745f | import csv
from abc import ABC, ABCMeta, abstractmethod
from typing import Union
class ZajSpectr():
    """Immutable container for one acquired spectrum and its metadata."""

    def __init__(self, filename: str = '', data: list = None, channel: list = None, exposition: int = 0, time: int = 0):
        # Private storage; read-only access is provided by the properties below.
        self._filename = filename
        self._data = data
        self._channel = channel
        self._exposition = exposition
        self._time = time

    @property
    def channel(self):
        """Channel axis of the spectrum."""
        return self._channel

    @property
    def data(self):
        """Measured spectrum values."""
        return self._data

    @property
    def details(self):
        """Human-readable metadata summary; zero-valued fields are omitted."""
        parts = []
        if self._time:
            parts.append('Time: ' + str(self._time))
        if self._exposition:
            parts.append('Экспозиция: ' + str(self._exposition))
        return '\n'.join(parts)
class ZajSpectrReaderAbstract(ABC):
    """Interface for spectrum readers: a reader turns a file name into a
    ZajSpectr instance.

    FIX: removed ``__metaclass__ = ABCMeta`` — that is Python 2 syntax and
    has no effect in Python 3; inheriting from ``ABC`` already makes the
    class abstract.
    """

    @abstractmethod
    def return_zai_spectr(self, filename: str) -> ZajSpectr:
        """:input takes a string with the file name
        :return returns a ZajSpectr instance"""
class ZajSpectrReader(ZajSpectrReaderAbstract):
    """Reads a spectrum file in either of two layouts:

    * plain CSV:  ``<channel><delim><value>`` on every line;
    * headed:     named header lines/sections followed by data lines.

    The delimiter (``:``, tab or ``;``) is auto-detected from the content.
    """

    def __init__(self):
        # Delimiters are discovered lazily while parsing.
        self.cur_data_delimiter = ''
        self.cur_header_delimiter = ''

    @staticmethod
    def _return_number_or_false(num: str) -> Union[int, float, bool]:
        """Parse *num* as int or float; return False when it is not a number."""
        if num.isdigit():
            return int(num)
        try:
            return float(num)
        except ValueError:
            return False

    @staticmethod
    def _find_delimiter(line: str) -> Union[str, bool]:
        """Return the first of ':', tab, ';' that splits *line* into at
        least two non-empty parts, or False when none does."""
        if ':' in line and len(line.strip(':')) > 1:
            return ':'
        elif '\t' in line and len(line.strip('\t')) > 1:
            return '\t'
        elif ';' in line and len(line.strip(';')) > 1:
            return ';'
        else:
            return False

    def read_headed_spectr(self, file) -> dict:
        """Read header and data lines of the form

            [header][self.cur_header_delimiter]
            [int][self.cur_data_delimiter][int]
            [int][self.cur_data_delimiter][int]
        or
            [header][self.cur_header_delimiter][int]

        :return dict with the parsed result
        """
        result = {}
        cur_header = ''
        file.seek(0, 0)
        # The delimiter found on the probe line is actually the *header*
        # delimiter; the data delimiter is re-detected below.
        self.cur_data_delimiter, self.cur_header_delimiter = '', self.cur_data_delimiter
        for line in csv.reader(file, delimiter=self.cur_header_delimiter):
            # A successful split means this line is a header.
            if len(line) > 1:
                if line[1] != '':
                    result[line[0]] = line[1]
                else:
                    # Bare header: starts a new section of data lines.
                    cur_header = line[0]
                    result[cur_header] = {}
            # Otherwise the line carries data.
            else:
                # Detect the data delimiter on first use.
                if self.cur_data_delimiter == '':
                    self.cur_data_delimiter = self._find_delimiter(line[0])
                # (was ``not not self.cur_data_delimiter`` — same truthiness)
                if self.cur_data_delimiter and cur_header:
                    cur_data = line[0].split(self.cur_data_delimiter)
                    result[cur_header][int(cur_data[0])] = int(cur_data[1])
        return result

    def read_csv_spectr(self, file) -> dict:
        """Read plain ``[int][self.cur_data_delimiter][float]`` lines and
        :return dict mapping channel -> value"""
        result = {}
        for line in csv.reader(file, delimiter=self.cur_data_delimiter):
            result[int(line[0])] = float(line[1])
        return result

    def return_zai_spectr(self, filename: str) -> ZajSpectr:
        """Open *filename*, auto-detect its layout and return a ZajSpectr.

        :raises TypeError: when no known delimiter is found on the first line.
        """
        with open(filename, 'r') as file:
            result = {}
            # Probe the first line to find a delimiter and the layout.
            first_line = file.readline().strip()
            self.cur_data_delimiter = self._find_delimiter(first_line)
            splitted_line = first_line.split(self.cur_data_delimiter)
            # Delimiter found: the line split into at least two parts.
            if len(splitted_line) > 1:
                # Numeric first field => plain spectrum without headers.
                if splitted_line[0].isdigit():
                    # BUG FIX: the original called self.read_csv_spectr(self, file),
                    # passing ``self`` twice, which raised TypeError.
                    # NOTE(review): the probe line itself is consumed and never
                    # parsed into the data — confirm this is intended.
                    result['data'] = self.read_csv_spectr(file)
                # Otherwise the file has headers.
                else:
                    # BUG FIX: same double-``self`` call as above.
                    result = self.read_headed_spectr(file)
            # No delimiter at all: unrecognised content.
            else:
                raise TypeError('Не могу определить содержимое спектра')
            # With dark-pixel data present, subtract the averaged DC component.
            if 'BlackPixels' in result and 'SpectrumPixels' in result:
                black_mean = int(sum(result['BlackPixels'].values()) / len(result['BlackPixels']))
                result['data'] = {x: y - black_mean for x, y in result['SpectrumPixels'].items()}
            result['filename'] = filename
            if 'Exposition' not in result:
                result['Exposition'] = 0
            if 'Time' not in result:
                result['Time'] = 0
            result['channel'] = list(result['data'].keys())
            result['values'] = list(result['data'].values())
            return ZajSpectr(filename=filename,
                             data=result['values'],
                             channel=result['channel'],
                             exposition=result['Exposition'],
                             time=result['Time'])
'''spectrs = []
reader = ZajSpectrReader()
spectrs.append(reader.return_zai_spectr(r'..\example\20171018_135311_A703OLZX_6.spec'))
spectrs.append(reader.return_zai_spectr(r'C:\py\Projects\Spectr_Viewver\Data\FSR04_calib_110820\6\20200811_11_01_51_837#15_16.asc'))
print(spectrs[1].details)''' |
984,992 | cc53efbdb029be59446ffb600befe38e2498251e |
# Compute 2**2 and print its double.
b = pow(2, 2)
# BUG FIX: the original called undefined ``print1`` (NameError);
# the builtin ``print`` was clearly intended.
print(b * 2)
|
984,993 | 6f910c3342474214071d3c49f05a92ba3d3b2098 | from django.db import models
from apps.utils.models import Timestamps
class Certificate(Timestamps, models.Model):
    """A named certificate with a free-form description.

    NOTE(review): ``Timestamps`` is a project mixin (apps.utils.models) —
    presumably adds created/updated fields; confirm there.
    """
    # Display name, up to 100 characters.
    name = models.CharField(max_length=100)
    # Free-form description text.
    description = models.TextField()
|
984,994 | 2567e6902ba853491b9f558859cc7fc1e247482f | # -*- coding: utf-8 -*-
# @Time : 2019/7/1 10:11
# @Author : Mr.Li
|
984,995 | 2f8f80eb860376e837024ee44e5b476af4493e51 |
# coding: utf-8
# In[68]:
# import modules
import pickle
import pandas as pd
from pathlib import Path
from pull_data import get_url_csv
from pull_data import train
from pull_data import test
import numpy as np
from matplotlib import pyplot as plt
from sklearn.base import TransformerMixin, BaseEstimator
from sklearn.cross_validation import train_test_split, cross_val_predict
from sklearn.discriminant_analysis import LinearDiscriminantAnalysis
from sklearn.dummy import DummyClassifier
from sklearn.ensemble import RandomForestClassifier
from sklearn.metrics import (precision_recall_fscore_support as score,
confusion_matrix, accuracy_score,
classification_report)
from sklearn.model_selection import (cross_val_score, GridSearchCV,
KFold, cross_val_score)
from sklearn.externals import joblib
from sklearn.preprocessing import StandardScaler
from sklearn.pipeline import Pipeline, make_pipeline, make_union
import scikitplot as skplt
import random
from train_model import BinarizeColumn
pd.options.mode.chained_assignment = None
# Function to rebuild model features for train dataset
def Create_train_model(data=None):
    """Build the model feature matrix and label vector.

    Generalized: accepts an optional ``data`` frame instead of being
    hard-wired to the module-level ``df``; existing zero-argument callers
    are unaffected.

    :param data: optional pandas DataFrame with at least the feature
        columns and the 'Survived' column; defaults to the global ``df``.
    :return: tuple ``(features, label, X, y)`` — feature column names,
        label column name, feature DataFrame, flat numpy label array.
    """
    frame = df if data is None else data
    features = ['Pclass', 'Sex', 'Age', 'SibSp', 'Fare']
    label = 'Survived'
    X = frame[features]
    # .to_numpy() replaces the deprecated Series.ravel(); result is the
    # same flat 1-D array.
    y = frame[label].to_numpy()
    return (features, label, X, y)
# Load pickle
my_pipeline = joblib.load('my_pipeline.pkl')
print("proof of pipeline:", my_pipeline)
# Load test csv
df = test
if len(test) > 0:
print("test_df loaded")
# create empty target column
df = df.assign(Survived="NA")
# Set seed
np.random.seed(7)
# Create model features
features, label, X, y = Create_train_model()
# Special case imputation for test[Fair]
med_fair = X["Fare"].median()
X["Fare"] = X["Fare"].fillna(med_fair)
# Check for NA
print("total NA's:", X.isna().sum(), "\n Now display pipeline imputation:")
# Predict
predicted = pd.DataFrame(my_pipeline.predict_proba(X))
# Predict survival by > .5 proba
predicted_survival = pd.DataFrame(np.where(predicted.iloc[:, 1] > 0.5, 1, 0))
predicted_survival["PassengerId"] = df[["PassengerId"]]
# Write to csv
predicted_survival.to_csv("predicted_survival.csv", encoding='utf-8')
# Check if file saved
file=Path("predicted_survival.csv")
if file.is_file():
print("Prediction csv is saved as predicted_survival.csv, the predictions are based on a probability threshold of .5 and are accompanied by passenger id/original index")
else: print("File did not save")
## Import classification report and display
my_classification_report=pd.read_csv('classification_report2.csv')
my_classification_report
|
984,996 | fa24b6a8eea3e0fdb3f529ad107f9da1382c0243 | from pytube import YouTube
import argparse
parser = argparse.ArgumentParser(description='Process some integers.')
parser.add_argument("link",type=str,help='give URL of vedio to br download')
parser.add_argument("resolution",type=str,nargs='?',help="Resolution of vedio to be download ('High','Medium','Low')",default="High")
parser.add_argument("file_path",type=str,nargs='?',help="file path to save vedio",default= './')
args = parser.parse_args()
def youtube_vedio_download(link,resolution,file_path):
    """Download a YouTube video at a rough quality tier.

    :param link: URL of the video to download
    :param resolution: 'High', 'Medium', or anything else for the lowest tier
    :param file_path: directory to save the video into

    NOTE(review): selecting a stream by a fixed index into
    ``yt.streams.all()`` is fragile — the index-to-quality mapping depends
    entirely on what streams YouTube returns for a given video, and
    ``streams.all()`` is deprecated in newer pytube; verify against the
    pytube docs.
    """
    yt = YouTube(link)
    if resolution == "High":
        stream = yt.streams.all()[1]
    elif resolution == "Medium":
        stream = yt.streams.all()[5]
    else:
        stream = yt.streams.all()[-6]
    print(f"Downloading video from YouTube to {str(file_path)} at {resolution} resolution ")
    stream.download(file_path)
if __name__ == "__main__":
youtube_vedio_download(link=args.link,resolution = args.resolution,file_path = args.file_path) |
984,997 | 03b8f9b216aeb8ee5282ce36dd590a588d53ba56 | sw_xss = [
['innerhtml', 'a', 0],
['script', 'a', 0],
['svg', 'a', 0],
['contenteditable', 'a', 0],
['x', 'a', 0],
['src', 'a', 0],
['iframe', 'a', 0],
['javascript', 'a', 0],
['embed', 'a', 0],
['math', 'a', 0],
['brute', 'a', 0],
['href', 'a', 0],
['form', 'a', 0],
['action', 'a', 0],
['input', 'a', 0],
['type', 'a', 0],
['submit', 'a', 0],
['isindex', 'a', 0],
['value', 'a', 0],
['button', 'a', 0],
['formaction', 'a', 0],
['srcdoc', 'a', 0],
['xlink', 'a', 0],
['img', 'a', 0],
['xmlns', 'a', 0],
['link', 'a', 0],
['base', 'a', 0],
['style', 'a', 0],
['marquee', 'a', 0],
['audio', 'a', 0],
['video', 'a', 0],
['keygen', 'a', 0],
['autofocus', 'a', 0],
['select', 'a', 0],
['option', 'a', 0],
['menu', 'a', 0],
['contextmenu', 'a', 0],
['textarea', 'a', 0],
['source', 'a', 0],
['meta', 'a', 0],
['object', 'a', 0],
['html', 'a', 0],
['target', 'a', 0],
['card ', 'a', 0],
['onevent', 'a', 0],
['animate', 'a', 0],
['handler', 'a', 0],
['feimage', 'a', 0],
['table', 'a', 0],
['background', 'a', 0],
['frameset', 'a', 0],
['div', 'a', 0],
['allowscriptaccess', 'a', 0],
###############################
['onload', 'b',0],
['onmouseover', 'b',0],
['onsubmit', 'b',0],
['onfocus', 'b',0],
['onblur', 'b',0],
['onclick', 'b',0],
['oncopy', 'b',0],
['oncontextmenu', 'b',0],
['oncut', 'b',0],
['ondblclick', 'b',0],
['ondrag', 'b',0],
['oninput', 'b',0],
['onkeydown', 'b',0],
['onkeypress', 'b',0],
['onkeyup', 'b',0],
['onmousedown', 'b',0],
['onmousemove', 'b',0],
['onmouseout', 'b',0],
['onmouseup', 'b',0],
['onpaste', 'b',0],
['ontouchstart', 'b',0],
['ontouchend', 'b',0],
['ontouchmove', 'b',0],
['ontouchcancel', 'b',0],
['onorientationchange', 'b',0],
['onerror', 'b',0],
['onpageshow', 'b',0],
['onhashchange', 'b',0],
['onscroll', 'b',0],
['onresize', 'b',0],
['onhelp', 'b',0],
['onstart', 'b',0],
['onloadstart', 'b',0],
['onchange', 'b',0],
['onshow', 'b',0],
['oneonerrorrror', 'b',0],
['ontoggle', 'b',0],
['onafterscriptexecute', 'b',0],
['onbeforescriptexecute', 'b',0],
['onfinish', 'b',0],
['expression', 'b',0],
['onbeforeload', 'b',0],
['onbeforeunload', 'b',0],
['onformchange', 'b',0],
['vbscript', 'b',0],
##########################
['eval', 'c',0],
['find', 'c',0],
['top', 'c',0],
['source', 'c',0],
['tostring', 'c',0],
['url', 'c',0],
['slice', 'c',0],
['location', 'c',0],
['hash', 'c',0],
['setInterval', 'c',0],
['function', 'c',0],
['appendchild', 'c',0],
['createelement', 'c',0],
['rel', 'c',0],
['string', 'c',0],
['fromcharcode', 'c',0],
['window', 'c',0],
['parent', 'c',0],
['self', 'c',0],
['prompt', 'c',0],
['defineproperties', 'c',0],
['event', 'c',0],
['initmouseevent', 'c',0],
['childnodes', 'c',0],
['clonenode', 'c',0],
['match', 'c',0],
['head', 'c',0],
['substr', 'c',0],
['unescape', 'c',0],
['xmlhttp', 'c',0],
['open', 'c',0],
['content', 'c',0],
['frames', 'c',0],
['import', 'c',0],
['behavior', 'c',0],
['geturl', 'c',0],
['charset', 'c',0],
#######################
['alert', 'd',0],
['navigator', 'd',0],
['vibrate', 'd',0],
['document', 'd',0],
['domain', 'd',0],
['message', 'd',0],
['write', 'd',0],
['cookie', 'd',0],
['echo', 'd',0],
['exec', 'd',0],
['cmd', 'd',0],
['msgbox', 'd',0],
########################
['xss', 'e',0],
['hello', 'e',0],
['fuzzelement', 'e',0],
['test', 'e',0],
['injectx', 'e',0],
['netsparker', 'e',0],
['openbugbounty', 'e',0],
['baiduspider', 'e',0],
#['write', 'e',0],
['csrf', 'e',0]
]
# sqli ###########################
sw_sqli = [
['case', 'A', 0],
['by', 'A', 0],
['all', 'A', 0],
['char', 'A', 0],
['character', 'A', 0],
['chr', 'A', 0],
['column', 'A', 0],
['concat', 'A', 0],
['convert', 'A', 0],
['count', 'A', 0],
['create', 'A', 0],
['declare', 'A', 0],
['delete', 'A', 0],
['distinct', 'A', 0],
['drop', 'A', 0],
['from', 'A', 0],
['function', 'A', 0],
['group', 'A', 0],
['having', 'A', 0],
['if', 'A', 0],
['ifnull', 'A', 0],
['insert', 'A', 0],
['into', 'A', 0],
['like', 'A', 0],
['limit', 'A', 0],
['or', 'A', 0],
['and', 'A', 0],
['order', 'A', 0],
['select', 'A', 0],
['union', 'A', 0],
['update', 'A', 0],
['when', 'A', 0],
['where', 'A', 0],
['grant', 'A', 0],
#######################
['address', 'B', 0],
['data', 'B', 0],
['database', 'B', 0],
['dba', 'B', 0],
['etc', 'B', 0],
['file', 'B', 0],
['filename', 'B', 0],
['id', 'B', 0],
['name', 'B', 0],
['passwd', 'B', 0],
['password', 'B', 0],
['pg', 'B', 0],
['pwd', 'B', 0],
['resource', 'B', 0],
['sys', 'B', 0],
['system', 'B', 0],
['table', 'B', 0],
['tablename', 'B', 0],
['tables', 'B', 0],
['uid', 'B', 0],
['user', 'B', 0],
['username', 'B', 0],
['users', 'B', 0],
['utl', 'B', 0],
['value', 'B', 0],
['values', 'B', 0],
['version', 'B', 0],
['schema', 'B', 0],
['information', 'B', 0],
['inaddr', 'B', 0],
['admin', 'B', 0],
#############################
['cmd', 'C', 0],
['cmdshell', 'C', 0],
['echo', 'C', 0],
['exe', 'C', 0],
['exec', 'C', 0],
['shell', 'C', 0],
['master', 'C', 0],
['xp', 'C', 0],
['sp', 'C', 0],
['regdelete', 'C', 0],
['availablemedia', 'C', 0],
['terminate', 'C', 0],
['regwrite', 'C', 0],
['regremovemultistring', 'C', 0],
['regread', 'C', 0],
['regenumvalues', 'C', 0],
['regenumkeys', 'C', 0],
['regenumbalues', 'C', 0],
['regdeletevalue', 'C', 0],
['regdeletekey', 'C', 0],
['regaddmultistring', 'C', 0],
['ntsec', 'C', 0],
['makecab', 'C', 0],
['loginconfig', 'C', 0],
['enumdsn', 'C', 0],
['filelist', 'C', 0],
['execresultset', 'C', 0],
['dirtree', 'C', 0],
['cmdshell', 'C', 0],
['reg', 'C', 0],
['servicecontrol', 'C', 0],
['webserver', 'C', 0],
############################
['decode', 'D', 0],
['default', 'D', 0],
['delay', 'D', 0],
['document', 'D', 0],
['eval', 'D', 0],
['getmappingxpath', 'D', 0],
['hex', 'D', 0],
['is', 'D', 0],
['login', 'D', 0],
['match', 'D', 0],
['not', 'D', 0],
['null', 'D', 0],
['request', 'D', 0],
['sets', 'D', 0],
['to', 'D', 0],
['var', 'D', 0],
['varchar', 'D', 0],
['waitfor', 'D', 0],
['desc', 'D', 0],
['connect', 'D', 0],
['as', 'D', 0],
['int', 'D', 0],
['log', 'D', 0],
['cast', 'D', 0],
['rand', 'D', 0],
['sleep', 'D', 0],
['substring', 'D', 0],
['replace', 'D', 0],
['benchmark', 'D', 0],
['md', 'D', 0],
#######################
['content', 'E', 0],
['cookie', 'E', 0],
['dbms', 'E', 0],
['db', 'E', 0],
['dir', 'E', 0],
['get', 'E', 0],
['http', 'E', 0],
['mysql', 'E', 0],
['oracle', 'E', 0],
['post', 'E', 0],
['query', 'E', 0],
['referer', 'E', 0],
['sql', 'E', 0],
['sqlmap', 'E', 0]
]
# rce ###########################
sw_rce = [
['memberaccess', 'A', 0],
['getsession', 'A', 0],
['getservletcontext', 'A', 0],
['getrealpath', 'A', 0],
['xmldatasource', 'A', 0],
['objectname', 'A', 0],
['management', 'A', 0],
['io', 'A', 0],
['fileoutputstream', 'A', 0],
['bufferedwriter', 'A', 0],
['dispatcher', 'A', 0],
['httpservletresponse', 'A', 0],
['lang', 'A', 0],
['runtime', 'A', 0],
['getruntime', 'A', 0],
['savegangster', 'A', 0],
['zglzcgf0y2hlci5idhrwu2vydmxldfjlcxvlc3q', 'A', 0],
['amf2ys5syw5nllbyb2nlc3ncdwlszgvy', 'A', 0],
['amf2ys5pby5jbnb1dfn0cmvhbvjlywrlcg', 'A', 0],
['inputstreamreader', 'A', 0],
['amf2ys5pby5gawxlt3v0chv0u3ryzwft', 'A', 0],
['amf2ys5pby5cdwzmzxjlzfdyaxrlcg', 'A', 0],
['zglzcgf0y2hlci5idhrwu2vydmxldfjlc3bvbnnl', 'A', 0],
['processbuilder', 'A', 0],
['allowstaticmethodaccess', 'A', 0],
['servletactioncontext', 'A', 0],
['methodaccessor', 'A', 0],
['denymethodexecution', 'A', 0],
['redirectaction', 'A', 0],
['ognlcontext', 'A', 0],
['memberacess', 'A', 0],
['redirect', 'A', 0],
['action', 'A', 0],
['annotationinvocationhandler', 'A', 0],
['annotation', 'A', 0],
['reflect', 'A', 0],
['class', 'A', 0],
['classloader', 'A', 0],
['xwork', 'A', 0],
['ognlutil', 'A', 0],
['redirecterrorstream', 'A', 0],
['setmemberaccess', 'A', 0],
['getinstance', 'A', 0],
['actioncontext', 'A', 0],
['getexcludedpackagenames', 'A', 0],
['getexcludedclasses', 'A', 0],
['getinputstream', 'A', 0],
['getwriter', 'A', 0],
['workcontext', 'A', 0],
['xmldecoder', 'A', 0],
['println', 'A', 0],
['unmarshaller', 'A', 0],
['allowpackageprotectedaccess', 'A', 0],
['allowprotectedaccess', 'A', 0],
['allowprivateaccess', 'A', 0],
['excludedpackagenamepatterns', 'A', 0],
['excludedclasses', 'A', 0],
['invokeuq', 'A', 0],
['getruntimeur', 'A', 0],
['getmethoduq', 'A', 0],
['constanttransformerxv', 'A', 0],
['invokertransformer', 'A', 0],
['imethodnamet', 'A', 0],
['annotationinvocationhandleru', 'A', 0],
['invocationhandler', 'A', 0],
['runtimexpsr', 'A', 0],
['objectxpvq', 'A', 0],
['invoker', 'A', 0],
['createobject', 'A', 0],
#######################################
['netstat', 'B', 0],
['uname', 'B', 0],
['ipconfig', 'B', 0],
['cmd', 'B', 0],
['root', 'B', 0],
['exe', 'B', 0],
['awzjb25mawc', 'B', 0],
['bmv0c3rhdcat', 'B', 0],
['exec', 'B', 0],
['dir', 'B', 0],
['rm', 'B', 0],
['rf', 'B', 0],
['mkdir', 'B', 0],
['ls', 'B', 0],
['ifconfig', 'B', 0],
['chmglq', 'B', 0],
['bhntb2qg', 'B', 0],
['bmv0c3rhdca', 'B', 0],
['zgyg', 'B', 0],
['dgnwzhvtcca', 'B', 0],
['cgvybca', 'B', 0],
['d2dldca', 'B', 0],
['ymfzaca', 'B', 0],
['y2qg', 'B', 0],
['dm1zdgf0ia', 'B', 0],
['bhnvzia', 'B', 0],
['zgly', 'B', 0],
['zwnobya', 'B', 0],
['bmmglq', 'B', 0],
['cgluzya', 'B', 0],
['c2h1dgrvd24g', 'B', 0],
['a2lsbca', 'B', 0],
['dw5hbwug', 'B', 0],
['chdk', 'B', 0],
['bwtkaxig', 'B', 0],
['cm0glq', 'B', 0],
['dmkg', 'B', 0],
['bxyg', 'B', 0],
['y2htb2qg', 'B', 0],
['dg91y2gg', 'B', 0],
['bhmg', 'B', 0],
['y2f0ia', 'B', 0],
['y2f0pg', 'B', 0],
['c3uglq', 'B', 0],
['d2hvyw1p', 'B', 0],
['dg9wic0', 'B', 0],
['zgf0zq', 'B', 0],
['cgfzc3dk', 'B', 0],
['c3r0esa', 'B', 0],
['cm1kaxig', 'B', 0],
['bg4g', 'B', 0],
['y3ag', 'B', 0],
['y2hvd24g', 'B', 0],
['y2hncnag', 'B', 0],
['dw1hc2sg', 'B', 0],
['bw9yzsa', 'B', 0],
['agvhzca', 'B', 0],
['dgfpbca', 'B', 0],
['d2mg', 'B', 0],
['y3v0ia', 'B', 0],
['c29ydca', 'B', 0],
['c3bsaxqg', 'B', 0],
['z3jlcca', 'B', 0],
['zmluzca', 'B', 0],
['wget', 'B', 0],
['powershell', 'B', 0],
['curl', 'B', 0],
['nslookup', 'B', 0],
#####################
['exefile', 'C', 0],
['jexws4', 'C', 0],
['singlesaints', 'C', 0],
['gry', 'C', 0],
['struts2', 'C', 0],
['showcase', 'C', 0],
['apache', 'C', 0],
['sun', 'C', 0],
['ognl', 'C', 0],
['soapenv', 'C', 0],
['member', 'C', 0],
['access', 'C', 0],
['acunetix', 'C', 0],
['soap', 'C', 0],
['javax', 'C', 0],
['java', 'C', 0],
['envelope', 'C', 0],
['method', 'C', 0],
['command', 'C', 0],
['xmlsoap', 'C', 0],
['sr', 'C', 0],
['sh', 'C', 0],
['coordinatorporttype', 'C', 0],
['appscan', 'C', 0],
['spider', 'C', 0],
########################################
['propfind', 'D', 0],
['content', 'D', 0],
['length', 'D', 0],
['head', 'D', 0],
['post', 'D', 0],
['get', 'D', 0],
['type', 'D', 0],
['user', 'D', 0],
['agent', 'D', 0],
['accept', 'D', 0],
['cookie', 'D', 0],
['prohibited', 'D', 0]
]
# uaa ###########################
sw_uaa = [
['myadmin', 'A', 0],
['manager', 'A', 0],
['admin', 'A', 0],
['wp', 'A', 0],
['saedit', 'A', 0],
['config', 'A', 0],
['funcspecs', 'A', 0],
['scripts', 'A', 0],
['server', 'A', 0],
['center', 'A', 0],
['tomcat', 'A', 0],
['pma', 'A', 0],
['transfer', 'A', 0],
['console', 'A', 0],
['vti', 'A', 0],
['acensus', 'A', 0],
['openapi', 'A', 0],
['jmx', 'A', 0],
['web', 'A', 0],
['conf', 'A', 0],
['servlet', 'A', 0],
['export', 'A', 0],
['cs', 'A', 0],
['db', 'A', 0],
['changelog', 'A', 0],
['status', 'A', 0],
['login', 'A', 0],
['setup', 'A', 0],
['info', 'A', 0],
['join', 'A', 0],
['encoding', 'A', 0],
['bin', 'A', 0],
['security', 'A', 0],
['empappupdtlogin', 'A', 0],
['content', 'A', 0],
['spmgr', 'A', 0],
['sap', 'A', 0],
['rd', 'A', 0],
['log', 'A', 0],
['details', 'A', 0],
['howto', 'A', 0],
['inc', 'A', 0],
['index', 'A', 0],
['check', 'A', 0],
['loginform', 'A', 0],
['service', 'A', 0],
['user', 'A', 0],
['plugins', 'A', 0],
['properties', 'A', 0],
['wsomg', 'A', 0],
['portal', 'A', 0],
['import', 'A', 0],
['gpin', 'A', 0],
['aut', 'A', 0],
['rest', 'A', 0],
['dzs', 'A', 0],
['csql', 'A', 0],
['dll', 'A', 0],
['edit', 'A', 0],
['view', 'A', 0],
['upload', 'A', 0],
['author', 'A', 0],
['resource', 'A', 0],
['zoomsounds', 'A', 0],
['phpmyadmin', 'A', 0],
['phpmyadminold', 'A', 0],
['bak', 'A', 0],
['pmapass', 'A', 0],
['pmahomme', 'A', 0],
['editor', 'A', 0],
['phpadmin', 'A', 0],
['configuration', 'A', 0],
['fckeditor', 'A', 0],
['inf', 'A', 0],
['phpmy', 'A', 0],
['ckfinder', 'A', 0],
['webadmin', 'A', 0],
#######################################
['rhksflwk', 'B', 0],
['master', 'B', 0],
['admin', 'B', 0],
['manager', 'B', 0],
['webmaster', 'B', 0],
['root', 'B', 0],
['administrator', 'B', 0],
['administrators', 'B', 0],
['superuser', 'B', 0],
['weblogic', 'B', 0],
['guest', 'B', 0],
['test', 'B', 0],
['ftpuser', 'B', 0],
['system', 'B', 0],
['scott', 'B', 0],
['tomcat', 'B', 0],
['user', 'B', 0],
['operator', 'B', 0],
['anonymous', 'B', 0],
['super', 'B', 0],
['pmauser', 'B', 0],
['mysqladmin', 'B', 0],
['sysmaster', 'B', 0],
['dbadmin', 'B', 0],
['pmaauth', 'B', 0],
['admindb', 'B', 0],
['administrateur', 'B', 0],
['administrat', 'B', 0],
['webmail', 'B', 0],
['adminmaster', 'B', 0],
['phpadmin', 'B', 0],
['testuser', 'B', 0],
['rootadmin', 'B', 0],
['adminid', 'B', 0],
#######################################
['root', 'C', 0],
['administrator', 'C', 0],
['administrators', 'C', 0],
['superuser', 'C', 0],
['weblogic', 'C', 0],
['asdf', 'C', 0],
['qwer', 'C', 0],
['test', 'C', 0],
['passwd', 'C', 0],
['qwerty', 'C', 0],
['password', 'C', 0],
['manager', 'C', 0],
['pass', 'C', 0],
['admin', 'C', 0],
['abcd', 'C', 0],
['aaaa', 'C', 0],
['asdfgh', 'C', 0],
['webmaster', 'C', 0],
['webmaste', 'C', 0],
['iisadminpwd', 'C', 0],
['asdfg', 'C', 0],
['rootroot', 'C', 0],
['rootpassword', 'C', 0],
['asdfasdf', 'C', 0],
['abcdefg', 'C', 0],
##########################################
['authorization', 'D', 0],
['basic', 'D', 0],
['zmeu', 'D', 0],
['python', 'D', 0],
['cpython', 'D', 0],
['scan', 'D', 0],
['testcookie', 'D', 0],
['ehlo', 'D', 0],
['baiduspider', 'D', 0]
]
# fdo ###########################
sw_fdo = [
['etc', 'A', 0],
['opt', 'A', 0],
['proc', 'A', 0],
['root', 'A', 0],
['usr', 'A', 0],
['var', 'A', 0],
['inetpub', 'A', 0],
['recycle', 'A', 0],
['apache', 'A', 0],
['documents', 'A', 0],
['and', 'A', 0],
['settings', 'A', 0],
['home', 'A', 0],
['log', 'A', 0],
['logs', 'A', 0],
['minint', 'A', 0],
['mysql', 'A', 0],
['nginx', 'A', 0],
['php', 'A', 0],
['program', 'A', 0],
['files', 'A', 0],
['programfiles', 'A', 0],
['sysprep', 'A', 0],
['system', 'A', 0],
['volume', 'A', 0],
['information', 'A', 0],
['users', 'A', 0],
['wamp', 'A', 0],
['windows', 'A', 0],
['winnt', 'A', 0],
['xampp', 'A', 0],
['web', 'A', 0],
['inf', 'A', 0],
['config', 'A', 0],
['include', 'A', 0],
['inc', 'A', 0],
['sites', 'A', 0],
['phpmyadmin', 'A', 0],
['jeus', 'A', 0],
['library', 'A', 0],
['private', 'A', 0],
['httpd', 'A', 0],
['init', 'A', 0],
['lampp', 'A', 0],
['lamp', 'A', 0],
['self', 'A', 0],
['ssh', 'A', 0],
['local', 'A', 0],
['sysconfig', 'A', 0],
['administrator', 'A', 0],
['bin', 'A', 0],
['wwwroot', 'A', 0],
['smsosd', 'A', 0],
['data', 'A', 0],
['conf', 'A', 0],
['apache', 'A', 0],
['group', 'A', 0],
['apachegroup', 'A', 0],
['apache', 'A', 0],
['software', 'A', 0],
['foundation', 'A', 0],
['filezilla', 'A', 0],
['server', 'A', 0],
['inetsrv', 'A', 0],
['debug', 'A', 0],
['panther', 'A', 0],
['repair', 'A', 0],
['filezillaftp', 'A', 0],
['mercurymail', 'A', 0],
['sendmail', 'A', 0],
['tomcat', 'A', 0],
['webalizer', 'A', 0],
['webdav', 'A', 0],
['plugins', 'A', 0],
['defaults', 'A', 0],
['webserver', 'A', 0],
['sites', 'A', 0],
['available', 'A', 0],
['desktop', 'A', 0],
['stable', 'A', 0],
['osdlogs', 'A', 0],
['mysql', 'A', 0],
['unattend', 'A', 0],
['drivers', 'A', 0],
['documents', 'A', 0],
['htdocs', 'A', 0],
['regback', 'A', 0],
['httperr', 'A', 0],
['extra', 'A', 0],
['schema', 'A', 0],
['passwd', 'A', 0],
#['etcpasswd', 'A', 0],
###########################
['vhosts', 'B', 0],
['grub', 'B', 0],
['mkuser', 'B', 0],
['config', 'B', 0],
['passwd', 'B', 0],
['group', 'B', 0],
['hosts', 'B', 0],
['motd', 'B', 0],
['issue', 'B', 0],
['bashrc', 'B', 0],
['nginx', 'B', 0],
['boot', 'B', 0],
['version', 'B', 0],
['cmdline', 'B', 0],
['mounts', 'B', 0],
['host', 'B', 0],
['fstab', 'B', 0],
['sysprep', 'B', 0],
['unattended', 'B', 0],
['unattend', 'B', 0],
['shadow', 'B', 0],
['profile', 'B', 0],
['interrupts', 'B', 0],
['cpuinfo', 'B', 0],
['meminfo', 'B', 0],
['services', 'B', 0],
['security', 'B', 0],
['shells', 'B', 0],
['resolv', 'B', 0],
['fastab', 'B', 0],
['login', 'B', 0],
['ftproot', 'B', 0],
['access', 'B', 0],
['error', 'B', 0],
['apache', 'B', 0],
['systeminit', 'B', 0],
['robots', 'B', 0],
['humans', 'B', 0],
['style', 'B', 0],
['configuration', 'B', 0],
['wp', 'B', 0],
['login', 'B', 0],
['wp', 'B', 0],
['admin', 'B', 0],
['wp', 'B', 0],
['content', 'B', 0],
['my', 'B', 0],
['php', 'B', 0],
['sessions', 'B', 0],
['server', 'B', 0],
['local', 'B', 0],
['wpsettings', 'B', 0],
['explorer', 'B', 0],
['iis', 'B', 0],
['notepad', 'B', 0],
['system', 'B', 0],
['temp', 'B', 0],
['windowsupdate', 'B', 0],
['win', 'B', 0],
['weblogic', 'B', 0],
['mysql', 'B', 0],
['changelog', 'B', 0],
['properties', 'B', 0],
['mercury', 'B', 0],
['phpinfo', 'B', 0],
['sendmail', 'B', 0],
['webalizer', 'B', 0],
['webdav', 'B', 0],
['settings', 'B', 0],
['httpd', 'B', 0],
['sam', 'B', 0],
['software', 'B', 0],
['eula', 'B', 0],
['license', 'B', 0],
['sysprepsysprep', 'B', 0],
['sysprepunattended', 'B', 0],
['sysprepunattend', 'B', 0],
['index', 'B', 0],
['apachectl', 'B', 0],
['hostname', 'B', 0],
['mysql', 'B', 0],
['bin', 'B', 0],
['default', 'B', 0],
['applicationhost', 'B', 0],
['httperr', 'B', 0],
['aspnet', 'B', 0],
['schema', 'B', 0],
['ports', 'B', 0],
['httpd', 'B', 0],
['ssl', 'B', 0],
['desktop', 'B', 0],
['variables', 'B', 0],
['setupinfo', 'B', 0],
['appevent', 'B', 0],
['secevent', 'B', 0],
['tomcat', 'B', 0],
['users', 'B', 0],
['web', 'B', 0],
['appstore', 'B', 0],
['metabase', 'B', 0],
['netsetup', 'B', 0],
['conf', 'B', 0],
['environ', 'B', 0],
['authorized', 'B', 0],
['keys', 'B', 0],
['id', 'B', 0],
['rsa', 'B', 0],
['known', 'B', 0],
['hosts', 'B', 0],
['network', 'B', 0],
['ntuser', 'B', 0],
['logfiles', 'B', 0],
['global', 'B', 0],
['history', 'B', 0],
['htpasswd', 'B', 0],
['bash', 'B', 0],
['history', 'B', 0],
['my', 'B', 0],
##############################
['d', 'C', 0],
['conf', 'C', 0],
['default', 'C', 0],
['wsconfig', 'C', 0],
['ini', 'C', 0],
['gz', 'C', 0],
['bashrc', 'C', 0],
['inf', 'C', 0],
['txt', 'C', 0],
['xml', 'C', 0],
['defs', 'C', 0],
['log', 'C', 0],
['dat', 'C', 0],
['css', 'C', 0],
['php', 'C', 0],
['cnf', 'C', 0],
['exe', 'C', 0],
['inc', 'C', 0],
['rtf', 'C', 0],
['html', 'C', 0],
['err', 'C', 0],
['confetc', 'C', 0],
['config', 'C', 0],
['bak', 'C', 0],
['evt', 'C', 0],
['sav', 'C', 0],
['sa', 'C', 0],
['keystore', 'C', 0],
['pub', 'C', 0],
['asa', 'C', 0],
['asp', 'C', 0],
['localhost', 'C', 0],
['filesystems', 'C', 0]
]
# fup ###########################
sw_fup = [
['zorback', 'A', 0],
['h4x0r', 'A', 0],
['awen', 'A', 0],
['perlkit', 'A', 0],
['darkraver', 'A', 0],
['carbylamine', 'A', 0],
['c99madshell', 'A', 0],
['azrail', 'A', 0],
['aspyqanalyser', 'A', 0],
['aspxspy', 'A', 0],
['asmodeus', 'A', 0],
['antichat', 'A', 0],
['aventgrup', 'A', 0],
['ru24postwebshell', 'A', 0],
['jspspy', 'A', 0],
['h4ntu', 'A', 0],
['entrika', 'A', 0],
['xiangxilianjie', 'A', 0],
['sqlrootkit', 'A', 0],
['kingdefacer', 'A', 0],
['lotfree', 'A', 0],
['backdoor', 'A', 0],
['bythehacker', 'A', 0],
['c99shell', 'A', 0],
['knull', 'A', 0],
['hackart', 'A', 0],
['ru24postwebshell', 'A', 0],
['phpwebshell', 'A', 0],
['rootshell', 'A', 0],
['nullshell', 'A', 0],
['aspshell', 'A', 0],
['myshell', 'A', 0],
['wshshell', 'A', 0],
['kcwebtelnet', 'A', 0],
['r57shell', 'A', 0],
['jspwebshell', 'A', 0],
##################
['shell', 'B', 0],
['exec', 'B', 0],
['passthru', 'B', 0],
['system', 'B', 0],
['popen', 'B', 0],
['eval', 'B', 0],
['command', 'B', 0],
['base64', 'B', 0],
['getparameter', 'B', 0],
['echo', 'B', 0],
['execl', 'B', 0],
['bin', 'B', 0],
['sh', 'B', 0],
['gzinflate', 'B', 0],
['decode', 'B', 0],
['uname', 'B', 0],
['execute', 'B', 0],
['createtextfile', 'B', 0],
['createobject', 'B', 0],
['phpremoteview', 'B', 0],
['fileoutputstream', 'B', 0],
['executecommand', 'B', 0],
['htmlencode', 'B', 0],
['getruntime', 'B', 0],
['runtime', 'B', 0],
['unzip', 'B', 0],
['mkdirs', 'B', 0],
['fileinputstream', 'B', 0],
['getabsolutepath', 'B', 0],
['replace', 'B', 0],
['function', 'B', 0],
['method', 'B', 0],
['preg', 'B', 0],
['str', 'B', 0],
['base64decoder', 'B', 0],
['decodebuffer', 'B', 0],
['language', 'B', 0],
['filename', 'B', 0],
['filepath', 'B', 0],
['file', 'B', 0],
['name', 'B', 0],
['encode', 'B', 0],
['realpath', 'B', 0],
['formbase64string', 'B', 0],
['filesystemobject', 'B', 0],
['phpinfo', 'B', 0],
['getenv', 'B', 0],
['processbuilder', 'B', 0],
['popupmanagefile', 'B', 0],
['rot', 'B', 0],
['action', 'B', 0],
['curl', 'B', 0],
#####################
['php', 'C', 0],
['asp', 'C', 0],
['jsp', 'C', 0],
['asa', 'C', 0],
['cdx', 'C', 0],
['war', 'C', 0],
['aspx', 'C', 0],
['zip', 'C', 0],
['cgi', 'C', 0],
['png', 'C', 0],
['gif', 'C', 0],
['jpeg', 'C', 0],
['exe', 'C', 0],
######################
['get', 'D', 0],
['post', 'D', 0],
['http', 'D', 0],
['title', 'D', 0],
['vbscript', 'D', 0],
['upload', 'D', 0],
['upfile', 'D', 0],
['uploads', 'D', 0],
['popupfile', 'D', 0],
['run', 'D', 0],
['request', 'D', 0],
['response', 'D', 0],
['content', 'D', 0],
['form', 'D', 0],
['data', 'D', 0],
['type', 'D', 0],
['encoding', 'D', 0],
['bytes', 'D', 0],
['filemanager', 'D', 0],
['uploadimage', 'D', 0],
['fileuploader', 'D', 0]
]
|
984,998 | f6cf440f9453e24d9525269e9787c4202400578d | import numpy as np
import matplotlib.pyplot as plt
class investment:
    """Monte-Carlo simulation of splitting a $1000 bankroll across a
    variable number of equal positions, each winning (doubling) with
    probability ~0.51."""

    def __init__(self, positions, num_trials):
        """Store the position counts and the default number of trials.

        :param positions: list of position counts (e.g. [1, 10, 100, 1000])
        :param num_trials: number of simulated days per position count
        """
        self.positions = positions
        self.num_trials = num_trials

    def stimulate(self, position_values, num_trials):
        """
        Repeat ``num_trials`` simulations for each value in
        ``position_values`` and collect the daily returns.

        BUG FIX: the original sized the result arrays with the
        ``num_trials`` argument but looped over ``self.num_trials``;
        when the two disagreed this raised IndexError (or left trailing
        zeros). The argument is now used consistently.

        parameters:
            position_values: list of dollar sizes of each investment
            num_trials: int

        return:
            result: dict mapping number of positions (int) to a numpy
            array of daily returns (cumulative payoff / 1000 - 1)
        """
        result = {}
        for p in position_values:
            cumu_ret = np.zeros(num_trials)
            daily_ret = np.zeros(num_trials)
            for trial in range(num_trials):
                cumu_num = 0
                for i in range(int(1000 / p)):
                    # Each position pays double with probability ~0.51,
                    # otherwise nothing.
                    if np.random.rand() <= 0.51:
                        cumu_num = cumu_num + 2 * p
                cumu_ret[trial] = cumu_num
                daily_ret[trial] = cumu_ret[trial] / 1000 - 1
            result[int(1000 / p)] = daily_ret
        return result

    def present_results(self, positions, num_trials):
        """
        Run the simulation for each position count, save one histogram
        per position count, and write mean/std of the daily returns to
        'results.txt'.

        parameters:
            positions: list of int (number of positions)
            num_trials: int
        """
        position_values = [1000 / p for p in positions]
        result = self.stimulate(position_values, num_trials)
        # ``with`` guarantees the file is closed even if plotting fails.
        with open('results.txt', 'w') as d:
            for p in positions:
                plt.hist(result[p], 100, range=[-1.0, 1.0])
                plt.ylabel('Number of trials with corresponding results')
                plt.xlabel('daily_ret')
                # Zero-padded name (histogram_0010_pos.pdf, ...) — also
                # generalizes to position counts beyond {1,10,100,1000},
                # which the original if/elif chain silently skipped.
                plt.savefig('histogram_{0:04d}_pos.pdf'.format(p))
                plt.close()
                ret = np.asarray(result[p])
                ret_mean = np.mean(ret)
                ret_std = np.std(ret)
                d.write('The mean of daily return for position {0} is {1}\n'.format(p, ret_mean))
                d.write('The standard deviation of daily return for position {0} is {1}\n'.format(p, ret_std))
# Prompt for the user's first and last name, then print a greeting
# (prompts and greeting are in Spanish).
name = input("Cual es tu nombre? ")
apellido = input("cual es tu apellido? ")
print(f"hola {name} {apellido} buen dia :) ")
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.