blob_id stringlengths 40 40 | directory_id stringlengths 40 40 | path stringlengths 3 281 | content_id stringlengths 40 40 | detected_licenses listlengths 0 57 | license_type stringclasses 2
values | repo_name stringlengths 6 116 | snapshot_id stringlengths 40 40 | revision_id stringlengths 40 40 | branch_name stringclasses 313
values | visit_date timestamp[us] | revision_date timestamp[us] | committer_date timestamp[us] | github_id int64 18.2k 668M ⌀ | star_events_count int64 0 102k | fork_events_count int64 0 38.2k | gha_license_id stringclasses 17
values | gha_event_created_at timestamp[us] | gha_created_at timestamp[us] | gha_language stringclasses 107
values | src_encoding stringclasses 20
values | language stringclasses 1
value | is_vendor bool 2
classes | is_generated bool 2
classes | length_bytes int64 4 6.02M | extension stringclasses 78
values | content stringlengths 2 6.02M | authors listlengths 1 1 | author stringlengths 0 175 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
7e0f20a3411dc570ed92600197a47eda29d7e3fc | b5ffa0109ee980406550b7f9a4f5c7587f10a759 | /sklearn库.py | c597a056daae863e773ae3d33e4f1db9b08556b2 | [] | no_license | SuneastChen/np_pd_sklearn | 07fd99f383cfaf117e6dff7beb12b240957cbbe0 | 2ff777772c5a0db1e21635796351919c049dc680 | refs/heads/master | 2020-03-07T22:38:27.311708 | 2018-04-02T13:24:03 | 2018-04-02T13:24:03 | 127,759,582 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,832 | py | # _*_ coding:utf-8 _*_
# !/usr/bin/python
import numpy as np
from sklearn import datasets
from sklearn.model_selection import train_test_split
from sklearn.neighbors import KNeighborsClassifier
# --- Demo 1: k-nearest-neighbours classification on the iris dataset ---
iris = datasets.load_iris() # load the bundled dataset; acts like a dict whose keys include 'data' and 'target'
iris_X = iris.data # feature table (2-D array, one row per sample)
iris_y = iris.target # class labels (1-D array)
print(iris_X[:3, :]) # peek at the first three rows of features
print(iris_y) # inspect the full label vector
# Split the data into a training set and a test set (30% held out)
X_train, X_test, y_train, y_test = train_test_split(iris_X, iris_y, test_size=0.3)
print(y_train) # train_test_split shuffles the samples automatically
# Use the k-nearest-neighbours classifier
knn = KNeighborsClassifier()
knn.fit(X_train, y_train) # train on the training split
print(knn.predict(X_test)) # predictions for the held-out test features
print(y_test) # ground-truth labels of the test split, for visual comparison
from sklearn.linear_model import LinearRegression
# --- Demo 2: the generic sklearn workflow (construct -> fit -> predict/score) ---
loaded_data = datasets.load_boston() # Boston house-price dataset
# NOTE(review): load_boston was removed from scikit-learn >= 1.2; this script
# targets an older version.
data_X = loaded_data.data
data_y = loaded_data.target
model = LinearRegression() # ordinary least-squares linear regression
model.fit(data_X, data_y) # train
print(model.predict(data_X[:4, :])) # predict on the first four samples
print(data_y[:4])
print(model.coef_) # fitted slopes, i.e. the weight of each input feature
print(model.intercept_) # fitted intercept
print(model.get_params()) # the constructor parameters the model was created with
# {'copy_X': True, 'fit_intercept': True, 'n_jobs': 1, 'normalize': False}
print(model.score(data_X, data_y)) # R^2 of the fit on the given data
import matplotlib.pyplot as plt
# Generate a synthetic 1-feature regression dataset X with noisy linear targets y
X, y = datasets.make_regression(n_samples=100, n_features=1, n_targets=1, noise=10)
print(X[:5, :])
plt.scatter(X, y)
plt.show()
from sklearn import preprocessing
a = np.array([[10, 2.7, 3.6],
[-100, 5, -2],
[120, 20, 40]])
print(a)
print(preprocessing.scale(a)) # standardize each column (zero mean, unit variance)
from sklearn.datasets.samples_generator import make_classification
from sklearn.svm import SVC
# NOTE(review): sklearn.datasets.samples_generator was removed in newer
# scikit-learn; make_classification now lives in sklearn.datasets directly.
X, y = make_classification(n_samples=300, n_features=2, n_redundant=0, n_informative=2,
random_state=22, n_clusters_per_class=1, scale=100) # generate a toy 2-feature classification set
# redundant = superfluous features; informative = features carrying real signal
X = preprocessing.scale(X) # standardize the features before training
# plt.scatter(X[:, 0], X[:, 1], c=y)
# plt.show()
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.3)
model = SVC() # support-vector classifier (regularization helps against overfitting)
model.fit(X_train, y_train)
print(model.score(X_test, y_test)) # with scaling the score is much higher (~0.944): feature ranges directly affect accuracy
# --- Demo: cross-validation with several train/test folds ---
from sklearn.model_selection import cross_val_score
iris = datasets.load_iris() # load the iris dataset again
iris_X = iris.data # feature table
iris_y = iris.target # label vector
# Split the data into a training set and a test set
X_train, X_test, y_train, y_test = train_test_split(iris_X, iris_y, test_size=0.3)
knn = KNeighborsClassifier(n_neighbors=5) # k-NN using the 5 nearest points
# Single-split evaluation (disabled):
# knn.fit(X_train, y_train) # train
# print(knn.score(X_test, y_test)) # score of one single split
# NOTE(review): the call below reuses the X, y produced by make_classification
# above, NOT the freshly loaded iris arrays — likely an oversight in the demo.
scores = cross_val_score(knn, X, y, cv=5, scoring='accuracy') # 5-fold cross-validation
print(scores) # one accuracy per fold (1-D array)
print(scores.mean())
# Pick the best hyper-parameter: the neighbour count k with the best CV score
k_range = range(1, 31) # candidate values for n_neighbors
k_scores = []
for k in k_range: # different models could be compared the same way
    knn = KNeighborsClassifier(n_neighbors=k) # rebuild the model with this k
    # scores = cross_val_score(knn, X, y, cv=10, scoring='accuracy') # for classification problems
    loss = -cross_val_score(knn, X, y, cv=10, scoring='neg_mean_squared_error') # for regression; negate to get a positive loss
    k_scores.append(loss.mean()) # average loss over the 10 folds
# print(k_scores)
plt.plot(k_range, k_scores)
plt.xlabel('n_neighbors=k')
plt.ylabel('accuracy')
plt.show()
# Best around n_neighbors=10; larger values start to overfit
# How to visualize overfitting: plot a learning curve (training size vs. loss).
# The demo below is disabled (wrapped in a string literal); remove the quotes to run it.
'''
from sklearn.model_selection import learning_curve
from sklearn.datasets import load_digits
digits = load_digits()
X = digits.data
y = digits.target
train_sizes, train_loss, test_loss = learning_curve(
    SVC(gamma=0.001), X, y, cv=5, scoring='neg_mean_squared_error', train_sizes=[i/10 for i in range(1, 11)]
)  # 多组测试的方法,传入训练数量的百分比点
# print(train_sizes)   # 得到每个时间段训练的数量,组成的一维数组
# print(train_loss)  # 得到相应的二维数组,列数=分组数,行数=时间段的个数
# print(test_loss)  # 得到相应的二维数组,列数=分组数,行数=时间段的个数
train_loss_mean = -np.mean(train_loss, axis=1)   # 在表格右侧求平均,增加列,行不变,即axis=1
test_loss_mean = -np.mean(test_loss, axis=1)
plt.plot(train_sizes, train_loss_mean, 'o-', color='r', label='Training')
plt.plot(train_sizes, test_loss_mean, 'o-', color='g', label='Testing')
plt.xlabel('train_sizes')
plt.ylabel('loss')
plt.show()   # 若将SVC模型的gamma参数改为0.01,便会产生过拟合
'''
# How to find the best model hyper-parameter: plot a validation curve over gamma.
# The demo below is disabled (wrapped in a string literal); remove the quotes to run it.
'''
from sklearn.model_selection import validation_curve
from sklearn.datasets import load_digits
digits = load_digits()
X = digits.data
y = digits.target
param_range = np.logspace(-6, -2.3, 5)   # 新参数
train_loss, test_loss = validation_curve(
    SVC(), X, y, param_name='gamma', param_range=param_range,
    cv=10, scoring='neg_mean_squared_error')  # 返回值无train_sizes,参数无train_sizes,新增了gamma参数
train_loss_mean = -np.mean(train_loss, axis=1)   # 在表格右侧求平均,增加列,行不变,即axis=1
test_loss_mean = -np.mean(test_loss, axis=1)
plt.plot(param_range, train_loss_mean, 'o-', color='r', label='Training')
plt.plot(param_range, test_loss_mean, 'o-', color='g', label='Testing')
plt.xlabel('gamma')
plt.ylabel('loss')
plt.show()   # 根据图像可直观地看出,最优参数gamma=0.0005左右
'''
# --- Demo: exporting and re-importing a trained model ---
from sklearn import svm
iris = datasets.load_iris()
X, y = iris.data, iris.target
model = SVC()
model.fit(X,y)
# Method 1: persist with the pickle module
import pickle
with open('model.pkl', 'wb')as f:
    pickle.dump(model, f)
with open('model.pkl', 'rb')as f:
    model2 = pickle.load(f)
print(model2.predict(X[0:3])) # sanity check on the first 3 rows
# Method 2: joblib — more efficient for models holding large numpy arrays
# NOTE(review): sklearn.externals.joblib was removed from newer scikit-learn;
# use `import joblib` directly on current versions.
from sklearn.externals import joblib
joblib.dump(model, 'model_joblib.pkl') # save the model
model3 = joblib.load('model_joblib.pkl')
print(model3.predict(X[0:6]))
| [
"1050521852@qq.com"
] | 1050521852@qq.com |
18436012318d91b86faa7a1b49103c1a1b6c4fa6 | 70a6142a7369b082aa5131aed13c074532bfca1f | /RLWorkflow/common/mpi_moment.py | 8f4b09b4dda29ba115e764ed899c8b8253884dc0 | [] | no_license | nivekhah/RLTaskOffloading | 2ea5f56add6a5ca548e7c0096ca890c5220e3530 | d07d7a6334ae2c9aa93f8c0e70ff46393c189ee4 | refs/heads/master | 2022-04-08T06:17:03.242553 | 2020-03-14T12:12:38 | 2020-03-14T12:12:38 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,971 | py | from mpi4py import MPI
import numpy as np
from RLWorkflow.common.misc_util import zipsame
def mpi_mean(x, axis=0, comm=None, keepdims=False):
    """Mean of `x` along `axis` across all MPI ranks.

    Each rank contributes its local sum and its local element count along
    `axis`; both are combined with a single Allreduce so every rank ends up
    with the same global mean.

    Returns:
        (mean, count): the global mean (array shaped like the local reduced
        sum) and the global number of rows reduced along `axis`.
    """
    x = np.asarray(x)
    assert x.ndim > 0
    if comm is None: comm = MPI.COMM_WORLD
    xsum = x.sum(axis=axis, keepdims=keepdims)
    n = xsum.size
    # Pack the local sum and the local row count into one buffer so a single
    # Allreduce handles both values at once.
    localsum = np.zeros(n+1, x.dtype)
    localsum[:n] = xsum.ravel()
    localsum[n] = x.shape[axis]
    globalsum = np.zeros_like(localsum)
    comm.Allreduce(localsum, globalsum, op=MPI.SUM)
    return globalsum[:n].reshape(xsum.shape) / globalsum[n], globalsum[n]
def mpi_moments(x, axis=0, comm=None, keepdims=False):
    """Mean and standard deviation of `x` along `axis` across all MPI ranks.

    Computed as two distributed passes of mpi_mean: first the mean, then the
    mean of squared deviations (whose square root is the std).

    Returns:
        (mean, std, count) with `count` the global number of elements reduced
        along `axis`.
    """
    x = np.asarray(x)
    assert x.ndim > 0
    mean, count = mpi_mean(x, axis=axis, comm=comm, keepdims=True)
    sqdiffs = np.square(x - mean)
    meansqdiff, count1 = mpi_mean(sqdiffs, axis=axis, comm=comm, keepdims=True)
    assert count1 == count
    std = np.sqrt(meansqdiff)
    if not keepdims:
        # Drop the reduced axis, mimicking keepdims=False of np.mean/np.std.
        newshape = mean.shape[:axis] + mean.shape[axis+1:]
        mean = mean.reshape(newshape)
        std = std.reshape(newshape)
    return mean, std, count
def test_runningmeanstd():
    """Launch 3 MPI ranks that each run the distributed-moments check.

    Fix: the original spawned
    `from baselines.common.mpi_moments import _helper_runningmeanstd`, which
    points at OpenAI Baselines rather than this module
    (RLWorkflow.common.mpi_moment), so the subprocess raised ImportError
    unless baselines happened to be installed. The command now targets the
    helper defined in this file.
    """
    import subprocess
    subprocess.check_call(['mpirun', '-np', '3',
        'python','-c',
        'from RLWorkflow.common.mpi_moment import _helper_runningmeanstd; _helper_runningmeanstd()'])
def _helper_runningmeanstd():
    """Worker run on each of the 3 MPI ranks by test_runningmeanstd.

    Each rank owns one chunk of the data; the distributed moments over the
    chunks must match numpy's moments of the concatenation.
    """
    comm = MPI.COMM_WORLD
    np.random.seed(0)
    # Three cases: 1-D chunks, 2-D chunks reduced over axis 0, and 2-D chunks
    # reduced over axis 1.
    for (triple,axis) in [
        ((np.random.randn(3), np.random.randn(4), np.random.randn(5)),0),
        ((np.random.randn(3,2), np.random.randn(4,2), np.random.randn(5,2)),0),
        ((np.random.randn(2,3), np.random.randn(2,4), np.random.randn(2,4)),1),
        ]:
        x = np.concatenate(triple, axis=axis)
        ms1 = [x.mean(axis=axis), x.std(axis=axis), x.shape[axis]]
        # Each rank passes only its own chunk to the distributed version.
        ms2 = mpi_moments(triple[comm.Get_rank()],axis=axis)
        for (a1,a2) in zipsame(ms1, ms2):
            print(a1, a2)
            assert np.allclose(a1, a2)
        print("ok!")
| [
"jw855@exeter.ac.uk"
] | jw855@exeter.ac.uk |
b7d65448e1c658d3cc0b42437060aee5c8c46e72 | ca002961fa07883ff79ea67713bbc79e0ac79d28 | /plugins/brains/BBWander.py | 5c642987580df24602062aadb1efb8cb65ea2809 | [] | no_license | mindgitrwx/pyrobot3 | e51f8f1bac01a2509f2d89668102770053c16f56 | 45216c0c11f5efaaa4042916b2fe8eaac00fc4a7 | refs/heads/master | 2020-03-23T19:28:44.395949 | 2018-10-03T22:06:42 | 2018-10-03T22:06:42 | 141,980,775 | 0 | 3 | null | 2018-09-14T11:20:00 | 2018-07-23T07:53:27 | Python | UTF-8 | Python | false | false | 2,185 | py | # A Behavior-based control system
from pyrobot.brain.fuzzy import *
from pyrobot.brain.behaviors import *
import math, time
class Avoid (Behavior):
    """Fuzzy obstacle-avoidance behavior: slow and turn away when something
    is close in front, drive straight otherwise."""
    def setup(self): # called when created
        """Initialize the update counter and timestamp."""
        self.lasttime = time.time()
        self.count = 0
    def direction(self, dir):
        """ computes opposite direction given an angle"""
        # Obstacle on the right (negative angle) -> turn left (+0.9), else right.
        if dir < 0.0:
            return 0.9
        else:
            return -0.9
    def update(self):
        # Every 50th update the counter/timestamp are reset.
        # NOTE(review): currtime is never used, so this block appears
        # vestigial (possibly leftover rate-measuring code).
        if self.count == 50:
            currtime = time.time()
            self.count = 0
            self.lasttime = time.time()
        else:
            self.count += 1
        # Closest front-facing range reading and its bearing.
        close_dist, angle = min( [(s.distance(), s.angle(unit="radians")) for s in self.robot.range["front-all"]])
        max_sensitive = self.robot.range.getMaxvalue() * 0.8
        # Fuzzy rules: stop and rotate away when too close, cruise when clear.
        # `<<` / `>>` are the fuzzy membership tests of the Fuzzy class
        # (below / above the ramp) — TODO confirm against pyrobot.brain.fuzzy.
        self.IF(Fuzzy(0.1, max_sensitive) << close_dist, 'translate', 0.0, "TooClose")
        self.IF(Fuzzy(0.1, max_sensitive) >> close_dist, 'translate', 0.3, "Ok")
        self.IF(Fuzzy(0.1, max_sensitive) << close_dist, 'rotate', self.direction(angle), "TooClose")
        self.IF(Fuzzy(0.1, max_sensitive) >> close_dist, 'rotate', 0.0, "Ok")
class TurnAround(State):
    """State that spins in place until the front range path is clear,
    then hands control back to state1."""
    def update(self):
        # Anything closer than 1.0 (sensor units) in front: rotate only.
        if min([s.distance() for s in self.robot.range["front-all"]]) < 1.0:
            self.move(0, .2)
        else:
            self.goto("state1")
class state1 (State):
    """Default wander state: runs the Avoid behavior and escalates to the
    TurnAround state when an obstacle gets close."""
    def setup(self):
        self.add(Avoid(1, {'translate': .3, 'rotate': .3}))
        # Fix: the original called print(("initialized state", self.name)),
        # a 2to3 artifact that printed the tuple repr instead of the message.
        print("initialized state", self.name)
    def update(self):
        # Obstacle within 1 unit in front: switch to the turn-around state.
        if min([s.distance() for s in self.robot.range["front-all"]]) < 1:
            self.goto("TurnAround")
def INIT(engine): # passes in robot, if you need it
    """Pyrobot entry point: build and return the behavior-based brain.

    Wires the robot's translate/rotate/update callbacks into the brain,
    registers the two states and activates 'state1'.
    """
    brain = BehaviorBasedBrain({'translate' : engine.robot.translate, \
                                'rotate' : engine.robot.rotate, \
                                'update' : engine.robot.update }, engine)
    # add a few states:
    brain.add(state1()) # non active
    brain.add(TurnAround()) # non active
    # activate a state:
    brain.activate('state1') # could have made it active in constructor
    return brain
| [
"hybridkernal@gmail.com"
] | hybridkernal@gmail.com |
13f93076286f644bf0ff84425653e325e5ec4bbf | b12847bc4b3a15b0f771840d6c3c01123e44f0f5 | /a1_api/run.py | c161a3fd8217e80772d30c1a258a895627eebbde | [] | no_license | yue12317xiang/python_api | e231dbccb18cc450b04c9e168cfdbeb7c9027c8a | 28d0d74347f8fbd8186b067a19b2dfc58a46828e | refs/heads/master | 2021-03-21T09:55:42.915372 | 2020-07-02T11:13:50 | 2020-07-02T11:13:50 | 247,283,908 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 700 | py | from httprquest import HttpRequest
from do_excel import Do_Excel
COOKIES = None
def runn(test_data,sheet_name):
    """Execute every API test case in `test_data` and write responses back
    to the given Excel sheet.

    Each item is a row mapping with 'url', 'data' (a dict literal stored as a
    string, hence the eval), 'method' and 'code_id' (row index). Cookies
    returned by any response (e.g. login) are kept in the module-global
    COOKIES and replayed on subsequent requests.

    NOTE(review): eval() on spreadsheet contents executes arbitrary code;
    this is only safe for trusted test data (ast.literal_eval would be safer).
    """
    global COOKIES
    for item in test_data:
        res = HttpRequest().httrequest(item["url"],eval(item["data"]),item["method"],cookies=COOKIES)
        if res.cookies:
            COOKIES = res.cookies
        print("响应结果为:",res.json())
        # Write the JSON response into the sheet; code_id is 0-based, rows 1-based.
        Do_Excel().write_data("data.xlsx",sheet_name,item["code_id"]+1,str(res.json()))
# Drive the three sheets in order; the pinyin names presumably mean
# denglu=login, zhuce=register, chongzhi=recharge — cookies captured during
# login are reused by the later sheets.
test_data = Do_Excel().get_data("data.xlsx","denglu")
runn(test_data,"denglu")
test_data = Do_Excel().get_data("data.xlsx","zhuce")
runn(test_data,"zhuce")
test_data = Do_Excel().get_data("data.xlsx","chongzhi")
runn(test_data,"chongzhi")
| [
"15119724465@qq.com"
] | 15119724465@qq.com |
b0eba99c0ca25ed04ea431a7bee9a18f308d4931 | 646cadb1c72ef4a060343baf2fcbe271958b6878 | /tigerjython/TJExamples/10-Ef/Eff4d.py | a11bfecbf166ccc406e98f9264dc1a5edaf3fec4 | [] | no_license | tigerjython/tjinstall | bd75cf8e4ae27b639a13865ef1ec5710391a2938 | aab61519b5299c2ab4f423c6fc5d8ea7c7860a99 | refs/heads/master | 2021-01-17T08:53:50.386905 | 2018-01-12T06:56:28 | 2018-01-12T06:56:28 | 40,659,466 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,151 | py | from gamegrid import *
locations = {
'Althaus':Location(2, 0),
'Bellevue':Location(0, 1),
'City':Location(1, 3),
'Dom':Location(4, 2),
'Enge':Location(5, 0),
'Friedhof':Location(3, 4)}
neighbours = {
'Althaus':['Bellevue', 'Dom', 'Enge'],
'Bellevue':['Althaus', 'City', 'Dom'],
'City':['Bellevue', 'Dom', 'Friedhof'],
'Dom':['Althaus', 'Bellevue', 'City', 'Enge', 'Friedhof'],
'Enge':['Althaus', 'Dom'],
'Friedhof':['Althaus', 'City', 'Dom']}
distances = {('Althaus', 'Bellevue'):5, ('Althaus', 'Dom'):9,
('Althaus', 'Enge'):6, ('Althaus', 'Friedhof'):15,
('Bellevue', 'City'):3, ('Bellevue', 'Dom'):13,
('City', 'Dom'):4, ('City', 'Friedhof'):3,
('Dom', 'Enge'):2, ('Dom', 'Friedhof'):12}
def getNeighbourDistance(station1, station2):
    """Return the stored edge length between two directly connected stations.

    The distances table keys each pair with the lexicographically smaller
    name first, so the two names are ordered before the lookup.
    """
    key = (station1, station2) if station1 < station2 else (station2, station1)
    return distances[key]
def totalDistance(li):
    """Sum the leg lengths along a route given as a list of station names."""
    return sum(getNeighbourDistance(a, b) for a, b in zip(li, li[1:]))
def drawGraph():
    """Draw the whole station graph: one blue dot plus label per station,
    an edge line per connection, and the edge length at each midpoint."""
    getBg().clear()
    getBg().setPaintColor(Color.blue)
    for station in locations:
        location = locations[station]
        getBg().fillCircle(toPoint(location), 10)
        startPoint = toPoint(location)
        getBg().drawText(station, startPoint)
        for s in neighbours[station]:
            drawConnection(station, s)
            # distances keys are ordered (smaller name first)
            if s < station:
                distance = distances[(s, station)]
            else:
                distance = distances[(station, s)]
            endPoint = toPoint(locations[s])
            # Label the edge at its midpoint.
            getBg().drawText(str(distance),
                getDividingPoint(startPoint, endPoint, 0.5))
    refresh()
def drawConnection(startStation, endStation):
    """Draw the edge between two stations as a straight background line."""
    startPoint = toPoint(locations[startStation])
    endPoint = toPoint(locations[endStation])
    getBg().drawLine(startPoint, endPoint)
def search(station):
    """Depth-first enumeration of all simple paths from `station`, keeping
    the shortest route to `targetStation` found so far.

    Uses module globals: `visited` (current path), `targetStation`, and
    `trackToTarget`/`trackLength` (best route and its length).
    """
    global trackToTarget, trackLength
    visited.append(station) # station marked as visited
    # Check for solution
    if station == targetStation:
        currentDistance = totalDistance(visited)
        if currentDistance < trackLength:
            trackLength = currentDistance
            trackToTarget = visited[:]
    for s in neighbours[station]:
        if s not in visited: # if all are visited, recursion returns
            search(s) # recursive call
    visited.pop() # station may be visited by another path
def getStation(location):
    """Reverse lookup: map a board Location back to its station name.

    Returns None when no station sits at that location.
    """
    for name, loc in locations.items():
        if loc == location:
            return name
    return None # no station at this location
def init():
    """Reset the path-search state and redraw the plain graph."""
    global visited, trackToTarget, trackLength
    visited = []
    trackToTarget = []
    trackLength = 1000 # sentinel: larger than any real route length
    drawGraph()
def pressEvent(e):
    """Mouse handler: first click picks the start station (red), second click
    picks the target station (green), runs the search and highlights the
    shortest route in black."""
    global isStart, startStation, targetStation
    mouseLoc = toLocationInGrid(e.getX(), e.getY())
    mouseStation = getStation(mouseLoc)
    if mouseStation == None:
        return # click was not on a station
    if isStart:
        # First click: choose the start station and reset the board.
        isStart = False
        init()
        setTitle("Klicke auf Zielstation")
        startStation = mouseStation
        getBg().setPaintColor(Color.red)
        getBg().fillCircle(toPoint(mouseLoc), 10)
    else:
        # Second click: choose the target, search, and report the result.
        isStart = True
        setTitle("Noch einmal? Klicke auf Startstation")
        targetStation = mouseStation
        getBg().setPaintColor(Color.green)
        getBg().fillCircle(toPoint(mouseLoc), 10)
        search(startStation)
        setStatusText("Kürzester Weg von " + startStation + " nach "
            + targetStation + ": " + str(trackToTarget) + " Länge = "
            + str(trackLength))
    # Highlight the best route (empty after a start click, so this is a no-op then).
    for i in range(len(trackToTarget) - 1):
        s1 = trackToTarget[i]
        s2 = trackToTarget[i + 1]
        getBg().setPaintColor(Color.black)
        getBg().setLineWidth(3)
        drawConnection(s1, s2)
        getBg().setLineWidth(1)
    refresh()
# --- Main script: build the board and wait for mouse clicks ---
isStart = True # the next click selects the start station
makeGameGrid(7, 5, 100, None, "sprites/city.png", False,
    mousePressed = pressEvent)
setTitle("City Guide. Klicke auf Startstation")
addStatusBar(30)
show()
init()
| [
"support@tigerjython.com"
] | support@tigerjython.com |
773fa456f16adc76fdbca0568bf8feb723dfad1b | 2d4af29250dca8c72b74e190e74d92f1467120a0 | /TaobaoSdk/Request/TaohuaChildcatesGetRequest.py | a624463f6f4d5bb1765b77cb318501d6f0daeeac | [] | no_license | maimiaolmc/TaobaoOpenPythonSDK | 2c671be93c40cf487c0d7d644479ba7e1043004c | d349aa8ed6229ce6d76a09f279a0896a0f8075b3 | refs/heads/master | 2020-04-06T03:52:46.585927 | 2014-06-09T08:58:27 | 2014-06-09T08:58:27 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,548 | py | #! /usr/bin/env python
# -*- coding: utf-8 -*-
# vim: set ts=4 sts=4 sw=4 et:
## @brief 通过类目ID获取它的类目列表
# @author wuliang@maimiaotech.com
# @date 2012-07-03 10:25:14
# @version: 0.0.0
import os
import sys
import time
def __getCurrentPath():
    # Directory containing this file (symlinks resolved).
    return os.path.normpath(os.path.join(os.path.realpath(__file__), os.path.pardir))


# Put the SDK's parent directory on sys.path so sibling packages import
# regardless of the current working directory.
__modulePath = os.path.join(__getCurrentPath(), os.path.pardir)
__modulePath = os.path.normpath(__modulePath)
if __modulePath not in sys.path:
    sys.path.insert(0, __modulePath)
## @brief <SPAN style="font-size:16px; font-family:'宋体','Times New Roman',Georgia,Serif;">通过类目ID获取它的类目列表</SPAN>
# <UL>
# </UL>
class TaohuaChildcatesGetRequest(object):
    """Request object for the taobao.taohua.childcates.get API: list the
    child categories of a given category.

    Attributes:
        method: fixed Taobao API method name for this request.
        timestamp: unix timestamp to send with the request; defaults to the
            moment the object is created.
        cate_id: optional Number — the parent category whose children are
            to be listed.
    """

    def __init__(self):
        # NOTE: super(self.__class__, ...) is kept from the original; it is
        # only safe because this class is instantiated directly.
        super(self.__class__, self).__init__()
        # API name (taobao.taohua.childcates.get).
        self.method = "taobao.taohua.childcates.get"
        # Timestamp; if not overridden, the request uses creation time.
        self.timestamp = int(time.time())
        # Parent category ID (optional, Number).
        self.cate_id = None
| [
"liyangmin@maimiaotech.com"
] | liyangmin@maimiaotech.com |
b8c0e25b21b256f84885af19f89d37a2572cf605 | 2fae2bd977cdfe0eac5cdffddd669db84d18e1ef | /dbg_merge_rasters.py | 63bab60c3f3f2c533ce65b6513765bea16d3cd80 | [] | no_license | nuhnuh/raster_server | b84c3fcf3b804c6937d39353c61378cd035b1e03 | c14d6d602c71b986ec995dc1c89e49051e39bcb9 | refs/heads/master | 2020-03-29T22:00:41.816830 | 2018-10-25T12:37:13 | 2018-10-25T12:37:13 | 150,398,796 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 12,802 | py | #!/usr/bin/env python3
"""
TODO: merge subimgs of the tifs intersections
"""
import glob
from osgeo import gdal
from osgeo import ogr
import numpy as np
import os
import matplotlib.pyplot as plt
import cv2
# Output directory for debug artifacts; expanduser is a no-op for '/tmp' but
# keeps the pattern uniform if a '~/...' path is substituted.
dst_dir = '/tmp'
dst_dir = os.path.expanduser( dst_dir )
def get_tif_filenames() :
    """List every GeoTIFF under the hard-coded source folder.

    Returns the (unsorted) paths matching the glob pattern; empty when the
    folder does not exist.
    """
    pattern = 'data2/orto/*.tif'
    return glob.glob( pattern )
def load_rasters_md( filenames ) :
    """Read per-file raster metadata with GDAL.

    Returns two parallel lists, in the order of `filenames`: the GDAL
    geotransforms and the (RasterXSize, RasterYSize) pixel dimensions.
    """
    geotransforms = []
    sizes = []
    for filename in filenames :
        dataset = gdal.Open( filename )
        geotransforms.append( dataset.GetGeoTransform() )
        sizes.append( (dataset.RasterXSize, dataset.RasterYSize) )
    return geotransforms, sizes
def get_bbox( gt, sz ) :
    """World-coordinate bounding box of a raster.

    `gt` is a GDAL geotransform (x0, dx, dxdy, y0, dydx, dy) and `sz` the
    (columns, rows) pixel size. Returns (x_top_left, y_top_left,
    x_bottom_right, y_bottom_right); note dy is negative for north-up
    rasters, which is what makes y_br the bottom edge.
    """
    origin_x, pix_w, _, origin_y, _, pix_h = gt
    cols, rows = sz
    return (origin_x, origin_y, origin_x + pix_w * cols, origin_y + pix_h * rows)
def bbox2geom( bbox ) :
    """Convert an (x_left, y_top, x_right, y_bottom) box into a closed OGR
    polygon geometry."""
    xLeft, yTop, xRight, yBottom = bbox
    ring = ogr.Geometry(ogr.wkbLinearRing)
    ring.AddPoint_2D(xLeft, yTop)
    ring.AddPoint_2D(xLeft, yBottom)
    ring.AddPoint_2D(xRight, yBottom)
    ring.AddPoint_2D(xRight, yTop)
    ring.AddPoint_2D(xLeft, yTop) # repeat the first vertex to close the ring
    geom = ogr.Geometry(ogr.wkbPolygon)
    geom.AddGeometry(ring)
    return geom
def geom2bbox( geom ) :
    """Inverse of bbox2geom: turn an OGR geometry envelope into the
    (x_left, y_top, x_right, y_bottom) box convention used in this module.

    OGR's GetEnvelope returns (xmin, xmax, ymin, ymax), which is reordered
    here.
    """
    x_min, x_max, y_min, y_max = geom.GetEnvelope()
    return x_min, y_max, x_max, y_min
def find_intersecting_rasters( rasters_geom, roi_bbox ) :
    """Indices of the geometries in `rasters_geom` that intersect `roi_bbox`."""
    # https://pcjericks.github.io/py-gdalogr-cookbook/geometry.html
    roi_geom = bbox2geom( roi_bbox )
    return [idx for idx, raster_geom in enumerate(rasters_geom)
            if roi_geom.Intersect(raster_geom)]
def raster2img( raster ) :
    """Read all bands of a GDAL raster into a numpy array.

    Returns a 2-D array for a single-band raster, otherwise a dstack of all
    bands (H x W x bands).

    NOTE(review): the warning printed below is stale — the commented-out
    min(3, nbands) cap was removed, so ALL bands are read, not only 3; the
    print also fires once per band rather than once per call.
    """
    nbands = raster.RasterCount
    xcount = raster.RasterXSize
    ycount = raster.RasterYSize
    img = None
    # for k in range(min(3,nbands)) :
    for k in range(nbands) :
        band = raster.GetRasterBand( 1+k ) # 1-based index
        data = band.ReadAsArray( 0, 0, xcount, ycount )
        print('WARNING: raster2img uses only 3 bands')
        if img is None :
            img = data
        else:
            img = np.dstack(( img, data ))
    return img
def load_roi_from_tif( fn, roi_bbox ) :
    """Crop the world-coordinate box `roi_bbox` out of the GeoTIFF `fn`.

    `roi_bbox` is (x_min, y_max, x_max, y_min). The crop is returned as an
    in-memory GDAL dataset with the same pixel size, band count, data type
    and projection as the source, and a geotransform snapped to the source
    pixel grid. Requires square pixels and a ROI fully inside the source
    raster (enforced by the asserts below).
    """
    # Open source raster (.tif) and find pixel size, nbands and data type
    raster_in = gdal.Open( fn )
    gt = raster_in.GetGeoTransform()
    x0, dx, dxdy, y0, dydx, dy = gt
    assert( abs(dx) == abs(dy) )
    dy = abs(dy)
    pixel_size = dx
    nbands = raster_in.RasterCount
    data_type = raster_in.GetRasterBand(1).DataType
    # ROI_world (x,y) to ROI_pix (j,i): j grows with x, i grows downward
    # (hence the minus sign on the y conversions).
    x_min, y_max, x_max, y_min = roi_bbox
    j0 = int(round( ( x_min - x0 ) / pixel_size ))
    j1 = int(round( ( x_max - x0 ) / pixel_size ))
    i0 = int(round( - ( y_min - y0 ) / pixel_size ))
    i1 = int(round( - ( y_max - y0 ) / pixel_size ))
    # origin for ROI
    j_off, i_off = int(j0), int(i1) # upper left
    #
    j_count = j1 - j0
    i_count = i0 - i1
    assert( j_count > 0 )
    assert( i_count > 0 )
    assert( j_count <= raster_in.RasterXSize )
    assert( i_count <= raster_in.RasterYSize )
    # World coordinates of the crop's upper-left corner, snapped to the grid.
    x_off = x0 + j0 * pixel_size
    y_off = y0 - i1 * pixel_size
    # Create the destination data source
    raster_out = gdal.GetDriverByName('MEM').Create( '', j_count, i_count, nbands, data_type )
    raster_out.SetGeoTransform(( x_off, pixel_size, 0, y_off, 0, -pixel_size ))
    # Setting spatial reference of output raster
    # wkt = raster_in.GetProjection()
    # from osgeo import osr
    # srs = osr.SpatialReference()
    # srs.ImportFromWkt(wkt)
    # raster_out.SetProjection( srs.ExportToWkt() )
    raster_out.SetProjection( raster_in.GetProjection() )
    # copy ROI band by band
    for k in range(nbands) :
        band = raster_in.GetRasterBand( 1+k ) # 1-based index
        # print( '##########', xoff, yoff, xcount, ycount )
        print('j0,i1,j_count,i_count:', j0, i1, j_count, i_count)
        data = band.ReadAsArray( j0, i1, j_count, i_count )
        print('data.shape:', data.shape)
        # print( '##########', k, data.shape )
        band2 = raster_out.GetRasterBand( 1+k ) # 1-based index
        band2.WriteArray( data )
    # does not work :/
    # band2.SetColorInterpretation( band.GetRasterColorInterpretation() )
    # band2.SetRasterColorInterpretation( gdal.GCI_Undefined )
    return raster_out
def get_roi( filenames, rasters_geom, roi_bbox ) :
    """Return an in-memory GDAL raster holding the pixels of `roi_bbox`.

    Looks for a single source tif whose footprint fully contains the ROI and
    crops it. Building the ROI as a mosaic of several partially overlapping
    tifs is not implemented yet.

    Raises:
        NotImplementedError: when no single tif covers the whole ROI.
    """
    print( 'roi_bbox:', roi_bbox )
    idxs = find_intersecting_rasters( rasters_geom, roi_bbox )
    roi_geom = bbox2geom( roi_bbox )
    for idx in idxs :
        print( 'raster_bbox:', geom2bbox(rasters_geom[idx]) )
        # ROI ∩ raster == ROI  <=>  the raster fully contains the ROI.
        if geom2bbox(roi_geom.Intersection(rasters_geom[idx])) == roi_bbox :
            raster = load_roi_from_tif( filenames[idx], roi_bbox )
            print( 'a tif contains the full roi' )
            return raster
    print( 'no tif contains the full roi' )
    # Fix: the original did `raise 'TODO'`, which is itself a TypeError in
    # Python 3 (exceptions must derive from BaseException). The mosaic
    # branch below is unreachable work-in-progress kept for reference.
    raise NotImplementedError('TODO: merge the roi from several tifs')
    for idx in idxs :
        # intersect each raster with the roi
        ibbox = geom2bbox(roi_geom.Intersection(rasters_geom[idx]))
        # load subimg of the intersection
        iraster = load_roi_from_tif( filenames[idx], ibbox )
        # Fix: the original called raster2img(subimg) on a not-yet-defined
        # name; the crop to convert is `iraster`.
        subimg = raster2img( iraster )
        #
        plt.imshow( subimg )
        plt.show()
def test1() :
    """Debug routine: plot the tif footprints, their common intersection and
    a manually chosen ROI, then crop and display the ROI pixels.

    Fix: the trailing `raise '…'` string raise (a TypeError in Python 3) is
    replaced with a proper NotImplementedError carrying the same question.
    """
    # load rasters (tifs) geometry
    filenames = get_tif_filenames()
    print( len(filenames), ' tifs available' )
    assert( len(filenames) > 0 ), 'folder empty?'
    # filenames = filenames[:10]
    rasters_gt, rasters_sz = load_rasters_md( filenames )
    [ print( fn, get_bbox(gt,sz) ) for fn, gt, sz in zip(filenames, rasters_gt, rasters_sz) ]
    #
    rasters_geom = [ bbox2geom(get_bbox(gt,sz)) for gt, sz in zip(rasters_gt, rasters_sz) ]
    # find the intersection of the rasters
    print('------')
    print( 'finding the intersection of the 4 tifs:' )
    geom = rasters_geom[0]
    print( geom )
    for geom_k in rasters_geom[1:] :
        print( geom_k )
        geom = geom.Intersection( geom_k )
    # Plot every footprint plus the intersection (dotted black).
    for geom_k in rasters_geom :
        xy = np.asarray( geom_k.Boundary().GetPoints() )
        x, y = xy[:,0], xy[:,1]
        plt.plot( x, y )
    xy = np.asarray( geom.Boundary().GetPoints() )
    x, y = xy[:,0], xy[:,1]
    plt.plot( x, y, ':k' )
    plt.axis('equal')
    plt.show()
    # check that bbox2geom and geom2bbox work OK (round-trips must agree)
    print('------')
    print( 'checking bbox2geom and geom2bbox:' )
    print( geom )
    print( geom2bbox( geom ) )
    print( bbox2geom(geom2bbox( geom )) )
    print( geom2bbox(bbox2geom(geom2bbox( geom ))) )
    # define a 10 m wider roi than the intersection of the 4 tifs
    print('------')
    print( 'defining a ROI' )
    roi_bbox = geom2bbox( geom )
    print( 'roi_bbox:', roi_bbox )
    roi_bbox = (614180.0-10, 4734050.0+10, 614290.0+10, 4733940.0-10)
    print( 'roi_bbox:', roi_bbox )
    # Plot the footprints again together with the chosen ROI.
    for geom_k in rasters_geom :
        xy = np.asarray( geom_k.Boundary().GetPoints() )
        x, y = xy[:,0], xy[:,1]
        plt.plot( x, y )
    xy = np.asarray( bbox2geom(roi_bbox).Boundary().GetPoints() )
    x, y = xy[:,0], xy[:,1]
    plt.plot( x, y, ':k' )
    plt.axis('equal')
    plt.show()
    # # dbg
    # roi_bbox = geom2bbox( geom )
    # for idx in range(len(filenames)) :
    #     print('-- ', idx)
    #     img = load_roi_from_tif( filenames[idx], roi_bbox )
    #     img = raster2img( img )
    #     print(img)
    # load pixels in the intersection of images
    roi_raster = get_roi( filenames, rasters_geom, roi_bbox )
    roi_img = raster2img( roi_raster )
    plt.imshow( roi_img )
    plt.show()
    raise NotImplementedError('what if roi_bbox is not fully included in any of the tifs?')
def draw_mask( subraster, source_layer ) :
    """Rasterize `source_layer` into a 1-band mask aligned with `subraster`.

    Burned polygons get value 255; 128 is registered as the nodata value.
    Returns an in-memory mask raster with the same size, geotransform and
    data type as `subraster`. (`nbands` below is computed but unused.)
    """
    nbands = subraster.RasterCount
    data_type = subraster.GetRasterBand(1).DataType
    subraster_mask = gdal.GetDriverByName('MEM').Create( '',
        subraster.RasterXSize, subraster.RasterYSize, 1, data_type )
    subraster_mask.SetGeoTransform( subraster.GetGeoTransform() )
    # Rasterize
    band = subraster_mask.GetRasterBand(1)
    band.SetNoDataValue( 128 )
    gdal.RasterizeLayer( subraster_mask, [1], source_layer, burn_values=[255] )
    return subraster_mask
def test2() :
    """Per-building colour statistics: for every feature of the shapefile,
    crop the orthophoto around it, rasterize the footprint to a mask, erode
    the mask to avoid edge pixels, and print/collect the mean colour per
    roof type (TIPO_CUBIE)."""
    # load rasters (tifs) geometry
    filenames = get_tif_filenames()
    print( len(filenames), ' tifs available' )
    assert( len(filenames) > 0 ), 'folder empty?'
    # filenames = filenames[:10]
    rasters_gt, rasters_sz = load_rasters_md( filenames )
    rasters_geom = [ bbox2geom(get_bbox(gt,sz)) for gt, sz in zip(rasters_gt, rasters_sz) ]
    # load shp
    print( 'loading shp..' )
    shp_fn = 'data2/shp/Edif_Clases.shp'
    shp = ogr.Open( shp_fn , 0 ) # 0 means read-only. 1 means writeable.
    layer = shp.GetLayer()
    print( 'len(layer):', len(layer) )
    # (label, mean colour) pairs accumulated over all features
    data = []
    for idx, feature in enumerate(layer) :
        print( 'processing feature', idx )
        # if feature['PARCELA'] != 2981 :
        #     continue
        print( feature.ExportToJson() )
        print( feature['CUBIERTA'], feature['TIPO_CUBIE'] )
        envelope = feature.GetGeometryRef().GetEnvelope()
        xmin, xmax, ymin, ymax = envelope
        # roi_bbox = xmin, ymax, xmax, ymin
        # extend roi by a fixed margin around the building envelope
        margin = 14
        roi_bbox = xmin-margin, ymax+margin, xmax+margin, ymin-margin
        # # dbg
        # idxs = find_intersecting_rasters( rasters_geom, roi_bbox )
        # if len(idxs) < 1 :
        #     assert(False), 'roi does not intersect any raster'
        #
        subraster = get_roi( filenames, rasters_geom, roi_bbox )
        # # save subraster
        # fn = '{:06d}.tif'.format( idx )
        # fn = os.path.join( dst_dir, fn )
        # print( 'saving ', fn )
        # gdal.GetDriverByName('GTiff').CreateCopy( fn, subraster, strict=0 ) # strict=1 : report errors
        # Build an in-memory layer holding just this feature and burn it
        # into a mask aligned with the crop.
        shp_idx = ogr.GetDriverByName('Memory').CreateDataSource('')
        source_layer = shp_idx.CreateLayer('states_extent')
        source_layer.CreateFeature( feature )
        subraster_mask = draw_mask( subraster, source_layer )
        #
        img = raster2img( subraster )
        mask = raster2img( subraster_mask )
        # # dbg
        # # continue
        # print( 'mask.shape:', mask.shape )
        # ax = plt.subplot(2,3,1)
        # plt.imshow( img[:,:,0] )
        # plt.title('band 0 (R)')
        # plt.subplot(2,3,2, sharex=ax, sharey=ax )
        # plt.imshow( img[:,:,1] )
        # plt.title('band 1 (G)')
        # plt.subplot(2,3,3, sharex=ax, sharey=ax )
        # plt.imshow( img[:,:,2] )
        # plt.title('band 2 (B)')
        # plt.subplot(2,3,4, sharex=ax, sharey=ax )
        # plt.imshow( img[:,:,3] )
        # plt.title('band 3')
        # plt.subplot(2,3,5, sharex=ax, sharey=ax )
        # plt.imshow( img[:,:,:3] )
        # plt.title('RGB')
        # plt.subplot(2,3,6, sharex=ax, sharey=ax )
        # plt.imshow( mask )
        # plt.title('mask')
        # plt.show()
        # Erode the footprint mask (10x10 kernel) so boundary pixels do not
        # pollute the colour statistics.
        kernel = np.ones( (1+9,1+9), np.uint8 )
        # mask2 = cv2.dilate( mask, kernel, iterations=1 )
        mask1 = mask
        mask2 = cv2.erode( mask, kernel, iterations=1 )
        img1 = np.array( img )
        img2 = np.array( img )
        I1 = mask1 == 0
        I2 = mask2 == 0
        for k in range(img.shape[2]) :
            img1[ I1, k ] = 0
            img2[ I2, k ] = 0
        # Mean/std colour over the eroded footprint pixels.
        img3 = np.array( img )
        # img3 = img3.reshape((-1,3))
        color_m = img3[ mask2 != 0 ].mean(axis=0)
        color_std = img3[ mask2 != 0 ].std(axis=0)
        print( feature['TIPO_CUBIE'], 'color:', color_m, color_std )
        data.append( (feature['TIPO_CUBIE'], color_m) )
        continue
        # dbg (unreachable because of the `continue` above)
        print( 'mask.shape:', mask.shape )
        ax = plt.subplot(2,3,1)
        plt.imshow( img[:,:,0] )
        plt.subplot(2,3,1, sharex=ax, sharey=ax )
        plt.imshow( img[:,:,:3] )
        plt.title('RGB (original)')
        plt.subplot(2,3,2, sharex=ax, sharey=ax )
        plt.imshow( img1[...,:3] )
        plt.title('img1')
        plt.subplot(2,3,5, sharex=ax, sharey=ax )
        plt.imshow( mask1 )
        plt.title('mask1')
        plt.subplot(2,3,3, sharex=ax, sharey=ax )
        plt.imshow( img2[...,:3] )
        plt.title('img2')
        plt.subplot(2,3,6, sharex=ax, sharey=ax )
        plt.imshow( mask2 )
        plt.title('mask2')
        plt.show()
    # Dump the collected (label, mean colour) table as CSV-ish lines.
    print( 'data' )
    for label, color in data :
        print( '{},{}'.format( label, ','.join(list(map(str,color))) ) )
if __name__ == '__main__':
    # Entry point: run the per-building colour-extraction debug routine.
    # test1()
    test2()
| [
"unamirp@gmail.com"
] | unamirp@gmail.com |
21bd5066ba2a212591f1557923296b35eda07ae0 | 53fab060fa262e5d5026e0807d93c75fb81e67b9 | /backup/user_072/ch25_2019_08_21_19_49_43_725038.py | 4ad02d8f6560a27c1b43320c99a7c2c44a6ef538 | [] | no_license | gabriellaec/desoft-analise-exercicios | b77c6999424c5ce7e44086a12589a0ad43d6adca | 01940ab0897aa6005764fc220b900e4d6161d36b | refs/heads/main | 2023-01-31T17:19:42.050628 | 2020-12-16T05:21:31 | 2020-12-16T05:21:31 | 306,735,108 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 199 | py | a=float(input('Qual será a distância percorrida ? '))
def preco_passagem(a):
    """Return the ticket price for a trip of *a* km.

    Up to 200 km the fare is R$0.50 per km; beyond that the formula
    100 + 0.45*(a - 100) is applied, exactly as in the original code.

    NOTE(review): the two branches do not meet at a = 200 (left side gives
    100.0, right side would give 145.0) -- possibly (a - 200) was intended.
    This is a behavior-preserving rewrite, so the formula is kept as-is;
    confirm against the exercise statement before changing it.
    """
    return a * 0.5 if a <= 200 else 100 + (a - 100) * 0.45
print('{0:.2f}'.format(preco_passagem(a))) | [
"you@example.com"
] | you@example.com |
b436a2f50887357258378c167beccd815a60f265 | 6eba4a5787d6c0edfa1ec6f94f33f67b721a6522 | /desafioEntreNotas.py | a90e6e4e3e38802b6755542c87e24f1168a2c4af | [] | no_license | HenriqueSantos114/desafios-em-python | 5dd41fa98e5182d8c46f9e2d7bc6e331de303ecf | 55c8d31784505e24e6f3bf823a4c660252482c7c | refs/heads/main | 2023-07-29T04:36:41.496965 | 2021-09-08T02:15:24 | 2021-09-08T02:15:24 | 402,064,365 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 490 | py | # Faça um programa que peça uma nota, entre zero e dez.
# Ask for a grade between zero and ten; for invalid values show a message
# and keep asking until the user enters a valid one.
nota = int(input('Nota entre 0 e 10: '))
# Fix: the original tested 0 < nota < 10 here, while the retry loop below
# treated 0 and 10 as valid -- so entering those boundary grades produced
# no output at all.  "Entre zero e dez" is inclusive: 0 <= nota <= 10.
if 0 <= nota <= 10:
    print('Sua nota é ' + str(nota))
while nota > 10 or nota < 0:
    print('Favor inserir um número válido!')
    nota = int(input('Nota entre 0 e 10: '))
    if 0 <= nota <= 10:
        print('Sua nota é ' + str(nota))
| [
"73488726+HenriqueSantos114@users.noreply.github.com"
] | 73488726+HenriqueSantos114@users.noreply.github.com |
44745815bf70dfefbc566356404d6d02776e8a77 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p03254/s956844324.py | 07d913466375174a9e3f7d1410eaa5709318f863 | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 171 | py | N,x = map(int, input().split())
# Greedy: take the requested amounts in ascending order so that as many
# entries as possible are covered by the remaining amount x read above.
A = sorted(map(int, input().split()))
s = 0  # number of entries fully covered so far
for i in range(N):
    x -= A[i]
    if x<0:
        # the remaining amount could not cover this entry
        break
    else:
        s += 1
# (The print on the next line answers s when x was used up exactly or
# overdrawn, and s-1 when some amount is left over after covering all N.)
print(s if x<=0 else s-1) | [
"66529651+Aastha2104@users.noreply.github.com"
] | 66529651+Aastha2104@users.noreply.github.com |
bb48eb15a96fd7e452ae09f500b5f99a37446043 | a3aa5c6696f2cb4b5b6aa313ed7d55d5f9ee65e1 | /examples/ssv_NoSRD/108cells/_NT5terms/setrun.py | f93781733947f01ad7b3a433e0456431b5e7ab47 | [
"BSD-3-Clause"
] | permissive | mjberger/ho_amrclaw_amrcart | 65d237449d1163b2f8cd2b40ced850bc9d7f9c26 | 0e0d37dda52b8c813f7fc4bd7e61c5fdb33b0ada | refs/heads/master | 2023-01-12T18:36:08.808795 | 2020-11-18T20:23:30 | 2020-11-18T20:23:30 | 298,061,208 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 12,911 | py | """
Module to set up run time parameters for Clawpack.
The values set in the function setrun are then written out to data files
that will be read in by the Fortran code.
"""
from __future__ import absolute_import
import os
import numpy as np
#------------------------------
def setrun(claw_pkg='amrclaw'):
#------------------------------
    """
    Define the parameters used for running Clawpack.
    INPUT:
        claw_pkg expected to be "amrclaw" for this setrun.
    OUTPUT:
        rundata - object of class ClawRunData
    Raises an AssertionError if claw_pkg is anything but 'amrclaw'.
    The returned object is written to the *.data files by rundata.write().
    """
    from clawpack.clawutil import data
    assert claw_pkg.lower() == 'amrclaw', "Expected claw_pkg = 'amrclaw'"
    num_dim = 2
    rundata = data.ClawRunData(claw_pkg, num_dim)
    #------------------------------------------------------------------
    # Problem-specific parameters to be written to setprob.data:
    #------------------------------------------------------------------
    probdata = rundata.new_UserData(name='probdata',fname='setprob.data')
    probdata.add_param('mstage ',2, 'RK method order (coeffs set in setprob)')
    probdata.add_param('ismp', 0, ' stabilization method')
    ## 1 = SRD
    ## 2 = DRD
    probdata.add_param('pwconst', False, ' no slopes in plotting ')
    probdata.add_param('max1d', 900,' max size each dir for grid patches')
    probdata.add_param('nloops', 2, '# closed loops or segments')
    probdata.add_param('xloop1', .00001, ' starting pt x')
    probdata.add_param('yloop1', .99999, ' starting pt y')
    probdata.add_param('xloop2', 1.384, ' starting pt x')
    probdata.add_param('yloop2', 0.0, ' starting pt y')
    probdata.add_param('ghost_ccg', False, ' use ghost cells in cutcell/tile gradients')
    probdata.add_param('limitTile', 1, ' 1 = BJ, 2 = LP')
    probdata.add_param('lpChoice', 2, ' 1 = restrictive, 2 = relaxed, if LP limiter used')
    probdata.add_param('nTerms', 5, ' 2 = first order cell gradient, 5 = second order')
    probdata.add_param('numMergeTerms', 5,' 2 = first order tile gradient, 5 = second order')
    #------------------------------------------------------------------
    # Standard Clawpack parameters to be written to claw.data:
    # (or to amrclaw.data for AMR)
    #------------------------------------------------------------------
    clawdata = rundata.clawdata # initialized when rundata instantiated
    # Set single grid parameters first.
    # See below for AMR parameters.
    # ---------------
    # Spatial domain:
    # ---------------
    # Number of space dimensions:
    clawdata.num_dim = num_dim
    # Lower and upper edge of computational domain:
    clawdata.lower[0] = 0.00e+00 # xlower
    clawdata.upper[0] = 1.43e+00 # xupper
    clawdata.lower[1] = 0.00e+00 # ylower
    clawdata.upper[1] = 1.4301e+00 # yupper
    # Number of grid cells:
    # (alternative resolutions kept commented out; 108x108 is active,
    # matching the "108cells" directory name)
    #clawdata.num_cells[0] = 54 # mx
    #clawdata.num_cells[1] = 54 # my
    #clawdata.num_cells[0] = 26 # mx
    #clawdata.num_cells[1] = 26 # my
    clawdata.num_cells[0] = 108 # mx
    clawdata.num_cells[1] = 108 # my
    #clawdata.num_cells[0] = 216 # mx
    #clawdata.num_cells[1] = 216 # my
    #clawdata.num_cells[0] = 324 # mx
    #clawdata.num_cells[1] = 324 # my
    #clawdata.num_cells[0] = 432 # mx
    #clawdata.num_cells[1] = 432 # my
    #clawdata.num_cells[0] = 864 # mx
    #clawdata.num_cells[1] = 864 # my
    # ---------------
    # Size of system:
    # ---------------
    # Number of equations in the system:
    clawdata.num_eqn = 4
    # Number of auxiliary variables in the aux array (initialized in setaux)
    clawdata.num_aux = 0
    # Index of aux array corresponding to capacity function, if there is one:
    clawdata.capa_index = 0
    # -------------
    # Initial time:
    # -------------
    clawdata.t0 = 0.000000
    # Restart from checkpoint file of a previous run?
    # If restarting, t0 above should be from original run, and the
    # restart_file 'fort.chkNNNNN' specified below should be in
    # the OUTDIR indicated in Makefile.
    #clawdata.restart = True # True to restart from prior results
    clawdata.restart = False # True to restart from prior results
    clawdata.restart_file = 'fort.chk01500' # File to use for restart data
    # -------------
    # Output times:
    #--------------
    # Specify at what times the results should be written to fort.q files.
    # Note that the time integration stops after the final output time.
    clawdata.output_style = 3
    if clawdata.output_style==1:
        # Output ntimes frames at equally spaced times up to tfinal:
        # Can specify num_output_times = 0 for no output
        clawdata.num_output_times = 16
        clawdata.tfinal = 4.0
        clawdata.output_t0 = True # output at initial (or restart) time?
    elif clawdata.output_style == 2:
        # Specify a list or numpy array of output times:
        # Include t0 if you want output at the initial time.
        clawdata.output_times = [0., .9, 10., 12.]
    elif clawdata.output_style == 3:
        # Output every step_interval timesteps over total_steps timesteps:
        clawdata.output_step_interval = 3000
        clawdata.total_steps = 3000
        #clawdata.output_t0 = True # output at initial (or restart) time?
        clawdata.output_t0 = False # output at initial (or restart) time?
    clawdata.output_format = 'ascii' # 'ascii', 'binary', 'netcdf'
    clawdata.output_q_components = 'all' # only 'all'
    clawdata.output_aux_components = 'all' # 'all' or 'none'
    clawdata.output_aux_onlyonce = False # output aux arrays only at t0?
    # ---------------------------------------------------
    # Verbosity of messages to screen during integration:
    # ---------------------------------------------------
    # The current t, dt, and cfl will be printed every time step
    # at AMR levels <= verbosity. Set verbosity = 0 for no printing.
    # (E.g. verbosity == 2 means print only on levels 1 and 2.)
    clawdata.verbosity = 1
    # --------------
    # Time stepping:
    # --------------
    # if dt_variable==True: variable time steps used based on cfl_desired,
    # if dt_variable==False: fixed time steps dt = dt_initial always used.
    clawdata.dt_variable = True
    # Initial time step for variable dt.
    # (If dt_variable==0 then dt=dt_initial for all steps)
    clawdata.dt_initial = 2.000000e-02
    # Max time step to be allowed if variable dt used:
    clawdata.dt_max = 1.000000e+99
    # Desired Courant number if variable dt used
    clawdata.cfl_desired = 0.450000
    # max Courant number to allow without retaking step with a smaller dt:
    clawdata.cfl_max = 0.450000
    # Maximum number of time steps to allow between output times:
    clawdata.steps_max = 1000
    # ------------------
    # Method to be used:
    # ------------------
    # Order of accuracy: 1 => Godunov, 2 => Lax-Wendroff plus limiters
    clawdata.order = 2
    # Use dimensional splitting? (not yet available for AMR)
    clawdata.dimensional_split = 'unsplit'
    # For unsplit method, transverse_waves can be
    # 0 or 'none' ==> donor cell (only normal solver used)
    # 1 or 'increment' ==> corner transport of waves
    # 2 or 'all' ==> corner transport of 2nd order corrections too
    clawdata.transverse_waves = 0
    # Number of waves in the Riemann solution:
    clawdata.num_waves = 3
    # List of limiters to use for each wave family:
    # Required: len(limiter) == num_waves
    # Some options:
    # 0 or 'none' ==> no limiter (Lax-Wendroff)
    # 1 or 'minmod' ==> minmod
    # 2 or 'superbee' ==> superbee
    # 3 or 'vanleer' ==> van Leer
    # 4 or 'mc' ==> MC limiter
    #clawdata.limiter = ['mc','mc','mc']
    clawdata.limiter = [0,0,0]
    clawdata.use_fwaves = False # True ==> use f-wave version of algorithms
    # Source terms splitting:
    # src_split == 0 or 'none' ==> no source term (src routine never called)
    # src_split == 1 or 'godunov' ==> Godunov (1st order) splitting used,
    # src_split == 2 or 'strang' ==> Strang (2nd order) splitting used, not recommended.
    clawdata.source_split = 0
    # --------------------
    # Boundary conditions:
    # --------------------
    # Number of ghost cells (usually 2)
    clawdata.num_ghost = 4 # needed for 2 stage RK + delta distrib
    # Choice of BCs at xlower and xupper:
    # 0 or 'user' => user specified (must modify bcNamr.f to use this option)
    # 1 or 'extrap' => extrapolation (non-reflecting outflow)
    # 2 or 'periodic' => periodic (must specify this at both boundaries)
    # 3 or 'wall' => solid wall for systems where q(2) is normal velocity
    # will be using exact soln for bcs
    clawdata.bc_lower[0] = 'user' # at xlower
    clawdata.bc_upper[0] = 'user' # at xupper
    clawdata.bc_lower[1] = 'user' # at ylower
    clawdata.bc_upper[1] = 'user' # at yupper
    # ---------------
    # Gauges:
    # ---------------
    rundata.gaugedata.gauges = []
    # for gauges append lines of the form [gaugeno, x, y, t1, t2]
    #rundata.gaugedata.gauges.append([1, 0.4, 0.3, 0., 10.])
    #rundata.gaugedata.gauges.append([2, 0.6, 0.3, 0., 10.])
    # --------------
    # Checkpointing:
    # --------------
    # Specify when checkpoint files should be created that can be
    # used to restart a computation.
    clawdata.checkpt_style = 1
    if clawdata.checkpt_style == 0:
        # Do not checkpoint at all
        pass
    elif clawdata.checkpt_style == 1:
        # Checkpoint only at tfinal.
        pass
    elif clawdata.checkpt_style == 2:
        # Specify a list of checkpoint times.
        clawdata.checkpt_times = [0.1,0.15]
    elif clawdata.checkpt_style == 3:
        # Checkpoint every checkpt_interval timesteps (on Level 1)
        # and at the final time.
        clawdata.checkpt_interval = 5
    # ---------------
    # AMR parameters:
    # ---------------
    amrdata = rundata.amrdata
    # max number of refinement levels:
    amrdata.amr_levels_max = 1
    # List of refinement ratios at each level (length at least amr_level_max-1)
    amrdata.refinement_ratios_x = [2, 4, 2]
    amrdata.refinement_ratios_y = [2, 4, 2]
    amrdata.refinement_ratios_t = [2, 4, 2]
    # Specify type of each aux variable in amrdata.auxtype.
    # This must be a list of length num_aux, each element of which is one of:
    # 'center', 'capacity', 'xleft', or 'yleft' (see documentation).
    # NOTE(review): num_aux = 0 above while aux_type lists 3 entries --
    # harmless when no aux array is used, but inconsistent; confirm.
    amrdata.aux_type = ['xleft', 'yleft', 'center']
    # Flag for refinement based on Richardson error estimater:
    amrdata.flag_richardson = False # use Richardson?
    amrdata.flag_richardson_tol = 1.000000e+00 # Richardson tolerance
    # Flag for refinement using routine flag2refine:
    amrdata.flag2refine = True # use this?
    amrdata.flag2refine_tol = 5.000000e-02 # tolerance used in this routine
    # User can modify flag2refine to change the criterion for flagging.
    # Default: check max-norm of difference between q in a cell and
    # each of its neighbors.
    # steps to take on each level L between regriddings of level L+1:
    amrdata.regrid_interval = 2
    # width of buffer zone around flagged points:
    # (typically the same as regrid_interval so waves don't escape):
    amrdata.regrid_buffer_width = 2
    # clustering alg. cutoff for (# flagged pts) / (total # of cells refined)
    # (closer to 1.0 => more small grids may be needed to cover flagged cells)
    amrdata.clustering_cutoff = 0.700000
    # print info about each regridding up to this level:
    amrdata.verbosity_regrid = 0
    # ---------------
    # Regions:
    # ---------------
    rundata.regiondata.regions = []
    # to specify regions of refinement append lines of the form
    # [minlevel,maxlevel,t1,t2,x1,x2,y1,y2]
    # ----- For developers -----
    # Toggle debugging print statements:
    amrdata.dprint = False # print domain flags
    amrdata.eprint = False # print err est flags
    amrdata.edebug = False # even more err est flags
    amrdata.gprint = False # grid bisection/clustering
    amrdata.nprint = False # proper nesting output
    amrdata.pprint = False # proj. of tagged points
    amrdata.rprint = False # print regridding summary
    amrdata.sprint = False # space/memory output
    amrdata.tprint = False # time step reporting each level
    amrdata.uprint = False # update/upbnd reporting
    return rundata
# end of function setrun
# ----------------------
if __name__ == '__main__':
    # Set up run-time parameters and write all data files.
    # Any command-line argument is forwarded as claw_pkg to setrun().
    import sys
    rundata = setrun(*sys.argv[1:])
    rundata.write()
| [
"berger@cims.nyu.edu"
] | berger@cims.nyu.edu |
9697e404936e536a1b2a743fe4b9751ef40c69fc | a7ba274ad375edc99b8525434b491e5365d19847 | /dataManage/serverHandler/arcpyControl/publishMapServer.py | c1fba66e23ee3722039ffc1d1484bad1a04bb92d | [] | no_license | gery1990/CMB-Py | 4d5172a5d4ac241bd0bae4c519ad40c1150f00e7 | 2cd2087fe7bad9304eda255ef77210abdb6f1c58 | refs/heads/master | 2021-05-16T08:23:16.989450 | 2017-09-29T02:09:19 | 2017-09-29T02:09:19 | 104,054,670 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,227 | py | # coding:utf-8
import arcpy, traceback, os
import xml.dom.minidom as DOM
class PublishServer():
def __init__(self, mxdTempPath, agspath, type, logger):
'''
:param mxdTempPath: 模板路径
:param agspath: GIS服务连接文件
'''
self.mxdTempPath = mxdTempPath
self.agspath = agspath
self.logger = logger
self.type = type
self.logger.info('''mxdTempPath: %s
agsPath: %s
type: %s ''' % (mxdTempPath, agspath, type))
def buildMxd(self, mxdOutputPath, layers):
'''
:param mxdOutputPath: mxd输出路径
:param layers: 图层路径数组
:return:
'''
try:
mxd = arcpy.mapping.MapDocument(self.mxdTempPath)
df = arcpy.mapping.ListDataFrames(mxd, "Layers")[0]
for layer in layers:
layerName = os.path.basename(layer)
arcpy.MakeFeatureLayer_management(layer, layerName)
mapLayer = arcpy.mapping.Layer(layerName)
arcpy.mapping.AddLayer(df, mapLayer, "BOTTOM")
mxd.saveACopy(mxdOutputPath)
return mxdOutputPath
except:
self.logger.warning(traceback.format_exc())
# 创建服务定义文档
def publishMXD(self, mxdPath, definitionType=None, maxRecordCount=None, maxInstances=None):
'''
:param mxdPath: 地图文档路径
:param definitionType: esriServiceDefinitionType_Replacement 覆盖更新
:param maxRecordCount: 最大返回数
:param maxInstances: 最大实例数
:return:
'''
try:
new_mxd = arcpy.mapping.MapDocument(mxdPath)
mxdName = os.path.basename(mxdPath)
dotIndex = mxdName.index('.')
serviceName = mxdName[0:dotIndex]
sddraft = os.path.abspath(serviceName + '.sddraft')
sd = os.path.abspath(serviceName + '.sd')
if os.path.exists(sd):
os.remove(sd)
# 创建服务定义草稿draft
analysis = arcpy.mapping.CreateMapSDDraft(new_mxd, sddraft, serviceName, 'ARCGIS_SERVER',
self.agspath, False, self.type, None, None)
if analysis['errors'] == {}:
self.editSddraft(sddraft, definitionType, maxRecordCount, maxInstances)
# Execute StageService
arcpy.StageService_server(sddraft, sd)
# Execute UploadServiceDefinition
arcpy.UploadServiceDefinition_server(sd, self.agspath)
else:
# if the sddraft analysis contained errors, display them
print analysis['errors']
except:
self.logger.warning(traceback.format_exc())
def editSddraft(self, xml, definitionType, maxRecordCount, maxInstances):
try:
doc = DOM.parse(xml)
if definitionType != None:
descriptions = doc.getElementsByTagName("Type")
desc = descriptions[0]
if desc.parentNode.tagName == "SVCManifest":
if desc.hasChildNodes():
desc.firstChild.data = definitionType
if maxRecordCount != None or maxInstances != None:
descriptions = doc.getElementsByTagName("Configurations")
desc = descriptions[0]
if desc.parentNode.tagName == "SVCManifest":
for configuration in desc.childNodes:
if configuration.tagName == 'SVCConfiguration':
for svgConfig in configuration.childNodes:
if svgConfig.tagName == 'Definition':
for definition in svgConfig.childNodes:
if definition.tagName == 'ConfigurationProperties':
for configPro in definition.childNodes:
if configPro.tagName == "PropertyArray":
for propertyArray in configPro.childNodes:
if propertyArray.childNodes[
0].firstChild.nodeValue == "maxRecordCount" and maxRecordCount != None:
if propertyArray.childNodes[1].hasChildNodes():
propertyArray.childNodes[
1].firstChild.data = maxRecordCount
break
if definition.tagName == 'Props':
for pros in definition.childNodes:
if pros.tagName == "PropertyArray":
for propertyArray in pros.childNodes:
if propertyArray.childNodes[
0].firstChild.nodeValue == "MaxInstances" and maxInstances != None:
if propertyArray.childNodes[1].hasChildNodes():
propertyArray.childNodes[
1].firstChild.data = maxInstances
break
outXml = xml
f = open(outXml, 'w')
doc.writexml(f)
f.close()
except:
self.logger.warning(traceback.format_exc())
if __name__ == '__main__':
    # Ad-hoc manual test: build an mxd on the local desktop and publish it
    # as a replacement service.  Earlier Linux-path invocations kept below
    # for reference.
    # NOTE(review): PublishServer.__init__ takes (mxdTempPath, agspath,
    # type, logger) -- the call below passes only three arguments, so it
    # would raise a TypeError for the missing logger; confirm and fix the
    # call before running this block.
    # publishS = PublishServer('/home/arcgis/dataTest/template.mxd',r'/home/arcgis/dataTest/arcgis on 192.168.119.134_6080 (admin).ags')
    # mxdPath=publishS.buildMxd(r'/home/arcgis/dataTest',r'/home/arcgis/dataTest/iseeIVlayers.gdb/fishnet_wgs84')
    # publishS.publishMXD(mxdPath,True)
    # publishS = PublishServer(r'/home/arcgis/dataManage/workspace/template.mxd',
    #                          r'/home/arcgis/dataManage/workspace/connectTo99.12.100.ags',None)
    # mxdPath = publishS.buildMxd(r'/data/dataHandle/GISData/arcgisserver/gdb/sales/customertest/test.mxd', [r'/data/dataHandle/GISData/arcgisserver/gdb/sales/customertest/customertest1.gdb/customertest_1'])
    # publishS.publishMXD(mxdPath, definitionType="esriServiceDefinitionType_Replacement", maxInstances=20,
    #                     maxRecordCount=400000)
    publishS = PublishServer(r'C:\Users\LocalUser\Desktop\template.mxd',
                             r'C:\Users\LocalUser\Desktop\arcgis on 99.12.95.181 (系统管理员).ags', None)
    mxdPath = publishS.buildMxd(r'C:\Users\LocalUser\Desktop\test.mxd', [
        r'D:\branch.gdb\branch'])
    publishS.publishMXD(mxdPath, definitionType="esriServiceDefinitionType_Replacement", maxInstances=20,
                        maxRecordCount=400000)
| [
"492542279@qq.com"
] | 492542279@qq.com |
70571dd78c2c6cb07035f45c95d36358920d2e18 | a153581be385c3f0671de8ebe41292a08b466835 | /src/Database/Enums.py | 94c07882e0a13d47fb4a3541bd2f65b4b395ffd3 | [] | no_license | PaperBag-Tech/Alpha-api | dd5b253eb8ab6928b3a4144c8331f9b3748c7548 | e1cc98c2b72f78200d425bd2e5648e1abd8dc0e5 | refs/heads/Dev | 2023-08-30T10:13:33.184384 | 2021-09-19T12:25:17 | 2021-09-19T12:25:17 | 389,418,179 | 0 | 0 | null | 2021-09-19T12:25:17 | 2021-07-25T18:43:50 | Python | UTF-8 | Python | false | false | 361 | py | from enum import Enum
class UserRole(str, Enum):
    """Roles a user account can hold.

    The ``str`` mixin makes members compare and serialize as plain strings,
    which keeps them friendly to JSON payloads and database columns.
    """
    Admin = "Admin"
    Editor = "Editor"
    Agent = "Agent"
    Others = "Others"
class LeadStauts(str, Enum):
    """Lifecycle states of a lead, from open through (re)scheduling and
    confirmation to closed.

    NOTE(review): the class name looks like a typo of ``LeadStatus``; it is
    left as-is because renaming would break existing importers -- confirm
    before changing.
    """
    open = "Open"
    scheduled = "Scheduled"
    rescheduled = "Rescheduled"
    pendingCustomerConfirmation = "Pending Customer Confirmation"
    pendingAgentConfirmation = "Pending Agent Confirmation"
    closed = "Closed"
| [
"deepakpatelg16@gmail.com"
] | deepakpatelg16@gmail.com |
9f5a8273537b84262585ae90eb297d25d5a299e3 | 4271a6f470beb1b519963b0d318fe45af9e1de49 | /src/task/task_utils.py | aa03cb79ffd3d694d04e04904f7dc85638f6dfbc | [] | no_license | kfox1111/apbs-rest | d129b91a282e30521ffd5d7332738b58b3df39d6 | ada11d0c207b246761d768e3a3a1372a3f32cd25 | refs/heads/master | 2020-06-20T07:48:32.659798 | 2019-07-13T00:52:59 | 2019-07-13T00:52:59 | 197,048,933 | 0 | 0 | null | 2019-07-15T18:08:14 | 2019-07-15T18:08:13 | null | UTF-8 | Python | false | false | 3,399 | py | import os, time, sys
import requests
# Prefer simplejson when installed, falling back to the stdlib json module.
# Fix: the fallback used a bare "except:", which would also mask unrelated
# failures (including KeyboardInterrupt); only an ImportError should
# trigger the fallback.
try:
    from simplejson import loads
except ImportError:
    from json import loads
# Storage-service endpoints, overridable through the environment.
STORAGE_HOST = os.environ.get('STORAGE_HOST', 'http://localhost:5001')
STORAGE_URL = os.environ.get('STORAGE_URL' , 'http://localhost:5001/storage')
# Job states considered final.
END_STATES = ['complete', 'error']
def get_starttime(jobid, task_name):
    """Returns the start time for the specified job id and type"""
    object_name = '%s/%s_start_time' % (jobid, task_name)
    url = '%s/api/storage/%s?json=true' % (STORAGE_HOST, object_name)
    resp = requests.get(url)
    if resp.status_code != 200:
        return None
    payload = loads(resp.content)[object_name]
    if payload is None:
        print(payload)
        return None
    # the timestamp is the first line of the stored document
    return float(payload.split('\n')[0].strip())
def get_endtime(jobid, task_name):
    """Returns the end time for the specified job id and type"""
    object_name = '%s/%s_end_time' % (jobid, task_name)
    url = '%s/api/storage/%s?json=true' % (STORAGE_HOST, object_name)
    resp = requests.get(url)
    # Only trust the stored end time once the job has reached a final state.
    # (Short-circuit order matters: the state lookup fires a second HTTP
    # request and is skipped when the first request already failed.)
    if resp.status_code != 200 or get_jobstatus_state(jobid, task_name) not in END_STATES:
        print(' %s job still running' % jobid)
        return None
    payload = loads(resp.content)[object_name]
    if payload is None:
        print(payload)
        return None
    return float(payload.split('\n')[0].strip())
def get_jobstatus_state(jobid, task_name):
    """Return the bare state string for the given job/task, or None."""
    object_name = '%s/%s_status' % (jobid, task_name)
    url = '%s/api/storage/%s?json=true' % (STORAGE_HOST, object_name)
    resp = requests.get(url)
    if resp.status_code != 200:
        return None
    payload = loads(resp.content)[object_name]
    if payload is None:
        print(payload)
        return None
    # the state is the first line of the stored status document
    return payload.split('\n')[0].strip()
def get_jobstatus_info(jobid, task_name):
    """Returns the status and potential output files for the specified job id and type"""
    job_status = None
    job_progress = []
    object_name = '%s/%s_status' % (jobid, task_name)
    url = '%s/api/storage/%s?json=true' % (STORAGE_HOST, object_name)
    resp = requests.get(url)
    if resp.status_code == 200:
        payload = loads(resp.content)[object_name]
        if payload is None:
            print(payload)
        else:
            # first line is the state, the remaining non-blank lines are
            # file paths produced so far
            lines = payload.split('\n')
            job_status = lines[0]
            job_progress = [ln.strip() for ln in lines[1:] if ln.strip()]
    # Rewrite every path as "<jobid>/<basename>" for URL-friendly use.
    job_progress = ['%s/%s' % (jobid, p.split('/')[-1]) for p in job_progress]
    return job_status, job_progress
| [
"elvis.offor@pnnl.gov"
] | elvis.offor@pnnl.gov |
0ce9a04eb907b5fa9abb3ae64148d10cf425c13a | 7e78ae372335fd8e8f0f251d037984629919dff8 | /print_recent_activities.py | 64c5f22a0df8bcd747525bff1386ddc8ce4d5603 | [
"MIT"
] | permissive | ElvinOuyang/todoist-history-analytics | cc7e047dbc83934367d25c92d053805f7552b13f | f3eb9629c84e3878af39524e8e5694bd3c743973 | refs/heads/master | 2021-07-19T04:53:56.429227 | 2017-10-25T20:18:05 | 2017-10-25T20:18:05 | 106,601,656 | 3 | 1 | null | null | null | null | UTF-8 | Python | false | false | 759 | py | import todoist_functions as todofun
import sys
if __name__ == '__main__':
    # Expect exactly one CLI argument: the number of activity rows to print.
    try:
        script, activity_count = sys.argv
    except ValueError:
        sys.stderr.write(
            "Please input desired rows of records after the script...\n")
        sys.exit(1)
    # Clamp the request to this script's 100-record maximum.
    if int(activity_count) > 100:
        print(">>> This program prints up to 100 records. Printing:")
        activity_count = 100
    else:
        print(">>> Printing:")
        activity_count = int(activity_count)
    # set up api engine to my developer app
    api = todofun.create_api()
    # get one-time activity log from api
    act = api.activity.get(limit=activity_count)
    # normalise the raw activity payload into a standardised DataFrame
    act_df = todofun.transform_act(act)
    act_df = todofun.df_standardization(act_df)
    print(act_df)
| [
"elvin.ouyang@gmail.com"
] | elvin.ouyang@gmail.com |
c5cf3f5dddb8cb510c9b6acf954b3ddde35e9e2e | 8506f0a22ef4edf03627951ced530b921ff4d383 | /tools/sumolib/output/convert/gpx.py | 0ec2127c1bf7a9f35b0a8fba39d2c071c8999ca0 | [] | no_license | deepak728/Traffic-Optimization- | fb0ac074fa601e524eb0d79defc7e8b84ab03138 | 85bc54de2e318f36bdcc5bb6f05badde0fb35ffe | refs/heads/master | 2020-03-29T23:29:36.740048 | 2018-11-12T09:19:17 | 2018-11-12T09:19:17 | 150,475,374 | 1 | 1 | null | 2018-11-12T09:19:19 | 2018-09-26T18:57:35 | Java | UTF-8 | Python | false | false | 1,366 | py | """
@file gpx.py
@author Jakob Erdmann
@author Laura Bieker
@date 2014-02-13
@version $Id: gpx.py 18096 2015-03-17 09:50:59Z behrisch $
This module includes functions for converting SUMO's fcd-output into
GPX format (http://en.wikipedia.org/wiki/GPS_eXchange_Format)
SUMO, Simulation of Urban MObility; see http://sumo.dlr.de/
Copyright (C) 2014 DLR (http://www.dlr.de/) and contributors
This file is part of SUMO.
SUMO is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3 of the License, or
(at your option) any later version.
"""
from collections import defaultdict
from xml.sax.saxutils import escape
def fcd2gpx(inpFCD, outSTRM, ignored):
    """Convert SUMO fcd-output to a GPX document written to *outSTRM*.

    inpFCD  -- iterable of timesteps, each exposing .time and a .vehicle
               list whose entries have .id, .x (lon) and .y (lat)
    outSTRM -- writable text stream receiving the GPX output
    ignored -- unused; kept for the common converter signature
    """
    # group the trackpoints of every vehicle over all timesteps
    tracks = defaultdict(list)
    for timestep in inpFCD:
        for v in timestep.vehicle:
            tracks[v.id].append((timestep.time, v.x, v.y))
    outSTRM.write('<?xml version="1.0" encoding="UTF-8"?>\n')
    outSTRM.write('<gpx version="1.0">\n')
    # .items() instead of the Python-2-only .iteritems() keeps this
    # working on both interpreter lines
    for vehicle, trackpoints in tracks.items():
        # escape the id so names containing &, < or > stay well-formed XML
        outSTRM.write("  <trk><name>%s</name><trkseg>\n" % escape(vehicle))
        for timestamp, lon, lat in trackpoints:
            outSTRM.write('    <trkpt lon="%s" lat="%s"><time>%s</time></trkpt>\n' % (
                lon, lat, timestamp))
        outSTRM.write("  </trkseg></trk>\n")
    outSTRM.write('</gpx>\n')
| [
"deepak711998@gmail.com"
] | deepak711998@gmail.com |
43c46f3842293ca95fcc91f1dcb7bdd6100621cd | f0937d9fb9108cdd69c5c477a782965bb1f25da5 | /first/settings.py | 5922c4ca8b47a4245264bfa0f0f1e6fe1814266e | [] | no_license | SimeonYS/first | 64218a5c2113cebfc1e1aec3f2808dcefcc30342 | 986e7bbbe5635685ce6795ee9f1459ce5d5a8ef5 | refs/heads/main | 2023-03-29T17:29:57.300975 | 2021-03-29T07:58:32 | 2021-03-29T07:58:32 | 352,561,410 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 372 | py | BOT_NAME = 'first'
SPIDER_MODULES = ['first.spiders']
NEWSPIDER_MODULE = 'first.spiders'
# Keep non-ASCII characters readable in exported feeds.
FEED_EXPORT_ENCODING = 'utf-8'
# Only log errors to keep crawl output quiet.
LOG_LEVEL = 'ERROR'
# No throttling between requests.
DOWNLOAD_DELAY = 0
# Present a desktop Chrome user agent to the crawled sites.
USER_AGENT="Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/88.0.4324.150 Safari/537.36"
ROBOTSTXT_OBEY = True
ITEM_PIPELINES = {
'first.pipelines.FirstPipeline': 300,
} | [
"simeon.simeonov@ADPVT.com"
] | simeon.simeonov@ADPVT.com |
0fc9cec53ec101f6da272d7dd7ceda6c6293a687 | cfc82c85d5476dcb4a945f5fd1b0a08a1be8bef5 | /python/jira-python/jira-close.py | ae4ea08a9558cf702b0a1517efccf237e836350c | [] | no_license | anto-daniel/command-examples | 42d428e4ac96947d116fcd1ae308f979dc67ad35 | 5ac2d6f7e5b03f5fd0d1a545b8ad83df4b1a459a | refs/heads/master | 2023-08-05T12:06:28.987027 | 2023-07-26T07:29:34 | 2023-07-26T07:29:34 | 16,992,583 | 0 | 2 | null | 2016-01-13T16:02:24 | 2014-02-19T17:33:16 | HTML | UTF-8 | Python | false | false | 632 | py | import sys, getopt
from jira.client import JIRA
# Connect to the corporate Jira server; TLS certificate verification is
# disabled ('verify': False).
options = {'server':'https://jira.corp.domain.com','verify':False}
# SECURITY(review): credentials are hard-coded here -- move them to an
# environment variable or a config file before sharing this script.
jira = JIRA(options, basic_auth=('anto.daniel','xxxxxxxxx'))
# First CLI argument is the issue key/id to transition.
ticketid = sys.argv[1]
#addcomment = sys.argv[2]
issue = jira.issue(ticketid)
transitions = jira.transitions(issue)
# NOTE(review): the list comprehension below builds a list that is never
# used -- it has no effect and could be removed.
[(t['id'], t['name']) for t in transitions]
# Show the transitions available from the issue's current status.
for t in transitions:
    print t['id'], t['name']
#print transitions
#jira.transition_issue(issue,'2', assignee={'name':'anto.daniel'}, resolution={'id': '1' }, rca={'id':'2'})
# Apply transition id '2' (apparently the close transition, per the final
# message) and assign the issue.
jira.transition_issue(issue,'2', assignee={'name':'anto.daniel'})
print issue.__str__()+" Closed."
| [
"root@Anto-E7480.actiance.local"
] | root@Anto-E7480.actiance.local |
4aba499e6a04a5bfbbcc88a16ab099f4dadfba4b | 379211f2a6175049ebb711554b24ccd33737aba5 | /poto/script/create_lambda_function.py | bf4c5e6802c9b2d861938379cf2f06091a099ce7 | [] | no_license | POZAlabs/poto | 5df8451025d7a541e47537b6f6e45521ded1d664 | bb26845bc03a120ccd496f06def01162697c7335 | refs/heads/master | 2021-06-05T09:56:07.198235 | 2020-01-23T06:05:23 | 2020-01-23T06:05:23 | 147,601,905 | 0 | 1 | null | 2020-01-23T06:05:25 | 2018-09-06T01:44:30 | Python | UTF-8 | Python | false | false | 3,219 | py | import os
import argparse
import json
import re
import sys
import base64
import shutil
import boto3
from poto.access import get_client
from poto.lambda_func import create_lambda_layer, create_lambda_function_zip
from poto.utils import set_home
def get_parser():
    """Build the CLI parser for the Lambda layer/function deployment script."""
    # (flags, add_argument kwargs) pairs -- declarative so the options are
    # easy to scan and extend
    arg_spec = [
        (('--aws_config_path',), dict(type=str, help='config path containing aws access key and secret key')),
        (('--zappa_stage',), dict(type=str, help='zappa_settings_key')),
        (('--output_path',), dict(type=str, default=os.path.expanduser("~"), help='output path')),
        (('--repo_path',), dict(type=str, help='zipped repo_path')),
        (('--is_non_editible_packages',), dict(action="store_true")),
        (('--layer_name',), dict(type=str, help='lambda layer name')),
        (('--func_name',), dict(type=str, help='lambda function name')),
        (('--handler',), dict(type=str, help='lambda handler')),
        (('--Timeout',), dict(type=int, default=600)),
        (('--delete_local_zip',), dict(action="store_true")),
    ]
    parser = argparse.ArgumentParser('lambda')
    for flags, kwargs in arg_spec:
        parser.add_argument(*flags, **kwargs)
    return parser
if __name__ == '__main__':
    # Parse CLI options and normalise the repo path before doing any work.
    parser = get_parser()
    args = parser.parse_args()
    args.repo_path = set_home(args.repo_path)
    os.chdir(args.repo_path)
    # Build the dependency-layer zip and the function-code zip.
    layer_zip_path = create_lambda_layer(args.zappa_stage, args.output_path)
    func_zip_path = create_lambda_function_zip(
        args.repo_path, args.output_path,
        args.is_non_editible_packages, args.zappa_stage)
    settings = json.load(open(os.path.join(args.repo_path, "zappa_settings.json")))
    config = json.load(open(args.aws_config_path))
    lambda_client = get_client('lambda', config)
    # Publish the shared-dependency layer.
    response = lambda_client.publish_layer_version(
        LayerName=args.layer_name,
        Content={'ZipFile': open(layer_zip_path, 'rb').read()},
        CompatibleRuntimes=['python3.6'])
    print('=' * 30, 'layer', '=' * 30)
    print(response)
    # Look up the ARN of the layer version just published (version 1).
    layer_version_response = lambda_client.get_layer_version(
        LayerName=args.layer_name, VersionNumber=1)
    # Create the function itself, wired to that layer.
    stage_cfg = settings[args.zappa_stage]
    response = lambda_client.create_function(
        FunctionName=args.func_name,
        Runtime='python3.6',
        Role=stage_cfg['role_arn'],
        Handler=args.handler,
        Code={'ZipFile': open(func_zip_path, 'rb').read()},
        Environment={'Variables': stage_cfg.get('aws_environment_variables')},
        Layers=[layer_version_response['LayerVersionArn']])
    print('=' * 30, 'function', '=' * 30)
    print(response)
    # Optionally clean up the local artifacts.
    if args.delete_local_zip:
        os.remove(layer_zip_path)
        os.remove(func_zip_path)
| [
"gpalektzja@gmail.com"
] | gpalektzja@gmail.com |
93e16e11d8d12ec6fd85f2c0f773cb3b1e3e8637 | 063f2c70005e0e3abc0adcc96732a7990e979bb2 | /booker/apps/libraryapp/migrations/0005_auto_20201129_1848.py | 3ebb0ef70ac9d7f731ad3fa8beb8cf120f41ca45 | [] | no_license | ppolle/book-rental | c0c8effe74def517b8130e6a21e3522dfcd1b8e5 | a50e0a30878b3e1fc487f2bc852cdee3a85b3a00 | refs/heads/master | 2023-01-30T17:45:51.002965 | 2020-12-12T12:54:02 | 2020-12-12T12:54:02 | 316,337,267 | 0 | 0 | null | 2020-12-12T12:54:03 | 2020-11-26T21:09:43 | HTML | UTF-8 | Python | false | false | 593 | py | # Generated by Django 2.2.17 on 2020-11-29 15:48
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('libraryapp', '0004_auto_20201129_0232'),
]
operations = [
migrations.AddField(
model_name='booktype',
name='early_rates',
field=models.DecimalField(decimal_places=2, default=0, max_digits=10),
),
migrations.AddField(
model_name='booktype',
name='minimum_days',
field=models.PositiveIntegerField(default=0),
),
]
| [
"peter.m.polle@gmail.com"
] | peter.m.polle@gmail.com |
6407830cac6dd52f49534aef0001c3f5ad710b1c | 3df4178ab9078babca7ce3a971f6e250b52bc637 | /code_cobrapy_parFBA.py | b4f940699e80ec3408cebcd9e82ed54b131064e5 | [
"MIT"
] | permissive | dasalazarb/NALFD-related_HCC_Recon_2.2 | 0e9104f12e8348fe0613917f312c749e6cbae116 | cc07ea4140ff1bfb6701bf2fe972868342274d1e | refs/heads/main | 2023-04-16T11:19:22.260509 | 2022-05-29T22:10:32 | 2022-05-29T22:10:32 | 497,475,295 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,457 | py | # -*- coding: utf-8 -*-
"""
Created on Sun Oct 31 15:08:53 2021
@author: da.salazarb
"""
# %% Libraries and data
import cobra
import pandas as pd
import numpy as np
GSE14520_MoA = pd.read_table("D:/SB-HCC/00_MoA/GSE14520_MoA.txt")
GSE10142_MoA = pd.read_table("D:/SB-HCC/00_MoA/GSE10142_MoA.txt")
neg_AEBP1 = list(GSE14520_MoA.loc[(GSE14520_MoA["modeOfAction"] == -1) & (GSE14520_MoA["from"] == "AEBP1"),:]["to_hgnc"])
neg_AR = list(GSE14520_MoA.loc[(GSE14520_MoA["modeOfAction"] == -1) & (GSE14520_MoA["from"] == "AR"),:]["to_hgnc"])
neg_NR1I3 = list(GSE14520_MoA.loc[(GSE14520_MoA["modeOfAction"] == -1) & (GSE14520_MoA["from"] == "NR1I3"),:]["to_hgnc"])
pos_AEBP1 = list(GSE14520_MoA.loc[(GSE14520_MoA["modeOfAction"] == 1) & (GSE14520_MoA["from"] == "AEBP1"),:]["to_hgnc"])
pos_AR = list(GSE14520_MoA.loc[(GSE14520_MoA["modeOfAction"] == 1) & (GSE14520_MoA["from"] == "AR"),:]["to_hgnc"])
pos_NR1I3 = list(GSE14520_MoA.loc[(GSE14520_MoA["modeOfAction"] == 1) & (GSE14520_MoA["from"] == "NR1I3"),:]["to_hgnc"])
## modelo
model = cobra.io.read_sbml_model('D:/SB-HCC/MODEL1603150001.xml')
pos_neg = {"neg_AEBP1":neg_AEBP1,"neg_AR":neg_AR, "neg_NR1I3":neg_NR1I3,
"pos_AEBP1":pos_AEBP1, "pos_AR":pos_AR, "pos_NR1I3": pos_NR1I3}
# %% fucntions
def get_list_rxn(rxn_temp,pos_temp,genes,genes_rxn):
if len(set(rxn_temp).intersection(pos_temp)) > 0: ## hay genes para revisar?
indices = []
list_rxn_names_temp = []
for j in set(rxn_temp).intersection(pos_temp): ## recorre genes para apagar o prender
indices = indices + list(np.where(j == genes)[0])
indices = np.array(indices)
for k in genes_rxn[indices]: ## para cada gen revisar las reacciones
list_rxn_names_temp = [i.name for i in list(k.reactions)]
else:
list_rxn_names_temp = []
return list_rxn_names_temp
def modify_model(model, pos_neg):
genes = np.array([i.name for i in model.genes]) # nombres genes
rxns = np.array([i.name for i in model.reactions]) # nombres reacciones
genes_rxn = pd.DataFrame(model.genes).iloc[:,0] # identificador gen
pos_list = []
neg_list = []
for rxn in model.reactions: ## recorre reacciones
rxn_temp = [gene.id for gene in rxn.genes]
### positive MoA
pos_temp = {k: v for k,v in pos_neg.items() if "pos" in k}
pos_temp = [i for v in pos_temp.values() for i in v]
pos_list = pos_list + get_list_rxn(rxn_temp,pos_temp,genes,genes_rxn)
### negative MoA
neg_temp = {k: v for k,v in pos_neg.items() if "neg" in k}
neg_temp = [i for v in neg_temp.values() for i in v]
neg_list = neg_list + get_list_rxn(rxn_temp,neg_temp,genes,genes_rxn)
pos_list = list(np.unique(pos_list))
neg_list = list(np.unique(neg_list))
lista_indices_pos = []
for l in pos_list: ## modificar lb y ub para cada reaccion
indice_rxn = list(np.where(l == rxns)[0])
lista_indices_pos = lista_indices_pos + indice_rxn
for indice in indice_rxn:
model.reactions[indice].upper_bound = 1000
model.reactions[indice].lower_bound = -1000
lista_indices_neg = []
for l in neg_list: ## modificar lb y ub para cada reaccion
indice_rxn = list(np.where(l == rxns)[0])
lista_indices_neg = lista_indices_neg + indice_rxn
for indice in indice_rxn:
# print(indice)
# print(model.reactions[indice].name)
model.reactions[indice].lower_bound = 0
model.reactions[indice].upper_bound = 10
# print(model.reactions[indice].lower_bound)
# print(model.reactions[indice].upper_bound)
return model, lista_indices_pos, lista_indices_neg
# %% to generate the model of NAFLD-related HCC model
model_temp, lista_indices_pos, lista_indices_neg = modify_model(model, pos_neg)
## Check point
## positive genes
rxn_temp = model_temp.reactions[lista_indices_pos[10]]
rxn_temp.name
rxn_temp.reaction
rxn_temp.lower_bound
rxn_temp.upper_bound
dict_pos = dict()
dict_pos["reaction"] = []; dict_pos["gene"] = []; dict_pos["name"] = []
for i in lista_indices_pos:
rxn_temp = model_temp.reactions[i]
dict_pos["name"].append(rxn_temp.name)
dict_pos["reaction"].append(rxn_temp.reaction)
dict_pos["gene"].append(rxn_temp.gene_name_reaction_rule)
## negative gene
rxn_temp = model_temp.reactions[lista_indices_neg[0]]
rxn_temp.name
rxn_temp.reaction
rxn_temp.lower_bound
rxn_temp.upper_bound
dict_neg = dict()
dict_neg["reaction"] = []; dict_neg["gene"] = []; dict_neg["name"] = []
for i in lista_indices_neg:
rxn_temp = model_temp.reactions[i]
dict_neg["name"].append(rxn_temp.name)
dict_neg["reaction"].append(rxn_temp.reaction)
dict_neg["gene"].append(rxn_temp.gene_name_reaction_rule)
## which reactions where modified in NALFD-related HCC
df_pos = pd.DataFrame.from_dict(dict_pos); df_pos.to_csv("D:/SB-HCC/which_genes_regulated_pos.csv", sep=";")
df_neg = pd.DataFrame.from_dict(dict_neg); df_neg.to_csv("D:/SB-HCC/which_genes_regulated_neg.csv", sep=";")
# %% run pFBA
model.objective = 'ATPS4m'
# fba_solution = model.optimize()
pfba_solution = cobra.flux_analysis.pfba(model)
model_temp.objective = 'ATPS4m'
# fba_solution = model_temp.optimize()
pfba_solution = cobra.flux_analysis.pfba(model_temp)
pfba_solution.status
| [
"dsalazarb@javeriana.edu.co"
] | dsalazarb@javeriana.edu.co |
7138dd90e13a84f8d729f1f5bbdd4b8a44e3ac41 | dd19bbe893c7b978bd0c5028670286093f61da7b | /examples/UNet/generators.py | b9bfc040edeff4afbf89d41522980888b13d73a5 | [
"MIT"
] | permissive | herandy/Recipes | a8d120d753c684924ec42a6cd6d65f6997d55a11 | 48fe66f407f916b09d89fd2603e1137cc500c556 | refs/heads/master | 2022-11-08T06:55:59.895070 | 2017-05-01T23:48:28 | 2017-05-01T23:48:28 | 88,197,292 | 0 | 1 | MIT | 2022-10-08T17:45:39 | 2017-04-13T19:01:26 | Python | UTF-8 | Python | false | false | 3,381 | py | __author__ = 'Fabian Isensee'
import numpy as np
import lasagne
def batch_generator(data, target, BATCH_SIZE, shuffle=False):
if shuffle:
while True:
ids = np.random.choice(len(data), BATCH_SIZE)
yield data[ids], target[ids]
else:
for idx in range(0, len(data), BATCH_SIZE):
ids = slice(idx, idx + BATCH_SIZE)
yield data[ids], target[ids]
def batch_generator_old(data, target, BATCH_SIZE, shuffle=False):
'''
just a simple batch iterator, no cropping, no rotation, no anything
'''
np.random.seed()
idx = np.arange(data.shape[0])
if shuffle:
np.random.shuffle(idx)
idx_2 = np.array(idx)
# if BATCH_SIZE is larger than len(data) we need to artificially enlarge the idx array (loop around)
while BATCH_SIZE > len(idx):
idx_2 = np.concatenate((idx_2, idx))
del(idx)
while True:
ctr = 0
yield np.array(data[idx_2[ctr:ctr+BATCH_SIZE]]), np.array(target[idx_2[ctr:ctr+BATCH_SIZE]])
ctr += BATCH_SIZE
if ctr >= data.shape[0]:
ctr -= data.shape[0]
def center_crop_generator(generator, output_size):
'''
yields center crop of size output_size (may be 1d or 2d) from data and seg
'''
'''if type(output_size) not in (tuple, list):
center_crop = [output_size, output_size]
elif len(output_size) == 2:
center_crop = list(output_size)
else:
raise ValueError("invalid output_size")'''
center_crop = lasagne.utils.as_tuple(output_size, 2, int)
for data, seg in generator:
center = np.array(data.shape[2:])/2
yield data[:, :, int(center[0]-center_crop[0]/2.):int(center[0]+center_crop[0]/2.), int(center[1]-center_crop[1]/2.):int(center[1]+center_crop[1]/2.)], seg[:, :, int(center[0]-center_crop[0]/2.):int(center[0]+center_crop[0]/2.), int(center[1]-center_crop[1]/2.):int(center[1]+center_crop[1]/2.)]
def random_crop_generator(generator, crop_size=(128, 128)):
'''
yields a random crop of size crop_size
'''
if type(crop_size) not in (tuple, list):
crop_size = [crop_size, crop_size]
elif len(crop_size) == 2:
crop_size = list(crop_size)
else:
raise ValueError("invalid crop_size")
for data, seg in generator:
lb_x = np.random.randint(0, data.shape[2]-crop_size[0])
lb_y = np.random.randint(0, data.shape[3]-crop_size[1])
data = data[:, :, lb_x:lb_x+crop_size[0], lb_y:lb_y+crop_size[1]]
seg = seg[:, :, lb_x:lb_x+crop_size[0], lb_y:lb_y+crop_size[1]]
yield data, seg
def threaded_generator(generator, num_cached=10):
# this code is written by jan Schluter
# copied from https://github.com/benanne/Lasagne/issues/12
import Queue
queue = Queue.Queue(maxsize=num_cached)
sentinel = object() # guaranteed unique reference
# define producer (putting items into queue)
def producer():
for item in generator:
queue.put(item)
queue.put(sentinel)
# start producer (in a background thread)
import threading
thread = threading.Thread(target=producer)
thread.daemon = True
thread.start()
# run as consumer (read items from queue, in current thread)
item = queue.get()
while item is not sentinel:
yield item
queue.task_done()
item = queue.get()
| [
"amirhosein.herandy@gmail.com"
] | amirhosein.herandy@gmail.com |
65bc21b97b512b395718665657f444720a1f37f8 | d6c0361cec81b712fe0184670d6d3256ef1af062 | /malteheckelen_de/wsgi.py | 4d13503f5c62fb30425893cc6c5378240e03c828 | [] | no_license | malteheckelen/malteheckelen_site | e59e64c12eff4a56abaa4bd476239346990259bd | 6463072561a75ad3429ba31517c9ecfc6355c6fa | refs/heads/master | 2023-02-26T13:11:09.172873 | 2021-02-02T10:00:37 | 2021-02-02T10:00:37 | 190,808,067 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 409 | py | """
WSGI config for malteheckelen_de project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/2.2/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'malteheckelen_de.settings')
application = get_wsgi_application()
| [
"malteheckelen@googlemail.com"
] | malteheckelen@googlemail.com |
e1e08627cb4eb317236b0b5da9b3b4f6f5e0d446 | d6c990a9abdb4c8dad0c9785b96a0edf9a33b7fd | /magic_formula_revised.py | f6c6dd7bbc791c64d67a4cc4fcd2ccb4256e4570 | [] | no_license | shashghosh/AlgoTrading | 417e6e193aeaa66605f1fc22418d8b1ec4b318f3 | 7d877c3eef2e51549ac226ffe29b8a3af94ef8df | refs/heads/main | 2023-04-05T07:28:10.655278 | 2021-04-21T18:10:13 | 2021-04-21T18:10:13 | 360,260,190 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,493 | py | # ============================================================================
# Greenblatt's Magic Formula Implementation
# Author - Mayank Rasu
# Please report bugs/issues in the Q&A section
# =============================================================================
import requests
from bs4 import BeautifulSoup
import pandas as pd
tickers = ["ADANIPORTS.NS", "ASIANPAINT.NS", "AXISBANK.NS", "BAJAJ-AUTO.NS", "BAJFINANCE.NS", "BAJAJFINSV.NS", "BPCL.NS", "BHARTIARTL.NS", "INFRATEL.NS", "BRITANNIA.NS", "CIPLA.NS", "COALINDIA.NS", "DRREDDY.NS", "EICHERMOT.NS", "GAIL.NS", "GRASIM.NS", "HCLTECH.NS", "HDFCBANK.NS", "HEROMOTOCO.NS", "HINDALCO.NS", "HINDUNILVR.NS", "HDFC.NS", "ICICIBANK.NS", "ITC.NS", "IOC.NS", "INDUSINDBK.NS", "INFY.NS", "JSWSTEEL.NS", "KOTAKBANK.NS", "LT.NS", "M&M.NS", "MARUTI.NS", "NTPC.NS", "NESTLEIND.NS", "ONGC.NS", "POWERGRID.NS", "RELIANCE.NS", "SHREECEM.NS", "SBIN.NS", "SUNPHARMA.NS", "TCS.NS", "TATAMOTORS.NS", "TATASTEEL.NS", "TECHM.NS", "TITAN.NS", "UPL.NS", "ULTRACEMCO.NS", "VEDL.NS", "WIPRO.NS", "ZEEL.NS",
"ACC.NS", "ABBOTINDIA.NS", "ADANITRANS.NS", "AMBUJACEM.NS", "AUROPHARMA.NS", "DMART.NS", "BAJAJHLDNG.NS", "BANDHANBNK.NS", "BANKBARODA.NS", "BERGEPAINT.NS", "BIOCON.NS", "BOSCHLTD.NS", "CADILAHC.NS", "COLPAL.NS", "CONCOR.NS", "DLF.NS", "DABUR.NS", "DIVISLAB.NS", "GICRE.NS", "GODREJCP.NS", "HDFCAMC.NS", "HDFCLIFE.NS", "HAVELLS.NS", "HINDPETRO.NS", "HINDZINC.NS", "ICICIGI.NS", "ICICIPRULI.NS", "IGL.NS", "NAUKRI.NS", "INDIGO.NS", "LUPIN.NS", "MARICO.NS", "MOTHERSUMI.NS", "MUTHOOTFIN.NS", "NHPC.NS", "NMDC.NS", "OFSS.NS", "PAGEIND.NS", "PETRONET.NS", "PIDILITIND.NS", "PEL.NS", "PFC.NS", "PGHH.NS", "PNB.NS", "SBILIFE.NS", "SRTRANSFIN.NS", "SIEMENS.NS", "TORNTPHARM.NS", "UBL.NS", "MCDOWELL-N.NS"]
#list of tickers whose financial data needs to be extracted
financial_dir = {}
for ticker in tickers:
print("Scraping data for", ticker)
try:
#getting balance sheet data from yahoo finance for the given ticker
temp_dir = {}
url = 'https://in.finance.yahoo.com/quote/'+ticker+'/balance-sheet?p='+ticker
page = requests.get(url)
page_content = page.content
soup = BeautifulSoup(page_content,'html.parser')
tabl = soup.find_all("div", {"class" : "M(0) Whs(n) BdEnd Bdc($seperatorColor) D(itb)"})
for t in tabl:
rows = t.find_all("div", {"class" : "rw-expnded"})
for row in rows:
temp_dir[row.get_text(separator='|').split("|")[0]]=row.get_text(separator='|').split("|")[1]
#getting income statement data from yahoo finance for the given ticker
url = 'https://in.finance.yahoo.com/quote/'+ticker+'/financials?p='+ticker
page = requests.get(url)
page_content = page.content
soup = BeautifulSoup(page_content,'html.parser')
tabl = soup.find_all("div", {"class" : "M(0) Whs(n) BdEnd Bdc($seperatorColor) D(itb)"})
for t in tabl:
rows = t.find_all("div", {"class" : "rw-expnded"})
for row in rows:
temp_dir[row.get_text(separator='|').split("|")[0]]=row.get_text(separator='|').split("|")[2]
#getting cashflow statement data from yahoo finance for the given ticker
url = 'https://in.finance.yahoo.com/quote/'+ticker+'/cash-flow?p='+ticker
page = requests.get(url)
page_content = page.content
soup = BeautifulSoup(page_content,'html.parser')
tabl = soup.find_all("div", {"class" : "M(0) Whs(n) BdEnd Bdc($seperatorColor) D(itb)"})
for t in tabl:
rows = t.find_all("div", {"class" : "rw-expnded"})
for row in rows:
temp_dir[row.get_text(separator='|').split("|")[0]]=row.get_text(separator='|').split("|")[2]
#getting key statistics data from yahoo finance for the given ticker
url = 'https://in.finance.yahoo.com/quote/'+ticker+'/key-statistics?p='+ticker
page = requests.get(url)
page_content = page.content
soup = BeautifulSoup(page_content,'html.parser')
#tabl = soup.findAll("table", {"class": "W(100%) Bdcl(c) "})
tabl = soup.findAll("table") #if this line gives error
for t in tabl:
rows = t.find_all("tr")
for row in rows:
if len(row.get_text(separator='|').split("|")[0:2])>0:
temp_dir[row.get_text(separator='|').split("|")[0]]=row.get_text(separator='|').split("|")[-1]
#combining all extracted information with the corresponding ticker
financial_dir[ticker] = temp_dir
except:
print("Problem scraping data for ",ticker)
#storing information in pandas dataframe
combined_financials = pd.DataFrame(financial_dir)
combined_financials.dropna(how='all',axis=1,inplace=True) #dropping columns with all NaN values
tickers = combined_financials.columns #updating the tickers list based on only those tickers whose values were successfully extracted
for ticker in tickers:
combined_financials = combined_financials[~combined_financials[ticker].str.contains("[a-z]").fillna(False)]
# creating dataframe with relevant financial information for each stock using fundamental data
stats = ["EBITDA",
"Depreciation & amortisation",
"Market cap (intra-day)",
"Net income available to common shareholders",
"Net cash provided by operating activities",
"Capital expenditure",
"Total current assets",
"Total current liabilities",
"Net property, plant and equipment",
"Total stockholders' equity",
"Long-term debt",
"Forward annual dividend yield"] # change as required
indx = ["EBITDA","D&A","MarketCap","NetIncome","CashFlowOps","Capex","CurrAsset",
"CurrLiab","PPE","BookValue","TotDebt","DivYield"]
all_stats = {}
for ticker in tickers:
try:
temp = combined_financials[ticker]
ticker_stats = []
for stat in stats:
ticker_stats.append(temp.loc[stat])
all_stats['{}'.format(ticker)] = ticker_stats
except:
print("can't read data for ",ticker)
# cleansing of fundamental data imported in dataframe
all_stats_df = pd.DataFrame(all_stats,index=indx)
all_stats_df[tickers] = all_stats_df[tickers].replace({',': ''}, regex=True)
all_stats_df[tickers] = all_stats_df[tickers].replace({'M': 'E+03'}, regex=True)
all_stats_df[tickers] = all_stats_df[tickers].replace({'B': 'E+06'}, regex=True)
all_stats_df[tickers] = all_stats_df[tickers].replace({'T': 'E+09'}, regex=True)
all_stats_df[tickers] = all_stats_df[tickers].replace({'%': 'E-02'}, regex=True)
for ticker in all_stats_df.columns:
all_stats_df[ticker] = pd.to_numeric(all_stats_df[ticker].values,errors='coerce')
#all_stats_df.dropna(axis=1,inplace=True)
tickers = all_stats_df.columns
# calculating relevant financial metrics for each stock
transpose_df = all_stats_df.transpose()
final_stats_df = pd.DataFrame()
final_stats_df["EBIT"] = transpose_df["EBITDA"] - transpose_df["D&A"].fillna(0)
final_stats_df["TEV"] = transpose_df["MarketCap"].fillna(0) \
+transpose_df["TotDebt"].fillna(0) \
-(transpose_df["CurrAsset"].fillna(0)-transpose_df["CurrLiab"].fillna(0))
final_stats_df["EarningYield"] = final_stats_df["EBIT"]/final_stats_df["TEV"]
final_stats_df["FCFYield"] = (transpose_df["CashFlowOps"]-transpose_df["Capex"])/transpose_df["MarketCap"]
final_stats_df["ROC"] = (transpose_df["EBITDA"] - transpose_df["D&A"].fillna(0))/(transpose_df["PPE"]+transpose_df["CurrAsset"]-transpose_df["CurrLiab"])
final_stats_df["BookToMkt"] = transpose_df["BookValue"]/transpose_df["MarketCap"]
final_stats_df["DivYield"] = transpose_df["DivYield"]
################################Output Dataframes##############################
# finding value stocks based on Magic Formula
final_stats_val_df = final_stats_df.loc[tickers,:]
final_stats_val_df["CombRank"] = final_stats_val_df["EarningYield"].rank(ascending=False,na_option='bottom')+final_stats_val_df["ROC"].rank(ascending=False,na_option='bottom')
final_stats_val_df["MagicFormulaRank"] = final_stats_val_df["CombRank"].rank(method='first')
value_stocks = final_stats_val_df.sort_values("MagicFormulaRank").iloc[:,[2,4,8]]
print("------------------------------------------------")
print("Value stocks based on Greenblatt's Magic Formula")
print(value_stocks)
# finding highest dividend yield stocks
high_dividend_stocks = final_stats_df.sort_values("DivYield",ascending=False).iloc[:,6]
print("------------------------------------------------")
print("Highest dividend paying stocks")
print(high_dividend_stocks)
# # Magic Formula & Dividend yield combined
final_stats_df["CombRank"] = final_stats_df["EarningYield"].rank(ascending=False,method='first') \
+final_stats_df["ROC"].rank(ascending=False,method='first') \
+final_stats_df["DivYield"].rank(ascending=False,method='first')
final_stats_df["CombinedRank"] = final_stats_df["CombRank"].rank(method='first')
value_high_div_stocks = final_stats_df.sort_values("CombinedRank").iloc[:,[2,4,6,8]]
print("------------------------------------------------")
print("Magic Formula and Dividend Yield combined")
print(value_high_div_stocks)
| [
"noreply@github.com"
] | noreply@github.com |
7430d072306d532ae77c0231f07d45427d020cbf | c01d3f51c1e7dfe18d019d7c68cac52899aba7c9 | /main-1.py | fb2a77feea96e18a25af1513fbf96661772f6ebc | [] | no_license | FLOWR1/2018hackathon | bc364c870b66bef3012d577a718150c88ca0376d | 455863a73499ebf9c991c3fb5e2fa8d305eea6fc | refs/heads/master | 2020-05-22T11:48:51.935985 | 2019-05-13T02:20:55 | 2019-05-13T02:20:55 | 186,330,162 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,246 | py | import cv2
import numpy as np
import threading
import sys
import os
import dlib
import models
import NonLinearLeastSquares
import ImageProcessing
from drawing import *
import FaceRendering
import utils
import timeit
INTRO_VID_PATH = '../ROEM 2014 Spring SUZY 1080p.mp4'
BACKGROUND_VID_PATH = '../ROEM 2014 Spring SUZY 320p.mp4'
SAVE_VID_PATH = '../out.avi'
MOVEMENT_THRESHOLD = 1.2#1.7 #higher is bigger movement
GUIDE_SHOW_TIME = 7.0 #seconds
GUIDE_WAIT_TIME = 4.0
VIDEO_CAPTURE_CAM_NUM = 0
cap_intro_vid = cv2.VideoCapture(INTRO_VID_PATH)
#loading the keypoint detection model, the image and the 3D model
predictor_path = "../shape_predictor_68_face_landmarks.dat"
#the smaller this value gets the faster the detection will work
#if it is too small, the user's face might not be detected
maxImageSizeForDetection = 320
#카메라로 찍히는 영상에서 얼굴을 찾는 detector
detector = dlib.get_frontal_face_detector()
#사람 얼굴을 찾는 입과 눈의 구석, 코의 끝과 같은 중요한 얼굴 표식의 위치를 식별하는 점들의 집합
predictor = dlib.shape_predictor(predictor_path)
# candide = 3D face model source
# mean3Dshape : 얼굴의 중립상태에 해당하는 정점 리스트
# blendshapes : 중립상태인 얼굴에서 추가하여 수정할 수 있는 얼굴
# ex) 미소, 눈썹 올라가는 부분
# candide에 정의된 애니메이션 Units에서 파생된다.
# mesh : Candide가 얼굴 목록으로 제공한 원래의 mesh
# idxs3D, idxs2D: Candide 모델(idxs3D)과 얼굴 정렬점 세트(idxs2D)사이에 해당하는 지점들의 인덱스들이다.
mean3DShape, blendshapes, mesh, idxs3D, idxs2D = utils.load3DFaceModel("../candide.npz")
projectionModel = models.OrthographicProjectionBlendshapes(blendshapes.shape[0])
def movement_detection():
ret, frame1 = cv2.VideoCapture(VIDEO_CAPTURE_CAM_NUM).read()
frame1 = cv2.resize(frame1, (100, 50))
prvs = cv2.cvtColor(frame1,cv2.COLOR_BGR2GRAY)
hsv = np.zeros_like(frame1)
hsv[...,1] = 255
mov_check_cap = cv2.VideoCapture(VIDEO_CAPTURE_CAM_NUM)
while(True):
ret, frame2 = mov_check_cap.read()
frame2 = cv2.resize(frame2, (100, 50))
next = cv2.cvtColor(frame2,cv2.COLOR_BGR2GRAY)
flow = cv2.calcOpticalFlowFarneback(prvs,next, None, 0.5, 3, 15, 3, 5, 1.2, 0)
mag, ang = cv2.cartToPolar(flow[...,0], flow[...,1])
mag_mat = np.matrix(mag)
cam_movement = mag_mat.mean()
if cam_movement > MOVEMENT_THRESHOLD:
break
prvs = next
def video():
t = threading.Thread(target=movement_detection)
t.start()
while(cap_intro_vid.isOpened()):
ret, frame = cap_intro_vid.read()
cv2.namedWindow("frame", cv2.WND_PROP_FULLSCREEN)
cv2.setWindowProperty("frame", cv2.WND_PROP_FULLSCREEN, 1)
cv2.imshow('frame',frame)
if not t.isAlive():
guide_facechange()
break
if cv2.waitKey(1) & 0xFF == ord('q'):
break
def guide_facechange():
def tmp():
print("")
t = threading.Timer(GUIDE_SHOW_TIME, tmp)
t.start()
t_wait = threading.Timer(GUIDE_WAIT_TIME, tmp)
t_wait.start()
print("Press T to draw the keypoints and the 3D model")
print("Press W to start recording to a video file")
print("Press R to restart")
print("Press Q or ESC to Quit")
modelParams = None
lockedTranslation = False
drawOverlay = False
#cap = cv2.VideoCapture(VIDEO_CAPTURE_CAM_NUM)
cap = cv2.VideoCapture(BACKGROUND_VID_PATH)
writer = None
cameraImg = cap.read()[1] # face swap하여 붙일 영상의 img
textureImg = cv2.VideoCapture(VIDEO_CAPTURE_CAM_NUM).read()[1]
#print("광고영상 shape : \t\t",cameraImg.shape[1],cameraImg.shape[0])
#print("카메라 캡쳐영상 shape : ",textureImg.shape[1],textureImg.shape[0])
###### face detection with guide
cap_guide_cam = cv2.VideoCapture(VIDEO_CAPTURE_CAM_NUM)
if (cap_guide_cam.isOpened() == False):
print("Unable to read camera feed")
frame_width = int(cap_guide_cam.get(3))
frame_height = int(cap_guide_cam.get(4))
str="match your face"
str2="O"
str3="ATTENTION"
while(True):
ret, frame = cap_guide_cam.read()
frame_org = frame
cv2.putText(frame,str,(int(frame_width/3),int(frame_height/6)),cv2.FONT_HERSHEY_SIMPLEX,int(frame_width/600),(0,0,0),int(frame_width/300))
cv2.putText(frame,str2,(int(frame_width/3),int(frame_width/2)),cv2.FONT_HERSHEY_SIMPLEX,int(frame_width/60),(0,0,255),int(frame_width/300))
cv2.putText(frame,str3,(int((frame_width*2)/3),int((frame_height*2)/3)),cv2.FONT_HERSHEY_SIMPLEX,int(frame_width/650),(0,0,0),int(frame_width/300))
cv2.imshow('frame',frame)
if cv2.waitKey(1) & 0xFF == ord('q'):
break
if not t_wait.isAlive():
dets = detector(frame_org, 1) #처음 camera로 촬영한 캡쳐를 넣어서 얼굴을 찾음.
if len(dets) > 0:
print("detected")
break
else:
print("now detecting")
if not t.isAlive():
video()
# 찍은 영상의 캡쳐를 3D로 재구성하여 합침
textureCoords = utils.getFaceTextureCoords(textureImg, mean3DShape, blendshapes, idxs2D, idxs3D, detector, predictor)
# 찍은 얼굴의 데이터를 영상의 얼굴에 rendering
renderer = FaceRendering.FaceRenderer(cameraImg, textureImg, textureCoords, mesh)
doProcess=False
meanTime=[[0]*4 for i in range(4)]
while True:
#영상 캡쳐
cameraImg = cap.read()[1]
shapes2D = utils.getFaceKeypoints(cameraImg, detector, predictor, maxImageSizeForDetection)
doProcess = not doProcess
if doProcess is not True:
continue
else:
if shapes2D is not None:
for shape2D in shapes2D:
start = timeit.default_timer()
#3D model parameter initialization
modelParams = projectionModel.getInitialParameters(mean3DShape[:, idxs3D], shape2D[:, idxs2D])
stop = timeit.default_timer()
meanTime[0][0]+=stop-start
meanTime[0][1]+=1
#print(1, float(meanTime[0][0]/meanTime[0][1]))
start = timeit.default_timer()
#3D model parameter optimization
modelParams = NonLinearLeastSquares.GaussNewton(modelParams, projectionModel.residual, projectionModel.jacobian, ([mean3DShape[:, idxs3D], blendshapes[:, :, idxs3D]], shape2D[:, idxs2D]), verbose=0)
stop = timeit.default_timer()
meanTime[1][0]+=stop-start
meanTime[1][1]+=1
#print(2, float(meanTime[1][0]/meanTime[1][1]))
start = timeit.default_timer()
#rendering the model to an image
#다듬기
shape3D = utils.getShape3D(mean3DShape, blendshapes, modelParams)
renderedImg = renderer.render(shape3D)
stop = timeit.default_timer()
meanTime[2][0]+=stop-start
meanTime[2][1]+=1
#print(3, float(meanTime[2][0]/meanTime[2][1]))\
start = timeit.default_timer()
#blending of the rendered face with the image
mask = np.copy(renderedImg[:, :, 0])
renderedImg = ImageProcessing.colorTransfer(cameraImg, renderedImg, mask)
cameraImg = ImageProcessing.blendImages(renderedImg, cameraImg, mask)
stop = timeit.default_timer()
meanTime[3][0] += stop - start
meanTime[3][1] += 1
#print(4, float(meanTime[3][0] / meanTime[3][1]))
#drawing of the mesh and keypoints
# 't'를 누를 때, 적용. facepoint가 표시됨.
if drawOverlay:
drawPoints(cameraImg, shape2D.T)
drawProjectedShape(cameraImg, [mean3DShape, blendshapes], projectionModel, mesh, modelParams, lockedTranslation)
if writer is not None:
writer.write(cameraImg)
cv2.namedWindow("image", cv2.WND_PROP_FULLSCREEN)
cv2.setWindowProperty("image", cv2.WND_PROP_FULLSCREEN, 1)
cv2.imshow('image',cameraImg)
key = cv2.waitKey(1)
if key == 27 or key == ord('q'):
break
if key == ord('t'):
drawOverlay = not drawOverlay
if key == ord('r'):
cv2.destroyAllWindows()
video()
if key == ord('w'):
if writer is None:
print("Starting video writer")
writer = cv2.VideoWriter(SAVE_VID_PATH, cv2.VideoWriter_fourcc('X', 'V', 'I', 'D'), 13, (cameraImg.shape[1], cameraImg.shape[0]))
if writer.isOpened():
print("Writer succesfully opened")
else:
writer = None
print("Writer opening failed")
else:
print("Stopping video writer")
writer.release()
writer = None
cap.release()
cap_intro_vid.release()
cap_guide_cam.release()
cv2.destroyAllWindows()
if __name__ == '__main__':
video() | [
"bamy@skku.edu"
] | bamy@skku.edu |
8898c800273d477134f4b1d5a9d195979c06ad78 | 4cdb8580f1662336583b844f9f39ec334f4b2bdd | /sim_builder.py | e4e46e1fc19db432d088ab404df83dee1b0aa9df | [
"CC0-1.0"
] | permissive | latimerb/BLA_invivo | 36ee2e3241b530b73ee7174848878df1fa2ee8fd | 715eba51e9d1f1f4d1da44c5627725b166aadf63 | refs/heads/master | 2020-04-30T09:55:45.431196 | 2019-05-06T15:34:58 | 2019-05-06T15:34:58 | 176,762,414 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 96,288 | py | # -*- coding: utf-8 -*-
"""
Created on Sat Feb 24 23:26:54 2018
@author: Tyler Banks
"""
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
from matplotlib.backends.backend_tkagg import FigureCanvasTkAgg
from matplotlib.figure import Figure
import os
import subprocess
import re
import glob
from collections import defaultdict
import threading
try:
import Tkinter as tk # this is for python2
import ttk
import tkMessageBox as messagebox
except:
import tkinter as tk # this is for python3
from tkinter import ttk
from tkinter import messagebox
root = tk.Tk()
dataset_folder = 'datasets'
cells_folder = 'cells'
results_folder = 'results'
cellnums_file_prefix = 'cellnumbers_'
cellnums_file_postfix = '.dat'
conndata_file_prefix = 'conndata_'
conndata_file_postfix = '.dat'
syndata_file_prefix = 'syndata_'
syndata_file_postfix = '.dat'
phasicdata_file_prefix = 'phasic_'
phasicdata_file_postfix = '.dat'
trace_file_prefix = 'trace_'
trace_file_postfix = '.dat'
mods_glob = os.path.join('*.mod')
cellnums_glob = os.path.join(dataset_folder,cellnums_file_prefix + '*' + cellnums_file_postfix)
connections_glob = os.path.join(dataset_folder, conndata_file_prefix +'*'+ conndata_file_postfix)
syndata_glob = os.path.join(dataset_folder, syndata_file_prefix + '*' + syndata_file_postfix)
phasicdata_glob = os.path.join(dataset_folder, phasicdata_file_prefix + '*' + phasicdata_file_postfix)
results_glob = os.path.join(results_folder,'*','')
cells_glob = cells_folder+'/class_*.hoc'
cellclasses = [fn for fn in glob.glob(cells_glob)
if not os.path.basename(fn).startswith('class_cell_template')]
class Autoresized_Notebook(ttk.Notebook):
def __init__(self, master=None, **kw):
ttk.Notebook.__init__(self, master, **kw)
self.bind("<<NotebookTabChanged>>", self._on_tab_changed)
def _on_tab_changed(self,event):
event.widget.update_idletasks()
tab = event.widget.nametowidget(event.widget.select())
event.widget.configure(height=tab.winfo_reqheight())
class CreateToolTip(object):
"""
create a tooltip for a given widget
https://stackoverflow.com/questions/3221956/how-do-i-display-tooltips-in-tkinter
"""
def __init__(self, widget, text='widget info'):
self.waittime = 500 #miliseconds
self.wraplength = 180 #pixels
self.widget = widget
self.text = text
self.widget.bind("<Enter>", self.enter)
self.widget.bind("<Leave>", self.leave)
self.widget.bind("<ButtonPress>", self.leave)
self.id = None
self.tw = None
def enter(self, event=None):
self.schedule()
def leave(self, event=None):
self.unschedule()
self.hidetip()
def schedule(self):
self.unschedule()
self.id = self.widget.after(self.waittime, self.showtip)
def unschedule(self):
id = self.id
self.id = None
if id:
self.widget.after_cancel(id)
def showtip(self, event=None):
x = y = 0
x, y, cx, cy = self.widget.bbox("insert")
x += self.widget.winfo_rootx() + 25
y += self.widget.winfo_rooty() + 20
# creates a toplevel window
self.tw = tk.Toplevel(self.widget)
# Leaves only the label and removes the app window
self.tw.wm_overrideredirect(True)
self.tw.wm_geometry("+%d+%d" % (x, y))
label = tk.Label(self.tw, text=self.text, justify='left',
background="#ffffff", relief='solid', borderwidth=1,
wraplength = self.wraplength)
label.pack(ipadx=1)
def hidetip(self):
tw = self.tw
self.tw= None
if tw:
tw.destroy()
class DialogEntryBox:
    """Modal Toplevel that prompts the user for a single numeric value.

    After `parent.wait_window(d.top)` returns, `d.confirm` tells whether Ok
    was pressed and `d.value` (a StringVar) holds the entered text.
    """
    def __init__(self, parent, text="value", lefttext="",righttext=""):
        top = self.top = tk.Toplevel(parent)
        top.geometry('350x100')
        tk.Label(top, text=text).grid(row=0,column=1,sticky="WE")
        self.value = tk.StringVar(top)
        self.confirm = False  # set True only by the Ok button
        tk.Label(top, text=lefttext,width=20, anchor="e").grid(row=1,column=0)
        # Validate on every keystroke; the %-codes are Tk's standard
        # validatecommand substitutions (see validate() signature).
        vcmd = (top.register(self.validate),
                '%d', '%i', '%P', '%s', '%S', '%v', '%V', '%W')
        self.e = tk.Entry(top,textvariable=self.value, validate = 'key', validatecommand = vcmd)
        self.e.grid(row=1,column=1,columnspan=2)
        tk.Label(top, text=righttext).grid(row=1,column=3)
        button_frame = tk.Frame(top)
        button_frame.grid(row=2,column=1)
        b = tk.Button(button_frame, text="Ok", command=self.ok)
        b.grid(pady=5, padx=5, column=0, row=2, sticky="WE")
        b = tk.Button(button_frame, text="Cancel", command=self.cancel)
        b.grid(pady=5, padx=5, column=1, row=2, sticky="WE")
        tk.Label(top, text="Numbers only currently",width=20, anchor="w",fg='blue').grid(row=3,column=1,columnspan=2)
    def validate(self, action, index, value_if_allowed,
                 prior_value, text, validation_type, trigger_type, widget_name):
        """Tk 'key' validator: accept the edit only if the result is numeric.

        `text` is the inserted/deleted fragment and `value_if_allowed` the
        would-be entry content after the edit.
        NOTE(review): `text in '0123456789-'` is a *substring* test, so a
        multi-character paste is only accepted when its characters appear
        consecutively in that string (e.g. '12' passes, '29' does not) —
        confirm this is intended.
        """
        if text in '0123456789-':
            try:
                # bugfix: was `value_if_allowed is ''` — identity comparison
                # against a str literal is implementation-dependent and a
                # SyntaxWarning on Python >= 3.8; use equality instead.
                if value_if_allowed == '':
                    return True
                float(value_if_allowed)
                return True
            except ValueError:
                return False
        else:
            return False
    def ok(self):
        # Record confirmation, then close the dialog.
        self.confirm = True
        self.top.destroy()
    def cancel(self):
        # Close without confirming; self.confirm stays False.
        self.top.destroy()
class PandasTable(tk.Frame):
    '''Easily display an editable pandas dataframe in TK as a Frame (Created by Tyler Banks)'''
    '''
    root = None
    table_frame_internal = None
    table_tools_frame_internal = None
    names = [] #header for the df
    values_arr = [] #values of all rows ever entered
    entities_arr = [] #all entities in the rows
    deleted_rows = [] #keep track of what has been deleted, since we don't really delete
    '''
    def __init__(self, widget, show_add_row_button=True, allow_sorting=True):
        """Build an (initially empty) editable table inside `widget`.

        show_add_row_button -- show an "Add Row" button under the table
        allow_sorting       -- clicking a column header sorts by that column
        """
        super(PandasTable, self).__init__(widget)
        self.root = tk.Frame(widget,padx=5,pady=5)
        self.table_frame_internal = tk.Frame(self.root,background='white')
        self.table_tools_frame_internal = tk.Frame(self.root)
        #self.table_frame_internal.grid_forget() #Probably good housekeeping
        #self.table_frame_internal.destroy() #Re-enable for previously created
        self.table_frame_internal.grid(sticky="news",row=0,column=0)
        self.table_tools_frame_internal.grid(sticky="news",row=1,column=0)
        self.options_dict = None   # maps column index -> [list of dropdown choices]
        self.data_changed = False  # dirty flag; see has_changed()
        self.show_header = True
        self.show_numbering = True
        self.show_delete_row = True
        self.first_column_is_header = False
        self.first_column_is_id = False
        self.show_add_row_button = show_add_row_button
        self.immutable_columns = []
        self.immutable_values = []
        self.hidden_columns = []
        self.col_width=15
        self.last_sort_by = True    # ascending flag, toggled on repeat header clicks
        self.last_sort_by_col = ''  # column used for the last sort
        self.allow_sorting = allow_sorting
        self.init_tools()
        return
    def init_tools(self):
        # Optional toolbar under the table (currently only "Add Row").
        if self.show_add_row_button:
            addRowButton = tk.Button(self.table_tools_frame_internal, text="Add Row", command=lambda: self.add_row(None))
            addRowButton.grid(column=0, row =0, padx=5,pady=5, sticky='W')
    def pack(self,*args):
        # Forward geometry management to both this Frame and the inner root.
        super(PandasTable,self).pack(*args)
        self.root.pack(*args)
        #self.table_frame_internal.pack()
    def grid(self,*args):
        # Same as pack(), but for grid geometry management.
        super(PandasTable,self).grid(*args)
        self.root.grid(*args)
    def set_changed(self, ch):
        # Manually set/clear the dirty flag.
        self.data_changed = ch
    def change_in_data(self, *args):
        # StringVar write-trace callback: any cell edit marks the table dirty.
        self.set_changed(True)
    def has_changed(self):
        # True if any cell has been edited since the last set_dataframe().
        return self.data_changed
    def sort_by(self, col):
        """Header-click callback: rebuild the table sorted by `col`.

        Clicking the same header again toggles ascending/descending.
        """
        if not self.allow_sorting:
            return
        # NOTE(review): identity (`is`) comparison of strings — only reliable
        # if the very same str object is reused across rebuilds; confirm.
        if self.last_sort_by_col is col:
            self.last_sort_by = not self.last_sort_by #sort decending
        else:
            self.last_sort_by = True
            self.last_sort_by_col = col
        sorted_df = self.df.sort_values(by=[col], ascending=self.last_sort_by)
        self.set_dataframe(sorted_df, self.options_dict,
                           self.show_header, self.show_numbering,
                           self.show_delete_row, self.first_column_is_header,
                           self.first_column_is_id, self.immutable_columns,
                           self.immutable_values, self.hidden_columns)
    # NOTE(review): mutable default arguments (defaultdict(list) and []) are
    # shared across calls; benign here since they are never mutated — confirm.
    def set_dataframe(self, df, options_dict = defaultdict(list), \
                      show_header=True, show_numbering=True, \
                      show_delete_row=True, first_column_is_header=False, \
                      first_column_is_id= False, immutable_columns=[],\
                      immutable_values=[], hidden_columns=[]):
        '''Totally wipe the slate and display a new dataframe'''
        self.df = df
        self.data_changed = False
        self.show_header = show_header
        self.show_numbering = show_numbering
        self.show_delete_row = show_delete_row
        self.first_column_is_header = first_column_is_header
        self.first_column_is_id = first_column_is_id
        self.immutable_columns = immutable_columns
        self.hidden_columns = []#hidden_columns #Still has some minor bugs to work out, implement if needed
        # Destroy every previously-created cell widget before rebuilding.
        for widget in self.table_frame_internal.winfo_children():
            widget.destroy()
        self.entities_arr = []
        self.values_arr = []
        self.deleted_rows = []
        self.options_dict = options_dict
        self.immutable_values = immutable_values
        self.names = list(df)
        if show_header:
            # One clickable (sortable) label per visible column.
            for k, n in enumerate(self.names):
                if self.first_column_is_id and k==0:
                    continue
                if k in self.hidden_columns:
                    continue
                var = tk.Label(self.table_frame_internal, text=n)
                var.config(width=self.col_width,relief=tk.GROOVE,background='light gray')
                var.bind("<Button-1>", lambda event, col=n:self.sort_by(col))
                var.grid(column=k+3, row =0, padx=1, sticky='NEWS')
        for i, row in df.iterrows():
            self.add_row(row)
        return
    def get_dataframe(self):
        """Collect current cell values back into a DataFrame, skipping
        rows that were marked deleted (widgets are never truly removed)."""
        l = []
        for i, val_row in enumerate(self.values_arr):
            r = []
            if i not in self.deleted_rows:
                for j, val in enumerate(val_row):
                    r.append(val.get())
                l.append(r)
        return pd.DataFrame(l,columns=self.names)
    def add_row(self,row):
        """Append one row of widgets; `row` is a pandas Series or None for
        a blank row (used by the "Add Row" button)."""
        col_arr = []     # StringVars for this row's cells
        entity_arr = []  # widgets for this row
        id = len(self.entities_arr)
        insert_i = len(self.entities_arr)+1
        if row is None:
            # Build an empty single-row frame so iteration below still works.
            test = ['']*len(self.names)
            r = pd.DataFrame(test).transpose()
            r.columns = self.names
            for k, rt in r.iterrows():
                row = rt
                break
        num = len(self.entities_arr)-len(self.deleted_rows)
        if self.show_numbering:
            num_button = tk.Label(self.table_frame_internal,text=str(num))
            num_button.config(relief=tk.GROOVE,background='light gray',width=3)
            num_button.grid(row=insert_i, column=0,sticky='news')
            entity_arr.append(num_button)
        if self.first_column_is_header:
            # Label each row with the corresponding column name (matrix view).
            text = ' '
            if id < len(self.names):
                text = self.names[id]
            identity_button = tk.Label(self.table_frame_internal,text=str(text))
            identity_button.config(width=self.col_width,relief=tk.GROOVE,background='light gray')
            identity_button.grid(row=insert_i, column=1,sticky='news')
            entity_arr.append(identity_button)
        for j, col in enumerate(row):
            value = tk.StringVar(self.table_frame_internal)
            value.set(col)
            value.trace("w",self.change_in_data)
            entity = None
            #print(value.get())
            if j in self.hidden_columns:
                # Keep the StringVar alive but never grid the widget.
                entity = tk.Entry(self.table_frame_internal,textvariable=value)
                entity.place(width=20)
                #don't display
            elif self.first_column_is_id and j==0:
                # Read-only row-identity label.
                entity = tk.Label(self.table_frame_internal,text=value.get())
                entity.config(width=20,height=2,relief=tk.GROOVE,background='light gray')
                entity.grid(row=insert_i, column=2,sticky='news')
                #entity = tk.Button(self.table_frame_internal,text=value.get())
                #entity.grid(row=insert_i, column=0,sticky='news')
            elif value.get() in self.immutable_values:
                # Value flagged immutable: show a blank, non-editable cell.
                entity = tk.Label(self.table_frame_internal,text=" ")
                entity.config(width=20,relief=tk.GROOVE,background='light gray')
                entity.grid(row=insert_i, column=j+3,sticky='news')
            elif self.options_dict is not None and self.options_dict.get(j,False):
                # Column has a dropdown choice list.
                temp = self.options_dict.get(j)[0]
                entity = tk.OptionMenu(self.table_frame_internal, value ,*temp, '')
                entity.config(width=20)
                entity.grid(column=j+3,row=insert_i,sticky='NEWS')
            else:
                # Plain editable cell.
                entity = tk.Entry(self.table_frame_internal,textvariable=value)
                entity.place(width=20)
                entity.grid(row=insert_i,column=j+3,sticky='NEWS')
            entity_arr.append(entity)
            col_arr.append(value)
        if self.show_delete_row:
            remove_button = tk.Button(self.table_frame_internal,text="X", command=lambda r = id: self.del_row(r))
            remove_button.grid(row=insert_i, column=len(self.names)+3,sticky='news')
            entity_arr.append(remove_button)
        self.entities_arr.append(entity_arr)
        self.values_arr.append(col_arr)
        return
    def del_row(self, row):
        '''A little unsafe, we assume clean data'''
        # Destroy the row's widgets; the row index is remembered in
        # deleted_rows so get_dataframe() can skip it.
        for i,r in enumerate(self.entities_arr):
            for j,c in enumerate(r):
                if(i==row):
                    c.grid_forget()
                    c.destroy()
        for i in self.deleted_rows:
            if i == row:
                return
        self.deleted_rows.append(row)
        return
    def new(self):
        """Reset to an empty table with the current headers plus one blank row."""
        df = pd.DataFrame(columns=self.names)
        self.set_dataframe(df, options_dict=self.options_dict, show_header=self.show_header)
        self.add_row(None)
        return
def menu_bar(root):
    """Build and return the application's top menu bar (File, Help)."""
    def hello():
        print("hello!")
    def about():
        messagebox.showinfo("About", "Simulation Builder written for:\nProfessor Satish Nair's Neural Engineering Laboratory\nat The University of Missouri\n\nWritten by: Tyler Banks\n\nContributors: Ben Latimer\n\nInitial Neuron Code: Bezaire et al (2016), ModelDB (accession number 187604), and McDougal et al (2017)\n\nEmail tbg28@mail.missouri.edu with questions", icon='info')
    bar = tk.Menu(root)
    # File menu: currently only Exit.
    file_menu = tk.Menu(bar, tearoff=0)
    file_menu.add_command(label="Exit", command=root.quit)
    bar.add_cascade(label="File", menu=file_menu)
    # Help menu: About dialog.
    help_menu = tk.Menu(bar, tearoff=0)
    help_menu.add_command(label="About", command=about)
    bar.add_cascade(label="Help", menu=help_menu)
    return bar
#pass the method that will create the content for your frame
def bind_page(page, gen_frame):
    """Host gen_frame's content inside a canvas with x/y scrollbars.

    `gen_frame` is called with an inner frame to populate; afterwards the
    canvas scrollregion is sized to fit the generated content.
    (Pattern from https://stackoverflow.com/questions/42237310/tkinter-canvas-scrollbar)
    """
    outer = tk.Frame(page, bd=2)
    outer.pack(side="left", fill="both", expand=True)
    vbar = tk.Scrollbar(outer)
    vbar.pack(side=tk.RIGHT, fill=tk.Y)
    hbar = tk.Scrollbar(outer, orient=tk.HORIZONTAL)
    hbar.pack(side=tk.BOTTOM, fill=tk.X)
    canvas = tk.Canvas(outer, bd=0,
                       xscrollcommand=hbar.set,
                       yscrollcommand=vbar.set,)
    hbar.config(command=canvas.xview)
    vbar.config(command=canvas.yview)
    inner = tk.Frame(canvas)
    canvas.pack(side="left", fill="both", expand=True)
    canvas.create_window(0, 0, window=inner, anchor='nw')
    # Populate, then size the scrollable region to the actual content.
    gen_frame(inner)
    outer.update()
    canvas.config(scrollregion=canvas.bbox("all"))
# Shared GUI state: parameter name -> Tk StringVar, accessed through
# get_public_param/set_public_param below.  `root` is the Tk root window
# created earlier in this file.
params_dict = {'aaa':tk.StringVar(root,'bbb'),\
               'loaded_cellnums':tk.StringVar(root,''),\
               'loaded_conndata':tk.StringVar(root,''),\
               'loaded_syndata':tk.StringVar(root,''),\
               'loaded_phasicdata':tk.StringVar(root,''),
               'params_cellnums':tk.StringVar(root,''),
               'params_conndata':tk.StringVar(root,''),
               'params_syndata':tk.StringVar(root,''),
               'params_phasicdata':tk.StringVar(root,''),}
def get_public_param(param):
    """Return the current value of a shared parameter, or None if unknown."""
    var = params_dict.get(param)
    return var.get() if var is not None else None
def set_public_param(param, strvar):
    """Set a shared parameter.

    If `param` already exists, its stored StringVar is updated via .set();
    otherwise `strvar` itself becomes the entry (callers rely on both paths).
    """
    if param in params_dict:
        params_dict[param].set(strvar)
    else:
        params_dict[param] = strvar
    return
def reset_public_params():
    """Drop every shared parameter (e.g. before rebuilding all pages)."""
    params_dict.clear()
def parameters_page(root):
    '''
    Reads the parameters hoc file
    Lines should be formatted like:
    default_var("Variable","value") // Comment to be tip
    '''
    param_has_changed = False
    params_file = os.path.join('setupfiles','parameters.hoc')
    # Layout: management buttons on top, parameter tables below,
    # import/export controls to the right.
    top_option_frame = tk.LabelFrame(root, text="Management")
    table_frame = tk.Frame(root)
    import_export_frame = tk.LabelFrame(root, text="Import/Export")
    top_option_frame.grid(column=0,row=0,sticky='news',padx=10,pady=5)
    table_frame.grid(column=0,row=1,sticky='news',padx=10,pady=5)
    import_export_frame.grid(column=1,row=0,sticky='news',padx=10,pady=5)
    # NOTE(review): assigns a *local* param_has_changed (no `nonlocal`), so
    # the outer flag above never actually updates; the flag is not read in
    # this file's visible code, so the bug currently has no effect — confirm.
    def param_changed(*args,val=True):
        param_has_changed = val
    class Row(tk.Frame):
        # One "variable = value" line of the parameters file, rendered as a
        # label + entry pair.  An unconfigured Row acts as a vertical spacer.
        def __init__(self, parent, *args, **kwargs):
            tk.Frame.__init__(self, parent, *args, **kwargs)
            self.parent = parent
            self.root = tk.Frame(self.parent)
            self.is_string = False
            return
        def config(self, variable, value, comment, is_string):
            """Populate this row's widgets; returns self for chaining."""
            self.v_value = tk.StringVar(self.root)
            self.v_value.set(value)
            self.v_value.trace("w",param_changed)
            self.variable = variable
            self.comment = comment
            self.is_string = is_string
            frame = tk.Frame(self.root)
            var = tk.Label(frame, text=variable ,width=20,background='light gray')
            var.config(relief=tk.GROOVE)
            var.grid(column=0, row=0, padx=5, sticky='WE')
            val = tk.Entry(frame,textvariable=self.v_value)
            val.grid(column=1, row=0, sticky='E')
            CreateToolTip(var,comment)
            frame.pack()
            return self
        def row_to_param_str(self):
            # Serialize back to a parameters.hoc line; string values keep
            # their quotes.
            #default_var("RunName","testrun") // Name of simulation run
            proto = "default_var(\"{}\",{})\t\t// {}"
            if self.is_string:
                proto = "default_var(\"{}\",\"{}\")\t\t// {}"
            line = proto.format(self.variable,self.v_value.get(),self.comment)
            return line
        def pack(self,*args,**kwargs):
            super(Row,self).pack(*args,**kwargs)
            self.root.pack(*args,**kwargs)
        def grid(self,*args,**kwargs):
            super(Row,self).grid(*args,**kwargs)
            self.root.grid(*args,**kwargs)
    row_header = ['variable','value','comment','value_is_string']
    rows=[]
    def load(filename):
        """Parse default_var(...) lines from `filename` into a DataFrame
        with columns row_header."""
        params = []
        with open(filename) as fp:
            line = fp.readline()
            cnt = 1
            while line:
                m = re.search('default_var\((.+?)\)', line)
                if m:
                    line_variable = re.search('\"(.+?)\"', m.group(1)).group(1)
                    line_value = re.search(',(.+?)$', m.group(1)).group(1)
                    line_comment = re.search('\/\/ (.+?)$',line).group(1)
                    line_value_string = False
                    # Quoted value -> remember it is a string so save()
                    # re-quotes it.
                    n = re.search('\"(.*?)\"',line_value)
                    if n:
                        line_value_string = True
                        line_value=n.group(1)
                    params.append([line_variable,line_value,line_comment,line_value_string])
                line = fp.readline()
                cnt += 1
        df = pd.DataFrame(params,columns=row_header)
        return df
    def save():
        """Write every Row back to the parameters file, then re-publish the
        file-selection parameters (ConnData/SynData/...)."""
        file = open(params_file,"w")
        for r in rows:
            file.write(r.row_to_param_str()+"\n")
        file.close()
        display_app_status('Parameters \"'+params_file+'\" saved')
        df = load(params_file)
        re_set_file_params(df)
        return
    # Category frames; each parameter is routed into one of these by the
    # *_vars name lists below.
    general_frame = tk.LabelFrame(table_frame, text="General",fg="blue")
    general_frame.grid(column=0,row=0,sticky='news',padx=10,pady=5)
    dropdown_frame = tk.LabelFrame(table_frame, text="Data Sources",fg="blue")
    dropdown_frame.grid(column=1,row=0,sticky='news',padx=10,pady=5)
    space_frame = tk.LabelFrame(table_frame, text="Spacial Config",fg="blue")
    space_frame.grid(column=0,row=1,sticky='news',padx=10,pady=5)
    print_frame = tk.LabelFrame(table_frame, text="Print/Output",fg="blue")
    print_frame.grid(column=1,row=2,sticky='news',padx=10,pady=5)
    misc_frame = tk.LabelFrame(table_frame, text="Miscellaneous",fg="blue")
    misc_frame.grid(column=0,row=2,sticky='news',padx=10,pady=5)
    lfp_frame = tk.LabelFrame(table_frame, text="LFP Config",fg="blue")
    lfp_frame.grid(column=1,row=1,sticky='news',padx=10,pady=5)
    param_file_vars = ['ConnData','SynData','NumData','PhasicData']
    general_vars = ['RunName', 'Scale','SimDuration','StepBy','TemporalResolution','RandomVrest','RandomVinit']
    space_vars = ['TransverseLength','LongitudinalLength','LayerHeights','SpatialResolution']
    dropdown_vars = ['Connectivity','Stimulation']
    print_vars = ['PrintVoltage','PrintTerminal','PrintConnDetails','PrintCellPositions','PrintConnSummary','CatFlag','EstWriteTime','NumTraces']
    lfp_vars = ['lfp_dt','ElectrodePoint','ComputeNpoleLFP','ComputeDipoleLFP','LFPCellTypes','MaxEDist']
    def refresh(df):
        """Rebuild all Row widgets from `df`, categorized into the frames
        above, and publish each value via set_public_param."""
        param_changed(val=False)
        rows.clear()
        padtopbot = 3
        # Leading spacer rows for each category frame.
        Row(general_frame).pack(pady=padtopbot-1)
        Row(dropdown_frame).pack(pady=padtopbot-1)
        Row(space_frame).pack(pady=padtopbot-1)
        Row(print_frame).pack(pady=padtopbot-1)
        Row(misc_frame).pack(pady=padtopbot-1)
        Row(lfp_frame).pack(pady=padtopbot-1)
        for i, row in df.iterrows():
            temp = []
            temp.append(row.tolist())
            temp = temp[0]
            #config(self, variable, value, comment, is_string):
            # Route the parameter to its category frame (default: misc).
            frame = misc_frame
            if temp[0] in general_vars:
                frame=general_frame
            elif temp[0] in dropdown_vars or temp[0] in param_file_vars:
                frame=dropdown_frame
            elif temp[0] in space_vars:
                frame=space_frame
            elif temp[0] in print_vars:
                frame=print_frame
            elif temp[0] in lfp_vars:
                frame=lfp_frame
            #This is all pages to change
            row = Row(frame).config(temp[0],temp[1],temp[2],temp[3])
            row.pack(padx=10)
            rows.append(row)
            set_public_param(temp[0],row.v_value)
        # Trailing spacer rows.
        Row(general_frame).pack(pady=padtopbot)
        Row(dropdown_frame).pack(pady=padtopbot)
        Row(space_frame).pack(pady=padtopbot)
        Row(print_frame).pack(pady=padtopbot)
        Row(misc_frame).pack(pady=padtopbot)
        Row(lfp_frame).pack(pady=padtopbot)
        return
    def re_set_file_params(df):
        # Publish only the file-selection parameters (ConnData etc.) as
        # plain values.
        for i, row in df.iterrows():
            temp = []
            temp.append(row.tolist())
            temp = temp[0]
            if temp[0] in param_file_vars:
                set_public_param(temp[0],temp[1])
        return
    # The following actions are placeholders (buttons disabled below).
    def verify():
        display_app_status('Not implemented')
        return
    def load_configs():
        display_app_status('Not implemented')
        return
    def import_model():
        display_app_status('Not implemented')
        return
    def export_model():
        display_app_status('Not implemented')
        return
    verifyBuildButton = tk.Button(top_option_frame, text="Verify Model Configuration", command=verify)
    verifyBuildButton.grid(column=1, row =0, padx=5, pady=5, sticky='W')
    verifyBuildButton.config(state=tk.DISABLED)
    loadConfigsButton = tk.Button(top_option_frame, text="Load Parameters into Views", command=load_configs)
    loadConfigsButton.grid(column=2, row =0, padx=5, pady=5, sticky='W')
    loadConfigsButton.config(state=tk.DISABLED)
    saveButton = tk.Button(top_option_frame, text="Save Parameters File", command=save)
    saveButton.grid(column=0, row =0, padx=5, pady=5, sticky='W')
    importButton = tk.Button(import_export_frame, text="Import Model", command=import_model)
    importButton.grid(column=0, row =0, padx=5, pady=5, sticky='WE')
    importButton.config(state=tk.DISABLED)
    exportButton = tk.Button(import_export_frame, text="Export Model", command=export_model)
    exportButton.grid(column=0, row =1, padx=5, pady=5, sticky='WE')
    exportButton.config(state=tk.DISABLED)
    # Initial population from the parameters file on disk.
    df = load(params_file)
    refresh(df)
def cells_page(root):
    """Page for viewing/editing a cell-numbers ("cellnums") file.

    File format: first line is the row count, then space-delimited rows of
    [friendly name, class file name, cell count, layer index, artificial flag].
    The selected file is tracked in `filename`; changing it triggers load().
    """
    column_names = ["Friendly Cell Name", "Cell File Name", "Number of Cells", "Layer Index","Artificial:1 Real:0"]
    top_option_frame = tk.LabelFrame(root, text="File Management")
    table_frame = tk.LabelFrame(root, text="Cell Numbers")
    bottom_option_frame = tk.Frame(root)
    top_option_frame.grid(column=0,row=0,sticky='news',padx=10,pady=5)
    table_frame.grid(column=0,row=1,sticky='news',padx=10,pady=5)
    bottom_option_frame.grid(column=0,row=2)
    pt = PandasTable(table_frame, show_add_row_button=True)
    cellclasses_a = []
    options = glob.glob(cellnums_glob)
    if len(options) == 0:  # bugfix: was `is 0` (identity test against an int literal)
        options.append('')
    def generate_files_available():
        # Derive friendly class names from the hoc files found in cells/.
        cellclasses_a.clear()
        search = 'cells\\\\class_(.+?).hoc'
        for c in cellclasses:
            m = re.search(search, c)
            if m:
                cellclasses_a.append(m.group(1))
    def update_scrollbar(panda_table_root):
        # Resize the enclosing scrollable canvas to fit the table.
        panda_table_root.update()
        root.master.configure(scrollregion=(0, 0, panda_table_root.winfo_width()*1.25, panda_table_root.winfo_height()*1.5 ))
    def load(*args):
        """(Re)load the table from the file named by `filename` (trace callback)."""
        if not filename.get() or filename.get() == '':  # bugfix: was `is ''`
            return
        #print ("loading: " + filename.get())
        cellnums_pd = pd.read_csv(filename.get() ,delimiter=' ',\
                                  skiprows=1,header=None,\
                                  names = column_names)
        cellnums_pd[column_names[2]] = cellnums_pd[column_names[2]].astype(int)
        cellnums_pd[column_names[3]] = cellnums_pd[column_names[3]].astype(int)
        cellnums_pd[column_names[4]] = cellnums_pd[column_names[4]].astype(int)
        pt.set_dataframe(cellnums_pd, options_dict=d, show_numbering=True, show_delete_row=True, first_column_is_header=False)
        pt.pack()
        update_scrollbar(pt.root)
        set_public_param("loaded_cellnums",filename.get())
        display_app_status('Cells file \"'+filename.get()+'\" loaded')
    def save(save_to=None):
        """Write the table back out: row-count header line, then the rows."""
        pt_df = pt.get_dataframe()
        (nr,nc) = pt_df.shape
        tb = pt_df.to_csv(sep=' ',header=False,index=False)
        if not save_to:
            save_to = filename.get()
        file = open(save_to,"w")
        file.write(str(nr)+'\n')
        file.write(tb)
        file.close()
        display_app_status('Cells file \"'+filename.get()+'\" saved')
    def new():
        """Create a new, empty cellnums file and select it in the dropdown."""
        if pt.has_changed():
            result = messagebox.askquestion("New", "Are you sure? Data has been changed.", icon='warning')
            if result != 'yes':
                return
        d = DialogEntryBox(root,text="New File Name:",lefttext=os.path.join(dataset_folder, cellnums_file_prefix),righttext=cellnums_file_postfix)
        root.wait_window(d.top)
        if d.confirm==False:
            return
        newfilename = os.path.join(dataset_folder, cellnums_file_prefix+ d.value.get() + cellnums_file_postfix)
        f = open(newfilename,"w+")
        f.close()  # bugfix: was `f.close` — attribute access without a call leaked the handle
        #pt.new()
        generate_files_available()
        # Rebuild the OptionMenu entries to include the new file.
        #https://stackoverflow.com/questions/17580218/changing-the-options-of-a-optionmenu-when-clicking-a-button
        m = fileMenu.children['menu']
        m.delete(0,tk.END)
        newvalues = options
        newvalues.append(newfilename)
        for val in newvalues:
            m.add_command(label=val,command=lambda v=filename,l=val:v.set(l))
        filename.set(newfilename)
        pt.new()
        display_app_status('Cells file \"'+newfilename+'\" created')
    def new_clone():
        """"Save As": write the current table to a newly named file."""
        if pt.has_changed():
            result = messagebox.askquestion("New", "Are you sure? Data has been changed.", icon='warning')
            if result != 'yes':
                return
        d = DialogEntryBox(root,text="New File Name:",lefttext=os.path.join(dataset_folder, cellnums_file_prefix),righttext=cellnums_file_postfix)
        root.wait_window(d.top)
        if d.confirm==False:
            return
        newfilename = os.path.join(dataset_folder,cellnums_file_prefix+ d.value.get() + cellnums_file_postfix)
        f = open(newfilename,"w+")
        f.close()
        save(save_to=newfilename)
        m = fileMenu.children['menu']
        m.delete(0,tk.END)
        newvalues = options
        newvalues.append(newfilename)
        for val in newvalues:
            m.add_command(label=val,command=lambda v=filename,l=val:v.set(l))
        filename.set(newfilename)
        display_app_status('Cells file \"'+filename.get()+'\" created')
        return
    def set_numdata_param():
        # Publish the short name (between prefix and postfix) as NumData.
        fn = filename.get()
        search = cellnums_file_prefix+'(.+?)'+cellnums_file_postfix
        m = re.search(search,fn)
        if m:
            fn = m.group(1)
        set_public_param("NumData", fn)
        display_app_status('NumData parameter set to \"'+ filename.get() +'\" in current parameters file')
        return
    def delete_current_file():
        # Placeholder; the Delete button is disabled below.
        return
    def load_numdata_param():
        # Select the file named by the current NumData parameter.
        numdat = get_public_param("NumData")
        numdat = os.path.join(dataset_folder, cellnums_file_prefix + numdat + cellnums_file_postfix)
        #filename.set('')
        filename.set(numdat)
    generate_files_available()
    d = defaultdict(list)
    d[1].append(cellclasses_a)
    #Create the choice option panel
    filename = tk.StringVar(top_option_frame)
    filename.trace("w",load)
    #numdat = get_public_param("NumData")
    #numdat = os.path.join(dataset_folder, cellnums_file_prefix + numdat + cellnums_file_postfix)
    filename.set('')
    #filename.set(numdat)
    #filename.set(options[0])
    load()#initial load
    newButton = tk.Button(top_option_frame, text="New", command=new,width=30)
    newButton.grid(column=0, row =0, padx=5,columnspan=2, sticky='WE')
    useButton = tk.Button(top_option_frame, text="Set as NumData", command=set_numdata_param,width=15)
    useButton.grid(column=0, row =1, padx=5, sticky='W')
    loadButton = tk.Button(top_option_frame, text="Load NumData", command=load_numdata_param,width=15)
    loadButton.grid(column=1, row =1, padx=5, sticky='W')
    fileMenu = tk.OptionMenu(top_option_frame, filename, *options)
    fileMenu.grid(column=2, row =0, padx=5, sticky='WE',columnspan=2)
    saveButton = tk.Button(top_option_frame, text="Save", command=save)
    saveButton.grid(column=2, row =1, padx=5, pady=5, sticky='WE')
    newCloneButton = tk.Button(top_option_frame, text="Save As", command=new_clone)
    newCloneButton.grid(column=3, row =1, padx=5, sticky='WE')
    deleteButton = tk.Button(top_option_frame, text="Delete", command=delete_current_file)
    deleteButton.grid(column=4, row =0, padx=5, pady=5, sticky='W')
    deleteButton.config(state=tk.DISABLED)
def connections_page(root):
    """Page for viewing/editing a connection-data ("ConnData") file.

    The file stores one row per (postsynaptic, presynaptic) pair with three
    value columns — synaptic weight, convergence, synapses-per-connection —
    each shown on its own matrix-style sub-page via connections_adapter.
    """
    class connections_adapter(object):
        # Presents one value column of the long-format ConnData frame as a
        # pre-x-post matrix in a PandasTable, and converts back on save.
        def __init__(self, root, col, text=''):
            self.root = root
            self.col = col  # index of the value column this page displays
            tk.Label(root, text=text ,fg='blue').pack(anchor='w')
            self.pt = PandasTable(self.root, show_add_row_button=False, allow_sorting=False)
            self.pt.pack()
        def read_internal(self, df, astype=None):
            """Pivot long-format df (pre, post, values...) into a matrix
            with one row per unique pre and one column per unique post."""
            df1 = df[df.columns[[0,1,self.col]]]
            pre = df1[df1.columns[0]].unique()
            pre = pd.DataFrame(pre)
            post = df1[df1.columns[1]].unique()
            vals = df1[df1.columns[2]]
            # Relies on the file listing rows grouped by the first column in
            # a consistent order so a plain reshape is a valid pivot.
            vals = pd.DataFrame(vals.values.reshape(len(pre),len(post)),columns=post)
            if astype:
                vals = vals.astype(astype)
            df1 = pd.concat([pre,vals],axis=1)
            #df1[df1.columns[self.col]] = df1[df1.columns[self.col]]
            return pd.DataFrame(df1)
        def get_df(self):
            """Un-pivot the edited matrix back to long format
            (first-col, header, value) with columns renumbered 0..2."""
            pt_df = self.pt.get_dataframe()
            (nr,nc) = pt_df.shape
            cols = list(range(1,nc))
            df1 = pt_df[pt_df.columns[cols]]
            data_column = pd.DataFrame(df1.values.reshape(nr*(nc-1),1))#.astype(float)
            post_column = pd.DataFrame(pt_df[pt_df.columns[0]])
            post_column = np.repeat(post_column[post_column.columns[0]],nc-1).reset_index(drop=True)
            pre_column = list(pt_df)
            del pre_column[0]
            pre_column = pd.DataFrame(pre_column*nr)
            df_ret = pd.concat([post_column, pre_column, data_column],axis=1)
            df_ret.columns = range(df_ret.shape[1])
            return df_ret
        def refresh(self, df, astype=None):
            # Rebuild the table view from a long-format dataframe.
            self.pt.set_dataframe(self.read_internal(df, astype), show_delete_row=False,\
                                  show_header=True, show_numbering=False, \
                                  first_column_is_id=True)
            self.pt.pack()
        def has_changed(self):
            return self.pt.has_changed()
    def raise_frame(frame):
        # Bring one of the three sub-pages to the front.
        frame.tkraise()
    top_option_frame = tk.LabelFrame(root, text="File Management")
    table_frame = tk.LabelFrame(root, text="Connection Data")
    table_frame_internal = tk.Frame(table_frame)
    table_frame_controls = tk.Frame(table_frame)
    bottom_option_frame = tk.LabelFrame(root)
    top_option_frame.grid(column=0,row=0,sticky='we',padx=10,pady=5)
    table_frame.grid(column=0,row=1,sticky='we',padx=10,pady=5)
    table_frame_controls.grid(column=0, row=0, sticky='we')
    table_frame_internal.grid(column=0, row=1, sticky='news')
    bottom_option_frame.grid(column=0,row=2,sticky='we')
    page2 = tk.Frame(table_frame_internal)
    page3 = tk.Frame(table_frame_internal)
    page1 = tk.Frame(table_frame_internal)
    ######################################
    cellclasses_a = []
    options = glob.glob(connections_glob)
    if len(options) == 0:  # bugfix: was `is 0` (identity test against an int literal)
        options.append('')
    d = defaultdict(list)
    d[1].append(cellclasses_a)
    # One adapter per value column: weights (col 2), convergence (3), synapses (4).
    tk.Button(table_frame_controls, text='Synaptic Weights', command=lambda:raise_frame(page1)).grid(column=0,row=0,padx=4,pady=4)
    text = 'Synaptic weight refers to the strength of a connection between two nodes, corresponding in biology to the influence the firing neuron on another neuron.'
    synaptic_weight_page_obj = connections_adapter(page1,2,text=text)
    tk.Button(table_frame_controls, text='Convergence', command=lambda:raise_frame(page2)).grid(column=1,row=0,padx=4,pady=4)
    text = 'Convergence defines the *total* number of connections to be randomly distributed between the presynaptic type and the postsynaptic type neuron.'
    convergence_page_obj = connections_adapter(page2,3,text)#convergence_page(page2)
    tk.Button(table_frame_controls, text='Synapses', command=lambda:raise_frame(page3)).grid(column=2,row=0,padx=4,pady=4)
    text = 'Synapses per connection to be made.'
    synapses_page_obj = connections_adapter(page3,4,text)#synapses_page(page3)
    ######################################
    def generate_files_available():
        # Derive friendly class names from the hoc files found in cells/.
        cellclasses_a.clear()
        search = 'cells\\\\class_(.+?).hoc'
        for c in cellclasses:
            m = re.search(search, c)
            if m:
                cellclasses_a.append(m.group(1))
    def set_whole_df(df):
        """Push a long-format dataframe into all three sub-pages."""
        page1.grid_forget()
        page2.grid_forget()
        page3.grid_forget()
        convergence_page_obj.refresh(df,'uint')
        synapses_page_obj.refresh(df,'uint')
        synaptic_weight_page_obj.refresh(df)
        # Re-grid with page1 (weights) on top.
        page2.grid(column=0,row=0,sticky='news')
        page3.grid(column=0,row=0,sticky='news')
        page1.grid(column=0,row=0,sticky='news')
        return
    def update_scrollbar(panda_table_root):
        panda_table_root.update()
        root.master.configure(scrollregion=(0, 0, panda_table_root.winfo_width()*1.25, panda_table_root.winfo_height()*1.5 ))
    def load(*args,load_from=None):
        """(Re)load all three sub-pages from a file (trace callback)."""
        if not load_from:
            if not filename.get() or filename.get() == '':  # bugfix: was `is ''`
                return
            else:
                load_from = filename.get()
        # NOTE(review): these column labels look copied from the cells file;
        # they are positional-only here, so behavior is unaffected — confirm.
        df = pd.read_csv(load_from ,delimiter=' ',\
                         skiprows=1,header=None,\
                         names = ["Friendly Cell Name", "Cell File Name", "Num Cells", "Layer Index","Artificial:1 Real:0"])
        set_whole_df(df)
        update_scrollbar(synaptic_weight_page_obj.pt.root)
        display_app_status('Connections Data file \"'+filename.get()+'\" loaded')
        return
    def get_whole_df():
        """Recombine the three edited sub-pages into one long-format frame."""
        wei_df = synaptic_weight_page_obj.get_df()
        con_df = convergence_page_obj.get_df()
        syn_df = synapses_page_obj.get_df()
        head_df = pd.DataFrame(wei_df[wei_df.columns[0:2]])
        wei_df = pd.DataFrame(wei_df[wei_df.columns[2]]).astype('float')
        wei_df.columns = [2]
        con_df = pd.DataFrame(con_df[con_df.columns[2]]).astype('float')
        con_df.columns = [3]
        syn_df = pd.DataFrame(syn_df[syn_df.columns[2]]).astype('float')
        syn_df.columns = [4]
        df = pd.concat([head_df, wei_df, con_df, syn_df],axis=1)
        return df
    def save(save_to=None):
        """Write the combined data out: row-count header, then the rows."""
        if not save_to:
            save_to = filename.get()
        df = get_whole_df()
        (nr,nc) = df.shape
        tb = df.to_csv(sep=' ',header=False,index=False,float_format='%.6f')
        file = open(save_to,"w")
        file.write(str(nr)+'\n')
        file.write(tb)
        file.close()
        display_app_status('Connections Data file \"'+filename.get()+'\" saved')
        return
    def new():
        """Generate a zeroed ConnData file from the currently loaded cells
        file: every cell type as presynaptic, every non-artificial type as
        postsynaptic."""
        if synaptic_weight_page_obj.has_changed() or convergence_page_obj.has_changed() or synapses_page_obj.has_changed():
            result = messagebox.askquestion("New", "Are you sure? Data has been changed.", icon='warning')
            if result != 'yes':
                return
        d = DialogEntryBox(root,text="New File Name:",lefttext=os.path.join(dataset_folder, conndata_file_prefix),righttext=conndata_file_postfix)
        root.wait_window(d.top)
        if d.confirm==False:
            return
        #get all as presynaptic
        #get all not artificial as postsynaptic
        newfilename = os.path.join(dataset_folder,conndata_file_prefix+ d.value.get() + conndata_file_postfix)
        loaded_cellnums = get_public_param("loaded_cellnums")
        column_names = ["Friendly Cell Name", "Cell File Name", "Num Cells", "Layer Index","Artificial:1 Real:0"]
        cellnums_pd = pd.read_csv(loaded_cellnums ,delimiter=' ',\
                                  skiprows=1,header=None,\
                                  names = column_names)
        cellnums_pd[column_names[4]] = cellnums_pd[column_names[4]].astype(int)
        pre = cellnums_pd[cellnums_pd.columns[0]].values.tolist()
        (pre_nr,pre_nc) = pd.DataFrame(pre).shape
        post = cellnums_pd.loc[cellnums_pd[column_names[4]] == 0]
        post = post[post.columns[0]]
        post = pd.DataFrame(post)
        (post_nr,post_nc) = post.shape
        # Cartesian product: each post repeated pre_nr times against the
        # pre list tiled post_nr times.
        post = np.repeat(post[post.columns[0]],pre_nr).reset_index(drop=True)
        pre = pd.DataFrame(pre*post_nr)
        df = pd.concat([pre,post],axis=1)
        df[2] = '0.0'
        df[3] = '0'
        df[4] = '0'
        tb = df.to_csv(sep=' ',header=False,index=False,float_format='%.6f')
        file = open(newfilename,"w")
        file.write(str(pre_nr*post_nr)+'\n')
        file.write(tb)
        file.close()
        load(load_from=newfilename)
        reload_files_and_set(newfilename)
        #create a presynaptic*postsynaptic by 5 pandas dataframe
        #set all values to zero
        #set first column
        #set second column
        display_app_status('Connections Data file \"'+filename.get()+'\" created')
        return
    def new_clone_current():
        """"Save As": write the current tables to a newly named file."""
        if synaptic_weight_page_obj.has_changed() or convergence_page_obj.has_changed() or synapses_page_obj.has_changed():
            result = messagebox.askquestion("New", "Are you sure? Data has been changed.", icon='warning')
            if result != 'yes':
                return
        d = DialogEntryBox(root,text="New File Name:",lefttext=os.path.join(dataset_folder, conndata_file_prefix),righttext=conndata_file_postfix)
        root.wait_window(d.top)
        if d.confirm==False:
            return
        newfilename = os.path.join(dataset_folder,conndata_file_prefix+ d.value.get() + conndata_file_postfix)
        f = open(newfilename,"w+")
        f.close()
        save(save_to=newfilename)
        # bugfix: was `reload_files_and_set()` — the function requires the
        # new filename; the no-arg call raised TypeError on "Save As".
        reload_files_and_set(newfilename)
        display_app_status('Connections Data file \"'+filename.get()+'\" was created')
        return
    def set_conndata_param():
        # Publish the short name (between prefix and postfix) as ConnData.
        fn = filename.get()
        search = conndata_file_prefix+'(.+?)'+conndata_file_postfix
        m = re.search(search,fn)
        if m:
            fn = m.group(1)
        set_public_param("ConnData", fn)
        display_app_status('ConnData parameter set to \"'+ filename.get() +'\" in current parameters file')
        return
    def reload_files_and_set(newfilename):
        # Rebuild the OptionMenu entries to include `newfilename`, then select it.
        m = fileMenu.children['menu']
        m.delete(0,tk.END)
        newvalues = options
        newvalues.append(newfilename)
        for val in newvalues:
            m.add_command(label=val,command=lambda v=filename,l=val:v.set(l))
        filename.set(newfilename)
    def delete_current_file():
        # Placeholder; the Delete button is disabled below.
        return
    def load_conndata_param():
        # Select the file named by the current ConnData parameter.
        conndat = get_public_param("ConnData")
        conndat = os.path.join(dataset_folder, conndata_file_prefix + conndat + conndata_file_postfix)
        #filename.set('')
        filename.set(conndat)
    #generate_files_available()
    #Create the choice option panel
    filename = tk.StringVar(top_option_frame)
    filename.trace("w",load)
    filename.set('')
    #filename.set(options[0])
    newFromCellsButton = tk.Button(top_option_frame, text="Generate New from Current Cells File", command=new, width=30)
    newFromCellsButton.grid(column=0, row =0, padx=5, sticky='WE',columnspan=2)
    useButton = tk.Button(top_option_frame, text="Set as ConnData", command=set_conndata_param, width=15)
    useButton.grid(column=0, row =1, padx=5, sticky='W')
    loadButton = tk.Button(top_option_frame, text="Load ConnData", command=load_conndata_param,width=15)
    loadButton.grid(column=1, row =1, padx=5, sticky='W')
    fileMenu = tk.OptionMenu(top_option_frame, filename, *options)
    fileMenu.grid(column=2, row =0, padx=5, sticky='WE',columnspan=2)
    deleteButton = tk.Button(top_option_frame, text="Delete", command=delete_current_file)
    deleteButton.grid(column=4, row =0, padx=5, pady=5, sticky='W')
    deleteButton.config(state=tk.DISABLED)
    saveButton = tk.Button(top_option_frame, text="Save", command=save)
    saveButton.grid(column=2, row =1, padx=5,pady=5, sticky='WE')
    newFromCurrentButton = tk.Button(top_option_frame, text="Save As", command=new_clone_current)
    newFromCurrentButton.grid(column=3, row =1, padx=5, sticky='WE')
def synapses_page(root):
    """Build the Synapse Data page inside `root`: file management controls
    plus an editable table of synapse definitions."""
    sections_list = ['dendrite_list','soma_list','apical_list','axon_list']
    synapse_type_list = ['MyExp2Sid','ExpGABAab','Custom']
    condition_list = ['distance(x)','y3d(x)']
    # Column order matches the on-disk format except that the UI swaps the
    # first two columns for display (see load()/save()).
    synapse_column_names = ["Postsynaptic Cell", "Presynaptic Cells",
                            "Synapse Type", "Postsynaptic Section Target",
                            "Condition 1", "Condition 2",
                            "Tau1a/modfile", "Tau2a/cust1", "ea/cust2",
                            "Tau1b/cust3", "Tau2b/cust4", "eb/cust5"]
    # .mod files offered for the "Custom" synapse type; keep one blank entry
    # so the OptionMenu can always be built.
    mod_list = glob.glob(mods_glob)
    # Fixed: was `len(mod_list) is 0` — `is` compares identity, not value,
    # and is not guaranteed to work for int literals.
    if not mod_list:
        mod_list.append('')
class synapses_adapter(object):
def __init__(self, root):
self.root = root
self.pt = PandasTable(self.root, show_add_row_button=False)
self.pt.pack()
def read_internal(self, df):
'''get whole dataframe'''
return df
def get_df(self):
df = self.pt.get_dataframe().replace(np.nan, '', regex=True).replace('nan','',regex=True)
return df
def refresh(self, df):
d = defaultdict(list)
d[3].append(sections_list)
self.pt.set_dataframe(self.read_internal(df), show_delete_row=True,\
show_header=True, show_numbering=True, \
first_column_is_id=False, immutable_values=["nan"],\
options_dict=d)
self.pt.pack()
def add_row(self, row):
row = np.array(row).reshape(-1,len(row))
r = pd.DataFrame(row,columns=synapse_column_names)
for i, row in r.iterrows():
self.pt.add_row(row)
return
def has_changed(self):
return self.pt.has_changed()
class SynapseEntryBox:
def __init__(self, parent, text="value", lefttext="",righttext=""):
top = self.top = tk.Toplevel(parent)
top.geometry('475x475')
top.resizable(0,0)
tk.Label(top, text='Create new synapse:\nValues from currently loaded cells file.').grid(row=0,column=0,sticky="WE",columnspan=2)
core_extras = tk.Frame(top)
gaba_extras = tk.Frame(top)
custom_extras = tk.Frame(top)
def showhide_gaba_extras(*args):
if self.syntype_value.get() == synapse_type_list[1]:
custom_extras.grid_forget()
core_extras.grid(row=7,column=0,columnspan=2)
gaba_extras.grid(row=10,column=0,columnspan=2)
elif self.syntype_value.get() == synapse_type_list[2]:
core_extras.grid_forget()
gaba_extras.grid_forget()
custom_extras.grid(row=7,column=0,columnspan=4)
return
else:
core_extras.grid(row=7,column=0,columnspan=2)
custom_extras.grid_forget()
gaba_extras.grid_forget()
return
core_extras.grid(row=7,column=0,columnspan=2)
self.pre_value = tk.StringVar(top)
self.post_value = tk.StringVar(top)
self.syntype_value = tk.StringVar(top)
self.section_value = tk.StringVar(top)
self.cond1_value = tk.StringVar(top)
self.cond1_text_value = tk.StringVar(top)
self.cond2_value = tk.StringVar(top)
self.cond2_text_value = tk.StringVar(top)
self.tau1a_value = tk.StringVar(top)
self.tau2a_value = tk.StringVar(top)
self.ea_value = tk.StringVar(top)
self.tau1b_value = tk.StringVar(top)
self.tau2b_value = tk.StringVar(top)
self.eb_value = tk.StringVar(top)
self.custom_mod_value = tk.StringVar(top)
self.custom1_value = tk.StringVar(top)
self.custom2_value = tk.StringVar(top)
self.custom3_value = tk.StringVar(top)
self.custom4_value = tk.StringVar(top)
self.custom5_value = tk.StringVar(top)
self.custom6_value = tk.StringVar(top)
self.custom1_value2 = tk.StringVar(top)
self.custom2_value2 = tk.StringVar(top)
self.custom3_value2 = tk.StringVar(top)
self.custom4_value2 = tk.StringVar(top)
self.custom5_value2 = tk.StringVar(top)
self.custom6_value2 = tk.StringVar(top)
self.confirm = False
#Inputs
loaded_cellnums = get_public_param("loaded_cellnums")
column_names = ["Friendly Cell Name", "Cell File Name", "Num Cells", "Layer Index","Artificial:1 Real:0"]
cellnums_pd = pd.read_csv(loaded_cellnums ,delimiter=' ',\
skiprows=1,header=None,\
names = column_names)
cellnums_pd[column_names[4]] = cellnums_pd[column_names[4]].astype(int)
pre_options = cellnums_pd[cellnums_pd.columns[0]].values.tolist()
post_options = cellnums_pd.loc[cellnums_pd[column_names[4]] == 0]
post_options = post_options[post_options.columns[0]].values.tolist()
l = tk.Label(top, text='Presynaptic Cell',width=25, background='light gray')
l.grid(row=1,column=0,pady=5,padx=5)
l.config(relief=tk.GROOVE)
#self.pre = tk.Entry(top,textvariable=self.pre_value)
self.pre = tk.OptionMenu(top, self.pre_value, *pre_options)
self.pre.grid(row=1,column=1)
l = tk.Label(top, text='Postsynaptic Cell',width=25, background='light gray')
l.grid(row=2,column=0,pady=5,padx=5)
l.config(relief=tk.GROOVE)
#self.post = tk.Entry(top,textvariable=self.post_value)
self.post = tk.OptionMenu(top, self.post_value, *post_options)
self.post.grid(row=2,column=1)
l = tk.Label(top, text='Synapse Type',width=25, background='light gray')
l.grid(row=3,column=0,pady=5,padx=5)
l.config(relief=tk.GROOVE)
#self.syntype = tk.Entry(top,textvariable=self.syntype_value)
self.syntype = tk.OptionMenu(top, self.syntype_value, *synapse_type_list)
self.syntype_value.trace("w",showhide_gaba_extras)
self.syntype_value.set(synapse_type_list[0])
self.syntype.grid(row=3,column=1)
l = tk.Label(top, text='Postsynaptic Section Target',width=25, background='light gray')
l.grid(row=4,column=0,pady=5,padx=5)
l.config(relief=tk.GROOVE)
#self.section = tk.Entry(top,textvariable=self.section_value)
self.section = tk.OptionMenu(top, self.section_value, *sections_list)
self.section.grid(row=4,column=1)
l = tk.Label(top, text='Condition 1',width=25, background='light gray')
l.grid(row=5,column=0,pady=5,padx=5)
l.config(relief=tk.GROOVE)
#self.cond1 = tk.Entry(top,textvariable=self.cond1_value)
self.cond1 = tk.OptionMenu(top, self.cond1_value, *condition_list)
self.cond1_value.set(condition_list[0])
self.cond1.grid(row=5,column=1)
tk.Label(top, text=' > ').grid(row=5, column=2)
self.cond1_text = tk.Entry(top, textvariable=self.cond1_text_value)
self.cond1_text_value.set('-1')
self.cond1_text.grid(row=5, column=3)
l = tk.Label(top, text='Condition 2',width=25, background='light gray')
l.grid(row=6,column=0,pady=5,padx=5)
l.config(relief=tk.GROOVE)
#self.cond2 = tk.Entry(top,textvariable=self.cond2_value)
self.cond2 = tk.OptionMenu(top, self.cond2_value, *condition_list)
self.cond2_value.set(condition_list[0])
self.cond2.grid(row=6,column=1)
tk.Label(top, text=' < ').grid(row=6, column=2)
self.cond2_text = tk.Entry(top, textvariable=self.cond2_text_value)
self.cond2_text_value.set('10000')
self.cond2_text.grid(row=6, column=3)
l = tk.Label(core_extras, text='Tau1a',width=25, background='light gray')
l.grid(row=7,column=0,pady=5,padx=5)
l.config(relief=tk.GROOVE)
self.tau1a = tk.Entry(core_extras,textvariable=self.tau1a_value)
self.tau1a_value.set('2.0')
self.tau1a.grid(row=7,column=1)
l = tk.Label(core_extras, text='Tau2a',width=25, background='light gray')
l.grid(row=8,column=0,pady=5,padx=5)
l.config(relief=tk.GROOVE)
self.tau2a = tk.Entry(core_extras,textvariable=self.tau2a_value)
self.tau2a_value.set('6.3')
self.tau2a.grid(row=8,column=1)
l = tk.Label(core_extras, text='ea',width=25, background='light gray')
l.grid(row=9,column=0,pady=5,padx=5)
l.config(relief=tk.GROOVE)
self.ea = tk.Entry(core_extras,textvariable=self.ea_value)
self.ea_value.set('0.0')
self.ea.grid(row=9,column=1)
#GABA EXTRAS
l = tk.Label(gaba_extras, text='Tau1b',width=25, background='light gray')
l.grid(row=10,column=0,pady=5,padx=5)
l.config(relief=tk.GROOVE)
self.tau1b = tk.Entry(gaba_extras,textvariable=self.tau1b_value)
self.tau1b.grid(row=10,column=1)
l = tk.Label(gaba_extras, text='Tau2b',width=25, background='light gray')
l.grid(row=11,column=0,pady=5,padx=5)
l.config(relief=tk.GROOVE)
self.tau2b = tk.Entry(gaba_extras,textvariable=self.tau2b_value)
self.tau2b.grid(row=11,column=1)
l = tk.Label(gaba_extras, text='eb',width=25, background='light gray')
l.grid(row=12,column=0,pady=5,padx=5)
l.config(relief=tk.GROOVE)
self.eb = tk.Entry(gaba_extras,textvariable=self.eb_value)
self.eb.grid(row=12,column=1)
#CUSTOM EXTRAS
l = tk.Label(custom_extras, text='Synapse Mod File',width=25, background='light gray')
l.grid(row=7,column=0,pady=5,padx=5)
l.config(relief=tk.GROOVE)
self.custom_mod = tk.OptionMenu(custom_extras, self.custom_mod_value, *mod_list)
self.custom_mod_value.set('')
self.custom_mod.grid(row=7,column=1)
tk.Label(custom_extras, text=' Syn."parameter" ').grid(row=8, column=1)
tk.Label(custom_extras, text=' : ').grid(row=8, column=2)
tk.Label(custom_extras, text=' "value" ').grid(row=8, column=3)
l = tk.Label(custom_extras, text='Custom Parameter 1',width=25, background='light gray')
l.grid(row=9,column=0,pady=5,padx=5)
l.config(relief=tk.GROOVE)
self.custom1 = tk.Entry(custom_extras,textvariable=self.custom1_value)
self.custom1.grid(row=9,column=1)
tk.Label(custom_extras, text=' : ').grid(row=9, column=2)
self.custom1_text = tk.Entry(custom_extras, textvariable=self.custom1_value2)
self.custom1_text.grid(row=9, column=3)
l = tk.Label(custom_extras, text='Custom Parameter 2',width=25, background='light gray')
l.grid(row=10,column=0,pady=5,padx=5)
l.config(relief=tk.GROOVE)
self.custom2 = tk.Entry(custom_extras,textvariable=self.custom2_value)
self.custom2.grid(row=10,column=1)
tk.Label(custom_extras, text=' : ').grid(row=10, column=2)
self.custom2_text = tk.Entry(custom_extras, textvariable=self.custom2_value2)
self.custom2_text.grid(row=10, column=3)
l = tk.Label(custom_extras, text='Custom Parameter 3',width=25, background='light gray')
l.grid(row=11,column=0,pady=5,padx=5)
l.config(relief=tk.GROOVE)
self.custom3 = tk.Entry(custom_extras,textvariable=self.custom3_value)
self.custom3.grid(row=11,column=1)
tk.Label(custom_extras, text=' : ').grid(row=11, column=2)
self.custom3_text = tk.Entry(custom_extras, textvariable=self.custom3_value2)
self.custom3_text.grid(row=11, column=3)
l = tk.Label(custom_extras, text='Custom Parameter 4',width=25, background='light gray')
l.grid(row=12,column=0,pady=5,padx=5)
l.config(relief=tk.GROOVE)
self.custom4 = tk.Entry(custom_extras,textvariable=self.custom4_value)
self.custom4.grid(row=12,column=1)
tk.Label(custom_extras, text=' : ').grid(row=12, column=2)
self.custom4_text = tk.Entry(custom_extras, textvariable=self.custom4_value2)
self.custom4_text.grid(row=12, column=3)
l = tk.Label(custom_extras, text='Custom Parameter 5',width=25, background='light gray')
l.grid(row=13,column=0,pady=5,padx=5)
l.config(relief=tk.GROOVE)
self.custom5 = tk.Entry(custom_extras,textvariable=self.custom5_value)
self.custom5.grid(row=13,column=1)
tk.Label(custom_extras, text=' : ').grid(row=13, column=2)
self.custom5_text = tk.Entry(custom_extras, textvariable=self.custom5_value2)
self.custom5_text.grid(row=13, column=3)
#Return
button_frame = tk.Frame(top)
button_frame.grid(row=20,column=0,columnspan=2)
b = tk.Button(button_frame, text="Ok", command=self.ok)
b.grid(pady=5, padx=5, column=0, row=0, sticky="WE")
b = tk.Button(button_frame, text="Cancel", command=self.cancel)
b.grid(pady=5, padx=5, column=1, row=0, sticky="WE")
def verify_good(self):
return True
def get_values(self):
if self.syntype_value.get() == "Custom":
if self.custom_mod_value.get() is '':
self.custom_mod_value.set('none')
if self.custom1_value.get() is '' or self.custom1_value2.get() is '':
self.custom1_value.set('none')
self.custom1_value2.set('none')
if self.custom2_value.get() is '' or self.custom2_value2.get() is '':
self.custom2_value.set('none')
self.custom2_value2.set('none')
if self.custom3_value.get() is '' or self.custom3_value2.get() is '':
self.custom3_value.set('none')
self.custom3_value2.set('none')
if self.custom4_value.get() is '' or self.custom4_value2.get() is '':
self.custom4_value.set('none')
self.custom4_value2.set('none')
if self.custom5_value.get() is '' or self.custom5_value2.get() is '':
self.custom5_value.set('none')
self.custom5_value2.set('none')
if self.custom6_value.get() is '' or self.custom6_value2.get() is '':
self.custom6_value.set('none')
self.custom6_value2.set('none')
newsyn = [self.pre_value.get(), self.post_value.get(), self.syntype_value.get(),
self.section_value.get(), self.cond1_value.get()+'>'+self.cond1_text_value.get(), self.cond2_value.get()+'<'+self.cond2_text_value.get(),
"modfile"+":"+self.custom_mod_value.get(),
self.custom1_value.get()+":"+self.custom1_value2.get(),
self.custom2_value.get()+":"+self.custom2_value2.get(),
self.custom3_value.get()+":"+self.custom3_value2.get(),
self.custom4_value.get()+":"+self.custom4_value2.get(),
self.custom5_value.get()+":"+self.custom5_value2.get()]
return newsyn
else:
if self.syntype_value.get() == synapse_type_list[0]: #set to nan if it's not a gabaab
self.tau1b_value.set('nan')
self.tau2b_value.set('nan')
self.eb_value.set('nan')
newsyn = [self.pre_value.get(), self.post_value.get(), self.syntype_value.get(),
self.section_value.get(), self.cond1_value.get()+'>'+self.cond1_text_value.get(), self.cond2_value.get()+'<'+self.cond2_text_value.get(),
self.tau1a_value.get(), self.tau2a_value.get(), self.ea_value.get(),
self.tau1b_value.get(), self.tau2b_value.get(), self.eb_value.get()]
return newsyn
def ok(self):
self.confirm = True
self.top.destroy()
def cancel(self):
self.top.destroy()
    def add_synapse(*args):
        # Pop the synapse dialog; on OK, append its values as a table row
        # and resize the scroll region to fit.
        d = SynapseEntryBox(root)
        root.wait_window(d.top)
        if d.confirm==False:
            return
        if d.verify_good():
            synapses_page_obj.add_row(d.get_values())
            update_scrollbar(synapses_page_obj.pt.root)
top_option_frame = tk.LabelFrame(root, text="File Management")
table_frame = tk.LabelFrame(root, text="Synapse Data")
table_frame_internal = tk.Frame(table_frame)
table_frame_controls = tk.Frame(table_frame)
bottom_option_frame = tk.LabelFrame(root)
bottom_option_frame.tk
top_option_frame.grid(column=0,row=0,sticky='we',padx=10,pady=5)
table_frame.grid(column=0,row=1,sticky='we',padx=10,pady=5)
table_frame_controls.grid(column=0, row=0, sticky='we')
table_frame_internal.grid(column=0, row=1, sticky='news')
bottom_option_frame.grid(column=0,row=2,sticky='we')
page1 = tk.Frame(table_frame_internal)
######################################
cellclasses_a = []
options = glob.glob(syndata_glob)
if len(options) is 0:
options.append('')
d = defaultdict(list)
d[1].append(cellclasses_a)
tk.Button(table_frame_controls, text='Add Synapse Type', command=add_synapse).grid(column=0,row=0, padx=5, pady=5)
synapses_page_obj = synapses_adapter(page1)
######################################
    def generate_files_available():
        # Refill cellclasses_a with class names parsed from the hoc paths.
        # NOTE(review): the pattern assumes Windows '\\' path separators —
        # confirm this matches how `cellclasses` entries are produced.
        cellclasses_a.clear()
        search = 'cells\\\\class_(.+?).hoc'
        for c in cellclasses:
            m = re.search(search, c)
            if m:
                cellclasses_a.append(m.group(1))
    def update_scrollbar(panda_table_root):
        # Resize the scrollable canvas region to the table's current size
        # (with a 10% margin).
        panda_table_root.update()
        root.master.configure(scrollregion=(0, 0, panda_table_root.winfo_width()*1.1, panda_table_root.winfo_height()*1.1 ))
def load(*args):
#print ("loading: " + filename.get())
if not filename.get() or filename.get() is '':
return
df = pd.read_csv(filename.get() ,delim_whitespace=True,\
skiprows=1,header=None,\
names = synapse_column_names)
cols = list(df.columns.values) #switch pre and post synaptic
cols.insert(0, cols.pop(1))
df = df[cols]
page1.grid_forget()
synapses_page_obj.refresh(df)
page1.grid(column=0,row=0,sticky='news')
update_scrollbar(synapses_page_obj.pt.root)
display_app_status('Synapse Data file \"'+filename.get()+'\" loaded')
def save(save_to=None):
pt_df = synapses_page_obj.get_df()
cols = list(pt_df.columns.values) #switch pre and post synaptic
cols.insert(0, cols.pop(1))
pt_df = pt_df[cols]
(nr,nc) = pt_df.shape
#a = pd.DataFrame(pt_df[pt_df.columns[list(range(0,6))]])
#b = pd.DataFrame(pt_df[pt_df.columns[list(range(6,12))]]).astype('float')
#pt_df = pd.concat([a,b],axis=1)
tb = pt_df.to_csv(sep=' ',header=False,index=False,float_format='%.6f')
if not save_to:
save_to=filename.get()
file = open(save_to,"w")
file.write(str(nr)+'\n')
file.write(tb)
file.close()
display_app_status('Synapse Data file \"'+filename.get()+'\" saved')
return
def new():
if synapses_page_obj.has_changed():
result = messagebox.askquestion("New", "Are you sure? Data has been changed.", icon='warning')
if result != 'yes':
return
d = DialogEntryBox(root,text="New File Name:",lefttext=os.path.join(dataset_folder, syndata_file_prefix),righttext=syndata_file_postfix)
root.wait_window(d.top)
if d.confirm==False:
return
newfilename = dataset_folder+'\\'+syndata_file_prefix+ d.value.get() + syndata_file_postfix
f = open(newfilename,"w+")
f.close
#pt.new()
#generate_files_available()
##https://stackoverflow.com/questions/17580218/changing-the-options-of-a-optionmenu-when-clicking-a-button
m = fileMenu.children['menu']
m.delete(0,tk.END)
newvalues = options
newvalues.append(newfilename)
for val in newvalues:
m.add_command(label=val,command=lambda v=filename,l=val:v.set(l))
filename.set(newfilename)
#pt.new()
display_app_status('Synapse Data file \"'+filename.get()+'\" created')
return
    def new_clone_current():
        # "Save As": write the current table to a newly named file and
        # select it in the dropdown.
        if synapses_page_obj.has_changed():
            result = messagebox.askquestion("New", "Are you sure? Data has been changed.", icon='warning')
            if result != 'yes':
                return
        d = DialogEntryBox(root,text="New File Name:",lefttext=os.path.join(dataset_folder, syndata_file_prefix),righttext=syndata_file_postfix)
        root.wait_window(d.top)
        if d.confirm==False:
            return
        newfilename = os.path.join(dataset_folder,syndata_file_prefix+ d.value.get() + syndata_file_postfix)
        f = open(newfilename,"w+")
        f.close()
        save(save_to=newfilename)
        # Rebuild the OptionMenu entries to include the new file.
        m = fileMenu.children['menu']
        m.delete(0,tk.END)
        newvalues = options
        newvalues.append(newfilename)
        for val in newvalues:
            m.add_command(label=val,command=lambda v=filename,l=val:v.set(l))
        filename.set(newfilename)
        display_app_status('Synapse Data file \"'+filename.get()+'\" was created')
        return
    def set_syndata_param():
        # Store the bare dataset name (prefix/postfix stripped) under the
        # "SynData" key of the current parameters file.
        fn = filename.get()
        search = syndata_file_prefix+'(.+?)'+syndata_file_postfix
        m = re.search(search,fn)
        if m:
            fn = m.group(1)
        set_public_param("SynData", fn)
        display_app_status('SynData parameter set to \"'+ filename.get() +'\" in current parameters file')
        return
    def delete_current_file():
        # Placeholder — the Delete button is disabled; no action yet.
        return
    def load_syndata_param():
        # Select the file named by the "SynData" parameter (the trace on
        # `filename` performs the actual load).
        syndata = get_public_param("SynData")
        syndata = os.path.join(dataset_folder, syndata_file_prefix + syndata + syndata_file_postfix)
        #filename.set('')
        filename.set(syndata)
    #generate_files_available()
    #Create the choice option panel
    # `filename` drives the page: every write triggers load() via the trace.
    filename = tk.StringVar(top_option_frame)
    filename.trace("w",load)
    syndat = get_public_param("SynData")
    syndat = os.path.join(dataset_folder, syndata_file_prefix + syndat + syndata_file_postfix)
    filename.set('')
    #filename.set(syndat)
    #filename.set(options[0])
    newFromCellsButton = tk.Button(top_option_frame, text="Create New", command=new, width=30)
    newFromCellsButton.grid(column=0, row =0, padx=5, columnspan=2, sticky='WE')
    useButton = tk.Button(top_option_frame, text="Set as SynData", command=set_syndata_param, width=15)
    useButton.grid(column=0, row =1, padx=5, sticky='W')
    loadButton = tk.Button(top_option_frame, text="Load SynData", command=load_syndata_param, width=15)
    loadButton.grid(column=1, row =1, padx=5, sticky='W')
    fileMenu = tk.OptionMenu(top_option_frame, filename, *options)
    fileMenu.grid(column=2, row =0, padx=5, sticky='WE', columnspan=2)
    # Delete is a stub for now, so the button stays disabled.
    deleteButton = tk.Button(top_option_frame, text="Delete", command=delete_current_file)
    deleteButton.grid(column=4, row =0, padx=5, pady=5, sticky='W')
    deleteButton.config(state=tk.DISABLED)
    saveButton = tk.Button(top_option_frame, text="Save", command=save)
    saveButton.grid(column=2, row =1, padx=5, pady=5, sticky='WE')
    newFromCurrentButton = tk.Button(top_option_frame, text="Save As", command=new_clone_current)
    newFromCurrentButton.grid(column=3, row =1, padx=5, sticky='WE')
    return
def phasic_page(root):
    """Build the Phasic Stimulation page inside `root`: file management
    controls plus an editable table of per-cell stimulation settings."""
    phase_column_list = ["Cell","Max Frequency (Hz)","Noise","Depth","Phase"]
    # Page layout skeleton: file controls on top, table below.
    top_option_frame = tk.LabelFrame(root, text="File Management")
    table_frame = tk.LabelFrame(root, text="Phasic Stimulation")
    table_frame_internal = tk.Frame(table_frame)
    table_frame_controls = tk.Frame(table_frame)
    bottom_option_frame = tk.LabelFrame(root)
    top_option_frame.grid(column=0,row=0,sticky='we',padx=10,pady=5)
    table_frame.grid(column=0,row=1,sticky='we',padx=10,pady=5)
    table_frame_controls.grid(column=0, row=0, sticky='we')
    table_frame_internal.grid(column=0, row=1, sticky='news')
    bottom_option_frame.grid(column=0,row=2,sticky='we')
    page1 = tk.Frame(table_frame_internal)
    cellclasses_a = []
    # Existing phasic data files; keep one blank entry so the OptionMenu builds.
    options = glob.glob(phasicdata_glob)
    # Fixed: was `len(options) is 0` — identity comparison with an int literal.
    if not options:
        options.append('')
    d = defaultdict(list)
    d[1].append(cellclasses_a)
    pt = PandasTable(table_frame_internal, show_add_row_button=False)
    class PhasicEntryBox:
        # Modal dialog for one phasic-stimulation row: cell, max frequency,
        # noise, depth, phase. After the window closes, check `confirm` and
        # read the row with get_values().
        def __init__(self, parent, text="value", lefttext="",righttext=""):
            top = self.top = tk.Toplevel(parent)
            top.geometry('325x250')
            top.resizable(0,0)
            tk.Label(top, text='Enter new phasic stimulation information:\nCell types from currently loaded \ncells file artificial cells.').grid(row=0,column=0,sticky="WE",columnspan=2)
            # Backing variables for every widget below.
            self.cell_value = tk.StringVar(top)
            self.frequency_value = tk.StringVar(top)
            self.noise_value = tk.StringVar(top)
            self.depth_value = tk.StringVar(top)
            self.phase_value = tk.StringVar(top)
            # Set to True by ok(); callers check it after wait_window().
            self.confirm = False
            #Inputs
            # Cell choices come from the currently loaded cellnums file.
            loaded_cellnums = get_public_param("loaded_cellnums")
            column_names = ["Friendly Cell Name", "Cell File Name", "Num Cells", "Layer Index","Artificial:1 Real:0"]
            cellnums_pd = pd.read_csv(loaded_cellnums ,delimiter=' ',\
                                  skiprows=1,header=None,\
                                  names = column_names)
            cellnums_pd[column_names[4]] = cellnums_pd[column_names[4]].astype(int)
            # NOTE(review): `!= -1` keeps every row (flag is 0 or 1 per the
            # header) — confirm whether this was meant to filter artificial cells.
            post_options = cellnums_pd.loc[cellnums_pd[column_names[4]] != -1]
            post_options = post_options[post_options.columns[0]].values.tolist()
            l = tk.Label(top, text='Cell',width=25, background='light gray')
            l.grid(row=2,column=0,pady=5,padx=5)
            l.config(relief=tk.GROOVE)
            #self.post = tk.Entry(top,textvariable=self.post_value)
            self.cell = tk.OptionMenu(top, self.cell_value, *post_options)
            self.cell.grid(row=2,column=1)
            l = tk.Label(top, text='Max Frequency (Hz)',width=25, background='light gray')
            l.grid(row=3,column=0,pady=5,padx=5)
            l.config(relief=tk.GROOVE)
            self.frequency = tk.Entry(top,textvariable=self.frequency_value)
            self.frequency_value.set('1')
            self.frequency.grid(row=3,column=1)
            l = tk.Label(top, text='Noise',width=25, background='light gray')
            l.grid(row=4,column=0,pady=5,padx=5)
            l.config(relief=tk.GROOVE)
            self.noise = tk.Entry(top,textvariable=self.noise_value)
            self.noise_value.set('0.0')
            self.noise.grid(row=4,column=1)
            l = tk.Label(top, text='Depth',width=25, background='light gray')
            l.grid(row=5,column=0,pady=5,padx=5)
            l.config(relief=tk.GROOVE)
            self.depth = tk.Entry(top,textvariable=self.depth_value)
            self.depth_value.set('0.0')
            self.depth.grid(row=5,column=1)
            l = tk.Label(top, text='Phase',width=25, background='light gray')
            l.grid(row=6,column=0,pady=5,padx=5)
            l.config(relief=tk.GROOVE)
            self.phase = tk.Entry(top,textvariable=self.phase_value)
            self.phase_value.set('0')
            self.phase.grid(row=6,column=1)
            #Return
            button_frame = tk.Frame(top)
            button_frame.grid(row=20,column=0,columnspan=2)
            b = tk.Button(button_frame, text="Ok", command=self.ok)
            b.grid(pady=5, padx=5, column=0, row=0, sticky="WE")
            b = tk.Button(button_frame, text="Cancel", command=self.cancel)
            b.grid(pady=5, padx=5, column=1, row=0, sticky="WE")
        def verify_good(self):
            # No validation implemented yet; always accept the dialog values.
            return True
        def get_values(self):
            # One table row, ordered to match phase_column_list.
            newphase = [self.cell_value.get(), self.frequency_value.get(), self.noise_value.get(),
                        self.depth_value.get(), self.phase_value.get()]
            return newphase
        def ok(self):
            # Mark the dialog as accepted and close it.
            self.confirm = True
            self.top.destroy()
        def cancel(self):
            # Close without setting `confirm`; caller discards the values.
            self.top.destroy()
    def generate_files_available():
        # Refill cellclasses_a with class names parsed from the hoc paths.
        # NOTE(review): the pattern assumes Windows '\\' path separators —
        # confirm this matches how `cellclasses` entries are produced.
        cellclasses_a.clear()
        search = 'cells\\\\class_(.+?).hoc'
        for c in cellclasses:
            m = re.search(search, c)
            if m:
                cellclasses_a.append(m.group(1))
    def update_scrollbar(panda_table_root):
        # Resize the scrollable canvas region to the table's current size
        # (with a 10% margin).
        panda_table_root.update()
        root.master.configure(scrollregion=(0, 0, panda_table_root.winfo_width()*1.1, panda_table_root.winfo_height()*1.1 ))
def load(*args):
if not filename.get() or filename.get() is '':
return
df = pd.read_csv(filename.get() ,delim_whitespace=True,\
skiprows=1,header=None,\
names = phase_column_list)
pt.pack()
pt.set_dataframe(df, show_delete_row=True,\
show_header=True, show_numbering=True, \
first_column_is_id=False, immutable_values=["nan"])
update_scrollbar(pt.root)
display_app_status('Phasic Data file \"'+filename.get()+'\" loaded')
def save(save_to=None):
pt_df = pt.get_dataframe()
(nr,nc) = pt_df.shape
tb = pt_df.to_csv(sep=' ',header=False,index=False)
if not save_to:
save_to = filename.get()
file = open(save_to,"w")
file.write(str(nr)+'\n')
file.write(tb)
file.close()
display_app_status('Phasic Data file \"'+filename.get()+'\" saved')
return
def new_generate():
newfilename = ''
if pt.has_changed():
result = messagebox.askquestion("New", "Are you sure? Data has been changed.", icon='warning')
if result != 'yes':
return
d = DialogEntryBox(root,text="New File Name:",lefttext=os.path.join(dataset_folder, phasicdata_file_prefix),righttext=phasicdata_file_postfix)
root.wait_window(d.top)
if d.confirm==False:
return
newfilename = dataset_folder+'\\'+phasicdata_file_prefix+ d.value.get() + phasicdata_file_postfix
f = open(newfilename,"w+")
f.close
#pt.new()
#generate_files_available()
#https://stackoverflow.com/questions/17580218/changing-the-options-of-a-optionmenu-when-clicking-a-button
m = fileMenu.children['menu']
m.delete(0,tk.END)
newvalues = options
newvalues.append(newfilename)
for val in newvalues:
m.add_command(label=val,command=lambda v=filename,l=val:v.set(l))
filename.set(newfilename)
#pt.new()
display_app_status('Phasic Data file \"'+ newfilename +'\" created')
return
    def new_clone_current():
        # "Save As": write the current table to a newly named file and
        # select it in the dropdown.
        if pt.has_changed():
            result = messagebox.askquestion("New", "Are you sure? Data has been changed.", icon='warning')
            if result != 'yes':
                return
        d = DialogEntryBox(root,text="New File Name:",lefttext=os.path.join(dataset_folder, phasicdata_file_prefix),righttext=phasicdata_file_postfix)
        root.wait_window(d.top)
        if d.confirm==False:
            return
        newfilename = os.path.join(dataset_folder,phasicdata_file_prefix+ d.value.get() + phasicdata_file_postfix)
        f = open(newfilename,"w+")
        f.close()
        save(save_to=newfilename)
        # Rebuild the OptionMenu entries to include the new file.
        m = fileMenu.children['menu']
        m.delete(0,tk.END)
        newvalues = options
        newvalues.append(newfilename)
        for val in newvalues:
            m.add_command(label=val,command=lambda v=filename,l=val:v.set(l))
        filename.set(newfilename)
        display_app_status('Phasic Data file \"'+filename.get()+'\" was created')
        return
    def set_phasicdata_param():
        # Store the bare dataset name (prefix/postfix stripped) under the
        # "PhasicData" key of the current parameters file.
        fn = filename.get()
        search = phasicdata_file_prefix+'(.+?)'+phasicdata_file_postfix
        m = re.search(search,fn)
        if m:
            fn = m.group(1)
        set_public_param("PhasicData", fn)
        display_app_status('PhasicData parameter set to \"'+ filename.get() +'\" in current parameters file')
        return
    #generate_files_available()
    def add_phase(*args):
        # Pop the phasic dialog; on OK, append its values as a table row.
        d = PhasicEntryBox(root)
        root.wait_window(d.top)
        if d.confirm==False:
            return
        if d.verify_good():
            row = d.get_values()
            row = np.array(row).reshape(-1,len(row))
            r = pd.DataFrame(row,columns=phase_column_list)
            for i, row in r.iterrows():
                pt.add_row(row)
    def delete_current_file():
        # Placeholder — the Delete button is disabled; no action yet.
        return
    def load_phasicdata_param():
        # Select the file named by the "PhasicData" parameter (the trace on
        # `filename` performs the actual load).
        phasicdata = get_public_param("PhasicData")
        phasicdata = os.path.join(dataset_folder, phasicdata_file_prefix + phasicdata + phasicdata_file_postfix)
        #filename.set('')
        filename.set(phasicdata)
    tk.Button(table_frame_controls, text='Add Phasic Stimulus', command=add_phase).grid(column=0,row=0, padx=5, pady=5)
    #Create the choice option panel
    # `filename` drives the page: every write triggers load() via the trace.
    filename = tk.StringVar(top_option_frame)
    filename.trace("w",load)
    phasicdat = get_public_param("PhasicData")
    phasicdat = os.path.join(dataset_folder, phasicdata_file_prefix + phasicdat + phasicdata_file_postfix)
    filename.set('')
    #filename.set(phasicdat)
    #filename.set(options[0])
    newFromCellsButton = tk.Button(top_option_frame, text="Create New", command=new_generate, width=30)
    newFromCellsButton.grid(column=0, row =0, padx=5, columnspan=2, sticky='WE')
    useButton = tk.Button(top_option_frame, text="Set as PhasicData", command=set_phasicdata_param, width=15)
    useButton.grid(column=0, row =1, padx=5, sticky='W')
    loadButton = tk.Button(top_option_frame, text="Load PhasicData", command=load_phasicdata_param, width=15)
    loadButton.grid(column=1, row =1, padx=5, sticky='W')
    fileMenu = tk.OptionMenu(top_option_frame, filename, *options)
    fileMenu.grid(column=2, row =0, padx=5, sticky='WE', columnspan=2)
    # Delete is a stub for now, so the button stays disabled.
    deleteButton = tk.Button(top_option_frame, text="Delete", command=delete_current_file)
    deleteButton.grid(column=4, row =0, padx=5, pady=5, sticky='W')
    deleteButton.config(state=tk.DISABLED)
    saveButton = tk.Button(top_option_frame, text="Save", command=save)
    saveButton.grid(column=2, row =1, padx=5, pady=5, sticky='WE')
    newFromCurrentButton = tk.Button(top_option_frame, text="Save As", command=new_clone_current)
    newFromCurrentButton.grid(column=3, row =1, padx=5, sticky='WE')
    return
def results_page(root):
buildrun_frame = tk.LabelFrame(root, text="Run Model")
results_frame = tk.LabelFrame(root, text="General Results")
console_frame = tk.LabelFrame(root, text="Console Output")
buildrun_frame.grid(column=0,row=0,sticky='NEWS',padx=10,pady=5)
results_frame.grid(column=0,row=1,sticky='NEWS',padx=10,pady=5)
console_frame.grid(column=1, row=0, rowspan=2, sticky='NEWS')
#######Build Section
##############################
def reload_results():
options = glob.glob(results_glob)
m = fileMenu.children['menu']
m.delete(0,tk.END)
newvalues = options
for val in newvalues:
m.add_command(label=val,command=lambda v=foldername,l=val:v.set(l))
return
def run_command(command):
try:
p = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
return iter(p.stdout.readline, b'')
except Exception as e:
return iter(str(e).splitlines())
def run_command_in_console(command):
console.configure(state='normal')
console.insert('end', 'console > ' + command + '\n\n')
command = command.split()
for line in run_command(command):
try:
string = line.decode('unicode_escape')
except Exception:
string = line
console.insert('end', '' + string)
console.see(tk.END)
console.insert('end', 'console > \n')
console.configure(state='disabled')
#display_app_status('Not implemented')
reload_results()
return
#TODO THREADING IS A WIP, need to convert everything to classes first to ensure we only run 1 at a time
def run_command_in_console_threaded(command):
import threading
t1 = threading.Thread(target=lambda r=command:run_command_in_console(r))
t1.start()
return
def generate_batch():
#Create popup with questions for batch, with defaults loaded from json file
display_app_status('Not implemented')
return
def build_run_batch():
generate_batch()
def local_run():
run = 'nrnivmodl' #Sams fix
run_command_in_console(run)
run = 'nrniv main.hoc'
run_command_in_console_threaded(run)
return
buildButton = tk.Button(buildrun_frame, text="Build Parallel Batch", command=generate_batch)
buildButton.grid(column=0, row =0, padx=5, pady=5, sticky='WE')
buildButton.config(state=tk.DISABLED)
buildrunButton = tk.Button(buildrun_frame, text="Build & Run Parallel Batch", command=build_run_batch)
buildrunButton.grid(column=0, row =1, padx=5, pady=5, sticky='WE')
buildrunButton.config(state=tk.DISABLED)
localrunButton = tk.Button(buildrun_frame, text="Run on this Single Machine", command=local_run)
localrunButton.grid(column=2, row =0, padx=5, pady=5, sticky='WE')
#I envision this button to open a popup to configure a remote connection, sftp all files in this directory
#Maybe treating this as a git repository, some sort of version control, so we can copy the results back to
#this system, may be a huge pain to implement/not worth it
buildrunButton = tk.Button(buildrun_frame, text="Run on Remote System (SSH)", command=build_run_batch)
buildrunButton.grid(column=2, row =1, padx=5, pady=5, sticky='WE')
buildrunButton.config(state=tk.DISABLED)
##############################
######Results section
##############################
class ShowGraphBox:
def __init__(self, parent, df, plottype='line', text="Graph Area"):
top = self.top = tk.Toplevel(parent)
top.geometry('510x430')
top.resizable(0,0)
#tk.Label(top, text='Create new synapse:').grid(row=0,column=0,sticky="WE",columnspan=2)
lf = ttk.Labelframe(top, text=text)
lf.grid(row=4, column=0, sticky='nwes', padx=3, pady=3)
fig = Figure(figsize=(5,4), dpi=100)
ax = fig.add_subplot(111)
#df.plot(x='t', y='s', ax=ax)
df.plot(kind=plottype, ax=ax)
canvas = FigureCanvasTkAgg(fig, master=lf)
canvas.show()
canvas.get_tk_widget().grid(row=0, column=0)
def generate_spike_raster():
display_app_status('Not implemented')
return
selected = {}
#selected = ['trace_hco10.dat', 'trace_hco21.dat']
def display_spike_train():
if not bool(selected): #nothing selected
display_app_status('Nothing selected, stopping')
return
cells_v = {}
#for fn in selected:
for fn, value in selected.items():
#filename = os.path.join(foldername.get(), fn)
if value.get() is 0:
continue
df = pd.read_csv(fn ,delim_whitespace=True,\
skiprows=1,header=None,\
names = ['Time', 'voltage'])
cellname = ''
search = 'trace_(.+?).dat'
m = re.search(search,fn)
if m:
cellname = m.group(1)
cells_v[cellname] = list(df['voltage'])
#print(cells_v)
df_p = pd.DataFrame(cells_v, index=df['Time'])
ShowGraphBox(results_frame, df_p)
return
def load_results(*args):
selected.clear()
for widget in cellslistbox.winfo_children():
widget.destroy()
results_trace_glob = os.path.join(foldername.get(),trace_file_prefix + '*' + trace_file_postfix)
available_traces = glob.glob(results_trace_glob)
for i,trace in enumerate(available_traces):
var1 = tk.IntVar()
selected[trace] = var1
tk.Checkbutton(cellslistbox, text=trace, variable=var1).grid(row=i, sticky='w')
return
result_options = glob.glob(results_glob)
foldername = tk.StringVar(results_frame)
foldername.set('')#result_options[0])
foldername.trace("w",load_results)
r = tk.Label(results_frame,text='Results loaded: ')
r.grid(column=0, row =0)
fileMenu = tk.OptionMenu(results_frame, foldername, *result_options,'')
fileMenu.grid(column=1, row =0, padx=5, sticky='WE', columnspan=2)
cellslistbox = tk.LabelFrame(results_frame, text='Cell Traces')
cellslistbox.grid(column=0,row=1,padx=5,sticky='WE',rowspan=99)
spikerasterButton = tk.Button(results_frame, text="Show Spike Raster for all", command=generate_spike_raster)
spikerasterButton.grid(column=1, row =1, padx=5, pady=5, sticky='WE')
spikerasterButton.config(state=tk.DISABLED)
graphspikeButton = tk.Button(results_frame, text="Show Spike Activity for selected", command=display_spike_train)
graphspikeButton.grid(column=1, row =2, padx=5, pady=5, sticky='WE')
##############################
######Console section
##############################
c = tk.Label(console_frame,text='Live output for current run.')
c.grid(column=0, row=0)
console = tk.Text(console_frame)
console.config(width= 70, height=25, bg='black',fg='light green')
console.grid(column=0, row=1, padx=5, pady=5, sticky='NEWS')
console.configure(state='normal')
console.insert('end', 'console > \n')
console.configure(state='disabled')
##############################
return
def main(root):
    """Assemble the Sim Builder GUI inside *root* and run the Tk main loop.

    Builds the notebook of editor pages, the bottom status bar, registers
    the custom ttk themes, wires each page to its builder function, and
    blocks in root.mainloop() until the window is closed.
    """
    print('Starting Sim Builder. Please wait...')
    style = ttk.Style()
    try:
        # theme_create() raises if a theme with the same name already exists
        # (e.g. main() was called twice in one interpreter); in that case we
        # simply reuse the previously registered themes.
        style.theme_create( "colored", parent="alt", settings={
                "TNotebook": {"configure": {"tabmargins": [2, 5, 2, 0] } },
                "TNotebook.Tab": {
                    "configure": {"padding": [5, 2], "background": "#D9D9D9" },
                    "map": {"background": [("selected", "#C0C0E0")],
                            "expand": [("selected", [1, 1, 1, 0])] } } } )
        style.theme_create( "largertheme", parent="alt", settings={
                "TNotebook": {"configure": {"tabmargins": [2, 5, 2, 0] } },
                "TNotebook.Tab": {
                    "configure": {"padding": [5, 2] },
                    "map": {
                        "expand": [("selected", [1, 1, 1, 0])] } } } )
        style.theme_use("colored")
    except Exception:
        print('Style loaded previously. Continuing.')
    # frame1 holds the notebook, frame2 the status bar underneath it.
    frame1 = tk.Frame(root)
    frame1.grid(row=0,column=0,sticky='news')
    frame1.columnconfigure(0,weight=1)
    frame1.columnconfigure(0,weight=1)  # NOTE(review): duplicate of the previous line; rowconfigure may have been intended
    frame2 = tk.Frame(root)
    frame2.grid(row=1,column=0,sticky='news')
    nb = Autoresized_Notebook(frame1)
    nb.pack(padx=5,pady=5,side="left",fill="both",expand=True)
    bottom_status_bar = tk.Frame(frame2)
    bottom_status_bar.grid(row=0,column=0,padx=5,pady=2)#,fill=tk.X,expand=True)
    # The status label tracks the module-level `app_status` StringVar.
    label = tk.Label(bottom_status_bar,textvariable=app_status)
    label.pack(expand=True)
    page1 = ttk.Frame(nb)
    page2 = ttk.Frame(nb)
    page3 = ttk.Frame(nb)
    page4 = ttk.Frame(nb)
    page5 = ttk.Frame(nb)
    page6 = ttk.Frame(nb)  # created but not added (Cell Builder disabled below)
    page7 = ttk.Frame(nb)  # created but not added (Ion Channel Builder disabled below)
    page8 = ttk.Frame(nb)
    nb.add(page1, text='Network Model Builder')
    nb.add(page2, text='Cells')
    nb.add(page3, text='Connections')
    nb.add(page4, text='Synapses')
    nb.add(page5, text='Phasic Stimulation')
    #nb.add(page6, text='Cell Builder')
    #nb.add(page7, text='Ion Channel Builder')
    nb.add(page8, text='Run & Display Results')
    #Alternatively you could do parameters_page(page1), but wouldn't get scrolling
    bind_page(page1, parameters_page)
    bind_page(page2, cells_page)
    bind_page(page3, connections_page)
    bind_page(page4, synapses_page)
    bind_page(page5, phasic_page)
    bind_page(page8, results_page)
    display_app_status("Ready")
    try:
        print('Load complete. Running Sim Builder...')
        root.mainloop()
    except Exception:
        print('Error, closing display loop')
    print('Closing Sim Builder')
default_status = "Status: Ready"

def reset_app_status():
    """Restore the status bar to its idle text."""
    app_status.set(default_status)

def display_app_status(message):
    """Show *message* in the status bar, reverting to the default after 4 s.

    The parameter was renamed from ``str`` to avoid shadowing the builtin;
    all callers pass it positionally, so the rename is safe.
    """
    app_status.set("Status: " + message)
    # One-shot timer resets the bar; runs on a daemon-less worker thread.
    threading.Timer(4.0, reset_app_status).start()

# Let the single grid cell grow with the window.
root.columnconfigure(0, weight=1)
root.rowconfigure(0, weight=1)
root.title("Generalized Neuron Network Model Builder (University of Missouri - Neural Engineering Laboratory - Nair) Sim Builder BETA VERSION")
root.geometry('1050x600')
#root.resizable(0,0)
root.config(menu=menu_bar(root))
# Status text shared with main()'s status-bar label.
app_status = tk.StringVar(root, '')
reset_app_status()
main(root)
| [
"latimerb@comet-ln3.sdsc.edu"
] | latimerb@comet-ln3.sdsc.edu |
54f0f5ed57993b798a9f8ad302c3afbaed89a667 | 8670b5e4195bb16d386584e20dc511565c577c5f | /Greedy Algorithms/Maximum Advertisement Revenue/maximum_ad_revenue.py | 552f4342427725b2c860a94ce651da89c2b42047 | [] | no_license | CvanderStoep/AlgorithmicToolbox | 558ac7bdd7590b1328b53fa2d31d5bfbb03b1fce | 5d3b6ea96d70e3eee110b0933b0585b49a6f10a5 | refs/heads/master | 2022-12-24T12:46:22.764695 | 2020-10-06T15:07:26 | 2020-10-06T15:07:26 | 292,310,704 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,372 | py | # python3
from itertools import permutations
def max_dot_product_naive(first_sequence, second_sequence):
    """Brute-force maximum dot product.

    Tries every ordering of the second sequence and keeps the best pairing.
    O(n!) — only usable as a checker for very small inputs.
    """
    assert len(first_sequence) == len(second_sequence)
    assert len(first_sequence) <= 10 ** 3
    assert all(0 <= f <= 10 ** 5 for f in first_sequence)
    assert all(0 <= s <= 10 ** 5 for s in second_sequence)

    best = 0
    for ordering in permutations(second_sequence):
        candidate = sum(a * b for a, b in zip(first_sequence, ordering))
        if candidate > best:
            best = candidate
    return best
def max_dot_product(first_sequence, second_sequence):
    """Return the maximum dot product over all pairings of the two sequences.

    By the rearrangement inequality, sorting both sequences and pairing
    equal ranks maximizes the sum (O(n log n)).

    Fix: the original sorted both input lists in place, mutating the
    caller's data; sorted copies are used instead, so the arguments are
    left untouched.
    """
    assert len(first_sequence) == len(second_sequence)
    assert len(first_sequence) <= 10 ** 3
    assert all(0 <= f <= 10 ** 5 for f in first_sequence)
    assert all(0 <= s <= 10 ** 5 for s in second_sequence)

    ordered_first = sorted(first_sequence)
    ordered_second = sorted(second_sequence)
    return sum(a * b for a, b in zip(ordered_first, ordered_second))
if __name__ == '__main__':
    # Input format: n, then n ad prices, then n click counts.
    n = int(input())
    prices = list(map(int, input().split()))
    clicks = list(map(int, input().split()))
    assert len(prices) == len(clicks) == n
    print(max_dot_product(prices, clicks))
    # WARNING: the naive cross-check below is O(n!) and only feasible for tiny n.
    print(max_dot_product_naive(prices, clicks))
| [
"carlo.vanderstoep@gmail.com"
] | carlo.vanderstoep@gmail.com |
a129bbc914a2740992f20ee1dc830ea5aaece802 | a8d9e696c73639bd6f3a8f2e64a207e218078027 | /api/migrations/0011_auto_20190316_1456.py | df1e30c151826993f3e08b5e6d09caab147d0410 | [] | no_license | rachanaaithal/ACMS-Hotel-And-Flight-Booking | 3ad2359843af24ab5ba2f09fc76f7c03092b7dfc | eae1253d514921d8648beef71d48bd77fd312014 | refs/heads/master | 2022-12-04T16:18:29.466221 | 2019-08-12T10:13:00 | 2019-08-12T10:13:00 | 173,610,945 | 0 | 13 | null | 2022-11-22T01:06:45 | 2019-03-03T18:04:56 | JavaScript | UTF-8 | Python | false | false | 514 | py | # Generated by Django 2.1.5 on 2019-03-16 14:56
from django.db import migrations, models
import uuid
class Migration(migrations.Migration):
    # Auto-generated by Django: switches RoomAvailability.id to a UUID
    # primary key defaulting to uuid4.

    dependencies = [
        ('api', '0010_auto_20190316_1420'),
    ]

    operations = [
        migrations.AlterField(
            model_name='roomavailability',
            name='id',
            field=models.UUIDField(default=uuid.uuid4, help_text='Unique ID for this particular transaction', primary_key=True, serialize=False),
        ),
    ]
| [
"noreply@github.com"
] | noreply@github.com |
dcaa6538533e7aea6475ca43d0007542414c737a | 884e6e0dbef09e17cb76ff3536422ca342a58fbd | /Dokuz_Tas_Tahta.py | c69e36be9c0bd4d6bb1c62d3caed566e4d2ad233 | [] | no_license | omerfaruktuna/DokuzTas | f41936f786f987afd2a822bd7a0e79918ecaa25d | 2f3e09fd04e45874176117170037eda0ece76bbe | refs/heads/master | 2020-05-23T01:18:10.006508 | 2019-05-14T09:21:05 | 2019-05-14T09:21:05 | 186,587,055 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,328 | py | import random
class Dokuz_Tas_Tahta():
    """Board for the Dokuz Tas (Nine Men's Morris style) game.

    ``current_board`` has a dummy entry at index 0 so playable positions
    are 1..24.  Empty cells hold "*"; occupied cells hold the player's
    letter ("M" or "S").
    """

    # All lines of three cells that form a mill; mirrors the 18 conditions
    # enumerated in the original is_triple().
    _TRIPLES = (
        (1, 2, 3), (4, 5, 6), (7, 8, 9),
        (16, 17, 18), (19, 20, 21), (22, 23, 24),
        (1, 10, 22), (3, 15, 24), (4, 11, 19),
        (6, 14, 21), (7, 12, 16), (9, 13, 18),
        (2, 5, 8), (17, 20, 23),
        (1, 4, 7), (3, 6, 9), (16, 19, 22), (18, 21, 24),
    )

    def __init__(self):
        # Index 0 is unused; the 24 playable cells start empty ("*").
        self.current_board = [""] + ["*"] * 24

    def print_tahta(self):
        """Render the current board as ASCII art on stdout.

        NOTE(review): the spacing inside the literals below matches the
        source as recovered; the original art may have used wider runs of
        spaces that were lost in transit — confirm against the upstream file.
        """
        print("Current Picture of the Board:\n")
        print(self.current_board[1]+"---------"+self.current_board[2]+"---------"+self.current_board[3])
        print("| |")
        print("| "+self.current_board[4]+"------"+self.current_board[5]+"------"+self.current_board[6]+" |")
        print("| | | |")
        print("| | "+self.current_board[7]+"---"+self.current_board[8]+"---"+self.current_board[9]+" | |")
        print("| | | | | |")
        print(self.current_board[10]+" "+self.current_board[11]+" "+self.current_board[12]+" "+self.current_board[13]+" "+self.current_board[14]+" "+self.current_board[15])
        print("| | | | | |")
        print("| | "+self.current_board[16]+"---"+self.current_board[17]+"---"+self.current_board[18]+" | |")
        print("| | | |")
        print("| "+self.current_board[19]+"------"+self.current_board[20]+"------"+self.current_board[21]+" |")
        print("| |")
        print(self.current_board[22]+"---------"+self.current_board[23]+"---------"+self.current_board[24])

    def is_triple(self, player):
        """Return True if *player* occupies any complete line of three."""
        board = self.current_board
        return any(all(board[i] == player for i in triple)
                   for triple in self._TRIPLES)

    def is_board_full(self):
        """Return True when no empty ("*") cell remains."""
        return "*" not in self.current_board

    def is_valid_move(self, move):
        """Return True if *move* indexes an empty cell."""
        return self.current_board[move] == "*"

    def changePlayer(self, player):
        """Return the opposing player's letter ("M" <-> "S")."""
        return "S" if player == "M" else "M"

    def makeMove(self, position, player):
        """Place *player*'s stone at *position* (no validity check)."""
        self.current_board[position] = player

    def availableMoves(self):
        """Return the indices of all empty cells."""
        return [i for i, cell in enumerate(self.current_board) if cell == "*"]

    def get_computer_move(self, player):
        """Choose a move for the computer.

        Takes cell 5 first when it is free, otherwise a uniformly random
        empty cell.  The caller must ensure at least one empty cell exists,
        or the loop below never terminates (same as the original).

        Fixes vs the original: the early check compared cell 5 against " "
        although empty cells are "*", so it could never fire; a dead
        ``break`` after ``return`` and an unreachable trailing ``return 5``
        were removed.
        """
        if self.current_board[5] == "*":
            return 5
        while True:
            move = random.randint(1, 24)
            if self.current_board[move] == "*":
                return move
| [
"noreply@github.com"
] | noreply@github.com |
fa4e4448ac09a8ca4502f4e8591d83ef40112882 | fc2447b91cbee82e74e939092ec1903678f3217a | /PythonPractice/算法图解/Dijkstra's algorithm.py | 0cd528a8c91f11657af1906538a31b531f16e4a9 | [] | no_license | yglj/learngit | 0eac654e7c49f2ede064b720e6ee621a702193b4 | 74fb4b93d5726c735b64829cafc99878d8082121 | refs/heads/master | 2022-12-24T10:01:56.705046 | 2019-05-27T21:04:08 | 2019-05-27T21:04:08 | 146,157,116 | 0 | 1 | null | 2022-12-12T07:01:25 | 2018-08-26T06:28:20 | HTML | UTF-8 | Python | false | false | 1,771 | py | """
狄克斯特拉算法
每条边上的关联数字称为权重
带权重的图叫加权图
寻找加权图的最短路径
只是用于有向无环图
"""
graph = {} # 加权图
costs = {} # 开销
parents = {} # 父节点
# 图的各顶点的邻居及边的权重
graph['start'] = {}
graph['start']['a'] = 6
graph['start']['b'] = 2
# print(graph['start'].keys())
graph['a'] = {}
graph['a']['fin'] = 1
graph['b'] = {}
graph['b']['a'] = 3
graph['b']['fin'] = 5
graph['fin'] = {}
infinity = float('inf') # 无穷大
costs['a'] = 6
costs['b'] = 2
costs['fin'] = infinity
parents['a'] = 'start'
parents['b'] = 'start'
parents['fin'] = None # 开始没有到达fin的路径
processed = []
"""
1.只要还有要处理的节点
2.获取离起点最近的节点
3.更新其邻居的开销
4.如果有邻居的开销被更新,同时更新其父节点
5.将该节点标记为处理过
"""
def find_lowest_cost_node(costs, seen=None):
    """Return the unprocessed node with the lowest known cost, or None.

    *costs* maps node -> current best cost.  *seen* is the collection of
    already-finalised nodes; it defaults to the module-level ``processed``
    list so existing callers keep their exact behaviour, while tests can
    pass their own collection (backward-compatible generalisation).

    Nodes whose cost is infinity are never returned (inf < inf is False),
    matching the original.
    """
    if seen is None:
        seen = processed
    lowest_cost = float('inf')
    lowest_cost_node = None
    for node in costs:
        cost = costs[node]
        if cost < lowest_cost and node not in seen:
            lowest_cost = cost
            lowest_cost_node = node
    return lowest_cost_node
def main():
    """Run Dijkstra over the module-level graph, costs and parents tables.

    Mutates the module-level ``costs``, ``parents`` and ``processed``
    in place; call once per program run.
    """
    node = find_lowest_cost_node(costs)
    while node is not None:
        cost = costs[node]
        neighbors = graph[node]
        for n in neighbors.keys():
            # Cost of reaching n by going through the current node.
            new_cost = cost + neighbors[n]
            if costs[n] > new_cost:
                # Cheaper route found: record cost and predecessor.
                costs[n] = new_cost
                parents[n] = node
        # The current node's cost is now final.
        processed.append(node)
        node = find_lowest_cost_node(costs)
if __name__ == '__main__':
    main()
# print(parents)
# print(costs)
# print(graph)
# NOTE(review): `processed` holds the nodes in the order they were finalised,
# not a path reconstructed from `parents`; for this particular graph the two
# happen to coincide, but following `parents` back from 'fin' is the general
# way to print the shortest path.
processed.insert(0, 'start')
path = '->'.join(processed)
print(path)
| [
"2365952530@qq.com"
] | 2365952530@qq.com |
727b5f688d0d70414334ccda20dfd1f147a25259 | b604d6e2b1f206e6df660da2be2add78ec22941a | /resources/ros_kinetic/src/ros/rosbuild/bin/rosgcov_summarize | 9c19df610036bbb08442ed9a949fe1a44b505a54 | [] | no_license | fqez/common | 7b521773d81e2e687f6ae482f595ca3d19515e39 | f423fec07f39da9cb38f91dc4f3f1cd51c1a3130 | refs/heads/master | 2020-05-21T23:59:17.035384 | 2017-03-14T11:46:57 | 2017-03-14T11:46:57 | 62,873,104 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,635 | #!/usr/bin/env python3
import sys
import fileinput
import re
import os.path

# rosgcov_summarize: aggregate gcov annotated-source results for one ROS
# package and print per-file plus package-wide line and branch coverage.
USAGE = "USAGE: rosgcov_summarize <package_dir> <rosgcov_file>"

if len(sys.argv) != 3:
    print(USAGE)
    sys.exit(-1)

pkg = sys.argv[1]
fname = sys.argv[2]

# No .rosgcov file at all means the package produced no coverage data.
if not os.path.exists(fname):
    print('[rosgcov] %s : %.2f%% (no coverage results)' % (os.path.split(pkg)[1],0.0))
    sys.exit(0)

# gcov annotated-source line formats:
#   "   <count>: ..." -> executable line hit <count> times
#   "   #####: ..."   -> executable line never executed
#   "branch N taken X" / "branch N never executed" -> branch hit / miss
re_hit = re.compile('^ *[0-9]*:.*')
re_miss = re.compile('^ *#####:.*')
re_branch_hit = re.compile('^branch *[0-9] *taken [0-9]*.*')
re_branch_miss = re.compile('^branch *[0-9] *never executed.*')

# Each line of the .rosgcov file is "<dir> <source-file>"; collect the
# source files whose .gcov annotations we should summarize.
files = []
finput = fileinput.input(fname)
for l in finput:
    ls = l.strip().split(' ')
    f = os.path.join(ls[0],os.path.split(ls[1])[1])
    files.append(f.strip())

# Package-wide accumulators.
total = 0
hits = 0
misses = 0
branch_total = 0
branch_hits = 0
branch_misses = 0
print('-------------------------------------------------------')
print('Coverage summary: ')
print('-------------------------------------------------------')
for f in files:
    # Show the path relative to the package directory.
    prefix = os.path.commonprefix([pkg, f])
    display_name = f[len(prefix):]
    if display_name[0] == '/':
        display_name = display_name[1:]
    print(' ' + display_name + ': ')
    gcov_fname = f + '.gcov'
    if not os.path.exists(gcov_fname):
        print('WARNING: no coverage results for %s' % (display_name))
        continue
    gcovf = fileinput.input(gcov_fname)
    # Per-file counters.
    local_total = 0
    local_hits = 0
    local_misses = 0
    local_branch_total = 0
    local_branch_hits = 0
    local_branch_misses = 0
    for s in gcovf:
        if re_hit.match(s):
            local_hits += 1
            local_total += 1
        elif re_miss.match(s):
            local_misses += 1
            local_total += 1
        if re_branch_hit.match(s):
            local_branch_hits += 1
            local_branch_total += 1
        elif re_branch_miss.match(s):
            local_branch_misses += 1
            local_branch_total += 1
    # max(..., 1) guards the division for files with no executable lines.
    print(' line: %.2f%% (%d / %d)' % ((100.0 * local_hits / max(local_total,1)), local_hits, local_total))
    hits += local_hits
    misses += local_misses
    total += local_total
    print(' branch: %.2f%% (%d / %d)' % ((100.0 * local_branch_hits / max(local_branch_total,1)), local_branch_hits, local_branch_total))
    branch_hits += local_branch_hits
    branch_misses += local_branch_misses
    branch_total += local_branch_total
print('-------------------------------------------------------')
print('[rosgcov] %s : %.2f%% (%d / %d)' % (os.path.split(pkg)[1],(100.0 * hits / max(total,1)), hits, total))
print('[rosgcov] %s : branch %.2f%% (%d / %d)' % (os.path.split(pkg)[1],(100.0 * branch_hits / max(branch_total,1)), branch_hits, branch_total))
print('-------------------------------------------------------')
| [
"f.perez475@gmail.com"
] | f.perez475@gmail.com | |
832c54e69da7d3a98af00fecad751a69ad911ebb | 175bec7f39f7acc9e574e76383969755b8c3874d | /blogApp/blog/urls.py | ef67242776567a4890cbabbbb6c24da12fec3648 | [] | no_license | agdelvalle/djangoBlog | 4af2338232384680bab29ddbb2854fdc284a2b10 | 377e3d37cea8f9e7b6d92370c7ab97c75983388e | refs/heads/master | 2023-08-25T12:47:47.875618 | 2021-10-30T17:41:20 | 2021-10-30T17:41:20 | 422,945,707 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 620 | py | from django.conf.urls import url
from .views import index, new_entry, view_entry, delete_entry, delete_comment, update_entry, login
# URL routes for the blog app; the `name` values are used by reverse()
# and {% url %} lookups in templates.
urlpatterns = [
    url(r'^$', index, name='index'),
    url(r'^new_entry$', new_entry, name='new-entry'),
    url(r'view_entry/(?P<blog_id>\d+)$', view_entry, name='view-entry'),
    url(r'delete_entry/(?P<blog_id>\d+)$', delete_entry, name='delete-entry'),
    url(r'delete_comment/(?P<blog_id>\d+)/(?P<comment_id>\d+)$', delete_comment, name='delete-comment'),
    url(r'update_entry/(?P<blog_id>\d+)$', update_entry, name='update-entry'),
    url(r'^login$', login, name='login'),
]
| [
"aly.delvalle.9@gmail.com"
] | aly.delvalle.9@gmail.com |
d0f5c9bc02f4fce66adc44578dcf744646f61523 | 0f121afc3b5a59d44bb237e37feedbc63a1c824e | /q2/q2.py | b9ae33d6f5e776f30ad76665006c66bdff2edcba | [] | no_license | BonJovi1/Camera-Modelling-and-Transformations | c712567e13686859af527b0dd32962d9fc09f123 | 1300a8c0f98ccd4b4e975ce981a7c5d94d1338fc | refs/heads/master | 2020-07-09T20:04:00.797176 | 2019-10-10T18:01:13 | 2019-10-10T18:01:13 | 204,070,726 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,873 | py | #importing libraries
import numpy as np
import matplotlib.pyplot as plt
import cv2

# Camera intrinsic (calibration) matrix K = [fx 0 cx; 0 fy cy; 0 0 1],
# as given for the exercise.
cal_mat = np.array( [ [7.2153e+02, 0, 6.0955e+02],
                      [0, 7.2153e+02, 1.7285e+02],
                      [0, 0, 1] ] )
cam_inverse = np.linalg.inv(cal_mat)

# Pixel coordinates of the car's reference corner (found using imtool in
# MATLAB), in homogeneous form.
pixels = np.array( [ [825, 308, 1] ] )
temp = np.matmul(cam_inverse, pixels.transpose())
# print(temp)  # temp is a ray in the camera frame with unknown depth

# Recover depth from the known real-world height:
# fy = focal length (px), y = real height (m), Y = pixel row; Z = fy*y/(Y-cy).
fy = 7.2153e+02
Y = 312  # NOTE(review): 312 here vs row 308 in `pixels` above — confirm which was intended
y = 1.65
z = (fy*y)/(Y - 1.7285e+02)
# z = (y * fy)/Y
# print(z)
temp = z * temp
# print(temp)  # camera coordinates of the cuboid's origin corner

X = temp[0]
Y = temp[1]
Z = temp[2]

# Car bounding-box dimensions in metres.
height = 1.38
width = 1.51
length = 4.10

# Eight cuboid corners in camera coordinates.
# Naming: f/n = far/near face, l/r = left/right, t/b = top/bottom.
flb = np.array([X, Y, Z])
frb = np.array([X+ width, Y, Z])
flt = np.array([X, Y-height, Z])
frt = np.array([X+width, Y-height, Z])
nlb = np.array([X, Y, Z+length])
nrb = np.array([X+width, Y, Z+length])
nlt = np.array([X, Y-height, Z+length])
nrt = np.array([X+width, Y-height, Z+length])

# Project each corner back to the image plane with K.
flb2 = np.matmul(cal_mat, flb)
frb2 = np.matmul(cal_mat, frb)
flt2 = np.matmul(cal_mat, flt)
frt2 = np.matmul(cal_mat, frt)
nlb2 = np.matmul(cal_mat, nlb)
nrb2 = np.matmul(cal_mat, nrb)
nlt2 = np.matmul(cal_mat, nlt)
nrt2 = np.matmul(cal_mat, nrt)

# Normalise homogeneous coordinates (divide by the third component).
flb2 = flb2/flb2[2]
frb2 = frb2/frb2[2]
flt2 = flt2/flt2[2]
frt2 = frt2/frt2[2]
nlb2 = nlb2/nlb2[2]
nrb2 = nrb2/nrb2[2]
nlt2 = nlt2/nlt2[2]
nrt2 = nrt2/nrt2[2]

print(flb2)
print(frb2)
print(flt2)
print(frt2)

# Corner groups: each tuple's first corner is connected to the other three
# when the box edges are drawn in main().
connect = [(flb2, frb2, flt2, nlb2), (frt2, flt2, frb2, nrt2), (nlt2, flt2, nlb2, nrt2), (nrb2, nrt2, nlb2, frb2)]
def main():
    """Overlay the projected 3-D bounding box on image.png and show it."""
    image = plt.imread("image.png")
    for group in connect:
        # Connect the group's first corner to each of the other three.
        pt1 = group[0]
        for i in range(1,4):
            pt = group[i]
            plt.plot([pt1[0], pt[0]], [pt1[1], pt[1]], c='r', linewidth = 5)
    plt.imshow(image)
    plt.show()

main()
| [
"abhinav.g@students.iiit.ac.in"
] | abhinav.g@students.iiit.ac.in |
b24dca7b4cab815df2d6088302dd4b54e9fd78d0 | 615bf8c6b986f5985e5f6592f6b502f50cc0e984 | /baby.py | f45952b3437c53804f01c140c8deadb590a61228 | [] | no_license | Ch0498/Python-Bible | 3e30dbc452ef5b3a9c2fa83b476ff485209a48fc | 75b22ff2854b78e52eef5f5de7d907c1feaf19e4 | refs/heads/master | 2022-11-07T01:28:27.993759 | 2020-06-28T11:29:51 | 2020-06-28T11:29:51 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 340 | py | from random import choice
questions = ["Why is the sky blue?: ", "Why is there a face on the moon?: ",
"Where are all the dinosaurs: "]
question = choice(questions)
answer = input(question).strip().lower()
while answer != "just because":
answer = input("why?: ").strip().lower()
print("Oh... Okay")
| [
"noreply@github.com"
] | noreply@github.com |
4b3ea08a26e0a92132a0a700b7e8ff04bd9e13fb | 0420b28aa59330fb0d9548f636b1460668163887 | /accounts/migrations/0005_alter_userprofile_profile_picture.py | 591939011f4877d881bd9c3396ddd91668e6bf0a | [] | no_license | akhmadakhmedov/modamag | 30cc3ea335b7fe8fbc234149b11d2df11b627281 | 0459f27230027fab51cbaae2a594ffde52a64d04 | refs/heads/main | 2023-08-11T01:48:58.979894 | 2021-10-12T11:18:08 | 2021-10-12T11:18:08 | 391,133,082 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 419 | py | # Generated by Django 3.2.5 on 2021-08-16 09:22
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated by Django: makes UserProfile.profile_picture optional
    # (blank=True) and stores uploads under images/users/.

    dependencies = [
        ('accounts', '0004_userprofile'),
    ]

    operations = [
        migrations.AlterField(
            model_name='userprofile',
            name='profile_picture',
            field=models.ImageField(blank=True, upload_to='images/users/'),
        ),
    ]
| [
"ahmedov.thy@gmail.com"
] | ahmedov.thy@gmail.com |
ca46ce4a951be66fcf2ef5ba9a70ce827fc1944d | fa2d32a77ebd558ffe54eea67504308fa1ac2d2c | /learn.fabric/a/bin/python-config | 4336941e993282c564dab78f716b56d48c7c63fa | [] | no_license | prpllrhd/morePYTHON | 329362c18a447f913fa1f73c06fd0451b1a79009 | 739f64eb5462de532a8f1788ebe7631cb155f565 | refs/heads/master | 2021-06-05T19:59:43.276455 | 2019-01-20T19:53:34 | 2019-01-20T19:53:34 | 14,663,754 | 0 | 1 | null | 2015-07-08T06:12:08 | 2013-11-24T15:38:51 | Python | UTF-8 | Python | false | false | 2,355 | #!/Users/samy/git/python/learn.fabric/a/bin/python
import sys
import getopt
import sysconfig

# python-config: print compiler/linker settings for embedding or extending
# this Python build (generated from CPython's Misc/python-config.in).

# Flags understood by this build; newer interpreter versions add more.
valid_opts = ['prefix', 'exec-prefix', 'includes', 'libs', 'cflags',
              'ldflags', 'help']
if sys.version_info >= (3, 2):
    valid_opts.insert(-1, 'extension-suffix')
    valid_opts.append('abiflags')
if sys.version_info >= (3, 3):
    valid_opts.append('configdir')


def exit_with_usage(code=1):
    # Print the accepted flags on stderr and exit with `code`.
    sys.stderr.write("Usage: {0} [{1}]\n".format(
        sys.argv[0], '|'.join('--'+opt for opt in valid_opts)))
    sys.exit(code)

try:
    opts, args = getopt.getopt(sys.argv[1:], '', valid_opts)
except getopt.error:
    exit_with_usage()

if not opts:
    exit_with_usage()

pyver = sysconfig.get_config_var('VERSION')
getvar = sysconfig.get_config_var

opt_flags = [flag for (flag, val) in opts]

if '--help' in opt_flags:
    exit_with_usage(code=0)

# Emit one line of output per requested flag, in the order given.
for opt in opt_flags:
    if opt == '--prefix':
        print(sysconfig.get_config_var('prefix'))

    elif opt == '--exec-prefix':
        print(sysconfig.get_config_var('exec_prefix'))

    elif opt in ('--includes', '--cflags'):
        flags = ['-I' + sysconfig.get_path('include'),
                 '-I' + sysconfig.get_path('platinclude')]
        if opt == '--cflags':
            flags.extend(getvar('CFLAGS').split())
        print(' '.join(flags))

    elif opt in ('--libs', '--ldflags'):
        abiflags = getattr(sys, 'abiflags', '')
        libs = ['-lpython' + pyver + abiflags]
        libs += getvar('LIBS').split()
        libs += getvar('SYSLIBS').split()
        # add the prefix/lib/pythonX.Y/config dir, but only if there is no
        # shared library in prefix/lib/.
        if opt == '--ldflags':
            if not getvar('Py_ENABLE_SHARED'):
                libs.insert(0, '-L' + getvar('LIBPL'))
            if not getvar('PYTHONFRAMEWORK'):
                libs.extend(getvar('LINKFORSHARED').split())
        print(' '.join(libs))

    elif opt == '--extension-suffix':
        # EXT_SUFFIX replaced the older SO config variable.
        ext_suffix = sysconfig.get_config_var('EXT_SUFFIX')
        if ext_suffix is None:
            ext_suffix = sysconfig.get_config_var('SO')
        print(ext_suffix)

    elif opt == '--abiflags':
        if not getattr(sys, 'abiflags', None):
            exit_with_usage()
        print(sys.abiflags)

    elif opt == '--configdir':
        print(sysconfig.get_config_var('LIBPL'))
| [
"samy@rakhees-Air.fios-router.home"
] | samy@rakhees-Air.fios-router.home | |
68026a166c2d56d84db2d415b5ebe71328e54e9b | f79306de1af4c5ef42bc457944b415471805e90c | /ps/baekjoon/1620.py | b51adf77490af00d03d82f0e3c1ce2633a51d6cf | [] | no_license | FutureSeller/TIL | 9d514e19fd10b237b48eaac9f319f9a68bd47edc | 6fee95619ea96893daf66632a582cdd95a18d8e0 | refs/heads/main | 2023-08-06T18:05:14.551684 | 2023-08-04T07:17:34 | 2023-08-04T07:17:34 | 194,835,139 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 389 | py | from collections import OrderedDict
import sys
N, M = map(int, sys.stdin.readline().split())
pokemon = OrderedDict()
names = []
for idx in range(0, N):
name = sys.stdin.readline().rstrip()
names.append(name)
pokemon[name] = idx
for _ in range(M):
name = sys.stdin.readline().rstrip()
if name not in pokemon:
print(names[int(name)-1])
else:
print(pokemon[name] + 1)
| [
"f.s3ll3r@gmail.com"
] | f.s3ll3r@gmail.com |
8cfda59f774349e3e71b33a2df89c221077a735d | c34949b4773302fd7051d524855268ff47cd94a0 | /github/git.py | 64bad17b60b9de8db07b33a879f263328c6969a3 | [] | no_license | fsxchen/netx | 79d7b393777791ede2d3d173cc9944a8a6adda9c | 3efa26ae9b315fd4f9a464204468293a0c5b1349 | refs/heads/master | 2021-01-18T17:40:00.342621 | 2016-11-23T09:42:39 | 2016-11-23T09:42:39 | 71,990,502 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,295 | py | #!/usr/bin/env python
# -*- coding:utf-8 -*-
'''
File Name: git.py
Description:
Created_Time: 2016-11-08 11:35:47
Last modified: 2016-11-23 11时54分03秒
'''
_author = 'arron'
_email = 'fsxchen@gmail.com'
import os
import json
from operator import itemgetter
from github import Github
import networkx as nx
from networkx.readwrite import json_graph
import matplotlib.pyplot as plt
from IPython.display import IFrame
from IPython.core.display import display
from networkx.readwrite import json_graph
# git 配置
username = "fsxchen"
password = raw_input("please input password:")
g_user = Github(username, password)
def get_data(username):
filename = "{username}.gpickle".format(username=username)
if os.path.exists(filename):
g = nx.read_gpickle(filename)
else:
g = nx.DiGraph()
user = g_user.get_user(username)
g.add_node(user.login + '(user)', type='user')
followers = user.get_followers()
following = user.get_following()
for fols in followers:
g.add_node(fols.login + '(user)', type='user')
g.add_edge(fols.login + '(user)', user.login + '(user)', type='follows')
for f in fols.get_followers():
g.add_node(f.login + '(user)', type='user')
g.add_edge(f.login + '(user)', fols.login + '(user)',
type='follows')
for fi in fols.get_following():
g.add_node(fi.login + '(user)', type='user')
g.add_edge(fols.login + '(user)', fi.login + '(user)',
type='following')
for follin in following:
g.add_node(follin.login + '(user)', type='user')
g.add_edge(user.login + '(user)', follin.login + '(user)',
type='following')
for f in follin.get_followers():
g.add_node(f.login + '(user)', type='user')
g.add_edge(f.login + '(user)', follin.login + '(user)',
type='follows')
for fi in follin.get_following():
g.add_node(fi.login + '(user)', type='user')
g.add_edge(follin.login + '(user)', fi.login + '(user)',
type='following')
nx.write_gpickle(g, filename)
return g
if __name__ == "__main__":
g = get_data('Z-0ne')
print nx.info(g)
print sorted([n for n in g.degree_iter()], key=itemgetter(1), reverse=True)[:10]
h = g.copy()
dc = sorted(nx.degree_centrality(h).items(),
key=itemgetter(1), reverse=True)
print "Degree Centrality"
print dc[:10]
print "*" * 15
bc = sorted(nx.betweenness_centrality(h).items(),
key=itemgetter(1), reverse=True)
print "Betweenness Centrality"
print bc[:10]
print "*" * 15
cc = sorted(nx.closeness_centrality(h).items(),
key=itemgetter(1), reverse=True)
print "Closeness Centrality"
# d = json_graph.node_link_data(G)
# son.dump(d, open('graph.json', 'w'))
# nx.write_graphml(G, "z0ne.graphml")
# viz_file = 'graph.html'
# display(IFrame(viz_file, '100%', '600px'))
# with open("grahp.json", 'w') as f:
# data = json_graph.tree_data(G, root=1)
# json.dump(data, f, indent=4)
# print dir(user)
| [
"fsxchen@gmail.com"
] | fsxchen@gmail.com |
3d09ba2c50e27b812f44551b76e959f8ed91b1c4 | 5e9e683894db55e6918f7c3f23859fcf905f856f | /src/infra/db/db_connector.py | cae1725ba554b582d0b29694eb1ab88e0a8915df | [] | no_license | vigenere23/cooky-api | 3cfe612b5144dec5daf082f34b65f1d4955de815 | cfe6542ba98579282e818e4f13374fc032e97aaa | refs/heads/master | 2023-07-18T02:44:25.171587 | 2021-08-26T16:15:38 | 2021-08-26T16:25:11 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,352 | py | from src.infra.db.db_connection import DBConnection
class DBConnector:
def __init__(self, connection: DBConnection):
self.__connection = connection
def delete(self, query, data):
try:
self.__connection.execute(query, data)
self.__connection.commit()
except Exception as e:
self.__connection.rollback()
raise e
def replace(self, query, data):
try:
self.__connection.execute(query, data)
self.__connection.commit()
except Exception as e:
self.__connection.rollback()
raise e
def create(self, query, data, autocommit=True):
try:
result = self.__connection.execute(query, data)
if autocommit:
self.__connection.commit()
return result.last_id()
except Exception as e:
self.__connection.rollback()
raise e
def find(self, query, data=None):
result = self.__connection.execute(query, data)
return result.fetch_one()
def findAll(self, query, data=None, limit=None):
result = self.__connection.execute(query, data)
return result.fetch_many(limit)
def getColumns(self, table_name):
query = 'DESC {}'.format(table_name)
return self.findAll(query)
| [
"lolgab1@hotmail.com"
] | lolgab1@hotmail.com |
4aa3c05bab82dea4ae678dfc7c1ea442168008e2 | 414a58c691ff7b434034086433644870f8ac5c99 | /tests/test_geom.py | b1de7a128110d8a3d34fee1bc3c1dbf3d7148c62 | [
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | WZBSocialScienceCenter/pdftabextract | 08328197681ca03b764ea2df410851c06e0a92b7 | 7b86a9098b5d397f984b1cbc6716d55860e34ef8 | refs/heads/master | 2022-08-02T16:43:42.187628 | 2022-06-24T09:51:22 | 2022-06-24T09:51:22 | 62,884,666 | 2,239 | 401 | Apache-2.0 | 2022-06-24T09:51:23 | 2016-07-08T11:44:46 | Python | UTF-8 | Python | false | false | 7,946 | py | # -*- coding: utf-8 -*-
"""
Created on Mon Feb 13 09:50:51 2017
@author: mkonrad
"""
import math
import pytest
from hypothesis import given
import hypothesis.strategies as st
import numpy as np
from pdftabextract.geom import (pt, ptdist, vecangle, vecrotate, overlap, lineintersect,
rect, rectcenter, rectarea, rectintersect,
normalize_angle, normalize_angle_halfcircle,
project_polarcoord_lines)
FMIN = np.finfo(np.float32).min
FMAX = np.finfo(np.float32).max
def test_pt():
    """pt() builds a 2-vector ndarray with the requested dtype (float default)."""
    x = 0
    y = 1
    pt0 = pt(x, y)
    assert type(pt0) is np.ndarray
    # Use the builtin float/int: the np.float / np.int scalar aliases were
    # deprecated in NumPy 1.20 and removed in 1.24, so the original
    # comparisons crash on modern NumPy.
    assert pt0.dtype == float
    assert pt0[0] == x
    assert pt0[1] == y

    pt1 = pt(x, y, int)
    assert pt1.dtype == int
    assert pt1[0] == x
    assert pt1[1] == y
def test_ptdist():
    """Distance is zero between identical points, symmetric, and Euclidean."""
    origin = pt(0, 0)
    unit_x = pt(1, 0)
    diagonal = pt(1, 1)
    assert ptdist(origin, origin) == 0
    assert ptdist(origin, unit_x) == 1
    assert ptdist(unit_x, origin) == ptdist(origin, unit_x)
    assert ptdist(origin, diagonal) == math.sqrt(2)
def test_vecangle():
    """vecangle returns the smaller (non-reflex) angle between two vectors,
    and NaN when either vector has zero length.
    """
    v1 = pt(1, 0)
    v2 = pt(2, 0)
    v3 = pt(1, 1)
    v4 = pt(0, 1)
    v5 = pt(0, -1)
    assert np.isnan(vecangle(pt(0, 0), v1))  # pt(0, 0) is vec of no length
    # collinear vectors of different magnitude -> angle 0
    assert vecangle(v1, v2) == 0
    assert round(vecangle(v1, v3), 4) == round(math.radians(45), 4)
    assert vecangle(v2, v4) == vecangle(v1, v4) == math.radians(90)
    assert vecangle(v2, v5) == math.radians(90)  # always the smaller angle
@given(st.floats(min_value=FMIN, max_value=FMAX),
       st.floats(min_value=FMIN, max_value=FMAX),
       st.floats(min_value=FMIN, max_value=FMAX),
       st.floats(min_value=FMIN, max_value=FMAX))
def test_vecangle_2(x1, y1, x2, y2):
    """Property-based check: for arbitrary finite inputs in float32 range,
    vecangle is NaN when either vector is (numerically) zero and otherwise
    lies in [0, pi].
    """
    v0 = pt(0, 0)
    v1 = pt(x1, y1)
    v2 = pt(x2, y2)
    try:
        alpha = vecangle(v1, v2)
    except ValueError:  # math domain error in some edge cases?
        return
    if np.allclose(v1, v0) or np.allclose(v2, v0):
        assert np.isnan(alpha)
    else:
        assert 0 <= alpha <= np.pi
def test_vecrotate():
    """vecrotate maps known inputs to their expected rotated positions."""
    cases = [
        # (vector, angle, rotation centre or None, expected result)
        (pt(0, 0), 0.123, None, pt(0, 0)),                      # origin is a fixed point
        (pt(1, 0), math.radians(90), None, pt(0, 1)),           # 90deg about origin
        (pt(1, 0), math.radians(90), pt(1, 1), pt(2, 1)),       # 90deg about (1, 1)
    ]
    for vec, theta, centre, expected in cases:
        if centre is None:
            rotated = vecrotate(vec, theta)
        else:
            rotated = vecrotate(vec, theta, about=centre)
        assert np.allclose(rotated, expected)
def test_overlap():
    """Interval overlap detection for nested, touching and disjoint ranges."""
    overlapping = [(0, 1, 0, 1), (0, 10, 5, 15), (-10, 10, -20, -10)]
    disjoint = [(0, 0, 1, 1), (-9, 10, -20, -10)]
    for args in overlapping:
        assert overlap(*args) is True
    for args in disjoint:
        assert overlap(*args) is False
def test_lineintersect():
    """Intersection of two line segments / infinite lines.

    With check_in_segm=True (the default) the intersection must lie inside
    both segments; coincident lines yield an all-NaN point, while parallel
    or non-intersecting configurations yield None. With check_in_segm=False
    the segments are treated as infinite lines.
    """
    # first with check_in_segm = True
    X = lineintersect(pt(0, 0), pt(0, 0), pt(0, 0), pt(0, 0))  # coincident I
    assert sum(np.isnan(X)) == len(X)
    X = lineintersect(pt(0, 0), pt(0, 1), pt(0, 0), pt(0, 1))  # coincident II
    assert sum(np.isnan(X)) == len(X)
    assert lineintersect(pt(0, 0), pt(0, 1), pt(1, 0), pt(1, 1)) is None  # parallel, non coincident
    assert lineintersect(pt(0, 0), pt(0, 1), pt(1, 1), pt(2, 2)) is None  # non-parellel, no intersection
    assert lineintersect(pt(0, 0), pt(2, 2), pt(0, 5), pt(5, 0)) is None  # non-parellel, no intersection II
    assert np.array_equal(lineintersect(pt(0, 0), pt(0, 1), pt(0, 1), pt(2, 2)), pt(0, 1))  # intersection - touch
    assert np.array_equal(lineintersect(pt(0, 0), pt(2, 2), pt(0, 2), pt(2, 0)), pt(1, 1))  # intersection
    # now with check_in_segm = False
    X = lineintersect(pt(0, 0), pt(0, 0), pt(0, 0), pt(0, 0), False)  # coincident I
    assert sum(np.isnan(X)) == len(X)
    X = lineintersect(pt(0, 0), pt(0, 1), pt(0, 0), pt(0, 1), False)  # coincident II
    assert sum(np.isnan(X)) == len(X)
    X = lineintersect(pt(0, 0), pt(1, 1), pt(2, 2), pt(3, 3), False)  # coincident III
    assert sum(np.isnan(X)) == len(X)
    assert np.array_equal(lineintersect(pt(0, 0), pt(0, 1), pt(1, 1), pt(2, 2), False), pt(0, 0))  # intersection (out of segments)
    assert np.array_equal(lineintersect(pt(0, 0), pt(0, 1), pt(0, 1), pt(2, 2), False), pt(0, 1))  # intersection - touch
    assert np.array_equal(lineintersect(pt(0, 0), pt(2, 2), pt(0, 2), pt(2, 0), False), pt(1, 1))  # intersection
def test_rect():
    """rect() rejects mismatched corner dtypes and degenerate corner pairs,
    and otherwise stores the two corners with their common dtype."""
    with pytest.raises(ValueError):
        # np.int was a deprecated alias for the builtin int (removed in
        # NumPy 1.24); passing int directly is equivalent and future-proof.
        rect(pt(0, 0), pt(1, 1, dtype=int))  # dtypes do not match
    with pytest.raises(ValueError):
        rect(pt(0, 0), pt(0, 0))  # doesn't form rect
    with pytest.raises(ValueError):
        rect(pt(1, 1), pt(0, 0))  # doesn't form rect
    with pytest.raises(ValueError):
        rect(pt(0, 0), pt(1, 0))  # doesn't form rect

    a = pt(0, 0)
    b = pt(1, 1)
    r = rect(a, b)
    assert r.dtype == a.dtype == b.dtype
    assert np.array_equal(r[0], a)
    assert np.array_equal(r[1], b)

    a = pt(-3, -1)
    b = pt(8, 1.2)
    r = rect(a, b)
    assert r.dtype == a.dtype == b.dtype
    assert np.array_equal(r[0], a)
    assert np.array_equal(r[1], b)
def test_rectcenter():
    """The centre of a rectangle is the midpoint of its two corner points."""
    unit_square = rect(pt(0, 0), pt(1, 1))
    mid = rectcenter(unit_square)
    assert type(mid) is np.ndarray
    assert np.array_equal(mid, pt(0.5, 0.5))
    skewed = rect(pt(-3, -1), pt(2, 5))
    assert np.array_equal(rectcenter(skewed), pt(-0.5, 2))
def test_rectarea():
    """Area of a rect equals width times height."""
    assert rectarea(rect(pt(0, 0), pt(1, 1))) == 1      # 1 x 1
    assert rectarea(rect(pt(-3, -1), pt(2, 5))) == 30   # 5 x 6
def test_rectintersect():
    """Intersection area of two rects, optionally normalized by the area of
    rect a or rect b; touching rects give 0, disjoint rects give None.
    """
    a = rect(pt(0, 0), pt(1, 1))
    b = rect(pt(-3, -1), pt(2, 5))
    # a rect intersected with itself covers its whole area
    assert rectintersect(a, a) == rectarea(a)
    assert rectintersect(b, b) == rectarea(b)
    assert rectintersect(a, a, norm_intersect_area='a') == 1
    assert rectintersect(a, a, norm_intersect_area='b') == 1
    # only 'a' and 'b' are valid normalization selectors
    with pytest.raises(ValueError):
        rectintersect(a, a, norm_intersect_area='c')
    # complete intersect
    assert rectintersect(a, b) == rectarea(a)
    assert rectintersect(b, a) == rectarea(a)
    assert rectintersect(a, b, norm_intersect_area='a') == 1
    assert rectintersect(b, a, norm_intersect_area='b') == 1
    assert rectintersect(b, a, norm_intersect_area='a') < 1
    assert rectintersect(a, b, norm_intersect_area='b') < 1
    # partial intersect
    a = rect(pt(0, 0), pt(1, 1))
    b = rect(pt(0.5, 0.5), pt(1.5, 1.5))
    assert rectintersect(a, b) == 0.25
    assert rectintersect(a, b, norm_intersect_area='a') == 0.25
    assert rectintersect(a, b, norm_intersect_area='b') == 0.25
    b = rect(pt(0.75, 0.5), pt(1.5, 1.5))
    assert rectintersect(a, b) == 0.125
    # touch
    a = rect(pt(0, 0), pt(1, 1))
    b = rect(pt(1, 1), pt(1.5, 1.5))
    assert rectintersect(a, b) == 0
    # no intersection
    a = rect(pt(0, 0), pt(1, 1))
    b = rect(pt(1.1, 1.1), pt(1.5, 1.5))
    assert rectintersect(a, b) is None
def test_normalize_angle():
    """Multiples of pi normalise into [0, 2*pi) with parity preserved."""
    for k in range(-10, 10):
        normalised = normalize_angle(k * np.pi)
        assert 0 <= normalised < 2 * np.pi
        # even multiples map to 0, odd multiples map to pi
        assert normalised / np.pi == k % 2
def test_normalize_angle_halfcircle():
    """Multiples of pi/2 normalise into [0, pi) with parity preserved."""
    for k in range(-10, 10):
        normalised = normalize_angle_halfcircle(0.5 * k * np.pi)
        assert 0 <= normalised < np.pi
        # even multiples map to 0, odd multiples map to pi/2
        assert normalised / np.pi * 2 == k % 2
@given(
    st.lists(st.lists(st.floats(allow_nan=False, allow_infinity=False), min_size=2, max_size=2)),
    st.integers(),
    st.integers()
)
def test_project_polarcoord_lines(hough_lines, img_w, img_h):
    """Property-based check: non-positive image dimensions raise ValueError;
    otherwise every (rho, theta) line projects to a pair of 2D endpoints.
    """
    if img_w <= 0 or img_h <= 0:
        with pytest.raises(ValueError):
            project_polarcoord_lines(hough_lines, img_w, img_h)
        return
    else:
        res = project_polarcoord_lines(hough_lines, img_w, img_h)
        assert type(res) is list
        # one endpoint pair per input line
        assert len(res) == len(hough_lines)
        for pts in res:
            assert len(pts) == 2
            assert type(pts[0]) == type(pts[1]) == np.ndarray
            assert len(pts[0]) == len(pts[1]) == 2
| [
"markus.konrad@wzb.eu"
] | markus.konrad@wzb.eu |
2206b4bfd3f1f3c2510c27d0f3cce62a12de5313 | 3a6b2c80f948a7918d54b71866d94476d17f19ef | /docs/_build/html/_downloads/06b1aa4ac217e5bc4f81274b1df76753/demo3.py | f37533217c6ef48d7c40a3069c064bf780f9459f | [
"BSD-2-Clause",
"BSD-3-Clause"
] | permissive | beckermr/GalSim | c306c3e5c00a78c15a9d9f20d2bfa489239fb946 | 96c80bde3184f84e450c2dc441ee8fe03b5197f2 | refs/heads/releases/2.3 | 2022-12-04T20:45:35.964787 | 2022-07-01T06:23:22 | 2022-07-01T06:24:49 | 288,194,984 | 0 | 0 | NOASSERTION | 2020-08-17T13:59:25 | 2020-08-17T13:59:25 | null | UTF-8 | Python | false | false | 15,150 | py | # Copyright (c) 2012-2021 by the GalSim developers team on GitHub
# https://github.com/GalSim-developers
#
# This file is part of GalSim: The modular galaxy image simulation toolkit.
# https://github.com/GalSim-developers/GalSim
#
# GalSim is free software: redistribution and use in source and binary forms,
# with or without modification, are permitted provided that the following
# conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice, this
# list of conditions, and the disclaimer given in the accompanying LICENSE
# file.
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions, and the disclaimer given in the documentation
# and/or other materials provided with the distribution.
#
"""
Demo #3
The third script in our tutorial about using GalSim in python scripts: examples/demo*.py.
(This file is designed to be viewed in a window 100 characters wide.)
This script gets reasonably close to including all the principal features of an image
from a ground-based telescope. The galaxy is represented as the sum of a bulge and a disk,
where each component is represented by a sheared Sersic profile (with different Sersic
indices). The PSF has both atmospheric and optical components. The atmospheric
component is a Kolmogorov turbulent spectrum. The optical component includes defocus,
coma and astigmatism, as well as obscuration from a secondary mirror. The noise model
includes both a gain and read noise. And finally, we include the effect of a slight
telescope distortion.
New features introduced in this demo:
- obj = galsim.Sersic(n, flux, half_light_radius)
- obj = galsim.Sersic(n, flux, scale_radius)
- obj = galsim.Kolmogorov(fwhm)
- obj = galsim.OpticalPSF(lam_over_diam, defocus, coma1, coma2, astig1, astig2, obscuration)
- obj = obj.shear(e, beta) -- including how to specify an angle in GalSim
- shear = galsim.Shear(q, beta)
- obj = obj.shear(shear)
- obj3 = x1 * obj1 + x2 * obj2
- obj = obj.withFlux(flux)
- image = galsim.ImageF(image_size, image_size)
- image = obj.drawImage(image, wcs)
- image = obj.drawImage(method='sb')
- world_profile = wcs.toWorld(profile)
- shear3 = shear1 + shear2
- noise = galsim.CCDNoise(rng, sky_level, gain, read_noise)
"""
import sys
import os
import math
import logging
import galsim
def main(argv):
    """
    Getting reasonably close to including all the principal features of an image from a
    ground-based telescope:
      - Use a bulge plus disk model for the galaxy
      - Both galaxy components are Sersic profiles (n=3.5 and n=1.5 respectively)
      - Let the PSF have both atmospheric and optical components.
      - The atmospheric component is a Kolmogorov spectrum.
      - The optical component has some defocus, coma, and astigmatism.
      - Add both Poisson noise to the image and Gaussian read noise.
      - Let the pixels be slightly distorted relative to the sky.

    Note: *argv* is accepted for symmetry with the __main__ invocation below
    but is not used inside this function.
    """
    # We do some fancier logging for demo3, just to demonstrate that we can:
    # - we log to both stdout and to a log file
    # - the log file has a lot more (mostly redundant) information
    logging.basicConfig(format="%(message)s", level=logging.INFO, stream=sys.stdout)
    if not os.path.isdir('output'):
        os.mkdir('output')
    logFile = logging.FileHandler(os.path.join("output", "script3.log"))
    logFile.setFormatter(logging.Formatter("%(name)s[%(levelname)s] %(asctime)s: %(message)s"))
    logging.getLogger("demo3").addHandler(logFile)
    logger = logging.getLogger("demo3")

    # Simulation parameters (units noted per line).
    gal_flux = 1.e6        # ADU ("Analog-to-digital units", the units of the numbers on a CCD)
    bulge_n = 3.5          #
    bulge_re = 2.3         # arcsec
    disk_n = 1.5           #
    disk_r0 = 0.85         # arcsec (corresponds to half_light_radius of ~3.7 arcsec)
    bulge_frac = 0.3       #
    gal_q = 0.73           # (axis ratio 0 < q < 1)
    gal_beta = 23          # degrees (position angle on the sky)
    atmos_fwhm = 2.1       # arcsec
    atmos_e = 0.13         #
    atmos_beta = 0.81      # radians
    opt_defocus = 0.53     # wavelengths
    opt_a1 = -0.29         # wavelengths
    opt_a2 = 0.12          # wavelengths
    opt_c1 = 0.64          # wavelengths
    opt_c2 = -0.33         # wavelengths
    opt_obscuration = 0.3  # linear scale size of secondary mirror obscuration
    lam = 800              # nm NB: don't use lambda - that's a reserved word.
    tel_diam = 4.          # meters
    pixel_scale = 0.23     # arcsec / pixel
    image_size = 64        # n x n pixels
    wcs_g1 = -0.02         #
    wcs_g2 = 0.01          #
    sky_level = 2.5e4      # ADU / arcsec^2
    gain = 1.7             # e- / ADU
    # Note: here we assume 1 photon -> 1 e-, ignoring QE. If you wanted,
    # you could include the QE factor as part of the gain.
    read_noise = 0.3       # e- / pixel
    random_seed = 1314662

    logger.info('Starting demo script 3 using:')
    logger.info(' - Galaxy is bulge plus disk, flux = %.1e',gal_flux)
    logger.info(' - Bulge is Sersic (n = %.1f, re = %.2f), frac = %.1f',
                bulge_n,bulge_re,bulge_frac)
    logger.info(' - Disk is Sersic (n = %.1f, r0 = %.2f), frac = %.1f',
                disk_n,disk_r0,1-bulge_frac)
    logger.info(' - Shape is q,beta (%.2f,%.2f deg)', gal_q, gal_beta)
    logger.info(' - Atmospheric PSF is Kolmogorov with fwhm = %.2f',atmos_fwhm)
    logger.info(' - Shape is e,beta (%.2f,%.2f rad)', atmos_e, atmos_beta)
    logger.info(' - Optical PSF has defocus = %.2f, astigmatism = (%.2f,%.2f),',
                opt_defocus, opt_a1, opt_a2)
    logger.info(' coma = (%.2f,%.2f), lambda = %.0f nm, D = %.1f m',
                opt_c1, opt_c2, lam, tel_diam)
    logger.info(' obscuration linear size = %.1f',opt_obscuration)
    logger.info(' - pixel scale = %.2f,',pixel_scale)
    logger.info(' - WCS distortion = (%.2f,%.2f),',wcs_g1,wcs_g2)
    logger.info(' - Poisson noise (sky level = %.1e, gain = %.1f).',sky_level, gain)
    logger.info(' - Gaussian read noise (sigma = %.2f).',read_noise)

    # Initialize the (pseudo-)random number generator that we will be using below.
    rng = galsim.BaseDeviate(random_seed+1)

    # Define the galaxy profile.
    # Normally Sersic profiles are specified by half-light radius, the radius that
    # encloses half of the total flux. However, for some purposes, it can be
    # preferable to instead specify the scale radius, where the surface brightness
    # drops to 1/e of the central peak value.
    bulge = galsim.Sersic(bulge_n, half_light_radius=bulge_re)
    disk = galsim.Sersic(disk_n, scale_radius=disk_r0)

    # Objects may be multiplied by a scalar (which means scaling the flux) and also
    # added to each other.
    gal = bulge_frac * bulge + (1-bulge_frac) * disk
    # Could also have written the following, which does the same thing:
    # gal = galsim.Add([ bulge.withFlux(bulge_frac) , disk.withFlux(1-bulge_frac) ])
    # Both syntaxes work with more than two summands as well.

    # Set the overall flux of the combined object.
    gal = gal.withFlux(gal_flux)
    # Since the total flux of the components was 1, we could also have written:
    # gal *= gal_flux
    # The withFlux method will always set the flux to the given value, while `gal *= flux`
    # will multiply whatever the current flux is by the given factor.

    # Set the shape of the galaxy according to axis ratio and position angle
    # Note: All angles in GalSim must have explicit units. Options are:
    # galsim.radians
    # galsim.degrees
    # galsim.arcmin
    # galsim.arcsec
    # galsim.hours
    gal_shape = galsim.Shear(q=gal_q, beta=gal_beta*galsim.degrees)
    gal = gal.shear(gal_shape)
    logger.debug('Made galaxy profile')

    # Define the atmospheric part of the PSF.
    # Note: the flux here is the default flux=1.
    atmos = galsim.Kolmogorov(fwhm=atmos_fwhm)
    # For the PSF shape here, we use ellipticity rather than axis ratio.
    # And the position angle can be either degrees or radians. Here we chose radians.
    atmos = atmos.shear(e=atmos_e, beta=atmos_beta*galsim.radians)
    logger.debug('Made atmospheric PSF profile')

    # Define the optical part of the PSF:
    # The first argument of OpticalPSF below is lambda/diam (wavelength of light / telescope
    # diameter), which needs to be in the same units used to specify the image scale. We are using
    # arcsec for that, so we have to self-consistently use arcsec here, using the following
    # calculation:
    lam_over_diam = lam * 1.e-9 / tel_diam  # radians
    lam_over_diam *= 206265  # arcsec (206265 arcsec per radian)
    # Note that we could also have made GalSim do the conversion for us if we did not know the right
    # factor:
    # lam_over_diam = lam * 1.e-9 / tel_diam * galsim.radians
    # lam_over_diam = lam_over_diam / galsim.arcsec
    logger.debug('Calculated lambda over diam = %f arcsec', lam_over_diam)
    # The rest of the values should be given in units of the wavelength of the incident light.
    optics = galsim.OpticalPSF(lam_over_diam,
                               defocus = opt_defocus,
                               coma1 = opt_c1, coma2 = opt_c2,
                               astig1 = opt_a1, astig2 = opt_a2,
                               obscuration = opt_obscuration)
    logger.debug('Made optical PSF profile')

    # So far, our coordinate transformation between image and sky coordinates has been just a
    # scaling of the units between pixels and arcsec, which we have defined as the "pixel scale".
    # This is fine for many purposes, so we have made it easy to treat the coordinate systems
    # this way via the `scale` parameter to commands like drawImage. However, in general, the
    # transformation between the two coordinate systems can be more complicated than that,
    # including distortions, rotations, variation in pixel size, and so forth. GalSim can
    # model a number of different "World Coordinate System" (WCS) transformations. See the
    # docstring for BaseWCS for more information.
    # In this case, we use a WCS that includes a distortion (specified as g1,g2 in this case),
    # which we call a ShearWCS.
    wcs = galsim.ShearWCS(scale=pixel_scale, shear=galsim.Shear(g1=wcs_g1, g2=wcs_g2))
    logger.debug('Made the WCS')

    # Next we will convolve the components in world coordinates.
    psf = galsim.Convolve([atmos, optics])
    final = galsim.Convolve([psf, gal])
    logger.debug('Convolved components into final profile')

    # This time we specify a particular size for the image rather than let GalSim
    # choose the size automatically. GalSim has several kinds of images that it can use:
    # ImageF uses 32-bit floats (like a C float, aka numpy.float32)
    # ImageD uses 64-bit floats (like a C double, aka numpy.float64)
    # ImageS uses 16-bit integers (usually like a C short, aka numpy.int16)
    # ImageI uses 32-bit integers (usually like a C int, aka numpy.int32)
    # If you let the GalSim drawImage command create the image for you, it will create an ImageF.
    # However, you can make a different type if you prefer. In this case, we still use
    # ImageF, since 32-bit floats are fine. We just want to set the size explicitly.
    image = galsim.ImageF(image_size, image_size)
    # Draw the image with the given WCS. Note that we use wcs rather than scale when the
    # WCS is more complicated than just a pixel scale.
    final.drawImage(image=image, wcs=wcs)

    # Also draw the effective PSF by itself and the optical PSF component alone.
    image_epsf = galsim.ImageF(image_size, image_size)
    psf.drawImage(image_epsf, wcs=wcs)

    # We also draw the optical part of the PSF at its own Nyquist-sampled pixel size
    # in order to better see the features of the (highly structured) profile.
    # In this case, we draw a "surface brightness image" using method='sb'. Rather than
    # integrate the flux over the area of each pixel, this method just samples the surface
    # brightness value at the locations of the pixel centers. We will encounter a few other
    # drawing methods as we go through this sequence of demos. cf. demos 7, 8, 10, and 11.
    image_opticalpsf = optics.drawImage(method='sb')
    logger.debug('Made image of the profile')

    # This time, we use CCDNoise to model the real noise in a CCD image. It takes a sky level,
    # gain, and read noise, so it can be a bit more realistic than the simpler GaussianNoise
    # or PoissonNoise that we used in demos 1 and 2.
    # The sky level for CCDNoise is the level per pixel that contributed to the noise.
    sky_level_pixel = sky_level * pixel_scale**2
    # The gain is in units of e-/ADU. Technically, one should also account for quantum efficiency
    # (QE) of the detector. An ideal CCD has one electron per incident photon, but real CCDs have
    # QE less than 1, so not every photon triggers an electron. We are essentially folding
    # the quantum efficiency (and filter transmission and anything else like that) into the gain.
    # The read_noise value is given as e-/pixel. This is modeled as a pure Gaussian noise
    # added to the image after applying the pure Poisson noise.
    noise = galsim.CCDNoise(rng, gain=gain, read_noise=read_noise, sky_level=sky_level_pixel)
    image.addNoise(noise)
    logger.debug('Added Gaussian and Poisson noise')

    # Write the images to files.
    file_name = os.path.join('output', 'demo3.fits')
    file_name_epsf = os.path.join('output','demo3_epsf.fits')
    file_name_opticalpsf = os.path.join('output','demo3_opticalpsf.fits')
    image.write(file_name)
    image_epsf.write(file_name_epsf)
    image_opticalpsf.write(file_name_opticalpsf)
    logger.info('Wrote image to %r', file_name)
    logger.info('Wrote effective PSF image to %r', file_name_epsf)
    logger.info('Wrote optics-only PSF image (Nyquist sampled) to %r', file_name_opticalpsf)

    # Check that the HSM package, which is bundled with GalSim, finds a good estimate
    # of the shear.
    results = galsim.hsm.EstimateShear(image, image_epsf)
    logger.info('HSM reports that the image has observed shape and size:')
    logger.info(' e1 = %.3f, e2 = %.3f, sigma = %.3f (pixels)', results.observed_shape.e1,
                results.observed_shape.e2, results.moments_sigma)
    logger.info('When carrying out Regaussianization PSF correction, HSM reports')
    logger.info(' e1, e2 = %.3f, %.3f',
                results.corrected_e1, results.corrected_e2)
    logger.info('Expected values in the limit that noise and non-Gaussianity are negligible:')
    # Convention for shear addition is to apply the second term initially followed by the first.
    # So this needs to be the WCS shear + the galaxy shape in that order.
    total_shape = galsim.Shear(g1=wcs_g1, g2=wcs_g2) + gal_shape
    logger.info(' e1, e2 = %.3f, %.3f', total_shape.e1, total_shape.e2)
if __name__ == "__main__":
main(sys.argv)
| [
"michael@jarvis.net"
] | michael@jarvis.net |
8c0fd05fd868dd2f69902f3cd5972467419c43ff | 6bde52f4bc294810f5a347ccc7e40c9a7f34274b | /Secondary/utils/canmatrix/library/exportxls.py | 5f653ce518bef338e14058b71dd872eae4a494df | [
"BSD-3-Clause",
"Apache-2.0",
"MIT",
"BSD-2-Clause"
] | permissive | OSADP/CARMA | 3b7add9aeec3deb83756529f34b3dc1ee2414785 | b5a832bc9d127b8cbda3e7c45f29344125e7b690 | refs/heads/master | 2021-03-22T03:30:19.463424 | 2016-02-03T21:02:39 | 2016-02-03T21:02:39 | 48,701,820 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 12,953 | py | #!/usr/bin/env python
#Copyright (c) 2013, Eduard Broecker
#All rights reserved.
#
#Redistribution and use in source and binary forms, with or without modification, are permitted provided that
# the following conditions are met:
#
# Redistributions of source code must retain the aframeve copyright notice, this list of conditions and the
# following disclaimer.
# Redistributions in binary form must reproduce the aframeve copyright notice, this list of conditions and the
# following disclaimer in the documentation and/or other materials provided with the distribution.
#
#THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED
#WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
#PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY
#DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
#PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
#CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
#OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
#DAMAGE.
#
# this script exports xls-files from a canmatrix-object
# xls-files are the can-matrix-definitions displayed in Excel
import xlwt
import sys
from canmatrix import *
#Font Size : 8pt * 20 = 160
# (xlwt measures font height in 1/20 of a point, so 160 == 8 pt)
#font = 'font: name Arial Narrow, height 160'
font = 'font: name Verdana, height 160'
# Sheet-wide cell styles: header row plus normal/white-text variants; the
# "*_first_frame" styles add a top border to mark the first row of a frame.
sty_header = xlwt.easyxf(font + ', bold on; align: rota 90, vertical center, horizontal center','pattern: pattern solid, fore-colour rose')
sty_norm = xlwt.easyxf(font + ', colour black')
sty_first_frame = xlwt.easyxf(font + ', colour black; borders: top thin')
sty_white = xlwt.easyxf(font + ', colour white')
# BUMatrix-Styles
# Board-unit matrix shading: alternating plain/green columns; the hatched
# "sender" variants are used for cells of transmitting board units.
sty_green = xlwt.easyxf('pattern: pattern solid, fore-colour light_green')
sty_green_first_frame = xlwt.easyxf('pattern: pattern solid, fore-colour light_green; borders: top thin')
sty_sender = xlwt.easyxf('pattern: pattern 0x04, fore-colour gray25')
sty_sender_first_frame = xlwt.easyxf('pattern: pattern 0x04, fore-colour gray25; borders: top thin')
sty_sender_green = xlwt.easyxf('pattern: pattern 0x04, fore-colour gray25, back-colour light_green')
sty_sender_green_first_frame = xlwt.easyxf('pattern: pattern 0x04, fore-colour gray25, back-colour light_green; borders: top thin')
# GenMsgSendType value -> (launch-type label for column 3, attribute that
# holds the launch parameter for column 4; None means column 4 stays blank).
_SEND_TYPE_MAP = {
    "5": ("Cyclic+Change", "GenMsgDelayTime"),
    "0": ("Cyclic", None),
    "2": ("BAF", "GenMsgNrOfRepetitions"),
    "8": ("DualCycle", "GenMsgCycleTimeActive"),
    "10": ("None", "GenMsgDelayTime"),
    "9": ("OnChange", "GenMsgNrOfRepetitions"),
    "1": ("Spontaneous", "GenMsgDelayTime"),
}

def writeFrame(frame, worksheet, row, mystyle):
    """Write the frame-level columns (0-4) of *row*: hex id, name, cycle
    time, launch type and launch parameter.

    Columns 2 and 4 are left blank when the frame does not carry the
    corresponding attribute; an unknown or missing GenMsgSendType leaves
    both launch columns blank (same behaviour as the original cascade).
    """
    # frame-id (hex, at least 3 digits wide) and frame name
    worksheet.write(row, 0, label = "%3Xh" % frame._Id, style=mystyle)
    worksheet.write(row, 1, label = frame._name, style=mystyle)

    # determin cycle-time
    if "GenMsgCycleTime" in frame._attributes:
        worksheet.write(row, 2, label = int(frame._attributes["GenMsgCycleTime"]), style=mystyle)
    else:
        worksheet.write(row, 2, label = "", style=mystyle)

    # determin send-type: table-driven instead of a 7-branch if/elif cascade
    # that repeated the same two write calls in every branch.
    if "GenMsgSendType" in frame._attributes:
        sendType = frame._attributes["GenMsgSendType"]
    else:
        sendType = None
    launchLabel, paramAttr = _SEND_TYPE_MAP.get(sendType, ("", None))
    worksheet.write(row, 3, label = launchLabel, style=mystyle)
    if paramAttr is not None and paramAttr in frame._attributes:
        worksheet.write(row, 4, label = int(frame._attributes[paramAttr]), style=mystyle)
    else:
        worksheet.write(row, 4, label = "", style=mystyle)
def writeSignal(db, sig, worksheet, row, mystyle, rearCol):
    """Write one signal's cells into *row*: start byte/bit (cols 5-6), name
    (7), comment (8), bit size (9), default (10), SNA value (11), byte
    order (12) and the unit/factor cell at rearCol+2.

    rearCol is the first column after the board-unit matrix.
    """
    #startbyte
    # NOTE: '/' relies on Python 2 integer division here (this module also
    # uses dict.iterkeys(), which only exists on Python 2).
    worksheet.write(row, 5, label = (sig._startbit)/8+1, style=mystyle)
    #startbit
    worksheet.write(row, 6, label = (sig._startbit)%8, style=mystyle)
    #signalname
    worksheet.write(row, 7, label = sig._name, style=mystyle)
    # eval comment:
    if sig._comment is None:
        comment = ""
    else:
        comment = sig._comment
    # eval multiplex-info
    if sig._multiplex == 'Multiplexor':
        comment = "Mode Signal: " + comment
    elif sig._multiplex is not None:
        comment = "Mode " + str(sig._multiplex) + ":" + comment
    #write comment and size of signal in sheet
    worksheet.write(row, 8, label = comment, style=mystyle)
    worksheet.write(row, 9, label = sig._signalsize, style=mystyle)
    #startvalue of signal available
    if "GenSigStartValue" in sig._attributes:
        if db._signalDefines["GenSigStartValue"]._definition == "STRING":
            worksheet.write(row, 10, label = sig._attributes["GenSigStartValue"], style=mystyle)
        elif db._signalDefines["GenSigStartValue"]._definition == "INT" or db._signalDefines["GenSigStartValue"]._definition == "HEX":
            worksheet.write(row, 10, label = "%Xh" % int(sig._attributes["GenSigStartValue"]), style=mystyle)
        # NOTE(review): a definition other than STRING/INT/HEX leaves the
        # cell unwritten here (unlike the outer else below) -- confirm
        # whether that is intended.
    else:
        worksheet.write(row, 10, label = " ", style=mystyle)
    #SNA-value of signal available
    if "GenSigSNA" in sig._attributes:
        # strip the surrounding quote characters from the attribute value
        sna = sig._attributes["GenSigSNA"][1:-1]
        worksheet.write(row, 11, label = sna, style=mystyle)
    #no SNA-value of signal available / just for correct style:
    else:
        worksheet.write(row, 11, label = " ", style=mystyle)
    # eval byteorder (intel == 1 / motorola == 0)
    if sig._byteorder == 1:
        worksheet.write(row, 12, label = "i", style=mystyle)
    else:
        worksheet.write(row, 12, label = "m", style=mystyle)
    # is a unit defined for signal?
    if sig._unit.strip().__len__() > 0:
        # factor not 1.0 ?
        if float(sig._factor) != 1:
            worksheet.write(row, rearCol+2, label = "%g" % float(sig._factor) + " " + sig._unit, style=mystyle)
        #factor == 1.0
        else:
            worksheet.write(row, rearCol+2, label = sig._unit, style=mystyle)
    # no unit defined
    else:
        # factor not 1.0 ?
        if float(sig._factor) != 1:
            worksheet.write(row, rearCol+2, label = float(sig._factor), style=mystyle)
        #factor == 1.0
        else:
            worksheet.write(row, rearCol+2, label = "", style=mystyle)
def writeValue(label, value, worksheet, row, rearCol, mystyle):
    """Write one value-table entry: raw value at rearCol, its text at rearCol+1."""
    for offset, content in enumerate((label, value)):
        worksheet.write(row, rearCol + offset, label=content, style=mystyle)
def writeBuMatrix(buList, sig, frame, worksheet, row, col, firstframe):
    """Fill the board-unit matrix cells of *row* starting at *col*.

    Each board unit gets "r", "s", "r/s" or "" depending on whether it
    receives the signal, transmits the frame, both, or neither. Returns the
    first column after the matrix.
    """
    # Pick the style quadruple: bordered variants for the first row of a
    # frame, borderless ones for the rows that follow.
    if firstframe == sty_first_frame:
        plain, plain_sender = sty_first_frame, sty_sender_first_frame
        green, green_sender = sty_green_first_frame, sty_sender_green_first_frame
    else:
        plain, plain_sender = sty_norm, sty_sender
        green, green_sender = sty_green, sty_sender_green
    for bu in buList:
        # Alternate column colouring: even columns plain, odd columns green.
        if col % 2 == 0:
            cellStyle, senderStyle = plain, plain_sender
        else:
            cellStyle, senderStyle = green, green_sender
        receives = bu in sig._reciever
        transmits = bu in frame._Transmitter
        if receives and transmits:
            worksheet.write(row, col, label="r/s", style=senderStyle)
        elif receives:
            worksheet.write(row, col, label="r", style=cellStyle)
        elif transmits:
            worksheet.write(row, col, label="s", style=senderStyle)
        else:
            worksheet.write(row, col, label="", style=cellStyle)
        col += 1
    return col
def exportXls(db, filename):
    """Write the can-matrix *db* as an Excel "K-Matrix" sheet to *filename*.

    Layout: one header row, then one row per signal -- or one row per
    value-table entry when the signal has a value table. The board units of
    *db* become the columns between the fixed head columns and the tail
    columns (value / range / unit).
    """
    head_top = ['ID', 'Frame Name', 'Cycle Time [ms]', 'Launch Type', 'Launch Parameter', 'Signal Byte No.', 'Signal Bit No.', 'Signal Name', 'Signal Function', 'Signal Length [Bit]', 'Signal Default', ' Signal Not Available', 'Byteorder']
    head_tail = ['Value', 'Name / Phys. Range', 'Function / Increment Unit']
    workbook = xlwt.Workbook(encoding = 'utf8')
    # sheet names are limited to 31 characters, hence the [0:22] truncation
    worksheet = workbook.add_sheet('K-Matrix ' + filename.replace('.xls','')[0:22])
    col = 0
    # write first row (header) cols before frameardunits:
    for head in head_top:
        worksheet.write(0, col, label = head, style=sty_header)
        worksheet.col(col).width = 1111
        col += 1
    # write frameardunits in first row:
    buList = []
    for bu in db._BUs._list:
        worksheet.write(0, col, label = bu._name, style=sty_header)
        worksheet.col(col).width = 1111
        buList.append(bu._name)
        col += 1
    head_start = col
    # write first row (header) cols after frameardunits:
    for head in head_tail:
        worksheet.write(0, col, label = head, style=sty_header)
        worksheet.col(col).width = 3333
        col += 1
    # set width of selected Cols
    worksheet.col(1).width = 5555
    worksheet.col(3).width = 3333
    worksheet.col(7).width = 5555
    worksheet.col(8).width = 7777
    worksheet.col(head_start).width = 1111
    worksheet.col(head_start+1).width = 5555
    # index frames by numeric id so they can be emitted in id order
    frameHash = {}
    for frame in db._fl._list:
        frameHash[int(frame._Id)] = frame
    #set row to first Frame (row = 0 is header)
    row = 1
    # iterate over the frames
    # NOTE: dict.iterkeys() exists only on Python 2; this module targets
    # Python 2.
    for idx in sorted(frameHash.iterkeys()):
        frame = frameHash[idx]
        framestyle = sty_first_frame
        #sort signals:
        # key is zero-padded startbit plus name; presumably startbit always
        # fits in two digits (classic CAN bit positions) -- TODO confirm.
        sigHash ={}
        for sig in frame._signals:
            sigHash["%02d" % int(sig._startbit) + sig._name] = sig
        #set style for first line with border
        sigstyle = sty_first_frame
        #iterate over signals
        for sig_idx in sorted(sigHash.iterkeys()):
            sig = sigHash[sig_idx]
            # if not first Signal in Frame, set style
            if sigstyle != sty_first_frame:
                sigstyle = sty_norm
            # valuetable available?
            if sig._values.__len__() > 0:
                valstyle = sigstyle
                # iterate over values in valuetable
                for val in sorted(sig._values.iterkeys()):
                    writeFrame(frame, worksheet, row, framestyle)
                    col = head_top.__len__()
                    col = writeBuMatrix(buList, sig, frame, worksheet, row, col, framestyle)
                    # write Value
                    writeValue(val,sig._values[val], worksheet, row, col, valstyle)
                    writeSignal(db, sig, worksheet, row, sigstyle, col)
                    # no min/max here, because min/max has same col as values...
                    #next row
                    row +=1
                    # set style to normal - without border
                    sigstyle = sty_white
                    framestyle = sty_white
                    valstyle = sty_norm
                #loop over values ends here
            # no valuetable available
            else:
                writeFrame(frame, worksheet, row, framestyle)
                col = head_top.__len__()
                col = writeBuMatrix(buList, sig, frame, worksheet, row, col, framestyle)
                writeSignal(db, sig, worksheet, row, sigstyle, col)
                # show the physical range unless it is the trivial 0..1
                if float(sig._min) != 0 or float(sig._max) != 1.0:
                    worksheet.write(row, col+1, label = str("%s..%s" %(sig._min, sig._max)), style=sigstyle)
                else:
                    worksheet.write(row, col+1, label = "", style=sigstyle)
                # just for border
                worksheet.write(row, col, label = "", style=sigstyle)
                #next row
                row +=1
                # set style to normal - without border
                sigstyle = sty_white
                framestyle = sty_white
        # reset signal-Array
        signals = []
        #loop over signals ends here
    # loop over frames ends here
    # frozen headings instead of split panes
    worksheet.set_panes_frozen(True)
    # in general, freeze after last heading row
    worksheet.set_horz_split_pos(1)
    worksheet.set_remove_splits(True)
    # save file
    workbook.save(filename)
| [
"starkj@leidos.com"
] | starkj@leidos.com |
d7260d07b1153616bbc341e18e6edb40759ede60 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p02631/s086085697.py | fe20e86568153f3f73efd0b1df7a952b71276135 | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 122 | py | n=int(input())
a=list(map(int,input().split()))
c=a[0]
for i in range(1,n):
c=c^a[i]
for i in range(n):
print(c^a[i]) | [
"66529651+Aastha2104@users.noreply.github.com"
] | 66529651+Aastha2104@users.noreply.github.com |
38b56793cdc3a225af1527dc3fac40396408906b | 11de80f6ab9a9d89c3ab41cfbbe60c3c595a351d | /objectorienteddesign/cards/propertycards/pistachioblackwildcard.py | fcdbe12b4b9184086edd80f166bf2e2c28002a38 | [] | no_license | karlmartitoots/monopoly-deal-machine-learning | 0e5bd799eea4cb06d5cb6b7a818bdb5d816237d7 | 51c5f9fc31e53cfbffea89f6cdc4ef3a27adf3dd | refs/heads/master | 2020-03-23T06:13:09.894190 | 2018-07-24T19:29:34 | 2018-07-24T19:29:34 | 141,196,295 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 29 | py | class PistachioBlackWildCard: | [
"karlmarti65@gmail.com"
] | karlmarti65@gmail.com |
33a18687d659b7660ac884d23083f6b5f93f45ad | 141279d57293ea855be1b662202199eeb95f1ed8 | /manage.py | 31892c1f95191006b410e442302ccd21ff9e9e0e | [] | no_license | ErickEMAS/economiza | af19f1fad19dfad191373f55603cbe83f35846be | 6db225e95dafe5f5fc5499b1060e5583d9e73770 | refs/heads/master | 2022-11-24T18:16:55.371632 | 2019-12-12T02:42:37 | 2019-12-12T02:42:37 | 225,087,406 | 0 | 0 | null | 2022-11-22T04:52:02 | 2019-12-01T00:30:33 | CSS | UTF-8 | Python | false | false | 636 | py | #!/usr/bin/env python
"""Django's command-line utility for administrative tasks."""
import os
import sys
def main():
    """Run Django administrative tasks for the economiza project."""
    # Point Django at this project's settings before anything imports them.
    os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'config_economiza.settings')
    try:
        from django.core.management import execute_from_command_line
    except ImportError as exc:
        message = (
            "Couldn't import Django. Are you sure it's installed and "
            "available on your PYTHONPATH environment variable? Did you "
            "forget to activate a virtual environment?"
        )
        raise ImportError(message) from exc
    execute_from_command_line(sys.argv)
if __name__ == '__main__':
main()
| [
"erick8alexandre@gmail.com"
] | erick8alexandre@gmail.com |
ca15dbe70032e4a4eefd4396a04a3822f0e73020 | 1470faaf09bd4c9a6e4c11c3420ab9189f76daa1 | /app.py | 76ad2db69fef69f6c0d42f74e387217cdc1cae0a | [] | no_license | PandeCode/flask-react-ts-template | 47f794b0cbd8e4027beaf63a8380fef846d4d8ce | b39e6ff5bef67fdc5c9d8a397be07461cd60bcc1 | refs/heads/main | 2023-06-02T03:58:44.255036 | 2021-06-17T13:46:29 | 2021-06-17T13:46:29 | 366,999,393 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 360 | py | #!/bin/env python3
from server.main import app
from flask_session import Session
import os
if __name__ == "__main__":
    # Store sessions server-side on the local filesystem.
    app.config["SESSION_TYPE"] = "filesystem"
    app.config["SESSION_FILE_DIR"] = "./.flask_session/"
    Session(app)
    # The server cache directory must exist before the app starts serving.
    cache_dir = "./.server_cache/"
    if not os.path.exists(cache_dir):
        os.makedirs(cache_dir)
    app.run(debug=True)
| [
"pandeshawnbenjamin@gmail.com"
] | pandeshawnbenjamin@gmail.com |
4ceb80084efd14f79aa57cbafc8d5f673076d470 | e9f0bb945f34408f510d696ae38421ee4248b584 | /chaoxi/day-091-matplotlib/matplotlib-exam1.py | 7c1fbe6dbdf95e36d50b5d808f12ce8bfeaa2153 | [] | no_license | cxhuan/python-examples | 3d7582a36300d225f903665da8c3b555149a36ec | 5a1b4019cc68ce457c9880120b7a88167b05c14c | refs/heads/master | 2022-04-10T23:58:37.789041 | 2020-04-07T04:09:20 | 2020-04-07T04:09:20 | 250,184,331 | 0 | 0 | null | 2020-03-26T07:00:36 | 2020-03-26T07:00:35 | null | UTF-8 | Python | false | false | 465 | py | import matplotlib.pyplot as plt
# Example 1: a minimal plot — red dashed line with '+' markers
plt.plot([1, 2], [1, 2], 'r--+')
plt.show()
# Example 2
# First create a figure (the drawing board)
fig = plt.figure()
# Then attach axes to the figure; 221 etc. = (rows, cols, slot) of a 2x2 grid
# ax = fig.add_subplot(111)
ax1 = fig.add_subplot(221)
ax2 = fig.add_subplot(222)
ax3 = fig.add_subplot(224)
ax4 = fig.add_subplot(223)
# Configure an axes' limits, title and labels
# ax.set(xlim=[0.5, 4.5], ylim=[-2, 8], title='An Example Axes',
# ylabel='Y-Axis', xlabel='X-Axis')
plt.show()
| [
"283807775@qq.com"
] | 283807775@qq.com |
908780fe69c1ca758295ca0f25b531c70571438f | 67d8173a716da10a7350213d98938aae9f2115ce | /ProgrammingCourses/CS61A/lab/lab09/tests/substitute.py | c599160851d680f435682a58dd191c6b5377599d | [] | no_license | jxie0755/Learning_Python | 94490d41bdf93acf8396f843328e38b6da310b0f | 143422321cbc3715ca08f6c3af8f960a55887ced | refs/heads/master | 2021-11-02T22:47:35.790239 | 2021-09-26T04:26:23 | 2021-09-26T04:26:23 | 101,445,132 | 0 | 2 | null | 2019-02-19T15:48:44 | 2017-08-25T22:00:16 | Python | UTF-8 | Python | false | false | 1,174 | py | test = {
"name": "substitute",
"points": 1,
"suites": [
{
"cases": [
{
"code": r"""
scm> (substitute "(c a b) "b 'l)
(c a l)
scm> (substitute "(f e a r s) "f 'b)
(b e a r s)
scm> (substitute "(g (o) o (o)) "o 'r)
(g (r) r (r))
""",
"hidden": False,
"locked": False
},
{
"code": r"""
scm> (substitute '((lead guitar) (bass guitar) (rhythm guitar) drums)
.... "guitar "axe)
((lead axe) (bass axe) (rhythm axe) drums)
scm> (substitute "(romeo romeo wherefore art thou romeo) "romeo 'paris)
(paris paris wherefore art thou paris)
scm> (substitute "((to be) or not (to (be))) "be 'eat)
((to eat) or not (to (eat)))
scm> (substitute "(a b (c) d e) "foo 'bar)
(a b (c) d e)
""",
"hidden": False,
"locked": False
}
],
"scored": True,
"setup": r"""
scm> (load 'lab09)
scm> (load 'lab09_extra)
""",
"teardown": "",
"type": "scheme"
}
]
}
| [
"30805062+jxie0755@users.noreply.github.com"
] | 30805062+jxie0755@users.noreply.github.com |
1c41b31c2095067d219200c34429fe81d65f2c1a | 96c1f13473cf224113185902edd4c9c01091e106 | /tests/torchlie_tests/functional/test_se3.py | c3af91c3b45ba611167ac0d61031d6cf9bfbf0f1 | [
"MIT"
] | permissive | facebookresearch/theseus | f1e488eb5a25f5ba74a6995911bee958b5da4cf3 | 240e1206329d42fedd40399684d6e17e455c6645 | refs/heads/main | 2023-08-11T07:33:12.328520 | 2023-08-02T12:58:01 | 2023-08-02T12:58:01 | 429,570,359 | 1,410 | 105 | MIT | 2023-08-01T14:30:01 | 2021-11-18T20:28:27 | Python | UTF-8 | Python | false | false | 3,067 | py | # Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
from typing import Sequence, Union
import pytest
import torch
import torchlie.functional.se3_impl as se3_impl
from torchlie.functional import SE3
from .common import (
BATCH_SIZES_TO_TEST,
TEST_EPS,
check_binary_op_broadcasting,
check_left_project_broadcasting,
check_lie_group_function,
check_jacrev_binary,
check_jacrev_unary,
run_test_op,
)
@pytest.mark.parametrize(
    "op_name",
    [
        "exp",
        "log",
        "adjoint",
        "inverse",
        "hat",
        "compose",
        "transform",
        "untransform",
        "lift",
        "project",
        "left_act",
        "left_project",
        "normalize",
    ],
)
@pytest.mark.parametrize("batch_size", BATCH_SIZES_TO_TEST)
@pytest.mark.parametrize("dtype", [torch.float32, torch.float64])
def test_op(op_name, batch_size, dtype):
    """Run the shared op checks for every SE3 operator (tangent dim 6, matrix (3, 4))."""
    rng = torch.Generator()
    rng.manual_seed(0)  # deterministic inputs across all parametrizations
    run_test_op(op_name, batch_size, dtype, rng, 6, (3, 4), se3_impl)
@pytest.mark.parametrize("batch_size", BATCH_SIZES_TO_TEST)
@pytest.mark.parametrize("dtype", [torch.float32, torch.float64])
def test_vee(batch_size: Union[int, Sequence[int]], dtype: torch.dtype):
    """Check vee's analytic backward and that vee inverts hat."""
    if isinstance(batch_size, int):
        batch_size = (batch_size,)
    rng = torch.Generator()
    rng.manual_seed(0)
    tangent_vector = torch.rand(*batch_size, 6, dtype=dtype, generator=rng)
    matrix = se3_impl._hat_autograd_fn(tangent_vector)
    # check analytic backward for the operator
    check_lie_group_function(se3_impl, "vee", TEST_EPS, (matrix,))
    # check the correctness of hat and vee: vee(hat(v)) must round-trip to v
    actual_tangent_vector = se3_impl._vee_autograd_fn(matrix)
    torch.testing.assert_close(
        actual_tangent_vector, tangent_vector, atol=TEST_EPS, rtol=TEST_EPS
    )
@pytest.mark.parametrize("batch_size", [1, 10, 100])
@pytest.mark.parametrize("name", ["exp", "inv"])
def test_jacrev_unary(batch_size, name):
    """Check jacrev-based Jacobians of unary SE3 ops (tangent dim 6)."""
    check_jacrev_unary(SE3, 6, batch_size, name)
@pytest.mark.parametrize("batch_size", [1, 10, 100])
@pytest.mark.parametrize("name", ["compose", "transform", "untransform"])
def test_jacrev_binary(batch_size, name):
    """Check jacrev-based Jacobians of binary SE3 ops."""
    if not hasattr(torch, "vmap"):
        # jacrev needs torch.vmap; silently skip on torch builds without it
        return
    check_jacrev_binary(SE3, batch_size, name)
@pytest.mark.parametrize("name", ["compose", "transform", "untransform"])
def test_binary_op_broadcasting(name):
    """Exercise every pairing of batch shapes (incl. empty) for binary SE3 ops."""
    rng = torch.Generator()
    rng.manual_seed(0)
    batch_sizes = [(1,), (2,), (1, 2), (2, 1), (2, 2), (2, 2, 2), tuple()]
    for bs1 in batch_sizes:
        for bs2 in batch_sizes:
            check_binary_op_broadcasting(
                SE3, name, (3, 4), bs1, bs2, torch.float64, rng
            )
def test_left_project_broadcasting():
    """Check left_project over a range of batch shapes and projected dims."""
    rng = torch.Generator()
    rng.manual_seed(0)
    batch_sizes = [tuple(), (1, 2), (1, 1, 2), (2, 1), (2, 2), (2, 2, 2)]
    check_left_project_broadcasting(SE3, batch_sizes, [0, 1, 2], (3, 4), rng)
| [
"noreply@github.com"
] | noreply@github.com |
400c7d1dfbd9b32067d5a361e8a800aaea5f8be9 | 771c1e2011a85a287c766b1a3d299ced2e6f799f | /src/electionguard/ballot_compact.py | 96e4d2be29a39ca2eb31f736305027dd3da57e10 | [
"MIT"
] | permissive | microsoft/electionguard-python | f50f64a473a8d77984a2faf4aa8db40cebb5c201 | b3ddc2a732f6c5f078a3afbe05b00d632a2ff5e0 | refs/heads/main | 2023-08-03T12:44:35.322716 | 2022-10-28T12:47:18 | 2022-10-28T12:47:18 | 246,392,956 | 143 | 117 | MIT | 2023-08-02T00:24:32 | 2020-03-10T19:46:06 | Python | UTF-8 | Python | false | false | 5,601 | py | from dataclasses import dataclass
from typing import Dict, List
from .ballot import (
CiphertextBallot,
SubmittedBallot,
PlaintextBallot,
PlaintextBallotContest,
PlaintextBallotSelection,
make_ciphertext_submitted_ballot,
)
from .ballot_box import BallotBoxState
from .election import CiphertextElectionContext
from .election_object_base import sequence_order_sort
from .encrypt import encrypt_ballot_contests
from .group import ElementModQ
from .manifest import (
ContestDescriptionWithPlaceholders,
InternalManifest,
)
from .utils import get_optional
YES_VOTE = 1
NO_VOTE = 0
@dataclass
class CompactPlaintextBallot:
    """A compact plaintext representation of ballot minimized for data size"""
    # Unique identifier of the ballot
    object_id: str
    # Ballot style the flattened selections are laid out against
    style_id: str
    # One boolean per selection, flattened in ballot order (True == yes vote)
    selections: List[bool]
    # Write-in text keyed by flattened selection index (only present write-ins)
    write_ins: Dict[int, str]
@dataclass
class CompactSubmittedBallot:
    """A compact submitted ballot minimized for data size"""
    # Compressed plaintext content of the ballot
    compact_plaintext_ballot: CompactPlaintextBallot
    # Submission timestamp carried over from the submitted ballot
    timestamp: int
    # Master nonce; expansion uses it to re-derive the encryption seed
    ballot_nonce: ElementModQ
    # Tracking-code seed and tracking code of the submitted ballot
    code_seed: ElementModQ
    code: ElementModQ
    # State of the ballot in the ballot box (see BallotBoxState)
    ballot_box_state: BallotBoxState
def compress_plaintext_ballot(ballot: PlaintextBallot) -> CompactPlaintextBallot:
    """Build the compact form of `ballot`: its ids plus the flattened
    selection booleans and write-in map."""
    return CompactPlaintextBallot(
        ballot.object_id,
        ballot.style_id,
        _get_compact_selections(ballot),
        _get_compact_write_ins(ballot),
    )
def compress_submitted_ballot(
    ballot: SubmittedBallot,
    plaintext_ballot: PlaintextBallot,
    ballot_nonce: ElementModQ,
) -> CompactSubmittedBallot:
    """Compress a submitted ballot into a compact submitted ballot

    :param ballot: submitted ballot supplying timestamp, codes and state
    :param plaintext_ballot: matching plaintext ballot, compressed inline
    :param ballot_nonce: master nonce needed to re-encrypt on expansion
    """
    return CompactSubmittedBallot(
        compress_plaintext_ballot(plaintext_ballot),
        ballot.timestamp,
        ballot_nonce,
        ballot.code_seed,
        ballot.code,
        ballot.state,
    )
def expand_compact_submitted_ballot(
    compact_ballot: CompactSubmittedBallot,
    internal_manifest: InternalManifest,
    context: CiphertextElectionContext,
) -> SubmittedBallot:
    """
    Rebuild a full submitted ballot from its compact form: re-expand the
    plaintext against the manifest, then re-encrypt the contests with the
    nonce stored in the compact ballot.
    """
    compact_plaintext = compact_ballot.compact_plaintext_ballot
    ballot = expand_compact_plaintext_ballot(compact_plaintext, internal_manifest)
    # Re-derive the encryption seed from manifest hash, ballot id and nonce.
    seed = CiphertextBallot.nonce_seed(
        internal_manifest.manifest_hash,
        compact_plaintext.object_id,
        compact_ballot.ballot_nonce,
    )
    encrypted_contests = get_optional(
        encrypt_ballot_contests(ballot, internal_manifest, context, seed)
    )
    return make_ciphertext_submitted_ballot(
        ballot.object_id,
        ballot.style_id,
        internal_manifest.manifest_hash,
        compact_ballot.code_seed,
        encrypted_contests,
        compact_ballot.code,
        compact_ballot.timestamp,
        compact_ballot.ballot_box_state,
    )
def expand_compact_plaintext_ballot(
    compact_ballot: CompactPlaintextBallot, internal_manifest: InternalManifest
) -> PlaintextBallot:
    """Rebuild the original plaintext ballot from its compact representation."""
    contests = _get_plaintext_contests(compact_ballot, internal_manifest)
    return PlaintextBallot(
        compact_ballot.object_id, compact_ballot.style_id, contests
    )
def _get_compact_selections(ballot: PlaintextBallot) -> List[bool]:
    """Flatten every contest's selections into one boolean list, in ballot
    order (True == yes vote)."""
    return [
        selection.vote == YES_VOTE
        for contest in ballot.contests
        for selection in contest.ballot_selections
    ]
def _get_compact_write_ins(ballot: PlaintextBallot) -> Dict[int, str]:
write_ins = {}
index = 0
for contest in ballot.contests:
for selection in contest.ballot_selections:
index += 1
if selection.write_in:
write_ins[index] = selection.write_in
return write_ins
def _get_plaintext_contests(
    compact_ballot: CompactPlaintextBallot, internal_manifest: InternalManifest
) -> List[PlaintextBallotContest]:
    """Get ballot contests from compact plaintext ballot

    Walks every contest in the manifest in sequence order; `index` advances
    over the compact ballot's flattened selection list.  Selections belonging
    to contests that are not part of this ballot's style are flagged as
    placeholders.
    """
    index = 0
    ballot_style_contests = _get_ballot_style_contests(
        compact_ballot.style_id, internal_manifest
    )
    contests: List[PlaintextBallotContest] = []
    for manifest_contest in sequence_order_sort(internal_manifest.contests):
        contest_in_style = (
            ballot_style_contests.get(manifest_contest.object_id) is not None
        )
        # Iterate through selections. If contest not in style, mark placeholder
        selections: List[PlaintextBallotSelection] = []
        for selection in sequence_order_sort(manifest_contest.ballot_selections):
            selections.append(
                PlaintextBallotSelection(
                    selection.object_id,
                    YES_VOTE if compact_ballot.selections[index] else NO_VOTE,
                    not contest_in_style,
                    compact_ballot.write_ins.get(index),  # write-in for this flat index, if any
                )
            )
            index += 1
        contests.append(PlaintextBallotContest(manifest_contest.object_id, selections))
    return contests
def _get_ballot_style_contests(
    ballot_style_id: str, internal_manifest: InternalManifest
) -> Dict[str, ContestDescriptionWithPlaceholders]:
    """Index the contests of a ballot style by their object id."""
    return {
        contest.object_id: contest
        for contest in internal_manifest.get_contests_for(ballot_style_id)
    }
| [
"noreply@github.com"
] | noreply@github.com |
59f9254a26faab54b7b698e6c4336b6ab640fcad | 2cf71738c6425c618fc2ad44c958a1688830f2cb | /base/base_train.py | ede030944667719ab1d1034500d2700475247be4 | [
"Apache-2.0"
] | permissive | fgabel/novelty_detection | 87b69b83e0f30b9ac5ccdcea9e283bfd5db76d4e | fa27ad2f1fcc361461ab27aa8a240513a5e85f5a | refs/heads/master | 2020-05-29T17:24:48.933688 | 2019-12-12T09:14:20 | 2019-12-12T09:14:20 | 189,273,812 | 3 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,106 | py | import tensorflow as tf
class BaseTrain:
    """Skeleton for trainers: holds the session, model, data, config and
    logger, initializes TF variables, and drives the epoch loop.  Subclasses
    must implement `train_epoch` and `train_step`."""

    def __init__(self, sess, model, data, config, logger):
        self.sess = sess
        self.model = model
        self.data = data
        self.config = config
        self.logger = logger
        # Initialize all global/local variables up front.
        self.init = tf.group(tf.global_variables_initializer(), tf.local_variables_initializer())
        self.sess.run(self.init)

    def train(self):
        """Run train_epoch once per remaining epoch, bumping the epoch counter."""
        first_epoch = self.model.cur_epoch_tensor.eval(self.sess)
        last_epoch = self.config.num_epochs
        for _epoch in range(first_epoch, last_epoch + 1):
            self.train_epoch()
            self.sess.run(self.model.increment_cur_epoch_tensor)

    def train_epoch(self):
        """Run one full epoch.

        Subclasses must loop over the configured number of iterations,
        call the train step for each, and add any summaries they want.
        """
        raise NotImplementedError

    def train_step(self):
        """Run one training step.

        Subclasses must run the tensorflow session and return any metrics
        they need to summarize.
        """
        raise NotImplementedError
| [
"felix@bnbit.de"
] | felix@bnbit.de |
47bf41018d10009f670e94680474ce6e67622dc7 | 7a34d52b758512d01ed657db6e936fc769117d4c | /crypto/random/primes.py | ffedcc42b85f2e44e321b5fca15d753053738e5d | [] | no_license | Notgnoshi/cryptography | 8ff4cd93d6a0ff0e761c271b167fd8e364cef8d6 | 50abb1fad13c0a1cd5083069990f68873ea31c1d | refs/heads/master | 2019-07-02T01:11:14.833503 | 2018-01-10T14:40:41 | 2018-01-10T14:40:41 | 102,778,190 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 488 | py | import random
import gmpy2
# Pure-Python replacement for the previous gmpy2.next_prime approach, which
# could return a prime with bits+1 bits when the random start was near 2**bits.
def _is_probable_prime(n, rounds=40):
    """Miller-Rabin probabilistic primality test (error probability < 4**-rounds)."""
    if n < 2:
        return False
    # Quick trial division by small primes handles most composites cheaply.
    for p in (2, 3, 5, 7, 11, 13, 17, 19, 23, 29, 31, 37):
        if n % p == 0:
            return n == p
    # Write n - 1 as d * 2**r with d odd.
    d = n - 1
    r = 0
    while d % 2 == 0:
        d //= 2
        r += 1
    for _ in range(rounds):
        a = random.randrange(2, n - 1)
        x = pow(a, d, n)
        if x == 1 or x == n - 1:
            continue
        for _ in range(r - 1):
            x = x * x % n
            if x == n - 1:
                break
        else:
            return False  # a is a witness: n is composite
    return True


def random_prime(bits):
    """
    Generates a random prime number with exactly `bits` bits.

    Raises ValueError for bits < 2 (no primes exist below 2 bits).

    Example:
    >>> p = random_prime(256)
    >>> p.bit_length()
    256
    """
    if bits < 2:
        raise ValueError("There are no primes with fewer than 2 bits")
    while True:
        # Force the top bit (exact bit length) and the bottom bit (odd).
        candidate = random.getrandbits(bits) | (1 << (bits - 1)) | 1
        if _is_probable_prime(candidate):
            return candidate
| [
"notgnoshi@gmail.com"
] | notgnoshi@gmail.com |
d3ba496d61ec0b8196d374ecc5a41a78d33281b9 | 955281861be4fc4bbff48f0cfead3900cc8b9a90 | /Python/displayContents.py | 4d3a78e556ee862f8a80f06987293cebe0def722 | [] | no_license | NolanRudolph/FlowSynth | 1020b87553685918d76a5886a4566e543bda0c4b | 0603bd0a71d6841f96f9fd026ee5673d3cc916a2 | refs/heads/master | 2022-03-29T01:59:03.551450 | 2020-01-14T21:31:46 | 2020-01-14T21:31:46 | 184,869,866 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,709 | py | import sys
def main():
    """Pretty-print a comma-separated flow file as an ASCII table.

    Usage: python displayContents.py FILE.  Column headers are re-printed
    every 30 data rows.  Returns None.
    """
    try:
        f_in = open(sys.argv[1], "r")
    except IndexError:
        print("Please use as $ python " + sys.argv[0] + " [FILE]")
        return None
    # Per-column widths for the 15 fields: start/end time, src/dst IP,
    # src/dst port, protocol, TOS, flags, packet count, byte count,
    # router in/out, src/dst ASN.  (Hoisted out of the loop; it was
    # previously rebuilt for every line.)
    spaces = [15, 15, 19, 19, 9, 9, 5, 4, 6, 10, 12, 8, 9, 8, 8]
    lines_since_title = 0
    with f_in:  # ensure the file is closed even on a malformed row
        for line in f_in:
            entry = line.split(",")
            # Truncate the IP fields so they fit their 19-char columns.
            entry[2] = entry[2][:18]
            entry[3] = entry[3][:18]
            new_str = "| "
            for i in range(15):
                attr = entry[i]
                new_str += attr
                new_str += " " * (spaces[i] - len(attr))
                new_str += "| "
            if lines_since_title % 30 == 0:
                make_titles()
                # Fix: this previously assigned to a misspelled
                # "lines_since_titles", leaving a dead variable behind.
                lines_since_title = 0
            print(new_str)
            lines_since_title += 1
    return None


def make_titles():
    """Print the three-line table header (rule, column names, rule)."""
    print("+----------------+----------------+--------------------+--------------------+----------+----------+------+-----+-------+-----------+-------------+---------+----------+---------+---------+")
    print("| Start Time | End Time | Source IP | Destination IP | Src Port | Dst Port | Prot | TOS | Flags | # Packets | # Bytes | Rout In | Rout Out | Src ASN | Dst ASN |")
    print("+----------------+----------------+--------------------+--------------------+----------+----------+------+-----+-------+-----------+-------------+---------+----------+---------+---------+")
    return None
if __name__ == "__main__":
main()
| [
"nolan@localhost.localdomain"
] | nolan@localhost.localdomain |
3dc2bb12966bffd471380690c04c8efd0a9a13b7 | caedff6019e47035eadaaad5a588022e05d92104 | /Christmas2016/question/migrations/0001_initial.py | 763d1090fa8d71921230ce550cd9738236392d82 | [] | no_license | JMorris1575/christmas16 | ff767add9321bfe82ee70477f75a957504dc5288 | 1b06bf8febb94a699226b0b9d951cb14bbe59d50 | refs/heads/master | 2021-01-13T09:33:57.721350 | 2016-12-28T13:12:44 | 2016-12-28T13:12:44 | 72,059,066 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,218 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.10.3 on 2016-12-09 02:39
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import model_mixins
class Migration(migrations.Migration):
    # Auto-generated initial migration (Django 1.10.3): creates the Question
    # and Response tables.  Avoid hand-editing applied migrations.

    initial = True

    dependencies = [
        # Response.author references the (swappable) user model.
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name='Question',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('question', models.TextField()),
            ],
        ),
        migrations.CreateModel(
            name='Response',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('author', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
                ('question', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='question.Question')),
            ],
            # Mixes AuthorMixin into the generated model class.
            bases=(models.Model, model_mixins.AuthorMixin),
        ),
    ]
| [
"FrJamesMorris@gmail.com"
] | FrJamesMorris@gmail.com |
628d46dc69e58fab2b00e0b3f44ef0d2fcd88ea1 | 5f22ddbd3eeb99709e43e7b9a7958c9987c7efa4 | /interview_bits/level_1/01_mathematics/05_number_encoding/01_rearrange-array.py | f60f9290190be8d9160ecf9353f276b41e9c32b3 | [] | no_license | salvador-dali/algorithms_general | 04950bd823fc354adc58a4f23b7d2f3d39664798 | aeee3356e2488c6fab08741b1ac26e8bd5e4ac0d | refs/heads/master | 2020-12-14T06:24:10.466601 | 2016-07-17T06:00:17 | 2016-07-17T06:00:17 | 47,397,457 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 219 | py | # https://www.interviewbit.com/problems/rearrange-array/
def rearrange(arr):
    """In-place rearrange so that arr[i] becomes the old arr[arr[i]].

    `arr` must be a permutation of 0..n-1.  Each entry is temporarily
    encoded as old + n * new so both values coexist during the first pass;
    the second pass keeps only the new value.  O(n) time, O(1) extra space.
    Returns the same list object.

    Fix: the original used Python-2-only `xrange` and true division `/=`,
    which respectively crash and produce floats on Python 3; `range` and
    floor division work on both.
    """
    n = len(arr)
    # Encode: low "digit" (mod n) is the old value, high digit the new one.
    for i in range(n):
        arr[i] += n * (arr[arr[i]] % n)
    # Decode: keep the high digit only.
    for i in range(n):
        arr[i] //= n
    return arr
| [
"dmytro@knowlabs.com"
] | dmytro@knowlabs.com |
acb5c106275a1bfa7b8ae22ee597d799b0cb87f1 | bffde27fec141b8ea73f2c44e9316870e30b9a6a | /T_Computacional/p28.py | 59a3d876e2a3022bbe4b6aea75a27b4df7394d88 | [] | no_license | Edrasen/Teoria_Computacional | 5ffa3ee90f40ab0cdb0d7b59183d832d2b137bb9 | 43e6312f22b19a0951875598d172ead8288fa9b7 | refs/heads/master | 2020-07-11T18:29:37.218095 | 2019-09-30T02:37:53 | 2019-09-30T02:37:53 | 204,615,147 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 204 | py | numero = int(input("Introduzca un numero entre 0 y 100: "))
# Suffix appended when the number is valid.
porcent = '%'
# Chained comparison: strictly between 0 and 100.
if 0 < numero < 100:
    print("{}{}".format(numero, porcent))
else:
    print("El numero introducido no es valido")
| [
"noreply@github.com"
] | noreply@github.com |
6e124878e7a265bcc923d0028a77f10964e8177f | a09c056ff972165d3b42a4ac25083a4dfbc0024e | /examples/run_trainer.py | 54388af8db5884e3ff9bdf62163fc80fe7b5a7b5 | [
"MIT"
] | permissive | Ankur-Deka/malib | fee2957a819053238a4975f7e56697fe4155bf12 | 6be1b8a4a71ae469db3621449f77a88967165016 | refs/heads/master | 2021-01-07T14:46:21.917108 | 2020-02-25T20:03:40 | 2020-02-25T20:03:40 | 241,731,111 | 0 | 0 | MIT | 2020-02-19T21:33:44 | 2020-02-19T21:33:43 | null | UTF-8 | Python | false | false | 1,035 | py | # Created by yingwen at 2019-03-16
from malib.agents.agent_factory import *
from malib.samplers.sampler import MASampler
from malib.environments import DifferentialGame
from malib.logger.utils import set_logger
from malib.utils.random import set_seed
from malib.trainers import MATrainer
# Reproducibility and logging setup.
set_seed(0)
agent_setting = 'MADDPG'
game_name = 'ma_softq'
suffix = f'{game_name}/{agent_setting}'
set_logger(suffix)  # logs are grouped under "<game_name>/<agent_setting>"
# Experiment hyper-parameters.
agent_num = 2
batch_size = 128
training_steps = 10000
exploration_step = 1000
hidden_layer_sizes = (10, 10)
max_replay_buffer_size = 1e5
# Environment plus one MADDPG agent per player.
env = DifferentialGame(game_name, agent_num)
agents = []
for i in range(agent_num):
    agent = get_maddpg_agent(env, i, hidden_layer_sizes=hidden_layer_sizes, max_replay_buffer_size=max_replay_buffer_size)
    agents.append(agent)
# Multi-agent sampler shared by all agents.
sampler = MASampler(agent_num)
sampler.initialize(env, agents)
# Trainer computes target actions as an extra experience for MADDPG updates.
trainer = MATrainer(
    env=env, agents=agents, sampler=sampler,
    steps=training_steps, exploration_steps=exploration_step,
    extra_experiences=['target_actions'],
)
trainer.run()
| [
"noreply@github.com"
] | noreply@github.com |
f26bc83e65aa4622ba13805edca1cbbd55ea8d29 | f1c93e8553d7b7ce1218c3779d793d0d7d5dca8c | /geomloss/examples/performances/plot_benchmarks_samplesloss_3D.py | 9330a2bf32a5430f3f7ef8cb1fe386da192ea869 | [
"MIT"
] | permissive | jeanfeydy/geomloss | e38a848816b6e2b0b2f11aa7a0c3141039fae8bd | 5804ca57f84bd95226efd1d44929022deb9cd23a | refs/heads/main | 2023-05-10T18:55:44.179300 | 2023-03-23T18:50:05 | 2023-03-23T18:50:05 | 173,165,841 | 510 | 53 | MIT | 2023-03-21T10:41:14 | 2019-02-28T18:28:14 | Python | UTF-8 | Python | false | false | 5,907 | py | """
Benchmark SamplesLoss in 3D
=====================================
Let's compare the performances of our losses and backends
as the number of samples grows from 100 to 1,000,000.
"""
##############################################
# Setup
# ---------------------
import numpy as np
import time
from matplotlib import pyplot as plt
import importlib
import torch
use_cuda = torch.cuda.is_available()
from geomloss import SamplesLoss
MAXTIME = 10 if use_cuda else 1 # Max number of seconds before we break the loop
REDTIME = (
2 if use_cuda else 0.2
) # Decrease the number of runs if computations take longer than 2s...
D = 3 # Let's do this in 3D
# Number of samples that we'll loop upon
NS = [
100,
200,
500,
1000,
2000,
5000,
10000,
20000,
50000,
100000,
200000,
500000,
1000000,
]
##############################################
# Synthetic dataset. Feel free to use
# a Stanford Bunny, or whatever!
def generate_samples(N, device, dim=None):
    """Create two weighted point clouds sampled non-uniformly on a sphere of diameter 1.

    Args:
        N (int): number of points per cloud.
        device: torch device on which the tensors are allocated.
        dim (int, optional): ambient dimension.  Defaults to the module-level
            constant D, preserving the original behavior; passing it
            explicitly generalizes the helper to other dimensions.

    Returns:
        (a, x, b, y): positive weight vectors `a`, `b` (each summing to 1)
        and point clouds `x`, `y` of shape (N, dim); `x` requires gradients
        so the loss can be differentiated with respect to it.
    """
    if dim is None:
        dim = D
    x = torch.randn(N, dim, device=device)
    x[:, 0] += 1  # shift the Gaussian so sampling is non-uniform on the sphere
    x = x / (2 * x.norm(dim=1, keepdim=True))  # project onto radius 1/2
    y = torch.randn(N, dim, device=device)
    y[:, 1] += 2
    y = y / (2 * y.norm(dim=1, keepdim=True))
    x.requires_grad = True
    # Draw random weights:
    a = torch.randn(N, device=device)
    b = torch.randn(N, device=device)
    # And normalize them into probability vectors:
    a = a.abs()
    b = b.abs()
    a = a / a.sum()
    b = b / b.sum()
    return a, x, b, y
##############################################
# Benchmarking loops.
def benchmark(Loss, dev, N, loops=10):
    """Times a loss computation+gradient on an N-by-N problem.

    Returns the average elapsed seconds per loop (warmup run excluded).
    """
    importlib.reload(torch)  # In case we had a memory overflow just before...
    device = torch.device(dev)
    a, x, b, y = generate_samples(N, device)
    # We simply benchmark a Loss + gradien wrt. x.
    # The same snippet is exec'd for warmup and timing so both paths match.
    code = "L = Loss( a, x, b, y ) ; L.backward()"
    Loss.verbose = True
    exec(code, locals())  # Warmup run, to compile and load everything
    Loss.verbose = False
    t_0 = time.perf_counter()  # Actual benchmark --------------------
    if use_cuda:
        torch.cuda.synchronize()  # flush pending GPU work before timing
    for i in range(loops):
        exec(code, locals())
    if use_cuda:
        torch.cuda.synchronize()  # make sure GPU work is included in the timing
    elapsed = time.perf_counter() - t_0  # ---------------------------
    print(
        "{:3} NxN loss, with N ={:7}: {:3}x{:3.6f}s".format(
            loops, N, loops, elapsed / loops
        )
    )
    return elapsed / loops
def bench_config(Loss, dev):
    """Times a loss computation+gradient for an increasing number of samples.

    Returns one timing per entry of NS, padded with NaN for sizes that were
    skipped because the computation became too slow or ran out of memory.
    """
    print("Backend : {}, Device : {} -------------".format(Loss.backend, dev))
    times = []
    def run_bench():
        try:
            # Use fewer timing loops as problems get slower; popping past the
            # last entry raises IndexError, which ends the sweep ("too slow").
            Nloops = [100, 10, 1]
            nloops = Nloops.pop(0)
            for n in NS:
                elapsed = benchmark(Loss, dev, n, loops=nloops)
                times.append(elapsed)
                if (nloops * elapsed > MAXTIME) or (
                    nloops * elapsed > REDTIME and len(Nloops) > 0
                ):
                    nloops = Nloops.pop(0)
        except IndexError:
            print("**\nToo slow !")
    try:
        run_bench()
    except RuntimeError as err:
        if str(err)[:4] == "CUDA":
            print("**\nMemory overflow !")
        else:
            # CUDA memory overflows semi-break the internal
            # torch state and may cause some strange bugs.
            # In this case, best option is simply to re-launch
            # the benchmark.
            run_bench()
    return times + (len(NS) - len(times)) * [np.nan]
def full_bench(loss, *args, **kwargs):
    """Benchmarks the varied backends of a geometric loss function.

    Runs bench_config for each backend, draws a log-log runtime plot, and
    dumps the raw timings to output/benchmark_<loss>_3D.csv.
    NOTE(review): the `loss` argument is unused; the global SamplesLoss is
    instantiated instead — confirm before relying on it.
    """
    print("Benchmarking : ===============================")
    lines = [NS]
    backends = ["tensorized", "online", "multiscale"]
    for backend in backends:
        Loss = SamplesLoss(*args, **kwargs, backend=backend)
        lines.append(bench_config(Loss, "cuda" if use_cuda else "cpu"))
    benches = np.array(lines).T  # rows: problem sizes; cols: N + one per backend
    # Creates a pyplot figure:
    plt.figure()
    linestyles = ["o-", "s-", "^-"]
    for i, backend in enumerate(backends):
        plt.plot(
            benches[:, 0],
            benches[:, i + 1],
            linestyles[i],
            linewidth=2,
            label='backend="{}"'.format(backend),
        )
    plt.title('Runtime for SamplesLoss("{}") in dimension {}'.format(Loss.loss, D))
    plt.xlabel("Number of samples per measure")
    plt.ylabel("Seconds")
    plt.yscale("log")
    plt.xscale("log")
    plt.legend(loc="upper left")
    plt.grid(True, which="major", linestyle="-")
    plt.grid(True, which="minor", linestyle="dotted")
    plt.axis([NS[0], NS[-1], 1e-3, MAXTIME])
    plt.tight_layout()
    # Save as a .csv to put a nice Tikz figure in the papers:
    header = "Npoints " + " ".join(backends)
    np.savetxt(
        "output/benchmark_" + Loss.loss + "_3D.csv",
        benches,
        fmt="%-9.5f",
        header=header,
        comments="",
    )
##############################################
# Gaussian MMD, with a small blur
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
#
full_bench(SamplesLoss, "gaussian", blur=0.1, truncate=3)
##############################################
# Energy Distance MMD
# ~~~~~~~~~~~~~~~~~~~~~~
#
full_bench(SamplesLoss, "energy")
##############################################
# Sinkhorn divergence
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
#
# With a medium blurring scale, at one twentieth of the
# configuration's diameter:
full_bench(SamplesLoss, "sinkhorn", p=2, blur=0.05, diameter=1)
##############################################
# With a small blurring scale, at one hundredth of the
# configuration's diameter:
full_bench(SamplesLoss, "sinkhorn", p=2, blur=0.01, diameter=1)
plt.show()
| [
"jean.feydy@gmail.com"
] | jean.feydy@gmail.com |
2bf18684316bad00ea55faedb105e1f676e45fa2 | c4752add6e2d7551423e84ee495369353875d45e | /chiminey/sshconnection/manage.py | 23616bff1a9f276c2768ce76c99fae04e8a6969f | [
"BSD-3-Clause",
"MIT"
] | permissive | chiminey/chiminey | a7363b9ced4ea9615dba7bdaec03665f352767ff | f7bb69b1514a218cce8a4c293dec1205c951dfb7 | refs/heads/master | 2021-01-24T09:48:41.209691 | 2018-10-08T06:50:00 | 2018-10-08T06:50:00 | 17,128,477 | 4 | 7 | null | 2018-08-27T00:45:42 | 2014-02-24T07:24:19 | Python | UTF-8 | Python | false | false | 1,257 | py | # Copyright (C) 2014, RMIT University
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
from chiminey.sshconnection import paramikoconnector
def open_connection(ip_address, settings):
return paramikoconnector.open_connection(ip_address, settings)
| [
"iimanyusuf@gmail.com"
] | iimanyusuf@gmail.com |
1458e39151a1c8eb383271d24df1a34d293c78dc | d5dbae52bbfded54436a665f614a2793029371ea | /features/audio_features/helpers/pyAudioLex/adjective_freq.py | 5869adb9bbd22aadb13228a7d732ccf70fcdc1de | [
"Apache-2.0"
] | permissive | bmarggraff/allie | 88b97acffebe2c1876b379d478b293bfb9edfefb | 2e2f8780f0a42229b582703455e9ce1d42cf9f96 | refs/heads/master | 2022-11-28T02:27:55.100030 | 2020-08-07T19:55:46 | 2020-08-07T19:55:46 | 285,911,411 | 1 | 0 | null | 2020-08-07T20:03:08 | 2020-08-07T20:03:07 | null | UTF-8 | Python | false | false | 682 | py | '''
@package: pyAudioLex
@author: Drew Morris
@module: adjective_freq
Frequency of a POS tag is computed by dividing the total number of words
with that tag by the total number of words spoken by the subject in the
recording.
'''
from nltk.tokenize import word_tokenize
from nltk.tag import pos_tag, map_tag
def adjective_freq(s, tokens = None):
if tokens == None:
tokens = word_tokenize(s)
pos = pos_tag(tokens)
adjectives = []
for [token, tag] in pos:
part = map_tag('en-ptb', 'universal', tag)
if part == "ADJ":
adjectives.append(token)
if len(tokens) == 0:
return float(0)
else:
return float(len(adjectives)) / float(len(tokens))
| [
"jim.schwoebel@gmail.com"
] | jim.schwoebel@gmail.com |
2d74dc299c2700be8c0f0cc55ae6c2e53b4e6b39 | 56ffbb38c4a9447ddfc0b3baeae65ea91ebca619 | /pics/tests.py | 51a8eebb5fc85d3019a3db8e296e1c6f017213ad | [
"MIT"
] | permissive | Isaac-waithaka/Personal-Gallery | 7ecc222fbcb8dec5d99dee60eabf553dfc115a5c | 8ddf236fb1517dd83cae675fa40010f4df02fbc9 | refs/heads/master | 2023-02-19T06:29:07.483347 | 2021-01-21T05:47:15 | 2021-01-21T05:47:15 | 328,609,914 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,696 | py | from django.test import TestCase
from .models import *
# Create your tests here.
class ImageTest(TestCase):
# creating a setup for the test
def setUp(self):
self.new_town = Places.objects.create(location = 'kisumo')
self.new_category = Category.objects.create(category = 'sports')
self.new_image = Image.objects.create(name = 'image', description = 'james jatugo', location = self.new_town, category = self.new_category)
def test_instance(self):
self.assertTrue(isinstance(self.new_image,Image))
def test_delete(self):
self.new_image.save()
self.new_image.delete()
self.assertTrue(len(Image.objects.all()) == 0)
def test_get_image(self):
self.new_image.save()
image = Image.get_image(1)
self.assertTrue(len(image) == 0)
def test_search_image(self):
self.new_image.save()
image = Image.search_image('sports')
self.assertTrue(len(image) > 0)
def test_view_image(self):
self.new_image.save()
image = Image.view_image(self.new_town)
self.assertTrue(len(image) > 0)
def test_image_cat(self):
self.new_image.save()
image = Image.image_cat(self.new_category)
self.assertTrue(len(image) > 0)
class CategoryTest(TestCase):
def setUp(self):
self.new_category = Category.objects.create(category='fun')
def test_instance(self):
self.assertTrue(isinstance(self.new_category,Category))
class PlacesTest(TestCase):
def setUp(self):
self.new_town = Places.objects.create(location='nairobi')
def test_instance(self):
self.assertTrue(isinstance(self.new_town,Places))
| [
"waithakaisaac2016@gmail.com"
] | waithakaisaac2016@gmail.com |
461caf1d51436f245f84a2eb6e7dd79ba383faea | 8d3aa3d1da0b6da734568f08d9689ea1011d8549 | /app/models/users.py | a65dbf05b20fbe82a37427f60d32a2ce17656d66 | [] | no_license | housong12590/luolin | 79c88546b27e881a426841bb38c00bacce39975b | 1264f38a84293da6b3077e5da465af0ad64b9ebc | refs/heads/master | 2020-03-16T22:04:46.055419 | 2018-05-15T18:25:56 | 2018-05-15T18:25:56 | 133,025,852 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 173 | py | from app import db
class User(db.Model):
__tablename__ = 'users'
id = db.Column(db.Integer, primary_key=True)
username = db.Column(db.String(64), unique=True)
| [
"304536797@qq.com"
] | 304536797@qq.com |
bdb840818727a44142386b7bfca07f85bb42c4c5 | 2eae990930f7823ab45b5e9b279841bd20127945 | /passwordgenerator/urls.py | ec7e44bfe5db483d06cc2d939dab5e37f7ef7bf3 | [] | no_license | Shantanumali/Password_generator | e5b3fa21e35cc2592e4416c2d3620f4eb786bc01 | 9b8fac6cc44fabd585013ab93b012f78547b1bfc | refs/heads/master | 2022-12-29T06:47:21.467523 | 2020-09-30T13:32:30 | 2020-09-30T13:32:30 | 299,912,071 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 863 | py | """passwordgenerator URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/3.1/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.urls import path
from generator import views
urlpatterns = [
path('' , views.home, name = "home"),
path('password/', views.password, name = "password"),
path('home/', views.home, name = "home"),
]
| [
"shantumali95@gmail.com"
] | shantumali95@gmail.com |
f5ff2edc3d00e7ae5094e3a26df7cacec24db0cd | 966b16ebb1b012cc1cedbd320a5a3d25302afa8f | /first-light-sources/light-sources-detection.py | 4026ba98b5cff5a73dd2bba6da2789fc7fafc674 | [] | no_license | lexiddie/light-sources-and-shadow | 6f8123a3df7f652e6ca0d07dc735e2e917716127 | d33d7bd5f7840afadba49b003eece31db4732554 | refs/heads/main | 2023-08-05T03:59:31.243298 | 2021-09-29T10:38:21 | 2021-09-29T10:38:21 | 411,610,831 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,770 | py | import cv2
import numpy as np
import colorsys
import random as rd
img1 = './img1.jpg'
img2 = './img2.jpg'
img3 = './img3.jpg'
img4 = './img4.jpg'
img5 = './img5.jpg'
img6 = './img6.jpg'
img7 = './img7.jpg'
img8 = './img8.png'
read_img = cv2.imread(img5)
img_result = cv2.cvtColor(read_img, cv2.COLOR_HSV2BGR)
img_data = np.array(read_img)
b, g, r = cv2.split(read_img)
print(b, g, r)
print('\n')
# apply Canny edge detection using a wide threshold, tight
# threshold, and automatically determined threshold
blurred = cv2.GaussianBlur(img_result, (5, 5), 0)
wide = cv2.Canny(blurred, 10, 200)
tight = cv2.Canny(blurred, 225, 250)
# edged = cv2.Canny(img_result, 100, 200)
# edged = cv2.Canny(blurred, 100, 200)
edged = cv2.Canny(blurred, 225, 250)
contours, hierarchy = cv2.findContours(edged.copy(), cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_NONE)
deepcopy_img = read_img.copy()
def get_contour_areas(cnts):
all_areas = []
for cnt in cnts:
area = cv2.contourArea(cnt)
all_areas.append(area)
return all_areas
print("Contour Areas before Sorting", get_contour_areas(contours))
sorted_contours = sorted(contours, key=cv2.contourArea, reverse=True)
print("Contour Areas after Sorting", get_contour_areas(sorted_contours))
for c in sorted_contours:
cv2.drawContours(image=read_img, contours=[c], contourIdx=-1, color=(7, 210, 193), thickness=2, lineType=cv2.FILLED)
# result = np.concatenate((read_img, img_result), axis=1)
# result = np.concatenate((deepcopy_img, img_result), axis=1)
# result = np.concatenate((img_result, blurred), axis=1)
result = np.concatenate((deepcopy_img, read_img), axis=1)
cv2.imshow('Result Images', result)
# cv2.imshow('Result Images', edged)
print('Finish Compute')
cv2.waitKey(0)
cv2.destroyAllWindows()
| [
"37273428+lexiddie@users.noreply.github.com"
] | 37273428+lexiddie@users.noreply.github.com |
9b87c9c6ae3f0501855c5d7c3d286406dc1a1eb4 | aa6b1d8ff03abb3ca65298cfd645ad91e0f57b19 | /migrations/creationdb.py | 92d9ecebd38784ae44e846ddadbba24b55d18cae | [
"MIT"
] | permissive | MMB019/detection_hepathie_C | 2156374bb5a93f58a9d8aa006450b0ead3b20c70 | b48fa6a930b332dfd2c532e7d33fa9c3000b6257 | refs/heads/main | 2023-07-27T17:42:12.942115 | 2021-09-14T11:59:08 | 2021-09-14T11:59:08 | 405,172,962 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 549 | py | import sqlite3
#creation of the database
def creation_tableau(database="database.db"):
conn=sqlite3.connect(database)
conn.execute("CREATE TABLE prediction (id integer auto_increment primary key, firstName TEXT,lastName TEXT, sex varchar, age varchar,alb varchar,alp varchar,alt varchar, ast varchar, bil varchar,che varchar , chol varchar, crea varchar,ggt varchar, port varchar,result integer)")
print("la creation de la table prediction ok!")
conn.close()
#execution du code
if __name__=="__main__":
creation_tableau() | [
"mdoumbang224@gmail.com"
] | mdoumbang224@gmail.com |
50f8e6b88bff07c4af0e52bfa551b372a8c93bc8 | a35b2842ff707c9adf70e178ba8cb7a128e6f0fa | /brl_gym/scripts/crosswalk_vel/generate_initial_conditions.py | a9c7a1bb7d4cb52e8276db48814c90777f6661e9 | [
"BSD-3-Clause"
] | permissive | gilwoolee/brl_gym | 7717366a09c7ff96a8fbc02688febe6d559e333a | 9c0784e9928f12d2ee0528c79a533202d3afb640 | refs/heads/master | 2022-11-26T15:08:56.730225 | 2020-08-02T05:08:28 | 2020-08-02T05:08:28 | 198,884,614 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 772 | py | from brl_gym.envs.crosswalk_vel import CrossWalkVelEnv
import numpy as np
env = CrossWalkVelEnv()
env.reset()
goals = env.goals
peds = env.pedestrians
pose = env.pose
ped_speeds = env.pedestrian_speeds
print("Car 37, 38, 35")
print("Peds :\n", np.around(peds,1))
print("Ped speeds:\n", np.around(ped_speeds,2))
print("Goals :\n", np.around(goals,1))
print("Pose :\n", np.around(pose,1))
print("Angle :\n", np.around(np.rad2deg(pose[2]),2))
for ps, goal in zip(ped_speeds, goals):
if goal[0] == 3.5:
goal[0] = 3.2
if goal[0] == 0.0:
goal[0] = 0.3
print("roslaunch mushr_control runner_script.launch car_name:=$CAR_NAME wait_for_signal:=false desired_speed:={:.2f} desired_x:={:.2f} desired_y:={:.2f} local:=false".format(ps, goal[0], goal[1]))
| [
"gilwoo301@gmail.com"
] | gilwoo301@gmail.com |
eebe2c77eec69a0b694b9ead01bc15169876b071 | 17e944986da2284fe070b3564426e1debbe7bb89 | /modules/neutronclient/tests/unit/test_cli20_network.py | e0681d250e7faa952281b7b123624f94c33ace83 | [] | no_license | sonal-swarnima/baadal | bc793a84e4c45bedc6e7faa290a211993dfa3905 | f2f47cb697ee2bb8ef71b7e821ba3ecb3a3bdb7c | refs/heads/master | 2021-01-15T21:07:02.034366 | 2016-01-22T17:41:14 | 2016-01-22T17:41:14 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 25,658 | py | # All Rights Reserved
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
import itertools
import sys
from mox3 import mox
from oslo_serialization import jsonutils
from neutronclient.common import exceptions
from neutronclient.neutron.v2_0 import network
from neutronclient import shell
from neutronclient.tests.unit import test_cli20
class CLITestV20NetworkJSON(test_cli20.CLITestV20Base):
def setUp(self):
super(CLITestV20NetworkJSON, self).setUp(plurals={'tags': 'tag'})
def test_create_network(self):
"""Create net: myname."""
resource = 'network'
cmd = network.CreateNetwork(test_cli20.MyApp(sys.stdout), None)
name = 'myname'
myid = 'myid'
args = [name, ]
position_names = ['name', ]
position_values = [name, ]
self._test_create_resource(resource, cmd, name, myid, args,
position_names, position_values)
def test_create_network_with_unicode(self):
"""Create net: u'\u7f51\u7edc'."""
resource = 'network'
cmd = network.CreateNetwork(test_cli20.MyApp(sys.stdout), None)
name = u'\u7f51\u7edc'
myid = 'myid'
args = [name, ]
position_names = ['name', ]
position_values = [name, ]
self._test_create_resource(resource, cmd, name, myid, args,
position_names, position_values)
def test_create_network_tenant(self):
"""Create net: --tenant_id tenantid myname."""
resource = 'network'
cmd = network.CreateNetwork(test_cli20.MyApp(sys.stdout), None)
name = 'myname'
myid = 'myid'
args = ['--tenant_id', 'tenantid', name]
position_names = ['name', ]
position_values = [name, ]
self._test_create_resource(resource, cmd, name, myid, args,
position_names, position_values,
tenant_id='tenantid')
# Test dashed options
args = ['--tenant-id', 'tenantid', name]
self._test_create_resource(resource, cmd, name, myid, args,
position_names, position_values,
tenant_id='tenantid')
def test_create_network_provider_args(self):
"""Create net: with --provider arguments."""
resource = 'network'
cmd = network.CreateNetwork(test_cli20.MyApp(sys.stdout), None)
name = 'myname'
myid = 'myid'
# Test --provider attributes before network name
args = ['--provider:network_type', 'vlan',
'--provider:physical_network', 'physnet1',
'--provider:segmentation_id', '400', name]
position_names = ['provider:network_type',
'provider:physical_network',
'provider:segmentation_id', 'name']
position_values = ['vlan', 'physnet1', '400', name]
self._test_create_resource(resource, cmd, name, myid, args,
position_names, position_values)
def test_create_network_tags(self):
"""Create net: myname --tags a b."""
resource = 'network'
cmd = network.CreateNetwork(test_cli20.MyApp(sys.stdout), None)
name = 'myname'
myid = 'myid'
args = [name, '--tags', 'a', 'b']
position_names = ['name', ]
position_values = [name, ]
self._test_create_resource(resource, cmd, name, myid, args,
position_names, position_values,
tags=['a', 'b'])
def test_create_network_external(self):
"""Create net: --router:external myname."""
resource = 'network'
cmd = network.CreateNetwork(test_cli20.MyApp(sys.stdout), None)
name = 'myname'
myid = 'myid'
args = [name, '--router:external']
position_names = ['name', ]
position_values = [name, ]
external = {'router:external': True}
self._test_create_resource(resource, cmd, name, myid, args,
position_names, position_values,
**external)
def test_create_network_state(self):
"""Create net: --admin_state_down myname."""
resource = 'network'
cmd = network.CreateNetwork(test_cli20.MyApp(sys.stdout), None)
name = 'myname'
myid = 'myid'
args = ['--admin_state_down', name, ]
position_names = ['name', ]
position_values = [name, ]
self._test_create_resource(resource, cmd, name, myid, args,
position_names, position_values,
admin_state_up=False)
# Test dashed options
args = ['--admin-state-down', name, ]
self._test_create_resource(resource, cmd, name, myid, args,
position_names, position_values,
admin_state_up=False)
def test_create_network_vlan_transparent(self):
"""Create net: myname --vlan-transparent True."""
resource = 'network'
cmd = network.CreateNetwork(test_cli20.MyApp(sys.stdout), None)
name = 'myname'
myid = 'myid'
args = ['--vlan-transparent', 'True', name]
vlantrans = {'vlan_transparent': 'True'}
position_names = ['name', ]
position_values = [name, ]
self._test_create_resource(resource, cmd, name, myid, args,
position_names, position_values,
**vlantrans)
def test_list_nets_empty_with_column(self):
resources = "networks"
cmd = network.ListNetwork(test_cli20.MyApp(sys.stdout), None)
self.mox.StubOutWithMock(cmd, "get_client")
self.mox.StubOutWithMock(self.client.httpclient, "request")
self.mox.StubOutWithMock(network.ListNetwork, "extend_list")
network.ListNetwork.extend_list(mox.IsA(list), mox.IgnoreArg())
cmd.get_client().MultipleTimes().AndReturn(self.client)
reses = {resources: []}
resstr = self.client.serialize(reses)
# url method body
query = "id=myfakeid"
args = ['-c', 'id', '--', '--id', 'myfakeid']
path = getattr(self.client, resources + "_path")
self.client.httpclient.request(
test_cli20.MyUrlComparator(test_cli20.end_url(path, query),
self.client),
'GET',
body=None,
headers=mox.ContainsKeyValue(
'X-Auth-Token',
test_cli20.TOKEN)).AndReturn(
(test_cli20.MyResp(200), resstr))
self.mox.ReplayAll()
cmd_parser = cmd.get_parser("list_" + resources)
shell.run_command(cmd, cmd_parser, args)
self.mox.VerifyAll()
self.mox.UnsetStubs()
_str = self.fake_stdout.make_string()
self.assertEqual('\n', _str)
def _test_list_networks(self, cmd, detail=False, tags=(),
fields_1=(), fields_2=(), page_size=None,
sort_key=(), sort_dir=()):
resources = "networks"
self.mox.StubOutWithMock(network.ListNetwork, "extend_list")
network.ListNetwork.extend_list(mox.IsA(list), mox.IgnoreArg())
self._test_list_resources(resources, cmd, detail, tags,
fields_1, fields_2, page_size=page_size,
sort_key=sort_key, sort_dir=sort_dir)
def test_list_nets_pagination(self):
cmd = network.ListNetwork(test_cli20.MyApp(sys.stdout), None)
self.mox.StubOutWithMock(network.ListNetwork, "extend_list")
network.ListNetwork.extend_list(mox.IsA(list), mox.IgnoreArg())
self._test_list_resources_with_pagination("networks", cmd)
def test_list_nets_sort(self):
"""list nets: --sort-key name --sort-key id --sort-dir asc
--sort-dir desc
"""
cmd = network.ListNetwork(test_cli20.MyApp(sys.stdout), None)
self._test_list_networks(cmd, sort_key=['name', 'id'],
sort_dir=['asc', 'desc'])
def test_list_nets_sort_with_keys_more_than_dirs(self):
"""list nets: --sort-key name --sort-key id --sort-dir desc
"""
cmd = network.ListNetwork(test_cli20.MyApp(sys.stdout), None)
self._test_list_networks(cmd, sort_key=['name', 'id'],
sort_dir=['desc'])
def test_list_nets_sort_with_dirs_more_than_keys(self):
"""list nets: --sort-key name --sort-dir desc --sort-dir asc
"""
cmd = network.ListNetwork(test_cli20.MyApp(sys.stdout), None)
self._test_list_networks(cmd, sort_key=['name'],
sort_dir=['desc', 'asc'])
def test_list_nets_limit(self):
"""list nets: -P."""
cmd = network.ListNetwork(test_cli20.MyApp(sys.stdout), None)
self._test_list_networks(cmd, page_size=1000)
def test_list_nets_detail(self):
"""list nets: -D."""
cmd = network.ListNetwork(test_cli20.MyApp(sys.stdout), None)
self._test_list_networks(cmd, True)
def test_list_nets_tags(self):
"""List nets: -- --tags a b."""
cmd = network.ListNetwork(test_cli20.MyApp(sys.stdout), None)
self._test_list_networks(cmd, tags=['a', 'b'])
def test_list_nets_tags_with_unicode(self):
"""List nets: -- --tags u'\u7f51\u7edc'."""
cmd = network.ListNetwork(test_cli20.MyApp(sys.stdout), None)
self._test_list_networks(cmd, tags=[u'\u7f51\u7edc'])
def test_list_nets_detail_tags(self):
"""List nets: -D -- --tags a b."""
cmd = network.ListNetwork(test_cli20.MyApp(sys.stdout), None)
self._test_list_networks(cmd, detail=True, tags=['a', 'b'])
def _test_list_nets_extend_subnets(self, data, expected):
def setup_list_stub(resources, data, query):
reses = {resources: data}
resstr = self.client.serialize(reses)
resp = (test_cli20.MyResp(200), resstr)
path = getattr(self.client, resources + '_path')
self.client.httpclient.request(
test_cli20.MyUrlComparator(
test_cli20.end_url(path, query), self.client),
'GET',
body=None,
headers=mox.ContainsKeyValue(
'X-Auth-Token', test_cli20.TOKEN)).AndReturn(resp)
cmd = network.ListNetwork(test_cli20.MyApp(sys.stdout), None)
self.mox.StubOutWithMock(cmd, 'get_client')
self.mox.StubOutWithMock(self.client.httpclient, 'request')
cmd.get_client().AndReturn(self.client)
setup_list_stub('networks', data, '')
cmd.get_client().AndReturn(self.client)
filters = ''
for n in data:
for s in n['subnets']:
filters = filters + "&id=%s" % s
setup_list_stub('subnets',
[{'id': 'mysubid1', 'cidr': '192.168.1.0/24'},
{'id': 'mysubid2', 'cidr': '172.16.0.0/24'},
{'id': 'mysubid3', 'cidr': '10.1.1.0/24'}],
query='fields=id&fields=cidr' + filters)
self.mox.ReplayAll()
args = []
cmd_parser = cmd.get_parser('list_networks')
parsed_args = cmd_parser.parse_args(args)
result = cmd.get_data(parsed_args)
self.mox.VerifyAll()
self.mox.UnsetStubs()
_result = [x for x in result[1]]
self.assertEqual(len(_result), len(expected))
for res, exp in zip(_result, expected):
self.assertEqual(len(res), len(exp))
for a, b in zip(res, exp):
self.assertEqual(a, b)
def test_list_nets_extend_subnets(self):
data = [{'id': 'netid1', 'name': 'net1', 'subnets': ['mysubid1']},
{'id': 'netid2', 'name': 'net2', 'subnets': ['mysubid2',
'mysubid3']}]
# id, name, subnets
expected = [('netid1', 'net1', 'mysubid1 192.168.1.0/24'),
('netid2', 'net2',
'mysubid2 172.16.0.0/24\nmysubid3 10.1.1.0/24')]
self._test_list_nets_extend_subnets(data, expected)
def test_list_nets_extend_subnets_no_subnet(self):
data = [{'id': 'netid1', 'name': 'net1', 'subnets': ['mysubid1']},
{'id': 'netid2', 'name': 'net2', 'subnets': ['mysubid4']}]
# id, name, subnets
expected = [('netid1', 'net1', 'mysubid1 192.168.1.0/24'),
('netid2', 'net2', 'mysubid4 ')]
self._test_list_nets_extend_subnets(data, expected)
def test_list_nets_fields(self):
"""List nets: --fields a --fields b -- --fields c d."""
cmd = network.ListNetwork(test_cli20.MyApp(sys.stdout), None)
self._test_list_networks(cmd,
fields_1=['a', 'b'], fields_2=['c', 'd'])
def _test_list_nets_columns(self, cmd, returned_body,
args=('-f', 'json')):
resources = 'networks'
self.mox.StubOutWithMock(network.ListNetwork, "extend_list")
network.ListNetwork.extend_list(mox.IsA(list), mox.IgnoreArg())
self._test_list_columns(cmd, resources, returned_body, args=args)
def test_list_nets_defined_column(self):
cmd = network.ListNetwork(test_cli20.MyApp(sys.stdout), None)
returned_body = {"networks": [{"name": "buildname3",
"id": "id3",
"tenant_id": "tenant_3",
"subnets": []}]}
self._test_list_nets_columns(cmd, returned_body,
args=['-f', 'json', '-c', 'id'])
_str = self.fake_stdout.make_string()
returned_networks = jsonutils.loads(_str)
self.assertEqual(1, len(returned_networks))
net = returned_networks[0]
self.assertEqual(1, len(net))
self.assertIn("id", net.keys())
def test_list_nets_with_default_column(self):
cmd = network.ListNetwork(test_cli20.MyApp(sys.stdout), None)
returned_body = {"networks": [{"name": "buildname3",
"id": "id3",
"tenant_id": "tenant_3",
"subnets": []}]}
self._test_list_nets_columns(cmd, returned_body)
_str = self.fake_stdout.make_string()
returned_networks = jsonutils.loads(_str)
self.assertEqual(1, len(returned_networks))
net = returned_networks[0]
self.assertEqual(3, len(net))
self.assertEqual(0, len(set(net) ^ set(cmd.list_columns)))
def test_list_external_nets_empty_with_column(self):
resources = "networks"
cmd = network.ListExternalNetwork(test_cli20.MyApp(sys.stdout), None)
self.mox.StubOutWithMock(cmd, "get_client")
self.mox.StubOutWithMock(self.client.httpclient, "request")
self.mox.StubOutWithMock(network.ListNetwork, "extend_list")
network.ListNetwork.extend_list(mox.IsA(list), mox.IgnoreArg())
cmd.get_client().MultipleTimes().AndReturn(self.client)
reses = {resources: []}
resstr = self.client.serialize(reses)
# url method body
query = "router%3Aexternal=True&id=myfakeid"
args = ['-c', 'id', '--', '--id', 'myfakeid']
path = getattr(self.client, resources + "_path")
self.client.httpclient.request(
test_cli20.MyUrlComparator(
test_cli20.end_url(path, query), self.client),
'GET',
body=None,
headers=mox.ContainsKeyValue(
'X-Auth-Token',
test_cli20.TOKEN)).AndReturn(
(test_cli20.MyResp(200), resstr))
self.mox.ReplayAll()
cmd_parser = cmd.get_parser("list_" + resources)
shell.run_command(cmd, cmd_parser, args)
self.mox.VerifyAll()
self.mox.UnsetStubs()
_str = self.fake_stdout.make_string()
self.assertEqual('\n', _str)
def _test_list_external_nets(self, resources, cmd,
detail=False, tags=(),
fields_1=(), fields_2=()):
self.mox.StubOutWithMock(cmd, "get_client")
self.mox.StubOutWithMock(self.client.httpclient, "request")
self.mox.StubOutWithMock(network.ListNetwork, "extend_list")
network.ListNetwork.extend_list(mox.IsA(list), mox.IgnoreArg())
cmd.get_client().MultipleTimes().AndReturn(self.client)
reses = {resources: [{'id': 'myid1', },
{'id': 'myid2', }, ], }
resstr = self.client.serialize(reses)
# url method body
query = ""
args = detail and ['-D', ] or []
if fields_1:
for field in fields_1:
args.append('--fields')
args.append(field)
if tags:
args.append('--')
args.append("--tag")
for tag in tags:
args.append(tag)
if (not tags) and fields_2:
args.append('--')
if fields_2:
args.append("--fields")
for field in fields_2:
args.append(field)
for field in itertools.chain(fields_1, fields_2):
if query:
query += "&fields=" + field
else:
query = "fields=" + field
if query:
query += '&router%3Aexternal=True'
else:
query += 'router%3Aexternal=True'
for tag in tags:
if query:
query += "&tag=" + tag
else:
query = "tag=" + tag
if detail:
query = query and query + '&verbose=True' or 'verbose=True'
path = getattr(self.client, resources + "_path")
self.client.httpclient.request(
test_cli20.MyUrlComparator(
test_cli20.end_url(path, query), self.client),
'GET',
body=None,
headers=mox.ContainsKeyValue('X-Auth-Token', test_cli20.TOKEN)
).AndReturn((test_cli20.MyResp(200), resstr))
self.mox.ReplayAll()
cmd_parser = cmd.get_parser("list_" + resources)
shell.run_command(cmd, cmd_parser, args)
self.mox.VerifyAll()
self.mox.UnsetStubs()
_str = self.fake_stdout.make_string()
self.assertIn('myid1', _str)
def test_list_external_nets_detail(self):
"""list external nets: -D."""
resources = "networks"
cmd = network.ListExternalNetwork(test_cli20.MyApp(sys.stdout), None)
self._test_list_external_nets(resources, cmd, True)
def test_list_external_nets_tags(self):
"""List external nets: -- --tags a b."""
resources = "networks"
cmd = network.ListExternalNetwork(test_cli20.MyApp(sys.stdout), None)
self._test_list_external_nets(resources,
cmd, tags=['a', 'b'])
def test_list_external_nets_detail_tags(self):
"""List external nets: -D -- --tags a b."""
resources = "networks"
cmd = network.ListExternalNetwork(test_cli20.MyApp(sys.stdout), None)
self._test_list_external_nets(resources, cmd,
detail=True, tags=['a', 'b'])
def test_list_externel_nets_fields(self):
"""List external nets: --fields a --fields b -- --fields c d."""
resources = "networks"
cmd = network.ListExternalNetwork(test_cli20.MyApp(sys.stdout), None)
self._test_list_external_nets(resources, cmd,
fields_1=['a', 'b'],
fields_2=['c', 'd'])
def test_update_network_exception(self):
"""Update net: myid."""
resource = 'network'
cmd = network.UpdateNetwork(test_cli20.MyApp(sys.stdout), None)
self.assertRaises(exceptions.CommandError, self._test_update_resource,
resource, cmd, 'myid', ['myid'], {})
def test_update_network(self):
"""Update net: myid --name myname --tags a b."""
resource = 'network'
cmd = network.UpdateNetwork(test_cli20.MyApp(sys.stdout), None)
self._test_update_resource(resource, cmd, 'myid',
['myid', '--name', 'myname',
'--tags', 'a', 'b'],
{'name': 'myname', 'tags': ['a', 'b'], }
)
def test_update_network_with_unicode(self):
"""Update net: myid --name u'\u7f51\u7edc' --tags a b."""
resource = 'network'
cmd = network.UpdateNetwork(test_cli20.MyApp(sys.stdout), None)
self._test_update_resource(resource, cmd, 'myid',
['myid', '--name', u'\u7f51\u7edc',
'--tags', 'a', 'b'],
{'name': u'\u7f51\u7edc',
'tags': ['a', 'b'], }
)
def test_show_network(self):
"""Show net: --fields id --fields name myid."""
resource = 'network'
cmd = network.ShowNetwork(test_cli20.MyApp(sys.stdout), None)
args = ['--fields', 'id', '--fields', 'name', self.test_id]
self._test_show_resource(resource, cmd, self.test_id, args,
['id', 'name'])
def test_delete_network(self):
"""Delete net: myid."""
resource = 'network'
cmd = network.DeleteNetwork(test_cli20.MyApp(sys.stdout), None)
myid = 'myid'
args = [myid]
self._test_delete_resource(resource, cmd, myid, args)
def _test_extend_list(self, mox_calls):
data = [{'id': 'netid%d' % i, 'name': 'net%d' % i,
'subnets': ['mysubid%d' % i]}
for i in range(10)]
self.mox.StubOutWithMock(self.client.httpclient, "request")
path = getattr(self.client, 'subnets_path')
cmd = network.ListNetwork(test_cli20.MyApp(sys.stdout), None)
self.mox.StubOutWithMock(cmd, "get_client")
cmd.get_client().MultipleTimes().AndReturn(self.client)
mox_calls(path, data)
self.mox.ReplayAll()
known_args, _vs = cmd.get_parser('create_subnets').parse_known_args()
cmd.extend_list(data, known_args)
self.mox.VerifyAll()
def _build_test_data(self, data):
subnet_ids = []
response = []
filters = ""
for n in data:
if 'subnets' in n:
subnet_ids.extend(n['subnets'])
for subnet_id in n['subnets']:
filters = "%s&id=%s" % (filters, subnet_id)
response.append({'id': subnet_id,
'cidr': '192.168.0.0/16'})
resp_str = self.client.serialize({'subnets': response})
resp = (test_cli20.MyResp(200), resp_str)
return filters, resp
def test_extend_list(self):
def mox_calls(path, data):
filters, response = self._build_test_data(data)
self.client.httpclient.request(
test_cli20.MyUrlComparator(test_cli20.end_url(
path, 'fields=id&fields=cidr' + filters), self.client),
'GET',
body=None,
headers=mox.ContainsKeyValue(
'X-Auth-Token', test_cli20.TOKEN)).AndReturn(response)
self._test_extend_list(mox_calls)
def test_extend_list_exceed_max_uri_len(self):
def mox_calls(path, data):
sub_data_lists = [data[:len(data) - 1], data[len(data) - 1:]]
filters, response = self._build_test_data(data)
# 1 char of extra URI len will cause a split in 2 requests
self.mox.StubOutWithMock(self.client.httpclient,
"_check_uri_length")
self.client.httpclient._check_uri_length(mox.IgnoreArg()).AndRaise(
exceptions.RequestURITooLong(excess=1))
for data in sub_data_lists:
filters, response = self._build_test_data(data)
self.client.httpclient._check_uri_length(
mox.IgnoreArg()).AndReturn(None)
self.client.httpclient.request(
test_cli20.MyUrlComparator(
test_cli20.end_url(
path, 'fields=id&fields=cidr%s' % filters),
self.client),
'GET',
body=None,
headers=mox.ContainsKeyValue(
'X-Auth-Token', test_cli20.TOKEN)).AndReturn(response)
self._test_extend_list(mox_calls)
class CLITestV20NetworkXML(CLITestV20NetworkJSON):
format = 'xml'
| [
"msasad.visitor@iitd.ac.in"
] | msasad.visitor@iitd.ac.in |
077e06806c57829b1f5cc54d139833314ac0bffe | 308953409e1a3b828ac49b7301c1e751cbf762cf | /suite_EETc 12/tst_Open_Change_Values_Import_No/test.py | fec88bb5a37e70b505750a61bac908c5b0993dd9 | [] | no_license | asthagaur1/danfoss-automation | 4dcc7d8f000917b67e4d6f46ff862a525ddcbc5e | 213a99d3375889cd0e0c801421a50e9fe6085879 | refs/heads/main | 2023-03-31T23:26:56.956107 | 2021-04-01T08:52:37 | 2021-04-01T08:52:37 | 353,627,845 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 418 | py | def main():
excel = r"C:\gitworkspace\KoolProg-TestAutomation\Master_Functions\Test_Automation\SourceCode\suite_EETc 12\shared\testdata\Open_Change_Values_Import_No.xls";
#Mapping with Global scripts for Function library and key action.
source(findFile("scripts", "Functions.py"))
source(findFile("scripts", "Actions.py"))
# source(findFile("scripts", "object_id.py"))
keyAction(excel)
| [
"asthagaur@danfoss.com"
] | asthagaur@danfoss.com |
35eb2ce1fad7f733c2a194f4f9d8665ea178a9f2 | 7ed2994d49ee68b1cfc6d9b905ccb38dba9d2495 | /api/lib/python3.7/__future__.py | 261e4dc37103123e7b2bb3c5c995763e292899c4 | [] | no_license | zarrinan/NLP_Support_Groups_Flask_API | 9aed1741d23d7cf12fecebdfe23fd8a941ab69f1 | d1aa78c0fe2690bed4c77c540b79cc948d1a9bff | refs/heads/master | 2020-05-17T05:07:31.286994 | 2019-06-26T19:49:07 | 2019-06-26T19:49:07 | 183,524,889 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 52 | py | /Users/zarrina/anaconda3/lib/python3.7/__future__.py | [
"zarrina_@hotmail.com"
] | zarrina_@hotmail.com |
d145b16c5c3741f878f24d13a9c8083f39ac11ac | 196dc6e64e556e6375fbea9ca4b170167ab75302 | /FinalExam/mainLinkedList.py | 5c2b644e83be86691842bfcf5514672cdd18418a | [] | no_license | sccaster/pythonProgams | ed1867976d5209efd74d0534cb038b7fa276824c | 99cb40f7804c5a0228c4bfb9bc75c0d9fd15b276 | refs/heads/master | 2020-12-09T12:42:37.676162 | 2020-01-11T22:39:40 | 2020-01-11T22:39:40 | 233,306,765 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 175 | py | from bst import BST
def main():
    """Build a BST from a shuffled list of digit strings and print an inorder walk."""
    # Insertion order is deliberately shuffled so the tree is non-trivial.
    digit_strings = ["5", "3", "1", "6", "2", "8", "0", "9", "7", "4"]
    tree = BST(list_of_objects=digit_strings)
    tree.inorder()
# Run the demo when this file is executed.
main()
| [
"noreply@github.com"
] | noreply@github.com |
8d96b4fde7dc180a58208bbd4c48817b88ff107a | 6cc8773a178d4ab3d0c720296232f1d25451c9fd | /proba.py | 4a215e867cd17725d8a5128869124728a3ab5349 | [] | no_license | miks4/python | f5dad80552b768466d4f4e6af96bca6ecfcf3bfa | 60c7e127072cfee3f4c6af6d767ec66ddf89f167 | refs/heads/master | 2022-06-22T04:02:52.151869 | 2020-05-09T18:19:29 | 2020-05-09T18:19:29 | 259,106,253 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,029 | py | class Employee:
name = "Ben"
designation = "Sales executive"
saleMadeThisWeek = 6
numberOfworkingHours = 40
def hasAchivedTarget(self):
if self.saleMadeThisWeek >= 5:
print("target has been achived")
else:
print("target has not been achived")
def employeeDetalis(self):
self.Name = "lol"
print("name :", self.Name)
age = 28
print("age:",age)
    def printEmployeeDetalis(self):
        """Print the name stored by employeeDetalis (must have run first)."""
        print("printing ina anothe method")
        # self.Name is only set by employeeDetalis(); calling this first
        # raises AttributeError.
        print("name:", self.Name)
        print("age: ,age")
def employeeDetalis1(self) :
self.firstName = "ben"
    @staticmethod
    def Welcoomemessage():
        """Print a static greeting (misspelled name kept; the script calls it)."""
        print("welcome")
    def displayEmployeeDetalis(self):
        """Print the first name set in __init__ (or by employeeDetalis1)."""
        print(self.firstName)
def __init__(self, name):
self.firstName = name
# Demo script exercising the Employee class.
# NOTE(review): `Employee()` only works if __init__ provides a default for
# `name`; with the signature `__init__(self, name)` this line raises
# TypeError — confirm against the class definition above.
empployeeOne = Employee()
print(empployeeOne.name)
# hasAchivedTarget() prints and returns None, so this prints "None" too.
print(empployeeOne.hasAchivedTarget())
empployeeTwo = Employee()
print(empployeeTwo.name)
print("number of working hours for employe one is")
print(empployeeOne.numberOfworkingHours)
print("number of working hours for employe two is")
print(empployeeTwo.numberOfworkingHours)
# Rebinding the class attribute changes the value seen by every instance
# that has not shadowed it.
Employee.numberOfworkingHours = 45
print("number of working hours for employe one after this funciton:")
print(empployeeOne.numberOfworkingHours)
print("number of working hours for employe two after this funciton:")
print(empployeeTwo.numberOfworkingHours)
# Assigning through an instance creates an instance attribute that shadows
# the class attribute for that instance only.
empployeeOne.name = "john"
empployeeTwo.name = "mary"
print("emplyee one:")
print(empployeeOne.name)
print("empployee Two:")
print(empployeeTwo.name)
empployeeOne.numberOfworkingHours = 40
print("number of working hours for employee one:")
print(empployeeOne.numberOfworkingHours)
# employeeTwo still sees the class attribute (45).
print("number of working hours for employe two after this funciton:")
print(empployeeTwo.numberOfworkingHours)
employee = Employee("mark")
employee.employeeDetalis()
employee.printEmployeeDetalis()
# employeeDetalis1() overwrites firstName ("mark" -> "ben").
employee.employeeDetalis1()
print(employee.firstName)
employee.Welcoomemessage()
employee.displayEmployeeDetalis()
"62470347+miks4@users.noreply.github.com"
] | 62470347+miks4@users.noreply.github.com |
9db26fb7dad810ee471a57378bf7b950550c9a78 | e1a2c6ed4a4b93b4697974e3b0a32a4d67daa6f6 | /venv/Lib/site-packages/pybrain3/rl/environments/ode/instances/ccrl.py | f868c93f0e79f79e82bdefa752c7d5da13efb64f | [
"MIT"
] | permissive | ishatserka/MachineLearningAndDataAnalysisCoursera | cdf0f23a58617e17d6b938e3a9df17daae8585e4 | e82e772df2f4aec162cb34ac6127df10d14a625a | refs/heads/master | 2021-09-11T01:39:26.228392 | 2018-04-05T14:33:39 | 2018-04-05T14:33:39 | 117,153,454 | 0 | 0 | MIT | 2018-03-27T05:20:37 | 2018-01-11T21:05:33 | Python | UTF-8 | Python | false | false | 6,146 | py | __author__ = 'Frank Sehnke, sehnke@in.tum.de'
from pybrain3.rl.environments.ode import ODEEnvironment, sensors, actuators
import imp
import xode #@UnresolvedImport
import ode #@UnresolvedImport
import sys
from scipy import array, asarray
class CCRLEnvironment(ODEEnvironment):
    """CCRL arm environment loaded from an XODE scene file.

    Each simulation step resets two contact counters maintained by the
    collision callback: `tableSum` (contacts involving the 'plate' geom that
    are not with an 'objectP*' geom) and `glasSum` (contacts between a
    'pressLeft*' geom and an 'objectP*' geom).
    """

    def __init__(self, xodeFile="ccrlGlas.xode", renderer=True, realtime=False, ip="127.0.0.1", port="21590", buf='16384'):
        ODEEnvironment.__init__(self, renderer, realtime, ip, port, buf)
        # load model file; `pert` shifts the 'objectP*' bodies when loading
        self.pert = asarray([1.0, 0.0, 0.0])
        self.loadXODE(imp.find_module('pybrain')[1] + "/rl/environments/ode/models/" + xodeFile)
        # standard sensors and actuators
        self.addSensor(sensors.JointSensor())
        self.addSensor(sensors.JointVelocitySensor())
        self.addActuator(actuators.JointActuator())
        #set act- and obsLength, the min/max angles and the relative max touques of the joints
        self.actLen = self.indim
        self.obsLen = len(self.getSensors())
        #ArmLeft, ArmRight, Hip, PevelLeft, PevelRight, TibiaLeft, TibiaRight, KneeLeft, KneeRight, FootLeft, FootRight
        self.tourqueList = array([0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.8, 0.8, 0.8, 0.5, 0.5, 0.1],)
        #self.tourqueList=array([0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],)
        self.cHighList = array([0.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 0.0, 1.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.9],)
        self.cLowList = array([-1.0, -1.0, -1.0, -1.5, -1.0, -1.0, -1.0, -0.7, -1.0, 0.0, -1.0, -1.5, -1.0, -1.0, -1.0, 0.0],)
        self.stepsPerAction = 1

    def step(self):
        """Advance the simulation one step after resetting the contact counters."""
        # Detect collisions and create contact joints
        self.tableSum = 0
        self.glasSum = 0
        ODEEnvironment.step(self)

    def _near_callback(self, args, geom1, geom2):
        """Callback function for the collide() method.
        This function checks if the given geoms do collide and
        creates contact joints if they do."""
        # only check parse list, if objects have name
        if geom1.name is not None and geom2.name is not None:
            # Preliminary checking, only collide with certain objects
            for p in self.passpairs:
                g1 = False
                g2 = False
                for x in p:
                    g1 = g1 or (geom1.name.find(x) != -1)
                    g2 = g2 or (geom2.name.find(x) != -1)
                if g1 and g2:
                    return()
            # Check if the objects do collide
            contacts = ode.collide(geom1, geom2)
            # Update the plate/glass contact counters for both geom orderings.
            tmpStr = geom2.name[:-2]
            handStr = geom1.name[:-1]
            if geom1.name == 'plate' and tmpStr != 'objectP':
                self.tableSum += len(contacts)
            if tmpStr == 'objectP' and handStr == 'pressLeft':
                if len(contacts) > 0: self.glasSum += 1
            tmpStr = geom1.name[:-2]
            handStr = geom2.name[:-1]
            if geom2.name == 'plate' and tmpStr != 'objectP':
                self.tableSum += len(contacts)
            if tmpStr == 'objectP' and handStr == 'pressLeft':
                if len(contacts) > 0: self.glasSum += 1
            # Create contact joints
            world, contactgroup = args
            for c in contacts:
                p = c.getContactGeomParams()
                # parameters from Niko Wolf
                c.setBounce(0.2)
                c.setBounceVel(0.05) #Set the minimum incoming velocity necessary for bounce
                c.setSoftERP(0.6) #Set the contact normal "softness" parameter
                c.setSoftCFM(0.00005) #Set the contact normal "softness" parameter
                c.setSlip1(0.02) #Set the coefficient of force-dependent-slip (FDS) for friction direction 1
                c.setSlip2(0.02) #Set the coefficient of force-dependent-slip (FDS) for friction direction 2
                c.setMu(self.FricMu) #Set the Coulomb friction coefficient
                j = ode.ContactJoint(world, contactgroup, c)
                j.name = None
                j.attach(geom1.getBody(), geom2.getBody())

    def loadXODE(self, filename, reload=False):
        """ loads an XODE file (xml format) and parses it. """
        # Bug fix: the `file()` builtin was removed in Python 3 (this module
        # otherwise targets Python 3, e.g. print() calls); use open().
        f = open(filename)
        self._currentXODEfile = filename
        p = xode.parser.Parser()
        self.root = p.parseFile(f)
        f.close()
        try:
            # filter all xode "world" objects from root, take only the first one
            world = [x for x in self.root.getChildren() if isinstance(x, xode.parser.World)][0]
        except IndexError:
            # malicious format, no world tag found
            print("no <world> tag found in " + filename + ". quitting.")
            sys.exit()
        self.world = world.getODEObject()
        self._setWorldParameters()
        try:
            # filter all xode "space" objects from world, take only the first one
            space = [x for x in world.getChildren() if isinstance(x, xode.parser.Space)][0]
        except IndexError:
            # malicious format, no space tag found
            print("no <space> tag found in " + filename + ". quitting.")
            sys.exit()
        self.space = space.getODEObject()
        # load bodies and geoms for painting
        self.body_geom = []
        self._parseBodies(self.root)
        # shift all perturbable objects ("objectP*") by the current offset
        for (body, _) in self.body_geom:
            if hasattr(body, 'name'):
                tmpStr = body.name[:-2]
                if tmpStr == "objectP":
                    body.setPosition(body.getPosition() + self.pert)
        if self.verbosity > 0:
            print("-------[body/mass list]-----")
            for (body, _) in self.body_geom:
                try:
                    print(body.name, body.getMass())
                except AttributeError:
                    print("<Nobody>")
        # now parse the additional parameters at the end of the xode file
        self.loadConfig(filename, reload)

    def reset(self):
        """Reset the simulation and restore the default perturbation offset."""
        ODEEnvironment.reset(self)
        self.pert = asarray([1.0, 0.0, 0.0])
# Smoke test: run the environment forever, resetting every 1000 steps.
if __name__ == '__main__' :
    w = CCRLEnvironment()
    while True:
        w.step()
        if w.stepCounter == 1000: w.reset()
| [
"shatserka@gmail.com"
] | shatserka@gmail.com |
4a1c19886f6f6b27415fe252cd755cf72bc62216 | 1ac5d22179a64a384f7215cd341e37fed3fb7aee | /zmsavings/core.py | 96f9ebeb4491e9b03b984e6b78da2dc4d901fdf3 | [
"Apache-2.0"
] | permissive | vleseg/zmsavings | 0a3dae3da6da6b542b432d1fda18446b2e65c872 | eb41f516fae2a7d97e107de682dbe15fa6a85719 | refs/heads/master | 2021-07-10T00:40:01.202071 | 2017-12-19T15:09:19 | 2017-12-19T15:09:19 | 96,619,318 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 661 | py | from data.model import Account, Goal, ProgressiveTotal, Transaction
def _select_transactions_for_goal(goal):
    """Return the transactions affecting the goal's account since its start date."""
    # Resolve the account by name; assumes exactly one match exists
    # (IndexError otherwise) -- TODO confirm with the model layer.
    account = Account.select(lambda a: a.name == goal.account.name)[0]
    # Keep transactions that move money into or out of that account on or
    # after the goal's start date.
    return Transaction.select(
        lambda t:
        (t.income_account == account or t.outcome_account == account) and
        t.date >= goal.start_date)
def main():
    """Build, calculate and visualize a progressive total for every goal."""
    progressive_totals = [
        ProgressiveTotal(goal, _select_transactions_for_goal(goal))
        for goal in Goal.all()
    ]
    for progressive_total in progressive_totals:
        progressive_total.calculate()
        progressive_total.visualize()


if __name__ == '__main__':
    main()
| [
"vlesiil@yandex.ru"
] | vlesiil@yandex.ru |
55af1807d0651e3ce77d75f84c95801118d2aacc | d8c2cf1249c58b5f843031450db2f0f8733f85e8 | /todo/urls.py | 1a013e167082d76a6e64cdea74f08df97dddf656 | [
"MIT"
] | permissive | guluzadef/Instaexample | 80849e8a98f6e75b256e8e1d409793a490ea1a53 | 9c74a3e3ac8b523bbccd0e2e6c769c40bf6b3406 | refs/heads/master | 2020-07-01T20:12:30.928779 | 2019-09-11T16:59:45 | 2019-09-11T16:59:45 | 201,284,824 | 2 | 0 | MIT | 2019-09-11T16:54:56 | 2019-08-08T15:21:17 | Python | UTF-8 | Python | false | false | 972 | py | """todo URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.2/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path, include
from django.conf import settings
from django.conf.urls.static import static
urlpatterns = [
path('admin/', admin.site.urls),
path('', include("todo_app.urls")),
]
if settings.DEBUG:
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
| [
"you@example.com"
] | you@example.com |
1f5f060e91a1d774b0d7b3660bffef7a99aac54f | 863e4c77d31fd25fca98a04ca2db4f0b4adc6108 | /configs/src/main.py | 5e99b2da9e4339e6b8dadd4e0670e537bdfb2826 | [] | no_license | ValentinMouret/playground | 075edb9969583065eddf8c441aa225bc5248ea51 | 25666840369de85c6323382da75d905010dbae0b | refs/heads/master | 2020-07-07T15:18:50.888493 | 2019-08-20T13:55:20 | 2019-08-20T13:55:20 | 203,386,974 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 601 | py | import dataclasses
import pyhocon
class Config:
    """Base class that can build itself from a pyhocon ConfigTree."""
    @classmethod
    def from_config_tree(cls, conf: pyhocon.ConfigTree):
        # Flatten the tree into a plain (ordered) dict and pass it to the
        # subclass constructor as keyword arguments.
        return cls(**conf.as_plain_ordered_dict())
@dataclasses.dataclass
class ImportantConfig(Config):
    """Example config with a name and a nested destination mapping."""
    name: str
    # Nested section; presumably arbitrary key/value pairs -- TODO confirm.
    destination: dict
def main():
    """Show two ways to build an ImportantConfig: plain dict vs. ConfigTree."""
    raw_conf = {"name": "foo", "destination": {"fizz": "buzz"}}

    # Directly from keyword arguments.
    print(ImportantConfig(**raw_conf))

    # Through pyhocon via the Config.from_config_tree helper.
    tree = pyhocon.ConfigFactory.from_dict(raw_conf)
    print(ImportantConfig.from_config_tree(tree))


if __name__ == "__main__":
    main()
| [
"valentin.mou@gmail.com"
] | valentin.mou@gmail.com |
bf799d87050ee17a2efe9205421a451ddbc5bbb3 | f0b741f24ccf8bfe9bd1950425d83b6291d21b10 | /components/google-cloud/google_cloud_pipeline_components/container/v1/bigquery/ml_reconstruction_loss/launcher.py | b0671efb65d1838f7599a10b484a9e7483666bb0 | [
"Apache-2.0"
] | permissive | kubeflow/pipelines | e678342b8a325559dec0a6e1e484c525fdcc8ce8 | 3fb199658f68e7debf4906d9ce32a9a307e39243 | refs/heads/master | 2023-09-04T11:54:56.449867 | 2023-09-01T19:07:33 | 2023-09-01T19:12:27 | 133,100,880 | 3,434 | 1,675 | Apache-2.0 | 2023-09-14T20:19:06 | 2018-05-12T00:31:47 | Python | UTF-8 | Python | false | false | 3,115 | py | # Copyright 2022 The Kubeflow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""GCP launcher for Bigquery jobs based on the AI Platform SDK."""
import argparse
import logging
import sys
from google_cloud_pipeline_components.container.v1.bigquery.ml_reconstruction_loss import remote_runner
from google_cloud_pipeline_components.container.v1.gcp_launcher.utils import parser_util
def _parse_args(args):
  """Parse command line arguments.

  Starts from the launcher's default parser and registers the flags that are
  conditionally required by the ML reconstruction-loss launcher.
  """
  parser, _ = parser_util.parse_default_args(args)
  # (flag name, required?) specs for the conditionally required arguments:
  # - executor_input: needed because this component emits output artifacts.
  # - table_name: only needed for the BigQuery tvf model job component.
  # - query_statement: only needed for the BigQuery predict model job
  #   component.
  conditional_flags = (
      ('executor_input', True),
      ('job_configuration_query_override', True),
      ('model_name', True),
      ('table_name', False),
      ('query_statement', False),
  )
  for flag_name, is_required in conditional_flags:
    parser.add_argument(
        '--' + flag_name,
        dest=flag_name,
        type=str,
        required=is_required,
        default=argparse.SUPPRESS,
    )
  parsed_args, _ = parser.parse_known_args(args)
  return vars(parsed_args)
def main(argv):
  """Main entry.

  Expected input args are as follows:
    Project - Required. The project of which the resource will be launched.
    Region - Required. The region of which the resource will be launched.
    Type - Required. GCP launcher is a single container. This Enum will
      specify which resource to be launched.
    Request payload - Required. The full serialized json of the resource
      spec. Note this can contain the Pipeline Placeholders.
    gcp_resources - placeholder output for returning job_id.

  Args:
    argv: A list of system arguments.
  """
  launcher_args = _parse_args(argv)
  job_type = launcher_args['type']
  if job_type == 'BigqueryMLReconstructionLossJob':
    logging.info('Job started for type: ' + job_type)
    remote_runner.bigquery_ml_reconstruction_loss_job(**launcher_args)
  else:
    raise ValueError('Incorrect job type: ' + job_type)
# Script entry point: forward CLI arguments (minus the program name).
if __name__ == '__main__':
  main(sys.argv[1:])
| [
"nobody@google.com"
] | nobody@google.com |
9bee571ee4fb8a3ccac50e1f34524492497a2d8f | 7d9145ad090071f3c9326a9ad7464d33f1c95ecc | /lookup.py | 5bfdf7007b9598155c4d6b844cfbf1688d1ec370 | [] | no_license | kamidzi/bcpc-build | a250b9bd0a2a25fbe6af3970cfd2c1e45b63aa37 | 81b2ba7856c52c6beea36b69eedb038c948cd0c8 | refs/heads/master | 2020-06-10T03:25:08.890345 | 2019-06-24T17:10:26 | 2019-06-24T19:11:42 | 193,567,075 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 159 | py | from bcpc_build.db import utils
from bcpc_build.build_unit import BuildUnit
# Ad-hoc lookup: fetch a single BuildUnit by primary key and print it.
build_id = 'i'  # renamed from `id`, which shadowed the builtin of that name
session = utils.Session()
unit = session.query(BuildUnit).get(build_id)
print(unit)
| [
"kmidzi@bloomberg.net"
] | kmidzi@bloomberg.net |
9e78bb7a62c7ff5743be037816b12a9c2316c086 | 82fdb2f3baeb4f08799d93c4be8d8c829f092415 | /tests/test_policies.py | 52b3f8e8e75389b6127427521a6f3c7145b58814 | [
"Apache-2.0"
] | permissive | velamurip/rasa_core | 915f815772e2b596f837f0e1af511e829cc28e3e | f3dbb70d0bb748628ab238eded17a8f5e09279e2 | refs/heads/master | 2021-05-16T04:22:04.310610 | 2017-10-05T09:53:22 | 2017-10-05T09:53:22 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,838 | py | from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import numpy as np
import pytest
from rasa_core.channels import UserMessage
from rasa_core.domain import TemplateDomain
from rasa_core.featurizers import BinaryFeaturizer
from rasa_core.policies.keras_policy import KerasPolicy
from rasa_core.policies.memoization import MemoizationPolicy
from rasa_core.policies.scoring_policy import ScoringPolicy
from rasa_core.trackers import DialogueStateTracker
from rasa_core.training_utils import extract_training_data_from_file, \
extract_stories_from_file
def train_data(max_history, domain):
    """Featurize the default-domain training stories for the given history length."""
    story_file = "data/dsl_stories/stories_defaultdomain.md"
    return extract_training_data_from_file(
        story_file,
        domain=domain,
        max_history=max_history,
        remove_duplicates=True,
        featurizer=BinaryFeaturizer(),
    )
# We are going to use class style testing here since unfortunately pytest
# doesn't support using fixtures as arguments to its own parameterize yet
# (hence, we can't train a policy, declare it as a fixture and use the different
# fixtures of the different policies for the functional tests). Therefore, we
# are going to reverse this and train the policy within a class and collect the
# tests in a base class.
class PolicyTestCollection(object):
    """Tests every policy needs to fulfill.

    Each policy can declare further tests on its own."""

    max_history = 3  # this is the amount of history we test on

    def create_policy(self):
        # Subclasses must return a fresh, untrained policy instance.
        raise NotImplementedError

    @pytest.fixture(scope="module")
    def trained_policy(self):
        """A policy trained once per module on the default-domain stories."""
        default_domain = TemplateDomain.load("examples/default_domain.yml")
        policy = self.create_policy()
        X, y = train_data(self.max_history, default_domain)
        policy.max_history = self.max_history
        policy.featurizer = BinaryFeaturizer()
        policy.train(X, y, default_domain)
        return policy

    def test_persist_and_load(self, trained_policy, default_domain, tmpdir):
        # Persisting and re-loading must yield identical predictions on
        # every training story.
        trained_policy.persist(tmpdir.strpath)
        loaded = trained_policy.__class__.load(tmpdir.strpath,
                                               trained_policy.featurizer,
                                               trained_policy.max_history)
        stories = extract_stories_from_file(
            "data/dsl_stories/stories_defaultdomain.md", default_domain)
        for story in stories:
            tracker = DialogueStateTracker("default", default_domain.slots)
            dialogue = story.as_dialogue("default", default_domain)
            tracker.update_from_dialogue(dialogue)
            predicted_probabilities = loaded.predict_action_probabilities(
                tracker, default_domain)
            actual_probabilities = trained_policy.predict_action_probabilities(
                tracker, default_domain)
            assert predicted_probabilities == actual_probabilities

    def test_prediction_on_empty_tracker(self, trained_policy, default_domain):
        # On a fresh tracker the policy must still emit one valid
        # probability per domain action.
        tracker = DialogueStateTracker(UserMessage.DEFAULT_SENDER,
                                       default_domain.slots,
                                       default_domain.topics,
                                       default_domain.default_topic)
        probabilities = trained_policy.predict_action_probabilities(
            tracker, default_domain)
        assert len(probabilities) == default_domain.num_actions
        assert max(probabilities) <= 1.0
        assert min(probabilities) >= 0.0

    def test_persist_and_load_empty_policy(self, tmpdir):
        # An untrained policy must survive a persist/load round trip.
        empty_policy = self.create_policy()
        empty_policy.persist(tmpdir.strpath)
        loaded = empty_policy.__class__.load(tmpdir.strpath, BinaryFeaturizer(),
                                             empty_policy.max_history)
        assert loaded is not None
class TestKerasPolicy(PolicyTestCollection):
    """Run the shared policy test collection against KerasPolicy."""

    @pytest.fixture(scope="module")
    def create_policy(self):
        return KerasPolicy()
class TestScoringPolicy(PolicyTestCollection):
    """Run the shared policy test collection against ScoringPolicy."""

    @pytest.fixture(scope="module")
    def create_policy(self):
        return ScoringPolicy()
class TestMemoizationPolicy(PolicyTestCollection):
    """Run the shared collection against MemoizationPolicy, plus recall checks."""

    @pytest.fixture(scope="module")
    def create_policy(self):
        return MemoizationPolicy()

    def test_memorise(self, trained_policy, default_domain):
        X, y = train_data(self.max_history, default_domain)
        trained_policy.train(X, y, default_domain)

        # Every training example must be recalled exactly.
        for row in range(X.shape[0]):
            assert trained_policy.recall(X[row, :, :], default_domain) == y[row]

        # A random, unseen featurization must not be recalled.
        unseen_feature = np.random.randn(default_domain.num_features)
        assert trained_policy.recall(unseen_feature, default_domain) is None
| [
"tom.bocklisch@scalableminds.com"
] | tom.bocklisch@scalableminds.com |
224d192a356f25f72640dd130596fa1cc7f853c8 | fb1fd30098fd4dd7f11e614fbcd19bda5e0414bd | /randNum.py | 32dc0504c2cbabfba7c0c7b3ba6838a1d01a160a | [] | no_license | kali-lg/python | 6ceb452ae7fd611bb6b6b99a4be4404f3fd6b2de | 0363dba3e224ee2044dbe3216289c0245df9c5c0 | refs/heads/master | 2021-01-10T09:37:58.103674 | 2016-03-07T13:09:57 | 2016-03-07T13:09:57 | 53,310,186 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 454 | py | #!/usr/bin/python
import random
num = random.randint(0, 100)
while True:
try:
guess = int(raw_input("Please Enter number 1~100:\n"))
except ValueError, e:
print "Please Enter correct number, your number is wrong type."
continue
if guess > num:
print "Guess Bigger:", guess
elif guess < num:
print "Gusee Smaller:", guess
else:
print "Guess OK, Game Over:"
break
print "\n"
| [
"root@localhost.localdomain"
] | root@localhost.localdomain |
579d13c29895c97ff77f3425bac31cb6d6070857 | 1e6e3bb707920fdb01ebca23eaf81097c558d918 | /tests/system/action/test_internal_actions.py | cc7ffd01313c71744500855191c73bb153e2160b | [
"MIT"
] | permissive | OpenSlides/openslides-backend | cbd24589f82a6f29bde02611610511870bb6abbf | d8511f5138db4cc5fe4fa35e2a0200f766bd49c5 | refs/heads/main | 2023-08-23T11:54:25.064070 | 2023-08-22T11:15:45 | 2023-08-22T11:15:45 | 231,757,840 | 6 | 22 | MIT | 2023-09-14T16:23:41 | 2020-01-04T12:17:38 | Python | UTF-8 | Python | false | false | 7,783 | py | from tempfile import NamedTemporaryFile
from typing import Any, Dict, Optional
from openslides_backend.http.views.action_view import (
INTERNAL_AUTHORIZATION_HEADER,
ActionView,
)
from openslides_backend.http.views.base_view import RouteFunction
from openslides_backend.shared.env import DEV_PASSWORD
from openslides_backend.shared.util import ONE_ORGANIZATION_FQID
from tests.system.util import disable_dev_mode, get_route_path
from tests.util import Response
from .base import BaseActionTestCase
from .util import get_internal_auth_header
class BaseInternalRequestTest(BaseActionTestCase):
    """
    Provides the ability to use the anonymous client to call an internal route.
    """

    # Concrete subclasses assign the route to call.
    route: RouteFunction

    def call_internal_route(
        self,
        payload: Any,
        internal_auth_password: Optional[str] = DEV_PASSWORD,
    ) -> Response:
        """POST `payload` to the configured route via the anonymous client.

        Passing `internal_auth_password=None` sends the request without the
        internal authorization header.
        """
        headers = (
            {}
            if internal_auth_password is None
            else get_internal_auth_header(internal_auth_password)
        )
        return self.anon_client.post(
            get_route_path(self.route), json=payload, headers=headers
        )
class BaseInternalPasswordTest(BaseInternalRequestTest):
    """
    Sets up a server-side password for internal requests.

    The password is written to a temporary file and the backend is pointed
    at it via the INTERNAL_AUTH_PASSWORD_FILE environment variable.
    """

    internal_auth_password: str = "Q2^$2J9QXimW6lDPoGj4"

    def setUp(self) -> None:
        super().setUp()
        # Write the password to a temp file the backend can read.
        self.secret_file = NamedTemporaryFile()
        self.secret_file.write(self.internal_auth_password.encode("ascii"))
        self.secret_file.seek(0)
        self.app.env.vars["INTERNAL_AUTH_PASSWORD_FILE"] = self.secret_file.name

    def tearDown(self) -> None:
        super().tearDown()
        # Unset the variable; closing the NamedTemporaryFile deletes it.
        self.app.env.vars["INTERNAL_AUTH_PASSWORD_FILE"] = ""
        self.secret_file.close()
class BaseInternalActionTest(BaseInternalRequestTest):
    """
    Base class for sending a single action to the internal action route.
    """

    route: RouteFunction = ActionView.internal_action_route

    def internal_request(
        self,
        action: str,
        data: Dict[str, Any],
        internal_auth_password: Optional[str] = DEV_PASSWORD,
    ) -> Response:
        # Wrap the single action payload in the action-route envelope.
        return super().call_internal_route(
            [{"action": action, "data": [data]}], internal_auth_password
        )
class TestInternalActionsDev(BaseInternalActionTest):
    """
    Uses the anonymous client to call the internal action route. This should skip all permission checks, so the requests
    still succeed.
    Just rudimentary tests that the actions generally succeed since if that's the case, everything should be handled
    analogously to the external case, which is already tested sufficiently in the special test cases for the actions.
    Hint: This test assumes that OPENSLIDES_DEVELOPMENT is truthy.
    """

    def test_internal_user_create(self) -> None:
        response = self.internal_request("user.create", {"username": "test"})
        self.assert_status_code(response, 200)
        # New user gets id 2 (user/1 presumably exists in the base setup).
        self.assert_model_exists("user/2", {"username": "test"})

    def test_internal_user_update(self) -> None:
        response = self.internal_request("user.update", {"id": 1, "username": "test"})
        self.assert_status_code(response, 200)
        self.assert_model_exists("user/1", {"username": "test"})

    def test_internal_user_delete(self) -> None:
        response = self.internal_request("user.delete", {"id": 1})
        self.assert_status_code(response, 200)
        self.assert_model_deleted("user/1")

    def test_internal_user_set_password(self) -> None:
        response = self.internal_request(
            "user.set_password", {"id": 1, "password": "new_password"}
        )
        self.assert_status_code(response, 200)
        model = self.get_model("user/1")
        assert self.auth.is_equals("new_password", model["password"])

    def test_internal_organization_initial_import(self) -> None:
        # Start from an empty datastore so the initial import can run.
        self.datastore.truncate_db()
        response = self.internal_request("organization.initial_import", {"data": {}})
        self.assert_status_code(response, 200)
        self.assert_model_exists(ONE_ORGANIZATION_FQID)
        self.assert_model_exists("user/1", {"username": "superadmin"})

    def test_internal_mismatching_passwords(self) -> None:
        response = self.internal_request(
            "user.create", {"username": "test"}, "wrong_pw"
        )
        self.assert_status_code(response, 401)
        self.assert_model_not_exists("user/2")

    def test_internal_no_password_in_request(self) -> None:
        response = self.internal_request("user.create", {"username": "test"}, None)
        self.assert_status_code(response, 401)
        self.assert_model_not_exists("user/2")

    def test_internal_wrong_password_in_request(self) -> None:
        response = self.internal_request("user.create", {"username": "test"}, "wrong")
        self.assert_status_code(response, 401)
        self.assert_model_not_exists("user/2")

    def test_internal_execute_stack_internal_via_public_route(self) -> None:
        # Stack-internal actions must not be reachable via the public route.
        self.datastore.truncate_db()
        response = self.request(
            "organization.initial_import", {"data": {}}, internal=False
        )
        self.assert_status_code(response, 400)
        self.assertEqual(
            response.json.get("message"),
            "Action organization.initial_import does not exist.",
        )
        self.assert_model_not_exists("organization/1")

    def test_internal_wrongly_encoded_password(self) -> None:
        # Header value is not the expected encoding of the password.
        response = self.anon_client.post(
            get_route_path(self.route),
            json=[{"action": "user.create", "data": [{"username": "test"}]}],
            headers={INTERNAL_AUTHORIZATION_HEADER: "openslides"},
        )
        self.assert_status_code(response, 400)
        self.assert_model_not_exists("user/2")
@disable_dev_mode
class TestInternalActionsProd(BaseInternalActionTest):
    """
    The same as the TestInternalActionsDev class but in prod mode.
    """

    def test_internal_no_password_on_server(self) -> None:
        # With no server-side password file configured, any internal request
        # must fail with a server error.
        response = self.internal_request(
            "user.create", {"username": "test"}, "some password"
        )
        self.assert_status_code(response, 500)
        self.assert_model_not_exists("user/2")
@disable_dev_mode
class TestInternalActionsProdWithPasswordFile(
    BaseInternalActionTest, BaseInternalPasswordTest
):
    """
    Same as TestInternalActionsProd but with a server-side password set.
    """

    def test_internal_wrong_password(self) -> None:
        response = self.internal_request("user.create", {"username": "test"}, "wrong")
        self.assert_status_code(response, 401)
        self.assert_model_not_exists("user/2")

    def test_internal_execute_public_action(self) -> None:
        response = self.internal_request(
            "user.create", {"username": "test"}, self.internal_auth_password
        )
        self.assert_status_code(response, 200)
        self.assert_model_exists("user/2")

    def test_internal_execute_stack_internal_action(self) -> None:
        # Stack-internal actions are allowed through the internal route.
        self.datastore.truncate_db()
        response = self.internal_request(
            "organization.initial_import", {"data": {}}, self.internal_auth_password
        )
        self.assert_status_code(response, 200)
        self.assert_model_exists(ONE_ORGANIZATION_FQID)

    def test_internal_execute_backend_internal_action(self) -> None:
        # Backend-internal actions stay hidden even on the internal route.
        response = self.internal_request(
            "option.create",
            {"meeting_id": 1, "text": "test"},
            self.internal_auth_password,
        )
        self.assert_status_code(response, 400)
        self.assertEqual(
            response.json.get("message"), "Action option.create does not exist."
        )
        self.assert_model_not_exists("option/1")
"noreply@github.com"
] | noreply@github.com |
7ba263dff9828d8b26aa3059286fa58d42176805 | 7e659ac00ac1557962409272d51e1ae588dab258 | /configs/official/point_sup_r50_fpn.py | e4dd48a5feb7a268f5383e955bf38097f0d673d5 | [] | no_license | easilylazy/point-sup | 4766f6c809ee36aea66daccbd4666e47cdce31d7 | 49d2e69eaac9f49d9d5df9e9612c733c96a374ad | refs/heads/main | 2023-08-25T04:43:21.170024 | 2021-10-29T07:07:44 | 2021-10-29T07:07:44 | 421,438,404 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,257 | py | # model settings
model = dict(
    type="PointSup",
    # ResNet-50 backbone with frozen stem+stage1 and frozen BN statistics.
    backbone=dict(
        type="ResNet",
        depth=50,
        num_stages=4,
        out_indices=(0, 1, 2, 3),
        frozen_stages=1,
        norm_cfg=dict(type="BN", requires_grad=True),
        norm_eval=True,
        style="pytorch",
        init_cfg=dict(type="Pretrained", checkpoint="torchvision://resnet50"),
    ),
    # FPN neck producing 5 pyramid levels of 256 channels.
    neck=dict(
        type="FPN", in_channels=[256, 512, 1024, 2048], out_channels=256, num_outs=5
    ),
    rpn_head=dict(
        type="RPNHead",
        in_channels=256,
        feat_channels=256,
        anchor_generator=dict(
            type="AnchorGenerator",
            scales=[8],
            ratios=[0.5, 1.0, 2.0],
            strides=[4, 8, 16, 32, 64],
        ),
        bbox_coder=dict(
            type="DeltaXYWHBBoxCoder",
            target_means=[0.0, 0.0, 0.0, 0.0],
            target_stds=[1.0, 1.0, 1.0, 1.0],
        ),
        loss_cls=dict(type="CrossEntropyLoss", use_sigmoid=True, loss_weight=1.0),
        loss_bbox=dict(type="L1Loss", loss_weight=1.0),
    ),
    # Point-supervised RoI head: standard box branch + point-supervised mask branch.
    roi_head=dict(
        type="PointSupRoIHead",
        bbox_roi_extractor=dict(
            type="SingleRoIExtractor",
            roi_layer=dict(type="RoIAlign", output_size=7, sampling_ratio=0),
            out_channels=256,
            featmap_strides=[4, 8, 16, 32],
        ),
        bbox_head=dict(
            type="Shared2FCBBoxHead",
            in_channels=256,
            fc_out_channels=1024,
            roi_feat_size=7,
            num_classes=80,
            bbox_coder=dict(
                type="DeltaXYWHBBoxCoder",
                target_means=[0.0, 0.0, 0.0, 0.0],
                target_stds=[0.1, 0.1, 0.2, 0.2],
            ),
            reg_class_agnostic=False,
            loss_cls=dict(type="CrossEntropyLoss", use_sigmoid=False, loss_weight=1.0),
            loss_bbox=dict(type="L1Loss", loss_weight=1.0),
        ),
        mask_roi_extractor=dict(
            type="SingleRoIExtractor",
            roi_layer=dict(type="RoIAlign", output_size=14, sampling_ratio=0),
            out_channels=256,
            featmap_strides=[4, 8, 16, 32],
        ),
        mask_head=dict(
            type="PointSupHead",
            num_convs=4,
            in_channels=256,
            conv_out_channels=256,
            num_classes=80,
            loss_mask=dict(type="CrossEntropyLoss", use_mask=True, loss_weight=1.0),
        ),
    ),
    # model training and testing settings
    train_cfg=dict(
        rpn=dict(
            assigner=dict(
                type="MaxIoUAssigner",
                pos_iou_thr=0.7,
                neg_iou_thr=0.3,
                min_pos_iou=0.3,
                match_low_quality=True,
                ignore_iof_thr=-1,
            ),
            sampler=dict(
                type="RandomSampler",
                num=256,
                pos_fraction=0.5,
                neg_pos_ub=-1,
                add_gt_as_proposals=False,
            ),
            allowed_border=-1,
            pos_weight=-1,
            debug=False,
        ),
        rpn_proposal=dict(
            nms_pre=2000,
            max_per_img=1000,
            nms=dict(type="nms", iou_threshold=0.7),
            min_bbox_size=0,
        ),
        rcnn=dict(
            assigner=dict(
                type="MaxIoUAssigner",
                pos_iou_thr=0.5,
                neg_iou_thr=0.5,
                min_pos_iou=0.5,
                match_low_quality=True,
                ignore_iof_thr=-1,
            ),
            sampler=dict(
                type="RandomSampler",
                num=512,
                pos_fraction=0.25,
                neg_pos_ub=-1,
                add_gt_as_proposals=True,
            ),
            mask_size=28,
            # Point_N: number of supervision points per instance
            # -- presumably consumed by PointSupHead; confirm in the head code.
            Point_N=10,
            pos_weight=-1,
            debug=False,
        ),
    ),
    test_cfg=dict(
        rpn=dict(
            nms_pre=1000,
            max_per_img=1000,
            nms=dict(type="nms", iou_threshold=0.7),
            min_bbox_size=0,
        ),
        rcnn=dict(
            score_thr=0.05,
            nms=dict(type="nms", iou_threshold=0.5),
            max_per_img=100,
            mask_thr_binary=0.5,
            Point_N=10,
        ),
    ),
)
| [
"easilylazy@qq.com"
] | easilylazy@qq.com |
25ec5c3a23fdcbb3fe68b62fb26e6466e9c81f4a | 94c7440e7f1d2fdbe4a1e26b9c75a94e49c14eb4 | /leetcode/371.py | 9db89c099bace2c01ca91a5174d2047ab78a610c | [
"Apache-2.0"
] | permissive | windniw/just-for-fun | 7ddea4f75cf3466a400b46efe36e57f6f7847c48 | 44e1ff60f8cfaf47e4d88988ee67808f0ecfe828 | refs/heads/master | 2022-08-18T09:29:57.944846 | 2022-07-25T16:04:47 | 2022-07-25T16:04:47 | 204,949,602 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 717 | py | """
link: https://leetcode.com/problems/sum-of-two-integers
problem: 不用 + / - 号,求 integer 类型的 a + b
solution: 由于python没有左移整形溢出这道题难度直线上升。
a + b
== 不进位 (a + b) + 进位 (a + b) << 1
== a ^ b + (a & b) << 1
持续迭代到 (a & b) << 1 为0,即不进位时, 结果为当时的 a ^ b
"""
class Solution:
def getSum(self, a: int, b: int) -> int:
max_uint = 0xffffffff
max_int = 0x7fffffff - 1
while a:
add = (a & b) << 1
b = a ^ b
a = add
add &= max_uint
b &= max_uint
return b if b <= max_int else ~(b ^ max_uint)
| [
"windniw36@gmail.com"
] | windniw36@gmail.com |
f2261d9458aaf8e65918344267e1d394e3eb7d8a | f23e2dedbdf49ef093dba351fc0c368f5446000f | /crud/firstcrud/migrations/0001_initial.py | 3fe4cc5740604e8bb3ebe1b799270242d84af55e | [] | no_license | Hrach99-dev/django_crud | c4754ebfa87813f47c64eb6c9ca79e3ee39b2082 | cbc56d112f3316e5acc25c477235f89114818ea1 | refs/heads/main | 2023-06-03T17:34:21.582803 | 2021-06-29T16:19:30 | 2021-06-29T16:19:30 | 381,418,419 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 548 | py | # Generated by Django 3.2.4 on 2021-06-29 15:19
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='User',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=64)),
('surname', models.CharField(max_length=64)),
],
),
]
| [
"hrach.ghazaryan.web@gmail.com"
] | hrach.ghazaryan.web@gmail.com |
65891c8750b9d10b031594b8b35080a55aaa6663 | 36409b78394002e5d6e9228ca256fd4654b38f80 | /random walk/src/BD_LERW.py | 225bf177733ba635a79943248f53c2381ba65975 | [] | no_license | xiaogang00/Mining-in-Social-Networks | fa383494fd56124096c60317af2b30373c0d4aac | 87ab6f29ae148170d03470987299c7ea812d1dab | refs/heads/master | 2020-12-02T16:22:59.938930 | 2017-08-30T01:58:33 | 2017-08-30T01:58:33 | 96,543,382 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 3,050 | py | #!/usr/bin/python
#
# An implementation of a Bidirectional Loop Erased Random Walk (LERW)
# from a cylinder with reflecting boundaries on the left
# and open boundaries on the right.
# PNG output of a single trajectory.
# Habib Rehmann and Gunnar Pruessner
#
import random
import numpy as np
import matplotlib.pyplot as plt
from copy import deepcopy
seed = 10 # random seed
Length = 200 # length of the cyclinder
Circ = 200 # circumference of cyclinder
x = 0 # x coordinate of starting location
# y coordinate of starting location. Origin is at centre of square
y = Circ / 2
#在这里一开始的时候,x是在原点,而y是在中间的
s = 0 # Step number.
realizations = 8
trajectory = [] # List of the x coordinates of all points visited.
# (Length x Circ) 2D array of zeros
lattice = np.zeros((Length, Circ), dtype=int)
random.seed(seed)
# Plot config
dpi = 300
fig, ax = plt.subplots()
fig.set_size_inches(3, Circ * 3. / Length)
ax.set_xlim(0, Length - 1)
ax.set_ylim(0, Circ - 1)
ax.get_xaxis().set_visible(False)
ax.get_yaxis().set_visible(False)
def plot(LERW, c='g', Length = Length, Circ = Circ):
for pos in range(len(LERW)):
x, y = LERW[pos]
#不能画在边缘和角落
if (x == Length) or (x == 0) or (y == Circ) or (y == Circ) or (y == 0):
LERW[pos] = (np.nan, np.nan)
pos += 1
plt.plot(*zip(*LERW), color=c, linewidth=0.2)
# Generate a randomwalk
for i in range(realizations):
s = 0
x = 0 # x coordinate of starting location
y = Circ / 2 # y coordinate of starting location
#lattice在这里是格子的线
lattice = np.zeros((Length, Circ), dtype=int)
trajectory = []
while True:
s += 1
#下面相当于在x,y的方向上产生随机数
if (bool(random.getrandbits(1))):
if (bool(random.getrandbits(1))):
x += 1
else:
x -= 1
else:
if (bool(random.getrandbits(1))):
y += 1
else:
y -= 1
if (x >= Length):
break
elif (x < 0):
x = 0
if (y >= Circ):
y -= Circ
elif (y < 0):
y += Circ
lattice[x][y] += 1
trajectory.append((x, y))
x0, y0, pos = None, None, 0
# Loop erasure
LERW_LeftRight = deepcopy(trajectory)
lcpy = deepcopy(lattice)
x0, y0 = None, None
pos = 0
while pos < len(LERW_LeftRight):
x, y = LERW_LeftRight[pos]
if lcpy[x][y] > 1 and (not x0):
x0, y0 = x, y
pos0 = pos
elif (x == x0) and (y == y0) and (lcpy[x][y] == 1):
del LERW_LeftRight[pos0:pos]
x0, y0 = None, None
pos = pos0
lcpy[x][y] -= 1
pos += 1
plot(LERW_LeftRight)
# Loop erasure (tranversal from right to left)
LERW_RightLeft = deepcopy(trajectory[::-1])
lcpy = deepcopy(lattice)
x0, y0 = None, None
pos = 0
while pos < len(LERW_RightLeft):
x, y = LERW_RightLeft[pos]
if lcpy[x][y] > 1 and (not x0):
x0, y0 = x, y
pos0 = pos
elif (x == x0) and (y == y0) and (lcpy[x][y] == 1):
del LERW_RightLeft[pos0:pos]
x0, y0 = None, None
pos = pos0
lcpy[x][y] -= 1
pos += 1
plot(LERW_RightLeft, 'r')
# Plot random walk
plt.savefig(__file__[:-3]+".png", bbox_inches="tight", dpi=dpi)
| [
"872310734@qq.com"
] | 872310734@qq.com |
bb66110d1d24a5a636d80d3a562ef96f129247ea | 3e01b2478760ec1c936418c1c7752a61327c90fe | /experiment_scripts/train_style_xfer_encoder.py | 39584163e9126cc42428b99871b23897b0d9c463 | [
"Apache-2.0"
] | permissive | cwindolf/ffn | 467d83c1f2328f00372ac0070f4f94c35221e958 | 2ab3ba0fb3574718d83523283ba6b71e047e5a12 | refs/heads/master | 2021-07-06T20:05:34.559288 | 2020-08-26T01:45:41 | 2020-08-26T01:45:41 | 167,216,515 | 2 | 1 | Apache-2.0 | 2019-12-27T20:05:46 | 2019-01-23T16:37:55 | Python | UTF-8 | Python | false | false | 5,846 | py | import os.path
import numpy as np
import tensorflow as tf
from absl import app
from absl import flags
from secgan.training import inputs
from secgan import models
# ------------------------------- flags -------------------------------
# Model parameters
flags.DEFINE_integer('layer', None, 'Depth of the *coders.')
flags.DEFINE_integer('batch_size', 4, '')
flags.DEFINE_float('pixel_loss_lambda', 1e-3, 'Pixel loss coefficient')
flags.DEFINE_float('encoding_loss_lambda', 1.0, 'Encoding loss coefficient')
flags.DEFINE_integer('fov_len', 33, 'Length of FOV on each axis')
flags.DEFINE_integer('ffn_delta', 8, '')
flags.DEFINE_float('seed_pad', 0.5, '')
flags.DEFINE_float('seed_init', 0.95, '')
flags.DEFINE_integer('depth', 12, 'Depth of original FFN model.')
# Data
flags.DEFINE_string(
'volume_spec', None, 'Volume to encode and train encoder on.'
)
flags.DEFINE_float('image_mean', 128.0, '')
flags.DEFINE_float('image_stddev', 33.0, '')
# Model storage
flags.DEFINE_string('train_dir', None, 'Where to save encoder checkpoints.')
flags.DEFINE_string(
'decoder_ckpt', None, 'Restore these weights into our fixed decoder'
)
flags.DEFINE_string(
'ffn_ckpt', None, 'Load this up to initialize the encoder, if provided'
)
flags.DEFINE_integer('max_steps', 10000, 'Number of encoder train steps.')
FLAGS = flags.FLAGS
# ------------------------------- main --------------------------------
def main(argv):
# Parse args a little -----------------------------------------
fov_size = [FLAGS.fov_len, FLAGS.fov_len, FLAGS.fov_len]
with open(os.path.join(FLAGS.train_dir, 'flagfile.txt'), 'w') as ff:
ff.write(FLAGS.flags_into_string())
# Data pipeline -----------------------------------------------
fov_batches = inputs.random_fovs(
FLAGS.volume_spec,
FLAGS.batch_size,
fov_size,
FLAGS.image_mean,
FLAGS.image_stddev,
)
# Make a batch of "init" seeds to feed the encoder.
fixed_seed_batch = inputs.fixed_seed_batch(
FLAGS.batch_size, fov_size, FLAGS.seed_pad, FLAGS.seed_init
)
# Load FFN weights ------------------------------------------------
# Hooking graphs together... This bit loads up weights.
if FLAGS.ffn_ckpt:
encoder = models.ConvStack3DEncoder.from_ffn_ckpt(
FLAGS.ffn_ckpt,
FLAGS.ffn_delta,
fov_size,
FLAGS.batch_size,
fixed_seed_batch,
pixel_loss_lambda=FLAGS.pixel_loss_lambda,
encoding_loss_lambda=FLAGS.encoding_loss_lambda,
for_training=True,
depth=FLAGS.layer,
)
# Training graph --------------------------------------------------
training_graph = tf.Graph()
with training_graph.as_default():
# Init encoder ------------------------------------------------
if not FLAGS.ffn_ckpt:
encoder = models.ConvStack3DEncoder(
fov_size=fov_size,
input_seed=fixed_seed_batch,
batch_size=FLAGS.batch_size,
for_training=True,
pixel_loss_lambda=FLAGS.pixel_loss_lambda,
encoding_loss_lambda=FLAGS.encoding_loss_lambda,
depth=FLAGS.layer,
)
encoder.define_tf_graph()
# Build decoder -----------------------------------------------
decoder = models.ConvStack3DDecoder(
fov_size=fov_size,
batch_size=FLAGS.batch_size,
depth=FLAGS.layer,
for_training=False,
)
decoder.define_tf_graph(encoder)
# Decoder restore op
dinit_op, dinit_fd = tf.contrib.framework.assign_from_checkpoint(
FLAGS.decoder_ckpt, decoder.vars, ignore_missing_vars=True
)
# Hook decoder into encoder -----------------------------------
encoder.add_training_ops(decoder)
# TF setup + run ----------------------------------------------
scaffold = tf.train.Scaffold(
ready_for_local_init_op=tf.report_uninitialized_variables(
encoder.vars
),
local_init_op=tf.group(
[
# tf.initializers.variables(encoder.vars),
tf.initializers.variables(decoder.vars),
tf.initializers.local_variables(),
]
),
saver=encoder.saver,
summary_op=tf.summary.merge_all(),
)
config = tf.ConfigProto(
log_device_placement=False, allow_soft_placement=True
)
with tf.train.MonitoredTrainingSession(
config=config,
scaffold=scaffold,
checkpoint_dir=FLAGS.train_dir,
save_summaries_secs=30,
save_checkpoint_secs=600,
) as sess:
# Assign to decoder
sess.run(
dinit_op,
feed_dict={
encoder.input_patches: np.zeros(
[FLAGS.batch_size, *fov_size, 1], dtype=np.float32
),
**dinit_fd,
},
)
# Train decoder
for i, fov_batch in enumerate(fov_batches):
# Run decoder train op
sess.run(
encoder.train_op,
feed_dict={encoder.input_patches: fov_batch},
)
if i > FLAGS.max_steps:
print('Reached max_steps', i)
break
# ---------------------------------------------------------------------
if __name__ == '__main__':
flags.mark_flag_as_required('layer')
flags.mark_flag_as_required('volume_spec')
flags.mark_flag_as_required('train_dir')
flags.mark_flag_as_required('decoder_ckpt')
app.run(main)
| [
"cwindolf95@gmail.com"
] | cwindolf95@gmail.com |
ab885e0e900522ccf71cc304f9d9773e78853ded | 64c8ae3675739067e75769be672d10591d60c3f0 | /Quotes_crawler.py | 0c857f977dca98279f18c47449b7f0662de593ed | [] | no_license | akshatjain02/Python-scripts | 7cde46129075984afb555facbb49aa47a5eb9382 | d193935c8514575d26b001f2f09e511d8f47fc8a | refs/heads/master | 2021-09-03T22:39:47.703984 | 2018-01-12T15:00:45 | 2018-01-12T15:00:45 | 97,263,035 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 791 | py | import requests
from bs4 import BeautifulSoup
import csv
url = "http://www.values.com/inspirational-quotes"
r = requests.get(url)
soup = BeautifulSoup(r.content, 'html5lib')
quotes = [] #list of dictionaries representing quotes
table = soup.find('div', attrs = {'id':'portfolio'})
for row in table.findAll('div', attrs = {'class':'portfolio-image'}):
quote = {} #dictionary
#quote['theme'] = row.h5.text
quote['url'] = row.a['href']
quote['img'] = row.img['src']
quote['lines'] = row.img['alt']
#quote['author'] = row.p.text
quotes.append(quote)
filename = 'Quotes.csv'
with open(filename, 'wb') as f:
#w = csv.DictWriter(f, ['theme', 'url', 'img', 'lines', 'author'])
w = csv.DictWriter(f, ['url', 'img', 'lines'])
w.writeheader()
for quote in quotes:
w.writerow(quote) | [
"noreply@github.com"
] | noreply@github.com |
b763e7d5f064ecf08cc3ec96e65e8f15f11530d8 | 0735037ad5b2a1b4405825d2f9b0b6b8a01a27c0 | /test3.py | 55b60377942d20dd53f44f21851d4c0c5a87c67a | [] | no_license | koibiki/ai-factory | c576ea8d4142c7f9f72a98d037c5fe3309bc6967 | fbc926d3513fea814a7a818e38daa0b1fa00ceb3 | refs/heads/master | 2021-09-06T20:58:42.585095 | 2018-02-11T09:23:52 | 2018-02-11T09:23:52 | 116,560,287 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,197 | py | from feature_engineering.feature_selector import *
from feature_engineering.create_sample import *
from feature_engineering.separate_str_num import *
from feature_engineering.fill_nan import *
from model_selection.regressor_model_factory import RegressorModelFactory
from model_selection.cv import k_fold_regressor
from model_selection.smote_cv import *
from sklearn.model_selection import train_test_split
train = pd.read_csv('./input/train.csv')
train_X = train.iloc[:, 1:-1]
train_Y = train.iloc[:, -1]
train_X = delete_constant(train_X)
train_X = delete_nan(train_X)
data_num, data_str = separate_num_str(train_X)
data_str = pd.get_dummies(data_str)
train_data = pd.concat([data_num], axis=1)
print(train_data.shape)
predict, cv_indexs, importances = \
k_fold_regressor(train_data, train_Y, train_data, model_num=RegressorModelFactory.MODEL_LIGHET_GBM, cv=10, important_level=0)
importances_train = train_data[importances]
print(importances_train.shape)
pd.DataFrame(importances, columns=['importance']).to_csv('./output/important_feature.csv', index=None)
k_fold_regressor(importances_train, train_Y, importances_train, model_num=RegressorModelFactory.MODEL_LIGHET_GBM, cv=5)
| [
"lcheng.ext@orange.com"
] | lcheng.ext@orange.com |
6f89e64639bf0ddfbd6b89056520b9013d8be41e | 042aca4089ca33da41a3bd67898fc4f7f534cd13 | /ServingRobot/mqtt/mqtt01.py | ad3396872a07a744f701fa7a2c6c20db45b32abd | [] | no_license | jacksimuse/Project_EATS | bdcd04fcc0623ec5b8a6bb7df876ba1894fce805 | 4247fd6dcf4b05a5db5155d0f01e4adaf1bdf513 | refs/heads/main | 2023-07-20T05:33:42.633632 | 2021-08-27T13:58:58 | 2021-08-27T13:58:58 | 364,155,556 | 7 | 3 | null | 2021-08-20T06:21:47 | 2021-05-04T06:02:56 | C# | UTF-8 | Python | false | false | 2,819 | py | import RPi.GPIO as GPIO
import paho.mqtt.client as mqtt
import sys
import threading
import signal
import os
import time
pin = 4 # 라인 근접 센서
mpin1 = 20 # 앞 왼 바퀴1
mpin2 = 21 # 앞 왼 바퀴2
mpin3 = 6 # 앞 오 바퀴1
mpin4 = 12 # 앞 오 바퀴2
ena = 17 # 앞 왼 바퀴 enable 입력
enb = 18 # 앞 오 바퀴 enable 입력
GPIO.setmode(GPIO.BCM)
GPIO.setup(pin, GPIO.IN)
GPIO.setup(mpin1, GPIO.OUT)
GPIO.setup(mpin2, GPIO.OUT)
GPIO.setup(mpin3, GPIO.OUT)
GPIO.setup(mpin4, GPIO.OUT)
GPIO.setup(ena, GPIO.OUT)
GPIO.setup(enb, GPIO.OUT)
pa = GPIO.PWM(ena, 100)
pb = GPIO.PWM(enb, 100)
pa.start(100)
pb.start(100)
GPIO.output(mpin1, False)
GPIO.output(mpin2, False)
GPIO.output(mpin3, False)
GPIO.output(mpin4, False)
def setOg():
pa.ChangeDutyCycle(100)
pb.ChangeDutyCycle(100)
def set_left():
pa.ChangeDutyCycle(40)
GPIO.output(mpin1, True)
GPIO.output(mpin2, False)
GPIO.output(mpin3, False)
GPIO.output(mpin4, True)
def set_right():
pb.ChangeDutyCycle(40)
GPIO.output(mpin1, False)
GPIO.output(mpin2, True)
GPIO.output(mpin3, True)
GPIO.output(mpin4, False)
def set_start():
setOg()
GPIO.output(mpin1, False)
GPIO.output(mpin2, True)
GPIO.output(mpin3, False)
GPIO.output(mpin4, True)
def set_back():
setOg()
GPIO.output(mpin1, True)
GPIO.output(mpin2, False)
GPIO.output(mpin3, True)
GPIO.output(mpin4, False)
def stop():
setOg()
GPIO.output(mpin1, False)
GPIO.output(mpin2, False)
GPIO.output(mpin3, False)
GPIO.output(mpin4, False)
def on_message(client, userdata, message):
topic=str(message.topic)
message = str(message.payload.decode("utf-8"))
print(topic+message)
if message == 's':
set_start()
elif message == 'b':
set_back()
elif message == 't':
stop()
elif message == 'l':
set_left()
elif message == 'r':
set_right()
else: pass
broker_address='210.119.12.93'
pub_topic = 'MOTOR/TEST/'
print("creating new instance")
client=mqtt.Client("P1") #create new instance
print("connecting to broker")
client.connect(broker_address) #connect to broker
client.subscribe(pub_topic)
# client.on_connect = on_connect
# client.on_disconnect = on_disconnect
client.on_message = on_message
try:
while True:
client.loop_forever()
#GPIO.add_event_detect(pin, GPIO.FALLING, callback=client.loop_forever())
# while True:
# if GPIO.input(pin) == False:
# print('path')
# if GPIO.input(pin) == True:
# break
# client.loop_forever()
# elif GPIO.input(pin) == True:
# print('no path')
# stop()
except KeyboardInterrupt:
GPIO.cleanup()
sys.exit() | [
"nhsk0767@gmail.com"
] | nhsk0767@gmail.com |
9a70d8612161960f7cc8dddb13768a792c11ce1d | 47f9b58b51264d8e2e23acdfe6281aec24bc1678 | /wxServer/Handerls/toupiaoHanderl.py | 1c5ccc54c083f45384368bdd4289aa48f23e1800 | [] | no_license | andwang130/DOme | 93ed0188fe7add6ad76f65bd679e76457f3ac743 | 79e0a445ad81956d751439f829088ae1ed7f0bc2 | refs/heads/master | 2020-04-04T19:51:39.426476 | 2019-05-08T09:31:38 | 2019-05-08T09:31:38 | 155,844,167 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,385 | py | # -*- coding: UTF-8 -*-
import Basehanderl
import redis
import json
import time
import tornado
import pojcetm
import uuid
class toupiaoHanderl(Basehanderl.Basehandelr):
@tornado.gen.coroutine
def get(self):
self.db_linck()
userid=self.get_argument("userid")
uuid_=self.get_argument("uuid")
code = self.get_argument("code",None)
openid = self.get_secure_cookie("openid")
if not self.Verification(openid, self.request.headers.get("X-Real-IP")):
self.render("404.html")
raise tornado.gen.Return()
if openid:
self.rq(uuid_,userid)
raise tornado.gen.Return()
elif code:
if not openid:
newopenid = yield tornado.gen.Task(self.get_openid,code)
self.set_secure_cookie("openid", newopenid)
self.rq(uuid, userid)
raise tornado.gen.Return()
else:
self.auto()
raise tornado.gen.Return()
def rq(self,uuid_,userid):
if userid and uuid_:
coures = self.Mongodb["poject"].find_one({"uuid": uuid_})
pojcetm.imgae_change(coures)
usercoures = self.Mongodb["tpUser"].find_one({"userid": userid})
pojcetm.imgae_change(usercoures)
coureslist = self.Mongodb["tpUser"].find({"uuid": uuid_}, {"userid": 1, "votenum": 1}).sort(
[("votenum", -1)])
self.Mongodb["tpUser"].update_one({"userid": userid}, {"$inc": {"vheat": 1}});
data = {}
data["topimges"] = [coures["topimgV"], coures["topimg2V"], coures["topimg3V"]]
frist_data={"topimgV":self.get_frist(uuid_)}
pojcetm.imgae_change(frist_data)
data["topimges"].append(frist_data["topimgV"])
x = 0
next_couresl = None
for i in coureslist:
if i["userid"] == userid:
if x != 0:
data["index"] = x + 1
data["subvotenum"] = int(next_couresl["votenum"]) - int(usercoures["votenum"])
else:
data["index"] = 1
data["subvotenum"] = 0
break
next_couresl = i
x += 1
data["endtimes"] = time.mktime(time.strptime(coures["timeend"], '%Y-%m-%d %H:%M')) - time.time()
data["aptimes"] = time.mktime(time.strptime(coures["tiemstatr"], '%Y-%m-%d %H:%M')) - time.time()
data["aptimestart"] = coures["tiemstatr"]
data["aptimeend"] = coures["timeend"]
data["titile"] = coures["titile"]
data["name"] = usercoures["name"]
data["votenum"] = usercoures["votenum"]
data["avatar"] = usercoures["avatar"]
data["introduction"]=usercoures["introduction"]
data["userid"] = userid
data["uuid"] = uuid_
data["index_"]=usercoures["index"]
data["description"] = usercoures["description"]
imgs = []
for i in ["images1", "images2", "images3", "images4", "images5"]:
if usercoures[i] != "":
imgs.append(usercoures[i])
data["imgse"] = imgs
shares = {}
shares["sharetitle"] = coures["sharetitle"]
shares["shareimgV"] = coures["shareimgV"]
shares["sharedesc"] = coures["sharedesc"]
shares["url"] = self.wxconfig.get("chindwww","") + "/wx/toupiao?uuid={}&userid={}".format(uuid_,userid)
pojcetm.imgae_change(shares)
pojcetm.imgae_change(data)
aseedata = pojcetm.get_wxcongif(self.wxconfig.get("chindwww","") + self.request.uri,self.wxconfig)
if pojcetm.TempCode == 1:
self.render("toupiao.html", data=data, share=shares, aseedata=aseedata)
elif pojcetm.TempCode==2:
self.render("temp2/tpuser.html", data=data, aseedata=aseedata, share=shares)
def post(self):
openid = self.get_secure_cookie("openid")
userid= self.get_argument("userid", None)
if userid and openid:
try:
myreids = redis.StrictRedis(**pojcetm.conf_redis)
self.db_linck()
couers=self.Mongodb["tpUser"].find_one({"userid":userid,"status":0})
if couers:
pojectcoures = self.Mongodb["poject"].find_one({"uuid": couers["uuid"]})
if time.mktime(time.strptime(pojectcoures["votestart"], '%Y-%m-%d %H:%M')) - time.time() > 0:
self.write(json.dumps({"status": 0, "msg": "投票未开始"}))
return
if time.mktime(time.strptime(pojectcoures["voteend"], '%Y-%m-%d %H:%M')) - time.time() < 0:
self.write(json.dumps({"status": 0, "msg": "投票已结束"}))
return
if pojectcoures["rangenum"]<=0:
self.write(json.dumps({"status": 0, "msg": "不可投票"}))
return
num=myreids.get(openid+couers["uuid"])
if not num:
order = {"orderid":str(uuid.uuid1()).replace("-",""),"userid":userid,"openid":openid, "headimg":"", "operate":"" ,"uuid":couers["uuid"],
"username":couers["name"],"money":0, "liwu":0 ,"num":0,
"votenum":1, "times":time.time() ,"ip":self.request.headers.get("X-Real-IP") ,"start":1
,"type":"tp","Adminid":pojectcoures["Adminid"]}
self.Mongodb["tpUser"].update_one({"userid": userid}, {"$inc": {"votenum": 1}});
self.Mongodb["poject"].update_one({"uuid": couers["uuid"]},{"$inc": {"votes": 1}});
self.Mongodb["Ordel"].insert_one(order)
myreids.set(openid+couers["uuid"],1,ex=pojectcoures["rangetime"]*3600)
self.write(json.dumps({"status": 1, "msg": "成功"}))
else:
if int(num)>=pojectcoures["rangenum"]:
self.write(json.dumps({"status": 0, "msg": "每{}个小时可投票{}次,你已经投过票了".format(pojectcoures["rangetime"],pojectcoures["rangenum"])}))
else:
order = {"orderid": str(uuid.uuid1()).replace("-", ""), "userid": userid, "openid": openid,
"headimg": "", "operate": "", "uuid": couers["uuid"],
"username": couers["name"], "money": 0, "liwu": 0, "num": 0,
"votenum": 1, "times": time.time(), "ip": self.request.headers.get("X-Real-IP"),
"start": 1,"type":"tp","Adminid":pojectcoures["Adminid"]}
self.Mongodb["tpUser"].update_one({"userid": userid}, {"$inc": {"votenum": 1}});
self.Mongodb["poject"].update_one({"uuid": couers["uuid"]}, {"$inc": {"votes": 1}});
self.Mongodb["Ordel"].insert_one(order)
myreids.incr(openid + couers["uuid"])
self.write(json.dumps({"status": 1, "msg": "成功"}))
except Exception as e:
print(e)
self.write(json.dumps({"status": 0, "msg": "数据库错误"}))
else:
self.write(json.dumps({"status": 0, "msg": "没有openid"}))
class toupiaoinfoHanderl(Basehanderl.Basehandelr):
def get(self):
openid = self.get_secure_cookie("openid")
self.db_linck()
userid = self.get_argument("userid")
usercoures = self.Mongodb["tpUser"].find_one({"userid": userid})
coures = self.Mongodb["poject"].find_one({"uuid": usercoures["uuid"]})
data={}
data["endtimes"] = time.mktime(time.strptime(coures["timeend"], '%Y-%m-%d %H:%M')) - time.time()
data["aptimes"] = time.mktime(time.strptime(coures["tiemstatr"], '%Y-%m-%d %H:%M')) - time.time()
data["aptimestart"] = coures["tiemstatr"]
data["aptimeend"] = coures["timeend"]
data["titile"] = coures["titile"]
data["name"] = usercoures["name"]
data["index"] = usercoures["index"]
data["uuid"]=usercoures["uuid"]
data["userid"]=userid
shares = {}
shares["sharetitle"] = coures["sharetitle"]
shares["shareimgV"] = coures["shareimgV"]
shares["sharedesc"] = coures["sharedesc"]
shares["url"] = self.wxconfig.get("chindwww","") + "/wx/toupiao?uuid={}&userid={}".format(data["uuid"], userid)
aseedata = pojcetm.get_wxcongif(self.wxconfig.get("chindwww","")+ self.request.uri, self.wxconfig)
if openid:
if not self.Verification(openid, self.request.headers.get("X-Real-IP")):
self.render("404.html")
return
self.render("temp2/toupiao.html", data=data,share=shares, aseedata=aseedata)
else:
url= pojcetm.www + "/wx/toupiao?uuid={}&userid={}".format(data["uuid"], userid)
self.redirect(url) | [
"627656470@qq.com"
] | 627656470@qq.com |
751375bebcc798846b4078bfe24e0b11e4455851 | 6f3433ee263e9a7e6d6915ca0fb9ae5c5850a303 | /students/K33402/Dubina Sergey/practical_works/prac_3/warriors_project/warriors_app/migrations/0001_initial.py | 4b1c6fa8bb8a61bfb54a0301c0a42c29c07dfa0d | [
"MIT"
] | permissive | dEbAR38/ITMO_ICT_WebDevelopment_2020-2021 | 8a2a1abe6cd7f3bb0c0b1774cad1765d377f52c7 | 208cbc6d2b6d40c3043d35ce773a3433b377f671 | refs/heads/master | 2023-04-03T10:09:38.427787 | 2021-04-09T10:42:27 | 2021-04-09T10:42:27 | 296,327,161 | 0 | 0 | MIT | 2020-09-17T12:59:46 | 2020-09-17T12:59:45 | null | UTF-8 | Python | false | false | 2,648 | py | # Generated by Django 3.1.1 on 2020-11-23 21:10
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Profession',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.CharField(max_length=120, verbose_name='Название')),
('description', models.TextField(verbose_name='Описание')),
],
),
migrations.CreateModel(
name='Skill',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.CharField(max_length=120, verbose_name='Наименование')),
],
),
migrations.CreateModel(
name='SkillOfWarrior',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('level', models.IntegerField(verbose_name='Уровень освоения умения')),
('skill', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='warriors_app.skill', verbose_name='Умение')),
],
),
migrations.CreateModel(
name='Warrior',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('race', models.CharField(choices=[('s', 'student'), ('d', 'developer'), ('t', 'teamlead')], max_length=1, verbose_name='Расса')),
('name', models.CharField(max_length=120, verbose_name='Имя')),
('level', models.IntegerField(default=0, verbose_name='Уровень')),
('profession', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='warriors_app.profession', verbose_name='Профессия')),
('skill', models.ManyToManyField(related_name='warrior_skils', through='warriors_app.SkillOfWarrior', to='warriors_app.Skill', verbose_name='Умения')),
],
),
migrations.AddField(
model_name='skillofwarrior',
name='warrior',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='warriors_app.warrior', verbose_name='Воин'),
),
]
| [
"noreply@github.com"
] | noreply@github.com |
95706a934158c11e5565759ef753f1ffb292c167 | ebbe7de4dab925cce8ce726b3f78ad9af61ef6cb | /pygmt/src/x2sys_cross.py | 26987452bf9872f39b8524e8e174a39422ffe7c8 | [
"BSD-3-Clause"
] | permissive | chinaperrin/pygmt | bcc6702de93d473af1cbb8730e8db5b9164fd790 | 21e3e75b7c84d84d7c76ddac0dedb9718871580f | refs/heads/master | 2023-03-05T12:43:11.019682 | 2022-10-10T01:03:17 | 2022-10-10T01:03:17 | 215,927,273 | 0 | 0 | BSD-3-Clause | 2023-02-28T13:57:39 | 2019-10-18T02:42:29 | Python | UTF-8 | Python | false | false | 9,595 | py | """
x2sys_cross - Calculate crossovers between track data files.
"""
import contextlib
import os
from pathlib import Path
import pandas as pd
from pygmt.clib import Session
from pygmt.exceptions import GMTInvalidInput
from pygmt.helpers import (
GMTTempFile,
build_arg_string,
data_kind,
dummy_context,
fmt_docstring,
kwargs_to_strings,
unique_name,
use_alias,
)
@contextlib.contextmanager
def tempfile_from_dftrack(track, suffix):
"""
Saves pandas.DataFrame track table to a temporary tab-separated ASCII text
file with a unique name (to prevent clashes when running x2sys_cross),
adding a suffix extension to the end.
Parameters
----------
track : pandas.DataFrame
A table holding track data with coordinate (x, y) or (lon, lat) values,
and (optionally) time (t).
suffix : str
File extension, e.g. xyz, tsv, etc.
Yields
------
tmpfilename : str
A temporary tab-separated value file with a unique name holding the
track data. E.g. 'track-1a2b3c4.tsv'.
"""
try:
tmpfilename = f"track-{unique_name()[:7]}.{suffix}"
track.to_csv(
path_or_buf=tmpfilename,
sep="\t",
index=False,
na_rep="NaN", # write a NaN value explicitly instead of a blank string
date_format="%Y-%m-%dT%H:%M:%S.%fZ",
)
yield tmpfilename
finally:
os.remove(tmpfilename)
@fmt_docstring
@use_alias(
A="combitable",
C="runtimes",
D="override",
I="interpolation",
R="region",
S="speed",
T="tag",
Q="coe",
V="verbose",
W="numpoints",
Z="trackvalues",
)
@kwargs_to_strings(R="sequence")
def x2sys_cross(tracks=None, outfile=None, **kwargs):
r"""
Calculate crossovers between track data files.
Determines all intersections between ("external cross-overs") or within
("internal cross-overs") tracks (Cartesian or geographic), and report the
time, position, distance along track, heading and speed along each track
segment, and the crossover error (COE) and mean values for all observables.
By default, :func:`pygmt.x2sys_cross` will look for both external and
internal COEs. As an option, you may choose to project all data using one
of the map projections prior to calculating the COE.
Full option list at :gmt-docs:`supplements/x2sys/x2sys_cross.html`
{aliases}
Parameters
----------
tracks : pandas.DataFrame or str or list
A table or a list of tables with (x, y) or (lon, lat) values in the
first two columns. Track(s) can be provided as pandas DataFrame tables
or file names. Supported file formats are ASCII, native binary, or
COARDS netCDF 1-D data. More columns may also be present.
If the file names are missing their file extension, we will append the
suffix specified for this TAG. Track files will be searched for first
in the current directory and second in all directories listed in
$X2SYS_HOME/TAG/TAG_paths.txt (if it exists). [If $X2SYS_HOME is not
set it will default to $GMT_SHAREDIR/x2sys]. (**Note**: MGD77 files
will also be looked for via $MGD77_HOME/mgd77_paths.txt and .gmt
files will be searched for via $GMT_SHAREDIR/mgg/gmtfile_paths).
outfile : str
Optional. The file name for the output ASCII txt file to store the
table in.
tag : str
Specify the x2sys TAG which identifies the attributes of this data
type.
combitable : str
Only process the pair-combinations found in the file *combitable*
[Default process all possible combinations among the specified files].
The file *combitable* is created by :gmt-docs:`x2sys_get's -L option
<supplements/x2sys/x2sys_get.html#l>`.
runtimes : bool or str
Compute and append the processing run-time for each pair to the
progress message (use ``runtimes=True``). Pass in a file name (e.g.
``runtimes="file.txt"``) to save these run-times to file. The idea here
is to use the knowledge of run-times to split the main process in a
number of sub-processes that can each be launched in a different
processor of your multi-core machine. See the MATLAB function
`split_file4coes.m
<https://github.com/GenericMappingTools/gmt/blob/master/src/x2sys/>`_.
override : bool or str
**S**\|\ **N**.
Control how geographic coordinates are handled (Cartesian data are
unaffected). By default, we determine if the data are closer to one
pole than the other, and then we use a cylindrical polar conversion to
avoid problems with longitude jumps. You can turn this off entirely
with ``override`` and then the calculations uses the original data (we
have protections against longitude jumps). However, you can force the
selection of the pole for the projection by appending **S** or **N**
for the south or north pole, respectively. The conversion is used
because the algorithm used to find crossovers is inherently a
Cartesian algorithm that can run into trouble with data that has large
longitudinal range at higher latitudes.
interpolation : str
**l**\|\ **a**\|\ **c**.
Sets the interpolation mode for estimating values at the crossover.
Choose among:
- **l** - Linear interpolation [Default].
- **a** - Akima spline interpolation.
- **c** - Cubic spline interpolation.
coe : str
Use **e** for external COEs only, and **i** for internal COEs only
[Default is all COEs].
{region}
speed : str or list
**l**\|\ **u**\|\ **h**\ *speed*.
Defines window of track speeds. If speeds are outside this window we do
not calculate a COE. Specify:
- **l** sets lower speed [Default is 0].
- **u** sets upper speed [Default is infinity].
- **h** does not limit the speed but sets a lower speed below which
headings will not be computed (i.e., set to NaN) [Default
calculates headings regardless of speed].
For example, you can use ``speed=["l0", "u10", "h5"]`` to set a lower
speed of 0, upper speed of 10, and disable heading calculations for
speeds below 5.
{verbose}
numpoints : int
Give the maximum number of data points on either side of the crossover
to use in the spline interpolation [Default is 3].
trackvalues : bool
Report the values of each track at the crossover [Default reports the
crossover value and the mean value].
Returns
-------
crossover_errors : :class:`pandas.DataFrame` or None
Table containing crossover error information.
Return type depends on whether the ``outfile`` parameter is set:
- :class:`pandas.DataFrame` with (x, y, ..., etc) if ``outfile`` is not
set
- None if ``outfile`` is set (track output will be stored in the set in
``outfile``)
"""
with Session() as lib:
file_contexts = []
for track in tracks:
kind = data_kind(track)
if kind == "file":
file_contexts.append(dummy_context(track))
elif kind == "matrix":
# find suffix (-E) of trackfiles used (e.g. xyz, csv, etc) from
# $X2SYS_HOME/TAGNAME/TAGNAME.tag file
lastline = (
Path(os.environ["X2SYS_HOME"], kwargs["T"], f"{kwargs['T']}.tag")
.read_text(encoding="utf8")
.strip()
.split("\n")[-1]
) # e.g. "-Dxyz -Etsv -I1/1"
for item in sorted(lastline.split()): # sort list alphabetically
if item.startswith(("-E", "-D")): # prefer -Etsv over -Dxyz
suffix = item[2:] # e.g. tsv (1st choice) or xyz (2nd choice)
# Save pandas.DataFrame track data to temporary file
file_contexts.append(tempfile_from_dftrack(track=track, suffix=suffix))
else:
raise GMTInvalidInput(f"Unrecognized data type: {type(track)}")
with GMTTempFile(suffix=".txt") as tmpfile:
with contextlib.ExitStack() as stack:
fnames = [stack.enter_context(c) for c in file_contexts]
if outfile is None:
outfile = tmpfile.name
lib.call_module(
module="x2sys_cross",
args=build_arg_string(
kwargs, infile=" ".join(fnames), outfile=outfile
),
)
# Read temporary csv output to a pandas table
if outfile == tmpfile.name: # if outfile isn't set, return pd.DataFrame
# Read the tab-separated ASCII table
table = pd.read_csv(
tmpfile.name,
sep="\t",
header=2, # Column names are on 2nd row
comment=">", # Skip the 3rd row with a ">"
parse_dates=[2, 3], # Datetimes on 3rd and 4th column
)
# Remove the "# " from "# x" in the first column
table = table.rename(columns={table.columns[0]: table.columns[0][2:]})
elif outfile != tmpfile.name: # if outfile is set, output in outfile only
table = None
return table
| [
"noreply@github.com"
] | noreply@github.com |
0eee37fc40fe0c80380a966a17972e803704d7d3 | 2a03d611f83932f1e400fe8828af6546b945447d | /ts_api/urls.py | a6df40b2f3e1721a416304fef95832861b507d4e | [] | no_license | ggabunia/ThoughtShare-Django | b354d1724472b9bb43df9a324d9856f1285cb340 | a9fee6a1bba430ffe49bc2e1c2fd74db10bc2a19 | refs/heads/master | 2022-11-26T05:28:51.544298 | 2018-12-03T16:54:49 | 2018-12-03T16:54:49 | 158,215,699 | 0 | 0 | null | 2022-11-22T03:07:04 | 2018-11-19T12:05:10 | Python | UTF-8 | Python | false | false | 2,147 | py | from django.contrib import admin
from django.urls import path
from django.conf.urls import include
from ts_api import views
from rest_framework import routers
app_name = 'ts_api'
urlpatterns = [
path('', views.api_root),
path('rest-auth/', include('rest_auth.urls'), name='rest_auth'),
path('login/',views.CustomLoginView.as_view(), name='login'),
path('logout/',views.CustomLogoutView.as_view(), name='logout'),
path('all-ideas/',views.AllIdeas.as_view(), name = 'all_ideas'),
path('my-ideas/',views.MyIdeas.as_view(), name = 'my_ideas'),
path('user-ideas/>',views.UserIdeas.as_view(), name = 'user_ideas'),
path('user-ideas/<int:pk>',views.UserIdeas.as_view(), name = 'user_ideas'),
path('all-users/',views.UserList.as_view(), name = 'user_list'),
path('register/',views.RegisterUser.as_view(), name = 'register'),
path('add-idea/',views.AddIdea.as_view(), name = 'add_idea'),
path('current-user/',views.GetCurrentUser.as_view(), name = 'get_current_user'),
path('get-user/',views.GetUser.as_view(), name = 'get_user'),
path('get-user/<int:pk>',views.GetUser.as_view(), name = 'get_user'),
path('all-categories/',views.CategoryList.as_view(), name = 'all_categories'),
path('get-category/',views.GetCategory.as_view(), name = 'get_category'),
path('get-category/<int:pk>',views.GetCategory.as_view(), name = 'get_category'),
path('edit-idea/',views.UpdateIdea.as_view(), name = 'edit_idea'),
path('edit-idea/<int:pk>',views.UpdateIdea.as_view(), name = 'edit_idea'),
path('get-idea/',views.GetIdea.as_view(), name = 'get_idea'),
path('get-idea/<int:pk>',views.GetIdea.as_view(), name = 'get_idea'),
path('add-rating/',views.AddRating.as_view(), name = 'add_rating'),
path('get-user-rating/',views.get_user_rating, name = 'get_user_rating'),
path('get-user-rating/<int:idea_id>',views.get_user_rating, name = 'get_user_rating'),
path('remove-rating/',views.delete_rating, name='remove_rating'),
path('remove-rating/<int:idea_id>',views.delete_rating, name='remove_rating'),
path('search/',views.SearchIdeas.as_view(), name='search'),
]
| [
"ggabunia91@outlook.com"
] | ggabunia91@outlook.com |
d0a92881174f016830e5b146ca97ba5a68b65627 | 2aa4c7c94866e7a958e4787dd4487aa7c1eb8d61 | /applications/MappingApplication/tests/test_mapper_mpi_tests.py | 17fa528cbfa65adc8d0f6521adde262131b8852b | [
"BSD-3-Clause"
] | permissive | PFEM/Kratos | b48df91e6ef5a00edf125e6f5aa398505c9c2b96 | 796c8572e9fe3875562d77370fc60beeacca0eeb | refs/heads/master | 2021-10-16T04:33:47.591467 | 2019-02-04T14:22:06 | 2019-02-04T14:22:06 | 106,919,267 | 1 | 0 | null | 2017-10-14T10:34:43 | 2017-10-14T10:34:43 | null | UTF-8 | Python | false | false | 2,141 | py | from __future__ import print_function, absolute_import, division # makes KratosMultiphysics backward compatible with python 2.6 and 2.7
import KratosMultiphysics
from KratosMultiphysics.mpi import mpi
import KratosMultiphysics.MetisApplication
import KratosMultiphysics.TrilinosApplication
import KratosMultiphysics.MappingApplication as KratosMapping
import KratosMultiphysics.KratosUnittest as KratosUnittest
from base_mapper_tests import BaseMapperTests
from trilinos_import_model_part_utility import TrilinosImportModelPartUtility
class MapperMPITests(BaseMapperTests, KratosUnittest.TestCase):
@classmethod
def _ImportModelPart(cls):
cls.model_part_origin.AddNodalSolutionStepVariable(
KratosMultiphysics.PARTITION_INDEX)
cls.model_part_destination.AddNodalSolutionStepVariable(
KratosMultiphysics.PARTITION_INDEX)
origin_settings = KratosMultiphysics.Parameters("""{
"model_import_settings": {
"input_type": "mdpa",
"input_filename": \"""" + cls.input_file_origin + """\",
"partition_in_memory" : true
},
"echo_level" : 0
}""")
destination_settings = origin_settings.Clone()
destination_settings["model_import_settings"]["input_filename"].SetString(
cls.input_file_destination)
model_part_import_util_origin = TrilinosImportModelPartUtility(
cls.model_part_origin, origin_settings)
model_part_import_util_destination = TrilinosImportModelPartUtility(
cls.model_part_destination, destination_settings)
model_part_import_util_origin.ImportModelPart()
model_part_import_util_destination.ImportModelPart()
model_part_import_util_origin.CreateCommunicators()
model_part_import_util_destination.CreateCommunicators()
def _CreateMapper(self, mapper_settings):
return KratosMapping.MapperFactory.CreateMPIMapper(
self.model_part_origin,
self.model_part_destination,
mapper_settings)
if __name__ == '__main__':
KratosUnittest.main()
| [
"philipp.bucher@tum.de"
] | philipp.bucher@tum.de |
e1dd33e069a4688d18b42d008053e2b14b83c6e8 | 805db1dc2e179902bf319f2eec21554986a772db | /SmartBody-additional/data/examples/OgreDemo.py | 3522e594980be7db9a51c68b49b22f262fd95839 | [] | no_license | michaelnixon/avatar | 0193eed78bb47863c46aa6f6e7260deae604819c | 0349a7f3a5cbf55811f11795c10f0e406a543cff | refs/heads/master | 2020-05-28T07:15:53.200531 | 2016-02-11T01:19:56 | 2016-02-11T01:19:56 | 19,548,417 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,063 | py | import math
import random
print "|--------------------------------------------|"
print "| Starting Ogre Demo |"
print "|--------------------------------------------|"
scene.setScale(0.1)
# Add asset paths
scene.addAssetPath('mesh', 'mesh')
scene.addAssetPath('motion', 'Sinbad')
scene.addAssetPath('script', 'scripts')
scene.addAssetPath('script', 'behaviorsets')
scene.loadAssets()
# Set scene parameters and camera
#scene.getPawn('camera').setPosition(SrVec(0, -5, 0))
# Set joint map for Sinbad
print 'Setting up joint map for Brad'
scene.run('ogre-sinbad-map.py')
sinbadSkName = 'Sinbad.skeleton.xml'
jointMapManager = scene.getJointMapManager()
sinbadMap = jointMapManager.getJointMap('Sinbad.skeleton.xml')
ogreSk = scene.getSkeleton(sinbadSkName)
sinbadMap.applySkeleton(ogreSk)
# Behavior set setup
scene.run('behaviorsetup.py')
# Animation setup
#scene.run('init-param-animation.py')
steerManager = scene.getSteerManager()
# Setting up Sinbad
print 'Setting up Sinbad'
sinbadName = 'sinbad'
sinbad = scene.createCharacter(sinbadName,'')
sinbadSk = scene.createSkeleton(sinbadSkName)
sinbad.setSkeleton(sinbadSk)
sinbadPos = SrVec(0,5.16, 0)
sinbad.setPosition(sinbadPos)
sinbad.createStandardControllers()
sinbad.setStringAttribute('deformableMesh', 'Sinbad.mesh.xml')
# setup locomotion
scene.run('BehaviorSetMaleLocomotion.py')
setupBehaviorSet()
retargetBehaviorSet(sinbadName)
# setup reaching
#scene.run('BehaviorSetReaching.py')
#setupBehaviorSet()
#retargetBehaviorSet(sinbadName)
sinbad.setStringAttribute("displayType", "GPUmesh")
print 'Configuring scene parameters and camera'
scene.setBoolAttribute('internalAudio', True)
camera = getCamera()
camera.setEye(0, 5.98, 13.44)
camera.setCenter(1.0, 1.7, -39.5)
camera.setUpVector(SrVec(0, 1, 0))
camera.setScale(1)
camera.setFov(1.0472)
camera.setFarPlane(100)
camera.setNearPlane(0.1)
camera.setAspectRatio(1.02)
sim.start()
bml.execBML(sinbadName, '<body posture="ChrUtah_Idle001"/>')
sim.resume()
| [
"mnixon@gmail.com"
] | mnixon@gmail.com |
18fe1e8f4236f5abbedb33cde35e8ac736d663ac | 850804fd11f40c5ba5461b6a074df5b89415260b | /files/usr/lib/python2.7/site-packages/sockjs/tornado/websocket.py | 3738debf150e86d34954f8ec86044e658a309c44 | [] | no_license | adidoes/get_started_with_respeaker | f5264b3402c9fccd467728643d0143f28e6ab05d | 35e1fe18f5906c9f28029f28c624df02d0695cf3 | refs/heads/master | 2021-05-08T07:29:08.514378 | 2017-10-12T11:40:33 | 2017-10-12T11:40:33 | 106,880,778 | 2 | 0 | null | 2017-10-13T22:59:49 | 2017-10-13T22:59:49 | null | UTF-8 | Python | false | false | 1,574 | py | from tornado import websocket, escape
class SockJSWebSocketHandler(websocket.WebSocketHandler):
def _execute(self, transforms, *args, **kwargs):
# Websocket only supports GET method
if self.request.method != "GET":
self.stream.write(escape.utf8(
"HTTP/1.1 405 Method Not Allowed\r\n"
"Allow: GET\r\n"
"Connection: Close\r\n"
"\r\n"
))
self.stream.close()
return
# Upgrade header should be present and should be equal to WebSocket
if self.request.headers.get("Upgrade", "").lower() != "websocket":
self.stream.write(escape.utf8(
"HTTP/1.1 400 Bad Request\r\n"
"Connection: Close\r\n"
"\r\n"
"Can \"Upgrade\" only to \"WebSocket\"."
))
self.stream.close()
return
# Connection header should be upgrade. Some proxy servers/load balancers
# might mess with it.
headers = self.request.headers
connection = map(lambda s: s.strip().lower(), headers.get("Connection", "").split(","))
if "upgrade" not in connection:
self.stream.write(escape.utf8(
"HTTP/1.1 400 Bad Request\r\n"
"Connection: Close\r\n"
"\r\n"
"\"Connection\" must be \"Upgrade\"."
))
self.stream.close()
return
return super(SockJSWebSocketHandler, self)._execute(transforms, *args, **kwargs)
| [
"Yihui Xiong"
] | Yihui Xiong |
1c588bf8b790df7ffc1493b1dd25bffff36de0e4 | 07addf6da975492b53df3be57559ebbbd83ad1c1 | /src/unittest/python/aws_tests/__init__.py | 44ce9074f97ac6ce31c2d9f4bf08ae34e1f89462 | [
"Apache-2.0"
] | permissive | cfn-sphere/cfn-sphere | 9f5545d36f8571522840952a34a4f12edf71a80a | 6e2f93f36075a738f7d9e1b484bd655b5bf942fb | refs/heads/master | 2023-02-19T12:23:42.862572 | 2023-02-09T11:48:25 | 2023-02-09T11:48:25 | 40,249,032 | 86 | 34 | Apache-2.0 | 2023-02-09T11:48:27 | 2015-08-05T14:13:44 | Python | UTF-8 | Python | false | false | 22 | py | __author__ = 'mhoyer'
| [
"marco.hoyer@immobilienscout24.de"
] | marco.hoyer@immobilienscout24.de |
a0e00cdb2a95e46d3b90bcbea838cc21df733ab7 | 08c251243a166da41cf91f198bc744ee25f96352 | /kaggle/avito/Predict.py | 72ce4c6aac22cc083c604b15a3bd85c182804888 | [] | no_license | tanay0nSpark/evolveML | afe22e09ecf2668a42c68e3947c72c81f48a30eb | d7b7f0e13f4d1ba95148af94461cb180d8a10043 | refs/heads/master | 2021-06-01T14:39:16.116459 | 2016-06-19T18:16:14 | 2016-06-19T18:16:14 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,167 | py | __author__ = 'abc'
from pyspark.mllib.tree import DecisionTree, DecisionTreeModel
def predict_proba(rf_model, data):
'''
This wrapper overcomes the "binary" nature of predictions in the native
RandomForestModel.
''' # Collect the individual decision tree models by calling the underlying
# Java model. These are returned as JavaArray defined by py4j.
trees = rf_model._java_model.trees()
ntrees = rf_model.numTrees()
scores = DecisionTreeModel(trees[0]).predict(data.map(
lambda row: [float(row.SearchID), float(row.AdID), float(row.Position), float(row.ObjectType),
float(row.HistCTR)]))
# For each decision tree, apply its prediction to the entire dataset and
# accumulate the results using 'zip'.
for i in range(1, ntrees):
dtm = DecisionTreeModel(trees[i])
scores = scores.zip(dtm.predict(data.map(lambda row : [float(row.SearchID),float(row.AdID),float(row.Position),float(row.ObjectType),float(row.HistCTR)])))
scores = scores.map(lambda x: x[0] + x[1])
# Divide the accumulated scores over the number of trees
return scores.map(lambda x: x / ntrees)
| [
"abhishek.create@gmail.com"
] | abhishek.create@gmail.com |
805e4b0e4a22e909185e96d5788bd12061f9e16a | 4df948c31bde1b49c110820ecf8a38f949a78f62 | /vta/tests/python/integration/test_benchmark_gemm.py | da867c9b827007e5e3c94b18238fb448793bd154 | [
"Apache-2.0"
] | permissive | jroesch/tvm | 40b4b8707177e3354c264ce31092721930ced376 | c2b36154778503a509a70a3b5309b201969eccab | refs/heads/master | 2021-12-19T03:38:13.732405 | 2018-10-22T16:31:59 | 2018-10-22T16:31:59 | 135,759,537 | 4 | 7 | Apache-2.0 | 2021-06-17T07:22:42 | 2018-06-01T20:15:33 | C++ | UTF-8 | Python | false | false | 11,731 | py | import tvm
import numpy as np
from tvm.contrib import util
import vta.testing
def test_gemm():
def run_gemm_packed(env, remote, batch_size, channel, block):
data_shape = (batch_size // env.BATCH,
channel // env.BLOCK_IN,
env.BATCH,
env.BLOCK_IN)
weight_shape = (channel // env.BLOCK_OUT,
channel // env.BLOCK_IN,
env.BLOCK_OUT,
env.BLOCK_IN)
res_shape = (batch_size // env.BATCH,
channel // env.BLOCK_OUT,
env.BATCH,
env.BLOCK_OUT)
# To compute number of ops, use a x2 factor for FMA
num_ops = 2 * channel * channel * batch_size
ko = tvm.reduce_axis((0, channel // env.BLOCK_IN), name='ko')
ki = tvm.reduce_axis((0, env.BLOCK_IN), name='ki')
data = tvm.placeholder(data_shape,
name="data",
dtype=env.inp_dtype)
weight = tvm.placeholder(weight_shape,
name="weight",
dtype=env.wgt_dtype)
data_buf = tvm.compute(data_shape,
lambda *i: data(*i),
"data_buf")
weight_buf = tvm.compute(weight_shape,
lambda *i: weight(*i),
"weight_buf")
res_gem = tvm.compute(res_shape,
lambda bo, co, bi, ci: tvm.sum(
data_buf[bo, ko, bi, ki].astype(env.acc_dtype) *
weight_buf[co, ko, ci, ki].astype(env.acc_dtype),
axis=[ko, ki]),
name="res_gem")
res_shf = tvm.compute(res_shape,
lambda *i: res_gem(*i)>>8,
name="res_shf")
res_max = tvm.compute(res_shape,
lambda *i: tvm.max(res_shf(*i), 0),
"res_max") #relu
res_min = tvm.compute(res_shape,
lambda *i: tvm.min(res_max(*i), (1<<(env.INP_WIDTH-1))-1),
"res_min") #relu
res = tvm.compute(res_shape,
lambda *i: res_min(*i).astype(env.inp_dtype),
name="res")
def verify(s, check_correctness=True):
mod = vta.build(s, [data, weight, res],
"ext_dev", env.target_host, name="gemm")
temp = util.tempdir()
mod.save(temp.relpath("gemm.o"))
remote.upload(temp.relpath("gemm.o"))
f = remote.load_module("gemm.o")
# verify
ctx = remote.ext_dev(0)
# Data in original format
data_orig = np.random.randint(
-128, 128, size=(batch_size, channel)).astype(data.dtype)
weight_orig = np.random.randint(
-128, 128, size=(channel, channel)).astype(weight.dtype)
data_packed = data_orig.reshape(
batch_size // env.BATCH, env.BATCH,
channel // env.BLOCK_IN, env.BLOCK_IN).transpose((0, 2, 1, 3))
weight_packed = weight_orig.reshape(
channel // env.BLOCK_OUT, env.BLOCK_OUT,
channel // env.BLOCK_IN, env.BLOCK_IN).transpose((0, 2, 1, 3))
res_np = np.zeros(res_shape).astype(res.dtype)
data_arr = tvm.nd.array(data_packed, ctx)
weight_arr = tvm.nd.array(weight_packed, ctx)
res_arr = tvm.nd.array(res_np, ctx)
res_ref = np.zeros(res_shape).astype(env.acc_dtype)
for b in range(batch_size // env.BATCH):
for i in range(channel // env.BLOCK_OUT):
for j in range(channel // env.BLOCK_IN):
res_ref[b,i,:] += np.dot(data_packed[b,j,:].astype(env.acc_dtype),
weight_packed[i,j].T.astype(env.acc_dtype))
res_ref = np.right_shift(res_ref, 8)
res_ref = np.clip(res_ref, 0, (1<<(env.INP_WIDTH-1))-1).astype(res.dtype)
time_f = f.time_evaluator("gemm", ctx, number=20)
cost = time_f(data_arr, weight_arr, res_arr)
res_unpack = res_arr.asnumpy().reshape(batch_size // env.BATCH,
channel // env.BLOCK_OUT,
env.BATCH,
env.BLOCK_OUT)
if check_correctness:
tvm.testing.assert_allclose(res_unpack, res_ref)
return cost
def run_schedule(load_inp,
load_wgt,
gemm,
alu,
store_out,
print_ir,
check_correctness):
s = tvm.create_schedule(res.op)
s[data_buf].set_scope(env.inp_scope)
s[weight_buf].set_scope(env.wgt_scope)
s[res_gem].set_scope(env.acc_scope)
s[res_shf].set_scope(env.acc_scope)
s[res_min].set_scope(env.acc_scope)
s[res_max].set_scope(env.acc_scope)
if block:
bblock = block // env.BATCH
iblock = block // env.BLOCK_IN
oblock = block // env.BLOCK_OUT
xbo, xco, xbi, xci = s[res].op.axis
xb1, xco1, xb2, xco2 = s[res].tile(xbo, xco, bblock, oblock)
store_pt = xb2
s[res_gem].compute_at(s[res], xco1)
s[res_shf].compute_at(s[res], xco1)
s[res_min].compute_at(s[res], xco1)
s[res_max].compute_at(s[res], xco1)
xbo, xco, xbi, xci = s[res_gem].op.axis
# Compute one line at a time
ko1, ko2 = s[res_gem].split(ko, iblock)
s[res_gem].reorder(ko1, ko2, xbo, xco, xbi, xci, ki)
s[data_buf].compute_at(s[res_gem], ko1)
s[weight_buf].compute_at(s[res_gem], ko1)
# Use VTA instructions
s[data_buf].pragma(s[data_buf].op.axis[0], load_inp)
s[weight_buf].pragma(s[weight_buf].op.axis[0], load_wgt)
s[res_gem].tensorize(xbi, gemm)
s[res_shf].pragma(s[res_shf].op.axis[0], alu)
s[res_min].pragma(s[res_min].op.axis[0], alu)
s[res_max].pragma(s[res_max].op.axis[0], alu)
s[res].pragma(store_pt, store_out)
else:
xbo, xco, xbi, xci = s[res_gem].op.axis
s[res_gem].reorder(ko, xbo, xco, xbi, xci, ki)
# Use VTA instructions
s[data_buf].pragma(s[data_buf].op.axis[0], load_inp)
s[weight_buf].pragma(s[weight_buf].op.axis[0], load_wgt)
s[res_gem].tensorize(xbi, gemm)
s[res_shf].pragma(s[res_shf].op.axis[0], alu)
s[res_min].pragma(s[res_min].op.axis[0], alu)
s[res_max].pragma(s[res_max].op.axis[0], alu)
s[res].pragma(s[res].op.axis[0], store_out)
if print_ir:
print(tvm.lower(s, [data, weight, res], simple_mode=True))
return verify(s, check_correctness)
def gemm_normal(print_ir):
mock = env.mock
print("----- GEMM GOPS End-to-End Test-------")
def run_test(header, print_ir, check_correctness):
cost = run_schedule(
env.dma_copy, env.dma_copy, env.gemm, env.alu, env.dma_copy,
print_ir, check_correctness)
gops = (num_ops / cost.mean) / float(10 ** 9)
print(header)
print("\tTime cost = %g sec/op, %g GOPS" % (cost.mean, gops))
with vta.build_config():
run_test("NORMAL", print_ir, True)
def gemm_unittest(print_ir):
mock = env.mock
print("----- GEMM Unit Test-------")
def run_test(header, print_ir):
cost = run_schedule(
mock.dma_copy, mock.dma_copy, env.gemm, mock.alu, mock.dma_copy,
print_ir, False)
gops = (num_ops / cost.mean) / float(10 ** 9)
print(header)
print("\tTime cost = %g sec/op, %g GOPS" % (cost.mean, gops))
with vta.build_config():
run_test("NORMAL", print_ir)
def alu_unittest(print_ir):
mock = env.mock
print("----- ALU Unit Test-------")
def run_test(header, print_ir):
cost = run_schedule(
mock.dma_copy, mock.dma_copy, mock.gemm, env.alu, mock.dma_copy,
print_ir, False)
gops = (num_ops / cost.mean) / float(10 ** 9)
print(header)
print("\tTime cost = %g sec/op, %g GOPS" % (cost.mean, gops))
with vta.build_config():
run_test("NORMAL", print_ir)
print("")
def load_inp_unittest(print_ir):
mock = env.mock
print("----- LoadInp Unit Test-------")
def run_test(header, print_ir):
cost = run_schedule(
env.dma_copy, mock.dma_copy, mock.gemm, mock.alu, mock.dma_copy, print_ir, False)
gops = (num_ops / cost.mean) / float(10 ** 9)
bandwith = (batch_size * channel * env.INP_WIDTH / cost.mean) / float(10 ** 9)
print(header)
print("\tTime cost = %g sec/op, %g GOPS, bandwidth=%g Gbits" % (
cost.mean, gops, bandwith))
with vta.build_config():
run_test("NORMAL", print_ir)
print("")
def load_wgt_unittest(print_ir):
mock = env.mock
print("----- LoadWgt Unit Test-------")
def run_test(header, print_ir):
cost = run_schedule(
mock.dma_copy, env.dma_copy, mock.gemm, mock.alu, mock.dma_copy, print_ir, False)
gops = (num_ops / cost.mean) / float(10 ** 9)
bandwith = (channel * channel * env.WGT_WIDTH / cost.mean) / float(10 ** 9)
print(header)
print("\tTime cost = %g sec/op, %g GOPS, bandwidth=%g Gbits" % (
cost.mean, gops, bandwith))
with vta.build_config():
run_test("NORMAL", print_ir)
print("")
def store_out_unittest(print_ir):
mock = env.mock
print("----- StoreOut Unit Test-------")
def run_test(header, print_ir):
cost = run_schedule(
mock.dma_copy, mock.dma_copy, mock.gemm, mock.alu, env.dma_copy,
print_ir, False)
gops = (num_ops / cost.mean) / float(10 ** 9)
bandwith = (batch_size * channel * env.OUT_WIDTH / cost.mean) / float(10 ** 9)
print(header)
print("\tTime cost = %g sec/op, %g GOPS, bandwidth=%g Gbits" % (
cost.mean, gops, bandwith))
with vta.build_config():
run_test("NORMAL", print_ir)
print("")
gemm_normal(False)
gemm_unittest(False)
alu_unittest(False)
def _run(env, remote):
print("========GEMM 128=========")
run_gemm_packed(env, remote, 128, 128, 128)
vta.testing.run(_run)
if __name__ == "__main__":
test_gemm()
| [
"tqchen@users.noreply.github.com"
] | tqchen@users.noreply.github.com |
21bb49e6c9f2b5c2de5627e34b82279287fe53c6 | ba0faaf6cf21e7946f3d2059d492d45fd3b0b43a | /tests/test_ul.py | f90239536d70e982da1e427b0de21426f1a1c0ce | [] | no_license | pmiddend/sjmanager | 2a632bd148e6f9d849e70036de3db2bebdcfead6 | a81ec7b2ef80a1f94e10e3889b3e51784dff558b | refs/heads/master | 2020-12-24T17:53:52.227304 | 2013-05-27T10:25:14 | 2013-05-27T10:25:14 | 2,422,061 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,591 | py | import sjmanager.config_directory
import sjmanager.downloader.factory
import sjmanager.downloader.meter
import sjmanager.fsutil
import sjmanager.ul
import warnings
import hashlib
import configparser
import unittest
test_link_valid_short = 'http://ul.to/h61x7cor'
test_link_valid = 'http://uploaded.net/file/h61x7cor'
test_link_md5 = 'd5ee608f0427b2c3ed07dc80cf4a0328'
class TestDownloader(unittest.TestCase):
def setUp(self):
warnings.simplefilter('ignore',category=ResourceWarning)
self.config_file = configparser.ConfigParser()
self.config_file.read(
str(
sjmanager.config_directory.config_directory() / "config.ini"))
self.downloader = sjmanager.downloader.factory.create(
self.config_file)
self.ul = sjmanager.ul.Account(
('login',self.config_file.get('ul','cookie')),
self.downloader)
def test_make_proper_link(self):
test_link = 'http://uploaded.net/file/...'
result = self.ul.make_proper_link(test_link)
self.assertEqual(test_link,result)
test_link_invalid = 'http://test.com/lol'
with self.assertRaises(Exception):
self.ul.make_proper_link(test_link_invalid)
result = self.ul.make_proper_link(test_link_valid_short)
self.assertEqual(result,test_link_valid)
def test_download(self):
result = self.ul.download(
url = test_link_valid,
percent_callback = sjmanager.downloader.meter.Null("Downloading..."))
md5 = hashlib.md5()
md5.update(
result.read())
self.assertEqual(md5.hexdigest(),test_link_md5)
if __name__ == '__main__':
unittest.main()
| [
"pmidden@gmx.net"
] | pmidden@gmx.net |
3ec2e2dd3b709a107fda00833615406e4642a963 | 1bb42bac177fb4e979faa441363c27cb636a43aa | /dual_encoder/model_utils.py | 691253213276f6be9ac1bd05a51079a61df3c007 | [
"BSD-3-Clause",
"Apache-2.0"
] | permissive | google-research/federated | a6040e80fa0fbf533e0d665c66a9bc549d208b3d | 329e60fa56b87f691303638ceb9dfa1fc5083953 | refs/heads/master | 2023-08-28T13:10:10.885505 | 2023-08-22T23:06:08 | 2023-08-22T23:06:40 | 295,559,343 | 595 | 187 | Apache-2.0 | 2022-05-12T08:42:53 | 2020-09-14T23:09:07 | Python | UTF-8 | Python | false | false | 5,775 | py | # Copyright 2021, Google LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utilities for dual encoder model."""
from typing import Callable, Optional
import tensorflow as tf
NormalizationFnType = Optional[Callable[[tf.Tensor], tf.Tensor]]
l2_normalize_fn = lambda x: tf.math.l2_normalize(x, axis=-1)
@tf.function
def get_predicted_embeddings(y_pred, y_true, normalization_fn=l2_normalize_fn):
"""Helper for retrieving optionally normalized embeddings from y_pred.
Args:
y_pred: dual encoder model output. If the model outputs embeddings, `y_pred`
is concatenate(context_embedding, full vocab label embeddings) with shape
[batch_size + label_embedding_vocab_size, final_embedding_dim]. If the
model outputs similarities, `y_pred` is the similarity matrix with shape
[batch_size, label_embedding_vocab_size] between context and full vocab
label embeddings.
y_true: the true labels with shape [batch_size, 1].
normalization_fn: The normalization function to be applied to both context
and label embeddings.
Returns:
Optionally normalized context and label embeddings.
"""
batch_size = tf.shape(y_true)[0]
context_embedding, label_embedding = y_pred[:batch_size], y_pred[batch_size:]
# Optionally apply nomalization_fn to both context and label embeddings,
# computing the cosine similarity rather than the dot product.
if normalization_fn is not None:
context_embedding = normalization_fn(context_embedding)
label_embedding = normalization_fn(label_embedding)
return context_embedding, label_embedding
@tf.function
def get_embeddings_and_similarities(y_pred,
y_true,
expect_embeddings=True,
normalization_fn=l2_normalize_fn):
"""Retrieving the context and label embeddings and the similarities between them.
Args:
y_pred: Dual encoder model output. When expect_embeddings is true, `y_pred`
is concatenate(context_embedding, full vocab label embeddings) with shape
[batch_size + label_embedding_vocab_size, final_embedding_dim]. When
`expect_embeddings` is False, `y_pred` is the similarity matrix with shape
[batch_size, label_embedding_vocab_size] between context and full vocab
label embeddings.
y_true: The true labels with shape [batch_size, 1].
expect_embeddings: If `expect_embeddings` is True, `y_pred` is the context
and label embeddings. Otherwise, the y_pred is the batch or global
similarities.
normalization_fn: The normalization function to be applied to both context
and label embeddings.
Returns:
The optionally normalized context and label embeddings as well as the
similarities between them. The context and label embeddings are `None` if
`expect_embeddings` is False.
"""
if expect_embeddings:
context_embedding, label_embedding = (
get_predicted_embeddings(y_pred, y_true, normalization_fn))
# similarities[i][j] is the dot product of the ith context embedding and
# the jth label embedding in a batch.
similarities = tf.matmul(
context_embedding, label_embedding, transpose_b=True)
else:
context_embedding = label_embedding = None
similarities = y_pred
return context_embedding, label_embedding, similarities
class Similarities(tf.keras.layers.Layer):
"""Keras layer for computing similarities over context/label embeddings.
Takes in context embeddings within a batch and label embeddings to computes a
similarities matrix where similarities[i][j] is the dot product similarity
between context embedding i and label embedding j.
If label embeddings are those within the same batch, this function computes
the batch similarity.
If label embeddings are those for the full vocabulary, this function computes
the global similarity.
Optionally apply normalization to the embeddings, computing cosine similarity
instead of dot product.
"""
def __init__(self,
normalization_fn: NormalizationFnType = l2_normalize_fn,
**kwargs):
super().__init__(**kwargs)
self.normalization_fn = normalization_fn
def call(self, inputs):
if len(inputs) != 2:
raise ValueError(
'Exactly two inputs must be provided, context embeddings and label '
'embeddings, but %d inputs were provided.' % len(inputs))
context_embedding, label_embedding = inputs
# Optionally apply normalization to both context and label embeddings,
# computing the cosine similarity rather than the dot product.
if self.normalization_fn is not None:
context_embedding = self.normalization_fn(context_embedding)
label_embedding = self.normalization_fn(label_embedding)
# similarities[i][j] is the dot product of the ith context embedding and
# the jth label embedding in a batch.
similarities = tf.matmul(
context_embedding, label_embedding, transpose_b=True)
return similarities
def get_config(self):
config = super().get_config()
config.update({
'normalization_fn': self.normalization_fn,
})
return config
NORMALIZATION_FN_MAP = {
'none': None,
'l2_normalize': l2_normalize_fn,
}
| [
"copybara-worker@google.com"
] | copybara-worker@google.com |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.