index int64 0 1,000k | blob_id stringlengths 40 40 | code stringlengths 7 10.4M |
|---|---|---|
23,900 | 633eda083c22bf2daf0ab046db75d90663cedb01 | # pytype generates member variable annotations as comments, check that fix_annotate ignores them
# properly
class C:
    """Minimal fixture class: stores ``x + 1`` on the instance."""

    def __init__(self, x):
        # Same value as the original ``1 + x``, written the other way round.
        self.y = x + 1
|
23,901 | 2702d2a8149bd27f1661007ffd9be28cb0e2dabd | """
Title: Jewels and Stones
You're given strings J representing the types of stones that are
jewels, and S representing the stones you have. Each character
in S is a type of stone you have. You want to know how many of
the stones you have are also jewels.
The letters in J are guaranteed distinct, and all characters
in J and S are letters. Letters are case sensitive, so "a" is
considered a different type of stone from "A".
Example 1:
Input: J = "aA", S = "aAAbbbb"
Output: 3
Example 2:
Input: J = "z", S = "ZZ"
Output: 0
Note:
1) S and J will consist of letters and have length at most 50.
2) The characters in J are distinct.
"""
class Solution:
    def numJewelsInStones(self, J: str, S: str) -> int:
        """Count how many stones in S are jewels (characters listed in J).

        J: distinct jewel characters (case sensitive).
        S: stones held; each character is one stone.
        Returns 0 when either input is empty or None (original guard kept).
        """
        if not (J and S):
            return 0
        # Set gives O(1) membership tests; the original's frequency map was
        # redundant (it already contained only jewels, then re-checked them).
        jewel_set = set(J)
        return sum(1 for stone in S if stone in jewel_set)
def get_test_case_1() -> tuple:
    """Edge case: both J and S are None (expected jewel count: 0)."""
    # Annotation fixed: ``(str, str)`` is a tuple of types, not a valid type.
    return None, None
def get_test_case_2() -> tuple:
    """Edge case: J is None, S is empty (expected jewel count: 0)."""
    # Annotation fixed: ``(str, str)`` is a tuple of types, not a valid type.
    return None, ""
def get_test_case_3() -> tuple:
    """Edge case: J is empty, S is None (expected jewel count: 0)."""
    # Annotation fixed: ``(str, str)`` is a tuple of types, not a valid type.
    return "", None
def get_test_case_4() -> tuple:
    """Edge case: both J and S are empty strings (expected jewel count: 0)."""
    # Annotation fixed: ``(str, str)`` is a tuple of types, not a valid type.
    return "", ""
def get_test_case_5() -> tuple:
    """LeetCode example 1: expected jewel count is 3."""
    # Annotation fixed: ``(str, str)`` is a tuple of types, not a valid type.
    return "aA", "aAAbbbb"
def get_test_case_6() -> tuple:
    """LeetCode example 2 (case-sensitive mismatch): expected count is 0."""
    # Annotation fixed: ``(str, str)`` is a tuple of types, not a valid type.
    return "z", "ZZ"
if __name__ == "__main__":
    # Manual test driver: uncomment exactly one get_test_case_* line below
    # to choose which scenario runs.
    solution = Solution()
    #J, S = get_test_case_1()
    #J, S = get_test_case_2()
    #J, S = get_test_case_3()
    #J, S = get_test_case_4()
    J, S = get_test_case_5()
    #J, S = get_test_case_6()
    print("\n J: ", J)
    print(" S: ", S)
    # Expected output for test case 5 ("aA" / "aAAbbbb"): 3
    number_of_jewels = solution.numJewelsInStones(J, S)
    print("\n number_of_jewels: ", number_of_jewels)
|
23,902 | 417babac39d6a826380153dd2bdb4013f78285d6 | from __future__ import absolute_import
from __future__ import division
__all__ = ['fpb']
import torch
from torch import nn
from torch.nn import functional as F
import torch.utils.model_zoo as model_zoo
from torchvision.models.resnet import resnet50, resnet101, Bottleneck
from .nn_utils import *
from .pc import *
class FPNModule(nn.Module):
    """Two-level feature-fusion block used inside FPN.

    Takes a list of two feature maps (same channel count) and cross-fuses
    them top-down and bottom-up with 3x3 convs, residual downsample
    shortcuts from clones of the inputs, and a PC attention module.
    """

    def __init__(self, num_layers, num_channels):
        super(FPNModule, self).__init__()
        self.num_layers = num_layers
        self.num_channels = num_channels
        self.eps = 0.0001  # NOTE(review): never read in this class — confirm.
        self.convs = nn.ModuleList()
        self.downsamples = nn.ModuleList()
        # Four 3x3 conv-bn-relu fusion blocks, used in a fixed order in forward().
        for _ in range(4):
            conv = Conv_Bn_Relu(self.num_channels, self.num_channels, k=3, p=1)
            self.convs.append(conv)
        # Two 1x1 shortcut projections applied to clones of the raw inputs.
        for _ in range(2):
            downsample = nn.Sequential(nn.Conv2d(self.num_channels, self.num_channels, 1, bias=False), nn.BatchNorm2d(self.num_channels), nn.ReLU(inplace=True))
            self.downsamples.append(downsample)
        self.pc1 = PC_Module(self.num_channels, dropout=True)
        self._init_params()

    def _init_params(self):
        # Only the shortcut branches get explicit init (init_struct from nn_utils).
        for downsample in self.downsamples:
            init_struct(downsample)
        return

    def forward(self, x):
        # NOTE(review): ``y = x`` aliases the caller's list, so x[0]/x[1] are
        # overwritten in place below; only the tensors are cloned, not the list.
        reg_feats = []
        y = x
        x_clone = []
        for t in x:
            x_clone.append(t.clone())
        # Keep the raw top-level feature for regularization output.
        reg_feats.append(y[0])
        y[0] = self.convs[0](y[0])
        # Attention-refined second-level feature, also exported for regularization.
        reg_feat = self.pc1(y[1])
        reg_feats.append(reg_feat)
        # Top-down fusion: upsample level 0 into level 1.
        y[1] = self.convs[1](reg_feat+F.interpolate(y[0], scale_factor=2, mode='nearest'))
        y[1] = self.convs[2](y[1])+self.downsamples[0](x_clone[1])
        # Bottom-up fusion: pool level 1 back into level 0, plus input shortcut.
        y[0] = self.convs[3](y[0]+F.max_pool2d(y[1], kernel_size=2))+self.downsamples[1](x_clone[0])
        return y, reg_feats
class FPN(nn.Module):
    """Feature-pyramid neck: lateral 1x1 convs + one FPNModule + output proj.

    forward() consumes a list of backbone stage outputs (deepest first) and
    returns the fused top-level map (projected back to in_channels[1]) plus
    the regularization features from the FPNModule.
    """

    def __init__(self, num_layers, in_channels):
        super(FPN, self).__init__()
        self.num_layers = num_layers
        self.in_channels = in_channels
        self.num_neck_channel = 256
        self.lateral_convs = nn.ModuleList()
        # Lateral 1x1 convs start at index 1: stage 0 (layer4) is skipped here.
        for i in range(1, self.num_layers+1):
            conv = Conv_Bn_Relu(in_channels[i], self.num_neck_channel, k=1)
            self.lateral_convs.append(conv)
        self.fpn_module1 = FPNModule(self.num_layers, self.num_neck_channel)
        # Project fused features back to in_channels[1]; relu applied separately.
        self.conv = Conv_Bn_Relu(self.num_neck_channel, self.in_channels[1], k=1, activation_cfg=False)
        self.relu = nn.ReLU(inplace=True)
        self._init_params()

    def _init_params(self):
        # Intentionally empty: submodules perform their own initialization.
        return

    def forward(self, x):
        # Apply lateral convs to stages 1..num_layers of the backbone list.
        y = []
        for i in range(self.num_layers):
            y.append(self.lateral_convs[i](x[i+1]))
        y, reg_feat = self.fpn_module1(y)
        y = self.conv(y[0])#
        y = self.relu(y)
        return y, reg_feat
class FPB(nn.Module):
    """ResNet-50 backbone with an FPN neck and part-based classifiers.

    Training mode returns logits for a global (layer4) classifier plus one
    classifier per horizontal part; eval mode returns a single concatenated
    L2-normalized feature vector.
    """

    def __init__(self, num_classes, loss=None, **kwargs):
        super(FPB, self).__init__()
        resnet_ = resnet50(pretrained=True)
        self.num_parts = 3      # horizontal stripes pooled from the neck output
        self.branch_layers = 2  # number of backbone stages fed into the FPN
        self.loss = loss
        # Stem and stages 1-3 reused directly from torchvision's resnet50.
        self.layer0 = nn.Sequential(resnet_.conv1, resnet_.bn1, resnet_.relu,
                                  resnet_.maxpool)
        self.layer1 = resnet_.layer1
        self.layer2 = resnet_.layer2
        self.pc1 = PC_Module(512, dropout=True)
        self.layer3 = resnet_.layer3
        # layer4 is rebuilt from fresh Bottlenecks (presumably to alter its
        # stride/downsampling behavior — confirm against Bottleneck defaults)
        # and then loaded with the pretrained layer4 weights.
        layer4 = nn.Sequential(
            Bottleneck(1024,
                       512,
                       downsample=nn.Sequential(
                           nn.Conv2d(1024, 2048, 1, bias=False),
                           nn.BatchNorm2d(2048))), Bottleneck(2048, 512),
            Bottleneck(2048, 512))
        layer4.load_state_dict(resnet_.layer4.state_dict())
        self.layer4 = layer4
        self.global_avgpool = nn.AdaptiveAvgPool2d((1, 1))
        self.bn_l4 = nn.BatchNorm2d(2048)
        self.classifier_l4 = nn.Linear(2048, num_classes)
        # Stage channel counts, deepest first (matches featuremaps() order).
        self.in_channels = [2048, 1024, 512, 256]
        self.neck = FPN(self.branch_layers, self.in_channels)
        self.part_pools = nn.AdaptiveAvgPool2d((self.num_parts, 1))
        self.dim_reds = DimReduceLayer(self.in_channels[1], 256)
        self.classifiers = nn.ModuleList([nn.Linear(256, num_classes) for _ in range(self.num_parts)])
        self._init_params()

    def _init_params(self):
        # BN + classifier init: weights N(0, 0.01), biases zeroed.
        init_bn(self.bn_l4)
        nn.init.normal_(self.classifier_l4.weight, 0, 0.01)
        if self.classifier_l4.bias is not None:
            nn.init.constant_(self.classifier_l4.bias, 0)
        init_struct(self.dim_reds)
        for c in self.classifiers:
            nn.init.normal_(c.weight, 0, 0.01)
            if c.bias is not None:
                nn.init.constant_(c.bias, 0)

    def featuremaps(self, x):
        # Run the backbone; return stage outputs deepest-first, plus the
        # PC-attended layer2 output (which feeds layer3).
        fs = []
        y_l0 = self.layer0(x)
        y_l1 = self.layer1(y_l0)
        y_l2 = self.layer2(y_l1)
        y_l2_1 = self.pc1(y_l2)
        y_l3 = self.layer3(y_l2_1)
        y_l4 = self.layer4(y_l3)
        fs.append(y_l4)
        fs.append(y_l3)
        fs.append(y_l2)
        fs.append(y_l1)
        return fs, y_l2_1

    def cross_ofp(self, x):
        # Pool the second regularization map to match the first, then
        # concatenate the list along channels.  Mutates x[1] in place.
        x[1] = F.max_pool2d(x[1], kernel_size=2)
        y = torch.cat(x, 1)
        return y

    def forward(self, x):
        bs = x.size(0)
        reg_feat_re = []
        # NOTE(review): y_l2_1 is returned by featuremaps() but unused here.
        fs, y_l2_1 = self.featuremaps(x)
        f_branch, reg_feats = self.neck(fs)
        f_l4_train = self.global_avgpool(fs[0])
        f_parts = self.part_pools(f_branch)
        f_l4 = self.bn_l4(f_l4_train).view(bs, -1)
        if not self.training:
            # Eval: single L2-normalized descriptor (global + parts).
            f = []
            f.append(F.normalize(f_l4, p=2, dim=1))
            f.append(F.normalize(f_parts, p=2, dim=1).view(bs, -1))
            f = torch.cat(f, 1)
            return f
        # Training: global logits followed by one logit set per part stripe.
        y = []
        y_l4 = self.classifier_l4(f_l4)
        y.append(y_l4)
        f_short = self.dim_reds(f_parts)
        for j in range(self.num_parts):
            f_j = f_short[:, :, j, :].view(bs, -1)
            y_j = self.classifiers[j](f_j)
            y.append(y_j)
        reg_feat_re.append(self.cross_ofp(reg_feats))
        if self.loss == 'softmax':
            return y
        elif self.loss == 'engine_FPB':
            # Also return un-BN'd global + part features and the fused
            # regularization features for the custom training engine.
            f = []
            f.append(F.normalize(f_l4_train, p=2, dim=1).view(bs, -1))
            f.append(F.normalize(f_parts, p=2, dim=1).view(bs, -1))
            f = torch.cat(f, 1)
            return y, f, reg_feat_re
        else:
            raise KeyError("Unsupported loss: {}".format(self.loss))
def fpb(num_classes, loss='softmax', pretrained=True, **kwargs):
    """Factory for the FPB model.

    NOTE(review): ``pretrained`` is accepted but never forwarded — FPB
    always loads ImageNet weights internally; confirm this is intentional.
    """
    return FPB(num_classes=num_classes, loss=loss, **kwargs)
|
23,903 | f01679dab849548187d292a668c74b6bb312f951 |
import unittest
import jump_game
class TestJump_Game(unittest.TestCase):
    """Behavioral tests for jump_game.Solution.canJump."""

    def test_canJump(self):
        solver = jump_game.Solution()
        # (board, expected reachability of the last index)
        cases = [
            ([1, 2, 0, 1], True),
            ([2, 3, 1, 1, 4], True),
            ([3, 2, 1, 0, 4], False),
            ([0], True),
            ([0, 0], False),
            ([0], True),
            ([0, 1], False),
            ([1, 0, 2, 1], False),
            ([2, 0, 0, 1], False),
            ([2, 0, 1, 0], True),
        ]
        for board, expected in cases:
            self.assertEqual(solver.canJump(board), expected)
if __name__ == '__main__':
    # Run the test suite when executed directly.
    unittest.main()
|
23,904 | 630d5733bed84c060c3cdf2002f9378e5c1bb528 | """
Shim to maintain backwards compatibility with old IPython.qt imports.
"""
# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.
from __future__ import print_function
# Stdlib
import sys
import types
from warnings import warn
# No warning category is given, so this surfaces as a UserWarning (shown by
# default) — presumably intentional for a user-facing deprecation shim.
warn("The `IPython.qt` package has been deprecated. "
     "You should import from jupyter_qtconsole instead.")
from IPython.utils.shimmodule import ShimModule
# Unconditionally insert the shim into sys.modules so that further import calls
# trigger the custom attribute access above
sys.modules['IPython.qt'] = ShimModule('qt', mirror='jupyter_qtconsole')
|
23,905 | ae885dd791c8ca702d7e9f34fc7e56931a343764 | #!usr/local/bin
# returns a set with best visisting times
import heapdict
def best_visiting_times(I):
    """Greedy sweep over intervals keyed by their end times.

    Repeatedly pops the interval with the smallest end time, records that
    time, and discards every interval still "open" at it
    (start <= t <= end).  Returns the list of recorded times — a set of
    visiting times covering every interval in I.
    """
    # heapdict keeps (interval -> end-time) pairs ordered by value.
    h = heapdict.heapdict()
    for i in I:
        # Key is the interval itself; priority is its end time.
        h[(i[0], i[1])] = i[1]
    result = []
    while len(h) > 0:
        # Smallest remaining end time.
        (interval, end) = h.popitem()
        result.append(end)
        # Drop every interval that contains this time point.
        for i in I:
            if i[0] <= end and i[1] >= end:
                # BUG FIX: the original used ``if h.get(key):`` which treats
                # a falsy priority (an end time of 0) as "absent"; test
                # membership instead so such intervals are removed too.
                if (i[0], i[1]) in h:
                    h.pop((i[0], i[1]))
    return result
if __name__=="__main__":
    # NOTE(review): the first I is immediately overwritten by the second
    # assignment — presumably an older test case left in place.
    I = [[0,3], [2,6],[3,4],[6,9]]
    I = [[1,2], [2,3], [3,4],[2,3],[3,4],[4,5]]
    # Python 2 print statement — this snippet targets Python 2.
    print best_visiting_times(I)
|
23,906 | 7d647eaa17ff98da59e0e2106d042f4c180af9bd | from django.shortcuts import render, redirect
from auth_access.helpers import login_required
@login_required
def index(request):
    # Frontend entry point; login_required redirects anonymous users first.
    return render(request, 'frontend/index.html')
|
23,907 | dfa8bc5927fd5e32d7194432e38397749c52bd9d | # -*- coding: utf-8 -*-
"""
Created on Tue Oct 16 16:42:19 2018
@author: huashuo
"""
#要调用一个函数,需要知道函数的名称和参数
#查看abs函数的帮助信息
help(abs)
# =============================================================================
# #传入的参数数量不对,会报错,并提示abs只有一个参数
# abs(2,4)#TypeError: abs() takes exactly one argument (2 given)
# =============================================================================
# =============================================================================
# #如果传入的参数数量是对的,但参数类型不能被函数所接受,也会报TypeError的错误
# abs('c')#TypeError: bad operand type for abs(): 'str'
# =============================================================================
# =============================================================================
# #函数max()可以接收任意多个参数,并返回最大的那个
# print(max(1,3,9,-1,-5))
#
# =============================================================================
# =============================================================================
# #数据类型转换函数
# print(int('123'))
# #print(int('a'))
# print(int(12.38))
# print(float(12))
# print(str(12.34))
# print(str(100))
# print(bool(''))
# print(bool('a'))
# =============================================================================
# =============================================================================
# #函数名其实就是指向一个函数对象的引用,完全可以把函数名赋给一个变量
# #相当于给这个函数起了一个“别名”:
# a=abs
# print(a(-1.3))
# =============================================================================
# =============================================================================
# #练习
# #请利用Python内置的hex()函数把一个整数转换成十六进制表示的字符串:
#
# n1=255
# n2=1000
#
# print(str(hex(255)))
# print(str(hex(1000)))
# =============================================================================
#在Python中,定义一个函数要使用def语句,依次写出函数名、括号、括号中的参数和冒号:,
#然后,在缩进块中编写函数体,函数的返回值用return语句返回。
#自定义一个求绝对值的my_abs函数
def my_abs(x):
    """Return the absolute value of x (tutorial re-implementation of abs)."""
    # Conditional expression instead of an if/else statement; note that for
    # x == 0 the else branch yields -0, which equals 0, as in the original.
    return x if x > 0 else -x
print(my_abs(-9))
# =============================================================================
# #定义空函数
# def pop():
# pass#pass语句什么都不做,但可以用来做占位符
#
# #pass还可以用在其他语句里
# age=20
# if age>=18:
# pass#不用pass会有语法错误,SyntaxError: unexpected EOF while parsing
# =============================================================================
# =============================================================================
# #函数参数个数不对
# #print(my_abs(-10,-1))#TypeError: my_abs() takes 1 positional argument but 2 were given
#
# #参数类型不对,Python解释器就无法帮我们检查
# #print(my_abs('A'))#TypeError: '>' not supported between instances of 'str' and 'int'
# #print(abs('A'))#TypeError: bad operand type for abs(): 'str'
# =============================================================================
# =============================================================================
# #修改一下my_abs的定义,对参数类型做检查,只允许整数和浮点数类型的参数。
# #数据类型检查可以用内置函数isinstance()实现:
# def my_abs2(x):
# if not isinstance(x,(int,float)):
# raise TypeError('bad operand type')
# if x>=0:
# return x
# else:
# return -x
#
# print(my_abs2('A'))
# =============================================================================
# =============================================================================
# #返回多个值
# import math
#
# def move(x,y,step,angle=0):#有0无0结果无影响
# nx=x+step*math.cos(angle)
# ny=y-step*math.sin(angle)
#
# x,y=move(100,100,60,math.pi/6)
# print(x,y)
#
# #但其实这只是一种假象,Python函数返回的仍然是单一值
# r=move(100,100,60,math.pi/6)
# print(r)
# =============================================================================
#练习
#请定义一个函数quadratic(a, b, c),接收3个参数,返回一元二次方程:
#ax2 + bx + c = 0的两个解。
#提示:计算平方根可以调用math.sqrt()函数:
import math
def quadratic(a, b, c):
    """Solve a*x**2 + b*x + c = 0 and return the two real roots.

    Raises TypeError for non-numeric coefficients.  When the discriminant
    is negative, returns the original (Chinese) "no real roots" message
    string instead of roots.
    """
    for coeff in (a, b, c):
        if not isinstance(coeff, (int, float)):
            raise TypeError('bad operand type')
    disc = b ** 2 - 4 * a * c
    # Guard clause: no real roots when the discriminant is negative.
    if disc < 0:
        return '该方程没有实根'
    root = math.sqrt(disc)
    return (-b + root) / (2 * a), (-b - root) / (2 * a)
# Demo calls: two cases with real roots and one with no real roots.
x=quadratic(2,3,1)
print(x)
x=quadratic(1,3,-4)
print(x)
x=quadratic(1,1,1)
print(x)
print('quadratic(2,3,1)=',quadratic(2,3,1))
|
23,908 | 77ab26bb05897cec74d82737f912c2f5f184b2b9 | the_program_memory={"こんにちは、ここはpython入門においての進捗をつらつらと書く場所だ",
"もしかするとレポートだとかバイトだとか、その他面倒な物事のせいで勉強が遅々として進まないということもあるのかもしれないが",
"今日、2021年4月24日、初めてページをめくった日から何日かかろうとも入門を終わらせて見せようという意志",
"そういったものを大事に持って、あんまりTwitterとかはしないようにして進めて言ってくれたらなと思う",
"2021年4月24日:P8まで、特に難しいところはないがやはりpythonは便利だと思わされた",
"2021年4月28日:P28まで、第一章が終わった、特に難しいところはなかったが深く心に入れるべきだと思う箇所が多々あった"}
print(the_program_memory) |
23,909 | 31a3570f7ef07eecd02bb375aa86fa2230c41629 |
from os.path import abspath, basename, dirname, exists, join, relpath
import pickle
from wmilearn import logger
from wmilearn.problem import Problem
class Experiment:
    """A pickled bundle of Problem instances plus optional metadata.

    On disk an experiment is an index file (a pickled dict holding
    relative problem paths and metadata) stored next to one file per
    problem.
    """

    PROBLEM_TEMPL = "{}.problem_{}"

    def __init__(self, problems, metadata=None):
        self.problems = problems
        self.metadata = metadata

    def dump(self, experiment_path):
        """Write the index file and every problem next to experiment_path."""
        if exists(experiment_path):
            # Overwriting is allowed; just leave a trace in the log.
            logger.warning("File exists: {}".format(experiment_path))
        experiment_name = basename(experiment_path)
        folder = abspath(dirname(experiment_path))
        problem_paths = []
        for i, prob in enumerate(self.problems):
            filename = Experiment.PROBLEM_TEMPL.format(experiment_name, i)
            full_path = join(folder, filename)
            prob.dump(full_path)
            # Paths are stored relative to the index, so the folder can move.
            problem_paths.append(relpath(full_path, folder))
        index = {'problem_paths': problem_paths}
        if self.metadata is not None:
            index['metadata'] = self.metadata
        with open(experiment_path, 'wb') as f:
            pickle.dump(index, f)
        return experiment_path

    @staticmethod
    def read(path):
        """Load an experiment previously written by dump()."""
        with open(path, 'rb') as f:
            index = pickle.load(f)
        folder = abspath(dirname(path))
        problems = [Problem.read(join(folder, p))
                    for p in index['problem_paths']]
        return Experiment(problems, metadata=index.get('metadata'))
|
23,910 | 63ed99e5962dfb38591d0b8ac7a700745aa62ba9 | arr = [1,2,3,4,5,6,7,8,9]
ln = len(arr)
# print(arr[int(ln/2)])
class BinaryTree:
    """A bare binary-tree node: a payload plus left/right child links."""

    def __init__(self, data=None):
        # Payload may be None for a placeholder root; children start empty.
        self.data = data
        self.left = None
        self.right = None
def insert(root,newData):
    """Insert newData into the tree rooted at root; always returns 0.

    NOTE(review): this is NOT a correct BST insert.  When the proper side
    is occupied, the ``elif`` fallback places the node on the WRONG side
    (a smaller value can become a right child and vice versa), and equal
    values are silently dropped.  The demo prints at the bottom of this
    script depend on the exact (incorrect) tree shape this produces, so
    the behavior is documented here rather than fixed.
    """
    # ``== None`` should be ``is None``; kept as-is to preserve behavior bytes.
    if root.data == None:
        root.data = newData
    else:
        if root.data>newData:
            if root.left is None:
                a = BinaryTree(newData)
                root.left = a
            elif root.right is None:
                # BUG: smaller value stored as the RIGHT child.
                a = BinaryTree(newData)
                root.right = a
            else:
                insert(root.left,newData)
        if root.data<newData:
            if root.right is None:
                a = BinaryTree(newData)
                root.right = a
            elif root.left is None:
                # BUG: larger value stored as the LEFT child.
                a = BinaryTree(newData)
                root.left = a
            else:
                insert(root.right,newData)
    return 0
def display(root):
    """Print node payloads in in-order (left, node, right) sequence."""
    # Iterative traversal with an explicit stack instead of recursion.
    stack, node = [], root
    while stack or node is not None:
        while node is not None:
            stack.append(node)
            node = node.left
        node = stack.pop()
        print(node.data)
        node = node.right
root = BinaryTree()
# Insert the middle element (5) first so it becomes the root, then the rest.
insert(root,arr[int(ln/2)])
arr.remove(5)
for i in arr:
    insert(root,i)
# display(root)
# NOTE(review): these prints rely on the exact tree shape produced by the
# buggy insert() above; a correct BST insert would make some of these
# attribute accesses fail with AttributeError.
print(root.data)
print(root.left.data,root.right.data)
print(root.left.left.data,root.left.right.data,root.right.left.data,root.right.right.data)
23,911 | 6dc6961296ea80627e4a326c052ecec0ed381718 | __version__ = "0.1.0"
from sphinx_conda.directives.environment import (
collect_pages as environment_collect_pages,
)
from .domain import CondaDomain
from sphinx.application import Sphinx
def setup(app: Sphinx):
    """Sphinx extension entry point: register the conda domain and the
    html-collect-pages hook that emits environment pages."""
    app.add_domain(CondaDomain)
    app.connect("html-collect-pages", environment_collect_pages)
    # app.connect("object-description-transform", obj_transform)
|
23,912 | 0b152e6da7aaff1d4441f1af13ce39ccff50e53f | class Solution:
def topKFrequent(self, nums, k: int):
from collections import Counter
c = Counter(nums)
list_res = c.most_common(k)
res = []
for item in list_res:
res.append(item[0])
return res
if __name__ == '__main__':
    # Quick manual check: the two most frequent values of the sample list.
    print(Solution().topKFrequent([1, 2, 3, 4, 51, 2, 2], 2))
|
23,913 | 40926838d6fef44c8b1d3f033f7261f3834ced34 | # -*- coding: utf-8 -*-
"""
Created on Sun Jun 6 18:10:19 2021
@author: vijay
"""
prices = [13.49, 8.99, 4.99, 6.49, 20.49, 16.99, 3.99, 5.99, 11.99, 5.00, 10.00]
size = len(prices)
# Print every price strictly between 5 and 10 (both bounds exclusive).
# Chained comparison replaces the original nested ifs; iterating the list
# directly replaces the range(len(...)) index loop.
for price in prices:
    if 5 < price < 10:
        print(price)
23,914 | fb6c6893c756a93f1465249b45a7dadd7d35813d | # -*- coding: utf-8 -*-
import json
from flask import Flask, request, jsonify
import redis
import datetime
import random
import petname
import uuid
app = Flask(__name__)
@app.route('/animals', methods=['GET'])
def get_animals():
    # Return the full list of animals stored in redis as JSON.
    data = get_data()
    json_list = data['animals']
    return jsonify(json_list)
@app.route('/animals/head/<type_head>', methods=['GET'])
def get_animal_head(type_head):
    # Filter animals whose 'head' field equals the path parameter.
    test = get_data()
    json_list = test['animals']
    output = [x for x in json_list if x['head'] == type_head]
    return jsonify(output)
@app.route('/animals/legs/<num_legs>', methods=['GET'])
def get_animal_legs(num_legs):
    # Filter animals by leg count; the URL segment arrives as a string,
    # hence the int() cast (raises 500 on non-numeric input).
    test = get_data()
    json_list = test['animals']
    output = [x for x in json_list if x['legs'] == int(num_legs)]
    return jsonify(output)
# ROUTES FOR MIDTERM
# Query animals created within a date range.
@app.route('/animals/dates',methods=['GET'])
def get_dates():
    # NOTE(review): 'start'/'end' must include literal single quotes around
    # the timestamp (see the strptime format) — confirm against the client.
    start = request.args.get('start')
    startdate = datetime.datetime.strptime(start, "'%Y-%m-%d_%H:%M:%S.%f'")
    end = request.args.get('end')
    enddate = datetime.datetime.strptime(end, "'%Y-%m-%d_%H:%M:%S.%f'")
    test = get_data()
    # Keep animals whose created_on falls inside [startdate, enddate].
    return json.dumps([x for x in test['animals'] if (datetime.datetime.strptime( x['created_on'],'%Y-%m-%d %H:%M:%S.%f') >= startdate and datetime.datetime.strptime( x['created_on'], '%Y-%m-%d %H:%M:%S.%f')<= enddate ) ])
# selects a particular creature by its unique identifier
@app.route('/animals/<uuid>', methods=['GET'])
def get_animal_by_uuid(uuid):
    # NOTE(review): the path parameter shadows the imported uuid module
    # inside this function (harmless here, but easy to trip over).
    data = get_data()
    json_list = data['animals']
    output = [x for x in json_list if x['uid'] == uuid]
    return jsonify(output)
# edits a particular creature by passing the UUID, and updated "stats"
@app.route('/animals/edit_animal')
def put_animal_stats():
    # NOTE(review): this mutates state on a GET (no methods= given, so only
    # GET is served), and index[0] raises IndexError for an unknown uid.
    data = get_data()
    rd = redis.StrictRedis(host='redis-docker-rry235_redis_1', port = 6379, db = 0)
    uid = request.args.get('uid', None)
    arms = request.args.get('arms', None)
    legs = request.args.get('legs', None)
    tails = request.args.get('tails', None)
    # Locate the record with the matching uid.
    index = [index for (index,d) in enumerate(data['animals']) if d['uid'] == uid]
    data['animals'][index[0]]['tails'] = int(tails)
    data['animals'][index[0]]['arms'] = int(arms)
    data['animals'][index[0]]['legs'] = int(legs)
    rd.set('animals',json.dumps(data,indent=2))
    return jsonify(data)
# deletes a selection of animals by a date ranges
@app.route('/animals/delete',methods=['GET'])
def delete_dates():
    """Keep only animals created OUTSIDE [start, end]; persist and return them.

    Query params 'start'/'end' must be quoted timestamps — the surrounding
    single quotes are part of the expected value (see the strptime format).
    """
    start = request.args.get('start')
    startdate = datetime.datetime.strptime(start, "'%Y-%m-%d_%H:%M:%S.%f'")
    end = request.args.get('end')
    enddate = datetime.datetime.strptime(end, "'%Y-%m-%d_%H:%M:%S.%f'")
    data = get_data()
    # Indices of records created before start or after end — the survivors.
    index = [index for (index,d) in enumerate(data['animals']) if (datetime.datetime.strptime(d['created_on'],'%Y-%m-%d %H:%M:%S.%f') <= startdate or datetime.datetime.strptime(d['created_on'],'%Y-%m-%d %H:%M:%S.%f') >= enddate)]
    new_data = {'animals': [data['animals'][x] for x in index]}
    # BUG FIX: the original host name had a trailing space
    # ('redis-docker-rry235_redis_1 '), which breaks hostname resolution;
    # every other handler in this module uses the name without the space.
    rd = redis.StrictRedis(host='redis-docker-rry235_redis_1', port=6379, db=0)
    rd.set('animals', json.dumps(new_data, indent=2))
    return jsonify(new_data)
# returns the average number of legs per animal
@app.route('/animals/average_num_legs', methods=['GET'])
def get_average_num_legs():
    # NOTE(review): raises ZeroDivisionError when the animal list is empty.
    data = get_data()
    json_list = data['animals']
    count = len(json_list)
    total = 0
    for x in json_list:
        total = total + x['legs']
    return str(total/count)
# returns a total count of animals
@app.route('/animals/animal_count', methods=['GET'])
def get_animal_count():
    # Number of animals currently stored in redis, as a plain string.
    data = get_data()
    count = len(data['animals'])
    return str(count)
@app.route('/load_data')
def get_load_data():
    # (Re)seed redis with 20 randomly generated animals and return them.
    data = {}
    data['animals'] = []
    heads = ['snake','bull','lion','raven','bunny']
    for i in range(20):
        num_arms = random.randrange(2,14,2)
        num_legs = random.randrange(3,13,3)
        # NOTE(review): 'tails' is arms + legs here — presumably by design.
        num_tails = num_arms + num_legs
        data['animals'].append({ 'head': heads[random.randrange(5)], 'body': petname.name()+'-'+petname.name(), 'arms' : num_arms, 'legs': num_legs, 'tails' : num_tails, 'uid' : str(uuid.uuid4()), 'created_on' : '{:%Y-%m-%d %H:%M:%S.%f}'.format(datetime.datetime.now())})
    rd = redis.StrictRedis(host = 'redis-docker-rry235_redis_1', port = 6379, db = 0)
    rd.set('animals',json.dumps(data,indent=2))
    return jsonify(data)
#I had an issue with my ssh connection closing while running the server and it kept it running indefinitely
#This was what I used to turn it off
def shutdown_server():
    # Uses werkzeug's dev-server shutdown hook; only available when running
    # under the Werkzeug development server.
    func = request.environ.get('werkzeug.server.shutdown')
    if func is None:
        raise RuntimeError('Not running with the Werkzeug Server')
    func()
@app.route('/shutdown', methods=['GET'])
def shutdown():
    # Remote kill switch for the dev server; delegates to shutdown_server().
    shutdown_server()
    return 'Server shutting down...'
def get_data():
    # Central redis accessor; host/port are hard-coded for the docker setup.
    rd = redis.StrictRedis(host='redis-docker-rry235_redis_1', port = 6379, db = 0)
    userdata = json.loads(rd.get('animals'))
    return userdata
if __name__ == '__main__':
    # Dev server only; bind all interfaces so the docker port mapping works.
    app.run(debug=True, host='0.0.0.0')
|
23,915 | 6cb092ae52fd0cd79573ae9c8dea105ce54bbbea | # coding: utf-8
import sys
import codecs
import django
import uuid
import json
import logging
import subprocess
import datetime
import time
import inspect
import project1
from django.http import HttpResponse
from django.shortcuts import render
from django.utils.translation import ugettext
from app1.utils import *
from app1.form import *
from django.contrib.auth.decorators import login_required
# Create your views here.
logger = logging.getLogger(__name__)
def _enum_fusen_elements():
    # Module-private helper so all views share one "all fusen" query.
    return FusenManager().all()
@login_required
def default(request):
    """Render the default sticky-note ("fusen") room page.

    Builds the template context with the window title and the full list
    of fusen elements, then renders fusenroom/default.html.
    """
    fields = {}
    fields['window_title'] = 'ふせん部屋'
    fields['form'] = {
        'fusen_elements': _enum_fusen_elements()
    }
    util.fill_menu_items(request, fields)
    context = django.template.RequestContext(request, fields)
    template = django.template.loader.get_template('fusenroom/default.html')
    return django.http.HttpResponse(template.render(context))
def elements(request):
    """JSON API example: return all fusen elements plus a server timestamp.

    NOTE(review): unlike new(), this endpoint performs no authentication
    check — confirm that is intentional.
    """
    items = []
    for e in _enum_fusen_elements():
        items.append(e.as_dict())
    response = {
        'time': str(datetime.datetime.now()),
        'fusen_elements': items
    }
    return django.http.HttpResponse(json.dumps(response, indent=4))
def new(request):
    """Create a new fusen for the logged-in user; return a small JSON ack.

    Unauthenticated requests get an empty JSON object ('{}') instead of an
    HTTP error — NOTE(review): callers must handle both response shapes.
    """
    if not request.user.is_authenticated:
        return django.http.HttpResponse('{}')
    # Placeholder text is user-facing Japanese: "click here to enter a memo".
    FusenManager().create_new(request.user.username, u'ここをクリックしてメモを入力できます。')
    response = {
        'time': str(datetime.datetime.now()),
        'response': True
    }
    return django.http.HttpResponse(json.dumps(response, indent=4))
|
23,916 | f2a3ed18a1fcb3cc67cc7d55c1d193f18f2424a6 | #https://www.pjreddie.com/media/files/mnist_train.csv
#https://www.pjreddie.com/media/files/mnist_test.csv
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
# Build a plain Python list and convert it to a 1-D numpy array.
v = [1, 2, 3, 4, 5]
type(v)
numpy_array = np.array(v)
print(numpy_array)
print(type(numpy_array))
# A 3x3 matrix as a nested-list numpy array.
matrix = np.array([
    [1, 2, 3],
    [4, 5, 6],
    [7, 8, 9]
])
print(type(matrix))
print(matrix)
# Load MNIST training data (CSV has no header row).
mnist_train= pd.read_csv("mnist_train.csv", header=None)
# First column is the digit label; the remaining 784 columns are pixels.
column_names = ["label"]
for i in range(784):
    column_names.append("px_{}".format(i + 1))
mnist_train.columns = column_names
mnist_train
# Reshape the first image's 784 pixel values to 28x28 and display it.
matrix = mnist_train.values
print(matrix)
image = matrix[0, 1:]
image.shape
modified_image = image.reshape(28, 28)
modified_image.shape
plt.imshow(modified_image, cmap = "Greys")
|
23,917 | 127e531075fad26f813c03450e3cd6a411f5412c | import cv2
import numpy as np
import rasterio
from rasterio.features import shapes
from pyproj import Proj, transform
import fiona
from fiona.crs import from_epsg
import os
from multiprocessing import Pool
from itertools import product
from functools import partial
#image = '/home/j5/_projects/Cannabis/Inference/m_4012206_se_10_h_20160717.tif'
def load_img_container(image):
    """Load a GeoTIFF and prepare tiling plus an empty inference raster.

    Returns (image array, rasterio profile, list of 1024x1024 tile
    origins, zeroed (7, h, w) uint8 container for inference output).
    """
    with rasterio.open(image) as src:
        profile = src.profile
        #meta = src.meta
    num_img = cv2.imread(image, cv2.IMREAD_UNCHANGED)
    h, w, d = num_img.shape
    list_of_starting_coords = []
    lim_h = h // 1024
    lim_w = w // 1024
    # Tile origins covering the image in 1024px steps; extra tiles anchored
    # at (·, w-1024) and (h-1024, ·) cover the right/bottom borders.
    for row in range(lim_h):
        for col in range(lim_w):
            list_of_starting_coords.append((row * 1024, col * 1024))
        list_of_starting_coords.append((row * 1024, w - 1024))
    for col in range(lim_w):
        list_of_starting_coords.append((h - 1024, col * 1024))
    list_of_starting_coords.append((h - 1024, w - 1024))
    # create numpy array to hold inference data
    # please note that rasterio writes out raster with dimension first!
    s = (7, h, w)
    inference_container = np.zeros(s, dtype=np.uint8)
    # NOTE(review): the original comment said "change the band count to 6",
    # but the code sets count=1 and the container has 7 bands — confirm
    # which is intended before writing this profile out.
    profile.update(dtype=rasterio.uint8, count=1)
    # with rasterio.open('example-total.tif', 'w', **profile) as dst:
    #     dst.write(inference_container)
    return num_img, profile, list_of_starting_coords, inference_container
def take_coords_return_coords(original, destination, shapes):
    """Reproject all coordinate rings of a geometry into ONE new ring.

    NOTE(review): output points are (y, x) tuples — confirm the axis
    order expected downstream; pyproj's transform() argument/axis order
    changed across pyproj versions.  Also note every input ring is merged
    into a single ring (new_poly has exactly one inner list).
    """
    from pyproj import transform
    new_poly = [[]]
    for coords in shapes:
        for coord in coords:
            x, y = coord
            # tranform the coord
            new_x, new_y = transform(original, destination, x, y)
            # put the coord into a list structure
            poly_coord = tuple([float(new_y), float(new_x)])
            # append the coords to the polygon list
            new_poly[0].append(poly_coord)
    return new_poly
def canna_transform(shapes_in, profile):
    """Reproject one shape record to WGS84 (EPSG:4326) in place and return it.

    `shapes_in` is a {"properties": ..., "geometry": ...} record as built
    by update_shapefile(); its geometry coordinates are rewritten via
    take_coords_return_coords().  The source CRS is read from the raster
    profile.  (Two abandoned, commented-out parallelization experiments
    were removed from the original body; the active code path is unchanged.)
    """
    from pyproj import Proj
    # Convert from the raster's CRS to WGS 84.
    crs = profile['crs']
    dst = 'epsg:4326'
    destination = Proj(dst)
    original = Proj(crs)
    shapes_in['geometry']["coordinates"] = take_coords_return_coords(
        original, destination, shapes_in['geometry']['coordinates'])
    return shapes_in
def update_shapefile(num_img, profile):
    """Polygonize the inference raster and reproject each shape to WGS84.

    Zero-valued shapes are skipped.  Returns a ONE-element list whose
    first item is the list of reprojected
    {"properties": {"model_conf": v}, "geometry": ...} records.
    """
    with rasterio.Env():
        shp_poly_all = []
        # rasterio.features.shapes yields (geometry, pixel value) pairs.
        all_shapes = [{"properties": {"model_conf": v}, "geometry": s} for i, (s, v) in
                  enumerate(shapes(num_img, mask=None, transform=profile['transform'])) if v != 0]
        # Reproject all shapes in parallel; note the extra nesting —
        # callers must unwrap shp_poly_all[0].
        with Pool(processes=35) as pool:
            shp_poly_all.append(pool.starmap(canna_transform, product(all_shapes, [profile])))
    return shp_poly_all
|
23,918 | 93d66736fef6168f3e8a78d92eb6f155f344fbe7 | from django.contrib import admin
from .models import Automation
# Register your models here.
admin.site.register(Automation)
|
23,919 | c9970e9b34ecede7445b883c455e0defd97056f5 | from .attribute_builder import AttributeBuilder
class Allow(AttributeBuilder):
    """Attribute builder that produces the 'allow' attribute."""

    def __init__(self):
        super().__init__()
        self.attributes = ["allow"]
|
23,920 | a14efef3cfdecf1e0319402e89b9d2d37fbc7316 | import os
import socket
from pytest import fixture
from . import MODULE_PATH
from http.server import SimpleHTTPRequestHandler
import socketserver
from pathlib import Path
import threading
@fixture(scope='session')
def example_server():
    """Session-scoped pytest fixture serving `example_webserver_root` over HTTP.

    Changes the working directory into the webserver root, starts a
    SimpleHTTPRequestHandler server on the first free localhost port in a
    background daemon thread, and yields the (host, port) server address.

    Raises:
        ValueError: if the webserver root directory does not exist.
        RuntimeError: if changing into the webserver root failed.
    """
    webserver_root = os.path.join(MODULE_PATH, 'example_webserver_root')
    server_interface = "localhost"
    server_port = 0  # first available port
    if not Path(webserver_root).is_dir():
        raise ValueError(f"Given web server path is invalid: {webserver_root}")
    os.chdir(webserver_root)  # necessary for python <3.7
    if not Path(".").absolute().name == 'example_webserver_root':
        # BUG FIX: the original referenced the undefined name `example_root`
        # here, which raised NameError instead of the intended RuntimeError.
        raise RuntimeError(
            f"Could not change dir to example webserver root: {webserver_root}"
        )
    with socketserver.TCPServer(
        (server_interface, server_port), SimpleHTTPRequestHandler
    ) as httpd:
        daemon = threading.Thread(name="example web server", target=httpd.serve_forever)
        daemon.daemon = True  # kill after main thread dies (setDaemon() is deprecated)
        daemon.start()
        yield httpd.server_address
|
23,921 | 373a14ecaa846a0e5410930d3fa3c863347d01d1 | # 1. build a submit function
import vulcan.queue as vq
def submit(optns):
    # Synchronously submit the job to the vulcan queue, forwarding the queue
    # name, program, I/O file names and job-array range from the options object.
    vq.submit(optns.queue, optns.program, input=optns.input_name, output=optns.output_name, sync=True, job_array=optns.job_array_range)
# 2. build an options object
from optavc.options import Options
options_kwargs = {
'template_file_path': "template.dat",
'energy_regex' : r"@DF-RHF Final Energy:\s+(-\d+\.\d+)",
'success_regex' : r"\*\*\* P[Ss][Ii]4 exiting successfully." ,
'queue' : "gen4.q",
'program' : "psi4@master",
'input_name' : "input.dat",
'output_name' : "output.dat",
'submitter' : submit,
'maxiter' : 20,
'job_array' : True,
'g_convergence' : "gau_verytight",
'findif' : {'points': 3}
}
options_obj = Options(**options_kwargs)
# 3. call optimizer
from optavc.optimize import Optimization
optimization_obj = Optimization(options_obj)
optimization_obj.run(restart_iteration=4)
|
23,922 | 6c8da79f64efd640ff48d35e64e45bf3604c542e | from django.urls import reverse
from django.db import models
from mptt.models import MPTTModel, TreeForeignKey
class Node(MPTTModel):
    """A named node in a django-mptt tree with an optional parent."""

    # Unique display name of the node.
    name = models.CharField(max_length=55, unique=True)
    # Self-referencing link to the parent node; NULL marks a root node.
    parent = TreeForeignKey('self', blank=True, null=True, related_name='children', on_delete=models.CASCADE)

    def __str__(self):
        # Human-readable representation (admin, shell).
        return self.name

    def get_absolute_url(self):
        # Canonical detail URL for this node.
        return reverse('nodes:node_detail', kwargs={'pk': self.pk, })

    class MPTTMeta:
        # Keep siblings alphabetically ordered on insertion.
        order_insertion_by = ['name']
|
23,923 | b99f2f165548180aca539d5a9a8d18c2fdf1a873 | import pyodbc as db
import pandas as pd
con = db.connect('DRIVER={ODBC Driver 13 for SQL Server};SERVER=ZBOOK;Trusted_Connection=yes;DATABASE=Peaqock')
cur = con.cursor()
df = pd.read_sql("""SELECT *
FROM (((peaqock.dbo.TransactionsHist
inner join Peaqock.dbo.OrdresHist ON Peaqock.dbo.TransactionsHist.IdOrdreEx = Peaqock.dbo.OrdresHist.IdOrdreEx)
INNER join peaqock.dbo.ComptesEspece ON IdCompte=IdCompteEspece)
INNER join Peaqock.dbo.Clients ON IdClient=IdPersonne)""",con)
df.index = df['IdPersonne']
df = pd.DataFrame(df)
Id_Personne_KYC = list(set(pd.read_sql("SELECT IdPersonne FROM KYC",con)['IdPersonne']))
df = df.loc[Id_Personne_KYC]
#
#def table_commander(table_name, column_names, row_example):
# sql_types = ['CHARACTER',]
# python_type = [ ]
# column_types = []
# if len(column_names)!=len(row_example):
# print("lenght of names : ", len(column_names))
# print("length of example : ", len(row_example))
# length = len(column_example)
# command = 'CREATE TABLE IF NOT EXISTS' + table_name + ' ('
# for name in column_names :
# command +=
#
def execom(command, con=None):
    """Execute a single SQL `command` on connection `con`.

    BUG FIX: the original opened a pyodbc connection inside the default
    argument expression, which runs once at import time — so every call
    shared one connection that was never closed.  The connection is now
    opened lazily per call when none is supplied.

    Note: no commit is issued here; transaction control stays with the
    caller (matching the original behavior).
    """
    if con is None:
        con = db.connect('DRIVER={ODBC Driver 13 for SQL Server};SERVER=ZBOOK;Trusted_Connection=yes;DATABASE=Peaqock')
    cur = con.cursor()
    try:
        cur.execute(command)
    finally:
        cur.close()  # always release the cursor, even if execute() raises
def insert(table_name, df, column_names):
    """Build a multi-row INSERT statement for every row of `df`.

    Returns the SQL string (it is NOT executed here).  Each row of the
    DataFrame becomes one value tuple rendered via ``str(tuple(...))``.

    WARNING(review): values are interpolated directly into the statement,
    which is unsafe for untrusted data — prefer parameterized queries when
    possible.
    """
    cols = ",".join(column_names)
    rows = ",".join(str(tuple(df.loc[idx])) for idx in df.index)
    return 'INSERT INTO ' + table_name + ' (' + cols + ') VALUES ' + rows
con = db.connect('DRIVER={ODBC Driver 13 for SQL Server};SERVER=ZBOOK;Trusted_Connection=yes;DATABASE=Peaqock')
execom('CREATE TABLE Testing1')
|
23,924 | 4e230475eb7e2c84cda9bd7034edab59a56aae02 | with open('allbooks.txt','r') as f:
with open('upload_all_books.sh','w') as g:
print >>g, "#!/bin/bash"
print >> g, "echo 'Uploading all books...'"
print >> g, "set -e"
print >> g, ""
for book in f:
book = book.rstrip()
if not book: continue
print >> g, "rsync -avz books/{0}/web devhap@dev.himalayanacademy.com:public_html/media/books/{0}/".format(book) |
23,925 | 75d6e4672c2da4479dad6b46cb721d90e6a991a7 | """
Django settings for github_scraper project.
Generated by 'django-admin startproject' using Django 3.1.5.
For more information on this file, see
https://docs.djangoproject.com/en/3.1/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.1/ref/settings/
"""
import os
import json
import sys
from pathlib import Path
from typing import Dict, Any, TextIO, List, Union
# Define Config type for the configuration pulled from `config.json`
Config = Dict[str, Any]
# Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR: Path = Path(__file__).resolve().parent.parent
# "Secret" configuration dictionary
config: Config = {}
# Load configuration file
try:
data_file: TextIO
with open(BASE_DIR / 'settings' / 'config.json') as data_file:
config = json.load(data_file)
except IOError:
if 'SKIP_CONFIG' not in os.environ:
sys.exit('You need to setup the config data file (see the `config_template.json` file.)')
# Application definition
INSTALLED_APPS: List[str] = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
]
# Third party apps
INSTALLED_APPS.extend([
])
# Project defined apps
INSTALLED_APPS.extend([
'search_interface.apps.SearchInterfaceConfig',
])
MIDDLEWARE: List[str] = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware'
]
ROOT_URLCONF: str = 'github_talent_search.urls'
TEMPLATES: List[Dict[str, Any]] = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION: str = 'github_talent_search.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.1/ref/settings/#databases
DATABASES: Dict[str, Dict[str, Union[str, int, Dict, List]]] = {}
for database in config.get('databases', []):
DATABASES[database.get('connection_name')] = {
'NAME': database.get('database_name'),
'ENGINE': database.get('engine'),
'USER': database.get('user', ''),
'PASSWORD': database.get('password', ''),
'HOST': database.get('host', ''),
'PORT': database.get('port', ''),
'OPTIONS': database.get('options', {})
}
# Password validation
# https://docs.djangoproject.com/en/3.1/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS: List[Dict[str, str]] = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Logging
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'formatters': {
'console': {
'format': '%(asctime)s %(name)-12s %(levelname)-8s %(message)s',
},
},
'handlers': {
'console': {
'class': 'logging.StreamHandler',
'formatter': 'console',
},
},
'loggers': {
'': {
'level': 'DEBUG',
'handlers': ['console'],
},
},
}
# Internationalization
# https://docs.djangoproject.com/en/3.1/topics/i18n/
LANGUAGE_CODE: str = 'en-us'
TIME_ZONE: str = 'UTC'
USE_I18N: bool = True
USE_L10N: bool = True
USE_TZ: bool = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.1/howto/static-files/
static_config: Dict = config.get('static', {})
STATIC_ROOT: str = static_config.get('root', '')
STATIC_URL: str = static_config.get('url', '/static/')
GITHUB_TOKEN: str = config.get('github_oauth_token')
INTERNAL_IPS = ['127.0.0.1', ]
|
23,926 | c461bd4f8ae17b39eeb520d35a1665dbd14811c1 | from numpy import loadtxt
from xgboost import XGBClassifier
from sklearn.model_selection import train_test_split
from sklearn.metrics import accuracy_score
import sys
import numpy as np
import csv
def readTrainData(dataFilename, chosen_features):
    """Read a comma-separated feature file, keeping only `chosen_features`.

    The header row is skipped.  A raw value of '0' is replaced by 1e-5 (the
    author's guard against gradient-descent failure), every other value is
    parsed as float.  Column n is kept when n % 106 is in `chosen_features`
    (the data carries 106 features per record).

    Returns a 2-D numpy array of shape (num_rows, len(chosen_features)).
    """
    raw_data = []
    # FIX: `with` guarantees the file handle is closed (original leaked it).
    with open(dataFilename, 'r', errors='ignore') as dataFile:
        reader = csv.reader(dataFile, delimiter=',')
        next(reader, None)  # skip the header row
        for row in reader:
            mylist = []
            for n in range(len(row)):  # 106 features
                if n % 106 in chosen_features:  # only use chosen features
                    if row[n] == '0':
                        mylist.append(float(0.00001))  # avoid exact zeros
                    else:
                        mylist.append(float(row[n]))
            raw_data.append(mylist)
    return np.array(raw_data)
def normalizeData(data1, data2):
    """Standardize both arrays using statistics of their concatenation.

    Vectorized replacement for the original O(rows * dims) Python loops:
    the per-column mean and (population, ddof=0) standard deviation are
    computed over the rows of data1 and data2 stacked together, then both
    arrays are scaled IN PLACE (matching the original's in-place mutation)
    and returned.
    """
    combined = np.concatenate((data1, data2), axis=0)
    mean = combined.mean(axis=0)
    sigma = combined.std(axis=0)  # population std, same as the original formula
    # In-place updates keep the original side-effect semantics.
    data1 -= mean
    data1 /= sigma
    data2 -= mean
    data2 /= sigma
    return data1, data2
def readTrainLabel(labelFilename):
    """Read the label CSV (header skipped) and one-hot encode it.

    Returns (new_label, raw_label): raw_label is the (N, 1) float array of
    labels exactly as read; new_label is its (N, 2) one-hot encoding where
    column 1 marks label == 1 and column 0 everything else.
    """
    raw_label = []
    # FIX: `with` guarantees the file handle is closed (original leaked it).
    with open(labelFilename, 'r', errors='ignore') as labelFile:
        reader = csv.reader(labelFile, delimiter=',')
        next(reader, None)  # skip the header row
        for row in reader:
            raw_label.append([float(v) for v in row])
    raw_label = np.array(raw_label)
    num_of_data = raw_label.shape[0]
    new_label = np.zeros((num_of_data, 2))  # binary classification
    for m in range(num_of_data):
        if raw_label[m] == 1:
            new_label[m, 1] = 1
        else:
            new_label[m, 0] = 1
    return new_label, raw_label
def writePredict(predictLabel, outputFilename):
    """Write predictions as a Kaggle-style CSV with header (id, label).

    Ids are 1-based row numbers; labels are cast to int.  Returns
    `predictLabel` unchanged.  FIX: a `with` block ensures the output file
    is flushed and closed (the original leaked the handle).
    """
    with open(outputFilename, 'w', newline='') as out_file:
        result_csv = csv.writer(out_file)
        result_csv.writerow(['id', 'label'])
        for n in range(predictLabel.shape[0]):
            result_csv.writerow([str(n + 1), str(int(predictLabel[n]))])
    return predictLabel
########################
rawdatafilename = sys.argv[1] #no use
rawtestfilename = sys.argv[2] #no use
trainDataFilename = sys.argv[3]#'X_train'
trainLabelFilename = sys.argv[4]#'Y_train'
testDataFilename = sys.argv[5]#'X_test'
outputFilename = sys.argv[6]#result.csv
chosen_features = []
for n in range(106):
chosen_features.append(n)
x_train = readTrainData(trainDataFilename, chosen_features)
x_test = readTrainData(testDataFilename, chosen_features)
x_train, x_test = normalizeData(x_train, x_test)
y_train, raw_label = readTrainLabel(trainLabelFilename)
np.random.seed(0)
cv_idx_tmp = np.random.permutation(32561)
#for 4 fold-cross validation, 1400 1400 1400 1440
#cv_fold_1 = cv_idx_tmp[0:16000]
#cv_fold_2 = cv_idx_tmp[16000:32561]
cv_fold_1 = cv_idx_tmp[0:16000]
cv_fold_2 = cv_idx_tmp[16000:32561]
#print(cv_fold_1[:,].shape)
x_train1 = x_train[cv_fold_1[:,] ]
y_train1 = y_train[cv_fold_1[:,] ]
x_train2 = x_train[cv_fold_2[:,] ]
y_train2 = y_train[cv_fold_2[:,] ]
y_train_raw1 = raw_label[cv_fold_1[:,] ]
y_train_raw2 = raw_label[cv_fold_2[:,] ]
y_train_raw1 = np.reshape(y_train_raw1,(-1,))
y_train_raw2 = np.reshape(y_train_raw2,(-1,))
print('x train1 shape ',x_train1.shape)
print('y train1 raw shape ',y_train_raw1.shape)
print('x train shape[1] ',x_train.shape[1])#106 here
print('y train shape[1] ',y_train.shape[1])#2
n_features = x_train.shape[1]
n_labels = y_train.shape[1]
#writePredict(x_test ,w_vec ,outputFilename)
##################################################
# fit model no training data
model = XGBClassifier(
learning_rate =0.2,
max_depth=5,
min_child_weight=1,
gamma=0,
subsample=0.8,
colsample_bytree=0.8,
objective= 'binary:logistic',
nthread=4,
scale_pos_weight=1,
seed=27)
#model.fit(x_train2, y_train_raw2.ravel())
model.fit(x_train, raw_label.ravel())
# make predictions for test data
#y_pred = model.predict(x_train1)
y_pred = model.predict(x_test)
predictions = [round(value) for value in y_pred]
# evaluate predictions
#accuracy = accuracy_score(y_train_raw1, predictions)
#print("Accuracy: %.2f%%" % (accuracy * 100.0))
predictions = np.array(predictions)
#print("predictions shape",predictions.shape)
writePredict(predictions,outputFilename)
|
23,927 | f9121e65200a9cdd0a5d482d10042ee370ae6cc7 | # Generated by Django 2.1.3 on 2018-11-03 05:41
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('entrega', '0002_auto_20181102_2325'),
]
operations = [
migrations.CreateModel(
name='Asignacion',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('ciudad', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='entrega.Ciudad')),
],
),
migrations.CreateModel(
name='Paquete',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('Destinatario', models.CharField(max_length=200)),
('direccion', models.CharField(max_length=200)),
('descripcion', models.CharField(max_length=50)),
('ciudad', models.ManyToManyField(through='entrega.Asignacion', to='entrega.Ciudad')),
('piloto', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='entrega.Piloto')),
],
),
migrations.AddField(
model_name='asignacion',
name='paquete',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='entrega.Paquete'),
),
]
|
23,928 | e436c1b683e4a46808a0891deee56a357b7448ef | # remember to initialize values
# remember to ask for input in lower or upper
# remember to get the and logic right if used, and use parantheses everywhere
# check list
''' TO DO
1. ask if from canada -- DONE
2. ask which province -- DONE
3. if not canada, charge no taxes -- DONE
4. if order was placed in Canada, calculate tax based on province.. -- TODO
4a. find a lazy way to deal with other province charge! :D -- TODO
5. Do the calculation of selected provinceTax and the price of their purchase -- TODO
'''
''' Mistakes made
1. Don't *really* need booleans for this program, but would make it easier as a persistent
feature for the base back end of the 'site' I feel.
2. I didn't declare separate tax rates for each province. This leads to non - modular code,
which makes it much much harder to maintain for other devs.
2a. provinceTax makes it easy for writing code, but will screw over a site that needs
their taxes updated instantly.
'''
# initializing booleans and values
fromCanada = False
outsideCanada = False
province = ''
fromWhere = ''
provinceTax = 0
orderAmount = 10  # Let's just assume it's $10 cad minimum to shop here

# BUG FIX: the answer used to be stored in `fromCanada` while the check read
# the never-assigned `fromWhere`, so the Canada branch could never trigger.
fromWhere = input('Where do you live? ').lower()
if fromWhere == 'canada':
    fromCanada = True
    print('Awesome, which province do you live in? ')
    print('Alberta, Ontario, New Brunswick, Nova Scotia, or other? ')
    province = input('').lower()
    # BUG FIX: the tax selection now runs only for Canadian customers — it
    # previously executed unconditionally, charging "other province" tax to
    # everyone abroad (the author's own complaint at the bottom of the file).
    if province == 'alberta':
        provinceTax = .05
        print('You will be charged 5% taxes')
    elif province == 'ontario' or province == 'new brunswick' \
            or province == 'nova scotia':
        print('Sorry, your tax rate is 13%! ')
        provinceTax = .13
    else:
        print('Hey! You\'re not in any of those provinces in canada,\
 so you\'ll have to pay 6 % plus GST of 5% ')
        provinceTax = .11
    print('The tax rate is: %1f' % provinceTax)
else:
    print("You don't seem to be from Canada, enjoy your no taxes! ")
23,929 | af1d0460df7324f5654331bb060db1439d7e77f4 | class Ingredient(object):
    def __init__(self, name, total_quantity, threshold_for_alert=5):
        # Display name of the ingredient.
        self.name = name
        # Maximum capacity; refills never exceed this.
        self.total_quantity = total_quantity
        # Current stock; starts full.
        self.available_quantity = total_quantity
        # Stock level at (or below) which alert() starts warning.
        self.threshold_for_alert = threshold_for_alert
        # True once stock has been fully depleted.
        self.empty = False
def alert(self):
if self.threshold_for_alert >= self.available_quantity:
return "ALERT!!! {} is running low. Please refill ASAP!".format(self.name)
return ""
def use_ingredient(self, quantity):
if self.empty or quantity > self.available_quantity:
return False
self.available_quantity -= quantity
if not self.available_quantity:
self.empty = True
return True
def is_available(self, quantity):
print(self.alert())
if self.empty or quantity > self.available_quantity:
return False
return True
def refill_ingredient(self, quantity):
if self.available_quantity == self.total_quantity:
print("No need to refill!")
return False
diff = self.total_quantity - self.available_quantity
if quantity > diff:
quantity = diff
self.available_quantity += quantity
return True
|
23,930 | 024e9b9955daf49e82f5de43f966882860246c05 | import requests
import time
import os
import json
import re
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
path = BASE_DIR + '/clientdata/data/'
if __name__ == "__main__":
a = 0
j_paht = path + 'poet.song.' + str(a*1000) + '.json'
f = open(j_paht, 'r')
while f:
print(str(a))
try:
json_str = f.read()
data = json.loads(json_str)
data_json = data
for item_t in data_json:
author = item_t["author"]
paragraphs_list = item_t["paragraphs"]
paragraphs = ''
for str_0 in paragraphs_list:
paragraphs = paragraphs + str_0
strains_list = item_t["strains"]
strains = ''
for str_1 in strains_list:
strains = strains + str_1
title = item_t["title"]
'''
chapterlist_path cover_path insert
'''
params_p = {
"value":'',
"author": author,
"content": paragraphs,
"strains": strains,
"title": title
}
url_p = 'http://127.0.0.1:8000/song/shi/'
headers_p = {
'Accept': 'application/json, text/javascript, */*; q=0.01',
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/54.0.2840.71 Safari/537.36',
'Content-Type': 'application/x-www-form-urlencoded',
'Accept-Encoding': 'gzip, deflate',
'Accept-Language': 'zh-CN,zh;q=0.8',
'Cache-Control': 'no-cache',
}
rsp = requests.post(url_p, headers=headers_p, data=params_p)
print(rsp)
except Exception as e:
print(e)
pass
a = a + 1
t = a * 1000
j_paht = path + 'poet.song.' + str(t) + '.json'
time.sleep(2)
f = open(j_paht, 'r')
|
23,931 | e02376ce71955e73bfa2ba9033619f6392dcc6d6 | import Cars_Class
class Car_Specialists:
    """A person who works with a specific vehicle (driver, mechanic, ...)."""

    def __init__(self, name, car_type, skill, amount):
        self.name = name
        self.car_type = car_type
        self.skill = skill
        self.amount = amount

    def persona_details(self):
        """Return a one-line summary of the specialist and their annual pay."""
        template = '{} is a {} for the vehicle {} and earns {} annually'
        return template.format(self.name, self.skill, self.car_type, self.amount)
class mechanic(Car_Specialists):
    # Inherits all behavior from Car_Specialists; exists as a semantic alias.
    pass
# Example specialists: a driver and a mechanic for the same vehicle.
driver = Car_Specialists('Carl Max', 'Volvo', 'driver', '$50,000')
mech = mechanic('Dickson Martin', 'Volvo', 'mechanic', '$150,000')
print(driver.persona_details())
print(mech.persona_details())
#print(help(mech))
# FIX: renamed from `list`, which shadowed the builtin of the same name.
sample_values = [102, 343, 'que', 2.2, 50]
print(sample_values[0:4])
|
23,932 | 92caaaa97b152d03969ad5b3a55b1d41810bf320 | import sqlite3
conn = sqlite3.connect("dat.db")
cur = conn.cursor()
def createTable(cursor=None):
    """Create the `main` (word, definition) table.

    `cursor` defaults to the module-level `cur`, so existing callers keep
    working; passing a cursor explicitly makes the function reusable and
    testable without the hard global dependency.
    """
    if cursor is None:
        cursor = cur
    cursor.execute("""CREATE TABLE main
    (word text, definition text)
    """)
createTable()
f = (open("dict.txt", "r").read()).split("\n")
for line in f:
line = line.split("\t")
if len(line) != 3:
continue
sql = "INSERT INTO main VALUES (?, ?)"
cur.execute(sql, (line[0], line[2], ))
conn.commit()
cur.close()
conn.close()
|
23,933 | 18e60f3564a5dcb10ae9076da17fdac5699fdbd9 | from transformers import XLMModel
import torch
from keras.preprocessing.sequence import pad_sequences
from transformers import XLMTokenizer
import numpy as np
from tqdm import tqdm
import argparse
parser = argparse.ArgumentParser(description='Getting sentence embeddings with XLM. ')
parser.add_argument('--max_len', type=int, default=40,
help='Maximum length of tokens: all sentences with less tokens will be padded with 0, else we will remove all tokens after max_len index')
args = parser.parse_args()
MAX_LEN = args.max_len
class XLM_model:
"""
from here: https://github.com/huggingface/pytorch-transformers/blob/a2d4950f5c909f7bb4ea7c06afa6cdecde7e8750/pytorch_transformers/modeling_xlm.py
We can see all the possible models existing for XLM.
We focus on MLM+TLM which is the model that best performance on cross-lingual tasks.
"""
XLM_PRETRAINED_MODEL_ARCHIVE_MAP = {
'xlm-mlm-en-2048': "https://s3.amazonaws.com/models.huggingface.co/bert/xlm-mlm-en-2048-pytorch_model.bin",
'xlm-mlm-ende-1024': "https://s3.amazonaws.com/models.huggingface.co/bert/xlm-mlm-ende-1024-pytorch_model.bin",
'xlm-mlm-enfr-1024': "https://s3.amazonaws.com/models.huggingface.co/bert/xlm-mlm-enfr-1024-pytorch_model.bin",
'xlm-mlm-enro-1024': "https://s3.amazonaws.com/models.huggingface.co/bert/xlm-mlm-enro-1024-pytorch_model.bin",
'xlm-mlm-tlm-xnli15-1024': "https://s3.amazonaws.com/models.huggingface.co/bert/xlm-mlm-tlm-xnli15-1024-pytorch_model.bin",
'xlm-mlm-xnli15-1024': "https://s3.amazonaws.com/models.huggingface.co/bert/xlm-mlm-xnli15-1024-pytorch_model.bin",
'xlm-clm-enfr-1024': "https://s3.amazonaws.com/models.huggingface.co/bert/xlm-clm-enfr-1024-pytorch_model.bin",
'xlm-clm-ende-1024': "https://s3.amazonaws.com/models.huggingface.co/bert/xlm-clm-ende-1024-pytorch_model.bin",
'xlm-mlm-17-1280': "https://s3.amazonaws.com/models.huggingface.co/bert/xlm-mlm-17-1280-pytorch_model.bin",
'xlm-mlm-100-1280': "https://s3.amazonaws.com/models.huggingface.co/bert/xlm-mlm-100-1280-pytorch_model.bin",
}
def __init__(self, model_name):
# @TODO: check what causal refers again
# model = XLMModel.from_pretrained("xlm-mlm-enfr-1024", causal = False)
self.model_name = model_name
self.model = XLMModel.from_pretrained(model_name, causal = False)
self.tokenizer = XLMTokenizer.from_pretrained(self.model_name, do_lower_case=True)
def encode(self, sentences, max_len):
########## For 15 languages ########
tokenizer = self.tokenizer
# sentences = ['[CLS] '+sentence+' [SEP]' for sentence in sentences]
# sentences = [sentence for sentence in sentences]
# In model.encode(), special characters can be added on the fly
# cf. https://huggingface.co/transformers/_modules/transformers/tokenization_utils.html#PreTrainedTokenizer.add_special_tokens
""""
def encode(
self,
text,
text_pair=None,
add_special_tokens=True,
max_length=None,
stride=0,
truncation_strategy="longest_first",
pad_to_max_length=False,
return_tensors=None,
**kwargs
):
"""
# The special character to separate sentences is </s> for XLM which is encoded as 1
# If in tokenizer.encode() you add add_special_tokens = True
# Then, it is not necessary to add </s> in each side of your sentence
sentences = ["</s> " + sentence + " </s>" for sentence in sentences]
sentences = [tokenizer.tokenize(sent) for sent in sentences]
# maximum length of each sentence ie number of tokens maximal for all sentences
MAX_LEN = max_len # AVG around 10 so 64 is good enough
arr_ids_post = [tokenizer.convert_tokens_to_ids(txt) for txt in sentences]
input_ids_post = pad_sequences(arr_ids_post,
maxlen = MAX_LEN,
dtype = 'long',
truncating = 'post',
padding = 'post') # 'post' to add 0s after in padding for ex
########### CREATE ATTENTION MASKS ###################
# This is just to apply attention on the part where there are actual tokens t
# Not on the padding elements set to 0 before.
attention_masks_post = []
for seq in input_ids_post:
mask = [float(i>0) for i in seq]
attention_masks_post.append(mask)
########### CREATE SPECIAL CHARACTER MASKS FOR BUILDING EMBEDDINGS WITHOUT FOCUSING ON [CLS] and [SEP] ###################
# For XLM, it seems like [CLS] and [SEP] are not used at inference time.
# <s> encoded as 0
# mask_special_characters_post = []
# for seq in input_ids_post:
# # mask_spec = [1 if i not in [627, 615] else 0 for i in seq]
# mask_spec = [1 if i in [0] else 0 for i in seq]
# mask_special_characters_post.append(mask_spec)
input_ids = torch.tensor(input_ids_post)
attention_masks = torch.tensor(attention_masks_post)
# special_masks = torch.tensor(mask_special_characters_post)
# lang_ids = torch.tensor(lang_ids)
########## FORWARD TO GET EMBEDDINGS ##########
input_ids = input_ids.type(torch.int64)
# lang_ids = lang_ids.type(torch.int64)
embeddings_tuple = self.model( input_ids = input_ids, attention_mask = attention_masks)
embeddings_ = embeddings_tuple[0]
# print(embeddings_)
# print(embeddings_.shape)
# print(embeddings_[:,0,:].shape)
embeddings_first_token_only = embeddings_[:,0,:]
embeddings_arr = embeddings_first_token_only.cpu().detach().numpy()
# print(embeddings_arr.shape)
del embeddings_, embeddings_first_token_only, embeddings_tuple
return embeddings_arr
if __name__=='__main__':
# for length in newstest2012.tok.fr
# mean value = 26.6
# std value = 15.4
# max value = 145
# Load XLM model
XLM_model = XLM_model("xlm-mlm-100-1280")
max_len = MAX_LEN
# Open file
lang_arr = ['cs', 'de', 'en', 'es', 'fr', 'ru']
# lang = "ru"
for lang in lang_arr:
# input_file_name = "../data/processed/wmt2012/newstest2012.tok.{}".format(lang)
input_file_name = "../dev/newstest2012.{}".format(lang)
arr_embed = []
with open(input_file_name, 'r') as file:
N_lines = 3003
with tqdm(total=N_lines) as pbar:
for line in file:
line = line.strip("\n")
# For each line get embedding
embed = XLM_model.encode(sentences = [line], max_len = max_len)
arr_embed.append(embed)
pbar.update(1)
# Store embedding in an array
np_embed = np.array(arr_embed)
# save numpy array in memory
np.save(file = "../output/XLM_MLM/newstest2012.{}.embed".format(lang), arr = np_embed) |
23,934 | f2850f217fa649c6bed70a13bae355c913efe603 | ##################################################
# Let's make it more meaningful together
##################################################
# İnstalling required libraries
import numpy as np
import pandas as pd
import seaborn as sns
from matplotlib import pyplot as plt
pd.set_option('display.width', 500)
pd.set_option('display.max_columns', 20)
pd.set_option('display.max_rows', 15)
pd.set_option('display.float_format', lambda x: '%.3f' % x)
pd.set_option('display.expand_frame_repr', True)
# Get the datasets of persona
df = pd.read_csv("hafta_2/persona.csv")
df.head()
######### Understanding the data sets ######
df.ndim # ndim: Number of dimensions.
df.shape # shape: shape of datasets
df.size # size: numbers of elements
df.head() # head: first five values of datasets
df.columns # columns: name of columns
df.info # info: get help information for a function, class, or module.
df.describe().T
df.isnull().values.any() # answer of null value
df.isnull().sum() # total of null value
def check_df(dataframe):
    """Print a one-shot overview of `dataframe`: shape, dtypes, first rows,
    missing-value counts and value quantiles."""
    print(f"""
    ##################### Shape #####################\n\n\t{dataframe.shape}\n\n
    ##################### Types #####################\n\n{dataframe.dtypes}\n\n
    ##################### Head #####################\n\n{dataframe.head(3)}\n\n
    ##################### NA #####################\n\n{dataframe.isnull().sum()}\n\n
    ##################### Quantiles #####################\n\n{dataframe.quantile([0, 0.05, 0.50, 0.95, 0.99, 1]).T}\n\n""")
check_df(df)
######## Selection of Categorical and Numerical Variables ########
def grab_col_names(dataframe, cat_th=10, car_th=20):
    """
    Return the categorical, numerical and categorical-but-cardinal column
    names of `dataframe`.

    Note: numeric-looking categorical columns are included in the
    categorical list.

    Parameters
    ------
    dataframe: dataframe
        The dataframe whose column names are wanted.
    cat_th: int, optional
        Class-count threshold below which numeric columns are treated as
        categorical.
    car_th: int, optional
        Class-count threshold above which object columns are treated as
        cardinal.

    Returns
    ------
    cat_cols: list
        Categorical columns (including numeric-but-categorical ones).
    num_cols: list
        Numerical columns.
    cat_but_car: list
        Categorical-looking but cardinal columns.

    Notes
    ------
    cat_cols + num_cols + cat_but_car equals the total number of columns;
    num_but_cat is contained in cat_cols.
    """
    # cat_cols, cat_but_car
    cat_cols = [col for col in dataframe.columns if dataframe[col].dtypes == "O"]
    num_but_cat = [col for col in dataframe.columns if dataframe[col].nunique() < cat_th and
                   dataframe[col].dtypes != "O"]
    # BUG FIX: the original function stopped here and implicitly returned
    # None, contradicting its own docstring; the selection is now completed.
    cat_but_car = [col for col in cat_cols if dataframe[col].nunique() > car_th]
    cat_cols = cat_cols + num_but_cat
    cat_cols = [col for col in cat_cols if col not in cat_but_car]
    # num_cols
    num_cols = [col for col in dataframe.columns if dataframe[col].dtypes != "O"]
    num_cols = [col for col in num_cols if col not in num_but_cat]
    return cat_cols, num_cols, cat_but_car
######## General Exploration for Categorical Data ########
def cat_summary(dataframe, plot=False):
    """Print unique counts and frequency/ratio tables for every object-dtype
    column of `dataframe`; optionally draw a countplot per column.

    BUG FIX: the original iterated over a global `cat_cols` that is never
    defined at module level (the line computing it was commented out),
    raising NameError.  The categorical columns are now derived here.
    """
    cat_cols = [col for col in dataframe.columns if dataframe[col].dtypes == "O"]
    for col_name in cat_cols:
        print("############## Unique Observations of Categorical Data ###############")
        print("The unique number of " + col_name + ": " + str(dataframe[col_name].nunique()))
        print("############## Frequency of Categorical Data ########################")
        print(pd.DataFrame({col_name: dataframe[col_name].value_counts(),
                            "Ratio": dataframe[col_name].value_counts() / len(dataframe)}))
        if plot:  # plot == True (default is False)
            sns.countplot(x=dataframe[col_name], data=dataframe)
            plt.show()
cat_summary(df, plot=True)
######## General Exploration for Numerical Data ########
def num_summary(dataframe, plot=False):
    """Print summary statistics (quartiles) for each numeric column of
    `dataframe`; optionally draw a histogram per column.

    Generalized: instead of the hard-coded ['PRICE', 'AGE'] columns, every
    non-object column is summarized, so the helper works for any dataset
    (unchanged behavior for the persona data, whose numeric columns are
    exactly PRICE and AGE).
    """
    numerical_col = [col for col in dataframe.columns if dataframe[col].dtypes != "O"]
    quantiles = [0.25, 0.50, 0.75, 1]
    for col_name in numerical_col:
        print("########## Summary Statistics of " + col_name + " ############")
        print(dataframe[numerical_col].describe(quantiles).T)
        if plot:
            sns.histplot(data=dataframe, x=col_name)
            plt.xlabel(col_name)
            plt.title("The distribution of " + col_name)
            plt.grid(True)
            plt.show(block=True)
num_summary(df, plot=True)
######## Analysis of the data set ########
# Unique Value of source
df["SOURCE"].unique()
# Frequency of source
df["SOURCE"].value_counts()
df.groupby("SOURCE").count()
# Unique Value and count of price
df["PRICE"].unique()
df.groupby("PRICE").count()
# sales by country
df.groupby("COUNTRY")["PRICE"].count()
# total sales by country
df.groupby("COUNTRY")["PRICE"].sum()
# sales by source
df.groupby("SOURCE")["PRICE"].count()
# mean of sales by country
df.groupby("COUNTRY")["PRICE"].mean()
# mean of sales by source
df.groupby("SOURCE")["PRICE"].mean()
# mean of sales by source and country
df.groupby(["SOURCE", "COUNTRY"])["PRICE"].mean()
# mean of sales by source,country,sex and age as descending order
agg_df = df.groupby(["COUNTRY","SOURCE","SEX","AGE"]).agg({"PRICE": "mean"}).sort_values(("PRICE"),
ascending=False)
agg_df.head()
# Solving the index problem
agg_df = agg_df.reset_index()
######## Define New level-based Customers ########
# But, firstly we need to convert age variable to categorical data.
bins = [agg_df["AGE"].min(), 18, 23, 35, 45, agg_df["AGE"].max()]
labels = [str(agg_df["AGE"].min())+'_18', '19_23', '24_35', '36_45', '46_'+ str(agg_df["AGE"].max())]
agg_df["AGE_CAT"] = pd.cut(agg_df["AGE"], bins , labels=labels)
agg_df.groupby("AGE_CAT").agg({"AGE": ["min", "max", "count"]})
# For creating personas, we group all the features in the dataset:
agg_df = agg_df.groupby(["COUNTRY", "SOURCE", "SEX", "AGE_CAT", "AGE"])[["PRICE"]].sum().reset_index()
agg_df["CUSTOMERS_LEVEL_BASED"] = pd.DataFrame(["_".join(row).upper() for row in agg_df.values[:,0:4]])
agg_df.head()
# Calculating average amount of personas:
agg_df.groupby('CUSTOMERS_LEVEL_BASED').agg({"PRICE": "mean"})
# group by of personas:
agg_df = agg_df.groupby('CUSTOMERS_LEVEL_BASED').agg({"PRICE": "mean"}).sort_values(("PRICE"),ascending=False).reset_index()
agg_df.head()
######## Creating Segments based on Personas ########
segment_labels = ["D","C","B","A"]
agg_df["SEGMENT"] = pd.cut(agg_df["PRICE"], 4, labels=segment_labels)
agg_df.groupby("SEGMENT").agg({"PRICE": ["mean", "max", "sum"]}).sort_values(("SEGMENT"),
ascending=False).reset_index()
# Demonstrating segments as bars on a chart, where the length of each bar varies based on the value of the customer profile
plot = sns.barplot(x="SEGMENT", y="PRICE", data=agg_df)
for bar in plot.patches:
plot.annotate(format(bar.get_height(), '.2f'),
(bar.get_x() + bar.get_width() / 2,
bar.get_height()), ha='center', va='center',
size=8, xytext=(0, 8),
textcoords='offset points')
######### Prediction ########
######### Prediction ########
def ruled_based_classification(dataframe):
    """Interactively classify a new user into a persona segment.

    Prompts on stdin for country, phone OS, gender and age, builds the
    ``CUSTOMERS_LEVEL_BASED`` persona key and looks up its segment and
    expected price in the module-level ``agg_df``.

    Parameters
    ----------
    dataframe : pandas.DataFrame
        Kept for interface compatibility (the lookup actually uses the
        module-level ``agg_df``).

    Returns
    -------
    str
        The persona key built for the entered user.
    """
    def age_category(age):
        """Map a raw age to the age-bucket label used in the personas."""
        if age <= 18:
            return "15_18"
        elif age <= 23:
            return "19_23"
        elif age <= 35:
            return "24_35"
        elif age <= 45:
            return "36_45"
        else:
            # BUGFIX: ages above 66 previously fell off the end and
            # returned None, crashing the string concatenation below.
            return "46_66"

    COUNTRY = input("Enter a country name (USA/EUR/BRA/DEU/TUR/FRA):")
    SOURCE = input("Enter the operating system of phone (IOS/ANDROID):")
    SEX = input("Enter the gender (FEMALE/MALE):")
    AGE = int(input("Enter the age:"))
    AGE_SEG = age_category(AGE)
    new_user = COUNTRY.upper() + '_' + SOURCE.upper() + '_' + SEX.upper() + '_' + AGE_SEG
    print(new_user)
    match = agg_df[agg_df["CUSTOMERS_LEVEL_BASED"] == new_user]
    if match.empty:
        # BUGFIX: guard against IndexError when the persona does not exist.
        print("No matching persona found for " + new_user)
    else:
        print("Segment:" + match.loc[:, "SEGMENT"].values[0])
        print("Price:" + str(match.loc[:, "PRICE"].values[0]))
    return new_user
ruled_based_classification(df)
|
23,935 | bb524a506aa36162955394068af103667f9d7ae8 | from Tkinter import *
import string
import random
import tkMessageBox
import tkFileDialog
import datetime
import os
import time
import winsound
import pickle
class encryption:
    """Toggles the on-disk encryption state of the planner's data files.

    Both ``Tasks.txt`` and ``settings.txt`` are rewritten together using
    the module-level ``encrypt``/``decrypt`` functions.  ``encrypted``
    tracks the current state so each operation is a no-op when the files
    are already in the requested state.
    """
    # files shipped encrypted on disk, so the initial state is True
    encrypted = True
    # the two data files that are always (de)crypted together
    _FILES = ("Tasks.txt", "settings.txt")

    def _recode(self, path, transform):
        """Rewrite *path* with ``transform(contents)`` via a temp file.

        Uses ``with`` so the handles are closed even if the transform or
        a write fails (the original leaked open files on error).
        """
        with open(path) as src:
            text = transform(src.read())
        with open("Temp.txt", "w") as dst:
            dst.write(text)
        os.remove(path)
        os.rename("Temp.txt", path)

    def encryptfile(self):
        """Encrypt both data files; no-op if already encrypted."""
        if not self.encrypted:
            for path in self._FILES:
                self._recode(path, encrypt)
            self.encrypted = True

    def decryptfile(self):
        """Decrypt both data files; no-op if already decrypted."""
        if self.encrypted:
            for path in self._FILES:
                self._recode(path, decrypt)
            self.encrypted = False
def encrypt(text):
    """Vigenere-style byte shift with the fixed key "Porunga".

    Each character is shifted forward (mod 256) by the ordinal of the
    key character at the same position, the key repeating cyclically.
    """
    key = "Porunga"
    codes = (
        (ord(ch) + ord(key[pos % len(key)])) % 256
        for pos, ch in enumerate(text)
    )
    return ''.join(chr(code) for code in codes)
def decrypt(text):
    """Inverse of ``encrypt``: shift each character back (mod 256).

    Uses the same fixed key "Porunga", repeated cyclically.
    """
    key = "Porunga"
    codes = (
        (ord(ch) - ord(key[pos % len(key)])) % 256
        for pos, ch in enumerate(text)
    )
    return ''.join(chr(code) for code in codes)
class Diary:
    """A task record persisted to the space-separated ``Tasks.txt`` file.

    Each line is: ``ID date_time label details status rating``; spaces
    inside a field are stored as underscores.
    """
    def __init__(self):
        # Placeholder creation date.  BUGFIX: the original bare literal
        # 01/01/0001 was octal/integer division in Python 2 (evaluating
        # to the integer 1) and a SyntaxError in Python 3 -- a date
        # placeholder string was clearly intended.
        self.DateTime = "01/01/0001"
        self.Label = ''
        self.Details = ''
        self.cstatus = "Pending"  # new tasks start out pending

    def NewID(self):
        """Return a random 4-digit task id as a string."""
        return ''.join(str(random.randrange(0, 10)) for _ in range(4))

    def Store(self, DateTime, Label, Details, Rating):
        """Append this task as one line to ``Tasks.txt``.

        Spaces inside date/label/details are replaced by underscores so
        the record stays splittable on whitespace.
        """
        self.ID = self.NewID()
        record = (self.ID + " " + str(DateTime).replace(" ", "_") + " "
                  + str(Label).replace(" ", "_") + " "
                  + str(Details).replace(" ", "_") + " "
                  + str(self.cstatus) + " " + str(Rating) + "\n")
        # `with` guarantees the handle is closed even if the write fails
        with open("Tasks.txt", 'a') as outfile:
            outfile.write(record)

    def Delete(self, DID):
        """Remove the task whose id equals *DID* from ``Tasks.txt``.

        Rewrites the file keeping every other record; short/malformed
        lines are kept as-is instead of raising IndexError.
        """
        DID = str(DID)
        with open("Tasks.txt", "r") as infile:
            lines = infile.readlines()
        kept = []
        for line in lines:
            fields = line.split()
            if fields and fields[0] != DID:
                kept.append(" ".join(fields[0:6]) + "\n")
        with open("Tasks.txt", "w") as outfile:
            outfile.writelines(kept)
class Controlwindow:
    """Main planner window: a toolbar of action buttons above a scrollable,
    read-only task list.

    Task records live in ``Tasks.txt`` as space-separated fields
    (ID date_time label details status rating); spaces inside a field are
    stored as underscores.  Settings (``[password, alarm_sound_path]``)
    are a pickled list in ``settings.txt``.
    NOTE(review): indentation in this chunk was stripped; the block
    structure below was reconstructed from the code's logic -- confirm
    against the original file.
    """
    def __init__(self, parent):
        """Build the toolbar, task list, live clock and search box."""
        self.smileypath=os.getcwd()+"/smiley.ico"
        self.myParent = parent
        self.mainframe = Frame(parent)
        self.mainframe.pack()
        # Control Box: toolbar frame holding the action buttons
        self.controlbox= Frame(self.mainframe)
        self.controlbox.pack(side=TOP,ipadx="3m",ipady="2m",padx="3m",pady="2m",
        )
        # Task Selection Frame: scrollable read-only task display
        self.tsf=Frame(self.mainframe)
        self.tsf.pack(side=TOP,fill=BOTH,expand=YES)
        self.scroll=Scrollbar(self.tsf)
        self.scroll.pack(side=RIGHT, fill=Y)
        self.DF=Text(self.tsf,yscrollcommand=self.scroll.set,wrap=WORD,font='Fixedsys',bg='black',fg='white')
        self.DF.pack(side=TOP,fill=BOTH)
        self.scroll.config(command=self.DF.yview)
        self.reld()  # initial fill of the task list from Tasks.txt
        # live clock label, refreshed once a minute via after()
        showtime=Label(self.tsf,bd=1)
        showtime.pack(side=RIGHT,padx="5m")
        def tick():
            # format "D/M/YYYY H:M" and reschedule in 60 s
            T=datetime.datetime.today()
            DT=str(T.day)+"/"+str(T.month)+"/"+str(T.year)+" "+str(T.hour)+":"+str(T.minute)
            showtime.configure(text=DT)
            time.sleep(1)  # NOTE(review): blocks the UI thread 1 s per tick
            showtime.after(60000, tick)
        tick()
        # search box: every keypress re-filters the list via Display()
        self.v=StringVar()
        self.textbox=Entry(self.tsf,textvariable=self.v,bd=2)
        self.textbox.bind("<KeyPress>",self.Display)
        self.textbox.pack(side=RIGHT,padx="29m")
        # Extracted all tasks in file one by one and displayed in scrollable textbox
        # toolbar button icons (loaded from the working directory)
        self.AlarmB=PhotoImage(file="clock.gif")
        self.QuitB=PhotoImage(file="quit.gif")
        self.AddB=PhotoImage(file="add.gif")
        self.EditB=PhotoImage(file="edit.gif")
        self.DeleteB=PhotoImage(file="delete.gif")
        self.CheckB=PhotoImage(file="check.gif")
        self.SettingsB=PhotoImage(file="settings.gif")
        # Control Buttons: add / edit / delete / mark-done on the left,
        # settings / alarm / quit on the right
        self.NT = Button(self.controlbox,command=self.NTF,width=50,height=50,image=self.AddB)
        self.NT.pack(side=LEFT)
        self.EET = Button(self.controlbox,command=self.EETF,width=50,height=50,image=self.EditB)
        self.EET.pack(side=LEFT)
        self.DT = Button(self.controlbox,command=self.DTF,width=50,height=50,image=self.DeleteB)
        self.DT.pack(side=LEFT)
        self.checkbutton=Button(self.controlbox,command=self.Checker,width=50,height=50,image=self.CheckB)
        self.checkbutton.pack(side=LEFT)
        self.closebutton3=Button(self.controlbox,command=parent.destroy,width=50,height=50,image=self.QuitB)
        self.closebutton3.pack(side=RIGHT)
        self.alarmbutton=Button(self.controlbox,width=50,height=50,image=self.AlarmB,command=self.alarmmode)
        self.alarmbutton.pack(side=RIGHT)
        self.settingsbutton=Button(self.controlbox,width=50,height=50,image=self.SettingsB,command=self.options)
        self.settingsbutton.pack(side=RIGHT)
    def options(self):
        """Open the settings dialog (password change + custom alarm sound)."""
        self.opwin=Toplevel(self.controlbox)
        self.opwin.iconbitmap(self.smileypath)
        self.opframe1=Frame(self.opwin)
        self.opframe1.pack(side=TOP)
        self.opframe2=Frame(self.opwin)
        self.opframe2.pack(side=TOP)
        self.epass=StringVar()   # existing password entry
        self.npass=StringVar()   # new password entry
        self.openB=PhotoImage(file="open.gif")
        self.playB=PhotoImage(file="play.gif")
        self.passchange=Label(self.opframe1,text="Change Your Password",pady="3m")
        self.passchange.pack(side=TOP)
        self.passchange1=Label(self.opframe1,text="Existing Password:")
        self.passchange1.pack(side=TOP)
        self.passbox1=Entry(self.opframe1,textvariable=self.epass,show="*")
        self.passbox1.pack(side=TOP)
        self.passchange2=Label(self.opframe1,text="New Password:")
        self.passchange2.pack(side=TOP)
        self.passbox2=Entry(self.opframe1,textvariable=self.npass,show="*")
        self.passbox2.pack(side=TOP)
        self.passchangeB=Button(self.opframe1,command=self.pchange,text="Ok")
        self.passchangeB.pack(side=BOTTOM)
        self.openfile=Button(self.opframe2,image=self.openB,command=self.opennew)
        self.openfile.pack(side=LEFT)
        self.play=Button(self.opframe2,image=self.playB,command=self.plays)
        self.play.pack(side=RIGHT)
        self.importsound2=Label(self.opframe2,text="Custom Alarm Sound",pady='5m')
        self.importsound2.pack(side=TOP)
        # NOTE(review): settings are loaded here but the result is unused
        opf=open("settings.txt")
        oplist=pickle.load(opf)
        opf.close()
    def opennew(self):
        """Pick a new .wav alarm sound via a file dialog."""
        self.opwin.iconify()
        self.newsoundpath=tkFileDialog.askopenfilename()
        if self.newsoundpath[-3:]=='wav':
            # NOTE(review): oplist is written back unchanged -- the chosen
            # path is never stored in settings.txt; looks like a bug.
            opf=open("settings.txt")
            oplist=pickle.load(opf)
            opf.close()
            f=open("settings.txt",'w')
            pickle.dump(oplist,f)
            f.close()
        else:
            tkMessageBox.showwarning("Error","Please select a .wav sound file")
        self.opwin.deiconify()
    def plays(self):
        """Preview the currently configured alarm sound."""
        opf=open("settings.txt")
        oplist=pickle.load(opf)
        opf.close()
        spath=oplist[1]  # oplist[1] holds the sound file path
        data=open(spath,"r").read()
        winsound.PlaySound(data, winsound.SND_MEMORY)
    def pchange(self):
        """Change the stored password after verifying the existing one."""
        self.opwin.iconify()
        opf=open("settings.txt")
        oplist=pickle.load(opf)
        opf.close()
        if self.epass.get()==oplist[0]:  # oplist[0] holds the password
            oplist[0]=self.npass.get()
            f=open("settings.txt",'w')
            pickle.dump(oplist,f)
            f.close()
            tkMessageBox.showwarning("Success","Password has been changed")
        else:
            tkMessageBox.showwarning("Error","Incorrect Password")
        self.opwin.deiconify()
        self.passbox1.delete(0,END)
        self.passbox2.delete(0,END)
    def alarmmode(self):
        """Open the alarm dialog asking for an alarm date and time."""
        self.al=Toplevel(self.myParent)
        self.al.iconbitmap(self.smileypath)
        self.alDateYear=StringVar()
        self.alDateMonth=StringVar()
        self.alDateDay=StringVar()
        self.alTimeHour=StringVar()
        self.alTimeMinute=StringVar()
        self.all1=Label(self.al,text="Enter the Alarm Date [DD/MM/YY]:")
        self.all1.pack(side=LEFT)
        textbox1=Entry(self.al,textvariable=self.alDateDay,width=4)
        textbox1.pack(side=LEFT)
        textbox1_=Entry(self.al,textvariable=self.alDateMonth,width=4)
        textbox1_.pack(side=LEFT)
        textbox1__=Entry(self.al,textvariable=self.alDateYear,width=6)
        textbox1__.pack(side=LEFT)
        self.alb=Button(self.al,text="Set",command=self.alarmtime)
        self.alb.pack(side=RIGHT)
        textbox2_=Entry(self.al,textvariable=self.alTimeMinute,width=4)
        textbox2_.pack(side=RIGHT)
        textbox2=Entry(self.al,textvariable=self.alTimeHour,width=4)
        textbox2.pack(side=RIGHT)
        self.all2=Label(self.al,text="Enter the Alarm Time [24:00]:")
        self.all2.pack(side=RIGHT)
    def alarmtime(self):
        """Arm the alarm: poll once a second and play the sound on match."""
        self.al.destroy()
        self.altime=Toplevel(self.myParent)
        self.altime.iconbitmap(self.smileypath)
        S="Alarm set for "+self.alDateDay.get()+"/"+self.alDateMonth.get()+"/"+self.alDateYear.get()+" "+self.alTimeHour.get()+":"+self.alTimeMinute.get()
        self.all12=Label(self.altime,text=S)
        self.all12.pack(side=LEFT)
        self.albb=Button(self.altime,text="Abort",command=self.altime.destroy)
        self.albb.pack(side=RIGHT)
        def tick():
            # compare current wall clock to the armed date/time (eval on
            # the user-entered strings; NOTE(review): non-numeric input
            # will raise here)
            T=datetime.datetime.today()
            if T.year==eval(self.alDateYear.get()) and T.month==eval(self.alDateMonth.get()) and T.day==eval(self.alDateDay.get()) and T.hour==eval(self.alTimeHour.get()) and T.minute==eval(self.alTimeMinute.get()):
                opf=open("settings.txt")
                oplist=pickle.load(opf)
                opf.close()
                spath=oplist[1]
                data=open(spath,"r").read()
                winsound.PlaySound(data, winsound.SND_MEMORY)
            self.altime.after(1000, tick)
        tick()
    def Checker(self):
        """Open the "mark task as done" dialog (enter an ID + Return)."""
        self.cframe=Toplevel(self.controlbox,borderwidth=5,relief=RIDGE)
        self.cframe.iconbitmap(self.smileypath)
        self.ctxt=Label(self.cframe,text="Input the ID's of the tasks you have finished and hit Return:")
        self.ctxt.pack()
        self.varn=StringVar()
        self.textboxn1=Entry(self.cframe,textvariable=self.varn)
        self.textboxn1.bind("<Return>",self.Ch)
        self.textboxn1.pack(side=BOTTOM)
    def Ch(self,event):
        """<Return> handler: rewrite the matching task with status "Done"."""
        infile=open("Tasks.txt","r")
        for j in infile.readlines():
            T=string.split(j)
            if T[0]==str(self.varn.get()):
                # delete the old record, then append it again with "Done"
                p=Diary()
                p.Delete(self.varn.get())
                f=open("Tasks.txt","a")
                S=T[0]+" "+T[1].replace(" ","_")+" "+T[2].replace(" ","_")+" "+T[3].replace(" ","_")+" "+"Done"+" "+T[5]+"\n"
                f.write(S)
                f.close()
        infile.close()
        self.textboxn1.delete(0,END)
        self.textboxn1.pack()
        self.reld()
    # Kept for Smart Search
    def Display(self,event):
        """Search handler: redraw the list with only tasks matching the
        text currently in the search box (matched against date, name,
        details and status)."""
        self.reld()
        self.DF.configure(state=NORMAL)
        self.DF.delete(1.0,END)
        infile=open("Tasks.txt","r")
        L=[]
        D={}
        # bucket records by their parsed datetime so output is sorted
        for j in infile.readlines():
            T=string.split(j)
            T[1]=T[1].replace("_"," ")
            T[2]=T[2].replace("_"," ")
            T[3]=T[3].replace("_"," ")
            D1=T[1].split()
            AD=D1[0].split("/")
            AT=D1[1].split(":")
            Test=datetime.datetime(eval(AD[2]),eval(AD[1]),eval(AD[0]),eval(AT[0]),eval(AT[1]))
            try:
                D[Test].append(T)
            except KeyError:
                D[Test]=[T]
        Li=D.keys()
        Li.sort()
        for i in Li:
            L.extend(D[i])
        for k in L:
            # NOTE(review): D is re-used here for the split date/time pair,
            # shadowing the bucket dict above
            D=k[1].split()
            if self.v.get() in k[1] or self.v.get() in k[2] or self.v.get() in k[3] or self.v.get() in k[4] or self.v.get()==D[0] or self.v.get()==D[1]:
                s=eval(k[5])*"*"  # priority rendered as 1-5 asterisks
                S="%s: %s %s %s %s \n %s\n\n"%(k[0],k[2],k[1],s,k[4],k[3])
                self.DF.insert(END,S)
        self.DF.pack(side=TOP,fill=BOTH)
        self.DF.configure(state=DISABLED)
        infile.close()
    # Edit Frame1
    def EETF(self):
        """Open the "edit task" dialog asking for the task ID."""
        self.eframe=Toplevel(self.controlbox,borderwidth=5,relief=RIDGE)
        self.eframe.iconbitmap(self.smileypath)
        self.Etxt=Label(self.eframe,text="Input the Task ID you wish to Edit and hit Return:")
        self.Etxt.pack()
        self.var1=StringVar()
        textbox3=Entry(self.eframe,textvariable=self.var1)
        textbox3.bind("<Return>",self.Ed)
        textbox3.pack(side=BOTTOM)
    def Ed(self,event):
        """<Return> handler: load the chosen task into an edit form."""
        self.eframe.destroy()
        self.neframe=Toplevel(self.controlbox,borderwidth=5,relief=RIDGE)
        self.neframe.iconbitmap(self.smileypath)
        c=0  # found-flag: 1 once the requested ID is located
        infile=open("Tasks.txt","r")
        for j in infile.readlines():
            T=string.split(j)
            if T[0]==str(self.var1.get()):
                self.ExistingID=T[0]
                c=1
                D=T[1].split("_")     # "date_time" -> [date, time]
                Date2=D[0].split("/")
                Time2=D[1].split(":")
                break
        infile.close()
        if c==1:
            # build the edit form pre-filled with the existing values
            self.DateYear1_=StringVar()
            self.DateMonth1_=StringVar()
            self.DateDay1_=StringVar()
            self.TimeHour1_=StringVar()
            self.TimeMinute1_=StringVar()
            self.Name1_=StringVar()
            self.Details1_=StringVar()
            self.l11=Label(self.neframe,text="Enter the task Date [DD/MM/YY]:")
            self.l11.pack(side=LEFT)
            textbox111=Entry(self.neframe,textvariable=self.DateDay1_,width=4)
            textbox111.insert(END,Date2[0])
            textbox111.pack(side=LEFT)
            textbox112_=Entry(self.neframe,textvariable=self.DateMonth1_,width=4)
            textbox112_.insert(END,Date2[1])
            textbox112_.pack(side=LEFT)
            textbox113__=Entry(self.neframe,textvariable=self.DateYear1_,width=6)
            textbox113__.insert(END,Date2[2])
            textbox113__.pack(side=LEFT)
            textbox21_=Entry(self.neframe,textvariable=self.TimeMinute1_,width=4)
            textbox21_.insert(END,Time2[1])
            textbox21_.pack(side=RIGHT)
            textbox211=Entry(self.neframe,textvariable=self.TimeHour1_,width=4)
            textbox211.insert(END,Time2[0])
            textbox211.pack(side=RIGHT)
            self.l2=Label(self.neframe,text="Enter the task Time [24:00]:")
            self.l2.pack(side=RIGHT)
            self.l322=Label(self.neframe,text="Enter the task Priority :")
            self.l322.pack(side=TOP)
            self.slider=Scale(self.neframe,from_=1,to=5,orient=HORIZONTAL)
            self.slider.set(eval(T[5]))
            self.slider.pack()
            self.l31=Label(self.neframe,text="Enter the task Name :")
            self.l31.pack(side=TOP)
            textbox31=Entry(self.neframe,textvariable=self.Name1_)
            textbox31.insert(END,T[2].replace("_"," "))
            textbox31.pack(side=TOP)
            self.l41=Label(self.neframe,text="Enter the task Details: ")
            self.l41.pack(side=TOP)
            textbox41=Entry(self.neframe,textvariable=self.Details1_,width=50)
            textbox41.insert(END,T[3].replace("_"," "))
            textbox41.pack(side=TOP)
            self.l51=Button(self.neframe,text="OK",command=self.Modify)
            self.l51.pack(side=TOP)
        else:
            tkMessageBox.showwarning("Error","Please Enter a Correct ID")
            self.neframe.destroy()
    def Modify(self):
        """OK handler of the edit form: validate, then replace the record."""
        p=Diary()
        c=1  # validity flag, cleared by either failed validation below
        try:
            Timetest=datetime.time(eval(self.TimeHour1_.get()),eval(self.TimeMinute1_.get()))
        except (NameError,ValueError):
            tkMessageBox.showwarning("Error","Please Enter a Correct Time")
            c=0
        try:
            Datetest=datetime.date(eval(self.DateYear1_.get()),eval(self.DateMonth1_.get()),eval(self.DateDay1_.get()))
        except (NameError,ValueError):
            tkMessageBox.showwarning("Error","Please Enter a Correct Date")
            c=0
        if c==1:
            # delete the old record and append the edited one (same ID,
            # status reset to "Pending")
            DT=self.DateDay1_.get()+"/"+self.DateMonth1_.get()+"/"+self.DateYear1_.get()+" "+self.TimeHour1_.get()+":"+self.TimeMinute1_.get()
            p.Delete(self.var1.get())
            S=self.ExistingID+" "+str(DT).replace(" ","_")+" "+str(self.Name1_.get()).replace(" ","_")+" "+str(self.Details1_.get()).replace(" ","_")+" "+"Pending"+" "+str(self.slider.get())+"\n"
            infile=open("Tasks.txt",'a')
            infile.write(S)
            infile.close()
            self.reld()
    def NTF(self):
        """Open the "new task" form (date, time, priority, name, details)."""
        self.NTframe=Toplevel(self.controlbox,borderwidth=5,relief=RIDGE)
        self.NTframe.iconbitmap(self.smileypath)
        self.DateYear_=StringVar()
        self.DateMonth_=StringVar()
        self.DateDay_=StringVar()
        self.TimeHour_=StringVar()
        self.TimeMinute_=StringVar()
        self.Name_=StringVar()
        self.Details_=StringVar()
        self.l1=Label(self.NTframe,text="Enter the task Date [DD/MM/YY]:")
        self.l1.pack(side=LEFT)
        textbox1=Entry(self.NTframe,textvariable=self.DateDay_,width=4)
        textbox1.pack(side=LEFT)
        textbox1_=Entry(self.NTframe,textvariable=self.DateMonth_,width=4)
        textbox1_.pack(side=LEFT)
        textbox1__=Entry(self.NTframe,textvariable=self.DateYear_,width=6)
        textbox1__.pack(side=LEFT)
        textbox2_=Entry(self.NTframe,textvariable=self.TimeMinute_,width=4)
        textbox2_.pack(side=RIGHT)
        textbox2=Entry(self.NTframe,textvariable=self.TimeHour_,width=4)
        textbox2.pack(side=RIGHT)
        self.l2=Label(self.NTframe,text="Enter the task Time [24:00]:")
        self.l2.pack(side=RIGHT)
        self.l322=Label(self.NTframe,text="Enter the task Priority :")
        self.l322.pack(side=TOP)
        self.slider1=Scale(self.NTframe,from_=1,to=5,orient=HORIZONTAL)
        self.slider1.pack()
        self.l3=Label(self.NTframe,text="Enter the task Name :")
        self.l3.pack(side=TOP)
        textbox3=Entry(self.NTframe,textvariable=self.Name_)
        textbox3.pack(side=TOP)
        self.l5=Button(self.NTframe,text="OK",command=self.Insert)
        self.l5.pack(side=BOTTOM)
        textbox4=Entry(self.NTframe,textvariable=self.Details_,width=50)
        textbox4.pack(side=BOTTOM)
        self.l4=Label(self.NTframe,text="Enter the task Details: ")
        self.l4.pack(side=BOTTOM)
    def Insert(self):
        """OK handler of the new-task form: validate, then store it."""
        p=Diary()
        c=1  # validity flag, cleared by either failed validation below
        try:
            Timetest=datetime.time(eval(self.TimeHour_.get()),eval(self.TimeMinute_.get()))
        except (NameError,ValueError):
            tkMessageBox.showwarning("Error","Please Enter a Correct Time")
            c=0
        try:
            Datetest=datetime.date(eval(self.DateYear_.get()),eval(self.DateMonth_.get()),eval(self.DateDay_.get()))
        except (NameError,ValueError):
            tkMessageBox.showwarning("Error","Please Enter a Correct Date")
            c=0
        if c==1:
            DT=self.DateDay_.get()+"/"+self.DateMonth_.get()+"/"+self.DateYear_.get()+" "+self.TimeHour_.get()+":"+self.TimeMinute_.get()
            p.Store(DT,self.Name_.get(),self.Details_.get(),self.slider1.get())
            self.reld()
    def DTF(self):
        """Open the "delete task" dialog (enter an ID + Return)."""
        self.deleteframe=Toplevel(self.controlbox,borderwidth=5,relief=RIDGE,)
        self.deleteframe.iconbitmap(self.smileypath)
        self.Deltxt=Label(self.deleteframe,text="Input the Task ID you wish to delete and hit Return:")
        self.Deltxt.pack()
        self.var=StringVar()
        self.textboxx=Entry(self.deleteframe,textvariable=self.var)
        self.textboxx.bind("<Return>",self.Del)
        self.textboxx.pack(side=BOTTOM)
    def Del(self,event):
        """<Return> handler: delete the record and refresh the list."""
        p=Diary()
        p.Delete(self.var.get())
        self.textboxx.delete(0,END)
        self.textboxx.pack()
        self.reld()
    def reld(self):
        """Reload the task display: parse Tasks.txt, sort records
        chronologically and render each as "ID: name date **** status"."""
        self.DF.configure(state=NORMAL)
        self.DF.delete(1.0,END)
        infile=open("Tasks.txt","r")
        L=[]
        Li=[]
        D={}
        # bucket records by their parsed datetime so output is sorted
        for j in infile.readlines():
            T=string.split(j)
            T[1]=T[1].replace("_"," ")
            T[2]=T[2].replace("_"," ")
            T[3]=T[3].replace("_"," ")
            D1=T[1].split()
            AD=D1[0].split("/")
            AT=D1[1].split(":")
            Test=datetime.datetime(eval(AD[2]),eval(AD[1]),eval(AD[0]),eval(AT[0]),eval(AT[1]))
            try:
                D[Test].append(T)
            except KeyError:
                D[Test]=[T]
        Li=D.keys()
        Li.sort()
        for i in Li:
            L.extend(D[i])
        for k in L:
            s=eval(k[5])*"*"  # priority rendered as 1-5 asterisks
            S="%s: %s %s %s %s \n %s\n\n"%(k[0],k[2],k[1],s,k[4],k[3])
            self.DF.insert(END,S)
        self.DF.pack(side=TOP,fill=BOTH)
        self.DF.configure(state=DISABLED)
        infile.close()
# --- application start-up ---------------------------------------------------
# The data files are stored encrypted on disk; decrypt them for this run
# (they are re-encrypted again on exit at the bottom of the file).
a=encryption()
a.decryptfile()
global oplist  # no-op at module level; kept from the original
opf=open("settings.txt")
oplist=pickle.load(opf)  # settings list: [0] = password, [1] = alarm sound path
opf.close()
global passcode  # no-op at module level; kept from the original
passcode=oplist[0]
def passcheck(event):
    """<Return> handler of the login box.

    If the entered password matches the stored one, close the login
    window and launch the main planner window (blocks in mainloop until
    the planner is closed).
    """
    global password
    global root1
    if password.get()==passcode:
        root1.destroy()
        root=Tk()
        root.iconbitmap(smiley)
        root.title("Personal Planner")
        p=Diary()  # NOTE(review): created but unused here
        Main=Controlwindow(root)
        root.mainloop()
# --- login window -----------------------------------------------------------
# Show a calendar splash image with a password box; <Return> triggers
# passcheck(), which opens the main planner on success.
smiley=os.getcwd()+"/smiley.ico"
global root1  # no-op at module level; kept from the original
root1=Tk()
root1.title("Welcome to Personal Planner")
root1.iconbitmap(smiley)
cal=PhotoImage(file="calendar.gif")
L=Button(root1,image=cal,bd=0)
L.pack()
global password  # no-op at module level; kept from the original
password=StringVar()
passbox=Entry(root1,textvariable=password,show="*")
passbox.bind("<Return>",passcheck)
passbox.pack(side=BOTTOM)
root1.mainloop()
# on exit, put the data files back into their encrypted at-rest state
a.encryptfile()
|
23,936 | 32910bdc05e1792831db13ee94ea41fb33453676 | # vCloud CLI 0.1
#
# Copyright (c) 2017 VMware, Inc. All Rights Reserved.
#
# This product is licensed to you under the
# Apache License, Version 2.0 (the "License").
# You may not use this product except in compliance with the License.
#
# This product may include a number of subcomponents with
# separate copyright notices and license terms. Your use of the source
# code for the these subcomponents is subject to the terms and
# conditions of the subcomponent's license, as noted in the LICENSE file.
#
from vcd_cli.profiles import Profiles
def load_user_plugins():
    """Import every extension module listed in the user's profiles.

    Reads the ``'extensions'`` entry of the loaded profiles (if present)
    and imports each named module so it can register its commands.
    """
    profiles = Profiles.load()
    # FIX: the original additionally guarded on len(profiles.data) > 0 --
    # the size of the whole profiles mapping, not the extensions list --
    # which is always true once 'extensions' is a key.  The membership
    # test alone expresses the intent.
    if 'extensions' in profiles.data:
        for extension in profiles.data['extensions']:
            __import__(extension)
|
23,937 | e5ead0718984bf2c0276e81442f08a8428c92807 | from datetime import datetime
# Derive a "random-looking" number from the current wall-clock time.
now = datetime.now()
# NOTE(review): Python 2 integer arithmetic -- '/' truncates here, and this
# raises ZeroDivisionError whenever now.second == 0; confirm intent.
number = now.hour * now.minute / now.second + now.day
print number
#print number
# print "Here is a random number: " + number
# name = raw_input("What is your name? ")
# print "Greetings, " + name + " how are you feeling today?"
23,938 | fecbd9dbc71f2e1b9ceaec7b44311ec20b7517fb | import tensorflow as tf
from tensorflow.keras.layers import *
from tensorflow.keras import Model
# Get EfficientNetB6 model, is available only from tensorflow version > 2.2
# Get EfficientNetB6 model, is available only from tensorflow version > 2.2
def get_efficient(train_ds, train_steps, class_number, weights, freeze_layers_number, input_shape, metrics, optimizer, activation='relu', kernel_reg=None, bias_reg=None):
    """Build and warm-start an EfficientNetB6 classifier.

    Phase 1 trains only a fresh classification head on top of a frozen
    ImageNet backbone for 3 epochs; phase 2 unfreezes every layer past
    ``freeze_layers_number`` and recompiles with ``metrics`` so the
    caller can continue fine-tuning.  ``weights`` are the per-class
    weights passed to ``fit`` (not model weights).
    """
    backbone = tf.keras.applications.EfficientNetB6(
        include_top=False, weights='imagenet', input_shape=input_shape)
    head = GlobalAveragePooling2D()(backbone.output)
    head = Dense(1024, activation=activation,
                 kernel_regularizer=kernel_reg,
                 bias_regularizer=bias_reg)(head)
    head = Dropout(0.5)(head)
    outputs = Dense(class_number, activation='softmax')(head)
    model = Model(inputs=backbone.input, outputs=outputs)

    # Phase 1: freeze the backbone and fit only the new head.
    for layer in backbone.layers:
        layer.trainable = False
    model.compile(optimizer=optimizer, loss='categorical_crossentropy')
    model.fit(train_ds, steps_per_epoch=train_steps, epochs=3, class_weight=weights)

    # Phase 2: keep the first freeze_layers_number layers frozen,
    # unfreeze the rest, and recompile with the requested metrics.
    for index, layer in enumerate(model.layers):
        layer.trainable = index >= freeze_layers_number
    model.compile(optimizer=optimizer,
                  loss='categorical_crossentropy',
                  metrics=metrics)
    return model
23,939 | 13cfc9725549bb83f25faae83d4bac28811bc21b | from datetime import datetime
from django.test import TestCase
from django.contrib.auth.models import User
from blog.models import BlogEntry
class BlogModelTests(TestCase):
    """Model-layer tests for :class:`blog.models.BlogEntry`."""

    fixtures = ['users_views_testdata']

    def test_representations(self):
        """str() and repr() of a BlogEntry follow the documented format."""
        title = 'EntryTitle'
        content = '<p> awesome html </p>'
        # BUGFIX: the original assigned `datetime.now` without calling it,
        # storing the function object instead of a timestamp.
        created_on = datetime.now()
        created_by = User.objects.get(pk=1)
        status = BlogEntry.DRAFT
        blogentry = BlogEntry(
            title=title,
            content=content,
            status=status,
            created_on=created_on,
            created_by=created_by,
        )
        representation = "< BlogEntry title='{}' created_by='{}' status='{}' >".format(title, created_by, status)
        self.assertEqual(str(blogentry), representation)
        self.assertEqual(repr(blogentry), representation)
23,940 | 12971ddeda9a32e55723a76abcf26a7d098e5592 | curl -X POST -H “Content-Type: application/json” -d’{
  "sender": "4498dcbf5abb465a98338a028c144b72",
  "recipient": "someone-other-address",
  "amount": 5
}' \
"http://192.168.0.207:5000/transactions/new"
23,941 | 12285ee490ac43d7537b6140aca3fdb3b05af29e | import sys
import math
# Running totals updated by the helpers below.
counter=0  # number of tokens processed by b()
add=0      # intended to accumulate the whole parts via addwholepart()
def b(var):
    """Strip surrounding whitespace from *var*, counting each call
    in the module-level ``counter``."""
    global counter
    counter += 1
    return var.strip()
def c(var):
    """Return the stripped whole-number part (element 0) of a split token,
    feeding it to addwholepart() on the way."""
    whole = var[0].strip()
    addwholepart(whole)
    return whole
def addwholepart(no):
    """Accumulate the integer (whole) part *no* into the global ``add``.

    BUGFIX: the original had an empty ``for`` body (an IndentationError at
    import time), discarded the result of ``int(no)`` and never updated
    ``add`` even though the driver script prints it.
    """
    global add
    add += int(no)
# Read the file named on the command line and process each space-separated
# token: count it via b() and accumulate its whole part via c().
name=sys.argv[1]
fopen=open(name)
for var in fopen.read().split(" "):
    rs1=b(var)          # strip + count the token
    var1=var.split(".") # split "whole.fraction"
    res2=c(var1)        # whole part, also fed to addwholepart()
    res2=int(res2)
# split(" ") yields one trailing empty token, hence the -1
print("total numbrs:",counter-1)
print("addition:",add)
|
23,942 | 76ea2398d04d51627c4e78b2509db18d3977191e | # -*- coding: utf-8 -*-
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# --------------------------------------------------------------------------
import pytest
import time
from azure.datalake.store.exceptions import DatalakeRESTException
from azure.datalake.store.lib import (
DataLakeCredential, DatalakeRESTInterface)
from tests import settings
from tests.testing import my_vcr
@pytest.fixture()
def token():
    """User-credential token taken from the test settings."""
    return settings.TOKEN
@pytest.fixture()
def principal_token():
    """Service-principal credential token taken from the test settings."""
    return settings.PRINCIPAL_TOKEN
@pytest.fixture()
def rest(token):
    """REST interface bound to the configured test store."""
    return DatalakeRESTInterface(settings.STORE_NAME, token)
def test_errors(token):
    """Misusing the REST interface raises the expected exception types."""
    iface = DatalakeRESTInterface("none", token)
    with pytest.raises(ValueError):
        iface.call('NONEXISTENT')  # no such op
    with pytest.raises(ValueError):
        iface.call("RENAME")  # too few parameters
    with pytest.raises(ValueError):
        iface.call('RENAME', many='', additional='', pars='')  # too many parameters
    with pytest.raises(DatalakeRESTException):
        iface.call('GETCONTENTSUMMARY')  # no such store
@my_vcr.use_cassette
def test_auth_refresh(token):
    """refresh_token() issues a new access token with a later timestamp."""
    assert token.token['access']
    old_access = token.token['access']
    old_time = token.token['time']
    time.sleep(3)
    token.refresh_token()
    refreshed = DataLakeCredential(token.token)
    assert refreshed.token['access']
    assert refreshed.token['access'] != old_access
    assert refreshed.token['time'] > old_time
@my_vcr.use_cassette
def test_auth_refresh_for_service_principal(principal_token):
    """Refreshing a service-principal token renews access but keeps the secret."""
    assert principal_token.token['access']
    assert principal_token.token['secret']
    old_access = principal_token.token['access']
    old_time = principal_token.token['time']
    time.sleep(3)
    principal_token.refresh_token()
    refreshed = DataLakeCredential(principal_token.token)
    assert refreshed.token['access']
    assert refreshed.token['secret']
    assert refreshed.token['secret'] == principal_token.token['secret']
    assert refreshed.token['access'] != old_access
    assert refreshed.token['time'] > old_time
@my_vcr.use_cassette
def test_response(rest):
    """A basic LISTSTATUS call returns a truthy payload."""
    listing = rest.call('LISTSTATUS', '')
    assert listing
|
23,943 | 3a066204a73a4af93815649809ade4888f1130b6 | import climpy.utils.aeronet_utils as aeronet
from shapely.geometry.polygon import Polygon
import netCDF4
import numpy as np
import climpy.utils.wrf_utils as wrf_utils
import climpy.utils.grid_utils as grid
import pandas as pd
import matplotlib.pyplot as plt
import climpy.utils.file_path_utils as fpu
from climpy.utils.plotting_utils import save_figure_bundle, save_figure, JGR_page_width_inches, \
plot_aeronet_stations_over_wrf_domain
from scipy.constants import golden
import argparse
import os
__author__ = 'Sergey Osipov <Serega.Osipov@gmail.com>'
"""
This script plots WRF-Aeronet diagnostics within the WRF domain.
This version is slow. Much faster approach is to extract data into separate netcdf at Aeronet locations first (sample_wrf_output_at_aeronet_locations.py)
and then plot using aeronet_wrf_pp_comparison.py
To run it from the bash, see list of input args below.
Example (one line for copy-paste):
python aeronet_wrf_domain_comparison.py --aeronet_in='/work/mm0062/b302074//Data/NASA/Aeronet/' --wrf_in=/work/mm0062/b302074/Data/AirQuality/AQABA/chem_106/output/wrfout_d01_2017-0*_00:00:00 --diags_out=/work/mm0062/b302074//Pictures//Papers/AirQuality/AQABA/chem_106/ --aod_level=15
The same example (split for readability):
python aeronet_wrf_domain_comparison.py
--aeronet_in='/work/mm0062/b302074//Data/NASA/Aeronet/'
--wrf_in=/work/mm0062/b302074/Data/AirQuality/AQABA/chem_106/output/wrfout_d01_2017-0*_00:00:00
--diags_out=/work/mm0062/b302074//Pictures//Papers/AirQuality/AQABA/chem_106/
--aod_level=15
"""
# --- CLI arguments ----------------------------------------------------------
parser = argparse.ArgumentParser()
parser.add_argument("--aeronet_in", help="Aeronet file path, which contains AOD and INV folders (download all)", required=True)
parser.add_argument("--wrf_in", help="WRF file path, for example, /storage/.../wrfout_d01_2017-*_00:00:00", required=True)
parser.add_argument("--diags_out", help="Figures/diags file path, for example, /storage/.../Pictures/Paper/", required=True)
parser.add_argument("--aod_level", help="Aeronet AOD level", default=15)
# have to add those to make script Pycharm compatible
parser.add_argument("--mode", help="pycharm")
parser.add_argument("--port", help="pycharm")
# TODO: add time range as input
args = parser.parse_args()
# this is my defaults for debugging
aeronet.DATA_FILE_PATH_ROOT = fpu.get_aeronet_file_path_root()
wrf_file_path = fpu.get_root_storage_path_on_hpc() + '/Data/AirQuality/AQABA/chem_106/output/wrfout_d01_2017-*_00:00:00'
pics_output_folder = fpu.get_pictures_root_folder() + '/Papers/AirQuality/AQABA/{}/'.format('chem_106')
aod_level = 15
# parse the user input and override defaults (only outside PyCharm runs)
if 'PYCHARM_HOSTED' not in os.environ.keys():
    print('Using the provided args')
    aeronet.DATA_FILE_PATH_ROOT = args.aeronet_in
    wrf_file_path = args.wrf_in
    pics_output_folder = args.diags_out
    aod_level = args.aod_level
print('STARTING DIAGS for \n wrf_in {} \n Aeronet v{} in {} \n output diags into {}'.format(wrf_file_path, aod_level, aeronet.DATA_FILE_PATH_ROOT, pics_output_folder))
# Preparations are done, start diags
# get WRF grid
nc = netCDF4.MFDataset(wrf_file_path)  # or netcdf4.Dataset
lon = nc['XLONG'][0]  # my grid is stationary in time
lat = nc['XLAT'][0]  # sample it at time index 0
# bounding box of the WRF domain, used to filter Aeronet stations
domain = Polygon([(np.min(lon), np.min(lat)), (np.min(lon), np.max(lat)), (np.max(lon), np.max(lat)), (np.max(lon), np.min(lat))])
wrf_time = wrf_utils.generate_netcdf_uniform_time_data(nc.variables['Times'])
time_range = (wrf_time.min(), wrf_time.max())
stations = aeronet.filter_available_stations(domain, time_range, aod_level)
# overview map: domain outline plus the selected station locations
plot_aeronet_stations_over_wrf_domain(nc._files[0], stations)
save_figure_bundle(pics_output_folder + '/aeronet/', 'WRF domain and Aeronet stations maps')
def get_wrf_aod_at_aeronet_location(station):
    """Column AOD time series from the WRF output at an Aeronet site.

    Uses the module-level WRF grid (``lon``/``lat``), dataset (``nc``)
    and time axis (``wrf_time``).  Returns a dict with ``'time'`` and
    ``'data'`` keys, or ``None`` when the nearest grid point is too far
    from the station.
    """
    yx, grid_lon, grid_lat, dist = grid.find_closest_grid_point(
        lon, lat,
        station['Longitude(decimal_degrees)'],
        station['Latitude(decimal_degrees)'])
    # check that station is not too far, i.e. distance error below ~1 degree
    if dist > 1:
        print('station {} is too far'.format(station['Site_Name']))
        return None
    # read the OD profile (t, z) at that grid cell and integrate vertically
    profile = nc.variables['TAUAER3'][:, :, yx[0], yx[1]]
    return {'time': wrf_time, 'data': np.sum(profile, axis=1)}
# the two quantities being compared (nearest wavelengths available)
aeronet_var_key = 'AOD_500nm'
wrf_var_key = 'AOD_600nm'
# now lets process all stations, prepare data first
wrf_list = []
aeronet_list = []
for index, station in stations.iterrows():
    print('Processing {}'.format(station['Site_Name']))
    aeronet_vo = aeronet.get_aod_diag('*{}*'.format(station['Site_Name']), aeronet_var_key, aod_level, aeronet.ALL_POINTS,
                                      time_range=time_range)
    # Aeronet data availability is monthly, time filter can still yield no data coverage
    if aeronet_vo['data'].size == 0:
        print('No data for Aeronet station {}, skipping it'.format(station['Site_Name']))
        continue
    wrf_vo = get_wrf_aod_at_aeronet_location(station)
    # if station is too far wrf will return None
    if wrf_vo is None:
        print('No data from WRF for Aeronet station {}, skipping it'.format(station['Site_Name']))
        continue
    wrf_list.append(wrf_vo)
    aeronet_list.append(aeronet_vo)
print('Data prepared, do plotting')
print('Do time series plot')
# Time series plot for each site individually
for wrf_vo, aeronet_vo, dummy in zip(wrf_list, aeronet_list, stations.iterrows()):
    station = dummy[1]  # iterrows() yields (index, row); keep the row
    fig = plt.figure(constrained_layout=True, figsize=(JGR_page_width_inches(), JGR_page_width_inches() / golden))
    plt.plot(wrf_vo['time'], wrf_vo['data'], 'o', label='WRF, {}'.format(wrf_var_key))
    plt.plot(aeronet_vo['time'], aeronet_vo['data'], '*', label='Aeronet v{}, {}'.format(aod_level, aeronet_var_key))
    plt.ylabel('Optical depth, ()')
    plt.xlabel('Time, ()')
    plt.legend()
    plt.title('Column AOD at {}'.format(station['Site_Name']))
    #save_figure_bundle(pics_output_folder + '/aeronet/', 'WRF-Aeronet AOD, {}'.format(station['Site_Name']))
    save_figure(pics_output_folder + '/aeronet/by_site/', 'WRF-Aeronet AOD, {}.svg'.format(station['Site_Name']))
    plt.close(fig)  # free the figure; one file is written per station
print('Do scatter plot')
# Next, do the scatter plot (all stations on one model-vs-obs figure)
plt.figure(constrained_layout=True, figsize=(JGR_page_width_inches(), JGR_page_width_inches()))
plt.grid()
plt.axis('equal')
for wrf_vo, aeronet_vo, dummy in zip(wrf_list, aeronet_list, stations.iterrows()):
    station = dummy[1]  # iterrows() yields (index, row); keep the row
    # Do the scatter plot, bring data to the same (daily) resolution
    df = pd.DataFrame(data=wrf_vo['data'], index=wrf_vo['time'], columns=[wrf_var_key])
    wrf_df = df.resample('D').mean()
    df = pd.DataFrame(data=aeronet_vo['data'], index=aeronet_vo['time'], columns=[aeronet_var_key])
    aeronet_df = df.resample('D').mean()
    # make sure that values are compared at the same time
    d1, model_ind, obs_ind = np.intersect1d(wrf_df.index, aeronet_df.index, return_indices=True)
    plt.scatter(aeronet_df.iloc[obs_ind], wrf_df.iloc[model_ind], label=station['Site_Name'])
plt.legend()
# y=x reference line; axis limit fixed at 3 rather than data-driven
#global_max = np.max([wrf_df.iloc[model_ind].max(), aeronet_df.iloc[obs_ind].max()])
global_max = 3
plt.plot([0, global_max], [0, global_max], 'k-')
plt.xlabel('Observations')
plt.ylabel('Model')
plt.title('Aeronet v{} {}, WRF {}'.format(aod_level, aeronet_var_key, wrf_var_key))
save_figure_bundle(pics_output_folder + '/aeronet/', 'WRF-Aeronet AOD scatter, all stations')
print('DONE')
|
23,944 | dbeda707e143149b11febbc0db28989f40a0c7f8 | import pytest
from models.linear import LinearGenModel
from data.synthetic import generate_wty_linear_multi_w_data
from data.lalonde import load_lalonde
# True average treatment effect baked into the synthetic data
# (passed as `delta` to generate_wty_linear_multi_w_data below).
ATE = 5
# Number of samples drawn for the synthetic dataset.
N = 50
@pytest.fixture(scope='module')
def linear_gen_model():
    """Module-scoped LinearGenModel fitted on synthetic linear (w, t, y) data."""
    covariates, treatment, outcome = generate_wty_linear_multi_w_data(
        N, data_format='numpy', wdim=5, delta=ATE)
    return LinearGenModel(covariates, treatment, outcome)
@pytest.fixture(scope='module')
def lalonde_linear_gen_model():
    """Module-scoped LinearGenModel fitted on the LaLonde dataset."""
    covariates, treatment, outcome = load_lalonde()
    return LinearGenModel(covariates, treatment, outcome)
def test_null_true_linear_data_linear_model(linear_gen_model):
    """On data the model can fit, every quant-metric p-value should be large."""
    metrics = dict(linear_gen_model.get_univariate_quant_metrics())
    metrics.update(linear_gen_model.get_multivariate_quant_metrics())
    for name, value in metrics.items():
        if 'pval' not in name:
            continue
        print(name, value)
        assert value > 0.2
# NOTE: this took about 1.5 hours on my 2014 Macbook Air
@pytest.mark.slow
def test_power_linear_model_lalonde(lalonde_linear_gen_model):
    """On real LaLonde data the multivariate p-values should be tiny."""
    metrics = lalonde_linear_gen_model.get_multivariate_quant_metrics()
    for name, value in metrics.items():
        if 'pval' not in name:
            continue
        print(name, value)
        assert value < 0.01
|
23,945 | bfc978173d3d273d5d3ecf62cce6609022872175 | scr=input()
# Map each grade letter to its message; anything else falls back to 'what?'.
_grade_messages = {
    'A': 'best!!!',
    'B': 'good!!',
    'C': 'run!',
    'D': 'slowly~',
}
print(_grade_messages.get(scr, 'what?'))
23,946 | 77e50c6f5776a7a7fff981565ba2bed026fc9e3d | # The MIT License (MIT)
#
# Copyright (c) 2020 Huimao Chen
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import lldb
import HMLLDBHelpers as HM
import HMProgressHUD
import HMDebugWindow
import HMDebugBaseViewController
import HMExpressionPrefix
import HMLLDBClassInfo
# Name of the Objective-C class created at runtime for the inspector.
gClassName = "HMInspectViewController"
# Tag assigned to the inspector's overlay UIWindow (set in makeStartIMP).
gWindowTag = 10100
def _tryAddMethod(impMaker, selector: str, typeEncoding: str, isClassMethod: bool = False) -> bool:
    """Build an IMP via `impMaker` and attach it to `gClassName`.

    Returns False when the IMP could not be built (the caller is responsible
    for hiding the progress HUD and aborting registration).
    """
    impValue = impMaker()
    if not HM.judgeSBValueHasValue(impValue):
        return False
    if isClassMethod:
        HM.addClassMethod(gClassName, selector, impValue.GetValue(), typeEncoding)
    else:
        HM.addInstanceMethod(gClassName, selector, impValue.GetValue(), typeEncoding)
    return True


def register() -> None:
    """Create and register the HMInspectViewController class at runtime.

    Idempotent: returns immediately if the class already exists. Otherwise it
    registers its dependencies, allocates the class with its ivars, and adds
    the lifecycle, event, and function-action methods. On any failure the
    progress HUD is hidden and registration is aborted.
    """
    if HM.existClass(gClassName):
        return

    HMProgressHUD.register()
    HMDebugWindow.register()
    HMDebugBaseViewController.register()

    # Register class
    HMProgressHUD.show(f"Register {gClassName}...")
    HM.DPrint(f"Register {gClassName}...")

    classValue = HM.allocateClass(gClassName, HMDebugBaseViewController.gClassName)
    HM.addIvar(classValue.GetValue(), "_previousKeyWindow", "UIWindow *")
    HM.addIvar(classValue.GetValue(), "_highlightView", "UIView *")
    HM.addIvar(classValue.GetValue(), "_targetView", "UIView *")
    HM.addIvar(classValue.GetValue(), "_exitBtn", "UIButton *")
    HM.addIvar(classValue.GetValue(), "_infoView", "UIView *")
    HM.addIvar(classValue.GetValue(), "_actionView", "UIButton *")
    HM.registerClass(classValue.GetValue())

    # Lifecycle methods: (IMP factory, selector, type encoding, is class method)
    HM.DPrint(f"Add methods to {gClassName}...")
    lifecycleSpecs = [
        (makeStartIMP, "start", "@@:", True),
        (makeViewDidLoadIMP, "viewDidLoad", "v@:", False),
        (makeViewDidLayoutSubviewsIMP, "viewDidLayoutSubviews", "v@:", False),
    ]
    for maker, selector, encoding, isClassMethod in lifecycleSpecs:
        if not _tryAddMethod(maker, selector, encoding, isClassMethod):
            HMProgressHUD.hide()
            return

    # event
    HM.DPrint(f"Add methods to {gClassName}......")
    eventSpecs = [
        (makeClickExitBtnIMP, "clickExitBtn", "v@:"),
        (makeClickCloseBtnIMP, "clickCloseBtn", "v@:"),
        (makeHandleTapRecognizerIMP, "handleTapRecognizer:", "v@:@"),
        (makeFindSubviewAtPointInViewIMP, "findSubviewAtPoint:inView:", "@@:{CGPoint=dd}@"),
        (makeRefreshTargetViewIMP, "refreshTargetView:", "v@:@"),
        (makeGetInfoArrayFromTargetViewIMP, "getInfoArrayFromTargetView:", "@@:@"),
    ]
    for maker, selector, encoding in eventSpecs:
        if not _tryAddMethod(maker, selector, encoding):
            HMProgressHUD.hide()
            return

    # function action
    HM.DPrint(f"Add methods to {gClassName}.........")
    if not addFunctionMethods():
        HMProgressHUD.hide()
        return

    HM.DPrint(f"Register {gClassName} done!")
    HMProgressHUD.hide()
def makeStartIMP() -> lldb.SBValue:
    """Return an SBValue holding the IMP for +[HMInspectViewController start].

    The ObjC block creates the inspector view controller, remembers the
    current key window, hosts the VC in a new transparent HMDebugWindow
    (tagged gWindowTag, just below alert level), and makes it key.
    """
    command_script = f'''
        UIViewController * (^IMPBlock)(id) = ^UIViewController *(id classSelf) {{
            UIViewController *vc = (UIViewController *)[[(Class)objc_lookUpClass("{gClassName}") alloc] init];
            [vc setValue:[UIApplication sharedApplication].keyWindow forKey:@"_previousKeyWindow"];
            UIWindow *window = (UIWindow *)[[(Class)objc_lookUpClass("{HMDebugWindow.gClassName}") alloc] init];
            if ((BOOL)[[UIApplication sharedApplication] respondsToSelector:@selector(connectedScenes)]) {{
                NSSet *scenes = [[UIApplication sharedApplication] connectedScenes]; // NSSet<UIScene *> *scenes
                for (id scene in scenes) {{
                    if ((long)[scene activationState] == 0 && (BOOL)[scene isKindOfClass:NSClassFromString(@"UIWindowScene")]) {{
                        // UISceneActivationStateForegroundActive
                        (void)[window setWindowScene:scene];
                        break;
                    }}
                }}
            }}
            (void)[window setFrame:(CGRect)UIScreen.mainScreen.bounds];
            (void)[window setBackgroundColor:[UIColor clearColor]];
            window.windowLevel = UIWindowLevelAlert - 1;
            window.tag = {gWindowTag};
            window.rootViewController = vc;
            [window makeKeyAndVisible];
            return vc;
        }};
        imp_implementationWithBlock(IMPBlock);
    '''
    return HM.evaluateExpressionValue(command_script)
def makeViewDidLoadIMP() -> lldb.SBValue:
    """Return an SBValue holding the IMP for -[HMInspectViewController viewDidLoad].

    The ObjC block calls super, then builds the whole inspector UI:
    highlight overlay, info panel, action panel (move/ivars/properties/
    methods/sibling/superview/subview/close buttons), exit button, and a tap
    recognizer. Ivars are set via KVC since the class is created at runtime.
    """
    command_script = f'''
        void (^IMPBlock)(UIViewController *) = ^(UIViewController *vc) {{
            Class cls = objc_lookUpClass("{gClassName}");
            struct objc_super superInfo = {{
                .receiver = vc,
                .super_class = (Class)class_getSuperclass((Class)cls)
            }};
            ((void (*)(struct objc_super *, SEL))objc_msgSendSuper)(&superInfo, @selector(viewDidLoad));

            // property initialize
            (void)[vc.view setBackgroundColor:[UIColor clearColor]];

            // highlightView
            UIView *_highlightView = [[UIView alloc] init];
            [vc setValue:_highlightView forKey:@"_highlightView"];
            [_highlightView setUserInteractionEnabled:NO];
            (void)[_highlightView setBackgroundColor:[UIColor colorWithRed:0.27 green:0.56 blue:0.82 alpha:0.5]];
            [vc.view addSubview:_highlightView];

            // infoView
            UIView *_infoView = [[UIView alloc] init];
            [vc setValue:_infoView forKey:@"_infoView"];
            [_infoView setUserInteractionEnabled:NO];
            _infoView.hidden = YES;
            (void)[_infoView setBackgroundColor:[[UIColor whiteColor] colorWithAlphaComponent:.9]];
            (void)[[_infoView layer] setBorderColor:[[UIColor lightGrayColor] CGColor]];
            (void)[[_infoView layer] setBorderWidth:0.5];
            (void)[[_infoView layer] setCornerRadius:5];
            [vc.view addSubview:_infoView];

            // actionView
            CGFloat actionViewHeight = 120;
            CGFloat actionViewWidth = vc.view.bounds.size.width - 10;
            UIButton *_actionView = [[UIButton alloc] init];
            (void)[_actionView setFrame:(CGRect){{(vc.view.bounds.size.width - actionViewWidth) / 2, 0, actionViewWidth, actionViewHeight}}];
            [vc setValue:_actionView forKey:@"_actionView"];
            _actionView.hidden = YES;
            (void)[_actionView setBackgroundColor:[[UIColor whiteColor] colorWithAlphaComponent:.9]];
            (void)[[_actionView layer] setBorderColor:[[UIColor lightGrayColor] CGColor]];
            (void)[[_actionView layer] setBorderWidth:0.5];
            (void)[[_actionView layer] setCornerRadius:5];
            [vc.view addSubview:_actionView];

            // actionView-moveBtn
            UIButton *(^makeMoveBtnBlock)(NSInteger, NSString *) = ^UIButton *(NSInteger tag, NSString *text) {{
                UIButton *moveBtn = [[UIButton alloc] init];
                moveBtn.tag = tag;
                (void)[moveBtn setBackgroundColor:[[UIColor grayColor] colorWithAlphaComponent:0.2]];
                moveBtn.layer.cornerRadius = 18;
                ((void (*)(id, SEL, id, long)) objc_msgSend)((id)moveBtn, @selector(setTitle:forState:), (id)text, 0); // UIControlStateNormal
                ((void (*)(id, SEL, id, long)) objc_msgSend)((id)moveBtn, @selector(setTitleColor:forState:), (id)[UIColor blackColor], 0); // UIControlStateNormal
                ((void (*)(id, SEL, id, long)) objc_msgSend)((id)moveBtn, @selector(setTitleColor:forState:), (id)[[UIColor blackColor] colorWithAlphaComponent:0.2], 1); // UIControlStateHighlighted
                ((void (*)(id, SEL, id, SEL, long)) objc_msgSend)((id)moveBtn, @selector(addTarget:action:forControlEvents:), (id)vc, @selector(clickMoveBtn:), 64); // UIControlEventTouchUpInside
                moveBtn.titleLabel.font = [UIFont systemFontOfSize:18 weight:(UIFontWeight)UIFontWeightBold];
                return moveBtn;
            }};
            CGFloat moveBtnSideLength = 36;
            UIButton *moveTopBtn = makeMoveBtnBlock(1, @"↑");
            (void)[moveTopBtn setFrame:(CGRect){{32, 6, moveBtnSideLength, moveBtnSideLength}}];
            [_actionView addSubview:moveTopBtn];
            UIButton *moveLeftBtn = makeMoveBtnBlock(2, @"←");
            (void)[moveLeftBtn setFrame:(CGRect){{4, CGRectGetMaxY(moveTopBtn.frame), moveBtnSideLength, moveBtnSideLength}}];
            [_actionView addSubview:moveLeftBtn];
            UIButton *moveRightBtn = makeMoveBtnBlock(4, @"→");
            (void)[moveRightBtn setFrame:(CGRect){{CGRectGetMaxX(moveLeftBtn.frame) + 20, moveLeftBtn.frame.origin.y, moveBtnSideLength, moveBtnSideLength}}];
            [_actionView addSubview:moveRightBtn];
            UIButton *moveBottomBtn = makeMoveBtnBlock(3, @"↓");
            (void)[moveBottomBtn setFrame:(CGRect){{moveTopBtn.frame.origin.x, CGRectGetMaxY(moveLeftBtn.frame), moveBtnSideLength, moveBtnSideLength}}];
            [_actionView addSubview:moveBottomBtn];

            // actionView-other btns
            // 1. ivars properties methods
            // 2. superview subviews.first
            // 3. next sibling close
            UIButton *(^makeBtnBlock)(NSString *) = ^UIButton *(NSString *text) {{
                UIButton *btn = [[UIButton alloc] init];
                (void)[btn setBackgroundColor:[[UIColor grayColor] colorWithAlphaComponent:0.2]];
                btn.layer.cornerRadius = 3;
                ((void (*)(id, SEL, id, long)) objc_msgSend)((id)btn, @selector(setTitle:forState:), (id)text, 0); // UIControlStateNormal
                ((void (*)(id, SEL, id, long)) objc_msgSend)((id)btn, @selector(setTitleColor:forState:), (id)[UIColor blackColor], 0); // UIControlStateNormal
                ((void (*)(id, SEL, id, long)) objc_msgSend)((id)btn, @selector(setTitleColor:forState:), (id)[[UIColor blackColor] colorWithAlphaComponent:0.2], 1); // UIControlStateHighlighted
                btn.titleLabel.font = [UIFont systemFontOfSize:13 weight:(UIFontWeight)UIFontWeightBold];
                return btn;
            }};
            CGFloat beginX = CGRectGetMaxX(moveRightBtn.frame);
            CGFloat btnHeight = 30;
            CGFloat btnWidth1 = 80;
            CGFloat btnWidth2 = 100;
            CGFloat marginX = 8;
            CGFloat offsetY = 6;
            CGFloat offsetX = (actionViewWidth - beginX - marginX * 2 - btnWidth1 * 3) / 2;
            UIButton *ivarsBtn = makeBtnBlock(@"ivars");
            (void)[ivarsBtn setFrame:(CGRect){{beginX + marginX, 8, btnWidth1, btnHeight}}];
            ((void (*)(id, SEL, id, SEL, long)) objc_msgSend)((id)ivarsBtn, @selector(addTarget:action:forControlEvents:), (id)vc, @selector(ivarsAction), 64); // UIControlEventTouchUpInside
            [_actionView addSubview:ivarsBtn];
            UIButton *propertiesBtn = makeBtnBlock(@"properties");
            (void)[propertiesBtn setFrame:(CGRect){{CGRectGetMaxX(ivarsBtn.frame) + offsetX, ivarsBtn.frame.origin.y, btnWidth1, btnHeight}}];
            ((void (*)(id, SEL, id, SEL, long)) objc_msgSend)((id)propertiesBtn, @selector(addTarget:action:forControlEvents:), (id)vc, @selector(propertiesAction), 64); // UIControlEventTouchUpInside
            [_actionView addSubview:propertiesBtn];
            UIButton *methodsBtn = makeBtnBlock(@"methods");
            (void)[methodsBtn setFrame:(CGRect){{CGRectGetMaxX(propertiesBtn.frame) + offsetX, ivarsBtn.frame.origin.y, btnWidth1, btnHeight}}];
            ((void (*)(id, SEL, id, SEL, long)) objc_msgSend)((id)methodsBtn, @selector(addTarget:action:forControlEvents:), (id)vc, @selector(methodsAction), 64); // UIControlEventTouchUpInside
            [_actionView addSubview:methodsBtn];
            UIButton *siblingPreviousBtn = makeBtnBlock(@"sibling.previous");
            (void)[siblingPreviousBtn setFrame:(CGRect){{beginX + marginX, CGRectGetMaxY(ivarsBtn.frame) + offsetY, 120, btnHeight}}];
            ((void (*)(id, SEL, id, SEL, long)) objc_msgSend)((id)siblingPreviousBtn, @selector(addTarget:action:forControlEvents:), (id)vc, @selector(siblingPreviousAction), 64); // UIControlEventTouchUpInside
            [_actionView addSubview:siblingPreviousBtn];
            UIButton *siblingNextBtn = makeBtnBlock(@"sibling.next");
            (void)[siblingNextBtn setFrame:(CGRect){{CGRectGetMaxX(siblingPreviousBtn.frame) + offsetX, siblingPreviousBtn.frame.origin.y, btnWidth2, btnHeight}}];
            ((void (*)(id, SEL, id, SEL, long)) objc_msgSend)((id)siblingNextBtn, @selector(addTarget:action:forControlEvents:), (id)vc, @selector(siblingNextAction), 64); // UIControlEventTouchUpInside
            [_actionView addSubview:siblingNextBtn];
            UIButton *superviewBtn = makeBtnBlock(@"superview");
            (void)[superviewBtn setFrame:(CGRect){{beginX + marginX, CGRectGetMaxY(siblingNextBtn.frame) + offsetY, btnWidth1, btnHeight}}];
            ((void (*)(id, SEL, id, SEL, long)) objc_msgSend)((id)superviewBtn, @selector(addTarget:action:forControlEvents:), (id)vc, @selector(superviewAction), 64); // UIControlEventTouchUpInside
            [_actionView addSubview:superviewBtn];
            UIButton *subviewBtn = makeBtnBlock(@"subviews.first");
            (void)[subviewBtn setFrame:(CGRect){{CGRectGetMaxX(superviewBtn.frame) + offsetX, superviewBtn.frame.origin.y, btnWidth2, btnHeight}}];
            ((void (*)(id, SEL, id, SEL, long)) objc_msgSend)((id)subviewBtn, @selector(addTarget:action:forControlEvents:), (id)vc, @selector(subviewAction), 64); // UIControlEventTouchUpInside
            [_actionView addSubview:subviewBtn];
            UIButton *closeBtn = [[UIButton alloc] init];
            (void)[closeBtn setFrame:(CGRect){{actionViewWidth - marginX - 50, superviewBtn.frame.origin.y, 50, btnHeight}}];
            (void)[closeBtn setBackgroundColor:[UIColor colorWithRed:208/255.0 green:2/255.0 blue:27/255.0 alpha:.7]];
            ((void (*)(id, SEL, id, long)) objc_msgSend)((id)closeBtn, @selector(setTitle:forState:), (id)@"Close", 0); // UIControlStateNormal
            ((void (*)(id, SEL, id, long)) objc_msgSend)((id)closeBtn, @selector(setTitleColor:forState:), (id)[UIColor whiteColor], 0); // UIControlStateNormal
            closeBtn.titleLabel.font = [UIFont systemFontOfSize:13];
            closeBtn.layer.cornerRadius = 3;
            ((void (*)(id, SEL, id, SEL, long)) objc_msgSend)((id)closeBtn, @selector(addTarget:action:forControlEvents:), (id)vc, @selector(clickCloseBtn), 64); // UIControlEventTouchUpInside
            [_actionView addSubview:closeBtn];

            // exitBtn
            UIButton *_exitBtn = [[UIButton alloc] init];
            [vc setValue:_exitBtn forKey:@"_exitBtn"];
            (void)[_exitBtn setBackgroundColor:[UIColor colorWithRed:208/255.0 green:2/255.0 blue:27/255.0 alpha:.7]];
            ((void (*)(id, SEL, id, long)) objc_msgSend)((id)_exitBtn, @selector(setTitle:forState:), (id)@"Tap to exit", 0); // UIControlStateNormal
            ((void (*)(id, SEL, id, long)) objc_msgSend)((id)_exitBtn, @selector(setTitleColor:forState:), (id)[UIColor whiteColor], 0); // UIControlStateNormal
            _exitBtn.titleLabel.font = [UIFont systemFontOfSize:13];
            _exitBtn.clipsToBounds = YES;
            ((void (*)(id, SEL, id, SEL, long)) objc_msgSend)((id)_exitBtn, @selector(addTarget:action:forControlEvents:), (id)vc, @selector(clickExitBtn), 64); // UIControlEventTouchUpInside
            [vc.view addSubview:_exitBtn];

            // tap
            UITapGestureRecognizer *tapRecognizer = [[UITapGestureRecognizer alloc] initWithTarget:vc action:@selector(handleTapRecognizer:)];
            [vc.view addGestureRecognizer:tapRecognizer];
        }};
        imp_implementationWithBlock(IMPBlock);
    '''
    return HM.evaluateExpressionValue(command_script)
def makeViewDidLayoutSubviewsIMP() -> lldb.SBValue:
    """Return an SBValue holding the IMP for -viewDidLayoutSubviews.

    The ObjC block calls super, then positions the exit button centered near
    the bottom, respecting the key window's safe-area insets when available.
    """
    command_script = f'''
        void (^IMPBlock)(UIViewController *) = ^(UIViewController *vc) {{
            Class cls = objc_lookUpClass("{gClassName}");
            struct objc_super superInfo = {{
                .receiver = vc,
                .super_class = (Class)class_getSuperclass((Class)cls)
            }};
            ((void (*)(struct objc_super *, SEL))objc_msgSendSuper)(&superInfo, @selector(viewDidLayoutSubviews));

            UIEdgeInsets safeAreaInsets = UIEdgeInsetsZero;
            if ([UIApplication.sharedApplication.keyWindow respondsToSelector:@selector(safeAreaInsets)]) {{
                safeAreaInsets = [UIApplication.sharedApplication.keyWindow safeAreaInsets];
            }}
            UIButton *_exitBtn = (UIButton *)[vc valueForKey:@"_exitBtn"];
            CGSize exitBtnSize = (CGSize){{_exitBtn.intrinsicContentSize.width + 20, _exitBtn.intrinsicContentSize.height}};
            CGFloat exitBtnY = vc.view.frame.size.height - safeAreaInsets.bottom - 10 - exitBtnSize.height;
            (void)[_exitBtn setFrame:(CGRect){{(vc.view.frame.size.width - exitBtnSize.width) / 2, exitBtnY, exitBtnSize.width, exitBtnSize.height}}];
            _exitBtn.layer.cornerRadius = exitBtnSize.height / 2;
        }};
        imp_implementationWithBlock(IMPBlock);
    '''
    return HM.evaluateExpressionValue(expression=command_script, prefix=HMExpressionPrefix.gPrefix)
def makeClickExitBtnIMP() -> lldb.SBValue:
    """Return an SBValue holding the IMP for -clickExitBtn.

    The ObjC block hides the inspector's window and restores the previously
    remembered key window.
    """
    command_script = '''
        void (^IMPBlock)(UIViewController *) = ^(UIViewController *vc) {
            UIWindow *_previousKeyWindow = (UIWindow *)[vc valueForKey:@"_previousKeyWindow"];
            [vc setValue:nil forKey:@"_previousKeyWindow"];
            [[vc.view window] setHidden:YES];
            [_previousKeyWindow makeKeyWindow];
        };
        imp_implementationWithBlock(IMPBlock);
    '''
    return HM.evaluateExpressionValue(command_script)
def makeClickCloseBtnIMP() -> lldb.SBValue:
    """Return an SBValue holding the IMP for -clickCloseBtn.

    The ObjC block hides the highlight, info, and action views (the inspector
    window itself stays visible).
    """
    command_script = '''
        void (^IMPBlock)(UIViewController *) = ^(UIViewController *vc) {
            UIView *_highlightView = (UIView *)[vc valueForKey:@"_highlightView"];
            UIView *_infoView = (UIView *)[vc valueForKey:@"_infoView"];
            UIButton *_actionView = (UIButton *)[vc valueForKey:@"_actionView"];
            _highlightView.hidden = YES;
            _infoView.hidden = YES;
            _actionView.hidden = YES;
        };
        imp_implementationWithBlock(IMPBlock);
    '''
    return HM.evaluateExpressionValue(command_script)
def makeHandleTapRecognizerIMP() -> lldb.SBValue:
    """Return an SBValue holding the IMP for -handleTapRecognizer:.

    The ObjC block hit-tests the tapped point against all visible windows
    (back to front, skipping the inspector's own window), logs the found
    view, and forwards it to -refreshTargetView:. For the previous key
    window it uses the custom -findSubviewAtPoint:inView: search instead of
    the standard hitTest.
    """
    command_script = '''
        void (^IMPBlock)(UIViewController *, UITapGestureRecognizer *) = ^(UIViewController *vc, UITapGestureRecognizer *tapRecognizer) {
            // find targetView
            CGPoint point = [tapRecognizer locationInView:vc.view];
            UIView *_targetView = nil;
            UIWindow *_previousKeyWindow = (UIWindow *)[vc valueForKey:@"_previousKeyWindow"];
            for (UIWindow *window in [UIApplication sharedApplication].windows.reverseObjectEnumerator) {
                if (_targetView) {
                    break;
                }
                if (window == vc.view.window) {
                    continue;
                }
                if ([window isHidden]) {
                    continue;
                }
                CGPoint wPoint = [window convertPoint:point fromWindow:vc.view.window];
                if (window == _previousKeyWindow) {
                    _targetView = ((id (*)(id, SEL, CGPoint, id)) objc_msgSend)((id)vc, @selector(findSubviewAtPoint:inView:), wPoint, (id)window);
                } else {
                    _targetView = [window hitTest:wPoint withEvent:nil];
                }
            }

            printf("\\n[HMLLDB]: %s\\n", (char *)[[_targetView description] UTF8String]);
            (void)[vc performSelector:@selector(refreshTargetView:) withObject:(id)_targetView];
        };
        imp_implementationWithBlock(IMPBlock);
    '''
    return HM.evaluateExpressionValue(expression=command_script, prefix=HMExpressionPrefix.gPrefix)
def makeFindSubviewAtPointInViewIMP() -> lldb.SBValue:
    """Return an SBValue holding the IMP for -findSubviewAtPoint:inView:.

    The ObjC block recursively descends into the topmost visible subview
    containing the point. Certain leaf controls (text fields, sliders, ...)
    are returned as-is without descending further.
    """
    command_script = '''
        UIView * (^IMPBlock)(UIViewController *, CGPoint, UIView *) = ^UIView *(UIViewController *vc, CGPoint point, UIView *view) {
            NSArray *clsArr = @[[UITextField class], [UITextView class], [UIProgressView class], [UIActivityIndicatorView class], [UISlider class], [UISwitch class], [UIPageControl class], [UIStepper class]];
            for (Class cls in clsArr) {
                if ([view isKindOfClass:cls]) {
                    return view;
                }
            }
            UIView *targetView = nil;
            for (UIView *sView in view.subviews.reverseObjectEnumerator) {
                if ([sView isHidden] || sView.alpha <= 0.01) {
                    continue;
                }
                if (CGRectContainsPoint(sView.frame, point)) {
                    targetView = sView;
                    break;;
                }
            }
            if (!targetView) {
                return view;
            }
            CGPoint tPoint = [targetView convertPoint:point fromView:view];
            targetView = ((id (*)(id, SEL, CGPoint, id)) objc_msgSend)((id)vc, @selector(findSubviewAtPoint:inView:), tPoint, (id)targetView);
            return targetView;
        };
        imp_implementationWithBlock(IMPBlock);
    '''
    return HM.evaluateExpressionValue(expression=command_script, prefix=HMExpressionPrefix.gPrefix)
def makeRefreshTargetViewIMP() -> lldb.SBValue:
    """Return an SBValue holding the IMP for -refreshTargetView:.

    The ObjC block stores the new target view, toggles visibility of the
    highlight/info/action views, moves the highlight over the target,
    rebuilds the info panel rows from -getInfoArrayFromTargetView:, sizes
    the panel to fit, and stacks the action panel beneath it (flipping to
    the top of the screen when it would overlap the target).
    """
    command_script = '''
        void (^IMPBlock)(UIViewController *, UIView *) = ^(UIViewController *vc, UIView *targetView) {
            [vc setValue:targetView forKey:@"_targetView"];
            UIView *_highlightView = (UIView *)[vc valueForKey:@"_highlightView"];
            UIView *_infoView = (UIView *)[vc valueForKey:@"_infoView"];
            UIButton *_actionView = (UIButton *)[vc valueForKey:@"_actionView"];
            if (targetView == nil) {
                _highlightView.hidden = YES;
                _infoView.hidden = YES;
                _actionView.hidden = YES;
                return;
            }
            _highlightView.hidden = NO;
            _infoView.hidden = NO;
            _actionView.hidden = NO;

            // highlightFrame
            CGRect highlightFrame = [vc.view.window convertRect:targetView.frame fromView:[targetView superview]];
            (void)[_highlightView setFrame:(CGRect)highlightFrame];

            // infoView. NSArray<NSArray<NSString *> *> *infoArr
            NSArray *infoArr = (NSArray *)[vc performSelector:@selector(getInfoArrayFromTargetView:) withObject:targetView];
            _infoView.hidden = targetView == nil;
            [_infoView.subviews makeObjectsPerformSelector:@selector(removeFromSuperview)];
            UILabel *clsNameLab = [[UILabel alloc] init];
            clsNameLab.numberOfLines = 0;
            clsNameLab.text = NSStringFromClass([targetView class]);
            clsNameLab.textColor = [UIColor blackColor];
            clsNameLab.font = [UIFont systemFontOfSize:13 weight:(UIFontWeight)UIFontWeightBold];
            [_infoView addSubview:clsNameLab];
            UIView *separator = [[UIView alloc] init];
            (void)[separator setBackgroundColor:[[UIColor grayColor] colorWithAlphaComponent:0.4]];
            [_infoView addSubview:separator];

            // infoView width
            CGFloat marginX = 5;
            CGFloat infoViewWidth = clsNameLab.intrinsicContentSize.width + marginX * 2;
            UIFont *infoFont = [UIFont systemFontOfSize:13];
            for (NSArray *infos in infoArr) { // NSArray<NSString *> *infos
                CGSize leftSize = (CGSize)[infos.firstObject sizeWithAttributes:@{(id)NSFontAttributeName: infoFont}];
                CGSize rightSize = (CGSize)[infos.lastObject sizeWithAttributes:@{(id)NSFontAttributeName: infoFont}];
                CGFloat labelPadding = 10;
                if (infoViewWidth < leftSize.width + rightSize.width + marginX * 2 + labelPadding) {
                    infoViewWidth = leftSize.width + rightSize.width + marginX * 2 + labelPadding;
                }
            }
            if (infoViewWidth > vc.view.bounds.size.width - 10) {
                infoViewWidth = vc.view.bounds.size.width - 10;
            }

            // infoView subviews and frame
            CGSize clsNameLabSize = (CGSize)[clsNameLab sizeThatFits:(CGSize){infoViewWidth - marginX * 2, 1000}];
            (void)[clsNameLab setFrame:(CGRect){marginX, 4, clsNameLabSize.width, clsNameLabSize.height}];
            (void)[separator setFrame:(CGRect){marginX, CGRectGetMaxY(clsNameLab.frame) + 4, infoViewWidth - marginX * 2, 0.5}];
            CGFloat infoViewHeight = 0;
            for (int i = 0; i< infoArr.count; ++i) {
                NSArray *infos = infoArr[i]; // NSArray<NSString *> *infos
                CGFloat rowHeight = 25;
                CGFloat beginY = CGRectGetMaxY(separator.frame) + 4;
                UILabel *leftLab = [[UILabel alloc] init];
                leftLab.text = infos.firstObject;
                ((void (*)(id, SEL, long)) objc_msgSend)((id)leftLab, @selector(setTextAlignment:), 0); // NSTextAlignmentLeft
                leftLab.font = [UIFont systemFontOfSize:13];
                leftLab.textColor = [UIColor grayColor];
                (void)[leftLab setFrame:(CGRect){marginX, beginY + i * rowHeight , leftLab.intrinsicContentSize.width, leftLab.intrinsicContentSize.height}];
                [_infoView addSubview:leftLab];
                UILabel *rightLab = [[UILabel alloc] init];
                rightLab.text = infos.lastObject;
                ((void (*)(id, SEL, long)) objc_msgSend)((id)rightLab, @selector(setTextAlignment:), 2); // NSTextAlignmentRight
                rightLab.font = [UIFont systemFontOfSize:13];
                rightLab.textColor = [UIColor blackColor];
                (void)[rightLab setFrame:(CGRect){infoViewWidth - marginX - rightLab.intrinsicContentSize.width, beginY + i * rowHeight, rightLab.intrinsicContentSize.width, rightLab.intrinsicContentSize.height}];
                [_infoView addSubview:rightLab];
                if (infoViewHeight < CGRectGetMaxY(leftLab.frame) + 4) {
                    infoViewHeight = CGRectGetMaxY(leftLab.frame) + 4;
                }
            }
            UIButton *_exitBtn = (UIButton *)[vc valueForKey:@"_exitBtn"];
            CGFloat actionViewOffsetY = 6;
            CGFloat infoViewY = _exitBtn.frame.origin.y - 10 - _actionView.bounds.size.height - actionViewOffsetY - infoViewHeight;
            BOOL canFrameInTop = (60 + infoViewHeight + actionViewOffsetY + _actionView.bounds.size.height) <= CGRectGetMinY(highlightFrame);
            if (infoViewY < CGRectGetMaxY(highlightFrame) && canFrameInTop) {
                infoViewY = 60;
            }
            (void)[_infoView setFrame:(CGRect){(vc.view.bounds.size.width - infoViewWidth) / 2, infoViewY, infoViewWidth, infoViewHeight}];
            CGRect actionViewFrame = _actionView.frame;
            actionViewFrame.origin.y = CGRectGetMaxY(_infoView.frame) + actionViewOffsetY;
            (void)[_actionView setFrame:(CGRect)actionViewFrame];
            [vc.view setNeedsLayout];
            [vc.view layoutIfNeeded];
        };
        imp_implementationWithBlock(IMPBlock);
    '''
    return HM.evaluateExpressionValue(expression=command_script, prefix=HMExpressionPrefix.gPrefix)
# The "infoView" is based on https://github.com/QMUI/LookinServer
def makeGetInfoArrayFromTargetViewIMP() -> lldb.SBValue:
command_script = '''
NSArray * (^IMPBlock)(UIViewController *, UIView *) = ^NSArray *(UIViewController *vc, UIView *targetView) {
NSMutableArray *infoArray = [[NSMutableArray alloc] init]; // NSMutableArray<NSArray<NSString *> *> *infoArr
// helper block
NSString *(^makeColorStringBlock)(UIColor *) = ^NSString *(UIColor *color) {
CGFloat r, g, b, a;
[color getRed:&r green:&g blue:&b alpha:&a];
if (a >= 1) {
return [[NSString alloc] initWithFormat:@"(%.0lf, %.0lf, %.0lf)", r * 255.0, g * 255.0, b * 255.0];
} else {
return [[NSString alloc] initWithFormat:@"(%.0lf, %.0lf, %.0lf, %.2lf)", r * 255.0, g * 255.0, b * 255.0, a];
}
};
NSString *(^getAssetNameBlock)(UIImage *) = ^NSString *(UIImage *img) {
UIImageAsset *imageAsset = (UIImageAsset *)[img imageAsset];
if ((BOOL)[imageAsset respondsToSelector:@selector(assetName)] && (BOOL)[imageAsset respondsToSelector:@selector(_assetManager)]) {
id assetManager = (id)[imageAsset performSelector:@selector(_assetManager)];
if (assetManager) {
return (NSString *)[imageAsset performSelector:@selector(assetName)];
}
}
return @"";
};
// Address
NSString *address = [[NSString alloc] initWithFormat:@"%p", targetView];
[infoArray addObject:@[@"Address", address]];
// Frame
NSString *frameStr = ({
NSString *frameX = [[NSString alloc] initWithFormat:@"%.1f", targetView.frame.origin.x];
if ([frameX hasSuffix:@".0"]) {
frameX = [frameX substringToIndex:[frameX length] - 2];
}
NSString *frameY = [[NSString alloc] initWithFormat:@"%.1f", targetView.frame.origin.y];
if ([frameY hasSuffix:@".0"]) {
frameY = [frameY substringToIndex:[frameY length] - 2];
}
NSString *frameWidthStr = [[NSString alloc] initWithFormat:@"%.1f", targetView.frame.size.width];
if ([frameWidthStr hasSuffix:@".0"]) {
frameWidthStr = [frameWidthStr substringToIndex:[frameWidthStr length] - 2];
}
NSString *frameHeightStr = [[NSString alloc] initWithFormat:@"%.1f", targetView.frame.size.height];
if ([frameHeightStr hasSuffix:@".0"]) {
frameHeightStr = [frameHeightStr substringToIndex:[frameHeightStr length] - 2];
}
[[NSString alloc] initWithFormat:@"{%@, %@, %@, %@}", frameX, frameY, frameWidthStr, frameHeightStr];
});
[infoArray addObject:@[@"Frame", frameStr]];
// BackgroundColor
if ((UIColor *)[targetView backgroundColor]) {
NSString *colorString = makeColorStringBlock((UIColor *)[targetView backgroundColor]);
[infoArray addObject:@[@"BackgroundColor", colorString]];
}
// Alpha
if (targetView.alpha < 1) {
NSString *alphaString = [[NSString alloc] initWithFormat:@"%.2lf", targetView.alpha];
[infoArray addObject:@[@"Alpha", alphaString]];
}
// CornerRadius
if (targetView.layer.cornerRadius > 0) {
NSString *radiusString = [[NSString alloc] initWithFormat:@"%.2lf", targetView.layer.cornerRadius];
[infoArray addObject:@[@"CornerRadius", radiusString]];
}
// BorderColor & BorderWidth
if (targetView.layer.borderColor && targetView.layer.borderWidth > 0) {
UIColor *borderColor = [[UIColor alloc] initWithCGColor:targetView.layer.borderColor];
NSString *colorString = makeColorStringBlock(borderColor);
[infoArray addObject:@[@"BorderColor", colorString]];
NSString *widthString = [[NSString alloc] initWithFormat:@"%.2lf", targetView.layer.borderWidth];
[infoArray addObject:@[@"BorderWidth", widthString]];
}
// Shadow
if (targetView.layer.shadowColor && targetView.layer.shadowOpacity > 0) {
UIColor *shadowColor = [[UIColor alloc] initWithCGColor:targetView.layer.shadowColor];
[infoArray addObject:@[@"ShadowColor", makeColorStringBlock(shadowColor)]];
[infoArray addObject:@[@"ShadowOpacity", [[NSString alloc] initWithFormat:@"%.2lf", targetView.layer.shadowOpacity]]];
[infoArray addObject:@[@"ShadowOffset", [[NSString alloc] initWithFormat:@"%@", NSStringFromCGSize(targetView.layer.shadowOffset)]]];
[infoArray addObject:@[@"ShadowRadius", [[NSString alloc] initWithFormat:@"%.2lf", targetView.layer.shadowRadius]]];
}
// UIImageView
// UIButton
// UILabel
// UIScrollView
// UITextView
// UITextField
if ([targetView isKindOfClass:[UIImageView class]]) {
UIImageView *imageView = (UIImageView *)targetView;
NSString *assetName = getAssetNameBlock([imageView image]);
if ([assetName length] > 0) {
[infoArray addObject:@[@"AssetName", assetName]];
}
} else if ([targetView isKindOfClass:[UIButton class]]) {
UIButton *btn = (UIButton *)targetView;
if ([btn titleForState:0].length) { // UIControlStateNormal
[infoArray addObject:@[@"FontSize", [[NSString alloc] initWithFormat:@"%.2lf", btn.titleLabel.font.pointSize]]];
[infoArray addObject:@[@"FontName", btn.titleLabel.font.fontName]];
[infoArray addObject:@[@"TextColor", makeColorStringBlock(btn.titleLabel.textColor)]];
}
NSString *assetName = getAssetNameBlock([btn imageForState:0]); // UIControlStateNormal
if ([assetName length] > 0) {
[infoArray addObject:@[@"AssetName.normal", assetName]];
}
} else if ([targetView isKindOfClass:[UILabel class]]) {
UILabel *label = (UILabel *)targetView;
[infoArray addObject:@[@"FontSize", [[NSString alloc] initWithFormat:@"%.2lf", label.font.pointSize]]];
[infoArray addObject:@[@"FontName", label.font.fontName]];
[infoArray addObject:@[@"TextColor", makeColorStringBlock(label.textColor)]];
[infoArray addObject:@[@"NumberOfLines", [NSString stringWithFormat:@"%ld", label.numberOfLines]]];
} else if ([targetView isKindOfClass:[UIScrollView class]]) {
UIScrollView *scrollView = (UIScrollView *)targetView;
[infoArray addObject:@[@"ContentSize", NSStringFromCGSize(scrollView.contentSize)]];
[infoArray addObject:@[@"ContentOffset", NSStringFromCGPoint(scrollView.contentOffset)]];
[infoArray addObject:@[@"ContentInset", NSStringFromUIEdgeInsets(scrollView.contentInset)]];
if ([scrollView respondsToSelector:@selector(adjustedContentInset)]) {
[infoArray addObject:@[@"AdjustedContentInset", NSStringFromUIEdgeInsets(scrollView.adjustedContentInset)]];
}
if ([scrollView isKindOfClass:[UITextView class]]) {
UITextView *textView = (UITextView *)scrollView;
[infoArray addObject:@[@"FontSize", [[NSString alloc] initWithFormat:@"%.2lf", textView.font.pointSize]]];
[infoArray addObject:@[@"FontName", textView.font.fontName]];
[infoArray addObject:@[@"TextColor", makeColorStringBlock(textView.textColor)]];
}
} else if ([targetView isKindOfClass:[UITextField class]]) {
UITextField *textField = (UITextField *)targetView;
[infoArray addObject:@[@"FontSize", [[NSString alloc] initWithFormat:@"%.2lf", textField.font.pointSize]]];
[infoArray addObject:@[@"FontName", textField.font.fontName]];
[infoArray addObject:@[@"TextColor", makeColorStringBlock(textField.textColor)]];
}
return [infoArray copy];
};
imp_implementationWithBlock(IMPBlock);
'''
return HM.evaluateExpressionValue(command_script)
def addFunctionMethods() -> bool:
    """Attach every HMDebugWindow action method to the target class.

    Each entry pairs an IMP factory with the selector it registers and the
    Objective-C type encoding of that selector.  Registration stops at the
    first factory whose expression evaluation yields no value.

    Returns:
        bool: True if all methods were added, False as soon as one fails.
    """
    # (IMP factory, selector name, Objective-C type encoding)
    method_specs = [
        (makeClickMoveBtnIMP, "clickMoveBtn:", "v@:@"),
        (makeIvarsActionIMP, "ivarsAction", "v@:"),
        (makePropertiesActionIMP, "propertiesAction", "v@:"),
        (makeMethodsActionIMP, "methodsAction", "v@:"),
        (makeSiblingNextActionIMP, "siblingNextAction", "v@:"),
        (makeSiblingPreviousActionIMP, "siblingPreviousAction", "v@:"),
        (makeSuperviewActionIMP, "superviewAction", "v@:"),
        (makeSubviewActionIMP, "subviewAction", "v@:"),
    ]
    for make_imp, selector, type_encoding in method_specs:
        imp_value = make_imp()
        if not HM.judgeSBValueHasValue(imp_value):
            return False
        HM.addInstanceMethod(gClassName, selector, imp_value.GetValue(), type_encoding)
    return True
def makeClickMoveBtnIMP() -> lldb.SBValue:
    """Return an IMP (wrapped in an SBValue) for the arrow "move" buttons.

    The Objective-C block reads the tapped button's tag to pick a direction
    (1 = up, 2 = left, 3 = down, 4 = right), shifts the target view's frame
    by one point, and asks the controller to refresh onto the moved view.
    """
    command_script = '''
    void (^IMPBlock)(UIViewController *, UIButton *) = ^(UIViewController *vc, UIButton *btn) {
        UIView *_targetView = (UIView *)[vc valueForKey:@"_targetView"];
        CGRect targetFrame = [_targetView frame];
        if (btn.tag == 1) {
            targetFrame.origin.y = targetFrame.origin.y - 1;
        } else if (btn.tag == 2) {
            targetFrame.origin.x = targetFrame.origin.x - 1;
        } else if (btn.tag == 3) {
            targetFrame.origin.y = targetFrame.origin.y + 1;
        } else if (btn.tag == 4) {
            targetFrame.origin.x = targetFrame.origin.x + 1;
        }
        (void)[_targetView setFrame:(CGRect)targetFrame];
        (void)[vc performSelector:@selector(refreshTargetView:) withObject:(id)_targetView];
    };
    imp_implementationWithBlock(IMPBlock);
    '''
    return HM.evaluateExpressionValue(expression=command_script, prefix=HMExpressionPrefix.gPrefix)
def makeIvarsActionIMP() -> lldb.SBValue:
    """Return an IMP that dumps the target view's private `_ivarDescription`.

    Output goes to the Xcode/LLDB console; a transient HUD tells the user
    where to look.
    """
    command_script = f'''
    void (^IMPBlock)(UIViewController *) = ^(UIViewController *vc) {{
        UIView *_targetView = (UIView *)[vc valueForKey:@"_targetView"];
        NSString *desc = (NSString *)[_targetView performSelector:NSSelectorFromString(@"_ivarDescription")];
        printf("\\n[HMLLDB]: _ivarDescription:\\n%s\\n", (char *)[desc UTF8String]);
        Class progressHUDCls = (Class)objc_lookUpClass("{HMProgressHUD.gClassName}");
        ((id (*)(id, SEL, id, int)) objc_msgSend)((id)progressHUDCls, @selector(showOnlyText:hiddenAfterDelay:), @"Please check the Xcode console output", 4);
    }};
    imp_implementationWithBlock(IMPBlock);
    '''
    return HM.evaluateExpressionValue(command_script)
def makePropertiesActionIMP() -> lldb.SBValue:
    """Return an IMP that dumps the target view's private `_propertyDescription`.

    Same pattern as the ivars action: print to the console, then show a HUD.
    """
    command_script = f'''
    void (^IMPBlock)(UIViewController *) = ^(UIViewController *vc) {{
        UIView *_targetView = (UIView *)[vc valueForKey:@"_targetView"];
        NSString *desc = (NSString *)[_targetView performSelector:NSSelectorFromString(@"_propertyDescription")];
        printf("\\n[HMLLDB]: _propertyDescription:\\n%s\\n", (char *)[desc UTF8String]);
        Class progressHUDCls = (Class)objc_lookUpClass("{HMProgressHUD.gClassName}");
        ((id (*)(id, SEL, id, int)) objc_msgSend)((id)progressHUDCls, @selector(showOnlyText:hiddenAfterDelay:), @"Please check the Xcode console output", 4);
    }};
    imp_implementationWithBlock(IMPBlock);
    '''
    return HM.evaluateExpressionValue(command_script)
def makeMethodsActionIMP() -> lldb.SBValue:
    """Return an IMP that dumps the target view's private `_methodDescription`.

    Same pattern as the ivars action: print to the console, then show a HUD.
    """
    command_script = f'''
    void (^IMPBlock)(UIViewController *) = ^(UIViewController *vc) {{
        UIView *_targetView = (UIView *)[vc valueForKey:@"_targetView"];
        NSString *desc = (NSString *)[_targetView performSelector:NSSelectorFromString(@"_methodDescription")];
        printf("\\n[HMLLDB]: _methodDescription:\\n%s\\n", (char *)[desc UTF8String]);
        Class progressHUDCls = (Class)objc_lookUpClass("{HMProgressHUD.gClassName}");
        ((id (*)(id, SEL, id, int)) objc_msgSend)((id)progressHUDCls, @selector(showOnlyText:hiddenAfterDelay:), @"Please check the Xcode console output", 4);
    }};
    imp_implementationWithBlock(IMPBlock);
    '''
    return HM.evaluateExpressionValue(command_script)
def makeSiblingNextActionIMP() -> lldb.SBValue:
    """Return an IMP that retargets the debug window to the next sibling view.

    The index wraps to 0 past the last sibling; the chosen view's
    description is printed and the controller refreshes onto it.
    """
    command_script = '''
    void (^IMPBlock)(UIViewController *) = ^(UIViewController *vc) {
        UIView *_targetView = (UIView *)[vc valueForKey:@"_targetView"];
        NSArray *siblings = [[_targetView superview] subviews];
        if (siblings != nil) {
            NSInteger index = [siblings indexOfObject:_targetView];
            index = index + 1;
            if (index >= [siblings count]) {
                index = 0;
            }
            UIView *targetSibling = [siblings objectAtIndex:index];
            printf("\\n[HMLLDB]: %s\\n", (char *)[[targetSibling description] UTF8String]);
            (void)[vc performSelector:@selector(refreshTargetView:) withObject:(id)targetSibling];
        }
    };
    imp_implementationWithBlock(IMPBlock);
    '''
    return HM.evaluateExpressionValue(command_script)
def makeSiblingPreviousActionIMP() -> lldb.SBValue:
    """Return an IMP that retargets the debug window to the previous sibling view.

    The index wraps to the last sibling below 0; mirror image of
    makeSiblingNextActionIMP.
    """
    command_script = '''
    void (^IMPBlock)(UIViewController *) = ^(UIViewController *vc) {
        UIView *_targetView = (UIView *)[vc valueForKey:@"_targetView"];
        NSArray *siblings = [[_targetView superview] subviews];
        if (siblings != nil) {
            NSInteger index = [siblings indexOfObject:_targetView];
            index = index - 1;
            if (index < 0) {
                index = [siblings count] - 1;
            }
            UIView *targetSibling = [siblings objectAtIndex:index];
            printf("\\n[HMLLDB]: %s\\n", (char *)[[targetSibling description] UTF8String]);
            (void)[vc performSelector:@selector(refreshTargetView:) withObject:(id)targetSibling];
        }
    };
    imp_implementationWithBlock(IMPBlock);
    '''
    return HM.evaluateExpressionValue(command_script)
def makeSuperviewActionIMP() -> lldb.SBValue:
    """Return an IMP that retargets the debug window to the target's superview.

    Does nothing if the target view has no superview.
    """
    command_script = '''
    void (^IMPBlock)(UIViewController *) = ^(UIViewController *vc) {
        UIView *_targetView = (UIView *)[vc valueForKey:@"_targetView"];
        UIView *targetSuperView = [_targetView superview];
        if (targetSuperView != nil) {
            printf("\\n[HMLLDB]: %s\\n", (char *)[[targetSuperView description] UTF8String]);
            (void)[vc performSelector:@selector(refreshTargetView:) withObject:(id)targetSuperView];
        }
    };
    imp_implementationWithBlock(IMPBlock);
    '''
    return HM.evaluateExpressionValue(command_script)
def makeSubviewActionIMP() -> lldb.SBValue:
    """Return an IMP that retargets the debug window to the target's first subview.

    Does nothing if the target view has no subviews.
    """
    command_script = '''
    void (^IMPBlock)(UIViewController *) = ^(UIViewController *vc) {
        UIView *_targetView = (UIView *)[vc valueForKey:@"_targetView"];
        UIView *targetSubview = [[_targetView subviews] firstObject];
        if (targetSubview != nil) {
            printf("\\n[HMLLDB]: %s\\n", (char *)[[targetSubview description] UTF8String]);
            (void)[vc performSelector:@selector(refreshTargetView:) withObject:(id)targetSubview];
        }
    };
    imp_implementationWithBlock(IMPBlock);
    '''
    return HM.evaluateExpressionValue(command_script)
|
23,947 | 56ba802d9c69ffe97a077ceae51208cf5438d945 | import os
import json
'''
use only for data with foreground and foreground 180
'''
# Walk ./data/<class>/<dir>/ and force `symmetric = 0` in every *.meta.json
# sample so downstream tooling treats these samples as asymmetric.
path = './data'
classes = list(os.listdir(path))
tag = '.meta.json'
for cls in classes:
    print('_________________')
    print('class: ', cls)
    cls_path = os.path.join(path, cls)
    dirs = list(os.listdir(cls_path))
    for d in dirs:
        dir_path = os.path.join(cls_path, d)
        # Only the metadata files, not the image/sample payloads.
        samples = [s for s in os.listdir(dir_path) if tag in s]
        for sample in samples:
            sample_path = os.path.join(dir_path, sample)
            try:
                with open(sample_path) as f:
                    meta = json.load(f)
                meta['symmetric'] = 0
                with open(sample_path, 'w') as f:
                    json.dump(meta, f)
            except (OSError, ValueError) as exc:
                # Report the broken file and pause so the run can be inspected
                # (the original bare `except:` also swallowed KeyboardInterrupt).
                print(sample_path)
                print(exc)
                input()
|
23,948 | 325ac45af0719cdccbada5c310d896710aafa6f3 | from datetime import datetime
from sqlalchemy import (
Table, Column, Integer, String, ForeignKey, DateTime, Boolean, Text )
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import relationship
from sqlalchemy import create_engine
Base = declarative_base()
class User(Base):
    """Application account.

    Holds credential and profile data; ``cookie`` stores the opaque session
    token for the logged-in user.
    """
    __tablename__ = 'user'
    id = Column(Integer, primary_key=True)
    fname = Column(String(64))  # first name
    lname = Column(String(64))  # last name
    email = Column(String(64), unique=True, nullable=False)
    # NOTE(review): stored as given; hashing is presumably the caller's job -- confirm.
    passwd = Column(String(64), nullable=False)
    created = Column(DateTime,
                     default=datetime.utcnow,
                     nullable=False)
    admin = Column(Boolean, default=False)  # True grants admin privileges
    cookie = Column(String(64), unique=True)  # session token, one per user

    def __init__(self, fname, lname, email, passwd, admin=False):
        self.fname = fname
        self.lname = lname
        self.email = email
        self.passwd = passwd
        self.admin = admin
# class Cookie(Base):
# __tablename__ = 'cookie'
# id = Column(Integer, primary_key=True)
# uid = Column(Integer, ForeignKey('user.id'), primary_key=True)
# created = Column(DateTime,
# default=datetime.utcnow,
# nullable=False)
class awardCreator(Base):
    """Profile for a user allowed to create awards.

    One-to-one extension of ``User``, keyed by the user's id.
    """
    __tablename__ = 'awardCreator'
    uid = Column(Integer, ForeignKey('user.id'), primary_key=True)
    org = Column(String(128), nullable=False)  # organization name
    city = Column(String(64), nullable=False)
    state = Column(String(64), nullable=False)
    signature = Column(Text, nullable=False)  # signature data placed on certificates
    # Column('signature', String(64), nullable=False)

    def __init__(self, uid, org, city, state, signature):
        self.uid = uid
        self.org = org
        self.city = city
        self.state = state
        self.signature = signature
# class awardType(Base):
# __tablename__ = 'awardType'
# id = Column(Integer, primary_key=True)
# name = Column(String(128), nullable=False)
# class award(Base):
# __tablename__ = 'award'
# id = Column(Integer, primary_key=True)
# fname = Column(String(64), nullable=False)
# lname = Column(String(64), nullable=False)
# email = Column(String(64), nullable=False)
# creatorId = Column(Integer, ForeignKey('awardCreator.uid'))
# awardTypeId = Column(Integer, ForeignKey('awardType.id'))
# created = Column(DateTime,
# default=datetime.utcnow,
# nullable=False)
class award(Base):
    """A single award granted to a recipient by an award creator."""
    __tablename__ = 'award'
    id = Column(Integer, primary_key=True)
    fname = Column(String(64), nullable=False)  # recipient first name
    lname = Column(String(64), nullable=False)  # recipient last name
    email = Column(String(64), nullable=False)  # recipient email
    awardType = Column(String(64), nullable=False)
    awardDate = Column(DateTime, nullable=False)
    creatorID = Column(Integer, ForeignKey('awardCreator.uid'))

    def __init__(self, fname, lname, email, awardType, awardDate, creatorID):
        self.fname = fname
        self.lname = lname
        self.email = email
        self.awardType = awardType
        self.awardDate = awardDate
        self.creatorID = creatorID
def connect(user, passwd, db, host='localhost', port=5432):
    """Create a SQLAlchemy engine for the given PostgreSQL database."""
    uri = 'postgresql://{0}:{1}@{2}:{3}/{4}'.format(user, passwd, host, port, db)
    return create_engine(uri, client_encoding='utf8')
# Create the engine and materialize all tables defined above.
# NOTE(review): credentials are hard-coded in source; consider moving them to
# environment variables or a config file.
con = connect('klompen_db_user', 'BlueDanceMoon42', 'klompen_db')
Base.metadata.create_all(con)
|
23,949 | ddd80925e912baa1bcb08262ff4fbef51a3f493c | from kivy.uix.screenmanager import ScreenManager, Screen
from kivy.uix.relativelayout import RelativeLayout
from kivy.uix.gridlayout import GridLayout
from kivy.uix.button import Button
from kivy.uix.togglebutton import ToggleButton
from kivy.uix.label import Label
from kivy.core.window import Window
from kivy.properties import NumericProperty, ReferenceListProperty
from kivy.uix.popup import Popup
from kivy.uix.boxlayout import BoxLayout
from datetime import datetime
from db.essl_credentials import credentials
from kivy.lang import Builder
from pages import calendar_data as cal_data, Dialog, kivytoast
selectedDates = []
holiday = []
halfday = []
Builder.load_string("""
<ToggleBtn>:
canvas.before:
Color:
rgba: (1, 1, 1, 1)
Rectangle:
size: self.size
pos: self.pos
<HolidayBtn>:
font_name: 'fonts/moon-bold.otf'
canvas.before:
Color:
rgba: (128, 0, 128, 0.5)
Rectangle:
pos: self.pos
size: self.size
<HalfdayBtn>:
font_name: 'fonts/moon-bold.otf'
canvas.before:
Color:
rgba: (0, 255, 255, 0.25)
Rectangle:
pos: self.pos
size: self.size
""")
class ToggleBtn(ToggleButton):
    """Day-cell button; visual style comes from the kv rule above."""
    pass


class HolidayBtn(ToggleButton):
    """Toggle that arms 'mark selection as HOLIDAY' mode."""
    pass


class HalfdayBtn(ToggleButton):
    """Toggle that arms 'mark selection as HALF DAY' mode."""
    pass
class CalendarWidgetS(RelativeLayout):
    """Basic calendar widget.

    Shows one month per screen in a ScreenManager; arrow buttons (or touch
    drags when ``touch_switch`` is enabled) move between months.  Day
    buttons toggle entries in the module-level ``selectedDates`` list, and
    days already stored as holidays/half days are tinted.
    """

    def __init__(self, as_popup=False, touch_switch=False, *args, **kwargs):
        super(CalendarWidgetS, self).__init__(*args, **kwargs)
        self.as_popup = as_popup          # when True, picking a day dismisses parent_popup
        self.touch_switch = touch_switch  # when True, horizontal drags switch months
        #self.selectedDates = []
        self.prepare_data()
        self.init_ui()

    def init_ui(self):
        """Build navigation arrows, the title label and the month screens."""
        self.left_arrow = Button(text="<", on_press=self.go_prev,
                                 pos_hint={"top": 1, "left": 0}, size_hint=(.1, .1))
        self.right_arrow = Button(text=">", on_press=self.go_next,
                                  pos_hint={"top": 1, "right": 1}, size_hint=(.1, .1))
        self.add_widget(self.left_arrow)
        self.add_widget(self.right_arrow)
        # Title
        self.title_label = Label(text=self.title, pos_hint={"top": 1, "center_x": .5}, size_hint=(None, 0.15), halign=("center"))
        self.add_widget(self.title_label)
        # ScreenManager
        self.sm = ScreenManager(pos_hint={"top": .9}, size_hint=(1, .9))
        self.add_widget(self.sm)
        # quarter[1] is the currently active month.
        self.create_month_scr(self.quarter[1], toogle_today=True)

    def create_month_scr(self, month, toogle_today=False):
        """ Screen with calendar for one month """
        scr = Screen()
        m = self.month_names_eng[self.active_date[1] - 1]
        scr.name = "%s-%s" % (m, self.active_date[2])  # like march-2015
        # Grid for days
        grid_layout = GridLayout(cols=7, rows=7, size_hint=(1, 1), pos_hint={"top": 1})
        scr.add_widget(grid_layout)
        # Days abbrs
        for i in range(7):
            if i >= 5:  # weekends
                l = Label(text=self.days_abrs[i], color=(1, 0, 0, 1))
            else:  # work days
                l = Label(text=self.days_abrs[i], text_size=(self.size[0], None), halign="center")
            grid_layout.add_widget(l)
        global holiday, halfday
        # Buttons with days numbers
        for week in month:
            for day in week:
                if day[1] >= 6:  # weekends
                    self.tbtn = ToggleBtn(text=str(day[0]), color=(0, 0, 0, 1))
                else:
                    self.tbtn = ToggleBtn(text=str(day[0]), color=(0, 0, 0, 1))
                # Tint days already stored as holidays for this month/year.
                for i in range(len(holiday)):
                    if self.active_date[2] == holiday[i][2]:
                        if self.active_date[1] == holiday[i][1]:
                            if day[0] == holiday[i][0]:
                                self.tbtn.background_color=(128, 0, 128, 1)
                # Tint days already stored as half days for this month/year.
                for i in range(len(halfday)):
                    if self.active_date[2] == halfday[i][2]:
                        if self.active_date[1] == halfday[i][1]:
                            if day[0] == halfday[i][0]:
                                self.tbtn.background_color=(0, 255, 255, 0.5)
                self.tbtn.bind(on_press=self.get_btn_value)
                if toogle_today:
                    # Down today button
                    if day[0] == self.active_date[0] and day[2] == 1:
                        self.tbtn.state = "down"
                # Disable buttons with days from other months
                if day[2] == 0:
                    self.tbtn.text = " "
                    self.tbtn.disabled = True
                    self.tbtn.background_color = (0, 0, 0, 0.1)
                grid_layout.add_widget(self.tbtn)
        self.sm.add_widget(scr)

    def prepare_data(self):
        """ Prepare data for showing on widget loading """
        # Get days abbrs and month names lists
        self.month_names = cal_data.get_month_names()
        self.month_names_eng = cal_data.get_month_names_eng()
        self.days_abrs = cal_data.get_days_abbrs()
        # Today date, as [day, month, year]
        self.active_date = cal_data.today_date_list()
        # Set title
        self.title = "%s - %s" % (self.month_names[self.active_date[1] - 1],
                                  self.active_date[2])
        # Quarter where current month is in self.quarter[1]
        self.get_quarter()

    def get_quarter(self):
        """ Get calendar and months/years nums for the quarter """
        self.quarter_nums = cal_data.calc_quarter(self.active_date[2],
                                                  self.active_date[1])
        self.quarter = cal_data.get_quarter(self.active_date[2],
                                            self.active_date[1])

    def get_btn_value(self, inst):
        """ Get day value from pressed button """
        self.active_date[0] = int(inst.text)
        selected = [self.active_date[0], self.active_date[1], self.active_date[2]]
        global selectedDates
        # Toggle: pressing an already-selected day deselects it.
        if selected in selectedDates:
            selectedDates.remove(selected)
        else:
            selectedDates.append(selected)
        if self.as_popup:
            self.parent_popup.dismiss()
        #getInfo.openPopup()

    def go_prev(self, inst):
        """ Go to screen with previous month """
        # Change active date
        self.active_date = [self.active_date[0], self.quarter_nums[0][1],
                            self.quarter_nums[0][0]]
        # Name of prev screen
        n = self.quarter_nums[0][1] - 1
        prev_scr_name = "%s-%s" % (self.month_names_eng[n],
                                   self.quarter_nums[0][0])
        # If it doesn't exist yet, create it
        if not self.sm.has_screen(prev_scr_name):
            self.create_month_scr(self.quarter[0])
        self.sm.current = prev_scr_name
        self.sm.transition.direction = "left"
        self.get_quarter()
        self.title = "%s - %s" % (self.month_names[self.active_date[1] - 1],
                                  self.active_date[2])
        self.title_label.text = self.title

    def go_next(self, inst):
        """ Go to screen with next month """
        # Change active date
        self.active_date = [self.active_date[0], self.quarter_nums[2][1],
                            self.quarter_nums[2][0]]
        # Name of next screen
        n = self.quarter_nums[2][1] - 1
        next_scr_name = "%s-%s" % (self.month_names_eng[n],
                                   self.quarter_nums[2][0])
        # If it doesn't exist yet, create it
        if not self.sm.has_screen(next_scr_name):
            self.create_month_scr(self.quarter[2])
        self.sm.current = next_scr_name
        self.sm.transition.direction = "right"
        self.get_quarter()
        self.title = "%s - %s" % (self.month_names[self.active_date[1] - 1],
                                  self.active_date[2])
        self.title_label.text = self.title

    def on_touch_move(self, touch):
        """ Switch months pages by touch move """
        if self.touch_switch:
            # Left - prev
            if touch.dpos[0] < -30:
                self.go_prev(None)
            # Right - next
            elif touch.dpos[0] > 30:
                self.go_next(None)
import pymysql
def paintDates():
    """Load already-stored HOLIDAY/HALFDAY dates into the module globals.

    Populates ``holiday`` and ``halfday`` with [day, month, year] triples
    read from ``essl.month_details`` so the calendar can tint those cells.
    """
    global holiday, halfday
    db = pymysql.connect(credentials['address'], credentials['username'],
                         credentials['password'], credentials['db'],
                         autocommit=True, connect_timeout=1)
    try:
        cur = db.cursor()
        try:
            cur.execute("SELECT DAY, MONTH, YEAR, DETAIL FROM essl.month_details")
            for data in cur.fetchall():
                if data[3] == 'HOLIDAY':
                    holiday.append([data[0], data[1], data[2]])
                else:
                    halfday.append([data[0], data[1], data[2]])
        finally:
            cur.close()  # the original leaked the cursor
    finally:
        db.close()  # ... and the connection
def setup():
    """Build and open the date-settings popup.

    The popup holds a HOLIDAY/HALF DAY mode toggle row, the calendar, and a
    SAVE button that inserts every date in ``selectedDates`` into
    ``essl.month_details`` under whichever mode toggle is down.
    """
    paintDates()
    calSettingsLayout = BoxLayout(orientation='vertical')
    daySetLayout = BoxLayout(orientation='horizontal', size_hint_y=0.2)
    holidayBtn = HolidayBtn(text='HOLIDAY', size_hint_x=0.5, color=(128, 0, 128, 1), bold=True)
    daySetLayout.add_widget(holidayBtn)
    halfdayBtn = HalfdayBtn(text='HALF DAY', size_hint_x=0.5, color=(0, 255, 255, 0.5), bold=True)
    daySetLayout.add_widget(halfdayBtn)
    calSettingsLayout.add_widget(daySetLayout)
    cal = CalendarWidgetS()
    calSettingsLayout.add_widget(cal)

    def callback(instance):
        # SAVE button handler; writes the current selection to the DB.
        def call(instance):
            if instance.text == 'OK':
                # NOTE(review): `pop` is not defined anywhere in this module --
                # this would raise NameError if ever triggered; confirm intent.
                pop.dismiss()
        global selectedDates
        if instance.text == 'SAVE':
            db = pymysql.connect(credentials['address'], credentials['username'], credentials['password'], credentials['db'], autocommit=True, connect_timeout=1)
            cur = db.cursor()
            # NOTE(review): closePopBtn is bound but never added to a widget tree.
            closePopBtn = Button(text="OK", size_hint=(1, 0.25))
            closePopBtn.bind(on_release=call)
            if holidayBtn.state == 'down':
                for date in selectedDates:
                    cur.execute("INSERT INTO essl.month_details (DAY, MONTH, YEAR, DETAIL) VALUES(%d, %d, %d, 'HOLIDAY')" %(date[0], date[1], date[2]))
                kivytoast.toast('Holidays Applied', (0, 1, 0, 0.5), length_long=True)
            elif halfdayBtn.state == 'down':
                for date in selectedDates:
                    cur.execute("INSERT INTO essl.month_details (DAY, MONTH, YEAR, DETAIL) VALUES(%d, %d, %d, 'HALFDAY')" %(date[0], date[1], date[2]))
                kivytoast.toast('Halfdays Applied', (0, 1, 0, 0.5), length_long=True)
            cur.close()
            db.close()

    saveBtn = Button(text='SAVE', size_hint_y=0.2)
    saveBtn.bind(on_press=callback)
    calSettingsLayout.add_widget(saveBtn)
    popup = Popup(title='date settings', content=calSettingsLayout, size_hint=(0.65, 0.65))
    popup.open()
|
23,950 | b308743171d2e198e58bfc79e8408a00817d55de | from django.apps import AppConfig
class CaseVideosConfig(AppConfig):
    """Django application configuration for the case_videos app."""
    name = 'case_videos'
|
23,951 | 8432b87f84b43aff9438a6ff756f572f495ad1bb | from django.db import models
import datetime
# Create your models here.
### The code below creates a post object to be used in my database.
class Post(models.Model):
    """A simple blog post with a title, a body and a creation timestamp."""
    title = models.CharField(max_length=30)
    body = models.CharField(max_length=500)
    # The original assigned `datetime.datetime.now()` directly, which is
    # evaluated ONCE at import time and is a plain class attribute, not a
    # database column.  auto_now_add records each row's creation time.
    date_posted = models.DateTimeField(auto_now_add=True)
|
23,952 | 50723106665c88fcd9b854918a9328f13cbd984e | # Unit 02 - Nonlinear Classification, Linear regression, Collaborative Filtering
|
23,953 | 92c8b09001f77ce014fe4cea1abb19f3c2bf12e7 | # -*- coding: UTF-8 -*-
__copyright__ = 'Copyright (c) 2016 The University of Texas at Austin'
__author__ = 'mccookpv'
from django.http import HttpResponse
from django.http import Http404
from django.core.exceptions import PermissionDenied, SuspiciousOperation
from django.conf import settings
import logging
import json
logger_main = logging.getLogger("main")
class NanoSourcerLtiHandshakeMiddleware(object):
    """Django middleware that validates the LTI session on every request.

    The LTI session key is pulled from the request (POST, GET, or a JSON
    body) and validated against ``settings.LTI_APP_CONFIG``.  Requests
    whose path matches one of the configured exclude regexes bypass
    validation entirely.
    """

    def __init__(self, *args, **kwargs):
        # Shared LTI application configuration object.
        self.lti_app_config = settings.LTI_APP_CONFIG

    def get_lti_session_key(self, request):
        """Extract the LTI session key ('lti_session_key' or short 'lk').

        POST prefers the long name, GET prefers the short name, and any
        other method falls back to parsing the JSON request body.
        Returns None when no key is present.
        """
        if request.method == 'POST':
            lti_session_key = request.POST.get('lti_session_key', None)
            if not lti_session_key:
                lti_session_key = request.POST.get('lk', None)
        elif request.method == 'GET':
            lti_session_key = request.GET.get('lk', None)
            if not lti_session_key:
                lti_session_key = request.GET.get('lti_session_key', None)
        else:
            if request.body:
                request_body_dict = json.loads(request.body)
                lti_session_key = request_body_dict.get('lk', None)
                if not lti_session_key:
                    lti_session_key = request_body_dict.get('lti_session_key', None)
            else:
                lti_session_key = None
        return lti_session_key

    def bypass_exclude_paths(self, request):
        """Return True if the request path matches any exclude regex."""
        lti_middleware_exclude_path_list = self.lti_app_config.get_lti_middleware_exclude_paths()
        for ex_regex in lti_middleware_exclude_path_list:
            if ex_regex.match(request.path):
                return True
        return False

    def process_request(self, request):
        """Validate the LTI session; return an error HttpResponse on failure."""
        p = "process_request"
        # Fail fast if the app configuration is missing.  (The original
        # consulted the exclude paths first, which dereferences the config
        # and would have raised AttributeError before this check could run.)
        if not self.lti_app_config:
            return HttpResponse(content="LTI app configuration not found.",
                                content_type=None,
                                status=500,
                                reason='LTI app configuration not found.')
        if self.bypass_exclude_paths(request):
            return None
        logger_main.info("=" * 64)
        logger_main.info("{0} {1}".format(p, request.path))
        lti_session_key = self.get_lti_session_key(request)
        request.session['lti_session_key'] = lti_session_key
        # (A second bypass_exclude_paths() check existed here in the
        # original; it could never trigger after the check above.)
        lti_session_data, message_dict = self.lti_app_config.get_valid_lti_session(
            request,
            lti_session_key=lti_session_key,
            default_session_expiry=3600,
            lti_session_data=None,
            load_full_handshake=False,
            load_convenience_vars=True,
            print_lti_handshake=False,
            include_param_keys=[],
            include_header_keys=[])
        if not lti_session_data:
            return HttpResponse(content="Session has expired. Please re-launch application from menu.",
                                content_type=None,
                                status=401,
                                reason='Session has expired. Please re-launch application from menu.')
        return None

    def process_view(self, request, view_func, view_args, view_kwargs):
        return None

    def process_template_response(self, request, response):
        return None

    def process_response(self, request, response):
        return response

    def process_exception(self, request, exception):
        """Map exceptions onto plain HttpResponses with matching statuses."""
        # Custom application errors carry their own code/message pair.
        if hasattr(exception, 'error_code') and hasattr(exception, 'error_message'):
            return HttpResponse(content=exception.error_message,
                                content_type=None,
                                status=exception.error_code,
                                reason=exception.error_message)
        if isinstance(exception, (PermissionDenied, SuspiciousOperation)):
            return HttpResponse(content="Access denied.",
                                content_type=None,
                                status=403,
                                reason='Access denied.')
        if isinstance(exception, Http404):
            return HttpResponse(content="Resource not found.",
                                content_type=None,
                                status=404,
                                reason='Resource not found.')
        if str(exception):
            return HttpResponse(content="Error: {0}".format(str(exception)),
                                content_type=None,
                                status=500,
                                reason=exception)
        return HttpResponse(content="Access denied.",
                            content_type=None,
                            status=403,
                            reason='Access denied.')

    def get_timedelta_seconds(self, td):
        """Return the total seconds in *td* as a float."""
        # Equivalent to the original manual microsecond arithmetic.
        return td.total_seconds()
|
23,954 | 1f011319c285bffb61257ea4f9b893960ae003b6 | from datetime import datetime
import os
import time
from openpyxl import load_workbook, Workbook
# 평균 시간 계산용 함수 time_spend 전역 배열을 사용
# Print the running average per-file processing time; reads the module-level
# `time_spend` list and averages it over count + 1 files.
def time_cal(count):
    """Print the average processing time over ``count + 1`` files."""
    # The original shadowed the built-in `sum` with a manual loop.
    total = sum(time_spend)
    print("평균 소요 시간 : " + str(round(total / (count + 1), 2)) + "초")
# 파일 처리에 필요한 폴더 생성
# Create the folders required for file processing.
def init():
    """Ensure the ./merge1 output directory exists."""
    if os.path.isdir("./merge1"):
        return
    os.mkdir("./merge1")
# 디렉토리의 파일들을 저장하는 배열을 생성
def get_directory_file_list(target_directory, file_list=None):
    """Recursively collect every file path under *target_directory*.

    *target_directory* must end with a path separator, because paths are
    built by plain string concatenation (matching the existing call sites).

    Args:
        target_directory: directory to walk, e.g. ``"./complete/"``.
        file_list: accumulator used during recursion; leave as None.
            (The original used a mutable default argument ``[]``, so
            results silently accumulated across separate calls.)

    Returns:
        list[str]: ``directory + name`` paths of every file found.
    """
    if file_list is None:
        file_list = []
    for name in os.listdir(target_directory):
        if os.path.isdir(target_directory + name + "/"):
            get_directory_file_list(target_directory + name + "/", file_list)
        else:
            file_list.append(target_directory + name)
    return file_list
def merge_1(file_number):
    """Accumulate word frequencies from one per-article spreadsheet.

    Reads ``./complete/<file_number>`` (an .xlsx of word/frequency rows)
    into the module-level ``noun`` / ``nouncount`` parallel lists, then
    removes any words listed in the exclusion file ``결과제외단어.txt``.
    """
    # Use the data from the complete folder.
    file_location = complete_file_directory + file_number
    try:
        excel_filename = file_location
        load_wb = load_workbook(excel_filename, data_only=True)
        for sheet in load_wb:
            # Start at row 2 (skip the header) and read at most 2 columns.
            for row in sheet.iter_rows(min_row=2, max_col=2):
                if len(row[0].value) > 1:
                    # Merge into the running totals, or start a new entry.
                    if row[0].value in noun:
                        nouncount[noun.index(row[0].value)] = int(nouncount[noun.index(row[0].value)]) + int(row[1].value)
                    else:
                        noun.append(row[0].value)
                        nouncount.append(row[1].value)
    except Exception as e:
        print(e)
    try:
        # Drop excluded words (file name means "words excluded from results").
        except_word = open("결과제외단어.txt", mode='rt', encoding='utf-8')
        for i in except_word.readlines():
            i = i.replace('\n', '')
            del nouncount[noun.index(i)]
            del noun[noun.index(i)]
        except_word.close()
    except Exception as e:
        # Raised e.g. when the exclusion file is missing or a word is absent.
        print(e)
def merge1_save(file_number):
    """Write the accumulated frequencies to ./merge1/20YY/<file_number>.xlsx.

    Rows are sorted by frequency in descending order, with a
    word/frequency header in row 1.
    """
    # Sort descending by count.
    noun_list = {}
    for i in noun:
        noun_list.setdefault(i, nouncount[noun.index(i)])
    noun_list_sort = sorted(noun_list.items(), key=lambda x: x[1], reverse=True)
    # Output path groups files by year: first two digits of the id are 20YY.
    file_path = "./merge1/20" + file_number[0:2] + "/" + file_number + ".xlsx"
    write_wb = Workbook()
    write_ws = write_wb.active
    write_ws.cell(1, 1, "word")
    write_ws.cell(1, 2, "frequency")
    for i in range(0, len(noun_list_sort)):
        write_ws.cell(i + 2, 1, noun_list_sort[i][0])
        write_ws.cell(i + 2, 2, noun_list_sort[i][1])
    write_wb.save(file_path)
# Driver: walk ./complete/, merge all per-article sheets that share the same
# leading file number, and save one aggregated sheet per number.
start = time.time()  # record start time
noun = []        # accumulated words (parallel to nouncount)
nouncount = []   # accumulated frequencies
time_spend = []  # per-file durations, consumed by time_cal()
previous_number = 0
init()
complete_file_directory = "./complete/"
file_list = get_directory_file_list(complete_file_directory)
# e.g. './complete/200101000012543_9.xlsx', './complete/200101001831944_2.xlsx'
for i in range(0, len(file_list)):
    # Pre-create the year folder the output will be written into.
    if not os.path.isdir("./merge1/20" + str(file_list[i].split("/")[-1].split("_")[0][0:2])):
        os.mkdir("./merge1/20" + str(file_list[i].split("/")[-1].split("_")[0][0:2]))
    # Skip file numbers whose merged output already exists.
    if os.path.isfile("./merge1/20" + str(file_list[i].split("/")[-1].split("_")[0][0:2])+"/" + file_list[i].split("/")[-1].split("_")[0]+".xlsx"):
        print(file_list[i].split("/")[-1].split("_")[0]+".xlsx"+" 파일 있음")
        continue
    # When the file number changes, flush the previous number's totals and
    # reset the accumulators before processing the new group.
    if int(file_list[i].split("/")[-1].split("_")[0]) != previous_number and previous_number != 0:
        print(file_list[i].split("/")[-1].split("_")[0]+".xlsx"+" 파일 없음")
        merge1_save(file_list[i - 1].split("/")[-1].split("_")[0])
        noun = []
        nouncount = []
    previous_number = int(file_list[i].split("/")[-1].split("_")[0])
    merge_1(file_list[i].split("/")[-1])
    # time_cal(i)
print("총 소요시간 :", time.time() - start)  # total elapsed time
|
23,955 | 399bbcac1ed45eca6373cb67ff47daaad771efb2 | def printH(name="Peng"):
print "Hello ",name
printH()
printH("Shi") |
23,956 | 6322fc020978b823330a0e50f1cbd9243e47ebac | from byugui.new_body_gui import CreateWindow, NewBodyWindow
from PySide2 import QtWidgets
import hou
import os
def go():
    """Open the 'new body' creation dialog parented to Houdini's main Qt window."""
    # NOTE(review): declared global but never assigned in this module --
    # presumably left over from an earlier version.
    global quote_window
    # NOTE(review): `dialog` is unused after construction; CreateWindow
    # presumably shows itself as a side effect -- confirm.
    dialog = CreateWindow(hou.ui.mainQtWindow())
|
23,957 | 5763727055c7b6b03242e36a18b789c49cd1a777 | import matplotlib.pyplot as plt
import numpy as np
import os
import sys
# Logistic-regression training/evaluation on the USPS digit CSVs, sweeping a
# list of regularization strengths supplied as the third CLI argument.
args = sys.argv
if(args[1] == "usps_train.csv" and args[2] == "usps_test.csv"):
    u_train_data = args[1]
    u_test_data = args[2]
else:
    print("Incorrect files passed, exiting...")
    quit()
np.seterr(all='raise')
learning_factor = .01
# Third CLI arg looks like "[0.1,0.2,...]"; strip the brackets and split.
# NOTE(review): the entries stay strings and are never converted to float.
lambdas = sys.argv[3][1:][:-1].split(",")
train_correct = [0]*len(lambdas)
test_correct = [0]*len(lambdas)
train_x = np.genfromtxt(u_train_data, usecols=range(256), delimiter=',')
train_y = np.genfromtxt(u_train_data, usecols=(-1), delimiter=',')
train_x = train_x/255  # scale pixels into [0, 1]
train_x = np.insert(train_x, 0, 1, axis=1)  # prepend a bias column
test_x = np.genfromtxt(u_test_data, usecols=range(256), delimiter=',')
test_x = np.insert(test_x, 0, 1, axis=1)
# NOTE(review): test_x is NOT divided by 255 like train_x -- likely a bug.
test_y = np.genfromtxt(u_test_data, usecols=(-1), delimiter=',')
weights = np.zeros(train_x.shape[1])
# NOTE(review): weights are initialized once OUTSIDE the lambda loop, so each
# lambda continues training from the previous run, and `lambdas[k]` is never
# used in the update (learning_factor doubles as the regularization
# coefficient) -- confirm intent.
for k in range(len(lambdas)):
    # Create the gradient and weight vector
    for i in range(1000):
        gradient = np.zeros(train_x.shape[1])
        for j in range(train_x.shape[0]):
            # Sigmoid prediction for sample j.
            y_hat = 1./(1. + np.e**(-1. * np.dot(weights.T, train_x[j])))
            gradient = np.add(gradient, ((y_hat-train_y[j]) * train_x[j]))
        # Regularization term: a scalar broadcast over the whole gradient.
        gradient = np.add(gradient, (learning_factor)*(np.linalg.norm(weights)))
        weights = np.subtract(weights, (learning_factor * gradient))
    # Get the number of correct predictions for the training data for kth lambda
    for j in range(train_x.shape[0]):
        if np.dot(weights.T, train_x[j]) >= 0. and train_y[j] == 1:
            train_correct[k]+=1
        elif np.dot(weights.T, train_x[j]) < 0. and train_y[j] == 0:
            train_correct[k]+=1
    train_correct[k] = (train_correct[k]/train_x.shape[0])*100
    print("Training Accuracy for " + str(k) + "lambda: " + str(train_correct[k]))
    # Get the number of correct predictions for the test data for kth lambda
    for j in range(test_x.shape[0]):
        if np.dot(weights.T, test_x[j]) >= 0. and test_y[j] == 1:
            test_correct[k]+=1
        elif np.dot(weights.T, test_x[j]) < 0. and test_y[j] == 0:
            test_correct[k]+=1
    test_correct[k] = (test_correct[k]/test_x.shape[0])*100
    print("Testing Accuracy for " + str(k) + "th lambda: " + str(test_correct[k]))
# Plot the two correct lists vs the given lambdas
plt.plot(lambdas, train_correct)
plt.xlabel("lambdas")
plt.xticks(lambdas)
plt.ylabel("Training Accuracy")
plt.savefig("TrainingAcc.png")
plt.clf()
plt.plot(lambdas, test_correct)
plt.xlabel("lambdas")
plt.xticks(lambdas)
plt.ylabel("Testing Accuracy")
plt.savefig("TestingAcc.png")
23,958 | a64d4076162213273cf7c44800731735f84a1fbc | # Practice: Blackjack Hand
# Implement scoring a single hand of blackjack.
#
# Cards have point values. Aces are 1 or 11, number cards are their number,
# face cards are all 10. A hand is worth the sum of all the points of the cards
# in it.
# An ace is worth 1 when the hand it's a part of would be over 21 if it was
# worth 11.
#
# Make a class that represents a card.
# Make a class that represents a hand.
# Add functions that adds a card to a hand, one that scores a hand, and one
# that returns if the score is over 21.
# Allow a user to type in a hand and have it be converted into card objects
# and then scored.
import random
class Card(object):
    """A playing card's rank/suit vocabulary.

    ``names`` and ``suits`` are class attributes (not instance attributes)
    so that ``Hand.generate_card`` can read them as ``Card.names`` /
    ``Card.suits`` without an instance; the original defined them inside
    ``__init__``, so class-level access raised AttributeError. Instances
    still see the same lists through normal attribute lookup, so existing
    ``self.names`` / ``self.suits`` callers keep working.
    """
    names = ['A', '2', '3', '4', '5', '6', '7',
             '8', '9', '10', 'J', 'Q', 'K']
    suits = ['C', 'D', 'H', 'S']

    def __repr__(self):
        # NOTE(review): a Card has no individual rank/suit field, so the
        # repr shows the whole vocabulary, exactly as the original did.
        return 'Card({}{})'.format(self.names, self.suits)
class Hand(object):
    """A blackjack hand: holds (name, suit) tuples and scores them."""
    def __init__(self):
        # List of (name, suit) tuples produced by generate_card().
        self.hand_of_cards = []
    def __repr__(self):
        return 'Hand({})'.format(self.hand_of_cards)
    def generate_card(self):
        # Draw a random (name, suit) tuple.
        # NOTE(review): reads Card.names / Card.suits as CLASS attributes,
        # but Card.__init__ defines them on instances only -- this raises
        # AttributeError unless Card is changed to use class attributes.
        self.the_card = random.choice(Card.names), random.choice(Card.suits)
        return self.the_card
    def add_card(self):
        # Append a freshly generated card and announce it.
        self.hand_of_cards.append(self.generate_card())
        print('Added {} to your hand'.format(self.hand_of_cards[-1]))
    def score(self, split_hand, the_cards):
        """Score *split_hand* (list of card strings) against *the_cards*.

        NOTE(review): several suspected defects to confirm:
        * references the_cards.card_names, but Card defines 'names';
        * each[0] inspects only the first character, so '10' is read
          as '1' and scored 1 instead of 10;
        * a "soft" ace adds 10 points where blackjack rules say 11.
        """
        points = 0
        for each in split_hand:
            if each[0] not in the_cards.card_names[0:13]:
                print("Not sure what {} is, disregarding.".format(each))
            elif each[0] in the_cards.card_names[10:13]:
                points += 10
            elif each[0] in the_cards.card_names[1:10]:
                points += int(each[0])
            elif each[0] == the_cards.card_names[0]:
                if points + 11 > 21:
                    points += 1
                elif points + 11 <= 21:
                    points += 10
        return points
def main():
    # Demo driver: create a Card and a Hand, deal one random card and
    # print the hand at each step.
    # NOTE(review): Hand.add_card() -> generate_card() reads Card.names /
    # Card.suits at class level; with Card as written those are instance
    # attributes, so this crashes with AttributeError at add_card().
    my_card = Card()
    print(my_card)
    my_hand = Hand()
    print(my_hand)
    my_hand.add_card()
    print(my_hand)
    # print(my_hand.score())
    # split_hand = print(input(str("Please enter a hand to be scored(i.e. 'k q' or 'kd qh'): ")).upper().split())
    # print(split_hand)
    # my_card = (split_hand[0])
    # print(my_card)
    # my_hand = Hand(split_hand)
    # print(my_hand.score())
main()
|
23,959 | fd824bd722a4671bd7d4a039ebb3113cf4d9b796 | """A simple, generic table parser for HTML.
Inputs:
- source: URL or (plain) filename
- addl_data: dict of additional data to add to each row
Outputs:
- List of dicts containing data from largest table on page
@author n.o.franklin@gmail.com
@date 2017-12-30
"""
from requests import get
from bs4 import BeautifulSoup
def generic_table_parser(source, addl_data):
    """Parse the largest HTML table found at *source* into a list of dicts.

    Args:
        source: URL (anything starting with ``http``) or local file path.
        addl_data: dict of extra key/value pairs merged into every row.

    Returns:
        List of dicts, one per data row of the largest table; keys are
        the table's ``<th>`` headings plus ``'source'`` and *addl_data*.

    Raises:
        Exception: if the chosen table has no ``<th>`` headings.
    """
    if source[:4] == 'http':
        response = get(source)
        response.raise_for_status()
        markup = response.text
    else:
        # Read and close the file instead of leaking an open handle
        # (the original never closed it).
        with open(source, 'r') as fh:
            markup = fh.read()

    soup = BeautifulSoup(markup, 'lxml')
    tabs = soup.find_all('table')

    # Find largest table: the one with the most <tr> rows. Replaces the
    # original dict-of-sizes dance (which also collapsed size ties).
    largest = max(tabs, key=lambda tab: len(tab.find_all('tr')))

    # Get table headings
    headings = [header.text for header in largest.find_all('th')]
    if not headings:
        raise Exception("No table headings found.")

    # Get table data: keep only rows whose cell count matches the headings.
    results = []
    for row in largest.find_all('tr'):
        cells = row.find_all('td')
        if len(cells) == len(headings):
            row_data = {headings[i]: cell.text for i, cell in enumerate(cells)}
            row_data.update({'source': source})
            row_data.update(addl_data)
            results.append(row_data)
    return results
|
23,960 | 4df86a4bea3b3446807225d5c0cbba05e8c0c3d0 | #!/usr/bin/python
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function
import logging
import scapy.config
import scapy.layers.l2
import scapy.route
import socket
import math
import smbc
import glib, gio, gobject
logging.basicConfig(format='%(asctime)s %(levelname)-5s %(message)s',datefmt='%Y-%m-%d %H:%M:%S', level=logging.DEBUG)
logger = logging.getLogger(__name__)
DOMAIN = 'WORKGROUP'
USERNAME = 'antonis'
PASSWORD = '312ggp12'
def mount(f):
    # Asynchronously mount the volume enclosing gio.File *f*; credentials
    # are supplied by ask_password_cb, completion handled by mount_done_cb.
    op = gio.MountOperation()
    op.connect('ask-password', ask_password_cb)
    f.mount_enclosing_volume(op, mount_done_cb)
def mount_mountable(f):
    # Like mount(), but for files of type FILE_TYPE_MOUNTABLE (e.g. SMB
    # shares); completion handled by mount_mountable_done_cb.
    op = gio.MountOperation()
    op.connect('ask-password', ask_password_cb)
    f.mount_mountable( op, mount_mountable_done_cb )
def mount_mountable_done_cb(obj, res):
    # Completion callback for mount_mountable(): on success browse the
    # location; if it was already mounted, eject it instead.
    # NOTE: 'except gio.Error, e' is Python 2 syntax -- this whole script
    # targets Python 2.
    try:
        f = obj.mount_mountable_finish( res )
        if( not f ):
            logger.error( 'An error occured mounting {}'.format( obj.get_basename() ) )
        else:
            logger.debug( 'finished mount_mountable for: {}'.format( f.get_basename() ) )
        browse( obj )
    except gio.Error, e:
        logger.error( 'Error code:{}, {}'.format( e.code, e ) )
        if( e.code == gio.ERROR_ALREADY_MOUNTED ):
            logger.debug( '{} is already mounted and will eject'.format( obj.get_basename() ) )
            obj.eject_mountable( eject_done )
    except:
        # NOTE(review): bare except -- swallows everything else after
        # printing the traceback.
        import traceback; traceback.print_exc()
def ask_password_cb(op, message, default_user, default_domain, flags):
    # gio 'ask-password' signal handler: always answer with the module's
    # hard-coded credentials. NOTE(review): credentials live in plain
    # text at the top of this file.
    op.set_username(USERNAME)
    op.set_domain(DOMAIN)
    op.set_password(PASSWORD)
    op.reply(gio.MOUNT_OPERATION_HANDLED)
def mount_done_cb(obj, res):
    # Completion callback for mount(): just browse the mounted location.
    # NOTE(review): mount_enclosing_volume_finish() is never called, so
    # mount errors surface later in browse() instead of here.
    #logger.debug( 'finished with res:{}'.format( dir( res ) ) )
    #import pprint; pprint.pprint( dir( res ) )
    #import pprint; pprint.pprint( obj )
    #return
    #obj.mount_enclosing_volume_finish(res)
    browse( obj )
    pass
def eject_done( obj, res ):
    """Completion callback for eject_mountable(): log the outcome."""
    # Bug fix: Logger.log() requires a numeric level as its first
    # argument; the original passed only the message, which raises
    # TypeError. logger.info() is the intended call.
    # NOTE(review): 'succesfully' is misspelled in the log text (kept
    # byte-identical here).
    logger.info( 'Eject of {} finished {}'.format( obj.get_basename(), ( 'succesfully' if obj.eject_mountable_finish( res ) else 'unsuccessfully' ) ) )
def browse( f ):
    # Enumerate the children of gio.File *f*, printing directories and
    # files; if the location is not mounted yet, mount it and bail out
    # (browsing resumes from the mount callback).
    import pprint; pprint.pprint( f )
    #mnt = f.find_enclosing_mount()
    #if( not mnt ):
    #mount( f )
    try:
        infos = f.enumerate_children('standard::name,standard::type' )#,standard::size')
    except Exception as ex:
        logger.debug( '{} has no children'.format( f.get_basename() ) )
        logger.debug( 'The exception is {} (exception code: {}, gio.ERROR_NOT_MOUNTED:{}, code == ERROR_NOT_MOUNTED is {} )'.format( ex, ex.code, gio.ERROR_NOT_MOUNTED, ex.code == gio.ERROR_NOT_MOUNTED ) )
        if( ex.code == gio.ERROR_NOT_MOUNTED ):
            mount( f )
            return
        import traceback; traceback.print_exc()
        return
    for info in infos:
        logger.debug( '\t\t\t:{} ({})'.format( info.get_name(), info.get_file_type() ) )
        child = f.get_child(info.get_name())
        logger.debug( 'child: {}'.format( child ) )
        if info.get_file_type() == gio.FILE_TYPE_DIRECTORY:
            print( '\t\t\tdir:{} ({})'.format( info.get_name(), info.get_file_type() ) )
        elif( info.get_file_type() == gio.FILE_TYPE_MOUNTABLE ):
            logger.debug( '\tthis is a FILE_TYPE_MOUNTABLE' )
            continue
            # NOTE(review): the 'continue' above makes everything below in
            # this branch unreachable -- mount_mountable() is never called.
            try:
                mount_mountable( child )
                #mount( child )
                #browse( child )
            except Exception as e:
                logger.debug( 'got exception e: {}'.format( e ) )
                if( e.code == gio.ERROR_ALREADY_MOUNTED ):
                    logger.debug( '{} is already mounted and will eject'.format( info.get_name() ) )
                    child.eject_mountable( eject_done )
                else:
                    import traceback; traceback.print_exc()
        else:
            print( '\t\t\tfile:{} ({})'.format( info.get_name(), info.get_file_type() ) )
def long2net(arg):
    """Convert a 32-bit netmask given as an integer to its CIDR prefix length.

    Args:
        arg: netmask as an unsigned 32-bit integer, e.g. 0xFFFFFF00 -> 24.

    Returns:
        The prefix length (number of leading one-bits), 1..31.

    Raises:
        ValueError: if *arg* is outside the open interval (0, 0xFFFFFFFF).
    """
    if (arg <= 0 or arg >= 0xFFFFFFFF):
        raise ValueError("illegal netmask value", hex(arg))
    # bit_length() of the host part is exact for every contiguous mask.
    # The previous round(log(..., 2)) form returned 32 for a /31 mask
    # (0xFFFFFFFE), because log2(1) == 0.
    return 32 - (0xFFFFFFFF - arg).bit_length()
def to_CIDR_notation(bytes_network, bytes_netmask):
    # Render a (network, netmask) pair of 32-bit ints as 'a.b.c.d/len'.
    # Returns None for masks shorter than /16 (too large to arp-scan).
    # NOTE(review): relies on scapy.utils.ltoa although only scapy.config,
    # scapy.layers.l2 and scapy.route are imported at the top -- confirm
    # scapy.utils is pulled in transitively.
    network = scapy.utils.ltoa(bytes_network)
    netmask = long2net(bytes_netmask)
    net = "%s/%s" % (network, netmask)
    if netmask < 16:
        logger.warn("%s is too big. skipping" % net)
        return None
    return net
def scan_and_print_neighbors(net, interface):
    # ARP-scan *net* (CIDR string) on *interface* and log each responder's
    # MAC, IP and -- when reverse DNS succeeds -- hostname.
    logger.info("arping %s on %s" % (net, interface))
    ans, unans = scapy.layers.l2.arping(net, iface=interface, timeout=1, verbose=True)
    for s, r in ans.res:
        line = r.sprintf("%Ether.src% %ARP.psrc%")
        try:
            hostname = socket.gethostbyaddr(r.psrc)
            line += " " + hostname[0]
        except socket.herror:
            # failed to resolve
            pass
        logger.info(line)
def TryGio( arg = None ):
    # Entry point for the gio-based SMB browser: browse smb://<arg>
    # (or the whole SMB network when arg is None) inside a glib main
    # loop. This call blocks -- MainLoop().run() never returns on its own.
    gobject.threads_init()
    logger.debug( '\n--- Now trying gio for "{}"---'.format( arg ) )
    #filename = "smb:///{}/".format( 'network' )
    #filename = "smb:///{}/".format( '192.168.1.4' )
    #filename = "smb://"
    #filename = "smb://WORKGROUP"
    #filename = "smb://HOMEPC"
    filename = "smb://"
    if( arg ):
        filename += arg
    logging.debug( 'Browsing dir {}'.format( filename ) )
    fh = gio.File( filename )
    #mount( fh )
    browse( fh )
    glib.MainLoop().run()
class SMBCProvider:
    """SMB network browser built on libsmbclient (pysmbc).

    NOTE(review): inside __init__ the bare names DOMAIN/USERNAME/PASSWORD
    resolve to the MODULE-level globals, not the class attributes of the
    same names below -- the class-level values are never used.
    """
    DOMAIN = ''
    USERNAME = 'antonis'
    PASSWORD = '312ggp12'
    def __init__( self ):
        self.domain = DOMAIN
        self.username = USERNAME
        self.password = PASSWORD
        self.ctx = smbc.Context( auth_fn = self.my_auth_callback_fn, debug=0 )
        #self.ctx = smbc.Context()
        self.ctx.optionNoAutoAnonymousLogin = True
    def my_auth_callback_fn( self, server, share, workgroup, username, password ):
        # libsmbclient authentication hook: always answer with the stored
        # (workgroup, username, password) triple.
        logger.debug( 'server:{}, share:{}, workgroup:{}, username:{}, password:{}'.format( server, share, workgroup, username, password ) )
        logger.debug( 'returning ( {}, {}, {} )'.format( workgroup, self.username, self.password ) )
        return ( workgroup, self.username, self.password )
    def SMBCDir( self, ctx, samba_path, tabs ):
        """Recursively walk the SMB tree rooted at *samba_path*, logging entries.

        smbc_type codes handled here: 1 workgroup/domain, 2 server,
        3 share, 7 directory (7 is logged but not recursed into).
        """
        try:
            logger.debug( 'will open path {}'.format( samba_path ) )
            entries = self.ctx.opendir( samba_path ).getdents()
            logger.debug( 'got {} entries'.format( len( entries ) ) )
            tabs = '{}{}'.format( tabs, '\t' )
            for entry in entries:
                #vlc_uri = 'smb://{}:{}@{}/{}/{}'.format( self.username, self.password, server, directory_path, entry.name )
                path = samba_path
                #logger.debug( 'smbc_type:{}, comment:{}, name:{}'.format( entry.smbc_type, entry.comment, entry.name ) )
                if( entry.smbc_type == 1 ): #domain
                    logger.debug( 'setting DOMAIN, was {}, is {}'.format( self.domain, entry.name ) )
                    self.domain = entry.name
                    path = 'smb://{}'.format( self.domain )
                if( entry.smbc_type == 2 ): #server
                    logger.debug( 'setting server to {}'.format( entry.name ) )
                    server = entry.name
                    path = 'smb://{}'.format( server )
                if( entry.smbc_type in [ 3, 7 ] ): #dir
                    path = '{}/{}'.format( samba_path, entry.name )
                    #logger.debug( 'will go deeper to {}'.format( path ) )
                if( entry.smbc_type in [ 1, 2, 3 ] ):#, 7 ] ):
                    self.SMBCDir( ctx, path, tabs )
                logger.debug( 'smbc_type:{}, path:{}, comment:{}, name:{}'.format( entry.smbc_type, path, entry.comment, entry.name ) )
        except:
            # NOTE(review): bare except swallows every error (including
            # KeyboardInterrupt) after printing the traceback.
            import traceback; traceback.print_exc()
            logger.error( 'Failed' )
    def Start( self ):
        """Kick off a recursive browse of the whole SMB network ('smb://')."""
        try:
            logger.debug( '\n--- Now trying smbc ---' )
            path = "smb://"
            #path = 'smb://HOMEPC/movies-vfat'
            #path ='smb://HOMEPC/movies-vfat/Covert.One.The.Hades.Factor.DVDRip.XviD/CD1'
            #entries = ctx.opendir( samba_path ).getdents ()
            #for entry in entries:
            #logger.info( 'comment:{}, name:{}, smbc_type:{}'.format( entry.comment, entry.name, entry.smbc_type ) )
            self.SMBCDir( self.ctx, path, '' )
            logger.debug( '\n--- Finished smbc ---' )
        except:
            import sys, traceback; logger.error( traceback.format_exception( *sys.exc_info() ) )
#sp = SMBCProvider()
#sp.Start()
import sys;
TryGio( ( sys.argv[1] if len( sys.argv ) > 1 else None ) )
#print( len( sys.argv ) )
sys.exit(0)
# NOTE(review): sys.exit(0) above makes everything below unreachable --
# the scapy route scan is dead code as the script currently stands.
for route in scapy.config.conf.route.routes:
    network = route[0]
    netmask = route[1]
    interface = route[3]
    logger.debug( 'Doing {}/{} on {} (route[4]:{})'.format( network, netmask, interface, route[4] ) )
    # skip loopback network and default gw
    if network == 0 or interface == 'lo' or route[4] == '127.0.0.1' or route[4] == '0.0.0.0' :
        continue
    if netmask <= 0 or netmask == 0xFFFFFFFF:
        continue
    net = to_CIDR_notation(network, netmask)
    if net == '169.254.0.0/16':
        logger.debug( "skipping microsoft's link-local IPv4 {} Automatic Private Internet Protocol Addressing (APIPA)".format( net ) )
        continue
    if interface != scapy.config.conf.iface:
        # see http://trac.secdev.org/scapy/ticket/537
        logger.warn("skipping %s because scapy currently doesn't support arping on non-primary network interfaces", net)
        continue
    if net:
        scan_and_print_neighbors(net, interface)
|
23,961 | 853da41550bc96a5e28d5e1c2d473a1fa180b848 | # Generated by Django 2.1.7 on 2019-04-30 09:57
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated: make Catalog.image optional (blank=True), label 'Preview'."""
    dependencies = [
        ('mainapp', '0001_initial'),
    ]
    operations = [
        migrations.AlterField(
            model_name='catalog',
            name='image',
            field=models.ImageField(blank=True, upload_to='', verbose_name='Preview'),
        ),
    ]
|
23,962 | e35544bbeca93c2c1f455f6a4ee409126b177a6f | from rest_framework import viewsets
from classifier.models import Classifier
from classifier.serializers import ClassifierSerializer
class ClassifierViewSet(viewsets.ModelViewSet):
    """DRF ModelViewSet exposing full CRUD for Classifier objects."""
    queryset = Classifier.objects.all()
    serializer_class = ClassifierSerializer
23,963 | 426c7b05b24479f059a27202b0e290f1fdd3e634 | # coding=utf-8
class YangShiXinWen:
    """Prints the CCTV News (Yangshi Xinwen) Weibo feed URL on construction."""
    def __init__(self):
        # Runtime string intentionally kept in Chinese ("CCTV News live:").
        print ("央视新闻实时: https://weibo.com/cctvxinwen?profile_ftype=1&is_all=1")
|
23,964 | e2b9f4a48affbed51968a75925a3dede601fbce1 | import cv2
import sys
import pytesseract
import re
if __name__ == '__main__':
    # OCR the image given on the command line with Tesseract and print the
    # value following 'Photo Code:' on any matching line.
    if len(sys.argv) < 2:
        print('Usage: python main.py image.jpg')
        # Version dump doubles as an environment sanity check.
        print(cv2.getVersionString())
        print(pytesseract.get_tesseract_version())
        sys.exit(1)
    imPath = sys.argv[1]
    # Tesseract flags: English, LSTM engine (--oem 1), automatic page
    # segmentation (--psm 3).
    config = '-l eng --oem 1 --psm 3'
    img = cv2.imread(imPath, cv2.IMREAD_GRAYSCALE)
    text = pytesseract.image_to_string(img, config=config)
    # print(type(text))
    text_new = text.split('\n')
    # print(text_new)
    regex = '^Photo Code:'
    for t in text_new:
        if re.match(regex, t) is not None:
            print(t.replace('Photo Code: ', ''))
|
23,965 | 27271155d7dcde7479afb45a551adba53fdd687c | import cv2
import numpy as np
import math
from zigzag import *
from utils import *
# do inv dct
def decode(frame, width=WIDTH, height=HEIGHT, block_size=BLOCK_SIZE):
    """Inverse-transform a zigzag/quantized-DCT frame back to pixel values.

    For each block_size x block_size tile: undo the zigzag scan,
    de-quantize by multiplying with table Q (imported from utils),
    apply the inverse DCT, then clamp the result to [0, 255].

    Args:
        frame: 2-D array of zigzag-ordered quantized DCT coefficients.
            # assumes frame is at least height x width -- TODO confirm
    Returns:
        2-D numpy array of shape (height, width) with clamped values.
    """
    padded_frame = np.zeros((height, width))
    i, j, k = 0, 0, 0  # NOTE(review): k is never used
    while i < height:
        j = 0
        while j < width:
            temp_stream = frame[i:i+block_size,j:j+block_size]
            block = inverse_zigzag(temp_stream.flatten(), block_size, block_size)
            quant = np.multiply(block, Q)
            padded_frame[i:i+block_size,j:j+block_size] = cv2.idct(quant)
            j += block_size
        i += block_size
    # Clamp to the valid 8-bit pixel range.
    padded_frame[padded_frame > 255] = 255
    padded_frame[padded_frame < 0] = 0
    return padded_frame
|
23,966 | 15eec660b1eb022e6040f82d27530f4152e54a28 | #!/usr/bin/env python
#
# Project Euler 60
from itertools import combinations
from bitarray import bitarray
from math import sqrt
primes = None
primeset = None
maxlen = 8
def generate_primes():
    # Sieve of Eratosthenes over a bitarray of size 10**maxlen, returning
    # all primes below that bound in ascending order.
    # NOTE: Python 2 code ('2L' long literal); 'next' shadows the builtin.
    global maxlen
    size = 10 ** maxlen
    stop = int(sqrt(size))
    a = bitarray(size)
    a.setall(True)
    a[:2] = False # 0, 1 are not prime
    primes = list()
    next = 2L
    try:
        while True:
            primes.append(next)
            if next < stop:
                a[next::next] = False
            # index() raises ValueError past the last prime, ending the loop.
            next = a.index(True, next+1)
    except ValueError:
        pass
    return primes
def pair_satisfies(a, b):
    """Return True iff both digit-concatenations of primes *a* and *b* are prime."""
    global primeset
    left, right = str(a), str(b)
    concatenations = (int(left + right), int(right + left))
    return all(n in primeset for n in concatenations)
def set_satisfies(s):
    # True iff every unordered pair of primes in *s* concatenates (both
    # ways) to a prime; trivially true for fewer than two elements.
    if len(s) < 2:
        return True
    return all(pair_satisfies(a, b) for (a, b) in combinations(s, 2))
dead_ends = set()
def partial(working):
    # Depth-first search: extend *working* (a list of mutually
    # concatenation-compatible primes) one prime at a time until it has
    # five members. Returns the completed list, or False on dead end.
    global maxlen
    if len(working) == 5:
        return working
    if len(working) > 0:
        # Bound the candidate so that concatenating it with the longest
        # member stays within maxlen digits (the sieve's range).
        longest = max(working, key=lambda x: len(str(x)))
        maxprime = 10 ** (maxlen - len(str(longest)))
    else:
        maxprime = 10 ** maxlen
    for prime in primes:
        if prime > maxprime:
            break
        if prime in [2, 5]:
            # A concatenation ending in 2 or 5 can never be prime.
            continue
        if prime in working:
            continue
        next_attempt = list(working)
        next_attempt.append(prime)
        if frozenset(next_attempt) in dead_ends:
            continue
        if not set_satisfies(next_attempt):
            continue
        result = partial(next_attempt)
        if result:
            return result
    # Memoize the failed prefix so sibling branches prune it instantly.
    dead_ends.add(frozenset(working))
    return False
def solution():
    # Project Euler 60: find five primes where every pairwise
    # concatenation is prime, and return their sum.
    # NOTE: Python 2 print statements.
    global primes
    global primeset
    print "generating primes..."
    primes = generate_primes()
    print "prepping O(1) prime lookup table..."
    primeset = set(primes)
    print "solving..."
    return sum(partial([]))
if __name__ == "__main__":
    # Script entry point (Python 2 print statement).
    print solution()
|
23,967 | 32022d2c65938320636c99f83ca06975739e2854 | import logging
import os
from tqdm import tqdm
from file_reader import itWacReader, itWikiReader
logging.basicConfig(format='%(asctime)s %(message)s', datefmt='%m/%d/%Y %I:%M:%S %p', level=logging.INFO)
# Repackage the ItWac corpus into files of 1000 "paragraphs", each
# paragraph holding at most 128 tokens; one token per write, one
# paragraph per line, 'passerotto' normalized to 'passero'.
corpus = itWacReader('/import/cogsci/andrea/dataset/corpora/itwac')
logging.info('Reorganizing ItWac')
out_folder = '/import/cogsci/andrea/dataset/corpora/itwac_lemma_reorganized'
os.makedirs(out_folder, exist_ok=True)
current_file = list()
current_paragraph = list()
file_counter = 0
for l in tqdm(corpus):
    # Adding sentences to paragraph
    if len(current_paragraph) + len(l) <= 128:
        current_paragraph = current_paragraph + l
    else:
        current_file.append(current_paragraph)
        # NOTE(review): the sentence *l* that overflowed the 128-token
        # budget is discarded here instead of seeding the next paragraph
        # -- confirm this is intended.
        current_paragraph = list()
    # Adding paragraphs to file
    if len(current_file) == 1000:
        file_path = os.path.join(out_folder, '{:06}.itwac'.format(file_counter))
        with open(file_path, encoding='utf-8', mode='w') as o:
            # NOTE(review): loop variable 'l' is reused here, clobbering
            # the outer sentence variable for the rest of that iteration.
            for l in current_file:
                for w in l:
                    if w == 'passerotto':
                        w = 'passero'
                    o.write('{} '.format(w))
                o.write('\n')
        file_counter += 1
        current_file = list()
# NOTE(review): a trailing partial batch (< 1000 paragraphs) is never
# written to disk when the corpus ends.
|
23,968 | 9ae3bb73ef18dc357778953456727382d23dc46b | from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.utils import timezone
class Workers(models.Model):
    """A worker (craftsman/laborer/admin/accountant/driver) with a salary."""
    # NOTE(review): this is a SET literal, so choice ordering is
    # nondeterministic between processes -- Django expects an ordered
    # iterable of 2-tuples; a list/tuple would avoid spurious migrations.
    TYPE_OF_PERSON = {
        ('صنايعي' , "صنايعي" ),
        ('عامل' , "عامل" ),
        ('إداري' , "إدارى" ),
        ('محاسب' , "محاسب" ),
        ('سواق' , "سواق" ),
    }
    name = models.CharField(_("إسم العامل"),max_length=150)
    type_of_person = models.CharField(_("نوع العامل"), max_length=50 ,choices=TYPE_OF_PERSON, default='عامل' )
    description = models.TextField(_("وصف العامل"))
    create_at = models.DateTimeField(_("تاريخ الإضافة"), default = timezone.now)
    update_by = models.DateTimeField(_("تم التحديث :"),auto_now_add=True, blank=True, null=True)
    price = models.FloatField(_("راتب العامل "))
    class Meta:
        verbose_name = _("Workers")
        verbose_name_plural = _("Workerss")
    def __str__(self):
        return str(self.name)
    # def save(self, *args, **kwargs):
    #     self.remaining_amount = self.price - self.amount_received
    #     super(Workers, self).save(*args, **kwargs)
class Amount_Received(models.Model):
    """A salary advance taken by a worker; net salary is derived on save."""
    # NOTE(review): Meta verbose names read "Rmount_Received" -- likely a
    # typo for "Amount_Received" (runtime strings left unchanged here).
    workers = models.ForeignKey("Workers", verbose_name=_("العامل"), on_delete=models.CASCADE)
    amount_received = models.FloatField(_("قيمة السلفة"))
    remaining_amount = models.FloatField(_("صافي المرتب "), blank=True, null=True)
    description = models.TextField(_("سبب السلفة"))
    create_at = models.DateTimeField(_("تاريخ الإضافة"), default = timezone.now)
    update_by = models.DateTimeField(_("تم التحديث :"),auto_now_add=True, blank=True, null=True)
    class Meta:
        verbose_name = _("Rmount_Received")
        verbose_name_plural = _("Rmount_Receiveds")
    def __str__(self):
        return str(self.workers)
    def save(self, *args, **kwargs):
        # Net salary = worker's salary minus this advance, recomputed on
        # every save.
        self.remaining_amount = self.workers.price - self.amount_received
        super(Amount_Received, self).save(*args, **kwargs)
class Servicing(models.Model):
    """A maintenance job with a title, description and cost."""
    name = models.CharField(_("عنوان الصيانة"),max_length=150)
    description = models.TextField(_("وصف الصيانة"))
    create_at = models.DateTimeField(_("تاريخ الإضافة"), default = timezone.now)
    update_by = models.DateTimeField(_("تم التحديث :"),auto_now_add=True, blank=True, null=True)
    cost = models.FloatField(_("تكلفة الصيانه"))
    class Meta:
        verbose_name = _("Servicing")
        verbose_name_plural = _("Servicings")
    def __str__(self):
        return str(self.name)
|
23,969 | 35a47d3c6e13fa4a115630b6ccc268d284873159 | from datetime import datetime, timedelta
import requests
import config
global_url = "https://www.googleapis.com/calendar/v3/calendars/%s/events?orderBy=startTime&singleEvents=true&timeMin={}&timeMax={}&key=%s"% (config.calendars_Id, config.auth_key)
class calendar:
    """Thin wrapper around the Google Calendar v3 events endpoint.

    NOTE(review): the lowercase name shadows the stdlib 'calendar'
    module name if this module is star-imported elsewhere.
    """
    def __init__(self, MINTime, MAXTime) -> None:
        # RFC3339 timestamps bounding the event query window; the module
        # template global_url already carries calendar id and API key.
        self.MINTime = MINTime
        self.MAXTime = MAXTime
        self.url = global_url.format(self.MINTime, self.MAXTime)
    def urlopen(self) -> dict:
        """Fetch the events listing and return the decoded JSON payload."""
        calendar_urlOpens = requests.get(self.url)
        return calendar_urlOpens.json()
# Query events from one week ago to one week ahead and print each summary.
week_ago = (datetime.today() - timedelta(7)).strftime("%Y-%m-%dT00:00:00Z")
week_later = (datetime.today() + timedelta(7)).strftime("%Y-%m-%dT00:00:00Z")
r = calendar(MINTime=week_ago, MAXTime=week_later)
result = r.urlopen()
# NOTE(review): no HTTP/KeyError handling; iterating result['items']
# directly (or with enumerate) would be the idiomatic form.
for i in range(0,len(result['items'])):
    print(result['items'][i]['summary'])
23,970 | c80c5c37637084804668a8016b96f84be2f17420 | from numpy import*
# NOTE(review): eval() on raw user input executes arbitrary code --
# prefer ast.literal_eval or explicit float parsing. Prompt is
# Portuguese: "arrival time of the runners".
a = array(eval(input("tempo de chegada dos corredores")))
# NOTE(review): the minimum is computed but never printed or stored --
# this line has no observable effect as written.
min(a)
|
23,971 | 82b0bc3254214688fc858a50cb18f66f05325e81 | import logging
import time
import netmiko
# logging.basicConfig(
# # filename="ios_conf_handler.log",
# level=logging.DEBUG,
# format="[%(levelname)s] %(asctime)s (%(threadName)-4s): %(message)s",
# )
default_password_list = []
with open("default_passwords.txt", "r") as passwords_file:
for line in passwords_file:
default_password_list.append(line.strip(" \n"))
class _Device:
    """A single Cisco IOS device managed over telnet via netmiko.

    Holds address/credentials and implements erase / update / write /
    read operations on the device configuration. When a password is not
    supplied, candidates from default_passwords.txt (module-level
    default_password_list) are tried in order, and the first that works
    is remembered on the instance. Per-device config files live at
    device_list_and_configs/<name>.txt.
    """
    def __init__(self, name: str, address: str, port: int or str, user_password: str, priv_password: str):
        # NOTE(review): the annotation 'int or str' evaluates to plain
        # 'int' at runtime; typing.Union[int, str] was probably intended.
        self.name = name
        self.priv_password = priv_password
        self.user_password = user_password
        self.address = address
        self.port = port
        self.conf_file_path = "device_list_and_configs/{}.txt".format(name)
    def _open_telnet_and_login_to_user_EXEC(self):
        """Open the telnet session; brute-force default passwords if none given."""
        logging.info('_open_telnet_and_login_to_user_EXEC: trying to open telnet connection')
        # if password was not provided pick default password, else login with provided password
        if self.user_password == "":
            # trying to login using default passwords
            for password in default_password_list:
                try:
                    logging.info('_open_telnet_and_login_to_user_EXEC: password:' + password)
                    self.telnet_netmiko = netmiko.Netmiko(host=self.address,
                                                          password=password,
                                                          secret='',
                                                          port=self.port,
                                                          device_type='cisco_ios_telnet',
                                                          timeout=20,
                                                          session_timeout=10,
                                                          auth_timeout=15)
                    self.user_password = password
                    break
                except netmiko.NetMikoAuthenticationException:
                    # if login failed, catch that exception, log that, and start next iteration
                    logging.info('_open_telnet_and_login_to_user_EXEC: password failed')
                    continue
            # if loop was exited not by "break", which means that all login attempts failed, raise that
            else:
                raise netmiko.NetMikoAuthenticationException("Login to user EXEC failed: {}".format(self.name))
        else:
            self.telnet_netmiko = netmiko.Netmiko(host=self.address,
                                                  password=self.user_password,
                                                  secret=self.priv_password,
                                                  port=self.port,
                                                  device_type='cisco_ios_telnet',
                                                  timeout=20,
                                                  session_timeout=10,
                                                  auth_timeout=15)
        logging.info('_open_telnet_and_login_to_user_EXEC: opened telnet connection')
    def _close_telnet(self):
        """Tear down the telnet session."""
        self.telnet_netmiko.disconnect()
        logging.info('_close_telnet: closed telnet connection')
    def _login_to_priv_EXEC(self):
        """Enter privileged EXEC, brute-forcing the enable secret if needed."""
        # if password was not provided pick default password, else login with provided password
        if self.priv_password == "":
            # trying to login to priv EXEC using default passwords
            for password in default_password_list:
                try:
                    self.telnet_netmiko.secret = password
                    self.telnet_netmiko.enable()
                    self.priv_password = password
                    break
                except ValueError:
                    logging.info('_login_to_priv_EXEC: pass try')
                    continue
                except netmiko.ssh_exception.NetMikoTimeoutException:
                    # Nudge the device with a newline to recover the prompt.
                    self.telnet_netmiko.send_command("\n", expect_string="({}|assword)".format(
                        self.telnet_netmiko.base_prompt),
                                                     strip_command=False,
                                                     strip_prompt=False)
            # if loop was exited not by "break", which means that all login attempts failed, raise that
            else:
                raise netmiko.NetMikoAuthenticationException("Login to priv EXEC failed: {}".format(self.name))
            logging.info('_login_to_priv_EXEC: successful login to priv_EXEC')
            return
        else:
            self.telnet_netmiko.enable()
            logging.info('_login_to_priv: successful login to priv_EXEC')
            return
    def _erase_startup_configuration(self):
        """Run 'erase startup-config' and answer the confirmation prompt."""
        output = self.telnet_netmiko.send_command("erase startup-config", expect_string="confirm", strip_command=False,
                                                  strip_prompt=False)
        logging.info(output)
        if "confirm" in output:
            output = self.telnet_netmiko.send_command("\n", strip_command=False, strip_prompt=False)
            logging.info(output)
            logging.info('_erase_startup_configuration: successful erase startup-config')
            time.sleep(1)
        else:
            # Unexpected device response: log it and drop the session.
            logging.warning('_erase_startup_configuration: unsuccessful erase startup-config\n' + output)
            self._close_telnet()
    def _reload_devise(self):
        """Issue 'reload', declining to save modified config if asked."""
        # send command "reload", read output(response from device), and process it
        output = self.telnet_netmiko.send_command("reload", expect_string="(confirm|yes/no)", strip_command=False,
                                                  strip_prompt=False)
        logging.info(output)
        if "Proceed with reload" in output:
            output = self.telnet_netmiko.send_command("\n", expect_string="Reload Reason", strip_command=False,
                                                      strip_prompt=False)
            logging.info(output)
            logging.info("_reload_devise: successful")
        elif "System configuration has been modified" in output:  # System configuration has been modified. Save? [yes/no]:
            output = self.telnet_netmiko.send_command("no", expect_string="confirm", strip_command=False,
                                                      strip_prompt=False)
            logging.info(output)
            if "Proceed with reload" in output:
                output = self.telnet_netmiko.send_command("\n", expect_string="Reload Reason", strip_command=False,
                                                          strip_prompt=False)
                logging.info(output)
                logging.info("_reload_devise: successful")
            else:
                logging.warning("_reload_devise:| unsuccessful\n" + output)
                self._close_telnet()
        else:
            logging.warning("_reload_devise: unsuccessful\n" + output)
            self._close_telnet()
    def _update_configuration(self):
        """Push every line of the device's config file in config mode."""
        configuration = []
        # read all configuration commands from configuration file, store them to list, and send to device
        with open(self.conf_file_path, "r") as f:
            for conf_line in f:
                configuration.append(conf_line.strip(" \n"))
        self.telnet_netmiko.send_config_set(configuration, strip_command=False, strip_prompt=False)
        logging.info("_update_conf: configuration entered")
    def _read_and_save_running_configuration_to_conf_file(self):
        """Dump 'show running-config' into the device's config file."""
        conf_to_save = self.telnet_netmiko.send_command("show running-config", strip_command=True, strip_prompt=True)
        logging.info("_read_and_save: successful read conf")
        with open(self.conf_file_path, "w") as conf_file:
            conf_file.write(conf_to_save)
        logging.info("_read_and_save: successful save to file")
    def _save_configuration(self):
        """Persist running-config to startup-config ('write memory')."""
        output = self.telnet_netmiko.save_config(confirm=False)  # confirm=False confirm_response=""
        logging.info(output)
        logging.info("_save_configuration: successful")
    def erase_configuration(self):
        """Erase startup-config and reload the device."""
        self._open_telnet_and_login_to_user_EXEC()
        self._login_to_priv_EXEC()
        time.sleep(5)
        self._erase_startup_configuration()
        self._reload_devise()
        self._close_telnet()
    def update_configuration(self):
        """Apply the config file on top of the running configuration and save."""
        self._open_telnet_and_login_to_user_EXEC()
        self._login_to_priv_EXEC()
        time.sleep(5)
        self._update_configuration()
        self._save_configuration()
        self._close_telnet()
    def write_configuration(self):
        """Factory-reset then apply the config file from scratch.

        NOTE(review): blocks for a fixed 150 s while the device reboots
        -- presumably tuned to the slowest device; confirm.
        """
        self._open_telnet_and_login_to_user_EXEC()
        self._login_to_priv_EXEC()
        time.sleep(5)
        self._erase_startup_configuration()
        self._reload_devise()
        self._close_telnet()
        time.sleep(150)
        self._open_telnet_and_login_to_user_EXEC()
        self._login_to_priv_EXEC()
        time.sleep(5)
        self._update_configuration()
        self._save_configuration()
        self._close_telnet()
    def read_configuration(self):
        """Snapshot the device's running configuration into its config file."""
        self._open_telnet_and_login_to_user_EXEC()
        self._login_to_priv_EXEC()
        time.sleep(5)
        self._read_and_save_running_configuration_to_conf_file()
        self._close_telnet()
def erase_configuration(name: str, address: str, port: int, user_password: str = "", priv_password: str = ""):
    # Module-level convenience wrapper: erase startup-config and reload.
    # Empty passwords trigger the default-password brute force.
    d = _Device(name, address, port, user_password, priv_password)
    d.erase_configuration()
def update_configuration(name: str, address: str, port: int, user_password: str = "", priv_password: str = ""):
    # Module-level convenience wrapper: push the device's config file and save.
    d = _Device(name, address, port, user_password, priv_password)
    d.update_configuration()
def write_configuration(name: str, address: str, port: int, user_password: str = "", priv_password: str = ""):
    # Module-level convenience wrapper: factory-reset then apply config
    # from scratch (blocks ~150 s during the reboot).
    d = _Device(name, address, port, user_password, priv_password)
    d.write_configuration()
def read_configuration(name: str, address: str, port: int, user_password: str = "", priv_password: str = ""):
    # Module-level convenience wrapper: snapshot running-config to file.
    d = _Device(name, address, port, user_password, priv_password)
    d.read_configuration()
|
23,972 | a243f4df2c31cd8f4c7ce4f2a4523aa917cb5b9c | #Ignore pylint whitespace warning
# pylint: disable=W0311
from django.shortcuts import render
from django.contrib.auth.decorators import login_required
from django.template import RequestContext
from django.shortcuts import render, render_to_response
from django.views.generic import View
from django.template.defaulttags import register
import sys
from customers.models import Users as users
from customers.models import Vendors as vendors
from customers.models import Orders as orders
from customers.models import Address
#A Filter for enabling the usage of a dictionary in the templates
@register.filter(name = 'get_full_address')
def get_full_address(address_id):
    # Template filter: resolve an Address pk to a single display string,
    # skipping any missing (None) components. Returns "not_found" on any
    # failure. NOTE: Python 2 code (print statements, sys.exc_traceback);
    # the stray 'print address_id' looks like debug leftovers.
    try:
        print address_id
        address = Address.objects.get(id=address_id)
        full_address = (str(address.building_number) if address.building_number != None else '') + ' ' +\
        (str(address.street) if address.street != None else '') + ' ' + (str(address.locality) if address.locality != None else '') + ' ' +\
        (str(address.landmark) if address.landmark != None else '') + ' ' + ((', ' + str(address.city)) if address.city != None else '')
        return full_address
    except Exception as general_exception:
        print str(general_exception)
        print "Line number : " + str(sys.exc_traceback.tb_lineno)
        return "not_found"
#A Filter for enabling the usage of a dictionary in the templates
@register.filter(name = 'get_item')
def get_item(dictionary, key):
    # Template filter: dictionary lookup usable from Django templates.
    # Returns "not_found" on any error. NOTE: Python 2 code.
    try:
        value = dictionary.get(key) #Using '.get' To suppress key error (just in case)
        return value
    except Exception as general_exception:
        print str(general_exception)
        print "Line number : " + str(sys.exc_traceback.tb_lineno)
        return "not_found"
|
23,973 | c5e57d38d85494e9e2072fe6411b15b27a32c1f6 | import tkinter as tk
from tkinter import ttk
from tkinter import scrolledtext
from moviePageClass import moviePage
from seriesPageClass import seriesPage
from LangPack import I18N
import sqlite3
#from loginPageClass import LoginPage
import settings as settings
class mainPage:
    """Main window after login: buttons into movie/series pages plus two
    read-only lists of the current user's favorites.

    NOTE(review): list_favorites and list_favoritesTR are identical
    except for the table names (Movies/Series vs MoviesTr/SeriesTr);
    consider one parameterized method.
    """
    def __init__(self,user,master):
        self.master = master
        # Localize UI strings according to the language chosen at login.
        self.i18n = I18N(self.master.choice.get())
        self.win = tk.Toplevel()
        # Make this window modal.
        self.win.grab_set()
        self.win.title(self.i18n.main_page_title)
        self.win.geometry("600x500+710+290")
        self.win.resizable(False, False)
        self.current_user = user
        self.create_widgets()
        #self.choice2=self.LoginPage.choice.get()
    def list_favorites(self):
        """Fill both text panes with the user's favorites (English tables)."""
        # Favorite ids are stored as comma-separated strings on the user row.
        con = sqlite3.connect("Users.db")
        cur = con.cursor()
        cur.execute("SELECT favs FROM Users WHERE username=?", [self.current_user])
        rec = cur.fetchone()
        favSeries = rec[0].split(",")
        cur.execute("SELECT favsMovies FROM Users WHERE username=?", [self.current_user])
        rec = cur.fetchone()
        favMovies = rec[0].split(",")
        con.close()
        conn = sqlite3.connect("Movies.db")
        curr = conn.cursor()
        # Widgets are kept disabled except while being refreshed.
        self.scr_text1.configure(state="normal")
        self.scr_text1.delete("1.0", tk.END)
        for i in favMovies:
            if i != "":
                curr.execute("SELECT name FROM Movies WHERE mid=?", [i])
                rec = curr.fetchone()
                self.scr_text1.insert(tk.INSERT, rec[0] + "\n")
        self.scr_text1.configure(state="disabled")
        conn.close()
        connn = sqlite3.connect("Series.db")
        currr = connn.cursor()
        self.scr_text2.configure(state="normal")
        self.scr_text2.delete("1.0", tk.END)
        for i in favSeries:
            if i != "":
                currr.execute("SELECT name FROM Series WHERE mid=?", [i])
                rec = currr.fetchone()
                print(rec[0])
                self.scr_text2.insert(tk.INSERT, rec[0] + "\n")
        self.scr_text2.configure(state="disabled")
        connn.close()
    def list_favoritesTR(self):
        """Same as list_favorites but reads the Turkish title tables."""
        con = sqlite3.connect("Users.db")
        cur = con.cursor()
        cur.execute("SELECT favs FROM Users WHERE username=?", [self.current_user])
        rec = cur.fetchone()
        favSeries = rec[0].split(",")
        cur.execute("SELECT favsMovies FROM Users WHERE username=?", [self.current_user])
        rec = cur.fetchone()
        favMovies = rec[0].split(",")
        con.close()
        conn = sqlite3.connect("Movies.db")
        curr = conn.cursor()
        self.scr_text1.configure(state="normal")
        self.scr_text1.delete("1.0", tk.END)
        for i in favMovies:
            if i != "":
                curr.execute("SELECT name FROM MoviesTr WHERE mid=?", [i])
                rec = curr.fetchone()
                self.scr_text1.insert(tk.INSERT, rec[0] + "\n")
        self.scr_text1.configure(state="disabled")
        conn.close()
        connn = sqlite3.connect("Series.db")
        currr = connn.cursor()
        self.scr_text2.configure(state="normal")
        self.scr_text2.delete("1.0", tk.END)
        for i in favSeries:
            if i != "":
                currr.execute("SELECT name FROM SeriesTr WHERE mid=?", [i])
                rec = currr.fetchone()
                print(rec[0])
                self.scr_text2.insert(tk.INSERT, rec[0] + "\n")
        self.scr_text2.configure(state="disabled")
        connn.close()
    def goToMoviePage(self):
        # Open the movies window for the current user.
        app = moviePage(self.current_user,self)
        app.win.mainloop()
    def goToSeriesPage(self):
        # Open the series window for the current user.
        app = seriesPage(self.current_user,self)
        app.win.mainloop()
    def create_widgets(self):
        """Build the static layout and populate the favorites lists."""
        self.btn_movies = ttk.Button(self.win, text=self.i18n.main_page_movies_button, command=self.goToMoviePage)
        self.btn_movies.grid(column=0, row=0, padx=15, pady=5)
        self.btn_tv_series = ttk.Button(self.win, text=self.i18n.main_page_series_button, command=self.goToSeriesPage)
        self.btn_tv_series.grid(column=1, row=0, padx=15, pady=5)
        self.lbl_fav1 = ttk.Label(self.win, text=self.i18n.main_page_movies_fav)
        self.lbl_fav1.grid(column=0, row=1, padx=5, pady=2)
        self.lbl_fav2 = ttk.Label(self.win, text=self.i18n.main_page_series_fav)
        self.lbl_fav2.grid(column=1, row=1, padx=5, pady=2)
        self.scr_text1 = scrolledtext.ScrolledText(self.win, width=30, height=12, wrap=tk.WORD)
        self.scr_text1.grid(column=0, row=2, padx=5, pady=2)
        self.scr_text2 = scrolledtext.ScrolledText(self.win, width=30, height=12, wrap=tk.WORD)
        self.scr_text2.grid(column=1, row=2, padx=5, pady=2)
        self.btn_logoff = ttk.Button(self.win, text=self.i18n.main_page_logoff_button, command=self.win.destroy)
        self.btn_logoff.grid(column=0, row=3, columnspan=2, pady=10)
        # Pick the favorites source matching the global language setting.
        if settings.language =="en":
            self.list_favorites()
        else:
            self.list_favoritesTR()
|
23,974 | cacfe1e2fe2bc77121072697bf015d1c2bf72aeb | import logging
from augur.application.db.session import DatabaseSession
from augur.application.db.engine import DatabaseEngine
from augur.application.config import AugurConfig
def get_redis_conn_values():
    """Read Redis settings from the Augur configuration.

    Returns:
        tuple: (redis_db_number, redis_conn_string); the connection string
        always ends with a trailing "/".
    """
    logger = logging.getLogger(__name__)
    with DatabaseEngine() as engine, DatabaseSession(logger, engine) as session:
        augur_config = AugurConfig(logger, session)
        # Each configured cache group maps to a block of three Redis DBs
        # (hence the *3) -- inferred from usage; confirm against callers.
        db_number = augur_config.get_value("Redis", "cache_group") * 3
        conn_string = augur_config.get_value("Redis", "connection_string")
        if conn_string[-1] != "/":
            conn_string += "/"
        return db_number, conn_string
def get_rabbitmq_conn_string():
    """Fetch the RabbitMQ connection string from the Augur configuration."""
    logger = logging.getLogger(__name__)
    with DatabaseEngine() as engine, DatabaseSession(logger, engine) as session:
        augur_config = AugurConfig(logger, session)
        return augur_config.get_value("RabbitMQ", "connection_string")
|
23,975 | 9930239385e13f4f1589c83898ca4937c4ec373e | #!/usr/bin/env python
"""
This code initializes all the methods of the str17_serial_control script but does nothing when called.
Used for debugging purposes.
"""
import rospy
class StArm():
    """No-op stand-in for the real ST Robotics arm driver.

    Mirrors the public API of the str17_serial_control script so callers can
    run without hardware; every command method is a no-op (see module
    docstring).
    """
    def __init__(self):
        # Register this fake arm as its own ROS node.
        rospy.init_node('robot_fake_arm', anonymous=True)
        # NOTE(review): `String` is never imported in this file -- the real
        # driver presumably does `from std_msgs.msg import String`. As
        # written, constructing StArm raises NameError; confirm and add the
        # missing import.
        self.pub = rospy.Publisher('arm_debug', String, queue_size=10)
    def initial_calibration(self):
        pass
    def purge(self):
        pass
    def roboforth(self):
        pass
    def decimal(self):
        pass
    def start(self):
        pass
    def execute_command(self, cmd):
        pass
    def continuous(self):
        pass
    def segmented(self):
        pass
    def joint(self):
        pass
    def create_route(self, route_name, commands, debug = False):
        pass
    def calibrate(self):
        pass
    def home(self):
        pass
    def cartesian(self, block=False):
        pass
    def block_on_result(self, cmd, debug=False):
        pass
    def get_status(self):
        pass
    def get_speed(self):
        pass
    def set_speed(self, speed):
        pass
    def set_point(self, name):
        pass
    def get_accel(self):
        pass
    def set_accel(self, accel):
        pass
    def run_route(self, route):
        pass
    def move_to(self, x, y, z, debug=False, block=True):
        pass
    def rotate_wrist(self, roll):
        pass
    def rotate_wrist_rel(self, roll_inc):
        pass
    def rotate_hand(self, pitch):
        pass
    def rotate_elbow(self, pitch):
        pass
    def rotate_shoulder(self, pitch):
        pass
    def rotate_waist(self, pitch):
        pass
    def rotate_waist_rel(self, pitch):
        pass
    def rotate_hand_rel(self, pitch_inc):
        pass
    def move_hand(self, roll):
        pass
    def energize(self):
        pass
    def de_energize(self):
        pass
    def where(self):
        pass
    def check_if_done(self):
        pass
    def dummy(self):
        pass
    def lock_wrist_angle(self,TF = True):
        pass
|
23,976 | 90d52f57d3faf673c1277d94acfd10f7fa0ae5c3 | def hanoi(n, a='A', b='B', c='C'):
'''汉诺塔'''
if n == 1:
print('move', a, '-->', c)
return
# 要把A的前n-1个饼全移到B,A剩一个饼,再把这个饼移到C
hanoi(n-1, a, c, b)
print('move', a, '-->', c)
# 现在把B看作第一个塔,问题就变成了把B的所有饼移到C,形成递归
hanoi(n-1, b, a, c)
if __name__ == '__main__':
    # Demo entry point. hanoi() prints each move itself and returns None,
    # so wrapping the call in print() only appended a stray "None" line.
    hanoi(5)
'''Stack Version
from Stack import Stack
def HanoiWithStack(n, a, b, c):
if a.size() == 0:
for i in range(n, 0, -1):
a.push(i)
if n == 1:
c.push(a.pop())
return
HanoiWithStack(n - 1, a, c, b)
c.push(a.pop())
HanoiWithStack(n - 1, b, a, c)
if __name__ == '__main__':
a = Stack()
b = Stack()
c = Stack()
HanoiWithStack(5, a, b, c)
for i in range(c.size()):
print(c.pop())
''' |
23,977 | 8203e87e7a93bf12e9af6748f35daf2d38e5c187 | #MYLIST IS A VARIABLEIN WHICH WE ARE GOING TO PERFORM METHODS
mylist=['ram','sita','meeta']
mylist.insert(1,'vani')#this method will insert "vani" at 1 index value
mylist.remove('ram')#it will remove "ram from the list"
mylist.pop()#it will remove the last character
mylist.sort()#it will sort all the characters in alphabetical form
mylist.reverse()#it will reverse the order
print(mylist.index('sita'))#it will tell the index value of 'sita'
python=mylist.copy()#it shift the characters from mylist to python
print(mylist)
print(python)
python.pop(-1)#it will pop the character at the -1 index
new=['jasmine','serena','taylor']
python.extend(new)#it will extend mylist by adding the characters in
#variable new
print(python)
|
23,978 | d7ca099870dc26271dd015b0578afb0b6c5dc1af | import sys
import traceback
class GridMarketsError(Exception):
    """Root of the API error hierarchy for this package."""

    def __init__(self, message=None, http_status=None):
        super(GridMarketsError, self).__init__(message)
        self._message = message
        self._http_status = http_status
        # Best-effort capture of the traceback of any exception currently
        # being handled; None when extraction is not possible.
        try:
            self.traceback = traceback.extract_tb(sys.exc_info()[2])
        except AttributeError:
            self.traceback = None

    def __str__(self):
        if self._message:
            return self._message
        return '<empty message>'

    @property
    def user_message(self):
        """The raw message suitable for showing to a user (may be None)."""
        return self._message

    def __repr__(self):
        return "%s(message=%r, http_status=%r)" % (
            self.__class__.__name__, self._message, self._http_status)
class AuthenticationError(GridMarketsError):
    """Raised when API credentials are missing or rejected.

    The previous ``__init__`` forwarded its arguments to the base class
    unchanged, so the redundant override was removed; construction
    behaviour is identical.
    """
class APIError(GridMarketsError):
    """Generic error reported by the remote API."""
class InsufficientCreditsError(GridMarketsError):
    """Raised when the account balance cannot cover the request.

    Carries no HTTP status; only a message.
    """

    def __init__(self, message=None):
        super(InsufficientCreditsError, self).__init__(message)
class InvalidRequestError(GridMarketsError):
    """Raised for malformed requests; carries a dict of per-field errors."""

    def __init__(self, message=None, errors=None):
        super(InvalidRequestError, self).__init__(message)
        # Always expose a dict, even when no detailed errors were supplied.
        self.errors = errors or {}
|
23,979 | ce07369eb4dbe5a13879c3d626d45d2f2350708d | import json
import csv
import re
import tweepy as tw
from tweepy import OAuthHandler
from textblob import TextBlob
import pandas as pd
from textblob.sentiments import NaiveBayesAnalyzer
from datetime import datetime
from datetime import date
from airflow import DAG
from airflow.operators.python_operator import PythonOperator
# Settings shared by every task in this DAG.
default_args = {
    'owner': 'airflow',
    'depends_on_past': False,
    'start_date': datetime(2021, 1, 1)
}

# Hourly pipeline: fetch tweets -> sentiment analysis -> store CSVs.
dag = DAG(
    'tweets-DAG',
    default_args=default_args,
    description='Fetch happiness data from API',
    schedule_interval='@hourly',
)
def fetch_data(**kwargs):
    """Fetch 20 recent English tweets from a Swedish and an Egyptian region.

    Returns:
        tuple(str, str): (happy_tweets_json, sad_tweets_json) -- the two
        DataFrames serialised with ``to_json`` for XCom transport.
    """
    # SECURITY: live Twitter API credentials are hard-coded in source.
    # They should be revoked and moved to an Airflow connection / secret
    # backend instead of being committed here.
    consumerKey = 'PSFNqjQ7vlekOlMlCusutTxky'
    consumerSecret = 'VpyKj7IEQFOzgqmr0FtKIVC8vBIrN98GDY574wRIYYOK4ZPJ9k'
    accessToken = '947064703334912000-dFx773rzq4R0eQydcM3XWmdilXAhaQf'
    accessTokenSecret = 'qfAlla1A9WkhS6465ozv6wa6bl1apC7rbswc6v4eXL8h9'
    auth = OAuthHandler(consumerKey, consumerSecret)
    auth.set_access_token(accessToken, accessTokenSecret)
    api = tw.API(auth)
    # Sweden -- treated as the "happy" sample by this pipeline.
    happy_tweets = tw.Cursor(api.search,geocode="57.7118,11.98868,200km" , q= "-filter:retweets, safe", lang ="en", since='2021-01-01' ).items(20)
    # Egypt -- treated as the "sad" sample.
    sad_tweets = tw.Cursor(api.search,geocode="27.19595,33.82958,200km" , q= "-filter:retweets, safe", lang ="en", since='2021-01-01' ).items(20)
    # One row per tweet: fetch timestamp, author, text, author location.
    happy_users_locs = [[datetime.now(),tweet.user.screen_name, tweet.text ,tweet.user.location] for tweet in happy_tweets]
    sad_users_locs = [[datetime.now(),tweet.user.screen_name, tweet.text ,tweet.user.location] for tweet in sad_tweets]
    happy_tweets_df = pd.DataFrame(data=happy_users_locs,
                                   columns=['Timestamp','User','Text','Location'])
    sad_tweets_df = pd.DataFrame(data=sad_users_locs,
                                 columns=['Timestamp','User','Text','Location'])
    # Serialise so the frames can travel through XCom to the next task.
    happy_tweets_json = happy_tweets_df.to_json()
    sad_tweets_json = sad_tweets_df.to_json()
    return happy_tweets_json, sad_tweets_json
def _clean_tweet(text):
    """Strip @mentions, URLs and other non-alphanumeric noise from a tweet."""
    return ' '.join(re.sub("(@[A-Za-z0-9]+)|([^0-9A-Za-z \t])|(\w+:\/\/\S+)", " ", str(text)).split())


def _polarity_scores(texts):
    """Return (average polarity, per-tweet polarity list) for *texts*.

    The average is 0 when *texts* is empty (matching the previous
    behaviour of leaving the accumulator untouched).
    """
    scores = [TextBlob(_clean_tweet(text)).sentiment.polarity for text in texts]
    average = sum(scores) / len(scores) if scores else 0
    return average, scores


def sentiment_analysis(**context):
    """Score the fetched tweets with TextBlob polarity.

    Pulls the two JSON frames produced by the ``fetch`` task, attaches a
    per-tweet ``Sentiment`` column to each, and returns
    ``(avg_sweden, avg_egypt, happy_json, sad_json)``.

    Bug fix: both averages were previously divided by
    ``len(tweets_Happy)`` -- the *character length of the Swedish JSON
    string* -- instead of the number of tweets scored (the Egyptian
    average also wrongly used the Swedish denominator). They are now true
    means over the scored tweets, computed once instead of on every loop
    iteration.
    """
    tweets_Happy, tweets_Sad = context['task_instance'].xcom_pull(task_ids='fetch')
    tweets_Happy_df = pd.DataFrame(json.loads(tweets_Happy))
    tweets_Sad_df = pd.DataFrame(json.loads(tweets_Sad))

    average_sentiment_sweden, sweden_scores = _polarity_scores(tweets_Happy_df["Text"])
    tweets_Happy_df['Sentiment'] = sweden_scores

    average_sentiment_egypt, egypt_scores = _polarity_scores(tweets_Sad_df["Text"])
    tweets_Sad_df['Sentiment'] = egypt_scores

    return average_sentiment_sweden, average_sentiment_egypt, tweets_Happy_df.to_json(), tweets_Sad_df.to_json()
def store_data(**context):
    """Persist the analysed tweet frames as CSV files on the Airflow host."""
    pulled = context['task_instance'].xcom_pull(task_ids='analyze_tweets')
    # Only the two JSON frames are stored; the averages are ignored here.
    average_sentiment_sweden, average_sentiment_egypt, sweden_json, egypt_json = pulled
    pd.DataFrame(json.loads(sweden_json)).to_csv("/root/airflow/dags/sweden-sent.csv")
    pd.DataFrame(json.loads(egypt_json)).to_csv("/root/airflow/dags/egypt-sent.csv")
# Task 1: pull raw tweets from the Twitter API.
fetch = PythonOperator(
    task_id='fetch',
    python_callable=fetch_data,
    provide_context=True,
    dag=dag
)

# Task 2: attach TextBlob sentiment scores.
analyze_tweets = PythonOperator(
    task_id='analyze_tweets',
    python_callable=sentiment_analysis,
    provide_context=True,
    dag=dag
)

# Task 3: write the scored frames to CSV.
store = PythonOperator(
    task_id='store',
    python_callable=store_data,
    provide_context=True,
    dag=dag
)

# Linear pipeline: fetch -> analyze_tweets -> store.
fetch >> analyze_tweets
analyze_tweets >> store
|
23,980 | e28dd22c9a2265c67d0749415685ba4db93d18ca | import discord
import time
class Voice_User():
    """A Discord member being tracked inside a voice channel.

    Attributes
    -----------
    user: :class:`User`
        The discord user.
    channel: :class:`VoiceChannel`
        The discord voice channel the user is connected to.
    time_alone_start: :class:`float`
        The system time at which the member started being alone.
    is_alone: :class:`bool`
        Whether the user is alone in the channel.
    """

    user: discord.User
    channel: discord.VoiceChannel
    time_alone_start: float
    is_alone: bool

    def __init__(self):
        # A freshly tracked user starts in company: no alone-timer running.
        self.is_alone = False
        self.time_alone_start = 0
|
23,981 | 58fa7289d6e73af711bdc782e759f271a7794db6 | #!/usr/bin/env python
# -*- coding: UTF-8 -*-
import sys
import os
sys.path.append(os.path.abspath("./"))
import time
import mnist_loader
import network
import numpy as np
print "开始训练,较耗时,请稍等。。。"
training_data, validation_data, test_data = mnist_loader.load_data_wrapper()
# 784 个输入神经元,一层隐藏层,包含 30 个神经元,输出层包含 10 个神经元
net = network.Network([784, 30, 10])
t = time.time()
net.SGD(training_data, 1, 10, 3.0, test_data = test_data)
print("time SGD: {1} / {0}".format(time.time() - t, net.delta_t))
# Epoch 0: 9038 / 10000
# Epoch 1: 9178 / 10000
# Epoch 2: 9231 / 10000
# ...
# Epoch 27: 9483 / 10000
# Epoch 28: 9485 / 10000
# Epoch 29: 9477 / 10000
# bp = network.Network([2, 3, 2])
# bp.weights = [np.array([(0.1, 0.2), (0.2, 0.3), (0.3, 0.4)]), np.array([(0.5, 0.6, 0.7), (1.0, 1.0, 1.0)])]
# bp.biases = [np.array([(0.3,), (0.4,), (0.5,)]), np.array([(0.2,), (1.0,)])]
# inputs = [np.array([(1,), (2,)])]
# ds = [np.array([(0.5,), (0.2,)])]
# training_data = zip(inputs, ds)
# bp.SGD(training_data, 10, 1, 0.5)
# print(bp.weights)
# print(bp.biases)
# print("------------------------------")
# res = bp.feedforward(np.array([(1,), (2,)]))
# print(res)
# print("******************************")
|
23,982 | a014043cb6a4f501bb6b12bde0840d589ce32a50 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import time
import pickle
import libs.stage1_funcs as s1f
import libs.stage1_classes as s1c
import libs.motif_filter as mtf
import libs.clustering as clf
import subprocess
def main(args):
    """Run stage 1 of the pipeline: motif-filter reads, cluster them, BLAST
    the consensus sequences for V/D/J segments, and write result files.

    Args:
        args: docopt-style dict of CLI options (``<id>``, ``<fastq>``,
              ``--quiet``, ``--top``, ...) -- presumably parsed by the
              caller; confirm against the CLI wrapper.

    Returns:
        0 on success, 1 when fewer than 2 clusters were produced.
    """
    # NOTE(review): start_time is captured but never used; the DEBUG size
    # checks below print unconditionally even in --quiet mode.
    start_time = time.time()
    # Print start time
    if not args['--quiet']:
        print "\n{1} Job name {0} ---------------".format(args['<id>'],
            time.strftime('%H:%M:%S', time.localtime()))
    # Prepare folders for analysis
    s1f.prepare_folders(args)
    # Write parameters to log file
    s1f.log_parameters('./results/{0}/{0}.log'.format(args['<id>']), args)
    #
    # Filter forward and reverse reads down to those containing motif, and
    # splice clone-specific subsequence
    #
    # Print progress to stdout
    if not args['--quiet']:
        s1f.print_timestamp('Searching reads for motif...')
    # Execute motif filter
    reads_path, total_input_seqs = mtf.filter_reads(
        args['<fastq>'],
        args['<id>'],
        './tmp/{0}'.format(args['<id>']),
        args['--r'],
        args['--germ'],
        args['--ab'],
        args['--bb'],
        args['--m'],
        './results/{0}/{0}.log'.format(args['<id>']))
    #
    # Clustering: dereplication, clustering and consensus calculation
    #
    if not args['--quiet']:
        s1f.print_timestamp('Clustering reads...')
    clus_start_time = time.time()
    # Dereplicate the reads
    record_list, num_unique_seqs, total_filtered_reads = \
        clf.dereplication(reads_path, args['--min_dup'])
    record_list_len = len(record_list)
    # DEBUG **********************
    record_tot_size = 0
    for rec in record_list:
        record_tot_size += rec['size']
    print 'Total size check 1:', record_tot_size
    # Do clustering (on a copy, since record_list is deleted right after)
    clusters = clf.cluster(record_list[:],
                           args['--identity'])
    del record_list
    num_clusters = len(clusters)
    # DEBUG **********************
    record_tot_size = 0
    for key in clusters:
        for rec in clusters[key]:
            record_tot_size += rec['size']
    print 'Total size check 2:', record_tot_size
    # Convert cluster groups into an ordered list of consensus records
    consensus_list = clf.clusters_to_consensus(clusters)
    del clusters
    # DEBUG **********************
    record_tot_size = 0
    for consen in consensus_list:
        record_tot_size += consen['size']
    print 'Total size check 3:', record_tot_size
    # Write clustering stats to log file
    clf.log_stats('./results/{0}/{0}.log'.format(args['<id>']),
                  total_filtered_reads,
                  num_unique_seqs,
                  record_list_len,
                  num_clusters,
                  clus_start_time)
    # Check that more than 1 cluster exists (else BLAST will crash)
    if not num_clusters > 1:
        print 'Warning: Less than 2 clusters were produced. Exiting!'
        return 1
    #
    # Initiate records class, load clusters and write target fastas
    #
    if not args['--quiet']:
        s1f.print_timestamp('Writing top clusters...')
    cluster_records = s1c.Records()
    cluster_records.add_records_from_clustering(consensus_list)
    del consensus_list
    # Write total number of reads in input fastq to Records class for use in
    # sample comparison
    cluster_records.total_input_seqs = total_input_seqs
    # DEBUG ****************************
    print 'Total size check 4:', cluster_records.total_clusters_size
    # Write the top N clusters to fasta for BLASTing
    cluster_records.write_top_to_fasta(
        args['--top'],
        './tmp/{0}/{0}.top'.format(args['<id>']),
        size_out=False)
    # Write all clusters to fasta in results folder
    cluster_records.write_top_to_fasta(
        None,
        './results/{0}/{0}.cons'.format(args['<id>']),
        size_out=True)
    #
    # BLAST consensus sequences for V, D and J hits.
    #
    if not args['--quiet']:
        s1f.print_timestamp('BLASTing segments...')
    # BLAST D regions
    s1f.run_D_blast('./tmp/{0}/{0}.top'.format(args['<id>']), args['--t'])
    # BLAST J regions
    s1f.run_J_blast('./tmp/{0}/{0}.top'.format(args['<id>']), args['--t'])
    # BLAST V regions
    s1f.run_V_blast('./tmp/{0}/{0}.top'.format(args['<id>']), args['--t'])
    if not args['--quiet']:
        s1f.print_timestamp('Parsing BLAST results...')
    # Parse the D and J segment hits from XML file. J must be done before D.
    # NB that J and V must be done before D
    for segment in ['J', 'V', 'D']:
        in_file = './tmp/{0}/{0}.top_{1}.blast'.format(args['<id>'], segment)
        cluster_records.add_VDJ_BLAST_xml(in_file,
                                          args['--e'],
                                          args['--t'],
                                          segment)
    #
    # Write results files
    #
    if not args['--quiet']:
        s1f.print_timestamp('Writing outputs...')
    # Write the .dat summary file (pickled Records object)
    with open('./results/{0}/{0}.dat'.format(args['<id>']), 'w') as out_handle:
        pickle.dump(cluster_records, out_handle)
    # Write the .tab summary file
    tab_file = './results/{0}/{0}.tab'.format(args['<id>'])
    cluster_records.write_tabulated(tab_file, args['--top'])
    # Write the detailed summary report
    cluster_records.write_detail_output(
        './results/{0}/{0}.detail'.format(args['<id>']),
        args['--top'])
    # Try to make bubble plot using Rscript (best-effort; failure only logs)
    cmd = ["Rscript", "extras/make-bubbleplot.R", tab_file]
    ret = subprocess.call(cmd)
    if ret == 0:
        print("Bubble plot done.")
    else:
        print("Failed to make bubble plot.")
    return 0
# Deliberately a no-op: this module is imported and main(args) is invoked
# by an external runner -- presumably a CLI wrapper that builds ``args``;
# TODO confirm.
if __name__ == '__main__':
    pass
|
23,983 | ff46c3affcf7e16fe6ff7c282d565a369d3aa1f6 | '''
Created on Jan 14, 2013
@package: security
@copyright: 2012 Sourcefabric o.p.s.
@license: http://www.gnu.org/licenses/gpl-3.0.txt
@author: Gabriel Nistor
API specifications for security right type.
'''
from .domain_security import modelSecurity
from ally.api.config import service, call
from ally.support.api.entity import Entity, IEntityNQService
# --------------------------------------------------------------------
@modelSecurity
class RightType(Entity):
    '''
    Provides the right type data.
    '''
    # Unique name identifying the right type.
    Name = str
    # Human-readable description of the right type.
    Description = str
# --------------------------------------------------------------------
# No query
# --------------------------------------------------------------------
@service((Entity, RightType))
class IRightTypeService(IEntityNQService):
    '''
    Right type model service interface (no query support -- see section
    comment above).
    '''

    @call
    def getByName(self, name: RightType.Name) -> RightType:
        '''
        Provides the right type based on a provided name.
        '''
|
23,984 | ad27b1e8735746d48106d5564f51a34dc8239351 | __version__ = '3.0.2'
__version_info__ = (3, 0, 2)
# import cgco
# import pygco
try:
from pygco import *
except:
from gco.pygco import * |
23,985 | 378fed1f44d8cbca79a4bb679052dac7576bc7a5 | import unittest
import ray
import numpy as np
import time
import string
import sys
from collections import namedtuple
import libnumbuf
import test_functions
import ray.array.remote as ra
import ray.array.distributed as da
def assert_equal(obj1, obj2):
    """Recursively assert that obj1 and obj2 are equal.

    Handles numpy arrays/scalars, objects with __dict__, dicts, lists and
    tuples; everything else falls through to ==. Python 2 only: relies on
    list-returning dict.keys() (``.keys() + special_keys``).
    """
    if type(obj1).__module__ == np.__name__ or type(obj2).__module__ == np.__name__:
        if (hasattr(obj1, "shape") and obj1.shape == ()) or (hasattr(obj2, "shape") and obj2.shape == ()):
            # This is a special case because currently np.testing.assert_equal fails
            # because we do not properly handle different numerical types.
            assert obj1 == obj2, "Objects {} and {} are different.".format(obj1, obj2)
        else:
            np.testing.assert_equal(obj1, obj2)
    elif hasattr(obj1, "__dict__") and hasattr(obj2, "__dict__"):
        # "_pytype_" is serialization metadata and must not affect equality.
        special_keys = ["_pytype_"]
        assert set(obj1.__dict__.keys() + special_keys) == set(obj2.__dict__.keys() + special_keys), "Objects {} and {} are different.".format(obj1, obj2)
        for key in obj1.__dict__.keys():
            if key not in special_keys:
                assert_equal(obj1.__dict__[key], obj2.__dict__[key])
    elif type(obj1) is dict or type(obj2) is dict:
        assert_equal(obj1.keys(), obj2.keys())
        for key in obj1.keys():
            assert_equal(obj1[key], obj2[key])
    elif type(obj1) is list or type(obj2) is list:
        assert len(obj1) == len(obj2), "Objects {} and {} are lists with different lengths.".format(obj1, obj2)
        for i in range(len(obj1)):
            assert_equal(obj1[i], obj2[i])
    elif type(obj1) is tuple or type(obj2) is tuple:
        assert len(obj1) == len(obj2), "Objects {} and {} are tuples with different lengths.".format(obj1, obj2)
        for i in range(len(obj1)):
            assert_equal(obj1[i], obj2[i])
    else:
        assert obj1 == obj2, "Objects {} and {} are different.".format(obj1, obj2)
# Scalar/array values exercised by the serialization round-trip tests.
# Python 2 only: uses long literals (0L, 1L << 62) and u"" strings.
PRIMITIVE_OBJECTS = [0, 0.0, 0.9, 0L, 1L << 62, "a", string.printable, "\u262F",
                     u"hello world", u"\xff\xfe\x9c\x001\x000\x00", None, True,
                     False, [], (), {}, np.int8(3), np.int32(4), np.int64(5),
                     np.uint8(3), np.uint32(4), np.uint64(5), np.float32(1.9),
                     np.float64(1.9), np.zeros([100, 100]),
                     np.random.normal(size=[100, 100]), np.array(["hi", 3]),
                     np.array(["hi", 3], dtype=object),
                     np.array([["hi", u"hi"], [1.3, 1L]])]
# Nested containers; the commented entries are known-problematic cases
# that are currently disabled.
COMPLEX_OBJECTS = [#[[[[[[[[[[[[]]]]]]]]]]]],
                   {"obj{}".format(i): np.random.normal(size=[100, 100]) for i in range(10)},
                   #{(): {(): {(): {(): {(): {(): {(): {(): {(): {(): {(): {(): {}}}}}}}}}}}}},
                   #((((((((((),),),),),),),),),),
                   #{"a": {"b": {"c": {"d": {}}}}}
                   ]
class Foo(object):
    """Empty fixture object used in serialization round-trip tests."""

    def __init__(self):
        pass
class Bar(object):
    """Fixture with one attribute per entry of the test-object sets."""

    def __init__(self):
        for index, value in enumerate(PRIMITIVE_OBJECTS + COMPLEX_OBJECTS):
            setattr(self, "field{}".format(index), value)
class Baz(object):
    """Composite fixture holding a Foo and a Bar."""

    def __init__(self):
        self.foo = Foo()
        self.bar = Bar()

    def method(self, arg):
        """No-op placeholder method."""
        pass
class Qux(object):
    """Fixture aggregating one instance of each of the other fixtures."""

    def __init__(self):
        self.objs = [Foo(), Bar(), Baz()]
class SubQux(Qux):
    """Subclass fixture; construction is identical to Qux."""

    def __init__(self):
        Qux.__init__(self)
class CustomError(Exception):
    """User-defined exception type used to test exception serialization."""
# Namedtuple fixtures.
Point = namedtuple("Point", ["x", "y"])
NamedTupleExample = namedtuple("Example", "field1, field2, field3, field4, field5")

# Instances of the user-defined fixture classes (Qux/SubQux disabled).
CUSTOM_OBJECTS = [Exception("Test object."), CustomError(), Point(11, y=22),
                  Foo(), Bar(), Baz(), # Qux(), SubQux(),
                  NamedTupleExample(1, 1.0, "hi", np.zeros([3, 5]), [1, 2, 3])]
# Combined pools: raw objects, then each wrapped in a list/tuple/dict.
BASE_OBJECTS = PRIMITIVE_OBJECTS + COMPLEX_OBJECTS + CUSTOM_OBJECTS
LIST_OBJECTS = [[obj] for obj in BASE_OBJECTS]
TUPLE_OBJECTS = [(obj,) for obj in BASE_OBJECTS]
# The check that type(obj).__module__ != "numpy" should be unnecessary, but
# otherwise this seems to fail on Mac OS X on Travis.
DICT_OBJECTS = ([{obj: obj} for obj in PRIMITIVE_OBJECTS if obj.__hash__ is not None and type(obj).__module__ != "numpy"] +
                # DICT_OBJECTS = ([{obj: obj} for obj in BASE_OBJECTS if obj.__hash__ is not None] +
                [{0: obj} for obj in BASE_OBJECTS])
RAY_TEST_OBJECTS = BASE_OBJECTS + LIST_OBJECTS + TUPLE_OBJECTS + DICT_OBJECTS

# Check that the correct version of cloudpickle is installed: old versions
# raise AttributeError when asked to pickle a namedtuple class.
try:
    import cloudpickle
    cloudpickle.dumps(Point)
except AttributeError:
    cloudpickle_command = "sudo pip install --upgrade git+git://github.com/cloudpipe/cloudpickle.git@0d225a4695f1f65ae1cbb2e0bbc145e10167cce4"
    raise Exception("You have an older version of cloudpickle that is not able to serialize namedtuples. Try running \n\n{}\n\n".format(cloudpickle_command))
class SerializationTest(unittest.TestCase):
    """Serialization edge cases (Python 2 test suite)."""

    def testRecursiveObjects(self):
        """Self-referential objects must raise when put into the store."""
        ray.init(start_ray_local=True, num_workers=0)

        class ClassA(object):
            pass
        ray.register_class(ClassA)
        # Make a list that contains itself.
        l = []
        l.append(l)
        # Make an object that contains itself as a field.
        a1 = ClassA()
        a1.field = a1
        # Make two objects that contain each other as fields.
        a2 = ClassA()
        a3 = ClassA()
        a2.field = a3
        a3.field = a2
        # Make a dictionary that contains itself.
        d1 = {}
        d1["key"] = d1
        # Create a list of recursive objects.
        recursive_objects = [l, a1, a2, a3, d1]
        # Check that exceptions are thrown when we serialize the recursive objects.
        for obj in recursive_objects:
            self.assertRaises(Exception, lambda : ray.put(obj))
        ray.worker.cleanup()
class ObjStoreTest(unittest.TestCase):
    # Test setting up object stores, transfering data between them and retrieving data to a client
    def testObjStore(self):
        """Round-trip every test object through one and across two stores."""
        node_ip_address = "127.0.0.1"
        scheduler_address = ray.services.start_ray_local(num_objstores=2, num_workers=0, worker_path=None)
        ray.connect(node_ip_address, scheduler_address, mode=ray.SCRIPT_MODE)
        objstore_addresses = [objstore_info["address"] for objstore_info in ray.scheduler_info()["objstores"]]
        # One worker pinned to each of the two object stores.
        w1 = ray.worker.Worker()
        w2 = ray.worker.Worker()
        ray.reusables._cached_reusables = [] # This is a hack to make the test run.
        ray.connect(node_ip_address, scheduler_address, objstore_address=objstore_addresses[0], mode=ray.SCRIPT_MODE, worker=w1)
        ray.reusables._cached_reusables = [] # This is a hack to make the test run.
        ray.connect(node_ip_address, scheduler_address, objstore_address=objstore_addresses[1], mode=ray.SCRIPT_MODE, worker=w2)
        for cls in [Foo, Bar, Baz, Qux, SubQux, Exception, CustomError, Point, NamedTupleExample]:
            ray.register_class(cls)
        # putting and getting an object shouldn't change it
        for data in RAY_TEST_OBJECTS:
            objectid = ray.put(data, w1)
            result = ray.get(objectid, w1)
            assert_equal(result, data)
        # putting an object, shipping it to another worker, and getting it shouldn't change it
        for data in RAY_TEST_OBJECTS:
            objectid = ray.put(data, w1)
            result = ray.get(objectid, w2)
            assert_equal(result, data)
        # putting an object, shipping it to another worker, and getting it shouldn't change it
        for data in RAY_TEST_OBJECTS:
            objectid = ray.put(data, w2)
            result = ray.get(objectid, w1)
            assert_equal(result, data)
        # This test fails. See https://github.com/ray-project/ray/issues/159.
        # getting multiple times shouldn't matter
        # for data in [np.zeros([10, 20]), np.random.normal(size=[45, 25]), np.zeros([10, 20], dtype=np.dtype("float64")), np.zeros([10, 20], dtype=np.dtype("float32")), np.zeros([10, 20], dtype=np.dtype("int64")), np.zeros([10, 20], dtype=np.dtype("int32"))]:
        #   objectid = worker.put(data, w1)
        #   result = worker.get(objectid, w2)
        #   result = worker.get(objectid, w2)
        #   result = worker.get(objectid, w2)
        #   assert_equal(result, data)
        # Getting a buffer after modifying it before it finishes should return updated buffer
        objectid = ray.libraylib.get_objectid(w1.handle)
        buf = ray.libraylib.allocate_buffer(w1.handle, objectid, 100)
        buf[0][0] = 1
        ray.libraylib.finish_buffer(w1.handle, objectid, buf[1], 0)
        completedbuffer = ray.libraylib.get_buffer(w1.handle, objectid)
        self.assertEqual(completedbuffer[0][0], 1)
        # We started multiple drivers manually, so we will disconnect them manually.
        ray.disconnect(worker=w1)
        ray.disconnect(worker=w2)
        ray.worker.cleanup()
class WorkerTest(unittest.TestCase):
    """put/get round trips through a local Ray instance."""

    def testPutGet(self):
        """ray.get(ray.put(x)) must return x for ints, floats, strings and lists."""
        ray.init(start_ray_local=True, num_workers=0)
        for i in range(100):
            value_before = i * 10 ** 6
            objectid = ray.put(value_before)
            value_after = ray.get(objectid)
            self.assertEqual(value_before, value_after)
        for i in range(100):
            value_before = i * 10 ** 6 * 1.0
            objectid = ray.put(value_before)
            value_after = ray.get(objectid)
            self.assertEqual(value_before, value_after)
        for i in range(100):
            value_before = "h" * i
            objectid = ray.put(value_before)
            value_after = ray.get(objectid)
            self.assertEqual(value_before, value_after)
        for i in range(100):
            value_before = [1] * i
            objectid = ray.put(value_before)
            value_after = ray.get(objectid)
            self.assertEqual(value_before, value_after)
        ray.worker.cleanup()
class APITest(unittest.TestCase):
def testPassingArgumentsByValue(self):
ray.init(start_ray_local=True, num_workers=0)
# The types that can be passed by value are defined by
# is_argument_serializable in serialization.py.
class Foo(object):
pass
CAN_PASS_BY_VALUE = [1, 1L, 1.0, True, False, None, [1L, 1.0, True, None],
([1, 2, 3], {False: [1.0, u"hi", ()]}), 100 * ["a"]]
CANNOT_PASS_BY_VALUE = [int, np.int64(0), np.float64(0), Foo(), [Foo()],
(Foo()), {0: Foo()}, [[[int]]], 101 * [1],
np.zeros(10)]
for obj in CAN_PASS_BY_VALUE:
self.assertTrue(ray.serialization.is_argument_serializable(obj))
self.assertEqual(obj, ray.serialization.deserialize_argument(ray.serialization.serialize_argument_if_possible(obj)))
for obj in CANNOT_PASS_BY_VALUE:
self.assertFalse(ray.serialization.is_argument_serializable(obj))
self.assertEqual(None, ray.serialization.serialize_argument_if_possible(obj))
ray.worker.cleanup()
def testRegisterClass(self):
ray.init(start_ray_local=True, num_workers=0)
# Check that putting an object of a class that has not been registered
# throws an exception.
class TempClass(object):
pass
self.assertRaises(Exception, lambda : ray.put(Foo))
# Check that registering a class that Ray cannot serialize efficiently
# raises an exception.
self.assertRaises(Exception, lambda : ray.register_class(type(True)))
# Check that registering the same class with pickle works.
ray.register_class(type(float), pickle=True)
self.assertEqual(ray.get(ray.put(float)), float)
ray.worker.cleanup()
def testKeywordArgs(self):
reload(test_functions)
ray.init(start_ray_local=True, num_workers=1)
x = test_functions.keyword_fct1.remote(1)
self.assertEqual(ray.get(x), "1 hello")
x = test_functions.keyword_fct1.remote(1, "hi")
self.assertEqual(ray.get(x), "1 hi")
x = test_functions.keyword_fct1.remote(1, b="world")
self.assertEqual(ray.get(x), "1 world")
x = test_functions.keyword_fct2.remote(a="w", b="hi")
self.assertEqual(ray.get(x), "w hi")
x = test_functions.keyword_fct2.remote(b="hi", a="w")
self.assertEqual(ray.get(x), "w hi")
x = test_functions.keyword_fct2.remote(a="w")
self.assertEqual(ray.get(x), "w world")
x = test_functions.keyword_fct2.remote(b="hi")
self.assertEqual(ray.get(x), "hello hi")
x = test_functions.keyword_fct2.remote("w")
self.assertEqual(ray.get(x), "w world")
x = test_functions.keyword_fct2.remote("w", "hi")
self.assertEqual(ray.get(x), "w hi")
x = test_functions.keyword_fct3.remote(0, 1, c="w", d="hi")
self.assertEqual(ray.get(x), "0 1 w hi")
x = test_functions.keyword_fct3.remote(0, 1, d="hi", c="w")
self.assertEqual(ray.get(x), "0 1 w hi")
x = test_functions.keyword_fct3.remote(0, 1, c="w")
self.assertEqual(ray.get(x), "0 1 w world")
x = test_functions.keyword_fct3.remote(0, 1, d="hi")
self.assertEqual(ray.get(x), "0 1 hello hi")
x = test_functions.keyword_fct3.remote(0, 1)
self.assertEqual(ray.get(x), "0 1 hello world")
ray.worker.cleanup()
def testVariableNumberOfArgs(self):
reload(test_functions)
ray.init(start_ray_local=True, num_workers=1)
x = test_functions.varargs_fct1.remote(0, 1, 2)
self.assertEqual(ray.get(x), "0 1 2")
x = test_functions.varargs_fct2.remote(0, 1, 2)
self.assertEqual(ray.get(x), "1 2")
self.assertTrue(test_functions.kwargs_exception_thrown)
self.assertTrue(test_functions.varargs_and_kwargs_exception_thrown)
ray.worker.cleanup()
def testNoArgs(self):
reload(test_functions)
ray.init(start_ray_local=True, num_workers=1)
test_functions.no_op.remote()
time.sleep(0.2)
task_info = ray.task_info()
self.assertEqual(len(task_info["failed_tasks"]), 0)
self.assertEqual(len(task_info["running_tasks"]), 0)
ray.worker.cleanup()
def testDefiningRemoteFunctions(self):
ray.init(start_ray_local=True, num_workers=3)
# Test that we can define a remote function in the shell.
@ray.remote
def f(x):
return x + 1
self.assertEqual(ray.get(f.remote(0)), 1)
# Test that we can redefine the remote function.
@ray.remote
def f(x):
return x + 10
self.assertEqual(ray.get(f.remote(0)), 10)
# Test that we can close over plain old data.
data = [np.zeros([3, 5]), (1, 2, "a"), [0.0, 1.0, 2L], 2L, {"a": np.zeros(3)}]
@ray.remote
def g():
return data
ray.get(g.remote())
# Test that we can close over modules.
@ray.remote
def h():
return np.zeros([3, 5])
assert_equal(ray.get(h.remote()), np.zeros([3, 5]))
@ray.remote
def j():
return time.time()
ray.get(j.remote())
# Test that we can define remote functions that call other remote functions.
@ray.remote
def k(x):
return x + 1
@ray.remote
def l(x):
return ray.get(k.remote(x))
@ray.remote
def m(x):
return ray.get(l.remote(x))
self.assertEqual(ray.get(k.remote(1)), 2)
self.assertEqual(ray.get(l.remote(1)), 2)
self.assertEqual(ray.get(m.remote(1)), 2)
ray.worker.cleanup()
def testGetMultiple(self):
ray.init(start_ray_local=True, num_workers=0)
object_ids = [ray.put(i) for i in range(10)]
self.assertEqual(ray.get(object_ids), range(10))
ray.worker.cleanup()
def testWait(self):
ray.init(start_ray_local=True, num_workers=1)
@ray.remote
def f(delay):
time.sleep(delay)
return 1
objectids = [f.remote(1.0), f.remote(0.5), f.remote(0.5), f.remote(0.5)]
ready_ids, remaining_ids = ray.wait(objectids)
self.assertTrue(len(ready_ids) == 1)
self.assertTrue(len(remaining_ids) == 3)
ready_ids, remaining_ids = ray.wait(objectids, num_returns=4)
self.assertEqual(ready_ids, objectids)
self.assertEqual(remaining_ids, [])
objectids = [f.remote(0.5), f.remote(0.5), f.remote(0.5), f.remote(0.5)]
start_time = time.time()
ready_ids, remaining_ids = ray.wait(objectids, timeout=1.75, num_returns=4)
self.assertTrue(time.time() - start_time < 2)
self.assertEqual(len(ready_ids), 3)
self.assertEqual(len(remaining_ids), 1)
ray.wait(objectids)
objectids = [f.remote(1.0), f.remote(0.5), f.remote(0.5), f.remote(0.5)]
start_time = time.time()
ready_ids, remaining_ids = ray.wait(objectids, timeout=5)
self.assertTrue(time.time() - start_time < 5)
self.assertEqual(len(ready_ids), 1)
self.assertEqual(len(remaining_ids), 3)
ray.worker.cleanup()
def testCachingReusables(self):
# Test that we can define reusable variables before the driver is connected.
def foo_initializer():
return 1
def bar_initializer():
return []
def bar_reinitializer(bar):
return []
ray.reusables.foo = ray.Reusable(foo_initializer)
ray.reusables.bar = ray.Reusable(bar_initializer, bar_reinitializer)
@ray.remote
def use_foo():
return ray.reusables.foo
@ray.remote
def use_bar():
ray.reusables.bar.append(1)
return ray.reusables.bar
ray.init(start_ray_local=True, num_workers=2)
self.assertEqual(ray.get(use_foo.remote()), 1)
self.assertEqual(ray.get(use_foo.remote()), 1)
self.assertEqual(ray.get(use_bar.remote()), [1])
self.assertEqual(ray.get(use_bar.remote()), [1])
ray.worker.cleanup()
def testCachingFunctionsToRun(self):
    # Test that we export functions to run on all workers before the driver is connected.
    # The four registrations below must execute on every worker in the order
    # they were registered, even though they happen before ray.init.
    def f(worker):
        sys.path.append(1)
    ray.worker.global_worker.run_function_on_all_workers(f)
    def f(worker):
        sys.path.append(2)
    ray.worker.global_worker.run_function_on_all_workers(f)
    def g(worker):
        sys.path.append(3)
    ray.worker.global_worker.run_function_on_all_workers(g)
    def f(worker):
        sys.path.append(4)
    ray.worker.global_worker.run_function_on_all_workers(f)
    ray.init(start_ray_local=True, num_workers=2)
    @ray.remote
    def get_state():
        time.sleep(1)
        return sys.path[-4], sys.path[-3], sys.path[-2], sys.path[-1]
    res1 = get_state.remote()
    res2 = get_state.remote()
    # Both workers (one task each, thanks to the sleep) observed 1,2,3,4 in order.
    self.assertEqual(ray.get(res1), (1, 2, 3, 4))
    self.assertEqual(ray.get(res2), (1, 2, 3, 4))
    # Clean up the path on the workers.
    def f(worker):
        sys.path.pop()
        sys.path.pop()
        sys.path.pop()
        sys.path.pop()
    ray.worker.global_worker.run_function_on_all_workers(f)
    ray.worker.cleanup()
def testRunningFunctionOnAllWorkers(self):
    """run_function_on_all_workers applies to workers started after ray.init too."""
    ray.init(start_ray_local=True, num_workers=1)
    def f(worker):
        sys.path.append("fake_directory")
    ray.worker.global_worker.run_function_on_all_workers(f)
    @ray.remote
    def get_path():
        return sys.path
    self.assertEqual("fake_directory", ray.get(get_path.remote())[-1])
    # A second exported function can undo the first one's effect.
    def f(worker):
        sys.path.pop(-1)
    ray.worker.global_worker.run_function_on_all_workers(f)
    self.assertTrue("fake_directory" not in ray.get(get_path.remote()))
    ray.worker.cleanup()
def testComputationGraph(self):
    """Smoke test: building a small task graph and rendering it must not raise."""
    ray.init(start_ray_local=True, num_workers=1)
    @ray.remote
    def f(x):
        return x
    @ray.remote
    def g(x, y):
        return x, y
    # Mix ObjectID arguments and plain values so both edge kinds appear.
    a = f.remote(1)
    b = f.remote(1)
    c = g.remote(a, b)
    c = g.remote(a, 1)
    # Make sure that we can produce a computation_graph visualization.
    ray.visualize_computation_graph(view=False)
    ray.worker.cleanup()
class ReferenceCountingTest(unittest.TestCase):
    """Checks the scheduler's per-object reference counts as ObjectIDs and
    fetched values go in and out of scope.  A count of -1 marks a slot whose
    object has been deallocated."""

    def testDeallocation(self):
        reload(test_functions)
        for module in [ra.core, ra.random, ra.linalg, da.core, da.random, da.linalg]:
            reload(module)
        ray.init(start_ray_local=True, num_workers=1)

        def check_not_deallocated(object_ids):
            # Every listed object must still have a positive refcount.
            reference_counts = ray.scheduler_info()["reference_counts"]
            for object_id in object_ids:
                self.assertGreater(reference_counts[object_id.id], 0)

        def check_everything_deallocated():
            # All slots should read -1 (deallocated).
            reference_counts = ray.scheduler_info()["reference_counts"]
            self.assertEqual(reference_counts, len(reference_counts) * [-1])

        # Distributed array: dropping the driver handle frees all blocks.
        # The sleeps give the scheduler time to process refcount updates.
        z = da.zeros.remote([da.BLOCK_SIZE, 2 * da.BLOCK_SIZE])
        time.sleep(0.1)
        objectid_val = z.id
        time.sleep(0.1)
        check_not_deallocated([z])
        del z
        time.sleep(0.1)
        check_everything_deallocated()

        # Chained task results are freed one handle at a time.
        x = ra.zeros.remote([10, 10])
        y = ra.zeros.remote([10, 10])
        z = ra.dot.remote(x, y)
        objectid_val = x.id
        time.sleep(0.1)
        check_not_deallocated([x, y, z])
        del x
        time.sleep(0.1)
        check_not_deallocated([y, z])
        del y
        time.sleep(0.1)
        check_not_deallocated([z])
        del z
        time.sleep(0.1)
        check_everything_deallocated()

        # Blocks referenced through a fetched DistArray stay alive until the
        # top-level handle is dropped.
        z = da.zeros.remote([4 * da.BLOCK_SIZE])
        time.sleep(0.1)
        check_not_deallocated(ray.get(z).objectids.tolist())
        del z
        time.sleep(0.1)
        check_everything_deallocated()
        ray.worker.cleanup()

    def testGet(self):
        ray.init(start_ray_local=True, num_workers=3)
        for cls in [Foo, Bar, Baz, Qux, SubQux, Exception, CustomError, Point, NamedTupleExample]:
            ray.register_class(cls)
        # Remote objects should be deallocated when the corresponding ObjectID goes
        # out of scope, and all results of ray.get called on the ID go out of scope.
        for val in RAY_TEST_OBJECTS:
            x = ray.put(val)
            objectid = x.id
            xval = ray.get(x)
            del x, xval
            self.assertEqual(ray.scheduler_info()["reference_counts"][objectid], -1)
        # Remote objects that do not contain numpy arrays should be deallocated when
        # the corresponding ObjectID goes out of scope, even if ray.get has been
        # called on the ObjectID.
        for val in [True, False, None, 1, 1.0, 1L, "hi", u"hi", [1, 2, 3], (1, 2, 3), [(), {(): ()}]]:
            x = ray.put(val)
            objectid = x.id
            xval = ray.get(x)
            del x
            self.assertEqual(ray.scheduler_info()["reference_counts"][objectid], -1)
        # Remote objects that contain numpy arrays should not be deallocated when
        # the corresponding ObjectID goes out of scope, if ray.get has been called
        # on the ObjectID and the result of that call is still in scope.
        for val in [np.zeros(10), [np.zeros(10)], (((np.zeros(10)),),), {(): np.zeros(10)}, [1, 2, 3, np.zeros(1)]]:
            x = ray.put(val)
            objectid = x.id
            xval = ray.get(x)
            del x
            self.assertEqual(ray.scheduler_info()["reference_counts"][objectid], 1)
        # Getting an object multiple times should not be a problem. And the remote
        # object should not be deallocated until both of the results are out of scope.
        for val in [np.zeros(10), [np.zeros(10)], (((np.zeros(10)),),), {(): np.zeros(10)}, [1, 2, 3, np.zeros(1)]]:
            x = ray.put(val)
            objectid = x.id
            xval1 = ray.get(x)
            xval2 = ray.get(x)
            del xval1
            # Make sure we can still access xval2.
            xval2
            del xval2
            self.assertEqual(ray.scheduler_info()["reference_counts"][objectid], 1)
            xval3 = ray.get(x)
            xval4 = ray.get(x)
            xval5 = ray.get(x)
            del x
            del xval4, xval5
            # Make sure we can still access xval3.
            xval3
            self.assertEqual(ray.scheduler_info()["reference_counts"][objectid], 1)
            del xval3
            self.assertEqual(ray.scheduler_info()["reference_counts"][objectid], -1)
        # Getting an object multiple times and assigning it to the same name should
        # work. This was a problem in https://github.com/ray-project/ray/issues/159.
        for val in [np.zeros(10), [np.zeros(10)], (((np.zeros(10)),),), {(): np.zeros(10)}, [1, 2, 3, np.zeros(1)]]:
            x = ray.put(val)
            objectid = x.id
            xval = ray.get(x)
            xval = ray.get(x)
            xval = ray.get(x)
            xval = ray.get(x)
            self.assertEqual(ray.scheduler_info()["reference_counts"][objectid], 1)
            del x
            self.assertEqual(ray.scheduler_info()["reference_counts"][objectid], 1)
            del xval
            self.assertEqual(ray.scheduler_info()["reference_counts"][objectid], -1)
        ray.worker.cleanup()
class PythonModeTest(unittest.TestCase):
    """Runs the API in PYTHON_MODE, where remote calls execute immediately
    in-process and return values directly instead of ObjectIDs."""

    def testPythonMode(self):
        reload(test_functions)
        ray.init(start_ray_local=True, driver_mode=ray.PYTHON_MODE)
        @ray.remote
        def f():
            return np.ones([3, 4, 5])
        xref = f.remote()
        assert_equal(xref, np.ones([3, 4, 5]))  # remote functions should return by value
        assert_equal(xref, ray.get(xref))  # ray.get should be the identity
        y = np.random.normal(size=[11, 12])
        assert_equal(y, ray.put(y))  # ray.put should be the identity
        # make sure objects are immutable, this example is why we need to copy
        # arguments before passing them into remote functions in python mode
        aref = test_functions.python_mode_f.remote()
        assert_equal(aref, np.array([0, 0]))
        bref = test_functions.python_mode_g.remote(aref)
        assert_equal(aref, np.array([0, 0]))  # python_mode_g should not mutate aref
        assert_equal(bref, np.array([1, 0]))
        ray.worker.cleanup()

    def testReusableVariablesInPythonMode(self):
        reload(test_functions)
        ray.init(start_ray_local=True, driver_mode=ray.PYTHON_MODE)
        def l_init():
            return []
        def l_reinit(l):
            return []
        ray.reusables.l = ray.Reusable(l_init, l_reinit)
        @ray.remote
        def use_l():
            l = ray.reusables.l
            l.append(1)
            return l
        # Get the local copy of the reusable variable. This should be stateful.
        l = ray.reusables.l
        assert_equal(l, [])
        # Make sure the remote function does what we expect.
        assert_equal(ray.get(use_l.remote()), [1])
        assert_equal(ray.get(use_l.remote()), [1])
        # Make sure the local copy of the reusable variable has not been mutated.
        assert_equal(l, [])
        l = ray.reusables.l
        assert_equal(l, [])
        # Make sure that running a remote function does not reset the state of the
        # local copy of the reusable variable.
        l.append(2)
        assert_equal(ray.get(use_l.remote()), [1])
        assert_equal(l, [2])
        ray.worker.cleanup()
class PythonCExtensionTest(unittest.TestCase):
    """Guards against refcount corruption of CPython's singletons by the C
    extension: running the same task twice must observe identical refcounts
    for None, True and False.  The constants are deliberately inlined as
    literals in each remote function (not closed over) so that closure
    serialization does not affect the measurement."""

    def testReferenceCountNone(self):
        ray.init(start_ray_local=True, num_workers=1)
        # Make sure that we aren't accidentally messing up Python's reference counts.
        @ray.remote
        def f():
            return sys.getrefcount(None)
        first_count = ray.get(f.remote())
        second_count = ray.get(f.remote())
        self.assertEqual(first_count, second_count)
        ray.worker.cleanup()

    def testReferenceCountTrue(self):
        ray.init(start_ray_local=True, num_workers=1)
        # Make sure that we aren't accidentally messing up Python's reference counts.
        @ray.remote
        def f():
            return sys.getrefcount(True)
        first_count = ray.get(f.remote())
        second_count = ray.get(f.remote())
        self.assertEqual(first_count, second_count)
        ray.worker.cleanup()

    def testReferenceCountFalse(self):
        ray.init(start_ray_local=True, num_workers=1)
        # Make sure that we aren't accidentally messing up Python's reference counts.
        @ray.remote
        def f():
            return sys.getrefcount(False)
        first_count = ray.get(f.remote())
        second_count = ray.get(f.remote())
        self.assertEqual(first_count, second_count)
        ray.worker.cleanup()
class ReusablesTest(unittest.TestCase):
    """Exercises ray.Reusable variables: per-worker values created by an
    initializer and optionally reset between tasks by a reinitializer."""

    def testReusables(self):
        ray.init(start_ray_local=True, num_workers=1)

        # Test that we can add a variable to the key-value store.
        def foo_initializer():
            return 1
        def foo_reinitializer(foo):
            return foo
        ray.reusables.foo = ray.Reusable(foo_initializer, foo_reinitializer)
        self.assertEqual(ray.reusables.foo, 1)
        @ray.remote
        def use_foo():
            return ray.reusables.foo
        self.assertEqual(ray.get(use_foo.remote()), 1)
        self.assertEqual(ray.get(use_foo.remote()), 1)
        self.assertEqual(ray.get(use_foo.remote()), 1)

        # Test that we can add a variable to the key-value store, mutate it, and reset it.
        # Without a reinitializer the variable is rebuilt from the initializer
        # after each task, so every call sees a fresh [1, 2, 3].
        def bar_initializer():
            return [1, 2, 3]
        ray.reusables.bar = ray.Reusable(bar_initializer)
        @ray.remote
        def use_bar():
            ray.reusables.bar.append(4)
            return ray.reusables.bar
        self.assertEqual(ray.get(use_bar.remote()), [1, 2, 3, 4])
        self.assertEqual(ray.get(use_bar.remote()), [1, 2, 3, 4])
        self.assertEqual(ray.get(use_bar.remote()), [1, 2, 3, 4])

        # Test that we can use the reinitializer.
        def baz_initializer():
            return np.zeros([4])
        def baz_reinitializer(baz):
            for i in range(len(baz)):
                baz[i] = 0
            return baz
        ray.reusables.baz = ray.Reusable(baz_initializer, baz_reinitializer)
        @ray.remote
        def use_baz(i):
            baz = ray.reusables.baz
            baz[i] = 1
            return baz
        assert_equal(ray.get(use_baz.remote(0)), np.array([1, 0, 0, 0]))
        assert_equal(ray.get(use_baz.remote(1)), np.array([0, 1, 0, 0]))
        assert_equal(ray.get(use_baz.remote(2)), np.array([0, 0, 1, 0]))
        assert_equal(ray.get(use_baz.remote(3)), np.array([0, 0, 0, 1]))

        # Make sure the reinitializer is actually getting called. Note that this is
        # not the correct usage of a reinitializer because it does not reset qux to
        # its original state. This is just for testing.
        def qux_initializer():
            return 0
        def qux_reinitializer(x):
            return x + 1
        ray.reusables.qux = ray.Reusable(qux_initializer, qux_reinitializer)
        @ray.remote
        def use_qux():
            return ray.reusables.qux
        self.assertEqual(ray.get(use_qux.remote()), 0)
        self.assertEqual(ray.get(use_qux.remote()), 1)
        self.assertEqual(ray.get(use_qux.remote()), 2)
        ray.worker.cleanup()

    def testUsingReusablesOnDriver(self):
        ray.init(start_ray_local=True, num_workers=1)

        # Test that we can add a variable to the key-value store.
        def foo_initializer():
            return []
        def foo_reinitializer(foo):
            return []
        ray.reusables.foo = ray.Reusable(foo_initializer, foo_reinitializer)
        @ray.remote
        def use_foo():
            foo = ray.reusables.foo
            foo.append(1)
            return foo

        # Check that running a remote function does not reset the reusable variable
        # on the driver.
        foo = ray.reusables.foo
        self.assertEqual(foo, [])
        foo.append(2)
        self.assertEqual(foo, [2])
        foo.append(3)
        self.assertEqual(foo, [2, 3])
        self.assertEqual(ray.get(use_foo.remote()), [1])
        self.assertEqual(ray.get(use_foo.remote()), [1])
        self.assertEqual(ray.get(use_foo.remote()), [1])
        # Check that the copy of foo on the driver has not changed.
        self.assertEqual(foo, [2, 3])
        # Re-fetching returns the same driver-local (mutated) copy.
        foo = ray.reusables.foo
        self.assertEqual(foo, [2, 3])
        ray.worker.cleanup()
class ClusterAttachingTest(unittest.TestCase):
    """Starts scheduler/node processes manually and attaches a driver to them,
    instead of letting ray.init spin up a local cluster."""

    def testAttachingToCluster(self):
        node_ip_address = "127.0.0.1"
        # Random high port to avoid collisions between test runs.
        scheduler_port = np.random.randint(40000, 50000)
        scheduler_address = "{}:{}".format(node_ip_address, scheduler_port)
        ray.services.start_scheduler(scheduler_address, cleanup=True)
        time.sleep(0.1)
        ray.services.start_node(scheduler_address, node_ip_address, num_workers=1, cleanup=True)

        ray.init(node_ip_address=node_ip_address, scheduler_address=scheduler_address)

        @ray.remote
        def f(x):
            return x + 1
        self.assertEqual(ray.get(f.remote(0)), 1)

        ray.worker.cleanup()

    def testAttachingToClusterWithMultipleObjectStores(self):
        node_ip_address = "127.0.0.1"
        scheduler_port = np.random.randint(40000, 50000)
        scheduler_address = "{}:{}".format(node_ip_address, scheduler_port)
        ray.services.start_scheduler(scheduler_address, cleanup=True)
        time.sleep(0.1)
        # Three nodes on the same host: one object store per node.
        ray.services.start_node(scheduler_address, node_ip_address, num_workers=5, cleanup=True)
        ray.services.start_node(scheduler_address, node_ip_address, num_workers=5, cleanup=True)
        ray.services.start_node(scheduler_address, node_ip_address, num_workers=5, cleanup=True)

        ray.init(node_ip_address=node_ip_address, scheduler_address=scheduler_address)

        @ray.remote
        def f(x):
            return x + 1
        self.assertEqual(ray.get(f.remote(0)), 1)

        ray.worker.cleanup()
if __name__ == "__main__":
    # verbosity=2 prints each test name as it runs.
    unittest.main(verbosity=2)
|
23,986 | a4dc0eafb91b0bbc5c37ec52de8afdd8a040a258 | # -*- coding: utf-8 -*-
"""Jinja2 template engine."""
import os
import re
from jinja2 import Environment
from jinja2.exceptions import UndefinedError, TemplateSyntaxError
from piecutter.engines import Engine
from piecutter.exceptions import TemplateError
def path_join(*args, **kwargs):
    """Return ``args`` joined as file paths like with os.path.join().

    >>> from piecutter.engines.jinja import path_join
    >>> path_join('foo', 'bar')
    'foo/bar'

    Paths are normalized.

    >>> path_join('foo', '..', 'bar')
    'bar'

    You can pass an extra keyword argument 'target_os': a value in os.name
    capabilities.

    >>> path_join('foo', 'bar', target_os='posix')
    'foo/bar'

    Currently, this is using os.path, i.e. the separator and rules for the
    computer running Jinja2 engine. A NotImplementedError exception will be
    raised if 'os' argument differs from 'os.name'.

    >>> import os
    >>> os.name == 'posix'  # Sorry if you are running tests on another OS.
    True
    >>> path_join('foo', 'bar', target_os='nt')  # Doctest: +ELLIPSIS
    Traceback (most recent call last):
    ...
    NotImplementedError: Cannot join path with "nt" style. Host OS is "posix".
    """
    target_os = kwargs.get('target_os', None)
    # Compare strings by value: the previous ``is not`` identity check only
    # passed for 'posix' because short literals happen to be interned, and
    # would spuriously raise for an equal-but-distinct string object.
    if target_os and target_os != os.name:
        raise NotImplementedError('Cannot join path with "{target}" style. '
                                  'Host OS is "{host}".'.format(
                                      target=target_os,
                                      host=os.name))
    result = os.path.join(*args)
    result = path_normalize(result, target_os)
    return result


def path_normalize(path, target_os=None):
    """Normalize path (like os.path.normpath) for given os.

    >>> from piecutter.engines.jinja import path_normalize
    >>> path_normalize('foo/bar')
    'foo/bar'
    >>> path_normalize('foo/toto/../bar')
    'foo/bar'

    Currently, this is using os.path, i.e. the separator and rules for the
    computer running Jinja2 engine. A NotImplementedError exception will be
    raised if 'os' argument differs from 'os.name'.

    >>> import os
    >>> os.name == 'posix'  # Sorry if you are running tests on another OS.
    True
    >>> path_normalize('foo/bar', target_os='nt')  # Doctest: +ELLIPSIS
    Traceback (most recent call last):
    ...
    NotImplementedError: Cannot join path with "nt" style. Host OS is "posix".
    """
    # Same value-equality fix as in path_join above.
    if target_os and target_os != os.name:
        raise NotImplementedError('Cannot join path with "{target}" style. '
                                  'Host OS is "{host}".'.format(
                                      target=target_os,
                                      host=os.name))
    return os.path.normpath(path)
class Jinja2Engine(Engine):
    """Template engine backed by Jinja2."""

    def __init__(self, environment=None):
        """Store *environment* (a fresh jinja2.Environment by default) and
        register the path helpers as template globals."""
        self.environment = Environment() if environment is None else environment
        self.register_environment_functions()

    def register_environment_functions(self):
        """Populate self.environment.globals with some global functions."""
        env_globals = self.environment.globals
        env_globals['path_join'] = path_join
        env_globals['path_normalize'] = path_normalize

    def render(self, template, context):
        """Return *template* rendered against *context*.

        Jinja2 syntax and rendering failures are re-raised as TemplateError.
        """
        try:
            compiled = self.environment.from_string(template)
        except TemplateSyntaxError as e:
            raise TemplateError(e)
        try:
            return compiled.render(**context)
        except (UndefinedError, TypeError) as e:
            raise TemplateError(e)

    def match(self, template, context):
        """Return a ratio showing whether template looks like using engine.

        >>> engine = Jinja2Engine()
        >>> engine.match('', {})
        0.0
        >>> engine.match('{# Jinja2 #}', {})
        1.0
        >>> engine.match('Not shebang {# Jinja2 #}', {})
        0.0
        >>> engine.match('{{ key }}', {})
        0.9
        """
        # An explicit shebang-style comment is a certain match; otherwise a
        # variable interpolation is a strong hint.
        if template.startswith('{# Jinja2 #}'):
            return 1.0
        return 0.9 if re.search(r'{{ .+ }}', template) else 0.0
|
23,987 | fc0b1391217e7396279a2e4b125260dca5264077 | from agents.eval.evaluate_dagger import evaluate_dagger
from agents.eval.evaluate_vision_dagger import evaluate_vision_dagger
from agents.eval.evaluate_dqn import evaluate_dqn |
23,988 | 6b79a8598931d9055da019fa6580b473aed7bf9a | #!/usr/bin/env python
"""
Simple tool to simulate stellar populations.
"""
__author__ = "Alex Drlica-Wagner"
__email__ = "kadrlica@fnal.gov"
__version__ = "0.1.0"
import os,sys
import numpy as np
import scipy.stats as stats
from dwarf import Dwarf
from instruments import factory as instrumentFactory
def randerr(size=1, func='normal', **kwargs):
    """Return a sample from a random variate.

    Parameters
    ----------
    size : int
        Number of samples to draw.
    func : str
        Distribution name (case-insensitive): 'normal'/'gaussian'/'gauss',
        'uniform' (over [-1, 1]), 'lorentzian'/'cauchy', or 'delta'
        (a point mass at 1, useful for disabling scatter).
    kwargs :
        Extra keyword arguments forwarded to the scipy.stats ``rvs`` call.

    Returns
    -------
    numpy array of ``size`` samples.

    Raises
    ------
    ValueError
        If ``func`` is not one of the recognized names.  (ValueError is a
        subclass of the bare Exception previously raised, so existing
        ``except Exception`` callers still work.)
    """
    kwargs.update(size=size)
    funclower = func.lower()
    if funclower in ('normal', 'gaussian', 'gauss'):
        rvs = stats.norm.rvs
    elif funclower == 'uniform':
        # uniform(loc=-1, scale=2) spans [-1, 1].
        rvs = stats.uniform(-1, 2).rvs
    elif funclower in ('lorentzian', 'cauchy'):
        rvs = stats.cauchy.rvs
    elif funclower == 'delta':
        # randint(1, 2) draws from {1} only: a delta function at 1.
        rvs = stats.randint(1, 2).rvs
    else:
        raise ValueError('Unrecognized type: %s' % func)
    return rvs(**kwargs)
class Simulator(object):
    """Drives dwarf-galaxy observation simulations.

    NOTE(review): ``run`` references ``self.create_dwarf``,
    ``self.create_instrument`` and bare names ``dwarf``/``instrument`` that
    are not defined anywhere in this module, so it cannot currently execute
    -- confirm against the intended API before relying on it.
    """
    def run(self, num=1, exptime=10000):
        # NOTE(review): create_dwarf/create_instrument are undefined on this
        # class, and their results are never bound to the ``dwarf``/
        # ``instrument`` names used below.
        self.create_dwarf()
        self.create_instrument()
        # Allow a scalar exposure time as well as a sequence of them.
        if not hasattr(exptime,'__iter__'): exptime = [exptime]
        out = []
        for e in exptime:
            # NOTE(review): ``for i in num`` iterates over an int and raises
            # TypeError; presumably ``range(num)`` was intended.  Also,
            # ``simulate(dwarf,instrument,exptime)`` should presumably be
            # ``self.simulate(dwarf, instrument, e)`` -- the loop variable
            # ``e`` is otherwise unused.
            for i in num:
                data = simulate(dwarf,instrument,exptime)
                out.append(data)
        return out

    @staticmethod
    def simulate(dwarf,instrument,exp=10000):
        """ Simulate observation """
        # Returns a numpy record array of detected stars (mag_1 > 16 and
        # S/N > 5) with positions, magnitudes, S/N and velocity columns.
        # Set the second band to 'i' (matches CaT lines)
        dwarf.band_1 = 'g'; dwarf.band_2 = 'i'
        mag_1,mag_2,ra,dec = dwarf.simulate()
        snr = instrument.mag2snr(mag_2,exp)
        #olderr = np.seterr(all='ignore')
        sel = (mag_1 > 16) & (snr > 5)
        #np.seterr(**olderr)
        nstar = sel.sum()
        mag = mag_1[sel]
        color = (mag_1-mag_2)[sel]
        snr = snr[sel]
        # The true velocity, u, of each star is the sum of the mean velocity and
        # a component from the intrinsic velocity dispersion
        vtrue = dwarf.vmean + dwarf.vdisp*randerr(nstar,'normal')
        # There are two components of the measurement uncertainty on
        # the velocity of each star
        vstaterr = instrument.snr2err(snr)
        vsyserr = instrument.vsys
        # The measured velocity is the true velocity plus a component from the
        # instrumental measurement error
        vstat = vstaterr*randerr(nstar,'normal')
        vsys = vsyserr*randerr(nstar,'normal')
        vmeas = vtrue + vstat + vsys
        # Now assign the measurement error to the statistical error
        vmeaserr = vstaterr
        # The error that is commonly used is the sum of the measurement error
        # and the systematice error estimate in quadrature
        verr = np.sqrt(vstaterr**2 + vsyserr**2)
        names = ['RA','DEC','MAG_%s'%dwarf.band_1.upper(),'MAG_%s'%dwarf.band_2.upper(),
                 'SNR','VTRUE','VSTAT','VSYS','VMEAS','VMEASERR','VERR']
        data = [ra[sel],dec[sel],mag_1[sel],mag_2[sel],snr,vtrue,vstat,vsys,vmeas,vmeaserr,verr]
        return np.rec.fromarrays(data,names=names)
if __name__ == "__main__":
    import argparse
    description = "Simulate the observable properties of a dwarf galaxy."
    parser = argparse.ArgumentParser(description=description)
    parser.add_argument('outfile', nargs='?',
                        help="Optional output file")
    parser.add_argument('--seed', type=int, default=None,
                        help="Random seed")

    # Physical properties.  (These were previously added via
    # ``parser.add_argument`` after creating the group, so they never showed
    # up under "Physical" in --help; add them to the group itself.)
    group = parser.add_argument_group('Physical')
    group.add_argument('--stellar_mass', type=float, default=2000.,
                       help='Stellar mass for simulated satellite (Msun)')
    group.add_argument('--vmean', type=float, default=60.,
                       help='Mean systemic velocity (km/s)')
    group.add_argument('--vdisp', type=float, default=3.3,
                       help='Velocity dispersion (km/s)')

    group = parser.add_argument_group('Isochrone')
    group.add_argument('--isochrone', type=str, default='Bressan2012',
                       help='Isochrone type.')
    group.add_argument('--distance_modulus', type=float, default=17.5,
                       help='Distance modulus.')
    group.add_argument('--age', type=float, default=13.0,
                       help='Age of stellar population (Gyr).')
    group.add_argument('--metallicity', type=float, default=1e-3,
                       help='Metallicity of stellar population.')

    group = parser.add_argument_group('Kernel')
    group.add_argument('--kernel', type=str, default='EllipticalPlummer',
                       help='Kernel type.')
    group.add_argument('--ra', type=float, default=54.0,
                       help='Centroid right acension (deg).')
    group.add_argument('--dec', type=float, default=-54.0,
                       help='Centroid declination (deg).')
    group.add_argument('--extension', type=float, default=0.1,
                       help='Extension (deg).')
    group.add_argument('--ellipticity', type=float, default=0.0,
                       help='Spatial extension (deg).')
    group.add_argument('--position_angle', type=float, default=0.0,
                       help='Spatial extension (deg).')

    group = parser.add_argument_group('Instrument')
    group.add_argument('--instrument', default='gmacs', choices=['gmacs'],
                       help='Instrument')
    egroup = group.add_mutually_exclusive_group()
    egroup.add_argument('--exptime', default=3600., type=float,
                        help='Exposure time (s)')
    egroup.add_argument('--maglim', default=None, type=float,
                        help='Limiting magnitude (S/N = 5)')
    group.add_argument('--vsys', default=None, type=float,
                       help='Systematic velocity error (km/s)')
    args = parser.parse_args()

    np.random.seed(args.seed)

    # NOTE(review): ``mag2exp`` is not defined or imported anywhere in this
    # module, so the --maglim path raises NameError.  Confirm where the
    # magnitude-to-exposure conversion is supposed to come from.
    exptime = mag2exp(args.maglim) if args.maglim else args.exptime

    # Build the dwarf model from the command-line parameters.
    dwarf = Dwarf(vmean=args.vmean, vdisp=args.vdisp)
    isochrone = Dwarf.createIsochrone(name=args.isochrone, age=args.age,
                                      metallicity=args.metallicity,
                                      distance_modulus=args.distance_modulus)
    dwarf.set_isochrone(isochrone)
    kernel = Dwarf.createKernel(name=args.kernel, extension=args.extension,
                                ellipticity=args.ellipticity,
                                position_angle=args.position_angle,
                                lon=args.ra, lat=args.dec)
    dwarf.set_kernel(kernel)
    # Richness scales the requested stellar mass by the mass per unit richness.
    dwarf.richness = args.stellar_mass / dwarf.isochrone.stellar_mass()

    instr = instrumentFactory(args.instrument)
    # Fix: the instrument variable is ``instr``; the old code assigned to an
    # undefined name ``instrument`` and crashed whenever --vsys was given.
    if args.vsys is not None:
        instr.vsys = args.vsys

    # Run the simulation.
    data = Simulator.simulate(dwarf, instr, exptime)

    # Output: whitespace-aligned columns with a commented header row.
    if args.outfile:
        out = open(args.outfile, 'w')
    else:
        out = sys.stdout
    out.write('#' + ' '.join(['%-9s' % n for n in data.dtype.names]) + '\n')
    np.savetxt(out, data, fmt='%-9.5f')
23,989 | ea888c6b2ac9530e734dd44dc886f164d4e74f39 | # Generated by Django 2.0.8 on 2018-09-14 15:13
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated schema migration: drops the raw sender bank-account
    fields from BankTransferSenderDetails and re-points sender_bank_account
    at the security.BankAccount model via a cascading foreign key."""

    dependencies = [
        ('security', '0016_auto_20180914_1551'),
    ]

    operations = [
        # The three plain columns are superseded by the BankAccount FK below.
        migrations.RemoveField(
            model_name='banktransfersenderdetails',
            name='sender_account_number',
        ),
        migrations.RemoveField(
            model_name='banktransfersenderdetails',
            name='sender_roll_number',
        ),
        migrations.RemoveField(
            model_name='banktransfersenderdetails',
            name='sender_sort_code',
        ),
        migrations.AlterField(
            model_name='banktransfersenderdetails',
            name='sender_bank_account',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='senders', to='security.BankAccount'),
        ),
    ]
23,990 | 9821cca4035085e38426f98fdc3dd82a0a9612fd | from django.shortcuts import render
from django.views.decorators.csrf import csrf_exempt
from api.forms import CustomUserCreationForm
from api.models import User
from django.urls import reverse_lazy
from django.views import generic
from django.http import JsonResponse
import json
# Create your views here.
class SignUp(generic.CreateView):
    """Class-based signup view; renders CustomUserCreationForm and redirects
    to the 'login' URL on success."""
    form_class = CustomUserCreationForm
    success_url = reverse_lazy('login')
@csrf_exempt
def signup(request):
    """JSON signup endpoint.

    Expects a POST whose body is a JSON object of CustomUserCreationForm
    fields; creates the user on success.  Always responds 200 with a JSON
    ``status`` string ('Success', 'Form Not Valid', or 'Failed' for
    non-POST requests).
    """
    if request.method == 'POST':
        # Parse the body only for POSTs: previously json.load() ran before
        # the method check, so any non-POST request with an empty body crashed.
        data = json.load(request)
        form = CustomUserCreationForm(data)
        if form.is_valid():
            # Persist the new user -- the old code validated the form but
            # never saved it, so no account was actually created.
            form.save()
            return JsonResponse({'status': 'Success'}, status=200)
        return JsonResponse({'status': 'Form Not Valid'}, status=200)
    return JsonResponse({'status': 'Failed'}, status=200)
|
23,991 | 2170b50811a58be7f47e378b601eebe33c635285 | # 通过列表生成式,我们可以直接创建一个列表。但是,受到内存限制,列表容量肯定是有限的。而且,创建一个包含100万个元素的列表,
# 不仅占用很大的存储空间,如果我们仅仅需要访问前面几个元素,那后面绝大多数元素占用的空间都白白浪费了。 所以,如果列表元素
# 可以按照某种算法推算出来,那我们是否可以在循环的过程中不断推算出后续的元素呢?这样就不必创建完整的list,从而节省大量的空间。
# 在Python中,这种一边循环一边计算的机制,称为生成器:generator。 要创建一个generator,有很多种方法。第一种方法很简单,
# 只要把一个列表生成式的[]改成(),就创建了一个generator:
L = [x * x for x in range(10)]
print(L)
# [0, 1, 4, 9, 16, 25, 36, 49, 64, 81]
g = (x * x for x in range(10))
print(g)
# <generator object <genexpr> at 0x1022ef630>
# 创建L和g的区别仅在于最外层的[]和(),L是一个list,而g是一个generator。 我们可以直接打印出list的每一个元素,
# 但我们怎么打印出generator的每一个元素呢? 如果要一个一个打印出来,可以通过next()函数获得generator的下一个返回值:
print(next(g))
print(next(g))
print(next(g))
print(next(g))
print("=======================")
for i in g:
print(i)
|
23,992 | 091f40341ab1eb1fd054e7888f3f3b237c336287 | #!/usr/bin/env python
#-*- coding:utf-8 -*-
''
__author__ = 'PakhoLeung'
from sensors.electronic_component import ElectronicComponent
import logging
import time
import RPi.GPIO as GPIO
class Servo(ElectronicComponent):
    """PWM-driven hobby servo on a single Raspberry Pi GPIO channel.

    rotateTo maps [0, 180] degrees linearly onto a [2.5, 12.5] percent duty
    cycle at the configured PWM frequency (50 Hz by default).
    """
    # Class-level defaults; overwritten per instance below.
    __channel = None
    __pwm = None
    __freq = None  # frequent
    __dc = None  # duty cycle
    __angle = None

    def __init__(self, channel: int, freq=50):
        super().__init__()
        self.__channel = channel
        self.__freq = freq
        # NOTE(review): this sets a public ``angle`` attribute, not the
        # name-mangled ``__angle`` that presentAngle() returns -- confirm
        # whether ``self.__angle = 0`` was intended.
        self.angle = 0
        GPIO.setup(self.__channel, GPIO.OUT, initial=False)
        self.__pwm = GPIO.PWM(self.__channel, self.__freq)
        self.__pwm.start(0)

    def start(self):
        # Restart PWM output at 0% duty cycle.
        super().start()
        self.__pwm.start(0)

    def terminate(self):
        # Drive the pin low and release the GPIO channel for good.
        super().terminate()
        GPIO.output(self.__channel, GPIO.LOW)
        GPIO.cleanup(self.__channel)

    def pause(self):
        super().pause()
        GPIO.output(self.__channel, GPIO.LOW)

    def changeFrequency(self, freq):
        # Change the PWM carrier frequency (Hz).
        self.__freq = freq
        self.__pwm.ChangeFrequency(self.__freq)

    def __changeDC(self, dc):
        # Apply a new duty cycle (percent).
        self.__dc = dc
        self.__pwm.ChangeDutyCycle(self.__dc)

    def stop(self):
        super().stop()
        self.__pwm.stop()

    def rotateTo(self, angle):
        # Takes an angle in degrees and rotates the servo to it.
        # This is a time-consuming call (blocks ~0.6 s while the horn moves);
        # do not invoke it from the main function.
        if angle < 0 or angle > 180:
            raise ValueError("Angle is out of Bound.")
        # if threading.current_thread() == threading._main_thread:
        #     raise threading.ThreadError("spin is a time-consuming job. Can't run in mainThread.")
        if self.getStatus() == self.RUNNING:
            logging.info("This servo is occupied.")
            return
        self.start()
        # Linear map: 0-180 degrees onto a 2.5-12.5 percent duty cycle.
        self.__dc = (angle / (180 - 0)) * (12.5 - 2.5) + 2.5
        self.__changeDC(self.__dc)
        self.__angle = angle
        time.sleep(0.6)
        self.stop()

    def presentAngle(self):
        # Last commanded angle in degrees (None until rotateTo is called).
        return self.__angle
23,993 | 94c587d85c117bb4adbaec75bf651212d2730129 | a = int(input("Please Enter Principle Amount: "))
b = float(input("Please Enter rate of interest in %: "))
c = int(input("Enter Number of years for Investment: "))
e = 0
f = a * b
while e < c:
f += f * b
e += 1
print(f) |
23,994 | e0170ab309f54a07e7cbc11938709d185bfcd0b5 | from itertools import groupby
import re
aa = 'aaabbccddaa'
#와 저 \1의 의미는 정규식 괄호친거 저거를 참조하라는소리 그니까
#a와 a여러개 반복된거 두개세트로 나오라고 함
#파인드올은 아마 그룹으로된거 듀플로뽑아줌!!!!!
filted = re.findall(r'(\w)(\1*)',aa)
print(filted)
#그룹바이 ㅋ ㅋ 어마어마하군 ㅋ
for x ,y in groupby(aa):
print(''.join(y))
def realdap(string):
    """Run-length encode *string*: each run of equal characters becomes the
    character followed by its count (e.g. 'aaab' -> 'a3b1')."""
    # Append a NUL sentinel so the final run is flushed by the comparison
    # loop; a run that ends in the sentinel itself is never emitted (this
    # matches the original recursive implementation's handling of trailing
    # NUL characters).
    padded = string + '\0'
    parts = []
    run_length = 1
    for idx in range(1, len(padded)):
        if padded[idx] == padded[idx - 1]:
            run_length += 1
        else:
            parts.append(padded[idx - 1] + str(run_length))
            run_length = 1
    return ''.join(parts)
|
23,995 | 3d9ebd0e2c6ffea7aa1e355a3fbf081324718266 | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'test.ui'
#
# Created by: PyQt5 UI code generator 5.9.2
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_Form(object):
    """Auto-generated (pyuic5) layout for test.ui: a form with two push
    buttons stacked in a vertical layout.  Do not hand-edit -- regenerate
    from the .ui file instead."""

    def setupUi(self, Form):
        # Build the widget tree and attach it to *Form*.
        Form.setObjectName("Form")
        Form.resize(206, 105)
        self.verticalLayout = QtWidgets.QVBoxLayout(Form)
        self.verticalLayout.setObjectName("verticalLayout")
        self.btn01 = QtWidgets.QPushButton(Form)
        self.btn01.setObjectName("btn01")
        self.verticalLayout.addWidget(self.btn01)
        self.btn02 = QtWidgets.QPushButton(Form)
        self.btn02.setObjectName("btn02")
        self.verticalLayout.addWidget(self.btn02)

        self.retranslateUi(Form)
        QtCore.QMetaObject.connectSlotsByName(Form)

    def retranslateUi(self, Form):
        # Apply translatable display strings to the widgets.
        _translate = QtCore.QCoreApplication.translate
        Form.setWindowTitle(_translate("Form", "testfrom"))
        self.btn01.setText(_translate("Form", "btn01"))
        self.btn02.setText(_translate("Form", "btn02"))
23,996 | f3297a39f83fcc440a9e7778651a935efeff94a0 | import math
def run(x1, y1, x2, y2):
    """Return the Euclidean distance between points (x1, y1) and (x2, y2)."""
    # math.hypot avoids intermediate overflow/underflow and is clearer than
    # the hand-written sqrt of summed squares.
    return math.hypot(x2 - x1, y2 - y1)
23,997 | 971c9a7c1b26b12832bc3160453eb41c63c90dfb | from main.game_manager import GameManager
from main.menu.sub_menu import SubMenu
from main.players.human_player import HumanPlayer
from main.players.stockfish_player import StockfishPlayer
from main.menu.player_options_menu import PlayerOptionsMenu
class PlayerSelectMenu(SubMenu):
    """Menu for picking a player type (human or Stockfish).

    Used twice per new game: first with ``first_player=None`` to choose the
    white player, then again (with the chosen player passed in) for black.
    Once both players are selected, a new game is started.
    """
    # Selectable player implementations; menu entries are their class names.
    PlayerTypes = [HumanPlayer, StockfishPlayer]

    def __init__(self, first_player=None, parent=None):
        # first_player is the already-chosen white player, or None when this
        # menu is selecting white.
        self.first_player = first_player
        if first_player is None:
            menu_name = "White player"
        else:
            menu_name = "Black player"
        self.player_type_names = []
        for player_type in PlayerSelectMenu.PlayerTypes:
            self.player_type_names.append(player_type.__name__)
        SubMenu.__init__(self, "New game", self.player_type_names, parent=parent, name_inside=menu_name)

    def on_enter_submenu(self):
        # Reset to the top-level list of player types each time we re-enter.
        super(PlayerSelectMenu, self).on_enter_submenu()
        self.elements = self.player_type_names
        self.current_pos = 1
        self.in_submenu = False

    def on_select_element(self, pos, element):
        # Instantiate the chosen player type, then either configure it, pick
        # the second player, or start the game.
        current_player = PlayerSelectMenu.PlayerTypes[pos]()
        if len(current_player.get_configuration_options()) > 0:
            # Player needs configuration first (e.g. engine options).
            self.elements = [PlayerOptionsMenu(current_player, self.parent, first_player=self.first_player)]
            self.current_pos = 0
            self.in_submenu = True
            self.elements[0].on_enter_submenu()
        else:
            if self.first_player is None:
                # White chosen; chain into an identical menu for black.
                self.elements = [PlayerSelectMenu(first_player=current_player, parent=self.parent)]
                self.current_pos = 0
                self.in_submenu = True
                self.elements[0].on_enter_submenu()
            else:
                # Both players chosen; start the game.
                GameManager.instance.new_game(player1=self.first_player, player2=current_player)
23,998 | 6586f5fb7ee6bc30984f03486140ac9c7c537726 | """
pypsum.appspot.com
~~~~~~~~~~~~~~~~~~
This is the pypsum Google Appengine application runner. It does nothing more
than importing the pypsum application, Goolge Appengine SDK utils and run the
application.
"""
__author__ = "Luca De Vitis <luca@monkeython.com>"
__version__ = '0.2'
__copyright__ = "2011, %s " % __author__
__license__ = """
Copyright (C) %s
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
.along with this program. If not, see <http://www.gnu.org/licenses/>.
""" % __copyright__
__doc__ = """
:version: %s
:author: %s
:organization: Monkeython
:contact: http://www.monkeython.com
:copyright: %s
%s
""" % (__version__, __author__, __license__, __doc__)
__docformat__ = 'restructuredtext en'
__classifiers__ = [
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: GNU General Public License (GPL)',
'Operating System :: OS Independent',
'Topic :: Internet :: WWW/HTTP' ]
if __name__ == '__main__':
    # Imports are deferred to runtime so the module's metadata can be read
    # without the App Engine SDK installed.
    from google.appengine.ext.webapp.util import run_wsgi_app
    from pypsum.application import pypsum
    from pypsum.settings import AppSpot
    # Apply the AppSpot settings object, then hand the WSGI app to the
    # App Engine runner.
    pypsum.config.from_object(AppSpot)
    run_wsgi_app(pypsum)
23,999 | ee580dbd935f9cc8d8d41c7b1f075a79f4dedff6 | from PyInstaller.__main__ import run
if __name__ == '__main__':
    # Each CLI token must be its own list element: fused entries such as
    # '-p get_csv_01.py' reach PyInstaller's argparse as a single argv item
    # and produce values with a leading space (or are misparsed entirely).
    opts = [
        'main.py',  # main program file
        # NOTE(review): '-p' adds module *search paths*; pointing it at .py
        # files has no effect if they already sit next to main.py -- confirm
        # whether these were meant to be --hidden-import entries instead.
        '-p', 'get_csv_01.py',
        '-p', 'url.py',
        '-n', 'hirain_data_visualization_0.0.4',  # executable name
        '-F',  # bundle into a single file
        # '-w',  # uncomment to run without a console window
        r'--icon=D:/shan.tian/Downloads/20210329/20210427/templates/data.ico',  # executable icon
        '-y',  # overwrite output without confirmation
        '--clean',
        '--workpath=build',
        '--add-data=templates;templates',  # bundle the HTML pages
        '--add-data=static;static',  # bundle the static resources
        '--distpath=build',
        '--specpath=./',
    ]
    run(opts)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.