content stringlengths 1 1.04M | input_ids listlengths 1 774k | ratio_char_token float64 0.38 22.9 | token_count int64 1 774k |
|---|---|---|---|
import numpy as np
from math import pi
from os.path import join
import matplotlib.pyplot as plt
from src import MLEnergy, list_tl_files
plt.ion()
source_depth = 'shallow'
#source_depth = 'deep'
save_dict = one_freq(400)
save_dict['e_ri_400'] = save_dict.pop('e_ri')
save_dict['e_ri_0_400'] = save_dict.pop('e_ri_0')
save_dict['loop_len_400'] = save_dict.pop('loop_len')
"""
tmp_dict = one_freq(1e3)
save_dict['e_ri_1000'] = tmp_dict.pop('e_ri')
save_dict['e_ri_0_1000'] = tmp_dict.pop('e_ri_0')
save_dict['loop_len_1000'] = tmp_dict.pop('loop_len')
"""
np.savez("data/processed/bg_ri_eng_" + source_depth + ".npz", **save_dict)
| [
11748,
299,
32152,
355,
45941,
198,
6738,
10688,
1330,
31028,
198,
6738,
28686,
13,
6978,
1330,
4654,
198,
11748,
2603,
29487,
8019,
13,
9078,
29487,
355,
458,
83,
198,
198,
6738,
12351,
1330,
337,
2538,
5877,
11,
1351,
62,
28781,
62,
... | 2.328413 | 271 |
# Author: Martin McBride
# Created: 2021-07-07
# Copyright (C) 2021, Martin McBride
# License: MIT
# Draw paths with Pillow
from PIL import Image, ImageDraw, ImagePath
import math
import random
count = 201
points = [curve(t) for t in range(0, count, 10)]
# Creating and drawing a path
image = Image.new('RGB', (400, 300), 'lightgrey')
draw = ImageDraw.Draw(image)
path = ImagePath.Path(points)
path.compact()
draw.line(path, fill='blue', width=4)
image.save('imagedraw-path.png')
# transforming a path translate and rotate
image = Image.new('RGB', (400, 300), 'lightgrey')
draw = ImageDraw.Draw(image)
c = math.cos(math.pi/6)
s = math.sin(math.pi/6)
path = ImagePath.Path(points)
path.transform([c, -s, 100, s, c, 100])
path.compact()
draw.line(path, fill='blue', width=4)
image.save('imagedraw-transformpath.png')
# Randomise a path
image = Image.new('RGB', (400, 300), 'lightgrey')
draw = ImageDraw.Draw(image)
path = ImagePath.Path(points)
path.transform([c, -s, 100, s, c, 100])
path.compact()
path.map(sketch)
draw.line(path, fill='blue', width=4)
image.save('imagedraw-sketchpath.png')
| [
2,
6434,
25,
220,
5780,
1982,
47148,
198,
2,
15622,
25,
33448,
12,
2998,
12,
2998,
198,
2,
15069,
357,
34,
8,
33448,
11,
5780,
1982,
47148,
198,
2,
13789,
25,
17168,
198,
198,
2,
15315,
13532,
351,
19770,
322,
198,
198,
6738,
350,... | 2.690821 | 414 |
import pyautogui
from PIL import ImageGrab, ImageOps
from numpy import *
if __name__ == "__main__":
main()
| [
11748,
12972,
2306,
519,
9019,
198,
6738,
350,
4146,
1330,
7412,
48400,
11,
7412,
41472,
198,
6738,
299,
32152,
1330,
1635,
198,
198,
361,
11593,
3672,
834,
6624,
366,
834,
12417,
834,
1298,
198,
197,
12417,
3419,
198
] | 2.868421 | 38 |
from collections import namedtuple
HOST = namedtuple('HOST', 'servers sockets channels DPC')
DRAM = namedtuple('DRAM', 'ranks bgroups banks')
'''
Datacenter of ICX servers: 100K, 2 sockets, 8 channels, 2DPC
Datacenter of ARM servers: 100K, 1 socket, 12 channels, 1DPC
Dual-socket ICX server: 1 node, 2 sockets, 8 channels, 2DPC
ARM server: 1 node, 1 socket, 12 channels, 1DPC
drDDR5: 2 ranks, 8 BGs, 4 BAs
srDDR5: 1 rank, 8 BGs, 4 BAs
'''
icxFLEET = HOST(100 * 1000, 2, 8, 2)
armFLEET = HOST(100 * 1000, 1, 12, 1)
icxSRV = HOST(1, 2, 8, 2)
armSRV = HOST(1, 1, 12, 1)
drDDR5 = DRAM(2, 8, 4)
srDDR5 = DRAM(1, 8, 4)
'''
Configurations used in the DRAMSec paper
Sys A: dual-socket server with 16 channels (8 channels per socket), 2 DPC, 2 ranks, 8 BGs, 4 BAs
Sys B: single-socket server with 2 channels, 1 DPC, 1 rank, 4 BGs, 4 BAs
'''
hostA = icxSRV
dramA = drDDR5
hostB = HOST(1, 1, 2, 1)
dramB = DRAM(1, 4, 4)
def Banks(HOST, DRAM):
'''
Computes the number of banks in a system
:param HOST: host config (namedtuple)
:param DRAM: DRAM config (namedtuple)
:rtype: int
'''
return HOST.servers * HOST.sockets * HOST.channels * HOST.DPC * DRAM.ranks * DRAM.bgroups * DRAM.banks
def PrintConfig(HOST):
'''
Prints the configuration of the host
'''
print('Host:')
print(' Servers: {}'.format(HOST.servers))
print(' Sockets: {}'.format(HOST.sockets))
print(' Channels: {}'.format(HOST.channels))
print(' DPC: {}'.format(HOST.DPC))
def PrintDRAM(DRAM):
'''
Prints the configuration of the DRAM
'''
print('DRAM:')
print(' Ranks: {}'.format(DRAM.ranks))
print(' BGroups: {}'.format(DRAM.bgroups))
print(' Banks: {}'.format(DRAM.banks)) | [
6738,
17268,
220,
220,
220,
1330,
3706,
83,
29291,
198,
198,
39,
10892,
796,
3706,
83,
29291,
10786,
39,
10892,
3256,
705,
2655,
690,
37037,
9619,
360,
5662,
11537,
198,
7707,
2390,
796,
3706,
83,
29291,
10786,
7707,
2390,
3256,
705,
... | 2.294805 | 770 |
# =========================================================================== #
# ____________________ |Importation des lib/packages| ____________________ #
# =========================================================================== #
import numpy as np
import matplotlib.pyplot as plt
import seaborn as sns
import sys
# -- Local modules
from parsing import parser
from conversion import convert_expression
from developpement import developpement
from constants import b_bonus
# -- Specific for the handle of the bonus
if b_bonus:
from polynomial_bonus import MyMonomial
from polynomial_bonus import PolynomialBonus as Polynom
else:
from polynomial import MyMonomial
from polynomial import Polynomial as Polynom
# =========================================================================== #
# ___________________________ |FUNCTIONS| ____________________________ #
# =========================================================================== #
def process(lst_expr:list):
""" Converts the raw tokens into a developped polynomial instance.
Parameter:
----------
* lst_expr [list of Tokens]: raw list of Tokens.
Return:
-------
polynom [Polynomial object]: developped form of the polynomial.
"""
lst_conv = convert_expression(lst_expr)
lst_dev = developpement(lst_conv)
polynom = lst_dev[0]
return polynom
def simple_graph(polynom):
""" Plots the polynomial.
Parameter:
----------
* polynom [Polynomial/PolynomialBonus]: polynomial instance.
Return:
-------
None.
"""
nb_r = len(polynom.coefs) - 1
f = polynom.lambda_polynom()
c_x = polynom.coefs[-2] / (nb_r * polynom.coefs[-1])
x = np.linspace(start= -c_x - 6, stop=-c_x + 6, num=50 * (polynom.degree + 1))
y = list(map(polynom.lambda_p, x))
real_roots = []
for r in polynom.roots:
if isinstance(r, complex):
continue
real_roots.append(r)
real_roots_x = np.array(real_roots)
real_roots_y = np.zeros(real_roots_x.shape[0])
sns.set_theme()
ax = sns.lineplot(x=x, y=y)
sns.scatterplot(x=real_roots_x, y = real_roots_y, ax=ax)
ax.set(xlabel = "$x$", ylabel = "$P(x)$")
ax.axhline(0, c="black", lw=0.5)
ax.axvline(0, c="black", lw=0.5)
plt.show()
# =========================================================================== #
# ______________________________ |MAIN| ______________________________ #
# =========================================================================== #
if __name__ == "__main__":
args = sys.argv
# --- Parsing of arguments
if len(args) == 1:
print("No argument.")
sys.exit()
exprs = parser(args[1:])
if exprs is None:
sys.exit()
for expr in exprs:
# --- Processing
poly = process(expr)
if isinstance(poly, MyMonomial):
poly = Polynom([poly.coefficient])
poly.summarize()
# --- Graphic representation
if b_bonus and poly.degree > 0:
simple_graph(poly) | [
2,
38093,
2559,
855,
1303,
198,
2,
220,
4841,
1427,
220,
930,
3546,
10189,
748,
9195,
14,
43789,
91,
220,
220,
220,
4841,
1427,
1303,
198,
2,
38093,
2559,
855,
1303,
198,
11748,
299,
32152,
355,
45941,
198,
11748,
2603,
29487,
8019,
... | 2.686632 | 1,152 |
from django.contrib.sitemaps import Sitemap
from academicPhylogeny.models import PhD
| [
6738,
42625,
14208,
13,
3642,
822,
13,
82,
9186,
1686,
1330,
311,
9186,
499,
198,
6738,
8233,
2725,
2645,
6644,
88,
13,
27530,
1330,
16394,
198
] | 3.269231 | 26 |
import sys, os
sys.path.append(os.path.abspath(os.path.dirname(__file__) + '/' + '../..'))
from tmlib.datasets import utilizies
from ldamodel import LdaModel
from tmlib.datasets.dataset import DataSet
import logging
FORMAT = "%(levelname)s> In %(module)s.%(funcName)s line %(lineno)d at %(asctime)-s> %(message)s"
logging.basicConfig(format=FORMAT, level=logging.INFO)
logger = logging.getLogger(__name__)
class LearningStatistics(object):
"""docstring for ClassName"""
class LdaLearning(object):
"""docstring for LdaLearning"""
def learn_model(self, save_statistic=False, save_model_every=0, compute_sparsity_every=0,
save_top_words_every=0, num_top_words=10, model_folder=None, save_topic_proportions=None):
"""
Args:
data:
save_model_every:
compute_sparsity_every:
save_statistic:
save_top_words_every:
num_top_words:
model_folder:
Returns:
"""
mini_batch_no = 0
# create model_folder
if model_folder is not None:
if not os.path.exists(model_folder):
os.mkdir(model_folder)
if save_topic_proportions is not None:
self.data.init_database(save_topic_proportions)
logger.info("Start learning Lda model, passes over")
# Iterating
while not self.data.check_end_of_data():
mini_batch = self.data.load_mini_batch()
# This using for streaming method
if self.num_terms != self.data.get_num_terms():
self.num_terms = self.data.get_num_terms()
new_model = LdaModel(self.num_terms, self.num_topics, random_type=1)
new_model.model[:, :self.lda_model.model.shape[1]] = self.lda_model.model
self.lda_model = new_model
# run expectation - maximization algorithms
time_e, time_m, param_theta = self.static_online(mini_batch.word_ids_tks, mini_batch.cts_lens)
theta = self.estimate_topic_proportions(param_theta)
if save_topic_proportions is not None:
self.data.store_topic_proportions(theta)
self.lda_model.presence_score += theta.sum(axis=0)
del theta
self.statistics.record_time(time_e, time_m)
# compute documents sparsity
if compute_sparsity_every > 0 and (self.data.mini_batch_no % compute_sparsity_every) == 0:
sparsity = utilizies.compute_sparsity(param_theta, param_theta.shape[0], param_theta.shape[1], 't')
self.statistics.record_sparsity(sparsity)
# save model : lambda, beta, N_phi
if save_model_every > 0 and (self.data.mini_batch_no % save_model_every) == 0:
model_file = model_folder + '/model_batch' + str(mini_batch_no) + '.txt'
self.lda_model.save(model_file)
# save top words
if save_top_words_every > 0 and (self.data.mini_batch_no % save_top_words_every) == 0:
top_words_file = model_folder + '/top_words_batch_' + str(mini_batch_no) + '.txt'
self.lda_model.print_top_words(num_top_words, vocab_file=self.data.vocab_file, display_result=top_words_file)
if self.data.end_of_file and not self.data.check_end_of_data():
self.lda_model.presence_score *= 0
mini_batch_no += 1
# save learning statistic
if save_statistic:
time_file = model_folder + '/time' + str(self.data.mini_batch_no) + '.csv'
self.statistics.save_time(time_file)
if compute_sparsity_every > 0:
sparsity_file = model_folder + '/sparsity' + str(self.data.mini_batch_no) + '.csv'
self.statistics.save_sparsity(sparsity_file)
# Finish
logger.info('Finish training!!!')
return self.lda_model
| [
11748,
25064,
11,
28686,
198,
17597,
13,
6978,
13,
33295,
7,
418,
13,
6978,
13,
397,
2777,
776,
7,
418,
13,
6978,
13,
15908,
3672,
7,
834,
7753,
834,
8,
1343,
31051,
6,
1343,
705,
40720,
492,
6,
4008,
198,
6738,
256,
4029,
571,
... | 2.120625 | 1,857 |
from os.path import join as pjoin
from os import listdir
import pandas as pd
from pandas import DataFrame
if __name__ == '__main__':
main()
| [
6738,
28686,
13,
6978,
1330,
4654,
355,
279,
22179,
198,
6738,
28686,
1330,
1351,
15908,
198,
11748,
19798,
292,
355,
279,
67,
198,
6738,
19798,
292,
1330,
6060,
19778,
628,
628,
198,
198,
361,
11593,
3672,
834,
6624,
705,
834,
12417,
... | 2.98 | 50 |
"""Markdown rendering with python-markdown2.
https://github.com/trentm/python-markdown2
"""
import markdown2
from django import template
register = template.Library()
@register.filter()
def markdown(md):
"""Render html from markdown string."""
if not md:
return ""
return markdown2.markdown(md, extras={
"tables": None,
"code-friendly": None,
"html-classes": {
'table': 'table table-striped',
},
})
| [
37811,
9704,
2902,
14837,
351,
21015,
12,
4102,
2902,
17,
13,
198,
198,
5450,
1378,
12567,
13,
785,
14,
83,
1156,
76,
14,
29412,
12,
4102,
2902,
17,
198,
37811,
198,
198,
11748,
1317,
2902,
17,
198,
6738,
42625,
14208,
1330,
11055,
... | 2.471204 | 191 |
from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^areas/$', views.AreasView.as_view()),
# 如果路由匹配成功,会生成一个字典数据{'pk':1300}
url(r'^areas/(?P<pk>\d+)/$', views.AreaView.as_view()),
url(r'^addresses/$', views.AddressView.as_view()),
url(r'^addresses/(?P<pk>\d+)/$', views.AddressView.as_view()),
url(r'^addresses/(?P<pk>\d+)/status/$', views.AddressStatus.as_view()),
url(r'^addresses/(?P<pk>\d+)/title/$', views.AddressStatus.as_view()),
]
| [
6738,
42625,
14208,
13,
10414,
13,
6371,
82,
1330,
19016,
198,
6738,
764,
1330,
5009,
198,
198,
6371,
33279,
82,
796,
685,
198,
220,
220,
220,
19016,
7,
81,
6,
61,
533,
292,
32624,
3256,
5009,
13,
8491,
292,
7680,
13,
292,
62,
117... | 1.874046 | 262 |
RESULTS = [({'scale_factor': 1000, 'threads': 1, 'txn_flags': 1, 'db': 'kvdb', 'bench': 'ycsb'}, (455923.0, 0.0)), ({'scale_factor': 1000, 'threads': 1, 'txn_flags': 1, 'db': 'ndb-proto2', 'bench': 'ycsb'}, (392189.0, 0.0)), ({'scale_factor': 4000, 'threads': 4, 'txn_flags': 1, 'db': 'kvdb', 'bench': 'ycsb'}, (1837830.0, 0.0)), ({'scale_factor': 4000, 'threads': 4, 'txn_flags': 1, 'db': 'ndb-proto2', 'bench': 'ycsb'}, (1386150.0, 0.0)), ({'scale_factor': 8000, 'threads': 8, 'txn_flags': 1, 'db': 'kvdb', 'bench': 'ycsb'}, (3117300.0, 0.0)), ({'scale_factor': 8000, 'threads': 8, 'txn_flags': 1, 'db': 'ndb-proto2', 'bench': 'ycsb'}, (2378310.0, 0.0)), ({'scale_factor': 12000, 'threads': 12, 'txn_flags': 1, 'db': 'kvdb', 'bench': 'ycsb'}, (3941100.0, 0.0)), ({'scale_factor': 12000, 'threads': 12, 'txn_flags': 1, 'db': 'ndb-proto2', 'bench': 'ycsb'}, (3129000.0, 0.0)), ({'scale_factor': 16000, 'threads': 16, 'txn_flags': 1, 'db': 'kvdb', 'bench': 'ycsb'}, (4299420.0, 0.0)), ({'scale_factor': 16000, 'threads': 16, 'txn_flags': 1, 'db': 'ndb-proto2', 'bench': 'ycsb'}, (3477480.0, 0.0)), ({'scale_factor': 20000, 'threads': 20, 'txn_flags': 1, 'db': 'kvdb', 'bench': 'ycsb'}, (4436690.0, 0.0)), ({'scale_factor': 20000, 'threads': 20, 'txn_flags': 1, 'db': 'ndb-proto2', 'bench': 'ycsb'}, (3591450.0, 0.0)), ({'scale_factor': 24000, 'threads': 24, 'txn_flags': 1, 'db': 'kvdb', 'bench': 'ycsb'}, (4492090.0, 0.0)), ({'scale_factor': 24000, 'threads': 24, 'txn_flags': 1, 'db': 'ndb-proto2', 'bench': 'ycsb'}, (3583380.0, 0.0)), ({'scale_factor': 28000, 'threads': 28, 'txn_flags': 1, 'db': 'kvdb', 'bench': 'ycsb'}, (4523280.0, 0.0)), ({'scale_factor': 28000, 'threads': 28, 'txn_flags': 1, 'db': 'ndb-proto2', 'bench': 'ycsb'}, (3737430.0, 0.0)), ({'scale_factor': 32000, 'threads': 32, 'txn_flags': 1, 'db': 'kvdb', 'bench': 'ycsb'}, (4557360.0, 0.0)), ({'scale_factor': 32000, 'threads': 32, 'txn_flags': 1, 'db': 'ndb-proto2', 'bench': 'ycsb'}, (4139190.0, 0.0)), ({'scale_factor': 1, 
'threads': 1, 'txn_flags': 1, 'db': 'kvdb', 'bench': 'tpcc'}, (28194.3, 0.0)), ({'scale_factor': 1, 'threads': 1, 'txn_flags': 1, 'db': 'ndb-proto2', 'bench': 'tpcc'}, (15643.4, 0.0)), ({'scale_factor': 4, 'threads': 4, 'txn_flags': 1, 'db': 'kvdb', 'bench': 'tpcc'}, (103030.0, 0.0)), ({'scale_factor': 4, 'threads': 4, 'txn_flags': 1, 'db': 'ndb-proto2', 'bench': 'tpcc'}, (58260.7, 0.866664)), ({'scale_factor': 8, 'threads': 8, 'txn_flags': 1, 'db': 'kvdb', 'bench': 'tpcc'}, (199311.0, 0.0)), ({'scale_factor': 8, 'threads': 8, 'txn_flags': 1, 'db': 'ndb-proto2', 'bench': 'tpcc'}, (115993.0, 1.83333)), ({'scale_factor': 12, 'threads': 12, 'txn_flags': 1, 'db': 'kvdb', 'bench': 'tpcc'}, (288046.0, 0.0)), ({'scale_factor': 12, 'threads': 12, 'txn_flags': 1, 'db': 'ndb-proto2', 'bench': 'tpcc'}, (161253.0, 2.68333)), ({'scale_factor': 16, 'threads': 16, 'txn_flags': 1, 'db': 'kvdb', 'bench': 'tpcc'}, (369982.0, 0.0)), ({'scale_factor': 16, 'threads': 16, 'txn_flags': 1, 'db': 'ndb-proto2', 'bench': 'tpcc'}, (214555.0, 3.24999)), ({'scale_factor': 20, 'threads': 20, 'txn_flags': 1, 'db': 'kvdb', 'bench': 'tpcc'}, (458774.0, 0.0)), ({'scale_factor': 20, 'threads': 20, 'txn_flags': 1, 'db': 'ndb-proto2', 'bench': 'tpcc'}, (260806.0, 3.78332)), ({'scale_factor': 24, 'threads': 24, 'txn_flags': 1, 'db': 'kvdb', 'bench': 'tpcc'}, (544124.0, 0.0)), ({'scale_factor': 24, 'threads': 24, 'txn_flags': 1, 'db': 'ndb-proto2', 'bench': 'tpcc'}, (296078.0, 4.59998)), ({'scale_factor': 28, 'threads': 28, 'txn_flags': 1, 'db': 'kvdb', 'bench': 'tpcc'}, (616619.0, 0.0)), ({'scale_factor': 28, 'threads': 28, 'txn_flags': 1, 'db': 'ndb-proto2', 'bench': 'tpcc'}, (320886.0, 5.46665)), ({'scale_factor': 32, 'threads': 32, 'txn_flags': 1, 'db': 'kvdb', 'bench': 'tpcc'}, (646355.0, 0.0)), ({'scale_factor': 32, 'threads': 32, 'txn_flags': 1, 'db': 'ndb-proto2', 'bench': 'tpcc'}, (295248.0, 4.09999))]
| [
46274,
796,
685,
15090,
6,
9888,
62,
31412,
10354,
8576,
11,
705,
16663,
82,
10354,
352,
11,
705,
17602,
77,
62,
33152,
10354,
352,
11,
705,
9945,
10354,
705,
74,
85,
9945,
3256,
705,
26968,
10354,
705,
88,
6359,
65,
6,
5512,
357,
... | 2.112013 | 1,848 |
# -*- coding: utf-8 -*-
"PLOTTING FILE"
import matplotlib.pyplot as plt
import Graphics as artist
| [
2,
532,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
198,
1,
6489,
29089,
2751,
45811,
1,
198,
11748,
2603,
29487,
8019,
13,
9078,
29487,
355,
458,
83,
220,
198,
11748,
19840,
355,
6802,
628,
198
] | 2.589744 | 39 |
"""
# SOURCE
https://gum.co/prselectionui
https://github.com/parzival-roethlein/prmaya
# DESCRIPTION
A dynamic User Interface (UI) to manage selections and poses in Autodesk Maya.
# USAGE
Put the file prSelectionUi.py in your Maya scripts folder and execute:
- Python:
import prSelectionUi;prSelectionUi.UI()
- MEL (for marking menu, ...):
python("import prSelectionUi;prSelectionUi.UI()");
# LINKS
- Demo video
https://vimeo.com/pazrot/prselectionui
- Background information on my blog
http://pazrot3d.blogspot.com/2012/03/prselectionui.html
- Donate: (This was written in my spare time. If you found it useful in Maya or for coding, consider supporting the author)
https://www.paypal.com/cgi-bin/webscr?cmd=_s-xclick&hosted_button_id=7X4EJ8Z7NUSQW
# FEATURES
- User can add/remove/edit any components at any time (sets/selections/poses/members)
- All information is stored in the scene as objectSets (works when referenced)
-- These sets can be exported/imported between scenes like any Maya object
- Context pop-up menus (RMC=Right-mouse-click, MMC=Middle-mouse-click):
-- RMC on set-menu (top row): Create/Edit Sets and adjust UI (dockable, hide bar)
-- RMC on empty set-area: Create Selection/Pose
--- RMC on Selection/Pose: Create/Edit Selection/Pose/Members
---- MMC on Selection/Pose for extra commands (keyframe, reset, ..)
# VERSIONS
2016-06-01 / 0.9.9: nonforced dockable confirmdialog
2012-12-25 / 0.9.8: Pose % items added to Pose middle mouse click menu // Autofocus on panel (tool shortcuts keep working)
2012-12-15 / 0.9.7: added option "UI: toggle tablayout" to switch display between tablayout and drop down menu
2012-12-12 / 0.9.6: bugfix (maya crashed for some users) // does not work with older sets
2012-12-11 / 0.9.5: loads faster, no crashes // changed mouse-click menus // new members UI // tabs are now called sets and in optionMenu
2012-04-20 / 0.9.3: pose support added // tweaks (button ordering,..) // can't read sets from old version
2012-04-15 / 0.9.2: tweaks (no popup when changing member status, show/hide shape visible in button,..)
2012-04-13 / 0.9.1: removed limitations // new features // can't read sets from old version
2012-03-01 / 0.9.0: first version
# TODO bugs
- when adding nucleus to selection: api error...
# TODO
- create pose option: [radioBtnGrp] all channelBox or all keyable
- create pose option: [checkbox] if channelBox selected -> only those
- pose option: replace pose
- apply pose options: only apply to channelBox selected attr
- selection option: "replace with selected" next to "Add selected"
- improve default name of tab/button (from selection)
- button to print command that should be run to create UI as it currently is and/or config file for that?
- UI options as arguments in .UI() call
- option to have multiple instances of window open (how do changes propagate? always force refresh on inactive ui?)
- set naming convention...
- duplicate selection / pose button
- search and replace for members (give user string output, input field, they can edit in editor of their choice) this also helps when flooding the same vertices on differnt meshes
# TODO nice to have
- faster loading: use API to search/read sets?
- store pose nodes in dictionary with attr and values for faster 'pose selected nodes' cmd
- add color option in create new button window
- shift/ctrl+mouse click on selection center button with maya viewport functionality
- marking menu as alternative to the right click menus etc
- export/import set UI button (right click menu?)
- API commands to edit data // put attributes (color, ...) in channelBox for easy batch editing from user
- save set to shelf
"""
import pymel.core as pm
import maya.mel as mm
class UI(pm.uitypes.Window):
"""
window of prSelectionUi.
Maya-UI-Hierarchy:
(UI:) window > formLayout > horizontalLayout > optionMenu, tabLayout
(Set:) verticalLayout >
(Element:) horizontalLayout > buttons (+,select,- OR pose)
"""
# constants
_TITLE = 'prSelectionUi_0_99' # dots or space will break pm.window(cls.__TITLE)
_FORMLAYOUT = 'prSelForm'
_TABLAYOUT = 'prSelTab'
_DOCKCONTROL = 'prSelectionUiDoc'
# variables
sets = None
setsOptionMenu = None
setsTabLayout = None
cbConfirmDialogs = None
dockControl = None
def __new__(cls):
""" delete possible old window and create new instance """
if pm.window(cls._TITLE, exists=True):
pm.deleteUI(cls._TITLE)
if pm.dockControl(cls._DOCKCONTROL, ex=1):
pm.deleteUI(cls._DOCKCONTROL, control=1)
self = pm.window(cls._TITLE, title=cls._TITLE)
return pm.uitypes.Window.__new__(cls, self)
def __init__(self):
"""
create UI elements (layouts, buttons)
show window
try to load from scene, else create default
"""
# initialize variables
self.sets = []
# formLayout base
form = pm.formLayout(self._FORMLAYOUT)
with form:
# optionMenu
self.setsOptionMenu = pm.optionMenu(cc=pm.Callback(self.setsMenu_cmd))
pm.popupMenu(parent=form, button=3)
pm.menuItem('Create Set', c=pm.Callback(self.set_new))
pm.menuItem('Delete Set', c=pm.Callback(self.set_deleteActive))
pm.menuItem(divider=1)
pm.menuItem('Edit Set Name', c=pm.Callback(self.set_editName))
pm.menuItem('Edit Set Index', c=pm.Callback(self.set_editIndex))
pm.menuItem(divider=1)
pm.menuItem('Reload Sets', c=pm.Callback(self.set_load))
pm.menuItem(divider=1)
pm.menuItem(l='UI: Toggle title bar', c=pm.Callback(self.ui_toggleTitleBar))
pm.menuItem(l='UI: Make dockable', c=pm.Callback(self.ui_makeDockable))
pm.menuItem(l='UI: Toggle tablayout', c=pm.Callback(self.ui_toggleTablayout))
self.cbConfirmDialogs = pm.menuItem(l='UI: Confirm dialogs', cb=1)
self.cbFocusPanel = pm.menuItem(l='UI: Autofocus panel', cb=1)
# tabLayout
self.setsTabLayout = pm.tabLayout(self._TABLAYOUT, tabsVisible=0)
# form.redistribute(0,1)
# no redistribute, because optionMenu width will resize to minimum when editing members and only refresh when UI gets modified again
form.attachForm(self.setsOptionMenu, 'top', 0) # unnecessary?
form.attachForm(self.setsOptionMenu, 'left', 0)
form.attachControl(self.setsTabLayout, 'top', 0, self.setsOptionMenu)
form.attachForm(self.setsTabLayout, 'left', 0)
form.attachForm(self.setsTabLayout, 'right', 0)
form.attachForm(self.setsTabLayout, 'bottom', 0)
# miConfirm.setCommand(..)# maya 2012+ only (pymel version ...?)
# load tabs from scene
self.set_load(1)
# show window
self.show()
def createWarning(self, msg):
""" create a warning for the user """
import maya.mel as mm
mm.eval('warning "%s"' % msg)
def setsMenu_cmd(self):
""" command to execute when set optionMenu items get selected """
count = self.setsOptionMenu.getNumberOfItems()
selected = self.setsOptionMenu.getSelect()
self.setsTabLayout.setSelectTabIndex(selected)
self.setsMenu_updateSelected()
def setsMenu_updateSelected(self):
""" select proper menuItem """
selectIndex = self.setsTabLayout.getSelectTabIndex()
if selectIndex == 0:
return
self.setsOptionMenu.setSelect(selectIndex)
def tryConfirmDialog(self, msg, forced=False):
""" create confirm dialog if option is enabled """
# get confirmDialog checkBox value
confirmDialogsValue = pm.menuItem(self.cbConfirmDialogs, q=1, checkBox=1)
print('val: %d' % confirmDialogsValue)
print(forced)
# create pop-up if required
if confirmDialogsValue or forced:
result = pm.confirmDialog(title='Confirm ', message=msg, button=['Yes', 'No'],
defaultButton='Yes', cancelButton='No', dismissString='No')
if result == 'Yes':
return True
else:
return False
else:
return True
def tryFocusPanel(self):
""" set focus on panel under cursor, if the option is enabled. So shortcuts for move-tool,... still work """
focusPanelValue = pm.menuItem(self.cbFocusPanel, q=1, checkBox=1)
if focusPanelValue:
# The next line makes little sense, but is required for shortcuts to work in the panel below
pm.setFocus(self)
# set focus on panel under prSelectionUi
pm.setFocus(pm.getPanel(up=1))
def ui_toggleTitleBar(self):
""" toggle the UI title bar on/off, to reduce size of UI """
# error catching
if self.dockControl:
mm.eval('warning "Can\'t remove title bar of a window that is dockable."')
return
# save height and width, else it changes to default
height = self.getHeight()
width = self.getWidth()
# toggle title bar
if self.getTitleBar():
self.setTitleBar(0)
else:
self.setTitleBar(1)
# set height/width
self.setHeight(height)
self.setWidth(width)
def ui_toggleTablayout(self):
""" toggle tablayout visibility / optionMenu visibility """
if self.setsTabLayout.getTabsVisible():
self.setsTabLayout.setTabsVisible(0)
# select proper optionMenu item
self.setsOptionMenu.setManage(1)
self.setsOptionMenu.setSelect(self.setsTabLayout.getSelectTabIndex())
else:
self.setsTabLayout.setTabsVisible(1)
self.setsOptionMenu.setManage(0)
def ui_makeDockable(self):
""" make the window dockable """
# skip if already dockable
if self.dockControl:
mm.eval('warning "The window is dockable already."')
return
# warning for user
if not self.tryConfirmDialog('Make UI dockable?\n'):
return
# create dockControl
self.dockControl = pm.dockControl(self._DOCKCONTROL, l=self, content=self, area='left',
allowedArea=['left', 'right'])
# #########################
# SETS
# #########################
def set_load(self, forced=0):
""" load tabs (sets) from scene """
# warning
if not forced:
if not self.tryConfirmDialog('Load tabs from sets/scene?'):
return
# delete old UI elements
for x in range(len(self.sets)):
self.sets[0].delete(1, 1)
if self.sets != []:
raise NameError('self.sets should be empty error')
# create UI
tabSets = Set.findSets()
if tabSets:
# loading feedback for user
print('\n###### start loading sets ######')
# load
for x, eachSet in enumerate(tabSets):
self.set_new(eachSet)
# loading feedback for user
print('###### finished loading sets ######\n')
def set_sort(self):
""" order tabs by index and name """
# create index dictionary with tab-dictionary
dic = {}
for each in self.sets:
if dic.has_key(each.tabIndex):
dic[each.tabIndex][each.referencePrefix + each.tabName + each.shortName()] = each
else:
dic[each.tabIndex] = {each.referencePrefix + each.tabName + each.shortName(): each}
ordered = []
for eachIndexKey in sorted(dic.iterkeys()):
for eachTabKey in sorted(dic[eachIndexKey].iterkeys()):
ordered.append(dic[eachIndexKey][eachTabKey])
self.sets = ordered
def set_getActive(self, noReferenceMessage=None):
""" return the currently active tab, optional error if active tab is reference """
# error check
if not self.sets:
self.setsMenu_updateSelected()
raise NameError(
'\n\n\n ------- \nYou have to create a set first (right click on drop down menu at top)')
# return selected tab
selectedLayout = self.setsTabLayout.getSelectTab()
for each in self.sets:
if each.shortName() == selectedLayout:
# found active tab
if noReferenceMessage and each.referencePrefix:
import maya.mel as mm
raise NameError(noReferenceMessage)
return each
def set_new(self, fromSet=None):
""" create new tab, either with user input, or from given set """
# get tab name
tabName = Set.getName(fromSet)
if not tabName:
return
with self.setsTabLayout:
# create tab: instance / UI element
newSet = Set(tabName, self, fromSet)
def set_deleteActive(self):
""" delete active tab """
self.set_getActive('Can\'t delete referenced tab!').delete()
def set_editName(self):
""" rename the currently active tab """
self.set_getActive('Can\'t rename referenced tab!').rename()
def set_editIndex(self):
""" change index/position of active tab """
self.set_getActive().changeIndex()
def set_newElement(self, elementType):
""" create element of given type """
self.set_getActive('Can\'t add %s to referenced tab!' % elementType).element_create(elementType)
def set_toShelf(self):
""" save set to, so pressing shelf will create set """
print('save to shelf')
class Set(pm.uitypes.FormLayout): # scroll layout?
""" class for each tab in the UI window """
# constants
_SET_PREFIX = ''
_SET_SUFFIX = '_tab_set'
_ATTR_TABNAME = 'prSel_TabName'
_ATTR_SELECTION = 'prSel_TabSelection'
_ATTR_INDEX = 'prSel_TabIndex'
# variables
asMenuItem = None
parentUi = None
referencePrefix = None
elements = None
# maya nodes / attributes
set = None
tabName = None
tabIndex = None
@staticmethod
def getName(fromSet=None):
"""
get name from given set or user input
extra function because it should be known before instance is created
"""
if fromSet:
# get tab name from set
tabName = fromSet.attr(Set._ATTR_TABNAME).get()
Set.checkName(tabName)
else:
# get tab name from user
result = pm.promptDialog(title='New Set', message='Name for new Set:', button=['OK', 'Cancel'],
defaultButton='OK', cancelButton='Cancel', dismissString='Cancel')
if result != 'OK':
return None
tabName = str(pm.promptDialog(q=1, text=1))
Set.checkName(tabName)
return tabName
@staticmethod
def checkName(name):
""" check if given name is valid for a tab """
if name == '':
raise NameError('Invalid name: "" (nothing)')
@staticmethod
def findSets():
""" return all tab sets from scene """
sceneSets = []
# find sets
for eachSet in (pm.ls(type='objectSet')):
if eachSet.hasAttr(Set._ATTR_TABNAME):
sceneSets.append(eachSet)
# return
return sceneSets
def __new__(cls, name, parentUi, fromSet=None):
""" get name from user or read from set. then create instance """
self = pm.formLayout(nd=100)
return pm.uitypes.FormLayout.__new__(cls, self)
def __init__(self, name, parentUi, fromSet=None):
""" initialize variables """
if fromSet:
print('- set: %s // %s' % (name, fromSet))
# parent init for _reverse,... variables
super(Set, self).__init__()
# variables
self.tabName = name
self.parentUi = parentUi
# right click menu to create the first selection/pose
pm.popupMenu(button=3)
pm.menuItem(label='New selection', c=pm.Callback(self.parentUi.set_newElement, Selection._TYPE))
pm.menuItem(label='New pose', c=pm.Callback(self.parentUi.set_newElement, Pose._TYPE))
# get/create set
if fromSet:
self.set = fromSet
else:
self.createSet()
# read values from set
self.tabIndex = self.set.attr(Set._ATTR_INDEX).get()
if self.set.referenceFile():
self.referencePrefix = self.set.referenceFile().fullNamespace + ':'
else:
self.referencePrefix = ''
# elements
self.elements = []
for each in self.set.dnSetMembers.inputs(): # pm.sets( self.set, q=1 ):# for propper order
if each.hasAttr(Selection._ATTR_TYPE):
self.element_create(Selection._TYPE, each)
elif each.hasAttr(Pose._ATTR_TYPE):
self.element_create(Pose._TYPE, each)
# store in parentUi
self.parentUi.sets.append(self)
# set name in UI
self.parentUi.setsTabLayout.setTabLabel([self, self.getUiName()])
# select new tab
self.parentUi.setsTabLayout.setSelectTab(self.shortName())
# position tab
self.setPosition()
    def delete(self, forced=0, uiOnly=0):
        """ delete tab UI element and set

        forced: skip the confirm dialog when truthy
        uiOnly: remove only the UI (keep the scene set); used for referenced tabs
        """
        # skip reference tabs
        # referenced sets cannot be deleted from this scene
        if self.referencePrefix and not uiOnly:
            return
        # pop-up
        if not forced:
            if not self.parentUi.tryConfirmDialog('Delete tab: "%s" ?' % self.getUiName()):
                return
        # parent delete
        super(Set, self).delete()
        # delete in parent
        self.parentUi.sets.remove(self)
        # delete set
        if not uiOnly:
            pm.delete(self.set)
        # delete menuItem
        pm.deleteUI(self.asMenuItem)
        self.parentUi.setsMenu_updateSelected()
    def setPosition(self):
        """ calculate and set position of given set (by index and alphabet)

        Moves the tab to its sorted position, then rebuilds the matching
        optionMenu item (full rebuild works around a maya 2011 -ia flag crash).
        """
        # order self.sets variable
        self.parentUi.set_sort()
        # get index
        tabPosition = self.parentUi.sets.index(self)
        # adjust UI
        self.parentUi.setsTabLayout.moveTab(
            [self.parentUi.setsTabLayout.getSelectTabIndex(), tabPosition + 1])
        # select with new position
        self.parentUi.setsTabLayout.setSelectTab(self.shortName())
        # delete possible old menuItem
        if self.asMenuItem:
            pm.deleteUI(self.asMenuItem)
        # create menuItem
        # with self.parentUi.setsOptionMenu:# parent flag because of maya 2012
        insertAfterIndex = self.parentUi.setsTabLayout.getSelectTabIndex() - 1
        allMenuItems = pm.optionMenu(self.parentUi.setsOptionMenu, q=1, itemListLong=1)
        if insertAfterIndex == 0:
            insertAfter = ''
        else:
            insertAfter = allMenuItems[insertAfterIndex - 1] # .getItemListLong() # maya(pymel) 2012+
        # shorten long reference-names/namespaces
        # e.g. "3 longNamespace:inner:name" -> "3 longNamesp..:inner:name"
        uiName = self.getUiName()
        if uiName.find(':') != -1:
            shortName = uiName[uiName.rfind(':') + 1:]
            if len(uiName) - len(shortName) > 20:
                uiIndex = uiName[:uiName.find(' ') + 1]
                uiName = uiName[uiName.find(' ') + 1:]
                splitList = uiName.split(':')[:-1]
                splitList.reverse()
                for eachPart in splitList:
                    if len(eachPart) > 10:
                        eachPart = eachPart[:10] + '..'
                    shortName = eachPart + ':' + shortName
                uiName = uiIndex + shortName
        # menuItem
        # if( len( self.parentUi.setsOptionMenu.getItemListLong() ) == 0 ):# only maya(pymel) 2012+
        if not insertAfter:
            if allMenuItems:
                self.asMenuItem = pm.menuItem(uiName, label=uiName, p=self.parentUi.setsOptionMenu, ia="")
            else:
                self.asMenuItem = pm.menuItem(uiName, label=uiName, p=self.parentUi.setsOptionMenu)
        else:
            '''
            # changed from following line, because of maya 2011 bug/crash when using -ia flag (flag works for simple case thou?!)
            #self.asMenuItem = pm.menuItem( uiName, label=uiName, p=self.parentUi.setsOptionMenu, ia=insertAfter )
            '''
            # rebuild menuItems
            # create new label list
            allLabels = []
            for eachItem in allMenuItems:
                allLabels.append(pm.menuItem(eachItem, q=1, label=1))
            allLabels.insert(insertAfterIndex, uiName)
            # delete old menuItems
            for eachItem in allMenuItems:
                pm.deleteUI(eachItem)
            # create menuItems
            for x, each in enumerate(allLabels):
                eachItem = pm.menuItem(each, label=each, p=self.parentUi.setsOptionMenu)
                if x == insertAfterIndex:
                    self.asMenuItem = eachItem
        # update selection
        self.parentUi.setsMenu_updateSelected()
    def changeIndex(self):
        """ change index (tab order) via a prompt dialog and reposition the tab """
        # create message to show user list of all tabs with indices
        strMessage = 'Change index of tab "%s"\n' % (self.getUiName())
        for eachSet in self.parentUi.sets:
            strMessage += '\n' + eachSet.getUiName()
            if eachSet == self:
                strMessage += ' <----- (active)'
        # get input from user
        result = pm.promptDialog(title='Move tab', message=strMessage, tx=self.tabIndex,
                                 button=['OK', 'Cancel'], defaultButton='OK', cancelButton='Cancel',
                                 dismissString='Cancel')
        if result != 'OK':
            return
        # get value
        # NOTE(review): raises ValueError on non-numeric input — no validation here
        newIndex = int(pm.promptDialog(q=1, text=1))
        # change index
        self.tabIndex = newIndex
        self.set.attr(self._ATTR_INDEX).set(newIndex)
        # set UI position
        self.setPosition()
        # set name
        self.parentUi.setsTabLayout.setTabLabel([self, self.getUiName()])
    def createSet(self):
        """ create set of this tab and tag it with the custom attributes """
        # create set
        newSet = pm.sets(empty=1, name=self._SET_PREFIX + self.tabName + self._SET_SUFFIX)
        # add attributes
        # _ATTR_TABNAME marks the set as a tab set (see findSets); index defaults to 1
        newSet.setAttr(self._ATTR_TABNAME, self.tabName, force=1)
        newSet.setAttr(self._ATTR_INDEX, 1, force=1)
        # store set
        self.set = newSet
def getUiName(self):
""" return name for UI display (tab name) """
return str(self.tabIndex) + ' ' + self.referencePrefix + self.tabName
    def rename(self):
        """ rename this tab (overwrote default rename())

        Prompts for a new name, updates the set attribute, the set node name
        and the tab label, then repositions the tab.
        """
        # get name from user
        result = pm.promptDialog(title='Rename tab',
                                 message='Rename tab: "%s" ' % self.getUiName(),
                                 tx=self.tabName,
                                 button=['OK', 'Cancel'], defaultButton='OK', cancelButton='Cancel',
                                 dismissString='Cancel')
        if result == 'OK':
            text = pm.promptDialog(q=1, text=1)
            self.checkName(text)
            self.tabName = text
            self.set.attr(self._ATTR_TABNAME).set(text)
            self.set.rename(self._SET_PREFIX + text + self._SET_SUFFIX)
            # adjust UI
            # rename UI
            self.parentUi.setsTabLayout.setTabLabel([self, self.getUiName()])
            # set UI position
            self.setPosition()
# #########################
# ELEMENT
# #########################
    def element_orderUi(self):
        """ set position (order) of elements in tab to match the set's child order """
        if len(self.elements) == 0:
            return
        # order elements from set children order
        oldSetOrder = []
        for each in self.elements:
            oldSetOrder.append(each.set)
        newSetOrder = self.set.dnSetMembers.inputs() # pm.sets( self.set, q=1 ) not correct order# self.set.members() / asSelectionSet # MPlug error..
        newElementList = []
        for x, each in enumerate(newSetOrder):
            # map each child set back to its UI element
            newElementList.append(self.elements[oldSetOrder.index(each)])
        self.elements = newElementList
        #
        self.element_uiDistribute()
    def element_uiDistribute(self):
        """ attach each elements top/bottom so they share the layout evenly """
        if not self.elements:
            return
        # each element occupies an equal percentage slice of the formLayout
        positionUnit = 100.0 / len(self.elements)
        for x, each in enumerate(self.elements):
            pm.formLayout(self, e=1, ap=[(each, 'top', 0, x * positionUnit),
                                         (each, 'bottom', 0, (x + 1) * positionUnit)])
    def element_orderUiDeferred(self):
        """ order tab-elements deferred to avoid crash when called from element popUpMenu """
        # local import keeps this module importable outside a full Maya session
        import maya.utils as mu
        mu.executeDeferred(self.element_orderUi)
    def element_create(self, elementType, fromSet=0):
        """ create element of given type "pose" or "selection"

        fromSet: optional existing objectSet to load the element from
        """
        # get name
        elementName = SetElement.getName(elementType, fromSet)
        if not elementName:
            return
        # create element
        # the constructors register themselves with this tab (see SetElement.__init__)
        with self:
            if elementType == Selection._TYPE:
                Selection(elementName, self, fromSet)
            elif elementType == Pose._TYPE:
                Pose(elementName, self, fromSet)
class SetElement(pm.uitypes.FormLayout):
    """ base class for tab elements: selections and poses """
    # constants
    _DEFAULTCOLOR = [0.4, 0.4, 0.4]
    _ATTR_NAME = 'prSel_name'
    _ATTR_COLOR = 'prSel_color'
    # has to be overwritten
    _SET_PREFIX = None # 'set_sel_' / 'set_pose_'
    _SET_SUFFIX = None
    _ATTR_TYPE = None # 'prSel_selection' / 'prSel_pose'
    _TYPE = None # 'selection' / 'pose'
    # variables
    parentUi = None
    referencePrefix = None
    set = None
    name = None
    color = None
    rightClickMenu = None
    middleClickMenu = None
    horizontal = None
    # has to be set in child class
    button = None
    @staticmethod
    def checkName(name):
        """ check if the given name string is valid; raises NameError otherwise """
        if name == '':
            raise NameError('Invalid name: "" (nothing)')
    @staticmethod
    def getName(elementType, fromSet=None):
        """ get name from user or given set """
        # from set
        if fromSet:
            return fromSet.attr(SetElement._ATTR_NAME).get()
        # get name from user
        result = pm.promptDialog(title=('New %s' % elementType), message=('Name for new %s:' % elementType),
                                 button=['OK', 'Cancel'], defaultButton='OK', cancelButton='Cancel',
                                 dismissString='Cancel')
        if result == 'OK':
            text = pm.promptDialog(q=1, text=1)
            SetElement.checkName(text)
            return text
        else:
            return None
    def __new__(cls, name, parentUi, fromSet=None):
        """ create new instance (pymel UI element must exist before wrapping) """
        self = pm.formLayout()
        return pm.uitypes.FormLayout.__new__(cls, self)
    def __init__(self, name, parentUi, fromSet=None):
        """ initialize variables, create/read the backing set and build base UI

        name: element name
        parentUi: owning Set (tab)
        fromSet: existing objectSet to load from; when None a new one is created
        """
        if fromSet:
            print('-- element: %s (%s)' % (name, self._TYPE))
        # parent init for _reverse,... variables
        super(SetElement, self).__init__()
        # variables
        self.name = name
        self.parentUi = parentUi
        # assign/create set
        if fromSet:
            self.set = fromSet
        else:
            # create set
            self.set = pm.sets(empty=1, name=self._SET_PREFIX + self.name + self._SET_SUFFIX)
            # attach to tab set
            pm.sets(self.parentUi.set, e=1, add=self.set)
            # add attributes with default values
            self.set.addAttr(self._ATTR_NAME,
                             dt='string') # self.set.setAttr( self._ATTR_NAME, self.name, force=1 ) # problem with some signs (umlaute),..
            self.set.attr(self._ATTR_NAME).set(self.name)
            self.set.addAttr(self._ATTR_COLOR, dt='float3')
            self.set.attr(self._ATTR_COLOR).set(self._DEFAULTCOLOR)
            self.set.addAttr(self._ATTR_TYPE, at='bool', dv=1)
        # read set variables
        self.color = self.set.attr(self._ATTR_COLOR).get()
        if self.set.referenceFile():
            self.referencePrefix = self.set.referenceFile().fullNamespace + ':'
        else:
            self.referencePrefix = ''
        # set color
        self.setColor()
        # attach left and right side to parent layout
        pm.formLayout(self.parentUi, e=1, attachForm=[(self, 'left', 0), (self, 'right', 0)])
        # right click menu
        # editing is disabled for elements coming from referenced files
        if self.referencePrefix:
            dynamicSel = 0
        else:
            dynamicSel = 1
        with self:
            self.rightClickMenu = pm.popupMenu(button=3)
            pm.menuItem(l='New %s' % Selection._TYPE,
                        c=pm.Callback(self.parentUi.element_create, Selection._TYPE), en=dynamicSel)
            pm.menuItem(l='New %s' % Pose._TYPE, c=pm.Callback(self.parentUi.element_create, Pose._TYPE),
                        en=dynamicSel)
            pm.menuItem(l='Delete %s' % self._TYPE, c=pm.Callback(self.delete), en=dynamicSel)
            pm.menuItem(divider=1)
            pm.menuItem(l='Edit Name', c=pm.Callback(self.cmd_rename), en=dynamicSel)
            pm.menuItem(l='Edit Position', c=pm.Callback(self.cmd_changePosition))
            pm.menuItem(l='Edit Color', c=pm.Callback(self.cmd_changeColor))
        # middle click menu
        # child classes fill this menu with their own items
        with self:
            self.middleClickMenu = pm.popupMenu(button=2)
        # store in tab
        self.parentUi.elements.append(self)
        # order UI
        self.parentUi.element_uiDistribute()
    def delete(self):
        """ delete UI elements, sets, instance """
        # warning
        if not self.parentUi.parentUi.tryConfirmDialog(
                'Delete %s: "%s" ?' % (self._TYPE, self.getUiName())):
            return
        # remove from tab selection list
        self.parentUi.elements.remove(self)
        # delete set (remove member first, else parent set will get deleted if last member gets deleted)
        pm.sets(self.parentUi.set, remove=self.set)
        pm.delete(self.set)
        # reconnect sets, to keep steady input attribute counting
        allSets = self.parentUi.set.dnSetMembers.inputs()
        self.parentUi.set.dnSetMembers.disconnect()
        for each in allSets:
            pm.sets(self.parentUi.set, add=each)
        # delete from UI
        # deferred, because deleting the UI from within its own popup menu crashes
        import maya.utils as mu
        mu.executeDeferred(self.deleteUi)
    def deleteUi(self):
        """ delete from UI """
        pm.deleteUI(self)
        # reorder selections
        self.parentUi.element_uiDistribute()
    def setColor(self):
        """ set color of selection and store in attr """
        # attr
        self.set.attr(self._ATTR_COLOR).set(self.color)
        # UI
        pm.formLayout(self, e=1, bgc=self.color)
    def getUiName(self):
        """ return name for UI display (tab name) """
        return self.name
    def cmd_rename(self):
        """ rename variable, button label and set """
        # get name from user
        result = pm.promptDialog(title=('Rename %s' % self._TYPE),
                                 message='Rename %s: "%s"' % (self._TYPE, self.name), text=self.name,
                                 button=['OK', 'Cancel'], defaultButton='OK', cancelButton='Cancel',
                                 dismissString='Cancel')
        if result == 'OK':
            text = pm.promptDialog(q=1, text=1)
            self.checkName(text)
            # store
            self.name = text
            # update button label
            self.button.setLabel(self.getUiName())
            # rename set
            self.set.attr(self._ATTR_NAME).set(text)
            self.set.rename(self._SET_PREFIX + text + self._SET_SUFFIX)
    def cmd_changePosition(self):
        """ change element position """
        # create pop-up message with all selections for user
        strMessage = 'Enter new position index for %s "%s"\n' % (self._TYPE, self.getUiName())
        for x, eachSel in enumerate(self.parentUi.elements):
            strMessage += '\n%s: %s' % (x, eachSel.getUiName())
            if eachSel == self:
                strMessage += ' <----- (active)'
        # get input from user
        result = pm.promptDialog(title='Move %s' % self._TYPE, message=strMessage,
                                 tx=self.parentUi.elements.index(self),
                                 button=['OK', 'Cancel'], defaultButton='OK', cancelButton='Cancel',
                                 dismissString='Cancel')
        if result == 'OK':
            # get value
            newIndex = pm.promptDialog(q=1, text=1)
            if not newIndex.isdigit():
                raise NameError('Given value is not a digit: ', newIndex)
            newIndex = int(newIndex)
            # create setList with new order
            # get sets
            allSets = self.parentUi.set.dnSetMembers.inputs() # pm.sets( self.parentUi.set, q=1 ) # self.parentUi.set.members() # .members() error in maya 2011
            # remove from hierarchy
            self.parentUi.set.removeMembers(allSets)
            # change order
            allSets.insert(newIndex, allSets.pop(allSets.index(self.set)))
            # re-parent
            # self.parentUi.set.resetTo( allSets ) # ignores order // messes up order (not same in input array attr as hierarchy)
            # self.parentUi.set.addMembers( allSets ) # makes correct connections in array attr, but in maya hierarchy order is wrong (script should work anyways)
            # pm.sets( self.parentUi.set, add=allSets ) # ignores order - not sure, definitely something was wrong
            for each in allSets:
                # self.parentUi.set.add( each )# creates history in scriptEditor for each
                pm.sets(self.parentUi.set, add=each)
            # update UI positions
            self.parentUi.element_orderUiDeferred()
    def cmd_changeColor(self):
        """ edit color of selection """
        # color UI for user
        pm.colorEditor(rgb=self.color)
        if pm.colorEditor(q=1, result=1):
            self.color = pm.colorEditor(q=1, rgb=1)
            self.setColor()
    def object_getSceneName(self, objectName):
        """ return scene name of given object. Fixed for referenced files """
        # prefix every path segment ("|a|b" -> "ns:|ns:a|ns:b" style expansion)
        return self.parentUi.referencePrefix + objectName.replace('|', '|' + self.parentUi.referencePrefix)
class Pose(SetElement):
    """ class to apply and store a pose within a tab """
    # constants to overwrite
    _SET_PREFIX = ''
    _SET_SUFFIX = '_pose_set'
    _ATTR_TYPE = 'prSel_pose'
    _TYPE = 'pose'
    # constants custom
    _ATTR_POSEVALUES = 'prSel_poseValues'
    # variables custom
    poseValues = None
    def __init__(self, name, parentUi, fromSet=None):
        """ create/read pose specific attributes, create button and menuItems """
        super(Pose, self).__init__(name, parentUi, fromSet)
        # modify new set
        if not fromSet:
            # create pose attribute
            self.set.addAttr(self._ATTR_POSEVALUES, dt='stringArray')
            # read pose attributes and values from the current selection
            poseAttrs = []
            for each in pm.ls(sl=1):
                for eachAttr in each.listAttr(k=1, unlocked=1):
                    poseAttrs.append(str(eachAttr) + ' ' + str(eachAttr.get()))
                for eachAttr in each.listAttr(cb=1, unlocked=1):
                    poseAttrs.append(str(eachAttr) + ' ' + str(eachAttr.get()))
            # store pose
            self.set.attr(self._ATTR_POSEVALUES).set(poseAttrs)
        # initialize pose variable: {nodeName: ["node.attr value", ...]}
        self.poseValues = {}
        for each in self.set.attr(self._ATTR_POSEVALUES).get():
            eachNode = each[:each.find('.')]
            eachNode = self.object_getSceneName(eachNode)
            # membership test instead of dict.has_key() (removed in Python 3)
            if eachNode not in self.poseValues:
                self.poseValues[eachNode] = []
            self.poseValues[eachNode].append(each)
        # UI
        horizontal = pm.horizontalLayout(self, spacing=0)
        with horizontal:
            self.button = pm.button(l=self.getUiName(), c=pm.Callback(self.cmd_setPose))
            horizontal.redistribute()
        # menuItems
        with self.rightClickMenu:
            pm.menuItem(divider=1)
            pm.menuItem(l='Print pose', c=pm.Callback(self.cmd_printPose))
        with self.middleClickMenu:
            pm.menuItem(l='Select nodes', c=pm.Callback(self.cmd_selectNodes))
            pm.menuItem(divider=1)
            pm.menuItem(l='Pose selected', c=pm.Callback(self.cmd_setPoseOnSelected))
            pm.menuItem(l='Pose selected - XY%', c=pm.Callback(self.ui_poseCustomPercent, True))
            pm.menuItem(divider=1)
            pm.menuItem(l='Pose - 20%', c=pm.Callback(self.cmd_setPose, 20))
            pm.menuItem(l='Pose - 40%', c=pm.Callback(self.cmd_setPose, 40))
            pm.menuItem(l='Pose - 60%', c=pm.Callback(self.cmd_setPose, 60))
            pm.menuItem(l='Pose - 80%', c=pm.Callback(self.cmd_setPose, 80))
            pm.menuItem(l='Pose - XY%', c=pm.Callback(self.ui_poseCustomPercent))
    def ui_poseCustomPercent(self, poseSelected=False):
        """ UI to let user set pose from 1-100% """
        # create UI
        windowTitle = self.name + ' - Pose'
        if poseSelected:
            windowTitle += ' selected'
        win = pm.window(title=windowTitle, h=40, w=250)
        form = pm.horizontalLayout(ratios=[4, 1], spacing=5)
        with form:
            igPosePercent = pm.intSliderGrp(l='Pose __%', cw3=[50, 30, 10], ad2=1, field=1, min=1, max=100,
                                            value=50, step=1)
            pm.button(l='Apply', c=pm.Callback(self.ui_poseCustomPercentCmd, igPosePercent, poseSelected))
        form.redistribute(5, 1)
        pm.showWindow(win)
    def ui_poseCustomPercentCmd(self, intSliderGrp_percent, poseSelected):
        """ get int slider group value and call pose function """
        percent = pm.intSliderGrp(intSliderGrp_percent, q=1, value=1)
        if poseSelected:
            self.cmd_setPoseOnSelected(percent)
        else:
            self.cmd_setPose(percent)
    def setGivenPose(self, poseAttr, percent=100):
        """ set pose of given string ("node.attr value"); percent blends with current value """
        eachAttr, eachValue = poseAttr.split(' ')
        eachAttr = self.object_getSceneName(eachAttr)
        # check for boolean
        if eachValue == 'True':
            eachValue = 1
        elif eachValue == 'False':
            eachValue = 0
        else:
            eachValue = float(eachValue)
        # percent
        if percent < 100:
            oldValue = pm.getAttr(eachAttr)
            # check for boolean
            if type(oldValue) is bool:
                if oldValue:
                    oldValue = 1
                else:
                    oldValue = 0
            # calculate new value # float casting because of Python integer rounding (80/100 = 0) for bool attrs
            eachValue = (eachValue * percent + oldValue * (100 - percent)) / 100.0
        # set attr
        try:
            pm.setAttr(eachAttr, eachValue)
        except Exception:
            # best-effort: attr may be locked, connected or missing — warn, don't abort
            self.parentUi.parentUi.createWarning('Invalid objects in pose: ')
            print(eachAttr)
    def cmd_setPose(self, percent=100):
        """ apply stored pose """
        for eachPoseAttr in self.set.attr(self._ATTR_POSEVALUES).get():
            self.setGivenPose(eachPoseAttr, percent)
    def cmd_setPoseOnSelected(self, percent=100):
        """ set pose on selected objects """
        sel = pm.ls(sl=1)
        for each in sel:
            each = str(each)
            # membership test instead of dict.has_key() (removed in Python 3)
            if each in self.poseValues:
                for eachAttr in self.poseValues[each]:
                    self.setGivenPose(eachAttr, percent)
    def cmd_printPose(self):
        """ print out attributes and vales of pose """
        print(self.set.attr(self._ATTR_POSEVALUES).get())
    def cmd_selectNodes(self):
        """ select nodes that are part of pose """
        # list() so a Python 3 dict view is also accepted
        pm.select(list(self.poseValues.keys()))
class Selection(SetElement):
    """ class for each selection button group inside of a tab """
    # parent-constants to overwrite
    _SET_PREFIX = ''
    _SET_SUFFIX = '_selection_set'
    _ATTR_TYPE = 'prSel_selection'
    _TYPE = 'selection'
    # constants
    # _ATTR_MEMBER = 'prSel_SelMember'# stringArray
    _ATTR_MEMBER = 'prSel_SelMember'
    _ATTR_SHAPEVIS = 'prSel_shapeVisibility'
    # variables
    selShapevis = None
    members = None
    def __init__(self, name, parentUi, fromSet=None):
        """ initialize variables, create/read set attributes and build the button row """
        super(Selection, self).__init__(name, parentUi, fromSet)
        # modify new set
        if not fromSet:
            # create custom attributes
            self.set.addAttr(self._ATTR_SHAPEVIS, at='bool', dv=1)
            # self.set.addAttr( self._ATTR_MEMBER, dt='stringArray' )# stringArray - buggy? causes maya crashes
            self.set.addAttr(self._ATTR_MEMBER, dt='string')
        # read set values
        self.selShapevis = self.set.attr(self._ATTR_SHAPEVIS).get()
        self.members = []
        # right click menu
        # member editing is disabled for referenced selections
        if self.referencePrefix:
            dynamicSel = 0
        else:
            dynamicSel = 1
        with self.rightClickMenu:
            pm.menuItem(divider=1)
            pm.menuItem(l='Add selected', en=dynamicSel, c=pm.Callback(self.member_addSelected))
            pm.menuItem(l='Remove selected', en=dynamicSel, c=pm.Callback(self.member_removeSelected))
            pm.menuItem(l='Remove disabled', en=dynamicSel, c=pm.Callback(self.member_removeDisabled))
            pm.menuItem(divider=1)
            pm.menuItem(l='Edit Active Members', c=pm.Callback(self.members_window))
            pm.menuItem(divider=1)
            pm.menuItem(l='Print members', c=pm.Callback(self.member_print))
        # middle click menu
        with self.middleClickMenu:
            pm.menuItem(l='Set keyframe', c=pm.Callback(self.cmd_setKeyframe))
            pm.menuItem(l='Delete keyframe', c=pm.Callback(self.cmd_deleteKeyframe))
            pm.menuItem(divider=1)
            pm.menuItem(l='Reset attributes', c=pm.Callback(self.cmd_resetAttributes))
            pm.menuItem(divider=1)
            pm.menuItem(l='Hide shapes', c=pm.Callback(self.cmd_shapeVisibiility, 0))
            pm.menuItem(l='Show shapes', c=pm.Callback(self.cmd_shapeVisibiility, 1))
        # make horizontal
        horizontal = pm.horizontalLayout(self, ratios=[1, 3, 1], spacing=0)
        with horizontal:
            pm.button(l='-', h=1, w=1, c=pm.Callback(self.cmd_select, 'minus'))
            self.button = pm.button(l=self.getUiName(), h=1, w=1, c=pm.Callback(self.cmd_select, 'only'))
            pm.button(l='+', h=1, w=1, c=pm.Callback(self.cmd_select, 'plus'))
            horizontal.redistribute()
        # Members
        if not fromSet:
            self.memberAttr_set(1)
        self.members_set()
    def getUiName(self):
        """ return name for UI (popUps, buttonLabel); '*' prefix marks hidden shapes """
        prefixShapeVis = ''
        if not self.selShapevis:
            prefixShapeVis = '*'
        return prefixShapeVis + super(Selection, self).getUiName()
    # #########################
    # MEMBER
    # #########################
    def memberAttr_get(self):
        """ return member attribute as list of [name, status] pairs """
        # return self.set.attr( self._ATTR_MEMBER ).get()# stringArray
        # stored as one string: "name1 status1;name2 status2;..."
        fullString = self.set.attr(self._ATTR_MEMBER).get()
        returnList = []
        if fullString:
            for eachAttr in fullString.split(';'):
                eachName, eachStat = eachAttr.split(' ')
                returnList.append([eachName, int(eachStat)])
        return returnList
    def memberAttr_set(self, useSelection=0):
        """ set member attribute from variable or selection """
        '''# stringArray
        memberStringArray = []
        if( useSelection ):
            for each in pm.ls(sl=1):
                memberStringArray.append( '%s 1' % each )
        else:
            for eachName, eachStat in self.members:
                memberStringArray.append( str(eachName)+' '+str(eachStat) )
        self.set.attr( self._ATTR_MEMBER ).set( memberStringArray )
        '''
        memberString = ''
        if useSelection:
            for each in pm.ls(sl=1):
                prefix = ''
                if memberString:
                    prefix = ';'
                memberString += (prefix + '%s 1' % each)
        else:
            for eachName, eachStat in self.members:
                prefix = ''
                if memberString:
                    prefix = ';'
                memberString += prefix + eachName + ' ' + str(eachStat)
        self.set.attr(self._ATTR_MEMBER).set(memberString)
    def members_set(self):
        """ set members variable from memberAttribute """
        self.members = self.memberAttr_get()
        # for eachMemberString in self.set.attr( self._ATTR_MEMBER ).get():# stringArray
        #    self.members.append( eachMemberString.split( ' ' ) )
    def members_recreate(self, newMembers):
        """ set members variable from given list """
        self.members = []
        for eachMember, eachStat in newMembers:
            self.members.append([eachMember, eachStat])
        # update attr
        self.memberAttr_set()
    def members_add(self, memberArg):
        """ add given string or string array to member variable """
        # wrap a single name in a list; the previous code wrapped it as
        # [name, 1] and then iterated that list, appending a bogus [1, 1] entry
        if isinstance(memberArg, str):
            memberArg = [memberArg]
        for each in memberArg:
            self.members.append([each, 1])
    def members_window(self):
        """ open member UI """
        MemberWindow(self)
    def member_getSceneObjects(self, value=None, returnIndices=False):
        """ return list of members with optional reference prefix (value None=all, 1=active, 0=inactive """
        returnList = []
        invalidList = []
        indexList = []
        for x, [eachName, eachStat] in enumerate(self.members):
            if int(eachStat) == value or value is None:
                fullName = self.object_getSceneName(eachName)
                if pm.objExists(fullName):
                    returnList.append(fullName)
                    indexList.append(x)
                else:
                    invalidList.append(fullName)
        # check
        if invalidList:
            self.parentUi.parentUi.createWarning('Invalid objects in selection:')
            print(invalidList)
        # finish
        if not returnIndices:
            return returnList
        else:
            return returnList, indexList
    def member_print(self):
        """ print list of: members, active members, inactive members """
        print('--- members of selection "%s"' % self.getUiName())
        # all members
        allMembers = self.member_getSceneObjects()
        print('all (%d): %s' % (len(allMembers), allMembers))
        # active
        activeMembers = self.member_getSceneObjects(1)
        print('active (%d): %s' % (len(activeMembers), activeMembers))
        # inactive
        inactiveMembers = self.member_getSceneObjects(0)
        print('inactive (%d): %s' % (len(inactiveMembers), inactiveMembers))
    def member_addSelected(self):
        """ add selected objects to selection members """
        # get selection
        sel = pm.ls(sl=1)
        # check
        if not sel:
            self.parentUi.parentUi.createWarning(
                'Could not add selected objects, because nothing is selected.')
            return
        # create list of objects that are not yet in the selection
        oldMembers = self.member_getSceneObjects()
        newMembers = []
        for each in sel:
            if not each in oldMembers:
                newMembers.append(each)
        # check
        if not newMembers:
            self.parentUi.parentUi.createWarning(
                'Could not add selected objects, because all selected objects are already part of selection.')
            return
        # confirm dialog
        popupmsg = 'Add selected objects to "%s":\n' % self.getUiName()
        for each in newMembers:
            popupmsg += '\n' + each
        if not self.parentUi.parentUi.tryConfirmDialog(popupmsg):
            return
        # save in members variable
        self.members_add(newMembers)
        # update membersAttr
        self.memberAttr_set()
    def member_removeSelected(self):
        """ remove selected objects from selection members """
        # get selection
        sel = pm.ls(sl=1)
        # check
        if not sel:
            self.parentUi.parentUi.createWarning(
                'Could not remove selected objects, because nothing is selected.')
            return
        # create list of matches between selection and stored members
        oldMembers = self.member_getSceneObjects()
        matchingMembers = []
        matchingIndices = []
        for each in sel:
            if each in oldMembers:
                matchingMembers.append(each)
                matchingIndices.append(oldMembers.index(each))
        # check
        if not matchingMembers:
            self.parentUi.parentUi.createWarning(
                'Could not remove selected objects, because none of the selected objects are members.')
            return
        # pop-up
        popupmsg = 'Remove selected objects from "%s":\n' % self.getUiName()
        for eachName in matchingMembers:
            popupmsg += '\n' + eachName
        if not self.parentUi.parentUi.tryConfirmDialog(popupmsg):
            return
        # delete members
        # pop from highest index first so earlier indices stay valid
        matchingIndices.sort()
        matchingIndices.reverse()
        for eachIndex in matchingIndices:
            self.members.pop(eachIndex)
        # update membersAttr
        self.memberAttr_set()
    def member_removeDisabled(self):
        """ remove disabled objects from selection members """
        # find disabled members
        disabledMembers, disabledIndices = self.member_getSceneObjects(0, True)
        # check
        if not disabledMembers:
            self.parentUi.parentUi.createWarning('Found no disabled members.')
            return
        # pop-up
        popupmsg = 'Remove disabled members from "%s":\n' % self.getUiName()
        for each in disabledMembers:
            popupmsg += '\n' + each
        if not self.parentUi.parentUi.tryConfirmDialog(popupmsg):
            return
        # delete members
        # pop from highest index first so earlier indices stay valid
        disabledIndices.reverse()
        for eachIndex in disabledIndices:
            self.members.pop(eachIndex)
        # update membersAttr
        self.memberAttr_set()
    # #########################
    # COMMANDS
    # #########################
    def cmd_select(self, flag):
        """ select command; flag is 'only', 'plus' or 'minus' """
        objects = self.member_getSceneObjects(1)
        # select
        if flag == 'only':
            pm.select(objects)
        elif flag == 'plus':
            pm.select(objects, add=1)
        elif flag == 'minus':
            pm.select(objects, deselect=1)
        # focus on panel (so shortcuts work [move-tool,...])
        self.parentUi.parentUi.tryFocusPanel()
    def cmd_getKeyableAttrs(self):
        """ return all keyable attributes of given object list. use only channelBox if attrs are selected there """
        # get all active objects
        objects = self.member_getSceneObjects(1)
        # find keyable attributes
        keyableAttrs = []
        cbAttrs = pm.channelBox('mainChannelBox', q=1, sma=1)
        if cbAttrs:
            # if channelBox attributes are selected use them
            for eachAttr in pm.channelBox('mainChannelBox', q=1, sma=1):
                for eachObj in objects:
                    eachAttrFull = eachObj + '.' + eachAttr
                    if pm.getAttr(eachAttrFull, k=1) and not pm.getAttr(eachAttrFull, lock=1):
                        keyableAttrs.append(eachAttrFull)
        else:
            # else use all keyable attributes
            for eachObj in objects:
                for eachAttr in pm.PyNode(eachObj).listAttr(k=1, unlocked=1):
                    keyableAttrs.append(eachAttr)
        return keyableAttrs
    def cmd_getTimeRange(self):
        """ return current timerange (1,10) """
        import maya.mel as mm
        aPlayBackSliderPython = mm.eval('$tmpVar=$gPlayBackSlider')
        # timeControl range query returns a string like "1:10"
        sr = pm.timeControl(aPlayBackSliderPython, q=1, range=1)
        rangeStart = int(sr[1:sr.find(':')])
        rangeEnd = int(sr[sr.find(':') + 1:-1]) - 1
        return rangeStart, rangeEnd
    def cmd_setKeyframe(self):
        """ set keyframe on selection """
        attributes = self.cmd_getKeyableAttrs()
        if not attributes:
            return
        # maybe: for performance increase in case of channelBox selection use: maya.mel.eval( 'channelBoxCommand -key' )
        pm.setKeyframe(attributes)
    def cmd_deleteKeyframe(self):
        """ delete keyframes of selection """
        # get keyable attributes
        attributes = self.cmd_getKeyableAttrs()
        if not attributes:
            return
        # get time range
        timeRange = self.cmd_getTimeRange()
        # maybe: for performance increase (in case of channelBox selection) use: maya.mel.eval( 'channelBoxCommand -cut' )
        # delete keys
        for eachAttr in attributes:
            pm.cutKey(eachAttr, cl=1, t=timeRange)
    def cmd_resetAttributes(self):
        """ reset attribute values of selection """
        for eachAttr in self.cmd_getKeyableAttrs():
            eachAttrStr = str(eachAttr)
            defaultValue = \
                pm.attributeQuery(eachAttrStr[eachAttr.find('.') + 1:], n=eachAttrStr[:eachAttrStr.find('.')],
                                  listDefault=1)[0]
            pm.setAttr(eachAttr, defaultValue)
            # eachAttr.set( defaultValue )# maya 2012+
            # pm.setAttr( eachAttr, defaultValue )
    def cmd_shapeVisibiility(self, value):
        """ show/hide member shapes """
        # set attr
        self.set.attr(self._ATTR_SHAPEVIS).set(value)
        self.selShapevis = value
        self.button.setLabel(self.getUiName())
        # set visibility on member shapes
        for eachObject in self.member_getSceneObjects(1):
            for eachShape in pm.PyNode(eachObject).getChildren(s=1):
                try:
                    eachShape.v.set(value)
                except Exception:
                    # best-effort: visibility may be locked or connected
                    pass
class MemberWindow(pm.uitypes.Window):
    """ window to edit active members """
    # constants
    _windowName = 'members_prSelectionUi'
    # button background colors signalling unsaved / saved state
    _BACKGROUND_UNSAVED = [0.5, 0, 0]
    _BACKGROUND_DEFAULT = [0.3, 0.3, 0.3]
    # variables
    memberTSL = None # textScrollList
    formsButton = []
    unsavedChanges = False
    def __new__(cls, parentUi):
        """ delete possible old window and create new instance """
        # only one member window may exist; ask before replacing an open one
        if pm.window(cls._windowName, exists=True):
            if not parentUi.parentUi.parentUi.tryConfirmDialog('Close existing "' + cls._windowName + '"?'):
                return
            pm.deleteUI(cls._windowName)
        self = pm.window(cls._windowName, title=parentUi.name)
        return pm.uitypes.Window.__new__(cls, self)
    def __init__(self, parentUi):
        """ create layouts, buttons, show window

        parentUi: the Selection whose members are edited here
        """
        # variables
        self.parentUi = parentUi
        # layout
        formRoot = pm.formLayout()
        with formRoot:
            formMembers = pm.formLayout()
            with formMembers:
                # member list
                self.memberTSL = pm.textScrollList(allowMultiSelection=1,
                                                   sc=pm.Callback(self.cmd_TSL_selection))
                # UP / DOWN buttons
                formUpDown = pm.formLayout(w=20)
                # self.formsButton.append( formUpDown )
                with formUpDown:
                    pm.button(ebg=0, l='UP', c=pm.Callback(self.cmd_UP)) # , nbg=0)# maya 2013+
                    pm.button(ebg=1, l='DN', c=pm.Callback(self.cmd_DN))
                    formUpDown.vDistribute()
                formMembers.hDistribute(5, 1)
            # active buttons
            formActiveBtns = pm.formLayout(h=20, ebg=0)
            # self.formsButton.append( formActiveBtns )
            with formActiveBtns:
                pm.button(l='Select All', c=pm.Callback(self.cmd_select_all))
                pm.button(l='Select None', c=pm.Callback(self.cmd_select_none))
                pm.button(l='Toggle', c=pm.Callback(self.cmd_select_toggle))
                formActiveBtns.hDistribute()
            # storage buttons
            # only this row is recolored to flag unsaved changes (see ui_showChanges)
            formStorage = pm.formLayout(h=20, ebg=1)
            self.formsButton.append(formStorage)
            with formStorage:
                saveBtn = pm.button(l='Save', c=pm.Callback(self.cmd_save))
                reloadBtn = pm.button(l='Reload', c=pm.Callback(self.cmd_load))
                closeBtn = pm.button(l='Close', c=pm.Callback(self.cmd_close))
                formStorage.hDistribute()
            formRoot.redistribute(7, 1, 1)
        # color / status
        self.ui_showChanges(0)
        # load members
        self.createTSL()
        # show window
        self.show()
    def createTSL(self):
        """ read members from selection class and fill the textScrollList """
        self.memberTSL.removeAll()
        for x, [each, status] in enumerate(self.parentUi.members):
            self.memberTSL.append(each)
            # active members (status 1) are shown selected; TSL indices are 1-based
            if status == 1:
                self.memberTSL.setSelectIndexedItem(x + 1)
    def ui_showChanges(self, value):
        """
        show/hide unsaved changes by coloring button backgrounds
        could not use for formRoot, because of automated textScrollList background color changes,...
        """
        if value:
            backgroundColor = self._BACKGROUND_UNSAVED
        else:
            backgroundColor = self._BACKGROUND_DEFAULT
        for eachForm in self.formsButton:
            try:
                eachForm.setBackgroundColor(backgroundColor)
            except:
                # best-effort: ignore forms that no longer exist / reject coloring
                pass
        self.unsavedChanges = value
    def cmd_save(self):
        """ save changes: write list order and selection state back to the Selection """
        newMemberAttr = []
        selectedIndices = self.memberTSL.getSelectIndexedItem()
        if selectedIndices is None:
            # .getSelectIndexedItem returns None when it should []
            selectedIndices = []
        for x, each in enumerate(self.memberTSL.getAllItems()):
            # selected rows become active members (status 1), others inactive (0)
            if (x + 1) in selectedIndices:
                newMemberAttr.append([each, 1])
            else:
                newMemberAttr.append([each, 0])
        # update selection
        self.parentUi.members_recreate(newMemberAttr)
        # update UI color
        self.ui_showChanges(0)
    def cmd_load(self):
        """Discard unsaved edits and reload the members from the selection class."""
        self.ui_showChanges(False)
        self.createTSL()
def cmd_close(self):
""" close window. popup when there are unsaved changes """
if not self.unsavedChanges:
pm.deleteUI(self._windowName)
return
if self.parentUi.parentUi.parentUi.tryConfirmDialog('Close and lose unsaved changes?'):
pm.deleteUI(self._windowName)
    def cmd_TSL_selection(self):
        """Executed whenever an item in the textScrollList gets selected.

        Any manual selection change counts as an unsaved modification.
        """
        self.ui_showChanges(1)
def cmd_UP(self):
""" move selected members one index up """
allItems = self.memberTSL.getAllItems()
newSelection = []
lastUp = 1
change = False
# get selected indices
selectedIndices = self.memberTSL.getSelectIndexedItem()
if selectedIndices is None:
# .getSelectIndexedItem returns None when it should []
selectedIndices = []
for index in selectedIndices:
if index > lastUp:
self.memberTSL.removeIndexedItem(index)
self.memberTSL.appendPosition([index - 1, allItems[index - 1]])
newSelection.append(index - 1)
change = True
else:
newSelection.append(index)
lastUp += 1
# recreate selection
for index in newSelection:
self.memberTSL.setSelectIndexedItem(index)
# update UI
if change:
self.ui_showChanges(1)
def cmd_DN(self):
""" move selected members one index down """
# TODO?: merge function with cmd_UP?
allItems = self.memberTSL.getAllItems()
newSelection = []
lastUp = len(allItems)
change = False
# get selected indices
selectedIndices = self.memberTSL.getSelectIndexedItem()
if selectedIndices is None:
# .getSelectIndexedItem returns None when it should []
selectedIndices = []
selectedIndices.reverse()
for index in selectedIndices:
if index < lastUp:
self.memberTSL.removeIndexedItem(index)
self.memberTSL.appendPosition([index + 1, allItems[index - 1]])
newSelection.append(index + 1)
change = True
else:
newSelection.append(index)
lastUp -= 1
# recreate selection
for index in newSelection:
self.memberTSL.setSelectIndexedItem(index)
# update UI
if change:
self.ui_showChanges(1)
def cmd_select_all(self):
""" select all members """
# self.memberTSL.selectAll()# broken: 'TextScrollList' object has no attribute 'selectIndexedItem'
for x in range(len(self.memberTSL.getAllItems())):
self.memberTSL.setSelectIndexedItem(x + 1)
# update UI color
self.ui_showChanges(1)
    def cmd_select_none(self):
        """Deselect all members and flag the change as unsaved."""
        self.memberTSL.deselectAll()
        # update UI color
        self.ui_showChanges(1)
def cmd_select_toggle(self):
""" toggle all members selection status """
# get selected indices
selectedIndices = self.memberTSL.getSelectIndexedItem()
if selectedIndices is None:
# .getSelectIndexedItem returns None when it should []
selectedIndices = []
# toggle selection
for x in range(len(self.memberTSL.getAllItems())):
if x + 1 in selectedIndices:
self.memberTSL.deselectIndexedItem(x + 1)
else:
self.memberTSL.setSelectIndexedItem(x + 1)
# update UI color
self.ui_showChanges(1)
#
#
# remove comment from next line, to call the script by executing all the code in this file
# UI()
| [
37811,
198,
2,
311,
31033,
198,
5450,
1378,
70,
388,
13,
1073,
14,
1050,
49283,
9019,
198,
5450,
1378,
12567,
13,
785,
14,
1845,
89,
2473,
12,
305,
2788,
33663,
14,
1050,
11261,
64,
198,
198,
2,
22196,
40165,
198,
32,
8925,
11787,
... | 2.213332 | 28,397 |
# Bug fix: UserCreationForm was imported twice in the same statement.
# Imports regrouped: third-party (django) first, then local apps.
from django import forms
from django.contrib.auth.forms import UserChangeForm, UserCreationForm
from django.contrib.auth.models import Group

from authApp.models import User
| [
6738,
42625,
14208,
13,
3642,
822,
13,
18439,
13,
27530,
1330,
4912,
198,
6738,
42625,
14208,
13,
3642,
822,
13,
18439,
13,
23914,
1330,
11787,
12443,
341,
8479,
11,
11787,
12443,
341,
8479,
11,
11787,
19400,
8479,
198,
6738,
42625,
142... | 3.45614 | 57 |
#!/usr/bin/python3
"""Fetch ESV Bible verses via diatheke and print them as wrapped plain text."""
import subprocess
import sys
import re
import textwrap

# Markup replacement pairs: [markup fragment, plain-text replacement].
# A '\' in the replacement marks a hard paragraph break handled below.
P = []
P.append(['<lb type="x-begin-paragraph"/>', ''])
P.append(['<lb type="x-end-paragraph"/>', '\\'])
P.append(['<q marker="">', ''])
P.append(['</q>', ''])
P.append(['<milestone marker="“" type="cQuote"/>', '"'])
P.append(['<milestone marker="”" type="cQuote"/>', '"\\'])
P.append(['<q level="1" marker="“"/>', '"'])
P.append(['<q level="1" marker="”"/>', '"\\'])
P.append(['<q level="2" marker="‘"/>', "'"])
P.append(['<q level="2" marker="’"/>', "'"])
P.append(['—', '--'])
P.append(['(ESV)', ''])

# Bug fix: raw strings, so regex escapes like \s are not mangled (non-raw
# '\s' is an invalid string escape and a SyntaxWarning on modern Python).
re_str1 = r'\s[es]ID="[0-9.]+"'      # Takes care of the eID and sID strings
re_str2 = r'<[a-zA-Z0-9="/ -]+>'     # Misc markups

refs = ' '.join(sys.argv[1:])
argd = "-b ESV -e HTML -k "
# NOTE(review): shell=True with user-supplied arguments; callers must trust
# argv or this is shell-injectable.
cmds = "diatheke " + argd + refs
text = subprocess.Popen(cmds, shell=True, stdout=subprocess.PIPE).stdout.read()
text = text.decode("utf-8")
f = text.split('\n')

# Do markup replacements and store results in f1
f1 = []
for line in f:
    line = re.sub(re_str1, '', line)
    for old, new in P:
        line = line.replace(old, new)
    line = re.sub(re_str2, ' ', line)
    f1.append(line)

# Only want verse numbers if book and chapter haven't changed
# from the previous line
f2 = []
book_chap = ''
for line in f1:
    if line[0:line.find(':')] == book_chap:
        line = line.replace(line[0:(line.find(':') + 1)], '')
    else:
        book_chap = line[0:line.find(':')]
    f2.append(line)

# Want consecutive verses in paragraphs if no hard-coded
# breaks are at the end of the verse
f3 = []
new_para = ''
# Bug fix: initialize last_write before the loop so the flush below cannot
# raise NameError when f2 is empty; plain booleans instead of int(0)/int(1).
last_write = False
for line in f2:
    if line.find('\\') > -1:
        line = line.replace('\\', '')
        line = line.strip()
        new_para = new_para + ' ' + line
        new_para = new_para.lstrip()
        new_para = re.sub('[ ]+', ' ', new_para)  # Reduce multiple spaces to one
        f3.append(new_para)
        f3.append('')
        new_para = ''
        last_write = True
    else:
        line = line.lstrip()
        new_para = new_para + ' ' + line
        last_write = False

# Flush the final paragraph if the last verse did not end with a hard break
if not last_write:
    new_para = new_para.lstrip()
    new_para = re.sub('[ ]+', ' ', new_para)  # Reduce multiple spaces to one
    f3.append(new_para)
    f3.append('')

for line in f3:
    print(textwrap.fill(line))  # Default width is 70 characters
| [
2,
48443,
14629,
14,
8800,
14,
29412,
18,
198,
198,
11748,
850,
14681,
198,
11748,
25064,
198,
11748,
302,
198,
11748,
2420,
37150,
198,
198,
47,
796,
17635,
198,
47,
13,
33295,
7,
17816,
27,
23160,
2099,
2625,
87,
12,
27471,
12,
20... | 2.179211 | 1,116 |
import numpy as np

# Num entries per column
DATA_LENGTH = 10

# Data for tests in datamining lib.
# Each key maps to a column of DATA_LENGTH entries:
#   dates: the same calendar date (Jan 17, 2000) in ten different formats
#   typos: messy yes/no survey answers, including misspellings and '?'
#   nan1/nan2: numeric columns with np.nan gaps for missing-value handling
data = {
    "dates": [
        "01/17/2000",
        "January 17, 2000",
        "Jan 17, 2000",
        "17 Jan 2000",
        "17 January 2000",
        "01-17-2000",
        "January 17th 2000",
        "17-01-2000",
        "17/01/2000",
        "17th January 2000",
    ],
    "typos": ["yes", "y", "yse", "YES", "?", "no", "No", "on", "NO", "?"],
    "nan1": [0, 1, 2, np.nan, 4, np.nan, np.nan, 7, 8, np.nan],
    "nan2": [np.nan, 1, np.nan, 3, 4, 5, 6, np.nan, 8, 9],
}
| [
11748,
299,
32152,
355,
45941,
198,
198,
2,
31835,
12784,
583,
5721,
198,
26947,
62,
43,
49494,
796,
838,
198,
198,
2,
6060,
329,
5254,
287,
4818,
321,
3191,
9195,
198,
7890,
796,
1391,
198,
220,
220,
220,
366,
19581,
1298,
685,
198... | 1.910596 | 302 |
"""Beginner conditional exercises (prompts and outputs kept in Portuguese)."""
import random
import calendar

# === exercise 01: guess the random number (1-5) ===
num = int(input("Seu palpite:"))
sor = random.randint(1, 5)
if num == sor:
    print("O número sorteado foi {}.\nVocê é o bichão mesmo hein".format(sor))
else:
    print("O número sorteado foi {}.\nTente novamente".format(sor))

# === exercise 02: speeding fine, R$7 per km/h above 80 ===
vel = float(input("velocidade do automóvel: "))
if vel > 80:
    mult = float((vel - 80) * 7)
    print("O veículo estava acima do limite de velocidade.\nO valor da multa é de: R$ {}. ".format(mult))
else:
    print("O veículo estava dentro do limite de velocidade.")

# === exercise 03: even or odd ===
num = int(input("Insira o número: "))
if num % 2 == 0:
    print("O número digitado é par.")
else:
    print("O número digitado é impar.")

# === exercise 04: ticket price by distance ===
dist = float(input("Insira a distância da viagem em km:"))
if dist <= 200:
    print("O preço da passagem é de R${}.".format(0.5 * dist))
else:
    print("O preço da passagem é de R${}.".format(0.45 * dist))

# === exercise 05: leap year ===
# Bug fix: the original test (ano % 4 == 0) wrongly marks century years
# such as 1900 as leap years; calendar.isleap applies the full Gregorian
# rule (divisible by 4, except centuries not divisible by 400).
ano = int(input("Insira o ano em questão:"))
if calendar.isleap(ano):
    print("O ano de {} é um ano bissexto.".format(ano))
else:
    print("O ano de {} não é um ano bissexto.".format(ano))

# === exercise 06: largest and smallest of three numbers ===
n1 = int(input("Insira o primeiro número: "))
n2 = int(input("Insira o segundo número: "))
n3 = int(input("Insira o terceiro número: "))
if n1 > n2 and n1 > n3:
    print("{} é o maior número.\n".format(n1))
elif n2 > n1 and n2 > n3:
    print("{} é o maior número.\n".format(n2))
else:
    print("{} é o maior número.\n".format(n3))
if n1 < n2 and n1 < n3:
    print("{} é o menor número.\n".format(n1))
elif n2 < n1 and n2 < n3:
    print("{} é o menor número.\n".format(n2))
else:
    print("{} é o menor número.\n".format(n3))

# === exercise 07: salary raise (15% up to R$1250, 10% above) ===
sal = float(input("Insira o valor do salário: "))
if sal <= 1250.0:
    ns = (15 * sal) / 100
    print("O seu aumento será de R${}.\nTotalizando R${}.".format(ns, (sal + ns)))
else:
    ns = (10 * sal) / 100
    print("O seu aumento será de R${}.\nTotalizando R${}.".format(ns, (sal + ns)))

# === exercise 08: can three segments form a triangle? ===
a = float(input("Insira a medida da primeira reta: "))
b = float(input("Insira a medida da segunda reta: "))
c = float(input("Insira a medida da terceira reta: "))
# triangle inequality: each side must be shorter than the sum of the others
if (b - c) < a < (b + c) and (a - c) < b < (a + c) and (a - b) < c < (a + b):
    print("Essas 3 retas são capazes de formar um triângulo.")
else:
    print("Essas 3 retas não formam um triângulo.")
11748,
4738,
198,
2,
18604,
1069,
2798,
8836,
66,
952,
5534,
18604,
198,
22510,
28,
493,
7,
15414,
7203,
4653,
84,
6340,
2595,
11097,
4008,
198,
82,
273,
28,
4738,
13,
25192,
600,
7,
16,
11,
20,
8,
198,
361,
357,
22510,
6624,
2565... | 2.136652 | 1,105 |
import abc
from typing import Any
from typing import Dict
from typing import List
from typing import Optional
from typing import Tuple
from typing import Type
from xsdata.exceptions import XmlHandlerError
from xsdata.formats.bindings import AbstractParser
from xsdata.formats.dataclass.parsers.config import ParserConfig
from xsdata.models.enums import EventType
NoneStr = Optional[str]
class PushParser(AbstractParser):
    """
    A generic interface for event based content handlers like sax.

    :param config: Parser configuration.
    """

    config: ParserConfig
    ns_map: Dict

    @abc.abstractmethod
    def start(
        self,
        clazz: Optional[Type],
        queue: List,
        objects: List,
        qname: str,
        attrs: Dict,
        ns_map: Dict,
    ):
        """Queue the next xml node for parsing."""

    @abc.abstractmethod
    def end(
        self,
        queue: List,
        objects: List,
        qname: str,
        text: NoneStr,
        tail: NoneStr,
    ) -> bool:
        """
        Parse the last xml node and bind any intermediate objects.

        :return: The result of the binding process.
        """

    def register_namespace(self, prefix: NoneStr, uri: str):
        """
        Add the given prefix-URI namespaces mapping if the prefix is new.

        :param prefix: Namespace prefix
        :param uri: Namespace uri
        """
        # setdefault only writes when the prefix is not registered yet,
        # so an existing mapping is never overwritten
        self.ns_map.setdefault(prefix, uri)
class XmlNode(abc.ABC):
    """
    The xml node interface.
    The nodes are responsible to find and queue the child nodes when a
    new element starts and build the resulting object tree when the
    element ends. The parser needs to maintain a queue for these nodes
    and a list of all the intermediate object trees.

    Concrete subclasses implement :meth:`child` and :meth:`bind`; this
    base class carries no state of its own.
    """
    @abc.abstractmethod
    def child(self, qname: str, attrs: Dict, ns_map: Dict, position: int) -> "XmlNode":
        """
        Initialize the next child node to be queued, when a new xml element
        starts.
        This entry point is responsible to create the next node
        type with all the necessary information on how to bind
        the incoming input data.
        :param qname: Qualified name
        :param attrs: Attribute key-value map
        :param ns_map: Namespace prefix-URI map
        :param position: The current objects position, to mark future
            objects as children
        """
    @abc.abstractmethod
    def bind(self, qname: str, text: NoneStr, tail: NoneStr, objects: List) -> bool:
        """
        Build the object tree for the ending element and return whether the
        result was successful or not.
        This entry point is called when an xml element ends and is
        responsible to parse the current element attributes/text,
        bind any children objects and initialize new object.
        :param qname: Qualified name
        :param text: Text content
        :param tail: Tail content
        :param objects: The list of intermediate parsed objects,
            eg [(qname, object)]
        """
class XmlHandler:
    """
    Abstract content handler.

    :param parser: The parser instance to feed with events
    :param clazz: The target binding model, auto located if omitted.
    """

    __slots__ = ("parser", "clazz", "queue", "objects")

    def parse(self, source: Any) -> Any:
        """Parse an XML document from a system identifier or an InputSource."""
        raise NotImplementedError("This method must be implemented!")

    def merge_parent_namespaces(self, ns_map: Dict) -> Dict:
        """
        Merge and return the given prefix-URI map with the parent node.

        Register new prefixes with the parser.

        :param ns_map: Namespace prefix-URI map
        """
        if not self.queue:
            merged = {}
        else:
            inherited = self.queue[-1].ns_map
            if not ns_map:
                # nothing new to merge: reuse the parent mapping as-is
                return inherited
            merged = dict(inherited) if inherited else {}

        for prefix, uri in ns_map.items():
            self.parser.register_namespace(prefix, uri)
            merged[prefix] = uri

        return merged
class EventsHandler(XmlHandler):
    """Sax content handler for pre-recorded events."""

    __slots__ = ("data_frames", "flush_next")

    def parse(self, source: List[Tuple]) -> Any:
        """Replay the pre-recorded events into the main parser."""
        for event, *payload in source:
            if event == EventType.START:
                qname, attrs, ns_map = payload
                self.parser.start(
                    self.clazz,
                    self.queue,
                    self.objects,
                    qname,
                    attrs,
                    ns_map,
                )
            elif event == EventType.END:
                qname, text, tail = payload
                self.parser.end(self.queue, self.objects, qname, text, tail)
            elif event == EventType.START_NS:
                prefix, uri = payload
                self.parser.register_namespace(prefix or None, uri)
            else:
                raise XmlHandlerError(f"Unhandled event: `{event}`.")

        # the last bound object is the parse result, if anything was bound
        if not self.objects:
            return None
        return self.objects[-1][1]
| [
11748,
450,
66,
198,
6738,
19720,
1330,
4377,
198,
6738,
19720,
1330,
360,
713,
198,
6738,
19720,
1330,
7343,
198,
6738,
19720,
1330,
32233,
198,
6738,
19720,
1330,
309,
29291,
198,
6738,
19720,
1330,
5994,
198,
198,
6738,
2124,
82,
789... | 2.415094 | 2,173 |
#!/usr/bin/env python
import argparse
from mlsurfacelayer.data import process_cabauw_data
from mlsurfacelayer.data import process_idaho_data
if __name__ == "__main__":
    # NOTE(review): `main` is not defined or imported in this file — running
    # the script raises NameError. Confirm which module should provide main()
    # (the imports above only bring in process_cabauw_data/process_idaho_data).
    main()
2,
48443,
14629,
14,
8800,
14,
24330,
21015,
198,
11748,
1822,
29572,
198,
6738,
285,
7278,
333,
38942,
417,
2794,
13,
7890,
1330,
1429,
62,
66,
397,
559,
86,
62,
7890,
198,
6738,
285,
7278,
333,
38942,
417,
2794,
13,
7890,
1330,
14... | 2.712121 | 66 |
from LuaStakky.TarantoolAppBuilder import *
import os
import pytest
| [
6738,
43316,
1273,
461,
2584,
13,
51,
4741,
970,
4677,
32875,
1330,
1635,
201,
198,
11748,
28686,
201,
198,
11748,
12972,
9288,
201,
198,
201,
198
] | 2.807692 | 26 |
from dataclasses import dataclass
from daily_fantasy_sports_models.core.sets import is_disjoint
from daily_fantasy_sports_models.draft_kings.nba.models.contests.salary_cap.player_pool.player import Player \
as PlayerPoolPlayer
from daily_fantasy_sports_models.draft_kings.nba.models.core.position import Position
# "Lineups...must include players from at least 2 different NBA games"
# "a valid lineup must not exceed the salary cap of $50,000"
# https://www.draftkings.com/help/rules/4
# In salary cap contests, participants will create a lineup by selecting players listed in the Player Pool.
# Each player listed has an assigned salary and a valid lineup must not exceed the salary cap of $50,000.
# Lineups will consist of 8 players and must include players from at least 2 different NBA games.
@dataclass(init=True,
repr=True,
eq=True,
order=False,
unsafe_hash=False,
frozen=True)
| [
6738,
4818,
330,
28958,
1330,
4818,
330,
31172,
198,
198,
6738,
4445,
62,
69,
34921,
62,
32945,
62,
27530,
13,
7295,
13,
28709,
1330,
318,
62,
6381,
73,
1563,
198,
6738,
4445,
62,
69,
34921,
62,
32945,
62,
27530,
13,
35679,
62,
74,
... | 3.060897 | 312 |
# License information goes here
# -*- coding: utf-8 -*-
"""
======================
sdss_install.install4
======================
This package contains code for installing SDSS-IV software products.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
# The line above will help with 2to3 support.
from .Install4 import Install4
from .get_svn_devstr import get_svn_devstr
from .most_recent_tag import most_recent_tag
from .version import version
| [
2,
13789,
1321,
2925,
994,
198,
2,
532,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
198,
37811,
198,
4770,
50155,
198,
21282,
824,
62,
17350,
13,
17350,
19,
198,
4770,
50155,
198,
198,
1212,
5301,
4909,
2438,
329,
15975,
311,
... | 3.562963 | 135 |
#########################################################
#
# DO NOT EDIT THIS FILE. IT IS GENERATED AUTOMATICALLY. #
# PLEASE LOOK INTO THE README FOR MORE INFORMATION. #
#
#########################################################
# coding: utf-8
# # CIFAR-10: Part 2
#
# Welcome back! If you have not completed [Part 1](*), please do so before running the code in this notebook.
#
# In Part 2 we will assume you have the training and testing lmdbs, as well as the trained model .pb files from Part 1. As you may recall from Part 1, we created the dataset in the form of lmdbs then trained a model and saved the trained model in the form of a *predict_net.pb* and an *init_net.pb*. In this notebook, we will show how to test that saved model with the test lmdb and how to continue training to increase our test accuracy.
#
# Recall the objectives of the two part CIFAR-10 tutorial:
#
# **Part 1:**
# - Download dataset
# - Write images to lmdbs
# - Define and train a model with checkpoints
# - Save the trained model
#
# **Part 2:**
# - Load pre-trained model from Part 1
# - Run inference on testing lmdb
# - Continue training to improve test accuracy
# - Test the retrained model
#
# As before, let's start with some necessary imports.
# In[ ]:
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import numpy as np
import os
import shutil
import operator
import glob
from caffe2.python import core,model_helper,optimizer,workspace,brew,utils
from caffe2.proto import caffe2_pb2
import matplotlib.pyplot as plt
from caffe2.python.modeling import initializers
from caffe2.python.modeling.parameter_info import ParameterTags
# ## Check Inputs
#
# Before we get started, let's make sure you have the necessary Part 1 files. We will use the saved model from the most recent run of Part 1.
# In[2]:
# Train lmdb
TRAIN_LMDB = os.path.join(os.path.expanduser('~'), "caffe2_notebooks/tutorial_data/cifar10/training_lmdb")
# Test lmdb
TEST_LMDB = os.path.join(os.path.expanduser('~'), "caffe2_notebooks/tutorial_data/cifar10/testing_lmdb")

# Extract protobuf files from most recent Part 1 run
part1_runs_path = os.path.join(os.path.expanduser('~'), "caffe2_notebooks", "tutorial_files", "tutorial_cifar10")
runs = sorted(glob.glob(part1_runs_path + "/*"))
# Bug fix: indexing runs[-1] on an empty list raised IndexError before the
# friendly error check below could ever run; fail with a clear message instead.
if not runs:
    raise RuntimeError("No Part 1 runs found in {} -- run Part 1 first!".format(part1_runs_path))

# Init net
INIT_NET = os.path.join(runs[-1], "cifar10_init_net.pb")
# Predict net
PREDICT_NET = os.path.join(runs[-1], "cifar10_predict_net.pb")

# Make sure they all exist
if (not os.path.exists(TRAIN_LMDB)) or (not os.path.exists(TEST_LMDB)) or (not os.path.exists(INIT_NET)) or (not os.path.exists(PREDICT_NET)):
    print("ERROR: input not found!")
else:
    print("Success, you may continue!")
# ### Repeat Helper Functions
#
# If these functions look familiar, you are correct; they have been copied-and-pasted from Part 1. To summarize, we will need the *AddInputLayer* function to connect our models to the lmdbs, and the *Add_Original_CIFAR10_Model* function to provide the architecture of the network.
# In[3]:
# ## Test Saved Model From Part 1
#
# ### Construct Model for Testing
#
# The first thing we need is a model helper object that we can attach the lmdb reader to.
# In[4]:
# Create a ModelHelper object with init_params=False
arg_scope = {"order": "NCHW"}
test_model = model_helper.ModelHelper(name="test_model", arg_scope=arg_scope, init_params=False)
# Add the data input layer to the model, pointing at the TEST_LMDB
data,_ = AddInputLayer(test_model,1,TEST_LMDB,'lmdb')
# ### Populate the Model Helper with Saved Model Params
#
# To format a model for testing, we do not need to create params in the model helper, nor do we need to add gradient operators as we will only be performing forward passes. All we really need to do is populate the *.net* and *.param_init_net* members of the model helper with the contents of the saved *predict_net.pb* and *init_net.pb*, respectively. To accomplish this, we construct *caffe2_pb* objects with the protobuf from the pb files, create *Net* objects with the *caffe2_pb* objects, then **append** the net objects to the *.net* and *.param_init_net* members of the model helper. Appending is very important here! If we do not append, we would wipe out the input data layer stuff that we just added.
#
# Recall from Part 1, the saved model expected an input named *data* and produced an output called *softmax*. Conveniently (but not accidentally), the *AddInputLayer* function reads from the lmdb and puts the information into the workspace in a blob called *data*. It is also important to remember what each of the saved nets that we are appending to our model contains. The *predict_net* contains the structure of the model, including the ops involved in the forward pass. It has the definitions of the convolutional, pooling, and fc layers in the model. The *init_net* contains the weight initializations for the parameters that the ops in the *predict_net* expect. For example, if there is an op in the *predict_net* named 'fc1', the *init_net* will contain the trained weights (*fc1_w*), and biases (*fc1_b*) for that layer.
#
# After we append the nets, we add an accuracy layer to the model which uses the *softmax* output from the saved model and the *label* input from the lmdb. Note, we could manually fetch the softmax blob from the workspace after every iteration and check whether or not the class with the highest softmax score is the true label, but instead we opt for the simpler accuacy layer.
# In[5]:
# Populate the model helper obj with the init net stuff, which provides the
# weight initializations for the model
init_net_proto = caffe2_pb2.NetDef()
# Bug fix: protobuf files are binary; open in "rb" so ParseFromString receives
# bytes (text mode fails on Python 3 and can corrupt data on Windows).
with open(INIT_NET, "rb") as f:
    init_net_proto.ParseFromString(f.read())
test_model.param_init_net = test_model.param_init_net.AppendNet(core.Net(init_net_proto))

# Populate the model helper obj with the predict net stuff, which defines
# the structure of the model
predict_net_proto = caffe2_pb2.NetDef()
with open(PREDICT_NET, "rb") as f:
    predict_net_proto.ParseFromString(f.read())
test_model.net = test_model.net.AppendNet(core.Net(predict_net_proto))

# Add an accuracy feature to the model for convenient reporting during testing
accuracy = brew.accuracy(test_model, ['softmax', 'label'], 'accuracy')
# ### Run Testing
#
# At this point, our model is initialized as the saved model from Part 1. We can now run the testing loop and check the accuracy.
# In[6]:
# Run the param init net to put the trained model info into the workspace
workspace.RunNetOnce(test_model.param_init_net)
workspace.CreateNet(test_model.net, overwrite=True)
# Stat keeper
avg_accuracy = 0.0
# Number of test iterations to run here, since the full test set is 10k images and the
# batch size is 1, we will run 10000 test batches to cover the entire test set
test_iters = 10000
# Main testing loop
for i in range(test_iters):
workspace.RunNet(test_model.net)
acc = workspace.FetchBlob('accuracy')
avg_accuracy += acc
if (i % 500 == 0) and (i > 0):
print("Iter: {}, Current Accuracy: {}".format(i, avg_accuracy/float(i)))
# Report final test accuracy score as the number of correct predictions divided by 10,000
print("*********************************************")
print("Final Test Accuracy: ",avg_accuracy/float(test_iters))
# ## Continue Training
#
# Our model is performing significantly better than random guessing, but I think we can do a little better with more training. To do this we will:
# - create a new model helper
# - specify that the train data will come from the training lmdb
# - re-define the model architecture with the Add_Original_CIFAR10_Model function
# - grab the trained weights and biases from the saved init_net.pb
# - resume training
#
# ### Construct Model for Re-Training
#
# Here we create a new model helper object for training. Nothing here should look new but take notice that we set **init_params=False**. This is important, as we do not want brew (in *Add_Original_CIFAR10_Model* function) to automatically initialize the params, rather we want to set them ourselves. Once we construct the model helper, we add the input layer and point it to the training lmdb, brew in the model architecture, and finally initialize the parameters by appending the contents of the saved *init_net.pb* to the *.param_init_net* member of the train model.
# In[7]:
# Number of iterations to train for here
training_iters = 3000
# Reset workspace to clear all of the information from the testing stage
workspace.ResetWorkspace()
# Create new model
arg_scope = {"order": "NCHW"}
train_model = model_helper.ModelHelper(name="cifar10_train", arg_scope=arg_scope, init_params=False)
# Add the data layer to the model
data,_ = AddInputLayer(train_model,100,TRAIN_LMDB,'lmdb')
softmax = Add_Original_CIFAR10_Model(train_model, data, 10, 32, 32, 3)
# Populate the param_init_net of the model obj with the contents of the init net
init_net_proto = caffe2_pb2.NetDef()
# Bug fix: protobuf is binary data; read as bytes ("rb"), not text
with open(INIT_NET, "rb") as f:
    init_net_proto.ParseFromString(f.read())
tmp_init_net = core.Net(init_net_proto)
train_model.param_init_net = train_model.param_init_net.AppendNet(tmp_init_net)
# ### Specify Loss Function and Optimizer
#
# We can now proceed as normal by specifying the loss function, adding the gradient operators, and building the optimizier. Here, we opt for the same loss function and optimizer that we used in Part 1.
# In[8]:
# Add the "training operators" to the model
xent = train_model.LabelCrossEntropy([softmax, 'label'], 'xent')
# compute the expected loss
loss = train_model.AveragedLoss(xent, "loss")
# track the accuracy of the model
accuracy = brew.accuracy(train_model, [softmax, 'label'], "accuracy")
# use the average loss we just computed to add gradient operators to the model
train_model.AddGradientOperators([loss])
# Specify Optimization Algorithm
optimizer.build_sgd(
train_model,
base_learning_rate=0.01,
policy="fixed",
momentum=0.9,
weight_decay=0.004
)
# **Important Note**
#
# Check out the results of the *GetOptimizationParamInfo* function. The *params* that this function returns are the parameters that will be optimized by the optimization function. If you are attempting to retrain a model in a different way, and your model doesnt seem to be learning, check the return value of this fuction. If it returns nothing, look no further for your problem! This is exactly the reason that we brew'ed in the layers of the train model with the *Add_Original_CIFAR10_Model* function, because it creates the params in the model automatically. If we had appended the *.net* member of the Model Helper as we did for the test model, this function would return nothing, meaning no parameters would get optimized. A workaround if you appended the net would be to manually create the params with the *create_param* function, which feels like a bit of a hack, especially if you have the add model code on-hand.
# In[9]:
for param in train_model.GetOptimizationParamInfo():
print("Param to be optimized: ",param)
# ### Run Training
#
# **This step will take a while!**
#
# With our model helper setup we can now run the training as normal. Note, the accuracy and loss reported here is as measured on the *training* batches. Recall that the accuracy reported in Part 1 was the validation accuracy. Be careful how you interpret this number!
# In[10]:
# Prime the workspace
workspace.RunNetOnce(train_model.param_init_net)
workspace.CreateNet(train_model.net, overwrite=True)
# Run the training loop
for i in range(training_iters):
workspace.RunNet(train_model.net)
acc = workspace.FetchBlob('accuracy')
loss = workspace.FetchBlob('loss')
if i % 100 == 0:
print ("Iter: {}, Loss: {}, Accuracy: {}".format(i,loss,acc))
# ## Test the Retrained Model
#
# We will test the retrained model, just as we did in the first part of this notebook. However, since the params already exist in the workspace from the retraining step, we do not need to set the *.param_init_net*. Rather, we set **init_params=False** and brew in the model architecture with *Add_Original_CIFAR10_Model*. When we create the net, the model will find that the required blobs are already in the workspace. Then, we can run the main testing loop, which will report a final test accuracy score (which is hopefully higher).
# In[11]:
arg_scope = {"order": "NCHW"}
# Construct the model
test_model = model_helper.ModelHelper(name="test_model", arg_scope=arg_scope, init_params=False)
# Set the input as the test lmdb
data,_ = AddInputLayer(test_model,1,TEST_LMDB,'lmdb')
# brew in the model architecture
softmax = Add_Original_CIFAR10_Model(test_model, data, 10, 32, 32, 3)
accuracy = brew.accuracy(test_model, ['softmax', 'label' ], 'accuracy')
# Prime the net
workspace.RunNetOnce(test_model.param_init_net)
workspace.CreateNet(test_model.net, overwrite=True)
# Confusion Matrix for CIFAR-10
cmat = np.zeros((10,10))
# Stat keepers
avg_accuracy = 0.0
test_iters = 10000
# Main testing loop
for i in range(test_iters):
workspace.RunNet(test_model.net)
acc = workspace.FetchBlob('accuracy')
avg_accuracy += acc
if (i % 500 == 0) and (i > 0):
print("Iter: {}, Current Accuracy: {}".format(i, avg_accuracy/float(i)))
# Get the top-1 prediction
results = workspace.FetchBlob('softmax')[0]
label = workspace.FetchBlob('label')[0]
max_index, max_value = max(enumerate(results), key=operator.itemgetter(1))
# Update confusion matrix
cmat[label,max_index] += 1
# Report final testing results
print("*********************************************")
print("Final Test Accuracy: ",avg_accuracy/float(test_iters))
# ### Check Results
#
# Notice, the result from testing the re-trained model is better than the original test accuracy. If you wish, you can save the new model as .pb files just as in Part 1, but we will leave that to you. The last thing we will do is attempt to visualize the performance of our classifier by plotting a confusion matrix and looking for a **strong diagonal** trend.
# In[12]:
# Plot confusion matrix
fig = plt.figure(figsize=(10, 10))
plt.tight_layout()
ax = fig.add_subplot(111)
res = ax.imshow(cmat, cmap=plt.cm.rainbow, interpolation='nearest')
width, height = cmat.shape
# Bug fix: xrange does not exist on Python 3; this file imports __future__
# features for 2/3 compatibility, so use range instead.
for x in range(width):
    for y in range(height):
        # annotate each cell with its raw count; a strong diagonal = good model
        ax.annotate(str(cmat[x, y]), xy=(y, x), horizontalalignment='center', verticalalignment='center')
classes = ['Airplane', 'Automobile', 'Bird', 'Cat', 'Deer', 'Dog', 'Frog', 'Horse', 'Ship', 'Truck']
plt.xticks(range(width), classes, rotation=0)
plt.yticks(range(height), classes, rotation=0)
ax.set_xlabel('Predicted Class')
ax.set_ylabel('True Class')
plt.title('CIFAR-10 Confusion Matrix')
plt.show()
| [
29113,
14468,
7804,
2,
198,
2,
198,
2,
8410,
5626,
48483,
12680,
45811,
13,
7283,
3180,
24700,
1137,
11617,
47044,
2662,
1404,
2149,
19807,
13,
1303,
198,
2,
37795,
48045,
39319,
3336,
20832,
11682,
7473,
12011,
38044,
13,
220,
220,
220... | 3.28347 | 4,519 |
from django.db.models import Q
from django.utils.encoding import force_text
from django.db import connection
from ..settings import DEFAULT_SEARCH_FIELDS
__all__ = ('AutocompleteModel', )
class AutocompleteModel(object):
"""Autocomplete which considers choices as a queryset.
.. py:attribute:: choices
A queryset.
.. py:attribute:: limit_choices
Maximum number of choices to display.
.. py:attribute:: search_fields
Fields to search in, configurable like on
:py:attr:`django:django.contrib.admin.ModelAdmin.search_fields`
.. py:attribute:: split_words
If True, AutocompleteModel splits the search query into words and
returns all objects that contain each of the words, case insensitive,
where each word must be in at least one of search_fields. This mimics
the mechanism of django's
:py:attr:`django:django.contrib.admin.ModelAdmin.search_fields`.
If 'or', AutocompleteModel does the same but returns all objects that
contain **any** of the words.
.. py:attribute:: order_by
If set, it will be used to order choices in the deck. It can be a
single field name or an iterable (ie. list, tuple).
However, if AutocompleteModel is instanciated with a list of values,
it'll reproduce the ordering of values.
"""
limit_choices = 20
choices = None
search_fields = DEFAULT_SEARCH_FIELDS
split_words = False
order_by = None
def choice_value(self, choice):
"""
Return the pk of the choice by default.
"""
return choice.pk
def choice_label(self, choice):
"""
Return the textual representation of the choice by default.
"""
return force_text(choice)
def order_choices(self, choices):
"""
Order choices using :py:attr:`order_by` option if it is set.
"""
if isinstance(self.order_by, str):
self.order_by = (self.order_by,)
if self.values:
pk_name = ('id' if not getattr(choices.model._meta, 'pk', None)
else choices.model._meta.pk.column)
field = '"%s"."%s"' if connection.vendor == 'postgresql' \
else '%s.%s'
pk_name = field % (choices.model._meta.db_table, pk_name)
# Order in the user selection order when self.values is set.
clauses = ' '.join(['WHEN %s=\'%s\' THEN %s' % (pk_name, pk, i)
for i, pk in enumerate(self.values)])
ordering = 'CASE %s ELSE 1000 END' % clauses
ordering_alias = '_autocomplete_light_ordering_'
_order_by = (ordering_alias,)
if self.order_by:
# safe concatenation of list/tuple
# thanks lvh from #python@freenode
_order_by = set(_order_by) | set(self.order_by)
return choices.extra(
select={ordering_alias: ordering},
order_by=_order_by)
if self.order_by is None:
return choices
return choices.order_by(*self.order_by)
def choices_for_values(self):
"""
Return ordered choices which pk are in
:py:attr:`~.base.AutocompleteInterface.values`.
"""
assert self.choices is not None, 'choices should be a queryset'
return self.order_choices(self.choices.filter(
pk__in=[x for x in self.values if x != '']))
def choices_for_request(self):
"""
Return a queryset based on :py:attr:`choices` using options
:py:attr:`split_words`, :py:attr:`search_fields` and
:py:attr:`limit_choices`.
"""
assert self.choices is not None, 'choices should be a queryset'
assert self.search_fields, 'autocomplete.search_fields must be set'
assert not isinstance(self.search_fields, str), \
'autocomplete.search_fields must not be a string'
q = self.request.GET.get('q', '')
exclude = self.request.GET.getlist('exclude')
conditions = self._choices_for_request_conditions(q,
self.search_fields)
return self.order_choices(self.choices.filter(
conditions).exclude(pk__in=exclude))[0:self.limit_choices]
def _construct_search(self, field_name):
"""
Using a field name optionnaly prefixed by `^`, `=`, `@`, return a
case-insensitive filter condition name usable as a queryset `filter()`
keyword argument.
"""
if field_name.startswith('^'):
return "%s__istartswith" % field_name[1:]
elif field_name.startswith('='):
return "%s__iexact" % field_name[1:]
elif field_name.startswith('@'):
return "%s__search" % field_name[1:]
else:
return "%s__icontains" % field_name
def _choices_for_request_conditions(self, q, search_fields):
"""
Return a `Q` object usable by `filter()` based on a list of fields to
search in `search_fields` for string `q`.
It uses options `split_words` and `search_fields` . Refer to the
class-level documentation for documentation on each of these options.
"""
conditions = Q()
if self.split_words:
for word in q.strip().split():
word_conditions = Q()
for search_field in search_fields:
word_conditions |= Q(**{
self._construct_search(search_field): word})
if self.split_words == 'or':
conditions |= word_conditions
else:
conditions &= word_conditions
else:
for search_field in search_fields:
conditions |= Q(**{self._construct_search(search_field): q})
return conditions
def validate_values(self):
"""
Return True if all values where found in :py:attr:`choices`.
"""
return len(self.choices_for_values()) == len(self.values)
| [
6738,
42625,
14208,
13,
9945,
13,
27530,
1330,
1195,
198,
6738,
42625,
14208,
13,
26791,
13,
12685,
7656,
1330,
2700,
62,
5239,
198,
6738,
42625,
14208,
13,
9945,
1330,
4637,
198,
198,
6738,
11485,
33692,
1330,
5550,
38865,
62,
5188,
31... | 2.276301 | 2,671 |
import csv
with open("process_mmsdk/mosei_dataset_int.csv", "r") as rf:
csvreader = csv.reader(rf)
next(csvreader)
with open("process_mmsdk/no_sentiment.csv", "w", newline="") as wf:
csvwriter = csv.writer(wf)
headers = ["video_name_segment", "happy", "sad", "anger", "surprise", "disgust", "fear"]
csvwriter.writerow(headers)
for row in csvreader:
# sentiment = int(row[1])
# new_sentiment = convert(sentiment)
csvwriter.writerow([row[0], row[2], row[3], row[4], row[5], row[6], row[7]])
| [
11748,
269,
21370,
628,
198,
4480,
1280,
7203,
14681,
62,
76,
907,
34388,
14,
76,
577,
72,
62,
19608,
292,
316,
62,
600,
13,
40664,
1600,
366,
81,
4943,
355,
374,
69,
25,
198,
220,
220,
220,
269,
21370,
46862,
796,
269,
21370,
13,... | 2.056738 | 282 |
# Generated by Django 2.0.1 on 2018-01-18 09:06
from django.db import migrations, models
| [
2,
2980,
515,
416,
37770,
362,
13,
15,
13,
16,
319,
2864,
12,
486,
12,
1507,
7769,
25,
3312,
198,
198,
6738,
42625,
14208,
13,
9945,
1330,
15720,
602,
11,
4981,
628
] | 2.84375 | 32 |
# celery.py
"""
Using SQS queue & MySQL result
pip install https://github.com/celery/vine/zipball/master#egg=vine
pip install https://github.com/celery/kombu/zipball/master#egg=kombu
pip install https://github.com/celery/py-amqp/zipball/master#egg=amqp
pip install https://github.com/celery/billiard/zipball/master#egg=billiard
pip install https://github.com/celery/celery/zipball/master#egg=celery
celery --app tasks worker --loglevel=debug
celery -A proj worker --loglevel=debug
celery multi start -A proj --loglevel=debug --pidfile=/Users/admin/Documents/tests/python-tester/python-tester/%n.pid --logfile=/Users/admin/Documents/tests/python-tester/python-tester/%n%I.log
Using RabbitMQ queue & result
rabbitmq-server -detached
celery -A proj worker -l info
celery multi start w1 -A proj -l info
celery multi stopwait w1 -A proj -l info
rabbitmqctl stop
"""
from __future__ import absolute_import, unicode_literals
from celery import Celery
from proj import celeryconfig
print('starting...')
app = Celery('proj',
include=['proj.tasks'])
app.config_from_object(celeryconfig)
if __name__ == '__main__':
# def __main__():
app.start()
| [
2,
18725,
1924,
13,
9078,
198,
37811,
198,
12814,
49747,
50,
16834,
1222,
33476,
1255,
198,
79,
541,
2721,
3740,
1378,
12567,
13,
785,
14,
7015,
88,
14,
26818,
14,
13344,
1894,
14,
9866,
2,
33856,
28,
26818,
198,
79,
541,
2721,
3740... | 2.754762 | 420 |
print(f"\n{'*' * 50}")
# Example 1 - Using Square function
print(f'Square of 5 is {square(5)}')
lst = [1, 2, -5, 4]
# Apply the square function to each element in the list - Using For loop
rslt = list() # Or rslt = []
for ii in lst:
rslt.append(square(ii))
print("Example 1 - Using Loop", rslt)
# Apply the square function to each element in the list - Using Map (Python discourages use of map, filter)
mp = map(square, lst)
print("Example 1 - Using Map", list(mp))
# Apply the square function to each element in the list - Using list comprehension (Pythonic Way - Preferred)
rslt = [square(x) for x in lst]
print("Example 1 - Using list comprehension", rslt)
print(f"\n{'*' * 50}")
# Example 2 - Filter odd numbers from a list
lst = [1, 2, 4, 6, 3, 9]
# Using for loop
rslt = []
for ii in lst:
if is_odd(ii):
rslt.append(ii)
print("Example 2 - Using Loop", rslt)
# using filter
rslt = list(filter(is_odd, lst))
print("Example 2 - Using Filter", rslt)
# using list comprehension
rslt = [ii for ii in lst if is_odd(ii)]
print("Example 2 - Using list comprehension", rslt)
print(f"\n{'*' * 50}")
# Example 3 - Create a grid of r rows and c columns
print("Example 3 - Using Loop", create_grid(3,4))
print("Example 3 - Using list comprehension", create_grid1(3,4))
| [
4798,
7,
69,
1,
59,
77,
90,
6,
9,
6,
1635,
2026,
92,
4943,
198,
2,
17934,
352,
532,
8554,
9276,
2163,
198,
198,
4798,
7,
69,
6,
48011,
286,
642,
318,
1391,
23415,
7,
20,
38165,
11537,
198,
198,
75,
301,
796,
685,
16,
11,
362... | 2.825708 | 459 |
from app.application.views import application # noqa
| [
6738,
598,
13,
31438,
13,
33571,
1330,
3586,
220,
1303,
645,
20402,
198
] | 4.153846 | 13 |
from .IMClientProtocol import *
from .IMClientSocket import *
from .user import *
from .localService import *
| [
6738,
764,
3955,
11792,
19703,
4668,
1330,
1635,
201,
198,
6738,
764,
3955,
11792,
39105,
1330,
1635,
201,
198,
6738,
764,
7220,
1330,
1635,
201,
198,
6738,
764,
12001,
16177,
1330,
1635,
201,
198
] | 3.352941 | 34 |
CLIENT_ID = "9ecf1e52d5034f01ac8fce5378decd00"
CLIENT_SECRET = "519c44e4a4764c208327e9ba8059f2fd"
REDIRECT_URI = "http://localhost:8000/spotify/redirect"
| [
5097,
28495,
62,
2389,
796,
366,
24,
721,
69,
16,
68,
4309,
67,
1120,
2682,
69,
486,
330,
23,
69,
344,
20,
30695,
12501,
67,
405,
1,
198,
5097,
28495,
62,
23683,
26087,
796,
366,
47785,
66,
2598,
68,
19,
64,
2857,
2414,
66,
1238... | 2 | 77 |
import unittest
import math
from signals.functions.baseline import BaseLine
if __name__ == '__main__':
unittest.main() | [
11748,
555,
715,
395,
198,
11748,
10688,
198,
6738,
10425,
13,
12543,
2733,
13,
12093,
4470,
1330,
7308,
13949,
198,
198,
361,
11593,
3672,
834,
6624,
705,
834,
12417,
834,
10354,
198,
220,
220,
220,
555,
715,
395,
13,
12417,
3419
] | 3 | 41 |
"""
Module: Permissions for templates rendering helpers for MDTUI
Project: Adlibre DMS
Copyright: Adlibre Pty Ltd 2012
License: See LICENSE for license information
Author: Iurii Garmash
"""
from django import template
from mdtui.security import SEC_GROUP_NAMES
register = template.Library()
@register.simple_tag(takes_context=True)
def check_search_permit(context):
"""
Checks request.user for permission to SEARCH in MUI
In fact he must be in search group in security.
Set's up context variable 'search_permitted'
it can be used farther in IF template compassion.
"""
# Do nothing if context variable has already been set
if 'search_permitted' in context:
return ''
user = context['request'].user
permission = False
if not user.is_superuser:
groups = user.groups.all()
for group in groups:
if group.name == SEC_GROUP_NAMES['search']:
permission = True
else:
permission = True
context['search_permitted'] = permission
return ''
@register.simple_tag(takes_context=True)
def check_index_permit(context):
"""
Checks request.user for permission to INDEX in MUI
In fact he must be in search group in security.
Set's up context variable 'index_permitted'
it can be used farther in IF template compassion.
"""
# Do nothing if context variable has already been set
if 'index_permitted' in context:
return ''
user = context['request'].user
permission = False
if not user.is_superuser:
groups = user.groups.all()
for group in groups:
if group.name == SEC_GROUP_NAMES['index']:
permission = True
else:
permission = True
context['index_permitted'] = permission
return ''
@register.simple_tag(takes_context=True)
def check_edit_index_permit(context):
"""
Checks request.user for permission to EDIT DOCUMENT INDEX in MUI
In fact he must be in search group in security.
Set's up context variable 'index_permitted'
it can be used farther in IF template compassion.
"""
# Do nothing if context variable has already been set
if 'edit_index_permitted' in context:
return ''
user = context['request'].user
permission = False
if not user.is_superuser:
groups = user.groups.all()
for group in groups:
if group.name == SEC_GROUP_NAMES['edit_index']:
permission = True
else:
permission = True
context['edit_index_permitted'] = permission
return ''
| [
37811,
198,
26796,
25,
2448,
8481,
329,
24019,
14837,
49385,
329,
10670,
51,
10080,
198,
198,
16775,
25,
1215,
8019,
260,
360,
5653,
198,
15269,
25,
1215,
8019,
260,
350,
774,
12052,
2321,
198,
34156,
25,
4091,
38559,
24290,
329,
5964,
... | 2.778256 | 929 |
import sys
import os
from os import remove
from os.path import join, dirname, realpath, exists
import numpy as np
import time
start_time = None
if __name__ == '__main__':
# time logging
#global start_time
start_time = time.time()
from pickle import load
import argparse
# parse commandline arguments
log_message(sys.argv)
parser = argparse.ArgumentParser(description='Generate synth dataset images.')
parser.add_argument('--idx', type=int,
help='idx of the requested sequence')
parser.add_argument('--ishape', type=int,
help='requested cut, according to the stride')
parser.add_argument('--stride', type=int,
help='stride amount, default 50')
args = parser.parse_args(sys.argv[sys.argv.index("--idx") :])
idx = args.idx
ishape = args.ishape
stride = args.stride
log_message("input idx: %d" % idx)
log_message("input ishape: %d" % ishape)
log_message("input stride: %d" % stride)
if idx == None:
exit(1)
if ishape == None:
exit(1)
if stride == None:
log_message("WARNING: stride not specified, using default value 50")
stride = 50
# import idx info (name, split)
idx_info = load(open("pkl/idx_info.pickle", 'rb'))
# get runpass
(runpass, idx) = divmod(idx, len(idx_info))
log_message("start part 2")
import hashlib
import random
# initialize random seeds with sequence id
s = "synth_data:%d:%d:%d" % (idx, runpass, ishape)
seed_number = int(hashlib.sha1(s.encode('utf-8')).hexdigest(), 16) % (10 ** 8)
log_message("GENERATED SEED %d from string '%s'" % (seed_number, s))
random.seed(seed_number)
np.random.seed(seed_number)
# import configuration
import config
params = config.load_file('config', 'SYNTH_DATA')
smpl_data_folder = params['smpl_data_folder']
smpl_data_filename = params['smpl_data_filename']
resy = params['resy']
resx = params['resx']
tmp_path = params['tmp_path']
output_path = params['output_path']
output_types = params['output_types']
stepsize = params['stepsize']
clipsize = params['clipsize']
openexr_py2_path = params['openexr_py2_path']
# check whether openexr_py2_path is loaded from configuration file
if 'openexr_py2_path' in locals() or 'openexr_py2_path' in globals():
for exr_path in openexr_py2_path.split(':'):
sys.path.insert(1, exr_path)
# to install OpenEXR:
# export ARCHFLAGS = "-arch x86_64"
# CPPFLAGS = "-std=c++11"
# to read exr imgs
import OpenEXR
import array
import Imath
log_message("Loading SMPL data")
smpl_data = np.load(join(smpl_data_folder, smpl_data_filename))
cmu_parms, name = load_body_data(smpl_data, idx)
tmp_path = join(tmp_path, 'run%d_%s_c%04d' % (runpass, name.replace(" ", ""), (ishape + 1)))
res_paths = {k:join(tmp_path, '%05d_%s'%(idx, k)) for k in output_types if output_types[k]}
data = cmu_parms[name]
nframes = len(data['poses'][::stepsize])
output_path = join(output_path, 'run%d' % runpass, name.replace(" ", ""))
# .mat files
matfile_normal = join(output_path, name.replace(" ", "") + "_c%04d_normal.mat" % (ishape + 1))
matfile_gtflow = join(output_path, name.replace(" ", "") + "_c%04d_gtflow.mat" % (ishape + 1))
matfile_depth = join(output_path, name.replace(" ", "") + "_c%04d_depth.mat" % (ishape + 1))
matfile_segm = join(output_path, name.replace(" ", "") + "_c%04d_segm.mat" % (ishape + 1))
dict_normal = {}
dict_gtflow = {}
dict_depth = {}
dict_segm = {}
get_real_frame = lambda ifr: ifr
FLOAT = Imath.PixelType(Imath.PixelType.FLOAT)
# overlap determined by stride (# subsampled frames to skip)
fbegin = ishape*stepsize*stride
fend = min(ishape*stepsize*stride + stepsize*clipsize, len(data['poses']))
# LOOP OVER FRAMES
for seq_frame, (pose, trans) in enumerate(zip(data['poses'][fbegin:fend:stepsize], data['trans'][fbegin:fend:stepsize])):
iframe = seq_frame
log_message("Processing frame %d" % iframe)
for k, folder in res_paths.items():
if not k== 'vblur' and not k=='fg':
path = join(folder, 'Image%04d.exr' % get_real_frame(seq_frame))
exr_file = OpenEXR.InputFile(path)
if k == 'normal':
mat = np.transpose(np.reshape([array.array('f', exr_file.channel(Chan, FLOAT)).tolist() for Chan in ("R", "G", "B")], (3, resx, resy)), (1, 2, 0))
dict_normal['normal_%d' % (iframe + 1)] = mat.astype(np.float32, copy=False) # +1 for the 1-indexing
elif k == 'gtflow':
mat = np.transpose(np.reshape([array.array('f', exr_file.channel(Chan, FLOAT)).tolist() for Chan in ("R", "G")], (2, resx, resy)), (1, 2, 0))
dict_gtflow['gtflow_%d' % (iframe + 1)] = mat.astype(np.float32, copy=False)
elif k == 'depth':
mat = np.reshape([array.array('f', exr_file.channel(Chan, FLOAT)).tolist() for Chan in ("R")], (resx, resy))
dict_depth['depth_%d' % (iframe + 1)] = mat.astype(np.float32, copy=False)
elif k == 'segm':
mat = np.reshape([array.array('f', exr_file.channel(Chan, FLOAT)).tolist() for Chan in ("R")], (resx, resy))
dict_segm['segm_%d' % (iframe + 1)] = mat.astype(np.uint8, copy=False)
#remove(path)
import scipy.io
scipy.io.savemat(matfile_normal, dict_normal, do_compression=True)
scipy.io.savemat(matfile_gtflow, dict_gtflow, do_compression=True)
scipy.io.savemat(matfile_depth, dict_depth, do_compression=True)
scipy.io.savemat(matfile_segm, dict_segm, do_compression=True)
# cleaning up tmp
if tmp_path != "" and tmp_path != "/":
log_message("Cleaning up tmp")
os.system('rm -rf %s' % tmp_path)
log_message("Completed batch") | [
11748,
25064,
198,
11748,
28686,
220,
198,
6738,
28686,
1330,
4781,
198,
6738,
28686,
13,
6978,
1330,
4654,
11,
26672,
3672,
11,
1103,
6978,
11,
7160,
198,
11748,
299,
32152,
355,
45941,
198,
220,
220,
220,
220,
198,
11748,
640,
198,
... | 2.202669 | 2,773 |
from .models import Place
from django.contrib.gis.geos import Point
import random
min_x = -74.8
max_x = -75.2
min_y = 39.7
max_y = 40.2
objects = [
"CD",
"Christmas ornament",
"acorn",
"apple",
"bag",
"bag of cotton balls",
"bag of popcorn",
"bag of rubber bands",
"ball of yarn",
"balloon",
"banana",
"bananas",
"bandana",
"bangle bracelet",
"bar of soap",
"baseball",
"baseball bat",
"baseball hat",
"basketball",
"beaded bracelet",
"beaded necklace",
"bed",
"beef",
"bell",
"belt",
"blouse",
"blowdryer",
"bonesaw",
"book",
"book of jokes",
"book of matches",
"bookmark",
"boom box",
"bottle",
"bottle cap",
"bottle of glue",
"bottle of honey",
"bottle of ink",
"bottle of lotion",
"bottle of nail polish",
"bottle of oil",
"bottle of paint",
"bottle of perfume",
"bottle of pills",
"bottle of soda",
"bottle of sunscreen",
"bottle of syrup",
"bottle of water",
"bouquet of flowers",
"bow",
"bow tie",
"bowl",
"box",
"box of Q-tips",
"box of baking soda",
"box of chalk",
"box of chocolates",
"box of crayons",
"box of markers",
"box of tissues",
"bracelet",
"bread",
"broccoli",
"brush",
"buckle",
"butter knife",
"button",
"camera",
"can of beans",
"can of chili",
"can of peas",
"can of whipped cream",
"candle",
"candlestick",
"candy bar",
"candy cane",
"candy wrapper",
"canteen",
"canvas",
"car",
"card",
"carrot",
"carrots",
"cars",
"carton of ice cream",
"cat",
"catalogue",
"cell phone",
"cellphone",
"cement stone",
"chain",
"chair",
"chalk",
"chapter book",
"check book",
"chenille stick",
"chicken",
"children's book",
"chocolate",
"class ring",
"clay pot",
"clock",
"clothes",
"clothes pin",
"coffee mug",
"coffee pot",
"comb",
"comic book",
"computer",
"conditioner",
"container of pudding",
"cookie jar",
"cookie tin",
"cork",
"couch",
"cow",
"cowboy hat",
"craft book",
"credit card",
"crow",
"crowbar",
"cucumber",
"cup",
"dagger",
"deodorant",
"desk",
"dictionary",
"dog",
"dolphin",
"domino set",
"door",
"dove",
"drawer",
"drill press",
"egg",
"egg beater",
"egg timer",
"empty bottle",
"empty jar",
"empty tin can",
"eraser",
"extension cord",
"eye liner",
"face wash",
"fake flowers",
"feather",
"feather duster",
"few batteries",
"fish",
"fishing hook",
"flag",
"flashlight",
"floor",
"flowers",
"flyswatter",
"food",
"football",
"fork",
"fridge",
"frying pan",
"game CD",
"game cartridge",
"garden spade",
"giraffe",
"glass",
"glasses",
"glow stick",
"grid paper",
"grocery list",
"hair brush",
"hair clip",
"hair pin",
"hair ribbon",
"hair tie",
"hammer",
"hamster",
"hand bag",
"hand fan",
"hand mirror",
"handbasket",
"handful of change",
"handheld game system",
"hanger",
"harmonica",
"helmet",
"house",
"ice cream stick",
"ice cube",
"ice pick",
"incense holder",
"ipod",
"ipod charger",
"jar of jam",
"jar of peanut butter",
"jar of pickles",
"jigsaw puzzle",
"key",
"key chain",
"keyboard",
"keychain",
"keys",
"kitchen knife",
"knife",
"lace",
"ladle",
"lamp",
"lamp shade",
"laser pointer",
"leg warmers",
"lemon",
"letter opener",
"light",
"light bulb",
"lighter",
"lime",
"lion",
"lip gloss",
"locket",
"lotion",
"magazine",
"magnet",
"magnifying glass",
"map",
"marble",
"martini glass",
"matchbook",
"microphone",
"milk",
"miniature portrait",
"mirror",
"mobile phone",
"model car",
"money",
"monitor",
"mop",
"mouse pad",
"mp3 player",
"multitool",
"music CD",
"nail",
"nail clippers",
"nail filer",
"necktie",
"needle",
"notebook",
"notepad",
"novel",
"ocarina",
"orange",
"outlet",
"pack of cards",
"package of crisp and crunchy edibles",
"package of glitter",
"packet of seeds",
"pail",
"paint brush",
"paintbrush",
"pair of binoculars",
"pair of dice",
"pair of earrings",
"pair of glasses",
"pair of handcuffs",
"pair of knitting needles",
"pair of rubber gloves",
"pair of safety goggles",
"pair of scissors",
"pair of socks",
"pair of sunglasses",
"pair of tongs",
"pair of water goggles",
"panda",
"pants",
"paper",
"paperclip",
"pasta strainer",
"pearl necklace",
"pen",
"pencil",
"pencil holder",
"pepper shaker",
"perfume",
"phone",
"photo album",
"picture frame",
"piece of gum",
"pillow",
"pinecone",
"plastic fork",
"plate",
"plush bear",
"plush cat",
"plush dinosaur",
"plush dog",
"plush frog",
"plush octopus",
"plush pony",
"plush rabbit",
"plush unicorn",
"pocketknife",
"pocketwatch",
"pool stick",
"pop can",
"postage stamp",
"puddle",
"purse",
"purse/bag",
"quartz crystal",
"quilt",
"rabbit",
"radio",
"rat",
"remote",
"rhino",
"ring",
"rock",
"roll of duct tape",
"roll of gauze",
"roll of masking tape",
"roll of stickers",
"roll of toilet paper",
"rolling pin",
"rope",
"rubber band",
"rubber duck",
"rubber stamp",
"rug",
"rusty nail",
"safety pin",
"sailboat",
"salt shaker",
"sand paper",
"sandal",
"sandglass",
"scallop shell",
"scarf",
"scotch tape",
"screw",
"screwdriver",
"seat belt",
"shampoo",
"shark",
"sharpie",
"shawl",
"sheep",
"sheet of paper",
"shirt",
"shirt button",
"shoe lace",
"shoes",
"shopping bag",
"shovel",
"sidewalk",
"sketch pad",
"slipper",
"small pouch",
"snail shell",
"snowglobe",
"soap",
"soccer ball",
"socks",
"sofa",
"spatula",
"speakers",
"spectacles",
"spice bottle",
"sponge",
"spool of ribbon",
"spool of string",
"spool of thread",
"spool of wire",
"spoon",
"spring",
"squirrel",
"squirt gun",
"statuette",
"steak knife",
"stick",
"stick of incense",
"sticker book",
"sticky note",
"stockings",
"stop sign",
"straw",
"street lights",
"sun glasses",
"sword",
"table",
"tea cup",
"tea pot",
"teddies",
"television",
"tennis ball",
"tennis racket",
"thermometer",
"thimble",
"thread",
"tiger",
"tire swing",
"tissue box",
"toe ring",
"toilet",
"toilet paper tube",
"tomato",
"tooth pick",
"toothbrush",
"toothpaste",
"toothpick",
"towel",
"toy boat",
"toy car",
"toy plane",
"toy robot",
"toy soldier",
"toy top",
"trash bag",
"tree",
"trucks",
"tube of lip balm",
"tube of lipstick",
"turtle",
"tv",
"tweezers",
"twister",
"umbrella",
"vase",
"video games",
"wallet",
"washcloth",
"washing machine",
"watch",
"water",
"water bottle",
"wedding ring",
"whale",
"whip",
"whistle",
"white out",
"window",
"wine glass",
"wireless control",
"wishbone",
"wooden spoon",
"word search",
"wrench",
"wristwatch",
"zebra",
"zipper"
]
objects_len = len(objects) - 1 | [
6738,
764,
27530,
1330,
8474,
198,
6738,
42625,
14208,
13,
3642,
822,
13,
70,
271,
13,
469,
418,
1330,
6252,
198,
11748,
4738,
198,
198,
1084,
62,
87,
796,
532,
4524,
13,
23,
198,
9806,
62,
87,
796,
532,
2425,
13,
17,
198,
1084,
... | 2.061447 | 3,857 |
#!/usr/bin/env python
#-----------------------------------------------------------------------------
# Title :
#-----------------------------------------------------------------------------
# File : TopLevel.py
# Created : 2017-04-03
#-----------------------------------------------------------------------------
# Description:
#
#-----------------------------------------------------------------------------
# This file is part of the rogue_example software. It is subject to
# the license terms in the LICENSE.txt file found in the top-level directory
# of this distribution and at:
# https://confluence.slac.stanford.edu/display/ppareg/LICENSE.html.
# No part of the rogue_example software, including this file, may be
# copied, modified, propagated, or distributed except according to the terms
# contained in the LICENSE.txt file.
#-----------------------------------------------------------------------------
import pyrogue as pr
import LsstPwrCtrlCore as base
| [
2,
48443,
14629,
14,
8800,
14,
24330,
21015,
198,
2,
10097,
32501,
198,
2,
11851,
220,
220,
220,
220,
220,
1058,
220,
198,
2,
10097,
32501,
198,
2,
9220,
220,
220,
220,
220,
220,
220,
1058,
5849,
4971,
13,
9078,
198,
2,
15622,
220... | 4.618605 | 215 |
#!/usr/bin/env python2.7
# Copyright 2015 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Generates the appropriate JSON data for LB interop test scenarios."""
import json
import os
import yaml
all_scenarios = []
# TODO(https://github.com/grpc/grpc-go/issues/2347): enable
# client_falls_back_because_no_backends_* scenarios for Java/Go.
# TODO(https://github.com/grpc/grpc-java/issues/4887): enable
# *short_stream* scenarios for Java.
# TODO(https://github.com/grpc/grpc-java/issues/4912): enable
# Java TLS tests involving TLS to the balancer.
all_scenarios += generate_no_balancer_because_lb_a_record_returns_nx_domain()
all_scenarios += generate_no_balancer_because_lb_a_record_returns_no_data()
all_scenarios += generate_client_referred_to_backend()
all_scenarios += generate_client_referred_to_backend_fallback_broken()
all_scenarios += generate_client_referred_to_backend_multiple_backends()
all_scenarios += generate_client_falls_back_because_no_backends()
all_scenarios += generate_client_falls_back_because_balancer_connection_broken()
all_scenarios += generate_client_referred_to_backend_multiple_balancers()
print(yaml.dump({
'lb_interop_test_scenarios': all_scenarios,
}))
| [
2,
48443,
14629,
14,
8800,
14,
24330,
21015,
17,
13,
22,
198,
2,
15069,
1853,
308,
49,
5662,
7035,
13,
198,
2,
198,
2,
49962,
739,
262,
24843,
13789,
11,
10628,
362,
13,
15,
357,
1169,
366,
34156,
15341,
198,
2,
345,
743,
407,
7... | 3.054482 | 569 |
# names.py
import tkinter as tk
import sqlite3
from files import get_current_file
from styles import config_generic
from widgets import (
Frame, Label, Button, LabelMovable, LabelH3, Entry, Toplevel)
from window_border import Border
from custom_combobox_widget import Combobox
from scrolling import MousewheelScrolling, Scrollbar, resize_scrolled_content
from autofill import EntryAuto
from toykinter_widgets import run_statusbar_tooltips
from right_click_menu import RightClickMenu, make_rc_menus
from messages_context_help import person_add_help_msg
from messages import open_yes_no_message, names_msg, open_message
from images import get_all_pics
from query_strings import (
select_current_person, select_name_with_id, select_all_names_ids,
select_all_person_ids, select_image_id, select_max_person_id,
insert_images_elements, select_name_type_id, insert_name,
select_all_images, select_all_name_types, insert_person_new,
select_person_gender, select_max_name_type_id, insert_name_type_new,
insert_image_new, select_name_with_id_any, select_birth_names_ids)
import dev_tools as dt
from dev_tools import looky, seeline
GENDER_TYPES = ('unknown', 'female', 'male', 'other')
NAME_SUFFIXES = (
'jr.', 'sr.', 'jr', 'sr', 'junior', 'senior',
'i', 'ii', 'iii', 'iv', 'v', 'vi', 'vii', 'viii',
'ix', 'x', 'xi', 'xii', 'xiii', 'xiv', 'xv')
NAME_TYPES_HIERARCHY = (
'reference name', 'adoptive name', 'also known as', 'married name',
'legally changed name', 'pseudonym', 'pen name', 'stage name', 'nickname',
'call name', 'official name', 'anglicized name', 'religious order name',
'other name type', 'given name')
def make_values_list_for_person_select():
'''
birth names only, probably not useful for autofills
'''
current_file = get_current_file()[0]
conn = sqlite3.connect(current_file)
cur = conn.cursor()
cur.execute(select_birth_names_ids)
peeps = cur.fetchall()
peeps = [list(i) for i in peeps]
cur.close()
conn.close()
combo_peeps = sorted(peeps, key=lambda i: i[2])
people = []
for tup in combo_peeps:
line = '{} #{}'.format(tup[0], tup[1])
people.append(line)
return people
def make_all_names_list_for_person_select():
'''
all name types, best for autofill values
'''
current_file = get_current_file()[0]
conn = sqlite3.connect(current_file)
cur = conn.cursor()
cur.execute(select_all_names_ids)
peeps = cur.fetchall()
peeps = [list(i) for i in peeps]
cur.close()
conn.close()
combo_peeps = sorted(peeps, key=lambda i: i[2])
people = []
for tup in combo_peeps:
line = '{} #{}'.format(tup[0], tup[1])
people.append(line)
return people
if __name__ == "__main__":
root = tk.Tk()
person_input = Entry(root, width=40)
person_input.grid()
person_input.focus_set()
addbutt = Button(
root,
text="ADD NEW PERSON",
command=open_dialog)
addbutt.grid()
root.mainloop()
| [
2,
3891,
13,
9078,
201,
198,
201,
198,
11748,
256,
74,
3849,
355,
256,
74,
201,
198,
11748,
44161,
578,
18,
201,
198,
6738,
3696,
1330,
651,
62,
14421,
62,
7753,
201,
198,
6738,
12186,
1330,
4566,
62,
41357,
201,
198,
6738,
40803,
... | 2.343865 | 1,361 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import re
from collections import OrderedDict
from datetime import date
CATEGORY_RE = re.compile(r'(?:^| )(beginner|novice|pro|[a-c](?:/[a-c])*|(?:pro/?)*[1-5](?:/[1-5])*)(?: |$)', flags=re.I)
AGE_RANGE_RE = re.compile(r'([7-9]|1[0-9])(-([7-9]|1[0-9]))?')
NAME_RE = re.compile(r'^[a-z.\'-]+', flags=re.I)
NUMBER_RE = re.compile(r'[0-9]+|dnf|dq', flags=re.I)
STANDINGS_RE = re.compile(r'standings|overall|(?<!\d )results|totals|order|after stage|qualifying|hot spots|gender|(overall|cyclocross|road) bar|'
r'(series|team|individual|april|may|june|july) (series|points|competition|final)', flags=re.I)
DISCIPLINE_RE_MAP = { # patterns within each discipline are ordered by precedence
'road': [
['', re.compile('combined', flags=re.I)],
['circuit', re.compile('circuit|barton|dirty circles|kings valley|montinore|'
'piece of cake|tabor|(monday|tuesday)( night)? pir|'
'champion( thursday|ship raceway)|banana belt', flags=re.I)],
['criterium', re.compile('crit', flags=re.I)],
['time_trial', re.compile(' tt|time trial|climb|uphill|revenge of the disc', flags=re.I)],
['gran_fondo', re.compile('fondo|epic|duro|roubaix', flags=re.I)],
['tour', re.compile('tour|stage', flags=re.I)],
],
}
# Points schedule changed effective 2019-08-31
SCHEDULE_2019_DATE = date(2019, 8, 31)
SCHEDULE_2019 = {
'cyclocross': {
'open': [
{'min': 10, 'max': 25, 'points': [3, 2, 1]},
{'min': 26, 'max': 40, 'points': [5, 4, 3, 2, 1]},
{'min': 41, 'max': 75, 'points': [7, 6, 5, 4, 3, 2, 1]},
{'min': 76, 'max': 999, 'points': [10, 8, 7, 5, 4, 3, 2, 1]},
],
'women': [
{'min': 6, 'max': 15, 'points': [3, 2, 1]},
{'min': 16, 'max': 25, 'points': [5, 4, 3, 2, 1]},
{'min': 26, 'max': 60, 'points': [7, 6, 5, 4, 3, 2, 1]},
{'min': 61, 'max': 999, 'points': [10, 8, 7, 5, 4, 3, 2, 1]},
],
},
'circuit': {
'open': [
{'min': 5, 'max': 10, 'points': [3, 2, 1]},
{'min': 11, 'max': 20, 'points': [4, 3, 2, 1]},
{'min': 21, 'max': 49, 'points': [5, 4, 3, 2, 1]},
{'min': 50, 'max': 999, 'points': [7, 5, 4, 3, 2, 1]},
],
},
'criterium': { # same as circuit
'open': [
{'min': 5, 'max': 10, 'points': [3, 2, 1]},
{'min': 11, 'max': 20, 'points': [4, 3, 2, 1]},
{'min': 21, 'max': 49, 'points': [5, 4, 3, 2, 1]},
{'min': 50, 'max': 999, 'points': [7, 5, 4, 3, 2, 1]},
],
},
'road': {
'open': [
{'min': 5, 'max': 10, 'points': [3, 2, 1]},
{'min': 11, 'max': 20, 'points': [7, 5, 4, 3, 2, 1]},
{'min': 21, 'max': 49, 'points': [8, 6, 5, 4, 3, 2, 1]},
{'min': 50, 'max': 999, 'points': [10, 8, 7, 6, 5, 4, 3, 2, 1]},
],
},
'tour': {
'open': [
{'min': 10, 'max': 19, 'points': [5, 3, 2, 1]},
{'min': 20, 'max': 35, 'points': [7, 5, 3, 2, 1]},
{'min': 36, 'max': 49, 'points': [10, 8, 6, 5, 4, 3, 2, 1]},
{'min': 50, 'max': 999, 'points': [20, 18, 16, 14, 12, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1]},
],
},
}
SCHEDULE_2018 = {
'cyclocross': {
'open': [
{'min': 10, 'max': 15, 'points': [3, 2, 1]},
{'min': 16, 'max': 25, 'points': [5, 4, 3, 2, 1]},
{'min': 26, 'max': 60, 'points': [7, 6, 5, 4, 3, 2, 1]},
{'min': 61, 'max': 999, 'points': [10, 8, 7, 5, 4, 3, 2, 1]},
],
'women': [
{'min': 6, 'max': 10, 'points': [3, 2, 1]},
{'min': 11, 'max': 20, 'points': [5, 4, 3, 2, 1]},
{'min': 21, 'max': 50, 'points': [7, 6, 5, 4, 3, 2, 1]},
{'min': 51, 'max': 999, 'points': [10, 8, 7, 5, 4, 3, 2, 1]},
],
},
'circuit': {
'open': [
{'min': 5, 'max': 10, 'points': [3, 2, 1]},
{'min': 11, 'max': 20, 'points': [4, 3, 2, 1]},
{'min': 21, 'max': 49, 'points': [5, 4, 3, 2, 1]},
{'min': 50, 'max': 999, 'points': [7, 5, 4, 3, 2, 1]},
],
},
'criterium': { # same as circuit
'open': [
{'min': 5, 'max': 10, 'points': [3, 2, 1]},
{'min': 11, 'max': 20, 'points': [4, 3, 2, 1]},
{'min': 21, 'max': 49, 'points': [5, 4, 3, 2, 1]},
{'min': 50, 'max': 999, 'points': [7, 5, 4, 3, 2, 1]},
],
},
'road': {
'open': [
{'min': 5, 'max': 10, 'points': [3, 2, 1]},
{'min': 11, 'max': 20, 'points': [7, 5, 4, 3, 2, 1]},
{'min': 21, 'max': 49, 'points': [8, 6, 5, 4, 3, 2, 1]},
{'min': 50, 'max': 999, 'points': [10, 8, 7, 6, 5, 4, 3, 2, 1]},
],
},
'tour': {
'open': [
{'min': 10, 'max': 19, 'points': [5, 3, 2, 1]},
{'min': 20, 'max': 35, 'points': [7, 5, 3, 2, 1]},
{'min': 36, 'max': 49, 'points': [10, 8, 6, 5, 4, 3, 2, 1]},
{'min': 50, 'max': 999, 'points': [20, 18, 16, 14, 12, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1]},
],
},
}
# Minimum points necesary to upgrade to this field.
# Maximum points after which you are mandatorily upgraded.
# FIXME - need minimum field size for mtb men/women
UPGRADES = {
'cyclocross': {
4: {'min': 0, 'max': 20},
3: {'min': 0, 'max': 20},
2: {'min': 20, 'max': 20},
1: {'min': 20, 'max': 35},
},
'mountain_bike-FIXME': {
2: {'podiums': 3},
1: {'podiums': 3},
0: {'podiums': 5},
},
'track-FIXME': {
4: {'min': 0, 'races': 4},
3: {'min': 20, 'races': 5},
2: {'min': 25, 'races': 5},
1: {'min': 30, 'races': 5},
},
'road': {
4: {'min': 15, 'max': 25, 'races': 10},
3: {'min': 20, 'max': 30, 'races': 25},
2: {'min': 25, 'max': 40},
1: {'min': 30, 'max': 50},
},
}
# Map event disciplines to upgrade schedules
# Order here is used elsewhere, such as the web UI
DISCIPLINE_MAP = OrderedDict([
('cyclocross', ['cyclocross']),
('road', ['road', 'circuit', 'criterium', 'gran_fondo', 'gravel', 'time_trial', 'tour']),
('mountain_bike', ['mountain_bike', 'downhill', 'super_d', 'short_track']),
('track', ['track']),
])
| [
2,
48443,
14629,
14,
8800,
14,
24330,
21015,
198,
2,
532,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
198,
198,
11748,
302,
198,
6738,
17268,
1330,
14230,
1068,
35,
713,
198,
6738,
4818,
8079,
1330,
3128,
198,
198,
34,
6158,
... | 1.773351 | 3,715 |
from django.core.management.base import BaseCommand, CommandError
from django.db import transaction
from postgresqleu.adyen.models import Notification, AdyenLog
from postgresqleu.adyen.util import process_one_notification
| [
6738,
42625,
14208,
13,
7295,
13,
27604,
13,
8692,
1330,
7308,
21575,
11,
9455,
12331,
198,
6738,
42625,
14208,
13,
9945,
1330,
8611,
198,
198,
6738,
1281,
34239,
80,
293,
84,
13,
4597,
268,
13,
27530,
1330,
42808,
11,
1215,
88,
268,
... | 3.555556 | 63 |
import numpy as np
from numpy import sin
from numpy.linalg import lstsq
import matplotlib.pyplot as plt
xdata = np.array([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14.])
ydata = np.array([0, 0, 0, -1, -2, -4, -8, -16, -8, -4, -2, -1, 0, 0, 0.])
m = 10
a = np.array([[1*sin(i*np.pi*(xdata-0)/(14-0))] for i in range(1, m+1)])
a = a.swapaxes(0,2).swapaxes(1,2).reshape(xdata.shape[0],-1)
c0, residues, rank, s = lstsq(a, ydata)
xs = np.linspace(0, 14, 1000)
a = np.array([[1*sin(i*np.pi*(xs-0)/(14-0))] for i in range(1, m+1)])
a = a.swapaxes(0,2).swapaxes(1,2).reshape(xs.shape[0],-1)
print plt.plot(xdata, ydata, label='data')
print plt.plot(xs, a.dot(c0), label='adjusted')
plt.savefig('test.png')
print a.shape
| [
11748,
299,
32152,
355,
45941,
198,
6738,
299,
32152,
1330,
7813,
198,
6738,
299,
32152,
13,
75,
1292,
70,
1330,
300,
6448,
80,
198,
11748,
2603,
29487,
8019,
13,
9078,
29487,
355,
458,
83,
628,
198,
87,
7890,
796,
45941,
13,
18747,
... | 1.954178 | 371 |
"""
Views belonging to the core of the site which do not relate to a person, project or task.
"""
from django.views.generic import TemplateView
class IndexView(TemplateView):
"""
View for site index page.
"""
template_name = 'index.html'
| [
37811,
198,
7680,
82,
16686,
284,
262,
4755,
286,
262,
2524,
543,
466,
407,
15124,
284,
257,
1048,
11,
1628,
393,
4876,
13,
198,
37811,
198,
198,
6738,
42625,
14208,
13,
33571,
13,
41357,
1330,
37350,
7680,
628,
198,
4871,
12901,
7680... | 3.2125 | 80 |
#!/usr/bin/env python
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import asyncio
import random
import uuid
from datetime import datetime
from os import environ
import names
from google.cloud import spanner
INSTANCE_ID = environ.get("INSTANCE_ID")
DATABASE_ID = environ.get("DATABASE_ID")
user_id_list = []
if __name__ == "__main__":
insert_users_data(INSTANCE_ID, DATABASE_ID)
read_users(INSTANCE_ID, DATABASE_ID)
asyncio.run(parallel_insert_scores_data())
| [
2,
48443,
14629,
14,
8800,
14,
24330,
21015,
198,
198,
2,
15069,
33160,
3012,
11419,
198,
2,
198,
2,
49962,
739,
262,
24843,
13789,
11,
10628,
362,
13,
15,
357,
1169,
366,
34156,
15341,
198,
2,
345,
743,
407,
779,
428,
2393,
2845,
... | 3.196203 | 316 |
import numpy as np
import json
from tqdm import tqdm
import gc
#import cupy
# for num in tqdm(range(1,12)):
# dataset[num] = []
# for vec in tqdm(vector_dict['{}'.format(num)]):
#print(num)
#print(vec)
#print(len(vec))
# dataset[num].append(onehot_vec(vec,dict_size))
#print(dataset)
if __name__=='__main__':
md = MakeData()
md.main()
| [
11748,
299,
32152,
355,
45941,
198,
11748,
33918,
198,
6738,
256,
80,
36020,
1330,
256,
80,
36020,
198,
11748,
308,
66,
198,
2,
11748,
6508,
88,
628,
220,
220,
220,
220,
220,
220,
220,
220,
198,
2,
220,
220,
220,
329,
997,
287,
25... | 1.893333 | 225 |
ONE_MINUTE = 1 / 60
SUCCESS = 1
FAIL = 0
FILE_NOT_FOUND = 'File not found.\nBe sure the file exists.'
MALFORMED_FILE = 'File is malformed, please check the file'
WRONG_TIME_RANGE = 'Invalid range of hours'
EMPTY_FILE = 'File is empty'
WORKWEEK = ['MO', 'TU', 'WE', 'TH', 'FR', 'SA', 'SU']
WEEKEND = ['SA', 'SU']
MIN_HOUR = 0
MAX_HOUR = 24
| [
11651,
62,
23678,
37780,
796,
352,
1220,
3126,
198,
198,
12564,
4093,
7597,
796,
352,
198,
7708,
4146,
796,
657,
198,
198,
25664,
62,
11929,
62,
37,
15919,
796,
705,
8979,
407,
1043,
13,
59,
77,
3856,
1654,
262,
2393,
7160,
2637,
19... | 2.432624 | 141 |
import os
import json
_FILENAME = 'environ'
_FILEPATH = './.%s' % _FILENAME
| [
11748,
28686,
198,
11748,
33918,
198,
198,
62,
46700,
1677,
10067,
796,
705,
268,
2268,
6,
198,
62,
25664,
34219,
796,
45302,
11757,
4,
82,
6,
4064,
4808,
46700,
1677,
10067,
198
] | 2.40625 | 32 |
import numpy
import cv2
image = cv2.imread("imagenes/carro.jpg")
cv2.waitKey(0)
| [
11748,
299,
32152,
198,
11748,
269,
85,
17,
198,
198,
9060,
796,
269,
85,
17,
13,
320,
961,
7203,
320,
11286,
274,
14,
7718,
305,
13,
9479,
4943,
198,
198,
33967,
17,
13,
17077,
9218,
7,
15,
8,
628,
198
] | 2.1 | 40 |
# pylint: skip-file
#- @test defines/binding FnTest
#- @x defines/binding ArgX
#- FnTest.node/kind function
#- FnTest param.0 ArgX
#- @foo defines/binding FnFoo
#- @x defines/binding ArgFooX
#- FnFoo.node/kind function
#- FnFoo param.0 ArgFooX
#- @y defines/binding VarY
#- @test ref FnTest
#- @test ref/call FnTest
#- VarY.node/kind variable
y = test(2)
# We don't index this, but it shouldn't crash.
z = (lambda x: x)(1)
#- @bar defines/binding VarBar
bar = lambda x: x
# We don't generate ref/call here.
#- @bar ref VarBar
bar(1)
| [
2,
279,
2645,
600,
25,
14267,
12,
7753,
198,
198,
2,
12,
2488,
9288,
15738,
14,
30786,
37481,
14402,
198,
2,
12,
2488,
87,
15738,
14,
30786,
20559,
55,
198,
2,
12,
37481,
14402,
13,
17440,
14,
11031,
2163,
198,
2,
12,
37481,
14402... | 2.556604 | 212 |
if __name__ == '__main__':
import sys
import blaze.server.spider
sys.exit(blaze.server.spider._main())
| [
361,
11593,
3672,
834,
6624,
705,
834,
12417,
834,
10354,
201,
198,
220,
220,
220,
1330,
25064,
201,
198,
220,
220,
220,
1330,
31259,
13,
15388,
13,
2777,
1304,
201,
198,
201,
198,
220,
220,
220,
25064,
13,
37023,
7,
2436,
6201,
13,... | 2.326923 | 52 |
import torch
import torch.nn as nn
import torch.nn.functional as F
def weighted_soft_dice_loss(inputs, targets, v2=0.9, eps=1e-4):
"""
From https://ieeexplore.ieee.org/stamp/stamp.jsp?arnumber=9180275
allows contribution of negative samples
"""
v1 = 1 - v2
iflat = inputs.view(-1)
tflat = targets.view(-1)
w = (tflat * (v2 - v1)) + v1
g_iflat = w * (2 * iflat - 1)
g_tflat = w * (2 * tflat - 1)
intersection = (g_iflat * g_tflat).sum()
calc = 1 - ((2 * intersection + eps)/ (torch.abs(g_iflat).sum() + torch.abs(g_tflat).sum() + eps))
return torch.clip(calc, max=1-eps) | [
11748,
28034,
198,
11748,
28034,
13,
20471,
355,
299,
77,
198,
11748,
28034,
13,
20471,
13,
45124,
355,
376,
628,
198,
4299,
26356,
62,
4215,
62,
67,
501,
62,
22462,
7,
15414,
82,
11,
6670,
11,
410,
17,
28,
15,
13,
24,
11,
304,
... | 2.243728 | 279 |
# -*- coding: utf-8 -*-
"""
Created on Tue Nov 26 23:59:24 2019
@author: orteg
"""
import click
import pandas as pd
import networkx as nx
from preprop_utils import splitgraph
from datetime import datetime
###################################################################
@click.command()
@click.argument('input_file',
type=click.Path(exists=True, readable=True, dir_okay=False))
@click.argument('output_directory',
type=click.Path(exists=True, file_okay=False, dir_okay=True))
@click.option('--generate_dynamic', is_flag=True, required = True)
@click.option('--training_size', default=9, required = True)
@click.option('--interm_directory',
type=click.Path(exists=True, file_okay=False, dir_okay=True))
if __name__ == '__main__':
main() | [
2,
532,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
198,
37811,
198,
41972,
319,
30030,
5267,
2608,
2242,
25,
3270,
25,
1731,
13130,
198,
198,
31,
9800,
25,
393,
660,
70,
198,
37811,
198,
11748,
3904,
198,
11748,
19798,
292,
... | 2.667774 | 301 |
"""This package directory is used to store non-Canari transform code.
Why would you want to use for Canari? For easy transform distribution.
You can use Canari as a transform proxy to execute your non-Python or
non-Canari transform code. This allows you to package up your transforms
in an easy-to-use transform package and easily share the package with your
colleagues.
"""
| [
37811,
1212,
5301,
8619,
318,
973,
284,
3650,
1729,
12,
6090,
2743,
6121,
2438,
13,
198,
198,
5195,
561,
345,
765,
284,
779,
329,
1680,
2743,
30,
1114,
2562,
6121,
6082,
13,
198,
1639,
460,
779,
1680,
2743,
355,
257,
6121,
15741,
28... | 4.177778 | 90 |
import unittest
from src.LFSR import LFSR
if __name__ == '__main__':
unittest.main()
| [
11748,
555,
715,
395,
198,
198,
6738,
12351,
13,
43,
10652,
49,
1330,
406,
10652,
49,
198,
198,
361,
11593,
3672,
834,
6624,
705,
834,
12417,
834,
10354,
198,
220,
220,
220,
555,
715,
395,
13,
12417,
3419,
198
] | 2.333333 | 39 |
import numpy as np
import matplotlib.pyplot as plt
x1 = np.linspace(0,4,100)
x2 = np.linspace(-0.5,1,100)
x1, x2 = np.meshgrid(x1,x2)
f = (1.5 - x1*(1.0-x2))**2 + (2.25 - x1*(1.0-(x2**2)))**2 + (2.625 - x1*(1.0-(x2**3)))**2
plt.figure()
plt.contourf(x1,x2,f,100)
plt.colorbar()
plt.xlabel(r'$x_1$')
plt.ylabel(r'$x_2$')
plt.show() | [
11748,
299,
32152,
355,
45941,
201,
198,
11748,
2603,
29487,
8019,
13,
9078,
29487,
355,
458,
83,
201,
198,
201,
198,
87,
16,
796,
45941,
13,
21602,
10223,
7,
15,
11,
19,
11,
3064,
8,
201,
198,
87,
17,
796,
45941,
13,
21602,
10223... | 1.621495 | 214 |
# https://oj.leetcode.com/problems/sudoku-solver/
# @param board, a 9x9 2D array
# Solve the Sudoku by modifying the input board in-place.
# Do not return any value.
import pprint as pp
# Pass OJ !!
# @param board, a 9x9 2D array
# Solve the Sudoku by modifying the input board in-place.
# Do not return any value.
# Test
s = Solution1()
board = [".87654329","2........","3........","4........","5........","6........","7........","8........","9........"]
s.solveSudoku(board)
print board
| [
2,
3740,
1378,
13210,
13,
293,
316,
8189,
13,
785,
14,
1676,
22143,
14,
82,
463,
11601,
12,
82,
14375,
14,
198,
220,
1303,
2488,
17143,
3096,
11,
257,
860,
87,
24,
362,
35,
7177,
198,
220,
1303,
4294,
303,
262,
14818,
11601,
416,
... | 2.880682 | 176 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import argparse
import sys
import logging
from flask import Flask, request
import requests
from subsonic_api_proxy import __version__
_logger = logging.getLogger(__name__)
def parse_args(args):
"""Parse command line parameters
Args:
args ([str]): command line parameters as list of strings
Returns:
:obj:`argparse.Namespace`: command line parameters namespace
"""
parser = argparse.ArgumentParser(
description="subsonic-api-proxy")
parser.add_argument(
'--version',
action='version',
version='subsonic-api-proxy {ver}'.format(ver=__version__))
parser.add_argument(
'-v',
'--verbose',
dest="loglevel",
help="set loglevel to INFO")
parser.add_argument(
'-vv',
'--very-verbose',
dest="loglevel",
help="set loglevel to DEBUG",
action='store_const',
const=logging.DEBUG)
parser.add_argument(
'-t',
'--target',
dest='target',
help="target subsonic url to call",
required=True,
)
return parser.parse_args(args)
def setup_logging(loglevel):
"""Setup basic logging
Args:
loglevel (int): minimum loglevel for emitting messages
"""
logformat = "[%(asctime)s] %(levelname)s:%(name)s:%(message)s"
logging.basicConfig(level=loglevel, stream=sys.stdout,
format=logformat, datefmt="%Y-%m-%d %H:%M:%S")
def main(args):
"""Main entry point allowing external calls
Args:
args ([str]): command line parameter list
"""
args = parse_args(args)
setup_logging(args.loglevel)
app.config['target'] = args.target
app.run(host="0.0.0.0")
def run():
"""Entry point for console_scripts
"""
main(sys.argv[1:])
if __name__ == "__main__":
run()
app = Flask(__name__)
@app.route('/rest/<operation>')
| [
2,
48443,
14629,
14,
8800,
14,
24330,
21015,
198,
2,
532,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
198,
11748,
1822,
29572,
198,
11748,
25064,
198,
11748,
18931,
198,
6738,
42903,
1330,
46947,
11,
2581,
198,
11748,
7007,
628,... | 2.31466 | 839 |
import numpy as np
from saltproc import Process
class Separator(Process):
"""Class evaluates gas removal efficiency in separator (bubble separator).
Attributes
----------
q_salt : float
volumetric salt flow rate (m^3/s)
Default: 0.1
q_he : float
volumetric helium flow rate (m^3/s)
Default: 0.005
do : float
gas outlet diameter (m)
Ranging from 1~3cm in our simulations
Default: 0.02
dp : float
sparger/contractor (pipe) diameter (m)
Default: 0.1
db : float
bubble diameter (m) for bubble generator/separator
Default: 0.001
deltap : float
Pressure difference between the inlet and the gas outlet (Pa)
(from 2e5 to 5e5 Pa)
Default: 4e5
temp_room: real
room temperature (Kelvin)
Default: 900
k : float
Slope of the initial swirling (use 1 for this).
Methods
-------
eff()
Evaluates gas removal efficiency from Jiaqi's correlation. [1]
description()
Contains a dictionary of plot properties.
calc_rem_efficiency(el_name)
Overrides exiting method in Parent class.
References
----------
[1] Jiaqi Chen and Caleb S. Brooks. Milestone 1.2 Report: CFD
Sensitivity Analysis. In preparation
"""
k = 1.0
def __init__(self, q_salt=0.1, q_he=0.005, do=0.02, dp=0.1, db=0.001,
deltap=4e5, temp_salt=900, *initial_data, **kwargs):
""" Initializes the Separator object.
Parameters
----------
q_salt : float
volumetric salt flow rate (m^3/s)
Default: 0.1
q_he : float
volumetric helium flow rate (m^3/s)
Default: 0.005
do : float
gas outlet diameter (m)
Ranging from 1~3cm in our simulations
Default: 0.02
dp : float
sparger/contractor (pipe) diameter (m)
Default: 0.1
db : float
bubble diameter (m) for bubble generator/separator
Default: 0.001
deltap : float
Pressure difference between the inlet and the gas outlet (Pa)
(from 2e5 to 5e5 Pa)
Default: 4e5
temp_room: real
room temperature (Kelvin)
Default: 900
area : float
contactor cross-section (m^2)
jl : float
liquid superficial velocity (m/s)
alpha : float
void fraction
Notes
-----
Default values from Jiaqi's simulation
"""
super().__init__(*initial_data, **kwargs)
self.q_salt = q_salt
self.q_he = q_he
self.do = do
self.deltap = deltap
self.db = db
self.dp = dp
self.temp_salt = temp_salt
self.area = np.pi * (self.dp / 2) ** 2
self.alpha = self.q_he / (self.q_he + self.q_salt)
self.jl = self.q_salt/self.area
self.efficiency = self.eff()
def calc_rem_efficiency(self, el_name):
"""Overrides the existing method in Process class to provide
efficiency values of target isotopes calculated in eff() function.
Parameters
----------
el_name : str
Name of target element to be removed.
Returns
-------
efficiency : float
Extraction efficiency for el_name element.
"""
efficiency = self.eff()[el_name]
return efficiency
def description(self):
"""Class attributes' descriptions for plotting purpose in
sensitivity analysis
Returns
------
pltdict: dict
contains instances' information
"""
plt_dict = {'q_salt': {'xaxis': 'salt flow rate ${(m^3/s)}$',
'yaxis': 'bubble separation efficiency (%)',
'vs': 'Xe_eff'},
'q_he': {'xaxis': 'helium flow rate ${(m^3/s)}$',
'yaxis': 'bubble separation efficiency (%)',
'vs': 'Xe_eff'},
'do': {'xaxis': 'gas outlet diameter ${(m)}$',
'yaxis': 'bubble separation efficiency (%)',
'vs': 'Xe_eff'},
'dp': {'xaxis': 'pipe diameter ${(m)}$',
'yaxis': 'bubble separation efficiency (%)',
'vs': 'Xe_eff'},
'db': {'xaxis': 'bubble diameter ${(m)}$',
'yaxis': 'bubble separation efficiency (%)',
'vs': 'Xe_eff'},
'deltap': {'xaxis': 'pressure difference ${(Pa)}$',
'yaxis': 'bubble separation efficiency (%)',
'vs': 'Xe_eff'},
'temp_salt': {'xaxis': 'average salt temperature ${(K)}$',
'yaxis': 'bubble separation efficiency (%)',
'vs': 'Xe_eff'}
}
return plt_dict
def eff(self):
""" Evaluates gas/bubble separation efficiency from Jiaqi's correlation.
Returns
-------
rem_eff : dict
Dictionary that contains removal efficiency of each target
element.
``key``
Name of target isotope.
``value``
removal efficiency.
"""
dc = 3.41 * self.do
mu = 1.076111581e-2 * (self.temp_salt / 1000)**(-4.833549134)
rho = (6.105 - 0.001272 * self.temp_salt) * 1000
nu = mu / rho
vl = self.q_salt / self.area
number_re = self.dp * vl / nu
etha = 1 / (3.2 * rho * self.jl**2 * dc**2 /
(self.do**2 * self.deltap) + 1)
dvoid = (4.89 * self.dp * (self.dp / self.db)**1.27) /\
(1 + self.k**4 * number_re)
df = self.do / (self.do + dvoid / (100 * self.alpha)**0.5)
sep_eff = df / (1 + 0.23 * etha) + 3.26 * etha * (1-df) * df
rem_eff = {'Xe': sep_eff, 'Kr': sep_eff, 'H': sep_eff}
return rem_eff
if __name__ == "__main__":
print(Separator().eff())
| [
198,
11748,
299,
32152,
355,
45941,
198,
6738,
8268,
36942,
1330,
10854,
628,
198,
4871,
8621,
283,
1352,
7,
18709,
2599,
198,
220,
220,
220,
37227,
9487,
47850,
3623,
9934,
9332,
287,
2880,
1352,
357,
46176,
903,
2880,
1352,
737,
628,
... | 1.957433 | 3,195 |
import core.utils as utils
import core.plots as plots
import pandas as pd
bwap_dict = {}
source_dir = utils.make_path('result', '0061_all')
bwap_S = pd.read_csv(f"{source_dir}S/twap/0.csv")
bwap_V = pd.read_csv(f"{source_dir}2202101349/twap/0.csv")
bwap_dict['bwap_V'] = bwap_V
bwap_dict['bwap_S'] = bwap_S
results_path = f"{source_dir}bwap_grid_test/"
plots.bwap_grid(bwap_dict, results_path,
ylim=[[3.6, 30],
[3.6, 100]],
name="bwap_grid_test")
| [
11748,
4755,
13,
26791,
355,
3384,
4487,
198,
11748,
4755,
13,
489,
1747,
355,
21528,
198,
11748,
19798,
292,
355,
279,
67,
198,
198,
65,
86,
499,
62,
11600,
796,
23884,
198,
10459,
62,
15908,
796,
3384,
4487,
13,
15883,
62,
6978,
1... | 1.819495 | 277 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
:Mod: eml_data_format
:Synopsis:
:Author:
servilla
:Created:
3/29/20
"""
import daiquiri
from rendere.eml.eml_utils import clean
logger = daiquiri.getLogger(__name__)
| [
2,
48443,
14629,
14,
8800,
14,
24330,
21015,
198,
2,
532,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
198,
198,
37811,
198,
25,
5841,
25,
795,
75,
62,
7890,
62,
18982,
198,
198,
25,
49771,
25,
198,
198,
25,
13838,
25,
198,... | 2.219048 | 105 |
"""
Definition of the :class:`NIfTI` model.
"""
import itertools
import json
import logging
from pathlib import Path
from typing import Iterable, List, Union
import nibabel as nib
import numpy as np
from django.db import IntegrityError, models
from django_analyses.models.input import FileInput, ListInput
from django_extensions.db.models import TimeStampedModel
from django_mri.models.messages import NIFTI_FILE_MISSING
from django_mri.utils.compression import compress, uncompress
class NIfTI(TimeStampedModel):
"""
A model representing a NIfTI_ file in the database.
.. _NIfTI: https://nifti.nimh.nih.gov/nifti-1/
"""
#: Path of the *.nii* file within the application's media directory.
path = models.FilePathField(max_length=1000, unique=True)
parent = models.ForeignKey(
"self",
on_delete=models.CASCADE,
null=True,
related_name="derivative_set",
)
#: Whether the created instance is the product of a direct conversion from
#: some raw format to NIfTI or of a manipulation of the data.
is_raw = models.BooleanField(default=False)
APPENDIX_FILES: Iterable[str] = {".json", ".bval", ".bvec"}
B0_THRESHOLD: int = 10
_instance: nib.nifti1.Nifti1Image = None
# Used to cache JSON data to prevent multiple reads.
_json_data = None
# Logger instance for this model.
_logger = logging.getLogger("data.mri.nifti")
def get_data(self, dtype: np.dtype = np.float64) -> np.ndarray:
"""
Uses NiBabel_ to return the underlying pixel data as a NumPy_ array.
.. _NiBabel: https://nipy.org/nibabel/
.. _NumPy: http://www.numpy.org/
Returns
-------
np.ndarray
Pixel data.
"""
return self.instance.get_fdata(dtype=dtype)
def get_b_value(self) -> List[int]:
"""
Returns the degree of diffusion weighting applied (b-value_) for each
diffusion direction. This method relies on dcm2niix_'s default
configuration in which when diffusion-weighted images (DWI_) are
converted, another file with the same name and a "bval" extension is
created alongside.
.. _b-value: https://radiopaedia.org/articles/b-values-1
.. _dcm2niix: https://github.com/rordenlab/dcm2niix
.. _DWI: https://en.wikipedia.org/wiki/Diffusion_MRI
Hint
----
For more information, see dcm2niix's `Diffusion Tensor Imaging`_
section of the user guide.
.. _Diffusion Tensor Imaging:
https://www.nitrc.org/plugins/mwiki/index.php/dcm2nii:MainPage#Diffusion_Tensor_Imaging
See Also
--------
* :attr:`b_value`
Returns
-------
List[int]
b-value for each diffusion direction.
"""
file_name = self.b_value_file
if file_name:
with open(file_name, "r") as file_object:
content = file_object.read()
content = content.splitlines()[0].split(" ")
return [int(value) for value in content]
def get_b_vector(self) -> List[List[float]]:
"""
Returns the b-vectors_ representing the diffusion weighting gradient
scheme. This method relies on dcm2niix_'s default configuration in
which when diffusion-weighted images (DWI_) are converted, another file
with the same name and a "bvec" extension is created alongside.
.. _b-vectors:
https://mrtrix.readthedocs.io/en/latest/concepts/dw_scheme.html
.. _dcm2niix: https://github.com/rordenlab/dcm2niix
.. _DWI: https://en.wikipedia.org/wiki/Diffusion_MRI
Hint
----
For more information, see dcm2niix's `Diffusion Tensor Imaging`_
section of the user guide.
.. _Diffusion Tensor Imaging:
https://www.nitrc.org/plugins/mwiki/index.php/dcm2nii:MainPage#Diffusion_Tensor_Imaging
See Also
--------
* :attr:`b_vector`
Returns
-------
List[List[float]]
b-value for each diffusion direction
"""
file_name = self.b_vector_file
if file_name:
with open(file_name, "r") as file_object:
content = file_object.read()
return [
[float(value) for value in vector.rstrip().split(" ")]
for vector in content.rstrip().split("\n")
]
def read_json(self) -> dict:
"""
Returns the JSON data generated alognside *.nii* files generated
using dcm2niix_\'s *"BIDS sidecar"* option.
.. _dcm2niix: https://github.com/rordenlab/dcm2niix
Notes
-----
* For more information about dcm2niix and the BIDS sidecar, see
dcm2niix's `general usage manual`_.
* For more information about the extracted properties and their usage
see `Acquiring and Using Field-maps`_
.. _Acquiring and Using Field-maps:
https://lcni.uoregon.edu/kb-articles/kb-0003
.. _general usage manual:
https://www.nitrc.org/plugins/mwiki/index.php/dcm2nii:MainPage#General_Usage
Returns
-------
dict
BIDS sidecar information stored in a JSON file, or *{}* if the file
doesn't exist
"""
if self.json_file.is_file():
with open(self.json_file, "r") as f:
return json.load(f)
return {}
def get_total_readout_time(self) -> float:
"""
Reads the total readout time extracted by dcm2niix_ upon conversion.
.. _dcm2niix: https://github.com/rordenlab/dcm2niix
Hint
----
Total readout time is defined as the time from the center of the first
echo to the center of the last (in seconds).
Returns
-------
float
Total readout time
"""
return self.json_data.get("TotalReadoutTime")
def get_effective_spacing(self) -> float:
"""
Reads the effective echo spacing value extracted by dcm2niix_ upon
conversion.
.. _dcm2niix: https://github.com/rordenlab/dcm2niix
Returns
-------
float
Effective echo spacing
"""
return self.json_data.get("EffectiveEchoSpacing")
def get_phase_encoding_direction(self) -> float:
"""
Reads the phase encoding direction value extracted by dcm2niix_ upon
conversion.
.. _dcm2niix: https://github.com/rordenlab/dcm2niix
Returns
-------
float
Phase encoding direction
"""
return self.json_data.get("PhaseEncodingDirection")
def compress(self, keep_source: bool = False) -> Path:
"""
Compress the associated *.nii* using gzip, if it isn't already
compressed.
Parameters
----------
keep_source : bool, optional
Whether to keep a copy of the uncompressed file, by default False
Returns
-------
Path
Path of the compressed (*.nii.gz*) file
"""
if not self.is_compressed:
uncompressed_path = Path(self.path)
compressed_path = compress(
uncompressed_path, keep_source=keep_source
)
self.path = str(compressed_path)
self.save()
return Path(self.path)
def uncompress(self, keep_source: bool = False) -> Path:
"""
Uncompress the associated *.nii* using gzip, if it isn't already
uncompressed.
Parameters
----------
keep_source : bool, optional
Whether to keep a copy of the compressed file, by default False
Returns
-------
Path
Path of the uncompressed (*.nii*) file
"""
if self.is_compressed:
compressed_path = Path(self.path)
uncompressed_path = uncompress(
compressed_path, keep_source=keep_source
)
self.path = str(uncompressed_path)
self.save()
return Path(self.path)
def _resolve_compression_state(self) -> None:
"""
Fixed the instance's path in case it's out of sync with compression
state. This method is used for testing and Should not be required
under normal circumstances.
Raises
------
FileNotFoundError
No associated file found in the file system
"""
path = Path(self.path)
is_compressed = path.suffix == ".gz"
compressed_path = path if is_compressed else path.with_suffix(".gz")
uncompressed_path = path if not is_compressed else path.with_suffix("")
valid_compressed = is_compressed and compressed_path.exists()
valid_uncompressed = uncompressed_path.exists() and not is_compressed
if not valid_compressed and uncompressed_path.exists():
self.path = str(path.with_suffix(""))
self.save()
elif not valid_uncompressed and compressed_path.exists():
self.path = str(path.with_suffix(".gz"))
self.save()
elif valid_compressed or valid_uncompressed:
return
else:
message = NIFTI_FILE_MISSING.format(pk=self.id, path=self.path)
raise FileNotFoundError(message)
@property
def json_file(self) -> Path:
"""
Return path to the corresponding json file.
Returns
-------
Path
Corresponding json file
"""
base_name = Path(self.path).name.split(".")[0]
return (Path(self.path).parent / base_name).with_suffix(".json")
@property
def json_data(self) -> dict:
"""
Reads BIDS sidecar information and caches within a local variable to
prevent multiple reads.
See Also
--------
* :meth:`read_json`
Returns
-------
dict
"BIDS sidecar" JSON data
"""
if self._json_data is None:
self._json_data = self.read_json()
return self._json_data
@property
def b_value_file(self) -> Path:
"""
Return FSL format b-value file path
Returns
-------
Path
FSL format b-value file path
"""
p = Path(self.path)
bval_file = p.parent / Path(p.stem).with_suffix(".bval")
if bval_file.is_file():
return bval_file
@property
def b_vector_file(self) -> Path:
"""
Return FSL format b-vector file path.
Returns
-------
Path
FSL format b-vector file path
"""
p = Path(self.path)
bvec_file = p.parent / Path(p.stem).with_suffix(".bvec")
if bvec_file.is_file():
return bvec_file
@property
def b_value(self) -> List[int]:
"""
Returns the B-value of DWI scans as calculated by dcm2niix_.
.. _dcm2niix: https://github.com/rordenlab/dcm2niix
See Also
--------
* :meth:`get_b_value`
Returns
-------
List[int]
B-value
"""
return self.get_b_value()
@property
def b_vector(self) -> List[List[float]]:
"""
Returns the B-vector of DWI scans as calculated by dcm2niix_.
.. _dcm2niix: https://github.com/rordenlab/dcm2niix
See Also
--------
* :meth:`get_b_vector`
Returns
-------
List[List[float]]
B-vector
"""
return self.get_b_vector()
@property
def is_compressed(self) -> bool:
"""
Whether the associated *.nii* file is compressed with gzip or not.
Returns
-------
bool
Associated *.nii* file gzip compression state
"""
return Path(self.path).suffix == ".gz"
@property
def compressed(self) -> Path:
"""
Compresses the associated *.nii* file using gzip if it isn't and
returns its path.
Returns
-------
Path
Compressed *.nii.gz* file associated with this instance
"""
return self.compress()
@property
def uncompressed(self) -> Path:
"""
Uncompresses the associated *.nii* file using gzip if it isn't and
returns its path.
Returns
-------
Path
Uncompressed *.nii* file associated with this instance
"""
return self.uncompress()
@property
| [
37811,
198,
36621,
286,
262,
1058,
4871,
25,
63,
45,
1532,
25621,
63,
2746,
13,
198,
37811,
198,
11748,
340,
861,
10141,
198,
11748,
33918,
198,
11748,
18931,
198,
6738,
3108,
8019,
1330,
10644,
198,
6738,
19720,
1330,
40806,
540,
11,
... | 2.222866 | 5,694 |
import datetime as dt
from fastapi.responses import JSONResponse as Response
from fastapi import Request
from fast_agave.filters import generic_query
from ..models import Account as AccountModel
from ..validators import AccountQuery, AccountRequest, AccountUpdateRequest
from .base import app
@app.resource('/accounts')
| [
11748,
4818,
8079,
355,
288,
83,
198,
198,
6738,
3049,
15042,
13,
16733,
274,
1330,
19449,
31077,
355,
18261,
198,
6738,
3049,
15042,
1330,
19390,
198,
6738,
3049,
62,
363,
1015,
13,
10379,
1010,
1330,
14276,
62,
22766,
198,
6738,
11485... | 4.0375 | 80 |
## This file is part of Scapy
## Copyright (C) 2008 Arnaud Ebalard <arno@natisbad.org>
## 2015, 2016 Maxence Tury <maxence.tury@ssi.gouv.fr>
## This program is published under a GPLv2 license
"""
PKCS #1 methods as defined in RFC 3447.
"""
import os, popen2, tempfile
import math, random, struct
from hashlib import md5, sha1, sha224, sha256, sha384, sha512
from Crypto.Hash import MD2, MD4
#####################################################################
# Some helpers
#####################################################################
def randstring(l):
"""
Returns a random string of length l (l >= 0)
"""
tmp = map(lambda x: struct.pack("B", random.randrange(0, 256, 1)), [""]*l)
return "".join(tmp)
def zerofree_randstring(l):
"""
Returns a random string of length l (l >= 0) without zero in it.
"""
tmp = map(lambda x: struct.pack("B", random.randrange(1, 256, 1)), [""]*l)
return "".join(tmp)
def strxor(s1, s2):
"""
Returns the binary XOR of the 2 provided strings s1 and s2. s1 and s2
must be of same length.
"""
return "".join(map(lambda x,y:chr(ord(x)^ord(y)), s1, s2))
def strand(s1, s2):
"""
Returns the binary AND of the 2 provided strings s1 and s2. s1 and s2
must be of same length.
"""
return "".join(map(lambda x,y:chr(ord(x)&ord(y)), s1, s2))
# OS2IP function defined in RFC 3447 for octet string to integer conversion
def pkcs_os2ip(x):
"""
Accepts a byte string as input parameter and return the associated long
value:
Input : x octet string to be converted
Output: x corresponding nonnegative integer
Reverse function is pkcs_i2osp()
"""
return int(x.encode("hex"), 16)
# I2OSP function defined in RFC 3447 for integer to octet string conversion
def pkcs_i2osp(x, xLen):
"""
Converts a long (the first parameter) to the associated byte string
representation of length l (second parameter). Basically, the length
parameters allow the function to perform the associated padding.
Input : x nonnegative integer to be converted
xLen intended length of the resulting octet string
Output: x corresponding octet string
Reverse function is pkcs_os2ip().
"""
# The user is responsible for providing an appropriate xLen.
#if x >= 256**xLen:
# raise Exception("Integer too large for provided xLen %d" % xLen)
fmt = "%%0%dx" % (2*xLen)
return (fmt % x).decode("hex")
def pkcs_ilen(n):
"""
This is a log base 256 which determines the minimum octet string
length for unequivocal representation of integer n by pkcs_i2osp.
"""
i = 0
while n > 0:
n >>= 8
i += 1
return i
# for every hash function a tuple is provided, giving access to
# - hash output length in byte
# - associated hash function that take data to be hashed as parameter
# XXX I do not provide update() at the moment.
# - DER encoding of the leading bits of digestInfo (the hash value
# will be concatenated to create the complete digestInfo).
#
# Notes:
# - MD4 asn.1 value should be verified. Also, as stated in
# PKCS#1 v2.1, MD4 should not be used.
# - 'tls' one is the concatenation of both md5 and sha1 hashes used
# by SSL/TLS when signing/verifying things
_hashFuncParams = {
"md2" : (16,
MD2.new,
lambda x: MD2.new(x).digest(),
'\x30\x20\x30\x0c\x06\x08\x2a\x86\x48\x86\xf7\x0d\x02\x02\x05\x00\x04\x10'),
"md4" : (16,
MD4.new,
lambda x: MD4.new(x).digest(),
'\x30\x20\x30\x0c\x06\x08\x2a\x86\x48\x86\xf7\x0d\x02\x04\x05\x00\x04\x10'),
"md5" : (16,
md5,
lambda x: md5(x).digest(),
'\x30\x20\x30\x0c\x06\x08\x2a\x86\x48\x86\xf7\x0d\x02\x05\x05\x00\x04\x10'),
"sha1" : (20,
sha1,
lambda x: sha1(x).digest(),
'\x30\x21\x30\x09\x06\x05\x2b\x0e\x03\x02\x1a\x05\x00\x04\x14'),
"sha224" : (28,
sha224,
lambda x: sha224(x).digest(),
'\x30\x2d\x30\x0d\x06\x09\x60\x86\x48\x01\x65\x03\x04\x02\x04\x05\x00\x04\x1c'),
"sha256" : (32,
sha256,
lambda x: sha256(x).digest(),
'\x30\x31\x30\x0d\x06\x09\x60\x86\x48\x01\x65\x03\x04\x02\x01\x05\x00\x04\x20'),
"sha384" : (48,
sha384,
lambda x: sha384(x).digest(),
'\x30\x41\x30\x0d\x06\x09\x60\x86\x48\x01\x65\x03\x04\x02\x02\x05\x00\x04\x30'),
"sha512" : (64,
sha512,
lambda x: sha512(x).digest(),
'\x30\x51\x30\x0d\x06\x09\x60\x86\x48\x01\x65\x03\x04\x02\x03\x05\x00\x04\x40'),
"tls" : (36,
None,
lambda x: md5(x).digest() + sha1(x).digest(),
'')
}
def pkcs_mgf1(mgfSeed, maskLen, h):
"""
Implements generic MGF1 Mask Generation function as described in
Appendix B.2.1 of RFC 3447. The hash function is passed by name.
valid values are 'md2', 'md4', 'md5', 'sha1', 'tls, 'sha256',
'sha384' and 'sha512'. Returns None on error.
Input:
mgfSeed: seed from which mask is generated, an octet string
maskLen: intended length in octets of the mask, at most 2^32 * hLen
hLen (see below)
h : hash function name (in 'md2', 'md4', 'md5', 'sha1', 'tls',
'sha256', 'sha384'). hLen denotes the length in octets of
the hash function output.
Output:
an octet string of length maskLen
"""
# steps are those of Appendix B.2.1
if not _hashFuncParams.has_key(h):
_warning("pkcs_mgf1: invalid hash (%s) provided")
return None
hLen = _hashFuncParams[h][0]
hFunc = _hashFuncParams[h][2]
if maskLen > 2**32 * hLen: # 1)
_warning("pkcs_mgf1: maskLen > 2**32 * hLen")
return None
T = "" # 2)
maxCounter = math.ceil(float(maskLen) / float(hLen)) # 3)
counter = 0
while counter < maxCounter:
C = pkcs_i2osp(counter, 4)
T += hFunc(mgfSeed + C)
counter += 1
return T[:maskLen]
def pkcs_emsa_pss_encode(M, emBits, h, mgf, sLen):
"""
Implements EMSA-PSS-ENCODE() function described in Sect. 9.1.1 of RFC 3447
Input:
M : message to be encoded, an octet string
emBits: maximal bit length of the integer resulting of pkcs_os2ip(EM),
where EM is the encoded message, output of the function.
h : hash function name (in 'md2', 'md4', 'md5', 'sha1', 'tls',
'sha256', 'sha384'). hLen denotes the length in octets of
the hash function output.
mgf : the mask generation function f : seed, maskLen -> mask
sLen : intended length in octets of the salt
Output:
encoded message, an octet string of length emLen = ceil(emBits/8)
On error, None is returned.
"""
# 1) is not done
hLen = _hashFuncParams[h][0] # 2)
hFunc = _hashFuncParams[h][2]
mHash = hFunc(M)
emLen = int(math.ceil(emBits/8.))
if emLen < hLen + sLen + 2: # 3)
_warning("encoding error (emLen < hLen + sLen + 2)")
return None
salt = randstring(sLen) # 4)
MPrime = '\x00'*8 + mHash + salt # 5)
H = hFunc(MPrime) # 6)
PS = '\x00'*(emLen - sLen - hLen - 2) # 7)
DB = PS + '\x01' + salt # 8)
dbMask = mgf(H, emLen - hLen - 1) # 9)
maskedDB = strxor(DB, dbMask) # 10)
l = (8*emLen - emBits)/8 # 11)
rem = 8*emLen - emBits - 8*l # additionnal bits
andMask = l*'\x00'
if rem:
j = chr(reduce(lambda x,y: x+y, map(lambda x: 1<<x, range(8-rem))))
andMask += j
l += 1
maskedDB = strand(maskedDB[:l], andMask) + maskedDB[l:]
EM = maskedDB + H + '\xbc' # 12)
return EM # 13)
def pkcs_emsa_pss_verify(M, EM, emBits, h, mgf, sLen):
"""
Implements EMSA-PSS-VERIFY() function described in Sect. 9.1.2 of RFC 3447
Input:
M : message to be encoded, an octet string
EM : encoded message, an octet string of length emLen=ceil(emBits/8)
emBits: maximal bit length of the integer resulting of pkcs_os2ip(EM)
h : hash function name (in 'md2', 'md4', 'md5', 'sha1', 'tls',
'sha256', 'sha384'). hLen denotes the length in octets of
the hash function output.
mgf : the mask generation function f : seed, maskLen -> mask
sLen : intended length in octets of the salt
Output:
True if the verification is ok, False otherwise.
"""
# 1) is not done
hLen = _hashFuncParams[h][0] # 2)
hFunc = _hashFuncParams[h][2]
mHash = hFunc(M)
emLen = int(math.ceil(emBits/8.)) # 3)
if emLen < hLen + sLen + 2:
return False
if EM[-1] != '\xbc': # 4)
return False
l = emLen - hLen - 1 # 5)
maskedDB = EM[:l]
H = EM[l:l+hLen]
l = (8*emLen - emBits)/8 # 6)
rem = 8*emLen - emBits - 8*l # additionnal bits
andMask = l*'\xff'
if rem:
val = reduce(lambda x,y: x+y, map(lambda x: 1<<x, range(8-rem)))
j = chr(~val & 0xff)
andMask += j
l += 1
if strand(maskedDB[:l], andMask) != '\x00'*l:
return False
dbMask = mgf(H, emLen - hLen - 1) # 7)
DB = strxor(maskedDB, dbMask) # 8)
l = (8*emLen - emBits)/8 # 9)
rem = 8*emLen - emBits - 8*l # additionnal bits
andMask = l*'\x00'
if rem:
j = chr(reduce(lambda x,y: x+y, map(lambda x: 1<<x, range(8-rem))))
andMask += j
l += 1
DB = strand(DB[:l], andMask) + DB[l:]
l = emLen - hLen - sLen - 1 # 10)
if DB[:l] != '\x00'*(l-1) + '\x01':
return False
salt = DB[-sLen:] # 11)
MPrime = '\x00'*8 + mHash + salt # 12)
HPrime = hFunc(MPrime) # 13)
return H == HPrime # 14)
def pkcs_emsa_pkcs1_v1_5_encode(M, emLen, h): # section 9.2 of RFC 3447
"""
Implements EMSA-PKCS1-V1_5-ENCODE() function described in Sect.
9.2 of RFC 3447.
Input:
M : message to be encode, an octet string
emLen: intended length in octets of the encoded message, at least
tLen + 11, where tLen is the octet length of the DER encoding
T of a certain value computed during the encoding operation.
h : hash function name (in 'md2', 'md4', 'md5', 'sha1', 'tls',
'sha256', 'sha384'). hLen denotes the length in octets of
the hash function output.
Output:
encoded message, an octet string of length emLen
On error, None is returned.
"""
hLen = _hashFuncParams[h][0] # 1)
hFunc = _hashFuncParams[h][2]
H = hFunc(M)
hLeadingDigestInfo = _hashFuncParams[h][3] # 2)
T = hLeadingDigestInfo + H
tLen = len(T)
if emLen < tLen + 11: # 3)
_warning("pkcs_emsa_pkcs1_v1_5_encode:"
"intended encoded message length too short")
return None
PS = '\xff'*(emLen - tLen - 3) # 4)
EM = '\x00' + '\x01' + PS + '\x00' + T # 5)
return EM # 6)
# XXX should add other pgf1 instance in a better fashion.
def create_ca_file(anchor_list, filename):
"""
Concatenate all the certificates (PEM format for the export) in
'anchor_list' and write the result to file 'filename'. On success
'filename' is returned, None otherwise.
If you are used to OpenSSL tools, this function builds a CAfile
that can be used for certificate and CRL check.
Also see create_temporary_ca_file().
"""
try:
f = open(filename, "w")
for a in anchor_list:
s = a.output(fmt="PEM")
f.write(s)
f.close()
except:
return None
return filename
def create_temporary_ca_file(anchor_list):
"""
Concatenate all the certificates (PEM format for the export) in
'anchor_list' and write the result to file to a temporary file
using mkstemp() from tempfile module. On success 'filename' is
returned, None otherwise.
If you are used to OpenSSL tools, this function builds a CAfile
that can be used for certificate and CRL check.
"""
try:
f, fname = tempfile.mkstemp()
for a in anchor_list:
s = a.output(fmt="PEM")
l = os.write(f, s)
os.close(f)
except:
return None
return fname
def create_temporary_ca_path(anchor_list, folder):
"""
Create a CA path folder as defined in OpenSSL terminology, by
storing all certificates in 'anchor_list' list in PEM format
under provided 'folder' and then creating the associated links
using the hash as usually done by c_rehash.
Note that you can also include CRL in 'anchor_list'. In that
case, they will also be stored under 'folder' and associated
links will be created.
In folder, the files are created with names of the form
0...ZZ.pem. If you provide an empty list, folder will be created
if it does not already exist, but that's all.
The number of certificates written to folder is returned on
success, None on error.
"""
# We should probably avoid writing duplicate anchors and also
# check if they are all certs.
try:
if not os.path.isdir(folder):
os.makedirs(folder)
except:
return None
l = len(anchor_list)
if l == 0:
return None
fmtstr = "%%0%sd.pem" % math.ceil(math.log(l, 10))
i = 0
try:
for a in anchor_list:
fname = os.path.join(folder, fmtstr % i)
f = open(fname, "w")
s = a.output(fmt="PEM")
f.write(s)
f.close()
i += 1
except:
return None
r,w=popen2.popen2("c_rehash %s" % folder)
r.close(); w.close()
return l
#####################################################################
# Public Key Cryptography related stuff
#####################################################################
| [
2235,
770,
2393,
318,
636,
286,
1446,
12826,
198,
2235,
15069,
357,
34,
8,
3648,
943,
2616,
463,
412,
6893,
446,
1279,
1501,
78,
31,
32353,
271,
14774,
13,
2398,
29,
198,
2235,
220,
220,
220,
220,
220,
220,
220,
220,
1853,
11,
158... | 2.023356 | 7,450 |
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import collections
from datetime import datetime
import util
from absl import logging
from google.cloud import error_reporting
from google.cloud import secretmanager
from sendgrid import SendGridAPIClient
from sendgrid.helpers.mail import Mail, From, To, Subject, PlainTextContent, HtmlContent
# Make sure these match the names of your Cloud Secrets. Refer to the README
# in this directory for email alert setup steps.
_SENDGRID_API_SECRET_NAME = 'sendgrid-api-key'
_RECIPIENT_EMAIL_SECRET_NAME = 'alert-destination-email-address'
_SENDER_EMAIL_SECRET_NAME = 'alert-sender-email-address'
# An error not specific to any test, e.g. a failure to read from Pubsub,
# will not have any corresponding training logs.
_NO_LOGS = 'no logs'
| [
2,
15069,
12131,
3012,
11419,
198,
2,
198,
2,
49962,
739,
262,
24843,
13789,
11,
10628,
362,
13,
15,
357,
1169,
366,
34156,
15341,
198,
2,
345,
743,
407,
779,
428,
2393,
2845,
287,
11846,
351,
262,
13789,
13,
198,
2,
921,
743,
733... | 3.577657 | 367 |
"""Tests for distos.error.stencil
"""
import sys
sys.path.insert(1, "..")
import unittest
from dictos.spec import has_zero, has_duplicated_points
from dictos.error.stencil import ContainsZeroError, DuplicatedPointError, TooNarrowError
if __name__ == "__main__":
unittest.main()
| [
37811,
51,
3558,
329,
1233,
418,
13,
18224,
13,
26400,
2856,
198,
37811,
198,
11748,
25064,
198,
198,
17597,
13,
6978,
13,
28463,
7,
16,
11,
366,
492,
4943,
198,
198,
11748,
555,
715,
395,
198,
198,
6738,
8633,
418,
13,
16684,
1330,... | 2.851485 | 101 |
from yowsup.structs import ProtocolEntity, ProtocolTreeNode
from .iq_groups import GroupsIqProtocolEntity
class SubjectGroupsIqProtocolEntity(GroupsIqProtocolEntity):
'''
<iq type="set" id="{{id}}" xmlns="w:g2", to={{group_jid}}">
<subject>
{{NEW_VAL}}
</subject>
</iq>
'''
@staticmethod
| [
6738,
331,
1666,
929,
13,
7249,
82,
1330,
20497,
32398,
11,
20497,
27660,
19667,
198,
6738,
764,
25011,
62,
24432,
1330,
27441,
40,
80,
19703,
4668,
32398,
198,
4871,
15540,
38,
14459,
40,
80,
19703,
4668,
32398,
7,
38,
14459,
40,
80,... | 2.306122 | 147 |
"""test_pyutils.py: Tests for pyutils.py."""
| [
37811,
9288,
62,
9078,
26791,
13,
9078,
25,
30307,
329,
12972,
26791,
13,
9078,
526,
15931,
198
] | 2.647059 | 17 |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import os
import time
from multiprocessing import cpu_count
import pandas as pd
from joblib import Parallel, delayed
from meteostat import Daily, Hourly, Point
| [
2,
48443,
14629,
14,
8800,
14,
24330,
21015,
18,
198,
2,
532,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
628,
198,
11748,
28686,
198,
11748,
640,
198,
6738,
18540,
305,
919,
278,
1330,
42804,
62,
9127,
198,
198,
11748,
19798,... | 3.028571 | 70 |
import tycho
data = bytes.fromhex("40 03 03 66 6f 6f 1e 0b 48 65 6c 6c 6f 20 57 6f 72 6c 64 03 62 61 72 11 0a 03 62 61 7a 10 01")
print(tycho.decode(data))
# Structure({'foo': String('Hello World'), 'bar': Unsigned8(10), 'baz': Boolean(True)}) | [
11748,
1259,
6679,
198,
198,
7890,
796,
9881,
13,
6738,
33095,
7203,
1821,
7643,
7643,
7930,
718,
69,
718,
69,
352,
68,
657,
65,
4764,
6135,
718,
66,
718,
66,
718,
69,
1160,
7632,
718,
69,
7724,
718,
66,
5598,
7643,
8190,
8454,
77... | 2.541667 | 96 |
# type: ignore
import email
import os.path
import sifter.parser
| [
2,
2099,
25,
8856,
198,
198,
11748,
3053,
198,
11748,
28686,
13,
6978,
198,
198,
11748,
264,
18171,
13,
48610,
628,
198
] | 3.090909 | 22 |
import unittest
import tests.download
| [
11748,
555,
715,
395,
198,
11748,
5254,
13,
15002,
198
] | 3.8 | 10 |
import RPi.GPIO as GPIO
import time
GPIO.setmode(GPIO.BOARD)
GPIO.setwarnings(False)
GPIO.setup(12, GPIO.OUT)#piros
GPIO.setup(16, GPIO.OUT)#sárga
GPIO.setup(18, GPIO.OUT)#zöld
for x in range(10):
GPIO.output(12, GPIO.HIGH)
print("Felkészülni!")
time.sleep(0.15)
GPIO.output(12, GPIO.LOW)
time.sleep(0.15)
GPIO.output(16, GPIO.HIGH)
print("Vigyázz!")
time.sleep(0.15)
GPIO.output(16, GPIO.LOW)
time.sleep(0.15)
GPIO.output(18, GPIO.HIGH)
print("Rajt!!")
time.sleep(0.15)
GPIO.output(18, GPIO.LOW)
time.sleep(0.15)
| [
11748,
25812,
72,
13,
16960,
9399,
355,
50143,
198,
11748,
640,
198,
198,
16960,
9399,
13,
2617,
14171,
7,
16960,
9399,
13,
8202,
9795,
8,
198,
16960,
9399,
13,
2617,
40539,
654,
7,
25101,
8,
198,
16960,
9399,
13,
40406,
7,
1065,
11... | 1.949091 | 275 |
# This sample tests incompatible method overrides for multiple inheritance.
# This functionality is controlled by the reportIncompatibleMethodOverride
# diagnostic rule.
from typing import Generic, TypeVar
# This should generate an error because func1 is incompatible.
# This should generate an error because func1 is incompatible.
# This should generate an error because func1 is incompatible.
_T_E = TypeVar("_T_E")
| [
2,
770,
6291,
5254,
27294,
2446,
23170,
1460,
329,
3294,
24155,
13,
198,
2,
770,
11244,
318,
6856,
416,
262,
989,
818,
38532,
17410,
37961,
198,
2,
23584,
3896,
13,
628,
198,
6738,
19720,
1330,
42044,
11,
5994,
19852,
628,
628,
198,
... | 4.271845 | 103 |
# -*- coding: utf-8 -*-
# Copyright (c) 2015, Vispy Development Team.
# Distributed under the (new) BSD License. See LICENSE.txt for more info.
"""
Handle loading cassowary package from system or from the bundled copy
"""
try:
from ._bundled.cassowary import * # noqa
except ImportError:
from cassowary import * # noqa
| [
2,
532,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
198,
2,
15069,
357,
66,
8,
1853,
11,
6911,
9078,
7712,
4816,
13,
198,
2,
4307,
6169,
739,
262,
357,
3605,
8,
347,
10305,
13789,
13,
4091,
38559,
24290,
13,
14116,
329,
51... | 2.981982 | 111 |
import json
import boto3
import pytest
from moto import mock_ecs
from moto import mock_ec2
from moto.ec2 import utils as ec2_utils
# from otter.router.src.shared.client import ECSClient
from otter.router.src.shared.device import Device
from tests.unit import EXAMPLE_AMI_ID
@pytest.fixture
| [
11748,
33918,
198,
198,
11748,
275,
2069,
18,
198,
11748,
12972,
9288,
198,
6738,
285,
2069,
1330,
15290,
62,
721,
82,
198,
6738,
285,
2069,
1330,
15290,
62,
721,
17,
198,
6738,
285,
2069,
13,
721,
17,
1330,
3384,
4487,
355,
9940,
1... | 2.910891 | 101 |
"""Helper functions for mentorbot events and commands."""
from datetime import datetime
import discord
from discord.ext import commands
# Helper Functions
def character_info(cursor, character=None, region=None):
"""Return id, name, color, and icon url of given character/region."""
if character: # If character was given
return cursor.execute(
'''SELECT * FROM characters WHERE name = :character''',
{'character': character.title()}).fetchone()
elif region: # If region was given
return cursor.execute(
'''SELECT * FROM characters WHERE name = :region''',
{'region': region.upper()}).fetchone()
def character_role(guild, cursor, character, main=False):
"""Return role of character with name given."""
character = cursor.execute(
'''SELECT name FROM characters WHERE name = :character''',
{'character': character.title()}).fetchone()
if main:
return discord.utils.get(guild.roles, f"{character['name']} (Main)")
else:
return discord.utils.get(guild.roles, character['name'])
def sidebar_color(color):
"""Return default sidebar color for default role and member colors."""
if color == discord.Color.default():
return 0x202225
else:
return color
def get_nickname(member):
"""Return member's nickname, and if it is their default name."""
if member.nick:
return member.nick
else:
return f'{member.display_name} (No nickname)'
async def update_roles(member, remove, add):
"""Remove/add given roles, and return embed of info."""
await member.remove_roles(remove)
await member.add_roles(add)
embed = discord.Embed(
color=sidebar_color(add.color),
description=f'**Updated roles for {member.mention}:**\n'
'```diff\n'
f'- {remove.name}\n'
f'+ {add.name}```',
timestamp=datetime.utcnow())
embed.set_author(name='Roles updated', icon_url=member.avatar_url)
embed.set_footer(text=f'ID: {member.id}')
return embed
# Helper Command Checks
def in_academy():
"""Check that command is in Adademy or Mentorbot Test Server."""
return commands.check(predicate)
def in_channel(name):
"""Check that command is in channel of given name."""
return commands.check(predicate)
| [
37811,
47429,
5499,
329,
22387,
13645,
2995,
290,
9729,
526,
15931,
198,
198,
6738,
4818,
8079,
1330,
4818,
8079,
198,
198,
11748,
36446,
198,
6738,
36446,
13,
2302,
1330,
9729,
628,
198,
2,
5053,
525,
40480,
198,
4299,
2095,
62,
10951,... | 2.644766 | 898 |
#!/usr/bin/python
print("Loading libs..."), #Importing all libs
import os
import sys
import time
import simplejson
from threading import Thread
import ConfigParser
import cherrypy
import serial
print("Ok")
if not os.geteuid() == 0: #Check if is started as root
sys.exit('Must be run as root')
#Define Global vars#
Log = Log()
Printer = Printer()
SerialArray = None
PausedPrint = False
GCodeQueue = ['']
NewConsoleLines = ""
InstructionNumber = 0
############################
#----------Config----------#
############################
Config = ConfigParser.ConfigParser() #Loading config file
Config.read('config.conf')
#---Camera Config---#
CamURL = Config.get("LiveCamera", "Url")
EnabledCam = Config.getboolean("LiveCamera", "Enabled")
if not EnabledCam:
CamURL = ""
#---End Camera Config---#
#---Printer Config---#
EmergencyMode = int(Config.get("Other", "EmergencyMode"))
#---End of Printer Config---#
#---Server Config---#
ServerPort = int(Config.get("Server", "Port"))
cherrypy.config.update({'server.socket_port': ServerPort})
ServerHost = Config.get("Server", "Host")
cherrypy.config.update({'server.socket_host': ServerHost})
#---End of Server Config---#
###################################
#----------End of Config----------#
###################################
#Final Launch
if __name__ == '__main__':
SerialRefresh()
Log.Info("Starting server at : " + str(ServerHost) + ":" + str(ServerPort))
cherrypy.quickstart(AxisPrint(), '/', "server.conf") #Launching server !
| [
2,
48443,
14629,
14,
8800,
14,
29412,
198,
198,
4798,
7203,
19031,
9195,
82,
9313,
828,
1303,
20939,
278,
477,
9195,
82,
198,
11748,
28686,
198,
11748,
25064,
198,
11748,
640,
198,
11748,
2829,
17752,
198,
6738,
4704,
278,
1330,
14122,
... | 3.321586 | 454 |
class Parser:
"""
Parser for coq sentences.
WS: (" " | "\\n" | "\\t")*
unit: WS (comment | bullet | command)
comment: "(*" (comment | [^*] | "*" not followed by ")")* "*)"
bullet: (bullet-selector? WS "{") | "-"+ | "+"+ | "*"+ | "}"
bullet-selector: ([0-9]+ | "[" WS ident WS "]") WS ":"
command: (comment | string | [^\\.] | "." not followed by WS)* "."
string: "\\"" [^\\"]* "\\""
"""
def getUnit(self, line, col, encoding='utf-8'):
""" Return a list of the line, column, content and type of unit that was
matched, starting a line and col. """
self.line = line
self.col = col
self.content = ""
self.parseUnit()
return [self.line, self.col, self.content, self.type]
def peekNext(self):
""" Return the next character, but do not advance """
line = self.buf[self.line]
if len(line) > self.col:
return line[self.col]
elif len(self.buf) > self.line:
return '\n'
else:
raise Exception('No more data')
def getNext(self):
""" Return the next character, and advance """
line = self.buf[self.line]
if len(line) > self.col:
char = line[self.col]
self.col += 1
elif len(self.buf) > self.line:
self.col = 0
self.line += 1
char = '\n'
else:
raise Exception('No more data')
self.content += char
return char
def withBacktrack(self, fun, args=None):
""" Run fun, a function with args (or no argument by default). Save the
current state (line, col and content) and restore it if the function
fails with an exception. """
state = [self.line, self.col, self.content]
try:
if args is None:
fun()
else:
fun(*args)
return True
except:
self.line = state[0]
self.col = state[1]
self.content = state[2]
return False
def parseUnit(self):
""" unit: WS (comment | bullet | command) """
self.parseWS()
if self.peekNext() == '(' and self.withBacktrack(self.parseComment):
return
if self.withBacktrack(self.parseBullet):
return
return self.parseCommand()
def parseWS(self):
""" WS: (" " | "\\n" | "\\t")* """
char = self.peekNext()
if char in [' ', '\n', '\t']:
self.getNext()
self.parseWS()
def parseComment(self, gotParenthesis = 0):
""" comment: "(*" (comment | [^*] | "*" not followed by ")")* "*)" """
if ((gotParenthesis > 0) or self.getNext() == '(') and \
((gotParenthesis > 1) or self.getNext() == '*'):
while self.parseCommentInside():
continue
else:
raise Exception('Not a comment')
self.type = 'comment'
def parseBullet(self):
""" bullet: (bullet-selector? WS "{") | "-"+ | "+"+ | "*"+ | "}" """
if self.withBacktrack(self.parseBrace):
self.type = 'bullet'
return
num = 0
while self.peekNext() == '-':
num += 1
self.getNext()
if num > 0:
self.type = 'bullet'
return
while self.peekNext() == '+':
num += 1
self.getNext()
if num > 0:
self.type = 'bullet'
return
while self.peekNext() == '*':
num += 1
self.getNext()
if num > 0:
self.type = 'bullet'
return
if self.getNext() == '}':
self.type = 'bullet'
return
raise Exception('Not a bullet')
def parseBulletSelector(self):
""" bullet-selector: ([0-9]+ | "[" WS ident WS "]") WS ":" """
if self.withBacktrack(self.parseBulletSelectorNum):
self.parseWS()
if self.getNext() == ':':
return
else:
raise Exception('not a numeric goal selector')
if self.getNext() == '[':
self.parseWS()
self.parseIdent()
self.parseWS()
if self.getNext() == ']':
self.parseWS()
if self.getNext() == ':':
return
raise Exception('not a named goal selector')
raise Exception('not a goal selector at all')
def parseIdent(self):
"""
first_letter ::= a..z ∣ A..Z ∣ _ ∣ unicode-letter
subsequent_letter ::= a..z ∣ A..Z ∣ 0..9 ∣ _ ∣ ' ∣ unicode-letter ∣ unicode-id-part
ident ::= first_letter[subsequent_letter…subsequent_letter]
"""
nextchr = self.getNext()
if nextchr == '_' or ord(nextchr) > 127 or \
(ord(nextchr) >= ord('a') and ord(nextchr) <= ord('z')) or \
(ord(nextchr) >= ord('A') and ord(nextchr) <= ord('Z')):
while True:
nextchr = self.peekNext()
if nextchr == '_' or nextchr == "'" or ord(nextchr) > 127 or \
(ord(nextchr) >= ord('0') and ord(nextchr) <= ord('9')) or \
(ord(nextchr) >= ord('a') and ord(nextchr) <= ord('z')) or \
(ord(nextchr) >= ord('A') and ord(nextchr) <= ord('Z')):
self.getNext()
continue
return
raise Exception('not an ident')
def parseCommand(self):
""" command: (comment | string | [^\\.] | "." not followed by WS)* "." """
while True:
nextchr = self.getborring(['(*', '.', '"'])
if nextchr == '*':
self.parseComment(2)
continue
if nextchr == '"':
while self.getborring(['"']) != '"':
continue
continue
if nextchr != '.':
continue
if self.peekNext() in [' ', '\t', '\n']:
break
self.type = 'command'
def getborring(self, interestings):
""" Traverse a line until the first interesting character, from interestings,
a list of characters. When the firt interesting character is found,
return it and advance to it, as if it were read with getNext. """
line = self.buf[self.line][self.col:]
col = len(line)
for i in map((lambda x: (line.find(x), len(x))), interestings):
(pos, sz) = i
if pos < col and pos >= 0:
col = pos + sz - 1
if col >= len(line):
self.col = 0
self.line += 1
self.content += line + '\n'
return '\n'
else:
self.col += col+1
self.content += line[:col+1]
return line[col]
| [
4871,
23042,
263,
25,
198,
220,
220,
220,
37227,
198,
220,
220,
220,
23042,
263,
329,
763,
80,
13439,
13,
198,
220,
220,
220,
25290,
25,
5855,
366,
930,
366,
6852,
77,
1,
930,
366,
6852,
83,
4943,
9,
198,
220,
220,
220,
4326,
25... | 1.941045 | 3,579 |
import aiohttp
import string
import base64 | [
11748,
257,
952,
4023,
198,
11748,
4731,
198,
11748,
2779,
2414
] | 3.818182 | 11 |
# -*-coding:utf-8-*-
from math import sqrt
from ..models import Rating
import random
random.seed(0)
# 声明一个ItemBased推荐的 对象
usercf = UserBasedCF()
# usercf.generate_dataset()
# usercf.cal_user_sim('A')
# for rank_result in usercf.recommend('A'):
# print('推荐%s,推荐度%.2f' %(rank_result[0],rank_result[1]))
| [
2,
532,
9,
12,
66,
7656,
25,
40477,
12,
23,
12,
9,
12,
198,
198,
6738,
10688,
1330,
19862,
17034,
198,
6738,
11485,
27530,
1330,
12028,
198,
11748,
4738,
198,
198,
25120,
13,
28826,
7,
15,
8,
198,
198,
2,
10263,
96,
108,
23626,
... | 1.90184 | 163 |
import argparse
import functools
import sys
import pkg_resources
from entropylab.dashboard import serve_dashboard
from entropylab.logger import logger
from entropylab.pipeline.results_backend.sqlalchemy import init_db, upgrade_db
# Decorator for friendly error messages
def command(func: callable) -> callable:
"""Decorator that runs commands. On error, prints friendly message when possible"""
@functools.wraps(func)
return wrapper
# CLI command functions
@command
@command
@command
# The parser
# main
if __name__ == "__main__":
main()
| [
11748,
1822,
29572,
198,
11748,
1257,
310,
10141,
198,
11748,
25064,
198,
198,
11748,
279,
10025,
62,
37540,
198,
198,
6738,
920,
1773,
2645,
397,
13,
42460,
3526,
1330,
4691,
62,
42460,
3526,
198,
6738,
920,
1773,
2645,
397,
13,
6404,
... | 3.141304 | 184 |
import requests
import json
import os
from datetime import datetime
import time
# This is the class to connect this code to CoinMarketCap APIs
# This is the class in which I set the parameters to fetch the data I need from CoinMarketCap
while True:
# This is an infinite loop that sleeps for 24 hours each cycle, so it returns 1 report per day
# and print the fundamental information
report = Reports()
print(f"Daily report of crypto according to CoinMarketCap of: {datetime.now()}")
print("")
print(f"The highest trading volume currency of last 24h is "
f"{report.reports['highest_traded']['symbol']} "
f"with a volume of {round(report.reports['highest_traded']['quote']['USD']['volume_24h'], 0)}$")
print("Top 10 by increment: ")
for currency in report.reports['top_10_by_increment']:
print(currency['symbol'], f"{round(currency['quote']['USD']['percent_change_24h'], 2)}%")
print("")
print("Top ten by decrement: ")
for currency in report.reports['top_10_by_decrement']:
print(currency['symbol'], f"{round(currency['quote']['USD']['percent_change_24h'], 2)}%")
print("")
print(f"Total price of 20 best currencies of CoinMarketCap ranking: {report.reports['total_price_top_20']}$")
print(f"Total price of currencies that have a daily volume higher than 76M$: "
f"{report.reports['total_price_of_higher_volume_currencies']}$")
print(f"Percentage change of 20 best currencies of CoinMarketCap ranking: "
f"{report.reports['percent_change_of_twenty_best']}%")
print("------------------------------------------------------------")
make_json_report(report.reports)
time.sleep(86400)
| [
11748,
7007,
198,
11748,
33918,
198,
11748,
28686,
198,
6738,
4818,
8079,
1330,
4818,
8079,
198,
11748,
640,
628,
220,
220,
220,
1303,
770,
318,
262,
1398,
284,
2018,
428,
2438,
284,
16312,
27470,
15610,
23113,
628,
220,
220,
220,
1303,... | 2.915398 | 591 |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
__author__ = 'ipetrash'
import itertools
import tkinter as tk
root = tk.Tk()
root.geometry('200x200')
CYCLED_IMAGES = itertools.cycle([
tk.PhotoImage(file="icons/ok.png"),
tk.PhotoImage(file="icons/no.png"),
tk.PhotoImage(file="icons/help.png"),
])
panel = tk.Label(root, image=get_next_image())
panel.pack()
button = tk.Button(root, text="ClickMe!")
button.config(command=_on_button_click)
button.pack()
root.mainloop()
| [
2,
48443,
14629,
14,
8800,
14,
24330,
21015,
18,
198,
2,
532,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
198,
198,
834,
9800,
834,
796,
705,
541,
21879,
1077,
6,
628,
198,
11748,
340,
861,
10141,
198,
11748,
256,
74,
3849,
... | 2.358852 | 209 |
# coding: utf-8
# Copyright (c) 2016, 2021, Oracle and/or its affiliates. All rights reserved.
# This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license.
from oci.util import formatted_flat_dict, NONE_SENTINEL, value_allowed_none_or_none_sentinel # noqa: F401
from oci.decorators import init_model_state_from_kwargs
@init_model_state_from_kwargs
class ImpactedResourceSummary(object):
"""
Impacted Resource summary Definition.
"""
def __init__(self, **kwargs):
"""
Initializes a new ImpactedResourceSummary object with values from keyword arguments.
The following keyword arguments are supported (corresponding to the getters/setters of this class):
:param id:
The value to assign to the id property of this ImpactedResourceSummary.
:type id: str
:param resource_id:
The value to assign to the resource_id property of this ImpactedResourceSummary.
:type resource_id: str
:param problem_id:
The value to assign to the problem_id property of this ImpactedResourceSummary.
:type problem_id: str
:param compartment_id:
The value to assign to the compartment_id property of this ImpactedResourceSummary.
:type compartment_id: str
:param resource_name:
The value to assign to the resource_name property of this ImpactedResourceSummary.
:type resource_name: str
:param resource_type:
The value to assign to the resource_type property of this ImpactedResourceSummary.
:type resource_type: str
:param region:
The value to assign to the region property of this ImpactedResourceSummary.
:type region: str
:param time_identified:
The value to assign to the time_identified property of this ImpactedResourceSummary.
:type time_identified: datetime
"""
self.swagger_types = {
'id': 'str',
'resource_id': 'str',
'problem_id': 'str',
'compartment_id': 'str',
'resource_name': 'str',
'resource_type': 'str',
'region': 'str',
'time_identified': 'datetime'
}
self.attribute_map = {
'id': 'id',
'resource_id': 'resourceId',
'problem_id': 'problemId',
'compartment_id': 'compartmentId',
'resource_name': 'resourceName',
'resource_type': 'resourceType',
'region': 'region',
'time_identified': 'timeIdentified'
}
self._id = None
self._resource_id = None
self._problem_id = None
self._compartment_id = None
self._resource_name = None
self._resource_type = None
self._region = None
self._time_identified = None
@property
def id(self):
"""
**[Required]** Gets the id of this ImpactedResourceSummary.
Unique identifier for finding event
:return: The id of this ImpactedResourceSummary.
:rtype: str
"""
return self._id
@id.setter
def id(self, id):
"""
Sets the id of this ImpactedResourceSummary.
Unique identifier for finding event
:param id: The id of this ImpactedResourceSummary.
:type: str
"""
self._id = id
@property
def resource_id(self):
"""
**[Required]** Gets the resource_id of this ImpactedResourceSummary.
Unique id of the Impacted Resource
:return: The resource_id of this ImpactedResourceSummary.
:rtype: str
"""
return self._resource_id
@resource_id.setter
def resource_id(self, resource_id):
"""
Sets the resource_id of this ImpactedResourceSummary.
Unique id of the Impacted Resource
:param resource_id: The resource_id of this ImpactedResourceSummary.
:type: str
"""
self._resource_id = resource_id
@property
def problem_id(self):
"""
**[Required]** Gets the problem_id of this ImpactedResourceSummary.
Problem Id to which the Impacted Resource is associated
:return: The problem_id of this ImpactedResourceSummary.
:rtype: str
"""
return self._problem_id
@problem_id.setter
def problem_id(self, problem_id):
"""
Sets the problem_id of this ImpactedResourceSummary.
Problem Id to which the Impacted Resource is associated
:param problem_id: The problem_id of this ImpactedResourceSummary.
:type: str
"""
self._problem_id = problem_id
@property
def compartment_id(self):
"""
**[Required]** Gets the compartment_id of this ImpactedResourceSummary.
Compartment Id where the resource is created
:return: The compartment_id of this ImpactedResourceSummary.
:rtype: str
"""
return self._compartment_id
@compartment_id.setter
def compartment_id(self, compartment_id):
"""
Sets the compartment_id of this ImpactedResourceSummary.
Compartment Id where the resource is created
:param compartment_id: The compartment_id of this ImpactedResourceSummary.
:type: str
"""
self._compartment_id = compartment_id
@property
def resource_name(self):
"""
**[Required]** Gets the resource_name of this ImpactedResourceSummary.
Name of the Impacted Resource
:return: The resource_name of this ImpactedResourceSummary.
:rtype: str
"""
return self._resource_name
@resource_name.setter
def resource_name(self, resource_name):
"""
Sets the resource_name of this ImpactedResourceSummary.
Name of the Impacted Resource
:param resource_name: The resource_name of this ImpactedResourceSummary.
:type: str
"""
self._resource_name = resource_name
@property
def resource_type(self):
"""
**[Required]** Gets the resource_type of this ImpactedResourceSummary.
Type of the Impacted Resource
:return: The resource_type of this ImpactedResourceSummary.
:rtype: str
"""
return self._resource_type
@resource_type.setter
def resource_type(self, resource_type):
"""
Sets the resource_type of this ImpactedResourceSummary.
Type of the Impacted Resource
:param resource_type: The resource_type of this ImpactedResourceSummary.
:type: str
"""
self._resource_type = resource_type
@property
def region(self):
"""
**[Required]** Gets the region of this ImpactedResourceSummary.
Region where the resource is created
:return: The region of this ImpactedResourceSummary.
:rtype: str
"""
return self._region
@region.setter
def region(self, region):
"""
Sets the region of this ImpactedResourceSummary.
Region where the resource is created
:param region: The region of this ImpactedResourceSummary.
:type: str
"""
self._region = region
@property
def time_identified(self):
"""
**[Required]** Gets the time_identified of this ImpactedResourceSummary.
Time when the problem was identified
:return: The time_identified of this ImpactedResourceSummary.
:rtype: datetime
"""
return self._time_identified
@time_identified.setter
def time_identified(self, time_identified):
"""
Sets the time_identified of this ImpactedResourceSummary.
Time when the problem was identified
:param time_identified: The time_identified of this ImpactedResourceSummary.
:type: datetime
"""
self._time_identified = time_identified
| [
2,
19617,
25,
3384,
69,
12,
23,
198,
2,
15069,
357,
66,
8,
1584,
11,
33448,
11,
18650,
290,
14,
273,
663,
29116,
13,
220,
1439,
2489,
10395,
13,
198,
2,
770,
3788,
318,
10668,
12,
36612,
284,
345,
739,
262,
14499,
2448,
33532,
1... | 2.509822 | 3,258 |
#!/usr/bin/env python
import subprocess
from threading import Thread
from csv import DictReader
from SPAdesPipeline.OLCspades.accessoryFunctions import *
import SPAdesPipeline.OLCspades.metadataprinter as metadataprinter
__author__ = 'adamkoziol'
# If the script is called from the command line, then call the argument parser
if __name__ == '__main__':
# Run the script
Parser()
| [
2,
48443,
14629,
14,
8800,
14,
24330,
21015,
198,
11748,
850,
14681,
198,
6738,
4704,
278,
1330,
14122,
198,
6738,
269,
21370,
1330,
360,
713,
33634,
198,
6738,
6226,
2782,
274,
47,
541,
4470,
13,
3535,
34,
2777,
2367,
13,
15526,
652,... | 3 | 130 |
#!/usr/bin/env python
# coding: utf-8
##############################################################################
# joint beta and completeness magnitude estimation
# using p-value of Kolmogorov-Smirnov distance to fitted Gutenberg-Richter law
#
# as described by Mizrahi et al., 2021
# Leila Mizrahi, Shyam Nandan, Stefan Wiemer;
# The Effect of Declustering on the Size Distribution of Mainshocks.
# Seismological Research Letters 2021; doi: https://doi.org/10.1785/0220200231
# inspired by method of Clauset et al., 2009
##############################################################################
import pandas as pd
import numpy as np
# mc is the binned completeness magnitude,
# so the 'true' completeness magnitude is mc - delta_m / 2
def estimate_mc(sample, mcs_test, delta_m, p_pass, stop_when_passed=True, verbose=False, beta=None,
n_samples=10000):
"""
sample: np array of magnitudes to test
mcs_test: completeness magnitudes to test
delta_m: magnitude bins (sample has to be rounded to bins beforehand)
p_pass: p-value with which the test is passed
stop_when_passed: stop calculations when first mc passes the test
verbose: verbose
beta: if beta is 'known', only estimate mc
n_samples: number of magnitude samples to be generated in p-value calculation of KS distance
"""
ks_ds = []
ps = []
i = 0
for mc in mcs_test:
if verbose:
print('\ntesting mc', mc)
ks_d, p, _ = ks_test_gr(sample, mc=mc, delta_m=delta_m, n_samples=n_samples, beta=beta)
ks_ds.append(ks_d)
ps.append(p)
i += 1
if verbose:
print('..p-value: ', p)
if p >= p_pass and stop_when_passed:
break
ps = np.array(ps)
if np.any(ps >= p_pass):
best_mc = mcs_test[np.argmax(ps >= p_pass)]
if beta is None:
beta = estimate_beta_tinti(sample[sample >= best_mc - delta_m / 2], mc=best_mc, delta_m=delta_m)
if verbose:
print("\n\nFirst mc to pass the test:", best_mc, "\nwith a beta of:", beta)
else:
best_mc = None
beta = None
if verbose:
print("None of the mcs passed the test.")
return mcs_test, ks_ds, ps, best_mc, beta
if __name__ == '__main__':
magnitude_sample = np.load("magnitudes.npy")
mcs = round_half_up(np.arange(2.0, 5.5, 0.1), 1)
mcs_tested, ks_distances, p_values, mc_winner, beta_winner = estimate_mc(
magnitude_sample,
mcs,
delta_m=0.1,
p_pass=0.05,
stop_when_passed=False,
verbose=True,
n_samples=1000
)
| [
2,
48443,
14629,
14,
8800,
14,
24330,
21015,
198,
2,
19617,
25,
3384,
69,
12,
23,
198,
198,
29113,
29113,
7804,
4242,
2235,
198,
2,
6466,
12159,
290,
1224,
43205,
14735,
31850,
198,
2,
1262,
279,
12,
8367,
286,
25910,
76,
519,
273,
... | 2.42452 | 1,093 |
import re
import numpy as np
from .statistics import Statistics
"""
The module to extract ratings and create their distribution.
""" | [
11748,
302,
198,
11748,
299,
32152,
355,
45941,
198,
6738,
764,
14269,
3969,
1330,
14370,
198,
198,
37811,
198,
464,
8265,
284,
7925,
10109,
290,
2251,
511,
6082,
13,
198,
37811
] | 4.290323 | 31 |
# -*- coding: utf-8 -*-
import json
import logging
from django.core.exceptions import ObjectDoesNotExist
from django.db import transaction
from django.http import HttpResponse
from django.shortcuts import redirect, render, get_object_or_404
from .selected_book_views import selected_book
from ..forms import SetCurrentPageForm
from ..models import Book, AddedBook, TheUser
from ..views import process_ajax, process_form
logger = logging.getLogger('changes')
# ----------------------------------------------------------------------------------------------------------------------
def open_book(request, book_id):
"""
Returns a page for reading book.
"""
book = get_object_or_404(Book, id=book_id)
if request.user.is_authenticated():
user = TheUser.objects.get(id_user=request.user)
try:
added_book = AddedBook.objects.get(id_book=book, id_user=user)
added_book.last_read = added_book.last_read.now()
added_book.save()
logger.info("User '{}' opened book with id: '{}'.".format(user, book.id))
context = {'book': book, 'book_page': added_book.last_page}
return render(request, 'read_book.html', context)
except ObjectDoesNotExist:
return redirect(selected_book, book_id=book_id)
else:
if book.blocked_book:
return redirect(selected_book, book_id=book_id)
if book.private_book:
return HttpResponse(status=404)
try:
book_page = int(request.COOKIES.get('plamber_book_{}'.format(book_id), 1))
except ValueError:
book_page = 1
context = {'book': book, 'book_page': book_page}
return render(request, 'read_book.html', context)
# ----------------------------------------------------------------------------------------------------------------------
@process_ajax(404)
@process_form('POST', SetCurrentPageForm, 404)
def set_current_page(request, form):
"""
Changes current readed page for book of user.
"""
with transaction.atomic():
book = Book.objects.get(id=form.cleaned_data['book'])
user = TheUser.objects.get(id_user=request.user)
added_book = AddedBook.objects.get(id_book=book, id_user=user)
added_book.last_page = form.cleaned_data['page']
added_book.save()
logger.info("User '{}' on book with id: '{}' changed page to: '{}'."
.format(user, book.id, form.cleaned_data['page']))
return HttpResponse(json.dumps(True), content_type='application/json')
| [
2,
532,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
198,
198,
11748,
33918,
198,
11748,
18931,
198,
198,
6738,
42625,
14208,
13,
7295,
13,
1069,
11755,
1330,
9515,
13921,
3673,
3109,
396,
198,
6738,
42625,
14208,
13,
9945,
1330,... | 2.636735 | 980 |
from scapy.all import sniff
if __name__ == '__main__':
main() | [
6738,
629,
12826,
13,
439,
1330,
26300,
628,
628,
198,
361,
11593,
3672,
834,
6624,
705,
834,
12417,
834,
10354,
198,
220,
220,
220,
1388,
3419
] | 2.653846 | 26 |
from construct import *
from construct.lib import *
docstrings_docref = Struct(
'one' / Int8ub,
'two' / Int8ub,
'three' / Int8ub,
'foo' / Computed(lambda this: True),
'parse_inst' / Pointer(0, Int8ub),
)
_schema = docstrings_docref
| [
6738,
5678,
1330,
1635,
198,
6738,
5678,
13,
8019,
1330,
1635,
198,
198,
15390,
37336,
62,
15390,
5420,
796,
32112,
7,
198,
197,
6,
505,
6,
1220,
2558,
23,
549,
11,
198,
197,
470,
21638,
6,
1220,
2558,
23,
549,
11,
198,
197,
470,
... | 2.463918 | 97 |
import hashlib
import hmac
import json
import requests
from time import time
class Txbit:
"""A class to interact with the Txbit.io API
Attributes
----------
endpoint : str
the base url for API calls
APIKey : str
key for working with the Market and Account methods
Secret : str
secret for working with the Market and Account methods
Methods
-------
expandPathToUrl(path, params={}):
adds onto the base url for specific methods
request(path, params={}):
uses `expandPathToUrl()` to make the API call
authenticatedRequest(path, params={})
authenticated API call with APIKey and Secret for Market and Account methods
getMarkets():
get the open and available trading markets along with other meta data
getCurrencies():
get all supported assets along with other meta data
getMarketSummaries():
get the last 24 hour summary of all active markets
getExchangePairs():
get list of all pairs that form markets
getSystemStatus():
get the system related status for all currencies
getOrderBook(market, bookType='both'):
get the orderbook for a given market
getTicker(market):
get current tick values for a market
getMarketHistory(market):
get the latest trades that have occurred for a specific market
getCurrencyInformation(currency):
get specific information and metadata about the listed currency
getCurrencyBalanceSheet(currency):
get solvency information for listed currencies
getBalances():
get all balances from your account
getBalanceFor(currency):
get the balance from your account for a specific asset
getDepositAddress(currency):
get or generate an address for a specific currency
withdraw(currency, quantity, address, paymentid=None):
withdraw funds from your account
getOrder(uuic):
get a single order by uuid
getOrderHistory(market):
get your order history
getWithdrawlHistory(currency):
get your withdrawal history
getDepositHistory(currency):
get your deposit history
butLimit(market, quantity, rate):
place a Buy Limit order in a specific market
sellLimit(market, quantity, rate):
place a Sell Limit order in a specific market
cancel(uuid):
cancel a buy or sell order
getOpenOrders(market):
get all orders that you currently have opened
Notes
-----
Public methods can be run without supplying APIKey or Secret
Market and Account methods need APIKey and Secret for authentification
Market methods need 'ALLOW TRADING' permission set up on the APIKey
Account methods need 'ALLOW READING' permission set up on the APIKey
For more information, see: https://apidocs.txbit.io/#txbit-io-api
"""
endpoint = 'https://api.txbit.io/api/'
def expandPathToUrl(path, params={}):
"""adds onto the base url for specific methods"""
url = Txbit.endpoint + path
url += '?' if params else ''
for key in params:
url += key + '=' + params[key] + '&'
return url
def request(path, params={}):
"""uses `expandPathToUrl()` to make the API call"""
url = Txbit.expandPathToUrl(path, params)
return requests.get(url)
def authenticatedRequest(self, path, params={}):
"""authenticated API call with APIKey and Secret for Market and Account methods"""
params['apikey'] = self.APIKey
params['nonce'] = str(int(time()))
url = Txbit.expandPathToUrl(path, params)
apisign = hmac.new(bytes(self.Secret, 'utf-8'),
bytes(url, 'utf-8'),
hashlib.sha512).hexdigest().upper()
headers = {'apisign': apisign}
return requests.get(url, headers=headers)
## PUBLIC FUNCTIONS ---------------
def getMarkets():
"""(P) get the open and available trading markets along with other meta data"""
res = Txbit.request('public/getmarkets')
result = res.json()['result'] if res.ok and res.json()['success'] else res.status_code
return TxbitResponse(res.ok and res.json()['success'], "", result)
def getCurrencies():
"""(P) get all supported assets along with other meta data"""
res = Txbit.request('public/getcurrencies')
result = res.json()['result'] if res.ok and res.json()['success'] else res.status_code
return TxbitResponse(res.ok and res.json()['success'], "", result)
def getMarketSummaries():
"""(P) get the last 24 hour summary of all active markets"""
res = Txbit.request('public/getmarketsummaries')
result = res.json()['result'] if res.ok and res.json()['success'] else res.status_code
return TxbitResponse(res.ok and res.json()['success'], "", result)
def getExchangePairs():
"""(P) get list of all pairs that form markets"""
res = Txbit.getMarketSummaries()
res.result = [pair['MarketName'] for pair in res.result]
return res
def getSystemStatus():
"""(P) get the system related status for all currencies"""
res = Txbit.request('public/getsystemstatus')
result = res.json()['result'] if res.ok and res.json()['success'] else res.status_code
return TxbitResponse(res.ok and res.json()['success'], "", result)
def getOrderBook(market, bookType='both'):
"""(P) get the orderbook for a given market"""
params = {'market': market, 'type': bookType}
res = Txbit.request('public/getorderbook', params)
result = res.json()['result'] if res.ok and res.json()['success'] else res.status_code
return TxbitResponse(res.ok and res.json()['success'], "", result)
def getTicker(market):
"""(P) get current tick values for a market"""
params = {'market': market}
res = Txbit.request('public/getticker', params)
result = res.json()['result'] if res.ok and res.json()['success'] else res.status_code
return TxbitResponse(res.ok and res.json()['success'], "", result)
def getMarketHistory(market):
"""(P) get the latest trades that have occurred for a specific market"""
params = {'market': market}
res = Txbit.request('public/getmarkethistory', params)
result = res.json()['result'] if res.ok and res.json()['success'] else res.status_code
return TxbitResponse(res.ok and res.json()['success'], "", result)
def getCurrencyInformation(currency):
"""(P) get specific information and metadata about the listed currency"""
params = {'currency': currency}
res = Txbit.request('public/getcurrencyinformation', params)
result = res.json()['result'] if res.ok and res.json()['success'] else res.status_code
return TxbitResponse(res.ok and res.json()['success'], "", result)
def getCurrencyBalanceSheet(currency):
"""(P) get solvency information for listed currencies"""
params = {'currency': currency}
res = Txbit.request('public/getcurrencybalancesheet', params)
result = res.json()['result'] if res.ok and res.json()['success'] else res.status_code
return TxbitResponse(res.ok and res.json()['success'], "", result)
## ACOUNT FUNCTIONS ---------------
def getBalances(self):
"""(A) get all balances from your account"""
path = 'account/getbalances'
res = self.authenticatedRequest(path)
result = res.json()['result'] if res.ok and res.json()['success'] else res.status_code
return TxbitResponse(res.ok and res.json()['success'], "", result)
def getBalanceFor(self, currency):
"""(A) get the balance from your account for a specific asset"""
path = 'account/getbalance'
params = {'currency': currency}
res = self.authenticatedRequest(path, params)
result = res.json()['result'] if res.ok and res.json()['success'] else res.status_code
return TxbitResponse(res.ok and res.json()['success'], "", result)
def getDepositAddress(self, currency):
"""(A) get or generate an address for a specific currency
Notes
-----
will return ADDRESS_GENERATING until one is available
"""
path = 'account/getdepositaddress'
params = {'currency': currency}
res = self.authenticatedRequest(path, params)
result = res.json()['result'] if res.ok and res.json()['success'] else res.status_code
return TxbitResponse(res.ok and res.json()['success'], "", result)
def withdraw(self, currency, quantity, address, paymentid=None):
"""(A) withdraw funds from your account
Notes
-----
account for txfee
"""
path = 'account/withdraw'
params = {'currency': currency, 'quantity': quantity, 'address': address}
if paymentid is not None:
params['paymentid'] = paymentid
res = self.authenticatedRequest(path, params)
result = res.json()['result'] if res.ok and res.json()['success'] else res.status_code
return TxbitResponse(res.ok and res.json()['success'], "", result)
def getOrder(self, uuid):
"""(A) get a single order by uuid"""
path = 'account/getorder'
params = {'uuid': uuid}
res = self.authenticatedRequest(path, params)
result = res.json()['result'] if res.ok and res.json()['success'] else res.status_code
return TxbitResponse(res.ok and res.json()['success'], "", result)
def getOrderHistory(self, market=None):
"""(A) get your order history"""
path = 'account/getorderhistory'
params = {}
if market is not None:
params['market'] = market
res = self.authenticatedRequest(path, params)
result = res.json()['result'] if res.ok and res.json()['success'] else res.status_code
return TxbitResponse(res.ok and res.json()['success'], "", result)
def getWithdrawlHistory(self, currency=None):
"""(A) get your withdrawal history"""
path = 'account/getwithdrawalhistory'
params = {}
if currency is not None:
params['currency'] = currency
res = self.authenticatedRequest(path, params)
result = res.json()['result'] if res.ok and res.json()['success'] else res.status_code
return TxbitResponse(res.ok and res.json()['success'], "", result)
def getDepositHistory(self, currency=None):
"""(A) get your deposit history"""
path = 'account/getdeposithistory'
params = {}
if currency is not None:
params['currency'] = currency
res = self.authenticatedRequest(path, params)
result = res.json()['result'] if res.ok and res.json()['success'] else res.status_code
return TxbitResponse(res.ok and res.json()['success'], "", result)
## MARKET FUNCTIONS ---------------
def buyLimit(self, market, quantity, rate):
"""(M) place a Buy Limit order in a specific market"""
path = 'market/buylimit'
params = {'market': market, 'quantity': quantity, 'rate': rate}
res = self.authenticatedRequest(path, params)
result = res.json()['result'] if res.ok and res.json()['success'] else res.status_code
return TxbitResponse(res.ok and res.json()['success'], "", result)
def sellLimit(self, market, quantity, rate):
"""(M) place a Sell Limit order in a specific market"""
path = 'market/selllimit'
params = {'market': market, 'quantity': quantity, 'rate': rate}
res = self.authenticatedRequest(path, params)
result = res.json()['result'] if res.ok and res.json()['success'] else res.status_code
return TxbitResponse(res.ok and res.json()['success'], "", result)
def cancel(self, uuid):
"""(M) cancel a buy or sell order"""
path = 'market/cancel'
params = {'uuic': uuic}
res = self.authenticatedRequest(path, params)
result = res.json()['result'] if res.ok and res.json()['success'] else res.status_code
return TxbitResponse(res.ok and res.json()['success'], "", result)
def getOpenOrders(self, market=None):
"""(M) get all orders that you currently have opened"""
path = 'getopenorders'
params = {}
if market is not None:
params['market'] = market
res = self.authenticatedRequest(path, params)
result = res.json()['result'] if res.ok and res.json()['success'] else res.status_code
return TxbitResponse(res.ok and res.json()['success'], "", result)
| [
11748,
12234,
8019,
198,
11748,
289,
20285,
198,
11748,
33918,
628,
198,
11748,
7007,
198,
198,
6738,
640,
1330,
640,
198,
198,
4871,
309,
87,
2545,
25,
198,
220,
220,
220,
37227,
32,
1398,
284,
9427,
351,
262,
309,
87,
2545,
13,
95... | 2.667364 | 4,780 |
from onegov.page import Page
from onegov.form import FormDefinition
from onegov.reservation import Resource
class PersonMove(object):
""" Represents a single move of a linked person. """
@classmethod
@staticmethod
class PagePersonMove(PersonMove):
""" Represents a single move of a linked person on a page. """
@property
class FormPersonMove(PersonMove):
""" Represents a single move of a linked person on a form definition. """
@property
class ResourcePersonMove(PersonMove):
""" Represents a single move of a linked person on a form definition. """
@property
| [
6738,
530,
9567,
13,
7700,
1330,
7873,
198,
6738,
530,
9567,
13,
687,
1330,
5178,
36621,
198,
6738,
530,
9567,
13,
411,
13208,
1330,
20857,
628,
198,
4871,
7755,
21774,
7,
15252,
2599,
198,
220,
220,
220,
37227,
1432,
6629,
257,
2060,... | 3.567251 | 171 |
# -*- coding: utf-8 -*-
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe
from frappe.model.document import Document
from frappe import msgprint, _
from client.hr_services.doctype.end_of_service_award.end_of_service_award import get_award
from frappe.utils import cint, cstr, date_diff, flt, formatdate, getdate, get_link_to_form, \
comma_or, get_fullname, add_years, add_months, add_days, nowdate, get_first_day, get_last_day
#if frappe.get_value('Financial Custody', filters={'employee' : self.employee}):
#name=frappe.get_value('Financial Custody', filters={'employee' : self.employee})
#custody =frappe.get_doc("Financial Custody",name)
#approver=custody.reported_by
#if approver:
#frappe.throw(self.employee+"/ "+self.employee_name+" have an active Financial Custody approved by "+approver)
| [
2,
532,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
198,
2,
15069,
357,
66,
8,
1853,
11,
39313,
27768,
21852,
18367,
83,
13,
12052,
13,
290,
20420,
198,
2,
1114,
5964,
1321,
11,
3387,
766,
5964,
13,
14116,
198,
198,
6738,
... | 2.789474 | 342 |
# Generated by Django 3.2.11 on 2022-01-05 07:47
from django.db import migrations
| [
2,
2980,
515,
416,
37770,
513,
13,
17,
13,
1157,
319,
33160,
12,
486,
12,
2713,
8753,
25,
2857,
198,
198,
6738,
42625,
14208,
13,
9945,
1330,
15720,
602,
628
] | 2.8 | 30 |
from .. import types, alltlobjects
from ..custom.message import Message as _Message
types.MessageEmpty = MessageEmpty
alltlobjects.tlobjects[MessageEmpty.CONSTRUCTOR_ID] = MessageEmpty
types.MessageService = MessageService
alltlobjects.tlobjects[MessageService.CONSTRUCTOR_ID] = MessageService
types.Message = Message
alltlobjects.tlobjects[Message.CONSTRUCTOR_ID] = Message
| [
6738,
11485,
1330,
3858,
11,
477,
28781,
48205,
198,
6738,
11485,
23144,
13,
20500,
1330,
16000,
355,
4808,
12837,
628,
198,
198,
19199,
13,
12837,
40613,
796,
16000,
40613,
198,
439,
28781,
48205,
13,
28781,
48205,
58,
12837,
40613,
13,
... | 3.622642 | 106 |
# STANDARD LIB
import threading
# DJANGAE
from djangae.db import transaction
from djangae.contrib import sleuth
from djangae.test import TestCase
| [
2,
49053,
9795,
45651,
198,
11748,
4704,
278,
198,
198,
2,
13004,
1565,
9273,
36,
198,
6738,
42625,
648,
3609,
13,
9945,
1330,
8611,
198,
6738,
42625,
648,
3609,
13,
3642,
822,
1330,
3133,
1071,
198,
6738,
42625,
648,
3609,
13,
9288,
... | 3.170213 | 47 |
import OGL
from macros import *
from math import cos, sin, radians, degrees
from OpenGL.GL import *
from OpenGL.GLU import *
import pygame
import shoot
import config
from formula import * | [
11748,
440,
8763,
198,
6738,
34749,
1330,
1635,
198,
6738,
10688,
1330,
8615,
11,
7813,
11,
2511,
1547,
11,
7370,
198,
6738,
30672,
13,
8763,
1330,
1635,
198,
6738,
30672,
13,
8763,
52,
1330,
1635,
198,
11748,
12972,
6057,
198,
11748,
... | 3.74 | 50 |
"""API handlers for administering the Hub itself"""
# Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
import json
import sys
from tornado import web
from tornado.ioloop import IOLoop
from .._version import __version__
from ..scopes import needs_scope
from .base import APIHandler
default_handlers = [
(r"/api/shutdown", ShutdownAPIHandler),
(r"/api/?", RootAPIHandler),
(r"/api/info", InfoAPIHandler),
]
| [
37811,
17614,
32847,
329,
38849,
262,
14699,
2346,
37811,
198,
2,
15069,
357,
66,
8,
449,
929,
88,
353,
7712,
4816,
13,
198,
2,
4307,
6169,
739,
262,
2846,
286,
262,
40499,
347,
10305,
13789,
13,
198,
11748,
33918,
198,
11748,
25064,
... | 3.268966 | 145 |
#!/usr/bin/env jython
import atexit
import logging
from config import Config
from java.util import Properties
from org.jclouds import ContextBuilder
from org.jclouds.abiquo import AbiquoApiMetadata, AbiquoContext
from org.jclouds.logging.slf4j.config import SLF4JLoggingModule
from org.jclouds.sshj.config import SshjSshClientModule
log = logging.getLogger('kahuna')
class ContextLoader:
""" Sets the context to call Abiquo's API.
This class must be the first one to be instantiated when we want to
start a session with Abiquo's API. Just initialize it and call the
load() method.
"""
def __init__(self, overrides=None):
""" Sets the properties and context builders """
self.__context = None
self.__config = Config()
self.__endpoint = "http://" + self.__config.address + "/api"
if overrides:
log.debug("Overriding default config values")
for property in sorted(overrides.iterkeys()):
setattr(self.__config, property, overrides[property])
def __del__(self):
""" Closes the context before destroying """
if self.__context:
log.debug("Disconnecting from %s" % self.__endpoint)
self.__context.close()
def load(self):
""" Creates and configures the context """
if not self.__context: # Avoid loading the same context twice
props = self._load_config()
log.debug("Connecting to %s as %s" % (self.__endpoint,
self.__config.user))
self.__context = ContextBuilder.newBuilder(AbiquoApiMetadata()) \
.endpoint(self.__endpoint) \
.credentials(self.__config.user, self.__config.password) \
.modules([SshjSshClientModule(), SLF4JLoggingModule()]) \
.overrides(props) \
.buildView(AbiquoContext)
api_version = self.__context.getApiContext() \
.getProviderMetadata().getApiMetadata().getVersion()
log.debug("Using Abiquo version: %s" % api_version)
# Close context automatically when exiting
atexit.register(self.__del__)
return self.__context
def _load_config(self):
""" Returns the default jclouds client configuration """
props = Properties()
[props.put(name, value)
for (name, value) in self.__config.client_config]
return props
| [
2,
48443,
14629,
14,
8800,
14,
24330,
474,
7535,
198,
198,
11748,
379,
37023,
198,
11748,
18931,
198,
198,
6738,
4566,
1330,
17056,
198,
6738,
20129,
13,
22602,
1330,
24946,
198,
6738,
8745,
13,
73,
17721,
82,
1330,
30532,
32875,
198,
... | 2.393379 | 1,027 |
import json
from json import JSONDecodeError
from aioredis import Redis
from .config.redis_conf import HOST, PORT, CREDDB, CRED_EXP, COST_EXP, REVOKED_TOKENS, CACHEDB
from .response_exceptions import UserNotInDBException
cred_db = Redis(host=HOST, port=PORT, db=CREDDB, decode_responses=True)
cache_db = Redis(host=HOST, port=PORT, db=CACHEDB, decode_responses=True)
async def get_cred_from_redis(user_id: str) -> dict:
"""
JSON 임시 인증정보를 redis에서 가져옴.
"""
jsonified = await cred_db.get(user_id)
try:
return json.loads(jsonified)
except JSONDecodeError:
raise UserNotInDBException # 로그아웃, 혹은 유효기간 만료로 인해 db에 인증정보가 없음.
async def set_cred_to_redis(user_id: str, cred: dict) -> None:
"""
임시 인증정보를 JSON 변환 후 redis에 set
todo 만료
"""
jsonified = json.dumps(cred)
await cred_db.set(user_id, jsonified, CRED_EXP)
async def add_revoked_redis(token: str) -> None:
"""
집합 자료형에 token을 멤버로 등록
"""
await cred_db.sadd(REVOKED_TOKENS, token)
async def is_member_revoked_redis(token: str) -> bool:
"""
멤버에 token이 존재하는지 확인
"""
return await cred_db.sismember(REVOKED_TOKENS, token)
| [
11748,
33918,
198,
6738,
33918,
1330,
19449,
10707,
1098,
12331,
198,
198,
6738,
257,
72,
1850,
271,
1330,
2297,
271,
198,
198,
6738,
764,
11250,
13,
445,
271,
62,
10414,
1330,
367,
10892,
11,
350,
9863,
11,
8740,
1961,
11012,
11,
874... | 1.742942 | 673 |
birthdays = {
"Albert Einstein": "03/14/1879",
"Benjamin Franklin": "01/06/1705",
"Ada Lovelace": "12/10/1815"
}
lst = birthdays.keys()
print("We know the birthday of:")
for x in lst:
print(x)
choice = input("Who's birthday do you want to look up?")
if choice in lst:
print("{}'s birthday is {}.".format(choice, birthdays[choice]))
else:
print("We don't know {}'s birthday".format(choice)) | [
24280,
12545,
796,
1391,
198,
220,
220,
220,
366,
42590,
24572,
1298,
366,
3070,
14,
1415,
14,
1507,
3720,
1600,
198,
220,
220,
220,
366,
11696,
13337,
14021,
1298,
366,
486,
14,
3312,
14,
1558,
2713,
1600,
198,
220,
220,
220,
366,
... | 2.502994 | 167 |